hexsha stringlengths 40 40 | size int64 1 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 239 | max_stars_repo_name stringlengths 5 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 239 | max_issues_repo_name stringlengths 5 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 239 | max_forks_repo_name stringlengths 5 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.03M | avg_line_length float64 1 958k | max_line_length int64 1 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
79555c3ab7464b582554b316a767eadbd25d15e5 | 1,534 | py | Python | tests/urls.py | mvpoland/django-rest-framework-oauth | 5afc705df1351ac411595b9db4d4f0f21ff08fc0 | [
"BSD-3-Clause"
] | null | null | null | tests/urls.py | mvpoland/django-rest-framework-oauth | 5afc705df1351ac411595b9db4d4f0f21ff08fc0 | [
"BSD-3-Clause"
] | null | null | null | tests/urls.py | mvpoland/django-rest-framework-oauth | 5afc705df1351ac411595b9db4d4f0f21ff08fc0 | [
"BSD-3-Clause"
] | null | null | null | from django.http import HttpResponse
from django.urls import include, re_path
from rest_framework.permissions import IsAuthenticated
from rest_framework_oauth import permissions
from rest_framework_oauth.authentication import OAuthAuthentication, OAuth2Authentication
from rest_framework.views import APIView
class MockView(APIView):
    """Minimal DRF view used as an authenticated endpoint in this test URLconf."""
    permission_classes = (IsAuthenticated,)

    def get(self, request):
        # NOTE(review): HttpResponse is handed a dict here; the tests presumably only
        # assert on the response status, not the body - confirm before relying on content.
        return HttpResponse({'a': 1, 'b': 2, 'c': 3})

    def post(self, request):
        return HttpResponse({'a': 1, 'b': 2, 'c': 3})

    def put(self, request):
        return HttpResponse({'a': 1, 'b': 2, 'c': 3})
class OAuth2AuthenticationDebug(OAuth2Authentication):
    # Test-only variant that also accepts the access token via URL query parameters.
    allow_query_params_token = True
# Routes exercising OAuth1/OAuth2 authentication and the read/write scope permission.
urlpatterns = [
    re_path(r'^oauth/$', MockView.as_view(authentication_classes=[OAuthAuthentication])),
    re_path(
        r'^oauth-with-scope/$',
        MockView.as_view(
            authentication_classes=[OAuthAuthentication],
            permission_classes=[permissions.TokenHasReadWriteScope]
        )
    ),
    # Provider endpoints (token grant, authorization) from django-oauth2-provider.
    re_path(r'^oauth2/', include('provider.oauth2.urls')),
    re_path(r'^oauth2-test/$', MockView.as_view(authentication_classes=[OAuth2Authentication])),
    # Debug variant additionally allows passing the token as a query parameter.
    re_path(r'^oauth2-test-debug/$', MockView.as_view(authentication_classes=[OAuth2AuthenticationDebug])),
    re_path(
        r'^oauth2-with-scope-test/$',
        MockView.as_view(
            authentication_classes=[OAuth2Authentication],
            permission_classes=[permissions.TokenHasReadWriteScope]
        )
    ),
]
| 31.958333 | 107 | 0.70013 |
79555d35ea3f68107e09c48885befc1544833a4b | 390 | py | Python | src/wsgi.py | firewut/data-transform-pipelines-api | c62a7aa5fd57102fa67cf715dc78c3365b739925 | [
"MIT"
] | 2 | 2019-01-09T07:42:17.000Z | 2021-08-25T02:43:47.000Z | src/wsgi.py | firewut/data-transform-pipelines-api | c62a7aa5fd57102fa67cf715dc78c3365b739925 | [
"MIT"
] | null | null | null | src/wsgi.py | firewut/data-transform-pipelines-api | c62a7aa5fd57102fa67cf715dc78c3365b739925 | [
"MIT"
] | null | null | null | """
WSGI config for data_transform project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/
"""
import os

from django.core.wsgi import get_wsgi_application

# Point Django at the project's settings module before the application is constructed.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')

# The WSGI callable that servers (gunicorn, uWSGI, ...) import and invoke.
application = get_wsgi_application()
| 22.941176 | 78 | 0.787179 |
79555dacd0fab0641bf26d9c7dcec5ec6d9bd19a | 21,518 | py | Python | src/python/pants/util/dirutil.py | mpopenko-exos/pants | 47d27037c8b13291fc9023e56ddd1b1defdf1b8e | [
"Apache-2.0"
] | null | null | null | src/python/pants/util/dirutil.py | mpopenko-exos/pants | 47d27037c8b13291fc9023e56ddd1b1defdf1b8e | [
"Apache-2.0"
] | 1 | 2018-09-04T17:37:34.000Z | 2018-09-04T19:42:58.000Z | src/python/pants/util/dirutil.py | mpopenko-exos/pants | 47d27037c8b13291fc9023e56ddd1b1defdf1b8e | [
"Apache-2.0"
] | null | null | null | # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import atexit
import errno
import os
import shutil
import stat
import tempfile
import threading
import uuid
from collections import defaultdict
from contextlib import contextmanager
from pathlib import Path
from typing import (
Any,
Callable,
DefaultDict,
FrozenSet,
Iterable,
Iterator,
List,
Optional,
Sequence,
Set,
Tuple,
Union,
overload,
)
from typing_extensions import Literal
from pants.util.strutil import ensure_text
def longest_dir_prefix(path: str, prefixes: Sequence[str]) -> Optional[str]:
  """Given a list of prefixes, return the one that is the longest prefix to the given path.

  Returns None if there are no matches.
  """
  best_prefix: Optional[str] = None
  best_len = 0
  for candidate in prefixes:
    # A candidate wins only if it is strictly longer than the current best AND is a
    # directory-aware prefix of `path`.
    if len(candidate) > best_len and fast_relpath_optional(path, candidate) is not None:
      best_prefix, best_len = candidate, len(candidate)
  return best_prefix
def fast_relpath(path: str, start: str) -> str:
  """A prefix-based relpath, with no normalization or support for returning `..`.

  :param path: The path to relativize.
  :param start: The directory prefix that must contain `path`.
  :returns: `path` relative to `start`.
  :raises ValueError: If `start` is not a directory-aware prefix of `path`.
  """
  relpath = fast_relpath_optional(path, start)
  if relpath is None:
    raise ValueError(f'{start} is not a directory containing {path}')
  return relpath
def fast_relpath_optional(path: str, start: str) -> Optional[str]:
  """A prefix-based relpath, with no normalization or support for returning `..`.

  Returns None if `start` is not a directory-aware prefix of `path`.
  """
  if not start:
    # An empty prefix trivially contains every path.
    return path
  # Ignore a single trailing slash on the prefix when matching.
  prefix_len = len(start) - 1 if start.endswith('/') else len(start)
  if prefix_len > len(path):
    # The prefix is too long to match.
    return None
  if path[:prefix_len] != start[:prefix_len]:
    return None
  if len(path) == prefix_len:
    # The entries are identical: the relative path is empty.
    return ''
  if path[prefix_len] == '/':
    # The prefix names a parent directory of `path`.
    return path[prefix_len + 1:]
  return None
def ensure_relative_file_name(path: Path) -> str:
  """Return a string representing the `path`, with a leading './'.

  This ensures that the returned string can be used as the executable file when executing a
  subprocess, without putting the executable file on the PATH.

  :raises ValueError: If `path` is absolute.
  """
  if not path.is_absolute():
    return f'./{path}'
  raise ValueError(f'path {path} is expected to be relative!')
def safe_mkdir(directory: str, clean: bool = False) -> None:
  """Ensure a directory is present.

  If it's not there, create it. If it is, no-op. If clean is True, ensure the dir is empty.

  :API: public
  """
  if clean:
    safe_rmtree(directory)
  try:
    os.makedirs(directory)
  except OSError as e:
    # EEXIST simply means the path already exists (possibly created by a concurrent
    # caller); anything else is a real failure.
    if e.errno != errno.EEXIST:
      raise


def safe_mkdir_for(path: str, clean: bool = False) -> None:
  """Ensure that the parent directory for a file is present.

  If it's not there, create it. If it is, no-op.

  :param path: The file whose parent directory should exist.
  :param clean: If True, also empty the parent directory first.
  """
  safe_mkdir(os.path.dirname(path), clean=clean)


def safe_mkdir_for_all(paths: Sequence[str]) -> None:
  """Make directories which would contain all of the passed paths.

  This avoids attempting to re-make the same directories, which may be noticeably expensive if many
  paths mostly fall in the same set of directories.
  """
  created_dirs: Set[str] = set()
  for path in paths:
    dir_to_make = os.path.dirname(path)
    # Only issue one mkdir per distinct parent directory.
    if dir_to_make not in created_dirs:
      safe_mkdir(dir_to_make)
      created_dirs.add(dir_to_make)
def safe_file_dump(
  filename: str, payload: Union[bytes, str] = '', mode: str = 'w', makedirs: bool = False
) -> None:
  """Write a string to a file.

  This method is "safe" to the extent that `safe_open` is "safe". See the explanation on the method
  doc there.

  When `payload` is an empty string (the default), this method can be used as a concise way to
  create an empty file along with its containing directory (or truncate it if it already exists).

  :param filename: The filename of the file to write to.
  :param payload: The string to write to the file.
  :param mode: A mode argument for the python `open` builtin which should be a write mode variant.
    Defaults to 'w'.
  :param makedirs: Whether to make all parent directories of this file before making it.
  """
  if makedirs:
    # NOTE(review): `safe_open` below already creates the parent directory chain via
    # `safe_mkdir_for`, so this flag appears redundant - confirm with callers before removing.
    os.makedirs(os.path.dirname(filename), exist_ok=True)
  with safe_open(filename, mode=mode) as f:
    f.write(payload)


@overload
def maybe_read_file(filename: str) -> Optional[str]: ...
@overload
def maybe_read_file(filename: str, binary_mode: Literal[False]) -> Optional[str]: ...
@overload
def maybe_read_file(filename: str, binary_mode: Literal[True]) -> Optional[bytes]: ...
@overload
def maybe_read_file(filename: str, binary_mode: bool) -> Optional[Union[bytes, str]]: ...
def maybe_read_file(filename: str, binary_mode: bool = False) -> Optional[Union[bytes, str]]:
  """Read and return the contents of a file in a single file.read().

  :param filename: The filename of the file to read.
  :param binary_mode: Read from file as bytes or unicode.
  :returns: The contents of the file, or None if opening the file fails for any reason
  """
  try:
    return read_file(filename, binary_mode=binary_mode)
  except IOError:
    # Missing files, permission errors, etc. are deliberately collapsed to None.
    return None
@overload
def read_file(filename: str) -> str: ...
@overload
def read_file(filename: str, binary_mode: Literal[False]) -> str: ...
@overload
def read_file(filename: str, binary_mode: Literal[True]) -> bytes: ...
@overload
def read_file(filename: str, binary_mode: bool) -> Union[str, bytes]: ...
def read_file(filename: str, binary_mode: bool = False) -> Union[bytes, str]:
  """Read and return the entire contents of a file in one read() call.

  :param filename: The filename of the file to read.
  :param binary_mode: Read from file as bytes or unicode.
  :returns: The contents of the file.
  """
  open_mode = 'rb' if binary_mode else 'r'
  with open(filename, open_mode) as handle:
    contents: Union[bytes, str] = handle.read()
  return contents
def safe_walk(path: Union[bytes, str], **kwargs: Any) -> Iterator[Tuple[str, List[str], List[str]]]:
  """Just like os.walk, but ensures that the returned values are unicode objects.

  This isn't strictly safe, in that it is possible that some paths
  will not be decodeable, but that case is rare, and the only
  alternative is to somehow avoid all interaction between paths and
  unicode objects, which seems especially tough in the presence of
  unicode_literals. See e.g.
  https://mail.python.org/pipermail/python-dev/2008-December/083856.html

  :API: public
  """
  # If os.walk is given a text argument, it yields text values; if it
  # is given a binary argument, it yields binary values.
  return os.walk(ensure_text(path), **kwargs)


class ExistingFileError(ValueError):
  """Indicates a copy operation would over-write a file with a directory."""


class ExistingDirError(ValueError):
  """Indicates a copy operation would over-write a directory with a file."""
def mergetree(
  src: str, dst: str, symlinks: bool = False, ignore=None, file_filter=None
) -> None:
  """Just like `shutil.copytree`, except the `dst` dir may exist.

  The `src` directory will be walked and its contents copied into `dst`. If `dst` already exists the
  `src` tree will be overlayed in it; ie: existing files in `dst` will be over-written with files
  from `src` when they have the same subtree path.

  :param src: The source tree to copy from.
  :param dst: The destination to overlay the source tree onto (created if missing).
  :param symlinks: If True, re-create symlinks instead of copying their targets.
  :param ignore: Optional `shutil.copytree`-style callback `(dir, names) -> names_to_ignore`.
  :param file_filter: Optional predicate over src-relative file paths; files it rejects are skipped.
  :raises ExistingFileError: If a source directory collides with a destination non-directory.
  :raises ExistingDirError: If a source file collides with a destination non-file.
  """
  safe_mkdir(dst)
  if not file_filter:
    # Default: copy every file.
    file_filter = lambda _: True
  for src_path, dirnames, filenames in safe_walk(src, topdown=True, followlinks=True):
    ignorenames: FrozenSet[str] = frozenset()
    if ignore:
      to_ignore = ignore(src_path, dirnames + filenames)
      if to_ignore:
        ignorenames = frozenset(to_ignore)
    src_relpath = os.path.relpath(src_path, src)
    dst_path = os.path.join(dst, src_relpath)
    visit_dirs = []
    for dirname in dirnames:
      if dirname in ignorenames:
        continue
      src_dir = os.path.join(src_path, dirname)
      dst_dir = os.path.join(dst_path, dirname)
      if os.path.exists(dst_dir):
        if not os.path.isdir(dst_dir):
          raise ExistingFileError('While copying the tree at {} to {}, encountered directory {} in '
                                  'the source tree that already exists in the destination as a '
                                  'non-directory.'.format(src, dst, dst_dir))
        visit_dirs.append(dirname)
      elif symlinks and os.path.islink(src_dir):
        link = os.readlink(src_dir)
        os.symlink(link, dst_dir)
        # We need to halt the walk at a symlink dir; so we do not place dirname in visit_dirs
        # here.
      else:
        os.makedirs(dst_dir)
        visit_dirs.append(dirname)
    # In-place mutate dirnames to halt the walk when the dir is ignored by the caller.
    dirnames[:] = visit_dirs
    for filename in filenames:
      if filename in ignorenames:
        continue
      src_file_relpath = os.path.join(src_relpath, filename)
      if not file_filter(src_file_relpath):
        continue
      dst_filename = os.path.join(dst_path, filename)
      if os.path.exists(dst_filename):
        if not os.path.isfile(dst_filename):
          raise ExistingDirError('While copying the tree at {} to {}, encountered file {} in the '
                                 'source tree that already exists in the destination as a non-file.'
                                 .format(src, dst, dst_filename))
        else:
          # Remove the stale destination file so copy2/symlink below starts fresh.
          os.unlink(dst_filename)
      src_filename = os.path.join(src_path, filename)
      if symlinks and os.path.islink(src_filename):
        link = os.readlink(src_filename)
        os.symlink(link, dst_filename)
      else:
        shutil.copy2(src_filename, dst_filename)
# Type of the cleanup callback that removes registered temporary directories at exit.
_MkdtempCleanerType = Callable[[], None]
# The cleaner currently registered with atexit, if any (at most one per process).
_MKDTEMP_CLEANER: Optional[_MkdtempCleanerType] = None
# pid -> temp dirs created by that process; keyed by pid so a forked child does not
# delete directories still owned by its parent.
_MKDTEMP_DIRS: DefaultDict[int, Set[str]] = defaultdict(set)
# Guards the registration state above across threads.
_MKDTEMP_LOCK = threading.RLock()


def _mkdtemp_atexit_cleaner() -> None:
  # Only remove dirs registered by the current process (see the pid keying above).
  for td in _MKDTEMP_DIRS.pop(os.getpid(), []):
    safe_rmtree(td)


def _mkdtemp_unregister_cleaner() -> None:
  # Forget the registered cleaner so a later registration re-arms atexit.
  global _MKDTEMP_CLEANER
  _MKDTEMP_CLEANER = None


def _mkdtemp_register_cleaner(cleaner: _MkdtempCleanerType) -> None:
  global _MKDTEMP_CLEANER
  assert callable(cleaner)
  # Register with atexit at most once; subsequent calls are no-ops.
  if _MKDTEMP_CLEANER is None:
    atexit.register(cleaner)
    _MKDTEMP_CLEANER = cleaner


def safe_mkdtemp(cleaner: _MkdtempCleanerType = _mkdtemp_atexit_cleaner, **kw: Any) -> str:
  """Create a temporary directory that is cleaned up on process exit.

  Arguments are as to tempfile.mkdtemp.

  :API: public
  """
  # Proper lock sanitation on fork [issue 6721] would be desirable here.
  with _MKDTEMP_LOCK:
    return register_rmtree(tempfile.mkdtemp(**kw), cleaner=cleaner)


def register_rmtree(directory: str, cleaner: _MkdtempCleanerType = _mkdtemp_atexit_cleaner) -> str:
  """Register an existing directory to be cleaned up at process exit.

  :returns: The `directory` argument, unchanged, for call-chaining convenience.
  """
  with _MKDTEMP_LOCK:
    _mkdtemp_register_cleaner(cleaner)
    _MKDTEMP_DIRS[os.getpid()].add(directory)
  return directory
def safe_rmtree(directory: str) -> None:
  """Delete a directory if it's present. If it's not present, no-op.

  Note that if the directory argument is a symlink, only the symlink will
  be deleted.

  :API: public
  """
  if os.path.islink(directory):
    # Remove only the link itself, never the tree it points at.
    safe_delete(directory)
  else:
    shutil.rmtree(directory, ignore_errors=True)


def safe_open(filename, *args, **kwargs):
  """Open a file safely, ensuring that its directory exists.

  Arguments after `filename` are passed straight through to the `open` builtin.

  :API: public
  """
  safe_mkdir_for(filename)
  return open(filename, *args, **kwargs)
def safe_delete(filename: str) -> None:
  """Delete a file safely. If it's not present, no-op."""
  try:
    os.unlink(filename)
  except FileNotFoundError:
    # Already gone - treat as success. Any other OSError propagates.
    pass
def safe_concurrent_rename(src: str, dst: str) -> None:
  """Rename src to dst, ignoring errors due to dst already existing.

  Useful when concurrent processes may attempt to create dst, and it doesn't matter who wins.
  """
  # Delete dst, in case it existed (with old content) even before any concurrent processes
  # attempted this write. This ensures that at least one process writes the new content.
  if os.path.isdir(src): # Note that dst may not exist, so we test for the type of src.
    safe_rmtree(dst)
  else:
    safe_delete(dst)
  try:
    shutil.move(src, dst)
  except IOError as e:
    # EEXIST means a concurrent process won the rename race; that is fine.
    if e.errno != errno.EEXIST:
      raise


def safe_rm_oldest_items_in_dir(
  root_dir: str, num_of_items_to_keep: int, excludes: Iterable[str] = frozenset()
) -> None:
  """
  Keep `num_of_items_to_keep` newly modified items besides `excludes` in `root_dir` then remove the rest.

  :param root_dir: the folder to examine
  :param num_of_items_to_keep: number of files/folders/symlinks to keep after the cleanup
  :param excludes: absolute paths excluded from removal (must be prefixed with `root_dir`)
  """
  if os.path.isdir(root_dir):
    found_files = []
    for old_file in os.listdir(root_dir):
      full_path = os.path.join(root_dir, old_file)
      if full_path not in excludes:
        found_files.append((full_path, os.path.getmtime(full_path)))
    # Sort newest-first so the survivors are the most recently modified items.
    found_files = sorted(found_files, key=lambda x: x[1], reverse=True)
    for cur_file, _ in found_files[num_of_items_to_keep:]:
      rm_rf(cur_file)


@contextmanager
def safe_concurrent_creation(target_path: str) -> Iterator[str]:
  """A contextmanager that yields a temporary path and renames it to a final target path when the
  contextmanager exits.

  Useful when concurrent processes may attempt to create a file, and it doesn't matter who wins.

  :param target_path: The final target path to rename the temporary path to.
  :yields: A temporary path containing the original path with a unique (uuid4) suffix.
  """
  safe_mkdir_for(target_path)
  tmp_path = f'{target_path}.tmp.{uuid.uuid4().hex}'
  try:
    yield tmp_path
  except Exception:
    # The caller failed; discard the partial temp artifact before re-raising.
    rm_rf(tmp_path)
    raise
  else:
    # The caller may legitimately not have created the temp file; only rename if it exists.
    if os.path.exists(tmp_path):
      safe_concurrent_rename(tmp_path, target_path)
def chmod_plus_x(path: str) -> None:
  """Equivalent of unix `chmod a+x path`: grant execute wherever read is granted."""
  mode = os.stat(path).st_mode & 0o777
  # For each of user/group/other: if the read bit is set, also set the execute bit.
  for read_bit, exec_bit in (
    (stat.S_IRUSR, stat.S_IXUSR),
    (stat.S_IRGRP, stat.S_IXGRP),
    (stat.S_IROTH, stat.S_IXOTH),
  ):
    if mode & read_bit:
      mode |= exec_bit
  os.chmod(path, mode)
def absolute_symlink(source_path: str, target_path: str) -> None:
  """Create a symlink at target pointing to source using the absolute path.

  :param source_path: Absolute path to source file
  :param target_path: Absolute path to intended symlink
  :raises ValueError if source_path or link_path are not unique, absolute paths
  :raises OSError on failure UNLESS file already exists or no such file/directory
  """
  if not os.path.isabs(source_path):
    raise ValueError(f"Path for source : {source_path} must be absolute")
  if not os.path.isabs(target_path):
    raise ValueError(f"Path for link : {target_path} must be absolute")
  if source_path == target_path:
    raise ValueError(f"Path for link is identical to source : {source_path}")
  try:
    if os.path.lexists(target_path):
      # Clear the way: unlink an existing link/file, or remove an entire directory tree.
      if os.path.islink(target_path) or os.path.isfile(target_path):
        os.unlink(target_path)
      else:
        shutil.rmtree(target_path)
    safe_mkdir_for(target_path)
    os.symlink(source_path, target_path)
  except OSError as e:
    # Another run may beat us to deletion or creation.
    if not (e.errno == errno.EEXIST or e.errno == errno.ENOENT):
      raise


def relative_symlink(source_path: str, link_path: str) -> None:
  """Create a symlink at link_path pointing to relative source

  :param source_path: Absolute path to source file
  :param link_path: Absolute path to intended symlink
  :raises ValueError if source_path or link_path are not unique, absolute paths
  :raises OSError on failure UNLESS file already exists or no such file/directory
  """
  if not os.path.isabs(source_path):
    raise ValueError(f"Path for source:{source_path} must be absolute")
  if not os.path.isabs(link_path):
    raise ValueError(f"Path for link:{link_path} must be absolute")
  if source_path == link_path:
    raise ValueError(f"Path for link is identical to source:{source_path}")
  # The failure state below had a long life as an uncaught error. No behavior was changed here, it just adds a catch.
  # Raising an exception does differ from absolute_symlink, which takes the liberty of deleting existing directories.
  if os.path.isdir(link_path) and not os.path.islink(link_path):
    raise ValueError(f"Path for link would overwrite an existing directory: {link_path}")
  try:
    if os.path.lexists(link_path):
      os.unlink(link_path)
    # Link via a path relative to the link's own directory, so the pair can be relocated together.
    rel_path = os.path.relpath(source_path, os.path.dirname(link_path))
    safe_mkdir_for(link_path)
    os.symlink(rel_path, link_path)
  except OSError as e:
    # Another run may beat us to deletion or creation.
    if not (e.errno == errno.EEXIST or e.errno == errno.ENOENT):
      raise
def relativize_path(path: str, rootdir: str) -> str:
  """Return `path` expressed relative to `rootdir`.

  :API: public
  """
  # Note that we can't test for length and return the shorter of the two, because we need these
  # paths to be stable across systems (e.g., because they get embedded in analysis files),
  # and this choice might be inconsistent across systems. So we assume the relpath is always
  # shorter. We relativize because of a known case of very long full path prefixes on Mesos,
  # so this seems like the right heuristic.
  # Note also that we mustn't call realpath on the path - we need to preserve the symlink structure.
  return os.path.relpath(path, rootdir)


# When running pants under mesos/aurora, the sandbox pathname can be very long. Since it gets
# prepended to most components in the classpath (some from ivy, the rest from the build),
# in some runs the classpath gets too big and exceeds ARG_MAX.
# We prevent this by using paths relative to the current working directory.
def relativize_paths(paths: Sequence[str], rootdir: str) -> List[str]:
  """Apply `relativize_path` to each path in `paths`."""
  return [relativize_path(path, rootdir) for path in paths]
def touch(path: str, times: Optional[Union[int, Tuple[int, int]]] = None):
  """Equivalent of unix `touch path`.

  :API: public

  :path: The file to touch.
  :times Either a tuple of (atime, mtime) or else a single time to use for both. If not
         specified both atime and mtime are updated to the current time.
  :raises ValueError: If `times` is a tuple whose length is not exactly 2.
  """
  # Reject any malformed tuple up front. The original check only caught tuples LONGER
  # than 2, so a 1-tuple slipped through and crashed in os.utime with a confusing
  # TypeError instead of this explicit error.
  if isinstance(times, tuple) and len(times) != 2:
    raise ValueError(
      "`times` must either be a tuple of (atime, mtime) or else a single time to use for both."
    )
  if isinstance(times, int):
    # A single timestamp applies to both atime and mtime.
    times = (times, times)
  # Open in append mode to create the file (and its parent dir, via safe_open) if missing.
  with safe_open(path, 'a'):
    os.utime(path, times)
def recursive_dirname(f: str) -> Iterator[str]:
  """Given a relative path like 'a/b/c/d', yield all ascending path components like:

        'a/b/c/d'
        'a/b/c'
        'a/b'
        'a'
        ''
  """
  prev = None
  while f != prev:
    yield f
    prev = f
    f = os.path.dirname(f)
  # For relative paths, os.path.dirname bottoms out at '' INSIDE the loop, so an
  # unconditional trailing yield would emit '' twice (the original bug). Only yield
  # the final '' when the loop ended elsewhere (e.g. at '/' for absolute paths).
  if prev != '':
    yield ''
def get_basedir(path: str) -> str:
  """Returns the base directory of a path.

  Examples:
    get_basedir('foo/bar/baz') --> 'foo'
    get_basedir('/foo/bar/baz') --> ''
    get_basedir('foo') --> 'foo'
  """
  separator_index = path.find(os.sep)
  # No separator means the path is a single component - return it whole.
  return path if separator_index == -1 else path[:separator_index]
def rm_rf(name: str) -> None:
  """Remove a file or a directory similarly to running `rm -rf <name>` in a UNIX shell.

  :param name: the name of the file or directory to remove.
  :raises: OSError on error.
  """
  if not os.path.exists(name):
    # Nothing to do. A concurrent deletion after this check is still handled by the
    # ENOENT branch below.
    return
  try:
    # Avoid using safe_rmtree so we can detect failures.
    shutil.rmtree(name)
  except OSError as e:
    if e.errno == errno.ENOTDIR:
      # 'Not a directory', but a file. Attempt to os.unlink the file, raising OSError on failure.
      safe_delete(name)
    elif e.errno != errno.ENOENT:
      # Pass on 'No such file or directory', otherwise re-raise OSError to surface perm issues etc.
      raise
def split_basename_and_dirname(path: str) -> Tuple[str, str]:
  """Split the path of an existing regular file into (dirname, basename).

  :raises ValueError: If `path` does not name an existing regular file.
  """
  if not os.path.isfile(path):
    raise ValueError(f"{path} does not exist or is not a regular file.")
  dirname, basename = os.path.split(path)
  return dirname, basename
def check_no_overlapping_paths(paths: Iterable[str]) -> None:
  """Given a list of paths, ensure that all are unique and do not have the same prefix."""
  all_paths = list(paths)
  for index, path in enumerate(all_paths):
    # Compare each path against every OTHER path in the collection.
    others = all_paths[:index] + all_paths[index + 1:]
    if path in others:
      raise ValueError(f'{path} appeared more than once. All paths must be unique.')
    for other in others:
      if path in other:
        raise ValueError(f'{path} and {other} have the same prefix. All paths must be unique and cannot overlap.')
def is_executable(path: str) -> bool:
  """Returns whether a path names an existing executable file."""
  return os.access(path, os.X_OK) and os.path.isfile(path)
def is_readable_dir(path: str) -> bool:
  """Returns whether a path names an existing directory we can list and read files from."""
  # R_OK lets us read directory entries; X_OK lets us traverse into them.
  return all((
    os.path.isdir(path),
    os.access(path, os.R_OK),
    os.access(path, os.X_OK),
  ))
def is_writable_dir(path: str) -> bool:
  """Returns whether a path names an existing directory that we can create and modify files in.

  We call is_readable_dir(), so this definition of "writable" is a superset of that.

  :param path: The directory path to check.
  :returns: True if `path` is a readable directory that is also writable.
  """
  return is_readable_dir(path) and os.access(path, os.W_OK)
| 33.833333 | 117 | 0.702853 |
79555e7e4f1a2b07d8eacc83f48b7d8900adb2d6 | 208 | py | Python | tests/packages/named/named-package2/setup.py | sbg/dante | 104543c3ccb5e762d3e9cd6e8fa04c5fa91e2227 | [
"Apache-2.0"
] | 9 | 2017-11-03T15:53:01.000Z | 2019-10-01T14:09:56.000Z | tests/packages/named/named-package2/setup.py | sbg/dante | 104543c3ccb5e762d3e9cd6e8fa04c5fa91e2227 | [
"Apache-2.0"
] | 4 | 2019-10-01T12:53:58.000Z | 2021-04-26T15:39:16.000Z | tests/packages/named/named-package2/setup.py | sbg/dante | 104543c3ccb5e762d3e9cd6e8fa04c5fa91e2227 | [
"Apache-2.0"
] | 5 | 2017-11-03T15:50:40.000Z | 2021-09-13T08:50:45.000Z | from setuptools import setup, find_packages
install_requires = []
setup(
name='named-package2',
version='0.0.1+local-version',
install_requires=install_requires,
packages=find_packages(),
)
| 18.909091 | 43 | 0.725962 |
79555e92ec3964d0528f7c25dc48495c351e09d2 | 2,341 | py | Python | Chapter02/rest.py | PacktPublishing/JUNOS-Automation-Cookbook | d61a007f585207c44d1ad5147c3660d8049ffc7d | [
"MIT"
] | 8 | 2018-02-25T17:22:40.000Z | 2021-11-08T13:10:37.000Z | Chapter02/rest.py | PacktPublishing/JUNOS-Automation-Cookbook | d61a007f585207c44d1ad5147c3660d8049ffc7d | [
"MIT"
] | null | null | null | Chapter02/rest.py | PacktPublishing/JUNOS-Automation-Cookbook | d61a007f585207c44d1ad5147c3660d8049ffc7d | [
"MIT"
] | 7 | 2017-12-28T14:04:09.000Z | 2021-06-18T05:08:11.000Z | #!/usr/bin/env python
import sys
import httplib
import ssl
import base64
import argparse
import getpass
import json
import os
import io
# Error handling
# Print uncaught exceptions as a terse "Type: message" line instead of a full traceback.
def onError(exception_type, exception, traceback):
    print "%s: %s" % (exception_type.__name__, exception)
sys.excepthook = onError

# Command-line interface: target router plus output type, RPC name, TLS cert, port, user.
cmdline = argparse.ArgumentParser(description="Python JUNOS REST Client")
cmdline.add_argument("target", help="Target router to query")
cmdline.add_argument("-t", choices=["xml", "json", "text"], help="Type of output", default="xml")
cmdline.add_argument("-r", metavar="rpc-call", help="RPC call to make", default="get-software-information")
cmdline.add_argument("-c", metavar="certificate", help="Router's self-signed certificate .pem file")
cmdline.add_argument("-p", metavar="port", help="TCP port", default=8443)
cmdline.add_argument("-u", metavar="username", help="Remote username", default=getpass.getuser())
args=cmdline.parse_args()

# Load the password for "user@target" from ~/.pwaccess (a JSON map), falling back to
# an interactive prompt on any failure. The file must be user-only (0600).
try:
    passwordFile = os.path.expanduser("~")+"/.pwaccess"
    # 63 == 0o77: refuse the file if any group/other permission bits are set.
    if os.stat(passwordFile)[0]&63==0:
        passwords = json.load(io.open(passwordFile))
        password = passwords[args.u+"@"+args.target]
    else:
        sys.stderr.write("Warning: password file "+passwordFile+" must be user RW (0600) only!\n")
        sys.exit(1)
except Exception as e:
    print(e)
    password=getpass.getpass("Password: ")

# HTTP Basic auth credential.
basicAuthorization = base64.b64encode(args.u+":"+password)

# TLS: verify against the router's self-signed cert when supplied, otherwise disable
# verification entirely (routers typically present self-signed certs).
context = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH, cafile=args.c)
context.check_hostname=False
if args.c==None:
    context.verify_mode=ssl.CERT_NONE

conn = httplib.HTTPSConnection(args.target, args.p, context=context)
# Map the requested output type onto the matching Accept header.
headers = { 'Authorization' : 'Basic %s' % basicAuthorization,
            'Accept': "text/xml" if args.t=="xml" else
                      "application/json" if args.t=="json" else "text/plain" }
try:
    conn.request("GET", '/rpc/'+args.r, headers=headers)
except ssl.SSLError as e:
    sys.stderr.write("SSL error: "+str(e))
    sys.exit()

response = conn.getresponse()
responseData = response.read()
print response.status, httplib.responses[response.status]
if responseData:
    print responseData
    # For JSON output, additionally pull out the JUNOS version when present.
    if args.t=="json":
        data = json.loads(responseData)
        if 'software-information' in data:
            print "Software version: ", data['software-information'][0]['junos-version'][0]['data']
| 30.802632 | 107 | 0.70739 |
79555fa242bc028693b40063a38396141daba34d | 4,113 | py | Python | plugins/mutePersonal.py | micodev/botShell | e41de4545460cbb2bd00815a8681275af432ec69 | [
"MIT"
] | 24 | 2020-07-22T00:14:25.000Z | 2021-09-24T12:30:05.000Z | plugins/mutePersonal.py | kossofe/botShell | 01f09d93c0ef560cefd37c59421acafe937521cd | [
"MIT"
] | null | null | null | plugins/mutePersonal.py | kossofe/botShell | 01f09d93c0ef560cefd37c59421acafe937521cd | [
"MIT"
] | 19 | 2020-07-17T21:01:31.000Z | 2022-02-17T11:07:43.000Z | import asyncio
from utilities import utilities
from Db.mute_sql import addMuteUser, getMutedUsers, getMutedUser, remMuteUser
from telethon import utils, errors
import re
def escape(strin):
    """Return *strin* with every non-alphanumeric character replaced by '-'."""
    return "".join(ch if ch.isalnum() else "-" for ch in strin)
async def mute_user(message, from_id, chat_id, name):
    """Revoke send-message rights for `from_id` in `chat_id` and record the mute in the DB."""
    try:
        if getMutedUser(chat_id, from_id):
            return await message.reply("User already muted.")
        await utilities.client.edit_permissions(chat_id, from_id, send_messages=False)
        # Store the name with non-alphanumeric characters replaced (see `escape`).
        addMuteUser(chat_id, escape(name), from_id)
        return await message.reply("User muted successfully.")
    except errors.ChatAdminRequiredError as e:
        return await message.reply("Make me admin in group first.")
    except errors.UserAdminInvalidError:
        return await message.reply("Do not use it with admin dude.")
    except Exception as e:
        # Unexpected failure: log in red and surface the error text to the chat.
        utilities.prRed(str(type(e)) + " Error : " + str(e))
        return await message.reply(str(e))
async def unmute_user(message, from_id, chat_id):
    """Restore send-message rights for `from_id` in `chat_id` and remove the DB record."""
    try:
        if not getMutedUser(chat_id, from_id):
            return await message.reply("User already unmuted.")
        await utilities.client.edit_permissions(chat_id, from_id, send_messages=True)
        remMuteUser(chat_id, from_id)
        return await message.reply("User unmuted successfully.")
    except errors.ChatAdminRequiredError as e:
        return await message.reply("Make me admin in group first.")
    except errors.UserAdminInvalidError:
        return await message.reply("Do not use it with admin dude.")
    except Exception as e:
        # Unexpected failure: log in red and surface the error text to the chat.
        utilities.prRed(str(type(e)) + " Error : " + str(e))
        return await message.reply(str(e))
async def run(message, matches, chat_id, step, crons=None):
    """Dispatch mute/unmute commands to `mute_user`/`unmute_user`.

    `matches` is either a plain string (a pattern with one capture group, e.g. "mu")
    or a list (a pattern with two groups, e.g. ["mu", "@user"]), depending on which
    trigger pattern in `plugin["patterns"]` fired.
    """
    response = []
    # Muting only makes sense in groups.
    if message.is_private:
        return []
    if matches == "getMuted":
        muted = getMutedUsers(chat_id)
        for user in muted:
            print(user.user_id)
    # Two-group form: "[!/#]mu <username or id>". (For a plain string match,
    # matches[0] is a single character, so these comparisons are False.)
    if matches[0] == "mu":
        if re.match(r"@[a-zA-Z][\w\d]{3,30}[a-zA-Z\d]", matches[1]):
            user = await utilities.client.get_entity(matches[1])
            name = user.first_name
            return [mute_user(message, user.id, chat_id, name)]
        elif re.match(r"(\d)", matches[1]):
            return [mute_user(message, matches[1], chat_id, "muteById")]
        else:
            return [message.reply("please, use by reply or use valid username and id")]
    elif matches[0] == "rmu":
        if re.match(r"@[a-zA-Z][\w\d]{3,30}[a-zA-Z\d]", matches[1]):
            user = await utilities.client.get_entity(matches[1])
            name = user.first_name
            return [unmute_user(message, user.id, chat_id)]
        elif re.match(r"(\d)", matches[1]):
            return [unmute_user(message, matches[1], chat_id)]
        else:
            return [message.reply("please, use by reply or use valid username and id")]
    # Bare "[!/#]mu" / "[!/#]rmu": act on the replied-to message's sender.
    elif matches == "mu":
        if message.is_reply:
            msg = await message.get_reply_message()
            fromId = msg.from_id
            chat_id = msg.chat_id
            name = (await msg.get_sender()).first_name
            return [mute_user(message, fromId, chat_id, name)]
    elif matches == "rmu":
        if message.is_reply:
            msg = await message.get_reply_message()
            fromId = msg.from_id
            chat_id = msg.chat_id
            return [unmute_user(message, fromId, chat_id)]
    return response
# Plugin descriptor consumed by the bot loader: metadata, usage help, the async
# handler, the sudo-only restriction, and the regex triggers that route to `run`.
plugin = {
    "name": "mute users",
    "desc": "Mute/unmute users in chat.",
    "usage": [
        "[!/#]mu in reply to message to mute user.",
        "[!/#]rmu in reply to message to unmute user.",
        "[!/#]mu <id or username> to mute user by id/username.",
        "[!/#]rmu <id or username> to unmute user by id/username.",
    ],
    "run": run,
    "sudo": True,
    # One-group patterns hand `run` a string; two-group patterns hand it a list.
    "patterns": [
        "^[!/#](getMuted)",
        "^[!/#](mu)$",
        "^[!/#](rmu)$",
        "^[!/#](mu) (.+)$",
        "^[!/#](rmu) (.+)$",
    ],
}
| 36.078947 | 87 | 0.601021 |
7955601b3b1dcf87c800839d203364236dbf8570 | 909 | py | Python | pmdarima/compat/statsmodels.py | Saravji/pmdarima | 7f42e36beb888d9e1e7e41b0d9c9f7419c730a3a | [
"MIT"
] | 1 | 2020-11-22T00:41:47.000Z | 2020-11-22T00:41:47.000Z | pmdarima/compat/statsmodels.py | Saravji/pmdarima | 7f42e36beb888d9e1e7e41b0d9c9f7419c730a3a | [
"MIT"
] | null | null | null | pmdarima/compat/statsmodels.py | Saravji/pmdarima | 7f42e36beb888d9e1e7e41b0d9c9f7419c730a3a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Handle inconsistencies in the statsmodels API versions
from __future__ import absolute_import
# Public API of this compatibility shim.
__all__ = [
    'bind_df_model'
]
# Default maximum number of fit iterations, per model family.
DEFAULT_NON_SEASONAL_MAXITER = 500  # ARMA, ARIMA
DEFAULT_SEASONAL_MAXITER = 50  # SARIMAX
def bind_df_model(model_fit, arima_results):
    """Ensure ``arima_results`` carries a ``df_model`` attribute.

    Older statsmodels releases do not populate the model degrees of
    freedom on the results wrapper, so we derive it from the fitted
    model's component counts and attach it in place. An existing
    ``df_model`` value is left untouched.

    Parameters
    ----------
    model_fit : ARMA, ARIMA or SARIMAX
        The fitted model.

    arima_results : ModelResultsWrapper
        The results wrapper.
    """
    if hasattr(arima_results, 'df_model'):
        return  # already present -- nothing to do
    # Degrees of freedom = total number of estimated terms across all
    # model components (exogenous, trend, AR/MA, seasonal AR/MA).
    component_counts = (
        model_fit.k_exog,
        model_fit.k_trend,
        model_fit.k_ar,
        model_fit.k_ma,
        model_fit.k_seasonal_ar,
        model_fit.k_seasonal_ma,
    )
    arima_results.df_model = sum(component_counts)
| 26.735294 | 67 | 0.687569 |
795560e62a1fe03a1eb6a033c3777ff984164caa | 1,946 | py | Python | setup.py | google/tmppy | faf67af1213ee709f28cc5f492ec4903c51d4104 | [
"Apache-2.0"
] | 27 | 2017-10-02T01:17:35.000Z | 2021-10-16T23:31:46.000Z | setup.py | google/tmppy | faf67af1213ee709f28cc5f492ec4903c51d4104 | [
"Apache-2.0"
] | 51 | 2017-10-01T09:38:22.000Z | 2018-10-13T16:39:39.000Z | setup.py | google/tmppy | faf67af1213ee709f28cc5f492ec4903c51d4104 | [
"Apache-2.0"
] | 9 | 2017-11-04T13:12:27.000Z | 2021-10-16T23:31:38.000Z | #!/usr/bin/env python3
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
import codecs
import os
import m2r
# Long description for PyPI: the Markdown README converted to reST via m2r.
parent_path = os.path.abspath(os.path.dirname(__file__))
with codecs.open(os.path.join(parent_path, 'README.md'), encoding='utf-8') as f:
    long_description = m2r.convert(f.read())
# Package metadata; ``py2tmp`` is exposed as a console-script entry point.
setuptools.setup(
    name='TMPPy',
    version='0.1.3',
    description='A subset of Python that can be compiled to C++ meta-functions using the py2tmp _compiler',
    long_description=long_description,
    url='https://github.com/google/tmppy',
    author='Marco Poletti',
    author_email='poletti.marco@gmail.com',
    license='Apache 2.0',
    keywords='C++ metaprogramming _compiler templates',
    python_requires='>=3',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Build Tools',
        'License :: OSI Approved :: Apache Software License',
        # TODO: check which 3.x Python versions work and update this.
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
    packages=setuptools.find_packages(exclude=['*.tests', 'extras']),
    # Ship the single public header alongside the Python package.
    data_files=[('include/tmppy', ['include/tmppy/tmppy.h'])],
    entry_points={
        'console_scripts': ['py2tmp=py2tmp:main'],
    },
)
| 36.037037 | 107 | 0.689106 |
79556149473003dd5d83b37c79ef7076efc18f47 | 6,698 | py | Python | Transformers/Python/tests/test_cli.py | wubero/Lampion | 8a81b3381dee48ffab8cf7ee1b57e0eea8aaeba2 | [
"MIT"
] | 1 | 2022-02-20T11:42:18.000Z | 2022-02-20T11:42:18.000Z | Transformers/Python/tests/test_cli.py | ciselab/Lampion | ba457d152a83e9b58072ec4676cc340b5b5afb1b | [
"MIT"
] | 28 | 2021-12-06T07:10:27.000Z | 2022-03-25T09:42:51.000Z | Transformers/Python/tests/test_cli.py | wubero/Lampion | 8a81b3381dee48ffab8cf7ee1b57e0eea8aaeba2 | [
"MIT"
] | 2 | 2021-11-25T08:32:45.000Z | 2022-02-20T11:42:27.000Z | import os
import libcst
import lampion.cli
import pytest
# This prefix works for running pytest on project root
path_prefix: str = "./tests"
# You may want to change it to "./" to run the tests in the IDE.
# --- lampion.cli.read_input_dir: reading single files and directories ---
def test_read_input_files_on_folder_should_give_two_csts():
    # A directory with two valid .py files yields one CST per file.
    csts = lampion.cli.read_input_dir(f"{path_prefix}/test_inputs/multiple_files")
    assert len(csts) == 2
def test_read_input_files_on_buggy_folder_should_give_no_csts():
    # The read_input should not find valid files, but should also not fail.
    # A "graceful" empty dir is expected
    csts = lampion.cli.read_input_dir(f"{path_prefix}/test_inputs/buggy_files")
    assert len(csts) == 0
def test_read_input_files_on_buggy_and_ok_folder_should_give_two_csts():
    # The buggy files should be ignored, while the ok files should be read in.
    # This means 2 buggy, 2 ok.
    csts = lampion.cli.read_input_dir(f"{path_prefix}/test_inputs/buggy_and_ok_files")
    assert len(csts) == 2
def test_read_input_file_on_buggy_file_should_throw_error():
    # Pointing directly at a broken file (rather than a folder) must raise.
    with pytest.raises(libcst._exceptions.ParserSyntaxError):
        lampion.cli.read_input_dir(f"{path_prefix}/test_inputs/buggy_files/faulty_py3.py")
def test_read_input_file_on_python2_file_should_throw_error():
    # Python 2 syntax is rejected by libcst's Python 3 parser.
    with pytest.raises(libcst._exceptions.ParserSyntaxError):
        lampion.cli.read_input_dir(f"{path_prefix}/test_inputs/buggy_files/python2.py")
def test_read_input_files_on_file_should_give_one_cst():
    csts = lampion.cli.read_input_dir(f"{path_prefix}/test_inputs/hello_world.py")
    assert len(csts) == 1
def test_read_input_files_on_empty_dir_should_not_give_value_error():
    # An existing-but-empty directory is valid input and yields no CSTs.
    csts = lampion.cli.read_input_dir(f"{path_prefix}/test_inputs/no_files")
    assert len(csts) == 0
def test_read_input_files_on_bad_path_should_raise_value_error():
    with pytest.raises(ValueError):
        lampion.cli.read_input_dir("./made_up_folder")
def test_read_input_files_on_bad_path_should_raise_FileNotFoundError():
    # NOTE(review): the name mentions FileNotFoundError, but the assertion
    # (and the implementation, per the test above) uses ValueError for a
    # missing file as well -- consider renaming the test.
    with pytest.raises(ValueError):
        lampion.cli.read_input_dir("./made_up_file.py")
# --- lampion.cli.run: end-to-end smoke tests ---
def test_main_with_good_file_should_not_fail():
    lampion.cli.run(f"{path_prefix}/test_inputs/hello_world.py")
def test_main_with_good_folder_should_not_fail():
    lampion.cli.run(f"{path_prefix}/test_inputs/multiple_files")
def test_main_with_bad_folder_should_fail():
    with pytest.raises(ValueError):
        lampion.cli.run("./made_up_folder")
def test_main_with_bad_file_should_fail():
    with pytest.raises(ValueError):
        lampion.cli.run("./made_up_file.py")
# --- lampion.cli.read_config_file: .properties parsing ---
def test_read_config_bad_path_raises_valueError():
    with pytest.raises(ValueError):
        lampion.cli.read_config_file("./made_up_folder")
def test_read_config_with_none_value_gives_empty_dict():
    # Passing no path at all must yield an empty (default) config.
    config = lampion.cli.read_config_file(None)
    assert len(config) == 0
def test_read_config_with_empty_files_gives_empty_dict():
    config = lampion.cli.read_config_file(f"{path_prefix}/test_configs/empty.properties")
    assert len(config) == 0
def test_read_config_with_example_file_finds_values():
    config = lampion.cli.read_config_file(f"{path_prefix}/test_configs/test1.properties")
    # BUG FIX: ``config is not {}`` compared identity against a fresh dict
    # literal and was therefore always true; compare by value instead.
    assert config != {}
def test_read_config_with_wrong_ending_finds_values():
    # The parser accepts any file extension, not only .properties.
    config = lampion.cli.read_config_file(f"{path_prefix}/test_configs/test3.txt")
    # BUG FIX: value comparison instead of the always-true identity check.
    assert config != {}
def test_read_config_parses_bools_properly():
    config = lampion.cli.read_config_file(f"{path_prefix}/test_configs/parse_booleans.properties")
    value_a = config["a"]
    value_b = config["b"]
    value_c = config["c"]
    value_d = config["d"]
    assert value_a == True
    assert value_b == True
    assert value_c == False
    assert value_d == False
def test_read_config_parses_bools_to_types():
    # Boolean-looking values become real bools, not strings.
    config = lampion.cli.read_config_file(f"{path_prefix}/test_configs/parse_booleans.properties")
    value_a = config["a"]
    value_b = config["b"]
    value_c = config["c"]
    value_d = config["d"]
    assert isinstance(value_a, bool)
    assert isinstance(value_b, bool)
    assert isinstance(value_c, bool)
    assert isinstance(value_d, bool)
def test_read_config_does_not_parse_doubles():
    # Decimal-looking values stay strings (only ints are converted).
    config = lampion.cli.read_config_file(f"{path_prefix}/test_configs/parse_doubles.properties")
    value_a = config["a"]
    value_b = config["b"]
    value_c = config["c"]
    assert value_a == "1.5"
    assert value_b == "2,6"
    assert value_c == "0.1"
    assert isinstance(value_a, str)
    assert isinstance(value_b, str)
    assert isinstance(value_c, str)
def test_read_config_parses_ints_properly():
    config = lampion.cli.read_config_file(f"{path_prefix}/test_configs/parse_ints.properties")
    value_a = config["a"]
    value_transformations = config["transformations"]
    assert value_a == 50
    assert value_transformations == 10
def test_read_config_with_example_file_finds_values_variant_b():
    config = lampion.cli.read_config_file(f"{path_prefix}/test_configs/test1.properties")
    # BUG FIX: value comparison instead of the always-true identity check.
    assert config != {}
def test_read_config_with_example_file_2_has_seed():
    config = lampion.cli.read_config_file(f"{path_prefix}/test_configs/test2.properties")
    assert config["seed"] is not None
    assert config["seed"] == 123
def test_read_config_with_example_file_1_has_transformers_as_expected():
    config = lampion.cli.read_config_file(f"{path_prefix}/test_configs/test1.properties")
    assert config["AddUnusedVariableTransformer"] == True
    assert config["UnusedVariableStringRandomness"] == "full"
    assert config["AddCommentTransformer"] == False
    assert config["AddCommentStringRandomness"] == "pseudo"
def test_read_config_with_example_file_2_has_transformers_as_expected():
    config = lampion.cli.read_config_file(f"{path_prefix}/test_configs/test2.properties")
    assert config["AddUnusedVariableTransformer"] == False
    assert config["UnusedVariableStringRandomness"] == "full"
    assert config["AddCommentTransformer"] == False
def test_read_config_two_different_files_have_different_dicts():
    config1 = lampion.cli.read_config_file(f"{path_prefix}/test_configs/test1.properties")
    config2 = lampion.cli.read_config_file(f"{path_prefix}/test_configs/test2.properties")
    assert config1 != config2
def test_read_config_with_lots_of_whitespace_should_be_parsed_without_any_whitespaces():
    # Keys/values surrounded by extra whitespace must be stripped on read.
    config = lampion.cli.read_config_file(f"{path_prefix}/test_configs/test3.properties")
    assert config["AddUnusedVariableTransformer"] == False
    assert config["UnusedVariableStringRandomness"] == "full"
    assert config["AddCommentTransformer"] == False
    assert config["seed"] == 123
    assert config["transformation_scope"] == "global"
| 31.446009 | 98 | 0.760376 |
79556273e3f6be30dac5ffba489d5f379dd76077 | 132 | py | Python | Python/count.py | NewGuonx/DST_algorithm | 8c4b2f0fe1f43044a5c37a7f993d339862d9182c | [
"Unlicense"
] | 1 | 2022-01-23T05:29:17.000Z | 2022-01-23T05:29:17.000Z | Python/count.py | sonaspy/dst_algorithms | 8c4b2f0fe1f43044a5c37a7f993d339862d9182c | [
"Unlicense"
] | null | null | null | Python/count.py | sonaspy/dst_algorithms | 8c4b2f0fe1f43044a5c37a7f993d339862d9182c | [
"Unlicense"
] | null | null | null |
def count(data, target):
n = 0
for item in data:
if item == target: # found a match
n += 1
return n
| 14.666667 | 52 | 0.492424 |
7955627ec05bd4cc43ff2357a3000575fe7ec9f7 | 3,453 | py | Python | src/next_scraper/tests/tasks/test_scraper.py | EnTeQuAk/next-scraper | c362ff362976125c142caabd08e224c0156970aa | [
"BSD-3-Clause"
] | null | null | null | src/next_scraper/tests/tasks/test_scraper.py | EnTeQuAk/next-scraper | c362ff362976125c142caabd08e224c0156970aa | [
"BSD-3-Clause"
] | 8 | 2021-03-19T00:17:16.000Z | 2021-12-13T20:34:55.000Z | src/next_scraper/tests/tasks/test_scraper.py | EnTeQuAk/next-scraper | c362ff362976125c142caabd08e224c0156970aa | [
"BSD-3-Clause"
] | null | null | null | import pytest
import responses
from next_scraper.models import COMPLETED, NEW, Report
from next_scraper.tasks.scraper import (
extract_information_from_page,
fetch_status_from_links,
mark_report_as_completed,
)
@pytest.fixture(autouse=True)
def example_page_structure():
responses.add(
responses.GET,
"https://example.com",
content_type="text/html",
body='<!doctype html><html><a href="/broken">foo</a><a href="/success"></a>',
)
responses.add(
responses.GET,
"https://example.com/broken",
content_type="text/html",
status=404,
body="Not Found",
)
responses.add(
responses.GET, "https://example.com/success", content_type="text/html", body=""
)
@pytest.mark.django_db
def test_fetch_status_from_links():
report = Report.objects.create(original_url="https://example.com")
fetch_status_from_links(
["https://example.com/broken", "https://example.com/success"], report.pk
)
report.refresh_from_db()
assert report.broken_links == 1
@pytest.mark.django_db
def test_report_as_completed():
report = Report.objects.create(original_url="https://example.com", state=NEW)
mark_report_as_completed(report.pk)
report.refresh_from_db()
assert report.state == COMPLETED
@pytest.mark.django_db
def test_extract_information_from_page_basics():
report_pk = extract_information_from_page("https://example.com")
report = Report.objects.get(pk=report_pk)
assert report.state == COMPLETED
assert report.status_code == 200
assert report.broken_links == 1
assert report.internal_links == 2
assert report.external_links == 0
assert report.celery_group_id is not None
assert not report.may_contain_login_form
assert report.html_version == "html5"
assert report.title == ""
assert report.headings == {"h1": 0, "h2": 0, "h3": 0, "h4": 0, "h5": 0, "h6": 0}
@pytest.mark.django_db
def test_extract_information_from_page_headings():
responses.add(
responses.GET,
"https://headings.com",
content_type="text/html",
body="<!doctype html><html><h1>foo</h1><h3>bar</h3>",
)
report_pk = extract_information_from_page("https://headings.com")
report = Report.objects.get(pk=report_pk)
assert report.state == COMPLETED
assert report.status_code == 200
assert report.headings == {"h1": 1, "h2": 0, "h3": 1, "h4": 0, "h5": 0, "h6": 0}
@pytest.mark.django_db
def test_extract_information_from_page_title():
responses.add(
responses.GET,
"https://title.com",
content_type="text/html",
body="<!doctype html><html><head><title>Page title</title></head>",
)
report_pk = extract_information_from_page("https://title.com")
report = Report.objects.get(pk=report_pk)
assert report.state == COMPLETED
assert report.status_code == 200
assert report.title == "Page title"
@pytest.mark.django_db
def test_extract_information_from_login_form():
responses.add(
responses.GET,
"https://login.com",
content_type="text/html",
body="<!doctype html><html><form><input type=text></form>",
)
report_pk = extract_information_from_page("https://login.com")
report = Report.objects.get(pk=report_pk)
assert report.state == COMPLETED
assert report.status_code == 200
assert report.may_contain_login_form
| 28.073171 | 87 | 0.674486 |
795562d4c9c573d90398cbfc2c0b03a92050f925 | 891 | py | Python | replication/johnson_alahi_li_2016/download_images.py | jbueltemeier/pystiche_papers | d162c2db87251f9e3280fea35cf149d030dc335b | [
"BSD-3-Clause"
] | 4 | 2020-06-08T11:49:41.000Z | 2021-04-09T16:16:42.000Z | replication/johnson_alahi_li_2016/download_images.py | jbueltemeier/pystiche_papers | d162c2db87251f9e3280fea35cf149d030dc335b | [
"BSD-3-Clause"
] | 229 | 2020-06-08T07:24:25.000Z | 2021-01-08T07:16:02.000Z | replication/johnson_alahi_li_2016/download_images.py | jbueltemeier/pystiche_papers | d162c2db87251f9e3280fea35cf149d030dc335b | [
"BSD-3-Clause"
] | 2 | 2020-06-09T06:10:50.000Z | 2020-07-29T08:07:40.000Z | from torchvision.datasets.utils import download_and_extract_archive
import pystiche_papers.johnson_alahi_li_2016 as paper
from utils import ArgumentParser, make_description
def main(args):
paper.images().download(args.images_source_dir)
if args.no_dataset:
return
download_and_extract_archive(
"http://images.cocodataset.org/zips/train2014.zip",
args.dataset_dir,
md5="0da8c0bd3d6becc4dcb32757491aca88",
)
def parse_args():
parser = ArgumentParser(description=make_description("download the images"))
parser.add_images_source_dir_argument()
parser.add_dataset_dir_argument()
parser.add_argument(
"--no-dataset",
action="store_true",
help="If given, do not download the dataset (~13GB).",
)
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
main(args)
| 23.447368 | 80 | 0.710438 |
795562e6d65763032476a8bacd0a9585f8314bf9 | 5,833 | py | Python | rsspotify/rsspotify.py | pablomd314/rsspotify | 2f68e4ebe500b2627ce625390290921504706349 | [
"MIT"
] | 2 | 2018-09-05T19:29:42.000Z | 2021-12-11T23:25:09.000Z | rsspotify/rsspotify.py | pablomd314/rsspotify | 2f68e4ebe500b2627ce625390290921504706349 | [
"MIT"
] | 7 | 2018-09-05T19:30:24.000Z | 2022-03-11T23:31:54.000Z | rsspotify/rsspotify.py | pablomd314/rsspotify | 2f68e4ebe500b2627ce625390290921504706349 | [
"MIT"
] | null | null | null | import requests
import copy
from . import rss
import urllib
import time
import requests
import socket
import datetime
class SpotifyClient(object):
"""This class handles all communication with the spotify server"""
CALLBACK_PATH = "/v1/authorization"
TOKEN_URL = "https://accounts.spotify.com/api/token"
# constructor needs to authenticate client
def __init__(self, client_id, client_secret, hostname, port):
self.CALLBACK_HOST = hostname
self.CALLBACK_PORT = port
self.client_id = client_id
self.client_secret = client_secret
print("Please authorize app here: {0}".format(get_authorize_link(client_id,
self.CALLBACK_HOST, self.CALLBACK_PORT)))
http_response = self.start_callback_server()
# once we've gotten our callback, we have the authorization_code
# and refresh token
if not http_response.startswith("GET {0}?code=".format(self.CALLBACK_PATH)):
raise Exception("Bad request.")
authorization_code = http_response.split()[1].split("=")[1]
self.get_tokens(authorization_code)
print(self.authorization_token, self.refresh_token)
if self.authorization_token is None or self.refresh_token is None:
raise Exception("Failed to get tokens.")
def start_callback_server(self):
print("!!!! CALLBACK SERVER !!!!")
HOST = self.CALLBACK_HOST
PORT = self.CALLBACK_PORT
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind((HOST, PORT))
s.listen(1)
conn, addr = s.accept()
with conn:
data = conn.recv(1024)
conn.sendall(b"""HTTP/1.1 200 OK
Date: Mon, 25 Aug 2018 11:11:53 GMT
Server: Apache/2.2.14 (Win32)
Last-Modified: Mon, 25 Aug 2018 11:11:53 GMT
Content-Length: 0
Content-Type: text/html
Connection: Closed!""")
return data.decode("utf-8")
def get_tokens(self, authorization_code):
payload = {"grant_type": "authorization_code",
"code": authorization_code,
"redirect_uri": "http://{0}:{1}{2}".format(self.CALLBACK_HOST,
self.CALLBACK_PORT, self.CALLBACK_PATH),
"client_id": self.client_id,
"client_secret": self.client_secret}
r = requests.post(self.TOKEN_URL, data=payload)
j = r.json()
self.tokensExpiry = time.time() + j.get("expires_in")
self.authorization_token, self.refresh_token = j.get("access_token"), j.get("refresh_token")
def refresh_tokens(self):
payload = {"grant_type": "refresh_token",
"refresh_token": self.refresh_token,
"client_id": self.client_id,
"client_secret": self.client_secret}
r = requests.post(self.TOKEN_URL, data=payload)
j = r.json()
self.tokensExpiry = time.time() + j.get("expires_in")
self.authorization_token = j.get("access_token")
def get(self, api_link):
if self.tokensExpiry <= time.time():
self.refresh_tokens()
headers = {"Authorization": "Bearer {0}".format(self.authorization_token),
"Content-Type": "application/json", "Accept": "application/json"}
try:
r = requests.get(api_link, headers=headers)
except Exception as e:
raise e
j = r.json()
return j
def get_artist_info(self, artist_id):
api_link = "https://api.spotify.com/v1/artists/{0}".format(artist_id)
j = self.get(api_link)
return j
def get_artist_feed(self, artist_id):
# check if need to refresh
api_link = "https://api.spotify.com/v1/artists/{0}/albums".format(artist_id)
j = self.get(api_link)
artist_info = self.get_artist_info(artist_id)
config = {
"version": "2.0",
"channel": {
"title": artist_info["name"] + " (Artist Feed)",
"description": "The latest releases by {0}".format(artist_info["name"]),
"link": artist_info["external_urls"]["spotify"],
"items": []
}
}
item = {"title": "RSS Solutions for Restaurants", "description": "Do less."}
for x in j['items']:
i = copy.copy(item)
i['title'] = x['name']
i['description'] = x['album_type'].capitalize()
i['link'] = x['external_urls']['spotify']
i['pubDate'] = generate_datetime(x['release_date'], x['release_date_precision'])
config["channel"]["items"].append(i)
return rss.RSSElement(config)
def search_for_artists(self, query):
# check if need to refresh
api_link = "https://api.spotify.com/v1/search?{0}".format(
urllib.parse.urlencode({
"type": "artist",
"q": query
}))
j = self.get(api_link)
list_of_artists = j.get('artists').get('items')
return list_of_artists
def get_authorize_link(client_id, hostname, port):
response_type = "code"
redirect_uri = "http://{0}:{1}/v1/authorization".format(hostname, port)
return "https://accounts.spotify.com/authorize/?{0}".format(
urllib.parse.urlencode({
"client_id": client_id,
"response_type": response_type,
"redirect_uri": redirect_uri
}))
def generate_datetime(date_string, date_precision):
if date_precision == "day":
x = datetime.datetime.strptime(date_string, "%Y-%m-%d")
elif date_precision == "month":
x = datetime.datetime.strptime(date_string, "%Y-%m")
elif date_precision == "year":
x = datetime.datetime.strptime(date_string, "%Y")
else:
x = datetime.datetime.now()
return x
| 38.886667 | 100 | 0.610663 |
795562ef7c6edd41a513a86a7c5ab991448e0298 | 909 | py | Python | jp.atcoder/agc043/agc043_b/28076515.py | kagemeka/atcoder-submissions | 91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e | [
"MIT"
] | 1 | 2022-02-09T03:06:25.000Z | 2022-02-09T03:06:25.000Z | jp.atcoder/agc043/agc043_b/28076515.py | kagemeka/atcoder-submissions | 91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e | [
"MIT"
] | 1 | 2022-02-05T22:53:18.000Z | 2022-02-09T01:29:30.000Z | jp.atcoder/agc043/agc043_b/28076515.py | kagemeka/atcoder-submissions | 91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e | [
"MIT"
] | null | null | null | import typing
def main() -> typing.NoReturn:
n = int(input())
a = list(map(int, input()))
# odd, even
a = [x - 1 for x in a]
k = 1 << 20
cnt2 = [0] * k
cnt2[2] = 1
for i in range(2, k):
if i * 2 >= k: break
cnt2[i * 2] = cnt2[i] + 1
for i in range(k - 1):
cnt2[i + 1] += cnt2[i]
def is_odd(a: typing.List[int]) -> bool:
n = len(a)
bl = 0
for i in range(n):
# n - 1 choose i
# (n - 1)! / i! * (n - 1 - i)!
assert cnt2[n - 1] >= cnt2[i] + cnt2[n - 1 - i]
bl ^= (a[i] & 1) and cnt2[n - 1] == cnt2[i] + cnt2[n - 1 - i]
return bl
if is_odd(a):
print(1)
return
if 1 in a:
print(0)
return
a = [x // 2 for x in a]
if is_odd(a):
print(2)
else:
print(0)
main()
| 21.642857 | 74 | 0.382838 |
79556494159046c5f9b0e4231b784eafd8934c37 | 5,488 | py | Python | hdp-ambari-mpack-3.1.4.0/stacks/HDP/3.0/services/ZOOKEEPER/package/scripts/zookeeper.py | dropoftruth/dfhz_hdp_mpack | 716f0396dce25803365c1aed9904b74fbe396f79 | [
"Apache-2.0"
] | 3 | 2022-01-05T10:10:36.000Z | 2022-02-21T06:57:06.000Z | hdp-ambari-mpack-3.1.4.0/stacks/HDP/3.0/services/ZOOKEEPER/package/scripts/zookeeper.py | dropoftruth/dfhz_hdp_mpack | 716f0396dce25803365c1aed9904b74fbe396f79 | [
"Apache-2.0"
] | 13 | 2019-06-05T07:47:00.000Z | 2019-12-29T08:29:27.000Z | hdp-ambari-mpack-3.1.4.0/stacks/HDP/3.0/services/ZOOKEEPER/package/scripts/zookeeper.py | dropoftruth/dfhz_hdp_mpack | 716f0396dce25803365c1aed9904b74fbe396f79 | [
"Apache-2.0"
] | 2 | 2022-01-05T09:09:20.000Z | 2022-02-21T07:02:06.000Z | """
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ambari Agent
"""
import os
import sys
from resource_management.libraries.script.script import Script
from resource_management.libraries.functions import conf_select,stack_select
from resource_management.libraries.functions.constants import StackFeature
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.generate_logfeeder_input_config import generate_logfeeder_input_config
from resource_management.libraries.functions.version import compare_versions, format_stack_version
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.core.resources.system import Directory, File
from resource_management.core.resources.service import ServiceConfig
from resource_management.core.source import InlineTemplate, Template
from ambari_commons import OSConst
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def zookeeper(type = None, upgrade_type=None):
import params
Directory(params.config_dir,
owner=params.zk_user,
create_parents = True,
group=params.user_group
)
File(os.path.join(params.config_dir, "zookeeper-env.sh"),
content=InlineTemplate(params.zk_env_sh_template),
owner=params.zk_user,
group=params.user_group
)
configFile("zoo.cfg", template_name="zoo.cfg.j2")
configFile("configuration.xsl", template_name="configuration.xsl.j2")
Directory(params.zk_pid_dir,
owner=params.zk_user,
create_parents = True,
group=params.user_group,
mode=0755,
)
Directory(params.zk_log_dir,
owner=params.zk_user,
create_parents = True,
group=params.user_group,
mode=0755,
)
Directory(params.zk_data_dir,
owner=params.zk_user,
create_parents = True,
cd_access="a",
group=params.user_group,
mode=0755,
)
if type == 'server':
myid = str(sorted(params.zookeeper_hosts).index(params.hostname) + 1)
File(os.path.join(params.zk_data_dir, "myid"),
mode = 0644,
content = myid
)
generate_logfeeder_input_config('zookeeper', Template("input.config-zookeeper.json.j2", extra_imports=[default]))
if (params.log4j_props != None):
File(os.path.join(params.config_dir, "log4j.properties"),
mode=0644,
group=params.user_group,
owner=params.zk_user,
content=InlineTemplate(params.log4j_props)
)
elif (os.path.exists(os.path.join(params.config_dir, "log4j.properties"))):
File(os.path.join(params.config_dir, "log4j.properties"),
mode=0644,
group=params.user_group,
owner=params.zk_user
)
if params.security_enabled:
if type == "server":
configFile("zookeeper_jaas.conf", template_name="zookeeper_jaas.conf.j2")
configFile("zookeeper_client_jaas.conf", template_name="zookeeper_client_jaas.conf.j2")
else:
configFile("zookeeper_client_jaas.conf", template_name="zookeeper_client_jaas.conf.j2")
File(os.path.join(params.config_dir, "zoo_sample.cfg"),
owner=params.zk_user,
group=params.user_group
)
@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
def zookeeper(type = None, upgrade_type=None):
import params
configFile("zoo.cfg", template_name="zoo.cfg.j2", mode="f")
configFile("configuration.xsl", template_name="configuration.xsl.j2", mode="f")
ServiceConfig(params.zookeeper_win_service_name,
action="change_user",
username = params.zk_user,
password = Script.get_password(params.zk_user))
Directory(params.zk_data_dir,
owner=params.zk_user,
mode="(OI)(CI)F",
create_parents = True
)
if (params.log4j_props != None):
File(os.path.join(params.config_dir, "log4j.properties"),
mode="f",
owner=params.zk_user,
content=params.log4j_props
)
elif (os.path.exists(os.path.join(params.config_dir, "log4j.properties"))):
File(os.path.join(params.config_dir, "log4j.properties"),
mode="f",
owner=params.zk_user
)
if type == 'server':
myid = str(sorted(params.zookeeper_hosts).index(params.hostname) + 1)
File(os.path.join(params.zk_data_dir, "myid"),
owner=params.zk_user,
mode = "f",
content = myid
)
def configFile(name, template_name=None, mode=None):
import params
File(os.path.join(params.config_dir, name),
content=Template(template_name),
owner=params.zk_user,
group=params.user_group,
mode=mode
)
| 33.668712 | 117 | 0.71137 |
7955650dc7639ef2089060bd7a321b20fda5d53d | 2,956 | py | Python | convert_pyproject.py | 0xdecaff/starknet.py | b746d7b1539d8b753ed946e5e26358a72973facc | [
"MIT"
] | null | null | null | convert_pyproject.py | 0xdecaff/starknet.py | b746d7b1539d8b753ed946e5e26358a72973facc | [
"MIT"
] | null | null | null | convert_pyproject.py | 0xdecaff/starknet.py | b746d7b1539d8b753ed946e5e26358a72973facc | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @Author: José Sánchez-Gallego (gallegoj@uw.edu)
# @Date: 2019-12-18
# @Filename: create_setup.py
# @License: BSD 3-clause (http://www.opensource.org/licenses/BSD-3-Clause)
# This is a temporary solution for the fact that pip install . fails with
# poetry when there is no setup.py and an extension needs to be compiled.
# See https://github.com/python-poetry/poetry/issues/1516. Running this
# script creates a setup.py filled out with information generated by
# poetry when parsing the pyproject.toml.
import os
import sys
from distutils.version import StrictVersion
# If there is a global installation of poetry, prefer that.
from pathlib import Path
lib = os.path.expanduser("~/.poetry/lib")
vendors = os.path.join(lib, "poetry", "_vendor")
current_vendors = os.path.join(
vendors, "py{}".format(".".join(str(v) for v in sys.version_info[:2]))
)
sys.path.insert(0, lib)
sys.path.insert(0, current_vendors)
try:
try:
from poetry.core.factory import Factory
from poetry.core.masonry.builders.sdist import SdistBuilder
except (ImportError, ModuleNotFoundError):
from poetry.masonry.builders.sdist import SdistBuilder
from poetry.factory import Factory
from poetry.__version__ import __version__
except (ImportError, ModuleNotFoundError) as ee:
raise ImportError(
f"install poetry by doing pip install poetry to use this script: {ee}"
)
# Generate a Poetry object that knows about the metadata in pyproject.toml
factory = Factory()
poetry = factory.create_poetry(os.path.dirname(__file__))
# Use the SdistBuilder to genrate a blob for setup.py
if StrictVersion(__version__) >= StrictVersion("1.1.0b1"):
sdist_builder = SdistBuilder(poetry, None)
else:
sdist_builder = SdistBuilder(poetry, None, None)
setuppy_blob = sdist_builder.build_setup()
with open("setup.py", "wb") as unit:
unit.write(Path("builder.py").read_bytes()) # Append dummy builder defs
unit.write(setuppy_blob)
unit.write(b"\n# This setup.py was autogenerated using poetry.\n")
extra_setup_args = [
"'ext_modules': [CryptoExtension()],\n",
"'long_description_content_type': 'text/markdown',\n",
'\'cmdclass\': {"build_py": BuildPy, "build_ext": BuildCrypto },\n',
]
with open("setup.py", "r+", encoding="utf-8") as setup_file:
lines = setup_file.readlines()
assert len(lines) != 0
lines = [
line
for line in lines
if "from build import *" not in line and "build(setup_kwargs)" not in line
]
last_setup_line = None
for number, line in enumerate(lines):
if "python_requires" in line:
last_setup_line = number
break
assert last_setup_line is not None
setup_file.seek(0)
setup_file.writelines(
[
*lines[0 : last_setup_line + 1],
*extra_setup_args,
*lines[last_setup_line + 1 :],
]
)
| 31.784946 | 82 | 0.69046 |
7955653be7b7ccfa4dad6ecd54057e61f639b8a9 | 764 | py | Python | krux/types/null.py | claydodo/cab | 0b68b8c88006a6b34874cfb3c450f5d2a0becb2c | [
"Unlicense"
] | null | null | null | krux/types/null.py | claydodo/cab | 0b68b8c88006a6b34874cfb3c450f5d2a0becb2c | [
"Unlicense"
] | 1 | 2018-11-07T07:53:16.000Z | 2018-11-07T07:53:16.000Z | krux/types/null.py | claydodo/krux | 0b68b8c88006a6b34874cfb3c450f5d2a0becb2c | [
"Unlicense"
] | null | null | null | # -*- coding:utf-8 -*-
from .singleton import Singleton
class NullClass(Singleton):
"""A do-nothing class.
From A. Martelli et al. Python Cookbook. (O'Reilly)
Thanks to Dinu C. Gherman, Holger Krekel.
"""
def __init__(self, *args, **kwargs): pass
def __call__(self, *args, **kwargs): return self
def __repr__(self): return "Null"
def __nonzero__(self): return False
def __getattr__(self, name): return self
def __setattr__(self, name, value): return self
def __delattr__(self, name): return self
def __len__(self): return 0
def __iter__(self): return iter(())
def __getitem__(self, i): return self
def __delitem__(self, i): return self
def __setitem__(self, i): return self
Null = NullClass()
| 28.296296 | 55 | 0.668848 |
7955656cf83a42dd61cf35865309df14c0ebe986 | 252 | py | Python | ibis_vega_transform/util.py | xmnlab/ibis-vega-transform | 75b4c5e4a8452dbeb5a81e19eae817b4d9c64999 | [
"Apache-2.0"
] | 20 | 2019-08-12T17:18:10.000Z | 2021-09-15T15:38:28.000Z | ibis_vega_transform/util.py | xmnlab/ibis-vega-transform | 75b4c5e4a8452dbeb5a81e19eae817b4d9c64999 | [
"Apache-2.0"
] | 63 | 2019-07-29T00:07:02.000Z | 2022-01-30T21:57:22.000Z | ibis_vega_transform/util.py | isabella232/ibis-vega-transform | 6019825a05cefec153363ded01d5ded4a6befa75 | [
"Apache-2.0"
] | 6 | 2019-07-19T15:13:36.000Z | 2022-03-06T03:01:54.000Z | from typing import *
V = TypeVar("V")
def promote_list(val: Union[V, List[V]]) -> List[V]:
    """
    Maybe wrap a value in a list. If it is already a list, does nothing.
    """
    return val if isinstance(val, list) else [val]
| 19.384615 | 72 | 0.583333 |
7955660173adaccd72b64fd42be40574447d044b | 1,806 | py | Python | chrome/test/enterprise/e2e/policy/force_google_safe_search/force_google_safe_search.py | sarang-apps/darshan_browser | 173649bb8a7c656dc60784d19e7bb73e07c20daa | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | chrome/test/enterprise/e2e/policy/force_google_safe_search/force_google_safe_search.py | sarang-apps/darshan_browser | 173649bb8a7c656dc60784d19e7bb73e07c20daa | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | chrome/test/enterprise/e2e/policy/force_google_safe_search/force_google_safe_search.py | sarang-apps/darshan_browser | 173649bb8a7c656dc60784d19e7bb73e07c20daa | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | # Copyright (c) 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import logging
from absl import flags
from chrome_ent_test.infra.core import environment, before_all, test
from infra import ChromeEnterpriseTestCase
FLAGS = flags.FLAGS
@environment(file="../policy_test.asset.textpb")
class ForceGoogleSafeSearchTest(ChromeEnterpriseTestCase):
  """End-to-end check of the ForceGoogleSafeSearch policy: the webdriver
  probe reports the final Google search URL, which carries the SafeSearch
  parameters when the policy is in force."""

  @before_all
  def setup(self):
    self.InstallChrome('client2012')
    self.InstallWebDriver('client2012')

  def _apply_policy_and_search(self, policy_value, log_label):
    """Sets the policy, refreshes group policy, runs the webdriver probe.

    Returns the URL string produced by the probe script."""
    self.SetPolicy('win2012-dc', 'ForceGoogleSafeSearch', policy_value, 'DWORD')
    self.RunCommand('client2012', 'gpupdate /force')
    logging.info(log_label)
    script_dir = os.path.dirname(os.path.abspath(__file__))
    output = self.RunWebDriverTest(
        'client2012',
        os.path.join(script_dir, 'force_google_safe_search_webdriver_test.py'))
    logging.info('url used: %s', output)
    return output

  @test
  def test_ForceGoogleSafeSearchEnabled(self):
    output = self._apply_policy_and_search(1, 'ForceGoogleSafeSearch ENABLED')
    # assert that safe search is enabled
    self.assertIn('safe=active', output)
    self.assertIn('ssui=on', output)

  @test
  def test_ForceGoogleSafeSearchDisabled(self):
    output = self._apply_policy_and_search(0, 'ForceGoogleSafeSearch DISABLED')
    # assert that safe search is NOT enabled
    self.assertNotIn('safe=active', output)
    self.assertNotIn('ssui=on', output)
| 34.075472 | 72 | 0.732558 |
79556621c35d7e91142a839d2aefc4612160ce2d | 1,959 | py | Python | tests/test_views/customer/request/test_cancel.py | a-belhadj/squest | 8714fefc332ab1ab349508488455f4a1f2ab8a82 | [
"Apache-2.0"
] | null | null | null | tests/test_views/customer/request/test_cancel.py | a-belhadj/squest | 8714fefc332ab1ab349508488455f4a1f2ab8a82 | [
"Apache-2.0"
] | null | null | null | tests/test_views/customer/request/test_cancel.py | a-belhadj/squest | 8714fefc332ab1ab349508488455f4a1f2ab8a82 | [
"Apache-2.0"
] | null | null | null | from django.core.exceptions import PermissionDenied
from django.urls import reverse
from service_catalog.models import Request
from tests.base_test_request import BaseTestRequest
class TestCustomerRequestViewTest(BaseTestRequest):
    """Tests of the customer-facing request-cancel view."""

    def setUp(self):
        super(TestCustomerRequestViewTest, self).setUp()

    def _cancel_url(self):
        """URL of the customer cancel endpoint for the test request."""
        args = {
            'request_id': self.test_request.id
        }
        return reverse('service_catalog:customer_request_cancel', kwargs=args)

    def _assert_can_cancel(self):
        """POST the cancel view and check the request was deleted."""
        response = self.client.post(self._cancel_url())
        self.assertEquals(302, response.status_code)
        self.assertEquals(0, Request.objects.filter(id=self.test_request.id).count())

    def _assert_cannot_cancel(self):
        """POST the cancel view and check access is denied and the request survives.

        BUG FIX: the old body called ``self.assertRaises(PermissionDenied)`` with
        no callable, which only builds an unused context manager and asserts
        nothing. ``PermissionDenied`` raised in a view becomes a 403 response in
        Django's test client, so assert on the response and on persistence.
        """
        response = self.client.post(self._cancel_url())
        self.assertEquals(403, response.status_code)
        self.assertEquals(1, Request.objects.filter(id=self.test_request.id).count())

    def test_request_cancel_by_admin(self):
        self._assert_can_cancel()

    def test_request_cancel_by_owner(self):
        self.client.login(username=self.standard_user, password=self.common_password)
        self._assert_can_cancel()

    def test_request_cancel_by_other(self):
        self.client.login(username=self.standard_user_2, password=self.common_password)
        self._assert_cannot_cancel()

    def test_request_cannot_be_canceled_once_accepted(self):
        for state in ["ACCEPTED", "FAILED", "COMPLETE", "PROCESSING"]:
            self.test_request.state = state
            # BUG FIX: persist the state change -- the view re-reads the
            # request from the database, so without save() it always saw the
            # initial state and the loop tested nothing.
            self.test_request.save()
            self._assert_cannot_cancel()

    def test_admin_can_cancel_from_admin_view(self):
        args = {
            'request_id': self.test_request.id
        }
        url = reverse('service_catalog:admin_request_cancel', kwargs=args)
        response = self.client.post(url)
        self.assertEquals(302, response.status_code)
        self.assertEquals(0, Request.objects.filter(id=self.test_request.id).count())
| 36.277778 | 87 | 0.702399 |
79556699efd8551c1863fa67df39167df56d2a8e | 3,595 | py | Python | setup.py | KDahlgren/nyctea | 725940d46a63ca4189283bcc716ad0c96aab48ec | [
"MIT"
] | null | null | null | setup.py | KDahlgren/nyctea | 725940d46a63ca4189283bcc716ad0c96aab48ec | [
"MIT"
] | null | null | null | setup.py | KDahlgren/nyctea | 725940d46a63ca4189283bcc716ad0c96aab48ec | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# based on https://github.com/KDahlgren/pyLDFI/blob/master/setup.py
import os, sys, time
C4_FINDAPR_PATH = "./lib/c4/cmake/FindApr.cmake"
#################
# GETAPR_LIST #
#################
def getAPR_list() :
  """Locate candidate Apache Portable Runtime (APR) include directories.

  Runs ``find`` over the filesystem for apr_file_io.h and returns the list
  of directories containing it (each hit with the filename stripped off).
  """
  cmd = 'find / -name "apr_file_io.h" | grep -v "Permission denied" > out.txt'
  print( "Finding Apache Runtime library using command: " + cmd )
  time.sleep(5) # give the user a moment to read the message
  os.system( cmd )
  pathList = []
  # BUG FIX: use a context manager so the temp-file handle is closed before
  # the file is removed (the old code never closed it).
  with open( "out.txt", "r" ) as fo :
    for path in fo :
      path = path.strip()
      # keep the directory: drop the trailing "apr_file_io.h" component
      path_split = path.split( "/" )
      path_split = path_split[:len(path_split)-1]
      path = "/".join( path_split )
      pathList.append( path )
  os.system( 'rm out.txt' )
  return pathList
########################
# DE DUPLICATE SETUP #
########################
# this script modifies the contents of FindAPR.cmake in the c4 submodule
# prior to compilation.
# need to ensure only one SET command exists in FindAPR.cmake after discovering
# a valid apr library.
def deduplicateSetup() :
  """Strip every existing 'set(APR_INCLUDES ...)' line from FindApr.cmake.

  Protects against repeated runs of setup: only one SET command may remain
  after a valid APR library has been discovered.
  http://stackoverflow.com/questions/4710067/deleting-a-specific-line-in-a-file-python
  """
  with open( C4_FINDAPR_PATH, "r+" ) as cmake_file :
    kept_lines = [ line for line in cmake_file.readlines() if "set(APR_INCLUDES" not in line ]
    cmake_file.seek(0)
    cmake_file.writelines( kept_lines )
    cmake_file.truncate()
#############
# SET APR #
#############
def setAPR( path ) :
  # Try one candidate APR include directory: splice a
  # 'set(APR_INCLUDES "<path>")' line into FindApr.cmake between lines 48
  # and 49 (head -48 / tail -n +49), then rebuild C4 with the new setting.
  # NOTE(review): the insertion point is hard-coded to line 48/49 of the
  # vendored cmake file -- this breaks if upstream FindApr.cmake changes.
  # set one of the candidate APR paths
  newCmd = 'set(APR_INCLUDES "' + path + '")'
  cmd = "(head -48 " + C4_FINDAPR_PATH + "; " + "echo '" + newCmd + "'; " + "tail -n +49 " + C4_FINDAPR_PATH + ")" + " > temp ; mv temp " + C4_FINDAPR_PATH + ";"
  os.system( cmd )
  os.system( "make c4" )
##########################
# CHECK FOR MAKE ERROR #
##########################
def checkForMakeError( path ) :
  """Scan ./c4_out.txt (when present) for compile errors; True means clean."""
  clean = True
  if os.path.exists( os.path.dirname(os.path.abspath( __file__ )) + "/c4_out.txt" ) :
    with open( "./c4_out.txt", "r" ) as logfile :
      for raw_line in logfile :
        if containsError( raw_line.strip() ) :
          print( "failed path apr = " + path )
          clean = False
    os.system( "rm ./c4_out.txt" ) # clean up
  return clean
####################
# CONTAINS ERROR #
####################
def containsError( line ) :
  """Return True when *line* carries clang's "error generated." marker."""
  return "error generated." in line
##########
# MAIN #
##########
def main() :
  """Drive the nyctea build: clean, fetch submodules, then build iapyx."""
  print( "Running nyctea setup with args : \n" + str(sys.argv) )
  # clean any existing libs
  os.system( "make clean" )
  # download submodules
  os.system( "make get-submodules" )
  # NOTE: the legacy C4 build path (APR discovery via getAPR_list /
  # deduplicateSetup / setAPR / checkForMakeError) is intentionally
  # disabled; revive it here if the C4 Datalog evaluator is needed again.
  os.system( "make iapyx" )
##############################
#  MAIN THREAD OF EXECUTION  #
##############################
# NOTE: main() also runs at import time -- there is no
# `if __name__ == "__main__"` guard, so importing this module
# triggers the full build.
main()
#########
#  EOF  #
#########
| 24.455782 | 161 | 0.54548 |
795566e6524f2eabc80e56e8e99405cc5bc8f706 | 10,987 | py | Python | aioquant/event.py | littleyanglovegithub/aioquant | fc0d866980fdc37c810cff02de72efe53acaad05 | [
"MIT"
] | 175 | 2020-09-22T17:34:30.000Z | 2022-03-29T10:17:54.000Z | aioquant/event.py | littleyanglovegithub/aioquant | fc0d866980fdc37c810cff02de72efe53acaad05 | [
"MIT"
] | null | null | null | aioquant/event.py | littleyanglovegithub/aioquant | fc0d866980fdc37c810cff02de72efe53acaad05 | [
"MIT"
] | 141 | 2020-09-29T03:24:38.000Z | 2022-03-30T11:06:45.000Z | # -*— coding:utf-8 -*-
"""
Event Center.
Author: HuangTao
Date: 2018/05/04
Email: huangtao@ifclover.com
"""
import json
import zlib
import asyncio
import aioamqp
from aioquant.utils import logger
from aioquant.configure import config
from aioquant.tasks import LoopRunTask, SingleTask
from aioquant.market import Orderbook, Trade, Kline
from aioquant.utils.decorator import async_method_locker
__all__ = ("EventCenter", "EventKline", "EventOrderbook", "EventTrade", )
class Event:
    """Event base.

    Attributes:
        name: Event name.
        exchange: Exchange name.
        queue: Queue name.
        routing_key: Routing key name.
        pre_fetch_count: How many messages are fetched per delivery, default is `1`.
        data: Message content.
    """

    def __init__(self, name=None, exchange=None, queue=None, routing_key=None, pre_fetch_count=1, data=None):
        """Initialize."""
        self._name = name
        self._exchange = exchange
        self._queue = queue
        self._routing_key = routing_key
        self._pre_fetch_count = pre_fetch_count
        self._data = data
        self._callback = None  # Asynchronous callback function.

    @property
    def name(self):
        return self._name

    @property
    def exchange(self):
        return self._exchange

    @property
    def queue(self):
        return self._queue

    @property
    def routing_key(self):
        return self._routing_key

    @property
    def prefetch_count(self):
        return self._pre_fetch_count

    @property
    def data(self):
        return self._data

    def dumps(self):
        """Serialize name and data to zlib-compressed JSON bytes."""
        d = {
            "n": self.name,
            "d": self.data
        }
        s = json.dumps(d)
        b = zlib.compress(s.encode("utf8"))
        return b

    def loads(self, b):
        """Inverse of `dumps`: restore name/data from compressed bytes.

        Returns:
            The decoded dict, e.g. `{"n": <name>, "d": <data>}`.
        """
        b = zlib.decompress(b)
        d = json.loads(b.decode("utf8"))
        self._name = d.get("n")
        self._data = d.get("d")
        return d

    def parse(self):
        """Convert `self.data` back into a concrete object; subclasses override.

        BUG FIX: previously raised the `NotImplemented` constant, which is not
        an exception and itself produces `TypeError: exceptions must derive
        from BaseException`. Raise `NotImplementedError` instead.
        """
        raise NotImplementedError

    def subscribe(self, callback, multi=False):
        """Subscribe a event.

        Args:
            callback: Asynchronous callback function.
            multi: If subscribe multiple channels?
        """
        from aioquant import quant
        self._callback = callback
        SingleTask.run(quant.event_center.subscribe, self, self.callback, multi)

    def publish(self):
        """Publish a event."""
        from aioquant import quant
        SingleTask.run(quant.event_center.publish, self)

    async def callback(self, channel, body, envelope, properties):
        # AMQP delivery entry point: rebuild the event from the wire payload
        # and hand the parsed object to the user callback.
        self._exchange = envelope.exchange_name
        self._routing_key = envelope.routing_key
        self.loads(body)
        o = self.parse()
        await self._callback(o)

    def __str__(self):
        info = "EVENT: name={n}, exchange={e}, queue={q}, routing_key={r}, data={d}".format(
            e=self.exchange, q=self.queue, r=self.routing_key, n=self.name, d=self.data)
        return info

    def __repr__(self):
        return str(self)
class EventKline(Event):
    """Kline event.

    Attributes:
        kline: Kline object.

    * NOTE:
        Publisher: Market server.
        Subscriber: Any servers.
    """

    def __init__(self, kline: Kline):
        """Build queue/routing-key names and forward to the base class."""
        key = "{p}.{s}".format(p=kline.platform, s=kline.symbol)
        queue_name = "{sid}.{ex}.{rk}".format(sid=config.server_id, ex="Kline", rk=key)
        super(EventKline, self).__init__("EVENT_KLINE", "Kline", queue_name, key, data=kline.smart)

    def parse(self):
        """Rebuild the `Kline` object from the payload."""
        return Kline().load_smart(self.data)
class EventOrderbook(Event):
    """Orderbook event.

    Attributes:
        orderbook: Orderbook object.

    * NOTE:
        Publisher: Market server.
        Subscriber: Any servers.
    """

    def __init__(self, orderbook: Orderbook):
        """Build queue/routing-key names and forward to the base class."""
        key = "{p}.{s}".format(p=orderbook.platform, s=orderbook.symbol)
        queue_name = "{sid}.{ex}.{rk}".format(sid=config.server_id, ex="Orderbook", rk=key)
        super(EventOrderbook, self).__init__("EVENT_ORDERBOOK", "Orderbook", queue_name, key, data=orderbook.smart)

    def parse(self):
        """Rebuild the `Orderbook` object from the payload."""
        return Orderbook().load_smart(self.data)
class EventTrade(Event):
    """Trade event.

    Attributes:
        trade: Trade object.

    * NOTE:
        Publisher: Market server.
        Subscriber: Any servers.
    """

    def __init__(self, trade: Trade):
        """Build queue/routing-key names and forward to the base class."""
        key = "{p}.{s}".format(p=trade.platform, s=trade.symbol)
        queue_name = "{sid}.{ex}.{rk}".format(sid=config.server_id, ex="Trade", rk=key)
        super(EventTrade, self).__init__("EVENT_TRADE", "Trade", queue_name, key, data=trade.smart)

    def parse(self):
        """Rebuild the `Trade` object from the payload."""
        return Trade().load_smart(self.data)
class EventCenter:
    """Event center: owns the RabbitMQ connection, binds subscribers to
    queues, dispatches incoming messages, and publishes outgoing events.
    """

    def __init__(self):
        self._host = config.rabbitmq.get("host", "localhost")
        self._port = config.rabbitmq.get("port", 5672)
        self._username = config.rabbitmq.get("username", "guest")
        self._password = config.rabbitmq.get("password", "guest")
        self._protocol = None
        self._channel = None  # Connection channel.
        self._connected = False  # If connect success.
        self._subscribers = []  # e.g. `[(event, callback, multi), ...]`
        self._event_handler = {}  # e.g. `{"exchange:routing_key": [callback_function, ...]}`

        # Register a loop run task to check TCP connection's healthy.
        LoopRunTask.register(self._check_connection, 10)

        # Create MQ connection. NOTE: blocks the event loop until the first
        # connection attempt completes.
        asyncio.get_event_loop().run_until_complete(self.connect())

    @async_method_locker("EventCenter.subscribe")
    async def subscribe(self, event: Event, callback=None, multi=False):
        """Subscribe a event.

        Args:
            event: Event type.
            callback: Asynchronous callback.
            multi: If subscribe multiple channel(routing_key) ?
        """
        logger.info("NAME:", event.name, "EXCHANGE:", event.exchange, "QUEUE:", event.queue, "ROUTING_KEY:",
                    event.routing_key, caller=self)
        # Only recorded here; actual queue binding happens in _bind_and_consume
        # once the connection is up.
        self._subscribers.append((event, callback, multi))

    async def publish(self, event):
        """Publish a event.

        Args:
            event: A event to publish.
        """
        # Best effort: events published while disconnected are dropped with a warning.
        if not self._connected:
            logger.warn("RabbitMQ not ready right now!", caller=self)
            return
        data = event.dumps()
        await self._channel.basic_publish(payload=data, exchange_name=event.exchange, routing_key=event.routing_key)

    async def connect(self, reconnect=False):
        """Connect to RabbitMQ server and create default exchange.

        Args:
            reconnect: If this invoke is a re-connection ?
        """
        logger.info("host:", self._host, "port:", self._port, caller=self)
        if self._connected:
            return

        # Create a connection.
        try:
            transport, protocol = await aioamqp.connect(host=self._host, port=self._port, login=self._username,
                                                        password=self._password, login_method="PLAIN")
        except Exception as e:
            logger.error("connection error:", e, caller=self)
            return
        finally:
            # NOTE(review): a `return` in `finally` overrides any pending
            # return from the except branch; presumably this bails out when a
            # concurrent task already completed the connection -- confirm.
            if self._connected:
                return
        channel = await protocol.channel()
        self._protocol = protocol
        self._channel = channel
        self._connected = True
        logger.info("Rabbitmq initialize success!", caller=self)

        # Create default exchanges.
        exchanges = ["Orderbook", "Kline", "Trade"]
        for name in exchanges:
            await self._channel.exchange_declare(exchange_name=name, type_name="topic")
        logger.debug("create default exchanges success!", caller=self)

        if reconnect:
            self._bind_and_consume()
        else:
            # Maybe we should waiting for all modules to be initialized successfully.
            asyncio.get_event_loop().call_later(5, self._bind_and_consume)

    def _bind_and_consume(self):
        # Replay every recorded subscription against the (re)opened channel.
        async def do_them():
            for event, callback, multi in self._subscribers:
                await self._initialize(event, callback, multi)
        SingleTask.run(do_them)

    async def _initialize(self, event: Event, callback=None, multi=False):
        # Declare (or obtain an exclusive) queue, bind it, and start consuming.
        if event.queue:
            await self._channel.queue_declare(queue_name=event.queue, auto_delete=True)
            queue_name = event.queue
        else:
            result = await self._channel.queue_declare(exclusive=True)
            queue_name = result["queue"]
        await self._channel.queue_bind(queue_name=queue_name, exchange_name=event.exchange,
                                       routing_key=event.routing_key)
        await self._channel.basic_qos(prefetch_count=event.prefetch_count)
        if callback:
            if multi:
                # multi mode: the raw AMQP callback is invoked directly, no ack bookkeeping.
                await self._channel.basic_consume(callback=callback, queue_name=queue_name, no_ack=True)
                logger.info("multi message queue:", queue_name, caller=self)
            else:
                # single mode: messages are funneled through _on_consume_event_msg
                # and dispatched via the _event_handler registry.
                await self._channel.basic_consume(self._on_consume_event_msg, queue_name=queue_name)
                logger.info("queue:", queue_name, caller=self)
                self._add_event_handler(event, callback)

    async def _on_consume_event_msg(self, channel, body, envelope, properties):
        try:
            key = "{exchange}:{routing_key}".format(exchange=envelope.exchange_name, routing_key=envelope.routing_key)
            funcs = self._event_handler[key]
            for func in funcs:
                SingleTask.run(func, channel, body, envelope, properties)
        except:
            # NOTE(review): bare except -- any handler-lookup failure is logged
            # and swallowed; the message is still acked below.
            logger.error("event handle error! body:", body, caller=self)
            return
        finally:
            await self._channel.basic_client_ack(delivery_tag=envelope.delivery_tag)  # response ack

    def _add_event_handler(self, event: Event, callback):
        # Register `callback` under the "exchange:routing_key" dispatch key.
        key = "{exchange}:{routing_key}".format(exchange=event.exchange, routing_key=event.routing_key)
        if key in self._event_handler:
            self._event_handler[key].append(callback)
        else:
            self._event_handler[key] = [callback]
        logger.debug("event handlers:", self._event_handler.keys(), caller=self)

    async def _check_connection(self, *args, **kwargs):
        # Periodic health check (registered every 10s in __init__): on a dead
        # channel, reset all connection state and trigger a reconnect.
        if self._connected and self._channel and self._channel.is_open:
            return
        logger.error("CONNECTION LOSE! START RECONNECT RIGHT NOW!", caller=self)
        self._connected = False
        self._protocol = None
        self._channel = None
        self._event_handler = {}
        SingleTask.run(self.connect, reconnect=True)
795567fa39fe2006d4d54b78840bda6d4dc35b86 | 4,959 | py | Python | release_notes_clear.py | SergeBakharev/content | d66cc274f5bf6f9f0e9ed7e4df1af7b6f305aacf | [
"MIT"
] | 2 | 2020-07-27T10:35:41.000Z | 2020-12-14T15:44:18.000Z | release_notes_clear.py | SergeBakharev/content | d66cc274f5bf6f9f0e9ed7e4df1af7b6f305aacf | [
"MIT"
] | 48 | 2022-03-08T13:45:00.000Z | 2022-03-31T14:32:05.000Z | release_notes_clear.py | SergeBakharev/content | d66cc274f5bf6f9f0e9ed7e4df1af7b6f305aacf | [
"MIT"
] | 2 | 2021-12-13T13:07:21.000Z | 2022-03-05T02:23:34.000Z | import os
import json
import argparse
from datetime import datetime
import yaml
from demisto_sdk.commands.common.constants import UNRELEASE_HEADER, INTEGRATIONS_DIR, SCRIPTS_DIR, PLAYBOOKS_DIR, \
REPORTS_DIR, DASHBOARDS_DIR, WIDGETS_DIR, INCIDENT_FIELDS_DIR, LAYOUTS_DIR, CLASSIFIERS_DIR, INDICATOR_TYPES_DIR
from demisto_sdk.commands.common.tools import server_version_compare, run_command, get_release_notes_file_path, \
print_warning
from demisto_sdk.commands.common.legacy_git_tools import filter_changed_files
from release_notes import LAYOUT_TYPE_TO_NAME
CHANGE_LOG_FORMAT = UNRELEASE_HEADER + '\n\n## [{version}] - {date}\n'
FILE_TYPE_DICT = {
'.yml': yaml.safe_load,
'.json': json.load,
}
def get_changed_content_entities(modified_files, added_files):
# when renaming a file, it will appear as a tuple of (old path, new path) under modified_files
return added_files.union([(file_path[1] if isinstance(file_path, tuple) else file_path)
for file_path in modified_files])
def get_file_data(file_path):
extension = os.path.splitext(file_path)[1]
if extension not in FILE_TYPE_DICT:
return {}
load_function = FILE_TYPE_DICT[extension]
with open(file_path, 'r') as file_obj:
data = load_function(file_obj)
return data
def should_clear(file_path, current_server_version="0.0.0"):
"""
scan folder and remove all references to release notes
:param file_path: path of the yml/json file
:param current_server_version: current server version
"""
data = get_file_data(file_path)
if not data:
return False
version = data.get('fromversion') or data.get('fromVersion')
if version and server_version_compare(current_server_version, str(version)) < 0:
print_warning('keeping release notes for ({})\nto be published on {} version release'.format(file_path,
version))
return False
return True
def get_new_header(file_path):
data = get_file_data(file_path)
mapping = {
# description
INTEGRATIONS_DIR: ('Integration', data.get('description', '')),
PLAYBOOKS_DIR: ('Playbook', data.get('description', '')),
REPORTS_DIR: ('Report', data.get('description', '')),
DASHBOARDS_DIR: ('Dashboard', data.get('description', '')),
WIDGETS_DIR: ('Widget', data.get('description', '')),
# comment
SCRIPTS_DIR: ('Script', data.get('comment', '')),
# custom
LAYOUTS_DIR: ('Layout', '{} - {}'.format(data.get('typeId'), LAYOUT_TYPE_TO_NAME.get(data.get('kind', '')))),
# should have RN when added
INCIDENT_FIELDS_DIR: ('Incident Field', data.get('name', '')),
CLASSIFIERS_DIR: ('Classifier', data.get('brandName', '')),
# reputations.json has name at first layer
INDICATOR_TYPES_DIR: ('Reputation', data.get('id', data.get('name', ''))),
}
for entity_dir in mapping:
if entity_dir in file_path:
entity_type, description = mapping[entity_dir]
return '#### New {}\n{}'.format(entity_type, description)
# should never get here
return '#### New Content File'
def main():
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('version', help='Release version')
arg_parser.add_argument('git_sha1', help='commit sha1 to compare changes with')
arg_parser.add_argument('server_version', help='Server version')
arg_parser.add_argument('-d', '--date', help='release date in the format %Y-%m-%d', required=False)
args = arg_parser.parse_args()
date = args.date if args.date else datetime.now().strftime('%Y-%m-%d')
# get changed yaml/json files (filter only relevant changed files)
change_log = run_command('git diff --name-status {}'.format(args.git_sha1))
modified_files, added_files, _, _, _, _, _ = filter_changed_files(change_log)
for file_path in get_changed_content_entities(modified_files, added_files):
if not should_clear(file_path, args.server_version):
continue
rn_path = get_release_notes_file_path(file_path)
if os.path.isfile(rn_path):
# if file exist, mark the current notes as release relevant
with open(rn_path, 'r+') as rn_file:
text = rn_file.read()
rn_file.seek(0)
text = text.replace(UNRELEASE_HEADER, CHANGE_LOG_FORMAT.format(version=args.version, date=date))
rn_file.write(text)
else:
# if file doesn't exist, create it with new header
with open(rn_path, 'w') as rn_file:
text = CHANGE_LOG_FORMAT.format(version=args.version, date=date) + get_new_header(file_path)
rn_file.write(text)
run_command('git add {}'.format(rn_path))
if __name__ == '__main__':
main()
| 39.047244 | 117 | 0.660012 |
79556835c82408d3575bfa20638a6ad9f898b902 | 3,692 | py | Python | docs/conf.py | TorchUQ/torchuq | 5335c5948385c7b9bde4baefc9399d79a7cb07ef | [
"MIT"
] | 27 | 2021-12-01T06:22:46.000Z | 2022-03-30T13:16:52.000Z | docs/conf.py | yyht/torchuq | 5335c5948385c7b9bde4baefc9399d79a7cb07ef | [
"MIT"
] | null | null | null | docs/conf.py | yyht/torchuq | 5335c5948385c7b9bde4baefc9399d79a7cb07ef | [
"MIT"
] | 2 | 2022-01-13T06:59:05.000Z | 2022-02-05T23:01:41.000Z | # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
# -- Project information -----------------------------------------------------
project = 'torchuq'
copyright = '2021, TorchUQ team'
author = 'torchuq team'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.extlinks',
'sphinx.ext.napoleon',
'sphinx.ext.imgmath',
'sphinx_autodoc_typehints',
'myst_parser',
]
# For .md files (along with myst_parser)
source_suffix = ['.rst', '.md']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'furo'
html_title = 'TorchUQ Documentation'
html_permalinks_icon = '¶'
html_show_sourcelink = True
html_show_sphinx = True
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
"dark_css_variables": {
"color-problematic": "#b30000",
"color-foreground-primary:": "black",
"color-foreground-secondary": "#5a5c63",
"color-foreground-muted": "#646776",
"color-foreground-border": "#878787",
"color-background-primary": "white",
"color-background-secondary": "#f8f9fb",
"color-background-hover": "#efeff4ff",
"color-background-hover--transparent": "#efeff400",
"color-background-border": "#eeebee",
"color-brand-primary": "#2962ff",
"color-brand-content": "#2a5adf",
"color-highlighted-background": "#ddeeff",
"color-guilabel-background": "#ddeeff80",
"color-guilabel-border": "#bedaf580",
"color-highlight-on-target": "#ffffcc",
"color-admonition-background": "transparent",
"color-card-background": "transparent",
},
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
#html_sidebars = {
#'**': ['logo-text.html', 'globaltoc.html', 'searchbox.html']
#}
| 35.5 | 79 | 0.660347 |
795568d7136f58d75872584bb710fca00a2b59ce | 2,101 | py | Python | python/Closure_Decorator/metaclasses/slotstyped.py | rghvat/learning-synopsis | 2f4fefc2d467a8cfb51ea4be4c0c88732787118a | [
"Apache-2.0"
] | null | null | null | python/Closure_Decorator/metaclasses/slotstyped.py | rghvat/learning-synopsis | 2f4fefc2d467a8cfb51ea4be4c0c88732787118a | [
"Apache-2.0"
] | null | null | null | python/Closure_Decorator/metaclasses/slotstyped.py | rghvat/learning-synopsis | 2f4fefc2d467a8cfb51ea4be4c0c88732787118a | [
"Apache-2.0"
] | null | null | null | # file: slotstyped.py
"""Use of descriptor and metaclass to get slots with
given types.
"""
from __future__ import print_function
class TypDescriptor(object):
"""Descriptor with type.
"""
def __init__(self, data_type, default_value=None):
self.name = None
self._internal_name = None
self.data_type = data_type
if default_value:
self.default_value = default_value
else:
self.default_value = data_type()
def __get__(self, instance, cls):
return getattr(instance, self._internal_name, self.default_value)
def __set__(self, instance, value):
if not isinstance(value, self.data_type):
raise TypeError('Required data type is %s. Got %s' % (
self.data_type, type(value)))
setattr(instance, self._internal_name, value)
def __delete__(self, instance):
raise AttributeError('Cannot delete %r' % instance)
class TypeProtected(type):
"""Metaclass to save descriptor values in slots.
"""
def __new__(mcl, name, bases, cdict):
slots = []
for attr, value in cdict.items():
if isinstance(value, TypDescriptor):
value.name = attr
value._internal_name = '_' + attr
slots.append(value._internal_name)
cdict['__slots__'] = slots
return super(TypeProtected, mcl).__new__(mcl, name, bases, cdict)
if __name__ == '__main__':
from meta_2_3 import with_metaclass
class Typed(with_metaclass(TypeProtected)):
pass
class MyClass(Typed):
"""Test class."""
attr1 = TypDescriptor(int)
attr2 = TypDescriptor(float, 5.5)
def main():
"""Test it.
"""
my_inst = MyClass()
print(my_inst.attr1)
print(my_inst.attr2)
print(dir(my_inst))
print(my_inst.__slots__)
my_inst.attr1 = 100
print(my_inst.attr1)
# this will fail
try:
my_inst.unknown = 100
except AttributeError:
print('cannot do this')
main()
| 25.938272 | 73 | 0.60257 |
795569e23ab3de6b2908e81d64ceaf4fbecd1735 | 6,520 | py | Python | script/testing/util/test_server.py | Yriuns/noisepage | 7f2f6a28972a8967e0bd6ffe4336170713b56c08 | [
"MIT"
] | null | null | null | script/testing/util/test_server.py | Yriuns/noisepage | 7f2f6a28972a8967e0bd6ffe4336170713b56c08 | [
"MIT"
] | null | null | null | script/testing/util/test_server.py | Yriuns/noisepage | 7f2f6a28972a8967e0bd6ffe4336170713b56c08 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import os
import sys
import time
import traceback
from typing import List
from util import constants
from util.db_server import NoisePageServer
from util.test_case import TestCase
from util.constants import LOG, ErrorCode
from util.periodic_task import PeriodicTask
from util.common import (run_command, print_file, print_pipe, update_mem_info)
class TestServer:
""" Class to run general tests """
    def __init__(self, args):
        """ Locations and misc. variable initialization.

        args : dict of command-line options; falsy values are dropped so the
        constants-based defaults below take effect.
        """
        # clean up the command line args
        args = {k: v for k, v in args.items() if v}
        # server output
        db_output_file = args.get("db_output_file", constants.DEFAULT_DB_OUTPUT_FILE)
        db_host = args.get("db_host", constants.DEFAULT_DB_HOST)
        db_port = args.get("db_port", constants.DEFAULT_DB_PORT)
        build_type = args.get("build_type", "")
        server_args = args.get("server_args", {})
        self.is_dry_run = args.get("dry_run",False)
        # handle to the NoisePage server instance under test
        self.db_instance = NoisePageServer(db_host, db_port, build_type, server_args, db_output_file)
        # whether the server should stop the whole test if one of test cases failed
        self.continue_on_error = args.get("continue_on_error", constants.DEFAULT_CONTINUE_ON_ERROR)
        # memory info collection
        self.collect_mem_info = args.get("collect_mem_info", False)
        # incremental metrics
        self.incremental_metric_freq = args.get("incremental_metric_freq", constants.INCREMENTAL_METRIC_FREQ)
        return
    def run_pre_suite(self):
        """ Hook executed once before the whole suite; subclasses override. """
        pass
    def run_post_suite(self):
        """ Hook executed once after the whole suite; subclasses override. """
        pass
def run_test(self, test_case: TestCase):
""" Run the tests """
if not test_case.test_command or not test_case.test_command_cwd:
msg = "test command should be provided"
raise RuntimeError(msg)
# run the pre test tasks
test_case.run_pre_test()
# start a thread to collect the memory info if needed
if self.collect_mem_info:
# spawn a thread to collect memory info
self.collect_mem_thread = PeriodicTask(
self.incremental_metric_freq, update_mem_info,
self.db_instance.db_process.pid, self.incremental_metric_freq,
test_case.mem_metrics.mem_info_dict)
# collect the initial memory info
update_mem_info(self.db_instance.db_process.pid, self.incremental_metric_freq,
test_case.mem_metrics.mem_info_dict)
# run the actual test
with open(test_case.test_output_file, "w+") as test_output_fd:
ret_val, _, _ = run_command(test_case.test_command,
test_case.test_error_msg,
stdout=test_output_fd,
stderr=test_output_fd,
cwd=test_case.test_command_cwd)
# stop the thread to collect the memory info if started
if self.collect_mem_info:
self.collect_mem_thread.stop()
# run the post test tasks
test_case.run_post_test()
self.db_instance.delete_wal()
return ret_val
def run(self, test_suite):
""" Orchestrate the overall test execution """
if not isinstance(test_suite, List):
test_suite = [test_suite]
try:
self.run_pre_suite()
test_suite_ret_vals = self.run_test_suite(test_suite)
test_suite_result = self.determine_test_suite_result(test_suite_ret_vals)
except:
traceback.print_exc(file=sys.stdout)
test_suite_result = constants.ErrorCode.ERROR
finally:
# after the test suite finish, stop the database instance
self.db_instance.stop_db()
return self.handle_test_suite_result(test_suite_result)
def run_test_suite(self, test_suite):
""" Execute all the tests in the test suite """
test_suite_ret_vals = {}
for test_case in test_suite:
try:
# catch the exception from run_db(), stop_db(), and restart_db()
# in case the db is unable to start/stop/restart
if test_case.db_restart:
self.db_instance.restart_db(self.is_dry_run)
elif not self.db_instance.db_process:
self.db_instance.run_db(self.is_dry_run)
except:
traceback.print_exc(file=sys.stdout)
test_suite_ret_vals[test_case] = constants.ErrorCode.ERROR
# early termination in case of db is unable to start/stop/restart
break
if not self.is_dry_run:
try:
test_case_ret_val = self.run_test(test_case)
print_file(test_case.test_output_file)
test_suite_ret_vals[test_case] = test_case_ret_val
except:
print_file(test_case.test_output_file)
if not self.continue_on_error:
raise
else:
traceback.print_exc(file=sys.stdout)
test_suite_ret_vals[test_case] = constants.ErrorCode.ERROR
return test_suite_ret_vals
def determine_test_suite_result(self, test_suite_ret_vals):
"""
Based on all the test suite resultes this determines whether the test
suite was a success or error
"""
for test_case, test_result in test_suite_ret_vals.items():
if test_result is None or test_result != constants.ErrorCode.SUCCESS:
return constants.ErrorCode.ERROR
return constants.ErrorCode.SUCCESS
def handle_test_suite_result(self, test_suite_result):
"""
Determine what to do based on the result. If continue_on_error is
True then it will mask any errors and return success. Otherwise,
it will return the result of the test suite.
"""
if test_suite_result is None or test_suite_result != constants.ErrorCode.SUCCESS:
LOG.error("The test suite failed")
return test_suite_result
def print_db_logs(self):
"""
Print out the remaining DB logs
"""
LOG.info("************ DB Logs Start ************")
print_pipe(self.db_instance.db_process)
LOG.info("************* DB Logs End *************")
| 40 | 109 | 0.621933 |
79556a6651c8232513c2e305f9f84c214d4e0acc | 2,777 | py | Python | bots/PLManBot.py | wen96/pl-man2 | b861fd8adaf28b81747d09934c6437cbb259370b | [
"MIT"
] | null | null | null | bots/PLManBot.py | wen96/pl-man2 | b861fd8adaf28b81747d09934c6437cbb259370b | [
"MIT"
] | null | null | null | bots/PLManBot.py | wen96/pl-man2 | b861fd8adaf28b81747d09934c6437cbb259370b | [
"MIT"
] | null | null | null | #!/usr/bin/python2.7
import sys
import json
def say(txt):
    """Write a prefixed bot message to stderr and flush it immediately."""
    message = 'PLBot >> ' + txt
    sys.stderr.write(message)
    sys.stderr.flush()
class Entity:
    """One board entity decoded from a PLMan "visual_entity" JSON message."""

    class Types():
        # Numeric entity codes used by the game protocol.
        generator = 6
        tree = 1
        dot = 7
        enemy = 3
        empty = 0
        fireDoor = 4
        gameExit = 5

    typeEntity = "None"
    alive = False

    def __init__(self, JSONObject):
        self.typeEntity = JSONObject["typeEntity"]
        self.alive = JSONObject["alive"]
        if (self.typeEntity == self.Types.generator):
            self.numbers = JSONObject["attributes"][0]["value"].split(';')
            # BUG FIX: wrap map() in list() - on Python 3 map() returns a
            # lazy iterator, so len()/indexing on self.numbers would fail.
            self.numbers = list(map(float, self.numbers))

    # Predicates compare against the named Types constants instead of
    # repeating magic numbers (consistency fix; behavior unchanged).
    def isEnemy(self):
        return self.typeEntity == self.Types.enemy

    def isTree(self):
        return self.typeEntity == self.Types.tree

    def isDot(self):
        return self.typeEntity == self.Types.dot

    def isEmpty(self):
        return self.typeEntity == self.Types.empty

    def isFireDoorGenerator(self):
        return self.typeEntity == self.Types.generator

    def isFireDoor(self):
        return self.typeEntity == self.Types.fireDoor

    def isExit(self):
        return self.typeEntity == self.Types.gameExit
class GameData:
    """Wrapper around a "custom_data" payload; exposes its 'value' field."""

    value = ""

    def __init__(self, JSONObject):
        # Keep only the value carried by the message.
        self.value = JSONObject["value"]
class GameStatus:
    """Game lifecycle status: 'continue' while running, else 'win' or 'lose'."""

    value = "continue"

    def __init__(self, JSONObject=None):
        # A truthy payload overrides the default 'continue' state.
        if (JSONObject):
            self.value = JSONObject["value"]

    def finished(self):
        """True once the game is no longer running."""
        return self.value != "continue"

    def hasWin(self):
        return self.value == "win"

    def hasLose(self):
        return self.value == "lose"
class PLManParser:
    """Turns raw JSON command strings into typed game objects."""

    @classmethod
    def parseCommand(cls, command):
        """Decode one JSON message; unparseable input yields a default GameStatus."""
        try:
            decoded = json.loads(command)
        except ValueError:
            # Bad JSON is treated as "game still running".
            return GameStatus()

        kind = decoded["type"]
        if kind == "visual_entity":
            return Entity(decoded)
        if kind == "custom_data":
            # Status updates get their own wrapper; anything else is generic data.
            if decoded["key"] == "status":
                return GameStatus(decoded)
            return GameData(decoded)
        # Unknown message type: hand back the raw decoded object.
        return decoded
class PLMan:
    """Blocking command channel to the PLMan game over stdin/stdout."""

    @classmethod
    def command(cls, cm):
        """Send one command line and block until a non-empty reply arrives."""
        sys.stdout.write(cm + '\n')
        sys.stdout.flush()
        blocked = True
        reply = "\n"
        while (blocked or reply == "\n"):
            try:
                reply = sys.stdin.readline()
                sys.stdin.flush()
                blocked = False
            except IOError:
                # Non-blocking pipe not ready yet; keep polling.
                pass
        # Strip the trailing newline before parsing the reply.
        return PLManParser.parseCommand(reply[:-1])

    @classmethod
    def see(cls, direction):
        return cls.command('see ' + direction)

    @classmethod
    def move(cls, direction):
        return cls.command('move ' + direction)

    @classmethod
    def use(cls, direction):
        return cls.command('use ' + direction)
79556b1cdea602a696524f2db2dcbb15ecb6bf3a | 1,999 | py | Python | examples/pyelement/spring_mass_damper.py | peekwez/tacs | ac80d859b1cd8befb322efd338bf6c2ec1aefa03 | [
"Apache-2.0"
] | 1 | 2021-02-17T11:56:46.000Z | 2021-02-17T11:56:46.000Z | examples/pyelement/spring_mass_damper.py | peekwez/tacs | ac80d859b1cd8befb322efd338bf6c2ec1aefa03 | [
"Apache-2.0"
] | 2 | 2020-06-29T12:42:37.000Z | 2020-10-14T18:03:37.000Z | examples/pyelement/spring_mass_damper.py | peekwez/tacs | ac80d859b1cd8befb322efd338bf6c2ec1aefa03 | [
"Apache-2.0"
] | 5 | 2020-04-27T20:04:34.000Z | 2021-07-15T13:04:15.000Z | from mpi4py import MPI
from tacs import TACS, elements
import numpy as np
# Define an element in TACS using the pyElement feature
class SpringMassDamper(elements.pyElement):
    """Single-DOF spring-mass-damper TACS element: m*u'' + c*u' + k*u = 0."""

    def __init__(self, num_nodes, num_disps, m, c, k):
        # NOTE(review): pyElement appears to take (num_disps, num_nodes),
        # i.e. swapped relative to this constructor's order - confirm.
        super(SpringMassDamper, self).__init__(num_disps, num_nodes)
        # Physical coefficients: mass, damping, stiffness.
        self.m = m
        self.c = c
        self.k = k    

    def getInitConditions(self, v, dv, ddv, xpts):
        '''Define the initial conditions'''
        # Initial displacement -0.5 and initial velocity 1.0 (written in place).
        v[0] = -0.5
        dv[0] = 1.0

        return

    def addResidual(self, time, res, X, v, dv, ddv):
        '''Add the residual of the governing equations'''
        # r = m*u'' + c*u' + k*u, accumulated into res in place.
        res[0] += self.m*ddv[0] + self.c*dv[0] + self.k*v[0]

        return    

    def addJacobian(self, time, J, alpha, beta, gamma, X, v, dv, ddv):
        '''Add the Jacobian of the governing equations'''
        # dr/du * alpha + dr/du' * beta + dr/du'' * gamma.
        J[0] += alpha*self.k + beta*self.c + gamma*self.m

        return
if __name__ == '__main__':
    # Create instance of the user-defined element.
    num_nodes = 1
    num_disps = 1
    m = 1.0
    c = 0.5
    # Complex-step perturbation on k: df/dk can be read off the imaginary
    # part of the solution divided by the 1e-30 step.
    k = 5.0+1j*1e-30
    spr = SpringMassDamper(num_nodes, num_disps, m, c, k)

    # Add the user-defined element to a single-element TACS assembler.
    comm = MPI.COMM_WORLD
    assembler = TACS.Assembler.create(comm, 1, 1, 1)
    conn = np.array([0], dtype=np.intc)
    ptr = np.array([0, 1], dtype=np.intc)
    assembler.setElementConnectivity(conn, ptr)
    assembler.setElements([spr])
    assembler.initialize()

    # Create instance of integrator: 2nd-order BDF over [0, 10] s.
    t0 = 0.0
    dt = 0.01
    num_steps = 1000
    tf = num_steps*dt
    order = 2
    bdf = TACS.BDFIntegrator(assembler, t0, tf, num_steps, order)

    # Integrate governing equations step by step.
    bdf.iterate(0)
    for step in range(1, num_steps + 1):
        bdf.iterate(step)
    _, uvec, _, _ = bdf.getStates(num_steps)
    u = uvec.getArray()
    # BUG FIX: these were Python-2 print statements (a SyntaxError on
    # Python 3); converted to the function form.
    print("f = ", u)
    print("df/dx, approx = ", u.imag/1e-30)

    # Write out solution
    bdf.writeRawSolution('spring.dat', 0)
79556b66e33f3a52b915b4d27f02f9c6645eef7c | 459 | py | Python | setup.py | olescheller/slpp-23 | 0a18a4abfa67c2a7b1a1e00768f383525af01195 | [
"MIT"
] | null | null | null | setup.py | olescheller/slpp-23 | 0a18a4abfa67c2a7b1a1e00768f383525af01195 | [
"MIT"
] | null | null | null | setup.py | olescheller/slpp-23 | 0a18a4abfa67c2a7b1a1e00768f383525af01195 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from setuptools import setup
# Distribution metadata for the SLPP-23 Lua <-> Python data-structure parser.
setup(
    name='SLPP-23',
    description='SLPP-23 is a simple lua-python data structures parser',
    version='1.2',
    author='Ilya Skriblovsky',
    author_email='ilyaskriblovsky@gmail.com',
    url='https://github.com/IlyaSkriblovsky/slpp-23',
    license='https://github.com/IlyaSkriblovsky/slpp-23/blob/master/LICENSE',
    keywords=['lua'],
    py_modules=['slpp'],  # single-module distribution
    install_requires=['six'],  # py2/py3 compatibility shim
)
| 28.6875 | 77 | 0.688453 |
79556bd114a808b5a88cab0d97cf9aa2e7ebfbfa | 3,736 | py | Python | ExplorationModule/ICM.py | ariel415el/RL-Exploration | 87e5bb77862cf7545634e89bde99b9e953ebb8cf | [
"Apache-2.0"
] | null | null | null | ExplorationModule/ICM.py | ariel415el/RL-Exploration | 87e5bb77862cf7545634e89bde99b9e953ebb8cf | [
"Apache-2.0"
] | null | null | null | ExplorationModule/ICM.py | ariel415el/RL-Exploration | 87e5bb77862cf7545634e89bde99b9e953ebb8cf | [
"Apache-2.0"
] | null | null | null | import torch
from torch import nn
class Forward_module(nn.Module):
    """Forward-dynamics head: predicts next-state features from (features, action)."""

    def __init__(self, action_dim, hidden_dim, activation=nn.ReLU(inplace=True)):
        super(Forward_module, self).__init__()
        # Discrete actions are embedded into a fixed 128-d vector before
        # being concatenated with the state features.
        embed_dim = 128
        self.action_encoder = nn.Embedding(action_dim, embed_dim)
        self.layers = nn.Sequential(
            nn.Linear(hidden_dim + embed_dim, hidden_dim),
            activation,
            nn.Linear(hidden_dim, hidden_dim),
            activation,
            nn.Linear(hidden_dim, hidden_dim),
        )

    def forward(self, feature_maps, actions):
        """Return predicted next-state features for each (feature, action) pair."""
        embedded = self.action_encoder(actions)
        joint = torch.cat((feature_maps, embedded), dim=-1)
        return self.layers(joint)
class Inverse_module(nn.Module):
    """Inverse-dynamics head: predicts the action taken between two states."""

    def __init__(self, action_dim, hidden_dim, activation=nn.ReLU(inplace=True)):
        super(Inverse_module, self).__init__()
        # Produces raw logits; the softmax lives inside the cross-entropy loss.
        self.layers = nn.Sequential(
            nn.Linear(2 * hidden_dim, hidden_dim),
            activation,
            nn.Linear(hidden_dim, hidden_dim),
            activation,
            nn.Linear(hidden_dim, action_dim),
        )

    def forward(self, s_featues, ns_featues):
        """Return action logits for each (state, next-state) feature pair."""
        joint = torch.cat((s_featues, ns_featues), dim=-1)
        return self.layers(joint)
class ICM_module(nn.Module):
    """Container bundling the shared state encoder with the forward/inverse heads."""
    def __init__(self, state_encoder, action_dim, activation=nn.ReLU(inplace=True)):
        super(ICM_module, self).__init__()
        # Shared feature extractor; its `features_space` attribute sets the
        # hidden width of both heads.
        self.state_feature_extractor = state_encoder
        self.inverse_module = Inverse_module(action_dim, state_encoder.features_space, activation)
        self.forward_module = Forward_module(action_dim, state_encoder.features_space, activation)
class ICM(object):
    """Intrinsic Curiosity Module: turns forward-model prediction error into
    an intrinsic reward while training the curiosity networks on the fly."""

    def __init__(self, state_encoder, action_dim, lr=0.001, intrinsic_reward_scale=1.0, beta=0.2):
        super(ICM, self).__init__()
        self.action_dim = action_dim
        # beta weighs forward vs inverse loss; the scale multiplies rewards.
        self._beta = beta
        self._intrinsic_reward_scale = intrinsic_reward_scale
        self.cross_entropy_loss = nn.CrossEntropyLoss()
        self.module = ICM_module(state_encoder, action_dim, activation=nn.ReLU())
        self.curiosity_optimizer = torch.optim.Adam(self.module.parameters(), lr=lr)

    def get_intrinsic_reward(self, states, next_states, actions):
        """Compute per-sample intrinsic rewards AND take one optimizer step.

        NOTE(review): this method mutates the ICM weights on every call -
        callers should invoke it exactly once per transition batch.
        Returns a detached numpy array of rewards.
        """
        s_featues = self.module.state_feature_extractor(states)
        ns_featues = self.module.state_feature_extractor(next_states)

        # Inverse loss: predict the action from consecutive state features.
        estimated_actions = self.module.inverse_module(s_featues, ns_featues)
        loss_i = self.cross_entropy_loss(estimated_actions, actions)

        # Forward loss: squared prediction error of next-state features.
        estimated_ns_features = self.module.forward_module(s_featues, actions)
        # features_dists = (0.5*(ns_featues - estimated_ns_features).pow(2)).sum(1)
        features_dists = 0.5*(ns_featues - estimated_ns_features).norm(2, dim=-1).pow(2)
        loss_f = features_dists.mean()

        # Intrinsic reward is the scaled per-sample forward error.
        intrisic_rewards = self._intrinsic_reward_scale*features_dists

        # Optimize the joint curiosity objective.
        curiosity_loss = (1-self._beta)*loss_i + self._beta*loss_f
        self.curiosity_optimizer.zero_grad()
        curiosity_loss.backward()
        self.curiosity_optimizer.step()
        self.debug_loss = curiosity_loss.item()

        return intrisic_rewards.detach().cpu().numpy()

    def get_last_debug_loss(self):
        # NOTE(review): raises AttributeError if called before the first
        # get_intrinsic_reward() call - confirm callers respect that.
        return self.debug_loss
79556c1fec789dea9365221f877d53295f05c3a8 | 5,155 | py | Python | Functions.py | shaofengzhu/codespacefunc | 96f2e9d65d4486fd87fde9aea411fd034fefd992 | [
"MIT"
] | null | null | null | Functions.py | shaofengzhu/codespacefunc | 96f2e9d65d4486fd87fde9aea411fd034fefd992 | [
"MIT"
] | null | null | null | Functions.py | shaofengzhu/codespacefunc | 96f2e9d65d4486fd87fde9aea411fd034fefd992 | [
"MIT"
] | null | null | null | from __future__ import annotations
import itertools
import pandas as pd
import base64
from urllib.parse import quote as urlquote
from urllib.request import urlopen
def myadd(x, y):
    """Return the sum of *x* and *y* (basic addition sample function)."""
    total = x + y
    return total
def badadd(x : float, y : float) -> float:
    """Add *x* and *y*, deliberately offset by 42 (sample "bad" function)."""
    shifted = x + y
    return shifted + 42
def get_table() -> NumberMatrix:
    """Return a small list-of-lists matrix (checks nested lists round-trip)."""
    matrix = [
        [1, 2],
        [3, 4],
    ]
    return matrix
def get_table_2() -> pd.DataFrame:
    """Return a 2x2 DataFrame (checks DataFrame results round-trip)."""
    rows = [[1, 2], [3, 4]]
    return pd.DataFrame(rows)
# Constants should not be exported
alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
# Underscore-prefixed names should not be exported
def _column_names():
yield from alphabet
for i in itertools.count(start=1):
yield from (f"{c}{i}" for c in alphabet)
def get_pandas(rows : int, columns : int) -> NumberMatrix:
    """Build a DataFrame of uniform random numbers.

    rows: Number of rows to generate
    columns: Number of columns to generate
    Returns: two-dimensional matrix of random numbers"""
    import numpy as np
    import pandas as pd
    names = list(itertools.islice(_column_names(), columns))
    values = np.random.rand(rows, columns)
    return pd.DataFrame(values, columns=names)
def get_names(columns : int) -> StringMatrix:
    """Return a one-row matrix holding the first *columns* column labels.

    columns: Number of columns worth of names to return"""
    first_names = itertools.islice(_column_names(), columns)
    return [list(first_names)]
PNG = 'iVBORw0KGgoAAAANSUhEUgAAADIAAAAyCAYAAAAeP4ixAAAABGdBTUEAALGPC/xhBQAAAAlwSFlzAAAOwgAADsIBFShKgAAAABh0RVh0U29mdHdhcmUAcGFpbnQubmV0IDQuMS40E0BoxAAACAxJREFUaEPtmHtQlNcZxp1OOpN00pm20+lM/+s/6XQmnWkbbzXEgLcgkChRomKT1iC01VW8hBhFpeINDWJVQhSRYCSiKLesiRIRqjUmNQjKZVcgwCKBILAEBBaQ29PnnO8ssuwy4nph/+CdeWb55PvO9/72fd/nHBw3FmPxJGJD7mXK5GqaFGM0eR0tMz0Tlmf6CfXUpmumH2/Ku6yydhDiwdBrcDXNTSzH1MMl+E1kIf5wwICoy7fx9OY8k8raQbgQyIz4Uinx8xtJFQhMq8Ivt9/A7/cbMP4DI57dkj88iO+y0ELPJaubJ7z6lkvLc8maepGrSts+AgMDTf7+/pg+fbpLa/HixR0iV5W2fYyBPGGNgbiaxkBcTQ8M4j7fHRNiJoxIk7dMtnnZ49QDg3i85oEp66eMSG46N5uXPU6Nzcijkq+vL6KiopCVlYXCwkJUVlaiqqpqQOK6qKgI2dnZ2LNnD3x8fByuM6og0dHR6OjoQGd3L/IqzUj9pgoHs24iUl+ArSl52J1xHQc+L8TJL8uQX9GA3r5+mM1mhISE2K01aiDh4eEQcfLrKszfdxGzd2fBM+ILvLLjHGZu+xwzwvWY9q9P4bE5DS9vTMHUDafgt/sMbpga0N3dDZ1OZ7Pegw/7XA/pRg+rYkMxrpQ1wDsyG2/G/BfRmUYs3JejIM4oiHS4b0rF1NBTeGn9Sby4LgkzNibj+6Y2GI3GhwNxn+eOiXsnPrQsFgsOZZfCi5W4XtUkq5Nb3iAhpm8hRJgV4rSCOIEpIccxee0x7E3/Rt7v5+fnPMijkohd+iLMjjiPi4Y6eZ1VWOMAIhluCuLP7yRi0pqjWHM4S94fEBAwsN6oguxIL4Dnzkx4R2RiZfyX8NzGlgrLIATnQkBsIMR7hHjXCvExJq5KwKpD5+XzLgOyPe0GZm0/i5lbP2MlxHATQg23FeLFd5MkxGQFMSE4HsEHM+XzTxwkODhY7gP19fXo7e2VSYiwdPWg2XIXze1CXTY6klWkQYR8wrkgxGoN4oUVcVj54Tm1ghbCxbh2/6VLlxp5+TOVum0MBZnmPQ1uy91GrONpx9HT24cz+TVYnnAVc/Zk39dm3d7THEqDOIZJq49KiPErjuBPy2OxIuYsv4UrwO13NNWvB5o+AHoa0NfXV+IQZiiIu587xseOH5EiLkegv59DfcYgbVY4lC0EHWoYm51iA/GRBqE7jD8uOzQEZC1QtwqoXcFPAvVZ+M7+OJX+vbBrrZnawfF+8lrghdb2VvzHeBs+kTkSQjiUgJglILY6stlkG5sVDjVxFSFWHsELCsIGpG4N8H0wIXRAzT+B74KAlgwB0kP9SiFo4eyMhIWFyf5ddyKPEBc0CDqUGO6hENKhhM3aOZSA4FwMghBaG/sF0H6RECsJsZwA/wCqlwK3lvCaVWEQJEAhaOEsSHJyMro5G2ImZu86L23WxqFGYLMaRJyci8EgidkFQPMnrMIyrQrVAYT4G1D1JlDpz/ZqEyCHFYIWzoLk5+ejvL6VEJyLoRCDbFYbbsc2KxxqKIT/zhR0d9GcatlS1YEEeJsAfwVMfyHEQqDCD+goEEOfpxC0cBbEZDLJE63nTutwf6YcKsOhQ/mEp2LR7k+xcFc6FuxMg9+OFMzfdgrztiZLLSLAgYyr6Oz8gS61XbWSqMJbhGAVKhcA5fOBb32BOxdERRoVghbOgtTV1eFKab0Dm03XIOhQbnSoLSe+4iHwDjeDGuBuuQN9S5UBnUVMUM9KcMCrB1dhEavwBiFeJ8QcoMwH+CFdgPQqBC2cBRF/EBXeatIcymqzAw6l2eyHZ29oyTZuu7cnDEjY6mrHAy2rsFi1k
qpC2WtAqTdQ4knnyhSt1aEQtHAWJDc3Fw13Ooe1Wb9devR0twANm+0hpK2qvWG4gZZVmEeIuYR4lRBehJgFGHn8sVwXILbHFWdBkpKSpA2+/n6mcihbm92vz6OF0kbtIAZVoUZUYZiBtrZSKVtJVOHmTEJMo2bQtbpEa51UCFo4CxIaGipBdqReUw5la7OJOQa2AC10AGLQDi2r8Hf7ga4YNNCylUQVXiEEkzd6AMVTef8G+V6CBCkELZwF8fb2RltbG4pumVkNe5tNzCkmyDFVBQc79P0GumQ2AVQrGdwJ8RLlxipfExBd1C8UghYChOpdunRpx4NKr9fLo24cT7LWM5T1IJiYQxdq+dj5gZZVYCsZWAUBIFQbYa3GIZX+vSBEZFBQUIIz0ul0Ca2trbQmIL+iXs5FZOpVvJ/yP1wtqaVNfqSq4GCga9ki5ng6WhwNIZZ7x0EqhhWMJvx+ah+/gL1UlPbZov2xRYhyu2o8iuDaT3PhCKpZvmlwNB0ZfqBrNvL3JwhD0zBzlhoTKVaw4ehA0oOD64uD4mnq1+rVjyf4rqf4kueo54Xk2838pp0a6BD5ONeZo9b6HT9/ql71ZENm0shWGclAV7Dlmnl0N3GWxCzcA3leLTd6wSS6YGb/j2SgW7+SieNutQZSzbZjcI3fquVGL5hEAdq/VlUYZoe22mrdAZE2W/G0BtKUIiCExT6jlhu9YBLsE0YjB97RDm14WUvaKgP/zVqN/h4BEq+WGt0gwo+YDL9ihoXHle9otzc51NaBtoHgtYk23XJB3s5z1HV+OP7fktEIJiOcbB2lWXNfJ3fmfA72ObZPqjbg7YTss8hf875OQvybPz6rlnCtEIkxSX8qliqgzExY7AtmqphKoGhd+Ll6ZAQxbtz/Abfak9NtFSYAAAAAAElFTkSuQmCC'
def get_image():
    """Return the embedded PNG as a {data, mimeType} payload dict."""
    return {
        "data": PNG,
        "mimeType": "image/png",
    }
def countrydata(country: str) -> AnyMatrix:
    """Fetch per-day COVID data for *country* and return it with NaNs zeroed."""
    # NOTE(review): `country` is interpolated into the query string without
    # URL-encoding (a urlquote call was disabled here) - names with spaces
    # or special characters may break; confirm against the backend.
    url = f"https://e2efunc.azurewebsites.net/api/covidata?country={country}"
    frame = pd.read_csv(url)
    frame.fillna(0.0, inplace=True)
    return frame
def countryplot(country: str):
    """Fetch a rendered plot for *country* and return it base64-encoded."""
    # NOTE(review): like countrydata(), the country name is not URL-encoded.
    url = f"https://e2efunc.azurewebsites.net/api/covidata?country={country}&output=plot"
    with urlopen(url) as response:
        payload = base64.b64encode(response.read()).decode("ascii")
    return {
        "data": payload,
        "mimeType": "image/png"
    }
| 62.108434 | 2,929 | 0.82968 |
79556c99f1f10056f2785fc7d534d20afde7c17c | 9,528 | py | Python | bin/EvaluatorDirectVASP.py | quanshengwu/PyChemia | 98e9f7a1118b694dbda3ee75411ff8f8d7b9688b | [
"MIT"
] | 1 | 2021-03-26T12:34:45.000Z | 2021-03-26T12:34:45.000Z | bin/EvaluatorDirectVASP.py | quanshengwu/PyChemia | 98e9f7a1118b694dbda3ee75411ff8f8d7b9688b | [
"MIT"
] | null | null | null | bin/EvaluatorDirectVASP.py | quanshengwu/PyChemia | 98e9f7a1118b694dbda3ee75411ff8f8d7b9688b | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import argparse
import logging
import os
import numpy as np
from pychemia import pcm_log
from pychemia.analysis import StructureAnalysis
from pychemia.code.vasp.task import IonRelaxation
from pychemia.db import get_database
from pychemia.evaluator import DirectEvaluator
from pychemia.utils.serializer import generic_serializer
from pychemia.utils.periodic import atomic_number
def worker(db_settings, entry_id, workdir, target_forces, relaxator_params):
    """Relax one DB entry's structure with VASP and store the results.

    db_settings: dict passed to get_database() to open the PyChemia DB.
    entry_id: id of the entry to relax; locked for the duration of the run.
    workdir: directory where VASP runs; OUTCAR is read back from here.
    target_forces: force-convergence goal handed to IonRelaxation.
    relaxator_params: dict with 'binary' (vasp executable) and 'nmpiparal'.
    """
    pcdb = get_database(db_settings)
    pcm_log.info('[%s]: Starting relaxation. Target forces: %7.3e' % (str(entry_id), target_forces))

    # Cooperative locking: skip entries another worker is already processing.
    if pcdb.is_locked(entry_id):
        return
    else:
        pcdb.lock(entry_id)
    structure = pcdb.get_structure(entry_id)
    structure = structure.scale()
    print('relaxator_params', relaxator_params)
    relaxer = IonRelaxation(structure, workdir=workdir, target_forces=target_forces, waiting=False,
                            binary=relaxator_params['binary'], encut=1.3, kp_grid=None, kp_density=1E4,
                            relax_cell=True, max_calls=10)
    print('relaxing on:', relaxer.workdir)
    relaxer.run(relaxator_params['nmpiparal'])
    pcm_log.info('[%s]: Finished relaxation. Target forces: %7.3e' % (str(entry_id), target_forces))

    # Harvest the VASP output written to the working directory.
    filename = workdir + os.sep + 'OUTCAR'
    if os.path.isfile(filename):
        forces, stress, total_energy = relaxer.get_forces_stress_energy()

        if forces is not None:
            magnitude_forces = np.apply_along_axis(np.linalg.norm, 1, forces)
            print('Forces: Max: %9.3e Avg: %9.3e' % (np.max(magnitude_forces), np.average(magnitude_forces)))
            print('Stress: ', np.max(np.abs(stress.flatten())))

        if forces is None:
            pcm_log.error('No forces found on %s' % filename)
        if stress is None:
            pcm_log.error('No stress found on %s' % filename)
        if total_energy is None:
            pcm_log.error('No total_energy found on %s' % filename)

        new_structure = relaxer.get_final_geometry()

        if forces is not None and stress is not None and total_energy is not None and new_structure is not None:
            pcm_log.info('[%s]: Updating properties' % str(entry_id))
            pcdb.update(entry_id, structure=new_structure)
            te = total_energy
            # Store the totals plus per-atom and per-formula-unit energies.
            pcdb.entries.update({'_id': entry_id},
                                {'$set': {'status.relaxation': 'succeed',
                                          'status.target_forces': target_forces,
                                          'properties.forces': generic_serializer(forces),
                                          'properties.stress': generic_serializer(stress),
                                          'properties.energy': te,
                                          'properties.energy_pa': te / new_structure.natom,
                                          'properties.energy_pf': te / new_structure.get_composition().gcd}})

            # Fingerprint
            # Update the fingerprints only if the two structures are really different
            diffnatom = structure.natom != new_structure.natom
            diffcell = np.max(np.abs((structure.cell - new_structure.cell).flatten()))
            diffreduced = np.max(np.abs((structure.reduced - new_structure.reduced).flatten()))
            if diffnatom != 0 or diffcell > 1E-7 or diffreduced > 1E-7:
                analysis = StructureAnalysis(new_structure, radius=50)
                x, ys = analysis.fp_oganov(delta=0.01, sigma=0.01)
                fingerprint = {'_id': entry_id}
                for k in ys:
                    atomic_number1 = atomic_number(new_structure.species[k[0]])
                    atomic_number2 = atomic_number(new_structure.species[k[1]])
                    # Species-pair key, symmetric in the two atomic numbers.
                    pair = '%06d' % min(atomic_number1 * 1000 + atomic_number2,
                                        atomic_number2 * 1000 + atomic_number1)
                    fingerprint[pair] = list(ys[k])
                if pcdb.db.fingerprints.find_one({'_id': entry_id}) is None:
                    pcdb.db.fingerprints.insert(fingerprint)
                else:
                    pcdb.db.fingerprints.update({'_id': entry_id}, fingerprint)
            else:
                pcm_log.debug('Original and new structures are very similar.')
                pcm_log.debug('Max diff cell: %10.3e' % np.max(np.absolute((structure.cell -
                                                                            new_structure.cell).flatten())))
                if structure.natom == new_structure.natom:
                    pcm_log.debug('Max diff reduced coordinates: %10.3e' %
                                  np.max(np.absolute((structure.reduced - new_structure.reduced).flatten())))

        else:
            pcdb.entries.update({'_id': entry_id}, {'$set': {'status.relaxation': 'failed'}})
            pcm_log.error('Bad data after relaxation. Tagging relaxation as failed')
    else:
        pcm_log.error('ERROR: File not found %s' % filename)
    pcm_log.info('[%s]: Unlocking the entry' % str(entry_id))
    pcdb.unlock(entry_id)
if __name__ == '__main__':
    # Route all pychemia logging through the standard logging tree at DEBUG.
    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger('pychemia')
    logger.addHandler(logging.NullHandler())
    logger.setLevel(logging.DEBUG)

    description = """Launch VASP for non-evaluated entries in a PyChemia Database"""

    parser = argparse.ArgumentParser(description=description)

    # MongoDB connection options.
    parser.add_argument('-t', '--host',
                        default='localhost', metavar='server', type=str,
                        help='Hostname or address (default: localhost)')
    parser.add_argument('-o', '--port',
                        default=27017, metavar='port', type=int,
                        help='MongoDB port (default: 27017)')
    parser.add_argument('-u', '--user',
                        default=None, metavar='username', type=str,
                        help='Username (default: None)')
    parser.add_argument('-p', '--passwd',
                        default=None, metavar='password', type=str,
                        help='Password (default: None)')
    parser.add_argument('-d', '--dbname',
                        default=None, metavar='dbname', type=str,
                        help='PyChemia Database name (default: None)')
    # VASP execution options.
    parser.add_argument('-b', '--binary',
                        default=None, metavar='path', type=str,
                        help='VASP binary (default: None)')
    parser.add_argument('-f', '--target_forces',
                        default=1E-3, metavar='x', type=float,
                        help='Target Forces (default: 1E-3)')
    parser.add_argument('-n', '--nparal',
                        default=1, metavar='N', type=int,
                        help='Number of parallel processes (default: 1)')
    parser.add_argument('-m', '--nmpiparal',
                        default=1, metavar='N', type=int,
                        help='Number of MPI parallel processes (default: 1)')
    parser.add_argument('-r', '--replicaset',
                        default=None, metavar='name', type=str,
                        help='ReplicaSet (default: None)')
    parser.add_argument('-w', '--workdir',
                        default='.', metavar='path', type=str,
                        help='Working Directory (default: None)')
    parser.add_argument('--evaluate_all', action='store_true',
                        help='Evaluate All (default: No)')
    parser.add_argument('--waiting', action='store_true',
                        help='Waiting (default: No)')
    parser.add_argument('--ssl', action='store_true',
                        help='Use SSL to connect to MongoDB (default: No)')
    args = parser.parse_args()
    # The database name is the only mandatory option.
    if args.dbname is None:
        parser.print_help()
        exit(1)

    print(args)
    db_settings = {'name': args.dbname, 'host': args.host, 'port': args.port, 'ssl': args.ssl,
                   'replicaset': args.replicaset}
    if args.user is not None:
        if args.passwd is None:
            raise ValueError('Password is mandatory if user is entered')
        db_settings['user'] = args.user
        db_settings['passwd'] = args.passwd

    if args.binary is None:
        args.binary = 'vasp'
    if args.nmpiparal is None:
        args.nmpiparal = 1
    relaxator_params = {'binary': args.binary, 'nmpiparal': args.nmpiparal}
    # Echo the effective configuration before starting.
    print('pyChemia Evaluator using VASP')
    print('dbname : %s' % args.dbname)
    print('host : %s' % args.host)
    print('port : %d' % args.port)
    print('user : %s' % args.user)
    print('replicaset: %s' % args.replicaset)
    print('workdir : %s' % args.workdir)
    print('nparal : %d' % args.nparal)
    print('nmpiparal : %d' % args.nmpiparal)
    print('binary : %s' % str(args.binary))
    print('target-forces : %.2E' % args.target_forces)
    print('evaluate_all : %s' % str(args.evaluate_all))
    print('waiting : %s' % str(args.waiting))
    print('ssl : %s' % str(args.ssl))
    print(db_settings)

    # DirectEvaluator drives worker() over all non-evaluated DB entries.
    evaluator = DirectEvaluator(db_settings, args.workdir,
                                target_forces=args.target_forces,
                                nparal=args.nparal,
                                relaxator_params=relaxator_params,
                                worker=worker,
                                evaluate_all=args.evaluate_all,
                                waiting=args.waiting)
    evaluator.run()
| 48.365482 | 112 | 0.570109 |
79556cc295505bf3b15b86c1962f8830a06cdc1f | 179 | py | Python | neuralcorefres/model/__init__.py | RyanElliott10/NeuralCorefRes | a0ca5c614cc1638ab7bd230fcfefbd26120ed800 | [
"MIT"
] | 2 | 2020-02-23T01:00:22.000Z | 2020-06-17T21:39:57.000Z | neuralcorefres/model/__init__.py | RyanElliott10/NeuralCorefRes | a0ca5c614cc1638ab7bd230fcfefbd26120ed800 | [
"MIT"
] | 9 | 2020-02-27T01:08:55.000Z | 2022-03-12T00:16:12.000Z | neuralcorefres/model/__init__.py | RyanElliott10/NeuralCorefRes | a0ca5c614cc1638ab7bd230fcfefbd26120ed800 | [
"MIT"
] | null | null | null | from neuralcorefres.model.cluster_network import *
from neuralcorefres.model.word_embedding import *
__all__ = [
"ClusterNetwork",
"WordEmbedding",
"EMBEDDING_DIM"
]
| 19.888889 | 50 | 0.748603 |
79556d0d005637542fca359d73602170662e30bd | 3,937 | py | Python | samples/report_test_sam.py | ivmfnal/metacat | 9eeea5f5e7867395c7203b8a7789607d3bdbd903 | [
"BSD-3-Clause"
] | 1 | 2022-03-18T19:01:29.000Z | 2022-03-18T19:01:29.000Z | samples/report_test_sam.py | ivmfnal/metacat | 9eeea5f5e7867395c7203b8a7789607d3bdbd903 | [
"BSD-3-Clause"
] | null | null | null | samples/report_test_sam.py | ivmfnal/metacat | 9eeea5f5e7867395c7203b8a7789607d3bdbd903 | [
"BSD-3-Clause"
] | null | null | null | import os,time,sys,datetime, glob, fnmatch,string,subprocess, json
import samweb_client
samweb = samweb_client.SAMWebClient(experiment='dune')
month = sys.argv[1]
first = sys.argv[1]
last = sys.argv[2]
out = open("report_"+first+"_"+last+".tex",'w')
top = "\\documentclass[10pt]{article}\n"
top += "\\setlength{\\textwidth}{6.5in}\n"
top += "\\setlength{\\oddsidemargin}{0.00in}"
top += "\\begin{document} \n"
begin = "\\begin{table}\n\\begin{tabular}{rrrrrrrr}\n"
out.write(top)
out.write(begin)
header = "Expt& version& tier& stream&files& events& size, TB& event size, MB\\\\\n"
out.write(header)
for expt in ["protodune-sp","protodune-dp"]:
for stream in ["physics","cosmics","test","commissioning","ALL"]:
for tier in ["raw","full-reconstructed","pandora_info","hit-reconstructed","ALL"]:
#for expt in ["protodune-dp"]:
# for stream in ["cosmics","ALL"]:
# for tier in ["full-reconstructed"]:
for version in ["v08ALL","raw"]:
if (tier == "raw" and version != "raw") or (tier != "raw" and version == "raw" ):
continue
command = ""
command += "data_tier "+tier
command += " and data_stream " + stream
command += " and run_type " + expt
command += " and file_type detector "
command += " and create_date >= " + first
command += " and create_date <= " + last
if tier != "raw":
command += " and version " + version
command = command.replace("ALL","%")
print (command)
result = samweb.listFilesSummary(command)
#print result
file_count = result["file_count"]
if file_count == 0:
continue
events = result["total_event_count"]
ssize = result["total_file_size"]/1000/1000/1000/1000.
fsize = ssize/events*1000*1000
#print (expt,version,tier,stream,file_count,events,ssize," TB",fsize," MB")
data = "%s& %s& %s& %s& %s& %d& %10.1f& %10.1f\\\\\n"%(expt,version,tier,stream,file_count,events,ssize,fsize)
data = data.replace("_","$\_$")
data = data.replace("%","ALL")
print (data)
out.write(data)
end = "\\end{tabular}\n"
out.write(end)
end = "\\caption{Summary of data %s to %s}\n\\end{table}\n"%(first,last)
out.write(end)
#out.close()
#out = open("report_"+first+"_"+last+".tex",'w')
top = "\\begin{table}\n\\begin{tabular}{rrrrrrrr}\n"
out.write(top)
header = "Expt&type&version&tier&files&events&size(TB)&size(MB)\\\\\n"
out.write(header)
for expt in ["protodune-sp","protodune-dp","fardetALL","neardetALL","ALL"]:
for version in ["v07ALL","v08ALL","raw","ALL"]:
for tier in ["simulated","detector-simulated","full-reconstructed","pandora_info","ALL",]:
command = "version " + version
command += " and run_type "+expt
command += " and file_type mc "
command += " and data_tier "+tier
#command += " and data_stream " + stream
#command += " and run_type " + expt
command += " and create_date >= " + first
command += " and create_date <= " + last
#print "%"+command
command = command.replace("ALL","%")
print (command)
result = samweb.listFilesSummary(command)
print (result)
file_count = result["file_count"]
if file_count == 0:
continue
events = result["total_event_count"]
ssize = result["total_file_size"]/1000/1000/1000/1000.
if events == None:
events = 0
fsize = 0.0
else:
fsize = ssize/events*1000*1000
print (expt,version,tier,events,ssize," TB")
data = "%s & mc & %s& %s &%s &%d &%10.1f&%10.1f\\\\\n"%(expt,version,tier,file_count,events,ssize,fsize)
data = data.replace("_","$\_$")
out.write(data)
end = "\\end{tabular}\n"
out.write(end)
end = "\\caption{Summary of mc production %s to %s}\n\\end{table}"%(first,last)
out.write(end)
out.write("\\end{document}\n")
out.close()
| 36.453704 | 118 | 0.597155 |
79556d2c61c7934694e98502f96f9afcd81b4ebc | 2,170 | py | Python | basic_skills/models_ex/models_django_extensions.py | bluebamus/django_miscellaneous_book | 22e0851b3a07aeef94bb723b334f036ed5c17f72 | [
"MIT"
] | null | null | null | basic_skills/models_ex/models_django_extensions.py | bluebamus/django_miscellaneous_book | 22e0851b3a07aeef94bb723b334f036ed5c17f72 | [
"MIT"
] | null | null | null | basic_skills/models_ex/models_django_extensions.py | bluebamus/django_miscellaneous_book | 22e0851b3a07aeef94bb723b334f036ed5c17f72 | [
"MIT"
] | null | null | null | # 공식 문서 : https://django-extensions.readthedocs.io/en/latest/
# 가장 많이 사용되는 명령어들
# @ shell_plus
# Django shell with autoloading of the apps database models and subclasses of user-defined classes.
# tab 키를 이용한 자동 완성
# 실행
# python manage.py shell_plus --ipython
# @ create_command
# Creates a command extension directory structure within the specified application.
# This makes it easy to get started with adding a command extension to your application.
# 해당 기능은 따로 살표볼 필요가 있음
# @ graph_models
# Creates a GraphViz dot file. You need to send this output to a file yourself.
# Great for graphing your models.
# Pass multiple application names to combine all the models into a single dot file.
# 진행 중이던 django 프로젝트에 참여하게 되었을 때,
# 그 프로젝트를 가장 빨리 이해하는 방법은 models.py를 파악하는 것입니다.
# 대부분의 로직들이 models에서 관리되기 때문입니다.
# 코드를 단순히 읽는 것보다도 graph로 각 모델 간의 관계를 그림으로 표현해 줍니다.
# 전체 혹은 선택적으로 그릴 수 있음
# 참고 : https://blog.isaccchoi.com/programing/Django-ERD-%EB%A7%8C%EB%93%A4%EA%B8%B0/
# 실행
# $ ./manage.py graph_models -a > my_project.dot # 문제 없음
# graphviz를 이용해 더 좋은 이미지 생성
# 전체 모델에 대한 그래프 출력 $ python manage.py graph_models -a -g -o models.png
# 특정 앱에 대한 그래프 출력 $ python manage.py graph_models board -o models.png
# python manage.py graph_models your_app your_model -o /tmp/models.png
# or
# python manage.py graph_models -a -g -o my_project_visualized.png
# 위 두 방식 다 에러가 남, 윈도우에서의 문제
# 해결책 pip install --global-option = build_ext --global-option = "-IC : \ Program Files (x86) \ Graphviz2.38 \ include"--global-option = "-LC : \ Program Files (x86) \ Graphviz2 .38 \ lib \ release \ lib "pygraphviz
# 하지만 해결 안됨, 똑같이 적어서는 안되고 내 시스템의 경로와 비교하여 수정해야함
# 리눅스에서 차후에 시도해 보기로함
# 참고 https://pythonq.com/so/python/1769802
# @ admin_generator app : app의 관리자 화면을 생성해 줍니다.
# @ clean_pyc : *.pyc 파일을 모두 지워 줍니다. settings.py에 BASE_DIR을 설정해야 합니다.
# 세부 명령어 참고 : https://c10106.tistory.com/4066
# @ notes : 파이썬 파일에 적어둔 # TODO: 주석들을 찾아 표시합니다.
# @ pipchecker : 사용 중인 패키지의 업데이트 현황을 알려줍니다.
# @ runserver_plus : 기본 웹 서버보다 향상된 웹 서버를 띄웁니다.
# 서버 오류가 발생하면 웹 상에서 바로 디버깅할 수 있습니다. Werkzeug를 설치해야 합니다.
# @ shell_plus : INSTALLED_APP에 설치된 앱의 모델들이 import된 셸을 띄웁니다
# (ipython과 함께 사용하면 더 편합니다). | 31.449275 | 214 | 0.715207 |
79556dc5de9be8a71f4008a1fb2013a512c3845a | 9,697 | py | Python | pygoogle.py | bharadwaj6/slack-google | 4b5e9d97487fe74d9ad609e45aa9aecce36c0332 | [
"MIT"
] | 5 | 2015-05-19T22:56:28.000Z | 2021-08-23T14:39:59.000Z | pygoogle.py | bharadwaj6/slack-google | 4b5e9d97487fe74d9ad609e45aa9aecce36c0332 | [
"MIT"
] | 2 | 2015-05-19T22:57:28.000Z | 2019-03-20T17:39:01.000Z | pygoogle.py | bharadwaj6/slack-google | 4b5e9d97487fe74d9ad609e45aa9aecce36c0332 | [
"MIT"
] | 1 | 2017-08-20T07:52:24.000Z | 2017-08-20T07:52:24.000Z | #!/usr/bin/python
"""
Google AJAX Search Module
http://code.google.com/apis/ajaxsearch/documentation/reference.html
Needs Python 2.6 or later
"""
try:
    import json
except ImportError:
    # BUG FIX: the second `except ImportError` clause on the old single try
    # was unreachable - a failing simplejson import inside the handler was
    # never caught. Nest the fallback instead. Also replaced the Python-2-only
    # `except X, e` syntax with `as`, valid on 2.6+ and 3.
    try:
        import simplejson as json
    except ImportError as e:
        # No JSON library available at all; nothing useful can run.
        print(e)
        exit()
import sys
import urllib
import logging
import argparse
__author__ = "Kiran Bandla"
__version__ = "0.2"
URL = 'http://ajax.googleapis.com/ajax/services/search/web?'
# Web Search Specific Arguments
# http://code.google.com/apis/ajaxsearch/documentation/reference.html#_fonje_web
# SAFE,FILTER
"""
SAFE
This optional argument supplies the search safety level which may be one of:
* safe=active - enables the highest level of safe search filtering
* safe=moderate - enables moderate safe search filtering (default)
* safe=off - disables safe search filtering
"""
SAFE_ACTIVE = "active"
SAFE_MODERATE = "moderate"
SAFE_OFF = "off"
"""
FILTER
This optional argument controls turning on or off the duplicate content filter:
* filter=0 - Turns off the duplicate content filter
* filter=1 - Turns on the duplicate content filter (default)
"""
FILTER_OFF = 0
FILTER_ON = 1
# Standard URL Arguments
# http://code.google.com/apis/ajaxsearch/documentation/reference.html#_fonje_args
"""
RSZ
This optional argument supplies the number of results that the application would like to recieve.
A value of small indicates a small result set size or 4 results.
A value of large indicates a large result set or 8 results. If this argument is not supplied, a value of small is assumed.
"""
RSZ_SMALL = "small"
RSZ_LARGE = "large"
"""
HL
This optional argument supplies the host language of the application making the request.
If this argument is not present then the system will choose a value based on the value of the Accept-Language http header.
If this header is not present, a value of en is assumed.
"""
class PyGoogle:
    """Thin client for the legacy Google AJAX web-search JSON API.

    Each public method re-issues the stored query against the
    ``ajax.googleapis.com`` endpoint (module-level ``URL``) page by page and
    reshapes the raw JSON response.

    NOTE(review): this class is Python 2 only — it uses ``print`` statements,
    ``urllib.urlopen``/``urlencode``/``unquote`` and ``except Exception, e``
    syntax; it will not run under Python 3 without porting.
    """
    def __init__(self, query, pages=10, hl='en', log_level=logging.INFO):
        self.pages = pages # Number of pages. default 10
        self.query = query
        self.filter = FILTER_ON # Controls turning on or off the duplicate content filter. On = 1.
        self.rsz = RSZ_LARGE # Results per page. small = 4 /large = 8
        self.safe = SAFE_OFF # SafeBrowsing - active/moderate/off
        self.hl = hl # Defaults to English (en)
        self.__setup_logging(level=log_level)
    def __setup_logging(self, level):
        # Attach a stdout handler to a dedicated 'pygoogle' logger.
        # NOTE(review): called once per instance; creating several PyGoogle
        # objects adds duplicate handlers and duplicates log lines.
        logger = logging.getLogger('pygoogle')
        logger.setLevel(level)
        handler = logging.StreamHandler(sys.stdout)
        handler.setFormatter(logging.Formatter('%(module)s %(levelname)s %(funcName)s| %(message)s'))
        logger.addHandler(handler)
        self.logger = logger
    def __search__(self, print_results=False):
        '''
        Issue the query for each configured page and collect the decoded
        JSON responses.

        :param print_results: when True, also pretty-print each result
            (title, snippet, URL) to stdout.
        :returns: list of decoded JSON response dicts (pages whose
            responseStatus is missing or != 200 are skipped).
        '''
        results = []
        for page in range(0, self.pages):
            # 'start' is an absolute result offset, so it advances by the
            # page size (8 for RSZ_LARGE, 4 for RSZ_SMALL).
            rsz = 8
            if self.rsz == RSZ_SMALL:
                rsz = 4
            args = {'q': self.query,
                    'v': '1.0',
                    'start': page * rsz,
                    'rsz': self.rsz,
                    'safe': self.safe,
                    'filter': self.filter,
                    'hl': self.hl
                    }
            self.logger.debug('search: "%s" page# : %s' % (self.query, page))
            q = urllib.urlencode(args)
            search_results = urllib.urlopen(URL + q)
            data = json.loads(search_results.read())
            if 'responseStatus' not in data:
                self.logger.error('response does not have a responseStatus key')
                continue
            if data.get('responseStatus') != 200:
                self.logger.debug('responseStatus is not 200')
                self.logger.error('responseDetails : %s' % (data.get('responseDetails', None)))
                continue
            if print_results:
                if 'responseData' in data and 'results' in data['responseData']:
                    for result in data['responseData']['results']:
                        if result:
                            # Strip the <b>...</b> highlighting Google adds
                            # around matched terms in the snippet.
                            print '[%s]' % (urllib.unquote(result['titleNoFormatting']))
                            print result['content'].strip("<b>...</b>").replace("<b>",
                                                                                '').replace("</b>",
                                                                                            '').replace("'",
                                                                                                        "'").strip()
                            print urllib.unquote(result['unescapedUrl']) + '\n'
                else:
                    # no responseData key was found in 'data'
                    self.logger.error('no responseData key found in response. very unusal')
            results.append(data)
        return results
    def search(self):
        """Returns a dict mapping result titles to their unescaped URLs."""
        results = {}
        search_results = self.__search__()
        if not search_results:
            self.logger.info('No results returned')
            return results
        for data in search_results:
            if 'responseData' in data and 'results' in data['responseData']:
                for result in data['responseData']['results']:
                    if result and 'titleNoFormatting' in result:
                        title = urllib.unquote(result['titleNoFormatting'])
                        results[title] = urllib.unquote(result['unescapedUrl'])
            else:
                self.logger.error('no responseData key found in response')
                self.logger.error(data)
        return results
    def search_page_wise(self):
        """Returns a dict of page number -> list of result URLs.

        Bypasses __search__ and queries the endpoint directly.
        NOTE(review): unlike __search__, 'start' here advances by 1 per page
        (not by the page size) — pages overlap; possibly unintended.
        """
        results = {}
        for page in range(0, self.pages):
            args = {'q': self.query,
                    'v': '1.0',
                    'start': page,
                    'rsz': RSZ_LARGE,
                    'safe': SAFE_OFF,
                    'filter': FILTER_ON,
                    }
            q = urllib.urlencode(args)
            search_results = urllib.urlopen(URL + q)
            data = json.loads(search_results.read())
            urls = []
            if 'responseData' in data and 'results' in data['responseData']:
                for result in data['responseData']['results']:
                    if result and 'unescapedUrl' in result:
                        url = urllib.unquote(result['unescapedUrl'])
                        urls.append(url)
            else:
                self.logger.error('no responseData key found in response')
            results[page] = urls
        return results
    def get_result_urls(self):
        """Returns list of result URLs across all fetched pages."""
        results = []
        search_results = self.__search__()
        if not search_results:
            self.logger.info('No results returned')
            return results
        for data in search_results:
            if data and 'responseData' in data and 'results' in data['responseData']:
                for result in data['responseData']['results']:
                    if result:
                        results.append(urllib.unquote(result['unescapedUrl']))
        return results
    def get_result_titles(self):
        """Returns list of result titles across all fetched pages."""
        results = []
        search_results = self.__search__()
        if not search_results:
            self.logger.info('No results returned')
            return results
        for data in search_results:
            if data and 'responseData' in data and 'results' in data['responseData']:
                for result in data['responseData']['results']:
                    if result:
                        results.append(urllib.unquote(result['titleNoFormatting']))
        return results
    def get_result_count(self):
        """Returns Google's estimated result count (0 on any failure).

        Temporarily fetches a single page; self.pages is restored in the
        finally clause even if parsing raises.
        """
        temp = self.pages
        self.pages = 1
        result_count = 0
        search_results = self.__search__()
        if not search_results:
            return 0
        try:
            result_count = search_results[0]
            if not isinstance(result_count, dict):
                return 0
            result_count = result_count.get('responseData', None)
            if result_count:
                if 'cursor' in result_count and 'estimatedResultCount' in result_count['cursor']:
                    return result_count['cursor']['estimatedResultCount']
            return 0
        except Exception, e:
            self.logger.error(e)
        finally:
            self.pages = temp
        return result_count
    def display_results(self):
        """Prints results (for command line)"""
        self.__search__(True)
def main():
    """Command-line entry point: parse options, run the search, print results.

    Prints the parser help and exits when no query terms are supplied.
    """
    parser = argparse.ArgumentParser(description='A simple Google search module for Python')
    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', default=False, help='Verbose mode')
    # type=int is required: without it argparse hands the page count over as a
    # string, and PyGoogle.__search__ then crashes in range(0, self.pages).
    parser.add_argument('-p', '--pages', dest='pages', action='store', type=int, default=1, help='Number of pages to return. Max 10')
    parser.add_argument('-hl', '--language', dest='language', action='store', default='en', help="language. default is 'en'")
    parser.add_argument('query', nargs='*', default=None)
    args = parser.parse_args()
    query = ' '.join(args.query)
    log_level = logging.INFO
    if args.verbose:
        log_level = logging.DEBUG
    if not query:
        # No query terms given: show usage instead of issuing an empty search.
        parser.print_help()
        exit()
    search = PyGoogle(log_level=log_level, query=query, pages=args.pages, hl=args.language)
    search.display_results()
# Run the command-line interface when this module is executed directly.
if __name__ == "__main__":
    main()
| 38.943775 | 125 | 0.573579 |
79556f505fd1055fa9e10beed90152514bf61294 | 28,972 | py | Python | tests/asyncio/test_asyncio_client.py | Keylekan/python-socketio | 0ccfef2dc5d0e28c2106e5f440a9a674608a9bc8 | [
"MIT"
] | 2 | 2019-03-28T07:28:16.000Z | 2019-03-28T07:28:16.000Z | tests/asyncio/test_asyncio_client.py | Keylekan/python-socketio | 0ccfef2dc5d0e28c2106e5f440a9a674608a9bc8 | [
"MIT"
] | null | null | null | tests/asyncio/test_asyncio_client.py | Keylekan/python-socketio | 0ccfef2dc5d0e28c2106e5f440a9a674608a9bc8 | [
"MIT"
] | null | null | null | import asyncio
import sys
import unittest
import six
if six.PY3:
from unittest import mock
else:
import mock
from socketio import asyncio_client
from socketio import asyncio_namespace
from engineio import exceptions as engineio_exceptions
from socketio import exceptions
from socketio import packet
def AsyncMock(*args, **kwargs):
    """Build a coroutine function whose calls are recorded on a MagicMock.

    The underlying MagicMock is exposed as the ``mock`` attribute of the
    returned coroutine function, so tests can assert on how it was awaited.
    """
    recorder = mock.MagicMock(*args, **kwargs)
    async def _coro(*call_args, **call_kwargs):
        return recorder(*call_args, **call_kwargs)
    _coro.mock = recorder
    return _coro
def _run(coro):
"""Run the given coroutine."""
return asyncio.get_event_loop().run_until_complete(coro)
@unittest.skipIf(sys.version_info < (3, 5), 'only for Python 3.5+')
class TestAsyncClient(unittest.TestCase):
    """Unit tests for socketio.asyncio_client.AsyncClient.

    All Engine.IO interaction is replaced with AsyncMock/MagicMock objects,
    so no network traffic occurs; coroutines under test are driven to
    completion with the module-level _run() helper.
    """
    def test_is_asyncio_based(self):
        c = asyncio_client.AsyncClient()
        self.assertEqual(c.is_asyncio_based(), True)
    def test_connect(self):
        c = asyncio_client.AsyncClient()
        c.eio.connect = AsyncMock()
        _run(c.connect('url', headers='headers', transports='transports',
                       namespaces=['/foo', '/', '/bar'],
                       socketio_path='path'))
        self.assertEqual(c.connection_url, 'url')
        self.assertEqual(c.connection_headers, 'headers')
        self.assertEqual(c.connection_transports, 'transports')
        self.assertEqual(c.connection_namespaces, ['/foo', '/', '/bar'])
        self.assertEqual(c.socketio_path, 'path')
        self.assertEqual(c.namespaces, ['/foo', '/bar'])
        c.eio.connect.mock.assert_called_once_with(
            'url', headers='headers', transports='transports',
            engineio_path='path')
    def test_connect_one_namespace(self):
        c = asyncio_client.AsyncClient()
        c.eio.connect = AsyncMock()
        _run(c.connect('url', headers='headers', transports='transports',
                       namespaces='/foo',
                       socketio_path='path'))
        self.assertEqual(c.connection_url, 'url')
        self.assertEqual(c.connection_headers, 'headers')
        self.assertEqual(c.connection_transports, 'transports')
        self.assertEqual(c.connection_namespaces, ['/foo'])
        self.assertEqual(c.socketio_path, 'path')
        self.assertEqual(c.namespaces, ['/foo'])
        c.eio.connect.mock.assert_called_once_with(
            'url', headers='headers', transports='transports',
            engineio_path='path')
    def test_connect_default_namespaces(self):
        # With no explicit namespaces, they are derived from the registered
        # event handlers ('/' is implicit and not listed).
        c = asyncio_client.AsyncClient()
        c.eio.connect = AsyncMock()
        c.on('foo', mock.MagicMock(), namespace='/foo')
        c.on('bar', mock.MagicMock(), namespace='/')
        _run(c.connect('url', headers='headers', transports='transports',
                       socketio_path='path'))
        self.assertEqual(c.connection_url, 'url')
        self.assertEqual(c.connection_headers, 'headers')
        self.assertEqual(c.connection_transports, 'transports')
        self.assertEqual(c.connection_namespaces, None)
        self.assertEqual(c.socketio_path, 'path')
        self.assertEqual(c.namespaces, ['/foo'])
        c.eio.connect.mock.assert_called_once_with(
            'url', headers='headers', transports='transports',
            engineio_path='path')
    def test_connect_error(self):
        # Engine.IO connection errors must surface as socketio's own
        # ConnectionError.
        c = asyncio_client.AsyncClient()
        c.eio.connect = AsyncMock(
            side_effect=engineio_exceptions.ConnectionError('foo'))
        c.on('foo', mock.MagicMock(), namespace='/foo')
        c.on('bar', mock.MagicMock(), namespace='/')
        self.assertRaises(
            exceptions.ConnectionError, _run, c.connect(
                'url', headers='headers', transports='transports',
                socketio_path='path'))
    def test_wait_no_reconnect(self):
        c = asyncio_client.AsyncClient()
        c.eio.wait = AsyncMock()
        c.sleep = AsyncMock()
        c._reconnect_task = None
        _run(c.wait())
        c.eio.wait.mock.assert_called_once_with()
        c.sleep.mock.assert_called_once_with(1)
    def test_wait_reconnect_failed(self):
        c = asyncio_client.AsyncClient()
        c.eio.wait = AsyncMock()
        c.sleep = AsyncMock()
        states = ['disconnected']
        # Simulated reconnect task: flips the fake engine state when awaited.
        async def fake_wait():
            c.eio.state = states.pop(0)
        c._reconnect_task = fake_wait()
        _run(c.wait())
        c.eio.wait.mock.assert_called_once_with()
        c.sleep.mock.assert_called_once_with(1)
    def test_wait_reconnect_successful(self):
        c = asyncio_client.AsyncClient()
        c.eio.wait = AsyncMock()
        c.sleep = AsyncMock()
        states = ['connected', 'disconnected']
        async def fake_wait():
            c.eio.state = states.pop(0)
        c._reconnect_task = fake_wait()
        c._reconnect_task = fake_wait()
        _run(c.wait())
        self.assertEqual(c.eio.wait.mock.call_count, 2)
        self.assertEqual(c.sleep.mock.call_count, 2)
    def test_emit_no_arguments(self):
        c = asyncio_client.AsyncClient()
        c._send_packet = AsyncMock()
        _run(c.emit('foo'))
        expected_packet = packet.Packet(packet.EVENT, namespace='/',
                                        data=['foo'], id=None, binary=False)
        self.assertEqual(c._send_packet.mock.call_count, 1)
        self.assertEqual(c._send_packet.mock.call_args_list[0][0][0].encode(),
                         expected_packet.encode())
    def test_emit_one_argument(self):
        c = asyncio_client.AsyncClient()
        c._send_packet = AsyncMock()
        _run(c.emit('foo', 'bar'))
        expected_packet = packet.Packet(packet.EVENT, namespace='/',
                                        data=['foo', 'bar'], id=None,
                                        binary=False)
        self.assertEqual(c._send_packet.mock.call_count, 1)
        self.assertEqual(c._send_packet.mock.call_args_list[0][0][0].encode(),
                         expected_packet.encode())
    def test_emit_one_argument_list(self):
        c = asyncio_client.AsyncClient()
        c._send_packet = AsyncMock()
        _run(c.emit('foo', ['bar', 'baz']))
        expected_packet = packet.Packet(packet.EVENT, namespace='/',
                                        data=['foo', ['bar', 'baz']], id=None,
                                        binary=False)
        self.assertEqual(c._send_packet.mock.call_count, 1)
        self.assertEqual(c._send_packet.mock.call_args_list[0][0][0].encode(),
                         expected_packet.encode())
    def test_emit_two_arguments(self):
        # A tuple argument is unpacked into multiple event arguments.
        c = asyncio_client.AsyncClient()
        c._send_packet = AsyncMock()
        _run(c.emit('foo', ('bar', 'baz')))
        expected_packet = packet.Packet(packet.EVENT, namespace='/',
                                        data=['foo', 'bar', 'baz'], id=None,
                                        binary=False)
        self.assertEqual(c._send_packet.mock.call_count, 1)
        self.assertEqual(c._send_packet.mock.call_args_list[0][0][0].encode(),
                         expected_packet.encode())
    def test_emit_namespace(self):
        c = asyncio_client.AsyncClient()
        c._send_packet = AsyncMock()
        _run(c.emit('foo', namespace='/foo'))
        expected_packet = packet.Packet(packet.EVENT, namespace='/foo',
                                        data=['foo'], id=None, binary=False)
        self.assertEqual(c._send_packet.mock.call_count, 1)
        self.assertEqual(c._send_packet.mock.call_args_list[0][0][0].encode(),
                         expected_packet.encode())
    def test_emit_with_callback(self):
        c = asyncio_client.AsyncClient()
        c._send_packet = AsyncMock()
        c._generate_ack_id = mock.MagicMock(return_value=123)
        _run(c.emit('foo', callback='cb'))
        expected_packet = packet.Packet(packet.EVENT, namespace='/',
                                        data=['foo'], id=123, binary=False)
        self.assertEqual(c._send_packet.mock.call_count, 1)
        self.assertEqual(c._send_packet.mock.call_args_list[0][0][0].encode(),
                         expected_packet.encode())
        c._generate_ack_id.assert_called_once_with('/', 'cb')
    def test_emit_namespace_with_callback(self):
        c = asyncio_client.AsyncClient()
        c._send_packet = AsyncMock()
        c._generate_ack_id = mock.MagicMock(return_value=123)
        _run(c.emit('foo', namespace='/foo', callback='cb'))
        expected_packet = packet.Packet(packet.EVENT, namespace='/foo',
                                        data=['foo'], id=123, binary=False)
        self.assertEqual(c._send_packet.mock.call_count, 1)
        self.assertEqual(c._send_packet.mock.call_args_list[0][0][0].encode(),
                         expected_packet.encode())
        c._generate_ack_id.assert_called_once_with('/foo', 'cb')
    def test_emit_binary(self):
        c = asyncio_client.AsyncClient(binary=True)
        c._send_packet = AsyncMock()
        _run(c.emit('foo', b'bar'))
        expected_packet = packet.Packet(packet.EVENT, namespace='/',
                                        data=['foo', b'bar'], id=None,
                                        binary=True)
        self.assertEqual(c._send_packet.mock.call_count, 1)
        self.assertEqual(c._send_packet.mock.call_args_list[0][0][0].encode(),
                         expected_packet.encode())
    def test_emit_not_binary(self):
        c = asyncio_client.AsyncClient(binary=False)
        c._send_packet = AsyncMock()
        _run(c.emit('foo', 'bar'))
        expected_packet = packet.Packet(packet.EVENT, namespace='/',
                                        data=['foo', 'bar'], id=None,
                                        binary=False)
        self.assertEqual(c._send_packet.mock.call_count, 1)
        self.assertEqual(c._send_packet.mock.call_args_list[0][0][0].encode(),
                         expected_packet.encode())
    def test_send(self):
        # send() is a thin wrapper that emits a 'message' event.
        c = asyncio_client.AsyncClient()
        c.emit = AsyncMock()
        _run(c.send('data', 'namespace', 'callback'))
        c.emit.mock.assert_called_once_with(
            'message', data='data', namespace='namespace',
            callback='callback', wait=False, timeout=60)
    def test_send_with_defaults(self):
        c = asyncio_client.AsyncClient()
        c.emit = AsyncMock()
        _run(c.send('data'))
        c.emit.mock.assert_called_once_with(
            'message', data='data', namespace=None, callback=None, wait=False,
            timeout=60)
    def test_call(self):
        c = asyncio_client.AsyncClient()
        # Pretend the server acked immediately: invoke the registered ack
        # callback from inside the event wait.
        async def fake_event_wait():
            c._generate_ack_id.call_args_list[0][0][1]('foo', 321)
        c._send_packet = AsyncMock()
        c._generate_ack_id = mock.MagicMock(return_value=123)
        c.eio = mock.MagicMock()
        c.eio.create_event.return_value.wait = fake_event_wait
        self.assertEqual(_run(c.call('foo')), ('foo', 321))
        expected_packet = packet.Packet(packet.EVENT, namespace='/',
                                        data=['foo'], id=123, binary=False)
        self.assertEqual(c._send_packet.mock.call_count, 1)
        self.assertEqual(c._send_packet.mock.call_args_list[0][0][0].encode(),
                         expected_packet.encode())
    def test_call_with_timeout(self):
        c = asyncio_client.AsyncClient()
        # The fake ack never arrives, so the 0.01s timeout must trigger.
        async def fake_event_wait():
            await asyncio.sleep(1)
        c._send_packet = AsyncMock()
        c._generate_ack_id = mock.MagicMock(return_value=123)
        c.eio = mock.MagicMock()
        c.eio.create_event.return_value.wait = fake_event_wait
        self.assertRaises(exceptions.TimeoutError, _run,
                          c.call('foo', timeout=0.01))
        expected_packet = packet.Packet(packet.EVENT, namespace='/',
                                        data=['foo'], id=123, binary=False)
        self.assertEqual(c._send_packet.mock.call_count, 1)
        self.assertEqual(c._send_packet.mock.call_args_list[0][0][0].encode(),
                         expected_packet.encode())
    def test_disconnect(self):
        c = asyncio_client.AsyncClient()
        c._trigger_event = AsyncMock()
        c._send_packet = AsyncMock()
        c.eio = mock.MagicMock()
        c.eio.disconnect = AsyncMock()
        c.eio.state = 'connected'
        _run(c.disconnect())
        self.assertEqual(c._trigger_event.mock.call_count, 0)
        self.assertEqual(c._send_packet.mock.call_count, 1)
        expected_packet = packet.Packet(packet.DISCONNECT, namespace='/')
        self.assertEqual(c._send_packet.mock.call_args_list[0][0][0].encode(),
                         expected_packet.encode())
        c.eio.disconnect.mock.assert_called_once_with(abort=True)
    def test_disconnect_namespaces(self):
        # One DISCONNECT packet per connected namespace, '/' last.
        c = asyncio_client.AsyncClient()
        c.namespaces = ['/foo', '/bar']
        c._trigger_event = AsyncMock()
        c._send_packet = AsyncMock()
        c.eio = mock.MagicMock()
        c.eio.disconnect = AsyncMock()
        c.eio.state = 'connected'
        _run(c.disconnect())
        self.assertEqual(c._trigger_event.mock.call_count, 0)
        self.assertEqual(c._send_packet.mock.call_count, 3)
        expected_packet = packet.Packet(packet.DISCONNECT, namespace='/foo')
        self.assertEqual(c._send_packet.mock.call_args_list[0][0][0].encode(),
                         expected_packet.encode())
        expected_packet = packet.Packet(packet.DISCONNECT, namespace='/bar')
        self.assertEqual(c._send_packet.mock.call_args_list[1][0][0].encode(),
                         expected_packet.encode())
        expected_packet = packet.Packet(packet.DISCONNECT, namespace='/')
        self.assertEqual(c._send_packet.mock.call_args_list[2][0][0].encode(),
                         expected_packet.encode())
    def test_start_background_task(self):
        c = asyncio_client.AsyncClient()
        c.eio.start_background_task = mock.MagicMock(return_value='foo')
        self.assertEqual(c.start_background_task('foo', 'bar', baz='baz'),
                         'foo')
        c.eio.start_background_task.assert_called_once_with('foo', 'bar',
                                                            baz='baz')
    def test_sleep(self):
        c = asyncio_client.AsyncClient()
        c.eio.sleep = AsyncMock()
        _run(c.sleep(1.23))
        c.eio.sleep.mock.assert_called_once_with(1.23)
    def test_send_packet(self):
        c = asyncio_client.AsyncClient()
        c.eio.send = AsyncMock()
        _run(c._send_packet(packet.Packet(packet.EVENT, 'foo', binary=False)))
        c.eio.send.mock.assert_called_once_with('2"foo"', binary=False)
    def test_send_packet_binary(self):
        # A binary packet becomes a JSON header with a placeholder plus one
        # raw binary attachment frame; key order in the JSON may vary.
        c = asyncio_client.AsyncClient()
        c.eio.send = AsyncMock()
        _run(c._send_packet(packet.Packet(packet.EVENT, b'foo', binary=True)))
        self.assertTrue(c.eio.send.mock.call_args_list == [
            mock.call('51-{"_placeholder":true,"num":0}', binary=False),
            mock.call(b'foo', binary=True)
        ] or c.eio.send.mock.call_args_list == [
            mock.call('51-{"num":0,"_placeholder":true}', binary=False),
            mock.call(b'foo', binary=True)
        ])
    def test_send_packet_default_binary_py3(self):
        c = asyncio_client.AsyncClient()
        c.eio.send = AsyncMock()
        _run(c._send_packet(packet.Packet(packet.EVENT, 'foo')))
        c.eio.send.mock.assert_called_once_with('2"foo"', binary=False)
    def test_handle_connect(self):
        c = asyncio_client.AsyncClient()
        c._trigger_event = AsyncMock()
        c._send_packet = AsyncMock()
        _run(c._handle_connect('/'))
        c._trigger_event.mock.assert_called_once_with('connect', namespace='/')
        c._send_packet.mock.assert_not_called()
    def test_handle_connect_with_namespaces(self):
        # Connecting '/' must also issue CONNECT packets for each pending
        # non-default namespace.
        c = asyncio_client.AsyncClient()
        c.namespaces = ['/foo', '/bar']
        c._trigger_event = AsyncMock()
        c._send_packet = AsyncMock()
        _run(c._handle_connect('/'))
        c._trigger_event.mock.assert_called_once_with('connect', namespace='/')
        self.assertEqual(c._send_packet.mock.call_count, 2)
        expected_packet = packet.Packet(packet.CONNECT, namespace='/foo')
        self.assertEqual(c._send_packet.mock.call_args_list[0][0][0].encode(),
                         expected_packet.encode())
        expected_packet = packet.Packet(packet.CONNECT, namespace='/bar')
        self.assertEqual(c._send_packet.mock.call_args_list[1][0][0].encode(),
                         expected_packet.encode())
    def test_handle_connect_namespace(self):
        c = asyncio_client.AsyncClient()
        c.namespaces = ['/foo']
        c._trigger_event = AsyncMock()
        c._send_packet = AsyncMock()
        _run(c._handle_connect('/foo'))
        _run(c._handle_connect('/bar'))
        self.assertEqual(c._trigger_event.mock.call_args_list, [
            mock.call('connect', namespace='/foo'),
            mock.call('connect', namespace='/bar')
        ])
        c._send_packet.mock.assert_not_called()
        self.assertEqual(c.namespaces, ['/foo', '/bar'])
    def test_handle_disconnect(self):
        c = asyncio_client.AsyncClient()
        c._trigger_event = AsyncMock()
        _run(c._handle_disconnect('/'))
        c._trigger_event.mock.assert_called_once_with(
            'disconnect', namespace='/')
    def test_handle_disconnect_namespace(self):
        c = asyncio_client.AsyncClient()
        c.namespaces = ['/foo', '/bar']
        c._trigger_event = AsyncMock()
        _run(c._handle_disconnect('/foo'))
        c._trigger_event.mock.assert_called_once_with(
            'disconnect', namespace='/foo')
        self.assertEqual(c.namespaces, ['/bar'])
    def test_handle_disconnect_unknown_namespace(self):
        c = asyncio_client.AsyncClient()
        c.namespaces = ['/foo', '/bar']
        c._trigger_event = AsyncMock()
        _run(c._handle_disconnect('/baz'))
        c._trigger_event.mock.assert_called_once_with(
            'disconnect', namespace='/baz')
        self.assertEqual(c.namespaces, ['/foo', '/bar'])
    def test_handle_event(self):
        c = asyncio_client.AsyncClient()
        c._trigger_event = AsyncMock()
        _run(c._handle_event('/', None, ['foo', ('bar', 'baz')]))
        c._trigger_event.mock.assert_called_once_with(
            'foo', '/', ('bar', 'baz'))
    def test_handle_event_with_id_no_arguments(self):
        # When the event carries an id, an ACK packet with the handler's
        # return value (here: none) must be sent back.
        c = asyncio_client.AsyncClient(binary=True)
        c._trigger_event = AsyncMock(return_value=None)
        c._send_packet = AsyncMock()
        _run(c._handle_event('/', 123, ['foo', ('bar', 'baz')]))
        c._trigger_event.mock.assert_called_once_with(
            'foo', '/', ('bar', 'baz'))
        self.assertEqual(c._send_packet.mock.call_count, 1)
        expected_packet = packet.Packet(packet.ACK, namespace='/', id=123,
                                        data=[], binary=None)
        self.assertEqual(c._send_packet.mock.call_args_list[0][0][0].encode(),
                         expected_packet.encode())
    def test_handle_event_with_id_one_argument(self):
        c = asyncio_client.AsyncClient(binary=True)
        c._trigger_event = AsyncMock(return_value='ret')
        c._send_packet = AsyncMock()
        _run(c._handle_event('/', 123, ['foo', ('bar', 'baz')]))
        c._trigger_event.mock.assert_called_once_with(
            'foo', '/', ('bar', 'baz'))
        self.assertEqual(c._send_packet.mock.call_count, 1)
        expected_packet = packet.Packet(packet.ACK, namespace='/', id=123,
                                        data=['ret'], binary=None)
        self.assertEqual(c._send_packet.mock.call_args_list[0][0][0].encode(),
                         expected_packet.encode())
    def test_handle_event_with_id_one_list_argument(self):
        c = asyncio_client.AsyncClient(binary=True)
        c._trigger_event = AsyncMock(return_value=['a', 'b'])
        c._send_packet = AsyncMock()
        _run(c._handle_event('/', 123, ['foo', ('bar', 'baz')]))
        c._trigger_event.mock.assert_called_once_with(
            'foo', '/', ('bar', 'baz'))
        self.assertEqual(c._send_packet.mock.call_count, 1)
        expected_packet = packet.Packet(packet.ACK, namespace='/', id=123,
                                        data=[['a', 'b']], binary=None)
        self.assertEqual(c._send_packet.mock.call_args_list[0][0][0].encode(),
                         expected_packet.encode())
    def test_handle_event_with_id_two_arguments(self):
        c = asyncio_client.AsyncClient(binary=True)
        c._trigger_event = AsyncMock(return_value=('a', 'b'))
        c._send_packet = AsyncMock()
        _run(c._handle_event('/', 123, ['foo', ('bar', 'baz')]))
        c._trigger_event.mock.assert_called_once_with(
            'foo', '/', ('bar', 'baz'))
        self.assertEqual(c._send_packet.mock.call_count, 1)
        expected_packet = packet.Packet(packet.ACK, namespace='/', id=123,
                                        data=['a', 'b'], binary=None)
        self.assertEqual(c._send_packet.mock.call_args_list[0][0][0].encode(),
                         expected_packet.encode())
    def test_handle_ack(self):
        c = asyncio_client.AsyncClient()
        mock_cb = mock.MagicMock()
        c.callbacks['/foo'] = {123: mock_cb}
        _run(c._handle_ack('/foo', 123, ['bar', 'baz']))
        mock_cb.assert_called_once_with('bar', 'baz')
        self.assertNotIn(123, c.callbacks['/foo'])
    def test_handle_ack_async(self):
        c = asyncio_client.AsyncClient()
        mock_cb = AsyncMock()
        c.callbacks['/foo'] = {123: mock_cb}
        _run(c._handle_ack('/foo', 123, ['bar', 'baz']))
        mock_cb.mock.assert_called_once_with('bar', 'baz')
        self.assertNotIn(123, c.callbacks['/foo'])
    def test_handle_ack_not_found(self):
        c = asyncio_client.AsyncClient()
        mock_cb = mock.MagicMock()
        c.callbacks['/foo'] = {123: mock_cb}
        _run(c._handle_ack('/foo', 124, ['bar', 'baz']))
        mock_cb.assert_not_called()
        self.assertIn(123, c.callbacks['/foo'])
    def test_handle_error(self):
        c = asyncio_client.AsyncClient()
        c.namespaces = ['/foo', '/bar']
        c._handle_error('/bar')
        self.assertEqual(c.namespaces, ['/foo'])
    def test_handle_error_unknown_namespace(self):
        c = asyncio_client.AsyncClient()
        c.namespaces = ['/foo', '/bar']
        c._handle_error('/baz')
        self.assertEqual(c.namespaces, ['/foo', '/bar'])
    def test_trigger_event(self):
        c = asyncio_client.AsyncClient()
        handler = mock.MagicMock()
        c.on('foo', handler)
        _run(c._trigger_event('foo', '/', 1, '2'))
        handler.assert_called_once_with(1, '2')
    def test_trigger_event_namespace(self):
        c = asyncio_client.AsyncClient()
        handler = AsyncMock()
        c.on('foo', handler, namespace='/bar')
        _run(c._trigger_event('foo', '/bar', 1, '2'))
        handler.mock.assert_called_once_with(1, '2')
    def test_trigger_event_class_namespace(self):
        c = asyncio_client.AsyncClient()
        result = []
        class MyNamespace(asyncio_namespace.AsyncClientNamespace):
            def on_foo(self, a, b):
                result.append(a)
                result.append(b)
        c.register_namespace(MyNamespace('/'))
        _run(c._trigger_event('foo', '/', 1, '2'))
        self.assertEqual(result, [1, '2'])
    @mock.patch('socketio.client.random.random', side_effect=[1, 0, 0.5])
    def test_handle_reconnect(self, random):
        # random() is patched so the exponential-backoff jitter is
        # deterministic and the sleep delays can be asserted exactly.
        c = asyncio_client.AsyncClient()
        c._reconnect_task = 'foo'
        c.sleep = AsyncMock()
        c.connect = AsyncMock(
            side_effect=[ValueError, exceptions.ConnectionError, None])
        _run(c._handle_reconnect())
        self.assertEqual(c.sleep.mock.call_count, 3)
        self.assertEqual(c.sleep.mock.call_args_list, [
            mock.call(1.5),
            mock.call(1.5),
            mock.call(4.0)
        ])
        self.assertEqual(c._reconnect_task, None)
    @mock.patch('socketio.client.random.random', side_effect=[1, 0, 0.5])
    def test_handle_reconnect_max_delay(self, random):
        c = asyncio_client.AsyncClient(reconnection_delay_max=3)
        c._reconnect_task = 'foo'
        c.sleep = AsyncMock()
        c.connect = AsyncMock(
            side_effect=[ValueError, exceptions.ConnectionError, None])
        _run(c._handle_reconnect())
        self.assertEqual(c.sleep.mock.call_count, 3)
        self.assertEqual(c.sleep.mock.call_args_list, [
            mock.call(1.5),
            mock.call(1.5),
            mock.call(3.0)
        ])
        self.assertEqual(c._reconnect_task, None)
    @mock.patch('socketio.client.random.random', side_effect=[1, 0, 0.5])
    def test_handle_reconnect_max_attempts(self, random):
        c = asyncio_client.AsyncClient(reconnection_attempts=2)
        c._reconnect_task = 'foo'
        c.sleep = AsyncMock()
        c.connect = AsyncMock(
            side_effect=[ValueError, exceptions.ConnectionError, None])
        _run(c._handle_reconnect())
        self.assertEqual(c.sleep.mock.call_count, 2)
        self.assertEqual(c.sleep.mock.call_args_list, [
            mock.call(1.5),
            mock.call(1.5)
        ])
        self.assertEqual(c._reconnect_task, 'foo')
    def test_handle_eio_message(self):
        # Exercises the Engine.IO payload dispatcher: the leading digit is
        # the socket.io packet type (0 CONNECT, 1 DISCONNECT, 2 EVENT,
        # 3 ACK, 4 ERROR, 5/6 binary EVENT/ACK with attachment frames).
        c = asyncio_client.AsyncClient()
        c._handle_connect = AsyncMock()
        c._handle_disconnect = AsyncMock()
        c._handle_event = AsyncMock()
        c._handle_ack = AsyncMock()
        c._handle_error = mock.MagicMock()
        _run(c._handle_eio_message('0'))
        c._handle_connect.mock.assert_called_with(None)
        _run(c._handle_eio_message('0/foo'))
        c._handle_connect.mock.assert_called_with('/foo')
        _run(c._handle_eio_message('1'))
        c._handle_disconnect.mock.assert_called_with(None)
        _run(c._handle_eio_message('1/foo'))
        c._handle_disconnect.mock.assert_called_with('/foo')
        _run(c._handle_eio_message('2["foo"]'))
        c._handle_event.mock.assert_called_with(None, None, ['foo'])
        _run(c._handle_eio_message('3/foo,["bar"]'))
        c._handle_ack.mock.assert_called_with('/foo', None, ['bar'])
        _run(c._handle_eio_message('4'))
        c._handle_error.assert_called_with(None)
        _run(c._handle_eio_message('4/foo'))
        c._handle_error.assert_called_with('/foo')
        _run(c._handle_eio_message('51-{"_placeholder":true,"num":0}'))
        self.assertEqual(c._binary_packet.packet_type, packet.BINARY_EVENT)
        _run(c._handle_eio_message(b'foo'))
        c._handle_event.mock.assert_called_with(None, None, b'foo')
        _run(c._handle_eio_message(
            '62-/foo,{"1":{"_placeholder":true,"num":1},'
            '"2":{"_placeholder":true,"num":0}}'))
        self.assertEqual(c._binary_packet.packet_type, packet.BINARY_ACK)
        _run(c._handle_eio_message(b'bar'))
        _run(c._handle_eio_message(b'foo'))
        c._handle_ack.mock.assert_called_with('/foo', None, {'1': b'foo',
                                                             '2': b'bar'})
        self.assertRaises(ValueError, _run, c._handle_eio_message('9'))
    def test_eio_disconnect(self):
        c = asyncio_client.AsyncClient()
        c._trigger_event = AsyncMock()
        c.eio.state = 'connected'
        _run(c._handle_eio_disconnect())
        c._trigger_event.mock.assert_called_once_with(
            'disconnect', namespace='/')
    def test_eio_disconnect_namespaces(self):
        c = asyncio_client.AsyncClient()
        c.namespaces = ['/foo', '/bar']
        c._trigger_event = AsyncMock()
        c.eio.state = 'connected'
        _run(c._handle_eio_disconnect())
        c._trigger_event.mock.assert_any_call('disconnect', namespace='/foo')
        c._trigger_event.mock.assert_any_call('disconnect', namespace='/bar')
        c._trigger_event.mock.assert_any_call('disconnect', namespace='/')
    def test_eio_disconnect_reconnect(self):
        c = asyncio_client.AsyncClient(reconnection=True)
        c.start_background_task = mock.MagicMock()
        c.eio.state = 'connected'
        _run(c._handle_eio_disconnect())
        c.start_background_task.assert_called_once_with(c._handle_reconnect)
    def test_eio_disconnect_self_disconnect(self):
        # A client-initiated disconnect must not schedule a reconnect task.
        c = asyncio_client.AsyncClient(reconnection=True)
        c.start_background_task = mock.MagicMock()
        c.eio.state = 'disconnected'
        _run(c._handle_eio_disconnect())
        c.start_background_task.assert_not_called()
    def test_eio_disconnect_no_reconnect(self):
        c = asyncio_client.AsyncClient(reconnection=False)
        c.start_background_task = mock.MagicMock()
        c.eio.state = 'connected'
        _run(c._handle_eio_disconnect())
        c.start_background_task.assert_not_called()
| 43.306428 | 79 | 0.609865 |
79556f65bc2c37da01b5ef759e8a570c4c0ab101 | 6,207 | py | Python | h2o-perf/bench/py/h2oPerf/Process.py | vkuznet/h2o | e08f7014f228cbaecfb21f57379970e6a3ac0756 | [
"Apache-2.0"
] | null | null | null | h2o-perf/bench/py/h2oPerf/Process.py | vkuznet/h2o | e08f7014f228cbaecfb21f57379970e6a3ac0756 | [
"Apache-2.0"
] | null | null | null | h2o-perf/bench/py/h2oPerf/Process.py | vkuznet/h2o | e08f7014f228cbaecfb21f57379970e6a3ac0756 | [
"Apache-2.0"
] | null | null | null | from Scrape import *
from Table import *
import re
import os
import subprocess
import time
import atexit
class Process:
"""
@param test_dir: Full absolute path to the test directory.
@param test_short_dir: Path from h2o/R/tests to the test directory.
@param output_dir: The directory where we can create an output file for this process.
@return: The test object.
"""
def __init__(self, test_dir, test_short_dir, output_dir):
self.test_dir = test_dir
self.test_short_dir = test_short_dir
self.output_dir = output_dir
self.test_name = ""
self.output_file_name = ""
self.canceled = False
self.terminated = False
self.returncode = None #self.__did_complete__()
self.ip = None
self.pid = -1
self.port = None
self.port = None
self.child = None
def poll(self):
"""
Poll to see if process completed.
"""
return self.__did_complete__()
def cancel(self):
"""
Cancel a process.
"""
if (self.pid <= 0):
self.canceled = True
def terminate(self):
"""
Terminate the process. (Due to a signal.)
"""
self.terminated = True
if (self.pid > 0):
print("Killing R process with PID {}".format(self.pid))
try:
self.child.terminate()
except OSError:
pass
self.pid = -1
def get_test_dir_file_name(self):
"""
@return: The full absolute path of this test.
"""
return os.path.join(self.test_dir, self.test_name)
def get_test_name(self):
"""
@return: The file name (no directory) of this test.
"""
return self.test_name
def get_ip(self):
"""
@return: IP of the cloud where this test ran.
"""
return self.ip
def get_port(self):
"""
@return: Integer port number of the cloud where this test ran.
"""
return int(self.port)
def get_passed(self):
"""
@return: True if the test passed, False otherwise.
"""
return (self.returncode == 0)
def get_completed(self):
"""
@return: True if the test completed (pass or fail), False otherwise.
"""
return (self.returncode > self.__did_not_complete__())
def get_output_dir_file_name(self):
"""
@return: Full path to the output file which you can paste to a terminal window.
"""
return (os.path.join(self.output_dir, self.output_file_name))
def __str__(self):
s = ""
s += "Teste: {}/{}\n".format(self.test_dir, self.test_name)
return s
def __did_complete__(self):
"""
Check if a R subprocess finished.
"""
child = self.child
if (child is None):
return False
child.poll()
if (child.returncode is None):
return False
self.pid = -1
self.returncode = child.returncode
return True
def __did_not_complete__(self):
"""
returncode marker to know if test ran or not.
"""
return -9999999
class RProc(Process):
"""
This class represents a connection to an R subprocess.
@param rfile: This is the name of the R file that is
to be subproccessed. Example: gbm_test1_Parse.R
"""
def __init__(self, test_dir, test_short_dir, output_dir, rfile, perfdb):
self.perfdb = perfdb
self.rfile = rfile
self.rtype = self.__get_type__()
self.test_dir = test_dir
self.test_short_dir = test_short_dir
self.output_dir = output_dir
self.test_name = self.rfile
self.output_file_name = ""
self.did_time_pass = 0
self.did_correct_pass = 0
self.contaminated = 0
self.contamination_message = ""
self.canceled = False
self.terminated = False
self.returncode = None
self.ip = None
self.pid = -1
self.port = None
self.port = None
self.child = None
def start(self, ip, port):
"""
Start an R subproccess.
"""
self.ip = ip
self.port = port
print
print "DEBUG RPROCESS: "
print "TEST NAME: " + self.test_name
print
print "RFILE : " + self.rfile
print
cmd = ["R", "-f", self.rfile, "--args", self.ip + ":" + str(self.port)]
short_dir = re.sub(r'[\\/]', "_", self.test_short_dir)
self.output_file_name = os.path.join(self.output_dir,
short_dir + "_" + self.test_name + ".out")
print "DEBUG PROCESS OUT FILE NAME: "
print "OUT FILE NAME: " + self.output_file_name
f = open(self.output_file_name, "w")
self.child = subprocess.Popen(args=cmd,
stdout = f,
stderr = subprocess.STDOUT,
cwd = self.test_dir)
@atexit.register
def kill_process():
try:
self.child.terminate()
except OSError:
pass
self.pid = self.child.pid
def scrape_phase(self):
scraper = Scraper(self.perfdb, self.rtype[0], self.test_dir, self.test_short_dir, self.output_dir, self.output_file_name)
res = scraper.scrape()
self.contaminated = scraper.contaminated
self.contamination_message = scraper.contamination_message
self.did_time_pass = scraper.did_time_pass
self.did_correct_pass = scraper.did_correct_pass
return res
def block(self):
while(True):
if self.terminated:
return None
if self.poll():
break
time.sleep(1)
def __get_type__(self):
"""
Returns the type: 'parse', 'model', 'predict'
"""
types = ['parse', 'model', 'predict']
rf = self.rfile.lower()
return [t for t in types if t in rf]
| 29.140845 | 129 | 0.548252 |
79556faff30786883034c2fe5054437219017687 | 1,464 | py | Python | src/utils/class_utils.py | minhhoangbui/PICK-pytorch | c74d2d1e5d1f8c7e837ea9776146bc84a7ecf30a | [
"MIT"
] | null | null | null | src/utils/class_utils.py | minhhoangbui/PICK-pytorch | c74d2d1e5d1f8c7e837ea9776146bc84a7ecf30a | [
"MIT"
] | null | null | null | src/utils/class_utils.py | minhhoangbui/PICK-pytorch | c74d2d1e5d1f8c7e837ea9776146bc84a7ecf30a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# @Author: Wenwen Yu
# @Created Time: 7/8/2020 9:26 PM
from collections import Counter
from torchtext.vocab import Vocab
from pathlib import Path
class ClassVocab(Vocab):
def __init__(self, classes, **kwargs):
"""
convert key to index(stoi), and get key string by index(itos)
:param classes: list or str, key string or entity list
:param kwargs:
"""
cls_list = None
if isinstance(classes, str):
cls_list = list(classes)
if isinstance(classes, Path):
p = Path(classes)
if not p.exists():
raise RuntimeError('Key file is not found')
with p.open(encoding='utf8') as f:
classes = f.read()
classes = classes.strip()
cls_list = list(classes)
elif isinstance(classes, list):
cls_list = classes
c = Counter(cls_list)
specials = ['<pad>', '<unk>']
self.special_count = len(specials)
super().__init__(c, specials=specials, **kwargs)
def entities2iob_labels(entities: list):
"""
get all iob string label by entities
:param entities:
:return:
"""
tags = []
for e in entities:
tags.append('B-{}'.format(e))
tags.append('I-{}'.format(e))
tags.append('O')
return tags
keys_vocab_cls = ClassVocab(Path(__file__).parent.joinpath('keys.txt'), specials_first=False)
| 27.622642 | 93 | 0.579235 |
79557077fa258f827444964aeb3dc04df42652ec | 1,242 | py | Python | scraper/storage_spiders/nguyenkimcom.py | chongiadung/choinho | d2a216fe7a5064d73cdee3e928a7beef7f511fd1 | [
"MIT"
] | null | null | null | scraper/storage_spiders/nguyenkimcom.py | chongiadung/choinho | d2a216fe7a5064d73cdee3e928a7beef7f511fd1 | [
"MIT"
] | 10 | 2020-02-11T23:34:28.000Z | 2022-03-11T23:16:12.000Z | scraper/storage_spiders/nguyenkimcom.py | chongiadung/choinho | d2a216fe7a5064d73cdee3e928a7beef7f511fd1 | [
"MIT"
] | 3 | 2018-08-05T14:54:25.000Z | 2021-06-07T01:49:59.000Z | # Auto generated by generator.py. Delete this line if you make modification.
from scrapy.spiders import Rule
from scrapy.linkextractors import LinkExtractor
XPATH = {
'name' : "//h1[@class='block_product-title']",
'price' : "//span[contains(@id, 'sec_discounted_price')]",
'category' : "//div[@class='breadcrumbs clearfix']/a",
'description' : "//div[@id='content_description']",
'images' : "//div[@class='border-image-wrap cm-preview-wrapper']/a/img/@data-original",
'canonical' : "",
'base_url' : "//base/@href",
'brand' : "",
'in_stock' : "",
'guarantee' : "",
'promotion' : ""
}
name = 'nguyenkim.com'
allowed_domains = ['nguyenkim.com']
start_urls = ['http://www.nguyenkim.com/']
tracking_url = 'http://click.accesstrade.vn/adv.php?rk=0000uy0000uw&url=,utm_source=accesstrade&utm_medium=affiliate&utm_campaign=nguyenkim&at_sessionid={clickid}'
sitemap_urls = ['']
sitemap_rules = [('', 'parse_item')]
sitemap_follow = ['']
rules = [
Rule(LinkExtractor(allow = ['/[a-zA-Z0-9-.]+\.html$']), 'parse_item'),
Rule(LinkExtractor(allow=['/[a-zA-Z0-9-]+/$'], deny=['sort_by=','sort_order=','features_hash=','items_per_page=']), 'parse'),
#Rule(LinkExtractor(), 'parse_item_and_links'),
]
| 41.4 | 163 | 0.6562 |
7955707c5abf2cb50a9775a228ae8ffcec988414 | 8,150 | py | Python | sdk/python/pulumi_azure_native/compute/v20180401/get_snapshot.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/compute/v20180401/get_snapshot.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/compute/v20180401/get_snapshot.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetSnapshotResult',
'AwaitableGetSnapshotResult',
'get_snapshot',
]
@pulumi.output_type
class GetSnapshotResult:
"""
Snapshot resource.
"""
def __init__(__self__, creation_data=None, disk_size_gb=None, encryption_settings=None, id=None, location=None, managed_by=None, name=None, os_type=None, provisioning_state=None, sku=None, tags=None, time_created=None, type=None):
if creation_data and not isinstance(creation_data, dict):
raise TypeError("Expected argument 'creation_data' to be a dict")
pulumi.set(__self__, "creation_data", creation_data)
if disk_size_gb and not isinstance(disk_size_gb, int):
raise TypeError("Expected argument 'disk_size_gb' to be a int")
pulumi.set(__self__, "disk_size_gb", disk_size_gb)
if encryption_settings and not isinstance(encryption_settings, dict):
raise TypeError("Expected argument 'encryption_settings' to be a dict")
pulumi.set(__self__, "encryption_settings", encryption_settings)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if managed_by and not isinstance(managed_by, str):
raise TypeError("Expected argument 'managed_by' to be a str")
pulumi.set(__self__, "managed_by", managed_by)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if os_type and not isinstance(os_type, str):
raise TypeError("Expected argument 'os_type' to be a str")
pulumi.set(__self__, "os_type", os_type)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if sku and not isinstance(sku, dict):
raise TypeError("Expected argument 'sku' to be a dict")
pulumi.set(__self__, "sku", sku)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if time_created and not isinstance(time_created, str):
raise TypeError("Expected argument 'time_created' to be a str")
pulumi.set(__self__, "time_created", time_created)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="creationData")
def creation_data(self) -> 'outputs.CreationDataResponse':
"""
Disk source information. CreationData information cannot be changed after the disk has been created.
"""
return pulumi.get(self, "creation_data")
@property
@pulumi.getter(name="diskSizeGB")
def disk_size_gb(self) -> Optional[int]:
"""
If creationData.createOption is Empty, this field is mandatory and it indicates the size of the VHD to create. If this field is present for updates or creation with other options, it indicates a resize. Resizes are only allowed if the disk is not attached to a running VM, and can only increase the disk's size.
"""
return pulumi.get(self, "disk_size_gb")
@property
@pulumi.getter(name="encryptionSettings")
def encryption_settings(self) -> Optional['outputs.EncryptionSettingsResponse']:
"""
Encryption settings for disk or snapshot
"""
return pulumi.get(self, "encryption_settings")
@property
@pulumi.getter
def id(self) -> str:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def location(self) -> str:
"""
Resource location
"""
return pulumi.get(self, "location")
@property
@pulumi.getter(name="managedBy")
def managed_by(self) -> str:
"""
Unused. Always Null.
"""
return pulumi.get(self, "managed_by")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="osType")
def os_type(self) -> Optional[str]:
"""
The Operating System type.
"""
return pulumi.get(self, "os_type")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The disk provisioning state.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def sku(self) -> Optional['outputs.SnapshotSkuResponse']:
"""
The snapshots sku name. Can be Standard_LRS, Premium_LRS, or Standard_ZRS.
"""
return pulumi.get(self, "sku")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Resource tags
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> str:
"""
The time when the disk was created.
"""
return pulumi.get(self, "time_created")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type
"""
return pulumi.get(self, "type")
class AwaitableGetSnapshotResult(GetSnapshotResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetSnapshotResult(
creation_data=self.creation_data,
disk_size_gb=self.disk_size_gb,
encryption_settings=self.encryption_settings,
id=self.id,
location=self.location,
managed_by=self.managed_by,
name=self.name,
os_type=self.os_type,
provisioning_state=self.provisioning_state,
sku=self.sku,
tags=self.tags,
time_created=self.time_created,
type=self.type)
def get_snapshot(resource_group_name: Optional[str] = None,
snapshot_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetSnapshotResult:
"""
Snapshot resource.
:param str resource_group_name: The name of the resource group.
:param str snapshot_name: The name of the snapshot that is being created. The name can't be changed after the snapshot is created. Supported characters for the name are a-z, A-Z, 0-9 and _. The max name length is 80 characters.
"""
__args__ = dict()
__args__['resourceGroupName'] = resource_group_name
__args__['snapshotName'] = snapshot_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:compute/v20180401:getSnapshot', __args__, opts=opts, typ=GetSnapshotResult).value
return AwaitableGetSnapshotResult(
creation_data=__ret__.creation_data,
disk_size_gb=__ret__.disk_size_gb,
encryption_settings=__ret__.encryption_settings,
id=__ret__.id,
location=__ret__.location,
managed_by=__ret__.managed_by,
name=__ret__.name,
os_type=__ret__.os_type,
provisioning_state=__ret__.provisioning_state,
sku=__ret__.sku,
tags=__ret__.tags,
time_created=__ret__.time_created,
type=__ret__.type)
| 36.547085 | 319 | 0.644663 |
795570d732e51273d942674e2a11fbf1dd0381a0 | 1,510 | py | Python | test/test_manager/convention_checker.py | jonghenhan/iotivity | 7dfc2bc6a5c0506cf88bc23e88e38fe1b795da31 | [
"Apache-2.0"
] | 301 | 2015-01-20T16:11:32.000Z | 2021-11-25T04:29:36.000Z | test/test_manager/convention_checker.py | jonghenhan/iotivity | 7dfc2bc6a5c0506cf88bc23e88e38fe1b795da31 | [
"Apache-2.0"
] | 13 | 2015-06-04T09:55:15.000Z | 2020-09-23T00:38:07.000Z | test/test_manager/convention_checker.py | jonghenhan/iotivity | 7dfc2bc6a5c0506cf88bc23e88e38fe1b795da31 | [
"Apache-2.0"
] | 233 | 2015-01-26T03:41:59.000Z | 2022-03-18T23:54:04.000Z | #!/usr/bin/python3
'''
/******************************************************************
*
* Copyright 2018 Samsung Electronics All Rights Reserved.
*
*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************/
'''
import sys
from configuration import *
from ite.tc.container import TestSpecContainer
module_filter = ''
if len(sys.argv) > 1:
module_filter = sys.argv[1]
print("### Start to Check TC Name & Comment Rule Conventions for Iotivity SEC Test Codes\n")
checker = TestSpecContainer()
api_valid_convention = checker.extract_api_testspec(API_TC_SRC_DIR, module_filter)
sampleapp_valid_convention = checker.extract_sampleapp_testspec(SAMPLEAPP_TC_SRC_DIR, module_filter)
print("\n### Checking TC Name & Comment Rule Conventions for Iotivity SEC Test Codes is Done!!")
if api_valid_convention == False or sampleapp_valid_convention == False:
print ("\n==> There is a violation of API TC or Sample App TC naming convention")
exit(1)
| 35.952381 | 100 | 0.696026 |
795570f4e51641eb729ca3ec1821ef838aa6ba02 | 40 | py | Python | dipy/viz/__init__.py | stefanv/dipy | 4d4518861a796502826f053c17161487db126487 | [
"BSD-3-Clause"
] | null | null | null | dipy/viz/__init__.py | stefanv/dipy | 4d4518861a796502826f053c17161487db126487 | [
"BSD-3-Clause"
] | null | null | null | dipy/viz/__init__.py | stefanv/dipy | 4d4518861a796502826f053c17161487db126487 | [
"BSD-3-Clause"
] | null | null | null | # Init file for visualization package
| 10 | 37 | 0.775 |
79557145dfbba0454fb8fd012db16e366e2bcdf4 | 3,111 | py | Python | hail/python/hail/methods/__init__.py | mitochon/hail | 25e5e5b8da1d978468d2cee393426ade46484a87 | [
"MIT"
] | null | null | null | hail/python/hail/methods/__init__.py | mitochon/hail | 25e5e5b8da1d978468d2cee393426ade46484a87 | [
"MIT"
] | 3 | 2017-06-16T18:10:45.000Z | 2017-07-21T17:44:13.000Z | hail/python/hail/methods/__init__.py | mitochon/hail | 25e5e5b8da1d978468d2cee393426ade46484a87 | [
"MIT"
] | 2 | 2018-01-30T00:50:52.000Z | 2018-03-22T20:04:01.000Z | from .family_methods import (trio_matrix, mendel_errors,
transmission_disequilibrium_test, de_novo)
from .impex import (export_elasticsearch, export_gen, export_bgen, export_plink,
export_vcf, import_locus_intervals, import_bed, import_fam, grep,
import_bgen, import_gen, import_table, import_plink, read_matrix_table,
read_table, get_vcf_metadata, import_vcf, import_gvcfs, import_vcfs,
index_bgen, import_matrix_table)
from .statgen import (skat, impute_sex, genetic_relatedness_matrix, realized_relationship_matrix,
pca, hwe_normalized_pca, _blanczos_pca, _hwe_normalized_blanczos, split_multi,
filter_alleles, filter_alleles_hts, split_multi_hts, balding_nichols_model,
ld_prune, row_correlation, ld_matrix, linear_mixed_model,
linear_regression_rows, _linear_regression_rows_nd,
logistic_regression_rows, poisson_regression_rows,
linear_mixed_regression_rows, lambda_gc)
from .qc import sample_qc, variant_qc, vep, concordance, nirvana, summarize_variants
from .misc import rename_duplicates, maximal_independent_set, filter_intervals
from .relatedness import identity_by_descent, king, pc_relate
__all__ = ['trio_matrix',
'linear_mixed_model',
'skat',
'identity_by_descent',
'impute_sex',
'linear_regression_rows',
'_linear_regression_rows_nd',
'logistic_regression_rows',
'poisson_regression_rows',
'linear_mixed_regression_rows',
'lambda_gc',
'sample_qc',
'variant_qc',
'genetic_relatedness_matrix',
'realized_relationship_matrix',
'pca',
'hwe_normalized_pca',
'_blanczos_pca',
'_hwe_normalized_blanczos',
'pc_relate',
'rename_duplicates',
'split_multi',
'split_multi_hts',
'mendel_errors',
'export_elasticsearch',
'export_gen',
'export_bgen',
'export_plink',
'export_vcf',
'vep',
'concordance',
'maximal_independent_set',
'import_locus_intervals',
'import_bed',
'import_fam',
'import_matrix_table',
'nirvana',
'transmission_disequilibrium_test',
'grep',
'import_bgen',
'import_gen',
'import_table',
'import_plink',
'read_matrix_table',
'read_table',
'get_vcf_metadata',
'import_vcf',
'import_vcfs',
'import_gvcfs',
'index_bgen',
'balding_nichols_model',
'ld_prune',
'filter_intervals',
'de_novo',
'filter_alleles',
'filter_alleles_hts',
'summarize_variants',
'row_correlation',
'ld_matrix',
'king'
]
| 38.8875 | 100 | 0.585021 |
7955719d11f5fca63a67be6aeb63fd0550942da5 | 1,045 | py | Python | utils/http.py | Tbizla/lambot.py | 66b8e114c031cc378550364c30e2364cbabac88e | [
"MIT"
] | null | null | null | utils/http.py | Tbizla/lambot.py | 66b8e114c031cc378550364c30e2364cbabac88e | [
"MIT"
] | null | null | null | utils/http.py | Tbizla/lambot.py | 66b8e114c031cc378550364c30e2364cbabac88e | [
"MIT"
] | null | null | null | import asyncio
import aiohttp
from utils import cache
# Removes the aiohttp ClientSession instance warning.
class HTTPSession(aiohttp.ClientSession):
""" Abstract class for aiohttp. """
def __init__(self, loop=None):
super().__init__(loop=loop or asyncio.get_event_loop())
def __del__(self):
"""
Closes the ClientSession instance
cleanly when the instance is deleted.
Useful for things like when the interpreter closes.
This would be perfect if discord.py had this as well. :thinking:
"""
if not self.closed:
self.close()
session = HTTPSession()
@cache.async_cache()
async def query(url, method="get", res_method="text", *args, **kwargs):
async with getattr(session, method.lower())(url, *args, **kwargs) as res:
return await getattr(res, res_method)()
async def get(url, *args, **kwargs):
return await query(url, "get", *args, **kwargs)
async def post(url, *args, **kwargs):
return await query(url, "post", *args, **kwargs) | 26.794872 | 77 | 0.65933 |
795572c9b94bb7768dceaf079d1a37eb32b17fd2 | 2,736 | py | Python | tests/test_gr.py | torakses/django-localflavor | 17ca87095d6f8c3f3888016085a2edb5951889f4 | [
"BSD-3-Clause"
] | null | null | null | tests/test_gr.py | torakses/django-localflavor | 17ca87095d6f8c3f3888016085a2edb5951889f4 | [
"BSD-3-Clause"
] | null | null | null | tests/test_gr.py | torakses/django-localflavor | 17ca87095d6f8c3f3888016085a2edb5951889f4 | [
"BSD-3-Clause"
] | null | null | null | from django.test import SimpleTestCase
from localflavor.gr.forms import GRMobilePhoneNumberField, GRPhoneNumberField, GRPostalCodeField, GRTaxNumberCodeField
class GRLocalFlavorTests(SimpleTestCase):
def test_GRTaxNumberField(self):
"""The valid tests are from greek tax numbers (AFMs) found on the internet with a google search."""
error = ['Enter a valid greek tax number (9 digits).']
valid = {
'090051291': '090051291',
'997881842': '997881842',
'090220804': '090220804',
'090000045': '090000045',
'099757704': '099757704',
}
invalid = {
'123456789': error,
'123 32 12 3213': error,
'32 123 5345': error,
'0': error,
'abc': error,
'00000': error,
'000000000': error,
'1111111': error,
'3123123': error,
'312312334534': error,
'999999999': error,
'123123123': error,
'321000123': error,
'd21000123': error,
}
self.assertFieldOutput(GRTaxNumberCodeField, valid, invalid)
def test_GRPostalCodeField(self):
error = ['Enter a valid 5-digit greek postal code.']
valid = {
'51642': '51642',
'21742': '21742',
'75006': '75006',
'85017': '85017',
}
invalid = {
'12 34': error,
'124567': error,
'04567': error,
'94567': error,
'124567': error,
'1345': error,
'134115': error,
'b231a': error,
}
self.assertFieldOutput(GRPostalCodeField, valid, invalid)
def test_GRPhoneNumberField(self):
error = ['Enter a 10-digit greek phone number.']
valid = {
'2109292921': '2109292921',
'+301109292921': '+301109292921',
}
invalid = {
'12 34': error,
'124567': error,
'21092929211': error,
'661232123': error,
'694555555a': error,
}
self.assertFieldOutput(GRPhoneNumberField, valid, invalid)
def test_GRMobilePhoneNumberField(self):
error = ['Enter a greek mobile phone number starting with 69.']
valid = {
'6945555555': '6945555555',
'6931234567': '6931234567',
'+306971234567': '+306971234567',
}
invalid = {
'12 34': error,
'124567': error,
'21092929211': error,
'2102233444': error,
'2111234567': error,
}
self.assertFieldOutput(GRMobilePhoneNumberField, valid, invalid)
| 30.065934 | 118 | 0.518275 |
7955739b89d0d07687808e42cbe3b0c1835466e2 | 26 | py | Python | Python/hellolies.py | saurabhcommand/Hello-world | 647bad9da901a52d455f05ecc37c6823c22dc77e | [
"MIT"
] | 1,428 | 2018-10-03T15:15:17.000Z | 2019-03-31T18:38:36.000Z | Python/hellolies.py | saurabhcommand/Hello-world | 647bad9da901a52d455f05ecc37c6823c22dc77e | [
"MIT"
] | 1,162 | 2018-10-03T15:05:49.000Z | 2018-10-18T14:17:52.000Z | Python/hellolies.py | saurabhcommand/Hello-world | 647bad9da901a52d455f05ecc37c6823c22dc77e | [
"MIT"
] | 3,909 | 2018-10-03T15:07:19.000Z | 2019-03-31T18:39:08.000Z | print("Hello, pyLadies!")
| 13 | 25 | 0.692308 |
795575b87a89617896694dd2d7a9ccf5ecabb04a | 4,532 | py | Python | src/oidctest/site_setup.py | auth0/oidctest | 13948bfdf08430addcbacbb8011a359d0a3952e3 | [
"Apache-2.0"
] | null | null | null | src/oidctest/site_setup.py | auth0/oidctest | 13948bfdf08430addcbacbb8011a359d0a3952e3 | [
"Apache-2.0"
] | null | null | null | src/oidctest/site_setup.py | auth0/oidctest | 13948bfdf08430addcbacbb8011a359d0a3952e3 | [
"Apache-2.0"
] | null | null | null | import datetime
import filecmp
import os
import shutil
import subprocess
def modification_date(filename):
t = os.path.getmtime(filename)
return datetime.datetime.fromtimestamp(t)
def copy_if_not_same(src, dst, overwrite=False):
try:
os.stat(dst)
except OSError:
shutil.copy(src, dst)
return True
if filecmp.cmp(src, dst):
return False
else:
if overwrite:
shutil.copy(src, dst)
return True
return False
def oidc_op_setup(distroot):
# for _dir in ['server']:
# _op_dir = os.path.join(distroot, 'test_tool', 'test_op', _dir)
# if os.path.isdir(_dir) is False:
# shutil.copytree(_op_dir, 'server')
# os.chdir('server')
for _dir in ['backup', 'certs', 'entities', 'export', 'keys', 'log',
'requests', 'server_log', 'tar']:
if os.path.isdir(_dir) is False:
os.mkdir(_dir)
_op_dir = os.path.join(distroot, 'test_tool', 'cp', 'test_op', )
for _dir in ['entity_info', 'flows', 'html', 'static', 'tool']:
_src = os.path.join(_op_dir, _dir)
if os.path.isdir(_dir):
shutil.rmtree(_dir)
shutil.copytree(_src, _dir)
for _fname in ['run_example.sh', 'config_example.py', 'config_server.py',
'tt_config_example.py', 'op_test_tool.py', 'version.py']:
_file = os.path.join(_op_dir, _fname)
copy_if_not_same(_file, _fname, True)
subprocess.call(
["make_entity_info.py", "-i", "https://example.com", "-p", "C.T.T.T",
"-s", "-e", "-w", "diana@localhost:8040", "-t", "CTTT"])
subprocess.call(
["make_entity_info.py", "-i", "https://example.com", "-p", "C.F.T.F",
"-t", "CFTF"])
subprocess.call(
["make_entity_info.py", "-i", "https://example.com", "-p", "C.F.F.F",
"-t", "CFFF"])
# os.chdir('..')
def oidc_rpinst_setup(distroot):
for _dir in ['certs', 'keys', 'server_log', 'log']:
if os.path.isdir(_dir) is False:
os.mkdir(_dir)
_op_dir = os.path.join(distroot, 'test_tool', 'test_rp', 'rpinst')
for _dir in ['static', 'htdocs', 'flows']:
_src = os.path.join(_op_dir, _dir)
if os.path.isdir(_dir):
shutil.rmtree(_dir)
shutil.copytree(_src, _dir)
for _fname in ['run.sh', 'example_conf.py', 'profiles.json',
'path2port.csv']:
_file = os.path.join(_op_dir, _fname)
copy_if_not_same(_file, _fname, True)
def oidc_cp_rplib_setup(distroot):
for _dir in ['certs', 'keys', 'log']:
if os.path.isdir(_dir) is False:
os.mkdir(_dir)
_op_dir = os.path.join(distroot, 'test_tool', 'cp', 'test_rplib', 'rp')
for _dir in ['static', 'flows']:
_src = os.path.join(_op_dir, _dir)
if os.path.isdir(_dir):
shutil.rmtree(_dir)
shutil.copytree(_src, _dir)
for _fname in ['run_example.sh', 'example_conf.py', 'links.json',
'server.py', 'version.py']:
_file = os.path.join(_op_dir, _fname)
copy_if_not_same(_file, _fname, True)
def oidc_rplib_setup(distroot):
for _dir in ['certs', 'keys', 'log']:
if os.path.isdir(_dir) is False:
os.mkdir(_dir)
_op_dir = os.path.join(distroot, 'test_tool', 'test_rp', 'rplib',
'op')
for _dir in ['static', 'htdocs', '_static', 'flows']:
_src = os.path.join(_op_dir, _dir)
if os.path.isdir(_dir):
shutil.rmtree(_dir)
shutil.copytree(_src, _dir)
for _fname in ['example_conf.py', 'test_rp_op.py', 'setup.py', 'run.sh',
'link.json']:
_file = os.path.join(_op_dir, _fname)
copy_if_not_same(_file, _fname, overwrite=True)
def fedoidc_rplib_setup(distroot):
for _dir in ['certs', 'keys', 'log', 'fo_jwks', 'ms_dir']:
if os.path.isdir(_dir) is False:
os.mkdir(_dir)
_op_dir = os.path.join(distroot, 'test_tool', 'cp', 'test_rplib', 'fed_rp')
for _dir in ['static', 'flows']:
_src = os.path.join(_op_dir, _dir)
if os.path.isdir(_dir):
shutil.rmtree(_dir)
shutil.copytree(_src, _dir)
for _fname in ['setup.py', 'example_conf.py', 'fed_conf_usage.py',
'install.sh', 'links.json', 'pre.html',
'server.py', 'example_run.sh', 'bogus_sms.py']:
_file = os.path.join(_op_dir, _fname)
copy_if_not_same(_file, _fname, overwrite=True)
| 31.692308 | 79 | 0.572816 |
795576979d2601fa8d8249dcf15dac88260e9a93 | 1,937 | py | Python | sample-code/examples/python/android_simple.py | githubanly/appium | b5540de9229937cf0e92bb4a914d69c2b70c2097 | [
"Apache-2.0"
] | 1 | 2017-11-02T09:34:46.000Z | 2017-11-02T09:34:46.000Z | sample-code/examples/python/android_simple.py | githubanly/appium | b5540de9229937cf0e92bb4a914d69c2b70c2097 | [
"Apache-2.0"
] | null | null | null | sample-code/examples/python/android_simple.py | githubanly/appium | b5540de9229937cf0e92bb4a914d69c2b70c2097 | [
"Apache-2.0"
] | null | null | null | import os
from time import sleep
import unittest
from appium import webdriver
# Returns abs path relative to this file and not cwd
PATH = lambda p: os.path.abspath(
os.path.join(os.path.dirname(__file__), p)
)
class SimpleAndroidTests(unittest.TestCase):
def setUp(self):
desired_caps = {}
desired_caps['platformName'] = 'Android'
desired_caps['platformVersion'] = '4.2'
desired_caps['deviceName'] = 'Android Emulator'
desired_caps['app'] = PATH(
'../../../sample-code/apps/ApiDemos/bin/ApiDemos-debug.apk'
)
self.driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
def tearDown(self):
# end the session
self.driver.quit()
def test_find_elements(self):
el = self.driver.find_element_by_accessibility_id('Graphics')
el.click()
el = self.driver.find_element_by_accessibility_id('Arcs')
self.assertIsNotNone(el)
self.driver.back()
el = self.driver.find_element_by_name("App")
self.assertIsNotNone(el)
els = self.driver.find_elements_by_android_uiautomator("new UiSelector().clickable(true)")
self.assertEqual(12, len(els))
els = self.driver.find_elements_by_android_uiautomator('new UiSelector().enabled(true)')
self.assertEqual(20, len(els))
self.assertEqual("API Demos", els[7].text)
def test_simple_actions(self):
el = self.driver.find_element_by_accessibility_id('Graphics')
el.click()
el = self.driver.find_element_by_accessibility_id('Arcs')
el.click()
main = self.driver.find_elements_by_android_uiautomator('new UiSelector().clickable(false)')[7]
self.assertEqual("Graphics/Arcs", main.text)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(SimpleAndroidTests)
unittest.TextTestRunner(verbosity=2).run(suite)
| 31.241935 | 103 | 0.673722 |
795577710c19397bf3996ae04fe76ea9e6ab7d84 | 7,194 | py | Python | DeepLearning/functions.py | CSID-DGU/2021-2-OSSP2-DodgeCounselor-5 | 4e3fbc89540739f44f126cddd1a0a0fabcbc427f | [
"MIT"
] | null | null | null | DeepLearning/functions.py | CSID-DGU/2021-2-OSSP2-DodgeCounselor-5 | 4e3fbc89540739f44f126cddd1a0a0fabcbc427f | [
"MIT"
] | null | null | null | DeepLearning/functions.py | CSID-DGU/2021-2-OSSP2-DodgeCounselor-5 | 4e3fbc89540739f44f126cddd1a0a0fabcbc427f | [
"MIT"
] | null | null | null | from typing import Mapping
from requests.models import HTTPError
from riotwatcher import LolWatcher
from riotwatcher._apis.league_of_legends.SummonerApiV4 import SummonerApiV4
from riotwatcher._apis.league_of_legends.MatchApiV5 import MatchApiV5
key = '키 입력해주세요!' #Production key -- placeholder text ("enter your key"); must be replaced with a real Riot API key
watcher = LolWatcher(key)  # shared RiotWatcher client used by every helper below
def nameSlice(input):
    """Split a multi-search paste into individual summoner names.

    The in-game lobby text looks like "name1님이 ... . name2님이 ...":
    entries are separated by ". " and each name is terminated by the
    suffix "님이 ". The original character scan indexed j+1 and j+2 and
    could raise IndexError when '님' appeared at the very end of an
    entry; str.find locates the same first occurrence safely.

    :param input: raw multi-search string pasted by the user
    :return: list of summoner names with everything from "님이 " stripped
    """
    player = input.split(". ")
    for i in range(len(player)):
        cut = player[i].find("님이 ")
        if cut != -1:
            player[i] = player[i][:cut]
    return player
def getUserNames(TIER, DIVISION, PAGE):
    """Return the summoner names of every player on one KR ladder page.

    Queries the KR solo-queue ladder for the given tier/division/page
    and extracts only the 'summonerName' field from each entry.
    """
    entries = watcher.league.entries('KR', 'RANKED_SOLO_5x5', TIER, DIVISION, PAGE)
    return [entry['summonerName'] for entry in entries]
def getSummonerInfo(playerName):
    """Fetch the SummonerDTO for the given summoner name on the KR server."""
    # The returned dict carries the ids (puuid, etc.) used by later calls.
    return watcher.summoner.by_name("KR", playerName)
def getMatchBySummonerDTO(infoList, gameCount):
    """Return the most recent `gameCount` ranked match ids for a summoner.

    :param infoList: SummonerDTO dict; only its 'puuid' field is used
    :param gameCount: number of recent ranked games to fetch
    """
    return watcher.match.matchlist_by_puuid("asia", infoList['puuid'], None, gameCount, None, "ranked")
def getUserLoc(matchInfo, playerName):
    """Return the participant index (0-9) of `playerName` in a match.

    Falls through (returning None implicitly) when the name is not
    among the ten participants, matching the original behavior.
    """
    participants = matchInfo['info']['participants']
    for idx in range(10):
        if participants[idx]['summonerName'] == playerName:
            return idx
def getMatchInfoByMatchID(matchList):
    """Resolve each match id in `matchList` to its full match-info dict."""
    return [watcher.match.by_id('asia', match_id) for match_id in matchList]
def getPositionKR(pos):
    """Translate a Riot teamPosition string to its Korean label.

    Any value other than the four mapped lanes falls through to the
    support label, exactly like the original if/elif chain.
    """
    labels = {
        "TOP": "탑",
        "JUNGLE": "정글",
        "MIDDLE": "미드",
        "BOTTOM": "원딜",
    }
    return labels.get(pos, "서폿")
def DeathKing(matchInfo, userLoc):
    """Return True when the player's deaths reach (game minutes - 5).

    A death count that high is treated as intentional feeding.
    """
    minutes = matchInfo['info']['gameDuration']
    # NOTE(review): the >= 1000 branch presumably distinguishes durations
    # reported in a different unit; /6000 looks off by a factor of 10
    # versus ms->minutes (/60000) -- confirm against the API data. The
    # original behavior is preserved here unchanged.
    if minutes >= 1000:
        minutes /= 6000
    else:
        minutes /= 60
    print("게임 시간 :", minutes)
    deaths = matchInfo['info']['participants'][userLoc]['deaths']
    return deaths >= minutes - 5
def buySameItems(singleMatchInfo, playerNum):
    """Detect suspicious item builds: 3+ duplicate-item pairs, or no items.

    Inspects inventory slots item0..item5 (the trinket slot item6 is not
    checked). Counts unordered pairs of identical non-empty items; an
    empty build (all six slots are 0) is also flagged.
    """
    participant = singleMatchInfo['info']['participants'][playerNum]
    items = [participant['item{}'.format(slot)] for slot in range(6)]
    empty_build = not any(items)
    duplicate_pairs = 0
    for a in range(6):
        for b in range(a + 1, 6):
            if items[a] == items[b] and items[a] != 0:
                duplicate_pairs += 1
    return duplicate_pairs >= 3 or empty_build
def UseCorrectSpell(singleMatchInfo, playerNum):
    """Judge whether the player's summoner-spell choice fits their role.

    Spell id 11 is Smite. Junglers must carry Smite (win/loss irrelevant);
    non-junglers carrying Smite are only excused when they won the game;
    non-junglers without Smite are always fine.
    """
    participant = singleMatchInfo['info']['participants'][playerNum]
    has_smite = 11 in (participant['summoner1Id'], participant['summoner2Id'])
    if participant['teamPosition'] == "JUNGLE":
        # Jungler: Smite is mandatory regardless of the result.
        return has_smite
    if not has_smite:
        # Non-jungler without Smite is always acceptable.
        return True
    # Non-jungler with Smite: acceptable only if the game was won.
    if participant['win']:
        return True
    else:
        return False
def damageDiffByPosition(matchInfo, userLoc):
    """Score how badly the player was out-damaged by their lane opponent.

    Finds the enemy participant holding the same teamPosition and
    compares total damage dealt. Returns 0 for supports or gaps below
    2x, the raw ratio for 2x-3x gaps, and a capped 3 beyond that.

    Fix: the original divided unguarded, so a player with zero
    totalDamageDealt raised ZeroDivisionError. A guard is added,
    returning 0 like the except branch in visionScoreDiffByPosition.

    :param matchInfo: full match-info dict from the Riot match-v5 API
    :param userLoc: participant index (0-9) of the player being scored
    :return: 0, a float ratio in [2, 3), or 3
    """
    position = matchInfo['info']['participants'][userLoc]['teamPosition']
    if position == 'UTILITY':
        # Supports are not judged on damage output.
        return 0
    # Participants 0-4 are one team, 5-9 the other; scan the enemy half
    # for the player in the same role (default to index 0 if unmatched,
    # as the original did).
    enemy_side = range(5, 10) if userLoc < 5 else range(0, 5)
    otherPlayerLoc = 0
    for j in enemy_side:
        if matchInfo['info']['participants'][j]['teamPosition'] == position:
            otherPlayerLoc = j
            break
    try:
        dmgDiff = (matchInfo['info']['participants'][otherPlayerLoc]['totalDamageDealt']
                   / matchInfo['info']['participants'][userLoc]['totalDamageDealt'])
    except ZeroDivisionError:
        # Consistent with the other *DiffByPosition helpers, which
        # return 0 when the ratio cannot be computed.
        return 0
    if dmgDiff >= 3:
        return 3
    elif dmgDiff >= 2:
        return dmgDiff
    else:
        return 0
def goldDiffByPostion(matchInfo, userLoc):
    """Score the gold deficit against the enemy player in the same position.

    Returns 0 for supports or gaps below 1.2x, the raw ratio for
    1.2x-3x gaps, and a capped 3 beyond that. (The typo in the function
    name is kept -- it is the public interface used by callers.)
    """
    position = matchInfo['info']['participants'][userLoc]['teamPosition']
    if position == 'UTILITY':
        # Supports are not judged on gold income.
        return 0
    # Scan the opposing half of the participant list for the same role.
    enemy_side = range(5, 10) if userLoc < 5 else range(0, 5)
    otherPlayerLoc = 0
    for j in enemy_side:
        if matchInfo['info']['participants'][j]['teamPosition'] == position:
            otherPlayerLoc = j
            break
    goldDiff = (matchInfo['info']['participants'][otherPlayerLoc]['goldEarned']
                / matchInfo['info']['participants'][userLoc]['goldEarned'])
    if goldDiff >= 3:
        return 3
    elif goldDiff >= 1.2:
        return goldDiff
    else:
        return 0
def visionScoreDiffByPosition(matchInfo, userLoc):
    """Score the vision-score gap versus the same-position enemy player.

    Returns 0 for supports, unreadable scores, or gaps below 1.2x; the
    raw ratio for 1.2x-3x gaps; and a capped 3 beyond that.
    """
    position = matchInfo['info']['participants'][userLoc]['teamPosition']
    if position == 'UTILITY':
        # Supports are excluded from this comparison.
        return 0
    # Scan the opposing half of the participant list for the same role.
    enemy_side = range(5, 10) if userLoc < 5 else range(0, 5)
    otherPlayerLoc = 0
    for j in enemy_side:
        if matchInfo['info']['participants'][j]['teamPosition'] == position:
            otherPlayerLoc = j
            break
    try:
        vScoreDiff = (matchInfo['info']['participants'][otherPlayerLoc]['visionScore']
                      / matchInfo['info']['participants'][userLoc]['visionScore'])
    except:
        # Broad except kept from the original: a zero vision score (or a
        # missing field) counts as no gap.
        return 0
    if vScoreDiff >= 3:
        return 3
    elif vScoreDiff >= 1.2:
        return vScoreDiff
    else:
        return 0
7955781663faf78bbb640c47ae14eed1462458d5 | 7,932 | py | Python | andes/models/governor.py | lacknc/Andes | 3664ff69406ac01381b675393d6ac96fcf080b25 | [
"Apache-2.0"
] | null | null | null | andes/models/governor.py | lacknc/Andes | 3664ff69406ac01381b675393d6ac96fcf080b25 | [
"Apache-2.0"
] | null | null | null | andes/models/governor.py | lacknc/Andes | 3664ff69406ac01381b675393d6ac96fcf080b25 | [
"Apache-2.0"
] | null | null | null | from cvxopt import matrix, sparse, spmatrix # NOQA
from cvxopt import mul, div, log, sin, cos # NOQA
from .base import ModelBase
from ..consts import Fx0, Fy0, Gx0, Gy0 # NOQA
from ..consts import Fx, Fy, Gx, Gy # NOQA
class GovernorBase(ModelBase):
"""Turbine governor base class"""
def __init__(self, system, name):
super(GovernorBase, self).__init__(system, name)
self._group = 'Governor'
self.param_remove('Vn')
self._data.update({
'gen': None,
'pmax': 999.0,
'pmin': 0.0,
'R': 0.05,
'wref0': 1.0,
})
self._descr.update({
'gen': 'Generator index',
'pmax': 'Maximum turbine output in Syn Sn',
'pmin': 'Minimum turbine output in Syn Sn',
'R': 'Speed regulation droop',
'wref0': 'Initial reference speed',
})
self._units.update({
'pmax': 'pu',
'pmin': 'pu',
'wref0': 'pu',
'R': 'pu'
})
self._params.extend(['pmax', 'pmin', 'R', 'wref0'])
self._algebs.extend(['wref', 'pout'])
self._fnamey.extend(['\\omega_{ref}', 'P_{out}'])
self._service.extend(['pm0', 'gain'])
self._mandatory.extend(['gen', 'R'])
self._powers.extend(['pmax', 'pmin'])
self.calls.update({
'init1': True,
'gcall': True,
'fcall': True,
'jac0': True,
})
def base(self):
if not self.n:
return
self.copy_data_ext(
model='Synchronous', field='Sn', dest='Sn', idx=self.gen)
super(GovernorBase, self).base()
self.R = self.system.mva * div(self.R, self.Sn)
def init1(self, dae):
self.gain = div(1.0, self.R)
# values
self.copy_data_ext(
model='Synchronous', field='pm0', dest='pm0', idx=self.gen)
# indices
self.copy_data_ext(
model='Synchronous', field='omega', dest='omega', idx=self.gen)
self.copy_data_ext(
model='Synchronous', field='pm', dest='pm', idx=self.gen)
self.init_limit(
key='pm0', lower=self.pmin, upper=self.pmax, limit=True)
dae.y[self.wref] = self.wref0
dae.y[self.pout] = self.pm0
def gcall(self, dae):
dae.g[self.pm] += self.pm0 - mul(
self.u, dae.y[self.pout]) # update the Syn.pm equations
dae.g[self.wref] = dae.y[self.wref] - self.wref0
def jac0(self, dae):
dae.add_jac(Gy0, -self.u, self.pm, self.pout)
dae.add_jac(Gy0, 1.0, self.wref, self.wref)
class TG1(GovernorBase):
    """Turbine governor model TG1.

    Adds a servo lead-lag stage (Tc, T3) and a reheat stage (T5, T4)
    after the droop block, with three state variables xg1..xg3 and one
    algebraic input variable pin.
    """
    def __init__(self, system, name):
        super(TG1, self).__init__(system, name)
        self._name = "TG1"
        self._data.update({
            'T3': 0.0,
            'T4': 12.0,
            'T5': 50.0,
            'Tc': 0.56,
            'Ts': 0.1,
        })
        self._params.extend(['T3', 'T4', 'T5', 'Tc', 'Ts'])
        self._descr.update({
            'T3': 'Transient gain time constant',
            'T4': 'Power fraction time constant',
            'T5': 'Reheat time constant',
            'Tc': 'Servo time constant',
            'Ts': 'Governor time constant',
        })
        self._units.update({
            'T3': 's',
            'T4': 's',
            'T5': 's',
            'Tc': 's',
            'Ts': 's'
        })
        self._mandatory.extend(['T5', 'Tc', 'Ts'])
        self._service.extend(['iTs', 'iTc', 'iT5', 'k1', 'k2', 'k3', 'k4'])
        self._states.extend(['xg1', 'xg2', 'xg3'])
        self._fnamex.extend(['x_{g1}', 'x_{g2}', 'x_{g3}'])
        self._algebs.extend(['pin'])
        self._fnamey.extend(['P_{in}'])
        self._init()

    def init1(self, dae):
        """Precompute inverse time constants/gain splits; set initial states."""
        super(TG1, self).init1(dae)
        self.iTs = div(1, self.Ts)
        self.iTc = div(1, self.Tc)
        self.iT5 = div(1, self.T5)
        # k1/k2 split the servo lead-lag; k3/k4 split the reheat block.
        self.k1 = mul(self.T3, self.iTc)
        self.k2 = 1 - self.k1
        self.k3 = mul(self.T4, self.iT5)
        self.k4 = 1 - self.k3
        dae.x[self.xg1] = mul(self.u, self.pm0)
        dae.x[self.xg2] = mul(self.u, self.k2, self.pm0)
        dae.x[self.xg3] = mul(self.u, self.k4, self.pm0)
        dae.y[self.pin] = self.pm0

    def fcall(self, dae):
        """Differential equations of the three governor state variables."""
        dae.f[self.xg1] = mul(self.u, dae.y[self.pin] - dae.x[self.xg1],
                              self.iTs)
        dae.f[self.xg2] = mul(self.u,
                              mul(self.k2, dae.x[self.xg1]) - dae.x[self.xg2],
                              self.iTc)
        dae.f[self.xg3] = mul(
            self.u,
            mul(self.k4, dae.x[self.xg2] + mul(self.k1, dae.x[self.xg1])) -
            dae.x[self.xg3], self.iT5)

    def gcall(self, dae):
        """Algebraic equations: droop input with hard limits, and output mix."""
        dae.g[self.pin] = self.pm0 + mul(
            self.gain, dae.y[self.wref] - dae.x[self.omega]) - dae.y[self.pin]
        dae.hard_limit(self.pin, self.pmin, self.pmax)
        dae.g[self.pout] = dae.x[self.xg3] + mul(
            self.k3,
            dae.x[self.xg2] + mul(self.k1, dae.x[self.xg1])) - dae.y[self.pout]
        super(TG1, self).gcall(dae)

    def jac0(self, dae):
        """Constant Jacobian entries.

        The small 1e-6 terms added on the diagonal presumably keep the
        Jacobian nonsingular when u == 0 -- confirm with the solver code.
        """
        super(TG1, self).jac0(dae)
        dae.add_jac(Gy0, -self.u + 1e-6, self.pin, self.pin)
        dae.add_jac(Gx0, -mul(self.u, self.gain), self.pin, self.omega)
        dae.add_jac(Gy0, mul(self.u, self.gain), self.pin, self.wref)
        dae.add_jac(Fx0, -mul(self.u, self.iTs) + 1e-6, self.xg1, self.xg1)
        dae.add_jac(Fy0, mul(self.u, self.iTs), self.xg1, self.pin)
        dae.add_jac(Fx0, mul(self.u, self.k2, self.iTc), self.xg2, self.xg1)
        dae.add_jac(Fx0, -mul(self.u, self.iTc), self.xg2, self.xg2)
        dae.add_jac(Fx0, mul(self.u, self.k4, self.iT5), self.xg3, self.xg2)
        dae.add_jac(Fx0, mul(self.u, self.k4, self.k1, self.iT5), self.xg3,
                    self.xg1)
        dae.add_jac(Fx0, -mul(self.u, self.iT5), self.xg3, self.xg3)
        dae.add_jac(Gx0, self.u, self.pout, self.xg3)
        dae.add_jac(Gx0, mul(self.u, self.k3), self.pout, self.xg2)
        dae.add_jac(Gx0, mul(self.u, self.k3, self.k1), self.pout, self.xg1)
        dae.add_jac(Gy0, -self.u + 1e-6, self.pout, self.pout)
class TG2(GovernorBase):
    """Simplified governor model.

    A single lead-lag block (T1/T2) acting on the speed deviation, with
    one state variable xg and hard limits on the output.
    """
    def __init__(self, system, name):
        super(TG2, self).__init__(system, name)
        self._name = 'TG2'
        self._data.update({
            'T1': 0.2,
            'T2': 10.0,
        })
        self._descr.update({
            'T1': 'Transient gain time constant',
            'T2': 'Governor time constant',
        })
        self._units.update({'T1': 's', 'T2': 's'})
        self._params.extend(['T1', 'T2'])
        self._service.extend(['T12', 'iT2'])
        self._mandatory.extend(['T2'])
        self._states.extend(['xg'])
        self._fnamex.extend(['x_g'])
        self._init()

    def init1(self, dae):
        """Precompute the lead-lag ratio T1/T2 and the inverse of T2."""
        super(TG2, self).init1(dae)
        self.T12 = div(self.T1, self.T2)
        self.iT2 = div(1, self.T2)

    def fcall(self, dae):
        """Differential equation of the single governor state xg."""
        dae.f[self.xg] = mul(
            self.iT2,
            mul(self.gain, 1 - self.T12, self.wref0 - dae.x[self.omega]) -
            dae.x[self.xg])

    def gcall(self, dae):
        """Algebraic output equation with hard output limits."""
        pm = dae.x[self.xg] + self.pm0 + mul(self.gain, self.T12,
                                             self.wref0 - dae.x[self.omega])
        dae.g[self.pout] = pm - dae.y[self.pout]
        dae.hard_limit(self.pout, self.pmin, self.pmax)
        super(TG2, self).gcall(dae)

    def jac0(self, dae):
        """Constant Jacobian entries for the TG2 equations."""
        super(TG2, self).jac0(dae)
        dae.add_jac(Fx0, -self.iT2, self.xg, self.xg)
        dae.add_jac(Fx0, -mul(self.iT2, self.gain, 1 - self.T12), self.xg,
                    self.omega)
        dae.add_jac(Gx0, 1.0, self.pout, self.xg)
        dae.add_jac(Gx0, -mul(self.gain, self.T12), self.pout, self.omega)
        dae.add_jac(Gy0, -1.0, self.pout, self.pout)
79557908af643ff61a993898842166e0b7e58271 | 4,271 | py | Python | toontown/leveleditor/ScrollMenu.py | chrisd149/OpenLevelEditor | 2527f84910b198a38de140b533d07d65fe30c6c9 | [
"MIT"
] | 25 | 2020-11-23T13:55:42.000Z | 2022-03-26T06:17:18.000Z | toontown/leveleditor/ScrollMenu.py | chrisd149/OpenLevelEditor | 2527f84910b198a38de140b533d07d65fe30c6c9 | [
"MIT"
] | 15 | 2020-11-25T14:47:24.000Z | 2021-04-21T23:40:57.000Z | toontown/leveleditor/ScrollMenu.py | chrisd149/OpenLevelEditor | 2527f84910b198a38de140b533d07d65fe30c6c9 | [
"MIT"
] | 11 | 2020-11-25T13:57:02.000Z | 2022-02-23T14:25:15.000Z | ###########################################################
# Class to create and maintain a scrolled list
# that can be embedded in a LevelAttribute instance
###########################################################
from direct.gui.DirectGui import *
from toontown.toonbase import ToontownGlobals
class ScrollMenu:
def __init__(self, nodePath, textList):
self.action = None # Call back fucntion
self.textList = textList
self.parent = nodePath
self.frame = None
self.initialState = None # To maintain backward compatibility
def createScrolledList(self):
# First create a frame in which direct elements maybe placed
self.frame = DirectFrame(scale = 1.1, relief = 1,
frameSize = (-0.5, 0.2, -0.05, 0.59),
frameColor = (0.737, 0.573, 0.345, 0.000))
numItemsVisible = 9
itemHeight = 0.05
gui = loader.loadModel("resources/level_editor_gui.bam")
myScrolledList = DirectScrolledList(
decButton_pos = (0.4, 0, 0.53),
decButton_scale = 0.1,
decButton_relief = None,
decButton_image = (
gui.find("**/arrow_u_n"),
gui.find("**/arrow_u_d"),
gui.find("**/arrow_u_r"),
gui.find("**/arrow_u_i")
),
incButton_pos = (0.4, 0, -0.02),
incButton_scale = 0.1,
incButton_relief = None,
incButton_image = (
gui.find("**/arrow_d_n"),
gui.find("**/arrow_d_d"),
gui.find("**/arrow_d_r"),
gui.find("**/arrow_d_i")
),
image = gui.find("**/editor_list_frame"),
image_pos = (.4, 0, 0.26),
image_scale = (1.4, 1, 0.7),
frameSize = (-0.1, 0.9, -0.05, 0.59),
frameColor = (0, 0, 1, 0.0),
pos = (-0.5, 0, 0),
items = [],
numItemsVisible = numItemsVisible,
forceHeight = itemHeight,
itemFrame_pos = (0.4, 0, 0.45),
)
for t in self.textList:
myScrolledList.addItem(DirectButton(text = (t, t, t),
text_scale = 0.05, command = self.__selected,
extraArgs = [t], relief = None, text_style = 3,
text_font = ToontownGlobals.getToonFont(),
text0_fg = (0.152, 0.750, 0.258, 1),
text1_fg = (0.152, 0.750, 0.258, 1),
text2_fg = (0.977, 0.816, 0.133, 1), ))
myScrolledList.reparentTo(self.frame)
# An exit button
b1 = DirectButton(parent = self.frame, text = "Exit", text_font = ToontownGlobals.getSignFont(),
text0_fg = (0.152, 0.750, 0.258, 1), text1_fg = (0.152, 0.750, 0.258, 1),
text2_fg = (0.977, 0.816, 0.133, 1), text_scale = 0.05, borderWidth = (0.01, 0.01),
relief = 1, command = self.__hide)
b1.setPos(0.15, 0, -0.025)
self.frame.reparentTo(self.parent)
def __selected(self, text):
if (self.action):
self.action(text)
def __hide(self):
self.frame.reparentTo(self.parent)
#######################################################
# Functions that allow compaitibility with the
# existing architecture that is tied into pie menu's
#######################################################
def spawnPieMenuTask(self):
# Where did the user press the button?
originX = base.direct.dr.mouseX
originY = base.direct.dr.mouseY
# Pop up menu
self.frame.reparentTo(aspect2d)
self.frame.setPos(originX, 0.0, originY)
def removePieMenuTask(self):
pass
def setInitialState(self, state):
self.initialState = state
def getInitialState(self):
return self.initialState
| 37.79646 | 109 | 0.465933 |
795579369523e4c2308fe7b1ff10bc1ad73e9197 | 5,061 | py | Python | pyqtgraph/canvas/CanvasTemplate_pyqt6.py | abbasegbeyemi/pyqtgraph | 6aeafce477d1d7eebb9d2fe824d4c5573ef9ceed | [
"MIT"
] | 69 | 2020-01-06T13:31:06.000Z | 2022-03-29T11:23:14.000Z | pyqtgraph/canvas/CanvasTemplate_pyqt6.py | abbasegbeyemi/pyqtgraph | 6aeafce477d1d7eebb9d2fe824d4c5573ef9ceed | [
"MIT"
] | 67 | 2019-11-30T14:45:05.000Z | 2022-03-14T20:26:06.000Z | pyqtgraph/canvas/CanvasTemplate_pyqt6.py | abbasegbeyemi/pyqtgraph | 6aeafce477d1d7eebb9d2fe824d4c5573ef9ceed | [
"MIT"
] | 13 | 2020-01-06T13:44:40.000Z | 2022-03-29T11:23:17.000Z | # Form implementation generated from reading ui file 'pyqtgraph\canvas\CanvasTemplate.ui'
#
# Created by: PyQt6 UI code generator 6.0.0
#
# WARNING: Any manual changes made to this file will be lost when pyuic6 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt6 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName("Form")
Form.resize(821, 578)
self.gridLayout_2 = QtWidgets.QGridLayout(Form)
self.gridLayout_2.setContentsMargins(0, 0, 0, 0)
self.gridLayout_2.setSpacing(0)
self.gridLayout_2.setObjectName("gridLayout_2")
self.splitter = QtWidgets.QSplitter(Form)
self.splitter.setOrientation(QtCore.Qt.Orientations.Horizontal)
self.splitter.setObjectName("splitter")
self.view = GraphicsView(self.splitter)
self.view.setObjectName("view")
self.vsplitter = QtWidgets.QSplitter(self.splitter)
self.vsplitter.setOrientation(QtCore.Qt.Orientations.Vertical)
self.vsplitter.setObjectName("vsplitter")
self.canvasCtrlWidget = QtWidgets.QWidget(self.vsplitter)
self.canvasCtrlWidget.setObjectName("canvasCtrlWidget")
self.gridLayout = QtWidgets.QGridLayout(self.canvasCtrlWidget)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.gridLayout.setObjectName("gridLayout")
self.autoRangeBtn = QtWidgets.QPushButton(self.canvasCtrlWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Policy.Minimum, QtWidgets.QSizePolicy.Policy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(1)
sizePolicy.setHeightForWidth(self.autoRangeBtn.sizePolicy().hasHeightForWidth())
self.autoRangeBtn.setSizePolicy(sizePolicy)
self.autoRangeBtn.setObjectName("autoRangeBtn")
self.gridLayout.addWidget(self.autoRangeBtn, 0, 0, 1, 2)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setSpacing(0)
self.horizontalLayout.setObjectName("horizontalLayout")
self.redirectCheck = QtWidgets.QCheckBox(self.canvasCtrlWidget)
self.redirectCheck.setObjectName("redirectCheck")
self.horizontalLayout.addWidget(self.redirectCheck)
self.redirectCombo = CanvasCombo(self.canvasCtrlWidget)
self.redirectCombo.setObjectName("redirectCombo")
self.horizontalLayout.addWidget(self.redirectCombo)
self.gridLayout.addLayout(self.horizontalLayout, 1, 0, 1, 2)
self.itemList = TreeWidget(self.canvasCtrlWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Policy.Expanding, QtWidgets.QSizePolicy.Policy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(100)
sizePolicy.setHeightForWidth(self.itemList.sizePolicy().hasHeightForWidth())
self.itemList.setSizePolicy(sizePolicy)
self.itemList.setHeaderHidden(True)
self.itemList.setObjectName("itemList")
self.itemList.headerItem().setText(0, "1")
self.gridLayout.addWidget(self.itemList, 2, 0, 1, 2)
self.resetTransformsBtn = QtWidgets.QPushButton(self.canvasCtrlWidget)
self.resetTransformsBtn.setObjectName("resetTransformsBtn")
self.gridLayout.addWidget(self.resetTransformsBtn, 3, 0, 1, 2)
self.mirrorSelectionBtn = QtWidgets.QPushButton(self.canvasCtrlWidget)
self.mirrorSelectionBtn.setObjectName("mirrorSelectionBtn")
self.gridLayout.addWidget(self.mirrorSelectionBtn, 4, 0, 1, 1)
self.reflectSelectionBtn = QtWidgets.QPushButton(self.canvasCtrlWidget)
self.reflectSelectionBtn.setObjectName("reflectSelectionBtn")
self.gridLayout.addWidget(self.reflectSelectionBtn, 4, 1, 1, 1)
self.canvasItemCtrl = QtWidgets.QWidget(self.vsplitter)
self.canvasItemCtrl.setObjectName("canvasItemCtrl")
self.ctrlLayout = QtWidgets.QGridLayout(self.canvasItemCtrl)
self.ctrlLayout.setContentsMargins(0, 0, 0, 0)
self.ctrlLayout.setSpacing(0)
self.ctrlLayout.setObjectName("ctrlLayout")
self.gridLayout_2.addWidget(self.splitter, 0, 0, 1, 1)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
_translate = QtCore.QCoreApplication.translate
Form.setWindowTitle(_translate("Form", "PyQtGraph"))
self.autoRangeBtn.setText(_translate("Form", "Auto Range"))
self.redirectCheck.setToolTip(_translate("Form", "Check to display all local items in a remote canvas."))
self.redirectCheck.setText(_translate("Form", "Redirect"))
self.resetTransformsBtn.setText(_translate("Form", "Reset Transforms"))
self.mirrorSelectionBtn.setText(_translate("Form", "Mirror Selection"))
self.reflectSelectionBtn.setText(_translate("Form", "MirrorXY"))
from ..widgets.GraphicsView import GraphicsView
from ..widgets.TreeWidget import TreeWidget
from .CanvasManager import CanvasCombo
| 54.419355 | 122 | 0.731278 |
7955796b612420f1c46039caa5095378cf80c404 | 2,746 | py | Python | models/alexnet.py | sytelus/convNet.pytorch | efc0f1aa428b190e492cc7ef5eedcd58ff46b1d8 | [
"MIT"
] | 1 | 2019-04-12T12:30:16.000Z | 2019-04-12T12:30:16.000Z | models/alexnet.py | sytelus/convNet.pytorch | efc0f1aa428b190e492cc7ef5eedcd58ff46b1d8 | [
"MIT"
] | null | null | null | models/alexnet.py | sytelus/convNet.pytorch | efc0f1aa428b190e492cc7ef5eedcd58ff46b1d8 | [
"MIT"
] | null | null | null | import torch.nn as nn
import torchvision.transforms as transforms
__all__ = ['alexnet']
class AlexNetOWT_BN(nn.Module):
def __init__(self, num_classes=1000):
super(AlexNetOWT_BN, self).__init__()
self.features = nn.Sequential(
nn.Conv2d(3, 64, kernel_size=11, stride=4, padding=2,
bias=False),
nn.MaxPool2d(kernel_size=3, stride=2),
nn.BatchNorm2d(64),
nn.ReLU(inplace=True),
nn.Conv2d(64, 192, kernel_size=5, padding=2, bias=False),
nn.MaxPool2d(kernel_size=3, stride=2),
nn.ReLU(inplace=True),
nn.BatchNorm2d(192),
nn.Conv2d(192, 384, kernel_size=3, padding=1, bias=False),
nn.ReLU(inplace=True),
nn.BatchNorm2d(384),
nn.Conv2d(384, 256, kernel_size=3, padding=1, bias=False),
nn.ReLU(inplace=True),
nn.BatchNorm2d(256),
nn.Conv2d(256, 256, kernel_size=3, padding=1, bias=False),
nn.MaxPool2d(kernel_size=3, stride=2),
nn.ReLU(inplace=True),
nn.BatchNorm2d(256)
)
self.classifier = nn.Sequential(
nn.Linear(256 * 6 * 6, 4096, bias=False),
nn.BatchNorm1d(4096),
nn.ReLU(inplace=True),
nn.Dropout(0.5),
nn.Linear(4096, 4096, bias=False),
nn.BatchNorm1d(4096),
nn.ReLU(inplace=True),
nn.Dropout(0.5),
nn.Linear(4096, num_classes)
)
self.regime = [
{'epoch': 0, 'optimizer': 'SGD', 'lr': 1e-2,
'weight_decay': 5e-4, 'momentum': 0.9},
{'epoch': 10, 'lr': 5e-3},
{'epoch': 15, 'lr': 1e-3, 'weight_decay': 0},
{'epoch': 20, 'lr': 5e-4},
{'epoch': 25, 'lr': 1e-4}
]
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
self.input_transform = {
'train': transforms.Compose([
transforms.Scale(256),
transforms.RandomCrop(224),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
normalize
]),
'eval': transforms.Compose([
transforms.Scale(256),
transforms.CenterCrop(224),
transforms.ToTensor(),
normalize
])
}
def forward(self, x):
x = self.features(x)
x = x.view(-1, 256 * 6 * 6)
x = self.classifier(x)
return x
def alexnet(**kwargs):
    """Construct an AlexNet (OWT variant with batch normalization).

    Keyword Args:
        num_classes (int): number of output classes (default 1000).
    """
    # Bug fix: `kwargs` is a plain dict, so getattr(kwargs, 'num_classes',
    # 1000) looked for an *attribute* and always returned the default,
    # silently ignoring the caller's value; dict.get reads the key.
    num_classes = kwargs.get('num_classes', 1000)
    return AlexNetOWT_BN(num_classes)
| 34.325 | 70 | 0.509104 |
79557a05cfb6cdf8246b2dc70de2ba6ec0b88220 | 833 | py | Python | google/ads/google_ads/v3/services/shopping_performance_view_service_client_config.py | andy0937/google-ads-python | cb5da7f4a75076828d1fc3524b08cc167670435a | [
"Apache-2.0"
] | 1 | 2019-11-30T23:42:39.000Z | 2019-11-30T23:42:39.000Z | google/ads/google_ads/v3/services/shopping_performance_view_service_client_config.py | andy0937/google-ads-python | cb5da7f4a75076828d1fc3524b08cc167670435a | [
"Apache-2.0"
] | null | null | null | google/ads/google_ads/v3/services/shopping_performance_view_service_client_config.py | andy0937/google-ads-python | cb5da7f4a75076828d1fc3524b08cc167670435a | [
"Apache-2.0"
] | 1 | 2020-03-13T00:14:31.000Z | 2020-03-13T00:14:31.000Z | config = {
"interfaces": {
"google.ads.googleads.v3.services.ShoppingPerformanceViewService": {
"retry_codes": {
"idempotent": [
"DEADLINE_EXCEEDED",
"UNAVAILABLE"
],
"non_idempotent": []
},
"retry_params": {
"default": {
"initial_retry_delay_millis": 5000,
"retry_delay_multiplier": 1.3,
"max_retry_delay_millis": 60000,
"initial_rpc_timeout_millis": 3600000,
"rpc_timeout_multiplier": 1.0,
"max_rpc_timeout_millis": 3600000,
"total_timeout_millis": 3600000
}
},
"methods": {
"GetShoppingPerformanceView": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default"
}
}
}
}
}
| 26.03125 | 72 | 0.542617 |
79557bc6b9e3f977b219d84947921d7d4efcf56b | 998 | py | Python | lanczos_bin/lanczos.py | chentyl/SLQ_analysis | 19a43dd3c1f034e1c3b22a92da9e9fc1666d4f46 | [
"MIT"
] | null | null | null | lanczos_bin/lanczos.py | chentyl/SLQ_analysis | 19a43dd3c1f034e1c3b22a92da9e9fc1666d4f46 | [
"MIT"
] | null | null | null | lanczos_bin/lanczos.py | chentyl/SLQ_analysis | 19a43dd3c1f034e1c3b22a92da9e9fc1666d4f46 | [
"MIT"
] | null | null | null | import numpy as np
import scipy as sp
def exact_lanczos(A, q0, k, reorth=True):
    """Run the Lanczos iteration, optionally with full reorthogonalization.

    Builds an orthonormal basis Q of the Krylov space span{q0, A q0, ...}
    and the entries of the symmetric tridiagonal matrix T (diagonal `a`,
    off-diagonal `b`) so that Q.T @ A @ Q == T up to round-off.

    Documentation fix: the original docstring described `A` as "entries
    of a diagonal matrix" and listed a parameter `B` that does not exist;
    the code multiplies `A @ Q[:, i-1]`, so `A` must be a full (n, n)
    operator.

    Parameters
    ----------
    A : (n, n) array_like
        Symmetric matrix (anything supporting the ``@`` product and
        exposing a ``dtype``).
    q0 : (n,) ndarray
        Starting vector; normalized internally.
    k : int
        Number of Lanczos iterations (columns of Q).
    reorth : bool, optional
        If True, reorthogonalize each new vector against all previous
        columns with one pass of classical Gram-Schmidt.

    Returns
    -------
    Q : (n, k) ndarray
        Orthonormal Lanczos vectors.
    (a, b) : tuple of ndarrays, shapes (k,) and (k-1,)
        Diagonal and off-diagonal entries of the tridiagonal matrix.
    """
    n = len(q0)
    Q = np.zeros((n, k), dtype=A.dtype)
    a = np.zeros(k, dtype=A.dtype)
    b = np.zeros(k - 1, dtype=A.dtype)

    Q[:, 0] = q0 / np.sqrt(q0.T @ q0)

    for i in range(1, k + 1):
        # expand Krylov space; subtract the previous off-diagonal term
        qi = A @ Q[:, i - 1] - b[i - 2] * Q[:, i - 2] if i > 1 else A @ Q[:, i - 1]

        a[i - 1] = qi.T @ Q[:, i - 1]
        qi -= a[i - 1] * Q[:, i - 1]

        if reorth:
            qi -= Q @ (Q.T @ qi)  # regular (classical) GS against all columns
            # for j in range(i-1):  # modified GS (a bit too slow)
            #     qi -= (qi.T@Q[:,j])*Q[:,j]

        if i < k:
            b[i - 1] = np.sqrt(qi.T @ qi)
            Q[:, i] = qi / b[i - 1]

    return Q, (a, b)
| 23.761905 | 64 | 0.45992 |
79557c92c45bf784188632998fb22214071e8770 | 3,450 | py | Python | tests/chainer_tests/functions_tests/normalization_tests/test_batch_normalization.py | ytoyama/yans_chainer_hackathon | 744e7a5a67da8dec2869879f0adfae2d43eaf75c | [
"MIT"
] | null | null | null | tests/chainer_tests/functions_tests/normalization_tests/test_batch_normalization.py | ytoyama/yans_chainer_hackathon | 744e7a5a67da8dec2869879f0adfae2d43eaf75c | [
"MIT"
] | null | null | null | tests/chainer_tests/functions_tests/normalization_tests/test_batch_normalization.py | ytoyama/yans_chainer_hackathon | 744e7a5a67da8dec2869879f0adfae2d43eaf75c | [
"MIT"
] | 1 | 2018-11-18T00:36:51.000Z | 2018-11-18T00:36:51.000Z | import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
# fully-connected usage
class TestBatchNormalization(unittest.TestCase):
    """Tests BatchNormalization on 2-D (batch, channel) inputs.

    Forward outputs are checked against a NumPy reference computation;
    gradients are checked against numerical differentiation, on both
    CPU and (when available) GPU.
    """
    # Axes over which the batch statistics are aggregated.
    aggr_axes = 0

    def setUp(self):
        self.func = functions.BatchNormalization(3)
        # Randomize gamma/beta so the affine part of BN is exercised.
        self.func.gamma = numpy.random.uniform(
            .5, 1, self.func.gamma.shape).astype(numpy.float32)
        self.func.beta = numpy.random.uniform(
            -1, 1, self.func.beta.shape).astype(numpy.float32)
        self.func.ggamma.fill(0)
        self.func.gbeta.fill(0)

        self.gamma = self.func.gamma.copy().reshape(1, 3)  # fixed on CPU
        self.beta = self.func.beta.copy().reshape(1, 3)  # fixed on CPU
        self.x = numpy.random.uniform(-1, 1, (7, 3)).astype(numpy.float32)
        self.gy = numpy.random.uniform(-1, 1, (7, 3)).astype(numpy.float32)

    def check_forward(self, x_data):
        """Compare the forward output with a NumPy batch-norm reference."""
        x = chainer.Variable(x_data)
        y = self.func(x)
        self.assertEqual(y.data.dtype, numpy.float32)

        mean = self.x.mean(axis=self.aggr_axes, keepdims=True)
        std = numpy.sqrt(
            self.x.var(axis=self.aggr_axes, keepdims=True) + self.func.eps)
        y_expect = self.gamma * (self.x - mean) / std + self.beta

        gradient_check.assert_allclose(y_expect, y.data)
        self.assertEqual(numpy.float32, y.data.dtype)

    @condition.retry(3)
    def test_forward_cpu(self):
        self.check_forward(self.x)

    @attr.gpu
    @condition.retry(3)
    def test_forward_gpu(self):
        self.func.to_gpu()
        self.check_forward(cuda.to_gpu(self.x))

    def check_backward(self, x_data, y_grad):
        """Compare analytic gradients with numerical differentiation."""
        x = chainer.Variable(x_data)
        y = self.func(x)
        y.grad = y_grad
        y.backward()

        func = y.creator
        f = lambda: func.forward((x.data,))
        gx, ggamma, gbeta = gradient_check.numerical_grad(
            f, (x.data, func.gamma, func.beta), (y.grad,), eps=1e-2)

        gradient_check.assert_allclose(gx, x.grad, rtol=1e-3, atol=1e-4)
        gradient_check.assert_allclose(ggamma, func.ggamma)
        gradient_check.assert_allclose(gbeta, func.gbeta)

    @condition.retry(3)
    def test_backward_cpu(self):
        self.check_backward(self.x, self.gy)

    @attr.gpu
    @condition.retry(3)
    def test_backward_gpu(self):
        self.func.to_gpu()
        self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
# convolutional usage
class TestBatchNormalization2D(TestBatchNormalization):
    """Same checks as the base class, but for 4-D (N, C, H, W) inputs."""
    # Statistics are aggregated over the batch and both spatial axes.
    aggr_axes = 0, 2, 3

    def setUp(self):
        self.func = functions.BatchNormalization(3)
        self.func.gamma = numpy.random.uniform(
            .5, 1, self.func.gamma.shape).astype(numpy.float32)
        self.func.beta = numpy.random.uniform(
            -1, 1, self.func.beta.shape).astype(numpy.float32)
        self.func.ggamma.fill(0)
        self.func.gbeta.fill(0)

        self.gamma = self.func.gamma.copy().reshape(1, 3, 1, 1)  # fixed on CPU
        self.beta = self.func.beta.copy().reshape(1, 3, 1, 1)  # fixed on CPU
        self.x = numpy.random.uniform(-1, 1,
                                      (7, 3, 2, 2)).astype(numpy.float32)
        self.gy = numpy.random.uniform(-1, 1,
                                       (7, 3, 2, 2)).astype(numpy.float32)
testing.run_module(__name__, __file__)
| 32.54717 | 79 | 0.628696 |
79557cfae3c0edeea6c032e6b4206659f33213c1 | 216 | py | Python | finetune-dataset/convert_png.py | ikhovryak/PyTorchHackathon | 7a75edeccaee15ff142f9561c1c98fe49ca81e8c | [
"MIT"
] | 1 | 2020-07-28T15:41:36.000Z | 2020-07-28T15:41:36.000Z | finetune-dataset/convert_png.py | ikhovryak/PyTorchHackathon | 7a75edeccaee15ff142f9561c1c98fe49ca81e8c | [
"MIT"
] | null | null | null | finetune-dataset/convert_png.py | ikhovryak/PyTorchHackathon | 7a75edeccaee15ff142f9561c1c98fe49ca81e8c | [
"MIT"
] | 1 | 2020-08-25T04:03:48.000Z | 2020-08-25T04:03:48.000Z | from PIL import Image
import os, sys
idx = 0
for d in os.listdir("./"):
# print(d)
if ".jpg" in d or ".jpeg" in d:
im = Image.open(d)
im.save("image_" + str(idx) + ".png")
idx += 1
| 16.615385 | 45 | 0.5 |
79557d8b09b31a37c4510903a7d7883ec2899bc9 | 800 | py | Python | laos/common/base/views.py | denismakogon/aiohttp-restful-api-service | 0b99d6e3a9b90860f6730e1784f10e2f4454b8c0 | [
"Apache-2.0"
] | null | null | null | laos/common/base/views.py | denismakogon/aiohttp-restful-api-service | 0b99d6e3a9b90860f6730e1784f10e2f4454b8c0 | [
"Apache-2.0"
] | null | null | null | laos/common/base/views.py | denismakogon/aiohttp-restful-api-service | 0b99d6e3a9b90860f6730e1784f10e2f4454b8c0 | [
"Apache-2.0"
] | null | null | null | # Author: Denys Makogon
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class ServiceViewBase(object):
    """Base class for API views: wraps payload data under a class-level key.

    Subclasses override ``view_key`` to label the payload; ``serialize``
    renders ``{view_key: data}`` via its Python repr as ASCII bytes.
    """

    # Label under which the wrapped data is exposed; overridden by subclasses.
    view_key = None

    def __init__(self, data):
        self.data = data

    def serialize(self):
        """Return the keyed payload as ASCII-encoded bytes of its repr."""
        payload = {self.view_key: self.data}
        return str(payload).encode('ascii')
| 32 | 78 | 0.69875 |
79557e49c8bdc67ec572cafab139bb36dae64674 | 16,052 | py | Python | idaes/core/base/tests/test_control_volume_base.py | OOAmusat/idaes-pse | ae7d3bb8e372bc32822dcdcb75e9fd96b78da539 | [
"RSA-MD"
] | null | null | null | idaes/core/base/tests/test_control_volume_base.py | OOAmusat/idaes-pse | ae7d3bb8e372bc32822dcdcb75e9fd96b78da539 | [
"RSA-MD"
] | null | null | null | idaes/core/base/tests/test_control_volume_base.py | OOAmusat/idaes-pse | ae7d3bb8e372bc32822dcdcb75e9fd96b78da539 | [
"RSA-MD"
] | 1 | 2022-03-17T11:08:43.000Z | 2022-03-17T11:08:43.000Z | #################################################################################
# The Institute for the Design of Advanced Energy Systems Integrated Platform
# Framework (IDAES IP) was produced under the DOE Institute for the
# Design of Advanced Energy Systems (IDAES), and is copyright (c) 2018-2021
# by the software owners: The Regents of the University of California, through
# Lawrence Berkeley National Laboratory, National Technology & Engineering
# Solutions of Sandia, LLC, Carnegie Mellon University, West Virginia University
# Research Corporation, et al. All rights reserved.
#
# Please see the files COPYRIGHT.md and LICENSE.md for full copyright and
# license information.
#################################################################################
"""
Tests for ControlVolumeBlockData.
Author: Andrew Lee
"""
import inspect
import pytest
from pyomo.environ import ConcreteModel, Block, Set, units
from pyomo.common.config import ConfigBlock, ConfigValue
from idaes.core import (
ControlVolumeBlockData,
CONFIG_Template,
MaterialBalanceType,
EnergyBalanceType,
MomentumBalanceType,
FlowDirection,
declare_process_block_class,
FlowsheetBlockData,
UnitModelBlockData,
useDefault,
PhysicalParameterBlock,
ReactionParameterBlock,
MaterialFlowBasis,
StateBlock,
StateBlockData,
ReactionBlockBase,
ReactionBlockDataBase,
)
from idaes.core.util.exceptions import (
ConfigurationError,
DynamicError,
PropertyPackageError,
BurntToast,
)
# -----------------------------------------------------------------------------
# Test Enumerators for balance type options
@pytest.mark.unit
def test_material_balance_type():
    """MaterialBalanceType exposes exactly 6 members and rejects unknown names."""
    assert len(MaterialBalanceType) == 6
    # Test that error is raised when given non-member
    with pytest.raises(AttributeError):
        MaterialBalanceType.foo  # pylint: disable=no-member
@pytest.mark.unit
def test_energy_balance_type():
    """EnergyBalanceType exposes exactly 6 members and rejects unknown names."""
    assert len(EnergyBalanceType) == 6
    # Test that error is raised when given non-member
    with pytest.raises(AttributeError):
        EnergyBalanceType.foo  # pylint: disable=no-member
@pytest.mark.unit
def test_momentum_balance_type():
    """MomentumBalanceType exposes exactly 5 members and rejects unknown names."""
    assert len(MomentumBalanceType) == 5
    # Test that error is raised when given non-member
    with pytest.raises(AttributeError):
        MomentumBalanceType.foo  # pylint: disable=no-member
@pytest.mark.unit
def testflow_direction():
    """FlowDirection exposes exactly 2 members and rejects unknown names."""
    # NOTE(review): name is missing an underscore (test_flow_direction) but is
    # still collected by pytest because it starts with "test".
    assert len(FlowDirection) == 2
    # Test that error is raised when given non-member
    with pytest.raises(AttributeError):
        FlowDirection.foo  # pylint: disable=no-member
# -----------------------------------------------------------------------------
# Test CONFIG_Template
@pytest.mark.unit
def test_CONFIG_Template():
    """Check the default value of every entry in a fresh CONFIG_Template."""
    c = CONFIG_Template()
    assert len(c) == 18
    # Walk every declared option and check its documented default.
    for i in c:
        if i == "dynamic":
            assert c[i] == useDefault
        elif i == "material_balance_type":
            assert c[i] == MaterialBalanceType.componentPhase
        elif i == "energy_balance_type":
            assert c[i] == EnergyBalanceType.enthalpyTotal
        elif i == "momentum_balance_type":
            assert c[i] == MomentumBalanceType.pressureTotal
        elif i == "property_package":
            assert c[i] == useDefault
        elif i == "reaction_package":
            assert c[i] is None
        elif i in ["property_package_args", "reaction_package_args"]:
            assert isinstance(c[i], ConfigBlock)
            assert len(c[i]) == 0
        else:
            # All remaining options are has_* style booleans defaulting False.
            assert c[i] is False
@pytest.mark.unit
def test_CONFIG_Template_validation_general():
    """Every config entry must reject strings, numbers and lists."""
    # No config argument takes a string, float/int or list
    c = CONFIG_Template()
    for i in c:
        with pytest.raises(ValueError):
            c[i] = "foo"
        with pytest.raises(ValueError):
            c[i] = 10.0
        with pytest.raises(ValueError):
            c[i] = [1, 2]
@pytest.mark.unit
def test_CONFIG_Template_true_false():
    """Boolean-style config entries must accept both True and False."""
    # Check arguments that accept True/False as values
    c = CONFIG_Template()
    for i in c:
        if i not in [
            "material_balance_type",
            "energy_balance_type",
            "momentum_balance_type",
            "property_package",
            "reaction_package",
            "property_package_args",
            "reaction_package_args",
        ]:
            c[i] = True
            c[i] = False
@pytest.mark.unit
def test_CONFIG_Template_material_balance_type():
    """material_balance_type must accept every MaterialBalanceType member."""
    c = CONFIG_Template()
    for i in MaterialBalanceType:
        c["material_balance_type"] = i
@pytest.mark.unit
def test_CONFIG_Template_energy_balance_type():
    """energy_balance_type must accept every EnergyBalanceType member."""
    c = CONFIG_Template()
    for i in EnergyBalanceType:
        c["energy_balance_type"] = i
@pytest.mark.unit
def test_CONFIG_Template_momentum_balance_type():
    """momentum_balance_type must accept every MomentumBalanceType member."""
    c = CONFIG_Template()
    for i in MomentumBalanceType:
        c["momentum_balance_type"] = i
# -----------------------------------------------------------------------------
# Mockup classes for testing
@declare_process_block_class("Flowsheet")
class _Flowsheet(FlowsheetBlockData):
    # Minimal concrete flowsheet: just runs the base-class build.
    def build(self):
        super(_Flowsheet, self).build()
@declare_process_block_class("Unit")
class _UnitData(UnitModelBlockData):
    # Minimal unit model that adds property_package(+args) config entries.
    CONFIG = UnitModelBlockData.CONFIG()
    CONFIG.declare("property_package", ConfigValue(default=None))
    # NOTE(review): mutable dict default shared via ConfigValue — presumably
    # safe under Pyomo's config machinery; confirm.
    CONFIG.declare("property_package_args", ConfigValue(default={}))
    def build(self):
        super(_UnitData, self).build()
# -----------------------------------------------------------------------------
# Testing ControlVolumeBlockData
@declare_process_block_class("CVFrame")
class CVFrameData(ControlVolumeBlockData):
    def build(self):
        # Deliberately calls ControlVolumeBlockData's *parent* build (skips
        # ControlVolumeBlockData.build itself) so tests can drive the base
        # methods manually.
        super(ControlVolumeBlockData, self).build()
@pytest.mark.unit
def test_config_block():
    """A bare CVFrame must expose the 7 documented config entries with defaults."""
    cv = CVFrame(concrete=True)
    assert len(cv.config) == 7
    assert cv.config.dynamic == useDefault
    assert cv.config.has_holdup is useDefault
    assert cv.config.property_package == useDefault
    assert isinstance(cv.config.property_package_args, ConfigBlock)
    assert len(cv.config.property_package_args) == 0
    assert cv.config.reaction_package is None
    assert isinstance(cv.config.reaction_package_args, ConfigBlock)
    assert len(cv.config.reaction_package_args) == 0
    assert cv.config.auto_construct is False
# -----------------------------------------------------------------------------
# Test _setup_dynamics
@pytest.mark.unit
def test_setup_dynamics_use_parent_value():
    """dynamic/has_holdup inherit from the parent unit when left at default."""
    # Test that dynamic = None works correctly
    m = ConcreteModel()
    m.fs = Flowsheet(default={"dynamic": False})
    m.fs.u = Unit(default={"dynamic": False})
    m.fs.u.cv = CVFrame()
    m.fs.u.cv._setup_dynamics()
    assert m.fs.u.cv.config.dynamic is False
    assert m.fs.u.cv.config.has_holdup is False
@pytest.mark.unit
def test_setup_dynamics_use_parent_value_fail_no_dynamic():
    """When the parent block has no dynamic flag, fall back to the flowsheet."""
    # Test that default falls back to flowsheet
    fs = Flowsheet(default={"dynamic": False}, concrete=True)
    # Create a Block (with no dynamic attribute)
    fs.b = Block()
    fs.b.cv = CVFrame()
    fs.b.cv._setup_dynamics()
    assert fs.b.cv.config.dynamic is False
@pytest.mark.unit
def test_setup_dynamics_dynamic_in_ss():
    """A dynamic control volume inside a steady-state flowsheet must raise."""
    # Test that dynamic = None works correctly
    fs = Flowsheet(default={"dynamic": False}, concrete=True)
    # Create a Block (with no dynamic attribute)
    fs.b = Block()
    # Add a time attribute to make sure the correct failure triggers
    fs.b.time_ref = Set(initialize=[0])
    fs.b.cv = CVFrame(default={"dynamic": True, "has_holdup": True})
    # _setup_dynamics should return DynamicError
    with pytest.raises(DynamicError):
        fs.b.cv._setup_dynamics()
@pytest.mark.unit
def test_setup_dynamics_dynamic_holdup_inconsistent():
    """dynamic=True with has_holdup=False is inconsistent and must raise."""
    # Test that dynamic = None works correctly
    fs = Flowsheet(default={"dynamic": True, "time_units": units.s}, concrete=True)
    # Create a Block (with no dynamic attribute)
    fs.b = Block()
    # Add a time attribute to make sure the correct failure triggers
    fs.b.time_ref = Set(initialize=[0])
    fs.b.cv = CVFrame(default={"dynamic": True, "has_holdup": False})
    # _setup_dynamics should return ConfigurationError
    with pytest.raises(ConfigurationError):
        fs.b.cv._setup_dynamics()
# -----------------------------------------------------------------------------
# Test _get_property_package
@declare_process_block_class("PropertyParameterBlock")
class _PropertyParameterBlock(PhysicalParameterBlock):
    # Mock parameter block providing the minimal phase/component sets the
    # control volume expects.
    def build(self):
        super(_PropertyParameterBlock, self).build()
        frm = inspect.stack()[1]
        self._package_module = inspect.getmodule(frm[0])
        self.phase_list = Set(initialize=["p1", "p2"])
        self.component_list = Set(initialize=["c1", "c2"])
@pytest.mark.unit
def test_get_property_package_set():
    """_get_property_package accepts an explicitly provided parameter block."""
    m = ConcreteModel()
    m.pp = PropertyParameterBlock()
    m.cv = CVFrame(default={"property_package": m.pp})
    m.cv._get_property_package()
@pytest.mark.unit
def test_get_property_package_default_args():
    """default_arguments on the parameter block propagate into the CV config."""
    m = ConcreteModel()
    m.pp = PropertyParameterBlock(default={"default_arguments": {"test": "foo"}})
    m.cv = CVFrame(default={"property_package": m.pp})
    m.cv._get_property_package()
    assert m.cv.config.property_package_args["test"] == "foo"
@pytest.mark.unit
def test_get_reaction_package_module_combine_args():
    """Local property_package_args override default_arguments where they clash."""
    # Test that local and default args combine correctly
    m = ConcreteModel()
    m.pp = PropertyParameterBlock(
        default={"default_arguments": {"test1": "foo", "test2": "bar"}}
    )
    m.cv = CVFrame(
        default={
            "property_package": m.pp,
            "property_package_args": {"test2": "baz", "test3": "bar"},
        }
    )
    m.cv._get_property_package()
    assert m.cv.config.property_package_args["test1"] == "foo"
    assert m.cv.config.property_package_args["test2"] == "baz"
    assert m.cv.config.property_package_args["test3"] == "bar"
# -----------------------------------------------------------------------------
# Test _get_default_prop_pack
@pytest.mark.unit
def test_get_default_prop_pack_works():
    """The flowsheet's default_property_package is found and returned."""
    m = ConcreteModel()
    m.fs = Flowsheet()
    m.fs.pp = PropertyParameterBlock()
    m.fs.config.default_property_package = m.fs.pp
    m.fs.cv = CVFrame()
    assert m.fs.cv._get_default_prop_pack() == m.fs.pp
# TODO : should test more failure modes
@pytest.mark.unit
def test_get_default_prop_pack_no_default():
    """With no flowsheet default, _get_default_prop_pack must raise."""
    m = ConcreteModel()
    m.fs = Flowsheet()
    m.fs.cv = CVFrame()
    with pytest.raises(ConfigurationError):
        m.fs.cv._get_default_prop_pack()
@pytest.mark.unit
def test_get_property_package_call_to_get_default_prop_pack():
    """_get_property_package falls back to the flowsheet default package."""
    m = ConcreteModel()
    m.fs = Flowsheet()
    m.fs.pp = PropertyParameterBlock()
    m.fs.config.default_property_package = m.fs.pp
    m.fs.cv = CVFrame()
    m.fs.cv._get_property_package()
    assert m.fs.cv.config.property_package == m.fs.pp
# -----------------------------------------------------------------------------
# Test _get_indexing_sets
@pytest.mark.unit
def test_get_indexing_sets_missing_phase_list():
    """A property package without phase_list must trigger PropertyPackageError."""
    m = ConcreteModel()
    m.pp = PropertyParameterBlock()
    m.pp.del_component(m.pp.phase_list)
    m.cv = CVFrame(default={"property_package": m.pp})
    m.cv._get_property_package()
    with pytest.raises(PropertyPackageError):
        m.cv._get_indexing_sets()
@pytest.mark.unit
def test_get_indexing_sets_missing_component_list():
    """A property package without component_list must trigger PropertyPackageError."""
    m = ConcreteModel()
    m.pp = PropertyParameterBlock()
    m.pp.del_component(m.pp.component_list)
    m.cv = CVFrame(default={"property_package": m.pp})
    m.cv._get_property_package()
    with pytest.raises(PropertyPackageError):
        m.cv._get_indexing_sets()
# -----------------------------------------------------------------------------
# Test _get_reaction_package
@pytest.mark.unit
def test_get_reaction_package_none():
    """With no reaction package configured, none is attached."""
    m = ConcreteModel()
    m.r = CVFrame()
    m.r._get_reaction_package()
    assert hasattr(m.r, "reaction_module") is False
@declare_process_block_class("ReactionParameterTestBlock")
class _ReactionParameterBlock(ReactionParameterBlock):
    # Mock reaction parameter block; records the calling module for tests.
    def build(self):
        super(ReactionParameterBlock, self).build()
        frm = inspect.stack()[1]
        self._package_module = inspect.getmodule(frm[0])
@pytest.mark.unit
def test_get_reaction_package_module():
    """An explicit reaction package and its default arguments are picked up."""
    m = ConcreteModel()
    m.rp = ReactionParameterTestBlock(default={"default_arguments": {"test": "foo"}})
    m.cv = CVFrame(default={"reaction_package": m.rp})
    m.cv._get_reaction_package()
    assert m.cv.config.reaction_package == m.rp
    assert m.cv.config.reaction_package_args["test"] == "foo"
@pytest.mark.unit
def test_get_reaction_package_module_default_args():
    """Local reaction_package_args override default_arguments where they clash."""
    # Test that local and default args combine correctly
    m = ConcreteModel()
    m.rp = ReactionParameterTestBlock(
        default={"default_arguments": {"test1": "foo", "test2": "bar"}}
    )
    m.cv = CVFrame(
        default={
            "reaction_package": m.rp,
            "reaction_package_args": {"test2": "baz", "test3": "bar"},
        }
    )
    m.cv._get_reaction_package()
    assert m.cv.config.reaction_package_args["test1"] == "foo"
    assert m.cv.config.reaction_package_args["test2"] == "baz"
    assert m.cv.config.reaction_package_args["test3"] == "bar"
# -----------------------------------------------------------------------------
# Test build and auto_construct methods
@pytest.mark.unit
def test_build():
    """Base-class build succeeds when a property package is supplied."""
    m = ConcreteModel()
    m.fs = Flowsheet()
    m.fs.pp = PropertyParameterBlock()
    m.fs.cv = CVFrame(default={"property_package": m.fs.pp})
    # Call ControlVolumeBlockData.build directly (CVFrameData.build skips it).
    super(CVFrameData, m.fs.cv).build()
@pytest.mark.unit
def test_add_geometry():
    """add_geometry is abstract on the base class and must raise."""
    m = ConcreteModel()
    m.fs = Flowsheet()
    m.fs.cv = CVFrame()
    with pytest.raises(NotImplementedError):
        m.fs.cv.add_geometry()
@pytest.mark.unit
def test_auto_construct():
    """auto_construct=True drives the abstract methods, so build must raise."""
    m = ConcreteModel()
    m.fs = Flowsheet()
    m.fs.pp = PropertyParameterBlock()
    m.fs.cv = CVFrame(default={"property_package": m.fs.pp, "auto_construct": True})
    with pytest.raises(NotImplementedError):
        super(CVFrameData, m.fs.cv).build()
# -----------------------------------------------------------------------------
# Test NotImplementedErrors for all property and balance type methods
@pytest.mark.unit
def test_add_state_blocks():
    """add_state_blocks is abstract on the base class and must raise."""
    m = ConcreteModel()
    m.cv = CVFrame()
    with pytest.raises(NotImplementedError):
        m.cv.add_state_blocks()
@pytest.mark.unit
def test_add_reaction_blocks():
    """add_reaction_blocks is abstract on the base class and must raise."""
    m = ConcreteModel()
    m.cv = CVFrame()
    with pytest.raises(NotImplementedError):
        m.cv.add_reaction_blocks()
@pytest.mark.unit
def test_add_material_balances():
    """none -> no-op, useDefault -> ConfigurationError, rest -> NotImplementedError."""
    m = ConcreteModel()
    m.cv = CVFrame()
    for t in MaterialBalanceType:
        if t == MaterialBalanceType.none:
            assert m.cv.add_material_balances(t) is None
        elif t == MaterialBalanceType.useDefault:
            with pytest.raises(ConfigurationError):
                m.cv.add_material_balances(t)
        else:
            with pytest.raises(NotImplementedError):
                m.cv.add_material_balances(t)
@pytest.mark.unit
def test_add_energy_balances():
    """none -> no-op, useDefault -> ConfigurationError, rest -> NotImplementedError."""
    m = ConcreteModel()
    m.cv = CVFrame()
    for t in EnergyBalanceType:
        if t == EnergyBalanceType.none:
            assert m.cv.add_energy_balances(t) is None
        elif t == EnergyBalanceType.useDefault:
            with pytest.raises(ConfigurationError):
                m.cv.add_energy_balances(t)
        else:
            with pytest.raises(NotImplementedError):
                m.cv.add_energy_balances(t)
@pytest.mark.unit
def test_add_momentum_balances():
    """none -> no-op, every other member -> NotImplementedError."""
    m = ConcreteModel()
    m.cv = CVFrame()
    for t in MomentumBalanceType:
        if t == MomentumBalanceType.none:
            assert m.cv.add_momentum_balances(t) is None
        else:
            with pytest.raises(NotImplementedError):
                m.cv.add_momentum_balances(t)
| 29.67098 | 85 | 0.643534 |
79557eef7fb7f8c7f84ddb06392032a4edc0f3c1 | 3,381 | py | Python | centralreport/cr/utils/text.py | Ninir/CentralReport | 655b060ae0aa7df404f3799824e7635f5997e662 | [
"Apache-2.0",
"ZPL-2.0"
] | 1 | 2015-11-03T14:32:01.000Z | 2015-11-03T14:32:01.000Z | centralreport/cr/utils/text.py | Ninir/CentralReport | 655b060ae0aa7df404f3799824e7635f5997e662 | [
"Apache-2.0",
"ZPL-2.0"
] | null | null | null | centralreport/cr/utils/text.py | Ninir/CentralReport | 655b060ae0aa7df404f3799824e7635f5997e662 | [
"Apache-2.0",
"ZPL-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
CentralReport - Text module
Contains useful functions to working with strings
https://github.com/miniche/CentralReport/
"""
import math
def removeSpecialsCharacters(text):
    """
    Removes specials characters in string (\n, \r and \l).
    """
    # Note: '\l' is not a recognised escape sequence, so it denotes a literal
    # backslash followed by "l", exactly as in the original implementation.
    return text.replace('\n', '').replace('\r', '').replace('\l', '')
def numberSeparators(number, separator=' '):
    """
    Adds a separator every 3 digits in the number.

    number: int, float or string to format.
    separator: string inserted between each 3-digit group (default: space).
    Returns the formatted string; any decimal part is left untouched.
    """
    if not isinstance(number, str):
        number = str(number)
    # Work on the integer part only; keep the decimal part (if any) verbatim.
    str_number = number.split('.')
    if len(str_number[0]) <= 3:
        str_number[0] = str_number[0]
    else:
        # Recurse on everything but the last 3 digits.
        # Bug fix: the recursive call previously dropped the custom separator
        # and always fell back to the default space.
        str_number[0] = (
            numberSeparators(str_number[0][:-3], separator)
            + separator
            + str_number[0][-3:]
        )
    # Verify if the var "number" have a decimal part.
    if len(str_number) > 1:
        return str_number[0] + '.' + str_number[1]
    return str_number[0]
def textToBool(text):
    """
    Converts a text to a boolean.
    """
    # Only a small whitelist of strings maps to True; anything else is False.
    return text in ('True', 'true', 't', 'T', '1')
def secondsToPhraseTime(seconds):
    """
    Converts seconds to a phrase time (ex: 65 = 1 minute 5 seconds).
    """
    # Units processed from largest to smallest; each fires only when the
    # remainder is STRICTLY greater than the unit size (matching the original
    # boundary behaviour). Seconds are always appended, even when zero.
    units_table = (
        ('year', 60 * 60 * 24 * 365),
        ('day', 60 * 60 * 24),
        ('hour', 60 * 60),
        ('minute', 60),
    )
    remaining = seconds
    phrase = ''
    for unit_name, unit_size in units_table:
        if remaining > unit_size:
            count = math.floor(remaining / unit_size)
            remaining = remaining - count * unit_size
            suffix = '' if count == 1 else 's'
            phrase += '%s %s%s ' % (int(count), unit_name, suffix)
    suffix = '' if remaining == 1 else 's'
    phrase += '%s second%s ' % (int(remaining), suffix)
    return str(phrase)
def convertByte(byte_to_convert):
    """
    Converts byte to most biggest unit.
    """
    # Try the largest unit first; the first one for which the value is at
    # least 1 wins. Values are rounded to 2 decimals (0 for plain bytes).
    thresholds = (
        (1024 ** 4, " TB"),
        (1024 ** 3, " GB"),
        (1024 ** 2, " MB"),
        (1024, " KB"),
    )
    for size, label in thresholds:
        if byte_to_convert / size >= 1:
            return str(round(byte_to_convert / size, 2)) + label
    return str(round(byte_to_convert, 0)) + " B"
| 26.414063 | 105 | 0.608104 |
79557efd3887efedb4829d406cb947a39c8ff247 | 1,063 | py | Python | swagger_server/test/test_metrics_controller.py | DITAS-Project/data-analytics | e337aa707129b02750162f0cd60b5199a07ade22 | [
"Apache-2.0"
] | null | null | null | swagger_server/test/test_metrics_controller.py | DITAS-Project/data-analytics | e337aa707129b02750162f0cd60b5199a07ade22 | [
"Apache-2.0"
] | 7 | 2019-03-04T17:48:48.000Z | 2019-11-04T14:11:30.000Z | swagger_server/test/test_metrics_controller.py | DITAS-Project/data-analytics | e337aa707129b02750162f0cd60b5199a07ade22 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
from __future__ import absolute_import
from flask import json
from six import BytesIO
from swagger_server.models.metric_res import MetricRes # noqa: E501
from swagger_server.test import BaseTestCase
class TestMetricsController(BaseTestCase):
    """MetricsController integration test stubs"""
    def test_getmetrics(self):
        """Test case for getmetrics
        """
        # Exercise GET /data-analytics/meter/{vdcId}/ with every supported
        # query parameter set to a placeholder value, and require HTTP 200.
        query_string = [('vdcId', 'vdcId_example'),
                        ('operationID', 'operationID_example'),
                        ('name', 'name_example'),
                        ('startTime', '2013-10-20T19:20:30+01:00'),
                        ('endTime', '2013-10-20T19:20:30+01:00')]
        response = self.client.open(
            '/data-analytics/meter/{vdcId}/'.format(vdcId='vdcId_example'),
            method='GET',
            query_string=query_string)
        self.assert200(response,
                       'Response body is : ' + response.data.decode('utf-8'))
if __name__ == '__main__':
    import unittest
    unittest.main()
| 29.527778 | 77 | 0.594544 |
7955816b9b21e4dff735f40d8ad1053707638d9b | 6,609 | py | Python | CoNSEPT/Dataset.py | PayamDiba/CoNSEPT | 496a549d2cbfb89d13bfd7683fb16a93554af71e | [
"MIT"
] | 1 | 2021-03-17T18:37:12.000Z | 2021-03-17T18:37:12.000Z | CoNSEPT/Dataset.py | PayamDiba/CoNSEPT | 496a549d2cbfb89d13bfd7683fb16a93554af71e | [
"MIT"
] | null | null | null | CoNSEPT/Dataset.py | PayamDiba/CoNSEPT | 496a549d2cbfb89d13bfd7683fb16a93554af71e | [
"MIT"
] | 3 | 2021-03-08T04:12:18.000Z | 2021-11-19T15:58:00.000Z | """
@author: Payam Dibaeinia
"""
import numpy as np
import pandas as pd
import tensorflow as tf
from CoNSEPT.DataBuilder import InlineData
import os
class dataset(object):
    """Holds enhancer sequences plus gene/TF expression tables and exposes
    them as batched TensorFlow datasets via an InlineData backend.
    NOTE(review): class name is lower-case, contrary to PEP 8 — kept for
    backward compatibility with existing callers.
    """
    def __init__(self,
                 seq_file,
                 expression_file,
                 TF_file,
                 nBins,
                 nTrain,
                 nValid,
                 nTest,
                 out_path,
                 LLR = False,
                 training = True):
        """
        seq_file: path to A FASTA formatted sequence file
        expression_file: path to expression file of each enhancer in one or more conditions/bins
        TF_file: path to expression file of TFs in the same conditions of expression_file
        PWM: path to the PWM file
        nTrain: first nTrain enhancers are used for training
        nValid: next nValid enhancers are used for validation
        nTest: last nTest enhancers are used for testing
        nBins: number of conditions in the expression and TF file
        training: if False, all data is scanned with no offset, otherwise only test data is scanned with no offset. It can
        be considered as an augmentation method.
        """
        # NOTE(review): LLR and training are accepted but never used in this
        # class body — presumably consumed elsewhere or vestigial; confirm.
        self.nTrain_ = nTrain
        self.nTest_ = nTest
        self.nValid_ = nValid
        self.nBins_ = nBins
        self.tfExpr = self.read_TF_file(TF_file)
        seqExpr = self.read_expr_file(expression_file)
        seq_names, max_length, aug = self._read_seq(seq_file)
        # Sequence names in the FASTA file must match the expression table's
        # row index exactly (same order, not just same set).
        if seq_names != list(seqExpr.index):
            raise ValueError('Input files are inconsistent, use the same order for sequences in the input sequence and expression files')
        if self.tfExpr.shape[1] != nBins or seqExpr.shape[1] != nBins:
            raise ValueError('Input files are inconsistent, tf or gene expression files have different number of conditions than nBins')
        if aug: # Note that augmentation updates nTrain and seq_names
            self.nTrain_, seq_file, seq_names = self._augment_data(seq_file, max_length, nTrain, out_path)
        self.data_ = InlineData(seq_file, seq_names, seqExpr, self.tfExpr, self.nTrain_, nValid, nTest, nBins)
    def _read_seq(self, seq_file):
        """ Reads sequences, extracts sequences names and the max length. Also determines wether augmentation/padding is needed
        seq_file: input sequence file
        return: seq_names, max_length, augmentation
        """
        seq_names = []
        max_length = 0
        aug = False
        with open(seq_file,'r') as f:
            rows = f.readlines()
            for currR in rows:
                # Strip trailing newline; FASTA header lines start with ">".
                r = currR.split('\n')[0]
                if r[0] == ">":
                    seq_names.append(r[1:])
                else:
                    currLen = len(r)
                    # Any two sequences of differing length trigger augmentation.
                    if aug == False and max_length != 0 and currLen != max_length:
                        aug = True
                    max_length = max(max_length, currLen)
        return seq_names, max_length, aug
    def _augment_data(self, seq_file, max_length, nTrain, path):
        """
        equalizes all sequnece lenghts and augment training sequences
        """
        seq_names = []
        seqPath = path + '/seq_augmented.fa'
        if os.path.exists(seqPath):
            os.remove(seqPath)
        with open(seq_file,'r') as fr:
            rows = fr.readlines()
        with open(seqPath,'w') as fw:
            # Equalize and augment training sequences when needed
            # FASTA entries are (name, sequence) pairs, so the first 2*nTrain
            # lines cover the nTrain training enhancers.
            nAugTrain = 0
            for currR in rows[:2*nTrain]:
                r = currR.split('\n')[0]
                if r[0] == ">":
                    name = r
                    continue
                elif len(r) < max_length:
                    currSeq = self._aug(r, max_length)
                else:
                    currSeq = [r]
                # Each augmented copy keeps the original name, so the same
                # enhancer appears multiple times in the training set.
                for s in currSeq:
                    nAugTrain += 1
                    fw.write(name+'\n')
                    fw.write(s+'\n')
                    seq_names.append(name[1:])
            # Equalize remaining sequences when needed
            for currR in rows[2*nTrain:]:
                r = currR.split('\n')[0]
                if r[0] == ">":
                    name = r
                    continue
                elif len(r) < max_length:
                    currSeq = self._equalize(r, max_length)
                else:
                    currSeq = r
                fw.write(name+'\n')
                fw.write(currSeq+'\n')
                seq_names.append(name[1:])
        return nAugTrain, seqPath, seq_names
    def _aug(self,seq, max_length, nAug = 10):
        # Pad `seq` to max_length with 'N's, producing up to nAug copies with
        # the padding split at different offsets.
        ret = []
        d = max_length - len(seq)
        start = 'N' * 0
        end = 'N' * (d - 0)
        s = start + seq + end
        ret.append(s) # Make sure that one augmentation placing the short sequence at the beginning exists in data
        nAug = int(min(d+1, nAug)) #do additional augmentations
        # Pick nAug-1 distinct non-zero offsets (offset 0 was added above).
        p = np.random.choice(range(1, d+1), nAug - 1, replace = False).tolist()
        for ns in p:
            start = 'N' * ns
            end = 'N' * (d - ns)
            sAug = start + seq + end
            ret.append(sAug)
        return ret
    def _equalize(self, seq, max_length):
        # Pad `seq` to max_length by appending 'N's (no augmentation).
        d = max_length - len(seq)
        start = 'N' * 0
        end = 'N' * (d - 0)
        s = start + seq + end
        return s
    def read_expr_file(self, expr_file):
        # Tab-separated table: rows = enhancers, columns = conditions/bins.
        df = pd.read_csv(expr_file, header=0, index_col=0, sep='\t')
        df.index = df.index.astype('str')
        return df
    def read_TF_file(self, TF_file):
        # Tab-separated table: rows = TFs, columns = conditions/bins.
        df = pd.read_csv(TF_file, header=0, index_col=0, sep='\t')
        df.index = df.index.astype(str)
        return df
    def batch(self, type, nBatch = -1, shuffle = True):
        """
        returns tensorflow datasets
        type: 'train', 'test', 'valid'
        nBatch: batch size, if -1 all data is returned
        shuffle: wether to shuffle data after each epoch
        """
        # NOTE(review): `type` shadows the builtin; kept for API compatibility.
        # Only the training split honours the `shuffle` flag.
        if type == 'train':
            if nBatch == -1:
                nBatch = self.nTrain_ * self.nBins_
            return self.data_.get_dataset(type, shuffle, nBatch)
        elif type == 'valid':
            if nBatch == -1:
                nBatch = self.nValid_ * self.nBins_
            return self.data_.get_dataset(type, False, nBatch)
        elif type == 'test':
            if nBatch == -1:
                nBatch = self.nTest_ * self.nBins_
            return self.data_.get_dataset(type, False, nBatch)
| 33.045 | 137 | 0.529581 |
7955821a50929a27abcb2f0662032ebb62034c75 | 19,338 | py | Python | src/cogent3/util/dict_array.py | Lmaster20/cogent3 | 1d5ff1ba2b3d42736f8f04de8507b5cd585b4fe9 | [
"BSD-3-Clause"
] | null | null | null | src/cogent3/util/dict_array.py | Lmaster20/cogent3 | 1d5ff1ba2b3d42736f8f04de8507b5cd585b4fe9 | [
"BSD-3-Clause"
] | null | null | null | src/cogent3/util/dict_array.py | Lmaster20/cogent3 | 1d5ff1ba2b3d42736f8f04de8507b5cd585b4fe9 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
"""Wrapper for numpy arrays so that they can be indexed by name
>>> a = numpy.identity(3, int)
>>> b = DictArrayTemplate('abc', 'ABC').wrap(a)
>>> b[0]
===========
A B C
-----------
1 0 0
-----------
>>> b['a']
===========
A B C
-----------
1 0 0
-----------
>>> b.keys()
['a', 'b', 'c']
>>> b['a'].keys()
['A', 'B', 'C']
"""
import json
from collections import defaultdict
from itertools import combinations, product
import numpy
from cogent3.format import table
from cogent3.util.misc import get_object_provenance, open_
__author__ = "Peter Maxwell"
__copyright__ = "Copyright 2007-2020, The Cogent Project"
__credits__ = ["Peter Maxwell", "Gavin Huttley", "Ben Kaehler"]
__license__ = "BSD-3"
__version__ = "2020.7.2a"
__maintainer__ = "Peter Maxwell"
__email__ = "pm67nz@gmail.com"
__status__ = "Production"
def convert_1D_dict(data, row_order=None):
    """returns a 1D list and header as dict keys

    Parameters
    ----------
    data : dict
        a 1D dict
    row_order
        series with column headings. If not provided, the sorted top level dict
        keys are used.
    """
    # Default to the sorted keys, then read values out in that order.
    keys = sorted(data) if row_order is None else row_order
    values = [data[key] for key in keys]
    return values, keys
def convert2Ddistance(dists, header=None, row_order=None):
    """returns a 2 dimensional list, header and row order

    Parameters
    ----------
    dists : dict
        a 1Ddict with {(a, b): dist, ..}
    header
        series with column headings. If not provided, the sorted top level dict
        keys are used.
    row_order
        a specified order to generate the rows

    Returns
    -------
    2D list, header and row_order. If a dist not present, it's set to 0, or
    the symmetric value e.g. (a, b) -> (b, a).
    """
    # Derive the label set from all pair keys when no header is given.
    if header is None:
        labels = set()
        for pair in dists:
            labels.update(set(pair))
        header = list(sorted(labels))
    rows = []
    for name_i in header:
        # Fall back to the symmetric key, then to 0, for each cell.
        row = [
            dists.get((name_i, name_j), dists.get((name_j, name_i), 0))
            for name_j in header
        ]
        rows.append(row)
    # Note: any supplied row_order is ignored; rows always follow the header.
    return rows, header[:], header
def convert2DDict(twoDdict, header=None, row_order=None, make_symmetric=False):
    """returns a 2 dimensional list, header and row order

    Parameters
    ----------
    twoDdict : dict
        a 2 dimensional dict with top level keys corresponding to column
        headings, lower level keys correspond to row headings
    header
        series with column headings. If not provided, the sorted top level dict
        keys are used.
    row_order
        a specified order to generate the rows
    make_symmetric : bool
        if True, twoDdict[a][b] == twoDdict[b][a]
    """
    if not row_order:
        row_order = sorted(twoDdict.keys())
    if not header:  # we assume columns consistent across dict
        header = sorted(twoDdict[row_order[0]].keys())
    if make_symmetric:
        # Both axes share a single ordering over the union of all names.
        combined = list(sorted(set(header) | set(row_order)))
        header = row_order = combined
        data = defaultdict(dict)
        for k1, k2 in combinations(combined, 2):
            # Missing entries fall back to the symmetric value, then to 0.
            if k1 in twoDdict:
                val = twoDdict[k1].get(k2, 0)
            elif k2 in twoDdict:
                val = twoDdict[k2].get(k1, 0)
            else:
                val = 0
            data[k1][k2] = data[k2][k1] = val
        # Zero the diagonal for every name, not only those visited by
        # combinations(); fixes a KeyError when `combined` has one element.
        for k in combined:
            data[k][k] = 0
        twoDdict = data
    # make list of lists
    rows = [[twoDdict[row][column] for column in header] for row in row_order]
    return rows, row_order, header
def convert_dict(data, header=None, row_order=None):
    """returns a list, DictArrayTemplate args

    Parameters
    ----------
    data : dict
        a 1D or 2D dict
    header
        series with column headings. If not provided, the sorted top level dict
        keys are used.
    row_order
        a specified order to generate the rows
    """
    # Dispatch on the shape of the first key/value: a 2-tuple key means a
    # pairwise-distance dict, a dict-like value means a nested 2D dict,
    # anything else is treated as a flat 1D mapping.
    first_key = list(data)[0]
    if type(first_key) == tuple and len(first_key) == 2:
        rows, row_order, header = convert2Ddistance(data, header, row_order)
    elif hasattr(data[first_key], "keys"):
        rows, row_order, header = convert2DDict(data, header, row_order)
    else:
        # 1D case: `header` acts as the key order; the returned header is the
        # caller's header value, unchanged.
        rows, row_order = convert_1D_dict(data, header)
    return rows, row_order, header
def convert_series(data, row_order=None, header=None):
    """returns a list, header and row order

    Parameters
    ----------
    data : dict
        a 1D or 2D dict
    header
        series with column headings. If not provided, the sorted top level dict
        keys are used.
    row_order
        a specified order to generate the rows
    """
    first_element = data[0]
    nrows = len(data)
    # A scalar first element marks a 1D series.
    try:
        ncols = len(first_element)
    except TypeError:
        ncols = 1

    def _dim(value):
        # ints are taken as dimension sizes; sequences contribute their length.
        if value is None:
            return None
        return value if isinstance(value, int) else len(value)

    dim_h = _dim(header)
    dim_r = _dim(row_order)
    # Dimension checks only apply to the single-row, multi-column case.
    if nrows == 1 and ncols > 1:
        if dim_h is not None and dim_h != ncols:
            raise ValueError(
                f"mismatch between number columns={dim_h} "
                f"and number of elements in data={ncols}"
            )
        if dim_r is not None and dim_r != 1:
            raise ValueError(
                f"mismatch between number rows={dim_r} "
                f"and number of rows in data={ncols}"
            )
    if not header:
        header = None if ncols == 1 else ncols
    row_order = row_order if row_order else nrows
    return data, row_order, header
def convert_for_dictarray(data, header=None, row_order=None):
    """returns a list, header and row order from data

    Parameters
    ----------
    data : iterable
        data series, dictarray, dict, etc..
    header
        series with column headings. If not provided, the sorted top level dict
        keys are used.
    row_order
        a specified order to generate the rows
    """
    if isinstance(data, DictArray):
        # Reuse the template's axis names and copy the underlying array.
        header = data.template.names[0]
        row_order = data.template.names[1]
        return data.array.copy(), row_order, header
    if hasattr(data, "keys"):  # dictlike, it could be defaultdict
        return convert_dict(data, header, row_order)
    return convert_series(data, header, row_order)
class NumericKey(int):
    """a distinct numerical type for use as a DictArray key"""
    def __new__(cls, val):
        # Behaves exactly like int; the subclass exists only so numeric keys
        # can be distinguished from plain ints by type.
        return super(NumericKey, cls).__new__(cls, val)
class DictArrayTemplate(object):
    """records the ordered key names of each dimension of a DictArray and
    maps keys to their positional (ordinal) indices"""
    def __init__(self, *dimensions):
        """
        Parameters
        ----------
        dimensions
            one entry per dimension: None (skipped), an int n (keys become
            range(n)), or a series of key names
        """
        self.names = []
        self.ordinals = []
        for names in dimensions:
            if names is None:
                continue
            elif isinstance(names, int):
                names = list(range(names))
            else:
                # int keys are cast to NumericKey so they stay usable as
                # keys while remaining distinguishable from positional ints
                names = [NumericKey(v) if type(v) == int else v for v in names]
            self.names.append(names)
            self.ordinals.append(dict((c, i) for (i, c) in enumerate(names)))
        self._shape = tuple(len(keys) for keys in self.names)
    def __eq__(self, other):
        # equal when identical, or when both templates have the same names
        return self is other or (
            isinstance(other, DictArrayTemplate) and self.names == other.names
        )
    def _dict2list(self, value, depth=0):
        # Unpack (possibly nested) dictionary into correct order of elements
        if depth < len(self._shape):
            return [self._dict2list(value[key], depth + 1) for key in self.names[depth]]
        else:
            return value
    def unwrap(self, value):
        """Convert to a simple numpy array"""
        if isinstance(value, DictArray):
            if value.template == self:
                value = value.array
            else:
                raise ValueError # used to return None, which can't be right
        elif isinstance(value, dict):
            value = self._dict2list(value)
        value = numpy.asarray(value)
        # the array shape must agree exactly with this template
        assert value.shape == self._shape, (value.shape, self._shape)
        return value
    def wrap(self, array, dtype=None):
        """returns a DictArray of `array` keyed by this template's names

        `array` may be a numpy array, nested sequence, or dict-like (the
        latter is converted via convert_for_dictarray)"""
        if hasattr(array, "keys"):
            # dict-like input: supply our own names as header / row order
            if len(self._shape) == 2:
                r, h = self.names[:2]
            else:
                r, h = self.names[0], None
            array, _, _ = convert_for_dictarray(array, h, r)
        array = numpy.asarray(array, dtype=dtype)
        for (dim, categories) in enumerate(self.names):
            assert len(categories) == numpy.shape(array)[dim], "cats=%s; dim=%s" % (
                categories,
                dim,
            )
        return DictArray(array, self)
    def interpret_index(self, names):
        """translates a key-based (or mixed) index into positional form

        Returns (index, klass): index is a tuple of positional indices and
        klass a template for the dimensions remaining after indexing, or
        None when no named dimension remains."""
        if isinstance(names, numpy.ndarray) and "int" in names.dtype.name:
            # the numpy item() method casts to the nearest Python type
            names = tuple(v.item() for v in names)
        if not isinstance(names, tuple):
            names = (names,)
        index = []
        remaining = []
        for (ordinals, allnames, name) in zip(self.ordinals, self.names, names):
            if type(name) not in (int, slice, list, numpy.ndarray):
                # a key (including NumericKey): map to ordinal; dimension is
                # consumed, so nothing is added to remaining
                name = ordinals[name]
            elif isinstance(name, slice):
                # map key-valued slice bounds onto their ordinal positions
                start = name.start
                stop = name.stop
                try:
                    start = allnames.index(start)
                except ValueError:
                    # either None, or it's an int index
                    pass
                try:
                    stop = allnames.index(stop)
                except ValueError:
                    # as above
                    pass
                name = slice(start, stop, name.step)
                remaining.append(allnames.__getitem__(name))
            elif type(name) in (list, numpy.ndarray):
                # a sequence of keys and/or ints: keys become ordinals
                name = [n if type(n) == int else ordinals[n] for n in name]
                remaining.append([allnames[i] for i in name])
            index.append(name)
        # dimensions not indexed at all are retained in full
        remaining.extend(self.names[len(index) :])
        klass = type(self)(*remaining) if remaining else None
        return (tuple(index), klass)
class DictArray(object):
    """Wraps a numpy array so that it can be indexed with strings. Behaves
    like nested dictionaries (only ordered).
    Notes
    -----
    Used for things like substitution matrices and bin probabilities.
    Indexing can be done via conventional integer based operations, using
    keys, lists of int/keys.
    Behaviour differs from numpy array indexing when you provide lists of
    indices. Such indexing is applied sequentially, e.g. darr[[0, 2], [1, 2]]
    will return the intersection of rows [0, 2] with columns [1, 2]. In numpy,
    the result would instead be the elements at [0, 1], [2, 2].
    """
    def __init__(self, *args, **kwargs):
        """allow alternate ways of creating for time being
        Supported signatures
        --------------------
        DictArray(data[, dtype=...])
            data converted via convert_for_dictarray(); keys are inferred
        DictArray(array, template)
            a numpy array plus an existing DictArrayTemplate
        DictArray(data, *dim_names[, dtype=...])
            builds a DictArrayTemplate from dim_names and wraps data
        """
        if len(args) == 1:
            vals, row_keys, col_keys = convert_for_dictarray(args[0])
            dtype = kwargs.get("dtype", None)
            self.array = numpy.asarray(vals, dtype=dtype)
            self.template = DictArrayTemplate(row_keys, col_keys)
        elif len(args) == 2:
            if not isinstance(args[1], DictArrayTemplate):
                raise NotImplementedError
            self.array = args[0]
            self.template = args[1]
        else:
            # "typecode" is a legacy alias for "dtype"; the previous code
            # read kwargs["dtype"] unconditionally, which raised KeyError
            # when only typecode was supplied
            if "dtype" in kwargs or "typecode" in kwargs:
                dtype = kwargs.get("dtype", kwargs.get("typecode"))
                kwargs.pop("dtype", None)
                kwargs.pop("typecode", None)
            else:
                dtype = None
            create_new = DictArrayTemplate(*args[1:]).wrap(args[0], dtype=dtype)
            self.__dict__ = create_new.__dict__
        self.shape = self.array.shape
    def to_array(self):
        """returns the underlying numpy array"""
        return self.array
    def __array__(self, dtype=None):
        # numpy conversion hook; casts only when a dtype is requested
        array = self.array
        if dtype is not None:
            array = array.astype(dtype)
        return array
    def to_dict(self, flatten=False):
        """returns data as a dict
        Parameters
        ----------
        flatten : bool
            returns a 1D dictionary keyed by coordinate tuples rather than
            nested dicts
        """
        names = self.template.names
        shape = self.shape
        result = {}
        if len(names) == 1:
            result = {names[0][i]: self.array[i] for i in range(len(names[0]))}
        elif flatten:
            for indices in product(*[range(n) for n in shape]):
                value = self.array[indices]
                coord = tuple(n[i] for n, i in zip(names, indices))
                result[coord] = value
        else:
            for indices in product(*[range(n) for n in shape]):
                value = self.array[indices]
                coord = tuple(n[i] for n, i in zip(names, indices))
                # descend/create the nested dicts level by level; the
                # previous code assigned at a fixed level, which produced
                # wrong results for arrays with more than 2 dimensions
                current = result
                for key in coord[:-1]:
                    current = current.setdefault(key, {})
                current[coord[-1]] = value
        return result
    def to_rich_dict(self):
        """returns a json-serialisable dict including provenance and version"""
        data = self.array.tolist()
        result = {
            "type": get_object_provenance(self.template),
            "array": data,
            "names": self.template.names,
            "version": __version__,
        }
        return result
    def to_json(self):
        """returns to_rich_dict() serialised as a json string"""
        return json.dumps(self.to_rich_dict())
    def __getitem__(self, names):
        (index, remaining) = self.template.interpret_index(names)
        if list in {type(v) for v in index}:
            # lists of indices are applied sequentially, per dimension, so
            # they select an intersection -- see the class Notes
            result = self.array
            for dim, indices in enumerate(index):
                if isinstance(indices, slice):
                    # pad with full slices so the slice applies to `dim`
                    indices = (
                        (indices,)
                        if dim == 0
                        else (slice(None, None),) * dim + (indices,)
                    )
                    result = result[tuple(indices)]
                    continue
                if isinstance(indices, int):
                    indices = [indices]
                result = result.take(indices, axis=dim)
        else:
            result = self.array[index]
        if remaining is not None:
            # named dimensions remain: re-wrap the sub-array
            result = self.__class__(result.reshape(remaining._shape), remaining)
        return result
    def __iter__(self):
        # iterate the first dimension, wrapping each element when named
        # dimensions remain
        (index, remaining) = self.template.interpret_index(0)
        for elt in self.array:
            if remaining is None:
                yield elt
            else:
                yield remaining.wrap(elt)
    def __len__(self):
        return len(self.template.names[0])
    def keys(self):
        """returns a copy of the first-dimension key names"""
        return self.template.names[0][:]
    def items(self):
        """returns a list of (key, value) pairs over the first dimension"""
        return [(n, self[n]) for n in list(self.keys())]
    def __repr__(self):
        if self.array.ndim > 2:
            return "%s dimensional %s" % (self.array.ndim, type(self).__name__)
        t = self.to_table()
        t.set_repr_policy(show_shape=False)
        return str(t)
    def __ne__(self, other):
        return not self.__eq__(other)
    def __eq__(self, other):
        # NOTE: comparing against a bare numpy array returns an element-wise
        # array rather than a bool, mirroring numpy semantics
        if self is other:
            return True
        elif isinstance(other, DictArray):
            return self.template == other.template and numpy.all(
                self.array == other.array
            )
        elif isinstance(other, type(self.array)):
            return self.array == other
        elif isinstance(other, dict):
            return self.to_dict() == other
        else:
            return False
    def to_normalized(self, by_row=False, by_column=False):
        """returns a DictArray as frequencies
        Parameters
        ----------
        by_row
            rows sum to 1
        by_column
            columns sum to 1
        """
        assert not (by_row and by_column)
        # todo need to check there are two dimension!
        if by_row:
            axis = 1
        elif by_column:
            axis = 0
        else:
            axis = None
        result = self.array / self.array.sum(axis=axis, keepdims=True)
        return self.template.wrap(result)
    def row_sum(self):
        """returns DictArray summed across rows"""
        # the computed axis was previously ignored (sum always used axis=1,
        # which raised for non-2D arrays); honour it as clearly intended
        axis = 1 if len(self.shape) == 2 else 0
        result = self.array.sum(axis=axis)
        template = DictArrayTemplate(self.template.names[0])
        return template.wrap(result)
    def col_sum(self):
        """returns DictArray summed across columns
        Notes
        -----
        requires at least two named dimensions
        """
        result = self.array.sum(axis=0)
        template = DictArrayTemplate(self.template.names[1])
        return template.wrap(result)
    def _repr_html_(self):
        if self.array.ndim > 2:
            return "%s dimensional %s" % (self.array.ndim, type(self).__name__)
        t = self.to_table()
        t.set_repr_policy(show_shape=False)
        return t._repr_html_()
    def to_string(self, format="tsv", sep=None):
        """Return the data as a formatted string.
        Parameters
        ----------
        format
            possible formats are 'csv', or 'tsv' (default).
        sep
            A string separator for delineating columns, e.g. ',' or
            '\t'. Overrides format.
        """
        if format.lower() not in ("tsv", "csv"):
            msg = f"'{format}' not supported"
            raise ValueError(msg)
        sep = sep or {"tsv": "\t", "csv": ","}[format.lower()]
        data = self.to_dict(flatten=True)
        # header names each dimension plus a value column; each flattened
        # coordinate and its value becomes one data row
        rows = [[f"dim-{i + 1}" for i in range(self.array.ndim)] + ["value"]] + [
            [str(x) for x in row]
            for row in [list(k) + [v] for k, v in data.items()]
        ]
        return "\n".join([sep.join(row) for row in rows])
    def to_table(self):
        """return Table instance
        Notes
        -----
        Raises ValueError if number of dimensions > 2
        """
        ndim = self.array.ndim
        if ndim > 2:
            raise ValueError(f"cannot make 2D table from {ndim}D array")
        from .table import Table
        header = self.template.names[0] if ndim == 1 else self.template.names[1]
        # 2D tables get an unnamed index column holding the row keys
        index = "" if ndim == 2 else None
        if ndim == 1:
            data = {c: [v] for c, v in zip(header, self.array)}
        else:
            data = {c: self.array[:, i].tolist() for i, c in enumerate(header)}
            data[""] = self.template.names[0]
        return Table(header=header, data=data, index=index)
    def write(self, path, format="tsv", sep="\t"):
        """
        writes a flattened version to path
        Parameters
        ----------
        path : str
        format
            possible formats are 'csv', or 'tsv' (default); other values
            raise ValueError (via to_string)
        sep : str
            string separator for delineating columns
        """
        data = self.to_string(format=format, sep=sep)
        with open_(path, "w") as outfile:
            outfile.write(data)
| 31.291262 | 88 | 0.555228 |
7955828e79eaac5f49d938bc89d352d19b6d2e89 | 122,190 | py | Python | pysnmp-with-texts/CTRON-REMOTE-ACCESS-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/CTRON-REMOTE-ACCESS-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/CTRON-REMOTE-ACCESS-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module CTRON-REMOTE-ACCESS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CTRON-REMOTE-ACCESS-MIB
# Produced by pysmi-0.3.4 at Wed May 1 12:30:39 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Counter32, Integer32, Unsigned32, ModuleIdentity, iso, ObjectIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, TimeTicks, Gauge32, MibIdentifier, enterprises, Bits, Counter64, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "Integer32", "Unsigned32", "ModuleIdentity", "iso", "ObjectIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "TimeTicks", "Gauge32", "MibIdentifier", "enterprises", "Bits", "Counter64", "NotificationType")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
class Index(Integer32):
    """MIB textual type: a plain Integer32 subtype used as an index value"""
    pass
class DLCI(Integer32):
    """MIB textual type: a plain Integer32 subtype; presumably a Frame Relay
    data link connection identifier (DLCI) -- name-based inference"""
    pass
# OID registry tree for this generated MIB module:
# enterprises.cabletron(52).mibs(4).ctron(1).ctDataLink(2).ctronWan(7)
# .ctRemoteAccess(2), plus the functional sub-groups beneath ctRemoteAccess.
cabletron = MibIdentifier((1, 3, 6, 1, 4, 1, 52))
mibs = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4))
ctron = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1))
ctDataLink = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1, 2))
ctronWan = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7))
ctRemoteAccess = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2))
ctRemoteConnection = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1))
ctDs1Ext = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2))
ctRs232Ext = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 3))
ctFrDcp = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 4))
ctDDSExt = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 5))
ctPPPExt = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6))
ctWanalyzer = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 7))
ctDs1Alarms = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8))
ctIPPQFilters = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 9))
ctDs3Ext = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 10))
# Auto-generated by pysmi from CTRON-REMOTE-ACCESS-MIB: connection-count
# scalar and the physical-port table (ctRemPhysPortTable), indexed by
# ctRemConnectionIndex. Do not hand-edit the OID tuples.
ctRemNumConnections = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctRemNumConnections.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemNumConnections.setDescription('This object describes the number of physical remote access connections on the platform.')
ctRemPhysPortTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 2), )
if mibBuilder.loadTexts: ctRemPhysPortTable.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemPhysPortTable.setDescription('A list of the descriptions of the physical remote access ports of this platform.')
ctRemPhysPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 2, 1), ).setIndexNames((0, "CTRON-REMOTE-ACCESS-MIB", "ctRemConnectionIndex"))
if mibBuilder.loadTexts: ctRemPhysPortEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemPhysPortEntry.setDescription('A physical port entry. It contains objects relating to a given physical remote access port')
ctRemConnectionIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctRemConnectionIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemConnectionIndex.setDescription('A value indicating the remote connection this entry is located on.')
ctRemPhysPortType = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("none", 1), ("t1", 2), ("e1", 3), ("synchronous", 4), ("dds", 5), ("di", 6), ("hdsl", 7), ("isdnBRI", 8), ("ds30", 9), ("t1dds", 10))).clone('none')).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctRemPhysPortType.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemPhysPortType.setDescription('The type of physical port that this entry describes. None indicates that the physical port has no connector.')
ctRemPhysPortSpecificMib = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 2, 1, 3), ObjectIdentifier()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctRemPhysPortSpecificMib.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemPhysPortSpecificMib.setDescription('This object identifies an instance of the index object in the first group of objects in the MIB specific to the physical port.')
ctRemPhysPortProtMgrType = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("none", 1), ("pppNailedUp", 2), ("isdnPriPpp", 3), ("isdnBriPpp", 4), ("frameRelayPvcRtr", 5), ("frameRelayPvcSw", 6), ("hdlc", 7))).clone('none')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemPhysPortProtMgrType.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemPhysPortProtMgrType.setDescription('This object identifies an instance of the type of Protocol Manager residing on this physical port. Before a physical port may be used, the type of manager must be designated. For example, if the physical port was going to be used as an ISDN PRI interface, with dial up PPP links, then the manager type would be set to (4), ISDN-PRI-PPP.')
ctRemPhysPortProtMgrIfaceNum = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 2, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemPhysPortProtMgrIfaceNum.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemPhysPortProtMgrIfaceNum.setDescription('This object identifies the interface number that will be assigned to the Protocol Managers IFO')
ctRemPhysPortWanIfaceNum = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 2, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctRemPhysPortWanIfaceNum.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemPhysPortWanIfaceNum.setDescription("This object identifies the interface number that will be assigned to the Wanio's IFO")
ctRemPhysPortProtMgrMaxIfos = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 2, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemPhysPortProtMgrMaxIfos.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemPhysPortProtMgrMaxIfos.setDescription("This object identifies the maximum number of IFO's that can be created on this physical interface.")
ctRemPhysPortProtMgrIfaceList = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 2, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctRemPhysPortProtMgrIfaceList.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemPhysPortProtMgrIfaceList.setDescription("This object identifies a list of interfaces that will be assigned to this Protocol Managers data IFO's")
ctRemPhysAlarmTimeOut = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 2, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemPhysAlarmTimeOut.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemPhysAlarmTimeOut.setDescription('Whenever the physical port is configured for nailed-up PPP this object specifies the time the physical port must remain in a failed state as a condition to begin the backup procedure. The backup procedure is begun for any active interface(s) that were connected over the failed physical port and which have been configured for backup.')
ctRemPhysWpimType = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 2, 1, 10), Integer32().clone(2)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemPhysWpimType.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemPhysWpimType.setDescription('This Mib object determines the Mode of operation that the T1/DDS Wpim module will function as. Setting a value of 1 will set the mode to DDS, setting a value of 2 will set the mode to T1.')
# Auto-generated: remote-interface table (ctRemInterfaceTable), indexed by
# ctRemIntEntIfIndex; per-interface compression, MTU, congestion, idle
# timeout and encryption settings.
ctRemInterfaceTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 3), )
if mibBuilder.loadTexts: ctRemInterfaceTable.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemInterfaceTable.setDescription('The interface table for Cabletron remote interfaces. It is a list of items that are pertinant to the interfaces that are part of remote physical ports. The number of entries is dependent upon the total number of remote interfaces configured.')
ctRemInterfaceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 3, 1), ).setIndexNames((0, "CTRON-REMOTE-ACCESS-MIB", "ctRemIntEntIfIndex"))
if mibBuilder.loadTexts: ctRemInterfaceEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemInterfaceEntry.setDescription('A remote interface entry. It contains objects relating to an interface that is defined for remote Cabletron products.')
ctRemIntEntIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctRemIntEntIfIndex.setReference('rfc-1213')
if mibBuilder.loadTexts: ctRemIntEntIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemIntEntIfIndex.setDescription('The ifIndex of the interface. This is the index of this entry in the ifTable.')
ctRemIntEntCompression = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("on", 1), ("off", 2))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemIntEntCompression.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemIntEntCompression.setDescription('This flag will indicate whether compression should take place on this interface.')
ctRemIntEntCompRatio = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 3, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctRemIntEntCompRatio.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemIntEntCompRatio.setDescription('This string indicates the current compression ratio on this interface.')
ctRemIntEntCompStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("on", 1), ("off", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctRemIntEntCompStatus.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemIntEntCompStatus.setDescription('This flag will indicate whether compression has been nogotiated on this interface.')
ctRemIntEntMTU = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 3, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemIntEntMTU.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemIntEntMTU.setDescription('The MTU to be negotiated for this interface. This will not be updated with the actual MTU, which can be found in the ifTable.')
ctRemIntEntCongestion = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 3, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemIntEntCongestion.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemIntEntCongestion.setDescription('This object identifies the Leaky Bucket algorithm flag for a Frame Relay circuit on this intertface. The option is supported when this flag is enabled, and not supported when this flag is disabled.')
ctRemIntEntMaxProfiles = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 3, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemIntEntMaxProfiles.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemIntEntMaxProfiles.setDescription('This object specifies the maximum number of neighbor profiles. A neighbor profile describes a possible connection. The maximum should be set to account for support of PPP multilink. That is to say, if PPP multilink is being used then the number of profiles must at least accomodate the maximum possible number of additional more BW connections. As such, each neighbor profile would then describe a possible connection to the same remote end point.')
ctRemIntEntTxIdleTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 3, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemIntEntTxIdleTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemIntEntTxIdleTimeout.setDescription('This object identifies the idle timeout value in which a packet needs to be transmitted before the interface is automatically disconnected. Allowed values are in increments of five seconds including zero.')
ctRemIntEntRxIdleTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 3, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemIntEntRxIdleTimeout.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemIntEntRxIdleTimeout.setDescription('This object identifies the idle timeout in which a packet needs to be received before the interface is automatically disconnected. Allowed values are in increments of five seconds including zero.')
ctRemIntEntCircuitName = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 3, 1, 10), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemIntEntCircuitName.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemIntEntCircuitName.setDescription('This object associates a circuit name with a specific interface.')
ctRemIntEntEncryption = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 3, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("on", 1), ("off", 2))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemIntEntEncryption.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemIntEntEncryption.setDescription('This flag will indicate whether encryption should take place on this interface.')
ctRemIntEntEncryptStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 3, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("on", 1), ("off", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctRemIntEntEncryptStatus.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemIntEntEncryptStatus.setDescription('This flag will indicate whether Encryption has been nogotiated on this interface.')
# Auto-generated: primary-interface table (ctRemPrimaryInterfaceTable),
# indexed by ctRemPriIntEntIfIndex; retry interval for restoring the
# primary interface.
ctRemPrimaryInterfaceTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 4), )
if mibBuilder.loadTexts: ctRemPrimaryInterfaceTable.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemPrimaryInterfaceTable.setDescription('The interface table for Cabletron remote interfaces. It is a list of items that are pertinant to the interfaces that are part of remote physical ports. The number of entries is dependent upon the total number of remote interfaces configured.')
ctRemPrimaryInterfaceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 4, 1), ).setIndexNames((0, "CTRON-REMOTE-ACCESS-MIB", "ctRemPriIntEntIfIndex"))
if mibBuilder.loadTexts: ctRemPrimaryInterfaceEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemPrimaryInterfaceEntry.setDescription('A remote interface entry. It contains objects relating to an interface that is defined for remote Cabletron products.')
ctRemPriIntEntIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctRemPriIntEntIfIndex.setReference('rfc-1213')
if mibBuilder.loadTexts: ctRemPriIntEntIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemPriIntEntIfIndex.setDescription('The ifIndex of the interface. This is the index of this entry in the ifTable.')
ctRemPriIntEntConnectRetryInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 4, 1, 2), Integer32().clone(30)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemPriIntEntConnectRetryInterval.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemPriIntEntConnectRetryInterval.setDescription('This object specifies the interval in seconds between retries for restoring the primary interface.')
# Auto-generated: backup-interface table (ctRemBackupInterfaceTable),
# indexed by ctRemIntEntBackupIfIndex; switchover timing, override and
# retry configuration for backup links.
ctRemBackupInterfaceTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 5), )
if mibBuilder.loadTexts: ctRemBackupInterfaceTable.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemBackupInterfaceTable.setDescription('The interface table for Cabletron remote interfaces. It is a list of items that are pertinant to the interfaces that are part of remote physical ports. The number of entries is dependent upon the total number of remote interfaces configured.')
ctRemBackupInterfaceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 5, 1), ).setIndexNames((0, "CTRON-REMOTE-ACCESS-MIB", "ctRemIntEntBackupIfIndex"))
if mibBuilder.loadTexts: ctRemBackupInterfaceEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemBackupInterfaceEntry.setDescription('A remote interface entry. It contains objects relating to an interface that is defined for remote Cabletron products.')
ctRemIntEntBackupIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 5, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctRemIntEntBackupIfIndex.setReference('rfc-1213')
if mibBuilder.loadTexts: ctRemIntEntBackupIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemIntEntBackupIfIndex.setDescription('The ifIndex of the interface. This is the index of this entry in the ifTable.')
ctRemIntEntBackupIfNum = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 5, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemIntEntBackupIfNum.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemIntEntBackupIfNum.setDescription('This object identifies which ifIndex will backup this interface in the event of a communications failure.')
ctRemIntEntBackupIfInUseCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 5, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctRemIntEntBackupIfInUseCnt.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemIntEntBackupIfInUseCnt.setDescription('This object identifies how many other interfaces this interface will backup.')
ctRemIntEntBackupIfTimeToConnect = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 5, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemIntEntBackupIfTimeToConnect.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemIntEntBackupIfTimeToConnect.setDescription('This object identifies how long to wait in seconds before attempting to switchover to the backup interface. Allowed values are in increments of five seconds including zero.')
ctRemIntEntBackupIfTimeToDisconnect = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 5, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemIntEntBackupIfTimeToDisconnect.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemIntEntBackupIfTimeToDisconnect.setDescription('This object identifies how long to wait in seconds before attempting to switchover to the primary interface. Allowed values are in increments of five seconds including zero.')
ctRemIntEntBackupIfOverride = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 5, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemIntEntBackupIfOverride.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemIntEntBackupIfOverride.setDescription('This object identifies if the interface will switch back to the primary link after it has become active again. A force condition will not allow the interface to switch back.')
ctRemIntEntBackupConnectRetries = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 5, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemIntEntBackupConnectRetries.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemIntEntBackupConnectRetries.setDescription('This object specifies the number of tries to bring-up the backup interface before giving up.')
ctRemIntEntBackupConnectRetryInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 5, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemIntEntBackupConnectRetryInterval.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemIntEntBackupConnectRetryInterval.setDescription('This object specifies the interval in seconds between retries for bringing up backup interface before giving up.')
# --- Remote-access physical port table (OID ...52.4.1.2.7.2.1.6) -------------
# One row per physical remote-access port, indexed by ctRemExtConnectionIndex.
# The columns bind a Protocol Manager (PPP, ISDN PRI/BRI PPP, frame relay,
# HDLC) to the port and expose its interface-number and channel assignments.
# NOTE: auto-generated pysnmp code — each `if mibBuilder.loadTexts:` guard
# attaches human-readable text only when the MIB builder was asked to load it.
ctRemExtPhysPortTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 6), )
if mibBuilder.loadTexts: ctRemExtPhysPortTable.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemExtPhysPortTable.setDescription('A list of the descriptions of the physical remote access ports of this platform.')
# Row template: indexed by ctRemExtConnectionIndex (defined below as column 1).
ctRemExtPhysPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 6, 1), ).setIndexNames((0, "CTRON-REMOTE-ACCESS-MIB", "ctRemExtConnectionIndex"))
if mibBuilder.loadTexts: ctRemExtPhysPortEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemExtPhysPortEntry.setDescription('A physical port entry. It contains objects relating to a given physical remote access port')
# Column 1 (read-only): remote connection this row belongs to.
ctRemExtConnectionIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 6, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctRemExtConnectionIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemExtConnectionIndex.setDescription('A value indicating the remote connection this entry is located on.')
# Column 2 (read-only): protocol-manager number for this row.
ctRemExtProtMgrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 6, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctRemExtProtMgrIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemExtProtMgrIndex.setDescription('A value indicating the protMgr number for this entry.')
# Column 3 (read-write, default 'none'): which Protocol Manager type runs on
# this port; must be set before the port can be used.
ctRemExtPhysPortProtMgrType = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 6, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("none", 1), ("pppNailedUp", 2), ("isdnPriPpp", 3), ("isdnBriPpp", 4), ("frameRelayPvcRtr", 5), ("frameRelayPvcSw", 6), ("hdlc", 7))).clone('none')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemExtPhysPortProtMgrType.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemExtPhysPortProtMgrType.setDescription('This object identifies an instance of the type of Protocol Manager residing on this physical port. Before a physical port may be used, the type of manager must be designated. For example, if the physical port was going to be used as an ISDN PRI interface, with dial up PPP links, then the manager type would be set to (4), ISDN-PRI-PPP.')
# Column 4 (read-write, default 'disabled'): enable flag for the manager.
ctRemExtPhysPortProtMgrEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 6, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemExtPhysPortProtMgrEnable.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemExtPhysPortProtMgrEnable.setDescription('This object identifies an instance of the the enable flag of Protocol Manager residing on this physical port.')
# Column 5 (read-write): ifIndex assigned to the manager's IFO.
ctRemExtPhysPortProtMgrIfaceNum = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 6, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemExtPhysPortProtMgrIfaceNum.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemExtPhysPortProtMgrIfaceNum.setDescription('This object identifies the interface number that will be assigned to the Protocol Managers IFO')
# Column 6 (read-write): maximum IFOs creatable on this physical interface.
ctRemExtPhysPortProtMgrMaxIfos = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 6, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRemExtPhysPortProtMgrMaxIfos.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemExtPhysPortProtMgrMaxIfos.setDescription("This object identifies the maximum number of IFO's that can be created on this physical interface.")
# Columns 7-8 (read-only strings): interface list and channel/timeslot list
# assigned to this manager's data IFOs.
ctRemExtPhysPortProtMgrIfaceList = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 6, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctRemExtPhysPortProtMgrIfaceList.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemExtPhysPortProtMgrIfaceList.setDescription("This object identifies a list of interfaces that will be assigned to this Protocol Managers data IFO's")
ctRemExtPhysPortProtMgrChannelList = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 1, 6, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctRemExtPhysPortProtMgrChannelList.setStatus('mandatory')
if mibBuilder.loadTexts: ctRemExtPhysPortProtMgrChannelList.setDescription("This object identifies a list of channles/timeslots that will be assigned to this Protocol Managers data IFO's")
# --- DS1 extensions table (OID ...52.4.1.2.7.2.2.1) --------------------------
# Extension to the standard DS1 configuration table (RFC 1406): one row per
# DS1 physical port, indexed by ctDs1ExtensionsEntryIndex.  Columns 1-7 are
# general DS1 settings; columns 8-18 (wanDs1ExtensionsBert*) drive and report
# Bit Error Rate Testing (BERT) on the port.
ctDs1ExtensionsTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 1), )
if mibBuilder.loadTexts: ctDs1ExtensionsTable.setStatus('mandatory')
if mibBuilder.loadTexts: ctDs1ExtensionsTable.setDescription('This table is an extension to the standard ds1 configuration table. It is a list of items that are pertinant to ds1 ports on a platform. There is one entry per ds1 physical port on the platform.')
ctDs1ExtensionsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 1, 1), ).setIndexNames((0, "CTRON-REMOTE-ACCESS-MIB", "ctDs1ExtensionsEntryIndex"))
if mibBuilder.loadTexts: ctDs1ExtensionsEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ctDs1ExtensionsEntry.setDescription('A ds1 extensions entry containing objects relating to the particular ds1 physical port.')
# Column 1 (read-only): unique per-DS1-port index.
ctDs1ExtensionsEntryIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctDs1ExtensionsEntryIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ctDs1ExtensionsEntryIndex.setDescription('A unique value for each ds1 physical port on the platform.')
# Column 2 (read-only): how many ifTable entries this port occupies.
ctDs1ExtensionsNumInterfaces = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctDs1ExtensionsNumInterfaces.setReference('rfc-1213')
if mibBuilder.loadTexts: ctDs1ExtensionsNumInterfaces.setStatus('mandatory')
if mibBuilder.loadTexts: ctDs1ExtensionsNumInterfaces.setDescription('The number of interfaces on this physical port. This number inidicates the number of entries this physical port uses in the ifTable.')
# Column 3 (read-write): commit/view/restore control for the DS1 Fractional
# Table (RFC 1406) staged edits.
ctDs1ExtensionsToggleFracTable = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("update-table", 1), ("display-new", 2), ("display-old", 3), ("restore-old", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctDs1ExtensionsToggleFracTable.setReference('rfc-1406')
if mibBuilder.loadTexts: ctDs1ExtensionsToggleFracTable.setStatus('mandatory')
if mibBuilder.loadTexts: ctDs1ExtensionsToggleFracTable.setDescription('Setting this object to 1 will cause the Ds1 Fractional Table to be updated with the new values, as entered. Setting this object to 2, will cause the Ds1 Fractional Table to view as the table being entered. Setting this object to 3, will cause the Ds1 Fractional Table to be the table that is currently in use, regardless of any changes being entered. Setting this object to 4, will cause any changes that have been made to the Ds1 Fractional Table since the last update-table to be deleted. For physical ports that are of type synch, or for Ds1 ports that do not support the Ds1 Fractional Table, this object will have no affect, and will always return a 1.')
# Column 4 (read-write, default 'zero'): line build out / cable-length setting.
ctDs1ExtensionsLineBuildOut = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("unknown", 1), ("zero", 2), ("minus-7point5", 3), ("minus-15", 4), ("a133to266feet", 5), ("a266to399feet", 6), ("a399to533feet", 7), ("a533to655feet", 8))).clone('zero')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctDs1ExtensionsLineBuildOut.setStatus('mandatory')
if mibBuilder.loadTexts: ctDs1ExtensionsLineBuildOut.setDescription('The line build out setting for this ds1 physical port. Unknown indicates that the setting is neither readable or writable.')
# Column 5 (read-write, 1..15 seconds): Carrier Failure Alarm hold time.
ctDs1ExtensionsCFADuration = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctDs1ExtensionsCFADuration.setStatus('mandatory')
if mibBuilder.loadTexts: ctDs1ExtensionsCFADuration.setDescription('The length of time (in seconds), that the ds1 port will remain in the Carrier Failure Alarm state after the alarm condition has cleared. For those Ds1 ports not supporting Carrier Failure Alarm duration, this object always returns 1, and setting the object has no effect.')
# Column 6 (read-write, default 'enable'): Drop-and-Insert on a D/I WPIM.
ctDs1ExtensionsDIEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctDs1ExtensionsDIEnable.setStatus('mandatory')
if mibBuilder.loadTexts: ctDs1ExtensionsDIEnable.setDescription('Enables or disables Drop and Insert functionality on a D/I WPIM. If enabled, all zero assigned timeslots will designate D/I interface channels, if disabled all zero channels will designate dummy channels as on a T1 or E1.')
# Column 7 (read-only counter): total complete 15-minute intervals online
# (unlike RFC 1406 dsx1ValidIntervals, it does not stop at 24 hours).
ctDs1ExtensionsTotalValidIntervals = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctDs1ExtensionsTotalValidIntervals.setStatus('mandatory')
if mibBuilder.loadTexts: ctDs1ExtensionsTotalValidIntervals.setDescription('The number of previous intervals for which valid data was collected for the DS1 mib, which is currently RFC1406. This counter is similar to RFC1406 dsx1ValidIntervals except that the count represents the total number of complete 15 minute intervals since the ds1 interface has been online, and does not stop at the 24 hour period.')
# Columns 8-18: BERT control and results.
# Column 8 (read-write): which kind of process controls the test function.
wanDs1ExtensionsBertTestMode = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("internal", 2), ("manual", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wanDs1ExtensionsBertTestMode.setStatus('mandatory')
if mibBuilder.loadTexts: wanDs1ExtensionsBertTestMode.setDescription('This object is used to request that a type of process be in control of the testing function. This is used to differentiate between automated test procedures and those manually controlled by a user. When read, this object reflects which type of process is actually in control of the testing function. The implementation is free to prioritize or deny requests in a proprietary manner.')
# Column 9 (read-write): start/stop sending the test pattern.
wanDs1ExtensionsBertRun = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wanDs1ExtensionsBertRun.setStatus('mandatory')
if mibBuilder.loadTexts: wanDs1ExtensionsBertRun.setDescription('Controls the sending of test data over this physical port. The data is a pattern described by wanDs1ExtensionsBertTestPattern.')
# Columns 10-13 (read-only): last-period, cumulative, peak and average error
# results; rates are in bits per million bits.
wanDs1ExtensionsBertCurrentResults = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 1, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wanDs1ExtensionsBertCurrentResults.setStatus('mandatory')
if mibBuilder.loadTexts: wanDs1ExtensionsBertCurrentResults.setDescription('Indicates the results of the testing for the last completed sampling period. A non-negative number is the error rate in bits per million bits.')
wanDs1ExtensionsBertCumulativeResults = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wanDs1ExtensionsBertCumulativeResults.setStatus('mandatory')
if mibBuilder.loadTexts: wanDs1ExtensionsBertCumulativeResults.setDescription('Indicates the total number of errors since the testing was enabled. It is not a rate.')
wanDs1ExtensionsBertPeakResults = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 1, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wanDs1ExtensionsBertPeakResults.setStatus('mandatory')
if mibBuilder.loadTexts: wanDs1ExtensionsBertPeakResults.setDescription('Indicates the highest value of wanDs1ExtensionsBertCurrentResults since the testing was enabled. A non-negative number is the error rate in bits per million bits.')
wanDs1ExtensionsBertAverageResult = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 1, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wanDs1ExtensionsBertAverageResult.setStatus('mandatory')
if mibBuilder.loadTexts: wanDs1ExtensionsBertAverageResult.setDescription('Indicates the average value of wanDs1ExtensionsBertCurrentResults since the testing was enabled. A non-negative number is the error rate in bits per million bits.')
# Column 14 (read-write): test data pattern selection.
wanDs1ExtensionsBertTestPattern = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("patternOther", 1), ("pattern1s", 2), ("pattern63", 3), ("pattern511", 4), ("pattern2047", 5), ("pattern3in24", 6), ("patternQRSS", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wanDs1ExtensionsBertTestPattern.setStatus('mandatory')
if mibBuilder.loadTexts: wanDs1ExtensionsBertTestPattern.setDescription('This object is used to specify the pattern of the test data.')
# Column 15 (read-write): sampling period in seconds (1..3600, default 10 per
# the description text; the constraint is not encoded in the syntax here).
wanDs1ExtensionsBertSamplePeriod = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 1, 1, 15), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wanDs1ExtensionsBertSamplePeriod.setStatus('mandatory')
if mibBuilder.loadTexts: wanDs1ExtensionsBertSamplePeriod.setDescription('This object is the duration of time in seconds at which the current test results will be periodically written to wanDs1ExtensionsBertCurrentResults. The range is 1 to 3600, with a default value of 10.')
# Column 16 (read-only counter): sampling periods elapsed in the current run.
wanDs1ExtensionsBertNumPeriods = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 1, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wanDs1ExtensionsBertNumPeriods.setStatus('mandatory')
if mibBuilder.loadTexts: wanDs1ExtensionsBertNumPeriods.setDescription(' The number of sampling periods that have elapsed during the current test run.')
# Column 17 (read-write): trap-on-period-end enable (default disabled).
wanDs1ExtensionsBertTestTraps = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: wanDs1ExtensionsBertTestTraps.setStatus('mandatory')
if mibBuilder.loadTexts: wanDs1ExtensionsBertTestTraps.setDescription('This object is used to enable or disable the sending of a trap at the conclusion of the measurement period. The trap will contain the information described by ctDs1BertCurrentResults. The default value is disabled.')
# Column 18 (read-only): operating status of test-data transmission.
wanDs1ExtensionsBertDataStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 1, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("idle", 1), ("waitingForLink", 2), ("waitingForLoopback", 3), ("running", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: wanDs1ExtensionsBertDataStatus.setStatus('mandatory')
if mibBuilder.loadTexts: wanDs1ExtensionsBertDataStatus.setDescription('This object provides the operating status of the transmission of test data packets.')
# --- DS1 WAN driver table (OID ...52.4.1.2.7.2.2.2) --------------------------
# Per-channel HDLC driver settings for each DS1 port; rows are indexed by the
# (port, channel) pair ctDs1WanDriverEntryIndex + ctDs1WanDriverChannelIndex.
ctDs1WanDriverTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 2), )
if mibBuilder.loadTexts: ctDs1WanDriverTable.setStatus('mandatory')
if mibBuilder.loadTexts: ctDs1WanDriverTable.setDescription('This table is an extension to the standard ds1 configuration table. It is a list of items that are pertinant to the hdlc driver on a platform. There is one entry per ds1 physical port on the platform.')
ctDs1WanDriverEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 2, 1), ).setIndexNames((0, "CTRON-REMOTE-ACCESS-MIB", "ctDs1WanDriverEntryIndex"), (0, "CTRON-REMOTE-ACCESS-MIB", "ctDs1WanDriverChannelIndex"))
if mibBuilder.loadTexts: ctDs1WanDriverEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ctDs1WanDriverEntry.setDescription('A ds1 wan driver entry containing objects relating to the particular ds1 physical port pertaining to a specific channel on the wan driver chip.')
# Columns 1-2 (read-only): port index and channel index (the row's key).
ctDs1WanDriverEntryIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctDs1WanDriverEntryIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ctDs1WanDriverEntryIndex.setDescription('A unique value for each ds1 physical port on the platform.')
ctDs1WanDriverChannelIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctDs1WanDriverChannelIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ctDs1WanDriverChannelIndex.setDescription('A unique value for each channel on a wan driver on the platform.')
# Column 3 (read-write, default 'none'): per-channel line coding.
ctDs1WanDriverLineCode = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("jBZS", 2), ("invHDLC", 3))).clone('none')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctDs1WanDriverLineCode.setStatus('mandatory')
if mibBuilder.loadTexts: ctDs1WanDriverLineCode.setDescription('This object controls the individual channels line coding on a wan driver device on the platform.')
# Column 4 (read-write, default 'crc16'): per-channel CRC generation width.
ctDs1WanDriverCRCBits = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 2, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("crc16", 1), ("crc32", 2))).clone('crc16')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctDs1WanDriverCRCBits.setStatus('mandatory')
if mibBuilder.loadTexts: ctDs1WanDriverCRCBits.setDescription('This object controls the individual channels CRC bits generation on a wan driver device on the platform.')
# --- RS-232 extensions table (OID ...52.4.1.2.7.2.3.1) -----------------------
# Extension to the RS-232-like MIB (RFC 1317): one row per synchronous port.
# Columns 2-5 control whether the platform asserts CTS/DSR/RTS/DTR; the
# actual line states are read from the standard RS-232-like MIB.
ctRs232ExtensionsTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 3, 1), )
if mibBuilder.loadTexts: ctRs232ExtensionsTable.setReference('rfc-1317')
if mibBuilder.loadTexts: ctRs232ExtensionsTable.setStatus('mandatory')
if mibBuilder.loadTexts: ctRs232ExtensionsTable.setDescription('This table is an extension to the standard rs232-like mib. It is a list of items that are pertinant to rs232-like ports on a platform. There is one entry per synchronous physical port on the platform.')
ctRs232ExtensionsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 3, 1, 1), ).setIndexNames((0, "CTRON-REMOTE-ACCESS-MIB", "ctRs232ExtensionsEntryIndex"))
if mibBuilder.loadTexts: ctRs232ExtensionsEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ctRs232ExtensionsEntry.setDescription('A rs232-like extensions entry containing objects relating to the particular rs232-like physical port.')
# Column 1 (read-only): unique per-port index.
ctRs232ExtensionsEntryIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 3, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctRs232ExtensionsEntryIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ctRs232ExtensionsEntryIndex.setDescription('A unique value for each rs232-like physical port on the platform.')
# Columns 2-5 (read-write, default 'disable'): generation enables for the
# four modem-control signals.
ctRs232ExtensionsCTSEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRs232ExtensionsCTSEnable.setReference('rfc-1317')
if mibBuilder.loadTexts: ctRs232ExtensionsCTSEnable.setStatus('mandatory')
if mibBuilder.loadTexts: ctRs232ExtensionsCTSEnable.setDescription('This object indicates whether the platform should generate CTS. When disabled the platform will not generate CTS, when enabled, the platform will generate CTS. The actual state of the CTS line is available in the rs-232 like mib.')
ctRs232ExtensionsDSREnable = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 3, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRs232ExtensionsDSREnable.setReference('rfc-1317 ')
if mibBuilder.loadTexts: ctRs232ExtensionsDSREnable.setStatus('mandatory')
if mibBuilder.loadTexts: ctRs232ExtensionsDSREnable.setDescription('This object indicates whether the platform should generate DSR. When disabled the platform will not generate DSR, when enabled, the platform will generate DSR. The actual state of the DSR line is available in the rs-232 like mib.')
ctRs232ExtensionsRTSEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 3, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRs232ExtensionsRTSEnable.setReference('rfc-1317')
if mibBuilder.loadTexts: ctRs232ExtensionsRTSEnable.setStatus('mandatory')
if mibBuilder.loadTexts: ctRs232ExtensionsRTSEnable.setDescription('This object indicates whether the platform should generate RTS. When disabled the platform will not generate RTS, when enabled, the platform will generate RTS. The actual state of the RTS line is available in the rs-232 like mib.')
ctRs232ExtensionsDTREnable = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctRs232ExtensionsDTREnable.setReference('rfc-1317 ')
if mibBuilder.loadTexts: ctRs232ExtensionsDTREnable.setStatus('mandatory')
if mibBuilder.loadTexts: ctRs232ExtensionsDTREnable.setDescription('This object indicates whether the platform should generate DTR. When disabled the platform will not generate DSR, when enabled, the platform will generate DTR. The actual state of the DTR line is available in the rs-232 like mib.')
# --- Frame-relay data-compression circuit table (OID ...52.4.1.2.7.2.4.1) ----
# Data Compression Protocol (DCP) state per frame-relay virtual circuit;
# supports the RFC 1315 frCircuitTable.  Rows are keyed by (ifIndex, DLCI).
frDcpCircuitTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 4, 1), )
if mibBuilder.loadTexts: frDcpCircuitTable.setStatus('mandatory')
if mibBuilder.loadTexts: frDcpCircuitTable.setDescription('A table containing data compression information about specific Data Link Connection Identifiers and corresponding virtual circuit. This information is used to support the frCircuitTable following RFC-1315.')
frDcpCircuitEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 4, 1, 1), ).setIndexNames((0, "CTRON-REMOTE-ACCESS-MIB", "frDcpCircuitIfIndex"), (0, "CTRON-REMOTE-ACCESS-MIB", "frDcpCircuitDlci"))
if mibBuilder.loadTexts: frDcpCircuitEntry.setStatus('mandatory')
if mibBuilder.loadTexts: frDcpCircuitEntry.setDescription('The data compression information regarding a single Data Link Connection Identifier.')
# Column 1 (read-only, Index textual convention): underlying ifEntry.
frDcpCircuitIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 4, 1, 1, 1), Index()).setMaxAccess("readonly")
if mibBuilder.loadTexts: frDcpCircuitIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: frDcpCircuitIfIndex.setDescription('The ifIndex value of ifEntry that this DCP virtual circuit object is layered onto.')
# Column 2 (read-only, DLCI textual convention): circuit identifier.
frDcpCircuitDlci = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 4, 1, 1, 2), DLCI()).setMaxAccess("readonly")
if mibBuilder.loadTexts: frDcpCircuitDlci.setReference('Draft American National Standard T1.618-1991, Section 3.3.6')
if mibBuilder.loadTexts: frDcpCircuitDlci.setStatus('mandatory')
if mibBuilder.loadTexts: frDcpCircuitDlci.setDescription('The Data Link Connection Identifier for this DCP virtual circuit object.')
# Column 3 (read-write, default 'off'): request compression on this circuit.
frDcpCircuitEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 4, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("on", 1), ("off", 2))).clone('off')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: frDcpCircuitEnable.setStatus('mandatory')
if mibBuilder.loadTexts: frDcpCircuitEnable.setDescription('Indicates whether data compression should take place on this particular end side virtual circuit.')
# Column 4 (read-only): whether compression was negotiated and is operating.
frDcpCircuitStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 4, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("on", 1), ("off", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: frDcpCircuitStatus.setStatus('mandatory')
if mibBuilder.loadTexts: frDcpCircuitStatus.setDescription('On indicates that the compression has been successfully negotiated and is operating. Off indicates that the compression negotiation has failed and compression is not operating at this time, or compression has been terminated by either peer.')
# Column 5 (read-only, fixed 5-octet string): compression ratio display.
frDcpCircuitRatio = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 4, 1, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(5, 5)).setFixedLength(5)).setMaxAccess("readonly")
if mibBuilder.loadTexts: frDcpCircuitRatio.setStatus('mandatory')
if mibBuilder.loadTexts: frDcpCircuitRatio.setDescription('The ratio of uncompressed to compressed transmitted data. If the data compression status is off, 1:1 will be displayed.')
# --- DDS configuration table (OID ...52.4.1.2.7.2.5.1) -----------------------
# Digital Data Service line configuration, one row per DDS physical port,
# indexed by ctDDSLineIndex: line mode (PRI/SC/CC), line coding, loopback,
# alarm status, clock source and operational in/out speeds.
ctDDSConfigTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 5, 1), )
if mibBuilder.loadTexts: ctDDSConfigTable.setStatus('mandatory')
if mibBuilder.loadTexts: ctDDSConfigTable.setDescription('The DDS Configuration Table')
ctDDSConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 5, 1, 1), ).setIndexNames((0, "CTRON-REMOTE-ACCESS-MIB", "ctDDSLineIndex"))
if mibBuilder.loadTexts: ctDDSConfigEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ctDDSConfigEntry.setDescription('An Entry in the DDS Configuration Table')
# Columns 1-2 (read-only): row index and associated ifIndex.
ctDDSLineIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 5, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctDDSLineIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ctDDSLineIndex.setDescription('A unique value for each ds1 physical port on the platform.')
ctDDSIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 5, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctDDSIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ctDDSIfIndex.setDescription('Index associated with this dds physical port')
# Column 3 (read-write): DDS service variant — see description for the
# primary/secondary-channel framing details of each mode.
ctDDSLineMode = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 5, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("ddsPri", 1), ("ddsSc", 2), ("ddsCc", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctDDSLineMode.setStatus('mandatory')
if mibBuilder.loadTexts: ctDDSLineMode.setDescription('DDS-PRI is full duplex 56k link, No framing in loop data so loop speed is also 56k. Data stream: --d1 d2 d3 d4 d5 d6 d7-- DDS-SC is also full duplex 56k link, requires framing bits to distinguish between primary and secondary channels. --d1 d2 d3 d4 d5 d6 d7 F C/S-- where F is framing bit and C/S for secondary channel as well as used for control bit. With these two extra bits the loop speed is increased to 72k. primary channel rate is 56k. DDS-CC is full duplex 64k link. Does not require framing bit but uses a same format as SC, nineth bit position is lumped with primary channel and not used for control. Loop speed is 72k, Primary channel rate is 64k. --d1 d2 d3 d4 d5 d6 d7 F d8--')
# Column 4 (read-write): zero-code suppression variety.
ctDDSLineCoding = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 5, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("ddsNone", 1), ("ddsJBZS", 2), ("otherLineCode", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctDDSLineCoding.setStatus('mandatory')
if mibBuilder.loadTexts: ctDDSLineCoding.setDescription('This object describes the variety of Zero Code suppression on this interface. JBZS refers to Jammed Bit Zero suppresion, which forces a 1 bit every 8 bit periods. None refers to a mode in which some other means is used to insure pulse density requirements.')
# Column 5 (read-write): loopback configuration of the interface.
ctDDSLoopbackConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 5, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("ddsNoLoop", 1), ("ddsLocalLoop", 2), ("ddsLineLoop", 3), ("ddsOtherLoop", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctDDSLoopbackConfig.setStatus('mandatory')
if mibBuilder.loadTexts: ctDDSLoopbackConfig.setDescription('This variable represents the loopback configuration of the DDS interface. ddsNoLoop means the interface is not in loopback state, ddsLocalLoop means the signal is transmitted and looped back to the same interface, ddsLineLoop means the received signal at this interface does not go through the device but is looped back out, and ddsOtherLoop represents a loop not defined here.')
# Column 6 (read-only): current alarm/operational status of the line.
ctDDSLineStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 5, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("ddsNoAlarm", 1), ("ddsLossOfSignal", 2), ("ddsOutOfService", 3), ("ddsOutOfFrame", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctDDSLineStatus.setStatus('mandatory')
if mibBuilder.loadTexts: ctDDSLineStatus.setDescription('This variable describes the current operational status of DDS line.')
# Column 7 (read-write): transmit clock source selection.
ctDDSTxClockSource = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 5, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("ddsLoopTiming", 1), ("ddsLocalTiming", 2), ("ddsThroughTiming", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctDDSTxClockSource.setStatus('mandatory')
if mibBuilder.loadTexts: ctDDSTxClockSource.setDescription("The source of Transmit clock. 'ddsLoopTiming' indicates that the recovered receive clock is used as transmit clock. 'ddsLocalTiming' indicates that a local clock source is used. 'ddsThroughTiming' indicates that recovered receive clock from another interface is used as transmit clock.")
# Columns 8-9 (read-only): operational receive/transmit speeds, derived
# from the DDS type per the descriptions.
ctDDSPortInSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 5, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctDDSPortInSpeed.setStatus('mandatory')
if mibBuilder.loadTexts: ctDDSPortInSpeed.setDescription(' This variable describes the current operational receive speed of DDS line. It is a read only value based entirely on DDS type (CC or PRI).')
ctDDSPortOutSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 5, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctDDSPortOutSpeed.setStatus('mandatory')
if mibBuilder.loadTexts: ctDDSPortOutSpeed.setDescription('This variable describes the current operational transmit speed of DDS line. It is a read only value based entirely on DDS type CC or PRI')
# --- PPP counters table (OID ...52.4.1.2.7.2.6.1) ----------------------------
# Per-link PPP negotiation counters/timers (Max-Terminate, Max-Configure,
# Max-Failure, Restart timer), indexed by ctPppCountersIfIndex.
ctPppCountersTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 1), )
if mibBuilder.loadTexts: ctPppCountersTable.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppCountersTable.setDescription('Table containing the parameters for the local PPP entity related to the counters and timers.')
ctPppCountersEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 1, 1), ).setIndexNames((0, "CTRON-REMOTE-ACCESS-MIB", "ctPppCountersIfIndex"))
if mibBuilder.loadTexts: ctPppCountersEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppCountersEntry.setDescription('PPP counter/timer information for a particular PPP link.')
# Column 1: ifIndex of the PPP link (declared read-write here, per source).
ctPppCountersIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppCountersIfIndex.setReference('rfc-1213')
if mibBuilder.loadTexts: ctPppCountersIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppCountersIfIndex.setDescription('The ifIndex of the interface. This is the index of this entry in the ifTable.')
# Column 2 (read-write): Max-Terminate retry count.
ctPppCountersMaxTerminate = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppCountersMaxTerminate.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppCountersMaxTerminate.setDescription('MaxTerminate indicates the number of Terminate- Request packets sent without receiving a Terminate-Ack before assuming that the peer in unable to respond.')
# Column 3 (read-write): Max-Configure retry count.
ctPppCountersMaxConfigure = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppCountersMaxConfigure.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppCountersMaxConfigure.setDescription('MaxConfigure indicates the number of Configure- Request packets sent without receiving a Configure-Ack, Configre-Nak or Configure-Reject before assuming that the peer in unable to respond.')
# Column 4 (read-write): Max-Failure (Configure-Nak) threshold.
ctPppCountersMaxFailure = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppCountersMaxFailure.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppCountersMaxFailure.setDescription('MaxFailure indicates the number of Configure-Nak packets sent without sending a Configure-Ack before assuming that the configuration is not converging. Any further Configure-Reject packets for peer requested options are converted to Configure-Reject packets, and locally desires options are no longer appended.')
# Column 5 (read-write): Restart timer for Configure/Terminate-Request.
ctPppCountersRestartTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppCountersRestartTimer.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppCountersRestartTimer.setDescription('The Restart timer is used to time transmissions of Configure-Request and Terminate-Request packets. Expiration of the Restart-Timer causes a Timeout event, and retransmission of the corresponding Configure-Request or Terminate-Request packet.')
# ---- ctPppLcpExtTable: per-link PPP LCP extension parameters ----
# One row per PPP link, indexed by ctPppLcpExtIfIndex.
ctPppLcpExtTable = MibTable(
    (1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 2),
)
if mibBuilder.loadTexts:
    ctPppLcpExtTable.setStatus('mandatory')
if mibBuilder.loadTexts:
    ctPppLcpExtTable.setDescription('Table containing the parameters for the local PPP entity related to the counters and timers.')
# Conceptual row for the table above.
ctPppLcpExtEntry = MibTableRow(
    (1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 2, 1),
).setIndexNames((0, "CTRON-REMOTE-ACCESS-MIB", "ctPppLcpExtIfIndex"))
if mibBuilder.loadTexts:
    ctPppLcpExtEntry.setStatus('mandatory')
if mibBuilder.loadTexts:
    ctPppLcpExtEntry.setDescription('PPP LCP configurationinformation for a particular PPP link.')
# Columns of the ctPppLcpExt table (OID prefix ...52.4.1.2.7.2.6.2.1):
# per-link LCP option controls. Unless noted, columns are read-write and
# the enabled(1)/disabled(2) enumerations act as on/off switches.

# Column 1: row index — MIB-II ifIndex of the PPP link.
ctPppLcpExtIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppLcpExtIfIndex.setReference('rfc-1213')
if mibBuilder.loadTexts: ctPppLcpExtIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppLcpExtIfIndex.setDescription('The ifIndex of the interface. This is the index of this entry in the ifTable.')
# Column 2: authentication protocol in use — none(1)/pap(2)/chap(3), read-only.
ctPppLcpExtAuthenticationProt = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("pap", 2), ("chap", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctPppLcpExtAuthenticationProt.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppLcpExtAuthenticationProt.setDescription('Enables a particular authentication protocol on this PPP link(now modified for read-only.')
# Column 3: link-quality protocol selector — none(1)/lqr(2).
ctPppLcpExtQualityProt = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("none", 1), ("lqr", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppLcpExtQualityProt.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppLcpExtQualityProt.setDescription('Enables a particular link-quality protocol on this PPP link.')
# Column 4: Protocol Field Compression on/off.
ctPppLcpExtPFC = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppLcpExtPFC.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppLcpExtPFC.setDescription('Enables protocol field compression on this PPP link.')
# Column 5: Address-and-Control Field Compression on/off.
ctPppLcpExtACFC = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppLcpExtACFC.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppLcpExtACFC.setDescription('Enables address and control field compression on this PPP link.')
# Column 6: self-describing-padding option (value range 1..256).
ctPppLcpExtSelfDescribePadding = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppLcpExtSelfDescribePadding.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppLcpExtSelfDescribePadding.setDescription('This Configuration Option provides a method for an implementation to indicate to the peer that it understands self-describing pads when padding is added at the end of the PPP Information field.')
# Column 7: LCP Callback option (value range 1..6).
ctPppLcpExtCallback = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 6))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppLcpExtCallback.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppLcpExtCallback.setDescription('This Configuration Option provides a method for an implementation to request a dial-up peer to call back.')
# Column 8: compound-frames option on/off.
ctPppLcpExtCompoundFrames = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 2, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppLcpExtCompoundFrames.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppLcpExtCompoundFrames.setDescription('This Configuration Option provides a method for an implementation to send multiple PPP encapsulated packets within the same frame.')
# Column 9: MRU option negotiation on/off.
ctPppLcpExtMru = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 2, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppLcpExtMru.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppLcpExtMru.setDescription('Enables and disables Mru status.')
# Column 10: ACCM option negotiation on/off.
ctPppLcpExtAccm = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 2, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppLcpExtAccm.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppLcpExtAccm.setDescription('Enables and disables Accm status.')
# Column 11: Echo-Request interval; 0 disables echo requests.
ctPppLcpExtEchoRequest = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 2, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppLcpExtEchoRequest.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppLcpExtEchoRequest.setDescription('A zero indicated no Echo Request and any other value provides an interval to send Echo-Requests to the Peer. The Echo-Request provides a Data Link Layer loopback mechanism for use in exercising both directions of the link. This is useful in debugging, link quality determination. LCP MUST be in the OPENED state for an Echo-Request to occur.')
# Column 12: count of correctly received Echo-Replies; read-only.
ctPppLcpExtReplyCounter = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 2, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctPppLcpExtReplyCounter.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppLcpExtReplyCounter.setDescription('Increments a counter if the Local Device correctly received an Echo-Reply from the Peer. LCP MUST be in the OPENED state for an Echo-Reply to occur.')
# Column 13: Multilink Protocol (MP) negotiation on/off.
ctPppLcpExtMpCapable = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 2, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppLcpExtMpCapable.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppLcpExtMpCapable.setDescription('Multilink Protocol (MP) option selector. Set to a value of one when MP support is desired otherwise set to a value of two. The default value is two. When set to two the attempt is made to negotiate MP support. MP support is symmetrical.')
# ---- ctPppBncpExtTable: per-link PPP BNCP extension parameters ----
# One row per PPP link, indexed by ctPppBncpExtIfIndex.
ctPppBncpExtTable = MibTable(
    (1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 3),
)
if mibBuilder.loadTexts:
    ctPppBncpExtTable.setStatus('mandatory')
if mibBuilder.loadTexts:
    ctPppBncpExtTable.setDescription('Table containing the parameters for the local PPP entity related to the counters and timers.')
# Conceptual row for the table above.
ctPppBncpExtEntry = MibTableRow(
    (1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 3, 1),
).setIndexNames((0, "CTRON-REMOTE-ACCESS-MIB", "ctPppBncpExtIfIndex"))
if mibBuilder.loadTexts:
    ctPppBncpExtEntry.setStatus('mandatory')
if mibBuilder.loadTexts:
    ctPppBncpExtEntry.setDescription('PPP LCP configuration information for a particular PPP link.')
# Columns of the ctPppBncpExt table (OID prefix ...52.4.1.2.7.2.6.3.1).

# Column 1: row index — MIB-II ifIndex of the PPP link.
ctPppBncpExtIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppBncpExtIfIndex.setReference('rfc-1213')
if mibBuilder.loadTexts: ctPppBncpExtIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppBncpExtIfIndex.setDescription('The ifIndex of the interface. This is the index of this entry in the ifTable.')
# Column 2: CRC transmission switch — enabled(1, default)/disabled(2),
# effective only when the platform supports CRC (per the description text).
ctPppBncpExtCrcStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppBncpExtCrcStatus.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppBncpExtCrcStatus.setDescription('Enables and disables CRC status. First our platform must support CRC. If the platform does, then setting to disabled will not send CRC. The default is enabled, which sends CRC.')
# ---- ctPppMpExtTable: per-link Multilink PPP (MP) bandwidth parameters ----
# One row per PPP link, indexed by ctPppMpExtIfIndex.
ctPppMpExtTable = MibTable(
    (1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 4),
)
if mibBuilder.loadTexts:
    ctPppMpExtTable.setStatus('mandatory')
if mibBuilder.loadTexts:
    ctPppMpExtTable.setDescription('Table containing the parameters for the local PPP entity related to the counters and timers.')
# Conceptual row for the table above.
ctPppMpExtEntry = MibTableRow(
    (1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 4, 1),
).setIndexNames((0, "CTRON-REMOTE-ACCESS-MIB", "ctPppMpExtIfIndex"))
if mibBuilder.loadTexts:
    ctPppMpExtEntry.setStatus('mandatory')
if mibBuilder.loadTexts:
    ctPppMpExtEntry.setDescription('PPP LCP configurationinformation for a particular PPP link.')
# Columns of the ctPppMpExt table (OID prefix ...52.4.1.2.7.2.6.4.1):
# dial-on-demand bandwidth management for Multilink PPP.
# NOTE(review): columns 2-8 are named with the ctPppLcpExtMp* prefix even
# though they belong to the MpExt row — this mirrors the source MIB.

# Column 1: row index — MIB-II ifIndex of the PPP link.
ctPppMpExtIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppMpExtIfIndex.setReference('rfc-1213')
if mibBuilder.loadTexts: ctPppMpExtIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppMpExtIfIndex.setDescription('The ifIndex of the interface. This is the index of this entry in the ifTable.')
# Column 2: Line Utilization Threshold percentage (1..100).
ctPppLcpExtMpLUT = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 4, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 100))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppLcpExtMpLUT.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppLcpExtMpLUT.setDescription('Used to set the Line Utilization Threshold (LUT) % value to compare against the linear weighted percentage by which to determine when more/less bandwidth is to be added/removed. Linear weighting is averaged over the period of time specified by ctPppLcpExtMpHistoryTime.')
# Column 3: averaging window in seconds (1..65535) for line utilization.
ctPppLcpExtMpHistoryTime = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 4, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppLcpExtMpHistoryTime.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppLcpExtMpHistoryTime.setDescription('Used to set the history time value in seconds for the number of line utilization reading(s) desired to compute the average line utilization. That is to say, it specifies the window size over which to compute the average line utilization.')
# Column 4: consecutive above-threshold readings needed to add bandwidth.
ctPppLcpExtMpMoreBW = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 4, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppLcpExtMpMoreBW.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppLcpExtMpMoreBW.setDescription('Used to set the number of consecutive line utilization average readings that must exceed ctPppLcpExtMpLUT as a condition of when to increase the bandwidth if more BW is permitted.')
# Column 5: consecutive below-threshold readings needed to remove bandwidth.
ctPppLcpExtMpLessBW = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 4, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppLcpExtMpLessBW.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppLcpExtMpLessBW.setDescription('Used to set the number of consecutive line utilization average readings that must fall below ctPppLcpExtMpLUT as a condition of when to remove possible previously added bandwidth.')
# Column 6: upper bound on channels for an MP connection (1..255).
ctPppLcpExtMpMaxChannels = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 4, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppLcpExtMpMaxChannels.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppLcpExtMpMaxChannels.setDescription('Used to set the maxium number of channels an MP capable connection is allowed.')
# Column 7: channels added per bandwidth-increase event.
ctPppLcpExtMpChannelsToAdd = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 4, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppLcpExtMpChannelsToAdd.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppLcpExtMpChannelsToAdd.setDescription('Used to set the number of additional channel(s) to increment by whenever the need for more bandwidth is determined.')
# Column 8: channels removed per bandwidth-decrease event.
ctPppLcpExtMpChannelsToRemove = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 4, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppLcpExtMpChannelsToRemove.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppLcpExtMpChannelsToRemove.setDescription('Used to set the number of channel(s) to decrement by whenever the need for possible previously added bandwidth is determined to no longer be needed.')
# ---- ctPppEcpExtTable: per-link Encryption Control Protocol parameters ----
# One row per PPP link, indexed by ctPppEcpExtIfIndex.
ctPppEcpExtTable = MibTable(
    (1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 5),
)
if mibBuilder.loadTexts:
    ctPppEcpExtTable.setStatus('mandatory')
if mibBuilder.loadTexts:
    ctPppEcpExtTable.setDescription('Table containing the parameters for the local PPP entity related to the encrpytion control protocol.')
# Conceptual row for the table above.
ctPppEcpExtEntry = MibTableRow(
    (1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 5, 1),
).setIndexNames((0, "CTRON-REMOTE-ACCESS-MIB", "ctPppEcpExtIfIndex"))
if mibBuilder.loadTexts:
    ctPppEcpExtEntry.setStatus('mandatory')
if mibBuilder.loadTexts:
    ctPppEcpExtEntry.setDescription('PPP ECP configuration information for a particular PPP link.')
# Columns of the ctPppEcpExt table (OID prefix ...52.4.1.2.7.2.6.5.1):
# DES key material for PPP link encryption.
# NOTE(review): key and IV are read-write DisplayStrings — i.e. secrets
# retrievable over SNMP; this mirrors the source MIB.

# Column 1: row index — ifIndex of the PPP link (range limited to 1..1024
# here, unlike the other tables' full Integer32 range).
ctPppEcpExtIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 1024))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppEcpExtIfIndex.setReference('rfc-1213')
if mibBuilder.loadTexts: ctPppEcpExtIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppEcpExtIfIndex.setDescription('The ifIndex of the interface. This is the index of this entry in the ifTable.')
# Column 2: DES key — fixed-length 14-character hex string (56 bits).
ctPppEcpKey = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 5, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(14, 14)).setFixedLength(14)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppEcpKey.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppEcpKey.setDescription('A 56-bit key (14 hex characters) used for the DES encryption.')
# Column 3: DES-CBC initialization vector — fixed-length 16-character hex string (64 bits).
ctPppEcpIV = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 6, 5, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(16, 16)).setFixedLength(16)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctPppEcpIV.setStatus('mandatory')
if mibBuilder.loadTexts: ctPppEcpIV.setDescription('A 64-bit initialization vector (16 hex characters) used for the DES encryption in CBC mode.')
# ---- ctWanalyzerTable: WANalyzer control parameters, one row per interface ----
# Indexed by ctWanalyzerIfIndex.
ctWanalyzerTable = MibTable(
    (1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 7, 1),
)
if mibBuilder.loadTexts:
    ctWanalyzerTable.setStatus('mandatory')
if mibBuilder.loadTexts:
    ctWanalyzerTable.setDescription('Table containing the parameters for the local Wanalyzer entity related to the configured Protocol.')
# Conceptual row for the table above.
ctWanalyzerEntry = MibTableRow(
    (1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 7, 1, 1),
).setIndexNames((0, "CTRON-REMOTE-ACCESS-MIB", "ctWanalyzerIfIndex"))
if mibBuilder.loadTexts:
    ctWanalyzerEntry.setStatus('mandatory')
if mibBuilder.loadTexts:
    ctWanalyzerEntry.setDescription('Information for a particular interface.')
# Columns of the ctWanalyzer table (OID prefix ...52.4.1.2.7.2.7.1.1):
# WANalyzer enable/clear/display controls. Several columns are described as
# absent when the WANalyzer is disabled.

# Column 1: row index — MIB-II ifIndex of the interface.
ctWanalyzerIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 7, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctWanalyzerIfIndex.setReference('rfc-1213')
if mibBuilder.loadTexts: ctWanalyzerIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ctWanalyzerIfIndex.setDescription('The ifIndex of the interface. This is the index of this entry in the ifTable.')
# Column 2: persistent on/off flag. NOTE(review): unlike the PPP tables,
# here disabled=1 and enabled=2 — the enumeration order is reversed.
ctWanalyzerEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 7, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctWanalyzerEnabled.setStatus('mandatory')
if mibBuilder.loadTexts: ctWanalyzerEnabled.setDescription('This Persistent Object is a flag which indicates whether or not the WANalyzer is enabled or disabled. Upon system initialization this flag defaults to disabled unless a value for this object is found in NVRAM.')
# Column 3: persistent cap on message-table size (unconstrained Integer32 here;
# the description says it must be > 1).
ctWanalyzerMaxEntries = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 7, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctWanalyzerMaxEntries.setStatus('mandatory')
if mibBuilder.loadTexts: ctWanalyzerMaxEntries.setDescription('This Persistent Object is an integer greater than 1 which specifies the maximum total number of entries which the table will handle. When the WANalyzer is enabled this value defaults to a value dependent on the implementation unless a value is found in NVRAM.')
# Column 4: write 2 to clear the whole message table; resets itself to 1.
ctWanalyzerClearAll = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 7, 1, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctWanalyzerClearAll.setStatus('mandatory')
if mibBuilder.loadTexts: ctWanalyzerClearAll.setDescription('When set to 2, the entire table of entries is cleared. Upon clearing of the table, this value is reset to 1. This object is not present if the WANalyzer is disabled.')
# Column 5: write 2 to clear entries for this interface only; resets to 1.
ctWanalyzerClearInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 7, 1, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctWanalyzerClearInterface.setStatus('mandatory')
if mibBuilder.loadTexts: ctWanalyzerClearInterface.setDescription('When set to 2, all entries for this MIB-II interface index will be removed from the table. Upon complete removal of those entries, this value is reset to 1. This object is not present if the WANalyzer is disabled.')
# Column 6: interface filter for display; -1 means show all interfaces.
ctWanalyzerDisplayInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 7, 1, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctWanalyzerDisplayInterface.setStatus('mandatory')
if mibBuilder.loadTexts: ctWanalyzerDisplayInterface.setDescription('When set to a valid MIB-II interface number, the table displays only those entries in the table which were received from that MIB-II interface. This value is set to -1 upon initialization and indicates that entries from all interfaces will be displayed. This object is not present if the WANalyzer is disabled.')
# Column 7: current number of message-table entries; read-only.
ctWanalyzerCurrEntries = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 7, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctWanalyzerCurrEntries.setStatus('mandatory')
if mibBuilder.loadTexts: ctWanalyzerCurrEntries.setDescription('A non-negative integer indicating the current number of entries in the table. This object is not present if the WANalyzer is disabled.')
# ---- wanalyzerMessageTable: captured WANalyzer message log ----
# One row per logged message, indexed by wanMessageIndex.
wanalyzerMessageTable = MibTable(
    (1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 7, 2),
)
if mibBuilder.loadTexts:
    wanalyzerMessageTable.setStatus('mandatory')
if mibBuilder.loadTexts:
    wanalyzerMessageTable.setDescription('A table containing all of the message entries for this device. If the WANalyzer is disabled, this table will not be present.')
# Conceptual row for the table above.
wanalyzerEntry = MibTableRow(
    (1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 7, 2, 1),
).setIndexNames((0, "CTRON-REMOTE-ACCESS-MIB", "wanMessageIndex"))
if mibBuilder.loadTexts:
    wanalyzerEntry.setStatus('mandatory')
if mibBuilder.loadTexts:
    wanalyzerEntry.setDescription('A message entry in the WANalyzer table corresponding to a particular interface in a particular instance.')
# Columns of the wanalyzerMessage table (OID prefix ...52.4.1.2.7.2.7.2.1):
# read-only attributes of each logged message.

# Column 1: position of this entry in the table (1..wanalyzerMaxEntries
# per the description; the Integer32 itself is unconstrained here).
wanMessageIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 7, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wanMessageIndex.setStatus('mandatory')
if mibBuilder.loadTexts: wanMessageIndex.setDescription('An integer between 1 and wanalyzerMaxEntries which indicates the overall position this entry has in the WANalyzer table.')
# Column 2: MIB-II ifIndex of the interface that produced the message.
wanMessageInterfaceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 7, 2, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: wanMessageInterfaceIndex.setStatus('mandatory')
if mibBuilder.loadTexts: wanMessageInterfaceIndex.setDescription('The MIB-II interface index representing the interface which sent this message.')
# Column 3: receive date, fixed 8-character MMDDYY-format DisplayString.
# NOTE(review): MMDDYY is only 6 characters; the fixed length of 8 mirrors
# the source MIB — format vs. size mismatch left as-is.
wanMessageDate = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 7, 2, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wanMessageDate.setStatus('mandatory')
if mibBuilder.loadTexts: wanMessageDate.setDescription('The date when this message was received by the WANalyzer in the standard MMDDYY format.')
# Column 4: receive time of day, fixed 6-character HHMMSS DisplayString.
wanMessageTime = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 7, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readonly")
if mibBuilder.loadTexts: wanMessageTime.setStatus('mandatory')
if mibBuilder.loadTexts: wanMessageTime.setDescription('The time of day when this message was received by the WANalyzer in the standard HHMMSS format.')
wanMessageCode = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 7, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256), SingleValueConstraint(257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347, 348, 349, 350, 351, 352, 353, 354, 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 
388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 1000, 1001, 1002, 1003, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510))).clone(namedValues=NamedValues(("wanalyzerLastMessageRepeated", 1), ("pppAuthentication", 2), ("pppBncpThisLayerStart", 3), ("pppBncpThisLayerFinished", 4), ("pppBncpThisLayerUp", 5), ("pppBncpThisLayerDown", 6), ("pppBncpInitializeRestartCount", 7), ("pppBncpZeroRestartCount", 8), ("pppBncpRcvConfReqGood", 9), ("pppBncpRcvConfReqBad", 10), ("pppBncpReceiveConfigureAck", 11), ("pppBncpReceiveConfigureNak", 12), ("pppBncpReceiveConfigureReject", 13), ("pppBncpReceiveTerminateRequest", 14), ("pppBncpReceiveTerminateAck", 15), ("pppBncpReceiveCodeRejectPermitted", 16), ("pppBncpReceiveCodeRejectCatastrophic", 17), ("pppBncpReceiveProtocolRejectPermitted", 18), ("pppBncpReceiveEchoRequest", 19), ("pppBncpReceiveEchoReply", 20), ("pppBncpReceiveDiscardRequest", 21), ("pppBncpReceiveUnknownCode", 22), ("pppBncpIllegalAction", 23), ("pppBncpSendConfigureRequest", 24), ("pppBncpSendConfigureAck", 25), ("pppBncpSendConfigureNak", 26), ("pppBncpSendConfigureReject", 27), ("pppBncpSendTerminateRequest", 28), ("pppBncpSendTerminateAck", 29), ("pppBncpSendCodeReject", 30), ("pppBncpSendProtocolReject", 31), ("pppBncpSendEchoReply", 32), ("pppBncpInitialState", 33), ("pppBncpStartingState", 34), ("pppBncpClosedState", 35), ("pppBncpStoppedState", 36), ("pppBncpClosingState", 37), ("pppBncpStoppingState", 38), ("pppBncpReqSentState", 39), ("pppBncpAckRcvdState", 40), ("pppBncpAckSentState", 41), ("pppBncpOpenedState", 42), ("pppBncpEthernetMacType", 43), ("pppBncpTokenRingMacType", 44), ("pppBncpFddiMacType", 45), 
("pppBncpBridgeIdRcvReq", 46), ("pppBncpBridgeIdRcvNak", 47), ("pppBncpBridgeIdRcvRej", 48), ("pppBncpBridgeIdXmitReq", 49), ("pppBncpMacSelectRcvReq", 50), ("pppBncpMacSelectRcvNak", 51), ("pppBncpMacSelectRcvRej", 52), ("pppBncpMacSelectXmitReq", 53), ("pppBncpTinygramRcvReq", 54), ("pppBncpTinygramRcvNak", 55), ("pppBncpTinygramRcvRej", 56), ("pppBncpTinygramXmitReq", 57), ("pppBncpLanIdRcvReq", 58), ("pppBncpLanIdRcvNak", 59), ("pppBncpLanIdRcvRej", 60), ("pppBncpLanIdXmitReq", 61), ("pppCcpThisLayerStart", 62), ("pppCcpThisLayerFinished", 63), ("pppCcpThisLayerUp", 64), ("pppCcpThisLayerDown", 65), ("pppCcpInitializeRestartCount", 66), ("pppCcpZeroRestartCount", 67), ("pppCcpRcvConfReqGood", 68), ("pppCcpRcvConfReqBad", 69), ("pppCcpReceiveConfigureAck", 70), ("pppCcpReceiveConfigureNak", 71), ("pppCcpReceiveConfigureReject", 72), ("pppCcpReceiveTerminateRequest", 73), ("pppCcpReceiveTerminateAck", 74), ("pppCcpReceiveCodeRejectPermitted", 75), ("pppCcpReceiveCodeRejectCatastrophic", 76), ("pppCcpReceiveProtocolRejectPermitted", 77), ("pppCcpReceiveEchoRequest", 78), ("pppCcpReceiveEchoReply", 79), ("pppCcpReceiveDiscardRequest", 80), ("pppCcpReceiveUnknownCode", 81), ("pppCcpIllegalAction", 82), ("pppCcpSendConfigureRequest", 83), ("pppCcpSendConfigureAck", 84), ("pppCcpSendConfigureNak", 85), ("pppCcpSendConfigureReject", 86), ("pppCcpSendTerminateRequest", 87), ("pppCcpSendTerminateAck", 88), ("pppCcpSendCodeReject", 89), ("pppCcpSendProtocolReject", 90), ("pppCcpSendEchoReply", 91), ("pppCcpInitialState", 92), ("pppCcpStartingState", 93), ("pppCcpClosedState", 94), ("pppCcpStoppedState", 95), ("pppCcpClosingState", 96), ("pppCcpStoppingState", 97), ("pppCcpReqSentState", 98), ("pppCcpAckRcvdState", 99), ("pppCcpAckSentState", 100), ("pppCcpOpenedState", 101), ("pppCcpProprietaryCompRcvReq", 102), ("pppCcpProprietaryCompRcvNak", 103), ("pppCcpProprietaryCompRcvRej", 104), ("pppCcpProprietaryCompXmitReq", 105), ("pppCcpPredictorType1RcvReq", 106), 
("pppCcpPredictorType1RcvNak", 107), ("pppCcpPredictorType1RcvRej", 108), ("pppCcpPredictorType1XmitReq", 109), ("pppCcpPredictorType2RcvReq", 110), ("pppCcpPredictorType2RcvNak", 111), ("pppCcpPredictorType2RcvRej", 112), ("pppCcpPredictorType2XmitReq", 113), ("pppCcpPuddleJumperRcvReq", 114), ("pppCcpPuddleJumperRcvNak", 115), ("pppCcpPuddleJumperRcvRej", 116), ("pppCcpPuddleJumperXmitReq", 117), ("pppCcpHpPpcRcvReq", 118), ("pppCcpHpPpcRcvNak", 119), ("pppCcpHpPpcRcvRej", 120), ("pppCcpHpPpcXmitReq", 121), ("pppCcpStacLzsRcvReq", 122), ("pppCcpStacLzsRcvNak", 123), ("pppCcpStacLzsRcvRej", 124), ("pppCcpStacLzsXmitReq", 125), ("pppCcpMsPpcRcvReq", 126), ("pppCcpMsPpcRcvNak", 127), ("pppCcpMsPpcRcvRej", 128), ("pppCcpMsPpcXmitReq", 129), ("pppCcpGandalfFzaRcvReq", 130), ("pppCcpGandalfFzaRcvNak", 131), ("pppCcpGandalfFzaRcvRej", 132), ("pppCcpGandalfFzaXmitReq", 133), ("pppCcpV42bisRcvReq", 134), ("pppCcpV42bisRcvNak", 135), ("pppCcpV42bisRcvRej", 136), ("pppCcpV42bisXmitReq", 137), ("pppCcpBsdLzwRcvReq", 138), ("pppCcpBsdLzwRcvNak", 139), ("pppCcpBsdLzwRcvRej", 140), ("pppCcpBsdLzwXmitReq", 141), ("pppCcpStackDcpRcvReq", 142), ("pppCcpStackDcpRcvNak", 143), ("pppCcpStackDcpRcvRej", 144), ("pppCcpStackDcpXmitReq", 145), ("pppChapChallengeReceived", 146), ("pppChapResponseReceived", 147), ("pppChapSuccessReceived", 148), ("pppChapFailureReceived", 149), ("pppChapSuccessSent", 150), ("pppChapFailureSent", 151), ("pppChapChallengeSent", 152), ("pppChapResponseSent", 153), ("pppIpcpThisLayerStart", 154), ("pppIpcpThisLayerFinished", 155), ("pppIpcpThisLayerUp", 156), ("pppIpcpThisLayerDown", 157), ("pppIpcpInitializeRestartCount", 158), ("pppIpcpZeroRestartCount", 159), ("pppIpcpRcvConfReqGood", 160), ("pppIpcpRcvConfReqBad", 161), ("pppIpcpReceiveConfigureAck", 162), ("pppIpcpReceiveConfigureNak", 163), ("pppIpcpReceiveConfigureReject", 164), ("pppIpcpReceiveTerminateRequest", 165), ("pppIpcpReceiveTerminateAck", 166), ("pppIpcpReceiveCodeRejectPermitted", 167), 
("pppIpcpReceiveCodeRejectCatastrophic", 168), ("pppIpcpReceiveProtocolRejectPermitted", 169), ("pppIpcpReceiveEchoRequest", 170), ("pppIpcpReceiveEchoReply", 171), ("pppIpcpReceiveDiscardRequest", 172), ("pppIpcpReceiveUnknownCode", 173), ("pppIpcpIllegalAction", 174), ("pppIpcpSendConfigureRequest", 175), ("pppIpcpSendConfigureAck", 176), ("pppIpcpSendConfigureNak", 177), ("pppIpcpSendConfigureReject", 178), ("pppIpcpSendTerminateRequest", 179), ("pppIpcpSendTerminateAck", 180), ("pppIpcpSendCodeReject", 181), ("pppIpcpSendProtocolReject", 182), ("pppIpcpSendEchoReply", 183), ("pppIpcpInitialState", 184), ("pppIpcpStartingState", 185), ("pppIpcpClosedState", 186), ("pppIpcpStoppedState", 187), ("pppIpcpClosingState", 188), ("pppIpcpStoppingState", 189), ("pppIpcpReqSentState", 190), ("pppIpcpAckRcvdState", 191), ("pppIpcpAckSentState", 192), ("pppIpcpOpenedState", 193), ("pppIpcpIpAddressRcvReq", 194), ("pppIpcpIpAddressRcvNak", 195), ("pppIpcpIpAddressRcvRej", 196), ("pppIpcpIpAddressXmitReq", 197), ("pppIpcpCompressionTypeRcvReq", 198), ("pppIpcpCompressionTypeRcvRej", 199), ("pppIpcpCompressionTypeRcvNak", 200), ("pppIpcpCompressionTypeXmitReq", 201), ("pppIpxcpThisLayerStart", 202), ("pppIpxcpThisLayerFinished", 203), ("pppIpxcpThisLayerUp", 204), ("pppIpxcpThisLayerDown", 205), ("pppIpxcpInitializeRestartCount", 206), ("pppIpxcpZeroRestartCount", 207), ("pppIpxcpRcvConfReqGood", 208), ("pppIpxcpRcvConfReqBad", 209), ("pppIpxcpReceiveConfigureAck", 210), ("pppIpxcpReceiveConfigureNak", 211), ("pppIpxcpReceiveConfigureReject", 212), ("pppIpxcpReceiveTerminateAck", 214), ("pppIpxcpReceiveCodeRejectPermitted", 215), ("pppIpxcpReceiveCodeRejectCatastrophic", 216), ("pppIpxcpReceiveProtocolRejectPermitted", 217), ("pppIpxcpReceiveEchoRequest", 218), ("pppIpxcpReceiveEchoReply", 219), ("pppIpxcpReceiveDiscardRequest", 220), ("pppIpxcpReceiveUnknownCode", 221), ("pppIpxcpIllegalAction", 222), ("pppIpxcpSendConfigureRequest", 223), ("pppIpxcpSendConfigureAck", 224), 
("pppIpxcpSendConfigureNak", 225), ("pppIpxcpSendConfigureReject", 226), ("pppIpxcpSendTerminateRequest", 227), ("pppIpxcpSendTerminateAck", 228), ("pppIpxcpSendCodeReject", 229), ("pppIpxcpSendProtocolReject", 230), ("pppIpxcpSendEchoReply", 231), ("pppIpxcpInitialState", 232), ("pppIpxcpStartingState", 233), ("pppIpxcpClosedState", 234), ("pppIpxcpStoppedState", 235), ("pppIpxcpClosingState", 236), ("pppIpxcpStoppingState", 237), ("pppIpxcpReqSentState", 238), ("pppIpxcpAckRcvdState", 239), ("pppIpxcpAckSentState", 240), ("pppIpxcpOpenedState", 241), ("pppIpxcpCompressionProtocolRcvReq", 242), ("pppIpxcpCompressionProtocolRcvNak", 243), ("pppIpxcpCompressionProtocolRcvRej", 244), ("pppIpxcpCompressionProtocolXmitReq", 245), ("pppIpxcpNetworkNumberRcvReq", 246), ("pppIpxcpNetworkNumberRcvNak", 247), ("pppIpxcpNetworkNumberRcvRej", 248), ("pppIpxcpNetworkNumberXmitReq", 249), ("pppIpxcpNodeNumberRcvReq", 250), ("pppIpxcpNodeNumberRcvNak", 251), ("pppIpxcpNodeNumberRcvRej", 252), ("pppIpxcpNodeNumberXmitReq", 253), ("pppIpxcpRoutingProtocolRcvReq", 254), ("pppIpxcpRoutingProtocolRcvNak", 255), ("pppIpxcpRoutingProtocolRcvRej", 256)) + NamedValues(("pppIpxcpRoutingProtocolXmitReq", 257), ("pppIpxcpRouterNameRcvReq", 258), ("pppIpxcpRouterNameRcvNak", 259), ("pppIpxcpRouterNameRcvRej", 260), ("pppIpxcpRouterNameXmitReq", 261), ("pppIpxcpConfigurationCompleteRcvReq", 262), ("pppIpxcpConfigurationCompleteRcvNak", 263), ("pppIpxcpConfigurationCompleteRcvRej", 264), ("pppIpxcpConfigurationCompleteXmitReq", 265), ("pppLcpThisLayerStart", 266), ("pppLcpThisLayerFinished", 267), ("pppLcpThisLayerUp", 268), ("pppLcpThisLayerDown", 269), ("pppLcpInitializeRestartCount", 270), ("pppLcpZeroRestartCount", 271), ("pppLcpRcvConfReqGood", 272), ("pppLcpRcvConfReqBad", 273), ("pppLcpReceiveConfigureAck", 274), ("pppLcpReceiveConfigureNak", 275), ("pppLcpReceiveConfigureReject", 276), ("pppLcpReceiveTerminateRequest", 277), ("pppLcpReceiveTerminateAck", 278), 
("pppLcpReceiveCodeRejectPermitted", 279), ("pppLcpReceiveCodeRejectCatastrophic", 280), ("pppLcpReceiveProtocolReject", 281), ("pppLcpReceiveEchoRequest", 282), ("pppLcpReceiveEchoReply", 283), ("pppLcpReceiveDiscardRequest", 284), ("pppLcpReceiveUnknownCode", 285), ("pppLcpIllegalAction", 286), ("pppLcpSendConfigureRequest", 287), ("pppLcpSendConfigureAck", 288), ("pppLcpSendConfigureNak", 289), ("pppLcpSendConfigureReject", 290), ("pppLcpSendTerminateRequest", 291), ("pppLcpSendTerminateAck", 292), ("pppLcpSendCodeReject", 293), ("pppLcpSendProtocolReject", 294), ("pppLcpSendEchoReply", 295), ("pppLcpInitialState", 296), ("pppLcpStartingState", 297), ("pppLcpClosedState", 298), ("pppLcpStoppedState", 299), ("pppLcpClosingState", 300), ("pppLcpStoppingState", 301), ("pppLcpReqSentState", 302), ("pppLcpAckRcvdState", 303), ("pppLcpAckSentState", 304), ("pppLcpOpenedState", 305), ("pppLcpMruRcvReq", 306), ("pppLcpMruRcvNak", 307), ("pppLcpMruRcvRej", 308), ("pppLcpMruXmitReq", 309), ("pppLcpAsyncCharMapRcvReq", 310), ("pppLcpAsyncCharMapRcvNak", 311), ("pppLcpAsyncCharMapRcvRej", 312), ("pppLcpAsyncCharMapXmitReq", 313), ("pppLcpAuthenticationRcvReq", 314), ("pppLcpAuthenticationRcvNak", 315), ("pppLcpAuthenticationRcvRej", 316), ("pppLcpAuthenticationXmitReq", 317), ("pppLcpMagicNumberRcvReq", 318), ("pppLcpMagicNumberRcvNak", 319), ("pppLcpMagicNumberRcvRej", 320), ("pppLcpMagicNumberXmitReq", 321), ("pppLcpLinkQualityRcvReq", 322), ("pppLcpLinkQualityRcvNak", 323), ("pppLcpLinkQualityRcvRej", 324), ("pppLcpLinkQualityXmitReq", 325), ("pppLcpProtCompRcvReq", 326), ("pppLcpProtCompRcvNak", 327), ("pppLcpProtCompRcvRej", 328), ("pppLcpProtCompXmitReq", 329), ("pppLcpAddrCompRcvReq", 330), ("pppLcpAddrCompRcvNak", 331), ("pppLcpAddrCompRcvRej", 332), ("pppLcpAddrCompXmitReq", 333), ("pppLcpFcs32BitRcvReq", 334), ("pppLcpFcs32BitRcvNak", 335), ("pppLcpFcs32BitRcvRej", 336), ("pppLcpFcs32BitXmitReq", 337), ("pppLcpSelfDescPaddingRcvReq", 338), 
("pppLcpSelfDescPaddingRcvNak", 339), ("pppLcpSelfDescPaddingRcvRej", 340), ("pppLcpSelfDescPaddingXmitReq", 341), ("pppLcpCompoundFramesRcvReq", 342), ("pppLcpCompoundFramesRcvNak", 343), ("pppLcpCompoundFramesRcvRej", 344), ("pppLcpCompoundFramesXmitReq", 345), ("pppLcpCallbackRcvReq", 346), ("pppLcpCallbackRcvNak", 347), ("pppLcpCallbackRcvRej", 348), ("pppLcpCallbackXmitReq", 349), ("pppLexThisLayerStart", 350), ("pppLexThisLayerFinished", 351), ("pppLexThisLayerUp", 352), ("pppLexThisLayerDown", 353), ("pppLexInitializeRestartCount", 354), ("pppLexZeroRestartCount", 355), ("pppLexRcvConfReqGood", 356), ("pppLexRcvConfReqBad", 357), ("pppLexReceiveConfigureAck", 358), ("pppLexReceiveConfigureNak", 359), ("pppLexReceiveConfigureReject", 360), ("pppLexReceiveTerminateRequest", 361), ("pppLexReceiveTerminateAck", 362), ("pppLexReceiveCodeRejectPermitted", 363), ("pppLexReceiveCodeRejectCatastrophic", 364), ("pppLexReceiveProtocolRejectPermitted", 365), ("pppLexReceiveEchoRequest", 366), ("pppLexReceiveEchoReply", 367), ("pppLexReceiveDiscardRequest", 368), ("pppLexReceiveUnknownCode", 369), ("pppLexIllegalAction", 370), ("pppLexSendConfigureRequest", 371), ("pppLexSendConfigureAck", 372), ("pppLexSendConfigureNak", 373), ("pppLexSendConfigureReject", 374), ("pppLexSendTerminateRequest", 375), ("pppLexSendTerminateAck", 376), ("pppLexSendCodeReject", 377), ("pppLexSendProtocolReject", 378), ("pppLexSendEchoReply", 379), ("pppLexInitialState", 380), ("pppLexStartingState", 381), ("pppLexClosedState", 382), ("pppLexStoppedState", 383), ("pppLexClosingState", 384), ("pppLexStoppingState", 385), ("pppLexReqSentState", 386), ("pppLexAckRcvdState", 387), ("pppLexAckSentState", 388), ("pppLexOpenedState", 389), ("pppLexMacTypeSelectRcvReq", 390), ("pppLexMacTypeSelectRcvNak", 391), ("pppLexMacTypeSelectRcvRej", 392), ("pppLexMacTypeSelectXmitReq", 393), ("pppLexTinygramCompressRcvReq", 394), ("pppLexTinygramCompressRcvNak", 395), ("pppLexTinygramCompressRcvRej", 396), 
("pppLexTinygramCompressXmitReq", 397), ("pppLexMacAddressRcvReq", 398), ("pppLexMacAddressRcvNak", 399), ("pppLexMacAddressRcvRej", 400), ("pppLexMacAddressXmitReq", 401), ("pppLexMacRelayRcvReq", 402), ("pppLexMacRelayRcvNak", 403), ("pppLexMacRelayRcvRej", 404), ("pppLexMacRelayXmitReq", 405), ("pppLexStatisticsRequestRcvReq", 406), ("pppLqrSent", 407), ("pppLqrReceived", 408), ("pppLinkDead", 409), ("pppLinkEstablishment", 410), ("pppLinkTermination", 411), ("pppNetworkLayerPhase", 412), ("pppPapAuthenticateReqReceived", 413), ("pppPapAuthenticateAckReceived", 414), ("pppPapAuthenticateNakReceived", 415), ("pppPapAuthenticateReqSent", 416), ("pppPapAuthenticateAckSent", 417), ("pppPapAuthenticateNakSent", 418), ("frGotLmiPacket", 500), ("frGotBadQ922Header", 501), ("frGotCllmPacket", 502), ("frInactiveReceivedPacket", 503), ("frReceivedNlpidIpPacket", 504), ("frSentXidPacket", 505), ("frSentXidResponse", 506), ("frReceivedXidPacket", 507), ("frXidTimerExpired", 508), ("frGotBadUi", 509), ("frGotBadSnapPacket", 510), ("frLinkUp", 511), ("frLinkDown", 512), ("frLmiStarted", 513), ("frLmiStopped", 514), ("frLmiSentFullStatusEnquiry", 515), ("frLmiSentKeepAliveMessage", 516), ("frLmiStatusResponseReceived", 517), ("frLmiGotAnsiReportType", 518), ("frLmiGotFullStatusReport", 519), ("frLmiGotKeepAliveMessage", 520), ("frLmiUnsolicitedKeepAlive", 521), ("frLmiAsynchronousStatus", 522), ("frLmiGotQ933AReportType", 523), ("frLmiBadPvcStatusLength", 524), ("frLmiT391TimeoutFs", 525), ("frLmiT391TimeoutSe", 526), ("frLmiT391PollFailed", 527), ("frLmiT391PollSucceeded", 528), ("frLmiStatusEnquiryReceived", 529), ("frDcpMode1Initializing", 530), ("frDcpMode1Disabled", 531), ("frDcpMode1ControlPacketReceived", 532), ("frDcpMode1DataPacketReceived", 533), ("frDcpMode1RequestSent", 534), ("frDcpMode1RequestReceived", 535), ("frDcpMode1ResponseSent", 536), ("frDcpMode1ResponseReceived", 537), ("frDcpMode1Operational", 538), ("frDcpMode1TimerExpired", 539), 
("frDcpMode2ControlPacketReceived", 540), ("frDcpResetPacketSent", 541), ("frDcpResetTimerExpired", 542), ("frDcpResetAckSent", 543), ("frDcpDictionaryQuotaExceeded", 544), ("isdnRemoteConnectionUp", 1000), ("isdnRemoteConnectionDown", 1001), ("isdnActivateConnection", 1002), ("isdnDeactivateConnection", 1003), ("multilinkMpLinkUp", 1500), ("multilinkMpAddBW", 1501), ("multilinkMpRemoveBW", 1502), ("multilinkMpSentBeginningFragment", 1503), ("multilinkMpSentMiddleFragment", 1504), ("multilinkMpSentEndFragment", 1505), ("multilinkMpSentCompleteMessage", 1506), ("multilinkMpReceivedBeginningFragment", 1507), ("multilinkMpReceivedMiddleFragment", 1508), ("multilinkMpReceivedEndFragment", 1509), ("multilinkMpReceivedCompleteMessage", 1510)))).setMaxAccess("readonly")
# Attach MIB status/description text to wanMessageCode (only when the builder loads texts).
if mibBuilder.loadTexts: wanMessageCode.setStatus('mandatory')
if mibBuilder.loadTexts: wanMessageCode.setDescription('An enumerated value representing the nature of the debug message sent by the interface.')
ds1AlarmsGlobalConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8, 1))
ds1AlarmGlobalAdmin = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ds1AlarmGlobalAdmin.setStatus('mandatory')
if mibBuilder.loadTexts: ds1AlarmGlobalAdmin.setDescription('Used to enable and disable the DS1 Alarm Monitoring Application for all DS1 circuits on the device.')
ds1AlarmGlobalAutoRecovery = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ds1AlarmGlobalAutoRecovery.setStatus('mandatory')
if mibBuilder.loadTexts: ds1AlarmGlobalAutoRecovery.setDescription('Used to enable and disable the DS1 Alarm Automatic Recovery feature for all DS1 circuits on the device. For those devices that support ds1 alarm monitoring, but do not support the automatic recovery feature, this object always returns disabled, and performs no action when written.')
ds1AlarmGlobalTrapEnable = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ds1AlarmGlobalTrapEnable.setStatus('mandatory')
if mibBuilder.loadTexts: ds1AlarmGlobalTrapEnable.setDescription('Used to enable and disable the DS1 Alarm SNMP Trap generation feature for all DS1 circuits on the device. This object operates in conjunction with configuration of objects in the ctron-trap-mib.txt')
ds1AlarmGlobalESCount = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 300))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ds1AlarmGlobalESCount.setStatus('mandatory')
if mibBuilder.loadTexts: ds1AlarmGlobalESCount.setDescription('Used in conjuction with ctDs1AlarmESInterval to set the threshold for alarm generation when the number of Errored Seconds for a specified time interval, measured in minutes, is exceeded. The default Errored Seconds alarm condition is 100 errored seconds over a 6 minute time interval.')
ds1AlarmGlobalESInterval = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ds1AlarmGlobalESInterval.setStatus('mandatory')
if mibBuilder.loadTexts: ds1AlarmGlobalESInterval.setDescription('Used in conjuction with ds1AlarmGlobalESCount to set the threshold for alarm generation when the number of Errored Seconds for a specified time interval, measured in minutes, is exceeded. The default Errored Seconds alarm condition is 100 errored seconds over a 6 minute time interval.')
ds1AlarmGlobalBPVErrorRate = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 9))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ds1AlarmGlobalBPVErrorRate.setStatus('mandatory')
if mibBuilder.loadTexts: ds1AlarmGlobalBPVErrorRate.setDescription("Used to specify the negative exponent of 10, used in monitoring Bipolar Violations (Code Violations for E1) over intervals defined by ds1AlarmGlobalBPVInterval. For example, for a ds1AlarmGlobalBPVErrorRate value of 6 used with a ds1AlarmGlobalBPVInterval of 15, an alarm is generated when a 15 minute period has an average error rate which exceeds 1 Bipolar Violation (Code Violation for E1) per million bits. For those devices that support ds1 alarm monitoring but who's hardware does not support detect bipolar violations, this object always returns the default, and performs no action when written.")
ds1AlarmGlobalBPVInterval = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ds1AlarmGlobalBPVInterval.setStatus('mandatory')
if mibBuilder.loadTexts: ds1AlarmGlobalBPVInterval.setDescription("Used to specify the interval, used in monitoring Bipolar Violations (Code Violations for E1) with thresholds defined by ds1AlarmGlobalBPVErrorRate. For example, for a ds1AlarmGlobalBPVErrorRate value of 6 used with a ds1AlarmGlobalBPVInterval of 15, an alarm is generated when a 15 minute period has an average error rate which exceeds 1 Bipolar Violation (Code Violation for E1) per million bits. For those devices that support ds1 alarm monitoring but who's hardware does not detect bipolar violations, this object always returns the default, and performs no action when written.")
ds1AlarmGlobalManualRecovery = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("recover", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ds1AlarmGlobalManualRecovery.setStatus('mandatory')
if mibBuilder.loadTexts: ds1AlarmGlobalManualRecovery.setDescription('To cause a manual recovery to occur on all Ds1 circuits, this object is set to a 1. It is always read as a 1. The recovery will occur only when automatic recovery is disabled.')
# Per-DS1-circuit alarm configuration table; rows are indexed by ds1PhysNum.
ds1AlarmConfigTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8, 2), )
if mibBuilder.loadTexts: ds1AlarmConfigTable.setStatus('mandatory')
if mibBuilder.loadTexts: ds1AlarmConfigTable.setDescription('Table containing objects used to configure DS1 alarm functionality on a per DS1 basis.')
# Conceptual row of ds1AlarmConfigTable; INDEX is the ds1PhysNum column defined in this MIB.
ds1AlarmConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8, 2, 1), ).setIndexNames((0, "CTRON-REMOTE-ACCESS-MIB", "ds1PhysNum"))
if mibBuilder.loadTexts: ds1AlarmConfigEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ds1AlarmConfigEntry.setDescription('A description of a single entry.')
ds1PhysNum = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ds1PhysNum.setStatus('mandatory')
if mibBuilder.loadTexts: ds1PhysNum.setDescription('The value for this object is equal to the value of a unique physical connection identifier associated with this DS1 which is clearly labeled on the device. For those products that do not have a DS1 identifier labeled on the device, the value for this object is the same as rfc1406 dsx1LineIndex.')
ds1AlarmAdmin = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ds1AlarmAdmin.setStatus('mandatory')
if mibBuilder.loadTexts: ds1AlarmAdmin.setDescription('Used to enable and disable the DS1 Alarm Monitoring Application for this DS1 circuit.')
ds1AlarmAutoRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ds1AlarmAutoRecovery.setStatus('mandatory')
if mibBuilder.loadTexts: ds1AlarmAutoRecovery.setDescription('Used to enable and disable the DS1 Alarm Automatic Recovery feature for this DS1 circuit. For those devices that support ds1 alarm monitoring, but do not support the automatic recovery feature, this object always returns disabled, and performs no action when written.')
ds1AlarmTrapEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ds1AlarmTrapEnable.setStatus('mandatory')
if mibBuilder.loadTexts: ds1AlarmTrapEnable.setDescription('Used to enable and disable the DS1 Alarm SNMP Trap generation feature for this DS1 circuit. This object operates in conjunction with configuration of objects in the ctron-trap-mib.txt')
ds1AlarmESCount = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 300))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ds1AlarmESCount.setStatus('mandatory')
if mibBuilder.loadTexts: ds1AlarmESCount.setDescription('Used in conjuction with ctDs1AlarmESInterval to set the threshold for alarm generation when the number of Errored Seconds for a specified time interval, measured in minutes, is exceeded. The default Errored Seconds alarm condition is 100 errored seconds over a 6 minute time interval.')
ds1AlarmESInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ds1AlarmESInterval.setStatus('mandatory')
if mibBuilder.loadTexts: ds1AlarmESInterval.setDescription('Used in conjuction with ctDs1AlarmESCount to set the threshold for alarm generation when the number of Errored Seconds for a specified time interval, measured in minutes, is exceeded. The default Errored Seconds alarm condition is 100 errored seconds over a 6 minute time interval.')
ds1AlarmBPVErrorRate = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(5, 9))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ds1AlarmBPVErrorRate.setStatus('mandatory')
if mibBuilder.loadTexts: ds1AlarmBPVErrorRate.setDescription("Used to specify the negative exponent of 10, used in monitoring Bipolar Violations (Code Violations for E1) over intervals defined by ds1AlarmBPVInterval. For example, for a ds1AlarmBPVErrorRate value of 6 used with a ds1AlarmBPVInterval of 15, an alarm is generated when a 15 minute period has an average error rate which exceeds 1 Bipolar Violation (Code Violation for E1) per million bits. For those devices that support ds1 alarm monitoring but who's hardware does not support detect bipolar violations, this object always returns the default, and performs no action when written.")
ds1AlarmBPVInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ds1AlarmBPVInterval.setStatus('mandatory')
if mibBuilder.loadTexts: ds1AlarmBPVInterval.setDescription("Used to specify the interval, used in monitoring Bipolar Violations (Code Violations for E1) with thresholds defined by ds1AlarmBPVErrorRate. For example, for a ds1AlarmBPVErrorRate value of 6 used with a ds1AlarmBPVInterval of 15, an alarm is generated when a 15 minute period has an average error rate which exceeds 1 Bipolar Violation (Code Violation for E1) per million bits. For those devices that support ds1 alarm monitoring but who's hardware does not support detect bipolar violations, this object always returns the default, and performs no action when written.")
ds1AlarmManualRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 8, 2, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("recover", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ds1AlarmManualRecovery.setStatus('mandatory')
if mibBuilder.loadTexts: ds1AlarmManualRecovery.setDescription('To cause a manual recovery to occur, this object is set to a 1. It is always read as a 1. The recovery will occur only when automatic recovery is disabled.')
ipPQConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 9, 1))
ipPQAdmin = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 9, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disabled", 1), ("enabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipPQAdmin.setStatus('mandatory')
if mibBuilder.loadTexts: ipPQAdmin.setDescription('Used to enable and disable the IP Priority Queue Forwarding Application.')
iPPQMaxEntries = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 9, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: iPPQMaxEntries.setStatus('mandatory')
if mibBuilder.loadTexts: iPPQMaxEntries.setDescription('Returns the maximum number of IP address entries supported by the IP Priority Queue application on this device.')
iPPQNumEntries = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 9, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: iPPQNumEntries.setStatus('mandatory')
if mibBuilder.loadTexts: iPPQNumEntries.setDescription('Get the number of IP address entries currently programmed in the ipPQAddressTable.')
iPPQAddAddress = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 9, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: iPPQAddAddress.setStatus('mandatory')
if mibBuilder.loadTexts: iPPQAddAddress.setDescription('Add an IP address to the ipPQAddressTable. Always read as a 0.0.0.0.')
iPPQDelAddress = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 9, 1, 5), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: iPPQDelAddress.setStatus('mandatory')
if mibBuilder.loadTexts: iPPQDelAddress.setDescription('Delete an IP address from the ipPQAddressTable. Always read as a 0.0.0.0.')
# Read-only table of IP addresses programmed for priority-queue forwarding; rows are
# indexed by ipPQAddressId (entries are added/removed via the iPPQAdd/DelAddress scalars).
ipPQAddressTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 9, 2), )
if mibBuilder.loadTexts: ipPQAddressTable.setStatus('mandatory')
if mibBuilder.loadTexts: ipPQAddressTable.setDescription('Table containing IP addresses, used in an IP Priority Queue Forwarding function.')
# Conceptual row of ipPQAddressTable; INDEX is the ipPQAddressId column defined in this MIB.
ipPQAddressEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 9, 2, 1), ).setIndexNames((0, "CTRON-REMOTE-ACCESS-MIB", "ipPQAddressId"))
if mibBuilder.loadTexts: ipPQAddressEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ipPQAddressEntry.setDescription('A description of a single entry.')
ipPQAddressId = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 9, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipPQAddressId.setStatus('mandatory')
if mibBuilder.loadTexts: ipPQAddressId.setDescription('A unique value identifying an element in a sequence of IP PQ address entries.')
ipPQIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 9, 2, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipPQIPAddress.setStatus('mandatory')
if mibBuilder.loadTexts: ipPQIPAddress.setDescription('Returns an IP address associated with a specific entry in this table.')
# Extension table to the standard DS3 configuration table; one row per physical DS3 port,
# indexed by ctDs3ExtensionsEntryIndex.
ctDs3ExtensionsTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 10, 1), )
if mibBuilder.loadTexts: ctDs3ExtensionsTable.setStatus('mandatory')
if mibBuilder.loadTexts: ctDs3ExtensionsTable.setDescription('This table is an extension to the standard ds3 configuration table. It is a list of items that are pertinant to ds3 ports on a platform. There is one entry per ds3 physical port on the platform.')
# Conceptual row of ctDs3ExtensionsTable.
ctDs3ExtensionsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 10, 1, 1), ).setIndexNames((0, "CTRON-REMOTE-ACCESS-MIB", "ctDs3ExtensionsEntryIndex"))
if mibBuilder.loadTexts: ctDs3ExtensionsEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ctDs3ExtensionsEntry.setDescription('A ds3 extensions entry containing objects relating to the particular ds3 physical port.')
ctDs3ExtensionsEntryIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 10, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctDs3ExtensionsEntryIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ctDs3ExtensionsEntryIndex.setDescription('A unique value for each ds3 physical port on the platform.')
ctDs3ExtensionsNumInterfaces = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 10, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctDs3ExtensionsNumInterfaces.setReference('rfc-1213')
if mibBuilder.loadTexts: ctDs3ExtensionsNumInterfaces.setStatus('mandatory')
if mibBuilder.loadTexts: ctDs3ExtensionsNumInterfaces.setDescription('The number of interfaces on this physical port. This number inidicates the number of entries this physical port uses in the ifTable.')
ctDs3ExtensionsLineBuildOut = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 2, 10, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("unknown", 1), ("zeroto255feet", 2), ("a255to450feet", 3))).clone('zeroto255feet')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ctDs3ExtensionsLineBuildOut.setStatus('mandatory')
if mibBuilder.loadTexts: ctDs3ExtensionsLineBuildOut.setDescription('The line build out setting for this ds3 physical port. Unknown indicates that the setting is neither readable or writable.')
mibBuilder.exportSymbols("CTRON-REMOTE-ACCESS-MIB", ctRs232ExtensionsCTSEnable=ctRs232ExtensionsCTSEnable, ctRemExtPhysPortEntry=ctRemExtPhysPortEntry, ctDs1ExtensionsEntryIndex=ctDs1ExtensionsEntryIndex, wanalyzerMessageTable=wanalyzerMessageTable, ctDs1WanDriverEntryIndex=ctDs1WanDriverEntryIndex, frDcpCircuitEnable=frDcpCircuitEnable, ctRemIntEntCircuitName=ctRemIntEntCircuitName, ctRemExtPhysPortTable=ctRemExtPhysPortTable, ctRemExtPhysPortProtMgrIfaceList=ctRemExtPhysPortProtMgrIfaceList, ctDDSExt=ctDDSExt, ctRs232ExtensionsDSREnable=ctRs232ExtensionsDSREnable, ctPppLcpExtSelfDescribePadding=ctPppLcpExtSelfDescribePadding, ctDs1WanDriverChannelIndex=ctDs1WanDriverChannelIndex, wanDs1ExtensionsBertTestMode=wanDs1ExtensionsBertTestMode, ctDs1WanDriverEntry=ctDs1WanDriverEntry, frDcpCircuitRatio=frDcpCircuitRatio, ctPppLcpExtMpMoreBW=ctPppLcpExtMpMoreBW, ctRemConnectionIndex=ctRemConnectionIndex, ds1AlarmESCount=ds1AlarmESCount, ctPppLcpExtMpLessBW=ctPppLcpExtMpLessBW, ds1AlarmConfigEntry=ds1AlarmConfigEntry, ipPQAddressId=ipPQAddressId, ds1AlarmGlobalAutoRecovery=ds1AlarmGlobalAutoRecovery, ctRemIntEntCompRatio=ctRemIntEntCompRatio, iPPQMaxEntries=iPPQMaxEntries, ctRemIntEntBackupIfTimeToDisconnect=ctRemIntEntBackupIfTimeToDisconnect, ctRemIntEntTxIdleTimeout=ctRemIntEntTxIdleTimeout, ctDs1ExtensionsNumInterfaces=ctDs1ExtensionsNumInterfaces, wanDs1ExtensionsBertCumulativeResults=wanDs1ExtensionsBertCumulativeResults, ctPppLcpExtMpHistoryTime=ctPppLcpExtMpHistoryTime, ds1AlarmBPVErrorRate=ds1AlarmBPVErrorRate, iPPQDelAddress=iPPQDelAddress, ctRemInterfaceEntry=ctRemInterfaceEntry, ctPppBncpExtTable=ctPppBncpExtTable, ctDs1ExtensionsTotalValidIntervals=ctDs1ExtensionsTotalValidIntervals, ctRemPhysPortProtMgrIfaceNum=ctRemPhysPortProtMgrIfaceNum, ctRemIntEntBackupIfOverride=ctRemIntEntBackupIfOverride, ctRemExtPhysPortProtMgrEnable=ctRemExtPhysPortProtMgrEnable, ctWanalyzerEntry=ctWanalyzerEntry, ctDs3ExtensionsTable=ctDs3ExtensionsTable, 
ctPppCountersMaxFailure=ctPppCountersMaxFailure, wanDs1ExtensionsBertTestTraps=wanDs1ExtensionsBertTestTraps, ctPppEcpExtEntry=ctPppEcpExtEntry, ctRemExtPhysPortProtMgrMaxIfos=ctRemExtPhysPortProtMgrMaxIfos, ctPppEcpExtTable=ctPppEcpExtTable, ctPppCountersRestartTimer=ctPppCountersRestartTimer, ctRemIntEntIfIndex=ctRemIntEntIfIndex, ctDs3ExtensionsEntryIndex=ctDs3ExtensionsEntryIndex, ctPppLcpExtMpChannelsToRemove=ctPppLcpExtMpChannelsToRemove, ctRemPriIntEntConnectRetryInterval=ctRemPriIntEntConnectRetryInterval, ipPQAdmin=ipPQAdmin, iPPQNumEntries=iPPQNumEntries, ctDs3ExtensionsEntry=ctDs3ExtensionsEntry, ctPppLcpExtCallback=ctPppLcpExtCallback, wanDs1ExtensionsBertSamplePeriod=wanDs1ExtensionsBertSamplePeriod, ctRemPhysPortProtMgrMaxIfos=ctRemPhysPortProtMgrMaxIfos, ctWanalyzerClearInterface=ctWanalyzerClearInterface, ctWanalyzerEnabled=ctWanalyzerEnabled, ctRemPhysPortEntry=ctRemPhysPortEntry, ctPppLcpExtQualityProt=ctPppLcpExtQualityProt, ctRemInterfaceTable=ctRemInterfaceTable, ds1AlarmsGlobalConfigGroup=ds1AlarmsGlobalConfigGroup, ds1AlarmGlobalBPVErrorRate=ds1AlarmGlobalBPVErrorRate, wanDs1ExtensionsBertRun=wanDs1ExtensionsBertRun, ctPppLcpExtEntry=ctPppLcpExtEntry, wanMessageIndex=wanMessageIndex, wanDs1ExtensionsBertTestPattern=wanDs1ExtensionsBertTestPattern, ctRemPriIntEntIfIndex=ctRemPriIntEntIfIndex, ctPppLcpExtMpChannelsToAdd=ctPppLcpExtMpChannelsToAdd, ctRs232ExtensionsDTREnable=ctRs232ExtensionsDTREnable, ctPppCountersTable=ctPppCountersTable, ds1AlarmGlobalAdmin=ds1AlarmGlobalAdmin, ctDs1WanDriverTable=ctDs1WanDriverTable, wanMessageDate=wanMessageDate, ctRemIntEntMaxProfiles=ctRemIntEntMaxProfiles, ctDDSPortInSpeed=ctDDSPortInSpeed, ctWanalyzerDisplayInterface=ctWanalyzerDisplayInterface, ctRemIntEntBackupIfNum=ctRemIntEntBackupIfNum, ctPppLcpExtPFC=ctPppLcpExtPFC, ctPppMpExtIfIndex=ctPppMpExtIfIndex, ctWanalyzerMaxEntries=ctWanalyzerMaxEntries, ctRemIntEntCongestion=ctRemIntEntCongestion, ctRemPhysPortType=ctRemPhysPortType, 
ctPppCountersIfIndex=ctPppCountersIfIndex, ctDDSIfIndex=ctDDSIfIndex, ctRemPhysPortTable=ctRemPhysPortTable, ctRs232ExtensionsEntryIndex=ctRs232ExtensionsEntryIndex, wanMessageTime=wanMessageTime, ctRemBackupInterfaceEntry=ctRemBackupInterfaceEntry, ctDDSConfigEntry=ctDDSConfigEntry, ds1AlarmGlobalManualRecovery=ds1AlarmGlobalManualRecovery, ds1PhysNum=ds1PhysNum, ds1AlarmAdmin=ds1AlarmAdmin, ctRemPrimaryInterfaceTable=ctRemPrimaryInterfaceTable, ds1AlarmConfigTable=ds1AlarmConfigTable, ctRemPhysAlarmTimeOut=ctRemPhysAlarmTimeOut, ctDs1WanDriverLineCode=ctDs1WanDriverLineCode, ctIPPQFilters=ctIPPQFilters, wanMessageCode=wanMessageCode, ctDDSLineIndex=ctDDSLineIndex, ctPppLcpExtTable=ctPppLcpExtTable, ctRemIntEntCompression=ctRemIntEntCompression, frDcpCircuitDlci=frDcpCircuitDlci, ctPppLcpExtCompoundFrames=ctPppLcpExtCompoundFrames, ctPppLcpExtMru=ctPppLcpExtMru, DLCI=DLCI, ctDs1Ext=ctDs1Ext, ctRemPrimaryInterfaceEntry=ctRemPrimaryInterfaceEntry, ctPppEcpExtIfIndex=ctPppEcpExtIfIndex, ctPppLcpExtMpLUT=ctPppLcpExtMpLUT, ctPppEcpKey=ctPppEcpKey, ctPppCountersMaxTerminate=ctPppCountersMaxTerminate, ctPppLcpExtMpMaxChannels=ctPppLcpExtMpMaxChannels, ctPppBncpExtCrcStatus=ctPppBncpExtCrcStatus, ctWanalyzerIfIndex=ctWanalyzerIfIndex, ctPppLcpExtReplyCounter=ctPppLcpExtReplyCounter, ctRemPhysPortSpecificMib=ctRemPhysPortSpecificMib, ctWanalyzerClearAll=ctWanalyzerClearAll, ctWanalyzerCurrEntries=ctWanalyzerCurrEntries, ds1AlarmESInterval=ds1AlarmESInterval, ipPQAddressTable=ipPQAddressTable, ctDs3ExtensionsNumInterfaces=ctDs3ExtensionsNumInterfaces, ctDs1ExtensionsTable=ctDs1ExtensionsTable, iPPQAddAddress=iPPQAddAddress, ds1AlarmGlobalESCount=ds1AlarmGlobalESCount, ctDs1ExtensionsToggleFracTable=ctDs1ExtensionsToggleFracTable, ctDDSTxClockSource=ctDDSTxClockSource, ctDs1Alarms=ctDs1Alarms, ctRs232ExtensionsEntry=ctRs232ExtensionsEntry, ctRemIntEntEncryption=ctRemIntEntEncryption, ds1AlarmGlobalTrapEnable=ds1AlarmGlobalTrapEnable, 
ctDs1ExtensionsEntry=ctDs1ExtensionsEntry, Index=Index, ctRemIntEntCompStatus=ctRemIntEntCompStatus, ds1AlarmAutoRecovery=ds1AlarmAutoRecovery, ctPppLcpExtACFC=ctPppLcpExtACFC, ctRemoteAccess=ctRemoteAccess, ctRemIntEntBackupIfInUseCnt=ctRemIntEntBackupIfInUseCnt, ctRemExtPhysPortProtMgrType=ctRemExtPhysPortProtMgrType, ctDs3ExtensionsLineBuildOut=ctDs3ExtensionsLineBuildOut, ds1AlarmTrapEnable=ds1AlarmTrapEnable, mibs=mibs, ctron=ctron, ctRemPhysWpimType=ctRemPhysWpimType, ctRemExtPhysPortProtMgrChannelList=ctRemExtPhysPortProtMgrChannelList, ctPppLcpExtAccm=ctPppLcpExtAccm, ctPppLcpExtEchoRequest=ctPppLcpExtEchoRequest, ctWanalyzer=ctWanalyzer, ctDDSLineCoding=ctDDSLineCoding, ctPppMpExtEntry=ctPppMpExtEntry, ctPppLcpExtIfIndex=ctPppLcpExtIfIndex, ctDs3Ext=ctDs3Ext, frDcpCircuitStatus=frDcpCircuitStatus, ipPQConfigGroup=ipPQConfigGroup, ctRemoteConnection=ctRemoteConnection, ctRs232ExtensionsRTSEnable=ctRs232ExtensionsRTSEnable, ctRemIntEntMTU=ctRemIntEntMTU, ds1AlarmGlobalBPVInterval=ds1AlarmGlobalBPVInterval, ctDataLink=ctDataLink, ctRemIntEntBackupConnectRetryInterval=ctRemIntEntBackupConnectRetryInterval, ctRemExtConnectionIndex=ctRemExtConnectionIndex, ctPppEcpIV=ctPppEcpIV, ctRemExtPhysPortProtMgrIfaceNum=ctRemExtPhysPortProtMgrIfaceNum, ctRemPhysPortProtMgrType=ctRemPhysPortProtMgrType, frDcpCircuitIfIndex=frDcpCircuitIfIndex, ctFrDcp=ctFrDcp, ctDs1ExtensionsCFADuration=ctDs1ExtensionsCFADuration, ctWanalyzerTable=ctWanalyzerTable, ctDDSLoopbackConfig=ctDDSLoopbackConfig, ctRemIntEntBackupIfTimeToConnect=ctRemIntEntBackupIfTimeToConnect, ctRemNumConnections=ctRemNumConnections, wanDs1ExtensionsBertCurrentResults=wanDs1ExtensionsBertCurrentResults, ctDs1ExtensionsDIEnable=ctDs1ExtensionsDIEnable, wanDs1ExtensionsBertAverageResult=wanDs1ExtensionsBertAverageResult, ctRs232ExtensionsTable=ctRs232ExtensionsTable, ctPppLcpExtAuthenticationProt=ctPppLcpExtAuthenticationProt, ipPQIPAddress=ipPQIPAddress, ctRs232Ext=ctRs232Ext, 
wanMessageInterfaceIndex=wanMessageInterfaceIndex, ctPppCountersMaxConfigure=ctPppCountersMaxConfigure, wanDs1ExtensionsBertDataStatus=wanDs1ExtensionsBertDataStatus, ds1AlarmManualRecovery=ds1AlarmManualRecovery, ctRemPhysPortWanIfaceNum=ctRemPhysPortWanIfaceNum, ds1AlarmGlobalESInterval=ds1AlarmGlobalESInterval, frDcpCircuitTable=frDcpCircuitTable, ctronWan=ctronWan, ctPppBncpExtIfIndex=ctPppBncpExtIfIndex, ctRemExtProtMgrIndex=ctRemExtProtMgrIndex, ctDDSLineMode=ctDDSLineMode, ctRemBackupInterfaceTable=ctRemBackupInterfaceTable, ctPppCountersEntry=ctPppCountersEntry, ctPppLcpExtMpCapable=ctPppLcpExtMpCapable, ctPppBncpExtEntry=ctPppBncpExtEntry, frDcpCircuitEntry=frDcpCircuitEntry, ctPPPExt=ctPPPExt, wanalyzerEntry=wanalyzerEntry, ctDDSLineStatus=ctDDSLineStatus, ctPppMpExtTable=ctPppMpExtTable, cabletron=cabletron, wanDs1ExtensionsBertNumPeriods=wanDs1ExtensionsBertNumPeriods, ctDDSConfigTable=ctDDSConfigTable, ctRemIntEntRxIdleTimeout=ctRemIntEntRxIdleTimeout, ds1AlarmBPVInterval=ds1AlarmBPVInterval, ctRemIntEntBackupIfIndex=ctRemIntEntBackupIfIndex, ctRemIntEntBackupConnectRetries=ctRemIntEntBackupConnectRetries, ctDs1WanDriverCRCBits=ctDs1WanDriverCRCBits, ctRemPhysPortProtMgrIfaceList=ctRemPhysPortProtMgrIfaceList, ctDDSPortOutSpeed=ctDDSPortOutSpeed, wanDs1ExtensionsBertPeakResults=wanDs1ExtensionsBertPeakResults, ipPQAddressEntry=ipPQAddressEntry, ctDs1ExtensionsLineBuildOut=ctDs1ExtensionsLineBuildOut, ctRemIntEntEncryptStatus=ctRemIntEntEncryptStatus)
| 193.033175 | 18,674 | 0.779859 |
795582e15b87e126a78f6ab2cde2db7b4b2ea30f | 456 | py | Python | env/Lib/site-packages/plotly/validators/histogram/_hovertext.py | andresgreen-byte/Laboratorio-1--Inversion-de-Capital | 8a4707301d19c3826c31026c4077930bcd6a8182 | [
"MIT"
] | 11,750 | 2015-10-12T07:03:39.000Z | 2022-03-31T20:43:15.000Z | venv/Lib/site-packages/plotly/validators/histogram/_hovertext.py | wakisalvador/constructed-misdirection | 74779e9ec640a11bc08d5d1967c85ac4fa44ea5e | [
"Unlicense"
] | 2,951 | 2015-10-12T00:41:25.000Z | 2022-03-31T22:19:26.000Z | venv/Lib/site-packages/plotly/validators/histogram/_hovertext.py | wakisalvador/constructed-misdirection | 74779e9ec640a11bc08d5d1967c85ac4fa44ea5e | [
"Unlicense"
] | 2,623 | 2015-10-15T14:40:27.000Z | 2022-03-28T16:05:50.000Z | import _plotly_utils.basevalidators
class HovertextValidator(_plotly_utils.basevalidators.StringValidator):
    """String validator for the ``hovertext`` attribute of histogram traces.

    Accepts a single string or an array of strings (``array_ok``), and is
    editable via plotly's ``style`` edit type.
    """

    def __init__(self, plotly_name="hovertext", parent_name="histogram", **kwargs):
        # Pull the overridable defaults out of kwargs before delegating.
        array_ok = kwargs.pop("array_ok", True)
        edit_type = kwargs.pop("edit_type", "style")
        super(HovertextValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            array_ok=array_ok,
            edit_type=edit_type,
            **kwargs
        )
795582f6aea665b698af069fc9da142fc8427297 | 8,253 | py | Python | reana_client/cli/cwl_runner.py | scailfin/reana-client | 01ddb22ac6a9215e87212bede636389f6f24a7d2 | [
"MIT"
] | null | null | null | reana_client/cli/cwl_runner.py | scailfin/reana-client | 01ddb22ac6a9215e87212bede636389f6f24a7d2 | [
"MIT"
] | null | null | null | reana_client/cli/cwl_runner.py | scailfin/reana-client | 01ddb22ac6a9215e87212bede636389f6f24a7d2 | [
"MIT"
] | 1 | 2019-06-04T22:29:12.000Z | 2019-06-04T22:29:12.000Z | # -*- coding: utf-8 -*-
#
# This file is part of REANA.
# Copyright (C) 2017, 2018 CERN.
#
# REANA is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""CWL v1.0 interface CLI implementation."""
import io
import logging
import os
import re
import sys
import traceback
import urllib
from time import sleep
import click
import yaml
from bravado.exception import HTTPServerError
from cwltool.context import LoadingContext
from cwltool.load_tool import fetch_document
from cwltool.main import printdeps
from cwltool.workflow import findfiles
from reana_client.api.client import (create_workflow, current_rs_api_client,
get_workflow_logs, start_workflow,
upload_file)
from reana_client.cli.utils import add_access_token_options
from reana_client.config import default_user
from reana_client.utils import load_workflow_spec
from reana_client.version import __version__
PY3 = sys.version_info > (3,)
def get_file_dependencies_obj(cwl_obj, basedir):
    """Return a dictionary which contains the CWL workflow file dependencies.

    :param cwl_obj: A CWL tool or job which might contain file dependencies.
    :param basedir: Workflow base dir.
    :returns: A dictionary composed of valid CWL file dependencies.
    """
    # Fetch and parse the CWL document.
    loading_context = LoadingContext()
    document_loader, workflow_obj, uri = fetch_document(
        cwl_obj, resolver=loading_context.resolver,
        fetcher_constructor=loading_context.fetcher_constructor)
    # printdeps writes YAML text; capture it in an in-memory buffer
    # (StringIO on Python 3, BytesIO on Python 2).
    capture = io.StringIO() if PY3 else io.BytesIO()
    printdeps(workflow_obj, document_loader, capture, 'primary', uri,
              basedir=basedir)
    dependencies = yaml.load(capture.getvalue(), Loader=yaml.FullLoader)
    capture.close()
    return dependencies
@click.command()
@click.version_option(version=__version__)
@click.option('--quiet', is_flag=True,
              help='No diagnostic output')
@click.option('--outdir', type=click.Path(),
              help='Output directory, defaults to the current directory')
@click.option('--basedir', type=click.Path(),
              help='Base directory.')
@add_access_token_options
@click.argument('processfile', required=False)
@click.argument('jobfile')
@click.pass_context
def cwl_runner(ctx, quiet, outdir, basedir, processfile, jobfile,
               access_token):
    """Run CWL files in a standard format <workflow.cwl> <job.json>.

    Builds a REANA spec from the CWL process/job files, creates the
    workflow, uploads file dependencies, starts it, polls the logs until a
    final status appears, then writes the JSON result to stdout.
    """
    logging.basicConfig(
        format='[%(levelname)s] %(message)s',
        stream=sys.stderr,
        level=logging.INFO if quiet else logging.DEBUG)
    try:
        # BUGFIX: in single-file mode ``processfile`` is None, so derive the
        # base directory from the job file in that case.
        basedir = basedir or os.path.abspath(
            os.path.dirname(processfile or jobfile))
        if processfile:
            # Two-file invocation: <workflow.cwl> <job.json>.
            with open(jobfile) as f:
                reana_spec = {
                    "workflow": {"type": "cwl"},
                    "inputs":
                        {"parameters":
                            {"input": yaml.load(f, Loader=yaml.FullLoader)}}}
            reana_spec['workflow']['spec'] = load_workflow_spec(
                reana_spec['workflow']['type'],
                processfile,
            )
        else:
            # Single-file invocation: the job file carries a ``cwl:tool`` key.
            with open(jobfile) as f:
                job = yaml.load(f, Loader=yaml.FullLoader)
            # BUGFIX: parameters must be nested under ``inputs``; the old flat
            # ``"parameters"`` key made the assignment below raise KeyError.
            reana_spec = {"workflow": {"type": "cwl"},
                          "inputs": {"parameters": {"input": ""}}}
            reana_spec['workflow']['spec'] = load_workflow_spec(
                reana_spec['workflow']['type'],
                job['cwl:tool']
            )
            del job['cwl:tool']
            reana_spec['inputs']['parameters'] = {'input': job}
        reana_spec['workflow']['spec'] = replace_location_in_cwl_spec(
            reana_spec['workflow']['spec'])
        logging.info('Connecting to {0}'.format(
            current_rs_api_client.swagger_spec.api_url))
        response = create_workflow(reana_spec, 'cwl-test', access_token)
        # NOTE: progress messages are deliberately logged at error level so
        # they reach stderr regardless of --quiet (original behavior).
        logging.error(response)
        workflow_name = response['workflow_name']
        workflow_id = response['workflow_id']
        logging.info('Workflow {0}/{1} has been created.'.format(
            workflow_name, workflow_id))
        # Upload every file dependency of the process and job documents.
        file_dependencies_list = []
        for cwlobj in [processfile, jobfile]:
            file_dependencies_list.append(
                get_file_dependencies_obj(cwlobj, basedir))
        files_to_upload = findfiles(file_dependencies_list)
        for cwl_file_object in files_to_upload:
            file_path = cwl_file_object.get('location')
            abs_file_path = os.path.join(basedir, file_path)
            with open(abs_file_path, 'r') as f:
                upload_file(workflow_id, f, file_path, access_token)
            logging.error('File {} uploaded.'.format(file_path))
        response = start_workflow(
            workflow_id, access_token, reana_spec['inputs']['parameters'])
        logging.error(response)
        # Poll the workflow logs until cwltool reports a final status.
        first_logs = ""
        while True:
            sleep(1)
            logging.error('Polling workflow logs')
            response = get_workflow_logs(workflow_id, access_token)
            logs = response['logs']
            if logs != first_logs:
                logging.error(logs[len(first_logs):])
                first_logs = logs
            if "Final process status" in logs or \
                    "Traceback (most recent call last)" in logs:
                break
        try:
            # BUGFIX: raw string avoids invalid escape sequences (\S, \s).
            out = re.search(r"success{[\S\s]*",
                            logs).group().replace("success", "")
            import ast
            import json
            json_output = json.dumps(ast.literal_eval(str(out)))
        except AttributeError:
            # re.search returned None: no success payload in the logs.
            logging.error("Workflow execution failed")
            sys.exit(1)
        except Exception:
            logging.error(traceback.format_exc())
            sys.exit(1)
        sys.stdout.write(json_output)
        sys.stdout.write("\n")
        sys.stdout.flush()
    except HTTPServerError as e:
        logging.error(traceback.print_exc())
        logging.error(e)
    except Exception:
        logging.error(traceback.print_exc())
def replace_location_in_cwl_spec(spec):
    """Replace absolute paths with relative in a workflow.

    Recursively replace absolute paths with relative in a normalized
    (packed) workflow.
    """
    # Packed workflows carry their tools in a ``$graph`` list.
    if spec.get('$graph'):
        rewritten = spec.copy()
        rewritten['$graph'] = [replace_location_in_cwl_tool(tool)
                               for tool in spec['$graph']]
        return rewritten
    # A bare tool/workflow document exposes ``inputs`` directly.
    if spec.get('inputs'):
        return replace_location_in_cwl_tool(spec)
    # Anything else is returned untouched.
    return spec
def replace_location_in_cwl_tool(spec):
    """Recursively replace absolute paths with relative."""
    def _basename_in_place(default):
        # Mutate the File default: prefer the 'location' key, fall back to
        # 'path', and keep only the last path component.
        key = "location" if default.get("location") else "path"
        default[key] = default[key].split('/')[-1]

    # Tool-level inputs: rewrite File defaults, keep every parameter.
    rebuilt_inputs = []
    for param in spec['inputs']:
        if param['type'] == "File" and param.get('default', ''):
            _basename_in_place(param['default'])
        rebuilt_inputs.append(param)
    spec['inputs'] = rebuilt_inputs
    # Workflow steps: same rewrite for each step's 'in' bindings.
    if spec.get("steps"):
        rebuilt_steps = []
        for tool in spec['steps']:
            rebuilt_in = []
            for param in tool['in']:
                default = param.get('default')
                if default and type(default) is dict and \
                        default.get('class', default.get('type')) == 'File':
                    _basename_in_place(default)
                rebuilt_in.append(param)
            tool['in'] = rebuilt_in
            rebuilt_steps.append(tool)
        spec['steps'] = rebuilt_steps
    return spec
# Allow running this module directly as a cwl-runner compatible script.
if __name__ == "__main__":
    cwl_runner()
| 36.517699 | 78 | 0.596147 |
795583cbfdd6593295ea2c2ddd57fa4562afad66 | 84 | py | Python | py_utils/box_utils/__init__.py | yuezunli/BMVC2018R-AP | aaccd20934e85b4b635f1b3ec8fcc757589cfb42 | [
"MIT"
] | 9 | 2020-10-30T11:32:35.000Z | 2022-03-31T02:56:00.000Z | py_utils/box_utils/__init__.py | yuezunli/BMVC2018R-AP | aaccd20934e85b4b635f1b3ec8fcc757589cfb42 | [
"MIT"
] | 1 | 2019-10-10T04:13:44.000Z | 2019-10-10T04:13:44.000Z | py_utils/box_utils/__init__.py | danmohaha/BMVC2018R-AP | aaccd20934e85b4b635f1b3ec8fcc757589cfb42 | [
"MIT"
] | 3 | 2020-01-03T03:29:08.000Z | 2020-04-28T08:44:35.000Z | """
Proj: YZ_utils
Date: 8/15/18
Written by Yuezun Li
--------------------------
""" | 14 | 26 | 0.452381 |
7955849fc40c5bc232004d683421ef9f738c995a | 12,584 | py | Python | SAN/lib/san_vision/transforms.py | Jack12xl/landmark-detection | 68b199fe13b0a57cdd7b9302a9f38e16418d675c | [
"MIT"
] | null | null | null | SAN/lib/san_vision/transforms.py | Jack12xl/landmark-detection | 68b199fe13b0a57cdd7b9302a9f38e16418d675c | [
"MIT"
] | null | null | null | SAN/lib/san_vision/transforms.py | Jack12xl/landmark-detection | 68b199fe13b0a57cdd7b9302a9f38e16418d675c | [
"MIT"
] | null | null | null | ##############################################################
### Copyright (c) 2018-present, Xuanyi Dong ###
### Style Aggregated Network for Facial Landmark Detection ###
### Computer Vision and Pattern Recognition, 2018 ###
##############################################################
from __future__ import division
import torch
import sys, math, random, PIL
from PIL import Image, ImageOps
import numpy as np
import numbers
import types
import collections
if sys.version_info.major == 2:
import cPickle as pickle
else:
import pickle
class Compose(object):
  """Chain several (img, points) transforms into a single callable.

  Each transform must accept and return the pair ``(img, points)``; they
  are applied in list order.
  """

  def __init__(self, transforms):
    # Sequence of callables applied in order.
    self.transforms = transforms

  def __call__(self, img, points):
    for transform in self.transforms:
      img, points = transform(img, points)
    return img, points
class TrainScale2WH(object):
  """Rescale the input PIL.Image (and its landmark meta) to a fixed (w, h).
  Args:
    target_size (tuple or list): desired output size as (width, height),
      both ints.
    interpolation (int, optional): Desired interpolation. Default is
      ``PIL.Image.BILINEAR``
  """
  def __init__(self, target_size, interpolation=Image.BILINEAR):
    assert isinstance(target_size, tuple) or isinstance(target_size, list), 'The type of target_size is not right : {}'.format(target_size)
    assert len(target_size) == 2, 'The length of target_size is not right : {}'.format(target_size)
    assert isinstance(target_size[0], int) and isinstance(target_size[1], int), 'The type of target_size is not right : {}'.format(target_size)
    self.target_size = target_size
    self.interpolation = interpolation
  def __call__(self, imgs, point_meta):
    """
    Args:
      img (PIL.Image): Image to be scaled.
      points 3 * N numpy.ndarray [x, y, visiable]
    Returns:
      PIL.Image: Rescaled image.
    """
    # Work on a copy so the caller's meta object is not mutated.
    point_meta = point_meta.copy()
    # Normalize to a list of images; remember the original shape.
    if isinstance(imgs, list): is_list = True
    else: is_list, imgs = False, [imgs]
    w, h = imgs[0].size
    ow, oh = self.target_size[0], self.target_size[1]
    # Scale the landmark coordinates by the same (x, y) factors as the image.
    point_meta.apply_scale( [ow*1./w, oh*1./h] )
    imgs = [ img.resize((ow, oh), self.interpolation) for img in imgs ]
    # Unwrap single-image input back to a bare image.
    if is_list == False: imgs = imgs[0]
    return imgs, point_meta
class ToPILImage(object):
  """Convert a tensor to PIL Image.
  Converts a torch.*Tensor of shape C x H x W or a numpy ndarray of shape
  H x W x C to a PIL.Image while preserving the value range.
  """
  def __call__(self, pic):
    """
    Args:
      pic (Tensor or numpy.ndarray): Image to be converted to PIL.Image.
    Returns:
      PIL.Image: Image converted to PIL.Image.
    """
    npimg = pic
    mode = None
    if isinstance(pic, torch.FloatTensor):
      # BUGFIX: was ``.bool()``, which collapses every non-zero pixel to
      # True and yields a bool array that no PIL mode matches. A float
      # tensor in [0, 1] must be scaled to uint8 via ``.byte()``.
      pic = pic.mul(255).byte()
    if torch.is_tensor(pic):
      # C x H x W tensor -> H x W x C ndarray for PIL.
      npimg = np.transpose(pic.numpy(), (1, 2, 0))
    assert isinstance(npimg, np.ndarray), 'pic should be Tensor or ndarray'
    if npimg.shape[2] == 1:
      # Single channel: drop the channel axis and pick a grayscale mode.
      npimg = npimg[:, :, 0]
      if npimg.dtype == np.uint8:
        mode = 'L'
      if npimg.dtype == np.int16:
        mode = 'I;16'
      if npimg.dtype == np.int32:
        mode = 'I'
      elif npimg.dtype == np.float32:
        mode = 'F'
    else:
      # Multi-channel data is only supported as 8-bit RGB.
      if npimg.dtype == np.uint8:
        mode = 'RGB'
    assert mode is not None, '{} is not supported'.format(npimg.dtype)
    return Image.fromarray(npimg, mode=mode)
class ToTensor(object):
  """Convert a ``PIL.Image`` or ``numpy.ndarray`` to tensor.
  Converts a PIL.Image or numpy.ndarray (H x W x C) in the range
  [0, 255] to a torch.FloatTensor of shape (C x H x W) in the range [0.0, 1.0].
  """
  def __call__(self, pics, points):
    """
    Args:
      pic (PIL.Image or numpy.ndarray): Image to be converted to tensor.
      points 3 * N numpy.ndarray [x, y, visiable] or Point_Meta
    Returns:
      Tensor: Converted image.
    """
    # Accept either one image or a list of images.
    is_list = isinstance(pics, list)
    if not is_list:
      pics = [pics]
    converted = []
    for pic in pics:
      if isinstance(pic, np.ndarray):
        # ndarray path: HWC -> CHW, then scale into [0, 1].
        tensor = torch.from_numpy(pic.transpose((2, 0, 1)))
        converted.append(tensor.float().div(255))
        continue
      # PIL path: pick a dtype based on the image mode.
      if pic.mode == 'I':
        tensor = torch.from_numpy(np.array(pic, np.int32, copy=False))
      elif pic.mode == 'I;16':
        tensor = torch.from_numpy(np.array(pic, np.int16, copy=False))
      else:
        tensor = torch.ByteTensor(torch.ByteStorage.from_buffer(pic.tobytes()))
      # PIL image mode: 1, L, P, I, F, RGB, YCbCr, RGBA, CMYK
      if pic.mode == 'YCbCr':
        nchannel = 3
      elif pic.mode == 'I;16':
        nchannel = 1
      else:
        nchannel = len(pic.mode)
      tensor = tensor.view(pic.size[1], pic.size[0], nchannel)
      # HWC -> CHW (this transpose dominates the loading cost).
      tensor = tensor.transpose(0, 1).transpose(0, 2).contiguous()
      if isinstance(tensor, torch.ByteTensor):
        tensor = tensor.float().div(255)
      converted.append(tensor)
    if not is_list:
      assert len(converted) == 1, 'For non-list data, length of answer must be one not {}'.format(len(converted))
      converted = converted[0]
    return converted, points
class Normalize(object):
  """Normalize an tensor image with mean and standard deviation.
  Given mean: (R, G, B) and std: (R, G, B),
  will normalize each channel of the torch.*Tensor in place, i.e.
  channel = (channel - mean) / std
  Args:
    mean (sequence): Sequence of means for R, G, B channels respecitvely.
    std (sequence): Sequence of standard deviations for R, G, B channels
      respecitvely.
  """
  def __init__(self, mean, std):
    self.mean = mean
    self.std = std
  def __call__(self, tensors, points):
    """
    Args:
      tensor (Tensor): Tensor image of size (C, H, W) to be normalized.
    Returns:
      Tensor: Normalized image.
    """
    # Accept a single tensor or a list of tensors.
    is_list = isinstance(tensors, list)
    batch = tensors if is_list else [tensors]
    for tensor in batch:
      # In-place per-channel normalization.
      for channel, mu, sigma in zip(tensor, self.mean, self.std):
        channel.sub_(mu).div_(sigma)
    return (batch if is_list else batch[0]), points
class PreCrop(object):
  """Crop image(s) to the face bounding box, optionally expanded.
  The crop window is the meta's face box grown by ``expand_ratio`` on each
  side and clamped to the image; landmark coordinates are shifted to match.
  Args:
    expand_ratio (number or None): fraction of box width/height to expand
      by on each side; None is treated as 0 (no expansion).
  """
  def __init__(self, expand_ratio):
    assert expand_ratio is None or isinstance(expand_ratio, numbers.Number), 'The expand_ratio should not be {}'.format(expand_ratio)
    if expand_ratio is None:
      self.expand_ratio = 0
    else:
      self.expand_ratio = expand_ratio
    assert self.expand_ratio >= 0, 'The expand_ratio should not be {}'.format(expand_ratio)
  def __call__(self, imgs, point_meta):
    ## AugCrop has something wrong... For unsupervised data
    # Normalize to a list of images; remember the original shape.
    if isinstance(imgs, list): is_list = True
    else: is_list, imgs = False, [imgs]
    w, h = imgs[0].size
    box = point_meta.get_box().tolist()
    # Expansion margins derived from the box size.
    face_ex_w, face_ex_h = (box[2] - box[0]) * self.expand_ratio, (box[3] - box[1]) * self.expand_ratio
    # Expanded crop window, clamped to the image bounds.
    x1, y1 = int(max(math.floor(box[0]-face_ex_w), 0)), int(max(math.floor(box[1]-face_ex_h), 0))
    x2, y2 = int(min(math.ceil(box[2]+face_ex_w), w)), int(min(math.ceil(box[3]+face_ex_h), h))
    imgs = [ img.crop((x1, y1, x2, y2)) for img in imgs ]
    # Record the crop and translate landmarks into crop coordinates.
    point_meta.set_precrop_wh( imgs[0].size[0], imgs[0].size[1], x1, y1, x2, y2)
    point_meta.apply_offset(-x1, -y1)
    point_meta.apply_bound(imgs[0].size[0], imgs[0].size[1])
    if is_list == False: imgs = imgs[0]
    return imgs, point_meta
class AugScale(object):
  """Randomly rescale image(s) and landmarks by a uniform factor.
  With probability ``scale_prob`` the image is resized by a factor drawn
  uniformly from [scale_min, scale_max]; otherwise it is returned as-is.
  Args:
    scale_prob (number): probability of applying the scaling, in [0, 1].
    scale_min (number): lower bound of the random scale factor.
    scale_max (number): upper bound of the random scale factor.
    interpolation (int, optional): Desired interpolation. Default is
      ``PIL.Image.BILINEAR``
  """
  def __init__(self, scale_prob, scale_min, scale_max, interpolation=Image.BILINEAR):
    assert isinstance(scale_prob, numbers.Number) and scale_prob >= 0
    assert isinstance(scale_min, numbers.Number) and isinstance(scale_max, numbers.Number)
    self.scale_prob = scale_prob
    self.scale_min = scale_min
    self.scale_max = scale_max
    self.interpolation = interpolation
  def __call__(self, imgs, point_meta):
    """
    Args:
      img (PIL.Image): Image to be scaled.
      points 3 * N numpy.ndarray [x, y, visiable]
    Returns:
      PIL.Image: Rescaled image.
    """
    point_meta = point_meta.copy()
    # Skip the augmentation with probability (1 - scale_prob).
    dice = random.random()
    if dice > self.scale_prob:
      return imgs, point_meta
    if isinstance(imgs, list): is_list = True
    else: is_list, imgs = False, [imgs]
    # Uniform random factor in [scale_min, scale_max].
    scale_multiplier = (self.scale_max - self.scale_min) * random.random() + self.scale_min
    w, h = imgs[0].size
    ow, oh = int(w * scale_multiplier), int(h * scale_multiplier)
    imgs = [ img.resize((ow, oh), self.interpolation) for img in imgs ]
    # Landmarks scale by the same single factor on both axes.
    point_meta.apply_scale( [scale_multiplier] )
    if is_list == False: imgs = imgs[0]
    return imgs, point_meta
class AugCrop(object):
  """Crop a fixed (crop_x, crop_y) window around a randomly jittered center.
  The window center is the meta's center plus a uniform offset in
  [-center_perterb_max, center_perterb_max]; out-of-bounds windows are
  handled by padding the image with ``fill`` first.
  """
  def __init__(self, crop_x, crop_y, center_perterb_max, fill=0):
    assert isinstance(crop_x, int) and isinstance(crop_y, int) and isinstance(center_perterb_max, numbers.Number)
    self.crop_x = crop_x
    self.crop_y = crop_y
    self.center_perterb_max = center_perterb_max
    assert isinstance(fill, numbers.Number) or isinstance(fill, str) or isinstance(fill, tuple)
    self.fill = fill
  def __call__(self, imgs, point_meta=None):
    ## AugCrop has something wrong... For unsupervised data
    point_meta = point_meta.copy()
    if isinstance(imgs, list): is_list = True
    else: is_list, imgs = False, [imgs]
    # Random center perturbation in [-max, max] on each axis.
    dice_x, dice_y = random.random(), random.random()
    x_offset = int( (dice_x-0.5) * 2 * self.center_perterb_max)
    y_offset = int( (dice_y-0.5) * 2 * self.center_perterb_max)
    x1 = int(round( point_meta.center[0] + x_offset - self.crop_x / 2. ))
    y1 = int(round( point_meta.center[1] + y_offset - self.crop_y / 2. ))
    x2 = x1 + self.crop_x
    y2 = y1 + self.crop_y
    w, h = imgs[0].size
    if x1 < 0 or y1 < 0 or x2 >= w or y2 >= h:
      # Window leaves the image: pad all sides by the largest overflow,
      # then shift the window (and landmarks) into padded coordinates.
      pad = max(0-x1, 0-y1, x2-w+1, y2-h+1)
      assert pad > 0, 'padding operation in crop must be greater than 0'
      imgs = [ ImageOps.expand(img, border=pad, fill=self.fill) for img in imgs ]
      x1, x2, y1, y2 = x1 + pad, x2 + pad, y1 + pad, y2 + pad
      point_meta.apply_offset(pad, pad)
      point_meta.apply_bound(imgs[0].size[0], imgs[0].size[1])
    # Translate landmarks into crop coordinates and crop.
    point_meta.apply_offset(-x1, -y1)
    imgs = [ img.crop((x1, y1, x2, y2)) for img in imgs ]
    point_meta.apply_bound(imgs[0].size[0], imgs[0].size[1])
    if is_list == False: imgs = imgs[0]
    return imgs, point_meta
class AugRotate(object):
  """Rotate image(s) and landmarks by a random angle about the image center.
  Args:
    max_rotate_degree (number): the rotation angle is drawn uniformly from
      [-max_rotate_degree, max_rotate_degree].
  """
  def __init__(self, max_rotate_degree):
    assert isinstance(max_rotate_degree, numbers.Number)
    self.max_rotate_degree = max_rotate_degree
  def __call__(self, imgs, point_meta):
    """
    Args:
      img (PIL.Image): Image to be cropped.
      point_meta : Point_Meta
    Returns:
      PIL.Image: Rotated image.
    """
    point_meta = point_meta.copy()
    if isinstance(imgs, list): is_list = True
    else: is_list, imgs = False, [imgs]
    # Uniform random angle in [-max_rotate_degree, max_rotate_degree].
    degree = (random.random() - 0.5) * 2 * self.max_rotate_degree
    center = (imgs[0].size[0] / 2, imgs[0].size[1] / 2)
    # PIL >= 4 supports an explicit rotation center; older versions rotate
    # about the image center implicitly.
    if PIL.__version__[0] == '4':
      imgs = [ img.rotate(degree, center=center) for img in imgs ]
    else:
      imgs = [ img.rotate(degree) for img in imgs ]
    # Rotate the landmarks by the same angle about the same center.
    point_meta.apply_rotate(center, degree)
    point_meta.apply_bound(imgs[0].size[0], imgs[0].size[1])
    if is_list == False: imgs = imgs[0]
    return imgs, point_meta
| 33.737265 | 143 | 0.628973 |
79558504b41c1252011d3d2a69097ff1f086a54e | 163,250 | py | Python | ekmmeters.py | ultratolido/ekmmetters | e15325023262e228b4dc037021c28a8d2b9b9b03 | [
"MIT"
] | null | null | null | ekmmeters.py | ultratolido/ekmmetters | e15325023262e228b4dc037021c28a8d2b9b9b03 | [
"MIT"
] | null | null | null | ekmmeters.py | ultratolido/ekmmetters | e15325023262e228b4dc037021c28a8d2b9b9b03 | [
"MIT"
] | null | null | null | """ ekmmeters.py
(c) 2015, 2016 EKM Metering.
The ekmmeters library API for v3 and v4 EKM Omnimeters.
Tested and released under Python 2.6 (tested Centos 6.x only)
and Python 2.7x (Python and Iron Python).
This software is provided under an MIT license:
https://opensource.org/licenses/MIT
"""
import struct
import time
from collections import OrderedDict
from collections import namedtuple
from datetime import date
import sqlite3
import binascii
import serial
import traceback
import sys
import json
import datetime
def ekm_no_log(output_string):
    """ No-op predefined module level logging callback.

    Discards the message; used as the default logger.

    Args:
        output_string (str): string to output.
    """
    return None
def ekm_print_log(output_string):
    """ Simple print predefined module level logging callback.

    Writes the message to stdout via ``print``.

    Args:
        output_string (str): string to output.
    """
    print(output_string)
# Module-level configuration state.  NOTE: the ``global`` statements below
# are no-ops at module scope; they only document intent.
global ekmmeters_log_func #: Module level log or diagnostic print
ekmmeters_log_func = ekm_no_log  # default logger discards output
global ekmmeters_log_level
ekmmeters_log_level = 3  # messages with priority <= this value are emitted
global __EKMMETERS_VERSION
__EKMMETERS_VERSION = "0.2.4"  # library version string
def ekm_set_log(function_name):
    """ Set predefined or user-defined module level log output function.

    Args:
        function_name (function): function taking 1 string returning nothing.
    """
    # Rebind the module-level logging callback used by ekm_log().
    global ekmmeters_log_func
    ekmmeters_log_func = function_name
def ekm_log(logstr, priority=3):
    """ Send string to module level log

    Prefixes the message with a timestamp and forwards it to the
    module-level callback set via :func:`ekm_set_log`, but only when
    ``priority`` does not exceed the configured ``ekmmeters_log_level``.

    Args:
        logstr (str): string to print.
        priority (int): priority, supports 3 (default) and 4 (special).
    """
    if priority <= ekmmeters_log_level:
        # Removed the unused local ``dt = datetime.datetime`` alias.
        stamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M.%f")
        ekmmeters_log_func("[EKM Meter Debug Message: " + stamp + "] -> " + logstr)
def ekm_set_log_level(level=3):
    """ Set the logging level.

    Args:
        level (int): cutoff level (print at level and below).
    """
    # Rebind the module-level cutoff consulted by ekm_log().
    global ekmmeters_log_level
    ekmmeters_log_level = level
class MeterData():
    """ Each :class:`~ekmmeters.SerialBlock` value is an array with these offsets. All Omnimeter versions.
    =============== =
    SizeValue       0
    TypeValue       1
    ScaleValue      2
    StringValue     3
    NativeValue     4
    CalculatedFlag  5
    EventFlag       6
    =============== =
    """
    # Index offsets into each SerialBlock value array.
    SizeValue = 0
    TypeValue = 1
    ScaleValue = 2
    StringValue = 3
    NativeValue = 4
    CalculatedFlag = 5
    EventFlag = 6
class MaxDemandResetInterval():
    """ As passed in :func:`~ekmmeters.Meter.setMaxDemandResetInterval`.  V4 Omnimeters.
    =======  =
    Off      0
    Monthly  1
    Weekly   2
    Daily    3
    Hourly   4
    =======  =
    """
    # Reset-interval codes written to the meter.
    Off = 0
    Monthly = 1
    Weekly = 2
    Daily = 3
    Hourly = 4
class MaxDemandPeriod():
    """As passed in :func:`~ekmmeters.Meter.setMaxDemandPeriod`.  V3 and V4 Omnimeters.
    =============  =
    At_15_Minutes  1
    At_30_Minutes  2
    At_60_Minutes  3
    =============  =
    """
    # Demand-averaging window codes written to the meter.
    At_15_Minutes = 1
    At_30_Minutes = 2
    At_60_Minutes = 3
class LCDItems():
    """ As passed in :func:`~ekmmeters.V4Meter.addLcdItem`.  V4 Omnimeters.
    ===================  ==
    kWh_Tot               1
    Rev_kWh_Tot           2
    RMS_Volts_Ln_1        3
    RMS_Volts_Ln_2        4
    RMS_Volts_Ln_3        5
    Amps_Ln_1             6
    Amps_Ln_2             7
    Amps_Ln_3             8
    RMS_Watts_Ln_1        9
    RMS_Watts_Ln_2       10
    RMS_Watts_Ln_3       11
    RMS_Watts_Tot        12
    Power_Factor_Ln_1    13
    Power_Factor_Ln_2    14
    Power_Factor_Ln_3    15
    kWh_Tariff_1         16
    kWh_Tariff_2         17
    kWh_Tariff_3         18
    kWh_Tariff_4         19
    Rev_kWh_Tariff_1     20
    Rev_kWh_Tariff_2     21
    Rev_kWh_Tariff_3     22
    Rev_kWh_Tariff_4     23
    Reactive_Pwr_Ln_1    24
    Reactive_Pwr_Ln_2    25
    Reactive_Pwr_Ln_3    26
    Reactive_Pwr_Tot     27
    Line_Freq            28
    Pulse_Cnt_1          29
    Pulse_Cnt_2          30
    Pulse_Cnt_3          31
    kWh_Ln_1             32
    Rev_kWh_Ln_1         33
    kWh_Ln_2             34
    Rev_kWh_Ln_2         35
    kWh_Ln_3             36
    Rev_kWh_Ln_3         37
    Reactive_Energy_Tot  38
    Max_Demand_Rst       39
    Rev_kWh_Rst          40
    State_Inputs         41
    Max_Demand           42
    ===================  ==
    """
    # Numeric display-item codes accepted by the meter's LCD list.
    kWh_Tot = 1
    Rev_kWh_Tot = 2
    RMS_Volts_Ln_1 = 3
    RMS_Volts_Ln_2 = 4
    RMS_Volts_Ln_3 = 5
    Amps_Ln_1 = 6
    Amps_Ln_2 = 7
    Amps_Ln_3 = 8
    RMS_Watts_Ln_1 = 9
    RMS_Watts_Ln_2 = 10
    RMS_Watts_Ln_3 = 11
    RMS_Watts_Tot = 12
    Power_Factor_Ln_1 = 13
    Power_Factor_Ln_2 = 14
    Power_Factor_Ln_3 = 15
    kWh_Tariff_1 = 16
    kWh_Tariff_2 = 17
    kWh_Tariff_3 = 18
    kWh_Tariff_4 = 19
    Rev_kWh_Tariff_1 = 20
    Rev_kWh_Tariff_2 = 21
    Rev_kWh_Tariff_3 = 22
    Rev_kWh_Tariff_4 = 23
    Reactive_Pwr_Ln_1 = 24
    Reactive_Pwr_Ln_2 = 25
    Reactive_Pwr_Ln_3 = 26
    Reactive_Pwr_Tot = 27
    Line_Freq = 28
    Pulse_Cnt_1 = 29
    Pulse_Cnt_2 = 30
    Pulse_Cnt_3 = 31
    kWh_Ln_1 = 32
    Rev_kWh_Ln_1 = 33
    kWh_Ln_2 = 34
    Rev_kWh_Ln_2 = 35
    kWh_Ln_3 = 36
    Rev_kWh_Ln_3 = 37
    Reactive_Energy_Tot = 38
    Max_Demand_Rst = 39
    Rev_kWh_Rst = 40
    State_Inputs = 41
    Max_Demand = 42
class CTRatio():
    """ As passed in :func:`~ekmmeters.Meter.setCTRatio`.  V3 and V4 Omnimeters.
    =========  ====
    Amps_100    100
    Amps_200    200
    Amps_400    400
    Amps_600    600
    Amps_800    800
    Amps_1000  1000
    Amps_1200  1200
    Amps_1500  1500
    Amps_2000  2000
    Amps_3000  3000
    Amps_4000  4000
    Amps_5000  5000
    =========  ====
    """
    # BUGFIX: Amps_100 was 200, contradicting the table above and the
    # 100 A CT rating the name denotes.
    Amps_100 = 100
    Amps_200 = 200
    Amps_400 = 400
    Amps_600 = 600
    Amps_800 = 800
    Amps_1000 = 1000
    Amps_1200 = 1200
    Amps_1500 = 1500
    Amps_2000 = 2000
    Amps_3000 = 3000
    Amps_4000 = 4000
    Amps_5000 = 5000
class Field():
    """ Union of all V3A and V4AB Fields Returned.
    Use these values to directy get read data with
    Meter::getField() or in directy traversal of
    :class:`~ekmmeters.SerialBlock`.
    =========================  =======================
    Meter_Address              12 character Mfr ID'
    Time_Stamp                 Epoch in ms at read
    Model                      Meter model
    Firmware                   Meter firmware
    kWh_Tot                    Meter power total
    kWh_Tariff_1               Power in timeslot 1
    kWh_Tariff_2               Power in timeslot 2
    kWh_Tariff_3               Power in timeslot 3
    kWh_Tariff_4               Power in timeslot 4
    Rev_kWh_Tot                Meter rev. total
    Rev_kWh_Tariff_1           Rev power in timeslot 1
    Rev_kWh_Tariff_2           Rev power in timeslot 2
    Rev_kWh_Tariff_3           Rev power in timeslot 3
    Rev_kWh_Tariff_4           Rev power in timeslot 4
    RMS_Volts_Ln_1             Volts line 1
    RMS_Volts_Ln_2             Volts line 2
    RMS_Volts_Ln_3             Volts line 3
    Amps_Ln_1                  Current line 1
    Amps_Ln_2                  Current line 2
    Amps_Ln_3                  Current line 3
    RMS_Watts_Ln_1             Instantaneous watts line 1
    RMS_Watts_Ln_2             Instantaneous watts line 2
    RMS_Watts_Ln_3             Instantaneous watts line 3
    RMS_Watts_Tot              Instantaneous watts 1 + 2 + 3
    Cos_Theta_Ln_1             Prefix in :class:`~ekmmeters.CosTheta`
    Cos_Theta_Ln_2             Prefix in :class:`~ekmmeters.CosTheta`
    Cos_Theta_Ln_3             Prefix in :class:`~ekmmeters.CosTheta`
    Max_Demand                 Demand in period
    Max_Demand_Period          :class:`~ekmmeters.MaxDemandPeriod`
    Meter_Time                 :func:`~ekmmeters.Meter.setTime` and :func:`~ekmmeters.Meter.splitEkmDate`
    CT_Ratio                   :class:`~ekmmeters.Meter.setCTRatio`
    Pulse_Cnt_1                Pulse Count Line 1
    Pulse_Cnt_2                Pulse Count Line 2
    Pulse_Cnt_3                Pulse Count Line 3
    Pulse_Ratio_1              :func:`~ekmmeters.V4Meter.setPulseInputRatio`
    Pulse_Ratio_2              :func:`~ekmmeters.V4Meter.setPulseInputRatio`
    Pulse_Ratio_3              :func:`~ekmmeters.V4Meter.setPulseInputRatio`
    State_Inputs'              :class:`~ekmmeters.StateIn`
    Power_Factor_Ln_1          EKM Power Factor
    Power_Factor_Ln_2          EKM Power Factor
    Power_Factor_Ln_3          EKM Power Factor
    Reactive_Energy_Tot        Total VAR
    kWh_Ln_1                   Line 1 power
    kWh_Ln_2                   Line 2 power
    kWh_Ln_3                   Line 3 power
    Rev_kWh_Ln_1               Line 1 reverse power
    Rev_kWh_Ln_2               Line 2 reverse power
    Rev_kWh_Ln_3               Line 3 revers power
    Resettable_kWh_Tot         :func:`~ekmmeters.V4Meter.setZeroResettableKWH`
    Resettable_Rev_kWh_Tot     :func:`~ekmmeters.V4Meter.setZeroResettableKWH`
    Reactive_Pwr_Ln_1          VAR Line 1
    Reactive_Pwr_Ln_2          VAR Line 2
    Reactive_Pwr_Ln_3          VAR Line 3
    Reactive_Pwr_Tot           VAR Total
    Line_Freq                  Freq. Hz.
    State_Watts_Dir            :class:`~ekmmeters.DirectionFlag`
    State_Out                  :class:`~ekmmeters.StateOut`
    kWh_Scale                  :class:`~ekmmeters.ScaleKWH`
    RMS_Watts_Max_Demand       Power peak in period
    Pulse_Output_Ratio         :class:`~ekmmeters.PulseOutput`
    Net_Calc_Watts_Ln_1        RMS_Watts with Direction
    Net_Calc_Watts_Ln_2        RMS_Watts with Direction
    Net_Calc_Watts_Ln_3        RMS_Watts with Direction
    Net_Calc_Watts_Tot         RMS_Watts with Direction
    Status_A                   Reserved diagnostic.
    Status_B                   Reserved diagnostic.
    Status_C                   Reserved diagnostic.
    =========================  =======================
    Power_Factor is the only power factor measurement supported by
    upstring EKM products.  The original Cos Theta value
    is provided as an API-only feature.
    """
    # String keys used to index SerialBlock read results.
    Meter_Address = 'Meter_Address'
    Time_Stamp = 'Time_Stamp'
    Model = 'Model'
    Firmware = 'Firmware'
    kWh_Tot = 'kWh_Tot'
    kWh_Tariff_1 = 'kWh_Tariff_1'
    kWh_Tariff_2 = 'kWh_Tariff_2'
    kWh_Tariff_3 = 'kWh_Tariff_3'
    kWh_Tariff_4 = 'kWh_Tariff_4'
    Rev_kWh_Tot = 'Rev_kWh_Tot'
    Rev_kWh_Tariff_1 = 'Rev_kWh_Tariff_1'
    Rev_kWh_Tariff_2 = 'Rev_kWh_Tariff_2'
    Rev_kWh_Tariff_3 = 'Rev_kWh_Tariff_3'
    Rev_kWh_Tariff_4 = 'Rev_kWh_Tariff_4'
    RMS_Volts_Ln_1 = 'RMS_Volts_Ln_1'
    RMS_Volts_Ln_2 = 'RMS_Volts_Ln_2'
    RMS_Volts_Ln_3 = 'RMS_Volts_Ln_3'
    Amps_Ln_1 = 'Amps_Ln_1'
    Amps_Ln_2 = 'Amps_Ln_2'
    Amps_Ln_3 = 'Amps_Ln_3'
    RMS_Watts_Ln_1 = 'RMS_Watts_Ln_1'
    RMS_Watts_Ln_2 = 'RMS_Watts_Ln_2'
    RMS_Watts_Ln_3 = 'RMS_Watts_Ln_3'
    RMS_Watts_Tot = 'RMS_Watts_Tot'
    Cos_Theta_Ln_1 = 'Cos_Theta_Ln_1'
    Cos_Theta_Ln_2 = 'Cos_Theta_Ln_2'
    Cos_Theta_Ln_3 = 'Cos_Theta_Ln_3'
    Max_Demand = 'Max_Demand'
    Max_Demand_Period = 'Max_Demand_Period'
    Meter_Time = 'Meter_Time'
    CT_Ratio = 'CT_Ratio'
    Pulse_Cnt_1 = 'Pulse_Cnt_1'
    Pulse_Cnt_2 = 'Pulse_Cnt_2'
    Pulse_Cnt_3 = 'Pulse_Cnt_3'
    Pulse_Ratio_1 = 'Pulse_Ratio_1'
    Pulse_Ratio_2 = 'Pulse_Ratio_2'
    Pulse_Ratio_3 = 'Pulse_Ratio_3'
    State_Inputs = 'State_Inputs'
    Power_Factor_Ln_1 = 'Power_Factor_Ln_1'
    Power_Factor_Ln_2 = 'Power_Factor_Ln_2'
    Power_Factor_Ln_3 = 'Power_Factor_Ln_3'
    Reactive_Energy_Tot = 'Reactive_Energy_Tot'
    kWh_Ln_1 = 'kWh_Ln_1'
    kWh_Ln_2 = 'kWh_Ln_2'
    kWh_Ln_3 = 'kWh_Ln_3'
    Rev_kWh_Ln_1 = 'Rev_kWh_Ln_1'
    Rev_kWh_Ln_2 = 'Rev_kWh_Ln_2'
    Rev_kWh_Ln_3 = 'Rev_kWh_Ln_3'
    Resettable_kWh_Tot = 'Resettable_kWh_Tot'
    Resettable_Rev_kWh_Tot = 'Resettable_Rev_kWh_Tot'
    Reactive_Pwr_Ln_1 = 'Reactive_Pwr_Ln_1'
    Reactive_Pwr_Ln_2 = 'Reactive_Pwr_Ln_2'
    Reactive_Pwr_Ln_3 = 'Reactive_Pwr_Ln_3'
    Reactive_Pwr_Tot = 'Reactive_Pwr_Tot'
    Line_Freq = 'Line_Freq'
    State_Watts_Dir = 'State_Watts_Dir'
    State_Out = 'State_Out'
    kWh_Scale = 'kWh_Scale'
    RMS_Watts_Max_Demand = 'RMS_Watts_Max_Demand'
    Pulse_Output_Ratio = 'Pulse_Output_Ratio'
    Net_Calc_Watts_Ln_1 = 'Net_Calc_Watts_Ln_1'
    Net_Calc_Watts_Ln_2 = 'Net_Calc_Watts_Ln_2'
    Net_Calc_Watts_Ln_3 = 'Net_Calc_Watts_Ln_3'
    Net_Calc_Watts_Tot = 'Net_Calc_Watts_Tot'
    Status_A = 'Status_A'
    Status_B = 'Status_B'
    Status_C = 'Status_C'
class Seasons():
    """ As passed to :func:`~ekmmeters.Meter.assignSeasonSchedule`.  V3 and V4 Omnimeters.
    assign* methods use a zero based index for seasons.
    You may set a season using one of these constants
    or fill and iterate over range(Extents.Seaons).
    ========  =
    Season_1  0
    Season_2  1
    Season_3  2
    Season_4  3
    ========  =
    """
    # Zero-based season indexes.
    Season_1 = 0
    Season_2 = 1
    Season_3 = 2
    Season_4 = 3
class Months():
    """ As passed to :func:`~ekmmeters.Meter.extractMonthTariff`.  V3 and V4 Omnimeters.
    ========  =
    Month_1   0
    Month_2   1
    Month_3   2
    Month_4   3
    Month_5   4
    Month_6   5
    ========  =
    """
    # Zero-based month-slot indexes.
    Month_1 = 0
    Month_2 = 1
    Month_3 = 2
    Month_4 = 3
    Month_5 = 4
    Month_6 = 5
class Tariffs():
    """ As passed to :func:`~ekmmeters.Meter.assignScheduleTariff`.  V3 and V4 Omnimeters.
    ========  =
    Tariff_1  0
    Tariff_2  1
    Tariff_3  2
    Tariff_4  3
    ========  =
    """
    # Zero-based tariff indexes.
    Tariff_1 = 0
    Tariff_2 = 1
    Tariff_3 = 2
    Tariff_4 = 3
class Extents():
    """ Traversal extents to use with for range(Extent) idiom.  V3 and V4 Omnimeters.
    Use of range(Extent.Entity) as an iterator insures safe
    assignnment without off by one errors.
    ==========  ==
    Seasons      4
    Holidays    20
    Tariffs      4
    Schedules    8
    Months       6
    ==========  ==
    """
    # Counts of each schedulable entity on the meter.
    Seasons = 4
    Holidays = 20
    Tariffs = 4
    Schedules = 8
    Months = 6
class PulseOutput():
    """ As passed to :func:`~ekmmeters.V4Meter.setPulseOutputRatio`.  V4 Omnimeters.
    ==========  ==========
    Ratio_1     Ratio_40
    Ratio_2     Ratio_50
    Ratio_4     Ratio_80
    Ratio_5     Ratio_100
    Ratio_8     Ratio_200
    Ratio_10    Ratio_400
    Ratio_16    Ratio_800
    Ratio_20    Ratio_1600
    Ratio_25
    ==========  ==========
    """
    # Supported pulse-output divisor values (constant value == ratio).
    Ratio_1 = 1
    Ratio_2 = 2
    Ratio_4 = 4
    Ratio_5 = 5
    Ratio_8 = 8
    Ratio_10 = 10
    Ratio_16 = 16
    Ratio_20 = 20
    Ratio_25 = 25
    Ratio_40 = 40
    Ratio_50 = 50
    Ratio_80 = 80
    Ratio_100 = 100
    Ratio_200 = 200
    Ratio_400 = 400
    Ratio_800 = 800
    Ratio_1600 = 1600
class Pulse():
    """ As passed to :func:`~ekmmeters.V4Meter.setPulseInputRatio`.  V4 Omnimeters.
    Simple constant to clarify call.
    === =
    In1 1
    In2 2
    In3 3
    === =
    """
    # Pulse input line numbers (1-based).
    In1 = 1
    In2 = 2
    In3 = 3
class Schedules():
    """ Zero-based schedule indexes. V3 and V4 Omnimeters.

    Schedules on the meter are zero based; these apply to most passed
    schedule parameters.

    ========== =
    Schedule_1 0
    Schedule_2 1
    Schedule_3 2
    Schedule_4 3
    Schedule_5 4
    Schedule_6 5
    Schedule_7 6
    Schedule_8 7
    ========== =
    """
    (Schedule_1, Schedule_2, Schedule_3, Schedule_4,
     Schedule_5, Schedule_6, Schedule_7, Schedule_8) = range(8)
class ReadSchedules():
    """ Schedule block selector for :func:`~ekmmeters.Meter.readScheduleTariffs` and :func:`~ekmmeters.Meter.getSchedulesBuffer`. V3 and V4.

    ================ ==================================
    Schedules_1_To_4 1st 4 blocks tariffs and schedules
    Schedules_5_To_8 2nd 4 blocks tariffs and schedules
    ================ ==================================
    """
    Schedules_1_To_4, Schedules_5_To_8 = 0, 1
class ReadMonths():
    """ Month tariff direction selector for :func:`~ekmmeters.Meter.readMonthTariffs` and :func:`~ekmmeters.Meter.getMonthsBuffer`. V3 and V4.

    Selects the forward or reverse six month tariff data.

    ========== ================================
    kWh        Select forward month tariff data
    kWhReverse Select reverse month tariff data
    ========== ================================
    """
    kWh, kWhReverse = 1, 2
class DirectionFlag():
    """ Per-line RMS_Watts direction codes. V4 Omnimeters.

    On V4, the State_Watts_Dir mask shows the RMS_Watts direction on
    lines 1-3, and is used to generate the Calc_Net_Watts field on every
    read.  Each word in a constant name is the direction of the
    corresponding line at the moment of the read: for example,
    ForwardReverseForward means RMS_Watts lines one and three are
    positive and line two is negative.

    ===================== =
    ForwardForwardForward 1
    ForwardForwardReverse 2
    ForwardReverseForward 3
    ReverseForwardForward 4
    ForwardReverseReverse 5
    ReverseForwardReverse 6
    ReverseReverseForward 7
    ReverseReverseReverse 8
    ===================== =
    """
    (ForwardForwardForward, ForwardForwardReverse, ForwardReverseForward,
     ReverseForwardForward, ForwardReverseReverse, ReverseForwardReverse,
     ReverseReverseForward, ReverseReverseReverse) = range(1, 9)
class ScaleKWH():
    """ kWh scaling values controlled by Fields.kWh. V4 Omnimeters.

    When MeterData.ScaleValue is ScaleType.KWH, Fields.kWh_Scale holds
    one of these.

    =========== == ===========
    NoScale     0  no scaling
    Scale10     1  scale 10^-1
    Scale100    2  scale 10^-2
    EmptyScale  -1 Reserved
    =========== == ===========
    """
    NoScale, Scale10, Scale100, EmptyScale = 0, 1, 2, -1
class ScaleType():
    """ Scale type defined in SerialBlock. V4 Omnimeters.

    These values are set when a field is defined in a SerialBlock.
    Div10 and Div100 cause immediate scaling; otherwise scaling follows
    the value in Field.kWh_Scale as described in ScaleKWH.

    ====== ==============================
    KWH    :class:`~ekmmeters.ScaleKWH`
    No     Do not scale
    Div10  Scale 10^-1
    Div100 Scale 10^-2
    ====== ==============================
    """
    KWH = "kwh"     # deferred: scale per the meter-reported kWh_Scale
    No = "None"     # leave value unscaled
    Div10 = "10"    # immediate divide by 10
    Div100 = "100"  # immediate divide by 100
class FieldType():
    """ Field type for every SerialBlock element. V3 and V4 Omnimeters.

    Data arrives as ascii; the field type determines disposition.  The
    destination type is Python.

    ============ ==========================
    NoType       No type assigned, invalid
    Hex          Implicit hex string
    Int          Implicit int
    Float        Implicit float
    String       Leave as string, terminate
    PowerFactor  EKM L or C prefixed pf
    ============ ==========================
    """
    NoType = "None"      # no type assigned
    Hex = "hex"          # leave as hexified string
    Int = "int"          # int in python
    Float = "float"      # float in python
    String = "string"    # string in python
    PowerFactor = "pf"   # do power factor conversion
class Relay():
    """ Relay selector, as passed to :func:`~ekmmeters.V4Meter.setRelay`. V4 Omnimeters.

    ====== ================
    Relay1 OUT1 on V4 Meter
    Relay2 OUT2 on V4 Meter
    ====== ================
    """
    Relay1, Relay2 = 1, 2
class RelayState():
    """ Relay state, as passed to :func:`~ekmmeters.V4Meter.setRelay`. V4 Omnimeters.

    ========== =
    RelayOpen  0
    RelayClose 1
    ========== =
    """
    RelayOpen = 0    # open-relay command code for v4 meter
    RelayClose = 1   # close-relay command code for v4 meter
class RelayInterval():
    """ Relay interval bounds, as passed to :func:`~ekmmeters.V4Meter.setRelay`. V4 Omnimeters.

    ===== ======================
    Max   9999 seconds
    Min   0, parameter limit
    Hold  0 (lock relay state)
    ===== ======================
    """
    Max = 9999   # maximum wait in seconds
    Min = 0      # lowest legal value
    Hold = Min   # hold is simply zero
class StateOut():
    """ Pulse output state at time of read. V4 Omnimeters.

    ======= =
    OffOff  1
    OffOn   2
    OnOff   3
    OnOn    4
    ======= =
    """
    OffOff, OffOn, OnOff, OnOn = range(1, 5)
class StateIn():
    """ State of each pulse input line at time of read. V4 Omnimeters.

    ================= =
    HighHighHigh      0
    HighHighLow       1
    HighLowHigh       2
    HighLowLow        3
    LowHighHigh       4
    LowHighLow        5
    LowLowHigh        6
    LowLowLow         7
    ================= =
    """
    (HighHighHigh, HighHighLow, HighLowHigh, HighLowLow,
     LowHighHigh, LowHighLow, LowLowHigh, LowLowLow) = range(8)
class CosTheta():
    """ Prefix characters returned in power factor. V3 and V4 Omnimeters.

    Note a cos of zero is prefixed with a single space.
    """
    InductiveLag = "L"    # inductive (lagging) load
    CapacitiveLead = "C"  # capacitive (leading) load
    NoLeadOrLag = " "     # unity: neither lead nor lag
class SerialBlock(OrderedDict):
    """ Simple subclass of collections.OrderedDict.

    Each key is a :class:`~ekmmeters.Field` and each value is a
    :class:`~ekmmeters.MeterData` indexed array, whose slots hold:

    ============== ==============================================
    SizeValue      Integer. Equivalent to struct char[SizeValue]
    TypeValue      A :class:`~ekmmeters.FieldType` value.
    ScaleValue     A :class:`~ekmmeters.ScaleType` value.
    StringValue    Printable, scaled and formatted content.
    NativeValue    Converted, scaled value of field native type.
    CalculatedFlag If True, not part of serial read, calculated.
    EventFlag      If True, state value
    ============== ==============================================
    """

    def __init__(self):
        # No extra state: an empty, insertion-ordered mapping.
        OrderedDict.__init__(self)
class SerialPort(object):
    """ Wrapper for serial port commands.

    It should only be necessary to create one SerialPort per real port.
    Object construction sets the member variables; the port is opened with
    initPort(), and any serial exceptions will be thrown at that point.

    The standard serial settings for v3 and v4 EKM meters are 9600 baud,
    7 bits, 1 stop bit. The baud rate may be reset but all timings
    and tests in this library are at 9600 baud. Bits, stop and parity may
    not be changed.
    """

    def __init__(self, ttyport, baudrate=9600, force_wait = 0.1):
        """
        Args:
            ttyport (str): port name, ex 'COM3' '/dev/ttyUSB0'
            baudrate (int): optional, 9600 default and recommended
            force_wait(float) : optional post commnd sleep, if required
        """
        self.m_ttyport = ttyport
        self.m_baudrate = baudrate
        self.m_ser = None            # pyserial Serial, created in initPort()
        self.m_fd = None             # unused here; reserved for raw fd access
        self.m_max_waits = 60        # polling loop iterations before giving up
        self.m_wait_sleep = 0.05     # seconds per polling wait (see setPollingValues)
        self.m_force_wait = force_wait   # post-command settle sleep in seconds
        self.m_init_wait = 0.2       # settle time after opening the port
        pass

    def initPort(self):
        """ Required initialization call, wraps pyserial constructor.

        Returns:
            bool: True if the port opened; False on any exception (logged).
        """
        try:
            # NOTE(review): the class docstring says "no parity" but the
            # port is opened with even parity (7E1) -- confirm which
            # matches the meter hardware.
            self.m_ser = serial.Serial(port=self.m_ttyport,
                                       baudrate=self.m_baudrate,
                                       timeout=0,
                                       parity=serial.PARITY_EVEN,
                                       stopbits=serial.STOPBITS_ONE,
                                       bytesize=serial.SEVENBITS,
                                       rtscts=False)
            ekm_log("Pyserial version = " + serial.VERSION)
            ekm_log("Port = " + self.m_ttyport)
            ekm_log("Rate = " + str(self.m_baudrate))
            time.sleep(self.m_init_wait)
            return True
        except:
            ekm_log(traceback.format_exc(sys.exc_info()))
            return False

    def getName(self):
        """ Getter for serial port name

        Returns:
            string: name of serial port (ex: 'COM3', '/dev/ttyS0')
        """
        return self.m_ttyport

    def closePort(self):
        """ Passthrough for pyserial port close()."""
        self.m_ser.close()
        pass

    def write(self, output):
        """Passthrough for pyserial Serial.write().

        Args:
            output (str): Block to write to port
        """
        # Python 2 idiom: str.encode('ascii', 'ignore') strips non-ascii.
        view_str = output.encode('ascii', 'ignore')
        if (len(view_str) > 0):
            self.m_ser.write(view_str)
            self.m_ser.flush()
            # Give the meter time to act before the next command.
            time.sleep(self.m_force_wait)
        pass

    def setPollingValues(self, max_waits, wait_sleep):
        """ Optional polling loop control

        Args:
            max_waits (int): waits
            wait_sleep (int): ms per wait
        """
        self.m_max_waits = max_waits
        self.m_wait_sleep = wait_sleep

    def getResponse(self, context=""):
        """ Poll for finished block or first byte ACK.

        Returns a full 255-byte meter block, a single 0x06 ACK byte, or an
        empty string on timeout/exception.

        Args:
            context (str): internal serial call context.

        Returns:
            string: Response, implict cast from byte array.
        """
        waits = 0 # allowed interval counter
        response_str = "" # returned bytes in string default
        try:
            waits = 0 # allowed interval counter
            while (waits < self.m_max_waits):
                bytes_to_read = self.m_ser.inWaiting()
                if bytes_to_read > 0:
                    # Python 2 idiom: read() returns str; non-ascii dropped.
                    next_chunk = str(self.m_ser.read(bytes_to_read)).encode('ascii', 'ignore')
                    response_str += next_chunk
                    # A complete meter response block is exactly 255 bytes.
                    if (len(response_str) == 255):
                        time.sleep(self.m_force_wait)
                        return response_str
                    # A lone 0x06 is the meter ACK for a settings command.
                    # (str.encode('hex') is Python 2 only.)
                    if (len(response_str) == 1) and (response_str.encode('hex') == '06'):
                        time.sleep(self.m_force_wait)
                        return response_str
                else: # hang out -- half shortest expected interval (50 ms)
                    waits += 1
                    time.sleep(self.m_force_wait)
            # Timed out: discard any partial data and return empty.
            response_str = ""
        except:
            ekm_log(traceback.format_exc(sys.exc_info()))
        return response_str
class MeterDB(object):
    """ Base class for single-table reads database abstraction.

    Builds portable SQL for a single Meter_Reads table covering the union
    of V3 and V4 fields.  Subclasses override dbExec() to run the
    generated SQL against a concrete engine (see SqliteMeterDB).
    """

    def __init__(self, connection_string):
        """
        Args:
            connection_string (str): database appropriate connection string
        """
        self.m_connection_string = connection_string
        self.m_all_fields = SerialBlock()   # union of V3 and V4 read fields
        self.combineAB()

    def setConnectString(self, connection_string):
        """ Setter for connection string.

        Args:
            connection_string (str): Connection string.
        """
        self.m_connection_string = connection_string

    def combineAB(self):
        """ Use the serial block definitions in V3 and V4 to create one field list.

        V3 fields are added first, then any V4-only fields.  Reserved and
        CRC fields are never stored.
        """
        v4definition_meter = V4Meter()
        v4definition_meter.makeAB()
        defv4 = v4definition_meter.getReadBuffer()
        v3definition_meter = V3Meter()
        v3definition_meter.makeReturnFormat()
        defv3 = v3definition_meter.getReadBuffer()
        for fld in defv3:
            if fld not in self.m_all_fields:
                compare_fld = fld.upper()
                if not "RESERVED" in compare_fld and not "CRC" in compare_fld:
                    self.m_all_fields[fld] = defv3[fld]
        for fld in defv4:
            if fld not in self.m_all_fields:
                compare_fld = fld.upper()
                if not "RESERVED" in compare_fld and not "CRC" in compare_fld:
                    self.m_all_fields[fld] = defv4[fld]

    def mapTypeToSql(self, fld_type=FieldType.NoType, fld_len=0):
        """ Translate FieldType to portable SQL Type. Override if needful.

        Args:
            fld_type (int): :class:`~ekmmeters.FieldType` in serial block.
            fld_len (int): Binary length in serial block

        Returns:
            string: Portable SQL type and length where appropriate.
        """
        if fld_type == FieldType.Float:
            return "FLOAT"
        elif fld_type == FieldType.String:
            return "VARCHAR(" + str(fld_len) + ")"
        elif fld_type == FieldType.Int:
            return "INT"
        elif fld_type == FieldType.Hex:
            # Two hex characters per raw byte.
            return "VARCHAR(" + str(fld_len * 2) + ")"
        elif fld_type == FieldType.PowerFactor:
            return "VARCHAR(" + str(fld_len) + ")"
        else:
            # Fix: previously logged str(type) -- the builtin -- rather
            # than the unhandled field type passed in.
            ekm_log("Type " + str(fld_type) + " not handled by mapTypeToSql, returned VARCHAR(255)")
            return "VARCHAR(255)"

    def fillCreate(self, qry_str):
        """ Return query portion below CREATE.

        Args:
            qry_str (str): String as built.

        Returns:
            string: Passed string with fields appended.
        """
        count = 0
        for fld in self.m_all_fields:
            fld_type = self.m_all_fields[fld][MeterData.TypeValue]
            fld_len = self.m_all_fields[fld][MeterData.SizeValue]
            qry_spec = self.mapTypeToSql(fld_type, fld_len)
            if count > 0:
                qry_str += ", \n"
            qry_str = qry_str + ' ' + fld + ' ' + qry_spec
            count += 1
        # Every table carries the insert timestamp and both raw reads.
        qry_str += (",\n\t" + Field.Time_Stamp + " BIGINT,\n\t" +
                    "Raw_A VARCHAR(512),\n\t" +
                    "Raw_B VARCHAR(512)\n)")
        return qry_str

    def sqlCreate(self):
        """ Reasonably portable SQL CREATE for defined fields.

        Returns:
            string: Portable as possible SQL Create for all-reads table.
        """
        qry_str = self.fillCreate("CREATE TABLE Meter_Reads ( \n\r")
        ekm_log(qry_str, 4)
        return qry_str

    def sqlInsert(self, def_buf, raw_a, raw_b):
        """ Reasonably portable SQL INSERT for from combined read buffer.

        NOTE(review): values are spliced into the SQL string rather than
        bound as parameters.  Field contents come from meter reads, but a
        stray quote in a string field would still break (or inject into)
        the statement -- consider parameterizing in subclasses.

        Args:
            def_buf (SerialBlock): Database only serial block of all fields.
            raw_a (str): Raw A read as hex string.
            raw_b (str): Raw B read (if exists, otherwise empty) as hex string.

        Returns:
            str: SQL insert for passed read buffer
        """
        # Column list.
        count = 0
        qry_str = "INSERT INTO Meter_Reads ( \n\t"
        for fld in def_buf:
            if count > 0:
                qry_str += ", \n\t"
            qry_str = qry_str + fld
            count += 1
        qry_str += (",\n\t" + Field.Time_Stamp + ", \n\t" +
                    "Raw_A,\n\t" +
                    "Raw_B\n) \n" +
                    "VALUES( \n\t")
        # Value list: text-ish types are single quoted, numerics are not.
        count = 0
        for fld in def_buf:
            if count > 0:
                qry_str += ", \n\t"
            fld_type = def_buf[fld][MeterData.TypeValue]
            fld_str_content = def_buf[fld][MeterData.StringValue]
            delim = ""
            if (fld_type == FieldType.Hex) or \
               (fld_type == FieldType.String) or \
               (fld_type == FieldType.PowerFactor):
                delim = "'"
            qry_str = qry_str + delim + fld_str_content + delim
            count += 1
        # Millisecond epoch timestamp plus the raw reads as hex.
        time_val = int(time.time() * 1000)
        qry_str = (qry_str + ",\n\t" + str(time_val) + ",\n\t'" +
                   binascii.b2a_hex(raw_a) + "'" + ",\n\t'" +
                   binascii.b2a_hex(raw_b) + "'\n);")
        ekm_log(qry_str, 4)
        return qry_str

    def sqlIdxMeterTime(self):
        """ Reasonably portable Meter_Address and Time_Stamp index SQL create.

        Returns:
            str: SQL CREATE INDEX statement.
        """
        return ("CREATE INDEX idx_meter_time " +
                "ON Meter_Reads('" + Field.Meter_Address + "', '" +
                Field.Time_Stamp + "')")

    def sqlIdxMeter(self):
        """ Reasonably portable Meter_Address index SQL create.

        Returns:
            str: SQL CREATE INDEX statement.
        """
        return ("CREATE INDEX idx_meter " +
                "ON Meter_Reads('" + Field.Meter_Address + "')")

    def sqlDrop(self):
        """ Reasonably portable drop of reads table.

        Returns:
            str: SQL DROP TABLE statement.
        """
        return 'DROP TABLE Meter_Reads'

    def dbInsert(self, def_buf, raw_a, raw_b):
        """ Call overridden dbExec() with built insert statement.

        Args:
            def_buf (SerialBlock): Block of read buffer fields to write.
            raw_a (str): Hex string of raw A read.
            raw_b (str): Hex string of raw B read or empty.
        """
        self.dbExec(self.sqlInsert(def_buf, raw_a, raw_b))

    def dbCreate(self):
        """ Call overridden dbExec() with built create statement. """
        self.dbExec(self.sqlCreate())

    def dbDropReads(self):
        """ Call overridden dbExec() with build drop statement. """
        self.dbExec(self.sqlDrop())

    def dbExec(self, query_str):
        """ Required override for MeterDB subclass, run a query.

        Args:
            query_str (str): SQL Query to run.
        """
        pass
class SqliteMeterDB(MeterDB):
    """MeterDB subclass for simple sqlite database."""

    def __init__(self, connection_string="default.db"):
        """
        Args:
            connection_string (str): name of sqlite database file.
        """
        super(SqliteMeterDB, self).__init__(connection_string)

    def dbExec(self, query_str):
        """ Required override of dbExec() from MeterDB(), run query.

        Args:
            query_str (str): query to run

        Returns:
            bool: True on success, False on any exception (logged).
        """
        try:
            connection = sqlite3.connect(self.m_connection_string)
            cursor = connection.cursor()
            cursor.execute(query_str)
            connection.commit()
            cursor.close()
            connection.close()
            return True
        except:
            ekm_log(traceback.format_exc(sys.exc_info()))
            return False

    def dict_factory(self, cursor, row):
        """ Sqlite callback accepting the cursor and the original row as a tuple.

        Simple return of JSON safe types.

        Args:
            cursor (sqlite cursor): Original cursory
            row (sqlite row tuple): Original row.

        Returns:
            dict: modified row.
        """
        d = {}
        for idx, col in enumerate(cursor.description):
            val = row[idx]
            name = col[0]
            if name == Field.Time_Stamp:
                d[col[0]] = str(val)
                continue
            if name == "Raw_A" or name == "Raw_B":  # or name == Field.Meter_Time:
                continue
            if name not in self.m_all_fields:
                continue
            # NOTE(review): zero values are deliberately dropped from the
            # rendered row here -- confirm that callers expect sparse JSON.
            if (str(val) != "None") and ((val > 0) or (val < 0)):
                d[name] = str(val)
        return d

    def raw_dict_factory(self, cursor, row):
        """ Sqlite callback accepting the cursor and the original row as a tuple.

        Simple return of JSON safe types, including raw read hex strings.

        Args:
            cursor (sqlite cursor): Original cursory
            row (sqlite row tuple): Original row.

        Returns:
            dict: modified row.
        """
        d = {}
        for idx, col in enumerate(cursor.description):
            val = row[idx]
            name = col[0]
            if name == Field.Time_Stamp or name == Field.Meter_Address:
                d[name] = str(val)
                continue
            if name == "Raw_A" or name == "Raw_B":
                d[name] = str(val)
                continue
        return d

    def renderJsonReadsSince(self, timestamp, meter):
        """ Simple since Time_Stamp query returned as JSON records.

        Args:
            timestamp (int): Epoch time in seconds.
            meter (str): 12 character meter address to query

        Returns:
            str: JSON rendered read records.
        """
        result = ""
        try:
            connection = sqlite3.connect(self.m_connection_string)
            connection.row_factory = self.dict_factory
            select_cursor = connection.cursor()
            # Fix: timestamp and meter address were previously spliced
            # directly into the SQL text (an injection risk); they are now
            # bound as parameters.  Column names still come from trusted
            # Field constants.
            select_cursor.execute("select * from Meter_Reads where " +
                                  Field.Time_Stamp + " > ? and " +
                                  Field.Meter_Address + " = ?;",
                                  (timestamp, meter))
            reads = select_cursor.fetchall()
            result = json.dumps(reads, indent=4)
        except:
            ekm_log(traceback.format_exc(sys.exc_info()))
        return result

    def renderRawJsonReadsSince(self, timestamp, meter):
        """ Simple Time_Stamp query returned as JSON, with raw hex string fields.

        Args:
            timestamp (int): Epoch time in seconds.
            meter (str): 12 character meter address to query

        Returns:
            str: JSON rendered read records including raw hex fields.
        """
        result = ""
        try:
            connection = sqlite3.connect(self.m_connection_string)
            connection.row_factory = self.raw_dict_factory
            select_cursor = connection.cursor()
            # Parameterized for the same injection-safety reason as above.
            select_cursor.execute("select " + Field.Time_Stamp +
                                  ", Raw_A, Raw_B, " + Field.Meter_Address +
                                  " from Meter_Reads where " +
                                  Field.Time_Stamp + " > ? and " +
                                  Field.Meter_Address + " = ?;",
                                  (timestamp, meter))
            reads = select_cursor.fetchall()
            result = json.dumps(reads, indent=4)
        except:
            ekm_log(traceback.format_exc(sys.exc_info()))
        return result
class Meter(object):
""" Abstract base class. Encapuslates serial operations and buffers. """
def __init__(self, meter_address="000000000000"):
    """
    Args:
        meter_address (str): 12 char EKM meter address on front of meter.
    """
    # Address is left zero padded to the full 12 characters.
    self.m_meter_address = meter_address.zfill(12)
    self.m_raw_read_a = ""       # raw bytes of last A request
    self.m_raw_read_b = ""       # raw bytes of last B request (if any)
    self.m_observers = []        # MeterObserver chain, see registerObserver()
    self.m_cmd_interface = None  # optional command interface wrapper
    self.m_serial_port = None    # attached SerialPort
    self.m_command_msg = ""      # last human readable command status
    self.m_context = ""          # current serial command context string
    # Tariff-schedule serial blocks, first and second four schedules.
    self.m_schd_1_to_4 = SerialBlock()
    self.initSchd_1_to_4()
    self.m_schd_5_to_8 = SerialBlock()
    self.initSchd_5_to_8()
    # Holiday date serial block.
    self.m_hldy = SerialBlock()
    self.initHldyDates()
    # Forward and reverse six-month tariff serial blocks.
    self.m_mons = SerialBlock()
    self.initMons()
    self.m_rev_mons = SerialBlock()
    self.initRevMons()
    # Short send-parameter dicts, zeroed by initParamLists().
    self.m_seasons_sched_params = {}
    self.m_holiday_date_params = {}
    self.m_sched_tariff_params = {}
    self.initParamLists()
    pass
def initParamLists(self):
    """ Initialize all short in-object send buffers to zero.

    Builds the season-schedule, holiday-date and schedule-tariff
    parameter dicts with every key set to 0.  Key insertion order
    matches the meter command layout (season/holiday/step order).
    """
    self.m_seasons_sched_params = {}
    for season in range(1, 5):
        for suffix in ("Start_Month", "Start_Day", "Schedule"):
            self.m_seasons_sched_params["Season_%d_%s" % (season, suffix)] = 0

    self.m_holiday_date_params = {}
    for holiday in range(1, 21):
        self.m_holiday_date_params["Holiday_%d_Month" % holiday] = 0
        self.m_holiday_date_params["Holiday_%d_Day" % holiday] = 0

    self.m_sched_tariff_params = {"Schedule": 0}
    for step in range(1, 5):
        for prefix in ("Hour", "Min", "Rate"):
            self.m_sched_tariff_params["%s_%d" % (prefix, step)] = 0
def getReadBuffer(self):
    """ Required override to fetch the read serial block.

    Returns:
        SerialBlock: Every supported field (A or A+B, includes all fields)
    """
    # Base class stub: log the stray call and hand back an empty block.
    ekm_log("Meter::getReadBuffer called in superclass.")
    return SerialBlock()
def request(self, send_terminator=False):
    """ Required override, issue A or A+B reads and square up buffers.

    Args:
        send_terminator (bool): Send termination string at end of read.

    Returns:
        bool: True on successful read.
    """
    # Base class stub: log the stray call and report failure.
    ekm_log("Meter::request called in superclass.")
    return False
def serialPostEnd(self):
    """ Required override, issue termination string to port.

    Base class stub: only logs the stray call.
    """
    ekm_log("Meter::serialPostEnd called in superclass.")
def setContext(self, context_str):
    """ Set context string for serial command. Private setter.

    Logs the new context unless one is already active or the context is
    an internal "request*" call; the member is always updated.

    Args:
        context_str (str): Command specific string.
    """
    worth_logging = (not self.m_context) and (len(context_str) >= 7)
    if worth_logging and context_str[:7] != "request":
        ekm_log("Context: " + context_str)
    self.m_context = context_str
def getContext(self):
    """ Get context string for current serial command. Private getter.

    Returns:
        str: Context string as set at start of command.
    """
    return self.m_context
def calc_crc16(self, buf):
    """ Drop in pure python replacement for ekmcrc.c extension.

    Table-driven CRC-16 over the passed buffer, followed by a byte swap
    and a 0x7F7F mask (presumably to keep both CRC bytes 7-bit clean for
    the meter's 7-bit serial link -- TODO confirm against EKM protocol).

    Args:
        buf (bytes): String or byte array (implicit Python 2.7 cast)

    Returns:
        str: 16 bit CRC per EKM Omnimeters formatted as hex string.
    """
    # 256-entry lookup table, one entry per possible low byte.
    crc_table = [0x0000, 0xc0c1, 0xc181, 0x0140, 0xc301, 0x03c0, 0x0280, 0xc241,
                 0xc601, 0x06c0, 0x0780, 0xc741, 0x0500, 0xc5c1, 0xc481, 0x0440,
                 0xcc01, 0x0cc0, 0x0d80, 0xcd41, 0x0f00, 0xcfc1, 0xce81, 0x0e40,
                 0x0a00, 0xcac1, 0xcb81, 0x0b40, 0xc901, 0x09c0, 0x0880, 0xc841,
                 0xd801, 0x18c0, 0x1980, 0xd941, 0x1b00, 0xdbc1, 0xda81, 0x1a40,
                 0x1e00, 0xdec1, 0xdf81, 0x1f40, 0xdd01, 0x1dc0, 0x1c80, 0xdc41,
                 0x1400, 0xd4c1, 0xd581, 0x1540, 0xd701, 0x17c0, 0x1680, 0xd641,
                 0xd201, 0x12c0, 0x1380, 0xd341, 0x1100, 0xd1c1, 0xd081, 0x1040,
                 0xf001, 0x30c0, 0x3180, 0xf141, 0x3300, 0xf3c1, 0xf281, 0x3240,
                 0x3600, 0xf6c1, 0xf781, 0x3740, 0xf501, 0x35c0, 0x3480, 0xf441,
                 0x3c00, 0xfcc1, 0xfd81, 0x3d40, 0xff01, 0x3fc0, 0x3e80, 0xfe41,
                 0xfa01, 0x3ac0, 0x3b80, 0xfb41, 0x3900, 0xf9c1, 0xf881, 0x3840,
                 0x2800, 0xe8c1, 0xe981, 0x2940, 0xeb01, 0x2bc0, 0x2a80, 0xea41,
                 0xee01, 0x2ec0, 0x2f80, 0xef41, 0x2d00, 0xedc1, 0xec81, 0x2c40,
                 0xe401, 0x24c0, 0x2580, 0xe541, 0x2700, 0xe7c1, 0xe681, 0x2640,
                 0x2200, 0xe2c1, 0xe381, 0x2340, 0xe101, 0x21c0, 0x2080, 0xe041,
                 0xa001, 0x60c0, 0x6180, 0xa141, 0x6300, 0xa3c1, 0xa281, 0x6240,
                 0x6600, 0xa6c1, 0xa781, 0x6740, 0xa501, 0x65c0, 0x6480, 0xa441,
                 0x6c00, 0xacc1, 0xad81, 0x6d40, 0xaf01, 0x6fc0, 0x6e80, 0xae41,
                 0xaa01, 0x6ac0, 0x6b80, 0xab41, 0x6900, 0xa9c1, 0xa881, 0x6840,
                 0x7800, 0xb8c1, 0xb981, 0x7940, 0xbb01, 0x7bc0, 0x7a80, 0xba41,
                 0xbe01, 0x7ec0, 0x7f80, 0xbf41, 0x7d00, 0xbdc1, 0xbc81, 0x7c40,
                 0xb401, 0x74c0, 0x7580, 0xb541, 0x7700, 0xb7c1, 0xb681, 0x7640,
                 0x7200, 0xb2c1, 0xb381, 0x7340, 0xb101, 0x71c0, 0x7080, 0xb041,
                 0x5000, 0x90c1, 0x9181, 0x5140, 0x9301, 0x53c0, 0x5280, 0x9241,
                 0x9601, 0x56c0, 0x5780, 0x9741, 0x5500, 0x95c1, 0x9481, 0x5440,
                 0x9c01, 0x5cc0, 0x5d80, 0x9d41, 0x5f00, 0x9fc1, 0x9e81, 0x5e40,
                 0x5a00, 0x9ac1, 0x9b81, 0x5b40, 0x9901, 0x59c0, 0x5880, 0x9841,
                 0x8801, 0x48c0, 0x4980, 0x8941, 0x4b00, 0x8bc1, 0x8a81, 0x4a40,
                 0x4e00, 0x8ec1, 0x8f81, 0x4f40, 0x8d01, 0x4dc0, 0x4c80, 0x8c41,
                 0x4400, 0x84c1, 0x8581, 0x4540, 0x8701, 0x47c0, 0x4680, 0x8641,
                 0x8201, 0x42c0, 0x4380, 0x8341, 0x4100, 0x81c1, 0x8081, 0x4040]
    crc = 0xffff
    # Classic byte-at-a-time table CRC; ord(c) expects a Python 2 str.
    for c in buf:
        index = (crc ^ ord(c)) & 0xff
        crct = crc_table[index]
        crc = (crc >> 8) ^ crct
    # Swap the two CRC bytes, then clear the high bit of each byte.
    crc = (crc << 8) | (crc >> 8)
    crc &= 0x7F7F
    return "%04x" % crc
def calcPF(self, pf):
    """ Simple wrap to calc legacy PF value

    Args:
        pf: meter power factor reading

    Returns:
        int: legacy push pf
    """
    # The reading is an L/C prefix character followed by the magnitude.
    prefix = pf[:1]
    magnitude = pf[1:]
    if prefix == CosTheta.CapacitiveLead:
        return 200 - int(magnitude)
    if prefix == CosTheta.InductiveLag:
        return int(magnitude)
    # Unrecognized prefix: unity power factor.
    return 100
def setMaxDemandPeriod(self, period, password="00000000"):
    """ Serial call to set max demand period.

    Args:
        period (int): 1 = 15 minute, 2 = 30 minute, 3 = hour.
        password (str): Optional password.

    Returns:
        bool: True on completion with ACK.
    """
    result = False
    self.setContext("setMaxDemandPeriod")
    try:
        if period < 1 or period > 3:
            self.writeCmdMsg("Correct parameter: 1 = 15 minute, 2 = 30 minute, 3 = hour")
            self.setContext("")
            return result
        # A successful pre-read is required before issuing settings.
        if not self.request(False):
            self.writeCmdMsg("Bad read CRC on setting")
        else:
            if not self.serialCmdPwdAuth(password):
                self.writeCmdMsg("Password failure")
            else:
                # Fixed command frame (hex) with the period digit spliced
                # in; CRC over everything after the SOH byte is appended.
                # (hexlify(str)/.decode("hex") are Python 2 idioms.)
                req_str = "015731023030353028" + binascii.hexlify(str(period)).zfill(2) + "2903"
                req_str += self.calc_crc16(req_str[2:].decode("hex"))
                self.m_serial_port.write(req_str.decode("hex"))
                # A single 0x06 byte is the meter ACK.
                if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
                    self.writeCmdMsg("Success(setMaxDemandPeriod): 06 returned.")
                    result = True
        self.serialPostEnd()
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
    self.setContext("")
    return result
def setMaxDemandResetInterval(self, interval, password="00000000"):
    """ Serial call to set max demand interval.

    Args:
        interval (int): :class:`~ekmmeters.MaxDemandResetInterval` as int.
        password (str): Optional password.

    Returns:
        bool: True on completion with ACK.
    """
    result = False
    self.setContext("setMaxDemandResetInterval")
    try:
        if interval < 0 or interval > 4:
            self.writeCmdMsg("Correct parameter: 0 = off, 1 = monthly, 2 = weekly, 3 = daily, 4 = hourly")
            self.setContext("")
            return result
        # A successful pre-read is required before issuing settings.
        if not self.request(False):
            self.writeCmdMsg("Bad read CRC on setting")
        else:
            if not self.serialCmdPwdAuth(password):
                self.writeCmdMsg("Password failure")
            else:
                # Fixed command frame (hex) with the interval digit spliced
                # in; CRC over everything after the SOH byte is appended.
                # (hexlify(str)/.decode("hex") are Python 2 idioms.)
                req_str = "015731023030443528" + binascii.hexlify(str(interval).zfill(1)) + "2903"
                req_str += self.calc_crc16(req_str[2:].decode("hex"))
                self.m_serial_port.write(req_str.decode("hex"))
                # A single 0x06 byte is the meter ACK.
                if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
                    self.writeCmdMsg("Success (setMaxDemandResetInterval): 06 returned.")
                    result = True
        self.serialPostEnd()
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
    self.setContext("")
    return result
def setMeterPassword(self, new_pwd, pwd="00000000"):
    """ Serial Call to set meter password. USE WITH CAUTION.

    Args:
        new_pwd (str): 8 digit numeric password to set
        pwd (str): Old 8 digit numeric password.

    Returns:
        bool: True on completion with ACK.
    """
    result = False
    self.setContext("setMeterPassword")
    try:
        if len(new_pwd) != 8 or len(pwd) != 8:
            self.writeCmdMsg("Passwords must be exactly eight characters.")
            self.setContext("")
            return result
        # A successful pre-read is required before issuing settings.
        if not self.request(False):
            self.writeCmdMsg("Pre command read failed: check serial line.")
        else:
            # Authenticate with the OLD password before changing it.
            if not self.serialCmdPwdAuth(pwd):
                self.writeCmdMsg("Password failure")
            else:
                # Fixed command frame (hex) with the hexlified new password
                # spliced in; CRC over everything after the SOH byte.
                # (hexlify(str)/.decode("hex") are Python 2 idioms.)
                req_pwd = binascii.hexlify(new_pwd.zfill(8))
                req_str = "015731023030323028" + req_pwd + "2903"
                req_str += self.calc_crc16(req_str[2:].decode("hex"))
                self.m_serial_port.write(req_str.decode("hex"))
                # A single 0x06 byte is the meter ACK.
                if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
                    self.writeCmdMsg("Success(setMeterPassword): 06 returned.")
                    result = True
        self.serialPostEnd()
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
    self.setContext("")
    return result
def unpackStruct(self, data, def_buf):
    """ Wrapper for struct.unpack with SerialBlock buffer definitionns.

    Args:
        data (str): Implicit cast bytes to str, serial port return.
        def_buf (SerialBlock): Block object holding field lengths.

    Returns:
        tuple: parsed result of struct.unpack() with field definitions.
    """
    # Native-order format of fixed-width char fields; calculated fields
    # are not part of the wire image and are skipped.
    fmt = "=" + "".join(
        str(def_buf[fld][MeterData.SizeValue]) + "s"
        for fld in def_buf
        if not def_buf[fld][MeterData.CalculatedFlag])
    # Only a complete 255-byte meter block can be unpacked.
    if len(data) != 255:
        self.writeCmdMsg("Length error. Len() size = " + str(len(data)))
        return ()
    return struct.unpack(fmt, str(data))
def convertData(self, contents, def_buf, kwh_scale=ScaleKWH.EmptyScale):
    """ Move data from raw tuple into scaled and conveted values.

    Args:
        contents (tuple): Breakout of passed block from unpackStruct().
        def_buf (SerialBlock): Read buffer destination.
        kwh_scale (int): :class:`~ekmmeters.ScaleKWH` as int, from Field.kWhScale`

    Returns:
        bool: True on completion.
    """
    log_str = ""
    count = 0
    # getting scale does not require a full read. It does require that the
    # reads have the scale value in the first block read. This requirement
    # is filled by default in V3 and V4 requests
    # (m_kwh_precision is presumed to be initialized by the subclass --
    # it is not set in Meter.__init__; dict.keys().index() is Python 2.)
    if kwh_scale == ScaleKWH.EmptyScale:
        if self.m_kwh_precision == ScaleKWH.EmptyScale :
            scale_offset = int(def_buf.keys().index(Field.kWh_Scale))
            self.m_kwh_precision = kwh_scale = int(contents[scale_offset])
    for fld in def_buf:
        # Calculated fields are not in the wire tuple; keep the index in
        # step and move on.
        if def_buf[fld][MeterData.CalculatedFlag]:
            count += 1
            continue
        # Empty tuple (failed unpack): nothing to convert.
        if len(contents) == 0:
            count += 1
            continue
        try:  # scrub up messes on a field by field basis
            raw_data = contents[count]
            fld_type = def_buf[fld][MeterData.TypeValue]
            fld_scale = def_buf[fld][MeterData.ScaleValue]
            if fld_type == FieldType.Float:
                float_data = float(str(raw_data))
                divisor = 1
                if fld_scale == ScaleType.KWH:
                    # Deferred scaling: divisor comes from the meter's
                    # own kWh_Scale field.
                    divisor = 1
                    if kwh_scale == ScaleKWH.Scale10:
                        divisor = 10
                    elif kwh_scale == ScaleKWH.Scale100:
                        divisor = 100
                    elif (kwh_scale != ScaleKWH.NoScale) and (kwh_scale != ScaleKWH.EmptyScale):
                        ekm_log("Unrecognized kwh scale.")
                elif fld_scale == ScaleType.Div10:
                    divisor = 10
                elif fld_scale == ScaleType.Div100:
                    divisor = 100
                elif fld_scale != ScaleType.No:
                    ekm_log("Unrecognized float scale.")
                float_data /= divisor
                float_data_str = str(float_data)
                def_buf[fld][MeterData.StringValue] = float_data_str
                def_buf[fld][MeterData.NativeValue] = float_data
            elif fld_type == FieldType.Hex:
                # Python 2 idiom: str.encode('hex') hexlifies the bytes.
                hex_data = raw_data.encode('hex')
                def_buf[fld][MeterData.StringValue] = hex_data
                def_buf[fld][MeterData.NativeValue] = hex_data
            elif fld_type == FieldType.Int:
                integer_data = int(raw_data)
                integer_data_str = str(integer_data)
                if len(integer_data_str) == 0:
                    integer_data_str = str(0)
                def_buf[fld][MeterData.StringValue] = integer_data_str
                def_buf[fld][MeterData.NativeValue] = integer_data
            elif fld_type == FieldType.String:
                string_data = str(raw_data)
                def_buf[fld][MeterData.StringValue] = string_data
                def_buf[fld][MeterData.NativeValue] = string_data
            elif fld_type == FieldType.PowerFactor:
                # Kept verbatim (L/C prefix plus magnitude); see calcPF.
                def_buf[fld][MeterData.StringValue] = str(raw_data)
                def_buf[fld][MeterData.NativeValue] = str(raw_data)
            else:
                ekm_log("Unrecognized field type")
            log_str = log_str + '"' + fld + '": "' + def_buf[fld][MeterData.StringValue] + '"\n'
        except:
            # One bad field must not abort the remaining conversions.
            ekm_log("Exception on Field:" + str(fld))
            ekm_log(traceback.format_exc(sys.exc_info()))
            self.writeCmdMsg("Exception on Field:" + str(fld))
        count += 1
    return True
def jsonRender(self, def_buf):
    """ Translate the passed serial block into string only JSON.

    Args:
        def_buf (SerialBlock): Any :class:`~ekmmeters.SerialBlock` object.

    Returns:
        str: JSON rendering of meter record, or "" on any exception.
    """
    try:
        rendered = SerialBlock()
        rendered[Field.Meter_Address] = self.getMeterAddress()
        for fld in def_buf:
            upper_name = fld.upper()
            # Reserved and CRC fields are never exported.
            if ("RESERVED" in upper_name) or ("CRC" in upper_name):
                continue
            rendered[str(fld)] = def_buf[fld][MeterData.StringValue]
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
        return ""
    return json.dumps(rendered, indent=4)
def crcMeterRead(self, raw_read, def_buf):
    """ Internal read CRC wrapper.

    Recomputes the CRC over the read body (dropping the leading byte and
    the trailing two CRC bytes) and compares it with the CRC the meter
    sent, already parsed into def_buf["crc16"].

    Args:
        raw_read (str): Bytes with implicit string cast from serial read
        def_buf (SerialBlock): Populated read buffer.

    Returns:
        bool: True if passed CRC equals calculated CRC.
    """
    try:
        if len(raw_read) == 0:
            ekm_log("(" + self.m_context + ") Empty return read.")
            return False
        # CRC covers everything after the first byte and before the
        # trailing CRC itself.
        sent_crc = self.calc_crc16(raw_read[1:-2])
        logstr = "(" + self.m_context + ")CRC sent = " + str(def_buf["crc16"][MeterData.StringValue])
        logstr += " CRC calc = " + sent_crc
        ekm_log(logstr)
        # Compare numerically so leading zeros and case do not matter.
        if int(def_buf["crc16"][MeterData.StringValue], 16) == int(sent_crc, 16):
            return True
    # A cross simple test lines on a USB serial adapter, these occur every
    # 1000 to 2000 reads, and they show up here as a bad unpack or
    # a bad crc type call. In either case, we suppress them a log will
    # become quite large. ekmcrc errors come through as type errors.
    # Failures of int type conversion in 16 bit conversion occur as value
    # errors.
    except struct.error:
        ekm_log(str(sys.exc_info()))
        for frame in traceback.extract_tb(sys.exc_info()[2]):
            fname, lineno, fn, text = frame
            ekm_log("Error in %s on line %d" % (fname, lineno))
        return False
    except TypeError:
        ekm_log(str(sys.exc_info()))
        for frame in traceback.extract_tb(sys.exc_info()[2]):
            fname, lineno, fn, text = frame
            ekm_log("Error in %s on line %d" % (fname, lineno))
        return False
    except ValueError:
        ekm_log(str(sys.exc_info()))
        for frame in traceback.extract_tb(sys.exc_info()[2]):
            fname, lineno, fn, text = frame
            ekm_log("Error in %s on line %d" % (fname, lineno))
        return False
    # CRC mismatch without an exception: reject the read.
    return False
def splitEkmDate(self, dateint):
    """Break out a date from an Omnimeter read.

    Note a corrupt date will raise an exception when you convert it to
    int to hand to this method.

    BUG FIX: the previous implementation assigned the fields as *class*
    attributes of the namedtuple type instead of creating an instance,
    so every call mutated the same shared object and clobbered all
    previously returned results.  A proper (immutable) namedtuple
    instance is now returned; attribute access by callers is unchanged.

    Args:
        dateint (int): Omnimeter datetime as int (14 decimal digits).

    Returns:
        tuple: Named tuple which breaks out as follows:

            ========== =====================
            yy         Last 2 digits of year
            mm         Month 1-12
            dd         Day 1-31
            weekday    Zero based weekday
            hh         Hour 0-23
            minutes    Minutes 0-59
            ss         Seconds 0-59
            ========== =====================
    """
    EkmDate = namedtuple('EkmDate', ['yy', 'mm', 'dd', 'weekday', 'hh', 'minutes', 'ss'])
    date_str = str(dateint)
    if len(date_str) != 14:
        # Corrupt or short value: return an all-zero record rather
        # than raising, matching the original contract.
        return EkmDate(0, 0, 0, 0, 0, 0, 0)
    # The 14 digit value packs seven zero-padded 2-digit fields.
    return EkmDate(*(int(date_str[i:i + 2]) for i in range(0, 14, 2)))
def getMeterAddress(self):
    """ Getter for meter object 12 character address.

    Returns:
        str: 12 character address on front of meter.
    """
    return self.m_meter_address
def registerObserver(self, observer):
    """ Append an observer to this meter's update() notification chain.

    Args:
        observer (MeterObserver): Subclassed MeterObserver.
    """
    self.m_observers.append(observer)
def unregisterObserver(self, observer):
    """ Remove an observer from the meter update() chain.

    Silently ignores an observer that was never registered.

    Args:
        observer (MeterObserver): Subclassed MeterObserver.
    """
    try:
        self.m_observers.remove(observer)
    except ValueError:
        # Not registered: no-op, matching the membership-test original.
        pass
def initSchd_1_to_4(self):
    """ Initialize first tariff schedule :class:`~ekmmeters.SerialBlock`.

    Layout: a 6 byte reserved echo header; for each of schedules 1-4,
    four (Hour, Min, Rate) tariff triplets of 2 bytes each; a 24 byte
    reserved pad between consecutive schedules; a 79 byte reserved
    tail; and the closing 2 byte CRC.  Insertion order is significant
    (it mirrors the meter's wire format), so the loop below reproduces
    the exact key order of the original hand-written table.
    """
    blk = self.m_schd_1_to_4
    blk["reserved_40"] = [6, FieldType.Hex, ScaleType.No, "", 0, False, False]
    for schd in range(1, 5):
        for tariff in range(1, 5):
            for part in ("Hour", "Min", "Rate"):
                key = "Schd_%d_Tariff_%d_%s" % (schd, tariff, part)
                blk[key] = [2, FieldType.Int, ScaleType.No, "", 0, False, True]
        if schd < 4:
            # 24 byte reserved pad between schedules (reserved_41..43).
            blk["reserved_4%d" % schd] = [24, FieldType.Hex, ScaleType.No, "", 0, False, True]
    blk["reserved_44"] = [79, FieldType.Hex, ScaleType.No, "", 0, False, True]
    blk["crc16"] = [2, FieldType.Hex, ScaleType.No, "", 0, False, False]
def initSchd_5_to_8(self):
    """ Initialize second (and last) tariff schedule :class:`~ekmmeters.SerialBlock`.

    Same layout as :meth:`initSchd_1_to_4` but covering schedules 5-8
    with reserved pads reserved_30..reserved_34.  Insertion order is
    significant; the loop reproduces the original key order exactly.
    """
    blk = self.m_schd_5_to_8
    blk["reserved_30"] = [6, FieldType.Hex, ScaleType.No, "", 0, False, False]
    for pad, schd in enumerate(range(5, 9), 1):
        for tariff in range(1, 5):
            for part in ("Hour", "Min", "Rate"):
                key = "Schd_%d_Tariff_%d_%s" % (schd, tariff, part)
                blk[key] = [2, FieldType.Int, ScaleType.No, "", 0, False, True]
        if schd < 8:
            # 24 byte reserved pad between schedules (reserved_31..33).
            blk["reserved_3%d" % pad] = [24, FieldType.Hex, ScaleType.No, "", 0, False, True]
    blk["reserved_34"] = [79, FieldType.Hex, ScaleType.No, "", 0, False, True]
    blk["crc16"] = [2, FieldType.Hex, ScaleType.No, "", 0, False, False]
def getSchedulesBuffer(self, period_group):
    """ Return the requested tariff schedule :class:`~ekmmeters.SerialBlock` for meter.

    Args:
        period_group (int): A :class:`~ekmmeters.ReadSchedules` value.

    Returns:
        SerialBlock: The requested tariff schedules for meter, or an
        empty block for an unrecognized period_group.
    """
    if period_group == ReadSchedules.Schedules_1_To_4:
        return self.m_schd_1_to_4
    if period_group == ReadSchedules.Schedules_5_To_8:
        return self.m_schd_5_to_8
    return SerialBlock()
def initHldyDates(self):
    """ Initialize holidays :class:`~ekmmeters.SerialBlock`.

    Layout: a 6 byte reserved echo header; twenty (Mon, Day) pairs of
    2 bytes each for Holiday_1..Holiday_20; the weekend and holiday
    schedule selectors; a 163 byte reserved tail; and the closing
    2 byte CRC.  Insertion order mirrors the meter's wire format, so
    the loop reproduces the original key order exactly.
    """
    blk = self.m_hldy
    blk["reserved_20"] = [6, FieldType.Hex, ScaleType.No, "", 0, False, False]
    for holiday in range(1, 21):
        for part in ("Mon", "Day"):
            key = "Holiday_%d_%s" % (holiday, part)
            blk[key] = [2, FieldType.Int, ScaleType.No, "", 0, False, True]
    blk["Weekend_Schd"] = [2, FieldType.Int, ScaleType.No, "", 0, False, True]
    blk["Holiday_Schd"] = [2, FieldType.Int, ScaleType.No, "", 0, False, True]
    blk["reserved_21"] = [163, FieldType.Hex, ScaleType.No, "", 0, False, False]
    blk["crc16"] = [2, FieldType.Hex, ScaleType.No, "", 0, False, False]
def getHolidayDatesBuffer(self):
    """ Get the meter :class:`~ekmmeters.SerialBlock` for holiday dates.

    Returns:
        SerialBlock: The holiday dates buffer (self.m_hldy).
    """
    return self.m_hldy
def initMons(self):
    """ Initialize first month tariff :class:`~ekmmeters.SerialBlock` for meter.

    Layout: a 6 byte reserved echo header; for each of months 1-6 an
    8 byte kWh total followed by four 8 byte per-tariff kWh values;
    a 7 byte reserved tail; and the closing 2 byte CRC.  Insertion
    order mirrors the meter's wire format, so the loop reproduces the
    original key order exactly.
    """
    blk = self.m_mons
    blk["reserved_echo_cmd"] = [6, FieldType.Hex, ScaleType.No, "", 0, False, False]
    for month in range(1, 7):
        blk["Month_%d_Tot" % month] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
        for tariff in range(1, 5):
            key = "Month_%d_Tariff_%d" % (month, tariff)
            blk[key] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    blk["reserved_1"] = [7, FieldType.Hex, ScaleType.No, "", 0, False, False]
    blk["crc16"] = [2, FieldType.Hex, ScaleType.No, "", 0, False, False]
def initRevMons(self):
    """ Initialize second (and last) month tariff :class:`~ekmmeters.SerialBlock` for meter.

    Reverse-direction counterpart of :meth:`initMons`; identical field
    layout written into self.m_rev_mons.  Insertion order mirrors the
    meter's wire format, so the loop reproduces the original key order
    exactly.
    """
    blk = self.m_rev_mons
    blk["reserved_echo_cmd"] = [6, FieldType.Hex, ScaleType.No, "", 0, False, False]
    for month in range(1, 7):
        blk["Month_%d_Tot" % month] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
        for tariff in range(1, 5):
            key = "Month_%d_Tariff_%d" % (month, tariff)
            blk[key] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    blk["reserved_1"] = [7, FieldType.Hex, ScaleType.No, "", 0, False, False]
    blk["crc16"] = [2, FieldType.Hex, ScaleType.No, "", 0, False, False]
def getMonthsBuffer(self, direction):
    """ Get the months tariff SerialBlock for meter.

    Args:
        direction (int): A :class:`~ekmmeters.ReadMonths` value.

    Returns:
        SerialBlock: Requested months tariffs buffer (reverse buffer
        for ReadMonths.kWhReverse, forward buffer otherwise).
    """
    is_reverse = (direction == ReadMonths.kWhReverse)
    return self.m_rev_mons if is_reverse else self.m_mons
def setMaxDemandResetNow(self, password="00000000"):
    """ Serial call zero max demand (Dash Now button).

    Args:
        password (str): Optional 8 character meter password.

    Returns:
        bool: True on completion with ACK.
    """
    result = False
    self.setContext("setMaxDemandResetNow")
    try:
        if len(password) != 8:
            self.writeCmdMsg("Invalid password length.")
            self.setContext("")
            return result
        if not self.request(False):
            self.writeCmdMsg("Bad read CRC on setting")
        else:
            if not self.serialCmdPwdAuth(password):
                self.writeCmdMsg("Password failure")
            else:
                # Hex-encoded write command with a zeroed 6 digit payload,
                # followed by the CRC over everything after the leading byte.
                # NOTE(review): str.decode("hex")/encode("hex") are
                # Python 2 only codecs.
                req_str = "015731023030343028" + binascii.hexlify(str(0).zfill(6)) + "2903"
                req_str += self.calc_crc16(req_str[2:].decode("hex"))
                self.m_serial_port.write(req_str.decode("hex"))
                # "06" is the meter's ACK byte.
                if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
                    self.writeCmdMsg("Success(setMaxDemandResetNow): 06 returned.")
                    result = True
            self.serialPostEnd()
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
    self.setContext("")
    return result
def setTime(self, yy, mm, dd, hh, minutes, ss, password="00000000"):
    """ Serial set time with day of week calculation.

    Args:
        yy (int): Last two digits of year.
        mm (int): Month 1-12.
        dd (int): Day 1-31.
        hh (int): Hour 0 to 23.
        minutes (int): Minutes 0 to 59.
        ss (int): Seconds 0 to 59.
        password (str): Optional 8 character meter password.

    Returns:
        bool: True on completion and ACK.
    """
    result = False
    self.setContext("setTime")
    try:
        # Range-check each component before touching the port.
        # NOTE(review): yy itself is not validated; datetime() below
        # will raise for an out-of-range year and be caught by the
        # blanket except.
        if mm < 1 or mm > 12:
            self.writeCmdMsg("Month must be between 1 and 12")
            self.setContext("")
            return result
        if dd < 1 or dd > 31:
            self.writeCmdMsg("Day must be between 1 and 31")
            self.setContext("")
            return result
        if hh < 0 or hh > 23:
            self.writeCmdMsg("Hour must be between 0 and 23, inclusive")
            self.setContext("")
            return result
        if minutes < 0 or minutes > 59:
            self.writeCmdMsg("Minutes must be between 0 and 59, inclusive")
            self.setContext("")
            return result
        if ss < 0 or ss > 59:
            self.writeCmdMsg("Seconds must be between 0 and 59, inclusive")
            self.setContext("")
            return result
        if len(password) != 8:
            self.writeCmdMsg("Invalid password length.")
            self.setContext("")
            return result
        if not self.request(False):
            self.writeCmdMsg("Bad read CRC on setting")
        else:
            if not self.serialCmdPwdAuth(password):
                self.writeCmdMsg("Password failure")
            else:
                dt_buf = datetime.datetime(int(yy), int(mm), int(dd), int(hh), int(minutes), int(ss))
                ekm_log("Writing Date and Time " + dt_buf.strftime("%Y-%m-%d %H:%M"))
                # NOTE(review): isoweekday() is 1 (Mon) .. 7 (Sun), while
                # splitEkmDate documents the meter's weekday field as zero
                # based -- confirm against the meter protocol.
                dayofweek = dt_buf.date().isoweekday()
                ekm_log("Calculated weekday " + str(dayofweek))
                # Each field is ASCII-zero-padded then hex encoded;
                # Python 2 only hex codecs (decode("hex")/encode("hex")).
                req_str = "015731023030363028"
                req_str += binascii.hexlify(str(yy)[-2:])
                req_str += binascii.hexlify(str(mm).zfill(2))
                req_str += binascii.hexlify(str(dd).zfill(2))
                req_str += binascii.hexlify(str(dayofweek).zfill(2))
                req_str += binascii.hexlify(str(hh).zfill(2))
                req_str += binascii.hexlify(str(minutes).zfill(2))
                req_str += binascii.hexlify(str(ss).zfill(2))
                req_str += "2903"
                req_str += self.calc_crc16(req_str[2:].decode("hex"))
                self.m_serial_port.write(req_str.decode("hex"))
                # "06" is the meter's ACK byte.
                if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
                    self.writeCmdMsg("Success(setTime): 06 returned.")
                    result = True
            self.serialPostEnd()
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
    self.setContext("")
    return result
def setCTRatio(self, new_ct, password="00000000"):
    """ Serial call to set CT ratio for attached inductive pickup.

    Args:
        new_ct (int): A :class:`~ekmmeters.CTRatio` value, a legal amperage setting.
        password (str): Optional 8 character meter password.

    Returns:
        bool: True on completion with ACK.
    """
    ret = False
    self.setContext("setCTRatio")
    try:
        self.clearCmdMsg()
        # Only the enumerated CTRatio amperages are accepted by the meter.
        if ((new_ct != CTRatio.Amps_100) and (new_ct != CTRatio.Amps_200) and
            (new_ct != CTRatio.Amps_400) and (new_ct != CTRatio.Amps_600) and
            (new_ct != CTRatio.Amps_800) and (new_ct != CTRatio.Amps_1000) and
            (new_ct != CTRatio.Amps_1200) and (new_ct != CTRatio.Amps_1500) and
            (new_ct != CTRatio.Amps_2000) and (new_ct != CTRatio.Amps_3000) and
            (new_ct != CTRatio.Amps_4000) and (new_ct != CTRatio.Amps_5000)):
            self.writeCmdMsg("Legal CT Ratios: 100, 200, 400, 600, " +
                             "800, 1000, 1200, 1500, 2000, 3000, 4000 and 5000")
            self.setContext("")
            return ret
        if len(password) != 8:
            self.writeCmdMsg("Invalid password length.")
            self.setContext("")
            return ret
        if not self.request(False):
            self.writeCmdMsg("Bad read CRC on setting")
        else:
            if not self.serialCmdPwdAuth(password):
                self.writeCmdMsg("Password failure")
            else:
                # 4 digit zero-padded ratio, hex encoded; Python 2 only
                # hex codecs (decode("hex")/encode("hex")).
                req_str = "015731023030443028" + binascii.hexlify(str(new_ct).zfill(4)) + "2903"
                req_str += self.calc_crc16(req_str[2:].decode("hex"))
                self.m_serial_port.write(req_str.decode("hex"))
                # "06" is the meter's ACK byte.
                if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
                    self.writeCmdMsg("Success(setCTRatio): 06 returned.")
                    ret = True
            self.serialPostEnd()
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
    self.setContext("")
    return ret
def assignScheduleTariff(self, schedule, tariff, hour, minute, rate):
    """ Assign one schedule tariff period to meter buffer.

    Args:
        schedule (int): A :class:`~ekmmeters.Schedules` value or in range(Extents.Schedules).
        tariff (int): :class:`~ekmmeters.Tariffs` value or in range(Extents.Tariffs).
        hour (int): Hour from 0-23.
        minute (int): Minute from 0-59.
        rate (int): Rate value.

    Returns:
        bool: True on completed assignment.
    """
    out_of_bounds = (
        (schedule not in range(Extents.Schedules)) or
        (tariff not in range(Extents.Tariffs)) or
        not (0 <= hour <= 23) or
        not (0 <= minute <= 59) or
        (rate < 0))
    if out_of_bounds:
        ekm_log("Out of bounds in Schedule_" + str(schedule + 1))
        return False

    tariff += 1
    # Validate every key before writing any of them, in the same
    # Min/Hour/Rate order as before (first missing key logs and aborts).
    pending = [("Min_" + str(tariff), minute),
               ("Hour_" + str(tariff), hour),
               ("Rate_" + str(tariff), rate)]
    for key, _value in pending:
        if key not in self.m_sched_tariff_params:
            ekm_log("Incorrect index: " + key)
            return False

    for key, value in pending:
        self.m_sched_tariff_params[key] = value
    self.m_sched_tariff_params['Schedule'] = schedule
    return True
def setScheduleTariffs(self, cmd_dict=None, password="00000000"):
    """ Serial call to set tariff periods for a schedule.

    If no dictionary is passed, the meter object buffer
    (self.m_sched_tariff_params, filled by assignScheduleTariff) is used.

    Args:
        cmd_dict (dict): Optional passed command dictionary.
        password (str): Optional 8 character meter password.

    Returns:
        bool: True on completion and ACK.
    """
    result = False
    self.setContext("setScheduleTariffs")
    if not cmd_dict:
        cmd_dict = self.m_sched_tariff_params
    try:
        if not self.request(False):
            self.writeCmdMsg("Bad read CRC on setting")
        else:
            if not self.serialCmdPwdAuth(password):
                self.writeCmdMsg("Password failure")
            else:
                # Four (hour, minute, rate) triplets, each field a
                # 2 digit zero-padded ASCII value, hex encoded
                # (Python 2 only hex codecs).
                req_table = ""
                req_table += binascii.hexlify(str(cmd_dict["Hour_1"]).zfill(2))
                req_table += binascii.hexlify(str(cmd_dict["Min_1"]).zfill(2))
                req_table += binascii.hexlify(str(cmd_dict["Rate_1"]).zfill(2))
                req_table += binascii.hexlify(str(cmd_dict["Hour_2"]).zfill(2))
                req_table += binascii.hexlify(str(cmd_dict["Min_2"]).zfill(2))
                req_table += binascii.hexlify(str(cmd_dict["Rate_2"]).zfill(2))
                req_table += binascii.hexlify(str(cmd_dict["Hour_3"]).zfill(2))
                req_table += binascii.hexlify(str(cmd_dict["Min_3"]).zfill(2))
                req_table += binascii.hexlify(str(cmd_dict["Rate_3"]).zfill(2))
                req_table += binascii.hexlify(str(cmd_dict["Hour_4"]).zfill(2))
                req_table += binascii.hexlify(str(cmd_dict["Min_4"]).zfill(2))
                req_table += binascii.hexlify(str(cmd_dict["Rate_4"]).zfill(2))
                # 24 zero pad to fill the remainder of the table.
                req_table += binascii.hexlify(str(0).zfill(24))
                # Single digit schedule selector embedded in the command.
                table = binascii.hexlify(str(cmd_dict["Schedule"]).zfill(1))
                req_str = "01573102303037" + table + "28" + req_table + "2903"
                req_str += self.calc_crc16(req_str[2:].decode("hex"))
                self.m_serial_port.write(req_str.decode("hex"))
                # "06" is the meter's ACK byte.
                if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
                    self.writeCmdMsg("Success(setScheduleTariffs): 06 returned.")
                    result = True
            self.serialPostEnd()
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
    self.setContext("")
    return result
def assignSeasonSchedule(self, season, month, day, schedule):
    """ Define a single season and assign a schedule.

    BUG FIX: the previous implementation built ``idx_mon`` from the
    ``_Start_Day`` key and ``idx_day`` from the ``_Start_Month`` key
    (transposed), so the month value was stored under the day key and
    vice versa, and the buffer was subsequently transmitted with month
    and day swapped.  The keys are now matched to their values.

    Args:
        season (int): A :class:`~ekmmeters.Seasons` value or in range(Extents.Seasons).
        month (int): Month 1-12.
        day (int): Day 1-31.
        schedule (int): A :class:`~ekmmeters.Schedules` value or in range(Extents.Schedules).

    Returns:
        bool: True on completed buffer assignment.
    """
    season += 1
    schedule += 1
    if ((season < 1) or (season > Extents.Seasons) or (schedule < 1) or
            (schedule > Extents.Schedules) or (month > 12) or (month < 0) or
            (day < 0) or (day > 31)):
        ekm_log("Out of bounds: month " + str(month) + " day " + str(day) +
                " schedule " + str(schedule) + " season " + str(season))
        return False
    idx_mon = "Season_" + str(season) + "_Start_Month"
    idx_day = "Season_" + str(season) + "_Start_Day"
    idx_schedule = "Season_" + str(season) + "_Schedule"
    if idx_mon not in self.m_seasons_sched_params:
        ekm_log("Incorrect index: " + idx_mon)
        return False
    if idx_day not in self.m_seasons_sched_params:
        ekm_log("Incorrect index: " + idx_day)
        return False
    if idx_schedule not in self.m_seasons_sched_params:
        ekm_log("Incorrect index: " + idx_schedule)
        return False
    self.m_seasons_sched_params[idx_mon] = month
    self.m_seasons_sched_params[idx_day] = day
    self.m_seasons_sched_params[idx_schedule] = schedule
    return True
def setSeasonSchedules(self, cmd_dict=None, password="00000000"):
    """ Serial command to set seasons table.

    If no dictionary is passed, the meter object buffer is used.

    Args:
        cmd_dict (dict): Optional dictionary of season schedules.
        password (str): Optional password

    Returns:
        bool: True on completion and ACK.
    """
    result = False
    self.setContext("setSeasonSchedules")
    if not cmd_dict:
        cmd_dict = self.m_seasons_sched_params
    try:
        if not self.request(False):
            self.writeCmdMsg("Bad read CRC on setting")
        elif not self.serialCmdPwdAuth(password):
            self.writeCmdMsg("Password failure")
        else:
            # Four seasons, each serialized as month, day, schedule --
            # two zero filled digits apiece -- followed by 24 bytes of
            # zero padding.
            payload = ""
            for season_num in range(1, 5):
                key_prefix = "Season_" + str(season_num) + "_"
                for key_suffix in ("Start_Month", "Start_Day", "Schedule"):
                    value = cmd_dict[key_prefix + key_suffix]
                    payload += binascii.hexlify(str(value).zfill(2))
            payload += binascii.hexlify(str(0).zfill(24))
            req_str = "015731023030383028" + payload + "2903"
            req_str += self.calc_crc16(req_str[2:].decode("hex"))
            self.m_serial_port.write(req_str.decode("hex"))
            ack = self.m_serial_port.getResponse(self.getContext()).encode("hex")
            if ack == "06":
                self.writeCmdMsg("Success(setSeasonSchedules): 06 returned.")
                result = True
        self.serialPostEnd()
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
    self.setContext("")
    return result
def assignHolidayDate(self, holiday, month, day):
    """ Set a single holiday day and month in the object buffer.

    There is no class style enum for holidays.

    Args:
        holiday (int): 0-19 or range(Extents.Holidays).
        month (int): Month 1-12.
        day (int): Day 1-31.

    Returns:
        bool: True on completion.
    """
    # API holiday index is zero based; buffer keys are one based.
    holiday += 1
    in_bounds = ((0 <= month <= 12) and (0 <= day <= 31) and
                 (1 <= holiday <= Extents.Holidays))
    if not in_bounds:
        ekm_log("Out of bounds: month " + str(month) + " day " + str(day) +
                " holiday " + str(holiday))
        return False
    key_day = "Holiday_" + str(holiday) + "_Day"
    key_month = "Holiday_" + str(holiday) + "_Month"
    for buffer_key in (key_day, key_month):
        if buffer_key not in self.m_holiday_date_params:
            ekm_log("Incorrect index: " + buffer_key)
            return False
    self.m_holiday_date_params[key_day] = day
    self.m_holiday_date_params[key_month] = month
    return True
def setHolidayDates(self, cmd_dict=None, password="00000000"):
    """ Serial call to set holiday list.

    If a buffer dictionary is not supplied, the method will use
    the class object buffer populated with assignHolidayDate.

    Args:
        cmd_dict (dict): Optional dictionary of holidays.
        password (str): Optional password.

    Returns:
        bool: True on completion.
    """
    result = False
    self.setContext("setHolidayDates")
    if not cmd_dict:
        cmd_dict = self.m_holiday_date_params
    try:
        if not self.request(False):
            self.writeCmdMsg("Bad read CRC on setting")
        else:
            if not self.serialCmdPwdAuth(password):
                self.writeCmdMsg("Password failure")
            else:
                # Twenty holidays, each serialized month first then day,
                # two zero filled digits apiece (replaces 40 repeated
                # hexlify lines with a loop; byte order is unchanged).
                req_table = ""
                for holiday_num in range(1, 21):
                    base = "Holiday_" + str(holiday_num) + "_"
                    req_table += binascii.hexlify(str(cmd_dict[base + "Month"]).zfill(2))
                    req_table += binascii.hexlify(str(cmd_dict[base + "Day"]).zfill(2))
                req_str = "015731023030423028" + req_table + "2903"
                req_str += self.calc_crc16(req_str[2:].decode("hex"))
                self.m_serial_port.write(req_str.decode("hex"))
                if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
                    # Fixed log text: previously "Success(setHolidayDates: 06
                    # returned." with an unbalanced parenthesis, unlike every
                    # sibling set command.
                    self.writeCmdMsg("Success(setHolidayDates): 06 returned.")
                    result = True
        self.serialPostEnd()
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
    self.setContext("")
    return result
def setWeekendHolidaySchedules(self, new_wknd, new_hldy, password="00000000"):
    """ Serial call to set weekend and holiday :class:`~ekmmeters.Schedules`.

    Args:
        new_wknd (int): :class:`~ekmmeters.Schedules` value for weekends.
        new_hldy (int): :class:`~ekmmeters.Schedules` value for holidays.
        password (str): Optional password.

    Returns:
        bool: True on completion and ACK.
    """
    result = False
    self.setContext("setWeekendHolidaySchedules")
    try:
        if not self.request(False):
            self.writeCmdMsg("Bad read CRC on setting")
        elif not self.serialCmdPwdAuth(password):
            self.writeCmdMsg("Password failure")
        else:
            # Weekend schedule byte precedes holiday schedule byte.
            wknd_hex = binascii.hexlify(str(new_wknd).zfill(2))
            hldy_hex = binascii.hexlify(str(new_hldy).zfill(2))
            req_str = "015731023030433028" + wknd_hex + hldy_hex + "2903"
            req_str += self.calc_crc16(req_str[2:].decode("hex"))
            self.m_serial_port.write(req_str.decode("hex"))
            ack = self.m_serial_port.getResponse(self.getContext()).encode("hex")
            if ack == "06":
                self.writeCmdMsg("Success(setWeekendHolidaySchedules): 06 returned.")
                result = True
        self.serialPostEnd()
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
    self.setContext("")
    return result
def readScheduleTariffs(self, tableset):
    """ Serial call to read schedule tariffs buffer

    Args:
        tableset (int): :class:`~ekmmeters.ReadSchedules` buffer to return.

    Returns:
        bool: True on completion and ACK.
    """
    self.setContext("readScheduleTariffs")
    try:
        # The single-digit table selector picks schedules 1-4 or 5-8.
        req_table = binascii.hexlify(str(tableset).zfill(1))
        req_str = "01523102303037" + req_table + "282903"
        self.request(False)
        # CRC is computed over the payload after the leading byte.
        req_crc = self.calc_crc16(req_str[2:].decode("hex"))
        req_str += req_crc
        self.m_serial_port.write(req_str.decode("hex"))
        raw_ret = self.m_serial_port.getResponse(self.getContext())
        self.serialPostEnd()
        # Returned-frame CRC excludes the first byte and trailing two bytes.
        return_crc = self.calc_crc16(raw_ret[1:-2])
        if tableset == ReadSchedules.Schedules_1_To_4:
            unpacked_read = self.unpackStruct(raw_ret, self.m_schd_1_to_4)
            self.convertData(unpacked_read, self.m_schd_1_to_4, self.m_kwh_precision)
            # Compare computed CRC to the crc16 field unpacked from the frame.
            if str(return_crc) == str(self.m_schd_1_to_4["crc16"][MeterData.StringValue]):
                ekm_log("Schedules 1 to 4 CRC success (06 return")
                self.setContext("")
                return True
        elif tableset == ReadSchedules.Schedules_5_To_8:
            unpacked_read = self.unpackStruct(raw_ret, self.m_schd_5_to_8)
            self.convertData(unpacked_read, self.m_schd_5_to_8, self.m_kwh_precision)
            if str(return_crc) == str(self.m_schd_5_to_8["crc16"][MeterData.StringValue]):
                ekm_log("Schedules 5 to 8 CRC success (06 return)")
                self.setContext("")
                return True
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
    # Reached on CRC mismatch, unknown tableset, or exception.
    self.setContext("")
    return False
def extractScheduleTariff(self, schedule, tariff):
    """ Read a single schedule tariff from meter object buffer.

    Args:
        schedule (int): A :class:`~ekmmeters.Schedules` value or in range(Extent.Schedules).
        tariff (int): A :class:`~ekmmeters.Tariffs` value or in range(Extent.Tariffs).

    Returns:
        tuple: Hour, Min, Rate, Tariff and Schedule as strings (all "0"
        except Tariff/Schedule when the request is out of bounds or the
        buffer key is missing).
    """
    # NOTE(review): values are assigned onto the namedtuple *class* object
    # rather than an instance.  This works because a fresh class is created
    # per call, so no state leaks between calls -- but it is fragile; do not
    # reuse the returned object as a real namedtuple instance.
    ret = namedtuple("ret", ["Hour", "Min", "Rate", "Tariff", "Schedule"])
    # Schedules 5-8 live in a second buffer, filled by readScheduleTariffs.
    work_table = self.m_schd_1_to_4
    if Schedules.Schedule_5 <= schedule <= Schedules.Schedule_8:
        work_table = self.m_schd_5_to_8
    # API values are zero based; the buffer keys are one based.
    tariff += 1
    schedule += 1
    ret.Tariff = str(tariff)
    ret.Schedule = str(schedule)
    if (schedule < 1) or (schedule > Extents.Schedules) or (tariff < 0) or (tariff > Extents.Tariffs):
        ekm_log("Out of bounds: tariff " + str(tariff) + " for schedule " + str(schedule))
        ret.Hour = ret.Min = ret.Rate = str(0)
        return ret
    idxhr = "Schd_" + str(schedule) + "_Tariff_" + str(tariff) + "_Hour"
    idxmin = "Schd_" + str(schedule) + "_Tariff_" + str(tariff) + "_Min"
    idxrate = "Schd_" + str(schedule) + "_Tariff_" + str(tariff) + "_Rate"
    if idxhr not in work_table:
        ekm_log("Incorrect index: " + idxhr)
        ret.Hour = ret.Min = ret.Rate = str(0)
        return ret
    if idxmin not in work_table:
        ekm_log("Incorrect index: " + idxmin)
        ret.Hour = ret.Min = ret.Rate = str(0)
        return ret
    if idxrate not in work_table:
        ekm_log("Incorrect index: " + idxrate)
        ret.Hour = ret.Min = ret.Rate = str(0)
        return ret
    ret.Hour = work_table[idxhr][MeterData.StringValue]
    # Minutes are zero padded to two digits for display.
    ret.Min = work_table[idxmin][MeterData.StringValue].zfill(2)
    ret.Rate = work_table[idxrate][MeterData.StringValue]
    return ret
def readMonthTariffs(self, months_type):
    """ Serial call to read month tariffs block into meter object buffer.

    Args:
        months_type (int): A :class:`~ekmmeters.ReadMonths` value.

    Returns:
        bool: True on completion.
    """
    self.setContext("readMonthTariffs")
    try:
        # Single-digit selector chooses forward or reverse kWh table.
        req_type = binascii.hexlify(str(months_type).zfill(1))
        req_str = "01523102303031" + req_type + "282903"
        # Reverse reads land in their own buffer; forward is the default.
        work_table = self.m_mons
        if months_type == ReadMonths.kWhReverse:
            work_table = self.m_rev_mons
        self.request(False)
        # CRC is computed over the payload after the leading byte.
        req_crc = self.calc_crc16(req_str[2:].decode("hex"))
        req_str += req_crc
        self.m_serial_port.write(req_str.decode("hex"))
        raw_ret = self.m_serial_port.getResponse(self.getContext())
        self.serialPostEnd()
        unpacked_read = self.unpackStruct(raw_ret, work_table)
        self.convertData(unpacked_read, work_table, self.m_kwh_precision)
        # Returned-frame CRC excludes the first byte and trailing two bytes.
        return_crc = self.calc_crc16(raw_ret[1:-2])
        if str(return_crc) == str(work_table["crc16"][MeterData.StringValue]):
            ekm_log("Months CRC success, type = " + str(req_type))
            self.setContext("")
            return True
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
    # Reached on CRC mismatch or exception.
    self.setContext("")
    return False
def extractMonthTariff(self, month):
    """ Extract the tariff for a single month from the meter object buffer.

    Args:
        month (int): A :class:`~ekmmeters.Months` value or range(Extents.Months).

    Returns:
        tuple: The eight tariff period totals for month.  The return tuple breaks out as follows:

        ================= ======================================
        kWh_Tariff_1      kWh for tariff period 1 over month.
        kWh_Tariff_2      kWh for tariff period 2 over month
        kWh_Tariff_3      kWh for tariff period 3 over month
        kWh_Tariff_4      kWh for tariff period 4 over month
        kWh_Tot           Total kWh over requested month
        Rev_kWh_Tariff_1  Rev kWh for tariff period 1 over month
        Rev_kWh_Tariff_2  Rev kWh for tariff period 2 over month
        Rev_kWh_Tariff_3  Rev kWh for tariff period 3 over month
        Rev_kWh_Tariff_4  Rev kWh for tariff period 4 over month
        Rev_kWh_Tot       Total Rev kWh over requested month
        ================= ======================================
    """
    # NOTE(review): as elsewhere in this module, results are assigned onto
    # the namedtuple class object, not an instance.
    ret = namedtuple("ret", ["Month", Field.kWh_Tariff_1, Field.kWh_Tariff_2, Field.kWh_Tariff_3,
                             Field.kWh_Tariff_4, Field.kWh_Tot, Field.Rev_kWh_Tariff_1,
                             Field.Rev_kWh_Tariff_2, Field.Rev_kWh_Tariff_3,
                             Field.Rev_kWh_Tariff_4, Field.Rev_kWh_Tot])
    # API month is zero based; buffer keys are one based.
    month += 1
    ret.Month = str(month)
    if (month < 1) or (month > Extents.Months):
        # Out of range: return all-zero totals rather than raising.
        ret.kWh_Tariff_1 = ret.kWh_Tariff_2 = ret.kWh_Tariff_3 = ret.kWh_Tariff_4 = str(0)
        ret.Rev_kWh_Tariff_1 = ret.Rev_kWh_Tariff_2 = ret.Rev_kWh_Tariff_3 = ret.Rev_kWh_Tariff_4 = str(0)
        ret.kWh_Tot = ret.Rev_kWh_Tot = str(0)
        ekm_log("Out of range(Extents.Months) month = " + str(month))
        return ret
    # Forward totals come from m_mons, reverse from m_rev_mons (both are
    # filled by readMonthTariffs).
    base_str = "Month_" + str(month) + "_"
    ret.kWh_Tariff_1 = self.m_mons[base_str + "Tariff_1"][MeterData.StringValue]
    ret.kWh_Tariff_2 = self.m_mons[base_str + "Tariff_2"][MeterData.StringValue]
    ret.kWh_Tariff_3 = self.m_mons[base_str + "Tariff_3"][MeterData.StringValue]
    ret.kWh_Tariff_4 = self.m_mons[base_str + "Tariff_4"][MeterData.StringValue]
    ret.kWh_Tot = self.m_mons[base_str + "Tot"][MeterData.StringValue]
    ret.Rev_kWh_Tariff_1 = self.m_rev_mons[base_str + "Tariff_1"][MeterData.StringValue]
    ret.Rev_kWh_Tariff_2 = self.m_rev_mons[base_str + "Tariff_2"][MeterData.StringValue]
    ret.Rev_kWh_Tariff_3 = self.m_rev_mons[base_str + "Tariff_3"][MeterData.StringValue]
    ret.Rev_kWh_Tariff_4 = self.m_rev_mons[base_str + "Tariff_4"][MeterData.StringValue]
    ret.Rev_kWh_Tot = self.m_rev_mons[base_str + "Tot"][MeterData.StringValue]
    return ret
def readHolidayDates(self):
    """ Serial call to read holiday dates into meter object buffer.

    Returns:
        bool: True on completion.
    """
    self.setContext("readHolidayDates")
    try:
        req_str = "0152310230304230282903"
        self.request(False)
        # CRC covers the request payload after the leading byte.
        req_str += self.calc_crc16(req_str[2:].decode("hex"))
        self.m_serial_port.write(req_str.decode("hex"))
        raw_ret = self.m_serial_port.getResponse(self.getContext())
        self.serialPostEnd()
        unpacked = self.unpackStruct(raw_ret, self.m_hldy)
        self.convertData(unpacked, self.m_hldy, self.m_kwh_precision)
        frame_crc = self.calc_crc16(raw_ret[1:-2])
        if str(frame_crc) == str(self.m_hldy["crc16"][MeterData.StringValue]):
            ekm_log("Holidays and Schedules CRC success")
            self.setContext("")
            return True
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
    self.setContext("")
    return False
def extractHolidayDate(self, setting_holiday):
    """ Read a single holiday date from meter buffer.

    Args:
        setting_holiday (int): Holiday from 0-19 or in range(Extents.Holidays)

    Returns:
        tuple: Holiday tuple, elements are strings.

        =============== ======================
        Holiday         Holiday 0-19 as string
        Day             Day 1-31 as string
        Month           Month 1-12 as string
        =============== ======================
    """
    # NOTE(review): results are assigned onto the namedtuple class object,
    # not an instance, as elsewhere in this module.
    ret = namedtuple("result", ["Holiday", "Month", "Day"])
    # API holiday index is zero based; buffer keys are one based.
    setting_holiday += 1
    ret.Holiday = str(setting_holiday)
    if (setting_holiday < 1) or (setting_holiday > Extents.Holidays):
        ekm_log("Out of bounds: holiday " + str(setting_holiday))
        ret.Holiday = ret.Month = ret.Day = str(0)
        return ret
    # NOTE(review): the read buffer m_hldy appears to use a "_Mon" key
    # suffix, unlike the "_Month" suffix used by the write-side buffer in
    # assignHolidayDate -- assumed to match the m_hldy definition; verify.
    idxday = "Holiday_" + str(setting_holiday) + "_Day"
    idxmon = "Holiday_" + str(setting_holiday) + "_Mon"
    if idxmon not in self.m_hldy:
        ret.Holiday = ret.Month = ret.Day = str(0)
        return ret
    if idxday not in self.m_hldy:
        ret.Holiday = ret.Month = ret.Day = str(0)
        return ret
    ret.Day = self.m_hldy[idxday][MeterData.StringValue]
    ret.Month = self.m_hldy[idxmon][MeterData.StringValue]
    return ret
def extractHolidayWeekendSchedules(self):
    """ Extract holiday and weekend :class:`~ekmmeters.Schedule` from meter object buffer.

    Returns:
        tuple: Holiday and weekend :class:`~ekmmeters.Schedule` values, as strings.

        ======= ======================================
        Holiday :class:`~ekmmeters.Schedule` as string
        Weekend :class:`~ekmmeters.Schedule` as string
        ======= ======================================
    """
    schedules = namedtuple("result", ["Weekend", "Holiday"])
    weekend_value = self.m_hldy["Weekend_Schd"][MeterData.StringValue]
    holiday_value = self.m_hldy["Holiday_Schd"][MeterData.StringValue]
    schedules.Weekend = weekend_value
    schedules.Holiday = holiday_value
    return schedules
def readSettings(self):
    """Recommended call to read all meter settings at once.

    Returns:
        bool: True if all subsequent serial calls completed with ACK.
    """
    # Guard-clause form of the original chained "and": stops at the
    # first failed read, exactly like short-circuit evaluation.
    if not self.readHolidayDates():
        return False
    if not self.readMonthTariffs(ReadMonths.kWh):
        return False
    if not self.readMonthTariffs(ReadMonths.kWhReverse):
        return False
    if not self.readScheduleTariffs(ReadSchedules.Schedules_1_To_4):
        return False
    return self.readScheduleTariffs(ReadSchedules.Schedules_5_To_8)
def writeCmdMsg(self, msg):
    """ Internal method to set the command result string.

    Args:
        msg (str): Message built during command.
    """
    log_line = "(writeCmdMsg | " + self.getContext() + ") " + msg
    ekm_log(log_line)
    self.m_command_msg = msg
def readCmdMsg(self):
    """ Return the message recorded by the most recent command, if any.

    Returns:
        str: Last set message, possibly empty.
    """
    return self.m_command_msg
def clearCmdMsg(self):
    """ Reset the command message result hint string to empty. """
    self.m_command_msg = ""
def serialCmdPwdAuth(self, password_str):
    """ Password step of set commands

    This method is normally called within another serial command, so it
    does not issue a termination string.  Any default password is set
    in the caller parameter list, never here.

    Args:
        password_str (str): Required password.

    Returns:
        bool: True on completion and ACK.
    """
    result = False
    try:
        # Frame (as hex text): 01 'P' '1' 02 '(' <password> ')' 03.
        req_start = "0150310228" + binascii.hexlify(password_str) + "2903"
        # CRC is computed over the frame after the leading byte.
        req_crc = self.calc_crc16(req_start[2:].decode("hex"))
        req_str = req_start + req_crc
        self.m_serial_port.write(req_str.decode("hex"))
        # 0x06 is the meter's ACK byte.
        if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
            ekm_log("Password accepted (" + self.getContext() + ")")
            result = True
        else:
            ekm_log("Password call failure no 06(" + self.getContext() + ")")
    except:
        ekm_log("Password call failure by exception(" + self.getContext() + ")")
        ekm_log(traceback.format_exc(sys.exc_info()))
    return result
class MeterObserver(object):
    """ Unenforced abstract base class for observer-pattern implementations.

    To use, subclass and override both the constructor and update().
    """

    def __init__(self):
        pass

    def update(self, definition_buffer):
        """ Invoked by the attached :class:`~ekmmeters.Meter` on every :func:`~ekmmeters.Meter.request`.

        Args:
            definition_buffer (SerialBlock): SerialBlock for request
        """
        pass
class IntervalObserver(MeterObserver):
    """ Simplest possible MeterObserver subclass. Use as template. """

    def __init__(self, interval):
        """
        Args:
            interval (int): Interval to summarize
        """
        super(IntervalObserver, self).__init__()
        self.m_interval = interval
        self.m_summary = SerialBlock()

    def update(self, def_buf):
        """ Required override of the update method called by the meter.

        No-op in this example subclass.

        Args:
            def_buf (SerialBlock): Buffer from last read.
        """
        ekm_log("Example update() in IntervalObserver called.")
class V3Meter(Meter):
    """Subclass of Meter and interface to v3 meters."""

    def __init__(self, meter_address="000000000000"):
        """
        Args:
            meter_address (str): 12 character meter address from front of meter.
        """
        self.m_serial_port = None
        self.m_meter_address = ""
        # NOTE(review): double underscore in "queue__time" looks like a typo,
        # but the name is preserved in case it is referenced elsewhere.
        self.m_last_outgoing_queue__time = 0
        self.m_last_incoming_queue_guid = ""
        self.m_raw_read_a = ""
        # BUG FIX: insert() passes m_raw_read_b to MeterDB.dbInsert(), but a
        # v3 meter performs only an A read and this attribute was never set
        # in this class, so insert() could raise AttributeError.  Default it
        # to empty here; assigned before super() so a base-class value, if
        # any, still wins.
        self.m_raw_read_b = ""
        self.m_a_crc = False
        self.m_kwh_precision = ScaleKWH.Scale10
        super(V3Meter, self).__init__(meter_address)
        # definition buffer for synthetic read
        # (built after reads complete, may merge A and B if necessary)
        self.m_req = SerialBlock()
        self.m_blk_a = SerialBlock()
        self.initWorkFormat()

    def attachPort(self, serial_port):
        """Attach required :class:`~ekmmeters.SerialPort`.

        Args:
            serial_port (SerialPort): Serial port object, does not need to be initialized.
        """
        self.m_serial_port = serial_port

    def initWorkFormat(self):
        """ Initialize :class:`~ekmmeters.SerialBlock` for V3 read.

        Each row is a 7-element list: field width in bytes, FieldType,
        ScaleType, then buffer/flag slots.  NOTE(review): the exact meaning
        of the last four columns is defined by SerialBlock elsewhere in this
        module -- verify there before changing any row.
        """
        self.m_blk_a["reserved_10"] = [1, FieldType.Hex, ScaleType.No, "", 0, False, False]
        self.m_blk_a[Field.Model] = [2, FieldType.Hex, ScaleType.No, "", 0, False, True]
        self.m_blk_a[Field.Firmware] = [1, FieldType.Hex, ScaleType.No, "", 0, False, True]
        self.m_blk_a[Field.Meter_Address] = [12, FieldType.String, ScaleType.No, "", 0, False, True]
        self.m_blk_a[Field.kWh_Tot] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
        self.m_blk_a[Field.kWh_Tariff_1] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
        self.m_blk_a[Field.kWh_Tariff_2] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
        self.m_blk_a[Field.kWh_Tariff_3] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
        self.m_blk_a[Field.kWh_Tariff_4] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
        self.m_blk_a[Field.Rev_kWh_Tot] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
        self.m_blk_a[Field.Rev_kWh_Tariff_1] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
        self.m_blk_a[Field.Rev_kWh_Tariff_2] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
        self.m_blk_a[Field.Rev_kWh_Tariff_3] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
        self.m_blk_a[Field.Rev_kWh_Tariff_4] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
        self.m_blk_a[Field.RMS_Volts_Ln_1] = [4, FieldType.Float, ScaleType.Div10, "", 0, False, False]
        self.m_blk_a[Field.RMS_Volts_Ln_2] = [4, FieldType.Float, ScaleType.Div10, "", 0, False, False]
        self.m_blk_a[Field.RMS_Volts_Ln_3] = [4, FieldType.Float, ScaleType.Div10, "", 0, False, False]
        self.m_blk_a[Field.Amps_Ln_1] = [5, FieldType.Float, ScaleType.Div10, "", 0, False, False]
        self.m_blk_a[Field.Amps_Ln_2] = [5, FieldType.Float, ScaleType.Div10, "", 0, False, False]
        self.m_blk_a[Field.Amps_Ln_3] = [5, FieldType.Float, ScaleType.Div10, "", 0, False, False]
        self.m_blk_a[Field.RMS_Watts_Ln_1] = [7, FieldType.Int, ScaleType.No, "", 0, False, False]
        self.m_blk_a[Field.RMS_Watts_Ln_2] = [7, FieldType.Int, ScaleType.No, "", 0, False, False]
        self.m_blk_a[Field.RMS_Watts_Ln_3] = [7, FieldType.Int, ScaleType.No, "", 0, False, False]
        self.m_blk_a[Field.RMS_Watts_Tot] = [7, FieldType.Int, ScaleType.No, "", 0, False, False]
        self.m_blk_a[Field.Cos_Theta_Ln_1] = [4, FieldType.PowerFactor, ScaleType.No, "", 0, False, False]
        self.m_blk_a[Field.Cos_Theta_Ln_2] = [4, FieldType.PowerFactor, ScaleType.No, "", 0, False, False]
        self.m_blk_a[Field.Cos_Theta_Ln_3] = [4, FieldType.PowerFactor, ScaleType.No, "", 0, False, False]
        self.m_blk_a[Field.Max_Demand] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, True]
        self.m_blk_a[Field.Max_Demand_Period] = [1, FieldType.Int, ScaleType.No, "", 0, False, True]
        self.m_blk_a[Field.Meter_Time] = [14, FieldType.String, ScaleType.No, "", 0, False, False]
        self.m_blk_a[Field.CT_Ratio] = [4, FieldType.Int, ScaleType.No, "", 0, False, True]
        self.m_blk_a[Field.Pulse_Cnt_1] = [8, FieldType.Int, ScaleType.No, "", 0, False, False]
        self.m_blk_a[Field.Pulse_Cnt_2] = [8, FieldType.Int, ScaleType.No, "", 0, False, False]
        self.m_blk_a[Field.Pulse_Cnt_3] = [8, FieldType.Int, ScaleType.No, "", 0, False, False]
        self.m_blk_a[Field.Pulse_Ratio_1] = [4, FieldType.Int, ScaleType.No, "", 0, False, True]
        self.m_blk_a[Field.Pulse_Ratio_2] = [4, FieldType.Int, ScaleType.No, "", 0, False, True]
        self.m_blk_a[Field.Pulse_Ratio_3] = [4, FieldType.Int, ScaleType.No, "", 0, False, True]
        self.m_blk_a[Field.State_Inputs] = [3, FieldType.Int, ScaleType.No, "", 0, False, True]
        self.m_blk_a["reserved_11"] = [19, FieldType.Hex, ScaleType.No, "", 0, False, False]
        self.m_blk_a[Field.Status_A] = [1, FieldType.Hex, ScaleType.No, "", 0, False, False]
        self.m_blk_a["reserved_12"] = [4, FieldType.Hex, ScaleType.No, "", 0, False, False]
        self.m_blk_a["crc16"] = [2, FieldType.Hex, ScaleType.No, "", 0, False, False]
        # Computed fields (not part of the wire frame), filled by calculateFields().
        self.m_blk_a[Field.Power_Factor_Ln_1] = [4, FieldType.Int, ScaleType.No, "0", 0, True, False]
        self.m_blk_a[Field.Power_Factor_Ln_2] = [4, FieldType.Int, ScaleType.No, "0", 0, True, False]
        self.m_blk_a[Field.Power_Factor_Ln_3] = [4, FieldType.Int, ScaleType.No, "0", 0, True, False]

    def request(self, send_terminator=False):
        """Required request() override for v3 and standard method to read meter.

        Args:
            send_terminator (bool): Send termination string at end of read.

        Returns:
            bool: CRC request flag result from most recent read
        """
        self.m_a_crc = False
        start_context = self.getContext()
        self.setContext("request[v3A]")
        try:
            # "/?<address>!\r\n" request, encoded as hex text.
            self.m_serial_port.write("2f3f".decode("hex") +
                                     self.m_meter_address +
                                     "210d0a".decode("hex"))
            self.m_raw_read_a = self.m_serial_port.getResponse(self.getContext())
            unpacked_read_a = self.unpackStruct(self.m_raw_read_a, self.m_blk_a)
            self.convertData(unpacked_read_a, self.m_blk_a, 1)
            self.m_a_crc = self.crcMeterRead(self.m_raw_read_a, self.m_blk_a)
            if send_terminator:
                self.serialPostEnd()
            self.calculateFields()
            self.makeReturnFormat()
        except:
            ekm_log(traceback.format_exc(sys.exc_info()))
        self.setContext(start_context)
        return self.m_a_crc

    def makeReturnFormat(self):
        """ Strip reserved and CRC fields while filling the m_req :class:`~ekmmeters.SerialBlock`. """
        for fld in self.m_blk_a:
            fld_upper = fld.upper()
            if "RESERVED" not in fld_upper and "CRC" not in fld_upper:
                self.m_req[fld] = self.m_blk_a[fld]

    def getReadBuffer(self):
        """ Return :class:`~ekmmeters.SerialBlock` for last read.

        Appropriate for conversion to JSON or other extraction.

        Returns:
            SerialBlock: A read.
        """
        return self.m_req

    def insert(self, meter_db):
        """ Insert to :class:`~ekmmeters.MeterDB` subclass.

        Please note MeterDB subclassing is only for simplest-case.

        Args:
            meter_db (MeterDB): Instance of subclass of MeterDB.
        """
        if meter_db:
            # m_raw_read_b is always defined (see __init__); a v3 meter has
            # no B read so it is normally empty.
            meter_db.dbInsert(self.m_req, self.m_raw_read_a, self.m_raw_read_b)
        else:
            ekm_log("Attempt to insert when no MeterDB assigned.")

    def updateObservers(self):
        """ Fire update method in all attached observers in order of attachment. """
        for observer in self.m_observers:
            try:
                observer.update(self.m_req)
            except:
                # One failing observer must not block the rest.
                ekm_log(traceback.format_exc(sys.exc_info()))

    def getField(self, fld_name):
        """ Return :class:`~ekmmeters.Field` content, scaled and formatted.

        Args:
            fld_name (str): A :class:`~ekmmeters.Field` value which is on your meter.

        Returns:
            str: String value (scaled if numeric) for the field, empty if absent.
        """
        result = ""
        if fld_name in self.m_req:
            result = self.m_req[fld_name][MeterData.StringValue]
        else:
            ekm_log("Requested nonexistent field: " + fld_name)
        return result

    def calculateFields(self):
        """ Compute the three per-line power factor fields from the
        cos-theta strings read from the meter, and store both string and
        native values back into the A block. """
        pf1 = self.m_blk_a[Field.Cos_Theta_Ln_1][MeterData.StringValue]
        pf2 = self.m_blk_a[Field.Cos_Theta_Ln_2][MeterData.StringValue]
        pf3 = self.m_blk_a[Field.Cos_Theta_Ln_3][MeterData.StringValue]
        pf1_int = self.calcPF(pf1)
        pf2_int = self.calcPF(pf2)
        pf3_int = self.calcPF(pf3)
        self.m_blk_a[Field.Power_Factor_Ln_1][MeterData.StringValue] = str(pf1_int)
        self.m_blk_a[Field.Power_Factor_Ln_2][MeterData.StringValue] = str(pf2_int)
        self.m_blk_a[Field.Power_Factor_Ln_3][MeterData.StringValue] = str(pf3_int)
        self.m_blk_a[Field.Power_Factor_Ln_1][MeterData.NativeValue] = pf1_int
        self.m_blk_a[Field.Power_Factor_Ln_2][MeterData.NativeValue] = pf2_int
        self.m_blk_a[Field.Power_Factor_Ln_3][MeterData.NativeValue] = pf3_int

    def serialPostEnd(self):
        """ Post termination code to implicitly current meter. """
        ekm_log("Termination string sent (" + self.m_context + ")")
        self.m_serial_port.write("0142300375".decode("hex"))
class V4Meter(Meter):
""" Commands and buffers for V4 Omnnimeter. """
def __init__(self, meter_address="000000000000"):
    """
    Args:
        meter_address (str): 12 character meter address.
    """
    self.m_serial_port = None
    self.m_meter_address = ""
    # Raw bytes of the two-part v4 read.
    self.m_raw_read_a = ""
    self.m_raw_read_b = ""
    # Presumably CRC-ok flags for the A and B reads (mirrors
    # V3Meter.m_a_crc) -- set by the request path; verify there.
    self.m_a_crc = False
    self.m_b_crc = False
    self.m_kwh_precision = ScaleKWH.EmptyScale
    self.m_lcd_lookup = {}
    super(V4Meter, self).__init__(meter_address)
    # definition buffer for synthetic AB read (built after reads complete
    # static, offsets for retrieving and writing format values
    self.m_req = SerialBlock()
    # read formats
    self.m_blk_a = SerialBlock()
    self.initFormatA()
    self.m_blk_b = SerialBlock()
    self.initFormatB()
    self.initLcd()
    self.initLcdLookup()
def attachPort(self, serial_port):
    """ Required override to attach the port to the meter.

    Args:
        serial_port (SerialPort): Declared serial port. Does not need to be initialized.
    """
    self.m_serial_port = serial_port
def initFormatA(self):
    """ Initialize A read :class:`~ekmmeters.SerialBlock`.

    Each row is a 7-element list: field width in bytes, FieldType,
    ScaleType, then buffer/flag slots.  NOTE(review): the exact meaning of
    the last four columns is defined by SerialBlock elsewhere in this
    module -- verify there before changing any row.
    """
    self.m_blk_a["reserved_1"] = [1, FieldType.Hex, ScaleType.No, "", 0, False, False]
    self.m_blk_a[Field.Model] = [2, FieldType.Hex, ScaleType.No, "", 0, False, True]
    self.m_blk_a[Field.Firmware] = [1, FieldType.Hex, ScaleType.No, "", 0, False, True]
    self.m_blk_a[Field.Meter_Address] = [12, FieldType.String, ScaleType.No, "", 0, False, True]
    self.m_blk_a[Field.kWh_Tot] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    self.m_blk_a[Field.Reactive_Energy_Tot] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    self.m_blk_a[Field.Rev_kWh_Tot] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    self.m_blk_a[Field.kWh_Ln_1] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    self.m_blk_a[Field.kWh_Ln_2] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    self.m_blk_a[Field.kWh_Ln_3] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    self.m_blk_a[Field.Rev_kWh_Ln_1] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    self.m_blk_a[Field.Rev_kWh_Ln_2] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    self.m_blk_a[Field.Rev_kWh_Ln_3] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    self.m_blk_a[Field.Resettable_kWh_Tot] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    self.m_blk_a[Field.Resettable_Rev_kWh_Tot] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    self.m_blk_a[Field.RMS_Volts_Ln_1] = [4, FieldType.Float, ScaleType.Div10, "", 0, False, False]
    self.m_blk_a[Field.RMS_Volts_Ln_2] = [4, FieldType.Float, ScaleType.Div10, "", 0, False, False]
    self.m_blk_a[Field.RMS_Volts_Ln_3] = [4, FieldType.Float, ScaleType.Div10, "", 0, False, False]
    self.m_blk_a[Field.Amps_Ln_1] = [5, FieldType.Float, ScaleType.Div10, "", 0, False, False]
    self.m_blk_a[Field.Amps_Ln_2] = [5, FieldType.Float, ScaleType.Div10, "", 0, False, False]
    self.m_blk_a[Field.Amps_Ln_3] = [5, FieldType.Float, ScaleType.Div10, "", 0, False, False]
    self.m_blk_a[Field.RMS_Watts_Ln_1] = [7, FieldType.Int, ScaleType.No, "", 0, False, False]
    self.m_blk_a[Field.RMS_Watts_Ln_2] = [7, FieldType.Int, ScaleType.No, "", 0, False, False]
    self.m_blk_a[Field.RMS_Watts_Ln_3] = [7, FieldType.Int, ScaleType.No, "", 0, False, False]
    self.m_blk_a[Field.RMS_Watts_Tot] = [7, FieldType.Int, ScaleType.No, "", 0, False, False]
    self.m_blk_a[Field.Cos_Theta_Ln_1] = [4, FieldType.PowerFactor, ScaleType.No, "", 0, False, False]
    self.m_blk_a[Field.Cos_Theta_Ln_2] = [4, FieldType.PowerFactor, ScaleType.No, "", 0, False, False]
    self.m_blk_a[Field.Cos_Theta_Ln_3] = [4, FieldType.PowerFactor, ScaleType.No, "", 0, False, False]
    self.m_blk_a[Field.Reactive_Pwr_Ln_1] = [7, FieldType.Int, ScaleType.No, "", 0, False, False]
    self.m_blk_a[Field.Reactive_Pwr_Ln_2] = [7, FieldType.Int, ScaleType.No, "", 0, False, False]
    self.m_blk_a[Field.Reactive_Pwr_Ln_3] = [7, FieldType.Int, ScaleType.No, "", 0, False, False]
    self.m_blk_a[Field.Reactive_Pwr_Tot] = [7, FieldType.Int, ScaleType.No, "", 0, False, False]
    self.m_blk_a[Field.Line_Freq] = [4, FieldType.Float, ScaleType.Div100, "", 0, False, False]
    self.m_blk_a[Field.Pulse_Cnt_1] = [8, FieldType.Int, ScaleType.No, "", 0, False, False]
    self.m_blk_a[Field.Pulse_Cnt_2] = [8, FieldType.Int, ScaleType.No, "", 0, False, False]
    self.m_blk_a[Field.Pulse_Cnt_3] = [8, FieldType.Int, ScaleType.No, "", 0, False, False]
    self.m_blk_a[Field.State_Inputs] = [1, FieldType.Int, ScaleType.No, "", 0, False, False]
    self.m_blk_a[Field.State_Watts_Dir] = [1, FieldType.Int, ScaleType.No, "", 0, False, True]
    self.m_blk_a[Field.State_Out] = [1, FieldType.Int, ScaleType.No, "", 0, False, True]
    self.m_blk_a[Field.kWh_Scale] = [1, FieldType.Int, ScaleType.No, "", 0, False, True]
    self.m_blk_a["reserved_2"] = [2, FieldType.Hex, ScaleType.No, "", 0, False, False]
    self.m_blk_a[Field.Meter_Time] = [14, FieldType.String, ScaleType.No, "", 0, False, False]
    self.m_blk_a["reserved_3"] = [2, FieldType.Hex, ScaleType.No, "", 0, False, False]
    self.m_blk_a["reserved_4"] = [4, FieldType.Hex, ScaleType.No, "", 0, False, False]
    self.m_blk_a["crc16"] = [2, FieldType.Hex, ScaleType.No, "", 0, False, False]
    # Computed fields (not part of the wire frame).
    self.m_blk_a[Field.Power_Factor_Ln_1] = [4, FieldType.Int, ScaleType.No, "0", 0, True, False]
    self.m_blk_a[Field.Power_Factor_Ln_2] = [4, FieldType.Int, ScaleType.No, "0", 0, True, False]
    self.m_blk_a[Field.Power_Factor_Ln_3] = [4, FieldType.Int, ScaleType.No, "0", 0, True, False]
    pass
def initFormatB(self):
    """ Initialize B read :class:`~ekmmeters.SerialBlock`.

    Each entry maps a field name to a definition list of the form
    [byte length, FieldType, ScaleType, string value, native value,
    calculated flag, settable flag] -- presumably matching the MeterData
    index constants used elsewhere in this class; confirm against the
    SerialBlock definition.
    """
    # Meter identification block.
    self.m_blk_b["reserved_5"] = [1, FieldType.Hex, ScaleType.No, "", 0, False, False]
    self.m_blk_b[Field.Model] = [2, FieldType.Hex, ScaleType.No, "", 0, False, True]
    self.m_blk_b[Field.Firmware] = [1, FieldType.Hex, ScaleType.No, "", 0, False, True]
    self.m_blk_b[Field.Meter_Address] = [12, FieldType.String, ScaleType.No, "", 0, False, True]
    # Forward and reverse kWh per tariff.
    self.m_blk_b[Field.kWh_Tariff_1] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    self.m_blk_b[Field.kWh_Tariff_2] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    self.m_blk_b[Field.kWh_Tariff_3] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    self.m_blk_b[Field.kWh_Tariff_4] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    self.m_blk_b[Field.Rev_kWh_Tariff_1] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    self.m_blk_b[Field.Rev_kWh_Tariff_2] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    self.m_blk_b[Field.Rev_kWh_Tariff_3] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    self.m_blk_b[Field.Rev_kWh_Tariff_4] = [8, FieldType.Float, ScaleType.KWH, "", 0, False, False]
    # Instantaneous per-line electrical values.
    self.m_blk_b[Field.RMS_Volts_Ln_1] = [4, FieldType.Float, ScaleType.Div10, "", 0, False, False]
    self.m_blk_b[Field.RMS_Volts_Ln_2] = [4, FieldType.Float, ScaleType.Div10, "", 0, False, False]
    self.m_blk_b[Field.RMS_Volts_Ln_3] = [4, FieldType.Float, ScaleType.Div10, "", 0, False, False]
    self.m_blk_b[Field.Amps_Ln_1] = [5, FieldType.Float, ScaleType.Div10, "", 0, False, False]
    self.m_blk_b[Field.Amps_Ln_2] = [5, FieldType.Float, ScaleType.Div10, "", 0, False, False]
    self.m_blk_b[Field.Amps_Ln_3] = [5, FieldType.Float, ScaleType.Div10, "", 0, False, False]
    self.m_blk_b[Field.RMS_Watts_Ln_1] = [7, FieldType.Int, ScaleType.No, "", 0, False, False]
    self.m_blk_b[Field.RMS_Watts_Ln_2] = [7, FieldType.Int, ScaleType.No, "", 0, False, False]
    self.m_blk_b[Field.RMS_Watts_Ln_3] = [7, FieldType.Int, ScaleType.No, "", 0, False, False]
    self.m_blk_b[Field.RMS_Watts_Tot] = [7, FieldType.Int, ScaleType.No, "", 0, False, False]
    self.m_blk_b[Field.Cos_Theta_Ln_1] = [4, FieldType.PowerFactor, ScaleType.No, "", 0, False, False]
    self.m_blk_b[Field.Cos_Theta_Ln_2] = [4, FieldType.PowerFactor, ScaleType.No, "", 0, False, False]
    self.m_blk_b[Field.Cos_Theta_Ln_3] = [4, FieldType.PowerFactor, ScaleType.No, "", 0, False, False]
    # Demand and meter configuration (settable flags set where the meter
    # accepts writes for the field).
    self.m_blk_b[Field.RMS_Watts_Max_Demand] = [8, FieldType.Float, ScaleType.Div10, "", 0, False, False]
    self.m_blk_b[Field.Max_Demand_Period] = [1, FieldType.Int, ScaleType.No, "", 0, False, True]
    self.m_blk_b[Field.Pulse_Ratio_1] = [4, FieldType.Int, ScaleType.No, "", 0, False, True]
    self.m_blk_b[Field.Pulse_Ratio_2] = [4, FieldType.Int, ScaleType.No, "", 0, False, True]
    self.m_blk_b[Field.Pulse_Ratio_3] = [4, FieldType.Int, ScaleType.No, "", 0, False, True]
    self.m_blk_b[Field.CT_Ratio] = [4, FieldType.Int, ScaleType.No, "", 0, False, True]
    self.m_blk_b["reserved_6"] = [1, FieldType.Hex, ScaleType.No, "", 0, False, False]
    self.m_blk_b[Field.Pulse_Output_Ratio] = [4, FieldType.Int, ScaleType.No, "", 0, False, True]
    self.m_blk_b["reserved_7"] = [53, FieldType.Hex, ScaleType.No, "", 0, False, False]
    self.m_blk_b[Field.Status_A] = [1, FieldType.Hex, ScaleType.No, "", 0, False, True]
    self.m_blk_b[Field.Status_B] = [1, FieldType.Hex, ScaleType.No, "", 0, False, True]
    self.m_blk_b[Field.Status_C] = [1, FieldType.Hex, ScaleType.No, "", 0, False, True]
    self.m_blk_b[Field.Meter_Time] = [14, FieldType.String, ScaleType.No, "", 0, False, False]
    self.m_blk_b["reserved_8"] = [2, FieldType.Hex, ScaleType.No, "", 0, False, False]
    self.m_blk_b["reserved_9"] = [4, FieldType.Hex, ScaleType.No, "", 0, False, False]
    self.m_blk_b["crc16"] = [2, FieldType.Hex, ScaleType.No, "", 0, False, False]
    # Calculated (not on the wire) fields, populated by calculateFields().
    self.m_blk_b[Field.Net_Calc_Watts_Ln_1] = [7, FieldType.Int, ScaleType.No, "0", 0, True, False]
    self.m_blk_b[Field.Net_Calc_Watts_Ln_2] = [7, FieldType.Int, ScaleType.No, "0", 0, True, False]
    self.m_blk_b[Field.Net_Calc_Watts_Ln_3] = [7, FieldType.Int, ScaleType.No, "0", 0, True, False]
    self.m_blk_b[Field.Net_Calc_Watts_Tot] = [7, FieldType.Int, ScaleType.No, "0", 0, True, False]
    self.m_blk_b[Field.Power_Factor_Ln_1] = [4, FieldType.Int, ScaleType.No, "0", 0, True, False]
    self.m_blk_b[Field.Power_Factor_Ln_2] = [4, FieldType.Int, ScaleType.No, "0", 0, True, False]
    self.m_blk_b[Field.Power_Factor_Ln_3] = [4, FieldType.Int, ScaleType.No, "0", 0, True, False]
    pass
def initLcdLookup(self):
    """ Initialize lookup table for string input of LCD fields.

    Maps each field-name string to the identically named
    :class:`~ekmmeters.LCDItems` integer.

    Fixes a copy/paste bug where "Amps_Ln_2" previously mapped to
    ``LCDItems.Amps_Ln_1``.
    """
    # Every key is spelled exactly like its LCDItems attribute, so the
    # table can be built with getattr instead of 41 hand-written lines.
    lcd_item_names = [
        "kWh_Tot", "Rev_kWh_Tot",
        "RMS_Volts_Ln_1", "RMS_Volts_Ln_2", "RMS_Volts_Ln_3",
        "Amps_Ln_1", "Amps_Ln_2", "Amps_Ln_3",
        "RMS_Watts_Ln_1", "RMS_Watts_Ln_2", "RMS_Watts_Ln_3",
        "RMS_Watts_Tot",
        "Power_Factor_Ln_1", "Power_Factor_Ln_2", "Power_Factor_Ln_3",
        "kWh_Tariff_1", "kWh_Tariff_2", "kWh_Tariff_3", "kWh_Tariff_4",
        "Rev_kWh_Tariff_1", "Rev_kWh_Tariff_2", "Rev_kWh_Tariff_3",
        "Rev_kWh_Tariff_4",
        "Reactive_Pwr_Ln_1", "Reactive_Pwr_Ln_2", "Reactive_Pwr_Ln_3",
        "Reactive_Pwr_Tot",
        "Line_Freq",
        "Pulse_Cnt_1", "Pulse_Cnt_2", "Pulse_Cnt_3",
        "kWh_Ln_1", "Rev_kWh_Ln_1",
        "kWh_Ln_2", "Rev_kWh_Ln_2",
        "kWh_Ln_3", "Rev_kWh_Ln_3",
        "Reactive_Energy_Tot",
        "Max_Demand_Rst", "Rev_kWh_Rst",
        "State_Inputs", "Max_Demand",
    ]
    for item_name in lcd_item_names:
        self.m_lcd_lookup[item_name] = getattr(LCDItems, item_name)
def request(self, send_terminator = False):
    """ Combined A and B read for V4 meter.

    Args:
        send_terminator (bool): Send termination string at end of read.
            NOTE(review): not referenced in this body -- presumably
            consumed by a subclass or kept for interface compatibility;
            confirm against callers.

    Returns:
        bool: True on completion.
    """
    try:
        # Both halves must CRC-check before the combined buffer is built.
        if self.requestA() and self.requestB():
            self.makeAB()
            self.calculateFields()
            self.updateObservers()
            return True
    except:
        # NOTE(review): traceback.format_exc(sys.exc_info()) is a
        # Python 2 era call; format_exc() takes no exc_info argument in
        # Python 3 -- confirm target interpreter.
        ekm_log(traceback.format_exc(sys.exc_info()))
    return False
def requestA(self):
    """Issue an A read on V4 meter.

    Returns:
        bool: True if CRC match at end of call.
    """
    work_context = self.getContext()
    self.setContext("request[v4A]")
    # "2f3f" + address + "3030210d0a" decodes to "/?<address>00!\r\n",
    # the A-block request frame.
    self.m_serial_port.write("2f3f".decode("hex") + self.m_meter_address + "3030210d0a".decode("hex"))
    self.m_raw_read_a = self.m_serial_port.getResponse(self.getContext())
    unpacked_read_a = self.unpackStruct(self.m_raw_read_a, self.m_blk_a)
    self.convertData(unpacked_read_a, self.m_blk_a)
    # The kWh scale arrives in the A read and is reused by requestB()
    # when converting the B block.
    self.m_kwh_precision = int(self.m_blk_a[Field.kWh_Scale][MeterData.NativeValue])
    self.m_a_crc = self.crcMeterRead(self.m_raw_read_a, self.m_blk_a)
    self.setContext(work_context)
    return self.m_a_crc
def requestB(self):
    """ Issue a B read on V4 meter.

    Returns:
        bool: True if CRC match at end of call.
    """
    work_context = self.getContext()
    self.setContext("request[v4B]")
    # "2f3f" + address + "3031210d0a" decodes to "/?<address>01!\r\n",
    # the B-block request frame.
    self.m_serial_port.write("2f3f".decode("hex") + self.m_meter_address + "3031210d0a".decode("hex"))
    self.m_raw_read_b = self.m_serial_port.getResponse(self.getContext())
    unpacked_read_b = self.unpackStruct(self.m_raw_read_b, self.m_blk_b)
    # kWh precision was captured during the preceding A read.
    self.convertData(unpacked_read_b, self.m_blk_b, self.m_kwh_precision)
    self.m_b_crc = self.crcMeterRead(self.m_raw_read_b, self.m_blk_b)
    self.setContext(work_context)
    return self.m_b_crc
def makeAB(self):
    """ Munge A and B reads into single serial block with only unique fields.

    Copies every non-padding field from the A block, then the B block,
    into ``self.m_req`` (B entries overwrite duplicate A entries).
    """
    def _is_data_field(field_name):
        # Padding/reserved regions and CRC trailers carry no meter data.
        tag = field_name.upper()
        return ("RESERVED" not in tag) and ("CRC" not in tag)

    for block in (self.m_blk_a, self.m_blk_b):
        for field_name in block:
            if _is_data_field(field_name):
                self.m_req[field_name] = block[field_name]
def getReadBuffer(self):
    """ Return the read buffer containing A and B reads.

    Appropriate for JSON conversion or other processing in an agent.
    Populated by makeAB() after a successful request().

    Returns:
        SerialBlock: A :class:`~ekmmeters.SerialBlock` containing both A and B reads.
    """
    return self.m_req
def getField(self, fld_name):
    """ Return :class:`~ekmmeters.Field` content, scaled and formatted.

    Args:
        fld_name (str): A :class:`~ekmmeters.Field` value which is on your meter.

    Returns:
        str: String value (scaled if numeric) for the field, or "" if the
        field is not present in the read buffer.
    """
    if fld_name not in self.m_req:
        ekm_log("Requested nonexistent field: " + fld_name)
        return ""
    return self.m_req[fld_name][MeterData.StringValue]
def calculateFields(self):
    """Write calculated fields for read buffer.

    Derives the integer power factor for each line from the B-read
    cos(theta) strings, and the signed ("net") RMS watts per line and in
    total using the per-line direction flags from the A read.

    Fixes a copy/paste bug: the native value of ``Power_Factor_Ln_3``
    was previously written from the line 2 power factor.
    """
    # Per-line integer power factors from the cos(theta) strings.
    cos_to_pf = ((Field.Cos_Theta_Ln_1, Field.Power_Factor_Ln_1),
                 (Field.Cos_Theta_Ln_2, Field.Power_Factor_Ln_2),
                 (Field.Cos_Theta_Ln_3, Field.Power_Factor_Ln_3))
    for cos_fld, pf_fld in cos_to_pf:
        pf_int = self.calcPF(self.m_blk_b[cos_fld][MeterData.StringValue])
        self.m_blk_b[pf_fld][MeterData.StringValue] = str(pf_int)
        self.m_blk_b[pf_fld][MeterData.NativeValue] = pf_int

    # Map the direction byte to a (+1/-1) sign per line.  An unknown
    # byte leaves all lines positive, matching the original fall-through.
    sign_by_direction = {
        DirectionFlag.ForwardForwardForward: (1, 1, 1),
        DirectionFlag.ForwardForwardReverse: (1, 1, -1),
        DirectionFlag.ForwardReverseForward: (1, -1, 1),
        DirectionFlag.ReverseForwardForward: (-1, 1, 1),
        DirectionFlag.ForwardReverseReverse: (1, -1, -1),
        DirectionFlag.ReverseForwardReverse: (-1, 1, -1),
        DirectionFlag.ReverseReverseForward: (-1, -1, 1),
        DirectionFlag.ReverseReverseReverse: (-1, -1, -1),
    }
    direction_byte = self.m_blk_a[Field.State_Watts_Dir][MeterData.NativeValue]
    signs = sign_by_direction.get(direction_byte, (1, 1, 1))

    # Signed watts per line plus the signed total, written back both as
    # native and display values.
    watt_flds = (Field.RMS_Watts_Ln_1, Field.RMS_Watts_Ln_2, Field.RMS_Watts_Ln_3)
    net_flds = (Field.Net_Calc_Watts_Ln_1, Field.Net_Calc_Watts_Ln_2, Field.Net_Calc_Watts_Ln_3)
    net_watts_tot = 0
    for watt_fld, net_fld, sign in zip(watt_flds, net_flds, signs):
        net_watts = self.m_blk_b[watt_fld][MeterData.NativeValue] * sign
        net_watts_tot += net_watts
        self.m_blk_b[net_fld][MeterData.NativeValue] = net_watts
        self.m_blk_b[net_fld][MeterData.StringValue] = str(net_watts)
    self.m_blk_b[Field.Net_Calc_Watts_Tot][MeterData.NativeValue] = net_watts_tot
    self.m_blk_b[Field.Net_Calc_Watts_Tot][MeterData.StringValue] = str(net_watts_tot)
def updateObservers(self):
    """ Call the update() method in all attached observers in order of attachment.

    Called internally after request().
    """
    read_buffer = self.m_req
    for watcher in self.m_observers:
        watcher.update(read_buffer)
def insert(self, meter_db):
    """ Insert to :class:`~ekmmeters.MeterDB` subclass.

    Please note MeterDB subclassing is only for simplest-case.

    Args:
        meter_db (MeterDB): Instance of subclass of MeterDB.
    """
    if not meter_db:
        ekm_log("Attempt to insert when no MeterDB assigned.")
        return
    meter_db.dbInsert(self.m_req, self.m_raw_read_a, self.m_raw_read_b)
def lcdString(self, item_str):
    """Translate a string to corresponding LCD field integer.

    Args:
        item_str (str): String identical to :class:`~ekmmeters.LCDItems` entry.

    Returns:
        int: :class:`~ekmmeters.LCDItems` integer or 0 if not found.
    """
    return self.m_lcd_lookup.get(item_str, 0)
def setLCDCmd(self, display_list, password="00000000"):
    """ Single call wrapper for LCD set.

    Wraps :func:`~ekmmeters.V4Meter.setLcd` and associated init and add methods.

    Args:
        display_list (list): List composed of :class:`~ekmmeters.LCDItems`
        password (str): Optional password.

    Returns:
        bool: Passthrough from :func:`~ekmmeters.V4Meter.setLcd`
    """
    result = False
    try:
        self.initLcd()
        # The meter LCD rotation supports at most 40 display slots.
        item_cnt = len(display_list)
        if (item_cnt > 40) or (item_cnt <= 0):
            ekm_log("LCD item list must have between 1 and 40 items")
            return False
        for display_item in display_list:
            self.addLcdItem(int(display_item))
        result = self.setLCD(password)
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
    return result
def setRelay(self, seconds, relay, status, password="00000000"):
    """Serial call to set relay.

    Args:
        seconds (int): Seconds to hold, zero is hold forever. See :class:`~ekmmeters.RelayInterval`.
        relay (int): Selected relay, see :class:`~ekmmeters.Relay`.
        status (int): Status to set, see :class:`~ekmmeters.RelayState`
        password (str): Optional password

    Returns:
        bool: True on completion and ACK.
    """
    result = False
    self.setContext("setRelay")
    try:
        self.clearCmdMsg()
        # Meter passwords are fixed-width 8 character strings.
        if len(password) != 8:
            self.writeCmdMsg("Invalid password length.")
            self.setContext("")
            return result
        if seconds < 0 or seconds > 9999:
            self.writeCmdMsg("Relay duration must be between 0 and 9999.")
            self.setContext("")
            return result
        if not self.requestA():
            self.writeCmdMsg("Bad read CRC on setting")
        else:
            if not self.serialCmdPwdAuth(password):
                self.writeCmdMsg("Password failure")
            else:
                # Hex frame: 01=SOH, 57='W', 31='1', 02=STX, "303038"="008",
                # 28='(', 29=')', 03=ETX; relay/status/seconds are ASCII,
                # hexlified, followed by CRC16 of everything after SOH.
                req_str = ""
                req_str = ("01573102303038" +
                           binascii.hexlify(str(relay)).zfill(2) +
                           "28" +
                           binascii.hexlify(str(status)).zfill(2) +
                           binascii.hexlify(str(seconds).zfill(4)) + "2903")
                req_str += self.calc_crc16(req_str[2:].decode("hex"))
                self.m_serial_port.write(req_str.decode("hex"))
                # A single 0x06 (ACK) byte signals success.
                if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
                    self.writeCmdMsg("Success: 06 returned.")
                    result = True
        self.serialPostEnd()
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
    self.setContext("")
    return result
def serialPostEnd(self):
    """ Send termination string to implicit current meter.

    "0142300375" decodes to SOH 'B' '0' ETX + CRC byte, closing the
    command session on the wire.  Failures are logged, never raised.
    """
    ekm_log("Termination string sent (" + self.m_context + ")")
    try:
        self.m_serial_port.write("0142300375".decode("hex"))
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
    pass
def setPulseInputRatio(self, line_in, new_cnst, password="00000000"):
    """Serial call to set pulse input ratio on a line.

    Args:
        line_in (int): Member of :class:`~ekmmeters.Pulse`
        new_cnst (int): New pulse input ratio
        password (str): Optional password

    Returns:
        bool: True on completion and ACK.
    """
    result = False
    self.setContext("setPulseInputRatio")
    try:
        self.clearCmdMsg()
        # Guard added for consistency with setRelay()/setLCD(): meter
        # passwords are fixed-width 8 character strings.
        if len(password) != 8:
            self.writeCmdMsg("Invalid password length.")
            self.setContext("")
            return result
        if not self.requestA():
            self.writeCmdMsg("Bad read CRC on setting")
        else:
            if not self.serialCmdPwdAuth(password):
                self.writeCmdMsg("Password failure")
            else:
                # Ratio is a zero padded 4 digit ASCII value; the line
                # selector is zero based on the wire.
                req_const = binascii.hexlify(str(new_cnst).zfill(4))
                line_const = binascii.hexlify(str(line_in - 1))
                req_str = "01573102303041" + line_const + "28" + req_const + "2903"
                req_str += self.calc_crc16(req_str[2:].decode("hex"))
                self.m_serial_port.write(req_str.decode("hex"))
                # A single 0x06 (ACK) byte signals success.
                if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
                    self.writeCmdMsg("Success: 06 returned.")
                    result = True
        self.serialPostEnd()
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
    self.setContext("")
    return result
def setZeroResettableKWH(self, password="00000000"):
    """ Serial call to zero resettable kWh registers.

    Args:
        password (str): Optional password.

    Returns:
        bool: True on completion and ACK.
    """
    result = False
    self.setContext("setZeroResettableKWH")
    try:
        if not self.requestA():
            self.writeCmdMsg("Bad read CRC on setting")
        else:
            if not self.serialCmdPwdAuth(password):
                self.writeCmdMsg("Password failure")
            else:
                # Fixed command frame (no variable payload) + CRC16 of
                # everything after the SOH byte.
                req_str = "0157310230304433282903"
                req_str += self.calc_crc16(req_str[2:].decode("hex"))
                self.m_serial_port.write(req_str.decode("hex"))
                # A single 0x06 (ACK) byte signals success.
                if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
                    self.writeCmdMsg("Success: 06 returned.")
                    result = True
        self.serialPostEnd()
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
    self.setContext("")
    return result
def setPulseOutputRatio(self, new_pout, password="00000000"):
    """ Serial call to set pulse output ratio.

    Args:
        new_pout (int): Legal output, member of :class:`~ekmmeters.PulseOutput` .
        password (str): Optional password

    Returns:
        bool: True on completion and ACK
    """
    result = False
    self.setContext("setPulseOutputRatio")
    try:
        if not self.requestA():
            self.writeCmdMsg("Bad read CRC on setting")
        else:
            if not self.serialCmdPwdAuth(password):
                self.writeCmdMsg("Password failure")
            else:
                # Ratio is a zero padded 4 digit ASCII value, hexlified
                # into the command frame, followed by CRC16.
                req_str = "015731023030443428" + binascii.hexlify(str(new_pout).zfill(4)) + "2903"
                req_str += self.calc_crc16(req_str[2:].decode("hex"))
                self.m_serial_port.write(req_str.decode("hex"))
                # A single 0x06 (ACK) byte signals success.
                if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
                    self.writeCmdMsg("Success: 06 returned.")
                    result = True
        self.serialPostEnd()
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
    self.setContext("")
    return result
def initLcd(self):
    """
    Simple init for LCD item list.

    Resets the buffer consumed by addLcdItem() and setLCD().
    """
    self.m_lcd_items = []
    pass
def addLcdItem(self, lcd_item_no):
    """
    Simple append to internal buffer.

    Used with :func:`~ekmmeters.V4Meter.setLcd` and :func:`~ekmmeters.V4Meter.initLcd`

    Args:
        lcd_item_no (int): Member of :class:`~ekmmeters.LCDItems`
    """
    self.m_lcd_items.append(lcd_item_no)
    pass
def setLCD(self, password="00000000"):
    """ Serial call to set LCD using meter object buffer.

    Used with :func:`~ekmmeters.V4Meter.addLcdItem`.

    Args:
        password (str): Optional password

    Returns:
        bool: True on completion and ACK.
    """
    result = False
    self.setContext("setLCD")
    try:
        self.clearCmdMsg()
        # Meter passwords are fixed-width 8 character strings.
        if len(password) != 8:
            self.writeCmdMsg("Invalid password length.")
            self.setContext("")
            return result
        if not self.request():
            self.writeCmdMsg("Bad read CRC on setting")
        else:
            if not self.serialCmdPwdAuth(password):
                self.writeCmdMsg("Password failure")
            else:
                # The LCD table always carries 40 slots: the buffered
                # items first, then zero-padding for the remainder.
                req_table = ""
                fill_len = 40 - len(self.m_lcd_items)
                for lcdid in self.m_lcd_items:
                    append_val = binascii.hexlify(str(lcdid).zfill(2))
                    req_table += append_val
                for i in range(0, fill_len):
                    append_val = binascii.hexlify(str(0).zfill(2))
                    req_table += append_val
                req_str = "015731023030443228" + req_table + "2903"
                req_str += self.calc_crc16(req_str[2:].decode("hex"))
                self.m_serial_port.write(req_str.decode("hex"))
                # A single 0x06 (ACK) byte signals success.
                if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06":
                    self.writeCmdMsg("Success: 06 returned.")
                    result = True
        self.serialPostEnd()
    except:
        ekm_log(traceback.format_exc(sys.exc_info()))
    self.setContext("")
    return result
| 42.713239 | 120 | 0.590021 |
795585bb9b4f82167de62458e34e7260ba606811 | 5,894 | py | Python | asposeimagingcloud/models/requests/modify_jpeg2000_request.py | aspose-imaging-cloud/aspose-imaging-cloud-python | 9280a4a1aa415cb569ec26a05792b33186d31a85 | [
"MIT"
] | 1 | 2022-01-14T10:06:26.000Z | 2022-01-14T10:06:26.000Z | asposeimagingcloud/models/requests/modify_jpeg2000_request.py | aspose-imaging-cloud/aspose-imaging-cloud-python | 9280a4a1aa415cb569ec26a05792b33186d31a85 | [
"MIT"
] | 3 | 2019-07-17T15:01:31.000Z | 2020-12-29T09:16:10.000Z | asposeimagingcloud/models/requests/modify_jpeg2000_request.py | aspose-imaging-cloud/aspose-imaging-cloud-python | 9280a4a1aa415cb569ec26a05792b33186d31a85 | [
"MIT"
] | null | null | null | # coding: utf-8
# ----------------------------------------------------------------------------
# <copyright company="Aspose" file="modify_jpeg2000_request.py">
# Copyright (c) 2018-2020 Aspose Pty Ltd. All rights reserved.
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# </summary>
# ----------------------------------------------------------------------------
from asposeimagingcloud.models.requests.imaging_request import ImagingRequest
from asposeimagingcloud.models.requests.http_request import HttpRequest
class ModifyJpeg2000Request(ImagingRequest):
    """
    Request model for modify_jpeg2000 operation.

    :param name Filename of image.
    :param comment The comment (can be either single or comma-separated).
    :param codec The codec (j2k or jp2).
    :param from_scratch Specifies where additional parameters we do not support should be taken from. If this is true - they will be taken from default values for standard image, if it is false - they will be saved from current image. Default is false.
    :param folder Folder with image to process.
    :param storage Your Aspose Cloud Storage name.
    """

    def __init__(self, name, comment, codec=None, from_scratch=None, folder=None, storage=None):
        ImagingRequest.__init__(self)
        self.name = name
        self.comment = comment
        self.codec = codec
        self.from_scratch = from_scratch
        self.folder = folder
        self.storage = storage

    def to_http_info(self, config):
        """
        Prepares initial info for HTTP request

        :param config: Imaging API configuration
        :type: asposeimagingcloud.Configuration
        :return: http_request configured http request
        :rtype: Configuration.models.requests.HttpRequest
        """
        # verify the required parameter 'name' is set
        if self.name is None:
            raise ValueError("Missing the required parameter `name` when calling `modify_jpeg2000`")
        # verify the required parameter 'comment' is set
        if self.comment is None:
            raise ValueError("Missing the required parameter `comment` when calling `modify_jpeg2000`")

        collection_formats = {}
        path = '/imaging/{name}/jpg2000'
        path_params = {}
        if self.name is not None:
            path_params[self._lowercase_first_letter('name')] = self.name

        query_params = []
        # Each optional parameter is substituted into the path template when a
        # matching placeholder exists, otherwise sent as a query parameter.
        # Fixes a misplaced parenthesis in the generated code:
        # `_lowercase_first_letter('comment' + '}')` lower-cased the name with
        # the brace appended, instead of appending the brace to the result.
        optional_params = (
            ('comment', self.comment),
            ('codec', self.codec),
            ('fromScratch', self.from_scratch),
            ('folder', self.folder),
            ('storage', self.storage),
        )
        for param_name, param_value in optional_params:
            key = self._lowercase_first_letter(param_name)
            if key in path:
                path = path.replace('{' + key + '}', param_value if param_value is not None else '')
            else:
                if param_value is not None:
                    query_params.append((key, param_value))

        header_params = {}
        form_params = []
        local_var_files = []
        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self._select_header_accept(
            ['application/json'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = 'multipart/form-data' if form_params else self._select_header_content_type(
            ['application/json'])

        # Authentication setting
        auth_settings = ['JWT']

        return HttpRequest(path, path_params, query_params, header_params, form_params, body_params,
                           local_var_files, collection_formats, auth_settings)
| 48.311475 | 252 | 0.658806 |
795585e3fa7f4ec3f32079e77fa541e90cd409e0 | 15,738 | py | Python | baselines/trpo_exp/trpo_exp.py | veniversum/baselines | 99666bd86f0051b2323366c6ee89111df92e7f44 | [
"MIT"
] | null | null | null | baselines/trpo_exp/trpo_exp.py | veniversum/baselines | 99666bd86f0051b2323366c6ee89111df92e7f44 | [
"MIT"
] | 1 | 2018-03-17T12:58:00.000Z | 2018-03-17T12:58:00.000Z | baselines/trpo_exp/trpo_exp.py | veniversum/baselines | 99666bd86f0051b2323366c6ee89111df92e7f44 | [
"MIT"
] | 1 | 2019-03-02T15:13:36.000Z | 2019-03-02T15:13:36.000Z | from baselines.common import explained_variance, zipsame, dataset
from baselines import logger
import baselines.common.tf_util as U
import tensorflow as tf, numpy as np
import time
from baselines.common import colorize
from collections import deque
from baselines.common import set_global_seeds
from baselines.common.mpi_adam import MpiAdam
from baselines.common.cg import cg
from baselines.common.input import observation_placeholder
from baselines.common.policies import build_policy
from baselines.common.mpi_running_mean_std import RunningMeanStd
from contextlib import contextmanager
from baselines.common.exploration import AEReward, RNDReward
NOVELTY_REWARDS = {'AE': AEReward, 'RND': RNDReward}
try:
from mpi4py import MPI
except ImportError:
MPI = None
def traj_segment_generator(pi, env, horizon, stochastic):
    """Yield fixed-length rollout segments from ``env`` under policy ``pi``.

    Args:
        pi: Policy object; ``pi.step(ob, stochastic=...)`` returns a 4-tuple
            whose first two entries are the action and value estimate
            (remaining entries unused here -- presumably recurrent state and
            neglogp; confirm against baselines.common.policies).
        env: Gym-style environment (single, non-vectorized).
        horizon (int): Number of timesteps per yielded segment.
        stochastic (bool): Whether actions are sampled stochastically.

    Yields:
        dict: Length-``horizon`` arrays ("ob", "rew", "vpred", "new", "ac",
        "prevac"), "nextvpred" for bootstrapping past the segment boundary,
        and per-episode stats "ep_rets"/"ep_lens" collected during the batch.
    """
    # Initialize state variables
    t = 0
    ac = env.action_space.sample()
    new = True  # marks the start of an episode
    rew = 0.0
    ob = env.reset()

    cur_ep_ret = 0  # return accumulated in the current episode
    cur_ep_len = 0  # steps taken in the current episode
    ep_rets = []  # returns of episodes completed in this segment
    ep_lens = []  # lengths of episodes completed in this segment

    # Initialize history arrays (rolling buffers reused across segments)
    obs = np.array([ob for _ in range(horizon)])
    rews = np.zeros(horizon, 'float32')
    vpreds = np.zeros(horizon, 'float32')
    news = np.zeros(horizon, 'int32')
    acs = np.array([ac for _ in range(horizon)])
    prevacs = acs.copy()

    while True:
        prevac = ac
        ac, vpred, _, _ = pi.step(ob, stochastic=stochastic)
        # Slight weirdness here because we need value function at time T
        # before returning segment [0, T-1] so we get the correct
        # terminal value
        if t > 0 and t % horizon == 0:
            # nextvpred is zeroed when the last step ended an episode.
            yield {"ob" : obs, "rew" : rews, "vpred" : vpreds, "new" : news,
                    "ac" : acs, "prevac" : prevacs, "nextvpred": vpred * (1 - new),
                    "ep_rets" : ep_rets, "ep_lens" : ep_lens}
            _, vpred, _, _ = pi.step(ob, stochastic=stochastic)
            # Be careful!!! if you change the downstream algorithm to aggregate
            # several of these batches, then be sure to do a deepcopy
            ep_rets = []
            ep_lens = []
        i = t % horizon
        obs[i] = ob
        vpreds[i] = vpred
        news[i] = new
        acs[i] = ac
        prevacs[i] = prevac

        ob, rew, new, _ = env.step(ac)
        rews[i] = rew

        cur_ep_ret += rew
        cur_ep_len += 1
        if new:
            ep_rets.append(cur_ep_ret)
            ep_lens.append(cur_ep_len)
            cur_ep_ret = 0
            cur_ep_len = 0
            ob = env.reset()
        t += 1
def add_vtarg_and_adv(seg, gamma, lam):
    """Compute GAE(lambda) advantages and TD(lambda) value targets.

    Adds two keys to ``seg`` in place: "adv" (generalized advantage
    estimates) and "tdlamret" (lambda-returns, i.e. adv + vpred).
    ``seg["nextvpred"]`` bootstraps past the segment boundary; episode
    terminations in ``seg["new"]`` mask the bootstrap.
    """
    dones = np.append(seg["new"], 0)  # 0: last vtarg bootstraps unless terminal
    values = np.append(seg["vpred"], seg["nextvpred"])
    rewards = seg["rew"]
    horizon = len(rewards)
    adv = np.empty(horizon, 'float32')
    running = 0.0
    # Backward recursion: A_t = delta_t + gamma * lam * (1 - done) * A_{t+1}
    for step in reversed(range(horizon)):
        mask = 1 - dones[step + 1]
        delta = rewards[step] + gamma * values[step + 1] * mask - values[step]
        running = delta + gamma * lam * mask * running
        adv[step] = running
    seg["adv"] = adv
    seg["tdlamret"] = adv + seg["vpred"]
def learn(*,
network,
env,
total_timesteps,
timesteps_per_batch=1024, # what to train on
max_kl=0.001,
cg_iters=10,
gamma=0.99,
lam=1.0, # advantage estimation
seed=None,
ent_coef=0.0,
cg_damping=1e-2,
vf_stepsize=3e-4,
vf_iters =3,
max_episodes=0, max_iters=0, # time constraint
callback=None,
load_path=None,
novelty_reward='AE',
normalize_int_rew=False,
**network_kwargs
):
'''
learn a policy function with TRPO algorithm
Parameters:
----------
network neural network to learn. Can be either string ('mlp', 'cnn', 'lstm', 'lnlstm' for basic types)
or function that takes input placeholder and returns tuple (output, None) for feedforward nets
or (output, (state_placeholder, state_output, mask_placeholder)) for recurrent nets
env environment (one of the gym environments or wrapped via baselines.common.vec_env.VecEnv-type class
timesteps_per_batch timesteps per gradient estimation batch
max_kl max KL divergence between old policy and new policy ( KL(pi_old || pi) )
ent_coef coefficient of policy entropy term in the optimization objective
cg_iters number of iterations of conjugate gradient algorithm
cg_damping conjugate gradient damping
vf_stepsize learning rate for adam optimizer used to optimie value function loss
vf_iters number of iterations of value function optimization iterations per each policy optimization step
total_timesteps max number of timesteps
max_episodes max number of episodes
max_iters maximum number of policy optimization iterations
callback function to be called with (locals(), globals()) each policy optimization step
load_path str, path to load the model from (default: None, i.e. no model is loaded)
**network_kwargs keyword arguments to the policy / network builder. See baselines.common/policies.py/build_policy and arguments to a particular type of network
Returns:
-------
learnt model
'''
if MPI is not None:
nworkers = MPI.COMM_WORLD.Get_size()
rank = MPI.COMM_WORLD.Get_rank()
else:
nworkers = 1
rank = 0
cpus_per_worker = 1
U.get_session(config=tf.ConfigProto(
allow_soft_placement=True,
inter_op_parallelism_threads=cpus_per_worker,
intra_op_parallelism_threads=cpus_per_worker
))
policy = build_policy(env, network, value_network='copy', **network_kwargs)
set_global_seeds(seed)
np.set_printoptions(precision=3)
# Setup losses and stuff
# ----------------------------------------
ob_space = env.observation_space
ac_space = env.action_space
ob = observation_placeholder(ob_space)
with tf.variable_scope("pi"):
pi = policy(observ_placeholder=ob)
with tf.variable_scope("oldpi"):
oldpi = policy(observ_placeholder=ob)
atarg = tf.placeholder(dtype=tf.float32, shape=[None]) # Target advantage function (if applicable)
ret = tf.placeholder(dtype=tf.float32, shape=[None]) # Empirical return
ac = pi.pdtype.sample_placeholder([None])
kloldnew = oldpi.pd.kl(pi.pd)
ent = pi.pd.entropy()
meankl = tf.reduce_mean(kloldnew)
meanent = tf.reduce_mean(ent)
entbonus = ent_coef * meanent
vferr = tf.reduce_mean(tf.square(pi.vf - ret))
ratio = tf.exp(pi.pd.logp(ac) - oldpi.pd.logp(ac)) # advantage * pnew / pold
surrgain = tf.reduce_mean(ratio * atarg)
optimgain = surrgain + entbonus
losses = [optimgain, meankl, entbonus, surrgain, meanent]
loss_names = ["optimgain", "meankl", "entloss", "surrgain", "entropy"]
dist = meankl
all_var_list = get_trainable_variables("pi")
# var_list = [v for v in all_var_list if v.name.split("/")[1].startswith("pol")]
# vf_var_list = [v for v in all_var_list if v.name.split("/")[1].startswith("vf")]
var_list = get_pi_trainable_variables("pi")
vf_var_list = get_vf_trainable_variables("pi")
vfadam = MpiAdam(vf_var_list)
get_flat = U.GetFlat(var_list)
set_from_flat = U.SetFromFlat(var_list)
klgrads = tf.gradients(dist, var_list)
flat_tangent = tf.placeholder(dtype=tf.float32, shape=[None], name="flat_tan")
shapes = [var.get_shape().as_list() for var in var_list]
start = 0
tangents = []
for shape in shapes:
sz = U.intprod(shape)
tangents.append(tf.reshape(flat_tangent[start:start+sz], shape))
start += sz
gvp = tf.add_n([tf.reduce_sum(g*tangent) for (g, tangent) in zipsame(klgrads, tangents)]) #pylint: disable=E1111
fvp = U.flatgrad(gvp, var_list)
assign_old_eq_new = U.function([],[], updates=[tf.assign(oldv, newv)
for (oldv, newv) in zipsame(get_variables("oldpi"), get_variables("pi"))])
compute_losses = U.function([ob, ac, atarg], losses)
compute_lossandgrad = U.function([ob, ac, atarg], losses + [U.flatgrad(optimgain, var_list)])
compute_fvp = U.function([flat_tangent, ob, ac, atarg], fvp)
compute_vflossandgrad = U.function([ob, ret], U.flatgrad(vferr, vf_var_list))
rff_rms_int = RunningMeanStd()
nr = NOVELTY_REWARDS[novelty_reward](env.observation_space)
@contextmanager
def timed(msg):
if rank == 0:
print(colorize(msg, color='magenta'))
tstart = time.time()
yield
print(colorize("done in %.3f seconds"%(time.time() - tstart), color='magenta'))
else:
yield
def allmean(x):
assert isinstance(x, np.ndarray)
if MPI is not None:
out = np.empty_like(x)
MPI.COMM_WORLD.Allreduce(x, out, op=MPI.SUM)
out /= nworkers
else:
out = np.copy(x)
return out
U.initialize()
if load_path is not None:
pi.load(load_path)
th_init = get_flat()
if MPI is not None:
MPI.COMM_WORLD.Bcast(th_init, root=0)
set_from_flat(th_init)
vfadam.sync()
print("Init param sum", th_init.sum(), flush=True)
# Prepare for rollouts
# ----------------------------------------
seg_gen = traj_segment_generator(pi, env, timesteps_per_batch, stochastic=True)
episodes_so_far = 0
timesteps_so_far = 0
iters_so_far = 0
tstart = time.time()
lenbuffer = deque(maxlen=40) # rolling buffer for episode lengths
rewbuffer = deque(maxlen=40) # rolling buffer for episode rewards
if sum([max_iters>0, total_timesteps>0, max_episodes>0])==0:
# noththing to be done
return pi
assert sum([max_iters>0, total_timesteps>0, max_episodes>0]) < 2, \
'out of max_iters, total_timesteps, and max_episodes only one should be specified'
while True:
if callback: callback(locals(), globals())
if total_timesteps and timesteps_so_far >= total_timesteps:
break
elif max_episodes and episodes_so_far >= max_episodes:
break
elif max_iters and iters_so_far >= max_iters:
break
logger.log("********** Iteration %i ************"%iters_so_far)
with timed("sampling"):
seg = seg_gen.__next__()
# Calculate novelty rewards
bonus = nr.get_batch_bonus_and_update(seg["ob"])
if normalize_int_rew:
rff_rms_int.update(bonus.ravel())
bonus = bonus / rff_rms_int.std.eval()
seg["orig_rew"] = seg["rew"]
seg["rew"] = seg["rew"] + bonus
add_vtarg_and_adv(seg, gamma, lam)
# ob, ac, atarg, ret, td1ret = map(np.concatenate, (obs, acs, atargs, rets, td1rets))
ob, ac, atarg, tdlamret = seg["ob"], seg["ac"], seg["adv"], seg["tdlamret"]
vpredbefore = seg["vpred"] # predicted value function before udpate
atarg = (atarg - atarg.mean()) / atarg.std() # standardized advantage function estimate
if hasattr(pi, "ret_rms"): pi.ret_rms.update(tdlamret)
if hasattr(pi, "ob_rms"): pi.ob_rms.update(ob) # update running mean/std for policy
args = seg["ob"], seg["ac"], atarg
fvpargs = [arr[::5] for arr in args]
def fisher_vector_product(p):
return allmean(compute_fvp(p, *fvpargs)) + cg_damping * p
assign_old_eq_new() # set old parameter values to new parameter values
with timed("computegrad"):
*lossbefore, g = compute_lossandgrad(*args)
lossbefore = allmean(np.array(lossbefore))
g = allmean(g)
if np.allclose(g, 0):
logger.log("Got zero gradient. not updating")
else:
with timed("cg"):
stepdir = cg(fisher_vector_product, g, cg_iters=cg_iters, verbose=rank==0)
assert np.isfinite(stepdir).all()
shs = .5*stepdir.dot(fisher_vector_product(stepdir))
lm = np.sqrt(shs / max_kl)
# logger.log("lagrange multiplier:", lm, "gnorm:", np.linalg.norm(g))
fullstep = stepdir / lm
expectedimprove = g.dot(fullstep)
surrbefore = lossbefore[0]
stepsize = 1.0
thbefore = get_flat()
for _ in range(10):
thnew = thbefore + fullstep * stepsize
set_from_flat(thnew)
meanlosses = surr, kl, *_ = allmean(np.array(compute_losses(*args)))
improve = surr - surrbefore
logger.log("Expected: %.3f Actual: %.3f"%(expectedimprove, improve))
if not np.isfinite(meanlosses).all():
logger.log("Got non-finite value of losses -- bad!")
elif kl > max_kl * 1.5:
logger.log("violated KL constraint. shrinking step.")
elif improve < 0:
logger.log("surrogate didn't improve. shrinking step.")
else:
logger.log("Stepsize OK!")
break
stepsize *= .5
else:
logger.log("couldn't compute a good step")
set_from_flat(thbefore)
if nworkers > 1 and iters_so_far % 20 == 0:
paramsums = MPI.COMM_WORLD.allgather((thnew.sum(), vfadam.getflat().sum())) # list of tuples
assert all(np.allclose(ps, paramsums[0]) for ps in paramsums[1:])
for (lossname, lossval) in zip(loss_names, meanlosses):
logger.record_tabular(lossname, lossval)
with timed("vf"):
for _ in range(vf_iters):
for (mbob, mbret) in dataset.iterbatches((seg["ob"], seg["tdlamret"]),
include_final_partial_batch=False, batch_size=64):
g = allmean(compute_vflossandgrad(mbob, mbret))
vfadam.update(g, vf_stepsize)
logger.record_tabular("ev_tdlam_before", explained_variance(vpredbefore, tdlamret))
lrlocal = (seg["ep_lens"], seg["ep_rets"]) # local values
if MPI is not None:
listoflrpairs = MPI.COMM_WORLD.allgather(lrlocal) # list of tuples
else:
listoflrpairs = [lrlocal]
lens, rews = map(flatten_lists, zip(*listoflrpairs))
lenbuffer.extend(lens)
rewbuffer.extend(rews)
logger.record_tabular("EpLenMean", np.mean(lenbuffer))
logger.record_tabular("EpRewMean", np.mean(rewbuffer))
logger.record_tabular("EpThisIter", len(lens))
episodes_so_far += len(lens)
timesteps_so_far += sum(lens)
iters_so_far += 1
logger.record_tabular("EpisodesSoFar", episodes_so_far)
logger.record_tabular("TimestepsSoFar", timesteps_so_far)
logger.record_tabular("TimeElapsed", time.time() - tstart)
if rank==0:
logger.dump_tabular()
return pi
def flatten_lists(listoflists):
    """Concatenate a sequence of lists into a single flat list."""
    flat = []
    for sublist in listoflists:
        flat.extend(sublist)
    return flat
def get_variables(scope):
    """Return all TF global variables under the given variable scope."""
    collection_key = tf.GraphKeys.GLOBAL_VARIABLES
    return tf.get_collection(collection_key, scope)
def get_trainable_variables(scope):
    """Return all TF trainable variables under the given variable scope."""
    collection_key = tf.GraphKeys.TRAINABLE_VARIABLES
    return tf.get_collection(collection_key, scope)
def get_vf_trainable_variables(scope):
    """Return trainable variables belonging to the value-function ('vf') sub-scope."""
    selected = []
    for var in get_trainable_variables(scope):
        # Strip the scope prefix so only sub-scope path components are matched.
        if 'vf' in var.name[len(scope):].split('/'):
            selected.append(var)
    return selected
def get_pi_trainable_variables(scope):
    """Return trainable variables belonging to the policy ('pi') sub-scope."""
    selected = []
    for var in get_trainable_variables(scope):
        # Strip the scope prefix so only sub-scope path components are matched.
        if 'pi' in var.name[len(scope):].split('/'):
            selected.append(var)
    return selected
| 36.943662 | 170 | 0.616279 |
7955864b3a157aae75d5b791abbb656b4069fd33 | 3,636 | py | Python | pandas_ta/overlap/vidya.py | bartua/pandas-ta | 3bbd5bef4a906f8e810cd557cf20bf92870851c0 | [
"MIT"
] | null | null | null | pandas_ta/overlap/vidya.py | bartua/pandas-ta | 3bbd5bef4a906f8e810cd557cf20bf92870851c0 | [
"MIT"
] | null | null | null | pandas_ta/overlap/vidya.py | bartua/pandas-ta | 3bbd5bef4a906f8e810cd557cf20bf92870851c0 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from numpy import nan as npNaN
from numpy import nonzero
import numpy
from pandas import Series
from pandas_ta.utils import get_drift, get_offset, verify_series
def vidya(close, length=None, drift=None, offset=None, **kwargs):
    """Indicator: Variable Index Dynamic Average (VIDYA)

    Recursive moving average whose smoothing factor (alpha = 2 / (length + 1))
    is scaled each bar by the absolute Chande Momentum Oscillator, so the
    average adapts to volatility.

    Args:
        close: price series; validated/trimmed by verify_series.
        length: lookback period; defaults to 14 when None or non-positive.
        drift: difference period for the CMO momentum term.
        offset: number of periods to shift the result.

    Kwargs:
        fillna: value passed to Series.fillna.
        fill_method: method passed to Series.fillna.

    Returns:
        pd.Series named "VIDYA_<length>", or None if `close` fails validation.
    """
    # Validate Arguments
    length = int(length) if length and length > 0 else 14
    close = verify_series(close, length)
    drift = get_drift(drift)
    offset = get_offset(offset)
    if close is None: return
    def _cmo(source: Series, n:int , d: int):
        """Chande Momentum Oscillator (CMO) Patch
        For some reason: from pandas_ta.momentum import cmo causes
        pandas_ta.momentum.coppock to not be able to import it's
        wma like from pandas_ta.overlap import wma?
        Weird Circular TypeError!?!
        """
        mom = source.diff(d)
        positive = mom.copy().clip(lower=0)
        negative = mom.copy().clip(upper=0).abs()
        # NOTE(review): the rolling window is hard-coded to 9 and the `n`
        # parameter is unused — presumably a deliberate 9-period CMO choice,
        # but confirm against the intended VIDYA definition.
        pos_sum = positive.rolling(9).sum()
        neg_sum = negative.rolling(9).sum()
        return (pos_sum - neg_sum) / (pos_sum + neg_sum)
    # Calculate Result
    m = close.size
    # First index with valid data in `close` and in the CMO series; the
    # recursion below must start after both become valid.
    nonNan = close.index.get_loc(close.first_valid_index())
    alpha = 2 / (length + 1)
    abs_cmo = _cmo(close, length, drift).abs()
    cmononNAN = abs_cmo.index.get_loc(abs_cmo.first_valid_index())
    vidya = Series(0, index=close.index)
    # Recursive update: alpha is modulated by |CMO| each step; earlier bars
    # stay 0 and are converted to NaN below.
    for i in range(max(9,length,nonNan,cmononNAN), m):# range(max(9,length,nonNan), m):
        vidya.iloc[i] = alpha * abs_cmo.iloc[i] * close.iloc[i] + vidya.iloc[i - 1] * (1 - alpha * abs_cmo.iloc[i])
    # Warm-up bars were initialized to 0; mark them as missing instead.
    vidya.replace({0: npNaN}, inplace=True)
    # Offset
    if offset != 0:
        vidya = vidya.shift(offset)
    # Handle fills
    if "fillna" in kwargs:
        vidya.fillna(kwargs["fillna"], inplace=True)
    if "fill_method" in kwargs:
        vidya.fillna(method=kwargs["fill_method"], inplace=True)
    # Name & Category
    vidya.name = f"VIDYA_{length}"
    vidya.category = "overlap"
    return vidya
# Module-level docstring for vidya(). The previous text was copy-pasted from
# the EMA docstring: it described an EMA calculation and listed `adjust`,
# `sma`, and `talib` kwargs that vidya() never reads. Corrected to document
# the actual VIDYA recursion and the kwargs the function supports.
vidya.__doc__ = \
"""Variable Index Dynamic Average (VIDYA)

Variable Index Dynamic Average (VIDYA) was developed by Tushar Chande. It is
similar to an Exponential Moving Average but it has a dynamically adjusted
lookback period dependent on relative price volatility as measured by Chande
Momentum Oscillator (CMO). When volatility is high, VIDYA reacts faster to
price changes. It is often used as moving average or trend identifier.

Sources:
    https://www.tradingview.com/script/hdrf0fXV-Variable-Index-Dynamic-Average-VIDYA/
    https://www.perfecttrendsystem.com/blog_mt4_2/en/vidya-indicator-for-mt4

Calculation:
    Default Inputs:
        length=14
    alpha = 2 / (length + 1)
    CMO = Chande Momentum Oscillator of close (9-bar rolling sums)
    VIDYA[i] = alpha * |CMO[i]| * close[i]
               + (1 - alpha * |CMO[i]|) * VIDYA[i - 1]

Args:
    close (pd.Series): Series of 'close's
    length (int): It's period. Default: 14
    drift (int): The difference period for the CMO momentum term. Default: 1
    offset (int): How many periods to offset the result. Default: 0

Kwargs:
    fillna (value, optional): pd.DataFrame.fillna(value)
    fill_method (value, optional): Type of fill method

Returns:
    pd.Series: New feature generated.
"""
| 37.102041 | 116 | 0.661716 |
795587315a6f72c227e342e293660b896698a731 | 64,281 | py | Python | tests/sparse_test.py | Huizerd/jax | 32319e1bc36e17ca270e9ff1a9545e6680f9eb28 | [
"Apache-2.0"
] | null | null | null | tests/sparse_test.py | Huizerd/jax | 32319e1bc36e17ca270e9ff1a9545e6680f9eb28 | [
"Apache-2.0"
] | 6 | 2022-01-03T04:14:24.000Z | 2022-02-14T04:13:22.000Z | tests/sparse_test.py | Huizerd/jax | 32319e1bc36e17ca270e9ff1a9545e6680f9eb28 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from functools import partial
import itertools
import operator
import unittest
from absl.testing import absltest
from absl.testing import parameterized
import jax
from jax import config
from jax import dtypes
from jax.experimental import sparse
from jax import lax
from jax._src.lib import cusparse
from jax._src.lib import xla_bridge
from jax import jit
from jax import tree_util
from jax import vmap
from jax._src import test_util as jtu
from jax import random
from jax import xla
import jax.numpy as jnp
from jax.util import split_list
from jax import jvp
import numpy as np
import scipy.sparse
# Register JAX's test flags with absl so pytest/absltest invocations work.
config.parse_flags_with_absl()
FLAGS = config.FLAGS

# Per-dtype relative tolerances used when comparing sparse matmul/matvec
# results against dense references.
MATMUL_TOL = {
  np.float32: 1E-5,
  np.float64: 1E-10,
  np.complex64: 1e-5,
  np.complex128: 1E-10,
}

# Every dtype exercised by the BCOO round-trip tests.
all_dtypes = jtu.dtypes.integer + jtu.dtypes.floating + jtu.dtypes.complex
def rand_sparse(rng, nse=0.5, post=lambda x: x):
  """Return a sampler producing random arrays with ~`nse` nonzero entries.

  If 0 <= nse < 1 it is treated as a fraction of the array size, otherwise
  as an absolute count. `post` transforms the dense result (e.g. into a
  scipy sparse matrix).
  """
  def _rand_sparse(shape, dtype, nse=nse):
    dense_sampler = jtu.rand_default(rng)
    size = np.prod(shape)
    if 0 <= nse < 1:
      nse = nse * size
    nse = min(size, int(nse))
    out = dense_sampler(shape, dtype)
    # Zero out exactly (size - nse) randomly chosen flat positions.
    zero_positions = rng.choice(size, size - nse, replace=False)
    out.flat[zero_positions] = 0
    return post(out)
  return _rand_sparse
class cuSparseTest(jtu.JaxTestCase):
  """Tests for the (cusparse-backed) CSR/COO primitives in jax.experimental.sparse."""

  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}".format(jtu.format_shape_dtype_string(shape, dtype)),
       "shape": shape, "dtype": dtype}
      for shape in [(5, 8), (8, 5), (5, 5), (8, 8)]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex))
  def test_csr_todense(self, shape, dtype):
    """csr_todense matches scipy's toarray, with and without jit."""
    rng = rand_sparse(self.rng(), post=scipy.sparse.csr_matrix)
    M = rng(shape, dtype)
    args = (M.data, M.indices, M.indptr)
    todense = lambda *args: sparse.csr_todense(*args, shape=M.shape)
    self.assertArraysEqual(M.toarray(), todense(*args))
    self.assertArraysEqual(M.toarray(), jit(todense)(*args))

  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}".format(jtu.format_shape_dtype_string(shape, dtype)),
       "shape": shape, "dtype": dtype}
      for shape in [(5, 8), (8, 5), (5, 5), (8, 8)]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex))
  def test_csr_fromdense(self, shape, dtype):
    """csr_fromdense reproduces scipy's CSR data/indices/indptr."""
    rng = rand_sparse(self.rng())
    M = rng(shape, dtype)
    M_csr = scipy.sparse.csr_matrix(M)
    nse = M_csr.nnz
    index_dtype = jnp.int32
    fromdense = lambda M: sparse.csr_fromdense(M, nse=nse, index_dtype=jnp.int32)
    data, indices, indptr = fromdense(M)
    self.assertArraysEqual(data, M_csr.data.astype(dtype))
    self.assertArraysEqual(indices, M_csr.indices.astype(index_dtype))
    self.assertArraysEqual(indptr, M_csr.indptr.astype(index_dtype))
    data, indices, indptr = jit(fromdense)(M)
    self.assertArraysEqual(data, M_csr.data.astype(dtype))
    self.assertArraysEqual(indices, M_csr.indices.astype(index_dtype))
    self.assertArraysEqual(indptr, M_csr.indptr.astype(index_dtype))

  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_T={}".format(jtu.format_shape_dtype_string(shape, dtype), transpose),
       "shape": shape, "dtype": dtype, "transpose": transpose}
      for shape in [(5, 8), (8, 5), (5, 5), (8, 8)]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex
      for transpose in [True, False]))
  def test_csr_matvec(self, shape, dtype, transpose):
    """csr_matvec agrees with dense matrix-vector product (optionally transposed)."""
    op = lambda M: M.T if transpose else M
    v_rng = jtu.rand_default(self.rng())
    rng = rand_sparse(self.rng(), post=scipy.sparse.csr_matrix)
    M = rng(shape, dtype)
    v = v_rng(op(M).shape[1], dtype)
    args = (M.data, M.indices, M.indptr, v)
    matvec = lambda *args: sparse.csr_matvec(*args, shape=M.shape, transpose=transpose)
    self.assertAllClose(op(M) @ v, matvec(*args), rtol=MATMUL_TOL)
    self.assertAllClose(op(M) @ v, jit(matvec)(*args), rtol=MATMUL_TOL)

  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_T={}".format(jtu.format_shape_dtype_string(shape, dtype), transpose),
       "shape": shape, "dtype": dtype, "transpose": transpose}
      for shape in [(5, 8), (8, 5), (5, 5), (8, 8)]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex
      for transpose in [True, False]))
  def test_csr_matmat(self, shape, dtype, transpose):
    """csr_matmat agrees with dense matrix-matrix product (optionally transposed)."""
    op = lambda M: M.T if transpose else M
    B_rng = jtu.rand_default(self.rng())
    rng = rand_sparse(self.rng(), post=scipy.sparse.csr_matrix)
    M = rng(shape, dtype)
    B = B_rng((op(M).shape[1], 4), dtype)
    args = (M.data, M.indices, M.indptr, B)
    matmat = lambda *args: sparse.csr_matmat(*args, shape=shape, transpose=transpose)
    self.assertAllClose(op(M) @ B, matmat(*args), rtol=MATMUL_TOL)
    self.assertAllClose(op(M) @ B, jit(matmat)(*args), rtol=MATMUL_TOL)

  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}".format(jtu.format_shape_dtype_string(shape, dtype)),
       "shape": shape, "dtype": dtype}
      for shape in [(5, 8), (8, 5), (5, 5), (8, 8)]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex))
  def test_coo_todense(self, shape, dtype):
    """coo_todense matches scipy's toarray, with and without jit."""
    rng = rand_sparse(self.rng(), post=scipy.sparse.coo_matrix)
    M = rng(shape, dtype)
    args = (M.data, M.row, M.col)
    todense = lambda *args: sparse.coo_todense(*args, shape=M.shape)
    self.assertArraysEqual(M.toarray(), todense(*args))
    self.assertArraysEqual(M.toarray(), jit(todense)(*args))

  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}".format(jtu.format_shape_dtype_string(shape, dtype)),
       "shape": shape, "dtype": dtype}
      for shape in [(5, 8), (8, 5), (5, 5), (8, 8)]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex))
  def test_coo_fromdense(self, shape, dtype):
    """coo_fromdense reproduces scipy's COO data/row/col."""
    rng = rand_sparse(self.rng())
    M = rng(shape, dtype)
    M_coo = scipy.sparse.coo_matrix(M)
    nse = M_coo.nnz
    index_dtype = jnp.int32
    fromdense = lambda M: sparse.coo_fromdense(M, nse=nse, index_dtype=jnp.int32)
    data, row, col = fromdense(M)
    self.assertArraysEqual(data, M_coo.data.astype(dtype))
    self.assertArraysEqual(row, M_coo.row.astype(index_dtype))
    self.assertArraysEqual(col, M_coo.col.astype(index_dtype))
    # Bug fix: this previously unpacked into (data, indices, indptr) and then
    # asserted on the stale `row`/`col` from the non-jit call above, so the
    # jit path's index outputs were never actually checked.
    data, row, col = jit(fromdense)(M)
    self.assertArraysEqual(data, M_coo.data.astype(dtype))
    self.assertArraysEqual(row, M_coo.row.astype(index_dtype))
    self.assertArraysEqual(col, M_coo.col.astype(index_dtype))

  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_T={}".format(jtu.format_shape_dtype_string(shape, dtype), transpose),
       "shape": shape, "dtype": dtype, "transpose": transpose}
      for shape in [(5, 8), (8, 5), (5, 5), (8, 8)]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex
      for transpose in [True, False]))
  def test_coo_matvec(self, shape, dtype, transpose):
    """coo_matvec agrees with dense matrix-vector product (optionally transposed)."""
    op = lambda M: M.T if transpose else M
    v_rng = jtu.rand_default(self.rng())
    rng = rand_sparse(self.rng(), post=scipy.sparse.coo_matrix)
    M = rng(shape, dtype)
    v = v_rng(op(M).shape[1], dtype)
    args = (M.data, M.row, M.col, v)
    matvec = lambda *args: sparse.coo_matvec(*args, shape=M.shape, transpose=transpose)
    self.assertAllClose(op(M) @ v, matvec(*args), rtol=MATMUL_TOL)
    self.assertAllClose(op(M) @ v, jit(matvec)(*args), rtol=MATMUL_TOL)

  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_T={}".format(jtu.format_shape_dtype_string(shape, dtype), transpose),
       "shape": shape, "dtype": dtype, "transpose": transpose}
      for shape in [(5, 8), (8, 5), (5, 5), (8, 8)]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex
      for transpose in [True, False]))
  def test_coo_matmat(self, shape, dtype, transpose):
    """coo_matmat agrees with dense matmul; jvp primals also match."""
    op = lambda M: M.T if transpose else M
    B_rng = jtu.rand_default(self.rng())
    rng = rand_sparse(self.rng(), post=scipy.sparse.coo_matrix)
    M = rng(shape, dtype)
    B = B_rng((op(M).shape[1], 4), dtype)
    args = (M.data, M.row, M.col, B)
    matmat = lambda *args: sparse.coo_matmat(*args, shape=shape, transpose=transpose)
    self.assertAllClose(op(M) @ B, matmat(*args), rtol=MATMUL_TOL)
    self.assertAllClose(op(M) @ B, jit(matmat)(*args), rtol=MATMUL_TOL)
    # Only the primal output is checked here; the tangents are discarded.
    y, _ = jvp(lambda x: sparse.coo_matmat(M.data, M.row, M.col, x, shape=shape, transpose=transpose).sum(), (B, ), (jnp.ones_like(B), ))
    self.assertAllClose((op(M) @ B).sum(), y, rtol=MATMUL_TOL)
    y, _ = jvp(lambda x: sparse.coo_matmat(x, M.row, M.col, B, shape=shape, transpose=transpose).sum(), (M.data, ), (jnp.ones_like(M.data), ))
    self.assertAllClose((op(M) @ B).sum(), y, rtol=MATMUL_TOL)

  def test_coo_matmat_layout(self):
    """coo_matmat handles non-default input layouts under jit.

    Regression test for https://github.com/google/jax/issues/7533
    """
    d = jnp.array([1.0, 2.0, 3.0, 4.0])
    i = jnp.array([0, 0, 1, 2])
    j = jnp.array([0, 2, 0, 0])
    shape = (3, 3)

    x = jnp.arange(9).reshape(3, 3).astype(d.dtype)

    def f(x):
      # x.T gives the operand a non-default (row-major-transposed) layout.
      return sparse.coo_matmat(d, i, j, x.T, shape=shape)

    result = f(x)
    result_jit = jit(f)(x)
    self.assertAllClose(result, result_jit)

  @unittest.skipIf(jtu.device_under_test() != "gpu", "test requires GPU")
  def test_gpu_translation_rule(self):
    """cusparse lowerings are registered iff CUDA >= 11.0 is available."""
    version = xla_bridge.get_backend().platform_version
    cuda_version = None if version == "<unknown>" else int(version.split()[-1])
    if cuda_version is None or cuda_version < 11000:
      self.assertFalse(cusparse and cusparse.is_supported)
      self.assertNotIn(sparse.csr_todense_p,
                       xla._backend_specific_translations["gpu"])
    else:
      self.assertTrue(cusparse and cusparse.is_supported)
      self.assertIn(sparse.csr_todense_p,
                    xla._backend_specific_translations["gpu"])

  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_{}".format(
        jtu.format_shape_dtype_string(shape, dtype), mat_type),
       "shape": shape, "dtype": dtype, "mat_type": mat_type}
      for shape in [(5, 8), (8, 5), (5, 5), (8, 8)]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex
      for mat_type in ['csr', 'coo']))
  def test_extra_nse(self, shape, dtype, mat_type):
    """Round trip is exact even when nse exceeds the true nonzero count."""
    rng = rand_sparse(self.rng())
    M = rng(shape, dtype)
    nse = (M != 0).sum() + 5
    fromdense = getattr(sparse, f"{mat_type}_fromdense")
    todense = getattr(sparse, f"{mat_type}_todense")
    args = fromdense(M, nse=nse, index_dtype=jnp.int32)
    M_out = todense(*args, shape=M.shape)
    self.assertArraysEqual(M, M_out)

  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}".format(jtu.format_shape_dtype_string(shape, dtype)),
       "shape": shape, "dtype": dtype}
      for shape in [(5, 8), (8, 5), (5, 5), (8, 8)]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex))
  def test_coo_todense_ad(self, shape, dtype):
    """Forward- and reverse-mode AD of coo_todense w.r.t. the data vector."""
    rng = rand_sparse(self.rng(), post=jnp.array)
    M = rng(shape, dtype)
    data, row, col = sparse.coo_fromdense(M, nse=(M != 0).sum())
    f = lambda data: sparse.coo_todense(data, row, col, shape=M.shape)

    # Forward-mode
    primals, tangents = jax.jvp(f, [data], [jnp.ones_like(data)])
    self.assertArraysEqual(primals, f(data))
    self.assertArraysEqual(tangents, jnp.zeros_like(M).at[row, col].set(1))

    # Reverse-mode
    primals, vjp_fun = jax.vjp(f, data)
    data_out, = vjp_fun(primals)
    self.assertArraysEqual(primals, f(data))
    self.assertArraysEqual(data_out, data)

  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}".format(jtu.format_shape_dtype_string(shape, dtype)),
       "shape": shape, "dtype": dtype}
      for shape in [(5, 8), (8, 5), (5, 5), (8, 8)]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex))
  def test_coo_fromdense_ad(self, shape, dtype):
    """AD of coo_fromdense: integer index outputs carry float0 tangents."""
    rng = rand_sparse(self.rng(), post=jnp.array)
    M = rng(shape, dtype)
    nse = (M != 0).sum()
    f = lambda M: sparse.coo_fromdense(M, nse=nse)

    # Forward-mode
    primals, tangents = jax.jvp(f, [M], [jnp.ones_like(M)])
    self.assertArraysEqual(primals[0], f(M)[0])
    self.assertArraysEqual(primals[1], f(M)[1])
    self.assertArraysEqual(primals[2], f(M)[2])
    self.assertArraysEqual(tangents[0], jnp.ones(nse, dtype=dtype))
    self.assertEqual(tangents[1].dtype, dtypes.float0)
    self.assertEqual(tangents[2].dtype, dtypes.float0)

    # Reverse-mode
    primals, vjp_fun = jax.vjp(f, M)
    M_out, = vjp_fun(primals)
    self.assertArraysEqual(primals[0], f(M)[0])
    self.assertArraysEqual(primals[1], f(M)[1])
    self.assertArraysEqual(primals[2], f(M)[2])
    self.assertArraysEqual(M_out, M)

  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_{}".format(
        jtu.format_shape_dtype_string(shape, dtype),
        jtu.format_shape_dtype_string(bshape, dtype)),
       "shape": shape, "dtype": dtype, "bshape": bshape}
      for shape in [(5, 8), (8, 5), (5, 5), (8, 8)]
      for bshape in [shape[-1:] + s for s in [()]]  # TODO: matmul autodiff
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex))  # TODO: other types
  def test_coo_matvec_ad(self, shape, dtype, bshape):
    """AD of coo_matvec matches dense AD, w.r.t. both the vector and the data."""
    tol = {np.float32: 1E-6, np.float64: 1E-13, np.complex64: 1E-6, np.complex128: 1E-13}

    rng = rand_sparse(self.rng(), post=jnp.array)
    rng_b = jtu.rand_default(self.rng())

    M = rng(shape, dtype)
    data, row, col = sparse.coo_fromdense(M, nse=(M != 0).sum())
    x = rng_b(bshape, dtype)
    xdot = rng_b(bshape, dtype)

    # Forward-mode with respect to the vector
    f_dense = lambda x: M @ x
    f_sparse = lambda x: sparse.coo_matvec(data, row, col, x, shape=M.shape)
    v_sparse, t_sparse = jax.jvp(f_sparse, [x], [xdot])
    v_dense, t_dense = jax.jvp(f_dense, [x], [xdot])
    self.assertAllClose(v_sparse, v_dense, atol=tol, rtol=tol)
    self.assertAllClose(t_sparse, t_dense, atol=tol, rtol=tol)

    # Reverse-mode with respect to the vector
    primals_dense, vjp_dense = jax.vjp(f_dense, x)
    primals_sparse, vjp_sparse = jax.vjp(f_sparse, x)
    out_dense, = vjp_dense(primals_dense)
    out_sparse, = vjp_sparse(primals_sparse)
    self.assertAllClose(primals_dense[0], primals_sparse[0], atol=tol, rtol=tol)
    self.assertAllClose(out_dense, out_sparse, atol=tol, rtol=tol)

    # Forward-mode with respect to nonzero elements of the matrix
    f_sparse = lambda data: sparse.coo_matvec(data, row, col, x, shape=M.shape)
    f_dense = lambda data: sparse.coo_todense(data, row, col, shape=M.shape) @ x
    data = rng((len(data),), data.dtype)
    data_dot = rng((len(data),), data.dtype)
    v_sparse, t_sparse = jax.jvp(f_sparse, [data], [data_dot])
    v_dense, t_dense = jax.jvp(f_dense, [data], [data_dot])
    self.assertAllClose(v_sparse, v_dense, atol=tol, rtol=tol)
    self.assertAllClose(t_sparse, t_dense, atol=tol, rtol=tol)

    # Reverse-mode with respect to nonzero elements of the matrix
    primals_dense, vjp_dense = jax.vjp(f_dense, data)
    primals_sparse, vjp_sparse = jax.vjp(f_sparse, data)
    out_dense, = vjp_dense(primals_dense)
    out_sparse, = vjp_sparse(primals_sparse)
    self.assertAllClose(primals_dense[0], primals_sparse[0], atol=tol, rtol=tol)
    self.assertAllClose(out_dense, out_sparse, atol=tol, rtol=tol)
class BCOOTest(jtu.JaxTestCase):
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_nbatch={}_ndense={}".format(
        jtu.format_shape_dtype_string(shape, dtype), n_batch, n_dense),
       "shape": shape, "dtype": dtype, "n_batch": n_batch, "n_dense": n_dense}
      for shape in [(5,), (5, 8), (8, 5), (3, 4, 5), (3, 4, 3, 2)]
      for dtype in jtu.dtypes.integer + jtu.dtypes.floating + jtu.dtypes.complex
      for n_batch in range(len(shape) + 1)
      for n_dense in range(len(shape) + 1 - n_batch)))
  def test_bcoo_dense_round_trip(self, shape, dtype, n_batch, n_dense):
    """BCOO fromdense -> todense round-trips exactly for every batch/dense split."""
    rng = rand_sparse(self.rng())
    M = rng(shape, dtype)
    n_sparse = M.ndim - n_batch - n_dense
    nse = int(sparse.bcoo._bcoo_nse(M, n_batch=n_batch, n_dense=n_dense))
    data, indices = sparse.bcoo_fromdense(M, n_batch=n_batch, n_dense=n_dense)
    # jit path requires nse to be passed statically.
    data_jit, indices_jit = jit(partial(sparse.bcoo_fromdense, nse=nse, n_batch=n_batch, n_dense=n_dense))(M)
    self.assertArraysEqual(data, data_jit)
    self.assertArraysEqual(indices, indices_jit)
    assert data.dtype == dtype
    assert data.shape == shape[:n_batch] + (nse,) + shape[n_batch + n_sparse:]
    assert indices.dtype == jnp.int32 # TODO: test passing this arg
    assert indices.shape == shape[:n_batch] + (nse, n_sparse)
    todense = partial(sparse.bcoo_todense, shape=shape)
    self.assertArraysEqual(M, todense(data, indices))
    self.assertArraysEqual(M, jit(todense)(data, indices))
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_nbatch={}_ndense={}".format(
        jtu.format_shape_dtype_string(shape, dtype), n_batch, n_dense),
       "shape": shape, "dtype": dtype, "n_batch": n_batch, "n_dense": n_dense}
      for shape in [(5,), (5, 8), (8, 5), (3, 4, 5), (3, 4, 3, 2)]
      for dtype in jtu.dtypes.floating
      for n_batch in range(len(shape) + 1)
      for n_dense in range(len(shape) + 1 - n_batch)))
  def test_bcoo_todense_ad(self, shape, dtype, n_batch, n_dense):
    """jacfwd/jacrev of bcoo_todense w.r.t. data agree and have expected shapes."""
    rng = rand_sparse(self.rng())
    M = rng(shape, dtype)
    data, indices = sparse.bcoo_fromdense(M, n_batch=n_batch, n_dense=n_dense)
    todense = partial(sparse.bcoo_todense, indices=indices, shape=shape)
    j1 = jax.jacfwd(todense)(data)
    j2 = jax.jacrev(todense)(data)
    hess = jax.hessian(todense)(data)
    self.assertArraysAllClose(j1, j2)
    self.assertEqual(j1.shape, M.shape + data.shape)
    self.assertEqual(hess.shape, M.shape + 2 * data.shape)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_nbatch={}_ndense={}".format(
        jtu.format_shape_dtype_string(shape, dtype), n_batch, n_dense),
       "shape": shape, "dtype": dtype, "n_batch": n_batch, "n_dense": n_dense}
      for shape in [(5,), (5, 8), (8, 5), (3, 4, 5), (3, 4, 3, 2)]
      for dtype in jtu.dtypes.floating
      for n_batch in range(len(shape) + 1)
      for n_dense in range(len(shape) + 1 - n_batch)))
  def test_bcoo_fromdense_ad(self, shape, dtype, n_batch, n_dense):
    """jacfwd/jacrev of bcoo_fromdense's data output agree and have expected shapes."""
    rng = rand_sparse(self.rng())
    M = rng(shape, dtype)
    nse = int(sparse.bcoo._bcoo_nse(M, n_batch=n_batch, n_dense=n_dense))
    def fromdense(M):
      # Differentiate only the data output ([0]); indices are integer-valued.
      return sparse.bcoo_fromdense(M, nse=nse, n_batch=n_batch, n_dense=n_dense)[0]
    data = fromdense(M)
    j1 = jax.jacfwd(fromdense)(M)
    j2 = jax.jacrev(fromdense)(M)
    hess = jax.hessian(fromdense)(M)
    self.assertArraysAllClose(j1, j2)
    self.assertEqual(j1.shape, data.shape + M.shape)
    self.assertEqual(hess.shape, data.shape + 2 * M.shape)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_nbatch={}_ndense={}".format(
        jtu.format_shape_dtype_string(shape, dtype), n_batch, n_dense),
       "shape": shape, "dtype": dtype, "n_batch": n_batch, "n_dense": n_dense}
      for shape in [(5,), (5, 8), (8, 5), (3, 4, 5), (3, 4, 3, 2)]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex
      for n_batch in range(len(shape) + 1)
      for n_dense in range(len(shape) + 1 - n_batch)))
  def test_bcoo_dense_round_trip_batched(self, shape, dtype, n_batch, n_dense):
    """Round trip via vmapped fromdense/todense matches the unbatched result."""
    rng = rand_sparse(self.rng())
    M = rng(shape, dtype)
    n_sparse = M.ndim - n_batch - n_dense
    nse = int(sparse.bcoo._bcoo_nse(M, n_batch=n_batch, n_dense=n_dense))
    fromdense = partial(sparse.bcoo_fromdense, nse=nse, n_dense=n_dense)
    todense = partial(sparse.bcoo_todense, shape=shape[n_batch:])
    # Apply one vmap per batch dimension instead of passing n_batch directly.
    for i in range(n_batch):
      fromdense = jax.vmap(fromdense)
      todense = jax.vmap(todense)
    data, indices = fromdense(M)
    assert data.dtype == dtype
    assert data.shape == shape[:n_batch] + (nse,) + shape[n_batch + n_sparse:]
    assert indices.dtype == jnp.int32 # TODO: test passing this arg
    assert indices.shape == shape[:n_batch] + (nse, n_sparse)
    self.assertArraysEqual(M, todense(data, indices))
    self.assertArraysEqual(M, jit(todense)(data, indices))
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_{}_nbatch={}_ndense={}".format(
jtu.format_shape_dtype_string(shape, dtype), n_batch, n_dense),
"shape": shape, "dtype": dtype, "n_batch": n_batch, "n_dense": n_dense}
for shape in [(5,), (5, 8), (8, 5), (3, 4, 5), (3, 4, 3, 2)]
for dtype in jtu.dtypes.floating + jtu.dtypes.complex
for n_batch in range(len(shape) + 1)
for n_dense in range(len(shape) + 1 - n_batch)))
def test_bcoo_extract(self, shape, dtype, n_batch, n_dense):
rng = rand_sparse(self.rng())
M = rng(shape, dtype)
data, indices = sparse.bcoo_fromdense(M)
data2 = sparse.bcoo_extract(indices, M)
self.assertArraysEqual(data, data2)
data3 = jit(sparse.bcoo_extract)(indices, M)
self.assertArraysEqual(data, data3)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_nbatch={}_ndense={}".format(
        jtu.format_shape_dtype_string(shape, dtype), n_batch, n_dense),
       "shape": shape, "dtype": dtype, "n_batch": n_batch, "n_dense": n_dense}
      for shape in [(5,), (5, 8), (8, 5), (3, 4, 5), (3, 4, 3, 2)]
      for dtype in jtu.dtypes.floating
      for n_batch in range(len(shape) + 1)
      for n_dense in range(len(shape) + 1 - n_batch)))
  def test_bcoo_extract_ad(self, shape, dtype, n_batch, n_dense):
    """jacfwd/jacrev of bcoo_extract w.r.t. the dense input agree and have expected shapes."""
    rng = rand_sparse(self.rng())
    M = rng(shape, dtype)
    data, indices = sparse.bcoo_fromdense(M, n_batch=n_batch, n_dense=n_dense)
    extract = partial(sparse.bcoo_extract, indices)
    j1 = jax.jacfwd(extract)(M)
    j2 = jax.jacrev(extract)(M)
    hess = jax.hessian(extract)(M)
    self.assertArraysAllClose(j1, j2)
    self.assertEqual(j1.shape, data.shape + M.shape)
    self.assertEqual(hess.shape, data.shape + 2 * M.shape)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_nbatch={}_ndense={}".format(
        jtu.format_shape_dtype_string(shape, dtype), n_batch, n_dense),
       "shape": shape, "dtype": dtype, "n_batch": n_batch, "n_dense": n_dense}
      for shape in [(5,), (5, 8), (8, 5), (3, 4, 5), (3, 4, 3, 2)]
      for dtype in jtu.dtypes.floating
      for n_batch in range(len(shape) + 1)
      for n_dense in range(len(shape) + 1 - n_batch)))
  def test_bcoo_transpose(self, shape, dtype, n_batch, n_dense):
    """bcoo_transpose matches dense transpose for permutations that keep batch/sparse/dense groups intact."""
    n_sparse = len(shape) - n_batch - n_dense
    rng = self.rng()
    sprng = rand_sparse(rng)
    M = sprng(shape, dtype)
    data, indices = sparse.bcoo_fromdense(M, n_batch=n_batch, n_dense=n_dense)
    # Permute within each of the three dimension groups independently.
    permutation = np.concatenate([
      rng.permutation(range(n_batch)),
      rng.permutation(range(n_batch, n_batch + n_sparse)),
      rng.permutation(range(n_batch + n_sparse, len(shape)))]).astype(int)
    M_T = M.transpose(permutation)
    trans = partial(sparse.bcoo_transpose, shape=shape, permutation=permutation)
    self.assertArraysEqual(M_T, sparse.bcoo_todense(*trans(data, indices), shape=M_T.shape))
    self.assertArraysEqual(M_T, sparse.bcoo_todense(*jit(trans)(data, indices), shape=M_T.shape))
    # test batched
    # `trans` is intentionally re-bound here: a dense per-example transpose,
    # vmapped over the batch dims, applied to both the dense M and the BCOO Msp.
    def trans(M):
      return M.transpose([p - n_batch for p in permutation[n_batch:]])
    for _ in range(n_batch):
      trans = jax.vmap(trans)
    Msp = sparse.BCOO.fromdense(M, n_batch=n_batch, n_dense=n_dense)
    self.assertArraysEqual(trans(M), trans(Msp).todense())
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_nbatch={}_ndense={}".format(
        jtu.format_shape_dtype_string(shape, dtype), n_batch, n_dense),
       "shape": shape, "dtype": dtype, "n_batch": n_batch, "n_dense": n_dense}
      for shape in [(5,), (5, 8), (8, 5), (3, 4, 5), (3, 4, 3, 2)]
      for dtype in jtu.dtypes.floating
      for n_batch in range(len(shape) + 1)
      for n_dense in range(len(shape) + 1 - n_batch)))
  def test_bcoo_transpose_ad(self, shape, dtype, n_batch, n_dense):
    """Forward- and reverse-mode jacobians of bcoo_transpose agree."""
    n_sparse = len(shape) - n_batch - n_dense
    rng = self.rng()
    sprng = rand_sparse(self.rng())
    M = sprng(shape, dtype)
    data, indices = sparse.bcoo_fromdense(M, n_batch=n_batch, n_dense=n_dense)
    # Permute only within each of the batch / sparse / dense dimension groups.
    permutation = np.concatenate([
      rng.permutation(range(n_batch)),
      rng.permutation(range(n_batch, n_batch + n_sparse)),
      rng.permutation(range(n_batch + n_sparse, len(shape)))]).astype(int)
    def f_sparse(data):
      return sparse.bcoo_transpose(data, indices, shape=shape, permutation=permutation)[0]
    jf_sparse = jax.jacfwd(f_sparse)(data)
    jr_sparse = jax.jacrev(f_sparse)(data)
    tol = {}
    if jtu.device_under_test() == "tpu":
      tol = {np.float32: 5E-3}
    # TODO(jakevdp) also test against dense version?
    self.assertAllClose(jf_sparse, jr_sparse, rtol=tol)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_nbatch={}_ndense={}".format(
        jtu.format_shape_dtype_string(shape, dtype), n_batch, n_dense),
       "shape": shape, "dtype": dtype, "n_batch": n_batch, "n_dense": n_dense}
      for shape in [(5, 8), (8, 5), (3, 4, 5), (3, 4, 3, 2)]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex
      for n_batch in range(1, len(shape) + 1)
      for n_dense in range(len(shape) + 1 - n_batch)))
  def test_bcoo_todense_partial_batch(self, shape, dtype, n_batch, n_dense):
    """A size-1 leading batch dimension in data or indices broadcasts in todense."""
    rng = rand_sparse(self.rng())
    M = rng(shape, dtype)
    data, indices = sparse.bcoo_fromdense(M, n_batch=n_batch, n_dense=n_dense)
    # Size-1 batch of indices should behave like explicitly tiled indices.
    M1 = sparse.bcoo_todense(data, indices[:1], shape=M.shape)
    M2 = sparse.bcoo_todense(data, jnp.stack(shape[0] * [indices[0]]), shape=M.shape)
    self.assertAllClose(M1, M2)
    # Likewise for a size-1 batch of data.
    M3 = sparse.bcoo_todense(data[:1], indices, shape=M.shape)
    M4 = sparse.bcoo_todense(jnp.stack(shape[0] * [data[0]]), indices, shape=M.shape)
    self.assertAllClose(M3, M4)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name":
       "_lhs_shape={}_rhs_shape={}_lhs_contracting={}_rhs_contracting={}_n_dense={}"
       .format(jtu.format_shape_dtype_string(lhs_shape, dtype),
               jtu.format_shape_dtype_string(rhs_shape, dtype),
               lhs_contracting, rhs_contracting, n_dense),
       "lhs_shape": lhs_shape, "rhs_shape": rhs_shape, "dtype": dtype,
       "lhs_contracting": lhs_contracting, "rhs_contracting": rhs_contracting,
       "n_dense": n_dense}
      for lhs_shape, rhs_shape, lhs_contracting, rhs_contracting in [
          [(5,), (6,), [], []],
          [(5,), (5,), [0], [0]],
          [(5, 7), (5,), [0], [0]],
          [(7, 5), (5,), [1], [0]],
          [(3, 5), (2, 5), [1], [1]],
          [(5, 3), (5, 2), [0], [0]],
          [(5, 3, 2), (5, 2, 4), [0], [0]],
          [(5, 3, 2), (5, 2, 4), [0,2], [0,1]],
          [(5, 3, 2), (3, 5, 2, 4), [0,2], [1,2]],
          [(1, 2, 2, 3), (1, 2, 3, 1), [1], [1]],
          [(3, 2), (2, 4), [1], [0]],
      ]
      for n_dense in range(len(lhs_shape) - max(lhs_contracting, default=0))
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex))
  def test_bcoo_dot_general_contract_only(self, lhs_shape, rhs_shape, dtype,
                                          lhs_contracting, rhs_contracting, n_dense):
    """bcoo_dot_general with contraction dims only (no batch dims) matches
    lax.dot_general on the dense inputs, eagerly and under jit."""
    rng = jtu.rand_small(self.rng())
    rng_sparse = rand_sparse(self.rng())
    def args_maker():
      lhs = rng_sparse(lhs_shape, dtype)
      rhs = rng(rhs_shape, dtype)
      data, indices = sparse.bcoo_fromdense(lhs, n_dense=n_dense)
      return data, indices, lhs, rhs
    dimension_numbers = ((lhs_contracting, rhs_contracting), ([], []))
    def f_dense(data, indices, lhs, rhs):
      return lax.dot_general(lhs, rhs, dimension_numbers=dimension_numbers)
    def f_sparse(data, indices, lhs, rhs):
      return sparse.bcoo_dot_general(data, indices, rhs,
                                     lhs_shape=lhs.shape,
                                     dimension_numbers=dimension_numbers)
    self._CheckAgainstNumpy(f_dense, f_sparse, args_maker)
    self._CheckAgainstNumpy(f_dense, jit(f_sparse), args_maker)
    # TODO(jakevdp): In rare cases, this fails python_should_be_executing check. Why?
    # self._CompileAndCheck(f_sparse, args_maker)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name":
       "_lhs_shape={}_rhs_shape={}_dimension_numbers={}_n_batch={}_n_dense={}"
       .format(jtu.format_shape_dtype_string(lhs_shape, dtype),
               jtu.format_shape_dtype_string(rhs_shape, dtype),
               dimension_numbers, n_batch, n_dense),
       "lhs_shape": lhs_shape, "rhs_shape": rhs_shape, "dtype": dtype,
       "dimension_numbers": dimension_numbers,
       "n_batch": n_batch, "n_dense": n_dense}
      for lhs_shape, rhs_shape, dimension_numbers, n_batch, n_dense in [
          ((3, 3, 2), (3, 2, 4), (([2], [1]), ([0], [0])), 1, 0),
          ((3, 3, 2), (3, 2, 4), (([2], [1]), ([0], [0])), 2, 0),
          ((3, 3, 2), (2, 3, 4), (([2], [0]), ([0], [1])), 1, 0),
          ((3, 3, 2), (2, 3, 4), (([2], [0]), ([0], [1])), 2, 0),
          ((3, 4, 2, 4), (3, 4, 3, 2), (([2], [3]), ([0, 1], [0, 1])), 2, 0),
          ((3, 4, 2, 4), (3, 4, 3, 2), (([2], [3]), ([0, 1], [0, 1])), 2, 1),
      ]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex))
  def test_bcoo_dot_general_contract_and_batch(self, lhs_shape, rhs_shape, dtype,
                                               dimension_numbers, n_batch, n_dense):
    """bcoo_dot_general with both batch and contracting dims matches lax.dot_general."""
    rng = jtu.rand_small(self.rng())
    rng_sparse = rand_sparse(self.rng())
    def args_maker():
      lhs = rng_sparse(lhs_shape, dtype)
      rhs = rng(rhs_shape, dtype)
      data, indices = sparse.bcoo_fromdense(lhs, n_batch=n_batch, n_dense=n_dense)
      return data, indices, lhs, rhs
    def f_dense(data, indices, lhs, rhs):
      return lax.dot_general(lhs, rhs, dimension_numbers=dimension_numbers)
    def f_sparse(data, indices, lhs, rhs):
      return sparse.bcoo_dot_general(data, indices, rhs,
                                     lhs_shape=lhs.shape,
                                     dimension_numbers=dimension_numbers)
    self._CheckAgainstNumpy(f_dense, f_sparse, args_maker)
    self._CheckAgainstNumpy(f_dense, jit(f_sparse), args_maker)
    # TODO(jakevdp): In rare cases, this fails python_should_be_executing check. Why?
    # self._CompileAndCheck(f_sparse, args_maker)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name":
       "_lhs_shape={}_rhs_shape={}_dimension_numbers={}_n_batch={}_n_dense={}"
       .format(jtu.format_shape_dtype_string(lhs_shape, dtype),
               jtu.format_shape_dtype_string(rhs_shape, dtype),
               dimension_numbers, n_batch, n_dense),
       "lhs_shape": lhs_shape, "rhs_shape": rhs_shape, "dtype": dtype,
       "dimension_numbers": dimension_numbers,
       "n_batch": n_batch, "n_dense": n_dense}
      for lhs_shape, rhs_shape, dimension_numbers, n_batch, n_dense in [
          ((3, 2, 4), (3, 3, 2), (([1], [2]), ([0], [0])), 1, 0),
          ((3, 2, 4), (3, 3, 2), (([1], [2]), ([0], [0])), 2, 0),
          ((2, 3, 4), (3, 3, 2), (([0], [2]), ([1], [0])), 1, 0),
          ((2, 3, 4), (3, 3, 2), (([0], [2]), ([1], [0])), 2, 0),
          ((3, 4, 3, 2), (3, 4, 2, 4), (([3], [2]), ([0], [0])), 1, 0),
          ((3, 4, 3, 2), (3, 4, 2, 4), (([3], [2]), ([0, 1], [0, 1])), 2, 0),
          ((3, 4, 3, 2), (3, 4, 2, 4), (([3], [2]), ([0, 1], [0, 1])), 2, 1),
      ]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex))
  def test_bcoo_rdot_general_contract_and_batch(self, lhs_shape, rhs_shape, dtype,
                                                dimension_numbers, n_batch, n_dense):
    """bcoo_rdot_general (sparse operand on the right) matches lax.dot_general."""
    rng = jtu.rand_small(self.rng())
    rng_sparse = rand_sparse(self.rng())
    def args_maker():
      lhs = rng(lhs_shape, dtype)
      rhs = rng_sparse(rhs_shape, dtype)
      # Here the *rhs* is sparsified, unlike the dot_general tests above.
      data, indices = sparse.bcoo_fromdense(rhs, n_batch=n_batch, n_dense=n_dense)
      return data, indices, lhs, rhs
    def f_dense(data, indices, lhs, rhs):
      return lax.dot_general(lhs, rhs, dimension_numbers=dimension_numbers)
    def f_sparse(data, indices, lhs, rhs):
      return sparse.bcoo_rdot_general(lhs, data, indices,
                                      rhs_shape=rhs.shape,
                                      dimension_numbers=dimension_numbers)
    self._CheckAgainstNumpy(f_dense, f_sparse, args_maker)
    self._CheckAgainstNumpy(f_dense, jit(f_sparse), args_maker)
    # TODO(jakevdp): In rare cases, this fails python_should_be_executing check. Why?
    # self._CompileAndCheck(f_sparse, args_maker)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name":
       "_lhs_shape={}_rhs_shape={}_dimension_numbers={}_n_batch={}_n_dense={}"
       .format(jtu.format_shape_dtype_string(lhs_shape, dtype),
               jtu.format_shape_dtype_string(rhs_shape, dtype),
               dimension_numbers, n_batch, n_dense),
       "lhs_shape": lhs_shape, "rhs_shape": rhs_shape, "dtype": dtype,
       "dimension_numbers": dimension_numbers,
       "n_batch": n_batch, "n_dense": n_dense}
      for lhs_shape, rhs_shape, dimension_numbers, n_batch, n_dense in [
          ((3, 3, 2), (3, 2, 4), (([2], [1]), ([0], [0])), 1, 0),
          ((3, 3, 2), (3, 2, 4), (([2], [1]), ([0], [0])), 2, 0),
          ((3, 3, 2), (2, 3, 4), (([2], [0]), ([0], [1])), 1, 0),
          ((3, 3, 2), (2, 3, 4), (([2], [0]), ([0], [1])), 2, 0),
          ((3, 4, 2, 4), (3, 4, 3, 2), (([2], [3]), ([0, 1], [0, 1])), 2, 0),
          ((3, 4, 2, 4), (3, 4, 3, 2), (([2], [3]), ([0, 1], [0, 1])), 2, 1),
      ]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex))
  def test_bcoo_dot_general_partial_batch(self, lhs_shape, rhs_shape, dtype,
                                          dimension_numbers, n_batch, n_dense):
    """bcoo_dot_general handles a broadcast size-1 leading batch of data/indices."""
    rng = jtu.rand_small(self.rng())
    rng_sparse = rand_sparse(self.rng())
    X = rng_sparse(lhs_shape, dtype)
    data, indices = sparse.bcoo_fromdense(X, n_batch=n_batch, n_dense=n_dense)
    Y = rng(rhs_shape, dtype)
    def f_dense(X, Y):
      return lax.dot_general(X, Y, dimension_numbers=dimension_numbers)
    def f_sparse(data, indices, Y):
      return sparse.bcoo_dot_general(data, indices, Y, lhs_shape=X.shape,
                                     dimension_numbers=dimension_numbers)
    # Try every combination of full vs. size-1 leading batch for data/indices;
    # rebuild the dense X each time so the comparison uses the same operand.
    for data, indices in itertools.product([data, data[:1]], [indices, indices[:1]]):
      X = sparse.bcoo_todense(data, indices, shape=X.shape)
      self.assertAllClose(f_dense(X, Y), f_sparse(data, indices, Y))
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name":
       "_lhs_shape={}_rhs_shape={}_dimension_numbers={}_n_batch={}_n_dense={}"
       .format(jtu.format_shape_dtype_string(lhs_shape, dtype),
               jtu.format_shape_dtype_string(rhs_shape, dtype),
               dimension_numbers, n_batch, n_dense),
       "lhs_shape": lhs_shape, "rhs_shape": rhs_shape, "dtype": dtype,
       "dimension_numbers": dimension_numbers,
       "n_batch": n_batch, "n_dense": n_dense}
      for lhs_shape, rhs_shape, dimension_numbers, n_batch, n_dense in [
          ((4, 5), (5, 3), (([1], [0]), ([], [])), 0, 0),
          ((2, 4, 5), (2, 5, 3), (([2], [1]), ([0], [0])), 1, 0),
          ((3, 3, 2), (3, 2, 4), (([2], [1]), ([0], [0])), 1, 0),
          ((3, 3, 2), (2, 3, 4), (([2], [0]), ([0], [1])), 1, 0),
          ((3, 4, 2, 4), (3, 4, 3, 2), (([2], [3]), ([0, 1], [0, 1])), 2, 0),
          # These require contraction over batch & dense dimensions
          # which is not yet implemented:
          # ((3, 3, 2), (3, 2, 4), (([2], [1]), ([0], [0])), 2, 0),
          # ((3, 3, 2), (2, 3, 4), (([2], [0]), ([0], [1])), 2, 0),
          # ((3, 4, 2, 4), (3, 4, 3, 2), (([2], [3]), ([0, 1], [0, 1])), 2, 1),
      ]
      for dtype in jtu.dtypes.floating))
  def test_bcoo_dot_general_ad(self, lhs_shape, rhs_shape, dtype,
                               dimension_numbers, n_batch, n_dense):
    """Jacobians of bcoo_dot_general (w.r.t. both operands) agree between
    fwd and rev mode and match the dense jacobians."""
    rng = jtu.rand_small(self.rng())
    rng_sparse = rand_sparse(self.rng())
    X = rng_sparse(lhs_shape, dtype)
    data, indices = sparse.bcoo_fromdense(X, n_batch=n_batch, n_dense=n_dense)
    Y = rng(rhs_shape, dtype)
    # gradient with respect to rhs
    def f_dense(Y):
      return lax.dot_general(X, Y, dimension_numbers=dimension_numbers)
    def f_sparse(Y):
      return sparse.bcoo_dot_general(data, indices, Y, lhs_shape=X.shape,
                                     dimension_numbers=dimension_numbers)
    jf_dense = jax.jacfwd(f_dense)(Y)
    jr_dense = jax.jacrev(f_dense)(Y)
    jf_sparse = jax.jacfwd(f_sparse)(Y)
    jr_sparse = jax.jacrev(f_sparse)(Y)
    tol = {}
    if jtu.device_under_test() == "tpu":
      tol = {np.float32: 5E-3}
    self.assertAllClose(jf_dense, jf_sparse, rtol=tol)
    self.assertAllClose(jr_dense, jr_sparse, rtol=tol)
    self.assertAllClose(jf_sparse, jr_sparse, rtol=tol)
    # gradient with respect to lhs
    def g_dense(X):
      return lax.dot_general(X, Y, dimension_numbers=dimension_numbers)
    def g_sparse(data):
      return sparse.bcoo_dot_general(data, indices, Y, lhs_shape=X.shape,
                                     dimension_numbers=dimension_numbers)
    jf_dense = jax.jacfwd(g_dense)(X)
    jr_dense = jax.jacrev(g_dense)(X)
    jf_sparse = jax.jacfwd(g_sparse)(data)
    jr_sparse = jax.jacrev(g_sparse)(data)
    tol = {}
    if jtu.device_under_test() == "tpu":
      tol = {np.float32: 5E-3}
    self.assertAllClose(jf_dense, jr_dense, rtol=tol)
    self.assertAllClose(jf_sparse, jr_sparse, rtol=tol)
    # Extract the sparse jacobian from the dense & compare.
    def extract(X):
      return sparse.bcoo_extract(indices, X)
    # vmap the extraction over each output dimension of the jacobian.
    for i in range(g_dense(X).ndim):
      extract = jax.vmap(extract)
    self.assertAllClose(extract(jf_dense), jf_sparse, rtol=tol)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name":
       "_lhs_shape={}_rhs_shape={}_dimension_numbers={}_n_batch={}_n_dense={}"
       .format(jtu.format_shape_dtype_string(lhs_shape, dtype),
               jtu.format_shape_dtype_string(rhs_shape, dtype),
               dimension_numbers, n_batch, n_dense),
       "lhs_shape": lhs_shape, "rhs_shape": rhs_shape, "dtype": dtype,
       "dimension_numbers": dimension_numbers,
       "n_batch": n_batch, "n_dense": n_dense}
      for lhs_shape, rhs_shape, dimension_numbers, n_batch, n_dense in [
          ((3, 3, 2), (3, 2, 4), (([2], [1]), ([0], [0])), 0, 0),
          ((3, 3, 2), (3, 2, 4), (([2], [1]), ([0], [0])), 1, 0),
          ((3, 3, 2), (3, 2, 4), (([2], [1]), ([0], [0])), 0, 1),
          ((3, 3, 2), (2, 3, 4), (([2], [0]), ([0], [1])), 0, 0),
          ((3, 3, 2), (2, 3, 4), (([2], [0]), ([0], [1])), 1, 1),
          ((3, 4, 2, 4), (3, 4, 3, 2), (([2], [3]), ([0, 1], [0, 1])), 0, 0),
          ((3, 4, 2, 4), (3, 4, 3, 2), (([2], [3]), ([0, 1], [0, 1])), 1, 2),
          ((3, 4, 2, 4), (3, 4, 3, 2), (([2], [3]), ([0, 1], [0, 1])), 2, 1),
      ]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex))
  def test_bcoo_dot_general_sampled(self, lhs_shape, rhs_shape, dtype, dimension_numbers, n_batch, n_dense):
    """bcoo_dot_general_sampled equals a dense dot_general followed by bcoo_extract."""
    rng = jtu.rand_default(self.rng())
    sprng = rand_sparse(self.rng())
    # Compute the dense output shape to sample a sparsity pattern from.
    out_shape = lax.dot_general(
      jnp.zeros(lhs_shape), jnp.zeros(rhs_shape),
      dimension_numbers=dimension_numbers).shape
    args_maker = lambda: [
      rng(lhs_shape, dtype), rng(rhs_shape, dtype),
      sparse.BCOO.fromdense(sprng(out_shape, dtype),
                            n_batch=n_batch, n_dense=n_dense).indices]
    def dense_fun(lhs, rhs, indices):
      AB = lax.dot_general(lhs, rhs, dimension_numbers=dimension_numbers)
      return sparse.bcoo_extract(indices, AB)
    def sparse_fun(lhs, rhs, indices):
      return sparse.bcoo_dot_general_sampled(
          lhs, rhs, indices, dimension_numbers=dimension_numbers)
    tol = {}
    if jtu.device_under_test() == "tpu":
      tol = {np.float32: 5E-3}
    self._CheckAgainstNumpy(dense_fun, sparse_fun, args_maker, tol=tol)
    # TODO: python_should_be_executing check occasionally fails... why?
    # self._CompileAndCheck(sparse_fun, args_maker)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name":
       "_lhs_shape={}_rhs_shape={}_dimension_numbers={}_n_batch={}_n_dense={}"
       .format(jtu.format_shape_dtype_string(lhs_shape, dtype),
               jtu.format_shape_dtype_string(rhs_shape, dtype),
               dimension_numbers, n_batch, n_dense),
       "lhs_shape": lhs_shape, "rhs_shape": rhs_shape, "dtype": dtype,
       "dimension_numbers": dimension_numbers,
       "n_batch": n_batch, "n_dense": n_dense}
      for lhs_shape, rhs_shape, dimension_numbers, n_batch, n_dense in [
          ((3, 3, 2), (3, 2, 4), (([2], [1]), ([0], [0])), 1, 0),
          ((3, 3, 2), (3, 2, 4), (([2], [1]), ([0], [0])), 1, 1),
          ((3, 3, 2), (3, 2, 4), (([2], [1]), ([0], [0])), 2, 0),
          ((3, 3, 2), (2, 3, 4), (([2], [0]), ([0], [1])), 1, 0),
          ((3, 3, 2), (2, 3, 4), (([2], [0]), ([0], [1])), 1, 1),
          ((3, 3, 2), (2, 3, 4), (([2], [0]), ([0], [1])), 2, 0),
          ((3, 4, 2, 4), (3, 4, 3, 2), (([2], [3]), ([0, 1], [0, 1])), 2, 0),
          ((3, 4, 2, 4), (3, 4, 3, 2), (([2], [3]), ([0, 1], [0, 1])), 2, 1),
      ]
      for dtype in jtu.dtypes.floating))
  def test_bcoo_dot_general_sampled_ad(self, lhs_shape, rhs_shape, dtype, dimension_numbers, n_batch, n_dense):
    """Jacobians of bcoo_dot_general_sampled match the dense dot + extract path."""
    rng = jtu.rand_default(self.rng())
    sprng = rand_sparse(self.rng())
    # Compute the dense output shape to sample a sparsity pattern from.
    out_shape = lax.dot_general(
      jnp.zeros(lhs_shape), jnp.zeros(rhs_shape),
      dimension_numbers=dimension_numbers).shape
    lhs = rng(lhs_shape, dtype)
    rhs = rng(rhs_shape, dtype)
    indices = sparse.BCOO.fromdense(sprng(out_shape, dtype),
                                    n_batch=n_batch, n_dense=n_dense).indices
    def dense_fun(lhs, rhs, indices):
      AB = lax.dot_general(lhs, rhs, dimension_numbers=dimension_numbers)
      return sparse.bcoo_extract(indices, AB)
    def sparse_fun(lhs, rhs, indices):
      return sparse.bcoo_dot_general_sampled(
          lhs, rhs, indices, dimension_numbers=dimension_numbers)
    jf_dense = jax.jacfwd(dense_fun)(lhs, rhs, indices)
    jf_sparse = jax.jacfwd(sparse_fun)(lhs, rhs, indices)
    jr_dense = jax.jacrev(dense_fun)(lhs, rhs, indices)
    jr_sparse = jax.jacrev(sparse_fun)(lhs, rhs, indices)
    tol = {}
    if jtu.device_under_test() == "tpu":
      tol = {np.float32: 5E-3}
    self.assertAllClose(jf_sparse, jf_dense, atol=tol)
    self.assertAllClose(jr_sparse, jr_dense, atol=tol)
    self.assertAllClose(jf_sparse, jr_sparse, atol=tol)
  @unittest.skip("Jaxlib GPU build failing in OSS.")
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}[n_batch={}]_{}[n_batch={}]_dims={}".format(
        jtu.format_shape_dtype_string(lhs_shape, dtype), lhs_n_batch,
        jtu.format_shape_dtype_string(rhs_shape, dtype), rhs_n_batch,
        dimension_numbers),
       "lhs_shape": lhs_shape, "rhs_shape": rhs_shape,
       "lhs_n_batch": lhs_n_batch, "rhs_n_batch": rhs_n_batch,
       "dimension_numbers": dimension_numbers, "dtype": dtype}
      for lhs_shape, lhs_n_batch, rhs_shape, rhs_n_batch, dimension_numbers in [
          # (batched) vector-vector products
          ((5,), 0, (5,), 0, (([0], [0]), ([], []))),
          ((7,), 0, (7,), 0, (([0], [0]), ([], []))),
          ((5, 7), 1, (7,), 0, (([1], [0]), ([], []))),
          ((2, 3, 4), 2, (2, 4), 1, (([2], [1]), ([0], [0]))),
          ((2, 3, 4), 2, (2, 4), 1, (([2], [1]), ([], []))),
          ((2, 3, 4), 2, (3, 4), 1, (([2], [1]), ([1], [0]))),
          ((2, 3, 4), 2, (3, 4), 1, (([2], [1]), ([], []))),
          # (batched) matrix-vector products
          ((5, 7), 0, (7,), 0, (([1], [0]), ([], []))),
          ((2, 3, 4), 1, (4,), 0, (([2], [0]), ([], []))),
          ((2, 3, 4), 1, (2, 4), 1, (([2], [1]), ([0], [0]))),
          ((3, 2, 4), 1, (3, 4), 1, (([2], [1]), ([0], [0]))),
          ((2, 3, 4), 0, (2,), 0, (([0], [0]), ([], []))),
      ]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex))
  def test_bcoo_spdot_general(self, lhs_shape, lhs_n_batch, rhs_shape, rhs_n_batch, dtype, dimension_numbers):
    """Sparse-sparse bcoo_spdot_general matches dense lax.dot_general."""
    sprng = rand_sparse(self.rng())
    def args_maker():
      x = sprng(lhs_shape, dtype)
      y = sprng(rhs_shape, dtype)
      xsp = sparse.BCOO.fromdense(x, n_batch=lhs_n_batch)
      ysp = sparse.BCOO.fromdense(y, n_batch=rhs_n_batch)
      return x, y, xsp, ysp
    def f_dense(x, y, xsp, ysp):
      return lax.dot_general(x, y, dimension_numbers=dimension_numbers)
    def f_sparse(x, y, xsp, ysp):
      shape = sparse.bcoo._dot_general_validated_shape(x.shape, y.shape, dimension_numbers)
      data, indices = sparse.bcoo_spdot_general(xsp.data, xsp.indices, ysp.data, ysp.indices,
                                                lhs_shape=x.shape, rhs_shape=y.shape,
                                                dimension_numbers=dimension_numbers)
      # Densify the sparse result so both paths return comparable arrays.
      return sparse.bcoo_todense(data, indices, shape=shape)
    self._CheckAgainstNumpy(f_dense, f_sparse, args_maker)
    # TODO(jakevdp): This occasionally fails python_should_be_executing check. Why?
    # self._CompileAndCheck(f_sparse, args_maker)
    self._CheckAgainstNumpy(jit(f_dense), jit(f_sparse), args_maker)
  @unittest.skipIf(jtu.device_under_test() == "tpu", "TPU has insufficient precision")
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}[n_batch={}]_{}[n_batch={}]_in_axes={}".format(
        jtu.format_shape_dtype_string(lhs_shape, dtype), lhs_n_batch,
        jtu.format_shape_dtype_string(rhs_shape, dtype), rhs_n_batch,
        in_axes),
       "lhs_shape": lhs_shape, "lhs_n_batch": lhs_n_batch,
       "rhs_shape": rhs_shape, "rhs_n_batch": rhs_n_batch,
       "dtype": dtype, "in_axes": in_axes}
      for lhs_shape, lhs_n_batch, rhs_shape, rhs_n_batch, in_axes in [
          ((3, 5), 1, (3, 5), 1, 0),
          ((3, 4, 5), 1, (3, 5), 1, 0),
          ((3, 4, 5), 2, (3, 5), 1, 0),
          # TODO(jakevdp): test these once unequal batches are implemented
          # ((4, 5), 1, (5,), 0, (0, None)),
          # ((3, 4, 5), 1, (5,), 0, (0, None)),
          # ((4, 5), 0, (3, 5), 1, (None, 0)),
      ]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex))
  def test_bcoo_spmm_batched(self, lhs_shape, lhs_n_batch, rhs_shape, rhs_n_batch, dtype, in_axes):
    """vmap of sparse @ sparse matmul matches the vmapped dense matmul."""
    sprng = rand_sparse(self.rng())
    def args_maker():
      x = sprng(lhs_shape, dtype)
      y = sprng(rhs_shape, dtype)
      xsp = sparse.BCOO.fromdense(x, n_batch=lhs_n_batch)
      ysp = sparse.BCOO.fromdense(y, n_batch=rhs_n_batch)
      return x, y, xsp, ysp
    # Both functions take all four args so they share a single args tuple;
    # each uses only its own (dense or sparse) pair.
    def f_dense(x, y, _, __):
      return jax.vmap(operator.matmul, in_axes=in_axes)(x, y)
    def f_sparse(_, __, x, y):
      return jax.vmap(operator.matmul, in_axes=in_axes)(x, y)
    args = args_maker()
    result_dense = f_dense(*args)
    result_sparse = f_sparse(*args)
    self.assertAllClose(result_dense, result_sparse.todense())
    result_sparse_jit = jax.jit(f_sparse)(*args)
    self.assertAllClose(result_dense, result_sparse_jit.todense())
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_nbatch={}_ndense={}".format(
        jtu.format_shape_dtype_string(shape, dtype), n_batch, n_dense),
       "shape": shape, "dtype": dtype, "n_batch": n_batch, "n_dense": n_dense}
      for shape in [(5,), (5, 8), (8, 5), (3, 4, 5), (3, 4, 3, 2)]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex
      for n_batch in range(len(shape) + 1)
      for n_dense in range(len(shape) + 1 - n_batch)))
  def test_bcoo_dedupe(self, shape, dtype, n_batch, n_dense):
    """_dedupe() preserves the dense value when duplicate indices are injected."""
    rng = self.rng()
    rng_sparse = rand_sparse(self.rng())
    M = sparse.BCOO.fromdense(rng_sparse(shape, dtype))
    # Overwrite the sparse indices with random values, deliberately allowing
    # duplicates, so that _dedupe() has real work to do.
    for i, s in enumerate(shape[n_batch:len(shape) - n_dense]):
      M.indices = M.indices.at[..., i, :].set(rng.randint(0, s, size=M.indices.shape[-1]))
    M_dedup = M._dedupe()
    self.assertAllClose(M.todense(), M_dedup.todense())
def test_bcoo_dedupe_padding(self):
# Regression test for https://github.com/google/jax/issues/8163
size = 3
data = jnp.array([1, 0, 0])
indices = jnp.array([1, size, size])[:, None]
x = sparse.BCOO((data, indices), shape=(3,))
y = x._dedupe()
self.assertArraysEqual(x.todense(), y.todense())
self.assertArraysEqual(x.indices, y.indices)
self.assertArraysEqual(x.data, y.data)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_nbatch={}_ndense={}_axes={}".format(
        jtu.format_shape_dtype_string(shape, dtype), n_batch, n_dense, axes),
       "shape": shape, "dtype": dtype, "n_batch": n_batch, "n_dense": n_dense, "axes": axes}
      for shape in [(5,), (5, 8), (8, 5), (3, 4, 5), (3, 4, 3, 2)]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex
      for n_batch in range(len(shape) + 1)
      for n_dense in range(len(shape) + 1 - n_batch)
      for naxes in range(len(shape))
      for axes in itertools.combinations(range(len(shape)), naxes)))
  def test_bcoo_reduce_sum(self, shape, dtype, n_batch, n_dense, axes):
    """bcoo_reduce_sum over an arbitrary axis subset matches the dense sum."""
    rng = rand_sparse(self.rng())
    M = rng(shape, dtype)
    data, indices = sparse.bcoo_fromdense(M, n_batch=n_batch, n_dense=n_dense)
    data_out, indices_out, shape_out = sparse.bcoo_reduce_sum(data, indices, shape=shape, axes=axes)
    result_dense = M.sum(axes)
    result_sparse = sparse.bcoo_todense(data_out, indices_out, shape=shape_out)
    tol = {np.float32: 1E-6, np.float64: 1E-14}
    self.assertAllClose(result_dense, result_sparse, atol=tol, rtol=tol)
  @unittest.skipIf(jtu.device_under_test() == "tpu", "TPU has insufficient precision")
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_{}".format(
        jtu.format_shape_dtype_string(lhs_shape, lhs_dtype),
        jtu.format_shape_dtype_string(rhs_shape, rhs_dtype)),
       "lhs_shape": lhs_shape, "lhs_dtype": lhs_dtype,
       "rhs_shape": rhs_shape, "rhs_dtype": rhs_dtype,
       }
      for lhs_shape, rhs_shape in [[(3,), (3,)],
                                   [(3, 4), (4,)],
                                   [(4,), (4, 5)],
                                   [(3, 4), (4, 5)]]
      for lhs_dtype in all_dtypes
      for rhs_dtype in all_dtypes))
  def test_bcoo_matmul(self, lhs_shape, lhs_dtype, rhs_shape, rhs_dtype):
    """The @ operator with a BCOO operand (on either side) matches dense matmul
    across mixed dtype combinations."""
    rng = jtu.rand_default(self.rng())
    lhs = jnp.array(rng(lhs_shape, lhs_dtype))
    rhs = jnp.array(rng(rhs_shape, rhs_dtype))
    out1 = lhs @ rhs
    out2 = sparse.BCOO.fromdense(lhs) @ rhs
    out3 = lhs @ sparse.BCOO.fromdense(rhs)
    tol = {np.float64: 1E-13, np.complex128: 1E-13,
           np.float32: 1E-6, np.complex64: 1E-6}
    self.assertAllClose(out1, out2, rtol=tol)
    self.assertAllClose(out1, out3, rtol=tol)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_{}_n_batch={}_n_dense={}".format(
        jtu.format_shape_dtype_string(lhs_shape, lhs_dtype),
        jtu.format_shape_dtype_string(rhs_shape, rhs_dtype),
        n_batch, n_dense),
       "lhs_shape": lhs_shape, "lhs_dtype": lhs_dtype,
       "rhs_shape": rhs_shape, "rhs_dtype": rhs_dtype,
       "n_batch": n_batch, "n_dense": n_dense,
       }
      for lhs_shape, rhs_shape in [[(3,), ()], [(3,), (1,)], [(3,), (3,)],
                                   [(3, 4), ()], [(3, 4), (4,)], [(3, 4), (3, 1)], [(3, 4), (3, 4)],
                                   [(3, 4, 5), (4, 5)], [(3, 4, 5), (3, 1, 1)], [(3, 4, 5), (1, 4, 1)]]
      for n_batch in range(len(lhs_shape) + 1)
      for n_dense in range(len(lhs_shape) + 1 - n_batch)
      for lhs_dtype in all_dtypes
      for rhs_dtype in all_dtypes))
  def test_bcoo_mul_dense(self, lhs_shape, lhs_dtype, rhs_shape, rhs_dtype, n_batch, n_dense):
    """Elementwise sparse * dense (in both operand orders, with broadcasting)
    matches the fully dense multiply."""
    rng_lhs = rand_sparse(self.rng())
    rng_rhs = jtu.rand_default(self.rng())
    lhs = jnp.array(rng_lhs(lhs_shape, lhs_dtype))
    rhs = jnp.array(rng_rhs(rhs_shape, rhs_dtype))
    sp = lambda x: sparse.BCOO.fromdense(x, n_batch=n_batch, n_dense=n_dense)
    out1 = lhs * rhs
    out2 = (sp(lhs) * rhs).todense()
    out3 = (rhs * sp(lhs)).todense()
    tol = {np.float64: 1E-13, np.complex128: 1E-13,
           np.float32: 1E-6, np.complex64: 1E-6}
    self.assertAllClose(out1, out2, rtol=tol)
    self.assertAllClose(out1, out3, rtol=tol)
def test_bcoo_vmap_shape(self, shape=(2, 3, 4, 5), dtype=np.float32):
# This test checks that BCOO shape metadata interacts correctly with vmap.
rng = rand_sparse(self.rng())
M = rng(shape, dtype)
def make_bcoo(M):
return sparse.BCOO.fromdense(M, nse=np.prod(M.shape[:-1], dtype=int), n_dense=1)
for _ in range(3):
make_bcoo = jax.vmap(make_bcoo)
Msp = make_bcoo(M)
self.assertEqual(Msp.shape, M.shape)
self.assertArraysEqual(Msp.todense(), M)
  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_nbatch={}_ndense={}".format(
        jtu.format_shape_dtype_string(shape, dtype), n_batch, n_dense),
       "shape": shape, "dtype": dtype, "n_batch": n_batch, "n_dense": n_dense}
      for shape in [(5,), (5, 8), (8, 5), (3, 4, 5), (3, 4, 3, 2)]
      for dtype in jtu.dtypes.floating + jtu.dtypes.complex
      for n_batch in range(len(shape) + 1)
      for n_dense in range(len(shape) + 1 - n_batch)))
  def test_bcoo_unbatch(self, shape, dtype, n_batch, n_dense):
    """_unbatch() removes all batch dims while preserving value, shape, and dtype."""
    rng_sparse = rand_sparse(self.rng())
    M1 = sparse.BCOO.fromdense(rng_sparse(shape, dtype), n_batch=n_batch, n_dense=n_dense)
    M2 = M1._unbatch()
    self.assertEqual(M2.n_batch, 0)
    self.assertEqual(M1.n_dense, M2.n_dense)
    self.assertEqual(M1.shape, M2.shape)
    self.assertEqual(M1.dtype, M2.dtype)
    self.assertArraysEqual(M1.todense(), M2.todense())
def test_bcoo_bad_fillvals(self):
# Extra values have 100 rather than zero. This lets us check that logic is
# properly ignoring these indices.
data = jnp.array([1, 2, 3, 100, 100])
indices = jnp.array([1, 2, 3, 5, 5])[:, None]
x_sp = sparse.BCOO((data, indices), shape=(5,))
x_de = x_sp.todense()
data = jnp.array([3, 2, 100, 100])
indices = jnp.array([2, 3, 5, 5])[:, None]
y_sp = sparse.BCOO((data, indices), shape=(5,))
y_de = y_sp.todense()
self.assertArraysEqual(x_de, jnp.array([0, 1, 2, 3, 0]))
self.assertArraysEqual(y_de, jnp.array([0, 0, 3, 2, 0]))
self.assertArraysEqual(x_sp._dedupe().todense(), x_de)
self.assertArraysEqual(y_sp._dedupe().todense(), y_de)
# reduce_sum:
self.assertArraysEqual(x_sp.sum(), x_de.sum())
# bcoo_dot_general
self.assertArraysEqual(x_sp @ y_de, x_de @ y_de)
# bcoo_spdot_general
self.assertArraysEqual((x_sp @ y_sp).todense(), x_de @ y_de)
self.assertArraysEqual((y_sp @ x_sp).todense(), y_de @ x_de)
class SparseGradTest(jtu.JaxTestCase):
  """Tests for sparse.grad, the sparsity-aware analogue of jax.grad."""
  def test_sparse_grad(self):
    """Gradient w.r.t. a BCOO argument equals the dense gradient masked to
    the sparsity pattern of the input."""
    sparse_rng = rand_sparse(self.rng())
    dense_rng = jtu.rand_default(self.rng())
    vec = dense_rng(5, "float32")
    mat = sparse_rng((10, 5), "float32")
    mat_sp = sparse.BCOO.fromdense(mat)
    def loss(m, v):
      return jnp.sum(m @ v)
    dense_grad = jax.grad(loss, argnums=0)(mat, vec)
    bcoo_grad = sparse.grad(loss, argnums=0)(mat_sp, vec)
    # Build the expected sparse gradient by zeroing the dense gradient
    # everywhere outside the nonzero pattern of the sparse input.
    coords = tuple(mat_sp.indices.T)
    expected = jnp.zeros_like(dense_grad).at[coords].set(dense_grad[coords])
    self.assertArraysEqual(bcoo_grad.todense(), expected)
class SparseObjectTest(jtu.JaxTestCase):
def test_repr(self):
M = sparse.BCOO.fromdense(jnp.arange(5, dtype='float32'))
assert repr(M) == "BCOO(float32[5], nse=4)"
M_invalid = sparse.BCOO(([], []), shape=100)
assert repr(M_invalid) == "BCOO(<invalid>)"
@parameterized.named_parameters(
{"testcase_name": "_{}".format(Obj.__name__), "Obj": Obj}
for Obj in [sparse.CSR, sparse.CSC, sparse.COO, sparse.BCOO])
def test_block_until_ready(self, Obj, shape=(5, 8), dtype=np.float32):
rng = rand_sparse(self.rng(), post=Obj.fromdense)
M = rng(shape, dtype)
self.assertEqual(M.shape, M.block_until_ready().shape)
self.assertArraysEqual(M.data, M.block_until_ready().data)
self.assertArraysEqual(M.todense(), M.block_until_ready().todense())
@parameterized.named_parameters(
{"testcase_name": "_{}".format(Obj.__name__), "Obj": Obj}
for Obj in [jnp.array, sparse.CSR, sparse.CSC, sparse.COO, sparse.BCOO])
def test_todense(self, Obj, shape=(5, 8), dtype=np.float32):
rng = rand_sparse(self.rng())
M_dense = rng(shape, dtype)
M = jnp.array(M_dense) if Obj is jnp.array else Obj.fromdense(M_dense)
self.assertArraysEqual(sparse.todense(M), M_dense)
self.assertArraysEqual(jit(sparse.todense)(M), M_dense)
@parameterized.named_parameters(
{"testcase_name": "_{}".format(Obj.__name__), "Obj": Obj}
for Obj in [jnp.array, sparse.BCOO])
def test_todense_batching(self, Obj, shape=(5, 8), dtype=np.float32):
rng = rand_sparse(self.rng())
M_dense = rng(shape, dtype)
if Obj is sparse.BCOO:
M = sparse.BCOO.fromdense(M_dense, n_batch=1)
else:
M = jnp.asarray(M_dense)
self.assertArraysEqual(vmap(sparse.todense)(M), M_dense)
self.assertArraysEqual(jit(vmap(sparse.todense))(M), M_dense)
@parameterized.named_parameters(
{"testcase_name": "_{}".format(Obj.__name__), "Obj": Obj}
for Obj in [jnp.array, sparse.BCOO])
def test_todense_ad(self, Obj, shape=(3,), dtype=np.float32):
M_dense = jnp.array([1., 2., 3.])
M = M_dense if Obj is jnp.array else Obj.fromdense(M_dense)
print(M_dense)
print(M)
bufs, tree = tree_util.tree_flatten(M)
jac = jnp.eye(M.shape[0], dtype=M.dtype)
jac1 = jax.jacfwd(lambda *bufs: sparse.todense_p.bind(*bufs, tree=tree))(*bufs)
jac2 = jax.jacrev(lambda *bufs: sparse.todense_p.bind(*bufs, tree=tree))(*bufs)
self.assertArraysEqual(jac1, jac2)
self.assertArraysEqual(jac, jac2)
@parameterized.named_parameters(
{"testcase_name": "_{}".format(Obj.__name__), "Obj": Obj}
for Obj in [sparse.CSR, sparse.CSC, sparse.COO, sparse.BCOO])
def test_attrs(self, Obj, shape=(5, 8), dtype=np.float16):
rng = rand_sparse(self.rng(), post=Obj.fromdense)
M = rng(shape, dtype)
assert isinstance(M, Obj)
assert M.shape == shape
assert M.dtype == dtype
assert M.nse == (M.todense() != 0).sum()
assert M.data.dtype == dtype
if isinstance(M, sparse.CSR):
assert len(M.data) == len(M.indices)
assert len(M.indptr) == M.shape[0] + 1
elif isinstance(M, sparse.CSC):
assert len(M.data) == len(M.indices)
assert len(M.indptr) == M.shape[1] + 1
elif isinstance(M, sparse.COO):
assert len(M.data) == len(M.row) == len(M.col)
elif isinstance(M, sparse.BCOO):
assert M.data.shape[M.n_batch] == M.indices.shape[-2]
assert M.indices.shape[-1] == M.n_sparse
else:
raise ValueError("Obj={Obj} not expected.")
@parameterized.named_parameters(itertools.chain.from_iterable(
jtu.cases_from_list(
{"testcase_name": "_{}_Obj={}".format(
jtu.format_shape_dtype_string(shape, dtype), Obj.__name__),
"shape": shape, "dtype": dtype, "Obj": Obj}
for shape in [(5, 8), (8, 5), (5, 5), (8, 8)]
for dtype in jtu.dtypes.floating + jtu.dtypes.complex)
for Obj in [sparse.CSR, sparse.CSC, sparse.COO, sparse.BCOO]))
def test_dense_round_trip(self, shape, dtype, Obj):
rng = rand_sparse(self.rng())
M = rng(shape, dtype)
Msparse = Obj.fromdense(M)
self.assertArraysEqual(M, Msparse.todense())
@parameterized.named_parameters(itertools.chain.from_iterable(
jtu.cases_from_list(
{"testcase_name": "_{}_Obj={}".format(
jtu.format_shape_dtype_string(shape, dtype), Obj.__name__),
"shape": shape, "dtype": dtype, "Obj": Obj}
for shape in [(5, 8), (8, 5), (5, 5), (8, 8)]
for dtype in jtu.dtypes.floating + jtu.dtypes.complex)
for Obj in [sparse.CSR, sparse.CSC, sparse.COO, sparse.BCOO]))
def test_transpose(self, shape, dtype, Obj):
rng = rand_sparse(self.rng())
M = rng(shape, dtype)
Msparse = Obj.fromdense(M)
self.assertArraysEqual(M.T, Msparse.T.todense())
@unittest.skipIf(jtu.device_under_test() == "tpu", "TPU has insufficient precision")
@parameterized.named_parameters(itertools.chain.from_iterable(
    jtu.cases_from_list(
        {"testcase_name": "_{}_Obj={}_bshape={}".format(
            jtu.format_shape_dtype_string(shape, dtype), Obj.__name__, bshape),
         "shape": shape, "dtype": dtype, "Obj": Obj, "bshape": bshape}
        for shape in [(5, 8), (8, 5), (5, 5), (8, 8)]
        # bshape is the contracted dim alone (mat-vec) or with a trailing
        # width (mat-mat), so both product flavors are exercised.
        for bshape in [shape[-1:] + s for s in [(), (3,), (4,)]]
        for dtype in jtu.dtypes.floating + jtu.dtypes.complex)
    for Obj in [sparse.CSR, sparse.CSC, sparse.COO, sparse.BCOO]))
def test_matmul(self, shape, dtype, Obj, bshape):
  """sparse @ dense must agree with dense @ dense within MATMUL_TOL."""
  rng = rand_sparse(self.rng(), post=jnp.array)
  rng_b = jtu.rand_default(self.rng())
  M = rng(shape, dtype)
  Msp = Obj.fromdense(M)
  x = rng_b(bshape, dtype)
  x = jnp.asarray(x)
  self.assertAllClose(M @ x, Msp @ x, rtol=MATMUL_TOL)
@parameterized.named_parameters(jtu.cases_from_list(
    {"testcase_name": "_{}({})".format(
        input_type.__name__,
        jtu.format_shape_dtype_string(shape, dtype)),
     "input_type": input_type, "shape": shape, "dtype": dtype}
    for input_type in [scipy.sparse.coo_matrix, scipy.sparse.csr_matrix, scipy.sparse.csc_matrix]
    for shape in [(5, 8), (8, 5), (5, 5), (8, 8)]
    for dtype in jtu.dtypes.floating + jtu.dtypes.complex))
def test_bcoo_from_scipy_sparse(self, input_type, shape, dtype):
  """BCOO.from_scipy_sparse must round-trip COO/CSR/CSC scipy matrices."""
  rng = rand_sparse(self.rng())
  M = rng(shape, dtype)
  M_sparse = input_type(M)
  M_bcoo = sparse.BCOO.from_scipy_sparse(M_sparse)
  self.assertArraysEqual(M, M_bcoo.todense())
def test_bcoo_methods(self):
  """Arithmetic and reductions on BCOO mirror their dense equivalents."""
  dense = jnp.arange(12).reshape(3, 4)
  bcoo = sparse.BCOO.fromdense(dense)
  # Elementwise operators densify back to the expected array.
  self.assertArraysEqual(-dense, (-bcoo).todense())
  self.assertArraysEqual(2 * dense, (2 * bcoo).todense())
  self.assertArraysEqual(dense * 2, (bcoo * 2).todense())
  self.assertArraysEqual(dense + dense, (bcoo + bcoo).todense())
  # Axis reductions return sparse results; a full reduction is a scalar.
  for axis in (0, 1):
    self.assertArraysEqual(dense.sum(axis), bcoo.sum(axis).todense())
  self.assertArraysEqual(dense.sum(), bcoo.sum())
class SparseRandomTest(jtu.JaxTestCase):
  """Tests for sparse.random_bcoo."""

  @parameterized.named_parameters(jtu.cases_from_list(
      {"testcase_name": "_{}_nbatch={}_ndense={}".format(
          jtu.format_shape_dtype_string(shape, dtype), n_batch, n_dense),
       "shape": shape, "dtype": dtype, "n_batch": n_batch, "n_dense": n_dense}
      for shape in [(5,), (5, 8), (8, 5), (3, 4, 5), (3, 4, 3, 2)]
      for dtype in jtu.dtypes.floating
      # Enumerate every legal (n_batch, n_dense) split of the dimensions.
      for n_batch in range(len(shape) + 1)
      for n_dense in range(len(shape) + 1 - n_batch)))
  def test_random_bcoo(self, shape, dtype, n_batch, n_dense):
    """random_bcoo yields the requested shape/dtype and ~0.2 sparse density."""
    key = random.PRNGKey(1701)  # fixed seed keeps the check deterministic
    mat = sparse.random_bcoo(key, shape=shape, dtype=dtype, n_batch=n_batch, n_dense=n_dense)
    mat_dense = mat.todense()
    self.assertEqual(mat_dense.shape, shape)
    self.assertEqual(mat_dense.dtype, dtype)
    n_sparse = len(shape) - n_batch - n_dense
    batch_shape, sparse_shape, dense_shape = split_list(shape, [n_batch, n_sparse])
    # The 0.2 density applies only to the sparse dims; batch and dense dims
    # are fully populated, hence the product of their sizes as a multiplier.
    approx_expected_num_nonzero = (
        np.ceil(0.2 * np.prod(sparse_shape))
        * np.prod(batch_shape) * np.prod(dense_shape))
    num_nonzero = (mat_dense != 0).sum()
    self.assertAlmostEqual(num_nonzero, approx_expected_num_nonzero, delta=2)
if __name__ == "__main__":
  # Run under absltest with JAX's loader so standard test filtering works.
  absltest.main(testLoader=jtu.JaxTestLoader())
| 44.732777 | 143 | 0.632442 |
795588712cd699d000cc16948e8eb15e79c75654 | 5,059 | py | Python | feedback/migrations/0001_initial.py | uktrade/feed-gov-back | c2b9534ac39eb727cafb06eeb7528a1ad622512a | [
"MIT"
] | 1 | 2019-01-08T23:01:25.000Z | 2019-01-08T23:01:25.000Z | feedback/migrations/0001_initial.py | uktrade/feed-gov-back | c2b9534ac39eb727cafb06eeb7528a1ad622512a | [
"MIT"
] | null | null | null | feedback/migrations/0001_initial.py | uktrade/feed-gov-back | c2b9534ac39eb727cafb06eeb7528a1ad622512a | [
"MIT"
] | null | null | null | # Generated by Django 2.1.3 on 2019-01-01 13:50
from django.conf import settings
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
    """Initial schema for the feedback app.

    Auto-generated by Django 2.1.3; creates the ElementType,
    FeedbackCollection, FeedbackData, FeedbackForm, FormElement and
    Placement tables, then wires up the remaining cross-model foreign keys.
    """

    initial = True

    dependencies = [
        # Needed for FeedbackCollection.created_by -> AUTH_USER_MODEL.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='ElementType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=250)),
                ('key', models.CharField(max_length=30)),
                ('options', django.contrib.postgres.fields.jsonb.JSONField(default=dict)),
            ],
        ),
        migrations.CreateModel(
            name='FeedbackCollection',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_modified', models.DateTimeField(auto_now=True)),
                ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='FeedbackData',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_modified', models.DateTimeField(auto_now=True)),
                ('value', django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True)),
                ('collection', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='feedback.FeedbackCollection')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='FeedbackForm',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_modified', models.DateTimeField(auto_now=True)),
                ('name', models.CharField(max_length=250)),
                ('key', models.CharField(blank=True, max_length=32, null=True, unique=True)),
                ('description', models.TextField(blank=True, null=True)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='FormElement',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_modified', models.DateTimeField(auto_now=True)),
                ('name', models.CharField(max_length=250)),
                ('label', models.CharField(blank=True, max_length=1000, null=True)),
                ('description', models.TextField(blank=True, null=True)),
                ('options', django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=dict)),
                ('order', models.SmallIntegerField(default=0)),
                ('element_type', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='feedback.ElementType')),
                ('form', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='feedback.FeedbackForm')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Placement',
            fields=[
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('last_modified', models.DateTimeField(auto_now=True)),
                ('id', models.CharField(max_length=64, primary_key=True, serialize=False)),
                ('name', models.CharField(blank=True, max_length=250, null=True)),
                ('url', models.URLField(blank=True, null=True)),
            ],
            options={
                'abstract': False,
            },
        ),
        # Remaining foreign keys are added after all tables exist, so the
        # creation order above does not matter.
        migrations.AddField(
            model_name='feedbackdata',
            name='element',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='feedback.FormElement'),
        ),
        migrations.AddField(
            model_name='feedbackcollection',
            name='form',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='feedback.FeedbackForm'),
        ),
        migrations.AddField(
            model_name='feedbackcollection',
            name='placement',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='feedback.Placement'),
        ),
    ]
| 44.377193 | 147 | 0.578375 |
795589976c64ff2a8b509e91978146f8641d4dba | 751 | py | Python | transmit.py | simondlevy/EMS22A50 | a7954a84ad67507a9f8de831fe45d7e7de779c41 | [
"MIT"
] | null | null | null | transmit.py | simondlevy/EMS22A50 | a7954a84ad67507a9f8de831fe45d7e7de779c41 | [
"MIT"
] | null | null | null | transmit.py | simondlevy/EMS22A50 | a7954a84ad67507a9f8de831fe45d7e7de779c41 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
'''
Reads data from EMS22A50 and sends it over a serial port as ASCII
Uses '*' character as delimiter
Copyright (c) 2021 Simon D. Levy
MIT License
'''
import time
import serial
import RPi.GPIO as GPIO
from ems22a50 import EMS22A50
def main():
    """Stream EMS22A50 encoder positions over the serial port until Ctrl-C.

    Each reading is sent as ASCII followed by the '*' delimiter.
    """
    # -------------- CLK DAT CS
    encoder = EMS22A50(2, 3, 4)
    encoder.start()
    time.sleep(0.5)
    print('GPIO configuration enabled')
    port = serial.Serial('/dev/ttyAMA0', 115200)
    try:
        while True:
            try:
                msg = str(encoder.readpos()) + '*'
                port.write(msg.encode('utf-8'))
                time.sleep(0.001)
            except KeyboardInterrupt:
                break
    finally:
        # Bug fix: previously cleanup only ran after a clean break, so any
        # unexpected exception leaked the GPIO pins and the serial port.
        GPIO.cleanup()
        port.close()
if __name__ == '__main__':
    # Run the transmitter when executed as a script.
    main()
| 16.326087 | 65 | 0.596538 |
79558a9d377f550141e0e725b5ea235c272b633f | 502 | py | Python | src/domain/flight/entity.py | Monster-Gem/f-ticket | bfee2bcc3db92e2350c0f3bbc32108a79350583e | [
"MIT"
] | 1 | 2022-03-23T13:35:38.000Z | 2022-03-23T13:35:38.000Z | src/domain/flight/entity.py | Monster-Gem/f-ticket | bfee2bcc3db92e2350c0f3bbc32108a79350583e | [
"MIT"
] | null | null | null | src/domain/flight/entity.py | Monster-Gem/f-ticket | bfee2bcc3db92e2350c0f3bbc32108a79350583e | [
"MIT"
] | null | null | null | from enum import unique
from domain.maintenance.repository import database
import uuid
from domain.route.entity import Route
class Flight(database.Document):
    """Persisted flight record (ODM document; presumably MongoEngine -- confirm
    against domain.maintenance.repository)."""
    # Stable external identifier (UUID4 string), independent of the DB's own id.
    public_id = database.StringField(max_length=50, unique = True, default=lambda: str(uuid.uuid4()))
    # Ticket price, stored with two decimal places.
    price = database.DecimalField(required=True, decimal_places=2)
    # Maximum number of seats that can be sold.
    max_capacity = database.IntField(required=True)
    # Reference to the Route this flight serves.
    route = database.ReferenceField(Route)
departure_time = database.DateTimeField(required=True, unique_with='route') | 45.636364 | 101 | 0.788845 |
79558b2924a6af68c17b28927502d5b49882d2e7 | 686 | py | Python | app/core/migrations/0003_ingradient.py | userSoni/recipe-app-api | b9cc66ccd69c91d4698aec957b0b4e3523dd8e8d | [
"MIT"
] | null | null | null | app/core/migrations/0003_ingradient.py | userSoni/recipe-app-api | b9cc66ccd69c91d4698aec957b0b4e3523dd8e8d | [
"MIT"
] | null | null | null | app/core/migrations/0003_ingradient.py | userSoni/recipe-app-api | b9cc66ccd69c91d4698aec957b0b4e3523dd8e8d | [
"MIT"
] | null | null | null | # Generated by Django 2.1.15 on 2021-08-31 08:09
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration adding the Ingradient model.

    NOTE(review): "Ingradient" (sic) mirrors the model name in core.models;
    renaming it would require a follow-up migration.
    """

    dependencies = [
        ('core', '0002_tag'),
    ]

    operations = [
        migrations.CreateModel(
            name='Ingradient',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                # Owning user; deleting the user cascades to their ingredients.
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| 28.583333 | 118 | 0.618076 |
79558b9374c73c01d7aac020121dcdf607b76eeb | 1,377 | py | Python | baidu_code/bcoreapi/ticket/settings.py | deevarvar/myLab | 7a5019f5f7fc11e173d350e6e2a7d2c80504782d | [
"MIT"
] | null | null | null | baidu_code/bcoreapi/ticket/settings.py | deevarvar/myLab | 7a5019f5f7fc11e173d350e6e2a7d2c80504782d | [
"MIT"
] | null | null | null | baidu_code/bcoreapi/ticket/settings.py | deevarvar/myLab | 7a5019f5f7fc11e173d350e6e2a7d2c80504782d | [
"MIT"
] | 3 | 2016-10-08T15:01:49.000Z | 2018-05-24T03:14:24.000Z | ODP_MOVIE_PATH = '/home/map/odp_movie'
# Cinema/ticketing partner ids accepted by the legacy API.
OLD_PARTNER_SUPPORTED = ['cmts','dingxin','jinyi','maizuo','txpc','vista','newvista','wangpiao','cfc']
# new api ignoring : komovie,lanhai
NEW_PARTNER_SUPPORTED = ['cmts','dingxin','fire','jinyi','shiguang','maizuo','spider','txpc','vista','wangpiao','xingmei','cfc']

# odp_movie service endpoint; alternative environments kept commented out.
SERVER = {
    # pool04
    'HOST' : '10.94.34.26',
    'PORT' : '8204'
    # online
    #'HOST' : 'yf-orp-app0003.yf01',
    #'PORT' : '8240'
    # yangyu's odp_movie
    #'HOST' : '10.94.22.20',
    #'PORT' : '8111'
}

# odp_pal service endpoint.
ODP_PAL_SERVER = {
    # pool04
    'HOST' : '10.94.34.26',
    'PORT' : '8888'
    # yangyu's odp_pal
    #'HOST' : 'cp01-ocean-pool002.cp01.baidu.com',
    #'PORT' : '8888'
}

# Redis connection, plus the key names used for success/failure counters.
REDIS_CONF = {
    'HOST' : '10.94.34.26',
    'PORT' : 6379,
    'DB' : 0,
    'SUC_NO' : 'sec_no',
    'FAIL_NO' : 'fail_no'
}

# MySQL connection for the instant_info database.
# NOTE: unlike SERVER/ODP_PAL_SERVER, PORT here is an int, not a string.
MYSQL = {
    # pool03
    'HOST' : '10.94.34.26',
    'PORT' : 3305,
    'DB' : 'instant_info',
    # yangyu's sql
    #'HOST' : '10.58.102.38',
    #'PORT' : 3306,
    #'DB' : 'instant_info',
    'USER' : 'root',
    'PASSWD' : 'root'
}

# Request-signing keys; the commented alternatives are other environments.
#SIGN_KEY = 'seat'
SIGN_KEY = 'b_movie_core_api'
#SIGN_KEY = 'B_MOVIE_CORE_API'
ONLINE_SIGN_KEY = 'b_movie_core_api'
PAGE_SIGN_KEY = 'baidu'
#PAY_NOTICE_SIGN_KEY = '8343e400f6b0ff2fc13337f7f2cf33ff'
PAY_NOTICE_SIGN_KEY = '123456'
| 22.57377 | 129 | 0.554829 |
79558cb7fc4ba65abb055b3f84638afec228f04b | 1,506 | py | Python | atlantis/front/views.py | jcabdala/atlantisapp | 4cf926048f0f5b68afc84b763ea1db532f66ab4e | [
"BSD-3-Clause"
] | null | null | null | atlantis/front/views.py | jcabdala/atlantisapp | 4cf926048f0f5b68afc84b763ea1db532f66ab4e | [
"BSD-3-Clause"
] | null | null | null | atlantis/front/views.py | jcabdala/atlantisapp | 4cf926048f0f5b68afc84b763ea1db532f66ab4e | [
"BSD-3-Clause"
] | null | null | null | from django.shortcuts import render_to_response
from django.template import RequestContext
from django.forms import ModelForm
from front.models import Client
import requests
import json
# Create your views here.
def index(request, *args, **kwargs):
    """Render the landing page with an empty client form."""
    form = ClientForm()
    # NOTE: locals() is passed as the template context, so the template
    # depends on the local variable name "form".
    return render_to_response("index.html", locals(),
                              context_instance=RequestContext(request))
def calcular(request, *args, **kwargs):
    """Validate the submitted coordinates and report whether they fall in a
    previously flooded zone, as answered by the remote atlantis service.

    (Python 2 code: note the print statements.)
    """
    calc = ClientForm(request.POST)
    if calc.is_valid():
        lat = calc.cleaned_data['lat']
        lon = calc.cleaned_data['lon']
        name = calc.cleaned_data['name']
        email = calc.cleaned_data['email']
        print lat
        print lon
        # Remote flood-lookup service; returns JSON with a "rows" list.
        url = "http://172.18.7.119:8000/atlantis/default/agua.json?"
        data = url + "latitud=" + str(lat) + "&longitud=" + str(lon)
        r = requests.get(data)
        dic = r.json()
        print dic
        zone = ""
        status = ""
        print dic["rows"][0]
        # NOTE(review): assumes dic["rows"] is non-empty -- an empty result
        # raises IndexError here; confirm against the service contract.
        if dic["rows"][0]:
            status = "danger"
            zone = "Ha sufrido una inundacion "
        else:
            zone = "No a sufrido una inundacion "
            status = "success"
    # locals() feeds the template (zone/status/name/lat/lon).
    # NOTE(review): if the form is invalid those names are undefined and the
    # template must tolerate the missing context keys -- confirm.
    return render_to_response("report.html", locals(),
                              context_instance=RequestContext(request))
class ClientForm(ModelForm):
    """ModelForm exposing the Client fields needed by the flood lookup."""
    class Meta:
        model = Client
        fields = ["name", "lat", "lon", "email"]
| 30.734694 | 72 | 0.557105 |
79558cb8820abab0f14c235fa9891e092b15dc00 | 1,472 | py | Python | tests/test_registries.py | d3rky/django-cqrs | 16c7d80409b44264141240b0c8a97d5838555039 | [
"Apache-2.0"
] | null | null | null | tests/test_registries.py | d3rky/django-cqrs | 16c7d80409b44264141240b0c8a97d5838555039 | [
"Apache-2.0"
] | null | null | null | tests/test_registries.py | d3rky/django-cqrs | 16c7d80409b44264141240b0c8a97d5838555039 | [
"Apache-2.0"
] | null | null | null | # Copyright © 2020 Ingram Micro Inc. All rights reserved.
import pytest
from dj_cqrs.registries import MasterRegistry, ReplicaRegistry
from tests.dj_master import models as master_models
from tests.dj_replica import models as replica_models
@pytest.mark.parametrize('registry', [MasterRegistry, ReplicaRegistry])
def test_duplicate_cqrs_id(registry):
    """Registering a second model under an already-taken CQRS_ID must fail."""
    class Duplicate(object):
        CQRS_ID = 'basic'  # 'basic' is already registered by the fixture models

    with pytest.raises(AssertionError) as excinfo:
        registry.register_model(Duplicate)
    assert str(excinfo.value) == "Two models can't have the same CQRS_ID: basic."
@pytest.mark.parametrize('model,registry', (
    (master_models.SimplestModel, MasterRegistry),
    (master_models.AutoFieldsModel, MasterRegistry),
    (replica_models.BasicFieldsModelRef, ReplicaRegistry),
    (replica_models.BadTypeModelRef, ReplicaRegistry),
))
def test_models_are_registered(model, registry):
    """Each fixture model is reachable via the mapping and the lookup API."""
    assert registry.models[model.CQRS_ID] == model
    assert registry.get_model_by_cqrs_id(model.CQRS_ID) == model
def test_get_model_by_cqrs_id_no_id(caplog):
    """Unknown CQRS_ID yields None and logs a diagnostic message."""
    result = ReplicaRegistry.get_model_by_cqrs_id('invalid')
    assert result is None
    assert 'No model with such CQRS_ID: invalid.' in caplog.text
def test_no_cqrs_queue(settings):
    """Replica models cannot be registered while the CQRS queue is unset."""
    settings.CQRS.update({'queue': None})
    with pytest.raises(AssertionError) as e:
        ReplicaRegistry.register_model(replica_models.MappedFieldsModelRef)
    assert str(e.value) == 'CQRS queue must be set for the service, that has replica models.'
| 33.454545 | 93 | 0.765625 |
79558ceb4ef200642a091b96f07a6c40b417ce38 | 1,494 | py | Python | fangyan_tones/utils/pinyin_utils.py | wlans4/fangyan_tones | bb108ecf771cdad9acd8a9197e2babaf37c681e1 | [
"Apache-2.0"
] | null | null | null | fangyan_tones/utils/pinyin_utils.py | wlans4/fangyan_tones | bb108ecf771cdad9acd8a9197e2babaf37c681e1 | [
"Apache-2.0"
] | null | null | null | fangyan_tones/utils/pinyin_utils.py | wlans4/fangyan_tones | bb108ecf771cdad9acd8a9197e2babaf37c681e1 | [
"Apache-2.0"
] | null | null | null | from pypinyin import pinyin as to_pinyin
from pypinyin import Style as style
tone_styles = [style.TONE, style.TONE2, style.TONE3]
def char_to_pinyin(char: str, tone_style: int) -> str:
    """Return the pinyin for a single Chinese character.

    Parameters
    ----------
    char : str
        A single Chinese character.
    tone_style : int
        Index selecting a pypinyin tone style from ``tone_styles``
        (TONE, TONE2, TONE3).

    Returns
    -------
    str
        The pinyin syllable; heteronyms are not expanded.
    """
    converted = to_pinyin(char, style=tone_styles[tone_style], heteronyms=False)
    # pypinyin returns a list of per-character lists; unwrap both levels.
    return converted[0][0]
def chars_to_pinyin(chars: str, tone_style: int, as_list=True) -> [str]:
    """Convert a string of Chinese characters to pinyin.

    Parameters
    ----------
    chars : str
        The characters to convert.
    tone_style : int
        Index selecting a pypinyin tone style from ``tone_styles``.
    as_list : bool, optional
        When True return a list of syllables, otherwise a single
        space-separated string.

    Returns
    -------
    [str]
        The pinyin syllables (or a joined string when ``as_list`` is False).
    """
    converted = to_pinyin(chars, style=tone_styles[tone_style])
    # pypinyin wraps each syllable in its own list; flatten, and drop the
    # entries produced by literal spaces in the input.
    syllables = [entry[0] for entry in converted if entry[0] != " "]
    return syllables if as_list else " ".join(syllables)
| 30.489796 | 103 | 0.655288 |
79558e180765eaaea1096fa02811e0f64d72a408 | 1,487 | py | Python | src/client_libraries/python/dynamics/customerinsights/api/models/odata_inner_error.py | microsoft/Dynamics365-CustomerInsights-Client-Libraries | e00632f7972717b03e0fb1a9e2667e8f9444a0fe | [
"MIT"
] | null | null | null | src/client_libraries/python/dynamics/customerinsights/api/models/odata_inner_error.py | microsoft/Dynamics365-CustomerInsights-Client-Libraries | e00632f7972717b03e0fb1a9e2667e8f9444a0fe | [
"MIT"
] | null | null | null | src/client_libraries/python/dynamics/customerinsights/api/models/odata_inner_error.py | microsoft/Dynamics365-CustomerInsights-Client-Libraries | e00632f7972717b03e0fb1a9e2667e8f9444a0fe | [
"MIT"
] | 7 | 2021-02-11T19:48:57.000Z | 2021-12-17T08:00:15.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ODataInnerError(Model):
    """ODataInnerError.

    msrest deserialization model for the nested "innerError" payload of an
    OData error response; instances may recursively contain another
    ODataInnerError.

    :param properties:
    :type properties: dict[str,
     ~dynamics.customerinsights.api.models.ODataValue]
    :param message:
    :type message: str
    :param type_name:
    :type type_name: str
    :param stack_trace:
    :type stack_trace: str
    :param inner_error:
    :type inner_error: ~dynamics.customerinsights.api.models.ODataInnerError
    """

    # msrest (de)serialization map: python attribute -> wire key and type.
    _attribute_map = {
        'properties': {'key': 'properties', 'type': '{ODataValue}'},
        'message': {'key': 'message', 'type': 'str'},
        'type_name': {'key': 'typeName', 'type': 'str'},
        'stack_trace': {'key': 'stackTrace', 'type': 'str'},
        'inner_error': {'key': 'innerError', 'type': 'ODataInnerError'},
    }

    def __init__(self, **kwargs):
        # All mapped attributes are optional keyword arguments.
        super(ODataInnerError, self).__init__(**kwargs)
        self.properties = kwargs.get('properties', None)
        self.message = kwargs.get('message', None)
        self.type_name = kwargs.get('type_name', None)
        self.stack_trace = kwargs.get('stack_trace', None)
        self.inner_error = kwargs.get('inner_error', None)
| 35.404762 | 76 | 0.579018 |
79558e8877be7ecaebc345eb4ff80b259bbc1f5a | 5,119 | py | Python | bsp/stm32/stm32l053-st-nucleo/rtconfig.py | rockonedege/rt-thread | 4fe6c709d0bfe719bed6c927f0144ba373bbda5a | [
"Apache-2.0"
] | 7,482 | 2015-01-01T09:23:08.000Z | 2022-03-31T19:34:05.000Z | bsp/stm32/stm32l053-st-nucleo/rtconfig.py | rockonedege/rt-thread | 4fe6c709d0bfe719bed6c927f0144ba373bbda5a | [
"Apache-2.0"
] | 2,543 | 2015-01-09T02:01:34.000Z | 2022-03-31T23:10:14.000Z | bsp/stm32/stm32l053-st-nucleo/rtconfig.py | rockonedege/rt-thread | 4fe6c709d0bfe719bed6c927f0144ba373bbda5a | [
"Apache-2.0"
] | 4,645 | 2015-01-06T07:05:31.000Z | 2022-03-31T18:21:50.000Z | import os
# toolchains options
ARCH='arm'
CPU='cortex-m0'
CROSS_TOOL='gcc'

# bsp lib config
BSP_LIBRARY_TYPE = None

# RTT_CC / RTT_ROOT may be supplied via the environment (e.g. by the build
# scripts) and override the defaults above.
if os.getenv('RTT_CC'):
    CROSS_TOOL = os.getenv('RTT_CC')
if os.getenv('RTT_ROOT'):
    RTT_ROOT = os.getenv('RTT_ROOT')

# cross_tool provides the cross compiler
# EXEC_PATH is the compiler execute path, for example, CodeSourcery, Keil MDK, IAR
if CROSS_TOOL == 'gcc':
    PLATFORM = 'gcc'
    EXEC_PATH = r'C:\Users\XXYYZZ'
elif CROSS_TOOL == 'keil':
    PLATFORM = 'armcc'
    EXEC_PATH = r'C:/Keil_v5'
elif CROSS_TOOL == 'iar':
    PLATFORM = 'iar'
    EXEC_PATH = r'C:/Program Files (x86)/IAR Systems/Embedded Workbench 8.0'

# An explicit RTT_EXEC_PATH always wins over the per-toolchain default.
if os.getenv('RTT_EXEC_PATH'):
    EXEC_PATH = os.getenv('RTT_EXEC_PATH')

BUILD = 'debug'
# Per-toolchain compiler/assembler/linker configuration for this Cortex-M0 BSP.
if PLATFORM == 'gcc':
    # toolchains (GNU Arm Embedded)
    PREFIX = 'arm-none-eabi-'
    CC = PREFIX + 'gcc'
    AS = PREFIX + 'gcc'
    AR = PREFIX + 'ar'
    CXX = PREFIX + 'g++'
    LINK = PREFIX + 'gcc'
    TARGET_EXT = 'elf'
    SIZE = PREFIX + 'size'
    OBJDUMP = PREFIX + 'objdump'
    OBJCPY = PREFIX + 'objcopy'

    DEVICE = ' -mcpu=cortex-m0plus -mthumb -ffunction-sections -fdata-sections'
    CFLAGS = DEVICE + ' -Dgcc'
    AFLAGS = ' -c' + DEVICE + ' -x assembler-with-cpp -Wa,-mimplicit-it=thumb '
    LFLAGS = DEVICE + ' -Wl,--gc-sections,-Map=rt-thread.map,-cref,-u,Reset_Handler -T board/linker_scripts/link.lds'

    CPATH = ''
    LPATH = ''

    if BUILD == 'debug':
        CFLAGS += ' -O0 -gdwarf-2 -g'
        AFLAGS += ' -gdwarf-2'
    else:
        CFLAGS += ' -O2'

    CXXFLAGS = CFLAGS

    # Convert the ELF to a raw binary and print the section sizes.
    POST_ACTION = OBJCPY + ' -O binary $TARGET rtthread.bin\n' + SIZE + ' $TARGET \n'

elif PLATFORM == 'armcc':
    # toolchains (Keil MDK / ARM Compiler 5)
    CC = 'armcc'
    CXX = 'armcc'
    AS = 'armasm'
    AR = 'armar'
    LINK = 'armlink'
    TARGET_EXT = 'axf'

    DEVICE = ' --cpu Cortex-M0 '
    CFLAGS = '-c ' + DEVICE + ' --apcs=interwork --c99'
    AFLAGS = DEVICE + ' --apcs=interwork '
    LFLAGS = DEVICE + ' --scatter "board\linker_scripts\link.sct" --info sizes --info totals --info unused --info veneers --list rt-thread.map --strict'
    CFLAGS += ' -I' + EXEC_PATH + '/ARM/ARMCC/include'
    LFLAGS += ' --libpath=' + EXEC_PATH + '/ARM/ARMCC/lib'

    # Build against MicroLIB to reduce the image footprint.
    CFLAGS += ' -D__MICROLIB '
    AFLAGS += ' --pd "__MICROLIB SETA 1" '
    LFLAGS += ' --library_type=microlib '
    EXEC_PATH += '/ARM/ARMCC/bin/'

    if BUILD == 'debug':
        CFLAGS += ' -g -O0'
        AFLAGS += ' -g'
    else:
        CFLAGS += ' -O2'

    CXXFLAGS = CFLAGS
    CFLAGS += ' -std=c99'

    POST_ACTION = 'fromelf --bin $TARGET --output rtthread.bin \nfromelf -z $TARGET'

elif PLATFORM == 'armclang':
    # toolchains (Keil MDK / ARM Compiler 6)
    CC = 'armclang'
    CXX = 'armclang'
    AS = 'armasm'
    AR = 'armar'
    LINK = 'armlink'
    TARGET_EXT = 'axf'

    DEVICE = ' --cpu Cortex-M0 '
    CFLAGS = ' --target=arm-arm-none-eabi -mcpu=cortex-m0 '
    CFLAGS += ' -mcpu=cortex-m0 '
    CFLAGS += ' -c -fno-rtti -funsigned-char -fshort-enums -fshort-wchar '
    CFLAGS += ' -gdwarf-3 -ffunction-sections '
    AFLAGS = DEVICE + ' --apcs=interwork '
    LFLAGS = DEVICE + ' --info sizes --info totals --info unused --info veneers '
    LFLAGS += ' --list rt-thread.map '
    LFLAGS += r' --strict --scatter "board\linker_scripts\link.sct" '
    CFLAGS += ' -I' + EXEC_PATH + '/ARM/ARMCLANG/include'
    LFLAGS += ' --libpath=' + EXEC_PATH + '/ARM/ARMCLANG/lib'

    EXEC_PATH += '/ARM/ARMCLANG/bin/'

    if BUILD == 'debug':
        CFLAGS += ' -g -O1' # armclang recommend
        AFLAGS += ' -g'
    else:
        CFLAGS += ' -O2'

    CXXFLAGS = CFLAGS
    CFLAGS += ' -std=c99'

    POST_ACTION = 'fromelf --bin $TARGET --output rtthread.bin \nfromelf -z $TARGET'

elif PLATFORM == 'iar':
    # toolchains (IAR Embedded Workbench)
    CC = 'iccarm'
    CXX = 'iccarm'
    AS = 'iasmarm'
    AR = 'iarchive'
    LINK = 'ilinkarm'
    TARGET_EXT = 'out'

    DEVICE = '-Dewarm'

    CFLAGS = DEVICE
    CFLAGS += ' --diag_suppress Pa050'
    CFLAGS += ' --no_cse'
    CFLAGS += ' --no_unroll'
    CFLAGS += ' --no_inline'
    CFLAGS += ' --no_code_motion'
    CFLAGS += ' --no_tbaa'
    CFLAGS += ' --no_clustering'
    CFLAGS += ' --no_scheduling'
    CFLAGS += ' --endian=little'
    CFLAGS += ' --cpu=Cortex-M0'
    CFLAGS += ' -e'
    CFLAGS += ' --fpu=None'
    CFLAGS += ' --dlib_config "' + EXEC_PATH + '/arm/INC/c/DLib_Config_Normal.h"'
    CFLAGS += ' --silent'

    AFLAGS = DEVICE
    AFLAGS += ' -s+'
    AFLAGS += ' -w+'
    AFLAGS += ' -r'
    AFLAGS += ' --cpu Cortex-M0'
    AFLAGS += ' --fpu None'
    AFLAGS += ' -S'

    if BUILD == 'debug':
        CFLAGS += ' --debug'
        CFLAGS += ' -On'
    else:
        CFLAGS += ' -Oh'

    LFLAGS = ' --config "board/linker_scripts/link.icf"'
    LFLAGS += ' --entry __iar_program_start'

    CXXFLAGS = CFLAGS

    EXEC_PATH = EXEC_PATH + '/arm/bin/'
    POST_ACTION = 'ielftool --bin $TARGET rtthread.bin'
def dist_handle(BSP_ROOT, dist_dir):
    """Entry point for `scons --dist`: delegate BSP packaging to tools/sdk_dist.

    BSP_ROOT is this BSP's directory; dist_dir is where the distribution is
    assembled.
    """
    import sys
    # Make the SDK-level tools/ directory importable before loading sdk_dist.
    sys.path.append(os.path.join(os.path.dirname(BSP_ROOT), 'tools'))
    from sdk_dist import dist_do_building
    # (Removed the unused `cwd_path = os.getcwd()` local.)
    dist_do_building(BSP_ROOT, dist_dir)
| 27.67027 | 152 | 0.577457 |
795592bee4efa846ae1a010dbe6a48bac3f75d47 | 544 | py | Python | rmp_nav/simulation/agent_factory.py | Jbwasse2/rmp_nav | 270de9d302630724c5e3fcc63c19e7449aaf18ac | [
"MIT"
] | null | null | null | rmp_nav/simulation/agent_factory.py | Jbwasse2/rmp_nav | 270de9d302630724c5e3fcc63c19e7449aaf18ac | [
"MIT"
] | null | null | null | rmp_nav/simulation/agent_factory.py | Jbwasse2/rmp_nav | 270de9d302630724c5e3fcc63c19e7449aaf18ac | [
"MIT"
] | null | null | null | from __future__ import print_function
# Import agents
from . import agent_factory_common, agent_factory_minirccar, agent_factory_rccar
"""
Unless absolutely necessary, we should only add agents to this file , but not modify any existing
agent.
Agents that start with '_' are private agents. For example, agents that are used for training.
"""
# Re-export the registries assembled in agent_factory_common so callers can
# import this module and look agents up by name.
all_agents = agent_factory_common.all_agents
public_agents = agent_factory_common.public_agents
private_agents = agent_factory_common.private_agents
agents_dict = agent_factory_common.agents_dict
| 32 | 98 | 0.830882 |
79559359e4ca9ff5ae91f9682a0be9aac68cd2b2 | 2,520 | py | Python | files/044 - pentagon numbers.py | farukara/Project-Euler-problems | 806fdbd797edd9929728b43cc428a55df50e1c01 | [
"MIT"
] | null | null | null | files/044 - pentagon numbers.py | farukara/Project-Euler-problems | 806fdbd797edd9929728b43cc428a55df50e1c01 | [
"MIT"
] | null | null | null | files/044 - pentagon numbers.py | farukara/Project-Euler-problems | 806fdbd797edd9929728b43cc428a55df50e1c01 | [
"MIT"
] | null | null | null | #!python3
# coding: utf-8
# Pentagonal numbers are generated by the formula, Pn=n(3n−1)/2. The first ten pentagonal numbers are:
# 1, 5, 12, 22, 35, 51, 70, 92, 117, 145, ...
# It can be seen that P4 + P7 = 22 + 70 = 92 = P8. However, their difference, 70 − 22 = 48, is not pentagonal.
# Find the pair of pentagonal numbers, Pj and Pk, for which their sum and difference are pentagonal and D = |Pk − Pj| is minimised; what is the value of D?
#https://projecteuler.net/problem=44
from time import perf_counter
from math import sqrt
def timed(function):
    """Decorator: print how long each call to *function* takes, in seconds."""
    import functools  # local import keeps the fix self-contained

    @functools.wraps(function)  # fix: preserve __name__/__doc__ of the wrapped fn
    def wrapper(*args, **kwargs):
        start = perf_counter()
        value = function(*args, **kwargs)
        finish = perf_counter()
        print(f"\n\"{function.__name__}\" function took {finish - start:.2f} seconds")
        return value
    return wrapper
def pentagon_sieve(n):
    """Boolean lookup table marking the first n pentagonal numbers.

    The list is sized to 2 * (P_n + 1) so that the *sum* of any two marked
    pentagons is still a valid index.
    """
    largest_pentagon = int(n * (3 * n - 1) / 2)
    sieve = [False] * ((largest_pentagon + 1) * 2)
    sieve[1] = True  # P_1 == 1 (set unconditionally, matching original behavior)
    for k in range(2, n + 1):
        sieve[int(k * (3 * k - 1) / 2)] = True
    return sieve
@timed
def method1():  # ~25 seconds: brute force over a precomputed pentagon list
    """Collect every pentagon pair whose sum and difference are pentagonal,
    then report the minimum difference found.
    """
    limit = 10_000
    sieve = pentagon_sieve(limit)
    list_of_pentagons = [i for i in range(len(sieve)) if sieve[i]]
    start = perf_counter()
    # Maps difference -> (i, j) index pair; later pairs with the same
    # difference overwrite earlier ones, which is fine for reporting the min.
    candidates = {}
    for i in range(len(list_of_pentagons)):
        for j in range(i + 1, len(list_of_pentagons)):
            if (sieve[list_of_pentagons[i] + list_of_pentagons[j]]
                    and sieve[list_of_pentagons[j] - list_of_pentagons[i]]):
                difference = list_of_pentagons[j] - list_of_pentagons[i]
                candidates[difference] = (i, j)
    print()
    print(candidates)
    if candidates:
        # Bug fix: previously this printed whichever difference was found
        # *last* (and raised NameError when no pair existed); report the
        # actual minimum instead.
        print("Minimum difference: ", min(candidates))
    else:
        print("No qualifying pair found below the limit.")
    end = perf_counter()
    print("Time: ", end - start)
@timed
def method2():  # ~2 seconds: stop at the first qualifying pair
    """Scan pentagon pairs in order of increasing larger index and print the
    first difference D = P_i - P_j whose sum and difference are pentagonal.
    """
    def is_pentagon(m: int) -> bool:
        """True if m is a pentagonal number (inverse of the pentagon formula)."""
        root = (-1 - sqrt(1 - (4 * 3 * (2 * m * (-1))))) / 6
        return root.is_integer()

    def pentagonize(k):
        """Return the k-th pentagonal number as an int."""
        return int(k * (3 * k - 1) / 2)

    # (Removed the unused `limit` / `limit_pentagon` locals.)
    i = 2
    while True:
        i += 1
        for j in range(1, i):
            if is_pentagon(pentagonize(i) + pentagonize(j)) and is_pentagon(pentagonize(i) - pentagonize(j)):
                # Bug fix: the original printed pentagonize(j) - pentagonize(i),
                # which is negative since j < i; the problem asks for |Pk - Pj|.
                print(pentagonize(i) - pentagonize(j))
                # `return` replaces the flag/break/break dance.
                return
if __name__ == "__main__":
    # method2 is the faster (~2 s) solution; method1 (~25 s) is kept for comparison.
    method2()
| 32.727273 | 155 | 0.60754 |
7955938902d7c528cc7435b7f6d74bf4410e92f9 | 1,148 | py | Python | application.py | cloudpassage/list_unprotected_docker_engines | 8ec56596e0f8c3a0fd63639548d5fc230a620769 | [
"BSD-2-Clause"
] | null | null | null | application.py | cloudpassage/list_unprotected_docker_engines | 8ec56596e0f8c3a0fd63639548d5fc230a620769 | [
"BSD-2-Clause"
] | null | null | null | application.py | cloudpassage/list_unprotected_docker_engines | 8ec56596e0f8c3a0fd63639548d5fc230a620769 | [
"BSD-2-Clause"
] | null | null | null | """List servers running Docker, which do not have docker inspection enabled."""
import argparse
import dlib
def main():
    """Print Halo Docker-protection stats and optionally enable inspection."""
    args = get_args()
    halo = dlib.Halo()
    halo.print_total_server_count()
    servers_running_docker = halo.get_servers_running_docker()
    print("Servers running dockerd: %s" % str(len(servers_running_docker)))
    # NOTE(review): assumes get_servers_running_docker() populates
    # halo.protected / halo.unprotected as a side effect -- confirm in dlib.
    print("Protected servers (%s)" % str(len(halo.protected)))
    print("Unprotected servers (%s):" % str(len(halo.unprotected)))
    for x in halo.unprotected:
        print("Server ID: %s\tServer name: %s\tServer group: %s\t" %
              (x["id"], x["hostname"], x["group_path"]))
    if args.fix:
        # Opt-in remediation: turn on Docker inspection for every
        # currently-unprotected workload.
        ids = [x["id"] for x in halo.unprotected]
        halo.enable_docker_inspection(ids)
    return
def get_args(argv=None):
    """Parse command-line options.

    Args:
        argv: Optional list of argument strings; when None (the default,
            preserving the previous behavior) ``sys.argv[1:]`` is used.

    Returns:
        argparse.Namespace with a boolean ``fix`` attribute.
    """
    parser = argparse.ArgumentParser()
    # Bug fix: ``--fix`` is a flag, not an option taking a value. Without
    # action="store_true", argparse required an argument after --fix.
    parser.add_argument("--fix", action="store_true",
                        help="Enable Docker inspection for unprotected workloads.")  # NOQA
    args = parser.parse_args(argv)
    if args.fix:
        print("Will enable Docker inspection on available Docker hosts.")
    else:
        print("Will not make changes in account.")
    return args
if __name__ == "__main__":
    # Run the report (and optional fix) when executed as a script.
    main()
| 31.888889 | 100 | 0.659408 |
795593915940cd9d401f1f27130ea37cfb19c71a | 47,241 | py | Python | fairseq/models/longformer.py | hamsik1223/fairseq | 13164c38b0aab4269f8775a2506e2b60f5909114 | [
"MIT"
] | null | null | null | fairseq/models/longformer.py | hamsik1223/fairseq | 13164c38b0aab4269f8775a2506e2b60f5909114 | [
"MIT"
] | null | null | null | fairseq/models/longformer.py | hamsik1223/fairseq | 13164c38b0aab4269f8775a2506e2b60f5909114 | [
"MIT"
] | null | null | null | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import math
from typing import Any, Dict, List, Optional, Tuple
import torch
import torch.nn as nn
import torch.nn.functional as F
from fairseq import options, utils
from fairseq.models import (
FairseqEncoder,
FairseqEncoderDecoderModel,
FairseqIncrementalDecoder,
register_model,
register_model_architecture,
)
from fairseq.models.fairseq_encoder import EncoderOut
from fairseq.modules import (
AdaptiveSoftmax,
LayerDropModuleList,
LayerNorm,
PositionalEmbedding,
SinusoidalPositionalEmbedding,
TransformerDecoderLayer,
TransformerEncoderLayer,
)
from fairseq.modules.longformer_layer import LongformerEncoderLayer
from fairseq.modules.quant_noise import quant_noise as apply_quant_noise_
from torch import Tensor
DEFAULT_MAX_SOURCE_POSITIONS = 1024
DEFAULT_MAX_TARGET_POSITIONS = 1024
@register_model("longformer")
class TransformerModel(FairseqEncoderDecoderModel):
    """
    Transformer model from `"Attention Is All You Need" (Vaswani, et al, 2017)
    <https://arxiv.org/abs/1706.03762>`_.
    Args:
        encoder (TransformerEncoder): the encoder
        decoder (TransformerDecoder): the decoder
    The Transformer model provides the following named architectures and
    command-line arguments:
    .. argparse::
        :ref: fairseq.models.transformer_parser
        :prog:
    """
    @classmethod
    def hub_models(cls):
        # Pretrained checkpoints exposed via torch.hub; each entry maps an
        # architecture/language-pair name to a download URL plus tokenizer/BPE config.
        # fmt: off
        def moses_subword(path):
            return {
                'path': path,
                'tokenizer': 'moses',
                'bpe': 'subword_nmt',
            }
        def moses_fastbpe(path):
            return {
                'path': path,
                'tokenizer': 'moses',
                'bpe': 'fastbpe',
            }
        return {
            'transformer.wmt14.en-fr': moses_subword('https://dl.fbaipublicfiles.com/fairseq/models/wmt14.en-fr.joined-dict.transformer.tar.bz2'),
            'transformer.wmt16.en-de': 'https://dl.fbaipublicfiles.com/fairseq/models/wmt16.en-de.joined-dict.transformer.tar.bz2',
            'transformer.wmt18.en-de': moses_subword('https://dl.fbaipublicfiles.com/fairseq/models/wmt18.en-de.ensemble.tar.gz'),
            'transformer.wmt19.en-de': moses_fastbpe('https://dl.fbaipublicfiles.com/fairseq/models/wmt19.en-de.joined-dict.ensemble.tar.gz'),
            'transformer.wmt19.en-ru': moses_fastbpe('https://dl.fbaipublicfiles.com/fairseq/models/wmt19.en-ru.ensemble.tar.gz'),
            'transformer.wmt19.de-en': moses_fastbpe('https://dl.fbaipublicfiles.com/fairseq/models/wmt19.de-en.joined-dict.ensemble.tar.gz'),
            'transformer.wmt19.ru-en': moses_fastbpe('https://dl.fbaipublicfiles.com/fairseq/models/wmt19.ru-en.ensemble.tar.gz'),
            'transformer.wmt19.en-de.single_model': moses_fastbpe('https://dl.fbaipublicfiles.com/fairseq/models/wmt19.en-de.joined-dict.single_model.tar.gz'),
            'transformer.wmt19.en-ru.single_model': moses_fastbpe('https://dl.fbaipublicfiles.com/fairseq/models/wmt19.en-ru.single_model.tar.gz'),
            'transformer.wmt19.de-en.single_model': moses_fastbpe('https://dl.fbaipublicfiles.com/fairseq/models/wmt19.de-en.joined-dict.single_model.tar.gz'),
            'transformer.wmt19.ru-en.single_model': moses_fastbpe('https://dl.fbaipublicfiles.com/fairseq/models/wmt19.ru-en.single_model.tar.gz'),
        }
        # fmt: on
    def __init__(self, args, encoder, decoder):
        super().__init__(encoder, decoder)
        self.args = args
        self.supports_align_args = True
    @staticmethod
    def add_args(parser):
        """Add model-specific arguments to the parser."""
        # fmt: off
        parser.add_argument('--activation-fn',
                            choices=utils.get_available_activation_fns(),
                            help='activation function to use')
        parser.add_argument('--dropout', type=float, metavar='D',
                            help='dropout probability')
        parser.add_argument('--attention-dropout', type=float, metavar='D',
                            help='dropout probability for attention weights')
        parser.add_argument('--activation-dropout', '--relu-dropout', type=float, metavar='D',
                            help='dropout probability after activation in FFN.')
        parser.add_argument('--encoder-embed-path', type=str, metavar='STR',
                            help='path to pre-trained encoder embedding')
        parser.add_argument('--encoder-embed-dim', type=int, metavar='N',
                            help='encoder embedding dimension')
        parser.add_argument('--encoder-ffn-embed-dim', type=int, metavar='N',
                            help='encoder embedding dimension for FFN')
        parser.add_argument('--encoder-layers', type=int, metavar='N',
                            help='num encoder layers')
        parser.add_argument('--encoder-attention-heads', type=int, metavar='N',
                            help='num encoder attention heads')
        parser.add_argument('--encoder-normalize-before', action='store_true',
                            help='apply layernorm before each encoder block')
        parser.add_argument('--encoder-learned-pos', action='store_true',
                            help='use learned positional embeddings in the encoder')
        parser.add_argument('--decoder-embed-path', type=str, metavar='STR',
                            help='path to pre-trained decoder embedding')
        parser.add_argument('--decoder-embed-dim', type=int, metavar='N',
                            help='decoder embedding dimension')
        parser.add_argument('--decoder-ffn-embed-dim', type=int, metavar='N',
                            help='decoder embedding dimension for FFN')
        parser.add_argument('--decoder-layers', type=int, metavar='N',
                            help='num decoder layers')
        parser.add_argument('--decoder-attention-heads', type=int, metavar='N',
                            help='num decoder attention heads')
        parser.add_argument('--decoder-learned-pos', action='store_true',
                            help='use learned positional embeddings in the decoder')
        parser.add_argument('--decoder-normalize-before', action='store_true',
                            help='apply layernorm before each decoder block')
        parser.add_argument('--decoder-output-dim', type=int, metavar='N',
                            help='decoder output dimension (extra linear layer '
                                 'if different from decoder embed dim')
        parser.add_argument('--share-decoder-input-output-embed', action='store_true',
                            help='share decoder input and output embeddings')
        parser.add_argument('--share-all-embeddings', action='store_true',
                            help='share encoder, decoder and output embeddings'
                                 ' (requires shared dictionary and embed dim)')
        parser.add_argument('--no-token-positional-embeddings', default=False, action='store_true',
                            help='if set, disables positional embeddings (outside self attention)')
        parser.add_argument('--adaptive-softmax-cutoff', metavar='EXPR',
                            help='comma separated list of adaptive softmax cutoff points. '
                                 'Must be used with adaptive_loss criterion'),
        parser.add_argument('--adaptive-softmax-dropout', type=float, metavar='D',
                            help='sets adaptive softmax dropout for the tail projections')
        parser.add_argument('--layernorm-embedding', action='store_true',
                            help='add layernorm to embedding')
        parser.add_argument('--no-scale-embedding', action='store_true',
                            help='if True, dont scale embeddings')
        # args for "Cross+Self-Attention for Transformer Models" (Peitz et al., 2019)
        parser.add_argument('--no-cross-attention', default=False, action='store_true',
                            help='do not perform cross-attention')
        parser.add_argument('--cross-self-attention', default=False, action='store_true',
                            help='perform cross+self-attention')
        # args for "Reducing Transformer Depth on Demand with Structured Dropout" (Fan et al., 2019)
        parser.add_argument('--encoder-layerdrop', type=float, metavar='D', default=0,
                            help='LayerDrop probability for encoder')
        parser.add_argument('--decoder-layerdrop', type=float, metavar='D', default=0,
                            help='LayerDrop probability for decoder')
        parser.add_argument('--encoder-layers-to-keep', default=None,
                            help='which layers to *keep* when pruning as a comma-separated list')
        parser.add_argument('--decoder-layers-to-keep', default=None,
                            help='which layers to *keep* when pruning as a comma-separated list')
        # args for Training with Quantization Noise for Extreme Model Compression ({Fan*, Stock*} et al., 2020)
        parser.add_argument('--quant-noise-pq', type=float, metavar='D', default=0,
                            help='iterative PQ quantization noise at training time')
        parser.add_argument('--quant-noise-pq-block-size', type=int, metavar='D', default=8,
                            help='block size of quantization noise at training time')
        parser.add_argument('--quant-noise-scalar', type=float, metavar='D', default=0,
                            help='scalar quantization noise and scalar quantization at training time')
        # args for using learned relative positional embeddings
        parser.add_argument('--use-relative-pos-embeddings', default=False, action='store_true',
                            help='Use learned relative positions in multi-head self attention')
        parser.add_argument('--max-relative-pos', type=int, default=128,
                            help='Max relative position to create an embedding for.')
        parser.add_argument('--heads-share-embeddings', default=False, action='store_true',
                            help='Heads share the same relative positional embeddings')
        parser.add_argument('--add-pos-embeddings-to-values', default=False, action='store_true',
                            help='Add relative positional embeddings to values (apart from keys)')
        # fmt: on
        # args for longformer
        # NOTE(review): --attention-window / --attention-dilation default to
        # per-layer Python lists (one entry per encoder layer); values passed on
        # the command line would arrive as strings — presumably these are meant
        # to be set programmatically. TODO confirm intended CLI usage.
        parser.add_argument('--attention-mode', default='sliding_chunks',
                            help='')
        parser.add_argument('--attention-window', default=[16, 16, 16, 32, 64, 64],
                            help='')
        parser.add_argument('--attention-dilation', default=[1]*6,
                            help='')
        parser.add_argument('--autoregressive', default= False,
                            help='')
    @classmethod
    def build_model(cls, args, task):
        """Build a new model instance."""
        # make sure all arguments are present in older models
        base_architecture(args)
        if args.encoder_layers_to_keep:
            args.encoder_layers = len(args.encoder_layers_to_keep.split(","))
        if args.decoder_layers_to_keep:
            args.decoder_layers = len(args.decoder_layers_to_keep.split(","))
        if getattr(args, "max_source_positions", None) is None:
            args.max_source_positions = DEFAULT_MAX_SOURCE_POSITIONS
        if getattr(args, "max_target_positions", None) is None:
            args.max_target_positions = DEFAULT_MAX_TARGET_POSITIONS
        src_dict, tgt_dict = task.source_dictionary, task.target_dictionary
        if args.share_all_embeddings:
            # Shared embeddings require identical dictionaries and dimensions
            # on both sides; the decoder then reuses the encoder's table.
            if src_dict != tgt_dict:
                raise ValueError("--share-all-embeddings requires a joined dictionary")
            if args.encoder_embed_dim != args.decoder_embed_dim:
                raise ValueError(
                    "--share-all-embeddings requires --encoder-embed-dim to match --decoder-embed-dim"
                )
            if args.decoder_embed_path and (
                args.decoder_embed_path != args.encoder_embed_path
            ):
                raise ValueError(
                    "--share-all-embeddings not compatible with --decoder-embed-path"
                )
            encoder_embed_tokens = cls.build_embedding(
                args, src_dict, args.encoder_embed_dim, args.encoder_embed_path
            )
            decoder_embed_tokens = encoder_embed_tokens
            args.share_decoder_input_output_embed = True
        else:
            encoder_embed_tokens = cls.build_embedding(
                args, src_dict, args.encoder_embed_dim, args.encoder_embed_path
            )
            decoder_embed_tokens = cls.build_embedding(
                args, tgt_dict, args.decoder_embed_dim, args.decoder_embed_path
            )
        encoder = cls.build_encoder(args, src_dict, encoder_embed_tokens)
        decoder = cls.build_decoder(args, tgt_dict, decoder_embed_tokens)
        print('the attention window list is: ', args.attention_window)
        return cls(args, encoder, decoder)
    @classmethod
    def build_embedding(cls, args, dictionary, embed_dim, path=None):
        """Create a token embedding table, optionally initialized from *path*."""
        num_embeddings = len(dictionary)
        padding_idx = dictionary.pad()
        emb = Embedding(num_embeddings, embed_dim, padding_idx)
        # if provided, load from preloaded dictionaries
        if path:
            embed_dict = utils.parse_embedding(path)
            utils.load_embedding(embed_dict, dictionary, emb)
        return emb
    @classmethod
    def build_encoder(cls, args, src_dict, embed_tokens):
        return TransformerEncoder(args, src_dict, embed_tokens)
    @classmethod
    def build_decoder(cls, args, tgt_dict, embed_tokens):
        return TransformerDecoder(
            args,
            tgt_dict,
            embed_tokens,
            no_encoder_attn=getattr(args, "no_cross_attention", False),
        )
    # TorchScript doesn't support optional arguments with variable length (**kwargs).
    # Current workaround is to add union of all arguments in child classes.
    def forward(
        self,
        src_tokens,
        src_lengths,
        prev_output_tokens,
        return_all_hiddens: bool = True,
        features_only: bool = False,
        alignment_layer: Optional[int] = None,
        alignment_heads: Optional[int] = None,
    ):
        """
        Run the forward pass for an encoder-decoder model.
        Copied from the base class, but without ``**kwargs``,
        which are not supported by TorchScript.
        """
        encoder_out = self.encoder(
            src_tokens,
            src_lengths=src_lengths,
            return_all_hiddens=return_all_hiddens,
        )
        decoder_out = self.decoder(
            prev_output_tokens,
            encoder_out=encoder_out,
            features_only=features_only,
            alignment_layer=alignment_layer,
            alignment_heads=alignment_heads,
            src_lengths=src_lengths,
            return_all_hiddens=return_all_hiddens,
        )
        return decoder_out
    # Since get_normalized_probs is in the Fairseq Model which is not scriptable,
    # I rewrite the get_normalized_probs from Base Class to call the
    # helper function in the Base Class.
    @torch.jit.export
    def get_normalized_probs(
        self,
        net_output: Tuple[Tensor, Optional[Dict[str, List[Optional[Tensor]]]]],
        log_probs: bool,
        sample: Optional[Dict[str, Tensor]] = None,
    ):
        """Get normalized probabilities (or log probs) from a net's output."""
        return self.get_normalized_probs_scriptable(net_output, log_probs, sample)
class TransformerEncoder(FairseqEncoder):
    """
    Transformer encoder consisting of *args.encoder_layers* layers. Each layer
    is a :class:`TransformerEncoderLayer`.
    Args:
        args (argparse.Namespace): parsed command-line arguments
        dictionary (~fairseq.data.Dictionary): encoding dictionary
        embed_tokens (torch.nn.Embedding): input embedding
    """
    def __init__(self, args, dictionary, embed_tokens):
        super().__init__(dictionary)
        self.register_buffer("version", torch.Tensor([3]))
        self.dropout = args.dropout
        self.encoder_layerdrop = args.encoder_layerdrop
        embed_dim = embed_tokens.embedding_dim
        self.padding_idx = embed_tokens.padding_idx
        self.max_source_positions = args.max_source_positions
        self.embed_tokens = embed_tokens
        self.embed_scale = 1.0 if args.no_scale_embedding else math.sqrt(embed_dim)
        self.embed_positions = (
            PositionalEmbedding(
                args.max_source_positions,
                embed_dim,
                self.padding_idx,
                learned=args.encoder_learned_pos,
            )
            if not args.no_token_positional_embeddings
            else None
        )
        if not args.adaptive_input and args.quant_noise_pq > 0:
            self.quant_noise = apply_quant_noise_(
                nn.Linear(embed_dim, embed_dim, bias=False),
                args.quant_noise_pq,
                args.quant_noise_pq_block_size,
            )
        else:
            self.quant_noise = None
        if self.encoder_layerdrop > 0.0:
            self.layers = LayerDropModuleList(p=self.encoder_layerdrop)
        else:
            self.layers = nn.ModuleList([])
        # Each layer gets its index so LongformerEncoderLayer can pick its own
        # per-layer attention window / dilation from the args lists.
        self.layers.extend([
            self.build_encoder_layer(args, i)
            for i in range(args.encoder_layers)
        ])
        self.num_layers = len(self.layers)
        self.attention_window = args.attention_window
        if args.encoder_normalize_before:
            self.layer_norm = LayerNorm(embed_dim)
        else:
            self.layer_norm = None
        if getattr(args, "layernorm_embedding", False):
            self.layernorm_embedding = LayerNorm(embed_dim)
        else:
            self.layernorm_embedding = None
    def build_encoder_layer(self, args, layer_id):
        # Longformer sliding-window attention layer instead of the standard
        # TransformerEncoderLayer.
        return LongformerEncoderLayer(args, layer_id)
    def padding_src_tokens(self, src_tokens, _w):
        # Right-pad `src_tokens` (B x T) with padding_idx so the sequence
        # length becomes a multiple of `_w`.
        # NOTE(review): when T is already a multiple of `_w`, this still appends
        # a full extra window of padding (`T//_w + 1` windows) — presumably
        # harmless since padded positions are masked; confirm intended.
        _device = src_tokens.device
        src_seqlen = src_tokens.size()[1]
        padded_src_seqlen = (int(src_seqlen//_w) + 1) * _w - src_seqlen
        padded_tensor = torch.zeros([src_tokens.size()[0],padded_src_seqlen], device = _device) + self.padding_idx
        res = torch.cat([src_tokens, padded_tensor.long()], axis = 1)
        return res
    def find_split_of_source_and_context(self, src_tokens):
        #looking for split index of the source in the input instance
        #there is only one <s> as the start (src_tokens_0),
        #and the </s> after that <s> is the end
        # NOTE(review): assumes token id 0 is <s> and token id 2 is </s>
        # (standard fairseq dictionary indices) — TODO confirm for this task.
        B=src_tokens.size()[0]
        src_tokens_start = torch.where(src_tokens==0)[1]
        src_tokens_end_tuple = torch.where(src_tokens==2)
        src_tokens_end_sen_index = src_tokens_end_tuple[0]
        src_tokens_end_index = src_tokens_end_tuple[1]
        src_tokens_end = torch.zeros(src_tokens_start.size())
        for i in range(B):
            # first </s> occurring after this sentence's <s>
            src_tokens_end_cur = src_tokens_end_index[torch.where(src_tokens_end_sen_index==i)]
            src_tokens_end[i] = src_tokens_end_cur[torch.where(src_tokens_end_cur > src_tokens_start[i])[0][0]]
        return src_tokens_start, src_tokens_end.int(), src_tokens_end_tuple
    def build_source_sentence_mask(self, src_tokens, src_tokens_start, src_tokens_end):
        # Build a B x T boolean mask that is True everywhere EXCEPT the tokens
        # strictly inside the (<s>, </s>) source span — i.e. context tokens are
        # masked out so the decoder only attends to the source sentence.
        _device = src_tokens.device
        B=src_tokens.size()[0]
        encoder_padding_mask = torch.zeros(src_tokens.size(), device = _device)
        for i in range(B):
            encoder_padding_mask[i, (1+src_tokens_start[i]): (1+src_tokens_end[i])] = 1
        encoder_padding_mask = encoder_padding_mask < 0.5
        return encoder_padding_mask
    def forward_embedding(self, src_tokens):
        # embed tokens and positions
        x = embed = self.embed_scale * self.embed_tokens(src_tokens)
        if self.embed_positions is not None:
            x = embed + self.embed_positions(src_tokens)
        if self.layernorm_embedding is not None:
            x = self.layernorm_embedding(x)
        x = F.dropout(x, p=self.dropout, training=self.training)
        if self.quant_noise is not None:
            x = self.quant_noise(x)
        return x, embed
    def forward(
        self,
        src_tokens,
        src_lengths,
        return_all_hiddens: bool = False,
    ):
        """
        Args:
            src_tokens (LongTensor): tokens in the source language of shape
                `(batch, src_len)`
            src_lengths (torch.LongTensor): lengths of each source sentence of
                shape `(batch)`
            return_all_hiddens (bool, optional): also return all of the
                intermediate hidden states (default: False).
        Returns:
            namedtuple:
                - **encoder_out** (Tensor): the last encoder layer's output of
                  shape `(src_len, batch, embed_dim)`
                - **encoder_padding_mask** (ByteTensor): the positions of
                  padding elements of shape `(batch, src_len)`
                - **encoder_embedding** (Tensor): the (scaled) embedding lookup
                  of shape `(batch, src_len, embed_dim)`
                - **encoder_states** (List[Tensor]): all intermediate
                  hidden states of shape `(src_len, batch, embed_dim)`.
                  Only populated if *return_all_hiddens* is True.
        """
        ## first find the start and end of the src sentence
        src_tokens_start, src_tokens_end, _ = self.find_split_of_source_and_context(src_tokens)
        seqlen = src_tokens.size()[1]
        max_window_size = max(self.attention_window)
        ##padding the input x with seqlen of 2*w's multiple,
        ##e.g. if w=32, let the seqlen to be 64's multiple.
        # Required by sliding-chunks Longformer attention, which processes the
        # sequence in chunks of 2*window.
        src_tokens = self.padding_src_tokens(src_tokens, 2*max_window_size)
        x, encoder_embedding = self.forward_embedding(src_tokens)
        # B x T x C -> T x B x C
        #x = x.transpose(0, 1)
        # compute padding mask
        encoder_padding_mask = src_tokens.eq(self.padding_idx)
        encoder_states = [] if return_all_hiddens else None
        encoder_attn = [] if return_all_hiddens else None
        # encoder layers
        # NOTE: Longformer layers run in B x T x C (no transpose before the loop).
        for i, layer in enumerate(self.layers):
            x, _ = layer(x, output_attentions = True)
            if return_all_hiddens:
                assert encoder_states is not None
                encoder_states.append(x)
                assert encoder_attn is not None
                encoder_attn.append(_)
        ##only get the unpadded parts...
        x = x[:, 0:seqlen, :]
        # Replace the token-level padding mask with the source-sentence mask so
        # decoder cross-attention only sees the source span (context masked out).
        encoder_padding_mask = self.build_source_sentence_mask(src_tokens[:, 0:seqlen], src_tokens_start, src_tokens_end)
        x = x.transpose(0, 1)
        if self.layer_norm is not None:
            x = self.layer_norm(x)
        return EncoderOut(
            encoder_out=x,  # T x B x C
            encoder_padding_mask=encoder_padding_mask,  # B x T
            encoder_embedding=encoder_embedding,  # B x T x C
            encoder_states=encoder_states,  # List[T x B x C]
            src_tokens=None,
            src_lengths=None,
            encoder_attn=encoder_attn
        )
    @torch.jit.export
    def reorder_encoder_out(self, encoder_out: EncoderOut, new_order):
        """
        Reorder encoder output according to *new_order*.
        Args:
            encoder_out: output from the ``forward()`` method
            new_order (LongTensor): desired order
        Returns:
            *encoder_out* rearranged according to *new_order*
        """
        new_encoder_out: Dict[str, Tensor] = {}
        new_encoder_out["encoder_out"] = (
            encoder_out.encoder_out
            if encoder_out.encoder_out is None
            else encoder_out.encoder_out.index_select(1, new_order)
        )
        new_encoder_out["encoder_padding_mask"] = (
            encoder_out.encoder_padding_mask
            if encoder_out.encoder_padding_mask is None
            else encoder_out.encoder_padding_mask.index_select(0, new_order)
        )
        new_encoder_out["encoder_embedding"] = (
            encoder_out.encoder_embedding
            if encoder_out.encoder_embedding is None
            else encoder_out.encoder_embedding.index_select(0, new_order)
        )
        src_tokens = encoder_out.src_tokens
        if src_tokens is not None:
            src_tokens = src_tokens.index_select(0, new_order)
        src_lengths = encoder_out.src_lengths
        if src_lengths is not None:
            src_lengths = src_lengths.index_select(0, new_order)
        encoder_states = encoder_out.encoder_states
        if encoder_states is not None:
            for idx, state in enumerate(encoder_states):
                encoder_states[idx] = state.index_select(1, new_order)
        # NOTE(review): encoder_attn is deliberately dropped (reset to []) on
        # reorder — attention maps are not needed during beam-search reordering.
        return EncoderOut(
            encoder_out=new_encoder_out["encoder_out"],  # T x B x C
            encoder_padding_mask=new_encoder_out["encoder_padding_mask"],  # B x T
            encoder_embedding=new_encoder_out["encoder_embedding"],  # B x T x C
            encoder_states=encoder_states,  # List[T x B x C]
            src_tokens=src_tokens,  # B x T
            src_lengths=src_lengths,  # B x 1
            encoder_attn = []
        )
    def max_positions(self):
        """Maximum input length supported by the encoder."""
        if self.embed_positions is None:
            return self.max_source_positions
        return min(self.max_source_positions, self.embed_positions.max_positions)
    def upgrade_state_dict_named(self, state_dict, name):
        """Upgrade a (possibly old) state dict for new versions of fairseq."""
        if isinstance(self.embed_positions, SinusoidalPositionalEmbedding):
            weights_key = "{}.embed_positions.weights".format(name)
            if weights_key in state_dict:
                print("deleting {0}".format(weights_key))
                del state_dict[weights_key]
            state_dict[
                "{}.embed_positions._float_tensor".format(name)
            ] = torch.FloatTensor(1)
        for i in range(self.num_layers):
            # update layer norms
            self.layers[i].upgrade_state_dict_named(
                state_dict, "{}.layers.{}".format(name, i)
            )
        version_key = "{}.version".format(name)
        if utils.item(state_dict.get(version_key, torch.Tensor([1]))[0]) < 2:
            # earlier checkpoints did not normalize after the stack of layers
            self.layer_norm = None
            self.normalize = False
            state_dict[version_key] = torch.Tensor([1])
        return state_dict
class TransformerDecoder(FairseqIncrementalDecoder):
    """
    Transformer decoder consisting of *args.decoder_layers* layers. Each layer
    is a :class:`TransformerDecoderLayer`.
    Args:
        args (argparse.Namespace): parsed command-line arguments
        dictionary (~fairseq.data.Dictionary): decoding dictionary
        embed_tokens (torch.nn.Embedding): output embedding
        no_encoder_attn (bool, optional): whether to attend to encoder outputs
            (default: False).
    """
    def __init__(self, args, dictionary, embed_tokens, no_encoder_attn=False):
        self.args = args
        super().__init__(dictionary)
        self.register_buffer("version", torch.Tensor([3]))
        # Lazily-grown causal mask cache; see buffered_future_mask().
        self._future_mask = torch.empty(0)
        self.dropout = args.dropout
        self.decoder_layerdrop = args.decoder_layerdrop
        self.share_input_output_embed = args.share_decoder_input_output_embed
        input_embed_dim = embed_tokens.embedding_dim
        embed_dim = args.decoder_embed_dim
        self.embed_dim = embed_dim
        self.output_embed_dim = args.decoder_output_dim
        self.padding_idx = embed_tokens.padding_idx
        self.max_target_positions = args.max_target_positions
        self.embed_tokens = embed_tokens
        self.embed_scale = 1.0 if args.no_scale_embedding else math.sqrt(embed_dim)
        if not args.adaptive_input and args.quant_noise_pq > 0:
            self.quant_noise = apply_quant_noise_(
                nn.Linear(embed_dim, embed_dim, bias=False),
                args.quant_noise_pq,
                args.quant_noise_pq_block_size,
            )
        else:
            self.quant_noise = None
        # Optional projection when the token-embedding dim differs from the
        # decoder's model dim.
        self.project_in_dim = (
            Linear(input_embed_dim, embed_dim, bias=False)
            if embed_dim != input_embed_dim
            else None
        )
        self.embed_positions = (
            PositionalEmbedding(
                args.max_target_positions,
                embed_dim,
                self.padding_idx,
                learned=args.decoder_learned_pos,
            )
            if not args.no_token_positional_embeddings
            else None
        )
        if getattr(args, "layernorm_embedding", False):
            self.layernorm_embedding = LayerNorm(embed_dim)
        else:
            self.layernorm_embedding = None
        self.cross_self_attention = getattr(args, "cross_self_attention", False)
        if self.decoder_layerdrop > 0.0:
            self.layers = LayerDropModuleList(p=self.decoder_layerdrop)
        else:
            self.layers = nn.ModuleList([])
        self.layers.extend([
            self.build_decoder_layer(args, no_encoder_attn)
            for _ in range(args.decoder_layers)
        ])
        self.num_layers = len(self.layers)
        if args.decoder_normalize_before and not getattr(args, "no_decoder_final_norm", False):
            self.layer_norm = LayerNorm(embed_dim)
        else:
            self.layer_norm = None
        self.project_out_dim = (
            Linear(embed_dim, self.output_embed_dim, bias=False)
            if embed_dim != self.output_embed_dim and not args.tie_adaptive_weights
            else None
        )
        self.adaptive_softmax = None
        self.output_projection = None
        if args.adaptive_softmax_cutoff is not None:
            self.adaptive_softmax = AdaptiveSoftmax(
                len(dictionary),
                self.output_embed_dim,
                options.eval_str_list(args.adaptive_softmax_cutoff, type=int),
                dropout=args.adaptive_softmax_dropout,
                adaptive_inputs=embed_tokens if args.tie_adaptive_weights else None,
                factor=args.adaptive_softmax_factor,
                tie_proj=args.tie_adaptive_proj,
            )
        elif self.share_input_output_embed:
            # Output projection reuses the (transposed) input embedding weights.
            self.output_projection = nn.Linear(
                self.embed_tokens.weight.shape[1],
                self.embed_tokens.weight.shape[0],
                bias=False,
            )
            self.output_projection.weight = self.embed_tokens.weight
        else:
            self.output_projection = nn.Linear(
                self.output_embed_dim, len(dictionary), bias=False
            )
            nn.init.normal_(
                self.output_projection.weight, mean=0, std=self.output_embed_dim ** -0.5
            )
    def build_decoder_layer(self, args, no_encoder_attn=False):
        return TransformerDecoderLayer(args, no_encoder_attn)
    def forward(
        self,
        prev_output_tokens,
        encoder_out: Optional[EncoderOut] = None,
        incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]] = None,
        features_only: bool = False,
        alignment_layer: Optional[int] = None,
        alignment_heads: Optional[int] = None,
        src_lengths: Optional[Any] = None,
        return_all_hiddens: bool = False,
    ):
        """
        Args:
            prev_output_tokens (LongTensor): previous decoder outputs of shape
                `(batch, tgt_len)`, for teacher forcing
            encoder_out (optional): output from the encoder, used for
                encoder-side attention
            incremental_state (dict): dictionary used for storing state during
                :ref:`Incremental decoding`
            features_only (bool, optional): only return features without
                applying output layer (default: False).
        Returns:
            tuple:
                - the decoder's output of shape `(batch, tgt_len, vocab)`
                - a dictionary with any model-specific outputs
        """
        x, extra = self.extract_features(
            prev_output_tokens,
            encoder_out=encoder_out,
            incremental_state=incremental_state,
            alignment_layer=alignment_layer,
            alignment_heads=alignment_heads,
        )
        if not features_only:
            x = self.output_layer(x)
        return x, extra
    def extract_features(
        self,
        prev_output_tokens,
        encoder_out: Optional[EncoderOut] = None,
        incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]] = None,
        full_context_alignment: bool = False,
        alignment_layer: Optional[int] = None,
        alignment_heads: Optional[int] = None,
    ):
        """
        Similar to *forward* but only return features.
        Includes several features from "Jointly Learning to Align and
        Translate with Transformer Models" (Garg et al., EMNLP 2019).
        Args:
            full_context_alignment (bool, optional): don't apply
                auto-regressive mask to self-attention (default: False).
            alignment_layer (int, optional): return mean alignment over
                heads at this layer (default: last layer).
            alignment_heads (int, optional): only average alignment over
                this many heads (default: all heads).
        Returns:
            tuple:
                - the decoder's features of shape `(batch, tgt_len, embed_dim)`
                - a dictionary with any model-specific outputs
        """
        if alignment_layer is None:
            alignment_layer = self.num_layers - 1
        # embed positions
        positions = (
            self.embed_positions(
                prev_output_tokens, incremental_state=incremental_state
            )
            if self.embed_positions is not None
            else None
        )
        if incremental_state is not None:
            # Incremental decoding: only the newest token is fed through.
            prev_output_tokens = prev_output_tokens[:, -1:]
            if positions is not None:
                positions = positions[:, -1:]
        # embed tokens and positions
        x = self.embed_scale * self.embed_tokens(prev_output_tokens)
        if self.quant_noise is not None:
            x = self.quant_noise(x)
        if self.project_in_dim is not None:
            x = self.project_in_dim(x)
        if positions is not None:
            x += positions
        if self.layernorm_embedding is not None:
            x = self.layernorm_embedding(x)
        x = F.dropout(x, p=self.dropout, training=self.training)
        # B x T x C -> T x B x C
        x = x.transpose(0, 1)
        self_attn_padding_mask: Optional[Tensor] = None
        if self.cross_self_attention or prev_output_tokens.eq(self.padding_idx).any():
            self_attn_padding_mask = prev_output_tokens.eq(self.padding_idx)
        # decoder layers
        attn: Optional[Tensor] = None
        inner_states: List[Optional[Tensor]] = [x]
        for idx, layer in enumerate(self.layers):
            if incremental_state is None and not full_context_alignment:
                self_attn_mask = self.buffered_future_mask(x)
            else:
                self_attn_mask = None
            x, layer_attn, _ = layer(
                x,
                encoder_out.encoder_out if encoder_out is not None else None,
                encoder_out.encoder_padding_mask if encoder_out is not None else None,
                incremental_state,
                self_attn_mask=self_attn_mask,
                self_attn_padding_mask=self_attn_padding_mask,
                need_attn=bool((idx == alignment_layer)),
                need_head_weights=bool((idx == alignment_layer)),
            )
            inner_states.append(x)
            if layer_attn is not None and idx == alignment_layer:
                attn = layer_attn.float().to(x)
        if attn is not None:
            if alignment_heads is not None:
                attn = attn[:alignment_heads]
            # average probabilities over heads
            attn = attn.mean(dim=0)
        if self.layer_norm is not None:
            x = self.layer_norm(x)
        # T x B x C -> B x T x C
        x = x.transpose(0, 1)
        if self.project_out_dim is not None:
            x = self.project_out_dim(x)
        return x, {"attn": [attn], "inner_states": inner_states, "encoder_out": encoder_out}
    def output_layer(self, features):
        """Project features to the vocabulary size."""
        if self.adaptive_softmax is None:
            # project back to size of vocabulary
            return self.output_projection(features)
        else:
            # adaptive softmax computes logits inside the criterion
            return features
    def max_positions(self):
        """Maximum output length supported by the decoder."""
        if self.embed_positions is None:
            return self.max_target_positions
        return min(self.max_target_positions, self.embed_positions.max_positions)
    def buffered_future_mask(self, tensor):
        """Return a cached (and lazily grown) upper-triangular -inf mask of size dim x dim."""
        dim = tensor.size(0)
        # self._future_mask.device != tensor.device is not working in TorchScript. This is a workaround.
        if (
            self._future_mask.size(0) == 0
            or (not self._future_mask.device == tensor.device)
            or self._future_mask.size(0) < dim
        ):
            self._future_mask = torch.triu(
                utils.fill_with_neg_inf(torch.zeros([dim, dim])), 1
            )
        self._future_mask = self._future_mask.to(tensor)
        return self._future_mask[:dim, :dim]
    def upgrade_state_dict_named(self, state_dict, name):
        """Upgrade a (possibly old) state dict for new versions of fairseq."""
        if isinstance(self.embed_positions, SinusoidalPositionalEmbedding):
            weights_key = "{}.embed_positions.weights".format(name)
            if weights_key in state_dict:
                del state_dict[weights_key]
            state_dict[
                "{}.embed_positions._float_tensor".format(name)
            ] = torch.FloatTensor(1)
        if f"{name}.output_projection.weight" not in state_dict:
            if self.share_input_output_embed:
                embed_out_key = f"{name}.embed_tokens.weight"
            else:
                embed_out_key = f"{name}.embed_out"
            if embed_out_key in state_dict:
                state_dict[f"{name}.output_projection.weight"] = state_dict[embed_out_key]
                if not self.share_input_output_embed:
                    del state_dict[embed_out_key]
        for i in range(self.num_layers):
            # update layer norms
            layer_norm_map = {
                "0": "self_attn_layer_norm",
                "1": "encoder_attn_layer_norm",
                "2": "final_layer_norm",
            }
            for old, new in layer_norm_map.items():
                for m in ("weight", "bias"):
                    k = "{}.layers.{}.layer_norms.{}.{}".format(name, i, old, m)
                    if k in state_dict:
                        state_dict[
                            "{}.layers.{}.{}.{}".format(name, i, new, m)
                        ] = state_dict[k]
                        del state_dict[k]
        version_key = "{}.version".format(name)
        if utils.item(state_dict.get(version_key, torch.Tensor([1]))[0]) <= 2:
            # earlier checkpoints did not normalize after the stack of layers
            self.layer_norm = None
            self.normalize = False
            state_dict[version_key] = torch.Tensor([1])
        return state_dict
    # Overwrite the method to temporaily support JIT scripting in Transformer
    @torch.jit.export
    def reorder_incremental_state(
        self,
        incremental_state: Dict[str, Dict[str, Optional[Tensor]]],
        new_order: Tensor,
    ):
        """Scriptable reorder incremental state in the transformer."""
        for layer in self.layers:
            layer.reorder_incremental_state(incremental_state, new_order)
def Embedding(num_embeddings, embedding_dim, padding_idx):
    """Build an ``nn.Embedding`` with N(0, dim^-0.5) weights and a zeroed padding row."""
    module = nn.Embedding(num_embeddings, embedding_dim, padding_idx=padding_idx)
    std = embedding_dim ** -0.5
    nn.init.normal_(module.weight, mean=0, std=std)
    nn.init.constant_(module.weight[padding_idx], 0)
    return module
def Linear(in_features, out_features, bias=True):
    """Build an ``nn.Linear`` with Xavier-uniform weights and (if present) zero bias."""
    layer = nn.Linear(in_features, out_features, bias)
    nn.init.xavier_uniform_(layer.weight)
    if not bias:
        return layer
    nn.init.constant_(layer.bias, 0.0)
    return layer
@register_model_architecture("longformer", "longformer")
def base_architecture(args):
    """Fill in default hyperparameters for the base 'longformer' architecture.

    Every attribute is resolved with ``getattr(args, name, default)``, so
    values the user already set on ``args`` are preserved and only missing
    ones receive the defaults below.
    """
    # Defaults that do not depend on any other args attribute. The dict is
    # rebuilt on every call, so the mutable list defaults are never shared
    # between invocations.
    independent_defaults = {
        "encoder_embed_path": None,
        "encoder_embed_dim": 512,
        "encoder_ffn_embed_dim": 2048,
        "encoder_layers": 6,
        "encoder_attention_heads": 8,
        "encoder_normalize_before": False,
        "encoder_learned_pos": False,
        "decoder_embed_path": None,
        "decoder_layers": 6,
        "decoder_attention_heads": 8,
        "decoder_normalize_before": False,
        "decoder_learned_pos": False,
        "attention_dropout": 0.0,
        "activation_dropout": 0.0,
        "activation_fn": "relu",
        "dropout": 0.1,
        "adaptive_softmax_cutoff": None,
        "adaptive_softmax_dropout": 0,
        "share_decoder_input_output_embed": False,
        "share_all_embeddings": False,
        "no_token_positional_embeddings": False,
        "adaptive_input": False,
        "no_cross_attention": False,
        "cross_self_attention": False,
        "no_scale_embedding": False,
        "layernorm_embedding": False,
        "tie_adaptive_weights": False,
        "attention_mode": 'sliding_chunks',
        "attention_window": [32] * 6,
        "attention_dilation": [1] * 6,
        "autoregressive": False,
    }
    for attr_name, default in independent_defaults.items():
        setattr(args, attr_name, getattr(args, attr_name, default))
    # Defaults derived from attributes resolved above (encoder dims feed the
    # decoder dims; decoder_embed_dim feeds the decoder I/O dims).
    args.decoder_embed_dim = getattr(args, "decoder_embed_dim", args.encoder_embed_dim)
    args.decoder_ffn_embed_dim = getattr(
        args, "decoder_ffn_embed_dim", args.encoder_ffn_embed_dim
    )
    args.decoder_output_dim = getattr(
        args, "decoder_output_dim", args.decoder_embed_dim
    )
    args.decoder_input_dim = getattr(args, "decoder_input_dim", args.decoder_embed_dim)
@register_model_architecture("longformer", "longformer_flat_trans")
def base_architecture(args):
    """Fill in default hyperparameters for the 'longformer_flat_trans' arch.

    Missing attributes on ``args`` are populated with the defaults below;
    anything the user already set is left untouched.

    NOTE(review): this function name shadows the earlier ``base_architecture``
    at module level. Registration captures the function at decoration time,
    so both architectures still work, but renaming this one would be clearer.
    """
    # Defaults that do not depend on any other args attribute. The dict is
    # rebuilt on every call, so the mutable list defaults are never shared
    # between invocations.
    independent_defaults = {
        "encoder_embed_path": None,
        "encoder_embed_dim": 512,
        "encoder_ffn_embed_dim": 1024,
        "encoder_layers": 6,
        "encoder_attention_heads": 4,
        "encoder_normalize_before": False,
        "encoder_learned_pos": False,
        "decoder_embed_path": None,
        "decoder_layers": 6,
        "decoder_attention_heads": 4,
        "decoder_normalize_before": False,
        "decoder_learned_pos": False,
        "attention_dropout": 0.0,
        "activation_dropout": 0.0,
        "activation_fn": "relu",
        "dropout": 0.1,
        "adaptive_softmax_cutoff": None,
        "adaptive_softmax_dropout": 0,
        "share_decoder_input_output_embed": False,
        "share_all_embeddings": False,
        "no_token_positional_embeddings": False,
        "adaptive_input": False,
        "no_cross_attention": False,
        "cross_self_attention": False,
        "no_scale_embedding": False,
        "layernorm_embedding": False,
        "tie_adaptive_weights": False,
        "checkpoint_activations": False,
        "encoder_layers_to_keep": None,
        "decoder_layers_to_keep": None,
        "encoder_layerdrop": 0,
        "decoder_layerdrop": 0,
        "quant_noise_pq": 0,
        "quant_noise_pq_block_size": 8,
        "quant_noise_scalar": 0,
        "attention_mode": 'sliding_chunks',
        "attention_window": [76] + [24] * 5,
        "attention_dilation": [1] * 6,
        "autoregressive": False,
    }
    for attr_name, default in independent_defaults.items():
        setattr(args, attr_name, getattr(args, attr_name, default))
    # Defaults derived from attributes resolved above.
    args.decoder_embed_dim = getattr(args, "decoder_embed_dim", args.encoder_embed_dim)
    args.decoder_ffn_embed_dim = getattr(
        args, "decoder_ffn_embed_dim", args.encoder_ffn_embed_dim
    )
    args.decoder_output_dim = getattr(
        args, "decoder_output_dim", args.decoder_embed_dim
    )
    args.decoder_input_dim = getattr(args, "decoder_input_dim", args.decoder_embed_dim)
7955942178490478c1fd9b10d3d553064f49098a | 53,103 | py | Python | watertap/unit_models/coag_floc_model.py | kurbansitterley/watertap | 1a8986a779bdcb36f1481f03eed24c6c42d26481 | [
"BSD-3-Clause-LBNL"
] | null | null | null | watertap/unit_models/coag_floc_model.py | kurbansitterley/watertap | 1a8986a779bdcb36f1481f03eed24c6c42d26481 | [
"BSD-3-Clause-LBNL"
] | null | null | null | watertap/unit_models/coag_floc_model.py | kurbansitterley/watertap | 1a8986a779bdcb36f1481f03eed24c6c42d26481 | [
"BSD-3-Clause-LBNL"
] | null | null | null | ###############################################################################
# WaterTAP Copyright (c) 2021, The Regents of the University of California,
# through Lawrence Berkeley National Laboratory, Oak Ridge National
# Laboratory, National Renewable Energy Laboratory, and National Energy
# Technology Laboratory (subject to receipt of any required approvals from
# the U.S. Dept. of Energy). All rights reserved.
#
# Please see the files COPYRIGHT.md and LICENSE.md for full copyright and license
# information, respectively. These files are also available online at the URL
# "https://github.com/watertap-org/watertap/"
#
###############################################################################
# Import Pyomo libraries
from pyomo.environ import (
Block,
Set,
Var,
Param,
Expression,
Suffix,
NonNegativeReals,
PositiveIntegers,
Reference,
value,
units as pyunits,
)
from pyomo.common.config import ConfigBlock, ConfigValue, In
# Import IDAES cores
from idaes.core import (
ControlVolume0DBlock,
declare_process_block_class,
MaterialBalanceType,
EnergyBalanceType,
MomentumBalanceType,
UnitModelBlockData,
useDefault,
MaterialFlowBasis,
)
from idaes.core.util.constants import Constants
from idaes.core.solvers import get_solver
from idaes.core.util.tables import create_stream_table_dataframe
from idaes.core.util.config import is_physical_parameter_block
from idaes.core.util.exceptions import ConfigurationError
import idaes.core.util.scaling as iscale
import idaes.logger as idaeslog
__author__ = "Austin Ladshaw"
# Module-level logger scoped to this module's dotted name.
_log = idaeslog.getLogger(__name__)
# Name of the unit model
@declare_process_block_class("CoagulationFlocculation")
class CoagulationFlocculationData(UnitModelBlockData):
"""
Zero order Coagulation-Flocculation model based on Jar Tests
"""
# CONFIG are options for the unit model
CONFIG = ConfigBlock()
CONFIG.declare(
"dynamic",
ConfigValue(
domain=In([False]),
default=False,
description="Dynamic model flag; must be False",
doc="""Indicates whether this model will be dynamic or not,
**default**: False. The filtration unit does not support dynamic
behavior, thus this must be False.""",
),
)
CONFIG.declare(
"has_holdup",
ConfigValue(
default=False,
domain=In([False]),
description="Holdup construction flag; must be False",
doc="""Indicates whether holdup terms should be constructed or not.
**default**: False. The filtration unit does not have defined volume, thus
this must be False.""",
),
)
CONFIG.declare(
"material_balance_type",
ConfigValue(
default=MaterialBalanceType.useDefault,
domain=In(MaterialBalanceType),
description="Material balance construction flag",
doc="""Indicates what type of mass balance should be constructed,
**default** - MaterialBalanceType.useDefault.
**Valid values:** {
**MaterialBalanceType.useDefault - refer to property package for default
balance type
**MaterialBalanceType.none** - exclude material balances,
**MaterialBalanceType.componentPhase** - use phase component balances,
**MaterialBalanceType.componentTotal** - use total component balances,
**MaterialBalanceType.elementTotal** - use total element balances,
**MaterialBalanceType.total** - use total material balance.}""",
),
)
# NOTE: This option is temporarily disabled
'''
CONFIG.declare("energy_balance_type", ConfigValue(
default=EnergyBalanceType.useDefault,
domain=In(EnergyBalanceType),
description="Energy balance construction flag",
doc="""Indicates what type of energy balance should be constructed,
**default** - EnergyBalanceType.useDefault.
**Valid values:** {
**EnergyBalanceType.useDefault - refer to property package for default
balance type
**EnergyBalanceType.none** - exclude energy balances,
**EnergyBalanceType.enthalpyTotal** - single enthalpy balance for material,
**EnergyBalanceType.enthalpyPhase** - enthalpy balances for each phase,
**EnergyBalanceType.energyTotal** - single energy balance for material,
**EnergyBalanceType.energyPhase** - energy balances for each phase.}"""))
'''
CONFIG.declare(
"momentum_balance_type",
ConfigValue(
default=MomentumBalanceType.pressureTotal,
domain=In(MomentumBalanceType),
description="Momentum balance construction flag",
doc="""Indicates what type of momentum balance should be constructed,
**default** - MomentumBalanceType.pressureTotal.
**Valid values:** {
**MomentumBalanceType.none** - exclude momentum balances,
**MomentumBalanceType.pressureTotal** - single pressure balance for material,
**MomentumBalanceType.pressurePhase** - pressure balances for each phase,
**MomentumBalanceType.momentumTotal** - single momentum balance for material,
**MomentumBalanceType.momentumPhase** - momentum balances for each phase.}""",
),
)
CONFIG.declare(
"property_package",
ConfigValue(
default=useDefault,
domain=is_physical_parameter_block,
description="Property package to use for control volume",
doc="""Property parameter object used to define property calculations,
**default** - useDefault.
**Valid values:** {
**useDefault** - use default package from parent model or flowsheet,
**PhysicalParameterObject** - a PhysicalParameterBlock object.}""",
),
)
CONFIG.declare(
"property_package_args",
ConfigBlock(
implicit=True,
description="Arguments to use for constructing property packages",
doc="""A ConfigBlock with arguments to be passed to a property block(s)
and used when constructing these,
**default** - None.
**Valid values:** {
see property package for documentation.}""",
),
)
CONFIG.declare(
"chemical_additives",
ConfigValue(
default={},
domain=dict,
description="Specification of chemical additives used in coagulation process",
doc="""
A dict of chemical additives used in coagulation process
along with their molecular weights, the moles of salt produced per mole of
chemical added, and the molecular weights of the salt produced by the chemical
additive with the format of::
{'chem_name_1':
{'parameter_data':
{
'mw_additive': (value, units),
'moles_salt_per_mole_additive': value,
'mw_salt': (value, units)
}
},
'chem_name_2':
{'parameter_data':
{
'mw_additive': (value, units),
'moles_salt_per_mole_additive': value,
'mw_salt': (value, units)
}
},
}
""",
),
)
    def build(self):
        """Construct the coagulation-flocculation unit model.

        Builds (in order): jar-test turbidity/TSS relationship variables,
        chemical-dosing variables/parameters from the 'chemical_additives'
        config, rapid-mixing and flocculation power variables, a 0D control
        volume with material/momentum balances, inlet/outlet ports, and all
        unit-level constraints (TSS loss, TDS gain, basin volumes, power).
        """
        # build always starts by calling super().build()
        # This triggers a lot of boilerplate in the background for you
        super().build()
        # this creates blank scaling factors, which are populated later
        self.scaling_factor = Suffix(direction=Suffix.EXPORT)
        # Next, get the base units of measurement from the property definition
        units_meta = self.config.property_package.get_metadata().get_derived_units
        # check the optional config arg 'chemical_additives'
        common_msg = (
            "The 'chemical_additives' dict MUST contain a dict of 'parameter_data' for "
            + "each chemical name. That 'parameter_data' dict MUST contain 'mw_chem', "
            + "'moles_salt_per_mole_additive', and 'mw_salt' as keys. Users are also "
            + "required to provide the values for the molecular weights and the units "
            + "within a tuple arg. Example format provided below.\n\n"
            + "{'chem_name_1': \n"
            + " {'parameter_data': \n"
            + " {'mw_additive': (value, units), \n"
            + " 'moles_salt_per_mole_additive': value, \n"
            + " 'mw_salt': (value, units)} \n"
            + " }, \n"
            + "}\n\n"
        )
        mw_adds = {}
        mw_salts = {}
        molar_rat = {}
        # Validate every chemical entry up front, then convert molecular
        # weights to kg/mol so downstream constraints use consistent units.
        for j in self.config.chemical_additives:
            if type(self.config.chemical_additives[j]) != dict:
                raise ConfigurationError(
                    "\n Did not provide a 'dict' for chemical \n" + common_msg
                )
            if "parameter_data" not in self.config.chemical_additives[j]:
                raise ConfigurationError(
                    "\n Did not provide a 'parameter_data' for chemical \n" + common_msg
                )
            if "mw_additive" not in self.config.chemical_additives[j]["parameter_data"]:
                raise ConfigurationError(
                    "\n Did not provide a 'mw_additive' for chemical \n" + common_msg
                )
            if (
                "moles_salt_per_mole_additive"
                not in self.config.chemical_additives[j]["parameter_data"]
            ):
                raise ConfigurationError(
                    "\n Did not provide a 'moles_salt_per_mole_additive' for chemical \n"
                    + common_msg
                )
            if "mw_salt" not in self.config.chemical_additives[j]["parameter_data"]:
                raise ConfigurationError(
                    "\n Did not provide a 'mw_salt' for chemical \n" + common_msg
                )
            if (
                type(self.config.chemical_additives[j]["parameter_data"]["mw_additive"])
                != tuple
            ):
                raise ConfigurationError(
                    "\n Did not provide a tuple for 'mw_additive' \n" + common_msg
                )
            if (
                type(self.config.chemical_additives[j]["parameter_data"]["mw_salt"])
                != tuple
            ):
                raise ConfigurationError(
                    "\n Did not provide a tuple for 'mw_salt' \n" + common_msg
                )
            if not isinstance(
                self.config.chemical_additives[j]["parameter_data"][
                    "moles_salt_per_mole_additive"
                ],
                (int, float),
            ):
                raise ConfigurationError(
                    "\n Did not provide a number for 'moles_salt_per_mole_additive' \n"
                    + common_msg
                )
            # Populate temp dicts for parameter and variable setting
            mw_adds[j] = pyunits.convert_value(
                self.config.chemical_additives[j]["parameter_data"]["mw_additive"][0],
                from_units=self.config.chemical_additives[j]["parameter_data"][
                    "mw_additive"
                ][1],
                to_units=pyunits.kg / pyunits.mol,
            )
            mw_salts[j] = pyunits.convert_value(
                self.config.chemical_additives[j]["parameter_data"]["mw_salt"][0],
                from_units=self.config.chemical_additives[j]["parameter_data"][
                    "mw_salt"
                ][1],
                to_units=pyunits.kg / pyunits.mol,
            )
            molar_rat[j] = self.config.chemical_additives[j]["parameter_data"][
                "moles_salt_per_mole_additive"
            ]
        # Add unit variables
        # Linear relationship between TSS (mg/L) and Turbidity (NTU)
        # TSS (mg/L) = Turbidity (NTU) * slope + intercept
        # Default values come from the following paper:
        # H. Rugner, M. Schwientek,B. Beckingham, B. Kuch, P. Grathwohl,
        # Environ. Earth Sci. 69 (2013) 373-380. DOI: 10.1007/s12665-013-2307-1
        self.slope = Var(
            self.flowsheet().config.time,
            initialize=1.86,
            bounds=(1e-8, 10),
            domain=NonNegativeReals,
            units=pyunits.mg / pyunits.L,
            doc="Slope relation between TSS (mg/L) and Turbidity (NTU)",
        )
        self.intercept = Var(
            self.flowsheet().config.time,
            initialize=0,
            bounds=(0, 10),
            domain=NonNegativeReals,
            units=pyunits.mg / pyunits.L,
            doc="Intercept relation between TSS (mg/L) and Turbidity (NTU)",
        )
        self.initial_turbidity_ntu = Var(
            self.flowsheet().config.time,
            initialize=50,
            bounds=(0, 10000),
            domain=NonNegativeReals,
            units=pyunits.dimensionless,
            doc="Initial measured Turbidity (NTU) from Jar Test",
        )
        self.final_turbidity_ntu = Var(
            self.flowsheet().config.time,
            initialize=1,
            bounds=(0, 10000),
            domain=NonNegativeReals,
            units=pyunits.dimensionless,
            doc="Final measured Turbidity (NTU) from Jar Test",
        )
        self.chemical_doses = Var(
            self.flowsheet().config.time,
            self.config.chemical_additives.keys(),
            initialize=0,
            bounds=(0, 100),
            domain=NonNegativeReals,
            units=pyunits.mg / pyunits.L,
            doc="Dosages of the set of chemical additives",
        )
        self.chemical_mw = Param(
            self.config.chemical_additives.keys(),
            mutable=True,
            initialize=mw_adds,
            domain=NonNegativeReals,
            units=pyunits.kg / pyunits.mol,
            doc="Molecular weights of the set of chemical additives",
        )
        self.salt_mw = Param(
            self.config.chemical_additives.keys(),
            mutable=True,
            initialize=mw_salts,
            domain=NonNegativeReals,
            units=pyunits.kg / pyunits.mol,
            doc="Molecular weights of the produced salts from chemical additives",
        )
        self.salt_from_additive_mole_ratio = Param(
            self.config.chemical_additives.keys(),
            mutable=True,
            initialize=molar_rat,
            domain=NonNegativeReals,
            units=pyunits.mol / pyunits.mol,
            doc="Moles of the produced salts from 1 mole of chemical additives",
        )
        # Below set of Vars are for the power usage of the unit
        # User's will need to provide scaling factors for these
        # -----------------------------------------------------------
        # Mines, R.O., Environmental engineering: Principles
        # and Practice, 1st Ed, John Wiley & Sons, 2014.
        # Ch. 6.
        self.rapid_mixing_retention_time = Var(
            self.flowsheet().config.time,
            initialize=30,
            bounds=(0.1, 10000),
            domain=NonNegativeReals,
            units=pyunits.s,
            doc="Hydraulic retention time of each rapid mixing basin in seconds",
        )
        self.num_rapid_mixing_basins = Var(
            initialize=1,
            bounds=(1, 10),
            domain=PositiveIntegers,
            units=pyunits.dimensionless,
            doc="Number of rapid mixing basins in series",
        )
        self.rapid_mixing_vel_grad = Var(
            self.flowsheet().config.time,
            initialize=250,
            bounds=(0.1, 10000),
            domain=NonNegativeReals,
            units=pyunits.s**-1,
            doc="Velocity gradient in each rapid mixing basin in (m/s)/m",
        )
        # NOTE: There are 2 modes for flocculation mixing discussed in literature
        # Here we are only intially defining the 'Paddle-Wheel' mode. Other
        # modes can be added later (if needed). The 'Paddle-Wheel' configuration
        # is the most common used for conventional water treatment.
        self.floc_retention_time = Var(
            self.flowsheet().config.time,
            initialize=1800,
            bounds=(10, 10000),
            domain=NonNegativeReals,
            units=pyunits.s,
            doc="Hydraulic retention time of the flocculation mixing basin in seconds",
        )
        self.single_paddle_length = Var(
            initialize=2,
            bounds=(0.1, 100),
            domain=NonNegativeReals,
            units=pyunits.m,
            doc="Length of a single paddle blade (from center of rotation to the edge) in meters",
        )
        self.single_paddle_width = Var(
            initialize=0.5,
            bounds=(0.01, 100),
            domain=NonNegativeReals,
            units=pyunits.m,
            doc="Width of a single paddle blade in meters",
        )
        self.paddle_rotational_speed = Var(
            self.flowsheet().config.time,
            initialize=100,
            bounds=(0.01, 10000),
            domain=NonNegativeReals,
            units=pyunits.s**-1,
            doc="Rotational speed of the paddles in revolutions per second",
        )
        self.paddle_drag_coef = Var(
            self.flowsheet().config.time,
            initialize=1.5,
            bounds=(0.1, 10),
            domain=NonNegativeReals,
            units=pyunits.dimensionless,
            doc="Drag coefficient for the paddles in the flocculation basin",
        )
        self.vel_fraction = Var(
            initialize=0.7,
            bounds=(0.6, 0.9),
            domain=NonNegativeReals,
            units=pyunits.dimensionless,
            doc="Fraction of actual paddle velocity relative to local water velocity",
        )
        self.num_paddle_wheels = Var(
            initialize=4,
            bounds=(1, 10),
            domain=PositiveIntegers,
            units=pyunits.dimensionless,
            doc="Number of rotating paddle wheels in the flocculation basin",
        )
        self.num_paddles_per_wheel = Var(
            initialize=4,
            bounds=(1, 10),
            domain=PositiveIntegers,
            units=pyunits.dimensionless,
            doc="Number of paddles attached to each rotating wheel in the flocculation basin",
        )
        # Build control volume for feed side
        self.control_volume = ControlVolume0DBlock(
            default={
                "dynamic": False,
                "has_holdup": False,
                "property_package": self.config.property_package,
                "property_package_args": self.config.property_package_args,
            }
        )
        self.control_volume.add_state_blocks(has_phase_equilibrium=False)
        self.control_volume.add_material_balances(
            balance_type=self.config.material_balance_type, has_mass_transfer=True
        )
        # NOTE: This checks for if an energy_balance_type is defined
        if hasattr(self.config, "energy_balance_type"):
            self.control_volume.add_energy_balances(
                balance_type=self.config.energy_balance_type,
                has_enthalpy_transfer=False,
            )
        self.control_volume.add_momentum_balances(
            balance_type=self.config.momentum_balance_type, has_pressure_change=False
        )
        # Add ports
        self.add_inlet_port(name="inlet", block=self.control_volume)
        self.add_outlet_port(name="outlet", block=self.control_volume)
        # Check _phase_component_set for required items
        if ("Liq", "TDS") not in self.config.property_package._phase_component_set:
            raise ConfigurationError(
                "Coagulation-Flocculation model MUST contain ('Liq','TDS') as a component, but "
                "the property package has only specified the following components {}".format(
                    [p for p in self.config.property_package._phase_component_set]
                )
            )
        if ("Liq", "Sludge") not in self.config.property_package._phase_component_set:
            raise ConfigurationError(
                "Coagulation-Flocculation model MUST contain ('Liq','Sludge') as a component, but "
                "the property package has only specified the following components {}".format(
                    [p for p in self.config.property_package._phase_component_set]
                )
            )
        if ("Liq", "TSS") not in self.config.property_package._phase_component_set:
            raise ConfigurationError(
                "Coagulation-Flocculation model MUST contain ('Liq','TSS') as a component, but "
                "the property package has only specified the following components {}".format(
                    [p for p in self.config.property_package._phase_component_set]
                )
            )
        # -------- Add constraints ---------
        # Adds isothermal constraint if no energy balance present
        if not hasattr(self.config, "energy_balance_type"):

            @self.Constraint(self.flowsheet().config.time, doc="Isothermal condition")
            def eq_isothermal(self, t):
                return (
                    self.control_volume.properties_out[t].temperature
                    == self.control_volume.properties_in[t].temperature
                )

        # Constraint for tss loss rate based on measured final turbidity
        self.tss_loss_rate = Var(
            self.flowsheet().config.time,
            initialize=1,
            bounds=(0, 100),
            domain=NonNegativeReals,
            units=units_meta("mass") * units_meta("time") ** -1,
            doc="Mass per time loss rate of TSS based on the measured final turbidity",
        )

        @self.Constraint(
            self.flowsheet().config.time,
            doc="Constraint for the loss rate of TSS to be used in mass_transfer_term",
        )
        def eq_tss_loss_rate(self, t):
            # TSS exit concentration is back-calculated from the jar-test
            # final turbidity via the linear slope/intercept relationship.
            tss_out = pyunits.convert(
                self.slope[t] * self.final_turbidity_ntu[t] + self.intercept[t],
                to_units=units_meta("mass") * units_meta("length") ** -3,
            )
            input_rate = self.control_volume.properties_in[t].flow_mass_phase_comp[
                "Liq", "TSS"
            ]
            exit_rate = (
                self.control_volume.properties_out[t].flow_vol_phase["Liq"] * tss_out
            )
            return self.tss_loss_rate[t] == input_rate - exit_rate

        # Constraint for tds gain rate based on 'chemical_doses' and 'chemical_additives'
        if self.config.chemical_additives:
            self.tds_gain_rate = Var(
                self.flowsheet().config.time,
                initialize=0,
                bounds=(0, 100),
                domain=NonNegativeReals,
                units=units_meta("mass") * units_meta("time") ** -1,
                doc="Mass per time gain rate of TDS based on the chemicals added for coagulation",
            )

            @self.Constraint(
                self.flowsheet().config.time,
                doc="Constraint for the loss rate of TSS to be used in mass_transfer_term",
            )
            def eq_tds_gain_rate(self, t):
                # NOTE(review): local name 'sum' shadows the builtin within
                # this constraint rule; consider renaming (e.g. 'total').
                sum = 0
                for j in self.config.chemical_additives.keys():
                    chem_dose = pyunits.convert(
                        self.chemical_doses[t, j],
                        to_units=units_meta("mass") * units_meta("length") ** -3,
                    )
                    # dose (mass/vol) -> moles additive -> moles salt -> mass
                    # salt per time, via the configured stoichiometry.
                    chem_dose = (
                        chem_dose
                        / self.chemical_mw[j]
                        * self.salt_from_additive_mole_ratio[j]
                        * self.salt_mw[j]
                        * self.control_volume.properties_out[t].flow_vol_phase["Liq"]
                    )
                    sum = sum + chem_dose
                return self.tds_gain_rate[t] == sum

        # Add constraints for mass transfer terms
        @self.Constraint(
            self.flowsheet().config.time,
            self.config.property_package.phase_list,
            self.config.property_package.component_list,
            doc="Mass transfer term",
        )
        def eq_mass_transfer_term(self, t, p, j):
            # TSS removed becomes Sludge one-for-one; TDS grows only when
            # chemical additives are configured; all else is untouched.
            if (p, j) == ("Liq", "TSS"):
                return (
                    self.control_volume.mass_transfer_term[t, p, j]
                    == -self.tss_loss_rate[t]
                )
            elif (p, j) == ("Liq", "Sludge"):
                return (
                    self.control_volume.mass_transfer_term[t, p, j]
                    == self.tss_loss_rate[t]
                )
            elif (p, j) == ("Liq", "TDS"):
                if self.config.chemical_additives:
                    return (
                        self.control_volume.mass_transfer_term[t, p, j]
                        == self.tds_gain_rate[t]
                    )
                else:
                    return self.control_volume.mass_transfer_term[t, p, j] == 0.0
            else:
                return self.control_volume.mass_transfer_term[t, p, j] == 0.0

        # Constraint for the volume of each rapid mixing basin in series
        # Mines, R.O., Environmental engineering: Principles
        # and Practice, 1st Ed, John Wiley & Sons, 2014.
        # Ch. 6.
        self.rapid_mixing_basin_vol = Var(
            self.flowsheet().config.time,
            initialize=1,
            bounds=(0, 1000),
            domain=NonNegativeReals,
            units=units_meta("length") ** 3,
            doc="Volume of each rapid mixing basin in the series",
        )

        @self.Constraint(
            self.flowsheet().config.time,
            doc="Constraint for the volume of each rapid mixing basin",
        )
        def eq_rapid_mixing_basin_vol(self, t):
            flow_rate = pyunits.convert(
                self.control_volume.properties_in[t].flow_vol_phase["Liq"],
                to_units=units_meta("length") ** 3 / pyunits.s,
            )
            return (
                self.rapid_mixing_basin_vol[t]
                == flow_rate * self.rapid_mixing_retention_time[t]
            )

        # Constraint for the power usage of the rapid mixers
        # Mines, R.O., Environmental engineering: Principles
        # and Practice, 1st Ed, John Wiley & Sons, 2014.
        # Ch. 6.
        self.rapid_mixing_power = Var(
            self.flowsheet().config.time,
            initialize=0.01,
            bounds=(0, 100),
            domain=NonNegativeReals,
            units=pyunits.kW,
            doc="Power usage of the rapid mixing basins in kW",
        )

        @self.Constraint(
            self.flowsheet().config.time,
            doc="Constraint for the power usage of the rapid mixing basins",
        )
        def eq_rapid_mixing_power(self, t):
            # P = G^2 * mu * V per basin (velocity-gradient power relation).
            vel_grad = pyunits.convert(
                self.rapid_mixing_vel_grad[t], to_units=units_meta("time") ** -1
            )
            power_usage = pyunits.convert(
                vel_grad**2
                * self.control_volume.properties_out[t].visc_d["Liq"]
                * self.rapid_mixing_basin_vol[t]
                * self.num_rapid_mixing_basins,
                to_units=pyunits.kW,
            )
            return self.rapid_mixing_power[t] == power_usage

        # Constraint for the volume of the flocculation basin
        # Mines, R.O., Environmental engineering: Principles
        # and Practice, 1st Ed, John Wiley & Sons, 2014.
        # Ch. 6.
        self.floc_basin_vol = Var(
            self.flowsheet().config.time,
            initialize=10,
            bounds=(0, 10000),
            domain=NonNegativeReals,
            units=units_meta("length") ** 3,
            doc="Volume of the flocculation basin",
        )

        @self.Constraint(
            self.flowsheet().config.time,
            doc="Constraint for the volume of the flocculation basin",
        )
        def eq_floc_basin_vol(self, t):
            flow_rate = pyunits.convert(
                self.control_volume.properties_in[t].flow_vol_phase["Liq"],
                to_units=units_meta("length") ** 3 / pyunits.s,
            )
            return self.floc_basin_vol[t] == flow_rate * self.floc_retention_time[t]

        # Constraint for the velocity of the paddle wheels
        # Mines, R.O., Environmental engineering: Principles
        # and Practice, 1st Ed, John Wiley & Sons, 2014.
        # Ch. 6.
        self.floc_wheel_speed = Var(
            self.flowsheet().config.time,
            initialize=1,
            bounds=(0, 100),
            domain=NonNegativeReals,
            units=units_meta("length") * units_meta("time") ** -1,
            doc="Velocity of the wheels in the flocculation basin",
        )

        @self.Constraint(
            self.flowsheet().config.time,
            doc="Constraint for the velocity of the wheels in the flocculation basin",
        )
        def eq_floc_wheel_speed(self, t):
            # Tip speed = pi * blade length * rotational speed
            # NOTE(review): v = 2*pi*r*n is the usual tip-speed formula; this
            # uses pi*L*n, consistent only if 'single_paddle_length' is a
            # diameter-like measure -- confirm against the cited reference.
            wheel_rate = pyunits.convert(
                Constants.pi
                * self.single_paddle_length
                * self.paddle_rotational_speed[t],
                to_units=units_meta("length") * units_meta("time") ** -1,
            )
            return self.floc_wheel_speed[t] == wheel_rate

        # Constraint for the power usage of the flocculation mixer
        # Mines, R.O., Environmental engineering: Principles
        # and Practice, 1st Ed, John Wiley & Sons, 2014.
        # Ch. 6.
        self.flocculation_power = Var(
            self.flowsheet().config.time,
            initialize=0.5,
            bounds=(0, 100),
            domain=NonNegativeReals,
            units=pyunits.kW,
            doc="Power usage of the flocculation basin in kW",
        )

        @self.Constraint(
            self.flowsheet().config.time,
            doc="Constraint for the power usage of the flocculation basin",
        )
        def eq_flocculation_power(self, t):
            total_area = pyunits.convert(
                self.single_paddle_width
                * self.single_paddle_length
                * self.num_paddle_wheels
                * self.num_paddles_per_wheel,
                to_units=units_meta("length") ** 2,
            )
            # Drag-power relation: P = 0.5 * Cd * A * rho * (k*v)^3
            power_usage = pyunits.convert(
                0.5
                * self.paddle_drag_coef[t]
                * total_area
                * self.control_volume.properties_out[t].dens_mass_phase["Liq"]
                * self.vel_fraction**3
                * self.floc_wheel_speed[t] ** 3,
                to_units=pyunits.kW,
            )
            return self.flocculation_power[t] == power_usage

        self.total_power = Var(
            self.flowsheet().config.time,
            initialize=0.5,
            bounds=(0, 100),
            domain=NonNegativeReals,
            units=pyunits.kW,
            doc="Power usage of the full unit model in kW",
        )

        @self.Constraint(
            self.flowsheet().config.time,
            doc="Constraint for the power usage of the full unit model",
        )
        def eq_total_power(self, t):
            return (
                self.total_power[t]
                == self.flocculation_power[t] + self.rapid_mixing_power[t]
            )
# Return a scalar expression for the inlet concentration of TSS
def compute_inlet_tss_mass_concentration(self, t):
"""
Function to generate an expression that would represent the mass
concentration of TSS at the inlet port of the unit. Inlet ports
are generally established upstream, but this will be useful for
establishing the inlet TSS when an upstream TSS is unknown. This
level of inlet TSS is based off of measurements made of Turbidity
during the Jar Test.
Keyword Arguments:
self : this unit model object
t : time index on the flowsheet
Returns: Expression
Recover the numeric value by using 'value(Expression)'
"""
units_meta = self.config.property_package.get_metadata().get_derived_units
return pyunits.convert(
self.slope[t] * self.initial_turbidity_ntu[t] + self.intercept[t],
to_units=units_meta("mass") * units_meta("length") ** -3,
)
# Return a scale expression for the inlet mass flow rate of TSS
def compute_inlet_tss_mass_flow(self, t):
"""
Function to generate an expression that would represent the mass
flow rate of TSS at the inlet port of the unit. Inlet ports
are generally established upstream, but this will be useful for
establishing the inlet TSS when an upstream TSS is unknown. This
level of inlet TSS is based off of measurements made of Turbidity
during the Jar Test.
Keyword Arguments:
self : this unit model object
t : time index on the flowsheet
Returns: Expression
Recover the numeric value by using 'value(Expression)'
"""
return self.control_volume.properties_in[t].flow_vol_phase[
"Liq"
] * self.compute_inlet_tss_mass_concentration(t)
# Function to automate fixing of the Turbidity v TSS relation params to defaults
def fix_tss_turbidity_relation_defaults(self):
self.slope.fix()
self.intercept.fix()
    # initialize method
    def initialize_build(
        blk, state_args=None, outlvl=idaeslog.NOTSET, solver=None, optarg=None
    ):
        """
        General wrapper for coagulation-flocculation initialization routines

        Initializes the control volume state blocks, solves the full unit
        model, then releases the inlet state fixed during initialization.

        Keyword Arguments:
            state_args : a dict of arguments to be passed to the property
                         package(s) to provide an initial state for
                         initialization (see documentation of the specific
                         property package) (default = {}).
            outlvl : sets output level of initialization routine
            optarg : solver options dictionary object (default=None)
            solver : str indicating which solver to use during
                     initialization (default = None)
        Returns: None
        """
        init_log = idaeslog.getInitLogger(blk.name, outlvl, tag="unit")
        solve_log = idaeslog.getSolveLogger(blk.name, outlvl, tag="unit")
        # Set solver options
        opt = get_solver(solver, optarg)
        # ---------------------------------------------------------------------
        # Initialize holdup block (also fixes the inlet state; returns flags
        # needed later to un-fix it)
        flags = blk.control_volume.initialize(
            outlvl=outlvl,
            optarg=optarg,
            solver=solver,
            state_args=state_args,
        )
        init_log.info_high("Initialization Step 1 Complete.")
        # ---------------------------------------------------------------------
        # ---------------------------------------------------------------------
        # Solve unit
        with idaeslog.solver_log(solve_log, idaeslog.DEBUG) as slc:
            res = opt.solve(blk, tee=slc.tee)
        init_log.info_high("Initialization Step 2 {}.".format(idaeslog.condition(res)))
        # ---------------------------------------------------------------------
        # Release Inlet state
        # NOTE(review): 'outlvl + 1' is unusual -- IDAES models typically pass
        # 'outlvl' straight through to release_state; confirm the intended
        # verbosity level here.
        blk.control_volume.release_state(flags, outlvl + 1)
        init_log.info("Initialization Complete: {}".format(idaeslog.condition(res)))
def calculate_scaling_factors(self):
super().calculate_scaling_factors()
units_meta = self.config.property_package.get_metadata().get_derived_units
# scaling factors for turbidity relationship
# Supressing warning (these factors are not very important)
if iscale.get_scaling_factor(self.slope) is None:
sf = iscale.get_scaling_factor(self.slope, default=1, warning=False)
iscale.set_scaling_factor(self.slope, sf)
if iscale.get_scaling_factor(self.intercept) is None:
sf = iscale.get_scaling_factor(self.intercept, default=1, warning=False)
iscale.set_scaling_factor(self.intercept, sf)
# scaling factors for turbidity measurements and chemical doses
# Supressing warning
if iscale.get_scaling_factor(self.initial_turbidity_ntu) is None:
sf = iscale.get_scaling_factor(
self.initial_turbidity_ntu, default=1, warning=False
)
iscale.set_scaling_factor(self.initial_turbidity_ntu, sf)
if iscale.get_scaling_factor(self.final_turbidity_ntu) is None:
sf = iscale.get_scaling_factor(
self.final_turbidity_ntu, default=1, warning=False
)
iscale.set_scaling_factor(self.final_turbidity_ntu, sf)
if iscale.get_scaling_factor(self.chemical_doses) is None:
sf = iscale.get_scaling_factor(
self.chemical_doses, default=1, warning=False
)
iscale.set_scaling_factor(self.chemical_doses, sf)
# scaling factors for power usage in rapid mixing
# Display warning
if iscale.get_scaling_factor(self.rapid_mixing_retention_time) is None:
sf = iscale.get_scaling_factor(
self.rapid_mixing_retention_time, default=1e-1, warning=True
)
iscale.set_scaling_factor(self.rapid_mixing_retention_time, sf)
if iscale.get_scaling_factor(self.num_rapid_mixing_basins) is None:
sf = iscale.get_scaling_factor(
self.num_rapid_mixing_basins, default=1, warning=True
)
iscale.set_scaling_factor(self.num_rapid_mixing_basins, sf)
if iscale.get_scaling_factor(self.rapid_mixing_vel_grad) is None:
sf = iscale.get_scaling_factor(
self.rapid_mixing_vel_grad, default=1e-2, warning=True
)
iscale.set_scaling_factor(self.rapid_mixing_vel_grad, sf)
if iscale.get_scaling_factor(self.floc_retention_time) is None:
sf = iscale.get_scaling_factor(
self.floc_retention_time, default=1e-3, warning=True
)
iscale.set_scaling_factor(self.floc_retention_time, sf)
if iscale.get_scaling_factor(self.single_paddle_length) is None:
sf = iscale.get_scaling_factor(
self.single_paddle_length, default=1, warning=True
)
iscale.set_scaling_factor(self.single_paddle_length, sf)
if iscale.get_scaling_factor(self.single_paddle_width) is None:
sf = iscale.get_scaling_factor(
self.single_paddle_width, default=1, warning=True
)
iscale.set_scaling_factor(self.single_paddle_width, sf)
if iscale.get_scaling_factor(self.paddle_rotational_speed) is None:
sf = iscale.get_scaling_factor(
self.paddle_rotational_speed, default=10, warning=True
)
iscale.set_scaling_factor(self.paddle_rotational_speed, sf)
if iscale.get_scaling_factor(self.paddle_drag_coef) is None:
sf = iscale.get_scaling_factor(
self.paddle_drag_coef, default=1, warning=True
)
iscale.set_scaling_factor(self.paddle_drag_coef, sf)
if iscale.get_scaling_factor(self.vel_fraction) is None:
sf = iscale.get_scaling_factor(self.vel_fraction, default=1, warning=True)
iscale.set_scaling_factor(self.vel_fraction, sf)
if iscale.get_scaling_factor(self.num_paddle_wheels) is None:
sf = iscale.get_scaling_factor(
self.num_paddle_wheels, default=1, warning=True
)
iscale.set_scaling_factor(self.num_paddle_wheels, sf)
if iscale.get_scaling_factor(self.num_paddles_per_wheel) is None:
sf = iscale.get_scaling_factor(
self.num_paddles_per_wheel, default=1, warning=True
)
iscale.set_scaling_factor(self.num_paddles_per_wheel, sf)
# set scaling for tss_loss_rate
if iscale.get_scaling_factor(self.tss_loss_rate) is None:
sf = 0
for t in self.control_volume.properties_in:
sf += value(
self.control_volume.properties_in[t].flow_mass_phase_comp[
"Liq", "TSS"
]
)
sf = sf / len(self.control_volume.properties_in)
if sf < 0.01:
sf = 0.01
iscale.set_scaling_factor(self.tss_loss_rate, 1 / sf)
for ind, c in self.eq_tss_loss_rate.items():
iscale.constraint_scaling_transform(c, 1 / sf)
# set scaling for tds_gain_rate
if self.config.chemical_additives:
if iscale.get_scaling_factor(self.tds_gain_rate) is None:
sf = 0
for t in self.control_volume.properties_in:
sum = 0
for j in self.config.chemical_additives.keys():
chem_dose = pyunits.convert(
self.chemical_doses[t, j],
to_units=units_meta("mass") * units_meta("length") ** -3,
)
chem_dose = (
chem_dose
/ self.chemical_mw[j]
* self.salt_from_additive_mole_ratio[j]
* self.salt_mw[j]
* self.control_volume.properties_in[t].flow_vol_phase["Liq"]
)
sum = sum + chem_dose
sf += value(sum)
sf = sf / len(self.control_volume.properties_in)
if sf < 0.001:
sf = 0.001
iscale.set_scaling_factor(self.tds_gain_rate, 1 / sf)
for ind, c in self.eq_tds_gain_rate.items():
iscale.constraint_scaling_transform(c, 1 / sf)
# set scaling for mass transfer terms
for ind, c in self.eq_mass_transfer_term.items():
if ind[2] == "TDS":
if self.config.chemical_additives:
sf = iscale.get_scaling_factor(self.tds_gain_rate)
else:
sf = 1
elif ind[2] == "TSS":
sf = iscale.get_scaling_factor(self.tss_loss_rate)
elif ind[2] == "Sludge":
sf = iscale.get_scaling_factor(self.tss_loss_rate)
else:
sf = 1
iscale.constraint_scaling_transform(c, sf)
iscale.set_scaling_factor(self.control_volume.mass_transfer_term[ind], sf)
# set scaling for rapid_mixing_basin_vol
if iscale.get_scaling_factor(self.rapid_mixing_basin_vol[t]) is None:
sf1 = 0
sf2 = 0
for t in self.control_volume.properties_out:
sf1 += iscale.get_scaling_factor(
self.control_volume.properties_out[t].flow_vol_phase["Liq"]
)
sf2 = iscale.get_scaling_factor(self.rapid_mixing_retention_time)
sf1 = sf1 / len(self.control_volume.properties_in)
sf = sf1 * sf2
iscale.set_scaling_factor(self.rapid_mixing_basin_vol, sf)
for ind, c in self.eq_rapid_mixing_basin_vol.items():
iscale.constraint_scaling_transform(c, sf)
# set scaling for rapid_mixing_power
if iscale.get_scaling_factor(self.rapid_mixing_power[t]) is None:
sf1 = 0
sf2 = 0
for t in self.control_volume.properties_out:
sf1 += iscale.get_scaling_factor(
self.control_volume.properties_out[t].visc_d["Liq"]
)
sf2 = iscale.get_scaling_factor(self.rapid_mixing_vel_grad)
sf3 = iscale.get_scaling_factor(self.rapid_mixing_basin_vol)
sf4 = iscale.get_scaling_factor(self.num_rapid_mixing_basins)
sf1 = sf1 / len(self.control_volume.properties_in)
sf = sf1 * sf2**2 * sf3 * sf4
iscale.set_scaling_factor(self.rapid_mixing_power, sf)
for ind, c in self.eq_rapid_mixing_power.items():
iscale.constraint_scaling_transform(c, sf)
# set scaling for floc_basin_vol
if iscale.get_scaling_factor(self.floc_basin_vol[t]) is None:
sf1 = 0
sf2 = 0
for t in self.control_volume.properties_out:
sf1 += iscale.get_scaling_factor(
self.control_volume.properties_out[t].flow_vol_phase["Liq"]
)
sf2 = iscale.get_scaling_factor(self.floc_retention_time)
sf1 = sf1 / len(self.control_volume.properties_in)
sf = sf1 * sf2
iscale.set_scaling_factor(self.floc_basin_vol, sf)
for ind, c in self.eq_floc_basin_vol.items():
iscale.constraint_scaling_transform(c, sf)
# set scaling for floc_wheel_speed
if iscale.get_scaling_factor(self.floc_wheel_speed[t]) is None:
sf1 = iscale.get_scaling_factor(self.paddle_rotational_speed)
sf2 = iscale.get_scaling_factor(self.single_paddle_length)
sf = sf1 * sf2 * Constants.pi / 10
iscale.set_scaling_factor(self.floc_wheel_speed, sf)
for ind, c in self.eq_floc_wheel_speed.items():
iscale.constraint_scaling_transform(c, sf)
# set scaling for flocculation_power
if iscale.get_scaling_factor(self.flocculation_power[t]) is None:
sf1 = iscale.get_scaling_factor(self.floc_wheel_speed)
sf2 = iscale.get_scaling_factor(self.vel_fraction)
sf3 = 0
for t in self.control_volume.properties_out:
sf3 += iscale.get_scaling_factor(
self.control_volume.properties_out[t].dens_mass_phase["Liq"]
)
sf3 = sf3 / len(self.control_volume.properties_in)
sf4 = iscale.get_scaling_factor(self.single_paddle_length)
sf5 = iscale.get_scaling_factor(self.single_paddle_width)
sf6 = iscale.get_scaling_factor(self.num_paddle_wheels)
sf7 = iscale.get_scaling_factor(self.num_paddles_per_wheel)
sf8 = iscale.get_scaling_factor(self.paddle_drag_coef)
sf = 0.5 * sf8 * (sf4 * sf5 * sf6 * sf7) * sf3 * sf2**3 * sf1**3 * 500
iscale.set_scaling_factor(self.flocculation_power, sf)
for ind, c in self.eq_flocculation_power.items():
iscale.constraint_scaling_transform(c, sf)
# set scaling for total_power
if iscale.get_scaling_factor(self.total_power[t]) is None:
sf1 = iscale.get_scaling_factor(self.flocculation_power)
sf2 = iscale.get_scaling_factor(self.rapid_mixing_power)
sf = (sf1 + sf2) / 2
iscale.set_scaling_factor(self.total_power, sf)
for ind, c in self.eq_total_power.items():
iscale.constraint_scaling_transform(c, sf)
# set scaling factors for control_volume.properties_in based on control_volume.properties_out
for t in self.control_volume.properties_in:
if (
iscale.get_scaling_factor(
self.control_volume.properties_in[t].dens_mass_phase
)
is None
):
sf = iscale.get_scaling_factor(
self.control_volume.properties_out[t].dens_mass_phase
)
iscale.set_scaling_factor(
self.control_volume.properties_in[t].dens_mass_phase, sf
)
if (
iscale.get_scaling_factor(
self.control_volume.properties_in[t].flow_mass_phase_comp
)
is None
):
for ind in self.control_volume.properties_in[t].flow_mass_phase_comp:
sf = iscale.get_scaling_factor(
self.control_volume.properties_out[t].flow_mass_phase_comp[ind]
)
iscale.set_scaling_factor(
self.control_volume.properties_in[t].flow_mass_phase_comp[ind],
sf,
)
if (
iscale.get_scaling_factor(
self.control_volume.properties_in[t].mass_frac_phase_comp
)
is None
):
for ind in self.control_volume.properties_in[t].mass_frac_phase_comp:
sf = iscale.get_scaling_factor(
self.control_volume.properties_out[t].mass_frac_phase_comp[ind]
)
iscale.set_scaling_factor(
self.control_volume.properties_in[t].mass_frac_phase_comp[ind],
sf,
)
if (
iscale.get_scaling_factor(
self.control_volume.properties_in[t].flow_vol_phase
)
is None
):
for ind in self.control_volume.properties_in[t].flow_vol_phase:
sf = iscale.get_scaling_factor(
self.control_volume.properties_out[t].flow_vol_phase[ind]
)
iscale.set_scaling_factor(
self.control_volume.properties_in[t].flow_vol_phase[ind], sf
)
# update scaling for control_volume.properties_out
for t in self.control_volume.properties_out:
if (
iscale.get_scaling_factor(
self.control_volume.properties_out[t].dens_mass_phase
)
is None
):
iscale.set_scaling_factor(
self.control_volume.properties_out[t].dens_mass_phase, 1e-3
)
if (
iscale.get_scaling_factor(self.control_volume.properties_out[t].visc_d)
is None
):
iscale.set_scaling_factor(
self.control_volume.properties_out[t].visc_d, 1e3
)
# need to update scaling factors for TSS, Sludge, and TDS to account for the
# expected change in their respective values from the loss/gain rates
for ind in self.control_volume.properties_out[t].flow_mass_phase_comp:
if ind[1] == "TSS":
sf_og = iscale.get_scaling_factor(
self.control_volume.properties_out[t].flow_mass_phase_comp[ind]
)
sf_new = iscale.get_scaling_factor(self.tss_loss_rate)
iscale.set_scaling_factor(
self.control_volume.properties_out[t].flow_mass_phase_comp[ind],
100 * sf_new * (sf_new / sf_og),
)
if ind[1] == "Sludge":
sf_og = iscale.get_scaling_factor(
self.control_volume.properties_out[t].flow_mass_phase_comp[ind]
)
sf_new = iscale.get_scaling_factor(self.tss_loss_rate)
iscale.set_scaling_factor(
self.control_volume.properties_out[t].flow_mass_phase_comp[ind],
100 * sf_new * (sf_new / sf_og),
)
for ind in self.control_volume.properties_out[t].mass_frac_phase_comp:
if ind[1] == "TSS":
sf_og = iscale.get_scaling_factor(
self.control_volume.properties_out[t].mass_frac_phase_comp[ind]
)
sf_new = iscale.get_scaling_factor(self.tss_loss_rate)
iscale.set_scaling_factor(
self.control_volume.properties_out[t].mass_frac_phase_comp[ind],
100 * sf_new * (sf_new / sf_og),
)
if ind[1] == "Sludge":
sf_og = iscale.get_scaling_factor(
self.control_volume.properties_out[t].mass_frac_phase_comp[ind]
)
sf_new = iscale.get_scaling_factor(self.tss_loss_rate)
iscale.set_scaling_factor(
self.control_volume.properties_out[t].mass_frac_phase_comp[ind],
100 * sf_new * (sf_new / sf_og),
)
def _get_performance_contents(self, time_point=0):
t = time_point
return {
"vars": {
"Total Power Usage (kW)": self.total_power[t],
"Rapid Mixing Power (kW)": self.rapid_mixing_power[t],
"Flocc Mixing Power (kW)": self.flocculation_power[t],
},
"exprs": {},
"params": {},
}
| 41.714847 | 101 | 0.57251 |
79559480fefe867288e8db9228d105afb8d3b43a | 4,372 | py | Python | isi_sdk_8_0/isi_sdk_8_0/models/directory_query_scope_conditions.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 24 | 2018-06-22T14:13:23.000Z | 2022-03-23T01:21:26.000Z | isi_sdk_8_0/isi_sdk_8_0/models/directory_query_scope_conditions.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 46 | 2018-04-30T13:28:22.000Z | 2022-03-21T21:11:07.000Z | isi_sdk_8_0/isi_sdk_8_0/models/directory_query_scope_conditions.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 29 | 2018-06-19T00:14:04.000Z | 2022-02-08T17:51:19.000Z | # coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 3
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class DirectoryQueryScopeConditions(object):
    """Swagger model for one scope condition of a directory query.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    # Attribute name -> declared swagger type.
    swagger_types = {
        'attr': 'str',
        'operator': 'str',
        'value': 'str'
    }

    # Attribute name -> JSON key in the API definition.
    attribute_map = {
        'attr': 'attr',
        'operator': 'operator',
        'value': 'value'
    }

    def __init__(self, attr=None, operator=None, value=None):  # noqa: E501
        """DirectoryQueryScopeConditions - a model defined in Swagger"""  # noqa: E501
        self._attr = None
        self._operator = None
        self._value = None
        self.discriminator = None
        # Route any supplied arguments through the property setters.
        if attr is not None:
            self.attr = attr
        if operator is not None:
            self.operator = operator
        if value is not None:
            self.value = value

    @property
    def attr(self):
        """The attr of this DirectoryQueryScopeConditions.  # noqa: E501

        :rtype: str
        """
        return self._attr

    @attr.setter
    def attr(self, attr):
        """Set the attr of this DirectoryQueryScopeConditions.

        :type: str
        """
        self._attr = attr

    @property
    def operator(self):
        """The operator of this DirectoryQueryScopeConditions.  # noqa: E501

        :rtype: str
        """
        return self._operator

    @operator.setter
    def operator(self, operator):
        """Set the operator of this DirectoryQueryScopeConditions.

        :type: str
        """
        self._operator = operator

    @property
    def value(self):
        """The value of this DirectoryQueryScopeConditions.  # noqa: E501

        :rtype: str
        """
        return self._value

    @value.setter
    def value(self, value):
        """Set the value of this DirectoryQueryScopeConditions.

        :type: str
        """
        self._value = value

    def to_dict(self):
        """Return the model properties as a dict."""
        def _serialize(item):
            # Mirror the generator's one-level conversion rules exactly.
            if isinstance(item, list):
                return [
                    elem.to_dict() if hasattr(elem, "to_dict") else elem
                    for elem in item
                ]
            if hasattr(item, "to_dict"):
                return item.to_dict()
            if isinstance(item, dict):
                return {
                    key: (val.to_dict() if hasattr(val, "to_dict") else val)
                    for key, val in item.items()
                }
            return item

        result = {}
        for name, _ in six.iteritems(self.swagger_types):
            result[name] = _serialize(getattr(self, name))
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Return True if both objects are equal."""
        return (
            isinstance(other, DirectoryQueryScopeConditions)
            and self.__dict__ == other.__dict__
        )

    def __ne__(self, other):
        """Return True if both objects are not equal."""
        return not self == other
| 26.49697 | 90 | 0.572278 |
795594baedd472d37effa7c22bbb225fe1310ad0 | 2,157 | py | Python | setup.py | tjguk/winsys | 346a5bc0ad74f3f70b1099e714b544a6f008b72c | [
"MIT"
] | 51 | 2015-01-14T09:23:48.000Z | 2021-11-08T12:53:54.000Z | setup.py | tjguk/winsys | 346a5bc0ad74f3f70b1099e714b544a6f008b72c | [
"MIT"
] | 2 | 2019-05-30T12:34:40.000Z | 2020-06-13T20:00:55.000Z | setup.py | tjguk/winsys | 346a5bc0ad74f3f70b1099e714b544a6f008b72c | [
"MIT"
] | 15 | 2015-04-21T19:48:48.000Z | 2020-07-29T19:30:43.000Z | import os
import re
from setuptools import setup
#
# setup.py framework shamelessly stolen from the
# Mu editor setup
#
# All paths below are resolved relative to this setup.py file.
base_dir = os.path.dirname(__file__)
# Matches simple one-line dunder assignments such as __version__ = "1.0".
DUNDER_ASSIGN_RE = re.compile(r"""^__\w+__\s*=\s*['"].+['"]$""")
about = {}
# Pull package metadata (__title__, __version__, ...) out of the package's
# __init__.py by exec'ing only the dunder-assignment lines into 'about'.
with open(os.path.join(base_dir, "winsys", "__init__.py"), encoding="utf8") as f:
    for line in f:
        if DUNDER_ASSIGN_RE.search(line):
            exec(line, about)
# Sphinx cross-reference roles would render literally on PyPI; strip them
# from the README before using it as the long description.
TO_STRIP = set([":class:", ":mod:", ":meth:", ":func:"])
with open(os.path.join(base_dir, "README.rst"), encoding="utf8") as f:
    readme = f.read()
    for s in TO_STRIP:
        readme = readme.replace(s, "")
#~ with open(os.path.join(base_dir, "CHANGES.rst"), encoding="utf8") as f:
    #~ changes = f.read()
# Changelog inclusion is currently disabled (see commented block above).
changes = ""
install_requires = [
    "pywin32"
]
# Optional dependency groups, selectable as e.g. pip install winsys[tests].
extras_require = {
    "tests": [
        "pytest",
    ],
    "docs": ["sphinx"],
    "package": [
        # Wheel building and PyPI uploading
        "wheel",
        "twine",
    ],
}
# 'dev' aggregates everything a contributor needs; 'all' is the
# de-duplicated union of every extras group.
extras_require["dev"] = (
    extras_require["tests"]
    + extras_require["docs"]
    + extras_require["package"]
)
extras_require["all"] = list(
    {req for extra, reqs in extras_require.items() for req in reqs}
)
setup(
    name=about["__title__"],
    version=about["__version__"],
    description=about["__description__"],
    long_description="{}\n\n{}".format(readme, changes),
    long_description_content_type = "text/x-rst",
    author=about["__author__"],
    author_email=about["__email__"],
    url=about["__url__"],
    license=about["__license__"],
    packages = ['winsys', 'winsys._security', 'winsys.extras'],
    install_requires=install_requires,
    extras_require=extras_require,
    include_package_data=True,
    zip_safe=False,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: MIT License',
        'Operating System :: Microsoft :: Windows :: Windows NT/2000',
        'Programming Language :: Python :: 3',
        'Topic :: Utilities',
    ],
    platforms='win32'
)
| 27.653846 | 82 | 0.600834 |
795594bf5676e71e7151192381b3c93ed5d82432 | 7,748 | py | Python | src/decode_demo.py | bgshin/mxnet_cnn | 19ebc13f4990ee29612a479325cf13d3bd9723ec | [
"Apache-2.0"
] | null | null | null | src/decode_demo.py | bgshin/mxnet_cnn | 19ebc13f4990ee29612a479325cf13d3bd9723ec | [
"Apache-2.0"
] | null | null | null | src/decode_demo.py | bgshin/mxnet_cnn | 19ebc13f4990ee29612a479325cf13d3bd9723ec | [
"Apache-2.0"
] | null | null | null | from keras.layers import Conv1D, Average, Multiply
from keras.layers import Dense, AveragePooling1D, Input, Lambda
from keras.models import Model
from keras.preprocessing import sequence
from dataset import Timer
import os
from keras import backend as K
import gensim
import numpy as np
import pickle
class SentimentAnalysis():
    """CNN-with-attention sentiment classifier (3-way softmax output).

    Loads a pre-trained word2vec embedding and pre-trained Keras weights,
    then exposes decode(), which returns class probabilities plus
    per-n-gram attention weights for each input sentence.
    """

    def __init__(self, w2v_dim=400, maxlen=60, w2v_path='../data/w2v/w2v-400-semevaltrndev.gnsm'):
        # w2v_dim: word-vector dimensionality; maxlen: max tokens per sentence
        self.w2v_dim = w2v_dim
        self.maxlen = maxlen
        self.embedding, self.vocab = self.load_embedding(w2v_path)
        self.load_model()

    def load_embedding(self, w2v_path):
        """Load a gensim KeyedVectors file and build the embedding matrix.

        Returns (embedding_matrix, vocab). The extra all-zero final row is
        the out-of-vocabulary slot (index == len(vocab), see preprocess_x).
        """
        print('Loading w2v...')
        emb_model = gensim.models.KeyedVectors.load(w2v_path, mmap='r')
        print('creating w2v mat...')
        word_index = emb_model.vocab
        # BUGFIX: use self.w2v_dim rather than a hard-coded 400 so the matrix
        # width stays consistent with the configured vector dimensionality.
        embedding_matrix = np.zeros((len(word_index) + 1, self.w2v_dim), dtype=np.float32)
        for word, i in word_index.items():
            embedding_vector = emb_model[word]
            if embedding_vector is not None:
                # words not found in embedding index will be all-zeros.
                embedding_matrix[i.index] = embedding_vector
        return embedding_matrix, emb_model.vocab

    def load_model(self, modelpath = './model/newbests17-400-v2-3'):
        """Build the prediction and attention Keras models over a shared
        input, load pre-trained weights, and copy them into the attention
        model so both report consistent results."""
        filter_sizes = (1, 2, 3, 4, 5)
        num_filters = 80
        hidden_dims = 20

        def prediction_model(model_input, model_path):
            # One conv + attention branch per n-gram size, averaged together.
            conv_blocks = []
            for sz in filter_sizes:
                conv = Conv1D(num_filters,
                              sz,
                              padding="valid",
                              activation="relu",
                              strides=1)(model_input)
                print(conv)  # debug output retained from original
                conv = Lambda(lambda x: K.permute_dimensions(x, (0, 2, 1)))(conv)
                conv = AveragePooling1D(pool_size=num_filters)(conv)
                attention_size = self.maxlen - sz + 1
                multiplied_vector_list = []
                for i in range(attention_size):
                    # Weight each n-gram's tokens by its (length-normalized)
                    # attention score.
                    selected_attention = Lambda(lambda x: x[:, 0, i] / float(sz))(conv)
                    for j in range(sz):
                        selected_token = Lambda(lambda x: x[:, i + j, :])(model_input)
                        multiplied_vector = Lambda(lambda x: Multiply()(x))([selected_token, selected_attention])
                        multiplied_vector_list.append(multiplied_vector)
                attentioned_conv = Average()(multiplied_vector_list)
                print(attentioned_conv)  # debug output retained from original
                conv_blocks.append(attentioned_conv)
            z = Average()(conv_blocks)
            z = Dense(hidden_dims, activation="relu")(z)
            model_output = Dense(3, activation="softmax")(z)
            model = Model(model_input, model_output)
            model.load_weights(model_path)
            model.compile(loss="sparse_categorical_crossentropy", optimizer="adam", metrics=["accuracy"])
            return model

        def attention_model(model_input):
            # Same conv front-end, but outputs the raw attention maps.
            att_list = []
            for sz in filter_sizes:
                conv = Conv1D(num_filters,
                              sz,
                              padding="valid",
                              activation="relu",
                              strides=1)(model_input)
                conv = Lambda(lambda x: K.permute_dimensions(x, (0, 2, 1)))(conv)
                att = AveragePooling1D(pool_size=num_filters)(conv)
                att_list.append(att)
            model = Model(model_input, att_list)
            model.compile(loss="sparse_categorical_crossentropy", optimizer="adam", metrics=["accuracy"])
            return model

        input_shape = (self.maxlen, self.w2v_dim)
        model_input = Input(shape=input_shape)
        self.p_model = prediction_model(model_input, modelpath)
        self.a_model = attention_model(model_input)
        # Share the trained weights with the attention model, layer by layer.
        for i in range(len(self.a_model.layers)):
            self.a_model.layers[i].set_weights(self.p_model.layers[i].get_weights())

    def preprocess_x(self, sentences):
        """Tokenize on spaces, map tokens to vocab indices (OOV -> the zero
        row at index len(vocab)), pad to maxlen, and look up embeddings.

        Returns (embedded_batch, original_token_counts).
        """
        x_text = [line for line in sentences]
        x = []
        for s in x_text:
            one_doc = []
            for token in s.strip().split(" "):
                # BUGFIX: narrowed the bare 'except' to the KeyError raised
                # for out-of-vocabulary tokens.
                try:
                    one_doc.append(self.vocab[token].index)
                except KeyError:
                    one_doc.append(len(self.vocab))
            x.append(one_doc)
        x = np.array(x)
        sentence_len_list = [len(sentence) for sentence in x]
        x = sequence.pad_sequences(x, maxlen=self.maxlen)
        x = self.embedding[x]
        return x, sentence_len_list

    def decode(self, sentences):
        """Classify sentences.

        Returns (y, all_att): y is the softmax probability matrix; all_att
        holds, per sample, the max-normalized attention weights for each of
        the five n-gram sizes, trimmed to the sentence's own length.
        """
        x, sentence_len_list = self.preprocess_x(sentences)
        y = self.p_model.predict(x, batch_size=2000, verbose=0)
        attention_matrix = self.a_model.predict(x, batch_size=2000, verbose=0)
        all_att = []
        for sample_index in range(len(sentence_len_list)):
            one_sample_att = []
            for gram_index in range(5):
                norm_one_sample = attention_matrix[gram_index][sample_index][0] / max(
                    attention_matrix[gram_index][sample_index][0])
                one_sample_att.append(norm_one_sample[-sentence_len_list[sample_index] + gram_index:])
            all_att.append(one_sample_att)
        return y, all_att
if __name__ == "__main__":
    # Pin the demo to a single GPU.
    os.environ["CUDA_VISIBLE_DEVICES"] = '3'
    sentence = "I feel a little bit tired today , but I am really happy !"
    sentence_neg = "Although the rain stopped , I hate this thick cloud in the sky ."
    # Batches of increasing size for the throughput benchmark further below
    # (currently unreachable -- see the exit() call).
    sentences1 = [sentence for i in range(1)]
    sentences10 = [sentence for i in range(10)]
    sentences100 = [sentence for i in range(100)]
    sentences1000 = [sentence for i in range(1000)]
    sentences10000 = [sentence for i in range(10000)]
    sentences100000 = [sentence for i in range(100000)]
    # NOTE(review): these two assignments duplicate the ones above.
    sentence = "I feel a little bit tired today , but I am really happy !"
    sentence_neg = "Although the rain stopped , I hate this thick cloud in the sky ."
    # Take 20 tweets from the SemEval-2017 test split plus the two examples.
    x_text = [line.split('\t')[2] for line in open('../data/s17/tst', "r").readlines()][:20]
    x_text.append(sentence)
    x_text.append(sentence_neg)
    with Timer("init..."):
        sa = SentimentAnalysis()
    # Hand-picked subset of samples for the attention visualization dump.
    sentences = np.array(x_text)[[0,2,6,7,9,11,12,13,14,18,20,21]]
    for sent in sentences:
        print(sent)
    y, att = sa.decode(sentences)
    # Persist predictions, attention weights, and the inputs for plotting.
    with open('output.pkl', 'wb') as handle:
        pickle.dump(y, handle)
        pickle.dump(att, handle)
        pickle.dump(sentences, handle)
    exit()
    # NOTE(review): everything below is unreachable because of exit() above;
    # eval(varname) would also be cleaner as a dict lookup if re-enabled.
    for i in [1, 10, 100, 1000, 10000, 100000]:
        varname = 'sentences%d' % i
        with Timer("decode %s..." % varname):
            y, att = sa.decode(eval(varname))
# [init...]
# Elapsed: 50.20814561843872
# 1/1 [==============================] - 1s
# 1/1 [==============================] - 0s
# [decode sentences1...]
# Elapsed: 2.263317346572876
# 10/10 [==============================] - 0s
# 10/10 [==============================] - 0s
# [decode sentences10...]
# Elapsed: 0.09254097938537598
# 100/100 [==============================] - 0s
# 100/100 [==============================] - 0s
# [decode sentences100...]
# Elapsed: 0.16536641120910645
# 1000/1000 [==============================] - 0s
# 1000/1000 [==============================] - 0s
# [decode sentences1000...]
# Elapsed: 0.2981994152069092
# 10000/10000 [==============================] - 1s
# 10000/10000 [==============================] - 0s
# [decode sentences10000...]
# Elapsed: 2.2783617973327637
# 100000/100000 [==============================] - 68s
# 100000/100000 [==============================] - 58s
# [decode sentences100000...]
# Elapsed: 146.7458312511444
795594fe43d130f78463ad6bcbc444f6a0add8ca | 4,494 | py | Python | django/contrib/gis/gdal/prototypes/ds.py | Yoann-Vie/esgi-hearthstone | 115d03426c7e8e80d89883b78ac72114c29bed12 | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | django/contrib/gis/gdal/prototypes/ds.py | Yoann-Vie/esgi-hearthstone | 115d03426c7e8e80d89883b78ac72114c29bed12 | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | django/contrib/gis/gdal/prototypes/ds.py | Yoann-Vie/esgi-hearthstone | 115d03426c7e8e80d89883b78ac72114c29bed12 | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | """
This module houses the ctypes function prototypes for OGR DataSource
related data structures. OGR_Dr_*, OGR_DS_*, OGR_L_*, OGR_F_*,
OGR_Fld_* routines are relevant here.
"""
from ctypes import POINTER, c_char_p, c_double, c_int, c_long, c_void_p
from django.contrib.gis.gdal.envelope import OGREnvelope
from django.contrib.gis.gdal.libgdal import GDAL_VERSION, lgdal
from django.contrib.gis.gdal.prototypes.generation import (
const_string_output, double_output, geom_output, int64_output, int_output,
srs_output, void_output, voidptr_output,
)
# Each assignment below wraps a C entry point from libgdal (lgdal) with its
# ctypes argument list and an output/error-checking wrapper from
# django.contrib.gis.gdal.prototypes.generation.
c_int_p = POINTER(c_int)  # shortcut type
# Driver Routines
register_all = void_output(lgdal.OGRRegisterAll, [], errcheck=False)
cleanup_all = void_output(lgdal.OGRCleanupAll, [], errcheck=False)
get_driver = voidptr_output(lgdal.OGRGetDriver, [c_int])
get_driver_by_name = voidptr_output(lgdal.OGRGetDriverByName, [c_char_p], errcheck=False)
get_driver_count = int_output(lgdal.OGRGetDriverCount, [])
get_driver_name = const_string_output(lgdal.OGR_Dr_GetName, [c_void_p], decoding='ascii')
# DataSource
open_ds = voidptr_output(lgdal.OGROpen, [c_char_p, c_int, POINTER(c_void_p)])
destroy_ds = void_output(lgdal.OGR_DS_Destroy, [c_void_p], errcheck=False)
release_ds = void_output(lgdal.OGRReleaseDataSource, [c_void_p])
get_ds_name = const_string_output(lgdal.OGR_DS_GetName, [c_void_p])
get_layer = voidptr_output(lgdal.OGR_DS_GetLayer, [c_void_p, c_int])
get_layer_by_name = voidptr_output(lgdal.OGR_DS_GetLayerByName, [c_void_p, c_char_p])
get_layer_count = int_output(lgdal.OGR_DS_GetLayerCount, [c_void_p])
# Layer Routines
get_extent = void_output(lgdal.OGR_L_GetExtent, [c_void_p, POINTER(OGREnvelope), c_int])
get_feature = voidptr_output(lgdal.OGR_L_GetFeature, [c_void_p, c_long])
get_feature_count = int_output(lgdal.OGR_L_GetFeatureCount, [c_void_p, c_int])
get_layer_defn = voidptr_output(lgdal.OGR_L_GetLayerDefn, [c_void_p])
get_layer_srs = srs_output(lgdal.OGR_L_GetSpatialRef, [c_void_p])
get_next_feature = voidptr_output(lgdal.OGR_L_GetNextFeature, [c_void_p])
reset_reading = void_output(lgdal.OGR_L_ResetReading, [c_void_p], errcheck=False)
test_capability = int_output(lgdal.OGR_L_TestCapability, [c_void_p, c_char_p])
get_spatial_filter = geom_output(lgdal.OGR_L_GetSpatialFilter, [c_void_p])
set_spatial_filter = void_output(lgdal.OGR_L_SetSpatialFilter, [c_void_p, c_void_p], errcheck=False)
set_spatial_filter_rect = void_output(
    lgdal.OGR_L_SetSpatialFilterRect,
    [c_void_p, c_double, c_double, c_double, c_double], errcheck=False
)
# Feature Definition Routines
get_fd_geom_type = int_output(lgdal.OGR_FD_GetGeomType, [c_void_p])
get_fd_name = const_string_output(lgdal.OGR_FD_GetName, [c_void_p])
get_feat_name = const_string_output(lgdal.OGR_FD_GetName, [c_void_p])
get_field_count = int_output(lgdal.OGR_FD_GetFieldCount, [c_void_p])
get_field_defn = voidptr_output(lgdal.OGR_FD_GetFieldDefn, [c_void_p, c_int])
# Feature Routines
clone_feature = voidptr_output(lgdal.OGR_F_Clone, [c_void_p])
destroy_feature = void_output(lgdal.OGR_F_Destroy, [c_void_p], errcheck=False)
feature_equal = int_output(lgdal.OGR_F_Equal, [c_void_p, c_void_p])
get_feat_geom_ref = geom_output(lgdal.OGR_F_GetGeometryRef, [c_void_p])
get_feat_field_count = int_output(lgdal.OGR_F_GetFieldCount, [c_void_p])
get_feat_field_defn = voidptr_output(lgdal.OGR_F_GetFieldDefnRef, [c_void_p, c_int])
get_fid = int_output(lgdal.OGR_F_GetFID, [c_void_p])
get_field_as_datetime = int_output(
    lgdal.OGR_F_GetFieldAsDateTime,
    [c_void_p, c_int, c_int_p, c_int_p, c_int_p, c_int_p, c_int_p, c_int_p]
)
get_field_as_double = double_output(lgdal.OGR_F_GetFieldAsDouble, [c_void_p, c_int])
get_field_as_integer = int_output(lgdal.OGR_F_GetFieldAsInteger, [c_void_p, c_int])
# 64-bit integer fields only exist from GDAL 2.0 onwards.
if GDAL_VERSION >= (2, 0):
    get_field_as_integer64 = int64_output(lgdal.OGR_F_GetFieldAsInteger64, [c_void_p, c_int])
get_field_as_string = const_string_output(lgdal.OGR_F_GetFieldAsString, [c_void_p, c_int])
get_field_index = int_output(lgdal.OGR_F_GetFieldIndex, [c_void_p, c_char_p])
# Field Routines
get_field_name = const_string_output(lgdal.OGR_Fld_GetNameRef, [c_void_p])
get_field_precision = int_output(lgdal.OGR_Fld_GetPrecision, [c_void_p])
get_field_type = int_output(lgdal.OGR_Fld_GetType, [c_void_p])
get_field_type_name = const_string_output(lgdal.OGR_GetFieldTypeName, [c_int])
get_field_width = int_output(lgdal.OGR_Fld_GetWidth, [c_void_p])
795596542f9f86f793e1c78b6e71b8f2d0ffb7ab | 13,171 | py | Python | sdk/python/pulumi_azure_nextgen/containerregistry/v20191201preview/get_registry.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 31 | 2020-09-21T09:41:01.000Z | 2021-02-26T13:21:59.000Z | sdk/python/pulumi_azure_nextgen/containerregistry/v20191201preview/get_registry.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 231 | 2020-09-21T09:38:45.000Z | 2021-03-01T11:16:03.000Z | sdk/python/pulumi_azure_nextgen/containerregistry/v20191201preview/get_registry.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 4 | 2020-09-29T14:14:59.000Z | 2021-02-10T20:38:16.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetRegistryResult',
'AwaitableGetRegistryResult',
'get_registry',
]
@pulumi.output_type
class GetRegistryResult:
    """
    An object that represents a container registry.

    NOTE: this file is SDK-generated; the hand-rolled repetition in the original
    __init__ (21 near-identical validate-and-set stanzas) is collapsed into a
    single table-driven helper with identical error messages and semantics.
    """
    def __init__(__self__, admin_user_enabled=None, creation_date=None, data_endpoint_enabled=None, data_endpoint_host_names=None, encryption=None, id=None, identity=None, location=None, login_server=None, name=None, network_rule_bypass_options=None, network_rule_set=None, policies=None, private_endpoint_connections=None, provisioning_state=None, public_network_access=None, sku=None, status=None, system_data=None, tags=None, type=None):
        def _set(attr, value, expected):
            # Preserve the original semantics exactly: only truthy values are
            # type-checked, and every value (including None) is stored.
            if value and not isinstance(value, expected):
                raise TypeError(f"Expected argument '{attr}' to be a {expected.__name__}")
            pulumi.set(__self__, attr, value)

        _set("admin_user_enabled", admin_user_enabled, bool)
        _set("creation_date", creation_date, str)
        _set("data_endpoint_enabled", data_endpoint_enabled, bool)
        _set("data_endpoint_host_names", data_endpoint_host_names, list)
        _set("encryption", encryption, dict)
        _set("id", id, str)
        _set("identity", identity, dict)
        _set("location", location, str)
        _set("login_server", login_server, str)
        _set("name", name, str)
        _set("network_rule_bypass_options", network_rule_bypass_options, str)
        _set("network_rule_set", network_rule_set, dict)
        _set("policies", policies, dict)
        _set("private_endpoint_connections", private_endpoint_connections, list)
        _set("provisioning_state", provisioning_state, str)
        _set("public_network_access", public_network_access, str)
        _set("sku", sku, dict)
        _set("status", status, dict)
        _set("system_data", system_data, dict)
        _set("tags", tags, dict)
        _set("type", type, str)

    @property
    @pulumi.getter(name="adminUserEnabled")
    def admin_user_enabled(self) -> Optional[bool]:
        """
        The value that indicates whether the admin user is enabled.
        """
        return pulumi.get(self, "admin_user_enabled")

    @property
    @pulumi.getter(name="creationDate")
    def creation_date(self) -> str:
        """
        The creation date of the container registry in ISO8601 format.
        """
        return pulumi.get(self, "creation_date")

    @property
    @pulumi.getter(name="dataEndpointEnabled")
    def data_endpoint_enabled(self) -> Optional[bool]:
        """
        Enable a single data endpoint per region for serving data.
        """
        return pulumi.get(self, "data_endpoint_enabled")

    @property
    @pulumi.getter(name="dataEndpointHostNames")
    def data_endpoint_host_names(self) -> Sequence[str]:
        """
        List of host names that will serve data when dataEndpointEnabled is true.
        """
        return pulumi.get(self, "data_endpoint_host_names")

    @property
    @pulumi.getter
    def encryption(self) -> Optional['outputs.EncryptionPropertyResponse']:
        """
        The encryption settings of container registry.
        """
        return pulumi.get(self, "encryption")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The resource ID.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def identity(self) -> Optional['outputs.IdentityPropertiesResponse']:
        """
        The identity of the container registry.
        """
        return pulumi.get(self, "identity")

    @property
    @pulumi.getter
    def location(self) -> str:
        """
        The location of the resource. This cannot be changed after the resource is created.
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter(name="loginServer")
    def login_server(self) -> str:
        """
        The URL that can be used to log into the container registry.
        """
        return pulumi.get(self, "login_server")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the resource.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="networkRuleBypassOptions")
    def network_rule_bypass_options(self) -> Optional[str]:
        """
        Whether to allow trusted Azure services to access a network restricted registry.
        """
        return pulumi.get(self, "network_rule_bypass_options")

    @property
    @pulumi.getter(name="networkRuleSet")
    def network_rule_set(self) -> Optional['outputs.NetworkRuleSetResponse']:
        """
        The network rule set for a container registry.
        """
        return pulumi.get(self, "network_rule_set")

    @property
    @pulumi.getter
    def policies(self) -> Optional['outputs.PoliciesResponse']:
        """
        The policies for a container registry.
        """
        return pulumi.get(self, "policies")

    @property
    @pulumi.getter(name="privateEndpointConnections")
    def private_endpoint_connections(self) -> Sequence['outputs.PrivateEndpointConnectionResponse']:
        """
        List of private endpoint connections for a container registry.
        """
        return pulumi.get(self, "private_endpoint_connections")

    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> str:
        """
        The provisioning state of the container registry at the time the operation was called.
        """
        return pulumi.get(self, "provisioning_state")

    @property
    @pulumi.getter(name="publicNetworkAccess")
    def public_network_access(self) -> Optional[str]:
        """
        Whether or not public network access is allowed for the container registry.
        """
        return pulumi.get(self, "public_network_access")

    @property
    @pulumi.getter
    def sku(self) -> 'outputs.SkuResponse':
        """
        The SKU of the container registry.
        """
        return pulumi.get(self, "sku")

    @property
    @pulumi.getter
    def status(self) -> 'outputs.StatusResponse':
        """
        The status of the container registry at the time the operation was called.
        """
        return pulumi.get(self, "status")

    @property
    @pulumi.getter(name="systemData")
    def system_data(self) -> 'outputs.SystemDataResponse':
        """
        Metadata pertaining to creation and last modification of the resource.
        """
        return pulumi.get(self, "system_data")

    @property
    @pulumi.getter
    def tags(self) -> Optional[Mapping[str, str]]:
        """
        The tags of the resource.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The type of the resource.
        """
        return pulumi.get(self, "type")
class AwaitableGetRegistryResult(GetRegistryResult):
    # Awaitable variant of GetRegistryResult: the unreachable `yield` turns
    # __await__ into a generator, so `await` on an already-resolved result
    # immediately returns a plain GetRegistryResult copy.
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetRegistryResult(
            admin_user_enabled=self.admin_user_enabled,
            creation_date=self.creation_date,
            data_endpoint_enabled=self.data_endpoint_enabled,
            data_endpoint_host_names=self.data_endpoint_host_names,
            encryption=self.encryption,
            id=self.id,
            identity=self.identity,
            location=self.location,
            login_server=self.login_server,
            name=self.name,
            network_rule_bypass_options=self.network_rule_bypass_options,
            network_rule_set=self.network_rule_set,
            policies=self.policies,
            private_endpoint_connections=self.private_endpoint_connections,
            provisioning_state=self.provisioning_state,
            public_network_access=self.public_network_access,
            sku=self.sku,
            status=self.status,
            system_data=self.system_data,
            tags=self.tags,
            type=self.type)
def get_registry(registry_name: Optional[str] = None,
                 resource_group_name: Optional[str] = None,
                 opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetRegistryResult:
    """
    An object that represents a container registry.

    Fetches an existing Azure Container Registry via the
    `containerregistry/v20191201preview:getRegistry` provider invoke.

    :param str registry_name: The name of the container registry.
    :param str resource_group_name: The name of the resource group to which the container registry belongs.
    :param opts: Optional invoke options; a default InvokeOptions pinned to the
        SDK's own version is used when omitted.
    """
    __args__ = dict()
    __args__['registryName'] = registry_name
    __args__['resourceGroupName'] = resource_group_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        # Default to the SDK's own version when the caller did not pin one.
        opts.version = _utilities.get_version()
    # Synchronous provider invoke; `.value` holds the raw output object.
    __ret__ = pulumi.runtime.invoke('azure-nextgen:containerregistry/v20191201preview:getRegistry', __args__, opts=opts, typ=GetRegistryResult).value

    # Re-wrap in the awaitable subclass so callers may `await` the result.
    return AwaitableGetRegistryResult(
        admin_user_enabled=__ret__.admin_user_enabled,
        creation_date=__ret__.creation_date,
        data_endpoint_enabled=__ret__.data_endpoint_enabled,
        data_endpoint_host_names=__ret__.data_endpoint_host_names,
        encryption=__ret__.encryption,
        id=__ret__.id,
        identity=__ret__.identity,
        location=__ret__.location,
        login_server=__ret__.login_server,
        name=__ret__.name,
        network_rule_bypass_options=__ret__.network_rule_bypass_options,
        network_rule_set=__ret__.network_rule_set,
        policies=__ret__.policies,
        private_endpoint_connections=__ret__.private_endpoint_connections,
        provisioning_state=__ret__.provisioning_state,
        public_network_access=__ret__.public_network_access,
        sku=__ret__.sku,
        status=__ret__.status,
        system_data=__ret__.system_data,
        tags=__ret__.tags,
        type=__ret__.type)
| 40.278287 | 440 | 0.669805 |
7955968560013a97be429647223cf519321a4ecf | 6,735 | py | Python | web_app/routes/strain_routes.py | andrewwhite5/Med-Cabinet-Solo-Project | 18604b096313cc388ad7e858a4948259819e49dd | [
"MIT"
] | null | null | null | web_app/routes/strain_routes.py | andrewwhite5/Med-Cabinet-Solo-Project | 18604b096313cc388ad7e858a4948259819e49dd | [
"MIT"
] | null | null | null | web_app/routes/strain_routes.py | andrewwhite5/Med-Cabinet-Solo-Project | 18604b096313cc388ad7e858a4948259819e49dd | [
"MIT"
] | null | null | null | #web_app/routes/strain_routes.py
import json
import os
import pickle

from flask import (
    Blueprint, flash, jsonify, make_response, redirect, render_template, request
)
from sqlalchemy import or_
from sqlalchemy.sql.expression import func

from web_app.models import DB, Strain, extract_data, create_table, parse_records
from web_app.services.strains_service import API
strain_routes = Blueprint("strain_routes", __name__)
'''
Strain Recommendation
'''
# Load in dtm from nlp_dtm.pkl
# NOTE: runs at import time and uses a cwd-relative path, so importing this
# module from outside the project root raises FileNotFoundError.
with open('./stats_model/pickle_models/nlp_dtm.pkl', 'rb') as nlp_pkl_file:
    dtm = pickle.load(nlp_pkl_file)
def give_recs(request):
    """
    Creates a JSON object with top 5 recommended strains (numbers).

    Parameters
    ----------
    request : string
        Description of strains user has used in the past, primarily focusing on
        location of origin and physical appearance.

    Returns
    -------
    strain_recs
        A JSON object of recommended strains.
    """
    # NOTE(review): `NearestNeighbors` (sklearn), `tfidf` (a fitted vectorizer)
    # and `pd` (pandas) are never imported or defined in this module, so this
    # function raises NameError as written — confirm where they should come
    # from (e.g. a pickled vectorizer alongside nlp_dtm.pkl).
    # NOTE(review): the parameter name `request` shadows flask.request.
    # Fit on DTM with 5 nn
    nn = NearestNeighbors(n_neighbors=5, algorithm='kd_tree')
    nn.fit(dtm)
    # Query data for similar descriptions
    # NOTE(review): tfidf.transform expects an iterable of documents; passing a
    # bare string would be treated as an iterable of characters — TODO confirm
    # callers pass a list.
    new = tfidf.transform(request)
    new  # NOTE(review): bare expression, has no effect.
    # 5 most similar strain descriptions and their probabilities
    probs, strain_nums = nn.kneighbors(new.todense())
    # Convert np.ndarray to pd.Series then to JSON
    strain_recs = pd.Series(strain_nums[0]).to_json()
    return strain_recs
#CORS requirement to access apis
@strain_routes.before_request
def before_request():
    """CORS preflight handler, required for off-server access.

    Short-circuits OPTIONS requests with permissive CORS headers; every
    other method falls through (returns None) to the normal view.
    """
    def _build_cors_prelight_response():
        # Bug fix: `make_response` was never imported from flask, so every
        # OPTIONS request raised NameError (it is now in the module imports).
        response = make_response()
        response.headers.add("Access-Control-Allow-Origin", "*")
        response.headers.add("Access-Control-Allow-Headers", "*")
        response.headers.add("Access-Control-Allow-Methods", "*")
        return response

    if request.method == "OPTIONS":
        return _build_cors_prelight_response()
@strain_routes.after_request
def after_request(response):
    """Attach the permissive CORS origin header to every outgoing response."""
    response.headers['Access-Control-Allow-Origin'] = '*'
    return response
@strain_routes.route("/", methods=['GET', 'POST'])
def root():
    """Return all strains from the numbered-strains pickle as JSON."""
    pickle2_filepath = os.path.join(
        os.path.dirname(__file__), "..", "stats_model", "pickle_models", "strains_num.pkl"
    )
    # Bug fix: pickle.load(open(...)) leaked the file handle; close it
    # deterministically with a context manager.
    with open(pickle2_filepath, "rb") as handle:
        pickle2_dict = pickle.load(handle)
    return jsonify(pickle2_dict)
@strain_routes.route("/<strain>", methods=['GET'])
def get_strain(strain):
    """Look up a single strain by name in the pickled strains dictionary."""
    pickle_filepath = os.path.join(
        os.path.dirname(__file__), "..", "stats_model", "pickle_models", "strains.pkl"
    )
    # Bug fix: pickle.load(open(...)) leaked the file handle; close it
    # deterministically with a context manager.
    with open(pickle_filepath, "rb") as handle:
        pickle_dict = pickle.load(handle)
    # NOTE(review): an unknown strain name raises KeyError (HTTP 500) —
    # consider pickle_dict.get(strain) with a 404.
    return jsonify({"strain": pickle_dict[strain]})
@strain_routes.route('/data', methods=["GET", "POST"])
def data():
    """View all strains in database."""
    # WARNING(review): this view is destructive — it drops and recreates the
    # ENTIRE database schema on every request, then reloads it from
    # extract_data(). Presumably intended as a dev-only reseed endpoint;
    # confirm before exposing in production.
    DB.drop_all()
    DB.create_all()
    create_table(extract_data())
    strain = Strain.query.all()
    records = parse_records(strain)
    return render_template("data.html", records=records, message="Home Page")
#More query routes
@strain_routes.route('/types/<race>', methods=['GET'])
def get_type(race):
    """Return 5 random strains matching the requested type/race (sativa, indica, hybrid)."""
    matches = Strain.query.filter_by(race=race).order_by(func.random()).limit(5).all()
    types = [
        {
            "id": strain.id,
            "name": strain.name,
            "type": strain.race,
            "medical": strain.medical,
            "positive": strain.positive,
            "flavor": strain.flavors,
            "negative": strain.negative,
        }
        for strain in matches
    ]
    return jsonify(types)
@strain_routes.route('/medical/<medical>', methods=['GET'])
def get_medical(medical):
    """Return 5 random strains whose medical uses match the given symptom/ailment."""
    pattern = f"%{medical}%"
    matches = (
        Strain.query.filter(Strain.medical.ilike(pattern, escape="/"))
        .order_by(func.random())
        .limit(5)
        .all()
    )
    medicals = [
        {
            "id": strain.id,
            "name": strain.name,
            "type": strain.race,
            "medical": strain.medical,
            "positive": strain.positive,
            "flavor": strain.flavors,
            "negative": strain.negative,
        }
        for strain in matches
    ]
    return jsonify(medicals)
@strain_routes.route('/positive/<positive>', methods=['GET'])
def get_positve(positive):
    """Return 5 random strains with the requested positive effect (euphoria, focus, ...).

    (Function name keeps the historical misspelling for endpoint compatibility.)
    """
    pattern = f"%{positive}%"
    matches = (
        Strain.query.filter(Strain.positive.ilike(pattern, escape="/"))
        .order_by(func.random())
        .limit(5)
        .all()
    )
    positives = [
        {
            "id": strain.id,
            "name": strain.name,
            "type": strain.race,
            "medical": strain.medical,
            "positive": strain.positive,
            "flavor": strain.flavors,
            "negative": strain.negative,
        }
        for strain in matches
    ]
    return jsonify(positives)
@strain_routes.route('/flavors/<flavors>', methods=['GET'])
def get_flavors(flavors):
    """Return 5 random strains matching the requested flavor preference."""
    pattern = f"%{flavors}%"
    matches = (
        Strain.query.filter(Strain.flavors.ilike(pattern, escape="/"))
        .order_by(func.random())
        .limit(5)
        .all()
    )
    tastes = [
        {
            "id": strain.id,
            "name": strain.name,
            "type": strain.race,
            "medical": strain.medical,
            "positive": strain.positive,
            "flavor": strain.flavors,
            "negative": strain.negative,
        }
        for strain in matches
    ]
    return jsonify(tastes)
@strain_routes.route('/query/<medical>/<medical1>/<positive>', methods=['GET'])
def get_match(medical, medical1, positive):
    """Return 5 random strains matching ANY of two medical terms or a positive effect.

    Bug fix: the original wrapped only the first condition in or_() and passed
    the other two as separate filter() arguments, which SQLAlchemy combines
    with AND — so only strains matching all three terms were returned.
    """
    records = Strain.query.filter(
        or_(
            Strain.medical.ilike(f"%{medical}%", escape="/"),
            Strain.medical.ilike(f"%{medical1}%", escape="/"),
            Strain.positive.ilike(f"%{positive}%", escape="/"),
        )
    ).order_by(func.random()).limit(5).all()
    matches = []
    for match in records:
        matches.append({
            "id": match.id,
            "name": match.name,
            "type": match.race,
            "medical": match.medical,
            "positive": match.positive,
            "flavor": match.flavors,
            "negative": match.negative
        })
    return jsonify(matches)
@strain_routes.route('/query/<definition>', methods=['GET'])
def get_rec(definition):
    """Recommend strains from a free-text description.

    Bug fix: the original discarded give_recs()'s return value and then
    returned the undefined name `strain_recs`, raising NameError on every
    request. The description is wrapped in a list because the TF-IDF
    transform inside give_recs expects an iterable of documents.
    """
    return give_recs(request=[definition])
| 32.379808 | 129 | 0.63415 |
795597438c90dc0c3b831dcaaadb29053f299b76 | 565 | py | Python | app/proxy_fix.py | department-of-veterans-affairs/notification-admin | afba831a1e5e407cd7939df991d7438786da8afd | [
"MIT"
] | 33 | 2016-01-11T20:16:17.000Z | 2021-11-23T12:50:29.000Z | app/proxy_fix.py | department-of-veterans-affairs/notification-admin | afba831a1e5e407cd7939df991d7438786da8afd | [
"MIT"
] | 1,249 | 2015-11-30T16:43:21.000Z | 2022-03-24T13:04:55.000Z | app/proxy_fix.py | department-of-veterans-affairs/notification-admin | afba831a1e5e407cd7939df991d7438786da8afd | [
"MIT"
] | 36 | 2015-12-02T09:49:26.000Z | 2021-04-10T18:05:41.000Z | from werkzeug.middleware.proxy_fix import ProxyFix
class CustomProxyFix(object):
    """WSGI middleware that pins X-Forwarded-Proto to a fixed scheme.

    Wraps the app in werkzeug's ProxyFix (trusting one proxy hop for the
    client address, scheme and host) and then forces the forwarded protocol
    header to the configured value on every request.
    """

    def __init__(self, app, forwarded_proto):
        # Trust exactly one hop for X-Forwarded-For / -Proto / -Host.
        self.app = ProxyFix(app, x_for=1, x_proto=1, x_host=1, x_port=0, x_prefix=0)
        self.forwarded_proto = forwarded_proto

    def __call__(self, environ, start_response):
        # Override whatever the proxy sent with the configured scheme.
        environ["HTTP_X_FORWARDED_PROTO"] = self.forwarded_proto
        return self.app(environ, start_response)
def init_app(app):
    # Install the proxy fix as the outermost WSGI middleware, forcing the
    # forwarded protocol to the configured HTTP_PROTOCOL (default "http").
    app.wsgi_app = CustomProxyFix(app.wsgi_app, app.config.get('HTTP_PROTOCOL', 'http'))
| 31.388889 | 88 | 0.699115 |
79559815f1aaf8bf71170b15853534c8bc663a4b | 338 | py | Python | as_du_transport/www/demenagement-services/index.py | yaacine/as_du_transport_app | d0b9a530240eff8916c39b39b9cb70f59b86b69a | [
"MIT"
] | null | null | null | as_du_transport/www/demenagement-services/index.py | yaacine/as_du_transport_app | d0b9a530240eff8916c39b39b9cb70f59b86b69a | [
"MIT"
] | null | null | null | as_du_transport/www/demenagement-services/index.py | yaacine/as_du_transport_app | d0b9a530240eff8916c39b39b9cb70f59b86b69a | [
"MIT"
] | null | null | null | import frappe
def get_context(context):
    """Populate the website context for the moving-services page.

    Attaches to ``context.services`` the list of "Service demenagement"
    documents with the fields the page template renders.
    (Removed dead commented-out debug prints.)
    """
    context.services = frappe.get_list(
        "Service demenagement",
        fields=['name', 'service_title', 'short_description', 'full_description',
                'service_full_image', 'service_short_image'],
    )
| 48.285714 | 181 | 0.60355 |
7955983450ae8ba274ba23644242075781ebacbc | 4,372 | py | Python | meshio/_xdmf/common.py | clbarnes/meshio | 21601539d073ea7cb7fe70cf7e2e818aa9c4353c | [
"MIT"
] | null | null | null | meshio/_xdmf/common.py | clbarnes/meshio | 21601539d073ea7cb7fe70cf7e2e818aa9c4353c | [
"MIT"
] | 5 | 2018-07-18T21:00:20.000Z | 2019-11-28T10:58:37.000Z | meshio/_xdmf/common.py | clbarnes/meshio | 21601539d073ea7cb7fe70cf7e2e818aa9c4353c | [
"MIT"
] | null | null | null | import numpy
# Mapping from numpy dtype name to the XDMF (NumberType, Precision) pair.
numpy_to_xdmf_dtype = {
    "int32": ("Int", "4"),
    "int64": ("Int", "8"),
    "uint32": ("UInt", "4"),
    "uint64": ("UInt", "8"),
    "float32": ("Float", "4"),
    "float64": ("Float", "8"),
}
xdmf_to_numpy_type = {v: k for k, v in numpy_to_xdmf_dtype.items()}

# printf-style format string used when writing each dtype as ASCII.
dtype_to_format_string = {
    "int32": "%d",
    "int64": "%d",
    # Bug fix: this key was misspelled "unit32", so looking up the format
    # string for a uint32 array raised KeyError.
    "uint32": "%d",
    "uint64": "%d",
    "float32": "%.7e",
    "float64": "%.15e",
}


# See
# <http://www.xdmf.org/index.php/XDMF_Model_and_Format#XML_Element_.28Xdmf_ClassName.29_and_Default_XML_Attributes>
# for XDMF types.
# There appears to be no particular consistency, so allow for different
# alternatives as well.
meshio_to_xdmf_type = {
    "vertex": ["Polyvertex"],
    "line": ["Polyline"],
    "triangle": ["Triangle"],
    "quad": ["Quadrilateral"],
    "tetra": ["Tetrahedron"],
    "pyramid": ["Pyramid"],
    "wedge": ["Wedge"],
    "hexahedron": ["Hexahedron"],
    "line3": ["Edge_3"],
    "triangle6": ["Triangle_6", "Tri_6"],
    "quad8": ["Quadrilateral_8", "Quad_8"],
    "tetra10": ["Tetrahedron_10", "Tet_10"],
    "pyramid13": ["Pyramid_13"],
    "wedge15": ["Wedge_15"],
    "hexahedron20": ["Hexahedron_20", "Hex_20"],
}
xdmf_to_meshio_type = {v: k for k, vals in meshio_to_xdmf_type.items() for v in vals}
# Check out
# <https://gitlab.kitware.com/xdmf/xdmf/blob/master/XdmfTopologyType.cpp>
# for the list of indices.
# XDMF topology-type index -> meshio cell type. Indices follow
# <https://gitlab.kitware.com/xdmf/xdmf/blob/master/XdmfTopologyType.cpp>.
xdmf_idx_to_meshio_type = {
    0x1: "vertex",
    0x2: "line",
    0x4: "triangle",
    0x5: "quad",
    0x6: "tetra",
    0x7: "pyramid",
    0x8: "wedge",
    0x9: "hexahedron",
    0x22: "line3",
    0x23: "quad9",
    0x24: "triangle6",
    0x25: "quad8",
    0x26: "tetra10",
    0x27: "pyramid13",
    0x28: "wedge15",
    0x29: "wedge18",
    0x30: "hexahedron20",
    0x31: "hexahedron24",
    0x32: "hexahedron27",
    0x33: "hexahedron64",
    0x34: "hexahedron125",
    0x35: "hexahedron216",
    0x36: "hexahedron343",
    0x37: "hexahedron512",
    0x38: "hexahedron729",
    0x39: "hexahedron1000",
    0x40: "hexahedron1331",
    # 0x41-0x48: spectral hexahedra, intentionally unsupported.
}
meshio_type_to_xdmf_index = {v: k for k, v in xdmf_idx_to_meshio_type.items()}


def translate_mixed_cells(data):
    """Convert a flat XDMF "mixed" topology array into a cells dictionary.

    ``data`` is one-dimensional, laid out as
    (cell_type1, p0, ..., pk, cell_type2, p10, ..., p1k, ...); see
    <http://www.xdmf.org/index.php/XDMF_Model_and_Format#Topology>.
    Returns ``{meshio_cell_type: ndarray of node indices}``.
    """
    # Nodes per cell for every XDMF index that may appear in a mixed topology.
    num_nodes_per_idx = {
        1: 1,   # vertex
        4: 3,   # triangle
        5: 4,   # quad
        6: 4,   # tet
        7: 5,   # pyramid
        8: 6,   # wedge
        9: 8,   # hex
        11: 6,  # triangle6
    }

    # First pass: record each cell's XDMF type id and its starting offset.
    cell_types = []
    cell_starts = []
    pos = 0
    total = len(data)
    while pos < total:
        idx = data[pos]
        cell_types.append(idx)
        cell_starts.append(pos)
        pos += num_nodes_per_idx[idx] + 1
    cell_starts = numpy.array(cell_starts)

    # Second pass: group same-typed cells and gather their node indices.
    # (See <https://stackoverflow.com/q/47310359/353337> for alternatives.)
    cells = {}
    for xdmf_idx in numpy.unique(cell_types):
        rows = numpy.where(cell_types == xdmf_idx)[0]
        meshio_type = xdmf_idx_to_meshio_type[xdmf_idx]
        assert (data[cell_starts[rows]] == xdmf_idx).all()
        n = num_nodes_per_idx[xdmf_idx]
        node_ids = numpy.array(
            [numpy.arange(1, n + 1) + start for start in cell_starts[rows]]
        )
        cells[meshio_type] = data[node_ids]
    return cells
def attribute_type(data):
    """Classify an array as an XDMF attribute type by its shape.

    See <http://www.xdmf.org/index.php/XDMF_Model_and_Format#Attribute>.
    """
    ndim = len(data.shape)
    if ndim == 1 or (ndim == 2 and data.shape[1] == 1):
        return "Scalar"
    if ndim == 2:
        if data.shape[1] in (2, 3):
            return "Vector"
        if data.shape[1] == 9:
            return "Tensor"
        if data.shape[1] == 6:
            return "Tensor6"
    elif ndim == 3 and data.shape[1] == 3 and data.shape[2] == 3:
        return "Tensor"
    # Anything else must be a 3-d array; treated as a generic matrix.
    assert ndim == 3
    return "Matrix"
| 28.763158 | 115 | 0.598124 |
795599a86d1b3536f7744db18d2c86aacee1f6e2 | 5,194 | py | Python | project/service/t5_inf.py | hnliu-git/decepticon | 18ce5eafd91b3128f101ef9399904bd57f32b40d | [
"Apache-2.0"
] | null | null | null | project/service/t5_inf.py | hnliu-git/decepticon | 18ce5eafd91b3128f101ef9399904bd57f32b40d | [
"Apache-2.0"
] | null | null | null | project/service/t5_inf.py | hnliu-git/decepticon | 18ce5eafd91b3128f101ef9399904bd57f32b40d | [
"Apache-2.0"
] | null | null | null | # Python Import:
from argparse import ArgumentParser
from transformers import (
T5ForConditionalGeneration, T5Config, AutoTokenizer
)
# Pytorch Lightning Import:
import pytorch_lightning as pl
class RaceInfModule(pl.LightningModule):
    """T5-based LightningModule for RACE-style question/distractor generation."""

    @staticmethod
    def add_model_specific_args(parent_parser):
        """Add the module's CLI arguments to *parent_parser*; returns the new parser."""
        parser = ArgumentParser(parents=[parent_parser], add_help=False)
        parser.add_argument("--version", type=float,
                            help="specify it in a form X.XX")
        parser.add_argument("--padding_token", type=int, default=0,
                            help="don't change it")
        parser.add_argument("--tokenizer_len", type=int, default=32104,
                            help="don't touch it")
        parser.add_argument("--seed", default=1234, type=float)
        parser.add_argument("--weight_decay", default=5e-5, type=float)
        parser.add_argument("--learning_rate", default=1e-4, type=float)
        return parser

    def __init__(self, hparams):
        """
        :param hparams: namespace/dict of hyperparameters; must provide at least
            ``pretrained_model``, ``padding_token`` and (optionally)
            ``tokenizer_len``.
        """
        super(RaceInfModule, self).__init__()
        # NOTE(review): assigning self.hparams directly is deprecated in recent
        # pytorch-lightning; save_hyperparameters alone should suffice —
        # confirm against the pinned PL version before removing.
        self.hparams = hparams
        self.save_hyperparameters(hparams)

        if self.hparams.pretrained_model in ["t5-base", "t5-small"]:
            # Model: decoding starts from the padding token, per T5 convention.
            config = T5Config(decoder_start_token_id=self.hparams.padding_token)
            self.model = T5ForConditionalGeneration(config).from_pretrained(self.hparams.pretrained_model)
            # Tokenizer, extended with the task marker tokens.
            self.tokenizer = AutoTokenizer.from_pretrained(self.hparams.pretrained_model)
            self.tokenizer.add_special_tokens({"additional_special_tokens": ["[CON]","[QUE]","[ANS]","[DIS]"]})
            try:
                self.model.resize_token_embeddings(self.hparams.tokenizer_len)
            except AttributeError:
                # Bug fix: was a bare `except:` that also hid real resize
                # errors. Fall back to the default size (32100 vocab + 4
                # special tokens) only when hparams lacks tokenizer_len.
                self.model.resize_token_embeddings(32104)
        else:
            raise NotImplementedError

    def generate(self, inputs, use_beam=False, use_sample=True, **kwargs):
        """Generate token id sequences for *inputs*.

        Args:
            inputs (dict): tokenizer output (input_ids, attention_mask, ...).
            use_beam (bool): use (diverse) beam search.
            use_sample (bool): use top-k/top-p sampling (checked after beam).
            kwargs: forwarded to the chosen generation method.

        Returns:
            Tensor of shape (bsz * num_sequences, pred_len).
        """
        assert use_beam or use_sample, 'Must use one method for generation'
        if use_beam:
            return self.generate_with_beam(inputs, **kwargs)
        if use_sample:
            return self.generate_with_sampling(inputs, **kwargs)

    def generate_with_beam(self, inputs,
                           num_beams: int = 6,
                           no_repeat_ngram_size: int = 2,
                           max_length: int = 64,
                           early_stopping: bool = True,
                           num_beam_groups: int = 2):
        """Diverse beam-search generation (context -> answer + article)."""
        generated = self.model.generate(**inputs,
                                        num_beams=num_beams,
                                        num_beam_groups=num_beam_groups,
                                        max_length=max_length,
                                        no_repeat_ngram_size=no_repeat_ngram_size,
                                        early_stopping=early_stopping)
        return generated

    def generate_with_sampling(self, inputs,
                               top_k: int = 50,
                               top_p: float = 0.95,
                               max_length: int = 64,
                               do_sample: bool = True,
                               no_repeat_ngram_size: int = 2,
                               num_samples=1):
        """Top-k / nucleus sampling generation (context -> answer + article).

        Instance attributes (top_k, top_p, no_repeat_ngram_size, num_samples),
        when set on the module, override the keyword defaults.
        """
        # Bug fix: was a try/except with a bare `except: pass`, which silently
        # dropped all later overrides whenever an earlier attribute was
        # missing. Each attribute now falls back independently.
        top_k = getattr(self, "top_k", top_k)
        top_p = getattr(self, "top_p", top_p)
        no_repeat_ngram_size = getattr(self, "no_repeat_ngram_size", no_repeat_ngram_size)
        num_samples = getattr(self, "num_samples", num_samples)

        generated = self.model.generate(**inputs,
                                        max_length=max_length,
                                        do_sample=do_sample,
                                        no_repeat_ngram_size=no_repeat_ngram_size,
                                        num_return_sequences=num_samples,
                                        top_k=top_k,
                                        top_p=top_p)
        return generated

    def generate_sentence(self, article, answer, question=None):
        """Generate one sentence (question, or distractor when *question* given).

        Args:
            article (str): source passage.
            answer (str): target answer.
            question (str): when provided, the model generates distractors.

        Returns:
            str: decoded generation with special tokens removed.
        """
        if question:
            context = " ".join(['[ANS]', answer, '[QUE]', question, '[CON]', article])
        else:
            context = " ".join(['[ANS]', answer, '[CON]', article])
        inputs = self.tokenizer([context], padding=True, truncation=True, max_length=512, return_tensors="pt")
        sentence = self.generate(inputs, use_sample=True)
        return self.tokenizer.decode(sentence.squeeze(), skip_special_tokens=True)
79559a7f498a4a345a77e0b62d4553c9fa19c28f | 1,526 | py | Python | Ago-Dic-2017/Jaime Villarreal/Práctica 1/Motocicleta.py | Andremm303/DAS_Sistemas | 0163505737e2b24365ea8b4e8135773a6801add4 | [
"MIT"
] | null | null | null | Ago-Dic-2017/Jaime Villarreal/Práctica 1/Motocicleta.py | Andremm303/DAS_Sistemas | 0163505737e2b24365ea8b4e8135773a6801add4 | [
"MIT"
] | null | null | null | Ago-Dic-2017/Jaime Villarreal/Práctica 1/Motocicleta.py | Andremm303/DAS_Sistemas | 0163505737e2b24365ea8b4e8135773a6801add4 | [
"MIT"
] | null | null | null | from Vehiculo import Vehiculo
class Motocicleta(Vehiculo):
    """A motorcycle; extends Vehiculo with type and engine displacement."""

    def __init__(self, modelo, color, transmision, motor, cilindros, sku,
                 tipo, centimetros_cubicos, existencia=1):
        # Common vehicle attributes come from the base class.
        super().__init__(modelo, color, transmision, motor, cilindros, sku)
        # Motorcycle-specific attributes.
        self.tipo = tipo
        self.centimetros_cubicos = centimetros_cubicos

    def get_tipo(self):
        """Return the motorcycle type."""
        return self.tipo

    def set_tipo(self, tipo):
        """Set the motorcycle type."""
        self.tipo = tipo

    def get_centimetros_cubicos(self):
        """Return the engine displacement in cc."""
        return self.centimetros_cubicos

    def set_centimetros_cubicos(self, centimetros_cubicos):
        """Set the engine displacement in cc."""
        self.centimetros_cubicos = centimetros_cubicos

    def get_info_motocicleta(self):
        """Return a human-readable summary of the motorcycle."""
        return (
            f"Modelo: {self.modelo}\nColor: {self.color}\nTipo: {self.tipo}\n"
            f"Transmisión: {self.transmision}\nMotor: {self.motor}\nCilindros: {self.cilindros}\n"
            f"CC: {self.centimetros_cubicos}\nSKU: {self.sku}\n"
        )
'''
mi_moto = Motocicleta('Ducati Monster 1200S 2017', 'Negro', 'Cadena',
'Testastretta 11° L-Twin', 4, 'DUC-MON-1200S-01', 'Urbana', 1198)
print(mi_moto.get_info_motocicleta())
'''
| 31.791667 | 86 | 0.656619 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.