hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f71f603e3e2b9119bf19c27949a553a350de4dbb | 3,576 | py | Python | nowcast/workers/ping_erddap.py | SalishSeaCast/SalishSeaNowcast | 947ba6fbb8952c7ae989a3aa96614b900748f55d | [
"Apache-2.0"
] | 4 | 2020-02-06T01:10:13.000Z | 2021-12-11T01:06:10.000Z | nowcast/workers/ping_erddap.py | SalishSeaCast/SalishSeaNowcast | 947ba6fbb8952c7ae989a3aa96614b900748f55d | [
"Apache-2.0"
] | 30 | 2020-02-03T23:54:10.000Z | 2022-03-18T18:50:31.000Z | nowcast/workers/ping_erddap.py | SalishSeaCast/SalishSeaNowcast | 947ba6fbb8952c7ae989a3aa96614b900748f55d | [
"Apache-2.0"
] | null | null | null | # Copyright 2013-2021 The Salish Sea MEOPAR contributors
# and The University of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""SalishSeaCast worker that creates flag files to tell the ERDDAP server
to reload datasets for which new results have been downloaded.
"""
import logging
from pathlib import Path
from nemo_nowcast import NowcastWorker
NAME = "ping_erddap"
logger = logging.getLogger(NAME)
def main():
    """Set up and run the worker.
    For command-line usage see:
    :command:`python -m nowcast.workers.ping_erddap --help`
    """
    worker = NowcastWorker(NAME, description=__doc__)
    worker.init_cli()
    # Single positional CLI argument: the dataset group whose ERDDAP
    # reload-flag files should be touched. ping_erddap() later looks this
    # value up under config["erddap"]["datasetIDs"].
    worker.cli.add_argument(
        "dataset",
        choices={
            "weather",
            "SCVIP-CTD",
            "SEVIP-CTD",
            "USDDL-CTD",
            "TWDP-ferry",
            "VFPA-HADCP",
            "nowcast-green",
            "nemo-forecast",
            "wwatch3-forecast",
            "fvcom-x2-nowcast",
            "fvcom-r12-nowcast",
            "fvcom-forecast",
        },
        help="""
        Type of dataset to notify ERDDAP of:
        'weather' means atmospheric forcing downloaded & processed,
        'SCVIP-CTD' means ONC SCVIP node CTD T&S observations downloaded &
        processed,
        'SEVIP-CTD' means ONC SEVIP node CTD T&S observations downloaded &
        processed,
        'USDDL-CTD' means ONC USDDL node CTD T&S observations downloaded &
        processed,
        'TWDP-ferry' means ONC Tsawwassen/Duke Pt. ferry observations
        downloaded & processed,
        'VFPA-HADCP' means VFPA 2nd Narrows Rail Bridge HADCP observations processed,
        'nowcast-green' means nowcast green ocean run,
        'nemo-forecast' means updated NEMO rolling forecast,
        'wwatch3-forecast' means updated WaveWatch3 rolling forecast,
        'fvcom-x2-nowcast' means updated VHFR FVCOM x2 nowcast run,
        'fvcom-r12-nowcast' means updated VHFR FVCOM r12 nowcast run,
        'fvcom-forecast' means updated VHFR FVCOM x2 rolling forecast
    """,
    )
    # Run ping_erddap(), reporting back via the success/failure callbacks
    worker.run(ping_erddap, success, failure)
    return worker
def success(parsed_args):
    """Log success and build the worker's success message type.

    :param parsed_args: :class:`argparse.Namespace` carrying a ``dataset``
                        attribute.
    :return: message type string of the form ``success <dataset>``
    """
    dataset = parsed_args.dataset
    logger.info(f"{dataset} ERDDAP dataset flag file(s) created")
    return f"success {dataset}"
def failure(parsed_args):
    """Log failure and build the worker's failure message type.

    :param parsed_args: :class:`argparse.Namespace` carrying a ``dataset``
                        attribute.
    :return: message type string of the form ``failure <dataset>``
    """
    dataset = parsed_args.dataset
    logger.critical(f"{dataset} ERDDAP dataset flag file(s) creation failed")
    return f"failure {dataset}"
def ping_erddap(parsed_args, config, *args):
    """Create ERDDAP dataset-reload flag files for the requested dataset.

    :param parsed_args: :class:`argparse.Namespace` carrying a ``dataset``
                        attribute.
    :param config: nowcast system configuration dict.
    :return: checklist dict mapping the dataset name to the list of dataset
             IDs whose flag files were created.
    """
    dataset = parsed_args.dataset
    erddap_config = config["erddap"]
    flag_dir = Path(erddap_config["flag dir"])
    checklist = {dataset: []}
    try:
        dataset_ids = erddap_config["datasetIDs"][dataset]
    except KeyError:
        # dataset has no entry in the datasetIDs config; nothing to flag
        dataset_ids = []
    for dataset_id in dataset_ids:
        flag_file = flag_dir / dataset_id
        flag_file.touch()
        logger.debug(f"{flag_file} touched")
        checklist[dataset].append(dataset_id)
    return checklist
if __name__ == "__main__":
main() # pragma: no cover
| 33.420561 | 85 | 0.661074 |
import logging
from pathlib import Path
from nemo_nowcast import NowcastWorker
NAME = "ping_erddap"
logger = logging.getLogger(NAME)
def main():
    """Set up and run the ping_erddap worker.

    Registers the ``dataset`` CLI argument, then dispatches to
    :func:`ping_erddap` with :func:`success` / :func:`failure` callbacks.
    """
    worker = NowcastWorker(NAME, description=__doc__)
    worker.init_cli()
    # The dataset name is later looked up under
    # config["erddap"]["datasetIDs"] by ping_erddap().
    worker.cli.add_argument(
        "dataset",
        choices={
            "weather",
            "SCVIP-CTD",
            "SEVIP-CTD",
            "USDDL-CTD",
            "TWDP-ferry",
            "VFPA-HADCP",
            "nowcast-green",
            "nemo-forecast",
            "wwatch3-forecast",
            "fvcom-x2-nowcast",
            "fvcom-r12-nowcast",
            "fvcom-forecast",
        },
        help="""
        Type of dataset to notify ERDDAP of:
        'weather' means atmospheric forcing downloaded & processed,
        'SCVIP-CTD' means ONC SCVIP node CTD T&S observations downloaded &
        processed,
        'SEVIP-CTD' means ONC SEVIP node CTD T&S observations downloaded &
        processed,
        'USDDL-CTD' means ONC USDDL node CTD T&S observations downloaded &
        processed,
        'TWDP-ferry' means ONC Tsawwassen/Duke Pt. ferry observations
        downloaded & processed,
        'VFPA-HADCP' means VFPA 2nd Narrows Rail Bridge HADCP observations processed,
        'nowcast-green' means nowcast green ocean run,
        'nemo-forecast' means updated NEMO rolling forecast,
        'wwatch3-forecast' means updated WaveWatch3 rolling forecast,
        'fvcom-x2-nowcast' means updated VHFR FVCOM x2 nowcast run,
        'fvcom-r12-nowcast' means updated VHFR FVCOM r12 nowcast run,
        'fvcom-forecast' means updated VHFR FVCOM x2 rolling forecast
    """,
    )
    worker.run(ping_erddap, success, failure)
    return worker
def success(parsed_args):
    """Log success and return a ``success <dataset>`` message type."""
    logger.info(f"{parsed_args.dataset} ERDDAP dataset flag file(s) created")
    msg_type = f"success {parsed_args.dataset}"
    return msg_type
def failure(parsed_args):
    """Log failure and return a ``failure <dataset>`` message type."""
    logger.critical(
        f"{parsed_args.dataset} ERDDAP dataset flag file(s) creation failed"
    )
    msg_type = f"failure {parsed_args.dataset}"
    return msg_type
def ping_erddap(parsed_args, config, *args):
    """Touch ERDDAP dataset-reload flag files for the requested dataset.

    :return: checklist dict mapping the dataset name to the list of
             dataset IDs whose flag files were created.
    """
    dataset = parsed_args.dataset
    flag_path = Path(config["erddap"]["flag dir"])
    checklist = {dataset: []}
    try:
        for dataset_id in config["erddap"]["datasetIDs"][dataset]:
            (flag_path / dataset_id).touch()
            logger.debug(f"{flag_path / dataset_id} touched")
            checklist[dataset].append(dataset_id)
    except KeyError:
        # dataset has no entry in the datasetIDs config; nothing to flag
        pass
    return checklist
if __name__ == "__main__":
main()
| true | true |
f71f61276a4576ec17d6d55cf5e8e0be9bdbeab7 | 918 | py | Python | FILE/file_merge.py | AceCoooool/python-example | 1d0068627210f08d31f027b6a333118d9f743956 | [
"MIT"
] | 2 | 2019-02-15T09:19:44.000Z | 2019-02-15T09:21:01.000Z | FILE/file_merge.py | AceCoooool/python-example | 1d0068627210f08d31f027b6a333118d9f743956 | [
"MIT"
] | null | null | null | FILE/file_merge.py | AceCoooool/python-example | 1d0068627210f08d31f027b6a333118d9f743956 | [
"MIT"
] | null | null | null | import os
import argparse
def file_merge(folder, out_file, ext):
files = [os.path.join(folder, file) for file in os.listdir(folder) if file.endswith(ext)]
with open(out_file, 'w') as f:
for file in files:
with open(file, 'r') as rf:
print('File {} readed.'.format(file))
f.write(rf.read() + '\n')
print('\n File {} wrote.'.format(out_file))
if __name__ == '__main__':
    # Command-line entry point: merge all ``--ext`` files found in
    # ``--folder`` into ``--out_file``.
    parser = argparse.ArgumentParser(description='File merge')
    parser.add_argument('--folder', type=str, default='../data/txt')
    parser.add_argument('--out_file', type=str, default='../data/results.txt')
    parser.add_argument('--ext', type=str, default='txt')
    config = parser.parse_args()
    # Truncate/create the output file up front so stale results from a
    # previous run never leak into a fresh merge.
    with open(config.out_file, 'w+'):
        pass
    # Normalize the extension to carry a leading dot for str.endswith().
    if config.ext[0] != '.':
        config.ext = '.' + config.ext
    file_merge(config.folder, config.out_file, config.ext)
| 31.655172 | 93 | 0.61329 | import os
import argparse
def file_merge(folder, out_file, ext):
    """Concatenate every ``ext`` file in ``folder`` into ``out_file``.

    Each input file's content is written followed by a newline.
    NOTE(review): if ``out_file`` lives in ``folder`` with a matching
    extension it is included in the inputs and merged into itself —
    confirm callers never do that.
    """
    files = [os.path.join(folder, file) for file in os.listdir(folder) if file.endswith(ext)]
    with open(out_file, 'w') as f:
        for file in files:
            with open(file, 'r') as rf:
                print('File {} readed.'.format(file))
                f.write(rf.read() + '\n')
    print('\n File {} wrote.'.format(out_file))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='File merge')
parser.add_argument('--folder', type=str, default='../data/txt')
parser.add_argument('--out_file', type=str, default='../data/results.txt')
parser.add_argument('--ext', type=str, default='txt')
config = parser.parse_args()
with open(config.out_file, 'w+'):
pass
if config.ext[0] != '.':
config.ext = '.' + config.ext
file_merge(config.folder, config.out_file, config.ext)
| true | true |
f71f61bbd250bef9d676ace26f835628c544adaa | 2,169 | py | Python | api/generated/python/azure-iiot-opc-twin/models/value_write_request_api_model.py | jaz230/Industrial-IoT | bd4c5abfe579cbb7086a621e8381978e6c70a563 | [
"MIT"
] | 1 | 2020-01-22T12:03:08.000Z | 2020-01-22T12:03:08.000Z | api/generated/python/azure-iiot-opc-twin/models/value_write_request_api_model.py | likithadt/Industrial-IoT | d4ea7b330eff08455ca0556fed76aa74d2034da5 | [
"MIT"
] | null | null | null | api/generated/python/azure-iiot-opc-twin/models/value_write_request_api_model.py | likithadt/Industrial-IoT | d4ea7b330eff08455ca0556fed76aa74d2034da5 | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator 2.3.33.0
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ValueWriteRequestApiModel(Model):
    """Value write request model.
    :param node_id: Node id to write value to.
    :type node_id: str
    :param browse_path: An optional path from NodeId instance to
    the actual node.
    :type browse_path: list[str]
    :param value: Value to write. The system tries to convert
    the value according to the data type value,
    e.g. convert comma separated value strings
    into arrays. (Mandatory)
    :type value: object
    :param data_type: A built in datatype for the value. This can
    be a data type from browse, or a built in
    type.
    (default: best effort)
    :type data_type: str
    :param index_range: Index range to write
    :type index_range: str
    :param header:
    :type header: ~azure-iiot-opc-twin.models.RequestHeaderApiModel
    """
    # msrest validation rules: ``value`` is the only mandatory field.
    _validation = {
        'value': {'required': True},
    }
    # Maps Python attribute names to their JSON wire names/types for
    # msrest (de)serialization.
    _attribute_map = {
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'browse_path': {'key': 'browsePath', 'type': '[str]'},
        'value': {'key': 'value', 'type': 'object'},
        'data_type': {'key': 'dataType', 'type': 'str'},
        'index_range': {'key': 'indexRange', 'type': 'str'},
        'header': {'key': 'header', 'type': 'RequestHeaderApiModel'},
    }
    def __init__(self, value, node_id=None, browse_path=None, data_type=None, index_range=None, header=None):
        super(ValueWriteRequestApiModel, self).__init__()
        self.node_id = node_id
        self.browse_path = browse_path
        self.value = value
        self.data_type = data_type
        self.index_range = index_range
        self.header = header
| 36.15 | 109 | 0.608575 |
from msrest.serialization import Model
class ValueWriteRequestApiModel(Model):
    """Value write request model (msrest serialization model).

    ``value`` is the only required constructor argument; the remaining
    attributes are optional request metadata. See ``_attribute_map`` for
    the JSON wire names.
    """
    # msrest validation rules: ``value`` must be supplied.
    _validation = {
        'value': {'required': True},
    }
    # Maps Python attribute names to JSON wire names/types.
    _attribute_map = {
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'browse_path': {'key': 'browsePath', 'type': '[str]'},
        'value': {'key': 'value', 'type': 'object'},
        'data_type': {'key': 'dataType', 'type': 'str'},
        'index_range': {'key': 'indexRange', 'type': 'str'},
        'header': {'key': 'header', 'type': 'RequestHeaderApiModel'},
    }
    def __init__(self, value, node_id=None, browse_path=None, data_type=None, index_range=None, header=None):
        super(ValueWriteRequestApiModel, self).__init__()
        self.node_id = node_id
        self.browse_path = browse_path
        self.value = value
        self.data_type = data_type
        self.index_range = index_range
        self.header = header
| true | true |
f71f61e720b4ca7b2e7ace2c709ec4297289840e | 130,096 | py | Python | salt/states/pkg.py | waynegemmell/salt | 88056db3589cccab8956c2ae4f9b733acce89461 | [
"Apache-2.0"
] | 1 | 2020-09-10T07:38:20.000Z | 2020-09-10T07:38:20.000Z | salt/states/pkg.py | waynegemmell/salt | 88056db3589cccab8956c2ae4f9b733acce89461 | [
"Apache-2.0"
] | 4 | 2016-05-10T22:05:34.000Z | 2016-05-20T18:10:13.000Z | salt/states/pkg.py | waynegemmell/salt | 88056db3589cccab8956c2ae4f9b733acce89461 | [
"Apache-2.0"
] | 1 | 2020-12-02T01:20:28.000Z | 2020-12-02T01:20:28.000Z | """
Installation of packages using OS package managers such as yum or apt-get
=========================================================================
.. note::
On minions running systemd>=205, as of version 2015.8.12, 2016.3.3, and
2016.11.0, `systemd-run(1)`_ is now used to isolate commands which modify
installed packages from the ``salt-minion`` daemon's control group. This is
done to keep systemd from killing the package manager commands spawned by
Salt, when Salt updates itself (see ``KillMode`` in the `systemd.kill(5)`_
manpage for more information). If desired, usage of `systemd-run(1)`_ can
be suppressed by setting a :mod:`config option <salt.modules.config.get>`
called ``systemd.use_scope``, with a value of ``False`` (no quotes).
.. _`systemd-run(1)`: https://www.freedesktop.org/software/systemd/man/systemd-run.html
.. _`systemd.kill(5)`: https://www.freedesktop.org/software/systemd/man/systemd.kill.html
Salt can manage software packages via the pkg state module, packages can be
set up to be installed, latest, removed and purged. Package management
declarations are typically rather simple:
.. code-block:: yaml
vim:
pkg.installed
A more involved example involves pulling from a custom repository.
.. code-block:: yaml
base:
pkgrepo.managed:
- name: ppa:wolfnet/logstash
- dist: precise
- file: /etc/apt/sources.list.d/logstash.list
- keyid: 28B04E4A
- keyserver: keyserver.ubuntu.com
logstash:
pkg.installed:
- fromrepo: ppa:wolfnet/logstash
Multiple packages can also be installed with the use of the pkgs
state module
.. code-block:: yaml
dotdeb.repo:
pkgrepo.managed:
- name: deb http://packages.dotdeb.org wheezy-php55 all
- dist: wheezy-php55
- file: /etc/apt/sources.list.d/dotbeb.list
- keyid: 89DF5277
- keyserver: keys.gnupg.net
- refresh_db: true
php.packages:
pkg.installed:
- fromrepo: wheezy-php55
- pkgs:
- php5-fpm
- php5-cli
- php5-curl
.. warning::
Package names are currently case-sensitive. If the minion is using a
package manager which is not case-sensitive (such as :mod:`pkgng
<salt.modules.pkgng>`), then this state will fail if the proper case is not
used. This will be addressed in a future release of Salt.
"""
import fnmatch
import logging
import os
import re
import salt.utils.pkg
import salt.utils.platform
import salt.utils.versions
from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError
from salt.modules.pkg_resource import _repack_pkgs
from salt.output import nested
from salt.utils.functools import namespaced_function as _namespaced_function
from salt.utils.odict import OrderedDict as _OrderedDict
# pylint: disable=invalid-name
_repack_pkgs = _namespaced_function(_repack_pkgs, globals())
if salt.utils.platform.is_windows():
# pylint: disable=import-error,no-name-in-module,unused-import
from urllib.parse import urlparse as _urlparse
from salt.exceptions import SaltRenderError
import collections
import datetime
import errno
import time
from functools import cmp_to_key
# pylint: disable=import-error
# pylint: enable=unused-import
from salt.modules.win_pkg import _get_package_info
from salt.modules.win_pkg import get_repo_data
from salt.modules.win_pkg import _get_repo_details
from salt.modules.win_pkg import _refresh_db_conditional
from salt.modules.win_pkg import refresh_db
from salt.modules.win_pkg import genrepo
from salt.modules.win_pkg import _repo_process_pkg_sls
from salt.modules.win_pkg import _get_latest_pkg_version
from salt.modules.win_pkg import _reverse_cmp_pkg_versions
_get_package_info = _namespaced_function(_get_package_info, globals())
get_repo_data = _namespaced_function(get_repo_data, globals())
_get_repo_details = _namespaced_function(_get_repo_details, globals())
_refresh_db_conditional = _namespaced_function(_refresh_db_conditional, globals())
refresh_db = _namespaced_function(refresh_db, globals())
genrepo = _namespaced_function(genrepo, globals())
_repo_process_pkg_sls = _namespaced_function(_repo_process_pkg_sls, globals())
_get_latest_pkg_version = _namespaced_function(_get_latest_pkg_version, globals())
_reverse_cmp_pkg_versions = _namespaced_function(
_reverse_cmp_pkg_versions, globals()
)
# The following imports are used by the namespaced win_pkg funcs
# and need to be included in their globals.
# pylint: disable=import-error,unused-import
import salt.utils.msgpack as msgpack
from salt.utils.versions import LooseVersion
# pylint: enable=import-error,unused-import
# pylint: enable=invalid-name
log = logging.getLogger(__name__)
def __virtual__():
    """
    Gate this state module on the availability of a pkg provider (an
    execution module exposing ``pkg.install``) on this minion.
    """
    if "pkg.install" not in __salt__:
        return (False, "pkg module could not be loaded")
    return True
def _get_comparison_spec(pkgver):
    """
    Split *pkgver* into an ``(operator, version)`` tuple. A bare version
    string (or one prefixed with ``=``) is treated as an equality
    comparison, so ``==`` is the operator returned in that case.
    """
    oper, verstr = salt.utils.pkg.split_comparison(pkgver.strip())
    normalized_oper = "==" if oper in ("=", "") else oper
    return normalized_oper, verstr
def _check_ignore_epoch(oper, desired_version, ignore_epoch=None):
"""
Conditionally ignore epoch, but only under all of the following
circumstances:
1. No value for ignore_epoch passed to state
2. desired_version has no epoch
3. oper does not contain a "<" or ">"
"""
if ignore_epoch is not None:
return ignore_epoch
return "<" not in oper and ">" not in oper and ":" not in desired_version
def _parse_version_string(version_conditions_string):
    """
    Split a comma-separated version-condition string into a list of
    ``(operator, version)`` two-tuples. An empty (or all-whitespace)
    string yields an empty list.
    """
    stripped = version_conditions_string.strip()
    if not stripped:
        return []
    return [_get_comparison_spec(condition) for condition in stripped.split(",")]
def _fulfills_version_string(
    installed_versions,
    version_conditions_string,
    ignore_epoch=None,
    allow_updates=False,
):
    """
    Check whether any of *installed_versions* satisfies every condition in
    *version_conditions_string*; return ``True`` for the first such
    version, ``False`` if none qualifies.

    installed_versions
        The currently-installed versions to test.

    version_conditions_string
        Comma-separated version conditions, e.g.
        ``1.2.3-4``, ``>=1.2.3-4``, ``>=1.2.3-4, <2.3.4-5``,
        ``>=1.2.3-4, <2.3.4-5, !=1.2.4-1``.

    ignore_epoch : None
        When a package version contains a non-zero epoch (e.g.
        ``1:3.14.159-2.el7``) and a specific version of a package is
        desired, set this option to ``True`` to ignore the epoch when
        comparing versions. When ``None``, epoch handling is decided
        per-condition (see ``_check_ignore_epoch``).

    allow_updates : False
        When exactly one strict (``==``) condition is given, treat it as
        ``>=`` so a package updated outside Salt's control (e.g. auto
        updates on Windows) still satisfies the state.
    """
    conditions = _parse_version_string(version_conditions_string)
    relax_strict_equality = allow_updates and len(conditions) == 1
    for candidate in installed_versions:
        # ``all`` short-circuits just like the original chained ``and``
        if all(
            _fulfills_version_spec(
                [candidate],
                ">=" if relax_strict_equality and oper == "==" else oper,
                wanted,
                ignore_epoch=ignore_epoch,
            )
            for oper, wanted in conditions
        ):
            return True
    return False
def _fulfills_version_spec(versions, oper, desired_version, ignore_epoch=None):
    """
    Return ``True`` as soon as one of *versions* satisfies
    ``oper desired_version``; ``False`` if none does. An ``==``
    comparison also accepts shell-style glob matches on the version.
    """
    cmp_func = __salt__.get("pkg.version_cmp")
    # FreeBSD wraps the version list in a dict with "version"/"origin"
    # keys; unwrap it before comparing.
    if salt.utils.platform.is_freebsd():
        if isinstance(versions, dict) and "version" in versions:
            versions = versions["version"]
    for candidate in versions:
        if oper == "==" and fnmatch.fnmatch(candidate, desired_version):
            return True
        if salt.utils.versions.compare(
            ver1=candidate,
            oper=oper,
            ver2=desired_version,
            cmp_func=cmp_func,
            ignore_epoch=_check_ignore_epoch(oper, desired_version, ignore_epoch),
        ):
            return True
    return False
def _find_unpurge_targets(desired, **kwargs):
    """
    Find packages which are marked to be purged but can't yet be removed
    because they are dependencies for other installed packages. These are the
    packages which will need to be 'unpurged' because they are part of
    pkg.installed states. This really just applies to Debian-based Linuxes.
    """
    # Query the package database once. In the previous implementation the
    # ``pkg.list_pkgs`` call sat inside the comprehension's condition, so
    # this (potentially expensive) query re-ran for every desired package.
    installed = __salt__["pkg.list_pkgs"](purge_desired=True, **kwargs)
    return [x for x in desired if x in installed]
def _find_download_targets(
    name=None,
    version=None,
    pkgs=None,
    normalize=True,
    skip_suggestions=False,
    ignore_epoch=None,
    **kwargs
):
    """
    Inspect the arguments to pkg.downloaded and discover what packages need to
    be downloaded. Return a dict of packages to download.

    Note: on the "nothing to do" and error paths this instead returns a
    state-style result dict (``name``/``changes``/``result``/``comment``
    keys); callers must distinguish the two return shapes.
    """
    cur_pkgs = __salt__["pkg.list_downloaded"](**kwargs)
    if pkgs:
        # pylint: disable=not-callable
        to_download = _repack_pkgs(pkgs, normalize=normalize)
        # pylint: enable=not-callable
        if not to_download:
            # Badly-formatted SLS
            return {
                "name": name,
                "changes": {},
                "result": False,
                "comment": "Invalidly formatted pkgs parameter. See minion log.",
            }
    else:
        if normalize:
            _normalize_name = __salt__.get(
                "pkg.normalize_name", lambda pkgname: pkgname
            )
            to_download = {_normalize_name(name): version}
        else:
            to_download = {name: version}
        cver = cur_pkgs.get(name, {})
        if name in to_download:
            # Package already downloaded, no need to download again
            if cver and version in cver:
                return {
                    "name": name,
                    "changes": {},
                    "result": True,
                    "comment": (
                        "Version {} of package '{}' is already downloaded".format(
                            version, name
                        )
                    ),
                }
            # cver being non-empty means some version of the package has
            # already been downloaded; with no specific version requested
            # that is good enough
            elif cver and version is None:
                # The package is downloaded
                return {
                    "name": name,
                    "changes": {},
                    "result": True,
                    "comment": "Package {} is already downloaded".format(name),
                }
    version_spec = False
    if not skip_suggestions:
        # Best-effort pre-flight check against the package db; a failing
        # check (CommandExecutionError) is deliberately ignored.
        try:
            problems = _preflight_check(to_download, **kwargs)
        except CommandExecutionError:
            pass
        else:
            comments = []
            if problems.get("no_suggest"):
                comments.append(
                    "The following package(s) were not found, and no "
                    "possible matches were found in the package db: "
                    "{}".format(", ".join(sorted(problems["no_suggest"])))
                )
            if problems.get("suggest"):
                for pkgname, suggestions in problems["suggest"].items():
                    comments.append(
                        "Package '{}' not found (possible matches: {})".format(
                            pkgname, ", ".join(suggestions)
                        )
                    )
            if comments:
                if len(comments) > 1:
                    comments.append("")
                return {
                    "name": name,
                    "changes": {},
                    "result": False,
                    "comment": ". ".join(comments).rstrip(),
                }
    # Find out which packages will be targeted in the call to pkg.download
    # Check current downloaded versions against specified versions
    targets = {}
    problems = []
    for pkgname, pkgver in to_download.items():
        cver = cur_pkgs.get(pkgname, {})
        # Package not yet downloaded, so add to targets
        if not cver:
            targets[pkgname] = pkgver
            continue
        # No version specified but package is already downloaded
        elif cver and not pkgver:
            continue
        version_spec = True
        try:
            if not _fulfills_version_string(
                cver.keys(), pkgver, ignore_epoch=ignore_epoch
            ):
                targets[pkgname] = pkgver
        except CommandExecutionError as exc:
            problems.append(exc.strerror)
            continue
    if problems:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": " ".join(problems),
        }
    if not targets:
        # All specified packages are already downloaded
        msg = "All specified packages{} are already downloaded".format(
            " (matching specified versions)" if version_spec else ""
        )
        return {"name": name, "changes": {}, "result": True, "comment": msg}
    return targets
def _find_advisory_targets(name=None, advisory_ids=None, **kwargs):
    """
    Inspect the arguments to pkg.patch_installed and discover what advisory
    patches need to be installed. Returns either the list of patch IDs to
    install or, when nothing needs doing, a ready-made state result dict.
    """
    installed_patches = __salt__["pkg.list_installed_patches"](**kwargs)
    if advisory_ids:
        wanted = advisory_ids
    else:
        wanted = [name]
        if installed_patches.get(name, {}):
            # Single-patch form and the patch is already on the system
            return {
                "name": name,
                "changes": {},
                "result": True,
                "comment": "Advisory patch {} is already installed".format(name),
            }
    # Keep only the advisory patches that are not yet installed
    targets = [patch for patch in wanted if not installed_patches.get(patch, {})]
    if not targets:
        return {
            "name": name,
            "changes": {},
            "result": True,
            "comment": "All specified advisory patches are already installed",
        }
    return targets
def _find_remove_targets(
    name=None, version=None, pkgs=None, normalize=True, ignore_epoch=None, **kwargs
):
    """
    Inspect the arguments to pkg.removed and discover what packages need to
    be removed. Return a dict of packages to remove.

    Note: on the success path this actually returns a *list* of target
    package names; a state-style result dict (``name``/``changes``/
    ``result``/``comment``) is returned only for error and no-op cases.
    """
    if __grains__["os"] == "FreeBSD":
        kwargs["with_origin"] = True
    cur_pkgs = __salt__["pkg.list_pkgs"](versions_as_list=True, **kwargs)
    if pkgs:
        # pylint: disable=not-callable
        to_remove = _repack_pkgs(pkgs, normalize=normalize)
        # pylint: enable=not-callable
        if not to_remove:
            # Badly-formatted SLS
            return {
                "name": name,
                "changes": {},
                "result": False,
                "comment": "Invalidly formatted pkgs parameter. See minion log.",
            }
    else:
        _normalize_name = __salt__.get("pkg.normalize_name", lambda pkgname: pkgname)
        to_remove = {_normalize_name(name): version}
    version_spec = False
    # Find out which packages will be targeted in the call to pkg.remove
    # Check current versions against specified versions
    targets = []
    problems = []
    for pkgname, pkgver in to_remove.items():
        # FreeBSD pkg supports `openjdk` and `java/openjdk7` package names
        origin = bool(re.search("/", pkgname))
        if __grains__["os"] == "FreeBSD" and origin:
            cver = [k for k, v in cur_pkgs.items() if v["origin"] == pkgname]
        else:
            cver = cur_pkgs.get(pkgname, [])
        # Package not installed, no need to remove
        if not cver:
            continue
        # No version specified and pkg is installed
        elif __salt__["pkg_resource.version_clean"](pkgver) is None:
            targets.append(pkgname)
            continue
        version_spec = True
        try:
            if _fulfills_version_string(cver, pkgver, ignore_epoch=ignore_epoch):
                targets.append(pkgname)
            else:
                log.debug(
                    "Current version (%s) did not match desired version "
                    "specification (%s), will not remove",
                    cver,
                    pkgver,
                )
        except CommandExecutionError as exc:
            # Collect version-comparison failures and report them together
            problems.append(exc.strerror)
            continue
    if problems:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": " ".join(problems),
        }
    if not targets:
        # All specified packages are already absent
        msg = "All specified packages{} are already absent".format(
            " (matching specified versions)" if version_spec else ""
        )
        return {"name": name, "changes": {}, "result": True, "comment": msg}
    return targets
def _find_install_targets(
    name=None,
    version=None,
    pkgs=None,
    sources=None,
    skip_suggestions=False,
    pkg_verify=False,
    normalize=True,
    ignore_epoch=None,
    reinstall=False,
    refresh=False,
    **kwargs
):
    """
    Inspect the arguments to pkg.installed and discover what packages need to
    be installed. Return a dict of desired packages

    On success, returns a 7-tuple::

        (desired, targets, to_unpurge, to_reinstall, altered_files,
         warnings, was_refreshed)

    where ``desired`` is the full name -> version mapping requested,
    ``targets`` the subset that must be installed, ``to_reinstall`` the
    subset to be force-reinstalled (with ``altered_files`` holding any
    pkg.verify output per package), and ``was_refreshed`` whether a repo
    refresh already happened during discovery.

    Instead of the tuple, a state-style dict (``name``/``changes``/
    ``result``/``comment``) is returned early when the arguments are
    invalid, a lookup fails, or everything is already installed at the
    desired version.
    """
    was_refreshed = False

    # "pkgs" (repo installs) and "sources" (package-file installs) are
    # mutually exclusive ways of naming multiple packages.
    if all((pkgs, sources)):
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": 'Only one of "pkgs" and "sources" is permitted.',
        }

    # dict for packages that fail pkg.verify and their altered files
    altered_files = {}
    # Get the ignore_types list if any from the pkg_verify argument
    if isinstance(pkg_verify, list) and any(
        x.get("ignore_types") is not None
        for x in pkg_verify
        if isinstance(x, _OrderedDict) and "ignore_types" in x
    ):
        ignore_types = next(
            x.get("ignore_types") for x in pkg_verify if "ignore_types" in x
        )
    else:
        ignore_types = []

    # Get the verify_options list if any from the pkg_verify argument
    if isinstance(pkg_verify, list) and any(
        x.get("verify_options") is not None
        for x in pkg_verify
        if isinstance(x, _OrderedDict) and "verify_options" in x
    ):
        verify_options = next(
            x.get("verify_options") for x in pkg_verify if "verify_options" in x
        )
    else:
        verify_options = []

    # FreeBSD's pkg.list_pkgs takes with_origin so slash-style origin names
    # (e.g. java/openjdk7) can be matched below.
    if __grains__["os"] == "FreeBSD":
        kwargs["with_origin"] = True

    if salt.utils.platform.is_windows():
        # Windows requires a refresh to establish a pkg db if refresh=True, so
        # add it to the kwargs.
        kwargs["refresh"] = refresh

    resolve_capabilities = (
        kwargs.get("resolve_capabilities", False) and "pkg.list_provides" in __salt__
    )
    try:
        cur_pkgs = __salt__["pkg.list_pkgs"](versions_as_list=True, **kwargs)
        cur_prov = (
            resolve_capabilities and __salt__["pkg.list_provides"](**kwargs) or dict()
        )
    except CommandExecutionError as exc:
        return {"name": name, "changes": {}, "result": False, "comment": exc.strerror}

    if salt.utils.platform.is_windows() and kwargs.pop("refresh", False):
        # We already refreshed when we called pkg.list_pkgs
        was_refreshed = True
        refresh = False

    # Build the "desired" name -> version mapping from whichever of
    # pkgs/sources/name+version was supplied.
    if any((pkgs, sources)):
        if pkgs:
            # pylint: disable=not-callable
            desired = _repack_pkgs(pkgs, normalize=normalize)
            # pylint: enable=not-callable
        elif sources:
            desired = __salt__["pkg_resource.pack_sources"](
                sources,
                normalize=normalize,
            )

        if not desired:
            # Badly-formatted SLS
            return {
                "name": name,
                "changes": {},
                "result": False,
                "comment": "Invalidly formatted '{}' parameter. See minion log.".format(
                    "pkgs" if pkgs else "sources"
                ),
            }
        to_unpurge = _find_unpurge_targets(desired, **kwargs)
    else:
        if salt.utils.platform.is_windows():
            # pylint: disable=not-callable
            pkginfo = _get_package_info(name, saltenv=kwargs["saltenv"])
            # pylint: enable=not-callable
            if not pkginfo:
                return {
                    "name": name,
                    "changes": {},
                    "result": False,
                    "comment": "Package {} not found in the repository.".format(name),
                }
            if version is None:
                # pylint: disable=not-callable
                version = _get_latest_pkg_version(pkginfo)
                # pylint: enable=not-callable

        if normalize:
            # Fall back to identity when the pkg module has no normalizer
            _normalize_name = __salt__.get(
                "pkg.normalize_name", lambda pkgname: pkgname
            )
            desired = {_normalize_name(name): version}
        else:
            desired = {name: version}

        to_unpurge = _find_unpurge_targets(desired, **kwargs)

        # FreeBSD pkg supports `openjdk` and `java/openjdk7` package names
        origin = bool(re.search("/", name))

        if __grains__["os"] == "FreeBSD" and origin:
            cver = [k for k, v in cur_pkgs.items() if v["origin"] == name]
        else:
            cver = cur_pkgs.get(name, [])

        if name not in to_unpurge:
            if version and version in cver and not reinstall and not pkg_verify:
                # The package is installed and is the correct version
                return {
                    "name": name,
                    "changes": {},
                    "result": True,
                    "comment": "Version {} of package '{}' is already installed".format(
                        version, name
                    ),
                }

            # if cver is not an empty string, the package is already installed
            elif cver and version is None and not reinstall and not pkg_verify:
                # The package is installed
                return {
                    "name": name,
                    "changes": {},
                    "result": True,
                    "comment": "Package {} is already installed".format(name),
                }

    version_spec = False
    if not sources:
        # Check for alternate package names if strict processing is not
        # enforced. Takes extra time. Disable for improved performance
        if not skip_suggestions:
            # Perform platform-specific pre-flight checks
            not_installed = {
                name: version
                for name, version in desired.items()
                if not (
                    name in cur_pkgs
                    and (
                        version is None
                        or _fulfills_version_string(
                            cur_pkgs[name], version, ignore_epoch=ignore_epoch
                        )
                    )
                )
            }
            if not_installed:
                try:
                    problems = _preflight_check(not_installed, **kwargs)
                except CommandExecutionError:
                    # Best-effort check; failures here do not fail the state
                    pass
                else:
                    comments = []
                    if problems.get("no_suggest"):
                        comments.append(
                            "The following package(s) were not found, and no "
                            "possible matches were found in the package db: "
                            "{}".format(", ".join(sorted(problems["no_suggest"])))
                        )
                    if problems.get("suggest"):
                        for pkgname, suggestions in problems["suggest"].items():
                            comments.append(
                                "Package '{}' not found (possible matches: {})".format(
                                    pkgname, ", ".join(suggestions)
                                )
                            )
                    if comments:
                        if len(comments) > 1:
                            # Trailing empty entry so the join below ends
                            # with a period
                            comments.append("")
                        return {
                            "name": name,
                            "changes": {},
                            "result": False,
                            "comment": ". ".join(comments).rstrip(),
                        }

    # Resolve the latest package version for any packages with "latest" in the
    # package version
    wants_latest = [] if sources else [x for x, y in desired.items() if y == "latest"]
    if wants_latest:
        resolved_latest = __salt__["pkg.latest_version"](
            *wants_latest, refresh=refresh, **kwargs
        )
        if len(wants_latest) == 1:
            # pkg.latest_version returns a bare string for a single package
            resolved_latest = {wants_latest[0]: resolved_latest}
        if refresh:
            was_refreshed = True
            refresh = False

        # pkg.latest_version returns an empty string when the package is
        # up-to-date. So check the currently-installed packages. If found, the
        # resolved latest version will be the currently installed one from
        # cur_pkgs. If not found, then the package doesn't exist and the
        # resolved latest version will be None.
        for key in resolved_latest:
            if not resolved_latest[key]:
                if key in cur_pkgs:
                    resolved_latest[key] = cur_pkgs[key][-1]
                else:
                    resolved_latest[key] = None
        # Update the desired versions with the ones we resolved
        desired.update(resolved_latest)

    # Find out which packages will be targeted in the call to pkg.install
    targets = {}
    to_reinstall = {}
    problems = []
    warnings = []
    failed_verify = False
    for package_name, version_string in desired.items():
        cver = cur_pkgs.get(package_name, [])
        if resolve_capabilities and not cver and package_name in cur_prov:
            # Not installed under its own name; check the package that
            # provides this capability instead
            cver = cur_pkgs.get(cur_prov.get(package_name)[0], [])

        # Package not yet installed, so add to targets
        if not cver:
            targets[package_name] = version_string
            continue
        if sources:
            # For sources, version_string is the package file's URI/path
            if reinstall:
                to_reinstall[package_name] = version_string
                continue
            elif "lowpkg.bin_pkg_info" not in __salt__:
                continue
            # Metadata parser is available, cache the file and derive the
            # package's name and version
            err = "Unable to cache {0}: {1}"
            try:
                cached_path = __salt__["cp.cache_file"](
                    version_string, saltenv=kwargs["saltenv"]
                )
            except CommandExecutionError as exc:
                problems.append(err.format(version_string, exc))
                continue
            if not cached_path:
                problems.append(err.format(version_string, "file not found"))
                continue
            elif not os.path.exists(cached_path):
                problems.append("{} does not exist on minion".format(version_string))
                continue
            source_info = __salt__["lowpkg.bin_pkg_info"](cached_path)
            if source_info is None:
                warnings.append(
                    "Failed to parse metadata for {}".format(version_string)
                )
                continue
            else:
                verstr = source_info["version"]
        else:
            verstr = version_string
            if reinstall:
                to_reinstall[package_name] = version_string
                continue
            if not __salt__["pkg_resource.check_extra_requirements"](
                package_name, version_string
            ):
                targets[package_name] = version_string
                continue
            # No version specified and pkg is installed
            elif __salt__["pkg_resource.version_clean"](version_string) is None:
                if (not reinstall) and pkg_verify:
                    try:
                        verify_result = __salt__["pkg.verify"](
                            package_name,
                            ignore_types=ignore_types,
                            verify_options=verify_options,
                            **kwargs
                        )
                    except (CommandExecutionError, SaltInvocationError) as exc:
                        failed_verify = exc.strerror
                        continue
                    if verify_result:
                        # Installed files were altered; force a reinstall
                        to_reinstall[package_name] = version_string
                        altered_files[package_name] = verify_result
                continue

        version_fulfilled = False
        allow_updates = bool(not sources and kwargs.get("allow_updates"))
        try:
            version_fulfilled = _fulfills_version_string(
                cver, verstr, ignore_epoch=ignore_epoch, allow_updates=allow_updates
            )
        except CommandExecutionError as exc:
            problems.append(exc.strerror)
            continue

        # Compare desired version against installed version.
        version_spec = True
        if not version_fulfilled:
            if reinstall:
                to_reinstall[package_name] = version_string
            else:
                version_conditions = _parse_version_string(version_string)
                if pkg_verify and any(
                    oper == "==" for oper, version in version_conditions
                ):
                    try:
                        verify_result = __salt__["pkg.verify"](
                            package_name,
                            ignore_types=ignore_types,
                            verify_options=verify_options,
                            **kwargs
                        )
                    except (CommandExecutionError, SaltInvocationError) as exc:
                        failed_verify = exc.strerror
                        continue
                    if verify_result:
                        to_reinstall[package_name] = version_string
                        altered_files[package_name] = verify_result
                else:
                    log.debug(
                        "Current version (%s) did not match desired version "
                        "specification (%s), adding to installation targets",
                        cver,
                        version_string,
                    )
                    targets[package_name] = version_string

    if failed_verify:
        problems.append(failed_verify)

    if problems:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": " ".join(problems),
        }

    if not any((targets, to_unpurge, to_reinstall)):
        # All specified packages are installed
        msg = "All specified packages are already installed{0}"
        msg = msg.format(
            " and are at the desired version" if version_spec and not sources else ""
        )
        ret = {"name": name, "changes": {}, "result": True, "comment": msg}
        if warnings:
            ret.setdefault("warnings", []).extend(warnings)
        return ret

    return (
        desired,
        targets,
        to_unpurge,
        to_reinstall,
        altered_files,
        warnings,
        was_refreshed,
    )
def _verify_install(desired, new_pkgs, ignore_epoch=None, new_caps=None):
    """
    Check the post-install package list against what the SLS requested.

    ``desired`` maps package names to requested version strings; ``new_pkgs``
    is the pkg.list_pkgs output taken after installation. Returns a two-item
    tuple: the names whose requested version specification is satisfied, and
    the names that failed verification.
    """
    fulfilled = []
    unfulfilled = []
    if not new_caps:
        new_caps = dict()
    os_grain = __grains__["os"]
    for pkgname, pkgver in desired.items():
        # FreeBSD pkg supports `openjdk` and `java/openjdk7` package names.
        # Homebrew for Mac OSX does something similar with tap names
        # prefixing package names, separated with a slash.
        slashed = "/" in pkgname
        if os_grain == "FreeBSD" and slashed:
            cver = [k for k, v in new_pkgs.items() if v["origin"] == pkgname]
        elif os_grain == "MacOS" and slashed:
            cver = new_pkgs.get(pkgname, new_pkgs.get(pkgname.split("/")[-1]))
        elif os_grain == "OpenBSD":
            cver = new_pkgs.get(pkgname.split("%")[0])
        elif __grains__["os_family"] == "Debian":
            cver = new_pkgs.get(pkgname.split("=")[0])
        else:
            cver = new_pkgs.get(pkgname)

        if not cver and pkgname in new_caps:
            # Fall back to the package that provides this capability
            cver = new_pkgs.get(new_caps.get(pkgname)[0])

        if not cver:
            unfulfilled.append(pkgname)
            continue
        # "latest", an uncleanable version string, or a satisfied trailing
        # wildcard are all accepted without a full version comparison.
        if (
            pkgver == "latest"
            or not __salt__["pkg_resource.version_clean"](pkgver)
            or (pkgver.endswith("*") and cver[0].startswith(pkgver[:-1]))
        ):
            fulfilled.append(pkgname)
            continue
        if _fulfills_version_string(cver, pkgver, ignore_epoch=ignore_epoch):
            fulfilled.append(pkgname)
        else:
            unfulfilled.append(pkgname)
    return fulfilled, unfulfilled
def _get_desired_pkg(name, desired):
"""
Helper function that retrieves and nicely formats the desired pkg (and
version if specified) so that helpful information can be printed in the
comment for the state.
"""
if not desired[name] or desired[name].startswith(("<", ">", "=")):
oper = ""
else:
oper = "="
return "{}{}{}".format(name, oper, "" if not desired[name] else desired[name])
def _preflight_check(desired, fromrepo, **kwargs):
    """
    Run the platform's package-db check (``pkg.check_db``) over the desired
    package names.

    Returns a dict with two keys: ``suggest`` maps each unknown package name
    to its list of suggested alternatives, and ``no_suggest`` lists unknown
    names with no alternatives. An empty dict is returned when the platform
    provides no ``pkg.check_db`` function.
    """
    if "pkg.check_db" not in __salt__:
        return {}
    ret = {"suggest": {}, "no_suggest": []}
    pkginfo = __salt__["pkg.check_db"](
        *list(desired.keys()), fromrepo=fromrepo, **kwargs
    )
    for pkgname, info in pkginfo.items():
        if info["found"] is not False:
            continue
        suggestions = info["suggestions"]
        if suggestions:
            ret["suggest"][pkgname] = suggestions
        else:
            ret["no_suggest"].append(pkgname)
    return ret
def _nested_output(obj):
    """
    Render *obj* through Salt's ``nested`` outputter, stripping trailing
    whitespace so the result embeds cleanly in a state comment.
    """
    nested.__opts__ = __opts__
    rendered = nested.output(obj)
    return rendered.rstrip()
def _resolve_capabilities(pkgs, refresh=False, **kwargs):
    """
    Exchange capability names in ``pkgs`` for the real package names that
    provide them, via ``pkg.resolve_capabilities`` when available.

    Returns a ``(pkgs, refresh)`` pair. After a successful resolution the
    second element is False, signalling that no further refresh is needed.
    When ``pkgs`` is empty or the platform's pkg module does not implement
    ``pkg.resolve_capabilities``, the inputs are returned unchanged.
    """
    resolvable = bool(pkgs) and "pkg.resolve_capabilities" in __salt__
    if not resolvable:
        return pkgs, refresh
    resolved = __salt__["pkg.resolve_capabilities"](pkgs, refresh=refresh, **kwargs)
    return resolved, False
def installed(
name,
version=None,
refresh=None,
fromrepo=None,
skip_verify=False,
skip_suggestions=False,
pkgs=None,
sources=None,
allow_updates=False,
pkg_verify=False,
normalize=True,
ignore_epoch=None,
reinstall=False,
update_holds=False,
**kwargs
):
"""
Ensure that the package is installed, and that it is the correct version
(if specified).
.. note::
Any argument which is either a) not explicitly defined for this state,
or b) not a global state argument like ``saltenv``, or
``reload_modules``, will be passed through to the call to
``pkg.install`` to install the package(s). For example, you can include
a ``disablerepo`` argument on platforms that use yum/dnf to disable
that repo:
.. code-block:: yaml
mypkg:
pkg.installed:
- disablerepo: base,updates
To see what is supported, check :ref:`this page <virtual-pkg>` to find
the documentation for your platform's ``pkg`` module, then look at the
documentation for the ``install`` function.
Any argument that is passed through to the ``install`` function, which
is not defined for that function, will be silently ignored.
:param str name:
The name of the package to be installed. This parameter is ignored if
either "pkgs" or "sources" is used. Additionally, please note that this
option can only be used to install packages from a software repository.
To install a package file manually, use the "sources" option detailed
below.
:param str version:
Install a specific version of a package. This option is ignored if
"sources" is used. Currently, this option is supported
for the following pkg providers: :mod:`apt <salt.modules.aptpkg>`,
:mod:`ebuild <salt.modules.ebuild>`,
:mod:`pacman <salt.modules.pacman>`,
:mod:`pkgin <salt.modules.pkgin>`,
:mod:`win_pkg <salt.modules.win_pkg>`,
:mod:`yumpkg <salt.modules.yumpkg>`, and
:mod:`zypper <salt.modules.zypper>`. The version number includes the
release designation where applicable, to allow Salt to target a
specific release of a given version. When in doubt, using the
``pkg.latest_version`` function for an uninstalled package will tell
you the version available.
.. code-block:: bash
# salt myminion pkg.latest_version vim-enhanced
myminion:
2:7.4.160-1.el7
.. important::
As of version 2015.8.7, for distros which use yum/dnf, packages
which have a version with a nonzero epoch (that is, versions which
start with a number followed by a colon like in the
``pkg.latest_version`` output above) must have the epoch included
when specifying the version number. For example:
.. code-block:: yaml
vim-enhanced:
pkg.installed:
- version: 2:7.4.160-1.el7
In version 2015.8.9, an **ignore_epoch** argument has been added to
:py:mod:`pkg.installed <salt.states.pkg.installed>`,
:py:mod:`pkg.removed <salt.states.pkg.removed>`, and
:py:mod:`pkg.purged <salt.states.pkg.purged>` states, which
causes the epoch to be disregarded when the state checks to see if
the desired version was installed.
Also, while this function is not yet implemented for all pkg frontends,
:mod:`pkg.list_repo_pkgs <salt.modules.yumpkg.list_repo_pkgs>` will
show all versions available in the various repositories for a given
package, irrespective of whether or not it is installed.
.. code-block:: bash
# salt myminion pkg.list_repo_pkgs bash
myminion:
----------
bash:
- 4.2.46-21.el7_3
- 4.2.46-20.el7_2
This function was first added for :mod:`pkg.list_repo_pkgs
<salt.modules.yumpkg.list_repo_pkgs>` in 2014.1.0, and was expanded to
:py:func:`Debian/Ubuntu <salt.modules.aptpkg.list_repo_pkgs>` and
:py:func:`Arch Linux <salt.modules.pacman.list_repo_pkgs>`-based
distros in the 2017.7.0 release.
The version strings returned by either of these functions can be used
as version specifiers in pkg states.
You can install a specific version when using the ``pkgs`` argument by
including the version after the package:
.. code-block:: yaml
common_packages:
pkg.installed:
- pkgs:
- unzip
- dos2unix
- salt-minion: 2015.8.5-1.el6
If the version given is the string ``latest``, the latest available
package version will be installed à la ``pkg.latest``.
**WILDCARD VERSIONS**
As of the 2017.7.0 release, this state now supports wildcards in
package versions for SUSE SLES/Leap/Tumbleweed, Debian/Ubuntu,
RHEL/CentOS, Arch Linux, and their derivatives. Using wildcards can be
useful for packages where the release name is built into the version in
some way, such as for RHEL/CentOS which typically has version numbers
like ``1.2.34-5.el7``. An example of the usage for this would be:
.. code-block:: yaml
mypkg:
pkg.installed:
- version: '1.2.34*'
Keep in mind that using wildcard versions will result in a slower state
run since Salt must gather the available versions of the specified
packages and figure out which of them match the specified wildcard
expression.
:param bool refresh:
This parameter controls whether or not the package repo database is
updated prior to installing the requested package(s).
If ``True``, the package database will be refreshed (``apt-get
update`` or equivalent, depending on platform) before installing.
If ``False``, the package database will *not* be refreshed before
installing.
If unset, then Salt treats package database refreshes differently
depending on whether or not a ``pkg`` state has been executed already
during the current Salt run. Once a refresh has been performed in a
``pkg`` state, for the remainder of that Salt run no other refreshes
will be performed for ``pkg`` states which do not explicitly set
``refresh`` to ``True``. This prevents needless additional refreshes
from slowing down the Salt run.
:param str cache_valid_time:
.. versionadded:: 2016.11.0
This parameter sets the value in seconds after which the cache is
marked as invalid, and a cache update is necessary. This overwrites
the ``refresh`` parameter's default behavior.
Example:
.. code-block:: yaml
httpd:
pkg.installed:
- fromrepo: mycustomrepo
- skip_verify: True
- skip_suggestions: True
- version: 2.0.6~ubuntu3
- refresh: True
- cache_valid_time: 300
- allow_updates: True
- hold: False
In this case, a refresh will not take place for 5 minutes since the last
``apt-get update`` was executed on the system.
.. note::
This parameter is available only on Debian based distributions and
has no effect on the rest.
:param str fromrepo:
Specify a repository from which to install
.. note::
Distros which use APT (Debian, Ubuntu, etc.) do not have a concept
of repositories, in the same way as YUM-based distros do. When a
source is added, it is assigned to a given release. Consider the
following source configuration:
.. code-block:: text
deb http://ppa.launchpad.net/saltstack/salt/ubuntu precise main
The packages provided by this source would be made available via
the ``precise`` release, therefore ``fromrepo`` would need to be
set to ``precise`` for Salt to install the package from this
source.
Having multiple sources in the same release may result in the
default install candidate being newer than what is desired. If this
is the case, the desired version must be specified using the
``version`` parameter.
If the ``pkgs`` parameter is being used to install multiple
packages in the same state, then instead of using ``version``,
use the method of version specification described in the **Multiple
Package Installation Options** section below.
Running the shell command ``apt-cache policy pkgname`` on a minion
can help elucidate the APT configuration and aid in properly
configuring states:
.. code-block:: bash
root@saltmaster:~# salt ubuntu01 cmd.run 'apt-cache policy ffmpeg'
ubuntu01:
ffmpeg:
Installed: (none)
Candidate: 7:0.10.11-1~precise1
Version table:
7:0.10.11-1~precise1 0
500 http://ppa.launchpad.net/jon-severinsson/ffmpeg/ubuntu/ precise/main amd64 Packages
4:0.8.10-0ubuntu0.12.04.1 0
500 http://us.archive.ubuntu.com/ubuntu/ precise-updates/main amd64 Packages
500 http://security.ubuntu.com/ubuntu/ precise-security/main amd64 Packages
4:0.8.1-0ubuntu1 0
500 http://us.archive.ubuntu.com/ubuntu/ precise/main amd64 Packages
The release is located directly after the source's URL. The actual
release name is the part before the slash, so to install version
**4:0.8.10-0ubuntu0.12.04.1** either ``precise-updates`` or
``precise-security`` could be used for the ``fromrepo`` value.
:param bool skip_verify:
Skip the GPG verification check for the package to be installed
:param bool skip_suggestions:
Force strict package naming. Disables lookup of package alternatives.
.. versionadded:: 2014.1.1
:param bool resolve_capabilities:
Turn on resolving capabilities. This allow one to name "provides" or alias names for packages.
.. versionadded:: 2018.3.0
:param bool allow_updates:
Allow the package to be updated outside Salt's control (e.g. auto
updates on Windows). This means a package on the Minion can have a
newer version than the latest available in the repository without
enforcing a re-installation of the package.
.. versionadded:: 2014.7.0
Example:
.. code-block:: yaml
httpd:
pkg.installed:
- fromrepo: mycustomrepo
- skip_verify: True
- skip_suggestions: True
- version: 2.0.6~ubuntu3
- refresh: True
- allow_updates: True
- hold: False
:param bool pkg_verify:
.. versionadded:: 2014.7.0
For requested packages that are already installed and would not be
targeted for upgrade or downgrade, use pkg.verify to determine if any
of the files installed by the package have been altered. If files have
been altered, the reinstall option of pkg.install is used to force a
reinstall. Types to ignore can be passed to pkg.verify. Additionally,
``verify_options`` can be used to modify further the behavior of
pkg.verify. See examples below. Currently, this option is supported
for the following pkg providers: :mod:`yumpkg <salt.modules.yumpkg>`.
Examples:
.. code-block:: yaml
httpd:
pkg.installed:
- version: 2.2.15-30.el6.centos
- pkg_verify: True
.. code-block:: yaml
mypkgs:
pkg.installed:
- pkgs:
- foo
- bar: 1.2.3-4
- baz
- pkg_verify:
- ignore_types:
- config
- doc
.. code-block:: yaml
mypkgs:
pkg.installed:
- pkgs:
- foo
- bar: 1.2.3-4
- baz
- pkg_verify:
- ignore_types:
- config
- doc
- verify_options:
- nodeps
- nofiledigest
:param list ignore_types:
List of types to ignore when verifying the package
.. versionadded:: 2014.7.0
:param list verify_options:
List of additional options to pass when verifying the package. These
options will be added to the ``rpm -V`` command, prepended with ``--``
(for example, when ``nodeps`` is passed in this option, ``rpm -V`` will
be run with ``--nodeps``).
.. versionadded:: 2016.11.0
:param bool normalize:
Normalize the package name by removing the architecture, if the
architecture of the package is different from the architecture of the
operating system. The ability to disable this behavior is useful for
poorly-created packages which include the architecture as an actual
part of the name, such as kernel modules which match a specific kernel
version.
.. versionadded:: 2014.7.0
Example:
.. code-block:: yaml
gpfs.gplbin-2.6.32-279.31.1.el6.x86_64:
pkg.installed:
- normalize: False
:param bool ignore_epoch:
If this option is not explicitly set, and there is no epoch in the
desired package version, the epoch will be implicitly ignored. Set this
argument to ``True`` to explicitly ignore the epoch, and ``False`` to
strictly enforce it.
.. versionadded:: 2015.8.9
.. versionchanged:: 3001
In prior releases, the default behavior was to strictly enforce
epochs unless this argument was set to ``True``.
|
**MULTIPLE PACKAGE INSTALLATION OPTIONS: (not supported in pkgng)**
:param list pkgs:
A list of packages to install from a software repository. All packages
listed under ``pkgs`` will be installed via a single command.
.. code-block:: yaml
mypkgs:
pkg.installed:
- pkgs:
- foo
- bar
- baz
- hold: True
``NOTE:`` For :mod:`apt <salt.modules.aptpkg>`,
:mod:`ebuild <salt.modules.ebuild>`,
:mod:`pacman <salt.modules.pacman>`,
:mod:`winrepo <salt.modules.win_pkg>`,
:mod:`yumpkg <salt.modules.yumpkg>`, and
:mod:`zypper <salt.modules.zypper>`,
version numbers can be specified
in the ``pkgs`` argument. For example:
.. code-block:: yaml
mypkgs:
pkg.installed:
- pkgs:
- foo
- bar: 1.2.3-4
- baz
Additionally, :mod:`ebuild <salt.modules.ebuild>`, :mod:`pacman
<salt.modules.pacman>`, :mod:`zypper <salt.modules.zypper>`,
:mod:`yum/dnf <salt.modules.yumpkg>`, and :mod:`apt
<salt.modules.aptpkg>` support the ``<``, ``<=``, ``>=``, and ``>``
operators for more control over what versions will be installed. For
example:
.. code-block:: yaml
mypkgs:
pkg.installed:
- pkgs:
- foo
- bar: '>=1.2.3-4'
- baz
``NOTE:`` When using comparison operators, the expression must be enclosed
in quotes to avoid a YAML render error.
With :mod:`ebuild <salt.modules.ebuild>` is also possible to specify a
use flag list and/or if the given packages should be in
package.accept_keywords file and/or the overlay from which you want the
package to be installed. For example:
.. code-block:: yaml
mypkgs:
pkg.installed:
- pkgs:
- foo: '~'
- bar: '~>=1.2:slot::overlay[use,-otheruse]'
- baz
:param list sources:
A list of packages to install, along with the source URI or local path
from which to install each package. In the example below, ``foo``,
``bar``, ``baz``, etc. refer to the name of the package, as it would
appear in the output of the ``pkg.version`` or ``pkg.list_pkgs`` salt
CLI commands.
.. code-block:: yaml
mypkgs:
pkg.installed:
- sources:
- foo: salt://rpms/foo.rpm
- bar: http://somesite.org/bar.rpm
- baz: ftp://someothersite.org/baz.rpm
- qux: /minion/path/to/qux.rpm
**PLATFORM-SPECIFIC ARGUMENTS**
These are specific to each OS. If it does not apply to the execution
module for your OS, it is ignored.
:param bool hold:
Force the package to be held at the current installed version.
Supported on YUM/DNF & APT based systems.
.. versionadded:: 2014.7.0
Supported on Zypper-based systems.
.. versionadded:: 3003
:param bool update_holds:
If ``True``, and this function would update the package version, any
packages which are being held will be temporarily unheld so that they
can be updated. Otherwise, if this function attempts to update a held
package, the held package(s) will be skipped and the state will fail.
By default, this parameter is set to ``False``.
Supported on YUM/DNF & APT based systems.
.. versionadded:: 2016.11.0
Supported on Zypper-based systems.
.. versionadded:: 3003
:param list names:
A list of packages to install from a software repository. Each package
will be installed individually by the package manager.
.. warning::
Unlike ``pkgs``, the ``names`` parameter cannot specify a version.
In addition, it makes a separate call to the package management
frontend to install each package, whereas ``pkgs`` makes just a
single call. It is therefore recommended to use ``pkgs`` instead of
``names`` to install multiple packages, both for the additional
features and the performance improvement that it brings.
:param bool install_recommends:
Whether to install the packages marked as recommended. Default is
``True``. Currently only works with APT-based systems.
.. versionadded:: 2015.5.0
.. code-block:: yaml
httpd:
pkg.installed:
- install_recommends: False
:param bool only_upgrade:
Only upgrade the packages, if they are already installed. Default is
``False``. Currently only works with APT-based systems.
.. versionadded:: 2015.5.0
.. code-block:: yaml
httpd:
pkg.installed:
- only_upgrade: True
.. note::
If this parameter is set to True and the package is not already
installed, the state will fail.
:param bool report_reboot_exit_codes:
If the installer exits with a recognized exit code indicating that
a reboot is required, the module function
*win_system.set_reboot_required_witnessed*
will be called, preserving the knowledge of this event
for the remainder of the current boot session. For the time being,
``3010`` is the only recognized exit code,
but this is subject to future refinement.
The value of this param
defaults to ``True``. This parameter has no effect
on non-Windows systems.
.. versionadded:: 2016.11.0
.. code-block:: yaml
ms vcpp installed:
pkg.installed:
- name: ms-vcpp
- version: 10.0.40219
- report_reboot_exit_codes: False
:return:
A dictionary containing the state of the software installation
:rtype dict:
.. note::
The ``pkg.installed`` state supports the usage of ``reload_modules``.
This functionality allows you to force Salt to reload all modules. In
many cases, Salt is clever enough to transparently reload the modules.
For example, if you install a package, Salt reloads modules because some
other module or state might require the package which was installed.
However, there are some edge cases where this may not be the case, which
is what ``reload_modules`` is meant to resolve.
You should only use ``reload_modules`` if your ``pkg.installed`` does some
sort of installation where if you do not reload the modules future items
in your state which rely on the software being installed will fail. Please
see the :ref:`Reloading Modules <reloading-modules>` documentation for more
information.
.. seealso:: unless and onlyif
If running pkg commands together with :ref:`aggregate <mod-aggregate-state>`
isn't an option, you can use the :ref:`creates <creates-requisite>`,
:ref:`unless <unless-requisite>`, or :ref:`onlyif <onlyif-requisite>`
syntax to skip a full package run. This can be helpful in large environments
with multiple states that include requisites for packages to be installed.
.. code-block:: yaml
# Using creates for a simple single-factor check
install_nginx:
pkg.installed:
- name: nginx
- creates:
- /etc/nginx/nginx.conf
.. code-block:: yaml
# Using file.file_exists for a single-factor check
install_nginx:
pkg.installed:
- name: nginx
- unless:
- fun: file.file_exists
args:
- /etc/nginx/nginx.conf
# Using unless with a shell test
install_nginx:
pkg.installed:
- name: nginx
- unless: test -f /etc/nginx/nginx.conf
.. code-block:: yaml
# Using file.search for a two-factor check
install_nginx:
pkg.installed:
- name: nginx
- unless:
- fun: file.search
args:
- /etc/nginx/nginx.conf
- 'user www-data;'
The above examples use different methods to reasonably ensure
that a package has already been installed. First, with checking for a
file that would be created with the package. Second, by checking for
specific text within a file that would be created or managed by salt.
    With these requisites satisfied, creates/unless will return ``True`` and the
``pkg.installed`` state will be skipped.
.. code-block:: bash
# Example of state run without unless used
salt 'saltdev' state.apply nginx
saltdev:
----------
ID: install_nginx
Function: pkg.installed
Name: nginx
Result: True
Comment: All specified packages are already installed
Started: 20:11:56.388331
Duration: 4290.0 ms
Changes:
# Example of state run using unless requisite
salt 'saltdev' state.apply nginx
saltdev:
----------
ID: install_nginx
Function: pkg.installed
Name: nginx
Result: True
Comment: unless condition is true
Started: 20:10:50.659215
Duration: 1530.0 ms
Changes:
The result is a reduction of almost 3 seconds. In larger environments,
small reductions in waiting time can add up.
:ref:`Unless Requisite <unless-requisite>`
"""
if isinstance(pkgs, list) and len(pkgs) == 0:
return {
"name": name,
"changes": {},
"result": True,
"comment": "No packages to install provided",
}
# If just a name (and optionally a version) is passed, just pack them into
# the pkgs argument.
if name and not any((pkgs, sources)):
if version:
pkgs = [{name: version}]
version = None
else:
pkgs = [name]
kwargs["saltenv"] = __env__
refresh = salt.utils.pkg.check_refresh(__opts__, refresh)
# check if capabilities should be checked and modify the requested packages
# accordingly.
if pkgs:
pkgs, refresh = _resolve_capabilities(pkgs, refresh=refresh, **kwargs)
if not isinstance(pkg_verify, list):
pkg_verify = pkg_verify is True
if (pkg_verify or isinstance(pkg_verify, list)) and "pkg.verify" not in __salt__:
return {
"name": name,
"changes": {},
"result": False,
"comment": "pkg.verify not implemented",
}
if not isinstance(version, str) and version is not None:
version = str(version)
kwargs["allow_updates"] = allow_updates
result = _find_install_targets(
name,
version,
pkgs,
sources,
fromrepo=fromrepo,
skip_suggestions=skip_suggestions,
pkg_verify=pkg_verify,
normalize=normalize,
ignore_epoch=ignore_epoch,
reinstall=reinstall,
refresh=refresh,
**kwargs
)
try:
(
desired,
targets,
to_unpurge,
to_reinstall,
altered_files,
warnings,
was_refreshed,
) = result
if was_refreshed:
refresh = False
except ValueError:
# _find_install_targets() found no targets or encountered an error
# check that the hold function is available
if "pkg.hold" in __salt__ and "hold" in kwargs:
try:
action = "pkg.hold" if kwargs["hold"] else "pkg.unhold"
hold_ret = __salt__[action](name=name, pkgs=pkgs, sources=sources)
except (CommandExecutionError, SaltInvocationError) as exc:
return {
"name": name,
"changes": {},
"result": False,
"comment": str(exc),
}
if "result" in hold_ret and not hold_ret["result"]:
return {
"name": name,
"changes": {},
"result": False,
"comment": (
"An error was encountered while "
"holding/unholding package(s): {}".format(hold_ret["comment"])
),
}
else:
modified_hold = [
hold_ret[x] for x in hold_ret if hold_ret[x]["changes"]
]
not_modified_hold = [
hold_ret[x]
for x in hold_ret
if not hold_ret[x]["changes"] and hold_ret[x]["result"]
]
failed_hold = [
hold_ret[x] for x in hold_ret if not hold_ret[x]["result"]
]
for i in modified_hold:
result["comment"] += ".\n{}".format(i["comment"])
result["result"] = i["result"]
result["changes"][i["name"]] = i["changes"]
for i in not_modified_hold:
result["comment"] += ".\n{}".format(i["comment"])
result["result"] = i["result"]
for i in failed_hold:
result["comment"] += ".\n{}".format(i["comment"])
result["result"] = i["result"]
return result
if to_unpurge and "lowpkg.unpurge" not in __salt__:
ret = {
"name": name,
"changes": {},
"result": False,
"comment": "lowpkg.unpurge not implemented",
}
if warnings:
ret.setdefault("warnings", []).extend(warnings)
return ret
# Remove any targets not returned by _find_install_targets
if pkgs:
pkgs = [dict([(x, y)]) for x, y in targets.items()]
pkgs.extend([dict([(x, y)]) for x, y in to_reinstall.items()])
elif sources:
oldsources = sources
sources = [x for x in oldsources if next(iter(list(x.keys()))) in targets]
sources.extend(
[x for x in oldsources if next(iter(list(x.keys()))) in to_reinstall]
)
comment = []
changes = {"installed": {}}
if __opts__["test"]:
if targets:
if sources:
_targets = targets
else:
_targets = [_get_desired_pkg(x, targets) for x in targets]
summary = ", ".join(targets)
changes["installed"].update(
{x: {"new": "installed", "old": ""} for x in targets}
)
comment.append(
"The following packages would be installed/updated: {}".format(summary)
)
if to_unpurge:
comment.append(
"The following packages would have their selection status "
"changed from 'purge' to 'install': {}".format(", ".join(to_unpurge))
)
changes["installed"].update(
{x: {"new": "installed", "old": ""} for x in to_unpurge}
)
if to_reinstall:
# Add a comment for each package in to_reinstall with its
# pkg.verify output
if reinstall:
reinstall_targets = []
for reinstall_pkg in to_reinstall:
if sources:
reinstall_targets.append(reinstall_pkg)
else:
reinstall_targets.append(
_get_desired_pkg(reinstall_pkg, to_reinstall)
)
changes["installed"].update(
{x: {"new": "installed", "old": ""} for x in reinstall_targets}
)
msg = "The following packages would be reinstalled: "
msg += ", ".join(reinstall_targets)
comment.append(msg)
else:
for reinstall_pkg in to_reinstall:
if sources:
pkgstr = reinstall_pkg
else:
pkgstr = _get_desired_pkg(reinstall_pkg, to_reinstall)
comment.append(
"Package '{}' would be reinstalled because the "
"following files have been altered:".format(pkgstr)
)
changes["installed"].update({reinstall_pkg: {}})
comment.append(_nested_output(altered_files[reinstall_pkg]))
ret = {
"name": name,
"changes": changes,
"result": None,
"comment": "\n".join(comment),
}
if warnings:
ret.setdefault("warnings", []).extend(warnings)
return ret
modified_hold = None
not_modified_hold = None
failed_hold = None
if targets or to_reinstall:
try:
pkg_ret = __salt__["pkg.install"](
name=None,
refresh=refresh,
version=version,
fromrepo=fromrepo,
skip_verify=skip_verify,
pkgs=pkgs,
sources=sources,
reinstall=bool(to_reinstall),
normalize=normalize,
update_holds=update_holds,
ignore_epoch=ignore_epoch,
**kwargs
)
except CommandExecutionError as exc:
ret = {"name": name, "result": False}
if exc.info:
# Get information for state return from the exception.
ret["changes"] = exc.info.get("changes", {})
ret["comment"] = exc.strerror_without_changes
else:
ret["changes"] = {}
ret[
"comment"
] = "An error was encountered while installing package(s): {}".format(
exc
)
if warnings:
ret.setdefault("warnings", []).extend(warnings)
return ret
if refresh:
refresh = False
if isinstance(pkg_ret, dict):
changes["installed"].update(pkg_ret)
elif isinstance(pkg_ret, str):
comment.append(pkg_ret)
# Code below will be looking for a dictionary. If this is a string
# it means that there was an exception raised and that no packages
# changed, so now that we have added this error to the comments we
# set this to an empty dictionary so that the code below which
# checks reinstall targets works.
pkg_ret = {}
if "pkg.hold" in __salt__ and "hold" in kwargs:
try:
action = "pkg.hold" if kwargs["hold"] else "pkg.unhold"
hold_ret = __salt__[action](name=name, pkgs=desired)
except (CommandExecutionError, SaltInvocationError) as exc:
comment.append(str(exc))
ret = {
"name": name,
"changes": changes,
"result": False,
"comment": "\n".join(comment),
}
if warnings:
ret.setdefault("warnings", []).extend(warnings)
return ret
else:
if "result" in hold_ret and not hold_ret["result"]:
ret = {
"name": name,
"changes": {},
"result": False,
"comment": (
"An error was encountered while "
"holding/unholding package(s): {}".format(hold_ret["comment"])
),
}
if warnings:
ret.setdefault("warnings", []).extend(warnings)
return ret
else:
modified_hold = [
hold_ret[x] for x in hold_ret if hold_ret[x]["changes"]
]
not_modified_hold = [
hold_ret[x]
for x in hold_ret
if not hold_ret[x]["changes"] and hold_ret[x]["result"]
]
failed_hold = [
hold_ret[x] for x in hold_ret if not hold_ret[x]["result"]
]
if to_unpurge:
changes["purge_desired"] = __salt__["lowpkg.unpurge"](*to_unpurge)
# Analyze pkg.install results for packages in targets
if sources:
modified = [x for x in changes["installed"] if x in targets]
not_modified = [
x for x in desired if x not in targets and x not in to_reinstall
]
failed = [x for x in targets if x not in modified]
else:
if __grains__["os"] == "FreeBSD":
kwargs["with_origin"] = True
new_pkgs = __salt__["pkg.list_pkgs"](versions_as_list=True, **kwargs)
if (
kwargs.get("resolve_capabilities", False)
and "pkg.list_provides" in __salt__
):
new_caps = __salt__["pkg.list_provides"](**kwargs)
else:
new_caps = {}
_ok, failed = _verify_install(
desired, new_pkgs, ignore_epoch=ignore_epoch, new_caps=new_caps
)
modified = [x for x in _ok if x in targets]
not_modified = [x for x in _ok if x not in targets and x not in to_reinstall]
failed = [x for x in failed if x in targets]
# If there was nothing unpurged, just set the changes dict to the contents
# of changes['installed'].
if not changes.get("purge_desired"):
changes = changes["installed"]
if modified:
if sources:
summary = ", ".join(modified)
else:
summary = ", ".join([_get_desired_pkg(x, desired) for x in modified])
if len(summary) < 20:
comment.append(
"The following packages were installed/updated: {}".format(summary)
)
else:
comment.append(
"{} targeted package{} {} installed/updated.".format(
len(modified),
"s" if len(modified) > 1 else "",
"were" if len(modified) > 1 else "was",
)
)
if modified_hold:
for i in modified_hold:
change_name = i["name"]
if change_name in changes:
comment.append(i["comment"])
if len(changes[change_name]["new"]) > 0:
changes[change_name]["new"] += "\n"
changes[change_name]["new"] += "{}".format(i["changes"]["new"])
if len(changes[change_name]["old"]) > 0:
changes[change_name]["old"] += "\n"
changes[change_name]["old"] += "{}".format(i["changes"]["old"])
else:
comment.append(i["comment"])
changes[change_name] = {}
changes[change_name]["new"] = "{}".format(i["changes"]["new"])
# Any requested packages that were not targeted for install or reinstall
if not_modified:
if sources:
summary = ", ".join(not_modified)
else:
summary = ", ".join([_get_desired_pkg(x, desired) for x in not_modified])
if len(not_modified) <= 20:
comment.append(
"The following packages were already installed: {}".format(summary)
)
else:
comment.append(
"{} targeted package{} {} already installed".format(
len(not_modified),
"s" if len(not_modified) > 1 else "",
"were" if len(not_modified) > 1 else "was",
)
)
if not_modified_hold:
for i in not_modified_hold:
comment.append(i["comment"])
result = True
if failed:
if sources:
summary = ", ".join(failed)
else:
summary = ", ".join([_get_desired_pkg(x, desired) for x in failed])
comment.insert(
0, "The following packages failed to install/update: {}".format(summary)
)
result = False
if failed_hold:
for i in failed_hold:
comment.append(i["comment"])
result = False
# Get the ignore_types list if any from the pkg_verify argument
if isinstance(pkg_verify, list) and any(
x.get("ignore_types") is not None
for x in pkg_verify
if isinstance(x, _OrderedDict) and "ignore_types" in x
):
ignore_types = next(
x.get("ignore_types") for x in pkg_verify if "ignore_types" in x
)
else:
ignore_types = []
# Get the verify_options list if any from the pkg_verify argument
if isinstance(pkg_verify, list) and any(
x.get("verify_options") is not None
for x in pkg_verify
if isinstance(x, _OrderedDict) and "verify_options" in x
):
verify_options = next(
x.get("verify_options") for x in pkg_verify if "verify_options" in x
)
else:
verify_options = []
# Rerun pkg.verify for packages in to_reinstall to determine failed
modified = []
failed = []
for reinstall_pkg in to_reinstall:
if reinstall:
if reinstall_pkg in pkg_ret:
modified.append(reinstall_pkg)
else:
failed.append(reinstall_pkg)
elif pkg_verify:
# No need to wrap this in a try/except because we would already
# have caught invalid arguments earlier.
verify_result = __salt__["pkg.verify"](
reinstall_pkg,
ignore_types=ignore_types,
verify_options=verify_options,
**kwargs
)
if verify_result:
failed.append(reinstall_pkg)
altered_files[reinstall_pkg] = verify_result
else:
modified.append(reinstall_pkg)
if modified:
# Add a comment for each package in modified with its pkg.verify output
for modified_pkg in modified:
if sources:
pkgstr = modified_pkg
else:
pkgstr = _get_desired_pkg(modified_pkg, desired)
msg = "Package {} was reinstalled.".format(pkgstr)
if modified_pkg in altered_files:
msg += " The following files were remediated:"
comment.append(msg)
comment.append(_nested_output(altered_files[modified_pkg]))
else:
comment.append(msg)
if failed:
# Add a comment for each package in failed with its pkg.verify output
for failed_pkg in failed:
if sources:
pkgstr = failed_pkg
else:
pkgstr = _get_desired_pkg(failed_pkg, desired)
msg = "Reinstall was not successful for package {}.".format(pkgstr)
if failed_pkg in altered_files:
msg += " The following files could not be remediated:"
comment.append(msg)
comment.append(_nested_output(altered_files[failed_pkg]))
else:
comment.append(msg)
result = False
ret = {
"name": name,
"changes": changes,
"result": result,
"comment": "\n".join(comment),
}
if warnings:
ret.setdefault("warnings", []).extend(warnings)
return ret
def downloaded(
    name, version=None, pkgs=None, fromrepo=None, ignore_epoch=None, **kwargs
):
    """
    .. versionadded:: 2017.7.0
    Ensure that the package is downloaded, and that it is the correct version
    (if specified).
    .. note::
        Any argument which is either a) not explicitly defined for this state,
        or b) not a global state argument like ``saltenv``, or
        ``reload_modules``, will be passed through to the call to
        ``pkg.install`` to download the package(s). For example, you can include
        a ``disablerepo`` argument on platforms that use yum/dnf to disable
        that repo:
    .. code-block:: yaml
        mypkg:
          pkg.downloaded:
            - disablerepo: base,updates
        To see what is supported, check :ref:`this page <virtual-pkg>` to find
        the documentation for your platform's ``pkg`` module, then look at the
        documentation for the ``install`` function.
        Any argument that is passed through to the ``install`` function, which
        is not defined for that function, will be silently ignored.
    Currently supported for the following pkg providers:
    :mod:`yumpkg <salt.modules.yumpkg>`, :mod:`zypper <salt.modules.zypper>` and :mod:`apt <salt.modules.aptpkg>`
    :param str name:
        The name of the package to be downloaded. This parameter is ignored if
        "pkgs" is used. Additionally, please note that this option can
        only be used to download packages from a software repository.
    :param str version:
        Download a specific version of a package.
        .. important::
            As of version 2015.8.7, for distros which use yum/dnf, packages
            which have a version with a nonzero epoch (that is, versions which
            start with a number followed by a colon) must have the epoch included
            when specifying the version number. For example:
            .. code-block:: yaml
                vim-enhanced:
                  pkg.downloaded:
                    - version: 2:7.4.160-1.el7
            An **ignore_epoch** argument has been added which causes the
            epoch to be disregarded when the state checks to see if the desired
            version was installed.
        You can install a specific version when using the ``pkgs`` argument by
        including the version after the package:
        .. code-block:: yaml
            common_packages:
              pkg.downloaded:
                - pkgs:
                  - unzip
                  - dos2unix
                  - salt-minion: 2015.8.5-1.el6
    :param bool resolve_capabilities:
        Turn on resolving capabilities. This allows one to name "provides" or alias names for packages.
        .. versionadded:: 2018.3.0
    Example:
    .. code-block:: yaml
        zsh:
          pkg.downloaded:
            - version: 5.0.5-4.63
            - fromrepo: "myrepository"
    :return: A standard state return dictionary (name/changes/result/comment).
    :rtype: dict
    """
    ret = {"name": name, "changes": {}, "result": None, "comment": ""}
    # Bail out on platforms whose pkg module cannot report downloaded packages;
    # without pkg.list_downloaded the result cannot be verified below.
    if "pkg.list_downloaded" not in __salt__:
        ret["result"] = False
        ret["comment"] = "The pkg.downloaded state is not available on this platform"
        return ret
    # An explicitly empty pkgs list is a no-op, not an error.
    if isinstance(pkgs, list) and len(pkgs) == 0:
        ret["result"] = True
        ret["comment"] = "No packages to download provided"
        return ret
    # If just a name (and optionally a version) is passed, just pack them into
    # the pkgs argument.
    if name and not pkgs:
        if version:
            pkgs = [{name: version}]
            version = None
        else:
            pkgs = [name]
    # It doesn't make sense here to receive 'downloadonly' as kwargs
    # as we're explicitly passing 'downloadonly=True' to execution module.
    if "downloadonly" in kwargs:
        del kwargs["downloadonly"]
    # Translate any capability/alias names into real package names.
    pkgs, _refresh = _resolve_capabilities(pkgs, **kwargs)
    # Only downloading not yet downloaded packages
    targets = _find_download_targets(
        name, version, pkgs, fromrepo=fromrepo, ignore_epoch=ignore_epoch, **kwargs
    )
    # _find_download_targets may return a complete state return dict
    # (nothing to do, or an error it has already formatted) — pass it through.
    if isinstance(targets, dict) and "result" in targets:
        return targets
    elif not isinstance(targets, dict):
        ret["result"] = False
        ret["comment"] = "An error was encountered while checking targets: {}".format(
            targets
        )
        return ret
    # In test mode, only report what would be downloaded (result stays None).
    if __opts__["test"]:
        summary = ", ".join(targets)
        ret["comment"] = "The following packages would be downloaded: {}".format(
            summary
        )
        return ret
    try:
        # downloadonly=True makes pkg.install fetch the packages without
        # installing them.
        pkg_ret = __salt__["pkg.install"](
            name=name,
            pkgs=pkgs,
            version=version,
            downloadonly=True,
            fromrepo=fromrepo,
            ignore_epoch=ignore_epoch,
            **kwargs
        )
        ret["result"] = True
        ret["changes"].update(pkg_ret)
    except CommandExecutionError as exc:
        ret = {"name": name, "result": False}
        if exc.info:
            # Get information for state return from the exception.
            ret["changes"] = exc.info.get("changes", {})
            ret["comment"] = exc.strerror_without_changes
        else:
            ret["changes"] = {}
            ret[
                "comment"
            ] = "An error was encountered while downloading package(s): {}".format(exc)
        return ret
    # Re-check the download cache to confirm every target actually arrived.
    new_pkgs = __salt__["pkg.list_downloaded"](**kwargs)
    _ok, failed = _verify_install(targets, new_pkgs, ignore_epoch=ignore_epoch)
    if failed:
        summary = ", ".join([_get_desired_pkg(x, targets) for x in failed])
        ret["result"] = False
        ret["comment"] = "The following packages failed to download: {}".format(summary)
    # Nothing failed and pkg.install reported no changes: the targets are
    # already present in the download cache.
    if not ret["changes"] and not ret["comment"]:
        ret["result"] = True
        ret["comment"] = "Packages downloaded: {}".format(", ".join(targets))
    return ret
def patch_installed(name, advisory_ids=None, downloadonly=None, **kwargs):
    """
    .. versionadded:: 2017.7.0
    Ensure that the packages related to the given advisory ids are installed.
    .. note::
        Any argument which is either a) not explicitly defined for this state,
        or b) not a global state argument like ``saltenv`` or
        ``reload_modules``, will be passed through to the call to
        ``pkg.install`` to install the patch(es).
        To see what is supported, check :ref:`this page <virtual-pkg>` to find
        the documentation for your platform's ``pkg`` module, then look at the
        documentation for the ``install`` function. Any argument passed through
        to ``install`` which is not defined for that function is silently
        ignored.
    Currently supported for the following pkg providers:
    :mod:`yumpkg <salt.modules.yumpkg>` and :mod:`zypper <salt.modules.zypper>`
    :param str name: State id; also used as the package name when no
        ``advisory_ids`` are given.
    :param list advisory_ids: Advisory ids whose related packages should be
        installed.
    :param bool downloadonly: Only download the patch packages instead of
        installing them.
    :return: A standard state return dictionary (name/changes/result/comment).
    :rtype: dict
    CLI Example:
    .. code-block:: yaml
        issue-foo-fixed:
          pkg.patch_installed:
            - advisory_ids:
              - SUSE-SLE-SERVER-12-SP2-2017-185
              - SUSE-SLE-SERVER-12-SP2-2017-150
              - SUSE-SLE-SERVER-12-SP2-2017-120
    """
    ret = {"name": name, "changes": {}, "result": None, "comment": ""}
    # Patch support requires pkg.list_patches; bail out where it is absent.
    if "pkg.list_patches" not in __salt__:
        ret["result"] = False
        ret[
            "comment"
        ] = "The pkg.patch_installed state is not available on this platform"
        return ret
    # An explicitly empty advisory list is a no-op, not an error.
    if isinstance(advisory_ids, list) and not advisory_ids:
        ret["result"] = True
        ret["comment"] = "No advisory ids provided"
        return ret
    # Determine which advisory patches still need to be applied.
    targets = _find_advisory_targets(name, advisory_ids, **kwargs)
    if isinstance(targets, dict) and "result" in targets:
        # The helper produced a complete state return; pass it through.
        return targets
    if not isinstance(targets, list):
        ret["result"] = False
        ret["comment"] = "An error was encountered while checking targets: {}".format(
            targets
        )
        return ret
    # In test mode, only report what would happen (result stays None).
    if __opts__["test"]:
        ret[
            "comment"
        ] = "The following advisory patches would be downloaded: {}".format(
            ", ".join(targets)
        )
        return ret
    try:
        install_result = __salt__["pkg.install"](
            name=name, advisory_ids=advisory_ids, downloadonly=downloadonly, **kwargs
        )
    except CommandExecutionError as err:
        ret = {"name": name, "result": False}
        if err.info:
            # The exception carries partial state data; surface it.
            ret["changes"] = err.info.get("changes", {})
            ret["comment"] = err.strerror_without_changes
        else:
            ret["changes"] = {}
            ret[
                "comment"
            ] = "An error was encountered while downloading package(s): {}".format(err)
        return ret
    ret["result"] = True
    ret["changes"].update(install_result)
    # No changes and no comment means everything was already in place.
    if not ret["changes"] and not ret["comment"]:
        status = "downloaded" if downloadonly else "installed"
        ret["result"] = True
        ret[
            "comment"
        ] = "Advisory patch is not needed or related packages are already {}".format(
            status
        )
    return ret
def patch_downloaded(name, advisory_ids=None, **kwargs):
    """
    .. versionadded:: 2017.7.0
    Ensure that the packages related to the given advisory ids are downloaded.
    Currently supported for the following pkg providers:
    :mod:`yumpkg <salt.modules.yumpkg>` and :mod:`zypper <salt.modules.zypper>`
    :param str name: State id; also used as the package name when no
        ``advisory_ids`` are given.
    :param list advisory_ids: Advisory ids whose related packages should be
        downloaded.
    :return: A standard state return dictionary (name/changes/result/comment).
    :rtype: dict
    CLI Example:
    .. code-block:: yaml
        preparing-to-fix-issues:
          pkg.patch_downloaded:
            - advisory_ids:
              - SUSE-SLE-SERVER-12-SP2-2017-185
              - SUSE-SLE-SERVER-12-SP2-2017-150
              - SUSE-SLE-SERVER-12-SP2-2017-120
    """
    # Patch support requires pkg.list_patches; bail out where it is absent.
    if "pkg.list_patches" not in __salt__:
        return {
            "name": name,
            "result": False,
            "changes": {},
            "comment": (
                "The pkg.patch_downloaded state is not available on this platform"
            ),
        }
    # 'downloadonly' is forced to True below, so discard any caller-supplied
    # value rather than passing a duplicate keyword to patch_installed.
    kwargs.pop("downloadonly", None)
    # Delegate to patch_installed in download-only mode.
    return patch_installed(
        name=name, advisory_ids=advisory_ids, downloadonly=True, **kwargs
    )
def latest(
name,
refresh=None,
fromrepo=None,
skip_verify=False,
pkgs=None,
watch_flags=True,
**kwargs
):
"""
Ensure that the named package is installed and the latest available
package. If the package can be updated, this state function will update
the package. Generally it is better for the
:mod:`installed <salt.states.pkg.installed>` function to be
used, as :mod:`latest <salt.states.pkg.latest>` will update the package
whenever a new package is available.
.. note::
Any argument which is either a) not explicitly defined for this state,
or b) not a global state argument like ``saltenv``, or
``reload_modules``, will be passed through to the call to
``pkg.install`` to install the package(s). For example, you can include
a ``disablerepo`` argument on platforms that use yum/dnf to disable
that repo:
.. code-block:: yaml
mypkg:
pkg.latest:
- disablerepo: base,updates
To see what is supported, check :ref:`this page <virtual-pkg>` to find
the documentation for your platform's ``pkg`` module, then look at the
documentation for the ``install`` function.
Any argument that is passed through to the ``install`` function, which
is not defined for that function, will be silently ignored.
name
The name of the package to maintain at the latest available version.
This parameter is ignored if "pkgs" is used.
fromrepo
Specify a repository from which to install
skip_verify
Skip the GPG verification check for the package to be installed
refresh
This parameter controls whether or not the package repo database is
updated prior to checking for the latest available version of the
requested packages.
If ``True``, the package database will be refreshed (``apt-get update``
or equivalent, depending on platform) before checking for the latest
available version of the requested packages.
If ``False``, the package database will *not* be refreshed before
checking.
If unset, then Salt treats package database refreshes differently
depending on whether or not a ``pkg`` state has been executed already
during the current Salt run. Once a refresh has been performed in a
``pkg`` state, for the remainder of that Salt run no other refreshes
will be performed for ``pkg`` states which do not explicitly set
``refresh`` to ``True``. This prevents needless additional refreshes
from slowing down the Salt run.
:param str cache_valid_time:
.. versionadded:: 2016.11.0
This parameter sets the value in seconds after which the cache is
marked as invalid, and a cache update is necessary. This overwrites
the ``refresh`` parameter's default behavior.
Example:
.. code-block:: yaml
httpd:
pkg.latest:
- refresh: True
- cache_valid_time: 300
In this case, a refresh will not take place for 5 minutes since the last
``apt-get update`` was executed on the system.
.. note::
This parameter is available only on Debian based distributions and
has no effect on the rest.
:param bool resolve_capabilities:
Turn on resolving capabilities. This allow one to name "provides" or alias names for packages.
.. versionadded:: 2018.3.0
Multiple Package Installation Options:
(Not yet supported for: FreeBSD, OpenBSD, MacOS, and Solaris pkgutil)
pkgs
A list of packages to maintain at the latest available version.
.. code-block:: yaml
mypkgs:
pkg.latest:
- pkgs:
- foo
- bar
- baz
install_recommends
Whether to install the packages marked as recommended. Default is
``True``. Currently only works with APT-based systems.
.. versionadded:: 2015.5.0
.. code-block:: yaml
httpd:
pkg.latest:
- install_recommends: False
only_upgrade
Only upgrade the packages, if they are already installed. Default is
``False``. Currently only works with APT-based systems.
.. versionadded:: 2015.5.0
.. code-block:: yaml
httpd:
pkg.latest:
- only_upgrade: True
.. note::
If this parameter is set to True and the package is not already
installed, the state will fail.
report_reboot_exit_codes
If the installer exits with a recognized exit code indicating that
a reboot is required, the module function
*win_system.set_reboot_required_witnessed*
will be called, preserving the knowledge of this event
for the remainder of the current boot session. For the time being,
``3010`` is the only recognized exit code, but this
is subject to future refinement. The value of this param
defaults to ``True``. This parameter has no effect on
non-Windows systems.
.. versionadded:: 2016.11.0
.. code-block:: yaml
ms vcpp installed:
pkg.latest:
- name: ms-vcpp
- report_reboot_exit_codes: False
"""
refresh = salt.utils.pkg.check_refresh(__opts__, refresh)
if kwargs.get("sources"):
return {
"name": name,
"changes": {},
"result": False,
"comment": 'The "sources" parameter is not supported.',
}
elif pkgs:
desired_pkgs = list(_repack_pkgs(pkgs).keys()) # pylint: disable=not-callable
if not desired_pkgs:
# Badly-formatted SLS
return {
"name": name,
"changes": {},
"result": False,
"comment": 'Invalidly formatted "pkgs" parameter. See minion log.',
}
else:
if isinstance(pkgs, list) and len(pkgs) == 0:
return {
"name": name,
"changes": {},
"result": True,
"comment": "No packages to install provided",
}
else:
desired_pkgs = [name]
kwargs["saltenv"] = __env__
# check if capabilities should be checked and modify the requested packages
# accordingly.
desired_pkgs, refresh = _resolve_capabilities(
desired_pkgs, refresh=refresh, **kwargs
)
try:
avail = __salt__["pkg.latest_version"](
*desired_pkgs, fromrepo=fromrepo, refresh=refresh, **kwargs
)
except CommandExecutionError as exc:
return {
"name": name,
"changes": {},
"result": False,
"comment": (
"An error was encountered while checking the "
"newest available version of package(s): {}".format(exc)
),
}
try:
cur = __salt__["pkg.version"](*desired_pkgs, **kwargs)
except CommandExecutionError as exc:
return {"name": name, "changes": {}, "result": False, "comment": exc.strerror}
# Repack the cur/avail data if only a single package is being checked
if isinstance(cur, str):
cur = {desired_pkgs[0]: cur}
if isinstance(avail, str):
avail = {desired_pkgs[0]: avail}
targets = {}
problems = []
for pkg in desired_pkgs:
if not avail.get(pkg):
# Package either a) is up-to-date, or b) does not exist
if not cur.get(pkg):
# Package does not exist
msg = "No information found for '{}'.".format(pkg)
log.error(msg)
problems.append(msg)
elif (
watch_flags
and __grains__.get("os") == "Gentoo"
and __salt__["portage_config.is_changed_uses"](pkg)
):
# Package is up-to-date, but Gentoo USE flags are changing so
# we need to add it to the targets
targets[pkg] = cur[pkg]
else:
# Package either a) is not installed, or b) is installed and has an
# upgrade available
targets[pkg] = avail[pkg]
if problems:
return {
"name": name,
"changes": {},
"result": False,
"comment": " ".join(problems),
}
if targets:
# Find up-to-date packages
if not pkgs:
# There couldn't have been any up-to-date packages if this state
# only targeted a single package and is being allowed to proceed to
# the install step.
up_to_date = []
else:
up_to_date = [x for x in pkgs if x not in targets]
if __opts__["test"]:
comments = []
comments.append(
"The following packages would be installed/upgraded: "
+ ", ".join(sorted(targets))
)
if up_to_date:
up_to_date_count = len(up_to_date)
if up_to_date_count <= 10:
comments.append(
"The following packages are already up-to-date: "
+ ", ".join(
["{} ({})".format(x, cur[x]) for x in sorted(up_to_date)]
)
)
else:
comments.append(
"{} packages are already up-to-date".format(up_to_date_count)
)
return {
"name": name,
"changes": {},
"result": None,
"comment": "\n".join(comments),
}
if salt.utils.platform.is_windows():
# pkg.install execution module on windows ensures the software
# package is installed when no version is specified, it does not
# upgrade the software to the latest. This is per the design.
# Build updated list of pkgs *with verion number*, exclude
# non-targeted ones
targeted_pkgs = [{x: targets[x]} for x in targets]
else:
# Build updated list of pkgs to exclude non-targeted ones
targeted_pkgs = list(targets)
# No need to refresh, if a refresh was necessary it would have been
# performed above when pkg.latest_version was run.
try:
changes = __salt__["pkg.install"](
name=None,
refresh=False,
fromrepo=fromrepo,
skip_verify=skip_verify,
pkgs=targeted_pkgs,
**kwargs
)
except CommandExecutionError as exc:
return {
"name": name,
"changes": {},
"result": False,
"comment": (
"An error was encountered while installing package(s): {}".format(
exc
)
),
}
if changes:
# Find failed and successful updates
failed = [
x
for x in targets
if not changes.get(x)
or changes[x].get("new") != targets[x]
and targets[x] != "latest"
]
successful = [x for x in targets if x not in failed]
comments = []
if failed:
msg = "The following packages failed to update: {}".format(
", ".join(sorted(failed))
)
comments.append(msg)
if successful:
msg = (
"The following packages were successfully "
"installed/upgraded: "
"{}".format(", ".join(sorted(successful)))
)
comments.append(msg)
if up_to_date:
if len(up_to_date) <= 10:
msg = "The following packages were already up-to-date: {}".format(
", ".join(sorted(up_to_date))
)
else:
msg = "{} packages were already up-to-date ".format(len(up_to_date))
comments.append(msg)
return {
"name": name,
"changes": changes,
"result": False if failed else True,
"comment": " ".join(comments),
}
else:
if len(targets) > 10:
comment = (
"{} targeted packages failed to update. "
"See debug log for details.".format(len(targets))
)
elif len(targets) > 1:
comment = (
"The following targeted packages failed to update. "
"See debug log for details: ({}).".format(
", ".join(sorted(targets))
)
)
else:
comment = "Package {} failed to update.".format(
next(iter(list(targets.keys())))
)
if up_to_date:
if len(up_to_date) <= 10:
comment += (
" The following packages were already up-to-date: {}".format(
", ".join(sorted(up_to_date))
)
)
else:
comment += "{} packages were already up-to-date".format(
len(up_to_date)
)
return {
"name": name,
"changes": changes,
"result": False,
"comment": comment,
}
else:
if len(desired_pkgs) > 10:
comment = "All {} packages are up-to-date.".format(len(desired_pkgs))
elif len(desired_pkgs) > 1:
comment = "All packages are up-to-date ({}).".format(
", ".join(sorted(desired_pkgs))
)
else:
comment = "Package {} is already up-to-date".format(desired_pkgs[0])
return {"name": name, "changes": {}, "result": True, "comment": comment}
def _uninstall(
    action="remove",
    name=None,
    version=None,
    pkgs=None,
    normalize=True,
    ignore_epoch=None,
    **kwargs
):
    """
    Common backend for the ``removed`` and ``purged`` states.

    action
        Either ``"remove"`` or ``"purge"``; selects which ``pkg`` execution
        module function is called. Any other value is treated as a bug.
    name
        Single package name (ignored when ``pkgs`` is passed).
    version
        Specific version to remove; other installed versions are left alone.
    pkgs
        List of packages (optionally with versions) to remove.
    normalize
        Whether to normalize (e.g. strip architecture from) package names
        before matching.
    ignore_epoch
        Passed through to the target-finding logic for version comparison.

    Returns a standard state return dict (name/changes/result/comment).
    """
    # Guard against being called with anything other than the two supported
    # pkg module functions.
    if action not in ("remove", "purge"):
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": "Invalid action '{}'. This is probably a bug.".format(action),
        }
    try:
        # Expand name/pkgs into a {pkgname: version_or_None} mapping.
        pkg_params = __salt__["pkg_resource.parse_targets"](
            name, pkgs, normalize=normalize
        )[0]
    except MinionError as exc:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": "An error was encountered while parsing targets: {}".format(exc),
        }
    targets = _find_remove_targets(
        name, version, pkgs, normalize, ignore_epoch=ignore_epoch, **kwargs
    )
    # _find_remove_targets returns a complete state dict when there is
    # nothing to do or an error occurred; pass it straight through.
    if isinstance(targets, dict) and "result" in targets:
        return targets
    elif not isinstance(targets, list):
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": "An error was encountered while checking targets: {}".format(
                targets
            ),
        }
    if action == "purge":
        # For a purge, packages that are already removed but still have
        # config files left behind are also targets.
        old_removed = __salt__["pkg.list_pkgs"](
            versions_as_list=True, removed=True, **kwargs
        )
        targets.extend([x for x in pkg_params if x in old_removed])
    targets.sort()
    if not targets:
        return {
            "name": name,
            "changes": {},
            "result": True,
            "comment": "None of the targeted packages are installed{}".format(
                " or partially installed" if action == "purge" else ""
            ),
        }
    if __opts__["test"]:
        # Test mode: report what would happen without touching the system.
        _changes = {}
        _changes.update({x: {"new": "{}d".format(action), "old": ""} for x in targets})
        return {
            "name": name,
            "changes": _changes,
            "result": None,
            "comment": "The following packages will be {}d: {}.".format(
                action, ", ".join(targets)
            ),
        }
    # Dispatch to pkg.remove or pkg.purge, then re-list to verify.
    changes = __salt__["pkg.{}".format(action)](
        name, pkgs=pkgs, version=version, **kwargs
    )
    new = __salt__["pkg.list_pkgs"](versions_as_list=True, **kwargs)
    failed = []
    for param in pkg_params:
        if __grains__["os_family"] in ["Suse", "RedHat"]:
            # Check if the package version set to be removed is actually removed:
            if param in new and not pkg_params[param]:
                failed.append(param)
            elif param in new and pkg_params[param] in new[param]:
                failed.append(param + "-" + pkg_params[param])
        elif param in new:
            failed.append(param)
    if action == "purge":
        # Anything still present in the removed-but-not-purged list did not
        # get fully purged.
        new_removed = __salt__["pkg.list_pkgs"](
            versions_as_list=True, removed=True, **kwargs
        )
        failed.extend([x for x in pkg_params if x in new_removed])
    failed.sort()
    if failed:
        return {
            "name": name,
            "changes": changes,
            "result": False,
            "comment": "The following packages failed to {}: {}.".format(
                action, ", ".join(failed)
            ),
        }
    comments = []
    # Requested packages that were never installed are reported separately
    # from the ones actually removed/purged.
    not_installed = sorted([x for x in pkg_params if x not in targets])
    if not_installed:
        comments.append(
            "The following packages were not installed: {}".format(
                ", ".join(not_installed)
            )
        )
        comments.append(
            "The following packages were {}d: {}.".format(action, ", ".join(targets))
        )
    else:
        comments.append("All targeted packages were {}d.".format(action))
    return {
        "name": name,
        "changes": changes,
        "result": True,
        "comment": " ".join(comments),
    }
def removed(name, version=None, pkgs=None, normalize=True, ignore_epoch=None, **kwargs):
    """
    Verify that a package is not installed, calling ``pkg.remove`` if
    necessary to remove the package.

    name
        The name of the package to be removed. Ignored when ``pkgs`` is used.

    version
        Only remove the package if this version is installed; when the
        installed version does not match, do nothing. On yum/dnf platforms a
        nonzero epoch must be included in the version string (e.g.
        ``2:7.4.160-1.el7``) unless ``ignore_epoch`` is used.

    normalize : True
        Normalize the package name by removing the architecture when it
        differs from the architecture of the operating system. Disable for
        packages whose real name includes an architecture suffix.

        .. versionadded:: 2015.8.0

    ignore_epoch : None
        When unset, the epoch is implicitly ignored if the desired version
        contains none. Set to ``True`` to always ignore the epoch, or
        ``False`` to strictly enforce it.

        .. versionadded:: 2015.8.9

        .. versionchanged:: 3001
            In prior releases, epochs were strictly enforced unless this
            argument was set to ``True``.

    Multiple Package Options:

    pkgs
        A list of packages to remove, passed as a python list. Overrides the
        ``name`` parameter. Version numbers may be included.

        .. versionadded:: 0.16.0
    """
    kwargs["saltenv"] = __env__
    try:
        return _uninstall(
            action="remove",
            name=name,
            version=version,
            pkgs=pkgs,
            normalize=normalize,
            ignore_epoch=ignore_epoch,
            **kwargs
        )
    except CommandExecutionError as exc:
        if exc.info:
            # The exception carries partial state data gathered before the
            # failure; surface it in the state return.
            changes = exc.info.get("changes", {})
            comment = exc.strerror_without_changes
        else:
            changes = {}
            comment = "An error was encountered while removing package(s): {}".format(
                exc
            )
        return {"name": name, "result": False, "changes": changes, "comment": comment}
def purged(name, version=None, pkgs=None, normalize=True, ignore_epoch=None, **kwargs):
    """
    Verify that a package is not installed, calling ``pkg.purge`` if
    necessary to purge the package. All configuration files are also removed.

    name
        The name of the package to be purged. Ignored when ``pkgs`` is used.

    version
        Only purge the package if this version is installed; when the
        installed version does not match, do nothing. On yum/dnf platforms a
        nonzero epoch must be included in the version string (e.g.
        ``2:7.4.160-1.el7``) unless ``ignore_epoch`` is used.

    normalize : True
        Normalize the package name by removing the architecture when it
        differs from the architecture of the operating system. Disable for
        packages whose real name includes an architecture suffix.

        .. versionadded:: 2015.8.0

    ignore_epoch : None
        When unset, the epoch is implicitly ignored if the desired version
        contains none. Set to ``True`` to always ignore the epoch, or
        ``False`` to strictly enforce it.

        .. versionadded:: 2015.8.9

        .. versionchanged:: 3001
            In prior releases, epochs were strictly enforced unless this
            argument was set to ``True``.

    Multiple Package Options:

    pkgs
        A list of packages to purge, passed as a python list. Overrides the
        ``name`` parameter. Version numbers may be included.

        .. versionadded:: 0.16.0
    """
    kwargs["saltenv"] = __env__
    try:
        return _uninstall(
            action="purge",
            name=name,
            version=version,
            pkgs=pkgs,
            normalize=normalize,
            ignore_epoch=ignore_epoch,
            **kwargs
        )
    except CommandExecutionError as exc:
        if exc.info:
            # The exception carries partial state data gathered before the
            # failure; surface it in the state return.
            changes = exc.info.get("changes", {})
            comment = exc.strerror_without_changes
        else:
            changes = {}
            comment = "An error was encountered while purging package(s): {}".format(
                exc
            )
        return {"name": name, "result": False, "changes": changes, "comment": comment}
def uptodate(name, refresh=False, pkgs=None, **kwargs):
    """
    .. versionadded:: 2014.7.0

    .. versionchanged:: 2018.3.0
        Added support for the ``pkgin`` provider.

    Verify that the system is completely up to date.

    name
        The name has no functional value and is only used as a tracking
        reference.

    refresh
        Refresh the package database before checking for new upgrades.

    pkgs
        List of packages to upgrade.

    :param str cache_valid_time:
        This parameter sets the value in seconds after which the cache is
        marked as invalid, and a cache update is necessary. This overwrites
        the ``refresh`` parameter's default behavior.

        When cache_valid_time is set, a refresh will not take place for the
        given number of seconds since the last ``apt-get update`` was
        executed on the system.

        .. note::

            This parameter is available only on Debian-based distributions
            and has no effect on the rest.

    :param bool resolve_capabilities:
        Turn on resolving capabilities. This allows one to name "provides" or
        alias names for packages.

        .. versionadded:: 2018.3.0

    kwargs
        Any keyword arguments to pass through to ``pkg.upgrade``.

        .. versionadded:: 2015.5.0
    """
    # Pessimistic default; overwritten on every successful path below.
    ret = {"name": name, "changes": {}, "result": False, "comment": "Failed to update"}
    if "pkg.list_upgrades" not in __salt__:
        ret["comment"] = "State pkg.uptodate is not available"
        return ret
    # emerge --update doesn't appear to support repo notation
    if "fromrepo" in kwargs and __grains__["os"] == "Gentoo":
        ret["comment"] = "'fromrepo' argument not supported on this platform"
        return ret
    if isinstance(refresh, bool):
        pkgs, refresh = _resolve_capabilities(pkgs, refresh=refresh, **kwargs)
        try:
            # All pending upgrades, then a mapping of old -> new versions for
            # the state's changes dict.
            packages = __salt__["pkg.list_upgrades"](refresh=refresh, **kwargs)
            expected = {
                pkgname: {
                    "new": pkgver,
                    "old": __salt__["pkg.version"](pkgname, **kwargs),
                }
                for pkgname, pkgver in packages.items()
            }
            # When an explicit package list was given, restrict both views to
            # just those packages.
            if isinstance(pkgs, list):
                packages = [pkg for pkg in packages if pkg in pkgs]
                expected = {
                    pkgname: pkgver
                    for pkgname, pkgver in expected.items()
                    if pkgname in pkgs
                }
        except Exception as exc:  # pylint: disable=broad-except
            ret["comment"] = str(exc)
            return ret
    else:
        ret["comment"] = "refresh must be either True or False"
        return ret
    if not packages:
        ret["comment"] = "System is already up-to-date"
        ret["result"] = True
        return ret
    elif __opts__["test"]:
        # Test mode: report the would-be changes without upgrading.
        ret["comment"] = "System update will be performed"
        ret["changes"] = expected
        ret["result"] = None
        return ret
    try:
        ret["changes"] = __salt__["pkg.upgrade"](refresh=refresh, pkgs=pkgs, **kwargs)
    except CommandExecutionError as exc:
        if exc.info:
            # Get information for state return from the exception.
            ret["changes"] = exc.info.get("changes", {})
            ret["comment"] = exc.strerror_without_changes
        else:
            ret["changes"] = {}
            ret[
                "comment"
            ] = "An error was encountered while updating packages: {}".format(exc)
        return ret
    # If a package list was provided, ensure those packages were updated
    missing = []
    if isinstance(pkgs, list):
        missing = [pkg for pkg in expected.keys() if pkg not in ret["changes"]]
    if missing:
        ret["comment"] = "The following package(s) failed to update: {}".format(
            ", ".join(missing)
        )
        ret["result"] = False
    else:
        ret["comment"] = "Upgrade ran successfully"
        ret["result"] = True
    return ret
def group_installed(name, skip=None, include=None, **kwargs):
    """
    .. versionadded:: 2015.8.0

    .. versionchanged:: 2016.11.0
        Added support in :mod:`pacman <salt.modules.pacman>`

    Ensure that an entire package group is installed. This state is currently
    only supported for the :mod:`yum <salt.modules.yumpkg>` and :mod:`pacman <salt.modules.pacman>`
    package managers.

    skip
        Packages that would normally be installed by the package group
        ("default" packages), which should not be installed.

        .. code-block:: yaml

            Load Balancer:
              pkg.group_installed:
                - skip:
                  - piranha

    include
        Packages which are included in a group, which would not normally be
        installed by a ``yum groupinstall`` ("optional" packages). Note that
        this will not enforce group membership; if you include packages which
        are not members of the specified groups, they will still be installed.

        .. code-block:: yaml

            Load Balancer:
              pkg.group_installed:
                - include:
                  - haproxy

        .. versionchanged:: 2016.3.0
            This option can no longer be passed as a comma-separated list, it
            must now be passed as a list (as shown in the above example).

    .. note::
        Because this is essentially a wrapper around :py:func:`pkg.install
        <salt.modules.yumpkg.install>`, any argument which can be passed to
        pkg.install may also be included here, and it will be passed on to the
        call to :py:func:`pkg.install <salt.modules.yumpkg.install>`.
    """
    ret = {"name": name, "changes": {}, "result": False, "comment": ""}
    if "pkg.group_diff" not in __salt__:
        ret["comment"] = "pkg.group_install not available for this platform"
        return ret
    # Normalize skip/include to lists of strings (SLS data may yield ints etc).
    if skip is None:
        skip = []
    else:
        if not isinstance(skip, list):
            ret["comment"] = "skip must be formatted as a list"
            return ret
        for idx, item in enumerate(skip):
            if not isinstance(item, str):
                skip[idx] = str(item)
    if include is None:
        include = []
    else:
        if not isinstance(include, list):
            ret["comment"] = "include must be formatted as a list"
            return ret
        for idx, item in enumerate(include):
            if not isinstance(item, str):
                include[idx] = str(item)
    try:
        # Mapping of mandatory/default/optional -> installed/not installed
        # package lists for the group.
        diff = __salt__["pkg.group_diff"](name)
    except CommandExecutionError as err:
        ret[
            "comment"
        ] = "An error was encountered while installing/updating group '{}': {}.".format(
            name, err
        )
        return ret
    # Mandatory members of the group can never be skipped.
    mandatory = diff["mandatory"]["installed"] + diff["mandatory"]["not installed"]
    invalid_skip = [x for x in mandatory if x in skip]
    if invalid_skip:
        ret[
            "comment"
        ] = "The following mandatory packages cannot be skipped: {}".format(
            ", ".join(invalid_skip)
        )
        return ret
    # Install everything mandatory that's missing, plus non-skipped default
    # packages and any explicitly included extras.
    targets = diff["mandatory"]["not installed"]
    targets.extend([x for x in diff["default"]["not installed"] if x not in skip])
    targets.extend(include)
    if not targets:
        ret["result"] = True
        ret["comment"] = "Group '{}' is already installed".format(name)
        return ret
    # Distinguishes "install the group" from "update a partially-installed
    # group" in the comments below.
    partially_installed = (
        diff["mandatory"]["installed"]
        or diff["default"]["installed"]
        or diff["optional"]["installed"]
    )
    if __opts__["test"]:
        ret["result"] = None
        if partially_installed:
            ret[
                "comment"
            ] = "Group '{}' is partially installed and will be updated".format(name)
        else:
            ret["comment"] = "Group '{}' will be installed".format(name)
        return ret
    try:
        ret["changes"] = __salt__["pkg.install"](pkgs=targets, **kwargs)
    except CommandExecutionError as exc:
        ret = {"name": name, "result": False}
        if exc.info:
            # Get information for state return from the exception.
            ret["changes"] = exc.info.get("changes", {})
            ret["comment"] = exc.strerror_without_changes
        else:
            ret["changes"] = {}
            ret["comment"] = (
                "An error was encountered while "
                "installing/updating group '{}': {}".format(name, exc)
            )
        return ret
    # Verify every target actually ended up installed.
    failed = [x for x in targets if x not in __salt__["pkg.list_pkgs"](**kwargs)]
    if failed:
        ret["comment"] = "Failed to install the following packages: {}".format(
            ", ".join(failed)
        )
        return ret
    ret["result"] = True
    ret["comment"] = "Group '{}' was {}".format(
        name, "updated" if partially_installed else "installed"
    )
    return ret
def mod_init(low):
    """
    Set a flag to tell the install functions to refresh the package database,
    so the (potentially slow) refresh happens at most once per state run.

    Also runs the package manager module's own ``ex_mod_init`` when one is
    provided (e.g. :py:func:`salt.modules.ebuild.ex_mod_init`), which is
    expected to behave like a normal ``mod_init``.

    Returns the ``ex_mod_init`` result (or ``True``) for ``installed`` and
    ``latest`` runs, and ``False`` otherwise.
    """
    outcome = True
    if "pkg.ex_mod_init" in __salt__:
        outcome = __salt__["pkg.ex_mod_init"](low)
    if low["fun"] in ("installed", "latest"):
        # Write the refresh-tag file; install functions consume it so the
        # repo database is refreshed only once during the run.
        salt.utils.pkg.write_rtag(__opts__)
        return outcome
    return False
def mod_aggregate(low, chunks, running):
    """
    The mod_aggregate function which looks up all packages in the available
    low chunks and merges them into a single pkgs ref in the present low data.

    Only chunks that share this chunk's function and ``fromrepo``, have not
    already run, and have not already been aggregated are merged. Matching
    chunks are marked with ``__agg__`` so they become no-ops later.
    """
    pkgs = []
    # Set to "pkgs" or "sources" by the first matching chunk; the two styles
    # are never mixed in one aggregate.
    pkg_type = None
    agg_enabled = [
        "installed",
        "latest",
        "removed",
        "purged",
    ]
    if low.get("fun") not in agg_enabled:
        return low
    for chunk in chunks:
        tag = __utils__["state.gen_tag"](chunk)
        if tag in running:
            # Already ran the pkg state, skip aggregation
            continue
        if chunk.get("state") == "pkg":
            if "__agg__" in chunk:
                continue
            # Check for the same function
            if chunk.get("fun") != low.get("fun"):
                continue
            # Check for the same repo
            if chunk.get("fromrepo") != low.get("fromrepo"):
                continue
            # Check first if 'sources' was passed so we don't aggregate pkgs
            # and sources together.
            if "sources" in chunk:
                if pkg_type is None:
                    pkg_type = "sources"
                if pkg_type == "sources":
                    pkgs.extend(chunk["sources"])
                    chunk["__agg__"] = True
            else:
                # If hold exists in the chunk, do not add to aggregation
                # otherwise all packages will be held or unheld.
                # setting a package to be held/unheld is not as
                # time consuming as installing/uninstalling.
                if "hold" not in chunk:
                    if pkg_type is None:
                        pkg_type = "pkgs"
                    if pkg_type == "pkgs":
                        # Pull out the pkg names!
                        if "pkgs" in chunk:
                            pkgs.extend(chunk["pkgs"])
                            chunk["__agg__"] = True
                        elif "name" in chunk:
                            # Preserve any per-chunk version pin as a
                            # {name: version} mapping in the pkgs list.
                            version = chunk.pop("version", None)
                            if version is not None:
                                pkgs.append({chunk["name"]: version})
                            else:
                                pkgs.append(chunk["name"])
                            chunk["__agg__"] = True
    if pkg_type is not None and pkgs:
        if pkg_type in low:
            low[pkg_type].extend(pkgs)
        else:
            low[pkg_type] = pkgs
    return low
def mod_watch(name, **kwargs):
    """
    Install/reinstall a package based on a watch requisite.

    .. note::
        This state exists to support special handling of the ``watch``
        :ref:`requisite <requisites>`. It should not be called directly.

        Parameters for this function should be set by the state being
        triggered.
    """
    sfun = kwargs.pop("sfun", None)
    dispatch = {
        "purged": purged,
        "latest": latest,
        "removed": removed,
        "installed": installed,
    }
    handler = dispatch.get(sfun)
    if handler is not None:
        return handler(name, **kwargs)
    return {
        "name": name,
        "changes": {},
        "comment": "pkg.{} does not work with the watch requisite".format(sfun),
        "result": False,
    }
def mod_beacon(name, **kwargs):
    """
    Create a beacon to monitor a package or packages
    based on a beacon state argument.

    .. note::
        This state exists to support special handling of the ``beacon``
        state argument for supported state functions. It should not be
        called directly.
    """
    sfun = kwargs.pop("sfun", None)
    # Only installed/removed support beacon attachment.
    if sfun not in ("installed", "removed"):
        return {
            "name": name,
            "changes": {},
            "comment": "pkg.{} does not work with the mod_beacon state function".format(
                sfun
            ),
            "result": False,
        }
    if not kwargs.get("beacon"):
        return {
            "name": name,
            "changes": {},
            "comment": "Not adding beacon.",
            "result": True,
        }
    module = "pkg"
    # Delegate to the beacon state; default to watching just this package.
    return __states__["beacon.present"](
        name="beacon_{}_{}".format(module, name),
        pkgs=kwargs.get("pkgs", [name]),
        interval=60,
        beacon_module=module,
    )
| 35.672059 | 116 | 0.563891 |
import fnmatch
import logging
import os
import re
import salt.utils.pkg
import salt.utils.platform
import salt.utils.versions
from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError
from salt.modules.pkg_resource import _repack_pkgs
from salt.output import nested
from salt.utils.functools import namespaced_function as _namespaced_function
from salt.utils.odict import OrderedDict as _OrderedDict
_repack_pkgs = _namespaced_function(_repack_pkgs, globals())
if salt.utils.platform.is_windows():
from urllib.parse import urlparse as _urlparse
from salt.exceptions import SaltRenderError
import collections
import datetime
import errno
import time
from functools import cmp_to_key
from salt.modules.win_pkg import _get_package_info
from salt.modules.win_pkg import get_repo_data
from salt.modules.win_pkg import _get_repo_details
from salt.modules.win_pkg import _refresh_db_conditional
from salt.modules.win_pkg import refresh_db
from salt.modules.win_pkg import genrepo
from salt.modules.win_pkg import _repo_process_pkg_sls
from salt.modules.win_pkg import _get_latest_pkg_version
from salt.modules.win_pkg import _reverse_cmp_pkg_versions
_get_package_info = _namespaced_function(_get_package_info, globals())
get_repo_data = _namespaced_function(get_repo_data, globals())
_get_repo_details = _namespaced_function(_get_repo_details, globals())
_refresh_db_conditional = _namespaced_function(_refresh_db_conditional, globals())
refresh_db = _namespaced_function(refresh_db, globals())
genrepo = _namespaced_function(genrepo, globals())
_repo_process_pkg_sls = _namespaced_function(_repo_process_pkg_sls, globals())
_get_latest_pkg_version = _namespaced_function(_get_latest_pkg_version, globals())
_reverse_cmp_pkg_versions = _namespaced_function(
_reverse_cmp_pkg_versions, globals()
)
import salt.utils.msgpack as msgpack
from salt.utils.versions import LooseVersion
log = logging.getLogger(__name__)
def __virtual__():
    """Only make this state module available when the pkg execution module
    loaded on this minion."""
    if "pkg.install" not in __salt__:
        return (False, "pkg module could not be loaded")
    return True
def _get_comparison_spec(pkgver):
    """
    Split a version condition such as ``>=1.2.3`` into an
    ``(operator, version)`` pair, normalizing a bare or ``=`` operator
    to ``==``.
    """
    operator, version = salt.utils.pkg.split_comparison(pkgver.strip())
    return ("==" if operator in ("=", "") else operator), version
def _check_ignore_epoch(oper, desired_version, ignore_epoch=None):
if ignore_epoch is not None:
return ignore_epoch
return "<" not in oper and ">" not in oper and ":" not in desired_version
def _parse_version_string(version_conditions_string):
    """
    Return a list of ``(operator, version)`` pairs parsed from a
    comma-separated version condition string such as ``>=1.2,<2.0``.
    An empty or whitespace-only string yields an empty list.
    """
    stripped = version_conditions_string.strip()
    if not stripped:
        return []
    return [_get_comparison_spec(condition) for condition in stripped.split(",")]
def _fulfills_version_string(
    installed_versions,
    version_conditions_string,
    ignore_epoch=None,
    allow_updates=False,
):
    """
    Return ``True`` if any installed version satisfies every condition in
    ``version_conditions_string`` (e.g. ``>=1.2,<2.0``).

    When ``allow_updates`` is set and the string is a single ``==``
    condition, that condition is relaxed to ``>=`` so newer versions also
    qualify.
    """
    conditions = _parse_version_string(version_conditions_string)

    def _satisfies_all(candidate):
        # Short-circuits on the first failed condition, matching the
        # short-circuit of a chained ``and``.
        for oper, wanted in conditions:
            if allow_updates and len(conditions) == 1 and oper == "==":
                oper = ">="
            if not _fulfills_version_spec(
                [candidate], oper, wanted, ignore_epoch=ignore_epoch
            ):
                return False
        return True

    return any(_satisfies_all(ver) for ver in installed_versions)
def _fulfills_version_spec(versions, oper, desired_version, ignore_epoch=None):
    """
    Return ``True`` if any version in ``versions`` satisfies
    ``oper desired_version``. For ``==``, the desired version may also be a
    shell-style glob pattern.
    """
    cmp_func = __salt__.get("pkg.version_cmp")
    # On FreeBSD the version data may arrive wrapped as {"version": ...}.
    if (
        salt.utils.platform.is_freebsd()
        and isinstance(versions, dict)
        and "version" in versions
    ):
        versions = versions["version"]
    # The implicit-epoch decision depends only on the operator and desired
    # version, so compute it once up front.
    epoch_ignored = _check_ignore_epoch(oper, desired_version, ignore_epoch)
    for candidate in versions:
        if oper == "==" and fnmatch.fnmatch(candidate, desired_version):
            return True
        if salt.utils.versions.compare(
            ver1=candidate,
            oper=oper,
            ver2=desired_version,
            cmp_func=cmp_func,
            ignore_epoch=epoch_ignored,
        ):
            return True
    return False
def _find_unpurge_targets(desired, **kwargs):
    """
    Return the subset of ``desired`` package names that are in a
    removed-but-not-purged state (their configuration is still marked as
    desired), i.e. candidates for reinstallation rather than fresh install.

    desired
        Iterable of package names requested by the state.
    kwargs
        Passed through to ``pkg.list_pkgs``.
    """
    # Nothing requested: avoid the pkg.list_pkgs call entirely.
    if not desired:
        return []
    # Fix: the original invoked pkg.list_pkgs inside the comprehension
    # condition, once per desired package. The listing does not depend on
    # the loop variable, so call it a single time and reuse the result.
    purge_desired = __salt__["pkg.list_pkgs"](purge_desired=True, **kwargs)
    return [x for x in desired if x in purge_desired]
def _find_download_targets(
    name=None,
    version=None,
    pkgs=None,
    normalize=True,
    skip_suggestions=False,
    ignore_epoch=None,
    **kwargs
):
    """
    Inspect the packages already downloaded on the minion and work out
    which ones still need downloading.

    Returns a dict of ``{package_name: version}`` targets, or a complete
    state return dict when there is nothing to do or an error occurred.
    """
    cur_pkgs = __salt__["pkg.list_downloaded"](**kwargs)
    if pkgs:
        # Multi-package form: repack the ``pkgs`` argument into a dict.
        to_download = _repack_pkgs(pkgs, normalize=normalize)
        if not to_download:
            # Badly-formatted SLS data passed as ``pkgs``.
            return {
                "name": name,
                "changes": {},
                "result": False,
                "comment": "Invalidly formatted pkgs parameter. See minion log.",
            }
    else:
        # Single package requested via ``name``/``version``.
        if normalize:
            _normalize_name = __salt__.get(
                "pkg.normalize_name", lambda pkgname: pkgname
            )
            to_download = {_normalize_name(name): version}
        else:
            to_download = {name: version}
        cver = cur_pkgs.get(name, {})
        if name in to_download:
            # Short-circuit when the requested version (or, with no version
            # requested, any version) has already been downloaded.
            if cver and version in cver:
                return {
                    "name": name,
                    "changes": {},
                    "result": True,
                    "comment": (
                        "Version {} of package '{}' is already downloaded".format(
                            version, name
                        )
                    ),
                }
            elif cver and version is None:
                return {
                    "name": name,
                    "changes": {},
                    "result": True,
                    "comment": "Package {} is already downloaded".format(name),
                }
    version_spec = False
    if not skip_suggestions:
        # Best-effort repo lookup for the requested names; a failure to
        # query the package db is deliberately ignored.
        try:
            problems = _preflight_check(to_download, **kwargs)
        except CommandExecutionError:
            pass
        else:
            comments = []
            if problems.get("no_suggest"):
                comments.append(
                    "The following package(s) were not found, and no "
                    "possible matches were found in the package db: "
                    "{}".format(", ".join(sorted(problems["no_suggest"])))
                )
            if problems.get("suggest"):
                for pkgname, suggestions in problems["suggest"].items():
                    comments.append(
                        "Package '{}' not found (possible matches: {})".format(
                            pkgname, ", ".join(suggestions)
                        )
                    )
            if comments:
                if len(comments) > 1:
                    comments.append("")
                return {
                    "name": name,
                    "changes": {},
                    "result": False,
                    "comment": ". ".join(comments).rstrip(),
                }
    # Classify each requested package: not downloaded at all, downloaded
    # with no version requirement, or downloaded at a non-matching version.
    targets = {}
    problems = []
    for pkgname, pkgver in to_download.items():
        cver = cur_pkgs.get(pkgname, {})
        # Not downloaded yet, so add to targets.
        if not cver:
            targets[pkgname] = pkgver
            continue
        # Something is downloaded and no version was requested: nothing to do.
        elif cver and not pkgver:
            continue
        version_spec = True
        try:
            if not _fulfills_version_string(
                cver.keys(), pkgver, ignore_epoch=ignore_epoch
            ):
                targets[pkgname] = pkgver
        except CommandExecutionError as exc:
            problems.append(exc.strerror)
            continue
    if problems:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": " ".join(problems),
        }
    if not targets:
        msg = "All specified packages{} are already downloaded".format(
            " (matching specified versions)" if version_spec else ""
        )
        return {"name": name, "changes": {}, "result": True, "comment": msg}
    return targets
def _find_advisory_targets(name=None, advisory_ids=None, **kwargs):
    """
    Determine which advisory patches still need to be installed.

    Returns a list of patch identifiers to install, or a complete state
    return dict when every requested patch is already present.
    """
    installed_patches = __salt__["pkg.list_installed_patches"](**kwargs)
    if advisory_ids:
        wanted = advisory_ids
    else:
        wanted = [name]
        if installed_patches.get(name, {}):
            # Single-patch form and the patch is already present.
            return {
                "name": name,
                "changes": {},
                "result": True,
                "comment": "Advisory patch {} is already installed".format(name),
            }
    # Keep only the patches with no installed entry.
    targets = [patch for patch in wanted if not installed_patches.get(patch, {})]
    if targets:
        return targets
    return {
        "name": name,
        "changes": {},
        "result": True,
        "comment": "All specified advisory patches are already installed",
    }
def _find_remove_targets(
    name=None, version=None, pkgs=None, normalize=True, ignore_epoch=None, **kwargs
):
    """
    Determine which of the requested packages are actually installed (and,
    when a version specification is given, installed at a matching version)
    and therefore need to be removed.

    Returns a list of package names to remove, or a complete state return
    dict when there is nothing to do or an error occurred.
    """
    if __grains__["os"] == "FreeBSD":
        kwargs["with_origin"] = True
    cur_pkgs = __salt__["pkg.list_pkgs"](versions_as_list=True, **kwargs)
    if pkgs:
        to_remove = _repack_pkgs(pkgs, normalize=normalize)
        if not to_remove:
            # Badly-formatted SLS data passed as ``pkgs``.
            return {
                "name": name,
                "changes": {},
                "result": False,
                "comment": "Invalidly formatted pkgs parameter. See minion log.",
            }
    else:
        _normalize_name = __salt__.get("pkg.normalize_name", lambda pkgname: pkgname)
        to_remove = {_normalize_name(name): version}
    version_spec = False
    # Classify each requested package.
    targets = []
    problems = []
    for pkgname, pkgver in to_remove.items():
        # A "/" in the name is treated as a FreeBSD origin (category/name).
        origin = bool(re.search("/", pkgname))
        if __grains__["os"] == "FreeBSD" and origin:
            cver = [k for k, v in cur_pkgs.items() if v["origin"] == pkgname]
        else:
            cver = cur_pkgs.get(pkgname, [])
        # Not installed: nothing to remove for this package.
        if not cver:
            continue
        # No usable version specification: any installed version is a target.
        elif __salt__["pkg_resource.version_clean"](pkgver) is None:
            targets.append(pkgname)
            continue
        version_spec = True
        try:
            if _fulfills_version_string(cver, pkgver, ignore_epoch=ignore_epoch):
                targets.append(pkgname)
            else:
                log.debug(
                    "Current version (%s) did not match desired version "
                    "specification (%s), will not remove",
                    cver,
                    pkgver,
                )
        except CommandExecutionError as exc:
            problems.append(exc.strerror)
            continue
    if problems:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": " ".join(problems),
        }
    if not targets:
        msg = "All specified packages{} are already absent".format(
            " (matching specified versions)" if version_spec else ""
        )
        return {"name": name, "changes": {}, "result": True, "comment": msg}
    return targets
def _find_install_targets(
    name=None,
    version=None,
    pkgs=None,
    sources=None,
    skip_suggestions=False,
    pkg_verify=False,
    normalize=True,
    ignore_epoch=None,
    reinstall=False,
    refresh=False,
    **kwargs
):
    """
    Inspect the minion to discover which packages need to be installed,
    reinstalled, or unpurged to satisfy the request.

    On success returns the tuple::

        (desired, targets, to_unpurge, to_reinstall, altered_files,
         warnings, was_refreshed)

    When everything is already in the desired state, or an error occurs,
    a complete state return dict is returned instead (callers detect this
    via the failed tuple unpack / dict type).
    """
    was_refreshed = False
    if all((pkgs, sources)):
        # ``pkgs`` and ``sources`` are mutually exclusive.
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": 'Only one of "pkgs" and "sources" is permitted.',
        }
    altered_files = {}
    # ``pkg_verify`` may be a list of option dicts; extract ignore_types
    # and verify_options from it when present.
    if isinstance(pkg_verify, list) and any(
        x.get("ignore_types") is not None
        for x in pkg_verify
        if isinstance(x, _OrderedDict) and "ignore_types" in x
    ):
        ignore_types = next(
            x.get("ignore_types") for x in pkg_verify if "ignore_types" in x
        )
    else:
        ignore_types = []
    if isinstance(pkg_verify, list) and any(
        x.get("verify_options") is not None
        for x in pkg_verify
        if isinstance(x, _OrderedDict) and "verify_options" in x
    ):
        verify_options = next(
            x.get("verify_options") for x in pkg_verify if "verify_options" in x
        )
    else:
        verify_options = []
    if __grains__["os"] == "FreeBSD":
        kwargs["with_origin"] = True
    if salt.utils.platform.is_windows():
        # Windows handles the repo refresh inside pkg.list_pkgs.
        kwargs["refresh"] = refresh
    resolve_capabilities = (
        kwargs.get("resolve_capabilities", False) and "pkg.list_provides" in __salt__
    )
    try:
        cur_pkgs = __salt__["pkg.list_pkgs"](versions_as_list=True, **kwargs)
        cur_prov = (
            resolve_capabilities and __salt__["pkg.list_provides"](**kwargs) or dict()
        )
    except CommandExecutionError as exc:
        return {"name": name, "changes": {}, "result": False, "comment": exc.strerror}
    if salt.utils.platform.is_windows() and kwargs.pop("refresh", False):
        # The refresh happened during pkg.list_pkgs above; don't repeat it.
        was_refreshed = True
        refresh = False
    if any((pkgs, sources)):
        if pkgs:
            desired = _repack_pkgs(pkgs, normalize=normalize)
        elif sources:
            desired = __salt__["pkg_resource.pack_sources"](
                sources,
                normalize=normalize,
            )
        if not desired:
            # Badly-formatted SLS data passed as ``pkgs`` or ``sources``.
            return {
                "name": name,
                "changes": {},
                "result": False,
                "comment": "Invalidly formatted '{}' parameter. See minion log.".format(
                    "pkgs" if pkgs else "sources"
                ),
            }
        to_unpurge = _find_unpurge_targets(desired, **kwargs)
    else:
        # Single-package form via ``name``/``version``.
        if salt.utils.platform.is_windows():
            pkginfo = _get_package_info(name, saltenv=kwargs["saltenv"])
            if not pkginfo:
                return {
                    "name": name,
                    "changes": {},
                    "result": False,
                    "comment": "Package {} not found in the repository.".format(name),
                }
            if version is None:
                version = _get_latest_pkg_version(pkginfo)
        if normalize:
            _normalize_name = __salt__.get(
                "pkg.normalize_name", lambda pkgname: pkgname
            )
            desired = {_normalize_name(name): version}
        else:
            desired = {name: version}
        to_unpurge = _find_unpurge_targets(desired, **kwargs)
        # A "/" in the name is treated as a FreeBSD origin (category/name).
        origin = bool(re.search("/", name))
        if __grains__["os"] == "FreeBSD" and origin:
            cver = [k for k, v in cur_pkgs.items() if v["origin"] == name]
        else:
            cver = cur_pkgs.get(name, [])
        if name not in to_unpurge:
            # Short-circuit when the package is already installed at the
            # requested version (or at any version, if none was requested)
            # and no reinstall/verify work is needed.
            if version and version in cver and not reinstall and not pkg_verify:
                return {
                    "name": name,
                    "changes": {},
                    "result": True,
                    "comment": "Version {} of package '{}' is already installed".format(
                        version, name
                    ),
                }
            elif cver and version is None and not reinstall and not pkg_verify:
                return {
                    "name": name,
                    "changes": {},
                    "result": True,
                    "comment": "Package {} is already installed".format(name),
                }
    version_spec = False
    if not sources:
        if not skip_suggestions:
            # Packages not yet satisfied by what is currently installed.
            not_installed = {
                name: version
                for name, version in desired.items()
                if not (
                    name in cur_pkgs
                    and (
                        version is None
                        or _fulfills_version_string(
                            cur_pkgs[name], version, ignore_epoch=ignore_epoch
                        )
                    )
                )
            }
            if not_installed:
                # Best-effort repo lookup; failures to query the package db
                # are deliberately ignored.
                try:
                    problems = _preflight_check(not_installed, **kwargs)
                except CommandExecutionError:
                    pass
                else:
                    comments = []
                    if problems.get("no_suggest"):
                        comments.append(
                            "The following package(s) were not found, and no "
                            "possible matches were found in the package db: "
                            "{}".format(", ".join(sorted(problems["no_suggest"])))
                        )
                    if problems.get("suggest"):
                        for pkgname, suggestions in problems["suggest"].items():
                            comments.append(
                                "Package '{}' not found (possible matches: {})".format(
                                    pkgname, ", ".join(suggestions)
                                )
                            )
                    if comments:
                        if len(comments) > 1:
                            comments.append("")
                        return {
                            "name": name,
                            "changes": {},
                            "result": False,
                            "comment": ". ".join(comments).rstrip(),
                        }
    # Resolve any "latest" version requests to concrete version numbers.
    wants_latest = [] if sources else [x for x, y in desired.items() if y == "latest"]
    if wants_latest:
        resolved_latest = __salt__["pkg.latest_version"](
            *wants_latest, refresh=refresh, **kwargs
        )
        if len(wants_latest) == 1:
            resolved_latest = {wants_latest[0]: resolved_latest}
        if refresh:
            was_refreshed = True
            refresh = False
        # When pkg.latest_version returned nothing for a package, the
        # resolved latest version will be None; fall back to the newest
        # currently-installed version when there is one.
        for key in resolved_latest:
            if not resolved_latest[key]:
                if key in cur_pkgs:
                    resolved_latest[key] = cur_pkgs[key][-1]
                else:
                    resolved_latest[key] = None
        # Update the desired versions with the ones we resolved
        desired.update(resolved_latest)
    # Find out which packages will be targeted in the call to pkg.install
    targets = {}
    to_reinstall = {}
    problems = []
    warnings = []
    failed_verify = False
    for package_name, version_string in desired.items():
        cver = cur_pkgs.get(package_name, [])
        if resolve_capabilities and not cver and package_name in cur_prov:
            # The name is a capability; look up the providing package.
            cver = cur_pkgs.get(cur_prov.get(package_name)[0], [])
        # Package not yet installed, so add to targets
        if not cver:
            targets[package_name] = version_string
            continue
        if sources:
            if reinstall:
                to_reinstall[package_name] = version_string
                continue
            elif "lowpkg.bin_pkg_info" not in __salt__:
                continue
            # Metadata parser is available, cache the file and derive the
            # package's name and version
            err = "Unable to cache {0}: {1}"
            try:
                cached_path = __salt__["cp.cache_file"](
                    version_string, saltenv=kwargs["saltenv"]
                )
            except CommandExecutionError as exc:
                problems.append(err.format(version_string, exc))
                continue
            if not cached_path:
                problems.append(err.format(version_string, "file not found"))
                continue
            elif not os.path.exists(cached_path):
                problems.append("{} does not exist on minion".format(version_string))
                continue
            source_info = __salt__["lowpkg.bin_pkg_info"](cached_path)
            if source_info is None:
                warnings.append(
                    "Failed to parse metadata for {}".format(version_string)
                )
                continue
            else:
                verstr = source_info["version"]
        else:
            verstr = version_string
            if reinstall:
                to_reinstall[package_name] = version_string
                continue
            if not __salt__["pkg_resource.check_extra_requirements"](
                package_name, version_string
            ):
                targets[package_name] = version_string
                continue
            elif __salt__["pkg_resource.version_clean"](version_string) is None:
                # No version specification: the package is installed, so it
                # only becomes a reinstall target when pkg.verify finds
                # altered files.
                if (not reinstall) and pkg_verify:
                    try:
                        verify_result = __salt__["pkg.verify"](
                            package_name,
                            ignore_types=ignore_types,
                            verify_options=verify_options,
                            **kwargs
                        )
                    except (CommandExecutionError, SaltInvocationError) as exc:
                        failed_verify = exc.strerror
                        continue
                    if verify_result:
                        to_reinstall[package_name] = version_string
                        altered_files[package_name] = verify_result
                continue
        version_fulfilled = False
        allow_updates = bool(not sources and kwargs.get("allow_updates"))
        try:
            version_fulfilled = _fulfills_version_string(
                cver, verstr, ignore_epoch=ignore_epoch, allow_updates=allow_updates
            )
        except CommandExecutionError as exc:
            problems.append(exc.strerror)
            continue
        version_spec = True
        if not version_fulfilled:
            if reinstall:
                to_reinstall[package_name] = version_string
            else:
                version_conditions = _parse_version_string(version_string)
                if pkg_verify and any(
                    oper == "==" for oper, version in version_conditions
                ):
                    try:
                        verify_result = __salt__["pkg.verify"](
                            package_name,
                            ignore_types=ignore_types,
                            verify_options=verify_options,
                            **kwargs
                        )
                    except (CommandExecutionError, SaltInvocationError) as exc:
                        failed_verify = exc.strerror
                        continue
                    if verify_result:
                        to_reinstall[package_name] = version_string
                        altered_files[package_name] = verify_result
                else:
                    log.debug(
                        "Current version (%s) did not match desired version "
                        "specification (%s), adding to installation targets",
                        cver,
                        version_string,
                    )
                    targets[package_name] = version_string
    if failed_verify:
        problems.append(failed_verify)
    if problems:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": " ".join(problems),
        }
    if not any((targets, to_unpurge, to_reinstall)):
        # Nothing at all to do: report success.
        msg = "All specified packages are already installed{0}"
        msg = msg.format(
            " and are at the desired version" if version_spec and not sources else ""
        )
        ret = {"name": name, "changes": {}, "result": True, "comment": msg}
        if warnings:
            ret.setdefault("warnings", []).extend(warnings)
        return ret
    return (
        desired,
        targets,
        to_unpurge,
        to_reinstall,
        altered_files,
        warnings,
        was_refreshed,
    )
def _verify_install(desired, new_pkgs, ignore_epoch=None, new_caps=None):
    """
    Compare the post-install package list against the desired packages.

    Returns an ``(ok, failed)`` pair of package-name lists: ``ok`` for
    packages whose installed version satisfies the desired specification,
    ``failed`` for those missing or at a non-matching version.
    """
    _ok = []
    failed = []
    if not new_caps:
        new_caps = dict()
    for pkgname, pkgver in desired.items():
        # How the package is keyed in ``new_pkgs`` differs per platform.
        has_origin = "/" in pkgname
        if __grains__["os"] == "FreeBSD" and has_origin:
            cver = [k for k, v in new_pkgs.items() if v["origin"] == pkgname]
        elif __grains__["os"] == "MacOS" and has_origin:
            cver = new_pkgs.get(pkgname, new_pkgs.get(pkgname.split("/")[-1]))
        elif __grains__["os"] == "OpenBSD":
            cver = new_pkgs.get(pkgname.split("%")[0])
        elif __grains__["os_family"] == "Debian":
            cver = new_pkgs.get(pkgname.split("=")[0])
        else:
            cver = new_pkgs.get(pkgname)
        # Fall back to capability provides when the name itself is absent.
        if not cver and pkgname in new_caps:
            cver = new_pkgs.get(new_caps.get(pkgname)[0])
        if not cver:
            failed.append(pkgname)
            continue
        elif pkgver == "latest":
            _ok.append(pkgname)
            continue
        elif not __salt__["pkg_resource.version_clean"](pkgver):
            # No version specification: presence alone is a success.
            _ok.append(pkgname)
            continue
        elif pkgver.endswith("*") and cver[0].startswith(pkgver[:-1]):
            # Trailing glob: prefix-match against the installed version.
            _ok.append(pkgname)
            continue
        if _fulfills_version_string(cver, pkgver, ignore_epoch=ignore_epoch):
            _ok.append(pkgname)
        else:
            failed.append(pkgname)
    return _ok, failed
def _get_desired_pkg(name, desired):
if not desired[name] or desired[name].startswith(("<", ">", "=")):
oper = ""
else:
oper = "="
return "{}{}{}".format(name, oper, "" if not desired[name] else desired[name])
def _preflight_check(desired, fromrepo=None, **kwargs):
    """
    Query ``pkg.check_db`` (when the provider implements it) for the
    desired package names.

    Returns a dict with two keys:

    ``suggest``
        maps each missing package name to its list of suggested matches
    ``no_suggest``
        missing package names for which no suggestions exist

    An empty dict is returned when ``pkg.check_db`` is unavailable.

    Bug fix: ``fromrepo`` now defaults to ``None``.  Callers invoke this as
    ``_preflight_check(desired, **kwargs)`` and only catch
    ``CommandExecutionError``; without the default, a kwargs dict lacking
    ``fromrepo`` raised an uncaught TypeError.
    """
    if "pkg.check_db" not in __salt__:
        return {}
    ret = {"suggest": {}, "no_suggest": []}
    pkginfo = __salt__["pkg.check_db"](
        *list(desired.keys()), fromrepo=fromrepo, **kwargs
    )
    for pkgname in pkginfo:
        if pkginfo[pkgname]["found"] is False:
            if pkginfo[pkgname]["suggestions"]:
                ret["suggest"][pkgname] = pkginfo[pkgname]["suggestions"]
            else:
                ret["no_suggest"].append(pkgname)
    return ret
def _nested_output(obj):
    """
    Render ``obj`` with the ``nested`` outputter (stripped of trailing
    whitespace) for embedding in state comments.
    """
    nested.__opts__ = __opts__
    return nested.output(obj).rstrip()
def _resolve_capabilities(pkgs, refresh=False, **kwargs):
    """
    Translate capability names in ``pkgs`` into real package names via
    ``pkg.resolve_capabilities`` when the provider supports it.

    Returns a ``(packages, refresh)`` pair; after resolution the refresh
    flag is reset to False.
    """
    if pkgs and "pkg.resolve_capabilities" in __salt__:
        resolved = __salt__["pkg.resolve_capabilities"](pkgs, refresh=refresh, **kwargs)
        return resolved, False
    return pkgs, refresh
def installed(
    name,
    version=None,
    refresh=None,
    fromrepo=None,
    skip_verify=False,
    skip_suggestions=False,
    pkgs=None,
    sources=None,
    allow_updates=False,
    pkg_verify=False,
    normalize=True,
    ignore_epoch=None,
    reinstall=False,
    update_holds=False,
    **kwargs
):
    """
    Ensure that the package is installed, and that it is the correct
    version (if a version is specified).

    name
        The name of the package to be installed (ignored when ``pkgs`` or
        ``sources`` is used).
    version
        Install a specific version (or comma-separated version spec).
    refresh
        Whether to refresh the package database before installing.
    pkgs / sources
        Install multiple packages by name or from explicit source files.
    pkg_verify
        Use ``pkg.verify`` to detect altered files and reinstall the
        affected packages.
    reinstall
        Force reinstallation of the named packages.
    update_holds / kwargs["hold"]
        Control package holds, via the provider's ``pkg.hold``/``pkg.unhold``.

    Returns a standard state return dict.
    """
    if isinstance(pkgs, list) and len(pkgs) == 0:
        # An explicitly empty pkgs list is a successful no-op.
        return {
            "name": name,
            "changes": {},
            "result": True,
            "comment": "No packages to install provided",
        }
    # If just a name (and optionally a version) is passed, just pack them
    # into the pkgs argument.
    if name and not any((pkgs, sources)):
        if version:
            pkgs = [{name: version}]
            version = None
        else:
            pkgs = [name]
    kwargs["saltenv"] = __env__
    refresh = salt.utils.pkg.check_refresh(__opts__, refresh)
    if pkgs:
        pkgs, refresh = _resolve_capabilities(pkgs, refresh=refresh, **kwargs)
    # Coerce a scalar pkg_verify into a plain boolean (lists pass through).
    if not isinstance(pkg_verify, list):
        pkg_verify = pkg_verify is True
    if (pkg_verify or isinstance(pkg_verify, list)) and "pkg.verify" not in __salt__:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": "pkg.verify not implemented",
        }
    if not isinstance(version, str) and version is not None:
        version = str(version)
    kwargs["allow_updates"] = allow_updates
    result = _find_install_targets(
        name,
        version,
        pkgs,
        sources,
        fromrepo=fromrepo,
        skip_suggestions=skip_suggestions,
        pkg_verify=pkg_verify,
        normalize=normalize,
        ignore_epoch=ignore_epoch,
        reinstall=reinstall,
        refresh=refresh,
        **kwargs
    )
    try:
        (
            desired,
            targets,
            to_unpurge,
            to_reinstall,
            altered_files,
            warnings,
            was_refreshed,
        ) = result
        if was_refreshed:
            refresh = False
    except ValueError:
        # _find_install_targets() found no targets or encountered an error;
        # ``result`` is a complete state return dict.  Before returning it,
        # apply any requested hold/unhold so that behavior matches an
        # actual install run.
        if "pkg.hold" in __salt__ and "hold" in kwargs:
            try:
                action = "pkg.hold" if kwargs["hold"] else "pkg.unhold"
                hold_ret = __salt__[action](name=name, pkgs=pkgs, sources=sources)
            except (CommandExecutionError, SaltInvocationError) as exc:
                return {
                    "name": name,
                    "changes": {},
                    "result": False,
                    "comment": str(exc),
                }
            if "result" in hold_ret and not hold_ret["result"]:
                return {
                    "name": name,
                    "changes": {},
                    "result": False,
                    "comment": (
                        "An error was encountered while "
                        "holding/unholding package(s): {}".format(hold_ret["comment"])
                    ),
                }
            else:
                # Fold the per-package hold results into the return dict.
                modified_hold = [
                    hold_ret[x] for x in hold_ret if hold_ret[x]["changes"]
                ]
                not_modified_hold = [
                    hold_ret[x]
                    for x in hold_ret
                    if not hold_ret[x]["changes"] and hold_ret[x]["result"]
                ]
                failed_hold = [
                    hold_ret[x] for x in hold_ret if not hold_ret[x]["result"]
                ]
                for i in modified_hold:
                    result["comment"] += ".\n{}".format(i["comment"])
                    result["result"] = i["result"]
                    result["changes"][i["name"]] = i["changes"]
                for i in not_modified_hold:
                    result["comment"] += ".\n{}".format(i["comment"])
                    result["result"] = i["result"]
                for i in failed_hold:
                    result["comment"] += ".\n{}".format(i["comment"])
                    result["result"] = i["result"]
        return result
    if to_unpurge and "lowpkg.unpurge" not in __salt__:
        ret = {
            "name": name,
            "changes": {},
            "result": False,
            "comment": "lowpkg.unpurge not implemented",
        }
        if warnings:
            ret.setdefault("warnings", []).extend(warnings)
        return ret
    # Rebuild pkgs/sources so that only install and reinstall targets are
    # passed on to pkg.install.
    if pkgs:
        pkgs = [dict([(x, y)]) for x, y in targets.items()]
        pkgs.extend([dict([(x, y)]) for x, y in to_reinstall.items()])
    elif sources:
        oldsources = sources
        sources = [x for x in oldsources if next(iter(list(x.keys()))) in targets]
        sources.extend(
            [x for x in oldsources if next(iter(list(x.keys()))) in to_reinstall]
        )
    comment = []
    changes = {"installed": {}}
    if __opts__["test"]:
        # Test mode: report what would happen without touching the system.
        if targets:
            if sources:
                _targets = targets
            else:
                _targets = [_get_desired_pkg(x, targets) for x in targets]
            # NOTE(review): ``_targets`` is computed but unused; ``summary``
            # joins the raw target names instead.  Confirm intent upstream.
            summary = ", ".join(targets)
            changes["installed"].update(
                {x: {"new": "installed", "old": ""} for x in targets}
            )
            comment.append(
                "The following packages would be installed/updated: {}".format(summary)
            )
        if to_unpurge:
            comment.append(
                "The following packages would have their selection status "
                "changed from 'purge' to 'install': {}".format(", ".join(to_unpurge))
            )
            changes["installed"].update(
                {x: {"new": "installed", "old": ""} for x in to_unpurge}
            )
        if to_reinstall:
            if reinstall:
                reinstall_targets = []
                for reinstall_pkg in to_reinstall:
                    if sources:
                        reinstall_targets.append(reinstall_pkg)
                    else:
                        reinstall_targets.append(
                            _get_desired_pkg(reinstall_pkg, to_reinstall)
                        )
                changes["installed"].update(
                    {x: {"new": "installed", "old": ""} for x in reinstall_targets}
                )
                msg = "The following packages would be reinstalled: "
                msg += ", ".join(reinstall_targets)
                comment.append(msg)
            else:
                # Reinstalls triggered by pkg.verify: list the altered files.
                for reinstall_pkg in to_reinstall:
                    if sources:
                        pkgstr = reinstall_pkg
                    else:
                        pkgstr = _get_desired_pkg(reinstall_pkg, to_reinstall)
                    comment.append(
                        "Package '{}' would be reinstalled because the "
                        "following files have been altered:".format(pkgstr)
                    )
                    changes["installed"].update({reinstall_pkg: {}})
                    comment.append(_nested_output(altered_files[reinstall_pkg]))
        ret = {
            "name": name,
            "changes": changes,
            "result": None,
            "comment": "\n".join(comment),
        }
        if warnings:
            ret.setdefault("warnings", []).extend(warnings)
        return ret
    modified_hold = None
    not_modified_hold = None
    failed_hold = None
    if targets or to_reinstall:
        # Perform the actual installation via the provider.
        try:
            pkg_ret = __salt__["pkg.install"](
                name=None,
                refresh=refresh,
                version=version,
                fromrepo=fromrepo,
                skip_verify=skip_verify,
                pkgs=pkgs,
                sources=sources,
                reinstall=bool(to_reinstall),
                normalize=normalize,
                update_holds=update_holds,
                ignore_epoch=ignore_epoch,
                **kwargs
            )
        except CommandExecutionError as exc:
            ret = {"name": name, "result": False}
            if exc.info:
                # The exception carries partial changes; report them.
                ret["changes"] = exc.info.get("changes", {})
                ret["comment"] = exc.strerror_without_changes
            else:
                ret["changes"] = {}
                ret[
                    "comment"
                ] = "An error was encountered while installing package(s): {}".format(
                    exc
                )
            if warnings:
                ret.setdefault("warnings", []).extend(warnings)
            return ret
        if refresh:
            refresh = False
        if isinstance(pkg_ret, dict):
            changes["installed"].update(pkg_ret)
        elif isinstance(pkg_ret, str):
            comment.append(pkg_ret)
            # Code below will be looking for a dictionary. If this is a string
            # it means that there was an exception raised and that no packages
            # changed, so now that we have added this error to the comments we
            # set this to an empty dictionary so that the code below which
            # checks reinstall targets works.
            pkg_ret = {}
    if "pkg.hold" in __salt__ and "hold" in kwargs:
        # Apply the requested hold/unhold after the install.
        try:
            action = "pkg.hold" if kwargs["hold"] else "pkg.unhold"
            hold_ret = __salt__[action](name=name, pkgs=desired)
        except (CommandExecutionError, SaltInvocationError) as exc:
            comment.append(str(exc))
            ret = {
                "name": name,
                "changes": changes,
                "result": False,
                "comment": "\n".join(comment),
            }
            if warnings:
                ret.setdefault("warnings", []).extend(warnings)
            return ret
        else:
            if "result" in hold_ret and not hold_ret["result"]:
                ret = {
                    "name": name,
                    "changes": {},
                    "result": False,
                    "comment": (
                        "An error was encountered while "
                        "holding/unholding package(s): {}".format(hold_ret["comment"])
                    ),
                }
                if warnings:
                    ret.setdefault("warnings", []).extend(warnings)
                return ret
            else:
                modified_hold = [
                    hold_ret[x] for x in hold_ret if hold_ret[x]["changes"]
                ]
                not_modified_hold = [
                    hold_ret[x]
                    for x in hold_ret
                    if not hold_ret[x]["changes"] and hold_ret[x]["result"]
                ]
                failed_hold = [
                    hold_ret[x] for x in hold_ret if not hold_ret[x]["result"]
                ]
    if to_unpurge:
        changes["purge_desired"] = __salt__["lowpkg.unpurge"](*to_unpurge)
    # Analyze pkg.install results for packages in targets
    if sources:
        modified = [x for x in changes["installed"] if x in targets]
        not_modified = [
            x for x in desired if x not in targets and x not in to_reinstall
        ]
        failed = [x for x in targets if x not in modified]
    else:
        if __grains__["os"] == "FreeBSD":
            kwargs["with_origin"] = True
        new_pkgs = __salt__["pkg.list_pkgs"](versions_as_list=True, **kwargs)
        if (
            kwargs.get("resolve_capabilities", False)
            and "pkg.list_provides" in __salt__
        ):
            new_caps = __salt__["pkg.list_provides"](**kwargs)
        else:
            new_caps = {}
        _ok, failed = _verify_install(
            desired, new_pkgs, ignore_epoch=ignore_epoch, new_caps=new_caps
        )
        modified = [x for x in _ok if x in targets]
        not_modified = [x for x in _ok if x not in targets and x not in to_reinstall]
        failed = [x for x in failed if x in targets]
    if not changes.get("purge_desired"):
        # Flatten the changes dict when no unpurge work was done.
        changes = changes["installed"]
    if modified:
        if sources:
            summary = ", ".join(modified)
        else:
            summary = ", ".join([_get_desired_pkg(x, desired) for x in modified])
        if len(summary) < 20:
            comment.append(
                "The following packages were installed/updated: {}".format(summary)
            )
        else:
            # Long lists are summarized by count instead of enumerated.
            comment.append(
                "{} targeted package{} {} installed/updated.".format(
                    len(modified),
                    "s" if len(modified) > 1 else "",
                    "were" if len(modified) > 1 else "was",
                )
            )
    if modified_hold:
        # Merge hold changes into the per-package change entries.
        for i in modified_hold:
            change_name = i["name"]
            if change_name in changes:
                comment.append(i["comment"])
                if len(changes[change_name]["new"]) > 0:
                    changes[change_name]["new"] += "\n"
                changes[change_name]["new"] += "{}".format(i["changes"]["new"])
                if len(changes[change_name]["old"]) > 0:
                    changes[change_name]["old"] += "\n"
                changes[change_name]["old"] += "{}".format(i["changes"]["old"])
            else:
                comment.append(i["comment"])
                changes[change_name] = {}
                changes[change_name]["new"] = "{}".format(i["changes"]["new"])
    # Any requested packages that were not targeted for install or reinstall
    if not_modified:
        if sources:
            summary = ", ".join(not_modified)
        else:
            summary = ", ".join([_get_desired_pkg(x, desired) for x in not_modified])
        if len(not_modified) <= 20:
            comment.append(
                "The following packages were already installed: {}".format(summary)
            )
        else:
            comment.append(
                "{} targeted package{} {} already installed".format(
                    len(not_modified),
                    "s" if len(not_modified) > 1 else "",
                    "were" if len(not_modified) > 1 else "was",
                )
            )
    if not_modified_hold:
        for i in not_modified_hold:
            comment.append(i["comment"])
    result = True
    if failed:
        if sources:
            summary = ", ".join(failed)
        else:
            summary = ", ".join([_get_desired_pkg(x, desired) for x in failed])
        comment.insert(
            0, "The following packages failed to install/update: {}".format(summary)
        )
        result = False
    if failed_hold:
        for i in failed_hold:
            comment.append(i["comment"])
        result = False
    # Get the ignore_types list if any from the pkg_verify argument
    if isinstance(pkg_verify, list) and any(
        x.get("ignore_types") is not None
        for x in pkg_verify
        if isinstance(x, _OrderedDict) and "ignore_types" in x
    ):
        ignore_types = next(
            x.get("ignore_types") for x in pkg_verify if "ignore_types" in x
        )
    else:
        ignore_types = []
    # Get the verify_options list if any from the pkg_verify argument
    if isinstance(pkg_verify, list) and any(
        x.get("verify_options") is not None
        for x in pkg_verify
        if isinstance(x, _OrderedDict) and "verify_options" in x
    ):
        verify_options = next(
            x.get("verify_options") for x in pkg_verify if "verify_options" in x
        )
    else:
        verify_options = []
    # Rerun pkg.verify for packages in to_reinstall to confirm the
    # reinstall remediated the altered files.
    modified = []
    failed = []
    for reinstall_pkg in to_reinstall:
        if reinstall:
            if reinstall_pkg in pkg_ret:
                modified.append(reinstall_pkg)
            else:
                failed.append(reinstall_pkg)
        elif pkg_verify:
            verify_result = __salt__["pkg.verify"](
                reinstall_pkg,
                ignore_types=ignore_types,
                verify_options=verify_options,
                **kwargs
            )
            if verify_result:
                failed.append(reinstall_pkg)
                altered_files[reinstall_pkg] = verify_result
            else:
                modified.append(reinstall_pkg)
    if modified:
        for modified_pkg in modified:
            if sources:
                pkgstr = modified_pkg
            else:
                pkgstr = _get_desired_pkg(modified_pkg, desired)
            msg = "Package {} was reinstalled.".format(pkgstr)
            if modified_pkg in altered_files:
                msg += " The following files were remediated:"
                comment.append(msg)
                comment.append(_nested_output(altered_files[modified_pkg]))
            else:
                comment.append(msg)
    if failed:
        for failed_pkg in failed:
            if sources:
                pkgstr = failed_pkg
            else:
                pkgstr = _get_desired_pkg(failed_pkg, desired)
            msg = "Reinstall was not successful for package {}.".format(pkgstr)
            if failed_pkg in altered_files:
                msg += " The following files could not be remediated:"
                comment.append(msg)
                comment.append(_nested_output(altered_files[failed_pkg]))
            else:
                comment.append(msg)
        result = False
    ret = {
        "name": name,
        "changes": changes,
        "result": result,
        "comment": "\n".join(comment),
    }
    if warnings:
        ret.setdefault("warnings", []).extend(warnings)
    return ret
def downloaded(
    name, version=None, pkgs=None, fromrepo=None, ignore_epoch=None, **kwargs
):
    """
    Ensure that the package(s) are downloaded to the minion's local package
    cache (via ``pkg.install`` with ``downloadonly=True``) without being
    installed.

    Returns a standard state return dict.  Fails early when the provider
    does not implement ``pkg.list_downloaded``.
    """
    ret = {"name": name, "changes": {}, "result": None, "comment": ""}
    if "pkg.list_downloaded" not in __salt__:
        ret["result"] = False
        ret["comment"] = "The pkg.downloaded state is not available on this platform"
        return ret
    if isinstance(pkgs, list) and len(pkgs) == 0:
        # An explicitly empty pkgs list is a successful no-op.
        ret["result"] = True
        ret["comment"] = "No packages to download provided"
        return ret
    # If just a name (and optionally a version) is passed, pack them into
    # the pkgs argument.
    if name and not pkgs:
        if version:
            pkgs = [{name: version}]
            version = None
        else:
            pkgs = [name]
    # Drop any caller-supplied 'downloadonly' flag,
    # as we're explicitly passing 'downloadonly=True' to execution module.
    if "downloadonly" in kwargs:
        del kwargs["downloadonly"]
    pkgs, _refresh = _resolve_capabilities(pkgs, **kwargs)
    targets = _find_download_targets(
        name, version, pkgs, fromrepo=fromrepo, ignore_epoch=ignore_epoch, **kwargs
    )
    if isinstance(targets, dict) and "result" in targets:
        # Nothing to do (or an error): a complete return dict came back.
        return targets
    elif not isinstance(targets, dict):
        ret["result"] = False
        ret["comment"] = "An error was encountered while checking targets: {}".format(
            targets
        )
        return ret
    if __opts__["test"]:
        summary = ", ".join(targets)
        ret["comment"] = "The following packages would be downloaded: {}".format(
            summary
        )
        return ret
    try:
        pkg_ret = __salt__["pkg.install"](
            name=name,
            pkgs=pkgs,
            version=version,
            downloadonly=True,
            fromrepo=fromrepo,
            ignore_epoch=ignore_epoch,
            **kwargs
        )
        ret["result"] = True
        ret["changes"].update(pkg_ret)
    except CommandExecutionError as exc:
        ret = {"name": name, "result": False}
        if exc.info:
            # The exception carries partial changes; report them.
            ret["changes"] = exc.info.get("changes", {})
            ret["comment"] = exc.strerror_without_changes
        else:
            ret["changes"] = {}
            ret[
                "comment"
            ] = "An error was encountered while downloading package(s): {}".format(exc)
        return ret
    new_pkgs = __salt__["pkg.list_downloaded"](**kwargs)
    _ok, failed = _verify_install(targets, new_pkgs, ignore_epoch=ignore_epoch)
    if failed:
        summary = ", ".join([_get_desired_pkg(x, targets) for x in failed])
        ret["result"] = False
        ret["comment"] = "The following packages failed to download: {}".format(summary)
    if not ret["changes"] and not ret["comment"]:
        ret["result"] = True
        ret["comment"] = "Packages downloaded: {}".format(", ".join(targets))
    return ret
def patch_installed(name, advisory_ids=None, downloadonly=None, **kwargs):
    """
    Ensure that advisory patches (by id, or by ``name`` when no ids are
    given) are installed — or merely downloaded when ``downloadonly`` is
    set.

    Returns a standard state return dict.  Fails early when the provider
    does not implement ``pkg.list_patches``.
    """
    ret = {"name": name, "changes": {}, "result": None, "comment": ""}
    if "pkg.list_patches" not in __salt__:
        ret["result"] = False
        ret[
            "comment"
        ] = "The pkg.patch_installed state is not available on this platform"
        return ret
    if isinstance(advisory_ids, list) and len(advisory_ids) == 0:
        # An explicitly empty ids list is a successful no-op.
        ret["result"] = True
        ret["comment"] = "No advisory ids provided"
        return ret
    targets = _find_advisory_targets(name, advisory_ids, **kwargs)
    if isinstance(targets, dict) and "result" in targets:
        # Nothing to do (or an error): a complete return dict came back.
        return targets
    elif not isinstance(targets, list):
        ret["result"] = False
        ret["comment"] = "An error was encountered while checking targets: {}".format(
            targets
        )
        return ret
    if __opts__["test"]:
        summary = ", ".join(targets)
        ret[
            "comment"
        ] = "The following advisory patches would be downloaded: {}".format(summary)
        return ret
    try:
        pkg_ret = __salt__["pkg.install"](
            name=name, advisory_ids=advisory_ids, downloadonly=downloadonly, **kwargs
        )
        ret["result"] = True
        ret["changes"].update(pkg_ret)
    except CommandExecutionError as exc:
        ret = {"name": name, "result": False}
        if exc.info:
            # The exception carries partial changes; report them.
            ret["changes"] = exc.info.get("changes", {})
            ret["comment"] = exc.strerror_without_changes
        else:
            ret["changes"] = {}
            ret[
                "comment"
            ] = "An error was encountered while downloading package(s): {}".format(exc)
        return ret
    if not ret["changes"] and not ret["comment"]:
        status = "downloaded" if downloadonly else "installed"
        ret["result"] = True
        ret[
            "comment"
        ] = "Advisory patch is not needed or related packages are already {}".format(
            status
        )
    return ret
def patch_downloaded(name, advisory_ids=None, **kwargs):
    """
    Ensure that advisory patches are downloaded (not installed).

    Thin wrapper around ``patch_installed`` with ``downloadonly=True``.
    """
    if "pkg.list_patches" not in __salt__:
        comment = "The pkg.patch_downloaded state is not available on this platform"
        return {"name": name, "result": False, "changes": {}, "comment": comment}
    # Drop any caller-supplied 'downloadonly' flag,
    # as we're explicitly passing 'downloadonly=True' to execution module.
    kwargs.pop("downloadonly", None)
    return patch_installed(
        name=name, advisory_ids=advisory_ids, downloadonly=True, **kwargs
    )
def latest(
    name,
    refresh=None,
    fromrepo=None,
    skip_verify=False,
    pkgs=None,
    watch_flags=True,
    **kwargs
):
    """Ensure that the named package(s) are at the latest available version.

    name
        Single package target (ignored when ``pkgs`` is supplied).
    refresh
        Whether to refresh the package database; normalized via
        ``salt.utils.pkg.check_refresh``.
    fromrepo
        Restrict the version lookup and install to this repository.
    skip_verify
        Forwarded to ``pkg.install``.
    pkgs
        Optional list of package targets; takes precedence over ``name``.
    watch_flags
        On Gentoo, also rebuild packages whose USE flags changed.

    Returns a standard state return dict.
    """
    refresh = salt.utils.pkg.check_refresh(__opts__, refresh)
    # 'sources' (installing from package files) has no "latest" semantics.
    if kwargs.get("sources"):
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": 'The "sources" parameter is not supported.',
        }
    elif pkgs:
        desired_pkgs = list(_repack_pkgs(pkgs).keys())
        if not desired_pkgs:
            return {
                "name": name,
                "changes": {},
                "result": False,
                "comment": 'Invalidly formatted "pkgs" parameter. See minion log.',
            }
    else:
        # An explicitly empty pkgs list is a successful no-op; otherwise
        # fall back to ``name`` as the single target.
        if isinstance(pkgs, list) and len(pkgs) == 0:
            return {
                "name": name,
                "changes": {},
                "result": True,
                "comment": "No packages to install provided",
            }
        else:
            desired_pkgs = [name]
    kwargs["saltenv"] = __env__
    # Translate capability names into concrete package names where needed.
    desired_pkgs, refresh = _resolve_capabilities(
        desired_pkgs, refresh=refresh, **kwargs
    )
    try:
        # Newest available version for each target (falsy if up-to-date).
        avail = __salt__["pkg.latest_version"](
            *desired_pkgs, fromrepo=fromrepo, refresh=refresh, **kwargs
        )
    except CommandExecutionError as exc:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": (
                "An error was encountered while checking the "
                "newest available version of package(s): {}".format(exc)
            ),
        }
    try:
        cur = __salt__["pkg.version"](*desired_pkgs, **kwargs)
    except CommandExecutionError as exc:
        return {"name": name, "changes": {}, "result": False, "comment": exc.strerror}
    # With a single target the modules return bare strings; normalize both
    # results to dicts keyed by package name.
    if isinstance(cur, str):
        cur = {desired_pkgs[0]: cur}
    if isinstance(avail, str):
        avail = {desired_pkgs[0]: avail}
    targets = {}
    problems = []
    for pkg in desired_pkgs:
        if not avail.get(pkg):
            # No newer version available.
            if not cur.get(pkg):
                # Not installed either: the package name is unknown.
                msg = "No information found for '{}'.".format(pkg)
                log.error(msg)
                problems.append(msg)
            elif (
                watch_flags
                and __grains__.get("os") == "Gentoo"
                and __salt__["portage_config.is_changed_uses"](pkg)
            ):
                # Gentoo: USE flags changed, rebuild at the current version.
                targets[pkg] = cur[pkg]
        else:
            targets[pkg] = avail[pkg]
    if problems:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": " ".join(problems),
        }
    if targets:
        if not pkgs:
            # Only ``name`` was targeted and it needs an update, so nothing
            # can be in the already-up-to-date set.
            up_to_date = []
        else:
            up_to_date = [x for x in pkgs if x not in targets]
        if __opts__["test"]:
            comments = []
            comments.append(
                "The following packages would be installed/upgraded: "
                + ", ".join(sorted(targets))
            )
            if up_to_date:
                up_to_date_count = len(up_to_date)
                if up_to_date_count <= 10:
                    comments.append(
                        "The following packages are already up-to-date: "
                        + ", ".join(
                            ["{} ({})".format(x, cur[x]) for x in sorted(up_to_date)]
                        )
                    )
                else:
                    comments.append(
                        "{} packages are already up-to-date".format(up_to_date_count)
                    )
            return {
                "name": name,
                "changes": {},
                "result": None,
                "comment": "\n".join(comments),
            }
        if salt.utils.platform.is_windows():
            # pkg.install execution module on windows ensures the software
            # package is installed when no version is specified, it does not
            # upgrade the software to the latest. This is per the design.
            # Build updated list of pkgs *with version number*, exclude
            # non-targeted ones
            targeted_pkgs = [{x: targets[x]} for x in targets]
        else:
            # Build updated list of pkgs to exclude non-targeted ones
            targeted_pkgs = list(targets)
        # No need to refresh, if a refresh was necessary it would have been
        # performed above when pkg.latest_version was run.
        try:
            changes = __salt__["pkg.install"](
                name=None,
                refresh=False,
                fromrepo=fromrepo,
                skip_verify=skip_verify,
                pkgs=targeted_pkgs,
                **kwargs
            )
        except CommandExecutionError as exc:
            return {
                "name": name,
                "changes": {},
                "result": False,
                "comment": (
                    "An error was encountered while installing package(s): {}".format(
                        exc
                    )
                ),
            }
        if changes:
            # Find failed and successful updates
            failed = [
                x
                for x in targets
                if not changes.get(x)
                or changes[x].get("new") != targets[x]
                and targets[x] != "latest"
            ]
            successful = [x for x in targets if x not in failed]
            comments = []
            if failed:
                msg = "The following packages failed to update: {}".format(
                    ", ".join(sorted(failed))
                )
                comments.append(msg)
            if successful:
                msg = (
                    "The following packages were successfully "
                    "installed/upgraded: "
                    "{}".format(", ".join(sorted(successful)))
                )
                comments.append(msg)
            if up_to_date:
                if len(up_to_date) <= 10:
                    msg = "The following packages were already up-to-date: {}".format(
                        ", ".join(sorted(up_to_date))
                    )
                else:
                    msg = "{} packages were already up-to-date ".format(len(up_to_date))
                comments.append(msg)
            return {
                "name": name,
                "changes": changes,
                "result": False if failed else True,
                "comment": " ".join(comments),
            }
        else:
            # pkg.install returned no changes: everything targeted failed.
            if len(targets) > 10:
                comment = (
                    "{} targeted packages failed to update. "
                    "See debug log for details.".format(len(targets))
                )
            elif len(targets) > 1:
                comment = (
                    "The following targeted packages failed to update. "
                    "See debug log for details: ({}).".format(
                        ", ".join(sorted(targets))
                    )
                )
            else:
                comment = "Package {} failed to update.".format(
                    next(iter(list(targets.keys())))
                )
            if up_to_date:
                if len(up_to_date) <= 10:
                    comment += (
                        " The following packages were already up-to-date: {}".format(
                            ", ".join(sorted(up_to_date))
                        )
                    )
                else:
                    comment += "{} packages were already up-to-date".format(
                        len(up_to_date)
                    )
            return {
                "name": name,
                "changes": changes,
                "result": False,
                "comment": comment,
            }
    else:
        # Nothing to do: every desired package is already up-to-date.
        if len(desired_pkgs) > 10:
            comment = "All {} packages are up-to-date.".format(len(desired_pkgs))
        elif len(desired_pkgs) > 1:
            comment = "All packages are up-to-date ({}).".format(
                ", ".join(sorted(desired_pkgs))
            )
        else:
            comment = "Package {} is already up-to-date".format(desired_pkgs[0])
        return {"name": name, "changes": {}, "result": True, "comment": comment}
def _uninstall(
    action="remove",
    name=None,
    version=None,
    pkgs=None,
    normalize=True,
    ignore_epoch=None,
    **kwargs
):
    """Common backend for the ``removed`` and ``purged`` states.

    action
        Either ``"remove"`` or ``"purge"``; selects the pkg module function
        to call and whether partially-removed packages are also targeted.
    name / version / pkgs / normalize / ignore_epoch / kwargs
        Forwarded to ``pkg_resource.parse_targets`` /
        ``_find_remove_targets`` and to the pkg module call.

    Returns a standard state return dict.
    """
    if action not in ("remove", "purge"):
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": "Invalid action '{}'. This is probably a bug.".format(action),
        }
    try:
        pkg_params = __salt__["pkg_resource.parse_targets"](
            name, pkgs, normalize=normalize
        )[0]
    except MinionError as exc:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": "An error was encountered while parsing targets: {}".format(exc),
        }
    # The helper returns either a finished state dict, a list of targets,
    # or an error payload.
    targets = _find_remove_targets(
        name, version, pkgs, normalize, ignore_epoch=ignore_epoch, **kwargs
    )
    if isinstance(targets, dict) and "result" in targets:
        return targets
    elif not isinstance(targets, list):
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": "An error was encountered while checking targets: {}".format(
                targets
            ),
        }
    if action == "purge":
        # Also target packages that were removed but still have leftovers
        # (i.e. show up in the "removed" package list).
        old_removed = __salt__["pkg.list_pkgs"](
            versions_as_list=True, removed=True, **kwargs
        )
        targets.extend([x for x in pkg_params if x in old_removed])
    targets.sort()
    if not targets:
        return {
            "name": name,
            "changes": {},
            "result": True,
            "comment": "None of the targeted packages are installed{}".format(
                " or partially installed" if action == "purge" else ""
            ),
        }
    if __opts__["test"]:
        # Test mode: describe the pending removals without performing them.
        _changes = {}
        _changes.update({x: {"new": "{}d".format(action), "old": ""} for x in targets})
        return {
            "name": name,
            "changes": _changes,
            "result": None,
            "comment": "The following packages will be {}d: {}.".format(
                action, ", ".join(targets)
            ),
        }
    # Dispatch to pkg.remove or pkg.purge.
    changes = __salt__["pkg.{}".format(action)](
        name, pkgs=pkgs, version=version, **kwargs
    )
    new = __salt__["pkg.list_pkgs"](versions_as_list=True, **kwargs)
    failed = []
    for param in pkg_params:
        if __grains__["os_family"] in ["Suse", "RedHat"]:
            # Check if the package version set to be removed is actually removed:
            if param in new and not pkg_params[param]:
                failed.append(param)
            elif param in new and pkg_params[param] in new[param]:
                failed.append(param + "-" + pkg_params[param])
        elif param in new:
            failed.append(param)
    if action == "purge":
        # A purge target that still appears in the removed-package list
        # was not fully purged.
        new_removed = __salt__["pkg.list_pkgs"](
            versions_as_list=True, removed=True, **kwargs
        )
        failed.extend([x for x in pkg_params if x in new_removed])
    failed.sort()
    if failed:
        return {
            "name": name,
            "changes": changes,
            "result": False,
            "comment": "The following packages failed to {}: {}.".format(
                action, ", ".join(failed)
            ),
        }
    comments = []
    not_installed = sorted([x for x in pkg_params if x not in targets])
    if not_installed:
        comments.append(
            "The following packages were not installed: {}".format(
                ", ".join(not_installed)
            )
        )
        comments.append(
            "The following packages were {}d: {}.".format(action, ", ".join(targets))
        )
    else:
        comments.append("All targeted packages were {}d.".format(action))
    return {
        "name": name,
        "changes": changes,
        "result": True,
        "comment": " ".join(comments),
    }
def removed(name, version=None, pkgs=None, normalize=True, ignore_epoch=None, **kwargs):
    """Ensure that the named package(s) are not installed.

    Delegates to :func:`_uninstall` with ``action="remove"`` and turns a
    :class:`CommandExecutionError` into a failing state return, preserving
    any partial changes carried by the exception.
    """
    kwargs["saltenv"] = __env__
    try:
        return _uninstall(
            action="remove",
            name=name,
            version=version,
            pkgs=pkgs,
            normalize=normalize,
            ignore_epoch=ignore_epoch,
            **kwargs
        )
    except CommandExecutionError as exc:
        failure = {"name": name, "result": False}
        if exc.info:
            # Surface partial state information from the exception.
            failure["changes"] = exc.info.get("changes", {})
            failure["comment"] = exc.strerror_without_changes
        else:
            failure["changes"] = {}
            failure["comment"] = (
                "An error was encountered while removing package(s): {}".format(exc)
            )
        return failure
def purged(name, version=None, pkgs=None, normalize=True, ignore_epoch=None, **kwargs):
    """Ensure that the named package(s) are purged (removed with leftovers).

    Delegates to :func:`_uninstall` with ``action="purge"`` and turns a
    :class:`CommandExecutionError` into a failing state return, preserving
    any partial changes carried by the exception.
    """
    kwargs["saltenv"] = __env__
    try:
        return _uninstall(
            action="purge",
            name=name,
            version=version,
            pkgs=pkgs,
            normalize=normalize,
            ignore_epoch=ignore_epoch,
            **kwargs
        )
    except CommandExecutionError as exc:
        failure = {"name": name, "result": False}
        if exc.info:
            # Surface partial state information from the exception.
            failure["changes"] = exc.info.get("changes", {})
            failure["comment"] = exc.strerror_without_changes
        else:
            failure["changes"] = {}
            failure["comment"] = (
                "An error was encountered while purging package(s): {}".format(exc)
            )
        return failure
def uptodate(name, refresh=False, pkgs=None, **kwargs):
    """Ensure that all (or the listed) packages are at their latest version.

    name
        State name only; not used as a package target.
    refresh
        Must be a boolean; forwarded to ``pkg.list_upgrades`` and
        ``pkg.upgrade``.
    pkgs
        Optional list restricting which upgradable packages are considered.
    kwargs
        Forwarded to the pkg module calls. ``fromrepo`` is rejected on
        Gentoo.

    Returns a standard state return dict.
    """
    ret = {"name": name, "changes": {}, "result": False, "comment": "Failed to update"}
    if "pkg.list_upgrades" not in __salt__:
        ret["comment"] = "State pkg.uptodate is not available"
        return ret
    # emerge --update doesn't appear to support repo notation
    if "fromrepo" in kwargs and __grains__["os"] == "Gentoo":
        ret["comment"] = "'fromrepo' argument not supported on this platform"
        return ret
    if isinstance(refresh, bool):
        pkgs, refresh = _resolve_capabilities(pkgs, refresh=refresh, **kwargs)
        try:
            packages = __salt__["pkg.list_upgrades"](refresh=refresh, **kwargs)
            # Map each upgradable package to its old/new version pair.
            expected = {
                pkgname: {
                    "new": pkgver,
                    "old": __salt__["pkg.version"](pkgname, **kwargs),
                }
                for pkgname, pkgver in packages.items()
            }
            if isinstance(pkgs, list):
                # Restrict both views to the explicitly requested packages.
                packages = [pkg for pkg in packages if pkg in pkgs]
                expected = {
                    pkgname: pkgver
                    for pkgname, pkgver in expected.items()
                    if pkgname in pkgs
                }
        except Exception as exc:
            ret["comment"] = str(exc)
            return ret
    else:
        ret["comment"] = "refresh must be either True or False"
        return ret
    if not packages:
        ret["comment"] = "System is already up-to-date"
        ret["result"] = True
        return ret
    elif __opts__["test"]:
        ret["comment"] = "System update will be performed"
        ret["changes"] = expected
        ret["result"] = None
        return ret
    try:
        ret["changes"] = __salt__["pkg.upgrade"](refresh=refresh, pkgs=pkgs, **kwargs)
    except CommandExecutionError as exc:
        if exc.info:
            # Surface partial state information from the exception.
            ret["changes"] = exc.info.get("changes", {})
            ret["comment"] = exc.strerror_without_changes
        else:
            ret["changes"] = {}
            ret[
                "comment"
            ] = "An error was encountered while updating packages: {}".format(exc)
        return ret
    # When an explicit pkgs list was given, verify every expected upgrade
    # actually shows up in the reported changes.
    missing = []
    if isinstance(pkgs, list):
        missing = [pkg for pkg in expected.keys() if pkg not in ret["changes"]]
    if missing:
        ret["comment"] = "The following package(s) failed to update: {}".format(
            ", ".join(missing)
        )
        ret["result"] = False
    else:
        ret["comment"] = "Upgrade ran successfully"
        ret["result"] = True
    return ret
def group_installed(name, skip=None, include=None, **kwargs):
    """Ensure that an entire package group is installed.

    name
        Name of the package group (passed to ``pkg.group_diff``).
    skip
        Optional list of non-mandatory packages to exclude; skipping a
        mandatory package is an error.
    include
        Optional list of additional packages to install along with the
        group.
    kwargs
        Forwarded to ``pkg.install`` / ``pkg.list_pkgs``.

    Returns a standard state return dict.
    """
    ret = {"name": name, "changes": {}, "result": False, "comment": ""}
    if "pkg.group_diff" not in __salt__:
        ret["comment"] = "pkg.group_install not available for this platform"
        return ret
    # Normalize skip/include: default to empty lists and coerce items
    # to strings.
    if skip is None:
        skip = []
    else:
        if not isinstance(skip, list):
            ret["comment"] = "skip must be formatted as a list"
            return ret
        for idx, item in enumerate(skip):
            if not isinstance(item, str):
                skip[idx] = str(item)
    if include is None:
        include = []
    else:
        if not isinstance(include, list):
            ret["comment"] = "include must be formatted as a list"
            return ret
        for idx, item in enumerate(include):
            if not isinstance(item, str):
                include[idx] = str(item)
    try:
        diff = __salt__["pkg.group_diff"](name)
    except CommandExecutionError as err:
        ret[
            "comment"
        ] = "An error was encountered while installing/updating group '{}': {}.".format(
            name, err
        )
        return ret
    # Mandatory packages can never be skipped.
    mandatory = diff["mandatory"]["installed"] + diff["mandatory"]["not installed"]
    invalid_skip = [x for x in mandatory if x in skip]
    if invalid_skip:
        ret[
            "comment"
        ] = "The following mandatory packages cannot be skipped: {}".format(
            ", ".join(invalid_skip)
        )
        return ret
    # Targets: missing mandatory packages, missing non-skipped defaults,
    # and the explicit includes.
    targets = diff["mandatory"]["not installed"]
    targets.extend([x for x in diff["default"]["not installed"] if x not in skip])
    targets.extend(include)
    if not targets:
        ret["result"] = True
        ret["comment"] = "Group '{}' is already installed".format(name)
        return ret
    partially_installed = (
        diff["mandatory"]["installed"]
        or diff["default"]["installed"]
        or diff["optional"]["installed"]
    )
    if __opts__["test"]:
        # Test mode: report whether this would be an install or an update.
        ret["result"] = None
        if partially_installed:
            ret[
                "comment"
            ] = "Group '{}' is partially installed and will be updated".format(name)
        else:
            ret["comment"] = "Group '{}' will be installed".format(name)
        return ret
    try:
        ret["changes"] = __salt__["pkg.install"](pkgs=targets, **kwargs)
    except CommandExecutionError as exc:
        ret = {"name": name, "result": False}
        if exc.info:
            ret["changes"] = exc.info.get("changes", {})
            ret["comment"] = exc.strerror_without_changes
        else:
            ret["changes"] = {}
            ret["comment"] = (
                "An error was encountered while "
                "installing/updating group '{}': {}".format(name, exc)
            )
        return ret
    # Verify every target is now present in the installed-package list.
    failed = [x for x in targets if x not in __salt__["pkg.list_pkgs"](**kwargs)]
    if failed:
        ret["comment"] = "Failed to install the following packages: {}".format(
            ", ".join(failed)
        )
        return ret
    ret["result"] = True
    ret["comment"] = "Group '{}' was {}".format(
        name, "updated" if partially_installed else "installed"
    )
    return ret
def mod_init(low):
    """Run platform setup before ``installed``/``latest`` states execute.

    Gives an optional ``pkg.ex_mod_init`` execution function a chance to
    run, then writes the refresh tag for ``installed``/``latest`` states
    and returns that module's result. Any other state function gets
    ``False``.
    """
    result = True
    if "pkg.ex_mod_init" in __salt__:
        result = __salt__["pkg.ex_mod_init"](low)
    if low["fun"] in ("installed", "latest"):
        salt.utils.pkg.write_rtag(__opts__)
        return result
    return False
def mod_aggregate(low, chunks, running):
    """Aggregate matching pkg chunks into a single low-state call.

    Scans the remaining ``chunks`` for pkg states with the same function
    and ``fromrepo`` as ``low``, pulls their package (or source) targets
    into ``low`` and marks them with ``__agg__`` so they are not executed
    separately.

    Returns the (possibly extended) ``low`` dict.
    """
    pkgs = []
    pkg_type = None
    # Only these state functions can be safely aggregated.
    agg_enabled = [
        "installed",
        "latest",
        "removed",
        "purged",
    ]
    if low.get("fun") not in agg_enabled:
        return low
    for chunk in chunks:
        tag = __utils__["state.gen_tag"](chunk)
        if tag in running:
            # Chunk has already been executed; leave it alone.
            continue
        if chunk.get("state") == "pkg":
            if "__agg__" in chunk:
                continue
            # Only aggregate chunks using the same function and repo.
            if chunk.get("fun") != low.get("fun"):
                continue
            if chunk.get("fromrepo") != low.get("fromrepo"):
                continue
            # 'sources'-style chunks are aggregated separately from
            # 'pkgs'-style ones; the first chunk seen fixes the type.
            if "sources" in chunk:
                if pkg_type is None:
                    pkg_type = "sources"
                if pkg_type == "sources":
                    pkgs.extend(chunk["sources"])
                    chunk["__agg__"] = True
            else:
                # If hold exists in the chunk, do not add to aggregation
                # otherwise all packages will be held or unheld.
                # setting a package to be held/unheld is not as
                # time consuming as installing/uninstalling.
                if "hold" not in chunk:
                    if pkg_type is None:
                        pkg_type = "pkgs"
                    if pkg_type == "pkgs":
                        # Pull out the pkg names!
                        if "pkgs" in chunk:
                            pkgs.extend(chunk["pkgs"])
                            chunk["__agg__"] = True
                        elif "name" in chunk:
                            version = chunk.pop("version", None)
                            if version is not None:
                                pkgs.append({chunk["name"]: version})
                            else:
                                pkgs.append(chunk["name"])
                            chunk["__agg__"] = True
    if pkg_type is not None and pkgs:
        if pkg_type in low:
            low[pkg_type].extend(pkgs)
        else:
            low[pkg_type] = pkgs
    return low
def mod_watch(name, **kwargs):
    """Run the pkg state function named by ``sfun`` for a watch requisite.

    Supports ``installed``, ``latest``, ``removed`` and ``purged``; any
    other value yields a failing state return.
    """
    sfun = kwargs.pop("sfun", None)
    dispatch = {
        "installed": installed,
        "latest": latest,
        "removed": removed,
        "purged": purged,
    }
    handler = dispatch.get(sfun)
    if handler is not None:
        return handler(name, **kwargs)
    return {
        "name": name,
        "changes": {},
        "comment": "pkg.{} does not work with the watch requisite".format(sfun),
        "result": False,
    }
def mod_beacon(name, **kwargs):
    """Register a ``pkg`` beacon for states run with ``beacon=True``.

    Only the ``installed`` and ``removed`` state functions are supported;
    for those, a beacon watching the target packages is created through
    ``beacon.present``. Unsupported functions yield a failing return, and
    supported ones without the ``beacon`` argument are a successful no-op.
    """
    sfun = kwargs.pop("sfun", None)
    if sfun not in ("installed", "removed"):
        return {
            "name": name,
            "changes": {},
            "comment": "pkg.{} does not work with the mod_beacon state function".format(
                sfun
            ),
            "result": False,
        }
    if not kwargs.get("beacon"):
        return {
            "name": name,
            "changes": {},
            "comment": "Not adding beacon.",
            "result": True,
        }
    beacon_module = "pkg"
    beacon_name = "beacon_{}_{}".format(beacon_module, name)
    return __states__["beacon.present"](
        name=beacon_name,
        pkgs=kwargs.get("pkgs", [name]),
        interval=60,
        beacon_module=beacon_module,
    )
| true | true |
f71f6287f35f2c7ff53b83f6c0121a0e0b75c1ea | 13,549 | py | Python | chainer/training/extensions/variable_statistics_plot.py | seiyab/chainer | 39fffb9597a6e9646307fba27ad3233c65d38632 | [
"MIT"
] | null | null | null | chainer/training/extensions/variable_statistics_plot.py | seiyab/chainer | 39fffb9597a6e9646307fba27ad3233c65d38632 | [
"MIT"
] | null | null | null | chainer/training/extensions/variable_statistics_plot.py | seiyab/chainer | 39fffb9597a6e9646307fba27ad3233c65d38632 | [
"MIT"
] | null | null | null | from __future__ import division
import os
import warnings
import numpy
import six
import chainer
from chainer import backend
from chainer.backends import cuda
from chainer.training import extension
from chainer.training import trigger as trigger_module
from chainer.utils import argument
_available = None
def _try_import_matplotlib():
global matplotlib, _available
global _plot_color, _plot_color_trans, _plot_common_kwargs
try:
import matplotlib
_available = True
except ImportError:
_available = False
if _available:
if hasattr(matplotlib.colors, 'to_rgba'):
_to_rgba = matplotlib.colors.to_rgba
else:
# For matplotlib 1.x
_to_rgba = matplotlib.colors.ColorConverter().to_rgba
_plot_color = _to_rgba('#1f77b4') # C0 color
_plot_color_trans = _plot_color[:3] + (0.2,) # apply alpha
_plot_common_kwargs = {
'alpha': 0.2, 'linewidth': 0, 'color': _plot_color_trans}
def _check_available():
    """Ensure the matplotlib probe has run; warn when it is unavailable."""
    if _available is None:
        _try_import_matplotlib()
    if _available:
        return
    warnings.warn('matplotlib is not installed on your environment, '
                  'so nothing will be plotted at this time. '
                  'Please install matplotlib to plot figures.\n\n'
                  '  $ pip install matplotlib\n')
def _unpack_variables(x, memo=None):
    """Recursively collect chainer variables from ``x`` into a tuple.

    Accepts a single :class:`~chainer.Variable`, a :class:`~chainer.Link`
    (whose parameters, including uninitialized ones, are taken), or an
    arbitrarily nested list/tuple of either; anything else contributes
    nothing.
    """
    out = memo if memo is not None else ()
    if isinstance(x, chainer.Variable):
        return out + (x,)
    if isinstance(x, chainer.Link):
        return out + tuple(x.params(include_uninit=True))
    if isinstance(x, (list, tuple)):
        for element in x:
            out += _unpack_variables(element)
    return out
class Reservoir(object):

    """Reservoir sample with a fixed sized buffer.

    Keeps a uniform random sample of at most ``size`` items from a stream.
    Each retained item is stored together with a stream index so the sample
    can later be returned in stream order by :meth:`get_data`.
    """

    def __init__(self, size, data_shape, dtype=numpy.float32):
        # size: maximum number of retained samples.
        # data_shape: shape of each individual sample.
        self.size = size
        self.data = numpy.zeros((size,) + data_shape, dtype=dtype)
        self.idxs = numpy.zeros((size,), dtype=numpy.int32)
        self.counter = 0

    def add(self, x, idx=None):
        """Add ``x`` to the sample using standard reservoir sampling.

        Args:
            x: Sample to (possibly) retain; must match ``data_shape``.
            idx: Stream position recorded with the sample; defaults to the
                number of items seen so far. (The previous ``idx or
                self.counter`` logic silently replaced a legitimate
                ``idx == 0`` with the counter; an explicit ``None`` check
                fixes that.)
        """
        if idx is None:
            idx = self.counter
        if self.counter < self.size:
            # Buffer not yet full: always keep the item.
            self.data[self.counter] = x
            self.idxs[self.counter] = idx
        elif numpy.random.random() < self.size / float(self.counter + 1):
            # Replace a random slot with probability size / (n + 1) so each
            # stream element ends up retained with equal probability.
            i = numpy.random.randint(self.size)
            self.data[i] = x
            self.idxs[i] = idx
        self.counter += 1

    def get_data(self):
        """Return ``(idxs, data)`` of the retained samples in stream order."""
        idxs = self.idxs[:min(self.counter, self.size)]
        sorted_args = numpy.argsort(idxs)
        return idxs[sorted_args], self.data[sorted_args]
class Statistician(object):

    """Compute a configurable subset of mean/std/percentile statistics."""

    def __init__(self, collect_mean, collect_std, percentile_sigmas):
        # Flags selecting which statistics collect() produces.
        self.collect_mean = collect_mean
        self.collect_std = collect_std
        # Percentiles (in [0, 100]) to evaluate; a falsy value disables them.
        self.percentile_sigmas = percentile_sigmas

    def __call__(self, x, axis=0, dtype=None, xp=None):
        """Normalize ``axis`` to a tuple and delegate to :meth:`collect`.

        ``dtype`` and ``xp`` are accepted for interface compatibility but
        are unused here.
        """
        if axis is None:
            axis = tuple(range(x.ndim))
        if not isinstance(axis, (tuple, list)):
            axis = (axis,)
        return self.collect(x, axis)

    def collect(self, x, axis):
        """Return a dict of the enabled statistics of ``x`` over ``axis``."""
        stats = {}
        if self.collect_mean:
            stats['mean'] = x.mean(axis=axis)
        if self.collect_std:
            stats['std'] = x.std(axis=axis)
        if self.percentile_sigmas:
            xp = cuda.get_array_module(x)
            stats['percentile'] = xp.percentile(
                x, self.percentile_sigmas, axis=axis)
        return stats
class VariableStatisticsPlot(extension.Extension):

    """__init__(\
targets, max_sample_size=1000, report_data=True,\
report_grad=True, plot_mean=True, plot_std=True,\
percentile_sigmas=(0, 0.13, 2.28, 15.87, 50, 84.13, 97.72, 99.87,\
100), trigger=(1, 'epoch'), filename='statistics.png',\
figsize=None, marker=None, grid=True)

    Trainer extension to plot statistics for :class:`Variable`\\s.

    This extension collects statistics for a single :class:`Variable`, a list
    of :class:`Variable`\\s or similarly a single or a list of
    :class:`Link`\\s containing one or more :class:`Variable`\\s. In case
    multiple :class:`Variable`\\s are found, the means are computed. The
    collected statistics are plotted and saved as an image in the directory
    specified by the :class:`Trainer`.

    Statistics include mean, standard deviation and percentiles.

    This extension uses reservoir sampling to preserve memory, using a fixed
    size running sample. This means that collected items in the sample are
    discarded uniformly at random when the number of items becomes larger
    than the maximum sample size, but each item is expected to occur in the
    sample with equal probability.

    Args:
        targets (:class:`Variable`, :class:`Link` or list of either):
            Parameters for which statistics are collected.
        max_sample_size (int):
            Maximum number of running samples.
        report_data (bool):
            If ``True``, data (e.g. weights) statistics are plotted. If
            ``False``, they are neither computed nor plotted.
        report_grad (bool):
            If ``True``, gradient statistics are plotted. If ``False``, they
            are neither computed nor plotted.
        plot_mean (bool):
            If ``True``, means are plotted. If ``False``, they are
            neither computed nor plotted.
        plot_std (bool):
            If ``True``, standard deviations are plotted. If ``False``, they
            are neither computed nor plotted.
        percentile_sigmas (float or tuple of floats):
            Percentiles to plot in the range :math:`[0, 100]`.
        trigger:
            Trigger that decides when to save the plots as an image. This is
            distinct from the trigger of this extension itself. If it is a
            tuple in the form ``<int>, 'epoch'`` or ``<int>, 'iteration'``, it
            is passed to :class:`IntervalTrigger`.
        filename (str):
            Name of the output image file under the output directory.
            For historical reasons ``file_name`` is also accepted as an alias
            of this argument.
        figsize (tuple of int):
            Matlotlib ``figsize`` argument that specifies the size of the
            output image.
        marker (str):
            Matplotlib ``marker`` argument that specified the marker style of
            the plots.
        grid (bool):
            Matplotlib ``grid`` argument that specifies whether grids are
            rendered in in the plots or not.

    """

    def __init__(self, targets, max_sample_size=1000,
                 report_data=True, report_grad=True,
                 plot_mean=True, plot_std=True,
                 percentile_sigmas=(
                     0, 0.13, 2.28, 15.87, 50, 84.13, 97.72, 99.87, 100),
                 trigger=(1, 'epoch'), filename=None,
                 figsize=None, marker=None, grid=True, **kwargs):
        # Accept the legacy 'file_name' keyword as an alias of 'filename'.
        file_name, = argument.parse_kwargs(
            kwargs, ('file_name', 'statistics.png')
        )
        if filename is None:
            filename = file_name
        del file_name  # avoid accidental use
        self._vars = _unpack_variables(targets)
        if not self._vars:
            raise ValueError(
                'Need at least one variables for which to collect statistics.'
                '\nActual: 0 <= 0')
        if not any((plot_mean, plot_std, bool(percentile_sigmas))):
            raise ValueError('Nothing to plot')
        # Which variable attributes ('data' and/or 'grad') to collect.
        self._keys = []
        if report_data:
            self._keys.append('data')
        if report_grad:
            self._keys.append('grad')
        self._report_data = report_data
        self._report_grad = report_grad
        self._statistician = Statistician(
            collect_mean=plot_mean, collect_std=plot_std,
            percentile_sigmas=percentile_sigmas)
        self._plot_mean = plot_mean
        self._plot_std = plot_std
        self._plot_percentile = bool(percentile_sigmas)
        self._trigger = trigger_module.get_trigger(trigger)
        self._filename = filename
        self._figsize = figsize
        self._marker = marker
        self._grid = grid
        if not self._plot_percentile:
            n_percentile = 0
        else:
            if not isinstance(percentile_sigmas, (list, tuple)):
                n_percentile = 1  # scalar, single percentile
            else:
                n_percentile = len(percentile_sigmas)
        # Per-sample layout: one row per key, one column per statistic.
        self._data_shape = (
            len(self._keys), int(plot_mean) + int(plot_std) + n_percentile)
        self._samples = Reservoir(max_sample_size, data_shape=self._data_shape)

    @staticmethod
    def available():
        """Return whether matplotlib is importable (warns when it is not)."""
        _check_available()
        return _available

    def __call__(self, trainer):
        """Collect one statistics sample and save the plot when triggered.

        Statistics are computed over the concatenated, flattened values of
        all watched variables, added to the reservoir sample, and rendered
        to ``trainer.out/<filename>`` whenever the plot trigger fires.
        """
        if self.available():
            # Dynamically import pyplot to call matplotlib.use()
            # after importing chainer.training.extensions
            import matplotlib.pyplot as plt
        else:
            return
        xp = backend.get_array_module(self._vars[0].data)
        stats = xp.zeros(self._data_shape, dtype=xp.float32)
        for i, k in enumerate(self._keys):
            xs = []
            for var in self._vars:
                x = getattr(var, k, None)
                if x is not None:
                    xs.append(x.ravel())
            if xs:
                stat_dict = self._statistician(
                    xp.concatenate(xs, axis=0), axis=0, xp=xp)
                stat_list = []
                if self._plot_mean:
                    stat_list.append(xp.atleast_1d(stat_dict['mean']))
                if self._plot_std:
                    stat_list.append(xp.atleast_1d(stat_dict['std']))
                if self._plot_percentile:
                    stat_list.append(xp.atleast_1d(stat_dict['percentile']))
                stats[i] = xp.concatenate(stat_list, axis=0)
        if xp == cuda.cupy:
            stats = cuda.to_cpu(stats)
        self._samples.add(stats, idx=trainer.updater.iteration)
        if self._trigger(trainer):
            file_path = os.path.join(trainer.out, self._filename)
            self.save_plot_using_module(file_path, plt)

    def save_plot_using_module(self, file_path, plt):
        """Render the collected samples with ``plt`` and save ``file_path``."""
        nrows = int(self._plot_mean or self._plot_std) \
            + int(self._plot_percentile)
        ncols = len(self._keys)
        fig, axes = plt.subplots(
            nrows, ncols, figsize=self._figsize, sharex=True)
        # Normalize axes to a 2-D (nrows, ncols) array for uniform indexing.
        if not isinstance(axes, numpy.ndarray):  # single subplot
            axes = numpy.asarray([axes])
        if nrows == 1:
            axes = axes[None, :]
        elif ncols == 1:
            axes = axes[:, None]
        assert axes.ndim == 2
        idxs, data = self._samples.get_data()
        # Offset to access percentile data from `data`
        offset = int(self._plot_mean) + int(self._plot_std)
        n_percentile = data.shape[-1] - offset
        n_percentile_mid_floor = n_percentile // 2
        n_percentile_odd = n_percentile % 2 == 1
        for col in six.moves.range(ncols):
            row = 0
            ax = axes[row, col]
            ax.set_title(self._keys[col])  # `data` or `grad`
            if self._plot_mean or self._plot_std:
                if self._plot_mean and self._plot_std:
                    ax.errorbar(
                        idxs, data[:, col, 0], data[:, col, 1],
                        color=_plot_color, ecolor=_plot_color_trans,
                        label='mean, std', marker=self._marker)
                else:
                    if self._plot_mean:
                        label = 'mean'
                    elif self._plot_std:
                        label = 'std'
                    ax.plot(
                        idxs, data[:, col, 0], color=_plot_color, label=label,
                        marker=self._marker)
                row += 1
            if self._plot_percentile:
                ax = axes[row, col]
                # Draw symmetric percentile pairs as filled bands; a lone
                # middle percentile (odd count) is drawn as a line.
                for i in six.moves.range(n_percentile_mid_floor + 1):
                    if n_percentile_odd and i == n_percentile_mid_floor:
                        # Enters at most once per sub-plot, in case there is
                        # only a single percentile to plot or when this
                        # percentile is the mid percentile and the number of
                        # percentiles are odd
                        ax.plot(
                            idxs, data[:, col, offset + i], color=_plot_color,
                            label='percentile', marker=self._marker)
                    else:
                        if i == n_percentile_mid_floor:
                            # Last percentiles and the number of all
                            # percentiles are even
                            label = 'percentile'
                        else:
                            label = '_nolegend_'
                        ax.fill_between(
                            idxs,
                            data[:, col, offset + i],
                            data[:, col, -i - 1],
                            label=label,
                            **_plot_common_kwargs)
                    ax.set_xlabel('iteration')
        for ax in axes.ravel():
            ax.legend()
            if self._grid:
                ax.grid()
                ax.set_axisbelow(True)
        fig.savefig(file_path)
        plt.close()
| 36.817935 | 79 | 0.569489 | from __future__ import division
import os
import warnings
import numpy
import six
import chainer
from chainer import backend
from chainer.backends import cuda
from chainer.training import extension
from chainer.training import trigger as trigger_module
from chainer.utils import argument
_available = None
def _try_import_matplotlib():
    """Probe for matplotlib and, when present, set up shared plot colors.

    Sets the module global ``_available`` and, on success, the globals
    ``_plot_color``, ``_plot_color_trans`` and ``_plot_common_kwargs``.
    """
    global matplotlib, _available
    global _plot_color, _plot_color_trans, _plot_common_kwargs
    try:
        import matplotlib
        _available = True
    except ImportError:
        _available = False
    if _available:
        if hasattr(matplotlib.colors, 'to_rgba'):
            _to_rgba = matplotlib.colors.to_rgba
        else:
            # matplotlib 1.x has no module-level to_rgba.
            _to_rgba = matplotlib.colors.ColorConverter().to_rgba
        _plot_color = _to_rgba('#1f77b4')  # default color cycle C0
        _plot_color_trans = _plot_color[:3] + (0.2,)  # same hue with alpha
        _plot_common_kwargs = {
            'alpha': 0.2, 'linewidth': 0, 'color': _plot_color_trans}
def _check_available():
    """Ensure the matplotlib probe has run; warn when it is unavailable."""
    if _available is None:
        _try_import_matplotlib()
    if not _available:
        warnings.warn('matplotlib is not installed on your environment, '
                      'so nothing will be plotted at this time. '
                      'Please install matplotlib to plot figures.\n\n'
                      '  $ pip install matplotlib\n')
def _unpack_variables(x, memo=None):
    """Recursively collect chainer variables from ``x`` into a tuple.

    Accepts a single :class:`~chainer.Variable`, a :class:`~chainer.Link`
    (whose parameters, including uninitialized ones, are taken), or an
    arbitrarily nested list/tuple of either; anything else contributes
    nothing.
    """
    if memo is None:
        memo = ()
    if isinstance(x, chainer.Variable):
        memo += (x,)
    elif isinstance(x, chainer.Link):
        memo += tuple(x.params(include_uninit=True))
    elif isinstance(x, (list, tuple)):
        for xi in x:
            memo += _unpack_variables(xi)
    return memo
class Reservoir(object):

    """Reservoir sample with a fixed sized buffer.

    Keeps a uniform random sample of at most ``size`` items from a stream.
    Each retained item is stored together with a stream index so the sample
    can later be returned in stream order by :meth:`get_data`.
    """

    def __init__(self, size, data_shape, dtype=numpy.float32):
        # size: maximum number of retained samples.
        # data_shape: shape of each individual sample.
        self.size = size
        self.data = numpy.zeros((size,) + data_shape, dtype=dtype)
        self.idxs = numpy.zeros((size,), dtype=numpy.int32)
        self.counter = 0

    def add(self, x, idx=None):
        """Add ``x`` to the sample using standard reservoir sampling.

        Args:
            x: Sample to (possibly) retain; must match ``data_shape``.
            idx: Stream position recorded with the sample; defaults to the
                number of items seen so far. (The previous ``idx or
                self.counter`` logic silently replaced a legitimate
                ``idx == 0`` with the counter; an explicit ``None`` check
                fixes that.)
        """
        if idx is None:
            idx = self.counter
        if self.counter < self.size:
            # Buffer not yet full: always keep the item.
            self.data[self.counter] = x
            self.idxs[self.counter] = idx
        elif numpy.random.random() < self.size / float(self.counter + 1):
            # Replace a random slot with probability size / (n + 1) so each
            # stream element ends up retained with equal probability.
            i = numpy.random.randint(self.size)
            self.data[i] = x
            self.idxs[i] = idx
        self.counter += 1

    def get_data(self):
        """Return ``(idxs, data)`` of the retained samples in stream order."""
        idxs = self.idxs[:min(self.counter, self.size)]
        sorted_args = numpy.argsort(idxs)
        return idxs[sorted_args], self.data[sorted_args]
class Statistician(object):

    """Compute a configurable subset of mean/std/percentile statistics."""

    def __init__(self, collect_mean, collect_std, percentile_sigmas):
        # Flags selecting which statistics collect() produces; a falsy
        # percentile_sigmas disables percentile computation.
        self.collect_mean = collect_mean
        self.collect_std = collect_std
        self.percentile_sigmas = percentile_sigmas

    def __call__(self, x, axis=0, dtype=None, xp=None):
        """Normalize ``axis`` to a tuple and delegate to :meth:`collect`.

        ``dtype`` and ``xp`` are accepted for interface compatibility but
        are unused here.
        """
        if axis is None:
            axis = tuple(range(x.ndim))
        elif not isinstance(axis, (tuple, list)):
            axis = axis,
        return self.collect(x, axis)

    def collect(self, x, axis):
        """Return a dict of the enabled statistics of ``x`` over ``axis``."""
        out = dict()
        if self.collect_mean:
            out['mean'] = x.mean(axis=axis)
        if self.collect_std:
            out['std'] = x.std(axis=axis)
        if self.percentile_sigmas:
            xp = cuda.get_array_module(x)
            p = xp.percentile(x, self.percentile_sigmas, axis=axis)
            out['percentile'] = p
        return out
class VariableStatisticsPlot(extension.Extension):
    """Trainer extension that plots statistics of monitored variables.

    Every call samples mean/std and a set of percentiles of the targets'
    ``data`` and/or ``grad`` arrays into a reservoir; whenever ``trigger``
    fires, the collected samples are rendered to an image file.
    """
    def __init__(self, targets, max_sample_size=1000,
                 report_data=True, report_grad=True,
                 plot_mean=True, plot_std=True,
                 percentile_sigmas=(
                     0, 0.13, 2.28, 15.87, 50, 84.13, 97.72, 99.87, 100),
                 trigger=(1, 'epoch'), filename=None,
                 figsize=None, marker=None, grid=True, **kwargs):
        # ``file_name`` is the deprecated keyword alias of ``filename``.
        file_name, = argument.parse_kwargs(
            kwargs, ('file_name', 'statistics.png')
        )
        if filename is None:
            filename = file_name
        del file_name
        self._vars = _unpack_variables(targets)
        if not self._vars:
            raise ValueError(
                'Need at least one variables for which to collect statistics.'
                '\nActual: 0 <= 0')
        if not any((plot_mean, plot_std, bool(percentile_sigmas))):
            raise ValueError('Nothing to plot')
        # Which variable attributes are sampled ('data' and/or 'grad').
        self._keys = []
        if report_data:
            self._keys.append('data')
        if report_grad:
            self._keys.append('grad')
        self._report_data = report_data
        self._report_grad = report_grad
        self._statistician = Statistician(
            collect_mean=plot_mean, collect_std=plot_std,
            percentile_sigmas=percentile_sigmas)
        self._plot_mean = plot_mean
        self._plot_std = plot_std
        self._plot_percentile = bool(percentile_sigmas)
        self._trigger = trigger_module.get_trigger(trigger)
        self._filename = filename
        self._figsize = figsize
        self._marker = marker
        self._grid = grid
        # Number of percentile curves; a scalar sigma yields one curve.
        if not self._plot_percentile:
            n_percentile = 0
        else:
            if not isinstance(percentile_sigmas, (list, tuple)):
                n_percentile = 1
            else:
                n_percentile = len(percentile_sigmas)
        # One row per sampled key, one column per collected statistic.
        self._data_shape = (
            len(self._keys), int(plot_mean) + int(plot_std) + n_percentile)
        self._samples = Reservoir(max_sample_size, data_shape=self._data_shape)
    @staticmethod
    def available():
        # _check_available warns when matplotlib is missing; _available
        # caches the result of that probe.
        _check_available()
        return _available
    def __call__(self, trainer):
        """Sample the current statistics and, on trigger, save the plot."""
        if self.available():
            import matplotlib.pyplot as plt
        else:
            return
        xp = backend.get_array_module(self._vars[0].data)
        stats = xp.zeros(self._data_shape, dtype=xp.float32)
        for i, k in enumerate(self._keys):
            xs = []
            for var in self._vars:
                x = getattr(var, k, None)
                if x is not None:
                    xs.append(x.ravel())
            if xs:
                # Statistics are computed over all targets concatenated.
                stat_dict = self._statistician(
                    xp.concatenate(xs, axis=0), axis=0, xp=xp)
                stat_list = []
                if self._plot_mean:
                    stat_list.append(xp.atleast_1d(stat_dict['mean']))
                if self._plot_std:
                    stat_list.append(xp.atleast_1d(stat_dict['std']))
                if self._plot_percentile:
                    stat_list.append(xp.atleast_1d(stat_dict['percentile']))
                stats[i] = xp.concatenate(stat_list, axis=0)
        if xp == cuda.cupy:
            stats = cuda.to_cpu(stats)
        self._samples.add(stats, idx=trainer.updater.iteration)
        if self._trigger(trainer):
            file_path = os.path.join(trainer.out, self._filename)
            self.save_plot_using_module(file_path, plt)
    def save_plot_using_module(self, file_path, plt):
        """Render the reservoir samples with *plt* and save to *file_path*."""
        nrows = int(self._plot_mean or self._plot_std) \
            + int(self._plot_percentile)
        ncols = len(self._keys)
        fig, axes = plt.subplots(
            nrows, ncols, figsize=self._figsize, sharex=True)
        # Normalize ``axes`` to a 2-D array regardless of subplot layout.
        if not isinstance(axes, numpy.ndarray):
            axes = numpy.asarray([axes])
        if nrows == 1:
            axes = axes[None, :]
        elif ncols == 1:
            axes = axes[:, None]
        assert axes.ndim == 2
        idxs, data = self._samples.get_data()
        # Percentile columns start after the mean/std columns.
        offset = int(self._plot_mean) + int(self._plot_std)
        n_percentile = data.shape[-1] - offset
        n_percentile_mid_floor = n_percentile // 2
        n_percentile_odd = n_percentile % 2 == 1
        for col in six.moves.range(ncols):
            row = 0
            ax = axes[row, col]
            ax.set_title(self._keys[col])
            if self._plot_mean or self._plot_std:
                if self._plot_mean and self._plot_std:
                    ax.errorbar(
                        idxs, data[:, col, 0], data[:, col, 1],
                        color=_plot_color, ecolor=_plot_color_trans,
                        label='mean, std', marker=self._marker)
                else:
                    if self._plot_mean:
                        label = 'mean'
                    elif self._plot_std:
                        label = 'std'
                    ax.plot(
                        idxs, data[:, col, 0], color=_plot_color, label=label,
                        marker=self._marker)
                row += 1
            if self._plot_percentile:
                ax = axes[row, col]
                # Mirrored percentile pairs are drawn as filled bands; a
                # middle percentile (odd count) becomes a single line.
                for i in six.moves.range(n_percentile_mid_floor + 1):
                    if n_percentile_odd and i == n_percentile_mid_floor:
                        ax.plot(
                            idxs, data[:, col, offset + i], color=_plot_color,
                            label='percentile', marker=self._marker)
                    else:
                        if i == n_percentile_mid_floor:
                            label = 'percentile'
                        else:
                            label = '_nolegend_'
                        ax.fill_between(
                            idxs,
                            data[:, col, offset + i],
                            data[:, col, -i - 1],
                            label=label,
                            **_plot_common_kwargs)
                ax.set_xlabel('iteration')
        for ax in axes.ravel():
            ax.legend()
            if self._grid:
                ax.grid()
                ax.set_axisbelow(True)
        fig.savefig(file_path)
        # NOTE(review): closes the *current* pyplot figure; plt.close(fig)
        # would be more precise -- confirm before changing.
        plt.close()
| true | true |
f71f62b04af3fdacd6538bdd099ff2935e8e0a14 | 2,893 | py | Python | tests/test_param_grid.py | MarcoJHB/ploomber | 4849ef6915572f7934392443b4faf138172b9596 | [
"Apache-2.0"
] | 2,141 | 2020-02-14T02:34:34.000Z | 2022-03-31T22:43:20.000Z | tests/test_param_grid.py | MarcoJHB/ploomber | 4849ef6915572f7934392443b4faf138172b9596 | [
"Apache-2.0"
] | 660 | 2020-02-06T16:15:57.000Z | 2022-03-31T22:55:01.000Z | tests/test_param_grid.py | MarcoJHB/ploomber | 4849ef6915572f7934392443b4faf138172b9596 | [
"Apache-2.0"
] | 122 | 2020-02-14T18:53:05.000Z | 2022-03-27T22:33:24.000Z | import datetime
from dateutil.relativedelta import relativedelta
import pytest
from ploomber.util import ParamGrid, Interval
def compare(a, b):
    """Return True when *a* and *b* contain the same items and same length.

    Order-insensitive containment check used to compare lists of parameter
    dicts regardless of generation order.
    """
    same_length = len(a) == len(b)
    return same_length and all(item in b for item in a)
def test_interval():
    """Interval.expand yields consecutive (start, end) pairs and repr is stable."""
    interval = Interval(datetime.date(year=2010, month=1, day=1),
                        datetime.date(year=2012, month=1, day=1),
                        relativedelta(years=1))
    expanded = interval.expand()
    repr_ = ('Interval from 2010-01-01 to 2012-01-01 with '
             'delta relativedelta(years=+1)')
    # Two one-year windows covering 2010-2012.
    expected = [(datetime.date(2010, 1, 1), datetime.date(2011, 1, 1)),
                (datetime.date(2011, 1, 1), datetime.date(2012, 1, 1))]
    assert expanded == expected
    assert repr(interval) == repr_
def test_param_grid():
    """zip pairs values positionally; product yields the full cartesian grid."""
    pg = ParamGrid({'a': [1, 2, 3], 'b': [2, 4, 6]})
    # zip: 3 dicts, element-wise pairing.
    assert compare(list(pg.zip()), [{
        'a': 1,
        'b': 2
    }, {
        'a': 2,
        'b': 4
    }, {
        'a': 3,
        'b': 6
    }])
    # product: 3 x 3 = 9 dicts.
    assert compare(list(pg.product()), [{
        'a': 1,
        'b': 2
    }, {
        'a': 1,
        'b': 4
    }, {
        'a': 1,
        'b': 6
    }, {
        'a': 2,
        'b': 2
    }, {
        'a': 2,
        'b': 4
    }, {
        'a': 2,
        'b': 6
    }, {
        'a': 3,
        'b': 2
    }, {
        'a': 3,
        'b': 4
    }, {
        'a': 3,
        'b': 6
    }])
def test_param_grid_w_interval():
    """An Interval value expands to (start, end) tuples before zipping."""
    pg = ParamGrid({'a': Interval(0, 10, 2), 'b': [2, 4, 6, 8, 10]})
    assert compare(list(pg.zip()), [{
        'a': (0, 2),
        'b': 2
    }, {
        'a': (2, 4),
        'b': 4
    }, {
        'a': (4, 6),
        'b': 6
    }, {
        'a': (6, 8),
        'b': 8
    }, {
        'a': (8, 10),
        'b': 10
    }])
def test_param_grid_list():
    """A list of grids produces each grid's product, concatenated in order."""
    first = {'a': [1, 2], 'b': [1, 2]}
    second = {'c': [3, 4], 'd': [3, 4]}
    pg = ParamGrid([first, second])
    # 4 dicts from `first` followed by 4 dicts from `second`.
    assert list(pg.product()) == [{
        'a': 1,
        'b': 1
    }, {
        'a': 1,
        'b': 2
    }, {
        'a': 2,
        'b': 1
    }, {
        'a': 2,
        'b': 2
    }, {
        'c': 3,
        'd': 3
    }, {
        'c': 3,
        'd': 4
    }, {
        'c': 4,
        'd': 3
    }, {
        'c': 4,
        'd': 4
    }]
def test_param_grid_with_str_list():
    """String values are treated as atoms, not iterated character by character."""
    pg = ParamGrid({
        'a': ['one', 'another'],
        'b': ['more', 'final'],
    })
    # 2 x 2 combinations.
    assert len(list(pg.product())) == 4
@pytest.mark.parametrize('val', [
    'one',
    1,
    1.1,
])
def test_param_grid_product_with_single_value(val):
    """A scalar value (str/int/float) is wrapped as a single-element axis."""
    pg = ParamGrid({'a': val, 'b': ['more', 'final']})
    # 1 x 2 combinations.
    assert len(list(pg.product())) == 2
@pytest.mark.parametrize('val', [
    'one',
    1,
    1.1,
])
def test_param_grid_zip_with_single_value(val):
    """zip with scalar and one-element list yields exactly one combination."""
    pg = ParamGrid({'a': val, 'b': ['more']})
    assert len(list(pg.zip())) == 1
| 19.15894 | 71 | 0.407881 | import datetime
from dateutil.relativedelta import relativedelta
import pytest
from ploomber.util import ParamGrid, Interval
def compare(a, b):
for element in a:
if element not in b:
return False
return len(a) == len(b)
def test_interval():
interval = Interval(datetime.date(year=2010, month=1, day=1),
datetime.date(year=2012, month=1, day=1),
relativedelta(years=1))
expanded = interval.expand()
repr_ = ('Interval from 2010-01-01 to 2012-01-01 with '
'delta relativedelta(years=+1)')
expected = [(datetime.date(2010, 1, 1), datetime.date(2011, 1, 1)),
(datetime.date(2011, 1, 1), datetime.date(2012, 1, 1))]
assert expanded == expected
assert repr(interval) == repr_
def test_param_grid():
pg = ParamGrid({'a': [1, 2, 3], 'b': [2, 4, 6]})
assert compare(list(pg.zip()), [{
'a': 1,
'b': 2
}, {
'a': 2,
'b': 4
}, {
'a': 3,
'b': 6
}])
assert compare(list(pg.product()), [{
'a': 1,
'b': 2
}, {
'a': 1,
'b': 4
}, {
'a': 1,
'b': 6
}, {
'a': 2,
'b': 2
}, {
'a': 2,
'b': 4
}, {
'a': 2,
'b': 6
}, {
'a': 3,
'b': 2
}, {
'a': 3,
'b': 4
}, {
'a': 3,
'b': 6
}])
def test_param_grid_w_interval():
pg = ParamGrid({'a': Interval(0, 10, 2), 'b': [2, 4, 6, 8, 10]})
assert compare(list(pg.zip()), [{
'a': (0, 2),
'b': 2
}, {
'a': (2, 4),
'b': 4
}, {
'a': (4, 6),
'b': 6
}, {
'a': (6, 8),
'b': 8
}, {
'a': (8, 10),
'b': 10
}])
def test_param_grid_list():
first = {'a': [1, 2], 'b': [1, 2]}
second = {'c': [3, 4], 'd': [3, 4]}
pg = ParamGrid([first, second])
assert list(pg.product()) == [{
'a': 1,
'b': 1
}, {
'a': 1,
'b': 2
}, {
'a': 2,
'b': 1
}, {
'a': 2,
'b': 2
}, {
'c': 3,
'd': 3
}, {
'c': 3,
'd': 4
}, {
'c': 4,
'd': 3
}, {
'c': 4,
'd': 4
}]
def test_param_grid_with_str_list():
pg = ParamGrid({
'a': ['one', 'another'],
'b': ['more', 'final'],
})
assert len(list(pg.product())) == 4
@pytest.mark.parametrize('val', [
'one',
1,
1.1,
])
def test_param_grid_product_with_single_value(val):
pg = ParamGrid({'a': val, 'b': ['more', 'final']})
assert len(list(pg.product())) == 2
@pytest.mark.parametrize('val', [
'one',
1,
1.1,
])
def test_param_grid_zip_with_single_value(val):
pg = ParamGrid({'a': val, 'b': ['more']})
assert len(list(pg.zip())) == 1
| true | true |
f71f63419874a18aec03723ca69a1e11494c93fe | 27 | py | Python | btd6_memory_info/generated/NinjaKiwi/LiNK/Lobbies/LatencyMeasurements/StatsExtensions/stats_extensions.py | 56kyle/bloons_auto | 419d55b51d1cddc49099593970adf1c67985b389 | [
"MIT"
] | null | null | null | btd6_memory_info/generated/NinjaKiwi/LiNK/Lobbies/LatencyMeasurements/StatsExtensions/stats_extensions.py | 56kyle/bloons_auto | 419d55b51d1cddc49099593970adf1c67985b389 | [
"MIT"
] | null | null | null | btd6_memory_info/generated/NinjaKiwi/LiNK/Lobbies/LatencyMeasurements/StatsExtensions/stats_extensions.py | 56kyle/bloons_auto | 419d55b51d1cddc49099593970adf1c67985b389 | [
"MIT"
] | null | null | null | class StatsExtensions: pass | 27 | 27 | 0.888889 | class StatsExtensions: pass | true | true |
f71f638a703961e5577fb4b19745f2c50b5b4f2c | 478 | py | Python | Django/SOC2/MyChat/chat/routing.py | JanStoltman/100DaysOfCode | 1d18b76ed1e3e942e8392006a5d4bfb41484d047 | [
"MIT"
] | null | null | null | Django/SOC2/MyChat/chat/routing.py | JanStoltman/100DaysOfCode | 1d18b76ed1e3e942e8392006a5d4bfb41484d047 | [
"MIT"
] | null | null | null | Django/SOC2/MyChat/chat/routing.py | JanStoltman/100DaysOfCode | 1d18b76ed1e3e942e8392006a5d4bfb41484d047 | [
"MIT"
] | null | null | null | from channels import route
from .consumers import ws_connect, ws_receive, ws_disconnect, chat_join, chat_leave, chat_send
# Django Channels routing table: dispatch websocket lifecycle events to
# the consumer callbacks imported above.
websocket_routing = [
    route("websocket.connect", ws_connect),
    route("websocket.receive", ws_receive),
    route("websocket.disconnect", ws_disconnect),
]
custom_routing = [
route("chat.receive", chat_join, command="^join$"),
route("chat.receive", chat_leave, command="^leave$"),
route("chat.receive", chat_send, command="^send$"),
] | 29.875 | 94 | 0.717573 | from channels import route
from .consumers import ws_connect, ws_receive, ws_disconnect, chat_join, chat_leave, chat_send
websocket_routing = [
route("websocket.connect", ws_connect),
route("websocket.receive", ws_receive),
route("websocket.disconnect", ws_disconnect),
]
custom_routing = [
route("chat.receive", chat_join, command="^join$"),
route("chat.receive", chat_leave, command="^leave$"),
route("chat.receive", chat_send, command="^send$"),
] | true | true |
f71f65dc650c5a613143b036baee3ed96b5449c9 | 5,089 | py | Python | jinahub/encoders/audio/VGGISHAudioEncoder/vggish_audio_encoder.py | Gikiman/executors | 98658b4136859164390cfccbde8cf0f7cf843593 | [
"Apache-2.0"
] | null | null | null | jinahub/encoders/audio/VGGISHAudioEncoder/vggish_audio_encoder.py | Gikiman/executors | 98658b4136859164390cfccbde8cf0f7cf843593 | [
"Apache-2.0"
] | null | null | null | jinahub/encoders/audio/VGGISHAudioEncoder/vggish_audio_encoder.py | Gikiman/executors | 98658b4136859164390cfccbde8cf0f7cf843593 | [
"Apache-2.0"
] | null | null | null | __copyright__ = "Copyright (c) 2021 Jina AI Limited. All rights reserved."
__license__ = "Apache-2.0"
import os
from pathlib import Path
from typing import Any, Optional, List, Iterable
from jina import Executor, requests, DocumentArray
from jina.logging.logger import JinaLogger
import requests as _requests
import tensorflow as tf
tf.compat.v1.disable_eager_execution()
from .vggish.vggish_postprocess import *
from .vggish.vggish_slim import *
cur_dir = os.path.dirname(os.path.abspath(__file__))
class VggishAudioEncoder(Executor):
    """Encode audio data with VGGish embeddings.

    Model checkpoints (VGGish weights and PCA post-processing parameters)
    are downloaded into ``model_path`` on first construction if missing.

    :param model_path: path of the directory holding the model checkpoints
    :param default_traversal_paths: fallback traversal paths used when the
        request parameters do not specify ``traversal_paths``
    """

    def __init__(self,
                 model_path: str = Path(cur_dir) / 'models',
                 default_traversal_paths: Optional[Iterable[str]] = None,
                 *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.default_traversal_paths = default_traversal_paths or ['r']
        self.logger = JinaLogger(self.__class__.__name__)
        self.model_path = Path(model_path)
        self.vgg_model_path = self.model_path / 'vggish_model.ckpt'
        self.pca_model_path = self.model_path / 'vggish_pca_params.ckpt'
        # Create the model directory (and any missing parents).
        self.model_path.mkdir(parents=True, exist_ok=True)
        if not self.vgg_model_path.exists():
            self._download_model(
                'https://storage.googleapis.com/audioset/vggish_model.ckpt',
                self.vgg_model_path, 'VGGish model')
        if not self.pca_model_path.exists():
            # NOTE(review): upstream serves the PCA parameters as an .npz
            # archive although it is stored locally under a .ckpt name.
            self._download_model(
                'https://storage.googleapis.com/audioset/vggish_pca_params.npz',
                self.pca_model_path, 'PCA model')
        self.sess = tf.compat.v1.Session()
        define_vggish_slim()
        load_vggish_slim_checkpoint(self.sess, str(self.vgg_model_path))
        self.feature_tensor = self.sess.graph.get_tensor_by_name(
            INPUT_TENSOR_NAME)
        self.embedding_tensor = self.sess.graph.get_tensor_by_name(
            OUTPUT_TENSOR_NAME)
        self.post_processor = Postprocessor(str(self.pca_model_path))

    def _download_model(self, url: str, destination: Path, name: str):
        """Fetch a checkpoint from ``url`` into ``destination``.

        Logs and re-raises any HTTP or connection failure so start-up
        aborts instead of continuing with a missing model.
        """
        self.logger.info(
            f'{name} cannot be found from the given model path, '
            'downloading a new one...')
        try:
            r = _requests.get(url)
            r.raise_for_status()
        except _requests.exceptions.HTTPError:
            self.logger.error(
                f'received HTTP error response, cannot download {name}')
            raise
        except _requests.exceptions.RequestException:
            self.logger.error(f'Connection error, cannot download {name}')
            raise
        with open(destination, 'wb') as f:
            f.write(r.content)

    @requests
    def encode(self, docs: Optional[DocumentArray], parameters: dict, **kwargs):
        """Compute embeddings and store them in the `docs` array.

        :param docs: documents sent to the encoder; only docs with a
            non-empty ``blob`` are encoded.
        :param parameters: dictionary that may override ``traversal_paths``,
            e.g. ``parameters={'traversal_paths': ['r']}``.
        :param kwargs: additional key-value arguments (ignored).
        """
        if docs:
            cleaned_document_array = self._get_input_data(docs, parameters)
            self._create_embeddings(cleaned_document_array)

    def _get_input_data(self, docs: DocumentArray, parameters: dict):
        """Create a filtered set of Documents to iterate over."""
        traversal_paths = parameters.get(
            'traversal_paths', self.default_traversal_paths)
        # Traverse all documents selected for processing ...
        flat_docs = docs.traverse_flat(traversal_paths)
        # ... and keep only the ones that actually carry audio data.
        return DocumentArray([doc for doc in flat_docs if doc.blob is not None])

    def _create_embeddings(self, filtered_docs: Iterable):
        """Update the documents with the embeddings generated by VGGISH."""
        for d in filtered_docs:
            # VGGish broadcasts across different-length audios, not batches.
            [embedding] = self.sess.run(
                [self.embedding_tensor],
                feed_dict={self.feature_tensor: d.blob})
            result = self.post_processor.postprocess(embedding)
            d.embedding = np.mean((np.float32(result) - 128.) / 128., axis=0)

    def close(self):
        """Release the TensorFlow session."""
        self.sess.close()
| 41.373984 | 112 | 0.659658 | __copyright__ = "Copyright (c) 2021 Jina AI Limited. All rights reserved."
__license__ = "Apache-2.0"
import os
from pathlib import Path
from typing import Any, Optional, List, Iterable
from jina import Executor, requests, DocumentArray
from jina.logging.logger import JinaLogger
import requests as _requests
import tensorflow as tf
tf.compat.v1.disable_eager_execution()
from .vggish.vggish_postprocess import *
from .vggish.vggish_slim import *
cur_dir = os.path.dirname(os.path.abspath(__file__))
class VggishAudioEncoder(Executor):
def __init__(self,
model_path: str = Path(cur_dir) / 'models',
default_traversal_paths: Optional[Iterable[str]] = None,
*args, **kwargs):
super().__init__(*args, **kwargs)
self.default_traversal_paths = default_traversal_paths or ['r']
self.logger = JinaLogger(self.__class__.__name__)
self.model_path = Path(model_path)
self.vgg_model_path = self.model_path / 'vggish_model.ckpt'
self.pca_model_path = self.model_path / 'vggish_pca_params.ckpt'
self.model_path.mkdir(exist_ok=True)
if not self.vgg_model_path.exists():
self.logger.info('VGGish model cannot be found from the given model path, downloading a new one...')
try:
r = _requests.get('https://storage.googleapis.com/audioset/vggish_model.ckpt')
r.raise_for_status()
except _requests.exceptions.HTTPError:
self.logger.error('received HTTP error response, cannot download vggish model')
raise
except _requests.exceptions.RequestException:
self.logger.error('Connection error, cannot download vggish model')
raise
with open(self.vgg_model_path, 'wb') as f:
f.write(r.content)
if not self.pca_model_path.exists():
self.logger.info('PCA model cannot be found from the given model path, downloading a new one...')
try:
r = _requests.get('https://storage.googleapis.com/audioset/vggish_pca_params.npz')
r.raise_for_status()
except _requests.exceptions.HTTPError:
self.logger.error('received HTTP error response, cannot download pca model')
raise
except _requests.exceptions.RequestException:
self.logger.error('Connection error, cannot download pca model')
raise
with open(self.pca_model_path, 'wb') as f:
f.write(r.content)
self.sess = tf.compat.v1.Session()
define_vggish_slim()
load_vggish_slim_checkpoint(self.sess, str(self.vgg_model_path))
self.feature_tensor = self.sess.graph.get_tensor_by_name(
INPUT_TENSOR_NAME)
self.embedding_tensor = self.sess.graph.get_tensor_by_name(
OUTPUT_TENSOR_NAME)
self.post_processor = Postprocessor(str(self.pca_model_path))
@requests
def encode(self, docs: Optional[DocumentArray], parameters: dict, **kwargs):
if docs:
cleaned_document_array = self._get_input_data(docs, parameters)
self._create_embeddings(cleaned_document_array)
def _get_input_data(self, docs: DocumentArray, parameters: dict):
traversal_paths = parameters.get('traversal_paths', self.default_traversal_paths)
flat_docs = docs.traverse_flat(traversal_paths)
filtered_docs = DocumentArray([doc for doc in flat_docs if doc.blob is not None])
return filtered_docs
def _create_embeddings(self, filtered_docs: Iterable):
for d in filtered_docs:
[embedding] = self.sess.run([self.embedding_tensor], feed_dict={self.feature_tensor: d.blob})
result = self.post_processor.postprocess(embedding)
d.embedding = np.mean((np.float32(result) - 128.) / 128., axis=0)
def close(self):
self.sess.close()
| true | true |
f71f66195317baeeed07698a274b4377fafe07c5 | 1,436 | py | Python | alipay/aop/api/domain/AlipayOpenPublicSinglearticleDataBatchqueryModel.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/domain/AlipayOpenPublicSinglearticleDataBatchqueryModel.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/domain/AlipayOpenPublicSinglearticleDataBatchqueryModel.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
from alipay.aop.api.constant.ParamConstants import *
class AlipayOpenPublicSinglearticleDataBatchqueryModel(object):
    """Request model for batch-querying single-article data by date range."""

    # Field names shared by serialization and deserialization.
    _FIELDS = ('begin_date', 'end_date')

    def __init__(self):
        self._begin_date = None
        self._end_date = None

    @property
    def begin_date(self):
        return self._begin_date

    @begin_date.setter
    def begin_date(self, val):
        self._begin_date = val

    @property
    def end_date(self):
        return self._end_date

    @end_date.setter
    def end_date(self, val):
        self._end_date = val

    def to_alipay_dict(self):
        """Serialize the populated (truthy) fields into a plain dict."""
        params = {}
        for field in self._FIELDS:
            value = getattr(self, field)
            if not value:
                continue
            # Nested models serialize themselves; plain values pass through.
            if hasattr(value, 'to_alipay_dict'):
                params[field] = value.to_alipay_dict()
            else:
                params[field] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a model from a dict, or return None for an empty input."""
        if not d:
            return None
        model = AlipayOpenPublicSinglearticleDataBatchqueryModel()
        for field in AlipayOpenPublicSinglearticleDataBatchqueryModel._FIELDS:
            if field in d:
                setattr(model, field, d[field])
        return model
| 25.642857 | 71 | 0.601671 |
import simplejson as json
from alipay.aop.api.constant.ParamConstants import *
class AlipayOpenPublicSinglearticleDataBatchqueryModel(object):
def __init__(self):
self._begin_date = None
self._end_date = None
@property
def begin_date(self):
return self._begin_date
@begin_date.setter
def begin_date(self, value):
self._begin_date = value
@property
def end_date(self):
return self._end_date
@end_date.setter
def end_date(self, value):
self._end_date = value
def to_alipay_dict(self):
params = dict()
if self.begin_date:
if hasattr(self.begin_date, 'to_alipay_dict'):
params['begin_date'] = self.begin_date.to_alipay_dict()
else:
params['begin_date'] = self.begin_date
if self.end_date:
if hasattr(self.end_date, 'to_alipay_dict'):
params['end_date'] = self.end_date.to_alipay_dict()
else:
params['end_date'] = self.end_date
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayOpenPublicSinglearticleDataBatchqueryModel()
if 'begin_date' in d:
o.begin_date = d['begin_date']
if 'end_date' in d:
o.end_date = d['end_date']
return o
| true | true |
f71f668d6fbe52a3a43d82cbb88b941356fc85b3 | 2,366 | py | Python | actstream/runtests/manage.py | inspiration4hunter/django-actstream | 7d655b3bf239c85a6ac804ff72e748214b81bb8e | [
"BSD-3-Clause"
] | 1 | 2019-06-27T13:04:59.000Z | 2019-06-27T13:04:59.000Z | actstream/runtests/manage.py | techdragon/django-activity-stream | d5b18470c8682cec3e3db4cfaf8920c3dd33f6bb | [
"BSD-3-Clause"
] | null | null | null | actstream/runtests/manage.py | techdragon/django-activity-stream | d5b18470c8682cec3e3db4cfaf8920c3dd33f6bb | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# http://ericholscher.com/blog/2009/jun/29/enable-setuppy-test-your-django-apps/
# http://www.travisswicegood.com/2010/01/17/django-virtualenv-pip-and-fabric/
# http://code.djangoproject.com/svn/django/trunk/tests/runtests.py
# https://github.com/tomchristie/django-rest-framework/blob/master/rest_framework/runtests/runtests.py
import os
import sys
import warnings
warnings.filterwarnings("ignore")
# fix sys path so we don't need to setup PYTHONPATH
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")))
os.environ['DJANGO_SETTINGS_MODULE'] = 'actstream.runtests.settings'
# Map the DATABASE_ENGINE env var onto a full Django backend path.
engine = os.environ.get('DATABASE_ENGINE', 'django.db.backends.sqlite3')
if engine.startswith('mysql'):
    engine = 'django.db.backends.mysql'
elif engine.startswith('postgre'):
    engine = 'django.db.backends.postgresql_psycopg2'
else:
    engine = 'django.db.backends.sqlite3'
try:
    import django
except SyntaxError:
    # Installed Django uses syntax this interpreter cannot parse.
    sys.stderr.write('Unable to import django (older python version)\n')
    exit(0)
PYPY = hasattr(sys, 'pypy_version_info')
version = sys.version_info[:2]
PY3 = version[0] == 3
# Exit quietly (status 0) for interpreter/Django combinations known to be
# unsupported, so CI matrices can include them harmlessly.
if PYPY and engine.endswith('psycopg2') and bytes != str:
    sys.stderr.write('PyPy3 does not have a psycopg implementation\n')
    exit(0)
if PY3 and django.VERSION[:2] >= (1, 9) and version <= (3, 3):
    sys.stderr.write('Django>=1.9 does not support Python<=3.3\n')
    exit(0)
if PY3 and django.VERSION[:2] <= (1, 8) and version >= (3, 5):
    sys.stderr.write('Django<=1.8 does not support Python>=3.5\n')
    exit(0)
if PY3 and django.VERSION[:2] == (1, 8) and version <= (3, 3):
    sys.stderr.write('Django 1.8 does not support Python<=3.3\n')
    exit(0)
if django.VERSION[:2] <= (1, 4) and PY3:
    sys.stderr.write('Django<=1.4 does not support Python3\n')
    exit(0)
if version == (2, 6) and django.VERSION[:2] >= (1, 7):
    sys.stderr.write('Django>=1.7 does not support Python2.6\n')
    exit(0)
os.environ['DATABASE_ENGINE'] = engine
# Optional driver shims: psycopg2cffi for PyPy, PyMySQL as MySQLdb.
try:
    from psycopg2cffi import compat
    compat.register()
except ImportError:
    pass
try:
    import pymysql
    pymysql.install_as_MySQLdb()
except ImportError:
    pass
# django.setup() only exists on Django >= 1.7.
try:
    django.setup()
except AttributeError:
    pass
if __name__ == '__main__':
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| 30.333333 | 102 | 0.703719 |
import os
import sys
import warnings
warnings.filterwarnings("ignore")
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")))
os.environ['DJANGO_SETTINGS_MODULE'] = 'actstream.runtests.settings'
engine = os.environ.get('DATABASE_ENGINE', 'django.db.backends.sqlite3')
if engine.startswith('mysql'):
engine = 'django.db.backends.mysql'
elif engine.startswith('postgre'):
engine = 'django.db.backends.postgresql_psycopg2'
else:
engine = 'django.db.backends.sqlite3'
try:
import django
except SyntaxError:
sys.stderr.write('Unable to import django (older python version)\n')
exit(0)
PYPY = hasattr(sys, 'pypy_version_info')
version = sys.version_info[:2]
PY3 = version[0] == 3
if PYPY and engine.endswith('psycopg2') and bytes != str:
sys.stderr.write('PyPy3 does not have a psycopg implementation\n')
exit(0)
if PY3 and django.VERSION[:2] >= (1, 9) and version <= (3, 3):
sys.stderr.write('Django>=1.9 does not support Python<=3.3\n')
exit(0)
if PY3 and django.VERSION[:2] <= (1, 8) and version >= (3, 5):
sys.stderr.write('Django<=1.8 does not support Python>=3.5\n')
exit(0)
if PY3 and django.VERSION[:2] == (1, 8) and version <= (3, 3):
sys.stderr.write('Django 1.8 does not support Python<=3.3\n')
exit(0)
if django.VERSION[:2] <= (1, 4) and PY3:
sys.stderr.write('Django<=1.4 does not support Python3\n')
exit(0)
if version == (2, 6) and django.VERSION[:2] >= (1, 7):
sys.stderr.write('Django>=1.7 does not support Python2.6\n')
exit(0)
os.environ['DATABASE_ENGINE'] = engine
try:
from psycopg2cffi import compat
compat.register()
except ImportError:
pass
try:
import pymysql
pymysql.install_as_MySQLdb()
except ImportError:
pass
try:
django.setup()
except AttributeError:
pass
if __name__ == '__main__':
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| true | true |
f71f669e87f2e8ea5e0d8ed4ce44947d705ba9d6 | 572 | py | Python | Python/Factorization Of Numbers/PairFactorization.py | DeWill404/Data-Structure-and-Algorithm | c61d245c920edff747e87dc7c2ea139561766a3a | [
"MIT"
] | null | null | null | Python/Factorization Of Numbers/PairFactorization.py | DeWill404/Data-Structure-and-Algorithm | c61d245c920edff747e87dc7c2ea139561766a3a | [
"MIT"
] | null | null | null | Python/Factorization Of Numbers/PairFactorization.py | DeWill404/Data-Structure-and-Algorithm | c61d245c920edff747e87dc7c2ea139561766a3a | [
"MIT"
] | null | null | null | # function to generate list of factors
def get_factorList(n):
    """Collect every factor of ``n``, scanning candidates up to sqrt(n).

    1 and ``n`` are seeded first (collapsing to a single entry when n == 1);
    each divisor found below the square root contributes its complementary
    partner as well, with perfect-square roots recorded only once.
    """
    factors = list({1, n})
    for candidate in range(2, int(n ** 0.5) + 1):
        if n % candidate:
            continue
        partner = n // candidate
        if candidate == partner:
            # Perfect square: both halves of the pair coincide.
            factors.append(candidate)
        else:
            factors.extend((candidate, partner))
    return factors
if __name__ == "__main__":
# List of input no's
list_of_numbers = [23, 46, 65, 34234, 423, 43212]
# Get factor list of given no.
for num in list_of_numbers:
print(get_factorList(num)) | 22.88 | 53 | 0.624126 |
def get_factorList(n):
l = list(set([1,n]))
for i in range(2, int(n**0.5)+1):
if n%i == 0:
if i == n//i:
l.append(i)
else:
l.extend([i,n//i])
return l
if __name__ == "__main__":
list_of_numbers = [23, 46, 65, 34234, 423, 43212]
# Get factor list of given no.
for num in list_of_numbers:
print(get_factorList(num)) | true | true |
f71f684248dd8ea778131509570d6005305ece61 | 3,684 | py | Python | uwb_channel.py | iguarna/uwb-ieee | 782813b8a6fc9effeb076c47cd5d497b6e62b330 | [
"MIT"
] | null | null | null | uwb_channel.py | iguarna/uwb-ieee | 782813b8a6fc9effeb076c47cd5d497b6e62b330 | [
"MIT"
] | null | null | null | uwb_channel.py | iguarna/uwb-ieee | 782813b8a6fc9effeb076c47cd5d497b6e62b330 | [
"MIT"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
def gen_channel(parameters, fc=5E9, fs=2E9, dynamic_range=30):
    """Generate one random UWB channel impulse response.

    Saleh-Valenzuela style model: clusters arrive as a Poisson process and
    rays within a cluster arrive as a mixture of two Poisson processes
    (rate 1 chosen with probability ``beta``). Ray amplitudes are
    Nakagami-m distributed around an exponentially decaying mean power, a
    spectral tilt ``f ** (-2 * kf)`` is applied in the frequency domain,
    and the result is normalized to unit energy.

    :param parameters: model parameters; rates are per nanosecond and decay
        constants in nanoseconds. Required keys: ``cluster_rate``,
        ``inter_cluster_rate_1``, ``inter_cluster_rate_2``, ``beta``,
        ``cluster_decay``, ``inter_cluster_decay``, ``mean_m``, ``std_m``,
        ``std_cluster_shadowing``, ``kf``.
    :param fc: center frequency in Hz.
    :param fs: sampling rate in Hz; must be at least 1 GHz so that one
        nanosecond maps to one or more samples.
    :param dynamic_range: decay range in dB that sets the response length.
    :return: 1-D complex ndarray, normalized to unit energy.
    :raises ValueError: if ``fs`` is below 1 GHz, or if the inter-cluster
        decay exceeds the cluster decay.
    """
    # Samples per nanosecond; all ns-based parameters are rescaled with it
    # so the loops below operate in sample units.
    nanosec_to_samples = int(1E-9 * fs)
    if nanosec_to_samples == 0:
        # Previously this fell through to an obscure ZeroDivisionError.
        raise ValueError("Sampling rate fs must be at least 1 GHz.")

    # Unpack parameters and convert units (1/ns -> 1/sample, ns -> samples).
    cluster_rate = parameters['cluster_rate'] / nanosec_to_samples
    inter_cluster_rate_1 = parameters['inter_cluster_rate_1'] / nanosec_to_samples
    inter_cluster_rate_2 = parameters['inter_cluster_rate_2'] / nanosec_to_samples
    beta = parameters['beta']
    cluster_decay = parameters['cluster_decay'] * nanosec_to_samples
    inter_cluster_decay = parameters['inter_cluster_decay'] * nanosec_to_samples
    mean_m = parameters['mean_m']
    std_m = parameters['std_m']
    std_cluster_shadowing = parameters['std_cluster_shadowing']
    kf = parameters['kf']

    if inter_cluster_decay > cluster_decay:
        raise ValueError("Inter cluster decay cannot be larger than cluster decay.")

    # Length needed for the envelope to fall by `dynamic_range` dB.
    max_t = int(dynamic_range * cluster_decay * np.log(10) / 10)
    h = np.zeros(max_t, dtype=complex)

    t = 0
    while t < max_t:
        tau = 0
        # Rays of this cluster may only extend proportionally to the
        # remaining window, which keeps t + tau < max_t.
        max_tau = int((max_t - t) * inter_cluster_decay / cluster_decay)
        # Cluster power: exponential decay with log-normal shadowing.
        cluster_power = np.exp(-t / cluster_decay) * np.random.lognormal(mean=0, sigma=std_cluster_shadowing)
        while tau < max_tau:
            # Mean power for this ray within the cluster.
            mean_power = cluster_power * np.exp(-tau / inter_cluster_decay)
            # Nakagami m-factor is log-normally distributed.
            m = np.random.lognormal(mean_m, std_m)
            # Nakagami amplitude via a Gamma draw, uniform phase.
            a = np.sqrt(np.random.gamma(shape=m, scale=mean_power / m))
            phi = np.random.uniform(0, 2 * np.pi)
            h[t + tau] = a * np.exp(-1j * phi)
            # Mixture of two Poisson ray-arrival processes.
            if np.random.uniform(0, 1) < beta:
                inter_cluster_rate = inter_cluster_rate_1
            else:
                inter_cluster_rate = inter_cluster_rate_2
            # NOTE(review): round() may produce a zero step, re-drawing the
            # same tap before advancing; assumed acceptable model behavior.
            tau += round(np.random.exponential(1 / inter_cluster_rate))
        t += round(np.random.exponential(1 / cluster_rate))

    # Add frequency dependency; zero-pad before the FFT to avoid
    # circular-convolution artifacts.
    h = np.append(h, np.zeros(h.size, dtype=complex))
    H = np.fft.fft(h, norm='ortho')
    # Frequency axis reordered to match FFT bin layout (DC first).
    freq = np.linspace(fc - fs / 2, fc + fs / 2, num=h.size)
    freq = np.append(freq[freq.size // 2:], freq[:freq.size // 2])
    Gf = np.power(freq, -2 * kf)
    H = Gf * H
    h = np.fft.ifft(H, norm='ortho')
    # Remove the zero padding again.
    h = h[:h.size // 2]

    # Normalize to unit energy.
    return normalize(h)


def normalize(s):
    """Scale signal *s* to unit energy (assumes nonzero energy)."""
    return s / np.sqrt(energy(s))


def energy(s):
    """Total energy of a (possibly complex) signal: sum of |s|**2."""
    return np.sum(np.square(np.abs(s)))
if __name__ == '__main__':
    # Parameter set labeled CM1 -- presumably the residential-LOS model of
    # the IEEE 802.15.4a channel report; TODO confirm the source of values.
    parameters_cm1 = {
        'cluster_rate': 0.047,
        'inter_cluster_rate_1': 1.54,
        'inter_cluster_rate_2': 0.15,
        'beta': 0.095,
        'cluster_decay': 22.61,
        'inter_cluster_decay': 12.53,
        'mean_m': 0.67,
        'std_m': 0.28,
        'std_cluster_shadowing': 2.75,
        'kf': 1.12,
        # 'kd' and 'std_path_shadowing' are not read by gen_channel.
        'kd': 1.79,
        'std_path_shadowing': 2.22
    }
    # Demo: one realization over the 3.1-10.6 GHz UWB band, plot |h|.
    h = gen_channel(parameters=parameters_cm1,
                    fc=(10.6E9 + 3.1E9) / 2,
                    fs=6E9,
                    dynamic_range=30)
    plt.plot(np.abs(h))
    plt.show()
| 28.55814 | 109 | 0.59392 | import numpy as np
import matplotlib.pyplot as plt
def gen_channel(parameters, fc=5E9, fs=2E9, dynamic_range=30):
nanosec_to_samples = int(1E-9 * fs)
er = np.exp(-t / cluster_decay) * np.random.lognormal(mean=0, sigma=std_cluster_shadowing)
while tau < max_tau:
mean_power = cluster_power * np.exp(-tau / inter_cluster_decay)
m = np.random.lognormal(mean_m, std_m)
a = np.sqrt(np.random.gamma(shape=m, scale=mean_power / m))
phi = np.random.uniform(0, 2 * np.pi)
h[t + tau] = np.array([a * np.exp(-1j * phi)])[0]
if np.random.uniform(0, 1) < beta:
inter_cluster_rate = inter_cluster_rate_1
else:
inter_cluster_rate = inter_cluster_rate_2
tau += round(np.random.exponential(1 / inter_cluster_rate))
t += round(np.random.exponential(1 / cluster_rate))
fft(H, norm='ortho')
h = h[:h.size // 2]
rgy(s):
return np.sum(np.square(np.abs(s)))
if __name__ == '__main__':
parameters_cm1 = {
'cluster_rate': 0.047,
'inter_cluster_rate_1': 1.54,
'inter_cluster_rate_2': 0.15,
'beta': 0.095,
'cluster_decay': 22.61,
'inter_cluster_decay': 12.53,
'mean_m': 0.67,
'std_m': 0.28,
'std_cluster_shadowing': 2.75,
'kf': 1.12,
'kd': 1.79,
'std_path_shadowing': 2.22
}
h = gen_channel(parameters=parameters_cm1,
fc=(10.6E9 + 3.1E9) / 2,
fs=6E9,
dynamic_range=30)
plt.plot(np.abs(h))
plt.show()
| true | true |
f71f689447e4c38f173ed630b270c2889bd40d14 | 3,032 | py | Python | tottle/exception_factory/error_handler/error_handler.py | muffleo/tottle | 69a5bdda879ab56d43505d517d3369a687c135a2 | [
"MIT"
] | 12 | 2020-09-06T15:31:34.000Z | 2021-02-27T20:30:34.000Z | tottle/exception_factory/error_handler/error_handler.py | cyanlabs-org/tottle | 6cf02022ed7b445c9b5af475c6e854b91780d792 | [
"MIT"
] | 2 | 2021-04-13T06:43:42.000Z | 2021-07-07T20:52:39.000Z | tottle/exception_factory/error_handler/error_handler.py | cyanlabs-org/tottle | 6cf02022ed7b445c9b5af475c6e854b91780d792 | [
"MIT"
] | 4 | 2020-09-12T03:09:25.000Z | 2021-03-22T08:52:04.000Z | import traceback
import typing
from tottle.exception_factory.error_handler.abc import ABCErrorHandler, ExceptionHandler
from tottle.modules import logger
class ErrorHandler(ABCErrorHandler):
    """Maps exception class names to async handler callables and dispatches raised errors.

    Handlers are keyed by the exception's exact class name, so a subclass
    does not match its parent's handler.  Unmatched exceptions go to the
    optional "undefined" handler; failing that, the traceback is logged.
    """
    def __init__(self, redirect_arguments: bool = False):
        # exception class name -> handler coroutine function
        self.error_handlers: typing.Dict[str, ExceptionHandler] = {}
        # fallback used when no per-class handler matches
        self.undefined_error_handler: typing.Optional[ExceptionHandler] = None
        # when True, the failing call's args/kwargs are forwarded to handlers
        self.redirect_arguments = redirect_arguments
    def register_error_handler(
        self,
        exception_type: typing.Type[BaseException],
        exception_handler: typing.Optional[ExceptionHandler] = None,
    ) -> typing.Optional[typing.Callable[[ExceptionHandler], typing.Any]]:
        """Register *exception_handler* for *exception_type*.

        Usable directly (returns ``None``) or, when no handler is passed,
        as a decorator factory that registers the wrapped function.
        """
        if exception_handler:
            self.error_handlers[exception_type.__name__] = exception_handler
            return None
        def decorator(func: ExceptionHandler):
            self.error_handlers[exception_type.__name__] = func
            return func
        return decorator
    def register_undefined_error_handler(
        self,
        undefined_error_handler: typing.Optional[ExceptionHandler] = None,
    ) -> typing.Optional[typing.Callable[[ExceptionHandler], typing.Any]]:
        """Register the fallback handler; decorator form when no handler is given."""
        if undefined_error_handler:
            self.undefined_error_handler = undefined_error_handler
            return None
        def decorator(func: ExceptionHandler):
            self.undefined_error_handler = func
            return func
        return decorator
    async def call_handler(
        self, handler: ExceptionHandler, e: BaseException, *args, **kwargs
    ) -> typing.Awaitable[typing.Any]:
        """Invoke *handler* with *e*, forwarding args/kwargs if configured.

        NOTE(review): the ``except TypeError`` also swallows TypeErrors
        raised *inside* the handler, not only signature mismatches.
        """
        try:
            if self.redirect_arguments:
                return await handler(e, *args, **kwargs)  # type: ignore
            return await handler(e)  # type: ignore
        except TypeError:
            pass
    def wraps_error_handler(
        self,
    ) -> typing.Callable[
        [typing.Any],
        typing.Callable[[typing.Any, typing.Any], typing.Awaitable[typing.Any]],
    ]:
        """Decorator factory: route any exception from the wrapped coroutine to handle()."""
        def decorator(func: typing.Union[typing.NoReturn, typing.Any]):
            async def wrapper(*args, **kwargs):
                try:
                    return await func(*args, **kwargs)
                # BaseException so even KeyboardInterrupt/SystemExit reach handle()
                except BaseException as e:
                    return await self.handle(e, *args, **kwargs)
            return wrapper
        return decorator
    async def handle(self, e: BaseException, *args, **kwargs) -> typing.Any:
        """Dispatch *e* to its registered handler, the fallback, or log the traceback."""
        if e.__class__.__name__ in self.error_handlers:
            return await self.call_handler(
                self.error_handlers[e.__class__.__name__], e, *args, **kwargs
            )
        elif self.undefined_error_handler:
            return await self.call_handler(
                self.undefined_error_handler, e, *args, **kwargs
            )
        logger.error("\n" + traceback.format_exc())
    @property
    def handling_exceptions(
        self,
    ) -> typing.Union[str, typing.Tuple[str, ...]]:
        """Names of all exception classes that currently have a handler."""
        return tuple(k for k in self.error_handlers.keys())
| 34.067416 | 88 | 0.636544 | import traceback
import typing
from tottle.exception_factory.error_handler.abc import ABCErrorHandler, ExceptionHandler
from tottle.modules import logger
class ErrorHandler(ABCErrorHandler):
    """Maps exception class names to async handler callables and dispatches raised errors.

    Handlers are keyed by the exception's exact class name, so a subclass
    does not match its parent's handler.  Unmatched exceptions go to the
    optional "undefined" handler; failing that, the traceback is logged.
    """
    def __init__(self, redirect_arguments: bool = False):
        # exception class name -> handler coroutine function
        self.error_handlers: typing.Dict[str, ExceptionHandler] = {}
        # fallback used when no per-class handler matches
        self.undefined_error_handler: typing.Optional[ExceptionHandler] = None
        # when True, the failing call's args/kwargs are forwarded to handlers
        self.redirect_arguments = redirect_arguments
    def register_error_handler(
        self,
        exception_type: typing.Type[BaseException],
        exception_handler: typing.Optional[ExceptionHandler] = None,
    ) -> typing.Optional[typing.Callable[[ExceptionHandler], typing.Any]]:
        """Register *exception_handler* for *exception_type*.

        Usable directly (returns ``None``) or, when no handler is passed,
        as a decorator factory that registers the wrapped function.
        """
        if exception_handler:
            self.error_handlers[exception_type.__name__] = exception_handler
            return None
        def decorator(func: ExceptionHandler):
            self.error_handlers[exception_type.__name__] = func
            return func
        return decorator
    def register_undefined_error_handler(
        self,
        undefined_error_handler: typing.Optional[ExceptionHandler] = None,
    ) -> typing.Optional[typing.Callable[[ExceptionHandler], typing.Any]]:
        """Register the fallback handler; decorator form when no handler is given."""
        if undefined_error_handler:
            self.undefined_error_handler = undefined_error_handler
            return None
        def decorator(func: ExceptionHandler):
            self.undefined_error_handler = func
            return func
        return decorator
    async def call_handler(
        self, handler: ExceptionHandler, e: BaseException, *args, **kwargs
    ) -> typing.Awaitable[typing.Any]:
        """Invoke *handler* with *e*, forwarding args/kwargs if configured.

        NOTE(review): the ``except TypeError`` also swallows TypeErrors
        raised *inside* the handler, not only signature mismatches.
        """
        try:
            if self.redirect_arguments:
                return await handler(e, *args, **kwargs)
            return await handler(e)
        except TypeError:
            pass
    def wraps_error_handler(
        self,
    ) -> typing.Callable[
        [typing.Any],
        typing.Callable[[typing.Any, typing.Any], typing.Awaitable[typing.Any]],
    ]:
        """Decorator factory: route any exception from the wrapped coroutine to handle()."""
        def decorator(func: typing.Union[typing.NoReturn, typing.Any]):
            async def wrapper(*args, **kwargs):
                try:
                    return await func(*args, **kwargs)
                # BaseException so even KeyboardInterrupt/SystemExit reach handle()
                except BaseException as e:
                    return await self.handle(e, *args, **kwargs)
            return wrapper
        return decorator
    async def handle(self, e: BaseException, *args, **kwargs) -> typing.Any:
        """Dispatch *e* to its registered handler, the fallback, or log the traceback."""
        if e.__class__.__name__ in self.error_handlers:
            return await self.call_handler(
                self.error_handlers[e.__class__.__name__], e, *args, **kwargs
            )
        elif self.undefined_error_handler:
            return await self.call_handler(
                self.undefined_error_handler, e, *args, **kwargs
            )
        logger.error("\n" + traceback.format_exc())
    @property
    def handling_exceptions(
        self,
    ) -> typing.Union[str, typing.Tuple[str, ...]]:
        """Names of all exception classes that currently have a handler."""
        return tuple(k for k in self.error_handlers.keys())
| true | true |
f71f68f22277399de37d076c657cde17a277ddbd | 70,087 | py | Python | androguard/core/analysis/analysis.py | appknox/old-androguard | 8b2fbc262f10f99016f4bbaaac51a963abdb90e4 | [
"Apache-2.0"
] | null | null | null | androguard/core/analysis/analysis.py | appknox/old-androguard | 8b2fbc262f10f99016f4bbaaac51a963abdb90e4 | [
"Apache-2.0"
] | null | null | null | androguard/core/analysis/analysis.py | appknox/old-androguard | 8b2fbc262f10f99016f4bbaaac51a963abdb90e4 | [
"Apache-2.0"
] | null | null | null | # This file is part of Androguard.
#
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import collections
from androguard.core.analysis.sign import Signature, TAINTED_PACKAGE_CREATE, \
TAINTED_PACKAGE_CALL
from androguard.core.androconf import debug, is_ascii_problem,\
load_api_specific_resource_module
from androguard.core.bytecodes import dvm
# Maps each Dalvik field-access opcode mnemonic to the kind of access it
# performs: "R" for reads (iget*/sget*), "W" for writes (iput*/sput*).
# Consumers index the value with [0] (see DVMBasicBlock.push), so the
# single-character strings are deliberate.
DVM_FIELDS_ACCESS = {
    "iget": "R",
    "iget-wide": "R",
    "iget-object": "R",
    "iget-boolean": "R",
    "iget-byte": "R",
    "iget-char": "R",
    "iget-short": "R",
    "iput": "W",
    "iput-wide": "W",
    "iput-object": "W",
    "iput-boolean": "W",
    "iput-byte": "W",
    "iput-char": "W",
    "iput-short": "W",
    "sget": "R",
    "sget-wide": "R",
    "sget-object": "R",
    "sget-boolean": "R",
    "sget-byte": "R",
    "sget-char": "R",
    "sget-short": "R",
    "sput": "W",
    "sput-wide": "W",
    "sput-object": "W",
    "sput-boolean": "W",
    "sput-byte": "W",
    "sput-char": "W",
    "sput-short": "W",
}
class ContextField(object):
    """Pairs a field-access mode with an accumulating list of detail entries."""

    def __init__(self, mode):
        self.mode = mode      # access mode for the field (e.g. "R"/"W")
        self.details = []     # detail entries collected via set_details()

    def set_details(self, details):
        """Append every entry of *details* (despite the 'set_' name, it accumulates)."""
        self.details.extend(details)
class ContextMethod(object):
    """Accumulates detail entries describing a method-usage context."""

    def __init__(self):
        self.details = []

    def set_details(self, details):
        """Append every entry of *details* (despite the 'set_' name, it accumulates)."""
        self.details.extend(details)
class ExternalFM(object):
    """Descriptor for a field/method that lives outside the analyzed DEX."""

    def __init__(self, class_name, name, descriptor):
        # owning class, member name and its type/prototype descriptor
        self.class_name, self.name, self.descriptor = class_name, name, descriptor

    def get_class_name(self):
        """Return the owning class name (e.g. 'Ljava/lang/String;')."""
        return self.class_name

    def get_name(self):
        """Return the member name."""
        return self.name

    def get_descriptor(self):
        """Return the member's descriptor string."""
        return self.descriptor
class ToString(object):
    """Builds a compact tag string by matching pushed names against regex buckets.

    ``tab`` maps a single-character tag to a list of regex sources; every
    pattern that matches a pushed name appends its tag, except that
    consecutive 'O' tags are collapsed into one.
    """

    def __init__(self, tab):
        self.__tab = tab
        # pre-compile every pattern, keyed by its tag
        self.__re_tab = {tag: [re.compile(src) for src in sources]
                         for tag, sources in tab.items()}
        self.__string = ""

    def push(self, name):
        """Match *name* against all buckets and extend the tag string."""
        for tag in self.__tab:
            for pattern in self.__re_tab[tag]:
                if pattern.match(name) is None:
                    continue
                # collapse runs of consecutive 'O' tags
                if self.__string and tag == 'O' and self.__string[-1] == 'O':
                    continue
                self.__string += tag

    def get_string(self):
        """Return the accumulated tag string."""
        return self.__string
class BreakBlock(object):
    """A linear run of instructions tracking its byte span and collected items."""

    def __init__(self, _vm, idx):
        self._vm = _vm
        self._start = idx      # offset of the first instruction
        self._end = idx        # advances as instructions are pushed
        self._ins = []
        self._ops = []
        self._fields = {}
        self._methods = {}

    def push(self, ins):
        """Append *ins* and advance the end offset by its encoded length."""
        self._ins.append(ins)
        self._end += ins.get_length()

    def get_start(self):
        """Offset of the first instruction of the block."""
        return self._start

    def get_end(self):
        """Offset just past the last pushed instruction."""
        return self._end

    def get_ops(self):
        return self._ops

    def get_fields(self):
        return self._fields

    def get_methods(self):
        return self._methods

    def show(self):
        """Pretty-print every instruction at indent level 0."""
        for instruction in self._ins:
            instruction.show(0)
class DVMBasicBlock(object):
    """
    A simple basic block of a dalvik method.

    Instructions are fed in via :meth:`push`, which also records taint
    information (field accesses, invokes, news, const-strings) into the
    context's tainted-variable/package trackers.
    """
    def __init__(self, start, vm, method, context):
        # start: byte offset of the first instruction of this block
        # vm: the DEX object used to resolve constant-pool indices
        # method: the method this block belongs to
        # context: analysis object exposing the taint trackers
        self.__vm = vm
        self.method = method
        self.context = context
        self.last_length = 0        # length of the most recently pushed instruction
        self.nb_instructions = 0    # count of instructions in this block
        self.fathers = []           # predecessor blocks as (idx, end-offset, block)
        self.childs = []            # successor blocks as (end-offset, idx, block)
        self.start = start
        self.end = self.start       # grows as instructions are pushed
        self.special_ins = {}       # offset -> associated payload instruction
        self.name = "%s-BB@0x%x" % (self.method.get_name(), self.start)
        self.exception_analysis = None
        self.tainted_variables = self.context.get_tainted_variables()
        self.tainted_packages = self.context.get_tainted_packages()
        self.notes = []
    def get_notes(self):
        return self.notes
    def set_notes(self, value):
        # replaces all notes with a single entry
        self.notes = [value]
    def add_note(self, note):
        self.notes.append(note)
    def clear_notes(self):
        self.notes = []
    def get_instructions(self):
        """
        Get all instructions from a basic block.

        Re-walks the whole method and keeps only the instructions whose
        offset falls in [start, end).

        :rtype: Return all instructions in the current basic block
        """
        tmp_ins = []
        idx = 0
        for i in self.method.get_instructions():
            if idx >= self.start and idx < self.end:
                tmp_ins.append(i)
            idx += i.get_length()
        return tmp_ins
    def get_nb_instructions(self):
        return self.nb_instructions
    def get_method(self):
        return self.method
    def get_name(self):
        return "%s-BB@0x%x" % (self.method.get_name(), self.start)
    def get_start(self):
        return self.start
    def get_end(self):
        return self.end
    def get_last(self):
        # last instruction of the block (rebuilds the instruction list)
        return self.get_instructions()[-1]
    def get_next(self):
        """
        Get next basic blocks

        :rtype: a list of the next basic blocks
        """
        return self.childs
    def get_prev(self):
        """
        Get previous basic blocks

        :rtype: a list of the previous basic blocks
        """
        return self.fathers
    def set_fathers(self, f):
        self.fathers.append(f)
    def get_last_length(self):
        return self.last_length
    def set_childs(self, values):
        # an empty list means fall-through: link to the block that starts
        # right after this one, if any; -1 entries are invalid targets
        if values == []:
            next_block = self.context.get_basic_block(self.end + 1)
            if next_block is not None:
                self.childs.append(
                    (self.end - self.get_last_length(), self.end, next_block))
        else:
            for i in values:
                if i != -1:
                    next_block = self.context.get_basic_block(i)
                    if next_block is not None:
                        self.childs.append(
                            (self.end - self.get_last_length(), i, next_block))
        # register this block as a father of each of its children
        for c in self.childs:
            if c[2] is not None:
                c[2].set_fathers((c[1], c[0], self))
    def push(self, i):
        # Append instruction *i*, advance offsets, and record taint info
        # keyed on the opcode value.  The blanket except silently ignores
        # malformed/unresolvable instructions.
        try:
            self.nb_instructions += 1
            idx = self.end
            self.last_length = i.get_length()
            self.end += self.last_length
            op_value = i.get_op_value()
            # field access (iget*/iput*/sget*/sput*)
            if (op_value >= 0x52 and op_value <= 0x6d):
                desc = self.__vm.get_cm_field(i.get_ref_kind())
                if self.tainted_variables is not None:
                    self.tainted_variables.push_info(TAINTED_FIELD, desc, DVM_FIELDS_ACCESS[
                        i.get_name()][0], idx, self.method)
            # invoke (invoke-kind and invoke-kind/range)
            elif (op_value >= 0x6e and op_value <= 0x72) or (op_value >= 0x74 and op_value <= 0x78):
                idx_meth = i.get_ref_kind()
                method_info = self.__vm.get_cm_method(idx_meth)
                if self.tainted_packages is not None:
                    self.tainted_packages.push_info(
                        method_info[0], TAINTED_PACKAGE_CALL, idx, self.method, idx_meth)
            # new_instance
            elif op_value == 0x22:
                idx_type = i.get_ref_kind()
                type_info = self.__vm.get_cm_type(idx_type)
                if self.tainted_packages is not None:
                    self.tainted_packages.push_info(
                        type_info, TAINTED_PACKAGE_CREATE, idx, self.method, None)
            # const-string / const-string-jumbo
            elif (op_value >= 0x1a and op_value <= 0x1b):
                string_name = self.__vm.get_cm_string(i.get_ref_kind())
                if self.tainted_variables is not None:
                    self.tainted_variables.push_info(
                        TAINTED_STRING, string_name, "R", idx, self.method)
            # fill-array-data (0x26), packed-switch (0x2b), sparse-switch (0x2c):
            # remember the payload instruction the branch offset points at
            elif op_value == 0x26 or (op_value >= 0x2b and op_value <= 0x2c):
                code = self.method.get_code().get_bc()
                self.special_ins[idx] = code.get_ins_off(
                    idx + i.get_ref_off() * 2)
        except:
            pass
    def get_special_ins(self, idx):
        """
        Return the associated instruction to a specific instruction (for example a packed/sparse switch)

        :param idx: the index of the instruction

        :rtype: None or an Instruction
        """
        try:
            return self.special_ins[idx]
        except:
            return None
    def get_exception_analysis(self):
        return self.exception_analysis
    def set_exception_analysis(self, exception_analysis):
        self.exception_analysis = exception_analysis
# Categories of tainted objects tracked by the analysis.
TAINTED_LOCAL_VARIABLE = 0
TAINTED_FIELD = 1
TAINTED_STRING = 2
class PathVar(object):
    """One recorded access to a tainted variable: how, where, and in which method."""

    def __init__(self, access, idx, dst_idx, info_obj):
        self.access_flag = access   # access kind (e.g. "R"/"W")
        self.idx = idx              # source method index of the access
        self.dst_idx = dst_idx      # destination method index
        self.info_obj = info_obj    # the TaintedVariable being accessed

    def get_var_info(self):
        """Delegate to the tainted variable's get_info()."""
        return self.info_obj.get_info()

    def get_access_flag(self):
        return self.access_flag

    def get_idx(self):
        return self.idx

    def get_src(self, cm):
        """Resolve the source method (class, name, descriptor) via *cm*."""
        ref = cm.get_method_ref(self.idx)
        return ref.get_class_name(), ref.get_name(), ref.get_descriptor()

    def get_dst(self, cm):
        """Resolve the destination method (class, name, descriptor) via *cm*."""
        ref = cm.get_method_ref(self.dst_idx)
        return ref.get_class_name(), ref.get_name(), ref.get_descriptor()
class TaintedVariable(object):
    """A single tainted object (field, string or local) plus every access to it."""
    def __init__(self, var, _type):
        self.var = var      # identity: tuple for fields, the string itself for strings
        self.type = _type   # one of TAINTED_LOCAL_VARIABLE / TAINTED_FIELD / TAINTED_STRING
        self.paths = {}     # method_idx -> list of (access_flag, instruction_idx)
        self.__cache = []   # flattened paths, filled lazily by get_paths()
    def get_type(self):
        return self.type
    def get_info(self):
        # fields are stored as (class_name, name, descriptor); reorder to
        # [class_name, descriptor, name] for display
        if self.type == TAINTED_FIELD:
            return [self.var[0], self.var[2], self.var[1]]
        return self.var
    def push(self, access, idx, ref):
        """Record an access of kind *access* at offset *idx* in method *ref*."""
        m_idx = ref.get_method_idx()
        if m_idx not in self.paths:
            self.paths[m_idx] = []
        self.paths[m_idx].append((access, idx))
    def get_paths_access(self, mode):
        # NOTE(review): this looks broken -- self.paths[i] is a list, so
        # indexing it with the tuple j (self.paths[i][j]) raises TypeError
        # on the first iteration.  Presumably intended to unpack the
        # (access, idx) pairs of self.paths[i] directly -- confirm.
        for i in self.paths:
            for j in self.paths[i]:
                for k, v in self.paths[i][j]:
                    if k in mode:
                        yield i, j, k, v
    def get_paths(self):
        # returns (and caches) a flat list of [(access, idx), method_idx]
        if self.__cache != []:
            return self.__cache
        for i in self.paths:
            for j in self.paths[i]:
                self.__cache.append([j, i])
        # yield j, i
        return self.__cache
    def get_paths_length(self):
        # number of distinct methods touching the variable, not total accesses
        return len(self.paths)
    def show_paths(self, vm):
        show_PathVariable(vm, self.get_paths())
class TaintedVariables(object):
    """Registry of all tainted fields, strings and locals of one DEX file."""
    def __init__(self, _vm):
        self.__vm = _vm
        # category -> {key: TaintedVariable}
        self.__vars = {
            TAINTED_LOCAL_VARIABLE: {},
            TAINTED_FIELD: {},
            TAINTED_STRING: {},
        }
        # method_idx -> set of TaintedVariable touched by that method
        self.__cache_field_by_method = {}
        self.__cache_string_by_method = {}
        # permission databases for the DEX's API level
        self.AOSP_PERMISSIONS_MODULE = load_api_specific_resource_module(
            "aosp_permissions", self.__vm.get_api_version())
        self.API_PERMISSION_MAPPINGS_MODULE = load_api_specific_resource_module(
            "api_permission_mappings", self.__vm.get_api_version())
    # functions to get particulars elements
    def get_string(self, s):
        """Return the TaintedVariable for string *s*, or None."""
        try:
            return self.__vars[TAINTED_STRING][s]
        except KeyError:
            return None
    def get_field(self, class_name, name, descriptor):
        """Return the TaintedVariable for the field, or None.

        Note the lookup key order (class + descriptor + name) differs from
        the insertion key in add() (class + name + descriptor); kept as-is.
        """
        key = class_name + descriptor + name
        try:
            return self.__vars[TAINTED_FIELD][key]
        except KeyError:
            return None
    def toPathVariable(self, obj):
        """Expand a TaintedVariable's flattened paths into PathVar objects."""
        z = []
        for i in obj.get_paths():
            access, idx = i[0]
            m_idx = i[1]
            z.append(PathVar(access, idx, m_idx, obj))
        return z
    # permission functions
    def get_permissions_method(self, method):
        """Permissions implied by the tainted fields accessed inside *method*."""
        permissions = set()
        for f, f1 in self.get_fields():
            data = "%s-%s-%s" % (f.var[0], f.var[2], f.var[1])
            if data in list(self.API_PERMISSION_MAPPINGS_MODULE["AOSP_PERMISSIONS_BY_FIELDS"].keys()):
                for path in f.get_paths():
                    #access, idx = path[0]
                    m_idx = path[1]
                    # NOTE(review): uses method.get_idx() here while the
                    # caches elsewhere key on method.get_method_idx() --
                    # verify these return the same value for this type.
                    if m_idx == method.get_idx():
                        permissions.update(self.API_PERMISSION_MAPPINGS_MODULE[
                            "AOSP_PERMISSIONS_BY_FIELDS"][data])
        return permissions
    def get_permissions(self, permissions_needed):
        """
        @param permissions_needed : a list of restricted permissions to get ([] returns all permissions)
        @rtype : a dictionnary of permissions' paths
        """
        permissions = {}
        pn = set(permissions_needed)
        if permissions_needed == []:
            pn = set(self.AOSP_PERMISSIONS_MODULE["AOSP_PERMISSIONS"].keys())
        for f, _ in self.get_fields():
            data = "%s-%s-%s" % (f.var[0], f.var[2], f.var[1])
            if data in list(self.API_PERMISSION_MAPPINGS_MODULE["AOSP_PERMISSIONS_BY_FIELDS"].keys()):
                perm_intersection = pn.intersection(self.API_PERMISSION_MAPPINGS_MODULE[
                    "AOSP_PERMISSIONS_BY_FIELDS"][data])
                for p in perm_intersection:
                    try:
                        permissions[p].extend(self.toPathVariable(f))
                    except KeyError:
                        permissions[p] = []
                        permissions[p].extend(self.toPathVariable(f))
        return permissions
    # global functions
    def get_strings(self):
        """Yield (TaintedVariable, string) pairs for every tainted string."""
        for i in self.__vars[TAINTED_STRING]:
            yield self.__vars[TAINTED_STRING][i], i
    def get_fields(self):
        """Yield (TaintedVariable, key) pairs for every tainted field."""
        for i in self.__vars[TAINTED_FIELD]:
            yield self.__vars[TAINTED_FIELD][i], i
    # specifics functions
    def get_strings_by_method(self, method):
        """Map each tainted string touched by *method* to its (access, idx) list."""
        z = {}
        try:
            for i in self.__cache_string_by_method[method.get_method_idx()]:
                z[i] = []
                for j in i.get_paths():
                    if method.get_method_idx() == j[1]:
                        z[i].append(j[0])
            return z
        except:
            return z
    def get_fields_by_method(self, method):
        """Map each tainted field touched by *method* to its (access, idx) list."""
        z = {}
        try:
            for i in self.__cache_field_by_method[method.get_method_idx()]:
                z[i] = []
                for j in i.get_paths():
                    if method.get_method_idx() == j[1]:
                        z[i].append(j[0])
            return z
        except:
            return z
    def add(self, var, _type, _method=None):
        """Create the TaintedVariable for *var* if it is not registered yet."""
        if _type == TAINTED_FIELD:
            key = var[0] + var[1] + var[2]
            if key not in self.__vars[TAINTED_FIELD]:
                self.__vars[TAINTED_FIELD][key] = TaintedVariable(var, _type)
        elif _type == TAINTED_STRING:
            if var not in self.__vars[TAINTED_STRING]:
                self.__vars[TAINTED_STRING][var] = TaintedVariable(var, _type)
        elif _type == TAINTED_LOCAL_VARIABLE:
            if _method not in self.__vars[TAINTED_LOCAL_VARIABLE]:
                self.__vars[TAINTED_LOCAL_VARIABLE][_method] = {}
            if var not in self.__vars[TAINTED_LOCAL_VARIABLE][_method]:
                self.__vars[TAINTED_LOCAL_VARIABLE][_method][
                    var] = TaintedVariable(var, _type)
    def push_info(self, _type, var, access, idx, ref):
        """Record an access (*access*, *idx*, method *ref*) to *var*, creating it if needed."""
        if _type == TAINTED_FIELD:
            self.add(var, _type)
            key = var[0] + var[1] + var[2]
            self.__vars[_type][key].push(access, idx, ref)
            method_idx = ref.get_method_idx()
            if method_idx not in self.__cache_field_by_method:
                self.__cache_field_by_method[method_idx] = set()
            self.__cache_field_by_method[method_idx].add(
                self.__vars[TAINTED_FIELD][key])
        elif _type == TAINTED_STRING:
            self.add(var, _type)
            self.__vars[_type][var].push(access, idx, ref)
            method_idx = ref.get_method_idx()
            if method_idx not in self.__cache_string_by_method:
                self.__cache_string_by_method[method_idx] = set()
            self.__cache_string_by_method[method_idx].add(
                self.__vars[TAINTED_STRING][var])
def show_Path(vm, path):
    """Resolve the source/destination names of *path* through the class manager.

    NOTE(review): the resolved names are assigned but never printed or
    returned -- the output statements appear to have been removed, so this
    function is effectively a no-op; kept as-is for API compatibility.
    """
    cm = vm.get_class_manager()
    if isinstance(path, PathVar):
        dst_class_name, dst_method_name, dst_descriptor = path.get_dst(cm)
    else:
        if path.get_access_flag() == TAINTED_PACKAGE_CALL:
            src_class_name, src_method_name, src_descriptor = path.get_src(cm)
            dst_class_name, dst_method_name, dst_descriptor = path.get_dst(cm)
        else:
            src_class_name, src_method_name, src_descriptor = path.get_src(cm)
def get_Path(vm, path):
    """Serialize one taint path into a dict with 'src', 'dst' (optional) and 'idx'.

    PathVar entries describe variable accesses ('src' is the variable info);
    other entries describe package paths ('src' is the source method, 'dst'
    is present only for call paths).
    """
    cm = vm.get_class_manager()
    info = {}
    if isinstance(path, PathVar):
        info["src"] = "%s" % path.get_var_info()
        info["dst"] = "%s %s %s" % path.get_dst(cm)
        info["idx"] = path.get_idx()
        return info
    if path.get_access_flag() == TAINTED_PACKAGE_CALL:
        info["src"] = "%s %s %s" % path.get_src(cm)
        info["dst"] = "%s %s %s" % path.get_dst(cm)
    else:
        info["src"] = "%s %s %s" % path.get_src(cm)
    info["idx"] = path.get_idx()
    return info
def show_Paths(vm, paths):
    """
    Show paths of packages

    :param vm: the object which represents the dex file
    :param paths: a list of :class:`PathP` objects
    """
    for current_path in paths:
        show_Path(vm, current_path)
def get_Paths(vm, paths):
    """
    Return paths of packages

    :param vm: the object which represents the dex file
    :param paths: a list of :class:`PathP` objects
    :rtype: list of dicts as produced by :func:`get_Path`
    """
    return [get_Path(vm, current_path) for current_path in paths]
def show_PathVariable(vm, paths):
    """Formerly printed every access path of a tainted variable.

    The function was disabled upstream by an unconditional ``return``; the
    printing loop that followed was unreachable and has been removed.
    Behavior (a no-op returning ``None``) is unchanged.

    :param vm: the object which represents the dex file
    :param paths: iterable of ((access, idx), method_idx) entries
    """
    return
class PathP(object):
    """A call/creation path between two methods, identified by their indices."""

    def __init__(self, access, idx, src_idx, dst_idx):
        self.access_flag = access   # TAINTED_PACKAGE_CALL or TAINTED_PACKAGE_CREATE
        self.idx = idx              # bytecode offset of the instruction
        self.src_idx = src_idx      # constant-pool index of the source method
        self.dst_idx = dst_idx      # constant-pool index of the destination

    def get_access_flag(self):
        return self.access_flag

    def get_idx(self):
        return self.idx

    def get_src_idx(self):
        return self.src_idx

    def get_dst_idx(self):
        return self.dst_idx

    def get_src(self, cm):
        """Resolve the source method (class, name, descriptor) via *cm*."""
        ref = cm.get_method_ref(self.src_idx)
        return ref.get_class_name(), ref.get_name(), ref.get_descriptor()

    def get_dst(self, cm):
        """Resolve the destination method (class, name, descriptor) via *cm*."""
        ref = cm.get_method_ref(self.dst_idx)
        return ref.get_class_name(), ref.get_name(), ref.get_descriptor()
class TaintedPackage(object):
    """All paths (creations and calls) recorded for one class/package name."""

    def __init__(self, vm, name):
        self.vm = vm
        self.name = name
        # access kind -> list of PathP
        self.paths = {TAINTED_PACKAGE_CREATE: [], TAINTED_PACKAGE_CALL: []}

    def get_name(self):
        return self.name

    def gets(self):
        """Return the raw {access: [PathP]} mapping."""
        return self.paths

    def push(self, access, idx, src_idx, dst_idx):
        """Record and return a new PathP under the given access kind."""
        p = PathP(access, idx, src_idx, dst_idx)
        self.paths[access].append(p)
        return p

    def get_objects_paths(self):
        """Paths where this package was instantiated (new-instance)."""
        return self.paths[TAINTED_PACKAGE_CREATE]

    def search_method(self, name, descriptor):
        """
        @param name : a regexp for the name of the method
        @param descriptor : a regexp for the descriptor of the method
        @rtype : a list of called paths
        """
        l = []
        m_name = re.compile(name)
        m_descriptor = re.compile(descriptor)
        for path in self.paths[TAINTED_PACKAGE_CALL]:
            _, dst_name, dst_descriptor = path.get_dst(
                self.vm.get_class_manager())
            if m_name.match(dst_name) is not None and m_descriptor.match(dst_descriptor) is not None:
                l.append(path)
        return l

    def get_method(self, name, descriptor):
        """Return call paths whose destination matches *name*/*descriptor* exactly.

        Bug fix: the previous code called ``path.get_name()`` and
        ``path.get_descriptor()``, which PathP does not define (guaranteed
        AttributeError); the destination is now resolved through the class
        manager, mirroring :meth:`search_method`.
        """
        l = []
        for path in self.paths[TAINTED_PACKAGE_CALL]:
            _, dst_name, dst_descriptor = path.get_dst(
                self.vm.get_class_manager())
            if dst_name == name and dst_descriptor == descriptor:
                l.append(path)
        return l

    def get_paths(self):
        """Yield every recorded path, creations first."""
        for i in self.paths:
            for j in self.paths[i]:
                yield j

    def get_paths_length(self):
        """Total number of recorded paths across all access kinds."""
        x = 0
        for i in self.paths:
            x += len(self.paths[i])
        return x

    def get_methods(self):
        """All call paths of this package."""
        return [path for path in self.paths[TAINTED_PACKAGE_CALL]]

    def get_new(self):
        """All creation paths of this package."""
        return [path for path in self.paths[TAINTED_PACKAGE_CREATE]]

    def show(self):
        """Formerly pretty-printed every path of the package.

        Disabled upstream by an unconditional ``return``; the unreachable
        printing code that followed has been removed.  Still a no-op.
        """
        return
def show_Permissions(dx):
    """
    Show where permissions are used in a specific application

    :param dx : the analysis virtual machine
    :type dx: a :class:`VMAnalysis` object
    """
    permission_paths = dx.get_permissions([])
    for permission_name in permission_paths:
        for permission_path in permission_paths[permission_name]:
            show_Path(dx.get_vm(), permission_path)
def show_DynCode(dx):
    """
    Show where dynamic code is used

    :param dx : the analysis virtual machine
    :type dx: a :class:`VMAnalysis` object
    """
    # (class, constructor/loader method) pairs associated with runtime
    # class loading on Dalvik
    targets = (
        ("Ldalvik/system/BaseDexClassLoader;", "<init>"),
        ("Ldalvik/system/PathClassLoader;", "<init>"),
        ("Ldalvik/system/DexClassLoader;", "<init>"),
        ("Ldalvik/system/DexFile;", "<init>"),
        ("Ldalvik/system/DexFile;", "loadDex"),
    )
    paths = []
    for class_name, method_name in targets:
        paths.extend(dx.get_tainted_packages().search_methods(class_name,
                                                              method_name,
                                                              "."))
    show_Paths(dx.get_vm(), paths)
def show_NativeMethods(dx):
    """Formerly printed the native methods of the application.

    Disabled upstream by an unconditional ``return``; the unreachable
    ``print(get_NativeMethods(dx))`` that followed has been removed.
    Behavior (a no-op returning ``None``) is unchanged — use
    :func:`get_NativeMethods` to obtain the list instead.

    :param dx : the analysis virtual machine
    :type dx: a :class:`VMAnalysis` object
    """
    return
def show_ReflectionCode(dx):
    """
    Show the reflection code

    :param dx : the analysis virtual machine
    :type dx: a :class:`VMAnalysis` object
    """
    reflection_paths = dx.get_tainted_packages().search_methods(
        "Ljava/lang/reflect/Method;", ".", ".")
    show_Paths(dx.get_vm(), reflection_paths)
def get_NativeMethods(dx):
    """
    Return the native methods

    :param dx : the analysis virtual machine
    :type dx: a :class:`VMAnalysis` object
    :rtype: [tuple] of (class_name, name, descriptor)
    """
    d = dx.get_vm()
    # 0x100 is the ACC_NATIVE bit of the Dalvik access flags
    return [(m.get_class_name(), m.get_name(), m.get_descriptor())
            for m in d.get_methods()
            if m.get_access_flags() & 0x100]
def get_ReflectionCode(dx):
    """
    Return the reflection code

    :param dx : the analysis virtual machine
    :type dx: a :class:`VMAnalysis` object
    :rtype: [dict]
    """
    reflection_paths = dx.get_tainted_packages().search_methods(
        "Ljava/lang/reflect/Method;", ".", ".")
    return get_Paths(dx.get_vm(), reflection_paths)
def is_crypto_code(dx):
    """
    Crypto code is present ?

    :param dx : the analysis virtual machine
    :type dx: a :class:`VMAnalysis` object
    :rtype: boolean
    """
    crypto_classes = ("Ljavax/crypto/.", "Ljava/security/spec/.")
    return any(
        dx.get_tainted_packages().search_methods(class_name, ".", ".")
        for class_name in crypto_classes)
def is_dyn_code(dx):
    """
    Dalvik Dynamic code loading is present ?

    :param dx : the analysis virtual machine
    :type dx: a :class:`VMAnalysis` object
    :rtype: boolean
    """
    # (class, method) signatures of the Dalvik dynamic class-loading APIs
    targets = (
        ("Ldalvik/system/BaseDexClassLoader;", "<init>"),
        ("Ldalvik/system/PathClassLoader;", "<init>"),
        ("Ldalvik/system/DexClassLoader;", "<init>"),
        ("Ldalvik/system/DexFile;", "<init>"),
        ("Ldalvik/system/DexFile;", "loadDex"),
    )
    return any(
        dx.get_tainted_packages().search_methods(class_name, method_name, ".")
        for class_name, method_name in targets)
def is_reflection_code(dx):
    """
    Reflection is present ?

    :param dx : the analysis virtual machine
    :type dx: a :class:`VMAnalysis` object
    :rtype: boolean
    """
    # (class, method-name regexp) signatures of the Java reflection APIs
    targets = (
        ("Ljava/lang/reflect/Method;", "."),
        ("Ljava/lang/reflect/Field;", "."),
        ("Ljava/lang/Class;", "forName"),
    )
    return any(
        dx.get_tainted_packages().search_methods(class_name, method_name, ".")
        for class_name, method_name in targets)
def is_native_code(dx):
    """
    Native code is present ?

    :param dx : the analysis virtual machine
    :type dx: a :class:`VMAnalysis` object
    :rtype: boolean
    """
    # library-loading entry points ("load." matches load/loadLibrary)
    targets = (
        ("Ljava/lang/System;", "load."),
        ("Ljava/lang/Runtime;", "load."),
    )
    return any(
        dx.get_tainted_packages().search_methods(class_name, method_name, ".")
        for class_name, method_name in targets)
class TaintedPackages(object):
def __init__(self, _vm):
self.__vm = _vm
self.__packages = {}
self.__methods = {}
self.AOSP_PERMISSIONS_MODULE = load_api_specific_resource_module(
"aosp_permissions", self.__vm.get_api_version())
self.API_PERMISSION_MAPPINGS_MODULE = load_api_specific_resource_module(
"api_permission_mappings", self.__vm.get_api_version())
def _add_pkg(self, name):
if name not in self.__packages:
self.__packages[name] = TaintedPackage(self.__vm, name)
#self.context.get_tainted_packages().push_info( method_info[0], TAINTED_PACKAGE_CALL, idx, self, self.method, method_info[1], method_info[2][0] + method_info[2][1] )
def push_info(self, class_name, access, idx, method, idx_method):
self._add_pkg(class_name)
p = self.__packages[class_name].push(
access, idx, method.get_method_idx(), idx_method)
try:
self.__methods[method][class_name].append(p)
except:
try:
self.__methods[method][class_name] = []
except:
self.__methods[method] = {}
self.__methods[method][class_name] = []
self.__methods[method][class_name].append(p)
def get_packages_by_method(self, method):
try:
return self.__methods[method]
except KeyError:
return {}
def get_package(self, name):
return self.__packages[name]
def get_packages_by_bb(self, bb):
"""
:rtype: return a list of packaged used in a basic block
"""
l = []
for i in self.__packages:
paths = self.__packages[i].gets()
for j in paths:
for k in paths[j]:
if k.get_bb() == bb:
l.append((i, k.get_access_flag(),
k.get_idx(), k.get_method()))
return l
def get_packages(self):
for i in self.__packages:
yield self.__packages[i], i
def get_internal_packages_from_package(self, package):
classes = self.__vm.get_classes_names()
l = []
for m, _ in self.get_packages():
paths = m.get_methods()
for j in paths:
src_class_name, _, _ = j.get_src(self.__vm.get_class_manager())
dst_class_name, _, _ = j.get_dst(self.__vm.get_class_manager())
if src_class_name == package and dst_class_name in classes:
l.append(j)
return l
def get_internal_packages(self):
"""
:rtype: return a list of the internal packages called in the application
"""
classes = self.__vm.get_classes_names()
l = []
for m, _ in self.get_packages():
paths = m.get_methods()
for j in paths:
if j.get_access_flag() == TAINTED_PACKAGE_CALL:
dst_class_name, _, _ = j.get_dst(
self.__vm.get_class_manager())
if dst_class_name in classes and m.get_name() in classes:
l.append(j)
return l
def get_internal_new_packages(self):
"""
:rtype: return a list of the internal packages created in the application
"""
classes = self.__vm.get_classes_names()
l = {}
for m, _ in self.get_packages():
paths = m.get_new()
for j in paths:
src_class_name, _, _ = j.get_src(self.__vm.get_class_manager())
if src_class_name in classes and m.get_name() in classes:
if j.get_access_flag() == TAINTED_PACKAGE_CREATE:
try:
l[m.get_name()].append(j)
except:
l[m.get_name()] = []
l[m.get_name()].append(j)
return l
def get_external_packages(self):
"""
:rtype: return a list of the external packages called in the application
"""
classes = self.__vm.get_classes_names()
l = []
for m, _ in self.get_packages():
paths = m.get_methods()
for j in paths:
src_class_name, _, _ = j.get_src(self.__vm.get_class_manager())
dst_class_name, _, _ = j.get_dst(self.__vm.get_class_manager())
if src_class_name in classes and dst_class_name not in classes:
if j.get_access_flag() == TAINTED_PACKAGE_CALL:
l.append(j)
return l
def search_packages(self, package_name):
"""
:param package_name: a regexp for the name of the package
:rtype: a list of called packages' paths
"""
ex = re.compile(package_name)
l = []
for m, _ in self.get_packages():
if ex.search(m.get_name()) is not None:
l.extend(m.get_methods())
return l
def search_unique_packages(self, package_name):
"""
:param package_name: a regexp for the name of the package
"""
ex = re.compile(package_name)
l = []
d = {}
for m, _ in self.get_packages():
if ex.match(m.get_info()) is not None:
for path in m.get_methods():
try:
d[path.get_class_name() + path.get_name() +
path.get_descriptor()] += 1
except KeyError:
d[path.get_class_name() + path.get_name() +
path.get_descriptor()] = 0
l.append([path.get_class_name(),
path.get_name(), path.get_descriptor()])
return l, d
def search_methods(self, class_name, name, descriptor, re_expr=True):
"""
@param class_name : a regexp for the class name of the method (the package)
@param name : a regexp for the name of the method
@param descriptor : a regexp for the descriptor of the method
@rtype : a list of called methods' paths
"""
l = []
if re_expr:
ex = re.compile(class_name)
for m, _ in self.get_packages():
if ex.search(m.get_name()) is not None:
l.extend(m.search_method(name, descriptor))
return l
def search_objects(self, class_name):
"""
@param class_name : a regexp for the class name
@rtype : a list of created objects' paths
"""
ex = re.compile(class_name)
l = []
for m, _ in self.get_packages():
if ex.search(m.get_name()) is not None:
l.extend(m.get_objects_paths())
return l
def search_crypto_packages(self):
"""
@rtype : a list of called crypto packages
"""
return self.search_packages("Ljavax/crypto/")
def search_telephony_packages(self):
"""
@rtype : a list of called telephony packages
"""
return self.search_packages("Landroid/telephony/")
def search_net_packages(self):
"""
@rtype : a list of called net packages
"""
return self.search_packages("Landroid/net/")
def get_method(self, class_name, name, descriptor):
try:
return self.__packages[class_name].get_method(name, descriptor)
except KeyError:
return []
def get_permissions_method(self, method):
permissions = set()
for m, _ in self.get_packages():
paths = m.get_methods()
for j in paths:
if j.get_method() == method:
if j.get_access_flag() == TAINTED_PACKAGE_CALL:
dst_class_name, dst_method_name, dst_descriptor = j.get_dst(
self.__vm.get_class_manager())
data = "%s-%s-%s" % (dst_class_name,
dst_method_name, dst_descriptor)
if data in list(self.API_PERMISSION_MAPPINGS_MODULE["AOSP_PERMISSIONS_BY_METHODS"].keys()):
permissions.update(self.API_PERMISSION_MAPPINGS_MODULE[
"AOSP_PERMISSIONS_BY_METHODS"][data])
return permissions
def get_permissions(self, permissions_needed):
    """
    @param permissions_needed : a list of restricted permissions to get ([] returns all permissions)
    @rtype : a dictionnary of permissions' paths
    """
    permissions = {}

    pn = set(permissions_needed)
    if permissions_needed == []:
        # An empty request means "every known AOSP permission".
        pn = set(self.AOSP_PERMISSIONS_MODULE["AOSP_PERMISSIONS"].keys())

    classes = self.__vm.get_classes_names()

    for m, _ in self.get_packages():
        paths = m.get_methods()
        for j in paths:
            src_class_name, src_method_name, src_descriptor = j.get_src(
                self.__vm.get_class_manager())
            dst_class_name, dst_method_name, dst_descriptor = j.get_dst(
                self.__vm.get_class_manager())
            # Only calls leaving the application: internal src, external dst.
            if (src_class_name in classes) and (dst_class_name not in classes):
                if j.get_access_flag() == TAINTED_PACKAGE_CALL:
                    data = "%s-%s-%s" % (dst_class_name,
                                         dst_method_name, dst_descriptor)
                    if data in list(self.API_PERMISSION_MAPPINGS_MODULE["AOSP_PERMISSIONS_BY_METHODS"].keys()):
                        perm_intersection = pn.intersection(self.API_PERMISSION_MAPPINGS_MODULE[
                            "AOSP_PERMISSIONS_BY_METHODS"][data])
                        for p in perm_intersection:
                            # Group the matching paths per permission name.
                            try:
                                permissions[p].append(j)
                            except KeyError:
                                permissions[p] = []
                                permissions[p].append(j)

    return permissions
class Enum(object):
    """Minimal enumeration helper: each entry of *names* becomes an
    upper-cased attribute whose value is its index in the list."""

    def __init__(self, names):
        self.names = names
        for value, name in enumerate(self.names):
            setattr(self, name.upper(), value)

    def tuples(self):
        """Return ((value, name), ...) for every member."""
        return tuple(enumerate(self.names))


# Android/Dalvik API areas used to tag analysed methods.
TAG_ANDROID = Enum([
    'ANDROID', 'TELEPHONY', 'SMS', 'SMSMESSAGE', 'ACCESSIBILITYSERVICE', 'ACCOUNTS',
    'ANIMATION', 'APP', 'BLUETOOTH', 'CONTENT', 'DATABASE', 'DEBUG', 'DRM', 'GESTURE',
    'GRAPHICS', 'HARDWARE', 'INPUTMETHODSERVICE', 'LOCATION', 'MEDIA', 'MTP',
    'NET', 'NFC', 'OPENGL', 'OS', 'PREFERENCE', 'PROVIDER', 'RENDERSCRIPT',
    'SAX', 'SECURITY', 'SERVICE', 'SPEECH', 'SUPPORT', 'TEST', 'TEXT', 'UTIL',
    'VIEW', 'WEBKIT', 'WIDGET', 'DALVIK_BYTECODE', 'DALVIK_SYSTEM', 'JAVA_REFLECTION'])

# Reverse mapping: tag value -> tag name.
TAG_REVERSE_ANDROID = dict((i[0], i[1]) for i in TAG_ANDROID.tuples())

# tag value -> [match kind, class-name prefix regexp]; kind 0 matches on
# the class name of the touched package.
TAGS_ANDROID = {
    TAG_ANDROID.ANDROID: [0, "Landroid"],
    TAG_ANDROID.TELEPHONY: [0, "Landroid/telephony"],
    TAG_ANDROID.SMS: [0, "Landroid/telephony/SmsManager"],
    TAG_ANDROID.SMSMESSAGE: [0, "Landroid/telephony/SmsMessage"],
    TAG_ANDROID.DEBUG: [0, "Landroid/os/Debug"],
    TAG_ANDROID.ACCESSIBILITYSERVICE: [0, "Landroid/accessibilityservice"],
    TAG_ANDROID.ACCOUNTS: [0, "Landroid/accounts"],
    TAG_ANDROID.ANIMATION: [0, "Landroid/animation"],
    TAG_ANDROID.APP: [0, "Landroid/app"],
    TAG_ANDROID.BLUETOOTH: [0, "Landroid/bluetooth"],
    TAG_ANDROID.CONTENT: [0, "Landroid/content"],
    TAG_ANDROID.DATABASE: [0, "Landroid/database"],
    TAG_ANDROID.DRM: [0, "Landroid/drm"],
    TAG_ANDROID.GESTURE: [0, "Landroid/gesture"],
    TAG_ANDROID.GRAPHICS: [0, "Landroid/graphics"],
    TAG_ANDROID.HARDWARE: [0, "Landroid/hardware"],
    TAG_ANDROID.INPUTMETHODSERVICE: [0, "Landroid/inputmethodservice"],
    TAG_ANDROID.LOCATION: [0, "Landroid/location"],
    TAG_ANDROID.MEDIA: [0, "Landroid/media"],
    TAG_ANDROID.MTP: [0, "Landroid/mtp"],
    TAG_ANDROID.NET: [0, "Landroid/net"],
    TAG_ANDROID.NFC: [0, "Landroid/nfc"],
    TAG_ANDROID.OPENGL: [0, "Landroid/opengl"],
    TAG_ANDROID.OS: [0, "Landroid/os"],
    TAG_ANDROID.PREFERENCE: [0, "Landroid/preference"],
    TAG_ANDROID.PROVIDER: [0, "Landroid/provider"],
    TAG_ANDROID.RENDERSCRIPT: [0, "Landroid/renderscript"],
    TAG_ANDROID.SAX: [0, "Landroid/sax"],
    TAG_ANDROID.SECURITY: [0, "Landroid/security"],
    TAG_ANDROID.SERVICE: [0, "Landroid/service"],
    TAG_ANDROID.SPEECH: [0, "Landroid/speech"],
    TAG_ANDROID.SUPPORT: [0, "Landroid/support"],
    TAG_ANDROID.TEST: [0, "Landroid/test"],
    TAG_ANDROID.TEXT: [0, "Landroid/text"],
    TAG_ANDROID.UTIL: [0, "Landroid/util"],
    TAG_ANDROID.VIEW: [0, "Landroid/view"],
    TAG_ANDROID.WEBKIT: [0, "Landroid/webkit"],
    TAG_ANDROID.WIDGET: [0, "Landroid/widget"],
    TAG_ANDROID.DALVIK_BYTECODE: [0, "Ldalvik/bytecode"],
    TAG_ANDROID.DALVIK_SYSTEM: [0, "Ldalvik/system"],
    TAG_ANDROID.JAVA_REFLECTION: [0, "Ljava/lang/reflect"],
}


class Tags(object):
    """
    Handle specific tags

    :param patterns: mapping tag value -> [match kind, regexp source]
    :param reverse: mapping tag value -> printable tag name
    """

    def __init__(self, patterns=TAGS_ANDROID, reverse=TAG_REVERSE_ANDROID):
        self.tags = set()

        self.patterns = patterns
        # BUGFIX: the original ignored the *reverse* argument and always
        # assigned TAG_REVERSE_ANDROID, breaking custom pattern/reverse pairs.
        self.reverse = reverse

        for i in self.patterns:
            # re.compile() returns an already-compiled pattern unchanged, so
            # re-instantiating Tags with the shared default dict is safe.
            self.patterns[i][1] = re.compile(self.patterns[i][1])

    def emit(self, method):
        """Tag according to the class name of *method*."""
        for i in self.patterns:
            if self.patterns[i][0] == 0:
                if self.patterns[i][1].search(method.get_class()) is not None:
                    self.tags.add(i)

    def emit_by_classname(self, classname):
        """Tag according to a raw class name string."""
        for i in self.patterns:
            if self.patterns[i][0] == 0:
                if self.patterns[i][1].search(classname) is not None:
                    self.tags.add(i)

    def get_list(self):
        """Return the human-readable names of the collected tags."""
        return [self.reverse[i] for i in self.tags]

    def __contains__(self, key):
        return key in self.tags

    def __str__(self):
        return str([self.reverse[i] for i in self.tags])

    def empty(self):
        return self.tags == set()
class BasicBlocks(object):
    """
    This class represents all basic blocks of a method

    :param _vm: the DalvikVMFormat object (kept for API symmetry)
    :param tv: tainted-information provider (may be None)
    """

    def __init__(self, _vm, tv):
        self.__vm = _vm
        self.tainted = tv
        self.bb = []

    def push(self, bb):
        """Append a basic block."""
        self.bb.append(bb)

    def pop(self, idx):
        """Remove and return the basic block at position *idx*."""
        return self.bb.pop(idx)

    def get_basic_block(self, idx):
        """Return the block whose half-open range [start, end) contains the
        instruction offset *idx*, or None."""
        for i in self.bb:
            if idx >= i.get_start() and idx < i.get_end():
                return i
        return None

    def get_tainted_integers(self):
        """Return the tainted integers, or None without a tainted context."""
        # BUGFIX: the original used a bare `except:` which silently hid every
        # error; only a missing tainted context (None) is expected here.
        try:
            return self.tainted.get_tainted_integers()
        except AttributeError:
            return None

    def get_tainted_packages(self):
        """Return the tainted packages, or None without a tainted context."""
        try:
            return self.tainted.get_tainted_packages()
        except AttributeError:
            return None

    def get_tainted_variables(self):
        """Return the tainted variables, or None without a tainted context."""
        try:
            return self.tainted.get_tainted_variables()
        except AttributeError:
            return None

    def get(self):
        """
        :rtype: return each basic block (:class:`DVMBasicBlock` object)
        """
        for i in self.bb:
            yield i

    def gets(self):
        """
        :rtype: a list of basic blocks (:class:`DVMBasicBlock` objects)
        """
        return self.bb

    def get_basic_block_pos(self, idx):
        """Return the basic block at list position *idx*."""
        return self.bb[idx]
class ExceptionAnalysis(object):
    """One try/catch region: offsets [start, end] plus its handlers, each
    resolved to the basic block starting at the handler address."""

    def __init__(self, exception, bb):
        self.start = exception[0]
        self.end = exception[1]
        self.exceptions = exception[2:]

        # Append the handler's basic block (or None) to each handler entry.
        for handler in self.exceptions:
            handler.append(bb.get_basic_block(handler[1]))

    def show_buff(self):
        """Render the region and its handlers as a printable string."""
        pieces = ["%x:%x\n" % (self.start, self.end)]
        for handler in self.exceptions:
            block = handler[2]
            target = block if block is None else block.get_name()
            pieces.append("\t(%s -> %x %s)\n" % (handler[0], handler[1], target))
        return "".join(pieces)[:-1]

    def get(self):
        """Return a dict view: {"start", "end", "list": [handler dicts]}."""
        d = {"start": self.start, "end": self.end, "list": []}
        for handler in self.exceptions:
            d["list"].append(
                {"name": handler[0], "idx": handler[1], "bb": handler[2].get_name()})
        return d
class Exceptions(object):
    """Collection of ExceptionAnalysis regions for one method."""

    def __init__(self, _vm):
        self.__vm = _vm
        self.exceptions = []

    def add(self, exceptions, basic_blocks):
        """Wrap each raw exception entry into an ExceptionAnalysis."""
        for exc in exceptions:
            self.exceptions.append(ExceptionAnalysis(exc, basic_blocks))

    def get_exception(self, addr_start, addr_end):
        """Return the first region contained in [addr_start, addr_end] or
        containing it, else None."""
        for exc in self.exceptions:
            if (exc.start >= addr_start and exc.end <= addr_end) or \
               (addr_end <= exc.end and addr_start >= exc.start):
                return exc
        return None

    def gets(self):
        return self.exceptions

    def get(self):
        for exc in self.exceptions:
            yield exc
# Helpers used by MethodAnalysis to split a method into basic blocks:
# "BasicOPCODES" are the branch-instruction name patterns, "Dnext" computes
# an instruction's successors and "Dexception" extracts the exception table.
BO = {"BasicOPCODES": dvm.BRANCH_DVM_OPCODES, "BasicClass": DVMBasicBlock,
      "Dnext": dvm.determineNext, "Dexception": dvm.determineException}

# Pre-compiled regexps for the branch opcode names, built once at import.
BO["BasicOPCODES_H"] = []
for i in BO["BasicOPCODES"]:
    BO["BasicOPCODES_H"].append(re.compile(i))
class MethodAnalysis(object):
    """
    This class analyses in details a method of a class/dex file

    :param vm: the object which represent the dex file
    :param method: the original method
    :param tv: a virtual object to get access to tainted information
    :type vm: a :class:`DalvikVMFormat` object
    :type method: a :class:`EncodedMethod` object
    """

    def __init__(self, vm, method, tv):
        self.__vm = vm
        self.method = method
        self.tainted = tv

        self.basic_blocks = BasicBlocks(self.__vm, self.tainted)
        self.exceptions = Exceptions(self.__vm)

        code = self.method.get_code()
        if code is None:
            # Abstract/native methods carry no code item: nothing to analyse.
            return

        current_basic = BO["BasicClass"](
            0, self.__vm, self.method, self.basic_blocks)
        self.basic_blocks.push(current_basic)

        bc = code.get_bc()
        l = []   # all branch-target / exception-boundary offsets
        h = {}   # offset of a branch instruction -> its successor offsets
        idx = 0

        debug("Parsing instructions")
        instructions = [i for i in bc.get_instructions()]
        for i in instructions:
            for j in BO["BasicOPCODES_H"]:
                try:
                    if j.match(i.get_name()) is not None:
                        v = BO["Dnext"](i, idx, self.method)
                        h[idx] = v
                        l.extend(v)
                        break
                except Exception:
                    break
            idx += i.get_length()

        debug("Parsing exceptions")
        excepts = BO["Dexception"](self.__vm, self.method)
        for i in excepts:
            # The try start and every handler address also begin a block.
            l.extend([i[0]])
            for handler in i[2:]:
                l.append(handler[1])

        debug("Creating basic blocks in %s" % self.method)
        idx = 0
        for i in instructions:
            # index is a destination
            if idx in l:
                if current_basic.get_nb_instructions() != 0:
                    current_basic = BO["BasicClass"](
                        current_basic.get_end(), self.__vm, self.method, self.basic_blocks)
                    self.basic_blocks.push(current_basic)

            current_basic.push(i)

            # index is a branch instruction
            if idx in h:
                current_basic = BO["BasicClass"](
                    current_basic.get_end(), self.__vm, self.method, self.basic_blocks)
                self.basic_blocks.push(current_basic)

            idx += i.get_length()

        # Drop a trailing empty block created after the last branch.
        if current_basic.get_nb_instructions() == 0:
            self.basic_blocks.pop(-1)

        debug("Settings basic blocks childs")
        for i in self.basic_blocks.get():
            try:
                # A block ending on a branch gets that branch's successors.
                i.set_childs(h[i.end - i.get_last_length()])
            except KeyError:
                i.set_childs([])

        debug("Creating exceptions")
        self.exceptions.add(excepts, self.basic_blocks)
        for i in self.basic_blocks.get():
            # setup exception by basic block
            i.set_exception_analysis(
                self.exceptions.get_exception(i.start, i.end - 1))

        del instructions
        del h, l

    def get_basic_blocks(self):
        """
        :rtype: a :class:`BasicBlocks` object
        """
        return self.basic_blocks

    def get_length(self):
        """
        :rtype: an integer which is the length of the code
        """
        # BUGFIX: the original called self.get_code(), a method that does not
        # exist on MethodAnalysis; the code item lives on the wrapped method.
        return self.method.get_code().get_length()

    def get_vm(self):
        return self.__vm

    def get_method(self):
        return self.method

    def get_local_variables(self):
        return self.tainted.get_tainted_variables().get_local_variables(self.method)

    def show(self):
        # Intentionally a no-op: the original body was unreachable debug
        # printing placed after a bare return, removed as dead code.
        return

    def show_methods(self):
        # Intentionally a no-op: the original dead code additionally
        # referenced a non-existent attribute (self.__bb).
        return

    def create_tags(self):
        """
        Create the tags for the method
        """
        self.tags = Tags()
        for i in self.tainted.get_tainted_packages().get_packages_by_method(self.method):
            self.tags.emit_by_classname(i)

    def get_tags(self):
        """
        Return the tags of the method (call :meth:`create_tags` first)

        :rtype: a :class:`Tags` object
        """
        return self.tags
# Names of the predefined signature grammars accepted by
# VMAnalysis.get_method_signature(predef_sign=...).
SIGNATURE_L0_0 = "L0_0"
SIGNATURE_L0_1 = "L0_1"
SIGNATURE_L0_2 = "L0_2"
SIGNATURE_L0_3 = "L0_3"
SIGNATURE_L0_4 = "L0_4"
SIGNATURE_L0_5 = "L0_5"
SIGNATURE_L0_6 = "L0_6"
SIGNATURE_L0_0_L1 = "L0_0:L1"
SIGNATURE_L0_1_L1 = "L0_1:L1"
SIGNATURE_L0_2_L1 = "L0_2:L1"
SIGNATURE_L0_3_L1 = "L0_3:L1"
SIGNATURE_L0_4_L1 = "L0_4:L1"
SIGNATURE_L0_5_L1 = "L0_5:L1"
SIGNATURE_L0_0_L2 = "L0_0:L2"
SIGNATURE_L0_0_L3 = "L0_0:L3"
SIGNATURE_HEX = "hex"
SIGNATURE_SEQUENCE_BB = "sequencebb"

# Options associated with each predefined grammar; "type" and "arguments"
# are consumed by the Signature module.
SIGNATURES = {
    SIGNATURE_L0_0: {"type": 0},
    SIGNATURE_L0_1: {"type": 1},
    SIGNATURE_L0_2: {"type": 2, "arguments": ["Landroid"]},
    SIGNATURE_L0_3: {"type": 2, "arguments": ["Ljava"]},
    SIGNATURE_L0_4: {"type": 2, "arguments": ["Landroid", "Ljava"]},
    SIGNATURE_L0_5: {"type": 3, "arguments": ["Landroid"]},
    SIGNATURE_L0_6: {"type": 3, "arguments": ["Ljava"]},
    SIGNATURE_SEQUENCE_BB: {},
    SIGNATURE_HEX: {},
}
class StringAnalysis(object):
    """Tracks which (class, method) pairs reference a given string value."""

    def __init__(self, value):
        self.value = value
        self.xreffrom = set()

    def AddXrefFrom(self, classobj, methodobj):
        """Record that *methodobj* of *classobj* references this string."""
        self.xreffrom.add((classobj, methodobj))

    def get_xref_from(self):
        return self.xreffrom

    def __str__(self):
        header = "XREFto for string %s in\n" % repr(self.value)
        body = "".join("%s:%s\n" % (ref_class.get_vm_class().get_name(), ref_method)
                       for ref_class, ref_method in self.xreffrom)
        return header + body
class MethodClassAnalysis(object):
    """Per-method xref bookkeeping: targets this method calls (xrefto) and
    callers of this method (xreffrom)."""

    def __init__(self, method):
        self.method = method
        self.xrefto = set()
        self.xreffrom = set()

    def AddXrefTo(self, classobj, methodobj):
        """Record a call from this method to (classobj, methodobj)."""
        self.xrefto.add((classobj, methodobj))

    def AddXrefFrom(self, classobj, methodobj):
        """Record a call into this method from (classobj, methodobj)."""
        self.xreffrom.add((classobj, methodobj))

    def get_xref_from(self):
        return self.xreffrom

    def get_xref_to(self):
        return self.xrefto

    def __str__(self):
        parts = ["XREFto for %s\n" % self.method]
        for ref_class, ref_method in self.xrefto:
            parts.append("in\n")
            parts.append("%s:%s\n" % (ref_class.get_vm_class().get_name(),
                                      ref_method))
        parts.append("XREFFrom for %s\n" % self.method)
        for ref_class, ref_method in self.xreffrom:
            parts.append("in\n")
            parts.append("%s:%s\n" % (ref_class.get_vm_class().get_name(),
                                      ref_method))
        return "".join(parts)
class FieldClassAnalysis(object):
    """Per-field xref bookkeeping: which (class, method) pairs read this
    field (xrefread) and which write it (xrefwrite)."""

    def __init__(self, field):
        self.field = field
        self.xrefread = set()
        self.xrefwrite = set()

    def AddXrefRead(self, classobj, methodobj):
        """Record a read of this field by (classobj, methodobj)."""
        self.xrefread.add((classobj, methodobj))

    def AddXrefWrite(self, classobj, methodobj):
        """Record a write of this field by (classobj, methodobj)."""
        self.xrefwrite.add((classobj, methodobj))

    def get_xref_read(self):
        return self.xrefread

    def get_xref_write(self):
        return self.xrefwrite

    def __str__(self):
        parts = ["XREFRead for %s\n" % self.field]
        for ref_class, ref_method in self.xrefread:
            parts.append("in\n")
            parts.append("%s:%s\n" % (ref_class.get_vm_class().get_name(),
                                      ref_method))
        parts.append("XREFWrite for %s\n" % self.field)
        for ref_class, ref_method in self.xrefwrite:
            parts.append("in\n")
            parts.append("%s:%s\n" % (ref_class.get_vm_class().get_name(),
                                      ref_method))
        return "".join(parts)
# Kinds of class-level cross references (see newVMAnalysis.create_xref):
REF_NEW_INSTANCE = 0  # target class instantiated (new-instance, 0x22)
REF_CLASS_USAGE = 1  # target class merely referenced (const-class / invoke)
class ClassAnalysis(object):
    """Cross-reference information for one class: per-method and per-field
    analyses, plus class-level xrefs to and from other classes."""

    def __init__(self, classobj):
        self._class = classobj
        self._methods = {}
        self._fields = {}

        self.xrefto = collections.defaultdict(set)
        self.xreffrom = collections.defaultdict(set)

    def get_method_analysis(self, method):
        """Return the MethodClassAnalysis for *method*, or None."""
        return self._methods.get(method)

    def get_field_analysis(self, field):
        """Return the FieldClassAnalysis for *field*, or None."""
        return self._fields.get(field)

    def AddFXrefRead(self, method, classobj, field):
        if field not in self._fields:
            self._fields[field] = FieldClassAnalysis(field)
        self._fields[field].AddXrefRead(classobj, method)

    def AddFXrefWrite(self, method, classobj, field):
        if field not in self._fields:
            self._fields[field] = FieldClassAnalysis(field)
        self._fields[field].AddXrefWrite(classobj, method)

    def AddMXrefTo(self, method1, classobj, method2):
        if method1 not in self._methods:
            self._methods[method1] = MethodClassAnalysis(method1)
        self._methods[method1].AddXrefTo(classobj, method2)

    def AddMXrefFrom(self, method1, classobj, method2):
        if method1 not in self._methods:
            self._methods[method1] = MethodClassAnalysis(method1)
        self._methods[method1].AddXrefFrom(classobj, method2)

    def AddXrefTo(self, ref_kind, classobj, methodobj):
        self.xrefto[classobj].add((ref_kind, methodobj))

    def AddXrefFrom(self, ref_kind, classobj, methodobj):
        self.xreffrom[classobj].add((ref_kind, methodobj))

    def get_xref_from(self):
        return self.xreffrom

    def get_xref_to(self):
        return self.xrefto

    def get_vm_class(self):
        return self._class

    def __str__(self):
        parts = ["XREFto for %s\n" % self._class]
        for ref_class in self.xrefto:
            parts.append(str(ref_class.get_vm_class().get_name()) + " ")
            parts.append("in\n")
            for ref_kind, ref_method in self.xrefto[ref_class]:
                parts.append("%d %s\n" % (ref_kind, ref_method))
            parts.append("\n")
        parts.append("XREFFrom for %s\n" % self._class)
        for ref_class in self.xreffrom:
            parts.append(str(ref_class.get_vm_class().get_name()) + " ")
            parts.append("in\n")
            for ref_kind, ref_method in self.xreffrom[ref_class]:
                parts.append("%d %s\n" % (ref_kind, ref_method))
            parts.append("\n")
        return "".join(parts)
class newVMAnalysis(object):
    """Lightweight analysis over a DalvikVMFormat: builds class, method,
    field and string cross-references by scanning every instruction once."""

    def __init__(self, vm):
        self.vm = vm
        self.classes = {}
        self.strings = {}

        # One ClassAnalysis per class defined in the dex file.
        for current_class in self.vm.get_classes():
            self.classes[current_class.get_name()] = ClassAnalysis(
                current_class)

    def create_xref(self):
        """Populate xrefs by decoding each method's bytecode.  The opcode
        values dispatched on below follow the Dalvik instruction set."""
        debug("Creating XREF/DREF")

        instances_class_name = list(self.classes.keys())
        for current_class in self.vm.get_classes():
            for current_method in current_class.get_methods():
                debug("Creating XREF for %s" % current_method)

                code = current_method.get_code()
                if code is None:
                    # Abstract/native method: no bytecode to scan.
                    continue

                off = 0
                bc = code.get_bc()
                for instruction in bc.get_instructions():
                    op_value = instruction.get_op_value()
                    # 0x1c = const-class, 0x22 = new-instance
                    if op_value in [0x1c, 0x22]:
                        idx_type = instruction.get_ref_kind()
                        type_info = self.vm.get_cm_type(idx_type)

                        # Internal xref related to class manipulation
                        if type_info in instances_class_name and type_info != current_class.get_name():
                            # new instance
                            if op_value == 0x22:
                                self.classes[current_class.get_name()].AddXrefTo(
                                    REF_NEW_INSTANCE, self.classes[type_info], current_method)
                                self.classes[type_info].AddXrefFrom(REF_NEW_INSTANCE, self.classes[
                                    current_class.get_name()], current_method)
                            # class reference
                            else:
                                self.classes[current_class.get_name()].AddXrefTo(
                                    REF_CLASS_USAGE, self.classes[type_info], current_method)
                                self.classes[type_info].AddXrefFrom(REF_CLASS_USAGE, self.classes[
                                    current_class.get_name()], current_method)

                    # 0x6e..0x72 = invoke-kind, 0x74..0x78 = invoke-kind/range
                    elif ((op_value >= 0x6e and op_value <= 0x72) or
                          (op_value >= 0x74 and op_value <= 0x78)):
                        idx_meth = instruction.get_ref_kind()
                        method_info = self.vm.get_cm_method(idx_meth)
                        if method_info:
                            class_info = method_info[0]

                            method_item = self.vm.get_method_descriptor(
                                method_info[0], method_info[1], ''.join(method_info[2]))
                            if method_item:
                                self.classes[current_class.get_name()].AddMXrefTo(
                                    current_method, self.classes[class_info], method_item)
                                self.classes[class_info].AddMXrefFrom(
                                    method_item, self.classes[current_class.get_name()], current_method)

                                # Internal xref related to class manipulation
                                if class_info in instances_class_name and class_info != current_class.get_name():
                                    self.classes[current_class.get_name()].AddXrefTo(
                                        REF_CLASS_USAGE, self.classes[class_info], method_item)
                                    self.classes[class_info].AddXrefFrom(REF_CLASS_USAGE, self.classes[
                                        current_class.get_name()], current_method)

                    # 0x1a = const-string, 0x1b = const-string/jumbo
                    elif op_value >= 0x1a and op_value <= 0x1b:
                        string_value = self.vm.get_cm_string(
                            instruction.get_ref_kind())
                        if string_value not in self.strings:
                            self.strings[string_value] = StringAnalysis(
                                string_value)
                        self.strings[string_value].AddXrefFrom(
                            self.classes[current_class.get_name()], current_method)

                    # 0x52..0x6d = instance/static field access (iget/iput/sget/sput)
                    elif op_value >= 0x52 and op_value <= 0x6d:
                        idx_field = instruction.get_ref_kind()
                        field_info = self.vm.get_cm_field(idx_field)
                        field_item = self.vm.get_field_descriptor(
                            field_info[0], field_info[2], field_info[1])
                        if field_item:
                            # read access to a field
                            if (op_value >= 0x52 and op_value <= 0x58) or (op_value >= 0x60 and op_value <= 0x66):
                                self.classes[current_class.get_name()].AddFXrefRead(
                                    current_method, self.classes[current_class.get_name()], field_item)
                            # write access to a field
                            else:
                                self.classes[current_class.get_name()].AddFXrefWrite(
                                    current_method, self.classes[current_class.get_name()], field_item)

                    off += instruction.get_length()

    def get_method(self, method):
        # NOTE(review): passes tv=None, so the returned MethodAnalysis has
        # no tainted context attached.
        return MethodAnalysis(self.vm, method, None)

    def get_method_by_name(self, class_name, method_name, method_descriptor):
        """Return the method matching name + descriptor in *class_name*, or None."""
        if class_name in self.classes:
            for method in self.classes[class_name].get_vm_class().get_methods():
                if method.get_name() == method_name and method.get_descriptor() == method_descriptor:
                    return method
        return None

    def is_class_present(self, class_name):
        return class_name in self.classes

    def get_class_analysis(self, class_name):
        return self.classes.get(class_name)

    def get_strings_analysis(self):
        return self.strings
class VMAnalysis(object):
    """
    This class analyses a dex file

    :param _vm: the object which represent the dex file
    :type _vm: a :class:`DalvikVMFormat` object

    :Example:
    VMAnalysis( DalvikVMFormat( read("toto.dex", binary=False) ) )
    """

    def __init__(self, vm):
        self.vm = vm

        self.tainted_variables = TaintedVariables(self.vm)
        self.tainted_packages = TaintedPackages(self.vm)

        self.tainted = {"variables": self.tainted_variables,
                        "packages": self.tainted_packages,
                        }

        self.signature = None

        # Register every field as a potentially tainted variable.
        for i in self.vm.get_all_fields():
            self.tainted_variables.add(
                [i.get_class_name(), i.get_descriptor(), i.get_name()], TAINTED_FIELD)

        # Eagerly analyse every method (contrast with uVMAnalysis, which
        # performs the same work lazily).
        self.methods = []
        self.hmethods = {}
        self.__nmethods = {}
        for i in self.vm.get_methods():
            x = MethodAnalysis(self.vm, i, self)
            self.methods.append(x)
            self.hmethods[i] = x
            self.__nmethods[i.get_name()] = x

    def get_vm(self):
        return self.vm

    def get_method(self, method):
        """
        Return an analysis method

        :param method: a classical method object
        :type method: an :class:`EncodedMethod` object

        :rtype: a :class:`MethodAnalysis` object
        """
        return self.hmethods[method]

    def get_methods(self):
        """
        Return each analysis method

        :rtype: a :class:`MethodAnalysis` object
        """
        for i in self.hmethods:
            yield self.hmethods[i]

    # NOTE(review): options={} is a mutable default argument; it is only
    # forwarded (never mutated) here, but replacing it with None would be
    # safer if this signature is ever revisited.
    def get_method_signature(self, method, grammar_type="", options={}, predef_sign=""):
        """
        Return a specific signature for a specific method

        :param method: a reference to method from a vm class
        :type method: a :class:`EncodedMethod` object

        :param grammar_type: the type of the signature (optional)
        :type grammar_type: string

        :param options: the options of the signature (optional)
        :param options: dict

        :param predef_sign: used a predefined signature (optional)
        :type predef_sign: string

        :rtype: a :class:`Sign` object
        """
        # The Signature helper is built lazily on first use.
        if self.signature is None:
            self.signature = Signature(self)

        if predef_sign != "":
            g = ""
            o = {}

            # Expand predefined names (e.g. "L0_2") into the grammar string
            # and options expected by Signature.get_method.
            for i in predef_sign.split(":"):
                if "_" in i:
                    g += "L0:"
                    o["L0"] = SIGNATURES[i]
                else:
                    g += i
                    g += ":"
            return self.signature.get_method(self.get_method(method), g[:-1], o)
        else:
            return self.signature.get_method(self.get_method(method), grammar_type, options)

    def get_permissions(self, permissions_needed):
        """
        Return the permissions used

        :param permissions_needed: a list of restricted permissions to get ([] returns all permissions)
        :type permissions_needed: list

        :rtype: a dictionnary of permissions paths
        """
        permissions = {}

        # Merge permissions inferred from called packages and from fields.
        permissions.update(self.get_tainted_packages(
        ).get_permissions(permissions_needed))
        permissions.update(self.get_tainted_variables(
        ).get_permissions(permissions_needed))

        return permissions

    def get_permissions_method(self, method):
        # Union of package-derived and field-derived permissions.
        permissions_f = self.get_tainted_packages().get_permissions_method(method)
        permissions_v = self.get_tainted_variables().get_permissions_method(method)
        all_permissions_of_method = permissions_f.union(permissions_v)
        return list(all_permissions_of_method)

    def get_tainted_variables(self):
        """
        Return the tainted variables

        :rtype: a :class:`TaintedVariables` object
        """
        return self.tainted_variables

    def get_tainted_packages(self):
        """
        Return the tainted packages

        :rtype: a :class:`TaintedPackages` object
        """
        return self.tainted_packages

    def get_tainted_fields(self):
        return self.get_tainted_variables().get_fields()

    def get_tainted_field(self, class_name, name, descriptor):
        """
        Return a specific tainted field

        :param class_name: the name of the class
        :param name: the name of the field
        :param descriptor: the descriptor of the field
        :type class_name: string
        :type name: string
        :type descriptor: string

        :rtype: a :class:`TaintedVariable` object
        """
        return self.get_tainted_variables().get_field(class_name, name, descriptor)
class uVMAnalysis(VMAnalysis):
    """
    This class analyses a dex file but on the fly (quicker !)

    :param _vm: the object which represent the dex file
    :type _vm: a :class:`DalvikVMFormat` object

    :Example:
    uVMAnalysis( DalvikVMFormat( read("toto.dex", binary=False) ) )
    """

    def __init__(self, vm):
        # NOTE: deliberately does not call VMAnalysis.__init__ — methods are
        # analysed lazily in get_methods() instead of eagerly at build time.
        self.vm = vm
        self.tainted_variables = TaintedVariables(self.vm)
        self.tainted_packages = TaintedPackages(self.vm)

        self.tainted = {"variables": self.tainted_variables,
                        "packages": self.tainted_packages,
                        }

        self.signature = None
        self.resolve = False  # becomes True once methods have been analysed

    def get_methods(self):
        # Analysing each method populates the tainted_* registries as a
        # side effect.
        self.resolve = True
        for i in self.vm.get_methods():
            yield MethodAnalysis(self.vm, i, self)

    def get_method(self, method):
        return MethodAnalysis(self.vm, method, None)

    def get_vm(self):
        return self.vm

    def _resolve(self):
        # Force one full pass over all methods so tainted info is complete.
        if not self.resolve:
            for i in self.get_methods():
                pass

    def get_tainted_packages(self):
        self._resolve()
        return self.tainted_packages

    def get_tainted_variables(self):
        self._resolve()
        return self.tainted_variables
def is_ascii_obfuscation(vm):
    """Return True when any class or method name in *vm* contains
    problematic characters (per is_ascii_problem), i.e. looks obfuscated."""
    return any(
        is_ascii_problem(klass.get_name())
        or any(is_ascii_problem(meth.get_name()) for meth in klass.get_methods())
        for klass in vm.get_classes())
| 32.462714 | 169 | 0.551957 |
import re
import collections
from androguard.core.analysis.sign import Signature, TAINTED_PACKAGE_CREATE, \
TAINTED_PACKAGE_CALL
from androguard.core.androconf import debug, is_ascii_problem,\
load_api_specific_resource_module
from androguard.core.bytecodes import dvm
# Dalvik field-access opcodes mapped to their direction:
# "R" = read (iget/sget family), "W" = write (iput/sput family).
DVM_FIELDS_ACCESS = {
    "iget": "R",
    "iget-wide": "R",
    "iget-object": "R",
    "iget-boolean": "R",
    "iget-byte": "R",
    "iget-char": "R",
    "iget-short": "R",

    "iput": "W",
    "iput-wide": "W",
    "iput-object": "W",
    "iput-boolean": "W",
    "iput-byte": "W",
    "iput-char": "W",
    "iput-short": "W",

    "sget": "R",
    "sget-wide": "R",
    "sget-object": "R",
    "sget-boolean": "R",
    "sget-byte": "R",
    "sget-char": "R",
    "sget-short": "R",

    "sput": "W",
    "sput-wide": "W",
    "sput-object": "W",
    "sput-boolean": "W",
    "sput-byte": "W",
    "sput-char": "W",
    "sput-short": "W",
}
class ContextField(object):
    """Access mode ('R'/'W' style flag) plus detail items for one field
    usage context."""

    def __init__(self, mode):
        self.mode = mode
        self.details = []

    def set_details(self, details):
        """Append every item of *details* to this context."""
        self.details.extend(details)
class ContextMethod(object):
    """Detail items for one method usage context."""

    def __init__(self):
        self.details = []

    def set_details(self, details):
        """Append every item of *details* to this context."""
        self.details.extend(details)
class ExternalFM(object):
    """A field or method that lives outside the analysed dex file,
    identified by (class name, name, descriptor)."""

    def __init__(self, class_name, name, descriptor):
        self.class_name = class_name
        self.name = name
        self.descriptor = descriptor

    def get_class_name(self):
        """Return the owning class name."""
        return self.class_name

    def get_name(self):
        """Return the field/method name."""
        return self.name

    def get_descriptor(self):
        """Return the field/method descriptor."""
        return self.descriptor
class ToString(object):
    """Accumulates a compact string of category letters: each pushed name
    is matched against per-category regexps, and consecutive 'O' entries
    are collapsed into one."""

    def __init__(self, tab):
        self.__tab = tab
        self.__re_tab = {}
        for key in self.__tab:
            self.__re_tab[key] = [re.compile(expr) for expr in self.__tab[key]]
        self.__string = ""

    def push(self, name):
        """Append the letter of every category whose pattern matches *name*."""
        for key in self.__tab:
            for pattern in self.__re_tab[key]:
                if pattern.match(name) is not None:
                    # Collapse runs of consecutive object ('O') markers.
                    if self.__string and key == 'O' and self.__string[-1] == 'O':
                        continue
                    self.__string += key

    def get_string(self):
        return self.__string
class BreakBlock(object):
    """A straight-line run of instructions covering offsets [start, end)."""

    def __init__(self, _vm, idx):
        self._vm = _vm
        self._start = idx
        self._end = self._start

        self._ins = []
        self._ops = []
        self._fields = {}
        self._methods = {}

    def get_ops(self):
        return self._ops

    def get_fields(self):
        return self._fields

    def get_methods(self):
        return self._methods

    def push(self, ins):
        """Append an instruction and advance the end offset by its length."""
        self._ins.append(ins)
        self._end += ins.get_length()

    def get_start(self):
        return self._start

    def get_end(self):
        return self._end

    def show(self):
        for ins in self._ins:
            ins.show(0)
class DVMBasicBlock(object):
    """One basic block of a method: instructions in [start, end), links to
    predecessor/successor blocks, and hooks that feed the tainted-variable
    and tainted-package registries as instructions are pushed."""

    def __init__(self, start, vm, method, context):
        self.__vm = vm
        self.method = method
        self.context = context

        self.last_length = 0
        self.nb_instructions = 0

        self.fathers = []
        self.childs = []

        self.start = start
        self.end = self.start

        # offset -> resolved payload instruction (switch/array data)
        self.special_ins = {}

        self.name = "%s-BB@0x%x" % (self.method.get_name(), self.start)
        self.exception_analysis = None

        self.tainted_variables = self.context.get_tainted_variables()
        self.tainted_packages = self.context.get_tainted_packages()

        self.notes = []

    def get_notes(self):
        return self.notes

    def set_notes(self, value):
        self.notes = [value]

    def add_note(self, note):
        self.notes.append(note)

    def clear_notes(self):
        self.notes = []

    def get_instructions(self):
        """Return the instructions of this block (recomputed by walking the
        whole method and keeping offsets in [start, end))."""
        tmp_ins = []
        idx = 0
        for i in self.method.get_instructions():
            if idx >= self.start and idx < self.end:
                tmp_ins.append(i)
            idx += i.get_length()
        return tmp_ins

    def get_nb_instructions(self):
        return self.nb_instructions

    def get_method(self):
        return self.method

    def get_name(self):
        return "%s-BB@0x%x" % (self.method.get_name(), self.start)

    def get_start(self):
        return self.start

    def get_end(self):
        return self.end

    def get_last(self):
        return self.get_instructions()[-1]

    def get_next(self):
        return self.childs

    def get_prev(self):
        return self.fathers

    def set_fathers(self, f):
        self.fathers.append(f)

    def get_last_length(self):
        return self.last_length

    def set_childs(self, values):
        """Link successor blocks; [] means fall through to the block that
        starts right after this one."""
        if values == []:
            next_block = self.context.get_basic_block(self.end + 1)
            if next_block is not None:
                self.childs.append(
                    (self.end - self.get_last_length(), self.end, next_block))
        else:
            for i in values:
                if i != -1:
                    next_block = self.context.get_basic_block(i)
                    if next_block is not None:
                        self.childs.append(
                            (self.end - self.get_last_length(), i, next_block))

        # Register this block as a father of each resolved child.
        for c in self.childs:
            if c[2] is not None:
                c[2].set_fathers((c[1], c[0], self))

    def push(self, i):
        """Append instruction *i*, growing [start, end) and recording taint
        info depending on the Dalvik opcode."""
        try:
            self.nb_instructions += 1
            idx = self.end
            self.last_length = i.get_length()
            self.end += self.last_length

            op_value = i.get_op_value()

            # 0x52..0x6d: field access -> record R/W on the tainted field
            # (DVM_FIELDS_ACCESS[...][0] is the 'R'/'W' character).
            if (op_value >= 0x52 and op_value <= 0x6d):
                desc = self.__vm.get_cm_field(i.get_ref_kind())
                if self.tainted_variables is not None:
                    self.tainted_variables.push_info(TAINTED_FIELD, desc, DVM_FIELDS_ACCESS[
                        i.get_name()][0], idx, self.method)

            # 0x6e..0x72 / 0x74..0x78: invoke -> record the called package
            elif (op_value >= 0x6e and op_value <= 0x72) or (op_value >= 0x74 and op_value <= 0x78):
                idx_meth = i.get_ref_kind()
                method_info = self.__vm.get_cm_method(idx_meth)
                if self.tainted_packages is not None:
                    self.tainted_packages.push_info(
                        method_info[0], TAINTED_PACKAGE_CALL, idx, self.method, idx_meth)

            # 0x22: new-instance -> record the created type
            elif op_value == 0x22:
                idx_type = i.get_ref_kind()
                type_info = self.__vm.get_cm_type(idx_type)
                if self.tainted_packages is not None:
                    self.tainted_packages.push_info(
                        type_info, TAINTED_PACKAGE_CREATE, idx, self.method, None)

            # 0x1a..0x1b: const-string -> record a string read
            elif (op_value >= 0x1a and op_value <= 0x1b):
                string_name = self.__vm.get_cm_string(i.get_ref_kind())
                if self.tainted_variables is not None:
                    self.tainted_variables.push_info(
                        TAINTED_STRING, string_name, "R", idx, self.method)

            # 0x26 / 0x2b..0x2c: fill-array-data and switch instructions —
            # resolve and cache their payload instruction by offset.
            elif op_value == 0x26 or (op_value >= 0x2b and op_value <= 0x2c):
                code = self.method.get_code().get_bc()
                self.special_ins[idx] = code.get_ins_off(
                    idx + i.get_ref_off() * 2)
        # NOTE(review): bare except silently swallows any error during taint
        # recording; kept as-is to preserve behaviour.
        except:
            pass

    def get_special_ins(self, idx):
        """Return the payload instruction cached for offset *idx*, or None."""
        try:
            return self.special_ins[idx]
        except:
            return None

    def get_exception_analysis(self):
        return self.exception_analysis

    def set_exception_analysis(self, exception_analysis):
        self.exception_analysis = exception_analysis
# Kinds of tainted data tracked by the analysis.
TAINTED_LOCAL_VARIABLE = 0
TAINTED_FIELD = 1
TAINTED_STRING = 2
class PathVar(object):
    """A single access path to a tainted variable: the access flag, the
    source method index (idx), the destination method index (dst_idx) and
    the TaintedVariable-like object it belongs to."""

    def __init__(self, access, idx, dst_idx, info_obj):
        self.access_flag = access
        self.idx = idx
        self.dst_idx = dst_idx
        self.info_obj = info_obj

    def get_var_info(self):
        """Return the underlying variable's info list."""
        return self.info_obj.get_info()

    def get_access_flag(self):
        return self.access_flag

    def get_src(self, cm):
        """Resolve the source method via the class manager *cm*."""
        src = cm.get_method_ref(self.idx)
        return src.get_class_name(), src.get_name(), src.get_descriptor()

    def get_dst(self, cm):
        """Resolve the destination method via the class manager *cm*."""
        dst = cm.get_method_ref(self.dst_idx)
        return dst.get_class_name(), dst.get_name(), dst.get_descriptor()

    def get_idx(self):
        return self.idx
class TaintedVariable(object):
    """One tainted value (local, field or string) together with its access
    paths, stored per method as (access_flag, instruction offset) pairs."""

    def __init__(self, var, _type):
        self.var = var
        self.type = _type

        # method idx -> [(access flag, instruction offset), ...]
        self.paths = {}
        self.__cache = []

    def get_type(self):
        return self.type

    def get_info(self):
        if self.type == TAINTED_FIELD:
            # Reorder [class, descriptor, name] -> [class, name, descriptor].
            return [self.var[0], self.var[2], self.var[1]]
        return self.var

    def push(self, access, idx, ref):
        """Record an access from the method identified by *ref*."""
        m_idx = ref.get_method_idx()

        if m_idx not in self.paths:
            self.paths[m_idx] = []

        self.paths[m_idx].append((access, idx))

    def get_paths_access(self, mode):
        # NOTE(review): this looks broken — self.paths[i] is a list, yet it
        # is indexed with j (a tuple) below, which raises TypeError as soon
        # as a method has any path.  Confirm before relying on this method.
        for i in self.paths:
            for j in self.paths[i]:
                for k, v in self.paths[i][j]:
                    if k in mode:
                        yield i, j, k, v

    def get_paths(self):
        """Return [[(access, offset), method idx], ...] for all accesses;
        the result is cached after the first call."""
        if self.__cache != []:
            return self.__cache

        for i in self.paths:
            for j in self.paths[i]:
                self.__cache.append([j, i])
        return self.__cache

    def get_paths_length(self):
        return len(self.paths)

    def show_paths(self, vm):
        # NOTE(review): show_PathVariable is defined elsewhere in the module
        # (not visible in this chunk).
        show_PathVariable(vm, self.get_paths())
class TaintedVariables(object):
    """Registry of all tainted variables (locals, fields, strings) of a dex
    file, plus AOSP permission mappings for the file's API level."""

    def __init__(self, _vm):
        self.__vm = _vm
        # kind constant -> {key -> TaintedVariable}
        self.__vars = {
            TAINTED_LOCAL_VARIABLE: {},
            TAINTED_FIELD: {},
            TAINTED_STRING: {},
        }

        # Per-method caches — presumably filled by other methods of this
        # class (not visible in this chunk).
        self.__cache_field_by_method = {}
        self.__cache_string_by_method = {}

        # Permission data matching the dex file's declared API version.
        self.AOSP_PERMISSIONS_MODULE = load_api_specific_resource_module(
            "aosp_permissions", self.__vm.get_api_version())
        self.API_PERMISSION_MAPPINGS_MODULE = load_api_specific_resource_module(
            "api_permission_mappings", self.__vm.get_api_version())
def get_string(self, s):
try:
return self.__vars[TAINTED_STRING][s]
except KeyError:
return None
def get_field(self, class_name, name, descriptor):
key = class_name + descriptor + name
try:
return self.__vars[TAINTED_FIELD][key]
except KeyError:
return None
def toPathVariable(self, obj):
z = []
for i in obj.get_paths():
access, idx = i[0]
m_idx = i[1]
z.append(PathVar(access, idx, m_idx, obj))
return z
def get_permissions_method(self, method):
permissions = set()
for f, f1 in self.get_fields():
data = "%s-%s-%s" % (f.var[0], f.var[2], f.var[1])
if data in list(self.API_PERMISSION_MAPPINGS_MODULE["AOSP_PERMISSIONS_BY_FIELDS"].keys()):
for path in f.get_paths():
m_idx = path[1]
if m_idx == method.get_idx():
permissions.update(self.API_PERMISSION_MAPPINGS_MODULE[
"AOSP_PERMISSIONS_BY_FIELDS"][data])
return permissions
def get_permissions(self, permissions_needed):
permissions = {}
pn = set(permissions_needed)
if permissions_needed == []:
pn = set(self.AOSP_PERMISSIONS_MODULE["AOSP_PERMISSIONS"].keys())
for f, _ in self.get_fields():
data = "%s-%s-%s" % (f.var[0], f.var[2], f.var[1])
if data in list(self.API_PERMISSION_MAPPINGS_MODULE["AOSP_PERMISSIONS_BY_FIELDS"].keys()):
perm_intersection = pn.intersection(self.API_PERMISSION_MAPPINGS_MODULE[
"AOSP_PERMISSIONS_BY_FIELDS"][data])
for p in perm_intersection:
try:
permissions[p].extend(self.toPathVariable(f))
except KeyError:
permissions[p] = []
permissions[p].extend(self.toPathVariable(f))
return permissions
def get_strings(self):
for i in self.__vars[TAINTED_STRING]:
yield self.__vars[TAINTED_STRING][i], i
def get_fields(self):
for i in self.__vars[TAINTED_FIELD]:
yield self.__vars[TAINTED_FIELD][i], i
def get_strings_by_method(self, method):
z = {}
try:
for i in self.__cache_string_by_method[method.get_method_idx()]:
z[i] = []
for j in i.get_paths():
if method.get_method_idx() == j[1]:
z[i].append(j[0])
return z
except:
return z
def get_fields_by_method(self, method):
z = {}
try:
for i in self.__cache_field_by_method[method.get_method_idx()]:
z[i] = []
for j in i.get_paths():
if method.get_method_idx() == j[1]:
z[i].append(j[0])
return z
except:
return z
def add(self, var, _type, _method=None):
if _type == TAINTED_FIELD:
key = var[0] + var[1] + var[2]
if key not in self.__vars[TAINTED_FIELD]:
self.__vars[TAINTED_FIELD][key] = TaintedVariable(var, _type)
elif _type == TAINTED_STRING:
if var not in self.__vars[TAINTED_STRING]:
self.__vars[TAINTED_STRING][var] = TaintedVariable(var, _type)
elif _type == TAINTED_LOCAL_VARIABLE:
if _method not in self.__vars[TAINTED_LOCAL_VARIABLE]:
self.__vars[TAINTED_LOCAL_VARIABLE][_method] = {}
if var not in self.__vars[TAINTED_LOCAL_VARIABLE][_method]:
self.__vars[TAINTED_LOCAL_VARIABLE][_method][
var] = TaintedVariable(var, _type)
def push_info(self, _type, var, access, idx, ref):
if _type == TAINTED_FIELD:
self.add(var, _type)
key = var[0] + var[1] + var[2]
self.__vars[_type][key].push(access, idx, ref)
method_idx = ref.get_method_idx()
if method_idx not in self.__cache_field_by_method:
self.__cache_field_by_method[method_idx] = set()
self.__cache_field_by_method[method_idx].add(
self.__vars[TAINTED_FIELD][key])
elif _type == TAINTED_STRING:
self.add(var, _type)
self.__vars[_type][var].push(access, idx, ref)
method_idx = ref.get_method_idx()
if method_idx not in self.__cache_string_by_method:
self.__cache_string_by_method[method_idx] = set()
self.__cache_string_by_method[method_idx].add(
self.__vars[TAINTED_STRING][var])
def show_Path(vm, path):
    """Resolve the endpoints of *path* (a PathVar or a PathP-like object)
    through the class manager.

    NOTE(review): this helper resolves names but produces no output — it
    looks like its printing was removed.
    """
    cm = vm.get_class_manager()
    if isinstance(path, PathVar):
        path.get_dst(cm)
    else:
        path.get_src(cm)
        if path.get_access_flag() == TAINTED_PACKAGE_CALL:
            path.get_dst(cm)
def get_Path(vm, path):
    """Serialise *path* into a dict with "src"/"dst"/"idx" entries.

    "dst" is only present for PathVar paths and for package *call* paths.
    """
    cm = vm.get_class_manager()
    info = {}
    if isinstance(path, PathVar):
        info["src"] = "%s" % path.get_var_info()
        info["dst"] = "%s %s %s" % path.get_dst(cm)
        info["idx"] = path.get_idx()
        return info
    if path.get_access_flag() == TAINTED_PACKAGE_CALL:
        info["src"] = "%s %s %s" % path.get_src(cm)
        info["dst"] = "%s %s %s" % path.get_dst(cm)
    else:
        info["src"] = "%s %s %s" % path.get_src(cm)
    info["idx"] = path.get_idx()
    return info
def show_Paths(vm, paths):
    """Run show_Path on every element of *paths*."""
    for current in paths:
        show_Path(vm, current)
def get_Paths(vm, paths):
    """Return the list of serialised (get_Path) dicts for *paths*."""
    return [get_Path(vm, single) for single in paths]
def show_PathVariable(vm, paths):
    # Disabled: the early return keeps the (dead) debug printing below from
    # running.  Kept as-is to preserve the public name for callers.
    return
    for path in paths:
        access, idx = path[0]
        m_idx = path[1]
        method = vm.get_cm_method(m_idx)
        print("%s %x %s->%s %s" %
              (access, idx, method[0], method[1], method[2][0] + method[2][1]))
class PathP(object):
    """A creation/call path between two methods: the access flag, the
    instruction index, and the source/destination method indices."""

    def __init__(self, access, idx, src_idx, dst_idx):
        self.access_flag = access
        self.idx = idx
        self.src_idx = src_idx
        self.dst_idx = dst_idx

    def get_access_flag(self):
        """Return the access flag of this path."""
        return self.access_flag

    def get_dst(self, cm):
        """Return (class name, name, descriptor) of the destination method."""
        ref = cm.get_method_ref(self.dst_idx)
        return ref.get_class_name(), ref.get_name(), ref.get_descriptor()

    def get_src(self, cm):
        """Return (class name, name, descriptor) of the source method."""
        ref = cm.get_method_ref(self.src_idx)
        return ref.get_class_name(), ref.get_name(), ref.get_descriptor()

    def get_idx(self):
        """Return the instruction index of this path."""
        return self.idx

    def get_src_idx(self):
        """Return the raw source method index."""
        return self.src_idx

    def get_dst_idx(self):
        """Return the raw destination method index."""
        return self.dst_idx
class TaintedPackage(object):
    """All creations of and calls into one external class/package."""

    def __init__(self, vm, name):
        self.vm = vm
        self.name = name
        # access flag -> list of PathP
        self.paths = {TAINTED_PACKAGE_CREATE: [], TAINTED_PACKAGE_CALL: []}

    def get_name(self):
        """Return the class name of this package."""
        return self.name

    def gets(self):
        """Return the raw {access flag: [PathP]} mapping."""
        return self.paths

    def push(self, access, idx, src_idx, dst_idx):
        """Record a new path and return the created PathP."""
        p = PathP(access, idx, src_idx, dst_idx)
        self.paths[access].append(p)
        return p

    def get_objects_paths(self):
        """Return the paths creating objects of this package."""
        return self.paths[TAINTED_PACKAGE_CREATE]

    def search_method(self, name, descriptor):
        """Return the call paths whose destination matches the *name* and
        *descriptor* regular expressions."""
        l = []
        m_name = re.compile(name)
        m_descriptor = re.compile(descriptor)
        for path in self.paths[TAINTED_PACKAGE_CALL]:
            _, dst_name, dst_descriptor = path.get_dst(
                self.vm.get_class_manager())
            if m_name.match(dst_name) is not None and m_descriptor.match(dst_descriptor) is not None:
                l.append(path)
        return l

    def get_method(self, name, descriptor):
        """Return the call paths whose destination is exactly *name* with
        *descriptor*.

        BUGFIX: PathP has no get_name()/get_descriptor(), so the previous
        implementation raised AttributeError on any non-empty path list; the
        destination is now resolved through the class manager like
        search_method() does.
        """
        l = []
        cm = self.vm.get_class_manager()
        for path in self.paths[TAINTED_PACKAGE_CALL]:
            _, dst_name, dst_descriptor = path.get_dst(cm)
            if dst_name == name and dst_descriptor == descriptor:
                l.append(path)
        return l

    def get_paths(self):
        """Yield every recorded path (creations and calls)."""
        for i in self.paths:
            for j in self.paths[i]:
                yield j

    def get_paths_length(self):
        """Return the total number of recorded paths."""
        x = 0
        for i in self.paths:
            x += len(self.paths[i])
        return x

    def get_methods(self):
        """Return the list of call paths."""
        return [path for path in self.paths[TAINTED_PACKAGE_CALL]]

    def get_new(self):
        """Return the list of creation paths."""
        return [path for path in self.paths[TAINTED_PACKAGE_CREATE]]

    def show(self):
        # Disabled: the early return keeps the debug dump below from running.
        return
        cm = self.vm.get_class_manager()
        print(self.get_name())
        for _type in self.paths:
            print("\t -->", _type)
            if _type == TAINTED_PACKAGE_CALL:
                for path in self.paths[_type]:
                    print("\t\t => %s <-- %x in %s" %
                          (path.get_dst(cm), path.get_idx(), path.get_src(cm)))
            else:
                for path in self.paths[_type]:
                    print("\t\t => %x in %s" %
                          (path.get_idx(), path.get_src(cm)))
def show_Permissions(dx):
    """Display every path that requires a permission in *dx*."""
    perms = dx.get_permissions([])
    for perm in perms:
        for path in perms[perm]:
            show_Path(dx.get_vm(), path)
def show_DynCode(dx):
    """Display the paths showing dynamic code loading (class loaders and
    DexFile usage)."""
    targets = (
        ("Ldalvik/system/BaseDexClassLoader;", "<init>"),
        ("Ldalvik/system/PathClassLoader;", "<init>"),
        ("Ldalvik/system/DexClassLoader;", "<init>"),
        ("Ldalvik/system/DexFile;", "<init>"),
        ("Ldalvik/system/DexFile;", "loadDex"),
    )
    paths = []
    for class_name, method_name in targets:
        paths.extend(dx.get_tainted_packages().search_methods(
            class_name, method_name, "."))
    show_Paths(dx.get_vm(), paths)
def show_NativeMethods(dx):
    # Disabled: early return; the print below is dead code.
    return
    print(get_NativeMethods(dx))
def show_ReflectionCode(dx):
    """Display the paths that invoke java.lang.reflect.Method."""
    reflection_paths = dx.get_tainted_packages().search_methods(
        "Ljava/lang/reflect/Method;", ".", ".")
    show_Paths(dx.get_vm(), reflection_paths)
def get_NativeMethods(dx):
    """Return [(class name, name, descriptor)] for every native method of
    the vm wrapped by *dx* (0x100 is the DEX ACC_NATIVE access flag)."""
    vm = dx.get_vm()
    return [(m.get_class_name(), m.get_name(), m.get_descriptor())
            for m in vm.get_methods()
            if m.get_access_flags() & 0x100]
def get_ReflectionCode(dx):
    """Return the serialised paths invoking java.lang.reflect.Method."""
    reflection_paths = dx.get_tainted_packages().search_methods(
        "Ljava/lang/reflect/Method;", ".", ".")
    return get_Paths(dx.get_vm(), reflection_paths)
def is_crypto_code(dx):
    """Return True when *dx* calls into javax.crypto or java.security.spec."""
    tp = dx.get_tainted_packages()
    probes = (
        ("Ljavax/crypto/.", "."),
        ("Ljava/security/spec/.", "."),
    )
    return any(tp.search_methods(cls, meth, ".") for cls, meth in probes)
def is_dyn_code(dx):
    """Return True when *dx* shows dynamic code loading (class loaders or
    DexFile usage)."""
    tp = dx.get_tainted_packages()
    probes = (
        ("Ldalvik/system/BaseDexClassLoader;", "<init>"),
        ("Ldalvik/system/PathClassLoader;", "<init>"),
        ("Ldalvik/system/DexClassLoader;", "<init>"),
        ("Ldalvik/system/DexFile;", "<init>"),
        ("Ldalvik/system/DexFile;", "loadDex"),
    )
    return any(tp.search_methods(cls, meth, ".") for cls, meth in probes)
def is_reflection_code(dx):
    """Return True when *dx* uses the Java reflection API (Method, Field,
    or Class.forName)."""
    tp = dx.get_tainted_packages()
    probes = (
        ("Ljava/lang/reflect/Method;", "."),
        ("Ljava/lang/reflect/Field;", "."),
        ("Ljava/lang/Class;", "forName"),
    )
    return any(tp.search_methods(cls, meth, ".") for cls, meth in probes)
def is_native_code(dx):
    """Return True when *dx* calls System.load*/Runtime.load*."""
    tp = dx.get_tainted_packages()
    probes = (
        ("Ljava/lang/System;", "load."),
        ("Ljava/lang/Runtime;", "load."),
    )
    return any(tp.search_methods(cls, meth, ".") for cls, meth in probes)
class TaintedPackages(object):
    """Registry of every package/class touched by the analysed DEX, with the
    call/creation paths into each of them and the AOSP permissions those
    calls imply."""

    def __init__(self, _vm):
        self.__vm = _vm
        self.__packages = {}
        self.__methods = {}
        self.AOSP_PERMISSIONS_MODULE = load_api_specific_resource_module(
            "aosp_permissions", self.__vm.get_api_version())
        self.API_PERMISSION_MAPPINGS_MODULE = load_api_specific_resource_module(
            "api_permission_mappings", self.__vm.get_api_version())

    def _add_pkg(self, name):
        # Create the package entry on first sight.
        if name not in self.__packages:
            self.__packages[name] = TaintedPackage(self.__vm, name)

    def push_info(self, class_name, access, idx, method, idx_method):
        """Record an access to *class_name* at instruction *idx* of *method*.

        NOTE(review): the nested bare ``except:`` blocks below implement
        setdefault-style insertion into ``self.__methods``; they also hide
        any unrelated error.
        """
        self._add_pkg(class_name)
        p = self.__packages[class_name].push(
            access, idx, method.get_method_idx(), idx_method)
        try:
            self.__methods[method][class_name].append(p)
        except:
            try:
                self.__methods[method][class_name] = []
            except:
                self.__methods[method] = {}
                self.__methods[method][class_name] = []
            self.__methods[method][class_name].append(p)

    def get_packages_by_method(self, method):
        """Return {class name: [PathP]} for *method* (empty dict if none)."""
        try:
            return self.__methods[method]
        except KeyError:
            return {}

    def get_package(self, name):
        """Return the TaintedPackage called *name* (KeyError if absent)."""
        return self.__packages[name]

    def get_packages_by_bb(self, bb):
        """Return (name, access flag, idx, method) tuples for paths in *bb*.

        NOTE(review): PathP (the stored path type) defines neither get_bb()
        nor get_method(), so this raises AttributeError when any path
        exists — confirm against the intended path type.
        """
        l = []
        for i in self.__packages:
            paths = self.__packages[i].gets()
            for j in paths:
                for k in paths[j]:
                    if k.get_bb() == bb:
                        l.append((i, k.get_access_flag(),
                                  k.get_idx(), k.get_method()))
        return l

    def get_packages(self):
        """Yield (TaintedPackage, class name) pairs."""
        for i in self.__packages:
            yield self.__packages[i], i

    def get_internal_packages_from_package(self, package):
        """Return the paths from *package* into classes defined in the DEX."""
        classes = self.__vm.get_classes_names()
        l = []
        for m, _ in self.get_packages():
            paths = m.get_methods()
            for j in paths:
                src_class_name, _, _ = j.get_src(self.__vm.get_class_manager())
                dst_class_name, _, _ = j.get_dst(self.__vm.get_class_manager())
                if src_class_name == package and dst_class_name in classes:
                    l.append(j)
        return l

    def get_internal_packages(self):
        """Return the call paths between two classes both defined in the DEX."""
        classes = self.__vm.get_classes_names()
        l = []
        for m, _ in self.get_packages():
            paths = m.get_methods()
            for j in paths:
                if j.get_access_flag() == TAINTED_PACKAGE_CALL:
                    dst_class_name, _, _ = j.get_dst(
                        self.__vm.get_class_manager())
                    if dst_class_name in classes and m.get_name() in classes:
                        l.append(j)
        return l

    def get_internal_new_packages(self):
        """Return {class name: [creation PathP]} for objects of internal
        classes created from internal classes."""
        classes = self.__vm.get_classes_names()
        l = {}
        for m, _ in self.get_packages():
            paths = m.get_new()
            for j in paths:
                src_class_name, _, _ = j.get_src(self.__vm.get_class_manager())
                if src_class_name in classes and m.get_name() in classes:
                    if j.get_access_flag() == TAINTED_PACKAGE_CREATE:
                        # setdefault-style insertion via bare except
                        try:
                            l[m.get_name()].append(j)
                        except:
                            l[m.get_name()] = []
                            l[m.get_name()].append(j)
        return l

    def get_external_packages(self):
        """Return the call paths from internal classes into external ones."""
        classes = self.__vm.get_classes_names()
        l = []
        for m, _ in self.get_packages():
            paths = m.get_methods()
            for j in paths:
                src_class_name, _, _ = j.get_src(self.__vm.get_class_manager())
                dst_class_name, _, _ = j.get_dst(self.__vm.get_class_manager())
                if src_class_name in classes and dst_class_name not in classes:
                    if j.get_access_flag() == TAINTED_PACKAGE_CALL:
                        l.append(j)
        return l

    def search_packages(self, package_name):
        """Return the call paths of every package matching the
        *package_name* regular expression."""
        ex = re.compile(package_name)
        l = []
        for m, _ in self.get_packages():
            if ex.search(m.get_name()) is not None:
                l.extend(m.get_methods())
        return l

    def search_unique_packages(self, package_name):
        """Return ([class, name, descriptor] list, occurrence dict) for
        matching packages.

        NOTE(review): broken as written — TaintedPackage has no get_info()
        and PathP has no get_class_name()/get_name()/get_descriptor(), so
        this raises AttributeError; also the per-key count starts at 0, not
        1, for the first occurrence.
        """
        ex = re.compile(package_name)
        l = []
        d = {}
        for m, _ in self.get_packages():
            if ex.match(m.get_info()) is not None:
                for path in m.get_methods():
                    try:
                        d[path.get_class_name() + path.get_name() +
                          path.get_descriptor()] += 1
                    except KeyError:
                        d[path.get_class_name() + path.get_name() +
                          path.get_descriptor()] = 0
                    l.append([path.get_class_name(),
                              path.get_name(), path.get_descriptor()])
        return l, d

    def search_methods(self, class_name, name, descriptor, re_expr=True):
        """Return the call paths whose destination class matches the
        *class_name* regex and whose method matches *name*/*descriptor*.

        NOTE(review): when re_expr is False nothing is searched and an
        empty list is returned.
        """
        l = []
        if re_expr:
            ex = re.compile(class_name)
            for m, _ in self.get_packages():
                if ex.search(m.get_name()) is not None:
                    l.extend(m.search_method(name, descriptor))
        return l

    def search_objects(self, class_name):
        """Return the creation paths of packages matching *class_name*."""
        ex = re.compile(class_name)
        l = []
        for m, _ in self.get_packages():
            if ex.search(m.get_name()) is not None:
                l.extend(m.get_objects_paths())
        return l

    def search_crypto_packages(self):
        """Return the call paths into javax.crypto."""
        return self.search_packages("Ljavax/crypto/")

    def search_telephony_packages(self):
        """Return the call paths into android.telephony."""
        return self.search_packages("Landroid/telephony/")

    def search_net_packages(self):
        """Return the call paths into android.net."""
        return self.search_packages("Landroid/net/")

    def get_method(self, class_name, name, descriptor):
        """Return the call paths to the exact method, via the package's
        get_method (empty list when the class is unknown)."""
        try:
            return self.__packages[class_name].get_method(name, descriptor)
        except KeyError:
            return []

    def get_permissions_method(self, method):
        """Return the set of AOSP permissions implied by the API calls made
        from *method*."""
        permissions = set()
        for m, _ in self.get_packages():
            paths = m.get_methods()
            for j in paths:
                if j.get_method() == method:
                    if j.get_access_flag() == TAINTED_PACKAGE_CALL:
                        dst_class_name, dst_method_name, dst_descriptor = j.get_dst(
                            self.__vm.get_class_manager())
                        data = "%s-%s-%s" % (dst_class_name,
                                             dst_method_name, dst_descriptor)
                        if data in list(self.API_PERMISSION_MAPPINGS_MODULE["AOSP_PERMISSIONS_BY_METHODS"].keys()):
                            permissions.update(self.API_PERMISSION_MAPPINGS_MODULE[
                                "AOSP_PERMISSIONS_BY_METHODS"][data])
        return permissions

    def get_permissions(self, permissions_needed):
        """Map each permission of *permissions_needed* (or every AOSP
        permission when the list is empty) onto the external call paths
        that require it."""
        permissions = {}
        pn = set(permissions_needed)
        if permissions_needed == []:
            pn = set(self.AOSP_PERMISSIONS_MODULE["AOSP_PERMISSIONS"].keys())
        classes = self.__vm.get_classes_names()
        for m, _ in self.get_packages():
            paths = m.get_methods()
            for j in paths:
                src_class_name, src_method_name, src_descriptor = j.get_src(
                    self.__vm.get_class_manager())
                dst_class_name, dst_method_name, dst_descriptor = j.get_dst(
                    self.__vm.get_class_manager())
                if (src_class_name in classes) and (dst_class_name not in classes):
                    if j.get_access_flag() == TAINTED_PACKAGE_CALL:
                        data = "%s-%s-%s" % (dst_class_name,
                                             dst_method_name, dst_descriptor)
                        if data in list(self.API_PERMISSION_MAPPINGS_MODULE["AOSP_PERMISSIONS_BY_METHODS"].keys()):
                            perm_intersection = pn.intersection(self.API_PERMISSION_MAPPINGS_MODULE[
                                "AOSP_PERMISSIONS_BY_METHODS"][data])
                            for p in perm_intersection:
                                try:
                                    permissions[p].append(j)
                                except KeyError:
                                    permissions[p] = []
                                    permissions[p].append(j)
        return permissions
class Enum(object):
    """Minimal enum: exposes each name (upper-cased) as an attribute bound
    to its position in *names*."""

    def __init__(self, names):
        self.names = names
        for value, name in enumerate(names):
            setattr(self, name.upper(), value)

    def tuples(self):
        """Return ((0, name0), (1, name1), ...)."""
        return tuple(enumerate(self.names))
# Categories used to tag methods/classes by the Android/Java API area they
# touch (see the Tags class).
TAG_ANDROID = Enum([
    'ANDROID', 'TELEPHONY', 'SMS', 'SMSMESSAGE', 'ACCESSIBILITYSERVICE', 'ACCOUNTS',
    'ANIMATION', 'APP', 'BLUETOOTH', 'CONTENT', 'DATABASE', 'DEBUG', 'DRM', 'GESTURE',
    'GRAPHICS', 'HARDWARE', 'INPUTMETHODSERVICE', 'LOCATION', 'MEDIA', 'MTP',
    'NET', 'NFC', 'OPENGL', 'OS', 'PREFERENCE', 'PROVIDER', 'RENDERSCRIPT',
    'SAX', 'SECURITY', 'SERVICE', 'SPEECH', 'SUPPORT', 'TEST', 'TEXT', 'UTIL',
    'VIEW', 'WEBKIT', 'WIDGET', 'DALVIK_BYTECODE', 'DALVIK_SYSTEM', 'JAVA_REFLECTION'])

# tag value -> tag name, for display purposes.
TAG_REVERSE_ANDROID = dict((i[0], i[1]) for i in TAG_ANDROID.tuples())

# tag -> [match kind (0 = class-name regex), pattern]; the patterns are
# compiled by Tags at construction time.
TAGS_ANDROID = {
    TAG_ANDROID.ANDROID: [0, "Landroid"],
    TAG_ANDROID.TELEPHONY: [0, "Landroid/telephony"],
    TAG_ANDROID.SMS: [0, "Landroid/telephony/SmsManager"],
    TAG_ANDROID.SMSMESSAGE: [0, "Landroid/telephony/SmsMessage"],
    TAG_ANDROID.DEBUG: [0, "Landroid/os/Debug"],
    TAG_ANDROID.ACCESSIBILITYSERVICE: [0, "Landroid/accessibilityservice"],
    TAG_ANDROID.ACCOUNTS: [0, "Landroid/accounts"],
    TAG_ANDROID.ANIMATION: [0, "Landroid/animation"],
    TAG_ANDROID.APP: [0, "Landroid/app"],
    TAG_ANDROID.BLUETOOTH: [0, "Landroid/bluetooth"],
    TAG_ANDROID.CONTENT: [0, "Landroid/content"],
    TAG_ANDROID.DATABASE: [0, "Landroid/database"],
    TAG_ANDROID.DRM: [0, "Landroid/drm"],
    TAG_ANDROID.GESTURE: [0, "Landroid/gesture"],
    TAG_ANDROID.GRAPHICS: [0, "Landroid/graphics"],
    TAG_ANDROID.HARDWARE: [0, "Landroid/hardware"],
    TAG_ANDROID.INPUTMETHODSERVICE: [0, "Landroid/inputmethodservice"],
    TAG_ANDROID.LOCATION: [0, "Landroid/location"],
    TAG_ANDROID.MEDIA: [0, "Landroid/media"],
    TAG_ANDROID.MTP: [0, "Landroid/mtp"],
    TAG_ANDROID.NET: [0, "Landroid/net"],
    TAG_ANDROID.NFC: [0, "Landroid/nfc"],
    TAG_ANDROID.OPENGL: [0, "Landroid/opengl"],
    TAG_ANDROID.OS: [0, "Landroid/os"],
    TAG_ANDROID.PREFERENCE: [0, "Landroid/preference"],
    TAG_ANDROID.PROVIDER: [0, "Landroid/provider"],
    TAG_ANDROID.RENDERSCRIPT: [0, "Landroid/renderscript"],
    TAG_ANDROID.SAX: [0, "Landroid/sax"],
    TAG_ANDROID.SECURITY: [0, "Landroid/security"],
    TAG_ANDROID.SERVICE: [0, "Landroid/service"],
    TAG_ANDROID.SPEECH: [0, "Landroid/speech"],
    TAG_ANDROID.SUPPORT: [0, "Landroid/support"],
    TAG_ANDROID.TEST: [0, "Landroid/test"],
    TAG_ANDROID.TEXT: [0, "Landroid/text"],
    TAG_ANDROID.UTIL: [0, "Landroid/util"],
    TAG_ANDROID.VIEW: [0, "Landroid/view"],
    TAG_ANDROID.WEBKIT: [0, "Landroid/webkit"],
    TAG_ANDROID.WIDGET: [0, "Landroid/widget"],
    TAG_ANDROID.DALVIK_BYTECODE: [0, "Ldalvik/bytecode"],
    TAG_ANDROID.DALVIK_SYSTEM: [0, "Ldalvik/system"],
    TAG_ANDROID.JAVA_REFLECTION: [0, "Ljava/lang/reflect"],
}
class Tags(object):
    """Accumulates the API-area tags (see TAGS_ANDROID) matched by the class
    names a method refers to.

    :param patterns: {tag: [match kind, class-name regex]}
    :param reverse: {tag value: tag name}, used for display
    """

    def __init__(self, patterns=TAGS_ANDROID, reverse=TAG_REVERSE_ANDROID):
        self.tags = set()
        # Compile into a private copy instead of mutating the shared,
        # module-level *patterns* lists in place.
        self.patterns = {}
        for key, (kind, regexp) in patterns.items():
            self.patterns[key] = [kind, re.compile(regexp)]
        # BUGFIX: the *reverse* argument was previously ignored in favour of
        # the TAG_REVERSE_ANDROID global.
        self.reverse = reverse

    def emit(self, method):
        """Tag according to the class that *method* belongs to."""
        for i in self.patterns:
            if self.patterns[i][0] == 0:
                if self.patterns[i][1].search(method.get_class()) is not None:
                    self.tags.add(i)

    def emit_by_classname(self, classname):
        """Tag according to a raw class name."""
        for i in self.patterns:
            if self.patterns[i][0] == 0:
                if self.patterns[i][1].search(classname) is not None:
                    self.tags.add(i)

    def get_list(self):
        """Return the names of the matched tags."""
        return [self.reverse[i] for i in self.tags]

    def __contains__(self, key):
        return key in self.tags

    def __str__(self):
        return str([self.reverse[i] for i in self.tags])

    def empty(self):
        """Return True when nothing has been tagged yet."""
        return self.tags == set()
class BasicBlocks(object):
    """Ordered collection of the basic blocks of one method."""

    def __init__(self, _vm, tv):
        self.__vm = _vm
        self.tainted = tv
        self.bb = []

    def push(self, bb):
        """Append a basic block."""
        self.bb.append(bb)

    def pop(self, idx):
        """Remove and return the block at list position *idx*."""
        return self.bb.pop(idx)

    def get_basic_block(self, idx):
        """Return the block containing offset *idx* (start inclusive, end
        exclusive), or None."""
        for i in self.bb:
            if idx >= i.get_start() and idx < i.get_end():
                return i
        return None

    def get_tainted_integers(self):
        """Return the tainted integers, or None when no taint analysis is
        attached.

        BUGFIX: the bare ``except:`` hid every error; only the expected
        AttributeError (``self.tainted`` is None or lacks the accessor) is
        swallowed now.
        """
        try:
            return self.tainted.get_tainted_integers()
        except AttributeError:
            return None

    def get_tainted_packages(self):
        """Return the tainted packages, or None (see get_tainted_integers)."""
        try:
            return self.tainted.get_tainted_packages()
        except AttributeError:
            return None

    def get_tainted_variables(self):
        """Return the tainted variables, or None (see get_tainted_integers)."""
        try:
            return self.tainted.get_tainted_variables()
        except AttributeError:
            return None

    def get(self):
        """Yield the blocks in insertion order."""
        for i in self.bb:
            yield i

    def gets(self):
        """Return the underlying list of blocks."""
        return self.bb

    def get_basic_block_pos(self, idx):
        """Return the block at list position *idx*."""
        return self.bb[idx]
class ExceptionAnalysis(object):
    """One try/catch region: [start, end] offsets plus a list of
    [type name, handler offset, handler basic block] entries."""

    def __init__(self, exception, bb):
        self.start = exception[0]
        self.end = exception[1]
        self.exceptions = exception[2:]
        # Resolve each handler offset to its basic block (may be None).
        for handler in self.exceptions:
            handler.append(bb.get_basic_block(handler[1]))

    def show_buff(self):
        """Return a printable multi-line description of the region."""
        lines = ["%x:%x" % (self.start, self.end)]
        for name, off, block in self.exceptions:
            target = block if block is None else block.get_name()
            lines.append("\t(%s -> %x %s)" % (name, off, target))
        return "\n".join(lines)

    def get(self):
        """Return the region as a dict; every handler's basic block must be
        resolved (non-None) or this raises AttributeError."""
        d = {"start": self.start, "end": self.end, "list": []}
        for name, off, block in self.exceptions:
            d["list"].append({"name": name, "idx": off, "bb": block.get_name()})
        return d
class Exceptions(object):
    """All try/catch regions of one method."""

    def __init__(self, _vm):
        self.__vm = _vm
        self.exceptions = []

    def add(self, exceptions, basic_blocks):
        """Wrap each raw exception entry into an ExceptionAnalysis."""
        self.exceptions.extend(
            ExceptionAnalysis(entry, basic_blocks) for entry in exceptions)

    def get_exception(self, addr_start, addr_end):
        """Return the first region where one of [addr_start, addr_end] and
        the region contains the other, or None."""
        for region in self.exceptions:
            if region.start >= addr_start and region.end <= addr_end:
                return region
            if addr_end <= region.end and addr_start >= region.start:
                return region
        return None

    def gets(self):
        """Return the list of regions."""
        return self.exceptions

    def get(self):
        """Yield the regions."""
        for region in self.exceptions:
            yield region
# Bundle of the dvm primitives used by MethodAnalysis to build basic blocks:
# the branch-opcode name patterns, the basic-block class, and the helpers
# computing an instruction's successors and a method's exceptions.
BO = {"BasicOPCODES": dvm.BRANCH_DVM_OPCODES, "BasicClass": DVMBasicBlock,
      "Dnext": dvm.determineNext, "Dexception": dvm.determineException}

BO["BasicOPCODES_H"] = []
# Pre-compile the branch opcode name patterns once at import time.
for i in BO["BasicOPCODES"]:
    BO["BasicOPCODES_H"].append(re.compile(i))
class MethodAnalysis(object):
    """Builds and stores the control-flow information (basic blocks and
    exception regions) of a single method.

    :param vm: the vm object the method belongs to
    :param method: the method object to analyse
    :param tv: the taint analysis object (may be None)

    NOTE(review): the middle of ``__init__`` was missing/garbled in the
    source (an orphaned ``basic blocks childs")`` fragment; ``h``, ``l``,
    ``instructions`` and ``excepts`` were referenced but never built).  The
    block-splitting section below was reconstructed from the surviving
    tail — confirm against the project history.
    """

    def __init__(self, vm, method, tv):
        self.__vm = vm
        self.method = method
        self.tainted = tv

        self.basic_blocks = BasicBlocks(self.__vm, self.tainted)
        self.exceptions = Exceptions(self.__vm)

        code = self.method.get_code()
        if code is None:
            return

        current_basic = BO["BasicClass"](
            0, self.__vm, self.method, self.basic_blocks)
        self.basic_blocks.push(current_basic)

        bc = code.get_bc()

        # Collect every branch target (l) and, per branch instruction at
        # offset idx, its successor offsets (h[idx]).
        l = []
        h = {}
        idx = 0
        instructions = [i for i in bc.get_instructions()]
        for i in instructions:
            for j in BO["BasicOPCODES_H"]:
                if j.match(i.get_name()) is not None:
                    v = BO["Dnext"](i, idx, self.method)
                    h[idx] = v
                    l.extend(v)
                    break
            idx += i.get_length()

        # Exception start offsets and handler targets also begin new blocks.
        excepts = BO["Dexception"](self.__vm, self.method)
        for i in excepts:
            l.extend([i[0]])
            for handler in i[2:]:
                l.append(handler[1])

        # Split the instruction stream into basic blocks at every target.
        idx = 0
        for i in instructions:
            if idx in l:
                if current_basic.get_nb_instructions() != 0:
                    current_basic = BO["BasicClass"](
                        current_basic.get_end(), self.__vm, self.method,
                        self.basic_blocks)
                    self.basic_blocks.push(current_basic)
            current_basic.push(i)
            idx += i.get_length()

        if current_basic.get_nb_instructions() == 0:
            self.basic_blocks.pop(-1)

        debug("Settings basic blocks childs")
        for i in self.basic_blocks.get():
            try:
                i.set_childs(h[i.end - i.get_last_length()])
            except KeyError:
                i.set_childs([])

        debug("Creating exceptions")
        self.exceptions.add(excepts, self.basic_blocks)
        for i in self.basic_blocks.get():
            i.set_exception_analysis(
                self.exceptions.get_exception(i.start, i.end - 1))

        del instructions
        del h, l

    def get_basic_blocks(self):
        """Return the BasicBlocks container of this method."""
        return self.basic_blocks

    def get_length(self):
        """Return the length of the method's code.

        BUGFIX: MethodAnalysis has no ``get_code()``; delegate to the
        underlying method object instead of raising AttributeError.
        """
        return self.method.get_code().get_length()

    def get_vm(self):
        """Return the vm object."""
        return self.__vm

    def get_method(self):
        """Return the analysed method object."""
        return self.method

    def get_local_variables(self):
        """Return the tainted local variables of this method."""
        return self.tainted.get_tainted_variables().get_local_variables(self.method)

    def show(self):
        # Disabled: the early return keeps the debug dump below from running.
        return
        print("METHOD", self.method.get_class_name(),
              self.method.get_name(), self.method.get_descriptor())
        for i in self.basic_blocks.get():
            print("\t", i)
            i.show()
            print("")

    def show_methods(self):
        # Disabled dead code; it also references the undefined ``self.__bb``.
        return
        print("\t #METHODS :")
        for i in self.__bb:
            methods = i.get_methods()
            for method in methods:
                print("\t\t-->", method.get_class_name(),
                      method.get_name(), method.get_descriptor())
                for context in methods[method]:
                    print("\t\t\t |---|", context.details)

    def create_tags(self):
        """Compute the API-area tags of this method from the packages it
        touches."""
        self.tags = Tags()
        for i in self.tainted.get_tainted_packages().get_packages_by_method(self.method):
            self.tags.emit_by_classname(i)

    def get_tags(self):
        """Return the Tags computed by create_tags()."""
        return self.tags
# Names of the predefined signature grammars accepted by
# VMAnalysis.get_method_signature() (resolved through SIGNATURES below).
SIGNATURE_L0_0 = "L0_0"
SIGNATURE_L0_1 = "L0_1"
SIGNATURE_L0_2 = "L0_2"
SIGNATURE_L0_3 = "L0_3"
SIGNATURE_L0_4 = "L0_4"
SIGNATURE_L0_5 = "L0_5"
SIGNATURE_L0_6 = "L0_6"
SIGNATURE_L0_0_L1 = "L0_0:L1"
SIGNATURE_L0_1_L1 = "L0_1:L1"
SIGNATURE_L0_2_L1 = "L0_2:L1"
SIGNATURE_L0_3_L1 = "L0_3:L1"
SIGNATURE_L0_4_L1 = "L0_4:L1"
SIGNATURE_L0_5_L1 = "L0_5:L1"
SIGNATURE_L0_0_L2 = "L0_0:L2"
SIGNATURE_L0_0_L3 = "L0_0:L3"
SIGNATURE_HEX = "hex"
SIGNATURE_SEQUENCE_BB = "sequencebb"

# Grammar name -> configuration ("type" and optional class-name "arguments")
# handed to the Signature implementation.
SIGNATURES = {
    SIGNATURE_L0_0: {"type": 0},
    SIGNATURE_L0_1: {"type": 1},
    SIGNATURE_L0_2: {"type": 2, "arguments": ["Landroid"]},
    SIGNATURE_L0_3: {"type": 2, "arguments": ["Ljava"]},
    SIGNATURE_L0_4: {"type": 2, "arguments": ["Landroid", "Ljava"]},
    SIGNATURE_L0_5: {"type": 3, "arguments": ["Landroid"]},
    SIGNATURE_L0_6: {"type": 3, "arguments": ["Ljava"]},
    SIGNATURE_SEQUENCE_BB: {},
    SIGNATURE_HEX: {},
}
class StringAnalysis(object):
    """Cross-references for one constant string value."""

    def __init__(self, value):
        self.value = value
        self.xreffrom = set()

    def AddXrefFrom(self, classobj, methodobj):
        """Record that (classobj, methodobj) uses this string."""
        self.xreffrom.add((classobj, methodobj))

    def get_xref_from(self):
        """Return the set of (class, method) users."""
        return self.xreffrom

    def __str__(self):
        header = "XREFto for string %s in\n" % repr(self.value)
        body = "".join("%s:%s\n" % (cls.get_vm_class().get_name(), meth)
                       for cls, meth in self.xreffrom)
        return header + body
class MethodClassAnalysis(object):
    """Incoming and outgoing call cross-references of one method."""

    def __init__(self, method):
        self.method = method
        self.xrefto = set()
        self.xreffrom = set()

    def AddXrefTo(self, classobj, methodobj):
        """Record a call from this method to (classobj, methodobj)."""
        self.xrefto.add((classobj, methodobj))

    def AddXrefFrom(self, classobj, methodobj):
        """Record a call to this method from (classobj, methodobj)."""
        self.xreffrom.add((classobj, methodobj))

    def get_xref_from(self):
        """Return the set of callers."""
        return self.xreffrom

    def get_xref_to(self):
        """Return the set of callees."""
        return self.xrefto

    def __str__(self):
        parts = ["XREFto for %s\n" % self.method]
        for cls, meth in self.xrefto:
            parts.append("in\n")
            parts.append("%s:%s\n" % (cls.get_vm_class().get_name(), meth))
        parts.append("XREFFrom for %s\n" % self.method)
        for cls, meth in self.xreffrom:
            parts.append("in\n")
            parts.append("%s:%s\n" % (cls.get_vm_class().get_name(), meth))
        return "".join(parts)
class FieldClassAnalysis(object):
    """Read and write cross-references of one field."""

    def __init__(self, field):
        self.field = field
        self.xrefread = set()
        self.xrefwrite = set()

    def AddXrefRead(self, classobj, methodobj):
        """Record that (classobj, methodobj) reads this field."""
        self.xrefread.add((classobj, methodobj))

    def AddXrefWrite(self, classobj, methodobj):
        """Record that (classobj, methodobj) writes this field."""
        self.xrefwrite.add((classobj, methodobj))

    def get_xref_read(self):
        """Return the set of readers."""
        return self.xrefread

    def get_xref_write(self):
        """Return the set of writers."""
        return self.xrefwrite

    def __str__(self):
        parts = ["XREFRead for %s\n" % self.field]
        for cls, meth in self.xrefread:
            parts.append("in\n")
            parts.append("%s:%s\n" % (cls.get_vm_class().get_name(), meth))
        parts.append("XREFWrite for %s\n" % self.field)
        for cls, meth in self.xrefwrite:
            parts.append("in\n")
            parts.append("%s:%s\n" % (cls.get_vm_class().get_name(), meth))
        return "".join(parts)
# Kinds of class cross-references recorded by ClassAnalysis:
REF_NEW_INSTANCE = 0  # the class was instantiated (new-instance)
REF_CLASS_USAGE = 1   # the class was referenced some other way
class ClassAnalysis(object):
    """Cross-reference information (classes, methods, fields) for one class."""

    def __init__(self, classobj):
        self._class = classobj
        self._methods = {}
        self._fields = {}
        self.xrefto = collections.defaultdict(set)
        self.xreffrom = collections.defaultdict(set)

    def get_method_analysis(self, method):
        """Return the MethodClassAnalysis of *method*, or None."""
        return self._methods.get(method)

    def get_field_analysis(self, field):
        """Return the FieldClassAnalysis of *field*, or None."""
        return self._fields.get(field)

    def _field_analysis(self, field):
        # Lazily create the per-field analysis object.
        if field not in self._fields:
            self._fields[field] = FieldClassAnalysis(field)
        return self._fields[field]

    def _method_analysis(self, method):
        # Lazily create the per-method analysis object.
        if method not in self._methods:
            self._methods[method] = MethodClassAnalysis(method)
        return self._methods[method]

    def AddFXrefRead(self, method, classobj, field):
        """Record that *method* (of *classobj*) reads *field*."""
        self._field_analysis(field).AddXrefRead(classobj, method)

    def AddFXrefWrite(self, method, classobj, field):
        """Record that *method* (of *classobj*) writes *field*."""
        self._field_analysis(field).AddXrefWrite(classobj, method)

    def AddMXrefTo(self, method1, classobj, method2):
        """Record a call from *method1* to *method2* (of *classobj*)."""
        self._method_analysis(method1).AddXrefTo(classobj, method2)

    def AddMXrefFrom(self, method1, classobj, method2):
        """Record a call to *method1* from *method2* (of *classobj*)."""
        self._method_analysis(method1).AddXrefFrom(classobj, method2)

    def AddXrefTo(self, ref_kind, classobj, methodobj):
        """Record a class-level reference from this class to *classobj*."""
        self.xrefto[classobj].add((ref_kind, methodobj))

    def AddXrefFrom(self, ref_kind, classobj, methodobj):
        """Record a class-level reference to this class from *classobj*."""
        self.xreffrom[classobj].add((ref_kind, methodobj))

    def get_xref_from(self):
        """Return {class: set of (ref kind, method)} referencing this class."""
        return self.xreffrom

    def get_xref_to(self):
        """Return {class: set of (ref kind, method)} referenced by this class."""
        return self.xrefto

    def get_vm_class(self):
        """Return the underlying class object."""
        return self._class

    def __str__(self):
        out = ["XREFto for %s\n" % self._class]
        for ref_class in self.xrefto:
            out.append(str(ref_class.get_vm_class().get_name()) + " ")
            out.append("in\n")
            for ref_kind, ref_method in self.xrefto[ref_class]:
                out.append("%d %s\n" % (ref_kind, ref_method))
            out.append("\n")
        out.append("XREFFrom for %s\n" % self._class)
        for ref_class in self.xreffrom:
            out.append(str(ref_class.get_vm_class().get_name()) + " ")
            out.append("in\n")
            for ref_kind, ref_method in self.xreffrom[ref_class]:
                out.append("%d %s\n" % (ref_kind, ref_method))
            out.append("\n")
        return "".join(out)
class newVMAnalysis(object):
def __init__(self, vm):
self.vm = vm
self.classes = {}
self.strings = {}
for current_class in self.vm.get_classes():
self.classes[current_class.get_name()] = ClassAnalysis(
current_class)
def create_xref(self):
debug("Creating XREF/DREF")
instances_class_name = list(self.classes.keys())
for current_class in self.vm.get_classes():
for current_method in current_class.get_methods():
debug("Creating XREF for %s" % current_method)
code = current_method.get_code()
if code is None:
continue
off = 0
bc = code.get_bc()
for instruction in bc.get_instructions():
op_value = instruction.get_op_value()
if op_value in [0x1c, 0x22]:
idx_type = instruction.get_ref_kind()
type_info = self.vm.get_cm_type(idx_type)
if type_info in instances_class_name and type_info != current_class.get_name():
if op_value == 0x22:
self.classes[current_class.get_name()].AddXrefTo(
REF_NEW_INSTANCE, self.classes[type_info], current_method)
self.classes[type_info].AddXrefFrom(REF_NEW_INSTANCE, self.classes[
current_class.get_name()], current_method)
else:
self.classes[current_class.get_name()].AddXrefTo(
REF_CLASS_USAGE, self.classes[type_info], current_method)
self.classes[type_info].AddXrefFrom(REF_CLASS_USAGE, self.classes[
current_class.get_name()], current_method)
elif ((op_value >= 0x6e and op_value <= 0x72) or
(op_value >= 0x74 and op_value <= 0x78)):
idx_meth = instruction.get_ref_kind()
method_info = self.vm.get_cm_method(idx_meth)
if method_info:
class_info = method_info[0]
method_item = self.vm.get_method_descriptor(
method_info[0], method_info[1], ''.join(method_info[2]))
if method_item:
self.classes[current_class.get_name()].AddMXrefTo(
current_method, self.classes[class_info], method_item)
self.classes[class_info].AddMXrefFrom(
method_item, self.classes[current_class.get_name()], current_method)
if class_info in instances_class_name and class_info != current_class.get_name():
self.classes[current_class.get_name()].AddXrefTo(
REF_CLASS_USAGE, self.classes[class_info], method_item)
self.classes[class_info].AddXrefFrom(REF_CLASS_USAGE, self.classes[
current_class.get_name()], current_method)
elif op_value >= 0x1a and op_value <= 0x1b:
string_value = self.vm.get_cm_string(
instruction.get_ref_kind())
if string_value not in self.strings:
self.strings[string_value] = StringAnalysis(
string_value)
self.strings[string_value].AddXrefFrom(
self.classes[current_class.get_name()], current_method)
elif op_value >= 0x52 and op_value <= 0x6d:
idx_field = instruction.get_ref_kind()
field_info = self.vm.get_cm_field(idx_field)
field_item = self.vm.get_field_descriptor(
field_info[0], field_info[2], field_info[1])
if field_item:
if (op_value >= 0x52 and op_value <= 0x58) or (op_value >= 0x60 and op_value <= 0x66):
self.classes[current_class.get_name()].AddFXrefRead(
current_method, self.classes[current_class.get_name()], field_item)
else:
self.classes[current_class.get_name()].AddFXrefWrite(
current_method, self.classes[current_class.get_name()], field_item)
off += instruction.get_length()
def get_method(self, method):
return MethodAnalysis(self.vm, method, None)
def get_method_by_name(self, class_name, method_name, method_descriptor):
if class_name in self.classes:
for method in self.classes[class_name].get_vm_class().get_methods():
if method.get_name() == method_name and method.get_descriptor() == method_descriptor:
return method
return None
def is_class_present(self, class_name):
return class_name in self.classes
def get_class_analysis(self, class_name):
return self.classes.get(class_name)
    def get_strings_analysis(self):
        """Return the mapping of string value -> StringAnalysis."""
        return self.strings
class VMAnalysis(object):
    """Eagerly-built analysis over a loaded VM (DEX) object.

    On construction every field is registered with the tainted-variable
    tracker and a MethodAnalysis is built for every method, so all
    cross-reference data is available immediately (contrast with
    uVMAnalysis, which resolves lazily on demand).
    """

    def __init__(self, vm):
        self.vm = vm
        self.tainted_variables = TaintedVariables(self.vm)
        self.tainted_packages = TaintedPackages(self.vm)
        self.tainted = {"variables": self.tainted_variables,
                        "packages": self.tainted_packages,
                        }
        # Built lazily by get_method_signature() on first use.
        self.signature = None

        # Register every field up front so accesses can be tracked.
        for i in self.vm.get_all_fields():
            self.tainted_variables.add(
                [i.get_class_name(), i.get_descriptor(), i.get_name()], TAINTED_FIELD)

        # methods: ordered list; hmethods: EncodedMethod -> MethodAnalysis;
        # __nmethods: method name -> MethodAnalysis (last one wins on
        # duplicate names -- preserved from the original behaviour).
        self.methods = []
        self.hmethods = {}
        self.__nmethods = {}
        for i in self.vm.get_methods():
            x = MethodAnalysis(self.vm, i, self)
            self.methods.append(x)
            self.hmethods[i] = x
            self.__nmethods[i.get_name()] = x

    def get_vm(self):
        """Return the underlying VM object."""
        return self.vm

    def get_method(self, method):
        """Return the pre-built MethodAnalysis for an encoded method.

        :raises KeyError: if *method* was not part of the VM at
            construction time.
        """
        return self.hmethods[method]

    def get_methods(self):
        """Yield every MethodAnalysis built for this VM."""
        for analysis in self.hmethods.values():
            yield analysis

    def get_method_signature(self, method, grammar_type="", options=None, predef_sign=""):
        """Return the signature of *method*.

        :param method: the encoded method to sign
        :param grammar_type: grammar string passed to Signature.get_method
        :param options: optional dict of grammar options (default: empty)
        :param predef_sign: name of a predefined signature from
            SIGNATURES; when non-empty it overrides grammar_type/options
        """
        # BUG FIX: the default used to be a shared mutable dict
        # (options={}); a callee mutating it would leak state between
        # calls. Use the None sentinel instead.
        if options is None:
            options = {}
        if self.signature is None:
            self.signature = Signature(self)

        if predef_sign != "":
            g = ""
            o = {}
            # Tokens containing "_" are predefined signature names and
            # are substituted in under the "L0" grammar slot.
            for i in predef_sign.split(":"):
                if "_" in i:
                    g += "L0:"
                    o["L0"] = SIGNATURES[i]
                else:
                    g += i
                    g += ":"
            return self.signature.get_method(self.get_method(method), g[:-1], o)
        return self.signature.get_method(self.get_method(method), grammar_type, options)

    def get_permissions(self, permissions_needed):
        """Return merged permission usage from packages and variables."""
        permissions = {}
        permissions.update(self.get_tainted_packages(
        ).get_permissions(permissions_needed))
        permissions.update(self.get_tainted_variables(
        ).get_permissions(permissions_needed))
        return permissions

    def get_permissions_method(self, method):
        """Return the list of permissions used by a single method."""
        permissions_f = self.get_tainted_packages().get_permissions_method(method)
        permissions_v = self.get_tainted_variables().get_permissions_method(method)
        return list(permissions_f.union(permissions_v))

    def get_tainted_variables(self):
        """Return the TaintedVariables tracker."""
        return self.tainted_variables

    def get_tainted_packages(self):
        """Return the TaintedPackages tracker."""
        return self.tainted_packages

    def get_tainted_fields(self):
        """Return all tainted fields."""
        return self.get_tainted_variables().get_fields()

    def get_tainted_field(self, class_name, name, descriptor):
        """Return the tainted field matching (class_name, name, descriptor)."""
        return self.get_tainted_variables().get_field(class_name, name, descriptor)
class uVMAnalysis(VMAnalysis):
    """Lazy ("micro") variant of VMAnalysis.

    Nothing is analysed at construction time; MethodAnalysis objects
    are produced on demand by get_methods(), and the tainted trackers
    are only populated once a full pass over all methods has been
    forced via _resolve().
    """

    def __init__(self, vm):
        self.vm = vm
        self.tainted_variables = TaintedVariables(self.vm)
        self.tainted_packages = TaintedPackages(self.vm)
        self.tainted = {
            "variables": self.tainted_variables,
            "packages": self.tainted_packages,
        }
        self.signature = None
        # Flips to True the first time get_methods() starts iterating.
        self.resolve = False

    def get_methods(self):
        """Yield a freshly-built MethodAnalysis for every VM method."""
        self.resolve = True
        for encoded_method in self.vm.get_methods():
            yield MethodAnalysis(self.vm, encoded_method, self)

    def get_method(self, method):
        """Return a standalone MethodAnalysis for *method*."""
        return MethodAnalysis(self.vm, method, None)

    def get_vm(self):
        """Return the underlying VM object."""
        return self.vm

    def _resolve(self):
        # Force one complete pass over all methods so the tainted
        # trackers get populated as a side effect of analysis.
        if self.resolve:
            return
        for _ in self.get_methods():
            pass

    def get_tainted_packages(self):
        """Return the TaintedPackages tracker (forces resolution)."""
        self._resolve()
        return self.tainted_packages

    def get_tainted_variables(self):
        """Return the TaintedVariables tracker (forces resolution)."""
        self._resolve()
        return self.tainted_variables
def is_ascii_obfuscation(vm):
    """Return True if any class or method name in *vm* trips
    is_ascii_problem(), i.e. looks like name obfuscation."""
    return any(
        is_ascii_problem(klass.get_name())
        or any(is_ascii_problem(meth.get_name())
               for meth in klass.get_methods())
        for klass in vm.get_classes())
| true | true |
f71f68f60efce427cc864118cc7e00210f6bd3bb | 302 | py | Python | python3/recent_counter.py | joshiaj7/CodingChallenges | f95dd79132f07c296e074d675819031912f6a943 | [
"MIT"
] | 1 | 2020-10-08T09:17:40.000Z | 2020-10-08T09:17:40.000Z | python3/recent_counter.py | joshiaj7/CodingChallenges | f95dd79132f07c296e074d675819031912f6a943 | [
"MIT"
] | null | null | null | python3/recent_counter.py | joshiaj7/CodingChallenges | f95dd79132f07c296e074d675819031912f6a943 | [
"MIT"
] | null | null | null | """
space : O(n)
time : O(n)
"""
class RecentCounter:
    """LeetCode 933: count pings whose timestamp is within the last 3000 ms.

    ping() is called with strictly increasing timestamps, so a FIFO of
    recent timestamps suffices: drop entries from the front until every
    remaining one lies inside the window [t - 3000, t].

    space : O(n)
    time  : amortized O(1) per ping (each timestamp is appended once
            and removed at most once)
    """

    def __init__(self):
        # deque gives O(1) popleft(); the original list.pop(0) shifted
        # the whole list (O(n)) for every expired ping.
        from collections import deque
        self.history = deque()

    def ping(self, t: int) -> int:
        """Record a ping at time *t*; return the number of pings whose
        timestamp lies in [t - 3000, t] (inclusive)."""
        self.history.append(t)
        cutoff = t - 3000
        # t itself is always >= cutoff, so the deque never empties here.
        while self.history[0] < cutoff:
            self.history.popleft()
        return len(self.history)
| 15.1 | 34 | 0.503311 |
class RecentCounter:
    """Track ping timestamps (arriving in increasing order) and report
    how many fall within the trailing 3000 ms window."""

    def __init__(self):
        self.history = []

    def ping(self, t: int) -> int:
        """Record a ping at *t*; return the count of pings in [t-3000, t]."""
        self.history.append(t)
        cutoff = t - 3000
        # Expire everything strictly older than the window start.
        while self.history[0] < cutoff:
            del self.history[0]
        return len(self.history)
| true | true |
f71f694ec80a3bd4c8eb0b4d9cd3f8f8a53b92c1 | 8,293 | py | Python | knack/invocation.py | derekbekoe/knack | 07ce4c3ae51ef22e6364ed93c5980cae7688e347 | [
"MIT"
] | 1 | 2019-02-10T01:38:05.000Z | 2019-02-10T01:38:05.000Z | knack/invocation.py | derekbekoe/knack | 07ce4c3ae51ef22e6364ed93c5980cae7688e347 | [
"MIT"
] | null | null | null | knack/invocation.py | derekbekoe/knack | 07ce4c3ae51ef22e6364ed93c5980cae7688e347 | [
"MIT"
] | null | null | null | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from __future__ import print_function
import sys
from collections import defaultdict
from .deprecation import ImplicitDeprecated, resolve_deprecate_info
from .util import CLIError, CtxTypeError, CommandResultItem, todict
from .parser import CLICommandParser
from .commands import CLICommandsLoader
from .events import (EVENT_INVOKER_PRE_CMD_TBL_CREATE, EVENT_INVOKER_POST_CMD_TBL_CREATE,
EVENT_INVOKER_CMD_TBL_LOADED, EVENT_INVOKER_PRE_PARSE_ARGS,
EVENT_INVOKER_POST_PARSE_ARGS, EVENT_INVOKER_TRANSFORM_RESULT,
EVENT_INVOKER_FILTER_RESULT)
from .help import CLIHelp
class CommandInvoker(object):
    """Drives a single CLI invocation end-to-end: builds the command
    table, parses arguments, runs validators, executes the command and
    post-processes the result, raising the invoker events
    (EVENT_INVOKER_*) at each stage so extensions can hook in."""

    def __init__(self,
                 cli_ctx=None,
                 parser_cls=CLICommandParser,
                 commands_loader_cls=CLICommandsLoader,
                 help_cls=CLIHelp,
                 initial_data=None):
        """ Manages a single invocation of the CLI (i.e. running a command)

        :param cli_ctx: CLI Context
        :type cli_ctx: knack.cli.CLI
        :param parser_cls: A class to handle command parsing
        :type parser_cls: knack.parser.CLICommandParser
        :param commands_loader_cls: A class to handle loading commands
        :type commands_loader_cls: knack.commands.CLICommandsLoader
        :param help_cls: A class to handle help
        :type help_cls: knack.help.CLIHelp
        :param initial_data: The initial in-memory collection for this command invocation
        :type initial_data: dict
        """
        # Imported here to avoid a circular import with knack.cli.
        from .cli import CLI
        if cli_ctx is not None and not isinstance(cli_ctx, CLI):
            raise CtxTypeError(cli_ctx)
        self.cli_ctx = cli_ctx
        # In memory collection of key-value data for this current invocation This does not persist between invocations.
        self.data = initial_data or defaultdict(lambda: None)
        self.data['command'] = 'unknown'
        self._global_parser = parser_cls.create_global_parser(cli_ctx=self.cli_ctx)
        self.help = help_cls(cli_ctx=self.cli_ctx)
        self.parser = parser_cls(cli_ctx=self.cli_ctx, cli_help=self.help,
                                 prog=self.cli_ctx.name, parents=[self._global_parser])
        self.commands_loader = commands_loader_cls(cli_ctx=self.cli_ctx)

    def _filter_params(self, args):  # pylint: disable=no-self-use
        """Turn a parsed argparse namespace into the kwargs dict passed
        to the command handler, dropping private and bookkeeping keys."""
        # Consider - we are using any args that start with an underscore (_) as 'private'
        # arguments and remove them from the arguments that we pass to the actual function.
        params = {key: value
                  for key, value in args.__dict__.items()
                  if not key.startswith('_')}
        params.pop('func', None)
        params.pop('command', None)
        return params

    def _rudimentary_get_command(self, args):
        """ Rudimentary parsing to get the command

        Takes the leading non-flag tokens of *args* and trims from the
        right until they prefix-match a name in the command table.
        NOTE: lowercases the matched leading tokens of *args* in place
        so later parsing is case-insensitive.
        """
        nouns = []
        command_names = self.commands_loader.command_table.keys()
        # Collect leading positional tokens; stop at the first flag.
        for arg in args:
            if arg and arg[0] != '-':
                nouns.append(arg)
            else:
                break

        def _find_args(args):
            # True-ish (the matched name) if some command starts with
            # the joined, lowercased tokens; False otherwise.
            search = ' '.join(args).lower()
            return next((x for x in command_names if x.startswith(search)), False)

        # since the command name may be immediately followed by a positional arg, strip those off
        while nouns and not _find_args(nouns):
            del nouns[-1]

        # ensure the command string is case-insensitive
        for i in range(len(nouns)):
            args[i] = args[i].lower()

        return ' '.join(nouns)

    def _validate_cmd_level(self, ns, cmd_validator):  # pylint: disable=no-self-use
        """Run the command-level validator (if any) and strip its marker
        attribute from the namespace afterwards."""
        if cmd_validator:
            cmd_validator(ns)
        try:
            delattr(ns, '_command_validator')
        except AttributeError:
            pass

    def _validate_arg_level(self, ns, **_):  # pylint: disable=no-self-use
        """Run every argument-level validator attached to the namespace,
        then strip the marker attribute."""
        for validator in getattr(ns, '_argument_validators', []):
            validator(ns)
        try:
            delattr(ns, '_argument_validators')
        except AttributeError:
            pass

    def _validation(self, parsed_ns):
        """Dispatch validation: a command-level validator, when present,
        replaces all argument-level validators. Non-CLIError failures
        are reported through the parser as validation errors."""
        try:
            cmd_validator = getattr(parsed_ns, '_command_validator', None)
            if cmd_validator:
                self._validate_cmd_level(parsed_ns, cmd_validator)
            else:
                self._validate_arg_level(parsed_ns)
        except CLIError:
            raise
        except Exception:  # pylint: disable=broad-except
            err = sys.exc_info()[1]
            getattr(parsed_ns, '_parser', self.parser).validation_error(str(err))

    def execute(self, args):
        """ Executes the command invocation

        :param args: The command arguments for this invocation
        :type args: list
        :return: The command result
        :rtype: knack.util.CommandResultItem
        """
        import colorama

        # Build the command table (events let extensions adjust it),
        # then load arguments only for the command actually requested.
        self.cli_ctx.raise_event(EVENT_INVOKER_PRE_CMD_TBL_CREATE, args=args)
        cmd_tbl = self.commands_loader.load_command_table(args)
        command = self._rudimentary_get_command(args)
        self.cli_ctx.invocation.data['command_string'] = command
        self.commands_loader.load_arguments(command)
        self.cli_ctx.raise_event(EVENT_INVOKER_POST_CMD_TBL_CREATE, cmd_tbl=cmd_tbl)
        self.parser.load_command_table(self.commands_loader)
        self.cli_ctx.raise_event(EVENT_INVOKER_CMD_TBL_LOADED, parser=self.parser)

        # No real arguments (only verbosity flags) -> show the welcome
        # screen and exit successfully.
        arg_check = [a for a in args if a not in ['--verbose', '--debug']]
        if not arg_check:
            self.cli_ctx.completion.enable_autocomplete(self.parser)
            subparser = self.parser.subparsers[tuple()]
            self.help.show_welcome(subparser)
            return CommandResultItem(None, exit_code=0)

        # 'cli help ...' is sugar for 'cli --help ...'.
        if args[0].lower() == 'help':
            args[0] = '--help'

        self.cli_ctx.completion.enable_autocomplete(self.parser)

        self.cli_ctx.raise_event(EVENT_INVOKER_PRE_PARSE_ARGS, args=args)
        parsed_args = self.parser.parse_args(args)
        self.cli_ctx.raise_event(EVENT_INVOKER_POST_PARSE_ARGS, command=parsed_args.command, args=parsed_args)

        self._validation(parsed_args)

        # save the command name (leaf in the tree)
        self.data['command'] = parsed_args.command

        cmd = parsed_args.func
        if hasattr(parsed_args, 'cmd'):
            parsed_args.cmd = cmd

        # Collect explicit deprecations from arguments and the command.
        deprecations = getattr(parsed_args, '_argument_deprecations', [])
        if cmd.deprecate_info:
            deprecations.append(cmd.deprecate_info)

        params = self._filter_params(parsed_args)

        # search for implicit deprecation: walk ancestor command groups
        # from the deepest up until one carries deprecation info.
        path_comps = cmd.name.split()[:-1]
        implicit_deprecate_info = None
        while path_comps and not implicit_deprecate_info:
            implicit_deprecate_info = resolve_deprecate_info(self.cli_ctx, ' '.join(path_comps))
            del path_comps[-1]

        if implicit_deprecate_info:
            deprecate_kwargs = implicit_deprecate_info.__dict__.copy()
            deprecate_kwargs['object_type'] = 'command'
            # Drop the bound helpers copied from __dict__; the new
            # ImplicitDeprecated supplies its own.
            del deprecate_kwargs['_get_tag']
            del deprecate_kwargs['_get_message']
            deprecations.append(ImplicitDeprecated(**deprecate_kwargs))

        # colorama makes the (possibly colored) warnings render on
        # Windows terminals too.
        colorama.init()
        for d in deprecations:
            print(d.message, file=sys.stderr)
        colorama.deinit()

        cmd_result = parsed_args.func(params)
        cmd_result = todict(cmd_result)

        # Let event handlers transform and filter the result in place.
        event_data = {'result': cmd_result}
        self.cli_ctx.raise_event(EVENT_INVOKER_TRANSFORM_RESULT, event_data=event_data)
        self.cli_ctx.raise_event(EVENT_INVOKER_FILTER_RESULT, event_data=event_data)

        return CommandResultItem(event_data['result'],
                                 exit_code=0,
                                 table_transformer=cmd_tbl[parsed_args.command].table_transformer,
                                 is_query_active=self.data['query_active'])
| 41.673367 | 119 | 0.636079 |
from __future__ import print_function
import sys
from collections import defaultdict
from .deprecation import ImplicitDeprecated, resolve_deprecate_info
from .util import CLIError, CtxTypeError, CommandResultItem, todict
from .parser import CLICommandParser
from .commands import CLICommandsLoader
from .events import (EVENT_INVOKER_PRE_CMD_TBL_CREATE, EVENT_INVOKER_POST_CMD_TBL_CREATE,
EVENT_INVOKER_CMD_TBL_LOADED, EVENT_INVOKER_PRE_PARSE_ARGS,
EVENT_INVOKER_POST_PARSE_ARGS, EVENT_INVOKER_TRANSFORM_RESULT,
EVENT_INVOKER_FILTER_RESULT)
from .help import CLIHelp
class CommandInvoker(object):
    """Drives one CLI invocation: builds the command table, parses the
    arguments, validates them, runs the command and post-processes the
    result, raising EVENT_INVOKER_* events along the way.

    NOTE(review): this appears to be an auto-generated comment-stripped
    copy of knack's invocation.py -- confirm it is intentional.
    """

    def __init__(self,
                 cli_ctx=None,
                 parser_cls=CLICommandParser,
                 commands_loader_cls=CLICommandsLoader,
                 help_cls=CLIHelp,
                 initial_data=None):
        """Set up the parser, help and command loader for one invocation.

        :param cli_ctx: CLI context (must be a knack.cli.CLI or None)
        :param initial_data: per-invocation key-value store (dict)
        """
        # Local import avoids a circular import with knack.cli.
        from .cli import CLI
        if cli_ctx is not None and not isinstance(cli_ctx, CLI):
            raise CtxTypeError(cli_ctx)
        self.cli_ctx = cli_ctx
        # Per-invocation scratch data; does not persist across invocations.
        self.data = initial_data or defaultdict(lambda: None)
        self.data['command'] = 'unknown'
        self._global_parser = parser_cls.create_global_parser(cli_ctx=self.cli_ctx)
        self.help = help_cls(cli_ctx=self.cli_ctx)
        self.parser = parser_cls(cli_ctx=self.cli_ctx, cli_help=self.help,
                                 prog=self.cli_ctx.name, parents=[self._global_parser])
        self.commands_loader = commands_loader_cls(cli_ctx=self.cli_ctx)

    def _filter_params(self, args):
        """Return handler kwargs from a parsed namespace, dropping
        underscore-prefixed (private) and bookkeeping entries."""
        params = {key: value
                  for key, value in args.__dict__.items()
                  if not key.startswith('_')}
        params.pop('func', None)
        params.pop('command', None)
        return params

    def _rudimentary_get_command(self, args):
        """Best-effort command-name extraction before full parsing.

        Trims trailing leading-tokens until they prefix-match a command
        table entry; lowercases those tokens of *args* in place.
        """
        nouns = []
        command_names = self.commands_loader.command_table.keys()
        for arg in args:
            if arg and arg[0] != '-':
                nouns.append(arg)
            else:
                break

        def _find_args(args):
            search = ' '.join(args).lower()
            return next((x for x in command_names if x.startswith(search)), False)

        while nouns and not _find_args(nouns):
            del nouns[-1]
        for i in range(len(nouns)):
            args[i] = args[i].lower()
        return ' '.join(nouns)

    def _validate_cmd_level(self, ns, cmd_validator):
        """Run the command-level validator and remove its marker."""
        if cmd_validator:
            cmd_validator(ns)
        try:
            delattr(ns, '_command_validator')
        except AttributeError:
            pass

    def _validate_arg_level(self, ns, **_):
        """Run all argument-level validators and remove their marker."""
        for validator in getattr(ns, '_argument_validators', []):
            validator(ns)
        try:
            delattr(ns, '_argument_validators')
        except AttributeError:
            pass

    def _validation(self, parsed_ns):
        """Command-level validator (if set) wins over argument-level
        ones; unexpected errors become parser validation errors."""
        try:
            cmd_validator = getattr(parsed_ns, '_command_validator', None)
            if cmd_validator:
                self._validate_cmd_level(parsed_ns, cmd_validator)
            else:
                self._validate_arg_level(parsed_ns)
        except CLIError:
            raise
        except Exception:
            err = sys.exc_info()[1]
            getattr(parsed_ns, '_parser', self.parser).validation_error(str(err))

    def execute(self, args):
        """Execute one command invocation; return a CommandResultItem."""
        import colorama
        self.cli_ctx.raise_event(EVENT_INVOKER_PRE_CMD_TBL_CREATE, args=args)
        cmd_tbl = self.commands_loader.load_command_table(args)
        command = self._rudimentary_get_command(args)
        self.cli_ctx.invocation.data['command_string'] = command
        self.commands_loader.load_arguments(command)
        self.cli_ctx.raise_event(EVENT_INVOKER_POST_CMD_TBL_CREATE, cmd_tbl=cmd_tbl)
        self.parser.load_command_table(self.commands_loader)
        self.cli_ctx.raise_event(EVENT_INVOKER_CMD_TBL_LOADED, parser=self.parser)

        # Nothing but verbosity flags -> welcome screen, exit code 0.
        arg_check = [a for a in args if a not in ['--verbose', '--debug']]
        if not arg_check:
            self.cli_ctx.completion.enable_autocomplete(self.parser)
            subparser = self.parser.subparsers[tuple()]
            self.help.show_welcome(subparser)
            return CommandResultItem(None, exit_code=0)
        # 'help ...' is sugar for '--help ...'.
        if args[0].lower() == 'help':
            args[0] = '--help'
        self.cli_ctx.completion.enable_autocomplete(self.parser)
        self.cli_ctx.raise_event(EVENT_INVOKER_PRE_PARSE_ARGS, args=args)
        parsed_args = self.parser.parse_args(args)
        self.cli_ctx.raise_event(EVENT_INVOKER_POST_PARSE_ARGS, command=parsed_args.command, args=parsed_args)
        self._validation(parsed_args)
        self.data['command'] = parsed_args.command
        cmd = parsed_args.func
        if hasattr(parsed_args, 'cmd'):
            parsed_args.cmd = cmd
        # Explicit deprecations from arguments and the command itself.
        deprecations = getattr(parsed_args, '_argument_deprecations', [])
        if cmd.deprecate_info:
            deprecations.append(cmd.deprecate_info)
        params = self._filter_params(parsed_args)
        # Walk ancestor command groups looking for implicit deprecation.
        path_comps = cmd.name.split()[:-1]
        implicit_deprecate_info = None
        while path_comps and not implicit_deprecate_info:
            implicit_deprecate_info = resolve_deprecate_info(self.cli_ctx, ' '.join(path_comps))
            del path_comps[-1]
        if implicit_deprecate_info:
            deprecate_kwargs = implicit_deprecate_info.__dict__.copy()
            deprecate_kwargs['object_type'] = 'command'
            del deprecate_kwargs['_get_tag']
            del deprecate_kwargs['_get_message']
            deprecations.append(ImplicitDeprecated(**deprecate_kwargs))
        # colorama lets the deprecation warnings render on Windows too.
        colorama.init()
        for d in deprecations:
            print(d.message, file=sys.stderr)
        colorama.deinit()
        cmd_result = parsed_args.func(params)
        cmd_result = todict(cmd_result)
        # Event handlers may transform/filter the result in place.
        event_data = {'result': cmd_result}
        self.cli_ctx.raise_event(EVENT_INVOKER_TRANSFORM_RESULT, event_data=event_data)
        self.cli_ctx.raise_event(EVENT_INVOKER_FILTER_RESULT, event_data=event_data)
        return CommandResultItem(event_data['result'],
                                 exit_code=0,
                                 table_transformer=cmd_tbl[parsed_args.command].table_transformer,
                                 is_query_active=self.data['query_active'])
| true | true |
f71f6972720d1f87a308457a99c2da6ef6fe19d9 | 63,620 | py | Python | LeetCode/contest-2018-11-26/fair_candy_swap.py | Max-PJB/python-learning2 | e8b05bef1574ee9abf8c90497e94ef20a7f4e3bd | [
"MIT"
] | null | null | null | LeetCode/contest-2018-11-26/fair_candy_swap.py | Max-PJB/python-learning2 | e8b05bef1574ee9abf8c90497e94ef20a7f4e3bd | [
"MIT"
] | null | null | null | LeetCode/contest-2018-11-26/fair_candy_swap.py | Max-PJB/python-learning2 | e8b05bef1574ee9abf8c90497e94ef20a7f4e3bd | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
-------------------------------------------------
@ Author : pengj
@ date : 2018/11/26 19:28
@ IDE : PyCharm
@ GitHub : https://github.com/JackyPJB
@ Contact : pengjianbiao@hotmail.com
-------------------------------------------------
Description : 888. 公平的糖果交换
虚拟 用户通过次数 0
虚拟 用户尝试次数 1
虚拟 通过次数 0
虚拟 提交次数 1
题目难度 Easy
爱丽丝和鲍勃有不同大小的糖果棒:A[i] 是爱丽丝拥有的第 i 块糖的大小,B[j] 是鲍勃拥有的第 j 块糖的大小。
因为他们是朋友,所以他们想交换一个糖果棒,这样交换后,他们都有相同的糖果总量。(一个人拥有的糖果总量是他们拥有的糖果棒大小的总和。)
返回一个整数数组 ans,其中 ans[0] 是爱丽丝必须交换的糖果棒的大小,ans[1] 是 Bob 必须交换的糖果棒的大小。
如果有多个答案,你可以返回其中任何一个。保证答案存在。
示例 1:
输入:A = [1,1], B = [2,2]
输出:[1,2]
示例 2:
输入:A = [1,2], B = [2,3]
输出:[1,2]
示例 3:
输入:A = [2], B = [1,3]
输出:[2,3]
示例 4:
输入:A = [1,2,5], B = [2,4]
输出:[5,4]
提示:
1 <= A.length <= 10000
1 <= B.length <= 10000
1 <= A[i] <= 100000
1 <= B[i] <= 100000
保证爱丽丝与鲍勃的糖果总量不同。
答案肯定存在。
-------------------------------------------------
"""
import time
__author__ = 'Max_Pengjb'
start = time.time()
# 下面写上代码块
class Solution(object):
    """LeetCode 888: fair candy swap."""

    def fairCandySwap(self, A, B):
        """Return [a, b]: Alice trades her bar of size a for Bob's bar
        of size b so both end up with equal totals.

        :type A: List[int]
        :type B: List[int]
        :rtype: List[int]

        The problem guarantees an answer exists, which implies
        sum(A) - sum(B) is even, so the floor division below is exact.
        O(len(A) + len(B)) time, O(len(B)) extra space.
        """
        # Alice's outgoing bar must exceed the incoming one by k:
        # a - b = (sum(A) - sum(B)) / 2.
        k = (sum(A) - sum(B)) // 2
        # Set membership is O(1); the original built a dict of 1s just
        # to use its keys.
        sizes_b = set(B)
        for a in A:
            if a - k in sizes_b:
                return [a, a - k]
# Sample input (problem example 4); expected answer: [5, 4].
A = [1, 2, 5]
B = [2, 4]
a1 = [1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 41, 43, 45, 47, 49, 51, 53, 55, 57, 59,
61, 63, 65, 67, 69, 71, 73, 75, 77, 79, 81, 83, 85, 87, 89, 91, 93, 95, 97, 99, 101, 103, 105, 107, 109, 111, 113,
115, 117, 119, 121, 123, 125, 127, 129, 131, 133, 135, 137, 139, 141, 143, 145, 147, 149, 151, 153, 155, 157, 159,
161, 163, 165, 167, 169, 171, 173, 175, 177, 179, 181, 183, 185, 187, 189, 191, 193, 195, 197, 199, 201, 203, 205,
207, 209, 211, 213, 215, 217, 219, 221, 223, 225, 227, 229, 231, 233, 235, 237, 239, 241, 243, 245, 247, 249, 251,
253, 255, 257, 259, 261, 263, 265, 267, 269, 271, 273, 275, 277, 279, 281, 283, 285, 287, 289, 291, 293, 295, 297,
299, 301, 303, 305, 307, 309, 311, 313, 315, 317, 319, 321, 323, 325, 327, 329, 331, 333, 335, 337, 339, 341, 343,
345, 347, 349, 351, 353, 355, 357, 359, 361, 363, 365, 367, 369, 371, 373, 375, 377, 379, 381, 383, 385, 387, 389,
391, 393, 395, 397, 399, 401, 403, 405, 407, 409, 411, 413, 415, 417, 419, 421, 423, 425, 427, 429, 431, 433, 435,
437, 439, 441, 443, 445, 447, 449, 451, 453, 455, 457, 459, 461, 463, 465, 467, 469, 471, 473, 475, 477, 479, 481,
483, 485, 487, 489, 491, 493, 495, 497, 499, 501, 503, 505, 507, 509, 511, 513, 515, 517, 519, 521, 523, 525, 527,
529, 531, 533, 535, 537, 539, 541, 543, 545, 547, 549, 551, 553, 555, 557, 559, 561, 563, 565, 567, 569, 571, 573,
575, 577, 579, 581, 583, 585, 587, 589, 591, 593, 595, 597, 599, 601, 603, 605, 607, 609, 611, 613, 615, 617, 619,
621, 623, 625, 627, 629, 631, 633, 635, 637, 639, 641, 643, 645, 647, 649, 651, 653, 655, 657, 659, 661, 663, 665,
667, 669, 671, 673, 675, 677, 679, 681, 683, 685, 687, 689, 691, 693, 695, 697, 699, 701, 703, 705, 707, 709, 711,
713, 715, 717, 719, 721, 723, 725, 727, 729, 731, 733, 735, 737, 739, 741, 743, 745, 747, 749, 751, 753, 755, 757,
759, 761, 763, 765, 767, 769, 771, 773, 775, 777, 779, 781, 783, 785, 787, 789, 791, 793, 795, 797, 799, 801, 803,
805, 807, 809, 811, 813, 815, 817, 819, 821, 823, 825, 827, 829, 831, 833, 835, 837, 839, 841, 843, 845, 847, 849,
851, 853, 855, 857, 859, 861, 863, 865, 867, 869, 871, 873, 875, 877, 879, 881, 883, 885, 887, 889, 891, 893, 895,
897, 899, 901, 903, 905, 907, 909, 911, 913, 915, 917, 919, 921, 923, 925, 927, 929, 931, 933, 935, 937, 939, 941,
943, 945, 947, 949, 951, 953, 955, 957, 959, 961, 963, 965, 967, 969, 971, 973, 975, 977, 979, 981, 983, 985, 987,
989, 991, 993, 995, 997, 999, 1001, 1003, 1005, 1007, 1009, 1011, 1013, 1015, 1017, 1019, 1021, 1023, 1025, 1027,
1029, 1031, 1033, 1035, 1037, 1039, 1041, 1043, 1045, 1047, 1049, 1051, 1053, 1055, 1057, 1059, 1061, 1063, 1065,
1067, 1069, 1071, 1073, 1075, 1077, 1079, 1081, 1083, 1085, 1087, 1089, 1091, 1093, 1095, 1097, 1099, 1101, 1103,
1105, 1107, 1109, 1111, 1113, 1115, 1117, 1119, 1121, 1123, 1125, 1127, 1129, 1131, 1133, 1135, 1137, 1139, 1141,
1143, 1145, 1147, 1149, 1151, 1153, 1155, 1157, 1159, 1161, 1163, 1165, 1167, 1169, 1171, 1173, 1175, 1177, 1179,
1181, 1183, 1185, 1187, 1189, 1191, 1193, 1195, 1197, 1199, 1201, 1203, 1205, 1207, 1209, 1211, 1213, 1215, 1217,
1219, 1221, 1223, 1225, 1227, 1229, 1231, 1233, 1235, 1237, 1239, 1241, 1243, 1245, 1247, 1249, 1251, 1253, 1255,
1257, 1259, 1261, 1263, 1265, 1267, 1269, 1271, 1273, 1275, 1277, 1279, 1281, 1283, 1285, 1287, 1289, 1291, 1293,
1295, 1297, 1299, 1301, 1303, 1305, 1307, 1309, 1311, 1313, 1315, 1317, 1319, 1321, 1323, 1325, 1327, 1329, 1331,
1333, 1335, 1337, 1339, 1341, 1343, 1345, 1347, 1349, 1351, 1353, 1355, 1357, 1359, 1361, 1363, 1365, 1367, 1369,
1371, 1373, 1375, 1377, 1379, 1381, 1383, 1385, 1387, 1389, 1391, 1393, 1395, 1397, 1399, 1401, 1403, 1405, 1407,
1409, 1411, 1413, 1415, 1417, 1419, 1421, 1423, 1425, 1427, 1429, 1431, 1433, 1435, 1437, 1439, 1441, 1443, 1445,
1447, 1449, 1451, 1453, 1455, 1457, 1459, 1461, 1463, 1465, 1467, 1469, 1471, 1473, 1475, 1477, 1479, 1481, 1483,
1485, 1487, 1489, 1491, 1493, 1495, 1497, 1499, 1501, 1503, 1505, 1507, 1509, 1511, 1513, 1515, 1517, 1519, 1521,
1523, 1525, 1527, 1529, 1531, 1533, 1535, 1537, 1539, 1541, 1543, 1545, 1547, 1549, 1551, 1553, 1555, 1557, 1559,
1561, 1563, 1565, 1567, 1569, 1571, 1573, 1575, 1577, 1579, 1581, 1583, 1585, 1587, 1589, 1591, 1593, 1595, 1597,
1599, 1601, 1603, 1605, 1607, 1609, 1611, 1613, 1615, 1617, 1619, 1621, 1623, 1625, 1627, 1629, 1631, 1633, 1635,
1637, 1639, 1641, 1643, 1645, 1647, 1649, 1651, 1653, 1655, 1657, 1659, 1661, 1663, 1665, 1667, 1669, 1671, 1673,
1675, 1677, 1679, 1681, 1683, 1685, 1687, 1689, 1691, 1693, 1695, 1697, 1699, 1701, 1703, 1705, 1707, 1709, 1711,
1713, 1715, 1717, 1719, 1721, 1723, 1725, 1727, 1729, 1731, 1733, 1735, 1737, 1739, 1741, 1743, 1745, 1747, 1749,
1751, 1753, 1755, 1757, 1759, 1761, 1763, 1765, 1767, 1769, 1771, 1773, 1775, 1777, 1779, 1781, 1783, 1785, 1787,
1789, 1791, 1793, 1795, 1797, 1799, 1801, 1803, 1805, 1807, 1809, 1811, 1813, 1815, 1817, 1819, 1821, 1823, 1825,
1827, 1829, 1831, 1833, 1835, 1837, 1839, 1841, 1843, 1845, 1847, 1849, 1851, 1853, 1855, 1857, 1859, 1861, 1863,
1865, 1867, 1869, 1871, 1873, 1875, 1877, 1879, 1881, 1883, 1885, 1887, 1889, 1891, 1893, 1895, 1897, 1899, 1901,
1903, 1905, 1907, 1909, 1911, 1913, 1915, 1917, 1919, 1921, 1923, 1925, 1927, 1929, 1931, 1933, 1935, 1937, 1939,
1941, 1943, 1945, 1947, 1949, 1951, 1953, 1955, 1957, 1959, 1961, 1963, 1965, 1967, 1969, 1971, 1973, 1975, 1977,
1979, 1981, 1983, 1985, 1987, 1989, 1991, 1993, 1995, 1997, 1999, 2001, 2003, 2005, 2007, 2009, 2011, 2013, 2015,
2017, 2019, 2021, 2023, 2025, 2027, 2029, 2031, 2033, 2035, 2037, 2039, 2041, 2043, 2045, 2047, 2049, 2051, 2053,
2055, 2057, 2059, 2061, 2063, 2065, 2067, 2069, 2071, 2073, 2075, 2077, 2079, 2081, 2083, 2085, 2087, 2089, 2091,
2093, 2095, 2097, 2099, 2101, 2103, 2105, 2107, 2109, 2111, 2113, 2115, 2117, 2119, 2121, 2123, 2125, 2127, 2129,
2131, 2133, 2135, 2137, 2139, 2141, 2143, 2145, 2147, 2149, 2151, 2153, 2155, 2157, 2159, 2161, 2163, 2165, 2167,
2169, 2171, 2173, 2175, 2177, 2179, 2181, 2183, 2185, 2187, 2189, 2191, 2193, 2195, 2197, 2199, 2201, 2203, 2205,
2207, 2209, 2211, 2213, 2215, 2217, 2219, 2221, 2223, 2225, 2227, 2229, 2231, 2233, 2235, 2237, 2239, 2241, 2243,
2245, 2247, 2249, 2251, 2253, 2255, 2257, 2259, 2261, 2263, 2265, 2267, 2269, 2271, 2273, 2275, 2277, 2279, 2281,
2283, 2285, 2287, 2289, 2291, 2293, 2295, 2297, 2299, 2301, 2303, 2305, 2307, 2309, 2311, 2313, 2315, 2317, 2319,
2321, 2323, 2325, 2327, 2329, 2331, 2333, 2335, 2337, 2339, 2341, 2343, 2345, 2347, 2349, 2351, 2353, 2355, 2357,
2359, 2361, 2363, 2365, 2367, 2369, 2371, 2373, 2375, 2377, 2379, 2381, 2383, 2385, 2387, 2389, 2391, 2393, 2395,
2397, 2399, 2401, 2403, 2405, 2407, 2409, 2411, 2413, 2415, 2417, 2419, 2421, 2423, 2425, 2427, 2429, 2431, 2433,
2435, 2437, 2439, 2441, 2443, 2445, 2447, 2449, 2451, 2453, 2455, 2457, 2459, 2461, 2463, 2465, 2467, 2469, 2471,
2473, 2475, 2477, 2479, 2481, 2483, 2485, 2487, 2489, 2491, 2493, 2495, 2497, 2499, 2501, 2503, 2505, 2507, 2509,
2511, 2513, 2515, 2517, 2519, 2521, 2523, 2525, 2527, 2529, 2531, 2533, 2535, 2537, 2539, 2541, 2543, 2545, 2547,
2549, 2551, 2553, 2555, 2557, 2559, 2561, 2563, 2565, 2567, 2569, 2571, 2573, 2575, 2577, 2579, 2581, 2583, 2585,
2587, 2589, 2591, 2593, 2595, 2597, 2599, 2601, 2603, 2605, 2607, 2609, 2611, 2613, 2615, 2617, 2619, 2621, 2623,
2625, 2627, 2629, 2631, 2633, 2635, 2637, 2639, 2641, 2643, 2645, 2647, 2649, 2651, 2653, 2655, 2657, 2659, 2661,
2663, 2665, 2667, 2669, 2671, 2673, 2675, 2677, 2679, 2681, 2683, 2685, 2687, 2689, 2691, 2693, 2695, 2697, 2699,
2701, 2703, 2705, 2707, 2709, 2711, 2713, 2715, 2717, 2719, 2721, 2723, 2725, 2727, 2729, 2731, 2733, 2735, 2737,
2739, 2741, 2743, 2745, 2747, 2749, 2751, 2753, 2755, 2757, 2759, 2761, 2763, 2765, 2767, 2769, 2771, 2773, 2775,
2777, 2779, 2781, 2783, 2785, 2787, 2789, 2791, 2793, 2795, 2797, 2799, 2801, 2803, 2805, 2807, 2809, 2811, 2813,
2815, 2817, 2819, 2821, 2823, 2825, 2827, 2829, 2831, 2833, 2835, 2837, 2839, 2841, 2843, 2845, 2847, 2849, 2851,
2853, 2855, 2857, 2859, 2861, 2863, 2865, 2867, 2869, 2871, 2873, 2875, 2877, 2879, 2881, 2883, 2885, 2887, 2889,
2891, 2893, 2895, 2897, 2899, 2901, 2903, 2905, 2907, 2909, 2911, 2913, 2915, 2917, 2919, 2921, 2923, 2925, 2927,
2929, 2931, 2933, 2935, 2937, 2939, 2941, 2943, 2945, 2947, 2949, 2951, 2953, 2955, 2957, 2959, 2961, 2963, 2965,
2967, 2969, 2971, 2973, 2975, 2977, 2979, 2981, 2983, 2985, 2987, 2989, 2991, 2993, 2995, 2997, 2999, 3001, 3003,
3005, 3007, 3009, 3011, 3013, 3015, 3017, 3019, 3021, 3023, 3025, 3027, 3029, 3031, 3033, 3035, 3037, 3039, 3041,
3043, 3045, 3047, 3049, 3051, 3053, 3055, 3057, 3059, 3061, 3063, 3065, 3067, 3069, 3071, 3073, 3075, 3077, 3079,
3081, 3083, 3085, 3087, 3089, 3091, 3093, 3095, 3097, 3099, 3101, 3103, 3105, 3107, 3109, 3111, 3113, 3115, 3117,
3119, 3121, 3123, 3125, 3127, 3129, 3131, 3133, 3135, 3137, 3139, 3141, 3143, 3145, 3147, 3149, 3151, 3153, 3155,
3157, 3159, 3161, 3163, 3165, 3167, 3169, 3171, 3173, 3175, 3177, 3179, 3181, 3183, 3185, 3187, 3189, 3191, 3193,
3195, 3197, 3199, 3201, 3203, 3205, 3207, 3209, 3211, 3213, 3215, 3217, 3219, 3221, 3223, 3225, 3227, 3229, 3231,
3233, 3235, 3237, 3239, 3241, 3243, 3245, 3247, 3249, 3251, 3253, 3255, 3257, 3259, 3261, 3263, 3265, 3267, 3269,
3271, 3273, 3275, 3277, 3279, 3281, 3283, 3285, 3287, 3289, 3291, 3293, 3295, 3297, 3299, 3301, 3303, 3305, 3307,
3309, 3311, 3313, 3315, 3317, 3319, 3321, 3323, 3325, 3327, 3329, 3331, 3333, 3335, 3337, 3339, 3341, 3343, 3345,
3347, 3349, 3351, 3353, 3355, 3357, 3359, 3361, 3363, 3365, 3367, 3369, 3371, 3373, 3375, 3377, 3379, 3381, 3383,
3385, 3387, 3389, 3391, 3393, 3395, 3397, 3399, 3401, 3403, 3405, 3407, 3409, 3411, 3413, 3415, 3417, 3419, 3421,
3423, 3425, 3427, 3429, 3431, 3433, 3435, 3437, 3439, 3441, 3443, 3445, 3447, 3449, 3451, 3453, 3455, 3457, 3459,
3461, 3463, 3465, 3467, 3469, 3471, 3473, 3475, 3477, 3479, 3481, 3483, 3485, 3487, 3489, 3491, 3493, 3495, 3497,
3499, 3501, 3503, 3505, 3507, 3509, 3511, 3513, 3515, 3517, 3519, 3521, 3523, 3525, 3527, 3529, 3531, 3533, 3535,
3537, 3539, 3541, 3543, 3545, 3547, 3549, 3551, 3553, 3555, 3557, 3559, 3561, 3563, 3565, 3567, 3569, 3571, 3573,
3575, 3577, 3579, 3581, 3583, 3585, 3587, 3589, 3591, 3593, 3595, 3597, 3599, 3601, 3603, 3605, 3607, 3609, 3611,
3613, 3615, 3617, 3619, 3621, 3623, 3625, 3627, 3629, 3631, 3633, 3635, 3637, 3639, 3641, 3643, 3645, 3647, 3649,
3651, 3653, 3655, 3657, 3659, 3661, 3663, 3665, 3667, 3669, 3671, 3673, 3675, 3677, 3679, 3681, 3683, 3685, 3687,
3689, 3691, 3693, 3695, 3697, 3699, 3701, 3703, 3705, 3707, 3709, 3711, 3713, 3715, 3717, 3719, 3721, 3723, 3725,
3727, 3729, 3731, 3733, 3735, 3737, 3739, 3741, 3743, 3745, 3747, 3749, 3751, 3753, 3755, 3757, 3759, 3761, 3763,
3765, 3767, 3769, 3771, 3773, 3775, 3777, 3779, 3781, 3783, 3785, 3787, 3789, 3791, 3793, 3795, 3797, 3799, 3801,
3803, 3805, 3807, 3809, 3811, 3813, 3815, 3817, 3819, 3821, 3823, 3825, 3827, 3829, 3831, 3833, 3835, 3837, 3839,
3841, 3843, 3845, 3847, 3849, 3851, 3853, 3855, 3857, 3859, 3861, 3863, 3865, 3867, 3869, 3871, 3873, 3875, 3877,
3879, 3881, 3883, 3885, 3887, 3889, 3891, 3893, 3895, 3897, 3899, 3901, 3903, 3905, 3907, 3909, 3911, 3913, 3915,
3917, 3919, 3921, 3923, 3925, 3927, 3929, 3931, 3933, 3935, 3937, 3939, 3941, 3943, 3945, 3947, 3949, 3951, 3953,
3955, 3957, 3959, 3961, 3963, 3965, 3967, 3969, 3971, 3973, 3975, 3977, 3979, 3981, 3983, 3985, 3987, 3989, 3991,
3993, 3995, 3997, 3999, 4001, 4003, 4005, 4007, 4009, 4011, 4013, 4015, 4017, 4019, 4021, 4023, 4025, 4027, 4029,
4031, 4033, 4035, 4037, 4039, 4041, 4043, 4045, 4047, 4049, 4051, 4053, 4055, 4057, 4059, 4061, 4063, 4065, 4067,
4069, 4071, 4073, 4075, 4077, 4079, 4081, 4083, 4085, 4087, 4089, 4091, 4093, 4095, 4097, 4099, 4101, 4103, 4105,
4107, 4109, 4111, 4113, 4115, 4117, 4119, 4121, 4123, 4125, 4127, 4129, 4131, 4133, 4135, 4137, 4139, 4141, 4143,
4145, 4147, 4149, 4151, 4153, 4155, 4157, 4159, 4161, 4163, 4165, 4167, 4169, 4171, 4173, 4175, 4177, 4179, 4181,
4183, 4185, 4187, 4189, 4191, 4193, 4195, 4197, 4199, 4201, 4203, 4205, 4207, 4209, 4211, 4213, 4215, 4217, 4219,
4221, 4223, 4225, 4227, 4229, 4231, 4233, 4235, 4237, 4239, 4241, 4243, 4245, 4247, 4249, 4251, 4253, 4255, 4257,
4259, 4261, 4263, 4265, 4267, 4269, 4271, 4273, 4275, 4277, 4279, 4281, 4283, 4285, 4287, 4289, 4291, 4293, 4295,
4297, 4299, 4301, 4303, 4305, 4307, 4309, 4311, 4313, 4315, 4317, 4319, 4321, 4323, 4325, 4327, 4329, 4331, 4333,
4335, 4337, 4339, 4341, 4343, 4345, 4347, 4349, 4351, 4353, 4355, 4357, 4359, 4361, 4363, 4365, 4367, 4369, 4371,
4373, 4375, 4377, 4379, 4381, 4383, 4385, 4387, 4389, 4391, 4393, 4395, 4397, 4399, 4401, 4403, 4405, 4407, 4409,
4411, 4413, 4415, 4417, 4419, 4421, 4423, 4425, 4427, 4429, 4431, 4433, 4435, 4437, 4439, 4441, 4443, 4445, 4447,
4449, 4451, 4453, 4455, 4457, 4459, 4461, 4463, 4465, 4467, 4469, 4471, 4473, 4475, 4477, 4479, 4481, 4483, 4485,
4487, 4489, 4491, 4493, 4495, 4497, 4499, 4501, 4503, 4505, 4507, 4509, 4511, 4513, 4515, 4517, 4519, 4521, 4523,
4525, 4527, 4529, 4531, 4533, 4535, 4537, 4539, 4541, 4543, 4545, 4547, 4549, 4551, 4553, 4555, 4557, 4559, 4561,
4563, 4565, 4567, 4569, 4571, 4573, 4575, 4577, 4579, 4581, 4583, 4585, 4587, 4589, 4591, 4593, 4595, 4597, 4599,
4601, 4603, 4605, 4607, 4609, 4611, 4613, 4615, 4617, 4619, 4621, 4623, 4625, 4627, 4629, 4631, 4633, 4635, 4637,
4639, 4641, 4643, 4645, 4647, 4649, 4651, 4653, 4655, 4657, 4659, 4661, 4663, 4665, 4667, 4669, 4671, 4673, 4675,
4677, 4679, 4681, 4683, 4685, 4687, 4689, 4691, 4693, 4695, 4697, 4699, 4701, 4703, 4705, 4707, 4709, 4711, 4713,
4715, 4717, 4719, 4721, 4723, 4725, 4727, 4729, 4731, 4733, 4735, 4737, 4739, 4741, 4743, 4745, 4747, 4749, 4751,
4753, 4755, 4757, 4759, 4761, 4763, 4765, 4767, 4769, 4771, 4773, 4775, 4777, 4779, 4781, 4783, 4785, 4787, 4789,
4791, 4793, 4795, 4797, 4799, 4801, 4803, 4805, 4807, 4809, 4811, 4813, 4815, 4817, 4819, 4821, 4823, 4825, 4827,
4829, 4831, 4833, 4835, 4837, 4839, 4841, 4843, 4845, 4847, 4849, 4851, 4853, 4855, 4857, 4859, 4861, 4863, 4865,
4867, 4869, 4871, 4873, 4875, 4877, 4879, 4881, 4883, 4885, 4887, 4889, 4891, 4893, 4895, 4897, 4899, 4901, 4903,
4905, 4907, 4909, 4911, 4913, 4915, 4917, 4919, 4921, 4923, 4925, 4927, 4929, 4931, 4933, 4935, 4937, 4939, 4941,
4943, 4945, 4947, 4949, 4951, 4953, 4955, 4957, 4959, 4961, 4963, 4965, 4967, 4969, 4971, 4973, 4975, 4977, 4979,
4981, 4983, 4985, 4987, 4989, 4991, 4993, 4995, 4997, 4999, 5001, 5003, 5005, 5007, 5009, 5011, 5013, 5015, 5017,
5019, 5021, 5023, 5025, 5027, 5029, 5031, 5033, 5035, 5037, 5039, 5041, 5043, 5045, 5047, 5049, 5051, 5053, 5055,
5057, 5059, 5061, 5063, 5065, 5067, 5069, 5071, 5073, 5075, 5077, 5079, 5081, 5083, 5085, 5087, 5089, 5091, 5093,
5095, 5097, 5099, 5101, 5103, 5105, 5107, 5109, 5111, 5113, 5115, 5117, 5119, 5121, 5123, 5125, 5127, 5129, 5131,
5133, 5135, 5137, 5139, 5141, 5143, 5145, 5147, 5149, 5151, 5153, 5155, 5157, 5159, 5161, 5163, 5165, 5167, 5169,
5171, 5173, 5175, 5177, 5179, 5181, 5183, 5185, 5187, 5189, 5191, 5193, 5195, 5197, 5199, 5201, 5203, 5205, 5207,
5209, 5211, 5213, 5215, 5217, 5219, 5221, 5223, 5225, 5227, 5229, 5231, 5233, 5235, 5237, 5239, 5241, 5243, 5245,
5247, 5249, 5251, 5253, 5255, 5257, 5259, 5261, 5263, 5265, 5267, 5269, 5271, 5273, 5275, 5277, 5279, 5281, 5283,
5285, 5287, 5289, 5291, 5293, 5295, 5297, 5299, 5301, 5303, 5305, 5307, 5309, 5311, 5313, 5315, 5317, 5319, 5321,
5323, 5325, 5327, 5329, 5331, 5333, 5335, 5337, 5339, 5341, 5343, 5345, 5347, 5349, 5351, 5353, 5355, 5357, 5359,
5361, 5363, 5365, 5367, 5369, 5371, 5373, 5375, 5377, 5379, 5381, 5383, 5385, 5387, 5389, 5391, 5393, 5395, 5397,
5399, 5401, 5403, 5405, 5407, 5409, 5411, 5413, 5415, 5417, 5419, 5421, 5423, 5425, 5427, 5429, 5431, 5433, 5435,
5437, 5439, 5441, 5443, 5445, 5447, 5449, 5451, 5453, 5455, 5457, 5459, 5461, 5463, 5465, 5467, 5469, 5471, 5473,
5475, 5477, 5479, 5481, 5483, 5485, 5487, 5489, 5491, 5493, 5495, 5497, 5499, 5501, 5503, 5505, 5507, 5509, 5511,
5513, 5515, 5517, 5519, 5521, 5523, 5525, 5527, 5529, 5531, 5533, 5535, 5537, 5539, 5541, 5543, 5545, 5547, 5549,
5551, 5553, 5555, 5557, 5559, 5561, 5563, 5565, 5567, 5569, 5571, 5573, 5575, 5577, 5579, 5581, 5583, 5585, 5587,
5589, 5591, 5593, 5595, 5597, 5599, 5601, 5603, 5605, 5607, 5609, 5611, 5613, 5615, 5617, 5619, 5621, 5623, 5625,
5627, 5629, 5631, 5633, 5635, 5637, 5639, 5641, 5643, 5645, 5647, 5649, 5651, 5653, 5655, 5657, 5659, 5661, 5663,
5665, 5667, 5669, 5671, 5673, 5675, 5677, 5679, 5681, 5683, 5685, 5687, 5689, 5691, 5693, 5695, 5697, 5699, 5701,
5703, 5705, 5707, 5709, 5711, 5713, 5715, 5717, 5719, 5721, 5723, 5725, 5727, 5729, 5731, 5733, 5735, 5737, 5739,
5741, 5743, 5745, 5747, 5749, 5751, 5753, 5755, 5757, 5759, 5761, 5763, 5765, 5767, 5769, 5771, 5773, 5775, 5777,
5779, 5781, 5783, 5785, 5787, 5789, 5791, 5793, 5795, 5797, 5799, 5801, 5803, 5805, 5807, 5809, 5811, 5813, 5815,
5817, 5819, 5821, 5823, 5825, 5827, 5829, 5831, 5833, 5835, 5837, 5839, 5841, 5843, 5845, 5847, 5849, 5851, 5853,
5855, 5857, 5859, 5861, 5863, 5865, 5867, 5869, 5871, 5873, 5875, 5877, 5879, 5881, 5883, 5885, 5887, 5889, 5891,
5893, 5895, 5897, 5899, 5901, 5903, 5905, 5907, 5909, 5911, 5913, 5915, 5917, 5919, 5921, 5923, 5925, 5927, 5929,
5931, 5933, 5935, 5937, 5939, 5941, 5943, 5945, 5947, 5949, 5951, 5953, 5955, 5957, 5959, 5961, 5963, 5965, 5967,
5969, 5971, 5973, 5975, 5977, 5979, 5981, 5983, 5985, 5987, 5989, 5991, 5993, 5995, 5997, 5999, 6001, 6003, 6005,
6007, 6009, 6011, 6013, 6015, 6017, 6019, 6021, 6023, 6025, 6027, 6029, 6031, 6033, 6035, 6037, 6039, 6041, 6043,
6045, 6047, 6049, 6051, 6053, 6055, 6057, 6059, 6061, 6063, 6065, 6067, 6069, 6071, 6073, 6075, 6077, 6079, 6081,
6083, 6085, 6087, 6089, 6091, 6093, 6095, 6097, 6099, 6101, 6103, 6105, 6107, 6109, 6111, 6113, 6115, 6117, 6119,
6121, 6123, 6125, 6127, 6129, 6131, 6133, 6135, 6137, 6139, 6141, 6143, 6145, 6147, 6149, 6151, 6153, 6155, 6157,
6159, 6161, 6163, 6165, 6167, 6169, 6171, 6173, 6175, 6177, 6179, 6181, 6183, 6185, 6187, 6189, 6191, 6193, 6195,
6197, 6199, 6201, 6203, 6205, 6207, 6209, 6211, 6213, 6215, 6217, 6219, 6221, 6223, 6225, 6227, 6229, 6231, 6233,
6235, 6237, 6239, 6241, 6243, 6245, 6247, 6249, 6251, 6253, 6255, 6257, 6259, 6261, 6263, 6265, 6267, 6269, 6271,
6273, 6275, 6277, 6279, 6281, 6283, 6285, 6287, 6289, 6291, 6293, 6295, 6297, 6299, 6301, 6303, 6305, 6307, 6309,
6311, 6313, 6315, 6317, 6319, 6321, 6323, 6325, 6327, 6329, 6331, 6333, 6335, 6337, 6339, 6341, 6343, 6345, 6347,
6349, 6351, 6353, 6355, 6357, 6359, 6361, 6363, 6365, 6367, 6369, 6371, 6373, 6375, 6377, 6379, 6381, 6383, 6385,
6387, 6389, 6391, 6393, 6395, 6397, 6399, 6401, 6403, 6405, 6407, 6409, 6411, 6413, 6415, 6417, 6419, 6421, 6423,
6425, 6427, 6429, 6431, 6433, 6435, 6437, 6439, 6441, 6443, 6445, 6447, 6449, 6451, 6453, 6455, 6457, 6459, 6461,
6463, 6465, 6467, 6469, 6471, 6473, 6475, 6477, 6479, 6481, 6483, 6485, 6487, 6489, 6491, 6493, 6495, 6497, 6499,
6501, 6503, 6505, 6507, 6509, 6511, 6513, 6515, 6517, 6519, 6521, 6523, 6525, 6527, 6529, 6531, 6533, 6535, 6537,
6539, 6541, 6543, 6545, 6547, 6549, 6551, 6553, 6555, 6557, 6559, 6561, 6563, 6565, 6567, 6569, 6571, 6573, 6575,
6577, 6579, 6581, 6583, 6585, 6587, 6589, 6591, 6593, 6595, 6597, 6599, 6601, 6603, 6605, 6607, 6609, 6611, 6613,
6615, 6617, 6619, 6621, 6623, 6625, 6627, 6629, 6631, 6633, 6635, 6637, 6639, 6641, 6643, 6645, 6647, 6649, 6651,
6653, 6655, 6657, 6659, 6661, 6663, 6665, 6667, 6669, 6671, 6673, 6675, 6677, 6679, 6681, 6683, 6685, 6687, 6689,
6691, 6693, 6695, 6697, 6699, 6701, 6703, 6705, 6707, 6709, 6711, 6713, 6715, 6717, 6719, 6721, 6723, 6725, 6727,
6729, 6731, 6733, 6735, 6737, 6739, 6741, 6743, 6745, 6747, 6749, 6751, 6753, 6755, 6757, 6759, 6761, 6763, 6765,
6767, 6769, 6771, 6773, 6775, 6777, 6779, 6781, 6783, 6785, 6787, 6789, 6791, 6793, 6795, 6797, 6799, 6801, 6803,
6805, 6807, 6809, 6811, 6813, 6815, 6817, 6819, 6821, 6823, 6825, 6827, 6829, 6831, 6833, 6835, 6837, 6839, 6841,
6843, 6845, 6847, 6849, 6851, 6853, 6855, 6857, 6859, 6861, 6863, 6865, 6867, 6869, 6871, 6873, 6875, 6877, 6879,
6881, 6883, 6885, 6887, 6889, 6891, 6893, 6895, 6897, 6899, 6901, 6903, 6905, 6907, 6909, 6911, 6913, 6915, 6917,
6919, 6921, 6923, 6925, 6927, 6929, 6931, 6933, 6935, 6937, 6939, 6941, 6943, 6945, 6947, 6949, 6951, 6953, 6955,
6957, 6959, 6961, 6963, 6965, 6967, 6969, 6971, 6973, 6975, 6977, 6979, 6981, 6983, 6985, 6987, 6989, 6991, 6993,
6995, 6997, 6999, 7001, 7003, 7005, 7007, 7009, 7011, 7013, 7015, 7017, 7019, 7021, 7023, 7025, 7027, 7029, 7031,
7033, 7035, 7037, 7039, 7041, 7043, 7045, 7047, 7049, 7051, 7053, 7055, 7057, 7059, 7061, 7063, 7065, 7067, 7069,
7071, 7073, 7075, 7077, 7079, 7081, 7083, 7085, 7087, 7089, 7091, 7093, 7095, 7097, 7099, 7101, 7103, 7105, 7107,
7109, 7111, 7113, 7115, 7117, 7119, 7121, 7123, 7125, 7127, 7129, 7131, 7133, 7135, 7137, 7139, 7141, 7143, 7145,
7147, 7149, 7151, 7153, 7155, 7157, 7159, 7161, 7163, 7165, 7167, 7169, 7171, 7173, 7175, 7177, 7179, 7181, 7183,
7185, 7187, 7189, 7191, 7193, 7195, 7197, 7199, 7201, 7203, 7205, 7207, 7209, 7211, 7213, 7215, 7217, 7219, 7221,
7223, 7225, 7227, 7229, 7231, 7233, 7235, 7237, 7239, 7241, 7243, 7245, 7247, 7249, 7251, 7253, 7255, 7257, 7259,
7261, 7263, 7265, 7267, 7269, 7271, 7273, 7275, 7277, 7279, 7281, 7283, 7285, 7287, 7289, 7291, 7293, 7295, 7297,
7299, 7301, 7303, 7305, 7307, 7309, 7311, 7313, 7315, 7317, 7319, 7321, 7323, 7325, 7327, 7329, 7331, 7333, 7335,
7337, 7339, 7341, 7343, 7345, 7347, 7349, 7351, 7353, 7355, 7357, 7359, 7361, 7363, 7365, 7367, 7369, 7371, 7373,
7375, 7377, 7379, 7381, 7383, 7385, 7387, 7389, 7391, 7393, 7395, 7397, 7399, 7401, 7403, 7405, 7407, 7409, 7411,
7413, 7415, 7417, 7419, 7421, 7423, 7425, 7427, 7429, 7431, 7433, 7435, 7437, 7439, 7441, 7443, 7445, 7447, 7449,
7451, 7453, 7455, 7457, 7459, 7461, 7463, 7465, 7467, 7469, 7471, 7473, 7475, 7477, 7479, 7481, 7483, 7485, 7487,
7489, 7491, 7493, 7495, 7497, 7499, 7501, 7503, 7505, 7507, 7509, 7511, 7513, 7515, 7517, 7519, 7521, 7523, 7525,
7527, 7529, 7531, 7533, 7535, 7537, 7539, 7541, 7543, 7545, 7547, 7549, 7551, 7553, 7555, 7557, 7559, 7561, 7563,
7565, 7567, 7569, 7571, 7573, 7575, 7577, 7579, 7581, 7583, 7585, 7587, 7589, 7591, 7593, 7595, 7597, 7599, 7601,
7603, 7605, 7607, 7609, 7611, 7613, 7615, 7617, 7619, 7621, 7623, 7625, 7627, 7629, 7631, 7633, 7635, 7637, 7639,
7641, 7643, 7645, 7647, 7649, 7651, 7653, 7655, 7657, 7659, 7661, 7663, 7665, 7667, 7669, 7671, 7673, 7675, 7677,
7679, 7681, 7683, 7685, 7687, 7689, 7691, 7693, 7695, 7697, 7699, 7701, 7703, 7705, 7707, 7709, 7711, 7713, 7715,
7717, 7719, 7721, 7723, 7725, 7727, 7729, 7731, 7733, 7735, 7737, 7739, 7741, 7743, 7745, 7747, 7749, 7751, 7753,
7755, 7757, 7759, 7761, 7763, 7765, 7767, 7769, 7771, 7773, 7775, 7777, 7779, 7781, 7783, 7785, 7787, 7789, 7791,
7793, 7795, 7797, 7799, 7801, 7803, 7805, 7807, 7809, 7811, 7813, 7815, 7817, 7819, 7821, 7823, 7825, 7827, 7829,
7831, 7833, 7835, 7837, 7839, 7841, 7843, 7845, 7847, 7849, 7851, 7853, 7855, 7857, 7859, 7861, 7863, 7865, 7867,
7869, 7871, 7873, 7875, 7877, 7879, 7881, 7883, 7885, 7887, 7889, 7891, 7893, 7895, 7897, 7899, 7901, 7903, 7905,
7907, 7909, 7911, 7913, 7915, 7917, 7919, 7921, 7923, 7925, 7927, 7929, 7931, 7933, 7935, 7937, 7939, 7941, 7943,
7945, 7947, 7949, 7951, 7953, 7955, 7957, 7959, 7961, 7963, 7965, 7967, 7969, 7971, 7973, 7975, 7977, 7979, 7981,
7983, 7985, 7987, 7989, 7991, 7993, 7995, 7997, 7999, 8001, 8003, 8005, 8007, 8009, 8011, 8013, 8015, 8017, 8019,
8021, 8023, 8025, 8027, 8029, 8031, 8033, 8035, 8037, 8039, 8041, 8043, 8045, 8047, 8049, 8051, 8053, 8055, 8057,
8059, 8061, 8063, 8065, 8067, 8069, 8071, 8073, 8075, 8077, 8079, 8081, 8083, 8085, 8087, 8089, 8091, 8093, 8095,
8097, 8099, 8101, 8103, 8105, 8107, 8109, 8111, 8113, 8115, 8117, 8119, 8121, 8123, 8125, 8127, 8129, 8131, 8133,
8135, 8137, 8139, 8141, 8143, 8145, 8147, 8149, 8151, 8153, 8155, 8157, 8159, 8161, 8163, 8165, 8167, 8169, 8171,
8173, 8175, 8177, 8179, 8181, 8183, 8185, 8187, 8189, 8191, 8193, 8195, 8197, 8199, 8201, 8203, 8205, 8207, 8209,
8211, 8213, 8215, 8217, 8219, 8221, 8223, 8225, 8227, 8229, 8231, 8233, 8235, 8237, 8239, 8241, 8243, 8245, 8247,
8249, 8251, 8253, 8255, 8257, 8259, 8261, 8263, 8265, 8267, 8269, 8271, 8273, 8275, 8277, 8279, 8281, 8283, 8285,
8287, 8289, 8291, 8293, 8295, 8297, 8299, 8301, 8303, 8305, 8307, 8309, 8311, 8313, 8315, 8317, 8319, 8321, 8323,
8325, 8327, 8329, 8331, 8333, 8335, 8337, 8339, 8341, 8343, 8345, 8347, 8349, 8351, 8353, 8355, 8357, 8359, 8361,
8363, 8365, 8367, 8369, 8371, 8373, 8375, 8377, 8379, 8381, 8383, 8385, 8387, 8389, 8391, 8393, 8395, 8397, 8399,
8401, 8403, 8405, 8407, 8409, 8411, 8413, 8415, 8417, 8419, 8421, 8423, 8425, 8427, 8429, 8431, 8433, 8435, 8437,
8439, 8441, 8443, 8445, 8447, 8449, 8451, 8453, 8455, 8457, 8459, 8461, 8463, 8465, 8467, 8469, 8471, 8473, 8475,
8477, 8479, 8481, 8483, 8485, 8487, 8489, 8491, 8493, 8495, 8497, 8499, 8501, 8503, 8505, 8507, 8509, 8511, 8513,
8515, 8517, 8519, 8521, 8523, 8525, 8527, 8529, 8531, 8533, 8535, 8537, 8539, 8541, 8543, 8545, 8547, 8549, 8551,
8553, 8555, 8557, 8559, 8561, 8563, 8565, 8567, 8569, 8571, 8573, 8575, 8577, 8579, 8581, 8583, 8585, 8587, 8589,
8591, 8593, 8595, 8597, 8599, 8601, 8603, 8605, 8607, 8609, 8611, 8613, 8615, 8617, 8619, 8621, 8623, 8625, 8627,
8629, 8631, 8633, 8635, 8637, 8639, 8641, 8643, 8645, 8647, 8649, 8651, 8653, 8655, 8657, 8659, 8661, 8663, 8665,
8667, 8669, 8671, 8673, 8675, 8677, 8679, 8681, 8683, 8685, 8687, 8689, 8691, 8693, 8695, 8697, 8699, 8701, 8703,
8705, 8707, 8709, 8711, 8713, 8715, 8717, 8719, 8721, 8723, 8725, 8727, 8729, 8731, 8733, 8735, 8737, 8739, 8741,
8743, 8745, 8747, 8749, 8751, 8753, 8755, 8757, 8759, 8761, 8763, 8765, 8767, 8769, 8771, 8773, 8775, 8777, 8779,
8781, 8783, 8785, 8787, 8789, 8791, 8793, 8795, 8797, 8799, 8801, 8803, 8805, 8807, 8809, 8811, 8813, 8815, 8817,
8819, 8821, 8823, 8825, 8827, 8829, 8831, 8833, 8835, 8837, 8839, 8841, 8843, 8845, 8847, 8849, 8851, 8853, 8855,
8857, 8859, 8861, 8863, 8865, 8867, 8869, 8871, 8873, 8875, 8877, 8879, 8881, 8883, 8885, 8887, 8889, 8891, 8893,
8895, 8897, 8899, 8901, 8903, 8905, 8907, 8909, 8911, 8913, 8915, 8917, 8919, 8921, 8923, 8925, 8927, 8929, 8931,
8933, 8935, 8937, 8939, 8941, 8943, 8945, 8947, 8949, 8951, 8953, 8955, 8957, 8959, 8961, 8963, 8965, 8967, 8969,
8971, 8973, 8975, 8977, 8979, 8981, 8983, 8985, 8987, 8989, 8991, 8993, 8995, 8997, 8999, 9001, 9003, 9005, 9007,
9009, 9011, 9013, 9015, 9017, 9019, 9021, 9023, 9025, 9027, 9029, 9031, 9033, 9035, 9037, 9039, 9041, 9043, 9045,
9047, 9049, 9051, 9053, 9055, 9057, 9059, 9061, 9063, 9065, 9067, 9069, 9071, 9073, 9075, 9077, 9079, 9081, 9083,
9085, 9087, 9089, 9091, 9093, 9095, 9097, 9099, 9101, 9103, 9105, 9107, 9109, 9111, 9113, 9115, 9117, 9119, 9121,
9123, 9125, 9127, 9129, 9131, 9133, 9135, 9137, 9139, 9141, 9143, 9145, 9147, 9149, 9151, 9153, 9155, 9157, 9159,
9161, 9163, 9165, 9167, 9169, 9171, 9173, 9175, 9177, 9179, 9181, 9183, 9185, 9187, 9189, 9191, 9193, 9195, 9197,
9199, 9201, 9203, 9205, 9207, 9209, 9211, 9213, 9215, 9217, 9219, 9221, 9223, 9225, 9227, 9229, 9231, 9233, 9235,
9237, 9239, 9241, 9243, 9245, 9247, 9249, 9251, 9253, 9255, 9257, 9259, 9261, 9263, 9265, 9267, 9269, 9271, 9273,
9275, 9277, 9279, 9281, 9283, 9285, 9287, 9289, 9291, 9293, 9295, 9297, 9299, 9301, 9303, 9305, 9307, 9309, 9311,
9313, 9315, 9317, 9319, 9321, 9323, 9325, 9327, 9329, 9331, 9333, 9335, 9337, 9339, 9341, 9343, 9345, 9347, 9349,
9351, 9353, 9355, 9357, 9359, 9361, 9363, 9365, 9367, 9369, 9371, 9373, 9375, 9377, 9379, 9381, 9383, 9385, 9387,
9389, 9391, 9393, 9395, 9397, 9399, 9401, 9403, 9405, 9407, 9409, 9411, 9413, 9415, 9417, 9419, 9421, 9423, 9425,
9427, 9429, 9431, 9433, 9435, 9437, 9439, 9441, 9443, 9445, 9447, 9449, 9451, 9453, 9455, 9457, 9459, 9461, 9463,
9465, 9467, 9469, 9471, 9473, 9475, 9477, 9479, 9481, 9483, 9485, 9487, 9489, 9491, 9493, 9495, 9497, 9499, 9501,
9503, 9505, 9507, 9509, 9511, 9513, 9515, 9517, 9519, 9521, 9523, 9525, 9527, 9529, 9531, 9533, 9535, 9537, 9539,
9541, 9543, 9545, 9547, 9549, 9551, 9553, 9555, 9557, 9559, 9561, 9563, 9565, 9567, 9569, 9571, 9573, 9575, 9577,
9579, 9581, 9583, 9585, 9587, 9589, 9591, 9593, 9595, 9597, 9599, 9601, 9603, 9605, 9607, 9609, 9611, 9613, 9615,
9617, 9619, 9621, 9623, 9625, 9627, 9629, 9631, 9633, 9635, 9637, 9639, 9641, 9643, 9645, 9647, 9649, 9651, 9653,
9655, 9657, 9659, 9661, 9663, 9665, 9667, 9669, 9671, 9673, 9675, 9677, 9679, 9681, 9683, 9685, 9687, 9689, 9691,
9693, 9695, 9697, 9699, 9701, 9703, 9705, 9707, 9709, 9711, 9713, 9715, 9717, 9719, 9721, 9723, 9725, 9727, 9729,
9731, 9733, 9735, 9737, 9739, 9741, 9743, 9745, 9747, 9749, 9751, 9753, 9755, 9757, 9759, 9761, 9763, 9765, 9767,
9769, 9771, 9773, 9775, 9777, 9779, 9781, 9783, 9785, 9787, 9789, 9791, 9793, 9795, 9797, 9799, 9801, 9803, 9805,
9807, 9809, 9811, 9813, 9815, 9817, 9819, 9821, 9823, 9825, 9827, 9829, 9831, 9833, 9835, 9837, 9839, 9841, 9843,
9845, 9847, 9849, 9851, 9853, 9855, 9857, 9859, 9861, 9863, 9865, 9867, 9869, 9871, 9873, 9875, 9877, 9879, 9881,
9883, 9885, 9887, 9889, 9891, 9893, 9895, 9897, 9899, 9901, 9903, 9905, 9907, 9909, 9911, 9913, 9915, 9917, 9919,
9921, 9923, 9925, 9927, 9929, 9931, 9933, 9935, 9937, 9939, 9941, 9943, 9945, 9947, 9949, 9951, 9953, 9955, 9957,
9959, 9961, 9963, 9965, 9967, 9969, 9971, 9973, 9975, 9977, 9979, 9981, 9983, 9985, 9987, 9989, 9991, 9993, 9995,
9997, 9999, 4982]
b1 = [2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58,
60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112,
114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 142, 144, 146, 148, 150, 152, 154, 156, 158,
160, 162, 164, 166, 168, 170, 172, 174, 176, 178, 180, 182, 184, 186, 188, 190, 192, 194, 196, 198, 200, 202, 204,
206, 208, 210, 212, 214, 216, 218, 220, 222, 224, 226, 228, 230, 232, 234, 236, 238, 240, 242, 244, 246, 248, 250,
252, 254, 256, 258, 260, 262, 264, 266, 268, 270, 272, 274, 276, 278, 280, 282, 284, 286, 288, 290, 292, 294, 296,
298, 300, 302, 304, 306, 308, 310, 312, 314, 316, 318, 320, 322, 324, 326, 328, 330, 332, 334, 336, 338, 340, 342,
344, 346, 348, 350, 352, 354, 356, 358, 360, 362, 364, 366, 368, 370, 372, 374, 376, 378, 380, 382, 384, 386, 388,
390, 392, 394, 396, 398, 400, 402, 404, 406, 408, 410, 412, 414, 416, 418, 420, 422, 424, 426, 428, 430, 432, 434,
436, 438, 440, 442, 444, 446, 448, 450, 452, 454, 456, 458, 460, 462, 464, 466, 468, 470, 472, 474, 476, 478, 480,
482, 484, 486, 488, 490, 492, 494, 496, 498, 500, 502, 504, 506, 508, 510, 512, 514, 516, 518, 520, 522, 524, 526,
528, 530, 532, 534, 536, 538, 540, 542, 544, 546, 548, 550, 552, 554, 556, 558, 560, 562, 564, 566, 568, 570, 572,
574, 576, 578, 580, 582, 584, 586, 588, 590, 592, 594, 596, 598, 600, 602, 604, 606, 608, 610, 612, 614, 616, 618,
620, 622, 624, 626, 628, 630, 632, 634, 636, 638, 640, 642, 644, 646, 648, 650, 652, 654, 656, 658, 660, 662, 664,
666, 668, 670, 672, 674, 676, 678, 680, 682, 684, 686, 688, 690, 692, 694, 696, 698, 700, 702, 704, 706, 708, 710,
712, 714, 716, 718, 720, 722, 724, 726, 728, 730, 732, 734, 736, 738, 740, 742, 744, 746, 748, 750, 752, 754, 756,
758, 760, 762, 764, 766, 768, 770, 772, 774, 776, 778, 780, 782, 784, 786, 788, 790, 792, 794, 796, 798, 800, 802,
804, 806, 808, 810, 812, 814, 816, 818, 820, 822, 824, 826, 828, 830, 832, 834, 836, 838, 840, 842, 844, 846, 848,
850, 852, 854, 856, 858, 860, 862, 864, 866, 868, 870, 872, 874, 876, 878, 880, 882, 884, 886, 888, 890, 892, 894,
896, 898, 900, 902, 904, 906, 908, 910, 912, 914, 916, 918, 920, 922, 924, 926, 928, 930, 932, 934, 936, 938, 940,
942, 944, 946, 948, 950, 952, 954, 956, 958, 960, 962, 964, 966, 968, 970, 972, 974, 976, 978, 980, 982, 984, 986,
988, 990, 992, 994, 996, 998, 1000, 1002, 1004, 1006, 1008, 1010, 1012, 1014, 1016, 1018, 1020, 1022, 1024, 1026,
1028, 1030, 1032, 1034, 1036, 1038, 1040, 1042, 1044, 1046, 1048, 1050, 1052, 1054, 1056, 1058, 1060, 1062, 1064,
1066, 1068, 1070, 1072, 1074, 1076, 1078, 1080, 1082, 1084, 1086, 1088, 1090, 1092, 1094, 1096, 1098, 1100, 1102,
1104, 1106, 1108, 1110, 1112, 1114, 1116, 1118, 1120, 1122, 1124, 1126, 1128, 1130, 1132, 1134, 1136, 1138, 1140,
1142, 1144, 1146, 1148, 1150, 1152, 1154, 1156, 1158, 1160, 1162, 1164, 1166, 1168, 1170, 1172, 1174, 1176, 1178,
1180, 1182, 1184, 1186, 1188, 1190, 1192, 1194, 1196, 1198, 1200, 1202, 1204, 1206, 1208, 1210, 1212, 1214, 1216,
1218, 1220, 1222, 1224, 1226, 1228, 1230, 1232, 1234, 1236, 1238, 1240, 1242, 1244, 1246, 1248, 1250, 1252, 1254,
1256, 1258, 1260, 1262, 1264, 1266, 1268, 1270, 1272, 1274, 1276, 1278, 1280, 1282, 1284, 1286, 1288, 1290, 1292,
1294, 1296, 1298, 1300, 1302, 1304, 1306, 1308, 1310, 1312, 1314, 1316, 1318, 1320, 1322, 1324, 1326, 1328, 1330,
1332, 1334, 1336, 1338, 1340, 1342, 1344, 1346, 1348, 1350, 1352, 1354, 1356, 1358, 1360, 1362, 1364, 1366, 1368,
1370, 1372, 1374, 1376, 1378, 1380, 1382, 1384, 1386, 1388, 1390, 1392, 1394, 1396, 1398, 1400, 1402, 1404, 1406,
1408, 1410, 1412, 1414, 1416, 1418, 1420, 1422, 1424, 1426, 1428, 1430, 1432, 1434, 1436, 1438, 1440, 1442, 1444,
1446, 1448, 1450, 1452, 1454, 1456, 1458, 1460, 1462, 1464, 1466, 1468, 1470, 1472, 1474, 1476, 1478, 1480, 1482,
1484, 1486, 1488, 1490, 1492, 1494, 1496, 1498, 1500, 1502, 1504, 1506, 1508, 1510, 1512, 1514, 1516, 1518, 1520,
1522, 1524, 1526, 1528, 1530, 1532, 1534, 1536, 1538, 1540, 1542, 1544, 1546, 1548, 1550, 1552, 1554, 1556, 1558,
1560, 1562, 1564, 1566, 1568, 1570, 1572, 1574, 1576, 1578, 1580, 1582, 1584, 1586, 1588, 1590, 1592, 1594, 1596,
1598, 1600, 1602, 1604, 1606, 1608, 1610, 1612, 1614, 1616, 1618, 1620, 1622, 1624, 1626, 1628, 1630, 1632, 1634,
1636, 1638, 1640, 1642, 1644, 1646, 1648, 1650, 1652, 1654, 1656, 1658, 1660, 1662, 1664, 1666, 1668, 1670, 1672,
1674, 1676, 1678, 1680, 1682, 1684, 1686, 1688, 1690, 1692, 1694, 1696, 1698, 1700, 1702, 1704, 1706, 1708, 1710,
1712, 1714, 1716, 1718, 1720, 1722, 1724, 1726, 1728, 1730, 1732, 1734, 1736, 1738, 1740, 1742, 1744, 1746, 1748,
1750, 1752, 1754, 1756, 1758, 1760, 1762, 1764, 1766, 1768, 1770, 1772, 1774, 1776, 1778, 1780, 1782, 1784, 1786,
1788, 1790, 1792, 1794, 1796, 1798, 1800, 1802, 1804, 1806, 1808, 1810, 1812, 1814, 1816, 1818, 1820, 1822, 1824,
1826, 1828, 1830, 1832, 1834, 1836, 1838, 1840, 1842, 1844, 1846, 1848, 1850, 1852, 1854, 1856, 1858, 1860, 1862,
1864, 1866, 1868, 1870, 1872, 1874, 1876, 1878, 1880, 1882, 1884, 1886, 1888, 1890, 1892, 1894, 1896, 1898, 1900,
1902, 1904, 1906, 1908, 1910, 1912, 1914, 1916, 1918, 1920, 1922, 1924, 1926, 1928, 1930, 1932, 1934, 1936, 1938,
1940, 1942, 1944, 1946, 1948, 1950, 1952, 1954, 1956, 1958, 1960, 1962, 1964, 1966, 1968, 1970, 1972, 1974, 1976,
1978, 1980, 1982, 1984, 1986, 1988, 1990, 1992, 1994, 1996, 1998, 2000, 2002, 2004, 2006, 2008, 2010, 2012, 2014,
2016, 2018, 2020, 2022, 2024, 2026, 2028, 2030, 2032, 2034, 2036, 2038, 2040, 2042, 2044, 2046, 2048, 2050, 2052,
2054, 2056, 2058, 2060, 2062, 2064, 2066, 2068, 2070, 2072, 2074, 2076, 2078, 2080, 2082, 2084, 2086, 2088, 2090,
2092, 2094, 2096, 2098, 2100, 2102, 2104, 2106, 2108, 2110, 2112, 2114, 2116, 2118, 2120, 2122, 2124, 2126, 2128,
2130, 2132, 2134, 2136, 2138, 2140, 2142, 2144, 2146, 2148, 2150, 2152, 2154, 2156, 2158, 2160, 2162, 2164, 2166,
2168, 2170, 2172, 2174, 2176, 2178, 2180, 2182, 2184, 2186, 2188, 2190, 2192, 2194, 2196, 2198, 2200, 2202, 2204,
2206, 2208, 2210, 2212, 2214, 2216, 2218, 2220, 2222, 2224, 2226, 2228, 2230, 2232, 2234, 2236, 2238, 2240, 2242,
2244, 2246, 2248, 2250, 2252, 2254, 2256, 2258, 2260, 2262, 2264, 2266, 2268, 2270, 2272, 2274, 2276, 2278, 2280,
2282, 2284, 2286, 2288, 2290, 2292, 2294, 2296, 2298, 2300, 2302, 2304, 2306, 2308, 2310, 2312, 2314, 2316, 2318,
2320, 2322, 2324, 2326, 2328, 2330, 2332, 2334, 2336, 2338, 2340, 2342, 2344, 2346, 2348, 2350, 2352, 2354, 2356,
2358, 2360, 2362, 2364, 2366, 2368, 2370, 2372, 2374, 2376, 2378, 2380, 2382, 2384, 2386, 2388, 2390, 2392, 2394,
2396, 2398, 2400, 2402, 2404, 2406, 2408, 2410, 2412, 2414, 2416, 2418, 2420, 2422, 2424, 2426, 2428, 2430, 2432,
2434, 2436, 2438, 2440, 2442, 2444, 2446, 2448, 2450, 2452, 2454, 2456, 2458, 2460, 2462, 2464, 2466, 2468, 2470,
2472, 2474, 2476, 2478, 2480, 2482, 2484, 2486, 2488, 2490, 2492, 2494, 2496, 2498, 2500, 2502, 2504, 2506, 2508,
2510, 2512, 2514, 2516, 2518, 2520, 2522, 2524, 2526, 2528, 2530, 2532, 2534, 2536, 2538, 2540, 2542, 2544, 2546,
2548, 2550, 2552, 2554, 2556, 2558, 2560, 2562, 2564, 2566, 2568, 2570, 2572, 2574, 2576, 2578, 2580, 2582, 2584,
2586, 2588, 2590, 2592, 2594, 2596, 2598, 2600, 2602, 2604, 2606, 2608, 2610, 2612, 2614, 2616, 2618, 2620, 2622,
2624, 2626, 2628, 2630, 2632, 2634, 2636, 2638, 2640, 2642, 2644, 2646, 2648, 2650, 2652, 2654, 2656, 2658, 2660,
2662, 2664, 2666, 2668, 2670, 2672, 2674, 2676, 2678, 2680, 2682, 2684, 2686, 2688, 2690, 2692, 2694, 2696, 2698,
2700, 2702, 2704, 2706, 2708, 2710, 2712, 2714, 2716, 2718, 2720, 2722, 2724, 2726, 2728, 2730, 2732, 2734, 2736,
2738, 2740, 2742, 2744, 2746, 2748, 2750, 2752, 2754, 2756, 2758, 2760, 2762, 2764, 2766, 2768, 2770, 2772, 2774,
2776, 2778, 2780, 2782, 2784, 2786, 2788, 2790, 2792, 2794, 2796, 2798, 2800, 2802, 2804, 2806, 2808, 2810, 2812,
2814, 2816, 2818, 2820, 2822, 2824, 2826, 2828, 2830, 2832, 2834, 2836, 2838, 2840, 2842, 2844, 2846, 2848, 2850,
2852, 2854, 2856, 2858, 2860, 2862, 2864, 2866, 2868, 2870, 2872, 2874, 2876, 2878, 2880, 2882, 2884, 2886, 2888,
2890, 2892, 2894, 2896, 2898, 2900, 2902, 2904, 2906, 2908, 2910, 2912, 2914, 2916, 2918, 2920, 2922, 2924, 2926,
2928, 2930, 2932, 2934, 2936, 2938, 2940, 2942, 2944, 2946, 2948, 2950, 2952, 2954, 2956, 2958, 2960, 2962, 2964,
2966, 2968, 2970, 2972, 2974, 2976, 2978, 2980, 2982, 2984, 2986, 2988, 2990, 2992, 2994, 2996, 2998, 3000, 3002,
3004, 3006, 3008, 3010, 3012, 3014, 3016, 3018, 3020, 3022, 3024, 3026, 3028, 3030, 3032, 3034, 3036, 3038, 3040,
3042, 3044, 3046, 3048, 3050, 3052, 3054, 3056, 3058, 3060, 3062, 3064, 3066, 3068, 3070, 3072, 3074, 3076, 3078,
3080, 3082, 3084, 3086, 3088, 3090, 3092, 3094, 3096, 3098, 3100, 3102, 3104, 3106, 3108, 3110, 3112, 3114, 3116,
3118, 3120, 3122, 3124, 3126, 3128, 3130, 3132, 3134, 3136, 3138, 3140, 3142, 3144, 3146, 3148, 3150, 3152, 3154,
3156, 3158, 3160, 3162, 3164, 3166, 3168, 3170, 3172, 3174, 3176, 3178, 3180, 3182, 3184, 3186, 3188, 3190, 3192,
3194, 3196, 3198, 3200, 3202, 3204, 3206, 3208, 3210, 3212, 3214, 3216, 3218, 3220, 3222, 3224, 3226, 3228, 3230,
3232, 3234, 3236, 3238, 3240, 3242, 3244, 3246, 3248, 3250, 3252, 3254, 3256, 3258, 3260, 3262, 3264, 3266, 3268,
3270, 3272, 3274, 3276, 3278, 3280, 3282, 3284, 3286, 3288, 3290, 3292, 3294, 3296, 3298, 3300, 3302, 3304, 3306,
3308, 3310, 3312, 3314, 3316, 3318, 3320, 3322, 3324, 3326, 3328, 3330, 3332, 3334, 3336, 3338, 3340, 3342, 3344,
3346, 3348, 3350, 3352, 3354, 3356, 3358, 3360, 3362, 3364, 3366, 3368, 3370, 3372, 3374, 3376, 3378, 3380, 3382,
3384, 3386, 3388, 3390, 3392, 3394, 3396, 3398, 3400, 3402, 3404, 3406, 3408, 3410, 3412, 3414, 3416, 3418, 3420,
3422, 3424, 3426, 3428, 3430, 3432, 3434, 3436, 3438, 3440, 3442, 3444, 3446, 3448, 3450, 3452, 3454, 3456, 3458,
3460, 3462, 3464, 3466, 3468, 3470, 3472, 3474, 3476, 3478, 3480, 3482, 3484, 3486, 3488, 3490, 3492, 3494, 3496,
3498, 3500, 3502, 3504, 3506, 3508, 3510, 3512, 3514, 3516, 3518, 3520, 3522, 3524, 3526, 3528, 3530, 3532, 3534,
3536, 3538, 3540, 3542, 3544, 3546, 3548, 3550, 3552, 3554, 3556, 3558, 3560, 3562, 3564, 3566, 3568, 3570, 3572,
3574, 3576, 3578, 3580, 3582, 3584, 3586, 3588, 3590, 3592, 3594, 3596, 3598, 3600, 3602, 3604, 3606, 3608, 3610,
3612, 3614, 3616, 3618, 3620, 3622, 3624, 3626, 3628, 3630, 3632, 3634, 3636, 3638, 3640, 3642, 3644, 3646, 3648,
3650, 3652, 3654, 3656, 3658, 3660, 3662, 3664, 3666, 3668, 3670, 3672, 3674, 3676, 3678, 3680, 3682, 3684, 3686,
3688, 3690, 3692, 3694, 3696, 3698, 3700, 3702, 3704, 3706, 3708, 3710, 3712, 3714, 3716, 3718, 3720, 3722, 3724,
3726, 3728, 3730, 3732, 3734, 3736, 3738, 3740, 3742, 3744, 3746, 3748, 3750, 3752, 3754, 3756, 3758, 3760, 3762,
3764, 3766, 3768, 3770, 3772, 3774, 3776, 3778, 3780, 3782, 3784, 3786, 3788, 3790, 3792, 3794, 3796, 3798, 3800,
3802, 3804, 3806, 3808, 3810, 3812, 3814, 3816, 3818, 3820, 3822, 3824, 3826, 3828, 3830, 3832, 3834, 3836, 3838,
3840, 3842, 3844, 3846, 3848, 3850, 3852, 3854, 3856, 3858, 3860, 3862, 3864, 3866, 3868, 3870, 3872, 3874, 3876,
3878, 3880, 3882, 3884, 3886, 3888, 3890, 3892, 3894, 3896, 3898, 3900, 3902, 3904, 3906, 3908, 3910, 3912, 3914,
3916, 3918, 3920, 3922, 3924, 3926, 3928, 3930, 3932, 3934, 3936, 3938, 3940, 3942, 3944, 3946, 3948, 3950, 3952,
3954, 3956, 3958, 3960, 3962, 3964, 3966, 3968, 3970, 3972, 3974, 3976, 3978, 3980, 3982, 3984, 3986, 3988, 3990,
3992, 3994, 3996, 3998, 4000, 4002, 4004, 4006, 4008, 4010, 4012, 4014, 4016, 4018, 4020, 4022, 4024, 4026, 4028,
4030, 4032, 4034, 4036, 4038, 4040, 4042, 4044, 4046, 4048, 4050, 4052, 4054, 4056, 4058, 4060, 4062, 4064, 4066,
4068, 4070, 4072, 4074, 4076, 4078, 4080, 4082, 4084, 4086, 4088, 4090, 4092, 4094, 4096, 4098, 4100, 4102, 4104,
4106, 4108, 4110, 4112, 4114, 4116, 4118, 4120, 4122, 4124, 4126, 4128, 4130, 4132, 4134, 4136, 4138, 4140, 4142,
4144, 4146, 4148, 4150, 4152, 4154, 4156, 4158, 4160, 4162, 4164, 4166, 4168, 4170, 4172, 4174, 4176, 4178, 4180,
4182, 4184, 4186, 4188, 4190, 4192, 4194, 4196, 4198, 4200, 4202, 4204, 4206, 4208, 4210, 4212, 4214, 4216, 4218,
4220, 4222, 4224, 4226, 4228, 4230, 4232, 4234, 4236, 4238, 4240, 4242, 4244, 4246, 4248, 4250, 4252, 4254, 4256,
4258, 4260, 4262, 4264, 4266, 4268, 4270, 4272, 4274, 4276, 4278, 4280, 4282, 4284, 4286, 4288, 4290, 4292, 4294,
4296, 4298, 4300, 4302, 4304, 4306, 4308, 4310, 4312, 4314, 4316, 4318, 4320, 4322, 4324, 4326, 4328, 4330, 4332,
4334, 4336, 4338, 4340, 4342, 4344, 4346, 4348, 4350, 4352, 4354, 4356, 4358, 4360, 4362, 4364, 4366, 4368, 4370,
4372, 4374, 4376, 4378, 4380, 4382, 4384, 4386, 4388, 4390, 4392, 4394, 4396, 4398, 4400, 4402, 4404, 4406, 4408,
4410, 4412, 4414, 4416, 4418, 4420, 4422, 4424, 4426, 4428, 4430, 4432, 4434, 4436, 4438, 4440, 4442, 4444, 4446,
4448, 4450, 4452, 4454, 4456, 4458, 4460, 4462, 4464, 4466, 4468, 4470, 4472, 4474, 4476, 4478, 4480, 4482, 4484,
4486, 4488, 4490, 4492, 4494, 4496, 4498, 4500, 4502, 4504, 4506, 4508, 4510, 4512, 4514, 4516, 4518, 4520, 4522,
4524, 4526, 4528, 4530, 4532, 4534, 4536, 4538, 4540, 4542, 4544, 4546, 4548, 4550, 4552, 4554, 4556, 4558, 4560,
4562, 4564, 4566, 4568, 4570, 4572, 4574, 4576, 4578, 4580, 4582, 4584, 4586, 4588, 4590, 4592, 4594, 4596, 4598,
4600, 4602, 4604, 4606, 4608, 4610, 4612, 4614, 4616, 4618, 4620, 4622, 4624, 4626, 4628, 4630, 4632, 4634, 4636,
4638, 4640, 4642, 4644, 4646, 4648, 4650, 4652, 4654, 4656, 4658, 4660, 4662, 4664, 4666, 4668, 4670, 4672, 4674,
4676, 4678, 4680, 4682, 4684, 4686, 4688, 4690, 4692, 4694, 4696, 4698, 4700, 4702, 4704, 4706, 4708, 4710, 4712,
4714, 4716, 4718, 4720, 4722, 4724, 4726, 4728, 4730, 4732, 4734, 4736, 4738, 4740, 4742, 4744, 4746, 4748, 4750,
4752, 4754, 4756, 4758, 4760, 4762, 4764, 4766, 4768, 4770, 4772, 4774, 4776, 4778, 4780, 4782, 4784, 4786, 4788,
4790, 4792, 4794, 4796, 4798, 4800, 4802, 4804, 4806, 4808, 4810, 4812, 4814, 4816, 4818, 4820, 4822, 4824, 4826,
4828, 4830, 4832, 4834, 4836, 4838, 4840, 4842, 4844, 4846, 4848, 4850, 4852, 4854, 4856, 4858, 4860, 4862, 4864,
4866, 4868, 4870, 4872, 4874, 4876, 4878, 4880, 4882, 4884, 4886, 4888, 4890, 4892, 4894, 4896, 4898, 4900, 4902,
4904, 4906, 4908, 4910, 4912, 4914, 4916, 4918, 4920, 4922, 4924, 4926, 4928, 4930, 4932, 4934, 4936, 4938, 4940,
4942, 4944, 4946, 4948, 4950, 4952, 4954, 4956, 4958, 4960, 4962, 4964, 4966, 4968, 4970, 4972, 4974, 4976, 4978,
4980, 4982, 4984, 4986, 4988, 4990, 4992, 4994, 4996, 4998, 5000, 5002, 5004, 5006, 5008, 5010, 5012, 5014, 5016,
5018, 5020, 5022, 5024, 5026, 5028, 5030, 5032, 5034, 5036, 5038, 5040, 5042, 5044, 5046, 5048, 5050, 5052, 5054,
5056, 5058, 5060, 5062, 5064, 5066, 5068, 5070, 5072, 5074, 5076, 5078, 5080, 5082, 5084, 5086, 5088, 5090, 5092,
5094, 5096, 5098, 5100, 5102, 5104, 5106, 5108, 5110, 5112, 5114, 5116, 5118, 5120, 5122, 5124, 5126, 5128, 5130,
5132, 5134, 5136, 5138, 5140, 5142, 5144, 5146, 5148, 5150, 5152, 5154, 5156, 5158, 5160, 5162, 5164, 5166, 5168,
5170, 5172, 5174, 5176, 5178, 5180, 5182, 5184, 5186, 5188, 5190, 5192, 5194, 5196, 5198, 5200, 5202, 5204, 5206,
5208, 5210, 5212, 5214, 5216, 5218, 5220, 5222, 5224, 5226, 5228, 5230, 5232, 5234, 5236, 5238, 5240, 5242, 5244,
5246, 5248, 5250, 5252, 5254, 5256, 5258, 5260, 5262, 5264, 5266, 5268, 5270, 5272, 5274, 5276, 5278, 5280, 5282,
5284, 5286, 5288, 5290, 5292, 5294, 5296, 5298, 5300, 5302, 5304, 5306, 5308, 5310, 5312, 5314, 5316, 5318, 5320,
5322, 5324, 5326, 5328, 5330, 5332, 5334, 5336, 5338, 5340, 5342, 5344, 5346, 5348, 5350, 5352, 5354, 5356, 5358,
5360, 5362, 5364, 5366, 5368, 5370, 5372, 5374, 5376, 5378, 5380, 5382, 5384, 5386, 5388, 5390, 5392, 5394, 5396,
5398, 5400, 5402, 5404, 5406, 5408, 5410, 5412, 5414, 5416, 5418, 5420, 5422, 5424, 5426, 5428, 5430, 5432, 5434,
5436, 5438, 5440, 5442, 5444, 5446, 5448, 5450, 5452, 5454, 5456, 5458, 5460, 5462, 5464, 5466, 5468, 5470, 5472,
5474, 5476, 5478, 5480, 5482, 5484, 5486, 5488, 5490, 5492, 5494, 5496, 5498, 5500, 5502, 5504, 5506, 5508, 5510,
5512, 5514, 5516, 5518, 5520, 5522, 5524, 5526, 5528, 5530, 5532, 5534, 5536, 5538, 5540, 5542, 5544, 5546, 5548,
5550, 5552, 5554, 5556, 5558, 5560, 5562, 5564, 5566, 5568, 5570, 5572, 5574, 5576, 5578, 5580, 5582, 5584, 5586,
5588, 5590, 5592, 5594, 5596, 5598, 5600, 5602, 5604, 5606, 5608, 5610, 5612, 5614, 5616, 5618, 5620, 5622, 5624,
5626, 5628, 5630, 5632, 5634, 5636, 5638, 5640, 5642, 5644, 5646, 5648, 5650, 5652, 5654, 5656, 5658, 5660, 5662,
5664, 5666, 5668, 5670, 5672, 5674, 5676, 5678, 5680, 5682, 5684, 5686, 5688, 5690, 5692, 5694, 5696, 5698, 5700,
5702, 5704, 5706, 5708, 5710, 5712, 5714, 5716, 5718, 5720, 5722, 5724, 5726, 5728, 5730, 5732, 5734, 5736, 5738,
5740, 5742, 5744, 5746, 5748, 5750, 5752, 5754, 5756, 5758, 5760, 5762, 5764, 5766, 5768, 5770, 5772, 5774, 5776,
5778, 5780, 5782, 5784, 5786, 5788, 5790, 5792, 5794, 5796, 5798, 5800, 5802, 5804, 5806, 5808, 5810, 5812, 5814,
5816, 5818, 5820, 5822, 5824, 5826, 5828, 5830, 5832, 5834, 5836, 5838, 5840, 5842, 5844, 5846, 5848, 5850, 5852,
5854, 5856, 5858, 5860, 5862, 5864, 5866, 5868, 5870, 5872, 5874, 5876, 5878, 5880, 5882, 5884, 5886, 5888, 5890,
5892, 5894, 5896, 5898, 5900, 5902, 5904, 5906, 5908, 5910, 5912, 5914, 5916, 5918, 5920, 5922, 5924, 5926, 5928,
5930, 5932, 5934, 5936, 5938, 5940, 5942, 5944, 5946, 5948, 5950, 5952, 5954, 5956, 5958, 5960, 5962, 5964, 5966,
5968, 5970, 5972, 5974, 5976, 5978, 5980, 5982, 5984, 5986, 5988, 5990, 5992, 5994, 5996, 5998, 6000, 6002, 6004,
6006, 6008, 6010, 6012, 6014, 6016, 6018, 6020, 6022, 6024, 6026, 6028, 6030, 6032, 6034, 6036, 6038, 6040, 6042,
6044, 6046, 6048, 6050, 6052, 6054, 6056, 6058, 6060, 6062, 6064, 6066, 6068, 6070, 6072, 6074, 6076, 6078, 6080,
6082, 6084, 6086, 6088, 6090, 6092, 6094, 6096, 6098, 6100, 6102, 6104, 6106, 6108, 6110, 6112, 6114, 6116, 6118,
6120, 6122, 6124, 6126, 6128, 6130, 6132, 6134, 6136, 6138, 6140, 6142, 6144, 6146, 6148, 6150, 6152, 6154, 6156,
6158, 6160, 6162, 6164, 6166, 6168, 6170, 6172, 6174, 6176, 6178, 6180, 6182, 6184, 6186, 6188, 6190, 6192, 6194,
6196, 6198, 6200, 6202, 6204, 6206, 6208, 6210, 6212, 6214, 6216, 6218, 6220, 6222, 6224, 6226, 6228, 6230, 6232,
6234, 6236, 6238, 6240, 6242, 6244, 6246, 6248, 6250, 6252, 6254, 6256, 6258, 6260, 6262, 6264, 6266, 6268, 6270,
6272, 6274, 6276, 6278, 6280, 6282, 6284, 6286, 6288, 6290, 6292, 6294, 6296, 6298, 6300, 6302, 6304, 6306, 6308,
6310, 6312, 6314, 6316, 6318, 6320, 6322, 6324, 6326, 6328, 6330, 6332, 6334, 6336, 6338, 6340, 6342, 6344, 6346,
6348, 6350, 6352, 6354, 6356, 6358, 6360, 6362, 6364, 6366, 6368, 6370, 6372, 6374, 6376, 6378, 6380, 6382, 6384,
6386, 6388, 6390, 6392, 6394, 6396, 6398, 6400, 6402, 6404, 6406, 6408, 6410, 6412, 6414, 6416, 6418, 6420, 6422,
6424, 6426, 6428, 6430, 6432, 6434, 6436, 6438, 6440, 6442, 6444, 6446, 6448, 6450, 6452, 6454, 6456, 6458, 6460,
6462, 6464, 6466, 6468, 6470, 6472, 6474, 6476, 6478, 6480, 6482, 6484, 6486, 6488, 6490, 6492, 6494, 6496, 6498,
6500, 6502, 6504, 6506, 6508, 6510, 6512, 6514, 6516, 6518, 6520, 6522, 6524, 6526, 6528, 6530, 6532, 6534, 6536,
6538, 6540, 6542, 6544, 6546, 6548, 6550, 6552, 6554, 6556, 6558, 6560, 6562, 6564, 6566, 6568, 6570, 6572, 6574,
6576, 6578, 6580, 6582, 6584, 6586, 6588, 6590, 6592, 6594, 6596, 6598, 6600, 6602, 6604, 6606, 6608, 6610, 6612,
6614, 6616, 6618, 6620, 6622, 6624, 6626, 6628, 6630, 6632, 6634, 6636, 6638, 6640, 6642, 6644, 6646, 6648, 6650,
6652, 6654, 6656, 6658, 6660, 6662, 6664, 6666, 6668, 6670, 6672, 6674, 6676, 6678, 6680, 6682, 6684, 6686, 6688,
6690, 6692, 6694, 6696, 6698, 6700, 6702, 6704, 6706, 6708, 6710, 6712, 6714, 6716, 6718, 6720, 6722, 6724, 6726,
6728, 6730, 6732, 6734, 6736, 6738, 6740, 6742, 6744, 6746, 6748, 6750, 6752, 6754, 6756, 6758, 6760, 6762, 6764,
6766, 6768, 6770, 6772, 6774, 6776, 6778, 6780, 6782, 6784, 6786, 6788, 6790, 6792, 6794, 6796, 6798, 6800, 6802,
6804, 6806, 6808, 6810, 6812, 6814, 6816, 6818, 6820, 6822, 6824, 6826, 6828, 6830, 6832, 6834, 6836, 6838, 6840,
6842, 6844, 6846, 6848, 6850, 6852, 6854, 6856, 6858, 6860, 6862, 6864, 6866, 6868, 6870, 6872, 6874, 6876, 6878,
6880, 6882, 6884, 6886, 6888, 6890, 6892, 6894, 6896, 6898, 6900, 6902, 6904, 6906, 6908, 6910, 6912, 6914, 6916,
6918, 6920, 6922, 6924, 6926, 6928, 6930, 6932, 6934, 6936, 6938, 6940, 6942, 6944, 6946, 6948, 6950, 6952, 6954,
6956, 6958, 6960, 6962, 6964, 6966, 6968, 6970, 6972, 6974, 6976, 6978, 6980, 6982, 6984, 6986, 6988, 6990, 6992,
6994, 6996, 6998, 7000, 7002, 7004, 7006, 7008, 7010, 7012, 7014, 7016, 7018, 7020, 7022, 7024, 7026, 7028, 7030,
7032, 7034, 7036, 7038, 7040, 7042, 7044, 7046, 7048, 7050, 7052, 7054, 7056, 7058, 7060, 7062, 7064, 7066, 7068,
7070, 7072, 7074, 7076, 7078, 7080, 7082, 7084, 7086, 7088, 7090, 7092, 7094, 7096, 7098, 7100, 7102, 7104, 7106,
7108, 7110, 7112, 7114, 7116, 7118, 7120, 7122, 7124, 7126, 7128, 7130, 7132, 7134, 7136, 7138, 7140, 7142, 7144,
7146, 7148, 7150, 7152, 7154, 7156, 7158, 7160, 7162, 7164, 7166, 7168, 7170, 7172, 7174, 7176, 7178, 7180, 7182,
7184, 7186, 7188, 7190, 7192, 7194, 7196, 7198, 7200, 7202, 7204, 7206, 7208, 7210, 7212, 7214, 7216, 7218, 7220,
7222, 7224, 7226, 7228, 7230, 7232, 7234, 7236, 7238, 7240, 7242, 7244, 7246, 7248, 7250, 7252, 7254, 7256, 7258,
7260, 7262, 7264, 7266, 7268, 7270, 7272, 7274, 7276, 7278, 7280, 7282, 7284, 7286, 7288, 7290, 7292, 7294, 7296,
7298, 7300, 7302, 7304, 7306, 7308, 7310, 7312, 7314, 7316, 7318, 7320, 7322, 7324, 7326, 7328, 7330, 7332, 7334,
7336, 7338, 7340, 7342, 7344, 7346, 7348, 7350, 7352, 7354, 7356, 7358, 7360, 7362, 7364, 7366, 7368, 7370, 7372,
7374, 7376, 7378, 7380, 7382, 7384, 7386, 7388, 7390, 7392, 7394, 7396, 7398, 7400, 7402, 7404, 7406, 7408, 7410,
7412, 7414, 7416, 7418, 7420, 7422, 7424, 7426, 7428, 7430, 7432, 7434, 7436, 7438, 7440, 7442, 7444, 7446, 7448,
7450, 7452, 7454, 7456, 7458, 7460, 7462, 7464, 7466, 7468, 7470, 7472, 7474, 7476, 7478, 7480, 7482, 7484, 7486,
7488, 7490, 7492, 7494, 7496, 7498, 7500, 7502, 7504, 7506, 7508, 7510, 7512, 7514, 7516, 7518, 7520, 7522, 7524,
7526, 7528, 7530, 7532, 7534, 7536, 7538, 7540, 7542, 7544, 7546, 7548, 7550, 7552, 7554, 7556, 7558, 7560, 7562,
7564, 7566, 7568, 7570, 7572, 7574, 7576, 7578, 7580, 7582, 7584, 7586, 7588, 7590, 7592, 7594, 7596, 7598, 7600,
7602, 7604, 7606, 7608, 7610, 7612, 7614, 7616, 7618, 7620, 7622, 7624, 7626, 7628, 7630, 7632, 7634, 7636, 7638,
7640, 7642, 7644, 7646, 7648, 7650, 7652, 7654, 7656, 7658, 7660, 7662, 7664, 7666, 7668, 7670, 7672, 7674, 7676,
7678, 7680, 7682, 7684, 7686, 7688, 7690, 7692, 7694, 7696, 7698, 7700, 7702, 7704, 7706, 7708, 7710, 7712, 7714,
7716, 7718, 7720, 7722, 7724, 7726, 7728, 7730, 7732, 7734, 7736, 7738, 7740, 7742, 7744, 7746, 7748, 7750, 7752,
7754, 7756, 7758, 7760, 7762, 7764, 7766, 7768, 7770, 7772, 7774, 7776, 7778, 7780, 7782, 7784, 7786, 7788, 7790,
7792, 7794, 7796, 7798, 7800, 7802, 7804, 7806, 7808, 7810, 7812, 7814, 7816, 7818, 7820, 7822, 7824, 7826, 7828,
7830, 7832, 7834, 7836, 7838, 7840, 7842, 7844, 7846, 7848, 7850, 7852, 7854, 7856, 7858, 7860, 7862, 7864, 7866,
7868, 7870, 7872, 7874, 7876, 7878, 7880, 7882, 7884, 7886, 7888, 7890, 7892, 7894, 7896, 7898, 7900, 7902, 7904,
7906, 7908, 7910, 7912, 7914, 7916, 7918, 7920, 7922, 7924, 7926, 7928, 7930, 7932, 7934, 7936, 7938, 7940, 7942,
7944, 7946, 7948, 7950, 7952, 7954, 7956, 7958, 7960, 7962, 7964, 7966, 7968, 7970, 7972, 7974, 7976, 7978, 7980,
7982, 7984, 7986, 7988, 7990, 7992, 7994, 7996, 7998, 8000, 8002, 8004, 8006, 8008, 8010, 8012, 8014, 8016, 8018,
8020, 8022, 8024, 8026, 8028, 8030, 8032, 8034, 8036, 8038, 8040, 8042, 8044, 8046, 8048, 8050, 8052, 8054, 8056,
8058, 8060, 8062, 8064, 8066, 8068, 8070, 8072, 8074, 8076, 8078, 8080, 8082, 8084, 8086, 8088, 8090, 8092, 8094,
8096, 8098, 8100, 8102, 8104, 8106, 8108, 8110, 8112, 8114, 8116, 8118, 8120, 8122, 8124, 8126, 8128, 8130, 8132,
8134, 8136, 8138, 8140, 8142, 8144, 8146, 8148, 8150, 8152, 8154, 8156, 8158, 8160, 8162, 8164, 8166, 8168, 8170,
8172, 8174, 8176, 8178, 8180, 8182, 8184, 8186, 8188, 8190, 8192, 8194, 8196, 8198, 8200, 8202, 8204, 8206, 8208,
8210, 8212, 8214, 8216, 8218, 8220, 8222, 8224, 8226, 8228, 8230, 8232, 8234, 8236, 8238, 8240, 8242, 8244, 8246,
8248, 8250, 8252, 8254, 8256, 8258, 8260, 8262, 8264, 8266, 8268, 8270, 8272, 8274, 8276, 8278, 8280, 8282, 8284,
8286, 8288, 8290, 8292, 8294, 8296, 8298, 8300, 8302, 8304, 8306, 8308, 8310, 8312, 8314, 8316, 8318, 8320, 8322,
8324, 8326, 8328, 8330, 8332, 8334, 8336, 8338, 8340, 8342, 8344, 8346, 8348, 8350, 8352, 8354, 8356, 8358, 8360,
8362, 8364, 8366, 8368, 8370, 8372, 8374, 8376, 8378, 8380, 8382, 8384, 8386, 8388, 8390, 8392, 8394, 8396, 8398,
8400, 8402, 8404, 8406, 8408, 8410, 8412, 8414, 8416, 8418, 8420, 8422, 8424, 8426, 8428, 8430, 8432, 8434, 8436,
8438, 8440, 8442, 8444, 8446, 8448, 8450, 8452, 8454, 8456, 8458, 8460, 8462, 8464, 8466, 8468, 8470, 8472, 8474,
8476, 8478, 8480, 8482, 8484, 8486, 8488, 8490, 8492, 8494, 8496, 8498, 8500, 8502, 8504, 8506, 8508, 8510, 8512,
8514, 8516, 8518, 8520, 8522, 8524, 8526, 8528, 8530, 8532, 8534, 8536, 8538, 8540, 8542, 8544, 8546, 8548, 8550,
8552, 8554, 8556, 8558, 8560, 8562, 8564, 8566, 8568, 8570, 8572, 8574, 8576, 8578, 8580, 8582, 8584, 8586, 8588,
8590, 8592, 8594, 8596, 8598, 8600, 8602, 8604, 8606, 8608, 8610, 8612, 8614, 8616, 8618, 8620, 8622, 8624, 8626,
8628, 8630, 8632, 8634, 8636, 8638, 8640, 8642, 8644, 8646, 8648, 8650, 8652, 8654, 8656, 8658, 8660, 8662, 8664,
8666, 8668, 8670, 8672, 8674, 8676, 8678, 8680, 8682, 8684, 8686, 8688, 8690, 8692, 8694, 8696, 8698, 8700, 8702,
8704, 8706, 8708, 8710, 8712, 8714, 8716, 8718, 8720, 8722, 8724, 8726, 8728, 8730, 8732, 8734, 8736, 8738, 8740,
8742, 8744, 8746, 8748, 8750, 8752, 8754, 8756, 8758, 8760, 8762, 8764, 8766, 8768, 8770, 8772, 8774, 8776, 8778,
8780, 8782, 8784, 8786, 8788, 8790, 8792, 8794, 8796, 8798, 8800, 8802, 8804, 8806, 8808, 8810, 8812, 8814, 8816,
8818, 8820, 8822, 8824, 8826, 8828, 8830, 8832, 8834, 8836, 8838, 8840, 8842, 8844, 8846, 8848, 8850, 8852, 8854,
8856, 8858, 8860, 8862, 8864, 8866, 8868, 8870, 8872, 8874, 8876, 8878, 8880, 8882, 8884, 8886, 8888, 8890, 8892,
8894, 8896, 8898, 8900, 8902, 8904, 8906, 8908, 8910, 8912, 8914, 8916, 8918, 8920, 8922, 8924, 8926, 8928, 8930,
8932, 8934, 8936, 8938, 8940, 8942, 8944, 8946, 8948, 8950, 8952, 8954, 8956, 8958, 8960, 8962, 8964, 8966, 8968,
8970, 8972, 8974, 8976, 8978, 8980, 8982, 8984, 8986, 8988, 8990, 8992, 8994, 8996, 8998, 9000, 9002, 9004, 9006,
9008, 9010, 9012, 9014, 9016, 9018, 9020, 9022, 9024, 9026, 9028, 9030, 9032, 9034, 9036, 9038, 9040, 9042, 9044,
9046, 9048, 9050, 9052, 9054, 9056, 9058, 9060, 9062, 9064, 9066, 9068, 9070, 9072, 9074, 9076, 9078, 9080, 9082,
9084, 9086, 9088, 9090, 9092, 9094, 9096, 9098, 9100, 9102, 9104, 9106, 9108, 9110, 9112, 9114, 9116, 9118, 9120,
9122, 9124, 9126, 9128, 9130, 9132, 9134, 9136, 9138, 9140, 9142, 9144, 9146, 9148, 9150, 9152, 9154, 9156, 9158,
9160, 9162, 9164, 9166, 9168, 9170, 9172, 9174, 9176, 9178, 9180, 9182, 9184, 9186, 9188, 9190, 9192, 9194, 9196,
9198, 9200, 9202, 9204, 9206, 9208, 9210, 9212, 9214, 9216, 9218, 9220, 9222, 9224, 9226, 9228, 9230, 9232, 9234,
9236, 9238, 9240, 9242, 9244, 9246, 9248, 9250, 9252, 9254, 9256, 9258, 9260, 9262, 9264, 9266, 9268, 9270, 9272,
9274, 9276, 9278, 9280, 9282, 9284, 9286, 9288, 9290, 9292, 9294, 9296, 9298, 9300, 9302, 9304, 9306, 9308, 9310,
9312, 9314, 9316, 9318, 9320, 9322, 9324, 9326, 9328, 9330, 9332, 9334, 9336, 9338, 9340, 9342, 9344, 9346, 9348,
9350, 9352, 9354, 9356, 9358, 9360, 9362, 9364, 9366, 9368, 9370, 9372, 9374, 9376, 9378, 9380, 9382, 9384, 9386,
9388, 9390, 9392, 9394, 9396, 9398, 9400, 9402, 9404, 9406, 9408, 9410, 9412, 9414, 9416, 9418, 9420, 9422, 9424,
9426, 9428, 9430, 9432, 9434, 9436, 9438, 9440, 9442, 9444, 9446, 9448, 9450, 9452, 9454, 9456, 9458, 9460, 9462,
9464, 9466, 9468, 9470, 9472, 9474, 9476, 9478, 9480, 9482, 9484, 9486, 9488, 9490, 9492, 9494, 9496, 9498, 9500,
9502, 9504, 9506, 9508, 9510, 9512, 9514, 9516, 9518, 9520, 9522, 9524, 9526, 9528, 9530, 9532, 9534, 9536, 9538,
9540, 9542, 9544, 9546, 9548, 9550, 9552, 9554, 9556, 9558, 9560, 9562, 9564, 9566, 9568, 9570, 9572, 9574, 9576,
9578, 9580, 9582, 9584, 9586, 9588, 9590, 9592, 9594, 9596, 9598, 9600, 9602, 9604, 9606, 9608, 9610, 9612, 9614,
9616, 9618, 9620, 9622, 9624, 9626, 9628, 9630, 9632, 9634, 9636, 9638, 9640, 9642, 9644, 9646, 9648, 9650, 9652,
9654, 9656, 9658, 9660, 9662, 9664, 9666, 9668, 9670, 9672, 9674, 9676, 9678, 9680, 9682, 9684, 9686, 9688, 9690,
9692, 9694, 9696, 9698, 9700, 9702, 9704, 9706, 9708, 9710, 9712, 9714, 9716, 9718, 9720, 9722, 9724, 9726, 9728,
9730, 9732, 9734, 9736, 9738, 9740, 9742, 9744, 9746, 9748, 9750, 9752, 9754, 9756, 9758, 9760, 9762, 9764, 9766,
9768, 9770, 9772, 9774, 9776, 9778, 9780, 9782, 9784, 9786, 9788, 9790, 9792, 9794, 9796, 9798, 9800, 9802, 9804,
9806, 9808, 9810, 9812, 9814, 9816, 9818, 9820, 9822, 9824, 9826, 9828, 9830, 9832, 9834, 9836, 9838, 9840, 9842,
9844, 9846, 9848, 9850, 9852, 9854, 9856, 9858, 9860, 9862, 9864, 9866, 9868, 9870, 9872, 9874, 9876, 9878, 9880,
9882, 9884, 9886, 9888, 9890, 9892, 9894, 9896, 9898, 9900, 9902, 9904, 9906, 9908, 9910, 9912, 9914, 9916, 9918,
9920, 9922, 9924, 9926, 9928, 9930, 9932, 9934, 9936, 9938, 9940, 9942, 9944, 9946, 9948, 9950, 9952, 9954, 9956,
9958, 9960, 9962, 9964, 9966, 9968, 9970, 9972, 9974, 9976, 9978, 9980, 9982, 9984, 9986, 9988, 9990, 9992, 9994,
9996, 9998, 10000, 10002]
# Run the big sample: find the fair candy swap between the large lists
# a1 and b1 defined earlier in this file.
res = Solution().fairCandySwap(a1, b1)
print(res)
# Put the code block to be timed above this line (translated from Chinese).
end = time.time()
print('Running time: %s Seconds' % (end - start))
| 104.638158 | 120 | 0.623546 |
import time
__author__ = 'Max_Pengjb'
# Wall-clock start of the script; paired with `end` at the bottom to
# report the total running time.
start = time.time()
class Solution(object):
    """Solution to LeetCode 888, Fair Candy Swap."""

    def fairCandySwap(self, A, B):
        """Return ``[a, b]`` such that swapping ``a`` (from A) with ``b``
        (from B) makes both totals equal.

        The totals match exactly when ``b == a - (sum(A) - sum(B)) // 2``.
        Returns the first such pair scanning A in order; returns None
        implicitly if no fair swap exists (the problem guarantees one).
        """
        # Half the current difference: the size Alice gives away must
        # exceed the size she receives by exactly this amount.
        delta = (sum(A) - sum(B)) // 2
        # O(1) membership test; the original built a dict of ones as a
        # hand-rolled set.
        sizes_b = set(B)
        for a in A:
            if a - delta in sizes_b:
                return [a, a - delta]
# Small sample inputs (the LeetCode example case); apparently leftover —
# the timed run below uses the large a1/b1 lists, not these.
A = [1, 2, 5]
B = [2, 4]
# Large stress-test input: every odd number in [1, 9999], followed by a
# single even outlier 4982 (5001 elements total). Built with range()
# instead of spelling out the literal.
a1 = list(range(1, 10000, 2)) + [4982]
b1 = [2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58,
60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112,
114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 142, 144, 146, 148, 150, 152, 154, 156, 158,
160, 162, 164, 166, 168, 170, 172, 174, 176, 178, 180, 182, 184, 186, 188, 190, 192, 194, 196, 198, 200, 202, 204,
206, 208, 210, 212, 214, 216, 218, 220, 222, 224, 226, 228, 230, 232, 234, 236, 238, 240, 242, 244, 246, 248, 250,
252, 254, 256, 258, 260, 262, 264, 266, 268, 270, 272, 274, 276, 278, 280, 282, 284, 286, 288, 290, 292, 294, 296,
298, 300, 302, 304, 306, 308, 310, 312, 314, 316, 318, 320, 322, 324, 326, 328, 330, 332, 334, 336, 338, 340, 342,
344, 346, 348, 350, 352, 354, 356, 358, 360, 362, 364, 366, 368, 370, 372, 374, 376, 378, 380, 382, 384, 386, 388,
390, 392, 394, 396, 398, 400, 402, 404, 406, 408, 410, 412, 414, 416, 418, 420, 422, 424, 426, 428, 430, 432, 434,
436, 438, 440, 442, 444, 446, 448, 450, 452, 454, 456, 458, 460, 462, 464, 466, 468, 470, 472, 474, 476, 478, 480,
482, 484, 486, 488, 490, 492, 494, 496, 498, 500, 502, 504, 506, 508, 510, 512, 514, 516, 518, 520, 522, 524, 526,
528, 530, 532, 534, 536, 538, 540, 542, 544, 546, 548, 550, 552, 554, 556, 558, 560, 562, 564, 566, 568, 570, 572,
574, 576, 578, 580, 582, 584, 586, 588, 590, 592, 594, 596, 598, 600, 602, 604, 606, 608, 610, 612, 614, 616, 618,
620, 622, 624, 626, 628, 630, 632, 634, 636, 638, 640, 642, 644, 646, 648, 650, 652, 654, 656, 658, 660, 662, 664,
666, 668, 670, 672, 674, 676, 678, 680, 682, 684, 686, 688, 690, 692, 694, 696, 698, 700, 702, 704, 706, 708, 710,
712, 714, 716, 718, 720, 722, 724, 726, 728, 730, 732, 734, 736, 738, 740, 742, 744, 746, 748, 750, 752, 754, 756,
758, 760, 762, 764, 766, 768, 770, 772, 774, 776, 778, 780, 782, 784, 786, 788, 790, 792, 794, 796, 798, 800, 802,
804, 806, 808, 810, 812, 814, 816, 818, 820, 822, 824, 826, 828, 830, 832, 834, 836, 838, 840, 842, 844, 846, 848,
850, 852, 854, 856, 858, 860, 862, 864, 866, 868, 870, 872, 874, 876, 878, 880, 882, 884, 886, 888, 890, 892, 894,
896, 898, 900, 902, 904, 906, 908, 910, 912, 914, 916, 918, 920, 922, 924, 926, 928, 930, 932, 934, 936, 938, 940,
942, 944, 946, 948, 950, 952, 954, 956, 958, 960, 962, 964, 966, 968, 970, 972, 974, 976, 978, 980, 982, 984, 986,
988, 990, 992, 994, 996, 998, 1000, 1002, 1004, 1006, 1008, 1010, 1012, 1014, 1016, 1018, 1020, 1022, 1024, 1026,
1028, 1030, 1032, 1034, 1036, 1038, 1040, 1042, 1044, 1046, 1048, 1050, 1052, 1054, 1056, 1058, 1060, 1062, 1064,
1066, 1068, 1070, 1072, 1074, 1076, 1078, 1080, 1082, 1084, 1086, 1088, 1090, 1092, 1094, 1096, 1098, 1100, 1102,
1104, 1106, 1108, 1110, 1112, 1114, 1116, 1118, 1120, 1122, 1124, 1126, 1128, 1130, 1132, 1134, 1136, 1138, 1140,
1142, 1144, 1146, 1148, 1150, 1152, 1154, 1156, 1158, 1160, 1162, 1164, 1166, 1168, 1170, 1172, 1174, 1176, 1178,
1180, 1182, 1184, 1186, 1188, 1190, 1192, 1194, 1196, 1198, 1200, 1202, 1204, 1206, 1208, 1210, 1212, 1214, 1216,
1218, 1220, 1222, 1224, 1226, 1228, 1230, 1232, 1234, 1236, 1238, 1240, 1242, 1244, 1246, 1248, 1250, 1252, 1254,
1256, 1258, 1260, 1262, 1264, 1266, 1268, 1270, 1272, 1274, 1276, 1278, 1280, 1282, 1284, 1286, 1288, 1290, 1292,
1294, 1296, 1298, 1300, 1302, 1304, 1306, 1308, 1310, 1312, 1314, 1316, 1318, 1320, 1322, 1324, 1326, 1328, 1330,
1332, 1334, 1336, 1338, 1340, 1342, 1344, 1346, 1348, 1350, 1352, 1354, 1356, 1358, 1360, 1362, 1364, 1366, 1368,
1370, 1372, 1374, 1376, 1378, 1380, 1382, 1384, 1386, 1388, 1390, 1392, 1394, 1396, 1398, 1400, 1402, 1404, 1406,
1408, 1410, 1412, 1414, 1416, 1418, 1420, 1422, 1424, 1426, 1428, 1430, 1432, 1434, 1436, 1438, 1440, 1442, 1444,
1446, 1448, 1450, 1452, 1454, 1456, 1458, 1460, 1462, 1464, 1466, 1468, 1470, 1472, 1474, 1476, 1478, 1480, 1482,
1484, 1486, 1488, 1490, 1492, 1494, 1496, 1498, 1500, 1502, 1504, 1506, 1508, 1510, 1512, 1514, 1516, 1518, 1520,
1522, 1524, 1526, 1528, 1530, 1532, 1534, 1536, 1538, 1540, 1542, 1544, 1546, 1548, 1550, 1552, 1554, 1556, 1558,
1560, 1562, 1564, 1566, 1568, 1570, 1572, 1574, 1576, 1578, 1580, 1582, 1584, 1586, 1588, 1590, 1592, 1594, 1596,
1598, 1600, 1602, 1604, 1606, 1608, 1610, 1612, 1614, 1616, 1618, 1620, 1622, 1624, 1626, 1628, 1630, 1632, 1634,
1636, 1638, 1640, 1642, 1644, 1646, 1648, 1650, 1652, 1654, 1656, 1658, 1660, 1662, 1664, 1666, 1668, 1670, 1672,
1674, 1676, 1678, 1680, 1682, 1684, 1686, 1688, 1690, 1692, 1694, 1696, 1698, 1700, 1702, 1704, 1706, 1708, 1710,
1712, 1714, 1716, 1718, 1720, 1722, 1724, 1726, 1728, 1730, 1732, 1734, 1736, 1738, 1740, 1742, 1744, 1746, 1748,
1750, 1752, 1754, 1756, 1758, 1760, 1762, 1764, 1766, 1768, 1770, 1772, 1774, 1776, 1778, 1780, 1782, 1784, 1786,
1788, 1790, 1792, 1794, 1796, 1798, 1800, 1802, 1804, 1806, 1808, 1810, 1812, 1814, 1816, 1818, 1820, 1822, 1824,
1826, 1828, 1830, 1832, 1834, 1836, 1838, 1840, 1842, 1844, 1846, 1848, 1850, 1852, 1854, 1856, 1858, 1860, 1862,
1864, 1866, 1868, 1870, 1872, 1874, 1876, 1878, 1880, 1882, 1884, 1886, 1888, 1890, 1892, 1894, 1896, 1898, 1900,
1902, 1904, 1906, 1908, 1910, 1912, 1914, 1916, 1918, 1920, 1922, 1924, 1926, 1928, 1930, 1932, 1934, 1936, 1938,
1940, 1942, 1944, 1946, 1948, 1950, 1952, 1954, 1956, 1958, 1960, 1962, 1964, 1966, 1968, 1970, 1972, 1974, 1976,
1978, 1980, 1982, 1984, 1986, 1988, 1990, 1992, 1994, 1996, 1998, 2000, 2002, 2004, 2006, 2008, 2010, 2012, 2014,
2016, 2018, 2020, 2022, 2024, 2026, 2028, 2030, 2032, 2034, 2036, 2038, 2040, 2042, 2044, 2046, 2048, 2050, 2052,
2054, 2056, 2058, 2060, 2062, 2064, 2066, 2068, 2070, 2072, 2074, 2076, 2078, 2080, 2082, 2084, 2086, 2088, 2090,
2092, 2094, 2096, 2098, 2100, 2102, 2104, 2106, 2108, 2110, 2112, 2114, 2116, 2118, 2120, 2122, 2124, 2126, 2128,
2130, 2132, 2134, 2136, 2138, 2140, 2142, 2144, 2146, 2148, 2150, 2152, 2154, 2156, 2158, 2160, 2162, 2164, 2166,
2168, 2170, 2172, 2174, 2176, 2178, 2180, 2182, 2184, 2186, 2188, 2190, 2192, 2194, 2196, 2198, 2200, 2202, 2204,
2206, 2208, 2210, 2212, 2214, 2216, 2218, 2220, 2222, 2224, 2226, 2228, 2230, 2232, 2234, 2236, 2238, 2240, 2242,
2244, 2246, 2248, 2250, 2252, 2254, 2256, 2258, 2260, 2262, 2264, 2266, 2268, 2270, 2272, 2274, 2276, 2278, 2280,
2282, 2284, 2286, 2288, 2290, 2292, 2294, 2296, 2298, 2300, 2302, 2304, 2306, 2308, 2310, 2312, 2314, 2316, 2318,
2320, 2322, 2324, 2326, 2328, 2330, 2332, 2334, 2336, 2338, 2340, 2342, 2344, 2346, 2348, 2350, 2352, 2354, 2356,
2358, 2360, 2362, 2364, 2366, 2368, 2370, 2372, 2374, 2376, 2378, 2380, 2382, 2384, 2386, 2388, 2390, 2392, 2394,
2396, 2398, 2400, 2402, 2404, 2406, 2408, 2410, 2412, 2414, 2416, 2418, 2420, 2422, 2424, 2426, 2428, 2430, 2432,
2434, 2436, 2438, 2440, 2442, 2444, 2446, 2448, 2450, 2452, 2454, 2456, 2458, 2460, 2462, 2464, 2466, 2468, 2470,
2472, 2474, 2476, 2478, 2480, 2482, 2484, 2486, 2488, 2490, 2492, 2494, 2496, 2498, 2500, 2502, 2504, 2506, 2508,
2510, 2512, 2514, 2516, 2518, 2520, 2522, 2524, 2526, 2528, 2530, 2532, 2534, 2536, 2538, 2540, 2542, 2544, 2546,
2548, 2550, 2552, 2554, 2556, 2558, 2560, 2562, 2564, 2566, 2568, 2570, 2572, 2574, 2576, 2578, 2580, 2582, 2584,
2586, 2588, 2590, 2592, 2594, 2596, 2598, 2600, 2602, 2604, 2606, 2608, 2610, 2612, 2614, 2616, 2618, 2620, 2622,
2624, 2626, 2628, 2630, 2632, 2634, 2636, 2638, 2640, 2642, 2644, 2646, 2648, 2650, 2652, 2654, 2656, 2658, 2660,
2662, 2664, 2666, 2668, 2670, 2672, 2674, 2676, 2678, 2680, 2682, 2684, 2686, 2688, 2690, 2692, 2694, 2696, 2698,
2700, 2702, 2704, 2706, 2708, 2710, 2712, 2714, 2716, 2718, 2720, 2722, 2724, 2726, 2728, 2730, 2732, 2734, 2736,
2738, 2740, 2742, 2744, 2746, 2748, 2750, 2752, 2754, 2756, 2758, 2760, 2762, 2764, 2766, 2768, 2770, 2772, 2774,
2776, 2778, 2780, 2782, 2784, 2786, 2788, 2790, 2792, 2794, 2796, 2798, 2800, 2802, 2804, 2806, 2808, 2810, 2812,
2814, 2816, 2818, 2820, 2822, 2824, 2826, 2828, 2830, 2832, 2834, 2836, 2838, 2840, 2842, 2844, 2846, 2848, 2850,
2852, 2854, 2856, 2858, 2860, 2862, 2864, 2866, 2868, 2870, 2872, 2874, 2876, 2878, 2880, 2882, 2884, 2886, 2888,
2890, 2892, 2894, 2896, 2898, 2900, 2902, 2904, 2906, 2908, 2910, 2912, 2914, 2916, 2918, 2920, 2922, 2924, 2926,
2928, 2930, 2932, 2934, 2936, 2938, 2940, 2942, 2944, 2946, 2948, 2950, 2952, 2954, 2956, 2958, 2960, 2962, 2964,
2966, 2968, 2970, 2972, 2974, 2976, 2978, 2980, 2982, 2984, 2986, 2988, 2990, 2992, 2994, 2996, 2998, 3000, 3002,
3004, 3006, 3008, 3010, 3012, 3014, 3016, 3018, 3020, 3022, 3024, 3026, 3028, 3030, 3032, 3034, 3036, 3038, 3040,
3042, 3044, 3046, 3048, 3050, 3052, 3054, 3056, 3058, 3060, 3062, 3064, 3066, 3068, 3070, 3072, 3074, 3076, 3078,
3080, 3082, 3084, 3086, 3088, 3090, 3092, 3094, 3096, 3098, 3100, 3102, 3104, 3106, 3108, 3110, 3112, 3114, 3116,
3118, 3120, 3122, 3124, 3126, 3128, 3130, 3132, 3134, 3136, 3138, 3140, 3142, 3144, 3146, 3148, 3150, 3152, 3154,
3156, 3158, 3160, 3162, 3164, 3166, 3168, 3170, 3172, 3174, 3176, 3178, 3180, 3182, 3184, 3186, 3188, 3190, 3192,
3194, 3196, 3198, 3200, 3202, 3204, 3206, 3208, 3210, 3212, 3214, 3216, 3218, 3220, 3222, 3224, 3226, 3228, 3230,
3232, 3234, 3236, 3238, 3240, 3242, 3244, 3246, 3248, 3250, 3252, 3254, 3256, 3258, 3260, 3262, 3264, 3266, 3268,
3270, 3272, 3274, 3276, 3278, 3280, 3282, 3284, 3286, 3288, 3290, 3292, 3294, 3296, 3298, 3300, 3302, 3304, 3306,
3308, 3310, 3312, 3314, 3316, 3318, 3320, 3322, 3324, 3326, 3328, 3330, 3332, 3334, 3336, 3338, 3340, 3342, 3344,
3346, 3348, 3350, 3352, 3354, 3356, 3358, 3360, 3362, 3364, 3366, 3368, 3370, 3372, 3374, 3376, 3378, 3380, 3382,
3384, 3386, 3388, 3390, 3392, 3394, 3396, 3398, 3400, 3402, 3404, 3406, 3408, 3410, 3412, 3414, 3416, 3418, 3420,
3422, 3424, 3426, 3428, 3430, 3432, 3434, 3436, 3438, 3440, 3442, 3444, 3446, 3448, 3450, 3452, 3454, 3456, 3458,
3460, 3462, 3464, 3466, 3468, 3470, 3472, 3474, 3476, 3478, 3480, 3482, 3484, 3486, 3488, 3490, 3492, 3494, 3496,
3498, 3500, 3502, 3504, 3506, 3508, 3510, 3512, 3514, 3516, 3518, 3520, 3522, 3524, 3526, 3528, 3530, 3532, 3534,
3536, 3538, 3540, 3542, 3544, 3546, 3548, 3550, 3552, 3554, 3556, 3558, 3560, 3562, 3564, 3566, 3568, 3570, 3572,
3574, 3576, 3578, 3580, 3582, 3584, 3586, 3588, 3590, 3592, 3594, 3596, 3598, 3600, 3602, 3604, 3606, 3608, 3610,
3612, 3614, 3616, 3618, 3620, 3622, 3624, 3626, 3628, 3630, 3632, 3634, 3636, 3638, 3640, 3642, 3644, 3646, 3648,
3650, 3652, 3654, 3656, 3658, 3660, 3662, 3664, 3666, 3668, 3670, 3672, 3674, 3676, 3678, 3680, 3682, 3684, 3686,
3688, 3690, 3692, 3694, 3696, 3698, 3700, 3702, 3704, 3706, 3708, 3710, 3712, 3714, 3716, 3718, 3720, 3722, 3724,
3726, 3728, 3730, 3732, 3734, 3736, 3738, 3740, 3742, 3744, 3746, 3748, 3750, 3752, 3754, 3756, 3758, 3760, 3762,
3764, 3766, 3768, 3770, 3772, 3774, 3776, 3778, 3780, 3782, 3784, 3786, 3788, 3790, 3792, 3794, 3796, 3798, 3800,
3802, 3804, 3806, 3808, 3810, 3812, 3814, 3816, 3818, 3820, 3822, 3824, 3826, 3828, 3830, 3832, 3834, 3836, 3838,
3840, 3842, 3844, 3846, 3848, 3850, 3852, 3854, 3856, 3858, 3860, 3862, 3864, 3866, 3868, 3870, 3872, 3874, 3876,
3878, 3880, 3882, 3884, 3886, 3888, 3890, 3892, 3894, 3896, 3898, 3900, 3902, 3904, 3906, 3908, 3910, 3912, 3914,
3916, 3918, 3920, 3922, 3924, 3926, 3928, 3930, 3932, 3934, 3936, 3938, 3940, 3942, 3944, 3946, 3948, 3950, 3952,
3954, 3956, 3958, 3960, 3962, 3964, 3966, 3968, 3970, 3972, 3974, 3976, 3978, 3980, 3982, 3984, 3986, 3988, 3990,
3992, 3994, 3996, 3998, 4000, 4002, 4004, 4006, 4008, 4010, 4012, 4014, 4016, 4018, 4020, 4022, 4024, 4026, 4028,
4030, 4032, 4034, 4036, 4038, 4040, 4042, 4044, 4046, 4048, 4050, 4052, 4054, 4056, 4058, 4060, 4062, 4064, 4066,
4068, 4070, 4072, 4074, 4076, 4078, 4080, 4082, 4084, 4086, 4088, 4090, 4092, 4094, 4096, 4098, 4100, 4102, 4104,
4106, 4108, 4110, 4112, 4114, 4116, 4118, 4120, 4122, 4124, 4126, 4128, 4130, 4132, 4134, 4136, 4138, 4140, 4142,
4144, 4146, 4148, 4150, 4152, 4154, 4156, 4158, 4160, 4162, 4164, 4166, 4168, 4170, 4172, 4174, 4176, 4178, 4180,
4182, 4184, 4186, 4188, 4190, 4192, 4194, 4196, 4198, 4200, 4202, 4204, 4206, 4208, 4210, 4212, 4214, 4216, 4218,
4220, 4222, 4224, 4226, 4228, 4230, 4232, 4234, 4236, 4238, 4240, 4242, 4244, 4246, 4248, 4250, 4252, 4254, 4256,
4258, 4260, 4262, 4264, 4266, 4268, 4270, 4272, 4274, 4276, 4278, 4280, 4282, 4284, 4286, 4288, 4290, 4292, 4294,
4296, 4298, 4300, 4302, 4304, 4306, 4308, 4310, 4312, 4314, 4316, 4318, 4320, 4322, 4324, 4326, 4328, 4330, 4332,
4334, 4336, 4338, 4340, 4342, 4344, 4346, 4348, 4350, 4352, 4354, 4356, 4358, 4360, 4362, 4364, 4366, 4368, 4370,
4372, 4374, 4376, 4378, 4380, 4382, 4384, 4386, 4388, 4390, 4392, 4394, 4396, 4398, 4400, 4402, 4404, 4406, 4408,
4410, 4412, 4414, 4416, 4418, 4420, 4422, 4424, 4426, 4428, 4430, 4432, 4434, 4436, 4438, 4440, 4442, 4444, 4446,
4448, 4450, 4452, 4454, 4456, 4458, 4460, 4462, 4464, 4466, 4468, 4470, 4472, 4474, 4476, 4478, 4480, 4482, 4484,
4486, 4488, 4490, 4492, 4494, 4496, 4498, 4500, 4502, 4504, 4506, 4508, 4510, 4512, 4514, 4516, 4518, 4520, 4522,
4524, 4526, 4528, 4530, 4532, 4534, 4536, 4538, 4540, 4542, 4544, 4546, 4548, 4550, 4552, 4554, 4556, 4558, 4560,
4562, 4564, 4566, 4568, 4570, 4572, 4574, 4576, 4578, 4580, 4582, 4584, 4586, 4588, 4590, 4592, 4594, 4596, 4598,
4600, 4602, 4604, 4606, 4608, 4610, 4612, 4614, 4616, 4618, 4620, 4622, 4624, 4626, 4628, 4630, 4632, 4634, 4636,
4638, 4640, 4642, 4644, 4646, 4648, 4650, 4652, 4654, 4656, 4658, 4660, 4662, 4664, 4666, 4668, 4670, 4672, 4674,
4676, 4678, 4680, 4682, 4684, 4686, 4688, 4690, 4692, 4694, 4696, 4698, 4700, 4702, 4704, 4706, 4708, 4710, 4712,
4714, 4716, 4718, 4720, 4722, 4724, 4726, 4728, 4730, 4732, 4734, 4736, 4738, 4740, 4742, 4744, 4746, 4748, 4750,
4752, 4754, 4756, 4758, 4760, 4762, 4764, 4766, 4768, 4770, 4772, 4774, 4776, 4778, 4780, 4782, 4784, 4786, 4788,
4790, 4792, 4794, 4796, 4798, 4800, 4802, 4804, 4806, 4808, 4810, 4812, 4814, 4816, 4818, 4820, 4822, 4824, 4826,
4828, 4830, 4832, 4834, 4836, 4838, 4840, 4842, 4844, 4846, 4848, 4850, 4852, 4854, 4856, 4858, 4860, 4862, 4864,
4866, 4868, 4870, 4872, 4874, 4876, 4878, 4880, 4882, 4884, 4886, 4888, 4890, 4892, 4894, 4896, 4898, 4900, 4902,
4904, 4906, 4908, 4910, 4912, 4914, 4916, 4918, 4920, 4922, 4924, 4926, 4928, 4930, 4932, 4934, 4936, 4938, 4940,
4942, 4944, 4946, 4948, 4950, 4952, 4954, 4956, 4958, 4960, 4962, 4964, 4966, 4968, 4970, 4972, 4974, 4976, 4978,
4980, 4982, 4984, 4986, 4988, 4990, 4992, 4994, 4996, 4998, 5000, 5002, 5004, 5006, 5008, 5010, 5012, 5014, 5016,
5018, 5020, 5022, 5024, 5026, 5028, 5030, 5032, 5034, 5036, 5038, 5040, 5042, 5044, 5046, 5048, 5050, 5052, 5054,
5056, 5058, 5060, 5062, 5064, 5066, 5068, 5070, 5072, 5074, 5076, 5078, 5080, 5082, 5084, 5086, 5088, 5090, 5092,
5094, 5096, 5098, 5100, 5102, 5104, 5106, 5108, 5110, 5112, 5114, 5116, 5118, 5120, 5122, 5124, 5126, 5128, 5130,
5132, 5134, 5136, 5138, 5140, 5142, 5144, 5146, 5148, 5150, 5152, 5154, 5156, 5158, 5160, 5162, 5164, 5166, 5168,
5170, 5172, 5174, 5176, 5178, 5180, 5182, 5184, 5186, 5188, 5190, 5192, 5194, 5196, 5198, 5200, 5202, 5204, 5206,
5208, 5210, 5212, 5214, 5216, 5218, 5220, 5222, 5224, 5226, 5228, 5230, 5232, 5234, 5236, 5238, 5240, 5242, 5244,
5246, 5248, 5250, 5252, 5254, 5256, 5258, 5260, 5262, 5264, 5266, 5268, 5270, 5272, 5274, 5276, 5278, 5280, 5282,
5284, 5286, 5288, 5290, 5292, 5294, 5296, 5298, 5300, 5302, 5304, 5306, 5308, 5310, 5312, 5314, 5316, 5318, 5320,
5322, 5324, 5326, 5328, 5330, 5332, 5334, 5336, 5338, 5340, 5342, 5344, 5346, 5348, 5350, 5352, 5354, 5356, 5358,
5360, 5362, 5364, 5366, 5368, 5370, 5372, 5374, 5376, 5378, 5380, 5382, 5384, 5386, 5388, 5390, 5392, 5394, 5396,
5398, 5400, 5402, 5404, 5406, 5408, 5410, 5412, 5414, 5416, 5418, 5420, 5422, 5424, 5426, 5428, 5430, 5432, 5434,
5436, 5438, 5440, 5442, 5444, 5446, 5448, 5450, 5452, 5454, 5456, 5458, 5460, 5462, 5464, 5466, 5468, 5470, 5472,
5474, 5476, 5478, 5480, 5482, 5484, 5486, 5488, 5490, 5492, 5494, 5496, 5498, 5500, 5502, 5504, 5506, 5508, 5510,
5512, 5514, 5516, 5518, 5520, 5522, 5524, 5526, 5528, 5530, 5532, 5534, 5536, 5538, 5540, 5542, 5544, 5546, 5548,
5550, 5552, 5554, 5556, 5558, 5560, 5562, 5564, 5566, 5568, 5570, 5572, 5574, 5576, 5578, 5580, 5582, 5584, 5586,
5588, 5590, 5592, 5594, 5596, 5598, 5600, 5602, 5604, 5606, 5608, 5610, 5612, 5614, 5616, 5618, 5620, 5622, 5624,
5626, 5628, 5630, 5632, 5634, 5636, 5638, 5640, 5642, 5644, 5646, 5648, 5650, 5652, 5654, 5656, 5658, 5660, 5662,
5664, 5666, 5668, 5670, 5672, 5674, 5676, 5678, 5680, 5682, 5684, 5686, 5688, 5690, 5692, 5694, 5696, 5698, 5700,
5702, 5704, 5706, 5708, 5710, 5712, 5714, 5716, 5718, 5720, 5722, 5724, 5726, 5728, 5730, 5732, 5734, 5736, 5738,
5740, 5742, 5744, 5746, 5748, 5750, 5752, 5754, 5756, 5758, 5760, 5762, 5764, 5766, 5768, 5770, 5772, 5774, 5776,
5778, 5780, 5782, 5784, 5786, 5788, 5790, 5792, 5794, 5796, 5798, 5800, 5802, 5804, 5806, 5808, 5810, 5812, 5814,
5816, 5818, 5820, 5822, 5824, 5826, 5828, 5830, 5832, 5834, 5836, 5838, 5840, 5842, 5844, 5846, 5848, 5850, 5852,
5854, 5856, 5858, 5860, 5862, 5864, 5866, 5868, 5870, 5872, 5874, 5876, 5878, 5880, 5882, 5884, 5886, 5888, 5890,
5892, 5894, 5896, 5898, 5900, 5902, 5904, 5906, 5908, 5910, 5912, 5914, 5916, 5918, 5920, 5922, 5924, 5926, 5928,
5930, 5932, 5934, 5936, 5938, 5940, 5942, 5944, 5946, 5948, 5950, 5952, 5954, 5956, 5958, 5960, 5962, 5964, 5966,
5968, 5970, 5972, 5974, 5976, 5978, 5980, 5982, 5984, 5986, 5988, 5990, 5992, 5994, 5996, 5998, 6000, 6002, 6004,
6006, 6008, 6010, 6012, 6014, 6016, 6018, 6020, 6022, 6024, 6026, 6028, 6030, 6032, 6034, 6036, 6038, 6040, 6042,
6044, 6046, 6048, 6050, 6052, 6054, 6056, 6058, 6060, 6062, 6064, 6066, 6068, 6070, 6072, 6074, 6076, 6078, 6080,
6082, 6084, 6086, 6088, 6090, 6092, 6094, 6096, 6098, 6100, 6102, 6104, 6106, 6108, 6110, 6112, 6114, 6116, 6118,
6120, 6122, 6124, 6126, 6128, 6130, 6132, 6134, 6136, 6138, 6140, 6142, 6144, 6146, 6148, 6150, 6152, 6154, 6156,
6158, 6160, 6162, 6164, 6166, 6168, 6170, 6172, 6174, 6176, 6178, 6180, 6182, 6184, 6186, 6188, 6190, 6192, 6194,
6196, 6198, 6200, 6202, 6204, 6206, 6208, 6210, 6212, 6214, 6216, 6218, 6220, 6222, 6224, 6226, 6228, 6230, 6232,
6234, 6236, 6238, 6240, 6242, 6244, 6246, 6248, 6250, 6252, 6254, 6256, 6258, 6260, 6262, 6264, 6266, 6268, 6270,
6272, 6274, 6276, 6278, 6280, 6282, 6284, 6286, 6288, 6290, 6292, 6294, 6296, 6298, 6300, 6302, 6304, 6306, 6308,
6310, 6312, 6314, 6316, 6318, 6320, 6322, 6324, 6326, 6328, 6330, 6332, 6334, 6336, 6338, 6340, 6342, 6344, 6346,
6348, 6350, 6352, 6354, 6356, 6358, 6360, 6362, 6364, 6366, 6368, 6370, 6372, 6374, 6376, 6378, 6380, 6382, 6384,
6386, 6388, 6390, 6392, 6394, 6396, 6398, 6400, 6402, 6404, 6406, 6408, 6410, 6412, 6414, 6416, 6418, 6420, 6422,
6424, 6426, 6428, 6430, 6432, 6434, 6436, 6438, 6440, 6442, 6444, 6446, 6448, 6450, 6452, 6454, 6456, 6458, 6460,
6462, 6464, 6466, 6468, 6470, 6472, 6474, 6476, 6478, 6480, 6482, 6484, 6486, 6488, 6490, 6492, 6494, 6496, 6498,
6500, 6502, 6504, 6506, 6508, 6510, 6512, 6514, 6516, 6518, 6520, 6522, 6524, 6526, 6528, 6530, 6532, 6534, 6536,
6538, 6540, 6542, 6544, 6546, 6548, 6550, 6552, 6554, 6556, 6558, 6560, 6562, 6564, 6566, 6568, 6570, 6572, 6574,
6576, 6578, 6580, 6582, 6584, 6586, 6588, 6590, 6592, 6594, 6596, 6598, 6600, 6602, 6604, 6606, 6608, 6610, 6612,
6614, 6616, 6618, 6620, 6622, 6624, 6626, 6628, 6630, 6632, 6634, 6636, 6638, 6640, 6642, 6644, 6646, 6648, 6650,
6652, 6654, 6656, 6658, 6660, 6662, 6664, 6666, 6668, 6670, 6672, 6674, 6676, 6678, 6680, 6682, 6684, 6686, 6688,
6690, 6692, 6694, 6696, 6698, 6700, 6702, 6704, 6706, 6708, 6710, 6712, 6714, 6716, 6718, 6720, 6722, 6724, 6726,
6728, 6730, 6732, 6734, 6736, 6738, 6740, 6742, 6744, 6746, 6748, 6750, 6752, 6754, 6756, 6758, 6760, 6762, 6764,
6766, 6768, 6770, 6772, 6774, 6776, 6778, 6780, 6782, 6784, 6786, 6788, 6790, 6792, 6794, 6796, 6798, 6800, 6802,
6804, 6806, 6808, 6810, 6812, 6814, 6816, 6818, 6820, 6822, 6824, 6826, 6828, 6830, 6832, 6834, 6836, 6838, 6840,
6842, 6844, 6846, 6848, 6850, 6852, 6854, 6856, 6858, 6860, 6862, 6864, 6866, 6868, 6870, 6872, 6874, 6876, 6878,
6880, 6882, 6884, 6886, 6888, 6890, 6892, 6894, 6896, 6898, 6900, 6902, 6904, 6906, 6908, 6910, 6912, 6914, 6916,
6918, 6920, 6922, 6924, 6926, 6928, 6930, 6932, 6934, 6936, 6938, 6940, 6942, 6944, 6946, 6948, 6950, 6952, 6954,
6956, 6958, 6960, 6962, 6964, 6966, 6968, 6970, 6972, 6974, 6976, 6978, 6980, 6982, 6984, 6986, 6988, 6990, 6992,
6994, 6996, 6998, 7000, 7002, 7004, 7006, 7008, 7010, 7012, 7014, 7016, 7018, 7020, 7022, 7024, 7026, 7028, 7030,
7032, 7034, 7036, 7038, 7040, 7042, 7044, 7046, 7048, 7050, 7052, 7054, 7056, 7058, 7060, 7062, 7064, 7066, 7068,
7070, 7072, 7074, 7076, 7078, 7080, 7082, 7084, 7086, 7088, 7090, 7092, 7094, 7096, 7098, 7100, 7102, 7104, 7106,
7108, 7110, 7112, 7114, 7116, 7118, 7120, 7122, 7124, 7126, 7128, 7130, 7132, 7134, 7136, 7138, 7140, 7142, 7144,
7146, 7148, 7150, 7152, 7154, 7156, 7158, 7160, 7162, 7164, 7166, 7168, 7170, 7172, 7174, 7176, 7178, 7180, 7182,
7184, 7186, 7188, 7190, 7192, 7194, 7196, 7198, 7200, 7202, 7204, 7206, 7208, 7210, 7212, 7214, 7216, 7218, 7220,
7222, 7224, 7226, 7228, 7230, 7232, 7234, 7236, 7238, 7240, 7242, 7244, 7246, 7248, 7250, 7252, 7254, 7256, 7258,
7260, 7262, 7264, 7266, 7268, 7270, 7272, 7274, 7276, 7278, 7280, 7282, 7284, 7286, 7288, 7290, 7292, 7294, 7296,
7298, 7300, 7302, 7304, 7306, 7308, 7310, 7312, 7314, 7316, 7318, 7320, 7322, 7324, 7326, 7328, 7330, 7332, 7334,
7336, 7338, 7340, 7342, 7344, 7346, 7348, 7350, 7352, 7354, 7356, 7358, 7360, 7362, 7364, 7366, 7368, 7370, 7372,
7374, 7376, 7378, 7380, 7382, 7384, 7386, 7388, 7390, 7392, 7394, 7396, 7398, 7400, 7402, 7404, 7406, 7408, 7410,
7412, 7414, 7416, 7418, 7420, 7422, 7424, 7426, 7428, 7430, 7432, 7434, 7436, 7438, 7440, 7442, 7444, 7446, 7448,
7450, 7452, 7454, 7456, 7458, 7460, 7462, 7464, 7466, 7468, 7470, 7472, 7474, 7476, 7478, 7480, 7482, 7484, 7486,
7488, 7490, 7492, 7494, 7496, 7498, 7500, 7502, 7504, 7506, 7508, 7510, 7512, 7514, 7516, 7518, 7520, 7522, 7524,
7526, 7528, 7530, 7532, 7534, 7536, 7538, 7540, 7542, 7544, 7546, 7548, 7550, 7552, 7554, 7556, 7558, 7560, 7562,
7564, 7566, 7568, 7570, 7572, 7574, 7576, 7578, 7580, 7582, 7584, 7586, 7588, 7590, 7592, 7594, 7596, 7598, 7600,
7602, 7604, 7606, 7608, 7610, 7612, 7614, 7616, 7618, 7620, 7622, 7624, 7626, 7628, 7630, 7632, 7634, 7636, 7638,
7640, 7642, 7644, 7646, 7648, 7650, 7652, 7654, 7656, 7658, 7660, 7662, 7664, 7666, 7668, 7670, 7672, 7674, 7676,
7678, 7680, 7682, 7684, 7686, 7688, 7690, 7692, 7694, 7696, 7698, 7700, 7702, 7704, 7706, 7708, 7710, 7712, 7714,
7716, 7718, 7720, 7722, 7724, 7726, 7728, 7730, 7732, 7734, 7736, 7738, 7740, 7742, 7744, 7746, 7748, 7750, 7752,
7754, 7756, 7758, 7760, 7762, 7764, 7766, 7768, 7770, 7772, 7774, 7776, 7778, 7780, 7782, 7784, 7786, 7788, 7790,
7792, 7794, 7796, 7798, 7800, 7802, 7804, 7806, 7808, 7810, 7812, 7814, 7816, 7818, 7820, 7822, 7824, 7826, 7828,
7830, 7832, 7834, 7836, 7838, 7840, 7842, 7844, 7846, 7848, 7850, 7852, 7854, 7856, 7858, 7860, 7862, 7864, 7866,
7868, 7870, 7872, 7874, 7876, 7878, 7880, 7882, 7884, 7886, 7888, 7890, 7892, 7894, 7896, 7898, 7900, 7902, 7904,
7906, 7908, 7910, 7912, 7914, 7916, 7918, 7920, 7922, 7924, 7926, 7928, 7930, 7932, 7934, 7936, 7938, 7940, 7942,
7944, 7946, 7948, 7950, 7952, 7954, 7956, 7958, 7960, 7962, 7964, 7966, 7968, 7970, 7972, 7974, 7976, 7978, 7980,
7982, 7984, 7986, 7988, 7990, 7992, 7994, 7996, 7998, 8000, 8002, 8004, 8006, 8008, 8010, 8012, 8014, 8016, 8018,
8020, 8022, 8024, 8026, 8028, 8030, 8032, 8034, 8036, 8038, 8040, 8042, 8044, 8046, 8048, 8050, 8052, 8054, 8056,
8058, 8060, 8062, 8064, 8066, 8068, 8070, 8072, 8074, 8076, 8078, 8080, 8082, 8084, 8086, 8088, 8090, 8092, 8094,
8096, 8098, 8100, 8102, 8104, 8106, 8108, 8110, 8112, 8114, 8116, 8118, 8120, 8122, 8124, 8126, 8128, 8130, 8132,
8134, 8136, 8138, 8140, 8142, 8144, 8146, 8148, 8150, 8152, 8154, 8156, 8158, 8160, 8162, 8164, 8166, 8168, 8170,
8172, 8174, 8176, 8178, 8180, 8182, 8184, 8186, 8188, 8190, 8192, 8194, 8196, 8198, 8200, 8202, 8204, 8206, 8208,
8210, 8212, 8214, 8216, 8218, 8220, 8222, 8224, 8226, 8228, 8230, 8232, 8234, 8236, 8238, 8240, 8242, 8244, 8246,
8248, 8250, 8252, 8254, 8256, 8258, 8260, 8262, 8264, 8266, 8268, 8270, 8272, 8274, 8276, 8278, 8280, 8282, 8284,
8286, 8288, 8290, 8292, 8294, 8296, 8298, 8300, 8302, 8304, 8306, 8308, 8310, 8312, 8314, 8316, 8318, 8320, 8322,
8324, 8326, 8328, 8330, 8332, 8334, 8336, 8338, 8340, 8342, 8344, 8346, 8348, 8350, 8352, 8354, 8356, 8358, 8360,
8362, 8364, 8366, 8368, 8370, 8372, 8374, 8376, 8378, 8380, 8382, 8384, 8386, 8388, 8390, 8392, 8394, 8396, 8398,
8400, 8402, 8404, 8406, 8408, 8410, 8412, 8414, 8416, 8418, 8420, 8422, 8424, 8426, 8428, 8430, 8432, 8434, 8436,
8438, 8440, 8442, 8444, 8446, 8448, 8450, 8452, 8454, 8456, 8458, 8460, 8462, 8464, 8466, 8468, 8470, 8472, 8474,
8476, 8478, 8480, 8482, 8484, 8486, 8488, 8490, 8492, 8494, 8496, 8498, 8500, 8502, 8504, 8506, 8508, 8510, 8512,
8514, 8516, 8518, 8520, 8522, 8524, 8526, 8528, 8530, 8532, 8534, 8536, 8538, 8540, 8542, 8544, 8546, 8548, 8550,
8552, 8554, 8556, 8558, 8560, 8562, 8564, 8566, 8568, 8570, 8572, 8574, 8576, 8578, 8580, 8582, 8584, 8586, 8588,
8590, 8592, 8594, 8596, 8598, 8600, 8602, 8604, 8606, 8608, 8610, 8612, 8614, 8616, 8618, 8620, 8622, 8624, 8626,
8628, 8630, 8632, 8634, 8636, 8638, 8640, 8642, 8644, 8646, 8648, 8650, 8652, 8654, 8656, 8658, 8660, 8662, 8664,
8666, 8668, 8670, 8672, 8674, 8676, 8678, 8680, 8682, 8684, 8686, 8688, 8690, 8692, 8694, 8696, 8698, 8700, 8702,
8704, 8706, 8708, 8710, 8712, 8714, 8716, 8718, 8720, 8722, 8724, 8726, 8728, 8730, 8732, 8734, 8736, 8738, 8740,
8742, 8744, 8746, 8748, 8750, 8752, 8754, 8756, 8758, 8760, 8762, 8764, 8766, 8768, 8770, 8772, 8774, 8776, 8778,
8780, 8782, 8784, 8786, 8788, 8790, 8792, 8794, 8796, 8798, 8800, 8802, 8804, 8806, 8808, 8810, 8812, 8814, 8816,
8818, 8820, 8822, 8824, 8826, 8828, 8830, 8832, 8834, 8836, 8838, 8840, 8842, 8844, 8846, 8848, 8850, 8852, 8854,
8856, 8858, 8860, 8862, 8864, 8866, 8868, 8870, 8872, 8874, 8876, 8878, 8880, 8882, 8884, 8886, 8888, 8890, 8892,
8894, 8896, 8898, 8900, 8902, 8904, 8906, 8908, 8910, 8912, 8914, 8916, 8918, 8920, 8922, 8924, 8926, 8928, 8930,
8932, 8934, 8936, 8938, 8940, 8942, 8944, 8946, 8948, 8950, 8952, 8954, 8956, 8958, 8960, 8962, 8964, 8966, 8968,
8970, 8972, 8974, 8976, 8978, 8980, 8982, 8984, 8986, 8988, 8990, 8992, 8994, 8996, 8998, 9000, 9002, 9004, 9006,
9008, 9010, 9012, 9014, 9016, 9018, 9020, 9022, 9024, 9026, 9028, 9030, 9032, 9034, 9036, 9038, 9040, 9042, 9044,
9046, 9048, 9050, 9052, 9054, 9056, 9058, 9060, 9062, 9064, 9066, 9068, 9070, 9072, 9074, 9076, 9078, 9080, 9082,
9084, 9086, 9088, 9090, 9092, 9094, 9096, 9098, 9100, 9102, 9104, 9106, 9108, 9110, 9112, 9114, 9116, 9118, 9120,
9122, 9124, 9126, 9128, 9130, 9132, 9134, 9136, 9138, 9140, 9142, 9144, 9146, 9148, 9150, 9152, 9154, 9156, 9158,
9160, 9162, 9164, 9166, 9168, 9170, 9172, 9174, 9176, 9178, 9180, 9182, 9184, 9186, 9188, 9190, 9192, 9194, 9196,
9198, 9200, 9202, 9204, 9206, 9208, 9210, 9212, 9214, 9216, 9218, 9220, 9222, 9224, 9226, 9228, 9230, 9232, 9234,
9236, 9238, 9240, 9242, 9244, 9246, 9248, 9250, 9252, 9254, 9256, 9258, 9260, 9262, 9264, 9266, 9268, 9270, 9272,
9274, 9276, 9278, 9280, 9282, 9284, 9286, 9288, 9290, 9292, 9294, 9296, 9298, 9300, 9302, 9304, 9306, 9308, 9310,
9312, 9314, 9316, 9318, 9320, 9322, 9324, 9326, 9328, 9330, 9332, 9334, 9336, 9338, 9340, 9342, 9344, 9346, 9348,
9350, 9352, 9354, 9356, 9358, 9360, 9362, 9364, 9366, 9368, 9370, 9372, 9374, 9376, 9378, 9380, 9382, 9384, 9386,
9388, 9390, 9392, 9394, 9396, 9398, 9400, 9402, 9404, 9406, 9408, 9410, 9412, 9414, 9416, 9418, 9420, 9422, 9424,
9426, 9428, 9430, 9432, 9434, 9436, 9438, 9440, 9442, 9444, 9446, 9448, 9450, 9452, 9454, 9456, 9458, 9460, 9462,
9464, 9466, 9468, 9470, 9472, 9474, 9476, 9478, 9480, 9482, 9484, 9486, 9488, 9490, 9492, 9494, 9496, 9498, 9500,
9502, 9504, 9506, 9508, 9510, 9512, 9514, 9516, 9518, 9520, 9522, 9524, 9526, 9528, 9530, 9532, 9534, 9536, 9538,
9540, 9542, 9544, 9546, 9548, 9550, 9552, 9554, 9556, 9558, 9560, 9562, 9564, 9566, 9568, 9570, 9572, 9574, 9576,
9578, 9580, 9582, 9584, 9586, 9588, 9590, 9592, 9594, 9596, 9598, 9600, 9602, 9604, 9606, 9608, 9610, 9612, 9614,
9616, 9618, 9620, 9622, 9624, 9626, 9628, 9630, 9632, 9634, 9636, 9638, 9640, 9642, 9644, 9646, 9648, 9650, 9652,
9654, 9656, 9658, 9660, 9662, 9664, 9666, 9668, 9670, 9672, 9674, 9676, 9678, 9680, 9682, 9684, 9686, 9688, 9690,
9692, 9694, 9696, 9698, 9700, 9702, 9704, 9706, 9708, 9710, 9712, 9714, 9716, 9718, 9720, 9722, 9724, 9726, 9728,
9730, 9732, 9734, 9736, 9738, 9740, 9742, 9744, 9746, 9748, 9750, 9752, 9754, 9756, 9758, 9760, 9762, 9764, 9766,
9768, 9770, 9772, 9774, 9776, 9778, 9780, 9782, 9784, 9786, 9788, 9790, 9792, 9794, 9796, 9798, 9800, 9802, 9804,
9806, 9808, 9810, 9812, 9814, 9816, 9818, 9820, 9822, 9824, 9826, 9828, 9830, 9832, 9834, 9836, 9838, 9840, 9842,
9844, 9846, 9848, 9850, 9852, 9854, 9856, 9858, 9860, 9862, 9864, 9866, 9868, 9870, 9872, 9874, 9876, 9878, 9880,
9882, 9884, 9886, 9888, 9890, 9892, 9894, 9896, 9898, 9900, 9902, 9904, 9906, 9908, 9910, 9912, 9914, 9916, 9918,
9920, 9922, 9924, 9926, 9928, 9930, 9932, 9934, 9936, 9938, 9940, 9942, 9944, 9946, 9948, 9950, 9952, 9954, 9956,
9958, 9960, 9962, 9964, 9966, 9968, 9970, 9972, 9974, 9976, 9978, 9980, 9982, 9984, 9986, 9988, 9990, 9992, 9994,
9996, 9998, 10000, 10002]
# Driver: run the fair-candy-swap solver on the two large test lists built
# above and report the wall-clock running time.
# NOTE(review): `Solution`, `a1`, `b1`, `time`, and `start` are defined
# earlier in the script (truncated from this view) -- confirm before reuse.
res = Solution().fairCandySwap(a1, b1)
print(res)
end = time.time()
print('Running time: %s Seconds' % (end - start))
| true | true |
f71f6977583be15f02e5a3484137a80e4aecac84 | 926 | py | Python | supervised_learning/0x03-optimization/12-learning_rate_decay.py | cbarros7/holbertonschool-machine_learning | 1edb4c253441f6319b86c9c590d1e7dd3fc32bf4 | [
"MIT"
] | 1 | 2022-03-09T19:12:22.000Z | 2022-03-09T19:12:22.000Z | supervised_learning/0x03-optimization/12-learning_rate_decay.py | cbarros7/holbertonschool-machine_learning | 1edb4c253441f6319b86c9c590d1e7dd3fc32bf4 | [
"MIT"
] | null | null | null | supervised_learning/0x03-optimization/12-learning_rate_decay.py | cbarros7/holbertonschool-machine_learning | 1edb4c253441f6319b86c9c590d1e7dd3fc32bf4 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""Learning Rate Decay Upgraded"""
import tensorflow as tf
def learning_rate_decay(alpha, decay_rate, global_step, decay_step):
    """Build a TensorFlow inverse-time learning-rate decay operation.

    Args:
        alpha: the original (initial) learning rate.
        decay_rate: weight that determines how quickly ``alpha`` decays.
        global_step: number of gradient-descent passes elapsed so far.
        decay_step: number of passes that must occur before ``alpha``
            is decayed further (``staircase=True`` makes the decay
            happen in discrete steps rather than continuously).

    Returns:
        The learning rate decay operation.
    """
    # Positional argument order matches tf.train.inverse_time_decay:
    # (learning_rate, global_step, decay_steps, decay_rate).
    return tf.train.inverse_time_decay(
        alpha,
        global_step,
        decay_step,
        decay_rate,
        staircase=True,
    )
| 37.04 | 77 | 0.671706 |
import tensorflow as tf
def learning_rate_decay(alpha, decay_rate, global_step, decay_step):
    """Create an inverse-time learning-rate decay op in TensorFlow.

    Args:
        alpha: the original (initial) learning rate.
        decay_rate: weight used to determine the rate at which
            ``alpha`` will decay.
        global_step: number of gradient-descent passes that have elapsed.
        decay_step: number of passes that should occur before ``alpha``
            is decayed further.

    Returns:
        The learning rate decay operation.
    """
    # staircase=True decays the rate at discrete intervals of decay_step
    # instead of continuously with every step.
    return tf.train.inverse_time_decay(
        learning_rate=alpha, global_step=global_step, decay_steps=decay_step,
        decay_rate=decay_rate, staircase=True, name=None
    )
| true | true |
f71f6a82759cb768681d3430b292afb226aeca63 | 11,152 | py | Python | analyses/weat.py | raphaottoni/youtube-hatespeech | 4467d065c2bddd86feec673ad74670c4c975f017 | [
"MIT"
] | null | null | null | analyses/weat.py | raphaottoni/youtube-hatespeech | 4467d065c2bddd86feec673ad74670c4c975f017 | [
"MIT"
] | 1 | 2018-08-09T14:13:16.000Z | 2018-08-09T15:34:00.000Z | analyses/weat.py | raphaottoni/youtube-hatespeech | 4467d065c2bddd86feec673ad74670c4c975f017 | [
"MIT"
] | null | null | null | from gensim import corpora, models, similarities
import json
import numpy
import random
import csv
import itertools
import gensim, copy
from collections import defaultdict
import glob, os
# WEAT (Word Embedding Association Test) word lists.
#
# Each test <k> is defined by two target-word sets (keys "<k>_a" / "<k>_b")
# and two attribute-word sets (same keys in `attribute_words`).  The
# human-readable test name is appended to `weats_name` in the same order,
# so index i of `weats_name` corresponds to test number i + 1.
target_words = {}
attribute_words= {}
weats_name = []

# 1 WEAT Immigrants: immigrant/migrant vs. citizen/native terms, paired
# with negative vs. positive attribute words.
target_words["1_a"] = ["immigrant", "migrant"]
target_words["1_b"] = ["citizen", "native"]
attribute_words["1_a"] = ["bad", "burden", "pirate", "plague", "taker", "thief"]
attribute_words["1_b"] = ["good", "honest", "maker", "rightful"]
weats_name.append("Immigrants")

# 2 WEAT Muslims: Islam-related vs. Christianity-related terms, paired
# with violence-related vs. kindness-related attributes.
target_words["2_a"] = ["islamism", "muhammed", "muslim", "quran"]
target_words["2_b"] = ["bible", "christian", "christianity", "jesus"]
attribute_words["2_a"] = ["assassin", "attack", "bomb", "death", "murder", "radical", "terrorist"]
attribute_words["2_b"] = ["compassionate", "gentle", "humane", "kind", "tolerant"]
weats_name.append("Muslims")

# 3 WEAT LGBT people: sexual-orientation terms paired with
# moralizing-negative vs. normalizing-positive attributes.
target_words["3_a"] = ["bisexual", "gay", "homosexual", "lesbian"]
target_words["3_b"] = ["het", "hetero", "heterosexual", "straight"]
attribute_words["3_a"] = ["immoral", "outrageous", "promiscuous", "revolting", "sinner"]
attribute_words["3_b"] = ["moral", "natural", "normal"]
weats_name.append("LGBT People")

# 4 WEAT math/arts vs. male/female terms (classic gender-stereotype WEAT).
target_words["4_a"] = ["math","algebra","geometry","calculus","equations","computation","numbers" ]
target_words["4_b"] = ["poetry","art","dance","literature","novel","symphony","drama","sculpture" ]
attribute_words["4_a"] = ["male","man","boy","brother","he","him","his", "son"]
attribute_words["4_b"] = ["female","woman","girl","sister","she","her","hers", "daughter"]
weats_name.append("Math Arts")

# 5 WEAT Judaism and Christianity: Christianity-related ("5_a") vs.
# Judaism-related ("5_b") terms, with pleasant vs. unpleasant attributes.
# Religious Judaism Christianity
target_words["5_a"] = ["church","bible","christian","christianity"]
target_words["5_b"] = ["synagogue","torah","jew","judaism"]
attribute_words["5_a"] = ["terrific","delight","celebrate","glad","beautiful","appealing", "pleasure", "enjoy" ]
attribute_words["5_b"] = ["hate","angry","detest","despise","dirty","humiliate","pain", "horrible"]
weats_name.append("Jewish")

# 6 WEAT African American 1: European-American vs. African-American first
# names, with a short pleasant vs. unpleasant attribute list.
target_words["6_a"] = ["brad", "brendan", "geoffrey", "greg", "brett", "jay",
                       "matthew", "neil", "todd", "allison", "anne", "carrie",
                       "emily", "jill", "laurie", "kristen", "meredith", "sarah"]
target_words["6_b"] = ["darnell", "hakim", "jermaine", "kareem", "jamal",
                       "leroy", "rasheed", "tremayne", "tyrone", "aisha",
                       "ebony", "keisha", "kenya", "latonya", "lakisha",
                       "latoya", "tamika", "tanisha"]
attribute_words["6_a"] = [ "joy", "love", "peace", "wonderful", "pleasure", "friend", "laughter", "happy"]
attribute_words["6_b"] = ["agony", "terrible", "horrible", "nasty", "evil", "war", "awful","failure"]
weats_name.append("African - 1")

# 7 WEAT African American 2: same name lists as test 6 but with the longer
# 25-word pleasant/unpleasant attribute lists.
target_words["7_a"] = ["brad", "brendan", "geoffrey", "greg", "brett", "jay",
                       "matthew", "neil", "todd", "allison", "anne", "carrie",
                       "emily", "jill", "laurie", "kristen", "meredith", "sarah"]
target_words["7_b"] = ["darnell", "hakim", "jermaine", "kareem", "jamal",
                       "leroy", "rasheed", "tremayne", "tyrone", "aisha",
                       "ebony", "keisha", "kenya", "latonya", "lakisha",
                       "latoya", "tamika", "tanisha"]
attribute_words["7_a"] = ["caress", "freedom", "health", "love", "peace",
                          "cheer", "friend", "heaven", "loyal", "pleasure",
                          "diamond", "gentle", "honest", "lucky", "rainbow",
                          "diploma", "gift", "honor", "miracle", "sunrise",
                          "family", "happy","laughter","paradise", "vacation"]
attribute_words["7_b"] = ["abuse", "crash", "filth", "murder", "sickness",
                          "accident", "death", "grief", "poison", "stink",
                          "assault", "disaster", "hatred","pollute", "tragedy",
                          "bomb", "divorce", "jail", "poverty", "ugly", "cancer",
                          "evil", "kill", "rotten","vomit"]
weats_name.append("African - 2")
def statistic_test(X,Y,A,B,M):
result = 0.0
sum_X = 0.0
sum_Y = 0.0
for word_X in X:
sum_X += sub_statistic_test(word_X, A,B,M)
for word_Y in Y:
sum_Y += sub_statistic_test(word_Y, A,B,M)
return (sum_X - sum_Y)
def sub_statistic_test(w,A,B,M):
result = 0.0
sum_cos_A = 0.0
sum_cos_B = 0.0
for word_A in A:
sum_cos_A += numpy.dot(M[w],M[word_A])/(numpy.linalg.norm(M[w])*numpy.linalg.norm(M[word_A]))
for word_B in B:
sum_cos_B += numpy.dot(M[w],M[word_B])/(numpy.linalg.norm(M[w])*numpy.linalg.norm(M[word_B]))
return (sum_cos_A/len(A) - sum_cos_B/len(B))
def effect_size(x_words,y_words,a_attributes,b_attributes,M):
# Effect size
test_x = 0.0
test_y = 0.0
samples = []
for word_x in target_words[x_words]:
test_x += sub_statistic_test(word_x,attribute_words[a_attributes],attribute_words[b_attributes],M)
samples.append(sub_statistic_test(word_x,attribute_words[a_attributes],attribute_words[b_attributes],M))
for word_y in target_words[y_words]:
test_y += sub_statistic_test(word_y,attribute_words[a_attributes],attribute_words[b_attributes],M)
samples.append(sub_statistic_test(word_y,attribute_words[a_attributes],attribute_words[b_attributes],M))
mean_x = test_x/len(target_words[x_words])
mean_y = test_y/len(target_words[y_words])
std_dev = numpy.std(samples)
effect_size = (mean_x - mean_y)/std_dev
return effect_size
# P-Value
def p_value(X,Y,A,B,model):
null_hipotese_evidance = 0.0
number_permitations = 0.0
# Finds the biggest possible set of the same size for the two classes
X_size = len(target_words[X])
Y_size = len(target_words[Y])
size = max(X_size, Y_size)
union = set(target_words[X] + target_words[Y])
random_test_statistic_values = []
test_statistic_value = statistic_test(target_words[X],target_words[Y],attribute_words[A],attribute_words[B],model)
if (Y_size + X_size) < 14:
# there will be less than 5000 combinations
permutations = itertools.combinations(union,size)
for i,permutation in enumerate(permutations):
x_i = permutation
y_i = union - set(permutation)
test_value = statistic_test(x_i,y_i,attribute_words[A],attribute_words[B],model)
random_test_statistic_values.append(test_value)
if( test_value > test_statistic_value):
null_hipotese_evidance += 1
number_permitations += 1
#print("null hipotese_evidance: " + str(null_hipotese_evidance))
#print("num_permutations: " + str(number_permitations))
#print("P-Value():")
#print(null_hipotese_evidance/number_permitations)
p_value_result = null_hipotese_evidance/number_permitations
#print("enviando " + str(p_value_result))
return(p_value_result)
else:
# There will be more than 5000, thus we should randomize
print("Generating 5k random")
classes = target_words[X] + target_words[Y]
for i in range(5000):
random.shuffle(classes)
x_i = classes[:size]
y_i = classes[size+1:]
test_value = statistic_test(x_i,y_i,attribute_words[A],attribute_words[B],model)
# save the valus to be used for each channel
random_test_statistic_values.append(test_value)
if( test_value > test_statistic_value):
null_hipotese_evidance += 1
number_permitations += 1
#if number_permitations % 100 == 0:
# print(number_permitations)
#print("null hipotese_evidance: " + str(null_hipotese_evidance))
#print("num_permutations: " + str(number_permitations))
#print("P-Value(english):")
#print(null_hipotese_evidance/number_permitations)
p_value_result = null_hipotese_evidance/number_permitations
return(p_value_result)
def main():
# Which models to load
political_biases_model = ["left", "leftcenter", "center", "right-center", "right"]
model_types = [ "captions", "comments"]
# list of WEATs to execute
weats = [1,2,3]
with open("../data/weat/weat_results.csv", "w") as csvfile:
writer = csv.writer(csvfile, delimiter=',')
writer.writerow(["channel","WEAT","political_bias", "source", "effect_size", "p_value"])
#for political_bias in political_biases_model:
# for model_type in model_types:
# for file in os.listdir("../models/biases/" + model_type + "/" + political_bias):
# if file.endswith(".model"):
# print("Loading " + political_bias + " word2vec " + model_type + " model " + "(" + file + ")")
# model = gensim.models.Word2Vec.load("../models/biases/" + model_type + "/" + political_bias+ "/" + file)
# #model = gensim.models.Word2Vec.load("../models/wiki-word2vec/wiki-en.word2vec.model")
# print("Executing WEATs on current model" )
# for weat_number in weats:
# X = str(weat_number) + "_a"
# Y = str(weat_number) + "_b"
# A = str(weat_number) + "_a"
# B = str(weat_number) + "_b"
# ## Effect size of the base model
# effect_size_result = effect_size(X,Y,A,B,model)
# print("Effect-Size("+str(weat_number)+ "):" + str(effect_size_result))
# p_value_result = p_value(X,Y,A,B,model)
# print("P-value("+str(weat_number)+ "):" + str(p_value_result))
# writer.writerow([file[:-6],weats_name[weat_number -1],political_bias , model_type, effect_size_result, p_value_result])
# Add the baseline weat results the wikipedia model
print("Loading the wiki base model")
model = gensim.models.Word2Vec.load("../models/wiki-word2vec/wiki-en.word2vec.model")
print("Executing WEATs on current model" )
for weat_number in weats:
X = str(weat_number) + "_a"
Y = str(weat_number) + "_b"
A = str(weat_number) + "_a"
B = str(weat_number) + "_b"
## Effect size of the base model
effect_size_result = effect_size(X,Y,A,B,model)
print("Effect-Size("+str(weat_number)+ "):" + str(effect_size_result))
p_value_result = p_value(X,Y,A,B,model)
print("P-value("+str(weat_number)+ "):" + str(p_value_result))
writer.writerow(["wikipedia",weats_name[weat_number -1], "wiki", "wiki", effect_size_result, p_value_result])
if __name__ == "__main__":
main()
| 43.733333 | 148 | 0.598816 | from gensim import corpora, models, similarities
import json
import numpy
import random
import csv
import itertools
import gensim, copy
from collections import defaultdict
import glob, os
target_words = {}
attribute_words= {}
weats_name = []
target_words["1_a"] = ["immigrant", "migrant"]
target_words["1_b"] = ["citizen", "native"]
attribute_words["1_a"] = ["bad", "burden", "pirate", "plague", "taker", "thief"]
attribute_words["1_b"] = ["good", "honest", "maker", "rightful"]
weats_name.append("Immigrants")
target_words["2_a"] = ["islamism", "muhammed", "muslim", "quran"]
target_words["2_b"] = ["bible", "christian", "christianity", "jesus"]
attribute_words["2_a"] = ["assassin", "attack", "bomb", "death", "murder", "radical", "terrorist"]
attribute_words["2_b"] = ["compassionate", "gentle", "humane", "kind", "tolerant"]
weats_name.append("Muslims")
target_words["3_a"] = ["bisexual", "gay", "homosexual", "lesbian"]
target_words["3_b"] = ["het", "hetero", "heterosexual", "straight"]
attribute_words["3_a"] = ["immoral", "outrageous", "promiscuous", "revolting", "sinner"]
attribute_words["3_b"] = ["moral", "natural", "normal"]
weats_name.append("LGBT People")
target_words["4_a"] = ["math","algebra","geometry","calculus","equations","computation","numbers" ]
target_words["4_b"] = ["poetry","art","dance","literature","novel","symphony","drama","sculpture" ]
attribute_words["4_a"] = ["male","man","boy","brother","he","him","his", "son"]
attribute_words["4_b"] = ["female","woman","girl","sister","she","her","hers", "daughter"]
weats_name.append("Math Arts")
target_words["5_a"] = ["church","bible","christian","christianity"]
target_words["5_b"] = ["synagogue","torah","jew","judaism"]
attribute_words["5_a"] = ["terrific","delight","celebrate","glad","beautiful","appealing", "pleasure", "enjoy" ]
attribute_words["5_b"] = ["hate","angry","detest","despise","dirty","humiliate","pain", "horrible"]
weats_name.append("Jewish")
target_words["6_a"] = ["brad", "brendan", "geoffrey", "greg", "brett", "jay",
"matthew", "neil", "todd", "allison", "anne", "carrie",
"emily", "jill", "laurie", "kristen", "meredith", "sarah"]
target_words["6_b"] = ["darnell", "hakim", "jermaine", "kareem", "jamal",
"leroy", "rasheed", "tremayne", "tyrone", "aisha",
"ebony", "keisha", "kenya", "latonya", "lakisha",
"latoya", "tamika", "tanisha"]
attribute_words["6_a"] = [ "joy", "love", "peace", "wonderful", "pleasure", "friend", "laughter", "happy"]
attribute_words["6_b"] = ["agony", "terrible", "horrible", "nasty", "evil", "war", "awful","failure"]
weats_name.append("African - 1")
target_words["7_a"] = ["brad", "brendan", "geoffrey", "greg", "brett", "jay",
"matthew", "neil", "todd", "allison", "anne", "carrie",
"emily", "jill", "laurie", "kristen", "meredith", "sarah"]
target_words["7_b"] = ["darnell", "hakim", "jermaine", "kareem", "jamal",
"leroy", "rasheed", "tremayne", "tyrone", "aisha",
"ebony", "keisha", "kenya", "latonya", "lakisha",
"latoya", "tamika", "tanisha"]
attribute_words["7_a"] = ["caress", "freedom", "health", "love", "peace",
"cheer", "friend", "heaven", "loyal", "pleasure",
"diamond", "gentle", "honest", "lucky", "rainbow",
"diploma", "gift", "honor", "miracle", "sunrise",
"family", "happy","laughter","paradise", "vacation"]
attribute_words["7_b"] = ["abuse", "crash", "filth", "murder", "sickness",
"accident", "death", "grief", "poison", "stink",
"assault", "disaster", "hatred","pollute", "tragedy",
"bomb", "divorce", "jail", "poverty", "ugly", "cancer",
"evil", "kill", "rotten","vomit"]
weats_name.append("African - 2")
def statistic_test(X,Y,A,B,M):
result = 0.0
sum_X = 0.0
sum_Y = 0.0
for word_X in X:
sum_X += sub_statistic_test(word_X, A,B,M)
for word_Y in Y:
sum_Y += sub_statistic_test(word_Y, A,B,M)
return (sum_X - sum_Y)
def sub_statistic_test(w,A,B,M):
result = 0.0
sum_cos_A = 0.0
sum_cos_B = 0.0
for word_A in A:
sum_cos_A += numpy.dot(M[w],M[word_A])/(numpy.linalg.norm(M[w])*numpy.linalg.norm(M[word_A]))
for word_B in B:
sum_cos_B += numpy.dot(M[w],M[word_B])/(numpy.linalg.norm(M[w])*numpy.linalg.norm(M[word_B]))
return (sum_cos_A/len(A) - sum_cos_B/len(B))
def effect_size(x_words,y_words,a_attributes,b_attributes,M):
test_x = 0.0
test_y = 0.0
samples = []
for word_x in target_words[x_words]:
test_x += sub_statistic_test(word_x,attribute_words[a_attributes],attribute_words[b_attributes],M)
samples.append(sub_statistic_test(word_x,attribute_words[a_attributes],attribute_words[b_attributes],M))
for word_y in target_words[y_words]:
test_y += sub_statistic_test(word_y,attribute_words[a_attributes],attribute_words[b_attributes],M)
samples.append(sub_statistic_test(word_y,attribute_words[a_attributes],attribute_words[b_attributes],M))
mean_x = test_x/len(target_words[x_words])
mean_y = test_y/len(target_words[y_words])
std_dev = numpy.std(samples)
effect_size = (mean_x - mean_y)/std_dev
return effect_size
def p_value(X,Y,A,B,model):
null_hipotese_evidance = 0.0
number_permitations = 0.0
X_size = len(target_words[X])
Y_size = len(target_words[Y])
size = max(X_size, Y_size)
union = set(target_words[X] + target_words[Y])
random_test_statistic_values = []
test_statistic_value = statistic_test(target_words[X],target_words[Y],attribute_words[A],attribute_words[B],model)
if (Y_size + X_size) < 14:
permutations = itertools.combinations(union,size)
for i,permutation in enumerate(permutations):
x_i = permutation
y_i = union - set(permutation)
test_value = statistic_test(x_i,y_i,attribute_words[A],attribute_words[B],model)
random_test_statistic_values.append(test_value)
if( test_value > test_statistic_value):
null_hipotese_evidance += 1
number_permitations += 1
p_value_result = null_hipotese_evidance/number_permitations
return(p_value_result)
else:
print("Generating 5k random")
classes = target_words[X] + target_words[Y]
for i in range(5000):
random.shuffle(classes)
x_i = classes[:size]
y_i = classes[size+1:]
test_value = statistic_test(x_i,y_i,attribute_words[A],attribute_words[B],model)
random_test_statistic_values.append(test_value)
if( test_value > test_statistic_value):
null_hipotese_evidance += 1
number_permitations += 1
p_value_result = null_hipotese_evidance/number_permitations
return(p_value_result)
def main():
political_biases_model = ["left", "leftcenter", "center", "right-center", "right"]
model_types = [ "captions", "comments"]
weats = [1,2,3]
with open("../data/weat/weat_results.csv", "w") as csvfile:
writer = csv.writer(csvfile, delimiter=',')
writer.writerow(["channel","WEAT","political_bias", "source", "effect_size", "p_value"])
base model")
model = gensim.models.Word2Vec.load("../models/wiki-word2vec/wiki-en.word2vec.model")
print("Executing WEATs on current model" )
for weat_number in weats:
X = str(weat_number) + "_a"
Y = str(weat_number) + "_b"
A = str(weat_number) + "_a"
B = str(weat_number) + "_b"
= effect_size(X,Y,A,B,model)
print("Effect-Size("+str(weat_number)+ "):" + str(effect_size_result))
p_value_result = p_value(X,Y,A,B,model)
print("P-value("+str(weat_number)+ "):" + str(p_value_result))
writer.writerow(["wikipedia",weats_name[weat_number -1], "wiki", "wiki", effect_size_result, p_value_result])
if __name__ == "__main__":
main()
| true | true |
f71f6c8fd9d986ab03b10daa79ec6a243a174abe | 1,152 | py | Python | cgi-bin/utils.py | alexander1389/IMS.WebAPI | cfc8c6c899655c337973f9a32a620e9cd6af34b9 | [
"MIT"
] | null | null | null | cgi-bin/utils.py | alexander1389/IMS.WebAPI | cfc8c6c899655c337973f9a32a620e9cd6af34b9 | [
"MIT"
] | null | null | null | cgi-bin/utils.py | alexander1389/IMS.WebAPI | cfc8c6c899655c337973f9a32a620e9cd6af34b9 | [
"MIT"
] | null | null | null | from datetime import datetime
def validate_dt(date):
""" Validate datetime string
:param date: The datetime string
:type date: str
:returns: True if the date is correct datetime string,
False otherwise
:rtype: bool
"""
pattern = '000101000000'
# letters in date
if not date.isdecimal():
return False
# at least year must be specified
if len(date) < 2 or len(date) > 12:
return False
if len(date) % 2 > 0:
return False
chk = date + pattern[len(date):]
try:
datetime.strptime(chk, '%y%m%d%H%M%S')
except ValueError:
return False
return True
if __name__ == '__main__':
print('\nDate Validator Check --- START')
print('------------------------------\n')
dates = [
'99', '1312', '010212', '200229', '131024122203', '0',
'03014', '01021312121222', '201301', '200230', '310131271212'
]
for date in dates:
print('%-15s - %s' % (date,
'valid' if validate_dt(date) else 'invalid'))
print('\n----------------------------')
print('Date Validator Check --- END\n')
| 22.588235 | 69 | 0.539063 | from datetime import datetime
def validate_dt(date):
pattern = '000101000000'
if not date.isdecimal():
return False
if len(date) < 2 or len(date) > 12:
return False
if len(date) % 2 > 0:
return False
chk = date + pattern[len(date):]
try:
datetime.strptime(chk, '%y%m%d%H%M%S')
except ValueError:
return False
return True
if __name__ == '__main__':
print('\nDate Validator Check --- START')
print('------------------------------\n')
dates = [
'99', '1312', '010212', '200229', '131024122203', '0',
'03014', '01021312121222', '201301', '200230', '310131271212'
]
for date in dates:
print('%-15s - %s' % (date,
'valid' if validate_dt(date) else 'invalid'))
print('\n----------------------------')
print('Date Validator Check --- END\n')
| true | true |
f71f6cf1e351242fc9e0d3e8fd6d87cf389216c6 | 383 | py | Python | mitre_attack/data/types/group.py | check-spelling/mitre-attack | f3be1ccff235593c4277f3b9ec2696757924894b | [
"MIT"
] | 1 | 2022-01-13T06:32:10.000Z | 2022-01-13T06:32:10.000Z | mitre_attack/data/types/group.py | check-spelling/mitre-attack | f3be1ccff235593c4277f3b9ec2696757924894b | [
"MIT"
] | null | null | null | mitre_attack/data/types/group.py | check-spelling/mitre-attack | f3be1ccff235593c4277f3b9ec2696757924894b | [
"MIT"
] | 1 | 2022-01-14T00:00:24.000Z | 2022-01-14T00:00:24.000Z | from dataclasses import dataclass, field
from typing import List
from mitre_attack import INTRUSION_SET
from mitre_attack.data.types.object import Object
@dataclass(frozen=True)
class Group(Object):
type: str = field(default=INTRUSION_SET, init=False)
name: str
aliases: List[str] = field(default_factory=list)
contributors: List[str] = field(default_factory=list)
| 29.461538 | 57 | 0.775457 | from dataclasses import dataclass, field
from typing import List
from mitre_attack import INTRUSION_SET
from mitre_attack.data.types.object import Object
@dataclass(frozen=True)
class Group(Object):
type: str = field(default=INTRUSION_SET, init=False)
name: str
aliases: List[str] = field(default_factory=list)
contributors: List[str] = field(default_factory=list)
| true | true |
f71f6d3f9666a930b13bac187344c124d81e2c1e | 31,993 | py | Python | electrum/gui/kivy/uix/dialogs/lightning_channels.py | jacky4566/electrum | f1c2191392780a559ecdc374c81c82191a5d1eb5 | [
"MIT"
] | null | null | null | electrum/gui/kivy/uix/dialogs/lightning_channels.py | jacky4566/electrum | f1c2191392780a559ecdc374c81c82191a5d1eb5 | [
"MIT"
] | null | null | null | electrum/gui/kivy/uix/dialogs/lightning_channels.py | jacky4566/electrum | f1c2191392780a559ecdc374c81c82191a5d1eb5 | [
"MIT"
] | null | null | null | import asyncio
from typing import TYPE_CHECKING, Optional, Union
from kivy.lang import Builder
from kivy.factory import Factory
from kivy.uix.popup import Popup
from .fee_dialog import FeeDialog
from electrum.util import bh2u
from electrum.logging import Logger
from electrum.lnutil import LOCAL, REMOTE, format_short_channel_id
from electrum.lnchannel import AbstractChannel, Channel, ChannelState
from electrum.gui.kivy.i18n import _
from .question import Question
from electrum.transaction import PartialTxOutput, Transaction
from electrum.util import NotEnoughFunds, NoDynamicFeeEstimates, format_fee_satoshis, quantize_feerate
from electrum.lnutil import ln_dummy_address
from electrum.gui import messages
from .qr_dialog import QRDialog
from .choice_dialog import ChoiceDialog
if TYPE_CHECKING:
from ...main_window import ElectrumWindow
from electrum import SimpleConfig
Builder.load_string(r'''
<SwapDialog@Popup>
id: popup
title: _('Lightning Swap')
size_hint: 0.8, 0.8
pos_hint: {'top':0.9}
mining_fee_text: ''
fee_rate_text: ''
method: 0
BoxLayout:
orientation: 'vertical'
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('You Send') + ':'
size_hint: 0.4, 1
Label:
id: send_amount_label
size_hint: 0.6, 1
text: _('0')
background_color: (0,0,0,0)
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('You Receive') + ':'
size_hint: 0.4, 1
Label:
id: receive_amount_label
text: _('0')
background_color: (0,0,0,0)
size_hint: 0.6, 1
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('Server Fee') + ':'
size_hint: 0.4, 1
Label:
id: server_fee_label
text: _('0')
background_color: (0,0,0,0)
size_hint: 0.6, 1
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
id: swap_action_label
text: _('Adds receiving capacity')
background_color: (0,0,0,0)
font_size: '14dp'
Slider:
id: swap_slider
range: 0, 4
step: 1
on_value: root.swap_slider_moved(self.value)
Widget:
size_hint: 1, 0.5
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('Mining Fee') + ':'
size_hint: 0.4, 1
Button:
text: root.mining_fee_text + ' (' + root.fee_rate_text + ')'
background_color: (0,0,0,0)
bold: True
on_release:
root.on_fee_button()
Widget:
size_hint: 1, 0.5
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
TopLabel:
id: fee_estimate
text: ''
font_size: '14dp'
Widget:
size_hint: 1, 0.5
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Button:
text: 'Cancel'
size_hint: 0.5, None
height: '48dp'
on_release: root.dismiss()
Button:
id: ok_button
text: 'OK'
size_hint: 0.5, None
height: '48dp'
on_release:
root.on_ok()
root.dismiss()
<LightningChannelItem@CardItem>
details: {}
active: False
short_channel_id: '<channelId not set>'
status: ''
is_backup: False
balances: ''
node_alias: ''
_chan: None
BoxLayout:
size_hint: 0.7, None
spacing: '8dp'
height: '32dp'
orientation: 'vertical'
Widget
CardLabel:
color: (.5,.5,.5,1) if not root.active else (1,1,1,1)
text: root.short_channel_id
font_size: '15sp'
Widget
CardLabel:
font_size: '13sp'
shorten: True
text: root.node_alias
Widget
BoxLayout:
size_hint: 0.3, None
spacing: '8dp'
height: '32dp'
orientation: 'vertical'
Widget
CardLabel:
text: root.status
font_size: '13sp'
halign: 'right'
Widget
CardLabel:
text: root.balances if not root.is_backup else ''
font_size: '13sp'
halign: 'right'
Widget
<LightningChannelsDialog@Popup>:
name: 'lightning_channels'
title: _('Lightning Network')
has_lightning: False
has_gossip: False
can_send: ''
can_receive: ''
num_channels_text: ''
id: popup
BoxLayout:
id: box
orientation: 'vertical'
spacing: '2dp'
padding: '12dp'
BoxLabel:
text: _('You can send') + ':'
value: root.can_send
BoxLabel:
text: _('You can receive') + ':'
value: root.can_receive
TopLabel:
text: root.num_channels_text
ScrollView:
GridLayout:
cols: 1
id: lightning_channels_container
size_hint: 1, None
height: self.minimum_height
spacing: '2dp'
BoxLayout:
size_hint: 1, None
height: '48dp'
Button:
size_hint: 0.3, None
height: '48dp'
text: _('Open Channel')
disabled: not root.has_lightning
on_release: popup.app.popup_dialog('lightning_open_channel_dialog')
Button:
size_hint: 0.3, None
height: '48dp'
text: _('Swap')
disabled: not root.has_lightning
on_release: popup.app.popup_dialog('swap_dialog')
Button:
size_hint: 0.3, None
height: '48dp'
text: _('Gossip')
disabled: not root.has_gossip
on_release: popup.app.popup_dialog('lightning')
<ChannelDetailsPopup@Popup>:
id: popuproot
data: []
is_closed: False
is_redeemed: False
node_id:''
short_id:''
initiator:''
capacity:''
funding_txid:''
closing_txid:''
state:''
local_ctn:0
remote_ctn:0
local_csv:0
remote_csv:0
feerate:''
can_send:''
can_receive:''
is_open:False
warning: ''
BoxLayout:
padding: '12dp', '12dp', '12dp', '12dp'
spacing: '12dp'
orientation: 'vertical'
ScrollView:
scroll_type: ['bars', 'content']
scroll_wheel_distance: dp(114)
BoxLayout:
orientation: 'vertical'
height: self.minimum_height
size_hint_y: None
spacing: '5dp'
TopLabel:
text: root.warning
color: .905, .709, .509, 1
BoxLabel:
text: _('Channel ID')
value: root.short_id
BoxLabel:
text: _('State')
value: root.state
BoxLabel:
text: _('Initiator')
value: root.initiator
BoxLabel:
text: _('Capacity')
value: root.capacity
BoxLabel:
text: _('Can send')
value: root.can_send if root.is_open else 'n/a'
BoxLabel:
text: _('Can receive')
value: root.can_receive if root.is_open else 'n/a'
BoxLabel:
text: _('CSV delay')
value: 'Local: %d\nRemote: %d' % (root.local_csv, root.remote_csv)
BoxLabel:
text: _('CTN')
value: 'Local: %d\nRemote: %d' % (root.local_ctn, root.remote_ctn)
BoxLabel:
text: _('Fee rate')
value: '{} sat/byte'.format(root.feerate)
Widget:
size_hint: 1, 0.1
TopLabel:
text: _('Remote Node ID')
TxHashLabel:
data: root.node_id
name: _('Remote Node ID')
TopLabel:
text: _('Funding Transaction')
TxHashLabel:
data: root.funding_txid
name: _('Funding Transaction')
touch_callback: lambda: app.show_transaction(root.funding_txid)
TopLabel:
text: _('Closing Transaction')
opacity: int(bool(root.closing_txid))
TxHashLabel:
opacity: int(bool(root.closing_txid))
data: root.closing_txid
name: _('Closing Transaction')
touch_callback: lambda: app.show_transaction(root.closing_txid)
Widget:
size_hint: 1, 0.1
Widget:
size_hint: 1, 0.05
BoxLayout:
size_hint: 1, None
height: '48dp'
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Backup')
on_release: root.export_backup()
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Close')
on_release: root.close()
disabled: root.is_closed
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Force-close')
on_release: root.force_close()
disabled: root.is_closed
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Delete')
on_release: root.remove_channel()
disabled: not root.is_redeemed
<ChannelBackupPopup@Popup>:
id: popuproot
data: []
is_funded: False
is_imported: False
node_id:''
short_id:''
initiator:''
capacity:''
funding_txid:''
closing_txid:''
state:''
is_open:False
BoxLayout:
padding: '12dp', '12dp', '12dp', '12dp'
spacing: '12dp'
orientation: 'vertical'
ScrollView:
scroll_type: ['bars', 'content']
scroll_wheel_distance: dp(114)
BoxLayout:
orientation: 'vertical'
height: self.minimum_height
size_hint_y: None
spacing: '5dp'
BoxLabel:
text: _('Channel ID')
value: root.short_id
BoxLabel:
text: _('State')
value: root.state
BoxLabel:
text: _('Initiator')
value: root.initiator
BoxLabel:
text: _('Capacity')
value: root.capacity
Widget:
size_hint: 1, 0.1
TopLabel:
text: _('Remote Node ID')
TxHashLabel:
data: root.node_id
name: _('Remote Node ID')
TopLabel:
text: _('Funding Transaction')
TxHashLabel:
data: root.funding_txid
name: _('Funding Transaction')
touch_callback: lambda: app.show_transaction(root.funding_txid)
TopLabel:
text: _('Closing Transaction')
opacity: int(bool(root.closing_txid))
TxHashLabel:
opacity: int(bool(root.closing_txid))
data: root.closing_txid
name: _('Closing Transaction')
touch_callback: lambda: app.show_transaction(root.closing_txid)
Widget:
size_hint: 1, 0.1
Widget:
size_hint: 1, 0.05
BoxLayout:
size_hint: 1, None
height: '48dp'
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Request force-close')
on_release: root.request_force_close()
disabled: not root.is_funded
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Delete')
on_release: root.remove_backup()
disabled: not root.is_imported
''')
class ChannelBackupPopup(Popup, Logger):
def __init__(self, chan: AbstractChannel, app, **kwargs):
Popup.__init__(self, **kwargs)
Logger.__init__(self)
self.chan = chan
self.is_funded = chan.get_state() == ChannelState.FUNDED
self.is_imported = chan.is_imported
self.funding_txid = chan.funding_outpoint.txid
self.app = app
self.short_id = format_short_channel_id(chan.short_channel_id)
self.capacity = self.app.format_amount_and_units(chan.get_capacity())
self.state = chan.get_state_for_GUI()
self.title = _('Channel Backup')
def request_force_close(self):
msg = _('Request force close?')
Question(msg, self._request_force_close).open()
def _request_force_close(self, b):
if not b:
return
loop = self.app.wallet.network.asyncio_loop
coro = asyncio.run_coroutine_threadsafe(self.app.wallet.lnworker.request_force_close_from_backup(self.chan.channel_id), loop)
try:
coro.result(5)
self.app.show_info(_('Request sent'))
except Exception as e:
self.logger.exception("Could not close channel")
self.app.show_info(_('Could not close channel: ') + repr(e)) # repr because str(Exception()) == ''
def remove_backup(self):
msg = _('Delete backup?')
Question(msg, self._remove_backup).open()
def _remove_backup(self, b):
if not b:
return
self.app.wallet.lnworker.remove_channel_backup(self.chan.channel_id)
self.dismiss()
class ChannelDetailsPopup(Popup, Logger):
def __init__(self, chan: Channel, app: 'ElectrumWindow', **kwargs):
Popup.__init__(self, **kwargs)
Logger.__init__(self)
self.is_closed = chan.is_closed()
self.is_redeemed = chan.is_redeemed()
self.app = app
self.chan = chan
self.title = _('Channel details')
self.node_id = bh2u(chan.node_id)
self.channel_id = bh2u(chan.channel_id)
self.funding_txid = chan.funding_outpoint.txid
self.short_id = format_short_channel_id(chan.short_channel_id)
self.capacity = self.app.format_amount_and_units(chan.get_capacity())
self.state = chan.get_state_for_GUI()
self.local_ctn = chan.get_latest_ctn(LOCAL)
self.remote_ctn = chan.get_latest_ctn(REMOTE)
self.local_csv = chan.config[LOCAL].to_self_delay
self.remote_csv = chan.config[REMOTE].to_self_delay
self.initiator = 'Local' if chan.constraints.is_initiator else 'Remote'
feerate_kw = chan.get_latest_feerate(LOCAL)
self.feerate = str(quantize_feerate(Transaction.satperbyte_from_satperkw(feerate_kw)))
self.can_send = self.app.format_amount_and_units(chan.available_to_spend(LOCAL) // 1000)
self.can_receive = self.app.format_amount_and_units(chan.available_to_spend(REMOTE) // 1000)
self.is_open = chan.is_open()
closed = chan.get_closing_height()
if closed:
self.closing_txid, closing_height, closing_timestamp = closed
msg = ' '.join([
_("Trampoline routing is enabled, but this channel is with a non-trampoline node."),
_("This channel may still be used for receiving, but it is frozen for sending."),
_("If you want to keep using this channel, you need to disable trampoline routing in your preferences."),
])
self.warning = '' if self.app.wallet.lnworker.channel_db or self.app.wallet.lnworker.is_trampoline_peer(chan.node_id) else _('Warning') + ': ' + msg
def close(self):
dialog = ChoiceDialog(
title=_('Close channel'),
choices={0:_('Cooperative close'), 1:_('Request force-close')}, key=0,
callback=self._close,
description=_(messages.MSG_REQUEST_FORCE_CLOSE),
keep_choice_order=True)
dialog.open()
def _close(self, choice):
loop = self.app.wallet.network.asyncio_loop
if choice == 1:
coro = self.app.wallet.lnworker.request_force_close_from_backup(self.chan.channel_id)
msg = _('Request sent')
else:
coro = self.app.wallet.lnworker.close_channel(self.chan.channel_id)
msg = _('Channel closed')
f = asyncio.run_coroutine_threadsafe(coro, loop)
try:
f.result(5)
self.app.show_info(msg)
except Exception as e:
self.logger.exception("Could not close channel")
self.app.show_info(_('Could not close channel: ') + repr(e)) # repr because str(Exception()) == ''
def remove_channel(self):
msg = _('Are you sure you want to delete this channel? This will purge associated transactions from your wallet history.')
Question(msg, self._remove_channel).open()
def _remove_channel(self, b):
if not b:
return
self.app.wallet.lnworker.remove_channel(self.chan.channel_id)
self.app._trigger_update_history()
self.dismiss()
def export_backup(self):
text = self.app.wallet.lnworker.export_channel_backup(self.chan.channel_id)
# TODO: some messages are duplicated between Kivy and Qt.
help_text = ' '.join([
_("Channel backups can be imported in another instance of the same wallet, by scanning this QR code."),
_("Please note that channel backups cannot be used to restore your channels."),
_("If you lose your wallet file, the only thing you can do with a backup is to request your channel to be closed, so that your funds will be sent on-chain."),
])
self.app.qr_dialog(_("Channel Backup " + self.chan.short_id_for_GUI()), text, help_text=help_text)
def force_close(self):
if self.chan.is_closed():
self.app.show_error(_('Channel already closed'))
return
to_self_delay = self.chan.config[REMOTE].to_self_delay
help_text = ' '.join([
_('If you force-close this channel, the funds you have in it will not be available for {} blocks.').format(to_self_delay),
_('During that time, funds will not be recoverable from your seed, and may be lost if you lose your device.'),
_('To prevent that, please save this channel backup.'),
_('It may be imported in another wallet with the same seed.')
])
title = _('Save backup and force-close')
data = self.app.wallet.lnworker.export_channel_backup(self.chan.channel_id)
popup = QRDialog(
title, data,
show_text=False,
text_for_clipboard=data,
help_text=help_text,
close_button_text=_('Next'),
on_close=self._confirm_force_close)
popup.open()
def _confirm_force_close(self):
Question(
_('Confirm force close?'),
self._do_force_close,
title=_('Force-close channel'),
no_str=_('Cancel'),
yes_str=_('Proceed')).open()
def _do_force_close(self, b):
if not b:
return
loop = self.app.wallet.network.asyncio_loop
coro = asyncio.run_coroutine_threadsafe(self.app.wallet.lnworker.force_close_channel(self.chan.channel_id), loop)
try:
coro.result(1)
self.app.show_info(_('Channel closed, you may need to wait at least {} blocks, because of CSV delays'.format(self.chan.config[REMOTE].to_self_delay)))
except Exception as e:
self.logger.exception("Could not force close channel")
self.app.show_info(_('Could not force close channel: ') + repr(e)) # repr because str(Exception()) == ''
class LightningChannelsDialog(Factory.Popup):
def __init__(self, app: 'ElectrumWindow'):
super(LightningChannelsDialog, self).__init__()
self.clocks = []
self.app = app
self.has_lightning = app.wallet.has_lightning()
self.has_gossip = self.app.network.channel_db is not None
self.update()
def show_item(self, obj):
chan = obj._chan
if chan.is_backup():
p = ChannelBackupPopup(chan, self.app)
else:
p = ChannelDetailsPopup(chan, self.app)
p.open()
def format_fields(self, chan):
labels = {}
for subject in (REMOTE, LOCAL):
bal_minus_htlcs = chan.balance_minus_outgoing_htlcs(subject)//1000
label = self.app.format_amount(bal_minus_htlcs)
other = subject.inverted()
bal_other = chan.balance(other)//1000
bal_minus_htlcs_other = chan.balance_minus_outgoing_htlcs(other)//1000
if bal_other != bal_minus_htlcs_other:
label += ' (+' + self.app.format_amount(bal_other - bal_minus_htlcs_other) + ')'
labels[subject] = label
closed = chan.is_closed()
return [
'n/a' if closed else labels[LOCAL],
'n/a' if closed else labels[REMOTE],
]
def update_item(self, item):
chan = item._chan
item.status = chan.get_state_for_GUI()
item.short_channel_id = chan.short_id_for_GUI()
l, r = self.format_fields(chan)
item.balances = l + '/' + r
self.update_can_send()
def update(self):
channel_cards = self.ids.lightning_channels_container
channel_cards.clear_widgets()
if not self.app.wallet:
return
lnworker = self.app.wallet.lnworker
channels = list(lnworker.channels.values()) if lnworker else []
backups = list(lnworker.channel_backups.values()) if lnworker else []
for i in channels + backups:
item = Factory.LightningChannelItem()
item.screen = self
item.active = not i.is_closed()
item.is_backup = i.is_backup()
item._chan = i
item.node_alias = lnworker.get_node_alias(i.node_id) or i.node_id.hex()
self.update_item(item)
channel_cards.add_widget(item)
self.update_can_send()
def update_can_send(self):
lnworker = self.app.wallet.lnworker
if not lnworker:
self.can_send = 'n/a'
self.can_receive = 'n/a'
return
self.num_channels_text = _(f'You have {len(lnworker.channels)} channels.')
self.can_send = self.app.format_amount_and_units(lnworker.num_sats_can_send())
self.can_receive = self.app.format_amount_and_units(lnworker.num_sats_can_receive())
# Swaps should be done in due time which is why we recommend a certain fee.
RECOMMEND_BLOCKS_SWAP = 25
class SwapDialog(Factory.Popup):
def __init__(self, app: 'ElectrumWindow', config: 'SimpleConfig'):
super(SwapDialog, self).__init__()
self.app = app
self.config = config
self.fmt_amt = self.app.format_amount_and_units
self.lnworker = self.app.wallet.lnworker
# swap related
self.swap_manager = self.lnworker.swap_manager
self.send_amount: Optional[int] = None
self.receive_amount: Optional[int] = None
self.tx = None # only for forward swap
self.is_reverse = None
# init swaps and sliders
asyncio.run(self.swap_manager.get_pairs())
self.update_and_init()
def update_and_init(self):
self.update_fee_text()
self.update_swap_slider()
self.swap_slider_moved(0)
def on_fee_button(self):
fee_dialog = FeeDialog(self, self.config, self.after_fee_changed)
fee_dialog.open()
def after_fee_changed(self):
self.update_fee_text()
self.update_swap_slider()
self.swap_slider_moved(self.ids.swap_slider.value)
def update_fee_text(self):
fee_per_kb = self.config.fee_per_kb()
# eta is -1 when block inclusion cannot be estimated for low fees
eta = self.config.fee_to_eta(fee_per_kb)
fee_per_b = format_fee_satoshis(fee_per_kb / 1000)
suggest_fee = self.config.eta_target_to_fee(RECOMMEND_BLOCKS_SWAP)
suggest_fee_per_b = format_fee_satoshis(suggest_fee / 1000)
s = 's' if eta > 1 else ''
if eta > RECOMMEND_BLOCKS_SWAP or eta == -1:
msg = f'Warning: Your fee rate of {fee_per_b} sat/B may be too ' \
f'low for the swap to succeed before its timeout. ' \
f'The recommended fee rate is at least {suggest_fee_per_b} ' \
f'sat/B.'
else:
msg = f'Info: Your swap is estimated to be processed in {eta} ' \
f'block{s} with an onchain fee rate of {fee_per_b} sat/B.'
self.fee_rate_text = f'{fee_per_b} sat/B'
self.ids.fee_estimate.text = msg
def update_tx(self, onchain_amount: Union[int, str]):
"""Updates the transaction associated with a forward swap."""
if onchain_amount is None:
self.tx = None
self.ids.ok_button.disabled = True
return
outputs = [PartialTxOutput.from_address_and_value(ln_dummy_address(), onchain_amount)]
coins = self.app.wallet.get_spendable_coins(None)
try:
self.tx = self.app.wallet.make_unsigned_transaction(
coins=coins,
outputs=outputs)
except (NotEnoughFunds, NoDynamicFeeEstimates):
self.tx = None
self.ids.ok_button.disabled = True
def update_swap_slider(self):
"""Sets the minimal and maximal amount that can be swapped for the swap
slider."""
# tx is updated again afterwards with send_amount in case of normal swap
# this is just to estimate the maximal spendable onchain amount for HTLC
self.update_tx('!')
try:
max_onchain_spend = self.tx.output_value_for_address(ln_dummy_address())
except AttributeError: # happens if there are no utxos
max_onchain_spend = 0
reverse = int(min(self.lnworker.num_sats_can_send(),
self.swap_manager.get_max_amount()))
forward = int(min(self.lnworker.num_sats_can_receive(),
# maximally supported swap amount by provider
self.swap_manager.get_max_amount(),
max_onchain_spend))
# we expect range to adjust the value of the swap slider to be in the
# correct range, i.e., to correct an overflow when reducing the limits
self.ids.swap_slider.range = (-reverse, forward)
def swap_slider_moved(self, position: float):
position = int(position)
# pay_amount and receive_amounts are always with fees already included
# so they reflect the net balance change after the swap
if position < 0: # reverse swap
self.ids.swap_action_label.text = "Adds Lightning receiving capacity."
self.is_reverse = True
pay_amount = abs(position)
self.send_amount = pay_amount
self.ids.send_amount_label.text = \
f"{self.fmt_amt(pay_amount)} (offchain)" if pay_amount else ""
receive_amount = self.swap_manager.get_recv_amount(
send_amount=pay_amount, is_reverse=True)
self.receive_amount = receive_amount
self.ids.receive_amount_label.text = \
f"{self.fmt_amt(receive_amount)} (onchain)" if receive_amount else ""
# fee breakdown
self.ids.server_fee_label.text = \
f"{self.swap_manager.percentage:0.1f}% + {self.fmt_amt(self.swap_manager.lockup_fee)}"
self.mining_fee_text = \
f"{self.fmt_amt(self.swap_manager.get_claim_fee())}"
else: # forward (normal) swap
self.ids.swap_action_label.text = f"Adds Lightning sending capacity."
self.is_reverse = False
self.send_amount = position
self.update_tx(self.send_amount)
# add lockup fees, but the swap amount is position
pay_amount = position + self.tx.get_fee() if self.tx else 0
self.ids.send_amount_label.text = \
f"{self.fmt_amt(pay_amount)} (onchain)" if self.fmt_amt(pay_amount) else ""
receive_amount = self.swap_manager.get_recv_amount(
send_amount=position, is_reverse=False)
self.receive_amount = receive_amount
self.ids.receive_amount_label.text = \
f"{self.fmt_amt(receive_amount)} (offchain)" if receive_amount else ""
# fee breakdown
self.ids.server_fee_label.text = \
f"{self.swap_manager.percentage:0.1f}% + {self.fmt_amt(self.swap_manager.normal_fee)}"
self.mining_fee_text = \
f"{self.fmt_amt(self.tx.get_fee())}" if self.tx else ""
if pay_amount and receive_amount:
self.ids.ok_button.disabled = False
else:
# add more nuanced error reporting?
self.ids.swap_action_label.text = "Swap below minimal swap size, change the slider."
self.ids.ok_button.disabled = True
def do_normal_swap(self, lightning_amount, onchain_amount, password):
tx = self.tx
assert tx
if lightning_amount is None or onchain_amount is None:
return
loop = self.app.network.asyncio_loop
coro = self.swap_manager.normal_swap(
lightning_amount_sat=lightning_amount,
expected_onchain_amount_sat=onchain_amount,
password=password,
tx=tx,
)
asyncio.run_coroutine_threadsafe(coro, loop)
def do_reverse_swap(self, lightning_amount, onchain_amount, password):
if lightning_amount is None or onchain_amount is None:
return
loop = self.app.network.asyncio_loop
coro = self.swap_manager.reverse_swap(
lightning_amount_sat=lightning_amount,
expected_onchain_amount_sat=onchain_amount + self.swap_manager.get_claim_fee(),
)
asyncio.run_coroutine_threadsafe(coro, loop)
def on_ok(self):
if not self.app.network:
self.window.show_error(_("You are offline."))
return
if self.is_reverse:
lightning_amount = self.send_amount
onchain_amount = self.receive_amount
self.app.protected(
'Do you want to do a reverse submarine swap?',
self.do_reverse_swap, (lightning_amount, onchain_amount))
else:
lightning_amount = self.receive_amount
onchain_amount = self.send_amount
self.app.protected(
'Do you want to do a submarine swap? '
'You will need to wait for the swap transaction to confirm.',
self.do_normal_swap, (lightning_amount, onchain_amount))
| 37.772137 | 170 | 0.568218 | import asyncio
from typing import TYPE_CHECKING, Optional, Union
from kivy.lang import Builder
from kivy.factory import Factory
from kivy.uix.popup import Popup
from .fee_dialog import FeeDialog
from electrum.util import bh2u
from electrum.logging import Logger
from electrum.lnutil import LOCAL, REMOTE, format_short_channel_id
from electrum.lnchannel import AbstractChannel, Channel, ChannelState
from electrum.gui.kivy.i18n import _
from .question import Question
from electrum.transaction import PartialTxOutput, Transaction
from electrum.util import NotEnoughFunds, NoDynamicFeeEstimates, format_fee_satoshis, quantize_feerate
from electrum.lnutil import ln_dummy_address
from electrum.gui import messages
from .qr_dialog import QRDialog
from .choice_dialog import ChoiceDialog
if TYPE_CHECKING:
from ...main_window import ElectrumWindow
from electrum import SimpleConfig
Builder.load_string(r'''
<SwapDialog@Popup>
id: popup
title: _('Lightning Swap')
size_hint: 0.8, 0.8
pos_hint: {'top':0.9}
mining_fee_text: ''
fee_rate_text: ''
method: 0
BoxLayout:
orientation: 'vertical'
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('You Send') + ':'
size_hint: 0.4, 1
Label:
id: send_amount_label
size_hint: 0.6, 1
text: _('0')
background_color: (0,0,0,0)
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('You Receive') + ':'
size_hint: 0.4, 1
Label:
id: receive_amount_label
text: _('0')
background_color: (0,0,0,0)
size_hint: 0.6, 1
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('Server Fee') + ':'
size_hint: 0.4, 1
Label:
id: server_fee_label
text: _('0')
background_color: (0,0,0,0)
size_hint: 0.6, 1
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
id: swap_action_label
text: _('Adds receiving capacity')
background_color: (0,0,0,0)
font_size: '14dp'
Slider:
id: swap_slider
range: 0, 4
step: 1
on_value: root.swap_slider_moved(self.value)
Widget:
size_hint: 1, 0.5
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('Mining Fee') + ':'
size_hint: 0.4, 1
Button:
text: root.mining_fee_text + ' (' + root.fee_rate_text + ')'
background_color: (0,0,0,0)
bold: True
on_release:
root.on_fee_button()
Widget:
size_hint: 1, 0.5
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
TopLabel:
id: fee_estimate
text: ''
font_size: '14dp'
Widget:
size_hint: 1, 0.5
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Button:
text: 'Cancel'
size_hint: 0.5, None
height: '48dp'
on_release: root.dismiss()
Button:
id: ok_button
text: 'OK'
size_hint: 0.5, None
height: '48dp'
on_release:
root.on_ok()
root.dismiss()
<LightningChannelItem@CardItem>
details: {}
active: False
short_channel_id: '<channelId not set>'
status: ''
is_backup: False
balances: ''
node_alias: ''
_chan: None
BoxLayout:
size_hint: 0.7, None
spacing: '8dp'
height: '32dp'
orientation: 'vertical'
Widget
CardLabel:
color: (.5,.5,.5,1) if not root.active else (1,1,1,1)
text: root.short_channel_id
font_size: '15sp'
Widget
CardLabel:
font_size: '13sp'
shorten: True
text: root.node_alias
Widget
BoxLayout:
size_hint: 0.3, None
spacing: '8dp'
height: '32dp'
orientation: 'vertical'
Widget
CardLabel:
text: root.status
font_size: '13sp'
halign: 'right'
Widget
CardLabel:
text: root.balances if not root.is_backup else ''
font_size: '13sp'
halign: 'right'
Widget
<LightningChannelsDialog@Popup>:
name: 'lightning_channels'
title: _('Lightning Network')
has_lightning: False
has_gossip: False
can_send: ''
can_receive: ''
num_channels_text: ''
id: popup
BoxLayout:
id: box
orientation: 'vertical'
spacing: '2dp'
padding: '12dp'
BoxLabel:
text: _('You can send') + ':'
value: root.can_send
BoxLabel:
text: _('You can receive') + ':'
value: root.can_receive
TopLabel:
text: root.num_channels_text
ScrollView:
GridLayout:
cols: 1
id: lightning_channels_container
size_hint: 1, None
height: self.minimum_height
spacing: '2dp'
BoxLayout:
size_hint: 1, None
height: '48dp'
Button:
size_hint: 0.3, None
height: '48dp'
text: _('Open Channel')
disabled: not root.has_lightning
on_release: popup.app.popup_dialog('lightning_open_channel_dialog')
Button:
size_hint: 0.3, None
height: '48dp'
text: _('Swap')
disabled: not root.has_lightning
on_release: popup.app.popup_dialog('swap_dialog')
Button:
size_hint: 0.3, None
height: '48dp'
text: _('Gossip')
disabled: not root.has_gossip
on_release: popup.app.popup_dialog('lightning')
<ChannelDetailsPopup@Popup>:
id: popuproot
data: []
is_closed: False
is_redeemed: False
node_id:''
short_id:''
initiator:''
capacity:''
funding_txid:''
closing_txid:''
state:''
local_ctn:0
remote_ctn:0
local_csv:0
remote_csv:0
feerate:''
can_send:''
can_receive:''
is_open:False
warning: ''
BoxLayout:
padding: '12dp', '12dp', '12dp', '12dp'
spacing: '12dp'
orientation: 'vertical'
ScrollView:
scroll_type: ['bars', 'content']
scroll_wheel_distance: dp(114)
BoxLayout:
orientation: 'vertical'
height: self.minimum_height
size_hint_y: None
spacing: '5dp'
TopLabel:
text: root.warning
color: .905, .709, .509, 1
BoxLabel:
text: _('Channel ID')
value: root.short_id
BoxLabel:
text: _('State')
value: root.state
BoxLabel:
text: _('Initiator')
value: root.initiator
BoxLabel:
text: _('Capacity')
value: root.capacity
BoxLabel:
text: _('Can send')
value: root.can_send if root.is_open else 'n/a'
BoxLabel:
text: _('Can receive')
value: root.can_receive if root.is_open else 'n/a'
BoxLabel:
text: _('CSV delay')
value: 'Local: %d\nRemote: %d' % (root.local_csv, root.remote_csv)
BoxLabel:
text: _('CTN')
value: 'Local: %d\nRemote: %d' % (root.local_ctn, root.remote_ctn)
BoxLabel:
text: _('Fee rate')
value: '{} sat/byte'.format(root.feerate)
Widget:
size_hint: 1, 0.1
TopLabel:
text: _('Remote Node ID')
TxHashLabel:
data: root.node_id
name: _('Remote Node ID')
TopLabel:
text: _('Funding Transaction')
TxHashLabel:
data: root.funding_txid
name: _('Funding Transaction')
touch_callback: lambda: app.show_transaction(root.funding_txid)
TopLabel:
text: _('Closing Transaction')
opacity: int(bool(root.closing_txid))
TxHashLabel:
opacity: int(bool(root.closing_txid))
data: root.closing_txid
name: _('Closing Transaction')
touch_callback: lambda: app.show_transaction(root.closing_txid)
Widget:
size_hint: 1, 0.1
Widget:
size_hint: 1, 0.05
BoxLayout:
size_hint: 1, None
height: '48dp'
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Backup')
on_release: root.export_backup()
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Close')
on_release: root.close()
disabled: root.is_closed
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Force-close')
on_release: root.force_close()
disabled: root.is_closed
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Delete')
on_release: root.remove_channel()
disabled: not root.is_redeemed
<ChannelBackupPopup@Popup>:
id: popuproot
data: []
is_funded: False
is_imported: False
node_id:''
short_id:''
initiator:''
capacity:''
funding_txid:''
closing_txid:''
state:''
is_open:False
BoxLayout:
padding: '12dp', '12dp', '12dp', '12dp'
spacing: '12dp'
orientation: 'vertical'
ScrollView:
scroll_type: ['bars', 'content']
scroll_wheel_distance: dp(114)
BoxLayout:
orientation: 'vertical'
height: self.minimum_height
size_hint_y: None
spacing: '5dp'
BoxLabel:
text: _('Channel ID')
value: root.short_id
BoxLabel:
text: _('State')
value: root.state
BoxLabel:
text: _('Initiator')
value: root.initiator
BoxLabel:
text: _('Capacity')
value: root.capacity
Widget:
size_hint: 1, 0.1
TopLabel:
text: _('Remote Node ID')
TxHashLabel:
data: root.node_id
name: _('Remote Node ID')
TopLabel:
text: _('Funding Transaction')
TxHashLabel:
data: root.funding_txid
name: _('Funding Transaction')
touch_callback: lambda: app.show_transaction(root.funding_txid)
TopLabel:
text: _('Closing Transaction')
opacity: int(bool(root.closing_txid))
TxHashLabel:
opacity: int(bool(root.closing_txid))
data: root.closing_txid
name: _('Closing Transaction')
touch_callback: lambda: app.show_transaction(root.closing_txid)
Widget:
size_hint: 1, 0.1
Widget:
size_hint: 1, 0.05
BoxLayout:
size_hint: 1, None
height: '48dp'
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Request force-close')
on_release: root.request_force_close()
disabled: not root.is_funded
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Delete')
on_release: root.remove_backup()
disabled: not root.is_imported
''')
class ChannelBackupPopup(Popup, Logger):
def __init__(self, chan: AbstractChannel, app, **kwargs):
Popup.__init__(self, **kwargs)
Logger.__init__(self)
self.chan = chan
self.is_funded = chan.get_state() == ChannelState.FUNDED
self.is_imported = chan.is_imported
self.funding_txid = chan.funding_outpoint.txid
self.app = app
self.short_id = format_short_channel_id(chan.short_channel_id)
self.capacity = self.app.format_amount_and_units(chan.get_capacity())
self.state = chan.get_state_for_GUI()
self.title = _('Channel Backup')
def request_force_close(self):
msg = _('Request force close?')
Question(msg, self._request_force_close).open()
def _request_force_close(self, b):
if not b:
return
loop = self.app.wallet.network.asyncio_loop
coro = asyncio.run_coroutine_threadsafe(self.app.wallet.lnworker.request_force_close_from_backup(self.chan.channel_id), loop)
try:
coro.result(5)
self.app.show_info(_('Request sent'))
except Exception as e:
self.logger.exception("Could not close channel")
self.app.show_info(_('Could not close channel: ') + repr(e))
def remove_backup(self):
msg = _('Delete backup?')
Question(msg, self._remove_backup).open()
def _remove_backup(self, b):
if not b:
return
self.app.wallet.lnworker.remove_channel_backup(self.chan.channel_id)
self.dismiss()
class ChannelDetailsPopup(Popup, Logger):
def __init__(self, chan: Channel, app: 'ElectrumWindow', **kwargs):
Popup.__init__(self, **kwargs)
Logger.__init__(self)
self.is_closed = chan.is_closed()
self.is_redeemed = chan.is_redeemed()
self.app = app
self.chan = chan
self.title = _('Channel details')
self.node_id = bh2u(chan.node_id)
self.channel_id = bh2u(chan.channel_id)
self.funding_txid = chan.funding_outpoint.txid
self.short_id = format_short_channel_id(chan.short_channel_id)
self.capacity = self.app.format_amount_and_units(chan.get_capacity())
self.state = chan.get_state_for_GUI()
self.local_ctn = chan.get_latest_ctn(LOCAL)
self.remote_ctn = chan.get_latest_ctn(REMOTE)
self.local_csv = chan.config[LOCAL].to_self_delay
self.remote_csv = chan.config[REMOTE].to_self_delay
self.initiator = 'Local' if chan.constraints.is_initiator else 'Remote'
feerate_kw = chan.get_latest_feerate(LOCAL)
self.feerate = str(quantize_feerate(Transaction.satperbyte_from_satperkw(feerate_kw)))
self.can_send = self.app.format_amount_and_units(chan.available_to_spend(LOCAL) // 1000)
self.can_receive = self.app.format_amount_and_units(chan.available_to_spend(REMOTE) // 1000)
self.is_open = chan.is_open()
closed = chan.get_closing_height()
if closed:
self.closing_txid, closing_height, closing_timestamp = closed
msg = ' '.join([
_("Trampoline routing is enabled, but this channel is with a non-trampoline node."),
_("This channel may still be used for receiving, but it is frozen for sending."),
_("If you want to keep using this channel, you need to disable trampoline routing in your preferences."),
])
self.warning = '' if self.app.wallet.lnworker.channel_db or self.app.wallet.lnworker.is_trampoline_peer(chan.node_id) else _('Warning') + ': ' + msg
def close(self):
dialog = ChoiceDialog(
title=_('Close channel'),
choices={0:_('Cooperative close'), 1:_('Request force-close')}, key=0,
callback=self._close,
description=_(messages.MSG_REQUEST_FORCE_CLOSE),
keep_choice_order=True)
dialog.open()
def _close(self, choice):
loop = self.app.wallet.network.asyncio_loop
if choice == 1:
coro = self.app.wallet.lnworker.request_force_close_from_backup(self.chan.channel_id)
msg = _('Request sent')
else:
coro = self.app.wallet.lnworker.close_channel(self.chan.channel_id)
msg = _('Channel closed')
f = asyncio.run_coroutine_threadsafe(coro, loop)
try:
f.result(5)
self.app.show_info(msg)
except Exception as e:
self.logger.exception("Could not close channel")
self.app.show_info(_('Could not close channel: ') + repr(e))
def remove_channel(self):
msg = _('Are you sure you want to delete this channel? This will purge associated transactions from your wallet history.')
Question(msg, self._remove_channel).open()
def _remove_channel(self, b):
if not b:
return
self.app.wallet.lnworker.remove_channel(self.chan.channel_id)
self.app._trigger_update_history()
self.dismiss()
def export_backup(self):
text = self.app.wallet.lnworker.export_channel_backup(self.chan.channel_id)
help_text = ' '.join([
_("Channel backups can be imported in another instance of the same wallet, by scanning this QR code."),
_("Please note that channel backups cannot be used to restore your channels."),
_("If you lose your wallet file, the only thing you can do with a backup is to request your channel to be closed, so that your funds will be sent on-chain."),
])
self.app.qr_dialog(_("Channel Backup " + self.chan.short_id_for_GUI()), text, help_text=help_text)
def force_close(self):
if self.chan.is_closed():
self.app.show_error(_('Channel already closed'))
return
to_self_delay = self.chan.config[REMOTE].to_self_delay
help_text = ' '.join([
_('If you force-close this channel, the funds you have in it will not be available for {} blocks.').format(to_self_delay),
_('During that time, funds will not be recoverable from your seed, and may be lost if you lose your device.'),
_('To prevent that, please save this channel backup.'),
_('It may be imported in another wallet with the same seed.')
])
title = _('Save backup and force-close')
data = self.app.wallet.lnworker.export_channel_backup(self.chan.channel_id)
popup = QRDialog(
title, data,
show_text=False,
text_for_clipboard=data,
help_text=help_text,
close_button_text=_('Next'),
on_close=self._confirm_force_close)
popup.open()
def _confirm_force_close(self):
Question(
_('Confirm force close?'),
self._do_force_close,
title=_('Force-close channel'),
no_str=_('Cancel'),
yes_str=_('Proceed')).open()
def _do_force_close(self, b):
if not b:
return
loop = self.app.wallet.network.asyncio_loop
coro = asyncio.run_coroutine_threadsafe(self.app.wallet.lnworker.force_close_channel(self.chan.channel_id), loop)
try:
coro.result(1)
self.app.show_info(_('Channel closed, you may need to wait at least {} blocks, because of CSV delays'.format(self.chan.config[REMOTE].to_self_delay)))
except Exception as e:
self.logger.exception("Could not force close channel")
self.app.show_info(_('Could not force close channel: ') + repr(e))
class LightningChannelsDialog(Factory.Popup):
def __init__(self, app: 'ElectrumWindow'):
super(LightningChannelsDialog, self).__init__()
self.clocks = []
self.app = app
self.has_lightning = app.wallet.has_lightning()
self.has_gossip = self.app.network.channel_db is not None
self.update()
def show_item(self, obj):
chan = obj._chan
if chan.is_backup():
p = ChannelBackupPopup(chan, self.app)
else:
p = ChannelDetailsPopup(chan, self.app)
p.open()
def format_fields(self, chan):
labels = {}
for subject in (REMOTE, LOCAL):
bal_minus_htlcs = chan.balance_minus_outgoing_htlcs(subject)//1000
label = self.app.format_amount(bal_minus_htlcs)
other = subject.inverted()
bal_other = chan.balance(other)//1000
bal_minus_htlcs_other = chan.balance_minus_outgoing_htlcs(other)//1000
if bal_other != bal_minus_htlcs_other:
label += ' (+' + self.app.format_amount(bal_other - bal_minus_htlcs_other) + ')'
labels[subject] = label
closed = chan.is_closed()
return [
'n/a' if closed else labels[LOCAL],
'n/a' if closed else labels[REMOTE],
]
def update_item(self, item):
chan = item._chan
item.status = chan.get_state_for_GUI()
item.short_channel_id = chan.short_id_for_GUI()
l, r = self.format_fields(chan)
item.balances = l + '/' + r
self.update_can_send()
def update(self):
channel_cards = self.ids.lightning_channels_container
channel_cards.clear_widgets()
if not self.app.wallet:
return
lnworker = self.app.wallet.lnworker
channels = list(lnworker.channels.values()) if lnworker else []
backups = list(lnworker.channel_backups.values()) if lnworker else []
for i in channels + backups:
item = Factory.LightningChannelItem()
item.screen = self
item.active = not i.is_closed()
item.is_backup = i.is_backup()
item._chan = i
item.node_alias = lnworker.get_node_alias(i.node_id) or i.node_id.hex()
self.update_item(item)
channel_cards.add_widget(item)
self.update_can_send()
def update_can_send(self):
lnworker = self.app.wallet.lnworker
if not lnworker:
self.can_send = 'n/a'
self.can_receive = 'n/a'
return
self.num_channels_text = _(f'You have {len(lnworker.channels)} channels.')
self.can_send = self.app.format_amount_and_units(lnworker.num_sats_can_send())
self.can_receive = self.app.format_amount_and_units(lnworker.num_sats_can_receive())
RECOMMEND_BLOCKS_SWAP = 25
class SwapDialog(Factory.Popup):
def __init__(self, app: 'ElectrumWindow', config: 'SimpleConfig'):
super(SwapDialog, self).__init__()
self.app = app
self.config = config
self.fmt_amt = self.app.format_amount_and_units
self.lnworker = self.app.wallet.lnworker
self.swap_manager = self.lnworker.swap_manager
self.send_amount: Optional[int] = None
self.receive_amount: Optional[int] = None
self.tx = None
self.is_reverse = None
asyncio.run(self.swap_manager.get_pairs())
self.update_and_init()
def update_and_init(self):
self.update_fee_text()
self.update_swap_slider()
self.swap_slider_moved(0)
def on_fee_button(self):
fee_dialog = FeeDialog(self, self.config, self.after_fee_changed)
fee_dialog.open()
def after_fee_changed(self):
self.update_fee_text()
self.update_swap_slider()
self.swap_slider_moved(self.ids.swap_slider.value)
def update_fee_text(self):
fee_per_kb = self.config.fee_per_kb()
eta = self.config.fee_to_eta(fee_per_kb)
fee_per_b = format_fee_satoshis(fee_per_kb / 1000)
suggest_fee = self.config.eta_target_to_fee(RECOMMEND_BLOCKS_SWAP)
suggest_fee_per_b = format_fee_satoshis(suggest_fee / 1000)
s = 's' if eta > 1 else ''
if eta > RECOMMEND_BLOCKS_SWAP or eta == -1:
msg = f'Warning: Your fee rate of {fee_per_b} sat/B may be too ' \
f'low for the swap to succeed before its timeout. ' \
f'The recommended fee rate is at least {suggest_fee_per_b} ' \
f'sat/B.'
else:
msg = f'Info: Your swap is estimated to be processed in {eta} ' \
f'block{s} with an onchain fee rate of {fee_per_b} sat/B.'
self.fee_rate_text = f'{fee_per_b} sat/B'
self.ids.fee_estimate.text = msg
def update_tx(self, onchain_amount: Union[int, str]):
if onchain_amount is None:
self.tx = None
self.ids.ok_button.disabled = True
return
outputs = [PartialTxOutput.from_address_and_value(ln_dummy_address(), onchain_amount)]
coins = self.app.wallet.get_spendable_coins(None)
try:
self.tx = self.app.wallet.make_unsigned_transaction(
coins=coins,
outputs=outputs)
except (NotEnoughFunds, NoDynamicFeeEstimates):
self.tx = None
self.ids.ok_button.disabled = True
def update_swap_slider(self):
self.update_tx('!')
try:
max_onchain_spend = self.tx.output_value_for_address(ln_dummy_address())
except AttributeError:
max_onchain_spend = 0
reverse = int(min(self.lnworker.num_sats_can_send(),
self.swap_manager.get_max_amount()))
forward = int(min(self.lnworker.num_sats_can_receive(),
self.swap_manager.get_max_amount(),
max_onchain_spend))
self.ids.swap_slider.range = (-reverse, forward)
def swap_slider_moved(self, position: float):
position = int(position)
if position < 0:
self.ids.swap_action_label.text = "Adds Lightning receiving capacity."
self.is_reverse = True
pay_amount = abs(position)
self.send_amount = pay_amount
self.ids.send_amount_label.text = \
f"{self.fmt_amt(pay_amount)} (offchain)" if pay_amount else ""
receive_amount = self.swap_manager.get_recv_amount(
send_amount=pay_amount, is_reverse=True)
self.receive_amount = receive_amount
self.ids.receive_amount_label.text = \
f"{self.fmt_amt(receive_amount)} (onchain)" if receive_amount else ""
self.ids.server_fee_label.text = \
f"{self.swap_manager.percentage:0.1f}% + {self.fmt_amt(self.swap_manager.lockup_fee)}"
self.mining_fee_text = \
f"{self.fmt_amt(self.swap_manager.get_claim_fee())}"
else:
self.ids.swap_action_label.text = f"Adds Lightning sending capacity."
self.is_reverse = False
self.send_amount = position
self.update_tx(self.send_amount)
pay_amount = position + self.tx.get_fee() if self.tx else 0
self.ids.send_amount_label.text = \
f"{self.fmt_amt(pay_amount)} (onchain)" if self.fmt_amt(pay_amount) else ""
receive_amount = self.swap_manager.get_recv_amount(
send_amount=position, is_reverse=False)
self.receive_amount = receive_amount
self.ids.receive_amount_label.text = \
f"{self.fmt_amt(receive_amount)} (offchain)" if receive_amount else ""
self.ids.server_fee_label.text = \
f"{self.swap_manager.percentage:0.1f}% + {self.fmt_amt(self.swap_manager.normal_fee)}"
self.mining_fee_text = \
f"{self.fmt_amt(self.tx.get_fee())}" if self.tx else ""
if pay_amount and receive_amount:
self.ids.ok_button.disabled = False
else:
self.ids.swap_action_label.text = "Swap below minimal swap size, change the slider."
self.ids.ok_button.disabled = True
def do_normal_swap(self, lightning_amount, onchain_amount, password):
tx = self.tx
assert tx
if lightning_amount is None or onchain_amount is None:
return
loop = self.app.network.asyncio_loop
coro = self.swap_manager.normal_swap(
lightning_amount_sat=lightning_amount,
expected_onchain_amount_sat=onchain_amount,
password=password,
tx=tx,
)
asyncio.run_coroutine_threadsafe(coro, loop)
def do_reverse_swap(self, lightning_amount, onchain_amount, password):
if lightning_amount is None or onchain_amount is None:
return
loop = self.app.network.asyncio_loop
coro = self.swap_manager.reverse_swap(
lightning_amount_sat=lightning_amount,
expected_onchain_amount_sat=onchain_amount + self.swap_manager.get_claim_fee(),
)
asyncio.run_coroutine_threadsafe(coro, loop)
def on_ok(self):
if not self.app.network:
self.window.show_error(_("You are offline."))
return
if self.is_reverse:
lightning_amount = self.send_amount
onchain_amount = self.receive_amount
self.app.protected(
'Do you want to do a reverse submarine swap?',
self.do_reverse_swap, (lightning_amount, onchain_amount))
else:
lightning_amount = self.receive_amount
onchain_amount = self.send_amount
self.app.protected(
'Do you want to do a submarine swap? '
'You will need to wait for the swap transaction to confirm.',
self.do_normal_swap, (lightning_amount, onchain_amount))
| true | true |
f71f6d9f3398355ffe923f131ddebd4aceaed71f | 8,876 | py | Python | tests/conftest.py | forestriveral/floris | 02c31e121283ad6ccae987cfa3aa1bf1e4b43014 | [
"Apache-2.0"
] | null | null | null | tests/conftest.py | forestriveral/floris | 02c31e121283ad6ccae987cfa3aa1bf1e4b43014 | [
"Apache-2.0"
] | null | null | null | tests/conftest.py | forestriveral/floris | 02c31e121283ad6ccae987cfa3aa1bf1e4b43014 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 NREL
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# See https://floris.readthedocs.io for documentation
import pytest
def turbines_to_array(turbine_list: list):
    """Flatten each turbine into a list of [Ct, power, aI, average_velocity].

    Args:
        turbine_list: turbines exposing ``Ct``, ``power``, ``aI`` and
            ``average_velocity`` attributes.

    Returns:
        One inner list of the four values per turbine, in input order.
    """
    def _row(turbine):
        # Fixed attribute order matches the regression-test tuples below.
        return [turbine.Ct, turbine.power, turbine.aI, turbine.average_velocity]

    return list(map(_row, turbine_list))
def print_test_values(turbine_list: list):
    """Print each turbine's state as a ready-to-paste regression tuple.

    Emits one line per turbine of the form
    ``(Ct, power, aI, average_velocity),`` with 7-decimal precision,
    matching the literal tuples used in the test expectations.

    Args:
        turbine_list: turbines exposing ``Ct``, ``power``, ``aI`` and
            ``average_velocity`` attributes.
    """
    # Template hoisted out of the loop; kept identical to the original output.
    template = "({:.7f}, {:.7f}, {:.7f}, {:.7f}),"
    for turbine in turbine_list:
        line = template.format(
            turbine.Ct, turbine.power, turbine.aI, turbine.average_velocity
        )
        print(line)
@pytest.fixture
def sample_inputs_fixture():
    """Pytest fixture returning a fresh SampleInputs data container."""
    return SampleInputs()
class SampleInputs:
    """
    Dict-based sample input data consumed by the test fixtures.

    Construction populates four attributes:
      * ``turbine`` -- a single turbine definition ("NREL 5MW" per its
        description field) including a power/thrust lookup table
      * ``farm``    -- a three-turbine, single-row farm layout
      * ``wake``    -- wake model selection and model parameters
      * ``floris``  -- aggregate dict bundling the three sections above
        plus logging settings
    """

    def __init__(self):
        # Turbine definition. "power_thrust_table" holds three parallel
        # 48-element lists: power coefficient, thrust coefficient, and the
        # wind speed each entry corresponds to (presumably m/s -- confirm
        # against the consumer of these fixtures).
        self.turbine = {
            "type": "turbine",
            "name": "nrel_5mw",
            "description": "NREL 5MW",
            "properties": {
                "rotor_diameter": 126.0,
                "hub_height": 90.0,
                "blade_count": 3,
                "pP": 1.88,
                "pT": 1.88,
                "generator_efficiency": 1.0,
                "power_thrust_table": {
                    "power": [
                        0.0,
                        0.0,
                        0.1780851,
                        0.28907459,
                        0.34902166,
                        0.3847278,
                        0.40605878,
                        0.4202279,
                        0.42882274,
                        0.43387274,
                        0.43622267,
                        0.43684468,
                        0.43657497,
                        0.43651053,
                        0.4365612,
                        0.43651728,
                        0.43590309,
                        0.43467276,
                        0.43322955,
                        0.43003137,
                        0.37655587,
                        0.33328466,
                        0.29700574,
                        0.26420779,
                        0.23839379,
                        0.21459275,
                        0.19382354,
                        0.1756635,
                        0.15970926,
                        0.14561785,
                        0.13287856,
                        0.12130194,
                        0.11219941,
                        0.10311631,
                        0.09545392,
                        0.08813781,
                        0.08186763,
                        0.07585005,
                        0.07071926,
                        0.06557558,
                        0.06148104,
                        0.05755207,
                        0.05413366,
                        0.05097969,
                        0.04806545,
                        0.04536883,
                        0.04287006,
                        0.04055141
                    ],
                    "thrust": [
                        1.19187945,
                        1.17284634,
                        1.09860817,
                        1.02889592,
                        0.97373036,
                        0.92826162,
                        0.89210543,
                        0.86100905,
                        0.835423,
                        0.81237673,
                        0.79225789,
                        0.77584769,
                        0.7629228,
                        0.76156073,
                        0.76261984,
                        0.76169723,
                        0.75232027,
                        0.74026851,
                        0.72987175,
                        0.70701647,
                        0.54054532,
                        0.45509459,
                        0.39343381,
                        0.34250785,
                        0.30487242,
                        0.27164979,
                        0.24361964,
                        0.21973831,
                        0.19918151,
                        0.18131868,
                        0.16537679,
                        0.15103727,
                        0.13998636,
                        0.1289037,
                        0.11970413,
                        0.11087113,
                        0.10339901,
                        0.09617888,
                        0.09009926,
                        0.08395078,
                        0.0791188,
                        0.07448356,
                        0.07050731,
                        0.06684119,
                        0.06345518,
                        0.06032267,
                        0.05741999,
                        0.05472609
                    ],
                    # 2.0 through 25.5 in 0.5 steps (48 entries).
                    "wind_speed": [
                        2.0,
                        2.5,
                        3.0,
                        3.5,
                        4.0,
                        4.5,
                        5.0,
                        5.5,
                        6.0,
                        6.5,
                        7.0,
                        7.5,
                        8.0,
                        8.5,
                        9.0,
                        9.5,
                        10.0,
                        10.5,
                        11.0,
                        11.5,
                        12.0,
                        12.5,
                        13.0,
                        13.5,
                        14.0,
                        14.5,
                        15.0,
                        15.5,
                        16.0,
                        16.5,
                        17.0,
                        17.5,
                        18.0,
                        18.5,
                        19.0,
                        19.5,
                        20.0,
                        20.5,
                        21.0,
                        21.5,
                        22.0,
                        22.5,
                        23.0,
                        23.5,
                        24.0,
                        24.5,
                        25.0,
                        25.5
                    ],
                },
                "yaw_angle": 0.0,
                "tilt_angle": 0.0,
                "TSR": 8.0,
            },
        }

        # Farm layout: three turbines along x at 0D, 5D, and 10D spacing
        # (D = rotor diameter above), all on the y = 0 line.
        self.farm = {
            "type": "farm",
            "name": "farm_example_2x2",
            "properties": {
                "wind_speed": [8.0],
                "wind_direction": [270.0],
                "turbulence_intensity": [0.1],
                "wind_shear": 0.12,
                "wind_veer": 0.0,
                "air_density": 1.225,
                "wake_combination": "sosfs",
                "layout_x": [
                    0.0,
                    5 * self.turbine["properties"]["rotor_diameter"],
                    10 * self.turbine["properties"]["rotor_diameter"],
                ],
                "layout_y": [0.0, 0.0, 0.0],
                "wind_x": [0],
                "wind_y": [0],
                "specified_wind_height": self.turbine["properties"]["hub_height"],
            },
        }

        # Wake model selection; the "parameters" sub-dicts are keyed by the
        # model names selected above ("gauss", "gauss_legacy").
        self.wake = {
            "type": "wake",
            "name": "wake_default",
            "properties": {
                "velocity_model": "gauss_legacy",
                "deflection_model": "gauss",
                "combination_model": "sosfs",
                "turbulence_model": "crespo_hernandez",
                "parameters": {
                    "wake_deflection_parameters": {
                        "gauss": {
                            "dm": 1.0,
                            "eps_gain": 0.2,
                            "use_secondary_steering": False,
                        }
                    },
                    "wake_velocity_parameters": {
                        "gauss_legacy": {
                            "calculate_VW_velocities": False,
                            "eps_gain": 0.2,
                            "ka": 0.38,
                            "kb": 0.004,
                            "use_yaw_added_recovery": False,
                        }
                    },
                },
            },
        }

        # Aggregate input dict bundling the three sections plus logging config.
        self.floris = {
            "farm": self.farm,
            "turbine": self.turbine,
            "wake": self.wake,
            "logging": {
                "console": {"enable": True, "level": 1},
                "file": {"enable": False, "level": 1},
            },
        }
| 32.512821 | 82 | 0.305994 |
import pytest
def turbines_to_array(turbine_list: list):
return [[t.Ct, t.power, t.aI, t.average_velocity] for t in turbine_list]
def print_test_values(turbine_list: list):
for t in turbine_list:
print(
"({:.7f}, {:.7f}, {:.7f}, {:.7f}),".format(
t.Ct, t.power, t.aI, t.average_velocity
)
)
@pytest.fixture
def sample_inputs_fixture():
return SampleInputs()
class SampleInputs:
def __init__(self):
self.turbine = {
"type": "turbine",
"name": "nrel_5mw",
"description": "NREL 5MW",
"properties": {
"rotor_diameter": 126.0,
"hub_height": 90.0,
"blade_count": 3,
"pP": 1.88,
"pT": 1.88,
"generator_efficiency": 1.0,
"power_thrust_table": {
"power": [
0.0,
0.0,
0.1780851,
0.28907459,
0.34902166,
0.3847278,
0.40605878,
0.4202279,
0.42882274,
0.43387274,
0.43622267,
0.43684468,
0.43657497,
0.43651053,
0.4365612,
0.43651728,
0.43590309,
0.43467276,
0.43322955,
0.43003137,
0.37655587,
0.33328466,
0.29700574,
0.26420779,
0.23839379,
0.21459275,
0.19382354,
0.1756635,
0.15970926,
0.14561785,
0.13287856,
0.12130194,
0.11219941,
0.10311631,
0.09545392,
0.08813781,
0.08186763,
0.07585005,
0.07071926,
0.06557558,
0.06148104,
0.05755207,
0.05413366,
0.05097969,
0.04806545,
0.04536883,
0.04287006,
0.04055141
],
"thrust": [
1.19187945,
1.17284634,
1.09860817,
1.02889592,
0.97373036,
0.92826162,
0.89210543,
0.86100905,
0.835423,
0.81237673,
0.79225789,
0.77584769,
0.7629228,
0.76156073,
0.76261984,
0.76169723,
0.75232027,
0.74026851,
0.72987175,
0.70701647,
0.54054532,
0.45509459,
0.39343381,
0.34250785,
0.30487242,
0.27164979,
0.24361964,
0.21973831,
0.19918151,
0.18131868,
0.16537679,
0.15103727,
0.13998636,
0.1289037,
0.11970413,
0.11087113,
0.10339901,
0.09617888,
0.09009926,
0.08395078,
0.0791188,
0.07448356,
0.07050731,
0.06684119,
0.06345518,
0.06032267,
0.05741999,
0.05472609
],
"wind_speed": [
2.0,
2.5,
3.0,
3.5,
4.0,
4.5,
5.0,
5.5,
6.0,
6.5,
7.0,
7.5,
8.0,
8.5,
9.0,
9.5,
10.0,
10.5,
11.0,
11.5,
12.0,
12.5,
13.0,
13.5,
14.0,
14.5,
15.0,
15.5,
16.0,
16.5,
17.0,
17.5,
18.0,
18.5,
19.0,
19.5,
20.0,
20.5,
21.0,
21.5,
22.0,
22.5,
23.0,
23.5,
24.0,
24.5,
25.0,
25.5
],
},
"yaw_angle": 0.0,
"tilt_angle": 0.0,
"TSR": 8.0,
},
}
self.farm = {
"type": "farm",
"name": "farm_example_2x2",
"properties": {
"wind_speed": [8.0],
"wind_direction": [270.0],
"turbulence_intensity": [0.1],
"wind_shear": 0.12,
"wind_veer": 0.0,
"air_density": 1.225,
"wake_combination": "sosfs",
"layout_x": [
0.0,
5 * self.turbine["properties"]["rotor_diameter"],
10 * self.turbine["properties"]["rotor_diameter"],
],
"layout_y": [0.0, 0.0, 0.0],
"wind_x": [0],
"wind_y": [0],
"specified_wind_height": self.turbine["properties"]["hub_height"],
},
}
self.wake = {
"type": "wake",
"name": "wake_default",
"properties": {
"velocity_model": "gauss_legacy",
"deflection_model": "gauss",
"combination_model": "sosfs",
"turbulence_model": "crespo_hernandez",
"parameters": {
"wake_deflection_parameters": {
"gauss": {
"dm": 1.0,
"eps_gain": 0.2,
"use_secondary_steering": False,
}
},
"wake_velocity_parameters": {
"gauss_legacy": {
"calculate_VW_velocities": False,
"eps_gain": 0.2,
"ka": 0.38,
"kb": 0.004,
"use_yaw_added_recovery": False,
}
},
},
},
}
self.floris = {
"farm": self.farm,
"turbine": self.turbine,
"wake": self.wake,
"logging": {
"console": {"enable": True, "level": 1},
"file": {"enable": False, "level": 1},
},
}
| true | true |
f71f6e1acacc2c48f4a28b2d425b5fac6cb232dd | 113,230 | py | Python | tests/test_class.py | michelp/cxxheaderparser | 83bb2903790cf448bf838cdb8a93ca96e758bd1a | [
"BSD-3-Clause"
] | 12 | 2020-12-28T09:40:53.000Z | 2022-03-13T15:36:21.000Z | tests/test_class.py | michelp/cxxheaderparser | 83bb2903790cf448bf838cdb8a93ca96e758bd1a | [
"BSD-3-Clause"
] | 28 | 2021-01-04T14:58:59.000Z | 2022-01-03T03:00:16.000Z | tests/test_class.py | michelp/cxxheaderparser | 83bb2903790cf448bf838cdb8a93ca96e758bd1a | [
"BSD-3-Clause"
] | 1 | 2021-11-06T03:44:53.000Z | 2021-11-06T03:44:53.000Z | # Note: testcases generated via `python -m cxxheaderparser.gentest`
from cxxheaderparser.types import (
AnonymousName,
Array,
BaseClass,
ClassDecl,
EnumDecl,
Enumerator,
Field,
ForwardDecl,
Function,
FundamentalSpecifier,
Method,
MoveReference,
NameSpecifier,
Operator,
PQName,
Parameter,
Pointer,
Reference,
TemplateArgument,
TemplateDecl,
TemplateSpecialization,
TemplateTypeParam,
Token,
Type,
Typedef,
UsingDecl,
Value,
Variable,
)
from cxxheaderparser.simple import (
ClassScope,
NamespaceScope,
parse_string,
ParsedData,
)
def test_class_member_spec_1():
content = """
class S {
int d1; // non-static data member
int a[10] = {1, 2}; // non-static data member with initializer (C++11)
static const int d2 = 1; // static data member with initializer
virtual void f1(int) = 0; // pure virtual member function
std::string d3, *d4, f2(int); // two data members and a member function
enum { NORTH, SOUTH, EAST, WEST };
struct NestedS {
std::string s;
} d5, *d6;
typedef NestedS value_type, *pointer_type;
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="S")], classkey="class"
)
),
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="NestedS")],
classkey="struct",
),
access="private",
),
fields=[
Field(
name="s",
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(name="string"),
]
)
),
access="public",
)
],
)
],
enums=[
EnumDecl(
typename=PQName(
segments=[AnonymousName(id=1)], classkey="enum"
),
values=[
Enumerator(name="NORTH"),
Enumerator(name="SOUTH"),
Enumerator(name="EAST"),
Enumerator(name="WEST"),
],
access="private",
)
],
fields=[
Field(
name="d1",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
access="private",
),
Field(
name="a",
type=Array(
array_of=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
size=Value(tokens=[Token(value="10")]),
),
access="private",
value=Value(
tokens=[
Token(value="{"),
Token(value="1"),
Token(value=","),
Token(value="2"),
Token(value="}"),
]
),
),
Field(
name="d2",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
),
const=True,
),
access="private",
value=Value(tokens=[Token(value="1")]),
static=True,
),
Field(
name="d3",
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(name="string"),
]
)
),
access="private",
),
Field(
name="d4",
type=Pointer(
ptr_to=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(name="string"),
]
)
)
),
access="private",
),
Field(
name="d5",
type=Type(
typename=PQName(
segments=[NameSpecifier(name="NestedS")],
classkey="struct",
)
),
access="private",
),
Field(
name="d6",
type=Pointer(
ptr_to=Type(
typename=PQName(
segments=[NameSpecifier(name="NestedS")],
classkey="struct",
)
)
),
access="private",
),
],
methods=[
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
)
),
name=PQName(segments=[NameSpecifier(name="f1")]),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
)
)
],
access="private",
pure_virtual=True,
virtual=True,
),
Method(
return_type=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(name="string"),
]
)
),
name=PQName(segments=[NameSpecifier(name="f2")]),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
)
)
],
access="private",
),
],
typedefs=[
Typedef(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="NestedS")]
)
),
name="value_type",
access="private",
),
Typedef(
type=Pointer(
ptr_to=Type(
typename=PQName(
segments=[NameSpecifier(name="NestedS")]
)
)
),
name="pointer_type",
access="private",
),
],
)
]
)
)
def test_class_member_spec_2():
content = """
class M {
std::size_t C;
std::vector<int> data;
public:
M(std::size_t R, std::size_t C)
: C(C), data(R * C) {} // constructor definition
int operator()(size_t r, size_t c) const { // member function definition
return data[r * C + c];
}
int &operator()(size_t r, size_t c) { // another member function definition
return data[r * C + c];
}
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="M")], classkey="class"
)
),
fields=[
Field(
access="private",
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(name="size_t"),
]
)
),
name="C",
),
Field(
access="private",
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(
name="vector",
specialization=TemplateSpecialization(
args=[
TemplateArgument(
arg=Type(
typename=PQName(
segments=[
FundamentalSpecifier(
name="int"
)
]
)
)
)
]
),
),
]
)
),
name="data",
),
],
methods=[
Method(
return_type=None,
name=PQName(segments=[NameSpecifier(name="M")]),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(name="size_t"),
]
)
),
name="R",
),
Parameter(
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(name="size_t"),
]
)
),
name="C",
),
],
has_body=True,
access="public",
constructor=True,
),
Operator(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name=PQName(segments=[NameSpecifier(name="operator()")]),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="size_t")]
)
),
name="r",
),
Parameter(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="size_t")]
)
),
name="c",
),
],
has_body=True,
access="public",
const=True,
operator="()",
),
Operator(
return_type=Reference(
ref_to=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
)
),
name=PQName(segments=[NameSpecifier(name="operator()")]),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="size_t")]
)
),
name="r",
),
Parameter(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="size_t")]
)
),
name="c",
),
],
has_body=True,
access="public",
operator="()",
),
],
)
]
)
)
def test_class_member_spec_3():
content = """
class S {
public:
S(); // public constructor
S(const S &); // public copy constructor
virtual ~S(); // public virtual destructor
private:
int *ptr; // private data member
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="S")], classkey="class"
)
),
fields=[
Field(
name="ptr",
type=Pointer(
ptr_to=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
)
),
access="private",
)
],
methods=[
Method(
return_type=None,
name=PQName(segments=[NameSpecifier(name="S")]),
parameters=[],
access="public",
constructor=True,
),
Method(
return_type=None,
name=PQName(segments=[NameSpecifier(name="S")]),
parameters=[
Parameter(
type=Reference(
ref_to=Type(
typename=PQName(
segments=[NameSpecifier(name="S")]
),
const=True,
)
)
)
],
access="public",
constructor=True,
),
Method(
return_type=None,
name=PQName(segments=[NameSpecifier(name="~S")]),
parameters=[],
access="public",
destructor=True,
virtual=True,
),
],
)
]
)
)
def test_class_using():
content = """
class Base {
protected:
int d;
};
class Derived : public Base {
public:
using Base::Base; // inherit all parent's constructors (C++11)
using Base::d; // make Base's protected member d a public member of Derived
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="Base")], classkey="class"
)
),
fields=[
Field(
name="d",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
access="protected",
)
],
),
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="Derived")], classkey="class"
),
bases=[
BaseClass(
access="public",
typename=PQName(segments=[NameSpecifier(name="Base")]),
)
],
),
using=[
UsingDecl(
typename=PQName(
segments=[
NameSpecifier(name="Base"),
NameSpecifier(name="Base"),
]
),
access="public",
),
UsingDecl(
typename=PQName(
segments=[
NameSpecifier(name="Base"),
NameSpecifier(name="d"),
]
),
access="public",
),
],
),
]
)
)
def test_class_member_spec_6():
content = """
struct S {
template<typename T>
void f(T&& n);
template<class CharT>
struct NestedS {
std::basic_string<CharT> s;
};
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="S")], classkey="struct"
)
),
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="NestedS")],
classkey="struct",
),
template=TemplateDecl(
params=[
TemplateTypeParam(typekey="class", name="CharT")
]
),
access="public",
),
fields=[
Field(
access="public",
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(
name="basic_string",
specialization=TemplateSpecialization(
args=[
TemplateArgument(
arg=Type(
typename=PQName(
segments=[
NameSpecifier(
name="CharT"
)
]
)
)
)
]
),
),
]
)
),
name="s",
)
],
)
],
methods=[
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
)
),
name=PQName(segments=[NameSpecifier(name="f")]),
parameters=[
Parameter(
type=MoveReference(
moveref_to=Type(
typename=PQName(
segments=[NameSpecifier(name="T")]
)
)
),
name="n",
)
],
template=TemplateDecl(
params=[TemplateTypeParam(typekey="typename", name="T")]
),
access="public",
)
],
)
]
)
)
def test_class_fn_default_params():
content = """
// clang-format off
class Hen
{
public:
void add(int a=100, b=0xfd, float c=1.7e-3, float d=3.14);
void join(string s1="", string s2="nothing");
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="Hen")], classkey="class"
)
),
methods=[
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
)
),
name=PQName(segments=[NameSpecifier(name="add")]),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name="a",
default=Value(tokens=[Token(value="100")]),
),
Parameter(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="b")]
)
),
default=Value(tokens=[Token(value="0xfd")]),
),
Parameter(
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="float")
]
)
),
name="c",
default=Value(tokens=[Token(value="1.7e-3")]),
),
Parameter(
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="float")
]
)
),
name="d",
default=Value(tokens=[Token(value="3.14")]),
),
],
access="public",
),
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
)
),
name=PQName(segments=[NameSpecifier(name="join")]),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="string")]
)
),
name="s1",
default=Value(tokens=[Token(value='""')]),
),
Parameter(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="string")]
)
),
name="s2",
default=Value(tokens=[Token(value='"nothing"')]),
),
],
access="public",
),
],
)
]
)
)
def test_class_fn_inline_virtual():
    """A method declared ``virtual inline`` parses with inline=True and virtual=True."""
    content = """
      class B {
      public:
        virtual inline int aMethod();
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="B")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="aMethod")]),
                            parameters=[],
                            inline=True,
                            access="public",
                            virtual=True,
                        )
                    ],
                )
            ]
        )
    )
def test_class_fn_pure_virtual_const():
content = """
class StoneClass {
virtual int getNum2() const = 0;
int getNum3();
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="StoneClass")],
classkey="class",
)
),
methods=[
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name=PQName(segments=[NameSpecifier(name="getNum2")]),
parameters=[],
access="private",
const=True,
pure_virtual=True,
virtual=True,
),
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name=PQName(segments=[NameSpecifier(name="getNum3")]),
parameters=[],
access="private",
),
],
)
]
)
)
def test_class_fn_return_global_ns():
content = """
struct Avacado {
uint8_t foo() { return 4; }
::uint8_t bar() { return 0; }
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="Avacado")], classkey="struct"
)
),
methods=[
Method(
return_type=Type(
typename=PQName(
segments=[NameSpecifier(name="uint8_t")]
)
),
name=PQName(segments=[NameSpecifier(name="foo")]),
parameters=[],
has_body=True,
access="public",
),
Method(
return_type=Type(
typename=PQName(
segments=[
NameSpecifier(name=""),
NameSpecifier(name="uint8_t"),
]
)
),
name=PQName(segments=[NameSpecifier(name="bar")]),
parameters=[],
has_body=True,
access="public",
),
],
)
]
)
)
def test_class_ns_class():
    """A class forward-declared in a namespace and defined outside it parses as a
    qualified (``ns::N``) class plus a forward declaration inside the namespace."""
    content = """
      namespace ns {
      class N;
      };
      class ns::N {};
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[
                                NameSpecifier(name="ns"),
                                NameSpecifier(name="N"),
                            ],
                            classkey="class",
                        )
                    )
                )
            ],
            namespaces={
                "ns": NamespaceScope(
                    name="ns",
                    forward_decls=[
                        ForwardDecl(
                            typename=PQName(
                                segments=[NameSpecifier(name="N")], classkey="class"
                            )
                        )
                    ],
                )
            },
        )
    )
def test_class_ns_w_base():
    """A qualified class definition with a base class keeps both the multi-segment
    name (``Herb::Cilantro``) and the public base in the parsed declaration."""
    content = """
      class Herb::Cilantro : public Plant {};
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[
                                NameSpecifier(name="Herb"),
                                NameSpecifier(name="Cilantro"),
                            ],
                            classkey="class",
                        ),
                        bases=[
                            BaseClass(
                                access="public",
                                typename=PQName(segments=[NameSpecifier(name="Plant")]),
                            )
                        ],
                    )
                )
            ]
        )
    )
def test_class_inner_class():
    """A nested class inside a ``class`` parses as a child ClassScope with the
    default private access."""
    content = """
      class C {
        class Inner {};
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="C")], classkey="class"
                        )
                    ),
                    classes=[
                        ClassScope(
                            class_decl=ClassDecl(
                                typename=PQName(
                                    segments=[NameSpecifier(name="Inner")],
                                    classkey="class",
                                ),
                                access="private",
                            )
                        )
                    ],
                )
            ]
        )
    )
def test_class_inner_fwd_class():
content = """
class C {
class N;
};
class C::N {};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="C")], classkey="class"
)
),
forward_decls=[
ForwardDecl(
typename=PQName(
segments=[NameSpecifier(name="N")], classkey="class"
),
access="private",
)
],
),
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="C"), NameSpecifier(name="N")],
classkey="class",
)
)
),
]
)
)
def test_class_inner_var_access():
content = """
class Bug_3488053 {
public:
class Bug_3488053_Nested {
public:
int x;
};
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="Bug_3488053")],
classkey="class",
)
),
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="Bug_3488053_Nested")],
classkey="class",
),
access="public",
),
fields=[
Field(
access="public",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name="x",
)
],
)
],
)
]
)
)
def test_class_ns_and_inner():
content = """
namespace RoosterNamespace {
class RoosterOuterClass {
public:
int member1;
class RoosterSubClass1 {
public:
int publicMember1;
private:
int privateMember1;
};
private:
int member2;
class RoosterSubClass2 {
public:
int publicMember2;
private:
int privateMember2;
};
};
} // namespace RoosterNamespace
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
namespaces={
"RoosterNamespace": NamespaceScope(
name="RoosterNamespace",
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="RoosterOuterClass")],
classkey="class",
)
),
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[
NameSpecifier(name="RoosterSubClass1")
],
classkey="class",
),
access="public",
),
fields=[
Field(
access="public",
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="int")
]
)
),
name="publicMember1",
),
Field(
access="private",
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="int")
]
)
),
name="privateMember1",
),
],
),
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[
NameSpecifier(name="RoosterSubClass2")
],
classkey="class",
),
access="private",
),
fields=[
Field(
access="public",
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="int")
]
)
),
name="publicMember2",
),
Field(
access="private",
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="int")
]
)
),
name="privateMember2",
),
],
),
],
fields=[
Field(
access="public",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name="member1",
),
Field(
access="private",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name="member2",
),
],
)
],
)
}
)
)
def test_class_struct_access():
content = """
struct SampleStruct {
unsigned int meth();
private:
int prop;
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="SampleStruct")],
classkey="struct",
)
),
fields=[
Field(
access="private",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name="prop",
)
],
methods=[
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="unsigned int")]
)
),
name=PQName(segments=[NameSpecifier(name="meth")]),
parameters=[],
access="public",
)
],
)
]
)
)
def test_class_volatile_move_deleted_fn():
    """A deleted member function with ``volatile &&`` qualifiers parses with
    volatile=True, ref_qualifier="&&", and deleted=True."""
    content = """
      struct C {
        void foo() volatile && = delete;
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="C")], classkey="struct"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="foo")]),
                            parameters=[],
                            access="public",
                            volatile=True,
                            ref_qualifier="&&",
                            deleted=True,
                        )
                    ],
                )
            ]
        )
    )
def test_class_bitfield_1():
content = """
struct S {
// will usually occupy 2 bytes:
// 3 bits: value of b1
// 2 bits: unused
// 6 bits: value of b2
// 2 bits: value of b3
// 3 bits: unused
unsigned char b1 : 3, : 2, b2 : 6, b3 : 2;
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="S")], classkey="struct"
)
),
fields=[
Field(
name="b1",
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="unsigned char")
]
)
),
access="public",
bits=3,
),
Field(
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="unsigned char")
]
)
),
access="public",
bits=2,
),
Field(
name="b2",
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="unsigned char")
]
)
),
access="public",
bits=6,
),
Field(
name="b3",
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="unsigned char")
]
)
),
access="public",
bits=2,
),
],
)
]
)
)
def test_class_bitfield_2():
content = """
struct HAL_ControlWord {
int x : 1;
int y : 1;
};
typedef struct HAL_ControlWord HAL_ControlWord;
int HAL_GetControlWord(HAL_ControlWord *controlWord);
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="HAL_ControlWord")],
classkey="struct",
)
),
fields=[
Field(
name="x",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
access="public",
bits=1,
),
Field(
name="y",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
access="public",
bits=1,
),
],
)
],
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="int")])
),
name=PQName(segments=[NameSpecifier(name="HAL_GetControlWord")]),
parameters=[
Parameter(
type=Pointer(
ptr_to=Type(
typename=PQName(
segments=[NameSpecifier(name="HAL_ControlWord")]
)
)
),
name="controlWord",
)
],
)
],
typedefs=[
Typedef(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="HAL_ControlWord")],
classkey="struct",
)
),
name="HAL_ControlWord",
)
],
)
)
def test_class_anon_struct_as_globalvar():
content = """
struct {
int m;
} unnamed, *p_unnamed;
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
classkey="struct", segments=[AnonymousName(id=1)]
)
),
fields=[
Field(
name="m",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")],
)
),
access="public",
)
],
)
],
variables=[
Variable(
name=PQName(segments=[NameSpecifier(name="unnamed")]),
type=Type(
typename=PQName(
classkey="struct", segments=[AnonymousName(id=1)]
)
),
),
Variable(
name=PQName(segments=[NameSpecifier(name="p_unnamed")]),
type=Pointer(
ptr_to=Type(
typename=PQName(
classkey="struct", segments=[AnonymousName(id=1)]
)
)
),
),
],
)
)
def test_class_anon_struct_as_classvar():
content = """
struct AnonHolderClass {
struct {
int x;
} a;
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="AnonHolderClass")],
classkey="struct",
)
),
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[AnonymousName(id=1)], classkey="struct"
),
access="public",
),
fields=[
Field(
access="public",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name="x",
)
],
)
],
fields=[
Field(
access="public",
type=Type(
typename=PQName(
segments=[AnonymousName(id=1)], classkey="struct"
)
),
name="a",
)
],
)
]
)
)
def test_initializer_with_initializer_list_1():
content = """
struct ComplexInit : SomeBase {
ComplexInit(int i) : m_stuff{i, 2} { auto i = something(); }
void fn();
std::vector<int> m_stuff;
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="ComplexInit")],
classkey="struct",
),
bases=[
BaseClass(
access="public",
typename=PQName(
segments=[NameSpecifier(name="SomeBase")]
),
)
],
),
fields=[
Field(
access="public",
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(
name="vector",
specialization=TemplateSpecialization(
args=[
TemplateArgument(
arg=Type(
typename=PQName(
segments=[
FundamentalSpecifier(
name="int"
)
]
)
)
)
]
),
),
]
)
),
name="m_stuff",
)
],
methods=[
Method(
return_type=None,
name=PQName(segments=[NameSpecifier(name="ComplexInit")]),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name="i",
)
],
has_body=True,
access="public",
constructor=True,
),
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
)
),
name=PQName(segments=[NameSpecifier(name="fn")]),
parameters=[],
access="public",
),
],
)
]
)
)
def test_initializer_with_initializer_list_2():
    """A templated converting constructor whose mem-initializer contains a
    lambda must parse as a noexcept constructor taking a future<R>&& parameter.
    """
    content = """
      template <typename T> class future final {
    public:
      template <typename R>
      future(future<R> &&oth) noexcept
          : future(oth.then([](R &&val) -> T { return val; })) {}
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="future")], classkey="class"
                        ),
                        template=TemplateDecl(
                            params=[TemplateTypeParam(typekey="typename", name="T")]
                        ),
                        final=True,
                    ),
                    methods=[
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="future")]),
                            parameters=[
                                Parameter(
                                    type=MoveReference(
                                        moveref_to=Type(
                                            typename=PQName(
                                                segments=[
                                                    NameSpecifier(
                                                        name="future",
                                                        specialization=TemplateSpecialization(
                                                            args=[
                                                                TemplateArgument(
                                                                    arg=Type(
                                                                        typename=PQName(
                                                                            segments=[
                                                                                NameSpecifier(
                                                                                    name="R"
                                                                                )
                                                                            ]
                                                                        )
                                                                    )
                                                                )
                                                            ]
                                                        ),
                                                    )
                                                ]
                                            )
                                        )
                                    ),
                                    name="oth",
                                )
                            ],
                            has_body=True,
                            template=TemplateDecl(
                                params=[TemplateTypeParam(typekey="typename", name="R")]
                            ),
                            noexcept=Value(tokens=[]),
                            access="public",
                            constructor=True,
                        )
                    ],
                )
            ]
        )
    )
def test_class_with_arrays():
    """Array members sized by a named constant and by a literal should both be
    parsed as Array types; the file-level const becomes a Variable.
    """
    content = """
      const int MAX_ITEM = 7;
      class Bird {
        int items[MAX_ITEM];
        int otherItems[7];
        int oneItem;
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Bird")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            access="private",
                            type=Array(
                                array_of=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="int")]
                                    )
                                ),
                                # size is kept as an unevaluated token
                                size=Value(tokens=[Token(value="MAX_ITEM")]),
                            ),
                            name="items",
                        ),
                        Field(
                            access="private",
                            type=Array(
                                array_of=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="int")]
                                    )
                                ),
                                size=Value(tokens=[Token(value="7")]),
                            ),
                            name="otherItems",
                        ),
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="oneItem",
                        ),
                    ],
                )
            ],
            variables=[
                Variable(
                    name=PQName(segments=[NameSpecifier(name="MAX_ITEM")]),
                    type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="int")]),
                        const=True,
                    ),
                    value=Value(tokens=[Token(value="7")]),
                )
            ],
        )
    )
def test_class_fn_inline_impl():
    """An out-of-line `inline` definition of a static method is reported as a
    file-level Function (with inline/has_body), separate from the in-class
    declaration.
    """
    content = """
    class Monkey {
    private:
      static void Create();
    };
    inline void Monkey::Create() {}
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Monkey")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="Create")]),
                            parameters=[],
                            static=True,
                            access="private",
                        )
                    ],
                )
            ],
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="void")])
                    ),
                    name=PQName(
                        segments=[
                            NameSpecifier(name="Monkey"),
                            NameSpecifier(name="Create"),
                        ]
                    ),
                    parameters=[],
                    inline=True,
                    has_body=True,
                )
            ],
        )
    )
def test_class_fn_virtual_final_override():
    """virtual/final on methods, a final derived struct, and an override
    method must all be reflected in the parsed flags.
    """
    content = """
    struct Lemon {
      virtual void foo() final;
      virtual void foo2();
    };
    struct Lime final : Lemon {
      void abc();
      void foo2() override;
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Lemon")], classkey="struct"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="foo")]),
                            parameters=[],
                            access="public",
                            virtual=True,
                            final=True,
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="foo2")]),
                            parameters=[],
                            access="public",
                            virtual=True,
                        ),
                    ],
                ),
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Lime")], classkey="struct"
                        ),
                        bases=[
                            BaseClass(
                                access="public",
                                typename=PQName(segments=[NameSpecifier(name="Lemon")]),
                            )
                        ],
                        final=True,
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="abc")]),
                            parameters=[],
                            access="public",
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="foo2")]),
                            parameters=[],
                            access="public",
                            override=True,
                        ),
                    ],
                ),
            ]
        )
    )
def test_class_fn_return_class():
    """Elaborated `class Peach *` type specifiers in return types, parameters
    and fields must keep their classkey.  Note the out-of-line definition in
    the snippet deliberately uses a different name (`myMethod`) than the
    declared method (`doSomethingGreat`), so both paths are exercised.
    """
    content = """
    class Peach {
      int abc;
    };
    class Plumb {
      class Peach *doSomethingGreat(class Peach *pInCurPtr);
      class Peach *var;
    };
    class Peach *Plumb::myMethod(class Peach *pInPtr) {
      return pInPtr;
    }
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Peach")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="abc",
                        )
                    ],
                ),
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Plumb")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            access="private",
                            type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="Peach")],
                                        classkey="class",
                                    )
                                )
                            ),
                            name="var",
                        )
                    ],
                    methods=[
                        Method(
                            return_type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="Peach")],
                                        classkey="class",
                                    )
                                )
                            ),
                            name=PQName(
                                segments=[NameSpecifier(name="doSomethingGreat")]
                            ),
                            parameters=[
                                Parameter(
                                    type=Pointer(
                                        ptr_to=Type(
                                            typename=PQName(
                                                segments=[NameSpecifier(name="Peach")],
                                                classkey="class",
                                            )
                                        )
                                    ),
                                    name="pInCurPtr",
                                )
                            ],
                            access="private",
                        )
                    ],
                ),
            ],
            functions=[
                Function(
                    return_type=Pointer(
                        ptr_to=Type(
                            typename=PQName(
                                segments=[NameSpecifier(name="Peach")], classkey="class"
                            )
                        )
                    ),
                    name=PQName(
                        segments=[
                            NameSpecifier(name="Plumb"),
                            NameSpecifier(name="myMethod"),
                        ]
                    ),
                    parameters=[
                        Parameter(
                            type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="Peach")],
                                        classkey="class",
                                    )
                                )
                            ),
                            name="pInPtr",
                        )
                    ],
                    has_body=True,
                )
            ],
        )
    )
def test_class_fn_template_impl():
    """A template method declared in-class and defined out-of-line must carry
    its TemplateDecl in both the Method and the file-level Function.
    """
    content = """
    class Owl {
    private:
      template <typename T> int *tFunc(int count);
    };
    template <typename T> int *Owl::tFunc(int count) {
      if (count == 0) {
        return NULL;
      }
      return NULL;
    }
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Owl")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="int")]
                                    )
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="tFunc")]),
                            parameters=[
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    ),
                                    name="count",
                                )
                            ],
                            template=TemplateDecl(
                                params=[TemplateTypeParam(typekey="typename", name="T")]
                            ),
                            access="private",
                        )
                    ],
                )
            ],
            functions=[
                Function(
                    return_type=Pointer(
                        ptr_to=Type(
                            typename=PQName(segments=[FundamentalSpecifier(name="int")])
                        )
                    ),
                    name=PQName(
                        segments=[
                            NameSpecifier(name="Owl"),
                            NameSpecifier(name="tFunc"),
                        ]
                    ),
                    parameters=[
                        Parameter(
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="count",
                        )
                    ],
                    has_body=True,
                    template=TemplateDecl(
                        params=[TemplateTypeParam(typekey="typename", name="T")]
                    ),
                )
            ],
        )
    )
def test_class_fn_inline_template_impl():
    """A static template method whose out-of-line definition is on one line
    still yields a Method plus a Function with matching TemplateDecls.
    """
    content = """
    class Chicken {
      template <typename T> static T Get();
    };
    template <typename T> T Chicken::Get() { return T(); }
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Chicken")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(segments=[NameSpecifier(name="T")])
                            ),
                            name=PQName(segments=[NameSpecifier(name="Get")]),
                            parameters=[],
                            static=True,
                            template=TemplateDecl(
                                params=[TemplateTypeParam(typekey="typename", name="T")]
                            ),
                            access="private",
                        )
                    ],
                )
            ],
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[NameSpecifier(name="T")])
                    ),
                    name=PQName(
                        segments=[
                            NameSpecifier(name="Chicken"),
                            NameSpecifier(name="Get"),
                        ]
                    ),
                    parameters=[],
                    has_body=True,
                    template=TemplateDecl(
                        params=[TemplateTypeParam(typekey="typename", name="T")]
                    ),
                )
            ],
        )
    )
def test_class_fn_explicit_constructors():
    """The `explicit` keyword on a constructor sets explicit=True; the plain
    constructor does not get the flag.
    """
    content = """
    class Lizzard {
      Lizzard();
      explicit Lizzard(int a);
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Lizzard")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="Lizzard")]),
                            parameters=[],
                            access="private",
                            constructor=True,
                        ),
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="Lizzard")]),
                            parameters=[
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    ),
                                    name="a",
                                )
                            ],
                            access="private",
                            constructor=True,
                            explicit=True,
                        ),
                    ],
                )
            ]
        )
    )
def test_class_fn_default_constructor():
    """A `= default` constructor is parsed with constructor=True and
    default=True.
    """
    content = """
    class DefaultConstDest {
    public:
      DefaultConstDest() = default;
    };
    """
    data = parse_string(content, cleandoc=True)
    # Build the expected tree from the inside out for readability.
    ctor = Method(
        return_type=None,
        name=PQName(segments=[NameSpecifier(name="DefaultConstDest")]),
        parameters=[],
        access="public",
        constructor=True,
        default=True,
    )
    cls_scope = ClassScope(
        class_decl=ClassDecl(
            typename=PQName(
                segments=[NameSpecifier(name="DefaultConstDest")],
                classkey="class",
            )
        ),
        methods=[ctor],
    )
    assert data == ParsedData(namespace=NamespaceScope(classes=[cls_scope]))
def test_class_fn_delete_constructor():
    """A `= delete` constructor is parsed with constructor=True and
    deleted=True.
    """
    content = """
    class A {
    public:
      A() = delete;
    };
    """
    data = parse_string(content, cleandoc=True)
    deleted_ctor = Method(
        return_type=None,
        name=PQName(segments=[NameSpecifier(name="A")]),
        parameters=[],
        access="public",
        constructor=True,
        deleted=True,
    )
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="A")], classkey="class"
                        )
                    ),
                    methods=[deleted_ctor],
                )
            ]
        )
    )
    assert data == expected
def test_class_multi_vars():
    """Comma-separated declarators (`int a, b, c;` and two map<> fields in one
    declaration) must each become a separate Field with the full type repeated.
    """
    content = """
    class Grape {
    public:
      int a, b, c;
      map<string, int> d;
      map<string, int> e, f;
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Grape")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="a",
                        ),
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="b",
                        ),
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="c",
                        ),
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        NameSpecifier(
                                            name="map",
                                            specialization=TemplateSpecialization(
                                                args=[
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    NameSpecifier(
                                                                        name="string"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    ),
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    FundamentalSpecifier(
                                                                        name="int"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    ),
                                                ]
                                            ),
                                        )
                                    ]
                                )
                            ),
                            name="d",
                        ),
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        NameSpecifier(
                                            name="map",
                                            specialization=TemplateSpecialization(
                                                args=[
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    NameSpecifier(
                                                                        name="string"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    ),
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    FundamentalSpecifier(
                                                                        name="int"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    ),
                                                ]
                                            ),
                                        )
                                    ]
                                )
                            ),
                            name="e",
                        ),
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        NameSpecifier(
                                            name="map",
                                            specialization=TemplateSpecialization(
                                                args=[
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    NameSpecifier(
                                                                        name="string"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    ),
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    FundamentalSpecifier(
                                                                        name="int"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    ),
                                                ]
                                            ),
                                        )
                                    ]
                                )
                            ),
                            name="f",
                        ),
                    ],
                )
            ]
        )
    )
def test_class_static_const_var_expr():
    """Static const member initializers that are expressions — a shifted
    constant and a sizeof() — are preserved as raw token streams in Value.
    """
    content = """
    class PandaClass {
      static const int CONST_A = (1 << 7) - 1;
      static const int CONST_B = sizeof(int);
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="PandaClass")],
                            classkey="class",
                        )
                    ),
                    fields=[
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                ),
                                const=True,
                            ),
                            name="CONST_A",
                            value=Value(
                                tokens=[
                                    Token(value="("),
                                    Token(value="1"),
                                    Token(value="<<"),
                                    Token(value="7"),
                                    Token(value=")"),
                                    Token(value="-"),
                                    Token(value="1"),
                                ]
                            ),
                            static=True,
                        ),
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                ),
                                const=True,
                            ),
                            name="CONST_B",
                            value=Value(
                                tokens=[
                                    Token(value="sizeof"),
                                    Token(value="("),
                                    Token(value="int"),
                                    Token(value=")"),
                                ]
                            ),
                            static=True,
                        ),
                    ],
                )
            ]
        )
    )
def test_class_fwd_struct():
    """A nested struct that is forward-declared, used as a pointer field and
    then defined yields a ForwardDecl, a Pointer field, and a nested class.
    """
    content = """
    class PotatoClass {
      struct FwdStruct;
      FwdStruct *ptr;
      struct FwdStruct {
        int a;
      };
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="PotatoClass")],
                            classkey="class",
                        )
                    ),
                    classes=[
                        ClassScope(
                            class_decl=ClassDecl(
                                typename=PQName(
                                    segments=[NameSpecifier(name="FwdStruct")],
                                    classkey="struct",
                                ),
                                access="private",
                            ),
                            fields=[
                                Field(
                                    access="public",
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    ),
                                    name="a",
                                )
                            ],
                        )
                    ],
                    fields=[
                        Field(
                            access="private",
                            type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="FwdStruct")]
                                    )
                                )
                            ),
                            name="ptr",
                        )
                    ],
                    forward_decls=[
                        ForwardDecl(
                            typename=PQName(
                                segments=[NameSpecifier(name="FwdStruct")],
                                classkey="struct",
                            ),
                            access="private",
                        )
                    ],
                )
            ]
        )
    )
def test_class_multi_array():
    """A 2-D array member must parse as nested Array types, outer dimension
    first (pdata[128][256] -> Array(size=128, array_of=Array(size=256, ...))).
    """
    content = """
    struct Picture {
      char name[25];
      unsigned int pdata[128][256];
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Picture")], classkey="struct"
                        )
                    ),
                    fields=[
                        Field(
                            access="public",
                            type=Array(
                                array_of=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="char")]
                                    )
                                ),
                                size=Value(tokens=[Token(value="25")]),
                            ),
                            name="name",
                        ),
                        Field(
                            access="public",
                            type=Array(
                                array_of=Array(
                                    array_of=Type(
                                        typename=PQName(
                                            segments=[
                                                FundamentalSpecifier(
                                                    name="unsigned int"
                                                )
                                            ]
                                        )
                                    ),
                                    size=Value(tokens=[Token(value="256")]),
                                ),
                                size=Value(tokens=[Token(value="128")]),
                            ),
                            name="pdata",
                        ),
                    ],
                )
            ]
        )
    )
def test_class_noexcept():
    """Every noexcept spelling — absent, bare, const-qualified, with a bool
    argument, and with a nested noexcept-operator expression — must be
    captured (the argument is kept as raw tokens).
    """
    content = """
    struct Grackle {
      void no_noexcept();
      void just_noexcept() noexcept;
      void const_noexcept() const noexcept;
      void noexcept_bool() noexcept(true);
      void const_noexcept_bool() const noexcept(true);
      void noexcept_noexceptOperator() noexcept(noexcept(Grackle()));
      void const_noexcept_noexceptOperator() const noexcept(noexcept(Grackle()));
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Grackle")], classkey="struct"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="no_noexcept")]),
                            parameters=[],
                            access="public",
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="just_noexcept")]),
                            parameters=[],
                            noexcept=Value(tokens=[]),
                            access="public",
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(
                                segments=[NameSpecifier(name="const_noexcept")]
                            ),
                            parameters=[],
                            noexcept=Value(tokens=[]),
                            access="public",
                            const=True,
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="noexcept_bool")]),
                            parameters=[],
                            noexcept=Value(tokens=[Token(value="true")]),
                            access="public",
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(
                                segments=[NameSpecifier(name="const_noexcept_bool")]
                            ),
                            parameters=[],
                            noexcept=Value(tokens=[Token(value="true")]),
                            access="public",
                            const=True,
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(
                                segments=[
                                    NameSpecifier(name="noexcept_noexceptOperator")
                                ]
                            ),
                            parameters=[],
                            noexcept=Value(
                                tokens=[
                                    Token(value="noexcept"),
                                    Token(value="("),
                                    Token(value="Grackle"),
                                    Token(value="("),
                                    Token(value=")"),
                                    Token(value=")"),
                                ]
                            ),
                            access="public",
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(
                                segments=[
                                    NameSpecifier(
                                        name="const_noexcept_noexceptOperator"
                                    )
                                ]
                            ),
                            parameters=[],
                            noexcept=Value(
                                tokens=[
                                    Token(value="noexcept"),
                                    Token(value="("),
                                    Token(value="Grackle"),
                                    Token(value="("),
                                    Token(value=")"),
                                    Token(value=")"),
                                ]
                            ),
                            access="public",
                            const=True,
                        ),
                    ],
                )
            ]
        )
    )
def test_class_volatile():
    """A volatile-qualified field is parsed as Type(volatile=True)."""
    content = """
    class Foo
    {
    public:
    private:
        volatile bool myToShutDown;
    };
    """
    data = parse_string(content, cleandoc=True)
    shutdown_field = Field(
        access="private",
        type=Type(
            typename=PQName(segments=[FundamentalSpecifier(name="bool")]),
            volatile=True,
        ),
        name="myToShutDown",
    )
    foo_scope = ClassScope(
        class_decl=ClassDecl(
            typename=PQName(segments=[NameSpecifier(name="Foo")], classkey="class")
        ),
        fields=[shutdown_field],
    )
    assert data == ParsedData(namespace=NamespaceScope(classes=[foo_scope]))
| 38.240459 | 94 | 0.2641 |
from cxxheaderparser.types import (
AnonymousName,
Array,
BaseClass,
ClassDecl,
EnumDecl,
Enumerator,
Field,
ForwardDecl,
Function,
FundamentalSpecifier,
Method,
MoveReference,
NameSpecifier,
Operator,
PQName,
Parameter,
Pointer,
Reference,
TemplateArgument,
TemplateDecl,
TemplateSpecialization,
TemplateTypeParam,
Token,
Type,
Typedef,
UsingDecl,
Value,
Variable,
)
from cxxheaderparser.simple import (
ClassScope,
NamespaceScope,
parse_string,
ParsedData,
)
def test_class_member_spec_1():
    """Member-specification kitchen sink (from the cppreference example):
    initialized array member, static const member, pure virtual function,
    mixed declarator list (`std::string d3, *d4, f2(int);`), anonymous enum,
    nested struct with trailing declarators, and a multi-name typedef.
    """
    content = """
    class S {
      int d1; // non-static data member
      int a[10] = {1, 2}; // non-static data member with initializer (C++11)
      static const int d2 = 1; // static data member with initializer
      virtual void f1(int) = 0; // pure virtual member function
      std::string d3, *d4, f2(int); // two data members and a member function
      enum { NORTH, SOUTH, EAST, WEST };
      struct NestedS {
        std::string s;
      } d5, *d6;
      typedef NestedS value_type, *pointer_type;
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="S")], classkey="class"
                        )
                    ),
                    classes=[
                        ClassScope(
                            class_decl=ClassDecl(
                                typename=PQName(
                                    segments=[NameSpecifier(name="NestedS")],
                                    classkey="struct",
                                ),
                                access="private",
                            ),
                            fields=[
                                Field(
                                    name="s",
                                    type=Type(
                                        typename=PQName(
                                            segments=[
                                                NameSpecifier(name="std"),
                                                NameSpecifier(name="string"),
                                            ]
                                        )
                                    ),
                                    access="public",
                                )
                            ],
                        )
                    ],
                    enums=[
                        EnumDecl(
                            typename=PQName(
                                segments=[AnonymousName(id=1)], classkey="enum"
                            ),
                            values=[
                                Enumerator(name="NORTH"),
                                Enumerator(name="SOUTH"),
                                Enumerator(name="EAST"),
                                Enumerator(name="WEST"),
                            ],
                            access="private",
                        )
                    ],
                    fields=[
                        Field(
                            name="d1",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            access="private",
                        ),
                        Field(
                            name="a",
                            type=Array(
                                array_of=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="int")]
                                    )
                                ),
                                size=Value(tokens=[Token(value="10")]),
                            ),
                            access="private",
                            value=Value(
                                tokens=[
                                    Token(value="{"),
                                    Token(value="1"),
                                    Token(value=","),
                                    Token(value="2"),
                                    Token(value="}"),
                                ]
                            ),
                        ),
                        Field(
                            name="d2",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                ),
                                const=True,
                            ),
                            access="private",
                            value=Value(tokens=[Token(value="1")]),
                            static=True,
                        ),
                        Field(
                            name="d3",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        NameSpecifier(name="std"),
                                        NameSpecifier(name="string"),
                                    ]
                                )
                            ),
                            access="private",
                        ),
                        Field(
                            name="d4",
                            type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[
                                            NameSpecifier(name="std"),
                                            NameSpecifier(name="string"),
                                        ]
                                    )
                                )
                            ),
                            access="private",
                        ),
                        Field(
                            name="d5",
                            type=Type(
                                typename=PQName(
                                    segments=[NameSpecifier(name="NestedS")],
                                    classkey="struct",
                                )
                            ),
                            access="private",
                        ),
                        Field(
                            name="d6",
                            type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="NestedS")],
                                        classkey="struct",
                                    )
                                )
                            ),
                            access="private",
                        ),
                    ],
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="f1")]),
                            parameters=[
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    )
                                )
                            ],
                            access="private",
                            pure_virtual=True,
                            virtual=True,
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[
                                        NameSpecifier(name="std"),
                                        NameSpecifier(name="string"),
                                    ]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="f2")]),
                            parameters=[
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    )
                                )
                            ],
                            access="private",
                        ),
                    ],
                    typedefs=[
                        Typedef(
                            type=Type(
                                typename=PQName(
                                    segments=[NameSpecifier(name="NestedS")]
                                )
                            ),
                            name="value_type",
                            access="private",
                        ),
                        Typedef(
                            type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="NestedS")]
                                    )
                                )
                            ),
                            name="pointer_type",
                            access="private",
                        ),
                    ],
                )
            ]
        )
    )
def test_class_member_spec_2():
    """In-class constructor with mem-initializer list plus const and non-const
    operator() overloads; overloads become Operator entries with operator="()".
    """
    content = """
    class M {
      std::size_t C;
      std::vector<int> data;
    public:
      M(std::size_t R, std::size_t C)
          : C(C), data(R * C) {} // constructor definition
      int operator()(size_t r, size_t c) const { // member function definition
        return data[r * C + c];
      }
      int &operator()(size_t r, size_t c) { // another member function definition
        return data[r * C + c];
      }
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="M")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        NameSpecifier(name="std"),
                                        NameSpecifier(name="size_t"),
                                    ]
                                )
                            ),
                            name="C",
                        ),
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        NameSpecifier(name="std"),
                                        NameSpecifier(
                                            name="vector",
                                            specialization=TemplateSpecialization(
                                                args=[
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    FundamentalSpecifier(
                                                                        name="int"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    )
                                                ]
                                            ),
                                        ),
                                    ]
                                )
                            ),
                            name="data",
                        ),
                    ],
                    methods=[
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="M")]),
                            parameters=[
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[
                                                NameSpecifier(name="std"),
                                                NameSpecifier(name="size_t"),
                                            ]
                                        )
                                    ),
                                    name="R",
                                ),
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[
                                                NameSpecifier(name="std"),
                                                NameSpecifier(name="size_t"),
                                            ]
                                        )
                                    ),
                                    name="C",
                                ),
                            ],
                            has_body=True,
                            access="public",
                            constructor=True,
                        ),
                        Operator(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="operator()")]),
                            parameters=[
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[NameSpecifier(name="size_t")]
                                        )
                                    ),
                                    name="r",
                                ),
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[NameSpecifier(name="size_t")]
                                        )
                                    ),
                                    name="c",
                                ),
                            ],
                            has_body=True,
                            access="public",
                            const=True,
                            operator="()",
                        ),
                        Operator(
                            return_type=Reference(
                                ref_to=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="int")]
                                    )
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="operator()")]),
                            parameters=[
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[NameSpecifier(name="size_t")]
                                        )
                                    ),
                                    name="r",
                                ),
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[NameSpecifier(name="size_t")]
                                        )
                                    ),
                                    name="c",
                                ),
                            ],
                            has_body=True,
                            access="public",
                            operator="()",
                        ),
                    ],
                )
            ]
        )
    )
def test_class_member_spec_3():
    """Default ctor, copy ctor (const S&) and virtual destructor are
    distinguished via constructor/destructor/virtual flags.
    """
    content = """
    class S {
    public:
      S(); // public constructor
      S(const S &); // public copy constructor
      virtual ~S(); // public virtual destructor
    private:
      int *ptr; // private data member
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="S")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            name="ptr",
                            type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="int")]
                                    )
                                )
                            ),
                            access="private",
                        )
                    ],
                    methods=[
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="S")]),
                            parameters=[],
                            access="public",
                            constructor=True,
                        ),
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="S")]),
                            parameters=[
                                Parameter(
                                    type=Reference(
                                        ref_to=Type(
                                            typename=PQName(
                                                segments=[NameSpecifier(name="S")]
                                            ),
                                            const=True,
                                        )
                                    )
                                )
                            ],
                            access="public",
                            constructor=True,
                        ),
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="~S")]),
                            parameters=[],
                            access="public",
                            destructor=True,
                            virtual=True,
                        ),
                    ],
                )
            ]
        )
    )
def test_class_using():
    """In-class using-declarations (inheriting constructors and re-exposing a
    protected member) become UsingDecl entries with the derived class access.
    """
    content = """
    class Base {
    protected:
      int d;
    };
    class Derived : public Base {
    public:
      using Base::Base; // inherit all parent's constructors (C++11)
      using Base::d; // make Base's protected member d a public member of Derived
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Base")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            name="d",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            access="protected",
                        )
                    ],
                ),
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Derived")], classkey="class"
                        ),
                        bases=[
                            BaseClass(
                                access="public",
                                typename=PQName(segments=[NameSpecifier(name="Base")]),
                            )
                        ],
                    ),
                    using=[
                        UsingDecl(
                            typename=PQName(
                                segments=[
                                    NameSpecifier(name="Base"),
                                    NameSpecifier(name="Base"),
                                ]
                            ),
                            access="public",
                        ),
                        UsingDecl(
                            typename=PQName(
                                segments=[
                                    NameSpecifier(name="Base"),
                                    NameSpecifier(name="d"),
                                ]
                            ),
                            access="public",
                        ),
                    ],
                ),
            ]
        )
    )
def test_class_member_spec_6():
    """A member function template (forwarding-reference parameter) and a
    nested class template both carry their own TemplateDecl.
    """
    content = """
    struct S {
      template<typename T>
      void f(T&& n);
      template<class CharT>
      struct NestedS {
        std::basic_string<CharT> s;
      };
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="S")], classkey="struct"
                        )
                    ),
                    classes=[
                        ClassScope(
                            class_decl=ClassDecl(
                                typename=PQName(
                                    segments=[NameSpecifier(name="NestedS")],
                                    classkey="struct",
                                ),
                                template=TemplateDecl(
                                    params=[
                                        TemplateTypeParam(typekey="class", name="CharT")
                                    ]
                                ),
                                access="public",
                            ),
                            fields=[
                                Field(
                                    access="public",
                                    type=Type(
                                        typename=PQName(
                                            segments=[
                                                NameSpecifier(name="std"),
                                                NameSpecifier(
                                                    name="basic_string",
                                                    specialization=TemplateSpecialization(
                                                        args=[
                                                            TemplateArgument(
                                                                arg=Type(
                                                                    typename=PQName(
                                                                        segments=[
                                                                            NameSpecifier(
                                                                                name="CharT"
                                                                            )
                                                                        ]
                                                                    )
                                                                )
                                                            )
                                                        ]
                                                    ),
                                                ),
                                            ]
                                        )
                                    ),
                                    name="s",
                                )
                            ],
                        )
                    ],
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="f")]),
                            parameters=[
                                Parameter(
                                    type=MoveReference(
                                        moveref_to=Type(
                                            typename=PQName(
                                                segments=[NameSpecifier(name="T")]
                                            )
                                        )
                                    ),
                                    name="n",
                                )
                            ],
                            template=TemplateDecl(
                                params=[TemplateTypeParam(typekey="typename", name="T")]
                            ),
                            access="public",
                        )
                    ],
                )
            ]
        )
    )
def test_class_fn_default_params():
    """Default argument values (int, hex, exponent-float, string literals) are
    preserved as raw token Values.  Note `b=0xfd` has no type, so the parser
    treats `b` itself as the parameter's type name.
    """
    content = """
    // clang-format off
    class Hen
    {
    public:
      void add(int a=100, b=0xfd, float c=1.7e-3, float d=3.14);
      void join(string s1="", string s2="nothing");
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Hen")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="add")]),
                            parameters=[
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    ),
                                    name="a",
                                    default=Value(tokens=[Token(value="100")]),
                                ),
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[NameSpecifier(name="b")]
                                        )
                                    ),
                                    default=Value(tokens=[Token(value="0xfd")]),
                                ),
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[
                                                FundamentalSpecifier(name="float")
                                            ]
                                        )
                                    ),
                                    name="c",
                                    default=Value(tokens=[Token(value="1.7e-3")]),
                                ),
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[
                                                FundamentalSpecifier(name="float")
                                            ]
                                        )
                                    ),
                                    name="d",
                                    default=Value(tokens=[Token(value="3.14")]),
                                ),
                            ],
                            access="public",
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="join")]),
                            parameters=[
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[NameSpecifier(name="string")]
                                        )
                                    ),
                                    name="s1",
                                    default=Value(tokens=[Token(value='""')]),
                                ),
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[NameSpecifier(name="string")]
                                        )
                                    ),
                                    name="s2",
                                    default=Value(tokens=[Token(value='"nothing"')]),
                                ),
                            ],
                            access="public",
                        ),
                    ],
                )
            ]
        )
    )
def test_class_fn_inline_virtual():
    """`virtual inline` on a declaration sets both inline and virtual."""
    content = """
    class B {
    public:
      virtual inline int aMethod();
    };
    """
    data = parse_string(content, cleandoc=True)
    a_method = Method(
        return_type=Type(
            typename=PQName(segments=[FundamentalSpecifier(name="int")])
        ),
        name=PQName(segments=[NameSpecifier(name="aMethod")]),
        parameters=[],
        inline=True,
        access="public",
        virtual=True,
    )
    b_scope = ClassScope(
        class_decl=ClassDecl(
            typename=PQName(segments=[NameSpecifier(name="B")], classkey="class")
        ),
        methods=[a_method],
    )
    assert data == ParsedData(namespace=NamespaceScope(classes=[b_scope]))
def test_class_fn_pure_virtual_const():
    """`virtual ... const = 0` yields const + pure_virtual + virtual flags; a
    plain method beside it gets none of them.
    """
    content = """
    class StoneClass {
      virtual int getNum2() const = 0;
      int getNum3();
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="StoneClass")],
                            classkey="class",
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="getNum2")]),
                            parameters=[],
                            access="private",
                            const=True,
                            pure_virtual=True,
                            virtual=True,
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="getNum3")]),
                            parameters=[],
                            access="private",
                        ),
                    ],
                )
            ]
        )
    )
def test_class_fn_return_global_ns():
    """A globally-qualified return type (`::uint8_t`) is represented with a
    leading empty NameSpecifier segment.
    """
    content = """
    struct Avacado {
      uint8_t foo() { return 4; }
      ::uint8_t bar() { return 0; }
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Avacado")], classkey="struct"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[NameSpecifier(name="uint8_t")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="foo")]),
                            parameters=[],
                            has_body=True,
                            access="public",
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[
                                        NameSpecifier(name=""),
                                        NameSpecifier(name="uint8_t"),
                                    ]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="bar")]),
                            parameters=[],
                            has_body=True,
                            access="public",
                        ),
                    ],
                )
            ]
        )
    )
def test_class_ns_class():
    """A class forward-declared in a namespace but defined with a qualified
    name at file scope is reported at file scope with the qualified PQName.
    """
    content = """
    namespace ns {
      class N;
    };
    class ns::N {};
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[
                                NameSpecifier(name="ns"),
                                NameSpecifier(name="N"),
                            ],
                            classkey="class",
                        )
                    )
                )
            ],
            namespaces={
                "ns": NamespaceScope(
                    name="ns",
                    forward_decls=[
                        ForwardDecl(
                            typename=PQName(
                                segments=[NameSpecifier(name="N")], classkey="class"
                            )
                        )
                    ],
                )
            },
        )
    )
def test_class_ns_w_base():
    """A namespace-qualified class definition with a base class keeps both
    name segments and records the public base.
    """
    content = """
    class Herb::Cilantro : public Plant {};
    """
    data = parse_string(content, cleandoc=True)
    decl = ClassDecl(
        typename=PQName(
            segments=[
                NameSpecifier(name="Herb"),
                NameSpecifier(name="Cilantro"),
            ],
            classkey="class",
        ),
        bases=[
            BaseClass(
                access="public",
                typename=PQName(segments=[NameSpecifier(name="Plant")]),
            )
        ],
    )
    expected = ParsedData(
        namespace=NamespaceScope(classes=[ClassScope(class_decl=decl)])
    )
    assert data == expected
def test_class_inner_class():
    """A class defined inside another class appears in the outer ClassScope's
    `classes` list with private access (default for class).
    """
    content = """
    class C {
      class Inner {};
    };
    """
    data = parse_string(content, cleandoc=True)
    inner_scope = ClassScope(
        class_decl=ClassDecl(
            typename=PQName(
                segments=[NameSpecifier(name="Inner")],
                classkey="class",
            ),
            access="private",
        )
    )
    outer_scope = ClassScope(
        class_decl=ClassDecl(
            typename=PQName(segments=[NameSpecifier(name="C")], classkey="class")
        ),
        classes=[inner_scope],
    )
    assert data == ParsedData(namespace=NamespaceScope(classes=[outer_scope]))
def test_class_inner_fwd_class():
    """A nested class forward-declared inside C and defined at file scope via
    `class C::N {}` yields an inner ForwardDecl plus a qualified file-scope
    class.
    """
    content = """
    class C {
      class N;
    };
    class C::N {};
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="C")], classkey="class"
                        )
                    ),
                    forward_decls=[
                        ForwardDecl(
                            typename=PQName(
                                segments=[NameSpecifier(name="N")], classkey="class"
                            ),
                            access="private",
                        )
                    ],
                ),
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="C"), NameSpecifier(name="N")],
                            classkey="class",
                        )
                    )
                ),
            ]
        )
    )
def test_class_inner_var_access():
    """Fields of a public nested class carry that class's own access specifiers."""
    content = """
    class Bug_3488053 {
    public:
    class Bug_3488053_Nested {
    public:
    int x;
    };
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Bug_3488053")],
                            classkey="class",
                        )
                    ),
                    classes=[
                        ClassScope(
                            class_decl=ClassDecl(
                                typename=PQName(
                                    segments=[NameSpecifier(name="Bug_3488053_Nested")],
                                    classkey="class",
                                ),
                                access="public",
                            ),
                            fields=[
                                Field(
                                    access="public",
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    ),
                                    name="x",
                                )
                            ],
                        )
                    ],
                )
            ]
        )
    )
def test_class_ns_and_inner():
    """A namespaced class with two nested classes under different access specifiers."""
    content = """
    namespace RoosterNamespace {
    class RoosterOuterClass {
    public:
    int member1;
    class RoosterSubClass1 {
    public:
    int publicMember1;
    private:
    int privateMember1;
    };
    private:
    int member2;
    class RoosterSubClass2 {
    public:
    int publicMember2;
    private:
    int privateMember2;
    };
    };
    } // namespace RoosterNamespace
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            namespaces={
                "RoosterNamespace": NamespaceScope(
                    name="RoosterNamespace",
                    classes=[
                        ClassScope(
                            class_decl=ClassDecl(
                                typename=PQName(
                                    segments=[NameSpecifier(name="RoosterOuterClass")],
                                    classkey="class",
                                )
                            ),
                            classes=[
                                ClassScope(
                                    class_decl=ClassDecl(
                                        typename=PQName(
                                            segments=[
                                                NameSpecifier(name="RoosterSubClass1")
                                            ],
                                            classkey="class",
                                        ),
                                        access="public",
                                    ),
                                    fields=[
                                        Field(
                                            access="public",
                                            type=Type(
                                                typename=PQName(
                                                    segments=[
                                                        FundamentalSpecifier(name="int")
                                                    ]
                                                )
                                            ),
                                            name="publicMember1",
                                        ),
                                        Field(
                                            access="private",
                                            type=Type(
                                                typename=PQName(
                                                    segments=[
                                                        FundamentalSpecifier(name="int")
                                                    ]
                                                )
                                            ),
                                            name="privateMember1",
                                        ),
                                    ],
                                ),
                                ClassScope(
                                    class_decl=ClassDecl(
                                        typename=PQName(
                                            segments=[
                                                NameSpecifier(name="RoosterSubClass2")
                                            ],
                                            classkey="class",
                                        ),
                                        access="private",
                                    ),
                                    fields=[
                                        Field(
                                            access="public",
                                            type=Type(
                                                typename=PQName(
                                                    segments=[
                                                        FundamentalSpecifier(name="int")
                                                    ]
                                                )
                                            ),
                                            name="publicMember2",
                                        ),
                                        Field(
                                            access="private",
                                            type=Type(
                                                typename=PQName(
                                                    segments=[
                                                        FundamentalSpecifier(name="int")
                                                    ]
                                                )
                                            ),
                                            name="privateMember2",
                                        ),
                                    ],
                                ),
                            ],
                            fields=[
                                Field(
                                    access="public",
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    ),
                                    name="member1",
                                ),
                                Field(
                                    access="private",
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    ),
                                    name="member2",
                                ),
                            ],
                        )
                    ],
                )
            }
        )
    )
def test_class_struct_access():
    """Struct members default to public until an explicit access specifier appears."""
    content = """
    struct SampleStruct {
    unsigned int meth();
    private:
    int prop;
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="SampleStruct")],
                            classkey="struct",
                        )
                    ),
                    fields=[
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="prop",
                        )
                    ],
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="unsigned int")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="meth")]),
                            parameters=[],
                            access="public",
                        )
                    ],
                )
            ]
        )
    )
def test_class_volatile_move_deleted_fn():
    """A deleted method that carries both a volatile qualifier and a && ref-qualifier."""
    content = """
    struct C {
    void foo() volatile && = delete;
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="C")], classkey="struct"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="foo")]),
                            parameters=[],
                            access="public",
                            volatile=True,
                            ref_qualifier="&&",
                            deleted=True,
                        )
                    ],
                )
            ]
        )
    )
def test_class_bitfield_1():
    """Bitfields in one comma-separated declaration, including an unnamed padding field."""
    content = """
    struct S {
    // will usually occupy 2 bytes:
    // 3 bits: value of b1
    // 2 bits: unused
    // 6 bits: value of b2
    // 2 bits: value of b3
    // 3 bits: unused
    unsigned char b1 : 3, : 2, b2 : 6, b3 : 2;
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="S")], classkey="struct"
                        )
                    ),
                    fields=[
                        Field(
                            name="b1",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        FundamentalSpecifier(name="unsigned char")
                                    ]
                                )
                            ),
                            access="public",
                            bits=3,
                        ),
                        Field(
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        FundamentalSpecifier(name="unsigned char")
                                    ]
                                )
                            ),
                            access="public",
                            bits=2,
                        ),
                        Field(
                            name="b2",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        FundamentalSpecifier(name="unsigned char")
                                    ]
                                )
                            ),
                            access="public",
                            bits=6,
                        ),
                        Field(
                            name="b3",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        FundamentalSpecifier(name="unsigned char")
                                    ]
                                )
                            ),
                            access="public",
                            bits=2,
                        ),
                    ],
                )
            ]
        )
    )
def test_class_bitfield_2():
    """A bitfield struct plus a typedef of it and a function taking a pointer to it."""
    content = """
    struct HAL_ControlWord {
    int x : 1;
    int y : 1;
    };
    typedef struct HAL_ControlWord HAL_ControlWord;
    int HAL_GetControlWord(HAL_ControlWord *controlWord);
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="HAL_ControlWord")],
                            classkey="struct",
                        )
                    ),
                    fields=[
                        Field(
                            name="x",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            access="public",
                            bits=1,
                        ),
                        Field(
                            name="y",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            access="public",
                            bits=1,
                        ),
                    ],
                )
            ],
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="int")])
                    ),
                    name=PQName(segments=[NameSpecifier(name="HAL_GetControlWord")]),
                    parameters=[
                        Parameter(
                            type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="HAL_ControlWord")]
                                    )
                                )
                            ),
                            name="controlWord",
                        )
                    ],
                )
            ],
            typedefs=[
                Typedef(
                    type=Type(
                        typename=PQName(
                            segments=[NameSpecifier(name="HAL_ControlWord")],
                            classkey="struct",
                        )
                    ),
                    name="HAL_ControlWord",
                )
            ],
        )
    )
def test_class_anon_struct_as_globalvar():
    """An anonymous struct declaring global variables (a value and a pointer) directly."""
    content = """
    struct {
    int m;
    } unnamed, *p_unnamed;
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            classkey="struct", segments=[AnonymousName(id=1)]
                        )
                    ),
                    fields=[
                        Field(
                            name="m",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")],
                                )
                            ),
                            access="public",
                        )
                    ],
                )
            ],
            variables=[
                Variable(
                    name=PQName(segments=[NameSpecifier(name="unnamed")]),
                    type=Type(
                        typename=PQName(
                            classkey="struct", segments=[AnonymousName(id=1)]
                        )
                    ),
                ),
                Variable(
                    name=PQName(segments=[NameSpecifier(name="p_unnamed")]),
                    type=Pointer(
                        ptr_to=Type(
                            typename=PQName(
                                classkey="struct", segments=[AnonymousName(id=1)]
                            )
                        )
                    ),
                ),
            ],
        )
    )
def test_class_anon_struct_as_classvar():
    """An anonymous struct used as the type of a member field."""
    content = """
    struct AnonHolderClass {
    struct {
    int x;
    } a;
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="AnonHolderClass")],
                            classkey="struct",
                        )
                    ),
                    classes=[
                        ClassScope(
                            class_decl=ClassDecl(
                                typename=PQName(
                                    segments=[AnonymousName(id=1)], classkey="struct"
                                ),
                                access="public",
                            ),
                            fields=[
                                Field(
                                    access="public",
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    ),
                                    name="x",
                                )
                            ],
                        )
                    ],
                    fields=[
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[AnonymousName(id=1)], classkey="struct"
                                )
                            ),
                            name="a",
                        )
                    ],
                )
            ]
        )
    )
def test_initializer_with_initializer_list_1():
    """A constructor whose member initializer uses a brace list and has an inline body."""
    content = """
    struct ComplexInit : SomeBase {
    ComplexInit(int i) : m_stuff{i, 2} { auto i = something(); }
    void fn();
    std::vector<int> m_stuff;
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="ComplexInit")],
                            classkey="struct",
                        ),
                        bases=[
                            BaseClass(
                                access="public",
                                typename=PQName(
                                    segments=[NameSpecifier(name="SomeBase")]
                                ),
                            )
                        ],
                    ),
                    fields=[
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        NameSpecifier(name="std"),
                                        NameSpecifier(
                                            name="vector",
                                            specialization=TemplateSpecialization(
                                                args=[
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    FundamentalSpecifier(
                                                                        name="int"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    )
                                                ]
                                            ),
                                        ),
                                    ]
                                )
                            ),
                            name="m_stuff",
                        )
                    ],
                    methods=[
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="ComplexInit")]),
                            parameters=[
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    ),
                                    name="i",
                                )
                            ],
                            has_body=True,
                            access="public",
                            constructor=True,
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="fn")]),
                            parameters=[],
                            access="public",
                        ),
                    ],
                )
            ]
        )
    )
def test_initializer_with_initializer_list_2():
    """A templated converting constructor whose initializer list contains a lambda."""
    content = """
    template <typename T> class future final {
    public:
    template <typename R>
    future(future<R> &&oth) noexcept
    : future(oth.then([](R &&val) -> T { return val; })) {}
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="future")], classkey="class"
                        ),
                        template=TemplateDecl(
                            params=[TemplateTypeParam(typekey="typename", name="T")]
                        ),
                        final=True,
                    ),
                    methods=[
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="future")]),
                            parameters=[
                                Parameter(
                                    type=MoveReference(
                                        moveref_to=Type(
                                            typename=PQName(
                                                segments=[
                                                    NameSpecifier(
                                                        name="future",
                                                        specialization=TemplateSpecialization(
                                                            args=[
                                                                TemplateArgument(
                                                                    arg=Type(
                                                                        typename=PQName(
                                                                            segments=[
                                                                                NameSpecifier(
                                                                                    name="R"
                                                                                )
                                                                            ]
                                                                        )
                                                                    )
                                                                )
                                                            ]
                                                        ),
                                                    )
                                                ]
                                            )
                                        )
                                    ),
                                    name="oth",
                                )
                            ],
                            has_body=True,
                            template=TemplateDecl(
                                params=[TemplateTypeParam(typekey="typename", name="R")]
                            ),
                            noexcept=Value(tokens=[]),
                            access="public",
                            constructor=True,
                        )
                    ],
                )
            ]
        )
    )
def test_class_with_arrays():
    """Array fields sized by a named constant and by a literal, plus a scalar field."""
    content = """
    const int MAX_ITEM = 7;
    class Bird {
    int items[MAX_ITEM];
    int otherItems[7];
    int oneItem;
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Bird")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            access="private",
                            type=Array(
                                array_of=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="int")]
                                    )
                                ),
                                size=Value(tokens=[Token(value="MAX_ITEM")]),
                            ),
                            name="items",
                        ),
                        Field(
                            access="private",
                            type=Array(
                                array_of=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="int")]
                                    )
                                ),
                                size=Value(tokens=[Token(value="7")]),
                            ),
                            name="otherItems",
                        ),
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="oneItem",
                        ),
                    ],
                )
            ],
            variables=[
                Variable(
                    name=PQName(segments=[NameSpecifier(name="MAX_ITEM")]),
                    type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="int")]),
                        const=True,
                    ),
                    value=Value(tokens=[Token(value="7")]),
                )
            ],
        )
    )
def test_class_fn_inline_impl():
    """A static method declared in a class and defined inline outside the class body."""
    content = """
    class Monkey {
    private:
    static void Create();
    };
    inline void Monkey::Create() {}
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Monkey")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="Create")]),
                            parameters=[],
                            static=True,
                            access="private",
                        )
                    ],
                )
            ],
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="void")])
                    ),
                    name=PQName(
                        segments=[
                            NameSpecifier(name="Monkey"),
                            NameSpecifier(name="Create"),
                        ]
                    ),
                    parameters=[],
                    inline=True,
                    has_body=True,
                )
            ],
        )
    )
def test_class_fn_virtual_final_override():
    """virtual/final methods and a final derived class with an override method."""
    content = """
    struct Lemon {
    virtual void foo() final;
    virtual void foo2();
    };
    struct Lime final : Lemon {
    void abc();
    void foo2() override;
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Lemon")], classkey="struct"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="foo")]),
                            parameters=[],
                            access="public",
                            virtual=True,
                            final=True,
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="foo2")]),
                            parameters=[],
                            access="public",
                            virtual=True,
                        ),
                    ],
                ),
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Lime")], classkey="struct"
                        ),
                        bases=[
                            BaseClass(
                                access="public",
                                typename=PQName(segments=[NameSpecifier(name="Lemon")]),
                            )
                        ],
                        final=True,
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="abc")]),
                            parameters=[],
                            access="public",
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="foo2")]),
                            parameters=[],
                            access="public",
                            override=True,
                        ),
                    ],
                ),
            ]
        )
    )
def test_class_fn_return_class():
    """Elaborated 'class X' type specifiers used in return types, parameters, and fields."""
    content = """
    class Peach {
    int abc;
    };
    class Plumb {
    class Peach *doSomethingGreat(class Peach *pInCurPtr);
    class Peach *var;
    };
    class Peach *Plumb::myMethod(class Peach *pInPtr) {
    return pInPtr;
    }
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Peach")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="abc",
                        )
                    ],
                ),
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Plumb")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            access="private",
                            type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="Peach")],
                                        classkey="class",
                                    )
                                )
                            ),
                            name="var",
                        )
                    ],
                    methods=[
                        Method(
                            return_type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="Peach")],
                                        classkey="class",
                                    )
                                )
                            ),
                            name=PQName(
                                segments=[NameSpecifier(name="doSomethingGreat")]
                            ),
                            parameters=[
                                Parameter(
                                    type=Pointer(
                                        ptr_to=Type(
                                            typename=PQName(
                                                segments=[NameSpecifier(name="Peach")],
                                                classkey="class",
                                            )
                                        )
                                    ),
                                    name="pInCurPtr",
                                )
                            ],
                            access="private",
                        )
                    ],
                ),
            ],
            functions=[
                Function(
                    return_type=Pointer(
                        ptr_to=Type(
                            typename=PQName(
                                segments=[NameSpecifier(name="Peach")], classkey="class"
                            )
                        )
                    ),
                    name=PQName(
                        segments=[
                            NameSpecifier(name="Plumb"),
                            NameSpecifier(name="myMethod"),
                        ]
                    ),
                    parameters=[
                        Parameter(
                            type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="Peach")],
                                        classkey="class",
                                    )
                                )
                            ),
                            name="pInPtr",
                        )
                    ],
                    has_body=True,
                )
            ],
        )
    )
def test_class_fn_template_impl():
    """A template method declared in a class and defined outside it with a body."""
    content = """
    class Owl {
    private:
    template <typename T> int *tFunc(int count);
    };
    template <typename T> int *Owl::tFunc(int count) {
    if (count == 0) {
    return NULL;
    }
    return NULL;
    }
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Owl")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="int")]
                                    )
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="tFunc")]),
                            parameters=[
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    ),
                                    name="count",
                                )
                            ],
                            template=TemplateDecl(
                                params=[TemplateTypeParam(typekey="typename", name="T")]
                            ),
                            access="private",
                        )
                    ],
                )
            ],
            functions=[
                Function(
                    return_type=Pointer(
                        ptr_to=Type(
                            typename=PQName(segments=[FundamentalSpecifier(name="int")])
                        )
                    ),
                    name=PQName(
                        segments=[
                            NameSpecifier(name="Owl"),
                            NameSpecifier(name="tFunc"),
                        ]
                    ),
                    parameters=[
                        Parameter(
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="count",
                        )
                    ],
                    has_body=True,
                    template=TemplateDecl(
                        params=[TemplateTypeParam(typekey="typename", name="T")]
                    ),
                )
            ],
        )
    )
def test_class_fn_inline_template_impl():
    """A static template method with an out-of-class definition returning the template type."""
    content = """
    class Chicken {
    template <typename T> static T Get();
    };
    template <typename T> T Chicken::Get() { return T(); }
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Chicken")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(segments=[NameSpecifier(name="T")])
                            ),
                            name=PQName(segments=[NameSpecifier(name="Get")]),
                            parameters=[],
                            static=True,
                            template=TemplateDecl(
                                params=[TemplateTypeParam(typekey="typename", name="T")]
                            ),
                            access="private",
                        )
                    ],
                )
            ],
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[NameSpecifier(name="T")])
                    ),
                    name=PQName(
                        segments=[
                            NameSpecifier(name="Chicken"),
                            NameSpecifier(name="Get"),
                        ]
                    ),
                    parameters=[],
                    has_body=True,
                    template=TemplateDecl(
                        params=[TemplateTypeParam(typekey="typename", name="T")]
                    ),
                )
            ],
        )
    )
def test_class_fn_explicit_constructors():
    """A plain constructor alongside an explicit single-argument constructor."""
    content = """
    class Lizzard {
    Lizzard();
    explicit Lizzard(int a);
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Lizzard")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="Lizzard")]),
                            parameters=[],
                            access="private",
                            constructor=True,
                        ),
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="Lizzard")]),
                            parameters=[
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    ),
                                    name="a",
                                )
                            ],
                            access="private",
                            constructor=True,
                            explicit=True,
                        ),
                    ],
                )
            ]
        )
    )
def test_class_fn_default_constructor():
    """A defaulted constructor ('= default') is marked with default=True."""
    content = """
    class DefaultConstDest {
    public:
    DefaultConstDest() = default;
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="DefaultConstDest")],
                            classkey="class",
                        )
                    ),
                    methods=[
                        Method(
                            return_type=None,
                            name=PQName(
                                segments=[NameSpecifier(name="DefaultConstDest")]
                            ),
                            parameters=[],
                            access="public",
                            constructor=True,
                            default=True,
                        )
                    ],
                )
            ]
        )
    )
def test_class_fn_delete_constructor():
    """A deleted constructor ('= delete') is marked with deleted=True."""
    content = """
    class A {
    public:
    A() = delete;
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="A")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="A")]),
                            parameters=[],
                            access="public",
                            constructor=True,
                            deleted=True,
                        )
                    ],
                )
            ]
        )
    )
def test_class_multi_vars():
    """Multiple fields declared in one statement, including templated map<string, int> types."""
    content = """
    class Grape {
    public:
    int a, b, c;
    map<string, int> d;
    map<string, int> e, f;
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Grape")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="a",
                        ),
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="b",
                        ),
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="c",
                        ),
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        NameSpecifier(
                                            name="map",
                                            specialization=TemplateSpecialization(
                                                args=[
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    NameSpecifier(
                                                                        name="string"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    ),
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    FundamentalSpecifier(
                                                                        name="int"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    ),
                                                ]
                                            ),
                                        )
                                    ]
                                )
                            ),
                            name="d",
                        ),
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        NameSpecifier(
                                            name="map",
                                            specialization=TemplateSpecialization(
                                                args=[
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    NameSpecifier(
                                                                        name="string"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    ),
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    FundamentalSpecifier(
                                                                        name="int"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    ),
                                                ]
                                            ),
                                        )
                                    ]
                                )
                            ),
                            name="e",
                        ),
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        NameSpecifier(
                                            name="map",
                                            specialization=TemplateSpecialization(
                                                args=[
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    NameSpecifier(
                                                                        name="string"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    ),
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    FundamentalSpecifier(
                                                                        name="int"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    ),
                                                ]
                                            ),
                                        )
                                    ]
                                )
                            ),
                            name="f",
                        ),
                    ],
                )
            ]
        )
    )
def test_class_static_const_var_expr():
    """Static const fields initialized with constant expressions (shift and sizeof)."""
    content = """
    class PandaClass {
    static const int CONST_A = (1 << 7) - 1;
    static const int CONST_B = sizeof(int);
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="PandaClass")],
                            classkey="class",
                        )
                    ),
                    fields=[
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                ),
                                const=True,
                            ),
                            name="CONST_A",
                            value=Value(
                                tokens=[
                                    Token(value="("),
                                    Token(value="1"),
                                    Token(value="<<"),
                                    Token(value="7"),
                                    Token(value=")"),
                                    Token(value="-"),
                                    Token(value="1"),
                                ]
                            ),
                            static=True,
                        ),
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                ),
                                const=True,
                            ),
                            name="CONST_B",
                            value=Value(
                                tokens=[
                                    Token(value="sizeof"),
                                    Token(value="("),
                                    Token(value="int"),
                                    Token(value=")"),
                                ]
                            ),
                            static=True,
                        ),
                    ],
                )
            ]
        )
    )
def test_class_fwd_struct():
    """A nested forward declaration used (via a pointer field) before the nested struct's definition."""
    content = """
    class PotatoClass {
    struct FwdStruct;
    FwdStruct *ptr;
    struct FwdStruct {
    int a;
    };
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="PotatoClass")],
                            classkey="class",
                        )
                    ),
                    classes=[
                        ClassScope(
                            class_decl=ClassDecl(
                                typename=PQName(
                                    segments=[NameSpecifier(name="FwdStruct")],
                                    classkey="struct",
                                ),
                                access="private",
                            ),
                            fields=[
                                Field(
                                    access="public",
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    ),
                                    name="a",
                                )
                            ],
                        )
                    ],
                    fields=[
                        Field(
                            access="private",
                            type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="FwdStruct")]
                                    )
                                )
                            ),
                            name="ptr",
                        )
                    ],
                    forward_decls=[
                        ForwardDecl(
                            typename=PQName(
                                segments=[NameSpecifier(name="FwdStruct")],
                                classkey="struct",
                            ),
                            access="private",
                        )
                    ],
                )
            ]
        )
    )
def test_class_multi_array():
    """One-dimensional and two-dimensional array fields (nested Array types)."""
    content = """
    struct Picture {
    char name[25];
    unsigned int pdata[128][256];
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Picture")], classkey="struct"
                        )
                    ),
                    fields=[
                        Field(
                            access="public",
                            type=Array(
                                array_of=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="char")]
                                    )
                                ),
                                size=Value(tokens=[Token(value="25")]),
                            ),
                            name="name",
                        ),
                        Field(
                            access="public",
                            type=Array(
                                array_of=Array(
                                    array_of=Type(
                                        typename=PQName(
                                            segments=[
                                                FundamentalSpecifier(
                                                    name="unsigned int"
                                                )
                                            ]
                                        )
                                    ),
                                    size=Value(tokens=[Token(value="256")]),
                                ),
                                size=Value(tokens=[Token(value="128")]),
                            ),
                            name="pdata",
                        ),
                    ],
                )
            ]
        )
    )
def test_class_noexcept():
    """noexcept specifications in all supported forms, with and without const."""
    content = """
    struct Grackle {
    void no_noexcept();
    void just_noexcept() noexcept;
    void const_noexcept() const noexcept;
    void noexcept_bool() noexcept(true);
    void const_noexcept_bool() const noexcept(true);
    void noexcept_noexceptOperator() noexcept(noexcept(Grackle()));
    void const_noexcept_noexceptOperator() const noexcept(noexcept(Grackle()));
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Grackle")], classkey="struct"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="no_noexcept")]),
                            parameters=[],
                            access="public",
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="just_noexcept")]),
                            parameters=[],
                            noexcept=Value(tokens=[]),
                            access="public",
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(
                                segments=[NameSpecifier(name="const_noexcept")]
                            ),
                            parameters=[],
                            noexcept=Value(tokens=[]),
                            access="public",
                            const=True,
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="noexcept_bool")]),
                            parameters=[],
                            noexcept=Value(tokens=[Token(value="true")]),
                            access="public",
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(
                                segments=[NameSpecifier(name="const_noexcept_bool")]
                            ),
                            parameters=[],
                            noexcept=Value(tokens=[Token(value="true")]),
                            access="public",
                            const=True,
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(
                                segments=[
                                    NameSpecifier(name="noexcept_noexceptOperator")
                                ]
                            ),
                            parameters=[],
                            noexcept=Value(
                                tokens=[
                                    Token(value="noexcept"),
                                    Token(value="("),
                                    Token(value="Grackle"),
                                    Token(value="("),
                                    Token(value=")"),
                                    Token(value=")"),
                                ]
                            ),
                            access="public",
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(
                                segments=[
                                    NameSpecifier(
                                        name="const_noexcept_noexceptOperator"
                                    )
                                ]
                            ),
                            parameters=[],
                            noexcept=Value(
                                tokens=[
                                    Token(value="noexcept"),
                                    Token(value="("),
                                    Token(value="Grackle"),
                                    Token(value="("),
                                    Token(value=")"),
                                    Token(value=")"),
                                ]
                            ),
                            access="public",
                            const=True,
                        ),
                    ],
                )
            ]
        )
    )
def test_class_volatile():
    """A volatile-qualified field sets volatile=True on the field's Type."""
    content = """
    class Foo
    {
    public:
    private:
    volatile bool myToShutDown;
    };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Foo")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="bool")]
                                ),
                                volatile=True,
                            ),
                            name="myToShutDown",
                        )
                    ],
                )
            ]
        )
    )
| true | true |
f71f6e862b3a393d8f1a1757bbce7092bfb70ae4 | 33,635 | py | Python | demisto_sdk/commands/common/tests/pack_unique_files_test.py | guiguitodelperuu/demisto-sdk | 3eb0206593bc955a64c6594d717c04e52e254e1d | [
"MIT"
] | null | null | null | demisto_sdk/commands/common/tests/pack_unique_files_test.py | guiguitodelperuu/demisto-sdk | 3eb0206593bc955a64c6594d717c04e52e254e1d | [
"MIT"
] | null | null | null | demisto_sdk/commands/common/tests/pack_unique_files_test.py | guiguitodelperuu/demisto-sdk | 3eb0206593bc955a64c6594d717c04e52e254e1d | [
"MIT"
] | null | null | null | import json
import os
import click
import pytest
import requests_mock
from click.testing import CliRunner
from git import GitCommandError
from demisto_sdk.__main__ import main
from demisto_sdk.commands.common import tools
from demisto_sdk.commands.common.constants import (PACK_METADATA_DESC,
PACK_METADATA_SUPPORT,
PACK_METADATA_TAGS,
PACK_METADATA_USE_CASES,
PACKS_README_FILE_NAME,
XSOAR_SUPPORT)
from demisto_sdk.commands.common.errors import Errors
from demisto_sdk.commands.common.hook_validations.base_validator import \
BaseValidator
from demisto_sdk.commands.common.hook_validations.pack_unique_files import \
PackUniqueFilesValidator
from demisto_sdk.commands.common.legacy_git_tools import git_path
from TestSuite.test_tools import ChangeCWD
# demisto-sdk subcommand exercised by these tests.
VALIDATE_CMD = "validate"
# Minimal valid partner-supported pack metadata used as a baseline;
# individual tests copy this dict and mutate single fields (email/url/price).
PACK_METADATA_PARTNER = {
    "name": "test",
    "description": "test",
    "support": "partner",
    "currentVersion": "1.0.1",
    "author": "bar",
    "categories": [
        "Data Enrichment & Threat Intelligence"
    ],
    "tags": [],
    "useCases": [],
    "keywords": [],
    "price": 2,
    "email": "some@mail.com",
    "url": "https://www.paloaltonetworks.com/cortex"
}
# (readme_content, expected_validity) pairs: empty/whitespace-only README text is invalid.
README_INPUT_RESULTS_LIST = [
    ('', False),
    (' ', False),
    ('\t\t\n ', False),
    ('Text', True),
]
class TestPackUniqueFilesValidator:
FILES_PATH = os.path.normpath(os.path.join(__file__, f'{git_path()}/demisto_sdk/tests', 'test_files', 'Packs'))
FAKE_PACK_PATH = os.path.normpath(os.path.join(__file__, f'{git_path()}/demisto_sdk/tests', 'test_files',
'fake_pack'))
FAKE_PATH_NAME = 'fake_pack'
validator = PackUniqueFilesValidator(FAKE_PATH_NAME)
validator.pack_path = FAKE_PACK_PATH
def restart_validator(self):
self.validator.pack_path = ''
self.validator = PackUniqueFilesValidator(self.FAKE_PATH_NAME)
self.validator.pack_path = self.FAKE_PACK_PATH
def test_is_error_added_name_only(self):
self.validator._add_error(('boop', '101'), 'file_name')
assert f'{self.validator.pack_path}/file_name: [101] - boop\n' in self.validator.get_errors(True)
assert f'{self.validator.pack_path}/file_name: [101] - boop\n' in self.validator.get_errors()
self.validator._errors = []
def test_is_error_added_full_path(self):
self.validator._add_error(('boop', '101'), f'{self.validator.pack_path}/file/name')
assert f'{self.validator.pack_path}/file/name: [101] - boop\n' in self.validator.get_errors(True)
assert f'{self.validator.pack_path}/file/name: [101] - boop\n' in self.validator.get_errors()
self.validator._errors = []
def test_is_file_exist(self):
assert self.validator._is_pack_file_exists(PACKS_README_FILE_NAME)
assert not self.validator._is_pack_file_exists('boop')
self.validator._errors = []
def test_parse_file_into_list(self):
assert ['boop', 'sade', ''] == self.validator._parse_file_into_list(PACKS_README_FILE_NAME)
assert not self.validator._parse_file_into_list('boop')
self.validator._errors = []
    def test_validate_pack_unique_files(self, mocker):
        """The fake pack passes validation under mocked checks; a pack with empty metadata fails.

        NOTE(review): are_valid_files appears to return the accumulated error string
        (falsy == valid) — confirm against PackUniqueFilesValidator.
        """
        mocker.patch.object(BaseValidator, 'check_file_flags', return_value='')
        mocker.patch.object(PackUniqueFilesValidator, 'validate_pack_readme_and_pack_description', return_value=True)
        mocker.patch.object(PackUniqueFilesValidator, 'validate_pack_readme_images', return_value=True)
        mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': []}, 'json'))
        assert not self.validator.are_valid_files(id_set_validations=False)
        # A validator whose metadata reads as an empty dict should report errors (truthy result).
        fake_validator = PackUniqueFilesValidator('fake')
        mocker.patch.object(fake_validator, '_read_metadata_content', return_value=dict())
        assert fake_validator.are_valid_files(id_set_validations=False)
    def test_validate_pack_metadata(self, mocker):
        """Metadata validation via are_valid_files under the same mocked checks.

        NOTE(review): this body is byte-identical to test_validate_pack_unique_files —
        likely a copy-paste; confirm whether metadata-specific assertions were intended.
        """
        mocker.patch.object(BaseValidator, 'check_file_flags', return_value='')
        mocker.patch.object(PackUniqueFilesValidator, 'validate_pack_readme_and_pack_description', return_value=True)
        mocker.patch.object(PackUniqueFilesValidator, 'validate_pack_readme_images', return_value=True)
        mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': []}, 'json'))
        assert not self.validator.are_valid_files(id_set_validations=False)
        fake_validator = PackUniqueFilesValidator('fake')
        mocker.patch.object(fake_validator, '_read_metadata_content', return_value=dict())
        assert fake_validator.are_valid_files(id_set_validations=False)
    def test_validate_partner_contribute_pack_metadata_no_mail_and_url(self, mocker, repo):
        """
        Given
        - Partner contributed pack without email and url.
        When
        - Running validate on it.
        Then
        - Ensure validate found errors.
        """
        # Partner metadata with both contact fields blanked out.
        pack_metadata_no_email_and_url = PACK_METADATA_PARTNER.copy()
        pack_metadata_no_email_and_url['email'] = ''
        pack_metadata_no_email_and_url['url'] = ''
        mocker.patch.object(tools, 'is_external_repository', return_value=True)
        mocker.patch.object(PackUniqueFilesValidator, '_is_pack_file_exists', return_value=True)
        mocker.patch.object(PackUniqueFilesValidator, 'get_master_private_repo_meta_file', return_value=None)
        mocker.patch.object(PackUniqueFilesValidator, '_read_file_content',
                            return_value=json.dumps(pack_metadata_no_email_and_url))
        mocker.patch.object(BaseValidator, 'check_file_flags', return_value=None)
        mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': []}, 'json'))
        pack = repo.create_pack('PackName')
        pack.pack_metadata.write_json(pack_metadata_no_email_and_url)
        # Run the real CLI entry point against the pack and inspect its stdout.
        with ChangeCWD(repo.path):
            runner = CliRunner(mix_stderr=False)
            result = runner.invoke(main, [VALIDATE_CMD, '-i', pack.path], catch_exceptions=False)
        assert 'Contributed packs must include email or url' in result.stdout
    @pytest.mark.parametrize('url, is_valid', [
        ('https://github.com/pont_to_repo', False),
        ('some_support_url', True),
        ('https://github.com/pont_to_repo/issues', True),
    ])
    def test_validate_partner_pack_metadata_url(self, mocker, repo, url, is_valid):
        """
        Given
        - Partner contributed pack with an is_valid url.
        When
        - Running validate on it.
        Then
        - Ensure validate finds errors accordingly.

        A bare GitHub repo URL is rejected as a support page, while its
        /issues page is accepted.
        """
        pack_metadata_changed_url = PACK_METADATA_PARTNER.copy()
        pack_metadata_changed_url['url'] = url
        # Stub out git/file-system lookups so only the metadata content is exercised.
        mocker.patch.object(tools, 'is_external_repository', return_value=True)
        mocker.patch.object(PackUniqueFilesValidator, '_is_pack_file_exists', return_value=True)
        mocker.patch.object(PackUniqueFilesValidator, 'get_master_private_repo_meta_file', return_value=None)
        mocker.patch.object(PackUniqueFilesValidator, '_read_file_content',
                            return_value=json.dumps(pack_metadata_changed_url))
        mocker.patch.object(BaseValidator, 'check_file_flags', return_value=None)
        mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': []}, 'json'))
        pack = repo.create_pack('PackName')
        pack.pack_metadata.write_json(pack_metadata_changed_url)
        with ChangeCWD(repo.path):
            runner = CliRunner(mix_stderr=False)
            result = runner.invoke(main, [VALIDATE_CMD, '-i', pack.path], catch_exceptions=False)
        error_text = 'The metadata URL leads to a GitHub repo instead of a support page.'
        if is_valid:
            assert error_text not in result.stdout
        else:
            assert error_text in result.stdout
    def test_validate_partner_contribute_pack_metadata_price_change(self, mocker, repo):
        """
        Given
        - Partner contributed pack where price has changed.
        When
        - Running validate on it.
        Then
        - Ensure validate found errors.
        """
        # PACK_METADATA_PARTNER (mocked as the master-branch version) has price 2;
        # the local copy changes it to 3.
        pack_metadata_price_changed = PACK_METADATA_PARTNER.copy()
        pack_metadata_price_changed['price'] = 3
        mocker.patch.object(tools, 'is_external_repository', return_value=True)
        mocker.patch.object(PackUniqueFilesValidator, '_is_pack_file_exists', return_value=True)
        mocker.patch.object(PackUniqueFilesValidator, 'get_master_private_repo_meta_file',
                            return_value=PACK_METADATA_PARTNER)
        mocker.patch.object(PackUniqueFilesValidator, '_read_file_content',
                            return_value=json.dumps(pack_metadata_price_changed))
        mocker.patch.object(BaseValidator, 'check_file_flags', return_value=None)
        mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': []}, 'json'))
        pack = repo.create_pack('PackName')
        pack.pack_metadata.write_json(pack_metadata_price_changed)
        with ChangeCWD(repo.path):
            runner = CliRunner(mix_stderr=False)
            result = runner.invoke(main, [VALIDATE_CMD, '-i', pack.path], catch_exceptions=False)
        assert 'The pack price was changed from 2 to 3 - revert the change' in result.stdout
def test_check_timestamp_format(self):
"""
Given
- timestamps in various formats.
When
- Running check_timestamp_format on them.
Then
- Ensure True for iso format and False for any other format.
"""
fake_validator = PackUniqueFilesValidator('fake')
good_format_timestamp = '2020-04-14T00:00:00Z'
missing_z = '2020-04-14T00:00:00'
missing_t = '2020-04-14 00:00:00Z'
only_date = '2020-04-14'
with_hyphen = '2020-04-14T00-00-00Z'
assert fake_validator.check_timestamp_format(good_format_timestamp)
assert not fake_validator.check_timestamp_format(missing_t)
assert not fake_validator.check_timestamp_format(missing_z)
assert not fake_validator.check_timestamp_format(only_date)
assert not fake_validator.check_timestamp_format(with_hyphen)
    def test_validate_pack_dependencies_invalid_id_set(self, mocker, repo):
        """
        Given
        - An invalid id set error being raised
        When
        - Running validate_pack_dependencies.
        Then
        - Ensure that the validation fails and that the invalid id set error is printed.
        """
        self.restart_validator()
        def error_raising_function(*args, **kwargs):
            # Mimics the error raised when a pack cannot be resolved from id_set.json.
            raise ValueError("Couldn't find any items for pack 'PackID'. make sure your spelling is correct.")
        mocker.patch(
            'demisto_sdk.commands.common.hook_validations.pack_unique_files.get_core_pack_list',
            side_effect=error_raising_function
        )
        assert not self.validator.validate_pack_dependencies()
        assert Errors.invalid_id_set()[0] in self.validator.get_errors()
def test_validate_core_pack_dependencies(self):
"""
Given
- A list of non-core packs
When
- Running validate_core_pack_dependencies.
Then
- Ensure that the validation fails and that the invalid core pack dependencies error is printed.
"""
self.restart_validator()
dependencies_packs = {'dependency_pack_1': {'mandatory': True, 'display_name': 'dependency pack 1'},
'dependency_pack_2': {'mandatory': False, 'display_name': 'dependency pack 2'},
'dependency_pack_3': {'mandatory': True, 'display_name': 'dependency pack 3'}}
assert not self.validator.validate_core_pack_dependencies(dependencies_packs)
assert Errors.invalid_core_pack_dependencies('fake_pack', ['dependency_pack_1', 'dependency_pack_3'])[0] \
in self.validator.get_errors()
    def test_validate_pack_dependencies_skip_id_set_creation(self, capsys):
        """
        Given
        - skip_id_set_creation flag set to true.
        - No id_set file exists
        When
        - Running validate_pack_dependencies.
        Then
        - Ensure that the validation passes and that the skipping message is printed.
        """
        self.restart_validator()
        self.validator.skip_id_set_creation = True
        # With no id_set available, dependency calculation is skipped (per docstring premise).
        res = self.validator.validate_pack_dependencies()
        self.validator.skip_id_set_creation = False  # reverting to default for next tests
        assert res
        assert "No first level dependencies found" in capsys.readouterr().out
    @pytest.mark.parametrize('usecases, is_valid, branch_usecases', [
        ([], True, []),
        (['Phishing', 'Malware'], True, []),
        (['NonApprovedUsecase', 'Case Management'], False, []),
        (['NewUseCase'], True, ['NewUseCase']),
        (['NewUseCase1, NewUseCase2'], False, ['NewUseCase1'])
    ])
    def test_is_approved_usecases(self, repo, usecases, is_valid, branch_usecases, mocker):
        """
        Given:
        - Case A: Pack without usecases
        - Case B: Pack with approved usecases (Phishing and Malware)
        - Case C: Pack with non-approved usecase (NonApprovedUsecase) and approved usecase (Case Management)
        - Case D: Pack with approved usecase (NewUseCase) located in my branch only
        - Case E: Pack with non-approved usecase (NewUseCase2) and approved usecase (NewUseCase1)
          located in my branch only
        When:
        - Validating approved usecases
        Then:
        - Case A: Ensure validation passes as there are no usecases to verify
        - Case B: Ensure validation passes as both usecases are approved
        - Case C: Ensure validation fails as it contains a non-approved usecase (NonApprovedUsecase)
          Verify expected error is printed
        - Case D: Ensure validation passes as usecase is approved on the same branch
        - Case E: Ensure validation fails as it contains a non-approved usecase (NewUseCase2)
          Verify expected error is printed

        NOTE(review): in case E the parametrized value is a single string
        'NewUseCase1, NewUseCase2' (one list element), not two separate
        usecases - confirm this is intentional.
        """
        self.restart_validator()
        pack_name = 'PackName'
        pack = repo.create_pack(pack_name)
        pack.pack_metadata.write_json({
            PACK_METADATA_USE_CASES: usecases,
            PACK_METADATA_SUPPORT: XSOAR_SUPPORT,
            PACK_METADATA_TAGS: []
        })
        mocker.patch.object(tools, 'is_external_repository', return_value=False)
        # The branch-local approved list is supplied via the mocked file read.
        mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': branch_usecases}, 'json'))
        self.validator.pack_path = pack.path
        with ChangeCWD(repo.path):
            assert self.validator._is_approved_usecases() == is_valid
            if not is_valid:
                assert 'The pack metadata contains non approved usecases:' in self.validator.get_errors()
    @pytest.mark.parametrize('tags, is_valid, branch_tags', [
        ([], True, []),
        (['Machine Learning', 'Spam'], True, []),
        (['NonApprovedTag', 'GDPR'], False, []),
        (['NewTag'], True, ['NewTag']),
        (['NewTag1, NewTag2'], False, ['NewTag1'])
    ])
    def test_is_approved_tags(self, repo, tags, is_valid, branch_tags, mocker):
        """
        Given:
        - Case A: Pack without tags
        - Case B: Pack with approved tags (Machine Learning and Spam)
        - Case C: Pack with non-approved tag (NonApprovedTag) and approved tag (GDPR)
        - Case D: Pack with approved tag (NewTag) located in my branch only
        - Case E: Pack with non-approved tag (NewTag2) and approved tag (NewTag1)
          located in my branch only
        When:
        - Validating approved tags
        Then:
        - Case A: Ensure validation passes as there are no tags to verify
        - Case B: Ensure validation passes as both tags are approved
        - Case C: Ensure validation fails as it contains a non-approved tag (NonApprovedTag)
          Verify expected error is printed
        - Case D: Ensure validation passes as the tag is approved on the same branch
        - Case E: Ensure validation fails as it contains a non-approved tag (NewTag2)
          Verify expected error is printed

        NOTE(review): in case E the parametrized value is a single string
        'NewTag1, NewTag2' (one list element), not two separate tags -
        confirm this is intentional.
        """
        self.restart_validator()
        pack_name = 'PackName'
        pack = repo.create_pack(pack_name)
        pack.pack_metadata.write_json({
            PACK_METADATA_USE_CASES: [],
            PACK_METADATA_SUPPORT: XSOAR_SUPPORT,
            PACK_METADATA_TAGS: tags
        })
        mocker.patch.object(tools, 'is_external_repository', return_value=False)
        # The branch-local approved list is supplied via the mocked file read.
        mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': branch_tags}, 'json'))
        self.validator.pack_path = pack.path
        with ChangeCWD(repo.path):
            assert self.validator._is_approved_tags() == is_valid
            if not is_valid:
                assert 'The pack metadata contains non approved tags:' in self.validator.get_errors()
    @pytest.mark.parametrize('pack_content, tags, is_valid', [
        ("none", [], True),
        ("none", ["Use Case"], False),
        ("playbook", ["Use Case"], True),
        ("incident", ["Use Case"], True),
        ("layout", ["Use Case"], True),
        ("playbook", [], True),
    ])
    def test_is_right_usage_of_usecase_tag(self, repo, pack_content, tags, is_valid):
        """
        Given:
        - A pack whose metadata may carry the 'Use Case' tag, containing either no
          content or a playbook / incident type / layout.
        When:
        - Running is_right_usage_of_usecase_tag.
        Then:
        - Ensure the tag is only valid when the pack actually contains use-case
          content (playbook, incident type or layout).
        """
        self.restart_validator()
        pack_name = 'PackName'
        pack = repo.create_pack(pack_name)
        pack.pack_metadata.write_json({
            PACK_METADATA_USE_CASES: [],
            PACK_METADATA_SUPPORT: XSOAR_SUPPORT,
            PACK_METADATA_TAGS: tags,
        })
        # Create the kind of content requested by the parametrized case.
        if pack_content == "playbook":
            pack.create_playbook(name="PlaybookName")
        elif pack_content == "incident":
            pack.create_incident_type(name="IncidentTypeName")
        elif pack_content == "layout":
            pack.create_layout(name="Layout")
        self.validator.pack_path = pack.path
        with ChangeCWD(repo.path):
            assert self.validator.is_right_usage_of_usecase_tag() == is_valid
@pytest.mark.parametrize('type, is_valid', [
('community', True),
('partner', True),
('xsoar', True),
('someName', False),
('test', False),
('developer', True)
])
def test_is_valid_support_type(self, repo, type, is_valid):
"""
Given:
- Pack with support type in the metadata file.
When:
- Running _is_valid_support_type.
Then:
- Ensure True when the support types are valid, else False with the right error message.
"""
self.restart_validator()
pack_name = 'PackName'
pack = repo.create_pack(pack_name)
pack.pack_metadata.write_json({
PACK_METADATA_USE_CASES: [],
PACK_METADATA_SUPPORT: type
})
self.validator.pack_path = pack.path
with ChangeCWD(repo.path):
assert self.validator._is_valid_support_type() == is_valid
if not is_valid:
assert 'Support field should be one of the following: xsoar, partner, developer or community.' in \
self.validator.get_errors()
    def test_get_master_private_repo_meta_file_running_on_master(self, mocker, repo, capsys):
        """
        Given:
        - A repo which runs on master branch
        When:
        - Running get_master_private_repo_meta_file.
        Then:
        - Ensure result is None and the appropriate skipping message is printed.
        """
        self.restart_validator()
        pack_name = 'PackName'
        pack = repo.create_pack(pack_name)
        pack.pack_metadata.write_json(PACK_METADATA_PARTNER)
        class MyRepo:
            # Minimal stand-in for git.Repo: only the attribute the code under test reads.
            active_branch = 'master'
        mocker.patch('demisto_sdk.commands.common.hook_validations.pack_unique_files.Repo', return_value=MyRepo)
        res = self.validator.get_master_private_repo_meta_file(str(pack.pack_metadata.path))
        assert not res
        assert "Running on master branch - skipping price change validation" in capsys.readouterr().out
    def test_get_master_private_repo_meta_file_getting_git_error(self, repo, capsys, mocker):
        """
        Given:
        - A repo which runs on non-master branch.
        - git.show command raises GitCommandError.
        When:
        - Running get_master_private_repo_meta_file.
        Then:
        - Ensure result is None and the appropriate skipping message is printed.
        """
        self.restart_validator()
        pack_name = 'PackName'
        pack = repo.create_pack(pack_name)
        pack.pack_metadata.write_json(PACK_METADATA_PARTNER)
        class MyRepo:
            # Minimal stand-in for git.Repo whose `git.show` simulates a git failure.
            active_branch = 'not-master'
            class gitClass:
                def show(self, var):
                    raise GitCommandError("A", "B")
            git = gitClass()
        mocker.patch('demisto_sdk.commands.common.hook_validations.pack_unique_files.Repo', return_value=MyRepo)
        res = self.validator.get_master_private_repo_meta_file(str(pack.pack_metadata.path))
        assert not res
        assert "Got an error while trying to connect to git" in capsys.readouterr().out
    def test_get_master_private_repo_meta_file_file_not_found(self, mocker, repo, capsys):
        """
        Given:
        - A repo which runs on non-master branch.
        - git.show command returns None.
        When:
        - Running get_master_private_repo_meta_file.
        Then:
        - Ensure result is None and the appropriate skipping message is printed.
        """
        self.restart_validator()
        pack_name = 'PackName'
        pack = repo.create_pack(pack_name)
        pack.pack_metadata.write_json(PACK_METADATA_PARTNER)
        class MyRepo:
            # Minimal stand-in for git.Repo; `git.show` returning None simulates a
            # pack_metadata.json that does not exist on master.
            active_branch = 'not-master'
            class gitClass:
                def show(self, var):
                    return None
            git = gitClass()
        mocker.patch('demisto_sdk.commands.common.hook_validations.pack_unique_files.Repo', return_value=MyRepo)
        res = self.validator.get_master_private_repo_meta_file(str(pack.pack_metadata.path))
        assert not res
        assert "Unable to find previous pack_metadata.json file - skipping price change validation" in \
            capsys.readouterr().out
    @pytest.mark.parametrize('text, result', README_INPUT_RESULTS_LIST)
    def test_validate_pack_readme_file_is_not_empty_partner(self, mocker, text, result):
        """
        Given:
        - partner pack
        When:
        - Running validate_pack_readme_file_is_not_empty on it.
        Then:
        - Ensure result is False for empty README.md file and True otherwise.
        """
        self.validator = PackUniqueFilesValidator(self.FAKE_PACK_PATH)
        # Force partner support so the partner README rules are exercised.
        self.validator.support = 'partner'
        mocker.patch.object(PackUniqueFilesValidator, '_read_file_content', return_value=text)
        assert self.validator.validate_pack_readme_file_is_not_empty() == result
    @pytest.mark.parametrize('text, result', README_INPUT_RESULTS_LIST)
    def test_validate_pack_readme_file_is_not_empty_use_case(self, mocker, text, result):
        """
        Given:
        - pack with use case (the CortexXDR test fixture)
        When:
        - Running validate_pack_readme_file_is_not_empty on it.
        Then:
        - Ensure result is False for empty README.md file and True otherwise.
        """
        self.validator = PackUniqueFilesValidator(os.path.join(self.FILES_PATH, 'CortexXDR'))
        mocker.patch.object(PackUniqueFilesValidator, '_read_file_content', return_value=text)
        assert self.validator.validate_pack_readme_file_is_not_empty() == result
    def test_validate_pack_readme_file_is_not_empty_missing_file(self):
        """Ensure _is_pack_file_exists reports False for the README of a pack without one."""
        self.validator = PackUniqueFilesValidator(os.path.join(self.FILES_PATH, 'DummyPack'))
        assert self.validator._is_pack_file_exists(self.validator.readme_file) is False
def test_validate_pack_readme_valid_images(self, mocker):
"""
Given
- A pack README file with valid absolute image paths in it.
When
- Run validate on pack README file
Then
- Ensure:
- Validation succeed
- Valid absolute image paths were not caught
"""
from demisto_sdk.commands.common.hook_validations.readme import \
ReadMeValidator
self.validator = PackUniqueFilesValidator(os.path.join(self.FILES_PATH, 'DummyPack2'))
mocker.patch.object(ReadMeValidator, 'check_readme_relative_image_paths', return_value=[]) # Test only absolute paths
with requests_mock.Mocker() as m:
# Mock get requests
m.get('https://github.com/demisto/content/raw/test1.png',
status_code=200, text="Test1")
m.get('https://raw.githubusercontent.com/demisto/content/raw/test1.png',
status_code=200, text="Test1")
m.get('https://raw.githubusercontent.com/demisto/content/raw/test1.jpg',
status_code=200, text="Test1")
result = self.validator.validate_pack_readme_images()
errors = self.validator.get_errors()
assert result
assert 'please repair it:\n' not in errors
assert 'please repair it:\n' not in errors
assert 'please repair it:\n(https://raw.githubusercontent.com/demisto/content/raw/test1.jpg)' not in errors
def test_validate_pack_readme_invalid_images(self):
"""
Given
- A pack README file with invalid absolute and relative image paths in it.
When
- Run validate on pack README file
Then
- Ensure:
- Validation fails
- Invalid relative image paths were caught correctly
- Invalid absolute image paths were caught correctly
"""
self.validator = PackUniqueFilesValidator(os.path.join(self.FILES_PATH, 'DummyPack2'))
with requests_mock.Mocker() as m:
# Mock get requests
m.get('https://github.com/demisto/content/raw/test1.png',
status_code=404, text="Test1")
m.get('https://raw.githubusercontent.com/demisto/content/raw/test1.png',
status_code=404, text="Test1")
m.get('https://raw.githubusercontent.com/demisto/content/raw/test1.jpg',
status_code=404, text="Test1")
result = self.validator.validate_pack_readme_images()
errors = self.validator.get_errors()
assert not result
assert 'Detected the following image relative path: ' in errors
assert 'Detected the following image relative path: ' in errors
assert 'Detected the following image relative path: (../../doc_files/Access_investigation_-_Generic_4_5.png)' in errors
assert 'Image link was not found, either insert it or remove it:\n' in errors
assert 'please repair it:\n' in errors
assert 'please repair it:\n' in errors
assert 'please repair it:\n(https://raw.githubusercontent.com/demisto/content/raw/test1.jpg)' in errors
    @pytest.mark.parametrize('readme_content, is_valid', [
        ('Hey there, just testing', True),
        ('This is a test. All good!', False),
    ])
    def test_pack_readme_is_different_then_pack_description(self, repo, readme_content, is_valid):
        """
        Given:
        - Case A: A unique pack readme.
        - Case B: Pack readme that is equal to pack description
        When:
        - Validating pack readme vs pack description
        Then:
        - Case A: Ensure validation passes as the pack readme and pack description are different.
        - Case B: Ensure validation fails as the pack readme is the same as the pack description.
          Verify expected error is printed
        """
        self.restart_validator()
        pack_name = 'PackName'
        pack = repo.create_pack(pack_name)
        pack.readme.write_text(readme_content)
        # The description deliberately matches the Case B readme text.
        pack.pack_metadata.write_json({
            PACK_METADATA_DESC: 'This is a test. All good!',
        })
        self.validator.pack_path = pack.path
        with ChangeCWD(repo.path):
            assert self.validator.validate_pack_readme_and_pack_description() == is_valid
            if not is_valid:
                assert 'README.md content is equal to pack description. ' \
                       'Please remove the duplicate description from README.md file' in self.validator.get_errors()
    def test_validate_pack_readme_and_pack_description_no_readme_file(self, repo):
        """
        Given:
        - A pack with no readme.
        When:
        - Validating pack readme vs pack description
        Then:
        - Fail on the missing README file and not on the duplicate-description error.
        """
        self.restart_validator()
        pack_name = 'PackName'
        pack = repo.create_pack(pack_name)
        self.validator.pack_path = pack.path
        with ChangeCWD(repo.path):
            # Remove the fixture-created README so the missing-file path is exercised.
            os.remove(pack.readme.path)
            assert self.validator.validate_pack_readme_and_pack_description()
            assert '"README.md" file does not exist, create one in the root of the pack' in self.validator.get_errors()
            assert 'README.md content is equal to pack description. ' \
                   'Please remove the duplicate description from README.md file' not in self.validator.get_errors()
    def test_valid_is_pack_metadata_desc_too_long(self, repo):
        """
        Given:
        - Valid description length (within the 130-character limit)
        When:
        - Validating pack description length
        Then:
        - Ensure validation passes as the description field length is valid.
        """
        pack_description = 'Hey there, just testing'
        assert self.validator.is_pack_metadata_desc_too_long(pack_description) is True
    def test_invalid_is_pack_metadata_desc_too_long(self, mocker, repo):
        """
        Given:
        - Invalid description length - higher than 130
        When:
        - Validating pack description length
        Then:
        - Ensure validation passes although description field length is higher than 130
        - Ensure warning will be printed.
        """
        pack_description = 'This is will fail cause the description here is too long.' \
                           'test test test test test test test test test test test test test test test test test' \
                           ' test test test test test'
        error_desc = 'The description field of the pack_metadata.json file is longer than 130 characters.'
        mocker.patch("click.secho")
        # An over-long description only warns (via click.secho); validation still passes.
        assert self.validator.is_pack_metadata_desc_too_long(pack_description) is True
        assert error_desc in click.secho.call_args_list[0][0][0]
    def test_validate_author_image_exists_valid(self, repo):
        """
        Given:
        - Pack with partner support and author image
        When:
        - Validating if author image exists
        Then:
        - Ensure validation passes.

        NOTE(review): relies on the repo fixture creating a default author image
        (the invalid-case test below removes it explicitly) - confirm.
        """
        pack = repo.create_pack('MyPack')
        self.validator.metadata_content = {'support': 'partner'}
        self.validator.pack_path = pack.path
        author_image_path = pack.author_image.path
        with ChangeCWD(repo.path):
            res = self.validator.validate_author_image_exists()
            assert res
            assert f'Partners must provide a non-empty author image under the path {author_image_path}.' not in \
                self.validator.get_errors()
    def test_validate_author_image_exists_invalid(self, repo):
        """
        Given:
        - Pack with partner support and no author image
        When:
        - Validating if author image exists
        Then:
        - Ensure validation fails.
        """
        pack = repo.create_pack('MyPack')
        self.validator.metadata_content = {'support': 'partner'}
        self.validator.pack_path = pack.path
        author_image_path = pack.author_image.path
        with ChangeCWD(repo.path):
            # Remove the fixture-created author image to simulate a partner pack without one.
            os.remove(author_image_path)
            res = self.validator.validate_author_image_exists()
            assert not res
            assert f'Partners must provide a non-empty author image under the path {author_image_path}.' in \
                self.validator.get_errors()
| 43.456072 | 150 | 0.648759 | import json
import os
import click
import pytest
import requests_mock
from click.testing import CliRunner
from git import GitCommandError
from demisto_sdk.__main__ import main
from demisto_sdk.commands.common import tools
from demisto_sdk.commands.common.constants import (PACK_METADATA_DESC,
PACK_METADATA_SUPPORT,
PACK_METADATA_TAGS,
PACK_METADATA_USE_CASES,
PACKS_README_FILE_NAME,
XSOAR_SUPPORT)
from demisto_sdk.commands.common.errors import Errors
from demisto_sdk.commands.common.hook_validations.base_validator import \
BaseValidator
from demisto_sdk.commands.common.hook_validations.pack_unique_files import \
PackUniqueFilesValidator
from demisto_sdk.commands.common.legacy_git_tools import git_path
from TestSuite.test_tools import ChangeCWD
# CLI sub-command name used when invoking the SDK main entry point.
VALIDATE_CMD = "validate"
# Baseline metadata for a partner-supported pack; individual tests copy and
# mutate it (e.g. clearing email/url, changing price).
PACK_METADATA_PARTNER = {
    "name": "test",
    "description": "test",
    "support": "partner",
    "currentVersion": "1.0.1",
    "author": "bar",
    "categories": [
        "Data Enrichment & Threat Intelligence"
    ],
    "tags": [],
    "useCases": [],
    "keywords": [],
    "price": 2,
    "email": "some@mail.com",
    "url": "https://www.paloaltonetworks.com/cortex"
}
# (readme_text, expected_result) pairs: whitespace-only README content counts as empty.
README_INPUT_RESULTS_LIST = [
    ('', False),
    (' ', False),
    ('\t\t\n ', False),
    ('Text', True),
]
class TestPackUniqueFilesValidator:
    # Directory containing the shared test pack fixtures.
    FILES_PATH = os.path.normpath(os.path.join(__file__, f'{git_path()}/demisto_sdk/tests', 'test_files', 'Packs'))
    # A deliberately malformed pack used as the default validation target.
    FAKE_PACK_PATH = os.path.normpath(os.path.join(__file__, f'{git_path()}/demisto_sdk/tests', 'test_files',
                                                   'fake_pack'))
    FAKE_PATH_NAME = 'fake_pack'
    # Shared validator instance; tests call restart_validator() for a clean one.
    validator = PackUniqueFilesValidator(FAKE_PATH_NAME)
    validator.pack_path = FAKE_PACK_PATH
    def restart_validator(self):
        """Replace the shared validator with a fresh instance targeting the fake pack."""
        self.validator.pack_path = ''
        self.validator = PackUniqueFilesValidator(self.FAKE_PATH_NAME)
        self.validator.pack_path = self.FAKE_PACK_PATH
    def test_is_error_added_name_only(self):
        """An error added with a bare file name is prefixed with the pack path in the output."""
        self.validator._add_error(('boop', '101'), 'file_name')
        assert f'{self.validator.pack_path}/file_name: [101] - boop\n' in self.validator.get_errors(True)
        assert f'{self.validator.pack_path}/file_name: [101] - boop\n' in self.validator.get_errors()
        self.validator._errors = []  # reset shared state for subsequent tests
    def test_is_error_added_full_path(self):
        """An error added with a full file path keeps the given path in the output."""
        self.validator._add_error(('boop', '101'), f'{self.validator.pack_path}/file/name')
        assert f'{self.validator.pack_path}/file/name: [101] - boop\n' in self.validator.get_errors(True)
        assert f'{self.validator.pack_path}/file/name: [101] - boop\n' in self.validator.get_errors()
        self.validator._errors = []  # reset shared state for subsequent tests
    def test_is_file_exist(self):
        """_is_pack_file_exists is True for the pack README and False for a bogus name."""
        assert self.validator._is_pack_file_exists(PACKS_README_FILE_NAME)
        assert not self.validator._is_pack_file_exists('boop')
        self.validator._errors = []  # reset shared state for subsequent tests
    def test_parse_file_into_list(self):
        """_parse_file_into_list splits the README into lines; a missing file yields a falsy result."""
        assert ['boop', 'sade', ''] == self.validator._parse_file_into_list(PACKS_README_FILE_NAME)
        assert not self.validator._parse_file_into_list('boop')
        self.validator._errors = []  # reset shared state for subsequent tests
def test_validate_pack_unique_files(self, mocker):
mocker.patch.object(BaseValidator, 'check_file_flags', return_value='')
mocker.patch.object(PackUniqueFilesValidator, 'validate_pack_readme_and_pack_description', return_value=True)
mocker.patch.object(PackUniqueFilesValidator, 'validate_pack_readme_images', return_value=True)
mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': []}, 'json'))
assert not self.validator.are_valid_files(id_set_validations=False)
fake_validator = PackUniqueFilesValidator('fake')
mocker.patch.object(fake_validator, '_read_metadata_content', return_value=dict())
assert fake_validator.are_valid_files(id_set_validations=False)
def test_validate_pack_metadata(self, mocker):
mocker.patch.object(BaseValidator, 'check_file_flags', return_value='')
mocker.patch.object(PackUniqueFilesValidator, 'validate_pack_readme_and_pack_description', return_value=True)
mocker.patch.object(PackUniqueFilesValidator, 'validate_pack_readme_images', return_value=True)
mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': []}, 'json'))
assert not self.validator.are_valid_files(id_set_validations=False)
fake_validator = PackUniqueFilesValidator('fake')
mocker.patch.object(fake_validator, '_read_metadata_content', return_value=dict())
assert fake_validator.are_valid_files(id_set_validations=False)
def test_validate_partner_contribute_pack_metadata_no_mail_and_url(self, mocker, repo):
pack_metadata_no_email_and_url = PACK_METADATA_PARTNER.copy()
pack_metadata_no_email_and_url['email'] = ''
pack_metadata_no_email_and_url['url'] = ''
mocker.patch.object(tools, 'is_external_repository', return_value=True)
mocker.patch.object(PackUniqueFilesValidator, '_is_pack_file_exists', return_value=True)
mocker.patch.object(PackUniqueFilesValidator, 'get_master_private_repo_meta_file', return_value=None)
mocker.patch.object(PackUniqueFilesValidator, '_read_file_content',
return_value=json.dumps(pack_metadata_no_email_and_url))
mocker.patch.object(BaseValidator, 'check_file_flags', return_value=None)
mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': []}, 'json'))
pack = repo.create_pack('PackName')
pack.pack_metadata.write_json(pack_metadata_no_email_and_url)
with ChangeCWD(repo.path):
runner = CliRunner(mix_stderr=False)
result = runner.invoke(main, [VALIDATE_CMD, '-i', pack.path], catch_exceptions=False)
assert 'Contributed packs must include email or url' in result.stdout
@pytest.mark.parametrize('url, is_valid', [
('https://github.com/pont_to_repo', False),
('some_support_url', True),
('https://github.com/pont_to_repo/issues', True),
])
def test_validate_partner_pack_metadata_url(self, mocker, repo, url, is_valid):
pack_metadata_changed_url = PACK_METADATA_PARTNER.copy()
pack_metadata_changed_url['url'] = url
mocker.patch.object(tools, 'is_external_repository', return_value=True)
mocker.patch.object(PackUniqueFilesValidator, '_is_pack_file_exists', return_value=True)
mocker.patch.object(PackUniqueFilesValidator, 'get_master_private_repo_meta_file', return_value=None)
mocker.patch.object(PackUniqueFilesValidator, '_read_file_content',
return_value=json.dumps(pack_metadata_changed_url))
mocker.patch.object(BaseValidator, 'check_file_flags', return_value=None)
mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': []}, 'json'))
pack = repo.create_pack('PackName')
pack.pack_metadata.write_json(pack_metadata_changed_url)
with ChangeCWD(repo.path):
runner = CliRunner(mix_stderr=False)
result = runner.invoke(main, [VALIDATE_CMD, '-i', pack.path], catch_exceptions=False)
error_text = 'The metadata URL leads to a GitHub repo instead of a support page.'
if is_valid:
assert error_text not in result.stdout
else:
assert error_text in result.stdout
def test_validate_partner_contribute_pack_metadata_price_change(self, mocker, repo):
pack_metadata_price_changed = PACK_METADATA_PARTNER.copy()
pack_metadata_price_changed['price'] = 3
mocker.patch.object(tools, 'is_external_repository', return_value=True)
mocker.patch.object(PackUniqueFilesValidator, '_is_pack_file_exists', return_value=True)
mocker.patch.object(PackUniqueFilesValidator, 'get_master_private_repo_meta_file',
return_value=PACK_METADATA_PARTNER)
mocker.patch.object(PackUniqueFilesValidator, '_read_file_content',
return_value=json.dumps(pack_metadata_price_changed))
mocker.patch.object(BaseValidator, 'check_file_flags', return_value=None)
mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': []}, 'json'))
pack = repo.create_pack('PackName')
pack.pack_metadata.write_json(pack_metadata_price_changed)
with ChangeCWD(repo.path):
runner = CliRunner(mix_stderr=False)
result = runner.invoke(main, [VALIDATE_CMD, '-i', pack.path], catch_exceptions=False)
assert 'The pack price was changed from 2 to 3 - revert the change' in result.stdout
def test_check_timestamp_format(self):
fake_validator = PackUniqueFilesValidator('fake')
good_format_timestamp = '2020-04-14T00:00:00Z'
missing_z = '2020-04-14T00:00:00'
missing_t = '2020-04-14 00:00:00Z'
only_date = '2020-04-14'
with_hyphen = '2020-04-14T00-00-00Z'
assert fake_validator.check_timestamp_format(good_format_timestamp)
assert not fake_validator.check_timestamp_format(missing_t)
assert not fake_validator.check_timestamp_format(missing_z)
assert not fake_validator.check_timestamp_format(only_date)
assert not fake_validator.check_timestamp_format(with_hyphen)
def test_validate_pack_dependencies_invalid_id_set(self, mocker, repo):
self.restart_validator()
def error_raising_function(*args, **kwargs):
raise ValueError("Couldn't find any items for pack 'PackID'. make sure your spelling is correct.")
mocker.patch(
'demisto_sdk.commands.common.hook_validations.pack_unique_files.get_core_pack_list',
side_effect=error_raising_function
)
assert not self.validator.validate_pack_dependencies()
assert Errors.invalid_id_set()[0] in self.validator.get_errors()
def test_validate_core_pack_dependencies(self):
self.restart_validator()
dependencies_packs = {'dependency_pack_1': {'mandatory': True, 'display_name': 'dependency pack 1'},
'dependency_pack_2': {'mandatory': False, 'display_name': 'dependency pack 2'},
'dependency_pack_3': {'mandatory': True, 'display_name': 'dependency pack 3'}}
assert not self.validator.validate_core_pack_dependencies(dependencies_packs)
assert Errors.invalid_core_pack_dependencies('fake_pack', ['dependency_pack_1', 'dependency_pack_3'])[0] \
in self.validator.get_errors()
def test_validate_pack_dependencies_skip_id_set_creation(self, capsys):
self.restart_validator()
self.validator.skip_id_set_creation = True
res = self.validator.validate_pack_dependencies()
self.validator.skip_id_set_creation = False # reverting to default for next tests
assert res
assert "No first level dependencies found" in capsys.readouterr().out
@pytest.mark.parametrize('usecases, is_valid, branch_usecases', [
([], True, []),
(['Phishing', 'Malware'], True, []),
(['NonApprovedUsecase', 'Case Management'], False, []),
(['NewUseCase'], True, ['NewUseCase']),
(['NewUseCase1, NewUseCase2'], False, ['NewUseCase1'])
])
def test_is_approved_usecases(self, repo, usecases, is_valid, branch_usecases, mocker):
self.restart_validator()
pack_name = 'PackName'
pack = repo.create_pack(pack_name)
pack.pack_metadata.write_json({
PACK_METADATA_USE_CASES: usecases,
PACK_METADATA_SUPPORT: XSOAR_SUPPORT,
PACK_METADATA_TAGS: []
})
mocker.patch.object(tools, 'is_external_repository', return_value=False)
mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': branch_usecases}, 'json'))
self.validator.pack_path = pack.path
with ChangeCWD(repo.path):
assert self.validator._is_approved_usecases() == is_valid
if not is_valid:
assert 'The pack metadata contains non approved usecases:' in self.validator.get_errors()
@pytest.mark.parametrize('tags, is_valid, branch_tags', [
([], True, []),
(['Machine Learning', 'Spam'], True, []),
(['NonApprovedTag', 'GDPR'], False, []),
(['NewTag'], True, ['NewTag']),
(['NewTag1, NewTag2'], False, ['NewTag1'])
])
def test_is_approved_tags(self, repo, tags, is_valid, branch_tags, mocker):
self.restart_validator()
pack_name = 'PackName'
pack = repo.create_pack(pack_name)
pack.pack_metadata.write_json({
PACK_METADATA_USE_CASES: [],
PACK_METADATA_SUPPORT: XSOAR_SUPPORT,
PACK_METADATA_TAGS: tags
})
mocker.patch.object(tools, 'is_external_repository', return_value=False)
mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': branch_tags}, 'json'))
self.validator.pack_path = pack.path
with ChangeCWD(repo.path):
assert self.validator._is_approved_tags() == is_valid
if not is_valid:
assert 'The pack metadata contains non approved tags:' in self.validator.get_errors()
@pytest.mark.parametrize('pack_content, tags, is_valid', [
("none", [], True),
("none", ["Use Case"], False),
("playbook", ["Use Case"], True),
("incident", ["Use Case"], True),
("layout", ["Use Case"], True),
("playbook", [], True),
])
def test_is_right_usage_of_usecase_tag(self, repo, pack_content, tags, is_valid):
self.restart_validator()
pack_name = 'PackName'
pack = repo.create_pack(pack_name)
pack.pack_metadata.write_json({
PACK_METADATA_USE_CASES: [],
PACK_METADATA_SUPPORT: XSOAR_SUPPORT,
PACK_METADATA_TAGS: tags,
})
if pack_content == "playbook":
pack.create_playbook(name="PlaybookName")
elif pack_content == "incident":
pack.create_incident_type(name="IncidentTypeName")
elif pack_content == "layout":
pack.create_layout(name="Layout")
self.validator.pack_path = pack.path
with ChangeCWD(repo.path):
assert self.validator.is_right_usage_of_usecase_tag() == is_valid
@pytest.mark.parametrize('type, is_valid', [
('community', True),
('partner', True),
('xsoar', True),
('someName', False),
('test', False),
('developer', True)
])
def test_is_valid_support_type(self, repo, type, is_valid):
self.restart_validator()
pack_name = 'PackName'
pack = repo.create_pack(pack_name)
pack.pack_metadata.write_json({
PACK_METADATA_USE_CASES: [],
PACK_METADATA_SUPPORT: type
})
self.validator.pack_path = pack.path
with ChangeCWD(repo.path):
assert self.validator._is_valid_support_type() == is_valid
if not is_valid:
assert 'Support field should be one of the following: xsoar, partner, developer or community.' in \
self.validator.get_errors()
def test_get_master_private_repo_meta_file_running_on_master(self, mocker, repo, capsys):
self.restart_validator()
pack_name = 'PackName'
pack = repo.create_pack(pack_name)
pack.pack_metadata.write_json(PACK_METADATA_PARTNER)
class MyRepo:
active_branch = 'master'
mocker.patch('demisto_sdk.commands.common.hook_validations.pack_unique_files.Repo', return_value=MyRepo)
res = self.validator.get_master_private_repo_meta_file(str(pack.pack_metadata.path))
assert not res
assert "Running on master branch - skipping price change validation" in capsys.readouterr().out
def test_get_master_private_repo_meta_file_getting_git_error(self, repo, capsys, mocker):
self.restart_validator()
pack_name = 'PackName'
pack = repo.create_pack(pack_name)
pack.pack_metadata.write_json(PACK_METADATA_PARTNER)
class MyRepo:
active_branch = 'not-master'
class gitClass:
def show(self, var):
raise GitCommandError("A", "B")
git = gitClass()
mocker.patch('demisto_sdk.commands.common.hook_validations.pack_unique_files.Repo', return_value=MyRepo)
res = self.validator.get_master_private_repo_meta_file(str(pack.pack_metadata.path))
assert not res
assert "Got an error while trying to connect to git" in capsys.readouterr().out
def test_get_master_private_repo_meta_file_file_not_found(self, mocker, repo, capsys):
self.restart_validator()
pack_name = 'PackName'
pack = repo.create_pack(pack_name)
pack.pack_metadata.write_json(PACK_METADATA_PARTNER)
class MyRepo:
active_branch = 'not-master'
class gitClass:
def show(self, var):
return None
git = gitClass()
mocker.patch('demisto_sdk.commands.common.hook_validations.pack_unique_files.Repo', return_value=MyRepo)
res = self.validator.get_master_private_repo_meta_file(str(pack.pack_metadata.path))
assert not res
assert "Unable to find previous pack_metadata.json file - skipping price change validation" in \
capsys.readouterr().out
@pytest.mark.parametrize('text, result', README_INPUT_RESULTS_LIST)
def test_validate_pack_readme_file_is_not_empty_partner(self, mocker, text, result):
self.validator = PackUniqueFilesValidator(self.FAKE_PACK_PATH)
self.validator.support = 'partner'
mocker.patch.object(PackUniqueFilesValidator, '_read_file_content', return_value=text)
assert self.validator.validate_pack_readme_file_is_not_empty() == result
@pytest.mark.parametrize('text, result', README_INPUT_RESULTS_LIST)
def test_validate_pack_readme_file_is_not_empty_use_case(self, mocker, text, result):
self.validator = PackUniqueFilesValidator(os.path.join(self.FILES_PATH, 'CortexXDR'))
mocker.patch.object(PackUniqueFilesValidator, '_read_file_content', return_value=text)
assert self.validator.validate_pack_readme_file_is_not_empty() == result
def test_validate_pack_readme_file_is_not_empty_missing_file(self):
self.validator = PackUniqueFilesValidator(os.path.join(self.FILES_PATH, 'DummyPack'))
assert self.validator._is_pack_file_exists(self.validator.readme_file) is False
def test_validate_pack_readme_valid_images(self, mocker):
from demisto_sdk.commands.common.hook_validations.readme import \
ReadMeValidator
self.validator = PackUniqueFilesValidator(os.path.join(self.FILES_PATH, 'DummyPack2'))
mocker.patch.object(ReadMeValidator, 'check_readme_relative_image_paths', return_value=[]) # Test only absolute paths
with requests_mock.Mocker() as m:
# Mock get requests
m.get('https://github.com/demisto/content/raw/test1.png',
status_code=200, text="Test1")
m.get('https://raw.githubusercontent.com/demisto/content/raw/test1.png',
status_code=200, text="Test1")
m.get('https://raw.githubusercontent.com/demisto/content/raw/test1.jpg',
status_code=200, text="Test1")
result = self.validator.validate_pack_readme_images()
errors = self.validator.get_errors()
assert result
assert 'please repair it:\n' not in errors
assert 'please repair it:\n' not in errors
assert 'please repair it:\n(https://raw.githubusercontent.com/demisto/content/raw/test1.jpg)' not in errors
def test_validate_pack_readme_invalid_images(self):
self.validator = PackUniqueFilesValidator(os.path.join(self.FILES_PATH, 'DummyPack2'))
with requests_mock.Mocker() as m:
# Mock get requests
m.get('https://github.com/demisto/content/raw/test1.png',
status_code=404, text="Test1")
m.get('https://raw.githubusercontent.com/demisto/content/raw/test1.png',
status_code=404, text="Test1")
m.get('https://raw.githubusercontent.com/demisto/content/raw/test1.jpg',
status_code=404, text="Test1")
result = self.validator.validate_pack_readme_images()
errors = self.validator.get_errors()
assert not result
assert 'Detected the following image relative path: ' in errors
assert 'Detected the following image relative path: ' in errors
assert 'Detected the following image relative path: (../../doc_files/Access_investigation_-_Generic_4_5.png)' in errors
assert 'Image link was not found, either insert it or remove it:\n' in errors
assert 'please repair it:\n' in errors
assert 'please repair it:\n' in errors
assert 'please repair it:\n(https://raw.githubusercontent.com/demisto/content/raw/test1.jpg)' in errors
@pytest.mark.parametrize('readme_content, is_valid', [
('Hey there, just testing', True),
('This is a test. All good!', False),
])
def test_pack_readme_is_different_then_pack_description(self, repo, readme_content, is_valid):
self.restart_validator()
pack_name = 'PackName'
pack = repo.create_pack(pack_name)
pack.readme.write_text(readme_content)
pack.pack_metadata.write_json({
PACK_METADATA_DESC: 'This is a test. All good!',
})
self.validator.pack_path = pack.path
with ChangeCWD(repo.path):
assert self.validator.validate_pack_readme_and_pack_description() == is_valid
if not is_valid:
assert 'README.md content is equal to pack description. ' \
'Please remove the duplicate description from README.md file' in self.validator.get_errors()
def test_validate_pack_readme_and_pack_description_no_readme_file(self, repo):
self.restart_validator()
pack_name = 'PackName'
pack = repo.create_pack(pack_name)
self.validator.pack_path = pack.path
with ChangeCWD(repo.path):
os.remove(pack.readme.path)
assert self.validator.validate_pack_readme_and_pack_description()
assert '"README.md" file does not exist, create one in the root of the pack' in self.validator.get_errors()
assert 'README.md content is equal to pack description. ' \
'Please remove the duplicate description from README.md file' not in self.validator.get_errors()
def test_valid_is_pack_metadata_desc_too_long(self, repo):
pack_description = 'Hey there, just testing'
assert self.validator.is_pack_metadata_desc_too_long(pack_description) is True
def test_invalid_is_pack_metadata_desc_too_long(self, mocker, repo):
pack_description = 'This is will fail cause the description here is too long.' \
'test test test test test test test test test test test test test test test test test' \
' test test test test test'
error_desc = 'The description field of the pack_metadata.json file is longer than 130 characters.'
mocker.patch("click.secho")
assert self.validator.is_pack_metadata_desc_too_long(pack_description) is True
assert error_desc in click.secho.call_args_list[0][0][0]
def test_validate_author_image_exists_valid(self, repo):
pack = repo.create_pack('MyPack')
self.validator.metadata_content = {'support': 'partner'}
self.validator.pack_path = pack.path
author_image_path = pack.author_image.path
with ChangeCWD(repo.path):
res = self.validator.validate_author_image_exists()
assert res
assert f'Partners must provide a non-empty author image under the path {author_image_path}.' not in \
self.validator.get_errors()
def test_validate_author_image_exists_invalid(self, repo):
pack = repo.create_pack('MyPack')
self.validator.metadata_content = {'support': 'partner'}
self.validator.pack_path = pack.path
author_image_path = pack.author_image.path
with ChangeCWD(repo.path):
os.remove(author_image_path)
res = self.validator.validate_author_image_exists()
assert not res
assert f'Partners must provide a non-empty author image under the path {author_image_path}.' in \
self.validator.get_errors()
| true | true |
f71f6ee079b895d1562283af73f4c7cb38b99b68 | 371 | py | Python | src/example02/main.py | luisibanez/cssi-appengine-introduction-01 | 617c27147f8ba91bdecc7b774ccd2d3204607514 | [
"Apache-2.0"
] | null | null | null | src/example02/main.py | luisibanez/cssi-appengine-introduction-01 | 617c27147f8ba91bdecc7b774ccd2d3204607514 | [
"Apache-2.0"
] | null | null | null | src/example02/main.py | luisibanez/cssi-appengine-introduction-01 | 617c27147f8ba91bdecc7b774ccd2d3204607514 | [
"Apache-2.0"
] | null | null | null | import webapp2
class MainHandler(webapp2.RequestHandler):
    """Handles GET / with a static greeting."""

    def get(self):
        greeting = 'Hello world!'
        self.response.write(greeting)
class CountHandler(webapp2.RequestHandler):
    """Handles GET /count by writing twenty numbered greetings."""

    def get(self):
        for n in range(1, 21):
            self.response.write('Hello %d <br>' % n)
# WSGI application mapping URL routes to their request handlers.
app = webapp2.WSGIApplication([
    ('/', MainHandler),
    ('/count', CountHandler)
], debug=True)
| 23.1875 | 52 | 0.638814 | import webapp2
class MainHandler(webapp2.RequestHandler):
def get(self):
self.response.write('Hello world!')
class CountHandler(webapp2.RequestHandler):
def get(self):
for i in range(1, 21):
self.response.write('Hello %d <br>' % i)
app = webapp2.WSGIApplication([
('/', MainHandler),
('/count', CountHandler)
], debug=True)
| true | true |
f71f6f31a5c782d44a541c5b9d96b9cf0320881f | 2,317 | py | Python | tests/test_json.py | pestun/strace-parser | 8bcddb1670c891785c1fa798b948e9637462c474 | [
"MIT"
] | 6 | 2020-02-03T10:29:59.000Z | 2022-03-07T13:24:26.000Z | tests/test_json.py | pestun/strace-parser | 8bcddb1670c891785c1fa798b948e9637462c474 | [
"MIT"
] | 2 | 2020-11-23T03:04:00.000Z | 2021-09-25T00:39:00.000Z | tests/test_json.py | pestun/strace-parser | 8bcddb1670c891785c1fa798b948e9637462c474 | [
"MIT"
] | 2 | 2020-04-23T03:25:04.000Z | 2021-10-21T23:07:21.000Z | from importlib.resources import read_text
import pytest
from lark import Token, Tree
from strace_parser.json_transformer import to_json
from strace_parser.parser import get_parser
from . import data
def assert_fully_serialized(obj):
    """Recursively assert that *obj* contains only JSON-friendly leaves.

    Fails if any lark Tree/Token node survived transformation, or if a leaf
    is anything other than a str, float or bool.
    """
    original = obj

    def _walk(node):
        assert not isinstance(node, Tree), original
        assert not isinstance(node, Token), original
        if isinstance(node, dict):
            for key, val in node.items():
                _walk(key)
                _walk(val)
        elif isinstance(node, list):
            for val in node:
                _walk(val)
        else:
            assert isinstance(
                node, (str, float, bool)
            ), f"Unexpected type {node} in {original}"

    _walk(obj)
@pytest.mark.parametrize("line", read_text(data, "samples.txt").splitlines())
def test_json_fully_transforms(line):
    # Every sample strace line must transform into purely JSON-serializable
    # values (no leftover lark Tree/Token nodes anywhere in the result).
    tree = get_parser().parse(line + "\n")
    result = to_json(tree)
    assert_fully_serialized(result)
def test_json_transformer():
    """End-to-end check: one strace connect() line parses into the expected JSON."""
    # A representative connect(2) line with hex-escaped fd and sun_path values.
    text = (
        "1577836800.000000 connect("
        r'0<\x01\x23\x45>, {sa_family=AF_UNIX, sun_path="\x01\x23\x45"}, 123)'
        " = -123 ENOENT (No such file or directory) <0.000001>\n"
    )
    parser = get_parser()
    tree = parser.parse(text)
    result = to_json(tree)
    assert len(result) == 1
    line = result[0]
    # Compare the full transformed structure; the braced struct argument must
    # become a list of key/value entries, and the result string stays verbatim.
    assert {
        "timestamp": 1577836800.000000,
        "type": "syscall",
        "args": [
            {"type": "other", "value": r"0<\x01\x23\x45>"},
            {
                "type": "braced",
                "value": [
                    {
                        "type": "key_value",
                        "key": "sa_family",
                        "value": {"type": "other", "value": "AF_UNIX"},
                    },
                    {
                        "type": "key_value",
                        "key": "sun_path",
                        "value": {"type": "other", "value": r'"\x01\x23\x45"'},
                    },
                ],
            },
            {"type": "other", "value": "123"},
        ],
        "name": "connect",
        "result": "-123 ENOENT (No such file or directory) <0.000001>",
    } == line, f"Did not match {tree.pretty()}"
| 30.893333 | 79 | 0.518774 | from importlib.resources import read_text
import pytest
from lark import Token, Tree
from strace_parser.json_transformer import to_json
from strace_parser.parser import get_parser
from . import data
def assert_fully_serialized(obj):
def _assert_fully_serialized(obj):
assert not isinstance(obj, Tree), original
assert not isinstance(obj, Token), original
if isinstance(obj, dict):
for k, v in obj.items():
_assert_fully_serialized(k)
_assert_fully_serialized(v)
elif isinstance(obj, list):
for v in obj:
_assert_fully_serialized(v)
else:
assert isinstance(
obj, (str, float, bool)
), f"Unexpected type {obj} in {original}"
original = obj
_assert_fully_serialized(obj)
@pytest.mark.parametrize("line", read_text(data, "samples.txt").splitlines())
def test_json_fully_transforms(line):
tree = get_parser().parse(line + "\n")
result = to_json(tree)
assert_fully_serialized(result)
def test_json_transformer():
text = (
"1577836800.000000 connect("
r'0<\x01\x23\x45>, {sa_family=AF_UNIX, sun_path="\x01\x23\x45"}, 123)'
" = -123 ENOENT (No such file or directory) <0.000001>\n"
)
parser = get_parser()
tree = parser.parse(text)
result = to_json(tree)
assert len(result) == 1
line = result[0]
assert {
"timestamp": 1577836800.000000,
"type": "syscall",
"args": [
{"type": "other", "value": r"0<\x01\x23\x45>"},
{
"type": "braced",
"value": [
{
"type": "key_value",
"key": "sa_family",
"value": {"type": "other", "value": "AF_UNIX"},
},
{
"type": "key_value",
"key": "sun_path",
"value": {"type": "other", "value": r'"\x01\x23\x45"'},
},
],
},
{"type": "other", "value": "123"},
],
"name": "connect",
"result": "-123 ENOENT (No such file or directory) <0.000001>",
} == line, f"Did not match {tree.pretty()}"
| true | true |
f71f6f77797715e2642a7242a6f13d06b57a1ac6 | 6,833 | py | Python | loops/__init__.py | fenhl/python-loops | ea36e3b1ad68c2257071724a1f760b0e352bb29c | [
"MIT"
] | null | null | null | loops/__init__.py | fenhl/python-loops | ea36e3b1ad68c2257071724a1f760b0e352bb29c | [
"MIT"
] | null | null | null | loops/__init__.py | fenhl/python-loops | ea36e3b1ad68c2257071724a1f760b0e352bb29c | [
"MIT"
] | null | null | null | import datetime
import threading
import time
try:
from loops.version import __version__
except ImportError:
__version__ = None
class IterThread(threading.Thread):
    """Daemon thread that fetches a single value from an iterator.

    After the thread finishes, either ``value`` holds the next item, or
    ``stopped`` is True because the iterator was exhausted.
    """

    def __init__(self, iterator):
        super().__init__()
        self.daemon = True
        self.iterator = iterator
        self.stopped = False

    def run(self):
        # Take at most one item; fall through to the exhausted flag otherwise.
        for item in self.iterator:
            self.value = item
            return
        self.stopped = True
class Loop(threading.Thread):
    """Generic loop thread that periodically checks if it should stop while waiting for the iterable to yield.

    Keyword-only arguments:
    iterable -- The iterable to be looped over. By default, self.iterable is called.
    on_exception -- What to do when an exception occurs in process_value. If given, must be an iterable of actions, which will be done in order. Possible actions are 'log_stdout' (write traceback to sys.stdout), 'log_stderr' (write traceback to sys.stderr), or 'raise' (the default; lets the exception through to threading's default handling). Set to an empty iterable to ignore exceptions and continue the loop.
    process_value -- A function which will be called with each yielded value as argument. Defaults to self.process_value.
    sleep_length -- A datetime.timedelta representing how long to sleep between each check for the next value or the stop signal. Defaults to half a second.
    """
    def __init__(self, *, iterable=None, on_exception=('raise',), process_value=None, sleep_length=datetime.timedelta(seconds=0.5)):
        super().__init__()
        if iterable is None:
            # Fall back to the subclass-provided factory.
            self.iterable = self.iterable()
        else:
            self.iterable = iterable
        self.on_exception = tuple(on_exception)
        if process_value is not None:
            self.process_value = process_value
        self.stopped = False
        self.sleep_length = sleep_length

    @staticmethod
    def iterable():
        """The iterable to be looped over. Must be overridden in a subclass, or by passing the `iterable' keyword argument to the constructor."""
        raise NotImplementedError('iterable must be overwritten in subclasses, or set explicitly')

    def run(self):
        # BUGFIX: sys and traceback are used on the error-logging path below but
        # were never imported anywhere in this module, so any 'log_stdout'/'log_stderr'
        # action previously raised NameError instead of logging. Import them locally.
        import sys
        import traceback
        iterator = iter(self.iterable)
        iter_thread = IterThread(iterator)
        iter_thread.start() # get the first value
        while not self.stopped:
            if not iter_thread.is_alive():
                if iter_thread.stopped: # iterator exhausted
                    return
                else: # iterator has yielded a value
                    try:
                        self.process_value(iter_thread.value)
                    # Catch Exception rather than a bare except so that
                    # SystemExit/KeyboardInterrupt are never swallowed by
                    # an empty on_exception tuple.
                    except Exception:
                        for exception_action in self.on_exception:
                            if exception_action == 'log_stdout':
                                traceback.print_exc(file=sys.stdout)
                            elif exception_action == 'log_stderr':
                                traceback.print_exc(file=sys.stderr)
                            elif exception_action == 'raise':
                                raise
                            else:
                                raise ValueError('Unrecognized exception action: {!r}'.format(exception_action))
                    iter_thread = IterThread(iterator)
                    iter_thread.start() # get the next value
                    continue
            time.sleep(self.sleep_length.total_seconds())

    @staticmethod
    def process_value(value):
        """Will be called with each yielded value as argument. Must be overridden in a subclass, or by passing the `process_value' keyword argument to the constructor."""
        raise NotImplementedError('process_value must be overwritten in subclasses, or set explicitly')

    def start(self):
        """Clear the stop flag and start the loop thread."""
        self.stopped = False
        super().start()

    def stop(self):
        """Signal the loop to exit; it stops at the next periodic check."""
        self.stopped = True
def timeout_single(iterable, timeout, sleep_length=datetime.timedelta(seconds=0.5)):
    """This function creates an iterator that yields from the given iterable, but aborts when the iterable takes too long to yield a value.

    Required arguments:
    iterable -- The iterable to yield from.
    timeout -- A datetime.timedelta representing the maximum time the iterable may take to produce a single value. If any iteration step takes longer than this, the iteration is aborted.

    Optional arguments:
    sleep_length -- A datetime.timedelta representing how long to sleep between each check for the next value. Will be truncated to the remainder of the timeout. Defaults to half a second.

    Yields:
    The values from `iterable', until it is exhausted or `timeout' is reached.
    """
    source = iter(iterable)
    fetcher = IterThread(source)
    fetcher.start()  # fetch the first value in the background
    remaining = timeout
    while remaining > datetime.timedelta():
        nap = min(sleep_length, remaining)
        time.sleep(nap.total_seconds())
        remaining -= nap
        if fetcher.is_alive():
            continue  # still waiting on the iterable
        if fetcher.stopped:
            return  # iterable exhausted
        yield fetcher.value
        # The per-value timeout resets after every successful yield.
        remaining = timeout
        fetcher = IterThread(source)
        fetcher.start()
def timeout_total(iterable, timeout, sleep_length=datetime.timedelta(seconds=0.5)):
    """This function creates an iterator that yields from the given iterable, but aborts after a timeout.

    Required arguments:
    iterable -- The iterable to yield from.
    timeout -- A datetime.timedelta representing how long after iteration is started it should be aborted.

    Optional arguments:
    sleep_length -- A datetime.timedelta representing how long to sleep between each check for the next value. Will be truncated to the remainder of the timeout. Defaults to half a second.

    Yields:
    The values from `iterable', until it is exhausted or `timeout' is reached.
    """
    source = iter(iterable)
    fetcher = IterThread(source)
    fetcher.start()  # fetch the first value in the background
    remaining = timeout
    while remaining > datetime.timedelta():
        nap = min(sleep_length, remaining)
        time.sleep(nap.total_seconds())
        remaining -= nap
        if fetcher.is_alive():
            continue  # still waiting on the iterable
        if fetcher.stopped:
            return  # iterable exhausted
        yield fetcher.value
        # Unlike timeout_single, the budget is NOT reset: it covers the whole run.
        fetcher = IterThread(source)
        fetcher.start()
| 46.80137 | 412 | 0.658861 | import datetime
import threading
import time
try:
from loops.version import __version__
except ImportError:
__version__ = None
class IterThread(threading.Thread):
def __init__(self, iterator):
super().__init__()
self.daemon = True
self.iterator = iterator
self.stopped = False
def run(self):
try:
self.value = next(self.iterator)
except StopIteration:
self.stopped = True
class Loop(threading.Thread):
def __init__(self, *, iterable=None, on_exception=('raise',), process_value=None, sleep_length=datetime.timedelta(seconds=0.5)):
super().__init__()
if iterable is None:
self.iterable = self.iterable()
else:
self.iterable = iterable
self.on_exception = tuple(on_exception)
if process_value is not None:
self.process_value = process_value
self.stopped = False
self.sleep_length = sleep_length
@staticmethod
def iterable():
raise NotImplementedError('iterable must be overwritten in subclasses, or set explicitly')
def run(self):
iterator = iter(self.iterable)
iter_thread = IterThread(iterator)
iter_thread.start()
while not self.stopped:
if not iter_thread.is_alive():
if iter_thread.stopped:
return
else:
try:
self.process_value(iter_thread.value)
except:
for exception_action in self.on_exception:
if exception_action == 'log_stdout':
traceback.print_exc(file=sys.stdout)
elif exception_action == 'log_stderr':
traceback.print_exc(file=sys.stderr)
elif exception_action == 'raise':
raise
else:
raise ValueError('Unrecognized exception action: {!r}'.format(exception_action))
iter_thread = IterThread(iterator)
iter_thread.start()
continue
time.sleep(self.sleep_length.total_seconds())
@staticmethod
def process_value(value):
raise NotImplementedError('process_value must be overwritten in subclasses, or set explicitly')
def start(self):
self.stopped = False
super().start()
def stop(self):
self.stopped = True
def timeout_single(iterable, timeout, sleep_length=datetime.timedelta(seconds=0.5)):
iterator = iter(iterable)
current_timeout = timeout
iter_thread = IterThread(iterator)
iter_thread.start()
while current_timeout > datetime.timedelta():
current_sleep_length = min(sleep_length, current_timeout)
time.sleep(current_sleep_length.total_seconds())
current_timeout -= current_sleep_length
if not iter_thread.is_alive():
if iter_thread.stopped:
return
else:
yield iter_thread.value
current_timeout = timeout
iter_thread = IterThread(iterator)
iter_thread.start()
def timeout_total(iterable, timeout, sleep_length=datetime.timedelta(seconds=0.5)):
iterator = iter(iterable)
current_timeout = timeout
iter_thread = IterThread(iterator)
iter_thread.start()
while current_timeout > datetime.timedelta():
current_sleep_length = min(sleep_length, current_timeout)
time.sleep(current_sleep_length.total_seconds())
current_timeout -= current_sleep_length
if not iter_thread.is_alive():
if iter_thread.stopped:
return
else:
yield iter_thread.value
iter_thread = IterThread(iterator)
iter_thread.start()
| true | true |
f71f6ffc94e95da06954304971002720ccddd90b | 537 | py | Python | plugins/yt.py | ctburley/akesho-irc3 | 7d27a45f401ffcfa3a380c7de01687cbe69b874d | [
"MIT"
] | 3 | 2018-06-03T11:55:28.000Z | 2020-01-03T02:33:22.000Z | plugins/yt.py | ctburley/akesho-irc3 | 7d27a45f401ffcfa3a380c7de01687cbe69b874d | [
"MIT"
] | 14 | 2018-05-07T13:33:21.000Z | 2021-04-30T20:46:54.000Z | plugins/yt.py | ctburley/akesho-irc3 | 7d27a45f401ffcfa3a380c7de01687cbe69b874d | [
"MIT"
] | 1 | 2018-06-04T04:45:58.000Z | 2018-06-04T04:45:58.000Z | import irc3
from irc3.plugins.command import command
@irc3.plugin
class Plugin:
    """irc3 plugin that intercepts the retired ``.yt`` channel command and
    redirects users to the ``.gse youtube`` replacement."""
    def __init__(self, bot):
        # Keep a handle on the bot so event handlers can reply and check state.
        self.bot = bot
        print("yt loaded")
    # Matches "PRIVMSG <channel> :.yt <target>" (with optional IRCv3 tags) and
    # captures nick, mask, channel and the search target.
    @irc3.event('^(@(?P<tags>\S+) )?:(?P<nick>\S+)(?P<mask>!\S+@\S+) PRIVMSG (?P<channel>\S+) :\.yt\s+(?P<target>.*?)$')
    def yt(self, nick=None, mask=None, channel=None, target=None, **kw):
        # Only respond in channels where the bot is currently obeying commands.
        if self.bot.obeying_commands(channel):
            target = target.strip()
            self.bot.privmsg(channel, "Hey " + nick + " .yt isn't working right now, try '.gse youtube "+target+"' instead! <3")
| 35.8 | 122 | 0.621974 | import irc3
from irc3.plugins.command import command
@irc3.plugin
class Plugin:
def __init__(self, bot):
self.bot = bot
print("yt loaded")
@irc3.event('^(@(?P<tags>\S+) )?:(?P<nick>\S+)(?P<mask>!\S+@\S+) PRIVMSG (?P<channel>\S+) :\.yt\s+(?P<target>.*?)$')
def yt(self, nick=None, mask=None, channel=None, target=None, **kw):
if self.bot.obeying_commands(channel):
target = target.strip()
self.bot.privmsg(channel, "Hey " + nick + " .yt isn't working right now, try '.gse youtube "+target+"' instead! <3")
| true | true |
f71f70018ea2bb974a7995e741772da0a860e199 | 11,666 | py | Python | mesh_voxel_color/color_pil_cupy.py | naysok/Mesh_Voxel_Color | 9ca3549822ada1be67efcb3e47cf4c193d54cbaa | [
"MIT"
] | null | null | null | mesh_voxel_color/color_pil_cupy.py | naysok/Mesh_Voxel_Color | 9ca3549822ada1be67efcb3e47cf4c193d54cbaa | [
"MIT"
] | null | null | null | mesh_voxel_color/color_pil_cupy.py | naysok/Mesh_Voxel_Color | 9ca3549822ada1be67efcb3e47cf4c193d54cbaa | [
"MIT"
] | null | null | null | import sys
sys.path.append("C:\\Users\\ysoky\\Documents\\Mesh_Voxel_Color\\_module_\\Mesh_Contour")
import math
import cupy as cp
import random
from PIL import Image, ImageDraw, ImageOps, ImageEnhance
from mesh_contour import stl_parser
sp = stl_parser.StlParser()
from .import util
ut = util.Util()
class ColorPILCupy():
###############################
#### ###
#### I/O + Utilities ###
#### ###
###############################
def remap_number_cp(self, arr, old_min, old_max, target_min, target_max):
new_arr = (arr - old_min) / (old_max - old_min) * (target_max - target_min) + target_min
return new_arr
    def get_points_from_stl(self, file_path):
        """Parse an STL file into a raw point list via the mesh_contour StlParser.

        Returns whatever ``sp.stl2points`` produces — presumably a nested list
        of XYZ vertices; confirm against the mesh_contour module.
        """
        ### Point From STL
        pts = sp.stl2points(file_path)
        return pts
def get_points_from_stl_np(self, file_path, volume_size, canvas_size):
### Cupy
### Point From STL
pts = sp.stl2points(file_path)
pts_format = [pts]
# print(pts_format)
pts_cp = cp.array(pts_format)
pts_cp_remap = self.remap_number_cp(pts_cp, 0, volume_size, 0, canvas_size)
# print(pts_cp)
return pts_cp_remap
def get_points_from_txt_np(self, file_path, volume_size, canvas_size):
### Cupy
with open(file_path) as f:
lines = f.readlines()
xyz_list = []
for line in lines:
elm = line.split(",")
xyz =[float(elm[0]), float(elm[1]), float(elm[2])]
xyz_list.append(xyz)
xyz_list = [xyz_list]
pts_cp = cp.array(xyz_list)
pts_cp_remap = self.remap_number_cp(pts_cp, 0, volume_size, 0, canvas_size)
# print("pts_cp_remap.shape :", pts_np_remap.shape)
# print(pts_cp_remap)
return pts_cp_remap
################################################################################
######################################
#### ###
#### Image Processing (PIL) ###
#### ###
######################################
def open_image(self, path):
img = Image.open(path)
return img
    def export_image(self, img, path):
        """Save *img* to *path* (quality=100 where the format supports it)
        and log the destination to stdout."""
        img.save(path, quality=100)
        print("Export : {}".format(path))
def create_canvas(self, canvas_size):
new = Image.new("RGB", (canvas_size, canvas_size), (255, 255, 255))
return new
def create_canvas_alpha(self, canvas_size):
new = Image.new("RGBA", (canvas_size, canvas_size), (0, 0, 0, 0))
return new
################################################################################
###########################
#### ###
#### Math (Cupy) ###
#### ###
###########################
    def clac_all_distance(self, pos, pts):
        """Minimum Euclidean distance from each query position to the point cloud (CuPy).

        Note: the name's "clac" is a typo for "calc", kept for API compatibility.
        Assumes pos is shaped (N, 1, 3) and pts (1, M, 3) so the subtraction
        broadcasts to (N, M, 3) — TODO confirm against the callers in
        gen_disctance_list / gen_disctance_list_ds.
        Returns a 1-D CuPy array of per-position minimum distances.
        """
        ### Calc Distance with Cupy
        ### Generate Vector
        v = pos - pts
        # print("v.shape :", v.shape)
        # print(v)
        vt = v.T
        ### Calc Distance
        d = cp.sqrt((vt[0] * vt[0]) + (vt[1] * vt[1]) + (vt[2] * vt[2]))
        # print("d.shape :", d.shape)
        ### Select Min Value
        dm_cp = cp.amin(d, axis=0)
        # print("dm.shape :", dm_cp.shape)
        return dm_cp
def gen_disctance_list(self, w, h, height, pts_cp):
### Generate Distance-List
# print("Distance")
px_list = []
for i in range(w):
for j in range(h):
px_list.append([[j, i, height]])
### pos-numpy array (from Image)
pos_cp = cp.array(px_list)
# print("pos.shape :", pos_cp.shape)
### Separate Process
### https://qiita.com/kazuki_hayakawa/items/557edd922f9f1fafafe0
SPLIT = 250
pos_cp_split = cp.array_split(pos_cp, SPLIT)
# print(len(pos_cp_split))
dist_tmp = []
for i in range(SPLIT):
tmp_p = pos_cp_split[i]
# print("pts.shape :", tmp_p.shape)
### pts-numpy array (from STL)
# print("pts.shape :", pts_cp.shape)
###
d = self.clac_all_distance(tmp_p, pts_cp)
dist_tmp.append(d)
dist_list = cp.concatenate(dist_tmp, 0)
# print(len(dist_list))
return dist_list
def gen_disctance_list_ds(self, w, h, height, downsampling_xy, pts_cp):
### Generate Distance-List
### with DownSampling
# print("Distance")
px_list = []
for i in range(w):
for j in range(h):
px = [j * downsampling_xy, i * downsampling_xy, height]
px_list.append([px])
### pos-numpy array (from Image)
pos_cp = cp.array(px_list)
# print(pos_cp)
# print("pos.shape :", pos_cp.shape)
### Separate Process
### https://qiita.com/kazuki_hayakawa/items/557edd922f9f1fafafe0
SPLIT = 250
pos_cp_split = cp.array_split(pos_cp, SPLIT)
# print(len(pos_cp_split))
dist_tmp = []
for i in range(SPLIT):
tmp_p = pos_cp_split[i]
# print("pts.shape :", tmp_p.shape)
### pts-numpy array (from STL)
# print("pts.shape :", pts_cp.shape)
###
d = self.clac_all_distance(tmp_p, pts_cp)
dist_tmp.append(d)
dist_list = cp.concatenate(dist_tmp, 0)
# print(len(dist_list))
return dist_list
################################################################################
####################
### ###
### Draw ###
### ###
####################
def scan_image_calc_color(self, file_path, height, pts_cp, downsampling_xy):
### Open Image
img_src = self.open_image(file_path)
w, h = img_src.size
### DownSampling
ww = int(w / downsampling_xy)
hh = int(h / downsampling_xy)
img = img_src.resize((ww, hh), Image.LANCZOS)
### Read Shape
px = img.getdata()
px_cp = cp.array(px)
# print("px_cp.shape :", px_cp.shape)
### Create Result Canvas
img_tmp = self.create_canvas_alpha(ww)
img_result = self.create_canvas_alpha(w)
### Segment Contour True/False
px_seg_0 = cp.amax(px_cp)
### Contour : False
if px_seg_0 < 127:
### Export None-Image
px_result = [(0, 0, 0, 0) for i in range(w) for j in range(h)]
img_result.putdata(tuple(px_result))
return img_result
### Contour : True
else:
### Running on Cuda
# print("Running on Cuda !!")
################################################################################################
###########################
### ###
### Calc Distance ###
### ###
###########################
# print("Distance")
### [X] Clac Distance
# dist_list = self.gen_disctance_list(w, h, height, pts_cp)
### [O] Clac Distance with DownSampling
dist_list = self.gen_disctance_list_ds(ww, hh, height, downsampling_xy, pts_cp)
################################################################################################
############################################
### ###
### Generate Color From Distance ###
### ###
############################################
# print("Color")
### Define Colors
################################################################################################
### Offset Pattern (Small)
dist_src = dist_list.tolist()
# print("len(dist_src) :", len(dist_src))
clrs = []
amp = 1 / 2
for d in dist_src:
c = int((math.sin(d * amp) + 1) * (1 / 2) * 255)
cc = 255 - c
clrs.append([c, c, cc, 255])
clrs_tuple = tuple(map(tuple, clrs))
### Generate New Image
img_tmp.putdata(tuple(clrs_tuple))
################################################################################################
"""
### Offset Pattern (Large)
dist_src = dist_list.tolist()
# print("len(dist_src) :", len(dist_src))
clrs = []
for d in dist_src:
th = 30
if d < (th * 1):
clrs.append([255, 0, 0, 255])
elif d < (th * 2):
clrs.append([0, 255, 0, 255])
elif d < (th * 3):
clrs.append([0, 0, 255, 255])
else:
clrs.append([255, 255, 255, 255])
clrs_tuple = tuple(map(tuple, clrs))
### Generate New Image
img_tmp.putdata(tuple(clrs_tuple))
"""
################################################################################################
"""
### Test Distance Map
dist_remap = self.remap_number_cp(dist_list, 0, 200, 0, 255)
dist_remap = dist_remap.astype('int64')
# print("dist_remap.shape :", dist_remap.shape)
### Fill Array (255)
alpha_array = cp.ones(dist_list.shape) * 255
alpha_array = alpha_array.astype('int64')
dist_img = cp.stack([dist_remap, dist_remap, dist_remap, alpha_array])
dist_img = dist_img.T
# print("dist_img.shape :", dist_img.shape)
# print(dist_img)
dist_4 = dist_img.tolist()
dist_4 = tuple(map(tuple, dist_4))
# print("type(dist_4) :", type(dist_4))
### Generate New Image
img_tmp.putdata(tuple(dist_4))
"""
################################################################################################
#########################
### ###
### Composite ###
### ###
#########################
# print("Composite")
### Scaling
img_dist = img_tmp.resize((w, h), Image.LANCZOS)
### Create Canvas for Composite
img_canvas = self.create_canvas_alpha(w)
### Define Mask
img_mask = img_src.convert("L")
### Composite
img_result = Image.composite(img_dist, img_canvas, img_mask)
### Flip
### Image Coordination >> Rhino Coordination
img_flip = ImageOps.flip(img_result)
return img_flip | 27.193473 | 109 | 0.407252 | import sys
sys.path.append("C:\\Users\\ysoky\\Documents\\Mesh_Voxel_Color\\_module_\\Mesh_Contour")
import math
import cupy as cp
import random
from PIL import Image, ImageDraw, ImageOps, ImageEnhance
from mesh_contour import stl_parser
sp = stl_parser.StlParser()
from .import util
ut = util.Util()
class ColorPILCupy():
| true | true |
f71f7031c4f8fd46d4b8fe54a23ba04cced48350 | 644 | py | Python | coronavirus/json_update.py | StevenHuang2020/WebSpider | 40ab36416e061da3eb98a3174f18f50260b2e2d3 | [
"MIT"
] | null | null | null | coronavirus/json_update.py | StevenHuang2020/WebSpider | 40ab36416e061da3eb98a3174f18f50260b2e2d3 | [
"MIT"
] | null | null | null | coronavirus/json_update.py | StevenHuang2020/WebSpider | 40ab36416e061da3eb98a3174f18f50260b2e2d3 | [
"MIT"
] | null | null | null | # -*- encoding: utf-8 -*-
# Date: 27/Apr/2020
# Author: Steven Huang, Auckland, NZ
# License: MIT License
"""
Description: Update json file
"""
import json
import datetime
def write_file(file, content):
with open(file, 'w', newline='\n', encoding='utf-8') as f:
f.write(content)
def get_datetime():
daytime = datetime.datetime.now()
return str(daytime.strftime("%Y-%m-%d %H:%M:%S"))
def update_json(file=r'update.json'):
info = {"schemaVersion": 1, "label": "Last update", "message": "2020-01-01 01:01:01"}
info["message"] = get_datetime()
# print(json.dumps(info))
write_file(file, json.dumps(info))
| 23 | 89 | 0.641304 |
import json
import datetime
def write_file(file, content):
with open(file, 'w', newline='\n', encoding='utf-8') as f:
f.write(content)
def get_datetime():
daytime = datetime.datetime.now()
return str(daytime.strftime("%Y-%m-%d %H:%M:%S"))
def update_json(file=r'update.json'):
info = {"schemaVersion": 1, "label": "Last update", "message": "2020-01-01 01:01:01"}
info["message"] = get_datetime()
write_file(file, json.dumps(info))
| true | true |
f71f706d2b3fdf3882c5261d9237067d22214993 | 694 | py | Python | src/decisionengine_modules/glideinwms/tests/test_UniversalFrontendParams.py | BrunoCoimbra/decisionengine_modules | bfd14644eb2e16b72b75fdcc3ebe8ad1323b904f | [
"Apache-2.0"
] | null | null | null | src/decisionengine_modules/glideinwms/tests/test_UniversalFrontendParams.py | BrunoCoimbra/decisionengine_modules | bfd14644eb2e16b72b75fdcc3ebe8ad1323b904f | [
"Apache-2.0"
] | null | null | null | src/decisionengine_modules/glideinwms/tests/test_UniversalFrontendParams.py | BrunoCoimbra/decisionengine_modules | bfd14644eb2e16b72b75fdcc3ebe8ad1323b904f | [
"Apache-2.0"
] | null | null | null | from decisionengine_modules.glideinwms.tests.fixtures import ( # noqa: F401
gwms_module_config,
gwms_module_invalid_config,
gwms_src_dir,
)
from decisionengine_modules.glideinwms.UniversalFrontendParams import UniversalFrontendParams
def test_instantiation(gwms_src_dir, gwms_module_config): # noqa: F811
params = UniversalFrontendParams(gwms_src_dir, gwms_module_config)
assert params.subparams["frontend_name"] == "mock_frontend"
def test_config_error(gwms_src_dir, gwms_module_invalid_config): # noqa: F811
try:
_ = UniversalFrontendParams(gwms_src_dir, gwms_module_invalid_config)
except Exception as e:
assert isinstance(e, RuntimeError)
| 36.526316 | 93 | 0.792507 | from decisionengine_modules.glideinwms.tests.fixtures import (
gwms_module_config,
gwms_module_invalid_config,
gwms_src_dir,
)
from decisionengine_modules.glideinwms.UniversalFrontendParams import UniversalFrontendParams
def test_instantiation(gwms_src_dir, gwms_module_config):
params = UniversalFrontendParams(gwms_src_dir, gwms_module_config)
assert params.subparams["frontend_name"] == "mock_frontend"
def test_config_error(gwms_src_dir, gwms_module_invalid_config):
try:
_ = UniversalFrontendParams(gwms_src_dir, gwms_module_invalid_config)
except Exception as e:
assert isinstance(e, RuntimeError)
| true | true |
f71f73422050b5b292bd93215895e5ecf77f8aa9 | 4,482 | py | Python | app/run.py | imisi-akande/disaster-response-pipeline | d691e643c57e45b226ca3cb2c0b4a708c7edfe8b | [
"MIT"
] | null | null | null | app/run.py | imisi-akande/disaster-response-pipeline | d691e643c57e45b226ca3cb2c0b4a708c7edfe8b | [
"MIT"
] | null | null | null | app/run.py | imisi-akande/disaster-response-pipeline | d691e643c57e45b226ca3cb2c0b4a708c7edfe8b | [
"MIT"
] | null | null | null | import json
import plotly
import pandas as pd
import nltk
from nltk.stem import WordNetLemmatizer
from nltk.tokenize import word_tokenize, sent_tokenize
from nltk import pos_tag, word_tokenize
from nltk.stem import WordNetLemmatizer
from nltk.tokenize import word_tokenize
from flask import Flask
from flask import render_template, request, jsonify
from plotly.graph_objs import Bar
from sklearn.base import BaseEstimator, TransformerMixin
import joblib
from sqlalchemy import create_engine
app = Flask(__name__)
class StartingVerbExtractor(BaseEstimator, TransformerMixin):
def starting_verb(self, text):
sentence_list = nltk.sent_tokenize(text)
for sentence in sentence_list:
pos_tags = nltk.pos_tag(tokenize(sentence))
first_word, first_tag = pos_tags[0]
if first_tag in ['VB', 'VBP'] or first_word == 'RT':
return True
return False
def fit(self, X, y=None):
return self
def transform(self, X):
X_tagged = pd.Series(X).apply(self.starting_verb)
return pd.DataFrame(X_tagged)
def tokenize(text):
tokens = word_tokenize(text)
lemmatizer = WordNetLemmatizer()
clean_tokens = []
for tok in tokens:
clean_tok = lemmatizer.lemmatize(tok).lower().strip()
clean_tokens.append(clean_tok)
return clean_tokens
# load data
engine = create_engine('sqlite:///../data/disaster_response.db')
df = pd.read_sql_table('disaster_response_table', engine)
# load model
model = joblib.load("../models/classifier.pkl")
# index webpage displays cool visuals and receives user input text for model
@app.route('/')
@app.route('/index')
def index():
# extract data needed for visuals
# TODO: Below is an example - modify to extract data for your own visuals
genre_counts = df.groupby('genre').count()['message']
genre_percent = round(100*genre_counts/genre_counts.sum(), 2)
genre_names = list(genre_counts.index)
category_names = df.iloc[:,4:].columns
category_boolean = (df.iloc[:,4:] != 0).sum().values
# create visuals
# TODO: Below is an example - modify to create your own visuals
graphs = [
# GRAPH 1 - genre graph
{
"data": [
{
"type": "pie",
"uid": "f4de1f",
"hole": 0.4,
"name": "Genre",
"pull": 0,
"domain": {
"x": genre_percent,
"y": genre_names
},
"marker": {
"colors": [
"#7fc97f",
"#bc5090",
"#ffa600"
]
},
"textinfo": "label+value",
"hoverinfo": "all",
"labels": genre_names,
"values": genre_counts
}
],
"layout": {
"title": "Count and Percentage of Messages by Genre"
}
},
# GRAPH 2 - category graph
{
'data': [
Bar(
x=category_names,
y=category_boolean
)
],
'layout': {
'title': 'Distribution of Message Categories',
'yaxis': {
'title': "Count"
},
'xaxis': {
'title': "Category",
'tickangle': 35
}
}
}
]
# encode plotly graphs in JSON
ids = ["graph-{}".format(i) for i, _ in enumerate(graphs)]
graphJSON = json.dumps(graphs, cls=plotly.utils.PlotlyJSONEncoder)
# render web page with plotly graphs
return render_template('master.html', ids=ids, graphJSON=graphJSON)
# web page that handles user query and displays model results
@app.route('/go')
def go():
# save user input in query
query = request.args.get('query', '')
# use model to predict classification for query
classification_labels = model.predict([query])[0]
classification_results = dict(zip(df.columns[4:], classification_labels))
# This will render the go.html Please see that file.
return render_template(
'go.html',
query=query,
classification_result=classification_results
)
def main():
app.run(host='0.0.0.0', port=5000, debug=True)
if __name__ == '__main__':
main() | 28.367089 | 77 | 0.56805 | import json
import plotly
import pandas as pd
import nltk
from nltk.stem import WordNetLemmatizer
from nltk.tokenize import word_tokenize, sent_tokenize
from nltk import pos_tag, word_tokenize
from nltk.stem import WordNetLemmatizer
from nltk.tokenize import word_tokenize
from flask import Flask
from flask import render_template, request, jsonify
from plotly.graph_objs import Bar
from sklearn.base import BaseEstimator, TransformerMixin
import joblib
from sqlalchemy import create_engine
app = Flask(__name__)
class StartingVerbExtractor(BaseEstimator, TransformerMixin):
def starting_verb(self, text):
sentence_list = nltk.sent_tokenize(text)
for sentence in sentence_list:
pos_tags = nltk.pos_tag(tokenize(sentence))
first_word, first_tag = pos_tags[0]
if first_tag in ['VB', 'VBP'] or first_word == 'RT':
return True
return False
def fit(self, X, y=None):
return self
def transform(self, X):
X_tagged = pd.Series(X).apply(self.starting_verb)
return pd.DataFrame(X_tagged)
def tokenize(text):
tokens = word_tokenize(text)
lemmatizer = WordNetLemmatizer()
clean_tokens = []
for tok in tokens:
clean_tok = lemmatizer.lemmatize(tok).lower().strip()
clean_tokens.append(clean_tok)
return clean_tokens
engine = create_engine('sqlite:///../data/disaster_response.db')
df = pd.read_sql_table('disaster_response_table', engine)
model = joblib.load("../models/classifier.pkl")
@app.route('/')
@app.route('/index')
def index():
genre_counts = df.groupby('genre').count()['message']
genre_percent = round(100*genre_counts/genre_counts.sum(), 2)
genre_names = list(genre_counts.index)
category_names = df.iloc[:,4:].columns
category_boolean = (df.iloc[:,4:] != 0).sum().values
graphs = [
{
"data": [
{
"type": "pie",
"uid": "f4de1f",
"hole": 0.4,
"name": "Genre",
"pull": 0,
"domain": {
"x": genre_percent,
"y": genre_names
},
"marker": {
"colors": [
"#7fc97f",
"#bc5090",
"#ffa600"
]
},
"textinfo": "label+value",
"hoverinfo": "all",
"labels": genre_names,
"values": genre_counts
}
],
"layout": {
"title": "Count and Percentage of Messages by Genre"
}
},
{
'data': [
Bar(
x=category_names,
y=category_boolean
)
],
'layout': {
'title': 'Distribution of Message Categories',
'yaxis': {
'title': "Count"
},
'xaxis': {
'title': "Category",
'tickangle': 35
}
}
}
]
ids = ["graph-{}".format(i) for i, _ in enumerate(graphs)]
graphJSON = json.dumps(graphs, cls=plotly.utils.PlotlyJSONEncoder)
return render_template('master.html', ids=ids, graphJSON=graphJSON)
@app.route('/go')
def go():
query = request.args.get('query', '')
classification_labels = model.predict([query])[0]
classification_results = dict(zip(df.columns[4:], classification_labels))
return render_template(
'go.html',
query=query,
classification_result=classification_results
)
def main():
app.run(host='0.0.0.0', port=5000, debug=True)
if __name__ == '__main__':
main() | true | true |
f71f7408f54375e5147ae5b03a495305fdff73de | 1,853 | py | Python | mercury_agent/procedures/inspector.py | jr0d/mercury-agent | 12b75ecc951d3ab5cd15c5213df2412b108cf47c | [
"Apache-2.0"
] | null | null | null | mercury_agent/procedures/inspector.py | jr0d/mercury-agent | 12b75ecc951d3ab5cd15c5213df2412b108cf47c | [
"Apache-2.0"
] | 4 | 2017-11-01T16:25:49.000Z | 2018-08-22T13:50:23.000Z | mercury_agent/procedures/inspector.py | jr0d/mercury-agent | 12b75ecc951d3ab5cd15c5213df2412b108cf47c | [
"Apache-2.0"
] | 5 | 2017-10-19T12:40:15.000Z | 2018-08-21T20:18:54.000Z | # Copyright 2015 Jared Rodriguez (jared.rodriguez@rackspace.com)
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mercury_agent.capabilities import capability
from mercury_agent.configuration import get_configuration
from mercury_agent.inspector import inspect
from mercury_agent.inspector.inspect import global_device_info
from mercury_agent.inspector.inspectors import health
@capability('inspector', description='Run inspector')
def inspector():
"""
Manually run inspectors
:return: results
"""
return inspect.inspect()
@capability('check_hardware', description='Check hardware for errors')
def check_hardware():
"""
Checks hardware for inconsistencies and defects. Returns a list of discovered critical errors.
:return:
"""
configuration = get_configuration().agent
errors = []
_health_data = health.system_health_inspector(global_device_info)
if _health_data['corrected_hardware_event_count'] >= configuration.hardware.mce_threshold:
errors.append(
'MCE count is {} which is above the configured threshold of {}'.format(
_health_data['corrected_hardware_event_count'],
configuration.hardware.mce_threshold))
return {
'errors': errors,
'error_count': len(errors)
}
| 36.333333 | 98 | 0.729628 |
from mercury_agent.capabilities import capability
from mercury_agent.configuration import get_configuration
from mercury_agent.inspector import inspect
from mercury_agent.inspector.inspect import global_device_info
from mercury_agent.inspector.inspectors import health
@capability('inspector', description='Run inspector')
def inspector():
return inspect.inspect()
@capability('check_hardware', description='Check hardware for errors')
def check_hardware():
configuration = get_configuration().agent
errors = []
_health_data = health.system_health_inspector(global_device_info)
if _health_data['corrected_hardware_event_count'] >= configuration.hardware.mce_threshold:
errors.append(
'MCE count is {} which is above the configured threshold of {}'.format(
_health_data['corrected_hardware_event_count'],
configuration.hardware.mce_threshold))
return {
'errors': errors,
'error_count': len(errors)
}
| true | true |
f71f755bceeeb2c38e3122cc3e6f50cb403624cb | 453 | py | Python | examples/user/user_playlists.py | LorenzoCavatorta/spotify.py | 7f375f030fbac4ef3dbbd577a898b4d72f37b72b | [
"MIT"
] | null | null | null | examples/user/user_playlists.py | LorenzoCavatorta/spotify.py | 7f375f030fbac4ef3dbbd577a898b4d72f37b72b | [
"MIT"
] | null | null | null | examples/user/user_playlists.py | LorenzoCavatorta/spotify.py | 7f375f030fbac4ef3dbbd577a898b4d72f37b72b | [
"MIT"
] | null | null | null | import asyncio
import spotify
client = spotify.Client('someid', 'somesecret')
async def main():
# You can use a user with a http presence
user = await client.user_from_token('sometoken')
# Or you can get a generic user
user = await client.get_user(user_id)
# returns a list of spotify.Playlist objects
playlists = await user.get_playlists()
if __name__ == '__main__':
asyncio.get_event_loop().run_until_complete(main())
| 25.166667 | 55 | 0.715232 | import asyncio
import spotify
client = spotify.Client('someid', 'somesecret')
async def main():
user = await client.user_from_token('sometoken')
user = await client.get_user(user_id)
playlists = await user.get_playlists()
if __name__ == '__main__':
asyncio.get_event_loop().run_until_complete(main())
| true | true |
f71f75b68bb7f3fa7cd5a31932f2aebd38d239e8 | 8,668 | py | Python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2021_02_01/aio/operations/_load_balancer_outbound_rules_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 3 | 2020-06-23T02:25:27.000Z | 2021-09-07T18:48:11.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2021_02_01/aio/operations/_load_balancer_outbound_rules_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 510 | 2019-07-17T16:11:19.000Z | 2021-08-02T08:38:32.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2021_02_01/aio/operations/_load_balancer_outbound_rules_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 5 | 2019-09-04T12:51:37.000Z | 2020-09-16T07:28:40.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class LoadBalancerOutboundRulesOperations:
"""LoadBalancerOutboundRulesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2021_02_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name: str,
load_balancer_name: str,
**kwargs
) -> AsyncIterable["_models.LoadBalancerOutboundRuleListResult"]:
"""Gets all the outbound rules in a load balancer.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either LoadBalancerOutboundRuleListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2021_02_01.models.LoadBalancerOutboundRuleListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LoadBalancerOutboundRuleListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-02-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('LoadBalancerOutboundRuleListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/outboundRules'} # type: ignore
async def get(
self,
resource_group_name: str,
load_balancer_name: str,
outbound_rule_name: str,
**kwargs
) -> "_models.OutboundRule":
"""Gets the specified load balancer outbound rule.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:param outbound_rule_name: The name of the outbound rule.
:type outbound_rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: OutboundRule, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2021_02_01.models.OutboundRule
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.OutboundRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-02-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'outboundRuleName': self._serialize.url("outbound_rule_name", outbound_rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('OutboundRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/outboundRules/{outboundRuleName}'} # type: ignore
| 48.424581 | 206 | 0.671666 |
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class LoadBalancerOutboundRulesOperations:
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name: str,
load_balancer_name: str,
**kwargs
) -> AsyncIterable["_models.LoadBalancerOutboundRuleListResult"]:
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-02-01"
accept = "application/json"
def prepare_request(next_link=None):
header_parameters = {}
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
url = self.list.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('LoadBalancerOutboundRuleListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/outboundRules'}
async def get(
self,
resource_group_name: str,
load_balancer_name: str,
outbound_rule_name: str,
**kwargs
) -> "_models.OutboundRule":
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-02-01"
accept = "application/json"
url = self.get.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'outboundRuleName': self._serialize.url("outbound_rule_name", outbound_rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
header_parameters = {}
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('OutboundRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/outboundRules/{outboundRuleName}'}
| true | true |
f71f76fbbb3977874071bfc11924aee5822e4bea | 2,317 | py | Python | itng/common/templatetags/ng_utils.py | NoviSystems/ng-utils | 29d20ba65fe2078694d18e6a33f7a448b26fa297 | [
"BSD-3-Clause"
] | null | null | null | itng/common/templatetags/ng_utils.py | NoviSystems/ng-utils | 29d20ba65fe2078694d18e6a33f7a448b26fa297 | [
"BSD-3-Clause"
] | null | null | null | itng/common/templatetags/ng_utils.py | NoviSystems/ng-utils | 29d20ba65fe2078694d18e6a33f7a448b26fa297 | [
"BSD-3-Clause"
] | null | null | null |
import re
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.encoding import force_text
from ordered_set import OrderedSet
register = template.Library()
@register.filter
def required(field):
    """Return the string ``required`` when the bound field's underlying
    form field is mandatory, otherwise an empty string."""
    if field.field.required:
        return "required"
    return ""
@register.filter
def add_class(value, css_classes):
    """
    Append one or more CSS classes to a form widget, preserving order and
    dropping duplicates.  Multiple classes are passed as one whitespace
    delimited string, eg {{ field|add_class:"foo bar" }}.
    """
    if not css_classes:
        return value
    widget = value.field.widget
    existing = OrderedSet(widget.attrs.get('class', '').split())
    added = OrderedSet(css_classes.split())
    combined = existing | added
    widget.attrs['class'] = " ".join(combined)
    return value
@register.simple_tag(takes_context=True)
def isactive(context, url, active='active', inactive='', exact=False):
    """
    Ternary tag: return *active* when *url* matches the current request
    path, *inactive* otherwise.

    By default a prefix match is used, so a `url` of '/some/path' is
    considered active when the request path is '/some/path/subpath'.
    Pass exact=True to require the paths to match exactly.

    Example::

        {% url 'named-url' as named_url %}
        <div class="{% isactive named_url 'active' 'inactive' %}">
        </div>
    """
    request_url = context['request'].path_info
    if exact:
        matched = request_url == url
    else:
        matched = request_url.startswith(url)
    return active if matched else inactive
# def ifactive
# refer to {% ifequal %} implementation because it doesn't perform {% if %} condition parsing
# Originally from: https://djangosnippets.org/snippets/1519/
CONSONANT_SOUND = re.compile(r'''one(![ir])''', re.IGNORECASE | re.VERBOSE)
VOWEL_SOUND = re.compile(r'''[aeio]|u([aeiou]|[^n][^aeiou]|ni[^dmnl]|nil[^l])|h(ier|onest|onou?r|ors\b|our(!i))|[fhlmnrsx]\b''', re.IGNORECASE | re.VERBOSE)
@register.filter
def an(text):
    """Prefix *text* with the appropriate English indefinite article
    ('a' or 'an'), chosen heuristically from its leading sound."""
    text = force_text(text)
    starts_with_vowel_sound = (
        not CONSONANT_SOUND.match(text) and VOWEL_SOUND.match(text)
    )
    article = 'an' if starts_with_vowel_sound else 'a'
    return '%s %s' % (article, text)
| 31.739726 | 156 | 0.686664 |
import re
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.encoding import force_text
from ordered_set import OrderedSet
register = template.Library()
@register.filter
def required(field):
return "required" if field.field.required else ""
@register.filter
def add_class(value, css_classes):
if not css_classes:
return value
widget = value.field.widget
orig_classes = OrderedSet(widget.attrs.get('class', '').split())
new_classes = OrderedSet(css_classes.split())
widget.attrs['class'] = " ".join(orig_classes | new_classes)
return value
@register.simple_tag(takes_context=True)
def isactive(context, url, active='active', inactive='', exact=False):
request_url = context['request'].path_info
if (request_url == url if exact else request_url.startswith(url)):
return active
return inactive
# Originally from: https://djangosnippets.org/snippets/1519/
CONSONANT_SOUND = re.compile(r'''one(![ir])''', re.IGNORECASE | re.VERBOSE)
VOWEL_SOUND = re.compile(r'''[aeio]|u([aeiou]|[^n][^aeiou]|ni[^dmnl]|nil[^l])|h(ier|onest|onou?r|ors\b|our(!i))|[fhlmnrsx]\b''', re.IGNORECASE | re.VERBOSE)
@register.filter
def an(text):
text = force_text(text)
match = not CONSONANT_SOUND.match(text) and VOWEL_SOUND.match(text)
return '%s %s' % ('an' if match else 'a', text)
| true | true |
f71f78bb67acd2a761bf282de28af8274e07ab9d | 1,636 | py | Python | Largest_Range.py | Le-bruit-de-nos-pas/python-functions | 0d86f924087da228ef46f6b984239b4ec8b7b305 | [
"MIT"
] | null | null | null | Largest_Range.py | Le-bruit-de-nos-pas/python-functions | 0d86f924087da228ef46f6b984239b4ec8b7b305 | [
"MIT"
] | null | null | null | Largest_Range.py | Le-bruit-de-nos-pas/python-functions | 0d86f924087da228ef46f6b984239b4ec8b7b305 | [
"MIT"
] | null | null | null | array_to_analyze = [11,7,3,4,2,5,1,0]
def largestRange(array_to_analyze):
    """Return the inclusive bounds [low, high] of the longest run of
    consecutive integers whose members all appear in *array_to_analyze*.

    Ties keep the most recently found range (the original ``<=``
    comparison).  For an empty input the default [0, 0] is returned.
    Runs in O(n): every value is absorbed into a range at most once.
    """
    # 0 = not yet absorbed into a range, 1 = already visited.
    elements = {x: 0 for x in array_to_analyze}
    # Bounds of the best range found so far.
    left = 0
    right = 0
    for entry in array_to_analyze:
        # Skip values already swallowed by an earlier expansion.
        if elements[entry] == 0:
            # Bug fix: mark the seed value itself, otherwise duplicate
            # occurrences of `entry` re-expanded the whole range (O(n^2)).
            elements[entry] = 1
            # Walk left while the next-smaller value exists in the array.
            left_count = entry - 1
            while left_count in elements:
                elements[left_count] = 1
                left_count = left_count - 1
            # Step back onto the last value that actually existed.
            left_count = left_count + 1
            # Walk right while the next-larger value exists in the array.
            right_count = entry + 1
            while right_count in elements:
                elements[right_count] = 1
                right_count = right_count + 1
            # Step back onto the last value that actually existed.
            right_count = right_count - 1
            # Keep the widest range seen so far.
            if (right - left) <= (right_count - left_count):
                right = right_count
                left = left_count
    return [left, right]
# all good
print(largestRange(array_to_analyze)) | 38.046512 | 106 | 0.620416 | array_to_analyze = [11,7,3,4,2,5,1,0]
def largestRange(array_to_analyze):
elements = {x:0 for x in array_to_analyze} # set them all to "0"
#how many places have we moved to the left and right
left = 0
right = 0
#for each number
for entry in array_to_analyze:
#if the number has not been seen yet
if elements[entry] == 0:
left_count = entry-1 # start moving to the left
right_count = entry +1 # and the right
# if this left exists
while left_count in elements:
elements[left_count] = 1 # add it to the dictionary
left_count = left_count-1 #keep moving left if the previous number existed in the array
left_count = left_count +1
# if this right exists
while right_count in elements:
elements[right_count] = 1 # add it to the dictionary
right_count = right_count+1 #keep moving right if the previous number existed in the array
right_count = right_count -1
#if it doesn't exist, subtract 1 because we've added one to check a new number
#but it doesn't exist so we need to set it back to the very last number verified
if (right-left) <= (right_count-left_count):
right = right_count
left = left_count
return[left, right]
print(largestRange(array_to_analyze)) | true | true |
f71f7a46733f735693deb2dee446ef1ebe2704f2 | 2,218 | py | Python | notifier.py | gkumar7/vaccine-notifier | 3177fcf7fa0eef38779e544db95844ac5b6edbdd | [
"MIT"
] | 1 | 2021-03-24T02:52:34.000Z | 2021-03-24T02:52:34.000Z | notifier.py | gkumar7/vaccine-notifier | 3177fcf7fa0eef38779e544db95844ac5b6edbdd | [
"MIT"
] | null | null | null | notifier.py | gkumar7/vaccine-notifier | 3177fcf7fa0eef38779e544db95844ac5b6edbdd | [
"MIT"
] | null | null | null | import time
from datetime import datetime
from math import radians, cos, sin, asin, sqrt
import requests
url = "https://www.vaccinespotter.org/api/v0/states/IL.json"
minutes = 1
center = {'lat': 0.0, 'lon': 0.0}
max_distance = 50
found = []
def haversine(lon1, lat1, lon2, lat2):
    """
    Great-circle distance in miles between two points on the earth,
    given as (lon, lat) pairs in decimal degrees, via the haversine
    formula.
    """
    # Work in radians throughout.
    lon1_r, lat1_r, lon2_r, lat2_r = (
        radians(v) for v in (lon1, lat1, lon2, lat2)
    )
    half_dlat = (lat2_r - lat1_r) / 2
    half_dlon = (lon2_r - lon1_r) / 2
    # Haversine formula.
    a = sin(half_dlat) ** 2 + cos(lat1_r) * cos(lat2_r) * sin(half_dlon) ** 2
    central_angle = 2 * asin(sqrt(a))
    earth_radius_miles = 3956
    return earth_radius_miles * central_angle
def get_distance(data):
    """Sort key: the precomputed distance (miles) of a result record."""
    distance = data["distance"]
    return distance
def sound(data):
    """Alert the user that appointments were found.  Currently just
    prints; the GPIO buzzer hooks (Raspberry Pi) are left commented out."""
    print(f"FOUND! {data}")
    # GPIO.output(23, GPIO.HIGH)
    # time.sleep(10)
    # GPIO.output(23, GPIO.LOW)
def run():
    """Poll the vaccinespotter API once and alert when any location within
    `max_distance` miles of `center` has appointments available.

    Side effect: repopulates the module-level `found` list, sorted by
    distance, and calls sound() if it is non-empty.
    """
    print("{} - Running".format(datetime.now()))
    # GPIO.setwarnings(False)
    # GPIO.setmode(GPIO.BCM)
    # GPIO.setup(23, GPIO.OUT)
    # GPIO.output(23, GPIO.LOW)

    # Bug fix: `found` is module-level and was never reset, so results
    # accumulated across polling cycles and re-alerted on stale entries.
    found.clear()

    # Bug fix: explicit timeout so a stalled request can't hang the
    # polling loop forever.
    resp = requests.get(url, timeout=30)
    data = resp.json()
    for feature in data['features']:
        coordinates = feature['geometry']['coordinates']
        # Skip locations without usable GeoJSON coordinates.
        if coordinates[0] is None or coordinates[1] is None:
            continue
        # GeoJSON order is [lon, lat].
        pharmacy_loc = {'lat': coordinates[1], 'lon': coordinates[0]}
        props = feature['properties']
        distance = haversine(center['lon'], center['lat'], pharmacy_loc['lon'], pharmacy_loc['lat'])
        if props['appointments_available'] and distance <= max_distance:
            found.append({
                "name": props['name'],
                "url": props['url'],
                "address": props['address'],
                "city": props['city'],
                "state": props['state'],
                "zip": props['postal_code'],
                "distance": distance
            })
    # Closest locations first.
    found.sort(key=get_distance)
    if len(found):
        sound(found)
    # GPIO.cleanup()
def main():
    """Run the checker forever, sleeping `minutes` between polls."""
    while True:
        run()
        print(f"{datetime.now()} - Sleeping for {minutes} minutes")
        time.sleep(60 * minutes)
if __name__ == '__main__':
main()
| 27.04878 | 100 | 0.576646 | import time
from datetime import datetime
from math import radians, cos, sin, asin, sqrt
import requests
url = "https://www.vaccinespotter.org/api/v0/states/IL.json"
minutes = 1
center = {'lat': 0.0, 'lon': 0.0}
max_distance = 50
found = []
def haversine(lon1, lat1, lon2, lat2):
lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
dlon = lon2 - lon1
dlat = lat2 - lat1
a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2
c = 2 * asin(sqrt(a))
r = 3956
return c * r
def get_distance(data):
return data['distance']
def sound(data):
print("FOUND! {}".format(data))
def run():
print("{} - Running".format(datetime.now()))
resp = requests.get(url)
data = resp.json()
for feature in data['features']:
coordinates = feature['geometry']['coordinates']
if coordinates[0] is None or coordinates[1] is None:
continue
pharmacy_loc = {'lat': coordinates[1], 'lon': coordinates[0]}
props = feature['properties']
distance = haversine(center['lon'], center['lat'], pharmacy_loc['lon'], pharmacy_loc['lat'])
if props['appointments_available'] and distance <= max_distance:
found.append({
"name": props['name'],
"url": props['url'],
"address": props['address'],
"city": props['city'],
"state": props['state'],
"zip": props['postal_code'],
"distance": distance
})
found.sort(key=get_distance)
if len(found):
sound(found)
def main():
while True:
run()
print("{} - Sleeping for {} minutes".format(datetime.now(), minutes))
time.sleep(minutes * 60)
if __name__ == '__main__':
main()
| true | true |
f71f7aaa0bb10df8c141305e95139c15bca2394f | 4,249 | py | Python | tests/test_histogram2d.py | ess-dmsc/JustBinIt | dc8242ed44f03e92f60618c96596025ec8cbc40e | [
"BSD-2-Clause"
] | null | null | null | tests/test_histogram2d.py | ess-dmsc/JustBinIt | dc8242ed44f03e92f60618c96596025ec8cbc40e | [
"BSD-2-Clause"
] | 23 | 2018-12-04T11:50:37.000Z | 2022-03-17T11:30:39.000Z | tests/test_histogram2d.py | ess-dmsc/JustBinIt | dc8242ed44f03e92f60618c96596025ec8cbc40e | [
"BSD-2-Clause"
] | 2 | 2019-07-24T11:13:41.000Z | 2020-08-04T18:33:22.000Z | import numpy as np
import pytest
from just_bin_it.histograms.histogram2d import Histogram2d
IRRELEVANT_TOPIC = "some-topic"
class TestHistogram2dFunctionality:
    """Behavioural tests for just_bin_it's Histogram2d: construction,
    data accumulation, source filtering, clearing and pulse-time tracking.
    """

    @pytest.fixture(autouse=True)
    def prepare(self):
        # Shared fixture: a 5x10 histogram over tof range [0, 10] and
        # detector range [0, 5], plus sample event data [0, 1, 2, 3, 4].
        self.pulse_time = 1234
        self.num_bins = (5, 10)
        self.tof_range = (0, 10)
        self.det_range = (0, 5)
        self.data = np.array([x for x in range(self.num_bins[0])])
        self.hist = Histogram2d("topic", self.num_bins, self.tof_range, self.det_range)

    def test_if_single_value_for_num_bins_then_value_used_for_both_x_and_y(self):
        # A scalar bin count should apply to both axes.
        num_bins = 5
        hist = Histogram2d("topic", num_bins, self.tof_range, self.det_range)
        assert len(hist.x_edges) == num_bins + 1
        assert len(hist.y_edges) == num_bins + 1
        assert hist.shape == (num_bins, num_bins)

    def test_on_construction_histogram_is_uninitialised(self):
        # Edges are laid out immediately, but all counts start at zero.
        assert self.hist.x_edges is not None
        assert self.hist.y_edges is not None
        assert self.hist.shape == self.num_bins
        # N bins require N + 1 edges per axis.
        assert len(self.hist.x_edges) == self.num_bins[0] + 1
        assert len(self.hist.y_edges) == self.num_bins[1] + 1
        assert self.hist.x_edges[0] == self.data[0]
        assert self.hist.x_edges[-1] == 10
        assert self.hist.y_edges[0] == self.data[0]
        assert self.hist.y_edges[-1] == 5
        assert self.hist.data.sum() == 0

    def test_adding_data_to_initialised_histogram_new_data_is_added(self):
        self.hist.add_data(self.pulse_time, self.data, self.data)
        first_sum = self.hist.data.sum()
        # Add the data again
        self.hist.add_data(self.pulse_time, self.data, self.data)
        # Sum should be double
        assert self.hist.data.sum() == first_sum * 2

    def test_adding_data_outside_initial_bins_is_ignored(self):
        self.hist.add_data(self.pulse_time, self.data, self.data)
        first_sum = self.hist.data.sum()
        x_edges = self.hist.x_edges[:]
        y_edges = self.hist.y_edges[:]
        # Add data that is outside the edges
        new_data = np.array([x + self.num_bins[0] + 1 for x in range(self.num_bins[0])])
        self.hist.add_data(self.pulse_time, new_data, new_data)
        # Sum should not change
        assert self.hist.data.sum() == first_sum
        # Edges should not change
        assert np.array_equal(self.hist.x_edges, x_edges)
        assert np.array_equal(self.hist.y_edges, y_edges)

    def test_if_no_id_supplied_then_defaults_to_empty_string(self):
        assert self.hist.identifier == ""

    def test_id_supplied_then_is_set(self):
        example_id = "abcdef"
        hist = Histogram2d(
            "topic1",
            self.num_bins,
            self.tof_range,
            self.det_range,
            identifier=example_id,
        )
        assert hist.identifier == example_id

    def test_only_data_with_correct_source_is_added(self):
        # When a source filter is set, events from other sources are dropped:
        # two matching batches of 5 events each are counted, the third is not.
        hist = Histogram2d(
            "topic", self.num_bins, self.tof_range, self.det_range, source="source1"
        )
        hist.add_data(self.pulse_time, self.data, self.data, source="source1")
        hist.add_data(self.pulse_time, self.data, self.data, source="source1")
        hist.add_data(self.pulse_time, self.data, self.data, source="OTHER")
        assert hist.data.sum() == 10

    def test_clearing_histogram_data_clears_histogram(self):
        self.hist.add_data(self.pulse_time, self.data, self.data)
        self.hist.clear_data()
        assert self.hist.data.sum() == 0

    def test_after_clearing_histogram_can_add_data(self):
        # Clearing must not invalidate the histogram for further use.
        self.hist.add_data(self.pulse_time, self.data, self.data)
        self.hist.clear_data()
        self.hist.add_data(self.pulse_time, self.data, self.data)
        assert self.hist.shape == self.num_bins
        assert self.hist.data.sum() == 5

    def test_adding_empty_data_does_nothing(self):
        self.hist.add_data(self.pulse_time, [], [])
        assert self.hist.data.sum() == 0

    def test_histogram_keeps_track_of_last_pulse_time_processed(self):
        self.hist.add_data(1234, self.data, self.data)
        self.hist.add_data(1235, self.data, self.data)
        self.hist.add_data(1236, self.data, self.data)
        assert self.hist.last_pulse_time == 1236
| 36.62931 | 88 | 0.65992 | import numpy as np
import pytest
from just_bin_it.histograms.histogram2d import Histogram2d
IRRELEVANT_TOPIC = "some-topic"
class TestHistogram2dFunctionality:
@pytest.fixture(autouse=True)
def prepare(self):
self.pulse_time = 1234
self.num_bins = (5, 10)
self.tof_range = (0, 10)
self.det_range = (0, 5)
self.data = np.array([x for x in range(self.num_bins[0])])
self.hist = Histogram2d("topic", self.num_bins, self.tof_range, self.det_range)
def test_if_single_value_for_num_bins_then_value_used_for_both_x_and_y(self):
num_bins = 5
hist = Histogram2d("topic", num_bins, self.tof_range, self.det_range)
assert len(hist.x_edges) == num_bins + 1
assert len(hist.y_edges) == num_bins + 1
assert hist.shape == (num_bins, num_bins)
def test_on_construction_histogram_is_uninitialised(self):
assert self.hist.x_edges is not None
assert self.hist.y_edges is not None
assert self.hist.shape == self.num_bins
assert len(self.hist.x_edges) == self.num_bins[0] + 1
assert len(self.hist.y_edges) == self.num_bins[1] + 1
assert self.hist.x_edges[0] == self.data[0]
assert self.hist.x_edges[-1] == 10
assert self.hist.y_edges[0] == self.data[0]
assert self.hist.y_edges[-1] == 5
assert self.hist.data.sum() == 0
def test_adding_data_to_initialised_histogram_new_data_is_added(self):
self.hist.add_data(self.pulse_time, self.data, self.data)
first_sum = self.hist.data.sum()
self.hist.add_data(self.pulse_time, self.data, self.data)
assert self.hist.data.sum() == first_sum * 2
def test_adding_data_outside_initial_bins_is_ignored(self):
self.hist.add_data(self.pulse_time, self.data, self.data)
first_sum = self.hist.data.sum()
x_edges = self.hist.x_edges[:]
y_edges = self.hist.y_edges[:]
new_data = np.array([x + self.num_bins[0] + 1 for x in range(self.num_bins[0])])
self.hist.add_data(self.pulse_time, new_data, new_data)
assert self.hist.data.sum() == first_sum
assert np.array_equal(self.hist.x_edges, x_edges)
assert np.array_equal(self.hist.y_edges, y_edges)
def test_if_no_id_supplied_then_defaults_to_empty_string(self):
assert self.hist.identifier == ""
def test_id_supplied_then_is_set(self):
example_id = "abcdef"
hist = Histogram2d(
"topic1",
self.num_bins,
self.tof_range,
self.det_range,
identifier=example_id,
)
assert hist.identifier == example_id
def test_only_data_with_correct_source_is_added(self):
hist = Histogram2d(
"topic", self.num_bins, self.tof_range, self.det_range, source="source1"
)
hist.add_data(self.pulse_time, self.data, self.data, source="source1")
hist.add_data(self.pulse_time, self.data, self.data, source="source1")
hist.add_data(self.pulse_time, self.data, self.data, source="OTHER")
assert hist.data.sum() == 10
def test_clearing_histogram_data_clears_histogram(self):
self.hist.add_data(self.pulse_time, self.data, self.data)
self.hist.clear_data()
assert self.hist.data.sum() == 0
def test_after_clearing_histogram_can_add_data(self):
self.hist.add_data(self.pulse_time, self.data, self.data)
self.hist.clear_data()
self.hist.add_data(self.pulse_time, self.data, self.data)
assert self.hist.shape == self.num_bins
assert self.hist.data.sum() == 5
def test_adding_empty_data_does_nothing(self):
self.hist.add_data(self.pulse_time, [], [])
assert self.hist.data.sum() == 0
def test_histogram_keeps_track_of_last_pulse_time_processed(self):
self.hist.add_data(1234, self.data, self.data)
self.hist.add_data(1235, self.data, self.data)
self.hist.add_data(1236, self.data, self.data)
assert self.hist.last_pulse_time == 1236
| true | true |
f71f7e5bf94980d2547f9d71b092b8666b476e67 | 17,709 | py | Python | contrib/tools/python3/src/Lib/wave.py | HeyLey/catboost | f472aed90604ebe727537d9d4a37147985e10ec2 | [
"Apache-2.0"
] | 486 | 2016-05-28T18:51:54.000Z | 2022-03-20T17:30:31.000Z | contrib/tools/python3/src/Lib/wave.py | HeyLey/catboost | f472aed90604ebe727537d9d4a37147985e10ec2 | [
"Apache-2.0"
] | 42 | 2018-05-25T15:57:08.000Z | 2021-01-17T18:39:59.000Z | contrib/tools/python3/src/Lib/wave.py | HeyLey/catboost | f472aed90604ebe727537d9d4a37147985e10ec2 | [
"Apache-2.0"
] | 46 | 2016-05-28T18:52:03.000Z | 2021-06-01T07:57:51.000Z | """Stuff to parse WAVE files.
Usage.
Reading WAVE files:
f = wave.open(file, 'r')
where file is either the name of a file or an open file pointer.
The open file pointer must have methods read(), seek(), and close().
When the setpos() and rewind() methods are not used, the seek()
method is not necessary.
This returns an instance of a class with the following public methods:
getnchannels() -- returns number of audio channels (1 for
mono, 2 for stereo)
getsampwidth() -- returns sample width in bytes
getframerate() -- returns sampling frequency
getnframes() -- returns number of audio frames
getcomptype() -- returns compression type ('NONE' for linear samples)
getcompname() -- returns human-readable version of
compression type ('not compressed' linear samples)
getparams() -- returns a namedtuple consisting of all of the
above in the above order
getmarkers() -- returns None (for compatibility with the
aifc module)
getmark(id) -- raises an error since the mark does not
exist (for compatibility with the aifc module)
readframes(n) -- returns at most n frames of audio
rewind() -- rewind to the beginning of the audio stream
setpos(pos) -- seek to the specified position
tell() -- return the current position
close() -- close the instance (make it unusable)
The position returned by tell() and the position given to setpos()
are compatible and have nothing to do with the actual position in the
file.
The close() method is called automatically when the class instance
is destroyed.
Writing WAVE files:
f = wave.open(file, 'w')
where file is either the name of a file or an open file pointer.
The open file pointer must have methods write(), tell(), seek(), and
close().
This returns an instance of a class with the following public methods:
setnchannels(n) -- set the number of channels
setsampwidth(n) -- set the sample width
setframerate(n) -- set the frame rate
setnframes(n) -- set the number of frames
setcomptype(type, name)
-- set the compression type and the
human-readable compression type
setparams(tuple)
-- set all parameters at once
tell() -- return current position in output file
writeframesraw(data)
-- write audio frames without pathing up the
file header
writeframes(data)
-- write audio frames and patch up the file header
close() -- patch up the file header and close the
output file
You should set the parameters before the first writeframesraw or
writeframes. The total number of frames does not need to be set,
but when it is set to the correct value, the header does not have to
be patched up.
It is best to first set all parameters, perhaps possibly the
compression type, and then write audio frames using writeframesraw.
When all frames have been written, either call writeframes(b'') or
close() to patch up the sizes in the header.
The close() method is called automatically when the class instance
is destroyed.
"""
import builtins
__all__ = ["open", "openfp", "Error", "Wave_read", "Wave_write"]
class Error(Exception):
pass
WAVE_FORMAT_PCM = 0x0001
_array_fmts = None, 'b', 'h', None, 'i'
import audioop
import struct
import sys
from chunk import Chunk
from collections import namedtuple
_wave_params = namedtuple('_wave_params',
'nchannels sampwidth framerate nframes comptype compname')
class Wave_read:
"""Variables used in this class:
These variables are available to the user though appropriate
methods of this class:
_file -- the open file with methods read(), close(), and seek()
set through the __init__() method
_nchannels -- the number of audio channels
available through the getnchannels() method
_nframes -- the number of audio frames
available through the getnframes() method
_sampwidth -- the number of bytes per audio sample
available through the getsampwidth() method
_framerate -- the sampling frequency
available through the getframerate() method
_comptype -- the AIFF-C compression type ('NONE' if AIFF)
available through the getcomptype() method
_compname -- the human-readable AIFF-C compression type
available through the getcomptype() method
_soundpos -- the position in the audio stream
available through the tell() method, set through the
setpos() method
These variables are used internally only:
_fmt_chunk_read -- 1 iff the FMT chunk has been read
_data_seek_needed -- 1 iff positioned correctly in audio
file for readframes()
_data_chunk -- instantiation of a chunk class for the DATA chunk
_framesize -- size of one frame in the file
"""
def initfp(self, file):
self._convert = None
self._soundpos = 0
self._file = Chunk(file, bigendian = 0)
if self._file.getname() != b'RIFF':
raise Error('file does not start with RIFF id')
if self._file.read(4) != b'WAVE':
raise Error('not a WAVE file')
self._fmt_chunk_read = 0
self._data_chunk = None
while 1:
self._data_seek_needed = 1
try:
chunk = Chunk(self._file, bigendian = 0)
except EOFError:
break
chunkname = chunk.getname()
if chunkname == b'fmt ':
self._read_fmt_chunk(chunk)
self._fmt_chunk_read = 1
elif chunkname == b'data':
if not self._fmt_chunk_read:
raise Error('data chunk before fmt chunk')
self._data_chunk = chunk
self._nframes = chunk.chunksize // self._framesize
self._data_seek_needed = 0
break
chunk.skip()
if not self._fmt_chunk_read or not self._data_chunk:
raise Error('fmt chunk and/or data chunk missing')
def __init__(self, f):
self._i_opened_the_file = None
if isinstance(f, str):
f = builtins.open(f, 'rb')
self._i_opened_the_file = f
# else, assume it is an open file object already
try:
self.initfp(f)
except:
if self._i_opened_the_file:
f.close()
raise
def __del__(self):
self.close()
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
#
# User visible methods.
#
def getfp(self):
return self._file
def rewind(self):
self._data_seek_needed = 1
self._soundpos = 0
def close(self):
self._file = None
file = self._i_opened_the_file
if file:
self._i_opened_the_file = None
file.close()
def tell(self):
return self._soundpos
def getnchannels(self):
return self._nchannels
def getnframes(self):
return self._nframes
def getsampwidth(self):
return self._sampwidth
def getframerate(self):
return self._framerate
def getcomptype(self):
return self._comptype
def getcompname(self):
return self._compname
def getparams(self):
return _wave_params(self.getnchannels(), self.getsampwidth(),
self.getframerate(), self.getnframes(),
self.getcomptype(), self.getcompname())
def getmarkers(self):
return None
def getmark(self, id):
raise Error('no marks')
def setpos(self, pos):
if pos < 0 or pos > self._nframes:
raise Error('position not in range')
self._soundpos = pos
self._data_seek_needed = 1
def readframes(self, nframes):
if self._data_seek_needed:
self._data_chunk.seek(0, 0)
pos = self._soundpos * self._framesize
if pos:
self._data_chunk.seek(pos, 0)
self._data_seek_needed = 0
if nframes == 0:
return b''
data = self._data_chunk.read(nframes * self._framesize)
if self._sampwidth != 1 and sys.byteorder == 'big':
data = audioop.byteswap(data, self._sampwidth)
if self._convert and data:
data = self._convert(data)
self._soundpos = self._soundpos + len(data) // (self._nchannels * self._sampwidth)
return data
#
# Internal methods.
#
def _read_fmt_chunk(self, chunk):
wFormatTag, self._nchannels, self._framerate, dwAvgBytesPerSec, wBlockAlign = struct.unpack_from('<HHLLH', chunk.read(14))
if wFormatTag == WAVE_FORMAT_PCM:
sampwidth = struct.unpack_from('<H', chunk.read(2))[0]
self._sampwidth = (sampwidth + 7) // 8
else:
raise Error('unknown format: %r' % (wFormatTag,))
self._framesize = self._nchannels * self._sampwidth
self._comptype = 'NONE'
self._compname = 'not compressed'
class Wave_write:
"""Variables used in this class:
These variables are user settable through appropriate methods
of this class:
_file -- the open file with methods write(), close(), tell(), seek()
set through the __init__() method
_comptype -- the AIFF-C compression type ('NONE' in AIFF)
set through the setcomptype() or setparams() method
_compname -- the human-readable AIFF-C compression type
set through the setcomptype() or setparams() method
_nchannels -- the number of audio channels
set through the setnchannels() or setparams() method
_sampwidth -- the number of bytes per audio sample
set through the setsampwidth() or setparams() method
_framerate -- the sampling frequency
set through the setframerate() or setparams() method
_nframes -- the number of audio frames written to the header
set through the setnframes() or setparams() method
These variables are used internally only:
_datalength -- the size of the audio samples written to the header
_nframeswritten -- the number of frames actually written
_datawritten -- the size of the audio samples actually written
"""
def __init__(self, f):
self._i_opened_the_file = None
if isinstance(f, str):
f = builtins.open(f, 'wb')
self._i_opened_the_file = f
try:
self.initfp(f)
except:
if self._i_opened_the_file:
f.close()
raise
def initfp(self, file):
self._file = file
self._convert = None
self._nchannels = 0
self._sampwidth = 0
self._framerate = 0
self._nframes = 0
self._nframeswritten = 0
self._datawritten = 0
self._datalength = 0
self._headerwritten = False
def __del__(self):
self.close()
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
#
# User visible methods.
#
def setnchannels(self, nchannels):
if self._datawritten:
raise Error('cannot change parameters after starting to write')
if nchannels < 1:
raise Error('bad # of channels')
self._nchannels = nchannels
def getnchannels(self):
if not self._nchannels:
raise Error('number of channels not set')
return self._nchannels
def setsampwidth(self, sampwidth):
if self._datawritten:
raise Error('cannot change parameters after starting to write')
if sampwidth < 1 or sampwidth > 4:
raise Error('bad sample width')
self._sampwidth = sampwidth
def getsampwidth(self):
if not self._sampwidth:
raise Error('sample width not set')
return self._sampwidth
def setframerate(self, framerate):
if self._datawritten:
raise Error('cannot change parameters after starting to write')
if framerate <= 0:
raise Error('bad frame rate')
self._framerate = int(round(framerate))
def getframerate(self):
if not self._framerate:
raise Error('frame rate not set')
return self._framerate
def setnframes(self, nframes):
if self._datawritten:
raise Error('cannot change parameters after starting to write')
self._nframes = nframes
def getnframes(self):
return self._nframeswritten
def setcomptype(self, comptype, compname):
if self._datawritten:
raise Error('cannot change parameters after starting to write')
if comptype not in ('NONE',):
raise Error('unsupported compression type')
self._comptype = comptype
self._compname = compname
def getcomptype(self):
return self._comptype
def getcompname(self):
return self._compname
def setparams(self, params):
nchannels, sampwidth, framerate, nframes, comptype, compname = params
if self._datawritten:
raise Error('cannot change parameters after starting to write')
self.setnchannels(nchannels)
self.setsampwidth(sampwidth)
self.setframerate(framerate)
self.setnframes(nframes)
self.setcomptype(comptype, compname)
def getparams(self):
if not self._nchannels or not self._sampwidth or not self._framerate:
raise Error('not all parameters set')
return _wave_params(self._nchannels, self._sampwidth, self._framerate,
self._nframes, self._comptype, self._compname)
def setmark(self, id, pos, name):
raise Error('setmark() not supported')
def getmark(self, id):
raise Error('no marks')
def getmarkers(self):
return None
def tell(self):
return self._nframeswritten
def writeframesraw(self, data):
if not isinstance(data, (bytes, bytearray)):
data = memoryview(data).cast('B')
self._ensure_header_written(len(data))
nframes = len(data) // (self._sampwidth * self._nchannels)
if self._convert:
data = self._convert(data)
if self._sampwidth != 1 and sys.byteorder == 'big':
data = audioop.byteswap(data, self._sampwidth)
self._file.write(data)
self._datawritten += len(data)
self._nframeswritten = self._nframeswritten + nframes
def writeframes(self, data):
self.writeframesraw(data)
if self._datalength != self._datawritten:
self._patchheader()
def close(self):
try:
if self._file:
self._ensure_header_written(0)
if self._datalength != self._datawritten:
self._patchheader()
self._file.flush()
finally:
self._file = None
file = self._i_opened_the_file
if file:
self._i_opened_the_file = None
file.close()
#
# Internal methods.
#
def _ensure_header_written(self, datasize):
if not self._headerwritten:
if not self._nchannels:
raise Error('# channels not specified')
if not self._sampwidth:
raise Error('sample width not specified')
if not self._framerate:
raise Error('sampling rate not specified')
self._write_header(datasize)
def _write_header(self, initlength):
assert not self._headerwritten
self._file.write(b'RIFF')
if not self._nframes:
self._nframes = initlength // (self._nchannels * self._sampwidth)
self._datalength = self._nframes * self._nchannels * self._sampwidth
try:
self._form_length_pos = self._file.tell()
except (AttributeError, OSError):
self._form_length_pos = None
self._file.write(struct.pack('<L4s4sLHHLLHH4s',
36 + self._datalength, b'WAVE', b'fmt ', 16,
WAVE_FORMAT_PCM, self._nchannels, self._framerate,
self._nchannels * self._framerate * self._sampwidth,
self._nchannels * self._sampwidth,
self._sampwidth * 8, b'data'))
if self._form_length_pos is not None:
self._data_length_pos = self._file.tell()
self._file.write(struct.pack('<L', self._datalength))
self._headerwritten = True
    def _patchheader(self):
        """Seek back and rewrite the RIFF and data length fields.

        Called when the number of frames actually written differs from
        what the header currently claims; restores the file position.
        """
        assert self._headerwritten
        if self._datawritten == self._datalength:
            return
        curpos = self._file.tell()
        self._file.seek(self._form_length_pos, 0)
        self._file.write(struct.pack('<L', 36 + self._datawritten))
        self._file.seek(self._data_length_pos, 0)
        self._file.write(struct.pack('<L', self._datawritten))
        self._file.seek(curpos, 0)
        self._datalength = self._datawritten
def open(f, mode=None):
    """Open a WAV file for reading ('r'/'rb') or writing ('w'/'wb').

    When *mode* is omitted it is taken from ``f.mode`` if present,
    defaulting to 'rb'.
    """
    if mode is None:
        mode = getattr(f, 'mode', 'rb')
    if mode == 'r' or mode == 'rb':
        return Wave_read(f)
    if mode == 'w' or mode == 'wb':
        return Wave_write(f)
    raise Error("mode must be 'r', 'rb', 'w', or 'wb'")
openfp = open # B/W compatibility
| 34.998024 | 130 | 0.61257 |
import builtins
__all__ = ["open", "openfp", "Error", "Wave_read", "Wave_write"]
class Error(Exception):
    """Raised for malformed WAV files and misuse of the reader/writer API."""
    pass
WAVE_FORMAT_PCM = 0x0001
_array_fmts = None, 'b', 'h', None, 'i'
import audioop
import struct
import sys
from chunk import Chunk
from collections import namedtuple
_wave_params = namedtuple('_wave_params',
'nchannels sampwidth framerate nframes comptype compname')
class Wave_read:
    """Read-side access to a RIFF/WAVE file (uncompressed PCM only)."""
    def initfp(self, file):
        """Parse the RIFF container: locate the fmt chunk, stop at data.

        Raises Error if the file is not a WAVE file, if the data chunk
        precedes the fmt chunk, or if either chunk is missing.
        """
        self._convert = None
        self._soundpos = 0
        self._file = Chunk(file, bigendian = 0)
        if self._file.getname() != b'RIFF':
            raise Error('file does not start with RIFF id')
        if self._file.read(4) != b'WAVE':
            raise Error('not a WAVE file')
        self._fmt_chunk_read = 0
        self._data_chunk = None
        while 1:
            self._data_seek_needed = 1
            try:
                chunk = Chunk(self._file, bigendian = 0)
            except EOFError:
                break
            chunkname = chunk.getname()
            if chunkname == b'fmt ':
                self._read_fmt_chunk(chunk)
                self._fmt_chunk_read = 1
            elif chunkname == b'data':
                if not self._fmt_chunk_read:
                    raise Error('data chunk before fmt chunk')
                self._data_chunk = chunk
                self._nframes = chunk.chunksize // self._framesize
                self._data_seek_needed = 0
                break
            chunk.skip()
        if not self._fmt_chunk_read or not self._data_chunk:
            raise Error('fmt chunk and/or data chunk missing')
    def __init__(self, f):
        """Accept a filename or an open binary file object."""
        self._i_opened_the_file = None
        if isinstance(f, str):
            f = builtins.open(f, 'rb')
            self._i_opened_the_file = f
        try:
            self.initfp(f)
        except:
            # Only close files we opened ourselves.
            if self._i_opened_the_file:
                f.close()
            raise
    def __del__(self):
        self.close()
    def __enter__(self):
        return self
    def __exit__(self, *args):
        self.close()
    def getfp(self):
        """Return the underlying Chunk object wrapping the file."""
        return self._file
    def rewind(self):
        """Reset the read position to the first frame."""
        self._data_seek_needed = 1
        self._soundpos = 0
    def close(self):
        """Detach from the file; close it only if this object opened it."""
        self._file = None
        file = self._i_opened_the_file
        if file:
            self._i_opened_the_file = None
            file.close()
    def tell(self):
        """Return the current frame position."""
        return self._soundpos
    def getnchannels(self):
        return self._nchannels
    def getnframes(self):
        return self._nframes
    def getsampwidth(self):
        return self._sampwidth
    def getframerate(self):
        return self._framerate
    def getcomptype(self):
        return self._comptype
    def getcompname(self):
        return self._compname
    def getparams(self):
        """Return all stream parameters as a _wave_params namedtuple."""
        return _wave_params(self.getnchannels(), self.getsampwidth(),
                            self.getframerate(), self.getnframes(),
                            self.getcomptype(), self.getcompname())
    def getmarkers(self):
        # WAV files carry no marks; kept for aifc API compatibility.
        return None
    def getmark(self, id):
        raise Error('no marks')
    def setpos(self, pos):
        """Seek to frame *pos*; actual seek is deferred to readframes()."""
        if pos < 0 or pos > self._nframes:
            raise Error('position not in range')
        self._soundpos = pos
        self._data_seek_needed = 1
    def readframes(self, nframes):
        """Read and return up to *nframes* frames as bytes.

        On big-endian hosts, multi-byte samples are swapped to native
        order before any user-supplied conversion runs.
        """
        if self._data_seek_needed:
            self._data_chunk.seek(0, 0)
            pos = self._soundpos * self._framesize
            if pos:
                self._data_chunk.seek(pos, 0)
            self._data_seek_needed = 0
        if nframes == 0:
            return b''
        data = self._data_chunk.read(nframes * self._framesize)
        if self._sampwidth != 1 and sys.byteorder == 'big':
            data = audioop.byteswap(data, self._sampwidth)
        if self._convert and data:
            data = self._convert(data)
        self._soundpos = self._soundpos + len(data) // (self._nchannels * self._sampwidth)
        return data
    def _read_fmt_chunk(self, chunk):
        """Decode the fmt chunk; only WAVE_FORMAT_PCM is supported."""
        wFormatTag, self._nchannels, self._framerate, dwAvgBytesPerSec, wBlockAlign = struct.unpack_from('<HHLLH', chunk.read(14))
        if wFormatTag == WAVE_FORMAT_PCM:
            sampwidth = struct.unpack_from('<H', chunk.read(2))[0]
            # Bits-per-sample rounded up to whole bytes.
            self._sampwidth = (sampwidth + 7) // 8
        else:
            raise Error('unknown format: %r' % (wFormatTag,))
        self._framesize = self._nchannels * self._sampwidth
        self._comptype = 'NONE'
        self._compname = 'not compressed'
class Wave_write:
    """Write-side access to a RIFF/WAVE file (uncompressed PCM only).

    Channels, sample width and frame rate must be set before the first
    write; the header is emitted lazily and its length fields patched on
    close when the stream is seekable.
    """
    def __init__(self, f):
        """Accept a filename or an open binary file object."""
        self._i_opened_the_file = None
        if isinstance(f, str):
            f = builtins.open(f, 'wb')
            self._i_opened_the_file = f
        try:
            self.initfp(f)
        except:
            # Only close files we opened ourselves.
            if self._i_opened_the_file:
                f.close()
            raise
    def initfp(self, file):
        """Reset all stream state for a fresh output file."""
        self._file = file
        self._convert = None
        self._nchannels = 0
        self._sampwidth = 0
        self._framerate = 0
        self._nframes = 0
        self._nframeswritten = 0
        self._datawritten = 0
        self._datalength = 0
        self._headerwritten = False
    def __del__(self):
        self.close()
    def __enter__(self):
        return self
    def __exit__(self, *args):
        self.close()
    def setnchannels(self, nchannels):
        """Set the channel count; disallowed once data has been written."""
        if self._datawritten:
            raise Error('cannot change parameters after starting to write')
        if nchannels < 1:
            raise Error('bad # of channels')
        self._nchannels = nchannels
    def getnchannels(self):
        if not self._nchannels:
            raise Error('number of channels not set')
        return self._nchannels
    def setsampwidth(self, sampwidth):
        """Set the sample width in bytes (1..4)."""
        if self._datawritten:
            raise Error('cannot change parameters after starting to write')
        if sampwidth < 1 or sampwidth > 4:
            raise Error('bad sample width')
        self._sampwidth = sampwidth
    def getsampwidth(self):
        if not self._sampwidth:
            raise Error('sample width not set')
        return self._sampwidth
    def setframerate(self, framerate):
        """Set the frame rate; non-integral rates are rounded."""
        if self._datawritten:
            raise Error('cannot change parameters after starting to write')
        if framerate <= 0:
            raise Error('bad frame rate')
        self._framerate = int(round(framerate))
    def getframerate(self):
        if not self._framerate:
            raise Error('frame rate not set')
        return self._framerate
    def setnframes(self, nframes):
        if self._datawritten:
            raise Error('cannot change parameters after starting to write')
        self._nframes = nframes
    def getnframes(self):
        # Frames actually written, not the declared count.
        return self._nframeswritten
    def setcomptype(self, comptype, compname):
        """Only 'NONE' (uncompressed) is accepted."""
        if self._datawritten:
            raise Error('cannot change parameters after starting to write')
        if comptype not in ('NONE',):
            raise Error('unsupported compression type')
        self._comptype = comptype
        self._compname = compname
    def getcomptype(self):
        return self._comptype
    def getcompname(self):
        return self._compname
    def setparams(self, params):
        """Set every stream parameter at once from a 6-tuple."""
        nchannels, sampwidth, framerate, nframes, comptype, compname = params
        if self._datawritten:
            raise Error('cannot change parameters after starting to write')
        self.setnchannels(nchannels)
        self.setsampwidth(sampwidth)
        self.setframerate(framerate)
        self.setnframes(nframes)
        self.setcomptype(comptype, compname)
    def getparams(self):
        """Return all parameters; raises Error if any is still unset."""
        if not self._nchannels or not self._sampwidth or not self._framerate:
            raise Error('not all parameters set')
        return _wave_params(self._nchannels, self._sampwidth, self._framerate,
                            self._nframes, self._comptype, self._compname)
    def setmark(self, id, pos, name):
        raise Error('setmark() not supported')
    def getmark(self, id):
        raise Error('no marks')
    def getmarkers(self):
        return None
    def tell(self):
        """Return the number of audio frames written so far."""
        return self._nframeswritten
    def writeframesraw(self, data):
        """Write audio *data* without patching the header afterwards."""
        if not isinstance(data, (bytes, bytearray)):
            data = memoryview(data).cast('B')
        self._ensure_header_written(len(data))
        # Frame count is based on the raw length before conversion.
        nframes = len(data) // (self._sampwidth * self._nchannels)
        if self._convert:
            data = self._convert(data)
        if self._sampwidth != 1 and sys.byteorder == 'big':
            data = audioop.byteswap(data, self._sampwidth)
        self._file.write(data)
        self._datawritten += len(data)
        self._nframeswritten = self._nframeswritten + nframes
    def writeframes(self, data):
        """Write audio *data* and patch the header if the length changed."""
        self.writeframesraw(data)
        if self._datalength != self._datawritten:
            self._patchheader()
    def close(self):
        """Finalize the header and close the file if we opened it."""
        try:
            if self._file:
                self._ensure_header_written(0)
                if self._datalength != self._datawritten:
                    self._patchheader()
                self._file.flush()
        finally:
            self._file = None
            file = self._i_opened_the_file
            if file:
                self._i_opened_the_file = None
                file.close()
    def _ensure_header_written(self, datasize):
        """Write the RIFF header once, validating required parameters."""
        if not self._headerwritten:
            if not self._nchannels:
                raise Error('# channels not specified')
            if not self._sampwidth:
                raise Error('sample width not specified')
            if not self._framerate:
                raise Error('sampling rate not specified')
            self._write_header(datasize)
    def _write_header(self, initlength):
        """Emit the RIFF/WAVE/fmt/data header and remember patch offsets."""
        assert not self._headerwritten
        self._file.write(b'RIFF')
        if not self._nframes:
            self._nframes = initlength // (self._nchannels * self._sampwidth)
        self._datalength = self._nframes * self._nchannels * self._sampwidth
        try:
            self._form_length_pos = self._file.tell()
        except (AttributeError, OSError):
            # Unseekable stream: header cannot be patched later.
            self._form_length_pos = None
        self._file.write(struct.pack('<L4s4sLHHLLHH4s',
            36 + self._datalength, b'WAVE', b'fmt ', 16,
            WAVE_FORMAT_PCM, self._nchannels, self._framerate,
            self._nchannels * self._framerate * self._sampwidth,
            self._nchannels * self._sampwidth,
            self._sampwidth * 8, b'data'))
        if self._form_length_pos is not None:
            self._data_length_pos = self._file.tell()
        self._file.write(struct.pack('<L', self._datalength))
        self._headerwritten = True
    def _patchheader(self):
        """Rewrite the RIFF and data length fields in place."""
        assert self._headerwritten
        if self._datawritten == self._datalength:
            return
        curpos = self._file.tell()
        self._file.seek(self._form_length_pos, 0)
        self._file.write(struct.pack('<L', 36 + self._datawritten))
        self._file.seek(self._data_length_pos, 0)
        self._file.write(struct.pack('<L', self._datawritten))
        self._file.seek(curpos, 0)
        self._datalength = self._datawritten
def open(f, mode=None):
    """Open a WAV file and return a Wave_read or Wave_write object."""
    chosen = mode if mode is not None else getattr(f, 'mode', 'rb')
    if chosen in ('r', 'rb'):
        return Wave_read(f)
    if chosen in ('w', 'wb'):
        return Wave_write(f)
    raise Error("mode must be 'r', 'rb', 'w', or 'wb'")
openfp = open
| true | true |
f71f7e70877767a16cf3a649cd197af3470937c5 | 2,541 | py | Python | interface/Movie.py | BrickText/JHROM | d99b907e0837d8dcc57ab474e9435891736f0dda | [
"MIT"
] | null | null | null | interface/Movie.py | BrickText/JHROM | d99b907e0837d8dcc57ab474e9435891736f0dda | [
"MIT"
] | null | null | null | interface/Movie.py | BrickText/JHROM | d99b907e0837d8dcc57ab474e9435891736f0dda | [
"MIT"
] | null | null | null | from database.queries.insert_queries import INSERT_MOVIE
from database.queries.update_queries import UPDATE_MOVIE
from database.queries.delete_queries import DELETE_MOVIE
from database.queries.select_queries import SELECT_MOVIES_ORDERED_BY_RATING,\
SELECT_PROJECTION_FOR_MOVIE, \
SELECT_MOVIE_BY_ID
from database.connection.execute_query import execute_query
from settings.SharedVariables import SharedVariables
from prettytable import PrettyTable
class Movies:
    """Convenience wrappers around the movie-related SQL queries.

    Methods print a diagnostic (rather than raising) when the database
    has not been initialized or cannot be reached.
    """
    def __init__(self):
        # Cache all movies ordered by rating; fall back to an empty list so
        # __str__ keeps working when the database is unavailable (previously
        # self.data stayed unset and __str__ raised AttributeError).
        try:
            self.data = execute_query(SELECT_MOVIES_ORDERED_BY_RATING, [])
        except Exception:
            self.data = []
            print("Database not initilized or connected")

    def __str__(self):
        t = PrettyTable(SharedVariables.movie_col)
        for row in self.data:
            t.add_row([row[0], row[1], row[2]])
        return str(t)

    @staticmethod
    def get_movie(id):
        """Return a pretty-printed table for the movie with the given id."""
        # Safe default: a failed query previously left `data` unbound and
        # the loop below raised NameError.
        data = []
        try:
            data = execute_query(SELECT_MOVIE_BY_ID, [id, ])
        except Exception:
            print("Database not initilized or connected")
        t = PrettyTable(SharedVariables.movie_col)
        for row in data:
            t.add_row([row[0], row[1], row[2]])
        return str(t)

    @staticmethod
    def add_movie(name, rating):
        """Insert a new movie row."""
        try:
            execute_query(INSERT_MOVIE, [name, rating, ], commit=True)
        except Exception:
            print("Database not initilized or connected")

    @staticmethod
    def delete_movie(id):
        """Delete the movie with the given id."""
        try:
            execute_query(DELETE_MOVIE, [id, ], commit=True)
        except Exception:
            print("Database not initilized or connected")

    @staticmethod
    def update_movie(id, name, rating):
        """Update the name and rating of the movie with the given id."""
        try:
            execute_query(UPDATE_MOVIE, [name, rating, id, ], commit=True)
        except Exception:
            print("Database not initilized or connected")

    @staticmethod
    def movie_projections(id):
        """Return a table of projections for the movie, or None on failure."""
        try:
            data = execute_query(SELECT_PROJECTION_FOR_MOVIE, [id, ])
            t = PrettyTable(SharedVariables.projection_col)
            for row in data:
                # Last column shows the remaining seats out of 100.
                t.add_row([row[0], row[1], row[2], row[3], (100 - row[4])])
            return str(t)
        except Exception:
            print("Database not initilized or connected!")
if __name__ == '__main__':
    # Ad-hoc smoke test; needs a reachable, initialized database.
    from database.connection.database_connection import Database
    SharedVariables.database = Database()
    Movies.add_movie("Baywatch", 10)
    print(Movies.get_movie(2))
| 32.164557 | 77 | 0.63046 | from database.queries.insert_queries import INSERT_MOVIE
from database.queries.update_queries import UPDATE_MOVIE
from database.queries.delete_queries import DELETE_MOVIE
from database.queries.select_queries import SELECT_MOVIES_ORDERED_BY_RATING,\
SELECT_PROJECTION_FOR_MOVIE, \
SELECT_MOVIE_BY_ID
from database.connection.execute_query import execute_query
from settings.SharedVariables import SharedVariables
from prettytable import PrettyTable
class Movies:
    """Convenience wrappers around the movie-related SQL queries.

    Methods print a diagnostic (rather than raising) when the database
    has not been initialized or cannot be reached.
    """
    def __init__(self):
        # NOTE(review): on failure self.data is never assigned, so a later
        # __str__ call raises AttributeError — confirm intended.
        try:
            self.data = execute_query(SELECT_MOVIES_ORDERED_BY_RATING, [])
        except Exception:
            print("Database not initilized or connected")
    def __str__(self):
        t = PrettyTable(SharedVariables.movie_col)
        for row in self.data:
            t.add_row([row[0], row[1], row[2]])
        return str(t)
    @staticmethod
    def get_movie(id):
        """Return a pretty-printed table for the movie with the given id."""
        # NOTE(review): if execute_query raises, `data` stays unbound and the
        # loop below raises NameError — confirm intended.
        try:
            data = execute_query(SELECT_MOVIE_BY_ID, [id, ])
        except Exception:
            print("Database not initilized or connected")
        t = PrettyTable(SharedVariables.movie_col)
        for row in data:
            t.add_row([row[0], row[1], row[2]])
        return str(t)
    @staticmethod
    def add_movie(name, rating):
        """Insert a new movie row."""
        try:
            execute_query(INSERT_MOVIE, [name, rating, ], commit=True)
        except Exception:
            print("Database not initilized or connected")
    @staticmethod
    def delete_movie(id):
        """Delete the movie with the given id."""
        try:
            execute_query(DELETE_MOVIE, [id, ], commit=True)
        except Exception:
            print("Database not initilized or connected")
    @staticmethod
    def update_movie(id, name, rating):
        """Update the name and rating of the movie with the given id."""
        try:
            execute_query(UPDATE_MOVIE, [name, rating, id, ], commit=True)
        except Exception:
            print("Database not initilized or connected")
    @staticmethod
    def movie_projections(id):
        """Return a table of projections for the movie, or None on failure."""
        try:
            data = execute_query(SELECT_PROJECTION_FOR_MOVIE, [id, ])
            t = PrettyTable(SharedVariables.projection_col)
            for row in data:
                # Last column shows the remaining seats out of 100.
                t.add_row([row[0], row[1], row[2], row[3], (100 - row[4])])
            return str(t)
        except Exception:
            print("Database not initilized or connected!")
if __name__ == '__main__':
    # Ad-hoc smoke test; needs a reachable, initialized database.
    from database.connection.database_connection import Database
    SharedVariables.database = Database()
    Movies.add_movie("Baywatch", 10)
    print(Movies.get_movie(2))
| true | true |
f71f7f0a14770a0fbed65f68d8dd2ab2c222a92a | 5,067 | py | Python | cardinal_pythonlib/cmdline.py | RudolfCardinal/pythonlib | 4c583ad1aae3c1166a4e6f964df87eb6c02a73cb | [
"Apache-2.0"
] | 10 | 2015-09-30T02:46:48.000Z | 2021-07-23T05:03:38.000Z | cardinal_pythonlib/cmdline.py | RudolfCardinal/pythonlib | 4c583ad1aae3c1166a4e6f964df87eb6c02a73cb | [
"Apache-2.0"
] | 9 | 2019-07-04T11:10:31.000Z | 2021-09-23T21:11:42.000Z | cardinal_pythonlib/cmdline.py | RudolfCardinal/pythonlib | 4c583ad1aae3c1166a4e6f964df87eb6c02a73cb | [
"Apache-2.0"
] | 4 | 2017-07-17T15:17:44.000Z | 2021-07-23T05:03:41.000Z | #!/usr/bin/env python
# cardinal_pythonlib/cmdline.py
"""
===============================================================================
Original code copyright (C) 2009-2021 Rudolf Cardinal (rudolf@pobox.com).
This file is part of cardinal_pythonlib.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===============================================================================
**Functions for manipulating command-line parameters.**
"""
import re
# import shlex
import subprocess
import sys
from typing import List, Union
def cmdline_split(s: str, platform: Union[int, str] = 'this') -> List[str]:
    """
    As per
    https://stackoverflow.com/questions/33560364/python-windows-parsing-command-lines-with-shlex.

    Multi-platform variant of ``shlex.split()`` for command-line splitting.
    For use with ``subprocess``, for ``argv`` injection etc. Using fast REGEX.

    Args:
        s:
            string to split
        platform:

            - ``'this'`` = auto from current platform;
            - ``1`` = POSIX;
            - ``0`` = Windows/CMD
            - (other values reserved)

    Raises:
        ValueError: if *s* is not a valid/complete shell string.
    """  # noqa
    if platform == 'this':
        platform = (sys.platform != 'win32')  # RNC: includes 64-bit Windows
    if platform == 1:  # POSIX
        re_cmd_lex = r'''"((?:\\["\\]|[^"])*)"|'([^']*)'|(\\.)|(&&?|\|\|?|\d?\>|[<])|([^\s'"\\&|<>]+)|(\s+)|(.)'''  # noqa
    elif platform == 0:  # Windows/CMD
        re_cmd_lex = r'''"((?:""|\\["\\]|[^"])*)"?()|(\\\\(?=\\*")|\\")|(&&?|\|\|?|\d?>|[<])|([^\s"&|<>]+)|(\s+)|(.)'''  # noqa
    else:
        raise AssertionError(f"unknown platform {platform!r}")
    args = []
    accu = None   # collects pieces of one arg
    # Groups: qs = double-quoted, qss = single-quoted, esc = escaped char,
    # pipe = shell operator, word = bare word, white = separator, fail = error.
    for qs, qss, esc, pipe, word, white, fail in re.findall(re_cmd_lex, s):
        if word:
            pass   # most frequent
        elif esc:
            word = esc[1]
        elif white or pipe:
            # End of the current argument (and possibly an operator token).
            if accu is not None:
                args.append(accu)
            if pipe:
                args.append(pipe)
            accu = None
            continue
        elif fail:
            raise ValueError("invalid or incomplete shell string")
        elif qs:
            # Unescape \" and \\ inside double quotes.
            word = qs.replace(r'\"', '"').replace(r'\\', '\\')
            # ... raw strings can't end in single backslashes;
            # https://stackoverflow.com/questions/647769/why-cant-pythons-raw-string-literals-end-with-a-single-backslash  # noqa
            if platform == 0:
                word = word.replace('""', '"')
        else:
            word = qss  # may be even empty; must be last
        accu = (accu or '') + word
    if accu is not None:
        args.append(accu)
    return args
def cmdline_quote_posix(seq: List[str]) -> str:
"""
Quotes arguments for POSIX, producing a single string suitable for
copying/pasting.
Based on subprocess.list2cmdline().
"""
result = [] # type: List[str]
for arg in seq:
bs_buf = [] # type: List[str]
# Add a space to separate this argument from the others
if result:
result.append(' ')
# Modified here: quote arguments with "*"
needquote = (" " in arg) or ("\t" in arg) or ("*" in arg) or not arg
if needquote:
result.append('"')
for c in arg:
if c == '\\':
# Don't know if we need to double yet.
bs_buf.append(c)
elif c == '"':
# Double backslashes.
result.append('\\' * len(bs_buf) * 2)
bs_buf = []
result.append('\\"')
else:
# Normal char
if bs_buf:
result.extend(bs_buf)
bs_buf = []
result.append(c)
# Add remaining backslashes, if any.
if bs_buf:
result.extend(bs_buf)
if needquote:
result.extend(bs_buf)
result.append('"')
return ''.join(result)
def cmdline_quote(args: List[str], platform: Union[int, str] = 'this') -> str:
    """Join *args* into a single quoted, copy/pastable command line.

    platform: 'this' auto-detects from the running interpreter;
    1 = POSIX quoting; 0 = Windows/CMD (subprocess.list2cmdline).
    """
    if platform == 'this':
        platform = (sys.platform != 'win32')  # RNC: includes 64-bit Windows
    if platform == 1:  # POSIX
        return cmdline_quote_posix(args)
    if platform == 0:  # Windows/CMD
        return subprocess.list2cmdline(args)
    raise AssertionError(f"unknown platform {platform!r}")
| 32.273885 | 129 | 0.532465 |
import re
import subprocess
import sys
from typing import List, Union
def cmdline_split(s: str, platform: Union[int, str] = 'this') -> List[str]:
    """Multi-platform ``shlex.split()`` replacement driven by one regex.

    platform: 'this' auto-detects; 1 = POSIX; 0 = Windows/CMD.
    Raises ValueError on an invalid or incomplete shell string.
    """
    if platform == 'this':
        platform = (sys.platform != 'win32')
    if platform == 1:
        re_cmd_lex = r'''"((?:\\["\\]|[^"])*)"|'([^']*)'|(\\.)|(&&?|\|\|?|\d?\>|[<])|([^\s'"\\&|<>]+)|(\s+)|(.)''' # noqa
    elif platform == 0: # Windows/CMD
        re_cmd_lex = r'''"((?:""|\\["\\]|[^"])*)"?()|(\\\\(?=\\*")|\\")|(&&?|\|\|?|\d?>|[<])|([^\s"&|<>]+)|(\s+)|(.)'''
    else:
        raise AssertionError(f"unknown platform {platform!r}")
    args = []
    accu = None  # collects pieces of one argument
    # Groups: qs = double-quoted, qss = single-quoted, esc = escaped char,
    # pipe = shell operator, word = bare word, white = separator, fail = error.
    for qs, qss, esc, pipe, word, white, fail in re.findall(re_cmd_lex, s):
        if word:
            pass  # most frequent case: plain word
        elif esc:
            word = esc[1]
        elif white or pipe:
            # End of the current argument (and possibly an operator token).
            if accu is not None:
                args.append(accu)
            if pipe:
                args.append(pipe)
            accu = None
            continue
        elif fail:
            raise ValueError("invalid or incomplete shell string")
        elif qs:
            # Unescape \" and \\ inside double quotes.
            word = qs.replace(r'\"', '"').replace(r'\\', '\\')
            # https://stackoverflow.com/questions/647769/why-cant-pythons-raw-string-literals-end-with-a-single-backslash # noqa
            if platform == 0:
                word = word.replace('""', '"')
        else:
            word = qss # may be even empty; must be last
        accu = (accu or '') + word
    if accu is not None:
        args.append(accu)
    return args
def cmdline_quote_posix(seq: List[str]) -> str:
    """Quote *seq* for copy/paste into a POSIX shell prompt.

    Based on subprocess.list2cmdline(), except that arguments containing
    '*' are quoted as well.
    """
    result = []  # type: List[str]
    for arg in seq:
        bs_buf = []  # type: List[str]
        # Add a space to separate this argument from the others
        if result:
            result.append(' ')
        # Modified here: quote arguments with "*"
        needquote = (" " in arg) or ("\t" in arg) or ("*" in arg) or not arg
        if needquote:
            result.append('"')
        for c in arg:
            if c == '\\':
                # Don't know if we need to double yet.
                bs_buf.append(c)
            elif c == '"':
                # Double backslashes.
                result.append('\\' * len(bs_buf) * 2)
                bs_buf = []
                result.append('\\"')
            else:
                if bs_buf:
                    result.extend(bs_buf)
                    bs_buf = []
                result.append(c)
        if bs_buf:
            result.extend(bs_buf)
        if needquote:
            # Trailing backslashes are emitted again so they end up doubled
            # before the closing quote (matches list2cmdline behaviour).
            result.extend(bs_buf)
            result.append('"')
    return ''.join(result)
def cmdline_quote(args: List[str], platform: Union[int, str] = 'this') -> str:
    """Join *args* into a single quoted, copy/pastable command line.

    platform: 'this' auto-detects; 1 = POSIX; 0 = Windows/CMD.
    """
    if platform == 'this':
        platform = (sys.platform != 'win32')  # RNC: includes 64-bit Windows
    if platform == 1:  # POSIX
        return cmdline_quote_posix(args)
    elif platform == 0:  # Windows/CMD
        return subprocess.list2cmdline(args)
    else:
        raise AssertionError(f"unknown platform {platform!r}")
| true | true |
f71f809f8758a5472aea90c604d0f3c9e8cb4804 | 25,645 | py | Python | autolabeling.py | MGH-LMIC/CXR-autolabeling | 74eac30bb6eaa6c1d5a8b343743024ef6bd9db7d | [
"Apache-2.0"
] | null | null | null | autolabeling.py | MGH-LMIC/CXR-autolabeling | 74eac30bb6eaa6c1d5a8b343743024ef6bd9db7d | [
"Apache-2.0"
] | null | null | null | autolabeling.py | MGH-LMIC/CXR-autolabeling | 74eac30bb6eaa6c1d5a8b343743024ef6bd9db7d | [
"Apache-2.0"
] | null | null | null | import re
import pickle
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.cm as mpl_color_map
from tqdm import tqdm
from pathlib import Path
from prettytable import PrettyTable
from scipy.ndimage import gaussian_filter
from sklearn.metrics import roc_curve, precision_recall_curve
import torch
import torchnet as tnt
import torch.nn.functional as F
from utils import logger
from environment import TestEnvironment, initialize, print_label_name
from gradcam import GradCam, save_class_activation_images
from data import CxrDataset, EXT_DATA_BASE
from atlasmethod import EX_AI
import time
ATLAS_GEN = False
atlas_name = 'cardiomegaly'
# 'cardiomegaly', 'atelectasis', 'pulmonary_edema', 'pneumonia', 'pleural_effusion'
class Tester:
    def __init__(self, env, pt_runtime="test", fn_net=None, fl_gradcam=False, cls_gradcam=None, id_prob=None, fl_ensemble=False, fl_exai=False, f_name='sim', f_csv=None):
        """Configure a Tester.

        env: a TestEnvironment providing model, loaders, device and labels.
        pt_runtime: directory (Path) holding checkpoints and outputs.
        fn_net: explicit checkpoint filename (or prefix for ensembles).
        fl_gradcam / cls_gradcam: enable Grad-CAM and select target classes.
        id_prob: column index whose probabilities are exported (enables fl_prob).
        fl_ensemble: average predictions over multiple checkpoints.
        fl_exai: enable atlas-based self-annotation (forces Grad-CAM on the
        five findings listed below).
        f_name / f_csv: identifiers forwarded to the EX_AI helper.
        """
        self.env = env
        self.pt_runtime = pt_runtime
        self.fl_prob = False if id_prob == None else True
        self.id_prob = id_prob
        self.f_name = f_name
        self.fl_ensemble = fl_ensemble
        # for multiple class and binary label tasks
        self.pf_metric = {
            'loss': [],
            'accuracy': [],
            'sensitivity': [],
            'specificity': [],
            'auc_score': [],
            'ap_score': [],
            'mse_score': []
        }
        self.fn_net = fn_net
        self.fl_gradcam = fl_gradcam
        self.cls_gradcam = cls_gradcam
        self.th_gradcam = 0.5
        self.fl_gradcam_save = True
        # explainable-AI setup: fixed target findings for self-annotation
        self.fl_exai = fl_exai
        if self.fl_exai:
            self.fl_gradcam = True
            self.cls_gradcam = [
                'Hilar/mediastinum>Cardiomegaly>.',
                'Lung density>Increased lung density>Atelectasis',
                'Lung density>Increased lung density>Pulmonary edema',
                'Lung density>Increased lung density>pneumonia',
                'Pleura>Pleural effusion>.'
            ]
            self.th_gradcam = 0.5
            self.ex_method = EX_AI(env, pt_runtime=pt_runtime, thr=0.5, f_name=f_name, ext_data_csv=f_csv)
def load(self):
pt_file = self.pt_runtime.joinpath(f'train.pkl')
with open(pt_file, 'rb') as f:
self.pf_metric = pickle.load(f)
    def test_evaluation(self, epoch=1, fl_save=False):
        """Evaluate a single checkpoint on the test loader.

        Loads `model_epoch_{epoch}.pth.tar` unless an explicit fn_net was
        given, restores the metric history if present, runs test(), and
        optionally produces Grad-CAM maps.
        """
        if self.fn_net == None:
            pt_model = self.pt_runtime.joinpath(f'model_epoch_{epoch:04d}.pth.tar')
        else:
            pt_model = self.pt_runtime.joinpath(str(self.fn_net))
        self.env.load_model(pt_model)
        # Missing metric pickle is non-fatal; start from the in-memory dict.
        try:
            self.load()
        except:
            logger.debug('there is no pkl to load.')
        _, _, _ = self.test(epoch, self.env.test_loader, fl_save=fl_save)
        # NOTE(review): dead branch kept from development; algorithm_attribution
        # is never called.
        if False:
            self.algorithm_attribution(self.env.gradcam_loader)
        if self.fl_gradcam:
            _, _, _ = self.gradcam_data(self.env.gradcam_loader)
    def test_ensemble_evaluation(self, epoch=1, fl_save=False, n_ens=1):
        """Evaluate an n_ens-member checkpoint ensemble.

        Pass 1: run test() per member and collect per-model predictions.
        Pass 2 (optional): fuse Grad-CAM maps across members and feed the
        fused maps to the self-annotation (EX_AI) pipeline and/or dump an
        atlas CSV when ATLAS_GEN is set.
        """
        predict = []
        target = []
        if self.fl_gradcam:
            # NOTE(review): this assignment is shadowed by the re-creation
            # below before first use — confirm it can be removed.
            cams = np.ones((len(self.env.gradcam_loader), len(self.cls_gradcam), 16, 16))
        if ATLAS_GEN:
            gradcam_df = pd.DataFrame(columns=[f'{x:03d}' for x in range(256)])
        for k in range(n_ens):
            # Member checkpoints are named <fn_net>_00.pth.tar, _01, ...
            pt_model = self.pt_runtime.joinpath(str(self.fn_net)+f'_{k:02d}.pth.tar')
            self.env.load_model(pt_model)
            try:
                self.load()
            except:
                logger.debug('there is no pkl to load.')
            _, pred, tar = self.test(epoch, self.env.test_loader, fl_save=False)
            predict.append(pred)
            target.append(tar)
        # evaluate ensemble's performance
        prob_ens = self.ensemble_performance(predict, target, n_ens, fl_save=fl_save)
        if self.fl_exai:
            # Drop column 0 (background/normal) before self-annotation.
            prob_in = pd.DataFrame(prob_ens.cpu().numpy()[:,1:])
            prob_in['PATH'] = self.env.test_loader.dataset.entries['PATH']
            self.ex_method.input_preparation(prob_in)
        if self.fl_gradcam:
            # NOTE(review): initialised to ones but accumulated with +=,
            # which biases every cell by 1; looks like a leftover from the
            # earlier multiplicative fusion (cams *= cam) — confirm.
            cams = np.ones((len(self.env.gradcam_loader), len(self.cls_gradcam), 16, 16))
            for k in range(n_ens):
                pt_model = self.pt_runtime.joinpath(str(self.fn_net)+f'_{k:02d}.pth.tar')
                self.env.load_model(pt_model)
                start = time.time()
                _, _, cam = self.gradcam_data(self.env.gradcam_loader, prob_ens=prob_ens)
                cams += cam
                end = time.time()
                print(f'{k:02d} model gradcam time: {end-start} sec')
            # Final pass renders/saves heatmaps from the fused maps.
            _, _, cams = self.gradcam_data(self.env.gradcam_loader, ens_flg=True, cams_ens=cams, prob_ens=prob_ens)
            if self.fl_exai:
                start = time.time()
                self.ex_method.run(cams)
                end = time.time()
                print(f'self-annotation time: {end-start} sec')
            if ATLAS_GEN:
                for k in range(len(self.env.gradcam_loader)):
                    gradcam_df.loc[k] = cams[k].flatten()
                    print(f"[{atlas_name}]Atlas generation: {k:5d}")
                gradcam_df['PATH'] = self.env.gradcam_loader.dataset.entries['PATH']
                gradcam_df.to_csv(self.pt_runtime.joinpath(f'gradcam_atlas_{atlas_name}.csv'), index=False)
    def ensemble_performance(self, predict, target, n_ens, fl_save=False):
        """Average member predictions, report metrics, return the mean.

        predict/target: lists of numpy arrays, one per ensemble member
        (targets are assumed identical across members; the first is used).
        Returns the averaged prediction tensor on the configured device.
        """
        pred_ens = torch.zeros(predict[0].shape).to(self.env.device)
        for i in range(n_ens):
            pred_ens += torch.from_numpy(predict[i]).to(self.env.device)
        pred_ens /= n_ens
        targ_ens = torch.from_numpy(target[0]).to(self.env.device)
        aucs, aps = self.AUC_AP_metric(pred_ens, targ_ens)
        correct, total = self.ACC_metric(pred_ens, targ_ens)
        self.Per_print(correct=correct, total=total, aucs=aucs, aps=aps)
        if fl_save:
            test_set = self.env.test_loader.dataset
            labels = self.env.labels
            self.roc_evaluation(test_set, pred_ens, targ_ens, labels)
        return pred_ens
    def AUC_AP_metric(self, output, target):
        """Build per-class AUROC and AP meters, skipping unknown labels.

        Entries with target == -1 are masked out by mask_pred; classes
        whose meter update fails (e.g. only one label present) are skipped.
        """
        out_dim = output.shape[1]
        aucs = [tnt.meter.AUCMeter() for i in range(out_dim)]
        aps = [tnt.meter.APMeter() for i in range(out_dim)]
        for i in range(out_dim):
            mask_out, mask_tar = self.mask_pred(output[:, i], target[:, i])
            try:
                aucs[i].add(mask_out, mask_tar)
                aps[i].add(mask_out, mask_tar)
            except:
                continue
        return aucs, aps
def MSE__metric(self, output, target):
out_dim = 1
mses = [tnt.meter.MSEMeter() for i in range(out_dim)]
mses[0].add(output[:, -1], target[:, -1])
return mses
    def ACC_metric(self, output, target):
        """Return (correct, total) for known labels at threshold 0.5.

        Expects probabilities (post-sigmoid); note test() thresholds raw
        logits at 0 instead, which is the equivalent decision boundary.
        """
        mask_out, mask_tar = self.mask_pred(output, target)
        ones = torch.ones(mask_out.shape).int().to(self.env.device)
        zeros = torch.zeros(mask_out.shape).int().to(self.env.device)
        pred = torch.where(mask_out > 0.5, ones, zeros)
        correct = pred.eq(mask_tar.int()).sum().item()
        total = len(mask_tar)
        return correct, total
    def Per_print(self, correct=None, total=None, aucs=None, aps=None, mses=None):
        """Log a PrettyTable of ensemble accuracy, AUROC and AP per label.

        NaN and failed meters are excluded from the averages; any failure
        while averaging falls back to zeros.
        """
        labels = self.env.labels
        out_dim = len(aucs)
        percent = 100. * correct / total
        logger.info(f"accuracy {correct}/{total} "
                    f"({percent:.2f}%)")
        p = PrettyTable()
        p.field_names = ["findings", "auroc score", "ap score"]
        auc_cnt = out_dim
        for i in range(out_dim):
            try:
                # 'E-' prefix marks ensemble rows.
                p.add_row([f'E-{labels[i]}', f"{aucs[i].value()[0]:.4f}", f"{aps[i].value()[0]:.4f}"])
            except:
                p.add_row([labels[i], "-", "-"])
        try:
            # AUCMeter.value() returns a tuple, APMeter.value() a tensor;
            # the type checks filter out meters that were never updated.
            list_aucs=[]
            for k in aucs:
                if type(k.value()) == tuple:
                    if np.isnan(k.value()[0]) == False:
                        list_aucs.append(k.value()[0])
            list_aps=[]
            for k in aps:
                if type(k.value()) == torch.Tensor:
                    if np.isnan(k.value()[0]) == False:
                        list_aps.append(k.value()[0])
            ave_auc = np.mean(list_aucs)
            ave_ap = np.mean(list_aps)
            tbl_str = p.get_string(title=f"Ensemble-performance (avg auc {ave_auc:.4f}, mean ap {ave_ap:.4f})")
            logger.info(f"\n{tbl_str}")
        except:
            print("We cannot calcuate average acu scores")
            ave_auc = 0
            ave_ap = 0
    def test(self, epoch, test_loader, fl_save=False):
        """Run one evaluation pass over *test_loader*.

        Returns (mean AUROC, sigmoid predictions as numpy, targets as
        numpy) and appends accuracy/AUC/AP to self.pf_metric. Unknown
        labels (target == -1) are excluded everywhere via mask_pred.
        """
        test_set = test_loader.dataset
        out_dim = self.env.out_dim
        labels = self.env.labels
        aucs = [tnt.meter.AUCMeter() for i in range(out_dim)]
        aps = [tnt.meter.APMeter() for i in range(out_dim)]
        CxrDataset.eval()
        self.env.model.eval()
        with torch.no_grad():
            correct = 0
            total = 0
            predict_seq = torch.FloatTensor().to(self.env.device)
            target_seq = torch.FloatTensor().to(self.env.device)
            tqdm_desc = f'testing '
            t = tqdm(enumerate(test_loader), total=len(test_loader), desc=tqdm_desc,
                     dynamic_ncols=True)
            for bt_idx, tp_data in t:
                output, target = self.test_batch(tp_data)
                # Accumulate probabilities; F.sigmoid is deprecated in
                # newer torch (torch.sigmoid) but kept as-is here.
                predict_seq = torch.cat((predict_seq, F.sigmoid(output)), dim=0)
                target_seq = torch.cat((target_seq, target), dim=0)
                for i in range(out_dim):
                    mask_out, mask_tar = self.mask_pred(output[:, i], target[:, i])
                    try:
                        aucs[i].add(mask_out, mask_tar)
                        aps[i].add(mask_out, mask_tar)
                    except:
                        continue
                # Accuracy on raw logits: >0 is the same boundary as
                # sigmoid(x) > 0.5.
                mask_out, mask_tar = self.mask_pred(output, target)
                ones = torch.ones(mask_out.shape).int().to(self.env.device)
                zeros = torch.zeros(mask_out.shape).int().to(self.env.device)
                pred = torch.where(mask_out > 0., ones, zeros)
                correct += pred.eq(mask_tar.int()).sum().item()
                total += len(mask_tar)
            percent = 100. * correct / total
            logger.info(f"val epoch {epoch:03d}: "
                        f"accuracy {correct}/{total} "
                        f"({percent:.2f}%)")
            p = PrettyTable()
            p.field_names = ["findings", "auroc score", "ap score"]
            auc_cnt = out_dim
            for i in range(out_dim):
                try:
                    p.add_row([labels[i], f"{aucs[i].value()[0]:.4f}", f"{aps[i].value()[0]:.4f}"])
                except:
                    p.add_row([labels[i], "-", "-"])
            if fl_save:
                self.roc_evaluation(test_set, predict_seq, target_seq, labels)
            if self.fl_prob:
                # Export per-study probability of the selected class.
                self.df_prob = pd.DataFrame()
                self.df_prob['PATH_CHECK'] = test_set.entries['PATH']
                self.df_prob['PROB'] = predict_seq.cpu().numpy()[:, self.id_prob]
            try:
                # Filter un-updated meters (type checks) and NaNs before
                # averaging; see Per_print for the same pattern.
                list_aucs=[]
                for k in aucs:
                    if type(k.value()) == tuple:
                        if np.isnan(k.value()[0]) == False:
                            list_aucs.append(k.value()[0])
                list_aps=[]
                for k in aps:
                    if type(k.value()) == torch.Tensor:
                        if np.isnan(k.value()[0]) == False:
                            list_aps.append(k.value()[0])
                ave_auc = np.mean(list_aucs)
                ave_ap = np.mean(list_aps)
                tbl_str = p.get_string(title=f"performance (avg auc {ave_auc:.4f}, mean ap {ave_ap:.4f})")
                logger.info(f"\n{tbl_str}")
            except:
                print("We cannot calcuate average auc scores")
                ave_auc = 0
                ave_ap = 0
            self.pf_metric[f'accuracy'].append((epoch, correct / total))
            self.pf_metric[f'auc_score'].append((epoch, ave_auc))
            self.pf_metric[f'ap_score'].append((epoch, ave_ap))
            return ave_auc, predict_seq.cpu().numpy(), target_seq.cpu().numpy()
def mask_pred(self, output, target):
mask_one = torch.ones(output.shape, dtype=torch.uint8, device=self.env.device)
mask_zero = torch.zeros(output.shape, dtype=torch.uint8, device=self.env.device)
#mask = torch.where(target == -1, mask_zero, mask_one)
mask = torch.where(target == -1, mask_zero, mask_one).bool()
mask_output = output.masked_select(mask.to(self.env.device))
mask_target = target.masked_select(mask.to(self.env.device))
return mask_output, mask_target
    def test_batch(self, tp_data, fl_input=False):
        """Forward one batch through the model for the configured env.type.

        type 0: image only; type 1: two images; type 3: image + metadata.
        Returns (output, target), or (data, info, output) when fl_input.
        NOTE(review): any other env.type (e.g. 2) falls through and raises
        NameError on `output`; fl_input=True with type 1 would also hit an
        unbound `info` — confirm those combinations cannot occur.
        """
        # to support different types of models.
        if self.env.type == 0:
            data = tp_data[0]
            target = tp_data[1]
            info = tp_data[2]
            data, target, info = data.to(self.env.device), target.to(self.env.device), info.to(self.env.device)
            # network output (image-only model)
            output = self.env.model(data)
        elif self.env.type == 1:
            data1 = tp_data[0]
            data2 = tp_data[1]
            target = tp_data[2]
            data1, data2, target = data1.to(self.env.device), data2.to(self.env.device), target.to(self.env.device)
            # network output (two-input model)
            output = self.env.model(data1, data2)
        elif self.env.type == 3:
            data = tp_data[0]
            target = tp_data[1]
            info = tp_data[2]
            data, target, info = data.to(self.env.device), target.to(self.env.device), info.to(self.env.device)
            # network output (image + side-information model)
            output = self.env.model(data, info)
        if fl_input == False:
            return output, target
        else:
            return data, info, output
def gradcam_data(self, test_loader, hmp_dims=(512,512), ens_flg=False, cams_ens=None, prob_ens=None):
    """Generate and save Grad-CAM heatmaps for every case in *test_loader*.

    Args:
        test_loader: loader yielding (data, target, info); batch size is
            expected to be 1 here (file names are indexed by batch_idx).
        hmp_dims: output heatmap size in pixels.
        ens_flg: second ensemble pass -- combine with the accumulated CAMs.
        cams_ens: accumulated low-resolution CAMs from previous passes.
        prob_ens: ensemble probabilities, indexed [case, class].

    Returns:
        (gradcam_res_list, gradcam_path_list, cams). In ensemble mode the
        per-case results go into `cams`; otherwise into the two lists.
        # cams is (n_cases, n_classes, 16, 16) -- presumably the model's
        # final feature-map resolution; confirm against GradCam.
    """
    out_dim = self.env.out_dim
    CxrDataset.eval()
    self.env.model.eval()
    # NOTE(review): no torch.no_grad() here -- Grad-CAM needs gradients.
    #with torch.no_grad():
    gradcam_res_list = []
    gradcam_path_list = []
    cams = np.zeros((len(test_loader), len(self.cls_gradcam), 16, 16))
    # Build the hook-holder once and reuse it for every batch.
    grad_cam = GradCam(self.env.model, self.env.type)
    for batch_idx, (data, target, info) in enumerate(test_loader):
        #data, target = data.to(self.env.device), target.to(self.env.device)
        data, target, info = data.to(self.env.device), target.to(self.env.device), info.to(self.env.device)
        #grad_cam = GradCam(self.env.model, self.env.type)
        if self.cls_gradcam == None:
            # No explicit class list: explain the top-scoring class only.
            gradcam_res, gradcam_path = self.gradcam_save_maxcls(grad_cam, data, test_loader, batch_idx, hmp_dims, info)
        else:
            if self.fl_ensemble:
                cam = self.gradcam_save_argcls_ens(grad_cam, data, test_loader, batch_idx, hmp_dims, info, ens_flg=ens_flg, cams_ens=cams_ens, prob_ens=prob_ens)
            else:
                gradcam_res, gradcam_path = self.gradcam_save_argcls(grad_cam, data, test_loader, batch_idx, hmp_dims, info)
        try:
            if self.fl_ensemble:
                cams[batch_idx, :, :, :] = cam
            else:
                gradcam_res_list.append(gradcam_res.tolist())
                gradcam_path_list.append(gradcam_path)
        except AttributeError as e:
            # gradcam_res was None (case below the probability threshold).
            print("No GradCam result?")
    if False:  # disabled thumbnail summary
        self.gradcam_thumbnail()
    return gradcam_res_list, gradcam_path_list, cams
def gradcam_save_maxcls(self, grad_cam, data, test_loader, batch_idx, hmp_dims, info):
    """Save a Grad-CAM heatmap for the top-scoring class of one case.

    If the class probability clears self.th_gradcam the heatmap is saved
    under gradcam_image/ with the label appended to the file name and
    (cam_result, path_name) is returned; otherwise a no-heatmap image is
    saved and (None, None) is returned.
    """
    if self.env.type == 3:
        cam, prob, tcls = grad_cam.generate_cam(data, info)
    else:
        cam, prob, tcls = grad_cam.generate_cam(data)
    # Sentinel understood by save_class_activation_images as "no overlay".
    noPlotflg = np.array([-1])
    # when we draw gradcam, the batch size has to be 1: PATH is indexed
    # directly by batch_idx.
    file_name = test_loader.dataset.entries['PATH'][batch_idx]
    path_name = file_name.split(".")[0]
    if prob >= self.th_gradcam:
        target_class = self.env.labels[tcls]
        # Turn the label text into a file-system friendly suffix.
        label_list = re.split(' \- |\/| ', target_class)
        label_name = "_".join(label_list)
        path_name = "_".join([path_name, label_name])
        cam_rs = save_class_activation_images(data, cam, self.pt_runtime.joinpath(f"gradcam_image"), path_name, hmp_dims)
        return cam_rs, path_name
    else:
        # Below threshold: save the plain image without a heatmap overlay.
        cam_rs = save_class_activation_images(data, noPlotflg, self.pt_runtime.joinpath("gradcam_image"), path_name, hmp_dims)
        return None, None
def gradcam_save_argcls(self, grad_cam, data, test_loader, batch_idx, hmp_dims, info):
    """Save one Grad-CAM heatmap per requested class for a single case.

    self.cls_gradcam lists the class names to explain; ['all'] expands to
    every label. Heatmaps are only written when the class probability
    clears self.th_gradcam.

    NOTE(review): the returned cam_list is always empty and path_list
    contains only the path from the final loop iteration -- callers appear
    to rely only on the saved image files; confirm before changing.
    """
    if self.cls_gradcam[0] == 'all':
        self.cls_gradcam = self.env.labels
    for i, nm_tcls in enumerate(self.cls_gradcam):
        # Map the requested class name to its output index.
        id_tcls = self.env.labels.index(nm_tcls)
        if self.env.type == 3:
            cam, prob, tcls = grad_cam.generate_cam(data, info, target_class=id_tcls)
        else:
            # Classifier weights for this class feed the CAM computation.
            cam_w = self.env.model.module.main.classifier.weight[id_tcls].cpu().detach().numpy()
            cam, prob, tcls, _ = grad_cam.generate_cam(data, target_class=id_tcls, cam_w=cam_w)
        noPlotflg = np.array([-1])
        # when we draw gradcam, the batch size has to be 1.
        file_name = test_loader.dataset.entries['PATH'][batch_idx]
        path_name = file_name.split(".")[0]
        target_class = self.env.labels[tcls]
        label_list = re.split(' \- |\/| ', target_class)
        label_name = "_".join(label_list)
        # Keep only the leaf of the ">"-separated label hierarchy.
        label_name = label_name.strip('>.').split('>')[-1]
        #path_name = "_".join([f'{int(prob*1000):04d}', path_name, label_name])
        if prob >= self.th_gradcam:
            cam_rs = save_class_activation_images(data, cam, self.pt_runtime.joinpath(f"gradcam_image_{label_name}"), path_name, hmp_dims)
    cam_list=[]
    path_list=[]
    path_list.append(path_name)
    return cam_list, path_list
def gradcam_save_argcls_ens(self, grad_cam, data, test_loader, batch_idx, hmp_dims, info, ens_flg=False, cams_ens=None, prob_ens=None):
    """Ensemble variant of gradcam_save_argcls for a single case.

    First pass (ens_flg False): compute low-resolution CAMs to accumulate
    across ensemble members. Second pass (ens_flg True): combine with the
    accumulated CAMs in *cams_ens* and write the heatmap images.

    Args:
        prob_ens: ensemble probabilities, indexed [case, class]; gates
            which classes are explained (>= self.th_gradcam).

    Returns:
        (n_classes, 16, 16) array of low-resolution CAMs for this case.
        # 16x16 presumably matches the model's final feature map --
        # confirm against GradCam.generate_cam.
    """
    if self.cls_gradcam[0] == 'all':
        self.cls_gradcam = self.env.labels
    cams = np.zeros((len(self.cls_gradcam), 16, 16))
    for i, nm_tcls in enumerate(self.cls_gradcam):
        # Map the requested class name to its output index.
        id_tcls = self.env.labels.index(nm_tcls)
        cam_w = self.env.model.module.main.classifier.weight[id_tcls].cpu().detach().numpy()
        if prob_ens[batch_idx, id_tcls].item() >= self.th_gradcam:
            if ens_flg == True:
                # Final pass: fuse with the accumulated ensemble CAM and save.
                cam, prob, tcls, cam_low = grad_cam.generate_cam(data, target_class=id_tcls, cam_w=cam_w, ens_flg=True, ens_cam=cams_ens[batch_idx, i, :, :])
                cams[i, :, :] = cam_low
                noPlotflg = np.array([-1])
                # when we draw gradcam, the batch size has to be 1.
                file_name = test_loader.dataset.entries['PATH'][batch_idx]
                path_name = file_name.split(".")[0]
                label_name = print_label_name[tcls]
                if ATLAS_GEN:
                    label_name = f"ATLAS_{atlas_name}"
                #if prob_ens[batch_idx, id_tcls].item() >= self.th_gradcam:
                if ATLAS_GEN:
                    cam_rs = save_class_activation_images(data, cam, self.pt_runtime.joinpath(f"{label_name}"), path_name, hmp_dims)
                else:
                    if '/' in path_name:
                        # Mirror any sub-directories of the source path.
                        self.pt_runtime.joinpath(f"explain_sample/{self.f_name}/{label_name}/{path_name}").parent.mkdir(parents=True, exist_ok=True)
                    cam_rs = save_class_activation_images(data, cam, self.pt_runtime.joinpath(f"explain_sample/{self.f_name}/{label_name}"), path_name, hmp_dims)
        else:
            #review_cam
            # Below threshold: still compute the low-res CAM (clipped at 0.5)
            # so the ensemble accumulation has a value for this class.
            cam, prob, tcls, cam_low = grad_cam.generate_cam(data, target_class=id_tcls, cam_w=cam_w, th_cam=0.5)
            cams[i, :, :] = cam_low
    return cams
def roc_evaluation(self, test_set, predict_seq, target_seq, labels):
    """Export per-finding predictions and ROC data to <runtime>/roc_result.

    Writes one xlsx with every case's predicted probability and ground
    truth per label, then, for each label that has at least one known
    (non -1) target, a sensitivity/specificity table and a ROC curve PNG.

    Args:
        test_set: dataset whose entries['PATH'] identifies each case.
        predict_seq: (n_cases, out_dim) tensor of sigmoid probabilities.
        target_seq: (n_cases, out_dim) tensor of labels; -1 marks unknown.
        labels: human-readable finding names, one per output column.
    """
    out_dim = self.env.out_dim
    df_data = pd.DataFrame()
    df_data['PATH'] = test_set.entries['PATH']
    for i in range(out_dim):
        df_data[f'{labels[i]}'] = predict_seq.cpu().numpy()[:, i]
        df_data[f'{labels[i]}_GT'] = target_seq.cpu().numpy()[:, i]
    t = self.pt_runtime.joinpath('roc_result')
    Path.mkdir(t, parents=True, exist_ok=True)
    df_data.to_excel(t.joinpath('save_predicted_probabilities.xlsx'))
    roc_dim = out_dim
    for i in range(roc_dim):
        # Drop entries whose ground truth is unknown (-1) before scoring.
        mask_out, mask_tar = self.mask_pred(predict_seq[:, i], target_seq[:, i])
        if mask_tar.cpu().numpy().size != 0:
            fpr, tpr, thresholds = roc_curve(mask_tar.cpu().numpy(), mask_out.cpu().numpy())
            df = pd.DataFrame()
            df['specificity'] = 1 - fpr
            df['sensitivity'] = tpr
            df['thresholds'] = thresholds
            label_name = print_label_name[i]
            df.to_excel(t.joinpath(f'save_{i:03d}_{label_name}_sensitivity_specificity.xlsx'))
            del df
            # ROC plot. (The old dead `if False:` ROC+PR branch and the
            # precision_recall_curve computation it alone used were removed.)
            fig, ax1 = plt.subplots(1, 1)
            ax1.plot(fpr, tpr, color='darkorange', lw=2, label=f'{label_name}')
            ax1.set_title(f'ROC curve for {label_name}')
            ax1.set(xlabel='False positive rate', ylabel='True positive rate')
            plt.savefig(t.joinpath(f'{i:03d}_{label_name}_curve.png'))
            # Fix: close the figure. Without this every label leaked an open
            # matplotlib figure, growing memory across large label sets.
            plt.close(fig)
def save_prob(self, input_file, save_path):
    """Merge the cached per-case probabilities into the test CSV.

    Reads *input_file* (csv), inserts the PATH_CHECK column at position 6
    with the probability column right after it, and writes the result to
    *save_path* as xlsx. Requires self.df_prob (built when id_prob is set).
    """
    merged = pd.read_csv(input_file)
    # Final layout: path_check at column 6, prob at column 7.
    merged.insert(6, 'path_check', self.df_prob.PATH_CHECK)
    merged.insert(7, 'prob', self.df_prob.PROB)
    merged.to_excel(save_path)
if __name__ == "__main__":
    import argparse
    # Only --cuda is taken from the command line; every other option below
    # is hard-coded for this auto-labeling demo run.
    parser = argparse.ArgumentParser(description="Testng Our Explainable AI Model on CXR")
    parser.add_argument('--cuda', default=None, type=str, help="use GPUs with its device ids, separated by commas")
    args = parser.parse_args()
    # Model/dataset configuration.
    args.in_dim = 1            # single-channel (grayscale) input
    args.out_dim = 21          # number of output findings
    args.labels = None         # use the environment's default label set
    args.paths = None
    args.runtime_dir = 'autolabeling'
    args.type = 0              # single-input model (see Tester.test_batch)
    args.pretr_net = 'pa_feat_model'   # checkpoint stem; _NN.pth.tar per member
    args.gradcam = False
    args.gradcam_cls = None
    args.fl_save = False
    args.id_prob = None
    args.test_csv = 'autolabeling_5_features_490_cases.csv'
    args.arch = None
    args.Nens = 6              # ensemble size; 1 disables ensembling
    args.exai = True           # enable the self-annotation pipeline
    args.simname = 'Outputs'
    args.seed = -1
    runtime_path, device = initialize(args, fl_demo=True)
    fl_ensemble = False if args.Nens == 1 else True
    # Build the evaluation environment and run single or ensemble evaluation.
    env = TestEnvironment(device, mtype=args.type, in_dim=args.in_dim, out_dim=args.out_dim, name_labels=args.labels, name_paths=args.paths, testset_csv=args.test_csv, name_model=args.arch, r_seed=args.seed)
    t = Tester(env, pt_runtime=runtime_path, fn_net=args.pretr_net, fl_gradcam=args.gradcam, cls_gradcam=args.gradcam_cls, id_prob=args.id_prob, fl_ensemble=fl_ensemble, fl_exai=args.exai, f_name=args.simname, f_csv=args.test_csv)
    if(fl_ensemble):
        t.test_ensemble_evaluation(fl_save=args.fl_save, n_ens = args.Nens)
    else:
        t.test_evaluation(fl_save=args.fl_save)
| 40.259027 | 230 | 0.569429 | import re
import pickle
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.cm as mpl_color_map
from tqdm import tqdm
from pathlib import Path
from prettytable import PrettyTable
from scipy.ndimage import gaussian_filter
from sklearn.metrics import roc_curve, precision_recall_curve
import torch
import torchnet as tnt
import torch.nn.functional as F
from utils import logger
from environment import TestEnvironment, initialize, print_label_name
from gradcam import GradCam, save_class_activation_images
from data import CxrDataset, EXT_DATA_BASE
from atlasmethod import EX_AI
import time
ATLAS_GEN = False
atlas_name = 'cardiomegaly'
class Tester:
def __init__(self, env, pt_runtime="test", fn_net=None, fl_gradcam=False, cls_gradcam=None, id_prob=None, fl_ensemble=False, fl_exai=False, f_name='sim', f_csv=None):
    """Hold test-time state: environment, output dir, metrics, Grad-CAM setup.

    Args:
        env: TestEnvironment providing model, device, loaders and labels.
        pt_runtime: Path-like runtime/output directory.
        fn_net: checkpoint file name (stem for ensembles); None selects the
            per-epoch default name in test_evaluation.
        fl_gradcam: generate Grad-CAM heatmaps after evaluation.
        cls_gradcam: class names to explain; ['all'] expands to every label;
            None explains only the top-scoring class per image.
        id_prob: output index whose probabilities are cached in self.df_prob
            (None disables the cache).
        fl_ensemble: evaluate an ensemble of checkpoints.
        fl_exai: enable the self-annotation (EX_AI) pipeline; forces
            Grad-CAM on a fixed set of five findings.
        f_name: simulation name used in output paths.
        f_csv: test-set csv handed to EX_AI as external data.
    """
    self.env = env
    self.pt_runtime = pt_runtime
    self.fl_prob = False if id_prob == None else True
    self.id_prob = id_prob
    self.f_name = f_name
    self.fl_ensemble = fl_ensemble
    # Per-epoch metric history; appended by test(), restored by load().
    self.pf_metric = {
        'loss': [],
        'accuracy': [],
        'sensitivity': [],
        'specificity': [],
        'auc_score': [],
        'ap_score': [],
        'mse_score': []
    }
    self.fn_net = fn_net
    self.fl_gradcam = fl_gradcam
    self.cls_gradcam = cls_gradcam
    # Minimum class probability before a heatmap is drawn.
    self.th_gradcam = 0.5
    self.fl_gradcam_save = True
    self.fl_exai = fl_exai
    if self.fl_exai:
        # EX_AI requires Grad-CAM on this fixed set of five findings.
        self.fl_gradcam = True
        self.cls_gradcam = [
            'Hilar/mediastinum>Cardiomegaly>.',
            'Lung density>Increased lung density>Atelectasis',
            'Lung density>Increased lung density>Pulmonary edema',
            'Lung density>Increased lung density>pneumonia',
            'Pleura>Pleural effusion>.'
        ]
        self.th_gradcam = 0.5
        self.ex_method = EX_AI(env, pt_runtime=pt_runtime, thr=0.5, f_name=f_name, ext_data_csv=f_csv)
def load(self):
    """Restore the metric history (self.pf_metric) pickled by a training run.

    Raises FileNotFoundError when <runtime>/train.pkl does not exist;
    callers catch this and continue with the fresh metric dict.
    """
    metrics_path = self.pt_runtime.joinpath('train.pkl')
    with open(metrics_path, 'rb') as handle:
        self.pf_metric = pickle.load(handle)
def test_evaluation(self, epoch=1, fl_save=False):
    """Evaluate a single checkpoint and optionally generate Grad-CAMs.

    Loads either the explicit checkpoint self.fn_net or the per-epoch
    default model_epoch_NNNN.pth.tar, restores the metric history when
    available, then runs self.test (and Grad-CAM when fl_gradcam is set).
    """
    if self.fn_net == None:
        pt_model = self.pt_runtime.joinpath(f'model_epoch_{epoch:04d}.pth.tar')
    else:
        pt_model = self.pt_runtime.joinpath(str(self.fn_net))
    self.env.load_model(pt_model)
    try:
        self.load()
    except:
        # No previous metric history is fine; keep the fresh dict.
        logger.debug('there is no pkl to load.')
    _, _, _ = self.test(epoch, self.env.test_loader, fl_save=fl_save)
    if False:  # disabled attribution experiment
        self.algorithm_attribution(self.env.gradcam_loader)
    if self.fl_gradcam:
        _, _, _ = self.gradcam_data(self.env.gradcam_loader)
def test_ensemble_evaluation(self, epoch=1, fl_save=False, n_ens=1):
    """Evaluate an ensemble of *n_ens* checkpoints and fuse their outputs.

    Runs self.test per member (<fn_net>_NN.pth.tar), averages the
    predictions via ensemble_performance, optionally feeds them to the
    EX_AI pipeline, and -- when fl_gradcam is set -- performs a two-pass
    Grad-CAM: accumulate low-resolution CAMs over members, then a final
    pass that combines and saves the heatmaps.
    """
    predict = []
    target = []
    if self.fl_gradcam:
        cams = np.ones((len(self.env.gradcam_loader), len(self.cls_gradcam), 16, 16))
    if ATLAS_GEN:
        # One 256-column row (flattened 16x16 CAM) per case.
        gradcam_df = pd.DataFrame(columns=[f'{x:03d}' for x in range(256)])
    for k in range(n_ens):
        # Member checkpoints are named <fn_net>_00.pth.tar, _01, ...
        pt_model = self.pt_runtime.joinpath(str(self.fn_net)+f'_{k:02d}.pth.tar')
        self.env.load_model(pt_model)
        try:
            self.load()
        except:
            logger.debug('there is no pkl to load.')
        _, pred, tar = self.test(epoch, self.env.test_loader, fl_save=False)
        predict.append(pred)
        target.append(tar)
    prob_ens = self.ensemble_performance(predict, target, n_ens, fl_save=fl_save)
    if self.fl_exai:
        # Column 0 is dropped here -- presumably a background/placeholder
        # output; confirm against the label layout.
        prob_in = pd.DataFrame(prob_ens.cpu().numpy()[:,1:])
        prob_in['PATH'] = self.env.test_loader.dataset.entries['PATH']
        self.ex_method.input_preparation(prob_in)
    if self.fl_gradcam:
        cams = np.ones((len(self.env.gradcam_loader), len(self.cls_gradcam), 16, 16))
        for k in range(n_ens):
            pt_model = self.pt_runtime.joinpath(str(self.fn_net)+f'_{k:02d}.pth.tar')
            self.env.load_model(pt_model)
            start = time.time()
            # First pass: accumulate each member's low-resolution CAMs.
            _, _, cam = self.gradcam_data(self.env.gradcam_loader, prob_ens=prob_ens)
            #review_cam
            #cams *= cam
            cams += cam
            end = time.time()
            print(f'{k:02d} model gradcam time: {end-start} sec')
        # Second pass: combine with the accumulated CAMs and save images.
        _, _, cams = self.gradcam_data(self.env.gradcam_loader, ens_flg=True, cams_ens=cams, prob_ens=prob_ens)
        if self.fl_exai:
            start = time.time()
            self.ex_method.run(cams)
            end = time.time()
            print(f'self-annotation time: {end-start} sec')
        if ATLAS_GEN:
            for k in range(len(self.env.gradcam_loader)):
                gradcam_df.loc[k] = cams[k].flatten()
                print(f"[{atlas_name}]Atlas generation: {k:5d}")
            gradcam_df['PATH'] = self.env.gradcam_loader.dataset.entries['PATH']
            gradcam_df.to_csv(self.pt_runtime.joinpath(f'gradcam_atlas_{atlas_name}.csv'), index=False)
def ensemble_performance(self, predict, target, n_ens, fl_save=False):
    """Average member predictions and report ensemble metrics.

    Args:
        predict: list of n_ens numpy arrays of per-case probabilities.
        target: list of numpy label arrays; all members share the targets,
            so only target[0] is used.
        n_ens: ensemble size.
        fl_save: also export ROC data via roc_evaluation.

    Returns:
        The averaged prediction tensor on self.env.device.
    """
    pred_ens = torch.zeros(predict[0].shape).to(self.env.device)
    #pred_ens = np.zeros(predict[0].shape)
    for i in range(n_ens):
        pred_ens += torch.from_numpy(predict[i]).to(self.env.device)
    pred_ens /= n_ens
    targ_ens = torch.from_numpy(target[0]).to(self.env.device)
    aucs, aps = self.AUC_AP_metric(pred_ens, targ_ens)
    correct, total = self.ACC_metric(pred_ens, targ_ens)
    self.Per_print(correct=correct, total=total, aucs=aucs, aps=aps)
    if fl_save:
        test_set = self.env.test_loader.dataset
        labels = self.env.labels
        self.roc_evaluation(test_set, pred_ens, targ_ens, labels)
    return pred_ens
def AUC_AP_metric(self, output, target):
    """Build per-class AUROC and AP meters from (n_cases, n_classes) tensors.

    Entries with target -1 are excluded via mask_pred. Classes where the
    meter rejects the data (e.g. a single ground-truth value) are simply
    left empty and reported as "-" downstream.
    """
    out_dim = output.shape[1]
    aucs = [tnt.meter.AUCMeter() for i in range(out_dim)]
    aps = [tnt.meter.APMeter() for i in range(out_dim)]
    for i in range(out_dim):
        mask_out, mask_tar = self.mask_pred(output[:, i], target[:, i])
        try:
            aucs[i].add(mask_out, mask_tar)
            aps[i].add(mask_out, mask_tar)
        except:
            # Meter could not accept this class's data; leave it empty.
            continue
    return aucs, aps
def MSE__metric(self, output, target):
    """Return a single-element list with an MSE meter over the last column.

    Only the final output column is measured -- presumably a regression
    head appended after the classification outputs; confirm with callers.
    """
    out_dim = 1
    mses = [tnt.meter.MSEMeter() for i in range(out_dim)]
    mses[0].add(output[:, -1], target[:, -1])
    return mses
def ACC_metric(self, output, target):
    """Binary accuracy at a 0.5 cut-off, ignoring entries labelled -1.

    Returns:
        (n_correct, n_scored) over all known-label positions.
    """
    scores, truth = self.mask_pred(output, target)
    device = self.env.device
    hi = torch.ones(scores.shape).int().to(device)
    lo = torch.zeros(scores.shape).int().to(device)
    # Threshold probabilities at 0.5 to get hard predictions.
    predicted = torch.where(scores > 0.5, hi, lo)
    n_correct = predicted.eq(truth.int()).sum().item()
    return n_correct, len(truth)
def Per_print(self, correct=None, total=None, aucs=None, aps=None, mses=None):
    """Log the ensemble accuracy and a per-finding AUROC/AP table.

    Args:
        correct/total: hard-prediction accuracy counts from ACC_metric.
        aucs/aps: per-class torchnet meters from AUC_AP_metric.
        mses: unused here; kept for signature parity with other reporters.
    """
    labels = self.env.labels
    out_dim = len(aucs)
    percent = 100. * correct / total
    logger.info(f"accuracy {correct}/{total} "
                f"({percent:.2f}%)")
    p = PrettyTable()
    p.field_names = ["findings", "auroc score", "ap score"]
    auc_cnt = out_dim
    for i in range(out_dim):
        try:
            #p.add_row([labels[i], f"{aucs[i].value()[0]:.4f}", f"{aps[i].value()[0]:.4f}"])
            # "E-" prefix marks ensemble scores in the report.
            p.add_row([f'E-{labels[i]}', f"{aucs[i].value()[0]:.4f}", f"{aps[i].value()[0]:.4f}"])
        except:
            # Meter was left empty for this class (no usable data).
            p.add_row([labels[i], "-", "-"])
    try:
        # Average only the classes whose meters hold a valid, non-NaN value.
        list_aucs=[]
        for k in aucs:
            if type(k.value()) == tuple:
                if np.isnan(k.value()[0]) == False:
                    list_aucs.append(k.value()[0])
        list_aps=[]
        for k in aps:
            if type(k.value()) == torch.Tensor:
                if np.isnan(k.value()[0]) == False:
                    list_aps.append(k.value()[0])
        ave_auc = np.mean(list_aucs)
        ave_ap = np.mean(list_aps)
        tbl_str = p.get_string(title=f"Ensemble-performance (avg auc {ave_auc:.4f}, mean ap {ave_ap:.4f})")
        logger.info(f"\n{tbl_str}")
    except:
        print("We cannot calcuate average acu scores")
        ave_auc = 0
        ave_ap = 0
def test(self, epoch, test_loader, fl_save=False):
test_set = test_loader.dataset
out_dim = self.env.out_dim
labels = self.env.labels
aucs = [tnt.meter.AUCMeter() for i in range(out_dim)]
aps = [tnt.meter.APMeter() for i in range(out_dim)]
CxrDataset.eval()
self.env.model.eval()
with torch.no_grad():
correct = 0
total = 0
predict_seq = torch.FloatTensor().to(self.env.device)
target_seq = torch.FloatTensor().to(self.env.device)
tqdm_desc = f'testing '
t = tqdm(enumerate(test_loader), total=len(test_loader), desc=tqdm_desc,
dynamic_ncols=True)
for bt_idx, tp_data in t:
output, target = self.test_batch(tp_data)
# Network outputs
predict_seq = torch.cat((predict_seq, F.sigmoid(output)), dim=0)
target_seq = torch.cat((target_seq, target), dim=0)
for i in range(out_dim):
mask_out, mask_tar = self.mask_pred(output[:, i], target[:, i])
try:
aucs[i].add(mask_out, mask_tar)
aps[i].add(mask_out, mask_tar)
except:
continue
mask_out, mask_tar = self.mask_pred(output, target)
ones = torch.ones(mask_out.shape).int().to(self.env.device)
zeros = torch.zeros(mask_out.shape).int().to(self.env.device)
pred = torch.where(mask_out > 0., ones, zeros)
correct += pred.eq(mask_tar.int()).sum().item()
total += len(mask_tar)
#pred = torch.where(output > 0., ones, zeros)
#correct += pred.eq(target.int()).sum().item()
#total = len(test_loader.sampler) * out_dim
percent = 100. * correct / total
logger.info(f"val epoch {epoch:03d}: "
f"accuracy {correct}/{total} "
f"({percent:.2f}%)")
p = PrettyTable()
p.field_names = ["findings", "auroc score", "ap score"]
auc_cnt = out_dim
for i in range(out_dim):
try:
p.add_row([labels[i], f"{aucs[i].value()[0]:.4f}", f"{aps[i].value()[0]:.4f}"])
except:
p.add_row([labels[i], "-", "-"])
if fl_save:
self.roc_evaluation(test_set, predict_seq, target_seq, labels)
if self.fl_prob:
self.df_prob = pd.DataFrame()
self.df_prob['PATH_CHECK'] = test_set.entries['PATH']
self.df_prob['PROB'] = predict_seq.cpu().numpy()[:, self.id_prob]
try:
list_aucs=[]
for k in aucs:
if type(k.value()) == tuple:
if np.isnan(k.value()[0]) == False:
list_aucs.append(k.value()[0])
list_aps=[]
for k in aps:
if type(k.value()) == torch.Tensor:
if np.isnan(k.value()[0]) == False:
list_aps.append(k.value()[0])
ave_auc = np.mean(list_aucs)
ave_ap = np.mean(list_aps)
tbl_str = p.get_string(title=f"performance (avg auc {ave_auc:.4f}, mean ap {ave_ap:.4f})")
logger.info(f"\n{tbl_str}")
except:
print("We cannot calcuate average auc scores")
ave_auc = 0
ave_ap = 0
self.pf_metric[f'accuracy'].append((epoch, correct / total))
self.pf_metric[f'auc_score'].append((epoch, ave_auc))
self.pf_metric[f'ap_score'].append((epoch, ave_ap))
return ave_auc, predict_seq.cpu().numpy(), target_seq.cpu().numpy()
def mask_pred(self, output, target):
mask_one = torch.ones(output.shape, dtype=torch.uint8, device=self.env.device)
mask_zero = torch.zeros(output.shape, dtype=torch.uint8, device=self.env.device)
#mask = torch.where(target == -1, mask_zero, mask_one)
mask = torch.where(target == -1, mask_zero, mask_one).bool()
mask_output = output.masked_select(mask.to(self.env.device))
mask_target = target.masked_select(mask.to(self.env.device))
return mask_output, mask_target
def test_batch(self, tp_data, fl_input=False):
# to support different types of models.
if self.env.type == 0:
data = tp_data[0]
target = tp_data[1]
info = tp_data[2]
data, target, info = data.to(self.env.device), target.to(self.env.device), info.to(self.env.device)
#data, target = data.to(self.env.device), target.to(self.env.device)
#network output
output = self.env.model(data)
elif self.env.type == 1:
data1 = tp_data[0]
data2 = tp_data[1]
target = tp_data[2]
data1, data2, target = data1.to(self.env.device), data2.to(self.env.device), target.to(self.env.device)
#network output
output = self.env.model(data1, data2)
elif self.env.type == 3:
data = tp_data[0]
target = tp_data[1]
info = tp_data[2]
data, target, info = data.to(self.env.device), target.to(self.env.device), info.to(self.env.device)
#network output
output = self.env.model(data, info)
if fl_input == False:
return output, target
else:
return data, info, output
def gradcam_data(self, test_loader, hmp_dims=(512,512), ens_flg=False, cams_ens=None, prob_ens=None):
# threshold to draw a heatmap
out_dim = self.env.out_dim
CxrDataset.eval()
self.env.model.eval()
#with torch.no_grad():
gradcam_res_list = []
gradcam_path_list = []
cams = np.zeros((len(test_loader), len(self.cls_gradcam), 16, 16))
grad_cam = GradCam(self.env.model, self.env.type)
for batch_idx, (data, target, info) in enumerate(test_loader):
#data, target = data.to(self.env.device), target.to(self.env.device)
data, target, info = data.to(self.env.device), target.to(self.env.device), info.to(self.env.device)
# Grad CAM
#grad_cam = GradCam(self.env.model, self.env.type)
if self.cls_gradcam == None:
gradcam_res, gradcam_path = self.gradcam_save_maxcls(grad_cam, data, test_loader, batch_idx, hmp_dims, info)
else:
if self.fl_ensemble:
cam = self.gradcam_save_argcls_ens(grad_cam, data, test_loader, batch_idx, hmp_dims, info, ens_flg=ens_flg, cams_ens=cams_ens, prob_ens=prob_ens)
else:
gradcam_res, gradcam_path = self.gradcam_save_argcls(grad_cam, data, test_loader, batch_idx, hmp_dims, info)
try:
if self.fl_ensemble:
cams[batch_idx, :, :, :] = cam
else:
gradcam_res_list.append(gradcam_res.tolist())
gradcam_path_list.append(gradcam_path)
except AttributeError as e:
print("No GradCam result?")
if False:
self.gradcam_thumbnail()
return gradcam_res_list, gradcam_path_list, cams
def gradcam_save_maxcls(self, grad_cam, data, test_loader, batch_idx, hmp_dims, info):
if self.env.type == 3:
cam, prob, tcls = grad_cam.generate_cam(data, info)
else:
cam, prob, tcls = grad_cam.generate_cam(data)
noPlotflg = np.array([-1])
# when we draw gradcam, we have to batch size as 1.
file_name = test_loader.dataset.entries['PATH'][batch_idx]
path_name = file_name.split(".")[0]
if prob >= self.th_gradcam:
target_class = self.env.labels[tcls]
label_list = re.split(' \- |\/| ', target_class)
label_name = "_".join(label_list)
path_name = "_".join([path_name, label_name])
cam_rs = save_class_activation_images(data, cam, self.pt_runtime.joinpath(f"gradcam_image"), path_name, hmp_dims)
return cam_rs, path_name
else:
cam_rs = save_class_activation_images(data, noPlotflg, self.pt_runtime.joinpath("gradcam_image"), path_name, hmp_dims)
return None, None
def gradcam_save_argcls(self, grad_cam, data, test_loader, batch_idx, hmp_dims, info):
if self.cls_gradcam[0] == 'all':
self.cls_gradcam = self.env.labels
for i, nm_tcls in enumerate(self.cls_gradcam):
## need to implement to find index among self.env.labels from string of target class
## code start here!!!!
id_tcls = self.env.labels.index(nm_tcls)
if self.env.type == 3:
cam, prob, tcls = grad_cam.generate_cam(data, info, target_class=id_tcls)
else:
cam_w = self.env.model.module.main.classifier.weight[id_tcls].cpu().detach().numpy()
cam, prob, tcls, _ = grad_cam.generate_cam(data, target_class=id_tcls, cam_w=cam_w)
noPlotflg = np.array([-1])
# when we draw gradcam, we have to batch size as 1.
file_name = test_loader.dataset.entries['PATH'][batch_idx]
path_name = file_name.split(".")[0]
target_class = self.env.labels[tcls]
label_list = re.split(' \- |\/| ', target_class)
label_name = "_".join(label_list)
label_name = label_name.strip('>.').split('>')[-1]
#path_name = "_".join([f'{int(prob*1000):04d}', path_name, label_name])
if prob >= self.th_gradcam:
cam_rs = save_class_activation_images(data, cam, self.pt_runtime.joinpath(f"gradcam_image_{label_name}"), path_name, hmp_dims)
cam_list=[]
path_list=[]
path_list.append(path_name)
return cam_list, path_list
def gradcam_save_argcls_ens(self, grad_cam, data, test_loader, batch_idx, hmp_dims, info, ens_flg=False, cams_ens=None, prob_ens=None):
if self.cls_gradcam[0] == 'all':
self.cls_gradcam = self.env.labels
cams = np.zeros((len(self.cls_gradcam), 16, 16))
for i, nm_tcls in enumerate(self.cls_gradcam):
## need to implement to find index among self.env.labels from string of target class
## code start here!!!!
id_tcls = self.env.labels.index(nm_tcls)
cam_w = self.env.model.module.main.classifier.weight[id_tcls].cpu().detach().numpy()
if prob_ens[batch_idx, id_tcls].item() >= self.th_gradcam:
if ens_flg == True:
cam, prob, tcls, cam_low = grad_cam.generate_cam(data, target_class=id_tcls, cam_w=cam_w, ens_flg=True, ens_cam=cams_ens[batch_idx, i, :, :])
cams[i, :, :] = cam_low
noPlotflg = np.array([-1])
# when we draw gradcam, we have to batch size as 1.
file_name = test_loader.dataset.entries['PATH'][batch_idx]
path_name = file_name.split(".")[0]
label_name = print_label_name[tcls]
if ATLAS_GEN:
label_name = f"ATLAS_{atlas_name}"
#if prob_ens[batch_idx, id_tcls].item() >= self.th_gradcam:
if ATLAS_GEN:
cam_rs = save_class_activation_images(data, cam, self.pt_runtime.joinpath(f"{label_name}"), path_name, hmp_dims)
else:
if '/' in path_name:
self.pt_runtime.joinpath(f"explain_sample/{self.f_name}/{label_name}/{path_name}").parent.mkdir(parents=True, exist_ok=True)
cam_rs = save_class_activation_images(data, cam, self.pt_runtime.joinpath(f"explain_sample/{self.f_name}/{label_name}"), path_name, hmp_dims)
else:
#review_cam
cam, prob, tcls, cam_low = grad_cam.generate_cam(data, target_class=id_tcls, cam_w=cam_w, th_cam=0.5)
cams[i, :, :] = cam_low
return cams
def roc_evaluation(self, test_set, predict_seq, target_seq, labels):
out_dim = self.env.out_dim
df_data = pd.DataFrame()
df_data['PATH'] = test_set.entries['PATH']
for i in range(out_dim):
df_data[f'{labels[i]}'] = predict_seq.cpu().numpy()[:, i]
df_data[f'{labels[i]}_GT'] = target_seq.cpu().numpy()[:, i]
t = self.pt_runtime.joinpath('roc_result')
Path.mkdir(t, parents=True, exist_ok=True)
df_data.to_excel(t.joinpath('save_predicted_probabilities.xlsx'))
roc_dim = out_dim
for i in range(roc_dim):
mask_out, mask_tar = self.mask_pred(predict_seq[:, i], target_seq[:, i])
if mask_tar.cpu().numpy().size != 0 :
fpr, tpr, thresholds = roc_curve(mask_tar.cpu().numpy(), mask_out.cpu().numpy())
pre, rec, thresholds_pr = precision_recall_curve(mask_tar.cpu().numpy(), mask_out.cpu().numpy())
#logger.debug(f"{predict_seq.cpu().numpy()}")
df = pd.DataFrame()
df[f'specificity'] = 1-fpr
df[f'sensitivity'] = tpr
df[f'thresholds'] = thresholds
label_name = print_label_name[i]
df.to_excel(t.joinpath(f'save_{i:03d}_{label_name}_sensitivity_specificity.xlsx'))
del df
if False:
# ROC plot
fig, (ax1, ax2) = plt.subplots(1,2)
ax1.plot(fpr, tpr, color = 'darkorange', lw = 2, label = 'ROC curve')
ax1.set_title(f'ROC curve for {labels[i]}')
ax1.set(xlabel='False positive rate', ylabel='True positive rate')
# PR plot
ax2.plot(rec, pre, color = 'darkorange', lw = 2, label = 'Precision-Recall curve')
ax2.set_title(f'Precision-Recall curve')
ax2.set(xlabel='Recall', ylabel='Precision')
plt.savefig(t.joinpath(f'{i:03d}_{label_name}_curve.png'))
else:
# ROC plot
fig, ax1 = plt.subplots(1,1)
ax1.plot(fpr, tpr, color = 'darkorange', lw = 2, label = f'{label_name}')
ax1.set_title(f'ROC curve for {label_name}')
ax1.set(xlabel='False positive rate', ylabel='True positive rate')
plt.savefig(t.joinpath(f'{i:03d}_{label_name}_curve.png'))
def save_prob(self, input_file, save_path):
df = pd.read_csv(input_file)
df.insert(6, 'prob', self.df_prob.PROB)
df.insert(6, 'path_check', self.df_prob.PATH_CHECK)
df.to_excel(save_path)
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Testng Our Explainable AI Model on CXR")
parser.add_argument('--cuda', default=None, type=str, help="use GPUs with its device ids, separated by commas")
args = parser.parse_args()
args.in_dim = 1
args.out_dim = 21
args.labels = None
args.paths = None
args.runtime_dir = 'autolabeling'
args.type = 0
args.pretr_net = 'pa_feat_model'
args.gradcam = False
args.gradcam_cls = None
args.fl_save = False
args.id_prob = None
args.test_csv = 'autolabeling_5_features_490_cases.csv'
args.arch = None
args.Nens = 6
args.exai = True
args.simname = 'Outputs'
args.seed = -1
runtime_path, device = initialize(args, fl_demo=True)
fl_ensemble = False if args.Nens == 1 else True
# start training
env = TestEnvironment(device, mtype=args.type, in_dim=args.in_dim, out_dim=args.out_dim, name_labels=args.labels, name_paths=args.paths, testset_csv=args.test_csv, name_model=args.arch, r_seed=args.seed)
t = Tester(env, pt_runtime=runtime_path, fn_net=args.pretr_net, fl_gradcam=args.gradcam, cls_gradcam=args.gradcam_cls, id_prob=args.id_prob, fl_ensemble=fl_ensemble, fl_exai=args.exai, f_name=args.simname, f_csv=args.test_csv)
if(fl_ensemble):
t.test_ensemble_evaluation(fl_save=args.fl_save, n_ens = args.Nens)
else:
t.test_evaluation(fl_save=args.fl_save)
| true | true |
f71f8112d97cf0d0c960835f729f2a0a204f5395 | 6,801 | py | Python | src/python/tests/core/bot/tasks/task_creation_test.py | stplaydog/clusterfuzz | faa957d265641c031631c36f701c1dc76704a5c7 | [
"Apache-2.0"
] | null | null | null | src/python/tests/core/bot/tasks/task_creation_test.py | stplaydog/clusterfuzz | faa957d265641c031631c36f701c1dc76704a5c7 | [
"Apache-2.0"
] | 2 | 2021-03-31T19:59:19.000Z | 2021-05-20T22:08:07.000Z | src/python/tests/core/bot/tasks/task_creation_test.py | hixio-mh/clusterfuzz | 3f9a69ed71a4420b1a1df8864dd7f3fc1d5b6e07 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for task_creation."""
import mock
import unittest
from bot.tasks import task_creation
from datastore import data_types
from tests.test_libs import helpers
from tests.test_libs import mock_config
from tests.test_libs import test_utils
@test_utils.with_cloud_emulators('datastore')
class RequestBisectionTest(unittest.TestCase):
"""Tests request_bisection."""
def setUp(self):
  """Patch external services and seed the datastore with a fuzz target and
  a bisectable testcase used by every test in this class."""
  helpers.patch(self, [
      'build_management.build_manager.get_primary_bucket_path',
      'build_management.build_manager.get_revisions_list',
      'build_management.revisions.get_component_range_list',
      'config.local_config.ProjectConfig',
      'google_cloud_utils.blobs.read_key',
      'google_cloud_utils.pubsub.PubSubClient.publish',
  ])
  # Fuzz target matching the testcase's overridden_fuzzer_name below.
  data_types.FuzzTarget(
      id='libFuzzer_proj_target',
      engine='libFuzzer',
      project='proj',
      binary='target').put()
  # A security testcase with valid regression/fixed ranges and a bug filed,
  # i.e. one that qualifies for bisection.
  self.testcase = data_types.Testcase(
      crash_type='crash-type',
      security_flag=True,
      bug_information='1337',
      job_type='libfuzzer_asan_proj',
      fuzzer_name='libFuzzer',
      overridden_fuzzer_name='libFuzzer_proj_target',
      regression='123:456',
      fixed='123:456',
      crash_revision=3,
      additional_metadata='{"last_tested_crash_revision": 4}')
  self.testcase.put()
  # Reproducer payload returned for the testcase's blob key.
  self.mock.read_key.return_value = b'reproducer'
  # Single commit range "old:new" reported for any revision range.
  self.mock.get_component_range_list.return_value = [
      {
          'link_text': 'old:new',
      },
  ]
  # Bisect service config pointing at a fake Pub/Sub topic.
  self.mock.ProjectConfig.return_value = mock_config.MockConfig({
      'bisect_service': {
          'pubsub_topic': '/projects/project/topics/topic',
      }
  })
def _test(self, sanitizer, old_commit='old', new_commit='new'):
  """Request bisection and assert one 'regressed' and one 'fixed' Pub/Sub
  message is published with the expected payload and attributes, and that
  the testcase is marked as having both bisections requested."""
  task_creation.request_bisection(self.testcase.key.id())
  publish_calls = self.mock.publish.call_args_list
  # One message per bisect type, published in this order.
  bisect_types = ('regressed', 'fixed')
  self.assertEqual(2, len(publish_calls))
  for bisect_type, publish_call in zip(bisect_types, publish_calls):
    # publish(client, topic, [message]) -> positional args (self, topic, msgs).
    topic = publish_call[0][1]
    message = publish_call[0][2][0]
    self.assertEqual('/projects/project/topics/topic', topic)
    # Payload is the raw reproducer blob.
    self.assertEqual(b'reproducer', message.data)
    self.assertDictEqual({
        'crash_type': 'crash-type',
        'security': 'True',
        'fuzz_target': 'target',
        'new_commit': new_commit,
        'old_commit': old_commit,
        'project_name': 'proj',
        'sanitizer': sanitizer,
        'testcase_id': '1',
        'issue_id': '1337',
        'type': bisect_type,
    }, message.attributes)
  # Both request flags must be persisted on the testcase.
  testcase = self.testcase.key.get()
  self.assertTrue(testcase.get_metadata('requested_regressed_bisect'))
  self.assertTrue(testcase.get_metadata('requested_fixed_bisect'))
def test_request_bisection_asan(self):
"""Basic regressed test (asan)."""
self.testcase.job_type = 'libfuzzer_asan_proj'
self.testcase.put()
self._test('address')
def test_request_bisection_msan(self):
"""Basic regressed test (asan)."""
self.testcase.job_type = 'libfuzzer_msan_proj'
self.testcase.put()
self._test('memory')
def test_request_bisection_ubsan(self):
"""Basic regressed test (ubsan)."""
self.testcase.job_type = 'libfuzzer_ubsan_proj'
self.testcase.put()
self._test('undefined')
def test_request_bisection_blackbox(self):
"""Test request bisection for blackbox."""
self.testcase.job_type = 'blackbox'
self.testcase.overridden_fuzzer_name = None
self.testcase.put()
task_creation.request_bisection(self.testcase.key.id())
self.assertEqual(0, self.mock.publish.call_count)
def test_request_bisection_non_security(self):
"""Test request bisection for non-security testcases."""
self.testcase.job_type = 'libfuzzer_asan_proj'
self.testcase.security_flag = False
self.testcase.put()
task_creation.request_bisection(self.testcase.key.id())
self.assertEqual(0, self.mock.publish.call_count)
def test_request_bisection_flaky(self):
"""Test request bisection for flaky testcases."""
self.testcase.job_type = 'libfuzzer_asan_proj'
self.testcase.one_time_crasher_flag = True
self.testcase.put()
task_creation.request_bisection(self.testcase.key.id())
self.assertEqual(0, self.mock.publish.call_count)
def test_request_bisection_no_bug(self):
"""Test request bisection for testcases with no bug attached."""
self.testcase.job_type = 'libfuzzer_asan_proj'
self.testcase.bug_information = ''
self.testcase.put()
task_creation.request_bisection(self.testcase.key.id())
self.assertEqual(0, self.mock.publish.call_count)
def test_request_bisection_invalid_range(self):
"""Test request bisection for testcases with no bug attached."""
self.testcase.job_type = 'libfuzzer_asan_proj'
self.testcase.regression = 'NA'
self.testcase.fixed = 'NA'
self.testcase.put()
task_creation.request_bisection(self.testcase.key.id())
self.assertEqual(0, self.mock.publish.call_count)
def test_request_bisection_once_only(self):
"""Test request bisection for testcases isn't repeated if already
requested."""
self.testcase.set_metadata('requested_regressed_bisect', True)
self.testcase.set_metadata('requested_fixed_bisect', True)
self.testcase.put()
task_creation.request_bisection(self.testcase.key.id())
self.assertEqual(0, self.mock.publish.call_count)
def test_request_single_commit_range(self):
"""Request bisection with a single commit (invalid range)."""
self.mock.get_primary_bucket_path.return_value = 'bucket'
self.mock.get_revisions_list.return_value = list(range(6))
self.mock.get_component_range_list.return_value = [
{
'link_text': 'one',
},
]
task_creation.request_bisection(self.testcase.key.id())
self._test('address', old_commit='one', new_commit='one')
self.mock.get_component_range_list.assert_has_calls([
mock.call(123, 456, 'libfuzzer_asan_proj'),
mock.call(0, 3, 'libfuzzer_asan_proj'),
mock.call(123, 456, 'libfuzzer_asan_proj'),
mock.call(4, 5, 'libfuzzer_asan_proj'),
])
| 36.762162 | 74 | 0.704161 |
import mock
import unittest
from bot.tasks import task_creation
from datastore import data_types
from tests.test_libs import helpers
from tests.test_libs import mock_config
from tests.test_libs import test_utils
@test_utils.with_cloud_emulators('datastore')
class RequestBisectionTest(unittest.TestCase):
def setUp(self):
helpers.patch(self, [
'build_management.build_manager.get_primary_bucket_path',
'build_management.build_manager.get_revisions_list',
'build_management.revisions.get_component_range_list',
'config.local_config.ProjectConfig',
'google_cloud_utils.blobs.read_key',
'google_cloud_utils.pubsub.PubSubClient.publish',
])
data_types.FuzzTarget(
id='libFuzzer_proj_target',
engine='libFuzzer',
project='proj',
binary='target').put()
self.testcase = data_types.Testcase(
crash_type='crash-type',
security_flag=True,
bug_information='1337',
job_type='libfuzzer_asan_proj',
fuzzer_name='libFuzzer',
overridden_fuzzer_name='libFuzzer_proj_target',
regression='123:456',
fixed='123:456',
crash_revision=3,
additional_metadata='{"last_tested_crash_revision": 4}')
self.testcase.put()
self.mock.read_key.return_value = b'reproducer'
self.mock.get_component_range_list.return_value = [
{
'link_text': 'old:new',
},
]
self.mock.ProjectConfig.return_value = mock_config.MockConfig({
'bisect_service': {
'pubsub_topic': '/projects/project/topics/topic',
}
})
def _test(self, sanitizer, old_commit='old', new_commit='new'):
task_creation.request_bisection(self.testcase.key.id())
publish_calls = self.mock.publish.call_args_list
bisect_types = ('regressed', 'fixed')
self.assertEqual(2, len(publish_calls))
for bisect_type, publish_call in zip(bisect_types, publish_calls):
topic = publish_call[0][1]
message = publish_call[0][2][0]
self.assertEqual('/projects/project/topics/topic', topic)
self.assertEqual(b'reproducer', message.data)
self.assertDictEqual({
'crash_type': 'crash-type',
'security': 'True',
'fuzz_target': 'target',
'new_commit': new_commit,
'old_commit': old_commit,
'project_name': 'proj',
'sanitizer': sanitizer,
'testcase_id': '1',
'issue_id': '1337',
'type': bisect_type,
}, message.attributes)
testcase = self.testcase.key.get()
self.assertTrue(testcase.get_metadata('requested_regressed_bisect'))
self.assertTrue(testcase.get_metadata('requested_fixed_bisect'))
def test_request_bisection_asan(self):
self.testcase.job_type = 'libfuzzer_asan_proj'
self.testcase.put()
self._test('address')
def test_request_bisection_msan(self):
self.testcase.job_type = 'libfuzzer_msan_proj'
self.testcase.put()
self._test('memory')
def test_request_bisection_ubsan(self):
self.testcase.job_type = 'libfuzzer_ubsan_proj'
self.testcase.put()
self._test('undefined')
def test_request_bisection_blackbox(self):
self.testcase.job_type = 'blackbox'
self.testcase.overridden_fuzzer_name = None
self.testcase.put()
task_creation.request_bisection(self.testcase.key.id())
self.assertEqual(0, self.mock.publish.call_count)
def test_request_bisection_non_security(self):
self.testcase.job_type = 'libfuzzer_asan_proj'
self.testcase.security_flag = False
self.testcase.put()
task_creation.request_bisection(self.testcase.key.id())
self.assertEqual(0, self.mock.publish.call_count)
def test_request_bisection_flaky(self):
self.testcase.job_type = 'libfuzzer_asan_proj'
self.testcase.one_time_crasher_flag = True
self.testcase.put()
task_creation.request_bisection(self.testcase.key.id())
self.assertEqual(0, self.mock.publish.call_count)
def test_request_bisection_no_bug(self):
self.testcase.job_type = 'libfuzzer_asan_proj'
self.testcase.bug_information = ''
self.testcase.put()
task_creation.request_bisection(self.testcase.key.id())
self.assertEqual(0, self.mock.publish.call_count)
def test_request_bisection_invalid_range(self):
self.testcase.job_type = 'libfuzzer_asan_proj'
self.testcase.regression = 'NA'
self.testcase.fixed = 'NA'
self.testcase.put()
task_creation.request_bisection(self.testcase.key.id())
self.assertEqual(0, self.mock.publish.call_count)
def test_request_bisection_once_only(self):
self.testcase.set_metadata('requested_regressed_bisect', True)
self.testcase.set_metadata('requested_fixed_bisect', True)
self.testcase.put()
task_creation.request_bisection(self.testcase.key.id())
self.assertEqual(0, self.mock.publish.call_count)
def test_request_single_commit_range(self):
self.mock.get_primary_bucket_path.return_value = 'bucket'
self.mock.get_revisions_list.return_value = list(range(6))
self.mock.get_component_range_list.return_value = [
{
'link_text': 'one',
},
]
task_creation.request_bisection(self.testcase.key.id())
self._test('address', old_commit='one', new_commit='one')
self.mock.get_component_range_list.assert_has_calls([
mock.call(123, 456, 'libfuzzer_asan_proj'),
mock.call(0, 3, 'libfuzzer_asan_proj'),
mock.call(123, 456, 'libfuzzer_asan_proj'),
mock.call(4, 5, 'libfuzzer_asan_proj'),
])
| true | true |
f71f83d71e89545d5f222b0941888734de4afcee | 1,798 | py | Python | benchmark/memory_profile_tool.py | coolteemf/coolteemf-deformetrica | f965d6ecc0d04f243e487468a9dafe9fe864eed2 | [
"MIT"
] | 2 | 2022-03-04T11:19:30.000Z | 2022-03-08T04:47:22.000Z | benchmark/memory_profile_tool.py | lepennec/Deformetrica_multiscale | dbcb69962dd02f14dde5d63a9abc1de69112f273 | [
"MIT"
] | null | null | null | benchmark/memory_profile_tool.py | lepennec/Deformetrica_multiscale | dbcb69962dd02f14dde5d63a9abc1de69112f273 | [
"MIT"
] | 1 | 2022-03-07T09:52:52.000Z | 2022-03-07T09:52:52.000Z | import resource
import sys
import time
from threading import Thread
from memory_profiler import memory_usage
import GPUtil
import torch
# _cudart = ctypes.CDLL('libcudart.so')
#
#
# def start_cuda_profile():
# # As shown at http://docs.nvidia.com/cuda/cuda-runtime-api/group__CUDART__PROFILER.html,
# # the return value will unconditionally be 0. This check is just in case it changes in
# # the future.
# ret = _cudart.cudaProfilerStart()
# if ret != 0:
# raise Exception("cudaProfilerStart() returned %d" % ret)
#
#
# def stop_cuda_profile():
# ret = _cudart.cudaProfilerStop()
# if ret != 0:
# raise Exception("cudaProfilerStop() returned %d" % ret)
class MemoryProfiler(Thread):
def __init__(self, freq=0.1):
Thread.__init__(self)
self.freq = freq
self.run_flag = True
self.data = {'ram': []}
def run(self):
# logger.info('MemoryProfiler::run()')
while self.run_flag:
self.data['ram'].append(self.current_ram_usage())
time.sleep(self.freq)
def stop(self):
# logger.info('MemoryProfiler::stop()')
self.run_flag = False
self.join()
return dict(self.data)
def clear(self):
self.data.clear()
@staticmethod
def current_ram_usage():
return memory_usage(-1, interval=0)[0] # -1 is for current process
def start_memory_profile(freq=0.001):
ret = MemoryProfiler(freq)
ret.start()
return ret
def stop_memory_profile(memory_profiler):
return memory_profiler.stop()
def stop_and_clear_memory_profile(memory_profiler):
ret = memory_profiler.stop()
clear_memory_profile(memory_profiler)
return ret
def clear_memory_profile(memory_profiler):
memory_profiler.clear()
| 23.350649 | 94 | 0.660178 | import resource
import sys
import time
from threading import Thread
from memory_profiler import memory_usage
import GPUtil
import torch
': []}
def run(self):
while self.run_flag:
self.data['ram'].append(self.current_ram_usage())
time.sleep(self.freq)
def stop(self):
self.run_flag = False
self.join()
return dict(self.data)
def clear(self):
self.data.clear()
@staticmethod
def current_ram_usage():
return memory_usage(-1, interval=0)[0]
def start_memory_profile(freq=0.001):
ret = MemoryProfiler(freq)
ret.start()
return ret
def stop_memory_profile(memory_profiler):
return memory_profiler.stop()
def stop_and_clear_memory_profile(memory_profiler):
ret = memory_profiler.stop()
clear_memory_profile(memory_profiler)
return ret
def clear_memory_profile(memory_profiler):
memory_profiler.clear()
| true | true |
f71f84709e6e370286285ed6bcfe99e6b5009b1b | 436 | py | Python | lessrpc_stub/StubConstants.py | MoujiRPC/mouji_stub_py2x | 3f8d7c0ccdfade7f80020528ca9ddb52556def6c | [
"MIT"
] | 2 | 2019-03-19T21:44:11.000Z | 2019-04-16T21:41:50.000Z | lessrpc_stub/StubConstants.py | MoujiRPC/mouji_stub_py2x | 3f8d7c0ccdfade7f80020528ca9ddb52556def6c | [
"MIT"
] | null | null | null | lessrpc_stub/StubConstants.py | MoujiRPC/mouji_stub_py2x | 3f8d7c0ccdfade7f80020528ca9ddb52556def6c | [
"MIT"
] | null | null | null | '''
Created on Nov 7, 2017
@author: Salim
'''
CONF_PARAM_NAME_SERVER_URL = "CONF.NAMESERVER.URL"
CONF_PARAM_NAME_SERVER_PORT = "CONF.NAMESERVER.PORT"
RPC_PROTOCOL = "http://"
LESS_RPC_REQUEST_PING = "/ping"
LESS_RPC_REQUEST_EXECUTE = "/execute"
LESS_RPC_REQUEST_SERVICE = "/service"
LESS_RPC_REQUEST_INFO = "/info"
HTTP_PROTOCOL = "http://"
HTTPS_PROTOCOL = "http://"
HTTP_WAIT_TIME_SHORT = 5
HTTP_WAIT_TIME_LONG = 60 * 60 * 5
| 17.44 | 52 | 0.743119 |
CONF_PARAM_NAME_SERVER_URL = "CONF.NAMESERVER.URL"
CONF_PARAM_NAME_SERVER_PORT = "CONF.NAMESERVER.PORT"
RPC_PROTOCOL = "http://"
LESS_RPC_REQUEST_PING = "/ping"
LESS_RPC_REQUEST_EXECUTE = "/execute"
LESS_RPC_REQUEST_SERVICE = "/service"
LESS_RPC_REQUEST_INFO = "/info"
HTTP_PROTOCOL = "http://"
HTTPS_PROTOCOL = "http://"
HTTP_WAIT_TIME_SHORT = 5
HTTP_WAIT_TIME_LONG = 60 * 60 * 5
| true | true |
f71f85094dbcb9fd0be92bb6eec98b8e5363d046 | 100,399 | py | Python | src/sardana/macroserver/macros/scan.py | aureocarneiro/sardana | 43644c9966d73c7a9023b53e97b530f3ea0dfb39 | [
"CC-BY-3.0"
] | null | null | null | src/sardana/macroserver/macros/scan.py | aureocarneiro/sardana | 43644c9966d73c7a9023b53e97b530f3ea0dfb39 | [
"CC-BY-3.0"
] | null | null | null | src/sardana/macroserver/macros/scan.py | aureocarneiro/sardana | 43644c9966d73c7a9023b53e97b530f3ea0dfb39 | [
"CC-BY-3.0"
] | null | null | null | ##############################################################################
##
# This file is part of Sardana
##
# http://www.sardana-controls.org/
##
# Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain
##
# Sardana is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
##
# Sardana is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
##
# You should have received a copy of the GNU Lesser General Public License
# along with Sardana. If not, see <http://www.gnu.org/licenses/>.
##
##############################################################################
"""
Macro library containing scan macros for the macro server Tango device
server as part of the Sardana project.
"""
# Names exported on "from ... import *"; keep in sync with the macro
# classes defined in this module.
__all__ = ["a2scan", "a3scan", "a4scan", "amultiscan", "aNscan", "ascan",
           "d2scan", "d3scan", "d4scan", "dmultiscan", "dNscan", "dscan",
           "fscan", "mesh", "timescan", "rscan", "r2scan", "r3scan",
           "a2scanc", "a3scanc", "a4scanc", "ascanc",
           "d2scanc", "d3scanc", "d4scanc", "dscanc",
           "meshc",
           "a2scanct", "a3scanct", "a4scanct", "ascanct", "meshct",
           "scanhist", "getCallable", "UNCONSTRAINED",
           "scanstats"]
# Docstrings in this module use reStructuredText markup.
__docformat__ = 'restructuredtext'
import os
import copy
import datetime
import numpy
from taurus.core.util import SafeEvaluator
from sardana.macroserver.msexception import UnknownEnv
from sardana.macroserver.macro import Hookable, Macro, Type, Table, List
from sardana.macroserver.scan.gscan import SScan, CTScan, HScan, \
MoveableDesc, CSScan, TScan
from sardana.util.motion import MotionPath
from sardana.util.tree import BranchNode
# Sentinel constraint representation meaning "always allow the movement".
UNCONSTRAINED = "unconstrained"
# Scan mode identifiers understood by aNscan._prepare.
StepMode = 's'
# TODO: change it to be more verbose e.g. ContinuousSwMode
ContinuousMode = 'c'
ContinuousHwTimeMode = 'ct'
HybridMode = 'h'
def getCallable(repr):
    """Return a constraint callable for the given representation.

    Ideas: repr could be an URL for a file where the function is
    contained, or be evaluable code, or a pickled function object,...
    In any case, the result should be a callable f(x1, x2), where x1 and
    x2 are points in the moveable space, returning True if the movement
    from x1 to x2 is allowed and False otherwise.

    Currently only UNCONSTRAINED is handled: it yields a predicate that
    always allows the movement. Any other value produces a dummy
    zero-argument placeholder.
    """
    if repr != UNCONSTRAINED:
        # Placeholder - not a valid f(x1, x2) constraint yet.
        return lambda: None

    def _always_allow(x1, x2):
        return True

    return _always_allow
# TODO: remove starts
def _calculate_positions(moveable_node, start, end):
    '''Recursively expand a moveable tree node into physical positions.

    :param moveable_node: (BaseNode) node representing a moveable
        (a BranchNode for a PseudoMotor or a LeafNode for a
        PhysicalMotor)
    :param start: (float) starting position of the moveable
    :param end: (float) ending position of the moveable
    :return: (tuple<list<float>,list<float>>) starting and ending
        positions on the physical motors level. List order is important
        and preserved.'''
    # A leaf node is already physical - nothing to expand.
    if not isinstance(moveable_node, BranchNode):
        return [start], [end]
    pseudo = moveable_node.data
    start_positions = []
    end_positions = []
    physical_starts = pseudo.calcPhysical(start)
    physical_ends = pseudo.calcPhysical(end)
    for child, child_start, child_end in zip(moveable_node.children,
                                             physical_starts,
                                             physical_ends):
        sub_starts, sub_ends = _calculate_positions(child, child_start,
                                                    child_end)
        start_positions.extend(sub_starts)
        end_positions.extend(sub_ends)
    return start_positions, end_positions
class aNscan(Hookable):
    """N-dimensional scan. This is **not** meant to be called by the user,
    but as a generic base to construct ascan, a2scan, a3scan,..."""
    hints = {'scan': 'aNscan', 'allowsHooks': ('pre-scan', 'pre-move',
                                               'post-move', 'pre-acq',
                                               'post-acq', 'post-step',
                                               'post-scan')}
    # env = ('ActiveMntGrp',)
    def _prepare(self, motorlist, startlist, endlist, scan_length, integ_time,
                 mode=StepMode, latency_time=0, **opts):
        """Store scan inputs and build the GScan flavor selected by *mode*.

        :param motorlist: moveables to scan (one per dimension)
        :param startlist: absolute start positions, one per moveable
        :param endlist: absolute end positions, one per moveable
        :param scan_length: number of intervals (StepMode,
            ContinuousHwTimeMode, HybridMode) or slow-down factor
            (ContinuousMode)
        :param integ_time: integration time per point
        :param mode: one of StepMode, ContinuousMode,
            ContinuousHwTimeMode, HybridMode
        :param latency_time: extra latency between acquisitions; only
            used in ContinuousHwTimeMode (may be raised to the
            measurement group latency time)
        :raises ValueError: if list lengths differ or *mode* is unknown
        """
        self.motors = motorlist
        self.starts = numpy.array(startlist, dtype='d')
        self.finals = numpy.array(endlist, dtype='d')
        self.mode = mode
        self.integ_time = integ_time
        self.opts = opts
        if len(self.motors) == self.starts.size == self.finals.size:
            self.N = self.finals.size
        else:
            raise ValueError(
                'Moveablelist, startlist and endlist must all be same length')
        # Describe each moveable with its scanned range; the first one acts
        # as the reference axis for plotting/recording.
        moveables = []
        for m, start, final in zip(self.motors, self.starts, self.finals):
            moveables.append(MoveableDesc(moveable=m, min_value=min(
                start, final), max_value=max(start, final)))
        moveables[0].is_reference = True
        env = opts.get('env', {})
        constrains = [getCallable(cns) for cns in opts.get(
            'constrains', [UNCONSTRAINED])]
        extrainfodesc = opts.get('extrainfodesc', [])
        # Hooks are not always set at this point. We will call getHooks
        # later on in the scan_loop
        # self.pre_scan_hooks = self.getHooks('pre-scan')
        # self.post_scan_hooks = self.getHooks('post-scan'
        if mode == StepMode:
            self.nr_interv = scan_length
            self.nb_points = self.nr_interv + 1
            self.interv_sizes = (self.finals - self.starts) / self.nr_interv
            self.name = opts.get('name', 'a%iscan' % self.N)
            self._gScan = SScan(self, self._stepGenerator,
                                moveables, env, constrains, extrainfodesc)
        elif mode in [ContinuousMode, ContinuousHwTimeMode]:
            # TODO: probably not 100% correct,
            # the idea is to allow passing a list of waypoints
            if isinstance(endlist[0], list):
                self.waypoints = self.finals
            else:
                self.waypoints = [self.finals]
            self.nr_waypoints = len(self.waypoints)
            if mode == ContinuousMode:
                self.slow_down = scan_length
                # aNscans will only have two waypoints (the start and the final
                # positions)
                self.nr_waypoints = 2
                self.way_lengths = (
                    self.finals - self.starts) / (self.nr_waypoints - 1)
                self.name = opts.get('name', 'a%iscanc' % self.N)
                self._gScan = CSScan(self, self._waypoint_generator,
                                     self._period_generator, moveables, env,
                                     constrains, extrainfodesc)
            elif mode == ContinuousHwTimeMode:
                self.nr_interv = scan_length
                self.nb_points = self.nr_interv + 1
                # Never go below the latency imposed by the active
                # measurement group.
                mg_name = self.getEnv('ActiveMntGrp')
                mg = self.getMeasurementGroup(mg_name)
                mg_latency_time = mg.getLatencyTime()
                if mg_latency_time > latency_time:
                    self.info("Choosing measurement group latency time: %f" %
                              mg_latency_time)
                    latency_time = mg_latency_time
                self.latency_time = latency_time
                self.name = opts.get('name', 'a%iscanct' % self.N)
                self._gScan = CTScan(self, self._waypoint_generator_hwtime,
                                     moveables,
                                     env,
                                     constrains,
                                     extrainfodesc)
        elif mode == HybridMode:
            self.nr_interv = scan_length
            self.nb_points = self.nr_interv + 1
            self.interv_sizes = (self.finals - self.starts) / self.nr_interv
            self.name = opts.get('name', 'a%iscanh' % self.N)
            self._gScan = HScan(self, self._stepGenerator,
                                moveables, env, constrains, extrainfodesc)
        else:
            raise ValueError('invalid value for mode %s' % mode)
        # _data is the default member where the Macro class stores the data.
        # Assign the date produced by GScan (or its subclasses) to it so all
        # the Macro infrastructure related to the data works e.g. getter,
        # property, etc. Ideally this should be done by the data setter
        # but this is available in the Macro class and we inherit from it
        # latter. More details in sardana-org/sardana#683.
        self._data = self._gScan.data
    def _stepGenerator(self):
        """Yield one step dict per scan point (consumed by SScan/HScan).

        Positions advance linearly from the start positions in steps of
        ``interv_sizes``. The same dict object is re-yielded with updated
        "positions" and "point_id" entries.
        """
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = self.getHooks('post-acq') + self.getHooks(
            '_NOHINTS_')
        step["post-step-hooks"] = self.getHooks('post-step')
        step["check_func"] = []
        for point_no in range(self.nb_points):
            step["positions"] = self.starts + point_no * self.interv_sizes
            step["point_id"] = point_no
            yield step
    def _waypoint_generator(self):
        """Yield one waypoint dict per waypoint (consumed by CSScan in
        software-continuous mode)."""
        step = {}
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["check_func"] = []
        step["slow_down"] = self.slow_down
        for point_no in range(self.nr_waypoints):
            step["positions"] = self.starts + point_no * self.way_lengths
            step["waypoint_id"] = point_no
            yield step
    def _waypoint_generator_hwtime(self):
        """Yield one waypoint dict per waypoint for hardware-timed
        continuous scans (consumed by CTScan). Start and end positions
        are expanded to the physical motors level.
        """
        # CScan in its constructor populates a list of data structures - trees.
        # Each tree represent one Moveables with its hierarchy of inferior
        # moveables.
        moveables_trees = self._gScan.get_moveables_trees()
        step = {}
        step["pre-move-hooks"] = self.getHooks('pre-move')
        # _fill_missing_records pads the record list after each waypoint.
        post_move_hooks = self.getHooks(
            'post-move') + [self._fill_missing_records]
        step["post-move-hooks"] = post_move_hooks
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = self.getHooks('post-acq') + self.getHooks(
            '_NOHINTS_')
        step["check_func"] = []
        step["active_time"] = self.nb_points * (self.integ_time
                                                + self.latency_time)
        step["positions"] = []
        step["start_positions"] = []
        starts = self.starts
        for point_no, waypoint in enumerate(self.waypoints):
            for start, end, moveable_tree in zip(starts, waypoint,
                                                 moveables_trees):
                moveable_root = moveable_tree.root()
                start_positions, end_positions = _calculate_positions(
                    moveable_root, start, end)
                step["start_positions"] += start_positions
                step["positions"] += end_positions
            step["waypoint_id"] = point_no
            starts = waypoint
            yield step
    def _period_generator(self):
        """Infinite generator of acquisition period dicts (consumed by
        CSScan while the continuous movement is in progress)."""
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = (self.getHooks('post-acq') +
                                  self.getHooks('_NOHINTS_'))
        step["post-step-hooks"] = self.getHooks('post-step')
        step["check_func"] = []
        step['extrainfo'] = {}
        point_no = 0
        while(True):
            point_no += 1
            step["point_id"] = point_no
            yield step
    def run(self, *args):
        """Run the scan by delegating to the generic scan engine."""
        for step in self._gScan.step_scan():
            yield step
    def getTimeEstimation(self):
        """Estimate the total scan time in seconds (motion + acquisition).

        Only StepMode and ContinuousMode are estimated; other modes
        return 0.0.
        """
        gScan = self._gScan
        mode = self.mode
        it = gScan.generator()
        v_motors = gScan.get_virtual_motors()
        curr_pos = gScan.motion.readPosition()
        total_time = 0.0
        if mode == StepMode:
            # calculate motion time
            max_step0_time, max_step_time = 0.0, 0.0
            # first motion takes longer, all others should be "equal"
            step0 = next(it)
            for v_motor, start, stop, length in zip(v_motors, curr_pos,
                                                    step0['positions'],
                                                    self.interv_sizes):
                path0 = MotionPath(v_motor, start, stop)
                path = MotionPath(v_motor, 0, length)
                max_step0_time = max(max_step0_time, path0.duration)
                max_step_time = max(max_step_time, path.duration)
            motion_time = max_step0_time + self.nr_interv * max_step_time
            # calculate acquisition time
            acq_time = self.nb_points * self.integ_time
            total_time = motion_time + acq_time
        elif mode == ContinuousMode:
            total_time = gScan.waypoint_estimation()
        # TODO: add time estimation for ContinuousHwTimeMode
        return total_time
    def getIntervalEstimation(self):
        """Return the expected number of intervals (or of waypoints in
        software-continuous mode)."""
        mode = self.mode
        if mode in [StepMode, ContinuousHwTimeMode, HybridMode]:
            return self.nr_interv
        elif mode == ContinuousMode:
            return self.nr_waypoints
    def _fill_missing_records(self):
        """Pad the scan data with dummy records up to ``nb_points``.

        Used as a post-move hook in hardware-timed continuous scans.
        """
        # fill record list with dummy records for the final padding
        nb_of_points = self.nb_points
        scan = self._gScan
        nb_of_records = len(scan.data.records)
        missing_records = nb_of_points - nb_of_records
        scan.data.initRecords(missing_records)
    def _get_nr_points(self):
        # Deprecated alias kept for backward compatibility with pre-3.0.3
        # code; warns and forwards to nb_points.
        msg = ("nr_points is deprecated since version 3.0.3. "
               "Use nb_points instead.")
        self.warning(msg)
        return self.nb_points
    nr_points = property(_get_nr_points)
class dNscan(aNscan):
    """
    Relative flavor of :class:`aNscan`.

    Start and end positions are interpreted as offsets from the current
    motor positions and, once the scan is done, the motors are sent back
    to the positions they had before it started.
    """
    hints = copy.deepcopy(aNscan.hints)
    hints['scan'] = 'dNscan'
    def _prepare(self, motorlist, startlist, endlist, scan_length,
                 integ_time, mode=StepMode, **opts):
        """Translate relative positions into absolute ones and delegate
        to the base class preparation."""
        motor_names = [m.getName() for m in motorlist]
        self._motion = self.getMotion(motor_names)
        self.originalPositions = numpy.array(
            self._motion.readPosition(force=True))
        starts = self.originalPositions + numpy.array(startlist, dtype='d')
        finals = self.originalPositions + numpy.array(endlist, dtype='d')
        aNscan._prepare(self, motorlist, starts, finals,
                        scan_length, integ_time, mode=mode, **opts)
    def do_restore(self):
        """Send all motors back to their pre-scan positions."""
        self.info("Returning to start positions...")
        self._motion.move(self.originalPositions)
class ascan(aNscan, Macro):
    """
    Do an absolute scan of the specified motor.

    ascan moves a single moveable from the position given by start_pos to
    the position given by final_pos in nr_interv equally sized steps of
    (start_pos-final_pos)/nr_interv, acquiring nr_interv+1 data points.
    The count time is given by integ_time: a positive value means seconds
    while a negative one means monitor counts.
    """
    param_def = [
        ['motor', Type.Moveable, None, 'Moveable to move'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, motor, start_pos, final_pos, nr_interv, integ_time,
                **opts):
        """Delegate to the generic N-dimensional preparation (N=1)."""
        motors = [motor]
        starts = [start_pos]
        finals = [final_pos]
        self._prepare(motors, starts, finals, nr_interv, integ_time, **opts)
class a2scan(aNscan, Macro):
    """
    two-motor scan.

    a2scan moves motor1 and motor2 simultaneously, each from its own start
    position to its own final position, in nr_interv equally sized steps
    of (start_pos-final_pos)/nr_interv per motor, acquiring nr_interv+1
    data points. The count time is given by integ_time: a positive value
    means seconds while a negative one means monitor counts.
    """
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, motor1, start_pos1, final_pos1, motor2, start_pos2,
                final_pos2, nr_interv, integ_time, **opts):
        """Delegate to the generic N-dimensional preparation (N=2)."""
        motors = [motor1, motor2]
        starts = [start_pos1, start_pos2]
        finals = [final_pos1, final_pos2]
        self._prepare(motors, starts, finals, nr_interv, integ_time, **opts)
class a3scan(aNscan, Macro):
    """three-motor scan .

    a3scan moves motor1, motor2 and motor3 simultaneously, each from its
    own start position to its own final position, in nr_interv equally
    sized steps of (start_pos-final_pos)/nr_interv per motor, acquiring
    nr_interv+1 data points. The count time is given by integ_time: a
    positive value means seconds while a negative one means monitor
    counts."""
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, nr_interv,
                integ_time, **opts):
        """Delegate to the generic N-dimensional preparation (N=3)."""
        motors = [m1, m2, m3]
        starts = [s1, s2, s3]
        finals = [f1, f2, f3]
        self._prepare(motors, starts, finals, nr_interv, integ_time, **opts)
class a4scan(aNscan, Macro):
    """four-motor scan .

    a4scan scans four motors, as specified by motor1, motor2, motor3 and
    motor4.
    Each motor moves the same number of intervals with starting and ending
    positions given by start_posN and final_posN (for N=1,2,3,4).
    The step size for each motor is (start_pos-final_pos)/nr_interv.
    The number of data points collected will be nr_interv+1.
    Count time is given by time which if positive, specifies seconds and
    if negative, specifies monitor counts."""
    # Fix: the motor4 parameter descriptions were copy-pasted from motor3
    # and wrongly said "3" in the user-facing help.
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                nr_interv, integ_time, **opts):
        """Delegate to the generic N-dimensional preparation (N=4)."""
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [
            f1, f2, f3, f4], nr_interv, integ_time, **opts)
class amultiscan(aNscan, Macro):
    """
    Multiple motor scan.

    amultiscan moves an arbitrary number of motors, given as repeated
    (motor, start, end) triplets, each from its own start position to its
    own final position in nr_interv equally sized steps of
    (start_pos-final_pos)/nr_interv, acquiring nr_interv+1 data points.
    The count time is given by integ_time: a positive value means seconds
    while a negative one means monitor counts.
    """
    param_def = [
        ['motor_start_end_list',
         [['motor', Type.Moveable, None, 'Moveable to move'],
          ['start', Type.Float, None, 'Starting position'],
          ['end', Type.Float, None, 'Final position']],
         None, 'List of motor, start and end positions'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, *args, **opts):
        """Split the flat argument tuple into motors/starts/ends plus the
        trailing (nr_interv, integ_time) pair and delegate."""
        nr_interv = args[-2]
        integ_time = args[-1]
        # Every third element of the leading (motor, start, end) triplets.
        motors = args[0:-2:3]
        starts = args[1:-2:3]
        ends = args[2:-2:3]
        self._prepare(motors, starts, ends, nr_interv, integ_time, **opts)
class dmultiscan(dNscan, Macro):
    """
    Multiple motor scan relative to the starting positions.

    dmultiscan moves an arbitrary number of motors, given as repeated
    (motor, start, end) triplets. A motor sitting at position X before
    the scan is moved from X+start to X+end in nr_interv equally sized
    steps of (start_pos-final_pos)/nr_interv, acquiring nr_interv+1 data
    points. The count time is given by integ_time: a positive value
    means seconds while a negative one means monitor counts.
    """
    param_def = [
        ['motor_start_end_list',
         [['motor', Type.Moveable, None, 'Moveable to move'],
          ['start', Type.Float, None, 'Starting position'],
          ['end', Type.Float, None, 'Final position']],
         None, 'List of motor, start and end positions'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, *args, **opts):
        """Split the flat argument tuple into motors/starts/ends plus the
        trailing (nr_interv, integ_time) pair and delegate."""
        nr_interv, integ_time = args[-2:]
        # Every third element of the leading (motor, start, end) triplets.
        motors = args[0:-2:3]
        starts = args[1:-2:3]
        ends = args[2:-2:3]
        self._prepare(motors, starts, ends, nr_interv, integ_time, **opts)
class dscan(dNscan, Macro):
    """Motor scan relative to the starting position.
    dscan scans one motor. If the motor is at position X when the scan
    starts, it is moved from X+start_pos to X+final_pos in steps of size
    (start_pos-final_pos)/nr_interv, collecting nr_interv+1 data points.
    A positive count time means seconds; a negative one means monitor
    counts. """

    param_def = [
        ['motor', Type.Moveable, None, 'Moveable to move'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, motor, start_pos, final_pos, nr_interv, integ_time,
                **opts):
        # Single-motor case of the generic N-motor relative preparation.
        motors = [motor]
        starts = [start_pos]
        finals = [final_pos]
        self._prepare(motors, starts, finals, nr_interv, integ_time, **opts)
class d2scan(dNscan, Macro):
    """Two-motor scan relative to the starting positions.
    d2scan scans motor1 and motor2; both move the same number of intervals.
    If a motor is at position X when the scan begins, it is scanned from
    X+start_posN to X+final_posN (N=1,2), with step size
    (start_pos-final_pos)/nr_interv and nr_interv+1 collected data points.
    A positive count time means seconds; a negative one means monitor
    counts."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, motor1, start_pos1, final_pos1, motor2, start_pos2,
                final_pos2, nr_interv, integ_time, **opts):
        # Bundle per-motor arguments and delegate to dNscan.
        motors = [motor1, motor2]
        starts = [start_pos1, start_pos2]
        finals = [final_pos1, final_pos2]
        self._prepare(motors, starts, finals, nr_interv, integ_time, **opts)
class d3scan(dNscan, Macro):
    """Three-motor scan relative to the starting positions.
    d3scan scans motor1, motor2 and motor3; each moves the same number of
    intervals. If a motor is at position X when the scan begins, it is
    scanned from X+start_posN to X+final_posN (N=1,2,3), with step size
    (start_pos-final_pos)/nr_interv and nr_interv+1 collected data points.
    A positive count time means seconds; a negative one means monitor
    counts."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, nr_interv,
                integ_time, **opts):
        # Bundle per-motor arguments and delegate to dNscan.
        motors = [m1, m2, m3]
        starts = [s1, s2, s3]
        finals = [f1, f2, f3]
        self._prepare(motors, starts, finals, nr_interv, integ_time, **opts)
class d4scan(dNscan, Macro):
    """four-motor scan relative to the starting positions
    d4scan scans four motors, as specified by motor1, motor2, motor3 and
    motor4.
    Each motor moves the same number of intervals. If each motor is at a
    position X before the scan begins, it will be scanned from X+start_posN
    to X+final_posN (where N is one of 1,2,3,4).
    The step size for each motor is (start_pos-final_pos)/nr_interv.
    The number of data points collected will be nr_interv+1.
    Count time is given by time which if positive, specifies seconds and
    if negative, specifies monitor counts.
    Upon termination, the motors are returned to their starting positions.
    """

    # NOTE: the docstring previously said "a4scan" and the motor4 parameter
    # descriptions said "3" (copy-paste errors); both fixed.
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                nr_interv, integ_time, **opts):
        # Bundle per-motor arguments and delegate to dNscan.
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [
                      f1, f2, f3, f4], nr_interv, integ_time, **opts)
class mesh(Macro, Hookable):
    """2d grid scan.
    The mesh scan traces out a grid using motor1 and motor2.
    The first motor scans from m1_start_pos to m1_final_pos using the specified
    number of intervals. The second motor similarly scans from m2_start_pos
    to m2_final_pos. Each point is counted for integ_time seconds
    (or monitor counts, if integ_time is negative).
    The scan of motor1 is done at each point scanned by motor2. That is, the
    first motor scan is nested within the second motor scan.
    """
    # Hook places advertised to the macro executor.
    hints = {'scan': 'mesh', 'allowsHooks': ('pre-scan', 'pre-move',
                                             'post-move', 'pre-acq',
                                             'post-acq', 'post-step',
                                             'post-scan')}
    env = ('ActiveMntGrp',)
    param_def = [
        ['motor1', Type.Moveable, None, 'First motor to move'],
        ['m1_start_pos', Type.Float, None, 'Scan start position for first '
                                           'motor'],
        ['m1_final_pos', Type.Float, None, 'Scan final position for first '
                                           'motor'],
        ['m1_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['motor2', Type.Moveable, None, 'Second motor to move'],
        ['m2_start_pos', Type.Float, None, 'Scan start position for second '
                                           'motor'],
        ['m2_final_pos', Type.Float, None, 'Scan final position for second '
                                           'motor'],
        ['m2_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['bidirectional', Type.Boolean, False, 'Save time by scanning '
                                               's-shaped']
    ]
    def prepare(self, m1, m1_start_pos, m1_final_pos, m1_nr_interv,
                m2, m2_start_pos, m2_final_pos, m2_nr_interv, integ_time,
                bidirectional, **opts):
        # Record the grid geometry; motor1 is the fast (inner) axis.
        self.motors = [m1, m2]
        self.starts = numpy.array([m1_start_pos, m2_start_pos], dtype='d')
        self.finals = numpy.array([m1_final_pos, m2_final_pos], dtype='d')
        self.nr_intervs = numpy.array([m1_nr_interv, m2_nr_interv], dtype='i')
        self.nb_points = (m1_nr_interv + 1) * (m2_nr_interv + 1)
        self.integ_time = integ_time
        self.bidirectional_mode = bidirectional
        self.name = opts.get('name', 'mesh')
        generator = self._generator
        # Describe each moveable with its scan limits for the GScan core.
        moveables = []
        for m, start, final in zip(self.motors, self.starts, self.finals):
            moveables.append(MoveableDesc(moveable=m,
                                          min_value=min(start, final),
                                          max_value=max(start, final)))
        moveables[0].is_reference = True
        env = opts.get('env', {})
        constrains = [getCallable(cns) for cns in opts.get(
            'constrains', [UNCONSTRAINED])]
        # Hooks are not always set at this point. We will call getHooks
        # later on in the scan_loop
        # self.pre_scan_hooks = self.getHooks('pre-scan')
        # self.post_scan_hooks = self.getHooks('post-scan')
        self._gScan = SScan(self, generator, moveables, env, constrains)
        # _data is the default member where the Macro class stores the data.
        # Assign the data produced by GScan (or its subclasses) to it so all
        # the Macro infrastructure related to the data works e.g. getter,
        # property, etc.
        self.setData(self._gScan.data)
    def _generator(self):
        # NOTE: the same ``step`` dict object is mutated and re-yielded for
        # every point; consumers must use it before advancing the generator.
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = (self.getHooks('post-acq') +
                                  self.getHooks('_NOHINTS_'))
        step["post-step-hooks"] = self.getHooks('post-step')
        step["check_func"] = []
        m1start, m2start = self.starts
        m1end, m2end = self.finals
        points1, points2 = self.nr_intervs + 1
        point_no = 1
        # Pre-compute the inner-axis rows, forward and reversed (the latter
        # is used for odd rows in bidirectional "s-shaped" mode).
        m1_space = numpy.linspace(m1start, m1end, points1)
        m1_space_inv = numpy.linspace(m1end, m1start, points1)
        for i, m2pos in enumerate(numpy.linspace(m2start, m2end, points2)):
            space = m1_space
            if i % 2 != 0 and self.bidirectional_mode:
                space = m1_space_inv
            for m1pos in space:
                step["positions"] = numpy.array([m1pos, m2pos])
                # TODO: maybe another ID would be better? (e.g. "(A,B)")
                step["point_id"] = point_no
                point_no += 1
                yield step
    def run(self, *args):
        # Drive the generic step scan; yielding keeps the macro responsive.
        for step in self._gScan.step_scan():
            yield step
class dmesh(mesh):
    """
    2d relative grid scan.
    The relative mesh scan traces out a grid using motor1 and motor2.
    If first motor is at the position X before the scan begins, it will
    be scanned from X+m1_start_pos to X+m1_final_pos using the specified
    m1_nr_interv number of intervals. If the second motor is
    at the position Y before the scan begins, it will be scanned
    from Y+m2_start_pos to Y+m2_final_pos using the specified m2_nr_interv
    number of intervals.
    Each point is counted for the integ_time seconds (or monitor counts,
    if integ_time is negative).
    The scan of motor1 is done at each point scanned by motor2. That is, the
    first motor scan is nested within the second motor scan.
    Upon scan completion, it returns the motors to their original positions.
    """
    # Inherit mesh's hints/env (deep-copied so mutations here don't leak
    # back into mesh) and advertise the scan as "dmesh".
    hints = copy.deepcopy(mesh.hints)
    hints['scan'] = 'dmesh'
    env = copy.deepcopy(mesh.env)
    param_def = [
        ['motor1', Type.Moveable, None, 'First motor to move'],
        ['m1_start_pos', Type.Float, None, 'Scan start position for first '
                                           'motor'],
        ['m1_final_pos', Type.Float, None, 'Scan final position for first '
                                           'motor'],
        ['m1_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['motor2', Type.Moveable, None, 'Second motor to move'],
        ['m2_start_pos', Type.Float, None, 'Scan start position for second '
                                           'motor'],
        ['m2_final_pos', Type.Float, None, 'Scan final position for second '
                                           'motor'],
        ['m2_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['bidirectional', Type.Boolean, False, 'Save time by scanning '
                                               's-shaped']
    ]
    def prepare(self, m1, m1_start_pos, m1_final_pos, m1_nr_interv,
                m2, m2_start_pos, m2_final_pos, m2_nr_interv, integ_time,
                bidirectional, **opts):
        # Read the current motor positions and convert the relative limits
        # into the absolute ones that mesh.prepare expects.
        self._motion = self.getMotion([m1, m2])
        self.originalPositions = numpy.array(
            self._motion.readPosition(force=True))
        start1 = self.originalPositions[0] + m1_start_pos
        start2 = self.originalPositions[1] + m2_start_pos
        final1 = self.originalPositions[0] + m1_final_pos
        final2 = self.originalPositions[1] + m2_final_pos
        mesh.prepare(self, m1, start1, final1, m1_nr_interv,
                     m2, start2, final2, m2_nr_interv, integ_time,
                     bidirectional, **opts)
    def do_restore(self):
        # Called at scan end/abort: drive both motors back to where they
        # were before the scan started.
        self.info("Returning to start positions...")
        self._motion.move(self.originalPositions)
class fscan(Macro, Hookable):
    """
    N-dimensional scan along user defined paths.
    The motion path for each motor is defined through the evaluation of a
    user-supplied function that is evaluated as a function of the independent
    variables.
    -independent variables are supplied through the indepvar string.
    The syntax for indepvar is "x=expression1,y=expression2,..."
    -If no indep vars need to be defined, write "!" or "*" or "None"
    -motion path for motor is generated by evaluating the corresponding
    function 'func'
    -Count time is given by integ_time. If integ_time is a scalar, then
    the same integ_time is used for all points. If it evaluates as an array
    (with same length as the paths), fscan will assign a different integration
    time to each acquisition point.
    -If integ_time is positive, it specifies seconds and if negative, specifies
    monitor counts.
    IMPORTANT Notes:
    -no spaces are allowed in the indepvar string.
    -all funcs must evaluate to the same number of points
    >>> fscan "x=[1,3,5,7,9],y=arange(5)" 0.1 motor1 x**2 motor2 sqrt(y*x+3)
    >>> fscan "x=[1,3,5,7,9],y=arange(5)" "[0.1,0.2,0.3,0.4,0.5]" motor1 x**2 \
    motor2 sqrt(y*x+3)
    """
    # ['integ_time', Type.String, None, 'Integration time']
    hints = {'scan': 'fscan',
             'allowsHooks': ('pre-scan', 'pre-move', 'post-move', 'pre-acq',
                             'post-acq', 'post-step', 'post-scan')}
    env = ('ActiveMntGrp',)
    param_def = [
        ['indepvars', Type.String, None, 'Independent Variables'],
        ['integ_time', Type.String, None, 'Integration time'],
        ['motor_funcs',
         [['motor', Type.Moveable, None, 'motor'],
          ['func', Type.String, None, 'curve defining path']],
         None, 'List of motor and path curves']
    ]
    def prepare(self, *args, **opts):
        # args[0]: indepvars string; args[1]: integ_time expression;
        # args[2]: list of (motor, func) pairs.
        if args[0].lower() in ["!", "*", "none", None]:
            indepvars = {}
        else:
            indepvars = SafeEvaluator({'dict': dict}).eval(
                'dict(%s)' % args[0])  # create a dict containing the indepvars
        self.motors = [item[0] for item in args[2]]
        self.funcstrings = [item[1] for item in args[2]]
        # Build one evaluation namespace per point: {var_name: value}.
        globals_lst = [dict(list(zip(indepvars, values)))
                       for values in zip(*list(indepvars.values()))]
        # Evaluate every path function at every point.
        self.paths = [[SafeEvaluator(globals).eval(
            func) for globals in globals_lst] for func in self.funcstrings]
        # NOTE(review): plain eval() on user-supplied input -- the path
        # expressions above go through SafeEvaluator; consider using it
        # here too, since eval can execute arbitrary code.
        self._integ_time = numpy.array(eval(args[1]), dtype='d')
        self.opts = opts
        if len(self.motors) == len(self.paths) > 0:
            self.N = len(self.motors)
        else:
            raise ValueError(
                'Moveable and func lists must be non-empty and same length')
        npoints = len(self.paths[0])
        try:
            # if everything is OK, the following lines should return a 2D array
            # in which each motor path is a row.
            # Typical failure is due to shape mismatch due to inconsistent
            # input
            self.paths = numpy.array(self.paths, dtype='d')
            self.paths.reshape((self.N, npoints))
        except Exception:  # shape mismatch?
            # try to give a meaningful description of the error
            for p, fs in zip(self.paths, self.funcstrings):
                if len(p) != npoints:
                    raise ValueError('"%s" and "%s" yield different number '
                                     'of points (%i vs %i)' %
                                     (self.funcstrings[0], fs, npoints,
                                      len(p)))
            raise  # the problem wasn't a shape mismatch
        self._nb_points = npoints
        # A scalar integ_time applies to every point; otherwise it must
        # supply exactly one value per point.
        if self._integ_time.size == 1:
            self._integ_time = self._integ_time * \
                numpy.ones(self._nb_points)  # extend integ_time
        elif self._integ_time.size != self._nb_points:
            raise ValueError('time_integ must either be a scalar or '
                             'length=npoints (%i)' % self._nb_points)
        self.name = opts.get('name', 'fscan')
        generator = self._generator
        moveables = self.motors
        env = opts.get('env', {})
        constrains = [getCallable(cns) for cns in opts.get(
            'constrains', [UNCONSTRAINED])]
        # Hooks are not always set at this point. We will call getHooks
        # later on in the scan_loop
        # self.pre_scan_hooks = self.getHooks('pre-scan')
        # self.post_scan_hooks = self.getHooks('post-scan')
        self._gScan = SScan(self, generator, moveables, env, constrains)
        # _data is the default member where the Macro class stores the data.
        # Assign the data produced by GScan (or its subclasses) to it so all
        # the Macro infrastructure related to the data works e.g. getter,
        # property, etc.
        self.setData(self._gScan.data)
    def _generator(self):
        # NOTE: the same ``step`` dict is mutated and re-yielded per point.
        step = {}
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = (self.getHooks('post-acq') +
                                  self.getHooks('_NOHINTS_'))
        step["post-step-hooks"] = self.getHooks('post-step')
        step["check_func"] = []
        for i in range(self._nb_points):
            # Column i of the path matrix: one target per motor.
            step["positions"] = self.paths[:, i]
            step["integ_time"] = self._integ_time[i]
            step["point_id"] = i
            yield step
    def run(self, *args):
        for step in self._gScan.step_scan():
            yield step
    def _get_nr_points(self):
        # Backwards-compatibility shim for the pre-3.0.3 attribute name.
        msg = ("nr_points is deprecated since version 3.0.3. "
               "Use nb_points instead.")
        self.warning(msg)
        return self.nb_points
    nr_points = property(_get_nr_points)
class ascanh(aNscan, Macro):
    """Do an absolute scan of the specified motor.
    ascanh scans one motor from start_pos to final_pos in steps of size
    (start_pos-final_pos)/nr_interv, collecting nr_interv+1 data points.
    A positive count time means seconds; a negative one means monitor
    counts. """

    param_def = [
        ['motor', Type.Moveable, None, 'Moveable to move'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, motor, start_pos, final_pos, nr_interv, integ_time,
                **opts):
        # Single-motor case of the generic preparation, in hybrid mode.
        motors = [motor]
        starts = [start_pos]
        finals = [final_pos]
        self._prepare(motors, starts, finals, nr_interv, integ_time,
                      mode=HybridMode, **opts)
class rscan(Macro, Hookable):
    """rscan.
    Do an absolute scan of the specified motor with different number of intervals for each region.
    It uses the gscan framework.
    """
    hints = {'scan': 'rscan', 'allowsHooks': ('pre-scan', 'pre-move',
                                              'post-move', 'pre-acq',
                                              'post-acq', 'post-step',
                                              'post-scan')}
    # env = ('ActiveMntGrp',)
    param_def = [
        ['motor', Type.Moveable, None, 'Motor to move'],
        ['start_pos', Type.Float, None, 'Start position'],
        ['regions',
         [['next_pos', Type.Float, None, 'next position'],
          ['region_nr_intervals', Type.Integer, None,
           'Region number of intervals']],
         None, 'List of tuples: (next_pos, region_nr_intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, motor, start_pos, regions, integ_time, **opts):
        self.name = 'rscan'
        self.integ_time = integ_time
        self.start_pos = start_pos
        self.regions = regions
        # NOTE(review): ``regions`` is a list of (next_pos, nr_intervals)
        # pairs (see param_def and the indexing in _generator), so // 2
        # looks like it halves the real region count. The attribute is not
        # used in this class -- confirm intent before relying on it.
        self.regions_count = len(self.regions) // 2
        generator = self._generator
        self.motors = [motor]
        env = opts.get('env', {})
        constrains = []
        self._gScan = SScan(self, generator, self.motors, env, constrains)
        self._data = self._gScan.data
    def _generator(self):
        # NOTE: the same ``step`` dict is mutated and re-yielded per point.
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = self.getHooks('post-acq') + self.getHooks(
            '_NOHINTS_')
        step["post-step-hooks"] = self.getHooks('post-step')
        point_id = 0
        region_start = self.start_pos
        for r in range(len(self.regions)):
            region_stop, region_nr_intervals = self.regions[
                r][0], self.regions[r][1]
            positions = numpy.linspace(
                region_start, region_stop, region_nr_intervals + 1)
            if point_id != 0:
                # positions must be calculated from the start to the end of the region
                # but after the first region, the 'start' point must not be
                # repeated
                positions = positions[1:]
            for p in positions:
                step['positions'] = [p]
                step['point_id'] = point_id
                point_id += 1
                yield step
            region_start = region_stop
    def run(self, *args):
        for step in self._gScan.step_scan():
            yield step
class r2scan(Macro, Hookable):
    """r2scan.
    Do an absolute scan of the specified motors with different number of intervals for each region.
    It uses the gscan framework. All the motors will be driven to the same position in each step
    """
    hints = {'scan': 'r2scan', 'allowsHooks': ('pre-scan', 'pre-move',
                                               'post-move', 'pre-acq',
                                               'post-acq', 'post-step',
                                               'post-scan')}
    # env = ('ActiveMntGrp',)
    param_def = [
        ['motor1', Type.Moveable, None, 'Motor to move'],
        ['motor2', Type.Moveable, None, 'Motor to move'],
        ['start_pos', Type.Float, None, 'Start position'],
        ['regions',
         [['next_pos', Type.Float, None, 'next position'],
          ['region_nr_intervals', Type.Integer, None,
           'Region number of intervals']],
         None, 'List of tuples: (next_pos, region_nr_intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
    ]
    def prepare(self, motor1, motor2, start_pos, regions, integ_time, **opts):
        self.name = 'r2scan'
        self.integ_time = integ_time
        self.start_pos = start_pos
        self.regions = regions
        # NOTE(review): suspicious // 2 -- ``regions`` is already a list of
        # (next_pos, nr_intervals) pairs; attribute unused below. Confirm
        # intent before relying on it.
        self.regions_count = len(self.regions) // 2
        generator = self._generator
        self.motors = [motor1, motor2]
        env = opts.get('env', {})
        constrains = []
        self._gScan = SScan(self, generator, self.motors, env, constrains)
        self._data = self._gScan.data
    def _generator(self):
        # NOTE: the same ``step`` dict is mutated and re-yielded per point.
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = self.getHooks('post-acq') + self.getHooks(
            '_NOHINTS_')
        step["post-step-hooks"] = self.getHooks('post-step')
        point_id = 0
        region_start = self.start_pos
        for r in range(len(self.regions)):
            region_stop, region_nr_intervals = self.regions[
                r][0], self.regions[r][1]
            positions = numpy.linspace(
                region_start, region_stop, region_nr_intervals + 1)
            if point_id != 0:
                # positions must be calculated from the start to the end of the region
                # but after the first region, the 'start' point must not be
                # repeated
                positions = positions[1:]
            for p in positions:
                # Both motors are sent to the same position.
                step['positions'] = [p, p]
                step['point_id'] = point_id
                point_id += 1
                yield step
            region_start = region_stop
    def run(self, *args):
        for step in self._gScan.step_scan():
            yield step
class r3scan(Macro, Hookable):
    """r3scan.
    Do an absolute scan of the specified motors with different number of
    intervals for each region. It uses the gscan framework.
    All the motors will be driven to the same position in each step
    """
    hints = {'scan': 'r3scan', 'allowsHooks': ('pre-scan', 'pre-move',
                                               'post-move', 'pre-acq',
                                               'post-acq', 'post-step',
                                               'post-scan')}
    # env = ('ActiveMntGrp',)
    param_def = [
        ['motor1', Type.Moveable, None, 'Motor to move'],
        ['motor2', Type.Moveable, None, 'Motor to move'],
        ['motor3', Type.Moveable, None, 'Motor to move'],
        ['start_pos', Type.Float, None, 'Start position'],
        ['regions',
         [['next_pos', Type.Float, None, 'next position'],
          ['region_nr_intervals', Type.Integer, None,
           'Region number of intervals']],
         None, 'List of tuples: (next_pos, region_nr_intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
    ]
    def prepare(self, motor1, motor2, motor3, start_pos, regions, integ_time, **opts):
        self.name = 'r3scan'
        self.integ_time = integ_time
        self.start_pos = start_pos
        self.regions = regions
        # NOTE(review): suspicious // 2 -- ``regions`` is already a list of
        # (next_pos, nr_intervals) pairs; attribute unused below. Confirm
        # intent before relying on it.
        self.regions_count = len(self.regions) // 2
        generator = self._generator
        self.motors = [motor1, motor2, motor3]
        env = opts.get('env', {})
        constrains = []
        self._gScan = SScan(self, generator, self.motors, env, constrains)
        self._data = self._gScan.data
    def _generator(self):
        # NOTE: the same ``step`` dict is mutated and re-yielded per point.
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = self.getHooks('post-acq') + self.getHooks(
            '_NOHINTS_')
        step["post-step-hooks"] = self.getHooks('post-step')
        point_id = 0
        region_start = self.start_pos
        for r in range(len(self.regions)):
            region_stop, region_nr_intervals = self.regions[
                r][0], self.regions[r][1]
            positions = numpy.linspace(
                region_start, region_stop, region_nr_intervals + 1)
            if point_id != 0:
                # positions must be calculated from the start to the end of the region
                # but after the first region, the 'start' point must not be
                # repeated
                positions = positions[1:]
            for p in positions:
                # All three motors are sent to the same position.
                step['positions'] = [p, p, p]
                step['point_id'] = point_id
                point_id += 1
                yield step
            region_start = region_stop
    def run(self, *args):
        for step in self._gScan.step_scan():
            yield step
class scanhist(Macro):
    """Shows scan history information. Give optional parameter scan number to
    display details about a specific scan"""

    param_def = [
        ['scan number', Type.Integer, -1,
         'scan number. [default=-1 meaning show all scans]'],
    ]

    def run(self, scan_number):
        """Show the whole history, or one entry when scan_number >= 0."""
        try:
            hist = self.getEnv("ScanHistory")
        except UnknownEnv:
            print("No scan recorded in history")
            return
        if scan_number < 0:
            self.show_all(hist)
        else:
            self.show_one(hist, scan_number)

    def show_one(self, hist, scan_number):
        """Print a detailed one-row table for the scan with *scan_number*."""
        item = None
        for h in hist:
            if h['serialno'] == scan_number:
                item = h
                break
        if item is None:
            self.warning("Could not find scan number %s", scan_number)
            return
        # Use the matched entry explicitly. The previous code read the
        # leaked loop variable ``h`` here, which only worked because of the
        # ``break`` above.
        serialno, title = item['serialno'], item['title']
        start = datetime.datetime.fromtimestamp(item['startts'])
        end = datetime.datetime.fromtimestamp(item['endts'])
        total_time = end - start
        start, end, total_time = start.ctime(), end.ctime(), str(total_time)
        scan_dir, scan_file = item['ScanDir'], item['ScanFile']
        deadtime = '%.1f%%' % item['deadtime']
        user = item['user']
        store = "Not stored!"
        if scan_dir is not None and scan_file is not None:
            # ScanFile may be a single filename or a list of filenames.
            if isinstance(scan_file, str):
                store = os.path.join(scan_dir, scan_file)
            else:
                store = scan_dir + os.path.sep + str(scan_file)
        channels = ", ".join(item['channels'])
        cols = ["#", "Title", "Start time", "End time", "Took", "Dead time",
                "User", "Stored", "Channels"]
        data = [serialno, title, start, end, total_time, deadtime, user, store,
                channels]
        table = Table([data], row_head_str=cols, row_head_fmt='%*s',
                      elem_fmt=['%-*s'],
                      col_sep='  :  ')
        for line in table.genOutput():
            self.output(line)

    def show_all(self, hist):
        """Print a summary table with one row per recorded scan."""
        cols = "#", "Title", "Start time", "End time", "Stored"
        width = -1, -1, -1, -1, -1
        out = List(cols, max_col_width=width)
        today = datetime.datetime.today().date()
        for h in hist:
            # Show only the time-of-day for scans that happened today.
            start = datetime.datetime.fromtimestamp(h['startts'])
            if start.date() == today:
                start = start.time().strftime("%H:%M:%S")
            else:
                start = start.strftime("%Y-%m-%d %H:%M:%S")
            end = datetime.datetime.fromtimestamp(h['endts'])
            if end.date() == today:
                end = end.time().strftime("%H:%M:%S")
            else:
                end = end.strftime("%Y-%m-%d %H:%M:%S")
            scan_file = h['ScanFile']
            store = "Not stored!"
            if scan_file is not None:
                store = ", ".join(scan_file)
            row = h['serialno'], h['title'], start, end, store
            out.appendRow(row)
        for line in out.genOutput():
            self.output(line)
class ascanc(aNscan, Macro):
    """Do an absolute continuous scan of the specified motor.
    ascanc scans one motor, as specified by motor."""

    param_def = [
        ['motor', Type.Moveable, None, 'Moveable to move'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, motor, start_pos, final_pos, integ_time, slow_down,
                **opts):
        # Single-motor case of the generic continuous-mode preparation.
        motors = [motor]
        starts = [start_pos]
        finals = [final_pos]
        self._prepare(motors, starts, finals, slow_down, integ_time,
                      mode=ContinuousMode, **opts)
class a2scanc(aNscan, Macro):
    """two-motor continuous scan"""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, motor1, start_pos1, final_pos1, motor2, start_pos2,
                final_pos2, integ_time, slow_down, **opts):
        # Bundle per-motor arguments and delegate in continuous mode.
        motors = [motor1, motor2]
        starts = [start_pos1, start_pos2]
        finals = [final_pos1, final_pos2]
        self._prepare(motors, starts, finals, slow_down, integ_time,
                      mode=ContinuousMode, **opts)
class a3scanc(aNscan, Macro):
    """three-motor continuous scan"""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, integ_time,
                slow_down, **opts):
        # Bundle per-motor arguments and delegate in continuous mode.
        motors = [m1, m2, m3]
        starts = [s1, s2, s3]
        finals = [f1, f2, f3]
        self._prepare(motors, starts, finals, slow_down, integ_time,
                      mode=ContinuousMode, **opts)
class a4scanc(aNscan, Macro):
    """four-motor continuous scan"""

    # NOTE: the motor4 parameter descriptions previously said "3"
    # (copy-paste error); fixed to refer to the fourth motor.
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                integ_time, slow_down, **opts):
        # Bundle per-motor arguments and delegate in continuous mode.
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [f1, f2, f3, f4],
                      slow_down, integ_time, mode=ContinuousMode, **opts)
class dNscanc(dNscan):
    # Base class for continuous relative scans: before returning the motors
    # to their starting positions, restore each one's top velocity so the
    # trip back is not done at the (slowed-down) scan velocity.
    def do_restore(self):
        restore_velocity = self._gScan.set_max_top_velocity
        for m in self.motors:
            restore_velocity(m)
        dNscan.do_restore(self)
class dscanc(dNscanc, Macro):
    """continuous motor scan relative to the starting position."""

    param_def = [
        ['motor', Type.Moveable, None, 'Moveable to move'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, motor, start_pos, final_pos, integ_time, slow_down,
                **opts):
        # Single-motor continuous relative scan.
        motors = [motor]
        starts = [start_pos]
        finals = [final_pos]
        self._prepare(motors, starts, finals, slow_down, integ_time,
                      mode=ContinuousMode, **opts)
class d2scanc(dNscanc, Macro):
    """continuous two-motor scan relative to the starting positions"""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, motor1, start_pos1, final_pos1, motor2, start_pos2,
                final_pos2, integ_time, slow_down, **opts):
        # Bundle per-motor arguments and delegate in continuous mode.
        motors = [motor1, motor2]
        starts = [start_pos1, start_pos2]
        finals = [final_pos1, final_pos2]
        self._prepare(motors, starts, finals, slow_down, integ_time,
                      mode=ContinuousMode, **opts)
class d3scanc(dNscanc, Macro):
    """continuous three-motor scan"""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, integ_time,
                slow_down, **opts):
        # Bundle per-motor arguments and delegate in continuous mode.
        motors = [m1, m2, m3]
        starts = [s1, s2, s3]
        finals = [f1, f2, f3]
        self._prepare(motors, starts, finals, slow_down, integ_time,
                      mode=ContinuousMode, **opts)
class d4scanc(dNscanc, Macro):
    """continuous four-motor scan relative to the starting positions"""

    # Fixed copy-paste bug: the motor4/start_pos4/final_pos4 descriptions
    # previously read "Moveable 3 to move" / "position 3".
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                integ_time, slow_down, **opts):
        """Delegate the four moveables and their relative limits to the
        generic software-continuous preparation."""
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [f1, f2, f3, f4],
                      slow_down, integ_time, mode=ContinuousMode, **opts)
class meshc(Macro, Hookable):
    """2d grid scan: motor1 is swept in software-continuous mode while
    motor2 is stepped between sweeps."""
    hints = {'scan': 'mesh', 'allowsHooks': ('pre-scan', 'pre-move',
                                             'post-move', 'pre-acq',
                                             'post-acq', 'post-step',
                                             'post-scan')}
    env = ('ActiveMntGrp',)
    param_def = [
        ['motor1', Type.Moveable, None, 'First motor to move'],
        ['m1_start_pos', Type.Float, None, 'Scan start position for first '
                                           'motor'],
        ['m1_final_pos', Type.Float, None, 'Scan final position for first '
                                           'motor'],
        ['slow_down', Type.Float, None, 'global scan slow down factor (0, 1]'],
        ['motor2', Type.Moveable, None, 'Second motor to move'],
        ['m2_start_pos', Type.Float, None, 'Scan start position for second '
                                           'motor'],
        ['m2_final_pos', Type.Float, None, 'Scan final position for second '
                                           'motor'],
        ['m2_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['bidirectional', Type.Boolean, False, 'Save time by scanning '
                                               's-shaped']
    ]

    def prepare(self, m1, m1_start_pos, m1_final_pos, slow_down,
                m2, m2_start_pos, m2_final_pos, m2_nr_interv, integ_time,
                bidirectional, **opts):
        """Store the scan parameters and build the software-continuous
        scan engine (CSScan) with one waypoint per motor2 position."""
        self.motors = [m1, m2]
        self.slow_down = slow_down
        self.starts = numpy.array([m1_start_pos, m2_start_pos], dtype='d')
        self.finals = numpy.array([m1_final_pos, m2_final_pos], dtype='d')
        self.m2_nr_interv = m2_nr_interv
        self.integ_time = integ_time
        self.bidirectional_mode = bidirectional
        # One continuous sweep of motor1 per motor2 position.
        self.nr_waypoints = m2_nr_interv + 1
        self.name = opts.get('name', 'meshc')
        moveables = []
        for m, start, final in zip(self.motors, self.starts, self.finals):
            moveables.append(MoveableDesc(moveable=m, min_value=min(
                start, final), max_value=max(start, final)))
        moveables[0].is_reference = True
        env = opts.get('env', {})
        constrains = [getCallable(cns) for cns in opts.get(
            'constrains', [UNCONSTRAINED])]
        extrainfodesc = opts.get('extrainfodesc', [])
        # Hooks are not always set at this point. We will call getHooks
        # later on in the scan_loop:
        # self.pre_scan_hooks = self.getHooks('pre-scan')
        # self.post_scan_hooks = self.getHooks('post-scan')
        self._gScan = CSScan(self, self._waypoint_generator,
                             self._period_generator, moveables, env,
                             constrains, extrainfodesc)
        # motor2 does not move during a sweep.
        self._gScan.frozen_motors = [m2]
        # _data is the default member where the Macro class stores the data.
        # Assign the data produced by GScan (or its subclasses) to it so all
        # the Macro infrastructure related to the data works e.g. getter,
        # property, etc.
        self.setData(self._gScan.data)

    def _waypoint_generator(self):
        """Yield one waypoint (a full motor1 sweep) per motor2 position,
        reversing the sweep direction on odd rows in bidirectional mode."""
        step = {}
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["check_func"] = []
        step["slow_down"] = self.slow_down
        points2 = self.m2_nr_interv + 1
        m1start, m2start = self.starts
        m1end, m2end = self.finals
        point_no = 1
        for i, m2pos in enumerate(numpy.linspace(m2start, m2end, points2)):
            start, end = m1start, m1end
            if i % 2 != 0 and self.bidirectional_mode:
                start, end = m1end, m1start
            step["start_positions"] = numpy.array([start, m2pos])
            step["positions"] = numpy.array([end, m2pos])
            step["point_id"] = point_no
            point_no += 1
            yield step

    def _period_generator(self):
        """Yield acquisition periods indefinitely; the scan engine decides
        when to stop consuming them."""
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = (self.getHooks('post-acq') +
                                  self.getHooks('_NOHINTS_'))
        step["post-step-hooks"] = self.getHooks('post-step')
        step["check_func"] = []
        step['extrainfo'] = {}
        point_no = 0
        while(True):
            point_no += 1
            step["point_id"] = point_no
            yield step

    def run(self, *args):
        """Execute the scan by exhausting the generic scan engine."""
        for step in self._gScan.step_scan():
            yield step

    def getTimeEstimation(self):
        """Estimate duration via the scan engine's waypoint estimation."""
        return self._gScan.waypoint_estimation()

    def getIntervalEstimation(self):
        """One interval per motor2 waypoint."""
        return self.nr_waypoints
class dmeshc(meshc):
    """2d relative continuous grid scan.
    The relative mesh scan traces out a grid using motor1 and motor2.
    If first motor is at the position X before the scan begins, it will
    be continuously scanned from X+m1_start_pos to X+m1_final_pos.
    If the second motor is at the position Y before the scan begins,
    it will be discrete scanned from Y+m2_start_pos to Y+m2_final_pos
    using the specified m2_nr_interv number of intervals.
    The scan considers the accel. and decel. times of the motor1, so the
    counts (for the integ_time seconds or monitor counts,
    if integ_time is negative) are executed while motor1 is moving
    with the constant velocity.
    Upon scan completion, it returns the motors to their original positions.
    """
    hints = copy.deepcopy(meshc.hints)
    hints['scan'] = 'dmeshc'
    env = copy.deepcopy(meshc.env)
    param_def = [
        ['motor1', Type.Moveable, None, 'First motor to move'],
        ['m1_start_pos', Type.Float, None, 'Scan start position for first '
                                           'motor'],
        ['m1_final_pos', Type.Float, None, 'Scan final position for first '
                                           'motor'],
        ['slow_down', Type.Float, None, 'global scan slow down factor (0, 1]'],
        ['motor2', Type.Moveable, None, 'Second motor to move'],
        ['m2_start_pos', Type.Float, None, 'Scan start position for second '
                                           'motor'],
        ['m2_final_pos', Type.Float, None, 'Scan final position for second '
                                           'motor'],
        ['m2_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['bidirectional', Type.Boolean, False, 'Save time by scanning '
                                               's-shaped']
    ]

    def prepare(self, m1, m1_start_pos, m1_final_pos, slow_down,
                m2, m2_start_pos, m2_final_pos, m2_nr_interv, integ_time,
                bidirectional, **opts):
        """Shift the user-given relative limits by the current motor
        positions and delegate to the absolute meshc preparation."""
        self._motion = self.getMotion([m1, m2])
        # Remember where the motors were so do_restore can send them back.
        self.originalPositions = numpy.array(
            self._motion.readPosition(force=True))
        start1 = self.originalPositions[0] + m1_start_pos
        start2 = self.originalPositions[1] + m2_start_pos
        final1 = self.originalPositions[0] + m1_final_pos
        final2 = self.originalPositions[1] + m2_final_pos
        meshc.prepare(self, m1, start1, final1, slow_down,
                      m2, start2, final2, m2_nr_interv, integ_time,
                      bidirectional, **opts)

    def do_restore(self):
        # Called by the scan framework when the scan ends.
        self.info("Returning to start positions...")
        self._motion.move(self.originalPositions)
class aNscanct(aNscan):
    """N-dimensional continuous scan. This is **not** meant to be called by
    the user, but as a generic base to construct ascanct, a2scanct, a3scanct,
    ..."""

    # Hook points supported by hardware-timed continuous scans.
    hints = {
        "scan": "aNscanct",
        "allowsHooks": ("pre-scan", "pre-configuration", "post-configuration",
                        "pre-move", "post-move", "pre-acq", "pre-start",
                        "post-acq", "pre-cleanup", "post-cleanup",
                        "post-scan"),
    }
class ascanct(aNscanct, Macro):
    """Do an absolute continuous scan of the specified motor.
    ascanct scans one motor, as specified by motor. The motor starts before the
    position given by start_pos in order to reach the constant velocity at the
    start_pos and finishes at the position after the final_pos in order to
    maintain the constant velocity until the final_pos."""

    param_def = [['motor', Type.Moveable, None, 'Moveable name'],
                 ['start_pos', Type.Float, None, 'Scan start position'],
                 ['final_pos', Type.Float, None, 'Scan final position'],
                 ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
                 ['integ_time', Type.Float, None, 'Integration time'],
                 ['latency_time', Type.Float, 0, 'Latency time']]

    def prepare(self, motor, start_pos, final_pos, nr_interv,
                integ_time, latency_time, **opts):
        """Wrap the single moveable in lists and delegate to the generic
        hardware-timed continuous preparation."""
        motors = [motor]
        starts = [start_pos]
        finals = [final_pos]
        self._prepare(motors, starts, finals, nr_interv, integ_time,
                      mode=ContinuousHwTimeMode, latency_time=latency_time,
                      **opts)
class a2scanct(aNscanct, Macro):
    """Two-motor continuous scan.
    a2scanct scans two motors, as specified by motor1 and motor2. Each motor
    starts before the position given by its start_pos in order to reach the
    constant velocity at its start_pos and finishes at the position after
    its final_pos in order to maintain the constant velocity until its
    final_pos."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['latency_time', Type.Float, 0, 'Latency time']]

    def prepare(self, m1, s1, f1, m2, s2, f2, nr_interv,
                integ_time, latency_time, **opts):
        """Bundle both moveables and delegate to the generic hardware-timed
        continuous preparation."""
        motors = [m1, m2]
        starts = [s1, s2]
        finals = [f1, f2]
        self._prepare(motors, starts, finals, nr_interv, integ_time,
                      mode=ContinuousHwTimeMode, latency_time=latency_time,
                      **opts)
class a3scanct(aNscanct, Macro):
    """Three-motor continuous scan.
    a3scanct scans three motors, as specified by motor1, motor2 and motor3.
    Each motor starts before the position given by its start_pos in order to
    reach the constant velocity at its start_pos and finishes at the position
    after its final_pos in order to maintain the constant velocity until its
    final_pos."""
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['latency_time', Type.Float, 0, 'Latency time']]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, nr_interv,
                integ_time, latency_time, **opts):
        # Delegate to the generic hardware-timed continuous preparation.
        self._prepare([m1, m2, m3], [s1, s2, s3], [f1, f2, f3], nr_interv,
                      integ_time, mode=ContinuousHwTimeMode,
                      latency_time=latency_time, **opts)
class a4scanct(aNscanct, Macro):
    """Four-motor continuous scan.
    a4scanct scans four motors, as specified by motor1, motor2, motor3 and
    motor4. Each motor starts before the position given by its start_pos in
    order to reach the constant velocity at its start_pos and finishes at the
    position after its final_pos in order to maintain the constant velocity
    until its final_pos."""
    # Base class fixed from aNscan to aNscanct for consistency with
    # ascanct/a2scanct/a3scanct, so this macro advertises the same
    # hardware-timed hook points ("pre-configuration", "pre-start", ...).
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['latency_time', Type.Float, 0, 'Latency time']]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                nr_interv, integ_time, latency_time, **opts):
        """Delegate the four moveables to the generic hardware-timed
        continuous preparation."""
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [f1, f2, f3, f4],
                      nr_interv, integ_time, mode=ContinuousHwTimeMode,
                      latency_time=latency_time, **opts)
class dNscanct(dNscan):
    """N-dimensional hardware-timed continuous scan relative to the starting
    positions. This is **not** meant to be called by the user, but as a
    generic base to construct dscanct, d2scanct, d3scanct, ..."""

    # Hook points supported by hardware-timed continuous scans.
    hints = {
        "scan": "dNscanct",
        "allowsHooks": ("pre-scan", "pre-configuration", "post-configuration",
                        "pre-move", "post-move", "pre-acq", "pre-start",
                        "post-acq", "pre-cleanup", "post-cleanup",
                        "post-scan"),
    }
class dscanct(dNscanct, Macro):
    """Do a relative continuous scan of a single motor.
    dscanct scans one motor, as specified by motor. The motor starts before
    the position given by start_pos in order to reach the constant velocity
    at start_pos and finishes at the position after final_pos in order to
    maintain the constant velocity until final_pos. Positions are relative
    to the motor position at scan start."""

    param_def = [['motor', Type.Moveable, None, 'Moveable name'],
                 ['start_pos', Type.Float, None, 'Scan start position'],
                 ['final_pos', Type.Float, None, 'Scan final position'],
                 ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
                 ['integ_time', Type.Float, None, 'Integration time'],
                 ['latency_time', Type.Float, 0, 'Latency time']]

    def prepare(self, motor, start_pos, final_pos, nr_interv,
                integ_time, latency_time, **opts):
        """Wrap the single moveable in lists and delegate to the generic
        hardware-timed continuous preparation."""
        motors = [motor]
        starts = [start_pos]
        finals = [final_pos]
        self._prepare(motors, starts, finals, nr_interv, integ_time,
                      mode=ContinuousHwTimeMode, latency_time=latency_time,
                      **opts)
class d2scanct(dNscanct, Macro):
    """continuous two-motor scan relative to the starting positions,
    d2scanct scans two motors, as specified by motor1 and motor2.
    Each motor starts before the position given by its start_pos in order to
    reach the constant velocity at its start_pos and finishes at the position
    after its final_pos in order to maintain the constant velocity until its
    final_pos.
    """
    # NOTE(review): unlike dscanct, this macro exposes ``slow_down`` instead
    # of ``nr_interv``/``latency_time`` and passes it where the other
    # hardware-timed variants pass ``nr_interv`` — it looks inherited from
    # the ContinuousMode variant (d2scanc); TODO confirm against
    # aNscan._prepare.
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, integ_time, slow_down, **opts):
        # Delegate to the generic preparation in hardware-timed mode.
        self._prepare([m1, m2], [s1, s2], [f1, f2], slow_down, integ_time,
                      mode=ContinuousHwTimeMode, **opts)
class d3scanct(dNscanct, Macro):
    """continuous three-motor scan relative to the starting positions,
    d3scanct scans three motors, as specified by motor1, motor2 and motor3.
    Each motor starts before the position given by its start_pos in order to
    reach the constant velocity at its start_pos and finishes at the position
    after its final_pos in order to maintain the constant velocity until its
    final_pos.
    """
    # NOTE(review): unlike dscanct, this macro exposes ``slow_down`` instead
    # of ``nr_interv``/``latency_time`` and passes it where the other
    # hardware-timed variants pass ``nr_interv`` — it looks inherited from
    # the ContinuousMode variant (d3scanc); TODO confirm against
    # aNscan._prepare.
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, integ_time,
                slow_down, **opts):
        # Delegate to the generic preparation in hardware-timed mode.
        self._prepare([m1, m2, m3], [s1, s2, s3], [f1, f2, f3], slow_down,
                      integ_time, mode=ContinuousHwTimeMode, **opts)
class d4scanct(dNscanct, Macro):
    """continuous four-motor scan relative to the starting positions,
    d4scanct scans four motors, as specified by motor1, motor2, motor3 and
    motor4.
    Each motor starts before the position given by its start_pos in order to
    reach the constant velocity at its start_pos and finishes at the position
    after its final_pos in order to maintain the constant velocity until its
    final_pos."""
    # Fixed copy-paste bugs: the docstring said "scans three motors" and the
    # motor4/start_pos4/final_pos4 descriptions read "Moveable 3"/"position 3".
    # NOTE(review): like d2scanct/d3scanct this macro exposes ``slow_down``
    # and passes it where dscanct passes ``nr_interv`` — looks inherited from
    # the ContinuousMode variant (d4scanc); TODO confirm against
    # aNscan._prepare.
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                integ_time, slow_down, **opts):
        """Delegate to the generic preparation in hardware-timed mode."""
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [f1, f2, f3, f4],
                      slow_down, integ_time, mode=ContinuousHwTimeMode, **opts)
class meshct(Macro, Hookable):
    """2d grid scan.
    The mesh scan traces out a grid using motor1 and motor2.
    The first motor scans in continuous mode from m1_start_pos to m1_final_pos
    using the specified number of intervals. The second motor similarly
    scans from m2_start_pos to m2_final_pos but it does not move during the
    continuous scan. Each point is counted for integ_time seconds
    (or monitor counts, if integ_time is negative).
    The scan of motor1 is done at each point scanned by motor2. That is, the
    first motor scan is nested within the second motor scan.
    """
    hints = {"scan": "meshct",
             "allowsHooks": ("pre-scan", "pre-configuration",
                             "post-configuration", "pre-move",
                             "post-move", "pre-acq", "pre-start",
                             "post-acq", "pre-cleanup", "post-cleanup",
                             "post-scan")}
    env = ('ActiveMntGrp',)
    param_def = [
        ['motor1', Type.Moveable, None, 'First motor to move'],
        ['m1_start_pos', Type.Float, None, 'Scan start position for first '
                                           'motor'],
        ['m1_final_pos', Type.Float, None, 'Scan final position for first '
                                           'motor'],
        ['m1_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['motor2', Type.Moveable, None, 'Second motor to move'],
        ['m2_start_pos', Type.Float, None, 'Scan start position for second '
                                           'motor'],
        ['m2_final_pos', Type.Float, None, 'Scan final position for second '
                                           'motor'],
        ['m2_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['bidirectional', Type.Boolean, False, 'Save time by scanning '
                                               's-shaped'],
        ['latency_time', Type.Float, 0, 'Latency time']
    ]

    def prepare(self, m1, m1_start_pos, m1_final_pos, m1_nr_interv,
                m2, m2_start_pos, m2_final_pos, m2_nr_interv, integ_time,
                bidirectional, latency_time, **opts):
        """Precompute the grid waypoints and build the hardware-timed
        continuous scan engine (CTScan)."""
        self.motors = [m1, m2]
        self.starts = numpy.array([m1_start_pos, m2_start_pos], dtype='d')
        self.finals = numpy.array([m1_final_pos, m2_final_pos], dtype='d')
        self.nr_intervs = numpy.array([m1_nr_interv, m2_nr_interv], dtype='i')
        # Number of intervals of the first motor which is doing the
        # continuous scan.
        self.nr_interv = m1_nr_interv
        self.nb_points = self.nr_interv + 1
        self.integ_time = integ_time
        self.bidirectional_mode = bidirectional
        # Prepare the waypoints: one motor1 sweep per motor2 position.
        m1start, m2start = self.starts
        m1end, m2end = self.finals
        points1, points2 = self.nr_intervs + 1
        m2_space = numpy.linspace(m2start, m2end, points2)
        self.waypoints = []
        self.starts_points = []
        for i, m2pos in enumerate(m2_space):
            self.starts_points.append(numpy.array([m1start, m2pos], dtype='d'))
            self.waypoints.append(numpy.array([m1end, m2pos], dtype='d'))
            # In bidirectional mode every other row sweeps backwards.
            if self.bidirectional_mode:
                m1start, m1end = m1end, m1start
        self.name = opts.get('name', 'meshct')
        moveables = []
        for m, start, final in zip(self.motors, self.starts, self.finals):
            moveables.append(MoveableDesc(moveable=m, min_value=min(
                start, final), max_value=max(start, final)))
        moveables[0].is_reference = True
        env = opts.get('env', {})
        # Effective latency is the larger of the user-supplied value and
        # the measurement group's own latency time.
        mg_name = self.getEnv('ActiveMntGrp')
        mg = self.getMeasurementGroup(mg_name)
        mg_latency_time = mg.getLatencyTime()
        if mg_latency_time > latency_time:
            self.info("Choosing measurement group latency time: %f" %
                      mg_latency_time)
            latency_time = mg_latency_time
        self.latency_time = latency_time
        constrains = [getCallable(cns) for cns in opts.get('constrains',
                                                           [UNCONSTRAINED])]
        extrainfodesc = opts.get('extrainfodesc', [])
        # Hooks are not always set at this point. We will call getHooks
        # later on in the scan_loop:
        # self.pre_scan_hooks = self.getHooks('pre-scan')
        # self.post_scan_hooks = self.getHooks('post-scan')
        self._gScan = CTScan(self, self._generator, moveables, env, constrains,
                             extrainfodesc)
        # _data is the default member where the Macro class stores the data.
        # Assign the data produced by GScan (or its subclasses) to it so all
        # the Macro infrastructure related to the data works e.g. getter,
        # property, etc.
        self.setData(self._gScan.data)

    def _generator(self):
        """Yield one waypoint per motor2 row, expanding positions through
        the moveable trees so pseudo motors map to their physical motors."""
        moveables_trees = self._gScan.get_moveables_trees()
        step = {}
        step["pre-move-hooks"] = self.getHooks('pre-move')
        post_move_hooks = self.getHooks(
            'post-move') + [self._fill_missing_records]
        step["post-move-hooks"] = post_move_hooks
        step["check_func"] = []
        step["active_time"] = self.nb_points * (self.integ_time
                                                + self.latency_time)
        points1, _ = self.nr_intervs + 1
        for i, waypoint in enumerate(self.waypoints):
            # First point id of this row; used for final record padding.
            self.point_id = points1 * i
            step["waypoint_id"] = i
            self.starts = self.starts_points[i]
            self.finals = waypoint
            step["positions"] = []
            step["start_positions"] = []
            for start, end, moveable_tree in zip(self.starts, self.finals,
                                                 moveables_trees):
                moveable_root = moveable_tree.root()
                start_positions, end_positions = _calculate_positions(
                    moveable_root, start, end)
                step["start_positions"] += start_positions
                step["positions"] += end_positions
            yield step

    def run(self, *args):
        """Execute the scan by exhausting the generic scan engine."""
        for step in self._gScan.step_scan():
            yield step

    def getTimeEstimation(self):
        # No time estimation implemented for this scan type.
        return 0.0

    def getIntervalEstimation(self):
        """One interval per motor2 waypoint (row)."""
        return len(self.waypoints)

    def _fill_missing_records(self):
        # fill record list with dummy records for the final padding
        nb_of_points = self.nb_points
        scan = self._gScan
        nb_of_total_records = len(scan.data.records)
        nb_of_records = nb_of_total_records - self.point_id
        missing_records = nb_of_points - nb_of_records
        scan.data.initRecords(missing_records)

    def _get_nr_points(self):
        """Deprecated alias for :attr:`nb_points`; warns on access."""
        msg = ("nr_points is deprecated since version 3.0.3. "
               "Use nb_points instead.")
        self.warning(msg)
        return self.nb_points

    nr_points = property(_get_nr_points)
class timescan(Macro, Hookable):
    """Do a time scan over the specified time intervals. The scan starts
    immediately. The number of data points collected will be nr_interv + 1.
    Count time is given by integ_time. Latency time will be the longer one
    of latency_time and measurement group latency time.
    """

    hints = {'scan': 'timescan',
             'allowsHooks': ('pre-scan', 'pre-acq', 'post-acq', 'post-scan')}

    param_def = [
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['latency_time', Type.Float, 0, 'Latency time']]

    def prepare(self, nr_interv, integ_time, latency_time):
        """Store the scan parameters and build the time-scan engine."""
        self.nr_interv = nr_interv
        self.nb_points = nr_interv + 1
        self.integ_time = integ_time
        self.latency_time = latency_time
        self._gScan = TScan(self)
        # Expose the data produced by GScan through the standard Macro data
        # machinery (getter, property, etc.).
        self.setData(self._gScan.data)

    def run(self, *args):
        """Execute the scan by exhausting the time-scan engine."""
        yield from self._gScan.step_scan()

    def getTimeEstimation(self):
        """Estimate duration: one (integration + latency) slot per point,
        where latency is the longer of the user value and the measurement
        group's own latency time."""
        mg_latency = self._gScan.measurement_group.getLatencyTime()
        effective_latency = max(self.latency_time, mg_latency)
        return self.nb_points * (self.integ_time + effective_latency)

    def getIntervalEstimation(self):
        """Number of intervals, exactly as given by the user."""
        return self.nr_interv

    def _get_nr_points(self):
        # Deprecated alias kept for backward compatibility.
        msg = ("nr_points is deprecated since version 3.0.3. "
               "Use nb_points instead.")
        self.warning(msg)
        return self.nb_points

    nr_points = property(_get_nr_points)
class scanstats(Macro):
    """Calculate basic statistics of the enabled and plotted channels in
    the active measurement group for the last scan. If no channel is selected
    for plotting it fallbacks to the first enabled channel. Print stats and
    publish them in the env.
    The macro must be hooked in the post-scan hook place.
    """
    env = ("ActiveMntGrp", )
    param_def = [
        ["channel",
         [["channel", Type.ExpChannel, None, ""], {"min": 0}],
         None,
         "List of channels for statistics calculations"
         ]
    ]

    def run(self, channel):
        """Select the channels, compute the statistics over the parent
        scan's data and publish them on the door environment."""
        parent = self.getParentMacro()
        # Only meaningful as a post-scan hook: the parent scan provides
        # the motors and the recorded data.
        if not parent:
            self.warning("for now the scanstats macro can only be executed as"
                         " a post-scan hook")
            return
        if not hasattr(parent, "motors"):
            self.warning("scan must involve at least one moveable "
                         "to calculate statistics")
            return
        active_meas_grp = self.getEnv("ActiveMntGrp")
        meas_grp = self.getMeasurementGroup(active_meas_grp)
        calc_channels = []
        enabled_channels = meas_grp.getEnabled()
        if channel:
            stat_channels = [chan.name for chan in channel]
        else:
            stat_channels = [key for key in enabled_channels.keys()]
        for chan in stat_channels:
            enabled = enabled_channels.get(chan)
            if enabled is None:
                self.warning("{} not in {}".format(chan, meas_grp.name))
            else:
                if not enabled and channel:
                    self.warning("{} not enabled".format(chan))
                elif enabled and channel:
                    # channel was given as parameters
                    calc_channels.append(chan)
                elif enabled and meas_grp.getPlotType(chan)[chan] == 1:
                    calc_channels.append(chan)
        if len(calc_channels) == 0:
            # fallback is first enabled channel in meas_grp
            calc_channels.append(next(iter(enabled_channels)))
        # Statistics only make sense for scalar channels; drop 1D/2D ones.
        scalar_channels = []
        for _, chan in self.getExpChannels().items():
            if chan.type in ("OneDExpChannel", "TwoDExpChannel"):
                continue
            scalar_channels.append(chan.name)
        calc_channels = [ch for ch in calc_channels if ch in scalar_channels]
        if len(calc_channels) == 0:
            self.warning("measurement group must contain at least one "
                         "enabled scalar channel to calculate statistics")
            return
        # Statistics are computed against the first scanned moveable.
        selected_motor = str(parent.motors[0])
        stats = {}
        col_header = []
        cols = []
        motor_data = []
        channels_data = {}
        for channel_name in calc_channels:
            channels_data[channel_name] = []
        # Collect motor positions and channel readings per scan record.
        for idx, rc in parent.data.items():
            motor_data.append(rc[selected_motor])
            for channel_name in calc_channels:
                channels_data[channel_name].append(rc[channel_name])
        motor_data = numpy.array(motor_data)
        for channel_name, data in channels_data.items():
            channel_data = numpy.array(data)
            (_min, _max, min_at, max_at, half_max, com, mean, _int,
             fwhm, cen) = self._calcStats(motor_data, channel_data)
            stats[channel_name] = {
                "min": _min,
                "max": _max,
                "minpos": min_at,
                "maxpos": max_at,
                "mean": mean,
                "int": _int,
                "com": com,
                "fwhm": fwhm,
                "cen": cen}
            col_header.append([channel_name])
            cols.append([
                stats[channel_name]["min"],
                stats[channel_name]["max"],
                stats[channel_name]["minpos"],
                stats[channel_name]["maxpos"],
                stats[channel_name]["mean"],
                stats[channel_name]["int"],
                stats[channel_name]["com"],
                stats[channel_name]["fwhm"],
                stats[channel_name]["cen"],
            ])
        self.info("Statistics for movable: {:s}".format(selected_motor))
        table = Table(elem_list=cols, elem_fmt=["%*g"],
                      row_head_str=["MIN", "MAX", "MIN@", "MAX@",
                                    "MEAN", "INT", "COM", "FWHM", "CEN"],
                      col_head_str=col_header, col_head_sep="-")
        out = table.genOutput()
        for line in out:
            self.info(line)
        # Publish the stats on the door so clients/GUIs can pick them up.
        self.setEnv("{:s}.ScanStats".format(self.getDoorName()),
                    {"Stats": stats,
                     "Motor": selected_motor,
                     "ScanID": self.getEnv("ScanID")})

    @staticmethod
    def _calcStats(x, y):
        """Return (min, max, min@, max@, half_max, com, mean, int, fwhm, cen)
        of the curve y(x).

        FWHM/CEN handle both peak-like and erf-like (step) curves; for the
        latter the gradient of y is analyzed instead of y itself.
        """
        # max and min
        _min = numpy.min(y)
        _max = numpy.max(y)
        min_idx = numpy.argmin(y)
        min_at = x[min_idx]
        max_idx = numpy.argmax(y)
        max_at = x[max_idx]
        # center of mass (com)
        # NOTE(review): with numpy operands a zero denominator produces
        # nan/inf with a RuntimeWarning rather than raising
        # ZeroDivisionError, so this fallback may never trigger — confirm.
        try:
            com = numpy.sum(y*x)/numpy.sum(y)
        except ZeroDivisionError:
            com = 0
        mean = numpy.mean(y)
        _int = numpy.sum(y)
        # determine if it is a peak- or erf-like function
        half_max = (_max-_min)/2+_min
        lower_left = False
        lower_right = False
        if numpy.any(y[0:max_idx] < half_max):
            lower_left = True
        if numpy.any(y[max_idx:] < half_max):
            lower_right = True
        if lower_left and lower_right:
            # it is a peak-like function
            y_data = y
        elif lower_left:
            # it is an erf-like function (rising step)
            # use the gradient for further calculation
            y_data = numpy.gradient(y)
            # use also the half maximum of the gradient
            half_max = (numpy.max(y_data)-numpy.min(y_data)) \
                / 2+numpy.min(y_data)
        else:
            # it is an erf-like function (falling step)
            # use the gradient for further calculation
            y_data = -1*numpy.gradient(y)
            # use also the half maximum of the gradient
            half_max = (numpy.max(y_data)-numpy.min(y_data)) \
                / 2+numpy.min(y_data)
        # cen and fwhm
        # this part is adapted from:
        #
        # The PyMca X-Ray Fluorescence Toolkit
        #
        # Copyright (c) 2004-2014 European Synchrotron Radiation Facility
        #
        # This file is part of the PyMca X-ray Fluorescence Toolkit developed
        # at the ESRF by the Software group.
        max_idx_data = numpy.argmax(y_data)
        # Walk left from the maximum to the lower half-maximum crossing and
        # interpolate the crossing abscissa.
        idx = max_idx_data
        try:
            while y_data[idx] >= half_max:
                idx = idx-1
            x0 = x[idx]
            x1 = x[idx+1]
            y0 = y_data[idx]
            y1 = y_data[idx+1]
            lhmx = (half_max*(x1-x0) - (y0*x1)+(y1*x0)) / (y1-y0)
        except ZeroDivisionError:
            lhmx = 0
        except IndexError:
            # Ran off the left edge: clamp to the first abscissa.
            lhmx = x[0]
        # Walk right from the maximum to the upper half-maximum crossing.
        idx = max_idx_data
        try:
            while y_data[idx] >= half_max:
                idx = idx+1
            x0 = x[idx-1]
            x1 = x[idx]
            y0 = y_data[idx-1]
            y1 = y_data[idx]
            uhmx = (half_max*(x1-x0) - (y0*x1)+(y1*x0)) / (y1-y0)
        except ZeroDivisionError:
            uhmx = 0
        except IndexError:
            # Ran off the right edge: clamp to the last abscissa.
            uhmx = x[-1]
        fwhm = uhmx - lhmx
        cen = (uhmx + lhmx)/2
        return (_min, _max, min_at, max_at, half_max, com, mean, _int,
                fwhm, cen)
| 43.126718 | 99 | 0.587436 | id value for mode %s' % mode)
self._data = self._gScan.data
    def _stepGenerator(self):
        """Yield one step per scan point (StepMode).

        Each step carries the integration time, the hook lists, an empty
        check-function list and the absolute target positions computed by
        stepping from ``self.starts`` in increments of
        ``self.interv_sizes``.
        """
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = self.getHooks('post-acq') + self.getHooks(
            '_NOHINTS_')
        step["post-step-hooks"] = self.getHooks('post-step')
        step["check_func"] = []
        for point_no in range(self.nb_points):
            step["positions"] = self.starts + point_no * self.interv_sizes
            step["point_id"] = point_no
            yield step
    def _waypoint_generator(self):
        """Yield one waypoint per software-continuous sweep (ContinuousMode),
        stepping from ``self.starts`` in increments of ``self.way_lengths``."""
        step = {}
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["check_func"] = []
        step["slow_down"] = self.slow_down
        for point_no in range(self.nr_waypoints):
            step["positions"] = self.starts + point_no * self.way_lengths
            step["waypoint_id"] = point_no
            yield step
    def _waypoint_generator_hwtime(self):
        """Yield waypoints for hardware-timed continuous mode.

        Start/end positions are expanded through the moveable trees so
        physical motors underlying pseudo motors get their own entries;
        each waypoint starts where the previous one ended.
        """
        moveables_trees = self._gScan.get_moveables_trees()
        step = {}
        step["pre-move-hooks"] = self.getHooks('pre-move')
        post_move_hooks = self.getHooks(
            'post-move') + [self._fill_missing_records]
        step["post-move-hooks"] = post_move_hooks
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = self.getHooks('post-acq') + self.getHooks(
            '_NOHINTS_')
        step["check_func"] = []
        step["active_time"] = self.nb_points * (self.integ_time
                                                + self.latency_time)
        step["positions"] = []
        step["start_positions"] = []
        starts = self.starts
        for point_no, waypoint in enumerate(self.waypoints):
            for start, end, moveable_tree in zip(starts, waypoint,
                                                 moveables_trees):
                moveable_root = moveable_tree.root()
                start_positions, end_positions = _calculate_positions(
                    moveable_root, start, end)
                step["start_positions"] += start_positions
                step["positions"] += end_positions
            step["waypoint_id"] = point_no
            starts = waypoint
            yield step
    def _period_generator(self):
        """Yield acquisition periods indefinitely; the scan engine decides
        when to stop consuming them."""
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = (self.getHooks('post-acq') +
                                  self.getHooks('_NOHINTS_'))
        step["post-step-hooks"] = self.getHooks('post-step')
        step["check_func"] = []
        step['extrainfo'] = {}
        point_no = 0
        while(True):
            point_no += 1
            step["point_id"] = point_no
            yield step
    def run(self, *args):
        """Execute the scan by exhausting the generic scan engine."""
        for step in self._gScan.step_scan():
            yield step
    def getTimeEstimation(self):
        """Estimate the total scan duration in seconds.

        StepMode: slowest-motor travel time for the initial move plus
        ``nr_interv`` inter-point moves, plus acquisition time for all
        points.  ContinuousMode: delegated to the scan engine's waypoint
        estimation.  Any other mode falls through and returns 0.0.
        """
        gScan = self._gScan
        mode = self.mode
        it = gScan.generator()
        v_motors = gScan.get_virtual_motors()
        curr_pos = gScan.motion.readPosition()
        total_time = 0.0
        if mode == StepMode:
            # The first step travels from the current position; every
            # subsequent step covers exactly one interval length.
            max_step0_time, max_step_time = 0.0, 0.0
            step0 = next(it)
            for v_motor, start, stop, length in zip(v_motors, curr_pos,
                                                    step0['positions'],
                                                    self.interv_sizes):
                path0 = MotionPath(v_motor, start, stop)
                path = MotionPath(v_motor, 0, length)
                # Motors move simultaneously: the slowest one dominates.
                max_step0_time = max(max_step0_time, path0.duration)
                max_step_time = max(max_step_time, path.duration)
            motion_time = max_step0_time + self.nr_interv * max_step_time
            acq_time = self.nb_points * self.integ_time
            total_time = motion_time + acq_time
        elif mode == ContinuousMode:
            total_time = gScan.waypoint_estimation()
        return total_time
def getIntervalEstimation(self):
mode = self.mode
if mode in [StepMode, ContinuousHwTimeMode, HybridMode]:
return self.nr_interv
elif mode == ContinuousMode:
return self.nr_waypoints
def _fill_missing_records(self):
nb_of_points = self.nb_points
scan = self._gScan
nb_of_records = len(scan.data.records)
missing_records = nb_of_points - nb_of_records
scan.data.initRecords(missing_records)
def _get_nr_points(self):
msg = ("nr_points is deprecated since version 3.0.3. "
"Use nb_points instead.")
self.warning(msg)
return self.nb_points
nr_points = property(_get_nr_points)
class dNscan(aNscan):
    """Base for relative (delta) N-dimensional scans.

    Start/end positions are interpreted as offsets from the motors'
    current positions, which are restored when the scan finishes.
    """

    hints = copy.deepcopy(aNscan.hints)
    hints['scan'] = 'dNscan'

    def _prepare(self, motorlist, startlist, endlist, scan_length,
                 integ_time, mode=StepMode, **opts):
        # Remember where the motors are now; deltas are applied on top.
        motor_names = [m.getName() for m in motorlist]
        self._motion = self.getMotion(motor_names)
        self.originalPositions = numpy.array(
            self._motion.readPosition(force=True))
        abs_starts = self.originalPositions + numpy.array(startlist, dtype='d')
        abs_finals = self.originalPositions + numpy.array(endlist, dtype='d')
        aNscan._prepare(self, motorlist, abs_starts, abs_finals,
                        scan_length, integ_time, mode=mode, **opts)

    def do_restore(self):
        """Move the motors back to their pre-scan positions."""
        self.info("Returning to start positions...")
        self._motion.move(self.originalPositions)
class ascan(aNscan, Macro):
    """Absolute step scan of one motor with acquisition at each point."""

    param_def = [
        ['motor', Type.Moveable, None, 'Moveable to move'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, motor, start_pos, final_pos, nr_interv, integ_time,
                **opts):
        # Wrap the single axis in lists and delegate to the N-dim base.
        motors = [motor]
        starts = [start_pos]
        finals = [final_pos]
        self._prepare(motors, starts, finals, nr_interv, integ_time, **opts)
class a2scan(aNscan, Macro):
    """Absolute step scan of two motors moved simultaneously."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, motor1, start_pos1, final_pos1, motor2, start_pos2,
                final_pos2, nr_interv, integ_time, **opts):
        motors = [motor1, motor2]
        starts = [start_pos1, start_pos2]
        finals = [final_pos1, final_pos2]
        self._prepare(motors, starts, finals, nr_interv, integ_time, **opts)
class a3scan(aNscan, Macro):
    """Absolute step scan of three motors moved simultaneously."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, nr_interv,
                integ_time, **opts):
        motors = [m1, m2, m3]
        starts = [s1, s2, s3]
        finals = [f1, f2, f3]
        self._prepare(motors, starts, finals, nr_interv, integ_time, **opts)
class a4scan(aNscan, Macro):
    """Absolute step scan of four motors moved simultaneously."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        # Fix: the three descriptions below previously said "3"
        # (copy-paste error in the user-visible help strings).
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                nr_interv, integ_time, **opts):
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4],
                      [f1, f2, f3, f4], nr_interv, integ_time, **opts)
class amultiscan(aNscan, Macro):
    """Absolute step scan of an arbitrary number of motors.

    Arguments arrive as (motor, start, end) triplets followed by the
    number of intervals and the integration time.
    """

    param_def = [
        ['motor_start_end_list',
         [['motor', Type.Moveable, None, 'Moveable to move'],
          ['start', Type.Float, None, 'Starting position'],
          ['end', Type.Float, None, 'Final position']],
         None, 'List of motor, start and end positions'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, *args, **opts):
        nr_interv, integ_time = args[-2], args[-1]
        # Everything before the last two args is the flattened triplets.
        triplets = args[:-2]
        motors = triplets[0::3]
        starts = triplets[1::3]
        ends = triplets[2::3]
        self._prepare(motors, starts, ends, nr_interv, integ_time, **opts)
class dmultiscan(dNscan, Macro):
    """Relative (delta) step scan of an arbitrary number of motors.

    Arguments arrive as (motor, start, end) triplets followed by the
    number of intervals and the integration time; positions are offsets
    from the current motor positions.
    """

    param_def = [
        ['motor_start_end_list',
         [['motor', Type.Moveable, None, 'Moveable to move'],
          ['start', Type.Float, None, 'Starting position'],
          ['end', Type.Float, None, 'Final position']],
         None, 'List of motor, start and end positions'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, *args, **opts):
        nr_interv, integ_time = args[-2], args[-1]
        # Everything before the last two args is the flattened triplets.
        triplets = args[:-2]
        motors = triplets[0::3]
        starts = triplets[1::3]
        ends = triplets[2::3]
        self._prepare(motors, starts, ends, nr_interv, integ_time, **opts)
class dscan(dNscan, Macro):
    """Relative step scan of one motor; the motor returns to its original
    position at the end."""

    param_def = [
        ['motor', Type.Moveable, None, 'Moveable to move'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, motor, start_pos, final_pos, nr_interv, integ_time,
                **opts):
        motors = [motor]
        starts = [start_pos]
        finals = [final_pos]
        self._prepare(motors, starts, finals, nr_interv, integ_time, **opts)
class d2scan(dNscan, Macro):
    """Relative step scan of two motors moved simultaneously."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, motor1, start_pos1, final_pos1, motor2, start_pos2,
                final_pos2, nr_interv, integ_time, **opts):
        motors = [motor1, motor2]
        starts = [start_pos1, start_pos2]
        finals = [final_pos1, final_pos2]
        self._prepare(motors, starts, finals, nr_interv, integ_time, **opts)
class d3scan(dNscan, Macro):
    """Relative step scan of three motors moved simultaneously."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, nr_interv,
                integ_time, **opts):
        motors = [m1, m2, m3]
        starts = [s1, s2, s3]
        finals = [f1, f2, f3]
        self._prepare(motors, starts, finals, nr_interv, integ_time, **opts)
class d4scan(dNscan, Macro):
    """Relative step scan of four motors moved simultaneously; motors
    return to their original positions at the end."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        # Fix: the three descriptions below previously said "3"
        # (copy-paste error in the user-visible help strings).
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                nr_interv, integ_time, **opts):
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4],
                      [f1, f2, f3, f4], nr_interv, integ_time, **opts)
class mesh(Macro, Hookable):
    """2D grid scan: motor1 (fast axis) is scanned over its full range at
    every point of motor2 (slow axis).

    With ``bidirectional`` True, odd rows of the fast axis run in the
    reverse direction (s-shaped path) to save travel time.
    """
    hints = {'scan': 'mesh', 'allowsHooks': ('pre-scan', 'pre-move',
                                             'post-move', 'pre-acq',
                                             'post-acq', 'post-step',
                                             'post-scan')}
    env = ('ActiveMntGrp',)

    param_def = [
        ['motor1', Type.Moveable, None, 'First motor to move'],
        ['m1_start_pos', Type.Float, None, 'Scan start position for first '
                                           'motor'],
        ['m1_final_pos', Type.Float, None, 'Scan final position for first '
                                           'motor'],
        ['m1_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['motor2', Type.Moveable, None, 'Second motor to move'],
        ['m2_start_pos', Type.Float, None, 'Scan start position for second '
                                           'motor'],
        ['m2_final_pos', Type.Float, None, 'Scan final position for second '
                                           'motor'],
        ['m2_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['bidirectional', Type.Boolean, False, 'Save time by scanning '
                                               's-shaped']
    ]

    def prepare(self, m1, m1_start_pos, m1_final_pos, m1_nr_interv,
                m2, m2_start_pos, m2_final_pos, m2_nr_interv, integ_time,
                bidirectional, **opts):
        """Store the grid parameters and build the step-scan engine."""
        self.motors = [m1, m2]
        self.starts = numpy.array([m1_start_pos, m2_start_pos], dtype='d')
        self.finals = numpy.array([m1_final_pos, m2_final_pos], dtype='d')
        self.nr_intervs = numpy.array([m1_nr_interv, m2_nr_interv], dtype='i')
        # One point per node of the (interv1+1) x (interv2+1) lattice.
        self.nb_points = (m1_nr_interv + 1) * (m2_nr_interv + 1)
        self.integ_time = integ_time
        self.bidirectional_mode = bidirectional
        self.name = opts.get('name', 'mesh')
        generator = self._generator
        moveables = []
        for m, start, final in zip(self.motors, self.starts, self.finals):
            moveables.append(MoveableDesc(moveable=m,
                                          min_value=min(start, final),
                                          max_value=max(start, final)))
        # The fast axis is the reference moveable for plotting/recording.
        moveables[0].is_reference = True
        env = opts.get('env', {})
        constrains = [getCallable(cns) for cns in opts.get(
            'constrains', [UNCONSTRAINED])]
        self._gScan = SScan(self, generator, moveables, env, constrains)
        self.setData(self._gScan.data)

    def _generator(self):
        """Yield one step per grid point, row by row along the fast axis.

        Note: the same ``step`` dict is reused and mutated between yields.
        """
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = (self.getHooks('post-acq') +
                                  self.getHooks('_NOHINTS_'))
        step["post-step-hooks"] = self.getHooks('post-step')
        step["check_func"] = []
        m1start, m2start = self.starts
        m1end, m2end = self.finals
        points1, points2 = self.nr_intervs + 1
        point_no = 1
        # Precompute forward and reverse fast-axis rows once.
        m1_space = numpy.linspace(m1start, m1end, points1)
        m1_space_inv = numpy.linspace(m1end, m1start, points1)
        for i, m2pos in enumerate(numpy.linspace(m2start, m2end, points2)):
            space = m1_space
            # In bidirectional mode, odd rows run in reverse (zigzag).
            if i % 2 != 0 and self.bidirectional_mode:
                space = m1_space_inv
            for m1pos in space:
                step["positions"] = numpy.array([m1pos, m2pos])
                step["point_id"] = point_no
                point_no += 1
                yield step

    def run(self, *args):
        """Execute the scan by re-yielding every step from the engine."""
        for step in self._gScan.step_scan():
            yield step
class dmesh(mesh):
    """Relative 2D grid scan: positions are offsets from the motors'
    current positions, which are restored at the end."""

    hints = copy.deepcopy(mesh.hints)
    hints['scan'] = 'dmesh'
    env = copy.deepcopy(mesh.env)

    param_def = [
        ['motor1', Type.Moveable, None, 'First motor to move'],
        ['m1_start_pos', Type.Float, None, 'Scan start position for first '
                                           'motor'],
        ['m1_final_pos', Type.Float, None, 'Scan final position for first '
                                           'motor'],
        ['m1_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['motor2', Type.Moveable, None, 'Second motor to move'],
        ['m2_start_pos', Type.Float, None, 'Scan start position for second '
                                           'motor'],
        ['m2_final_pos', Type.Float, None, 'Scan final position for second '
                                           'motor'],
        ['m2_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['bidirectional', Type.Boolean, False, 'Save time by scanning '
                                               's-shaped']
    ]

    def prepare(self, m1, m1_start_pos, m1_final_pos, m1_nr_interv,
                m2, m2_start_pos, m2_final_pos, m2_nr_interv, integ_time,
                bidirectional, **opts):
        # Capture the current positions; the deltas are applied on top
        # and absolute positions are handed to the base mesh.
        self._motion = self.getMotion([m1, m2])
        self.originalPositions = numpy.array(
            self._motion.readPosition(force=True))
        origin1, origin2 = self.originalPositions
        mesh.prepare(self, m1, origin1 + m1_start_pos,
                     origin1 + m1_final_pos, m1_nr_interv,
                     m2, origin2 + m2_start_pos, origin2 + m2_final_pos,
                     m2_nr_interv, integ_time, bidirectional, **opts)

    def do_restore(self):
        """Move the motors back to their pre-scan positions."""
        self.info("Returning to start positions...")
        self._motion.move(self.originalPositions)
class fscan(Macro, Hookable):
    """Scan along user-defined paths.

    Each motor follows a path produced by evaluating a python expression
    (*func*) over the independent variables given in *indepvars*;
    *integ_time* is an expression yielding a scalar or one value per
    point.
    """
    hints = {'scan': 'fscan',
             'allowsHooks': ('pre-scan', 'pre-move', 'post-move', 'pre-acq',
                             'post-acq', 'post-step', 'post-scan')}
    env = ('ActiveMntGrp',)

    param_def = [
        ['indepvars', Type.String, None, 'Independent Variables'],
        ['integ_time', Type.String, None, 'Integration time'],
        ['motor_funcs',
         [['motor', Type.Moveable, None, 'motor'],
          ['func', Type.String, None, 'curve defining path']],
         None, 'List of motor and path curves']
    ]

    def prepare(self, *args, **opts):
        """Evaluate the path expressions and build the step-scan engine.

        Raises ValueError when motor/func lists mismatch, when paths
        yield different numbers of points, or when the integration-time
        expression has the wrong length.
        """
        # "!", "*" or "none" means: no independent variables at all.
        if args[0].lower() in ["!", "*", "none", None]:
            indepvars = {}
        else:
            # Parse "name=values, ..." into a dict via a sandboxed eval.
            indepvars = SafeEvaluator({'dict': dict}).eval(
                'dict(%s)' % args[0])
        self.motors = [item[0] for item in args[2]]
        self.funcstrings = [item[1] for item in args[2]]
        # One evaluation namespace per point, binding each indepvar name
        # to its value at that point.
        globals_lst = [dict(list(zip(indepvars, values)))
                       for values in zip(*list(indepvars.values()))]
        self.paths = [[SafeEvaluator(globals).eval(
            func) for globals in globals_lst] for func in self.funcstrings]
        # NOTE(review): raw eval() on a user-supplied string; unlike the
        # paths above it does not go through SafeEvaluator — potential
        # code-execution risk, confirm the input is trusted.
        self._integ_time = numpy.array(eval(args[1]), dtype='d')
        self.opts = opts
        if len(self.motors) == len(self.paths) > 0:
            self.N = len(self.motors)
        else:
            raise ValueError(
                'Moveable and func lists must be non-empty and same length')
        npoints = len(self.paths[0])
        try:
            self.paths = numpy.array(self.paths, dtype='d')
            self.paths.reshape((self.N, npoints))
        except Exception:
            # Report which func produced a mismatched number of points.
            for p, fs in zip(self.paths, self.funcstrings):
                if len(p) != npoints:
                    raise ValueError('"%s" and "%s" yield different number '
                                     'of points (%i vs %i)' %
                                     (self.funcstrings[0], fs, npoints,
                                      len(p)))
            raise
        self._nb_points = npoints
        if self._integ_time.size == 1:
            self._integ_time = self._integ_time * \
                numpy.ones(self._nb_points)  # extend integ_time
        elif self._integ_time.size != self._nb_points:
            raise ValueError('time_integ must either be a scalar or '
                             'length=npoints (%i)' % self._nb_points)
        self.name = opts.get('name', 'fscan')
        generator = self._generator
        moveables = self.motors
        env = opts.get('env', {})
        constrains = [getCallable(cns) for cns in opts.get(
            'constrains', [UNCONSTRAINED])]
        # Hooks are not always set at this point. We will call getHooks
        # later on in the scan_loop
        # self.pre_scan_hooks = self.getHooks('pre-scan')
        # self.post_scan_hooks = self.getHooks('post-scan'
        self._gScan = SScan(self, generator, moveables, env, constrains)
        # _data is the default member where the Macro class stores the data.
        # Assign the date produced by GScan (or its subclasses) to it so all
        # the Macro infrastructure related to the data works e.g. getter,
        # property, etc.
        self.setData(self._gScan.data)

    def _generator(self):
        """Yield one step per path point (the same dict is reused)."""
        step = {}
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = (self.getHooks('post-acq') +
                                  self.getHooks('_NOHINTS_'))
        step["post-step-hooks"] = self.getHooks('post-step')
        step["check_func"] = []
        for i in range(self._nb_points):
            # Column i of the (N motors x npoints) path matrix.
            step["positions"] = self.paths[:, i]
            step["integ_time"] = self._integ_time[i]
            step["point_id"] = i
            yield step

    def run(self, *args):
        for step in self._gScan.step_scan():
            yield step

    def _get_nr_points(self):
        # Deprecated alias kept for backward compatibility.
        msg = ("nr_points is deprecated since version 3.0.3. "
               "Use nb_points instead.")
        self.warning(msg)
        return self.nb_points

    nr_points = property(_get_nr_points)
class ascanh(aNscan, Macro):
    """One-motor absolute scan run in hybrid mode.

    Same parameters as :class:`ascan`, but the underlying scan is
    prepared with ``mode=HybridMode`` instead of the default step mode.
    """

    param_def = [
        ['motor', Type.Moveable, None, 'Moveable to move'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, motor, start_pos, final_pos, nr_interv, integ_time,
                **opts):
        self._prepare([motor], [start_pos], [final_pos], nr_interv,
                      integ_time, mode=HybridMode, **opts)
class rscan(Macro, Hookable):
    """Region scan: scan one motor through consecutive regions, each with
    its own number of intervals, acquiring *integ_time* per point."""
    hints = {'scan': 'rscan', 'allowsHooks': ('pre-scan', 'pre-move',
                                              'post-move', 'pre-acq',
                                              'post-acq', 'post-step',
                                              'post-scan')}
    # env = ('ActiveMntGrp',)

    param_def = [
        ['motor', Type.Moveable, None, 'Motor to move'],
        ['start_pos', Type.Float, None, 'Start position'],
        ['regions',
         [['next_pos', Type.Float, None, 'next position'],
          ['region_nr_intervals', Type.Integer, None,
           'Region number of intervals']],
         None, 'List of tuples: (next_pos, region_nr_intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, motor, start_pos, regions, integ_time, **opts):
        """Store scan parameters and build the step-scan engine."""
        self.name = 'rscan'
        self.integ_time = integ_time
        self.start_pos = start_pos
        self.regions = regions
        # NOTE(review): _generator reads each entry of ``regions`` as a
        # (pos, intervals) pair, so halving len(regions) here looks like a
        # leftover from a flat-list layout; the attribute is unused in
        # this class — confirm before relying on it.
        self.regions_count = len(self.regions) // 2
        generator = self._generator
        self.motors = [motor]
        env = opts.get('env', {})
        constrains = []
        self._gScan = SScan(self, generator, self.motors, env, constrains)
        self._data = self._gScan.data

    def _generator(self):
        """Yield one step per point across all regions (shared step dict);
        each region's positions are linearly spaced, and the shared
        boundary point between regions is emitted only once."""
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = self.getHooks('post-acq') + self.getHooks(
            '_NOHINTS_')
        step["post-step-hooks"] = self.getHooks('post-step')
        point_id = 0
        region_start = self.start_pos
        for r in range(len(self.regions)):
            region_stop, region_nr_intervals = self.regions[
                r][0], self.regions[r][1]
            positions = numpy.linspace(
                region_start, region_stop, region_nr_intervals + 1)
            if point_id != 0:
                # positions must be calculated from the start to the end of the region
                # but after the first region, the 'start' point must not be
                # repeated
                positions = positions[1:]
            for p in positions:
                step['positions'] = [p]
                step['point_id'] = point_id
                point_id += 1
                yield step
            region_start = region_stop

    def run(self, *args):
        for step in self._gScan.step_scan():
            yield step
class r2scan(Macro, Hookable):
    """Region scan of two motors that move together: both receive the
    same target position at every point (``positions = [p, p]``)."""
    hints = {'scan': 'r2scan', 'allowsHooks': ('pre-scan', 'pre-move',
                                               'post-move', 'pre-acq',
                                               'post-acq', 'post-step',
                                               'post-scan')}
    # env = ('ActiveMntGrp',)

    param_def = [
        ['motor1', Type.Moveable, None, 'Motor to move'],
        ['motor2', Type.Moveable, None, 'Motor to move'],
        ['start_pos', Type.Float, None, 'Start position'],
        ['regions',
         [['next_pos', Type.Float, None, 'next position'],
          ['region_nr_intervals', Type.Integer, None,
           'Region number of intervals']],
         None, 'List of tuples: (next_pos, region_nr_intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
    ]

    def prepare(self, motor1, motor2, start_pos, regions, integ_time, **opts):
        """Store scan parameters and build the step-scan engine."""
        self.name = 'r2scan'
        self.integ_time = integ_time
        self.start_pos = start_pos
        self.regions = regions
        # NOTE(review): see rscan — len(regions) // 2 looks like a leftover
        # from a flat-list layout; unused in this class.
        self.regions_count = len(self.regions) // 2
        generator = self._generator
        self.motors = [motor1, motor2]
        env = opts.get('env', {})
        constrains = []
        self._gScan = SScan(self, generator, self.motors, env, constrains)
        self._data = self._gScan.data

    def _generator(self):
        """Yield one step per point across all regions (shared step dict);
        region boundary points are emitted only once."""
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = self.getHooks('post-acq') + self.getHooks(
            '_NOHINTS_')
        step["post-step-hooks"] = self.getHooks('post-step')
        point_id = 0
        region_start = self.start_pos
        for r in range(len(self.regions)):
            region_stop, region_nr_intervals = self.regions[
                r][0], self.regions[r][1]
            positions = numpy.linspace(
                region_start, region_stop, region_nr_intervals + 1)
            if point_id != 0:
                # positions must be calculated from the start to the end of the region
                # but after the first region, the 'start' point must not be
                # repeated
                positions = positions[1:]
            for p in positions:
                # Both motors are commanded to the same position.
                step['positions'] = [p, p]
                step['point_id'] = point_id
                point_id += 1
                yield step
            region_start = region_stop

    def run(self, *args):
        for step in self._gScan.step_scan():
            yield step
class r3scan(Macro, Hookable):
    """Region scan of three motors that move together: all three receive
    the same target position at every point (``positions = [p, p, p]``)."""
    hints = {'scan': 'r3scan', 'allowsHooks': ('pre-scan', 'pre-move',
                                               'post-move', 'pre-acq',
                                               'post-acq', 'post-step',
                                               'post-scan')}
    # env = ('ActiveMntGrp',)

    param_def = [
        ['motor1', Type.Moveable, None, 'Motor to move'],
        ['motor2', Type.Moveable, None, 'Motor to move'],
        ['motor3', Type.Moveable, None, 'Motor to move'],
        ['start_pos', Type.Float, None, 'Start position'],
        ['regions',
         [['next_pos', Type.Float, None, 'next position'],
          ['region_nr_intervals', Type.Integer, None,
           'Region number of intervals']],
         None, 'List of tuples: (next_pos, region_nr_intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
    ]

    def prepare(self, motor1, motor2, motor3, start_pos, regions, integ_time, **opts):
        """Store scan parameters and build the step-scan engine."""
        self.name = 'r3scan'
        self.integ_time = integ_time
        self.start_pos = start_pos
        self.regions = regions
        # NOTE(review): see rscan — len(regions) // 2 looks like a leftover
        # from a flat-list layout; unused in this class.
        self.regions_count = len(self.regions) // 2
        generator = self._generator
        self.motors = [motor1, motor2, motor3]
        env = opts.get('env', {})
        constrains = []
        self._gScan = SScan(self, generator, self.motors, env, constrains)
        self._data = self._gScan.data

    def _generator(self):
        """Yield one step per point across all regions (shared step dict);
        region boundary points are emitted only once."""
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = self.getHooks('post-acq') + self.getHooks(
            '_NOHINTS_')
        step["post-step-hooks"] = self.getHooks('post-step')
        point_id = 0
        region_start = self.start_pos
        for r in range(len(self.regions)):
            region_stop, region_nr_intervals = self.regions[
                r][0], self.regions[r][1]
            positions = numpy.linspace(
                region_start, region_stop, region_nr_intervals + 1)
            if point_id != 0:
                # positions must be calculated from the start to the end of the region
                # but after the first region, the 'start' point must not be
                # repeated
                positions = positions[1:]
            for p in positions:
                # All three motors are commanded to the same position.
                step['positions'] = [p, p, p]
                step['point_id'] = point_id
                point_id += 1
                yield step
            region_start = region_stop

    def run(self, *args):
        for step in self._gScan.step_scan():
            yield step
class scanhist(Macro):
    """Show the scan history.

    With no argument (or a negative scan number) lists all recorded
    scans; with a specific scan number, shows that scan's details.
    """

    param_def = [
        ['scan number', Type.Integer, -1,
         'scan number. [default=-1 meaning show all scans]'],
    ]

    def run(self, scan_number):
        try:
            hist = self.getEnv("ScanHistory")
        except UnknownEnv:
            print("No scan recorded in history")
            return
        if scan_number < 0:
            self.show_all(hist)
        else:
            self.show_one(hist, scan_number)

    def show_one(self, hist, scan_number):
        """Output a detailed vertical table for one recorded scan."""
        item = None
        for h in hist:
            if h['serialno'] == scan_number:
                item = h
                break
        if item is None:
            self.warning("Could not find scan number %s", scan_number)
            return
        # Fix: read from the explicitly matched ``item`` instead of the
        # leaked loop variable ``h`` (equivalent after the break above,
        # but fragile under refactoring).
        serialno, title = item['serialno'], item['title']
        start = datetime.datetime.fromtimestamp(item['startts'])
        end = datetime.datetime.fromtimestamp(item['endts'])
        total_time = end - start
        start, end, total_time = start.ctime(), end.ctime(), str(total_time)
        scan_dir, scan_file = item['ScanDir'], item['ScanFile']
        deadtime = '%.1f%%' % item['deadtime']
        user = item['user']
        store = "Not stored!"
        if scan_dir is not None and scan_file is not None:
            if isinstance(scan_file, str):
                store = os.path.join(scan_dir, scan_file)
            else:
                store = scan_dir + os.path.sep + str(scan_file)
        channels = ", ".join(item['channels'])
        cols = ["#", "Title", "Start time", "End time", "Took", "Dead time",
                "User", "Stored", "Channels"]
        data = [serialno, title, start, end, total_time, deadtime, user, store,
                channels]
        table = Table([data], row_head_str=cols, row_head_fmt='%*s',
                      elem_fmt=['%-*s'],
                      col_sep='  :  ')
        for line in table.genOutput():
            self.output(line)

    def show_all(self, hist):
        """Output a one-row-per-scan summary of the whole history."""
        cols = "#", "Title", "Start time", "End time", "Stored"
        width = -1, -1, -1, -1, -1
        out = List(cols, max_col_width=width)
        today = datetime.datetime.today().date()
        for h in hist:
            # Show only the clock time for scans that ran today.
            start = datetime.datetime.fromtimestamp(h['startts'])
            if start.date() == today:
                start = start.time().strftime("%H:%M:%S")
            else:
                start = start.strftime("%Y-%m-%d %H:%M:%S")
            end = datetime.datetime.fromtimestamp(h['endts'])
            if end.date() == today:
                end = end.time().strftime("%H:%M:%S")
            else:
                end = end.strftime("%Y-%m-%d %H:%M:%S")
            scan_file = h['ScanFile']
            store = "Not stored!"
            if scan_file is not None:
                store = ", ".join(scan_file)
            row = h['serialno'], h['title'], start, end, store
            out.appendRow(row)
        for line in out.genOutput():
            self.output(line)
class ascanc(aNscan, Macro):
    """Continuous absolute scan of one motor; ``slow_down`` scales the
    motion speed (factor in (0, 1])."""

    param_def = [
        ['motor', Type.Moveable, None, 'Moveable to move'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, motor, start_pos, final_pos, integ_time, slow_down,
                **opts):
        self._prepare([motor], [start_pos], [final_pos], slow_down,
                      integ_time, mode=ContinuousMode, **opts)
class a2scanc(aNscan, Macro):
    """Continuous absolute scan of two motors moved simultaneously."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, motor1, start_pos1, final_pos1, motor2, start_pos2,
                final_pos2, integ_time, slow_down, **opts):
        motors = [motor1, motor2]
        starts = [start_pos1, start_pos2]
        finals = [final_pos1, final_pos2]
        self._prepare(motors, starts, finals, slow_down, integ_time,
                      mode=ContinuousMode, **opts)
class a3scanc(aNscan, Macro):
    """Continuous absolute scan of three motors moved simultaneously."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, integ_time,
                slow_down, **opts):
        motors = [m1, m2, m3]
        starts = [s1, s2, s3]
        finals = [f1, f2, f3]
        self._prepare(motors, starts, finals, slow_down, integ_time,
                      mode=ContinuousMode, **opts)
class a4scanc(aNscan, Macro):
    """Continuous absolute scan of four motors moved simultaneously."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        # Fix: the three descriptions below previously said "3"
        # (copy-paste error in the user-visible help strings).
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                integ_time, slow_down, **opts):
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [f1, f2, f3, f4],
                      slow_down, integ_time, mode=ContinuousMode, **opts)
class dNscanc(dNscan):
    """Base for continuous relative scans."""

    def do_restore(self):
        """Restore top motor velocities, then return to start positions."""
        # Raise each motor back to its maximum velocity so the return
        # move is as fast as possible, then reuse the relative-scan
        # restore to move back.
        for m in self.motors:
            self._gScan.set_max_top_velocity(m)
        dNscan.do_restore(self)
class dscanc(dNscanc, Macro):
    """Continuous relative scan of one motor; the motor returns to its
    original position at the end."""

    param_def = [
        ['motor', Type.Moveable, None, 'Moveable to move'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, motor, start_pos, final_pos, integ_time, slow_down,
                **opts):
        self._prepare([motor], [start_pos], [final_pos], slow_down,
                      integ_time, mode=ContinuousMode, **opts)
class d2scanc(dNscanc, Macro):
    """Continuous relative scan of two motors moved simultaneously."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, motor1, start_pos1, final_pos1, motor2, start_pos2,
                final_pos2, integ_time, slow_down, **opts):
        motors = [motor1, motor2]
        starts = [start_pos1, start_pos2]
        finals = [final_pos1, final_pos2]
        self._prepare(motors, starts, finals, slow_down, integ_time,
                      mode=ContinuousMode, **opts)
class d3scanc(dNscanc, Macro):
    """Continuous relative scan of three motors moved simultaneously."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, integ_time,
                slow_down, **opts):
        motors = [m1, m2, m3]
        starts = [s1, s2, s3]
        finals = [f1, f2, f3]
        self._prepare(motors, starts, finals, slow_down, integ_time,
                      mode=ContinuousMode, **opts)
class d4scanc(dNscanc, Macro):
    """Continuous relative scan of four motors moved simultaneously;
    motors return to their original positions at the end."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        # Fix: the three descriptions below previously said "3"
        # (copy-paste error in the user-visible help strings).
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                integ_time, slow_down, **opts):
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [f1, f2, f3, f4],
                      slow_down, integ_time, mode=ContinuousMode, **opts)
class meshc(Macro, Hookable):
    """Continuous 2D mesh scan.

    motor1 sweeps continuously from m1_start_pos to m1_final_pos for each
    line, while motor2 steps through m2_nr_interv intervals between lines
    (it is "frozen" during each continuous sweep).  With ``bidirectional``
    the odd lines are swept in the opposite direction (s-shaped path).
    """
    hints = {'scan': 'mesh', 'allowsHooks': ('pre-scan', 'pre-move',
                                             'post-move', 'pre-acq',
                                             'post-acq', 'post-step',
                                             'post-scan')}
    env = ('ActiveMntGrp',)

    param_def = [
        ['motor1', Type.Moveable, None, 'First motor to move'],
        ['m1_start_pos', Type.Float, None, 'Scan start position for first '
                                           'motor'],
        ['m1_final_pos', Type.Float, None, 'Scan final position for first '
                                           'motor'],
        ['slow_down', Type.Float, None, 'global scan slow down factor (0, 1]'],
        ['motor2', Type.Moveable, None, 'Second motor to move'],
        ['m2_start_pos', Type.Float, None, 'Scan start position for second '
                                           'motor'],
        ['m2_final_pos', Type.Float, None, 'Scan final position for second '
                                           'motor'],
        ['m2_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['bidirectional', Type.Boolean, False, 'Save time by scanning '
                                               's-shaped']
    ]

    def prepare(self, m1, m1_start_pos, m1_final_pos, slow_down,
                m2, m2_start_pos, m2_final_pos, m2_nr_interv, integ_time,
                bidirectional, **opts):
        """Store the scan parameters and build the CSScan engine."""
        self.motors = [m1, m2]
        self.slow_down = slow_down
        self.starts = numpy.array([m1_start_pos, m2_start_pos], dtype='d')
        self.finals = numpy.array([m1_final_pos, m2_final_pos], dtype='d')
        self.m2_nr_interv = m2_nr_interv
        self.integ_time = integ_time
        self.bidirectional_mode = bidirectional
        # one waypoint (continuous line) per motor2 position
        self.nr_waypoints = m2_nr_interv + 1

        self.name = opts.get('name', 'meshc')

        moveables = []
        for m, start, final in zip(self.motors, self.starts, self.finals):
            moveables.append(MoveableDesc(moveable=m, min_value=min(
                start, final), max_value=max(start, final)))
        # motor1 (the continuously moving one) is the scan reference
        moveables[0].is_reference = True

        env = opts.get('env', {})
        constrains = [getCallable(cns) for cns in opts.get(
            'constrains', [UNCONSTRAINED])]
        extrainfodesc = opts.get('extrainfodesc', [])

        # Hooks are not always set at this point. We will call getHooks
        # later on in the scan_loop
        # self.pre_scan_hooks = self.getHooks('pre-scan')
        # self.post_scan_hooks = self.getHooks('post-scan'

        self._gScan = CSScan(self, self._waypoint_generator,
                             self._period_generator, moveables, env,
                             constrains, extrainfodesc)
        # motor2 does not move during a continuous line
        self._gScan.frozen_motors = [m2]

        # _data is the default member where the Macro class stores the data.
        # Assign the date produced by GScan (or its subclasses) to it so all
        # the Macro infrastructure related to the data works e.g. getter,
        # property, etc.
        self.setData(self._gScan.data)

    def _waypoint_generator(self):
        """Yield one step dict per mesh line: start/end positions of both
        motors plus move hooks; direction alternates in bidirectional mode.

        NOTE(review): the same ``step`` dict object is mutated and re-yielded
        on every iteration — consumers must copy what they keep.
        """
        step = {}
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["check_func"] = []
        step["slow_down"] = self.slow_down
        points2 = self.m2_nr_interv + 1
        m1start, m2start = self.starts
        m1end, m2end = self.finals
        point_no = 1
        for i, m2pos in enumerate(numpy.linspace(m2start, m2end, points2)):
            start, end = m1start, m1end
            # odd lines are swept backwards when s-shaped scanning is on
            if i % 2 != 0 and self.bidirectional_mode:
                start, end = m1end, m1start
            step["start_positions"] = numpy.array([start, m2pos])
            step["positions"] = numpy.array([end, m2pos])
            step["point_id"] = point_no
            point_no += 1
            yield step

    def _period_generator(self):
        """Yield acquisition-period step dicts forever; CSScan decides when
        to stop consuming them."""
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = (self.getHooks('post-acq') +
                                  self.getHooks('_NOHINTS_'))
        step["post-step-hooks"] = self.getHooks('post-step')
        step["check_func"] = []
        step['extrainfo'] = {}
        point_no = 0
        while(True):
            point_no += 1
            step["point_id"] = point_no
            yield step

    def run(self, *args):
        """Execute the scan by iterating the generic scan engine."""
        for step in self._gScan.step_scan():
            yield step

    def getTimeEstimation(self):
        """Estimated duration, delegated to the scan engine."""
        return self._gScan.waypoint_estimation()

    def getIntervalEstimation(self):
        """Number of waypoints (mesh lines)."""
        return self.nr_waypoints
class dmeshc(meshc):
    """Continuous mesh scan with limits given relative to the current motor
    positions; both motors are sent back to where they started once the
    scan finishes (see do_restore)."""

    hints = copy.deepcopy(meshc.hints)
    hints['scan'] = 'dmeshc'
    env = copy.deepcopy(meshc.env)

    param_def = [
        ['motor1', Type.Moveable, None, 'First motor to move'],
        ['m1_start_pos', Type.Float, None, 'Scan start position for first '
                                           'motor'],
        ['m1_final_pos', Type.Float, None, 'Scan final position for first '
                                           'motor'],
        ['slow_down', Type.Float, None, 'global scan slow down factor (0, 1]'],
        ['motor2', Type.Moveable, None, 'Second motor to move'],
        ['m2_start_pos', Type.Float, None, 'Scan start position for second '
                                           'motor'],
        ['m2_final_pos', Type.Float, None, 'Scan final position for second '
                                           'motor'],
        ['m2_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['bidirectional', Type.Boolean, False, 'Save time by scanning '
                                               's-shaped']
    ]

    def prepare(self, m1, m1_start_pos, m1_final_pos, slow_down,
                m2, m2_start_pos, m2_final_pos, m2_nr_interv, integ_time,
                bidirectional, **opts):
        """Translate the relative limits into absolute positions around the
        current motor positions and reuse meshc.prepare."""
        self._motion = self.getMotion([m1, m2])
        current = numpy.array(self._motion.readPosition(force=True))
        self.originalPositions = current
        starts = current + numpy.array([m1_start_pos, m2_start_pos])
        finals = current + numpy.array([m1_final_pos, m2_final_pos])
        meshc.prepare(self, m1, starts[0], finals[0], slow_down,
                      m2, starts[1], finals[1], m2_nr_interv, integ_time,
                      bidirectional, **opts)

    def do_restore(self):
        """Move both motors back to their pre-scan positions."""
        self.info("Returning to start positions...")
        self._motion.move(self.originalPositions)
class aNscanct(aNscan):
    """Variant of aNscan for continuous scans with hardware-synchronized
    acquisition; only overrides the scan hints (extra configuration,
    start and cleanup hook places)."""

    hints = {"scan": "aNscanct",
             "allowsHooks": ("pre-scan", "pre-configuration",
                             "post-configuration", "pre-move", "post-move",
                             "pre-acq", "pre-start", "post-acq",
                             "pre-cleanup", "post-cleanup", "post-scan")}
class ascanct(aNscanct, Macro):
    """Continuous scan of a single moveable with hardware-synchronized
    acquisition (ContinuousHwTimeMode)."""

    param_def = [['motor', Type.Moveable, None, 'Moveable name'],
                 ['start_pos', Type.Float, None, 'Scan start position'],
                 ['final_pos', Type.Float, None, 'Scan final position'],
                 ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
                 ['integ_time', Type.Float, None, 'Integration time'],
                 ['latency_time', Type.Float, 0, 'Latency time']]

    def prepare(self, motor, start_pos, final_pos, nr_interv,
                integ_time, latency_time, **opts):
        """Wrap the single moveable in lists and delegate to _prepare."""
        motors, starts, finals = [motor], [start_pos], [final_pos]
        self._prepare(motors, starts, finals, nr_interv, integ_time,
                      mode=ContinuousHwTimeMode, latency_time=latency_time,
                      **opts)
class a2scanct(aNscanct, Macro):
    """Continuous scan of two moveables with hardware-synchronized
    acquisition (ContinuousHwTimeMode)."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['latency_time', Type.Float, 0, 'Latency time']]

    def prepare(self, m1, s1, f1, m2, s2, f2, nr_interv,
                integ_time, latency_time, **opts):
        """Group the per-motor arguments and delegate to _prepare."""
        motors = [m1, m2]
        starts = [s1, s2]
        finals = [f1, f2]
        self._prepare(motors, starts, finals, nr_interv, integ_time,
                      mode=ContinuousHwTimeMode, latency_time=latency_time,
                      **opts)
class a3scanct(aNscanct, Macro):
    """Continuous scan of three moveables with hardware-synchronized
    acquisition (ContinuousHwTimeMode)."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['latency_time', Type.Float, 0, 'Latency time']]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, nr_interv,
                integ_time, latency_time, **opts):
        """Group the per-motor arguments and delegate to _prepare."""
        motors = [m1, m2, m3]
        starts = [s1, s2, s3]
        finals = [f1, f2, f3]
        self._prepare(motors, starts, finals, nr_interv, integ_time,
                      mode=ContinuousHwTimeMode, latency_time=latency_time,
                      **opts)
class a4scanct(aNscanct, Macro):
    """Continuous scan of four moveables with hardware-synchronized
    acquisition (ContinuousHwTimeMode).

    Fix: derive from aNscanct — as a2scanct/a3scanct do — instead of plain
    aNscan, so the continuous-scan hook hints declared on aNscanct
    ('pre-configuration', 'pre-start', 'pre-cleanup', ...) apply to this
    macro as well.  aNscanct itself subclasses aNscan, so this change is
    backward compatible for isinstance checks and inherited behavior.
    """

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['latency_time', Type.Float, 0, 'Latency time']]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                nr_interv, integ_time, latency_time, **opts):
        """Delegate to _prepare in ContinuousHwTimeMode."""
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [f1, f2, f3, f4],
                      nr_interv, integ_time, mode=ContinuousHwTimeMode,
                      latency_time=latency_time, **opts)
class dNscanct(dNscan):
    """Variant of dNscan for continuous scans with hardware-synchronized
    acquisition; only overrides the scan hints."""

    hints = {"scan": "dNscanct",
             "allowsHooks": ("pre-scan", "pre-configuration",
                             "post-configuration", "pre-move", "post-move",
                             "pre-acq", "pre-start", "post-acq",
                             "pre-cleanup", "post-cleanup", "post-scan")}
class dscanct(dNscanct, Macro):
    """Relative continuous scan of a single moveable with
    hardware-synchronized acquisition (ContinuousHwTimeMode)."""

    param_def = [['motor', Type.Moveable, None, 'Moveable name'],
                 ['start_pos', Type.Float, None, 'Scan start position'],
                 ['final_pos', Type.Float, None, 'Scan final position'],
                 ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
                 ['integ_time', Type.Float, None, 'Integration time'],
                 ['latency_time', Type.Float, 0, 'Latency time']]

    def prepare(self, motor, start_pos, final_pos, nr_interv,
                integ_time, latency_time, **opts):
        """Wrap the single moveable in lists and delegate to _prepare."""
        motors, starts, finals = [motor], [start_pos], [final_pos]
        self._prepare(motors, starts, finals, nr_interv, integ_time,
                      mode=ContinuousHwTimeMode, latency_time=latency_time,
                      **opts)
class d2scanct(dNscanct, Macro):
    """Relative continuous scan of two moveables with hardware-synchronized
    acquisition (ContinuousHwTimeMode)."""
    # NOTE(review): the sibling dscanct takes nr_interv + latency_time, but
    # this macro still takes slow_down (like the software-synchronized
    # d2scanc) and forwards it in the positional slot that dscanct fills
    # with nr_interv -- looks like a copy-paste leftover; confirm against
    # aNscan._prepare.  Changing it would alter the macro's parameter list,
    # so it is only flagged here.
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, integ_time, slow_down, **opts):
        """Delegate to _prepare in ContinuousHwTimeMode."""
        self._prepare([m1, m2], [s1, s2], [f1, f2], slow_down, integ_time,
                      mode=ContinuousHwTimeMode, **opts)
class d3scanct(dNscanct, Macro):
    """Relative continuous scan of three moveables with
    hardware-synchronized acquisition (ContinuousHwTimeMode)."""
    # NOTE(review): same inconsistency as d2scanct -- slow_down is passed in
    # the positional slot that dscanct fills with nr_interv; confirm against
    # aNscan._prepare before relying on it.
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, integ_time,
                slow_down, **opts):
        """Delegate to _prepare in ContinuousHwTimeMode."""
        self._prepare([m1, m2, m3], [s1, s2, s3], [f1, f2, f3], slow_down,
                      integ_time, mode=ContinuousHwTimeMode, **opts)
class d4scanct(dNscanct, Macro):
    """Relative continuous scan of four moveables with
    hardware-synchronized acquisition (ContinuousHwTimeMode).

    Fix: the parameter descriptions for ``motor4``/``start_pos4``/
    ``final_pos4`` said "3" (copy-paste from d3scanct); they now say "4".
    Only help text changes.
    """
    # NOTE(review): same inconsistency as d2scanct/d3scanct -- slow_down is
    # passed in the positional slot that dscanct fills with nr_interv;
    # confirm against aNscan._prepare.  Fixing it would change the macro's
    # parameter list, so it is only flagged here.
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                integ_time, slow_down, **opts):
        """Delegate to _prepare in ContinuousHwTimeMode."""
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [f1, f2, f3, f4],
                      slow_down, integ_time, mode=ContinuousHwTimeMode, **opts)
class meshct(Macro, Hookable):
    """Continuous 2D mesh scan with hardware-synchronized acquisition.

    motor1 sweeps continuously through m1_nr_interv intervals per line;
    motor2 steps through m2_nr_interv intervals between lines.  With
    ``bidirectional`` odd lines are swept in the opposite direction.
    The effective latency time is the larger of the given value and the
    active measurement group's own latency time.
    """
    hints = {"scan": "meshct",
             "allowsHooks": ("pre-scan", "pre-configuration",
                             "post-configuration", "pre-move",
                             "post-move", "pre-acq", "pre-start",
                             "post-acq", "pre-cleanup", "post-cleanup",
                             "post-scan")}
    env = ('ActiveMntGrp',)

    param_def = [
        ['motor1', Type.Moveable, None, 'First motor to move'],
        ['m1_start_pos', Type.Float, None, 'Scan start position for first '
                                           'motor'],
        ['m1_final_pos', Type.Float, None, 'Scan final position for first '
                                           'motor'],
        ['m1_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['motor2', Type.Moveable, None, 'Second motor to move'],
        ['m2_start_pos', Type.Float, None, 'Scan start position for second '
                                           'motor'],
        ['m2_final_pos', Type.Float, None, 'Scan final position for second '
                                           'motor'],
        ['m2_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['bidirectional', Type.Boolean, False, 'Save time by scanning '
                                               's-shaped'],
        ['latency_time', Type.Float, 0, 'Latency time']
    ]

    def prepare(self, m1, m1_start_pos, m1_final_pos, m1_nr_interv,
                m2, m2_start_pos, m2_final_pos, m2_nr_interv, integ_time,
                bidirectional, latency_time, **opts):
        """Precompute per-line start/end waypoints and build the CTScan."""
        self.motors = [m1, m2]
        self.starts = numpy.array([m1_start_pos, m2_start_pos], dtype='d')
        self.finals = numpy.array([m1_final_pos, m2_final_pos], dtype='d')
        self.nr_intervs = numpy.array([m1_nr_interv, m2_nr_interv], dtype='i')
        # Number of intervals of the first motor which is doing the
        # continuous scan.
        self.nr_interv = m1_nr_interv
        self.nb_points = self.nr_interv + 1
        self.integ_time = integ_time
        self.bidirectional_mode = bidirectional
        # Prepare the waypoints
        m1start, m2start = self.starts
        m1end, m2end = self.finals
        points1, points2 = self.nr_intervs + 1
        m2_space = numpy.linspace(m2start, m2end, points2)
        self.waypoints = []
        self.starts_points = []
        for i, m2pos in enumerate(m2_space):
            self.starts_points.append(numpy.array([m1start, m2pos], dtype='d'))
            self.waypoints.append(numpy.array([m1end, m2pos], dtype='d'))
            # swap sweep direction after every line when s-shaped
            if self.bidirectional_mode:
                m1start, m1end = m1end, m1start

        self.name = opts.get('name', 'meshct')

        moveables = []
        for m, start, final in zip(self.motors, self.starts, self.finals):
            moveables.append(MoveableDesc(moveable=m, min_value=min(
                start, final), max_value=max(start, final)))
        # motor1 (the continuously moving one) is the scan reference
        moveables[0].is_reference = True

        env = opts.get('env', {})

        # the measurement group may impose a larger latency than requested
        mg_name = self.getEnv('ActiveMntGrp')
        mg = self.getMeasurementGroup(mg_name)
        mg_latency_time = mg.getLatencyTime()
        if mg_latency_time > latency_time:
            self.info("Choosing measurement group latency time: %f" %
                      mg_latency_time)
            latency_time = mg_latency_time
        self.latency_time = latency_time
        constrains = [getCallable(cns) for cns in opts.get('constrains',
                                                           [UNCONSTRAINED])]
        extrainfodesc = opts.get('extrainfodesc', [])
        # Hooks are not always set at this point. We will call getHooks
        # later on in the scan_loop
        # self.pre_scan_hooks = self.getHooks('pre-scan')
        # self.post_scan_hooks = self.getHooks('post-scan')
        self._gScan = CTScan(self, self._generator, moveables, env, constrains,
                             extrainfodesc)
        # _data is the default member where the Macro class stores the data.
        # Assign the date produced by GScan (or its subclasses) to it so all
        # the Macro infrastructure related to the data works e.g. getter,
        # property, etc.
        self.setData(self._gScan.data)

    def _generator(self):
        """Yield one step dict per mesh line, expanding each motor's
        start/end through its moveable tree.

        NOTE(review): the same ``step`` dict is mutated and re-yielded on
        every iteration -- consumers must copy what they keep.
        """
        moveables_trees = self._gScan.get_moveables_trees()
        step = {}
        step["pre-move-hooks"] = self.getHooks('pre-move')
        post_move_hooks = self.getHooks(
            'post-move') + [self._fill_missing_records]
        step["post-move-hooks"] = post_move_hooks
        step["check_func"] = []
        step["active_time"] = self.nb_points * (self.integ_time
                                                + self.latency_time)
        points1, _ = self.nr_intervs + 1
        for i, waypoint in enumerate(self.waypoints):
            # first record index belonging to this line (used for padding)
            self.point_id = points1 * i
            step["waypoint_id"] = i
            self.starts = self.starts_points[i]
            self.finals = waypoint
            step["positions"] = []
            step["start_positions"] = []
            for start, end, moveable_tree in zip(self.starts, self.finals,
                                                 moveables_trees):
                moveable_root = moveable_tree.root()
                start_positions, end_positions = _calculate_positions(
                    moveable_root, start, end)
                step["start_positions"] += start_positions
                step["positions"] += end_positions
            yield step

    def run(self, *args):
        """Execute the scan by iterating the generic scan engine."""
        for step in self._gScan.step_scan():
            yield step

    def getTimeEstimation(self):
        # no estimation implemented for this scan type
        return 0.0

    def getIntervalEstimation(self):
        """Number of waypoints (mesh lines)."""
        return len(self.waypoints)

    def _fill_missing_records(self):
        """Pad the record list of the just-finished line with dummy records
        so every line contributes exactly nb_points records."""
        # fill record list with dummy records for the final padding
        nb_of_points = self.nb_points
        scan = self._gScan
        nb_of_total_records = len(scan.data.records)
        nb_of_records = nb_of_total_records - self.point_id
        missing_records = nb_of_points - nb_of_records
        scan.data.initRecords(missing_records)

    def _get_nr_points(self):
        # deprecated alias kept for backward compatibility
        msg = ("nr_points is deprecated since version 3.0.3. "
               "Use nb_points instead.")
        self.warning(msg)
        return self.nb_points

    nr_points = property(_get_nr_points)
class timescan(Macro, Hookable):
    """Acquire nr_interv + 1 points in time (no motor motion), each with
    the given integration time and at least the given latency time."""

    hints = {'scan': 'timescan', 'allowsHooks': ('pre-scan', 'pre-acq',
                                                 'post-acq', 'post-scan')}

    param_def = [
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['latency_time', Type.Float, 0, 'Latency time']]

    def prepare(self, nr_interv, integ_time, latency_time):
        """Store the acquisition parameters and build the TScan engine."""
        self.integ_time = integ_time
        self.latency_time = latency_time
        self.nr_interv = nr_interv
        self.nb_points = nr_interv + 1
        self._gScan = TScan(self)
        # _data is the default member where the Macro class stores the data.
        # Assign the data produced by GScan (or its subclasses) to it so all
        # the Macro infrastructure related to the data works (getter,
        # property, etc.).
        self.setData(self._gScan.data)

    def run(self, *args):
        """Execute the scan by iterating the generic scan engine."""
        yield from self._gScan.step_scan()

    def getTimeEstimation(self):
        """Estimated duration: nb_points x (integ_time + effective latency),
        where the effective latency honors the measurement group's own."""
        mg_latency = self._gScan.measurement_group.getLatencyTime()
        effective_latency = max(self.latency_time, mg_latency)
        return self.nb_points * (self.integ_time + effective_latency)

    def getIntervalEstimation(self):
        """Number of scan intervals."""
        return self.nr_interv

    def _get_nr_points(self):
        # deprecated alias kept for backward compatibility
        msg = ("nr_points is deprecated since version 3.0.3. "
               "Use nb_points instead.")
        self.warning(msg)
        return self.nb_points

    nr_points = property(_get_nr_points)
class scanstats(Macro):
    """Compute basic statistics (min/max and their positions, mean,
    integral, center of mass, FWHM, center) of the parent scan's counter
    channels against its first motor.

    Intended to run as a post-scan hook: it reads the parent macro's data,
    prints a statistics table and stores the result in the
    "<door>.ScanStats" environment variable.
    """

    env = ("ActiveMntGrp", )

    param_def = [
        ["channel",
         [["channel", Type.ExpChannel, None, ""], {"min": 0}],
         None,
         "List of channels for statistics calculations"
         ]
    ]

    def run(self, channel):
        parent = self.getParentMacro()
        if not parent:
            self.warning("for now the scanstats macro can only be executed as"
                         " a post-scan hook")
            return
        if not hasattr(parent, "motors"):
            self.warning("scan must involve at least one moveable "
                         "to calculate statistics")
            return
        active_meas_grp = self.getEnv("ActiveMntGrp")
        meas_grp = self.getMeasurementGroup(active_meas_grp)
        calc_channels = []
        enabled_channels = meas_grp.getEnabled()
        # explicit channel list wins over "all enabled channels"
        if channel:
            stat_channels = [chan.name for chan in channel]
        else:
            stat_channels = [key for key in enabled_channels.keys()]
        for chan in stat_channels:
            enabled = enabled_channels.get(chan)
            if enabled is None:
                self.warning("{} not in {}".format(chan, meas_grp.name))
            else:
                if not enabled and channel:
                    self.warning("{} not enabled".format(chan))
                elif enabled and channel:
                    # channel was given as parameters
                    calc_channels.append(chan)
                elif enabled and meas_grp.getPlotType(chan)[chan] == 1:
                    calc_channels.append(chan)
        if len(calc_channels) == 0:
            # fallback is first enabled channel in meas_grp
            calc_channels.append(next(iter(enabled_channels)))
        # statistics only make sense for scalar channels
        scalar_channels = []
        for _, chan in self.getExpChannels().items():
            if chan.type in ("OneDExpChannel", "TwoDExpChannel"):
                continue
            scalar_channels.append(chan.name)
        calc_channels = [ch for ch in calc_channels if ch in scalar_channels]
        if len(calc_channels) == 0:
            self.warning("measurement group must contain at least one "
                         "enabled scalar channel to calculate statistics")
            return
        selected_motor = str(parent.motors[0])
        stats = {}
        col_header = []
        cols = []
        # collect the motor positions and each channel's values per record
        motor_data = []
        channels_data = {}
        for channel_name in calc_channels:
            channels_data[channel_name] = []
        for idx, rc in parent.data.items():
            motor_data.append(rc[selected_motor])
            for channel_name in calc_channels:
                channels_data[channel_name].append(rc[channel_name])
        motor_data = numpy.array(motor_data)
        for channel_name, data in channels_data.items():
            channel_data = numpy.array(data)
            (_min, _max, min_at, max_at, half_max, com, mean, _int,
             fwhm, cen) = self._calcStats(motor_data, channel_data)
            stats[channel_name] = {
                "min": _min,
                "max": _max,
                "minpos": min_at,
                "maxpos": max_at,
                "mean": mean,
                "int": _int,
                "com": com,
                "fwhm": fwhm,
                "cen": cen}
            col_header.append([channel_name])
            cols.append([
                stats[channel_name]["min"],
                stats[channel_name]["max"],
                stats[channel_name]["minpos"],
                stats[channel_name]["maxpos"],
                stats[channel_name]["mean"],
                stats[channel_name]["int"],
                stats[channel_name]["com"],
                stats[channel_name]["fwhm"],
                stats[channel_name]["cen"],
            ])
        self.info("Statistics for movable: {:s}".format(selected_motor))
        table = Table(elem_list=cols, elem_fmt=["%*g"],
                      row_head_str=["MIN", "MAX", "MIN@", "MAX@",
                                    "MEAN", "INT", "COM", "FWHM", "CEN"],
                      col_head_str=col_header, col_head_sep="-")
        out = table.genOutput()
        for line in out:
            self.info(line)
        self.setEnv("{:s}.ScanStats".format(self.getDoorName()),
                    {"Stats": stats,
                     "Motor": selected_motor,
                     "ScanID": self.getEnv("ScanID")})

    @staticmethod
    def _calcStats(x, y):
        """Return statistics of the curve y(x) as the tuple
        (min, max, min_at, max_at, half_max, com, mean, int, fwhm, cen).

        Fixes vs the previous version:
        * numpy scalar division never raises ZeroDivisionError (it yields
          inf/nan with a RuntimeWarning), so the zero denominators are now
          checked explicitly (falling back to 0, the previously intended
          behavior);
        * walking left from the peak could reach negative indices, which
          numpy silently wraps to the END of the array instead of raising
          the IndexError that the x[0] fallback relied on -- the walk is
          now bounded at index 0.
        """
        # max and min
        _min = numpy.min(y)
        _max = numpy.max(y)
        min_at = x[numpy.argmin(y)]
        max_idx = numpy.argmax(y)
        max_at = x[max_idx]
        mean = numpy.mean(y)
        _int = numpy.sum(y)
        # center of mass (com); guard the zero-sum case explicitly
        com = numpy.sum(y * x) / _int if _int != 0 else 0
        # determine if it is a peak- or erf-like function
        half_max = (_max - _min) / 2 + _min
        lower_left = numpy.any(y[0:max_idx] < half_max)
        lower_right = numpy.any(y[max_idx:] < half_max)
        if lower_left and lower_right:
            # it is a peak-like function
            y_data = y
        elif lower_left:
            # it is an erf-like function:
            # use the gradient for further calculation
            y_data = numpy.gradient(y)
            # use also the half maximum of the gradient
            half_max = (numpy.max(y_data) - numpy.min(y_data)) \
                / 2 + numpy.min(y_data)
        else:
            # it is an erf-like function:
            # use the negated gradient for further calculation
            y_data = -1 * numpy.gradient(y)
            # use also the half maximum of the gradient
            half_max = (numpy.max(y_data) - numpy.min(y_data)) \
                / 2 + numpy.min(y_data)
        # cen and fwhm
        # this part is adapted from:
        #
        # The PyMca X-Ray Fluorescence Toolkit
        #
        # Copyright (c) 2004-2014 European Synchrotron Radiation Facility
        #
        # This file is part of the PyMca X-ray Fluorescence Toolkit developed
        # at the ESRF by the Software group.
        max_idx_data = numpy.argmax(y_data)
        # lower half-maximum crossing: walk left from the peak
        idx = max_idx_data
        while idx >= 0 and y_data[idx] >= half_max:
            idx -= 1
        if idx < 0:
            # curve never drops below half max on the left
            lhmx = x[0]
        else:
            x0, x1 = x[idx], x[idx + 1]
            y0, y1 = y_data[idx], y_data[idx + 1]
            dy = y1 - y0
            lhmx = ((half_max * (x1 - x0) - (y0 * x1) + (y1 * x0)) / dy
                    if dy != 0 else 0)
        # upper half-maximum crossing: walk right from the peak
        idx = max_idx_data
        nb = len(y_data)
        while idx < nb and y_data[idx] >= half_max:
            idx += 1
        if idx >= nb:
            # curve never drops below half max on the right
            uhmx = x[-1]
        else:
            x0, x1 = x[idx - 1], x[idx]
            y0, y1 = y_data[idx - 1], y_data[idx]
            dy = y1 - y0
            uhmx = ((half_max * (x1 - x0) - (y0 * x1) + (y1 * x0)) / dy
                    if dy != 0 else 0)
        fwhm = uhmx - lhmx
        cen = (uhmx + lhmx) / 2
        return (_min, _max, min_at, max_at, half_max, com, mean, _int,
                fwhm, cen)
| true | true |
f71f8633b734353bac2000dd7387efb6ae942340 | 2,457 | py | Python | cli/polyaxon/utils/cache.py | polyaxon/cli | 3543c0220a8a7c06fc9573cd2a740f8ae4930641 | [
"Apache-2.0"
] | null | null | null | cli/polyaxon/utils/cache.py | polyaxon/cli | 3543c0220a8a7c06fc9573cd2a740f8ae4930641 | [
"Apache-2.0"
] | 1 | 2022-01-24T11:26:47.000Z | 2022-03-18T23:17:58.000Z | cli/polyaxon/utils/cache.py | polyaxon/cli | 3543c0220a8a7c06fc9573cd2a740f8ae4930641 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
#
# Copyright 2018-2022 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from polyaxon.exceptions import PolyaxonSchemaError
from polyaxon.managers.project import ProjectConfigManager
from polyaxon.utils.formatting import Printer
CACHE_ERROR = (
"Found an invalid project config or project config cache, "
"if you are using Polyaxon CLI please run: "
"`polyaxon config purge --cache-only`"
)
def get_local_project(is_cli: bool = False):
    """Return the cached project config via ProjectConfigManager.

    On failure: in CLI mode print the cache error and exit; otherwise raise
    PolyaxonSchemaError, explicitly chained to the original exception so the
    root cause of the invalid cache stays visible in the traceback.
    """
    try:
        return ProjectConfigManager.get_config()
    except Exception as e:  # noqa
        if is_cli:
            Printer.print_error(CACHE_ERROR, sys_exit=True)
        else:
            # Explicit chaining (PEP 3134) instead of losing the cause.
            raise PolyaxonSchemaError(CACHE_ERROR) from e
def _is_same_project(owner=None, project=None):
    """Return True if (owner, project) designates the locally cached project.

    The owner only matters when both the given owner and the cached owner
    are set.  Fix: returns an explicit False (instead of the previous
    implicit None) when the project names do not match; both are falsy, so
    all truthiness-based callers behave the same.
    """
    local_project = get_local_project(is_cli=True)
    if project and project == local_project.name:
        return not all([owner, local_project.owner]) or owner == local_project.owner
    return False
def _cache_project(config, owner=None, project=None):
    """Persist *config*: in the local project cache when a locally
    initialized project matches (owner, project), otherwise globally."""
    locally_managed = (
        ProjectConfigManager.is_initialized()
        and ProjectConfigManager.is_locally_initialized()
    )
    if locally_managed and _is_same_project(owner, project):
        ProjectConfigManager.set_config(config)
        return
    ProjectConfigManager.set_config(
        config, visibility=ProjectConfigManager.VISIBILITY_GLOBAL
    )
def cache(config_manager, config, owner=None, project=None):
    """Persist *config* through *config_manager*, scoping its visibility to
    the currently initialized project."""
    if config_manager == ProjectConfigManager:
        _cache_project(config=config, project=project, owner=owner)
        # NOTE(review): execution falls through to the generic path below,
        # so the project config may be written a second time -- confirm
        # this is intended.
    # Set caching only if we have an initialized project
    if not ProjectConfigManager.is_initialized():
        return
    if not _is_same_project(owner, project):
        return
    if ProjectConfigManager.is_locally_initialized():
        visibility = ProjectConfigManager.VISIBILITY_LOCAL
    else:
        visibility = ProjectConfigManager.VISIBILITY_GLOBAL
    config_manager.set_config(config, visibility=visibility)
| 33.202703 | 84 | 0.728938 |
from polyaxon.exceptions import PolyaxonSchemaError
from polyaxon.managers.project import ProjectConfigManager
from polyaxon.utils.formatting import Printer
CACHE_ERROR = (
"Found an invalid project config or project config cache, "
"if you are using Polyaxon CLI please run: "
"`polyaxon config purge --cache-only`"
)
def get_local_project(is_cli: bool = False):
try:
return ProjectConfigManager.get_config()
except Exception:
if is_cli:
Printer.print_error(CACHE_ERROR, sys_exit=True)
else:
raise PolyaxonSchemaError(CACHE_ERROR)
def _is_same_project(owner=None, project=None):
local_project = get_local_project(is_cli=True)
if project and project == local_project.name:
return not all([owner, local_project.owner]) or owner == local_project.owner
def _cache_project(config, owner=None, project=None):
if (
ProjectConfigManager.is_initialized()
and ProjectConfigManager.is_locally_initialized()
):
if _is_same_project(owner, project):
ProjectConfigManager.set_config(config)
return
ProjectConfigManager.set_config(
config, visibility=ProjectConfigManager.VISIBILITY_GLOBAL
)
def cache(config_manager, config, owner=None, project=None):
if config_manager == ProjectConfigManager:
_cache_project(config=config, project=project, owner=owner)
if not ProjectConfigManager.is_initialized():
return
if not _is_same_project(owner, project):
return
visibility = (
ProjectConfigManager.VISIBILITY_LOCAL
if ProjectConfigManager.is_locally_initialized()
else ProjectConfigManager.VISIBILITY_GLOBAL
)
config_manager.set_config(config, visibility=visibility)
| true | true |
f71f8692d84797110282e3423509cce733cecedd | 13,447 | py | Python | containers.py | Fy-Network/fysql | 9a5910601e9aa13479c9fbd05eb64e958e90dea2 | [
"MIT"
] | 1 | 2016-06-17T08:48:52.000Z | 2016-06-17T08:48:52.000Z | containers.py | Fy-/fysql | 9a5910601e9aa13479c9fbd05eb64e958e90dea2 | [
"MIT"
] | 1 | 2016-06-17T18:06:41.000Z | 2016-06-17T18:06:41.000Z | containers.py | Fy-Network/fysql | 9a5910601e9aa13479c9fbd05eb64e958e90dea2 | [
"MIT"
] | 2 | 2018-02-11T02:14:11.000Z | 2020-01-07T05:40:34.000Z | # -*- coding: utf-8 -*-
"""
fysql.containers
~~~~~~~~~~~~~~~~
:copyright: (c) 2016 by Gasquez Florian
:license: MIT, see LICENSE for more details.
"""
from __future__ import unicode_literals
from functools import wraps
import copy
import hashlib
from .entities import SQLEntity, SQLJoin, SQLCondition, SQLColumn
from .columns import FKeyColumn, PKeyColumn, IntegerColumn
from .static import Tables
'''
class ContainerWalkerType(type):
_instances = {}
def __new__(cls, *args, **kwargs):
if not args[2]:
return super(ContainerWalker, cls).__new__(cls, *args, **kwargs)
key = hashlib.md5(args[0].encode('utf-8')).hexdigest()
if key not in ContainerWalkerType._instances.keys():
ContainerWalkerType._instances[key] = super(ContainerWalker, cls).__new__(cls, *args, **kwargs)
return ContainerWalkerType._instances[key]
'''
class ContainerWalker(object):
    """Walk through a list of SQLEntity and EntityContainer items and
    render them into a single SQL string.

    Attributes:
        entities (list): SQLEntity and (nested) EntityContainer items.
        separator (str): separator used when joining top-level fragments.
        _sql (str or bool): cached SQL string; False until first built.
    """

    def __init__(self, entities, separator, executable, *args, **kwargs):
        self._sql = False
        self.entities = entities
        self.separator = separator

    def prepare(self):
        """Flatten the entities (recursing into nested containers) into a
        list of string fragments; cache the joined, stripped SQL string and
        return the fragment list."""
        sql = []
        for entity in self.entities:
            if isinstance(entity, EntityContainer):
                # nested containers join their own fragments with their own
                # separator before being appended as one fragment
                sql.append(
                    entity.separator.join(
                        map(str, entity.walker.prepare())
                    )
                )
            else:
                sql.append(str(entity))
        self._sql = self.separator.join(map(str, sql)).strip()
        return sql

    @property
    def sql(self):
        """Rendered SQL string, built lazily on first access."""
        if self._sql is False:
            self.prepare()
        return self._sql

    @staticmethod
    def _sql_entity(value):
        # Bug fix: the previous format string '{0}{1}' referenced two
        # placeholders but received a single argument, raising IndexError
        # on every call.  Render the single value instead.
        return '{0}'.format(str(value))
class ResultContainer(object):
    """Map the rows of a SELECT cursor onto Table instances.

    Column aliases in the cursor description are expected to look like
    "<table>_<column>"; each row becomes one instance of ``table`` stored
    in ``self.result``.
    """

    def __init__(self, table, cursor):
        self.table = table
        self.cursor = cursor
        # sql2py: result-column index -> "<table>_<column>" alias
        self.sql2py = {}
        self.result = []
        if self.cursor.description is not None:
            for i in range(len(self.cursor.description)):
                desc = self.cursor.description[i][0]
                # some drivers return column names as bytes
                if isinstance(desc, bytes):
                    desc = desc.decode('utf-8')
                self.sql2py[i] = desc
            self.parse()

    def parse(self):
        """Parse rows
        Todo:
            * Allow cursor.fetchone()? (memory issue)
        """
        rows = self.cursor.fetchall()
        for row in rows:
            self.parse_row(row)
        self.cursor.close()

    def parse_row(self, row):
        """Convert one cursor row into a ``table`` instance and append it
        to ``self.result``."""
        item = self.table()
        for k, f in self.sql2py.items():
            tables = Tables.tables
            # alias format is "<table>_<column>"; split only on the first
            # underscore so column names containing '_' survive
            id_table = f.split('_')[0]
            id_column = f.split('_', 1)[1]
            # columns of joined tables are resolved through _backrefs
            if id_table != self.table._db_table:
                id_table = self.table._backrefs[id_table]
            # columns exposing a _py converter get their value converted to
            # the matching Python type; others are stored raw
            if '_py' in dir(tables[id_table]._columns[id_column]):
                item._data[f] = tables[id_table]._columns[id_column]._py(row[k])
            else:
                item._data[f] = row[k]
        # NOTE(review): presumably finalizes the instance after its _data
        # is filled -- confirm against the Table implementation
        item.__load__()
        self.result.append(item)
class EntityContainer(object):
    """Ordered collection of SQLEntity/EntityContainer elements.

    Attributes:
        entities (list): contained SQLEntity and EntityContainer items.
        separator (str): separator placed between elements when rendered.
        executable (bool): whether the container renders a runnable query.
    """

    def __init__(self, separator=' '):
        self.separator = separator
        self.entities = []
        self.executable = False
        self._walker = False

    def __add__(self, entity):
        """Append *entity* and return self (enables ``container += x``)."""
        self.entities.append(entity)
        return self

    def __len__(self):
        """Number of contained elements."""
        return len(self.entities)

    @property
    def walker(self):
        """Lazily build and cache the ContainerWalker for this container."""
        if self._walker is False:
            self._walker = ContainerWalker(
                self.entities, self.separator, self.executable)
        return self._walker
class EntityExecutableContainer(EntityContainer):
    """EntityContainer that renders a complete SQL query and can execute it
    against its table's database."""

    def __init__(self, table):
        super(EntityExecutableContainer, self).__init__()
        self.table = table
        self.executable = True

    @property
    def sql(self):
        """Full SQL string produced by the container's walker."""
        return self.walker.sql

    def execute(self, commit=False):
        """Run the rendered query on the table's database connection."""
        return self.table._database.execute(self.sql, commit=commit)
class DropContainer(EntityExecutableContainer):
"""DROP TABLE SQL query."""
def __init__(self, table):
super(DropContainer, self).__init__(table)
self += SQLEntity('DROP TABLE IF EXISTS {0};'.format(self.table._sql_entity))
self.execute()
class CreateTableContainer(EntityExecutableContainer):
"""CREATE TABLE SQL query."""
def __init__(self, table):
super(CreateTableContainer, self).__init__(table)
self += SQLEntity('CREATE TABLE IF NOT EXISTS {0} ('.format(self.table._sql_entity))
args_create = EntityContainer(separator=', ')
indexes = EntityContainer(separator=', ')
indexes += SQLEntity('PRIMARY KEY ({0})'.format(self.table._pkey.sql_entities['name']))
for key, column in self.table._columns.items():
column_create = EntityContainer(separator=' ')
column_create += column.sql_entities['name']
if column.sql_type_size is not None:
column_create += SQLEntity('{0}({1})'.format(column.sql_type, column.sql_type_size))
else:
column_create += SQLEntity(column.sql_type)
if isinstance(column, FKeyColumn) or isinstance(column, PKeyColumn):
column_create += SQLEntity('UNSIGNED')
if column.unique and not column.index:
column_create += SQLEntity('UNIQUE')
if column.null is False:
column_create += SQLEntity('NOT NULL')
else:
column_create += SQLEntity('NULL')
# if column.default:
# column_create += SQLEntity('DEFAULT {0}'.format(column.escape(column.default)))
if column.pkey and isinstance(column, IntegerColumn):
column_create += SQLEntity('AUTO_INCREMENT')
args_create += column_create
if column.index:
unique = '' if not column.unique else 'UNIQUE'
indexes += SQLEntity('{0} INDEX {1} ({2})'.format(unique, column.sql_entities['index'], column.sql_entities['name']))
args_create += indexes
self += args_create
self += SQLEntity(') ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;')
DropContainer(self.table)
self.execute()
class InsertContainer(EntityExecutableContainer):
"""Table.insert(table_instance)"""
def __init__(self, table, instance):
super(InsertContainer, self).__init__(table)
self.filled = []
self.instance = instance
self.pkey_id = False
self += SQLEntity('INSERT INTO')
self += self.table._sql_entity
self += SQLEntity('(')
columns_names = EntityContainer(separator=', ')
columns_values = EntityContainer(separator=', ')
for key, column in self.table._columns.items():
value = getattr(self.instance, key)
print (key +':'+ value)
if value:
if column.pkey is True:
self.pkey_id = value
columns_names += column.sql_entities['name']
columns_values += column.escape(getattr(self.instance, key))
for k, v in self.table._defaults.items():
if not value and key == k:
columns_names += self.table._columns[k].sql_entities['name']
columns_values += column.escape(v)
self += columns_names
self += SQLEntity(')')
self += SQLEntity('VALUES (')
self += columns_values
self += SQLEntity(');')
def execute(self):
cursor = self.table._database.execute(self.sql)
if self.pkey_id is False:
self.pkey_id = self.table._database.insert_id(cursor)
self.table._database.commit()
return self.table.get(self.table._pkey == self.pkey_id)
class CreateContainer(EntityExecutableContainer):
"""INSERT INTO SQL query. Used for Table.create()"""
def __init__(self, table, **kwargs):
super(CreateContainer, self).__init__(table)
self.filled = []
self.pkey_id = False
self += SQLEntity('INSERT INTO')
self += self.table._sql_entity
self += SQLEntity('(')
columns_names = EntityContainer(separator=',')
columns_values = EntityContainer(separator=',')
for attr, value in kwargs.items():
if attr in self.table._columns.keys():
columns_names += self.table._columns[attr].sql_entities['name']
columns_values += self.table._columns[attr].escape(value)
if self.table._columns[attr].pkey is True:
self.pkey_id = value
self.filled.append(attr)
for key, column in self.table._defaults.items():
if key not in self.filled:
columns_names += self.table._columns[key].sql_entities['name']
columns_values += self.table._columns[key].escape(self.table._columns[key].default)
self += columns_names
self += SQLEntity(')')
self += SQLEntity('VALUES (')
self += columns_values
self += SQLEntity(');')
def execute(self):
cursor = self.table._database.execute(self.sql)
if self.pkey_id is False:
self.pkey_id = self.table._database.insert_id(cursor)
self.table._database.commit()
return self.table.get(self.table._pkey == self.pkey_id)
class SaveContainer(EntityExecutableContainer):
"""UPDATE SQL Query. Used for TableInstance.save()"""
def __init__(self, table, instance):
super(SaveContainer, self).__init__(table)
self += SQLEntity('UPDATE')
self += self.table._sql_entity
self += SQLEntity('SET')
columns = EntityContainer(separator=',')
to_update = []
for key, column in self.table._columns.items():
columns += SQLEntity('{0}={1}'.format(
column,
column.escape(getattr(instance, key))
)
)
if isinstance(column, FKeyColumn):
to_update.append(getattr(instance, column.reference))
self += columns
self += SQLEntity('WHERE {0}={1} LIMIT 1'.format(
self.table._pkey,
self.table._pkey.escape(getattr(instance, self.table._pkey.name))
))
self.execute(commit=True)
for item in to_update:
if item:
item.save()
class RemoveContainer(EntityExecutableContainer):
"""DELETE SQL Query. Used for TableInstance.remove()"""
def __init__(self, table, instance):
super(RemoveContainer, self).__init__(table)
self += SQLEntity('DELETE FROM')
self += self.table._sql_entity
self += SQLEntity('WHERE {0}={1} LIMIT 1'.format(
self.table._pkey,
self.table._pkey.escape(getattr(instance, self.table._pkey.name))
))
self.execute(commit=True)
def _generative(func):
"""Chainable method"""
@wraps(func)
def decorator(self, *args, **kwargs):
func(self, *args, **kwargs)
return self
return decorator
class ConditionableExecutableContainer(EntityExecutableContainer):
"""Conditionable query, with where, limit, group, having..."""
def __init__(self, table, *args, **kwargs):
super(ConditionableExecutableContainer, self).__init__(table)
self._where = False
self._group = False
self._order = False
def clone(self):
return copy.deepcopy(self)
@_generative
def where(self, *conditions):
if self._where is False:
self += SQLEntity('WHERE')
self._where = True
else:
self += SQLEntity('AND')
size = len(conditions) - 1
i = 0
if size == 0:
if isinstance(conditions[0], SQLCondition):
self += conditions[0]
else:
self += SQLEntity(conditions[0])
else:
for condition in conditions:
if isinstance(condition, SQLCondition):
self += SQLEntity('(')
self += condition
self += SQLEntity(')')
if i < size:
self += SQLEntity('AND')
i += 1
@_generative
def order_by(self, column, order='DESC'):
if self._order is False:
self += SQLEntity('ORDER BY')
self._order = True
else:
self += SQLEntity(',')
if isinstance(column, str):
self += SQLEntity(column)
else:
self += column
self += SQLEntity(order)
@_generative
def group_by(self, group_by):
if self._group is False:
self += SQLEntity('GROUP BY')
self._group = True
else:
self += SQLEntity(',')
if isinstance(group_by, str):
self += SQLEntity(group_by)
def limit(self, limit, position=0):
self += SQLEntity('LIMIT {0},{1}'.format(position, limit))
if limit == 1:
return self.execute(unique=True)
return self.execute()
def one(self):
return self.limit(1)
def all(self):
return self.execute()
class SelectContainer(ConditionableExecutableContainer):
"""SELECT SQL Query."""
def __init__(self, table, *args, **kwargs):
super(SelectContainer, self).__init__(table)
self.kwargs = kwargs
self.args = args
self.is_count = kwargs.get('is_count') or False
self.selected = []
self.add_from = kwargs.get('add_from') or False
self.executable = True
# add selected columns
if self.is_count:
columns = SQLEntity('COUNT(*)')
else:
columns = EntityContainer(separator=',')
for column in self.table._columns.values() if not args else args:
columns += column.sql_entities['selection']
self.selected.append(hash(column))
# add selected tables
tables = EntityContainer(separator=',')
tables += self.table._sql_entity
if self.add_from:
tables += SQLEntity(self.add_from)
# add joins
joins = EntityContainer()
for foreign in reversed(self.table._foreigns):
if hash(foreign['column']) in self.selected or self.is_count:
join = 'INNER' if foreign['column'].required else 'LEFT'
joins += SQLJoin(join, foreign['table']._sql_entity, foreign['left_on'], foreign['right_on'])
if not self.is_count:
for key, column in foreign['table']._columns.items():
columns += SQLColumn(
column.sql_column,
column.table._db_table,
'{0}_{1}'.format(foreign['column'].reference, column.sql_column)
)
self += SQLEntity('SELECT')
self += columns
self += SQLEntity('FROM')
self += tables
if len(joins) != 0:
self += joins
def execute(self, unique=False):
cursor = self.table._database.execute(self.sql)
if self.is_count:
return cursor.fetchone()[0]
if unique:
try:
return ResultContainer(self.table, cursor).result[0]
except IndexError:
return False
return ResultContainer(self.table, cursor).result
def count(self):
self.entities[1] = SQLEntity('COUNT(*)')
self.is_count = True
return self.execute()
| 25.809981 | 121 | 0.692348 |
from __future__ import unicode_literals
from functools import wraps
import copy
import hashlib
from .entities import SQLEntity, SQLJoin, SQLCondition, SQLColumn
from .columns import FKeyColumn, PKeyColumn, IntegerColumn
from .static import Tables
class ContainerWalker(object):
def __init__(self, entities, separator, executable, *args, **kwargs):
self._sql = False
self.entities = entities
self.separator = separator
def prepare(self):
sql = []
for entity in self.entities:
if isinstance(entity, EntityContainer):
sql.append(
entity.separator.join(
map(str, entity.walker.prepare())
)
)
else:
sql.append(str(entity))
self._sql = self.separator.join(map(str, sql)).strip()
return sql
@property
def sql(self):
if self._sql is False:
self.prepare()
return self._sql
@staticmethod
def _sql_entity(value):
return '{0}{1}'.format(str(value))
class ResultContainer(object):
def __init__(self, table, cursor):
self.table = table
self.cursor = cursor
self.sql2py = {}
self.result = []
if self.cursor.description is not None:
for i in range(len(self.cursor.description)):
desc = self.cursor.description[i][0]
if isinstance(desc, bytes):
desc = desc.decode('utf-8')
self.sql2py[i] = desc
self.parse()
def parse(self):
rows = self.cursor.fetchall()
for row in rows:
self.parse_row(row)
self.cursor.close()
def parse_row(self, row):
item = self.table()
for k, f in self.sql2py.items():
tables = Tables.tables
id_table = f.split('_')[0]
id_column = f.split('_', 1)[1]
if id_table != self.table._db_table:
id_table = self.table._backrefs[id_table]
if '_py' in dir(tables[id_table]._columns[id_column]):
item._data[f] = tables[id_table]._columns[id_column]._py(row[k])
else:
item._data[f] = row[k]
item.__load__()
self.result.append(item)
class EntityContainer(object):
def __init__(self, separator=' '):
self._walker = False
self.entities = []
self.separator = separator
self.executable = False
def __add__(self, entity):
self.entities.append(entity)
return self
def __len__(self):
return len(self.entities)
@property
def walker(self):
if not self._walker:
self._walker = ContainerWalker(self.entities, self.separator, self.executable)
return self._walker
class EntityExecutableContainer(EntityContainer):
def __init__(self, table):
super(EntityExecutableContainer, self).__init__()
self.table = table
self.executable = True
@property
def sql(self):
return self.walker.sql
def execute(self, commit=False):
return self.table._database.execute(self.sql, commit=commit)
class DropContainer(EntityExecutableContainer):
def __init__(self, table):
super(DropContainer, self).__init__(table)
self += SQLEntity('DROP TABLE IF EXISTS {0};'.format(self.table._sql_entity))
self.execute()
class CreateTableContainer(EntityExecutableContainer):
def __init__(self, table):
super(CreateTableContainer, self).__init__(table)
self += SQLEntity('CREATE TABLE IF NOT EXISTS {0} ('.format(self.table._sql_entity))
args_create = EntityContainer(separator=', ')
indexes = EntityContainer(separator=', ')
indexes += SQLEntity('PRIMARY KEY ({0})'.format(self.table._pkey.sql_entities['name']))
for key, column in self.table._columns.items():
column_create = EntityContainer(separator=' ')
column_create += column.sql_entities['name']
if column.sql_type_size is not None:
column_create += SQLEntity('{0}({1})'.format(column.sql_type, column.sql_type_size))
else:
column_create += SQLEntity(column.sql_type)
if isinstance(column, FKeyColumn) or isinstance(column, PKeyColumn):
column_create += SQLEntity('UNSIGNED')
if column.unique and not column.index:
column_create += SQLEntity('UNIQUE')
if column.null is False:
column_create += SQLEntity('NOT NULL')
else:
column_create += SQLEntity('NULL')
if column.pkey and isinstance(column, IntegerColumn):
column_create += SQLEntity('AUTO_INCREMENT')
args_create += column_create
if column.index:
unique = '' if not column.unique else 'UNIQUE'
indexes += SQLEntity('{0} INDEX {1} ({2})'.format(unique, column.sql_entities['index'], column.sql_entities['name']))
args_create += indexes
self += args_create
self += SQLEntity(') ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;')
DropContainer(self.table)
self.execute()
class InsertContainer(EntityExecutableContainer):
def __init__(self, table, instance):
super(InsertContainer, self).__init__(table)
self.filled = []
self.instance = instance
self.pkey_id = False
self += SQLEntity('INSERT INTO')
self += self.table._sql_entity
self += SQLEntity('(')
columns_names = EntityContainer(separator=', ')
columns_values = EntityContainer(separator=', ')
for key, column in self.table._columns.items():
value = getattr(self.instance, key)
print (key +':'+ value)
if value:
if column.pkey is True:
self.pkey_id = value
columns_names += column.sql_entities['name']
columns_values += column.escape(getattr(self.instance, key))
for k, v in self.table._defaults.items():
if not value and key == k:
columns_names += self.table._columns[k].sql_entities['name']
columns_values += column.escape(v)
self += columns_names
self += SQLEntity(')')
self += SQLEntity('VALUES (')
self += columns_values
self += SQLEntity(');')
def execute(self):
cursor = self.table._database.execute(self.sql)
if self.pkey_id is False:
self.pkey_id = self.table._database.insert_id(cursor)
self.table._database.commit()
return self.table.get(self.table._pkey == self.pkey_id)
class CreateContainer(EntityExecutableContainer):
def __init__(self, table, **kwargs):
super(CreateContainer, self).__init__(table)
self.filled = []
self.pkey_id = False
self += SQLEntity('INSERT INTO')
self += self.table._sql_entity
self += SQLEntity('(')
columns_names = EntityContainer(separator=',')
columns_values = EntityContainer(separator=',')
for attr, value in kwargs.items():
if attr in self.table._columns.keys():
columns_names += self.table._columns[attr].sql_entities['name']
columns_values += self.table._columns[attr].escape(value)
if self.table._columns[attr].pkey is True:
self.pkey_id = value
self.filled.append(attr)
for key, column in self.table._defaults.items():
if key not in self.filled:
columns_names += self.table._columns[key].sql_entities['name']
columns_values += self.table._columns[key].escape(self.table._columns[key].default)
self += columns_names
self += SQLEntity(')')
self += SQLEntity('VALUES (')
self += columns_values
self += SQLEntity(');')
def execute(self):
cursor = self.table._database.execute(self.sql)
if self.pkey_id is False:
self.pkey_id = self.table._database.insert_id(cursor)
self.table._database.commit()
return self.table.get(self.table._pkey == self.pkey_id)
class SaveContainer(EntityExecutableContainer):
def __init__(self, table, instance):
super(SaveContainer, self).__init__(table)
self += SQLEntity('UPDATE')
self += self.table._sql_entity
self += SQLEntity('SET')
columns = EntityContainer(separator=',')
to_update = []
for key, column in self.table._columns.items():
columns += SQLEntity('{0}={1}'.format(
column,
column.escape(getattr(instance, key))
)
)
if isinstance(column, FKeyColumn):
to_update.append(getattr(instance, column.reference))
self += columns
self += SQLEntity('WHERE {0}={1} LIMIT 1'.format(
self.table._pkey,
self.table._pkey.escape(getattr(instance, self.table._pkey.name))
))
self.execute(commit=True)
for item in to_update:
if item:
item.save()
class RemoveContainer(EntityExecutableContainer):
def __init__(self, table, instance):
super(RemoveContainer, self).__init__(table)
self += SQLEntity('DELETE FROM')
self += self.table._sql_entity
self += SQLEntity('WHERE {0}={1} LIMIT 1'.format(
self.table._pkey,
self.table._pkey.escape(getattr(instance, self.table._pkey.name))
))
self.execute(commit=True)
def _generative(func):
@wraps(func)
def decorator(self, *args, **kwargs):
func(self, *args, **kwargs)
return self
return decorator
class ConditionableExecutableContainer(EntityExecutableContainer):
def __init__(self, table, *args, **kwargs):
super(ConditionableExecutableContainer, self).__init__(table)
self._where = False
self._group = False
self._order = False
def clone(self):
return copy.deepcopy(self)
@_generative
def where(self, *conditions):
if self._where is False:
self += SQLEntity('WHERE')
self._where = True
else:
self += SQLEntity('AND')
size = len(conditions) - 1
i = 0
if size == 0:
if isinstance(conditions[0], SQLCondition):
self += conditions[0]
else:
self += SQLEntity(conditions[0])
else:
for condition in conditions:
if isinstance(condition, SQLCondition):
self += SQLEntity('(')
self += condition
self += SQLEntity(')')
if i < size:
self += SQLEntity('AND')
i += 1
@_generative
def order_by(self, column, order='DESC'):
if self._order is False:
self += SQLEntity('ORDER BY')
self._order = True
else:
self += SQLEntity(',')
if isinstance(column, str):
self += SQLEntity(column)
else:
self += column
self += SQLEntity(order)
@_generative
def group_by(self, group_by):
if self._group is False:
self += SQLEntity('GROUP BY')
self._group = True
else:
self += SQLEntity(',')
if isinstance(group_by, str):
self += SQLEntity(group_by)
def limit(self, limit, position=0):
self += SQLEntity('LIMIT {0},{1}'.format(position, limit))
if limit == 1:
return self.execute(unique=True)
return self.execute()
def one(self):
return self.limit(1)
def all(self):
return self.execute()
class SelectContainer(ConditionableExecutableContainer):
def __init__(self, table, *args, **kwargs):
super(SelectContainer, self).__init__(table)
self.kwargs = kwargs
self.args = args
self.is_count = kwargs.get('is_count') or False
self.selected = []
self.add_from = kwargs.get('add_from') or False
self.executable = True
if self.is_count:
columns = SQLEntity('COUNT(*)')
else:
columns = EntityContainer(separator=',')
for column in self.table._columns.values() if not args else args:
columns += column.sql_entities['selection']
self.selected.append(hash(column))
tables = EntityContainer(separator=',')
tables += self.table._sql_entity
if self.add_from:
tables += SQLEntity(self.add_from)
joins = EntityContainer()
for foreign in reversed(self.table._foreigns):
if hash(foreign['column']) in self.selected or self.is_count:
join = 'INNER' if foreign['column'].required else 'LEFT'
joins += SQLJoin(join, foreign['table']._sql_entity, foreign['left_on'], foreign['right_on'])
if not self.is_count:
for key, column in foreign['table']._columns.items():
columns += SQLColumn(
column.sql_column,
column.table._db_table,
'{0}_{1}'.format(foreign['column'].reference, column.sql_column)
)
self += SQLEntity('SELECT')
self += columns
self += SQLEntity('FROM')
self += tables
if len(joins) != 0:
self += joins
def execute(self, unique=False):
cursor = self.table._database.execute(self.sql)
if self.is_count:
return cursor.fetchone()[0]
if unique:
try:
return ResultContainer(self.table, cursor).result[0]
except IndexError:
return False
return ResultContainer(self.table, cursor).result
def count(self):
self.entities[1] = SQLEntity('COUNT(*)')
self.is_count = True
return self.execute()
| true | true |
f71f86944f4a3f67142dcc0a2330fcdd6e0e21be | 8,966 | py | Python | lib/kubernetes/client/models/v1_resource_attributes.py | splunkenizer/splunk_as_a_service_app | 97c4aaf927d2171bf131126cf9b70489ac75bc5a | [
"Apache-2.0"
] | 7 | 2019-12-21T00:14:14.000Z | 2021-03-11T14:51:37.000Z | lib/kubernetes/client/models/v1_resource_attributes.py | splunkenizer/splunk_as_a_service_app | 97c4aaf927d2171bf131126cf9b70489ac75bc5a | [
"Apache-2.0"
] | 29 | 2019-10-09T11:16:21.000Z | 2020-06-23T09:32:09.000Z | lib/kubernetes/client/models/v1_resource_attributes.py | splunkenizer/splunk_as_a_service_app | 97c4aaf927d2171bf131126cf9b70489ac75bc5a | [
"Apache-2.0"
] | 1 | 2021-05-07T10:13:31.000Z | 2021-05-07T10:13:31.000Z | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.14.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1ResourceAttributes(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'group': 'str',
'name': 'str',
'namespace': 'str',
'resource': 'str',
'subresource': 'str',
'verb': 'str',
'version': 'str'
}
attribute_map = {
'group': 'group',
'name': 'name',
'namespace': 'namespace',
'resource': 'resource',
'subresource': 'subresource',
'verb': 'verb',
'version': 'version'
}
def __init__(self, group=None, name=None, namespace=None, resource=None, subresource=None, verb=None, version=None):
"""
V1ResourceAttributes - a model defined in Swagger
"""
self._group = None
self._name = None
self._namespace = None
self._resource = None
self._subresource = None
self._verb = None
self._version = None
self.discriminator = None
if group is not None:
self.group = group
if name is not None:
self.name = name
if namespace is not None:
self.namespace = namespace
if resource is not None:
self.resource = resource
if subresource is not None:
self.subresource = subresource
if verb is not None:
self.verb = verb
if version is not None:
self.version = version
@property
def group(self):
"""
Gets the group of this V1ResourceAttributes.
Group is the API Group of the Resource. \"*\" means all.
:return: The group of this V1ResourceAttributes.
:rtype: str
"""
return self._group
@group.setter
def group(self, group):
"""
Sets the group of this V1ResourceAttributes.
Group is the API Group of the Resource. \"*\" means all.
:param group: The group of this V1ResourceAttributes.
:type: str
"""
self._group = group
@property
def name(self):
"""
Gets the name of this V1ResourceAttributes.
Name is the name of the resource being requested for a \"get\" or deleted for a \"delete\". \"\" (empty) means all.
:return: The name of this V1ResourceAttributes.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this V1ResourceAttributes.
Name is the name of the resource being requested for a \"get\" or deleted for a \"delete\". \"\" (empty) means all.
:param name: The name of this V1ResourceAttributes.
:type: str
"""
self._name = name
@property
def namespace(self):
"""
Gets the namespace of this V1ResourceAttributes.
Namespace is the namespace of the action being requested. Currently, there is no distinction between no namespace and all namespaces \"\" (empty) is defaulted for LocalSubjectAccessReviews \"\" (empty) is empty for cluster-scoped resources \"\" (empty) means \"all\" for namespace scoped resources from a SubjectAccessReview or SelfSubjectAccessReview
:return: The namespace of this V1ResourceAttributes.
:rtype: str
"""
return self._namespace
@namespace.setter
def namespace(self, namespace):
"""
Sets the namespace of this V1ResourceAttributes.
Namespace is the namespace of the action being requested. Currently, there is no distinction between no namespace and all namespaces \"\" (empty) is defaulted for LocalSubjectAccessReviews \"\" (empty) is empty for cluster-scoped resources \"\" (empty) means \"all\" for namespace scoped resources from a SubjectAccessReview or SelfSubjectAccessReview
:param namespace: The namespace of this V1ResourceAttributes.
:type: str
"""
self._namespace = namespace
@property
def resource(self):
"""
Gets the resource of this V1ResourceAttributes.
Resource is one of the existing resource types. \"*\" means all.
:return: The resource of this V1ResourceAttributes.
:rtype: str
"""
return self._resource
@resource.setter
def resource(self, resource):
"""
Sets the resource of this V1ResourceAttributes.
Resource is one of the existing resource types. \"*\" means all.
:param resource: The resource of this V1ResourceAttributes.
:type: str
"""
self._resource = resource
@property
def subresource(self):
"""
Gets the subresource of this V1ResourceAttributes.
Subresource is one of the existing resource types. \"\" means none.
:return: The subresource of this V1ResourceAttributes.
:rtype: str
"""
return self._subresource
@subresource.setter
def subresource(self, subresource):
"""
Sets the subresource of this V1ResourceAttributes.
Subresource is one of the existing resource types. \"\" means none.
:param subresource: The subresource of this V1ResourceAttributes.
:type: str
"""
self._subresource = subresource
@property
def verb(self):
"""
Gets the verb of this V1ResourceAttributes.
Verb is a kubernetes resource API verb, like: get, list, watch, create, update, delete, proxy. \"*\" means all.
:return: The verb of this V1ResourceAttributes.
:rtype: str
"""
return self._verb
@verb.setter
def verb(self, verb):
"""
Sets the verb of this V1ResourceAttributes.
Verb is a kubernetes resource API verb, like: get, list, watch, create, update, delete, proxy. \"*\" means all.
:param verb: The verb of this V1ResourceAttributes.
:type: str
"""
self._verb = verb
@property
def version(self):
"""
Gets the version of this V1ResourceAttributes.
Version is the API Version of the Resource. \"*\" means all.
:return: The version of this V1ResourceAttributes.
:rtype: str
"""
return self._version
@version.setter
def version(self, version):
"""
Sets the version of this V1ResourceAttributes.
Version is the API Version of the Resource. \"*\" means all.
:param version: The version of this V1ResourceAttributes.
:type: str
"""
self._version = version
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1ResourceAttributes):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| 30.39322 | 361 | 0.565135 |
from pprint import pformat
from six import iteritems
import re
class V1ResourceAttributes(object):
swagger_types = {
'group': 'str',
'name': 'str',
'namespace': 'str',
'resource': 'str',
'subresource': 'str',
'verb': 'str',
'version': 'str'
}
attribute_map = {
'group': 'group',
'name': 'name',
'namespace': 'namespace',
'resource': 'resource',
'subresource': 'subresource',
'verb': 'verb',
'version': 'version'
}
def __init__(self, group=None, name=None, namespace=None, resource=None, subresource=None, verb=None, version=None):
self._group = None
self._name = None
self._namespace = None
self._resource = None
self._subresource = None
self._verb = None
self._version = None
self.discriminator = None
if group is not None:
self.group = group
if name is not None:
self.name = name
if namespace is not None:
self.namespace = namespace
if resource is not None:
self.resource = resource
if subresource is not None:
self.subresource = subresource
if verb is not None:
self.verb = verb
if version is not None:
self.version = version
@property
def group(self):
return self._group
@group.setter
def group(self, group):
self._group = group
@property
def name(self):
return self._name
@name.setter
def name(self, name):
self._name = name
@property
def namespace(self):
return self._namespace
@namespace.setter
def namespace(self, namespace):
self._namespace = namespace
@property
def resource(self):
return self._resource
@resource.setter
def resource(self, resource):
self._resource = resource
@property
def subresource(self):
return self._subresource
@subresource.setter
def subresource(self, subresource):
self._subresource = subresource
@property
def verb(self):
return self._verb
@verb.setter
def verb(self, verb):
self._verb = verb
@property
def version(self):
return self._version
@version.setter
def version(self, version):
self._version = version
def to_dict(self):
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
return pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, V1ResourceAttributes):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true | true |
f71f86e48de8074e6e823ee832ad036d915afdae | 16,478 | py | Python | figures/kCSD_properties/targeted_basis.py | rdarie/kCSD-python | 5b9e1b1dce2ff95c0d981c2c4015b7a75199de9a | [
"BSD-3-Clause"
] | 11 | 2017-11-06T21:24:18.000Z | 2022-02-07T21:17:13.000Z | figures/kCSD_properties/targeted_basis.py | aeladly91/kCSD-python | 4dd0015e9c5598e7eceeeb25668e696e495b2026 | [
"BSD-3-Clause"
] | 105 | 2017-12-13T12:49:54.000Z | 2022-03-19T12:25:51.000Z | figures/kCSD_properties/targeted_basis.py | aeladly91/kCSD-python | 4dd0015e9c5598e7eceeeb25668e696e495b2026 | [
"BSD-3-Clause"
] | 27 | 2017-06-08T07:32:32.000Z | 2022-02-07T21:17:15.000Z | """
@author: mkowalska
"""
import os
from os.path import expanduser
import numpy as np
import matplotlib.pyplot as plt
import datetime
import time
from kcsd import ValidateKCSD, ValidateKCSD1D, SpectralStructure, KCSD1D
# Absolute path of this script (kept for provenance of saved results).
__abs_file__ = os.path.abspath(__file__)
home = expanduser('~')
# Timestamped output directory: ~/kCSD_results/<YYYYMMDD>/<HHMMSS>
DAY = datetime.datetime.now().strftime('%Y%m%d')
TIMESTR = time.strftime("%H%M%S")
SAVE_PATH = f"{home}/kCSD_results/{DAY}/{TIMESTR}"
def makemydir(directory):
    """
    Creates the directory (with parents) if it doesn't exist and enters it.

    Parameters
    ----------
    directory: string
        Path of the directory to create and change into.

    Returns
    -------
    None
    """
    # exist_ok=True suppresses only the "already exists" case; genuine
    # failures (e.g. permissions) now raise instead of being silently
    # swallowed by a blanket `except OSError: pass`.
    os.makedirs(directory, exist_ok=True)
    os.chdir(directory)
def save_source_code(save_path, timestr):
    """
    Saves a copy of this script's source code alongside the results.

    Parameters
    ----------
    save_path: string
        Directory in which the copy is written.
    timestr: float
        Timestamp used to suffix the saved file name.

    Returns
    -------
    None
    """
    dest = os.path.join(save_path, 'source_code_' + str(timestr))
    # Read through a context manager so the handle on this file is closed
    # (the original `open(__file__).read()` leaked the descriptor).
    with open(__file__) as src, open(dest, 'w') as sf:
        sf.write(src.read())
def csd_profile(x, seed):
    '''Gaussian current source density profile, normalized to peak 1.

    Parameters
    ----------
    x: numpy array
        x coordinates of true source profile.
    seed: list [r, mu]
        r: width of the source (the gaussian spans +/- 3 standard
        deviations); mu: x coordinate of the maximum.

    Returns
    -------
    gauss: numpy array
        Gaussian profile for given R and MU, scaled so its maximum over
        `x` equals 1.
    '''
    r, mu = seed
    stddev = r / 3.0
    # Standard normal density; the stray `**1` on the denominator in the
    # previous version was a no-op and has been removed.
    gauss = (np.exp(-((x - mu)**2) / (2 * stddev**2)) /
             (np.sqrt(2 * np.pi) * stddev))
    gauss /= np.max(gauss)
    return gauss
def targeted_basis(val, csd_at, true_csd, ele_pos, pots, n_src, R, MU,
                   true_csd_xlims, ele_lims, title, h=0.25, sigma=0.3,
                   csd_res=100, method='cross-validation', Rs=None,
                   lambdas=None):
    '''
    Runs kCSD analysis with a targeted basis and saves a summary figure.

    Parameters
    ----------
    val: object of the class ValidateKCSD.
    csd_at: numpy array
        Coordinates of ground truth data.
    true_csd: numpy array
        Values of ground truth data (true_csd).
    ele_pos: numpy array
        Locations of electrodes.
    pots: numpy array
        Potentials measured (calculated) on electrodes.
    n_src: int
        Number of basis sources.
    R: float
        Thickness of the groundtruth source.
    MU: float
        x coordinate of maximum amplitude of groundtruth source.
    true_csd_xlims: list
        Boundaries for ground truth space.
    ele_lims: list
        Boundaries for electrodes placement.
    title: string
        Name of the figure that is to be saved.
    h: float
        Thickness of analyzed cylindrical slice. Default: 0.25.
    sigma: float
        Space conductance of the medium. Default: 0.3.
    csd_res: int
        Resolution of ground truth. Default: 100.
    method: string
        Determines the method of regularization.
        Default: cross-validation.
    Rs: numpy 1D array
        Basis source parameter for crossvalidation. Default: None.
    lambdas: numpy 1D array
        Regularization parameter for crossvalidation. Default: None.

    Returns
    -------
    obj: object of the class KCSD1D
    k: object of the class ValidateKCSD1D
    '''
    k = ValidateKCSD1D(1, n_src_init=n_src, R_init=0.23,
                       ele_lims=ele_lims, est_xres=0.01,
                       true_csd_xlims=true_csd_xlims, sigma=sigma, h=h,
                       src_type='gauss')
    obj, est_csd = k.do_kcsd(pots, ele_pos, method=method, Rs=Rs,
                             lambdas=lambdas)
    # Error of the estimate against the analytic ground-truth profile.
    test_csd = csd_profile(obj.estm_x, [R, MU])
    rms = val.calculate_rms(test_csd, est_csd)
    titl = "Lambda: %0.2E; R: %0.2f; RMS_Error: %0.2E;" % (obj.lambd, obj.R,
                                                           rms)
    fig = k.make_plot(csd_at, true_csd, obj, est_csd, ele_pos, pots, titl)
    # The original joined SAVE_PATH with an absolute path that already
    # embedded SAVE_PATH; os.path.join simply discarded the first argument.
    # Build the destination path directly instead (same resulting file).
    fig.savefig(os.path.join(SAVE_PATH, title + '.png'))
    plt.close()
    return obj, k
def simulate_data(csd_profile, true_csd_xlims, R, MU, total_ele, ele_lims,
                  h=0.25, sigma=0.3, csd_res=100, noise=0):
    '''
    Generates a ground-truth CSD profile and the potentials it produces
    at the electrode positions.

    Parameters
    ----------
    csd_profile: function
        Function to produce csd profile.
    true_csd_xlims: list
        Boundaries for ground truth space.
    R: float
        Thickness of the groundtruth source.
    MU: float
        x coordinate of maximum amplitude of groundtruth source.
    total_ele: int
        Number of electrodes.
    ele_lims: list
        Boundaries for electrodes placement.
    h: float
        Thickness of analyzed cylindrical slice. Default: 0.25.
    sigma: float
        Space conductance of the medium. Default: 0.3.
    csd_res: int
        Resolution of ground truth. Default: 100.
    noise: float
        Determines the level of noise in the data. Default: 0.

    Returns
    -------
    csd_at: numpy array
        Coordinates of ground truth data.
    true_csd: numpy array
        Values of ground truth data (true_csd).
    ele_pos: numpy array
        Locations of electrodes.
    pots: numpy array
        Potentials measured (calculated) on electrodes.
    validator: object of the class ValidateKCSD
    '''
    validator = ValidateKCSD(1)
    xmin, xmax = true_csd_xlims
    csd_at = np.linspace(xmin, xmax, csd_res)
    true_csd = csd_profile(csd_at, [R, MU])
    ele_pos = validator.generate_electrodes(total_ele=total_ele,
                                            ele_lims=ele_lims)
    pots = validator.calculate_potential(true_csd, csd_at, ele_pos, h, sigma)
    # noise=None disables the noise stage entirely; noise=0 still calls
    # add_noise (mirroring the original behaviour).
    if noise is not None:
        pots = validator.add_noise(pots, 10, level=noise)
    return csd_at, true_csd, ele_pos, pots, validator
def structure_investigation(csd_profile, true_csd_xlims, n_src, R, MU,
                            total_ele, ele_lims, title, h=0.25, sigma=0.3,
                            csd_res=100, method='cross-validation', Rs=None,
                            lambdas=None, noise=0):
    '''
    Simulates data for the given ground-truth parameters and runs the
    targeted-basis kCSD analysis on it.

    Parameters
    ----------
    csd_profile: function
        Function to produce csd profile.
    true_csd_xlims: list
        Boundaries for ground truth space.
    n_src: int
        Number of basis sources.
    R: float
        Thickness of the groundtruth source.
    MU: float
        x coordinate of maximum amplitude of groundtruth source.
    total_ele: int
        Number of electrodes.
    ele_lims: list
        Boundaries for electrodes placement.
    title: string
        Name of the figure that is to be saved.
    h: float
        Thickness of analyzed cylindrical slice. Default: 0.25.
    sigma: float
        Space conductance of the medium. Default: 0.3.
    csd_res: int
        Resolution of ground truth. Default: 100.
    method: string
        Determines the method of regularization.
        Default: cross-validation.
    Rs: numpy 1D array
        Basis source parameter for crossvalidation. Default: None.
    lambdas: numpy 1D array
        Regularization parameter for crossvalidation. Default: None.
    noise: float
        Determines the level of noise in the data. Default: 0.

    Returns
    -------
    obj: object of the class KCSD1D
    '''
    # The original built a throw-away ValidateKCSD(1) here that was
    # immediately overwritten by simulate_data's return value -- removed.
    csd_at, true_csd, ele_pos, pots, val = simulate_data(csd_profile,
                                                         true_csd_xlims, R, MU,
                                                         total_ele, ele_lims,
                                                         h=h, sigma=sigma,
                                                         noise=noise)
    # Forward h/sigma/csd_res instead of re-hardcoding 0.25/0.3/100, so the
    # caller's values are honoured (defaults keep the previous behaviour).
    obj, k = targeted_basis(val, csd_at, true_csd, ele_pos, pots, n_src, R, MU,
                            true_csd_xlims, ele_lims, title, h=h,
                            sigma=sigma, csd_res=csd_res, method=method,
                            Rs=Rs, lambdas=lambdas)
    return obj
def plot_eigenvalues(eigenvalues, save_path, title):
    '''
    Plots the eigenvalues of the kernel matrix (k_pot) and saves the
    figure under *save_path*.

    Parameters
    ----------
    eigenvalues: numpy array
        Eigenvalues of k_pot matrix.
    save_path: string
        Directory.
    title: string
        Suffix of the saved file name.

    Returns
    -------
    None
    '''
    fig = plt.figure()
    plt.plot(eigenvalues, '--', marker='.')
    plt.title('Eigenvalue decomposition of kernel matrix. ele_lims=basis_lims')
    plt.xlabel('Number of components')
    plt.ylabel('Eigenvalues')
    plt.show()
    # The original passed an absolute path (already containing save_path)
    # as the second argument of os.path.join, which made the first argument
    # dead weight and broke relative save_path values.  Join with the bare
    # file name instead.
    fig.savefig(os.path.join(save_path, 'eigenvalues_for_' + title + '.png'))
    plt.close()
def plot_eigenvectors(eigenvectors, save_path, title):
    """
    Plots each eigenvector of the kernel matrix (k_pot) in its own subplot
    and saves the figure under *save_path*.

    Parameters
    ----------
    eigenvectors: numpy array
        Eigenvectors of k_pot matrix (one per column).
    save_path: string
        Directory.
    title: string
        Suffix of the saved file name.

    Returns
    -------
    None
    """
    fig = plt.figure(figsize=(15, 15))
    plt.suptitle('Eigenvalue decomposition of kernel matrix for different '
                 'number of basis sources')
    for i in range(eigenvectors.shape[1]):
        plt.subplot(int(eigenvectors.shape[1]/2) + 1, 2, i + 1)
        plt.plot(eigenvectors[:, i].T, '--', marker='.')
        plt.ylabel('Eigenvectors')
        plt.title(r'$v_' + str(i + 1) + '$')
        plt.xlabel('Number of components')
    plt.tight_layout()
    plt.show()
    # os.path.join previously received an absolute second argument that
    # already embedded save_path, discarding the first argument; build the
    # destination from the bare file name instead.
    fig.savefig(os.path.join(save_path, 'eigenvectors_for_' + title + '.png'))
    plt.close()
def modified_bases(val, pots, ele_pos, n_src, title=None, h=0.25, sigma=0.3,
                   gdx=0.01, ext_x=0, xmin=0, xmax=1, R=0.2, MU=0.25,
                   method='cross-validation', Rs=None, lambdas=None):
    '''
    Runs a KCSD1D reconstruction with the basis restricted to
    [xmin, xmax] and returns the fitted estimator.

    Parameters
    ----------
    val: object of the class ValidateKCSD1D
        Kept for backward compatibility (no longer used here).
    pots: numpy array
        Potentials measured (calculated) on electrodes.
    ele_pos: numpy array
        Locations of electrodes.
    n_src: int
        Number of basis sources.
    title: string
        Kept for backward compatibility (no longer used here).
    h: float
        Thickness of analyzed cylindrical slice. Default: 0.25.
    sigma: float
        Space conductance of the medium. Default: 0.3.
    gdx: float
        Space increments in the estimation space. Default: 0.01.
    ext_x: float
        Length of space extension: xmin-ext_x ... xmax+ext_x. Default: 0.
    xmin: float
        Boundaries for CSD estimation space.
    xmax: float
        Boundaries for CSD estimation space.
    R: float
        Kept for backward compatibility (no longer used here). Default: 0.2.
    MU: float
        Kept for backward compatibility (no longer used here). Default: 0.25.
    method: string
        Determines the method of regularization.
        Default: cross-validation.
    Rs: numpy 1D array
        Basis source parameter for crossvalidation. Default: None.
    lambdas: numpy 1D array
        Regularization parameter for crossvalidation. Default: None.

    Returns
    -------
    obj_m: object of the class KCSD1D
    '''
    pots = pots.reshape((len(ele_pos), 1))
    obj_m = KCSD1D(ele_pos, pots, src_type='gauss', sigma=sigma, h=h, gdx=gdx,
                   n_src_init=n_src, ext_x=ext_x, xmin=xmin, xmax=xmax)
    if method == 'cross-validation':
        obj_m.cross_validate(Rs=Rs, lambdas=lambdas)
    elif method == 'L-curve':
        obj_m.L_curve(Rs=Rs, lambdas=lambdas)
    # values('CSD') is kept in case it populates estimator state; the RMS
    # computed afterwards in the original was never used or returned, so
    # that dead code (and the commented-out plotting block) was removed.
    obj_m.values('CSD')
    return obj_m
def plot_k_interp_cross_v(k_icross, eigenvectors, save_path, title):
    """
    Plots the product of the cross-kernel matrix with each eigenvector of
    the k_pot matrix and saves the figure.

    Parameters
    ----------
    k_icross: numpy array
        Cross kernel matrix.
    eigenvectors: numpy array
        Eigenvectors of k_pot matrix (one per column).
    save_path: string
        Directory.
    title: string
        Name suffix of the figure that is to be saved.

    Returns
    -------
    None
    """
    fig = plt.figure(figsize=(15, 15))
    # Iterate over eigenvector columns (shape[1]), mirroring
    # plot_eigenvectors; the original used shape[0], which only worked
    # because k_pot is square and would raise IndexError otherwise.
    for i in range(eigenvectors.shape[1]):
        plt.subplot(int(k_icross.shape[1]/2) + 1, 2, i + 1)
        plt.plot(np.dot(k_icross, eigenvectors[:, i]), '--',
                 marker='.')
        plt.title(r'$\tilde{K}*v_' + str(i + 1) + '$')
        plt.xlabel('Number of estimation points')
    fig.tight_layout()
    plt.show()
    save_path = save_path + '/cross_kernel'
    makemydir(save_path)
    # Join with the bare file name; the original passed an absolute path
    # (already containing save_path) which os.path.join used verbatim.
    fig.savefig(os.path.join(save_path,
                             'cross_kernel_eigenvector_product' + title
                             + '.png'))
    plt.close()
if __name__ == '__main__':
    # Prepare the timestamped output directory and archive this script.
    makemydir(SAVE_PATH)
    save_source_code(SAVE_PATH, time.strftime("%Y%m%d-%H%M%S"))
    # NOTE: the original also defined CSD_SEED = 15, which was never used
    # anywhere in the script -- removed.
    N_SRC = 64
    ELE_LIMS = [0, 1.]  # range of electrodes space
    TRUE_CSD_XLIMS = [0., 1.]
    TOTAL_ELE = 12
    noise = 0
    method = 'cross-validation'
    Rs = None
    lambdas = None
    # Scenario A: source centred inside the electrode range.
    R = 0.2
    MU = 0.25
    csd_at, true_csd, ele_pos, pots, val = simulate_data(csd_profile,
                                                         TRUE_CSD_XLIMS, R, MU,
                                                         TOTAL_ELE, ELE_LIMS,
                                                         noise=noise)
    title = 'A_basis_lims_0_1'
    obj, k = targeted_basis(val, csd_at, true_csd, ele_pos, pots, N_SRC, R, MU,
                            TRUE_CSD_XLIMS, ELE_LIMS, title, method=method,
                            Rs=Rs, lambdas=lambdas)
    # Spectral structure of the kernel for scenario A.
    ss = SpectralStructure(obj)
    eigenvectors, eigenvalues = ss.evd()
    plot_eigenvalues(eigenvalues, SAVE_PATH, title)
    plot_eigenvectors(eigenvectors, SAVE_PATH, title)
    plot_k_interp_cross_v(obj.k_interp_cross, eigenvectors, SAVE_PATH, title)
    # Scenario A.2: basis restricted to [0, 0.5].
    title = 'A_basis_lims_0_0_5'
    modified_bases(val, pots, ele_pos, N_SRC, title, h=0.25, sigma=0.3,
                   gdx=0.01, ext_x=0, xmin=0, xmax=0.5, method=method, Rs=Rs,
                   lambdas=lambdas)
    # Scenario A.2.b: same restriction with half the basis sources.
    title = 'A_basis_lims_0_0_5_less_sources'
    modified_bases(val, pots, ele_pos, N_SRC/2, title, h=0.25, sigma=0.3,
                   gdx=0.01, ext_x=0, xmin=0, xmax=0.5, method=method, Rs=Rs,
                   lambdas=lambdas)
    # Scenario B: source centred outside the electrode range.
    TRUE_CSD_XLIMS = [0., 1.5]
    R = 0.2
    MU = 1.25
    csd_at, true_csd, ele_pos, pots, val = simulate_data(csd_profile,
                                                         TRUE_CSD_XLIMS, R, MU,
                                                         TOTAL_ELE, ELE_LIMS,
                                                         noise=noise)
    title = 'B_basis_lims_0_1'
    obj, k = targeted_basis(val, csd_at, true_csd, ele_pos, pots, N_SRC, R, MU,
                            TRUE_CSD_XLIMS, ELE_LIMS, title, method=method,
                            Rs=Rs, lambdas=lambdas)
    # Spectral structure of the kernel for scenario B.
    ss = SpectralStructure(obj)
    eigenvectors, eigenvalues = ss.evd()
    plot_eigenvalues(eigenvalues, SAVE_PATH, title)
    plot_eigenvectors(eigenvectors, SAVE_PATH, title)
    plot_k_interp_cross_v(obj.k_interp_cross, eigenvectors, SAVE_PATH, title)
    # Scenario B.2: basis restricted to [1, 1.5] (around the source).
    title = 'B_basis_lims_1_1_5'
    modified_bases(val, pots, ele_pos, N_SRC, title, h=0.25, sigma=0.3,
                   gdx=0.01, ext_x=0, xmin=1, xmax=1.5, method=method, Rs=Rs,
                   lambdas=lambdas)
    # Scenario B.2.b: same restriction with half the basis sources.
    title = 'B_basis_lims_1_1_5_less_sources'
    modified_bases(val, pots, ele_pos, N_SRC/2, title, h=0.25, sigma=0.3,
                   gdx=0.01, ext_x=0, xmin=1, xmax=1.5, method=method, Rs=Rs,
                   lambdas=lambdas)
    # Scenario B.3: basis covering the whole [0, 1.5] range.
    title = 'B_basis_lims_0_1_5'
    modified_bases(val, pots, ele_pos, N_SRC, title, h=0.25, sigma=0.3,
                   gdx=0.01, ext_x=0, xmin=0, xmax=1.5, method=method, Rs=Rs,
                   lambdas=lambdas)
| 31.749518 | 82 | 0.585933 | import os
from os.path import expanduser
import numpy as np
import matplotlib.pyplot as plt
import datetime
import time
from kcsd import ValidateKCSD, ValidateKCSD1D, SpectralStructure, KCSD1D
# Absolute path of this script (used for provenance/archiving).
__abs_file__ = os.path.abspath(__file__)
home = expanduser('~')
# Timestamped output directory: ~/kCSD_results/<YYYYMMDD>/<HHMMSS>
DAY = datetime.datetime.now()
DAY = DAY.strftime('%Y%m%d')
TIMESTR = time.strftime("%H%M%S")
SAVE_PATH = home + "/kCSD_results/" + DAY + '/' + TIMESTR
def makemydir(directory):
    """Create *directory* if it does not exist and make it the CWD."""
    try:
        os.makedirs(directory)
    except OSError:
        # NOTE(review): this also swallows real failures such as permission
        # errors, not just "already exists" -- consider exist_ok=True.
        pass
    os.chdir(directory)
def save_source_code(save_path, timestr):
    """Copy this script's own source into *save_path* for reproducibility."""
    with open(save_path + '/source_code_' + str(timestr), 'w') as sf:
        # NOTE(review): open(__file__) is never closed explicitly here.
        sf.write(open(__file__).read())
def csd_profile(x, seed):
    """Return a 1D Gaussian CSD profile (peak scaled to 1).

    seed is [r, mu]: r -- width (three standard deviations), mu -- centre.
    """
    r = seed[0]
    mu = seed[1]
    STDDEV = r/3.0
    # Gaussian density; the trailing **1 is a no-op kept for fidelity.
    gauss = (np.exp(-((x - mu)**2)/(2 * STDDEV**2)) /
             (np.sqrt(2 * np.pi) * STDDEV)**1)
    # Normalise so the maximum equals 1.
    gauss /= np.max(gauss)
    return gauss
def targeted_basis(val, csd_at, true_csd, ele_pos, pots, n_src, R, MU,
                   true_csd_xlims, ele_lims, title, h=0.25, sigma=0.3,
                   csd_res=100, method='cross-validation', Rs=None,
                   lambdas=None):
    """Run kCSD with a targeted basis, save a summary figure, and return
    the fitted KCSD1D object together with the ValidateKCSD1D helper.
    """
    k = ValidateKCSD1D(1, n_src_init=n_src, R_init=0.23,
                       ele_lims=ele_lims, est_xres=0.01,
                       true_csd_xlims=true_csd_xlims, sigma=sigma, h=h,
                       src_type='gauss')
    obj, est_csd = k.do_kcsd(pots, ele_pos, method=method, Rs=Rs,
                             lambdas=lambdas)
    # RMS error of the estimate against the analytic ground-truth profile.
    test_csd = csd_profile(obj.estm_x, [R, MU])
    rms = val.calculate_rms(test_csd, est_csd)
    titl = "Lambda: %0.2E; R: %0.2f; RMS_Error: %0.2E;" % (obj.lambd, obj.R,
                                                           rms)
    fig = k.make_plot(csd_at, true_csd, obj, est_csd, ele_pos, pots, titl)
    # NOTE(review): save_as equals SAVE_PATH (absolute), so os.path.join
    # discards its first argument here; the join is effectively redundant.
    save_as = (SAVE_PATH)
    fig.savefig(os.path.join(SAVE_PATH, save_as + '/' + title + '.png'))
    plt.close()
    return obj, k
def simulate_data(csd_profile, true_csd_xlims, R, MU, total_ele, ele_lims,
                  h=0.25, sigma=0.3, csd_res=100, noise=0):
    """Generate a ground-truth CSD profile and the (optionally noisy)
    potentials it produces at the electrode positions.

    Returns (csd_at, true_csd, ele_pos, pots, val).
    """
    val = ValidateKCSD(1)
    csd_at = np.linspace(true_csd_xlims[0], true_csd_xlims[1], csd_res)
    true_csd = csd_profile(csd_at, [R, MU])
    ele_pos = val.generate_electrodes(total_ele=total_ele, ele_lims=ele_lims)
    pots = val.calculate_potential(true_csd, csd_at, ele_pos, h, sigma)
    # noise=None disables the noise stage; noise=0 still calls add_noise.
    if noise is not None:
        pots = val.add_noise(pots, 10, level=noise)
    return csd_at, true_csd, ele_pos, pots, val
def structure_investigation(csd_profile, true_csd_xlims, n_src, R, MU,
                            total_ele, ele_lims, title, h=0.25, sigma=0.3,
                            csd_res=100, method='cross-validation', Rs=None,
                            lambdas=None, noise=0):
    """Simulate data and run the targeted-basis kCSD analysis on it."""
    # NOTE(review): this instance is immediately overwritten by the value
    # returned from simulate_data below -- dead assignment.
    val = ValidateKCSD(1)
    csd_at, true_csd, ele_pos, pots, val = simulate_data(csd_profile,
                                                         true_csd_xlims, R, MU,
                                                         total_ele, ele_lims,
                                                         h=h, sigma=sigma,
                                                         noise=noise)
    # NOTE(review): h/sigma/csd_res are hard-coded here, so the caller's
    # values for those parameters are silently ignored downstream.
    obj, k = targeted_basis(val, csd_at, true_csd, ele_pos, pots, n_src, R, MU,
                            true_csd_xlims, ele_lims, title, h=0.25,
                            sigma=0.3, csd_res=100, method=method, Rs=Rs,
                            lambdas=lambdas)
    return obj
def plot_eigenvalues(eigenvalues, save_path, title):
    """Plot the eigenvalues of the kernel matrix and save the figure."""
    fig = plt.figure()
    plt.plot(eigenvalues, '--', marker='.')
    plt.title('Eigenvalue decomposition of kernel matrix. ele_lims=basis_lims')
    plt.xlabel('Number of components')
    plt.ylabel('Eigenvalues')
    plt.show()
    # NOTE(review): save_as is an absolute path embedding save_path, so
    # os.path.join below discards its first argument.
    save_as = (save_path + '/eigenvalues_for_' + title)
    fig.savefig(os.path.join(save_path, save_as+'.png'))
    plt.close()
def plot_eigenvectors(eigenvectors, save_path, title):
    """Plot each eigenvector (column) of the kernel matrix in its own
    subplot and save the figure.
    """
    fig = plt.figure(figsize=(15, 15))
    plt.suptitle('Eigenvalue decomposition of kernel matrix for different '
                 'number of basis sources')
    for i in range(eigenvectors.shape[1]):
        plt.subplot(int(eigenvectors.shape[1]/2) + 1, 2, i + 1)
        plt.plot(eigenvectors[:, i].T, '--', marker='.')
        plt.ylabel('Eigenvectors')
        plt.title(r'$v_' + str(i + 1) + '$')
        plt.xlabel('Number of components')
    plt.tight_layout()
    plt.show()
    # NOTE(review): save_as is absolute, so os.path.join below discards
    # its first argument.
    save_as = (save_path + '/eigenvectors_for_' + title)
    fig.savefig(os.path.join(save_path, save_as+'.png'))
    plt.close()
def modified_bases(val, pots, ele_pos, n_src, title=None, h=0.25, sigma=0.3,
                   gdx=0.01, ext_x=0, xmin=0, xmax=1, R=0.2, MU=0.25,
                   method='cross-validation', Rs=None, lambdas=None):
    """Run a KCSD1D reconstruction with the basis restricted to
    [xmin, xmax] and return the fitted estimator.
    """
    pots = pots.reshape((len(ele_pos), 1))
    obj_m = KCSD1D(ele_pos, pots, src_type='gauss', sigma=sigma, h=h, gdx=gdx,
                   n_src_init=n_src, ext_x=ext_x, xmin=xmin, xmax=xmax)
    if method == 'cross-validation':
        obj_m.cross_validate(Rs=Rs, lambdas=lambdas)
    elif method == 'L-curve':
        obj_m.L_curve(Rs=Rs, lambdas=lambdas)
    est_csd = obj_m.values('CSD')
    # NOTE(review): test_csd/rms below are computed but never used or
    # returned -- dead code.
    test_csd = csd_profile(obj_m.estm_x, [R, MU])
    rms = val.calculate_rms(test_csd, est_csd)
    return obj_m
def plot_k_interp_cross_v(k_icross, eigenvectors, save_path, title):
    """Plot the product of the cross-kernel matrix with each eigenvector
    of the kernel matrix and save the figure.
    """
    fig = plt.figure(figsize=(15, 15))
    # NOTE(review): iterates over shape[0] while indexing columns below;
    # this only works because the kernel matrix is square.
    for i in range(eigenvectors.shape[0]):
        plt.subplot(int(k_icross.shape[1]/2) + 1, 2, i + 1)
        plt.plot(np.dot(k_icross, eigenvectors[:, i]), '--',
                 marker='.')
        plt.title(r'$\tilde{K}*v_' + str(i + 1) + '$')
        plt.xlabel('Number of estimation points')
    fig.tight_layout()
    plt.show()
    save_path = save_path + '/cross_kernel'
    makemydir(save_path)
    # NOTE(review): save_as is absolute, so os.path.join below discards
    # its first argument.
    save_as = (save_path + '/cross_kernel_eigenvector_product' + title)
    fig.savefig(os.path.join(save_path, save_as+'.png'))
    plt.close()
if __name__ == '__main__':
    # Prepare the timestamped output directory and archive this script.
    makemydir(SAVE_PATH)
    save_source_code(SAVE_PATH, time.strftime("%Y%m%d-%H%M%S"))
    # NOTE(review): CSD_SEED is never used below.
    CSD_SEED = 15
    N_SRC = 64
    ELE_LIMS = [0, 1.]  # range of electrodes space
    TRUE_CSD_XLIMS = [0., 1.]
    TOTAL_ELE = 12
    noise = 0
    method = 'cross-validation'
    Rs = None
    lambdas = None
    # Scenario A: source centred inside the electrode range.
    R = 0.2
    MU = 0.25
    csd_at, true_csd, ele_pos, pots, val = simulate_data(csd_profile,
                                                         TRUE_CSD_XLIMS, R, MU,
                                                         TOTAL_ELE, ELE_LIMS,
                                                         noise=noise)
    title = 'A_basis_lims_0_1'
    obj, k = targeted_basis(val, csd_at, true_csd, ele_pos, pots, N_SRC, R, MU,
                            TRUE_CSD_XLIMS, ELE_LIMS, title, method=method, Rs=Rs,
                            lambdas=lambdas)
    # Spectral structure of the kernel for scenario A.
    ss = SpectralStructure(obj)
    eigenvectors, eigenvalues = ss.evd()
    plot_eigenvalues(eigenvalues, SAVE_PATH, title)
    plot_eigenvectors(eigenvectors, SAVE_PATH, title)
    plot_k_interp_cross_v(obj.k_interp_cross, eigenvectors, SAVE_PATH, title)
    # Scenario A.2: basis restricted to [0, 0.5].
    title = 'A_basis_lims_0_0_5'
    modified_bases(val, pots, ele_pos, N_SRC, title, h=0.25, sigma=0.3,
                   gdx=0.01, ext_x=0, xmin=0, xmax=0.5, method=method, Rs=Rs,
                   lambdas=lambdas)
    # Scenario A.2.b: same restriction with half the basis sources.
    title = 'A_basis_lims_0_0_5_less_sources'
    modified_bases(val, pots, ele_pos, N_SRC/2, title, h=0.25, sigma=0.3,
                   gdx=0.01, ext_x=0, xmin=0, xmax=0.5, method=method, Rs=Rs,
                   lambdas=lambdas)
    # Scenario B: source centred outside the electrode range.
    TRUE_CSD_XLIMS = [0., 1.5]
    R = 0.2
    MU = 1.25
    csd_at, true_csd, ele_pos, pots, val = simulate_data(csd_profile,
                                                         TRUE_CSD_XLIMS, R, MU,
                                                         TOTAL_ELE, ELE_LIMS,
                                                         noise=noise)
    title = 'B_basis_lims_0_1'
    obj, k = targeted_basis(val, csd_at, true_csd, ele_pos, pots, N_SRC, R, MU,
                            TRUE_CSD_XLIMS, ELE_LIMS, title, method=method, Rs=Rs,
                            lambdas=lambdas)
    # Spectral structure of the kernel for scenario B.
    ss = SpectralStructure(obj)
    eigenvectors, eigenvalues = ss.evd()
    plot_eigenvalues(eigenvalues, SAVE_PATH, title)
    plot_eigenvectors(eigenvectors, SAVE_PATH, title)
    plot_k_interp_cross_v(obj.k_interp_cross, eigenvectors, SAVE_PATH, title)
    # Scenario B.2: basis restricted to [1, 1.5] (around the source).
    title = 'B_basis_lims_1_1_5'
    modified_bases(val, pots, ele_pos, N_SRC, title, h=0.25, sigma=0.3,
                   gdx=0.01, ext_x=0, xmin=1, xmax=1.5, method=method, Rs=Rs,
                   lambdas=lambdas)
    # Scenario B.2.b: same restriction with half the basis sources.
    title = 'B_basis_lims_1_1_5_less_sources'
    modified_bases(val, pots, ele_pos, N_SRC/2, title, h=0.25, sigma=0.3,
                   gdx=0.01, ext_x=0, xmin=1, xmax=1.5, method=method, Rs=Rs,
                   lambdas=lambdas)
    # Scenario B.3: basis covering the whole [0, 1.5] range.
    title = 'B_basis_lims_0_1_5'
    modified_bases(val, pots, ele_pos, N_SRC, title, h=0.25, sigma=0.3,
                   gdx=0.01, ext_x=0, xmin=0, xmax=1.5, method=method, Rs=Rs,
                   lambdas=lambdas)
| true | true |
f71f873815e728bc7fb92f7c3c25537c688114fb | 58,117 | py | Python | src/train_eval.py | chanyh0/PyTorch-StudioGAN | 5a912affc1ec975d97a33a12d1c96d05d4b883f0 | [
"MIT"
] | 75 | 2021-02-25T20:04:53.000Z | 2022-03-12T12:12:58.000Z | src/train_eval.py | chanyh0/PyTorch-StudioGAN | 5a912affc1ec975d97a33a12d1c96d05d4b883f0 | [
"MIT"
] | 1 | 2021-08-08T13:12:27.000Z | 2021-08-08T13:12:27.000Z | src/train_eval.py | chanyh0/PyTorch-StudioGAN | 5a912affc1ec975d97a33a12d1c96d05d4b883f0 | [
"MIT"
] | 7 | 2021-03-02T18:47:45.000Z | 2022-01-26T13:49:25.000Z | # PyTorch StudioGAN: https://github.com/POSTECH-CVLab/PyTorch-StudioGAN
# The MIT License (MIT)
# See license file or visit https://github.com/POSTECH-CVLab/PyTorch-StudioGAN for details
# train_eval.py
import numpy as np
import sys
import glob
from scipy import ndimage
from os.path import join
from PIL import Image
from tqdm import tqdm
from datetime import datetime
from metrics.IS import calculate_incep_score
from metrics.FID import calculate_fid_score
from metrics.F_beta import calculate_f_beta_score
from metrics.Accuracy import calculate_accuracy
from utils.ada import augment
from utils.biggan_utils import interp
from utils.sample import sample_latents, sample_1hot, make_mask, target_class_sampler
from utils.misc import *
from utils.losses import calc_derv4gp, calc_derv4dra, calc_derv, latent_optimise
from utils.losses import Conditional_Contrastive_loss, Proxy_NCA_loss, NT_Xent_loss
from utils.diff_aug import DiffAugment
from utils.cr_diff_aug import CR_DiffAug
import torch
import torch.nn as nn
from torch.nn import DataParallel
import torch.nn.functional as F
import torchvision
from torchvision import transforms
# File-name template for checkpoints, parameterised by training step and
# the evaluation metrics (Inception score mean/std, FID).
SAVE_FORMAT = 'step={step:0>3}-Inception_mean={Inception_mean:<.4}-Inception_std={Inception_std:<.4}-FID={FID:<.5}.pth'
# One-line progress template emitted every `print_every` steps.
LOG_FORMAT = (
    "Step: {step:>7} "
    "Progress: {progress:<.1%} "
    "Elapsed: {elapsed} "
    "temperature: {temperature:<.6} "
    "ada_p: {ada_p:<.6} "
    "Discriminator_loss: {dis_loss:<.6} "
    "Generator_loss: {gen_loss:<.6} "
)
def set_temperature(conditional_strategy, tempering_type, start_temperature, end_temperature, step_count, tempering_step, total_step):
    """Return the contrastive-loss temperature for the current step.

    Only the 'ContraGAN' strategy uses a temperature; every other strategy
    gets the sentinel string 'no'.  With 'continuous' tempering the value
    is interpolated linearly over the whole run; with 'discrete' tempering
    it is raised in `tempering_step` equal jumps; any other tempering type
    keeps the start temperature.
    """
    if conditional_strategy != 'ContraGAN':
        return 'no'
    span = end_temperature - start_temperature
    if tempering_type == 'continuous':
        return start_temperature + step_count * span / total_step
    if tempering_type == 'discrete':
        # Length of one plateau between consecutive temperature jumps.
        interval = total_step // (tempering_step + 1)
        return start_temperature + (step_count // interval) * span / tempering_step
    return start_temperature
class Train_Eval(object):
def __init__(self, run_name, best_step, dataset_name, eval_type, logger, writer, n_gpus, gen_model, dis_model, inception_model,
Gen_copy, Gen_ema, train_dataset, eval_dataset, train_dataloader, eval_dataloader, freeze_layers, conditional_strategy,
pos_collected_numerator, z_dim, num_classes, hypersphere_dim, d_spectral_norm, g_spectral_norm, G_optimizer, D_optimizer,
batch_size, g_steps_per_iter, d_steps_per_iter, accumulation_steps, total_step, G_loss, D_loss, contrastive_lambda, margin,
tempering_type, tempering_step, start_temperature, end_temperature, weight_clipping_for_dis, weight_clipping_bound,
gradient_penalty_for_dis, gradient_penalty_lambda, deep_regret_analysis_for_dis, regret_penalty_lambda, cr, cr_lambda, bcr,
real_lambda, fake_lambda, zcr, gen_lambda, dis_lambda, sigma_noise, diff_aug, ada, prev_ada_p, ada_target, ada_length, prior,
truncated_factor, ema, latent_op, latent_op_rate, latent_op_step, latent_op_step4eval, latent_op_alpha, latent_op_beta,
latent_norm_reg_weight, default_device, print_every, save_every, checkpoint_dir, evaluate, mu, sigma, best_fid,
best_fid_checkpoint_path, mixed_precision, train_config, model_config, gamma, steps):
self.run_name = run_name
self.best_step = best_step
self.dataset_name = dataset_name
self.eval_type = eval_type
self.logger = logger
self.writer = writer
self.n_gpus = n_gpus
self.gen_model = gen_model
self.dis_model = dis_model
self.inception_model = inception_model
self.Gen_copy = Gen_copy
self.Gen_ema = Gen_ema
self.train_dataset = train_dataset
self.eval_dataset = eval_dataset
self.train_dataloader = train_dataloader
self.eval_dataloader = eval_dataloader
self.freeze_layers = freeze_layers
self.conditional_strategy = conditional_strategy
self.pos_collected_numerator = pos_collected_numerator
self.z_dim = z_dim
self.num_classes = num_classes
self.hypersphere_dim = hypersphere_dim
self.d_spectral_norm = d_spectral_norm
self.g_spectral_norm = g_spectral_norm
self.G_optimizer = G_optimizer
self.D_optimizer = D_optimizer
self.batch_size = batch_size
self.g_steps_per_iter = g_steps_per_iter
self.d_steps_per_iter = d_steps_per_iter
self.accumulation_steps = accumulation_steps
self.total_step = total_step
self.G_loss = G_loss
self.D_loss = D_loss
self.contrastive_lambda = contrastive_lambda
self.margin = margin
self.tempering_type = tempering_type
self.tempering_step = tempering_step
self.start_temperature = start_temperature
self.end_temperature = end_temperature
self.weight_clipping_for_dis = weight_clipping_for_dis
self.weight_clipping_bound = weight_clipping_bound
self.gradient_penalty_for_dis = gradient_penalty_for_dis
self.gradient_penalty_lambda = gradient_penalty_lambda
self.deep_regret_analysis_for_dis = deep_regret_analysis_for_dis
self.regret_penalty_lambda = regret_penalty_lambda
self.cr = cr
self.cr_lambda = cr_lambda
self.bcr = bcr
self.real_lambda = real_lambda
self.fake_lambda = fake_lambda
self.zcr = zcr
self.gen_lambda = gen_lambda
self.dis_lambda = dis_lambda
self.sigma_noise = sigma_noise
self.diff_aug = diff_aug
self.ada = ada
self.prev_ada_p = prev_ada_p
self.ada_target = ada_target
self.ada_length = ada_length
self.prior = prior
self.truncated_factor = truncated_factor
self.ema = ema
self.latent_op = latent_op
self.latent_op_rate = latent_op_rate
self.latent_op_step = latent_op_step
self.latent_op_step4eval = latent_op_step4eval
self.latent_op_alpha = latent_op_alpha
self.latent_op_beta = latent_op_beta
self.latent_norm_reg_weight = latent_norm_reg_weight
self.default_device = default_device
self.print_every = print_every
self.save_every = save_every
self.checkpoint_dir = checkpoint_dir
self.evaluate = evaluate
self.mu = mu
self.sigma = sigma
self.best_fid = best_fid
self.best_fid_checkpoint_path = best_fid_checkpoint_path
self.mixed_precision = mixed_precision
self.train_config = train_config
self.model_config = model_config
self.start_time = datetime.now()
self.l2_loss = torch.nn.MSELoss()
self.ce_loss = torch.nn.CrossEntropyLoss()
self.policy = "color,translation,cutout"
self.steps = steps
self.gamma = gamma
sampler = define_sampler(self.dataset_name, self.conditional_strategy)
check_flag_1(self.tempering_type, self.pos_collected_numerator, self.conditional_strategy, self.diff_aug, self.ada,
self.mixed_precision, self.gradient_penalty_for_dis, self.deep_regret_analysis_for_dis, self.cr, self.bcr, self.zcr)
if self.conditional_strategy == 'ContraGAN':
self.contrastive_criterion = Conditional_Contrastive_loss(self.default_device, self.batch_size, self.pos_collected_numerator)
elif self.conditional_strategy == 'Proxy_NCA_GAN':
if isinstance(self.dis_model, DataParallel):
self.embedding_layer = self.dis_model.module.embedding
else:
self.embedding_layer = self.dis_model.embedding
self.NCA_criterion = Proxy_NCA_loss(self.default_device, self.embedding_layer, self.num_classes, self.batch_size)
elif self.conditional_strategy == 'NT_Xent_GAN':
self.NT_Xent_criterion = NT_Xent_loss(self.default_device, self.batch_size)
else:
pass
if self.mixed_precision:
self.scaler = torch.cuda.amp.GradScaler()
if self.dataset_name in ["imagenet"]:
self.num_eval = {'train':50000, 'valid':50000}
elif self.dataset_name in ["imagenet_less_0.25"]:
self.num_eval = {'train':50000, 'valid':50000}
elif self.dataset_name in ["imagenet_less"]:
self.num_eval = {'train':50000, 'valid':50000}
elif self.dataset_name == "tiny_imagenet":
self.num_eval = {'train':50000, 'valid':10000}
elif self.dataset_name == "cifar10":
self.num_eval = {'train':50000, 'test':10000}
elif self.dataset_name == "cifar10_less":
self.num_eval = {'train':len(self.train_dataset.data), 'valid':len(self.eval_dataset.data), 'test':len(self.eval_dataset.data)}
elif self.dataset_name in ["cifar100_less"]:
self.num_eval = {'train':len(self.train_dataset.data), 'valid':len(self.eval_dataset.data), 'test':len(self.eval_dataset.data)}
elif self.dataset_name == "custom":
num_train_images = len(self.train_dataset.data)
num_eval_images = len(self.eval_dataset.data)
self.num_eval = {'train':num_train_images, 'valid':num_eval_images}
else:
raise NotImplementedError
################################################################################################################################
def train(self, current_step, total_step):
self.dis_model.train()
self.gen_model.train()
if self.Gen_copy is not None:
self.Gen_copy.train()
self.logger.info('Start training....')
step_count = current_step
train_iter = iter(self.train_dataloader)
if self.ada:
self.ada_augment = torch.tensor([0.0, 0.0], device = self.default_device)
if self.prev_ada_p is not None:
self.ada_aug_p = self.prev_ada_p
else:
self.ada_aug_p = 0.0
self.ada_aug_step = self.ada_target/self.ada_length
else:
self.ada_aug_p = 'No'
while step_count <= total_step:
# ================== TRAIN D ================== #
toggle_grad(self.dis_model, True, freeze_layers=self.freeze_layers)
toggle_grad(self.gen_model, False, freeze_layers=-1)
t = set_temperature(self.conditional_strategy, self.tempering_type, self.start_temperature, self.end_temperature, step_count, self.tempering_step, total_step)
for step_index in range(self.d_steps_per_iter):
self.D_optimizer.zero_grad()
for acml_index in range(self.accumulation_steps):
try:
real_images, real_labels = next(train_iter)
except StopIteration:
train_iter = iter(self.train_dataloader)
real_images, real_labels = next(train_iter)
real_images, real_labels = real_images.to(self.default_device), real_labels.to(self.default_device)
with torch.cuda.amp.autocast() if self.mixed_precision else dummy_context_mgr() as mpc:
if self.diff_aug:
real_images = DiffAugment(real_images, policy=self.policy)
if self.ada:
real_images, _ = augment(real_images, self.ada_aug_p)
if self.zcr:
zs, fake_labels, zs_t = sample_latents(self.prior, self.batch_size, self.z_dim, 1, self.num_classes,
self.sigma_noise, self.default_device)
else:
zs, fake_labels = sample_latents(self.prior, self.batch_size, self.z_dim, 1, self.num_classes,
None, self.default_device)
if self.latent_op:
zs = latent_optimise(zs, fake_labels, self.gen_model, self.dis_model, self.conditional_strategy,
self.latent_op_step, self.latent_op_rate, self.latent_op_alpha, self.latent_op_beta,
False, self.default_device)
fake_images = self.gen_model(zs, fake_labels)
if self.diff_aug:
fake_images = DiffAugment(fake_images, policy=self.policy)
if self.ada:
fake_images, _ = augment(fake_images, self.ada_aug_p)
if self.conditional_strategy == "ACGAN":
cls_out_real, dis_out_real = self.dis_model(real_images, real_labels)
cls_out_fake, dis_out_fake = self.dis_model(fake_images, fake_labels)
elif self.conditional_strategy == "ProjGAN" or self.conditional_strategy == "no":
dis_out_real = self.dis_model(real_images, real_labels)
dis_out_fake = self.dis_model(fake_images, fake_labels)
elif self.conditional_strategy in ["NT_Xent_GAN", "Proxy_NCA_GAN", "ContraGAN"]:
real_cls_mask = make_mask(real_labels, self.num_classes, self.default_device)
cls_proxies_real, cls_embed_real, dis_out_real = self.dis_model(real_images, real_labels)
cls_proxies_fake, cls_embed_fake, dis_out_fake = self.dis_model(fake_images, fake_labels)
elif self.conditional_strategy == 'ProjGAN_adv':
dis_out_real_prefc = self.dis_model(real_images, real_labels, fc=False)
dis_out_fake_prefc = self.dis_model(fake_images, fake_labels, fc=False)
loss_real = lambda x: torch.mean(F.relu(1. - x))
loss_fake = lambda x: torch.mean(F.relu(1. + x))
dis_out_real_prefc_adv = PGD(dis_out_real_prefc, real_labels, loss_real, self.dis_model, steps=self.steps, gamma=self.gamma)
dis_out_fake_prefc_adv = PGD(dis_out_fake_prefc, fake_labels, loss_real, self.dis_model, steps=self.steps, gamma=self.gamma)
fake_images = fake_images.detach()
dis_out_real_prefc = self.dis_model(real_images, real_labels, fc=False, only_fc=False)
dis_out_fake_prefc = self.dis_model(fake_images, fake_labels, fc=False, only_fc=False)
dis_out_real = self.dis_model(dis_out_real_prefc, real_labels, only_fc=True, fc=True)
dis_out_fake = self.dis_model(dis_out_fake_prefc, fake_labels, only_fc=True, fc=True)
dis_out_real_adv = self.dis_model(dis_out_real_prefc_adv, real_labels, only_fc=True)
dis_out_fake_adv = self.dis_model(dis_out_fake_prefc_adv, fake_labels, only_fc=True)
else:
raise NotImplementedError
#if self.conditional_strategy != 'ProjGAN_adv':
if self.conditional_strategy != 'ProjGAN_adv':
dis_acml_loss = self.D_loss(dis_out_real, dis_out_fake)
else:
dis_acml_loss = (self.D_loss(dis_out_real, dis_out_fake) + self.D_loss(dis_out_real_adv, dis_out_fake_adv)) / 2
if self.conditional_strategy == "ACGAN":
dis_acml_loss += (self.ce_loss(cls_out_real, real_labels) + self.ce_loss(cls_out_fake, fake_labels))
elif self.conditional_strategy == "NT_Xent_GAN":
real_images_aug = CR_DiffAug(real_images)
_, cls_embed_real_aug, dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
dis_acml_loss += self.contrastive_lambda*self.NT_Xent_criterion(cls_embed_real, cls_embed_real_aug, t)
elif self.conditional_strategy == "Proxy_NCA_GAN":
dis_acml_loss += self.contrastive_lambda*self.NCA_criterion(cls_embed_real, cls_proxies_real, real_labels)
elif self.conditional_strategy == "ContraGAN":
dis_acml_loss += self.contrastive_lambda*self.contrastive_criterion(cls_embed_real, cls_proxies_real,
real_cls_mask, real_labels, t, self.margin)
else:
pass
if self.cr:
real_images_aug = CR_DiffAug(real_images)
if self.conditional_strategy == "ACGAN":
cls_out_real_aug, dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
cls_consistency_loss = self.l2_loss(cls_out_real, cls_out_real_aug)
elif self.conditional_strategy == "ProjGAN" or self.conditional_strategy == "no":
dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
elif self.conditional_strategy in ["NT_Xent_GAN", "Proxy_NCA_GAN", "ContraGAN"]:
_, cls_embed_real_aug, dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
cls_consistency_loss = self.l2_loss(cls_embed_real, cls_embed_real_aug)
elif self.conditional_strategy == "ProjGAN_adv":
dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
else:
raise NotImplementedError
consistency_loss = self.l2_loss(dis_out_real, dis_out_real_aug)
if self.conditional_strategy in ["ACGAN", "NT_Xent_GAN", "Proxy_NCA_GAN", "ContraGAN"]:
consistency_loss += cls_consistency_loss
dis_acml_loss += self.cr_lambda*consistency_loss
if self.bcr:
real_images_aug = CR_DiffAug(real_images)
fake_images_aug = CR_DiffAug(fake_images)
if self.conditional_strategy == "ACGAN":
cls_out_real_aug, dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
cls_out_fake_aug, dis_out_fake_aug = self.dis_model(fake_images_aug, fake_labels)
cls_bcr_real_loss = self.l2_loss(cls_out_real, cls_out_real_aug)
cls_bcr_fake_loss = self.l2_loss(cls_out_fake, cls_out_fake_aug)
elif self.conditional_strategy == "ProjGAN" or self.conditional_strategy == "no":
dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
dis_out_fake_aug = self.dis_model(fake_images_aug, fake_labels)
elif self.conditional_strategy in ["ContraGAN", "Proxy_NCA_GAN", "NT_Xent_GAN"]:
cls_proxies_real_aug, cls_embed_real_aug, dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
cls_proxies_fake_aug, cls_embed_fake_aug, dis_out_fake_aug = self.dis_model(fake_images_aug, fake_labels)
cls_bcr_real_loss = self.l2_loss(cls_embed_real, cls_embed_real_aug)
cls_bcr_fake_loss = self.l2_loss(cls_embed_fake, cls_embed_fake_aug)
elif self.conditional_strategy == "ProjGAN_adv":
dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
dis_out_fake_aug = self.dis_model(fake_images_aug, fake_labels)
else:
raise NotImplementedError
bcr_real_loss = self.l2_loss(dis_out_real, dis_out_real_aug)
bcr_fake_loss = self.l2_loss(dis_out_fake, dis_out_fake_aug)
if self.conditional_strategy in ["ACGAN", "NT_Xent_GAN", "Proxy_NCA_GAN", "ContraGAN"]:
bcr_real_loss += cls_bcr_real_loss
bcr_fake_loss += cls_bcr_fake_loss
dis_acml_loss += self.real_lambda*bcr_real_loss + self.fake_lambda*bcr_fake_loss
if self.zcr:
fake_images_zaug = self.gen_model(zs_t, fake_labels)
if self.conditional_strategy == "ACGAN":
cls_out_fake_zaug, dis_out_fake_zaug = self.dis_model(fake_images_zaug, fake_labels)
cls_zcr_dis_loss = self.l2_loss(cls_out_fake, cls_out_fake_zaug)
elif self.conditional_strategy == "ProjGAN" or self.conditional_strategy == "no":
dis_out_fake_zaug = self.dis_model(fake_images_zaug, fake_labels)
elif self.conditional_strategy in ["ContraGAN", "Proxy_NCA_GAN", "NT_Xent_GAN"]:
cls_proxies_fake_zaug, cls_embed_fake_zaug, dis_out_fake_zaug = self.dis_model(fake_images_zaug, fake_labels)
cls_zcr_dis_loss = self.l2_loss(cls_embed_fake, cls_embed_fake_zaug)
elif self.conditional_strategy == "ProjGAN_adv":
dis_out_fake_zaug = self.dis_model(fake_images_zaug, fake_labels)
else:
raise NotImplementedError
zcr_dis_loss = self.l2_loss(dis_out_fake, dis_out_fake_zaug)
if self.conditional_strategy in ["ACGAN", "NT_Xent_GAN", "Proxy_NCA_GAN", "ContraGAN"]:
zcr_dis_loss += cls_zcr_dis_loss
dis_acml_loss += self.dis_lambda*zcr_dis_loss
if self.gradient_penalty_for_dis:
dis_acml_loss += self.gradient_penalty_lambda*calc_derv4gp(self.dis_model, self.conditional_strategy, real_images,
fake_images, real_labels, self.default_device)
if self.deep_regret_analysis_for_dis:
dis_acml_loss += self.regret_penalty_lambda*calc_derv4dra(self.dis_model, self.conditional_strategy, real_images,
real_labels, self.default_device)
if self.ada:
ada_aug_data = torch.tensor((torch.sign(dis_out_real).sum().item(), dis_out_real.shape[0]), device = self.default_device)
self.ada_augment += ada_aug_data
if self.ada_augment[1] > (self.batch_size*4 - 1):
authen_out_signs, num_outputs = self.ada_augment.tolist()
r_t_stat = authen_out_signs/num_outputs
sign = 1 if r_t_stat > self.ada_target else -1
self.ada_aug_p += sign*self.ada_aug_step*num_outputs
self.ada_aug_p = min(1.0, max(0.0, self.ada_aug_p))
self.ada_augment.mul_(0.0)
dis_acml_loss = dis_acml_loss/self.accumulation_steps
if self.mixed_precision:
self.scaler.scale(dis_acml_loss).backward()
else:
dis_acml_loss.backward()
if self.mixed_precision:
self.scaler.step(self.D_optimizer)
self.scaler.update()
else:
self.D_optimizer.step()
if self.weight_clipping_for_dis:
for p in self.dis_model.parameters():
p.data.clamp_(-self.weight_clipping_bound, self.weight_clipping_bound)
if step_count % self.print_every == 0 and step_count !=0 and self.logger:
if self.d_spectral_norm:
dis_sigmas = calculate_all_sn(self.dis_model)
self.writer.add_scalars('SN_of_dis', dis_sigmas, step_count)
# ================== TRAIN G ================== #
toggle_grad(self.dis_model, False, freeze_layers=-1)
toggle_grad(self.gen_model, True, freeze_layers=-1)
for step_index in range(self.g_steps_per_iter):
self.G_optimizer.zero_grad()
for acml_step in range(self.accumulation_steps):
with torch.cuda.amp.autocast() if self.mixed_precision else dummy_context_mgr() as mpc:
if self.zcr:
zs, fake_labels, zs_t = sample_latents(self.prior, self.batch_size, self.z_dim, 1, self.num_classes,
self.sigma_noise, self.default_device)
else:
zs, fake_labels = sample_latents(self.prior, self.batch_size, self.z_dim, 1, self.num_classes,
None, self.default_device)
if self.latent_op:
zs, transport_cost = latent_optimise(zs, fake_labels, self.gen_model, self.dis_model, self.conditional_strategy,
self.latent_op_step, self.latent_op_rate, self.latent_op_alpha,
self.latent_op_beta, True, self.default_device)
if not self.conditional_strategy == 'ProjGAN_adv':
fake_images = self.gen_model(zs, fake_labels)
else:
gen_out_prefc, labels_prefc = self.gen_model(zs, fake_labels, only_l1=True)
loss_fake = lambda x: -torch.mean(x)
gen_out_adv = PGD_G(gen_out_prefc, labels_prefc, fake_labels, loss_fake, self.gen_model, self.dis_model, steps=self.steps, gamma=self.gamma)
fake_images = self.gen_model(gen_out_prefc, labels_prefc, l1=False)
fake_images_adv = self.gen_model(gen_out_adv, labels_prefc, l1=False)
if self.diff_aug:
fake_images = DiffAugment(fake_images, policy=self.policy)
if self.ada:
fake_images, _ = augment(fake_images, self.ada_aug_p)
if self.conditional_strategy == "ACGAN":
cls_out_fake, dis_out_fake = self.dis_model(fake_images, fake_labels)
elif self.conditional_strategy == "ProjGAN" or self.conditional_strategy == "no":
dis_out_fake = self.dis_model(fake_images, fake_labels)
elif self.conditional_strategy in ["NT_Xent_GAN", "Proxy_NCA_GAN", "ContraGAN"]:
fake_cls_mask = make_mask(fake_labels, self.num_classes, self.default_device)
cls_proxies_fake, cls_embed_fake, dis_out_fake = self.dis_model(fake_images, fake_labels)
elif self.conditional_strategy == 'ProjGAN_adv':
dis_out_fake = self.dis_model(fake_images, fake_labels)
dis_out_adv = self.dis_model(fake_images_adv, fake_labels)
else:
raise NotImplementedError
gen_acml_loss = self.G_loss(dis_out_fake)
if self.latent_op:
gen_acml_loss += transport_cost*self.latent_norm_reg_weight
if self.zcr:
fake_images_zaug = self.gen_model(zs_t, fake_labels)
zcr_gen_loss = -1 * self.l2_loss(fake_images, fake_images_zaug)
gen_acml_loss += self.gen_lambda*zcr_gen_loss
if self.conditional_strategy == "ACGAN":
gen_acml_loss += self.ce_loss(cls_out_fake, fake_labels)
elif self.conditional_strategy == "ContraGAN":
gen_acml_loss += self.contrastive_lambda*self.contrastive_criterion(cls_embed_fake, cls_proxies_fake, fake_cls_mask, fake_labels, t, self.margin)
elif self.conditional_strategy == "Proxy_NCA_GAN":
gen_acml_loss += self.contrastive_lambda*self.NCA_criterion(cls_embed_fake, cls_proxies_fake, fake_labels)
elif self.conditional_strategy == "NT_Xent_GAN":
fake_images_aug = CR_DiffAug(fake_images)
_, cls_embed_fake_aug, dis_out_fake_aug = self.dis_model(fake_images_aug, fake_labels)
gen_acml_loss += self.contrastive_lambda*self.NT_Xent_criterion(cls_embed_fake, cls_embed_fake_aug, t)
elif self.conditional_strategy == 'ProjGAN_adv':
gen_acml_loss = (self.G_loss(dis_out_fake) + self.G_loss(dis_out_adv)) / 2
else:
pass
gen_acml_loss = gen_acml_loss/self.accumulation_steps
if self.mixed_precision:
self.scaler.scale(gen_acml_loss).backward()
else:
gen_acml_loss.backward()
if self.mixed_precision:
self.scaler.step(self.G_optimizer)
self.scaler.update()
else:
self.G_optimizer.step()
# if ema is True: we update parameters of the Gen_copy in adaptive way.
if self.ema:
self.Gen_ema.update(step_count)
step_count += 1
if step_count % self.print_every == 0 and self.logger:
log_message = LOG_FORMAT.format(step=step_count,
progress=step_count/total_step,
elapsed=elapsed_time(self.start_time),
temperature=t,
ada_p=self.ada_aug_p,
dis_loss=dis_acml_loss.item(),
gen_loss=gen_acml_loss.item(),
)
self.logger.info(log_message)
if self.g_spectral_norm:
gen_sigmas = calculate_all_sn(self.gen_model)
self.writer.add_scalars('SN_of_gen', gen_sigmas, step_count)
self.writer.add_scalars('Losses', {'discriminator': dis_acml_loss.item(),
'generator': gen_acml_loss.item()}, step_count)
if self.ada:
self.writer.add_scalar('ada_p', self.ada_aug_p, step_count)
if step_count % self.save_every == 0 or step_count == total_step:
if self.evaluate:
is_best = self.evaluation(step_count, False, "N/A")
self.save(step_count, is_best)
else:
self.save(step_count, False)
return step_count-1
################################################################################################################################
################################################################################################################################
def save(self, step, is_best):
    """Checkpoint generator/discriminator (and the EMA copy) weights plus optimizer states.

    Args:
        step (int): current training step; embedded in the checkpoint filename.
        is_best (bool): if True, save under the "best" tag and also refresh the
            "current" checkpoints; otherwise save under the "current" tag only.

    Side effects: removes the previous checkpoint carrying the same tag before
    writing, and logs the destination directory. Networks are put into eval mode
    while serializing and restored to train mode afterwards.
    """
    when = "best" if is_best is True else "current"
    self.dis_model.eval()
    self.gen_model.eval()
    if self.Gen_copy is not None:
        self.Gen_copy.eval()

    # Unwrap DataParallel so the state_dict keys are stored without the "module." prefix.
    if isinstance(self.gen_model, DataParallel):
        gen = self.gen_model.module
        dis = self.dis_model.module
        if self.Gen_copy is not None:
            gen_copy = self.Gen_copy.module
    else:
        gen, dis = self.gen_model, self.dis_model
        if self.Gen_copy is not None:
            gen_copy = self.Gen_copy

    g_states = {'seed': self.train_config['seed'], 'run_name': self.run_name, 'step': step, 'best_step': self.best_step,
                'state_dict': gen.state_dict(), 'optimizer': self.G_optimizer.state_dict(), 'ada_p': self.ada_aug_p}

    d_states = {'seed': self.train_config['seed'], 'run_name': self.run_name, 'step': step, 'best_step': self.best_step,
                'state_dict': dis.state_dict(), 'optimizer': self.D_optimizer.state_dict(), 'ada_p': self.ada_aug_p,
                'best_fid': self.best_fid, 'best_fid_checkpoint_path': self.checkpoint_dir}

    # Drop the stale checkpoint pair with the same tag before writing the new one.
    if len(glob.glob(join(self.checkpoint_dir, "model=G-{when}-weights-step*.pth".format(when=when)))) >= 1:
        find_and_remove(glob.glob(join(self.checkpoint_dir, "model=G-{when}-weights-step*.pth".format(when=when)))[0])
        find_and_remove(glob.glob(join(self.checkpoint_dir, "model=D-{when}-weights-step*.pth".format(when=when)))[0])

    g_checkpoint_output_path = join(self.checkpoint_dir, "model=G-{when}-weights-step={step}.pth".format(when=when, step=str(step)))
    d_checkpoint_output_path = join(self.checkpoint_dir, "model=D-{when}-weights-step={step}.pth".format(when=when, step=str(step)))

    if when == "best":
        # A new best also refreshes the "current" checkpoints so the latest
        # weights are always reachable under the current tag.
        # (Fix: dropped the original's no-op `.format(when=when)` calls on
        # patterns that contain no `{when}` placeholder.)
        if len(glob.glob(join(self.checkpoint_dir, "model=G-current-weights-step*.pth"))) >= 1:
            find_and_remove(glob.glob(join(self.checkpoint_dir, "model=G-current-weights-step*.pth"))[0])
            find_and_remove(glob.glob(join(self.checkpoint_dir, "model=D-current-weights-step*.pth"))[0])

        g_checkpoint_output_path_ = join(self.checkpoint_dir, "model=G-current-weights-step={step}.pth".format(step=str(step)))
        d_checkpoint_output_path_ = join(self.checkpoint_dir, "model=D-current-weights-step={step}.pth".format(step=str(step)))

        torch.save(g_states, g_checkpoint_output_path_)
        torch.save(d_states, d_checkpoint_output_path_)

    torch.save(g_states, g_checkpoint_output_path)
    torch.save(d_states, d_checkpoint_output_path)

    # The EMA generator, when present, gets its own checkpoint file.
    if self.Gen_copy is not None:
        g_ema_states = {'state_dict': gen_copy.state_dict()}
        if len(glob.glob(join(self.checkpoint_dir, "model=G_ema-{when}-weights-step*.pth".format(when=when)))) >= 1:
            find_and_remove(glob.glob(join(self.checkpoint_dir, "model=G_ema-{when}-weights-step*.pth".format(when=when)))[0])

        g_ema_checkpoint_output_path = join(self.checkpoint_dir, "model=G_ema-{when}-weights-step={step}.pth".format(when=when, step=str(step)))

        if when == "best":
            if len(glob.glob(join(self.checkpoint_dir, "model=G_ema-current-weights-step*.pth"))) >= 1:
                find_and_remove(glob.glob(join(self.checkpoint_dir, "model=G_ema-current-weights-step*.pth"))[0])

            g_ema_checkpoint_output_path_ = join(self.checkpoint_dir, "model=G_ema-current-weights-step={step}.pth".format(step=str(step)))
            torch.save(g_ema_states, g_ema_checkpoint_output_path_)

        torch.save(g_ema_states, g_ema_checkpoint_output_path)

    if self.logger:
        self.logger.info("Saved model to {}".format(self.checkpoint_dir))

    self.dis_model.train()
    self.gen_model.train()
    if self.Gen_copy is not None:
        self.Gen_copy.train()
################################################################################################################################
################################################################################################################################
def evaluation(self, step, standing_statistics, standing_step):
    """Run the evaluation suite at `step`: FID, Inception Score, and F_beta precision/recall.

    Switches the discriminator to eval mode, scores num_eval[eval_type] generated
    samples, logs all metrics, and restores train mode before returning.

    Returns:
        bool: True when this step's FID is the best (lowest, ties included) seen so far.
    """
    # Latent optimisation needs gradients, so autograd is only disabled when it is off.
    with torch.no_grad() if self.latent_op is False else dummy_context_mgr() as mpc:
        self.logger.info("Start Evaluation ({step} Step): {run_name}".format(step=step, run_name=self.run_name))
        is_best = False

        # Fixed evaluation hyper-parameters: IS splits, PR repetitions, PR clusters, and beta.
        num_split, num_run4PR, num_cluster4PR, beta4PR = 1, 10, 20, 8
        self.dis_model.eval()
        # Sample from the EMA copy (or raw generator) configured for inference.
        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=False)

        fid_score, self.m1, self.s1 = calculate_fid_score(self.eval_dataloader, generator, self.dis_model, self.inception_model, self.num_eval[self.eval_type],
                                                          self.truncated_factor, self.prior, self.latent_op, self.latent_op_step4eval, self.latent_op_alpha,
                                                          self.latent_op_beta, self.default_device, self.mu, self.sigma, self.run_name)

        kl_score, kl_std = calculate_incep_score(self.eval_dataloader, generator, self.dis_model, self.inception_model, self.num_eval[self.eval_type],
                                                 self.truncated_factor, self.prior, self.latent_op, self.latent_op_step4eval, self.latent_op_alpha,
                                                 self.latent_op_beta, num_split, self.default_device)

        precision, recall, f_beta, f_beta_inv = calculate_f_beta_score(self.eval_dataloader, generator, self.dis_model, self.inception_model, self.num_eval[self.eval_type],
                                                                       num_run4PR, num_cluster4PR, beta4PR, self.truncated_factor, self.prior, self.latent_op,
                                                                       self.latent_op_step4eval, self.latent_op_alpha, self.latent_op_beta, self.default_device)
        PR_Curve = plot_pr_curve(precision, recall, self.run_name, self.logger)

        # Commented-out accuracy evaluation, kept verbatim for reference.
        '''
        if self.D_loss.__name__ != "loss_wgan_dis":
            real_train_acc, fake_acc = calculate_accuracy(self.train_dataloader, generator, self.dis_model, self.D_loss, self.num_eval[self.eval_type],
                                                          self.truncated_factor, self.prior, self.latent_op, self.latent_op_step, self.latent_op_alpha,
                                                          self.latent_op_beta, self.default_device, cr=self.cr, eval_generated_sample=True)

            if self.eval_type == 'train':
                acc_dict = {'real_train': real_train_acc, 'fake': fake_acc}
            else:
                real_eval_acc = calculate_accuracy(self.eval_dataloader, generator, self.dis_model, self.D_loss, self.num_eval[self.eval_type],
                                                   self.truncated_factor, self.prior, self.latent_op, self.latent_op_step, self.latent_op_alpha,
                                                   self. latent_op_beta, self.default_device, cr=self.cr, eval_generated_sample=False)
                acc_dict = {'real_train': real_train_acc, 'real_valid': real_eval_acc, 'fake': fake_acc}

            self.writer.add_scalars('{}/Accuracy'.format(self.prune_round), acc_dict, step)
        '''

        # Track the best FID; NOTE(review): a tie (fid_score == best_fid) also counts
        # as a new best and updates best_step — confirm that is intended.
        if self.best_fid is None:
            self.best_fid, self.best_step, is_best, f_beta_best, f_beta_inv_best = fid_score, step, True, f_beta, f_beta_inv
        else:
            if fid_score <= self.best_fid:
                self.best_fid, self.best_step, is_best, f_beta_best, f_beta_inv_best = fid_score, step, True, f_beta, f_beta_inv

        # TensorBoard scalars/figure for this evaluation.
        self.writer.add_scalars('FID score', {'using {type} moments'.format(type=self.eval_type):fid_score}, step)
        self.writer.add_scalars('F_beta score', {'{num} generated images'.format(num=str(self.num_eval[self.eval_type])):f_beta}, step)
        self.writer.add_scalars('F_beta_inv score', {'{num} generated images'.format(num=str(self.num_eval[self.eval_type])):f_beta_inv}, step)
        self.writer.add_scalars('IS score', {'{num} generated images'.format(num=str(self.num_eval[self.eval_type])):kl_score}, step)
        self.writer.add_figure('PR_Curve', PR_Curve, global_step=step)

        self.logger.info('F_{beta} score (Step: {step}, Using {type} images): {F_beta}'.format(beta=beta4PR, step=step, type=self.eval_type, F_beta=f_beta))
        self.logger.info('F_1/{beta} score (Step: {step}, Using {type} images): {F_beta_inv}'.format(beta=beta4PR, step=step, type=self.eval_type, F_beta_inv=f_beta_inv))
        self.logger.info('FID score (Step: {step}, Using {type} moments): {FID}'.format(step=step, type=self.eval_type, FID=fid_score))
        self.logger.info('Inception score (Step: {step}, {num} generated images): {IS}'.format(step=step, num=str(self.num_eval[self.eval_type]), IS=kl_score))
        # NOTE(review): `self.train` is read here as a boolean flag (training vs.
        # eval-only run) — confirm the attribute, it is easy to confuse with a method.
        if self.train:
            self.logger.info('Best FID score (Step: {step}, Using {type} moments): {FID}'.format(step=self.best_step, type=self.eval_type, FID=self.best_fid))

        # Restore training configuration on both networks.
        self.dis_model.train()
        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=True)

    return is_best
################################################################################################################################
################################################################################################################################
def save_images(self, is_generate, standing_statistics, standing_step, png=True, npz=True):
    """Dump evaluation-set (or generated) images to disk as PNG files and/or an NPZ archive.

    Args:
        is_generate: forwarded to the writers; selects generated vs. real images.
        standing_statistics, standing_step: generator standing-stats configuration.
        png, npz: which output formats to produce (both by default).
    """
    with torch.no_grad() if self.latent_op is False else dummy_context_mgr() as mpc:
        self.dis_model.eval()
        sample_source = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                              self.batch_size, self.z_dim, self.num_classes, self.default_device, training=False)

        # Both writers take an identical argument list; only the on-disk format differs,
        # so collect the requested ones and invoke them in turn (PNG first, as before).
        writers = []
        if png:
            writers.append(save_images_png)
        if npz:
            writers.append(save_images_npz)
        for write_images in writers:
            write_images(self.run_name, self.eval_dataloader, self.num_eval[self.eval_type], self.num_classes, sample_source,
                         self.dis_model, is_generate, self.truncated_factor, self.prior, self.latent_op, self.latent_op_step,
                         self.latent_op_alpha, self.latent_op_beta, self.default_device)
################################################################################################################################
################################################################################################################################
def run_image_visualization(self, nrow, ncol, standing_statistics, standing_step):
    """Sample one batch from the generator and save it as a grid image under ./figures/<run_name>/.

    `ncol` sets the grid width passed to the plot helper; `nrow` is accepted
    but not referenced in this method.
    """
    self.logger.info('Start visualizing images....')
    with torch.no_grad() if self.latent_op is False else dummy_context_mgr() as mpc:
        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=False)

        # NOTE(review): the sampler string is forwarded to sample_latents;
        # "class_order_some" presumably draws labels grouped by class — confirm in utils.sample.
        sampler = "default" if self.conditional_strategy == "no" else "class_order_some"
        if self.zcr:
            zs, fake_labels, zs_t = sample_latents(self.prior, self.batch_size, self.z_dim, 1, self.num_classes,
                                                   self.sigma_noise, self.default_device, sampler=sampler)
        else:
            zs, fake_labels = sample_latents(self.prior, self.batch_size, self.z_dim, 1, self.num_classes,
                                             None, self.default_device, sampler=sampler)

        if self.latent_op:
            zs = latent_optimise(zs, fake_labels, self.gen_model, self.dis_model, self.conditional_strategy,
                                 self.latent_op_step, self.latent_op_rate, self.latent_op_alpha, self.latent_op_beta,
                                 False, self.default_device)

        generated_images = generator(zs, fake_labels, evaluation=True)
        # Generator output is rescaled from [-1, 1] to [0, 1] before plotting.
        plot_img_canvas((generated_images.detach().cpu()+1)/2, "./figures/{run_name}/generated_canvas.png".\
                        format(run_name=self.run_name), self.logger, ncol)

        # Put the generator back into its training configuration.
        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=True)
################################################################################################################################
################################################################################################################################
def run_linear_interpolation(self, nrow, ncol, fix_z, fix_y, standing_statistics, standing_step):
    """Save an interpolation grid: fix_z holds the latent and interpolates class embeddings;
    fix_y holds the class embedding and interpolates latents. Exactly one flag is expected
    to be True (the assert below only forbids both being True).
    """
    self.logger.info('Start linear interpolation analysis....')
    with torch.no_grad() if self.latent_op is False else dummy_context_mgr() as mpc:
        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=False)
        # Class-embedding lookup table; unwrap DataParallel if necessary.
        shared = generator.module.shared if isinstance(generator, DataParallel) else generator.shared
        assert int(fix_z)*int(fix_y) != 1, "unable to switch fix_z and fix_y on together!"

        # NOTE(review): if BOTH flags are False, `name` is never assigned and the
        # plot call below raises NameError — confirm callers always set one flag.
        if fix_z:
            # One z per row, repeated across the columns.
            zs = torch.randn(nrow, 1, self.z_dim, device=self.default_device)
            zs = zs.repeat(1, ncol, 1).view(-1, self.z_dim)
            name = "fix_z"
        else:
            # Interpolate between two random latents per row (ncol-2 intermediate points).
            zs = interp(torch.randn(nrow, 1, self.z_dim, device=self.default_device),
                        torch.randn(nrow, 1, self.z_dim, device=self.default_device),
                        ncol - 2).view(-1, self.z_dim)

        if fix_y:
            # One class embedding per row, repeated across the columns.
            ys = sample_1hot(nrow, self.num_classes, device=self.default_device)
            ys = shared(ys).view(nrow, 1, -1)
            ys = ys.repeat(1, ncol, 1).view(nrow * (ncol), -1)
            name = "fix_y"
        else:
            # NOTE(review): unlike the fix_y branch, sample_1hot is called here
            # without a device argument — verify its default device matches.
            ys = interp(shared(sample_1hot(nrow, self.num_classes)).view(nrow, 1, -1),
                        shared(sample_1hot(nrow, self.num_classes)).view(nrow, 1, -1),
                        ncol-2).view(nrow * (ncol), -1)

        interpolated_images = generator(zs, None, shared_label=ys, evaluation=True)

        # Rescale from [-1, 1] to [0, 1] before plotting.
        plot_img_canvas((interpolated_images.detach().cpu()+1)/2, "./figures/{run_name}/Interpolated_images_{fix_flag}.png".\
                        format(run_name=self.run_name, fix_flag=name), self.logger, ncol)

        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=True)
################################################################################################################################
################################################################################################################################
def run_nearest_neighbor(self, nrow, ncol, standing_statistics, standing_step):
    """For each class, generate one fake anchor image and plot it beside its
    (ncol-1) nearest real training images, measured by ResNet-50 feature distance.
    Canvases are flushed to disk every `nrow` classes.
    """
    self.logger.info('Start nearest neighbor analysis....')
    with torch.no_grad() if self.latent_op is False else dummy_context_mgr() as mpc:
        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=False)

        # Pretrained ResNet-50 with the final FC stripped acts as the feature extractor.
        resnet50_model = torch.hub.load('pytorch/vision:v0.6.0', 'resnet50', pretrained=True)
        resnet50_conv = nn.Sequential(*list(resnet50_model.children())[:-1]).to(self.default_device)
        if self.n_gpus > 1:
            resnet50_conv = DataParallel(resnet50_conv, output_device=self.default_device)
        resnet50_conv.eval()

        for c in tqdm(range(self.num_classes)):
            fake_images, fake_labels = generate_images_for_KNN(self.batch_size, c, generator, self.dis_model, self.truncated_factor, self.prior, self.latent_op,
                                                               self.latent_op_step, self.latent_op_alpha, self.latent_op_beta, self.default_device)
            # Only the first generated image of the batch is used as the query anchor.
            fake_image = torch.unsqueeze(fake_images[0], dim=0)
            # Images are mapped from [-1, 1] to [0, 1] before feature extraction.
            fake_anchor_embedding = torch.squeeze(resnet50_conv((fake_image+1)/2))

            # Iterate over all real training images of class c only.
            num_samples, target_sampler = target_class_sampler(self.train_dataset, c)
            train_dataloader = torch.utils.data.DataLoader(self.train_dataset, batch_size=self.batch_size, shuffle=False, sampler=target_sampler,
                                                           num_workers=self.train_config['num_workers'], pin_memory=True)
            train_iter = iter(train_dataloader)
            for batch_idx in range(num_samples//self.batch_size):
                real_images, real_labels = next(train_iter)
                real_images = real_images.to(self.default_device)
                real_embeddings = torch.squeeze(resnet50_conv((real_images+1)/2))
                if batch_idx == 0:
                    # Mean squared feature distance to the anchor, per real image.
                    distances = torch.square(real_embeddings - fake_anchor_embedding).mean(dim=1).detach().cpu().numpy()
                    holder = real_images.detach().cpu().numpy()
                else:
                    distances = np.concatenate([distances, torch.square(real_embeddings - fake_anchor_embedding).mean(dim=1).detach().cpu().numpy()], axis=0)
                    holder = np.concatenate([holder, real_images.detach().cpu().numpy()], axis=0)

            # Indices of the (ncol-1) smallest distances, ascending (nearest first).
            nearest_indices = (-distances).argsort()[-(ncol-1):][::-1]
            # NOTE(review): with nrow == 1 the first branch is always taken, so the
            # elif that writes the figure never runs — confirm nrow > 1 is assumed.
            if c % nrow == 0:
                # First class of a canvas: start a new one with [anchor | neighbors].
                canvas = np.concatenate([fake_image.detach().cpu().numpy(), holder[nearest_indices]], axis=0)
            elif c % nrow == nrow-1:
                # Last class of the canvas: append the row and flush the figure to disk.
                row_images = np.concatenate([fake_image.detach().cpu().numpy(), holder[nearest_indices]], axis=0)
                canvas = np.concatenate((canvas, row_images), axis=0)
                plot_img_canvas((torch.from_numpy(canvas)+1)/2, "./figures/{run_name}/Fake_anchor_{ncol}NN_{cls}.png".\
                                format(run_name=self.run_name,ncol=ncol, cls=c), self.logger, ncol)
            else:
                row_images = np.concatenate([fake_image.detach().cpu().numpy(), holder[nearest_indices]], axis=0)
                canvas = np.concatenate((canvas, row_images), axis=0)

        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=True)
################################################################################################################################
################################################################################################################################
def run_frequency_analysis(self, num_images, standing_statistics, standing_step):
    """Compare averaged log-magnitude Fourier spectra of real vs. generated images.

    Draws `num_images` real and fake samples, converts them to grayscale, removes
    low-frequency content with a median filter, averages the centred 2D FFT
    log-spectra over all images, and saves both spectrum plots.
    """
    # Fix: the original log message said 'linear interpolation' (copy-paste from
    # run_linear_interpolation); report the actual analysis being run.
    self.logger.info('Start frequency analysis....')
    with torch.no_grad() if self.latent_op is False else dummy_context_mgr() as mpc:
        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=False)
        train_iter = iter(self.train_dataloader)
        num_batches = num_images//self.batch_size
        for i in range(num_batches):
            if self.zcr:
                zs, fake_labels, zs_t = sample_latents(self.prior, self.batch_size, self.z_dim, 1, self.num_classes,
                                                       self.sigma_noise, self.default_device)
            else:
                zs, fake_labels = sample_latents(self.prior, self.batch_size, self.z_dim, 1, self.num_classes,
                                                 None, self.default_device)

            if self.latent_op:
                zs = latent_optimise(zs, fake_labels, self.gen_model, self.dis_model, self.conditional_strategy,
                                     self.latent_op_step, self.latent_op_rate, self.latent_op_alpha, self.latent_op_beta,
                                     False, self.default_device)

            real_images, real_labels = next(train_iter)
            fake_images = generator(zs, fake_labels, evaluation=True).detach().cpu().numpy()

            # Map [-1, 1] float images to uint8 [0, 255].
            real_images = np.asarray((real_images + 1)*127.5, np.uint8)
            fake_images = np.asarray((fake_images + 1)*127.5, np.uint8)

            if i == 0:
                real_array = real_images
                fake_array = fake_images
            else:
                real_array = np.concatenate([real_array, real_images], axis = 0)
                fake_array = np.concatenate([fake_array, fake_images], axis = 0)

        N, C, H, W = np.shape(real_array)
        # Grayscale conversion with ITU-R BT.601 luma weights.
        real_r, real_g, real_b = real_array[:,0,:,:], real_array[:,1,:,:], real_array[:,2,:,:]
        real_gray = 0.2989 * real_r + 0.5870 * real_g + 0.1140 * real_b
        fake_r, fake_g, fake_b = fake_array[:,0,:,:], fake_array[:,1,:,:], fake_array[:,2,:,:]
        fake_gray = 0.2989 * fake_r + 0.5870 * fake_g + 0.1140 * fake_b
        for j in tqdm(range(N)):
            # Subtract a median-filtered copy to suppress low-frequency content,
            # then take the 2D FFT and accumulate the centred log-spectrum average.
            real_gray_f = np.fft.fft2(real_gray[j] - ndimage.median_filter(real_gray[j], size= H//8))
            fake_gray_f = np.fft.fft2(fake_gray[j] - ndimage.median_filter(fake_gray[j], size=H//8))

            real_gray_f_shifted = np.fft.fftshift(real_gray_f)
            fake_gray_f_shifted = np.fft.fftshift(fake_gray_f)

            if j == 0:
                real_gray_spectrum = 20*np.log(np.abs(real_gray_f_shifted))/N
                fake_gray_spectrum = 20*np.log(np.abs(fake_gray_f_shifted))/N
            else:
                real_gray_spectrum += 20*np.log(np.abs(real_gray_f_shifted))/N
                fake_gray_spectrum += 20*np.log(np.abs(fake_gray_f_shifted))/N

        plot_spectrum_image(real_gray_spectrum, fake_gray_spectrum, self.run_name, self.logger)

        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=True)
################################################################################################################################
def PGD(x, label, loss, model=None, steps=1, gamma=0.1, eps=(1/255), randinit=False, clip=False):
    """Sign-gradient perturbation of pre-FC discriminator features.

    Starting from `x`, takes `steps` steps of size `gamma` along the sign of the
    gradient of the negated `loss` (i.e. in the direction that reduces `loss`
    evaluated on model(x_adv, label, only_fc=True)). With `randinit`, starts from
    a uniform random point inside the eps-ball; with `clip`, projects back onto
    the eps-ball around `x` after every step. Returns the perturbed tensor.
    """
    perturbed = x.clone()
    if randinit:
        # Uniform noise in (-eps, eps).
        perturbed += (2.0 * torch.rand(perturbed.shape).cuda() - 1.0) * eps
    perturbed = perturbed.cuda()
    x = x.cuda()

    for _ in range(steps):
        adv_loss = -loss(model(perturbed, label, only_fc=True))
        step_grad = torch.autograd.grad(adv_loss, perturbed, only_inputs=True)[0]
        # Ascend the negated loss via its sign gradient, updating in place.
        perturbed.data.add_(gamma * torch.sign(step_grad.data))
        if clip:
            linfball_proj(x, eps, perturbed, in_place=True)
    return perturbed
def PGD_G(x, gen_labels, label, loss, gen_model, dis_model, steps=1, gamma=0.1, eps=(1/255), randinit=False, clip=False):
    """Sign-gradient perturbation of intermediate generator activations.

    Each step renders images with gen_model(x_adv, gen_labels, l1=False), scores
    them with dis_model(images, label), and moves `x_adv` by `gamma` along the
    sign of the gradient of the negated `loss`. With `clip`, projects back onto
    the eps-ball around `x`. `randinit` is accepted for signature parity with PGD
    but is not used here. Returns the perturbed activations.
    """
    perturbed = x.clone()
    perturbed = perturbed.cuda()
    x = x.cuda()

    for _ in range(steps):
        rendered = gen_model(perturbed, gen_labels, l1=False)
        adv_loss = -loss(dis_model(rendered, label))
        step_grad = torch.autograd.grad(adv_loss, perturbed, only_inputs=True)[0]
        # In-place sign-gradient ascent on the negated loss.
        perturbed.data.add_(gamma * torch.sign(step_grad.data))
        if clip:
            linfball_proj(x, eps, perturbed, in_place=True)
    return perturbed
import numpy as np
import sys
import glob
from scipy import ndimage
from os.path import join
from PIL import Image
from tqdm import tqdm
from datetime import datetime
from metrics.IS import calculate_incep_score
from metrics.FID import calculate_fid_score
from metrics.F_beta import calculate_f_beta_score
from metrics.Accuracy import calculate_accuracy
from utils.ada import augment
from utils.biggan_utils import interp
from utils.sample import sample_latents, sample_1hot, make_mask, target_class_sampler
from utils.misc import *
from utils.losses import calc_derv4gp, calc_derv4dra, calc_derv, latent_optimise
from utils.losses import Conditional_Contrastive_loss, Proxy_NCA_loss, NT_Xent_loss
from utils.diff_aug import DiffAugment
from utils.cr_diff_aug import CR_DiffAug
import torch
import torch.nn as nn
from torch.nn import DataParallel
import torch.nn.functional as F
import torchvision
from torchvision import transforms
# Checkpoint filename template: embeds the training step and the evaluation
# metrics (Inception Score mean/std and FID) at save time.
SAVE_FORMAT = 'step={step:0>3}-Inception_mean={Inception_mean:<.4}-Inception_std={Inception_std:<.4}-FID={FID:<.5}.pth'

# One-line progress message emitted every `print_every` steps by the trainer.
LOG_FORMAT = (
    "Step: {step:>7} "
    "Progress: {progress:<.1%} "
    "Elapsed: {elapsed} "
    "temperature: {temperature:<.6} "
    "ada_p: {ada_p:<.6} "
    "Discriminator_loss: {dis_loss:<.6} "
    "Generator_loss: {gen_loss:<.6} "
)
def set_temperature(conditional_strategy, tempering_type, start_temperature, end_temperature, step_count, tempering_step, total_step):
    """Return the contrastive-loss temperature for the current training step.

    Only the 'ContraGAN' strategy uses a temperature.  With 'continuous'
    tempering it is interpolated linearly from start to end over the whole
    run; with 'discrete' tempering it jumps in `tempering_step` equal
    increments at regular intervals; any other tempering type keeps the
    start temperature.  For every other strategy the literal string 'no'
    is returned (it is only ever printed in the progress log).
    """
    if conditional_strategy != 'ContraGAN':
        return 'no'

    if tempering_type == 'continuous':
        # Linear ramp over the full training run.
        return start_temperature + step_count*(end_temperature - start_temperature)/total_step

    if tempering_type == 'discrete':
        # Step function: one jump of equal size per completed interval.
        interval = total_step//(tempering_step + 1)
        jumps = step_count//interval
        return start_temperature + jumps*(end_temperature - start_temperature)/tempering_step

    # Unknown tempering type: temperature stays fixed.
    return start_temperature
class Train_Eval(object):
    def __init__(self, run_name, best_step, dataset_name, eval_type, logger, writer, n_gpus, gen_model, dis_model, inception_model,
                 Gen_copy, Gen_ema, train_dataset, eval_dataset, train_dataloader, eval_dataloader, freeze_layers, conditional_strategy,
                 pos_collected_numerator, z_dim, num_classes, hypersphere_dim, d_spectral_norm, g_spectral_norm, G_optimizer, D_optimizer,
                 batch_size, g_steps_per_iter, d_steps_per_iter, accumulation_steps, total_step, G_loss, D_loss, contrastive_lambda, margin,
                 tempering_type, tempering_step, start_temperature, end_temperature, weight_clipping_for_dis, weight_clipping_bound,
                 gradient_penalty_for_dis, gradient_penalty_lambda, deep_regret_analysis_for_dis, regret_penalty_lambda, cr, cr_lambda, bcr,
                 real_lambda, fake_lambda, zcr, gen_lambda, dis_lambda, sigma_noise, diff_aug, ada, prev_ada_p, ada_target, ada_length, prior,
                 truncated_factor, ema, latent_op, latent_op_rate, latent_op_step, latent_op_step4eval, latent_op_alpha, latent_op_beta,
                 latent_norm_reg_weight, default_device, print_every, save_every, checkpoint_dir, evaluate, mu, sigma, best_fid,
                 best_fid_checkpoint_path, mixed_precision, train_config, model_config, gamma, steps):
        """Store the full training/evaluation configuration on the instance and
        build the strategy-specific auxiliary objects.

        Every constructor argument is copied verbatim to an attribute of the
        same name.  Afterwards this method: validates incompatible flag
        combinations (``check_flag_1``), instantiates the loss criterion that
        matches ``conditional_strategy``, creates the AMP ``GradScaler`` when
        mixed precision is on, and records the number of evaluation images per
        split for the supported datasets.
        """
        # ---- verbatim copy of every constructor argument onto the instance ----
        self.run_name = run_name
        self.best_step = best_step
        self.dataset_name = dataset_name
        self.eval_type = eval_type
        self.logger = logger
        self.writer = writer
        self.n_gpus = n_gpus
        self.gen_model = gen_model
        self.dis_model = dis_model
        self.inception_model = inception_model
        self.Gen_copy = Gen_copy
        self.Gen_ema = Gen_ema
        self.train_dataset = train_dataset
        self.eval_dataset = eval_dataset
        self.train_dataloader = train_dataloader
        self.eval_dataloader = eval_dataloader
        self.freeze_layers = freeze_layers
        self.conditional_strategy = conditional_strategy
        self.pos_collected_numerator = pos_collected_numerator
        self.z_dim = z_dim
        self.num_classes = num_classes
        self.hypersphere_dim = hypersphere_dim
        self.d_spectral_norm = d_spectral_norm
        self.g_spectral_norm = g_spectral_norm
        self.G_optimizer = G_optimizer
        self.D_optimizer = D_optimizer
        self.batch_size = batch_size
        self.g_steps_per_iter = g_steps_per_iter
        self.d_steps_per_iter = d_steps_per_iter
        self.accumulation_steps = accumulation_steps
        self.total_step = total_step
        self.G_loss = G_loss
        self.D_loss = D_loss
        self.contrastive_lambda = contrastive_lambda
        self.margin = margin
        self.tempering_type = tempering_type
        self.tempering_step = tempering_step
        self.start_temperature = start_temperature
        self.end_temperature = end_temperature
        self.weight_clipping_for_dis = weight_clipping_for_dis
        self.weight_clipping_bound = weight_clipping_bound
        self.gradient_penalty_for_dis = gradient_penalty_for_dis
        self.gradient_penalty_lambda = gradient_penalty_lambda
        self.deep_regret_analysis_for_dis = deep_regret_analysis_for_dis
        self.regret_penalty_lambda = regret_penalty_lambda
        self.cr = cr
        self.cr_lambda = cr_lambda
        self.bcr = bcr
        self.real_lambda = real_lambda
        self.fake_lambda = fake_lambda
        self.zcr = zcr
        self.gen_lambda = gen_lambda
        self.dis_lambda = dis_lambda
        self.sigma_noise = sigma_noise
        self.diff_aug = diff_aug
        self.ada = ada
        self.prev_ada_p = prev_ada_p
        self.ada_target = ada_target
        self.ada_length = ada_length
        self.prior = prior
        self.truncated_factor = truncated_factor
        self.ema = ema
        self.latent_op = latent_op
        self.latent_op_rate = latent_op_rate
        self.latent_op_step = latent_op_step
        self.latent_op_step4eval = latent_op_step4eval
        self.latent_op_alpha = latent_op_alpha
        self.latent_op_beta = latent_op_beta
        self.latent_norm_reg_weight = latent_norm_reg_weight
        self.default_device = default_device
        self.print_every = print_every
        self.save_every = save_every
        self.checkpoint_dir = checkpoint_dir
        self.evaluate = evaluate
        self.mu = mu
        self.sigma = sigma
        self.best_fid = best_fid
        self.best_fid_checkpoint_path = best_fid_checkpoint_path
        self.mixed_precision = mixed_precision
        self.train_config = train_config
        self.model_config = model_config

        # ---- derived state -------------------------------------------------
        self.start_time = datetime.now()
        self.l2_loss = torch.nn.MSELoss()
        self.ce_loss = torch.nn.CrossEntropyLoss()
        # DiffAugment policy string used when self.diff_aug is enabled.
        self.policy = "color,translation,cutout"
        self.steps = steps
        self.gamma = gamma

        # NOTE(review): `sampler` is assigned but not used in this constructor
        # -- presumably consumed elsewhere or dead code; confirm.
        sampler = define_sampler(self.dataset_name, self.conditional_strategy)

        # Validate mutually incompatible option combinations early.
        check_flag_1(self.tempering_type, self.pos_collected_numerator, self.conditional_strategy, self.diff_aug, self.ada,
                     self.mixed_precision, self.gradient_penalty_for_dis, self.deep_regret_analysis_for_dis, self.cr, self.bcr, self.zcr)

        # Strategy-specific auxiliary criteria.
        if self.conditional_strategy == 'ContraGAN':
            self.contrastive_criterion = Conditional_Contrastive_loss(self.default_device, self.batch_size, self.pos_collected_numerator)
        elif self.conditional_strategy == 'Proxy_NCA_GAN':
            # The embedding layer lives on .module when wrapped in DataParallel.
            if isinstance(self.dis_model, DataParallel):
                self.embedding_layer = self.dis_model.module.embedding
            else:
                self.embedding_layer = self.dis_model.embedding
            self.NCA_criterion = Proxy_NCA_loss(self.default_device, self.embedding_layer, self.num_classes, self.batch_size)
        elif self.conditional_strategy == 'NT_Xent_GAN':
            self.NT_Xent_criterion = NT_Xent_loss(self.default_device, self.batch_size)
        else:
            pass

        if self.mixed_precision:
            # AMP loss scaler; only created when mixed precision is requested.
            self.scaler = torch.cuda.amp.GradScaler()

        # Number of images used for evaluation, per split, per dataset.
        if self.dataset_name in ["imagenet"]:
            self.num_eval = {'train':50000, 'valid':50000}
        elif self.dataset_name in ["imagenet_less_0.25"]:
            self.num_eval = {'train':50000, 'valid':50000}
        elif self.dataset_name in ["imagenet_less"]:
            self.num_eval = {'train':50000, 'valid':50000}
        elif self.dataset_name == "tiny_imagenet":
            self.num_eval = {'train':50000, 'valid':10000}
        elif self.dataset_name == "cifar10":
            self.num_eval = {'train':50000, 'test':10000}
        elif self.dataset_name == "cifar10_less":
            self.num_eval = {'train':len(self.train_dataset.data), 'valid':len(self.eval_dataset.data), 'test':len(self.eval_dataset.data)}
        elif self.dataset_name in ["cifar100_less"]:
            self.num_eval = {'train':len(self.train_dataset.data), 'valid':len(self.eval_dataset.data), 'test':len(self.eval_dataset.data)}
        elif self.dataset_name == "custom":
            num_train_images = len(self.train_dataset.data)
            num_eval_images = len(self.eval_dataset.data)
            self.num_eval = {'train':num_train_images, 'valid':num_eval_images}
        else:
            raise NotImplementedError
_aug = self.dis_model(real_images_aug, real_labels)
cls_consistency_loss = self.l2_loss(cls_embed_real, cls_embed_real_aug)
elif self.conditional_strategy == "ProjGAN_adv":
dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
else:
raise NotImplementedError
consistency_loss = self.l2_loss(dis_out_real, dis_out_real_aug)
if self.conditional_strategy in ["ACGAN", "NT_Xent_GAN", "Proxy_NCA_GAN", "ContraGAN"]:
consistency_loss += cls_consistency_loss
dis_acml_loss += self.cr_lambda*consistency_loss
if self.bcr:
real_images_aug = CR_DiffAug(real_images)
fake_images_aug = CR_DiffAug(fake_images)
if self.conditional_strategy == "ACGAN":
cls_out_real_aug, dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
cls_out_fake_aug, dis_out_fake_aug = self.dis_model(fake_images_aug, fake_labels)
cls_bcr_real_loss = self.l2_loss(cls_out_real, cls_out_real_aug)
cls_bcr_fake_loss = self.l2_loss(cls_out_fake, cls_out_fake_aug)
elif self.conditional_strategy == "ProjGAN" or self.conditional_strategy == "no":
dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
dis_out_fake_aug = self.dis_model(fake_images_aug, fake_labels)
elif self.conditional_strategy in ["ContraGAN", "Proxy_NCA_GAN", "NT_Xent_GAN"]:
cls_proxies_real_aug, cls_embed_real_aug, dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
cls_proxies_fake_aug, cls_embed_fake_aug, dis_out_fake_aug = self.dis_model(fake_images_aug, fake_labels)
cls_bcr_real_loss = self.l2_loss(cls_embed_real, cls_embed_real_aug)
cls_bcr_fake_loss = self.l2_loss(cls_embed_fake, cls_embed_fake_aug)
elif self.conditional_strategy == "ProjGAN_adv":
dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
dis_out_fake_aug = self.dis_model(fake_images_aug, fake_labels)
else:
raise NotImplementedError
bcr_real_loss = self.l2_loss(dis_out_real, dis_out_real_aug)
bcr_fake_loss = self.l2_loss(dis_out_fake, dis_out_fake_aug)
if self.conditional_strategy in ["ACGAN", "NT_Xent_GAN", "Proxy_NCA_GAN", "ContraGAN"]:
bcr_real_loss += cls_bcr_real_loss
bcr_fake_loss += cls_bcr_fake_loss
dis_acml_loss += self.real_lambda*bcr_real_loss + self.fake_lambda*bcr_fake_loss
if self.zcr:
fake_images_zaug = self.gen_model(zs_t, fake_labels)
if self.conditional_strategy == "ACGAN":
cls_out_fake_zaug, dis_out_fake_zaug = self.dis_model(fake_images_zaug, fake_labels)
cls_zcr_dis_loss = self.l2_loss(cls_out_fake, cls_out_fake_zaug)
elif self.conditional_strategy == "ProjGAN" or self.conditional_strategy == "no":
dis_out_fake_zaug = self.dis_model(fake_images_zaug, fake_labels)
elif self.conditional_strategy in ["ContraGAN", "Proxy_NCA_GAN", "NT_Xent_GAN"]:
cls_proxies_fake_zaug, cls_embed_fake_zaug, dis_out_fake_zaug = self.dis_model(fake_images_zaug, fake_labels)
cls_zcr_dis_loss = self.l2_loss(cls_embed_fake, cls_embed_fake_zaug)
elif self.conditional_strategy == "ProjGAN_adv":
dis_out_fake_zaug = self.dis_model(fake_images_zaug, fake_labels)
else:
raise NotImplementedError
zcr_dis_loss = self.l2_loss(dis_out_fake, dis_out_fake_zaug)
if self.conditional_strategy in ["ACGAN", "NT_Xent_GAN", "Proxy_NCA_GAN", "ContraGAN"]:
zcr_dis_loss += cls_zcr_dis_loss
dis_acml_loss += self.dis_lambda*zcr_dis_loss
if self.gradient_penalty_for_dis:
dis_acml_loss += self.gradient_penalty_lambda*calc_derv4gp(self.dis_model, self.conditional_strategy, real_images,
fake_images, real_labels, self.default_device)
if self.deep_regret_analysis_for_dis:
dis_acml_loss += self.regret_penalty_lambda*calc_derv4dra(self.dis_model, self.conditional_strategy, real_images,
real_labels, self.default_device)
if self.ada:
ada_aug_data = torch.tensor((torch.sign(dis_out_real).sum().item(), dis_out_real.shape[0]), device = self.default_device)
self.ada_augment += ada_aug_data
if self.ada_augment[1] > (self.batch_size*4 - 1):
authen_out_signs, num_outputs = self.ada_augment.tolist()
r_t_stat = authen_out_signs/num_outputs
sign = 1 if r_t_stat > self.ada_target else -1
self.ada_aug_p += sign*self.ada_aug_step*num_outputs
self.ada_aug_p = min(1.0, max(0.0, self.ada_aug_p))
self.ada_augment.mul_(0.0)
dis_acml_loss = dis_acml_loss/self.accumulation_steps
if self.mixed_precision:
self.scaler.scale(dis_acml_loss).backward()
else:
dis_acml_loss.backward()
if self.mixed_precision:
self.scaler.step(self.D_optimizer)
self.scaler.update()
else:
self.D_optimizer.step()
if self.weight_clipping_for_dis:
for p in self.dis_model.parameters():
p.data.clamp_(-self.weight_clipping_bound, self.weight_clipping_bound)
if step_count % self.print_every == 0 and step_count !=0 and self.logger:
if self.d_spectral_norm:
dis_sigmas = calculate_all_sn(self.dis_model)
self.writer.add_scalars('SN_of_dis', dis_sigmas, step_count)
toggle_grad(self.dis_model, False, freeze_layers=-1)
toggle_grad(self.gen_model, True, freeze_layers=-1)
for step_index in range(self.g_steps_per_iter):
self.G_optimizer.zero_grad()
for acml_step in range(self.accumulation_steps):
with torch.cuda.amp.autocast() if self.mixed_precision else dummy_context_mgr() as mpc:
if self.zcr:
zs, fake_labels, zs_t = sample_latents(self.prior, self.batch_size, self.z_dim, 1, self.num_classes,
self.sigma_noise, self.default_device)
else:
zs, fake_labels = sample_latents(self.prior, self.batch_size, self.z_dim, 1, self.num_classes,
None, self.default_device)
if self.latent_op:
zs, transport_cost = latent_optimise(zs, fake_labels, self.gen_model, self.dis_model, self.conditional_strategy,
self.latent_op_step, self.latent_op_rate, self.latent_op_alpha,
self.latent_op_beta, True, self.default_device)
if not self.conditional_strategy == 'ProjGAN_adv':
fake_images = self.gen_model(zs, fake_labels)
else:
gen_out_prefc, labels_prefc = self.gen_model(zs, fake_labels, only_l1=True)
loss_fake = lambda x: -torch.mean(x)
gen_out_adv = PGD_G(gen_out_prefc, labels_prefc, fake_labels, loss_fake, self.gen_model, self.dis_model, steps=self.steps, gamma=self.gamma)
fake_images = self.gen_model(gen_out_prefc, labels_prefc, l1=False)
fake_images_adv = self.gen_model(gen_out_adv, labels_prefc, l1=False)
if self.diff_aug:
fake_images = DiffAugment(fake_images, policy=self.policy)
if self.ada:
fake_images, _ = augment(fake_images, self.ada_aug_p)
if self.conditional_strategy == "ACGAN":
cls_out_fake, dis_out_fake = self.dis_model(fake_images, fake_labels)
elif self.conditional_strategy == "ProjGAN" or self.conditional_strategy == "no":
dis_out_fake = self.dis_model(fake_images, fake_labels)
elif self.conditional_strategy in ["NT_Xent_GAN", "Proxy_NCA_GAN", "ContraGAN"]:
fake_cls_mask = make_mask(fake_labels, self.num_classes, self.default_device)
cls_proxies_fake, cls_embed_fake, dis_out_fake = self.dis_model(fake_images, fake_labels)
elif self.conditional_strategy == 'ProjGAN_adv':
dis_out_fake = self.dis_model(fake_images, fake_labels)
dis_out_adv = self.dis_model(fake_images_adv, fake_labels)
else:
raise NotImplementedError
gen_acml_loss = self.G_loss(dis_out_fake)
if self.latent_op:
gen_acml_loss += transport_cost*self.latent_norm_reg_weight
if self.zcr:
fake_images_zaug = self.gen_model(zs_t, fake_labels)
zcr_gen_loss = -1 * self.l2_loss(fake_images, fake_images_zaug)
gen_acml_loss += self.gen_lambda*zcr_gen_loss
if self.conditional_strategy == "ACGAN":
gen_acml_loss += self.ce_loss(cls_out_fake, fake_labels)
elif self.conditional_strategy == "ContraGAN":
gen_acml_loss += self.contrastive_lambda*self.contrastive_criterion(cls_embed_fake, cls_proxies_fake, fake_cls_mask, fake_labels, t, self.margin)
elif self.conditional_strategy == "Proxy_NCA_GAN":
gen_acml_loss += self.contrastive_lambda*self.NCA_criterion(cls_embed_fake, cls_proxies_fake, fake_labels)
elif self.conditional_strategy == "NT_Xent_GAN":
fake_images_aug = CR_DiffAug(fake_images)
_, cls_embed_fake_aug, dis_out_fake_aug = self.dis_model(fake_images_aug, fake_labels)
gen_acml_loss += self.contrastive_lambda*self.NT_Xent_criterion(cls_embed_fake, cls_embed_fake_aug, t)
elif self.conditional_strategy == 'ProjGAN_adv':
gen_acml_loss = (self.G_loss(dis_out_fake) + self.G_loss(dis_out_adv)) / 2
else:
pass
gen_acml_loss = gen_acml_loss/self.accumulation_steps
if self.mixed_precision:
self.scaler.scale(gen_acml_loss).backward()
else:
gen_acml_loss.backward()
if self.mixed_precision:
self.scaler.step(self.G_optimizer)
self.scaler.update()
else:
self.G_optimizer.step()
if self.ema:
self.Gen_ema.update(step_count)
step_count += 1
if step_count % self.print_every == 0 and self.logger:
log_message = LOG_FORMAT.format(step=step_count,
progress=step_count/total_step,
elapsed=elapsed_time(self.start_time),
temperature=t,
ada_p=self.ada_aug_p,
dis_loss=dis_acml_loss.item(),
gen_loss=gen_acml_loss.item(),
)
self.logger.info(log_message)
if self.g_spectral_norm:
gen_sigmas = calculate_all_sn(self.gen_model)
self.writer.add_scalars('SN_of_gen', gen_sigmas, step_count)
self.writer.add_scalars('Losses', {'discriminator': dis_acml_loss.item(),
'generator': gen_acml_loss.item()}, step_count)
if self.ada:
self.writer.add_scalar('ada_p', self.ada_aug_p, step_count)
if step_count % self.save_every == 0 or step_count == total_step:
if self.evaluate:
is_best = self.evaluation(step_count, False, "N/A")
self.save(step_count, is_best)
else:
self.save(step_count, False)
return step_count-1
| true | true |
f71f885cac4c2f109c64f495a43df8973c10dbfa | 2,038 | py | Python | benchmark/points/edge_cnn_ke.py | KuangenZhang/pytorch_geometric | 0bfc79a5eaccfcd16a82395e8578a90c5e44759f | [
"MIT"
] | 1 | 2021-09-14T15:55:56.000Z | 2021-09-14T15:55:56.000Z | benchmark/points/edge_cnn_ke.py | KuangenZhang/pytorch_geometric | 0bfc79a5eaccfcd16a82395e8578a90c5e44759f | [
"MIT"
] | null | null | null | benchmark/points/edge_cnn_ke.py | KuangenZhang/pytorch_geometric | 0bfc79a5eaccfcd16a82395e8578a90c5e44759f | [
"MIT"
] | null | null | null | import argparse
import torch
import torch.nn.functional as F
from torch.nn import Sequential as Seq, Linear as Lin, ReLU, LeakyReLU
from torch_geometric.nn import DynamicEdgeConv, global_max_pool
from datasets import get_dataset
from train_eval import run
# Command-line hyper-parameters for the benchmark run.
parser = argparse.ArgumentParser()
parser.add_argument('--epochs', type=int, default=200)              # number of training epochs
parser.add_argument('--batch_size', type=int, default=24)           # mini-batch size
parser.add_argument('--lr', type=float, default=0.001)              # initial learning rate
parser.add_argument('--lr_decay_factor', type=float, default=0.5)   # multiplicative LR decay
parser.add_argument('--lr_decay_step_size', type=int, default=50)   # epochs between LR decays
parser.add_argument('--weight_decay', type=float, default=0)        # L2 regularisation strength
args = parser.parse_args()
class Net(torch.nn.Module):
    """DGCNN-style point-cloud classifier.

    Two DynamicEdgeConv blocks (k=20 nearest neighbours, max aggregation)
    extract per-point features, which are lifted to 512 dims, globally
    max-pooled per graph, and classified by a small MLP head with dropout.
    Returns per-class log-probabilities.
    """

    def __init__(self, num_classes):
        super(Net, self).__init__()

        # Edge MLP of the first dynamic edge convolution; the input has 6
        # channels because DynamicEdgeConv concatenates [x_i, x_j - x_i].
        edge_mlp1 = Seq(
            Lin(6, 64), LeakyReLU(negative_slope=0.2),
            Lin(64, 64), LeakyReLU(negative_slope=0.2),
            Lin(64, 64), LeakyReLU(negative_slope=0.2))
        self.conv1 = DynamicEdgeConv(edge_mlp1, k=20, aggr='max')

        # Second edge convolution operating on the 64-dim features (128 = 2*64).
        edge_mlp2 = Seq(
            Lin(128, 128), LeakyReLU(negative_slope=0.2),
            Lin(128, 128), LeakyReLU(negative_slope=0.2),
            Lin(128, 256), LeakyReLU(negative_slope=0.2))
        self.conv2 = DynamicEdgeConv(edge_mlp2, k=20, aggr='max')

        # Per-point lift followed by the post-pooling classification head.
        self.lin0 = Lin(256, 512)
        self.lin1 = Lin(512, 256)
        self.lin2 = Lin(256, 256)
        self.lin3 = Lin(256, num_classes)

    def forward(self, pos, batch):
        """Classify point clouds given point positions `pos` and the `batch`
        vector assigning each point to a graph."""
        features = self.conv1(pos, batch)
        features = self.conv2(features, batch)
        features = F.relu(self.lin0(features))
        pooled = global_max_pool(features, batch)
        hidden = F.relu(self.lin1(pooled))
        hidden = F.relu(self.lin2(hidden))
        hidden = F.dropout(hidden, p=0.5, training=self.training)
        logits = self.lin3(hidden)
        return F.log_softmax(logits, dim=-1)
# Load the 1024-point benchmark dataset, build the model, and run the shared
# train/eval loop with the CLI hyper-parameters.
train_dataset, test_dataset = get_dataset(num_points=1024)
model = Net(train_dataset.num_classes)
run(train_dataset, test_dataset, model, args.epochs, args.batch_size, args.lr,
    args.lr_decay_factor, args.lr_decay_step_size, args.weight_decay)
| 33.966667 | 78 | 0.663395 | import argparse
import torch
import torch.nn.functional as F
from torch.nn import Sequential as Seq, Linear as Lin, ReLU, LeakyReLU
from torch_geometric.nn import DynamicEdgeConv, global_max_pool
from datasets import get_dataset
from train_eval import run
parser = argparse.ArgumentParser()
parser.add_argument('--epochs', type=int, default=200)
parser.add_argument('--batch_size', type=int, default=24)
parser.add_argument('--lr', type=float, default=0.001)
parser.add_argument('--lr_decay_factor', type=float, default=0.5)
parser.add_argument('--lr_decay_step_size', type=int, default=50)
parser.add_argument('--weight_decay', type=float, default=0)
args = parser.parse_args()
class Net(torch.nn.Module):
def __init__(self, num_classes):
super(Net, self).__init__()
nn = Seq(Lin(6, 64), LeakyReLU(negative_slope=0.2),
Lin(64, 64), LeakyReLU(negative_slope=0.2),
Lin(64, 64), LeakyReLU(negative_slope=0.2))
self.conv1 = DynamicEdgeConv(nn, k=20, aggr='max')
nn = Seq(
Lin(128, 128), LeakyReLU(negative_slope=0.2),
Lin(128, 128), LeakyReLU(negative_slope=0.2),
Lin(128, 256), LeakyReLU(negative_slope=0.2))
self.conv2 = DynamicEdgeConv(nn, k=20, aggr='max')
self.lin0 = Lin(256, 512)
self.lin1 = Lin(512, 256)
self.lin2 = Lin(256, 256)
self.lin3 = Lin(256, num_classes)
def forward(self, pos, batch):
x = self.conv1(pos, batch)
x = self.conv2(x, batch)
x = F.relu(self.lin0(x))
x = global_max_pool(x, batch)
x = F.relu(self.lin1(x))
x = F.relu(self.lin2(x))
x = F.dropout(x, p=0.5, training=self.training)
x = self.lin3(x)
return F.log_softmax(x, dim=-1)
train_dataset, test_dataset = get_dataset(num_points=1024)
model = Net(train_dataset.num_classes)
run(train_dataset, test_dataset, model, args.epochs, args.batch_size, args.lr,
args.lr_decay_factor, args.lr_decay_step_size, args.weight_decay)
| true | true |
f71f895df82d2833eb823b4a18567f17d274743e | 1,316 | py | Python | torch2trt/converters/grid_sample.py | huliang2016/torch2trt_dynamic | aa55f354a742d26272eae93934d0cff7cd946cbf | [
"MIT"
] | null | null | null | torch2trt/converters/grid_sample.py | huliang2016/torch2trt_dynamic | aa55f354a742d26272eae93934d0cff7cd946cbf | [
"MIT"
] | null | null | null | torch2trt/converters/grid_sample.py | huliang2016/torch2trt_dynamic | aa55f354a742d26272eae93934d0cff7cd946cbf | [
"MIT"
] | null | null | null | from torch2trt.torch2trt import *
from torch2trt.plugins import *
@tensorrt_converter('torch.nn.functional.grid_sample')
def convert_grid_sample(ctx):
    """torch2trt converter for ``torch.nn.functional.grid_sample``.

    Reads the call arguments off the conversion context, translates the
    torch-level mode/padding options into the values the TensorRT grid-sample
    plugin expects, and attaches the plugin layer's output to the torch
    return tensor.
    """
    source = ctx.method_args[0]
    grid = get_arg(ctx, 'grid', pos=1, default=None)
    mode = get_arg(ctx, 'mode', pos=2, default='bilinear')
    padding_mode = get_arg(ctx, 'padding_mode', pos=3, default='zeros')
    align_corners = get_arg(ctx, 'align_corners', pos=4, default=False)
    output = ctx.method_return

    source_trt = trt_(ctx.network, source)
    grid_trt = trt_(ctx.network, grid)

    # Map the torch interpolation mode onto the TensorRT resize mode; any
    # other value is passed through unchanged (matching the original chain).
    if mode == 'bilinear':
        mode = trt.ResizeMode.LINEAR
    elif mode == 'nearest':
        mode = trt.ResizeMode.NEAREST

    # Encode the padding behaviour as the integer code the plugin expects;
    # unknown values again pass through unchanged.
    padding_codes = {'zeros': 0, 'border': 1, 'reflection': 2}
    padding_mode = padding_codes.get(padding_mode, padding_mode)

    plugin = create_gridsample_plugin("torch_gridsample_" + str(id(source)),
                                      mode=mode,
                                      padding_mode=padding_mode,
                                      align_corners=align_corners)
    layer = ctx.network.add_plugin_v2(
        inputs=[source_trt, grid_trt], plugin=plugin)
    output._trt = layer.get_output(0)
from torch2trt.plugins import *
@tensorrt_converter('torch.nn.functional.grid_sample')
def convert_grid_sample(ctx):
input = ctx.method_args[0]
grid = get_arg(ctx, 'grid', pos=1, default=None)
mode = get_arg(ctx, 'mode', pos=2, default='bilinear')
padding_mode = get_arg(ctx, 'padding_mode', pos=3, default='zeros')
align_corners = get_arg(ctx, 'align_corners', pos=4, default=False)
output = ctx.method_return
input_trt = trt_(ctx.network, input)
grid_trt = trt_(ctx.network, grid)
if mode == 'bilinear':
mode = trt.ResizeMode.LINEAR
elif mode == 'nearest':
mode = trt.ResizeMode.NEAREST
if padding_mode == 'zeros':
padding_mode = 0
elif padding_mode == 'border':
padding_mode = 1
elif padding_mode == 'reflection':
padding_mode = 2
plugin = create_gridsample_plugin("torch_gridsample_"+str(id(input)),
mode=mode,
padding_mode=padding_mode,
align_corners=align_corners)
layer = ctx.network.add_plugin_v2(
inputs=[input_trt, grid_trt], plugin=plugin)
output._trt = layer.get_output(0) | true | true |
f71f89ceb3c12643c41afd03550daee9d2e132a8 | 658 | py | Python | main/lynx/template.py | RoastVeg/cports | 803c7f07af341eb32f791b6ec1f237edb2764bd5 | [
"BSD-2-Clause"
] | 46 | 2021-06-10T02:27:32.000Z | 2022-03-27T11:33:24.000Z | main/lynx/template.py | RoastVeg/cports | 803c7f07af341eb32f791b6ec1f237edb2764bd5 | [
"BSD-2-Clause"
] | 58 | 2021-07-03T13:58:20.000Z | 2022-03-13T16:45:35.000Z | main/lynx/template.py | RoastVeg/cports | 803c7f07af341eb32f791b6ec1f237edb2764bd5 | [
"BSD-2-Clause"
] | 6 | 2021-07-04T10:46:40.000Z | 2022-01-09T00:03:59.000Z | pkgname = "lynx"
# cports-style version: "_pre" marks a pre-release; _uver is the upstream
# tarball's own version string ("dev" instead of "_pre").
pkgver = "2.9.0_pre10"
_uver = "2.9.0dev.10"
pkgrel = 0
build_style = "gnu_configure"
# Wide-character and IPv6 support plus compression/TLS backends.
configure_args = [
    "--enable-widec", "--enable-ipv6", "--with-zlib", "--with-bzlib",
    "--with-ssl"
]
hostmakedepends = ["pkgconf"]
makedepends = [
    "zlib-devel", "libbz2-devel", "ncurses-devel", "openssl-devel"
]
pkgdesc = "Text web browser"
maintainer = "q66 <q66@chimera-linux.org>"
license = "GPL-2.0-or-later"
url = "http://lynx.invisible-island.net"
source = f"http://invisible-mirror.net/archives/{pkgname}/tarballs/{pkgname}{_uver}.tar.bz2"
sha256 = "898ac82bcfcbd4b20ea39afdf66fd659b8773c7549623b0f8802bf392a41a912"
# Cross-compilation is disabled for this package.
options = ["!cross"]
| 31.333333 | 92 | 0.696049 | pkgname = "lynx"
pkgver = "2.9.0_pre10"
_uver = "2.9.0dev.10"
pkgrel = 0
build_style = "gnu_configure"
configure_args = [
"--enable-widec", "--enable-ipv6", "--with-zlib", "--with-bzlib",
"--with-ssl"
]
hostmakedepends = ["pkgconf"]
makedepends = [
"zlib-devel", "libbz2-devel", "ncurses-devel", "openssl-devel"
]
pkgdesc = "Text web browser"
maintainer = "q66 <q66@chimera-linux.org>"
license = "GPL-2.0-or-later"
url = "http://lynx.invisible-island.net"
source = f"http://invisible-mirror.net/archives/{pkgname}/tarballs/{pkgname}{_uver}.tar.bz2"
sha256 = "898ac82bcfcbd4b20ea39afdf66fd659b8773c7549623b0f8802bf392a41a912"
options = ["!cross"]
| true | true |
f71f8a26e59f7652219836c7aac3c3072c01af66 | 5,150 | py | Python | conf.py | SmartDataProjects/dynamo-docs | dffc4e853ffd8bc1a1eabce8b34c1084412cb562 | [
"MIT"
] | null | null | null | conf.py | SmartDataProjects/dynamo-docs | dffc4e853ffd8bc1a1eabce8b34c1084412cb562 | [
"MIT"
] | null | null | null | conf.py | SmartDataProjects/dynamo-docs | dffc4e853ffd8bc1a1eabce8b34c1084412cb562 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Dynamo documentation build configuration file, created by
# sphinx-quickstart on Tue Jun 5 10:40:30 2018.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# Sphinx extensions enabled for this project.
extensions = ['sphinx.ext.intersphinx']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'Dynamo'
copyright = u'2018, Yutaro Iiyama, Max Goncharov, Benedikt Maier, Daniel Abercrombie, Siddarth Narayanan, Christoph Paus'
author = u'Yutaro Iiyama, Max Goncharov, Benedikt Maier, Daniel Abercrombie, Siddarth Narayanan, Christoph Paus'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
#html_theme = 'nature'
html_theme = 'classic'
#html_theme = 'agogo'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
    'rightsidebar': False
}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# -- Options for HTMLHelp output ------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = 'Dynamodoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'Dynamo.tex', u'Dynamo Documentation',
     u'Yutaro Iiyama, Max Goncharov, Benedikt Maier, Daniel Abercrombie, Siddarth Narayanan, Christoph Paus', 'manual'),
]

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'dynamo', u'Dynamo Documentation',
     [author], 1)
]

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
# NOTE: the description previously read 'A Dynamic Data Data Management
# System' (duplicated word); fixed to the project's actual tagline.
texinfo_documents = [
    (master_doc, 'Dynamo', u'Dynamo Documentation',
     author, 'Dynamo', 'A Dynamic Data Management System',
     'Miscellaneous'),
]

# Sidebar templates applied to every page ('**' matches all documents).
html_sidebars = { '**': ['globaltoc.html', 'relations.html', 'sourcelink.html', 'searchbox.html'] }
| 32.389937 | 121 | 0.687379 |
extensions = ['sphinx.ext.intersphinx']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'Dynamo'
copyright = u'2018, Yutaro Iiyama, Max Goncharov, Benedikt Maier, Daniel Abercrombie, Siddarth Narayanan, Christoph Paus'
author = u'Yutaro Iiyama, Max Goncharov, Benedikt Maier, Daniel Abercrombie, Siddarth Narayanan, Christoph Paus'
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
#html_theme = 'nature'
html_theme = 'classic'
#html_theme = 'agogo'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
'rightsidebar': False
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'Dynamodoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Dynamo.tex', u'Dynamo Documentation',
u'Yutaro Iiyama, Max Goncharov, Benedikt Maier, Daniel Abercrombie, Siddarth Narayanan, Christoph Paus', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'dynamo', u'Dynamo Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'Dynamo', u'Dynamo Documentation',
     author, 'Dynamo', 'A Dynamic Data Management System',  # fixed duplicated word ("Data Data")
     'Miscellaneous'),
]
html_sidebars = { '**': ['globaltoc.html', 'relations.html', 'sourcelink.html', 'searchbox.html'] }
| true | true |
f71f8a38750a1ac3b7381ca20272a675585b6e22 | 414 | py | Python | scanEngine/migrations/0006_auto_20200718_0429.py | Suprita-25/rengine | d6aabb49f27f7ad6039477c16a96213b0d80f81f | [
"MIT"
] | null | null | null | scanEngine/migrations/0006_auto_20200718_0429.py | Suprita-25/rengine | d6aabb49f27f7ad6039477c16a96213b0d80f81f | [
"MIT"
] | null | null | null | scanEngine/migrations/0006_auto_20200718_0429.py | Suprita-25/rengine | d6aabb49f27f7ad6039477c16a96213b0d80f81f | [
"MIT"
] | null | null | null | # Generated by Django 3.0.7 on 2020-07-18 04:29
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: alter ``Wordlist.path`` so the field is
    optional (``blank=True``) with an empty-string default."""

    # Must be applied after the previous scanEngine migration.
    dependencies = [
        ('scanEngine', '0005_auto_20200718_0407'),
    ]
    operations = [
        migrations.AlterField(
            model_name='wordlist',
            name='path',
            field=models.CharField(blank=True, default='', max_length=200),
        ),
    ]
| 21.789474 | 75 | 0.60628 |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('scanEngine', '0005_auto_20200718_0407'),
]
operations = [
migrations.AlterField(
model_name='wordlist',
name='path',
field=models.CharField(blank=True, default='', max_length=200),
),
]
| true | true |
f71f8aad34c9f5bcb36563c0f477edba60015bc9 | 1,029 | py | Python | printing.py | shuckc/printerface | 1f3eeca4c4d090c119404fd354eac02a4f68a56b | [
"BSD-3-Clause"
] | 3 | 2017-02-03T18:29:35.000Z | 2020-02-19T14:46:05.000Z | printing.py | TeaEngineering/printerface | 1f3eeca4c4d090c119404fd354eac02a4f68a56b | [
"BSD-3-Clause"
] | 3 | 2015-08-03T12:01:25.000Z | 2015-12-26T13:52:18.000Z | printing.py | TeaEngineering/printerface | 1f3eeca4c4d090c119404fd354eac02a4f68a56b | [
"BSD-3-Clause"
] | 1 | 2016-03-21T13:45:34.000Z | 2016-03-21T13:45:34.000Z | #!/usr/bin/python
import sys
import subprocess
printers = []
def getPrinters():
    """Return the list of installed printer names.

    On non-Linux platforms a single placeholder entry is returned.  The
    result is cached in the module-level ``printers`` list, so ``lpstat``
    is invoked at most once per process.
    """
    global printers
    # BUG FIX: the original tested `sys.platform == "linux2"`, which only
    # matches Python 2 on Linux; Python 3 reports "linux", so lpstat was
    # never reached there.
    if not sys.platform.startswith("linux"):
        return ['default']
    if len(printers) > 0:
        return printers  # cached from an earlier call
    try:
        # `lpstat -a` lists one accepting printer per line, e.g.
        # "KONICA_bizhub_192.168.12.10 accepting requests since Sun 16 Dec ..."
        # universal_newlines=True decodes stdout to str; without it,
        # `result.split('\n')` on bytes raises TypeError on Python 3.
        process = subprocess.Popen(["lpstat", "-a"], stdout=subprocess.PIPE,
                                   universal_newlines=True)
        result = process.communicate()[0].strip()
        print(result)
        # The first whitespace-separated token of each line is the printer name.
        printers = [x.split(' ')[0] for x in result.split('\n')]
        print('[print] printers=%s' % repr(printers))
    except OSError as e:
        # lpstat not installed / not executable: leave the cache empty.
        print('[print] %s' % repr(e))
    return printers
def printFile(file, printer):
    """Send *file* to *printer* via ``lpr`` and log the spooler's output."""
    cmd = ["lpr", "-P", printer, file]
    print("[print] printer=%s file=%s cmd=%s" % (printer, file, repr(cmd)))
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    # BUG FIX: communicate()'s result was immediately overwritten with
    # (None, None), so the log line below never showed lpr's real output.
    results = process.communicate()
    print("[print] printer=%s file=%s cmd=%s result=%s"
          % (printer, file, repr(cmd), repr(results)))
if __name__=="__main__":
print ('Installed printers: %s' % repr(getPrinters()))
| 30.264706 | 96 | 0.678328 |
import sys
import subprocess
printers = []
def getPrinters():
    """Return the list of installed printer names (cached; stub off-Linux)."""
    global printers
    # BUG FIX: `sys.platform == "linux2"` only matches Python 2 on Linux;
    # Python 3 reports "linux", so the lpstat path was unreachable there.
    if not sys.platform.startswith("linux"):
        return ['default']
    if len(printers) > 0:
        return printers  # cached from an earlier call
    try:
        # universal_newlines=True decodes stdout to str; without it,
        # splitting the bytes result on '\n' raises TypeError on Python 3.
        process = subprocess.Popen(["lpstat", "-a"], stdout=subprocess.PIPE,
                                   universal_newlines=True)
        result = process.communicate()[0].strip()
        print(result)
        printers = [x.split(' ')[0] for x in result.split('\n')]
        print('[print] printers=%s' % repr(printers))
    except OSError as e:
        # lpstat not installed / not executable: leave the cache empty.
        print('[print] %s' % repr(e))
    return printers
def printFile(file, printer):
    """Send *file* to *printer* via ``lpr`` and log the spooler's output."""
    cmd = ["lpr", "-P", printer, file]
    print("[print] printer=%s file=%s cmd=%s" % (printer, file, repr(cmd)))
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    # BUG FIX: communicate()'s result was immediately overwritten with
    # (None, None), so the log line below never showed lpr's real output.
    results = process.communicate()
    print("[print] printer=%s file=%s cmd=%s result=%s"
          % (printer, file, repr(cmd), repr(results)))
if __name__=="__main__":
print ('Installed printers: %s' % repr(getPrinters()))
| true | true |
f71f8acdefea0e50130dc14864f4cd1d3a47060b | 4,453 | py | Python | rpc/client.py | watermelonano/melonbot | 7ac8418020e63e340f1f6df13ad4e85d6c864cda | [
"MIT"
] | null | null | null | rpc/client.py | watermelonano/melonbot | 7ac8418020e63e340f1f6df13ad4e85d6c864cda | [
"MIT"
] | 1 | 2019-12-03T20:13:23.000Z | 2019-12-03T20:13:23.000Z | rpc/client.py | watermelonano/melonbot | 7ac8418020e63e340f1f6df13ad4e85d6c864cda | [
"MIT"
] | null | null | null | import aiohttp
import rapidjson as json
import socket
from config import Config
from typing import List, Tuple
class RPCClient(object):
    """Singleton aiohttp client for a Nano-style node's JSON RPC endpoint.

    Obtain the shared client via :meth:`instance`; the constructor is
    disabled so only one connector/session pair exists per process.
    All RPC helpers return ``None`` when the node's reply lacks the
    expected key.
    """

    _instance = None  # lazily-created singleton instance

    def __init__(self):
        # Direct construction is forbidden; the session must be shared.
        raise RuntimeError('Call instance() instead')

    @classmethod
    def instance(cls) -> 'RPCClient':
        """Return the shared client, creating it from Config on first use."""
        if cls._instance is None:
            # __new__ bypasses __init__, which deliberately raises.
            cls._instance = cls.__new__(cls)
            cls.node_url = Config.instance().node_url
            cls.node_port = Config.instance().node_port
            cls.wallet_id = Config.instance().wallet
            # '::' in the configured URL indicates an IPv6 address.
            cls.ipv6 = '::' in cls.node_url
            cls.connector = aiohttp.TCPConnector(family=socket.AF_INET6 if cls.ipv6 else socket.AF_INET,resolver=aiohttp.AsyncResolver())
            cls.session = aiohttp.ClientSession(connector=cls.connector, json_serialize=json.dumps)
        return cls._instance

    @classmethod
    async def close(cls):
        """Close the shared HTTP session (if any) and drop the singleton."""
        if hasattr(cls, 'session') and cls.session is not None:
            await cls.session.close()
        if cls._instance is not None:
            cls._instance = None

    async def make_request(self, req_json: dict):
        """POST *req_json* to the node RPC endpoint and return the JSON reply."""
        async with self.session.post("http://{0}:{1}".format(self.node_url, self.node_port),json=req_json, timeout=300) as resp:
            return await resp.json()

    async def account_create(self) -> str:
        """Create a new account in the configured wallet; return its address."""
        account_create = {
            'action': 'account_create',
            'wallet': self.wallet_id
        }
        respjson = await self.make_request(account_create)
        if 'account' in respjson:
            return respjson['account']
        return None

    async def account_balance(self, account: str) -> dict:
        """Return the node's balance response dict for *account*, or None."""
        account_balance = {
            'action': 'account_balance',
            'account': account
        }
        respjson = await self.make_request(account_balance)
        if 'balance' in respjson:
            return respjson
        return None

    async def send(self, id: str, source: str, destination: str, amount: str) -> str:
        """Make transaction, return hash if successful.

        *id* makes the send idempotent on the node side (same id is not
        re-sent twice).
        """
        send_action = {
            'action': 'send',
            'wallet': Config.instance().wallet,
            'source': source,
            'destination': destination,
            'amount': amount,
            'id': id
        }
        respjson = await self.make_request(send_action)
        if 'block' in respjson:
            return respjson['block']
        return None

    async def pending(self, account: str, count: int = 5) -> List[str]:
        """Return a list of pending blocks (at most *count*) for *account*."""
        pending_action = {
            'action': 'pending',
            'account': account,
            'count': count
        }
        respjson = await self.make_request(pending_action)
        if 'blocks' in respjson:
            return respjson['blocks']
        return None

    async def receive(self, account: str, hash: str) -> str:
        """Receive a block and return hash of receive block if successful"""
        receive_action = {
            'action': 'receive',
            'wallet': Config.instance().wallet,
            'account': account,
            'block': hash
        }
        respjson = await self.make_request(receive_action)
        if 'block' in respjson:
            return respjson['block']
        return None

    async def account_info(self, account: str) -> dict:
        """Return account info (incl. representative) or None on node error."""
        info_action = {
            'action': 'account_info',
            'account': account,
            'representative': True
        }
        respjson = await self.make_request(info_action)
        if 'error' not in respjson:
            return respjson
        return None

    async def account_representative_set(self, account: str, rep: str) -> str:
        """Set *account*'s representative to *rep*; return the change block hash."""
        rep_action = {
            "action": "account_representative_set",
            "wallet": Config.instance().wallet,
            "account": account,
            "representative": rep
        }
        respjson = await self.make_request(rep_action)
        if 'block' in respjson:
            return respjson['block']
        return None

    async def block_count(self) -> Tuple[int, int]:
        "Returns block_count from the node as a tuple count, unchecked"
        count_action = {
            "action": "block_count"
        }
        respjson = await self.make_request(count_action)
        if 'count' in respjson and 'unchecked' in respjson:
            return int(respjson['count']), int(respjson['unchecked'])
        # Malformed reply: signal failure for both values.
        return None, None
import rapidjson as json
import socket
from config import Config
from typing import List, Tuple
class RPCClient(object):
_instance = None
def __init__(self):
raise RuntimeError('Call instance() instead')
@classmethod
def instance(cls) -> 'RPCClient':
if cls._instance is None:
cls._instance = cls.__new__(cls)
cls.node_url = Config.instance().node_url
cls.node_port = Config.instance().node_port
cls.wallet_id = Config.instance().wallet
cls.ipv6 = '::' in cls.node_url
cls.connector = aiohttp.TCPConnector(family=socket.AF_INET6 if cls.ipv6 else socket.AF_INET,resolver=aiohttp.AsyncResolver())
cls.session = aiohttp.ClientSession(connector=cls.connector, json_serialize=json.dumps)
return cls._instance
@classmethod
async def close(cls):
if hasattr(cls, 'session') and cls.session is not None:
await cls.session.close()
if cls._instance is not None:
cls._instance = None
async def make_request(self, req_json: dict):
async with self.session.post("http://{0}:{1}".format(self.node_url, self.node_port),json=req_json, timeout=300) as resp:
return await resp.json()
async def account_create(self) -> str:
account_create = {
'action': 'account_create',
'wallet': self.wallet_id
}
respjson = await self.make_request(account_create)
if 'account' in respjson:
return respjson['account']
return None
async def account_balance(self, account: str) -> dict:
account_balance = {
'action': 'account_balance',
'account': account
}
respjson = await self.make_request(account_balance)
if 'balance' in respjson:
return respjson
return None
async def send(self, id: str, source: str, destination: str, amount: str) -> str:
send_action = {
'action': 'send',
'wallet': Config.instance().wallet,
'source': source,
'destination': destination,
'amount': amount,
'id': id
}
respjson = await self.make_request(send_action)
if 'block' in respjson:
return respjson['block']
return None
async def pending(self, account: str, count: int = 5) -> List[str]:
pending_action = {
'action': 'pending',
'account': account,
'count': count
}
respjson = await self.make_request(pending_action)
if 'blocks' in respjson:
return respjson['blocks']
return None
async def receive(self, account: str, hash: str) -> str:
receive_action = {
'action': 'receive',
'wallet': Config.instance().wallet,
'account': account,
'block': hash
}
respjson = await self.make_request(receive_action)
if 'block' in respjson:
return respjson['block']
return None
async def account_info(self, account: str) -> dict:
info_action = {
'action': 'account_info',
'account': account,
'representative': True
}
respjson = await self.make_request(info_action)
if 'error' not in respjson:
return respjson
return None
async def account_representative_set(self, account: str, rep: str) -> str:
rep_action = {
"action": "account_representative_set",
"wallet": Config.instance().wallet,
"account": account,
"representative": rep
}
respjson = await self.make_request(rep_action)
if 'block' in respjson:
return respjson['block']
return None
async def block_count(self) -> Tuple[int, int]:
count_action = {
"action": "block_count"
}
respjson = await self.make_request(count_action)
if 'count' in respjson and 'unchecked' in respjson:
return int(respjson['count']), int(respjson['unchecked'])
return None, None | true | true |
f71f8b0bf067a0ad1bfabdb4bd73bb6ce0671e67 | 2,071 | py | Python | ryu/tests/unit/packet/test_openflow.py | MrCocoaCat/ryu | 9e9571991a73380099b7ba7c6f37e0e587080a6a | [
"Apache-2.0"
] | null | null | null | ryu/tests/unit/packet/test_openflow.py | MrCocoaCat/ryu | 9e9571991a73380099b7ba7c6f37e0e587080a6a | [
"Apache-2.0"
] | null | null | null | ryu/tests/unit/packet/test_openflow.py | MrCocoaCat/ryu | 9e9571991a73380099b7ba7c6f37e0e587080a6a | [
"Apache-2.0"
] | null | null | null | # Copyright (C) 2017 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import logging
import os
import sys
import unittest
from nose.tools import eq_
from nose.tools import ok_
from ryu.lib import pcaplib
from ryu.lib.packet import openflow
from ryu.lib.packet import packet
from ryu.utils import binary_str
LOG = logging.getLogger(__name__)
OPENFLOW_DATA_DIR = os.path.join(
os.path.dirname(sys.modules[__name__].__file__),
'../../packet_data/pcap/')
class Test_openflow(unittest.TestCase):
    """Round-trip tests for ryu.lib.packet.openflow against captured pcaps."""

    def test_pcap(self):
        # Each name identifies a capture file under the shared packet_data dir.
        capture_names = (
            'openflow_flowmod',
            'openflow_flowstats_req',
            'openflow_invalid_version',
        )
        for capture in capture_names:
            pcap_path = OPENFLOW_DATA_DIR + capture + '.pcap'
            for _ts, buf in pcaplib.Reader(open(pcap_path, 'rb')):
                # Parsing: the frame must decode to an openflow protocol.
                pkt = packet.Packet(buf)
                openflow_pkt = pkt.get_protocol(openflow.openflow)
                ok_(isinstance(openflow_pkt, openflow.openflow),
                    'Failed to parse OpenFlow message: %s' % pkt)
                # Serializing: re-encoding must reproduce the original bytes.
                pkt.serialize()
                eq_(buf, pkt.data,
                    "b'%s' != b'%s'" % (binary_str(buf), binary_str(pkt.data)))
| 31.861538 | 80 | 0.627233 |
from __future__ import print_function
import logging
import os
import sys
import unittest
from nose.tools import eq_
from nose.tools import ok_
from ryu.lib import pcaplib
from ryu.lib.packet import openflow
from ryu.lib.packet import packet
from ryu.utils import binary_str
LOG = logging.getLogger(__name__)
OPENFLOW_DATA_DIR = os.path.join(
os.path.dirname(sys.modules[__name__].__file__),
'../../packet_data/pcap/')
class Test_openflow(unittest.TestCase):
def test_pcap(self):
files = [
'openflow_flowmod',
'openflow_flowstats_req',
'openflow_invalid_version',
]
for f in files:
for _, buf in pcaplib.Reader(
open(OPENFLOW_DATA_DIR + f + '.pcap', 'rb')):
pkt = packet.Packet(buf)
openflow_pkt = pkt.get_protocol(openflow.openflow)
ok_(isinstance(openflow_pkt, openflow.openflow),
'Failed to parse OpenFlow message: %s' % pkt)
pkt.serialize()
eq_(buf, pkt.data,
"b'%s' != b'%s'" % (binary_str(buf), binary_str(pkt.data)))
| true | true |
f71f8bd977b164017df62a900a494bb42ac54683 | 2,444 | py | Python | tests/test_data_structures/test_linked_list.py | titus-ong/my-python-algorithms | d9eecf2846c0a7dd8978f11fec8e8f52be23f3bc | [
"MIT"
] | null | null | null | tests/test_data_structures/test_linked_list.py | titus-ong/my-python-algorithms | d9eecf2846c0a7dd8978f11fec8e8f52be23f3bc | [
"MIT"
] | null | null | null | tests/test_data_structures/test_linked_list.py | titus-ong/my-python-algorithms | d9eecf2846c0a7dd8978f11fec8e8f52be23f3bc | [
"MIT"
] | null | null | null | import pytest
from my_python_algorithms.data_structures.linked_list import LinkedList, Node
def test_node():
n = Node(1)
assert 1 == n.value
assert None is n.next
def test_empty_ll():
ll = LinkedList()
assert None is ll.head
def test_ll_with_head():
ll = LinkedList(1)
assert 1 == ll.head.value
def test_append_with_no_head():
ll = LinkedList()
ll.append(1)
assert 1 == ll.head.value
def test_append():
ll = LinkedList(1)
ll.append(2)
assert 2 == ll.head.next.value
def test_indexing_1():
ll = LinkedList(1)
assert 1 == ll[0]
def test_indexing_2():
ll = LinkedList(1)
ll.append(2)
assert 2 == ll[1]
def test_indexing_error_1():
ll = LinkedList()
with pytest.raises(IndexError):
ll[0]
def test_indexing_error_2():
ll = LinkedList(1)
ll.append(2)
with pytest.raises(IndexError):
ll[2]
def test_index():
ll = LinkedList(1)
assert 0 == ll.index(1)
def test_index_error_1():
ll = LinkedList()
with pytest.raises(ValueError):
ll.index(1)
def test_index_error_2():
    # Renamed from test_index_error_1: the original name duplicated the
    # test defined just above, shadowing it so it never ran under pytest.
    # This case checks that index() on a missing value raises ValueError.
    ll = LinkedList(1)
    with pytest.raises(ValueError):
        ll.index(2)
def test_insert_head():
ll = LinkedList(1)
ll.insert(0, "hello")
assert "hello" == ll[0]
def test_insert_1():
    # Insert into the middle of a three-element list and verify ordering.
    linked = LinkedList(1)
    for value in (2, 3):
        linked.append(value)
    linked.insert(1, "hello")
    expected = (1, "hello", 2, 3)
    for position, value in enumerate(expected):
        assert value == linked[position]
def test_insert_2():
ll = LinkedList(1)
ll.insert(1, 'hey')
assert 'hey' == ll[1]
def test_insert_error_1():
ll = LinkedList()
with pytest.raises(IndexError):
ll.insert(1, 1)
def test_insert_error_2():
ll = LinkedList(1)
with pytest.raises(IndexError):
ll.insert(2, 1)
def test_insert_error_3():
ll = LinkedList(1)
ll.append(2)
ll.append(3)
with pytest.raises(IndexError):
ll.insert(4, "hey")
def test_delete_head():
ll = LinkedList(1)
ll.delete(0)
assert None is ll.head
def test_delete_1():
ll = LinkedList(1)
ll.append(2)
ll.delete(0)
assert 2 == ll[0]
with pytest.raises(IndexError):
ll[1]
def test_delete_error_1():
ll = LinkedList()
with pytest.raises(IndexError):
ll.delete(0)
def test_delete_error_2():
ll = LinkedList(1)
ll.append(2)
with pytest.raises(IndexError):
ll.delete(3)
| 17.090909 | 77 | 0.614157 | import pytest
from my_python_algorithms.data_structures.linked_list import LinkedList, Node
def test_node():
n = Node(1)
assert 1 == n.value
assert None is n.next
def test_empty_ll():
ll = LinkedList()
assert None is ll.head
def test_ll_with_head():
ll = LinkedList(1)
assert 1 == ll.head.value
def test_append_with_no_head():
ll = LinkedList()
ll.append(1)
assert 1 == ll.head.value
def test_append():
ll = LinkedList(1)
ll.append(2)
assert 2 == ll.head.next.value
def test_indexing_1():
ll = LinkedList(1)
assert 1 == ll[0]
def test_indexing_2():
ll = LinkedList(1)
ll.append(2)
assert 2 == ll[1]
def test_indexing_error_1():
ll = LinkedList()
with pytest.raises(IndexError):
ll[0]
def test_indexing_error_2():
ll = LinkedList(1)
ll.append(2)
with pytest.raises(IndexError):
ll[2]
def test_index():
ll = LinkedList(1)
assert 0 == ll.index(1)
def test_index_error_1():
ll = LinkedList()
with pytest.raises(ValueError):
ll.index(1)
def test_index_error_2():
    # Renamed from test_index_error_1: the original name duplicated the
    # test defined just above, shadowing it so it never ran under pytest.
    # This case checks that index() on a missing value raises ValueError.
    ll = LinkedList(1)
    with pytest.raises(ValueError):
        ll.index(2)
def test_insert_head():
ll = LinkedList(1)
ll.insert(0, "hello")
assert "hello" == ll[0]
def test_insert_1():
ll = LinkedList(1)
ll.append(2)
ll.append(3)
ll.insert(1, "hello")
assert 1 == ll[0]
assert "hello" == ll[1]
assert 2 == ll[2]
assert 3 == ll[3]
def test_insert_2():
ll = LinkedList(1)
ll.insert(1, 'hey')
assert 'hey' == ll[1]
def test_insert_error_1():
ll = LinkedList()
with pytest.raises(IndexError):
ll.insert(1, 1)
def test_insert_error_2():
ll = LinkedList(1)
with pytest.raises(IndexError):
ll.insert(2, 1)
def test_insert_error_3():
ll = LinkedList(1)
ll.append(2)
ll.append(3)
with pytest.raises(IndexError):
ll.insert(4, "hey")
def test_delete_head():
ll = LinkedList(1)
ll.delete(0)
assert None is ll.head
def test_delete_1():
ll = LinkedList(1)
ll.append(2)
ll.delete(0)
assert 2 == ll[0]
with pytest.raises(IndexError):
ll[1]
def test_delete_error_1():
ll = LinkedList()
with pytest.raises(IndexError):
ll.delete(0)
def test_delete_error_2():
ll = LinkedList(1)
ll.append(2)
with pytest.raises(IndexError):
ll.delete(3)
| true | true |
f71f8c4271a58e5975430cb596344aa1a4927d19 | 3,169 | py | Python | homeassistant/components/device_tracker/bluetooth_tracker.py | shire210/home-assistant | 63cd8bbee6f1b74ae9c6c249ac820119a8a573d8 | [
"Apache-2.0"
] | 2 | 2017-02-25T00:27:06.000Z | 2017-02-25T03:09:30.000Z | homeassistant/components/device_tracker/bluetooth_tracker.py | shire210/home-assistant | 63cd8bbee6f1b74ae9c6c249ac820119a8a573d8 | [
"Apache-2.0"
] | null | null | null | homeassistant/components/device_tracker/bluetooth_tracker.py | shire210/home-assistant | 63cd8bbee6f1b74ae9c6c249ac820119a8a573d8 | [
"Apache-2.0"
] | 2 | 2018-06-03T11:14:44.000Z | 2018-11-04T18:18:12.000Z | """Tracking for bluetooth devices."""
import logging
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import track_point_in_utc_time
from homeassistant.components.device_tracker import (
YAML_DEVICES, CONF_TRACK_NEW, CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL,
load_config, PLATFORM_SCHEMA, DEFAULT_TRACK_NEW)
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
REQUIREMENTS = ['pybluez==0.22']
BT_PREFIX = 'BT_'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_TRACK_NEW): cv.boolean
})
def setup_scanner(hass, config, see, discovery_info=None):
    """Setup the Bluetooth Scanner.

    Loads known devices from the YAML registry, optionally discovers new
    ones on startup, then schedules a periodic lookup that reports every
    tracked MAC to Home Assistant via *see*.  Returns True on success.
    """
    # pylint: disable=import-error
    import bluetooth

    def see_device(device):
        """Mark a device as seen.

        *device* is a (mac, name) pair; the MAC is stored with the BT_
        prefix to distinguish bluetooth entries in the device registry.
        """
        see(mac=BT_PREFIX + device[0], host_name=device[1])

    def discover_devices():
        """Discover bluetooth devices (8-second inquiry, names resolved)."""
        result = bluetooth.discover_devices(duration=8,
                                            lookup_names=True,
                                            flush_cache=True,
                                            lookup_class=False)
        _LOGGER.debug("Bluetooth devices discovered = " + str(len(result)))
        return result

    yaml_path = hass.config.path(YAML_DEVICES)
    devs_to_track = []      # bare MACs (BT_ prefix stripped) to poll
    devs_donot_track = []   # known devices explicitly marked track: false
    # Load all known devices.
    # We just need the devices so set consider_home and home range
    # to 0
    for device in load_config(yaml_path, hass, 0):
        # check if device is a valid bluetooth device (BT_-prefixed MAC)
        if device.mac and device.mac[:3].upper() == BT_PREFIX:
            if device.track:
                devs_to_track.append(device.mac[3:])
            else:
                devs_donot_track.append(device.mac[3:])
    # if track new devices is true discover new devices on startup.
    track_new = config.get(CONF_TRACK_NEW, DEFAULT_TRACK_NEW)
    if track_new:
        for dev in discover_devices():
            if dev[0] not in devs_to_track and \
               dev[0] not in devs_donot_track:
                devs_to_track.append(dev[0])
                see_device(dev)
    interval = config.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)

    def update_bluetooth(now):
        """Lookup bluetooth device and update status.

        Reschedules itself *interval* after each run, so scanning
        continues for the lifetime of the process.
        """
        try:
            if track_new:
                # Pick up devices that appeared after startup.
                for dev in discover_devices():
                    if dev[0] not in devs_to_track and \
                       dev[0] not in devs_donot_track:
                        devs_to_track.append(dev[0])
            for mac in devs_to_track:
                _LOGGER.debug("Scanning " + mac)
                result = bluetooth.lookup_name(mac, timeout=5)
                if not result:
                    # Could not lookup device name; treat as not present.
                    continue
                see_device((mac, result))
        except bluetooth.BluetoothError:
            _LOGGER.exception('Error looking up bluetooth device!')
        # Re-arm the timer even after an error so scanning never stops.
        track_point_in_utc_time(
            hass, update_bluetooth, dt_util.utcnow() + interval)

    update_bluetooth(dt_util.utcnow())
    return True
| 34.824176 | 76 | 0.618492 | import logging
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import track_point_in_utc_time
from homeassistant.components.device_tracker import (
YAML_DEVICES, CONF_TRACK_NEW, CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL,
load_config, PLATFORM_SCHEMA, DEFAULT_TRACK_NEW)
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
REQUIREMENTS = ['pybluez==0.22']
BT_PREFIX = 'BT_'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_TRACK_NEW): cv.boolean
})
def setup_scanner(hass, config, see, discovery_info=None):
import bluetooth
def see_device(device):
see(mac=BT_PREFIX + device[0], host_name=device[1])
def discover_devices():
result = bluetooth.discover_devices(duration=8,
lookup_names=True,
flush_cache=True,
lookup_class=False)
_LOGGER.debug("Bluetooth devices discovered = " + str(len(result)))
return result
yaml_path = hass.config.path(YAML_DEVICES)
devs_to_track = []
devs_donot_track = []
for device in load_config(yaml_path, hass, 0):
if device.mac and device.mac[:3].upper() == BT_PREFIX:
if device.track:
devs_to_track.append(device.mac[3:])
else:
devs_donot_track.append(device.mac[3:])
track_new = config.get(CONF_TRACK_NEW, DEFAULT_TRACK_NEW)
if track_new:
for dev in discover_devices():
if dev[0] not in devs_to_track and \
dev[0] not in devs_donot_track:
devs_to_track.append(dev[0])
see_device(dev)
interval = config.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
def update_bluetooth(now):
try:
if track_new:
for dev in discover_devices():
if dev[0] not in devs_to_track and \
dev[0] not in devs_donot_track:
devs_to_track.append(dev[0])
for mac in devs_to_track:
_LOGGER.debug("Scanning " + mac)
result = bluetooth.lookup_name(mac, timeout=5)
if not result:
continue
see_device((mac, result))
except bluetooth.BluetoothError:
_LOGGER.exception('Error looking up bluetooth device!')
track_point_in_utc_time(
hass, update_bluetooth, dt_util.utcnow() + interval)
update_bluetooth(dt_util.utcnow())
return True
| true | true |
f71f8d6b054c39b44978826f1f894a184c1e7cce | 212 | py | Python | social/__init__.py | Diolor/python-social-auth | ba4e30d4a11b2e188954770bae4df9426d61a470 | [
"BSD-3-Clause"
] | 1 | 2015-04-19T21:38:46.000Z | 2015-04-19T21:38:46.000Z | social/__init__.py | nvbn/python-social-auth | 3e0e99404f20e7b6847ca069e0844ba8c090415f | [
"BSD-3-Clause"
] | null | null | null | social/__init__.py | nvbn/python-social-auth | 3e0e99404f20e7b6847ca069e0844ba8c090415f | [
"BSD-3-Clause"
] | 1 | 2020-05-23T05:49:36.000Z | 2020-05-23T05:49:36.000Z | """
python-social-auth application, allows OpenId or OAuth user
registration/authentication just adding a few configurations.
"""
# Package version as a tuple, plus an optional pre-release suffix.
version = (0, 1, 13)
extra = ''
# Dotted string form, e.g. '0.1.13'.
__version__ = '{0}.{1}.{2}'.format(*version) + extra
| 26.5 | 61 | 0.721698 | version = (0, 1, 13)
extra = ''
__version__ = '.'.join(map(str, version)) + extra
| true | true |
f71f8de993bce72bcbeae4db86fff99ba77edf5b | 324 | py | Python | AOJ/pra_intro/7-d.py | Nishi05/Competitive-programming | e59a6755b706d9d5c1f359f4511d92c114e6a94e | [
"MIT"
] | null | null | null | AOJ/pra_intro/7-d.py | Nishi05/Competitive-programming | e59a6755b706d9d5c1f359f4511d92c114e6a94e | [
"MIT"
] | null | null | null | AOJ/pra_intro/7-d.py | Nishi05/Competitive-programming | e59a6755b706d9d5c1f359f4511d92c114e6a94e | [
"MIT"
] | null | null | null | n, m, l = map(int, input().split())
a = [list(map(int, input().split())) for i in range(n)]
b = [list(map(int, input().split())) for i in range(m)]
C = [[0]*l for _ in range(n)]
for i in range(n):
for j in range(l):
for k in range(m):
C[i][j] += a[i][k] * b[k][j]
for line in C:
print(*line)
| 21.6 | 55 | 0.506173 | n, m, l = map(int, input().split())
a = [list(map(int, input().split())) for i in range(n)]
b = [list(map(int, input().split())) for i in range(m)]
C = [[0]*l for _ in range(n)]
for i in range(n):
for j in range(l):
for k in range(m):
C[i][j] += a[i][k] * b[k][j]
for line in C:
print(*line)
| true | true |
f71f8e826c91b8b02c825b8f93c01a539cdf6534 | 1,888 | py | Python | tsutils/cog_settings.py | kary5678/tsutils | ab6ecdcd2f0e10ba19092028909b3f74bf1708a9 | [
"MIT"
] | 1 | 2021-07-28T19:41:18.000Z | 2021-07-28T19:41:18.000Z | tsutils/cog_settings.py | kary5678/tsutils | ab6ecdcd2f0e10ba19092028909b3f74bf1708a9 | [
"MIT"
] | 19 | 2020-09-14T07:55:14.000Z | 2022-03-06T17:23:14.000Z | tsutils/cog_settings.py | kary5678/tsutils | ab6ecdcd2f0e10ba19092028909b3f74bf1708a9 | [
"MIT"
] | 3 | 2020-09-14T07:47:27.000Z | 2021-09-14T02:16:33.000Z | from redbot.core import data_manager
from .json_utils import *
class CogSettings(object):
    """Legacy JSON-file-backed settings store for a Red-DiscordBot cog.

    On construction the settings file is created (from
    :meth:`make_default_settings`) if missing, or loaded and back-filled
    with any default keys that have since been added.
    """

    # File name inside the cog's data folder.
    SETTINGS_FILE_NAME = "legacy_settings.json"

    def __init__(self, cog_name, bot=None):
        """Load or initialise settings for *cog_name*.

        Side effects: may create the data folder and write the settings
        file to disk.
        """
        self.folder = str(data_manager.cog_data_path(raw_name=cog_name))
        self.file_path = os.path.join(self.folder, CogSettings.SETTINGS_FILE_NAME)
        self.bot = bot

        self.check_folder()

        self.default_settings = self.make_default_settings()
        if not os.path.isfile(self.file_path):
            logger.warning("CogSettings config for {} not found. Creating default...".format(self.file_path))
            self.bot_settings = self.default_settings
            self.save_settings()
        else:
            # JSON object keys are always strings; intify restores
            # numeric keys/values that were stringified on save.
            current = self.intify(read_json_file(self.file_path))
            updated = False
            # Back-fill any defaults added after the file was written.
            for key in self.default_settings.keys():
                if key not in current.keys():
                    current[key] = self.default_settings[key]
                    updated = True
            self.bot_settings = current
            if updated:
                self.save_settings()

    def check_folder(self):
        """Create the cog's data folder if it does not exist yet."""
        if not os.path.exists(self.folder):
            logger.info("Creating {}".format(self.folder))
            os.makedirs(self.folder)

    def save_settings(self):
        """Persist the current settings dict to the JSON file."""
        write_json_file(self.file_path, self.bot_settings)

    def make_default_settings(self):
        """Return the default settings; subclasses override this."""
        return {}

    @classmethod
    def intify(cls, key):
        """Recursively convert numeric-looking strings back to int/float.

        Applied to both keys and values of dicts, and to list/tuple
        elements (tuples come back as lists).  Non-numeric values are
        returned unchanged.
        """
        if isinstance(key, dict):
            return {cls.intify(k): cls.intify(v) for k, v in key.items()}
        elif isinstance(key, (list, tuple)):
            return [cls.intify(x) for x in key]
        elif isinstance(key, str) and key.isdigit():
            return int(key)
        elif isinstance(key, str) and key.replace('.', '', 1).isdigit():
            # At most one '.', all other chars digits -> float like "1.5"
            return float(key)
        else:
            return key
| 33.122807 | 110 | 0.603814 | from redbot.core import data_manager
from .json_utils import *
class CogSettings(object):
SETTINGS_FILE_NAME = "legacy_settings.json"
def __init__(self, cog_name, bot=None):
self.folder = str(data_manager.cog_data_path(raw_name=cog_name))
self.file_path = os.path.join(self.folder, CogSettings.SETTINGS_FILE_NAME)
self.bot = bot
self.check_folder()
self.default_settings = self.make_default_settings()
if not os.path.isfile(self.file_path):
logger.warning("CogSettings config for {} not found. Creating default...".format(self.file_path))
self.bot_settings = self.default_settings
self.save_settings()
else:
current = self.intify(read_json_file(self.file_path))
updated = False
for key in self.default_settings.keys():
if key not in current.keys():
current[key] = self.default_settings[key]
updated = True
self.bot_settings = current
if updated:
self.save_settings()
def check_folder(self):
if not os.path.exists(self.folder):
logger.info("Creating {}".format(self.folder))
os.makedirs(self.folder)
def save_settings(self):
write_json_file(self.file_path, self.bot_settings)
def make_default_settings(self):
return {}
@classmethod
def intify(cls, key):
if isinstance(key, dict):
return {cls.intify(k): cls.intify(v) for k, v in key.items()}
elif isinstance(key, (list, tuple)):
return [cls.intify(x) for x in key]
elif isinstance(key, str) and key.isdigit():
return int(key)
elif isinstance(key, str) and key.replace('.', '', 1).isdigit():
return float(key)
else:
return key
| true | true |
f71f90277bdccfb77a66e3b8e60c836a40516eb8 | 4,991 | py | Python | preql/core/casts.py | otherJL0/Preql | 958a8dfd3a040f9c40fa394a8bfc3295f32a3019 | [
"MIT"
] | null | null | null | preql/core/casts.py | otherJL0/Preql | 958a8dfd3a040f9c40fa394a8bfc3295f32a3019 | [
"MIT"
] | null | null | null | preql/core/casts.py | otherJL0/Preql | 958a8dfd3a040f9c40fa394a8bfc3295f32a3019 | [
"MIT"
] | null | null | null | from . import pql_objects as objects
from . import sql
from .exceptions import Signal
from .interp_common import call_builtin_func
from .pql_types import ITEM_NAME, T, dp_type
from .types_impl import kernel_type
@dp_type
def _cast(inst_type, target_type, inst):
if inst_type <= target_type:
return inst
raise Signal.make(
T.TypeError, None, f"Cast not implemented for {inst_type}->{target_type}"
)
@dp_type
def _cast(inst_type: T.list, target_type: T.list, inst):
if inst is objects.EmptyList:
return inst.replace(type=target_type)
if inst_type.elem <= target_type.elem:
return inst
value = inst.get_column(ITEM_NAME)
elem = _cast(value.type, target_type.elem, value)
code = sql.Select(target_type, inst.code, [sql.ColumnAlias(elem.code, ITEM_NAME)])
return inst.replace(code=code, type=T.list[elem.type])
@dp_type
def _cast(inst_type: T.aggregated, target_type: T.list, inst):
res = _cast(inst_type.elem, target_type.elem, inst)
return objects.aggregate(res) # ??
@dp_type
def _cast(inst_type: T.table, target_type: T.list, inst):
t = inst.type
if len(t.elems) != 1:
raise Signal.make(
T.TypeError,
None,
f"Cannot cast {inst_type} to {target_type}. Too many columns",
)
if not inst_type.elem <= target_type.elem:
raise Signal.make(
T.TypeError,
None,
f"Cannot cast {inst_type} to {target_type}. Elements not matching",
)
((elem_name, elem_type),) = inst_type.elems.items()
code = sql.Select(
T.list[elem_type],
inst.code,
[sql.ColumnAlias(sql.Name(elem_type, elem_name), ITEM_NAME)],
)
return objects.TableInstance.make(code, T.list[elem_type], [inst])
@dp_type
def _cast(inst_type: T.table, target_type: T.primitive, inst):
t = inst.type
if len(t.elems) != 1:
raise Signal.make(
T.TypeError,
None,
f"Cannot cast {inst_type} to {target_type}. Expected exactly 1 column, instead got {len(t.elems)}",
)
if not inst_type.elem <= target_type:
raise Signal.make(
T.TypeError,
None,
f"Cannot cast {inst_type} to {target_type}. Elements type doesn't match",
)
res = inst.localize()
if len(res) != 1:
raise Signal.make(
T.TypeError,
None,
f"Cannot cast {inst_type} to {target_type}. Expected exactly 1 row, instead got {len(res)}",
)
(item,) = res
return objects.pyvalue_inst(item, inst_type.elem)
@dp_type
def _cast(_inst_type: T.t_id, _target_type: T.int, inst):
return inst.replace(type=T.int)
@dp_type
def _cast(_inst_type: T.int, target_type: T.t_id, inst):
return inst.replace(type=target_type)
@dp_type
def _cast(_inst_type: T.union[T.float, T.bool], _target_type: T.int, inst):
code = sql.Cast(T.int, inst.code)
return objects.Instance.make(code, T.int, [inst])
@dp_type
def _cast(_inst_type: T.number, _target_type: T.bool, inst):
code = sql.Compare('!=', [inst.code, sql.make_value(0)])
return objects.Instance.make(code, T.bool, [inst])
@dp_type
def _cast(_inst_type: T.string, _target_type: T.bool, inst):
code = sql.Compare('!=', [inst.code, sql.make_value('')])
return objects.Instance.make(code, T.bool, [inst])
@dp_type
def _cast(_inst_type: T.string, _target_type: T.text, inst):
return inst.replace(type=T.text)
@dp_type
def _cast(_inst_type: T.text, _target_type: T.string, inst):
return inst.replace(type=T.string)
@dp_type
def _cast(
_inst_type: T.string, _target_type: T.string, inst
): # Disambiguate text<->string due to inheritance
return inst
@dp_type
def _cast(_inst_type: T.union[T.int, T.bool], _target_type: T.float, inst):
code = sql.Cast(T.float, inst.code)
return objects.Instance.make(code, T.float, [inst])
@dp_type
def _cast(_inst_type: T.string, _target_type: T.int, inst):
return call_builtin_func("_cast_string_to_int", [inst])
# @dp_type
# def _cast(_inst_type: T.string, _target_type: T.datetime, inst):
# # XXX unsafe cast, bad strings won't throw an error
# return objects.Instance.make(inst.code, T.datetime, [inst])
@dp_type
def _cast(_inst_type: T.primitive, _target_type: T.string, inst):
code = sql.Cast(T.string, inst.code)
return objects.Instance.make(code, T.string, [inst])
@dp_type
def _cast(_inst_type: T.t_relation, target_type: T.t_id, inst):
# TODO verify same table? same type?
return inst.replace(type=target_type)
@dp_type
def _cast(inst_type: T.t_relation, target_type: T.int, inst):
if inst.type.elem <= T.int:
return inst.replace(type=target_type)
raise Signal.make(
T.TypeError, None, f"Cast not implemented for {inst_type}->{target_type}"
)
def cast(obj, t):
res = _cast(kernel_type(obj.type), t, obj)
return objects.inherit_phantom_type(res, [obj])
| 28.19774 | 111 | 0.661992 | from . import pql_objects as objects
from . import sql
from .exceptions import Signal
from .interp_common import call_builtin_func
from .pql_types import ITEM_NAME, T, dp_type
from .types_impl import kernel_type
@dp_type
def _cast(inst_type, target_type, inst):
if inst_type <= target_type:
return inst
raise Signal.make(
T.TypeError, None, f"Cast not implemented for {inst_type}->{target_type}"
)
@dp_type
def _cast(inst_type: T.list, target_type: T.list, inst):
if inst is objects.EmptyList:
return inst.replace(type=target_type)
if inst_type.elem <= target_type.elem:
return inst
value = inst.get_column(ITEM_NAME)
elem = _cast(value.type, target_type.elem, value)
code = sql.Select(target_type, inst.code, [sql.ColumnAlias(elem.code, ITEM_NAME)])
return inst.replace(code=code, type=T.list[elem.type])
@dp_type
def _cast(inst_type: T.aggregated, target_type: T.list, inst):
res = _cast(inst_type.elem, target_type.elem, inst)
return objects.aggregate(res)
@dp_type
def _cast(inst_type: T.table, target_type: T.list, inst):
t = inst.type
if len(t.elems) != 1:
raise Signal.make(
T.TypeError,
None,
f"Cannot cast {inst_type} to {target_type}. Too many columns",
)
if not inst_type.elem <= target_type.elem:
raise Signal.make(
T.TypeError,
None,
f"Cannot cast {inst_type} to {target_type}. Elements not matching",
)
((elem_name, elem_type),) = inst_type.elems.items()
code = sql.Select(
T.list[elem_type],
inst.code,
[sql.ColumnAlias(sql.Name(elem_type, elem_name), ITEM_NAME)],
)
return objects.TableInstance.make(code, T.list[elem_type], [inst])
@dp_type
def _cast(inst_type: T.table, target_type: T.primitive, inst):
t = inst.type
if len(t.elems) != 1:
raise Signal.make(
T.TypeError,
None,
f"Cannot cast {inst_type} to {target_type}. Expected exactly 1 column, instead got {len(t.elems)}",
)
if not inst_type.elem <= target_type:
raise Signal.make(
T.TypeError,
None,
f"Cannot cast {inst_type} to {target_type}. Elements type doesn't match",
)
res = inst.localize()
if len(res) != 1:
raise Signal.make(
T.TypeError,
None,
f"Cannot cast {inst_type} to {target_type}. Expected exactly 1 row, instead got {len(res)}",
)
(item,) = res
return objects.pyvalue_inst(item, inst_type.elem)
@dp_type
def _cast(_inst_type: T.t_id, _target_type: T.int, inst):
return inst.replace(type=T.int)
@dp_type
def _cast(_inst_type: T.int, target_type: T.t_id, inst):
return inst.replace(type=target_type)
@dp_type
def _cast(_inst_type: T.union[T.float, T.bool], _target_type: T.int, inst):
code = sql.Cast(T.int, inst.code)
return objects.Instance.make(code, T.int, [inst])
@dp_type
def _cast(_inst_type: T.number, _target_type: T.bool, inst):
code = sql.Compare('!=', [inst.code, sql.make_value(0)])
return objects.Instance.make(code, T.bool, [inst])
@dp_type
def _cast(_inst_type: T.string, _target_type: T.bool, inst):
code = sql.Compare('!=', [inst.code, sql.make_value('')])
return objects.Instance.make(code, T.bool, [inst])
@dp_type
def _cast(_inst_type: T.string, _target_type: T.text, inst):
return inst.replace(type=T.text)
@dp_type
def _cast(_inst_type: T.text, _target_type: T.string, inst):
return inst.replace(type=T.string)
@dp_type
def _cast(
_inst_type: T.string, _target_type: T.string, inst
): # Disambiguate text<->string due to inheritance
return inst
@dp_type
def _cast(_inst_type: T.union[T.int, T.bool], _target_type: T.float, inst):
code = sql.Cast(T.float, inst.code)
return objects.Instance.make(code, T.float, [inst])
@dp_type
def _cast(_inst_type: T.string, _target_type: T.int, inst):
return call_builtin_func("_cast_string_to_int", [inst])
# @dp_type
# def _cast(_inst_type: T.string, _target_type: T.datetime, inst):
# # XXX unsafe cast, bad strings won't throw an error
@dp_type
def _cast(_inst_type: T.primitive, _target_type: T.string, inst):
code = sql.Cast(T.string, inst.code)
return objects.Instance.make(code, T.string, [inst])
@dp_type
def _cast(_inst_type: T.t_relation, target_type: T.t_id, inst):
return inst.replace(type=target_type)
@dp_type
def _cast(inst_type: T.t_relation, target_type: T.int, inst):
if inst.type.elem <= T.int:
return inst.replace(type=target_type)
raise Signal.make(
T.TypeError, None, f"Cast not implemented for {inst_type}->{target_type}"
)
def cast(obj, t):
res = _cast(kernel_type(obj.type), t, obj)
return objects.inherit_phantom_type(res, [obj])
| true | true |
f71f912ae78f467af2ef0d275e047683484c9024 | 132 | py | Python | clvm/EvalError.py | ChiaMineJP/clvm | 1a5cb17895d8707f784a85180bc97d3c6ebe71a0 | [
"Apache-2.0"
] | 83 | 2020-02-23T13:02:41.000Z | 2022-03-31T06:27:11.000Z | clvm/EvalError.py | ChiaMineJP/clvm | 1a5cb17895d8707f784a85180bc97d3c6ebe71a0 | [
"Apache-2.0"
] | 56 | 2020-01-30T00:28:33.000Z | 2022-03-29T10:38:14.000Z | venv/lib/python3.8/site-packages/clvm/EvalError.py | hu12305204/chia-analyze | 1c9e2104dbe340412e79051fad4cb5b591f6d1a3 | [
"Apache-2.0"
] | 31 | 2019-12-06T09:27:37.000Z | 2022-03-21T13:38:14.000Z | class EvalError(Exception):
def __init__(self, message: str, sexp):
super().__init__(message)
self._sexp = sexp
| 26.4 | 43 | 0.643939 | class EvalError(Exception):
def __init__(self, message: str, sexp):
super().__init__(message)
self._sexp = sexp
| true | true |
f71f91ba96fb0454f17f5b10798f2ab9bc54d086 | 7,164 | py | Python | src/socket_proxy/utils.py | fkantelberg/socket-proxy | 4fc971cfef29282f30299f40106263b53463cdd3 | [
"MIT"
] | 1 | 2021-05-18T02:28:37.000Z | 2021-05-18T02:28:37.000Z | src/socket_proxy/utils.py | fkantelberg/socket-proxy | 4fc971cfef29282f30299f40106263b53463cdd3 | [
"MIT"
] | null | null | null | src/socket_proxy/utils.py | fkantelberg/socket-proxy | 4fc971cfef29282f30299f40106263b53463cdd3 | [
"MIT"
] | null | null | null | import argparse
import ipaddress
import itertools
import logging
import os
import re
import secrets
import socket
import ssl
import sys
from random import shuffle
from typing import List, Tuple, Union
from urllib.parse import urlsplit
from . import base
_logger = logging.getLogger(__name__)
def configure_logging(log_file: str, level: str) -> None:
""" Configure the logging """
level = base.LOG_LEVELS.get(level.lower(), logging.DEBUG)
log = logging.getLogger()
log.setLevel(level)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.DEBUG)
handler.setFormatter(logging.Formatter(base.LOG_FORMAT, style="{"))
log.addHandler(handler)
if log_file:
handler = logging.FileHandler(log_file)
handler.setLevel(logging.DEBUG)
handler.setFormatter(logging.Formatter(base.LOG_FORMAT, style="{"))
log.addHandler(handler)
def format_transfer(b: int) -> str:
""" Format a number of bytes in a more human readable format """
symbols = [("T", 1 << 40), ("G", 1 << 30), ("M", 1 << 20), ("K", 1 << 10)]
if b < 0:
raise ValueError("Must be bigger than 0")
for symbol, size in symbols:
if b >= size:
return f"{b / size:.1f} {symbol}"
return str(b)
def generate_token() -> bytes:
""" Generate a random token used for identification of clients and tunnels """
return secrets.token_bytes(base.CLIENT_NAME_SIZE)
def generate_ssl_context(
*,
cert: str = None,
key: str = None,
ca: str = None,
server: bool = False,
ciphers: List[str] = None,
check_hostname: bool = False,
) -> ssl.SSLContext:
""" Generate a SSL context for the tunnel """
# Set the protocol and create the basic context
proto = ssl.PROTOCOL_TLS_SERVER if server else ssl.PROTOCOL_TLS_CLIENT
ctx = ssl.SSLContext(proto)
ctx.check_hostname = check_hostname
ctx.minimum_version = ssl.TLSVersion.TLSv1_2
# Prevent the reuse of parameters
if server:
ctx.options |= ssl.OP_SINGLE_DH_USE | ssl.OP_SINGLE_ECDH_USE
# Load a certificate and key for the connection
if cert:
ctx.load_cert_chain(cert, keyfile=key)
# Load the CA to verify the other side
if ca:
ctx.verify_mode = ssl.CERT_REQUIRED
ctx.load_verify_locations(cafile=ca)
# Set possible ciphers to use
if ciphers:
ctx.set_ciphers(ciphers)
# Output debugging
_logger.info("CA usage: %s", bool(ca))
_logger.info("Certificate: %s", bool(cert))
_logger.info("Hostname verification: %s", bool(check_hostname))
_logger.info("Minimal TLS Versions: %s", ctx.minimum_version.name)
ciphers = sorted(c["name"] for c in ctx.get_ciphers())
_logger.info("Ciphers: %s", ", ".join(ciphers))
return ctx
def get_unused_port(min_port: int, max_port: int, udp: bool = False) -> int:
""" Returns a random unused port within the given range or None if all are used """
sock = socket.socket(type=socket.SOCK_DGRAM) if udp else socket.socket()
ports = list(range(min_port, max_port + 1))
shuffle(ports)
for port in ports:
try:
sock.bind(("", port))
sock.close()
return port
except Exception:
pass
return None
def merge_settings(a: int, b: int) -> int:
"""Merge the settings of the tunnel. If one of them is 0 the other one will
take place. otherwise the lower value will be used"""
return min(a, b) if a and b else max(a, b)
def optimize_networks(*networks: List[base.IPvXNetwork]) -> List[base.IPvXNetwork]:
"""Try to optimize the list of networks by using the minimal network
configuration"""
grouped = itertools.groupby(networks, lambda n: n.version)
groups = {}
for version, group in grouped:
group = sorted(set(group))
tmp = set()
for i, a in enumerate(group):
for b in group[i + 1 :]:
if b.subnet_of(a):
tmp.add(b)
break
else:
tmp.add(a)
groups[version] = sorted(tmp)
return sum([g for _, g in sorted(groups.items())], [])
def parse_address(
address: str, host: str = None, port: int = None, multiple: bool = False
) -> Tuple[Union[str, List[str]], int]:
"""Parse an address and split hostname and port. The port is required. The
default host is "" which means all"""
# Only the address without scheme and path. We only support IPs if multiple hosts
# are activated
pattern = r"[0-9.:\[\],]*?" if multiple else r"[0-9a-zA-Z.:\[\],]*?"
match = re.match(fr"^(?P<hosts>{pattern})(:(?P<port>\d+))?$", address)
if not match:
raise argparse.ArgumentTypeError(
"Invalid address parsed. Only host and port are supported."
)
# Try to parse the port first
data = match.groupdict()
if data.get("port"):
port = int(data["port"])
if port <= 0 or port >= 65536:
raise argparse.ArgumentTypeError("Invalid address parsed. Invalid port.")
if port is None:
raise argparse.ArgumentTypeError("Port required.")
# Try parsing the different host addresses
hosts = set()
for h in data.get("hosts", "").split(","):
if not h:
hosts.add(h or host)
continue
try:
parsed = urlsplit(f"http://{h}")
hosts.add(parsed.hostname)
except Exception as e:
raise argparse.ArgumentTypeError(
"Invalid address parsed. Invalid host."
) from e
# Multiple hosts are supported if the flag is set
if len(hosts) > 1 and multiple:
return sorted(hosts), port
# Otherwise we fail
if len(hosts) > 1:
raise argparse.ArgumentTypeError(
"Invalid address parsed. Only one host is required."
)
if len(hosts) == 1:
host = hosts.pop() or host
if host is not None:
return host, port
raise argparse.ArgumentTypeError("Invalid address parsed. Host required.")
def parse_networks(network: str) -> List[base.IPvXNetwork]:
""" Try to parse multiple networks and return them optimized """
try:
return optimize_networks(*map(ipaddress.ip_network, network.split(",")))
except Exception as e:
raise argparse.ArgumentTypeError("Invalid network format") from e
def valid_file(path: str) -> str:
"""Check if a file exists and return the absolute path otherwise raise an
error. This function is used for the argument parsing"""
path = os.path.abspath(path)
if not os.path.isfile(path):
raise argparse.ArgumentTypeError("Not a file.")
return path
def valid_ports(ports: Tuple[int, int]) -> Tuple[int, int]:
""" Check if the argument is a valid port range with IP family """
m = re.match(r"^(\d+):(\d+)?$", ports, re.IGNORECASE)
if m:
a, b = sorted(map(int, m.groups()))
if 0 < a < b < 65536:
return a, b
raise argparse.ArgumentTypeError("Port must be in range (1, 65536)")
raise argparse.ArgumentTypeError("Invalid port scheme.")
| 31.012987 | 87 | 0.630374 | import argparse
import ipaddress
import itertools
import logging
import os
import re
import secrets
import socket
import ssl
import sys
from random import shuffle
from typing import List, Tuple, Union
from urllib.parse import urlsplit
from . import base
_logger = logging.getLogger(__name__)
def configure_logging(log_file: str, level: str) -> None:
level = base.LOG_LEVELS.get(level.lower(), logging.DEBUG)
log = logging.getLogger()
log.setLevel(level)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.DEBUG)
handler.setFormatter(logging.Formatter(base.LOG_FORMAT, style="{"))
log.addHandler(handler)
if log_file:
handler = logging.FileHandler(log_file)
handler.setLevel(logging.DEBUG)
handler.setFormatter(logging.Formatter(base.LOG_FORMAT, style="{"))
log.addHandler(handler)
def format_transfer(b: int) -> str:
symbols = [("T", 1 << 40), ("G", 1 << 30), ("M", 1 << 20), ("K", 1 << 10)]
if b < 0:
raise ValueError("Must be bigger than 0")
for symbol, size in symbols:
if b >= size:
return f"{b / size:.1f} {symbol}"
return str(b)
def generate_token() -> bytes:
return secrets.token_bytes(base.CLIENT_NAME_SIZE)
def generate_ssl_context(
*,
cert: str = None,
key: str = None,
ca: str = None,
server: bool = False,
ciphers: List[str] = None,
check_hostname: bool = False,
) -> ssl.SSLContext:
proto = ssl.PROTOCOL_TLS_SERVER if server else ssl.PROTOCOL_TLS_CLIENT
ctx = ssl.SSLContext(proto)
ctx.check_hostname = check_hostname
ctx.minimum_version = ssl.TLSVersion.TLSv1_2
if server:
ctx.options |= ssl.OP_SINGLE_DH_USE | ssl.OP_SINGLE_ECDH_USE
if cert:
ctx.load_cert_chain(cert, keyfile=key)
if ca:
ctx.verify_mode = ssl.CERT_REQUIRED
ctx.load_verify_locations(cafile=ca)
if ciphers:
ctx.set_ciphers(ciphers)
_logger.info("CA usage: %s", bool(ca))
_logger.info("Certificate: %s", bool(cert))
_logger.info("Hostname verification: %s", bool(check_hostname))
_logger.info("Minimal TLS Versions: %s", ctx.minimum_version.name)
ciphers = sorted(c["name"] for c in ctx.get_ciphers())
_logger.info("Ciphers: %s", ", ".join(ciphers))
return ctx
def get_unused_port(min_port: int, max_port: int, udp: bool = False) -> int:
sock = socket.socket(type=socket.SOCK_DGRAM) if udp else socket.socket()
ports = list(range(min_port, max_port + 1))
shuffle(ports)
for port in ports:
try:
sock.bind(("", port))
sock.close()
return port
except Exception:
pass
return None
def merge_settings(a: int, b: int) -> int:
return min(a, b) if a and b else max(a, b)
def optimize_networks(*networks: List[base.IPvXNetwork]) -> List[base.IPvXNetwork]:
grouped = itertools.groupby(networks, lambda n: n.version)
groups = {}
for version, group in grouped:
group = sorted(set(group))
tmp = set()
for i, a in enumerate(group):
for b in group[i + 1 :]:
if b.subnet_of(a):
tmp.add(b)
break
else:
tmp.add(a)
groups[version] = sorted(tmp)
return sum([g for _, g in sorted(groups.items())], [])
def parse_address(
address: str, host: str = None, port: int = None, multiple: bool = False
) -> Tuple[Union[str, List[str]], int]:
pattern = r"[0-9.:\[\],]*?" if multiple else r"[0-9a-zA-Z.:\[\],]*?"
match = re.match(fr"^(?P<hosts>{pattern})(:(?P<port>\d+))?$", address)
if not match:
raise argparse.ArgumentTypeError(
"Invalid address parsed. Only host and port are supported."
)
data = match.groupdict()
if data.get("port"):
port = int(data["port"])
if port <= 0 or port >= 65536:
raise argparse.ArgumentTypeError("Invalid address parsed. Invalid port.")
if port is None:
raise argparse.ArgumentTypeError("Port required.")
hosts = set()
for h in data.get("hosts", "").split(","):
if not h:
hosts.add(h or host)
continue
try:
parsed = urlsplit(f"http://{h}")
hosts.add(parsed.hostname)
except Exception as e:
raise argparse.ArgumentTypeError(
"Invalid address parsed. Invalid host."
) from e
if len(hosts) > 1 and multiple:
return sorted(hosts), port
if len(hosts) > 1:
raise argparse.ArgumentTypeError(
"Invalid address parsed. Only one host is required."
)
if len(hosts) == 1:
host = hosts.pop() or host
if host is not None:
return host, port
raise argparse.ArgumentTypeError("Invalid address parsed. Host required.")
def parse_networks(network: str) -> List[base.IPvXNetwork]:
try:
return optimize_networks(*map(ipaddress.ip_network, network.split(",")))
except Exception as e:
raise argparse.ArgumentTypeError("Invalid network format") from e
def valid_file(path: str) -> str:
path = os.path.abspath(path)
if not os.path.isfile(path):
raise argparse.ArgumentTypeError("Not a file.")
return path
def valid_ports(ports: Tuple[int, int]) -> Tuple[int, int]:
m = re.match(r"^(\d+):(\d+)?$", ports, re.IGNORECASE)
if m:
a, b = sorted(map(int, m.groups()))
if 0 < a < b < 65536:
return a, b
raise argparse.ArgumentTypeError("Port must be in range (1, 65536)")
raise argparse.ArgumentTypeError("Invalid port scheme.")
| true | true |
f71f91d24c0fd013e9b7c15807da9faf8ffce3f9 | 63 | py | Python | src/com/view/__init__.py | amzpiper/synchronize_data | a0ef983a6445ac5b793691dd3d4a86790c0581a7 | [
"Apache-2.0"
] | null | null | null | src/com/view/__init__.py | amzpiper/synchronize_data | a0ef983a6445ac5b793691dd3d4a86790c0581a7 | [
"Apache-2.0"
] | null | null | null | src/com/view/__init__.py | amzpiper/synchronize_data | a0ef983a6445ac5b793691dd3d4a86790c0581a7 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env/python
# -*- coding:utf-8 -*-
# Author:guoyuhang | 21 | 22 | 0.634921 | true | true | |
f71f928563424af0b3d6d9d923982a0a08ad1255 | 910 | py | Python | nautilus/network/events/util.py | AlecAivazis/python | 70e2acef27a2f87355590be1a6ca60ce3ab4d09c | [
"MIT"
] | 9 | 2019-02-17T01:33:43.000Z | 2022-02-03T02:14:12.000Z | nautilus/network/events/util.py | AlecAivazis/python | 70e2acef27a2f87355590be1a6ca60ce3ab4d09c | [
"MIT"
] | 59 | 2016-03-14T15:55:50.000Z | 2016-07-17T15:22:56.000Z | nautilus/network/events/util.py | AlecAivazis/python | 70e2acef27a2f87355590be1a6ca60ce3ab4d09c | [
"MIT"
] | 3 | 2017-08-03T20:18:59.000Z | 2018-07-18T02:03:41.000Z | """
This module defines various utilities for dealing with the network.
"""
from asyncio import iscoroutinefunction, iscoroutine
def combine_action_handlers(*handlers):
"""
This function combines the given action handlers into a single function
which will call all of them.
"""
# make sure each of the given handlers is callable
for handler in handlers:
# if the handler is not a function
if not (iscoroutinefunction(handler) or iscoroutine(handler)):
# yell loudly
raise ValueError("Provided handler is not a coroutine: %s" % handler)
# the combined action handler
async def combined_handler(*args, **kwds):
# goes over every given handler
for handler in handlers:
# call the handler
await handler(*args, **kwds)
# return the combined action handler
return combined_handler
| 33.703704 | 81 | 0.668132 | from asyncio import iscoroutinefunction, iscoroutine
def combine_action_handlers(*handlers):
for handler in handlers:
if not (iscoroutinefunction(handler) or iscoroutine(handler)):
raise ValueError("Provided handler is not a coroutine: %s" % handler)
async def combined_handler(*args, **kwds):
for handler in handlers:
await handler(*args, **kwds)
return combined_handler
| true | true |
f71f92cb89c3fdddcc269f270e17130bd0cea3de | 2,945 | py | Python | app/auth/forms.py | xdhuxc/xblog | ff0383140a5a0c1e8422223154cb98defee73121 | [
"W3C"
] | null | null | null | app/auth/forms.py | xdhuxc/xblog | ff0383140a5a0c1e8422223154cb98defee73121 | [
"W3C"
] | null | null | null | app/auth/forms.py | xdhuxc/xblog | ff0383140a5a0c1e8422223154cb98defee73121 | [
"W3C"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms import PasswordField
from wtforms import BooleanField
from wtforms import SubmitField
from wtforms.validators import DataRequired
from wtforms.validators import Length
from wtforms.validators import Email
from wtforms.validators import Regexp
from wtforms.validators import EqualTo
from wtforms import ValidationError
from ..models import User
class LoginForm(FlaskForm):
"""
用户登录表单
"""
user_email = StringField('电子邮箱', validators=[DataRequired(), Length(1, 64), Email()])
password = PasswordField('密码', validators=[DataRequired()])
remember_me = BooleanField('记住我')
submit = SubmitField('登录')
class RegistrationForm(FlaskForm):
# 第一个参数是在页面显示的字符
user_email = StringField('电子邮箱', validators=[DataRequired(), Length(1, 64), Email()])
user_name = StringField('用户名', validators=[DataRequired(), Length(1, 64), Regexp('^[A-Za-z][A-Za-z0-9_.]*$',
0, 'User Name must have two letters,numbers dots or underscores')])
password = PasswordField('密码', validators=[DataRequired(), EqualTo('password2', message='两次输入的密码必须一致。')])
password2 = PasswordField('确认密码', validators=[DataRequired()])
submit = SubmitField('注册')
"""
这个表单中还有两个自定义的验证函数,以方法的形式实现。
如果表单类中定义了以validate_开头且后面跟着字段名的方法,这个方法就和常规的验证函数一起调用。
"""
def validate_user_email(self, field):
if User.query.filter_by(user_email=field.data).first():
raise ValidationError('该邮件地址已经被注册。')
def validate_user_name(self, field):
if User.query.filter_by(user_name=field.data).first():
raise ValidationError('该用户名已经被使用。')
class ChangePasswordForm(FlaskForm):
"""
更新密码的表单
"""
old_password = PasswordField('旧密码', validators=[DataRequired()])
password = PasswordField('新密码', validators=[
DataRequired(), EqualTo('password2', message='两次输入的密码必须一致。')])
password2 = PasswordField('确认新密码', validators=[DataRequired()])
submit = SubmitField('更改密码')
class PasswordResetRequestForm(FlaskForm):
"""
重置密码请求表单
"""
user_email = StringField('电子邮箱', validators=[DataRequired(), Length(1, 64), Email()])
submit = SubmitField('重置密码')
class PasswordResetForm(FlaskForm):
"""
重置密码表单
"""
password = PasswordField('新密码', validators=[
DataRequired(), EqualTo('password2', message='两次输入的密码不一致。')])
password2 = PasswordField('确认密码', validators=[DataRequired()])
submit = SubmitField('重置密码')
class ChangeEmailForm(FlaskForm):
user_email = StringField('新电子邮件地址', validators=[DataRequired(), Length(1, 64), Email()])
password = PasswordField('密码', validators=[DataRequired()])
submit = SubmitField('更改电子邮箱')
@staticmethod
def validate_user_email(self, field):
if User.query.filter_by(user_email=field.data).first():
raise ValidationError('该邮箱已经注册。') | 32.722222 | 112 | 0.696774 |
from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms import PasswordField
from wtforms import BooleanField
from wtforms import SubmitField
from wtforms.validators import DataRequired
from wtforms.validators import Length
from wtforms.validators import Email
from wtforms.validators import Regexp
from wtforms.validators import EqualTo
from wtforms import ValidationError
from ..models import User
class LoginForm(FlaskForm):
user_email = StringField('电子邮箱', validators=[DataRequired(), Length(1, 64), Email()])
password = PasswordField('密码', validators=[DataRequired()])
remember_me = BooleanField('记住我')
submit = SubmitField('登录')
class RegistrationForm(FlaskForm):
user_email = StringField('电子邮箱', validators=[DataRequired(), Length(1, 64), Email()])
user_name = StringField('用户名', validators=[DataRequired(), Length(1, 64), Regexp('^[A-Za-z][A-Za-z0-9_.]*$',
0, 'User Name must have two letters,numbers dots or underscores')])
password = PasswordField('密码', validators=[DataRequired(), EqualTo('password2', message='两次输入的密码必须一致。')])
password2 = PasswordField('确认密码', validators=[DataRequired()])
submit = SubmitField('注册')
def validate_user_email(self, field):
if User.query.filter_by(user_email=field.data).first():
raise ValidationError('该邮件地址已经被注册。')
def validate_user_name(self, field):
if User.query.filter_by(user_name=field.data).first():
raise ValidationError('该用户名已经被使用。')
class ChangePasswordForm(FlaskForm):
old_password = PasswordField('旧密码', validators=[DataRequired()])
password = PasswordField('新密码', validators=[
DataRequired(), EqualTo('password2', message='两次输入的密码必须一致。')])
password2 = PasswordField('确认新密码', validators=[DataRequired()])
submit = SubmitField('更改密码')
class PasswordResetRequestForm(FlaskForm):
user_email = StringField('电子邮箱', validators=[DataRequired(), Length(1, 64), Email()])
submit = SubmitField('重置密码')
class PasswordResetForm(FlaskForm):
password = PasswordField('新密码', validators=[
DataRequired(), EqualTo('password2', message='两次输入的密码不一致。')])
password2 = PasswordField('确认密码', validators=[DataRequired()])
submit = SubmitField('重置密码')
class ChangeEmailForm(FlaskForm):
user_email = StringField('新电子邮件地址', validators=[DataRequired(), Length(1, 64), Email()])
password = PasswordField('密码', validators=[DataRequired()])
submit = SubmitField('更改电子邮箱')
@staticmethod
def validate_user_email(self, field):
if User.query.filter_by(user_email=field.data).first():
raise ValidationError('该邮箱已经注册。') | true | true |
f71f9324bc59454b4d777a1a8dfc0cbafae774e6 | 9,717 | py | Python | mbam/parsing/base.py | danebjork/AutomatedMBAM | 91183dcfef634ad9150ee187da8172cff6845fe3 | [
"MIT"
] | 2 | 2018-11-10T17:06:36.000Z | 2018-12-19T23:47:26.000Z | mbam/parsing/base.py | danebjork/AutomatedMBAM | 91183dcfef634ad9150ee187da8172cff6845fe3 | [
"MIT"
] | 6 | 2018-05-16T21:06:34.000Z | 2019-01-14T22:23:15.000Z | mbam/parsing/base.py | danebjork/AutomatedMBAM | 91183dcfef634ad9150ee187da8172cff6845fe3 | [
"MIT"
] | 2 | 2018-11-14T13:30:55.000Z | 2019-01-14T20:49:15.000Z | import os
from sympy.printing import julia_code
from sympy import Symbol
import json
import logging
import re
class BaseParser:
"""Parent class for all model parsers.
"""
    def __init__(self, mbam_model, data_path):
        """Initialize the parser for one MBAM model.

        Sets up logging, default script options, output paths, and the
        sympy -> Julia symbol substitution table.

        Parameters
        ----------
        mbam_model : ``mbammodel``
            Can be any of the following: Function, ODE, DAE.
        data_path : ``str``
            The full path to the hdf5 file to be included in the
            model.

        Notes
        -----
        ``data_path`` is currently ignored: ``self.data_path`` is
        hard-coded to ``'temp.h5'`` below — TODO confirm this override
        is intentional before relying on the parameter.
        """
        self.logger = logging.getLogger("MBAM.BaseParser")
        self.logger.debug("Initializing BaseParser")
        # The MBAM model whose parameters/equations are rendered to Julia.
        self.mm = mbam_model
        # Populate self.options with defaults; callers may replace them
        # later through update_options().
        self.create_default_options()
        # self.data_path = data_path
        self.data_path = 'temp.h5' # os.path.join(os.pardir, 'temp.h5') # data should be in parent directory
        self.script = '\n'  # accumulated Julia script text
        # Generated scripts are written under julia_scripts/models/<name>.jl
        # relative to the current working directory.
        self.dir = os.path.join("julia_scripts", "models")
        self.name = self.mm.name
        self.file_name = self.name + ".jl"
        self.file_path = os.path.join(os.getcwd(), os.path.join(self.dir, self.file_name))
        # Build the sympy -> Julia substitution tuples used when printing
        # equations (see create_julia_swap).
        self.create_julia_swap()
def update_options(self, options):
"""Creates and saves a new script generated with the given options.
Parameters
----------
options : ``dict``
Must follow format: {"bare": ``bool``, "weights": ``bool``, "imports": ``string``, "args": ``string``, "kwargs": ``string``}
"""
self.logger.debug("Updating options: %s" %options)
self.options = options
def create_default_options(self):
""" Used until options are updated with update_options
"""
self.options = {}
self.options['bare'] = False
self.options['weights'] = False
self.options["imports"] = ""
self.options["args"] = ""
self.options["kwargs"] = ""
def create_julia_swap(self):
""" Generates a list of sympy substitution tuples to sub out sympy vars
with julia formating.
Example
-------
Time update: t => _t.
Vars update: x1 => _x[1].
Params update: p1 => ps.p1.
Inputs update: u1 => _inp[1].
"""
self.julia_swap = []
self.julia_swap.append(('t', '_t'))
for p in self.mm.model_ps.list:
self.julia_swap.append((p, 'ps.' + p))
for i, v in enumerate(self.mm.model_vs.list):
self.julia_swap.append((v, '_x[{0}]'.format(i+1)))
for i, u in enumerate(self.mm.model_eqs['inp'].eq_symbols):
self.julia_swap.append((u.strip(), '_inp[{0}]'.format(i+1)))
def write_xi(self):
return 'xi = ParametricModels.xvalues(parametricmodel)\n'
def write_end(self):
return 'end # module'
    ## Writers for the model-specific sections of the generated Julia script.
def write_bare_model(self):
ret = ''
ret += 'zerodata = ParametricModels.OLSData("%s"_zero, zero(ydata))\n' % self.name
ret += 'bareparametricmodel = @ParametricModels.ODEModel(zerodata, %s, ic, rhs, obs, _t, (), Tuple{Symbol, Any}[])\n' % self.name
ret += self.write_param_transforms(bare=True)
ret += 'modelbare = Models.Model(bareparametricmodel)\n'
return ret
def write_imports(self):
    """Emit the module header and import section of the Julia script."""
    header = "module {0}_Model\n\n".format(self.name)
    header += "import Models\nimport ParametricModels\nusing Parameters\n"
    extra = self.options["imports"]
    self.logger.debug("Extra modules to import: %s", extra)
    if extra != "":
        header += "import %s\n\n" % extra
    else:
        header += "\n"
    return header
def write_params(self):
    """Emit the @with_kw parameter-struct definition for the model."""
    pieces = [
        "@with_kw mutable struct %s{T<:Real} <: ParametricModels.AbstractParameterSpace{T} @deftype T\n"
        % self.name
    ]
    for param in self.mm.model_ps.dict["ps"]:
        pieces.append("\t%s = %s\n" % (param["name"], param["init_val"]))
    pieces.append("end\n\n")
    return "".join(pieces)
def write_inputs(self):
    """Emit the Julia `inp` function that evaluates model inputs at time _t."""
    inp_eqs = self.mm.model_eqs["inp"]
    body = "function inp(ps::%s{T}, _t) where T <: Real\n" % self.name
    body += self.write_substitutions(inp_eqs.sbs_sym_list)
    body += self.write_equation_return(inp_eqs.eqs_sym_list)
    return body
def write_data(self):
    """Emit Julia code that loads t/ydata (and optional weights) from HDF5.

    Backslashes in the data path are doubled so the generated Julia string
    literal stays valid on Windows paths.

    Returns
    -------
    str
        Julia source loading the data and constructing an OLSData (default)
        or WLSData (when options["weights"] is truthy).
    """
    path = self.data_path.replace("\\", "\\\\")
    ret = "import HDF5\n"
    ret += 'ydata = HDF5.h5read("%s", "/ydata")\n' % path
    ret += '_t = HDF5.h5read("%s", "/t")\n' % path
    if not self.options['weights']:
        ret += 'data = ParametricModels.OLSData("%s", ydata)\n' % self.name
    else:
        # Bug fix: the weights read previously used the raw, unescaped
        # self.data_path, producing an invalid Julia string on Windows.
        ret += 'weights = HDF5.h5read("%s", "/weights")\n' % path
        ret += 'data = ParametricModels.WLSData("%s", ydata, weights)\n' % self.name
    return ret
def write_substitutions(self, sub_list):
    """Render intermediate-substitution assignments as tab-indented Julia lines.

    Parameters
    ----------
    sub_list : list
        Equation dicts of the form {"sym": <lhs symbol>, "eq": <sympy expr>};
        each becomes "\t<sym> = <julia rhs>\n" after applying julia_swap.
    """
    lines = [
        "\t%s = %s\n" % (sub["sym"], julia_code(sub["eq"].subs(self.julia_swap)))
        for sub in sub_list
    ]
    return "".join(lines)
def write_equation_return(self, eq_list):
    """Render `eq_list` as the function's return statement plus closing `end`.

    Parameters
    ----------
    eq_list : list
        Equation dicts with only right-hand sides ({"sym": "", "eq": ...});
        they are rendered by `write_equation_list`.
    """
    return "\treturn " + self.write_equation_list(eq_list) + "\nend\n\n"
def write_equation_list(self, eq_list):
    """Render equations as a typed Julia array literal `T[...]`.

    Parameters
    ----------
    eq_list : list
        Equation dicts with only right-hand sides ({"sym": "", "eq": ...});
        each expression has julia_swap substitutions applied before printing.
    """
    rendered = [julia_code(eq["eq"].subs(self.julia_swap)) for eq in eq_list]
    return "T[" + ", ".join(rendered) + "]"
def write_constants(self):
    """Emit the @SetConstant call listing all params whose transform is 'constant'."""
    names = [
        p["name"]
        for p in self.mm.model_ps.dict["ps"]
        if p["transform"] == "constant"
    ]
    # Trailing ", " is appended after every entry (and after the opening
    # prefix) and then stripped once, matching the original formatting.
    body = "@ParametricModels.SetConstant(parametricmodel, " + "".join(
        n + ", " for n in names
    )
    return body[:-2] + ")\n"
def write_param_transforms(self, bare=False):
    """Emit the Julia loop that installs log/sinh transforms on parameters.

    Parameters
    ----------
    bare : bool
        When True, iterate `bareparametricmodel` instead of `parametricmodel`.
    """
    target = "bareparametricmodel" if bare else "parametricmodel"
    return (
        "for p in %s.parameters\n" % target
        + "\tif p.s in " + self.list_out_param_type("log") + "\n"
        + "\t\tp.t = exp\n"
        + "\t\tp.invt = log\n"
        + "\telseif p.s in " + self.list_out_param_type("sinh") + "\n"
        + "\t\tp.t = sinh\n"
        + "\t\tp.invt = asinh\n"
        + "\tend\n"
        + "end\n\n"
    )
def list_out_param_type(self, p_type):
    """Return a Julia symbol-array literal of params whose transform == p_type.

    Yields e.g. "[:a, :b]", or "[]" when no parameter matches.
    """
    names = [
        p["name"]
        for p in self.mm.model_ps.dict["ps"]
        if p["transform"] == p_type
    ]
    body = "[" + "".join(":" + n + ", " for n in names)
    if len(body) > 1:
        body = body[:-2]
    return body + "]"
def init_models_dir(self):
    """Ensure the julia_scripts/models/ output directory exists.

    Uses makedirs(exist_ok=True) instead of the previous check-then-create
    pair, which raced if the directory appeared between the exists() check
    and the makedirs() call.
    """
    os.makedirs(self.dir, exist_ok=True)
def save_to_file(self, script):
    """Write `script` to this model's .jl file, creating its directory first.

    Parameters
    ----------
    script : str
        Full text of a generated Julia model script; replaces any existing
        file contents.
    """
    self.logger.info("Writing script to file: %s", self.file_path)
    self.init_models_dir()
    with open(self.file_path, "w", encoding="utf-8") as handle:
        handle.write(script)
def parse_args(self):
    """Render self.options["args"] as a Julia tuple literal string."""
    raw = self.options["args"]
    self.logger.debug("Parsing args: %s", raw)
    # A trailing comma forces a Julia tuple even for a single element.
    parsed = "()" if raw == "" else "(%s,)" % raw
    self.logger.debug("Parsed args = %s", parsed)
    return parsed
def parse_kwargs(self):
    """Render self.options["kwargs"] ("a=1, b=2") as a Julia symbol/value array.

    Returns
    -------
    str
        "Tuple{Symbol, Any}[]" when empty, otherwise
        "Tuple{Symbol, Any}[(:a, 1),(:b, 2),]".
    """
    raw = self.options["kwargs"]
    self.logger.debug("Parsing kwargs: %s", raw)
    if raw == "":
        parsed = "Tuple{Symbol, Any}[]"
    else:
        pairs = []
        for item in raw.split(","):
            key, value = item.split("=")
            pairs.append("(:%s, %s)," % (key.strip(), value.strip()))
        parsed = "Tuple{Symbol, Any}[" + "".join(pairs) + "]"
    self.logger.debug("Parsed kwargs = %s", parsed)
    return parsed
def find_replace_vectorized(self, string):
    """Pad Julia element-wise operators (.*, ./, .^) with spaces.

    Bug fix: the original patterns were written as plain strings
    ("\\.\\*", ...), whose backslash sequences are invalid string escapes
    and raise SyntaxWarning/DeprecationWarning on modern Python; raw
    strings express the same regexes safely.

    Parameters
    ----------
    string : str
        Julia source fragment to reformat.
    """
    replacements = {r"\.\*": " .* ", r"\./": " ./ ", r"\.\^": " .^ "}
    for pattern, spaced in replacements.items():
        string = re.sub(pattern, spaced, string)
    return string
| 34.580071 | 137 | 0.529793 | import os
from sympy.printing import julia_code
from sympy import Symbol
import json
import logging
import re
class BaseParser:
def __init__(self, mbam_model, data_path):
self.logger = logging.getLogger("MBAM.BaseParser")
self.logger.debug("Initializing BaseParser")
self.mm = mbam_model
self.create_default_options()
self.data_path = 'temp.h5' self.dir = os.path.join("julia_scripts", "models")
self.name = self.mm.name
self.file_name = self.name + ".jl"
self.file_path = os.path.join(os.getcwd(), os.path.join(self.dir, self.file_name))
self.create_julia_swap()
def update_options(self, options):
self.logger.debug("Updating options: %s" %options)
self.options = options
def create_default_options(self):
self.options = {}
self.options['bare'] = False
self.options['weights'] = False
self.options["imports"] = ""
self.options["args"] = ""
self.options["kwargs"] = ""
def create_julia_swap(self):
self.julia_swap = []
self.julia_swap.append(('t', '_t'))
for p in self.mm.model_ps.list:
self.julia_swap.append((p, 'ps.' + p))
for i, v in enumerate(self.mm.model_vs.list):
self.julia_swap.append((v, '_x[{0}]'.format(i+1)))
for i, u in enumerate(self.mm.model_eqs['inp'].eq_symbols):
self.julia_swap.append((u.strip(), '_inp[{0}]'.format(i+1)))
def write_xi(self):
return 'xi = ParametricModels.xvalues(parametricmodel)\n'
def write_end(self):
return 'end # module'
are_model(self):
ret = ''
ret += 'zerodata = ParametricModels.OLSData("%s"_zero, zero(ydata))\n' % self.name
ret += 'bareparametricmodel = @ParametricModels.ODEModel(zerodata, %s, ic, rhs, obs, _t, (), Tuple{Symbol, Any}[])\n' % self.name
ret += self.write_param_transforms(bare=True)
ret += 'modelbare = Models.Model(bareparametricmodel)\n'
return ret
def write_imports(self):
ret = 'module {0}_Model\n\n'.format(self.name)
ret += 'import Models\n'
ret += 'import ParametricModels\n'
ret += 'using Parameters\n'
self.logger.debug("Extra modules to import: %s" %self.options["imports"])
if self.options["imports"] != "":
ret += "import %s\n\n" %self.options["imports"]
else:
ret += "\n"
return ret
def write_params(self):
ret = '@with_kw mutable struct %s{T<:Real} <: ParametricModels.AbstractParameterSpace{T} @deftype T\n' % self.name
for p in self.mm.model_ps.dict['ps']:
ret += '\t'
ret += p['name']
ret += ' = '
ret += str(p['init_val'])
ret += '\n'
ret += 'end\n\n'
return ret
def write_inputs(self):
ret = 'function inp(ps::%s{T}, _t) where T <: Real\n' % self.name
ret += self.write_substitutions(self.mm.model_eqs['inp'].sbs_sym_list)
ret += self.write_equation_return(self.mm.model_eqs['inp'].eqs_sym_list)
return ret
def write_data(self):
ret = "import HDF5\n"
ret += 'ydata = HDF5.h5read("%s", "/ydata")\n' % self.data_path.replace("\\", "\\\\")
ret += '_t = HDF5.h5read("%s", "/t")\n' % self.data_path.replace("\\", "\\\\")
if not self.options['weights']:
ret += 'data = ParametricModels.OLSData("%s", ydata)\n' % self.name
else:
ret += 'weights = HDF5.h5read("%s", "/weights")\n' % self.data_path
ret += 'data = ParametricModels.WLSData("%s", ydata, weights)\n' % self.name
return ret
def write_substitutions(self, sub_list):
ret = ''
for sub in sub_list:
ret += '\t'
ret += str(sub['sym'])
ret += ' = '
ret += julia_code(sub['eq'].subs(self.julia_swap))
ret += '\n'
return ret
def write_equation_return(self, eq_list):
ret = '\treturn '
ret += self.write_equation_list(eq_list)
ret += '\nend\n\n'
return ret
def write_equation_list(self, eq_list):
ret = 'T['
for i, eq in enumerate(eq_list):
ret += julia_code(eq['eq'].subs(self.julia_swap))
if i != len(eq_list)-1:
ret += ', '
ret += ']'
return ret
def write_constants(self):
ret = '@ParametricModels.SetConstant(parametricmodel, '
for i, p in enumerate(self.mm.model_ps.dict['ps']):
if p['transform'] == 'constant':
ret += p['name']
ret += ', '
ret = ret[:-2]
ret += ')\n'
return ret
def write_param_transforms(self, bare=False):
if not bare:
ret = 'for p in parametricmodel.parameters\n'
else:
ret = 'for p in bareparametricmodel.parameters\n'
ret += '\tif p.s in ' + self.list_out_param_type('log') + '\n'
ret += '\t\tp.t = exp\n'
ret += '\t\tp.invt = log\n'
ret += '\telseif p.s in ' + self.list_out_param_type('sinh') + '\n'
ret += '\t\tp.t = sinh\n'
ret += '\t\tp.invt = asinh\n'
ret += '\tend\n'
ret += 'end\n\n'
return ret
def list_out_param_type(self, p_type):
ret = '['
for i, p in enumerate(self.mm.model_ps.dict['ps']):
if p['transform'] == p_type:
ret += ':'+ p['name'] + ', '
if len(ret) > 1:
ret = ret[:-2]
ret += ']'
return ret
def init_models_dir(self):
if not os.path.exists(self.dir):
os.makedirs(self.dir)
def save_to_file(self, script):
self.logger.info("Writing script to file: %s" %self.file_path)
self.init_models_dir()
with open(self.file_path, "w", encoding="utf-8") as jl:
jl.write(script)
def parse_args(self):
self.logger.debug("Parsing args: %s", self.options["args"])
if self.options["args"] == "":
args = "()"
else:
args = "(%s,)" %self.options["args"]
self.logger.debug("Parsed args = %s" %args)
return args
def parse_kwargs(self):
self.logger.debug("Parsing kwargs: %s", self.options["kwargs"])
if self.options["kwargs"] == "":
kwargs = "Tuple{Symbol, Any}[]"
else:
kwargs = "Tuple{Symbol, Any}["
for kwarg in self.options["kwargs"].split(','):
s,v = kwarg.split("=")
kwargs += "(:%s, %s)," %(s.strip(),v.strip())
kwargs += "]"
self.logger.debug("Parsed kwargs = %s" %kwargs)
return kwargs
def find_replace_vectorized(self,string):
d = {"\.\*": ' .* ', "\.\/": ' ./ ', "\.\^": ' .^ '}
for item in d.keys():
string = re.sub(item, d[item], string)
return string
| true | true |
f71f93774042467cdcd4ca52abc67522df7b11cf | 649 | py | Python | intro-python/part2/structure.py | cobeam/DevNetRepo | d824bb6ad7d21bcae03485b571e97fc2b6b61df9 | [
"MIT"
] | 90 | 2018-04-07T00:39:23.000Z | 2020-06-09T02:44:02.000Z | intro-python/part2/structure.py | cobeam/DevNetRepo | d824bb6ad7d21bcae03485b571e97fc2b6b61df9 | [
"MIT"
] | 38 | 2018-04-06T18:11:36.000Z | 2020-05-11T23:36:24.000Z | intro-python/part2/structure.py | cobeam/DevNetRepo | d824bb6ad7d21bcae03485b571e97fc2b6b61df9 | [
"MIT"
] | 143 | 2018-04-20T00:17:24.000Z | 2020-06-12T15:07:42.000Z | #!/usr/bin/env python
"""Module docstring."""
# Imports
import os
import sys
# Module Constants
START_MESSAGE = "CLI Inspection Script"
# Module "Global" Variables
location = os.path.abspath(__file__)
# Module Functions and Classes
def main(*args):
    """My main script function.

    Prints the banner, the absolute path of this script, and the
    command-line arguments forwarded by the caller.

    Parameters
    ----------
    *args
        Arguments passed to the script (script name already removed).
    """
    print(START_MESSAGE)
    print(f"Script Location: {location}")
    print(f"Arguments Passed: {args}")
# Check to see if this file is the "__main__" script being executed
if __name__ == '__main__':
_, *script_args = sys.argv
main(*script_args)
| 19.088235 | 78 | 0.694915 |
import os
import sys
START_MESSAGE = "CLI Inspection Script"
location = os.path.abspath(__file__)
def main(*args):
print(START_MESSAGE)
print("Script Location:", location)
print("Arguments Passed:", args)
if __name__ == '__main__':
_, *script_args = sys.argv
main(*script_args)
| true | true |
f71f943b61674db1e41754a8a4cbc52954162b47 | 10,921 | py | Python | setup.py | dineshsonachalam/atheris | 7c96c70056478b29d81d634b197c356f479fb6d7 | [
"Apache-2.0"
] | null | null | null | setup.py | dineshsonachalam/atheris | 7c96c70056478b29d81d634b197c356f479fb6d7 | [
"Apache-2.0"
] | null | null | null | setup.py | dineshsonachalam/atheris | 7c96c70056478b29d81d634b197c356f479fb6d7 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
# Copyright 2021 Fraunhofer FKIE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setuptools for Atheris."""
import os
import shutil
import subprocess
import sys
import tempfile
import setuptools
from setuptools import Extension
from setuptools import setup
from setuptools.command.build_ext import build_ext
__version__ = os.getenv("ATHERIS_VERSION", "1.0.11")
if len(sys.argv) > 1 and sys.argv[1] == "print_version":
print(__version__)
quit()
clang_install_instructions = """download and build the latest version of Clang:
git clone https://github.com/llvm/llvm-project.git
cd llvm-project
mkdir build
cd build
cmake -DLLVM_ENABLE_PROJECTS='clang;compiler-rt' -G "Unix Makefiles" ../llvm
make -j 100 # This step is very slow
Then, set $CLANG_BIN="$(pwd)/bin/clang" and run pip again.
You should use this same Clang for building any Python extensions you plan to fuzz.
"""
too_old_error = """Your libFuzzer version is too old; set either $CLANG_BIN to point to a more recent Clang, or $LIBFUZZER_VERSION to point directly to a more recent libFuzzer .a file. If needed, """ + clang_install_instructions
no_libfuzzer_error = """Failed to find libFuzzer; set either $CLANG_BIN to point to your Clang binary, or $LIBFUZZER_LIB to point directly to your libFuzzer .a file. If needed, """ + clang_install_instructions
if sys.platform == "darwin":
too_old_error = ("Your libFuzzer version is too old.\nPlease" +
clang_install_instructions + "Do not use Apple "
"Clang; Apple Clang does not come with libFuzzer.")
no_libfuzzer_error = ("Failed to find libFuzzer; you may be building using "
"Apple Clang. Apple Clang does not come with "
"libFuzzer.\nPlease " + clang_install_instructions)
class PybindIncludeGetter(object):
  """Lazy proxy for the pybind11 include directory.

  Importing pybind11 is deferred until str() is invoked, so setup() can list
  pybind11 in setup_requires and have it installed before the path is
  actually needed by the compiler.
  """

  def __str__(self):
    import pybind11  # pylint: disable=g-import-not-at-top
    include_dir = pybind11.get_include()
    return include_dir
def check_libfuzzer_version(libfuzzer):
  """Verifies that the specified libFuzzer is of a sufficiently high version.

  Runs setup_utils/check_libfuzzer_version.sh and returns its status string
  (e.g. "up-to-date", "outdated-recoverable", "outdated-unrecoverable").
  If the check script itself fails, optimistically returns "up-to-date" so
  the build can proceed.
  """
  current_path = os.path.dirname(os.path.realpath(__file__))
  try:
    version = subprocess.check_output(
        [current_path + "/setup_utils/check_libfuzzer_version.sh", libfuzzer])
  except subprocess.CalledProcessError as e:
    # Bug fix: e.stderr is None when stderr was not captured, so fall back
    # to str(e); also add missing newlines and fix the "libFuxzzer" typo.
    sys.stderr.write("Failed to check libFuzzer version: %s\n" % (e.stderr or e))
    sys.stderr.write("Assuming libFuzzer is up-to-date.\n")
    return "up-to-date"
  version = version.strip().decode("utf-8")
  return version
def upgrade_libfuzzer(libfuzzer):
  """Attempt an in-place upgrade of an outdated libFuzzer archive.

  Runs setup_utils/upgrade_libfuzzer.sh and returns the path to the upgraded
  .a file; on failure, logs the error and returns the original path.
  """
  current_path = os.path.dirname(os.path.realpath(__file__))
  try:
    new_libfuzzer = subprocess.check_output(
        [current_path + "/setup_utils/upgrade_libfuzzer.sh", libfuzzer])
  except subprocess.CalledProcessError as e:
    # Bug fix: e.stderr is None when stderr was not captured, so fall back
    # to str(e); also terminate the message with a newline.
    sys.stderr.write("libFuzzer upgrade failed: %s\n" % (e.stderr or e))
    return libfuzzer
  new_libfuzzer = new_libfuzzer.strip().decode("utf-8")
  return new_libfuzzer
def get_libfuzzer_lib():
  """Returns path to the libFuzzer .a library.

  Honors $LIBFUZZER_LIB when set; otherwise locates libFuzzer via
  setup_utils/find_libfuzzer.sh. Raises RuntimeError with install
  instructions when it cannot be found.
  """
  env_override = os.getenv("LIBFUZZER_LIB", "")
  if env_override:
    return env_override
  script_dir = os.path.dirname(os.path.realpath(__file__))
  try:
    found = subprocess.check_output(
        [script_dir + "/setup_utils/find_libfuzzer.sh"])
  except subprocess.CalledProcessError:
    sys.stderr.write(no_libfuzzer_error + "\n")
    raise RuntimeError(no_libfuzzer_error)
  return found.strip().decode("utf-8")
ext_modules = [
Extension(
"atheris.atheris",
sorted([
"atheris.cc",
"util.cc",
"fuzzed_data_provider.cc",
]),
include_dirs=[
# Path to pybind11 headers
PybindIncludeGetter(),
],
language="c++"),
Extension(
"atheris.core_with_libfuzzer",
sorted([
"core.cc",
"tracer.cc",
"util.cc",
]),
include_dirs=[
# Path to pybind11 headers
PybindIncludeGetter(),
],
language="c++"),
Extension(
"atheris.core_without_libfuzzer",
sorted([
"core.cc",
"tracer.cc",
"util.cc",
]),
include_dirs=[
# Path to pybind11 headers
PybindIncludeGetter(),
],
language="c++"),
]
# cf http://bugs.python.org/issue26689
def has_flag(compiler, flagname):
  """Return whether `compiler` accepts `flagname`, probed by compiling a stub."""
  with tempfile.NamedTemporaryFile("w", suffix=".cpp", delete=False) as probe:
    probe.write("int main (int argc, char **argv) { return 0; }")
    probe_path = probe.name
  try:
    compiler.compile([probe_path], extra_postargs=[flagname])
  except setuptools.distutils.errors.CompileError:
    return False
  finally:
    # Best-effort cleanup of the probe source file.
    try:
      os.remove(probe_path)
    except OSError:
      pass
  return True
def cpp_flag(compiler):
  """Return the highest-supported -std=c++[11/14] compiler flag.

  $FORCE_MIN_VERSION pins C++11 (used in testing); $FORCE_VERSION selects an
  explicit standard. C++17 is intentionally not probed by default, to work
  around https://github.com/pybind/pybind11/issues/1818.
  """
  if os.getenv("FORCE_MIN_VERSION"):
    candidates = ["-std=c++11"]
  elif os.getenv("FORCE_VERSION"):
    candidates = ["-std=c++" + os.getenv("FORCE_VERSION")]
  else:
    candidates = ["-std=c++14", "-std=c++11"]
  for candidate in candidates:
    if has_flag(compiler, candidate):
      return candidate
  raise RuntimeError("Unsupported compiler -- at least C++11 support "
                     "is needed!")
class BuildExt(build_ext):
  """A custom build extension for adding compiler-specific options."""

  def build_extensions(self):
    # Locate libFuzzer and verify (or try to upgrade) its version before
    # configuring any of the extensions.
    libfuzzer = get_libfuzzer_lib()
    orig_libfuzzer = libfuzzer
    orig_libfuzzer_name = os.path.basename(libfuzzer)
    version = check_libfuzzer_version(libfuzzer)
    # macOS has no in-place upgrade path (Apple Clang ships no libFuzzer).
    if sys.platform == "darwin" and version != "up-to-date":
      raise RuntimeError(too_old_error)
    if version == "outdated-unrecoverable":
      raise RuntimeError(too_old_error)
    elif version == "outdated-recoverable":
      sys.stderr.write("Your libFuzzer version is too old, but it's possible "
                       "to attempt an in-place upgrade. Trying that now.\n")
      libfuzzer = upgrade_libfuzzer(libfuzzer)
      if check_libfuzzer_version(libfuzzer) != "up-to-date":
        sys.stderr.write("Upgrade failed.")
        raise RuntimeError(too_old_error)
    elif version != "up-to-date":
      raise RuntimeError("Unexpected up-to-date status: " + version)
    sys.stderr.write("Your libFuzzer is up-to-date.\n")
    # Per-platform compiler/linker options.
    c_opts = []
    l_opts = []
    if sys.platform == "darwin":
      darwin_opts = ["-stdlib=libc++", "-mmacosx-version-min=10.7"]
      c_opts += darwin_opts
      l_opts += darwin_opts
    ct = self.compiler.compiler_type
    if ct == "unix":
      c_opts.append(cpp_flag(self.compiler))
    # Configure each extension; only core_with_libfuzzer links libFuzzer in.
    for ext in self.extensions:
      ext.define_macros = [("VERSION_INFO",
                            "'{}'".format(self.distribution.get_version())),
                           ("ATHERIS_MODULE_NAME", ext.name.split(".")[1])]
      ext.extra_compile_args = c_opts
      if ext.name == "atheris.core_with_libfuzzer":
        ext.extra_link_args = l_opts + [libfuzzer]
      else:
        ext.extra_link_args = l_opts
    build_ext.build_extensions(self)
    # Ship the libFuzzer archive alongside the built extensions (best-effort).
    try:
      self.deploy_file(libfuzzer, orig_libfuzzer_name)
    except Exception as e:
      sys.stderr.write(str(e))
      sys.stderr.write("\n")
    # Deploy versions of ASan and UBSan that have been merged with libFuzzer
    asan_name = orig_libfuzzer.replace(".fuzzer_no_main-", ".asan-")
    merged_asan_name = "asan_with_fuzzer.so"
    self.merge_deploy_libfuzzer_sanitizer(
        libfuzzer, asan_name, merged_asan_name,
        "asan_preinit.cc.o asan_preinit.cpp.o")
    ubsan_name = orig_libfuzzer.replace(".fuzzer_no_main-",
                                        ".ubsan_standalone-")
    merged_ubsan_name = "ubsan_with_fuzzer.so"
    self.merge_deploy_libfuzzer_sanitizer(
        libfuzzer, ubsan_name, merged_ubsan_name,
        "ubsan_init_standalone_preinit.cc.o ubsan_init_standalone_preinit.cpp.o"
    )
    ubsanxx_name = orig_libfuzzer.replace(".fuzzer_no_main-",
                                          ".ubsan_standalone_cxx-")
    merged_ubsanxx_name = "ubsan_cxx_with_fuzzer.so"
    self.merge_deploy_libfuzzer_sanitizer(
        libfuzzer, ubsanxx_name, merged_ubsanxx_name,
        "ubsan_init_standalone_preinit.cc.o ubsan_init_standalone_preinit.cpp.o"
    )

  def deploy_file(self, name, target_filename):
    # Copy `name` into the build output directory next to the atheris module.
    atheris = self.get_ext_fullpath("atheris")
    dest_file = os.path.join(os.path.dirname(atheris), target_filename)
    shutil.copy(name, dest_file)

  def merge_libfuzzer_sanitizer(self, libfuzzer, sanitizer, strip_preinit):
    """Generate a .so that contains both libFuzzer and a sanitizer."""
    current_path = os.path.dirname(os.path.realpath(__file__))
    new_sanitizer = subprocess.check_output([
        os.path.join(current_path, "setup_utils/merge_libfuzzer_sanitizer.sh"),
        libfuzzer, sanitizer, strip_preinit
    ])
    return new_sanitizer.strip().decode("utf-8")

  def merge_deploy_libfuzzer_sanitizer(self, libfuzzer, lib_name,
                                       merged_lib_name, preinit):
    # Best-effort: a missing sanitizer library must not fail the build.
    try:
      merged_lib = self.merge_libfuzzer_sanitizer(libfuzzer, lib_name, preinit)
      self.deploy_file(merged_lib, merged_lib_name)
    except Exception as e:
      sys.stderr.write(str(e))
      sys.stderr.write("\n")
setup(
name="atheris",
version=__version__,
author="Ian Eldred Pudney",
author_email="puddles@google.com",
url="https://pypi.org/project/atheris/",
description="A coverage-guided fuzzer for Python and Python extensions.",
long_description=open("README.md", "r").read(),
long_description_content_type="text/markdown",
packages=["atheris"],
ext_modules=ext_modules,
setup_requires=["pybind11>=2.5.0"],
cmdclass={"build_ext": BuildExt},
zip_safe=False,
)
| 34.451104 | 228 | 0.672557 |
import os
import shutil
import subprocess
import sys
import tempfile
import setuptools
from setuptools import Extension
from setuptools import setup
from setuptools.command.build_ext import build_ext
__version__ = os.getenv("ATHERIS_VERSION", "1.0.11")
if len(sys.argv) > 1 and sys.argv[1] == "print_version":
print(__version__)
quit()
clang_install_instructions = """download and build the latest version of Clang:
git clone https://github.com/llvm/llvm-project.git
cd llvm-project
mkdir build
cd build
cmake -DLLVM_ENABLE_PROJECTS='clang;compiler-rt' -G "Unix Makefiles" ../llvm
make -j 100 # This step is very slow
Then, set $CLANG_BIN="$(pwd)/bin/clang" and run pip again.
You should use this same Clang for building any Python extensions you plan to fuzz.
"""
too_old_error = """Your libFuzzer version is too old; set either $CLANG_BIN to point to a more recent Clang, or $LIBFUZZER_VERSION to point directly to a more recent libFuzzer .a file. If needed, """ + clang_install_instructions
no_libfuzzer_error = """Failed to find libFuzzer; set either $CLANG_BIN to point to your Clang binary, or $LIBFUZZER_LIB to point directly to your libFuzzer .a file. If needed, """ + clang_install_instructions
if sys.platform == "darwin":
too_old_error = ("Your libFuzzer version is too old.\nPlease" +
clang_install_instructions + "Do not use Apple "
"Clang; Apple Clang does not come with libFuzzer.")
no_libfuzzer_error = ("Failed to find libFuzzer; you may be building using "
"Apple Clang. Apple Clang does not come with "
"libFuzzer.\nPlease " + clang_install_instructions)
class PybindIncludeGetter(object):
def __str__(self):
import pybind11
return pybind11.get_include()
def check_libfuzzer_version(libfuzzer):
current_path = os.path.dirname(os.path.realpath(__file__))
try:
version = subprocess.check_output(
[current_path + "/setup_utils/check_libfuzzer_version.sh", libfuzzer])
except subprocess.CalledProcessError as e:
sys.stderr.write("Failed to check libFuzzer version: %s" % e.stderr)
sys.stderr.write("Assuming libFuxzzer is up-to-date.")
return "up-to-date"
version = version.strip().decode("utf-8")
return version
def upgrade_libfuzzer(libfuzzer):
current_path = os.path.dirname(os.path.realpath(__file__))
try:
new_libfuzzer = subprocess.check_output(
[current_path + "/setup_utils/upgrade_libfuzzer.sh", libfuzzer])
except subprocess.CalledProcessError as e:
sys.stderr.write("libFuzzer upgrade failed: %s" % e.stderr)
return libfuzzer
new_libfuzzer = new_libfuzzer.strip().decode("utf-8")
return new_libfuzzer
def get_libfuzzer_lib():
libfuzzer_lib = os.getenv("LIBFUZZER_LIB", "")
if libfuzzer_lib:
return libfuzzer_lib
current_path = os.path.dirname(os.path.realpath(__file__))
try:
libfuzzer = subprocess.check_output(
[current_path + "/setup_utils/find_libfuzzer.sh"])
except subprocess.CalledProcessError as e:
sys.stderr.write(no_libfuzzer_error + "\n")
raise RuntimeError(no_libfuzzer_error)
libfuzzer = libfuzzer.strip().decode("utf-8")
return libfuzzer
ext_modules = [
Extension(
"atheris.atheris",
sorted([
"atheris.cc",
"util.cc",
"fuzzed_data_provider.cc",
]),
include_dirs=[
PybindIncludeGetter(),
],
language="c++"),
Extension(
"atheris.core_with_libfuzzer",
sorted([
"core.cc",
"tracer.cc",
"util.cc",
]),
include_dirs=[
PybindIncludeGetter(),
],
language="c++"),
Extension(
"atheris.core_without_libfuzzer",
sorted([
"core.cc",
"tracer.cc",
"util.cc",
]),
include_dirs=[
PybindIncludeGetter(),
],
language="c++"),
]
def has_flag(compiler, flagname):
with tempfile.NamedTemporaryFile("w", suffix=".cpp", delete=False) as f:
f.write("int main (int argc, char **argv) { return 0; }")
fname = f.name
try:
compiler.compile([fname], extra_postargs=[flagname])
except setuptools.distutils.errors.CompileError:
return False
finally:
try:
os.remove(fname)
except OSError:
pass
return True
def cpp_flag(compiler):
if os.getenv("FORCE_MIN_VERSION"):
flags = ["-std=c++11"]
elif os.getenv("FORCE_VERSION"):
flags = ["-std=c++" + os.getenv("FORCE_VERSION")]
else:
flags = [
"-std=c++14",
"-std=c++11"]
for flag in flags:
if has_flag(compiler, flag):
return flag
raise RuntimeError("Unsupported compiler -- at least C++11 support "
"is needed!")
class BuildExt(build_ext):
def build_extensions(self):
libfuzzer = get_libfuzzer_lib()
orig_libfuzzer = libfuzzer
orig_libfuzzer_name = os.path.basename(libfuzzer)
version = check_libfuzzer_version(libfuzzer)
if sys.platform == "darwin" and version != "up-to-date":
raise RuntimeError(too_old_error)
if version == "outdated-unrecoverable":
raise RuntimeError(too_old_error)
elif version == "outdated-recoverable":
sys.stderr.write("Your libFuzzer version is too old, but it's possible "
"to attempt an in-place upgrade. Trying that now.\n")
libfuzzer = upgrade_libfuzzer(libfuzzer)
if check_libfuzzer_version(libfuzzer) != "up-to-date":
sys.stderr.write("Upgrade failed.")
raise RuntimeError(too_old_error)
elif version != "up-to-date":
raise RuntimeError("Unexpected up-to-date status: " + version)
sys.stderr.write("Your libFuzzer is up-to-date.\n")
c_opts = []
l_opts = []
if sys.platform == "darwin":
darwin_opts = ["-stdlib=libc++", "-mmacosx-version-min=10.7"]
c_opts += darwin_opts
l_opts += darwin_opts
ct = self.compiler.compiler_type
if ct == "unix":
c_opts.append(cpp_flag(self.compiler))
for ext in self.extensions:
ext.define_macros = [("VERSION_INFO",
"'{}'".format(self.distribution.get_version())),
("ATHERIS_MODULE_NAME", ext.name.split(".")[1])]
ext.extra_compile_args = c_opts
if ext.name == "atheris.core_with_libfuzzer":
ext.extra_link_args = l_opts + [libfuzzer]
else:
ext.extra_link_args = l_opts
build_ext.build_extensions(self)
try:
self.deploy_file(libfuzzer, orig_libfuzzer_name)
except Exception as e:
sys.stderr.write(str(e))
sys.stderr.write("\n")
# Deploy versions of ASan and UBSan that have been merged with libFuzzer
asan_name = orig_libfuzzer.replace(".fuzzer_no_main-", ".asan-")
merged_asan_name = "asan_with_fuzzer.so"
self.merge_deploy_libfuzzer_sanitizer(
libfuzzer, asan_name, merged_asan_name,
"asan_preinit.cc.o asan_preinit.cpp.o")
ubsan_name = orig_libfuzzer.replace(".fuzzer_no_main-",
".ubsan_standalone-")
merged_ubsan_name = "ubsan_with_fuzzer.so"
self.merge_deploy_libfuzzer_sanitizer(
libfuzzer, ubsan_name, merged_ubsan_name,
"ubsan_init_standalone_preinit.cc.o ubsan_init_standalone_preinit.cpp.o"
)
ubsanxx_name = orig_libfuzzer.replace(".fuzzer_no_main-",
".ubsan_standalone_cxx-")
merged_ubsanxx_name = "ubsan_cxx_with_fuzzer.so"
self.merge_deploy_libfuzzer_sanitizer(
libfuzzer, ubsanxx_name, merged_ubsanxx_name,
"ubsan_init_standalone_preinit.cc.o ubsan_init_standalone_preinit.cpp.o"
)
def deploy_file(self, name, target_filename):
atheris = self.get_ext_fullpath("atheris")
dest_file = os.path.join(os.path.dirname(atheris), target_filename)
shutil.copy(name, dest_file)
def merge_libfuzzer_sanitizer(self, libfuzzer, sanitizer, strip_preinit):
current_path = os.path.dirname(os.path.realpath(__file__))
new_sanitizer = subprocess.check_output([
os.path.join(current_path, "setup_utils/merge_libfuzzer_sanitizer.sh"),
libfuzzer, sanitizer, strip_preinit
])
return new_sanitizer.strip().decode("utf-8")
def merge_deploy_libfuzzer_sanitizer(self, libfuzzer, lib_name,
merged_lib_name, preinit):
try:
merged_lib = self.merge_libfuzzer_sanitizer(libfuzzer, lib_name, preinit)
self.deploy_file(merged_lib, merged_lib_name)
except Exception as e:
sys.stderr.write(str(e))
sys.stderr.write("\n")
setup(
name="atheris",
version=__version__,
author="Ian Eldred Pudney",
author_email="puddles@google.com",
url="https://pypi.org/project/atheris/",
description="A coverage-guided fuzzer for Python and Python extensions.",
long_description=open("README.md", "r").read(),
long_description_content_type="text/markdown",
packages=["atheris"],
ext_modules=ext_modules,
setup_requires=["pybind11>=2.5.0"],
cmdclass={"build_ext": BuildExt},
zip_safe=False,
)
| true | true |
f71f94cc9ad067fb869ddb7fb431d9594d731530 | 361 | py | Python | fast_gui/core.py | asvcode/nbdev_test | e5bc1a1da28e7c5d87cbba0a207e016219644ee4 | [
"Apache-2.0"
] | 2 | 2020-06-04T08:38:00.000Z | 2020-07-15T15:42:13.000Z | fast_gui/core.py | asvcode/nbdev_test | e5bc1a1da28e7c5d87cbba0a207e016219644ee4 | [
"Apache-2.0"
] | 2 | 2021-09-28T03:23:17.000Z | 2022-02-26T08:14:46.000Z | fast_gui/core.py | asvcode/nbdev_test | e5bc1a1da28e7c5d87cbba0a207e016219644ee4 | [
"Apache-2.0"
] | null | null | null | # AUTOGENERATED! DO NOT EDIT! File to edit: 00_core.ipynb (unless otherwise specified).
__all__ = ['repeat_one']
# Cell
import ipywidgets as widgets
from fastai2.vision.all import*
from .dashboard_two import ds_choice
# Cell
def repeat_one(source, n=128):
    """Single image helper for displaying batch.

    NOTE(review): `source` is currently ignored — the image is always taken
    from `ds_choice.source` (nbdev-generated code); confirm this is intended.
    """
    image = get_image_files(ds_choice.source)[9]
    return [image] * n
__all__ = ['repeat_one']
import ipywidgets as widgets
from fastai2.vision.all import*
from .dashboard_two import ds_choice
def repeat_one(source, n=128):
return [get_image_files(ds_choice.source)[9]]*n | true | true |
f71f978be69f377fe3ca350b8404a8eecbbbb6b5 | 11,993 | py | Python | tests/contrib/falcon/test_suite.py | discord/dd-trace-py | 3f6bca078e751bf7459fd02b7aff7f96eff0eeb6 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | tests/contrib/falcon/test_suite.py | discord/dd-trace-py | 3f6bca078e751bf7459fd02b7aff7f96eff0eeb6 | [
"Apache-2.0",
"BSD-3-Clause"
] | 3 | 2022-02-16T09:35:37.000Z | 2022-03-04T16:48:45.000Z | tests/contrib/falcon/test_suite.py | goodspark/dd-trace-py | e2089c7b348e9d1a70e01f96927d85a643d6ae56 | [
"Apache-2.0",
"BSD-3-Clause"
] | 1 | 2022-02-11T16:34:22.000Z | 2022-02-11T16:34:22.000Z | from ddtrace import config
from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY
from ddtrace.constants import ERROR_TYPE
from ddtrace.contrib.falcon.patch import FALCON_VERSION
from ddtrace.ext import http as httpx
from tests.opentracer.utils import init_tracer
from tests.utils import assert_is_measured
from tests.utils import assert_span_http_status_code
class FalconTestMixin(object):
def make_test_call(self, url, method="get", expected_status_code=None, **kwargs):
func = getattr(self.client, "simulate_%s" % (method,))
out = func(url, **kwargs)
if FALCON_VERSION < (2, 0, 0):
if expected_status_code is not None:
assert out.status_code == expected_status_code
else:
if expected_status_code is not None:
assert out.status[:3] == str(expected_status_code)
return out
class FalconTestCase(FalconTestMixin):
"""Falcon mixin test case that includes all possible tests. If you need
to add new tests, add them here so that they're shared across manual
and automatic instrumentation.
"""
def test_404(self):
self.make_test_call("/fake_endpoint", expected_status_code=404)
traces = self.tracer.pop_traces()
assert len(traces) == 1
assert len(traces[0]) == 1
span = traces[0][0]
assert_is_measured(span)
assert span.name == "falcon.request"
assert span.service == self._service
assert span.resource == "GET 404"
assert_span_http_status_code(span, 404)
assert span.get_tag(httpx.URL) == "http://falconframework.org/fake_endpoint"
assert httpx.QUERY_STRING not in span.get_tags()
assert span.parent_id is None
assert span.error == 0
def test_exception(self):
try:
self.make_test_call("/exception")
except Exception:
pass
else:
if FALCON_VERSION < (3, 0, 0):
assert 0
traces = self.tracer.pop_traces()
assert len(traces) == 1
assert len(traces[0]) == 1
span = traces[0][0]
assert_is_measured(span)
assert span.name == "falcon.request"
assert span.service == self._service
assert span.resource == "GET tests.contrib.falcon.app.resources.ResourceException"
assert_span_http_status_code(span, 500)
assert span.get_tag(httpx.URL) == "http://falconframework.org/exception"
assert span.parent_id is None
assert span.error == 1
def test_200(self, query_string="", trace_query_string=False):
out = self.make_test_call("/200", expected_status_code=200, query_string=query_string)
assert out.content.decode("utf-8") == "Success"
traces = self.tracer.pop_traces()
assert len(traces) == 1
assert len(traces[0]) == 1
span = traces[0][0]
assert_is_measured(span)
assert span.name == "falcon.request"
assert span.service == self._service
assert span.resource == "GET tests.contrib.falcon.app.resources.Resource200"
assert_span_http_status_code(span, 200)
fqs = ("?" + query_string) if query_string and trace_query_string else ""
assert span.get_tag(httpx.URL) == "http://falconframework.org/200" + fqs
if config.falcon.trace_query_string:
assert span.get_tag(httpx.QUERY_STRING) == query_string
else:
assert httpx.QUERY_STRING not in span.get_tags()
assert span.parent_id is None
assert span.span_type == "web"
assert span.error == 0
def test_200_qs(self):
return self.test_200("foo=bar")
def test_200_multi_qs(self):
return self.test_200("foo=bar&foo=baz&x=y")
def test_200_qs_trace(self):
with self.override_http_config("falcon", dict(trace_query_string=True)):
return self.test_200("foo=bar", trace_query_string=True)
def test_200_multi_qs_trace(self):
with self.override_http_config("falcon", dict(trace_query_string=True)):
return self.test_200("foo=bar&foo=baz&x=y", trace_query_string=True)
def test_analytics_global_on_integration_default(self):
"""
When making a request
When an integration trace search is not event sample rate is not set and globally trace search is enabled
We expect the root span to have the appropriate tag
"""
with self.override_global_config(dict(analytics_enabled=True)):
out = self.make_test_call("/200", expected_status_code=200)
self.assertEqual(out.content.decode("utf-8"), "Success")
self.assert_structure(dict(name="falcon.request", metrics={ANALYTICS_SAMPLE_RATE_KEY: 1.0}))
def test_analytics_global_on_integration_on(self):
"""
When making a request
When an integration trace search is enabled and sample rate is set and globally trace search is enabled
We expect the root span to have the appropriate tag
"""
with self.override_global_config(dict(analytics_enabled=True)):
with self.override_config("falcon", dict(analytics_enabled=True, analytics_sample_rate=0.5)):
out = self.make_test_call("/200", expected_status_code=200)
self.assertEqual(out.content.decode("utf-8"), "Success")
self.assert_structure(dict(name="falcon.request", metrics={ANALYTICS_SAMPLE_RATE_KEY: 0.5}))
def test_analytics_global_off_integration_default(self):
"""
When making a request
When an integration trace search is not set and sample rate is set and globally trace search is disabled
We expect the root span to not include tag
"""
with self.override_global_config(dict(analytics_enabled=False)):
out = self.make_test_call("/200", expected_status_code=200)
self.assertEqual(out.content.decode("utf-8"), "Success")
root = self.get_root_span()
self.assertIsNone(root.get_metric(ANALYTICS_SAMPLE_RATE_KEY))
def test_analytics_global_off_integration_on(self):
"""
When making a request
When an integration trace search is enabled and sample rate is set and globally trace search is disabled
We expect the root span to have the appropriate tag
"""
with self.override_global_config(dict(analytics_enabled=False)):
with self.override_config("falcon", dict(analytics_enabled=True, analytics_sample_rate=0.5)):
out = self.make_test_call("/200", expected_status_code=200)
self.assertEqual(out.content.decode("utf-8"), "Success")
self.assert_structure(dict(name="falcon.request", metrics={ANALYTICS_SAMPLE_RATE_KEY: 0.5}))
def test_201(self):
out = self.make_test_call("/201", method="post", expected_status_code=201)
assert out.status_code == 201
assert out.content.decode("utf-8") == "Success"
traces = self.tracer.pop_traces()
assert len(traces) == 1
assert len(traces[0]) == 1
span = traces[0][0]
assert_is_measured(span)
assert span.name == "falcon.request"
assert span.service == self._service
assert span.resource == "POST tests.contrib.falcon.app.resources.Resource201"
assert_span_http_status_code(span, 201)
assert span.get_tag(httpx.URL) == "http://falconframework.org/201"
assert span.parent_id is None
assert span.error == 0
def test_500(self):
out = self.make_test_call("/500", expected_status_code=500)
assert out.content.decode("utf-8") == "Failure"
traces = self.tracer.pop_traces()
assert len(traces) == 1
assert len(traces[0]) == 1
span = traces[0][0]
assert_is_measured(span)
assert span.name == "falcon.request"
assert span.service == self._service
assert span.resource == "GET tests.contrib.falcon.app.resources.Resource500"
assert_span_http_status_code(span, 500)
assert span.get_tag(httpx.URL) == "http://falconframework.org/500"
assert span.parent_id is None
assert span.error == 1
def test_404_exception(self):
self.make_test_call("/not_found", expected_status_code=404)
traces = self.tracer.pop_traces()
assert len(traces) == 1
assert len(traces[0]) == 1
span = traces[0][0]
assert_is_measured(span)
assert span.name == "falcon.request"
assert span.service == self._service
assert span.resource == "GET tests.contrib.falcon.app.resources.ResourceNotFound"
assert_span_http_status_code(span, 404)
assert span.get_tag(httpx.URL) == "http://falconframework.org/not_found"
assert span.parent_id is None
assert span.error == 0
def test_404_exception_no_stacktracer(self):
# it should not have the stacktrace when a 404 exception is raised
self.make_test_call("/not_found", expected_status_code=404)
traces = self.tracer.pop_traces()
assert len(traces) == 1
assert len(traces[0]) == 1
span = traces[0][0]
assert_is_measured(span)
assert span.name == "falcon.request"
assert span.service == self._service
assert_span_http_status_code(span, 404)
assert span.get_tag(ERROR_TYPE) is None
assert span.parent_id is None
assert span.error == 0
def test_200_ot(self):
"""OpenTracing version of test_200."""
writer = self.tracer._writer
ot_tracer = init_tracer("my_svc", self.tracer)
ot_tracer._dd_tracer.configure(writer=writer)
with ot_tracer.start_active_span("ot_span"):
out = self.make_test_call("/200", expected_status_code=200)
assert out.content.decode("utf-8") == "Success"
traces = self.tracer.pop_traces()
assert len(traces) == 1
assert len(traces[0]) == 2
ot_span, dd_span = traces[0]
# confirm the parenting
assert ot_span.parent_id is None
assert dd_span.parent_id == ot_span.span_id
assert ot_span.service == "my_svc"
assert ot_span.resource == "ot_span"
assert_is_measured(dd_span)
assert dd_span.name == "falcon.request"
assert dd_span.service == self._service
assert dd_span.resource == "GET tests.contrib.falcon.app.resources.Resource200"
assert_span_http_status_code(dd_span, 200)
assert dd_span.get_tag(httpx.URL) == "http://falconframework.org/200"
assert dd_span.error == 0
def test_falcon_request_hook(self):
@config.falcon.hooks.on("request")
def on_falcon_request(span, request, response):
span.set_tag("my.custom", "tag")
out = self.make_test_call("/200", expected_status_code=200)
assert out.content.decode("utf-8") == "Success"
traces = self.tracer.pop_traces()
assert len(traces) == 1
assert len(traces[0]) == 1
span = traces[0][0]
assert span.get_tag("http.request.headers.my_header") is None
assert span.get_tag("http.response.headers.my_response_header") is None
assert span.name == "falcon.request"
assert span.get_tag("my.custom") == "tag"
assert span.error == 0
def test_http_header_tracing(self):
with self.override_config("falcon", {}):
config.falcon.http.trace_headers(["my-header", "my-response-header"])
self.make_test_call("/200", headers={"my-header": "my_value"})
traces = self.tracer.pop_traces()
assert len(traces) == 1
assert len(traces[0]) == 1
span = traces[0][0]
assert span.get_tag("http.request.headers.my-header") == "my_value"
assert span.get_tag("http.response.headers.my-response-header") == "my_response_value"
| 41.071918 | 117 | 0.650546 | from ddtrace import config
from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY
from ddtrace.constants import ERROR_TYPE
from ddtrace.contrib.falcon.patch import FALCON_VERSION
from ddtrace.ext import http as httpx
from tests.opentracer.utils import init_tracer
from tests.utils import assert_is_measured
from tests.utils import assert_span_http_status_code
class FalconTestMixin(object):
def make_test_call(self, url, method="get", expected_status_code=None, **kwargs):
func = getattr(self.client, "simulate_%s" % (method,))
out = func(url, **kwargs)
if FALCON_VERSION < (2, 0, 0):
if expected_status_code is not None:
assert out.status_code == expected_status_code
else:
if expected_status_code is not None:
assert out.status[:3] == str(expected_status_code)
return out
class FalconTestCase(FalconTestMixin):
def test_404(self):
self.make_test_call("/fake_endpoint", expected_status_code=404)
traces = self.tracer.pop_traces()
assert len(traces) == 1
assert len(traces[0]) == 1
span = traces[0][0]
assert_is_measured(span)
assert span.name == "falcon.request"
assert span.service == self._service
assert span.resource == "GET 404"
assert_span_http_status_code(span, 404)
assert span.get_tag(httpx.URL) == "http://falconframework.org/fake_endpoint"
assert httpx.QUERY_STRING not in span.get_tags()
assert span.parent_id is None
assert span.error == 0
def test_exception(self):
try:
self.make_test_call("/exception")
except Exception:
pass
else:
if FALCON_VERSION < (3, 0, 0):
assert 0
traces = self.tracer.pop_traces()
assert len(traces) == 1
assert len(traces[0]) == 1
span = traces[0][0]
assert_is_measured(span)
assert span.name == "falcon.request"
assert span.service == self._service
assert span.resource == "GET tests.contrib.falcon.app.resources.ResourceException"
assert_span_http_status_code(span, 500)
assert span.get_tag(httpx.URL) == "http://falconframework.org/exception"
assert span.parent_id is None
assert span.error == 1
def test_200(self, query_string="", trace_query_string=False):
out = self.make_test_call("/200", expected_status_code=200, query_string=query_string)
assert out.content.decode("utf-8") == "Success"
traces = self.tracer.pop_traces()
assert len(traces) == 1
assert len(traces[0]) == 1
span = traces[0][0]
assert_is_measured(span)
assert span.name == "falcon.request"
assert span.service == self._service
assert span.resource == "GET tests.contrib.falcon.app.resources.Resource200"
assert_span_http_status_code(span, 200)
fqs = ("?" + query_string) if query_string and trace_query_string else ""
assert span.get_tag(httpx.URL) == "http://falconframework.org/200" + fqs
if config.falcon.trace_query_string:
assert span.get_tag(httpx.QUERY_STRING) == query_string
else:
assert httpx.QUERY_STRING not in span.get_tags()
assert span.parent_id is None
assert span.span_type == "web"
assert span.error == 0
def test_200_qs(self):
return self.test_200("foo=bar")
def test_200_multi_qs(self):
return self.test_200("foo=bar&foo=baz&x=y")
def test_200_qs_trace(self):
with self.override_http_config("falcon", dict(trace_query_string=True)):
return self.test_200("foo=bar", trace_query_string=True)
def test_200_multi_qs_trace(self):
with self.override_http_config("falcon", dict(trace_query_string=True)):
return self.test_200("foo=bar&foo=baz&x=y", trace_query_string=True)
def test_analytics_global_on_integration_default(self):
with self.override_global_config(dict(analytics_enabled=True)):
out = self.make_test_call("/200", expected_status_code=200)
self.assertEqual(out.content.decode("utf-8"), "Success")
self.assert_structure(dict(name="falcon.request", metrics={ANALYTICS_SAMPLE_RATE_KEY: 1.0}))
def test_analytics_global_on_integration_on(self):
with self.override_global_config(dict(analytics_enabled=True)):
with self.override_config("falcon", dict(analytics_enabled=True, analytics_sample_rate=0.5)):
out = self.make_test_call("/200", expected_status_code=200)
self.assertEqual(out.content.decode("utf-8"), "Success")
self.assert_structure(dict(name="falcon.request", metrics={ANALYTICS_SAMPLE_RATE_KEY: 0.5}))
def test_analytics_global_off_integration_default(self):
with self.override_global_config(dict(analytics_enabled=False)):
out = self.make_test_call("/200", expected_status_code=200)
self.assertEqual(out.content.decode("utf-8"), "Success")
root = self.get_root_span()
self.assertIsNone(root.get_metric(ANALYTICS_SAMPLE_RATE_KEY))
def test_analytics_global_off_integration_on(self):
with self.override_global_config(dict(analytics_enabled=False)):
with self.override_config("falcon", dict(analytics_enabled=True, analytics_sample_rate=0.5)):
out = self.make_test_call("/200", expected_status_code=200)
self.assertEqual(out.content.decode("utf-8"), "Success")
self.assert_structure(dict(name="falcon.request", metrics={ANALYTICS_SAMPLE_RATE_KEY: 0.5}))
def test_201(self):
out = self.make_test_call("/201", method="post", expected_status_code=201)
assert out.status_code == 201
assert out.content.decode("utf-8") == "Success"
traces = self.tracer.pop_traces()
assert len(traces) == 1
assert len(traces[0]) == 1
span = traces[0][0]
assert_is_measured(span)
assert span.name == "falcon.request"
assert span.service == self._service
assert span.resource == "POST tests.contrib.falcon.app.resources.Resource201"
assert_span_http_status_code(span, 201)
assert span.get_tag(httpx.URL) == "http://falconframework.org/201"
assert span.parent_id is None
assert span.error == 0
def test_500(self):
out = self.make_test_call("/500", expected_status_code=500)
assert out.content.decode("utf-8") == "Failure"
traces = self.tracer.pop_traces()
assert len(traces) == 1
assert len(traces[0]) == 1
span = traces[0][0]
assert_is_measured(span)
assert span.name == "falcon.request"
assert span.service == self._service
assert span.resource == "GET tests.contrib.falcon.app.resources.Resource500"
assert_span_http_status_code(span, 500)
assert span.get_tag(httpx.URL) == "http://falconframework.org/500"
assert span.parent_id is None
assert span.error == 1
def test_404_exception(self):
self.make_test_call("/not_found", expected_status_code=404)
traces = self.tracer.pop_traces()
assert len(traces) == 1
assert len(traces[0]) == 1
span = traces[0][0]
assert_is_measured(span)
assert span.name == "falcon.request"
assert span.service == self._service
assert span.resource == "GET tests.contrib.falcon.app.resources.ResourceNotFound"
assert_span_http_status_code(span, 404)
assert span.get_tag(httpx.URL) == "http://falconframework.org/not_found"
assert span.parent_id is None
assert span.error == 0
def test_404_exception_no_stacktracer(self):
self.make_test_call("/not_found", expected_status_code=404)
traces = self.tracer.pop_traces()
assert len(traces) == 1
assert len(traces[0]) == 1
span = traces[0][0]
assert_is_measured(span)
assert span.name == "falcon.request"
assert span.service == self._service
assert_span_http_status_code(span, 404)
assert span.get_tag(ERROR_TYPE) is None
assert span.parent_id is None
assert span.error == 0
def test_200_ot(self):
writer = self.tracer._writer
ot_tracer = init_tracer("my_svc", self.tracer)
ot_tracer._dd_tracer.configure(writer=writer)
with ot_tracer.start_active_span("ot_span"):
out = self.make_test_call("/200", expected_status_code=200)
assert out.content.decode("utf-8") == "Success"
traces = self.tracer.pop_traces()
assert len(traces) == 1
assert len(traces[0]) == 2
ot_span, dd_span = traces[0]
assert ot_span.parent_id is None
assert dd_span.parent_id == ot_span.span_id
assert ot_span.service == "my_svc"
assert ot_span.resource == "ot_span"
assert_is_measured(dd_span)
assert dd_span.name == "falcon.request"
assert dd_span.service == self._service
assert dd_span.resource == "GET tests.contrib.falcon.app.resources.Resource200"
assert_span_http_status_code(dd_span, 200)
assert dd_span.get_tag(httpx.URL) == "http://falconframework.org/200"
assert dd_span.error == 0
def test_falcon_request_hook(self):
@config.falcon.hooks.on("request")
def on_falcon_request(span, request, response):
span.set_tag("my.custom", "tag")
out = self.make_test_call("/200", expected_status_code=200)
assert out.content.decode("utf-8") == "Success"
traces = self.tracer.pop_traces()
assert len(traces) == 1
assert len(traces[0]) == 1
span = traces[0][0]
assert span.get_tag("http.request.headers.my_header") is None
assert span.get_tag("http.response.headers.my_response_header") is None
assert span.name == "falcon.request"
assert span.get_tag("my.custom") == "tag"
assert span.error == 0
def test_http_header_tracing(self):
with self.override_config("falcon", {}):
config.falcon.http.trace_headers(["my-header", "my-response-header"])
self.make_test_call("/200", headers={"my-header": "my_value"})
traces = self.tracer.pop_traces()
assert len(traces) == 1
assert len(traces[0]) == 1
span = traces[0][0]
assert span.get_tag("http.request.headers.my-header") == "my_value"
assert span.get_tag("http.response.headers.my-response-header") == "my_response_value"
| true | true |
f71f98aa12c458b74228cfac53a00bd4f6d9a013 | 11,247 | py | Python | tensorflow/python/ops/standard_ops.py | noahl/tensorflow | b95d8cce7323d328565378e0d60d72603393f87d | [
"Apache-2.0"
] | 5 | 2018-09-22T20:16:46.000Z | 2022-02-28T10:35:19.000Z | tensorflow/python/ops/standard_ops.py | noahl/tensorflow | b95d8cce7323d328565378e0d60d72603393f87d | [
"Apache-2.0"
] | null | null | null | tensorflow/python/ops/standard_ops.py | noahl/tensorflow | b95d8cce7323d328565378e0d60d72603393f87d | [
"Apache-2.0"
] | 2 | 2019-08-14T09:04:37.000Z | 2022-02-02T20:08:02.000Z | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=unused-import
"""Import names of Tensor Flow standard Ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys as _sys
# pylint: disable=g-bad-import-order
# Imports the following modules so that @RegisterGradient get executed.
from tensorflow.python.ops import array_grad
from tensorflow.python.ops import cudnn_rnn_grad
from tensorflow.python.ops import data_flow_grad
from tensorflow.python.ops import manip_grad
from tensorflow.python.ops import math_grad
from tensorflow.python.ops import sparse_grad
from tensorflow.python.ops import spectral_grad
from tensorflow.python.ops import state_grad
from tensorflow.python.ops import tensor_array_grad
from tensorflow.python.util.all_util import remove_undocumented
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.array_ops import *
from tensorflow.python.ops.check_ops import *
from tensorflow.python.ops.clip_ops import *
from tensorflow.python.ops.special_math_ops import *
# TODO(vrv): Switch to import * once we're okay with exposing the module.
from tensorflow.python.ops.confusion_matrix import confusion_matrix
from tensorflow.python.ops.control_flow_ops import Assert
from tensorflow.python.ops.control_flow_ops import case
from tensorflow.python.ops.control_flow_ops import cond
from tensorflow.python.ops.control_flow_ops import group
from tensorflow.python.ops.control_flow_ops import no_op
from tensorflow.python.ops.control_flow_ops import tuple # pylint: disable=redefined-builtin
# pylint: enable=redefined-builtin
from tensorflow.python.ops.control_flow_ops import while_loop
from tensorflow.python.ops.data_flow_ops import *
from tensorflow.python.ops.functional_ops import *
from tensorflow.python.ops.gradients import *
from tensorflow.python.ops.histogram_ops import *
from tensorflow.python.ops.init_ops import *
from tensorflow.python.ops.io_ops import *
from tensorflow.python.ops.linalg_ops import *
from tensorflow.python.ops.logging_ops import Print
from tensorflow.python.ops.logging_ops import get_summary_op
from tensorflow.python.ops.logging_ops import timestamp
from tensorflow.python.ops.lookup_ops import initialize_all_tables
from tensorflow.python.ops.lookup_ops import tables_initializer
from tensorflow.python.ops.manip_ops import *
from tensorflow.python.ops.math_ops import *
from tensorflow.python.ops.numerics import *
from tensorflow.python.ops.parsing_ops import *
from tensorflow.python.ops.partitioned_variables import *
from tensorflow.python.ops.random_ops import *
from tensorflow.python.ops.script_ops import py_func
from tensorflow.python.ops.session_ops import *
from tensorflow.python.ops.sparse_ops import *
from tensorflow.python.ops.state_ops import assign
from tensorflow.python.ops.state_ops import assign_add
from tensorflow.python.ops.state_ops import assign_sub
from tensorflow.python.ops.state_ops import count_up_to
from tensorflow.python.ops.state_ops import scatter_add
from tensorflow.python.ops.state_ops import scatter_div
from tensorflow.python.ops.state_ops import scatter_mul
from tensorflow.python.ops.state_ops import scatter_sub
from tensorflow.python.ops.state_ops import scatter_min
from tensorflow.python.ops.state_ops import scatter_max
from tensorflow.python.ops.state_ops import scatter_update
from tensorflow.python.ops.state_ops import scatter_nd_add
from tensorflow.python.ops.state_ops import scatter_nd_sub
# TODO(simister): Re-enable once binary size increase due to scatter_nd
# ops is under control.
# from tensorflow.python.ops.state_ops import scatter_nd_mul
# from tensorflow.python.ops.state_ops import scatter_nd_div
from tensorflow.python.ops.state_ops import scatter_nd_update
from tensorflow.python.ops.string_ops import *
from tensorflow.python.ops.template import *
from tensorflow.python.ops.tensor_array_ops import *
from tensorflow.python.ops.variable_scope import *
from tensorflow.python.ops.variables import *
# pylint: enable=wildcard-import
# pylint: enable=g-bad-import-order
#### For use in remove_undocumented below:
from tensorflow.python.framework import constant_op as _constant_op
from tensorflow.python.ops import array_ops as _array_ops
from tensorflow.python.ops import check_ops as _check_ops
from tensorflow.python.ops import clip_ops as _clip_ops
from tensorflow.python.ops import confusion_matrix as _confusion_matrix
from tensorflow.python.ops import control_flow_ops as _control_flow_ops
from tensorflow.python.ops import data_flow_ops as _data_flow_ops
from tensorflow.python.ops import functional_ops as _functional_ops
from tensorflow.python.ops import gradients as _gradients
from tensorflow.python.ops import histogram_ops as _histogram_ops
from tensorflow.python.ops import init_ops as _init_ops
from tensorflow.python.ops import io_ops as _io_ops
from tensorflow.python.ops import linalg_ops as _linalg_ops
from tensorflow.python.ops import logging_ops as _logging_ops
from tensorflow.python.ops import manip_ops as _manip_ops
from tensorflow.python.ops import math_ops as _math_ops
from tensorflow.python.ops import numerics as _numerics
from tensorflow.python.ops import parsing_ops as _parsing_ops
from tensorflow.python.ops import partitioned_variables as _partitioned_variables
from tensorflow.python.ops import random_ops as _random_ops
from tensorflow.python.ops import script_ops as _script_ops
from tensorflow.python.ops import session_ops as _session_ops
from tensorflow.python.ops import sparse_ops as _sparse_ops
from tensorflow.python.ops import special_math_ops as _special_math_ops
from tensorflow.python.ops import state_ops as _state_ops
from tensorflow.python.ops import string_ops as _string_ops
from tensorflow.python.ops import template as _template
from tensorflow.python.ops import tensor_array_ops as _tensor_array_ops
from tensorflow.python.ops import variable_scope as _variable_scope
from tensorflow.python.ops import variables as _variables
_allowed_symbols_math_ops = [
# TODO(drpng): decide if we want to reference these in the documentation.
"reduced_shape",
"sparse_segment_mean_grad",
"sparse_segment_sqrt_n_grad",
# Legacy: will be removed.
"arg_max",
"arg_min",
"lin_space",
"sparse_matmul", # Use tf.matmul.
# Deprecated (see versions.h):
"batch_fft",
"batch_fft2d",
"batch_fft3d",
"batch_ifft",
"batch_ifft2d",
"batch_ifft3d",
"mul", # use tf.multiply instead.
"neg", # use tf.negative instead.
"sub", # use tf.subtract instead.
# These are documented in nn.
# We are not importing nn because it would create a circular dependency.
"sigmoid",
"log_sigmoid",
"tanh",
]
_allowed_symbols_array_ops = [
# TODO(drpng): make sure they are documented.
# Scalars:
"NEW_AXIS",
"SHRINK_AXIS",
"newaxis",
# Documented in training.py.
# I do not import train, to avoid circular dependencies.
# TODO(drpng): this is defined in gen_array_ops, clearly not the right
# place.
"stop_gradient",
# See gen_docs_combined for tf.copy documentation.
"copy",
## TODO(drpng): make them inaccessible directly.
## TODO(drpng): Below, to-doc means that we need to find an appropriate
## documentation section to reference.
## For re-exporting to tf.*:
"constant",
"edit_distance", # to-doc
# From gen_array_ops:
"copy_host", # to-doc
"immutable_const", # to-doc
"invert_permutation", # to-doc
"quantize_and_dequantize", # to-doc
# TODO(drpng): legacy symbols to be removed.
"batch_matrix_diag",
"batch_matrix_band_part",
"batch_matrix_diag_part",
"batch_matrix_set_diag",
]
_allowed_symbols_partitioned_variables = [
"PartitionedVariable", # Requires doc link.
# Legacy.
"create_partitioned_variables",
"variable_axis_size_partitioner",
"min_max_variable_partitioner",
"fixed_size_partitioner",
]
_allowed_symbols_control_flow_ops = [
# TODO(drpng): Find a place in the documentation to reference these or
# remove.
"control_trigger",
"loop_cond",
"merge",
"switch",
]
_allowed_symbols_functional_ops = [
"nest", # Used by legacy code.
]
_allowed_symbols_gradients = [
# Documented in training.py:
# Not importing training.py to avoid complex graph dependencies.
"AggregationMethod",
"GradientTape",
"custom_gradient",
"gradients", # tf.gradients = gradients.gradients
"hessians",
]
_allowed_symbols_clip_ops = [
# Documented in training.py:
# Not importing training.py to avoid complex graph dependencies.
"clip_by_average_norm",
"clip_by_global_norm",
"clip_by_norm",
"clip_by_value",
"global_norm",
]
_allowed_symbols_logging_ops = [
# Documented in training.py.
# We are not importing training.py to avoid complex dependencies.
"audio_summary",
"histogram_summary",
"image_summary",
"merge_all_summaries",
"merge_summary",
"scalar_summary",
# TODO(drpng): link in training.py if it should be documented.
"get_summary_op",
]
_allowed_symbols_variable_scope_ops = [
"get_local_variable", # Documented in framework package.
]
_allowed_symbols_misc = [
"deserialize_many_sparse",
"parse_single_sequence_example",
"serialize_many_sparse",
"serialize_sparse",
"confusion_matrix",
]
_allowed_symbols = (_allowed_symbols_array_ops +
_allowed_symbols_clip_ops +
_allowed_symbols_control_flow_ops +
_allowed_symbols_functional_ops +
_allowed_symbols_gradients +
_allowed_symbols_logging_ops +
_allowed_symbols_math_ops +
_allowed_symbols_variable_scope_ops +
_allowed_symbols_misc +
_allowed_symbols_partitioned_variables)
remove_undocumented(__name__, _allowed_symbols, [
_sys.modules[__name__],
_array_ops,
_check_ops,
_clip_ops,
_confusion_matrix,
_control_flow_ops,
_constant_op,
_data_flow_ops,
_functional_ops,
_gradients,
_histogram_ops,
_init_ops,
_io_ops,
_linalg_ops,
_logging_ops,
_manip_ops,
_math_ops,
_numerics,
_parsing_ops,
_partitioned_variables,
_random_ops,
_script_ops,
_session_ops,
_sparse_ops,
_special_math_ops,
_state_ops,
_string_ops,
_template,
_tensor_array_ops,
_variable_scope,
_variables,
])
| 36.163987 | 93 | 0.767049 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys as _sys
from tensorflow.python.ops import array_grad
from tensorflow.python.ops import cudnn_rnn_grad
from tensorflow.python.ops import data_flow_grad
from tensorflow.python.ops import manip_grad
from tensorflow.python.ops import math_grad
from tensorflow.python.ops import sparse_grad
from tensorflow.python.ops import spectral_grad
from tensorflow.python.ops import state_grad
from tensorflow.python.ops import tensor_array_grad
from tensorflow.python.util.all_util import remove_undocumented
from tensorflow.python.ops.array_ops import *
from tensorflow.python.ops.check_ops import *
from tensorflow.python.ops.clip_ops import *
from tensorflow.python.ops.special_math_ops import *
from tensorflow.python.ops.confusion_matrix import confusion_matrix
from tensorflow.python.ops.control_flow_ops import Assert
from tensorflow.python.ops.control_flow_ops import case
from tensorflow.python.ops.control_flow_ops import cond
from tensorflow.python.ops.control_flow_ops import group
from tensorflow.python.ops.control_flow_ops import no_op
from tensorflow.python.ops.control_flow_ops import tuple # pylint: disable=redefined-builtin
# pylint: enable=redefined-builtin
from tensorflow.python.ops.control_flow_ops import while_loop
from tensorflow.python.ops.data_flow_ops import *
from tensorflow.python.ops.functional_ops import *
from tensorflow.python.ops.gradients import *
from tensorflow.python.ops.histogram_ops import *
from tensorflow.python.ops.init_ops import *
from tensorflow.python.ops.io_ops import *
from tensorflow.python.ops.linalg_ops import *
from tensorflow.python.ops.logging_ops import Print
from tensorflow.python.ops.logging_ops import get_summary_op
from tensorflow.python.ops.logging_ops import timestamp
from tensorflow.python.ops.lookup_ops import initialize_all_tables
from tensorflow.python.ops.lookup_ops import tables_initializer
from tensorflow.python.ops.manip_ops import *
from tensorflow.python.ops.math_ops import *
from tensorflow.python.ops.numerics import *
from tensorflow.python.ops.parsing_ops import *
from tensorflow.python.ops.partitioned_variables import *
from tensorflow.python.ops.random_ops import *
from tensorflow.python.ops.script_ops import py_func
from tensorflow.python.ops.session_ops import *
from tensorflow.python.ops.sparse_ops import *
from tensorflow.python.ops.state_ops import assign
from tensorflow.python.ops.state_ops import assign_add
from tensorflow.python.ops.state_ops import assign_sub
from tensorflow.python.ops.state_ops import count_up_to
from tensorflow.python.ops.state_ops import scatter_add
from tensorflow.python.ops.state_ops import scatter_div
from tensorflow.python.ops.state_ops import scatter_mul
from tensorflow.python.ops.state_ops import scatter_sub
from tensorflow.python.ops.state_ops import scatter_min
from tensorflow.python.ops.state_ops import scatter_max
from tensorflow.python.ops.state_ops import scatter_update
from tensorflow.python.ops.state_ops import scatter_nd_add
from tensorflow.python.ops.state_ops import scatter_nd_sub
# TODO(simister): Re-enable once binary size increase due to scatter_nd
# ops is under control.
# from tensorflow.python.ops.state_ops import scatter_nd_mul
# from tensorflow.python.ops.state_ops import scatter_nd_div
from tensorflow.python.ops.state_ops import scatter_nd_update
from tensorflow.python.ops.string_ops import *
from tensorflow.python.ops.template import *
from tensorflow.python.ops.tensor_array_ops import *
from tensorflow.python.ops.variable_scope import *
from tensorflow.python.ops.variables import *
# pylint: enable=wildcard-import
# pylint: enable=g-bad-import-order
#### For use in remove_undocumented below:
from tensorflow.python.framework import constant_op as _constant_op
from tensorflow.python.ops import array_ops as _array_ops
from tensorflow.python.ops import check_ops as _check_ops
from tensorflow.python.ops import clip_ops as _clip_ops
from tensorflow.python.ops import confusion_matrix as _confusion_matrix
from tensorflow.python.ops import control_flow_ops as _control_flow_ops
from tensorflow.python.ops import data_flow_ops as _data_flow_ops
from tensorflow.python.ops import functional_ops as _functional_ops
from tensorflow.python.ops import gradients as _gradients
from tensorflow.python.ops import histogram_ops as _histogram_ops
from tensorflow.python.ops import init_ops as _init_ops
from tensorflow.python.ops import io_ops as _io_ops
from tensorflow.python.ops import linalg_ops as _linalg_ops
from tensorflow.python.ops import logging_ops as _logging_ops
from tensorflow.python.ops import manip_ops as _manip_ops
from tensorflow.python.ops import math_ops as _math_ops
from tensorflow.python.ops import numerics as _numerics
from tensorflow.python.ops import parsing_ops as _parsing_ops
from tensorflow.python.ops import partitioned_variables as _partitioned_variables
from tensorflow.python.ops import random_ops as _random_ops
from tensorflow.python.ops import script_ops as _script_ops
from tensorflow.python.ops import session_ops as _session_ops
from tensorflow.python.ops import sparse_ops as _sparse_ops
from tensorflow.python.ops import special_math_ops as _special_math_ops
from tensorflow.python.ops import state_ops as _state_ops
from tensorflow.python.ops import string_ops as _string_ops
from tensorflow.python.ops import template as _template
from tensorflow.python.ops import tensor_array_ops as _tensor_array_ops
from tensorflow.python.ops import variable_scope as _variable_scope
from tensorflow.python.ops import variables as _variables
# Whitelists of symbols that remove_undocumented() (called below) must keep
# exported from this module even though the generated docs do not reference
# them.  Grouped by the submodule each symbol originates from.
_allowed_symbols_math_ops = [
    # TODO(drpng): decide if we want to reference these in the documentation.
    "reduced_shape",
    "sparse_segment_mean_grad",
    "sparse_segment_sqrt_n_grad",
    # Legacy: will be removed.
    "arg_max",
    "arg_min",
    "lin_space",
    "sparse_matmul",  # Use tf.matmul.
    # Deprecated (see versions.h):
    "batch_fft",
    "batch_fft2d",
    "batch_fft3d",
    "batch_ifft",
    "batch_ifft2d",
    "batch_ifft3d",
    "mul",  # use tf.multiply instead.
    "neg",  # use tf.negative instead.
    "sub",  # use tf.subtract instead.
    # These are documented in nn.
    # We are not importing nn because it would create a circular dependency.
    "sigmoid",
    "log_sigmoid",
    "tanh",
]
_allowed_symbols_array_ops = [
    # TODO(drpng): make sure they are documented.
    # Scalars:
    "NEW_AXIS",
    "SHRINK_AXIS",
    "newaxis",
    # Documented in training.py.
    # I do not import train, to avoid circular dependencies.
    # TODO(drpng): this is defined in gen_array_ops, clearly not the right
    # place.
    "stop_gradient",
    # See gen_docs_combined for tf.copy documentation.
    "copy",
    ## TODO(drpng): make them inaccessible directly.
    ## TODO(drpng): Below, to-doc means that we need to find an appropriate
    ## documentation section to reference.
    ## For re-exporting to tf.*:
    "constant",
    "edit_distance",  # to-doc
    # From gen_array_ops:
    "copy_host",  # to-doc
    "immutable_const",  # to-doc
    "invert_permutation",  # to-doc
    "quantize_and_dequantize",  # to-doc
    # TODO(drpng): legacy symbols to be removed.
    "batch_matrix_diag",
    "batch_matrix_band_part",
    "batch_matrix_diag_part",
    "batch_matrix_set_diag",
]
_allowed_symbols_partitioned_variables = [
    "PartitionedVariable",  # Requires doc link.
    # Legacy.
    "create_partitioned_variables",
    "variable_axis_size_partitioner",
    "min_max_variable_partitioner",
    "fixed_size_partitioner",
]
_allowed_symbols_control_flow_ops = [
    # TODO(drpng): Find a place in the documentation to reference these or
    # remove.
    "control_trigger",
    "loop_cond",
    "merge",
    "switch",
]
_allowed_symbols_functional_ops = [
    "nest",  # Used by legacy code.
]
_allowed_symbols_gradients = [
    # Documented in training.py:
    # Not importing training.py to avoid complex graph dependencies.
    "AggregationMethod",
    "GradientTape",
    "custom_gradient",
    "gradients",  # tf.gradients = gradients.gradients
    "hessians",
]
_allowed_symbols_clip_ops = [
    # Documented in training.py:
    # Not importing training.py to avoid complex graph dependencies.
    "clip_by_average_norm",
    "clip_by_global_norm",
    "clip_by_norm",
    "clip_by_value",
    "global_norm",
]
_allowed_symbols_logging_ops = [
    # Documented in training.py.
    # We are not importing training.py to avoid complex dependencies.
    "audio_summary",
    "histogram_summary",
    "image_summary",
    "merge_all_summaries",
    "merge_summary",
    "scalar_summary",
    # TODO(drpng): link in training.py if it should be documented.
    "get_summary_op",
]
_allowed_symbols_variable_scope_ops = [
    "get_local_variable",  # Documented in framework package.
]
_allowed_symbols_misc = [
    "deserialize_many_sparse",
    "parse_single_sequence_example",
    "serialize_many_sparse",
    "serialize_sparse",
    "confusion_matrix",
]
# Union of all per-submodule whitelists; passed to remove_undocumented() below.
_allowed_symbols = (_allowed_symbols_array_ops +
                    _allowed_symbols_clip_ops +
                    _allowed_symbols_control_flow_ops +
                    _allowed_symbols_functional_ops +
                    _allowed_symbols_gradients +
                    _allowed_symbols_logging_ops +
                    _allowed_symbols_math_ops +
                    _allowed_symbols_variable_scope_ops +
                    _allowed_symbols_misc +
                    _allowed_symbols_partitioned_variables)
# Strip from this module's namespace every name that is neither listed in
# _allowed_symbols nor documented in one of the modules enumerated here.
remove_undocumented(__name__, _allowed_symbols, [
    _sys.modules[__name__],
    _array_ops,
    _check_ops,
    _clip_ops,
    _confusion_matrix,
    _control_flow_ops,
    _constant_op,
    _data_flow_ops,
    _functional_ops,
    _gradients,
    _histogram_ops,
    _init_ops,
    _io_ops,
    _linalg_ops,
    _logging_ops,
    _manip_ops,
    _math_ops,
    _numerics,
    _parsing_ops,
    _partitioned_variables,
    _random_ops,
    _script_ops,
    _session_ops,
    _sparse_ops,
    _special_math_ops,
    _state_ops,
    _string_ops,
    _template,
    _tensor_array_ops,
    _variable_scope,
    _variables,
])
| true | true |
f71f992d007cb05563bc79a20eaf79c8910f3047 | 7,240 | py | Python | run_scripts/FreeSurfer/nipype_reconall_with_tracker.py | neurodatascience/watts_up_compute | 1ed41e62690f99f699b44180208689cc19616bb7 | [
"MIT"
] | null | null | null | run_scripts/FreeSurfer/nipype_reconall_with_tracker.py | neurodatascience/watts_up_compute | 1ed41e62690f99f699b44180208689cc19616bb7 | [
"MIT"
] | null | null | null | run_scripts/FreeSurfer/nipype_reconall_with_tracker.py | neurodatascience/watts_up_compute | 1ed41e62690f99f699b44180208689cc19616bb7 | [
"MIT"
] | null | null | null | # Import modules
import os
import sys
from os.path import join as opj
import pandas as pd
import time
from nipype.interfaces.freesurfer import ReconAll
from nipype.interfaces.utility import IdentityInterface
from nipype.pipeline.engine import Workflow, Node
from pypapi import events, papi_high as high
import argparse
# Add paths (singularity should see these)
# FastSurfer and carbon trackers are in the mounted dir as these repos keep getting updated.
# TODO replace this with setup.py once the dependencis become stable
# sys.path.append('../../../experiment-impact-tracker/')
# sys.path.append('../../../codecarbon/')
from experiment_impact_tracker.compute_tracker import ImpactTracker
from codecarbon import EmissionsTracker, OfflineEmissionsTracker
def get_reconall(recon_directive,fs_folder):
    """Build the nipype Node that wraps the FreeSurfer ``recon-all`` command.

    Parameters
    ----------
    recon_directive : str
        recon-all stage to run (e.g. ``autorecon1``).
    fs_folder : str
        FreeSurfer subjects directory for the outputs.

    Returns
    -------
    nipype.pipeline.engine.Node
        Node named ``"reconall"`` running recon-all with the
        ``-nuintensitycor -3T`` flags.
    """
    interface = ReconAll(
        directive=recon_directive,
        flags='-nuintensitycor -3T',
        subjects_dir=fs_folder,
    )
    return Node(interface, name="reconall")
# This function returns for each subject the path to struct.nii.gz
def pathfinder(subject, foldername, filename):
    """Return the path ``<foldername>/<subject>/<filename>`` for a subject.

    The import lives inside the function body on purpose: nipype serialises
    connected helper functions and executes them in isolation, so the
    function must be self-contained.
    """
    from os.path import join as opj
    return opj(foldername, subject, filename)
def _build_arg_parser():
    """Create the command-line parser for the recon-all tracking script."""
    # NOTE(review): the epilog id string looks copied from a FastSurfer
    # script; kept unchanged to avoid altering user-visible output.
    parser = argparse.ArgumentParser(description='Script to run freesurfer reconall with nipype and track compute costs', epilog='$Id: fast_surfer_cnn, v 1.0 2019/09/30$')
    # Data
    parser.add_argument('--experiment_dir', dest='experiment_dir', help='path to directory to store freesurfer derived data.')
    parser.add_argument('--data_dir', help="path to input data", default='/neurohub/ukbb/imaging/')
    parser.add_argument('--subject_id', dest='subject_id', help='subject_id')
    parser.add_argument('--T1_identifier', help='T1 identifier string relateive to the subject directory')
    # FreeSurfer
    parser.add_argument('--recon_directive', dest='recon_directive', help='recon_directive (autorecon 1, 2, or 3)', default='1')
    # Trackers
    parser.add_argument('--tracker_log_dir', dest='tracker_log_dir',
                        help="log dir for experiment impact tracker",
                        type=str, default='./tracker_logs/')
    parser.add_argument('--geo_loc', dest='geo_loc',
                        help="(lat,log) coords for experiment impact tracker",
                        type=str, default='45.4972159,-73.6103642')  # MTL Beluga
    parser.add_argument('--CC_offline',
                        help="Run CC in offline mode",
                        action='store_true')
    parser.add_argument('--TZ', dest='TZ',
                        help="TimeZone",
                        type=str, default='America/New_York')
    parser.add_argument('--iso_code', dest='iso_code',
                        help="Country ISO code",
                        type=str, default='USA')
    # PAPI
    parser.add_argument('--count_FLOPs', dest='count_FLOPs', help="Count FLOPs using PAPI", action='store_true')
    return parser


def main():
    """Run FreeSurfer recon-all for one subject while tracking compute costs.

    Parses command-line options, starts the energy/carbon trackers
    (experiment-impact-tracker and CodeCarbon, optionally offline), optionally
    starts PAPI double-precision FLOP counters, then runs the requested
    recon-all stage(s) through a nipype workflow and writes tracker / FLOP
    logs under ``--tracker_log_dir``.
    """
    args = _build_arg_parser().parse_args()

    # Data
    experiment_dir = args.experiment_dir
    data_dir = args.data_dir
    subject_id = args.subject_id
    T1_identifier = args.T1_identifier
    # FreeSurfer
    recon_directive = args.recon_directive
    # FLOPs
    count_FLOPs = args.count_FLOPs
    # Trackers
    tracker_log_dir = args.tracker_log_dir
    geo_loc = args.geo_loc
    CC_offline = args.CC_offline
    TZ = args.TZ
    iso_code = args.iso_code

    print(f'Using offline mode for CC tracker: {CC_offline}')
    if CC_offline:
        print(f'Using {TZ} timezone and {iso_code} country iso code')

    print(f'Starting subject: {subject_id}')

    # One log directory per subject, with a sub-directory per tracker.
    log_dir = '{}/{}/'.format(tracker_log_dir, subject_id)
    log_dir_EIT = f'{log_dir}/EIT/'
    log_dir_CC = f'{log_dir}/CC/'
    for d in [log_dir_EIT, log_dir_CC]:
        os.makedirs(d, exist_ok=True)

    # Use specified geo location for the HPC (the impact tracker needs
    # coordinates to estimate the local energy mix).
    parts = geo_loc.split(',')
    coords = (float(parts[0]), float(parts[1]))
    print(f'Using geographical coordinates (long,lat): {coords}')

    # experiment-impact-tracker: monitors in a background process.
    tracker_EIT = ImpactTracker(log_dir_EIT, coords)
    tracker_EIT.launch_impact_monitor()

    # CodeCarbon tracker; offline mode avoids network lookups on clusters.
    os.environ['TZ'] = TZ
    if CC_offline:
        tracker_CC = OfflineEmissionsTracker(output_dir=log_dir_CC, country_iso_code=iso_code)
    else:
        tracker_CC = EmissionsTracker(output_dir=log_dir_CC)
    tracker_CC.start()

    if count_FLOPs:
        print('Counting flops using PAPI')
        flop_csv = tracker_log_dir + 'compute_costs_flop.csv'
        flop_df = pd.DataFrame(columns=['task', 'start_time', 'duration', 'DP'])

    # Start FS processing for a given subject
    subject_list = [subject_id]
    fs_folder = opj(experiment_dir, 'freesurfer')  # location of freesurfer folder
    # FreeSurfer can only run if this folder exists (mkdir -p semantics,
    # without shelling out).
    os.makedirs(fs_folder, exist_ok=True)

    # Specify recon workflow stages
    if recon_directive == 'all':
        recon_directives = ['autorecon1', 'autorecon2', 'autorecon3']
    else:
        recon_directives = [recon_directive]

    for r, recon_directive in enumerate(recon_directives):
        print('\nStarting stage: {}'.format(recon_directive))

        # Pipeline that runs the recon-all command for this stage.
        reconflow = Workflow(name="reconflow")
        reconflow.base_dir = opj(experiment_dir, 'workingdir_reconflow')

        # Feeds subject ids into the workflow, one iteration per subject.
        infosource = Node(IdentityInterface(fields=['subject_id']), name="infosource")
        infosource.iterables = ('subject_id', subject_list)

        # Recon-all node for the current stage.
        reconall = get_reconall(recon_directive, fs_folder)

        # Wire the subject id and the resolved T1 path into the recon-all node.
        reconflow.connect([(infosource, reconall, [('subject_id', 'subject_id')]),
                           (infosource, reconall, [(('subject_id', pathfinder,
                                                     data_dir, T1_identifier),
                                                    'T1_files')]),
                           ])

        if count_FLOPs:
            # start flop counter
            start_time = time.time()
            high.start_counters([events.PAPI_DP_OPS, ])  # default: PAPI_FP_OPS

        # This command runs the recon-all pipeline (optionally in parallel):
        # reconflow.run('MultiProc', plugin_args={'n_procs': 4})
        reconflow.run()

        if count_FLOPs:
            # stop flop counter
            DP = high.stop_counters()[0]
            duration = time.time() - start_time
            print('Duration: {}, Flops: {}'.format(duration, DP))
            flop_df.loc[r] = [recon_directive, start_time, duration, DP]

    ## code-carbon tracker
    tracker_CC.stop()

    if count_FLOPs:
        flop_df.to_csv(flop_csv)
# Standard script entry point.
if __name__=='__main__':
    main()
| 36.938776 | 171 | 0.642403 |
import os
import sys
from os.path import join as opj
import pandas as pd
import time
from nipype.interfaces.freesurfer import ReconAll
from nipype.interfaces.utility import IdentityInterface
from nipype.pipeline.engine import Workflow, Node
from pypapi import events, papi_high as high
import argparse
from experiment_impact_tracker.compute_tracker import ImpactTracker
from codecarbon import EmissionsTracker, OfflineEmissionsTracker
def get_reconall(recon_directive,fs_folder):
reconall = Node(ReconAll(directive=recon_directive,
flags='-nuintensitycor -3T',
subjects_dir=fs_folder),
name="reconall")
return reconall
def pathfinder(subject, foldername, filename):
from os.path import join as opj
struct_path = opj(foldername, subject, filename)
return struct_path
def main():
exp_start_time = time.time()
parser = argparse.ArgumentParser(description='Script to run freesurfer reconall with nipype and track compute costs', epilog='$Id: fast_surfer_cnn, v 1.0 2019/09/30$')
parser.add_argument('--experiment_dir', dest='experiment_dir', help='path to directory to store freesurfer derived data.')
parser.add_argument('--data_dir', help="path to input data", default='/neurohub/ukbb/imaging/')
parser.add_argument('--subject_id', dest='subject_id', help='subject_id')
parser.add_argument('--T1_identifier', help='T1 identifier string relateive to the subject directory')
parser.add_argument('--recon_directive', dest='recon_directive', help='recon_directive (autorecon 1, 2, or 3)', default='1')
parser.add_argument('--tracker_log_dir', dest='tracker_log_dir',
help="log dir for experiment impact tracker",
type=str, default='./tracker_logs/')
parser.add_argument('--geo_loc', dest='geo_loc',
help="(lat,log) coords for experiment impact tracker",
type=str, default='45.4972159,-73.6103642')
parser.add_argument('--CC_offline',
help="Run CC in offline mode",
action='store_true')
parser.add_argument('--TZ', dest='TZ',
help="TimeZone",
type=str, default='America/New_York')
parser.add_argument('--iso_code', dest='iso_code',
help="Country ISO code",
type=str, default='USA')
parser.add_argument('--count_FLOPs', dest='count_FLOPs',help="Count FLOPs using PAPI",action='store_true')
args = parser.parse_args()
experiment_dir = args.experiment_dir
data_dir = args.data_dir
subject_id = args.subject_id
T1_identifier = args.T1_identifier
recon_directive = args.recon_directive
count_FLOPs = args.count_FLOPs
tracker_log_dir = args.tracker_log_dir
geo_loc = args.geo_loc
CC_offline = args.CC_offline
TZ = args.TZ
iso_code = args.iso_code
print(f'Using offline mode for CC tracker: {CC_offline}')
if CC_offline:
print(f'Using {TZ} timezone and {iso_code} country iso code')
print(f'Starting subject: {subject_id}')
log_dir = '{}/{}/'.format(tracker_log_dir,subject_id)
log_dir_EIT = f'{log_dir}/EIT/'
log_dir_CC = f'{log_dir}/CC/'
for d in [log_dir_EIT,log_dir_CC]:
if not os.path.exists(d):
os.makedirs(d)
ly,lx = float(geo_loc.split(',')[0]), float(geo_loc.split(',')[1])
coords = (ly,lx)
print(f'Using geographical coordinates (long,lat): {coords}')
tracker_EIT = ImpactTracker(log_dir_EIT,coords)
tracker_EIT.launch_impact_monitor()
os.environ['TZ']= TZ
if CC_offline:
tracker_CC = OfflineEmissionsTracker(output_dir=log_dir_CC, country_iso_code=iso_code)
else:
tracker_CC = EmissionsTracker(output_dir=log_dir_CC)
tracker_CC.start()
if count_FLOPs:
print('Counting flops using PAPI')
flop_csv = tracker_log_dir + 'compute_costs_flop.csv'
flop_df = pd.DataFrame(columns=['task','start_time','duration','DP'])
subject_list = [subject_id]
fs_folder = opj(experiment_dir, 'freesurfer')
os.system('mkdir -p %s' % fs_folder)
if recon_directive == 'all':
recon_directives = ['autorecon1','autorecon2','autorecon3']
else:
recon_directives = [recon_directive]
for r, recon_directive in enumerate(recon_directives):
print('\nStarting stage: {}'.format(recon_directive))
reconflow = Workflow(name="reconflow")
reconflow.base_dir = opj(experiment_dir, 'workingdir_reconflow')
infosource = Node(IdentityInterface(fields=['subject_id']), name="infosource")
infosource.iterables = ('subject_id', subject_list)
reconall = get_reconall(recon_directive, fs_folder)
reconflow.connect([(infosource, reconall, [('subject_id', 'subject_id')]),
(infosource, reconall, [(('subject_id', pathfinder,
data_dir, T1_identifier),
'T1_files')]),
])
if count_FLOPs:
start_time = time.time()
high.start_counters([events.PAPI_DP_OPS,])
reconflow.run()
if count_FLOPs:
DP = high.stop_counters()[0]
end_time = time.time()
duration = end_time - start_time
print('Duration: {}, Flops: {}'.format(duration, DP))
flop_df.loc[r] = [recon_directive,start_time, duration, DP]
)
if count_FLOPs:
flop_df.to_csv(flop_csv)
if __name__=='__main__':
main()
| true | true |
f71f9baff849e1b3e85a4e00a676e11b093d2eb9 | 7,777 | py | Python | examples/vae.py | strint/myia | 3d00d3fb3df80ab7a264a724226c5f56c6ff1a8a | [
"MIT"
] | 222 | 2019-02-13T07:56:28.000Z | 2022-03-28T07:07:54.000Z | examples/vae.py | strint/myia | 3d00d3fb3df80ab7a264a724226c5f56c6ff1a8a | [
"MIT"
] | 107 | 2019-02-12T21:56:39.000Z | 2022-03-12T01:08:03.000Z | examples/vae.py | strint/myia | 3d00d3fb3df80ab7a264a724226c5f56c6ff1a8a | [
"MIT"
] | 27 | 2017-11-14T17:58:15.000Z | 2019-01-14T01:36:09.000Z | """Example of an MLP in Myia.
Myia is still a work in progress, and this example may change in the future.
"""
import time
from dataclasses import dataclass
import numpy
import torch
from numpy.random import RandomState
from torchvision import datasets, transforms
import myia.public_api as pub
from myia import ArithmeticData, myia, value_and_grad
from myia.api import to_device
from myia.debug import traceback # noqa
from myia.operations import array_exp, array_pow, random_initialize
###########
# Options #
###########
# Numeric precision used for all parameters and generated data.
dtype = "float32"
# Myia backend selection.
backend = "pytorch"
# backend = 'relay'  # Uncomment to use relay backend
device_type = "cpu"
# device_type = 'cuda'  # Uncomment to run on the gpu
# Per-backend device options, keyed by backend name.
backend_options_dict = {
    "pytorch": {"device": device_type},
    "relay": {"target": device_type, "device_id": 0},
}
backend_options = backend_options_dict[backend]
###############
# Hyperparams #
###############
# Learning rate, cast to the configured dtype.
lr = getattr(numpy, dtype)(0.01)
########
# Data #
########
# This just generates random data so we don't have to load a real dataset,
# but the model will work just as well on a real dataset.
def param(R, *size, dtype="float32"):
    """Draw a uniform random array with entries in [-1, 1) from generator R.

    Parameters
    ----------
    R : numpy.random.RandomState
        Source of randomness.
    *size : int
        Shape of the returned array.
    dtype : str or numpy dtype, keyword-only
        Element type of the result.  Defaults to ``"float32"``, matching the
        module-wide ``dtype`` option, so existing callers are unaffected.

    Returns
    -------
    numpy.ndarray
        Array of shape ``size`` drawn uniformly from [-1, 1).
    """
    # R.rand yields values in [0, 1); scale and shift them to [-1, 1).
    return numpy.array(R.rand(*size) * 2 - 1, dtype=dtype)
def generate_data(n, batch_size, input_size, target_size, *, seed=87):
    """Create ``n`` random (inputs, targets) batches.

    Each batch pairs an array of shape (batch_size, input_size) with one of
    shape (batch_size, target_size).  A fixed seed keeps the data
    reproducible across runs.
    """
    rng = RandomState(seed=seed)
    batches = []
    for _ in range(n):
        inputs = param(rng, batch_size, input_size)
        targets = param(rng, batch_size, target_size)
        batches.append((inputs, targets))
    return batches
def mlp_parameters(*layer_sizes, seed=90909):
    """Create one (W, b) pair per consecutive pair of layer sizes.

    For sizes (s0, s1, ..., sk) this yields k pairs, where W has shape
    (s_i, s_{i+1}) and b has shape (1, s_{i+1}).
    """
    rng = RandomState(seed=seed)
    pairs = []
    for n_in, n_out in zip(layer_sizes, layer_sizes[1:]):
        weights = param(rng, n_in, n_out)
        biases = param(rng, 1, n_out)
        pairs.append((weights, biases))
    return pairs
#########
# Model #
#########
# We generate a MLP model with some arbitrary number of layers and tanh
# activations.
@dataclass(frozen=True)
class Linear(ArithmeticData):
    """Fully connected (affine) layer: ``x @ W + b``."""
    # W: weight matrix of shape (in_features, out_features)
    W: "Weights array"
    # b: bias row vector of shape (1, out_features), broadcast over the batch
    b: "Biases vector"
    def apply(self, input):
        """Apply the affine transformation to a batch of row vectors."""
        return input @ self.W + self.b
@dataclass(frozen=True)
class Tanh(ArithmeticData):
    """Elementwise tanh activation layer (stateless)."""
    def apply(self, input):
        """Return tanh applied elementwise to ``input``."""
        return numpy.tanh(input)
@dataclass(frozen=True)
class Sequential(ArithmeticData):
    """Container layer that applies its sub-layers in order."""
    # layers: ordered collection of objects exposing an ``apply`` method
    layers: "Tuple of layers"
    def apply(self, x):
        """Thread ``x`` through every sub-layer, in order."""
        for layer in self.layers:
            x = layer.apply(x)
        return x
@dataclass(frozen=True)
class VAE(ArithmeticData):
    """Variational autoencoder: 784 -> 400 -> latent 20 -> 400 -> 784.

    fc1 encodes the input; fc21/fc22 produce the latent mean and
    log-variance; fc3/fc4 decode a latent sample back to pixel space.
    """
    # Encoder / decoder linear layers (see `model` construction below).
    fc1: "layer fc1"
    fc21: "layer fc21"
    fc22: "layer fc22"
    fc3: "layer fc3"
    fc4: "layer fc4"
    def encode(self, x):
        # Shared hidden layer, then separate heads for mean and log-variance.
        h1 = pub.relu(self.fc1.apply(x))
        return self.fc21.apply(h1), self.fc22.apply(h1)
    def reparameterize(self, mu, logvar, rstate):
        # Reparameterization trick: z = mu + eps * sigma.
        # NOTE(review): eps is drawn uniformly from [-1, 1) with the shape
        # hard-coded to (2, 20) (batch size 2, latent size 20) instead of the
        # standard Gaussian draw — confirm this is intentional.
        std = array_exp(0.5 * logvar)
        eps, rstate = pub.uniform(rstate, (2, 20), -1.0, 1.0)
        return mu + eps * std, rstate
    def decode(self, z):
        # Map a latent sample back to 784 sigmoid-activated outputs.
        h3 = pub.relu(self.fc3.apply(z))
        return pub.sigmoid(self.fc4.apply(h3))
    def forward(self, x, rstate):
        """Encode, sample, decode; returns (recon, mu, logvar, rstate)."""
        mu, logvar = self.encode(pub.reshape(x, (-1, 784)))
        z, rstate = self.reparameterize(mu, logvar, rstate)
        return self.decode(z), mu, logvar, rstate
# Randomly initialized (W, b) pairs for the five linear layers of the VAE.
# Each mlp_parameters(...) call creates a single-layer parameter list; [0]
# extracts its only (W, b) pair.
params = (
    mlp_parameters(*(784, 400))[0],
    mlp_parameters(*(400, 20))[0],
    mlp_parameters(*(400, 20))[0],
    mlp_parameters(*(20, 400))[0],
    mlp_parameters(*(400, 784))[0],
)
model = VAE(
    Linear(params[0][0], params[0][1]),
    Linear(params[1][0], params[1][1]),
    Linear(params[2][0], params[2][1]),
    Linear(params[3][0], params[3][1]),
    Linear(params[4][0], params[4][1]),
)
# Move the parameters onto the configured backend/device.
model = to_device(model, backend, backend_options, broaden=False)
# Reconstruction + KL divergence losses summed over all elements and batch
def loss_function(recon_x, x, mu, logvar):
    """VAE loss: reconstruction (BCE) plus KL divergence, summed over batch."""
    BCE = pub.binary_cross_entropy(
        recon_x, pub.reshape(x, (-1, 784)), reduction="sum"
    )
    # see Appendix B from VAE paper:
    # Kingma and Welling. Auto-Encoding Variational Bayes. ICLR, 2014
    # https://arxiv.org/abs/1312.6114
    # KL(q || N(0, I)) = -0.5 * sum(1 + log(sigma^2) - mu^2 - sigma^2)
    KLD = -0.5 * pub._sum(1 + logvar - array_pow(mu, 2) - array_exp(logvar))
    return BCE + KLD
def cost(model, data, rstate):
    """Run a forward pass and compute the loss.

    Returns the scalar loss value and the advanced random state.
    """
    recon_batch, mu, logvar, _rstate = model.forward(data, rstate)
    loss = loss_function(recon_batch, data, mu, logvar)
    return loss.item(), _rstate
@myia(backend=backend, backend_options=backend_options, return_backend=True)
def step(model, data, lr, rstate):
    """One SGD step: returns the loss, updated model, and new RNG state.

    value_and_grad returns cost(model, data, rstate) and dcost(...)/dmodel.
    The 'model' argument of value_and_grad can be omitted: by default the
    derivative wrt the first argument is returned.
    """
    (_cost, rstate), dmodel = value_and_grad(cost, "model")(
        model, data, rstate, dout=(1, 1)
    )
    return _cost, model - lr * dmodel, rstate
@myia(backend=backend, backend_options=backend_options, return_backend=True)
def step_eval(model, data, rstate):
    """Evaluation step: returns the loss and RNG state, with no gradients
    and no parameter update."""
    return cost(model, data, rstate)
@myia(backend=backend, backend_options=backend_options, return_backend=True)
def step_init_seed():
    """Initialize and return the backend random-state object (seed 1)."""
    return random_initialize(1)
# NOTE(review): duplicate of the lr assignment in the Hyperparams section
# above; harmless (same value) but redundant.
lr = getattr(numpy, dtype)(0.01)
if __name__ == "__main__":
    seed = 123
    cuda = False
    batch_size = 2
    epochs = 1
    torch.manual_seed(seed)
    # NOTE(review): `device` is unused below — training runs through Myia,
    # torch is only used for data loading.
    device = torch.device("cuda" if cuda else "cpu")
    kwargs = {"num_workers": 1, "pin_memory": True} if cuda else {}
    # MNIST training data (downloaded on first use).
    train_loader = torch.utils.data.DataLoader(
        datasets.MNIST(
            "../data",
            train=True,
            download=True,
            transform=transforms.ToTensor(),
        ),
        batch_size=batch_size,
        shuffle=True,
        **kwargs,
    )
    rand_state = step_init_seed()
    # Training loop: one SGD step per batch; costs are averaged per epoch.
    for _ in range(epochs):
        costs = []
        t0 = time.time()
        for i, (data, _) in enumerate(train_loader):
            print("i", i + 1, "/", len(train_loader))
            _cost, model, rand_state = step(
                model, data.reshape((batch_size, 784)).numpy(), lr, rand_state
            )
            costs.append(_cost)
        # Costs live on the backend; pull them back to the host for printing.
        costs = [float(c.from_device()) for c in costs]
        c = sum(costs) / len(costs)
        t = time.time() - t0
        print(f"Cost: {c:15.10f}\tTime: {t:15.10f}")
    # Held-out evaluation on the MNIST test split (no parameter updates).
    test_loader = torch.utils.data.DataLoader(
        datasets.MNIST("../data", train=False, transform=transforms.ToTensor()),
        batch_size=batch_size,
        shuffle=True,
        **kwargs,
    )
    costs = []
    t0 = time.time()
    for i, (data, _) in enumerate(test_loader):
        _cost, rand_state = step_eval(
            model, data.reshape((batch_size, 784)).numpy(), rand_state
        )
        costs.append(_cost)
    costs = [float(c.from_device()) for c in costs]
    c = sum(costs) / len(costs)
    t = time.time() - t0
    print(f"Cost: {c:15.10f}\tTime: {t:15.10f}")
import time
from dataclasses import dataclass
import numpy
import torch
from numpy.random import RandomState
from torchvision import datasets, transforms
import myia.public_api as pub
from myia import ArithmeticData, myia, value_and_grad
from myia.api import to_device
from myia.debug import traceback
from myia.operations import array_exp, array_pow, random_initialize
evice_id": 0},
}
backend_options = backend_options_dict[backend]
andomState(seed=seed)
return [
(param(R, batch_size, input_size), param(R, batch_size, target_size))
for i in range(n)
]
def mlp_parameters(*layer_sizes, seed=90909):
R = RandomState(seed=seed)
parameters = []
for i, o in zip(layer_sizes[:-1], layer_sizes[1:]):
W = param(R, i, o)
b = param(R, 1, o)
parameters.append((W, b))
return parameters
#########
# Model #
#########
# We generate a MLP model with some arbitrary number of layers and tanh
# activations.
@dataclass(frozen=True)
class Linear(ArithmeticData):
W: "Weights array"
b: "Biases vector"
def apply(self, input):
return input @ self.W + self.b
@dataclass(frozen=True)
class Tanh(ArithmeticData):
def apply(self, input):
return numpy.tanh(input)
@dataclass(frozen=True)
class Sequential(ArithmeticData):
layers: "Tuple of layers"
def apply(self, x):
for layer in self.layers:
x = layer.apply(x)
return x
@dataclass(frozen=True)
class VAE(ArithmeticData):
fc1: "layer fc1"
fc21: "layer fc21"
fc22: "layer fc22"
fc3: "layer fc3"
fc4: "layer fc4"
def encode(self, x):
h1 = pub.relu(self.fc1.apply(x))
return self.fc21.apply(h1), self.fc22.apply(h1)
def reparameterize(self, mu, logvar, rstate):
std = array_exp(0.5 * logvar)
eps, rstate = pub.uniform(rstate, (2, 20), -1.0, 1.0)
return mu + eps * std, rstate
def decode(self, z):
h3 = pub.relu(self.fc3.apply(z))
return pub.sigmoid(self.fc4.apply(h3))
def forward(self, x, rstate):
mu, logvar = self.encode(pub.reshape(x, (-1, 784)))
z, rstate = self.reparameterize(mu, logvar, rstate)
return self.decode(z), mu, logvar, rstate
params = (
mlp_parameters(*(784, 400))[0],
mlp_parameters(*(400, 20))[0],
mlp_parameters(*(400, 20))[0],
mlp_parameters(*(20, 400))[0],
mlp_parameters(*(400, 784))[0],
)
model = VAE(
Linear(params[0][0], params[0][1]),
Linear(params[1][0], params[1][1]),
Linear(params[2][0], params[2][1]),
Linear(params[3][0], params[3][1]),
Linear(params[4][0], params[4][1]),
)
model = to_device(model, backend, backend_options, broaden=False)
# Reconstruction + KL divergence losses summed over all elements and batch
def loss_function(recon_x, x, mu, logvar):
BCE = pub.binary_cross_entropy(
recon_x, pub.reshape(x, (-1, 784)), reduction="sum"
)
# see Appendix B from VAE paper:
# Kingma and Welling. Auto-Encoding Variational Bayes. ICLR, 2014
# https://arxiv.org/abs/1312.6114
# 0.5 * sum(1 + log(sigma^2) - mu^2 - sigma^2)
KLD = -0.5 * pub._sum(1 + logvar - array_pow(mu, 2) - array_exp(logvar))
return BCE + KLD
def cost(model, data, rstate):
recon_batch, mu, logvar, _rstate = model.forward(data, rstate)
loss = loss_function(recon_batch, data, mu, logvar)
return loss.item(), _rstate
@myia(backend=backend, backend_options=backend_options, return_backend=True)
def step(model, data, lr, rstate):
(_cost, rstate), dmodel = value_and_grad(cost, "model")(
model, data, rstate, dout=(1, 1)
)
return _cost, model - lr * dmodel, rstate
@myia(backend=backend, backend_options=backend_options, return_backend=True)
def step_eval(model, data, rstate):
return cost(model, data, rstate)
@myia(backend=backend, backend_options=backend_options, return_backend=True)
def step_init_seed():
return random_initialize(1)
lr = getattr(numpy, dtype)(0.01)
if __name__ == "__main__":
seed = 123
cuda = False
batch_size = 2
epochs = 1
torch.manual_seed(seed)
device = torch.device("cuda" if cuda else "cpu")
kwargs = {"num_workers": 1, "pin_memory": True} if cuda else {}
train_loader = torch.utils.data.DataLoader(
datasets.MNIST(
"../data",
train=True,
download=True,
transform=transforms.ToTensor(),
),
batch_size=batch_size,
shuffle=True,
**kwargs,
)
rand_state = step_init_seed()
for _ in range(epochs):
costs = []
t0 = time.time()
for i, (data, _) in enumerate(train_loader):
print("i", i + 1, "/", len(train_loader))
_cost, model, rand_state = step(
model, data.reshape((batch_size, 784)).numpy(), lr, rand_state
)
costs.append(_cost)
costs = [float(c.from_device()) for c in costs]
c = sum(costs) / len(costs)
t = time.time() - t0
print(f"Cost: {c:15.10f}\tTime: {t:15.10f}")
test_loader = torch.utils.data.DataLoader(
datasets.MNIST("../data", train=False, transform=transforms.ToTensor()),
batch_size=batch_size,
shuffle=True,
**kwargs,
)
costs = []
t0 = time.time()
for i, (data, _) in enumerate(test_loader):
_cost, rand_state = step_eval(
model, data.reshape((batch_size, 784)).numpy(), rand_state
)
costs.append(_cost)
costs = [float(c.from_device()) for c in costs]
c = sum(costs) / len(costs)
t = time.time() - t0
print(f"Cost: {c:15.10f}\tTime: {t:15.10f}")
| true | true |
f71f9c72961197bdb094aca591c521ff5e6e78f6 | 2,944 | py | Python | scripts/cfg/cfg.py | jepler/aocl-libm-ose | 4033e022da428125747e118ccd6fdd9cee21c470 | [
"BSD-3-Clause"
] | 66 | 2020-11-04T17:06:10.000Z | 2022-03-10T08:03:12.000Z | scripts/cfg/cfg.py | HollowMan6/aocl-libm-ose | 4033e022da428125747e118ccd6fdd9cee21c470 | [
"BSD-3-Clause"
] | 8 | 2021-04-18T18:37:53.000Z | 2022-03-11T12:49:31.000Z | scripts/cfg/cfg.py | HollowMan6/aocl-libm-ose | 4033e022da428125747e118ccd6fdd9cee21c470 | [
"BSD-3-Clause"
] | 8 | 2020-11-09T03:45:01.000Z | 2021-11-08T02:25:31.000Z | #
# Copyright (C) 2008-2020 Advanced Micro Devices, Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from SCons.Variables import Variables as SVariables
from SCons.Script import AddOption
class LocalOption:
    """Registers SCons command-line options while accumulating their help text.

    SCons' AddOption does not surface custom help nicely, so this wrapper
    collects a pre-formatted help string that can be printed separately.
    """

    # Column at which the option description starts.
    _COL_WIDTH = 30

    def __init__(self):
        # Accumulated, already-formatted help lines for every option added.
        self.__help_texts = ""

    def Add(self, *args, **kwargs):
        """Register an option via AddOption and record its help entry."""
        entry = " " + ", ".join(args)
        if "help" in kwargs:
            pad = self._COL_WIDTH - len(entry)
            if pad <= 0:
                # Flag list is too long: wrap description to an aligned line.
                entry += "\n" + " " * self._COL_WIDTH
            else:
                entry += " " * pad
            entry += kwargs["help"]
        self.__help_texts += entry + "\n"
        AddOption(*args, **kwargs)

    def GetHelpTexts(self):
        """Return the accumulated help text for all registered options."""
        return self.__help_texts
class Variables(SVariables):
    """SCons Variables subclass that can mark variables as required.

    Required variables that are missing from the environment after Update()
    are reported as violations.
    """

    def __init__(self, files=None, args=None, is_global=1):
        # Use None sentinels instead of mutable default arguments.
        self.required = []
        # Bug fix: super(self.__class__, self) recurses infinitely when this
        # class is subclassed; name the class explicitly.
        super(Variables, self).__init__(files if files is not None else [],
                                        args if args is not None else {},
                                        is_global)

    def Add(self, key, help="", default=None, validator=None, converter=None, required=False):
        """Add a variable; when *required* is True, track it for Update()."""
        SVariables.Add(self, key, help, default, validator, converter)
        if required:
            # NOTE(review): key[0] assumes tuple/list keys; for a plain string
            # key this records only its first character -- TODO confirm intent.
            print("adding required option ", key[0])
            self.required.append(key[0])

    def Update(self, env):
        """Update *env*, then report required variables missing from it."""
        print("required options are: ", self.required)
        SVariables.Update(self, env)
        for requirement in self.required:
            # Bug fix: dict.has_key() was removed in Python 3; use `in`.
            if requirement not in env:
                print('violation: ', requirement)
| 41.464789 | 94 | 0.686481 |
from SCons.Variables import Variables as SVariables
from SCons.Script import AddOption
class LocalOption:
def __init__(self):
self.__help_texts = ""
pass
def Add(self, *args, **kwargs):
col_width = 30
help = " " + ", ".join(args)
if "help" in kwargs:
length = len(help)
if length >= col_width:
help += "\n" + " " * col_width
else:
help += " " * (col_width - length)
help += kwargs["help"]
self.__help_texts += help + "\n"
AddOption(*args, **kwargs)
def GetHelpTexts(self):
return self.__help_texts
class Variables(SVariables):
def __init__(self, files=[], args={}, is_global=1):
self.required = []
super(self.__class__,self).__init__(files, args, is_global)
def Add(self, key, help="", default=None, validator=None, converter=None, required=False):
SVariables.Add(self, key, help, default, validator, converter)
if required:
print("adding required option ", key[0])
self.required.append(key[0])
def Update(self, env):
print("required options are: ", self.required)
SVariables.Update(self, env)
for requirement in self.required:
if not env.has_key(requirement):
print('violation: ', requirement)
| true | true |
f71f9c7e66717452a4ba40fb6f3d1934e7331d68 | 7,731 | py | Python | sadedegel/bblock/vocabulary.py | GlobalMaksimum/sadedegel | 8e28dbeabc3bf0d6f2222089ac5e3a849f9d3a6b | [
"MIT"
] | 100 | 2020-07-06T05:50:49.000Z | 2022-03-21T21:56:55.000Z | sadedegel/bblock/vocabulary.py | LyotardPostmodernizm/sadedegel | 8e28dbeabc3bf0d6f2222089ac5e3a849f9d3a6b | [
"MIT"
] | 244 | 2020-07-06T06:31:01.000Z | 2022-02-26T10:40:17.000Z | sadedegel/bblock/vocabulary.py | LyotardPostmodernizm/sadedegel | 8e28dbeabc3bf0d6f2222089ac5e3a849f9d3a6b | [
"MIT"
] | 23 | 2020-07-27T16:32:48.000Z | 2022-03-18T11:13:07.000Z | import warnings
from collections import defaultdict
from os.path import dirname
from pathlib import Path
import h5py
import numpy as np
from cached_property import cached_property
from rich.console import Console
from .util import tr_lower, normalize_tokenizer_name
console = Console()
class InvalidTokenizer(Exception):
    """Raised when an unsupported tokenizer name is supplied."""
def vocabulary_file(tokenizer: str, verify_exists=True):
    """Return the path of the vocabulary HDF5 file for *tokenizer*.

    Raises InvalidTokenizer for unsupported tokenizer names, and
    FileNotFoundError when *verify_exists* is True and the file is absent.
    """
    normalized_name = normalize_tokenizer_name(tokenizer)
    if normalized_name not in ['bert', 'icu', 'simple']:
        # Bug fix: the second string was missing its `f` prefix, so the
        # literal text "{normalized_name}" appeared in the error message.
        raise InvalidTokenizer(
            (f"Currently only valid tokenizers are BERT, ICU Tokenizer for vocabulary generation."
             f" {normalized_name} found"))
    vocab_file = Path(dirname(__file__)) / 'data' / normalized_name / 'vocabulary.hdf5'
    if not vocab_file.exists() and verify_exists:
        raise FileNotFoundError(f"Vocabulary file for {tokenizer} ({normalized_name}) tokenizer not found.")
    return vocab_file
class VocabularyCounter:
    """Accumulates term and document frequencies to build a vocabulary.

    Counts can be case sensitive or case folded (via tr_lower), pruned by
    minimum term/document frequency, and serialized to an HDF5 file.
    """

    def __init__(self, tokenizer, case_sensitive=True, min_tf=1, min_df=1):
        self.tokenizer = tokenizer
        self.doc_counter = defaultdict(set)  # word -> ids of docs containing it
        self.doc_set = set()  # every document id observed
        self.term_freq = defaultdict(int)  # word -> corpus-wide term frequency
        self.min_tf = min_tf
        self.min_df = min_df
        self.case_sensitive = case_sensitive

    def inc(self, word: str, document_id: int, count: int = 1):
        """Count *count* occurrences of *word* in document *document_id*."""
        if self.case_sensitive:
            w = word
        else:
            w = tr_lower(word)
        self.doc_counter[w].add(document_id)
        self.doc_set.add(document_id)
        self.term_freq[w] += count

    def add_word_to_doc(self, word: str, document_id: int):
        """Implemented for backward compatibility"""
        self.inc(word, document_id, 1)

    @property
    def vocabulary_size(self):
        """Number of distinct words counted so far."""
        return len(self.term_freq)

    @property
    def document_count(self):
        """Number of distinct documents seen so far."""
        return len(self.doc_set)

    def prune(self):
        """Drop words failing the min_tf/min_df thresholds; return self."""
        to_remove = []
        for w in self.term_freq:
            if self.term_freq[w] < self.min_tf or len(self.doc_counter[w]) < self.min_df:
                to_remove.append(w)
        for w in to_remove:
            del self.doc_counter[w]
            del self.term_freq[w]
        console.log(
            f"{len(to_remove)} terms (case sensitive={self.case_sensitive}) are pruned by tf (>= {self.min_tf}) or df filter(>= {self.min_df})")
        return self

    def df(self, w: str):
        """Document frequency of *w* (case folded when case insensitive)."""
        if self.case_sensitive:
            return len(self.doc_counter[w])
        else:
            return len(self.doc_counter[tr_lower(w)])

    def tf(self, w: str):
        """Term frequency of *w* (case folded when case insensitive)."""
        if self.case_sensitive:
            return self.term_freq[w]
        else:
            return self.term_freq[tr_lower(w)]

    def to_hdf5(self, w2v=None):
        """Write vocabulary statistics (and optional word vectors) to HDF5."""
        with h5py.File(vocabulary_file(self.tokenizer, verify_exists=False), "a") as fp:
            if self.case_sensitive:
                group = fp.create_group("form_")
            else:
                group = fp.create_group("lower_")
            words = sorted(list(self.term_freq.keys()), key=lambda w: tr_lower(w))
            group.attrs['size'] = len(words)
            group.attrs['document_count'] = len(self.doc_set)
            group.attrs['tokenizer'] = self.tokenizer
            group.attrs['min_tf'] = self.min_tf
            group.attrs['min_df'] = self.min_df
            if w2v is not None:
                group.attrs['vector_size'] = w2v.vector_size
                group.create_dataset("vector", data=np.array(
                    [w2v[w] if w in w2v else np.zeros(w2v.vector_size) for w in words]).astype(
                    np.float32),
                                     compression="gzip",
                                     compression_opts=9)
                # Bug fix: the original wrote `w in w2v in w2v`, a chained
                # comparison meaning `(w in w2v) and (w2v in w2v)`, which is
                # not the intended membership test.
                group.create_dataset("has_vector", data=np.array([w in w2v for w in words]),
                                     compression="gzip",
                                     compression_opts=9)
            group.create_dataset("word", data=words, compression="gzip", compression_opts=9)
            group.create_dataset("df", data=np.array([self.df(w) for w in words]), compression="gzip",
                                 compression_opts=9)
            group.create_dataset("tf", data=np.array([self.tf(w) for w in words]), compression="gzip",
                                 compression_opts=9)
        console.print(f"|D|: {self.document_count}, |V|: {self.vocabulary_size} (case sensitive={self.case_sensitive})")
class Vocabulary:
    """Read-only accessor for a tokenizer's vocabulary stored in HDF5.

    Word->id maps and frequency/vector arrays are loaded lazily from the
    file on first use and cached on the instance afterwards. The HDF5 file
    has a case-sensitive "form_" group and a lowercased "lower_" group.
    """

    def __init__(self, tokenizer):
        self.tokenizer = tokenizer
        self.file_name = vocabulary_file(tokenizer)
        # Lazily populated caches, filled on first lookup.
        self._df = None
        self._df_cs = None
        self._has_vector = None
        self._vector = None
        self.dword_cs = None
        self.dword = None

    @cached_property
    def size_cs(self) -> int:
        """Vocabulary size of the case-sensitive ("form_") group."""
        with h5py.File(self.file_name, "r") as fp:
            return fp['form_'].attrs['size']

    @cached_property
    def size(self) -> int:
        """Vocabulary size of the lowercased ("lower_") group."""
        with h5py.File(self.file_name, "r") as fp:
            return fp['lower_'].attrs['size']

    def __len__(self):
        # Length is the lowercased vocabulary size.
        return self.size

    def id_cs(self, word: str, default: int = -1):
        """Case-sensitive id of *word*, or *default* when absent."""
        if self.dword_cs is None:
            # Both maps are loaded together so the file is opened only once.
            with h5py.File(self.file_name, "r") as fp:
                self.dword = dict((b.decode("utf-8"), i) for i, b in enumerate(list(fp['lower_']['word'])))
                self.dword_cs = dict((b.decode("utf-8"), i) for i, b in enumerate(list(fp['form_']['word'])))
        return self.dword_cs.get(word, default)

    def id(self, word: str, default: int = -1):
        """Case-insensitive id of *word* (looked up lowercased), or *default*."""
        if self.dword is None:
            with h5py.File(self.file_name, "r") as fp:
                self.dword = dict((b.decode("utf-8"), i) for i, b in enumerate(list(fp['lower_']['word'])))
                self.dword_cs = dict((b.decode("utf-8"), i) for i, b in enumerate(list(fp['form_']['word'])))
        return self.dword.get(tr_lower(word), default)

    def df(self, word: str):
        """Case-insensitive document frequency of *word* (0 when unknown)."""
        i = self.id(word)
        if i == -1:
            return 0
        else:
            if self._df is None:
                with h5py.File(self.file_name, "r") as fp:
                    self._df = np.array(fp['lower_']['df'])
            return self._df[i]

    def df_cs(self, word: str):
        """Case-sensitive document frequency of *word* (0 when unknown)."""
        i = self.id_cs(word)
        if i == -1:
            return 0
        else:
            if self._df_cs is None:
                with h5py.File(self.file_name, "r") as fp:
                    self._df_cs = np.array(fp['form_']['df'])
            return self._df_cs[i]

    def has_vector(self, word: str):
        """True when the vocabulary has a word vector for *word*.

        Returns False when the file was built without vectors or the word
        is not in the lowercased vocabulary.
        """
        with h5py.File(self.file_name, "r") as fp:
            if "has_vector" in fp['lower_']:
                i = self.id(word)
                if i == -1:
                    return False
                else:
                    if self._has_vector is None:
                        self._has_vector = np.array(fp['lower_']['has_vector'])
                    return self._has_vector[i]
            else:
                return False

    def vector(self, word: str):
        """Word vector for *word*.

        NOTE(review): returns False (not None) when no vector is available,
        so callers must check the result before treating it as an array.
        """
        # TODO: Performance improvement required
        with h5py.File(self.file_name, "r") as fp:
            if "vector" in fp['lower_']:
                i = self.id(word)
                if i == -1:
                    return False
                else:
                    if self._vector is None:
                        self._vector = np.array(fp['lower_']['vector'])
                    return self._vector[i, :]
            else:
                return False

    @cached_property
    def document_count(self):
        """Number of documents the vocabulary was built from."""
        with h5py.File(self.file_name, "r") as fp:
            return fp['form_'].attrs['document_count']
| 32.078838 | 144 | 0.56073 | import warnings
from collections import defaultdict
from os.path import dirname
from pathlib import Path
import h5py
import numpy as np
from cached_property import cached_property
from rich.console import Console
from .util import tr_lower, normalize_tokenizer_name
console = Console()
class InvalidTokenizer(Exception):
def vocabulary_file(tokenizer: str, verify_exists=True):
normalized_name = normalize_tokenizer_name(tokenizer)
if normalized_name not in ['bert', 'icu', 'simple']:
raise InvalidTokenizer(
(f"Currently only valid tokenizers are BERT, ICU Tokenizer for vocabulary generation."
" {normalized_name} found"))
vocab_file = Path(dirname(__file__)) / 'data' / normalized_name / 'vocabulary.hdf5'
if not vocab_file.exists() and verify_exists:
raise FileNotFoundError(f"Vocabulary file for {tokenizer} ({normalized_name}) tokenizer not found.")
return vocab_file
class VocabularyCounter:
def __init__(self, tokenizer, case_sensitive=True, min_tf=1, min_df=1):
self.tokenizer = tokenizer
self.doc_counter = defaultdict(set)
self.doc_set = set()
self.term_freq = defaultdict(int)
self.min_tf = min_tf
self.min_df = min_df
self.case_sensitive = case_sensitive
def inc(self, word: str, document_id: int, count: int = 1):
if self.case_sensitive:
w = word
else:
w = tr_lower(word)
self.doc_counter[w].add(document_id)
self.doc_set.add(document_id)
self.term_freq[w] += count
def add_word_to_doc(self, word: str, document_id: int):
self.inc(word, document_id, 1)
@property
def vocabulary_size(self):
return len(self.term_freq)
@property
def document_count(self):
return len(self.doc_set)
def prune(self):
to_remove = []
for w in self.term_freq:
if self.term_freq[w] < self.min_tf or len(self.doc_counter[w]) < self.min_df:
to_remove.append(w)
for w in to_remove:
del self.doc_counter[w]
del self.term_freq[w]
console.log(
f"{len(to_remove)} terms (case sensitive={self.case_sensitive}) are pruned by tf (>= {self.min_tf}) or df filter(>= {self.min_df})")
return self
def df(self, w: str):
if self.case_sensitive:
return len(self.doc_counter[w])
else:
return len(self.doc_counter[tr_lower(w)])
def tf(self, w: str):
if self.case_sensitive:
return self.term_freq[w]
else:
return self.term_freq[tr_lower(w)]
def to_hdf5(self, w2v=None):
with h5py.File(vocabulary_file(self.tokenizer, verify_exists=False), "a") as fp:
if self.case_sensitive:
group = fp.create_group("form_")
else:
group = fp.create_group("lower_")
words = sorted(list(self.term_freq.keys()), key=lambda w: tr_lower(w))
group.attrs['size'] = len(words)
group.attrs['document_count'] = len(self.doc_set)
group.attrs['tokenizer'] = self.tokenizer
group.attrs['min_tf'] = self.min_tf
group.attrs['min_df'] = self.min_df
if w2v is not None:
group.attrs['vector_size'] = w2v.vector_size
group.create_dataset("vector", data=np.array(
[w2v[w] if w in w2v else np.zeros(w2v.vector_size) for w in words]).astype(
np.float32),
compression="gzip",
compression_opts=9)
group.create_dataset("has_vector", data=np.array([w in w2v in w2v for w in words]),
compression="gzip",
compression_opts=9)
group.create_dataset("word", data=words, compression="gzip", compression_opts=9)
group.create_dataset("df", data=np.array([self.df(w) for w in words]), compression="gzip",
compression_opts=9)
group.create_dataset("tf", data=np.array([self.tf(w) for w in words]), compression="gzip",
compression_opts=9)
console.print(f"|D|: {self.document_count}, |V|: {self.vocabulary_size} (case sensitive={self.case_sensitive})")
class Vocabulary:
def __init__(self, tokenizer):
self.tokenizer = tokenizer
self.file_name = vocabulary_file(tokenizer)
self._df = None
self._df_cs = None
self._has_vector = None
self._vector = None
self.dword_cs = None
self.dword = None
@cached_property
def size_cs(self) -> int:
with h5py.File(self.file_name, "r") as fp:
return fp['form_'].attrs['size']
@cached_property
def size(self) -> int:
with h5py.File(self.file_name, "r") as fp:
return fp['lower_'].attrs['size']
def __len__(self):
return self.size
def id_cs(self, word: str, default: int = -1):
if self.dword_cs is None:
with h5py.File(self.file_name, "r") as fp:
self.dword = dict((b.decode("utf-8"), i) for i, b in enumerate(list(fp['lower_']['word'])))
self.dword_cs = dict((b.decode("utf-8"), i) for i, b in enumerate(list(fp['form_']['word'])))
return self.dword_cs.get(word, default)
def id(self, word: str, default: int = -1):
if self.dword is None:
with h5py.File(self.file_name, "r") as fp:
self.dword = dict((b.decode("utf-8"), i) for i, b in enumerate(list(fp['lower_']['word'])))
self.dword_cs = dict((b.decode("utf-8"), i) for i, b in enumerate(list(fp['form_']['word'])))
return self.dword.get(tr_lower(word), default)
def df(self, word: str):
i = self.id(word)
if i == -1:
return 0
else:
if self._df is None:
with h5py.File(self.file_name, "r") as fp:
self._df = np.array(fp['lower_']['df'])
return self._df[i]
def df_cs(self, word: str):
i = self.id_cs(word)
if i == -1:
return 0
else:
if self._df_cs is None:
with h5py.File(self.file_name, "r") as fp:
self._df_cs = np.array(fp['form_']['df'])
return self._df_cs[i]
def has_vector(self, word: str):
with h5py.File(self.file_name, "r") as fp:
if "has_vector" in fp['lower_']:
i = self.id(word)
if i == -1:
return False
else:
if self._has_vector is None:
self._has_vector = np.array(fp['lower_']['has_vector'])
return self._has_vector[i]
else:
return False
def vector(self, word: str):
with h5py.File(self.file_name, "r") as fp:
if "vector" in fp['lower_']:
i = self.id(word)
if i == -1:
return False
else:
if self._vector is None:
self._vector = np.array(fp['lower_']['vector'])
return self._vector[i, :]
else:
return False
@cached_property
def document_count(self):
with h5py.File(self.file_name, "r") as fp:
return fp['form_'].attrs['document_count']
| true | true |
f71f9cca7a4b7ebe90d9da227393163bb8dccc2f | 215 | py | Python | employee_management/employee_management/doctype/category_module_info/test_category_module_info.py | Vivekananthan112599/Frappe-Vivek | 6a2b70c736e17e9748c6a30e5722341acfb3b5c5 | [
"MIT"
] | null | null | null | employee_management/employee_management/doctype/category_module_info/test_category_module_info.py | Vivekananthan112599/Frappe-Vivek | 6a2b70c736e17e9748c6a30e5722341acfb3b5c5 | [
"MIT"
] | null | null | null | employee_management/employee_management/doctype/category_module_info/test_category_module_info.py | Vivekananthan112599/Frappe-Vivek | 6a2b70c736e17e9748c6a30e5722341acfb3b5c5 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (c) 2021, Gopi and Contributors
# See license.txt
from __future__ import unicode_literals
# import frappe
import unittest
class TestCategoryModuleInfo(unittest.TestCase):
    """Placeholder test case for the Category Module Info doctype."""
| 19.545455 | 48 | 0.767442 |
from __future__ import unicode_literals
import unittest
class TestCategoryModuleInfo(unittest.TestCase):
pass
| true | true |
f71f9d8b86afa01dd3ceaf3a886e43128a317c40 | 600 | py | Python | sources/t05/t05ej12.py | workready/pythonbasic | 59bd82caf99244f5e711124e1f6f4dec8de22141 | [
"MIT"
] | null | null | null | sources/t05/t05ej12.py | workready/pythonbasic | 59bd82caf99244f5e711124e1f6f4dec8de22141 | [
"MIT"
] | null | null | null | sources/t05/t05ej12.py | workready/pythonbasic | 59bd82caf99244f5e711124e1f6f4dec8de22141 | [
"MIT"
] | null | null | null | import argparse
parser = argparse.ArgumentParser(description="Este programa calcula X^Y")
group = parser.add_mutually_exclusive_group()
group.add_argument("-v", "--verbose", action="store_true")
group.add_argument("-q", "--quiet", action="store_true")
parser.add_argument("x", type=int, help="la base")
parser.add_argument("y", type=int, help="el exponente")
args = parser.parse_args()
answer = args.x**args.y
if args.quiet:
print(answer)
elif args.verbose:
print("{} elevado a {} es igual a {}".format(args.x, args.y, answer))
else:
print("{}^{} == {}".format(args.x, args.y, answer))
| 33.333333 | 73 | 0.695 | import argparse
parser = argparse.ArgumentParser(description="Este programa calcula X^Y")
group = parser.add_mutually_exclusive_group()
group.add_argument("-v", "--verbose", action="store_true")
group.add_argument("-q", "--quiet", action="store_true")
parser.add_argument("x", type=int, help="la base")
parser.add_argument("y", type=int, help="el exponente")
args = parser.parse_args()
answer = args.x**args.y
if args.quiet:
print(answer)
elif args.verbose:
print("{} elevado a {} es igual a {}".format(args.x, args.y, answer))
else:
print("{}^{} == {}".format(args.x, args.y, answer))
| true | true |
f71f9dd0c9b57edc5e44757006a6fb2ad0870d9a | 8,176 | py | Python | open_spiel/python/examples/hearts_supervised_learning.py | xujing1994/open_spiel | 7663a2717f16ff84c0d6a6bfdf19a9c21b37b765 | [
"Apache-2.0"
] | null | null | null | open_spiel/python/examples/hearts_supervised_learning.py | xujing1994/open_spiel | 7663a2717f16ff84c0d6a6bfdf19a9c21b37b765 | [
"Apache-2.0"
] | 4 | 2020-11-13T18:59:55.000Z | 2022-02-10T02:08:27.000Z | open_spiel/python/examples/hearts_supervised_learning.py | xujing1994/open_spiel | 7663a2717f16ff84c0d6a6bfdf19a9c21b37b765 | [
"Apache-2.0"
] | 1 | 2020-12-25T03:01:37.000Z | 2020-12-25T03:01:37.000Z | # Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Train a policy net on Hearts actions based given a dataset of trajectories.
Trajectories from the Hearts bot Xinxin can be generated using
open_spiel/games/hearts/xinxin_game_generator.cc.
"""
import os
import pickle
from typing import Any, Tuple
from absl import app
from absl import flags
import haiku as hk
import jax
from jax import numpy as jnp
from jax.experimental import optix
import numpy as np
import pyspiel
# Opaque type aliases for the optimizer state and haiku parameter pytree.
OptState = Any
Params = Any
FLAGS = flags.FLAGS
# Hearts constants: four players, one action per card of a 52-card deck.
GAME = pyspiel.load_game('hearts')
NUM_CARDS = 52
NUM_ACTIONS = NUM_CARDS
NUM_PLAYERS = 4
TOP_K_ACTIONS = 5  # How many alternative actions to display
DEFAULT_LAYER_SIZES = [1024, 1024, 1024, 1024]
# Command-line flags controlling training, evaluation, and checkpointing.
flags.DEFINE_integer('iterations', 100000, 'Number of iterations')
flags.DEFINE_string('data_path', None, 'Location for data')
flags.DEFINE_integer('eval_every', 10000, 'How often to evaluate the policy')
flags.DEFINE_integer('num_examples', 3,
                     'How many examples to print per evaluation')
flags.DEFINE_integer('train_batch', 128, 'Batch size for training step')
flags.DEFINE_integer('eval_batch', 10000, 'Batch size when evaluating')
flags.DEFINE_float('step_size', 1e-4, 'Step size for training')
flags.DEFINE_list('hidden_layer_sizes', None,
                  'Number of hidden units and layers in the network')
flags.DEFINE_integer('rng_seed', 42, 'Seed for initial network weights')
flags.DEFINE_string('save_path', None, 'Location for saved networks')
flags.DEFINE_string('checkpoint_file', None,
                    'Provides weights and optimzer state to resume training')
def _trajectory(line: str):
"""Returns parsed action trajectory."""
actions = [int(x) for x in line.split(' ')]
return tuple(actions)
def make_dataset(file: str):
  """Yields (information_state_tensor, action) training examples.

  Each line of *file* is a full action trajectory. For every example a
  random decision point after the deal/pass phase is sampled. The generator
  is infinite and reshuffles the trajectories on each pass over the file.
  """
  # Bug fix: the original `open(file)` handle was never closed; read all
  # lines up front and release the file immediately.
  with open(file) as f:
    lines = f.readlines()
  while True:
    np.random.shuffle(lines)
    for line in lines:
      trajectory = _trajectory(line)
      # skip pass_dir and deal actions
      action_index = np.random.randint(NUM_CARDS + 1, len(trajectory))
      state = GAME.new_initial_state()
      for action in trajectory[:action_index]:
        state.apply_action(action)
      yield (state.information_state_tensor(), trajectory[action_index])
def batch(dataset, batch_size: int):
  """Creates a batched dataset from a one-at-a-time dataset.

  NOTE: the same `observations`/`labels` arrays are yielded every
  iteration and overwritten in place on the next one -- consumers must
  use (or copy) a batch before advancing the generator.
  """
  observations = np.zeros([batch_size] + GAME.information_state_tensor_shape(),
                          np.float32)
  labels = np.zeros(batch_size, dtype=np.int32)
  while True:
    for batch_index in range(batch_size):
      observations[batch_index], labels[batch_index] = next(dataset)
    yield observations, labels
def one_hot(x, k):
  """Encode the integer array `x` as one-hot float vectors over `k` classes."""
  classes = jnp.arange(k)
  expanded = x[..., jnp.newaxis]
  return jnp.array(expanded == classes, dtype=np.float32)
def net_fn(x):
  """Haiku module for our network.

  An MLP: one Linear+relu pair per entry in --hidden_layer_sizes, then a
  final Linear over the action space followed by log_softmax, so the
  network outputs log-probabilities over the NUM_ACTIONS cards.
  """
  layers = []
  for layer_size in FLAGS.hidden_layer_sizes:
    # Flag values arrive as strings; coerce to int for the layer width.
    layers.append(hk.Linear(int(layer_size)))
    layers.append(jax.nn.relu)
  layers.append(hk.Linear(NUM_ACTIONS))
  layers.append(jax.nn.log_softmax)
  net = hk.Sequential(layers)
  return net(x)
def main(argv):
  """Build the network, then run the train/eval/checkpoint loop."""
  if len(argv) > 1:
    raise app.UsageError('Too many command-line arguments.')
  if FLAGS.hidden_layer_sizes is None:
    # Cannot pass default arguments as lists due to style requirements, so we
    # override it here if they are not set.
    FLAGS.hidden_layer_sizes = DEFAULT_LAYER_SIZES
  # Make the network.
  net = hk.without_apply_rng(hk.transform(net_fn, apply_rng=True))
  # Make the optimiser.
  opt = optix.adam(FLAGS.step_size)
  @jax.jit
  def loss(
      params: Params,
      inputs: np.ndarray,
      targets: np.ndarray,
  ) -> jnp.DeviceArray:
    """Cross-entropy loss."""
    assert targets.dtype == np.int32
    log_probs = net.apply(params, inputs)
    return -jnp.mean(one_hot(targets, NUM_ACTIONS) * log_probs)
  @jax.jit
  def accuracy(
      params: Params,
      inputs: np.ndarray,
      targets: np.ndarray,
  ) -> jnp.DeviceArray:
    """Classification accuracy."""
    predictions = net.apply(params, inputs)
    return jnp.mean(jnp.argmax(predictions, axis=-1) == targets)
  @jax.jit
  def update(
      params: Params,
      opt_state: OptState,
      inputs: np.ndarray,
      targets: np.ndarray,
  ) -> Tuple[Params, OptState]:
    """Learning rule (stochastic gradient descent)."""
    _, gradient = jax.value_and_grad(loss)(params, inputs, targets)
    updates, opt_state = opt.update(gradient, opt_state)
    new_params = optix.apply_updates(params, updates)
    return new_params, opt_state
  def output_samples(params: Params, max_samples: int):
    """Output some cases where the policy disagrees with the dataset action."""
    if max_samples == 0:
      return
    count = 0
    with open(os.path.join(FLAGS.data_path, 'test.txt')) as f:
      lines = list(f)
    np.random.shuffle(lines)
    for line in lines:
      state = GAME.new_initial_state()
      actions = _trajectory(line)
      for action in actions:
        if not state.is_chance_node():
          observation = np.array(state.information_state_tensor(), np.float32)
          policy = np.exp(net.apply(params, observation))
          probs_actions = [(p, a) for a, p in enumerate(policy)]
          pred = max(probs_actions)[1]
          if pred != action:
            # Show the model's top alternatives next to the ground truth.
            print(state)
            for p, a in reversed(sorted(probs_actions)[-TOP_K_ACTIONS:]):
              print('{:7} {:.2f}'.format(state.action_to_string(a), p))
            print('Ground truth {}\n'.format(state.action_to_string(action)))
            count += 1
            break
        state.apply_action(action)
      if count >= max_samples:
        return
  # Store what we need to rebuild the Haiku net.
  if FLAGS.save_path:
    filename = os.path.join(FLAGS.save_path, 'layers.txt')
    with open(filename, 'w') as layer_def_file:
      for s in FLAGS.hidden_layer_sizes:
        layer_def_file.write(f'{s} ')
      layer_def_file.write('\n')
  # Make datasets.
  if FLAGS.data_path is None:
    # NOTE(review): adjacent string literals below concatenate without a
    # space ("...localloction as...") -- message has a missing space.
    raise app.UsageError(
        'Please generate your own supervised training data and supply the local'
        'location as --data_path')
  train = batch(
      make_dataset(os.path.join(FLAGS.data_path, 'train.txt')),
      FLAGS.train_batch)
  test = batch(
      make_dataset(os.path.join(FLAGS.data_path, 'test.txt')), FLAGS.eval_batch)
  # Initialize network and optimiser.
  if FLAGS.checkpoint_file:
    with open(FLAGS.checkpoint_file, 'rb') as pkl_file:
      params, opt_state = pickle.load(pkl_file)
  else:
    rng = jax.random.PRNGKey(FLAGS.rng_seed)  # seed used for network weights
    inputs, unused_targets = next(train)
    params = net.init(rng, inputs)
    opt_state = opt.init(params)
  # Train/eval loop.
  for step in range(FLAGS.iterations):
    # Do SGD on a batch of training examples.
    inputs, targets = next(train)
    params, opt_state = update(params, opt_state, inputs, targets)
    # Periodically evaluate classification accuracy on the test set.
    if (1 + step) % FLAGS.eval_every == 0:
      inputs, targets = next(test)
      test_accuracy = accuracy(params, inputs, targets)
      print(f'After {1+step} steps, test accuracy: {test_accuracy}.')
      if FLAGS.save_path:
        filename = os.path.join(FLAGS.save_path, f'checkpoint-{1 + step}.pkl')
        with open(filename, 'wb') as pkl_file:
          pickle.dump((params, opt_state), pkl_file)
      output_samples(params, FLAGS.num_examples)
if __name__ == '__main__':
app.run(main)
| 34.209205 | 80 | 0.689946 |
import os
import pickle
from typing import Any, Tuple
from absl import app
from absl import flags
import haiku as hk
import jax
from jax import numpy as jnp
from jax.experimental import optix
import numpy as np
import pyspiel
OptState = Any
Params = Any
FLAGS = flags.FLAGS
GAME = pyspiel.load_game('hearts')
NUM_CARDS = 52
NUM_ACTIONS = NUM_CARDS
NUM_PLAYERS = 4
TOP_K_ACTIONS = 5
DEFAULT_LAYER_SIZES = [1024, 1024, 1024, 1024]
flags.DEFINE_integer('iterations', 100000, 'Number of iterations')
flags.DEFINE_string('data_path', None, 'Location for data')
flags.DEFINE_integer('eval_every', 10000, 'How often to evaluate the policy')
flags.DEFINE_integer('num_examples', 3,
'How many examples to print per evaluation')
flags.DEFINE_integer('train_batch', 128, 'Batch size for training step')
flags.DEFINE_integer('eval_batch', 10000, 'Batch size when evaluating')
flags.DEFINE_float('step_size', 1e-4, 'Step size for training')
flags.DEFINE_list('hidden_layer_sizes', None,
'Number of hidden units and layers in the network')
flags.DEFINE_integer('rng_seed', 42, 'Seed for initial network weights')
flags.DEFINE_string('save_path', None, 'Location for saved networks')
flags.DEFINE_string('checkpoint_file', None,
'Provides weights and optimzer state to resume training')
def _trajectory(line: str):
actions = [int(x) for x in line.split(' ')]
return tuple(actions)
def make_dataset(file: str):
lines = [line for line in open(file)]
while True:
np.random.shuffle(lines)
for line in lines:
trajectory = _trajectory(line)
action_index = np.random.randint(NUM_CARDS + 1, len(trajectory))
state = GAME.new_initial_state()
for action in trajectory[:action_index]:
state.apply_action(action)
yield (state.information_state_tensor(), trajectory[action_index])
def batch(dataset, batch_size: int):
observations = np.zeros([batch_size] + GAME.information_state_tensor_shape(),
np.float32)
labels = np.zeros(batch_size, dtype=np.int32)
while True:
for batch_index in range(batch_size):
observations[batch_index], labels[batch_index] = next(dataset)
yield observations, labels
def one_hot(x, k):
return jnp.array(x[..., jnp.newaxis] == jnp.arange(k), dtype=np.float32)
def net_fn(x):
layers = []
for layer_size in FLAGS.hidden_layer_sizes:
layers.append(hk.Linear(int(layer_size)))
layers.append(jax.nn.relu)
layers.append(hk.Linear(NUM_ACTIONS))
layers.append(jax.nn.log_softmax)
net = hk.Sequential(layers)
return net(x)
def main(argv):
if len(argv) > 1:
raise app.UsageError('Too many command-line arguments.')
if FLAGS.hidden_layer_sizes is None:
FLAGS.hidden_layer_sizes = DEFAULT_LAYER_SIZES
net = hk.without_apply_rng(hk.transform(net_fn, apply_rng=True))
opt = optix.adam(FLAGS.step_size)
@jax.jit
def loss(
params: Params,
inputs: np.ndarray,
targets: np.ndarray,
) -> jnp.DeviceArray:
assert targets.dtype == np.int32
log_probs = net.apply(params, inputs)
return -jnp.mean(one_hot(targets, NUM_ACTIONS) * log_probs)
@jax.jit
def accuracy(
params: Params,
inputs: np.ndarray,
targets: np.ndarray,
) -> jnp.DeviceArray:
predictions = net.apply(params, inputs)
return jnp.mean(jnp.argmax(predictions, axis=-1) == targets)
@jax.jit
def update(
params: Params,
opt_state: OptState,
inputs: np.ndarray,
targets: np.ndarray,
) -> Tuple[Params, OptState]:
_, gradient = jax.value_and_grad(loss)(params, inputs, targets)
updates, opt_state = opt.update(gradient, opt_state)
new_params = optix.apply_updates(params, updates)
return new_params, opt_state
def output_samples(params: Params, max_samples: int):
if max_samples == 0:
return
count = 0
with open(os.path.join(FLAGS.data_path, 'test.txt')) as f:
lines = list(f)
np.random.shuffle(lines)
for line in lines:
state = GAME.new_initial_state()
actions = _trajectory(line)
for action in actions:
if not state.is_chance_node():
observation = np.array(state.information_state_tensor(), np.float32)
policy = np.exp(net.apply(params, observation))
probs_actions = [(p, a) for a, p in enumerate(policy)]
pred = max(probs_actions)[1]
if pred != action:
print(state)
for p, a in reversed(sorted(probs_actions)[-TOP_K_ACTIONS:]):
print('{:7} {:.2f}'.format(state.action_to_string(a), p))
print('Ground truth {}\n'.format(state.action_to_string(action)))
count += 1
break
state.apply_action(action)
if count >= max_samples:
return
if FLAGS.save_path:
filename = os.path.join(FLAGS.save_path, 'layers.txt')
with open(filename, 'w') as layer_def_file:
for s in FLAGS.hidden_layer_sizes:
layer_def_file.write(f'{s} ')
layer_def_file.write('\n')
if FLAGS.data_path is None:
raise app.UsageError(
'Please generate your own supervised training data and supply the local'
'location as --data_path')
train = batch(
make_dataset(os.path.join(FLAGS.data_path, 'train.txt')),
FLAGS.train_batch)
test = batch(
make_dataset(os.path.join(FLAGS.data_path, 'test.txt')), FLAGS.eval_batch)
if FLAGS.checkpoint_file:
with open(FLAGS.checkpoint_file, 'rb') as pkl_file:
params, opt_state = pickle.load(pkl_file)
else:
rng = jax.random.PRNGKey(FLAGS.rng_seed)
inputs, unused_targets = next(train)
params = net.init(rng, inputs)
opt_state = opt.init(params)
for step in range(FLAGS.iterations):
inputs, targets = next(train)
params, opt_state = update(params, opt_state, inputs, targets)
if (1 + step) % FLAGS.eval_every == 0:
inputs, targets = next(test)
test_accuracy = accuracy(params, inputs, targets)
print(f'After {1+step} steps, test accuracy: {test_accuracy}.')
if FLAGS.save_path:
filename = os.path.join(FLAGS.save_path, f'checkpoint-{1 + step}.pkl')
with open(filename, 'wb') as pkl_file:
pickle.dump((params, opt_state), pkl_file)
output_samples(params, FLAGS.num_examples)
if __name__ == '__main__':
app.run(main)
| true | true |
f71f9e454eb33061bb98dbbfbd0ff5e4e58bf745 | 38,248 | py | Python | conda_build/render.py | scopatz/conda-build | dd74b17f4e7cb4286fe9a403895f9d34feb8e071 | [
"BSD-3-Clause"
] | null | null | null | conda_build/render.py | scopatz/conda-build | dd74b17f4e7cb4286fe9a403895f9d34feb8e071 | [
"BSD-3-Clause"
] | null | null | null | conda_build/render.py | scopatz/conda-build | dd74b17f4e7cb4286fe9a403895f9d34feb8e071 | [
"BSD-3-Clause"
] | null | null | null | # (c) Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
from __future__ import absolute_import, division, print_function
from collections import OrderedDict, defaultdict
from locale import getpreferredencoding
import json
import os
from os.path import isdir, isfile, abspath
import random
import re
import shutil
import string
import subprocess
import sys
import tarfile
import tempfile
import yaml
from .conda_interface import (PY3, UnsatisfiableError, ProgressiveFetchExtract,
TemporaryDirectory)
from .conda_interface import execute_actions
from .conda_interface import pkgs_dirs
from .conda_interface import conda_43
from .conda_interface import specs_from_url
from .conda_interface import memoized
from conda_build import exceptions, utils, environ
from conda_build.metadata import MetaData, combine_top_level_metadata_with_output
import conda_build.source as source
from conda_build.variants import (get_package_variants, list_of_dicts_to_dict_of_lists,
filter_by_key_value)
from conda_build.exceptions import DependencyNeedsBuildingError
from conda_build.index import get_build_index
# from conda_build.jinja_context import pin_subpackage_against_outputs
# conda >= 4.7 knows about multiple package formats (.tar.bz2 and .conda) and
# exposes them as a tuple; older conda only defines the single .tar.bz2
# extension, so normalize to a tuple either way.
try:
    from conda.base.constants import CONDA_TARBALL_EXTENSIONS
except Exception:
    from conda.base.constants import CONDA_TARBALL_EXTENSION
    CONDA_TARBALL_EXTENSIONS = (CONDA_TARBALL_EXTENSION,)
def odict_representer(dumper, data):
    """Serialize an OrderedDict as a plain YAML mapping, preserving key order."""
    ordered_items = data.items()
    return dumper.represent_dict(ordered_items)
# Register yaml serializers for container types that appear in rendered
# metadata: sets/tuples dump as plain lists, OrderedDicts as plain mappings
# (avoids python-specific !! tags in the output).
yaml.add_representer(set, yaml.representer.SafeRepresenter.represent_list)
yaml.add_representer(tuple, yaml.representer.SafeRepresenter.represent_list)
yaml.add_representer(OrderedDict, odict_representer)
def bldpkg_path(m):
    '''
    Returns path to built package's tarball given its ``Metadata``.

    For non-conda output types (e.g. wheels) the return value is a
    human-readable description string rather than a real path.
    '''
    # noarch packages always land in the 'noarch' subdir regardless of platform
    subdir = 'noarch' if m.noarch or m.noarch_python else m.config.host_subdir
    if not hasattr(m, 'type'):
        # metadata objects without an explicit type fall back to the configured
        # package format ("2" selects the newer .conda format)
        if m.config.conda_pkg_format == "2":
            pkg_type = "conda_v2"
        else:
            pkg_type = "conda"
    else:
        pkg_type = m.type

    # the default case will switch over to conda_v2 at some point
    if pkg_type == "conda":
        path = os.path.join(m.config.output_folder, subdir, '%s%s' % (m.dist(), CONDA_TARBALL_EXTENSIONS[0]))
    elif pkg_type == "conda_v2":
        path = os.path.join(m.config.output_folder, subdir, '%s%s' % (m.dist(), '.conda'))
    else:
        # descriptive placeholder, not a filesystem path
        path = '{} file for {} in: {}'.format(m.type, m.name(), os.path.join(m.config.output_folder, subdir))
    return path
def actions_to_pins(actions):
    """Convert the LINK entries of a conda install plan into pin specs.

    Each LINK entry's dist name (``name-version-build``) is turned into a
    ``"name version build"`` spec string.  Returns an empty list when the
    plan has no LINK section.
    """
    # PEP 8 (E731): use real functions instead of lambdas assigned to names.
    if conda_43:
        def spec_name(spec):
            # conda >= 4.3 LINK entries are record objects
            return spec.dist_name
    else:
        def spec_name(spec):
            # older conda LINK entries stringify to the dist name
            return str(spec)

    if 'LINK' not in actions:
        return []
    return [' '.join(spec_name(spec).split()[0].rsplit('-', 2))
            for spec in actions['LINK']]
def _categorize_deps(m, specs, exclude_pattern, variant):
subpackages = []
dependencies = []
pass_through_deps = []
dash_or_under = re.compile("[-_]")
# ones that get filtered from actual versioning, to exclude them from the hash calculation
for spec in specs:
if not exclude_pattern or not exclude_pattern.match(spec):
is_subpackage = False
spec_name = spec.split()[0]
for entry in m.get_section('outputs'):
name = entry.get('name')
if name == spec_name:
subpackages.append(' '.join((name, m.version())))
is_subpackage = True
if not is_subpackage:
dependencies.append(spec)
# fill in variant version iff no version at all is provided
for key, value in variant.items():
# for sake of comparison, ignore dashes and underscores
if (dash_or_under.sub("", key) == dash_or_under.sub("", spec_name) and
not re.search(r'%s\s+[0-9a-zA-Z\_\.\<\>\=\*]' % spec_name, spec)):
dependencies.append(" ".join((spec_name, value)))
elif exclude_pattern.match(spec):
pass_through_deps.append(spec)
return subpackages, dependencies, pass_through_deps
def get_env_dependencies(m, env, variant, exclude_pattern=None,
                         permit_unsatisfiable_variants=False,
                         merge_build_host_on_same_platform=True):
    """Resolve the dependencies of *env* ('build', 'host', or 'run') for *m*.

    Returns a 3-tuple ``(specs, actions, unsat)``:
      * specs: pinned dependency specs plus subpackage and pass-through specs
      * actions: the conda install plan used to derive the pins
      * unsat: a description of unsatisfiable packages, or None

    NOTE(review): merge_build_host_on_same_platform is unused here — confirm
    whether it is kept only for call-site compatibility.
    """
    specs = m.get_depends_top_and_out(env)
    # replace x.x with our variant's numpy version, or else conda tries to literally go get x.x
    if env in ('build', 'host'):
        no_xx_specs = []
        for spec in specs:
            if ' x.x' in spec:
                pkg_name = spec.split()[0]
                no_xx_specs.append(' '.join((pkg_name, variant.get(pkg_name, ""))))
            else:
                no_xx_specs.append(spec)
        specs = no_xx_specs
    subpackages, dependencies, pass_through_deps = _categorize_deps(m, specs, exclude_pattern, variant)

    dependencies = set(dependencies)
    unsat = None
    # random tmpdir suffix avoids collisions between concurrent resolutions
    random_string = ''.join(random.choice(string.ascii_uppercase + string.digits)
                            for _ in range(10))
    with TemporaryDirectory(prefix="_", suffix=random_string) as tmpdir:
        try:
            actions = environ.get_install_actions(tmpdir, tuple(dependencies), env,
                                                  subdir=getattr(m.config, '{}_subdir'.format(env)),
                                                  debug=m.config.debug,
                                                  verbose=m.config.verbose,
                                                  locking=m.config.locking,
                                                  bldpkgs_dirs=tuple(m.config.bldpkgs_dirs),
                                                  timeout=m.config.timeout,
                                                  disable_pip=m.config.disable_pip,
                                                  max_env_retry=m.config.max_env_retry,
                                                  output_folder=m.config.output_folder,
                                                  channel_urls=tuple(m.config.channel_urls))
        except (UnsatisfiableError, DependencyNeedsBuildingError) as e:
            # we'll get here if the environment is unsatisfiable
            if hasattr(e, 'packages'):
                unsat = ', '.join(e.packages)
            else:
                unsat = e.message
            if permit_unsatisfiable_variants:
                actions = {}
            else:
                raise

    specs = actions_to_pins(actions)
    return (utils.ensure_list((specs + subpackages + pass_through_deps) or
                              m.meta.get('requirements', {}).get(env, [])),
            actions, unsat)
def strip_channel(spec_str):
    """Drop any leading ``channel::`` prefix from a match-spec string.

    Accepts bytes or str; always returns str.
    """
    text = spec_str.decode() if hasattr(spec_str, 'decode') else spec_str
    if ':' not in text:
        return text
    return text.split("::")[-1]
def get_pin_from_build(m, dep, build_dep_versions):
    """Pin a run dependency *dep* against the version used at build time.

    Applies the variant's 'pin_run_as_build' configuration, plus the legacy
    'numpy x.x' syntax.  Returns the spec string, possibly with a computed
    version pin inserted; specs with no applicable pin come back unchanged.
    """
    dep_split = dep.split()
    dep_name = dep_split[0]
    build = ''
    if len(dep_split) >= 3:
        build = dep_split[2]
    pin = None
    version = build_dep_versions.get(dep_name) or m.config.variant.get(dep_name)
    # python pinning is skipped for noarch packages, which are python-agnostic
    if (version and dep_name in m.config.variant.get('pin_run_as_build', {}) and
            not (dep_name == 'python' and (m.noarch or m.noarch_python)) and
            dep_name in build_dep_versions):
        pin_cfg = m.config.variant['pin_run_as_build'][dep_name]
        if isinstance(pin_cfg, str):
            # if pin arg is a single 'x.x', use the same value for min and max
            pin_cfg = dict(min_pin=pin_cfg, max_pin=pin_cfg)
        pin = utils.apply_pin_expressions(version.split()[0], **pin_cfg)
    elif dep.startswith('numpy') and 'x.x' in dep:
        if not build_dep_versions.get(dep_name):
            raise ValueError("numpy x.x specified, but numpy not in build requirements.")
        pin = utils.apply_pin_expressions(version.split()[0], min_pin='x.x', max_pin='x.x')
    if pin:
        dep = " ".join((dep_name, pin, build)).strip()
    return dep
def _filter_run_exports(specs, ignore_list):
filtered_specs = {}
for agent, specs_list in specs.items():
for spec in specs_list:
if hasattr(spec, 'decode'):
spec = spec.decode()
if not any((ignore_spec == '*' or spec == ignore_spec or
spec.startswith(ignore_spec + ' ')) for ignore_spec in ignore_list):
filtered_specs[agent] = filtered_specs.get(agent, []) + [spec]
return filtered_specs
def find_pkg_dir_or_file_in_pkgs_dirs(pkg_dist, m, files_only=False):
    """Locate an already-downloaded package for *pkg_dist*.

    Searches conda's package caches plus this build's output dirs for either
    an extracted directory or a tarball.  With files_only=True, an extracted
    directory is re-archived into a tarball (moved under croot/host_subdir)
    so that a file path can always be returned.  Returns None if not found.
    """
    _pkgs_dirs = pkgs_dirs + list(m.config.bldpkgs_dirs)
    pkg_loc = None
    for pkgs_dir in _pkgs_dirs:
        pkg_dir = os.path.join(pkgs_dir, pkg_dist)
        pkg_file = os.path.join(pkgs_dir, pkg_dist + CONDA_TARBALL_EXTENSIONS[0])
        if not files_only and os.path.isdir(pkg_dir):
            pkg_loc = pkg_dir
            break
        elif os.path.isfile(pkg_file):
            pkg_loc = pkg_file
            break
        elif files_only and os.path.isdir(pkg_dir):
            pkg_loc = pkg_file
            # create the tarball on demand. This is so that testing on archives works.
            with tarfile.open(pkg_file, 'w:bz2') as archive:
                for entry in os.listdir(pkg_dir):
                    archive.add(os.path.join(pkg_dir, entry), arcname=entry)
            pkg_subdir = os.path.join(m.config.croot, m.config.host_subdir)
            pkg_loc = os.path.join(pkg_subdir, os.path.basename(pkg_file))
            shutil.move(pkg_file, pkg_loc)
    return pkg_loc
@memoized
def _read_specs_from_package(pkg_loc, pkg_dist):
    """Read run_exports specs recorded inside a package (dir or tarball).

    Supports the legacy plain-text 'info/run_exports' format as well as the
    newer yaml/json variants.  Returns a dict such as
    {'weak': [...], 'strong': [...]} (empty when nothing is recorded).
    """
    specs = {}
    if pkg_loc and os.path.isdir(pkg_loc):
        downstream_file = os.path.join(pkg_loc, 'info/run_exports')
        if os.path.isfile(downstream_file):
            # legacy format: one weak spec per line
            with open(downstream_file) as f:
                specs = {'weak': [spec.rstrip() for spec in f.readlines()]}
        # a later attempt: record more info in the yaml file, to support "strong" run exports
        elif os.path.isfile(downstream_file + '.yaml'):
            with open(downstream_file + '.yaml') as f:
                specs = yaml.safe_load(f)
        elif os.path.isfile(downstream_file + '.json'):
            with open(downstream_file + '.json') as f:
                specs = json.load(f)
    if not specs and pkg_loc and os.path.isfile(pkg_loc):
        # the package is an unextracted archive; read the member directly.
        # switching to json for consistency in conda-build 4
        specs_yaml = utils.package_has_file(pkg_loc, 'info/run_exports.yaml')
        specs_json = utils.package_has_file(pkg_loc, 'info/run_exports.json')
        if hasattr(specs_json, "decode"):
            specs_json = specs_json.decode("utf-8")

        if specs_json:
            specs = json.loads(specs_json)
        elif specs_yaml:
            specs = yaml.safe_load(specs_yaml)
        else:
            legacy_specs = utils.package_has_file(pkg_loc, 'info/run_exports')
            # exclude packages pinning themselves (makes no sense)
            if legacy_specs:
                weak_specs = set()
                if hasattr(pkg_dist, "decode"):
                    pkg_dist = pkg_dist.decode("utf-8")
                for spec in legacy_specs.splitlines():
                    if hasattr(spec, "decode"):
                        spec = spec.decode("utf-8")
                    if not spec.startswith(pkg_dist.rsplit('-', 2)[0]):
                        weak_specs.add(spec.rstrip())
                specs = {'weak': sorted(list(weak_specs))}
    return specs
def execute_download_actions(m, actions, env, package_subset=None, require_files=False):
    """Download (without installing) the packages named in a conda plan.

    Returns {pkg: (local_path_or_None, dist_name)} for each LINK entry,
    optionally restricted to *package_subset* (records or spec strings).
    With require_files=True, only file paths (never extracted dirs) count.
    """
    index, _, _ = get_build_index(getattr(m.config, '{}_subdir'.format(env)), bldpkgs_dir=m.config.bldpkgs_dir,
                                  output_folder=m.config.output_folder, channel_urls=m.config.channel_urls,
                                  debug=m.config.debug, verbose=m.config.verbose, locking=m.config.locking,
                                  timeout=m.config.timeout)

    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly
    if 'FETCH' in actions or 'EXTRACT' in actions:
        # this is to force the download
        execute_actions(actions, index, verbose=m.config.debug)

    pkg_files = {}

    packages = actions.get('LINK', [])
    package_subset = utils.ensure_list(package_subset)
    selected_packages = set()
    if package_subset:
        for pkg in package_subset:
            if hasattr(pkg, 'name'):
                # already a package record: keep it if it is in the plan
                if pkg in packages:
                    selected_packages.add(pkg)
            else:
                # a spec string: match by package name against the plan
                pkg_name = pkg.split()[0]
                for link_pkg in packages:
                    if pkg_name == link_pkg.name:
                        selected_packages.add(link_pkg)
                        break
        packages = selected_packages

    for pkg in packages:
        if hasattr(pkg, 'dist_name'):
            pkg_dist = pkg.dist_name
        else:
            pkg = strip_channel(pkg)
            pkg_dist = pkg.split(' ')[0]
        pkg_loc = find_pkg_dir_or_file_in_pkgs_dirs(pkg_dist, m, files_only=require_files)

        # ran through all pkgs_dirs, and did not find package or folder.  Download it.
        # TODO: this is a vile hack reaching into conda's internals. Replace with
        #    proper conda API when available.
        if not pkg_loc and conda_43:
            try:
                pkg_record = [_ for _ in index if _.dist_name == pkg_dist][0]
                # the conda 4.4 API uses a single `link_prefs` kwarg
                # whereas conda 4.3 used `index` and `link_dists` kwargs
                pfe = ProgressiveFetchExtract(link_prefs=(index[pkg_record],))
            except TypeError:
                # TypeError: __init__() got an unexpected keyword argument 'link_prefs'
                pfe = ProgressiveFetchExtract(link_dists=[pkg], index=index)
            with utils.LoggingContext():
                pfe.execute()
            # the fetched file lands in one of conda's package caches
            for pkg_dir in pkgs_dirs:
                _loc = os.path.join(pkg_dir, index[pkg].fn)
                if os.path.isfile(_loc):
                    pkg_loc = _loc
                    break
        pkg_files[pkg] = pkg_loc, pkg_dist

    return pkg_files
def get_upstream_pins(m, actions, env):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs. Return these additional specs.

    Run_exports are looked up in the channel's channeldata rather than inside
    the package itself; entries matching build/ignore_run_exports are dropped.
    """
    env_specs = m.meta.get('requirements', {}).get(env, [])
    explicit_specs = [req.split(' ')[0] for req in env_specs] if env_specs else []
    linked_packages = actions.get('LINK', [])
    # only packages explicitly requested in the recipe contribute run_exports,
    # not their transitive dependencies
    linked_packages = [pkg for pkg in linked_packages if pkg.name in explicit_specs]

    ignore_list = utils.ensure_list(m.get_value('build/ignore_run_exports'))
    additional_specs = {}
    for pkg in linked_packages:
        channeldata = utils.download_channeldata(pkg.channel)
        run_exports = channeldata.get('packages', {}).get(pkg.name, {}).get('run_exports', {}).get(pkg.version, {})
        specs = _filter_run_exports(run_exports, ignore_list)
        if specs:
            additional_specs = utils.merge_dicts_of_lists(additional_specs, specs)
    return additional_specs
def _read_upstream_pin_files(m, env, permit_unsatisfiable_variants, exclude_pattern):
    """Resolve *env*'s dependencies and collect run_exports published by them.

    Returns (deps, unsat, extra_run_specs).
    """
    deps, actions, unsat = get_env_dependencies(
        m, env, m.config.variant, exclude_pattern,
        permit_unsatisfiable_variants=permit_unsatisfiable_variants)
    # Strong build run_exports matter for the host env too — e.g. they make
    # vc feature activation work correctly there.
    extra_run_specs = get_upstream_pins(m, actions, env)
    resolved_deps = list(set(deps)) or m.meta.get('requirements', {}).get(env, [])
    return resolved_deps, unsat, extra_run_specs
def add_upstream_pins(m, permit_unsatisfiable_variants, exclude_pattern):
    """Applies run_exports from any build deps to host and run sections.

    Resolves the build (and, when cross-compiling, host) environments,
    collects the run_exports published by those dependencies, and folds them
    into the appropriate requirements sections of ``m.meta``.  Returns
    ``(build_unsat, host_unsat)`` describing any unsatisfiable specs.
    """
    # if we have host deps, they're more important than the build deps.
    requirements = m.meta.get('requirements', {})
    build_deps, build_unsat, extra_run_specs_from_build = _read_upstream_pin_files(m, 'build',
                                            permit_unsatisfiable_variants, exclude_pattern)

    # is there a 'host' section?
    if m.is_cross:
        # this must come before we read upstream pins, because it will enforce things
        #    like vc version from the compiler.
        host_reqs = utils.ensure_list(m.get_value('requirements/host'))
        # ensure host_reqs is present, so in-place modification below is actually in-place
        requirements = m.meta.setdefault('requirements', {})
        requirements['host'] = host_reqs

        if not host_reqs:
            # fall back to the matching output's requirements when the top
            # level has no host section of its own
            matching_output = [out for out in m.meta.get('outputs', []) if
                               out.get('name') == m.name()]
            if matching_output:
                requirements = utils.expand_reqs(matching_output[0].get('requirements', {}))
                matching_output[0]['requirements'] = requirements
                host_reqs = requirements.setdefault('host', [])
        # in-place modification of above thingie
        host_reqs.extend(extra_run_specs_from_build.get('strong', []))

        host_deps, host_unsat, extra_run_specs_from_host = _read_upstream_pin_files(m, 'host',
                                            permit_unsatisfiable_variants, exclude_pattern)
        extra_run_specs = set(extra_run_specs_from_host.get('strong', []) +
                              extra_run_specs_from_host.get('weak', []) +
                              extra_run_specs_from_build.get('strong', []))
    else:
        host_deps = []
        host_unsat = []
        extra_run_specs = set(extra_run_specs_from_build.get('strong', []))
        if m.build_is_host:
            extra_run_specs.update(extra_run_specs_from_build.get('weak', []))
            # BUG FIX: the original `set(build_deps or []).update(...)` bound
            # build_deps to None (set.update returns None), silently discarding
            # the build deps.  Use a union so weak run_exports are really added.
            build_deps = set(build_deps or []) | set(extra_run_specs_from_build.get('weak', []))
        else:
            host_deps = set(extra_run_specs_from_build.get('strong', []))

    run_deps = extra_run_specs | set(utils.ensure_list(requirements.get('run')))

    for section, deps in (('build', build_deps), ('host', host_deps), ('run', run_deps)):
        if deps:
            requirements[section] = list(deps)

    m.meta['requirements'] = requirements
    return build_unsat, host_unsat
def _simplify_to_exact_constraints(metadata):
    """
    For metapackages that are pinned exactly, we want to bypass all dependencies that may
    be less exact.

    Rewrites each requirements section in place: when a package appears with an
    exact version+build pin, that pin replaces any looser specs for the same
    name.  Conflicting exact pins raise ValueError.
    """
    requirements = metadata.meta.get('requirements', {})
    # collect deps on a per-section basis
    for section in 'build', 'host', 'run':
        deps = utils.ensure_list(requirements.get(section, []))
        deps_dict = defaultdict(list)
        for dep in deps:
            spec_parts = utils.ensure_valid_spec(dep).split()
            name = spec_parts[0]
            if len(spec_parts) > 1:
                deps_dict[name].append(spec_parts[1:])
            else:
                deps_dict[name].append([])

        deps_list = []
        for name, values in deps_dict.items():
            exact_pins = []
            for dep in values:
                if len(dep) > 1:
                    version, build = dep[:2]
                    # an "exact" pin has a concrete version and build string
                    # (no wildcards or comparison operators)
                    if not (any(c in version for c in ('>', '<', '*')) or '*' in build):
                        exact_pins.append(dep)
            if len(values) == 1 and not any(values):
                # only a bare name was given
                deps_list.append(name)
            elif exact_pins:
                if not all(pin == exact_pins[0] for pin in exact_pins):
                    raise ValueError("Conflicting exact pins: {}".format(exact_pins))
                else:
                    deps_list.append(' '.join([name] + exact_pins[0]))
            else:
                deps_list.extend(' '.join([name] + dep) for dep in values if dep)

        if section in requirements and deps_list:
            requirements[section] = deps_list
    metadata.meta['requirements'] = requirements
def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=False):
    """Fully render a recipe. Fill in versions for build/host dependencies.

    Returns a metadata object with concrete, resolved dependency versions.
    ``m.final`` is set False (instead of raising) when an environment was
    unsatisfiable and permit_unsatisfiable_variants allowed it.
    """
    if not parent_metadata:
        parent_metadata = m
    if m.skip():
        m.final = True
    else:
        exclude_pattern = None
        excludes = set(m.config.variant.get('ignore_version', []))

        # pin_run_as_build entries take precedence over ignore_version
        for key in m.config.variant.get('pin_run_as_build', {}).keys():
            if key in excludes:
                excludes.remove(key)

        output_excludes = set()
        if hasattr(m, 'other_outputs'):
            output_excludes = set(name for (name, variant) in m.other_outputs.keys())

        if excludes or output_excludes:
            exclude_pattern = re.compile(r'|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                          for exc in excludes | output_excludes))

        parent_recipe = m.meta.get('extra', {}).get('parent_recipe', {})

        # extract the topmost section where variables are defined, and put it on top of the
        #     requirements for a particular output
        # Re-parse the output from the original recipe, so that we re-consider any jinja2 stuff
        output = parent_metadata.get_rendered_output(m.name(), variant=m.config.variant)

        is_top_level = True
        if output:
            if 'package' in output or 'name' not in output:
                # it's just a top-level recipe
                output = {'name': m.name()}
            else:
                is_top_level = False

            if not parent_recipe or parent_recipe['name'] == m.name():
                combine_top_level_metadata_with_output(m, output)
            requirements = utils.expand_reqs(output.get('requirements', {}))
            m.meta['requirements'] = requirements

        if m.meta.get('requirements'):
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'build')
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'host')

        m = parent_metadata.get_output_metadata(m.get_rendered_output(m.name()))
        build_unsat, host_unsat = add_upstream_pins(m,
                                                    permit_unsatisfiable_variants,
                                                    exclude_pattern)
        # getting this AFTER add_upstream_pins is important, because that function adds deps
        #     to the metadata.
        requirements = m.meta.get('requirements', {})

        # here's where we pin run dependencies to their build time versions.  This happens based
        #     on the keys in the 'pin_run_as_build' key in the variant, which is a list of package
        #     names to have this behavior.
        if output_excludes:
            exclude_pattern = re.compile(r'|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                          for exc in output_excludes))
        pinning_env = 'host' if m.is_cross else 'build'

        build_reqs = requirements.get(pinning_env, [])
        # if python is in the build specs, but doesn't have a specific associated
        #    version, make sure to add one
        if build_reqs and 'python' in build_reqs:
            build_reqs.append('python {}'.format(m.config.variant['python']))
            m.meta['requirements'][pinning_env] = build_reqs

        full_build_deps, _, _ = get_env_dependencies(m, pinning_env,
                                                     m.config.variant,
                                                     exclude_pattern=exclude_pattern,
                                                     permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        full_build_dep_versions = {dep.split()[0]: " ".join(dep.split()[1:])
                                   for dep in full_build_deps}

        if isfile(m.requirements_path) and not requirements.get('run'):
            requirements['run'] = specs_from_url(m.requirements_path)
        run_deps = requirements.get('run', [])

        versioned_run_deps = [get_pin_from_build(m, dep, full_build_dep_versions)
                              for dep in run_deps]
        versioned_run_deps = [utils.ensure_valid_spec(spec, warn=True)
                              for spec in versioned_run_deps]
        requirements[pinning_env] = full_build_deps
        requirements['run'] = versioned_run_deps

        m.meta['requirements'] = requirements

        # append other requirements, such as python.app, appropriately
        m.append_requirements()

        if m.pin_depends == 'strict':
            m.meta['requirements']['run'] = environ.get_pinned_deps(
                m, 'run')
        # test requirements get the same build-time pinning treatment
        test_deps = m.get_value('test/requires')
        if test_deps:
            versioned_test_deps = list({get_pin_from_build(m, dep, full_build_dep_versions)
                                        for dep in test_deps})
            versioned_test_deps = [utils.ensure_valid_spec(spec, warn=True)
                                   for spec in versioned_test_deps]
            m.meta['test']['requires'] = versioned_test_deps
        extra = m.meta.get('extra', {})
        extra['copy_test_source_files'] = m.config.copy_test_source_files
        m.meta['extra'] = extra

        # if source/path is relative, then the output package makes no sense at all.  The next
        #     best thing is to hard-code the absolute path.  This probably won't exist on any
        #     system other than the original build machine, but at least it will work there.
        if m.meta.get('source'):
            if 'path' in m.meta['source']:
                source_path = m.meta['source']['path']
                # NOTE(review): expanduser's return value is discarded here, so
                # this line looks like a no-op — confirm whether it was meant to
                # rebind source_path.
                os.path.expanduser(source_path)
                if not os.path.isabs(source_path):
                    m.meta['source']['path'] = os.path.normpath(
                        os.path.join(m.path, source_path))
            elif ('git_url' in m.meta['source'] and not (
                    # absolute paths are not relative paths
                    os.path.isabs(m.meta['source']['git_url']) or
                    # real urls are not relative paths
                    ":" in m.meta['source']['git_url'])):
                m.meta['source']['git_url'] = os.path.normpath(
                    os.path.join(m.path, m.meta['source']['git_url']))

        if not m.meta.get('build'):
            m.meta['build'] = {}

        _simplify_to_exact_constraints(m)

        if build_unsat or host_unsat:
            m.final = False
            log = utils.get_logger(__name__)
            log.warn("Returning non-final recipe for {}; one or more dependencies "
                     "was unsatisfiable:".format(m.dist()))
            if build_unsat:
                log.warn("Build: {}".format(build_unsat))
            if host_unsat:
                log.warn("Host: {}".format(host_unsat))
        else:
            m.final = True
    # NOTE(review): is_top_level is only bound in the non-skip branch above;
    # the m.skip() path would NameError here — confirm whether skip recipes
    # ever reach this point.
    if is_top_level:
        parent_metadata = m
    return m
def try_download(metadata, no_download_source, raise_error=False):
    """Attempt to fetch/patch the recipe source if it is needed and absent.

    A failed download is tolerated (it may succeed later, after recipe
    dependencies are installed) unless *raise_error* demands success, or
    *no_download_source* makes success impossible by definition.
    """
    should_fetch = not (metadata.source_provided or no_download_source)
    if should_fetch:
        # this try/catch is for when the tool to download source is actually in
        # meta.yaml, and not previously installed in builder env.
        try:
            source.provide(metadata)
        except subprocess.CalledProcessError as exc:
            print("Warning: failed to download source. If building, will try "
                  "again after downloading recipe dependencies.")
            print("Error was: ")
            print(exc)

    if metadata.source_provided:
        return
    if no_download_source:
        raise ValueError("no_download_source specified, but can't fully render recipe without"
                         " downloading source. Please fix the recipe, or don't use "
                         "no_download_source.")
    if raise_error:
        raise RuntimeError("Failed to download or patch source. Please see build log for info.")
def reparse(metadata):
    """Some things need to be parsed again after the build environment has been created
    and activated.

    Marks the metadata non-final, re-resolves it, and returns the re-finalized
    metadata object.
    """
    metadata.final = False
    # put the build/host prefixes (and the host env's site-packages) on
    # sys.path so jinja2 helpers that import from those envs resolve correctly
    sys.path.insert(0, metadata.config.build_prefix)
    sys.path.insert(0, metadata.config.host_prefix)
    py_ver = '.'.join(metadata.config.variant['python'].split('.')[:2])
    sys.path.insert(0, utils.get_site_packages(metadata.config.host_prefix, py_ver))
    metadata.parse_until_resolved()
    metadata = finalize_metadata(metadata)
    return metadata
def distribute_variants(metadata, variants, permit_unsatisfiable_variants=False,
                        allow_no_other_outputs=False, bypass_env_check=False):
    """Expand *metadata* into one rendered copy per applicable variant.

    Returns a list of (metadata, needs_download, needs_reparse_in_env) tuples,
    one per distinct variant that actually affects this recipe.
    """
    rendered_metadata = {}
    need_source_download = True

    # don't bother distributing python if it's a noarch package
    if metadata.noarch or metadata.noarch_python:
        variants = filter_by_key_value(variants, 'python', variants[0]['python'],
                                       'noarch_reduction')

    # store these for reference later
    metadata.config.variants = variants
    # These are always the full set.  just 'variants' is the one that gets
    #     used mostly, and can be reduced
    metadata.config.input_variants = variants
    recipe_requirements = metadata.extract_requirements_text()
    recipe_package_and_build_text = metadata.extract_package_and_build_text()
    recipe_text = recipe_package_and_build_text + recipe_requirements
    if PY3 and hasattr(recipe_text, 'decode'):
        recipe_text = recipe_text.decode()
    elif not PY3 and hasattr(recipe_text, 'encode'):
        recipe_text = recipe_text.encode()

    metadata.config.variant = variants[0]
    used_variables = metadata.get_used_loop_vars(force_global=False)
    top_loop = metadata.get_reduced_variant_set(used_variables)

    for variant in top_loop:
        # work on a copy so each variant renders independently
        mv = metadata.copy()
        mv.config.variant = variant

        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        conform_dict = {}
        for key in used_variables:
            # We use this variant in the top-level recipe.
            # constrain the stored variants to only this version in the output
            #     variant mapping
            conform_dict[key] = variant[key]

        for key, values in conform_dict.items():
            mv.config.variants = (filter_by_key_value(mv.config.variants, key, values,
                                                      'distribute_variants_reduction') or
                                  mv.config.variants)

        # NOTE(review): pin_run_as_build is recomputed identically here —
        # looks redundant with the block above; confirm before removing.
        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        numpy_pinned_variants = []
        for _variant in mv.config.variants:
            _variant['pin_run_as_build'] = pin_run_as_build
            numpy_pinned_variants.append(_variant)
        mv.config.variants = numpy_pinned_variants

        mv.config.squished_variants = list_of_dicts_to_dict_of_lists(mv.config.variants)

        if mv.needs_source_for_render and mv.variant_in_source:
            # the variant appears inside the source itself: fetch fresh source
            # and re-parse with it in place
            mv.parse_again()
            utils.rm_rf(mv.config.work_dir)
            source.provide(mv)
            mv.parse_again()

        try:
            mv.parse_until_resolved(allow_no_other_outputs=allow_no_other_outputs,
                                    bypass_env_check=bypass_env_check)
        except SystemExit:
            pass
        need_source_download = (not mv.needs_source_for_render or not mv.source_provided)

        # de-duplicate on (dist, target platform, used-variant values)
        rendered_metadata[(mv.dist(),
                           mv.config.variant.get('target_platform', mv.config.subdir),
                           tuple((var, mv.config.variant.get(var))
                                 for var in mv.get_used_vars()))] = \
            (mv, need_source_download, None)

    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    return list(rendered_metadata.values())
def expand_outputs(metadata_tuples):
    """Obtain all metadata objects for all outputs from recipe. Useful for outputting paths."""
    collected = OrderedDict()
    for base_metadata, _download, _reparse in metadata_tuples:
        output_set = base_metadata.copy().get_output_metadata_set(
            permit_unsatisfiable_variants=False)
        for output_dict, out_meta in output_set:
            # keyed by dist so repeated outputs collapse to one entry
            collected[out_meta.dist()] = (output_dict, out_meta)
    return list(collected.values())
def render_recipe(recipe_path, config, no_download_source=False, variants=None,
                  permit_unsatisfiable_variants=True, reset_build_id=True, bypass_env_check=False):
    """Returns a list of tuples, each consisting of

    (metadata-object, needs_download, needs_render_in_env)

    You get one tuple per variant.  Outputs are not factored in here (subpackages won't affect these
    results returned here.)

    *recipe_path* may be a recipe directory, a meta.yaml path, or a recipe
    archive (.tar/.tar.gz/.tgz/.tar.bz2), which is extracted to a temp dir.
    """
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')
    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            # extract an archived recipe; the temp dir is removed at the end
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return None, None
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    rendered_metadata = {}

    # important: set build id *before* downloading source.  Otherwise source goes into a different
    #    build folder.
    if config.set_build_id:
        m.config.compute_build_id(m.name(), reset=reset_build_id)

    # this source may go into a folder that doesn't match the eventual build folder.
    #    There's no way around it AFAICT.  We must download the source to be able to render
    #    the recipe (from anything like GIT_FULL_HASH), but we can't know the final build
    #    folder until rendering is complete, because package names can have variant jinja2 in them.
    if m.needs_source_for_render and not m.source_provided:
        try_download(m, no_download_source=no_download_source)

    if m.final:
        # already-final metadata needs no variant expansion, only a variant
        # for bookkeeping purposes
        if not hasattr(m.config, 'variants') or not m.config.variant:
            m.config.ignore_system_variants = True
            if os.path.isfile(os.path.join(m.path, 'conda_build_config.yaml')):
                m.config.variant_config_files = [os.path.join(m.path, 'conda_build_config.yaml')]
            m.config.variants = get_package_variants(m, variants=variants)
            m.config.variant = m.config.variants[0]
        rendered_metadata = [(m, False, False), ]
    else:
        # merge any passed-in variants with any files found
        variants = get_package_variants(m, variants=variants)

        # when building, we don't want to fully expand all outputs into metadata, only expand
        #    whatever variants we have (i.e. expand top-level variants, not output-only variants)
        rendered_metadata = distribute_variants(m, variants,
                                                permit_unsatisfiable_variants=permit_unsatisfiable_variants,
                                                allow_no_other_outputs=True, bypass_env_check=bypass_env_check)
    if need_cleanup:
        utils.rm_rf(recipe_dir)

    return rendered_metadata
# Keep this out of the function below so it can be imported by other modules.
FIELDS = ["package", "source", "build", "requirements", "test", "app", "outputs", "about", "extra"]
# Next bit of stuff is to support YAML output in the order we expect.
# http://stackoverflow.com/a/17310199/1170370
class _MetaYaml(dict):
fields = FIELDS
def to_omap(self):
return [(field, self[field]) for field in _MetaYaml.fields if field in self]
def _represent_omap(dumper, data):
return dumper.represent_mapping(u'tag:yaml.org,2002:map', data.to_omap())
def _unicode_representer(dumper, uni):
    """Emit text values as plain YAML strings (no !!python/unicode tag)."""
    return yaml.ScalarNode(tag=u'tag:yaml.org,2002:str', value=uni)
class _IndentDumper(yaml.Dumper):
    """yaml Dumper that indents sequence items beneath their key and never
    emits anchors/aliases, so repeated objects are written out in full."""

    def increase_indent(self, flow=False, indentless=False):
        # force indentless=False so list items are indented under their key
        return super(_IndentDumper, self).increase_indent(flow, False)

    def ignore_aliases(self, data):
        # never emit YAML anchors/aliases; always inline repeated data
        return True
# Hook the custom representers into yaml: ordered-mapping output for
# _MetaYaml, and plain (untagged) strings for text values on both py2/py3.
yaml.add_representer(_MetaYaml, _represent_omap)
if PY3:
    yaml.add_representer(str, _unicode_representer)
    unicode = None  # silence pyflakes about unicode not existing in py3
else:
    yaml.add_representer(unicode, _unicode_representer)
def output_yaml(metadata, filename=None, suppress_outputs=False):
    """Render *metadata* back into meta.yaml text.

    When *filename* is given, write the text there (creating parent
    directories as needed) and return a confirmation message; otherwise
    return the yaml text itself.  suppress_outputs drops the 'outputs'
    section for output metadata objects.
    """
    local_metadata = metadata.copy()
    # drop empty sections so they don't clutter the dump
    utils.trim_empty_keys(local_metadata.meta)
    if suppress_outputs and local_metadata.is_output and 'outputs' in local_metadata.meta:
        del local_metadata.meta['outputs']
    output = yaml.dump(_MetaYaml(local_metadata.meta), Dumper=_IndentDumper,
                       default_flow_style=False, indent=4)
    if filename:
        if any(sep in filename for sep in ('\\', '/')):
            try:
                os.makedirs(os.path.dirname(filename))
            except OSError:
                # best-effort: the directory may already exist
                pass
        with open(filename, "w") as f:
            f.write(output)
        return "Wrote yaml to %s" % filename
    else:
        return output
| 43.912744 | 115 | 0.616189 |
from __future__ import absolute_import, division, print_function
from collections import OrderedDict, defaultdict
from locale import getpreferredencoding
import json
import os
from os.path import isdir, isfile, abspath
import random
import re
import shutil
import string
import subprocess
import sys
import tarfile
import tempfile
import yaml
from .conda_interface import (PY3, UnsatisfiableError, ProgressiveFetchExtract,
TemporaryDirectory)
from .conda_interface import execute_actions
from .conda_interface import pkgs_dirs
from .conda_interface import conda_43
from .conda_interface import specs_from_url
from .conda_interface import memoized
from conda_build import exceptions, utils, environ
from conda_build.metadata import MetaData, combine_top_level_metadata_with_output
import conda_build.source as source
from conda_build.variants import (get_package_variants, list_of_dicts_to_dict_of_lists,
filter_by_key_value)
from conda_build.exceptions import DependencyNeedsBuildingError
from conda_build.index import get_build_index
try:
from conda.base.constants import CONDA_TARBALL_EXTENSIONS
except Exception:
from conda.base.constants import CONDA_TARBALL_EXTENSION
CONDA_TARBALL_EXTENSIONS = (CONDA_TARBALL_EXTENSION,)
def odict_representer(dumper, data):
    """Represent an OrderedDict as a plain YAML mapping, preserving key order."""
    return dumper.represent_dict(data.items())
yaml.add_representer(set, yaml.representer.SafeRepresenter.represent_list)
yaml.add_representer(tuple, yaml.representer.SafeRepresenter.represent_list)
yaml.add_representer(OrderedDict, odict_representer)
def bldpkg_path(m):
    """Return the path where the built package tarball for *m* will be placed.

    For non-conda output types the return value is a descriptive string,
    not a real filesystem path.
    """
    # noarch packages always land in the 'noarch' subdir regardless of platform
    subdir = 'noarch' if m.noarch or m.noarch_python else m.config.host_subdir
    if not hasattr(m, 'type'):
        # fall back to the configured format ("2" selects the .conda format)
        if m.config.conda_pkg_format == "2":
            pkg_type = "conda_v2"
        else:
            pkg_type = "conda"
    else:
        pkg_type = m.type
    if pkg_type == "conda":
        path = os.path.join(m.config.output_folder, subdir, '%s%s' % (m.dist(), CONDA_TARBALL_EXTENSIONS[0]))
    elif pkg_type == "conda_v2":
        path = os.path.join(m.config.output_folder, subdir, '%s%s' % (m.dist(), '.conda'))
    else:
        path = '{} file for {} in: {}'.format(m.type, m.name(), os.path.join(m.config.output_folder, subdir))
    return path
def actions_to_pins(actions):
    """Convert a conda action plan's LINK entries into "name version build" pin specs.

    Parameters
    ----------
    actions : dict
        A conda install plan; only the 'LINK' key (packages to be linked) is read.

    Returns
    -------
    list of str
        One spec string per linked package (e.g. 'numpy 1.16.4 py37_0'),
        or an empty list when nothing is linked.
    """
    if 'LINK' not in actions:
        return []

    # conda >= 4.3 hands us record objects exposing .dist_name; older conda
    # gives plain Dist objects whose str() is the dist name.  Use a proper
    # named function rather than a lambda assignment (PEP 8 E731).
    def _dist_name(entry):
        return entry.dist_name if conda_43 else str(entry)

    # A dist name is '<name>-<version>-<build>'; rsplit from the right so
    # package names that themselves contain '-' stay intact.
    return [' '.join(_dist_name(spec).split()[0].rsplit('-', 2))
            for spec in actions['LINK']]
def _categorize_deps(m, specs, exclude_pattern, variant):
    """Split *specs* into (subpackages, dependencies, pass_through_deps).

    A spec naming one of the recipe's own outputs becomes a "subpackage" pin at the
    recipe's version; specs matched by *exclude_pattern* are passed through untouched;
    everything else is a regular dependency, optionally augmented with the variant's
    version for that package.
    """
    subpackages = []
    dependencies = []
    pass_through_deps = []
    # For comparing spec names against variant keys, '-' and '_' are interchangeable.
    dash_or_under = re.compile("[-_]")
    for spec in specs:
        if not exclude_pattern or not exclude_pattern.match(spec):
            is_subpackage = False
            spec_name = spec.split()[0]
            # Specs that name one of this recipe's outputs are intra-recipe deps.
            for entry in m.get_section('outputs'):
                name = entry.get('name')
                if name == spec_name:
                    subpackages.append(' '.join((name, m.version())))
                    is_subpackage = True
            if not is_subpackage:
                dependencies.append(spec)
            # Add the variant-pinned version only when the spec carries no
            # version/build constraint of its own (the regex checks for one).
            for key, value in variant.items():
                if (dash_or_under.sub("", key) == dash_or_under.sub("", spec_name) and
                        not re.search(r'%s\s+[0-9a-zA-Z\_\.\<\>\=\*]' % spec_name, spec)):
                    dependencies.append(" ".join((spec_name, value)))
        elif exclude_pattern.match(spec):
            pass_through_deps.append(spec)
    return subpackages, dependencies, pass_through_deps
def get_env_dependencies(m, env, variant, exclude_pattern=None,
                         permit_unsatisfiable_variants=False,
                         merge_build_host_on_same_platform=True):
    """Solve the dependencies of *env* ('build'/'host'/'run') for metadata *m*.

    Returns a 3-tuple of (pinned spec list, conda action plan, unsat) where *unsat*
    is a string describing unsatisfiable packages (or None).  When the environment
    is unsatisfiable and permit_unsatisfiable_variants is False, the solver
    exception propagates.
    """
    specs = m.get_depends_top_and_out(env)
    # 'x.x' placeholders in build/host specs mean "use the variant's version".
    if env in ('build', 'host'):
        no_xx_specs = []
        for spec in specs:
            if ' x.x' in spec:
                pkg_name = spec.split()[0]
                no_xx_specs.append(' '.join((pkg_name, variant.get(pkg_name, ""))))
            else:
                no_xx_specs.append(spec)
        specs = no_xx_specs
    subpackages, dependencies, pass_through_deps = _categorize_deps(m, specs, exclude_pattern, variant)
    dependencies = set(dependencies)
    unsat = None
    # Random suffix keeps concurrent solves from colliding on the scratch prefix.
    random_string = ''.join(random.choice(string.ascii_uppercase + string.digits)
                            for _ in range(10))
    with TemporaryDirectory(prefix="_", suffix=random_string) as tmpdir:
        try:
            actions = environ.get_install_actions(tmpdir, tuple(dependencies), env,
                                                  subdir=getattr(m.config, '{}_subdir'.format(env)),
                                                  debug=m.config.debug,
                                                  verbose=m.config.verbose,
                                                  locking=m.config.locking,
                                                  bldpkgs_dirs=tuple(m.config.bldpkgs_dirs),
                                                  timeout=m.config.timeout,
                                                  disable_pip=m.config.disable_pip,
                                                  max_env_retry=m.config.max_env_retry,
                                                  output_folder=m.config.output_folder,
                                                  channel_urls=tuple(m.config.channel_urls))
        except (UnsatisfiableError, DependencyNeedsBuildingError) as e:
            # we'll get here if the environment is unsatisfiable
            if hasattr(e, 'packages'):
                unsat = ', '.join(e.packages)
            else:
                unsat = e.message
            if permit_unsatisfiable_variants:
                actions = {}
            else:
                raise
    specs = actions_to_pins(actions)
    # Fall back to the raw requirements section when the solve produced nothing.
    return (utils.ensure_list((specs + subpackages + pass_through_deps) or
                              m.meta.get('requirements', {}).get(env, [])),
            actions, unsat)
def strip_channel(spec_str):
    """Drop any leading 'channel::' prefix from a match spec, decoding bytes first."""
    text = spec_str.decode() if hasattr(spec_str, 'decode') else spec_str
    if ':' not in text:
        return text
    return text.split("::")[-1]
def get_pin_from_build(m, dep, build_dep_versions):
    """Return *dep* tightened to a run pin derived from the resolved build env.

    Applies the variant's pin_run_as_build expressions (or the legacy 'numpy x.x'
    convention) using the version that was actually present at build time; deps
    without an applicable pin are returned unchanged.
    """
    dep_split = dep.split()
    dep_name = dep_split[0]
    build = ''
    if len(dep_split) >= 3:
        build = dep_split[2]
    pin = None
    # Prefer the concretely-resolved build version; fall back to the variant value.
    version = build_dep_versions.get(dep_name) or m.config.variant.get(dep_name)
    # python is never pinned for noarch packages -- by definition they span versions.
    if (version and dep_name in m.config.variant.get('pin_run_as_build', {}) and
            not (dep_name == 'python' and (m.noarch or m.noarch_python)) and
            dep_name in build_dep_versions):
        pin_cfg = m.config.variant['pin_run_as_build'][dep_name]
        if isinstance(pin_cfg, str):
            # An old-style string config means "use this expression for both bounds".
            pin_cfg = dict(min_pin=pin_cfg, max_pin=pin_cfg)
        pin = utils.apply_pin_expressions(version.split()[0], **pin_cfg)
    elif dep.startswith('numpy') and 'x.x' in dep:
        if not build_dep_versions.get(dep_name):
            raise ValueError("numpy x.x specified, but numpy not in build requirements.")
        pin = utils.apply_pin_expressions(version.split()[0], min_pin='x.x', max_pin='x.x')
    if pin:
        dep = " ".join((dep_name, pin, build)).strip()
    return dep
def _filter_run_exports(specs, ignore_list):
filtered_specs = {}
for agent, specs_list in specs.items():
for spec in specs_list:
if hasattr(spec, 'decode'):
spec = spec.decode()
if not any((ignore_spec == '*' or spec == ignore_spec or
spec.startswith(ignore_spec + ' ')) for ignore_spec in ignore_list):
filtered_specs[agent] = filtered_specs.get(agent, []) + [spec]
return filtered_specs
def find_pkg_dir_or_file_in_pkgs_dirs(pkg_dist, m, files_only=False):
    """Locate an extracted package dir or tarball for *pkg_dist* in the package caches.

    Searches conda's pkgs_dirs plus this build's bldpkgs_dirs.  With files_only=True,
    an extracted directory is re-archived into a tarball under croot/host_subdir so a
    file path can still be returned.  Returns None when nothing is found.
    """
    _pkgs_dirs = pkgs_dirs + list(m.config.bldpkgs_dirs)
    pkg_loc = None
    for pkgs_dir in _pkgs_dirs:
        pkg_dir = os.path.join(pkgs_dir, pkg_dist)
        pkg_file = os.path.join(pkgs_dir, pkg_dist + CONDA_TARBALL_EXTENSIONS[0])
        if not files_only and os.path.isdir(pkg_dir):
            pkg_loc = pkg_dir
            break
        elif os.path.isfile(pkg_file):
            pkg_loc = pkg_file
            break
        elif files_only and os.path.isdir(pkg_dir):
            # Only the extracted dir exists but a file was requested: re-pack it.
            pkg_loc = pkg_file
            with tarfile.open(pkg_file, 'w:bz2') as archive:
                for entry in os.listdir(pkg_dir):
                    archive.add(os.path.join(pkg_dir, entry), arcname=entry)
            pkg_subdir = os.path.join(m.config.croot, m.config.host_subdir)
            pkg_loc = os.path.join(pkg_subdir, os.path.basename(pkg_file))
            shutil.move(pkg_file, pkg_loc)
            # NOTE(review): unlike the other branches this one does not break, so a
            # later cache dir could overwrite pkg_loc -- confirm this is intended.
    return pkg_loc
@memoized
def _read_specs_from_package(pkg_loc, pkg_dist):
    """Read run_exports specs from a package at *pkg_loc* (extracted dir or tarball).

    Supports three on-disk formats: legacy plain-text info/run_exports (treated as
    'weak' exports), run_exports.yaml, and run_exports.json.  Returns a dict of
    export-kind -> spec list; empty dict when the package declares none.
    Memoized because the same package is queried repeatedly during a render.
    """
    specs = {}
    if pkg_loc and os.path.isdir(pkg_loc):
        downstream_file = os.path.join(pkg_loc, 'info/run_exports')
        if os.path.isfile(downstream_file):
            # Legacy format: one spec per line, all treated as weak exports.
            with open(downstream_file) as f:
                specs = {'weak': [spec.rstrip() for spec in f.readlines()]}
        elif os.path.isfile(downstream_file + '.yaml'):
            with open(downstream_file + '.yaml') as f:
                specs = yaml.safe_load(f)
        elif os.path.isfile(downstream_file + '.json'):
            with open(downstream_file + '.json') as f:
                specs = json.load(f)
    if not specs and pkg_loc and os.path.isfile(pkg_loc):
        # pkg_loc is a tarball: read the metadata files out of the archive.
        specs_yaml = utils.package_has_file(pkg_loc, 'info/run_exports.yaml')
        specs_json = utils.package_has_file(pkg_loc, 'info/run_exports.json')
        if hasattr(specs_json, "decode"):
            specs_json = specs_json.decode("utf-8")
        if specs_json:
            specs = json.loads(specs_json)
        elif specs_yaml:
            specs = yaml.safe_load(specs_yaml)
        else:
            legacy_specs = utils.package_has_file(pkg_loc, 'info/run_exports')
            if legacy_specs:
                weak_specs = set()
                if hasattr(pkg_dist, "decode"):
                    pkg_dist = pkg_dist.decode("utf-8")
                for spec in legacy_specs.splitlines():
                    if hasattr(spec, "decode"):
                        spec = spec.decode("utf-8")
                    # Skip self-referential exports (lines naming this package).
                    if not spec.startswith(pkg_dist.rsplit('-', 2)[0]):
                        weak_specs.add(spec.rstrip())
                specs = {'weak': sorted(list(weak_specs))}
    return specs
def execute_download_actions(m, actions, env, package_subset=None, require_files=False):
    """Ensure the packages in *actions* are present locally; return their locations.

    Returns a dict mapping each (possibly subset-filtered) LINK entry to a
    (pkg_loc, pkg_dist) tuple, where pkg_loc may be None if the package could not
    be located or fetched.
    """
    index, _, _ = get_build_index(getattr(m.config, '{}_subdir'.format(env)), bldpkgs_dir=m.config.bldpkgs_dir,
                                  output_folder=m.config.output_folder, channel_urls=m.config.channel_urls,
                                  debug=m.config.debug, verbose=m.config.verbose, locking=m.config.locking,
                                  timeout=m.config.timeout)
    # we read contents directly
    if 'FETCH' in actions or 'EXTRACT' in actions:
        # this is to force the download
        execute_actions(actions, index, verbose=m.config.debug)
    pkg_files = {}
    packages = actions.get('LINK', [])
    package_subset = utils.ensure_list(package_subset)
    selected_packages = set()
    if package_subset:
        # Narrow the linked packages to the requested subset; entries may be
        # record objects (matched by identity) or spec strings (matched by name).
        for pkg in package_subset:
            if hasattr(pkg, 'name'):
                if pkg in packages:
                    selected_packages.add(pkg)
            else:
                pkg_name = pkg.split()[0]
                for link_pkg in packages:
                    if pkg_name == link_pkg.name:
                        selected_packages.add(link_pkg)
                        break
        packages = selected_packages
    for pkg in packages:
        if hasattr(pkg, 'dist_name'):
            pkg_dist = pkg.dist_name
        else:
            pkg = strip_channel(pkg)
            pkg_dist = pkg.split(' ')[0]
        pkg_loc = find_pkg_dir_or_file_in_pkgs_dirs(pkg_dist, m, files_only=require_files)
        # ran through all pkgs_dirs, and did not find package or folder. Download it.
        # TODO: this is a vile hack reaching into conda's internals. Replace with
        #       a supported conda API for fetching packages when one exists.
        if not pkg_loc and conda_43:
            try:
                pkg_record = [_ for _ in index if _.dist_name == pkg_dist][0]
                pfe = ProgressiveFetchExtract(link_prefs=(index[pkg_record],))
            except TypeError:
                # Older conda 4.3 signature.
                pfe = ProgressiveFetchExtract(link_dists=[pkg], index=index)
            with utils.LoggingContext():
                pfe.execute()
            for pkg_dir in pkgs_dirs:
                _loc = os.path.join(pkg_dir, index[pkg].fn)
                if os.path.isfile(_loc):
                    pkg_loc = _loc
                    break
        pkg_files[pkg] = pkg_loc, pkg_dist
    return pkg_files
def get_upstream_pins(m, actions, env):
    """Collect run_exports published by this env's explicitly-requested packages.

    Only packages named directly in the recipe's requirements for *env* are
    consulted (transitive deps are skipped); their run_exports are looked up via
    channeldata and filtered through build/ignore_run_exports.  Returns a dict of
    export-kind -> spec list.
    """
    env_specs = m.meta.get('requirements', {}).get(env, [])
    explicit_specs = [req.split(' ')[0] for req in env_specs] if env_specs else []
    linked_packages = actions.get('LINK', [])
    linked_packages = [pkg for pkg in linked_packages if pkg.name in explicit_specs]
    ignore_list = utils.ensure_list(m.get_value('build/ignore_run_exports'))
    additional_specs = {}
    for pkg in linked_packages:
        channeldata = utils.download_channeldata(pkg.channel)
        run_exports = channeldata.get('packages', {}).get(pkg.name, {}).get('run_exports', {}).get(pkg.version, {})
        specs = _filter_run_exports(run_exports, ignore_list)
        if specs:
            additional_specs = utils.merge_dicts_of_lists(additional_specs, specs)
    return additional_specs
def _read_upstream_pin_files(m, env, permit_unsatisfiable_variants, exclude_pattern):
    """Solve *env* and gather its run_exports; return (deps, unsat, extra_run_specs)."""
    deps, actions, unsat = get_env_dependencies(m, env, m.config.variant,
                                                exclude_pattern,
                                                permit_unsatisfiable_variants=permit_unsatisfiable_variants)
    # vc feature activation can result in duplicates (hence the set()); fall back to
    # the raw requirements section when the solve yielded nothing.
    extra_run_specs = get_upstream_pins(m, actions, env)
    return list(set(deps)) or m.meta.get('requirements', {}).get(env, []), unsat, extra_run_specs
def add_upstream_pins(m, permit_unsatisfiable_variants, exclude_pattern):
    """Solve build/host envs and fold their run_exports into m.meta['requirements'].

    Mutates ``m.meta['requirements']`` in place (build, host and run sections) and
    returns a (build_unsat, host_unsat) tuple describing any unsatisfiable specs
    encountered while solving (falsy when everything resolved).
    """
    requirements = m.meta.get('requirements', {})
    build_deps, build_unsat, extra_run_specs_from_build = _read_upstream_pin_files(m, 'build',
                                            permit_unsatisfiable_variants, exclude_pattern)
    # is there a 'host' section?
    if m.is_cross:
        # this must come before we read upstream pins, because it will enforce things
        # like vc version from the compiler.
        host_reqs = utils.ensure_list(m.get_value('requirements/host'))
        # ensure host_reqs is present, so in-place modification below is actually in-place
        requirements = m.meta.setdefault('requirements', {})
        requirements['host'] = host_reqs
        if not host_reqs:
            # Fall back to the matching output's requirements when the top level
            # has no host section of its own.
            matching_output = [out for out in m.meta.get('outputs', []) if
                               out.get('name') == m.name()]
            if matching_output:
                requirements = utils.expand_reqs(matching_output[0].get('requirements', {}))
                matching_output[0]['requirements'] = requirements
                host_reqs = requirements.setdefault('host', [])
        # in-place modification of above thingie
        host_reqs.extend(extra_run_specs_from_build.get('strong', []))
        host_deps, host_unsat, extra_run_specs_from_host = _read_upstream_pin_files(m, 'host',
                                        permit_unsatisfiable_variants, exclude_pattern)
        extra_run_specs = set(extra_run_specs_from_host.get('strong', []) +
                              extra_run_specs_from_host.get('weak', []) +
                              extra_run_specs_from_build.get('strong', []))
    else:
        host_deps = []
        host_unsat = []
        extra_run_specs = set(extra_run_specs_from_build.get('strong', []))
        if m.build_is_host:
            # build env doubles as host: weak exports apply to run and build alike.
            extra_run_specs.update(extra_run_specs_from_build.get('weak', []))
            # BUG FIX: the original wrote ``build_deps = set(build_deps or
            # []).update(...)``, but set.update() returns None, so every build
            # dependency was silently discarded here and the 'build' section was
            # never updated below.  Build the set first, then update it.
            build_deps = set(build_deps or [])
            build_deps.update(extra_run_specs_from_build.get('weak', []))
        else:
            host_deps = set(extra_run_specs_from_build.get('strong', []))
    run_deps = extra_run_specs | set(utils.ensure_list(requirements.get('run')))
    for section, deps in (('build', build_deps), ('host', host_deps), ('run', run_deps)):
        if deps:
            requirements[section] = list(deps)
    m.meta['requirements'] = requirements
    return build_unsat, host_unsat
def _simplify_to_exact_constraints(metadata):
    """Collapse duplicate dependency specs per section into single entries.

    For each of build/host/run: a package listed several times keeps only its
    exact (version+build) pin if one exists; a package listed once with no
    constraint stays a bare name.  Conflicting exact pins raise ValueError.
    Mutates metadata.meta['requirements'] in place.
    """
    requirements = metadata.meta.get('requirements', {})
    # collect deps on a per-section basis
    for section in 'build', 'host', 'run':
        deps = utils.ensure_list(requirements.get(section, []))
        deps_dict = defaultdict(list)
        for dep in deps:
            spec_parts = utils.ensure_valid_spec(dep).split()
            name = spec_parts[0]
            if len(spec_parts) > 1:
                deps_dict[name].append(spec_parts[1:])
            else:
                deps_dict[name].append([])
        deps_list = []
        for name, values in deps_dict.items():
            exact_pins = []
            for dep in values:
                if len(dep) > 1:
                    version, build = dep[:2]
                    # An exact pin has a concrete version AND build (no wildcards
                    # or comparison operators).
                    if not (any(c in version for c in ('>', '<', '*')) or '*' in build):
                        exact_pins.append(dep)
            if len(values) == 1 and not any(values):
                # Single unconstrained occurrence: keep the bare name.
                deps_list.append(name)
            elif exact_pins:
                if not all(pin == exact_pins[0] for pin in exact_pins):
                    raise ValueError("Conflicting exact pins: {}".format(exact_pins))
                else:
                    deps_list.append(' '.join([name] + exact_pins[0]))
            else:
                deps_list.extend(' '.join([name] + dep) for dep in values if dep)
        if section in requirements and deps_list:
            requirements[section] = deps_list
    metadata.meta['requirements'] = requirements
def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=False):
    """Fully render *m*: solve its environments, apply upstream pins, and pin run deps.

    Returns the finalized metadata object (m.final is True unless some environment
    was unsatisfiable and permit_unsatisfiable_variants allowed it through).
    """
    if not parent_metadata:
        parent_metadata = m
    if m.skip():
        m.final = True
    else:
        # Build a regex of package names whose versions should NOT participate in
        # pinning: 'ignore_version' entries (minus any pin_run_as_build overrides)
        # plus sibling outputs of this recipe.
        exclude_pattern = None
        excludes = set(m.config.variant.get('ignore_version', []))
        for key in m.config.variant.get('pin_run_as_build', {}).keys():
            if key in excludes:
                excludes.remove(key)
        output_excludes = set()
        if hasattr(m, 'other_outputs'):
            output_excludes = set(name for (name, variant) in m.other_outputs.keys())
        if excludes or output_excludes:
            exclude_pattern = re.compile(r'|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                          for exc in excludes | output_excludes))
        parent_recipe = m.meta.get('extra', {}).get('parent_recipe', {})
        # extract the topmost section where variables are defined, and put it on top of the
        # requirements for a particular output
        # Re-parse the output from the original recipe, so that we re-consider any jinja2 stuff
        output = parent_metadata.get_rendered_output(m.name(), variant=m.config.variant)
        is_top_level = True
        if output:
            if 'package' in output or 'name' not in output:
                # it's just a top-level recipe
                output = {'name': m.name()}
            else:
                is_top_level = False
            if not parent_recipe or parent_recipe['name'] == m.name():
                combine_top_level_metadata_with_output(m, output)
            requirements = utils.expand_reqs(output.get('requirements', {}))
            m.meta['requirements'] = requirements
        if m.meta.get('requirements'):
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'build')
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'host')
        m = parent_metadata.get_output_metadata(m.get_rendered_output(m.name()))
        build_unsat, host_unsat = add_upstream_pins(m,
                                                    permit_unsatisfiable_variants,
                                                    exclude_pattern)
        requirements = m.meta.get('requirements', {})
        # on the keys in the 'pin_run_as_build' key in the variant, which is a list of package
        # names to have this behavior.
        if output_excludes:
            exclude_pattern = re.compile(r'|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                          for exc in output_excludes))
        pinning_env = 'host' if m.is_cross else 'build'
        build_reqs = requirements.get(pinning_env, [])
        # if python is in the build specs, but doesn't have a specific associated
        # version, make sure to add one from the variant so the solve is pinned.
        if build_reqs and 'python' in build_reqs:
            build_reqs.append('python {}'.format(m.config.variant['python']))
            m.meta['requirements'][pinning_env] = build_reqs
        full_build_deps, _, _ = get_env_dependencies(m, pinning_env,
                                                     m.config.variant,
                                                     exclude_pattern=exclude_pattern,
                                    permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        full_build_dep_versions = {dep.split()[0]: " ".join(dep.split()[1:])
                                   for dep in full_build_deps}
        if isfile(m.requirements_path) and not requirements.get('run'):
            requirements['run'] = specs_from_url(m.requirements_path)
        run_deps = requirements.get('run', [])
        # Tighten run deps to the versions actually resolved in the pinning env.
        versioned_run_deps = [get_pin_from_build(m, dep, full_build_dep_versions)
                              for dep in run_deps]
        versioned_run_deps = [utils.ensure_valid_spec(spec, warn=True)
                              for spec in versioned_run_deps]
        requirements[pinning_env] = full_build_deps
        requirements['run'] = versioned_run_deps
        m.meta['requirements'] = requirements
        m.append_requirements()
        if m.pin_depends == 'strict':
            m.meta['requirements']['run'] = environ.get_pinned_deps(
                m, 'run')
        test_deps = m.get_value('test/requires')
        if test_deps:
            versioned_test_deps = list({get_pin_from_build(m, dep, full_build_dep_versions)
                                        for dep in test_deps})
            versioned_test_deps = [utils.ensure_valid_spec(spec, warn=True)
                                   for spec in versioned_test_deps]
            m.meta['test']['requires'] = versioned_test_deps
        extra = m.meta.get('extra', {})
        extra['copy_test_source_files'] = m.config.copy_test_source_files
        m.meta['extra'] = extra
        # Make source paths absolute so the finalized recipe can be rebuilt on a
        # system other than the original build machine, but at least it will work there.
        if m.meta.get('source'):
            if 'path' in m.meta['source']:
                source_path = m.meta['source']['path']
                # NOTE(review): expanduser's return value is discarded here, so '~'
                # is never actually expanded -- confirm whether this is intentional.
                os.path.expanduser(source_path)
                if not os.path.isabs(source_path):
                    m.meta['source']['path'] = os.path.normpath(
                        os.path.join(m.path, source_path))
            elif ('git_url' in m.meta['source'] and not (
                    # absolute paths are not relative paths
                    os.path.isabs(m.meta['source']['git_url']) or
                    # real urls are not relative paths
                    ":" in m.meta['source']['git_url'])):
                m.meta['source']['git_url'] = os.path.normpath(
                    os.path.join(m.path, m.meta['source']['git_url']))
        if not m.meta.get('build'):
            m.meta['build'] = {}
        _simplify_to_exact_constraints(m)
        if build_unsat or host_unsat:
            m.final = False
            log = utils.get_logger(__name__)
            log.warn("Returning non-final recipe for {}; one or more dependencies "
                     "was unsatisfiable:".format(m.dist()))
            if build_unsat:
                log.warn("Build: {}".format(build_unsat))
            if host_unsat:
                log.warn("Host: {}".format(host_unsat))
        else:
            m.final = True
    # NOTE(review): is_top_level is only bound in the else branch above, so a
    # skipped recipe reaching this line would raise NameError; also the rebind of
    # the local parent_metadata has no effect outside this function -- confirm.
    if is_top_level:
        parent_metadata = m
    return m
def try_download(metadata, no_download_source, raise_error=False):
    """Attempt to fetch/patch the recipe's source if it is not already provided.

    A failed download is only a warning by default (the build may retry after
    installing recipe dependencies); with raise_error=True it becomes fatal.
    Raises ValueError when source is required but downloads are disabled.
    """
    if not metadata.source_provided and not no_download_source:
        # this try/catch is for when the tool to download source is actually in
        # meta.yaml, and not previously installed in builder env.
        try:
            source.provide(metadata)
        except subprocess.CalledProcessError as error:
            print("Warning: failed to download source.  If building, will try "
                  "again after downloading recipe dependencies.")
            print("Error was: ")
            print(error)
    if not metadata.source_provided:
        if no_download_source:
            raise ValueError("no_download_source specified, but can't fully render recipe without"
                             " downloading source.  Please fix the recipe, or don't use "
                             "no_download_source.")
        elif raise_error:
            raise RuntimeError("Failed to download or patch source. Please see build log for info.")
def reparse(metadata):
    """Re-parse and re-finalize *metadata* with the build/host envs on sys.path.

    Used after environments are created, so jinja2 in meta.yaml can import tools
    that live in those prefixes.  Returns the newly finalized metadata.
    """
    metadata.final = False
    # Make prefixes importable; host site-packages goes first so recipe-side
    # imports resolve against the host python.
    sys.path.insert(0, metadata.config.build_prefix)
    sys.path.insert(0, metadata.config.host_prefix)
    py_ver = '.'.join(metadata.config.variant['python'].split('.')[:2])
    sys.path.insert(0, utils.get_site_packages(metadata.config.host_prefix, py_ver))
    metadata.parse_until_resolved()
    metadata = finalize_metadata(metadata)
    return metadata
def distribute_variants(metadata, variants, permit_unsatisfiable_variants=False,
                        allow_no_other_outputs=False, bypass_env_check=False):
    """Expand *metadata* into one rendered copy per effective variant.

    Returns a list of (metadata, need_source_download, need_reparse_in_env)
    tuples, de-duplicated by (dist, target_platform, used-variant values).
    """
    rendered_metadata = {}
    need_source_download = True
    # don't bother distributing python if it's a noarch package
    if metadata.noarch or metadata.noarch_python:
        variants = filter_by_key_value(variants, 'python', variants[0]['python'],
                                       'noarch_reduction')
    # store these for reference later
    metadata.config.variants = variants
    # These are always the full set. just 'variants' is the one that gets
    # used mostly, and can be reduced
    metadata.config.input_variants = variants
    recipe_requirements = metadata.extract_requirements_text()
    recipe_package_and_build_text = metadata.extract_package_and_build_text()
    recipe_text = recipe_package_and_build_text + recipe_requirements
    if PY3 and hasattr(recipe_text, 'decode'):
        recipe_text = recipe_text.decode()
    elif not PY3 and hasattr(recipe_text, 'encode'):
        recipe_text = recipe_text.encode()
    metadata.config.variant = variants[0]
    used_variables = metadata.get_used_loop_vars(force_global=False)
    top_loop = metadata.get_reduced_variant_set(used_variables)
    for variant in top_loop:
        # Each effective variant gets its own metadata copy to mutate.
        mv = metadata.copy()
        mv.config.variant = variant
        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}
        conform_dict = {}
        for key in used_variables:
            # We use this variant in the top-level recipe.
            # constrain the stored variants to only this version in the output
            # variant mapping
            conform_dict[key] = variant[key]
        for key, values in conform_dict.items():
            mv.config.variants = (filter_by_key_value(mv.config.variants, key, values,
                                                      'distribute_variants_reduction') or
                                  mv.config.variants)
        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}
        numpy_pinned_variants = []
        for _variant in mv.config.variants:
            _variant['pin_run_as_build'] = pin_run_as_build
            numpy_pinned_variants.append(_variant)
        mv.config.variants = numpy_pinned_variants
        mv.config.squished_variants = list_of_dicts_to_dict_of_lists(mv.config.variants)
        if mv.needs_source_for_render and mv.variant_in_source:
            # Variant values live in the source tree: fetch it and parse again.
            mv.parse_again()
            utils.rm_rf(mv.config.work_dir)
            source.provide(mv)
            mv.parse_again()
        try:
            mv.parse_until_resolved(allow_no_other_outputs=allow_no_other_outputs,
                                    bypass_env_check=bypass_env_check)
        except SystemExit:
            pass
        need_source_download = (not mv.needs_source_for_render or not mv.source_provided)
        rendered_metadata[(mv.dist(),
                           mv.config.variant.get('target_platform', mv.config.subdir),
                           tuple((var, mv.config.variant.get(var))
                                 for var in mv.get_used_vars()))] = \
            (mv, need_source_download, None)
    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    return list(rendered_metadata.values())
def expand_outputs(metadata_tuples):
    """Expand rendered (metadata, download, reparse) tuples into (output_dict, metadata)
    pairs, de-duplicated by dist name while preserving first-seen order."""
    collected = OrderedDict()
    for meta, _need_download, _need_reparse in metadata_tuples:
        output_set = meta.copy().get_output_metadata_set(permit_unsatisfiable_variants=False)
        for output_dict, output_meta in output_set:
            collected[output_meta.dist()] = (output_dict, output_meta)
    return list(collected.values())
def render_recipe(recipe_path, config, no_download_source=False, variants=None,
                  permit_unsatisfiable_variants=True, reset_build_id=True, bypass_env_check=False):
    """Load and render the recipe at *recipe_path* (a directory, tarball, or yaml file).

    Returns a list of (metadata, need_download, need_reparse_in_env) tuples, one per
    variant; for a non-recipe file input, returns (None, None).
    """
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')
    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            # Archived recipe: unpack into a temp dir and clean it up afterwards.
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return None, None
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False
    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)
    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)
    rendered_metadata = {}
    if config.set_build_id:
        m.config.compute_build_id(m.name(), reset=reset_build_id)
    # There's no way around it AFAICT. We must download the source to be able to render
    # folder until rendering is complete, because package names can have variant jinja2 in them.
    if m.needs_source_for_render and not m.source_provided:
        try_download(m, no_download_source=no_download_source)
    if m.final:
        # Already-finalized metadata: just ensure a variant is attached.
        if not hasattr(m.config, 'variants') or not m.config.variant:
            m.config.ignore_system_variants = True
            if os.path.isfile(os.path.join(m.path, 'conda_build_config.yaml')):
                m.config.variant_config_files = [os.path.join(m.path, 'conda_build_config.yaml')]
            m.config.variants = get_package_variants(m, variants=variants)
            m.config.variant = m.config.variants[0]
        rendered_metadata = [(m, False, False), ]
    else:
        # merge any passed-in variants with any files found
        variants = get_package_variants(m, variants=variants)
        # when building, we don't want to fully expand all outputs into metadata, only expand
        rendered_metadata = distribute_variants(m, variants,
                                    permit_unsatisfiable_variants=permit_unsatisfiable_variants,
                                    allow_no_other_outputs=True, bypass_env_check=bypass_env_check)
    if need_cleanup:
        utils.rm_rf(recipe_dir)
    return rendered_metadata
FIELDS = ["package", "source", "build", "requirements", "test", "app", "outputs", "about", "extra"]
class _MetaYaml(dict):
fields = FIELDS
def to_omap(self):
return [(field, self[field]) for field in _MetaYaml.fields if field in self]
def _represent_omap(dumper, data):
return dumper.represent_mapping(u'tag:yaml.org,2002:map', data.to_omap())
def _unicode_representer(dumper, uni):
    """Represent a text string as a plain YAML string scalar (*dumper* is unused)."""
    return yaml.ScalarNode(tag=u'tag:yaml.org,2002:str', value=uni)
class _IndentDumper(yaml.Dumper):
    """yaml.Dumper that always indents nested sequences and never emits aliases."""
    def increase_indent(self, flow=False, indentless=False):
        # Force indentless=False so list items under a mapping key are indented.
        return super(_IndentDumper, self).increase_indent(flow, False)
    def ignore_aliases(self, data):
        # Repeated objects are written out in full instead of as '&anchor'/'*alias'.
        return True
# Register the meta.yaml mapping representer, plus a plain-string representer for
# the text type of the running Python.  On py3 'unicode' is bound to None purely
# so later references to the name don't raise NameError.
yaml.add_representer(_MetaYaml, _represent_omap)
if PY3:
    yaml.add_representer(str, _unicode_representer)
    unicode = None
else:
    yaml.add_representer(unicode, _unicode_representer)
def output_yaml(metadata, filename=None, suppress_outputs=False):
    """Dump *metadata* as canonical meta.yaml text; optionally write it to *filename*.

    Returns the YAML string, or a "Wrote yaml to ..." message when a filename was
    given.  With suppress_outputs=True, the 'outputs' section is dropped from an
    output's own metadata.  Works on a copy; *metadata* itself is not mutated.
    """
    local_metadata = metadata.copy()
    utils.trim_empty_keys(local_metadata.meta)
    if suppress_outputs and local_metadata.is_output and 'outputs' in local_metadata.meta:
        del local_metadata.meta['outputs']
    output = yaml.dump(_MetaYaml(local_metadata.meta), Dumper=_IndentDumper,
                       default_flow_style=False, indent=4)
    if filename:
        # Create parent directories when the target includes a path component;
        # the bare OSError pass tolerates the directory already existing.
        if any(sep in filename for sep in ('\\', '/')):
            try:
                os.makedirs(os.path.dirname(filename))
            except OSError:
                pass
        with open(filename, "w") as f:
            f.write(output)
        return "Wrote yaml to %s" % filename
    else:
        return output
| true | true |
f71fa02523d9f3e25a04474d5b9b67ff8827679a | 346 | py | Python | .history/routes_20200723125644.py | rkustas/taskmanager | 3218b277a235c4e8d30b1d548ba28be3ab3f628f | [
"MIT"
] | null | null | null | .history/routes_20200723125644.py | rkustas/taskmanager | 3218b277a235c4e8d30b1d548ba28be3ab3f628f | [
"MIT"
] | null | null | null | .history/routes_20200723125644.py | rkustas/taskmanager | 3218b277a235c4e8d30b1d548ba28be3ab3f628f | [
"MIT"
] | null | null | null | from app import app
from flask import render_template
import forms
# Basic route
@app.route('/')
@app.route('/index')
def index():
    """Render the home page, reachable at both '/' and '/index'."""
    page_title = 'Custom Title'
    return render_template('index.html', current_title=page_title)
@app.route('/about', methods=['GET','POST'])
def about():
    """Serve the about page; accepts GET and POST and instantiates the add-task form."""
    form = forms.AddTaskForm()
return render_template('about.html', form=form) | 23.066667 | 71 | 0.702312 | from app import app
from flask import render_template
import forms
@app.route('/')
@app.route('/index')
def index():
    """Serve the landing page at either '/' or '/index'."""
    return render_template(
        'index.html',
        current_title='Custom Title',
    )
@app.route('/about', methods=['GET','POST'])
def about():
    """Serve the about page; accepts GET and POST and builds the add-task form."""
    form = forms.AddTaskForm()
return render_template('about.html', form=form) | true | true |
f71fa109caf8f0da6ce9758d0b1182b0c641a35c | 2,136 | py | Python | python/cuml/test/test_trustworthiness.py | efajardo-nv/cuml | bc86714836284ed4752c267513e5d447e884e1c5 | [
"Apache-2.0"
] | 3 | 2019-10-17T21:46:07.000Z | 2019-10-22T20:13:55.000Z | python/cuml/test/test_trustworthiness.py | efajardo-nv/cuml | bc86714836284ed4752c267513e5d447e884e1c5 | [
"Apache-2.0"
] | 1 | 2020-02-03T22:43:57.000Z | 2020-02-29T02:32:40.000Z | python/cuml/test/test_trustworthiness.py | efajardo-nv/cuml | bc86714836284ed4752c267513e5d447e884e1c5 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2018-2019, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from sklearn.manifold.t_sne import trustworthiness as sklearn_trustworthiness
from cuml.metrics import trustworthiness as cuml_trustworthiness
from sklearn.datasets.samples_generator import make_blobs
from umap import UMAP
import cudf
import numpy as np
@pytest.mark.parametrize('input_type', ['ndarray'])
@pytest.mark.parametrize('n_samples', [10, 100])
@pytest.mark.parametrize('n_features', [10, 100])
@pytest.mark.parametrize('n_components', [2, 8])
def test_trustworthiness(input_type, n_samples, n_features, n_components):
    """cuML's trustworthiness metric should agree with scikit-learn's to within 0.1%.

    Embeds blob data with UMAP, scores the embedding with both libraries, and
    compares.  The 'dataframe' branch exists but is currently not exercised
    (input_type is parametrized with 'ndarray' only).
    """
    centers = round(n_samples*0.4)
    X, y = make_blobs(n_samples=n_samples, centers=centers,
                      n_features=n_features)
    X_embedded = \
        UMAP(n_components=n_components).fit_transform(X)
    # cuML expects float32 inputs.
    X = X.astype(np.float32)
    X_embedded = X_embedded.astype(np.float32)
    if input_type == 'dataframe':
        # Copy the arrays column-by-column into GPU dataframes.
        gdf = cudf.DataFrame()
        for i in range(X.shape[1]):
            gdf[str(i)] = np.asarray(X[:, i], dtype=np.float32)
        gdf_embedded = cudf.DataFrame()
        for i in range(X_embedded.shape[1]):
            gdf_embedded[str(i)] = np.asarray(X_embedded[:, i],
                                              dtype=np.float32)
        score = cuml_trustworthiness(gdf, gdf_embedded)
    else:
        score = cuml_trustworthiness(X, X_embedded)
    sk_score = sklearn_trustworthiness(X, X_embedded)
    # Allow a 0.1% relative tolerance between the two implementations.
    eps = 0.001
    assert (sk_score * (1 - eps) <= score and
            score <= sk_score * (1 + eps))
    # assert cu_score == sk_score ideally
| 35.6 | 77 | 0.690543 |
import pytest
from sklearn.manifold.t_sne import trustworthiness as sklearn_trustworthiness
from cuml.metrics import trustworthiness as cuml_trustworthiness
from sklearn.datasets.samples_generator import make_blobs
from umap import UMAP
import cudf
import numpy as np
@pytest.mark.parametrize('input_type', ['ndarray'])
@pytest.mark.parametrize('n_samples', [10, 100])
@pytest.mark.parametrize('n_features', [10, 100])
@pytest.mark.parametrize('n_components', [2, 8])
def test_trustworthiness(input_type, n_samples, n_features, n_components):
    """cuML trustworthiness should match scikit-learn's within 0.1% relative tolerance."""
    n_centers = round(n_samples * 0.4)
    X, y = make_blobs(n_samples=n_samples, centers=n_centers,
                      n_features=n_features)
    embedding = UMAP(n_components=n_components).fit_transform(X)

    # cuML wants float32 inputs.
    X = X.astype(np.float32)
    embedding = embedding.astype(np.float32)

    if input_type == 'dataframe':
        # Transfer both matrices column-by-column into GPU dataframes.
        gdf = cudf.DataFrame()
        for col in range(X.shape[1]):
            gdf[str(col)] = np.asarray(X[:, col], dtype=np.float32)
        gdf_embedding = cudf.DataFrame()
        for col in range(embedding.shape[1]):
            gdf_embedding[str(col)] = np.asarray(embedding[:, col],
                                                 dtype=np.float32)
        cu_score = cuml_trustworthiness(gdf, gdf_embedding)
    else:
        cu_score = cuml_trustworthiness(X, embedding)

    sk_score = sklearn_trustworthiness(X, embedding)

    eps = 0.001
    assert sk_score * (1 - eps) <= cu_score <= sk_score * (1 + eps)
| true | true |
f71fa140056dc835bd2625ea657d951e01d571d7 | 7,163 | py | Python | bindings/python/ensmallen_graph/datasets/networkrepository/socfbucsc68.py | caufieldjh/ensmallen_graph | 14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a | [
"MIT"
] | null | null | null | bindings/python/ensmallen_graph/datasets/networkrepository/socfbucsc68.py | caufieldjh/ensmallen_graph | 14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a | [
"MIT"
] | null | null | null | bindings/python/ensmallen_graph/datasets/networkrepository/socfbucsc68.py | caufieldjh/ensmallen_graph | 14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a | [
"MIT"
] | null | null | null | """
This file offers the methods to automatically retrieve the graph socfb-UCSC68.
The graph is automatically retrieved from the NetworkRepository repository.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-06 11:50:55.897921
The undirected graph socfb-UCSC68 has 8991 nodes and 224584 unweighted
edges, of which none are self-loops. The graph is sparse as it has a density
of 0.00556 and has 7 connected components, where the component with most
nodes has 8979 nodes and the component with the least nodes has 2 nodes.
The graph median node degree is 39, the mean node degree is 49.96, and
the node degree mode is 1. The top 5 most central nodes are 2840 (degree
454), 7542 (degree 400), 4763 (degree 329), 692 (degree 323) and 2949 (degree
315).
References
---------------------
Please cite the following if you use the data:
@inproceedings{nr,
title = {The Network Data Repository with Interactive Graph Analytics and Visualization},
author={Ryan A. Rossi and Nesreen K. Ahmed},
booktitle = {AAAI},
url={http://networkrepository.com},
year={2015}
}
@article{traud2012social,
title={Social structure of {F}acebook networks},
author={Traud, Amanda L and Mucha, Peter J and Porter, Mason A},
journal={Phys. A},
month={Aug},
number={16},
pages={4165--4180},
volume={391},
year={2012}
}
@article{Traud:2011fs,
title={Comparing Community Structure to Characteristics in Online Collegiate Social Networks},
author={Traud, Amanda L and Kelsic, Eric D and Mucha, Peter J and Porter, Mason A},
journal={SIAM Rev.},
number={3},
pages={526--543},
volume={53},
year={2011}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.networkrepository import SocfbUcsc68
# Then load the graph
graph = SocfbUcsc68()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
# You can use an 80/20 split the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
# Wether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen_graph import EnsmallenGraph # pylint: disable=import-error
def SocfbUcsc68(
directed: bool = False,
verbose: int = 2,
cache_path: str = "graphs/networkrepository",
**additional_graph_kwargs: Dict
) -> EnsmallenGraph:
"""Return new instance of the socfb-UCSC68 graph.
The graph is automatically retrieved from the NetworkRepository repository.
Parameters
-------------------
directed: bool = False,
Wether to load the graph as directed or undirected.
By default false.
verbose: int = 2,
Wether to show loading bars during the retrieval and building
of the graph.
cache_path: str = "graphs",
Where to store the downloaded graphs.
additional_graph_kwargs: Dict,
Additional graph kwargs.
Returns
-----------------------
Instace of socfb-UCSC68 graph.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-06 11:50:55.897921
The undirected graph socfb-UCSC68 has 8991 nodes and 224584 unweighted
edges, of which none are self-loops. The graph is sparse as it has a density
of 0.00556 and has 7 connected components, where the component with most
nodes has 8979 nodes and the component with the least nodes has 2 nodes.
The graph median node degree is 39, the mean node degree is 49.96, and
the node degree mode is 1. The top 5 most central nodes are 2840 (degree
454), 7542 (degree 400), 4763 (degree 329), 692 (degree 323) and 2949 (degree
315).
References
---------------------
Please cite the following if you use the data:
@inproceedings{nr,
title = {The Network Data Repository with Interactive Graph Analytics and Visualization},
author={Ryan A. Rossi and Nesreen K. Ahmed},
booktitle = {AAAI},
url={http://networkrepository.com},
year={2015}
}
@article{traud2012social,
title={Social structure of {F}acebook networks},
author={Traud, Amanda L and Mucha, Peter J and Porter, Mason A},
journal={Phys. A},
month={Aug},
number={16},
pages={4165--4180},
volume={391},
year={2012}
}
@article{Traud:2011fs,
title={Comparing Community Structure to Characteristics in Online Collegiate Social Networks},
author={Traud, Amanda L and Kelsic, Eric D and Mucha, Peter J and Porter, Mason A},
journal={SIAM Rev.},
number={3},
pages={526--543},
volume={53},
year={2011}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.networkrepository import SocfbUcsc68
# Then load the graph
graph = SocfbUcsc68()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
# You can use an 80/20 split the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
# Wether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
return AutomaticallyRetrievedGraph(
graph_name="SocfbUcsc68",
dataset="networkrepository",
directed=directed,
verbose=verbose,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
| 31.835556 | 103 | 0.668993 | from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen_graph import EnsmallenGraph
def SocfbUcsc68(
directed: bool = False,
verbose: int = 2,
cache_path: str = "graphs/networkrepository",
**additional_graph_kwargs: Dict
) -> EnsmallenGraph:
return AutomaticallyRetrievedGraph(
graph_name="SocfbUcsc68",
dataset="networkrepository",
directed=directed,
verbose=verbose,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
| true | true |
f71fa2527a6d8ee637811cf737524b29b2058b63 | 8,210 | py | Python | var/spack/repos/builtin/packages/graphviz/package.py | varioustoxins/spack | cab0e4cb240f34891a6d753f3393e512f9a99e9a | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | var/spack/repos/builtin/packages/graphviz/package.py | varioustoxins/spack | cab0e4cb240f34891a6d753f3393e512f9a99e9a | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 6 | 2022-01-08T08:41:11.000Z | 2022-03-14T19:28:07.000Z | var/spack/repos/builtin/packages/graphviz/package.py | foeroyingur/spack | 5300cbbb2e569190015c72d0970d25425ea38647 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import sys
from spack import *
from spack.operating_systems.mac_os import macos_version
MACOS_VERSION = macos_version() if sys.platform == 'darwin' else None
class Graphviz(AutotoolsPackage):
"""Graph Visualization Software"""
homepage = 'http://www.graphviz.org'
git = 'https://gitlab.com/graphviz/graphviz.git'
url = 'https://gitlab.com/graphviz/graphviz/-/archive/2.46.0/graphviz-2.46.0.tar.bz2'
version('2.49.0', sha256='b129555743bb9bfb7b63c55825da51763b2f1ee7c0eaa6234a42a61a3aff6cc9')
version('2.47.2', sha256='b5ebb00d4283c6d12cf16b2323e1820b535cc3823c8f261b783f7903b1d5b7fb')
version('2.46.0', sha256='1b11684fd5488940b45bf4624393140da6032abafae08f33dc3e986cffd55d71')
version('2.44.1', sha256='0f8f3fbeaddd474e0a270dc9bb0e247a1ae4284ae35125af4adceffae5c7ae9b')
version('2.42.4', sha256='a1ca0c4273d96bbf32fbfcbb784c8da2e38da13e7d2bbf9b24fe94ae45e79c4c')
version('2.40.1', sha256='581596aaeac5dae3f57da6ecde62ad7709a992df341e8f7c6177b41e8b1ae4f6')
version('2.38.0', sha256='c1b1e326b5d1f45b0ce91edd7acc68e80ff6be6b470008766e4d466aafc9801f', deprecated=True)
# Language bindings
language_bindings = ['java']
# Additional language bindings are nominally supported by GraphViz via SWIG
# but are untested and need the proper dependencies added:
# language_bindings += ['sharp', 'go', 'guile', 'io', 'lua', 'ocaml',
# 'perl', 'php', 'python', 'r', 'ruby', 'tcl']
for lang in language_bindings:
variant(lang, default=False,
description='Enable for optional {0} language '
'bindings'.format(lang))
# Feature variants
variant('doc', default=False,
description='Build and install graphviz documentation')
variant('expat', default=False,
description='Build with Expat support (enables HTML-like labels)')
variant('gts', default=False,
description='Build with GNU Triangulated Surface Library')
variant('ghostscript', default=False,
description='Build with Ghostscript support')
variant('gtkplus', default=False,
description='Build with GTK+ support')
variant('libgd', default=False,
description='Build with libgd support (more output formats)')
variant('pangocairo', default=False,
description='Build with pango+cairo support (more output formats)')
variant('poppler', default=False,
description='Build with poppler support (pdf formats)')
variant('qt', default=False,
description='Build with Qt support')
variant('quartz', default=(MACOS_VERSION is not None),
description='Build with Quartz and PDF support')
variant('x', default=False,
description='Use the X Window System')
patch('https://www.linuxfromscratch.org/patches/blfs/9.0/graphviz-2.40.1-qt5-1.patch',
sha256='bd532df325df811713e311d17aaeac3f5d6075ea4fd0eae8d989391e6afba930',
when='@:2.40+qt^qt@5:')
patch('https://raw.githubusercontent.com/easybuilders/easybuild-easyconfigs/master/easybuild/easyconfigs/g/Graphviz/Graphviz-2.38.0_icc_sfio.patch',
sha256='393a0a772315a89dcc970b5efd4765d22dba83493d7956303673eb89c45b949f',
level=0,
when='@:2.40%intel')
patch('https://raw.githubusercontent.com/easybuilders/easybuild-easyconfigs/master/easybuild/easyconfigs/g/Graphviz/Graphviz-2.40.1_icc_vmalloc.patch',
sha256='813e6529e79161a18b0f24a969b7de22f8417b2e942239e658b5402884541bc2',
when='@:2.40%intel')
patch('ps2pdf.patch', when='@:2.45')
patch('implicit.patch', level=0, when='@:2.44.0')
if not MACOS_VERSION:
conflicts('+quartz',
msg="Graphviz can only be build with Quartz on macOS.")
elif MACOS_VERSION >= Version('10.9'):
# Doesn't detect newer mac os systems as being new
patch('fix-quartz-darwin.patch', when='@:2.47.2')
# Language dependencies
for lang in language_bindings:
depends_on('swig', when=('+' + lang))
depends_on(lang, when=('+' + lang))
# Feature dependencies
depends_on('zlib')
depends_on('groff', type='build', when='+doc')
depends_on('ghostscript', type='build', when='+doc')
depends_on('expat', when='+expat')
depends_on('libgd', when='+libgd')
depends_on('fontconfig', when='+libgd')
depends_on('freetype', when='+libgd')
depends_on('ghostscript', when='+ghostscript')
depends_on('gtkplus', when='+gtkplus')
depends_on('gts', when='+gts')
depends_on('cairo+pdf+png+svg', when='+pangocairo')
depends_on('fontconfig', when='+pangocairo')
depends_on('freetype', when='+pangocairo')
depends_on('glib', when='+pangocairo')
depends_on('libpng', when='+pangocairo')
depends_on('pango', when='+pangocairo')
depends_on('poppler+glib', when='+poppler')
depends_on('qt', when='+qt')
depends_on('libx11', when="+x")
# Build dependencies (graphviz binaries don't include configure file)
depends_on('automake', type='build')
depends_on('autoconf', type='build')
depends_on('bison@3.0.4:', type='build')
depends_on('flex', type='build')
depends_on('sed', type='build')
depends_on('libtool', type='build')
depends_on('pkgconfig', type='build')
# to process f-strings used in gen_version.py
depends_on('python@3.6:', when='@2.47:', type='build')
conflicts('~doc',
when='@:2.45',
msg='graphviz always builds documentation below version 2.46')
conflicts('%gcc@:5.9',
when='@2.40.1+qt ^qt@5:',
msg='graphviz-2.40.1 needs gcc-6 or greater to compile with QT5 '
'suppport')
def autoreconf(self, spec, prefix):
# We need to generate 'configure' when checking out sources from git
# If configure exists nothing needs to be done
if os.path.exists(self.configure_abs_path):
return
# Else bootstrap (disabling auto-configure with NOCONFIG)
bash = which('bash')
bash('./autogen.sh', 'NOCONFIG')
def setup_build_environment(self, env):
if '+quartz' in self.spec:
env.set('OBJC', self.compiler.cc)
@when('%clang platform=darwin')
def patch(self):
# When using Clang, replace GCC's libstdc++ with LLVM's libc++
mkdirs = ['cmd/dot', 'cmd/edgepaint', 'cmd/mingle', 'plugin/gdiplus']
filter_file(r'-lstdc\+\+', '-lc++', 'configure.ac',
*(d + '/Makefile.am' for d in mkdirs))
@when('%apple-clang')
def patch(self):
# When using Clang, replace GCC's libstdc++ with LLVM's libc++
mkdirs = ['cmd/dot', 'cmd/edgepaint', 'cmd/mingle', 'plugin/gdiplus']
filter_file(r'-lstdc\+\+', '-lc++', 'configure.ac',
*(d + '/Makefile.am' for d in mkdirs))
def configure_args(self):
spec = self.spec
args = ['--disable-silent-rules']
use_swig = False
for lang in self.language_bindings:
if '+' + lang in spec:
use_swig = True
args.append('--enable-' + lang)
args.append('--{0}-swig'.format('enable' if use_swig else 'disable'))
for var in ["expat", "gts", "ghostscript", "libgd", "pangocairo",
"poppler", "qt", "quartz", "x"]:
args += self.with_or_without(var)
for var in ["zlib", "expat", "java"]:
if '+' + var in spec:
args.append('--with-{0}includedir={1}'.format(
var, spec[var].prefix.include))
args.append('--with-{0}libdir={1}'.format(
var, spec[var].prefix.lib))
args.append('--{0}-gtk'.format(
"with" if "+gtkplus" in spec else "without"))
if spec.version >= Version('2.46'):
args.append('--{0}-man-pdfs'.format(
'enable' if '+doc' in spec else 'disable'))
return args
| 43.439153 | 155 | 0.637881 |
import os
import sys
from spack import *
from spack.operating_systems.mac_os import macos_version
MACOS_VERSION = macos_version() if sys.platform == 'darwin' else None
class Graphviz(AutotoolsPackage):
homepage = 'http://www.graphviz.org'
git = 'https://gitlab.com/graphviz/graphviz.git'
url = 'https://gitlab.com/graphviz/graphviz/-/archive/2.46.0/graphviz-2.46.0.tar.bz2'
version('2.49.0', sha256='b129555743bb9bfb7b63c55825da51763b2f1ee7c0eaa6234a42a61a3aff6cc9')
version('2.47.2', sha256='b5ebb00d4283c6d12cf16b2323e1820b535cc3823c8f261b783f7903b1d5b7fb')
version('2.46.0', sha256='1b11684fd5488940b45bf4624393140da6032abafae08f33dc3e986cffd55d71')
version('2.44.1', sha256='0f8f3fbeaddd474e0a270dc9bb0e247a1ae4284ae35125af4adceffae5c7ae9b')
version('2.42.4', sha256='a1ca0c4273d96bbf32fbfcbb784c8da2e38da13e7d2bbf9b24fe94ae45e79c4c')
version('2.40.1', sha256='581596aaeac5dae3f57da6ecde62ad7709a992df341e8f7c6177b41e8b1ae4f6')
version('2.38.0', sha256='c1b1e326b5d1f45b0ce91edd7acc68e80ff6be6b470008766e4d466aafc9801f', deprecated=True)
language_bindings = ['java']
for lang in language_bindings:
variant(lang, default=False,
description='Enable for optional {0} language '
'bindings'.format(lang))
variant('doc', default=False,
description='Build and install graphviz documentation')
variant('expat', default=False,
description='Build with Expat support (enables HTML-like labels)')
variant('gts', default=False,
description='Build with GNU Triangulated Surface Library')
variant('ghostscript', default=False,
description='Build with Ghostscript support')
variant('gtkplus', default=False,
description='Build with GTK+ support')
variant('libgd', default=False,
description='Build with libgd support (more output formats)')
variant('pangocairo', default=False,
description='Build with pango+cairo support (more output formats)')
variant('poppler', default=False,
description='Build with poppler support (pdf formats)')
variant('qt', default=False,
description='Build with Qt support')
variant('quartz', default=(MACOS_VERSION is not None),
description='Build with Quartz and PDF support')
variant('x', default=False,
description='Use the X Window System')
patch('https://www.linuxfromscratch.org/patches/blfs/9.0/graphviz-2.40.1-qt5-1.patch',
sha256='bd532df325df811713e311d17aaeac3f5d6075ea4fd0eae8d989391e6afba930',
when='@:2.40+qt^qt@5:')
patch('https://raw.githubusercontent.com/easybuilders/easybuild-easyconfigs/master/easybuild/easyconfigs/g/Graphviz/Graphviz-2.38.0_icc_sfio.patch',
sha256='393a0a772315a89dcc970b5efd4765d22dba83493d7956303673eb89c45b949f',
level=0,
when='@:2.40%intel')
patch('https://raw.githubusercontent.com/easybuilders/easybuild-easyconfigs/master/easybuild/easyconfigs/g/Graphviz/Graphviz-2.40.1_icc_vmalloc.patch',
sha256='813e6529e79161a18b0f24a969b7de22f8417b2e942239e658b5402884541bc2',
when='@:2.40%intel')
patch('ps2pdf.patch', when='@:2.45')
patch('implicit.patch', level=0, when='@:2.44.0')
if not MACOS_VERSION:
conflicts('+quartz',
msg="Graphviz can only be build with Quartz on macOS.")
elif MACOS_VERSION >= Version('10.9'):
patch('fix-quartz-darwin.patch', when='@:2.47.2')
# Language dependencies
for lang in language_bindings:
depends_on('swig', when=('+' + lang))
depends_on(lang, when=('+' + lang))
# Feature dependencies
depends_on('zlib')
depends_on('groff', type='build', when='+doc')
depends_on('ghostscript', type='build', when='+doc')
depends_on('expat', when='+expat')
depends_on('libgd', when='+libgd')
depends_on('fontconfig', when='+libgd')
depends_on('freetype', when='+libgd')
depends_on('ghostscript', when='+ghostscript')
depends_on('gtkplus', when='+gtkplus')
depends_on('gts', when='+gts')
depends_on('cairo+pdf+png+svg', when='+pangocairo')
depends_on('fontconfig', when='+pangocairo')
depends_on('freetype', when='+pangocairo')
depends_on('glib', when='+pangocairo')
depends_on('libpng', when='+pangocairo')
depends_on('pango', when='+pangocairo')
depends_on('poppler+glib', when='+poppler')
depends_on('qt', when='+qt')
depends_on('libx11', when="+x")
# Build dependencies (graphviz binaries don't include configure file)
depends_on('automake', type='build')
depends_on('autoconf', type='build')
depends_on('bison@3.0.4:', type='build')
depends_on('flex', type='build')
depends_on('sed', type='build')
depends_on('libtool', type='build')
depends_on('pkgconfig', type='build')
depends_on('python@3.6:', when='@2.47:', type='build')
conflicts('~doc',
when='@:2.45',
msg='graphviz always builds documentation below version 2.46')
conflicts('%gcc@:5.9',
when='@2.40.1+qt ^qt@5:',
msg='graphviz-2.40.1 needs gcc-6 or greater to compile with QT5 '
'suppport')
def autoreconf(self, spec, prefix):
if os.path.exists(self.configure_abs_path):
return
bash = which('bash')
bash('./autogen.sh', 'NOCONFIG')
def setup_build_environment(self, env):
if '+quartz' in self.spec:
env.set('OBJC', self.compiler.cc)
@when('%clang platform=darwin')
def patch(self):
mkdirs = ['cmd/dot', 'cmd/edgepaint', 'cmd/mingle', 'plugin/gdiplus']
filter_file(r'-lstdc\+\+', '-lc++', 'configure.ac',
*(d + '/Makefile.am' for d in mkdirs))
@when('%apple-clang')
def patch(self):
mkdirs = ['cmd/dot', 'cmd/edgepaint', 'cmd/mingle', 'plugin/gdiplus']
filter_file(r'-lstdc\+\+', '-lc++', 'configure.ac',
*(d + '/Makefile.am' for d in mkdirs))
def configure_args(self):
spec = self.spec
args = ['--disable-silent-rules']
use_swig = False
for lang in self.language_bindings:
if '+' + lang in spec:
use_swig = True
args.append('--enable-' + lang)
args.append('--{0}-swig'.format('enable' if use_swig else 'disable'))
for var in ["expat", "gts", "ghostscript", "libgd", "pangocairo",
"poppler", "qt", "quartz", "x"]:
args += self.with_or_without(var)
for var in ["zlib", "expat", "java"]:
if '+' + var in spec:
args.append('--with-{0}includedir={1}'.format(
var, spec[var].prefix.include))
args.append('--with-{0}libdir={1}'.format(
var, spec[var].prefix.lib))
args.append('--{0}-gtk'.format(
"with" if "+gtkplus" in spec else "without"))
if spec.version >= Version('2.46'):
args.append('--{0}-man-pdfs'.format(
'enable' if '+doc' in spec else 'disable'))
return args
| true | true |
f71fa28fe37d5ad73815c67dfd54a10cbdef33d0 | 84,349 | py | Python | python/src/chirpstack_api/as_pb/external/api/application_pb2_grpc.py | maxreb/chirpstack-api | c591dd556e70b384318cdf61de19c0350715d61d | [
"MIT"
] | 55 | 2019-11-05T15:46:49.000Z | 2022-03-23T14:31:33.000Z | python/src/chirpstack_api/as_pb/external/api/application_pb2_grpc.py | maxreb/chirpstack-api | c591dd556e70b384318cdf61de19c0350715d61d | [
"MIT"
] | 39 | 2019-11-08T21:03:45.000Z | 2022-03-01T12:40:36.000Z | python/src/chirpstack_api/as_pb/external/api/application_pb2_grpc.py | maxreb/chirpstack-api | c591dd556e70b384318cdf61de19c0350715d61d | [
"MIT"
] | 101 | 2019-11-22T13:59:59.000Z | 2022-03-14T09:52:46.000Z | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from chirpstack_api.as_pb.external.api import application_pb2 as chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
class ApplicationServiceStub(object):
"""ApplicationService is the service managing applications.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Create = channel.unary_unary(
'/api.ApplicationService/Create',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateApplicationRequest.SerializeToString,
response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateApplicationResponse.FromString,
)
self.Get = channel.unary_unary(
'/api.ApplicationService/Get',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetApplicationRequest.SerializeToString,
response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetApplicationResponse.FromString,
)
self.Update = channel.unary_unary(
'/api.ApplicationService/Update',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateApplicationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.Delete = channel.unary_unary(
'/api.ApplicationService/Delete',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteApplicationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.List = channel.unary_unary(
'/api.ApplicationService/List',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.ListApplicationRequest.SerializeToString,
response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.ListApplicationResponse.FromString,
)
self.CreateHTTPIntegration = channel.unary_unary(
'/api.ApplicationService/CreateHTTPIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateHTTPIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.GetHTTPIntegration = channel.unary_unary(
'/api.ApplicationService/GetHTTPIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetHTTPIntegrationRequest.SerializeToString,
response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetHTTPIntegrationResponse.FromString,
)
self.UpdateHTTPIntegration = channel.unary_unary(
'/api.ApplicationService/UpdateHTTPIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateHTTPIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.DeleteHTTPIntegration = channel.unary_unary(
'/api.ApplicationService/DeleteHTTPIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteHTTPIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.CreateInfluxDBIntegration = channel.unary_unary(
'/api.ApplicationService/CreateInfluxDBIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateInfluxDBIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.GetInfluxDBIntegration = channel.unary_unary(
'/api.ApplicationService/GetInfluxDBIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetInfluxDBIntegrationRequest.SerializeToString,
response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetInfluxDBIntegrationResponse.FromString,
)
self.UpdateInfluxDBIntegration = channel.unary_unary(
'/api.ApplicationService/UpdateInfluxDBIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateInfluxDBIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.DeleteInfluxDBIntegration = channel.unary_unary(
'/api.ApplicationService/DeleteInfluxDBIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteInfluxDBIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.CreateThingsBoardIntegration = channel.unary_unary(
'/api.ApplicationService/CreateThingsBoardIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateThingsBoardIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.GetThingsBoardIntegration = channel.unary_unary(
'/api.ApplicationService/GetThingsBoardIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetThingsBoardIntegrationRequest.SerializeToString,
response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetThingsBoardIntegrationResponse.FromString,
)
self.UpdateThingsBoardIntegration = channel.unary_unary(
'/api.ApplicationService/UpdateThingsBoardIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateThingsBoardIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.DeleteThingsBoardIntegration = channel.unary_unary(
'/api.ApplicationService/DeleteThingsBoardIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteThingsBoardIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.CreateMyDevicesIntegration = channel.unary_unary(
'/api.ApplicationService/CreateMyDevicesIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateMyDevicesIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.GetMyDevicesIntegration = channel.unary_unary(
'/api.ApplicationService/GetMyDevicesIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetMyDevicesIntegrationRequest.SerializeToString,
response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetMyDevicesIntegrationResponse.FromString,
)
self.UpdateMyDevicesIntegration = channel.unary_unary(
'/api.ApplicationService/UpdateMyDevicesIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateMyDevicesIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.DeleteMyDevicesIntegration = channel.unary_unary(
'/api.ApplicationService/DeleteMyDevicesIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteMyDevicesIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.CreateLoRaCloudIntegration = channel.unary_unary(
'/api.ApplicationService/CreateLoRaCloudIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateLoRaCloudIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.GetLoRaCloudIntegration = channel.unary_unary(
'/api.ApplicationService/GetLoRaCloudIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetLoRaCloudIntegrationRequest.SerializeToString,
response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetLoRaCloudIntegrationResponse.FromString,
)
self.UpdateLoRaCloudIntegration = channel.unary_unary(
'/api.ApplicationService/UpdateLoRaCloudIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateLoRaCloudIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.DeleteLoRaCloudIntegration = channel.unary_unary(
'/api.ApplicationService/DeleteLoRaCloudIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteLoRaCloudIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.CreateGCPPubSubIntegration = channel.unary_unary(
'/api.ApplicationService/CreateGCPPubSubIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateGCPPubSubIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.GetGCPPubSubIntegration = channel.unary_unary(
'/api.ApplicationService/GetGCPPubSubIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetGCPPubSubIntegrationRequest.SerializeToString,
response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetGCPPubSubIntegrationResponse.FromString,
)
self.UpdateGCPPubSubIntegration = channel.unary_unary(
'/api.ApplicationService/UpdateGCPPubSubIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateGCPPubSubIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.DeleteGCPPubSubIntegration = channel.unary_unary(
'/api.ApplicationService/DeleteGCPPubSubIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteGCPPubSubIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.CreateAWSSNSIntegration = channel.unary_unary(
'/api.ApplicationService/CreateAWSSNSIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateAWSSNSIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.GetAWSSNSIntegration = channel.unary_unary(
'/api.ApplicationService/GetAWSSNSIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetAWSSNSIntegrationRequest.SerializeToString,
response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetAWSSNSIntegrationResponse.FromString,
)
self.UpdateAWSSNSIntegration = channel.unary_unary(
'/api.ApplicationService/UpdateAWSSNSIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateAWSSNSIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.DeleteAWSSNSIntegration = channel.unary_unary(
'/api.ApplicationService/DeleteAWSSNSIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteAWSSNSIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.CreateAzureServiceBusIntegration = channel.unary_unary(
'/api.ApplicationService/CreateAzureServiceBusIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateAzureServiceBusIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.GetAzureServiceBusIntegration = channel.unary_unary(
'/api.ApplicationService/GetAzureServiceBusIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetAzureServiceBusIntegrationRequest.SerializeToString,
response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetAzureServiceBusIntegrationResponse.FromString,
)
self.UpdateAzureServiceBusIntegration = channel.unary_unary(
'/api.ApplicationService/UpdateAzureServiceBusIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateAzureServiceBusIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.DeleteAzureServiceBusIntegration = channel.unary_unary(
'/api.ApplicationService/DeleteAzureServiceBusIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteAzureServiceBusIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.CreatePilotThingsIntegration = channel.unary_unary(
'/api.ApplicationService/CreatePilotThingsIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreatePilotThingsIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.GetPilotThingsIntegration = channel.unary_unary(
'/api.ApplicationService/GetPilotThingsIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetPilotThingsIntegrationRequest.SerializeToString,
response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetPilotThingsIntegrationResponse.FromString,
)
self.UpdatePilotThingsIntegration = channel.unary_unary(
'/api.ApplicationService/UpdatePilotThingsIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdatePilotThingsIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.DeletePilotThingsIntegration = channel.unary_unary(
'/api.ApplicationService/DeletePilotThingsIntegration',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeletePilotThingsIntegrationRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.ListIntegrations = channel.unary_unary(
'/api.ApplicationService/ListIntegrations',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.ListIntegrationRequest.SerializeToString,
response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.ListIntegrationResponse.FromString,
)
self.GenerateMQTTIntegrationClientCertificate = channel.unary_unary(
'/api.ApplicationService/GenerateMQTTIntegrationClientCertificate',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GenerateMQTTIntegrationClientCertificateRequest.SerializeToString,
response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GenerateMQTTIntegrationClientCertificateResponse.FromString,
)
class ApplicationServiceServicer(object):
    """ApplicationService is the service managing applications.

    Server-side base class: every RPC handler below is a placeholder that
    reports UNIMPLEMENTED until overridden by a concrete subclass.
    """

    def _unimplemented(self, context):
        # Shared placeholder behaviour for all handlers: flag the RPC as
        # UNIMPLEMENTED on the gRPC servicer context, then raise locally.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Create(self, request, context):
        """Create creates the given application."""
        self._unimplemented(context)

    def Get(self, request, context):
        """Get returns the requested application."""
        self._unimplemented(context)

    def Update(self, request, context):
        """Update updates the given application."""
        self._unimplemented(context)

    def Delete(self, request, context):
        """Delete deletes the given application."""
        self._unimplemented(context)

    def List(self, request, context):
        """List lists the available applications."""
        self._unimplemented(context)

    def CreateHTTPIntegration(self, request, context):
        """CreateHTTPIntegration creates a HTTP application-integration."""
        self._unimplemented(context)

    def GetHTTPIntegration(self, request, context):
        """GetHTTPIntegration returns the HTTP application-integration."""
        self._unimplemented(context)

    def UpdateHTTPIntegration(self, request, context):
        """UpdateHTTPIntegration updates the HTTP application-integration."""
        self._unimplemented(context)

    def DeleteHTTPIntegration(self, request, context):
        """DeleteHTTPIntegration deletes the HTTP application-integration."""
        self._unimplemented(context)

    def CreateInfluxDBIntegration(self, request, context):
        """CreateInfluxDBIntegration creates an InfluxDB application-integration."""
        self._unimplemented(context)

    def GetInfluxDBIntegration(self, request, context):
        """GetInfluxDBIntegration returns the InfluxDB application-integration."""
        self._unimplemented(context)

    def UpdateInfluxDBIntegration(self, request, context):
        """UpdateInfluxDBIntegration updates the InfluxDB application-integration."""
        self._unimplemented(context)

    def DeleteInfluxDBIntegration(self, request, context):
        """DeleteInfluxDBIntegration deletes the InfluxDB application-integration."""
        self._unimplemented(context)

    def CreateThingsBoardIntegration(self, request, context):
        """CreateThingsBoardIntegration creates a ThingsBoard application-integration."""
        self._unimplemented(context)

    def GetThingsBoardIntegration(self, request, context):
        """GetThingsBoardIntegration returns the ThingsBoard application-integration."""
        self._unimplemented(context)

    def UpdateThingsBoardIntegration(self, request, context):
        """UpdateThingsBoardIntegration updates the ThingsBoard application-integration."""
        self._unimplemented(context)

    def DeleteThingsBoardIntegration(self, request, context):
        """DeleteThingsBoardIntegration deletes the ThingsBoard application-integration."""
        self._unimplemented(context)

    def CreateMyDevicesIntegration(self, request, context):
        """CreateMyDevicesIntegration creates a MyDevices application-integration."""
        self._unimplemented(context)

    def GetMyDevicesIntegration(self, request, context):
        """GetMyDevicesIntegration returns the MyDevices application-integration."""
        self._unimplemented(context)

    def UpdateMyDevicesIntegration(self, request, context):
        """UpdateMyDevicesIntegration updates the MyDevices application-integration."""
        self._unimplemented(context)

    def DeleteMyDevicesIntegration(self, request, context):
        """DeleteMyDevicesIntegration deletes the MyDevices application-integration."""
        self._unimplemented(context)

    def CreateLoRaCloudIntegration(self, request, context):
        """CreateLoRaCloudIntegration creates a LoRaCloud application-integration."""
        self._unimplemented(context)

    def GetLoRaCloudIntegration(self, request, context):
        """GetLoRaCloudIntegration returns the LoRaCloud application-integration."""
        self._unimplemented(context)

    def UpdateLoRaCloudIntegration(self, request, context):
        """UpdateLoRaCloudIntegration updates the LoRaCloud application-integration."""
        self._unimplemented(context)

    def DeleteLoRaCloudIntegration(self, request, context):
        """DeleteLoRaCloudIntegration deletes the LoRaCloud application-integration."""
        self._unimplemented(context)

    def CreateGCPPubSubIntegration(self, request, context):
        """CreateGCPPubSubIntegration creates a GCP PubSub application-integration."""
        self._unimplemented(context)

    def GetGCPPubSubIntegration(self, request, context):
        """GetGCPPubSubIntegration returns the GCP PubSub application-integration."""
        self._unimplemented(context)

    def UpdateGCPPubSubIntegration(self, request, context):
        """UpdateGCPPubSubIntegration updates the GCP PubSub application-integration."""
        self._unimplemented(context)

    def DeleteGCPPubSubIntegration(self, request, context):
        """DeleteGCPPubSubIntegration deletes the GCP PubSub application-integration."""
        self._unimplemented(context)

    def CreateAWSSNSIntegration(self, request, context):
        """CreateAWSSNSIntegration creates an AWS SNS application-integration."""
        self._unimplemented(context)

    def GetAWSSNSIntegration(self, request, context):
        """GetAWSSNSIntegration returns the AWS SNS application-integration."""
        self._unimplemented(context)

    def UpdateAWSSNSIntegration(self, request, context):
        """UpdateAWSSNSIntegration updates the AWS SNS application-integration."""
        self._unimplemented(context)

    def DeleteAWSSNSIntegration(self, request, context):
        """DeleteAWSSNSIntegration deletes the AWS SNS application-integration."""
        self._unimplemented(context)

    def CreateAzureServiceBusIntegration(self, request, context):
        """CreateAzureServiceBusIntegration creates an Azure Service-Bus application-integration."""
        self._unimplemented(context)

    def GetAzureServiceBusIntegration(self, request, context):
        """GetAzureServiceBusIntegration returns the Azure Service-Bus application-integration."""
        self._unimplemented(context)

    def UpdateAzureServiceBusIntegration(self, request, context):
        """UpdateAzureServiceBusIntegration updates the Azure Service-Bus application-integration."""
        self._unimplemented(context)

    def DeleteAzureServiceBusIntegration(self, request, context):
        """DeleteAzureServiceBusIntegration deletes the Azure Service-Bus application-integration."""
        self._unimplemented(context)

    def CreatePilotThingsIntegration(self, request, context):
        """CreatePilotThingsIntegration creates a Pilot Things application-integration."""
        self._unimplemented(context)

    def GetPilotThingsIntegration(self, request, context):
        """GetPilotThingsIntegration returns the Pilot Things application-integration."""
        self._unimplemented(context)

    def UpdatePilotThingsIntegration(self, request, context):
        """UpdatePilotThingsIntegration updates the Pilot Things application-integration."""
        self._unimplemented(context)

    def DeletePilotThingsIntegration(self, request, context):
        """DeletePilotThingsIntegration deletes the Pilot Things application-integration."""
        self._unimplemented(context)

    def ListIntegrations(self, request, context):
        """ListIntegrations lists all configured integrations."""
        self._unimplemented(context)

    def GenerateMQTTIntegrationClientCertificate(self, request, context):
        """GenerateMQTTIntegrationClientCertificate generates an application ID
        specific TLS certificate to connect to the MQTT broker.
        """
        self._unimplemented(context)
def add_ApplicationServiceServicer_to_server(servicer, server):
    """Register every api.ApplicationService RPC handler of *servicer* on *server*."""
    pb2 = chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2
    # (method name, True when the RPC returns a dedicated pb2 response
    # message, False when it returns google.protobuf.Empty).  Order matches
    # the service definition.
    rpcs = [
        ('Create', True),
        ('Get', True),
        ('Update', False),
        ('Delete', False),
        ('List', True),
        ('CreateHTTPIntegration', False),
        ('GetHTTPIntegration', True),
        ('UpdateHTTPIntegration', False),
        ('DeleteHTTPIntegration', False),
        ('CreateInfluxDBIntegration', False),
        ('GetInfluxDBIntegration', True),
        ('UpdateInfluxDBIntegration', False),
        ('DeleteInfluxDBIntegration', False),
        ('CreateThingsBoardIntegration', False),
        ('GetThingsBoardIntegration', True),
        ('UpdateThingsBoardIntegration', False),
        ('DeleteThingsBoardIntegration', False),
        ('CreateMyDevicesIntegration', False),
        ('GetMyDevicesIntegration', True),
        ('UpdateMyDevicesIntegration', False),
        ('DeleteMyDevicesIntegration', False),
        ('CreateLoRaCloudIntegration', False),
        ('GetLoRaCloudIntegration', True),
        ('UpdateLoRaCloudIntegration', False),
        ('DeleteLoRaCloudIntegration', False),
        ('CreateGCPPubSubIntegration', False),
        ('GetGCPPubSubIntegration', True),
        ('UpdateGCPPubSubIntegration', False),
        ('DeleteGCPPubSubIntegration', False),
        ('CreateAWSSNSIntegration', False),
        ('GetAWSSNSIntegration', True),
        ('UpdateAWSSNSIntegration', False),
        ('DeleteAWSSNSIntegration', False),
        ('CreateAzureServiceBusIntegration', False),
        ('GetAzureServiceBusIntegration', True),
        ('UpdateAzureServiceBusIntegration', False),
        ('DeleteAzureServiceBusIntegration', False),
        ('CreatePilotThingsIntegration', False),
        ('GetPilotThingsIntegration', True),
        ('UpdatePilotThingsIntegration', False),
        ('DeletePilotThingsIntegration', False),
        ('ListIntegrations', True),
        ('GenerateMQTTIntegrationClientCertificate', True),
    ]
    # The application-CRUD RPCs and ListIntegrations do not follow the
    # "<method name>Request" naming convention of the other request messages.
    request_names = {
        'Create': 'CreateApplicationRequest',
        'Get': 'GetApplicationRequest',
        'Update': 'UpdateApplicationRequest',
        'Delete': 'DeleteApplicationRequest',
        'List': 'ListApplicationRequest',
        'ListIntegrations': 'ListIntegrationRequest',
    }
    rpc_method_handlers = {}
    for name, has_response in rpcs:
        req_name = request_names.get(name, name + 'Request')
        if has_response:
            # e.g. "GetHTTPIntegrationRequest" -> "GetHTTPIntegrationResponse"
            resp_cls = getattr(pb2, req_name[:-len('Request')] + 'Response')
        else:
            resp_cls = google_dot_protobuf_dot_empty__pb2.Empty
        rpc_method_handlers[name] = grpc.unary_unary_rpc_method_handler(
            getattr(servicer, name),
            request_deserializer=getattr(pb2, req_name).FromString,
            response_serializer=resp_cls.SerializeToString,
        )
    generic_handler = grpc.method_handlers_generic_handler(
        'api.ApplicationService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class ApplicationService(object):
"""ApplicationService is the service managing applications.
"""
@staticmethod
def Create(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Invoke the unary /api.ApplicationService/Create RPC on *target*."""
    pb2 = chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2
    return grpc.experimental.unary_unary(
        request,
        target,
        '/api.ApplicationService/Create',
        pb2.CreateApplicationRequest.SerializeToString,
        pb2.CreateApplicationResponse.FromString,
        options,
        channel_credentials,
        insecure,
        call_credentials,
        compression,
        wait_for_ready,
        timeout,
        metadata)
@staticmethod
def Get(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Invoke the unary /api.ApplicationService/Get RPC on *target*."""
    pb2 = chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2
    return grpc.experimental.unary_unary(
        request,
        target,
        '/api.ApplicationService/Get',
        pb2.GetApplicationRequest.SerializeToString,
        pb2.GetApplicationResponse.FromString,
        options,
        channel_credentials,
        insecure,
        call_credentials,
        compression,
        wait_for_ready,
        timeout,
        metadata)
@staticmethod
def Update(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Invoke the unary /api.ApplicationService/Update RPC on *target* (Empty response)."""
    pb2 = chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2
    return grpc.experimental.unary_unary(
        request,
        target,
        '/api.ApplicationService/Update',
        pb2.UpdateApplicationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options,
        channel_credentials,
        insecure,
        call_credentials,
        compression,
        wait_for_ready,
        timeout,
        metadata)
@staticmethod
def Delete(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Invoke the unary /api.ApplicationService/Delete RPC on *target* (Empty response)."""
    pb2 = chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2
    return grpc.experimental.unary_unary(
        request,
        target,
        '/api.ApplicationService/Delete',
        pb2.DeleteApplicationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options,
        channel_credentials,
        insecure,
        call_credentials,
        compression,
        wait_for_ready,
        timeout,
        metadata)
@staticmethod
def List(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Invoke the unary /api.ApplicationService/List RPC on *target*."""
    pb2 = chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2
    return grpc.experimental.unary_unary(
        request,
        target,
        '/api.ApplicationService/List',
        pb2.ListApplicationRequest.SerializeToString,
        pb2.ListApplicationResponse.FromString,
        options,
        channel_credentials,
        insecure,
        call_credentials,
        compression,
        wait_for_ready,
        timeout,
        metadata)
@staticmethod
def CreateHTTPIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Invoke the unary /api.ApplicationService/CreateHTTPIntegration RPC on *target* (Empty response)."""
    pb2 = chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2
    return grpc.experimental.unary_unary(
        request,
        target,
        '/api.ApplicationService/CreateHTTPIntegration',
        pb2.CreateHTTPIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options,
        channel_credentials,
        insecure,
        call_credentials,
        compression,
        wait_for_ready,
        timeout,
        metadata)
@staticmethod
def GetHTTPIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Invoke the unary /api.ApplicationService/GetHTTPIntegration RPC on *target*."""
    pb2 = chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2
    return grpc.experimental.unary_unary(
        request,
        target,
        '/api.ApplicationService/GetHTTPIntegration',
        pb2.GetHTTPIntegrationRequest.SerializeToString,
        pb2.GetHTTPIntegrationResponse.FromString,
        options,
        channel_credentials,
        insecure,
        call_credentials,
        compression,
        wait_for_ready,
        timeout,
        metadata)
@staticmethod
def UpdateHTTPIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Invoke the unary /api.ApplicationService/UpdateHTTPIntegration RPC on *target* (Empty response)."""
    pb2 = chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2
    return grpc.experimental.unary_unary(
        request,
        target,
        '/api.ApplicationService/UpdateHTTPIntegration',
        pb2.UpdateHTTPIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options,
        channel_credentials,
        insecure,
        call_credentials,
        compression,
        wait_for_ready,
        timeout,
        metadata)
@staticmethod
def DeleteHTTPIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """Invoke the unary /api.ApplicationService/DeleteHTTPIntegration RPC on *target* (Empty response)."""
    pb2 = chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2
    return grpc.experimental.unary_unary(
        request,
        target,
        '/api.ApplicationService/DeleteHTTPIntegration',
        pb2.DeleteHTTPIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options,
        channel_credentials,
        insecure,
        call_credentials,
        compression,
        wait_for_ready,
        timeout,
        metadata)
@staticmethod
def CreateInfluxDBIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/CreateInfluxDBIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/CreateInfluxDBIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateInfluxDBIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetInfluxDBIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/GetInfluxDBIntegration; returns a decoded GetInfluxDBIntegrationResponse."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/GetInfluxDBIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetInfluxDBIntegrationRequest.SerializeToString,
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetInfluxDBIntegrationResponse.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateInfluxDBIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/UpdateInfluxDBIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/UpdateInfluxDBIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateInfluxDBIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteInfluxDBIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/DeleteInfluxDBIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/DeleteInfluxDBIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteInfluxDBIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateThingsBoardIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/CreateThingsBoardIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/CreateThingsBoardIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateThingsBoardIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetThingsBoardIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/GetThingsBoardIntegration; returns a decoded GetThingsBoardIntegrationResponse."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/GetThingsBoardIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetThingsBoardIntegrationRequest.SerializeToString,
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetThingsBoardIntegrationResponse.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateThingsBoardIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/UpdateThingsBoardIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/UpdateThingsBoardIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateThingsBoardIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteThingsBoardIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/DeleteThingsBoardIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/DeleteThingsBoardIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteThingsBoardIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateMyDevicesIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/CreateMyDevicesIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/CreateMyDevicesIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateMyDevicesIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetMyDevicesIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/GetMyDevicesIntegration; returns a decoded GetMyDevicesIntegrationResponse."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/GetMyDevicesIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetMyDevicesIntegrationRequest.SerializeToString,
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetMyDevicesIntegrationResponse.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateMyDevicesIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/UpdateMyDevicesIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/UpdateMyDevicesIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateMyDevicesIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteMyDevicesIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/DeleteMyDevicesIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/DeleteMyDevicesIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteMyDevicesIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateLoRaCloudIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/CreateLoRaCloudIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/CreateLoRaCloudIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateLoRaCloudIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetLoRaCloudIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/GetLoRaCloudIntegration; returns a decoded GetLoRaCloudIntegrationResponse."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/GetLoRaCloudIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetLoRaCloudIntegrationRequest.SerializeToString,
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetLoRaCloudIntegrationResponse.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateLoRaCloudIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/UpdateLoRaCloudIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/UpdateLoRaCloudIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateLoRaCloudIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteLoRaCloudIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/DeleteLoRaCloudIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/DeleteLoRaCloudIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteLoRaCloudIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateGCPPubSubIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/CreateGCPPubSubIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/CreateGCPPubSubIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateGCPPubSubIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetGCPPubSubIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/GetGCPPubSubIntegration; returns a decoded GetGCPPubSubIntegrationResponse."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/GetGCPPubSubIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetGCPPubSubIntegrationRequest.SerializeToString,
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetGCPPubSubIntegrationResponse.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateGCPPubSubIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/UpdateGCPPubSubIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/UpdateGCPPubSubIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateGCPPubSubIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteGCPPubSubIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/DeleteGCPPubSubIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/DeleteGCPPubSubIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteGCPPubSubIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateAWSSNSIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/CreateAWSSNSIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/CreateAWSSNSIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateAWSSNSIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetAWSSNSIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/GetAWSSNSIntegration; returns a decoded GetAWSSNSIntegrationResponse."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/GetAWSSNSIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetAWSSNSIntegrationRequest.SerializeToString,
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetAWSSNSIntegrationResponse.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateAWSSNSIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/UpdateAWSSNSIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/UpdateAWSSNSIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateAWSSNSIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteAWSSNSIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/DeleteAWSSNSIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/DeleteAWSSNSIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteAWSSNSIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateAzureServiceBusIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/CreateAzureServiceBusIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/CreateAzureServiceBusIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateAzureServiceBusIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetAzureServiceBusIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/GetAzureServiceBusIntegration; returns a decoded GetAzureServiceBusIntegrationResponse."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/GetAzureServiceBusIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetAzureServiceBusIntegrationRequest.SerializeToString,
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetAzureServiceBusIntegrationResponse.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateAzureServiceBusIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/UpdateAzureServiceBusIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/UpdateAzureServiceBusIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateAzureServiceBusIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteAzureServiceBusIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/DeleteAzureServiceBusIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/DeleteAzureServiceBusIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteAzureServiceBusIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreatePilotThingsIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/CreatePilotThingsIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/CreatePilotThingsIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreatePilotThingsIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetPilotThingsIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/GetPilotThingsIntegration; returns a decoded GetPilotThingsIntegrationResponse."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/GetPilotThingsIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetPilotThingsIntegrationRequest.SerializeToString,
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetPilotThingsIntegrationResponse.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdatePilotThingsIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/UpdatePilotThingsIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/UpdatePilotThingsIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdatePilotThingsIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeletePilotThingsIntegration(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/DeletePilotThingsIntegration; the reply decodes to google.protobuf.Empty."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/DeletePilotThingsIntegration',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeletePilotThingsIntegrationRequest.SerializeToString,
        google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ListIntegrations(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/ListIntegrations; returns a decoded ListIntegrationResponse."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/ListIntegrations',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.ListIntegrationRequest.SerializeToString,
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.ListIntegrationResponse.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GenerateMQTTIntegrationClientCertificate(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    """One-shot unary call to /api.ApplicationService/GenerateMQTTIntegrationClientCertificate; returns a decoded GenerateMQTTIntegrationClientCertificateResponse."""
    return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/GenerateMQTTIntegrationClientCertificate',
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GenerateMQTTIntegrationClientCertificateRequest.SerializeToString,
        chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GenerateMQTTIntegrationClientCertificateResponse.FromString,
        options, channel_credentials,
        insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 56.195203 | 176 | 0.719013 |
import grpc
from chirpstack_api.as_pb.external.api import application_pb2 as chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
class ApplicationServiceStub(object):
    """Client-side stub for the api.ApplicationService gRPC service.

    Each attribute is a unary-unary callable bound to one service method on
    the given channel, paired with the matching protobuf request serializer
    and response deserializer. Mutating methods (Create*/Update*/Delete*)
    decode their reply as google.protobuf.Empty; Get*/List*/Generate* decode
    a service-specific response message.
    """

    def __init__(self, channel):
        """Create one RPC callable per service method.

        Args:
            channel: a grpc.Channel used to create the unary-unary callables.
        """
        # Application CRUD + listing.
        self.Create = channel.unary_unary(
            '/api.ApplicationService/Create',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateApplicationRequest.SerializeToString,
            response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateApplicationResponse.FromString,
        )
        self.Get = channel.unary_unary(
            '/api.ApplicationService/Get',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetApplicationRequest.SerializeToString,
            response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetApplicationResponse.FromString,
        )
        self.Update = channel.unary_unary(
            '/api.ApplicationService/Update',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateApplicationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.Delete = channel.unary_unary(
            '/api.ApplicationService/Delete',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteApplicationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.List = channel.unary_unary(
            '/api.ApplicationService/List',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.ListApplicationRequest.SerializeToString,
            response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.ListApplicationResponse.FromString,
        )
        # HTTP integration.
        self.CreateHTTPIntegration = channel.unary_unary(
            '/api.ApplicationService/CreateHTTPIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateHTTPIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.GetHTTPIntegration = channel.unary_unary(
            '/api.ApplicationService/GetHTTPIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetHTTPIntegrationRequest.SerializeToString,
            response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetHTTPIntegrationResponse.FromString,
        )
        self.UpdateHTTPIntegration = channel.unary_unary(
            '/api.ApplicationService/UpdateHTTPIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateHTTPIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.DeleteHTTPIntegration = channel.unary_unary(
            '/api.ApplicationService/DeleteHTTPIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteHTTPIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        # InfluxDB integration.
        self.CreateInfluxDBIntegration = channel.unary_unary(
            '/api.ApplicationService/CreateInfluxDBIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateInfluxDBIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.GetInfluxDBIntegration = channel.unary_unary(
            '/api.ApplicationService/GetInfluxDBIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetInfluxDBIntegrationRequest.SerializeToString,
            response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetInfluxDBIntegrationResponse.FromString,
        )
        self.UpdateInfluxDBIntegration = channel.unary_unary(
            '/api.ApplicationService/UpdateInfluxDBIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateInfluxDBIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.DeleteInfluxDBIntegration = channel.unary_unary(
            '/api.ApplicationService/DeleteInfluxDBIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteInfluxDBIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        # ThingsBoard integration.
        self.CreateThingsBoardIntegration = channel.unary_unary(
            '/api.ApplicationService/CreateThingsBoardIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateThingsBoardIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.GetThingsBoardIntegration = channel.unary_unary(
            '/api.ApplicationService/GetThingsBoardIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetThingsBoardIntegrationRequest.SerializeToString,
            response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetThingsBoardIntegrationResponse.FromString,
        )
        self.UpdateThingsBoardIntegration = channel.unary_unary(
            '/api.ApplicationService/UpdateThingsBoardIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateThingsBoardIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.DeleteThingsBoardIntegration = channel.unary_unary(
            '/api.ApplicationService/DeleteThingsBoardIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteThingsBoardIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        # myDevices integration.
        self.CreateMyDevicesIntegration = channel.unary_unary(
            '/api.ApplicationService/CreateMyDevicesIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateMyDevicesIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.GetMyDevicesIntegration = channel.unary_unary(
            '/api.ApplicationService/GetMyDevicesIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetMyDevicesIntegrationRequest.SerializeToString,
            response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetMyDevicesIntegrationResponse.FromString,
        )
        self.UpdateMyDevicesIntegration = channel.unary_unary(
            '/api.ApplicationService/UpdateMyDevicesIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateMyDevicesIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.DeleteMyDevicesIntegration = channel.unary_unary(
            '/api.ApplicationService/DeleteMyDevicesIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteMyDevicesIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        # LoRa Cloud integration.
        self.CreateLoRaCloudIntegration = channel.unary_unary(
            '/api.ApplicationService/CreateLoRaCloudIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateLoRaCloudIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.GetLoRaCloudIntegration = channel.unary_unary(
            '/api.ApplicationService/GetLoRaCloudIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetLoRaCloudIntegrationRequest.SerializeToString,
            response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetLoRaCloudIntegrationResponse.FromString,
        )
        self.UpdateLoRaCloudIntegration = channel.unary_unary(
            '/api.ApplicationService/UpdateLoRaCloudIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateLoRaCloudIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.DeleteLoRaCloudIntegration = channel.unary_unary(
            '/api.ApplicationService/DeleteLoRaCloudIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteLoRaCloudIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        # GCP Pub/Sub integration.
        self.CreateGCPPubSubIntegration = channel.unary_unary(
            '/api.ApplicationService/CreateGCPPubSubIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateGCPPubSubIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.GetGCPPubSubIntegration = channel.unary_unary(
            '/api.ApplicationService/GetGCPPubSubIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetGCPPubSubIntegrationRequest.SerializeToString,
            response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetGCPPubSubIntegrationResponse.FromString,
        )
        self.UpdateGCPPubSubIntegration = channel.unary_unary(
            '/api.ApplicationService/UpdateGCPPubSubIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateGCPPubSubIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.DeleteGCPPubSubIntegration = channel.unary_unary(
            '/api.ApplicationService/DeleteGCPPubSubIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteGCPPubSubIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        # AWS SNS integration.
        self.CreateAWSSNSIntegration = channel.unary_unary(
            '/api.ApplicationService/CreateAWSSNSIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateAWSSNSIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.GetAWSSNSIntegration = channel.unary_unary(
            '/api.ApplicationService/GetAWSSNSIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetAWSSNSIntegrationRequest.SerializeToString,
            response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetAWSSNSIntegrationResponse.FromString,
        )
        self.UpdateAWSSNSIntegration = channel.unary_unary(
            '/api.ApplicationService/UpdateAWSSNSIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateAWSSNSIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.DeleteAWSSNSIntegration = channel.unary_unary(
            '/api.ApplicationService/DeleteAWSSNSIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteAWSSNSIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        # Azure Service Bus integration.
        self.CreateAzureServiceBusIntegration = channel.unary_unary(
            '/api.ApplicationService/CreateAzureServiceBusIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateAzureServiceBusIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.GetAzureServiceBusIntegration = channel.unary_unary(
            '/api.ApplicationService/GetAzureServiceBusIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetAzureServiceBusIntegrationRequest.SerializeToString,
            response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetAzureServiceBusIntegrationResponse.FromString,
        )
        self.UpdateAzureServiceBusIntegration = channel.unary_unary(
            '/api.ApplicationService/UpdateAzureServiceBusIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateAzureServiceBusIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.DeleteAzureServiceBusIntegration = channel.unary_unary(
            '/api.ApplicationService/DeleteAzureServiceBusIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteAzureServiceBusIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        # Pilot Things integration.
        self.CreatePilotThingsIntegration = channel.unary_unary(
            '/api.ApplicationService/CreatePilotThingsIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreatePilotThingsIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.GetPilotThingsIntegration = channel.unary_unary(
            '/api.ApplicationService/GetPilotThingsIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetPilotThingsIntegrationRequest.SerializeToString,
            response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetPilotThingsIntegrationResponse.FromString,
        )
        self.UpdatePilotThingsIntegration = channel.unary_unary(
            '/api.ApplicationService/UpdatePilotThingsIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdatePilotThingsIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        self.DeletePilotThingsIntegration = channel.unary_unary(
            '/api.ApplicationService/DeletePilotThingsIntegration',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeletePilotThingsIntegrationRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
        # Integration listing and MQTT client-certificate generation.
        self.ListIntegrations = channel.unary_unary(
            '/api.ApplicationService/ListIntegrations',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.ListIntegrationRequest.SerializeToString,
            response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.ListIntegrationResponse.FromString,
        )
        self.GenerateMQTTIntegrationClientCertificate = channel.unary_unary(
            '/api.ApplicationService/GenerateMQTTIntegrationClientCertificate',
            request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GenerateMQTTIntegrationClientCertificateRequest.SerializeToString,
            response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GenerateMQTTIntegrationClientCertificateResponse.FromString,
        )
class ApplicationServiceServicer(object):
    """Server-side skeleton for the api.ApplicationService gRPC service.

    Every handler below is a placeholder that reports UNIMPLEMENTED on the
    gRPC context and raises locally; subclass this and override the methods
    your server actually implements, then register the instance with
    add_ApplicationServiceServicer_to_server.
    """

    def _unimplemented(self, context):
        # Shared placeholder behaviour for every un-overridden RPC handler:
        # signal UNIMPLEMENTED to the remote peer, then raise so in-process
        # callers fail loudly too.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Create(self, request, context):
        self._unimplemented(context)

    def Get(self, request, context):
        self._unimplemented(context)

    def Update(self, request, context):
        self._unimplemented(context)

    def Delete(self, request, context):
        self._unimplemented(context)

    def List(self, request, context):
        self._unimplemented(context)

    def CreateHTTPIntegration(self, request, context):
        self._unimplemented(context)

    def GetHTTPIntegration(self, request, context):
        self._unimplemented(context)

    def UpdateHTTPIntegration(self, request, context):
        self._unimplemented(context)

    def DeleteHTTPIntegration(self, request, context):
        self._unimplemented(context)

    def CreateInfluxDBIntegration(self, request, context):
        self._unimplemented(context)

    def GetInfluxDBIntegration(self, request, context):
        self._unimplemented(context)

    def UpdateInfluxDBIntegration(self, request, context):
        self._unimplemented(context)

    def DeleteInfluxDBIntegration(self, request, context):
        self._unimplemented(context)

    def CreateThingsBoardIntegration(self, request, context):
        self._unimplemented(context)

    def GetThingsBoardIntegration(self, request, context):
        self._unimplemented(context)

    def UpdateThingsBoardIntegration(self, request, context):
        self._unimplemented(context)

    def DeleteThingsBoardIntegration(self, request, context):
        self._unimplemented(context)

    def CreateMyDevicesIntegration(self, request, context):
        self._unimplemented(context)

    def GetMyDevicesIntegration(self, request, context):
        self._unimplemented(context)

    def UpdateMyDevicesIntegration(self, request, context):
        self._unimplemented(context)

    def DeleteMyDevicesIntegration(self, request, context):
        self._unimplemented(context)

    def CreateLoRaCloudIntegration(self, request, context):
        self._unimplemented(context)

    def GetLoRaCloudIntegration(self, request, context):
        self._unimplemented(context)

    def UpdateLoRaCloudIntegration(self, request, context):
        self._unimplemented(context)

    def DeleteLoRaCloudIntegration(self, request, context):
        self._unimplemented(context)

    def CreateGCPPubSubIntegration(self, request, context):
        self._unimplemented(context)

    def GetGCPPubSubIntegration(self, request, context):
        self._unimplemented(context)

    def UpdateGCPPubSubIntegration(self, request, context):
        self._unimplemented(context)

    def DeleteGCPPubSubIntegration(self, request, context):
        self._unimplemented(context)

    def CreateAWSSNSIntegration(self, request, context):
        self._unimplemented(context)

    def GetAWSSNSIntegration(self, request, context):
        self._unimplemented(context)

    def UpdateAWSSNSIntegration(self, request, context):
        self._unimplemented(context)

    def DeleteAWSSNSIntegration(self, request, context):
        self._unimplemented(context)

    def CreateAzureServiceBusIntegration(self, request, context):
        self._unimplemented(context)

    def GetAzureServiceBusIntegration(self, request, context):
        self._unimplemented(context)

    def UpdateAzureServiceBusIntegration(self, request, context):
        self._unimplemented(context)

    def DeleteAzureServiceBusIntegration(self, request, context):
        self._unimplemented(context)

    def CreatePilotThingsIntegration(self, request, context):
        self._unimplemented(context)

    def GetPilotThingsIntegration(self, request, context):
        self._unimplemented(context)

    def UpdatePilotThingsIntegration(self, request, context):
        self._unimplemented(context)

    def DeletePilotThingsIntegration(self, request, context):
        self._unimplemented(context)

    def ListIntegrations(self, request, context):
        self._unimplemented(context)

    def GenerateMQTTIntegrationClientCertificate(self, request, context):
        self._unimplemented(context)
def add_ApplicationServiceServicer_to_server(servicer, server):
    """Register every api.ApplicationService RPC handler of *servicer* on *server*.

    The handler table is built from a compact (method name, request message,
    response message) spec instead of one literal entry per RPC.  A ``None``
    response message means the RPC replies with ``google.protobuf.Empty``.
    The resulting mapping is identical to the one protoc would emit inline.
    """
    _pb2 = chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2
    _rpc_spec = (
        ('Create', 'CreateApplicationRequest', 'CreateApplicationResponse'),
        ('Get', 'GetApplicationRequest', 'GetApplicationResponse'),
        ('Update', 'UpdateApplicationRequest', None),
        ('Delete', 'DeleteApplicationRequest', None),
        ('List', 'ListApplicationRequest', 'ListApplicationResponse'),
        ('CreateHTTPIntegration', 'CreateHTTPIntegrationRequest', None),
        ('GetHTTPIntegration', 'GetHTTPIntegrationRequest',
         'GetHTTPIntegrationResponse'),
        ('UpdateHTTPIntegration', 'UpdateHTTPIntegrationRequest', None),
        ('DeleteHTTPIntegration', 'DeleteHTTPIntegrationRequest', None),
        ('CreateInfluxDBIntegration', 'CreateInfluxDBIntegrationRequest', None),
        ('GetInfluxDBIntegration', 'GetInfluxDBIntegrationRequest',
         'GetInfluxDBIntegrationResponse'),
        ('UpdateInfluxDBIntegration', 'UpdateInfluxDBIntegrationRequest', None),
        ('DeleteInfluxDBIntegration', 'DeleteInfluxDBIntegrationRequest', None),
        ('CreateThingsBoardIntegration', 'CreateThingsBoardIntegrationRequest', None),
        ('GetThingsBoardIntegration', 'GetThingsBoardIntegrationRequest',
         'GetThingsBoardIntegrationResponse'),
        ('UpdateThingsBoardIntegration', 'UpdateThingsBoardIntegrationRequest', None),
        ('DeleteThingsBoardIntegration', 'DeleteThingsBoardIntegrationRequest', None),
        ('CreateMyDevicesIntegration', 'CreateMyDevicesIntegrationRequest', None),
        ('GetMyDevicesIntegration', 'GetMyDevicesIntegrationRequest',
         'GetMyDevicesIntegrationResponse'),
        ('UpdateMyDevicesIntegration', 'UpdateMyDevicesIntegrationRequest', None),
        ('DeleteMyDevicesIntegration', 'DeleteMyDevicesIntegrationRequest', None),
        ('CreateLoRaCloudIntegration', 'CreateLoRaCloudIntegrationRequest', None),
        ('GetLoRaCloudIntegration', 'GetLoRaCloudIntegrationRequest',
         'GetLoRaCloudIntegrationResponse'),
        ('UpdateLoRaCloudIntegration', 'UpdateLoRaCloudIntegrationRequest', None),
        ('DeleteLoRaCloudIntegration', 'DeleteLoRaCloudIntegrationRequest', None),
        ('CreateGCPPubSubIntegration', 'CreateGCPPubSubIntegrationRequest', None),
        ('GetGCPPubSubIntegration', 'GetGCPPubSubIntegrationRequest',
         'GetGCPPubSubIntegrationResponse'),
        ('UpdateGCPPubSubIntegration', 'UpdateGCPPubSubIntegrationRequest', None),
        ('DeleteGCPPubSubIntegration', 'DeleteGCPPubSubIntegrationRequest', None),
        ('CreateAWSSNSIntegration', 'CreateAWSSNSIntegrationRequest', None),
        ('GetAWSSNSIntegration', 'GetAWSSNSIntegrationRequest',
         'GetAWSSNSIntegrationResponse'),
        ('UpdateAWSSNSIntegration', 'UpdateAWSSNSIntegrationRequest', None),
        ('DeleteAWSSNSIntegration', 'DeleteAWSSNSIntegrationRequest', None),
        ('CreateAzureServiceBusIntegration',
         'CreateAzureServiceBusIntegrationRequest', None),
        ('GetAzureServiceBusIntegration', 'GetAzureServiceBusIntegrationRequest',
         'GetAzureServiceBusIntegrationResponse'),
        ('UpdateAzureServiceBusIntegration',
         'UpdateAzureServiceBusIntegrationRequest', None),
        ('DeleteAzureServiceBusIntegration',
         'DeleteAzureServiceBusIntegrationRequest', None),
        ('CreatePilotThingsIntegration', 'CreatePilotThingsIntegrationRequest', None),
        ('GetPilotThingsIntegration', 'GetPilotThingsIntegrationRequest',
         'GetPilotThingsIntegrationResponse'),
        ('UpdatePilotThingsIntegration', 'UpdatePilotThingsIntegrationRequest', None),
        ('DeletePilotThingsIntegration', 'DeletePilotThingsIntegrationRequest', None),
        ('ListIntegrations', 'ListIntegrationRequest', 'ListIntegrationResponse'),
        ('GenerateMQTTIntegrationClientCertificate',
         'GenerateMQTTIntegrationClientCertificateRequest',
         'GenerateMQTTIntegrationClientCertificateResponse'),
    )
    rpc_method_handlers = {}
    for method, request_name, response_name in _rpc_spec:
        if response_name is None:
            serializer = google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString
        else:
            serializer = getattr(_pb2, response_name).SerializeToString
        rpc_method_handlers[method] = grpc.unary_unary_rpc_method_handler(
            getattr(servicer, method),
            request_deserializer=getattr(_pb2, request_name).FromString,
            response_serializer=serializer,
        )
    generic_handler = grpc.method_handlers_generic_handler(
        'api.ApplicationService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
class ApplicationService(object):
@staticmethod
def Create(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/Create',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateApplicationRequest.SerializeToString,
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateApplicationResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Get(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/Get',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetApplicationRequest.SerializeToString,
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetApplicationResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Update(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/Update',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateApplicationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Delete(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/Delete',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteApplicationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def List(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/List',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.ListApplicationRequest.SerializeToString,
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.ListApplicationResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateHTTPIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/CreateHTTPIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateHTTPIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetHTTPIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/GetHTTPIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetHTTPIntegrationRequest.SerializeToString,
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetHTTPIntegrationResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateHTTPIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/UpdateHTTPIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateHTTPIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteHTTPIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/DeleteHTTPIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteHTTPIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateInfluxDBIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/CreateInfluxDBIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateInfluxDBIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetInfluxDBIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/GetInfluxDBIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetInfluxDBIntegrationRequest.SerializeToString,
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetInfluxDBIntegrationResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateInfluxDBIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/UpdateInfluxDBIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateInfluxDBIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteInfluxDBIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/DeleteInfluxDBIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteInfluxDBIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateThingsBoardIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/CreateThingsBoardIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateThingsBoardIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetThingsBoardIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/GetThingsBoardIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetThingsBoardIntegrationRequest.SerializeToString,
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetThingsBoardIntegrationResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateThingsBoardIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/UpdateThingsBoardIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateThingsBoardIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteThingsBoardIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/DeleteThingsBoardIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteThingsBoardIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateMyDevicesIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/CreateMyDevicesIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateMyDevicesIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetMyDevicesIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/GetMyDevicesIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetMyDevicesIntegrationRequest.SerializeToString,
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetMyDevicesIntegrationResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateMyDevicesIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/UpdateMyDevicesIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateMyDevicesIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteMyDevicesIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/DeleteMyDevicesIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteMyDevicesIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateLoRaCloudIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/CreateLoRaCloudIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateLoRaCloudIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetLoRaCloudIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/GetLoRaCloudIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetLoRaCloudIntegrationRequest.SerializeToString,
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetLoRaCloudIntegrationResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateLoRaCloudIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/UpdateLoRaCloudIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateLoRaCloudIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteLoRaCloudIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/DeleteLoRaCloudIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteLoRaCloudIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateGCPPubSubIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/CreateGCPPubSubIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateGCPPubSubIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetGCPPubSubIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/GetGCPPubSubIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetGCPPubSubIntegrationRequest.SerializeToString,
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetGCPPubSubIntegrationResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateGCPPubSubIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/UpdateGCPPubSubIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateGCPPubSubIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteGCPPubSubIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/DeleteGCPPubSubIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteGCPPubSubIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateAWSSNSIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/CreateAWSSNSIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateAWSSNSIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetAWSSNSIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/GetAWSSNSIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetAWSSNSIntegrationRequest.SerializeToString,
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetAWSSNSIntegrationResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateAWSSNSIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/UpdateAWSSNSIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateAWSSNSIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteAWSSNSIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/DeleteAWSSNSIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteAWSSNSIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateAzureServiceBusIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/CreateAzureServiceBusIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreateAzureServiceBusIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetAzureServiceBusIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/GetAzureServiceBusIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetAzureServiceBusIntegrationRequest.SerializeToString,
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetAzureServiceBusIntegrationResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateAzureServiceBusIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/UpdateAzureServiceBusIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdateAzureServiceBusIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteAzureServiceBusIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/DeleteAzureServiceBusIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeleteAzureServiceBusIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreatePilotThingsIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/CreatePilotThingsIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.CreatePilotThingsIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetPilotThingsIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/GetPilotThingsIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetPilotThingsIntegrationRequest.SerializeToString,
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GetPilotThingsIntegrationResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdatePilotThingsIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/UpdatePilotThingsIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.UpdatePilotThingsIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeletePilotThingsIntegration(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/DeletePilotThingsIntegration',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.DeletePilotThingsIntegrationRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ListIntegrations(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/ListIntegrations',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.ListIntegrationRequest.SerializeToString,
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.ListIntegrationResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GenerateMQTTIntegrationClientCertificate(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.ApplicationService/GenerateMQTTIntegrationClientCertificate',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GenerateMQTTIntegrationClientCertificateRequest.SerializeToString,
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_application__pb2.GenerateMQTTIntegrationClientCertificateResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| true | true |
f71fa2a559498de7857b82dce82c2cf35e13d842 | 1,475 | py | Python | heterogeneous_client.py | samiul272/fed_ml_proj | 16b6ab0e8a5a5c8ca1a7c6636ec167238f63b31b | [
"MIT"
] | null | null | null | heterogeneous_client.py | samiul272/fed_ml_proj | 16b6ab0e8a5a5c8ca1a7c6636ec167238f63b31b | [
"MIT"
] | null | null | null | heterogeneous_client.py | samiul272/fed_ml_proj | 16b6ab0e8a5a5c8ca1a7c6636ec167238f63b31b | [
"MIT"
] | null | null | null | import logging
class HeterogeneousClient:
    """A single federated-learning participant and its private data shard.

    Holds the client's local train/test splits together with the shared
    ``model_trainer`` so the coordinator can run local updates and
    evaluation on this client's behalf.
    """

    def __init__(self, client_idx, local_training_data, local_test_data, local_sample_number, args, device,
                 model_trainer):
        # Reuse the dataset setter so both assignment paths stay in sync.
        self.update_local_dataset(client_idx, local_training_data, local_test_data, local_sample_number)
        logging.info("self.local_sample_number = " + str(self.local_sample_number))
        self.args = args
        self.device = device
        self.model_trainer = model_trainer

    def update_local_dataset(self, client_idx, local_training_data, local_test_data, local_sample_number):
        """Re-point this client object at another client's data shard."""
        self.client_idx = client_idx
        self.local_training_data = local_training_data
        self.local_test_data = local_test_data
        self.local_sample_number = local_sample_number

    def get_sample_number(self):
        """Number of local training samples (presumably used for weighted aggregation — TODO confirm)."""
        return self.local_sample_number

    def train(self, w_global):
        """Load the global weights, train locally, and return the updated weights."""
        trainer = self.model_trainer
        trainer.set_model_params(w_global)
        trainer.train(self.local_training_data, self.device, self.args)
        return trainer.get_model_params()

    def local_test(self, b_use_test_dataset):
        """Run the trainer's test pass on the local test split (or the train split) and return its metrics."""
        dataset = self.local_test_data if b_use_test_dataset else self.local_training_data
        return self.model_trainer.runtest(dataset, self.device, self.args)
| 36.875 | 107 | 0.711864 | import logging
class HeterogeneousClient:
def __init__(self, client_idx, local_training_data, local_test_data, local_sample_number, args, device,
model_trainer):
self.client_idx = client_idx
self.local_training_data = local_training_data
self.local_test_data = local_test_data
self.local_sample_number = local_sample_number
logging.info("self.local_sample_number = " + str(self.local_sample_number))
self.args = args
self.device = device
self.model_trainer = model_trainer
def update_local_dataset(self, client_idx, local_training_data, local_test_data, local_sample_number):
self.client_idx = client_idx
self.local_training_data = local_training_data
self.local_test_data = local_test_data
self.local_sample_number = local_sample_number
def get_sample_number(self):
return self.local_sample_number
def train(self, w_global):
self.model_trainer.set_model_params(w_global)
self.model_trainer.train(self.local_training_data, self.device, self.args)
weights = self.model_trainer.get_model_params()
return weights
def local_test(self, b_use_test_dataset):
if b_use_test_dataset:
test_data = self.local_test_data
else:
test_data = self.local_training_data
metrics = self.model_trainer.runtest(test_data, self.device, self.args)
return metrics
| true | true |
f71fa355b3f48d5c42b444b5071f36c04ba8f953 | 5,989 | py | Python | src/utils.py | SimonPerche/PersonalitiesWars | 495803a5be5e9fde572c3f39086d8a3510c75f58 | [
"MIT"
] | null | null | null | src/utils.py | SimonPerche/PersonalitiesWars | 495803a5be5e9fde572c3f39086d8a3510c75f58 | [
"MIT"
] | null | null | null | src/utils.py | SimonPerche/PersonalitiesWars | 495803a5be5e9fde572c3f39086d8a3510c75f58 | [
"MIT"
] | 1 | 2022-03-08T22:07:50.000Z | 2022-03-08T22:07:50.000Z | from typing import Dict, List, Optional, Union
import contextlib
import asyncio
import discord
from discord.ext import pages
from database import DatabasePersonality, DatabaseDeck
def get_authorized_guild_ids():
    """Guild ids that slash commands are registered against.

    Return ``[]`` instead for a global command (Discord may take up to
    an hour to register those).
    """
    authorized = [550631040826343427]
    return authorized
async def personalities_name_searcher(ctx: discord.AutocompleteContext):
    """Autocomplete source: every personality name containing the typed text (case-insensitive)."""
    typed = ctx.value.lower()
    matches = []
    for perso in DatabasePersonality.get().get_all_personalities():
        if typed in perso['name'].lower():
            matches.append(perso['name'])
    return matches
async def personalities_group_searcher(ctx: discord.AutocompleteContext):
    """Autocomplete source: every group name containing the typed text (case-insensitive)."""
    typed = ctx.value.lower()
    return [g for g in DatabasePersonality.get().get_all_groups() if typed in g.lower()]
async def wishlist_name_searcher(ctx: discord.AutocompleteContext):
    """Autocomplete source: names on the caller's wishlist containing the typed text."""
    ids = DatabaseDeck.get().get_wishlist(ctx.interaction.guild.id, ctx.interaction.user.id)
    typed = ctx.value.lower()
    matches = []
    for perso in DatabasePersonality.get().get_multiple_perso_information(ids):
        if typed in perso['name'].lower():
            matches.append(perso['name'])
    return matches
async def shopping_list_name_searcher(ctx: discord.AutocompleteContext):
    """Autocomplete source: names on the caller's shopping list containing the typed text."""
    ids = DatabaseDeck.get().get_shopping_list(ctx.interaction.guild.id, ctx.interaction.user.id)
    typed = ctx.value.lower()
    matches = []
    for perso in DatabasePersonality.get().get_multiple_perso_information(ids):
        if typed in perso['name'].lower():
            matches.append(perso['name'])
    return matches
async def deck_name_searcher(ctx: discord.AutocompleteContext):
    """Autocomplete source: names in the caller's deck containing the typed text."""
    ids = DatabaseDeck.get().get_user_deck(ctx.interaction.guild.id, ctx.interaction.user.id)
    typed = ctx.value.lower()
    matches = []
    for perso in DatabasePersonality.get().get_multiple_perso_information(ids):
        if typed in perso['name'].lower():
            matches.append(perso['name'])
    return matches
async def badges_name_searcher(ctx: discord.AutocompleteContext):
    """Autocomplete source: badge names of the current guild containing the typed text."""
    typed = ctx.value.lower()
    all_badges = DatabaseDeck.get().get_all_badges(ctx.interaction.guild.id)
    return [b['name'] for b in all_badges if typed in b['name'].lower()]
class ConfirmView(discord.ui.View):
    """Yes/No confirmation prompt restricted to a single recipient.

    ``is_accepted`` stays ``None`` until the recipient clicks a button,
    then becomes ``True`` or ``False``; the buttons are disabled after an
    answer or on timeout.
    """

    def __init__(self, authorized_user: discord.User, timeout: int = 60):
        super().__init__(timeout=timeout)
        # None -> no answer yet; True/False once a button was pressed.
        self.is_accepted = None
        self.authorized_user = authorized_user

    @discord.ui.button(label="Yes", style=discord.ButtonStyle.green)
    async def yes(self, button: discord.ui.Button, interaction: discord.Interaction):
        self.is_accepted = True
        button.label = 'Yes (chosen)'
        await self.disable_update_and_stop(interaction)

    @discord.ui.button(label="No", style=discord.ButtonStyle.red)
    async def no(self, button: discord.ui.Button, interaction: discord.Interaction):
        self.is_accepted = False
        button.label = 'No (chosen)'
        await self.disable_update_and_stop(interaction)

    async def interaction_check(self, interaction: discord.Interaction):
        """Only the intended recipient may press the buttons."""
        if interaction.user == self.authorized_user:
            return True
        await interaction.response.send_message('You cannot answer, you are not the recipient.', ephemeral=True)
        return False

    async def on_timeout(self):
        await self.disable()

    async def disable_update_and_stop(self, interaction: discord.Interaction):
        """Grey out the buttons, push the edit to Discord, and stop listening."""
        await self.disable()
        await interaction.response.edit_message(view=self)
        self.stop()

    async def disable(self):
        for child in self.children:
            child.disabled = True
class PaginatorCustomStartPage(pages.Paginator):
    """``pages.Paginator`` that can open on an arbitrary page instead of page 0.

    The requested ``first_page`` is clamped into ``[0, len(pages) - 1]``.
    Fix over the previous if/elif version: with an empty ``pages`` list the
    old code set ``first_page = len(pages) - 1 == -1`` and the ``elif`` for
    negative values never ran, leaving ``current_page = -1``.
    """

    def __init__(
        self,
        pages: Union[List[str], List[discord.Embed]],
        author_check=True,
        custom_view: Optional[discord.ui.View] = None,
        timeout: Optional[float] = 180.0,
        first_page: int = 0
    ) -> None:
        super().__init__(pages=pages, show_disabled=True, show_indicator=True, author_check=author_check,
                         disable_on_timeout=True, custom_view=custom_view, timeout=timeout)

        # Clamp the requested start page: min() first, then max(), so an
        # empty page list yields 0 instead of the old -1.
        first_page = max(0, min(first_page, len(pages) - 1))

        self.current_page = first_page
        self.update_buttons()

    async def respond(self, interaction: discord.Interaction, ephemeral: bool = False):
        """Sends an interaction response or followup with the paginated items.

        Parameters
        ------------
        interaction: :class:`discord.Interaction`
            The interaction associated with this response.
        ephemeral: :class:`bool`
            Choose whether the message is ephemeral or not.

        Returns
        --------
        :class:`~discord.Interaction`
            The message sent with the paginator.
        """
        page = self.pages[self.current_page]
        self.user = interaction.user

        # A page is either plain text or an Embed; pass only the matching kwarg.
        content = page if isinstance(page, str) else None
        embed = page if isinstance(page, discord.Embed) else None

        if interaction.response.is_done():
            msg = await interaction.followup.send(
                content=content, embed=embed, view=self, ephemeral=ephemeral
            )
        else:
            msg = await interaction.response.send_message(
                content=content, embed=embed, view=self, ephemeral=ephemeral
            )
        # followup.send returns a message object; send_message may return an
        # Interaction whose original message must be fetched explicitly.
        if isinstance(msg, (discord.WebhookMessage, discord.Message)):
            self.message = msg
        elif isinstance(msg, discord.Interaction):
            self.message = await msg.original_message()

        return self.message
# https://stackoverflow.com/questions/49622924/wait-for-timeout-or-event-being-set-for-asyncio-event
async def event_wait(event: asyncio.Event, timeout: float):
# suppress TimeoutError because we'll return False in case of timeout
with contextlib.suppress(asyncio.TimeoutError):
await asyncio.wait_for(event.wait(), timeout)
return event.is_set()
| 39.143791 | 152 | 0.688429 | from typing import Dict, List, Optional, Union
import contextlib
import asyncio
import discord
from discord.ext import pages
from database import DatabasePersonality, DatabaseDeck
def get_authorized_guild_ids():
return [550631040826343427]
async def personalities_name_searcher(ctx: discord.AutocompleteContext):
return [perso['name'] for perso in DatabasePersonality.get().get_all_personalities()
if ctx.value.lower() in perso['name'].lower()]
async def personalities_group_searcher(ctx: discord.AutocompleteContext):
return [group for group in DatabasePersonality.get().get_all_groups() if ctx.value.lower() in group.lower()]
async def wishlist_name_searcher(ctx: discord.AutocompleteContext):
ids = DatabaseDeck.get().get_wishlist(ctx.interaction.guild.id, ctx.interaction.user.id)
personalities = DatabasePersonality.get().get_multiple_perso_information(ids)
return [perso['name'] for perso in personalities
if ctx.value.lower() in perso['name'].lower()]
async def shopping_list_name_searcher(ctx: discord.AutocompleteContext):
ids = DatabaseDeck.get().get_shopping_list(ctx.interaction.guild.id, ctx.interaction.user.id)
personalities = DatabasePersonality.get().get_multiple_perso_information(ids)
return [perso['name'] for perso in personalities
if ctx.value.lower() in perso['name'].lower()]
async def deck_name_searcher(ctx: discord.AutocompleteContext):
ids = DatabaseDeck.get().get_user_deck(ctx.interaction.guild.id, ctx.interaction.user.id)
personalities = DatabasePersonality.get().get_multiple_perso_information(ids)
return [perso['name'] for perso in personalities
if ctx.value.lower() in perso['name'].lower()]
async def badges_name_searcher(ctx: discord.AutocompleteContext):
badges = DatabaseDeck.get().get_all_badges(ctx.interaction.guild.id)
return [badge['name'] for badge in badges if ctx.value.lower() in badge['name'].lower()]
class ConfirmView(discord.ui.View):
def __init__(self, authorized_user: discord.User, timeout: int = 60):
super().__init__(timeout=timeout)
self.is_accepted = None
self.authorized_user = authorized_user
@discord.ui.button(label="Yes", style=discord.ButtonStyle.green)
async def yes(
self, button: discord.ui.Button, interaction: discord.Interaction
):
self.is_accepted = True
button.label = 'Yes (chosen)'
await self.disable_update_and_stop(interaction)
@discord.ui.button(label="No", style=discord.ButtonStyle.red)
async def no(
self, button: discord.ui.Button, interaction: discord.Interaction
):
self.is_accepted = False
button.label = 'No (chosen)'
await self.disable_update_and_stop(interaction)
async def interaction_check(self, interaction: discord.Interaction):
if interaction.user != self.authorized_user:
await interaction.response.send_message('You cannot answer, you are not the recipient.', ephemeral=True)
return False
return True
async def on_timeout(self):
await self.disable()
async def disable_update_and_stop(self, interaction: discord.Interaction):
await self.disable()
await interaction.response.edit_message(view=self)
self.stop()
async def disable(self):
for child in self.children:
child.disabled = True
class PaginatorCustomStartPage(pages.Paginator):
def __init__(
self,
pages: Union[List[str], List[discord.Embed]],
author_check=True,
custom_view: Optional[discord.ui.View] = None,
timeout: Optional[float] = 180.0,
first_page: int = 0
) -> None:
super().__init__(pages=pages, show_disabled=True, show_indicator=True, author_check=author_check,
disable_on_timeout=True, custom_view=custom_view, timeout=timeout)
if first_page >= len(pages):
first_page = len(pages) - 1
elif first_page < 0:
first_page = 0
self.current_page = first_page
self.update_buttons()
async def respond(self, interaction: discord.Interaction, ephemeral: bool = False):
page = self.pages[self.current_page]
self.user = interaction.user
if interaction.response.is_done():
msg = await interaction.followup.send(
content=page if isinstance(page, str) else None, embed=page if isinstance(page, discord.Embed) else None, view=self, ephemeral=ephemeral
)
else:
msg = await interaction.response.send_message(
content=page if isinstance(page, str) else None, embed=page if isinstance(page, discord.Embed) else None, view=self, ephemeral=ephemeral
)
if isinstance(msg, (discord.WebhookMessage, discord.Message)):
self.message = msg
elif isinstance(msg, discord.Interaction):
self.message = await msg.original_message()
return self.message
async def event_wait(event: asyncio.Event, timeout: float):
with contextlib.suppress(asyncio.TimeoutError):
await asyncio.wait_for(event.wait(), timeout)
return event.is_set()
| true | true |
f71fa39892b1a2c86c574196d784cf419690c32e | 449 | py | Python | app/commons/errorCodes.py | handdola/ai-chatbot | f0a336afb873db10b7a5f068b4e1eaa07bf62967 | [
"MIT"
] | 3 | 2017-12-27T19:29:27.000Z | 2018-01-07T02:51:44.000Z | app/commons/errorCodes.py | handdola/ai-chatbot | f0a336afb873db10b7a5f068b4e1eaa07bf62967 | [
"MIT"
] | 108 | 2018-03-26T05:44:22.000Z | 2020-12-14T15:08:38.000Z | app/commons/errorCodes.py | handdola/ai-chatbot | f0a336afb873db10b7a5f068b4e1eaa07bf62967 | [
"MIT"
] | 1 | 2019-05-30T10:50:49.000Z | 2019-05-30T10:50:49.000Z | emptyInput = {"errorCode": 601, "description": "empty input"}
InvalidInput = {"errorCode": 602, "description": "Invalid input"}
UnidentifiedIntent = {
"errorCode": 701,
"description": "Can't identify the intent"}
NotEnoughData = {
"errorCode": 702,
"description": "Not enough Training Data. Please Add more stories"}
UnableToextractentities = {"errorCode": 801,
"description": "Unable extract entities"}
| 34.538462 | 71 | 0.659243 | emptyInput = {"errorCode": 601, "description": "empty input"}
InvalidInput = {"errorCode": 602, "description": "Invalid input"}
UnidentifiedIntent = {
"errorCode": 701,
"description": "Can't identify the intent"}
NotEnoughData = {
"errorCode": 702,
"description": "Not enough Training Data. Please Add more stories"}
UnableToextractentities = {"errorCode": 801,
"description": "Unable extract entities"}
| true | true |
f71fa3cd63c33d94a24ea578cb181d5f2238b651 | 108,132 | py | Python | airflow/jobs.py | yujiantao/incubator-airflow | 97ac37d0b936fd565b113b79f418ff25b245de14 | [
"Apache-2.0"
] | 1 | 2020-05-03T04:34:08.000Z | 2020-05-03T04:34:08.000Z | airflow/jobs.py | yujiantao/incubator-airflow | 97ac37d0b936fd565b113b79f418ff25b245de14 | [
"Apache-2.0"
] | 4 | 2018-03-20T21:24:26.000Z | 2020-05-03T04:23:02.000Z | airflow/jobs.py | yujiantao/incubator-airflow | 97ac37d0b936fd565b113b79f418ff25b245de14 | [
"Apache-2.0"
] | 1 | 2018-10-23T08:58:10.000Z | 2018-10-23T08:58:10.000Z | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import getpass
import logging
import multiprocessing
import os
import signal
import sys
import threading
import time
from collections import defaultdict
from time import sleep
import six
from past.builtins import basestring
from sqlalchemy import (Column, Index, Integer, String, and_, func, not_, or_)
from sqlalchemy.exc import OperationalError
from sqlalchemy.orm.session import make_transient
from airflow import configuration as conf
from airflow import executors, models, settings
from airflow.exceptions import AirflowException
from airflow.models import DAG, DagRun
from airflow.models.dagpickle import DagPickle
from airflow.settings import Stats
from airflow.task.task_runner import get_task_runner
from airflow.ti_deps.dep_context import DepContext, QUEUE_DEPS, RUN_DEPS
from airflow.utils import asciiart, helpers, timezone
from airflow.utils.configuration import tmp_configuration_copy
from airflow.utils.dag_processing import (AbstractDagFileProcessor,
DagFileProcessorAgent,
SimpleDag,
SimpleDagBag,
SimpleTaskInstance,
list_py_file_paths)
from airflow.utils.db import create_session, provide_session
from airflow.utils.email import get_email_address_list, send_email
from airflow.utils.log.logging_mixin import LoggingMixin, StreamLogWriter, set_context
from airflow.utils.net import get_hostname
from airflow.utils.sqlalchemy import UtcDateTime
from airflow.utils.state import State
# Re-export the SQLAlchemy declarative base and the canonical max length for
# DAG/task identifiers, so the ORM models defined below can use them directly.
Base = models.base.Base
ID_LEN = models.ID_LEN
class BaseJob(Base, LoggingMixin):
    """
    Abstract class to be derived for jobs. Jobs are processing items with state
    and duration that aren't task instances. For instance a BackfillJob is
    a collection of task instance runs, but should have its own state, start
    and end time.
    """

    __tablename__ = "job"

    id = Column(Integer, primary_key=True)
    dag_id = Column(String(ID_LEN),)
    state = Column(String(20))
    job_type = Column(String(30))
    start_date = Column(UtcDateTime())
    end_date = Column(UtcDateTime())
    latest_heartbeat = Column(UtcDateTime())
    executor_class = Column(String(500))
    hostname = Column(String(500))
    unixname = Column(String(1000))

    __mapper_args__ = {
        'polymorphic_on': job_type,
        'polymorphic_identity': 'BaseJob'
    }

    __table_args__ = (
        Index('job_type_heart', job_type, latest_heartbeat),
        Index('idx_job_state_heartbeat', state, latest_heartbeat),
    )

    def __init__(
            self,
            executor=executors.GetDefaultExecutor(),
            heartrate=conf.getfloat('scheduler', 'JOB_HEARTBEAT_SEC'),
            *args, **kwargs):
        """
        :param executor: executor used to run this job's task instances.
            NOTE(review): this default is evaluated once at import time, so
            every job created without an explicit executor shares the same
            instance — confirm this is intended before changing it.
        :param heartrate: target number of seconds between heartbeats
        """
        self.hostname = get_hostname()
        self.executor = executor
        self.executor_class = executor.__class__.__name__
        self.start_date = timezone.utcnow()
        self.latest_heartbeat = timezone.utcnow()
        self.heartrate = heartrate
        self.unixname = getpass.getuser()
        self.max_tis_per_query = conf.getint('scheduler', 'max_tis_per_query')
        super(BaseJob, self).__init__(*args, **kwargs)

    def is_alive(self):
        """
        Whether this job heartbeated recently enough to be considered alive.

        Uses timedelta.total_seconds() rather than timedelta.seconds: the
        latter only holds the sub-day component of the delta, so a job that
        had been silent for e.g. one day and five seconds would have been
        misreported as alive.
        """
        return (
            (timezone.utcnow() - self.latest_heartbeat).total_seconds() <
            (conf.getint('scheduler', 'JOB_HEARTBEAT_SEC') * 2.1)
        )

    @provide_session
    def kill(self, session=None):
        """
        Mark this job's DB row as ended, run the on_kill hook, and raise
        AirflowException to abort the running job.
        """
        job = session.query(BaseJob).filter(BaseJob.id == self.id).first()
        job.end_date = timezone.utcnow()
        try:
            self.on_kill()
        except Exception as e:
            # Best-effort hook: a failing on_kill must not prevent the job row
            # from being marked as ended.
            self.log.error('on_kill() method failed: {}'.format(e))
        session.merge(job)
        session.commit()
        raise AirflowException("Job shut down externally.")

    def on_kill(self):
        """
        Will be called when an external kill command is received
        """
        pass

    def heartbeat_callback(self, session=None):
        """Hook invoked on every successful heartbeat; subclasses may override."""
        pass

    def heartbeat(self):
        """
        Heartbeats update the job's entry in the database with a timestamp
        for the latest_heartbeat and allows for the job to be killed
        externally. This allows at the system level to monitor what is
        actually active.

        For instance, an old heartbeat for SchedulerJob would mean something
        is wrong.

        This also allows for any job to be killed externally, regardless
        of who is running it or on which machine it is running.

        Note that if your heartbeat is set to 60 seconds and you call this
        method after 10 seconds of processing since the last heartbeat, it
        will sleep 50 seconds to complete the 60 seconds and keep a steady
        heart rate. If you go over 60 seconds before calling it, it won't
        sleep at all.
        """
        try:
            with create_session() as session:
                job = session.query(BaseJob).filter_by(id=self.id).one()
                make_transient(job)
                session.commit()

            if job.state == State.SHUTDOWN:
                # An external actor flipped the row to SHUTDOWN; honor it.
                self.kill()

            # Figure out how long to sleep for
            sleep_for = 0
            if job.latest_heartbeat:
                sleep_for = max(
                    0,
                    self.heartrate - (timezone.utcnow() -
                                      job.latest_heartbeat).total_seconds())

            sleep(sleep_for)

            # Update last heartbeat time
            with create_session() as session:
                job = session.query(BaseJob).filter(BaseJob.id == self.id).first()
                job.latest_heartbeat = timezone.utcnow()
                session.merge(job)
                session.commit()

                self.heartbeat_callback(session=session)
                self.log.debug('[heartbeat]')
        except OperationalError as e:
            # Transient DB hiccups shouldn't crash the job; skip this beat.
            self.log.error("Scheduler heartbeat got an exception: %s", str(e))

    def run(self):
        """Persist the job row, run _execute(), and record the final state."""
        Stats.incr(self.__class__.__name__.lower() + '_start', 1, 1)
        # Adding an entry in the DB
        with create_session() as session:
            self.state = State.RUNNING
            session.add(self)
            session.commit()
            id_ = self.id
            # Detach so that in-memory mutations don't auto-flush; keep the PK.
            make_transient(self)
            self.id = id_

            try:
                self._execute()
                # In case of max runs or max duration
                self.state = State.SUCCESS
            except SystemExit:
                # In case of ^C or SIGTERM
                self.state = State.SUCCESS
            except Exception:
                self.state = State.FAILED
                raise
            finally:
                self.end_date = timezone.utcnow()
                session.merge(self)
                session.commit()

        Stats.incr(self.__class__.__name__.lower() + '_end', 1, 1)

    def _execute(self):
        """Subclasses implement the actual work here."""
        raise NotImplementedError("This method needs to be overridden")

    @provide_session
    def reset_state_for_orphaned_tasks(self, filter_by_dag_run=None, session=None):
        """
        This function checks if there are any tasks in the dagrun (or all)
        that have a scheduled state but are not known by the
        executor. If it finds those it will reset the state to None
        so they will get picked up again.
        The batch option is for performance reasons as the queries are made in
        sequence.

        :param filter_by_dag_run: the dag_run we want to process, None if all
        :type filter_by_dag_run: models.DagRun
        :return: the TIs reset (in expired SQLAlchemy state)
        :rtype: List(TaskInstance)
        """
        queued_tis = self.executor.queued_tasks
        # also consider running as the state might not have changed in the db yet
        running_tis = self.executor.running

        resettable_states = [State.SCHEDULED, State.QUEUED]
        TI = models.TaskInstance
        DR = models.DagRun
        if filter_by_dag_run is None:
            resettable_tis = (
                session
                .query(TI)
                .join(
                    DR,
                    and_(
                        TI.dag_id == DR.dag_id,
                        TI.execution_date == DR.execution_date))
                .filter(
                    # Only running, non-backfill DagRuns are considered.
                    DR.state == State.RUNNING,
                    DR.run_id.notlike(BackfillJob.ID_PREFIX + '%'),
                    TI.state.in_(resettable_states))).all()
        else:
            resettable_tis = filter_by_dag_run.get_task_instances(state=resettable_states,
                                                                  session=session)
        tis_to_reset = []
        # Can't use an update here since it doesn't support joins
        for ti in resettable_tis:
            # Keep only TIs the executor doesn't know about — those are orphans.
            if ti.key not in queued_tis and ti.key not in running_tis:
                tis_to_reset.append(ti)

        if len(tis_to_reset) == 0:
            return []

        def query(result, items):
            # Re-query with row locks and flip each orphan back to NONE.
            filter_for_tis = ([and_(TI.dag_id == ti.dag_id,
                                    TI.task_id == ti.task_id,
                                    TI.execution_date == ti.execution_date)
                               for ti in items])
            reset_tis = (
                session
                .query(TI)
                .filter(or_(*filter_for_tis), TI.state.in_(resettable_states))
                .with_for_update()
                .all())
            for ti in reset_tis:
                ti.state = State.NONE
                session.merge(ti)
            return result + reset_tis

        # Chunked to keep each SQL statement below max_tis_per_query predicates.
        reset_tis = helpers.reduce_in_chunks(query,
                                             tis_to_reset,
                                             [],
                                             self.max_tis_per_query)

        task_instance_str = '\n\t'.join(
            ["{}".format(x) for x in reset_tis])
        session.commit()

        self.log.info(
            "Reset the following %s TaskInstances:\n\t%s",
            len(reset_tis), task_instance_str
        )
        return reset_tis
class DagFileProcessor(AbstractDagFileProcessor, LoggingMixin):
    """Helps call SchedulerJob.process_file() in a separate process.

    Each instance owns one child process and a multiprocessing.Queue used to
    pass the SimpleDag results back to the parent.
    """

    # Counter that increments everytime an instance of this class is created
    class_creation_counter = 0

    def __init__(self, file_path, pickle_dags, dag_id_white_list, zombies):
        """
        :param file_path: a Python file containing Airflow DAG definitions
        :type file_path: unicode
        :param pickle_dags: whether to serialize the DAG objects to the DB
        :type pickle_dags: bool
        :param dag_id_whitelist: If specified, only look at these DAG ID's
        :type dag_id_whitelist: list[unicode]
        :param zombies: zombie task instances to kill
        :type zombies: list[SimpleTaskInstance]
        """
        self._file_path = file_path
        # Queue that's used to pass results from the child process.
        self._result_queue = multiprocessing.Queue()
        # The process that was launched to process the given .
        self._process = None
        self._dag_id_white_list = dag_id_white_list
        self._pickle_dags = pickle_dags
        self._zombies = zombies
        # The result of Scheduler.process_file(file_path).
        self._result = None
        # Whether the process is done running.
        self._done = False
        # When the process started.
        self._start_time = None
        # This ID is use to uniquely name the process / thread that's launched
        # by this processor instance
        self._instance_id = DagFileProcessor.class_creation_counter
        DagFileProcessor.class_creation_counter += 1

    @property
    def file_path(self):
        # Path of the DAG definition file this processor was created for.
        return self._file_path

    @staticmethod
    def _launch_process(result_queue,
                        file_path,
                        pickle_dags,
                        dag_id_white_list,
                        thread_name,
                        zombies):
        """
        Launch a process to process the given file.

        :param result_queue: the queue to use for passing back the result
        :type result_queue: multiprocessing.Queue
        :param file_path: the file to process
        :type file_path: unicode
        :param pickle_dags: whether to pickle the DAGs found in the file and
            save them to the DB
        :type pickle_dags: bool
        :param dag_id_white_list: if specified, only examine DAG ID's that are
            in this list
        :type dag_id_white_list: list[unicode]
        :param thread_name: the name to use for the process that is launched
        :type thread_name: unicode
        :return: the process that was launched
        :rtype: multiprocessing.Process
        :param zombies: zombie task instances to kill
        :type zombies: list[SimpleTaskInstance]
        """
        def helper():
            # This helper runs in the newly created process
            log = logging.getLogger("airflow.processor")

            stdout = StreamLogWriter(log, logging.INFO)
            stderr = StreamLogWriter(log, logging.WARN)

            set_context(log, file_path)

            try:
                # redirect stdout/stderr to log
                sys.stdout = stdout
                sys.stderr = stderr

                # Re-configure the ORM engine as there are issues with multiple processes
                settings.configure_orm()

                # Change the thread name to differentiate log lines. This is
                # really a separate process, but changing the name of the
                # process doesn't work, so changing the thread name instead.
                threading.current_thread().name = thread_name
                start_time = time.time()

                log.info("Started process (PID=%s) to work on %s",
                         os.getpid(), file_path)
                scheduler_job = SchedulerJob(dag_ids=dag_id_white_list, log=log)
                result = scheduler_job.process_file(file_path,
                                                    zombies,
                                                    pickle_dags)
                result_queue.put(result)
                end_time = time.time()
                log.info(
                    "Processing %s took %.3f seconds", file_path, end_time - start_time
                )
            except Exception:
                # Log exceptions through the logging framework.
                log.exception("Got an exception! Propagating...")
                raise
            finally:
                # Restore the real streams before the child exits.
                sys.stdout = sys.__stdout__
                sys.stderr = sys.__stderr__
                # We re-initialized the ORM within this Process above so we need to
                # tear it down manually here
                settings.dispose_orm()

        p = multiprocessing.Process(target=helper,
                                    args=(),
                                    name="{}-Process".format(thread_name))
        p.start()
        return p

    def start(self):
        """
        Launch the process and start processing the DAG.
        """
        self._process = DagFileProcessor._launch_process(
            self._result_queue,
            self.file_path,
            self._pickle_dags,
            self._dag_id_white_list,
            "DagFileProcessor{}".format(self._instance_id),
            self._zombies)
        self._start_time = timezone.utcnow()

    def terminate(self, sigkill=False):
        """
        Terminate (and then kill) the process launched to process the file.

        :param sigkill: whether to issue a SIGKILL if SIGTERM doesn't work.
        :type sigkill: bool
        """
        if self._process is None:
            raise AirflowException("Tried to call stop before starting!")
        # The queue will likely get corrupted, so remove the reference
        self._result_queue = None
        self._process.terminate()
        # Arbitrarily wait 5s for the process to die
        self._process.join(5)
        if sigkill and self._process.is_alive():
            self.log.warning("Killing PID %s", self._process.pid)
            os.kill(self._process.pid, signal.SIGKILL)

    @property
    def pid(self):
        """
        :return: the PID of the process launched to process the given file
        :rtype: int
        """
        if self._process is None:
            raise AirflowException("Tried to get PID before starting!")
        return self._process.pid

    @property
    def exit_code(self):
        """
        After the process is finished, this can be called to get the return code
        :return: the exit code of the process
        :rtype: int
        """
        if not self._done:
            raise AirflowException("Tried to call retcode before process was finished!")
        return self._process.exitcode

    @property
    def done(self):
        """
        Check if the process launched to process this file is done.
        :return: whether the process is finished running
        :rtype: bool
        """
        if self._process is None:
            raise AirflowException("Tried to see if it's done before starting!")

        if self._done:
            return True

        # In case result queue is corrupted.
        if self._result_queue and not self._result_queue.empty():
            self._result = self._result_queue.get_nowait()
            self._done = True
            self.log.debug("Waiting for %s", self._process)
            self._process.join()
            return True

        # Potential error case when process dies
        if self._result_queue and not self._process.is_alive():
            self._done = True
            # Get the object from the queue or else join() can hang.
            if not self._result_queue.empty():
                self._result = self._result_queue.get_nowait()
            self.log.debug("Waiting for %s", self._process)
            self._process.join()
            return True

        return False

    @property
    def result(self):
        """
        :return: result of running SchedulerJob.process_file()
        :rtype: SimpleDag
        """
        if not self.done:
            raise AirflowException("Tried to get the result before it's done!")
        return self._result

    @property
    def start_time(self):
        """
        :return: when this started to process the file
        :rtype: datetime
        """
        if self._start_time is None:
            raise AirflowException("Tried to get start time before it started!")
        return self._start_time
class SchedulerJob(BaseJob):
"""
This SchedulerJob runs for a specific time interval and schedules the jobs
that are ready to run. It figures out the latest runs for each
task and sees if the dependencies for the next schedules are met.
If so, it creates appropriate TaskInstances and sends run commands to the
executor. It does this for each task in each DAG and repeats.
"""
__mapper_args__ = {
'polymorphic_identity': 'SchedulerJob'
}
    def __init__(
            self,
            dag_id=None,
            dag_ids=None,
            subdir=settings.DAGS_FOLDER,
            num_runs=-1,
            processor_poll_interval=1.0,
            run_duration=None,
            do_pickle=False,
            log=None,
            *args, **kwargs):
        """
        :param dag_id: if specified, only schedule tasks with this DAG ID
        :type dag_id: unicode
        :param dag_ids: if specified, only schedule tasks with these DAG IDs
        :type dag_ids: list[unicode]
        :param subdir: directory containing Python files with Airflow DAG
            definitions, or a specific path to a file
        :type subdir: unicode
        :param num_runs: The number of times to try to schedule each DAG file.
            -1 for unlimited within the run_duration.
        :type num_runs: int
        :param processor_poll_interval: The number of seconds to wait between
            polls of running processors
        :type processor_poll_interval: int
        :param run_duration: how long to run (in seconds) before exiting
        :type run_duration: int
        :param do_pickle: once a DAG object is obtained by executing the Python
            file, whether to serialize the DAG object to the DB
        :type do_pickle: bool
        :param log: optional logger to use instead of the LoggingMixin default
        """
        # for BaseJob compatibility
        self.dag_id = dag_id
        self.dag_ids = [dag_id] if dag_id else []
        if dag_ids:
            self.dag_ids.extend(dag_ids)
        self.subdir = subdir
        self.num_runs = num_runs
        self.run_duration = run_duration
        self._processor_poll_interval = processor_poll_interval
        self.do_pickle = do_pickle
        super(SchedulerJob, self).__init__(*args, **kwargs)
        # Scheduler uses its own, typically faster, heartbeat interval.
        self.heartrate = conf.getint('scheduler', 'SCHEDULER_HEARTBEAT_SEC')
        self.max_threads = conf.getint('scheduler', 'max_threads')
        if log:
            self._log = log
        # SQLite can't handle the concurrent UPDATE-from-subquery path used in
        # _change_state_for_tis_without_dagrun, so remember the backend here.
        self.using_sqlite = False
        if 'sqlite' in conf.get('core', 'sql_alchemy_conn'):
            self.using_sqlite = True
        self.max_tis_per_query = conf.getint('scheduler', 'max_tis_per_query')
        if run_duration is None:
            # Fall back to the configured default when no duration was given.
            self.run_duration = conf.getint('scheduler',
                                            'run_duration')
        self.processor_agent = None
        self._last_loop = False
        # Clean up child processors on Ctrl-C / SIGTERM (see _exit_gracefully).
        signal.signal(signal.SIGINT, self._exit_gracefully)
        signal.signal(signal.SIGTERM, self._exit_gracefully)
    def _exit_gracefully(self, signum, frame):
        """
        Helper method to clean up processor_agent to avoid leaving orphan processes.

        Registered as the SIGINT/SIGTERM handler in __init__.
        """
        self.log.info("Exiting gracefully upon receiving signal {}".format(signum))
        if self.processor_agent:
            # Shut down the DAG-file processor subprocesses before exiting.
            self.processor_agent.end()
        # NOTE(review): os.EX_OK is POSIX-only — confirm Windows is unsupported.
        sys.exit(os.EX_OK)
    @provide_session
    def manage_slas(self, dag, session=None):
        """
        Finding all tasks that have SLAs defined, and sending alert emails
        where needed. New SLA misses are also recorded in the database.

        Where assuming that the scheduler runs often, so we only check for
        tasks that should have succeeded in the past hour.
        """
        if not any([ti.sla for ti in dag.tasks]):
            self.log.info(
                "Skipping SLA check for %s because no tasks in DAG have SLAs",
                dag
            )
            return

        TI = models.TaskInstance
        # Subquery: per task, the latest execution_date that finished
        # successfully (or was skipped) for this DAG.
        sq = (
            session
            .query(
                TI.task_id,
                func.max(TI.execution_date).label('max_ti'))
            .with_hint(TI, 'USE INDEX (PRIMARY)', dialect_name='mysql')
            .filter(TI.dag_id == dag.dag_id)
            .filter(or_(
                TI.state == State.SUCCESS,
                TI.state == State.SKIPPED))
            .filter(TI.task_id.in_(dag.task_ids))
            .group_by(TI.task_id).subquery('sq')
        )

        max_tis = session.query(TI).filter(
            TI.dag_id == dag.dag_id,
            TI.task_id == sq.c.task_id,
            TI.execution_date == sq.c.max_ti,
        ).all()

        ts = timezone.utcnow()
        SlaMiss = models.SlaMiss
        for ti in max_tis:
            task = dag.get_task(ti.task_id)
            dttm = ti.execution_date
            if task.sla:
                # Walk forward schedule-by-schedule from the last success and
                # record every period whose SLA deadline has already passed.
                dttm = dag.following_schedule(dttm)
                while dttm < timezone.utcnow():
                    following_schedule = dag.following_schedule(dttm)
                    if following_schedule + task.sla < timezone.utcnow():
                        session.merge(models.SlaMiss(
                            task_id=ti.task_id,
                            dag_id=ti.dag_id,
                            execution_date=dttm,
                            timestamp=ts))
                    dttm = dag.following_schedule(dttm)
        session.commit()

        # All misses for this DAG that haven't been notified about yet.
        slas = (
            session
            .query(SlaMiss)
            .filter(SlaMiss.notification_sent == False)  # noqa: E712
            .filter(SlaMiss.dag_id == dag.dag_id)
            .all()
        )

        if slas:
            sla_dates = [sla.execution_date for sla in slas]
            qry = (
                session
                .query(TI)
                .filter(TI.state != State.SUCCESS)
                .filter(TI.execution_date.in_(sla_dates))
                .filter(TI.dag_id == dag.dag_id)
                .all()
            )
            blocking_tis = []
            for ti in qry:
                if ti.task_id in dag.task_ids:
                    ti.task = dag.get_task(ti.task_id)
                    blocking_tis.append(ti)
                else:
                    # Task no longer exists in the DAG; drop the stale TI.
                    session.delete(ti)
                    session.commit()

            task_list = "\n".join([
                sla.task_id + ' on ' + sla.execution_date.isoformat()
                for sla in slas])
            blocking_task_list = "\n".join([
                ti.task_id + ' on ' + ti.execution_date.isoformat()
                for ti in blocking_tis])
            # Track whether email or any alert notification sent
            # We consider email or the alert callback as notifications
            email_sent = False
            notification_sent = False
            if dag.sla_miss_callback:
                # Execute the alert callback
                self.log.info(' --------------> ABOUT TO CALL SLA MISS CALL BACK ')
                try:
                    dag.sla_miss_callback(dag, task_list, blocking_task_list, slas,
                                          blocking_tis)
                    notification_sent = True
                except Exception:
                    self.log.exception("Could not call sla_miss_callback for DAG %s",
                                       dag.dag_id)
            email_content = """\
            Here's a list of tasks that missed their SLAs:
            <pre><code>{task_list}\n<code></pre>
            Blocking tasks:
            <pre><code>{blocking_task_list}\n{bug}<code></pre>
            """.format(bug=asciiart.bug, **locals())
            emails = set()
            for task in dag.tasks:
                if task.email:
                    if isinstance(task.email, basestring):
                        emails |= set(get_email_address_list(task.email))
                    elif isinstance(task.email, (list, tuple)):
                        emails |= set(task.email)
            if emails and len(slas):
                try:
                    send_email(
                        emails,
                        "[airflow] SLA miss on DAG=" + dag.dag_id,
                        email_content)
                    email_sent = True
                    notification_sent = True
                except Exception:
                    self.log.exception("Could not send SLA Miss email notification for"
                                       " DAG %s", dag.dag_id)
            # If we sent any notification, update the sla_miss table
            if notification_sent:
                for sla in slas:
                    if email_sent:
                        sla.email_sent = True
                    sla.notification_sent = True
                    session.merge(sla)
            session.commit()
@staticmethod
def update_import_errors(session, dagbag):
"""
For the DAGs in the given DagBag, record any associated import errors and clears
errors for files that no longer have them. These are usually displayed through the
Airflow UI so that users know that there are issues parsing DAGs.
:param session: session for ORM operations
:type session: sqlalchemy.orm.session.Session
:param dagbag: DagBag containing DAGs with import errors
:type dagbag: models.Dagbag
"""
# Clear the errors of the processed files
for dagbag_file in dagbag.file_last_changed:
session.query(models.ImportError).filter(
models.ImportError.filename == dagbag_file
).delete()
# Add the errors of the processed files
for filename, stacktrace in six.iteritems(dagbag.import_errors):
session.add(models.ImportError(
filename=filename,
stacktrace=stacktrace))
session.commit()
    @provide_session
    def create_dag_run(self, dag, session=None):
        """
        This method checks whether a new DagRun needs to be created
        for a DAG based on scheduling interval.
        Returns DagRun if one is scheduled. Otherwise returns None.
        """
        if dag.schedule_interval and conf.getboolean('scheduler', 'USE_JOB_SCHEDULE'):
            active_runs = DagRun.find(
                dag_id=dag.dag_id,
                state=State.RUNNING,
                external_trigger=False,
                session=session
            )
            # return if already reached maximum active runs and no timeout setting
            if len(active_runs) >= dag.max_active_runs and not dag.dagrun_timeout:
                return
            timedout_runs = 0
            for dr in active_runs:
                # Fail runs that have exceeded dagrun_timeout; these free up
                # slots against max_active_runs below.
                if (
                        dr.start_date and dag.dagrun_timeout and
                        dr.start_date < timezone.utcnow() - dag.dagrun_timeout):
                    dr.state = State.FAILED
                    dr.end_date = timezone.utcnow()
                    dag.handle_callback(dr, success=False, reason='dagrun_timeout',
                                        session=session)
                    timedout_runs += 1
            session.commit()
            if len(active_runs) - timedout_runs >= dag.max_active_runs:
                return

            # this query should be replaced by find dagrun
            qry = (
                session.query(func.max(DagRun.execution_date))
                .filter_by(dag_id=dag.dag_id)
                .filter(or_(
                    DagRun.external_trigger == False,  # noqa: E712
                    # add % as a wildcard for the like query
                    DagRun.run_id.like(DagRun.ID_PREFIX + '%')
                ))
            )
            last_scheduled_run = qry.scalar()

            # don't schedule @once again
            if dag.schedule_interval == '@once' and last_scheduled_run:
                return None

            # don't do scheduler catchup for dag's that don't have dag.catchup = True
            if not (dag.catchup or dag.schedule_interval == '@once'):
                # The logic is that we move start_date up until
                # one period before, so that timezone.utcnow() is AFTER
                # the period end, and the job can be created...
                now = timezone.utcnow()
                next_start = dag.following_schedule(now)
                last_start = dag.previous_schedule(now)
                if next_start <= now:
                    new_start = last_start
                else:
                    new_start = dag.previous_schedule(last_start)

                if dag.start_date:
                    if new_start >= dag.start_date:
                        dag.start_date = new_start
                else:
                    dag.start_date = new_start

            next_run_date = None
            if not last_scheduled_run:
                # First run
                task_start_dates = [t.start_date for t in dag.tasks]
                if task_start_dates:
                    next_run_date = dag.normalize_schedule(min(task_start_dates))
                    self.log.debug(
                        "Next run date based on tasks %s",
                        next_run_date
                    )
            else:
                next_run_date = dag.following_schedule(last_scheduled_run)

            # make sure backfills are also considered
            last_run = dag.get_last_dagrun(session=session)
            if last_run and next_run_date:
                while next_run_date <= last_run.execution_date:
                    next_run_date = dag.following_schedule(next_run_date)

            # don't ever schedule prior to the dag's start_date
            if dag.start_date:
                next_run_date = (dag.start_date if not next_run_date
                                 else max(next_run_date, dag.start_date))
                if next_run_date == dag.start_date:
                    next_run_date = dag.normalize_schedule(dag.start_date)

                self.log.debug(
                    "Dag start date: %s. Next run date: %s",
                    dag.start_date, next_run_date
                )

            # don't ever schedule in the future
            if next_run_date > timezone.utcnow():
                return

            # this structure is necessary to avoid a TypeError from concatenating
            # NoneType
            if dag.schedule_interval == '@once':
                period_end = next_run_date
            elif next_run_date:
                period_end = dag.following_schedule(next_run_date)

            # Don't schedule a dag beyond its end_date (as specified by the dag param)
            if next_run_date and dag.end_date and next_run_date > dag.end_date:
                return

            # Don't schedule a dag beyond its end_date (as specified by the task params)
            # Get the min task end date, which may come from the dag.default_args
            min_task_end_date = []
            task_end_dates = [t.end_date for t in dag.tasks if t.end_date]
            if task_end_dates:
                min_task_end_date = min(task_end_dates)
            if next_run_date and min_task_end_date and next_run_date > min_task_end_date:
                return

            # Only create the run once its full schedule period has elapsed.
            # NOTE: `period_end` is unbound when next_run_date is falsy, but
            # `next_run_date and period_end` short-circuits before reading it.
            if next_run_date and period_end and period_end <= timezone.utcnow():
                next_run = dag.create_dagrun(
                    run_id=DagRun.ID_PREFIX + next_run_date.isoformat(),
                    execution_date=next_run_date,
                    start_date=timezone.utcnow(),
                    state=State.RUNNING,
                    external_trigger=False
                )
                return next_run
    @provide_session
    def _process_task_instances(self, dag, queue, session=None):
        """
        This method schedules the tasks for a single DAG by looking at the
        active DAG runs and adding task instances that should run to the
        queue.

        :param dag: the DAG whose runs are examined
        :param queue: list that runnable TaskInstance keys are appended to
        """
        # update the state of the previously active dag runs
        dag_runs = DagRun.find(dag_id=dag.dag_id, state=State.RUNNING, session=session)
        active_dag_runs = []
        for run in dag_runs:
            self.log.info("Examining DAG run %s", run)
            # don't consider runs that are executed in the future
            if run.execution_date > timezone.utcnow():
                self.log.error(
                    "Execution date is in future: %s",
                    run.execution_date
                )
                continue

            if len(active_dag_runs) >= dag.max_active_runs:
                self.log.info("Active dag runs > max_active_run.")
                continue

            # skip backfill dagruns for now as long as they are not really scheduled
            if run.is_backfill:
                continue

            # todo: run.dag is transient but needs to be set
            run.dag = dag
            # todo: preferably the integrity check happens at dag collection time
            run.verify_integrity(session=session)
            run.update_state(session=session)
            if run.state == State.RUNNING:
                make_transient(run)
                active_dag_runs.append(run)

        for run in active_dag_runs:
            self.log.debug("Examining active DAG run: %s", run)
            # this needs a fresh session sometimes tis get detached
            tis = run.get_task_instances(state=(State.NONE,
                                                State.UP_FOR_RETRY))

            # this loop is quite slow as it uses are_dependencies_met for
            # every task (in ti.is_runnable). This is also called in
            # update_state above which has already checked these tasks
            for ti in tis:
                task = dag.get_task(ti.task_id)

                # fixme: ti.task is transient but needs to be set
                ti.task = task

                # future: remove adhoc
                if task.adhoc:
                    continue

                if ti.are_dependencies_met(
                        dep_context=DepContext(flag_upstream_failed=True),
                        session=session):
                    self.log.debug('Queuing task: %s', ti)
                    queue.append(ti.key)
    @provide_session
    def _change_state_for_tis_without_dagrun(self,
                                             simple_dag_bag,
                                             old_states,
                                             new_state,
                                             session=None):
        """
        For all DAG IDs in the SimpleDagBag, look for task instances in the
        old_states and set them to new_state if the corresponding DagRun
        does not exist or exists but is not in the running state. This
        normally should not happen, but it can if the state of DagRuns are
        changed manually.

        :param old_states: examine TaskInstances in this state
        :type old_state: list[State]
        :param new_state: set TaskInstances to this state
        :type new_state: State
        :param simple_dag_bag: TaskInstances associated with DAGs in the
            simple_dag_bag and with states in the old_state will be examined
        :type simple_dag_bag: SimpleDagBag
        """
        tis_changed = 0
        # Outer join so TIs whose DagRun row is missing entirely also match
        # (their DagRun.state comes back NULL).
        query = session \
            .query(models.TaskInstance) \
            .outerjoin(models.DagRun, and_(
                models.TaskInstance.dag_id == models.DagRun.dag_id,
                models.TaskInstance.execution_date == models.DagRun.execution_date)) \
            .filter(models.TaskInstance.dag_id.in_(simple_dag_bag.dag_ids)) \
            .filter(models.TaskInstance.state.in_(old_states)) \
            .filter(or_(
                models.DagRun.state != State.RUNNING,
                models.DagRun.state.is_(None)))
        if self.using_sqlite:
            # SQLite can't do the UPDATE-from-subquery below; fall back to
            # loading rows with FOR UPDATE and mutating them one at a time.
            tis_to_change = query \
                .with_for_update() \
                .all()
            for ti in tis_to_change:
                ti.set_state(new_state, session=session)
                tis_changed += 1
        else:
            # Bulk UPDATE against the subquery's matching rows.
            subq = query.subquery()
            tis_changed = session \
                .query(models.TaskInstance) \
                .filter(and_(
                    models.TaskInstance.dag_id == subq.c.dag_id,
                    models.TaskInstance.task_id == subq.c.task_id,
                    models.TaskInstance.execution_date ==
                    subq.c.execution_date)) \
                .update({models.TaskInstance.state: new_state},
                        synchronize_session=False)
            session.commit()

        if tis_changed > 0:
            self.log.warning(
                "Set %s task instances to state=%s as their associated DagRun was not in RUNNING state",
                tis_changed, new_state
            )
@provide_session
def __get_task_concurrency_map(self, states, session=None):
"""
Returns a map from tasks to number in the states list given.
:param states: List of states to query for
:type states: List[State]
:return: A map from (dag_id, task_id) to count of tasks in states
:rtype: Dict[[String, String], Int]
"""
TI = models.TaskInstance
ti_concurrency_query = (
session
.query(TI.task_id, TI.dag_id, func.count('*'))
.filter(TI.state.in_(states))
.group_by(TI.task_id, TI.dag_id)
).all()
task_map = defaultdict(int)
for result in ti_concurrency_query:
task_id, dag_id, count = result
task_map[(dag_id, task_id)] = count
return task_map
    @provide_session
    def _find_executable_task_instances(self, simple_dag_bag, states, session=None):
        """
        Finds TIs that are ready for execution with respect to pool limits,
        dag concurrency, executor state, and priority.

        :param simple_dag_bag: TaskInstances associated with DAGs in the
            simple_dag_bag will be fetched from the DB and executed
        :type simple_dag_bag: SimpleDagBag
        :param states: Execute TaskInstances in these states
        :type states: Tuple[State]
        :return: task instances that can be queued, ordered best-first
        :rtype: List[TaskInstance]
        """
        executable_tis = []

        # Get all the queued task instances from associated with scheduled
        # DagRuns which are not backfilled (run_id without the backfill
        # prefix), in the given states, and whose dag is not paused.
        TI = models.TaskInstance
        DR = models.DagRun
        DM = models.DagModel
        ti_query = (
            session
            .query(TI)
            .filter(TI.dag_id.in_(simple_dag_bag.dag_ids))
            .outerjoin(
                DR,
                and_(DR.dag_id == TI.dag_id, DR.execution_date == TI.execution_date)
            )
            .filter(or_(DR.run_id == None,  # noqa: E711
                    not_(DR.run_id.like(BackfillJob.ID_PREFIX + '%'))))
            .outerjoin(DM, DM.dag_id == TI.dag_id)
            .filter(or_(DM.dag_id == None,  # noqa: E711
                    not_(DM.is_paused)))
        )
        # SQL NULL is never matched by IN, so a None state needs its own
        # IS NULL clause.
        if None in states:
            ti_query = ti_query.filter(
                or_(TI.state == None, TI.state.in_(states))  # noqa: E711
            )
        else:
            ti_query = ti_query.filter(TI.state.in_(states))

        task_instances_to_examine = ti_query.all()

        if len(task_instances_to_examine) == 0:
            self.log.debug("No tasks to consider for execution.")
            return executable_tis

        # Put one task instance on each line
        task_instance_str = "\n\t".join(
            ["{}".format(x) for x in task_instances_to_examine])
        self.log.info("{} tasks up for execution:\n\t{}"
                      .format(len(task_instances_to_examine),
                              task_instance_str))

        # Get the pool settings
        pools = {p.pool: p for p in session.query(models.Pool).all()}

        # Group the candidate TIs by the pool they want to run in.
        pool_to_task_instances = defaultdict(list)
        for task_instance in task_instances_to_examine:
            pool_to_task_instances[task_instance.pool].append(task_instance)

        states_to_count_as_running = [State.RUNNING, State.QUEUED]
        task_concurrency_map = self.__get_task_concurrency_map(
            states=states_to_count_as_running, session=session)

        # Go through each pool, and queue up a task for execution if there are
        # any open slots in the pool.
        for pool, task_instances in pool_to_task_instances.items():
            if not pool:
                # Arbitrary:
                # If queued outside of a pool, trigger no more than
                # non_pooled_task_slot_count per run
                open_slots = conf.getint('core', 'non_pooled_task_slot_count')
            else:
                if pool not in pools:
                    self.log.warning(
                        "Tasks using non-existent pool '%s' will not be scheduled",
                        pool
                    )
                    open_slots = 0
                else:
                    open_slots = pools[pool].open_slots(session=session)
            num_queued = len(task_instances)
            self.log.info(
                "Figuring out tasks to run in Pool(name={pool}) with {open_slots} "
                "open slots and {num_queued} task instances in queue".format(
                    **locals()
                )
            )

            # Highest priority_weight first; ties broken by earliest
            # execution date.
            priority_sorted_task_instances = sorted(
                task_instances, key=lambda ti: (-ti.priority_weight, ti.execution_date))

            # DAG IDs with running tasks that equal the concurrency limit of the dag
            dag_id_to_possibly_running_task_count = {}

            for task_instance in priority_sorted_task_instances:
                if open_slots <= 0:
                    self.log.info(
                        "Not scheduling since there are %s open slots in pool %s",
                        open_slots, pool
                    )
                    # Can't schedule any more since there are no more open slots.
                    break

                # Check to make sure that the task concurrency of the DAG hasn't been
                # reached.
                dag_id = task_instance.dag_id
                simple_dag = simple_dag_bag.get_dag(dag_id)

                # Lazily fetch (and cache) the running/queued count per DAG.
                if dag_id not in dag_id_to_possibly_running_task_count:
                    dag_id_to_possibly_running_task_count[dag_id] = \
                        DAG.get_num_task_instances(
                            dag_id,
                            simple_dag_bag.get_dag(dag_id).task_ids,
                            states=states_to_count_as_running,
                            session=session)

                current_task_concurrency = dag_id_to_possibly_running_task_count[dag_id]
                task_concurrency_limit = simple_dag_bag.get_dag(dag_id).concurrency
                self.log.info(
                    "DAG %s has %s/%s running and queued tasks",
                    dag_id, current_task_concurrency, task_concurrency_limit
                )
                if current_task_concurrency >= task_concurrency_limit:
                    self.log.info(
                        "Not executing %s since the number of tasks running or queued "
                        "from DAG %s is >= to the DAG's task concurrency limit of %s",
                        task_instance, dag_id, task_concurrency_limit
                    )
                    continue

                # Per-task (not per-DAG) concurrency cap, if the task set one.
                task_concurrency = simple_dag.get_task_special_arg(
                    task_instance.task_id,
                    'task_concurrency')
                if task_concurrency is not None:
                    num_running = task_concurrency_map[
                        (task_instance.dag_id, task_instance.task_id)
                    ]
                    if num_running >= task_concurrency:
                        self.log.info("Not executing %s since the task concurrency for"
                                      " this task has been reached.", task_instance)
                        continue
                    else:
                        task_concurrency_map[(task_instance.dag_id, task_instance.task_id)] += 1

                if self.executor.has_task(task_instance):
                    self.log.debug(
                        "Not handling task %s as the executor reports it is running",
                        task_instance.key
                    )
                    continue
                executable_tis.append(task_instance)
                open_slots -= 1
                dag_id_to_possibly_running_task_count[dag_id] += 1

        task_instance_str = "\n\t".join(
            ["{}".format(x) for x in executable_tis])
        self.log.info(
            "Setting the following tasks to queued state:\n\t%s", task_instance_str)
        # so these dont expire on commit; copy identifying fields, detach the
        # object from the session, and restore the fields afterwards.
        for ti in executable_tis:
            copy_dag_id = ti.dag_id
            copy_execution_date = ti.execution_date
            copy_task_id = ti.task_id
            make_transient(ti)
            ti.dag_id = copy_dag_id
            ti.execution_date = copy_execution_date
            ti.task_id = copy_task_id
        return executable_tis
    @provide_session
    def _change_state_for_executable_task_instances(self, task_instances,
                                                    acceptable_states, session=None):
        """
        Changes the state of task instances in the list with one of the given states
        to QUEUED atomically, and returns the TIs changed in SimpleTaskInstance format.

        :param task_instances: TaskInstances to change the state of
        :type task_instances: List[TaskInstance]
        :param acceptable_states: Filters the TaskInstances updated to be in these states
        :type acceptable_states: Iterable[State]
        :return: List[SimpleTaskInstance]
        """
        if len(task_instances) == 0:
            session.commit()
            return []
        TI = models.TaskInstance
        # One (dag_id, task_id, execution_date) equality clause per TI,
        # OR-ed together below.
        filter_for_ti_state_change = (
            [and_(
                TI.dag_id == ti.dag_id,
                TI.task_id == ti.task_id,
                TI.execution_date == ti.execution_date)
                for ti in task_instances])
        ti_query = (
            session
            .query(TI)
            .filter(or_(*filter_for_ti_state_change)))
        # SQL NULL is never matched by IN, so None needs an IS NULL clause.
        if None in acceptable_states:
            ti_query = ti_query.filter(
                or_(TI.state == None, TI.state.in_(acceptable_states))  # noqa: E711
            )
        else:
            ti_query = ti_query.filter(TI.state.in_(acceptable_states))
        # Row-level locks (SELECT ... FOR UPDATE) make this read-modify-write
        # atomic with respect to concurrent state changers.
        tis_to_set_to_queued = (
            ti_query
            .with_for_update()
            .all())
        if len(tis_to_set_to_queued) == 0:
            self.log.info("No tasks were able to have their state changed to queued.")
            session.commit()
            return []
        # set TIs to queued state; keep an existing queued_dttm if one was
        # already recorded.
        for task_instance in tis_to_set_to_queued:
            task_instance.state = State.QUEUED
            task_instance.queued_dttm = (timezone.utcnow()
                                         if not task_instance.queued_dttm
                                         else task_instance.queued_dttm)
            session.merge(task_instance)
        # Generate a list of SimpleTaskInstance for the use of queuing
        # them in the executor.
        simple_task_instances = [SimpleTaskInstance(ti) for ti in
                                 tis_to_set_to_queued]
        task_instance_str = "\n\t".join(
            ["{}".format(x) for x in tis_to_set_to_queued])
        session.commit()
        self.log.info("Setting the following {} tasks to queued state:\n\t{}"
                      .format(len(tis_to_set_to_queued), task_instance_str))
        return simple_task_instances
def _enqueue_task_instances_with_queued_state(self, simple_dag_bag,
simple_task_instances):
"""
Takes task_instances, which should have been set to queued, and enqueues them
with the executor.
:param simple_task_instances: TaskInstances to enqueue
:type simple_task_instances: List[SimpleTaskInstance]
:param simple_dag_bag: Should contains all of the task_instances' dags
:type simple_dag_bag: SimpleDagBag
"""
TI = models.TaskInstance
# actually enqueue them
for simple_task_instance in simple_task_instances:
simple_dag = simple_dag_bag.get_dag(simple_task_instance.dag_id)
command = TI.generate_command(
simple_task_instance.dag_id,
simple_task_instance.task_id,
simple_task_instance.execution_date,
local=True,
mark_success=False,
ignore_all_deps=False,
ignore_depends_on_past=False,
ignore_task_deps=False,
ignore_ti_state=False,
pool=simple_task_instance.pool,
file_path=simple_dag.full_filepath,
pickle_id=simple_dag.pickle_id)
priority = simple_task_instance.priority_weight
queue = simple_task_instance.queue
self.log.info(
"Sending %s to executor with priority %s and queue %s",
simple_task_instance.key, priority, queue
)
self.executor.queue_command(
simple_task_instance,
command,
priority=priority,
queue=queue)
    @provide_session
    def _execute_task_instances(self,
                                simple_dag_bag,
                                states,
                                session=None):
        """
        Attempts to execute TaskInstances that should be executed by the scheduler.

        There are three steps:
        1. Pick TIs by priority with the constraint that they are in the expected states
        and that we do exceed max_active_runs or pool limits.
        2. Change the state for the TIs above atomically.
        3. Enqueue the TIs in the executor.

        :param simple_dag_bag: TaskInstances associated with DAGs in the
            simple_dag_bag will be fetched from the DB and executed
        :type simple_dag_bag: SimpleDagBag
        :param states: Execute TaskInstances in these states
        :type states: Tuple[State]
        :return: Number of task instance with state changed.
        """
        executable_tis = self._find_executable_task_instances(simple_dag_bag, states,
                                                              session=session)

        def query(result, items):
            # Reducer applied per chunk: atomically flip the chunk to
            # QUEUED, enqueue it with the executor, and accumulate the count.
            simple_tis_with_state_changed = \
                self._change_state_for_executable_task_instances(items,
                                                                 states,
                                                                 session=session)
            self._enqueue_task_instances_with_queued_state(
                simple_dag_bag,
                simple_tis_with_state_changed)
            session.commit()
            return result + len(simple_tis_with_state_changed)

        # Limit the number of TIs touched per DB query to max_tis_per_query.
        return helpers.reduce_in_chunks(query, executable_tis, 0, self.max_tis_per_query)
    @provide_session
    def _change_state_for_tasks_failed_to_execute(self, session):
        """
        If there are tasks left over in the executor,
        we set them back to SCHEDULED to avoid creating hanging tasks.

        :param session: session for ORM operations
        """
        if self.executor.queued_tasks:
            TI = models.TaskInstance
            # One equality clause per task the executor still has queued.
            filter_for_ti_state_change = (
                [and_(
                    TI.dag_id == dag_id,
                    TI.task_id == task_id,
                    TI.execution_date == execution_date,
                    # The TI.try_number will return raw try_number+1 since the
                    # ti is not running. And we need to -1 to match the DB record.
                    TI._try_number == try_number - 1,
                    TI.state == State.QUEUED)
                for dag_id, task_id, execution_date, try_number
                in self.executor.queued_tasks.keys()])
            ti_query = (session.query(TI)
                        .filter(or_(*filter_for_ti_state_change)))
            # Row locks make the QUEUED -> SCHEDULED flip atomic.
            tis_to_set_to_scheduled = (ti_query
                                       .with_for_update()
                                       .all())
            if len(tis_to_set_to_scheduled) == 0:
                session.commit()
                return
            # set TIs back to SCHEDULED so a later loop can re-queue them
            for task_instance in tis_to_set_to_scheduled:
                task_instance.state = State.SCHEDULED
            task_instance_str = "\n\t".join(
                ["{}".format(x) for x in tis_to_set_to_scheduled])
            session.commit()
            self.log.info("Set the following tasks to scheduled state:\n\t{}"
                          .format(task_instance_str))
def _process_dags(self, dagbag, dags, tis_out):
"""
Iterates over the dags and processes them. Processing includes:
1. Create appropriate DagRun(s) in the DB.
2. Create appropriate TaskInstance(s) in the DB.
3. Send emails for tasks that have missed SLAs.
:param dagbag: a collection of DAGs to process
:type dagbag: models.DagBag
:param dags: the DAGs from the DagBag to process
:type dags: DAG
:param tis_out: A queue to add generated TaskInstance objects
:type tis_out: multiprocessing.Queue[TaskInstance]
:return: None
"""
for dag in dags:
dag = dagbag.get_dag(dag.dag_id)
if dag.is_paused:
self.log.info("Not processing DAG %s since it's paused", dag.dag_id)
continue
if not dag:
self.log.error("DAG ID %s was not found in the DagBag", dag.dag_id)
continue
self.log.info("Processing %s", dag.dag_id)
dag_run = self.create_dag_run(dag)
if dag_run:
self.log.info("Created %s", dag_run)
self._process_task_instances(dag, tis_out)
self.manage_slas(dag)
    @provide_session
    def _process_executor_events(self, simple_dag_bag, session=None):
        """
        Respond to executor events.

        For each FAILED/SUCCESS event whose DB record is still QUEUED at the
        same try number, treat the task as externally killed and run the
        failure handling path.
        """
        # TODO: this shares quite a lot of code with _manage_executor_state
        TI = models.TaskInstance
        for key, state in list(self.executor.get_event_buffer(simple_dag_bag.dag_ids)
                               .items()):
            dag_id, task_id, execution_date, try_number = key
            self.log.info(
                "Executor reports %s.%s execution_date=%s as %s for try_number %s",
                dag_id, task_id, execution_date, state, try_number
            )
            if state == State.FAILED or state == State.SUCCESS:
                qry = session.query(TI).filter(TI.dag_id == dag_id,
                                               TI.task_id == task_id,
                                               TI.execution_date == execution_date)
                ti = qry.first()
                if not ti:
                    # NOTE(review): ti is None here, so this message always
                    # prints "TaskInstance None ..." — consider logging the
                    # key components instead.
                    self.log.warning("TaskInstance %s went missing from the database", ti)
                    continue

                # TODO: should we fail RUNNING as well, as we do in Backfills?
                if ti.try_number == try_number and ti.state == State.QUEUED:
                    msg = ("Executor reports task instance {} finished ({}) "
                           "although the task says its {}. Was the task "
                           "killed externally?".format(ti, state, ti.state))
                    self.log.error(msg)
                    try:
                        # Re-parse the DAG file so the TI has its task object
                        # attached before running failure callbacks/retries.
                        simple_dag = simple_dag_bag.get_dag(dag_id)
                        dagbag = models.DagBag(simple_dag.full_filepath)
                        dag = dagbag.get_dag(dag_id)
                        ti.task = dag.get_task(task_id)
                        ti.handle_failure(msg)
                    except Exception:
                        # Fall back to a plain FAILED state when the DAG file
                        # cannot be loaded (no callbacks, no retries).
                        self.log.error("Cannot load the dag bag to handle failure for %s"
                                       ". Setting task to FAILED without callbacks or "
                                       "retries. Do you have enough resources?", ti)
                        ti.state = State.FAILED
                        session.merge(ti)
                        session.commit()
    def _execute(self):
        """
        Entry point of the scheduler job: set up DAG-file discovery and the
        DagFileProcessorAgent, then run the main loop via _execute_helper().
        """
        self.log.info("Starting the scheduler")

        # DAGs can be pickled for easier remote execution by some executors
        pickle_dags = False
        if self.do_pickle and self.executor.__class__ not in \
                (executors.LocalExecutor, executors.SequentialExecutor):
            pickle_dags = True

        self.log.info("Running execute loop for %s seconds", self.run_duration)
        self.log.info("Processing each file at most %s times", self.num_runs)

        # Build up a list of Python files that could contain DAGs
        self.log.info("Searching for files in %s", self.subdir)
        known_file_paths = list_py_file_paths(self.subdir)
        self.log.info("There are %s files in %s", len(known_file_paths), self.subdir)

        def processor_factory(file_path, zombies):
            # Each DAG file is parsed by its own DagFileProcessor.
            return DagFileProcessor(file_path,
                                    pickle_dags,
                                    self.dag_ids,
                                    zombies)

        # When using sqlite, we do not use async_mode
        # so the scheduler job and DAG parser don't access the DB at the same time.
        async_mode = not self.using_sqlite

        self.processor_agent = DagFileProcessorAgent(self.subdir,
                                                     known_file_paths,
                                                     self.num_runs,
                                                     processor_factory,
                                                     async_mode)
        try:
            self._execute_helper()
        except Exception:
            self.log.exception("Exception when executing execute_helper")
        finally:
            # Always shut down the parser child processes, even on failure.
            self.processor_agent.end()
            self.log.info("Exited execute loop")
    def _execute_helper(self):
        """
        The actual scheduler loop. The main steps in the loop are:

        #. Harvest DAG parsing results through DagFileProcessorAgent
        #. Find and queue executable tasks
           (change task instance state in DB, then queue tasks in executor)
        #. Heartbeat executor
           (execute queued tasks asynchronously, sync running task states)

        Following is a graphic representation of these steps.

        .. image:: ../docs/img/scheduler_loop.jpg

        :return: None
        """
        self.executor.start()

        self.log.info("Resetting orphaned tasks for active dag runs")
        self.reset_state_for_orphaned_tasks()

        # Start after resetting orphaned tasks to avoid stressing out DB.
        self.processor_agent.start()

        execute_start_time = timezone.utcnow()

        # Last time that self.heartbeat() was called.
        last_self_heartbeat_time = timezone.utcnow()

        # For the execute duration, parse and schedule DAGs
        # (a negative run_duration means loop forever).
        while (timezone.utcnow() - execute_start_time).total_seconds() < \
                self.run_duration or self.run_duration < 0:
            self.log.debug("Starting Loop...")
            loop_start_time = time.time()

            if self.using_sqlite:
                self.processor_agent.heartbeat()
                # For the sqlite case w/ 1 thread, wait until the processor
                # is finished to avoid concurrent access to the DB.
                self.log.debug(
                    "Waiting for processors to finish since we're using sqlite")
                self.processor_agent.wait_until_finished()

            self.log.info("Harvesting DAG parsing results")
            simple_dags = self.processor_agent.harvest_simple_dags()
            self.log.debug("Harvested {} SimpleDAGs".format(len(simple_dags)))

            # Send tasks for execution if available
            simple_dag_bag = SimpleDagBag(simple_dags)
            if len(simple_dags) > 0:
                try:
                    simple_dag_bag = SimpleDagBag(simple_dags)

                    # Handle cases where a DAG run state is set (perhaps manually) to
                    # a non-running state. Handle task instances that belong to
                    # DAG runs in those states

                    # If a task instance is up for retry but the corresponding DAG run
                    # isn't running, mark the task instance as FAILED so we don't try
                    # to re-run it.
                    self._change_state_for_tis_without_dagrun(simple_dag_bag,
                                                              [State.UP_FOR_RETRY],
                                                              State.FAILED)
                    # If a task instance is scheduled or queued, but the corresponding
                    # DAG run isn't running, set the state to NONE so we don't try to
                    # re-run it.
                    self._change_state_for_tis_without_dagrun(simple_dag_bag,
                                                              [State.QUEUED,
                                                               State.SCHEDULED],
                                                              State.NONE)

                    self._execute_task_instances(simple_dag_bag,
                                                 (State.SCHEDULED,))
                except Exception as e:
                    self.log.error("Error queuing tasks")
                    self.log.exception(e)
                    continue

            # Call heartbeats
            self.log.debug("Heartbeating the executor")
            self.executor.heartbeat()

            # Anything still sitting in the executor's queue goes back to
            # SCHEDULED so it isn't stranded.
            self._change_state_for_tasks_failed_to_execute()

            # Process events from the executor
            self._process_executor_events(simple_dag_bag)

            # Heartbeat the scheduler periodically
            time_since_last_heartbeat = (timezone.utcnow() -
                                         last_self_heartbeat_time).total_seconds()
            if time_since_last_heartbeat > self.heartrate:
                self.log.debug("Heartbeating the scheduler")
                self.heartbeat()
                last_self_heartbeat_time = timezone.utcnow()

            loop_end_time = time.time()
            loop_duration = loop_end_time - loop_start_time
            self.log.debug(
                "Ran scheduling loop in %.2f seconds",
                loop_duration)
            self.log.debug("Sleeping for %.2f seconds", self._processor_poll_interval)
            time.sleep(self._processor_poll_interval)

            # Exit early for a test mode, run one additional scheduler loop
            # to reduce the possibility that parsed DAG was put into the queue
            # by the DAG manager but not yet received by DAG agent.
            if self.processor_agent.done:
                self._last_loop = True

            if self._last_loop:
                self.log.info("Exiting scheduler loop as all files"
                              " have been processed {} times".format(self.num_runs))
                break

            # Throttle to at most one loop iteration per second.
            if loop_duration < 1:
                sleep_length = 1 - loop_duration
                self.log.debug(
                    "Sleeping for {0:.2f} seconds to prevent excessive logging"
                    .format(sleep_length))
                sleep(sleep_length)

        # Stop any processors
        self.processor_agent.terminate()

        # Verify that all files were processed, and if so, deactivate DAGs that
        # haven't been touched by the scheduler as they likely have been
        # deleted.
        if self.processor_agent.all_files_processed:
            self.log.info(
                "Deactivating DAGs that haven't been touched since %s",
                execute_start_time.isoformat()
            )
            models.DAG.deactivate_stale_dags(execute_start_time)

        self.executor.end()

        settings.Session.remove()
    @provide_session
    def process_file(self, file_path, zombies, pickle_dags=False, session=None):
        """
        Process a Python file containing Airflow DAGs.

        This includes:

        1. Execute the file and look for DAG objects in the namespace.
        2. Pickle the DAG and save it to the DB (if necessary).
        3. For each DAG, see what tasks should run and create appropriate task
           instances in the DB.
        4. Record any errors importing the file into ORM
        5. Kill (in ORM) any task instances belonging to the DAGs that haven't
           issued a heartbeat in a while.

        Returns a list of SimpleDag objects that represent the DAGs found in
        the file

        :param file_path: the path to the Python file that should be executed
        :type file_path: unicode
        :param zombies: zombie task instances to kill.
        :type zombies: list[SimpleTaskInstance]
        :param pickle_dags: whether serialize the DAGs found in the file and
            save them to the db
        :type pickle_dags: bool
        :return: a list of SimpleDags made from the Dags found in the file
        :rtype: list[SimpleDag]
        """
        self.log.info("Processing file %s for tasks to queue", file_path)
        # As DAGs are parsed from this file, they will be converted into SimpleDags
        simple_dags = []

        try:
            dagbag = models.DagBag(file_path, include_examples=False)
        except Exception:
            self.log.exception("Failed at reloading the DAG file %s", file_path)
            Stats.incr('dag_file_refresh_error', 1, 1)
            return []

        if len(dagbag.dags) > 0:
            self.log.info("DAG(s) %s retrieved from %s", dagbag.dags.keys(), file_path)
        else:
            self.log.warning("No viable dags retrieved from %s", file_path)
            self.update_import_errors(session, dagbag)
            return []

        # Save individual DAGs in the ORM and update DagModel.last_scheduled_time
        for dag in dagbag.dags.values():
            dag.sync_to_db()

        paused_dag_ids = [dag.dag_id for dag in dagbag.dags.values()
                          if dag.is_paused]

        # Pickle the DAGs (if necessary) and put them into a SimpleDag
        for dag_id in dagbag.dags:
            dag = dagbag.get_dag(dag_id)
            pickle_id = None
            if pickle_dags:
                pickle_id = dag.pickle(session).id

            # Only return DAGs that are not paused
            if dag_id not in paused_dag_ids:
                simple_dags.append(SimpleDag(dag, pickle_id=pickle_id))

        # Restrict to this processor's dag_ids filter when one is configured;
        # either way, skip paused DAGs (and sub-DAGs in the unfiltered case).
        if len(self.dag_ids) > 0:
            dags = [dag for dag in dagbag.dags.values()
                    if dag.dag_id in self.dag_ids and
                    dag.dag_id not in paused_dag_ids]
        else:
            dags = [dag for dag in dagbag.dags.values()
                    if not dag.parent_dag and
                    dag.dag_id not in paused_dag_ids]

        # Not using multiprocessing.Queue() since it's no longer a separate
        # process and due to some unusual behavior. (empty() incorrectly
        # returns true?)
        ti_keys_to_schedule = []

        self._process_dags(dagbag, dags, ti_keys_to_schedule)

        for ti_key in ti_keys_to_schedule:
            # ti_key is (dag_id, task_id, execution_date).
            dag = dagbag.dags[ti_key[0]]
            task = dag.get_task(ti_key[1])
            ti = models.TaskInstance(task, ti_key[2])

            ti.refresh_from_db(session=session, lock_for_update=True)
            # We can defer checking the task dependency checks to the worker themselves
            # since they can be expensive to run in the scheduler.
            dep_context = DepContext(deps=QUEUE_DEPS, ignore_task_deps=True)

            # Only schedule tasks that have their dependencies met, e.g. to avoid
            # a task that recently got its state changed to RUNNING from somewhere
            # other than the scheduler from getting its state overwritten.
            # TODO(aoen): It's not great that we have to check all the task instance
            # dependencies twice; once to get the task scheduled, and again to actually
            # run the task. We should try to come up with a way to only check them once.
            if ti.are_dependencies_met(
                    dep_context=dep_context,
                    session=session,
                    verbose=True):
                # Task starts out in the scheduled state. All tasks in the
                # scheduled state will be sent to the executor
                ti.state = State.SCHEDULED

            # Also save this task instance to the DB.
            self.log.info("Creating / updating %s in ORM", ti)
            session.merge(ti)
        # commit batch
        session.commit()

        # Record import errors into the ORM
        try:
            self.update_import_errors(session, dagbag)
        except Exception:
            self.log.exception("Error logging import errors!")
        try:
            dagbag.kill_zombies(zombies)
        except Exception:
            self.log.exception("Error killing zombies!")

        return simple_dags
    @provide_session
    def heartbeat_callback(self, session=None):
        """Emit a liveness metric every time the scheduler job heartbeats."""
        Stats.incr('scheduler_heartbeat', 1, 1)
class BackfillJob(BaseJob):
"""
A backfill job consists of a dag or subdag for a specific time range. It
triggers a set of task instance runs, in the right order and lasts for
as long as it takes for the set of task instance to be completed.
"""
ID_PREFIX = 'backfill_'
ID_FORMAT_PREFIX = ID_PREFIX + '{0}'
__mapper_args__ = {
'polymorphic_identity': 'BackfillJob'
}
    class _DagRunTaskStatus(object):
        """
        Internal status of the backfill job. This class is intended to be instantiated
        only within a BackfillJob instance and will track the execution of tasks,
        e.g. running, skipped, succeeded, failed, etc. Information about the dag runs
        related to the backfill job are also being tracked in this structure,
        e.g. finished runs, etc. Any other status related information related to the
        execution of dag runs / tasks can be included in this structure since it makes
        it easier to pass it around.
        """
        # TODO(edgarRd): AIRFLOW-1444: Add consistency check on counts
        def __init__(self,
                     to_run=None,
                     running=None,
                     skipped=None,
                     succeeded=None,
                     failed=None,
                     not_ready=None,
                     deadlocked=None,
                     active_runs=None,
                     executed_dag_run_dates=None,
                     finished_runs=0,
                     total_runs=0,
                     ):
            """
            :param to_run: Tasks to run in the backfill
            :type to_run: dict[Tuple[String, String, DateTime], TaskInstance]
            :param running: Maps running task instance key to task instance object
            :type running: dict[Tuple[String, String, DateTime], TaskInstance]
            :param skipped: Tasks that have been skipped
            :type skipped: set[Tuple[String, String, DateTime]]
            :param succeeded: Tasks that have succeeded so far
            :type succeeded: set[Tuple[String, String, DateTime]]
            :param failed: Tasks that have failed
            :type failed: set[Tuple[String, String, DateTime]]
            :param not_ready: Tasks not ready for execution
            :type not_ready: set[Tuple[String, String, DateTime]]
            :param deadlocked: Deadlocked tasks
            :type deadlocked: set[Tuple[String, String, DateTime]]
            :param active_runs: Active dag runs at a certain point in time
            :type active_runs: list[DagRun]
            :param executed_dag_run_dates: Datetime objects for the executed dag runs
            :type executed_dag_run_dates: set[Datetime]
            :param finished_runs: Number of finished runs so far
            :type finished_runs: int
            :param total_runs: Number of total dag runs able to run
            :type total_runs: int
            """
            # Fresh containers are created per instance here (instead of as
            # mutable default arguments) so instances never share state.
            self.to_run = to_run or dict()
            self.running = running or dict()
            self.skipped = skipped or set()
            self.succeeded = succeeded or set()
            self.failed = failed or set()
            self.not_ready = not_ready or set()
            self.deadlocked = deadlocked or set()
            self.active_runs = active_runs or list()
            self.executed_dag_run_dates = executed_dag_run_dates or set()
            self.finished_runs = finished_runs
            self.total_runs = total_runs
    def __init__(
            self,
            dag,
            start_date=None,
            end_date=None,
            mark_success=False,
            donot_pickle=False,
            ignore_first_depends_on_past=False,
            ignore_task_deps=False,
            pool=None,
            delay_on_limit_secs=1.0,
            verbose=False,
            conf=None,
            rerun_failed_tasks=False,
            *args, **kwargs):
        """
        :param dag: DAG object.
        :type dag: `class DAG`.
        :param start_date: start date for the backfill date range.
        :type start_date: datetime.
        :param end_date: end date for the backfill date range.
        :type end_date: datetime
        :param mark_success: flag whether to mark the task auto success.
        :type mark_success: bool
        :param donot_pickle: whether pickle
        :type donot_pickle: bool
        :param ignore_first_depends_on_past: whether to ignore depend on past
        :type ignore_first_depends_on_past: bool
        :param ignore_task_deps: whether to ignore the task dependency
        :type ignore_task_deps: bool
        :param pool: pool for the backfilled task instances
            (NOTE(review): original docstring typed this as list, but it is
            only stored here — confirm expected type against callers)
        :param delay_on_limit_secs: wait time in seconds between scheduling
            attempts — presumably applied when a dag run limit is hit;
            confirm usage in the run loop
        :param verbose: flag to whether display verbose message to backfill console
        :type verbose: bool
        :param conf: a dictionary which user could pass k-v pairs for backfill
        :type conf: dictionary
        :param rerun_failed_tasks: flag to whether to
            auto rerun the failed task in backfill
        :type rerun_failed_tasks: bool
        :param args:
        :param kwargs:
        """
        self.dag = dag
        self.dag_id = dag.dag_id
        self.bf_start_date = start_date
        self.bf_end_date = end_date
        self.mark_success = mark_success
        self.donot_pickle = donot_pickle
        self.ignore_first_depends_on_past = ignore_first_depends_on_past
        self.ignore_task_deps = ignore_task_deps
        self.pool = pool
        self.delay_on_limit_secs = delay_on_limit_secs
        self.verbose = verbose
        self.conf = conf
        self.rerun_failed_tasks = rerun_failed_tasks
        super(BackfillJob, self).__init__(*args, **kwargs)
def _update_counters(self, ti_status):
"""
Updates the counters per state of the tasks that were running. Can re-add
to tasks to run in case required.
:param ti_status: the internal status of the backfill job tasks
:type ti_status: BackfillJob._DagRunTaskStatus
"""
for key, ti in list(ti_status.running.items()):
ti.refresh_from_db()
if ti.state == State.SUCCESS:
ti_status.succeeded.add(key)
self.log.debug("Task instance %s succeeded. Don't rerun.", ti)
ti_status.running.pop(key)
continue
elif ti.state == State.SKIPPED:
ti_status.skipped.add(key)
self.log.debug("Task instance %s skipped. Don't rerun.", ti)
ti_status.running.pop(key)
continue
elif ti.state == State.FAILED:
self.log.error("Task instance %s failed", ti)
ti_status.failed.add(key)
ti_status.running.pop(key)
continue
# special case: if the task needs to run again put it back
elif ti.state == State.UP_FOR_RETRY:
self.log.warning("Task instance %s is up for retry", ti)
ti_status.running.pop(key)
ti_status.to_run[key] = ti
# special case: The state of the task can be set to NONE by the task itself
# when it reaches concurrency limits. It could also happen when the state
# is changed externally, e.g. by clearing tasks from the ui. We need to cover
# for that as otherwise those tasks would fall outside of the scope of
# the backfill suddenly.
elif ti.state == State.NONE:
self.log.warning(
"FIXME: task instance %s state was set to none externally or "
"reaching concurrency limits. Re-adding task to queue.",
ti
)
ti.set_state(State.SCHEDULED)
ti_status.running.pop(key)
ti_status.to_run[key] = ti
def _manage_executor_state(self, running):
"""
Checks if the executor agrees with the state of task instances
that are running
:param running: dict of key, task to verify
"""
executor = self.executor
for key, state in list(executor.get_event_buffer().items()):
if key not in running:
self.log.warning(
"%s state %s not in running=%s",
key, state, running.values()
)
continue
ti = running[key]
ti.refresh_from_db()
self.log.debug("Executor state: %s task %s", state, ti)
if state == State.FAILED or state == State.SUCCESS:
if ti.state == State.RUNNING or ti.state == State.QUEUED:
msg = ("Executor reports task instance {} finished ({}) "
"although the task says its {}. Was the task "
"killed externally?".format(ti, state, ti.state))
self.log.error(msg)
ti.handle_failure(msg)
    @provide_session
    def _get_dag_run(self, run_date, session=None):
        """
        Returns a dag run for the given run date, which will be matched to an existing
        dag run if available or create a new dag run otherwise. If the max_active_runs
        limit is reached, this function will return None.

        :param run_date: the execution date for the dag run
        :type run_date: datetime
        :param session: the database session object
        :type session: Session
        :return: a DagRun in state RUNNING or None
        """
        run_id = BackfillJob.ID_FORMAT_PREFIX.format(run_date.isoformat())

        # consider max_active_runs but ignore when running subdags
        respect_dag_max_active_limit = (True
                                        if (self.dag.schedule_interval and
                                            not self.dag.is_subdag)
                                        else False)

        current_active_dag_count = self.dag.get_num_active_runs(external_trigger=False)

        # check if we are scheduling on top of a already existing dag_run
        # we could find a "scheduled" run instead of a "backfill"
        run = DagRun.find(dag_id=self.dag.dag_id,
                          execution_date=run_date,
                          session=session)
        if run is not None and len(run) > 0:
            run = run[0]
            if run.state == State.RUNNING:
                # Re-using an already-running run must not be blocked by the
                # active-run limit.
                respect_dag_max_active_limit = False
        else:
            run = None

        # enforce max_active_runs limit for dag, special cases already
        # handled by respect_dag_max_active_limit
        if (respect_dag_max_active_limit and
                current_active_dag_count >= self.dag.max_active_runs):
            return None

        # Create a new run only when no matching run was found above.
        run = run or self.dag.create_dagrun(
            run_id=run_id,
            execution_date=run_date,
            start_date=timezone.utcnow(),
            state=State.RUNNING,
            external_trigger=False,
            session=session,
            conf=self.conf,
        )

        # set required transient field
        run.dag = self.dag

        # explicitly mark as backfill and running
        run.state = State.RUNNING
        run.run_id = run_id
        run.verify_integrity(session=session)
        return run
@provide_session
def _task_instances_for_dag_run(self, dag_run, session=None):
"""
Returns a map of task instance key to task instance object for the tasks to
run in the given dag run.
:param dag_run: the dag run to get the tasks from
:type dag_run: models.DagRun
:param session: the database session object
:type session: Session
"""
tasks_to_run = {}
if dag_run is None:
return tasks_to_run
# check if we have orphaned tasks
self.reset_state_for_orphaned_tasks(filter_by_dag_run=dag_run, session=session)
# for some reason if we don't refresh the reference to run is lost
dag_run.refresh_from_db()
make_transient(dag_run)
# TODO(edgarRd): AIRFLOW-1464 change to batch query to improve perf
for ti in dag_run.get_task_instances():
# all tasks part of the backfill are scheduled to run
if ti.state == State.NONE:
ti.set_state(State.SCHEDULED, session=session)
if ti.state != State.REMOVED:
tasks_to_run[ti.key] = ti
return tasks_to_run
def _log_progress(self, ti_status):
msg = ' | '.join([
"[backfill progress]",
"finished run {0} of {1}",
"tasks waiting: {2}",
"succeeded: {3}",
"running: {4}",
"failed: {5}",
"skipped: {6}",
"deadlocked: {7}",
"not ready: {8}"
]).format(
ti_status.finished_runs,
ti_status.total_runs,
len(ti_status.to_run),
len(ti_status.succeeded),
len(ti_status.running),
len(ti_status.failed),
len(ti_status.skipped),
len(ti_status.deadlocked),
len(ti_status.not_ready))
self.log.info(msg)
self.log.debug(
"Finished dag run loop iteration. Remaining tasks %s",
ti_status.to_run.values()
)
@provide_session
def _process_backfill_task_instances(self,
ti_status,
executor,
pickle_id,
start_date=None, session=None):
"""
Process a set of task instances from a set of dag runs. Special handling is done
to account for different task instance states that could be present when running
them in a backfill process.
:param ti_status: the internal status of the job
:type ti_status: BackfillJob._DagRunTaskStatus
:param executor: the executor to run the task instances
:type executor: BaseExecutor
:param pickle_id: the pickle_id if dag is pickled, None otherwise
:type pickle_id: int
:param start_date: the start date of the backfill job
:type start_date: datetime
:param session: the current session object
:type session: Session
:return: the list of execution_dates for the finished dag runs
:rtype: list
"""
executed_run_dates = []
while ((len(ti_status.to_run) > 0 or len(ti_status.running) > 0) and
len(ti_status.deadlocked) == 0):
self.log.debug("*** Clearing out not_ready list ***")
ti_status.not_ready.clear()
# we need to execute the tasks bottom to top
# or leaf to root, as otherwise tasks might be
# determined deadlocked while they are actually
# waiting for their upstream to finish
for task in self.dag.topological_sort():
for key, ti in list(ti_status.to_run.items()):
if task.task_id != ti.task_id:
continue
ti.refresh_from_db()
task = self.dag.get_task(ti.task_id)
ti.task = task
ignore_depends_on_past = (
self.ignore_first_depends_on_past and
ti.execution_date == (start_date or ti.start_date))
self.log.debug(
"Task instance to run %s state %s", ti, ti.state)
# The task was already marked successful or skipped by a
# different Job. Don't rerun it.
if ti.state == State.SUCCESS:
ti_status.succeeded.add(key)
self.log.debug("Task instance %s succeeded. Don't rerun.", ti)
ti_status.to_run.pop(key)
if key in ti_status.running:
ti_status.running.pop(key)
continue
elif ti.state == State.SKIPPED:
ti_status.skipped.add(key)
self.log.debug("Task instance %s skipped. Don't rerun.", ti)
ti_status.to_run.pop(key)
if key in ti_status.running:
ti_status.running.pop(key)
continue
# guard against externally modified tasks instances or
# in case max concurrency has been reached at task runtime
elif ti.state == State.NONE:
self.log.warning(
"FIXME: task instance {} state was set to None "
"externally. This should not happen"
)
ti.set_state(State.SCHEDULED, session=session)
if self.rerun_failed_tasks:
# Rerun failed tasks or upstreamed failed tasks
if ti.state in (State.FAILED, State.UPSTREAM_FAILED):
self.log.error("Task instance {ti} "
"with state {state}".format(ti=ti,
state=ti.state))
if key in ti_status.running:
ti_status.running.pop(key)
# Reset the failed task in backfill to scheduled state
ti.set_state(State.SCHEDULED, session=session)
else:
# Default behaviour which works for subdag.
if ti.state in (State.FAILED, State.UPSTREAM_FAILED):
self.log.error("Task instance {ti} "
"with {state} state".format(ti=ti,
state=ti.state))
ti_status.failed.add(key)
ti_status.to_run.pop(key)
if key in ti_status.running:
ti_status.running.pop(key)
continue
backfill_context = DepContext(
deps=RUN_DEPS,
ignore_depends_on_past=ignore_depends_on_past,
ignore_task_deps=self.ignore_task_deps,
flag_upstream_failed=True)
# Is the task runnable? -- then run it
# the dependency checker can change states of tis
if ti.are_dependencies_met(
dep_context=backfill_context,
session=session,
verbose=self.verbose):
ti.refresh_from_db(lock_for_update=True, session=session)
if ti.state == State.SCHEDULED or ti.state == State.UP_FOR_RETRY:
if executor.has_task(ti):
self.log.debug(
"Task Instance %s already in executor "
"waiting for queue to clear",
ti
)
else:
self.log.debug('Sending %s to executor', ti)
# Skip scheduled state, we are executing immediately
ti.state = State.QUEUED
session.merge(ti)
cfg_path = None
if executor.__class__ in (executors.LocalExecutor,
executors.SequentialExecutor):
cfg_path = tmp_configuration_copy()
executor.queue_task_instance(
ti,
mark_success=self.mark_success,
pickle_id=pickle_id,
ignore_task_deps=self.ignore_task_deps,
ignore_depends_on_past=ignore_depends_on_past,
pool=self.pool,
cfg_path=cfg_path)
ti_status.running[key] = ti
ti_status.to_run.pop(key)
session.commit()
continue
if ti.state == State.UPSTREAM_FAILED:
self.log.error("Task instance %s upstream failed", ti)
ti_status.failed.add(key)
ti_status.to_run.pop(key)
if key in ti_status.running:
ti_status.running.pop(key)
continue
# special case
if ti.state == State.UP_FOR_RETRY:
self.log.debug(
"Task instance %s retry period not "
"expired yet", ti)
if key in ti_status.running:
ti_status.running.pop(key)
ti_status.to_run[key] = ti
continue
# all remaining tasks
self.log.debug('Adding %s to not_ready', ti)
ti_status.not_ready.add(key)
# execute the tasks in the queue
self.heartbeat()
executor.heartbeat()
# If the set of tasks that aren't ready ever equals the set of
# tasks to run and there are no running tasks then the backfill
# is deadlocked
if (ti_status.not_ready and
ti_status.not_ready == set(ti_status.to_run) and
len(ti_status.running) == 0):
self.log.warning(
"Deadlock discovered for ti_status.to_run=%s",
ti_status.to_run.values()
)
ti_status.deadlocked.update(ti_status.to_run.values())
ti_status.to_run.clear()
# check executor state
self._manage_executor_state(ti_status.running)
# update the task counters
self._update_counters(ti_status=ti_status)
# update dag run state
_dag_runs = ti_status.active_runs[:]
for run in _dag_runs:
run.update_state(session=session)
if run.state in State.finished():
ti_status.finished_runs += 1
ti_status.active_runs.remove(run)
executed_run_dates.append(run.execution_date)
self._log_progress(ti_status)
# return updated status
return executed_run_dates
@provide_session
def _collect_errors(self, ti_status, session=None):
err = ''
if ti_status.failed:
err += (
"---------------------------------------------------\n"
"Some task instances failed:\n{}\n".format(ti_status.failed))
if ti_status.deadlocked:
err += (
'---------------------------------------------------\n'
'BackfillJob is deadlocked.')
deadlocked_depends_on_past = any(
t.are_dependencies_met(
dep_context=DepContext(ignore_depends_on_past=False),
session=session,
verbose=self.verbose) !=
t.are_dependencies_met(
dep_context=DepContext(ignore_depends_on_past=True),
session=session,
verbose=self.verbose)
for t in ti_status.deadlocked)
if deadlocked_depends_on_past:
err += (
'Some of the deadlocked tasks were unable to run because '
'of "depends_on_past" relationships. Try running the '
'backfill with the option '
'"ignore_first_depends_on_past=True" or passing "-I" at '
'the command line.')
err += ' These tasks have succeeded:\n{}\n'.format(ti_status.succeeded)
err += ' These tasks are running:\n{}\n'.format(ti_status.running)
err += ' These tasks have failed:\n{}\n'.format(ti_status.failed)
err += ' These tasks are skipped:\n{}\n'.format(ti_status.skipped)
err += ' These tasks are deadlocked:\n{}\n'.format(ti_status.deadlocked)
return err
@provide_session
def _execute_for_run_dates(self, run_dates, ti_status, executor, pickle_id,
start_date, session=None):
"""
Computes the dag runs and their respective task instances for
the given run dates and executes the task instances.
Returns a list of execution dates of the dag runs that were executed.
:param run_dates: Execution dates for dag runs
:type run_dates: list
:param ti_status: internal BackfillJob status structure to tis track progress
:type ti_status: BackfillJob._DagRunTaskStatus
:param executor: the executor to use, it must be previously started
:type executor: BaseExecutor
:param pickle_id: numeric id of the pickled dag, None if not pickled
:type pickle_id: int
:param start_date: backfill start date
:type start_date: datetime
:param session: the current session object
:type session: Session
"""
for next_run_date in run_dates:
dag_run = self._get_dag_run(next_run_date, session=session)
tis_map = self._task_instances_for_dag_run(dag_run,
session=session)
if dag_run is None:
continue
ti_status.active_runs.append(dag_run)
ti_status.to_run.update(tis_map or {})
processed_dag_run_dates = self._process_backfill_task_instances(
ti_status=ti_status,
executor=executor,
pickle_id=pickle_id,
start_date=start_date,
session=session)
ti_status.executed_dag_run_dates.update(processed_dag_run_dates)
    @provide_session
    def _execute(self, session=None):
        """
        Initializes all components required to run a dag for a specified date range and
        calls helper method to execute the tasks.
        """
        ti_status = BackfillJob._DagRunTaskStatus()

        start_date = self.bf_start_date

        # Get intervals between the start/end dates, which will turn into dag runs
        run_dates = self.dag.get_run_dates(start_date=start_date,
                                           end_date=self.bf_end_date)
        if len(run_dates) == 0:
            self.log.info("No run dates were found for the given dates and dag interval.")
            return

        # picklin'
        # Only pickle the DAG for executors that run remotely; local and
        # sequential executors read the DAG file directly.
        pickle_id = None
        if not self.donot_pickle and self.executor.__class__ not in (
                executors.LocalExecutor, executors.SequentialExecutor):
            pickle = DagPickle(self.dag)
            session.add(pickle)
            session.commit()
            pickle_id = pickle.id

        executor = self.executor
        executor.start()

        ti_status.total_runs = len(run_dates)  # total dag runs in backfill

        try:
            remaining_dates = ti_status.total_runs
            # max_active_runs may prevent all dates from being scheduled in a
            # single pass, so loop until every run date has been executed.
            while remaining_dates > 0:
                dates_to_process = [run_date for run_date in run_dates
                                    if run_date not in ti_status.executed_dag_run_dates]

                self._execute_for_run_dates(run_dates=dates_to_process,
                                            ti_status=ti_status,
                                            executor=executor,
                                            pickle_id=pickle_id,
                                            start_date=start_date,
                                            session=session)

                remaining_dates = (
                    ti_status.total_runs - len(ti_status.executed_dag_run_dates)
                )
                # Surface failures/deadlocks from this pass before retrying.
                err = self._collect_errors(ti_status=ti_status, session=session)
                if err:
                    raise AirflowException(err)

                if remaining_dates > 0:
                    self.log.info(
                        "max_active_runs limit for dag %s has been reached "
                        " - waiting for other dag runs to finish",
                        self.dag_id
                    )
                    time.sleep(self.delay_on_limit_secs)
        finally:
            # Always stop the executor and flush pending DB work.
            executor.end()
            session.commit()

        self.log.info("Backfill done. Exiting.")
class LocalTaskJob(BaseJob):
    """
    Runs a single task instance on the local machine. The job launches the
    task through a task runner subprocess, heartbeats its own job row so the
    scheduler does not declare the task a zombie, and self-destructs when
    the task instance's recorded state or ownership changes externally.
    """

    __mapper_args__ = {
        'polymorphic_identity': 'LocalTaskJob'
    }

    def __init__(
            self,
            task_instance,
            ignore_all_deps=False,
            ignore_depends_on_past=False,
            ignore_task_deps=False,
            ignore_ti_state=False,
            mark_success=False,
            pickle_id=None,
            pool=None,
            *args, **kwargs):
        self.task_instance = task_instance
        self.dag_id = task_instance.dag_id
        self.ignore_all_deps = ignore_all_deps
        self.ignore_depends_on_past = ignore_depends_on_past
        self.ignore_task_deps = ignore_task_deps
        self.ignore_ti_state = ignore_ti_state
        self.pool = pool
        self.pickle_id = pickle_id
        self.mark_success = mark_success

        # terminating state is used so that a job don't try to
        # terminate multiple times
        self.terminating = False

        super(LocalTaskJob, self).__init__(*args, **kwargs)

    def _execute(self):
        """Start the task runner and babysit it until it finishes or is killed."""
        self.task_runner = get_task_runner(self)

        def signal_handler(signum, frame):
            """Setting kill signal handler"""
            self.log.error("Received SIGTERM. Terminating subprocesses")
            self.on_kill()
            raise AirflowException("LocalTaskJob received SIGTERM signal")
        signal.signal(signal.SIGTERM, signal_handler)

        # Refuse to start when the TI cannot transition into RUNNING
        # (deps not met, or another job already claimed it).
        if not self.task_instance._check_and_change_state_before_execution(
                mark_success=self.mark_success,
                ignore_all_deps=self.ignore_all_deps,
                ignore_depends_on_past=self.ignore_depends_on_past,
                ignore_task_deps=self.ignore_task_deps,
                ignore_ti_state=self.ignore_ti_state,
                job_id=self.id,
                pool=self.pool):
            self.log.info("Task is not able to be run")
            return

        try:
            self.task_runner.start()

            last_heartbeat_time = time.time()
            heartbeat_time_limit = conf.getint('scheduler',
                                               'scheduler_zombie_task_threshold')

            while True:
                # Monitor the task to see if it's done
                return_code = self.task_runner.return_code()
                if return_code is not None:
                    self.log.info("Task exited with return code %s", return_code)
                    return

                # Periodically heartbeat so that the scheduler doesn't think this
                # is a zombie
                try:
                    self.heartbeat()
                    last_heartbeat_time = time.time()
                except OperationalError:
                    Stats.incr('local_task_job_heartbeat_failure', 1, 1)
                    self.log.exception(
                        "Exception while trying to heartbeat! Sleeping for %s seconds",
                        self.heartrate
                    )
                    time.sleep(self.heartrate)

                # If it's been too long since we've heartbeat, then it's possible that
                # the scheduler rescheduled this task, so kill launched processes.
                time_since_last_heartbeat = time.time() - last_heartbeat_time
                if time_since_last_heartbeat > heartbeat_time_limit:
                    Stats.incr('local_task_job_prolonged_heartbeat_failure', 1, 1)
                    self.log.error("Heartbeat time limited exceeded!")
                    raise AirflowException("Time since last heartbeat({:.2f}s) "
                                           "exceeded limit ({}s)."
                                           .format(time_since_last_heartbeat,
                                                   heartbeat_time_limit))
        finally:
            self.on_kill()

    def on_kill(self):
        """Stop the task runner subprocess and let it clean up after itself."""
        self.task_runner.terminate()
        self.task_runner.on_finish()

    @provide_session
    def heartbeat_callback(self, session=None):
        """Self destruct task if state has been moved away from running externally"""
        if self.terminating:
            # ensure termination if processes are created later
            self.task_runner.terminate()
            return

        self.task_instance.refresh_from_db()
        ti = self.task_instance

        fqdn = get_hostname()
        same_hostname = fqdn == ti.hostname
        same_process = ti.pid == os.getpid()

        if ti.state == State.RUNNING:
            # If the DB records a different host or PID, another runner owns
            # this task instance now; abort rather than double-run it.
            if not same_hostname:
                self.log.warning("The recorded hostname {ti.hostname} "
                                 "does not match this instance's hostname "
                                 "{fqdn}".format(**locals()))
                raise AirflowException("Hostname of job runner does not match")
            elif not same_process:
                current_pid = os.getpid()
                self.log.warning("Recorded pid {ti.pid} does not match "
                                 "the current pid "
                                 "{current_pid}".format(**locals()))
                raise AirflowException("PID of job runner does not match")
        elif (
                self.task_runner.return_code() is None and
                hasattr(self.task_runner, 'process')
        ):
            # State was changed externally while the runner is still alive:
            # take the "poison pill" and stop the subprocess.
            self.log.warning(
                "State of this instance has been externally set to %s. "
                "Taking the poison pill.",
                ti.state
            )
            self.task_runner.terminate()
            self.terminating = True
| 41.47756 | 104 | 0.558447 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import getpass
import logging
import multiprocessing
import os
import signal
import sys
import threading
import time
from collections import defaultdict
from time import sleep
import six
from past.builtins import basestring
from sqlalchemy import (Column, Index, Integer, String, and_, func, not_, or_)
from sqlalchemy.exc import OperationalError
from sqlalchemy.orm.session import make_transient
from airflow import configuration as conf
from airflow import executors, models, settings
from airflow.exceptions import AirflowException
from airflow.models import DAG, DagRun
from airflow.models.dagpickle import DagPickle
from airflow.settings import Stats
from airflow.task.task_runner import get_task_runner
from airflow.ti_deps.dep_context import DepContext, QUEUE_DEPS, RUN_DEPS
from airflow.utils import asciiart, helpers, timezone
from airflow.utils.configuration import tmp_configuration_copy
from airflow.utils.dag_processing import (AbstractDagFileProcessor,
DagFileProcessorAgent,
SimpleDag,
SimpleDagBag,
SimpleTaskInstance,
list_py_file_paths)
from airflow.utils.db import create_session, provide_session
from airflow.utils.email import get_email_address_list, send_email
from airflow.utils.log.logging_mixin import LoggingMixin, StreamLogWriter, set_context
from airflow.utils.net import get_hostname
from airflow.utils.sqlalchemy import UtcDateTime
from airflow.utils.state import State
# Convenience aliases: the SQLAlchemy declarative base and the max length
# used for dag/task identifier columns, re-exported from airflow.models.
Base = models.base.Base
ID_LEN = models.ID_LEN
class BaseJob(Base, LoggingMixin):
    """
    Abstract base class for all jobs (SchedulerJob, BackfillJob,
    LocalTaskJob, ...). A row is persisted in the ``job`` table for each
    job and liveness is reported through a periodic heartbeat that
    refreshes ``latest_heartbeat``.
    """

    __tablename__ = "job"

    id = Column(Integer, primary_key=True)
    dag_id = Column(String(ID_LEN),)
    state = Column(String(20))
    job_type = Column(String(30))
    start_date = Column(UtcDateTime())
    end_date = Column(UtcDateTime())
    latest_heartbeat = Column(UtcDateTime())
    executor_class = Column(String(500))
    hostname = Column(String(500))
    unixname = Column(String(1000))

    __mapper_args__ = {
        'polymorphic_on': job_type,
        'polymorphic_identity': 'BaseJob'
    }

    __table_args__ = (
        Index('job_type_heart', job_type, latest_heartbeat),
        Index('idx_job_state_heartbeat', state, latest_heartbeat),
    )

    def __init__(
            self,
            executor=executors.GetDefaultExecutor(),
            heartrate=conf.getfloat('scheduler', 'JOB_HEARTBEAT_SEC'),
            *args, **kwargs):
        # NOTE(review): both defaults above are evaluated once at import
        # time; this is long-standing behaviour and is left unchanged.
        self.hostname = get_hostname()
        self.executor = executor
        self.executor_class = executor.__class__.__name__
        self.start_date = timezone.utcnow()
        self.latest_heartbeat = timezone.utcnow()
        self.heartrate = heartrate
        self.unixname = getpass.getuser()
        self.max_tis_per_query = conf.getint('scheduler', 'max_tis_per_query')
        super(BaseJob, self).__init__(*args, **kwargs)

    def is_alive(self):
        """
        Whether the job heartbeated recently enough (within 2.1 heartbeat
        intervals) to be considered alive.
        """
        # BUGFIX: use total_seconds() instead of .seconds. timedelta.seconds
        # only holds the seconds *within the day component* (0..86399), so a
        # job whose last heartbeat was more than a day old could wrongly be
        # reported alive. heartbeat() below already uses total_seconds().
        return (
            (timezone.utcnow() - self.latest_heartbeat).total_seconds() <
            (conf.getint('scheduler', 'JOB_HEARTBEAT_SEC') * 2.1)
        )

    @provide_session
    def kill(self, session=None):
        """Record the end of this job and raise; runs the on_kill() hook first."""
        job = session.query(BaseJob).filter(BaseJob.id == self.id).first()
        job.end_date = timezone.utcnow()
        try:
            self.on_kill()
        except Exception as e:
            # Best effort: the job is shutting down either way.
            self.log.error('on_kill() method failed: {}'.format(e))
        session.merge(job)
        session.commit()
        raise AirflowException("Job shut down externally.")

    def on_kill(self):
        """Hook called when an external kill command is received."""
        pass

    def heartbeat_callback(self, session=None):
        """Hook invoked after each successful heartbeat."""
        pass

    def heartbeat(self):
        """
        Update the job's DB row with a fresh ``latest_heartbeat`` timestamp.

        This allows jobs to be monitored and killed externally: if the row's
        state was set to SHUTDOWN, the job kills itself. The method sleeps
        as needed to keep a steady heart rate of ``self.heartrate`` seconds
        between heartbeats.
        """
        try:
            with create_session() as session:
                job = session.query(BaseJob).filter_by(id=self.id).one()
                make_transient(job)
                session.commit()

            # An external SHUTDOWN request kills this job.
            if job.state == State.SHUTDOWN:
                self.kill()

            # Sleep the remainder of the interval to keep a steady rate.
            sleep_for = 0
            if job.latest_heartbeat:
                sleep_for = max(
                    0,
                    self.heartrate - (timezone.utcnow() -
                                      job.latest_heartbeat).total_seconds())

            sleep(sleep_for)

            # Update last heartbeat time
            with create_session() as session:
                job = session.query(BaseJob).filter(BaseJob.id == self.id).first()
                job.latest_heartbeat = timezone.utcnow()
                session.merge(job)
                session.commit()

                self.heartbeat_callback(session=session)
                self.log.debug('[heartbeat]')
        except OperationalError as e:
            # DB hiccups must not kill the job; the caller retries later.
            self.log.error("Scheduler heartbeat got an exception: %s", str(e))

    def run(self):
        """Persist the job row, run _execute(), and record the final state."""
        Stats.incr(self.__class__.__name__.lower() + '_start', 1, 1)
        with create_session() as session:
            self.state = State.RUNNING
            session.add(self)
            session.commit()
            id_ = self.id
            make_transient(self)
            self.id = id_

            try:
                self._execute()
                self.state = State.SUCCESS
            except SystemExit:
                # A deliberate exit (e.g. graceful shutdown) is a success.
                self.state = State.SUCCESS
            except Exception:
                self.state = State.FAILED
                raise
            finally:
                self.end_date = timezone.utcnow()
                session.merge(self)
                session.commit()

        Stats.incr(self.__class__.__name__.lower() + '_end', 1, 1)

    def _execute(self):
        raise NotImplementedError("This method needs to be overridden")

    @provide_session
    def reset_state_for_orphaned_tasks(self, filter_by_dag_run=None, session=None):
        """
        Reset to NONE any task instances that are SCHEDULED or QUEUED in the
        DB but unknown to this job's executor, so they get picked up again.
        Updates are issued in chunks of ``self.max_tis_per_query``.

        :param filter_by_dag_run: restrict to this dag run; all running
            non-backfill runs when None
        :type filter_by_dag_run: models.DagRun
        :return: the task instances that were reset
        :rtype: list
        """
        queued_tis = self.executor.queued_tasks
        # Also consider running, as the state might not have changed in the
        # DB yet.
        running_tis = self.executor.running

        resettable_states = [State.SCHEDULED, State.QUEUED]
        TI = models.TaskInstance
        DR = models.DagRun
        if filter_by_dag_run is None:
            resettable_tis = (
                session
                .query(TI)
                .join(
                    DR,
                    and_(
                        TI.dag_id == DR.dag_id,
                        TI.execution_date == DR.execution_date))
                .filter(
                    DR.state == State.RUNNING,
                    DR.run_id.notlike(BackfillJob.ID_PREFIX + '%'),
                    TI.state.in_(resettable_states))).all()
        else:
            resettable_tis = filter_by_dag_run.get_task_instances(state=resettable_states,
                                                                  session=session)
        # Can't do this as a single UPDATE: joins aren't supported there.
        tis_to_reset = []
        for ti in resettable_tis:
            if ti.key not in queued_tis and ti.key not in running_tis:
                tis_to_reset.append(ti)

        if len(tis_to_reset) == 0:
            return []

        def query(result, items):
            # One chunked pass: lock the rows, set them back to NONE.
            filter_for_tis = ([and_(TI.dag_id == ti.dag_id,
                                    TI.task_id == ti.task_id,
                                    TI.execution_date == ti.execution_date)
                               for ti in items])
            reset_tis = (
                session
                .query(TI)
                .filter(or_(*filter_for_tis), TI.state.in_(resettable_states))
                .with_for_update()
                .all())
            for ti in reset_tis:
                ti.state = State.NONE
                session.merge(ti)
            return result + reset_tis

        reset_tis = helpers.reduce_in_chunks(query,
                                             tis_to_reset,
                                             [],
                                             self.max_tis_per_query)

        task_instance_str = '\n\t'.join(
            ["{}".format(x) for x in reset_tis])
        session.commit()

        self.log.info(
            "Reset the following %s TaskInstances:\n\t%s",
            len(reset_tis), task_instance_str
        )
        return reset_tis
class DagFileProcessor(AbstractDagFileProcessor, LoggingMixin):
    """
    Runs SchedulerJob.process_file() on one DAG definition file in a child
    process. Parsing can be expensive and must not block the scheduler, so
    the work is done in a multiprocessing.Process and the result is read
    back through a queue.
    """

    # Counter used to give each processor a unique thread/process name.
    class_creation_counter = 0

    def __init__(self, file_path, pickle_dags, dag_id_white_list, zombies):
        """
        :param file_path: path of the DAG file to process
        :param pickle_dags: whether to serialize the DAG objects to the DB
        :param dag_id_white_list: restrict processing to these DAG ids
        :param zombies: zombie task instances to kill
        """
        self._file_path = file_path
        # Queue the child process pushes its process_file() result onto.
        self._result_queue = multiprocessing.Queue()
        # The process that was launched to process the given .
        self._process = None
        self._dag_id_white_list = dag_id_white_list
        self._pickle_dags = pickle_dags
        self._zombies = zombies
        # The result of Scheduler.process_file(file_path).
        self._result = None
        # Whether the process is done running.
        self._done = False
        # When the process started.
        self._start_time = None
        # This ID is use to uniquely name the process / thread that's launched
        self._instance_id = DagFileProcessor.class_creation_counter
        DagFileProcessor.class_creation_counter += 1

    @property
    def file_path(self):
        # Path of the DAG file this processor is responsible for.
        return self._file_path

    @staticmethod
    def _launch_process(result_queue,
                        file_path,
                        pickle_dags,
                        dag_id_white_list,
                        thread_name,
                        zombies):
        """
        Launch and return a multiprocessing.Process that parses the file and
        puts the SchedulerJob.process_file() result onto ``result_queue``.
        """
        def helper():
            # Runs in the child process: redirect stdout/stderr into the
            # processor log and re-initialize the ORM for this process.
            log = logging.getLogger("airflow.processor")

            stdout = StreamLogWriter(log, logging.INFO)
            stderr = StreamLogWriter(log, logging.WARN)

            set_context(log, file_path)

            try:
                sys.stdout = stdout
                sys.stderr = stderr

                # The engine from the parent process can't be shared safely.
                settings.configure_orm()

                # Rename the thread so log lines from different processors
                # can be told apart.
                threading.current_thread().name = thread_name
                start_time = time.time()

                log.info("Started process (PID=%s) to work on %s",
                         os.getpid(), file_path)
                scheduler_job = SchedulerJob(dag_ids=dag_id_white_list, log=log)
                result = scheduler_job.process_file(file_path,
                                                    zombies,
                                                    pickle_dags)
                result_queue.put(result)
                end_time = time.time()
                log.info(
                    "Processing %s took %.3f seconds", file_path, end_time - start_time
                )
            except Exception:
                # Log exceptions through the logging framework.
                log.exception("Got an exception! Propagating...")
                raise
            finally:
                sys.stdout = sys.__stdout__
                sys.stderr = sys.__stderr__
                # We re-initialized the ORM within this Process above so we need to
                # tear it down manually here
                settings.dispose_orm()

        p = multiprocessing.Process(target=helper,
                                    args=(),
                                    name="{}-Process".format(thread_name))
        p.start()
        return p

    def start(self):
        """Launch the child process that parses the file."""
        self._process = DagFileProcessor._launch_process(
            self._result_queue,
            self.file_path,
            self._pickle_dags,
            self._dag_id_white_list,
            "DagFileProcessor{}".format(self._instance_id),
            self._zombies)
        self._start_time = timezone.utcnow()

    def terminate(self, sigkill=False):
        """
        Terminate the child process; optionally SIGKILL it if it survives
        a 5 second grace period after SIGTERM.

        :param sigkill: whether to issue a SIGKILL if SIGTERM doesn't work
        :type sigkill: bool
        """
        if self._process is None:
            raise AirflowException("Tried to call stop before starting!")
        # The queue will likely get corrupted, so remove the reference
        self._result_queue = None
        self._process.terminate()
        # Arbitrarily wait 5s for the process to die
        self._process.join(5)
        if sigkill and self._process.is_alive():
            self.log.warning("Killing PID %s", self._process.pid)
            os.kill(self._process.pid, signal.SIGKILL)

    @property
    def pid(self):
        # PID of the launched child process; raises if not started yet.
        if self._process is None:
            raise AirflowException("Tried to get PID before starting!")
        return self._process.pid

    @property
    def exit_code(self):
        # Exit code of the child; only valid after `done` reported True.
        if not self._done:
            raise AirflowException("Tried to call retcode before process was finished!")
        return self._process.exitcode

    @property
    def done(self):
        """
        Whether the child process has finished. Also drains the result
        queue: a child blocked on a full queue cannot exit until its
        result is consumed.
        """
        if self._process is None:
            raise AirflowException("Tried to see if it's done before starting!")

        if self._done:
            return True

        if self._result_queue and not self._result_queue.empty():
            self._result = self._result_queue.get_nowait()
            self._done = True
            self.log.debug("Waiting for %s", self._process)
            self._process.join()
            return True

        # Child died (possibly without producing a result).
        if self._result_queue and not self._process.is_alive():
            self._done = True
            if not self._result_queue.empty():
                self._result = self._result_queue.get_nowait()
            self.log.debug("Waiting for %s", self._process)
            self._process.join()
            return True

        return False

    @property
    def result(self):
        # Result of SchedulerJob.process_file(); only valid once done.
        if not self.done:
            raise AirflowException("Tried to get the result before it's done!")
        return self._result

    @property
    def start_time(self):
        # When the child process was launched; raises if not started yet.
        if self._start_time is None:
            raise AirflowException("Tried to get start time before it started!")
        return self._start_time
class SchedulerJob(BaseJob):
__mapper_args__ = {
'polymorphic_identity': 'SchedulerJob'
}
    def __init__(
            self,
            dag_id=None,
            dag_ids=None,
            subdir=settings.DAGS_FOLDER,
            num_runs=-1,
            processor_poll_interval=1.0,
            run_duration=None,
            do_pickle=False,
            log=None,
            *args, **kwargs):
        """
        :param dag_id: if specified, only schedule tasks with this DAG ID
        :param dag_ids: if specified, only schedule tasks with these DAG IDs
        :param subdir: directory (or file) containing DAG definitions
        :param num_runs: number of scheduler loops to run; -1 for unlimited
            within run_duration
        :param processor_poll_interval: seconds to sleep between scheduler
            loop iterations
        :param run_duration: how long (in seconds) to run before exiting;
            read from config when None
        :param do_pickle: whether to serialize parsed DAG objects to the DB
        :param log: logger to use instead of the default one
        """
        # for BaseJob compatibility
        self.dag_id = dag_id
        self.dag_ids = [dag_id] if dag_id else []
        if dag_ids:
            self.dag_ids.extend(dag_ids)
        self.subdir = subdir

        self.num_runs = num_runs
        self.run_duration = run_duration
        self._processor_poll_interval = processor_poll_interval

        self.do_pickle = do_pickle
        super(SchedulerJob, self).__init__(*args, **kwargs)

        self.heartrate = conf.getint('scheduler', 'SCHEDULER_HEARTBEAT_SEC')
        self.max_threads = conf.getint('scheduler', 'max_threads')

        if log:
            self._log = log

        # SQLite can't handle concurrent access; forces single-process mode.
        self.using_sqlite = False
        if 'sqlite' in conf.get('core', 'sql_alchemy_conn'):
            self.using_sqlite = True

        self.max_tis_per_query = conf.getint('scheduler', 'max_tis_per_query')
        if run_duration is None:
            self.run_duration = conf.getint('scheduler',
                                            'run_duration')

        self.processor_agent = None
        self._last_loop = False

        # Shut down cleanly (including the processor agent) on Ctrl-C/TERM.
        signal.signal(signal.SIGINT, self._exit_gracefully)
        signal.signal(signal.SIGTERM, self._exit_gracefully)
def _exit_gracefully(self, signum, frame):
self.log.info("Exiting gracefully upon receiving signal {}".format(signum))
if self.processor_agent:
self.processor_agent.end()
sys.exit(os.EX_OK)
    @provide_session
    def manage_slas(self, dag, session=None):
        """
        Find all tasks in ``dag`` that have SLAs defined, record any new SLA
        misses in the database, and send alert notifications (callback
        and/or email) for unsent misses.
        """
        if not any([ti.sla for ti in dag.tasks]):
            self.log.info(
                "Skipping SLA check for %s because no tasks in DAG have SLAs",
                dag
            )
            return

        TI = models.TaskInstance
        # Latest successful/skipped execution date for each task of the DAG.
        sq = (
            session
            .query(
                TI.task_id,
                func.max(TI.execution_date).label('max_ti'))
            .with_hint(TI, 'USE INDEX (PRIMARY)', dialect_name='mysql')
            .filter(TI.dag_id == dag.dag_id)
            .filter(or_(
                TI.state == State.SUCCESS,
                TI.state == State.SKIPPED))
            .filter(TI.task_id.in_(dag.task_ids))
            .group_by(TI.task_id).subquery('sq')
        )

        max_tis = session.query(TI).filter(
            TI.dag_id == dag.dag_id,
            TI.task_id == sq.c.task_id,
            TI.execution_date == sq.c.max_ti,
        ).all()

        ts = timezone.utcnow()
        SlaMiss = models.SlaMiss
        for ti in max_tis:
            task = dag.get_task(ti.task_id)
            dttm = ti.execution_date
            if task.sla:
                # Walk forward through the schedule from the last completed
                # run and record every run whose SLA deadline already passed.
                dttm = dag.following_schedule(dttm)
                while dttm < timezone.utcnow():
                    following_schedule = dag.following_schedule(dttm)
                    if following_schedule + task.sla < timezone.utcnow():
                        session.merge(models.SlaMiss(
                            task_id=ti.task_id,
                            dag_id=ti.dag_id,
                            execution_date=dttm,
                            timestamp=ts))
                    dttm = dag.following_schedule(dttm)
        session.commit()

        # Misses that have not been notified about yet.
        slas = (
            session
            .query(SlaMiss)
            .filter(SlaMiss.notification_sent == False)  # noqa: E712
            .filter(SlaMiss.dag_id == dag.dag_id)
            .all()
        )

        if slas:
            sla_dates = [sla.execution_date for sla in slas]
            # Task instances that are still blocking the missed runs.
            qry = (
                session
                .query(TI)
                .filter(TI.state != State.SUCCESS)
                .filter(TI.execution_date.in_(sla_dates))
                .filter(TI.dag_id == dag.dag_id)
                .all()
            )
            blocking_tis = []
            for ti in qry:
                if ti.task_id in dag.task_ids:
                    ti.task = dag.get_task(ti.task_id)
                    blocking_tis.append(ti)
                else:
                    # Task was removed from the DAG; drop the stale TI.
                    session.delete(ti)
                    session.commit()

            task_list = "\n".join([
                sla.task_id + ' on ' + sla.execution_date.isoformat()
                for sla in slas])
            blocking_task_list = "\n".join([
                ti.task_id + ' on ' + ti.execution_date.isoformat()
                for ti in blocking_tis])
            # Track whether email or any alert notification sent
            # We consider email or the alert callback as notifications
            email_sent = False
            notification_sent = False
            if dag.sla_miss_callback:
                # Execute the alert callback
                self.log.info(' --------------> ABOUT TO CALL SLA MISS CALL BACK ')
                try:
                    dag.sla_miss_callback(dag, task_list, blocking_task_list, slas,
                                          blocking_tis)
                    notification_sent = True
                except Exception:
                    self.log.exception("Could not call sla_miss_callback for DAG %s",
                                       dag.dag_id)
            email_content = """\
            Here's a list of tasks that missed their SLAs:
            <pre><code>{task_list}\n<code></pre>
            Blocking tasks:
            <pre><code>{blocking_task_list}\n{bug}<code></pre>
            """.format(bug=asciiart.bug, **locals())
            # Collect every distinct recipient declared on the DAG's tasks.
            emails = set()
            for task in dag.tasks:
                if task.email:
                    if isinstance(task.email, basestring):
                        emails |= set(get_email_address_list(task.email))
                    elif isinstance(task.email, (list, tuple)):
                        emails |= set(task.email)
            if emails and len(slas):
                try:
                    send_email(
                        emails,
                        "[airflow] SLA miss on DAG=" + dag.dag_id,
                        email_content)
                    email_sent = True
                    notification_sent = True
                except Exception:
                    self.log.exception("Could not send SLA Miss email notification for"
                                       " DAG %s", dag.dag_id)
            # Mark the misses as notified so they are not re-sent.
            if notification_sent:
                for sla in slas:
                    if email_sent:
                        sla.email_sent = True
                    sla.notification_sent = True
                    session.merge(sla)
            session.commit()
@staticmethod
def update_import_errors(session, dagbag):
for dagbag_file in dagbag.file_last_changed:
session.query(models.ImportError).filter(
models.ImportError.filename == dagbag_file
).delete()
for filename, stacktrace in six.iteritems(dagbag.import_errors):
session.add(models.ImportError(
filename=filename,
stacktrace=stacktrace))
session.commit()
    @provide_session
    def create_dag_run(self, dag, session=None):
        """
        Check whether a new DagRun needs to be created for ``dag`` based on
        its schedule interval; return the new DagRun if one was created,
        otherwise None.
        """
        if dag.schedule_interval and conf.getboolean('scheduler', 'USE_JOB_SCHEDULE'):
            active_runs = DagRun.find(
                dag_id=dag.dag_id,
                state=State.RUNNING,
                external_trigger=False,
                session=session
            )
            # Bail early if at max_active_runs and timeouts can't free a slot.
            if len(active_runs) >= dag.max_active_runs and not dag.dagrun_timeout:
                return
            # Fail runs that exceeded dagrun_timeout to free up slots.
            timedout_runs = 0
            for dr in active_runs:
                if (
                        dr.start_date and dag.dagrun_timeout and
                        dr.start_date < timezone.utcnow() - dag.dagrun_timeout):
                    dr.state = State.FAILED
                    dr.end_date = timezone.utcnow()
                    dag.handle_callback(dr, success=False, reason='dagrun_timeout',
                                        session=session)
                    timedout_runs += 1
            session.commit()
            if len(active_runs) - timedout_runs >= dag.max_active_runs:
                return
            # Latest scheduled (non-externally-triggered) execution date.
            qry = (
                session.query(func.max(DagRun.execution_date))
                .filter_by(dag_id=dag.dag_id)
                .filter(or_(
                    DagRun.external_trigger == False,
                    DagRun.run_id.like(DagRun.ID_PREFIX + '%')
                ))
            )
            last_scheduled_run = qry.scalar()

            # @once DAGs only ever get a single scheduled run.
            if dag.schedule_interval == '@once' and last_scheduled_run:
                return None

            # don't do scheduler catchup for dag's that don't have dag.catchup = True
            if not (dag.catchup or dag.schedule_interval == '@once'):
                # Move the effective start_date forward to one schedule period
                # before now so only the most recent period is scheduled.
                now = timezone.utcnow()
                next_start = dag.following_schedule(now)
                last_start = dag.previous_schedule(now)
                if next_start <= now:
                    new_start = last_start
                else:
                    new_start = dag.previous_schedule(last_start)

                if dag.start_date:
                    if new_start >= dag.start_date:
                        dag.start_date = new_start
                else:
                    dag.start_date = new_start

            next_run_date = None
            if not last_scheduled_run:
                # First run: derive the date from the tasks' start dates.
                task_start_dates = [t.start_date for t in dag.tasks]
                if task_start_dates:
                    next_run_date = dag.normalize_schedule(min(task_start_dates))
                    self.log.debug(
                        "Next run date based on tasks %s",
                        next_run_date
                    )
            else:
                next_run_date = dag.following_schedule(last_scheduled_run)

            # Make sure backfill runs are also considered when advancing.
            last_run = dag.get_last_dagrun(session=session)
            if last_run and next_run_date:
                while next_run_date <= last_run.execution_date:
                    next_run_date = dag.following_schedule(next_run_date)

            # Never schedule prior to the DAG's start_date.
            if dag.start_date:
                next_run_date = (dag.start_date if not next_run_date
                                 else max(next_run_date, dag.start_date))
                if next_run_date == dag.start_date:
                    next_run_date = dag.normalize_schedule(dag.start_date)

                self.log.debug(
                    "Dag start date: %s. Next run date: %s",
                    dag.start_date, next_run_date
                )

            # Never schedule in the future.
            # NOTE(review): next_run_date can still be None here when the DAG
            # has no start_date, no tasks with start dates and no previous
            # run; this comparison would then raise on Python 3 — confirm
            # upstream guarantees before relying on it.
            if next_run_date > timezone.utcnow():
                return

            # this structure is necessary to avoid a TypeError from concatenating
            # NoneType
            if dag.schedule_interval == '@once':
                period_end = next_run_date
            elif next_run_date:
                period_end = dag.following_schedule(next_run_date)

            # Don't schedule a dag beyond its end_date (as specified by the dag param)
            if next_run_date and dag.end_date and next_run_date > dag.end_date:
                return

            # Get the min task end date, which may come from the dag.default_args
            min_task_end_date = []
            task_end_dates = [t.end_date for t in dag.tasks if t.end_date]
            if task_end_dates:
                min_task_end_date = min(task_end_dates)
            if next_run_date and min_task_end_date and next_run_date > min_task_end_date:
                return

            # Only create the run once its full schedule period has elapsed.
            if next_run_date and period_end and period_end <= timezone.utcnow():
                next_run = dag.create_dagrun(
                    run_id=DagRun.ID_PREFIX + next_run_date.isoformat(),
                    execution_date=next_run_date,
                    start_date=timezone.utcnow(),
                    state=State.RUNNING,
                    external_trigger=False
                )
                return next_run
    @provide_session
    def _process_task_instances(self, dag, queue, session=None):
        """Examine the active DagRuns of ``dag`` and append the keys of task
        instances that have their dependencies met to ``queue``.

        :param dag: the DAG whose RUNNING DagRuns should be examined
        :param queue: mutable list that receives the ready TaskInstance keys
        :param session: ORM session (injected by ``@provide_session``)
        """
        # update the state of the previously active dag runs
        dag_runs = DagRun.find(dag_id=dag.dag_id, state=State.RUNNING, session=session)
        active_dag_runs = []
        for run in dag_runs:
            self.log.info("Examining DAG run %s", run)
            # don't consider runs that are executed in the future
            if run.execution_date > timezone.utcnow():
                self.log.error(
                    "Execution date is in future: %s",
                    run.execution_date
                )
                continue

            # enforce the per-DAG cap on simultaneously active runs
            if len(active_dag_runs) >= dag.max_active_runs:
                self.log.info("Active dag runs > max_active_run.")
                continue

            # skip backfill dagruns: they are driven by BackfillJob, not here
            if run.is_backfill:
                continue

            # run.dag is a transient attribute and must be (re)attached
            run.dag = dag
            run.verify_integrity(session=session)
            run.update_state(session=session)
            if run.state == State.RUNNING:
                # detach from the session so later commits don't expire it
                make_transient(run)
                active_dag_runs.append(run)

        for run in active_dag_runs:
            self.log.debug("Examining active DAG run: %s", run)
            # only unset / retry-pending task instances are candidates
            tis = run.get_task_instances(state=(State.NONE,
                                                State.UP_FOR_RETRY))

            for ti in tis:
                task = dag.get_task(ti.task_id)

                # ti.task is transient and must be (re)attached as well
                ti.task = task

                # adhoc tasks are never scheduled automatically
                if task.adhoc:
                    continue

                if ti.are_dependencies_met(
                        dep_context=DepContext(flag_upstream_failed=True),
                        session=session):
                    self.log.debug('Queuing task: %s', ti)
                    queue.append(ti.key)
    @provide_session
    def _change_state_for_tis_without_dagrun(self,
                                             simple_dag_bag,
                                             old_states,
                                             new_state,
                                             session=None):
        """Move task instances out of ``old_states`` into ``new_state`` when
        their DagRun is missing or no longer RUNNING (e.g. a run that was set
        to a non-running state externally).

        :param simple_dag_bag: bag limiting which dag_ids are considered
        :param old_states: list of TaskInstance states eligible for the change
        :param new_state: the state to set the matched task instances to
        :param session: ORM session (injected by ``@provide_session``)
        """
        tis_changed = 0
        # TIs in old_states whose DagRun is absent (outer join -> NULL state)
        # or not RUNNING
        query = session \
            .query(models.TaskInstance) \
            .outerjoin(models.DagRun, and_(
                models.TaskInstance.dag_id == models.DagRun.dag_id,
                models.TaskInstance.execution_date == models.DagRun.execution_date)) \
            .filter(models.TaskInstance.dag_id.in_(simple_dag_bag.dag_ids)) \
            .filter(models.TaskInstance.state.in_(old_states)) \
            .filter(or_(
                models.DagRun.state != State.RUNNING,
                models.DagRun.state.is_(None)))
        if self.using_sqlite:
            # sqlite cannot do UPDATE ... FROM a subquery reliably, so fetch
            # the rows (locked) and update them one by one
            tis_to_change = query \
                .with_for_update() \
                .all()
            for ti in tis_to_change:
                ti.set_state(new_state, session=session)
                tis_changed += 1
        else:
            # on real databases, do a single bulk UPDATE against the subquery
            subq = query.subquery()
            tis_changed = session \
                .query(models.TaskInstance) \
                .filter(and_(
                    models.TaskInstance.dag_id == subq.c.dag_id,
                    models.TaskInstance.task_id == subq.c.task_id,
                    models.TaskInstance.execution_date ==
                    subq.c.execution_date)) \
                .update({models.TaskInstance.state: new_state},
                        synchronize_session=False)
            session.commit()

        if tis_changed > 0:
            self.log.warning(
                "Set %s task instances to state=%s as their associated DagRun was not in RUNNING state",
                tis_changed, new_state
            )
@provide_session
def __get_task_concurrency_map(self, states, session=None):
TI = models.TaskInstance
ti_concurrency_query = (
session
.query(TI.task_id, TI.dag_id, func.count('*'))
.filter(TI.state.in_(states))
.group_by(TI.task_id, TI.dag_id)
).all()
task_map = defaultdict(int)
for result in ti_concurrency_query:
task_id, dag_id, count = result
task_map[(dag_id, task_id)] = count
return task_map
    @provide_session
    def _find_executable_task_instances(self, simple_dag_bag, states, session=None):
        """Find task instances in ``states`` that are ready to be sent to the
        executor, honoring pool slots, DAG-level concurrency, per-task
        concurrency and backfill exclusion.

        :param simple_dag_bag: bag limiting which dag_ids are considered
        :param states: TaskInstance states eligible for execution
        :param session: ORM session (injected by ``@provide_session``)
        :return: list of detached (transient) TaskInstance objects to queue
        """
        executable_tis = []

        TI = models.TaskInstance
        DR = models.DagRun
        DM = models.DagModel
        # candidate TIs: in the bag, not part of a backfill run, and not in a
        # paused DAG (outer joins keep TIs with no DagRun / DagModel row)
        ti_query = (
            session
            .query(TI)
            .filter(TI.dag_id.in_(simple_dag_bag.dag_ids))
            .outerjoin(
                DR,
                and_(DR.dag_id == TI.dag_id, DR.execution_date == TI.execution_date)
            )
            .filter(or_(DR.run_id == None,
                    not_(DR.run_id.like(BackfillJob.ID_PREFIX + '%'))))
            .outerjoin(DM, DM.dag_id == TI.dag_id)
            .filter(or_(DM.dag_id == None,
                    not_(DM.is_paused)))
        )
        # SQL NULL needs an explicit IS NULL test; IN () does not match NULL
        if None in states:
            ti_query = ti_query.filter(
                or_(TI.state == None, TI.state.in_(states))
            )
        else:
            ti_query = ti_query.filter(TI.state.in_(states))

        task_instances_to_examine = ti_query.all()

        if len(task_instances_to_examine) == 0:
            self.log.debug("No tasks to consider for execution.")
            return executable_tis

        task_instance_str = "\n\t".join(
            ["{}".format(x) for x in task_instances_to_examine])
        self.log.info("{} tasks up for execution:\n\t{}"
                      .format(len(task_instances_to_examine),
                              task_instance_str))

        # Get the pool settings
        pools = {p.pool: p for p in session.query(models.Pool).all()}

        pool_to_task_instances = defaultdict(list)
        for task_instance in task_instances_to_examine:
            pool_to_task_instances[task_instance.pool].append(task_instance)

        # both RUNNING and QUEUED TIs consume concurrency budget
        states_to_count_as_running = [State.RUNNING, State.QUEUED]
        task_concurrency_map = self.__get_task_concurrency_map(
            states=states_to_count_as_running, session=session)

        # Go through each pool, and queue up a task for execution if there are
        # any open slots in the pool.
        for pool, task_instances in pool_to_task_instances.items():
            if not pool:
                # unpooled tasks share the configured default slot count
                open_slots = conf.getint('core', 'non_pooled_task_slot_count')
            else:
                if pool not in pools:
                    self.log.warning(
                        "Tasks using non-existent pool '%s' will not be scheduled",
                        pool
                    )
                    open_slots = 0
                else:
                    open_slots = pools[pool].open_slots(session=session)

            num_queued = len(task_instances)
            self.log.info(
                "Figuring out tasks to run in Pool(name={pool}) with {open_slots} "
                "open slots and {num_queued} task instances in queue".format(
                    **locals()
                )
            )

            # highest priority first, ties broken by earliest execution date
            priority_sorted_task_instances = sorted(
                task_instances, key=lambda ti: (-ti.priority_weight, ti.execution_date))

            # DAG IDs with running tasks that equal the concurrency limit of the dag
            dag_id_to_possibly_running_task_count = {}

            for task_instance in priority_sorted_task_instances:
                if open_slots <= 0:
                    self.log.info(
                        "Not scheduling since there are %s open slots in pool %s",
                        open_slots, pool
                    )
                    # Can't schedule any more since there are no more open slots.
                    break

                # Check to make sure that the task concurrency of the DAG hasn't been
                # reached.
                dag_id = task_instance.dag_id
                simple_dag = simple_dag_bag.get_dag(dag_id)

                # lazily compute (and cache) the running+queued count per DAG
                if dag_id not in dag_id_to_possibly_running_task_count:
                    dag_id_to_possibly_running_task_count[dag_id] = \
                        DAG.get_num_task_instances(
                            dag_id,
                            simple_dag_bag.get_dag(dag_id).task_ids,
                            states=states_to_count_as_running,
                            session=session)

                current_task_concurrency = dag_id_to_possibly_running_task_count[dag_id]
                task_concurrency_limit = simple_dag_bag.get_dag(dag_id).concurrency
                self.log.info(
                    "DAG %s has %s/%s running and queued tasks",
                    dag_id, current_task_concurrency, task_concurrency_limit
                )
                if current_task_concurrency >= task_concurrency_limit:
                    self.log.info(
                        "Not executing %s since the number of tasks running or queued "
                        "from DAG %s is >= to the DAG's task concurrency limit of %s",
                        task_instance, dag_id, task_concurrency_limit
                    )
                    continue

                # per-task concurrency cap, if the operator declared one
                task_concurrency = simple_dag.get_task_special_arg(
                    task_instance.task_id,
                    'task_concurrency')
                if task_concurrency is not None:
                    num_running = task_concurrency_map[
                        (task_instance.dag_id, task_instance.task_id)
                    ]

                    if num_running >= task_concurrency:
                        self.log.info("Not executing %s since the task concurrency for"
                                      " this task has been reached.", task_instance)
                        continue
                    else:
                        task_concurrency_map[(task_instance.dag_id, task_instance.task_id)] += 1

                if self.executor.has_task(task_instance):
                    self.log.debug(
                        "Not handling task %s as the executor reports it is running",
                        task_instance.key
                    )
                    continue
                executable_tis.append(task_instance)
                open_slots -= 1
                dag_id_to_possibly_running_task_count[dag_id] += 1

        task_instance_str = "\n\t".join(
            ["{}".format(x) for x in executable_tis])
        self.log.info(
            "Setting the following tasks to queued state:\n\t%s", task_instance_str)
        # detach the selected TIs so they don't expire on commit; copy the key
        # fields back since make_transient clears instance state
        for ti in executable_tis:
            copy_dag_id = ti.dag_id
            copy_execution_date = ti.execution_date
            copy_task_id = ti.task_id
            make_transient(ti)
            ti.dag_id = copy_dag_id
            ti.execution_date = copy_execution_date
            ti.task_id = copy_task_id
        return executable_tis
    @provide_session
    def _change_state_for_executable_task_instances(self, task_instances,
                                                    acceptable_states, session=None):
        """Atomically flip the given task instances (if still in one of
        ``acceptable_states``) to QUEUED and return them as
        SimpleTaskInstances suitable for handing to the executor.

        :param task_instances: TaskInstances selected for execution
        :param acceptable_states: states a TI must still be in to be queued
        :param session: ORM session (injected by ``@provide_session``)
        :return: list of SimpleTaskInstance for the queued TIs
        """
        if len(task_instances) == 0:
            session.commit()
            return []

        TI = models.TaskInstance
        # re-select the exact TIs by primary key so we can lock them
        filter_for_ti_state_change = (
            [and_(
                TI.dag_id == ti.dag_id,
                TI.task_id == ti.task_id,
                TI.execution_date == ti.execution_date)
                for ti in task_instances])
        ti_query = (
            session
            .query(TI)
            .filter(or_(*filter_for_ti_state_change)))

        # SQL NULL requires an explicit IS NULL test
        if None in acceptable_states:
            ti_query = ti_query.filter(
                or_(TI.state == None, TI.state.in_(acceptable_states))  # noqa: E711
            )
        else:
            ti_query = ti_query.filter(TI.state.in_(acceptable_states))

        # row-lock so a concurrent scheduler can't queue the same TIs
        tis_to_set_to_queued = (
            ti_query
            .with_for_update()
            .all())
        if len(tis_to_set_to_queued) == 0:
            self.log.info("No tasks were able to have their state changed to queued.")
            session.commit()
            return []

        # set TIs to queued state, preserving an existing queued_dttm
        for task_instance in tis_to_set_to_queued:
            task_instance.state = State.QUEUED
            task_instance.queued_dttm = (timezone.utcnow()
                                         if not task_instance.queued_dttm
                                         else task_instance.queued_dttm)
            session.merge(task_instance)

        # Generate a list of SimpleTaskInstance for the use of queuing
        # them in the executor.
        simple_task_instances = [SimpleTaskInstance(ti) for ti in
                                 tis_to_set_to_queued]

        task_instance_str = "\n\t".join(
            ["{}".format(x) for x in tis_to_set_to_queued])

        session.commit()
        self.log.info("Setting the following {} tasks to queued state:\n\t{}"
                      .format(len(tis_to_set_to_queued), task_instance_str))
        return simple_task_instances
def _enqueue_task_instances_with_queued_state(self, simple_dag_bag,
simple_task_instances):
TI = models.TaskInstance
# actually enqueue them
for simple_task_instance in simple_task_instances:
simple_dag = simple_dag_bag.get_dag(simple_task_instance.dag_id)
command = TI.generate_command(
simple_task_instance.dag_id,
simple_task_instance.task_id,
simple_task_instance.execution_date,
local=True,
mark_success=False,
ignore_all_deps=False,
ignore_depends_on_past=False,
ignore_task_deps=False,
ignore_ti_state=False,
pool=simple_task_instance.pool,
file_path=simple_dag.full_filepath,
pickle_id=simple_dag.pickle_id)
priority = simple_task_instance.priority_weight
queue = simple_task_instance.queue
self.log.info(
"Sending %s to executor with priority %s and queue %s",
simple_task_instance.key, priority, queue
)
self.executor.queue_command(
simple_task_instance,
command,
priority=priority,
queue=queue)
    @provide_session
    def _execute_task_instances(self,
                                simple_dag_bag,
                                states,
                                session=None):
        """Find executable task instances, set them to QUEUED in chunks of at
        most ``self.max_tis_per_query``, and enqueue them on the executor.

        :param simple_dag_bag: bag limiting which dag_ids are considered
        :param states: TaskInstance states eligible for execution
        :param session: ORM session (injected by ``@provide_session``)
        :return: total number of task instances queued
        """
        executable_tis = self._find_executable_task_instances(simple_dag_bag, states,
                                                              session=session)

        def query(result, items):
            # chunk reducer: flip this chunk to QUEUED, enqueue it, and
            # accumulate the count of TIs actually changed
            simple_tis_with_state_changed = \
                self._change_state_for_executable_task_instances(items,
                                                                 states,
                                                                 session=session)
            self._enqueue_task_instances_with_queued_state(
                simple_dag_bag,
                simple_tis_with_state_changed)
            session.commit()
            return result + len(simple_tis_with_state_changed)

        return helpers.reduce_in_chunks(query, executable_tis, 0, self.max_tis_per_query)
    @provide_session
    def _change_state_for_tasks_failed_to_execute(self, session):
        """Reset QUEUED task instances that are still stuck in the executor's
        queue back to SCHEDULED so they are not left hanging.

        :param session: ORM session
        """
        if self.executor.queued_tasks:
            TI = models.TaskInstance
            filter_for_ti_state_change = (
                [and_(
                    TI.dag_id == dag_id,
                    TI.task_id == task_id,
                    TI.execution_date == execution_date,
                    # The TI.try_number will return raw try_number+1 since the
                    # ti is not running. And we need to -1 to match the DB record.
                    TI._try_number == try_number - 1,
                    TI.state == State.QUEUED)
                    for dag_id, task_id, execution_date, try_number
                    in self.executor.queued_tasks.keys()])
            ti_query = (session.query(TI)
                        .filter(or_(*filter_for_ti_state_change)))
            # lock the rows so a concurrent scheduler can't race us
            tis_to_set_to_scheduled = (ti_query
                                       .with_for_update()
                                       .all())
            if len(tis_to_set_to_scheduled) == 0:
                session.commit()
                return

            # set the stuck TIs back to the scheduled state
            for task_instance in tis_to_set_to_scheduled:
                task_instance.state = State.SCHEDULED

            task_instance_str = "\n\t".join(
                ["{}".format(x) for x in tis_to_set_to_scheduled])

            session.commit()
            self.log.info("Set the following tasks to scheduled state:\n\t{}"
                          .format(task_instance_str))
def _process_dags(self, dagbag, dags, tis_out):
for dag in dags:
dag = dagbag.get_dag(dag.dag_id)
if dag.is_paused:
self.log.info("Not processing DAG %s since it's paused", dag.dag_id)
continue
if not dag:
self.log.error("DAG ID %s was not found in the DagBag", dag.dag_id)
continue
self.log.info("Processing %s", dag.dag_id)
dag_run = self.create_dag_run(dag)
if dag_run:
self.log.info("Created %s", dag_run)
self._process_task_instances(dag, tis_out)
self.manage_slas(dag)
    @provide_session
    def _process_executor_events(self, simple_dag_bag, session=None):
        """Respond to executor completion events: if the executor says a TI
        finished but the database still shows it QUEUED for the same try, the
        task was likely killed externally, so fail it.

        :param simple_dag_bag: bag limiting which dag_ids are considered
        :param session: ORM session (injected by ``@provide_session``)
        """
        TI = models.TaskInstance
        for key, state in list(self.executor.get_event_buffer(simple_dag_bag.dag_ids)
                               .items()):
            dag_id, task_id, execution_date, try_number = key
            self.log.info(
                "Executor reports %s.%s execution_date=%s as %s for try_number %s",
                dag_id, task_id, execution_date, state, try_number
            )
            if state == State.FAILED or state == State.SUCCESS:
                qry = session.query(TI).filter(TI.dag_id == dag_id,
                                               TI.task_id == task_id,
                                               TI.execution_date == execution_date)
                ti = qry.first()
                if not ti:
                    self.log.warning("TaskInstance %s went missing from the database", ti)
                    continue

                # still QUEUED in the DB for the same try -> killed externally
                if ti.try_number == try_number and ti.state == State.QUEUED:
                    msg = ("Executor reports task instance {} finished ({}) "
                           "although the task says its {}. Was the task "
                           "killed externally?".format(ti, state, ti.state))
                    self.log.error(msg)
                    try:
                        # reload the real DAG so failure callbacks / retries run
                        simple_dag = simple_dag_bag.get_dag(dag_id)
                        dagbag = models.DagBag(simple_dag.full_filepath)
                        dag = dagbag.get_dag(dag_id)
                        ti.task = dag.get_task(task_id)
                        ti.handle_failure(msg)
                    except Exception:
                        # last resort: force-fail without callbacks or retries
                        self.log.error("Cannot load the dag bag to handle failure for %s"
                                       ". Setting task to FAILED without callbacks or "
                                       "retries. Do you have enough resources?", ti)
                        ti.state = State.FAILED
                        session.merge(ti)
                        session.commit()
    def _execute(self):
        """Entry point for the scheduler job: set up the DAG file processor
        agent and run the scheduling loop until it exits.
        """
        self.log.info("Starting the scheduler")

        # DAGs can be pickled for easier remote execution by some executors;
        # local/sequential executors read the files directly instead
        pickle_dags = False
        if self.do_pickle and self.executor.__class__ not in \
                (executors.LocalExecutor, executors.SequentialExecutor):
            pickle_dags = True

        self.log.info("Running execute loop for %s seconds", self.run_duration)
        self.log.info("Processing each file at most %s times", self.num_runs)

        # Build up a list of Python files that could contain DAGs
        self.log.info("Searching for files in %s", self.subdir)
        known_file_paths = list_py_file_paths(self.subdir)
        self.log.info("There are %s files in %s", len(known_file_paths), self.subdir)

        def processor_factory(file_path, zombies):
            # one DagFileProcessor per DAG file, parameterized with this
            # job's pickling choice and dag_id filter
            return DagFileProcessor(file_path,
                                    pickle_dags,
                                    self.dag_ids,
                                    zombies)

        # sqlite cannot handle concurrent access, so run synchronously there
        async_mode = not self.using_sqlite

        self.processor_agent = DagFileProcessorAgent(self.subdir,
                                                     known_file_paths,
                                                     self.num_runs,
                                                     processor_factory,
                                                     async_mode)

        try:
            self._execute_helper()
        except Exception:
            self.log.exception("Exception when executing execute_helper")
        finally:
            # always shut the processor agent down, even on failure
            self.processor_agent.end()
            self.log.info("Exited execute loop")
    def _execute_helper(self):
        """The main scheduler loop: harvest parsed DAGs from the processor
        agent, queue ready task instances on the executor, heartbeat, and
        repeat until ``run_duration`` elapses or all files are processed.
        """
        self.executor.start()

        self.log.info("Resetting orphaned tasks for active dag runs")
        self.reset_state_for_orphaned_tasks()

        # Start after resetting orphaned tasks to avoid stressing out DB.
        self.processor_agent.start()

        execute_start_time = timezone.utcnow()

        # Last time that self.heartbeat() was called.
        last_self_heartbeat_time = timezone.utcnow()

        # For the execute duration, parse and schedule DAGs
        # (a negative run_duration means "run forever")
        while (timezone.utcnow() - execute_start_time).total_seconds() < \
                self.run_duration or self.run_duration < 0:
            self.log.debug("Starting Loop...")
            loop_start_time = time.time()

            if self.using_sqlite:
                self.processor_agent.heartbeat()
                # For the sqlite case w/ 1 thread, wait until the processor
                # is finished to avoid concurrent access to the DB.
                self.log.debug(
                    "Waiting for processors to finish since we're using sqlite")
                self.processor_agent.wait_until_finished()

            self.log.info("Harvesting DAG parsing results")
            simple_dags = self.processor_agent.harvest_simple_dags()
            self.log.debug("Harvested {} SimpleDAGs".format(len(simple_dags)))

            simple_dag_bag = SimpleDagBag(simple_dags)
            if len(simple_dags) > 0:
                try:
                    simple_dag_bag = SimpleDagBag(simple_dags)

                    # Handle cases where a DAG run state is set (perhaps
                    # manually) to a non-running state: fail retrying TIs and
                    # unset queued/scheduled ones
                    self._change_state_for_tis_without_dagrun(simple_dag_bag,
                                                              [State.UP_FOR_RETRY],
                                                              State.FAILED)

                    self._change_state_for_tis_without_dagrun(simple_dag_bag,
                                                              [State.QUEUED,
                                                               State.SCHEDULED],
                                                              State.NONE)

                    self._execute_task_instances(simple_dag_bag,
                                                 (State.SCHEDULED,))
                except Exception as e:
                    # keep the scheduler alive on transient queuing errors
                    self.log.error("Error queuing tasks")
                    self.log.exception(e)
                    continue

            # Call heartbeats
            self.log.debug("Heartbeating the executor")
            self.executor.heartbeat()

            self._change_state_for_tasks_failed_to_execute()

            # Process events from the executor
            self._process_executor_events(simple_dag_bag)

            # Heartbeat the scheduler periodically
            time_since_last_heartbeat = (timezone.utcnow() -
                                         last_self_heartbeat_time).total_seconds()
            if time_since_last_heartbeat > self.heartrate:
                self.log.debug("Heartbeating the scheduler")
                self.heartbeat()
                last_self_heartbeat_time = timezone.utcnow()

            loop_end_time = time.time()
            loop_duration = loop_end_time - loop_start_time
            self.log.debug(
                "Ran scheduling loop in %.2f seconds",
                loop_duration)
            self.log.debug("Sleeping for %.2f seconds", self._processor_poll_interval)
            time.sleep(self._processor_poll_interval)

            # exit when the agent reports all files were processed num_runs
            # times (one extra loop so in-flight results are still harvested)
            if self.processor_agent.done:
                self._last_loop = True

            if self._last_loop:
                self.log.info("Exiting scheduler loop as all files"
                              " have been processed {} times".format(self.num_runs))
                break

            # throttle fast loops to avoid excessive log spam
            if loop_duration < 1:
                sleep_length = 1 - loop_duration
                self.log.debug(
                    "Sleeping for {0:.2f} seconds to prevent excessive logging"
                    .format(sleep_length))
                sleep(sleep_length)

        # Stop any processors
        self.processor_agent.terminate()

        # If all files were processed, DAGs the scheduler never touched in
        # this run have likely been deleted: deactivate them.
        if self.processor_agent.all_files_processed:
            self.log.info(
                "Deactivating DAGs that haven't been touched since %s",
                execute_start_time.isoformat()
            )
            models.DAG.deactivate_stale_dags(execute_start_time)

        self.executor.end()

        settings.Session.remove()
    @provide_session
    def process_file(self, file_path, zombies, pickle_dags=False, session=None):
        """Parse a single DAG file, sync its DAGs to the DB, create due
        DagRuns, and mark runnable task instances SCHEDULED.

        :param file_path: path to the Python file to parse
        :param zombies: zombie task instances to kill via the dagbag
        :param pickle_dags: whether to serialize the DAGs to the DB
        :param session: ORM session (injected by ``@provide_session``)
        :return: list of SimpleDags for the (non-paused) DAGs found
        """
        self.log.info("Processing file %s for tasks to queue", file_path)
        simple_dags = []

        try:
            dagbag = models.DagBag(file_path, include_examples=False)
        except Exception:
            # a broken DAG file must not take the scheduler down
            self.log.exception("Failed at reloading the DAG file %s", file_path)
            Stats.incr('dag_file_refresh_error', 1, 1)
            return []

        if len(dagbag.dags) > 0:
            self.log.info("DAG(s) %s retrieved from %s", dagbag.dags.keys(), file_path)
        else:
            self.log.warning("No viable dags retrieved from %s", file_path)
            self.update_import_errors(session, dagbag)
            return []

        # Save individual DAGs in the ORM
        for dag in dagbag.dags.values():
            dag.sync_to_db()

        paused_dag_ids = [dag.dag_id for dag in dagbag.dags.values()
                          if dag.is_paused]

        # Pickle the DAGs (if necessary) and put them into a SimpleDag
        for dag_id in dagbag.dags:
            dag = dagbag.get_dag(dag_id)
            pickle_id = None
            if pickle_dags:
                pickle_id = dag.pickle(session).id

            # only return DAGs that are not paused
            if dag_id not in paused_dag_ids:
                simple_dags.append(SimpleDag(dag, pickle_id=pickle_id))

        # optionally restrict to this job's dag_id filter; subDAGs are
        # processed through their parent, so skip them here
        if len(self.dag_ids) > 0:
            dags = [dag for dag in dagbag.dags.values()
                    if dag.dag_id in self.dag_ids and
                    dag.dag_id not in paused_dag_ids]
        else:
            dags = [dag for dag in dagbag.dags.values()
                    if not dag.parent_dag and
                    dag.dag_id not in paused_dag_ids]

        # Not using multiprocessing.Queue() since it's no longer a separate
        # process and due to some unusual behavior. (empty() incorrectly
        # returns true?)
        ti_keys_to_schedule = []

        self._process_dags(dagbag, dags, ti_keys_to_schedule)

        for ti_key in ti_keys_to_schedule:
            dag = dagbag.dags[ti_key[0]]
            task = dag.get_task(ti_key[1])
            ti = models.TaskInstance(task, ti_key[2])

            ti.refresh_from_db(session=session, lock_for_update=True)
            # We can defer checking the task dependency checks to the worker themselves
            # since they can be expensive to run in the scheduler.
            dep_context = DepContext(deps=QUEUE_DEPS, ignore_task_deps=True)

            # Only schedule tasks that have their dependencies met, e.g. to avoid
            # a task that recently got its state changed to RUNNING from somewhere
            # other than the scheduler from getting its state overwritten.
            # TODO(aoen): It's not great that we have to check all the task instance
            # dependencies twice (here and when the worker runs the task).
            if ti.are_dependencies_met(
                    dep_context=dep_context,
                    session=session,
                    verbose=True):
                ti.state = State.SCHEDULED

            # Also save this task instance to the DB.
            self.log.info("Creating / updating %s in ORM", ti)
            session.merge(ti)

        # commit batch
        session.commit()

        # Record import errors into the ORM
        try:
            self.update_import_errors(session, dagbag)
        except Exception:
            self.log.exception("Error logging import errors!")
        try:
            dagbag.kill_zombies(zombies)
        except Exception:
            self.log.exception("Error killing zombies!")

        return simple_dags
    @provide_session
    def heartbeat_callback(self, session=None):
        """Emit a statsd counter each time this scheduler job heartbeats."""
        Stats.incr('scheduler_heartbeat', 1, 1)
class BackfillJob(BaseJob):
ID_PREFIX = 'backfill_'
ID_FORMAT_PREFIX = ID_PREFIX + '{0}'
__mapper_args__ = {
'polymorphic_identity': 'BackfillJob'
}
    class _DagRunTaskStatus(object):
        """Internal mutable accumulator used by BackfillJob to track the
        status of dag runs and task instances over the course of a backfill:
        which TIs are still to run, running, skipped, succeeded, failed,
        not ready, or deadlocked, plus run-level progress counters.
        """
        # pylint: disable=too-many-instance-attributes
        def __init__(self,
                     to_run=None,
                     running=None,
                     skipped=None,
                     succeeded=None,
                     failed=None,
                     not_ready=None,
                     deadlocked=None,
                     active_runs=None,
                     executed_dag_run_dates=None,
                     finished_runs=0,
                     total_runs=0,
                     ):
            """
            :param to_run: dict of TI key -> task instances still to run
            :param running: dict of TI key -> task instances currently running
            :param skipped: set of TI keys that were skipped
            :param succeeded: set of TI keys that succeeded
            :param failed: set of TI keys that failed
            :param not_ready: set of TI keys whose dependencies are not yet met
            :param deadlocked: set of deadlocked task instances
            :param active_runs: list of DagRuns currently being backfilled
            :param executed_dag_run_dates: set of execution dates already run
            :param finished_runs: number of finished dag runs so far
            :param total_runs: total number of dag runs in the backfill
            """
            # NOTE: `x or default()` means a falsy (e.g. empty) container
            # passed by the caller is replaced with a fresh one
            self.to_run = to_run or dict()
            self.running = running or dict()
            self.skipped = skipped or set()
            self.succeeded = succeeded or set()
            self.failed = failed or set()
            self.not_ready = not_ready or set()
            self.deadlocked = deadlocked or set()
            self.active_runs = active_runs or list()
            self.executed_dag_run_dates = executed_dag_run_dates or set()
            self.finished_runs = finished_runs
            self.total_runs = total_runs
    def __init__(
            self,
            dag,
            start_date=None,
            end_date=None,
            mark_success=False,
            donot_pickle=False,
            ignore_first_depends_on_past=False,
            ignore_task_deps=False,
            pool=None,
            delay_on_limit_secs=1.0,
            verbose=False,
            conf=None,
            rerun_failed_tasks=False,
            *args, **kwargs):
        """
        :param dag: the DAG to backfill
        :param start_date: start of the backfill date range
        :param end_date: end of the backfill date range (inclusive)
        :param mark_success: mark tasks successful without running them
        :param donot_pickle: skip pickling the DAG for remote executors
        :param ignore_first_depends_on_past: ignore depends_on_past for the
            first execution date of the backfill only
        :param ignore_task_deps: ignore task-level dependencies entirely
        :param pool: pool to run the backfilled task instances in
        :param delay_on_limit_secs: seconds to wait when max_active_runs is hit
        :param verbose: emit verbose dependency-check logging
        :param conf: dag run configuration dict
        :param rerun_failed_tasks: re-schedule previously failed TIs instead
            of treating them as backfill failures
        """
        self.dag = dag
        self.dag_id = dag.dag_id
        self.bf_start_date = start_date
        self.bf_end_date = end_date
        self.mark_success = mark_success
        self.donot_pickle = donot_pickle
        self.ignore_first_depends_on_past = ignore_first_depends_on_past
        self.ignore_task_deps = ignore_task_deps
        self.pool = pool
        self.delay_on_limit_secs = delay_on_limit_secs
        self.verbose = verbose
        self.conf = conf
        self.rerun_failed_tasks = rerun_failed_tasks
        super(BackfillJob, self).__init__(*args, **kwargs)
    def _update_counters(self, ti_status):
        """Refresh each running task instance from the DB and move it to the
        appropriate bucket of ``ti_status`` (succeeded / skipped / failed),
        re-adding it to ``to_run`` when it should be retried.

        :param ti_status: the backfill's _DagRunTaskStatus accumulator
        """
        for key, ti in list(ti_status.running.items()):
            ti.refresh_from_db()
            if ti.state == State.SUCCESS:
                ti_status.succeeded.add(key)
                self.log.debug("Task instance %s succeeded. Don't rerun.", ti)
                ti_status.running.pop(key)
                continue
            elif ti.state == State.SKIPPED:
                ti_status.skipped.add(key)
                self.log.debug("Task instance %s skipped. Don't rerun.", ti)
                ti_status.running.pop(key)
                continue
            elif ti.state == State.FAILED:
                self.log.error("Task instance %s failed", ti)
                ti_status.failed.add(key)
                ti_status.running.pop(key)
                continue
            # special case: The state of the task can be set to NONE by the task itself
            # when it reaches concurrency limits, or to UP_FOR_RETRY on failure;
            # both are put back on the to_run queue
            elif ti.state == State.UP_FOR_RETRY:
                self.log.warning("Task instance %s is up for retry", ti)
                ti_status.running.pop(key)
                ti_status.to_run[key] = ti
            elif ti.state == State.NONE:
                self.log.warning(
                    "FIXME: task instance %s state was set to none externally or "
                    "reaching concurrency limits. Re-adding task to queue.",
                    ti
                )
                ti.set_state(State.SCHEDULED)
                ti_status.running.pop(key)
                ti_status.to_run[key] = ti
def _manage_executor_state(self, running):
executor = self.executor
for key, state in list(executor.get_event_buffer().items()):
if key not in running:
self.log.warning(
"%s state %s not in running=%s",
key, state, running.values()
)
continue
ti = running[key]
ti.refresh_from_db()
self.log.debug("Executor state: %s task %s", state, ti)
if state == State.FAILED or state == State.SUCCESS:
if ti.state == State.RUNNING or ti.state == State.QUEUED:
msg = ("Executor reports task instance {} finished ({}) "
"although the task says its {}. Was the task "
"killed externally?".format(ti, state, ti.state))
self.log.error(msg)
ti.handle_failure(msg)
    @provide_session
    def _get_dag_run(self, run_date, session=None):
        """Return a RUNNING dag run for ``run_date``, matched to an existing
        run when available or freshly created otherwise; returns None when
        the max_active_runs limit is reached.

        :param run_date: execution date for the dag run
        :param session: ORM session (injected by ``@provide_session``)
        """
        run_id = BackfillJob.ID_FORMAT_PREFIX.format(run_date.isoformat())

        # only enforce max_active_runs for scheduled, non-subdag DAGs
        respect_dag_max_active_limit = (True
                                        if (self.dag.schedule_interval and
                                            not self.dag.is_subdag)
                                        else False)

        current_active_dag_count = self.dag.get_num_active_runs(external_trigger=False)

        # check if we are scheduling on top of an already existing dag run
        # (we could find a "scheduled" run instead of a "backfill" one)
        run = DagRun.find(dag_id=self.dag.dag_id,
                          execution_date=run_date,
                          session=session)

        if run is not None and len(run) > 0:
            run = run[0]
            if run.state == State.RUNNING:
                # reusing an already-running run doesn't add to the count
                respect_dag_max_active_limit = False
        else:
            run = None

        # enforce max_active_runs limit for dag, special cases already
        # handled by respect_dag_max_active_limit
        if (respect_dag_max_active_limit and
                current_active_dag_count >= self.dag.max_active_runs):
            return None

        run = run or self.dag.create_dagrun(
            run_id=run_id,
            execution_date=run_date,
            start_date=timezone.utcnow(),
            state=State.RUNNING,
            external_trigger=False,
            session=session,
            conf=self.conf,
        )

        # set required transient field
        run.dag = self.dag

        # explicitly mark as backfill and running, even when reusing a run
        run.state = State.RUNNING
        run.run_id = run_id
        run.verify_integrity(session=session)
        return run
@provide_session
def _task_instances_for_dag_run(self, dag_run, session=None):
tasks_to_run = {}
if dag_run is None:
return tasks_to_run
self.reset_state_for_orphaned_tasks(filter_by_dag_run=dag_run, session=session)
dag_run.refresh_from_db()
make_transient(dag_run)
# TODO(edgarRd): AIRFLOW-1464 change to batch query to improve perf
for ti in dag_run.get_task_instances():
# all tasks part of the backfill are scheduled to run
if ti.state == State.NONE:
ti.set_state(State.SCHEDULED, session=session)
if ti.state != State.REMOVED:
tasks_to_run[ti.key] = ti
return tasks_to_run
def _log_progress(self, ti_status):
msg = ' | '.join([
"[backfill progress]",
"finished run {0} of {1}",
"tasks waiting: {2}",
"succeeded: {3}",
"running: {4}",
"failed: {5}",
"skipped: {6}",
"deadlocked: {7}",
"not ready: {8}"
]).format(
ti_status.finished_runs,
ti_status.total_runs,
len(ti_status.to_run),
len(ti_status.succeeded),
len(ti_status.running),
len(ti_status.failed),
len(ti_status.skipped),
len(ti_status.deadlocked),
len(ti_status.not_ready))
self.log.info(msg)
self.log.debug(
"Finished dag run loop iteration. Remaining tasks %s",
ti_status.to_run.values()
)
@provide_session
def _process_backfill_task_instances(self,
ti_status,
executor,
pickle_id,
start_date=None, session=None):
executed_run_dates = []
while ((len(ti_status.to_run) > 0 or len(ti_status.running) > 0) and
len(ti_status.deadlocked) == 0):
self.log.debug("*** Clearing out not_ready list ***")
ti_status.not_ready.clear()
# we need to execute the tasks bottom to top
# or leaf to root, as otherwise tasks might be
# determined deadlocked while they are actually
# waiting for their upstream to finish
for task in self.dag.topological_sort():
for key, ti in list(ti_status.to_run.items()):
if task.task_id != ti.task_id:
continue
ti.refresh_from_db()
task = self.dag.get_task(ti.task_id)
ti.task = task
ignore_depends_on_past = (
self.ignore_first_depends_on_past and
ti.execution_date == (start_date or ti.start_date))
self.log.debug(
"Task instance to run %s state %s", ti, ti.state)
# The task was already marked successful or skipped by a
# different Job. Don't rerun it.
if ti.state == State.SUCCESS:
ti_status.succeeded.add(key)
self.log.debug("Task instance %s succeeded. Don't rerun.", ti)
ti_status.to_run.pop(key)
if key in ti_status.running:
ti_status.running.pop(key)
continue
elif ti.state == State.SKIPPED:
ti_status.skipped.add(key)
self.log.debug("Task instance %s skipped. Don't rerun.", ti)
ti_status.to_run.pop(key)
if key in ti_status.running:
ti_status.running.pop(key)
continue
elif ti.state == State.NONE:
self.log.warning(
"FIXME: task instance {} state was set to None "
"externally. This should not happen"
)
ti.set_state(State.SCHEDULED, session=session)
if self.rerun_failed_tasks:
if ti.state in (State.FAILED, State.UPSTREAM_FAILED):
self.log.error("Task instance {ti} "
"with state {state}".format(ti=ti,
state=ti.state))
if key in ti_status.running:
ti_status.running.pop(key)
ti.set_state(State.SCHEDULED, session=session)
else:
if ti.state in (State.FAILED, State.UPSTREAM_FAILED):
self.log.error("Task instance {ti} "
"with {state} state".format(ti=ti,
state=ti.state))
ti_status.failed.add(key)
ti_status.to_run.pop(key)
if key in ti_status.running:
ti_status.running.pop(key)
continue
backfill_context = DepContext(
deps=RUN_DEPS,
ignore_depends_on_past=ignore_depends_on_past,
ignore_task_deps=self.ignore_task_deps,
flag_upstream_failed=True)
if ti.are_dependencies_met(
dep_context=backfill_context,
session=session,
verbose=self.verbose):
ti.refresh_from_db(lock_for_update=True, session=session)
if ti.state == State.SCHEDULED or ti.state == State.UP_FOR_RETRY:
if executor.has_task(ti):
self.log.debug(
"Task Instance %s already in executor "
"waiting for queue to clear",
ti
)
else:
self.log.debug('Sending %s to executor', ti)
ti.state = State.QUEUED
session.merge(ti)
cfg_path = None
if executor.__class__ in (executors.LocalExecutor,
executors.SequentialExecutor):
cfg_path = tmp_configuration_copy()
executor.queue_task_instance(
ti,
mark_success=self.mark_success,
pickle_id=pickle_id,
ignore_task_deps=self.ignore_task_deps,
ignore_depends_on_past=ignore_depends_on_past,
pool=self.pool,
cfg_path=cfg_path)
ti_status.running[key] = ti
ti_status.to_run.pop(key)
session.commit()
continue
if ti.state == State.UPSTREAM_FAILED:
self.log.error("Task instance %s upstream failed", ti)
ti_status.failed.add(key)
ti_status.to_run.pop(key)
if key in ti_status.running:
ti_status.running.pop(key)
continue
if ti.state == State.UP_FOR_RETRY:
self.log.debug(
"Task instance %s retry period not "
"expired yet", ti)
if key in ti_status.running:
ti_status.running.pop(key)
ti_status.to_run[key] = ti
continue
self.log.debug('Adding %s to not_ready', ti)
ti_status.not_ready.add(key)
self.heartbeat()
executor.heartbeat()
# tasks to run and there are no running tasks then the backfill
# is deadlocked
if (ti_status.not_ready and
ti_status.not_ready == set(ti_status.to_run) and
len(ti_status.running) == 0):
self.log.warning(
"Deadlock discovered for ti_status.to_run=%s",
ti_status.to_run.values()
)
ti_status.deadlocked.update(ti_status.to_run.values())
ti_status.to_run.clear()
# check executor state
self._manage_executor_state(ti_status.running)
# update the task counters
self._update_counters(ti_status=ti_status)
# update dag run state
_dag_runs = ti_status.active_runs[:]
for run in _dag_runs:
run.update_state(session=session)
if run.state in State.finished():
ti_status.finished_runs += 1
ti_status.active_runs.remove(run)
executed_run_dates.append(run.execution_date)
self._log_progress(ti_status)
# return updated status
return executed_run_dates
@provide_session
def _collect_errors(self, ti_status, session=None):
err = ''
if ti_status.failed:
err += (
"---------------------------------------------------\n"
"Some task instances failed:\n{}\n".format(ti_status.failed))
if ti_status.deadlocked:
err += (
'---------------------------------------------------\n'
'BackfillJob is deadlocked.')
deadlocked_depends_on_past = any(
t.are_dependencies_met(
dep_context=DepContext(ignore_depends_on_past=False),
session=session,
verbose=self.verbose) !=
t.are_dependencies_met(
dep_context=DepContext(ignore_depends_on_past=True),
session=session,
verbose=self.verbose)
for t in ti_status.deadlocked)
if deadlocked_depends_on_past:
err += (
'Some of the deadlocked tasks were unable to run because '
'of "depends_on_past" relationships. Try running the '
'backfill with the option '
'"ignore_first_depends_on_past=True" or passing "-I" at '
'the command line.')
err += ' These tasks have succeeded:\n{}\n'.format(ti_status.succeeded)
err += ' These tasks are running:\n{}\n'.format(ti_status.running)
err += ' These tasks have failed:\n{}\n'.format(ti_status.failed)
err += ' These tasks are skipped:\n{}\n'.format(ti_status.skipped)
err += ' These tasks are deadlocked:\n{}\n'.format(ti_status.deadlocked)
return err
@provide_session
def _execute_for_run_dates(self, run_dates, ti_status, executor, pickle_id,
start_date, session=None):
for next_run_date in run_dates:
dag_run = self._get_dag_run(next_run_date, session=session)
tis_map = self._task_instances_for_dag_run(dag_run,
session=session)
if dag_run is None:
continue
ti_status.active_runs.append(dag_run)
ti_status.to_run.update(tis_map or {})
processed_dag_run_dates = self._process_backfill_task_instances(
ti_status=ti_status,
executor=executor,
pickle_id=pickle_id,
start_date=start_date,
session=session)
ti_status.executed_dag_run_dates.update(processed_dag_run_dates)
    @provide_session
    def _execute(self, session=None):
        """Entry point for the backfill job: compute the run dates, optionally
        pickle the DAG, then repeatedly execute batches of dag runs until all
        dates are done, raising AirflowException on failures/deadlocks.
        """
        ti_status = BackfillJob._DagRunTaskStatus()

        start_date = self.bf_start_date

        # Get intervals between the start/end dates, which will turn into dag runs
        run_dates = self.dag.get_run_dates(start_date=start_date,
                                           end_date=self.bf_end_date)
        if len(run_dates) == 0:
            self.log.info("No run dates were found for the given dates and dag interval.")
            return

        # picklin' (only for executors that fetch DAGs remotely)
        pickle_id = None
        if not self.donot_pickle and self.executor.__class__ not in (
                executors.LocalExecutor, executors.SequentialExecutor):
            pickle = DagPickle(self.dag)
            session.add(pickle)
            session.commit()
            pickle_id = pickle.id

        executor = self.executor
        executor.start()

        ti_status.total_runs = len(run_dates)  # total dag runs in backfill

        try:
            remaining_dates = ti_status.total_runs
            while remaining_dates > 0:
                # only process the dates that have not been executed yet
                dates_to_process = [run_date for run_date in run_dates
                                    if run_date not in ti_status.executed_dag_run_dates]

                self._execute_for_run_dates(run_dates=dates_to_process,
                                            ti_status=ti_status,
                                            executor=executor,
                                            pickle_id=pickle_id,
                                            start_date=start_date,
                                            session=session)

                remaining_dates = (
                    ti_status.total_runs - len(ti_status.executed_dag_run_dates)
                )
                err = self._collect_errors(ti_status=ti_status, session=session)
                if err:
                    raise AirflowException(err)

                if remaining_dates > 0:
                    # some runs were skipped because max_active_runs was hit;
                    # back off before trying those dates again
                    self.log.info(
                        "max_active_runs limit for dag %s has been reached "
                        " - waiting for other dag runs to finish",
                        self.dag_id
                    )
                    time.sleep(self.delay_on_limit_secs)
        finally:
            executor.end()
            session.commit()

        self.log.info("Backfill done. Exiting.")
class LocalTaskJob(BaseJob):
    """
    A job that runs a single TaskInstance on this host, supervising the
    underlying task-runner subprocess and heartbeating while it runs.
    """

    __mapper_args__ = {
        'polymorphic_identity': 'LocalTaskJob'
    }

    def __init__(
            self,
            task_instance,
            ignore_all_deps=False,
            ignore_depends_on_past=False,
            ignore_task_deps=False,
            ignore_ti_state=False,
            mark_success=False,
            pickle_id=None,
            pool=None,
            *args, **kwargs):
        """
        :param task_instance: the TaskInstance this job will run
        :param ignore_all_deps: skip ALL dependency checks
        :param ignore_depends_on_past: skip only the depends_on_past check
        :param ignore_task_deps: skip task-level dependency checks
        :param ignore_ti_state: run even if the TI is in a finished state
        :param mark_success: mark the task successful without running it
        :param pickle_id: id of the pickled DAG to run from, if any
        :param pool: pool in which to run the task
        """
        self.task_instance = task_instance
        self.dag_id = task_instance.dag_id
        self.ignore_all_deps = ignore_all_deps
        self.ignore_depends_on_past = ignore_depends_on_past
        self.ignore_task_deps = ignore_task_deps
        self.ignore_ti_state = ignore_ti_state
        self.pool = pool
        self.pickle_id = pickle_id
        self.mark_success = mark_success
        # Guard flag so this job does not try to
        # terminate multiple times
        self.terminating = False
        super(LocalTaskJob, self).__init__(*args, **kwargs)

    def _execute(self):
        """
        Start the task runner and poll it until it exits, heartbeating
        periodically.  Aborts (and kills the subprocess) on SIGTERM or
        when heartbeats have not succeeded for too long.
        """
        self.task_runner = get_task_runner(self)

        def signal_handler(signum, frame):
            # Kill the child process before propagating termination.
            self.log.error("Received SIGTERM. Terminating subprocesses")
            self.on_kill()
            raise AirflowException("LocalTaskJob received SIGTERM signal")
        signal.signal(signal.SIGTERM, signal_handler)

        # Atomically claim the task instance; bail out if another runner
        # already owns it or its dependencies are not met.
        if not self.task_instance._check_and_change_state_before_execution(
                mark_success=self.mark_success,
                ignore_all_deps=self.ignore_all_deps,
                ignore_depends_on_past=self.ignore_depends_on_past,
                ignore_task_deps=self.ignore_task_deps,
                ignore_ti_state=self.ignore_ti_state,
                job_id=self.id,
                pool=self.pool):
            self.log.info("Task is not able to be run")
            return

        try:
            self.task_runner.start()

            last_heartbeat_time = time.time()
            heartbeat_time_limit = conf.getint('scheduler',
                                               'scheduler_zombie_task_threshold')
            while True:
                # Monitor the task to see if it's done
                return_code = self.task_runner.return_code()
                if return_code is not None:
                    self.log.info("Task exited with return code %s", return_code)
                    return

                # Heartbeat so the scheduler does not treat this job as
                # a zombie; a transient DB error just delays the retry.
                try:
                    self.heartbeat()
                    last_heartbeat_time = time.time()
                except OperationalError:
                    Stats.incr('local_task_job_heartbeat_failure', 1, 1)
                    self.log.exception(
                        "Exception while trying to heartbeat! Sleeping for %s seconds",
                        self.heartrate
                    )
                    time.sleep(self.heartrate)

                # If heartbeats have failed for longer than the zombie
                # threshold, give up rather than keep running a task the
                # scheduler may consider dead.
                time_since_last_heartbeat = time.time() - last_heartbeat_time
                if time_since_last_heartbeat > heartbeat_time_limit:
                    Stats.incr('local_task_job_prolonged_heartbeat_failure', 1, 1)
                    # Fixed log-message typo: was "time limited exceeded".
                    self.log.error("Heartbeat time limit exceeded!")
                    raise AirflowException("Time since last heartbeat({:.2f}s) "
                                           "exceeded limit ({}s)."
                                           .format(time_since_last_heartbeat,
                                                   heartbeat_time_limit))
        finally:
            self.on_kill()

    def on_kill(self):
        """ Terminate the task-runner subprocess and let it clean up. """
        self.task_runner.terminate()
        self.task_runner.on_finish()

    @provide_session
    def heartbeat_callback(self, session=None):
        """
        Runs on each successful heartbeat: self-destruct if the task
        instance's state was changed externally, or if another host or
        process now owns the task instance.
        """
        if self.terminating:
            # Ensure termination even if the subprocess appeared only
            # after the first terminate() call.
            self.task_runner.terminate()
            return

        self.task_instance.refresh_from_db()
        ti = self.task_instance

        fqdn = get_hostname()
        same_hostname = fqdn == ti.hostname
        same_process = ti.pid == os.getpid()

        if ti.state == State.RUNNING:
            # Still running: make sure WE are the recorded runner.
            if not same_hostname:
                self.log.warning("The recorded hostname {ti.hostname} "
                                 "does not match this instance's hostname "
                                 "{fqdn}".format(**locals()))
                raise AirflowException("Hostname of job runner does not match")
            elif not same_process:
                current_pid = os.getpid()
                self.log.warning("Recorded pid {ti.pid} does not match "
                                 "the current pid "
                                 "{current_pid}".format(**locals()))
                raise AirflowException("PID of job runner does not match")
        elif (
                self.task_runner.return_code() is None and
                hasattr(self.task_runner, 'process')
        ):
            # State was changed externally while the subprocess is still
            # alive: take the poison pill.
            self.log.warning(
                "State of this instance has been externally set to %s. "
                "Taking the poison pill.",
                ti.state
            )
            self.task_runner.terminate()
            self.terminating = True
| true | true |
f71fa3db4ff531443af2a92cd1b1a2d567ddaf8d | 188 | py | Python | lightningrun.py | justteen/BUZZ-USERBOT | 55651cce150e1d04d2c61efb2565ef9f46b42933 | [
"BSL-1.0"
] | null | null | null | lightningrun.py | justteen/BUZZ-USERBOT | 55651cce150e1d04d2c61efb2565ef9f46b42933 | [
"BSL-1.0"
] | null | null | null | lightningrun.py | justteen/BUZZ-USERBOT | 55651cce150e1d04d2c61efb2565ef9f46b42933 | [
"BSL-1.0"
] | null | null | null | import os
# Bootstrap: clone the userbot repo, mark its launchers executable, and run
# it -- all as a single shell command string.
# NOTE(review): os.system with a fixed (not user-supplied) shell string, so no
# injection risk here, but subprocess.run would give error handling -- TODO confirm.
os.system("git clone https://github.com/justteen/BUZZ-USERBOT /root/userbot && mkdir /root/userbot/bin/ && cd /root/userbot/ && chmod +x /usr/local/bin/* && python3 -m userbot")
| 62.666667 | 177 | 0.702128 | import os
os.system("git clone https://github.com/justteen/BUZZ-USERBOT /root/userbot && mkdir /root/userbot/bin/ && cd /root/userbot/ && chmod +x /usr/local/bin/* && python3 -m userbot")
| true | true |
f71fa5c1c650d81c5044415e020b232623ab58c2 | 37,109 | py | Python | src/m1_Line.py | chenx15rose/10-MoreImplementingClasses | 2bce636c73e968111c22bc245d90a596276d4679 | [
"MIT"
] | null | null | null | src/m1_Line.py | chenx15rose/10-MoreImplementingClasses | 2bce636c73e968111c22bc245d90a596276d4679 | [
"MIT"
] | null | null | null | src/m1_Line.py | chenx15rose/10-MoreImplementingClasses | 2bce636c73e968111c22bc245d90a596276d4679 | [
"MIT"
] | null | null | null | """
A simple Line class.
NOTE: This is NOT rosegraphics -- it is your OWN Line class.
Authors: David Mutchler, Vibha Alangar, Dave Fisher, Amanda Stouder,
their colleagues and Xiaolong Chen.
""" # DONE: 1. PUT YOUR NAME IN THE ABOVE LINE.
import math
import m1t_test_Line as m1t
########################################################################
# IMPORTANT:
# Your instructor will help you get started on this exercise.
########################################################################
# ----------------------------------------------------------------------
# DONE: 2. With your instructor, READ THE INSTRUCTIONS
# in file m0_INSTRUCTIONS.txt, asking questions as needed.
# Once you understand the instructions, mark this TO DO as DONE.
#
# Also go ahead and mark the src as a Sources Root (right click src folder
# Mark Directory as --> Sources Root. Expand the imports above
# and notice the red line go away from the m1t_test_Line module.
# This step is not critically important this time, but still good to do.
# ----------------------------------------------------------------------
########################################################################
# NOTE: For ALL of the methods that you implement, the method is allowed
# to have additional side effects as needed by it and/or other methods.
########################################################################
def main():
    """
    Runs each TEST function in this module, but only after the Line
    method that it exercises has at least a partial implementation.
    That is, no TEST function runs until you begin work on the code
    that it is testing.
    """
    # Pairs of (Line method name, the TEST function that exercises it),
    # in the order the exercise asks them to be implemented.
    tests_by_method = (
        ('__init__', run_test_init),
        ('clone', run_test_clone),
        ('reverse', run_test_reverse),
        ('slope', run_test_slope),
        ('length', run_test_length),
        ('get_number_of_clones', run_test_get_number_of_clones),
        ('line_plus', run_test_line_plus),
        ('line_minus', run_test_line_minus),
        ('midpoint', run_test_midpoint),
        ('is_parallel', run_test_is_parallel),
        ('reset', run_test_reset),
    )
    for method_name, test_function in tests_by_method:
        if m1t.is_implemented(method_name):
            test_function()
########################################################################
# Students:
# Do NOT touch the following Point class - it has no TO DO.
# Do NOT copy code from the methods in this Point class.
#
# DO ** READ ** this Point class,
# asking questions about any of it that you do not understand.
#
# DO ** CALL ** methods in this Point class as needed
# in implementing and testing the methods of the ** Line ** class.
#
# IMPORTANT, IMPORTANT, IMPORTANT:
# *** In your ** Line ** class methods, you should NEVER have code
# *** that a ** Point ** class method could do for you.
########################################################################
# The Point class (and its methods) begins here.
########################################################################
class Point(object):
""" Represents a point in 2-dimensional space. """
def __init__(self, x, y):
"""
Sets instance variables x and y to the given coordinates.
"""
self.x = x
self.y = y
def __repr__(self):
"""
Returns a string representation of this Point.
For each coordinate (x and y), the representation
- uses no decimal points if the number is close to an integer,
- else it uses decimal_places places after the decimal point, where decimal_places = 2.
Examples:
Point(10, 3.14)
Point(3.01, 2.99)
"""
decimal_places = 2 # Use 2 places after the decimal point
formats = []
numbers = []
for coordinate in (self.x, self.y):
if abs(coordinate - round(coordinate)) < (10 ** -decimal_places):
# Treat it as an integer:
formats.append('{}')
numbers.append(round(coordinate))
else:
# Treat it as a float to decimal_places decimal places:
formats.append('{:.' + str(decimal_places) + 'f}')
numbers.append(round(coordinate, decimal_places))
format_string = 'Point(' + formats[0] + ', ' + formats[1] + ')'
return format_string.format(numbers[0], numbers[1])
def __eq__(self, p2):
"""
Defines == for Points: a == b is equivalent to a.__eq__(b).
Treats two numbers as "equal" if they are within 6 decimal
places of each other for both x and y coordinates.
"""
return (round(self.x, 6) == round(p2.x, 6) and
round(self.y, 6) == round(p2.y, 6))
def clone(self):
""" Returns a new Point at the same (x, y) as this Point. """
return Point(self.x, self.y)
def distance_from(self, p2):
""" Returns the distance this Point is from the given Point. """
dx_squared = (self.x - p2.x) ** 2
dy_squared = (self.y - p2.y) ** 2
return math.sqrt(dx_squared + dy_squared)
def halfway_to(self, p2):
"""
Given another Point object p2, returns a new Point
that is half-way between this Point and the given Point (p2).
"""
return Point((self.x + p2.x) / 2,
(self.y + p2.y) / 2)
def plus(self, p2):
"""
Returns a Point whose coordinates are those of this Point
plus the given Point. For example:
p1 = Point(500, 20)
p2 = Point(100, 13)
p3 = p1.plus(p2)
print(p3)
would print: Point(600, 33)
"""
return Point(self.x + p2.x, self.y + p2.y)
def minus(self, p2):
"""
Returns a Point whose coordinates are those of this Point
minus the given Point. For example:
p1 = Point(500, 20)
p2 = Point(100, 13)
p3 = p1.minus(p2)
print(p3)
would print: Point(400, 7)
"""
return Point(self.x - p2.x, self.y - p2.y)
########################################################################
# The Line class (and its methods) begins here.
########################################################################
class Line(object):
    """ Represents a line segment in 2-dimensional space. """

    def __init__(self, start, end):
        """
        Stores CLONES of the given start and end Points as the current
        endpoints of this Line, remembers snapshots of the original
        endpoints (for reset), and initializes the clone counter.

        Example:
            p1 = Point(30, 17)
            p2 = Point(50, 80)
            line = Line(p1, p2)
            print(line.start)        # Prints Point(30, 17)
            print(line.start == p1)  # Prints True
            print(line.start is p1)  # Prints False (it is a clone)

        Type hints:
          :type start: Point
          :type end: Point
        """
        self.start = start.clone()
        self.end = end.clone()
        # Number of times this Line has been cloned
        # (see clone / get_number_of_clones).
        self.nclone = 0
        # Snapshots of the endpoints at construction time, used by reset().
        self.oristart = start.clone()
        self.oriend = end.clone()

    def __repr__(self):
        """
        Returns a string representation of this Line, in the form:
            Line[(x1, y1), (x2, y2)]
        """
        # --------------------------------------------------------------
        # Provided implementation (per the exercise): do NOT modify.
        # --------------------------------------------------------------
        start = repr(self.start).replace('Point', '')
        end = repr(self.end).replace('Point', '')
        return 'Line[{}, {}]'.format(start, end)

    def __eq__(self, line2):
        """
        Defines == for Lines:  True when both start points are equal
        (per Point.__eq__) AND both end points are equal.

        Type hints:
          :type line2: Line
          :rtype: bool
        """
        # --------------------------------------------------------------
        # Provided implementation (per the exercise): do NOT modify.
        # --------------------------------------------------------------
        return (self.start == line2.start) and (self.end == line2.end)

    def clone(self):
        """
        Returns a new Line whose start and end are clones of this Line's
        CURRENT start and end.  Also counts this cloning
        (see get_number_of_clones).

        :rtype: Line
        """
        self.nclone += 1
        # Line.__init__ clones the Points, so the new Line shares no
        # Point objects with this one.
        return Line(self.start, self.end)

    def reverse(self):
        """
        MUTATES this Line so that its direction is reversed
        (that is, its start and end Points are swapped).
        """
        self.start, self.end = self.end, self.start

    def slope(self):
        """
        Returns the slope of this Line, or math.inf if the Line
        is vertical (i.e. has "infinite" slope).

        :rtype: float
        """
        if self.end.x == self.start.x:
            return math.inf
        return (self.end.y - self.start.y) / (self.end.x - self.start.x)

    def length(self):
        """
        Returns the length of this Line.

        :rtype: float
        """
        # Delegate to the Point class, per this file's own guideline:
        # never re-implement what a Point method already provides.
        return self.start.distance_from(self.end)

    def get_number_of_clones(self):
        """
        Returns the number of times that this Line has been cloned
        (via the clone method).

        :rtype: int
        """
        return self.nclone

    def line_plus(self, other_line):
        """
        Returns a new Line whose start is this Line's start plus
        other_line's start, and whose end is this Line's end plus
        other_line's end.  For example:
            Line(Point(500, 20), Point(100, 8)).line_plus(
                Line(Point(100, 13), Point(400, 8)))
        is  Line[(600, 33), (500, 16)].

        Type hints:
          :type other_line: Line
          :rtype: Line
        """
        return Line(self.start.plus(other_line.start),
                    self.end.plus(other_line.end))

    def line_minus(self, other_line):
        """
        Returns a new Line whose start is this Line's start minus
        other_line's start, and whose end is this Line's end minus
        other_line's end.  For example:
            Line(Point(500, 20), Point(100, 8)).line_minus(
                Line(Point(100, 13), Point(400, 8)))
        is  Line[(400, 7), (-300, 0)].

        Type hints:
          :type other_line: Line
          :rtype: Line
        """
        return Line(self.start.minus(other_line.start),
                    self.end.minus(other_line.end))

    def midpoint(self):
        """
        Returns a Point at the midpoint of this Line.

        :rtype: Point
        """
        return self.start.halfway_to(self.end)

    def is_parallel(self, line2):
        """
        Returns True if this Line is parallel to the given Line (line2),
        False otherwise.

        Type hints:
          :type line2: Line
          :rtype: bool
        """
        # Round to 12 decimal places so floating-point noise does not
        # make truly-parallel lines compare unequal.  Note that
        # round(math.inf, 12) is still math.inf, so two vertical lines
        # correctly compare as parallel.
        return round(self.slope(), 12) == round(line2.slope(), 12)

    def reset(self):
        """
        MUTATES this Line so that its start and end Points revert to
        what they were when this Line was constructed.
        """
        # Assign CLONES (not the stored snapshots themselves): the
        # original code aliased the snapshots into start/end, so a
        # caller who mutated an endpoint after a reset would corrupt
        # the snapshot and break every subsequent reset.
        self.start = self.oristart.clone()
        self.end = self.oriend.clone()
########################################################################
# The TEST functions for the Line class begin here.
#
# We have already written the TEST functions. They all take the form:
# -- m1t.run_test_BLAH() # This runs OUR tests.
# -- One more test (or tests) that came directly from the Example
# in the specification.
########################################################################
def run_test_init():
    """ Tests the __init__ method of the Line class. """
    m1t.run_test_init()  # This runs OUR tests.

    # ------------------------------------------------------------------
    # One ADDITIONAL test (or set of tests).
    # ------------------------------------------------------------------
    start = Point(30, 17)
    end = Point(50, 80)
    constructed = Line(start, end)  # Causes __init__ to run
    print(constructed.start)  # Should print Point(30, 17)
    print(constructed.end)  # Should print Point(50, 80)
    print(constructed.start == start)  # Should print True
    print(constructed.start is start)  # Should print False
    print('The above should print:')
    for expected in ('  Point(30, 17)',
                     '  Point(50, 80)',
                     '  True',
                     '  False'):
        print(expected)
def run_test_clone():
    """ Tests the clone method of the Line class. """
    m1t.run_test_clone()  # This runs OUR tests.
    # ------------------------------------------------------------------
    # One ADDITIONAL test (or set of tests).
    # ------------------------------------------------------------------
    p1 = Point(30, 17)
    p2 = Point(50, 80)
    line1 = Line(p1, p2)
    line2 = line1.clone()
    print(line1)  # Should print: Line[(30, 17), (50, 80)]
    print(line2)  # Should print: Line[(30, 17), (50, 80)]
    print(line1 == line2)  # Should print: True
    print(line1 is line2)  # Should print: False
    print(line1.start is line2.start)  # Should print: False
    print(line1.end is line2.end)  # Should print: False
    line1.start = Point(11, 12)
    print(line1)  # Should print: Line[(11, 12), (50, 80)]
    print(line2)  # Should print: Line[(30, 17), (50, 80)]
    print(line1 == line2)  # Should now print: False
    print('The above should print:')
    print('  Line[(30, 17), (50, 80)]')
    print('  Line[(30, 17), (50, 80)]')
    print('  True')
    print('  False')
    print('  False')
    print('  False')
    print('  Line[(11, 12), (50, 80)]')
    # Fixed: the next expected-output string was missing its closing ']'.
    print('  Line[(30, 17), (50, 80)]')
    print('  False')
def run_test_reverse():
    """ Tests the reverse method of the Line class. """
    m1t.run_test_reverse()  # This runs OUR tests.
    # ------------------------------------------------------------------
    # One ADDITIONAL test (or set of tests).
    # ------------------------------------------------------------------
    p1 = Point(30, 17)
    p2 = Point(50, 80)
    line1 = Line(p1, p2)
    line2 = line1.clone()
    print(line1)  # Should print: Line[(30, 17), (50, 80)]
    line1.reverse()
    print(line1)  # Should print: Line[(50, 80), (30, 17)]
    print(line1 == line2)  # Should print: False
    line1.reverse()
    print(line1 == line2)  # Should now print: True
    print('The above should print:')
    print('  Line[(30, 17), (50, 80)]')
    # Fixed: the next expected-output string was missing its closing ']'.
    print('  Line[(50, 80), (30, 17)]')
    print('  False')
    print('  True')
def run_test_slope():
    """ Tests the slope method of the Line class. """
    m1t.run_test_slope()  # This runs OUR tests.

    # ------------------------------------------------------------------
    # One ADDITIONAL test (or set of tests).
    # ------------------------------------------------------------------
    gentle_line = Line(Point(30, 3), Point(50, 8))
    # The slope is (8 - 3) / (50 - 30), which is 0.25:
    print(gentle_line.slope())  # Should print [approximately]: 0.25

    vertical_line = Line(Point(10, 10), Point(10, 5))
    print(vertical_line.slope())  # Should print: inf
    # math.inf is NOT the STRING 'inf', so:
    print(vertical_line.slope() == 'inf')  # Should print False

    print('The above should print:')
    for expected in ('  0.25 (approximately)',
                     '  inf',
                     '  False'):
        print(expected)
def run_test_length():
    """ Tests the length method of the Line class. """
    m1t.run_test_length()  # This runs OUR tests.

    # ------------------------------------------------------------------
    # One ADDITIONAL test (or set of tests).
    # ------------------------------------------------------------------
    horizontal_segment = Line(Point(166, 10), Point(100, 10))
    # The endpoints are 66 units apart, so:
    print(horizontal_segment.length())  # Should print: 66.0

    diagonal_segment = Line(Point(0, 0), Point(3, 4))
    print(diagonal_segment.length())  # Should print about 5.0

    print('The above should print:')
    for expected in ('  66.0',
                     '  5.0 (approximately)'):
        print(expected)
def run_test_get_number_of_clones():
    """ Tests the get_number_of_clones method of the Line class. """
    m1t.run_test_get_number_of_clones()  # This runs OUR tests.

    # ------------------------------------------------------------------
    # One ADDITIONAL test (or set of tests).
    # ------------------------------------------------------------------
    original = Line(Point(500, 20), Point(100, 8))
    copy_a = original.clone()        # 1st clone of original
    copy_b = original.clone()        # 2nd clone of original
    copy_of_copy = copy_b.clone()    # 1st (only) clone of copy_b
    copy_c = original.clone()        # 3rd clone of original

    # original was cloned 3 times, copy_b once, and the rest never:
    for each_line in (original, copy_a, copy_b, copy_of_copy, copy_c):
        print(each_line.get_number_of_clones())
    print('The above should print 3, then 0, then 1, then 0, then 0.')
def run_test_line_plus():
    """ Tests the line_plus method of the Line class. """
    m1t.run_test_line_plus()  # This runs OUR tests.

    # ------------------------------------------------------------------
    # One ADDITIONAL test (or set of tests).
    # ------------------------------------------------------------------
    first = Line(Point(500, 20), Point(100, 8))
    second = Line(Point(100, 13), Point(400, 8))
    their_sum = first.line_plus(second)
    print(their_sum)
    print('The above should print: Line[(600, 33), (500, 16)]')
def run_test_line_minus():
    """ Tests the line_minus method of the Line class. """
    m1t.run_test_line_minus()  # This runs OUR tests.

    # ------------------------------------------------------------------
    # One ADDITIONAL test (or set of tests).
    # ------------------------------------------------------------------
    first = Line(Point(500, 20), Point(100, 8))
    second = Line(Point(100, 13), Point(400, 8))
    their_difference = first.line_minus(second)
    print(their_difference)
    print('The above should print: Line[(400, 7), (-300, 0)]')
def run_test_midpoint():
    """ Tests the midpoint method of the Line class. """
    m1t.run_test_midpoint()  # This runs OUR tests.

    # ------------------------------------------------------------------
    # One ADDITIONAL test (or set of tests):
    # the midpoint of (3, 10)-(9, 20) is (6, 15).
    # ------------------------------------------------------------------
    segment = Line(Point(3, 10), Point(9, 20))
    print(segment.midpoint())  # Should print: Point(6, 15)
    print('The above should print: Point(6, 15)')
def run_test_is_parallel():
    """ Tests the is_parallel method of the Line class. """
    m1t.run_test_is_parallel()  # This runs OUR tests.

    # ------------------------------------------------------------------
    # One ADDITIONAL test (or set of tests):
    # parallel, non-parallel, self-parallel, and vertical lines.
    # ------------------------------------------------------------------
    steep_a = Line(Point(15, 30), Point(17, 50))    # slope is 10.0
    steep_b = Line(Point(10, 10), Point(15, 60))    # slope is 10.0
    diagonal = Line(Point(10, 10), Point(80, 80))   # slope is 1.0
    vertical = Line(Point(10, 10), Point(10, 20))   # slope is inf

    pairs = [
        (steep_a, steep_b),    # Should print: True
        (steep_b, steep_a),    # Should print: True
        (steep_a, diagonal),   # Should print: False
        (steep_a, vertical),   # Should print: False
        (steep_a, steep_a),    # Should print: True
        (vertical, vertical),  # Should print: True
    ]
    for left, right in pairs:
        print(left.is_parallel(right))
    print('The above should print:')
    print('  True, True, False, False, True, True')
def run_test_reset():
    """ Tests the reset method of the Line class. """
    m1t.run_test_reset()  # This runs OUR tests.

    # ------------------------------------------------------------------
    # One ADDITIONAL test (or set of tests):
    # mutate both lines, then confirm reset restores the originals.
    # ------------------------------------------------------------------
    line_a = Line(Point(-3, -4), Point(3, 4))
    line_b = Line(Point(0, 1), Point(10, 20))

    line_a.start = Point(100, 300)
    line_b.end = Point(99, 4)
    line_a.reverse()

    # Should print: Line[(x1, y1), (x2, y2)] where (x1, y1) and
    # (x2, y2) are the CURRENT coordinates of line_a's endpoints.
    print(line_a)
    print(line_b)  # Similarly for line_b

    line_a.reset()
    line_b.reset()
    print(line_a)  # Should print: Line[(-3, -4), (3, 4)]
    print(line_b)  # Should print: Line[(0, 1), (10, 20)]
    print('The above should print:')
    print('  Line[(3, 4), (100, 300)]')
    print('  Line[(0, 1), (99, 4)]')
    print('  Line[(-3, -4), (3, 4)]')
    print('  Line[(0, 1), (10, 20)]')
# ----------------------------------------------------------------------
# Calls  main  to start the ball rolling, but only when this module is
# run directly (as opposed to being imported by another module).
# ----------------------------------------------------------------------
if __name__ == '__main__':
    main()
| 37.789206 | 97 | 0.487052 |
import math
import m1t_test_Line as m1t
| true | true |
f71fa77e0d385cdca5a886aa27031fa0a8e90b0c | 1,251 | py | Python | canary-dedup-groomer.py | open-cluster-management/canary-reporting | d97d49fb83e8e8d5e56b937261928285303e475e | [
"Apache-2.0"
] | null | null | null | canary-dedup-groomer.py | open-cluster-management/canary-reporting | d97d49fb83e8e8d5e56b937261928285303e475e | [
"Apache-2.0"
] | 35 | 2021-03-24T14:57:42.000Z | 2021-09-23T18:37:58.000Z | canary-dedup-groomer.py | open-cluster-management/canary-reporting | d97d49fb83e8e8d5e56b937261928285303e475e | [
"Apache-2.0"
] | 1 | 2021-08-02T14:26:55.000Z | 2021-08-02T14:26:55.000Z | import os, sys, json, db_utils
import pymysql
from github import Github, UnknownObjectException
from datetime import datetime
# Aligns all defects recorded in the database with their GitHub status.

# NOTE(review): TABLE_NAME, c and conn are not referenced anywhere in
# the visible code -- presumably leftovers from an earlier direct
# pymysql version; confirm before deleting.
TABLE_NAME = "canary_issues"
c = None     # placeholder for a DB cursor (unused here)
conn = None  # placeholder for a DB connection (unused here)

# GitHub access configuration, taken from the environment.
github_token=os.getenv('GITHUB_TOKEN')
github_org=os.getenv('GITHUB_ORG')
github_repo=os.getenv('GITHUB_REPO')
def query_github_status(defect):
    """Return the state ('open'/'closed') of GitHub issue *defect* in
    the module-level ``repo``, or None if no such issue exists."""
    try:
        return repo.get_issue(defect).state
    except UnknownObjectException:
        # Issue number not found in the repository.
        return None
#
# Get connected to the database.
#
ret = db_utils.connect_to_db()

#
# Get connected to GitHub.
# (github_token was already read from the environment above; the
# redundant second os.getenv call has been removed.)
#
try:
    g = Github(github_token)
    org = g.get_organization(github_org)
    repo = org.get_repo(github_repo)
except UnknownObjectException as ex:
    # Unknown org/repo -- nothing useful can be done, so bail out.
    print(ex)
    exit(1)

# Pull every defect still marked "open" in the database and sync any
# whose GitHub issue has since been closed (or no longer exists).
ret = db_utils.pull_open_defects(github_repo)
if ret is not None:
    for row in list(ret):
        # Row layout per pull_open_defects: (db id, issue number, ...).
        row_id, defect = row[0], row[1]
        status = query_github_status(int(defect))
        # Update only rows whose issue exists and is no longer open.
        if status != "open" and status is not None:
            db_utils.update_status(row_id, status, repo)
else:
    print("No open defects!")

ret = db_utils.disconnect_from_db()
| 24.529412 | 66 | 0.710631 | import os, sys, json, db_utils
import pymysql
from github import Github, UnknownObjectException
from datetime import datetime
TABLE_NAME = "canary_issues"
c = None
conn = None
github_token=os.getenv('GITHUB_TOKEN')
github_org=os.getenv('GITHUB_ORG')
github_repo=os.getenv('GITHUB_REPO')
def query_github_status(defect):
    """Return the state ('open'/'closed') of GitHub issue *defect* in
    the module-level ``repo``, or None if no such issue exists."""
    try:
        return repo.get_issue(defect).state
    except UnknownObjectException:
        # Issue number not found in the repository.
        return None
# Get connected to the database.
ret = db_utils.connect_to_db()

# Get connected to GitHub.
# (github_token was already read from the environment above; the
# redundant second os.getenv call has been removed.)
try:
    g = Github(github_token)
    org = g.get_organization(github_org)
    repo = org.get_repo(github_repo)
except UnknownObjectException as ex:
    # Unknown org/repo -- nothing useful can be done, so bail out.
    print(ex)
    exit(1)

# Pull every defect still marked "open" in the database and sync any
# whose GitHub issue has since been closed (or no longer exists).
ret = db_utils.pull_open_defects(github_repo)
if ret is not None:
    for row in list(ret):
        # Row layout per pull_open_defects: (db id, issue number, ...).
        row_id, defect = row[0], row[1]
        status = query_github_status(int(defect))
        # Update only rows whose issue exists and is no longer open.
        if status != "open" and status is not None:
            db_utils.update_status(row_id, status, repo)
else:
    print("No open defects!")

ret = db_utils.disconnect_from_db()
| true | true |
f71fa7d89da3839e79f74760543e4ed894dcc3ac | 3,351 | py | Python | imagr_images/tests.py | sazlin/cfpydev-imagr | e34ac025e357694f40034ab1c02ed3be5294c2d8 | [
"MIT"
] | null | null | null | imagr_images/tests.py | sazlin/cfpydev-imagr | e34ac025e357694f40034ab1c02ed3be5294c2d8 | [
"MIT"
] | null | null | null | imagr_images/tests.py | sazlin/cfpydev-imagr | e34ac025e357694f40034ab1c02ed3be5294c2d8 | [
"MIT"
] | null | null | null | from django.test import TestCase
from models import Photo, Album
from imagr_users.models import ImagrUser
from imagr_images.models import get_file_owner_username
from admin import PhotoAdmin, AlbumAdmin, ImageSizeListFilter
from django.core.urlresolvers import reverse
from django.contrib.admin.sites import AdminSite
import datetime
from django.test.utils import setup_test_environment
setup_test_environment()
from django.test.client import Client
client = Client()
class ImagrTests(TestCase):
    """Tests for imagr_images models, admin helpers, and the stream view."""

    def setUp(self):
        """Create three users (u1 follows u2 and u3), one published
        photo per user, one album owned by u1, and an AdminSite."""
        u1 = ImagrUser.objects.create(username='testuser')
        u2 = ImagrUser.objects.create(username='testuser2')
        u3 = ImagrUser.objects.create(username='testuser3')
        u1.follow(u2)
        u1.follow(u3)
        Photo.objects.create(
            image='test.png',
            title='u1 test image',
            owner=u1,
            published=1)
        Photo.objects.create(
            image='test.png',
            title='u2 test image',
            owner=u2,
            published=1)
        Photo.objects.create(
            image='test.png',
            title='u3 test image',
            owner=u3,
            published=1)
        Album.objects.create(
            title='test album',
            owner=u1,
            published=1,
        )
        self.site = AdminSite()

    def test_get_file_owner(self):
        """get_file_owner_username builds 'owner/year/month/basename'
        from the photo's owner, the current UTC date, and the upload
        filename (directories in the filename are discarded)."""
        test_photo = Photo.objects.get(title='u1 test image')
        self.assertEqual(isinstance(test_photo, Photo), True)
        test_filename = '/garbage/garbage/garbage/test.png'
        result = get_file_owner_username(test_photo, test_filename)
        today = datetime.datetime.utcnow()
        expected = 'testuser/{}/{}/{}'.format(
            unicode(today.year), unicode(today.month), u'test.png')
        self.assertEquals(result, expected)

    def test_photo_save(self):
        """Saving a Photo records a positive image_size."""
        test_photo = Photo.objects.get(title='u1 test image')
        self.assertGreater(test_photo.image_size, 0)

    def test_album_owner_link(self):
        """AlbumAdmin.owner_link renders an anchor to the album owner's
        admin change page."""
        test_album = Album.objects.get(title='test album')
        expected = "<a href='../../imagr_users/imagruser/{}/'>{}</a>".format(
            test_album.owner.id,
            test_album.owner)
        test_album_admin = AlbumAdmin(test_album, self.site)
        self.assertEquals(test_album_admin.owner_link(test_album), expected)

    def test_photo_owner_link(self):
        """PhotoAdmin.owner_link renders an anchor to the photo owner's
        admin change page."""
        test_photo = Photo.objects.get(title='u1 test image')
        expected = "<a href='../../imagr_users/imagruser/{}/'>{}</a>".format(
            test_photo.owner.id,
            test_photo.owner)
        # Bug fix: use PhotoAdmin for a Photo (the original constructed
        # AlbumAdmin here; PhotoAdmin was imported but never used).
        test_photo_admin = PhotoAdmin(test_photo, self.site)
        self.assertEquals(test_photo_admin.owner_link(test_photo), expected)

    def test_view_stream_page(self):
        """An anonymous GET of the stream page returns 200 and all
        three published photos, most recent owner first."""
        # Dead commented-out login code and the unused `user` lookup
        # were removed; the page is exercised logged out.
        client.logout()
        response = client.get(reverse('stream_page'))
        self.assertEquals(response.status_code, 200)
        actual_photos = response.context['photos']
        self.assertEquals(len(actual_photos), 3)
        self.assertEquals(actual_photos[0].title, 'u3 test image')
        self.assertEquals(actual_photos[1].title, 'u2 test image')
        self.assertEquals(actual_photos[2].title, 'u1 test image')
| 37.651685 | 101 | 0.64906 | from django.test import TestCase
from models import Photo, Album
from imagr_users.models import ImagrUser
from imagr_images.models import get_file_owner_username
from admin import PhotoAdmin, AlbumAdmin, ImageSizeListFilter
from django.core.urlresolvers import reverse
from django.contrib.admin.sites import AdminSite
import datetime
from django.test.utils import setup_test_environment
setup_test_environment()
from django.test.client import Client
client = Client()
class ImagrTests(TestCase):
    """Tests for imagr_images models, admin helpers, and the stream view."""

    def setUp(self):
        """Create three users (u1 follows u2 and u3), one published
        photo per user, one album owned by u1, and an AdminSite."""
        u1 = ImagrUser.objects.create(username='testuser')
        u2 = ImagrUser.objects.create(username='testuser2')
        u3 = ImagrUser.objects.create(username='testuser3')
        u1.follow(u2)
        u1.follow(u3)
        Photo.objects.create(
            image='test.png',
            title='u1 test image',
            owner=u1,
            published=1)
        Photo.objects.create(
            image='test.png',
            title='u2 test image',
            owner=u2,
            published=1)
        Photo.objects.create(
            image='test.png',
            title='u3 test image',
            owner=u3,
            published=1)
        Album.objects.create(
            title='test album',
            owner=u1,
            published=1,
        )
        self.site = AdminSite()

    def test_get_file_owner(self):
        """get_file_owner_username builds 'owner/year/month/basename'
        from the photo's owner, the current UTC date, and the upload
        filename (directories in the filename are discarded)."""
        test_photo = Photo.objects.get(title='u1 test image')
        self.assertEqual(isinstance(test_photo, Photo), True)
        test_filename = '/garbage/garbage/garbage/test.png'
        result = get_file_owner_username(test_photo, test_filename)
        today = datetime.datetime.utcnow()
        expected = 'testuser/{}/{}/{}'.format(
            unicode(today.year), unicode(today.month), u'test.png')
        self.assertEquals(result, expected)

    def test_photo_save(self):
        """Saving a Photo records a positive image_size."""
        test_photo = Photo.objects.get(title='u1 test image')
        self.assertGreater(test_photo.image_size, 0)

    def test_album_owner_link(self):
        """AlbumAdmin.owner_link renders an anchor to the album owner's
        admin change page."""
        test_album = Album.objects.get(title='test album')
        expected = "<a href='../../imagr_users/imagruser/{}/'>{}</a>".format(
            test_album.owner.id,
            test_album.owner)
        test_album_admin = AlbumAdmin(test_album, self.site)
        self.assertEquals(test_album_admin.owner_link(test_album), expected)

    def test_photo_owner_link(self):
        """PhotoAdmin.owner_link renders an anchor to the photo owner's
        admin change page."""
        test_photo = Photo.objects.get(title='u1 test image')
        expected = "<a href='../../imagr_users/imagruser/{}/'>{}</a>".format(
            test_photo.owner.id,
            test_photo.owner)
        # Bug fix: use PhotoAdmin for a Photo (the original constructed
        # AlbumAdmin here; PhotoAdmin was imported but never used).
        test_photo_admin = PhotoAdmin(test_photo, self.site)
        self.assertEquals(test_photo_admin.owner_link(test_photo), expected)

    def test_view_stream_page(self):
        """An anonymous GET of the stream page returns 200 and all
        three published photos, most recent owner first."""
        # The unused `user` lookup was removed; the page is exercised
        # logged out.
        client.logout()
        response = client.get(reverse('stream_page'))
        self.assertEquals(response.status_code, 200)
        actual_photos = response.context['photos']
        self.assertEquals(len(actual_photos), 3)
        self.assertEquals(actual_photos[0].title, 'u3 test image')
        self.assertEquals(actual_photos[1].title, 'u2 test image')
        self.assertEquals(actual_photos[2].title, 'u1 test image')
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.