Columns:
  code          string  22 - 1.05M characters
  apis          list    1 - 3.31k items
  extract_api   string  75 - 3.25M characters
""" This program helps you capture data for training easily. It asks you what character you intend to show it and then captures likely character locations and saves in output directory at regular intervals. Please note that module looking for likely character locations isn't perfect and sometimes makes mistakes, so you would be best served manually confirming captured data is correct afterwards. """ import os import cv2 import glob import re import configobj import net.vision def save_file(dir_path, index, image): file_name = "{:05}.jpg".format(index) output_path = os.path.join(dir_path, file_name) cv2.imwrite(output_path, image) def get_initial_file_counter_value(base_path): existing_files = glob.glob(os.path.join(base_path, "*.jpg")) if len(existing_files) == 0: return 1 else: last_file = os.path.basename(existing_files[-1]) last_file_index = int(re.findall(r'\d+', last_file)[0]) return last_file_index + 1 def main(): result = input("What character do you want to capture data for: ")[0] print("Ok, we will be capturing data for " + result) base_path = os.path.join("../../data/characters/data/", result) os.makedirs(base_path, exist_ok=True) file_counter = get_initial_file_counter_value(base_path) previous_files_count = file_counter - 1 if file_counter > 1: print("We will be starting from file {}".format(file_counter)) frame_counter = 0 video_capture = cv2.VideoCapture(0) config = configobj.ConfigObj('configuration.ini') reconstruction_size = tuple([int(value) for value in config['image_size']]) while True: _, frame = video_capture.read() card_candidates = net.vision.CardCandidatesExtractor().get_card_candidates(frame, reconstruction_size) for candidate in card_candidates: cv2.drawContours(image=frame, contours=[candidate.coordinates], contourIdx=0, color=(0, 255, 0), thickness=4) # Only save data roughly three times per second if frame_counter == 10: for candidate in card_candidates: save_file(base_path, file_counter, candidate.image) print("Captured {} images".format(file_counter - previous_files_count), end='\r') file_counter += 1 frame_counter = 0 cv2.imshow("image", frame) key = cv2.waitKey(30) # If spacebar was pressed if key == 32: break frame_counter += 1 if __name__ == "__main__": main()
[ "os.makedirs", "os.path.basename", "cv2.waitKey", "cv2.imwrite", "configobj.ConfigObj", "cv2.VideoCapture", "re.findall", "cv2.drawContours", "cv2.imshow", "os.path.join" ]
[((585, 618), 'os.path.join', 'os.path.join', (['dir_path', 'file_name'], {}), '(dir_path, file_name)\n', (597, 618), False, 'import os\n'), ((623, 654), 'cv2.imwrite', 'cv2.imwrite', (['output_path', 'image'], {}), '(output_path, image)\n', (634, 654), False, 'import cv2\n'), ((1150, 1201), 'os.path.join', 'os.path.join', (['"""../../data/characters/data/"""', 'result'], {}), "('../../data/characters/data/', result)\n", (1162, 1201), False, 'import os\n'), ((1206, 1243), 'os.makedirs', 'os.makedirs', (['base_path'], {'exist_ok': '(True)'}), '(base_path, exist_ok=True)\n', (1217, 1243), False, 'import os\n'), ((1491, 1510), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (1507, 1510), False, 'import cv2\n'), ((1525, 1565), 'configobj.ConfigObj', 'configobj.ConfigObj', (['"""configuration.ini"""'], {}), "('configuration.ini')\n", (1544, 1565), False, 'import configobj\n'), ((736, 768), 'os.path.join', 'os.path.join', (['base_path', '"""*.jpg"""'], {}), "(base_path, '*.jpg')\n", (748, 768), False, 'import os\n'), ((851, 887), 'os.path.basename', 'os.path.basename', (['existing_files[-1]'], {}), '(existing_files[-1])\n', (867, 887), False, 'import os\n'), ((2389, 2415), 'cv2.imshow', 'cv2.imshow', (['"""image"""', 'frame'], {}), "('image', frame)\n", (2399, 2415), False, 'import cv2\n'), ((2431, 2446), 'cv2.waitKey', 'cv2.waitKey', (['(30)'], {}), '(30)\n', (2442, 2446), False, 'import cv2\n'), ((1872, 1986), 'cv2.drawContours', 'cv2.drawContours', ([], {'image': 'frame', 'contours': '[candidate.coordinates]', 'contourIdx': '(0)', 'color': '(0, 255, 0)', 'thickness': '(4)'}), '(image=frame, contours=[candidate.coordinates], contourIdx=\n 0, color=(0, 255, 0), thickness=4)\n', (1888, 1986), False, 'import cv2\n'), ((918, 947), 're.findall', 're.findall', (['"""\\\\d+"""', 'last_file'], {}), "('\\\\d+', last_file)\n", (928, 947), False, 'import re\n')]
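The extract_api field pairs each API call with character offsets into the code string. A minimal sketch of reading one entry follows; the tuple layout is inferred from the rows in this dump, not from a published spec, so treat the field names as assumptions.

# Inferred layout: (call_span, qualified_name, source_name, (args, kwargs),
# argument_source, args_span, uses_alias, import_line) -- an assumption based
# on the entries above.
entry = ((585, 618), 'os.path.join', 'os.path.join',
         (['dir_path', 'file_name'], {}), '(dir_path, file_name)\n',
         (597, 618), False, 'import os\n')
(start, end), qual_name, *_rest, import_line = entry
# With the original newlines restored, code[start:end] should cover the whole
# call expression, e.g. "os.path.join(dir_path, file_name)".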
import pytest

from . import test_bptt_wgan
from .. import bptt_cwgan


def single_g_step(args):
    bptt_cwgan.main([
        '--iterations', '1',
        '--truth_size', '1',
        '--num-models', '2',
        '--n_bandwidths', '1',
        '--WGAN_n_critic0', '1',
        '--seqlen', '4',
        '--skip-steps', '2',
        '--tc-stats-record-interval', '1',
    ] + args)


@pytest.mark.parametrize('args', [
    [],
    ['--num-models', '1'],
    ['--sample-sites', '0, 0.5'],
    ['--contrasts', '5, 20'],
    ['--include-inhibitory-neurons'],
])
def test_single_g_step_slowtest(args, cleancwd):
    test_bptt_wgan.test_single_g_step_slowtest(
        args, cleancwd,
        single_g_step=single_g_step,
        script_file=bptt_cwgan.__file__)
    assert cleancwd.join('results', 'store.hdf5').check()


@pytest.mark.parametrize('args, config', [
    ([], dict(ssn_type='heteroin')),
    (['--include-inhibitory-neurons'], dict(ssn_type='heteroin')),
    (['--include-inhibitory-neurons'], dict(ssn_type='heteroin', V=[0.3, 0])),
    (['--include-inhibitory-neurons'],
     dict(ssn_type='heteroin', gen_V_min=[0, 0], gen_V_max=[1, 0])),
    ([], dict(ssn_type='deg-heteroin')),
    (['--include-inhibitory-neurons'], dict(ssn_type='deg-heteroin', V=0.5)),
])
def test_single_g_step_with_load_config_slowtest(args, config, cleancwd):
    test_bptt_wgan.test_single_g_step_with_load_config_slowtest(
        args, config, cleancwd,
        single_g_step=single_g_step,
        script_file=bptt_cwgan.__file__)
[ "pytest.mark.parametrize" ]
[((384, 544), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""args"""', "[[], ['--num-models', '1'], ['--sample-sites', '0, 0.5'], ['--contrasts',\n '5, 20'], ['--include-inhibitory-neurons']]"], {}), "('args', [[], ['--num-models', '1'], [\n '--sample-sites', '0, 0.5'], ['--contrasts', '5, 20'], [\n '--include-inhibitory-neurons']])\n", (407, 544), False, 'import pytest\n')]
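These cases are marked as slow by their _slowtest suffix. A small sketch of driving just them through pytest's Python API; the "-k" name filter is an assumption about how the suite distinguishes slow tests.

import pytest

if __name__ == "__main__":
    # Run only the slow single-G-step cases; "-k" filters by test name.
    raise SystemExit(pytest.main(["-k", "single_g_step_slowtest", "-v"]))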
# -*- coding: utf-8 -*-
#
# K2HDKC DBaaS based on Trove
#
# Copyright 2020 Yahoo Japan Corporation
#
# K2HDKC DBaaS is a Database as a Service compatible with Trove which
# is DBaaS for OpenStack.
# Using K2HR3 as backend and incorporating it into Trove to provide
# DBaaS functionality. K2HDKC, K2HR3, CHMPX and K2HASH are components
# provided as AntPickax.
#
# For the full copyright and license information, please view
# the license file that was distributed with this source code.
#
# AUTHOR:   <NAME>
# CREATE:   Mon Sep 14 2020
# REVISION:
#
"""Test Package for K2hr3 Python Client."""

from __future__ import (absolute_import, division, print_function,
                        unicode_literals)
import logging
import unittest

from k2hr3client.policy import K2hr3Policy

LOG = logging.getLogger(__name__)


class TestK2hr3Policy(unittest.TestCase):
    """Tests the K2hr3Policy class.

    Simple usage (this class only):
        $ python -m unittest tests/test_resource.py

    Simple usage (all):
        $ python -m unittest tests
    """

    def setUp(self):
        """Sets up a test case."""
        RESOURCE_PATH = "yrn:yahoo:::demo:resource:my_resource"
        self.token = "<PASSWORD>"
        self.name = "testpolicy"
        self.effect = 'allow'
        self.action = ['yrn:yahoo::::action:read']
        self.resource = [RESOURCE_PATH]
        self.condition = None
        self.alias = []

    def tearDown(self):
        """Tears down a test case."""

    def test_k2hr3resource_construct(self):
        """Creates a K2hr3Policy instance."""
        k2hr3_policy = K2hr3Policy(self.token,
                                  name=self.name,
                                  effect=self.effect,
                                  action=self.action,
                                  resource=self.resource,
                                  condition=self.condition,
                                  alias=self.alias)
        self.assertIsInstance(k2hr3_policy, K2hr3Policy)

    def test_k2hr3resource_repr(self):
        """Represent a K2hr3Policy instance."""
        k2hr3_policy = K2hr3Policy(self.token,
                                  name=self.name,
                                  effect=self.effect,
                                  action=self.action,
                                  resource=self.resource,
                                  condition=self.condition,
                                  alias=self.alias)
        self.assertRegex(repr(k2hr3_policy), '<K2hr3Policy .*>')

#
# Local variables:
# tab-width: 4
# c-basic-offset: 4
# End:
# vim600: expandtab sw=4 ts=4 fdm=marker
# vim<600: expandtab sw=4 ts=4
#
[ "k2hr3client.policy.K2hr3Policy", "logging.getLogger" ]
[((786, 813), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (803, 813), False, 'import logging\n'), ((1581, 1733), 'k2hr3client.policy.K2hr3Policy', 'K2hr3Policy', (['self.token'], {'name': 'self.name', 'effect': 'self.effect', 'action': 'self.action', 'resource': 'self.resource', 'condition': 'self.condition', 'alias': 'self.alias'}), '(self.token, name=self.name, effect=self.effect, action=self.\n action, resource=self.resource, condition=self.condition, alias=self.alias)\n', (1592, 1733), False, 'from k2hr3client.policy import K2hr3Policy\n'), ((2107, 2259), 'k2hr3client.policy.K2hr3Policy', 'K2hr3Policy', (['self.token'], {'name': 'self.name', 'effect': 'self.effect', 'action': 'self.action', 'resource': 'self.resource', 'condition': 'self.condition', 'alias': 'self.alias'}), '(self.token, name=self.name, effect=self.effect, action=self.\n action, resource=self.resource, condition=self.condition, alias=self.alias)\n', (2118, 2259), False, 'from k2hr3client.policy import K2hr3Policy\n')]
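The docstring above shows the command-line invocation; the same case can also be run programmatically. The dotted module path below is an assumption about where this file lives in the repository.

import unittest

suite = unittest.defaultTestLoader.loadTestsFromName(
    "tests.test_policy.TestK2hr3Policy")  # hypothetical module path
unittest.TextTestRunner(verbosity=2).run(suite)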
import decimal
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, List, Optional

from ...payment.interface import GatewayResponse, PaymentGateway, PaymentMethodInfo

if TYPE_CHECKING:
    from ...app.models import App
    from ...payment.interface import PaymentData

APP_GATEWAY_ID_PREFIX = "app"


@dataclass
class PaymentAppData:
    app_pk: int
    name: str


def to_payment_app_id(app: "App", gateway_id: str) -> "str":
    return f"{APP_GATEWAY_ID_PREFIX}:{app.pk}:{gateway_id}"


def from_payment_app_id(app_gateway_id: str) -> Optional["PaymentAppData"]:
    splitted_id = app_gateway_id.split(":")
    if (
        len(splitted_id) == 3
        and splitted_id[0] == APP_GATEWAY_ID_PREFIX
        and all(splitted_id)
    ):
        try:
            app_pk = int(splitted_id[1])
        except (TypeError, ValueError):
            return None
        else:
            return PaymentAppData(app_pk, name=splitted_id[2])
    return None


def parse_list_payment_gateways_response(
    response_data: Any, app: "App"
) -> List["PaymentGateway"]:
    gateways = []
    for gateway_data in response_data:
        gateway_id = gateway_data.get("id")
        gateway_name = gateway_data.get("name")
        gateway_currencies = gateway_data.get("currencies")
        gateway_config = gateway_data.get("config")
        if gateway_id:
            gateways.append(
                PaymentGateway(
                    id=to_payment_app_id(app, gateway_id),
                    name=gateway_name,
                    currencies=gateway_currencies,
                    config=gateway_config,
                )
            )
    return gateways


def parse_payment_action_response(
    payment_information: "PaymentData",
    response_data: Any,
    transaction_kind: "str",
) -> "GatewayResponse":
    error = response_data.get("error")
    is_success = not error

    payment_method_info = None
    payment_method_data = response_data.get("payment_method")
    if payment_method_data:
        payment_method_info = PaymentMethodInfo(
            brand=payment_method_data.get("brand"),
            exp_month=payment_method_data.get("exp_month"),
            exp_year=payment_method_data.get("exp_year"),
            last_4=payment_method_data.get("last_4"),
            name=payment_method_data.get("name"),
            type=payment_method_data.get("type"),
        )

    amount = payment_information.amount
    if "amount" in response_data:
        try:
            amount = decimal.Decimal(response_data["amount"])
        except decimal.DecimalException:
            pass

    return GatewayResponse(
        action_required=response_data.get("action_required", False),
        action_required_data=response_data.get("action_required_data"),
        amount=amount,
        currency=payment_information.currency,
        customer_id=response_data.get("customer_id"),
        error=error,
        is_success=is_success,
        kind=response_data.get("kind", transaction_kind),
        payment_method_info=payment_method_info,
        raw_response=response_data,
        psp_reference=response_data.get("psp_reference"),
        transaction_id=response_data.get("transaction_id", ""),
        transaction_already_processed=response_data.get(
            "transaction_already_processed", False
        ),
    )
[ "decimal.Decimal" ]
[((2502, 2542), 'decimal.Decimal', 'decimal.Decimal', (["response_data['amount']"], {}), "(response_data['amount'])\n", (2517, 2542), False, 'import decimal\n')]
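A quick round trip through the two ID helpers above; `app` is a stand-in object with just a `pk` attribute, since the real App model is not importable outside the project.

from types import SimpleNamespace

app = SimpleNamespace(pk=7)  # hypothetical App instance
gateway_id = to_payment_app_id(app, "stripe")  # "app:7:stripe"
parsed = from_payment_app_id(gateway_id)
assert parsed is not None and (parsed.app_pk, parsed.name) == (7, "stripe")
assert from_payment_app_id("app::stripe") is None  # empty segment rejected by all()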
import numpy as np


def to_categorical(y, num_classes=None):
    """Converts a class vector (integers) to binary class matrix.

    E.g. for use with categorical_crossentropy.

    # Arguments
        y: class vector to be converted into a matrix
            (integers from 0 to num_classes).
        num_classes: total number of classes.

    # Returns
        A binary matrix representation of the input. The classes axis
        is placed last.
    """
    y = np.array(y, dtype='int')
    input_shape = y.shape
    if input_shape and input_shape[-1] == 1 and len(input_shape) > 1:
        input_shape = tuple(input_shape[:-1])
    y = y.ravel()
    if not num_classes:
        num_classes = np.max(y) + 1
    n = y.shape[0]
    categorical = np.zeros((n, num_classes), dtype=np.float32)
    categorical[np.arange(n), y] = 1
    output_shape = input_shape + (num_classes,)
    categorical = np.reshape(categorical, output_shape)
    return categorical


def map_label(y, map_dict={}, if_binary=False):
    """Convert a class vector (all types) to a class vector (integers)

    E.g. for use with to_categorical.
    ['a', 'b', 4, 7, 'a'] -> [0, 1, 2, 3, 0], ['a', 'b', 'a', 'a'] -> [0, 1, 0, 0]

    # Arguments
        y: class vector to be converted
        map_dict: mapping relations

    # Returns:
        A converted class vector and two dictionaries of mapping relations.
    """
    assert isinstance(map_dict, dict)
    y = np.array(y)
    y = y.ravel()
    if not map_dict:
        if_validate = False
    else:
        if if_binary and len(map_dict) != 2:
            raise ValueError(
                "Expected a dictionary of 2 elements in map_dict while received %d elements!"
                % len(map_dict))
        if_validate = True
    rev_map_dict = {}
    class_idx = 0
    int_y = []
    for label_element in y:
        if label_element not in map_dict:
            if if_validate:
                raise ValueError("Invalid label %s!" % str(label_element))
            map_dict[label_element] = class_idx
            rev_map_dict[class_idx] = label_element
            class_idx += 1
            # After the increment, class_idx equals the number of classes seen
            # so far; a binary mapping may assign at most two before failing.
            if if_binary and class_idx > 2:
                raise ValueError("Found more than 2 classes in label inputs!")
        int_y.append(map_dict[label_element])
    int_y = np.array(int_y, dtype='int')
    return int_y, map_dict, rev_map_dict


def get_classnum(y):
    """Get classnum from one-hot label inputs 'y'.

    Note that this function will not validate the label inputs

    # Arguments
        y: label inputs

    # Returns:
        The number of classes in 'y'
    """
    assert isinstance(y, np.ndarray)
    inputshape = y.shape
    if len(inputshape) == 2:
        return inputshape[-1]
    else:
        raise ValueError("Input labels should be a 2-dim one-hot vector!")
[ "numpy.zeros", "numpy.max", "numpy.arange", "numpy.array", "numpy.reshape" ]
[((465, 489), 'numpy.array', 'np.array', (['y'], {'dtype': '"""int"""'}), "(y, dtype='int')\n", (473, 489), True, 'import numpy as np\n'), ((747, 791), 'numpy.zeros', 'np.zeros', (['(n, num_classes)'], {'dtype': 'np.float32'}), '((n, num_classes), dtype=np.float32)\n', (755, 791), True, 'import numpy as np\n'), ((895, 932), 'numpy.reshape', 'np.reshape', (['categorical', 'output_shape'], {}), '(categorical, output_shape)\n', (905, 932), True, 'import numpy as np\n'), ((1443, 1454), 'numpy.array', 'np.array', (['y'], {}), '(y)\n', (1451, 1454), True, 'import numpy as np\n'), ((2281, 2309), 'numpy.array', 'np.array', (['int_y'], {'dtype': '"""int"""'}), "(int_y, dtype='int')\n", (2289, 2309), True, 'import numpy as np\n'), ((696, 705), 'numpy.max', 'np.max', (['y'], {}), '(y)\n', (702, 705), True, 'import numpy as np\n'), ((808, 820), 'numpy.arange', 'np.arange', (['n'], {}), '(n)\n', (817, 820), True, 'import numpy as np\n')]
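With the helpers above in scope, the docstring examples can be checked directly:

labels = ['a', 'b', 4, 'a']
int_y, fwd, rev = map_label(labels)     # array([0, 1, 2, 0]); fwd maps 'a' -> 0, ...
one_hot = to_categorical(int_y)      # num_classes inferred as max + 1 == 3
assert one_hot.shape == (4, 3)
assert get_classnum(one_hot) == 3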
"""ACME protocol messages.""" import datetime from collections.abc import Hashable import json from typing import Any from typing import Dict from typing import Iterator from typing import List from typing import Mapping from typing import MutableMapping from typing import Optional from typing import Tuple from typing import Type from typing import TYPE_CHECKING from typing import TypeVar from typing import Union import josepy as jose from acme import challenges from acme import errors from acme import fields from acme import jws from acme import util from acme.mixins import ResourceMixin if TYPE_CHECKING: from typing_extensions import Protocol # pragma: no cover else: Protocol = object OLD_ERROR_PREFIX = "urn:acme:error:" ERROR_PREFIX = "urn:ietf:params:acme:error:" ERROR_CODES = { 'accountDoesNotExist': 'The request specified an account that does not exist', 'alreadyRevoked': 'The request specified a certificate to be revoked that has' \ ' already been revoked', 'badCSR': 'The CSR is unacceptable (e.g., due to a short key)', 'badNonce': 'The client sent an unacceptable anti-replay nonce', 'badPublicKey': 'The JWS was signed by a public key the server does not support', 'badRevocationReason': 'The revocation reason provided is not allowed by the server', 'badSignatureAlgorithm': 'The JWS was signed with an algorithm the server does not support', 'caa': 'Certification Authority Authorization (CAA) records forbid the CA from issuing' \ ' a certificate', 'compound': 'Specific error conditions are indicated in the "subproblems" array', 'connection': ('The server could not connect to the client to verify the' ' domain'), 'dns': 'There was a problem with a DNS query during identifier validation', 'dnssec': 'The server could not validate a DNSSEC signed domain', 'incorrectResponse': 'Response received didn\'t match the challenge\'s requirements', # deprecate invalidEmail 'invalidEmail': 'The provided email for a registration was invalid', 'invalidContact': 'The provided contact URI was invalid', 'malformed': 'The request message was malformed', 'rejectedIdentifier': 'The server will not issue certificates for the identifier', 'orderNotReady': 'The request attempted to finalize an order that is not ready to be finalized', 'rateLimited': 'There were too many requests of a given type', 'serverInternal': 'The server experienced an internal error', 'tls': 'The server experienced a TLS error during domain verification', 'unauthorized': 'The client lacks sufficient authorization', 'unsupportedContact': 'A contact URL for an account used an unsupported protocol scheme', 'unknownHost': 'The server could not resolve a domain name', 'unsupportedIdentifier': 'An identifier is of an unsupported type', 'externalAccountRequired': 'The server requires external account binding', } ERROR_TYPE_DESCRIPTIONS = dict( (ERROR_PREFIX + name, desc) for name, desc in ERROR_CODES.items()) ERROR_TYPE_DESCRIPTIONS.update(dict( # add errors with old prefix, deprecate me (OLD_ERROR_PREFIX + name, desc) for name, desc in ERROR_CODES.items())) def is_acme_error(err: BaseException) -> bool: """Check if argument is an ACME error.""" if isinstance(err, Error) and (err.typ is not None): return (ERROR_PREFIX in err.typ) or (OLD_ERROR_PREFIX in err.typ) return False class Error(jose.JSONObjectWithFields, errors.Error): """ACME error. 
https://tools.ietf.org/html/draft-ietf-appsawg-http-problem-00 :ivar str typ: :ivar str title: :ivar str detail: """ typ: str = jose.field('type', omitempty=True, default='about:blank') title: str = jose.field('title', omitempty=True) detail: str = jose.field('detail', omitempty=True) @classmethod def with_code(cls, code: str, **kwargs: Any) -> 'Error': """Create an Error instance with an ACME Error code. :str code: An ACME error code, like 'dnssec'. :kwargs: kwargs to pass to Error. """ if code not in ERROR_CODES: raise ValueError("The supplied code: %s is not a known ACME error" " code" % code) typ = ERROR_PREFIX + code # Mypy will not understand that the Error constructor accepts a named argument # "typ" because of josepy magic. Let's ignore the type check here. return cls(typ=typ, **kwargs) @property def description(self) -> Optional[str]: """Hardcoded error description based on its type. :returns: Description if standard ACME error or ``None``. :rtype: str """ return ERROR_TYPE_DESCRIPTIONS.get(self.typ) @property def code(self) -> Optional[str]: """ACME error code. Basically self.typ without the ERROR_PREFIX. :returns: error code if standard ACME code or ``None``. :rtype: str """ code = str(self.typ).rsplit(':', maxsplit=1)[-1] if code in ERROR_CODES: return code return None def __str__(self) -> str: return b' :: '.join( part.encode('ascii', 'backslashreplace') for part in (self.typ, self.description, self.detail, self.title) if part is not None).decode() class _Constant(jose.JSONDeSerializable, Hashable): """ACME constant.""" __slots__ = ('name',) POSSIBLE_NAMES: Dict[str, '_Constant'] = NotImplemented def __init__(self, name: str) -> None: super().__init__() self.POSSIBLE_NAMES[name] = self # pylint: disable=unsupported-assignment-operation self.name = name def to_partial_json(self) -> str: return self.name @classmethod def from_json(cls, jobj: str) -> '_Constant': if jobj not in cls.POSSIBLE_NAMES: # pylint: disable=unsupported-membership-test raise jose.DeserializationError( '{0} not recognized'.format(cls.__name__)) return cls.POSSIBLE_NAMES[jobj] def __repr__(self) -> str: return '{0}({1})'.format(self.__class__.__name__, self.name) def __eq__(self, other: Any) -> bool: return isinstance(other, type(self)) and other.name == self.name def __hash__(self) -> int: return hash((self.__class__, self.name)) class Status(_Constant): """ACME "status" field.""" POSSIBLE_NAMES: Dict[str, _Constant] = {} STATUS_UNKNOWN = Status('unknown') STATUS_PENDING = Status('pending') STATUS_PROCESSING = Status('processing') STATUS_VALID = Status('valid') STATUS_INVALID = Status('invalid') STATUS_REVOKED = Status('revoked') STATUS_READY = Status('ready') STATUS_DEACTIVATED = Status('deactivated') class IdentifierType(_Constant): """ACME identifier type.""" POSSIBLE_NAMES: Dict[str, _Constant] = {} IDENTIFIER_FQDN = IdentifierType('dns') # IdentifierDNS in Boulder IDENTIFIER_IP = IdentifierType('ip') # IdentifierIP in pebble - not in Boulder yet class Identifier(jose.JSONObjectWithFields): """ACME identifier. :ivar IdentifierType typ: :ivar str value: """ typ: IdentifierType = jose.field('type', decoder=IdentifierType.from_json) value: str = jose.field('value') class HasResourceType(Protocol): """ Represents a class with a resource_type class parameter of type string. 
""" resource_type: str = NotImplemented GenericHasResourceType = TypeVar("GenericHasResourceType", bound=HasResourceType) class Directory(jose.JSONDeSerializable): """Directory.""" _REGISTERED_TYPES: Dict[str, Type[HasResourceType]] = {} class Meta(jose.JSONObjectWithFields): """Directory Meta.""" _terms_of_service: str = jose.field('terms-of-service', omitempty=True) _terms_of_service_v2: str = jose.field('termsOfService', omitempty=True) website: str = jose.field('website', omitempty=True) caa_identities: List[str] = jose.field('caaIdentities', omitempty=True) external_account_required: bool = jose.field('externalAccountRequired', omitempty=True) def __init__(self, **kwargs: Any) -> None: kwargs = {self._internal_name(k): v for k, v in kwargs.items()} super().__init__(**kwargs) @property def terms_of_service(self) -> str: """URL for the CA TOS""" return self._terms_of_service or self._terms_of_service_v2 def __iter__(self) -> Iterator[str]: # When iterating over fields, use the external name 'terms_of_service' instead of # the internal '_terms_of_service'. for name in super().__iter__(): yield name[1:] if name == '_terms_of_service' else name def _internal_name(self, name: str) -> str: return '_' + name if name == 'terms_of_service' else name @classmethod def _canon_key(cls, key: Union[str, HasResourceType, Type[HasResourceType]]) -> str: if isinstance(key, str): return key return key.resource_type @classmethod def register(cls, resource_body_cls: Type[GenericHasResourceType]) -> Type[GenericHasResourceType]: """Register resource.""" resource_type = resource_body_cls.resource_type assert resource_type not in cls._REGISTERED_TYPES cls._REGISTERED_TYPES[resource_type] = resource_body_cls return resource_body_cls def __init__(self, jobj: Mapping[str, Any]) -> None: canon_jobj = util.map_keys(jobj, self._canon_key) # TODO: check that everything is an absolute URL; acme-spec is # not clear on that self._jobj = canon_jobj def __getattr__(self, name: str) -> Any: try: return self[name.replace('_', '-')] except KeyError as error: raise AttributeError(str(error)) def __getitem__(self, name: Union[str, HasResourceType, Type[HasResourceType]]) -> Any: try: return self._jobj[self._canon_key(name)] except KeyError: raise KeyError('Directory field "' + self._canon_key(name) + '" not found') def to_partial_json(self) -> Dict[str, Any]: return self._jobj @classmethod def from_json(cls, jobj: MutableMapping[str, Any]) -> 'Directory': jobj['meta'] = cls.Meta.from_json(jobj.pop('meta', {})) return cls(jobj) class Resource(jose.JSONObjectWithFields): """ACME Resource. :ivar acme.messages.ResourceBody body: Resource body. """ body: "ResourceBody" = jose.field('body') class ResourceWithURI(Resource): """ACME Resource with URI. :ivar str uri: Location of the resource. 
""" uri: str = jose.field('uri') # no ChallengeResource.uri class ResourceBody(jose.JSONObjectWithFields): """ACME Resource Body.""" class ExternalAccountBinding: """ACME External Account Binding""" @classmethod def from_data(cls, account_public_key: jose.JWK, kid: str, hmac_key: str, directory: Directory) -> Dict[str, Any]: """Create External Account Binding Resource from contact details, kid and hmac.""" key_json = json.dumps(account_public_key.to_partial_json()).encode() decoded_hmac_key = jose.b64.b64decode(hmac_key) url = directory["newAccount"] eab = jws.JWS.sign(key_json, jose.jwk.JWKOct(key=decoded_hmac_key), jose.jwa.HS256, None, url, kid) return eab.to_partial_json() GenericRegistration = TypeVar('GenericRegistration', bound='Registration') class Registration(ResourceBody): """Registration Resource Body. :ivar jose.JWK key: Public key. :ivar tuple contact: Contact information following ACME spec, `tuple` of `str`. :ivar str agreement: """ # on new-reg key server ignores 'key' and populates it based on # JWS.signature.combined.jwk key: jose.JWK = jose.field('key', omitempty=True, decoder=jose.JWK.from_json) # Contact field implements special behavior to allow messages that clear existing # contacts while not expecting the `contact` field when loading from json. # This is implemented in the constructor and *_json methods. contact: Tuple[str, ...] = jose.field('contact', omitempty=True, default=()) agreement: str = jose.field('agreement', omitempty=True) status: Status = jose.field('status', omitempty=True) terms_of_service_agreed: bool = jose.field('termsOfServiceAgreed', omitempty=True) only_return_existing: bool = jose.field('onlyReturnExisting', omitempty=True) external_account_binding: Dict[str, Any] = jose.field('externalAccountBinding', omitempty=True) phone_prefix = 'tel:' email_prefix = 'mailto:' @classmethod def from_data(cls: Type[GenericRegistration], phone: Optional[str] = None, email: Optional[str] = None, external_account_binding: Optional[Dict[str, Any]] = None, **kwargs: Any) -> GenericRegistration: """ Create registration resource from contact details. The `contact` keyword being passed to a Registration object is meaningful, so this function represents empty iterables in its kwargs by passing on an empty `tuple`. """ # Note if `contact` was in kwargs. contact_provided = 'contact' in kwargs # Pop `contact` from kwargs and add formatted email or phone numbers details = list(kwargs.pop('contact', ())) if phone is not None: details.append(cls.phone_prefix + phone) if email is not None: details.extend([cls.email_prefix + mail for mail in email.split(',')]) # Insert formatted contact information back into kwargs # or insert an empty tuple if `contact` provided. 
if details or contact_provided: kwargs['contact'] = tuple(details) if external_account_binding: kwargs['external_account_binding'] = external_account_binding return cls(**kwargs) def __init__(self, **kwargs: Any) -> None: """Note if the user provides a value for the `contact` member.""" if 'contact' in kwargs and kwargs['contact'] is not None: # Avoid the __setattr__ used by jose.TypedJSONObjectWithFields object.__setattr__(self, '_add_contact', True) super().__init__(**kwargs) def _filter_contact(self, prefix: str) -> Tuple[str, ...]: return tuple( detail[len(prefix):] for detail in self.contact # pylint: disable=not-an-iterable if detail.startswith(prefix)) def _add_contact_if_appropriate(self, jobj: Dict[str, Any]) -> Dict[str, Any]: """ The `contact` member of Registration objects should not be required when de-serializing (as it would be if the Fields' `omitempty` flag were `False`), but it should be included in serializations if it was provided. :param jobj: Dictionary containing this Registrations' data :type jobj: dict :returns: Dictionary containing Registrations data to transmit to the server :rtype: dict """ if getattr(self, '_add_contact', False): jobj['contact'] = self.encode('contact') return jobj def to_partial_json(self) -> Dict[str, Any]: """Modify josepy.JSONDeserializable.to_partial_json()""" jobj = super().to_partial_json() return self._add_contact_if_appropriate(jobj) def fields_to_partial_json(self) -> Dict[str, Any]: """Modify josepy.JSONObjectWithFields.fields_to_partial_json()""" jobj = super().fields_to_partial_json() return self._add_contact_if_appropriate(jobj) @property def phones(self) -> Tuple[str, ...]: """All phones found in the ``contact`` field.""" return self._filter_contact(self.phone_prefix) @property def emails(self) -> Tuple[str, ...]: """All emails found in the ``contact`` field.""" return self._filter_contact(self.email_prefix) @Directory.register class NewRegistration(ResourceMixin, Registration): """New registration.""" resource_type = 'new-reg' resource: str = fields.resource(resource_type) class UpdateRegistration(ResourceMixin, Registration): """Update registration.""" resource_type = 'reg' resource: str = fields.resource(resource_type) class RegistrationResource(ResourceWithURI): """Registration Resource. :ivar acme.messages.Registration body: :ivar str new_authzr_uri: Deprecated. Do not use. :ivar str terms_of_service: URL for the CA TOS. """ body: Registration = jose.field('body', decoder=Registration.from_json) new_authzr_uri: str = jose.field('new_authzr_uri', omitempty=True) terms_of_service: str = jose.field('terms_of_service', omitempty=True) class ChallengeBody(ResourceBody): """Challenge Resource Body. .. todo:: Confusingly, this has a similar name to `.challenges.Challenge`, as well as `.achallenges.AnnotatedChallenge`. Please use names such as ``challb`` to distinguish instances of this class from ``achall``. :ivar acme.challenges.Challenge: Wrapped challenge. Conveniently, all challenge fields are proxied, i.e. you can call ``challb.x`` to get ``challb.chall.x`` contents. :ivar acme.messages.Status status: :ivar datetime.datetime validated: :ivar messages.Error error: """ __slots__ = ('chall',) # ACMEv1 has a "uri" field in challenges. ACMEv2 has a "url" field. This # challenge object supports either one, but should be accessed through the # name "uri". In Client.answer_challenge, whichever one is set will be # used. 
_uri: str = jose.field('uri', omitempty=True, default=None) _url: str = jose.field('url', omitempty=True, default=None) status: Status = jose.field('status', decoder=Status.from_json, omitempty=True, default=STATUS_PENDING) validated: datetime.datetime = fields.rfc3339('validated', omitempty=True) error: Error = jose.field('error', decoder=Error.from_json, omitempty=True, default=None) def __init__(self, **kwargs: Any) -> None: kwargs = {self._internal_name(k): v for k, v in kwargs.items()} super().__init__(**kwargs) def encode(self, name: str) -> Any: return super().encode(self._internal_name(name)) def to_partial_json(self) -> Dict[str, Any]: jobj = super().to_partial_json() jobj.update(self.chall.to_partial_json()) return jobj @classmethod def fields_from_json(cls, jobj: Mapping[str, Any]) -> Dict[str, Any]: jobj_fields = super().fields_from_json(jobj) jobj_fields['chall'] = challenges.Challenge.from_json(jobj) return jobj_fields @property def uri(self) -> str: """The URL of this challenge.""" return self._url or self._uri def __getattr__(self, name: str) -> Any: return getattr(self.chall, name) def __iter__(self) -> Iterator[str]: # When iterating over fields, use the external name 'uri' instead of # the internal '_uri'. for name in super().__iter__(): yield name[1:] if name == '_uri' else name def _internal_name(self, name: str) -> str: return '_' + name if name == 'uri' else name class ChallengeResource(Resource): """Challenge Resource. :ivar acme.messages.ChallengeBody body: :ivar str authzr_uri: URI found in the 'up' ``Link`` header. """ body: ChallengeBody = jose.field('body', decoder=ChallengeBody.from_json) authzr_uri: str = jose.field('authzr_uri') @property def uri(self) -> str: """The URL of the challenge body.""" return self.body.uri # pylint: disable=no-member class Authorization(ResourceBody): """Authorization Resource Body. :ivar acme.messages.Identifier identifier: :ivar list challenges: `list` of `.ChallengeBody` :ivar tuple combinations: Challenge combinations (`tuple` of `tuple` of `int`, as opposed to `list` of `list` from the spec). :ivar acme.messages.Status status: :ivar datetime.datetime expires: """ identifier: Identifier = jose.field('identifier', decoder=Identifier.from_json, omitempty=True) challenges: List[ChallengeBody] = jose.field('challenges', omitempty=True) combinations: Tuple[Tuple[int, ...], ...] = jose.field('combinations', omitempty=True) status: Status = jose.field('status', omitempty=True, decoder=Status.from_json) # TODO: 'expires' is allowed for Authorization Resources in # general, but for Key Authorization '[t]he "expires" field MUST # be absent'... then acme-spec gives example with 'expires' # present... That's confusing! expires: datetime.datetime = fields.rfc3339('expires', omitempty=True) wildcard: bool = jose.field('wildcard', omitempty=True) # Mypy does not understand the josepy magic happening here, and falsely claims # that challenge is redefined. Let's ignore the type check here. 
@challenges.decoder # type: ignore def challenges(value: List[Dict[str, Any]]) -> Tuple[ChallengeBody, ...]: # type: ignore[misc] # pylint: disable=no-self-argument,missing-function-docstring return tuple(ChallengeBody.from_json(chall) for chall in value) @property def resolved_combinations(self) -> Tuple[Tuple[ChallengeBody, ...], ...]: """Combinations with challenges instead of indices.""" return tuple(tuple(self.challenges[idx] for idx in combo) for combo in self.combinations) # pylint: disable=not-an-iterable @Directory.register class NewAuthorization(ResourceMixin, Authorization): """New authorization.""" resource_type = 'new-authz' resource: str = fields.resource(resource_type) class UpdateAuthorization(ResourceMixin, Authorization): """Update authorization.""" resource_type = 'authz' resource: str = fields.resource(resource_type) class AuthorizationResource(ResourceWithURI): """Authorization Resource. :ivar acme.messages.Authorization body: :ivar str new_cert_uri: Deprecated. Do not use. """ body: Authorization = jose.field('body', decoder=Authorization.from_json) new_cert_uri: str = jose.field('new_cert_uri', omitempty=True) @Directory.register class CertificateRequest(ResourceMixin, jose.JSONObjectWithFields): """ACME new-cert request. :ivar jose.ComparableX509 csr: `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509` """ resource_type = 'new-cert' resource: str = fields.resource(resource_type) csr: jose.ComparableX509 = jose.field('csr', decoder=jose.decode_csr, encoder=jose.encode_csr) class CertificateResource(ResourceWithURI): """Certificate Resource. :ivar josepy.util.ComparableX509 body: `OpenSSL.crypto.X509` wrapped in `.ComparableX509` :ivar str cert_chain_uri: URI found in the 'up' ``Link`` header :ivar tuple authzrs: `tuple` of `AuthorizationResource`. """ cert_chain_uri: str = jose.field('cert_chain_uri') authzrs: Tuple[AuthorizationResource, ...] = jose.field('authzrs') @Directory.register class Revocation(ResourceMixin, jose.JSONObjectWithFields): """Revocation message. :ivar jose.ComparableX509 certificate: `OpenSSL.crypto.X509` wrapped in `jose.ComparableX509` """ resource_type = 'revoke-cert' resource: str = fields.resource(resource_type) certificate: jose.ComparableX509 = jose.field( 'certificate', decoder=jose.decode_cert, encoder=jose.encode_cert) reason: int = jose.field('reason') class Order(ResourceBody): """Order Resource Body. :ivar identifiers: List of identifiers for the certificate. :vartype identifiers: `list` of `.Identifier` :ivar acme.messages.Status status: :ivar authorizations: URLs of authorizations. :vartype authorizations: `list` of `str` :ivar str certificate: URL to download certificate as a fullchain PEM. :ivar str finalize: URL to POST to to request issuance once all authorizations have "valid" status. :ivar datetime.datetime expires: When the order expires. :ivar ~.Error error: Any error that occurred during finalization, if applicable. """ identifiers: List[Identifier] = jose.field('identifiers', omitempty=True) status: Status = jose.field('status', decoder=Status.from_json, omitempty=True) authorizations: List[str] = jose.field('authorizations', omitempty=True) certificate: str = jose.field('certificate', omitempty=True) finalize: str = jose.field('finalize', omitempty=True) expires: datetime.datetime = fields.rfc3339('expires', omitempty=True) error: Error = jose.field('error', omitempty=True, decoder=Error.from_json) # Mypy does not understand the josepy magic happening here, and falsely claims # that identifiers is redefined. 
Let's ignore the type check here. @identifiers.decoder # type: ignore def identifiers(value: List[Dict[str, Any]]) -> Tuple[Identifier, ...]: # type: ignore[misc] # pylint: disable=no-self-argument,missing-function-docstring return tuple(Identifier.from_json(identifier) for identifier in value) class OrderResource(ResourceWithURI): """Order Resource. :ivar acme.messages.Order body: :ivar bytes csr_pem: The CSR this Order will be finalized with. :ivar authorizations: Fully-fetched AuthorizationResource objects. :vartype authorizations: `list` of `acme.messages.AuthorizationResource` :ivar str fullchain_pem: The fetched contents of the certificate URL produced once the order was finalized, if it's present. :ivar alternative_fullchains_pem: The fetched contents of alternative certificate chain URLs produced once the order was finalized, if present and requested during finalization. :vartype alternative_fullchains_pem: `list` of `str` """ body: Order = jose.field('body', decoder=Order.from_json) csr_pem: bytes = jose.field('csr_pem', omitempty=True) authorizations: List[AuthorizationResource] = jose.field('authorizations') fullchain_pem: str = jose.field('fullchain_pem', omitempty=True) alternative_fullchains_pem: List[str] = jose.field('alternative_fullchains_pem', omitempty=True) @Directory.register class NewOrder(Order): """New order.""" resource_type = 'new-order'
[ "acme.util.map_keys", "josepy.b64.b64decode", "acme.challenges.Challenge.from_json", "typing.TypeVar", "josepy.jwk.JWKOct", "josepy.field", "acme.fields.rfc3339", "acme.fields.resource" ]
[((7489, 7545), 'typing.TypeVar', 'TypeVar', (['"""GenericHasResourceType"""'], {'bound': 'HasResourceType'}), "('GenericHasResourceType', bound=HasResourceType)\n", (7496, 7545), False, 'from typing import TypeVar\n'), ((11605, 11657), 'typing.TypeVar', 'TypeVar', (['"""GenericRegistration"""'], {'bound': '"""Registration"""'}), "('GenericRegistration', bound='Registration')\n", (11612, 11657), False, 'from typing import TypeVar\n'), ((3693, 3750), 'josepy.field', 'jose.field', (['"""type"""'], {'omitempty': '(True)', 'default': '"""about:blank"""'}), "('type', omitempty=True, default='about:blank')\n", (3703, 3750), True, 'import josepy as jose\n'), ((3768, 3803), 'josepy.field', 'jose.field', (['"""title"""'], {'omitempty': '(True)'}), "('title', omitempty=True)\n", (3778, 3803), True, 'import josepy as jose\n'), ((3822, 3858), 'josepy.field', 'jose.field', (['"""detail"""'], {'omitempty': '(True)'}), "('detail', omitempty=True)\n", (3832, 3858), True, 'import josepy as jose\n'), ((7205, 7257), 'josepy.field', 'jose.field', (['"""type"""'], {'decoder': 'IdentifierType.from_json'}), "('type', decoder=IdentifierType.from_json)\n", (7215, 7257), True, 'import josepy as jose\n'), ((7275, 7294), 'josepy.field', 'jose.field', (['"""value"""'], {}), "('value')\n", (7285, 7294), True, 'import josepy as jose\n'), ((10610, 10628), 'josepy.field', 'jose.field', (['"""body"""'], {}), "('body')\n", (10620, 10628), True, 'import josepy as jose\n'), ((10765, 10782), 'josepy.field', 'jose.field', (['"""uri"""'], {}), "('uri')\n", (10775, 10782), True, 'import josepy as jose\n'), ((12013, 12074), 'josepy.field', 'jose.field', (['"""key"""'], {'omitempty': '(True)', 'decoder': 'jose.JWK.from_json'}), "('key', omitempty=True, decoder=jose.JWK.from_json)\n", (12023, 12074), True, 'import josepy as jose\n'), ((12336, 12385), 'josepy.field', 'jose.field', (['"""contact"""'], {'omitempty': '(True)', 'default': '()'}), "('contact', omitempty=True, default=())\n", (12346, 12385), True, 'import josepy as jose\n'), ((12407, 12446), 'josepy.field', 'jose.field', (['"""agreement"""'], {'omitempty': '(True)'}), "('agreement', omitempty=True)\n", (12417, 12446), True, 'import josepy as jose\n'), ((12468, 12504), 'josepy.field', 'jose.field', (['"""status"""'], {'omitempty': '(True)'}), "('status', omitempty=True)\n", (12478, 12504), True, 'import josepy as jose\n'), ((12541, 12591), 'josepy.field', 'jose.field', (['"""termsOfServiceAgreed"""'], {'omitempty': '(True)'}), "('termsOfServiceAgreed', omitempty=True)\n", (12551, 12591), True, 'import josepy as jose\n'), ((12625, 12673), 'josepy.field', 'jose.field', (['"""onlyReturnExisting"""'], {'omitempty': '(True)'}), "('onlyReturnExisting', omitempty=True)\n", (12635, 12673), True, 'import josepy as jose\n'), ((12721, 12773), 'josepy.field', 'jose.field', (['"""externalAccountBinding"""'], {'omitempty': '(True)'}), "('externalAccountBinding', omitempty=True)\n", (12731, 12773), True, 'import josepy as jose\n'), ((16388, 16418), 'acme.fields.resource', 'fields.resource', (['resource_type'], {}), '(resource_type)\n', (16403, 16418), False, 'from acme import fields\n'), ((16553, 16583), 'acme.fields.resource', 'fields.resource', (['resource_type'], {}), '(resource_type)\n', (16568, 16583), False, 'from acme import fields\n'), ((16845, 16895), 'josepy.field', 'jose.field', (['"""body"""'], {'decoder': 'Registration.from_json'}), "('body', decoder=Registration.from_json)\n", (16855, 16895), True, 'import josepy as jose\n'), ((16922, 16966), 'josepy.field', 'jose.field', (['"""new_authzr_uri"""'], {'omitempty': '(True)'}), "('new_authzr_uri', omitempty=True)\n", (16932, 16966), True, 'import josepy as jose\n'), ((16995, 17041), 'josepy.field', 'jose.field', (['"""terms_of_service"""'], {'omitempty': '(True)'}), "('terms_of_service', omitempty=True)\n", (17005, 17041), True, 'import josepy as jose\n'), ((17950, 17997), 'josepy.field', 'jose.field', (['"""uri"""'], {'omitempty': '(True)', 'default': 'None'}), "('uri', omitempty=True, default=None)\n", (17960, 17997), True, 'import josepy as jose\n'), ((18014, 18061), 'josepy.field', 'jose.field', (['"""url"""'], {'omitempty': '(True)', 'default': 'None'}), "('url', omitempty=True, default=None)\n", (18024, 18061), True, 'import josepy as jose\n'), ((18083, 18174), 'josepy.field', 'jose.field', (['"""status"""'], {'decoder': 'Status.from_json', 'omitempty': '(True)', 'default': 'STATUS_PENDING'}), "('status', decoder=Status.from_json, omitempty=True, default=\n STATUS_PENDING)\n", (18093, 18174), True, 'import josepy as jose\n'), ((18229, 18272), 'acme.fields.rfc3339', 'fields.rfc3339', (['"""validated"""'], {'omitempty': '(True)'}), "('validated', omitempty=True)\n", (18243, 18272), False, 'from acme import fields\n'), ((18292, 18366), 'josepy.field', 'jose.field', (['"""error"""'], {'decoder': 'Error.from_json', 'omitempty': '(True)', 'default': 'None'}), "('error', decoder=Error.from_json, omitempty=True, default=None)\n", (18302, 18366), True, 'import josepy as jose\n'), ((19807, 19858), 'josepy.field', 'jose.field', (['"""body"""'], {'decoder': 'ChallengeBody.from_json'}), "('body', decoder=ChallengeBody.from_json)\n", (19817, 19858), True, 'import josepy as jose\n'), ((19881, 19905), 'josepy.field', 'jose.field', (['"""authzr_uri"""'], {}), "('authzr_uri')\n", (19891, 19905), True, 'import josepy as jose\n'), ((20477, 20547), 'josepy.field', 'jose.field', (['"""identifier"""'], {'decoder': 'Identifier.from_json', 'omitempty': '(True)'}), "('identifier', decoder=Identifier.from_json, omitempty=True)\n", (20487, 20547), True, 'import josepy as jose\n'), ((20586, 20626), 'josepy.field', 'jose.field', (['"""challenges"""'], {'omitempty': '(True)'}), "('challenges', omitempty=True)\n", (20596, 20626), True, 'import josepy as jose\n'), ((20675, 20717), 'josepy.field', 'jose.field', (['"""combinations"""'], {'omitempty': '(True)'}), "('combinations', omitempty=True)\n", (20685, 20717), True, 'import josepy as jose\n'), ((20740, 20802), 'josepy.field', 'jose.field', (['"""status"""'], {'omitempty': '(True)', 'decoder': 'Status.from_json'}), "('status', omitempty=True, decoder=Status.from_json)\n", (20750, 20802), True, 'import josepy as jose\n'), ((21068, 21109), 'acme.fields.rfc3339', 'fields.rfc3339', (['"""expires"""'], {'omitempty': '(True)'}), "('expires', omitempty=True)\n", (21082, 21109), False, 'from acme import fields\n'), ((21131, 21169), 'josepy.field', 'jose.field', (['"""wildcard"""'], {'omitempty': '(True)'}), "('wildcard', omitempty=True)\n", (21141, 21169), True, 'import josepy as jose\n'), ((22065, 22095), 'acme.fields.resource', 'fields.resource', (['resource_type'], {}), '(resource_type)\n', (22080, 22095), False, 'from acme import fields\n'), ((22235, 22265), 'acme.fields.resource', 'fields.resource', (['resource_type'], {}), '(resource_type)\n', (22250, 22265), False, 'from acme import fields\n'), ((22477, 22528), 'josepy.field', 'jose.field', (['"""body"""'], {'decoder': 'Authorization.from_json'}), "('body', decoder=Authorization.from_json)\n", (22487, 22497), True, 'import josepy as jose\n'), ((22553, 22595), 'josepy.field', 'jose.field', (['"""new_cert_uri"""'], {'omitempty': '(True)'}), "('new_cert_uri', omitempty=True)\n", (22563, 22595), True, 'import josepy as jose\n'), ((22874, 22904), 'acme.fields.resource', 'fields.resource', (['resource_type'], {}), '(resource_type)\n', (22889, 22904), False, 'from acme import fields\n'), ((22936, 23003), 'josepy.field', 'jose.field', (['"""csr"""'], {'decoder': 'jose.decode_csr', 'encoder': 'jose.encode_csr'}), "('csr', decoder=jose.decode_csr, encoder=jose.encode_csr)\n", (22946, 23003), True, 'import josepy as jose\n'), ((23346, 23374), 'josepy.field', 'jose.field', (['"""cert_chain_uri"""'], {}), "('cert_chain_uri')\n", (23356, 23374), True, 'import josepy as jose\n'), ((23424, 23445), 'josepy.field', 'jose.field', (['"""authzrs"""'], {}), "('authzrs')\n", (23434, 23445), True, 'import josepy as jose\n'), ((23725, 23755), 'acme.fields.resource', 'fields.resource', (['resource_type'], {}), '(resource_type)\n', (23740, 23755), False, 'from acme import fields\n'), ((23795, 23872), 'josepy.field', 'jose.field', (['"""certificate"""'], {'decoder': 'jose.decode_cert', 'encoder': 'jose.encode_cert'}), "('certificate', decoder=jose.decode_cert, encoder=jose.encode_cert)\n", (23805, 23872), True, 'import josepy as jose\n'), ((23900, 23920), 'josepy.field', 'jose.field', (['"""reason"""'], {}), "('reason')\n", (23910, 23920), True, 'import josepy as jose\n'), ((24604, 24645), 'josepy.field', 'jose.field', (['"""identifiers"""'], {'omitempty': '(True)'}), "('identifiers', omitempty=True)\n", (24614, 24645), True, 'import josepy as jose\n'), ((24667, 24729), 'josepy.field', 'jose.field', (['"""status"""'], {'decoder': 'Status.from_json', 'omitempty': '(True)'}), "('status', decoder=Status.from_json, omitempty=True)\n", (24677, 24729), True, 'import josepy as jose\n'), ((24762, 24806), 'josepy.field', 'jose.field', (['"""authorizations"""'], {'omitempty': '(True)'}), "('authorizations', omitempty=True)\n", (24772, 24806), True, 'import josepy as jose\n'), ((24830, 24871), 'josepy.field', 'jose.field', (['"""certificate"""'], {'omitempty': '(True)'}), "('certificate', omitempty=True)\n", (24840, 24871), True, 'import josepy as jose\n'), ((24892, 24930), 'josepy.field', 'jose.field', (['"""finalize"""'], {'omitempty': '(True)'}), "('finalize', omitempty=True)\n", (24902, 24930), True, 'import josepy as jose\n'), ((24964, 25005), 'acme.fields.rfc3339', 'fields.rfc3339', (['"""expires"""'], {'omitempty': '(True)'}), "('expires', omitempty=True)\n", (24978, 25005), False, 'from acme import fields\n'), ((25025, 25085), 'josepy.field', 'jose.field', (['"""error"""'], {'omitempty': '(True)', 'decoder': 'Error.from_json'}), "('error', omitempty=True, decoder=Error.from_json)\n", (25035, 25085), True, 'import josepy as jose\n'), ((26256, 26299), 'josepy.field', 'jose.field', (['"""body"""'], {'decoder': 'Order.from_json'}), "('body', decoder=Order.from_json)\n", (26266, 26299), True, 'import josepy as jose\n'), ((26321, 26358), 'josepy.field', 'jose.field', (['"""csr_pem"""'], {'omitempty': '(True)'}), "('csr_pem', omitempty=True)\n", (26331, 26358), True, 'import josepy as jose\n'), ((26409, 26437), 'josepy.field', 'jose.field', (['"""authorizations"""'], {}), "('authorizations')\n", (26419, 26437), True, 'import josepy as jose\n'), ((26463, 26506), 'josepy.field', 'jose.field', (['"""fullchain_pem"""'], {'omitempty': '(True)'}), "('fullchain_pem', omitempty=True)\n", (26473, 26506), True, 'import josepy as jose\n'), ((26551, 26607), 'josepy.field', 'jose.field', (['"""alternative_fullchains_pem"""'], {'omitempty': '(True)'}), "('alternative_fullchains_pem', omitempty=True)\n", (26561, 26607), True, 'import josepy as jose\n'), ((7780, 7826), 'josepy.field', 'jose.field', (['"""terms-of-service"""'], {'omitempty': '(True)'}), "('terms-of-service', omitempty=True)\n", (7790, 7826), True, 'import josepy as jose\n'), ((7863, 7907), 'josepy.field', 'jose.field', (['"""termsOfService"""'], {'omitempty': '(True)'}), "('termsOfService', omitempty=True)\n", (7873, 7907), True, 'import josepy as jose\n'), ((7931, 7968), 'josepy.field', 'jose.field', (['"""website"""'], {'omitempty': '(True)'}), "('website', omitempty=True)\n", (7941, 7968), True, 'import josepy as jose\n'), ((8005, 8048), 'josepy.field', 'jose.field', (['"""caaIdentities"""'], {'omitempty': '(True)'}), "('caaIdentities', omitempty=True)\n", (8015, 8048), True, 'import josepy as jose\n'), ((8091, 8144), 'josepy.field', 'jose.field', (['"""externalAccountRequired"""'], {'omitempty': '(True)'}), "('externalAccountRequired', omitempty=True)\n", (8101, 8144), True, 'import josepy as jose\n'), ((9568, 9604), 'acme.util.map_keys', 'util.map_keys', (['jobj', 'self._canon_key'], {}), '(jobj, self._canon_key)\n', (9581, 9604), False, 'from acme import util\n'), ((11313, 11341), 'josepy.b64.b64decode', 'jose.b64.b64decode', (['hmac_key'], {}), '(hmac_key)\n', (11331, 11341), True, 'import josepy as jose\n'), ((18980, 19016), 'acme.challenges.Challenge.from_json', 'challenges.Challenge.from_json', (['jobj'], {}), '(jobj)\n', (19010, 19016), False, 'from acme import challenges\n'), ((11418, 11455), 'josepy.jwk.JWKOct', 'jose.jwk.JWKOct', ([], {'key': 'decoded_hmac_key'}), '(key=decoded_hmac_key)\n', (11433, 11455), True, 'import josepy as jose\n')]
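The module in this row is acme.messages from the acme client library, so its error helpers can be sanity-checked directly; a short example against the code shown above:

from acme import messages

err = messages.Error.with_code('badNonce', detail='nonce was replayed')
assert err.typ == 'urn:ietf:params:acme:error:badNonce'
assert err.code == 'badNonce'
assert messages.is_acme_error(err)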
from datetime import datetime

from sqlalchemy import select
from sqlalchemy.orm import Session

from src.PagoModel import Pago
from src.config import engine


def test_new_registry_between_fecha_range_clone_and_disable_highers_than_new():
    PAGOS_TO_INSERT = [
        Pago(id_contrato=12, id_cliente=99, fecha=datetime(2021, 8, 5), monto=7000),
        Pago(id_contrato=12, id_cliente=99, fecha=datetime(2021, 8, 6), monto=1280),
        Pago(id_contrato=12, id_cliente=99, fecha=datetime(2021, 8, 7), monto=4900),
    ]

    print(f"adding {len(PAGOS_TO_INSERT)} registries at latest date")
    with Session(engine) as session:
        for _pago in PAGOS_TO_INSERT:
            Pago.add_registry(session, _pago)
        session.commit()

        # Materialize to a list: a ScalarResult has no len() and is
        # exhausted after iteration.
        rows = session.execute(
            select(Pago).order_by(Pago.activo, Pago.id_pago)
        ).scalars().all()
        for row in rows:
            print(row)
        assert len(rows) == 3

    print("adding one registry in between the fecha range")
    with Session(engine) as session:
        second_registry = Pago(
            id_contrato=12, id_cliente=99, fecha=datetime(2021, 8, 4), monto=900
        )
        Pago.add_registry(session, second_registry)
        session.commit()

        # show full table
        rows = session.execute(
            select(Pago).order_by(Pago.activo, Pago.id_pago)
        ).scalars().all()
        for row in rows:
            print(row)
        assert len(rows) == 7
[ "sqlalchemy.orm.Session", "sqlalchemy.select", "src.PagoModel.Pago.add_registry", "datetime.datetime" ]
[((604, 619), 'sqlalchemy.orm.Session', 'Session', (['engine'], {}), '(engine)\n', (611, 619), False, 'from sqlalchemy.orm import Session\n'), ((1031, 1046), 'sqlalchemy.orm.Session', 'Session', (['engine'], {}), '(engine)\n', (1038, 1046), False, 'from sqlalchemy.orm import Session\n'), ((1190, 1233), 'src.PagoModel.Pago.add_registry', 'Pago.add_registry', (['session', 'second_registry'], {}), '(session, second_registry)\n', (1207, 1233), False, 'from src.PagoModel import Pago\n'), ((682, 715), 'src.PagoModel.Pago.add_registry', 'Pago.add_registry', (['session', '_pago'], {}), '(session, _pago)\n', (699, 715), False, 'from src.PagoModel import Pago\n'), ((314, 334), 'datetime.datetime', 'datetime', (['(2021)', '(8)', '(5)'], {}), '(2021, 8, 5)\n', (322, 334), False, 'from datetime import datetime\n'), ((399, 419), 'datetime.datetime', 'datetime', (['(2021)', '(8)', '(6)'], {}), '(2021, 8, 6)\n', (407, 419), False, 'from datetime import datetime\n'), ((484, 504), 'datetime.datetime', 'datetime', (['(2021)', '(8)', '(7)'], {}), '(2021, 8, 7)\n', (492, 504), False, 'from datetime import datetime\n'), ((1140, 1160), 'datetime.datetime', 'datetime', (['(2021)', '(8)', '(4)'], {}), '(2021, 8, 4)\n', (1148, 1160), False, 'from datetime import datetime\n'), ((792, 804), 'sqlalchemy.select', 'select', (['Pago'], {}), '(Pago)\n', (798, 804), False, 'from sqlalchemy import select\n'), ((1332, 1344), 'sqlalchemy.select', 'select', (['Pago'], {}), '(Pago)\n', (1338, 1344), False, 'from sqlalchemy import select\n')]
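The test above depends on src.config.engine pointing at a live database. A hedged sketch of redirecting it to an in-memory SQLite database for local runs, assuming Pago is a declarative SQLAlchemy model whose metadata can create its table:

from sqlalchemy import create_engine
from src.PagoModel import Pago

engine = create_engine("sqlite://")       # throwaway in-memory database
Pago.metadata.create_all(engine)         # build the table from the model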
from datetime import datetime

import pandas as pd
import numpy as np

# Add the root directory (the one containing execute) to the system path
from utils.GlobalVar import add_path_to_sys
rootdir = add_path_to_sys()

# Import the attendance-state helpers and constants
from utils.GlobalVar import COURSE_TIME, LATE_SPAN

filenames = ['Auxiliary_Info.xlsx', 'Classroom_Course_Schedule.xlsx', 'Classroom_Info.xlsx',
             'College_Class_Info.xlsx', 'Attendance_Logs.xlsx']

au_info = pd.read_excel(rootdir + '/development/' + filenames[0])


def calculate_current_teach_week(semester_first_week_date='2021-3-08 08:00:00'):
    """
    Compute the teaching week the current date belongs to. The approach is:
    (week-of-year of the current date) - (week-of-year of the semester's first week).

    ----
    param: semester_first_week_date: date of the semester's first week,
           e.g. '2021-3-08 08:00:00'
    return: the current teaching week
    """
    # Week-of-year of the given date, returned as a string
    semester_first_week = datetime.strptime(semester_first_week_date, '%Y-%m-%d %H:%M:%S').strftime('%W')

    # Week-of-year of the current date, returned as a string
    current_year_week = datetime.now().strftime('%W')

    # Teaching week the current date belongs to.
    # The "- 1" in parentheses accounts for the weeks before the first week;
    # the final "+ 1" is needed because '%W' numbers weeks from index 00.
    current_teach_week = int(current_year_week) - (int(semester_first_week) - 1) + 1

    return current_teach_week


def holiday_judgment(judg_time=datetime.now(), holidays=au_info['Holiday Date']):
    """
    Decide whether a date falls in a holiday.

    ----
    param: judg_time: the time to check
    param: holidays: list of holidays in the current semester
    return: True if the date falls on a holiday, otherwise False
    """
    # The sheet contains NaT values, which break iteration, so filter them out first.
    # Boolean index excluding NaT
    indexes_without_nat = [(type(holiday) != type(pd.NaT)) for holiday in au_info['Holiday Date']]
    # Holiday list without NaT
    holidays_pure = list(holidays[indexes_without_nat])

    # Full current timestamp
    now = datetime.now()
    # Same thing, date part only
    judg_time_ymd = now.date()

    # Flag: is today a holiday
    is_now_holiday = False

    # Walk the holiday list
    for holiday in holidays_pure:
        # Truncate the holiday to year, month, day
        holiday_month_day = datetime(holiday.year, holiday.month, holiday.day)
        # Compare dates directly; subtracting a datetime from a date raises TypeError
        if judg_time_ymd == holiday_month_day.date():
            is_now_holiday = True

    if is_now_holiday:
        print(f'[INFO] {judg_time_ymd} is Holiday!')
    else:
        print(f'[INFO] {judg_time_ymd} is not Holiday!')

    return is_now_holiday


def attendance_check(set_time='08:00:00'):
    """
    Note: comparisons across midnight are problematic, e.g. attendance opens at
    23:00 and a check-in happens at 00:30 -- but that case does not occur in
    practice.

    Attendance-state check: decide the attendance state against a configured time.
    The attendance time is set manually (simple), e.g.:
    - 1) normal: signed in within the hour before the configured time
    - 2) late: within 45 minutes after class starts
    - 3) other: more than 45 minutes after class starts
    - 4) absent: never arrived for class
    - 5) excused: read automatically from the leave system, or entered by the teacher

    ----
    param set_time: = '19:00:00'
    """
    ####################### tunable parameters #####################
    # normal: signed in within one hour (3600 s) before the configured time
    normal_span = 60 * 60  # seconds

    # length of one class session (a double period, two sub-sessions)
    course_time = COURSE_TIME  # minutes

    # minutes after class start that still count as late
    late_span = LATE_SPAN
    ########################################################

    # Full current timestamp
    now = datetime.now()

    # Current year, month and day, all ints
    judg_time = now
    now_y = judg_time.year
    now_m = judg_time.month
    now_d = judg_time.day

    # Attendance-state flag
    att_state = 'normal'

    # Build the configured attendance datetime
    att_time = datetime.strptime(f'{now_y}-{now_m}-{now_d} {set_time}', '%Y-%m-%d %H:%M:%S')

    # Difference between now and the configured time
    time_diff = now - att_time
    # print(time_diff)
    time_diff_days, time_diff_seconds = time_diff.days, time_diff.seconds
    # print(time_diff_days, time_diff_seconds)

    # A negative time_diff_days means the attendance time has not arrived yet;
    # compute how long remains until it does.
    if time_diff_days < 0:
        # seconds in a day minus time_diff_seconds
        time_span_att = 60 * 60 * 23 - time_diff_seconds
        if time_span_att < normal_span:
            att_state = 'normal'
        else:
            print(f'[INFO] Invalid! Please sign in within the hour before the configured '
                  f'attendance time! {round((time_span_att - 60 * 60) / 60, 2)} minutes remain!')
    # A non-negative time_diff_days means the attendance time has passed;
    # decide between late and absent.
    else:
        # Within 45 minutes of class start counts as late
        if time_diff_seconds - late_span * 60 <= 0:
            att_state = 'late'
        elif (time_diff_seconds > late_span * 60) and (time_diff_seconds <= course_time * 60):
            att_state = 'other'
            print('[INFO] Past the late window, please contact the teacher!')
        else:
            att_state = 'absent'

    print(f'[INFO] configured time: {att_time}, check-in time: {now}, attendance state: {att_state}')

    return att_state
[ "utils.GlobalVar.add_path_to_sys", "datetime.datetime", "pandas.read_excel", "datetime.datetime.strptime", "datetime.datetime.now" ]
[((151, 168), 'utils.GlobalVar.add_path_to_sys', 'add_path_to_sys', ([], {}), '()\n', (166, 168), False, 'from utils.GlobalVar import add_path_to_sys\n'), ((450, 505), 'pandas.read_excel', 'pd.read_excel', (["(rootdir + '/development/' + filenames[0])"], {}), "(rootdir + '/development/' + filenames[0])\n", (463, 505), True, 'import pandas as pd\n'), ((1198, 1212), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1210, 1212), False, 'from datetime import datetime\n'), ((1689, 1703), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1701, 1703), False, 'from datetime import datetime\n'), ((2850, 2864), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2862, 2864), False, 'from datetime import datetime\n'), ((3069, 3146), 'datetime.datetime.strptime', 'datetime.strptime', (['f"""{now_y}-{now_m}-{now_d} {set_time}"""', '"""%Y-%m-%d %H:%M:%S"""'], {}), "(f'{now_y}-{now_m}-{now_d} {set_time}', '%Y-%m-%d %H:%M:%S')\n", (3086, 3146), False, 'from datetime import datetime\n'), ((1888, 1938), 'datetime.datetime', 'datetime', (['holiday.year', 'holiday.month', 'holiday.day'], {}), '(holiday.year, holiday.month, holiday.day)\n', (1896, 1938), False, 'from datetime import datetime\n'), ((801, 865), 'datetime.datetime.strptime', 'datetime.strptime', (['semester_first_week_date', '"""%Y-%m-%d %H:%M:%S"""'], {}), "(semester_first_week_date, '%Y-%m-%d %H:%M:%S')\n", (818, 865), False, 'from datetime import datetime\n'), ((933, 947), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (945, 947), False, 'from datetime import datetime\n')]
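# --- Added usage sketch (not part of the original entry): a minimal way the
# attendance helpers above might be called, assuming this runs in the same
# module scope (utils.GlobalVar available, Excel files in place) and noting
# that the returned state depends on the wall-clock time of the call.
week = calculate_current_teach_week('2021-3-08 08:00:00')
print(f'[INFO] current teaching week: {week}')
state = attendance_check(set_time='08:00:00')      # 'normal' / 'late' / 'other' / 'absent'
is_holiday = holiday_judgment(judg_time=datetime.now())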
from django.contrib import admin from cover_letter.models import Reference # Entities admin.site.register(Reference)
[ "django.contrib.admin.site.register" ]
[((88, 118), 'django.contrib.admin.site.register', 'admin.site.register', (['Reference'], {}), '(Reference)\n', (107, 118), False, 'from django.contrib import admin\n')]
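# --- Added hedged sketch (not from the original entry): the same registration
# can be expressed with a ModelAdmin subclass when the change list needs
# customising. "id" is used because every default Django model has it; the
# real fields of Reference are unknown here.
from django.contrib import admin
from cover_letter.models import Reference

@admin.register(Reference)
class ReferenceAdmin(admin.ModelAdmin):
    list_display = ("id",)  # extend with actual Reference fields as needed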
import asyncio
import typing

from aioredis import RedisConnection

from dataclasses import dataclass, field
from itertools import chain

from ._mixins import DisableMethodsMixin
from .interfaces import BackendInterface, FactoryInterface

__all__ = ("RedisBackend",)


@dataclass(order=False, eq=False, repr=False)
class RedisBackend(DisableMethodsMixin, FactoryInterface, BackendInterface):
    """
    A backend for managing redis based session storage.
    """

    adapter: RedisConnection
    loop: typing.Optional[asyncio.AbstractEventLoop] = field(default=None)

    @classmethod
    async def create(
        cls,
        adapter: RedisConnection,
        loop: typing.Optional[asyncio.AbstractEventLoop] = None,
    ) -> "RedisBackend":
        """
        A factory method for creating and initializing the backend.

        :param adapter: An opened connection to a redis server
        :param loop: An instance of event loop
        """
        return cls(adapter, loop)

    async def clear(self, namespace: str) -> None:
        """Remove every key that belongs to the namespace."""
        keys = await self.keys(namespace)
        if len(keys) > 0:
            await self.adapter.delete(*keys)

    async def keys(self, namespace: str) -> typing.List[str]:
        """List all keys in the namespace, decoded to strings."""
        return [
            key.decode("utf-8") for key in await self.adapter.keys(f"{namespace}:*")
        ]

    async def exists(self, *keys: str) -> int:
        """Return how many of the passed keys exist."""
        return await self.adapter.exists(*keys)

    async def len(self, namespace: str) -> int:
        """Count the keys that belong to the namespace."""
        return len(await self.adapter.keys(f"{namespace}:*"))

    async def get(
        self,
        *keys: str,
    ) -> typing.Sequence[typing.Optional[str]]:
        """Get values by the passed keys from a storage (None for missing keys)."""
        return [
            value.decode("utf-8") if value is not None else None
            for value in await self.adapter.mget(*keys)
        ]

    async def set(self, key: str, value: typing.Any, **kwargs) -> None:
        """Set the value to the key in a storage."""
        return await self.adapter.set(key, value, **kwargs)

    async def update(self, mapping: typing.Dict[str, typing.Any], **kwargs) -> None:
        """Update a storage with the passed mapping."""
        return await self.adapter.mset(*chain.from_iterable(mapping.items()))

    async def delete(self, *keys: str) -> int:
        """Delete the passed keys; returns the number of removed keys."""
        return await self.adapter.delete(*keys)
[ "dataclasses.field", "dataclasses.dataclass" ]
[((283, 327), 'dataclasses.dataclass', 'dataclass', ([], {'order': '(False)', 'eq': '(False)', 'repr': '(False)'}), '(order=False, eq=False, repr=False)\n', (292, 327), False, 'from dataclasses import dataclass, field\n'), ((562, 581), 'dataclasses.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (567, 581), False, 'from dataclasses import dataclass, field\n')]
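# --- Added hedged usage sketch (not part of the entry). Although the field is
# annotated as RedisConnection, the methods used above (set/keys/mget/delete)
# live on the high-level client, so this sketch passes the client returned by
# aioredis.create_redis (aioredis 1.x API); a running local Redis is assumed.
import asyncio
import aioredis

async def demo() -> None:
    client = await aioredis.create_redis("redis://localhost")
    backend = await RedisBackend.create(client)
    await backend.set("session:abc", "payload")
    print(await backend.keys("session"))       # e.g. ['session:abc']
    await backend.delete("session:abc")
    client.close()
    await client.wait_closed()

asyncio.run(demo())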
import django
from django import template
from django.template.defaulttags import url
from django.template import Node, TemplateSyntaxError

from treemenusplus.models import Menu, MenuItem
from treemenusplus.config import APP_LABEL

register = template.Library()


@register.simple_tag
def get_treemenus_static_prefix():
    if django.VERSION >= (1, 3):
        from django.templatetags.static import PrefixNode
        return PrefixNode.handle_simple("STATIC_URL") + 'img/treemenusplus'
    else:
        from django.contrib.admin.templatetags.adminmedia import admin_media_prefix
        return admin_media_prefix() + 'img/admin/'


def show_menu(context, menu_name, menu_type=None):
    menu = Menu.objects.get(name=menu_name)
    context['menu'] = menu
    context['menu_name'] = menu_name
    if menu_type:
        context['menu_type'] = menu_type
    return context
register.inclusion_tag('%s/menu.html' % APP_LABEL, takes_context=True)(show_menu)


def show_menu_item(context, menu_item):
    if not isinstance(menu_item, MenuItem):
        raise template.TemplateSyntaxError('Given argument must be a MenuItem object.')
    context['menu_item'] = menu_item
    return context
register.inclusion_tag('%s/menu_item.html' % APP_LABEL, takes_context=True)(show_menu_item)


class ReverseNamedURLNode(Node):
    def __init__(self, named_url, parser):
        self.named_url = named_url
        self.parser = parser

    def render(self, context):
        from django.template.base import TOKEN_BLOCK, Token

        resolved_named_url = self.named_url.resolve(context)
        # Django >= 1.3 expects the url name to be quoted, so rebuild the tag
        # contents accordingly before handing them to the stock url tag.
        if django.VERSION >= (1, 3):
            tokens = resolved_named_url.split(' ')
            base = tokens[0]
            args = tokens[1:]
            contents = u'url "{0}" {1}'.format(base, ' '.join(args))
        else:
            contents = u'url {0}'.format(resolved_named_url)

        urlNode = url(self.parser, Token(token_type=TOKEN_BLOCK, contents=contents))
        return urlNode.render(context)


def reverse_named_url(parser, token):
    bits = token.contents.split(' ', 2)
    if len(bits) != 2:
        raise TemplateSyntaxError("'%s' takes only one argument"
                                  " (named url)" % bits[0])
    named_url = parser.compile_filter(bits[1])
    return ReverseNamedURLNode(named_url, parser)
reverse_named_url = register.tag(reverse_named_url)
[ "django.template.Library", "treemenusplus.models.Menu.objects.get", "django.template.base.Token", "django.template.TemplateSyntaxError", "django.templatetags.static.PrefixNode.handle_simple", "django.contrib.admin.templatetags.adminmedia.admin_media_prefix" ]
[((245, 263), 'django.template.Library', 'template.Library', ([], {}), '()\n', (261, 263), False, 'from django import template\n'), ((698, 730), 'treemenusplus.models.Menu.objects.get', 'Menu.objects.get', ([], {'name': 'menu_name'}), '(name=menu_name)\n', (714, 730), False, 'from treemenusplus.models import Menu, MenuItem\n'), ((1055, 1128), 'django.template.TemplateSyntaxError', 'template.TemplateSyntaxError', (['"""Given argument must be a MenuItem object."""'], {}), "('Given argument must be a MenuItem object.')\n", (1083, 1128), False, 'from django import template\n'), ((2155, 2228), 'django.template.TemplateSyntaxError', 'TemplateSyntaxError', (['("\'%s\' takes only one argument (named url)" % bits[0])'], {}), '("\'%s\' takes only one argument (named url)" % bits[0])\n', (2174, 2228), False, 'from django.template import Node, TemplateSyntaxError\n'), ((428, 466), 'django.templatetags.static.PrefixNode.handle_simple', 'PrefixNode.handle_simple', (['"""STATIC_URL"""'], {}), "('STATIC_URL')\n", (452, 466), False, 'from django.templatetags.static import PrefixNode\n'), ((598, 618), 'django.contrib.admin.templatetags.adminmedia.admin_media_prefix', 'admin_media_prefix', ([], {}), '()\n', (616, 618), False, 'from django.contrib.admin.templatetags.adminmedia import admin_media_prefix\n'), ((1949, 1997), 'django.template.base.Token', 'Token', ([], {'token_type': 'TOKEN_BLOCK', 'contents': 'contents'}), '(token_type=TOKEN_BLOCK, contents=contents)\n', (1954, 1997), False, 'from django.template.base import TOKEN_BLOCK, Token\n')]
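# --- Added hedged sketch (not from the original entry): how the tags above
# would typically be used from a Django template. The load name must match
# this module's file name, which is unknown here, so it is a placeholder, as
# is the menu name "main" and the image file name.
#
#   {% load treemenusplus_tags %}   {# placeholder tag-library name #}
#   {% show_menu "main" %}
#   {% show_menu "footer" "horizontal" %}
#   <img src="{% get_treemenus_static_prefix %}/menu-open.gif">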
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from federatedml.secure_information_retrieval.base_secure_information_retrieval import \ BaseSecureInformationRetrieval from federatedml.param.sir_param import SecureInformationRetrievalParam from federatedml.param.intersect_param import IntersectParam from federatedml.secureprotol.oblivious_transfer.hauck_oblivious_transfer.hauck_oblivious_transfer_sender import \ HauckObliviousTransferSender from federatedml.secureprotol.symmetric_encryption.py_aes_encryption import AESEncryptKey from federatedml.secureprotol.symmetric_encryption.cryptor_executor import CryptoExecutor from federatedml.statistic import data_overview from federatedml.statistic.intersect import DhIntersectionHost from federatedml.util import consts, abnormal_detection, LOGGER MODEL_PARAM_NAME = 'SecureInformationRetrievalParam' MODEL_META_NAME = 'SecureInformationRetrievalMeta' class SecureInformationRetrievalHost(BaseSecureInformationRetrieval): def __init__(self): super(SecureInformationRetrievalHost, self).__init__() self.oblivious_transfer = None self.target_indexes = None def _init_model(self, param: SecureInformationRetrievalParam): self._init_base_model(param) self.intersection_obj = DhIntersectionHost() self.intersection_obj.role = consts.HOST intersect_param = IntersectParam(dh_params=self.dh_params) self.intersection_obj.load_params(intersect_param) self.intersection_obj.host_party_id_list = self.component_properties.host_party_idlist self.intersection_obj.guest_party_id = self.component_properties.guest_partyid if self.model_param.oblivious_transfer_protocol == consts.OT_HAUCK.lower(): self.oblivious_transfer = HauckObliviousTransferSender() else: raise ValueError("SIR only supports Hauck's OT") def fit(self, data_inst): """ :param data_inst: Table :return: """ # LOGGER.info("data count = {}".format(data_inst.count())) abnormal_detection.empty_table_detection(data_inst) self._update_target_indexes(data_inst.schema) match_data = data_inst if data_overview.check_with_inst_id(data_inst): match_data = self._recover_match_id(data_inst) # 0. Raw retrieval if self.model_param.raw_retrieval or self.security_level == 0: LOGGER.info("enter raw information retrieval host") # abnormal_detection.empty_table_detection(data_inst) self._raw_information_retrieval(match_data) self._display_result(block_num='N/A') self._sync_coverage(data_inst) return data_inst # 1. Data pre-processing LOGGER.info("enter secure information retrieval host") # abnormal_detection.empty_table_detection(data_inst) self._parse_security_level(match_data) if not self._check_oblivious_transfer_condition(): self._failure_response() # 2. Guest find intersection self.intersection_obj.get_intersect_doubly_encrypted_id(match_data) id_list_host_first = self.intersection_obj.id_list_local_first # 3. Get the re-indexed doubly encrypted ID from guest id_blocks = self._iteratively_get_id_blocks() # 4. 
        #    Restore value for the intersection
        id_blocks = _restore_value(id_list_host_first, id_blocks,
                                   self.target_indexes, self.need_label)  # List[(Ei, val)]
        LOGGER.info("interested values restored")

        # 8. Execute OT as sender
        LOGGER.info("enter oblivious transfer protocol as a sender")
        key_list = self.oblivious_transfer.key_derivation(self.block_num)
        LOGGER.info("oblivious transfer key derived")

        # 9. Encrypt and transmit
        self._non_committing_encrypt(id_blocks, key_list)  # List[(Ei, Eval)]
        LOGGER.info("non-committing encryption and transmission completed")

        # 10. Slack
        self._sync_coverage(data_inst)

        self._display_result()
        LOGGER.info("secure information retrieval finished")

        return data_inst

    def _sync_nonce_list(self, nonce, time):
        self.transfer_variable.nonce_list.remote(nonce,
                                                 suffix=(time,),
                                                 role=consts.GUEST,
                                                 idx=0)
        LOGGER.info("sent {}-th nonce to guest".format(time))

    def _transmit_value_ciphertext(self, id_block, time):
        self.transfer_variable.id_blocks_ciphertext.remote(id_block,
                                                           suffix=(time,),
                                                           role=consts.GUEST,
                                                           idx=0)
        LOGGER.info("sent {}-th id block ciphertext to guest".format(time))

    def _non_committing_encrypt(self, id_blocks, key_list):
        """
        Use non-committing cipher to encrypt id blocks
        :param id_blocks: List[(Ei, val)]
        :param key_list: List[ObliviousTransferKey]
        :return:
        """
        for i in range(self.block_num):
            if self.model_param.non_committing_encryption == consts.AES.lower():
                aes_key = CryptoExecutor(AESEncryptKey(key_list[i].key))
            else:
                raise ValueError("only supports AES cipher for non-committing encryption")

            self._transmit_value_ciphertext(aes_key.map_values_encrypt(id_blocks[i], mode=0), time=i)
            self._sync_nonce_list(aes_key.get_nonce(), time=i)

            block_confirm = self.transfer_variable.block_confirm.get(idx=0,
                                                                     suffix=(i,))
            if block_confirm:
                continue

    def _update_target_indexes(self, schema):
        self.need_label = self._check_need_label()
        if self.need_label:
            return

        header = schema["header"]
        target_indexes = []
        for col_name in self.target_cols:
            try:
                i = header.index(col_name)
                target_indexes.append(i)
            except ValueError:
                raise ValueError(f"{col_name} does not exist in table header. "
                                 f"Please check.")
        self.target_indexes = target_indexes

    @staticmethod
    def extract_value(instance, target_indexes, need_label):
        if need_label:
            return instance.label
        features = [instance.features[i] for i in target_indexes]
        return features

    def _sync_natural_indexation(self, id_list=None, time=None):
        id_list_natural_indexation = self.transfer_variable.natural_indexation.get(idx=0,
                                                                                   suffix=(time,))
        LOGGER.info(f"got naturally indexed block {time} from guest")
        return id_list_natural_indexation

    def _parse_security_level(self, data_instance):
        self._sync_block_num()

    def _sync_block_num(self):
        self.block_num = self.transfer_variable.block_num.get(idx=0)
        LOGGER.info("got block num {} from guest".format(self.block_num))

    def _raw_information_retrieval(self, data_instance):
        id_list_guest = self.transfer_variable.raw_id_list.get(idx=0)
        LOGGER.info("got raw id list from guest")

        target_indexes, need_label = self.target_indexes, self.need_label
        id_intersect = data_instance.join(id_list_guest,
                                          lambda v, u: SecureInformationRetrievalHost.extract_value(
                                              v, target_indexes, need_label))

        self.transfer_variable.raw_value_list.remote(id_intersect,
                                                     role=consts.GUEST,
                                                     idx=0)
        LOGGER.info("sent raw value list to guest")
        # self._sync_coverage(data_instance)

    def _sync_coverage(self, data_instance):
        self.coverage = self.transfer_variable.coverage.get(idx=0) / data_instance.count()
        LOGGER.info(f"got coverage {self.coverage} from guest")

    def _iteratively_get_id_blocks(self):
        """
        :return: List[Table]
        """
        id_blocks = [None for _ in range(self.block_num)]
        for i in range(self.block_num):
            id_block = self._sync_natural_indexation(time=i)  # get List[(Ei, -1)]
            id_blocks[i] = id_block

        return id_blocks


def _restore_value(id_list_host, id_blocks, target_indexes, need_label):
    """
    :param id_list_host: (h, (Eh, Instance))
    :param id_blocks: List[(Ei, -1)]
    :return:
    """
    id_value_blocks = []
    for i in range(len(id_blocks)):
        restored_table = id_list_host.join(id_blocks[i],
                                           lambda v, u: SecureInformationRetrievalHost.extract_value(
                                               v[1], target_indexes, need_label))
        id_value_blocks.append(restored_table)
    return id_value_blocks
[ "federatedml.statistic.data_overview.check_with_inst_id", "federatedml.statistic.intersect.DhIntersectionHost", "federatedml.util.LOGGER.info", "federatedml.util.abnormal_detection.empty_table_detection", "federatedml.param.intersect_param.IntersectParam", "federatedml.util.consts.OT_HAUCK.lower", "federatedml.secureprotol.oblivious_transfer.hauck_oblivious_transfer.hauck_oblivious_transfer_sender.HauckObliviousTransferSender", "federatedml.util.consts.AES.lower", "federatedml.secureprotol.symmetric_encryption.py_aes_encryption.AESEncryptKey" ]
[((1901, 1921), 'federatedml.statistic.intersect.DhIntersectionHost', 'DhIntersectionHost', ([], {}), '()\n', (1919, 1921), False, 'from federatedml.statistic.intersect import DhIntersectionHost\n'), ((1997, 2037), 'federatedml.param.intersect_param.IntersectParam', 'IntersectParam', ([], {'dh_params': 'self.dh_params'}), '(dh_params=self.dh_params)\n', (2011, 2037), False, 'from federatedml.param.intersect_param import IntersectParam\n'), ((2688, 2739), 'federatedml.util.abnormal_detection.empty_table_detection', 'abnormal_detection.empty_table_detection', (['data_inst'], {}), '(data_inst)\n', (2728, 2739), False, 'from federatedml.util import consts, abnormal_detection, LOGGER\n'), ((2836, 2879), 'federatedml.statistic.data_overview.check_with_inst_id', 'data_overview.check_with_inst_id', (['data_inst'], {}), '(data_inst)\n', (2868, 2879), False, 'from federatedml.statistic import data_overview\n'), ((3389, 3443), 'federatedml.util.LOGGER.info', 'LOGGER.info', (['"""enter secure information retrieval host"""'], {}), "('enter secure information retrieval host')\n", (3400, 3443), False, 'from federatedml.util import consts, abnormal_detection, LOGGER\n'), ((4241, 4282), 'federatedml.util.LOGGER.info', 'LOGGER.info', (['"""interested values restored"""'], {}), "('interested values restored')\n", (4252, 4282), False, 'from federatedml.util import consts, abnormal_detection, LOGGER\n'), ((4326, 4386), 'federatedml.util.LOGGER.info', 'LOGGER.info', (['"""enter oblivious transfer protocol as a sender"""'], {}), "('enter oblivious transfer protocol as a sender')\n", (4337, 4386), False, 'from federatedml.util import consts, abnormal_detection, LOGGER\n'), ((4469, 4514), 'federatedml.util.LOGGER.info', 'LOGGER.info', (['"""oblivious transfer key derived"""'], {}), "('oblivious transfer key derived')\n", (4480, 4514), False, 'from federatedml.util import consts, abnormal_detection, LOGGER\n'), ((4641, 4708), 'federatedml.util.LOGGER.info', 'LOGGER.info', (['"""non-committing encryption and transmission completed"""'], {}), "('non-committing encryption and transmission completed')\n", (4652, 4708), False, 'from federatedml.util import consts, abnormal_detection, LOGGER\n'), ((4808, 4860), 'federatedml.util.LOGGER.info', 'LOGGER.info', (['"""secure information retrieval finished"""'], {}), "('secure information retrieval finished')\n", (4819, 4860), False, 'from federatedml.util import consts, abnormal_detection, LOGGER\n'), ((7606, 7667), 'federatedml.util.LOGGER.info', 'LOGGER.info', (['f"""got naturally indexed block {time} from guest"""'], {}), "(f'got naturally indexed block {time} from guest')\n", (7617, 7667), False, 'from federatedml.util import consts, abnormal_detection, LOGGER\n'), ((8105, 8146), 'federatedml.util.LOGGER.info', 'LOGGER.info', (['"""got raw id list from guest"""'], {}), "('got raw id list from guest')\n", (8116, 8146), False, 'from federatedml.util import consts, abnormal_detection, LOGGER\n'), ((8819, 8862), 'federatedml.util.LOGGER.info', 'LOGGER.info', (['"""sent raw value list to guest"""'], {}), "('sent raw value list to guest')\n", (8830, 8862), False, 'from federatedml.util import consts, abnormal_detection, LOGGER\n'), ((9054, 9109), 'federatedml.util.LOGGER.info', 'LOGGER.info', (['f"""got coverage {self.coverage} from guest"""'], {}), "(f'got coverage {self.coverage} from guest')\n", (9065, 9109), False, 'from federatedml.util import consts, abnormal_detection, LOGGER\n'), ((2339, 2362), 'federatedml.util.consts.OT_HAUCK.lower', 'consts.OT_HAUCK.lower', ([], {}), 
'()\n', (2360, 2362), False, 'from federatedml.util import consts, abnormal_detection, LOGGER\n'), ((2402, 2432), 'federatedml.secureprotol.oblivious_transfer.hauck_oblivious_transfer.hauck_oblivious_transfer_sender.HauckObliviousTransferSender', 'HauckObliviousTransferSender', ([], {}), '()\n', (2430, 2432), False, 'from federatedml.secureprotol.oblivious_transfer.hauck_oblivious_transfer.hauck_oblivious_transfer_sender import HauckObliviousTransferSender\n'), ((3051, 3102), 'federatedml.util.LOGGER.info', 'LOGGER.info', (['"""enter raw information retrieval host"""'], {}), "('enter raw information retrieval host')\n", (3062, 3102), False, 'from federatedml.util import consts, abnormal_detection, LOGGER\n'), ((6015, 6033), 'federatedml.util.consts.AES.lower', 'consts.AES.lower', ([], {}), '()\n', (6031, 6033), False, 'from federatedml.util import consts, abnormal_detection, LOGGER\n'), ((6076, 6106), 'federatedml.secureprotol.symmetric_encryption.py_aes_encryption.AESEncryptKey', 'AESEncryptKey', (['key_list[i].key'], {}), '(key_list[i].key)\n', (6089, 6106), False, 'from federatedml.secureprotol.symmetric_encryption.py_aes_encryption import AESEncryptKey\n')]
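# --- Added hedged sketch (plain-Python stand-in, not FATE APIs): the join in
# _restore_value keeps, for every doubly-encrypted id Ei that the guest asked
# about, the host-side value stored under the same Ei. Plain dicts stand in
# for FATE Tables here; keys and values are illustrative.
id_list_host = {"Ei_1": ("h1", ["v1"]), "Ei_2": ("h2", ["v2"])}
id_block = {"Ei_2": -1}  # the guest only requested Ei_2
restored = {k: id_list_host[k][1] for k in id_block if k in id_list_host}
print(restored)  # {'Ei_2': ['v2']} -- a value is revealed only for joined ids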
import re from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \ default, words, combined, do_insertions from pygments.util import get_bool_opt, shebang_matches from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation, Generic, Other, Error from pygments import unistring as uni class Python2(RegexLexer): """ For `Python 2.x <http://www.python.org>`_ source code. .. versionchanged:: 2.5 This class has been renamed from ``PythonLexer``. ``PythonLexer`` now refers to the Python 3 variant. File name patterns like ``*.py`` have been moved to Python 3 as well. """ name = 'Python 2.x' aliases = ['python2', 'py2'] filenames = ['.py2','.python2'] # now taken over by PythonLexer (3.x) mimetypes = ['text/x-python2', 'application/x-python2'] def innerstring_rules(ttype): return [ # the old style '%s' % (...) string formatting (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?' '[hlL]?[E-GXc-giorsux%]', String.Interpol), # backslashes, quotes and formatting signs must be parsed one at a time (r'[^\\\'"%\n]+', ttype), (r'[\'"\\]', ttype), # unhandled string formatting sign (r'%', ttype), # newlines are an error (use "nl" state) ] tokens = { 'root': [ (r'\n', Text), (r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")', bygroups(Text, String.Affix, String.Doc)), (r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')", bygroups(Text, String.Affix, String.Doc)), (r'[^\S\n]+', Text), (r'\A#!.+$', Comment.Hashbang), (r'#.*$', Comment.Single), (r'[]{}:(),;[]', Punctuation), (r'\\\n', Text), (r'\\', Text), (r'(in|is|and|or|not)\b', Operator.Word), (r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator), include('keywords'), (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'), (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'), (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text), 'fromimport'), (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text), 'import'), include('builtins'), include('magicfuncs'), include('magicvars'), include('backtick'), ('([rR]|[uUbB][rR]|[rR][uUbB])(""")', bygroups(String.Affix, String.Double), 'tdqs'), ("([rR]|[uUbB][rR]|[rR][uUbB])(''')", bygroups(String.Affix, String.Single), 'tsqs'), ('([rR]|[uUbB][rR]|[rR][uUbB])(")', bygroups(String.Affix, String.Double), 'dqs'), ("([rR]|[uUbB][rR]|[rR][uUbB])(')", bygroups(String.Affix, String.Single), 'sqs'), ('([uUbB]?)(""")', bygroups(String.Affix, String.Double), combined('stringescape', 'tdqs')), ("([uUbB]?)(''')", bygroups(String.Affix, String.Single), combined('stringescape', 'tsqs')), ('([uUbB]?)(")', bygroups(String.Affix, String.Double), combined('stringescape', 'dqs')), ("([uUbB]?)(')", bygroups(String.Affix, String.Single), combined('stringescape', 'sqs')), include('name'), include('numbers'), ], 'keywords': [ (words(( 'assert', 'break', 'continue', 'del', 'elif', 'else', 'except', 'exec', 'finally', 'for', 'global', 'if', 'lambda', 'pass', 'print', 'raise', 'return', 'try', 'while', 'yield', 'yield from', 'as', 'with'), suffix=r'\b'), Keyword), ], 'builtins': [ (words(( '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin', 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr', 'classmethod', 'cmp', 'coerce', 'compile', 'complex', 'delattr', 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'execfile', 'exit', 'file', 'filter', 'float', 'frozenset', 'getattr', 'globals', 'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'intern', 'isinstance', 'issubclass', 'iter', 'len', 'list', 'locals', 'long', 'map', 'max', 'min', 'next', 'object', 'oct', 'open', 'ord', 
'pow', 'property', 'range', 'raw_input', 'reduce', 'reload', 'repr', 'reversed', 'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', 'type', 'unichr', 'unicode', 'vars', 'xrange', 'zip'), prefix=r'(?<!\.)', suffix=r'\b'), Name.Builtin), (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|cls' r')\b', Name.Builtin.Pseudo), (words(( 'ArithmeticError', 'AssertionError', 'AttributeError', 'BaseException', 'DeprecationWarning', 'EOFError', 'EnvironmentError', 'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit', 'IOError', 'ImportError', 'ImportWarning', 'IndentationError', 'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError', 'MemoryError', 'NameError', 'NotImplementedError', 'OSError', 'OverflowError', 'OverflowWarning', 'PendingDeprecationWarning', 'ReferenceError', 'RuntimeError', 'RuntimeWarning', 'StandardError', 'StopIteration', 'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit', 'TabError', 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError', 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError', 'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError', 'Warning', 'WindowsError', 'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'), Name.Exception), ], 'magicfuncs': [ (words(( '__abs__', '__add__', '__and__', '__call__', '__cmp__', '__coerce__', '__complex__', '__contains__', '__del__', '__delattr__', '__delete__', '__delitem__', '__delslice__', '__div__', '__divmod__', '__enter__', '__eq__', '__exit__', '__float__', '__floordiv__', '__ge__', '__get__', '__getattr__', '__getattribute__', '__getitem__', '__getslice__', '__gt__', '__hash__', '__hex__', '__iadd__', '__iand__', '__idiv__', '__ifloordiv__', '__ilshift__', '__imod__', '__imul__', '__index__', '__init__', '__instancecheck__', '__int__', '__invert__', '__iop__', '__ior__', '__ipow__', '__irshift__', '__isub__', '__iter__', '__itruediv__', '__ixor__', '__le__', '__len__', '__long__', '__lshift__', '__lt__', '__missing__', '__mod__', '__mul__', '__ne__', '__neg__', '__new__', '__nonzero__', '__oct__', '__op__', '__or__', '__pos__', '__pow__', '__radd__', '__rand__', '__rcmp__', '__rdiv__', '__rdivmod__', '__repr__', '__reversed__', '__rfloordiv__', '__rlshift__', '__rmod__', '__rmul__', '__rop__', '__ror__', '__rpow__', '__rrshift__', '__rshift__', '__rsub__', '__rtruediv__', '__rxor__', '__set__', '__setattr__', '__setitem__', '__setslice__', '__str__', '__sub__', '__subclasscheck__', '__truediv__', '__unicode__', '__xor__'), suffix=r'\b'), Name.Function.Magic), ], 'magicvars': [ (words(( '__bases__', '__class__', '__closure__', '__code__', '__defaults__', '__dict__', '__doc__', '__file__', '__func__', '__globals__', '__metaclass__', '__module__', '__mro__', '__name__', '__self__', '__slots__', '__weakref__'), suffix=r'\b'), Name.Variable.Magic), ], 'numbers': [ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float), (r'\d+[eE][+-]?[0-9]+j?', Number.Float), (r'0[0-7]+j?', Number.Oct), (r'0[bB][01]+', Number.Bin), (r'0[xX][a-fA-F0-9]+', Number.Hex), (r'\d+L', Number.Integer.Long), (r'\d+j?', Number.Integer) ], 'backtick': [ ('`.*?`', String.Backtick), ], 'name': [ (r'@[\w.]+', Name.Decorator), (r'[a-zA-Z_]\w*', Name), ], 'funcname': [ include('magicfuncs'), (r'[a-zA-Z_]\w*', Name.Function, '#pop'), default('#pop'), ], 'classname': [ (r'[a-zA-Z_]\w*', Name.Class, '#pop') ], 'import': [ (r'(?:[ \t]|\\\n)+', Text), (r'as\b', Keyword.Namespace), (r',', Operator), (r'[a-zA-Z_][\w.]*', Name.Namespace), default('#pop') # all else: go back ], 
'fromimport': [ (r'(?:[ \t]|\\\n)+', Text), (r'import\b', Keyword.Namespace, '#pop'), # if None occurs here, it's "raise x from None", since None can # never be a module name (r'None\b', Name.Builtin.Pseudo, '#pop'), # sadly, in "raise x from y" y will be highlighted as namespace too (r'[a-zA-Z_.][\w.]*', Name.Namespace), # anything else here also means "raise x from y" and is therefore # not an error default('#pop'), ], 'stringescape': [ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|' r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape) ], 'strings-single': innerstring_rules(String.Single), 'strings-double': innerstring_rules(String.Double), 'dqs': [ (r'"', String.Double, '#pop'), (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings include('strings-double') ], 'sqs': [ (r"'", String.Single, '#pop'), (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings include('strings-single') ], 'tdqs': [ (r'"""', String.Double, '#pop'), include('strings-double'), (r'\n', String.Double) ], 'tsqs': [ (r"'''", String.Single, '#pop'), include('strings-single'), (r'\n', String.Single) ], } def analyse_text(text): return shebang_matches(text, r'pythonw?2(\.\d)?')
[ "pygments.lexer.combined", "pygments.lexer.default", "pygments.util.shebang_matches", "pygments.lexer.words", "pygments.lexer.bygroups", "pygments.lexer.include" ]
[((11034, 11077), 'pygments.util.shebang_matches', 'shebang_matches', (['text', '"""pythonw?2(\\\\.\\\\d)?"""'], {}), "(text, 'pythonw?2(\\\\.\\\\d)?')\n", (11049, 11077), False, 'from pygments.util import get_bool_opt, shebang_matches\n'), ((2067, 2086), 'pygments.lexer.include', 'include', (['"""keywords"""'], {}), "('keywords')\n", (2074, 2086), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((2460, 2479), 'pygments.lexer.include', 'include', (['"""builtins"""'], {}), "('builtins')\n", (2467, 2479), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((2494, 2515), 'pygments.lexer.include', 'include', (['"""magicfuncs"""'], {}), "('magicfuncs')\n", (2501, 2515), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((2530, 2550), 'pygments.lexer.include', 'include', (['"""magicvars"""'], {}), "('magicvars')\n", (2537, 2550), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((2565, 2584), 'pygments.lexer.include', 'include', (['"""backtick"""'], {}), "('backtick')\n", (2572, 2584), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((3519, 3534), 'pygments.lexer.include', 'include', (['"""name"""'], {}), "('name')\n", (3526, 3534), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((3549, 3567), 'pygments.lexer.include', 'include', (['"""numbers"""'], {}), "('numbers')\n", (3556, 3567), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((8955, 8976), 'pygments.lexer.include', 'include', (['"""magicfuncs"""'], {}), "('magicfuncs')\n", (8962, 8976), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((9046, 9061), 'pygments.lexer.default', 'default', (['"""#pop"""'], {}), "('#pop')\n", (9053, 9061), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((9362, 9377), 'pygments.lexer.default', 'default', (['"""#pop"""'], {}), "('#pop')\n", (9369, 9377), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((9955, 9970), 'pygments.lexer.default', 'default', (['"""#pop"""'], {}), "('#pop')\n", (9962, 9970), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((10442, 10467), 'pygments.lexer.include', 'include', (['"""strings-double"""'], {}), "('strings-double')\n", (10449, 10467), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((10636, 10661), 'pygments.lexer.include', 'include', (['"""strings-single"""'], {}), "('strings-single')\n", (10643, 10661), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((10752, 10777), 'pygments.lexer.include', 'include', (['"""strings-double"""'], {}), "('strings-double')\n", (10759, 10777), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), 
((10905, 10930), 'pygments.lexer.include', 'include', (['"""strings-single"""'], {}), "('strings-single')\n", (10912, 10930), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((1564, 1604), 'pygments.lexer.bygroups', 'bygroups', (['Text', 'String.Affix', 'String.Doc'], {}), '(Text, String.Affix, String.Doc)\n', (1572, 1604), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((1678, 1718), 'pygments.lexer.bygroups', 'bygroups', (['Text', 'String.Affix', 'String.Doc'], {}), '(Text, String.Affix, String.Doc)\n', (1686, 1718), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((2126, 2149), 'pygments.lexer.bygroups', 'bygroups', (['Keyword', 'Text'], {}), '(Keyword, Text)\n', (2134, 2149), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((2204, 2227), 'pygments.lexer.bygroups', 'bygroups', (['Keyword', 'Text'], {}), '(Keyword, Text)\n', (2212, 2227), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((2282, 2315), 'pygments.lexer.bygroups', 'bygroups', (['Keyword.Namespace', 'Text'], {}), '(Keyword.Namespace, Text)\n', (2290, 2315), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((2387, 2420), 'pygments.lexer.bygroups', 'bygroups', (['Keyword.Namespace', 'Text'], {}), '(Keyword.Namespace, Text)\n', (2395, 2420), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((2651, 2688), 'pygments.lexer.bygroups', 'bygroups', (['String.Affix', 'String.Double'], {}), '(String.Affix, String.Double)\n', (2659, 2688), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((2764, 2801), 'pygments.lexer.bygroups', 'bygroups', (['String.Affix', 'String.Single'], {}), '(String.Affix, String.Single)\n', (2772, 2801), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((2875, 2912), 'pygments.lexer.bygroups', 'bygroups', (['String.Affix', 'String.Double'], {}), '(String.Affix, String.Double)\n', (2883, 2912), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((2985, 3022), 'pygments.lexer.bygroups', 'bygroups', (['String.Affix', 'String.Single'], {}), '(String.Affix, String.Single)\n', (2993, 3022), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((3064, 3101), 'pygments.lexer.bygroups', 'bygroups', (['String.Affix', 'String.Double'], {}), '(String.Affix, String.Double)\n', (3072, 3101), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((3117, 3149), 'pygments.lexer.combined', 'combined', (['"""stringescape"""', '"""tdqs"""'], {}), "('stringescape', 'tdqs')\n", (3125, 3149), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((3184, 3221), 'pygments.lexer.bygroups', 'bygroups', (['String.Affix', 'String.Single'], {}), '(String.Affix, String.Single)\n', 
(3192, 3221), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((3237, 3269), 'pygments.lexer.combined', 'combined', (['"""stringescape"""', '"""tsqs"""'], {}), "('stringescape', 'tsqs')\n", (3245, 3269), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((3302, 3339), 'pygments.lexer.bygroups', 'bygroups', (['String.Affix', 'String.Double'], {}), '(String.Affix, String.Double)\n', (3310, 3339), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((3355, 3386), 'pygments.lexer.combined', 'combined', (['"""stringescape"""', '"""dqs"""'], {}), "('stringescape', 'dqs')\n", (3363, 3386), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((3419, 3456), 'pygments.lexer.bygroups', 'bygroups', (['String.Affix', 'String.Single'], {}), '(String.Affix, String.Single)\n', (3427, 3456), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((3472, 3503), 'pygments.lexer.combined', 'combined', (['"""stringescape"""', '"""sqs"""'], {}), "('stringescape', 'sqs')\n", (3480, 3503), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((3618, 3856), 'pygments.lexer.words', 'words', (["('assert', 'break', 'continue', 'del', 'elif', 'else', 'except', 'exec',\n 'finally', 'for', 'global', 'if', 'lambda', 'pass', 'print', 'raise',\n 'return', 'try', 'while', 'yield', 'yield from', 'as', 'with')"], {'suffix': '"""\\\\b"""'}), "(('assert', 'break', 'continue', 'del', 'elif', 'else', 'except',\n 'exec', 'finally', 'for', 'global', 'if', 'lambda', 'pass', 'print',\n 'raise', 'return', 'try', 'while', 'yield', 'yield from', 'as', 'with'),\n suffix='\\\\b')\n", (3623, 3856), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((3988, 4807), 'pygments.lexer.words', 'words', (["('__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin', 'bool',\n 'buffer', 'bytearray', 'bytes', 'callable', 'chr', 'classmethod', 'cmp',\n 'coerce', 'compile', 'complex', 'delattr', 'dict', 'dir', 'divmod',\n 'enumerate', 'eval', 'execfile', 'exit', 'file', 'filter', 'float',\n 'frozenset', 'getattr', 'globals', 'hasattr', 'hash', 'hex', 'id',\n 'input', 'int', 'intern', 'isinstance', 'issubclass', 'iter', 'len',\n 'list', 'locals', 'long', 'map', 'max', 'min', 'next', 'object', 'oct',\n 'open', 'ord', 'pow', 'property', 'range', 'raw_input', 'reduce',\n 'reload', 'repr', 'reversed', 'round', 'set', 'setattr', 'slice',\n 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', 'type',\n 'unichr', 'unicode', 'vars', 'xrange', 'zip')"], {'prefix': '"""(?<!\\\\.)"""', 'suffix': '"""\\\\b"""'}), "(('__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin',\n 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr',\n 'classmethod', 'cmp', 'coerce', 'compile', 'complex', 'delattr', 'dict',\n 'dir', 'divmod', 'enumerate', 'eval', 'execfile', 'exit', 'file',\n 'filter', 'float', 'frozenset', 'getattr', 'globals', 'hasattr', 'hash',\n 'hex', 'id', 'input', 'int', 'intern', 'isinstance', 'issubclass',\n 'iter', 'len', 'list', 'locals', 'long', 'map', 'max', 'min', 'next',\n 'object', 'oct', 'open', 'ord', 'pow', 'property', 
'range', 'raw_input',\n 'reduce', 'reload', 'repr', 'reversed', 'round', 'set', 'setattr',\n 'slice', 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple',\n 'type', 'unichr', 'unicode', 'vars', 'xrange', 'zip'), prefix=\n '(?<!\\\\.)', suffix='\\\\b')\n", (3993, 4807), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((5130, 6055), 'pygments.lexer.words', 'words', (["('ArithmeticError', 'AssertionError', 'AttributeError', 'BaseException',\n 'DeprecationWarning', 'EOFError', 'EnvironmentError', 'Exception',\n 'FloatingPointError', 'FutureWarning', 'GeneratorExit', 'IOError',\n 'ImportError', 'ImportWarning', 'IndentationError', 'IndexError',\n 'KeyError', 'KeyboardInterrupt', 'LookupError', 'MemoryError',\n 'NameError', 'NotImplementedError', 'OSError', 'OverflowError',\n 'OverflowWarning', 'PendingDeprecationWarning', 'ReferenceError',\n 'RuntimeError', 'RuntimeWarning', 'StandardError', 'StopIteration',\n 'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit', 'TabError',\n 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',\n 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',\n 'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError', 'Warning',\n 'WindowsError', 'ZeroDivisionError')"], {'prefix': '"""(?<!\\\\.)"""', 'suffix': '"""\\\\b"""'}), "(('ArithmeticError', 'AssertionError', 'AttributeError',\n 'BaseException', 'DeprecationWarning', 'EOFError', 'EnvironmentError',\n 'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit',\n 'IOError', 'ImportError', 'ImportWarning', 'IndentationError',\n 'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError',\n 'MemoryError', 'NameError', 'NotImplementedError', 'OSError',\n 'OverflowError', 'OverflowWarning', 'PendingDeprecationWarning',\n 'ReferenceError', 'RuntimeError', 'RuntimeWarning', 'StandardError',\n 'StopIteration', 'SyntaxError', 'SyntaxWarning', 'SystemError',\n 'SystemExit', 'TabError', 'TypeError', 'UnboundLocalError',\n 'UnicodeDecodeError', 'UnicodeEncodeError', 'UnicodeError',\n 'UnicodeTranslateError', 'UnicodeWarning', 'UserWarning', 'ValueError',\n 'VMSError', 'Warning', 'WindowsError', 'ZeroDivisionError'), prefix=\n '(?<!\\\\.)', suffix='\\\\b')\n", (5135, 6055), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((6325, 7649), 'pygments.lexer.words', 'words', (["('__abs__', '__add__', '__and__', '__call__', '__cmp__', '__coerce__',\n '__complex__', '__contains__', '__del__', '__delattr__', '__delete__',\n '__delitem__', '__delslice__', '__div__', '__divmod__', '__enter__',\n '__eq__', '__exit__', '__float__', '__floordiv__', '__ge__', '__get__',\n '__getattr__', '__getattribute__', '__getitem__', '__getslice__',\n '__gt__', '__hash__', '__hex__', '__iadd__', '__iand__', '__idiv__',\n '__ifloordiv__', '__ilshift__', '__imod__', '__imul__', '__index__',\n '__init__', '__instancecheck__', '__int__', '__invert__', '__iop__',\n '__ior__', '__ipow__', '__irshift__', '__isub__', '__iter__',\n '__itruediv__', '__ixor__', '__le__', '__len__', '__long__',\n '__lshift__', '__lt__', '__missing__', '__mod__', '__mul__', '__ne__',\n '__neg__', '__new__', '__nonzero__', '__oct__', '__op__', '__or__',\n '__pos__', '__pow__', '__radd__', '__rand__', '__rcmp__', '__rdiv__',\n '__rdivmod__', '__repr__', '__reversed__', '__rfloordiv__',\n '__rlshift__', '__rmod__', '__rmul__', '__rop__', '__ror__', '__rpow__',\n '__rrshift__', '__rshift__', '__rsub__', 
'__rtruediv__', '__rxor__',\n '__set__', '__setattr__', '__setitem__', '__setslice__', '__str__',\n '__sub__', '__subclasscheck__', '__truediv__', '__unicode__', '__xor__')"], {'suffix': '"""\\\\b"""'}), "(('__abs__', '__add__', '__and__', '__call__', '__cmp__', '__coerce__',\n '__complex__', '__contains__', '__del__', '__delattr__', '__delete__',\n '__delitem__', '__delslice__', '__div__', '__divmod__', '__enter__',\n '__eq__', '__exit__', '__float__', '__floordiv__', '__ge__', '__get__',\n '__getattr__', '__getattribute__', '__getitem__', '__getslice__',\n '__gt__', '__hash__', '__hex__', '__iadd__', '__iand__', '__idiv__',\n '__ifloordiv__', '__ilshift__', '__imod__', '__imul__', '__index__',\n '__init__', '__instancecheck__', '__int__', '__invert__', '__iop__',\n '__ior__', '__ipow__', '__irshift__', '__isub__', '__iter__',\n '__itruediv__', '__ixor__', '__le__', '__len__', '__long__',\n '__lshift__', '__lt__', '__missing__', '__mod__', '__mul__', '__ne__',\n '__neg__', '__new__', '__nonzero__', '__oct__', '__op__', '__or__',\n '__pos__', '__pow__', '__radd__', '__rand__', '__rcmp__', '__rdiv__',\n '__rdivmod__', '__repr__', '__reversed__', '__rfloordiv__',\n '__rlshift__', '__rmod__', '__rmul__', '__rop__', '__ror__', '__rpow__',\n '__rrshift__', '__rshift__', '__rsub__', '__rtruediv__', '__rxor__',\n '__set__', '__setattr__', '__setitem__', '__setslice__', '__str__',\n '__sub__', '__subclasscheck__', '__truediv__', '__unicode__', '__xor__'\n ), suffix='\\\\b')\n", (6330, 7649), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n'), ((7971, 8229), 'pygments.lexer.words', 'words', (["('__bases__', '__class__', '__closure__', '__code__', '__defaults__',\n '__dict__', '__doc__', '__file__', '__func__', '__globals__',\n '__metaclass__', '__module__', '__mro__', '__name__', '__self__',\n '__slots__', '__weakref__')"], {'suffix': '"""\\\\b"""'}), "(('__bases__', '__class__', '__closure__', '__code__', '__defaults__',\n '__dict__', '__doc__', '__file__', '__func__', '__globals__',\n '__metaclass__', '__module__', '__mro__', '__name__', '__self__',\n '__slots__', '__weakref__'), suffix='\\\\b')\n", (7976, 8229), False, 'from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, default, words, combined, do_insertions\n')]
#!/usr/bin/env python # -*- coding: utf-8 -*- """ test_pymspack ---------------------------------- Tests for `pymspack` module. """ import pymspack import tempfile def test_module(): assert pymspack def test_cabfile(): cab = pymspack.CabFile() assert cab def test_infolist(): import os.path cab = pymspack.CabFile(os.path.join(os.path.dirname(__file__), 'data', '000.cab')) infos = cab.infolist() assert isinstance(infos, list) assert len(infos) == 4 for info in infos: assert isinstance(info, pymspack.CabInfo) assert isinstance(info.date_time, tuple) assert len(info.date_time) == 6 assert info.file_size > 0 cab.close() def test_extract(): import os.path import hashlib import shutil cab = pymspack.CabFile(os.path.join(os.path.dirname(__file__), 'data', '000.cab')) temp_dir = None try: temp_dir = tempfile.mkdtemp() assert os.path.exists(temp_dir) cab.extract('WERInternalMetadata.xml', os.path.join(temp_dir, 'WERInternalMetadata.xml')) h = hashlib.new('md5') with open(os.path.join(temp_dir, 'WERInternalMetadata.xml'), 'rb') as f: h.update(f.read()) finally: if temp_dir and os.path.isdir(temp_dir): shutil.rmtree(temp_dir) assert h.hexdigest() == '9ed41cedad7b3b3d0a55a4e0cf334323' cab.close() def test_extract_with(): import os.path import hashlib import shutil with pymspack.CabFile(os.path.join(os.path.dirname(__file__), 'data', '000.cab')) as cab: temp_dir = None try: temp_dir = tempfile.mkdtemp() cab.extract('WERInternalMetadata.xml', os.path.join(temp_dir, 'WERInternalMetadata.xml')) h = hashlib.new('md5') with open(os.path.join(temp_dir, 'WERInternalMetadata.xml'), 'rb') as f: h.update(f.read()) finally: if temp_dir and os.path.isdir(temp_dir): shutil.rmtree(temp_dir) assert h.hexdigest() == '9ed41cedad7b3b3d0a55a4e0cf334323'
[ "shutil.rmtree", "pymspack.CabFile", "tempfile.mkdtemp", "hashlib.new" ]
[((239, 257), 'pymspack.CabFile', 'pymspack.CabFile', ([], {}), '()\n', (255, 257), False, 'import pymspack\n'), ((916, 934), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (932, 934), False, 'import tempfile\n'), ((1085, 1103), 'hashlib.new', 'hashlib.new', (['"""md5"""'], {}), "('md5')\n", (1096, 1103), False, 'import hashlib\n'), ((1290, 1313), 'shutil.rmtree', 'shutil.rmtree', (['temp_dir'], {}), '(temp_dir)\n', (1303, 1313), False, 'import shutil\n'), ((1630, 1648), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (1646, 1648), False, 'import tempfile\n'), ((1767, 1785), 'hashlib.new', 'hashlib.new', (['"""md5"""'], {}), "('md5')\n", (1778, 1785), False, 'import hashlib\n'), ((1992, 2015), 'shutil.rmtree', 'shutil.rmtree', (['temp_dir'], {}), '(temp_dir)\n', (2005, 2015), False, 'import shutil\n')]
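# --- Added hedged sketch mirroring the tests above ("archive.cab" is a
# placeholder path; CabFile, infolist, the date_time/file_size attributes and
# the context-manager form are all exercised by the tests).
import pymspack

with pymspack.CabFile("archive.cab") as cab:
    for info in cab.infolist():
        print(info.date_time, info.file_size)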
############################################ #Treatment ############################################ import arcpy ##T1## #SA of 300m was identified from existing TX, given they do not reach injection TX. If they reach an injection TX, the SA stops. #Set local variables district = "NAME OF DISTRICT" LV_lines = "PATH TO LV LINES" Existing_TX = "PATH TO EXISTING TX LAYER" Injection_TX = "PATH TO INJECTION TX LAYER" Feeders = "PATH TO FEEDERS LAYER" network = "PATH TO NETWORK DATASET OF DISTRICT" distance_from_line = "25" district_siteID_range = 200 #existing TX service area Existing_TX_SA = arcpy.na.MakeServiceAreaLayer (network, "Existing_TX Service Area", "Length", "TRAVEL_FROM", "300" , "DETAILED_POLYS", "MERGE" , "DISKS", "NO_LINES" , "NON_OVERLAP", "NO_SPLIT", Feeders , "", "", "", "TRIM_POLYS", distance_from_line) Existing_TX_SA = Existing_TX_SA.getOutput(0) #Load Elements arcpy.na.AddLocations (Existing_TX_SA, "Facilities", Existing_TX, "field mapping", "25 Meters") arcpy.na.AddLocations (Existing_TX_SA, "Point Barriers", Injection_TX, "field mapping", "25 Meters") #Solve arcpy.na.Solve(Existing_TX_SA,"SKIP") #CHECK FOR ERRORS #Export polygon layer Existing_TX_Polygons = arcpy.SelectData_management (Existing_TX_SA, "Polygons") ##T2## #SA of 200m was drawn from the injection TX sites. #Setting existing TX SA as a barrier prevents the SA made in the previous step from being crossed or considered as a valid injection TX SA. #This method allows to include LV lines that extend from the injection TX, are terminal, #and can be most directly attributed to the injection TX to be prioritized as treatment sites. Treatment_SA = arcpy.na.MakeServiceAreaLayer (network, district+"_Treatment_SA", "Length", "TRAVEL_FROM", "200" , "DETAILED_POLYS", "NO_MERGE" , "DISKS", "NO_LINES" , "NON_OVERLAP", "NO_SPLIT", Feeders , "", "", "", "TRIM_POLYS", distance_from_line) Treatment_SA = Treatment_SA.getOutput(0) #Load Elements arcpy.na.AddLocations (Treatment_SA, "Facilities", Injection_TX, "", "25 Meters") arcpy.na.AddLocations (Treatment_SA, "Polygon Barriers", Existing_TX_Polygons, "", "25 Meters") #Solve arcpy.na.Solve(Treatment_SA,"SKIP") #CHECK FOR ERRORS #Save Injection transformers which were restricted by existing transformers Injection_TX_restricted = arcpy.SelectData_management (Treatment_SA, "Facilities") Injection_TX_restricted = arcpy.SelectLayerByAttribute_management (Injection_TX_restricted, "NEW_SELECTION",'"Status"= 3' ) arcpy.MakeFeatureLayer_management(Injection_TX_restricted,"Injection_TX_restricted") #Export polygon layer Treatment_Polygons = arcpy.SelectData_management (Treatment_SA, "Polygons") Treatment_Polygons = Treatment_Polygons.getOutput(0) Treatment_Polygons_withArea = arcpy.CalculateAreas_stats (Treatment_Polygons, "Treatment_Polygons") #arcpy.MakeFeatureLayer_management(Treatment_Polygons_withArea,"Treatment_Polygons_Standard",'"F_AREA" >= 10000' ) arcpy.MakeFeatureLayer_management(Treatment_Polygons_withArea,"Treatment_Polygons_Small",'"F_AREA" <= 10000' ) ##T3## #It is possible that the resulting polygon from the previous step is <10,000 sq meters or #if an injection TX is completely engulfed in existing TX SA. #In this case, a new SA is drawn extending 150m from the injection TX in all possible directions. #This was the case with 3 treatment sites in Dansoman. 
#Create the standard-size layer unconditionally so the final merge always has
#valid inputs (in the original branching, one of the two merge inputs could be
#undefined and raise a NameError).
Treatment_Polygons_Standard = arcpy.MakeFeatureLayer_management(Treatment_Polygons_withArea, "Treatment_Polygons_Standard", '"F_AREA" >= 10000')
treatment_layers = [Treatment_Polygons_Standard]

if int(arcpy.management.GetCount('Treatment_Polygons_Small')[0]) > 0:
    Injection_TX_Alt = arcpy.SelectLayerByLocation_management(Injection_TX, "INTERSECT", 'Treatment_Polygons_Small', "15 Meters")
    arcpy.MakeFeatureLayer_management(Injection_TX_Alt, "Injection_TX_Alt")
    #Injection_TX_Alt = Injection_TX_Alt.getOutput(0)
    Treatment_Alt_SA = arcpy.na.MakeServiceAreaLayer(network, district+"_Treatment_Alt_SA", "Length", "TRAVEL_FROM", "150", "DETAILED_POLYS", "NO_MERGE", "DISKS", "NO_LINES", "NON_OVERLAP", "NO_SPLIT", Feeders, "", "", "", "TRIM_POLYS", distance_from_line)
    #Treatment_Alt_SA = Treatment_Alt_SA.getOutput(0)
    arcpy.na.AddLocations(Treatment_Alt_SA, "Facilities", Injection_TX_Alt, "", "25 Meters")
    arcpy.na.AddLocations(Treatment_Alt_SA, "Facilities", Injection_TX_restricted, "", "25 Meters")
    arcpy.na.Solve(Treatment_Alt_SA, "SKIP", "CONTINUE") #CHECK FOR ERRORS
    #Select the polygons from the alternative service area (the original code
    #read them from Treatment_SA, the earlier layer, which looks like a copy-paste slip).
    Treatment_Polygons_Alt = arcpy.SelectData_management(Treatment_Alt_SA, "Polygons")
    Treatment_Polygons_Alt = arcpy.MakeFeatureLayer_management(Treatment_Polygons_Alt, "Treatment_Polygons_Alt")
    #arcpy.SaveToLayerFile_management (Treatment_Polygons_Alt, "Treatment_Polygons_Alt")
    treatment_layers.append(Treatment_Polygons_Alt)

##T4##
#The result is area outlines either containing or directly adjacent to an Injection TX.
#Centroid coordinates are given to each site for field staff to locate each site.

#Collect all sites into 1 layer
arcpy.Merge_management(treatment_layers, "Treatment_Sites")
##Visually inspect results. Check that all requirements are met; if not, create polygons for outliers manually.

#Calculate site centroids
arcpy.AddGeometryAttributes_management('Treatment_Sites', "CENTROID")

#Adding Site IDs may be done manually
#arcpy.AddField_management('Treatment_Sites', "Site_ID", "TEXT", "", "", 50, "", "", "", "")
#arcpy.CalculateField_management('Treatment_Sites', "Site_ID", '!OBJECTID' + district_siteID_range, "PYTHON", "")

arcpy.FeatureClassToShapefile('Treatment_Sites', "C:/PATH/folder")

###########END##########
[ "arcpy.management.GetCount", "arcpy.CalculateAreas_stats", "arcpy.Merge_management", "arcpy.SelectLayerByLocation_management", "arcpy.SelectData_management", "arcpy.na.Solve", "arcpy.AddGeometryAttributes_management", "arcpy.MakeFeatureLayer_management", "arcpy.na.AddLocations", "arcpy.SelectLayerByAttribute_management", "arcpy.na.MakeServiceAreaLayer", "arcpy.FeatureClassToShapefile" ]
[((603, 843), 'arcpy.na.MakeServiceAreaLayer', 'arcpy.na.MakeServiceAreaLayer', (['network', '"""Existing_TX Service Area"""', '"""Length"""', '"""TRAVEL_FROM"""', '"""300"""', '"""DETAILED_POLYS"""', '"""MERGE"""', '"""DISKS"""', '"""NO_LINES"""', '"""NON_OVERLAP"""', '"""NO_SPLIT"""', 'Feeders', '""""""', '""""""', '""""""', '"""TRIM_POLYS"""', 'distance_from_line'], {}), "(network, 'Existing_TX Service Area', 'Length',\n 'TRAVEL_FROM', '300', 'DETAILED_POLYS', 'MERGE', 'DISKS', 'NO_LINES',\n 'NON_OVERLAP', 'NO_SPLIT', Feeders, '', '', '', 'TRIM_POLYS',\n distance_from_line)\n", (632, 843), False, 'import arcpy\n'), ((898, 996), 'arcpy.na.AddLocations', 'arcpy.na.AddLocations', (['Existing_TX_SA', '"""Facilities"""', 'Existing_TX', '"""field mapping"""', '"""25 Meters"""'], {}), "(Existing_TX_SA, 'Facilities', Existing_TX,\n 'field mapping', '25 Meters')\n", (919, 996), False, 'import arcpy\n'), ((994, 1097), 'arcpy.na.AddLocations', 'arcpy.na.AddLocations', (['Existing_TX_SA', '"""Point Barriers"""', 'Injection_TX', '"""field mapping"""', '"""25 Meters"""'], {}), "(Existing_TX_SA, 'Point Barriers', Injection_TX,\n 'field mapping', '25 Meters')\n", (1015, 1097), False, 'import arcpy\n'), ((1103, 1141), 'arcpy.na.Solve', 'arcpy.na.Solve', (['Existing_TX_SA', '"""SKIP"""'], {}), "(Existing_TX_SA, 'SKIP')\n", (1117, 1141), False, 'import arcpy\n'), ((1205, 1260), 'arcpy.SelectData_management', 'arcpy.SelectData_management', (['Existing_TX_SA', '"""Polygons"""'], {}), "(Existing_TX_SA, 'Polygons')\n", (1232, 1260), False, 'import arcpy\n'), ((1665, 1908), 'arcpy.na.MakeServiceAreaLayer', 'arcpy.na.MakeServiceAreaLayer', (['network', "(district + '_Treatment_SA')", '"""Length"""', '"""TRAVEL_FROM"""', '"""200"""', '"""DETAILED_POLYS"""', '"""NO_MERGE"""', '"""DISKS"""', '"""NO_LINES"""', '"""NON_OVERLAP"""', '"""NO_SPLIT"""', 'Feeders', '""""""', '""""""', '""""""', '"""TRIM_POLYS"""', 'distance_from_line'], {}), "(network, district + '_Treatment_SA', 'Length',\n 'TRAVEL_FROM', '200', 'DETAILED_POLYS', 'NO_MERGE', 'DISKS', 'NO_LINES',\n 'NON_OVERLAP', 'NO_SPLIT', Feeders, '', '', '', 'TRIM_POLYS',\n distance_from_line)\n", (1694, 1908), False, 'import arcpy\n'), ((1958, 2043), 'arcpy.na.AddLocations', 'arcpy.na.AddLocations', (['Treatment_SA', '"""Facilities"""', 'Injection_TX', '""""""', '"""25 Meters"""'], {}), "(Treatment_SA, 'Facilities', Injection_TX, '', '25 Meters'\n )\n", (1979, 2043), False, 'import arcpy\n'), ((2040, 2138), 'arcpy.na.AddLocations', 'arcpy.na.AddLocations', (['Treatment_SA', '"""Polygon Barriers"""', 'Existing_TX_Polygons', '""""""', '"""25 Meters"""'], {}), "(Treatment_SA, 'Polygon Barriers',\n Existing_TX_Polygons, '', '25 Meters')\n", (2061, 2138), False, 'import arcpy\n'), ((2144, 2180), 'arcpy.na.Solve', 'arcpy.na.Solve', (['Treatment_SA', '"""SKIP"""'], {}), "(Treatment_SA, 'SKIP')\n", (2158, 2180), False, 'import arcpy\n'), ((2301, 2356), 'arcpy.SelectData_management', 'arcpy.SelectData_management', (['Treatment_SA', '"""Facilities"""'], {}), "(Treatment_SA, 'Facilities')\n", (2328, 2356), False, 'import arcpy\n'), ((2384, 2484), 'arcpy.SelectLayerByAttribute_management', 'arcpy.SelectLayerByAttribute_management', (['Injection_TX_restricted', '"""NEW_SELECTION"""', '""""Status"= 3"""'], {}), '(Injection_TX_restricted,\n \'NEW_SELECTION\', \'"Status"= 3\')\n', (2423, 2484), False, 'import arcpy\n'), ((2482, 2571), 'arcpy.MakeFeatureLayer_management', 'arcpy.MakeFeatureLayer_management', (['Injection_TX_restricted', '"""Injection_TX_restricted"""'], {}), 
"(Injection_TX_restricted,\n 'Injection_TX_restricted')\n", (2515, 2571), False, 'import arcpy\n'), ((2612, 2665), 'arcpy.SelectData_management', 'arcpy.SelectData_management', (['Treatment_SA', '"""Polygons"""'], {}), "(Treatment_SA, 'Polygons')\n", (2639, 2665), False, 'import arcpy\n'), ((2750, 2818), 'arcpy.CalculateAreas_stats', 'arcpy.CalculateAreas_stats', (['Treatment_Polygons', '"""Treatment_Polygons"""'], {}), "(Treatment_Polygons, 'Treatment_Polygons')\n", (2776, 2818), False, 'import arcpy\n'), ((2938, 3053), 'arcpy.MakeFeatureLayer_management', 'arcpy.MakeFeatureLayer_management', (['Treatment_Polygons_withArea', '"""Treatment_Polygons_Small"""', '""""F_AREA" <= 10000"""'], {}), '(Treatment_Polygons_withArea,\n \'Treatment_Polygons_Small\', \'"F_AREA" <= 10000\')\n', (2971, 3053), False, 'import arcpy\n'), ((4905, 5006), 'arcpy.Merge_management', 'arcpy.Merge_management', (['[Treatment_Polygons_Standard, Treatment_Polygons_Alt]', '"""Treatment_Sites"""'], {}), "([Treatment_Polygons_Standard, Treatment_Polygons_Alt\n ], 'Treatment_Sites')\n", (4927, 5006), False, 'import arcpy\n'), ((5155, 5224), 'arcpy.AddGeometryAttributes_management', 'arcpy.AddGeometryAttributes_management', (['"""Treatment_Sites"""', '"""CENTROID"""'], {}), "('Treatment_Sites', 'CENTROID')\n", (5193, 5224), False, 'import arcpy\n'), ((5465, 5531), 'arcpy.FeatureClassToShapefile', 'arcpy.FeatureClassToShapefile', (['"""Treatment_Sites"""', '"""C:/PATH/folder"""'], {}), "('Treatment_Sites', 'C:/PATH/folder')\n", (5494, 5531), False, 'import arcpy\n'), ((3462, 3572), 'arcpy.SelectLayerByLocation_management', 'arcpy.SelectLayerByLocation_management', (['Injection_TX', '"""INTERSECT"""', '"""Treatment_Polygons_Small"""', '"""15 Meters"""'], {}), "(Injection_TX, 'INTERSECT',\n 'Treatment_Polygons_Small', '15 Meters')\n", (3500, 3572), False, 'import arcpy\n'), ((3571, 3642), 'arcpy.MakeFeatureLayer_management', 'arcpy.MakeFeatureLayer_management', (['Injection_TX_Alt', '"""Injection_TX_Alt"""'], {}), "(Injection_TX_Alt, 'Injection_TX_Alt')\n", (3604, 3642), False, 'import arcpy\n'), ((3713, 3960), 'arcpy.na.MakeServiceAreaLayer', 'arcpy.na.MakeServiceAreaLayer', (['network', "(district + '_Treatment_Alt_SA')", '"""Length"""', '"""TRAVEL_FROM"""', '"""150"""', '"""DETAILED_POLYS"""', '"""NO_MERGE"""', '"""DISKS"""', '"""NO_LINES"""', '"""NON_OVERLAP"""', '"""NO_SPLIT"""', 'Feeders', '""""""', '""""""', '""""""', '"""TRIM_POLYS"""', 'distance_from_line'], {}), "(network, district + '_Treatment_Alt_SA',\n 'Length', 'TRAVEL_FROM', '150', 'DETAILED_POLYS', 'NO_MERGE', 'DISKS',\n 'NO_LINES', 'NON_OVERLAP', 'NO_SPLIT', Feeders, '', '', '',\n 'TRIM_POLYS', distance_from_line)\n", (3742, 3960), False, 'import arcpy\n'), ((4004, 4096), 'arcpy.na.AddLocations', 'arcpy.na.AddLocations', (['Treatment_Alt_SA', '"""Facilities"""', 'Injection_TX_Alt', '""""""', '"""25 Meters"""'], {}), "(Treatment_Alt_SA, 'Facilities', Injection_TX_Alt, '',\n '25 Meters')\n", (4025, 4096), False, 'import arcpy\n'), ((4095, 4194), 'arcpy.na.AddLocations', 'arcpy.na.AddLocations', (['Treatment_Alt_SA', '"""Facilities"""', 'Injection_TX_restricted', '""""""', '"""25 Meters"""'], {}), "(Treatment_Alt_SA, 'Facilities',\n Injection_TX_restricted, '', '25 Meters')\n", (4116, 4194), False, 'import arcpy\n'), ((4193, 4245), 'arcpy.na.Solve', 'arcpy.na.Solve', (['Treatment_Alt_SA', '"""SKIP"""', '"""CONTINUE"""'], {}), "(Treatment_Alt_SA, 'SKIP', 'CONTINUE')\n", (4207, 4245), False, 'import arcpy\n'), ((4290, 4343), 'arcpy.SelectData_management', 
'arcpy.SelectData_management', (['Treatment_SA', '"""Polygons"""'], {}), "(Treatment_SA, 'Polygons')\n", (4317, 4343), False, 'import arcpy\n'), ((4371, 4458), 'arcpy.MakeFeatureLayer_management', 'arcpy.MakeFeatureLayer_management', (['Treatment_Polygons_Alt', '"""Treatment_Polygons_Alt"""'], {}), "(Treatment_Polygons_Alt,\n 'Treatment_Polygons_Alt')\n", (4404, 4458), False, 'import arcpy\n'), ((4580, 4698), 'arcpy.MakeFeatureLayer_management', 'arcpy.MakeFeatureLayer_management', (['Treatment_Polygons_withArea', '"""Treatment_Polygons_Standard"""', '""""F_AREA" >= 10000"""'], {}), '(Treatment_Polygons_withArea,\n \'Treatment_Polygons_Standard\', \'"F_AREA" >= 10000\')\n', (4613, 4698), False, 'import arcpy\n'), ((3373, 3426), 'arcpy.management.GetCount', 'arcpy.management.GetCount', (['"""Treatment_Polygons_Small"""'], {}), "('Treatment_Polygons_Small')\n", (3398, 3426), False, 'import arcpy\n')]
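Only the call tuples of this arcpy record survive, not the script itself; a minimal, hypothetical sketch of the captured service-area workflow (MakeServiceAreaLayer, then AddLocations, Solve, and SelectData) follows. The network dataset path and layer names are placeholders, not values recovered from the record:

import arcpy

# Placeholder inputs: point these at a real network dataset and feature class.
network = "C:/data/utilities.gdb/Feeders_ND"
facilities = "Existing_TX"

# Build the analysis layer, load facilities, solve, then pull out the polygons.
arcpy.na.MakeServiceAreaLayer(network, "Existing_TX Service Area", "Length",
                              "TRAVEL_FROM", "300", "DETAILED_POLYS", "MERGE",
                              "DISKS", "NO_LINES", "NON_OVERLAP", "NO_SPLIT")
arcpy.na.AddLocations("Existing_TX Service Area", "Facilities", facilities,
                      "", "25 Meters")
arcpy.na.Solve("Existing_TX Service Area", "SKIP")
polygons = arcpy.SelectData_management("Existing_TX Service Area", "Polygons")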
# -*- coding: utf-8 -*-
from . import __version__
from intake.source.base import DataSource
from .rolling_store import OffSetS3Map
import xarray


def maybe_to_iris(ds):
    # A dataset with a single variable is handed over as an iris cube.
    if len(ds.data_vars) == 1:
        return ds[list(ds.data_vars)[0]].to_iris()
    return ds


class RollingZarrSource(DataSource):
    """Common behaviours for plugins in this repo"""
    name = 'rolling_zarr'
    version = __version__
    container = 'xarray'
    partition_access = True

    def __init__(self, url=None, temp_chunk_path=None, metadata=None):
        """
        Parameters
        ----------
        url : str
            The S3 url of the location which contains the manifest files.
        temp_chunk_path : str, optional
            Local path handed to OffSetS3Map for temporary chunk storage.
        """
        if not url.startswith('s3://'):
            raise ValueError('url must be a valid s3 url starting s3://')
        url = url[len('s3://'):]
        self._url = url
        self._temp_chunk_path = temp_chunk_path
        self._ds = None
        super(RollingZarrSource, self).__init__(metadata=metadata)

    def read(self):
        if not self._ds:
            store = OffSetS3Map(root=self._url,
                                temp_chunk_path=self._temp_chunk_path,
                                check=False)
            self._ds = xarray.open_zarr(store)
        return maybe_to_iris(self._ds)

    def to_dask(self):
        # open_zarr loads lazily, so the dataset is already dask-backed.
        if not self._ds:
            self.read()
        return self._ds

    def _close(self):
        self._ds = None
[ "xarray.open_zarr" ]
[((1159, 1182), 'xarray.open_zarr', 'xarray.open_zarr', (['store'], {}), '(store)\n', (1175, 1182), False, 'import xarray\n')]
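A hedged usage sketch for the source above; the package name, bucket and chunk path are invented for illustration:

from intake_rolling_zarr import RollingZarrSource  # hypothetical package name

src = RollingZarrSource(url="s3://my-bucket/manifests",    # placeholder bucket
                        temp_chunk_path="/tmp/chunks")  # placeholder path
data = src.read()  # xarray.Dataset, or an iris cube if only one data_var
src.close()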
# -*- coding: utf-8 -*-
"""
Functions relating velocity trend extrapolation
"""
from __future__ import (absolute_import, division,
                        print_function, unicode_literals)

__author__ = "yuhao"

import numpy as np

from pygeopressure.basic.well_log import Log
# from ..well_log import Log

v0 = 1600  # take values larger than 1500


def set_v0(v):
    """
    set global variable v0 for slotnick()
    """
    global v0
    v0 = v


def normal(x, a, b):
    r"""
    Extrapolate velocity using normal trend.

    Parameters
    ----------
    x : 1-d ndarray
        depth to convert
    a, b : scalar
        coefficients

    Returns
    -------
    out : 1-d ndarray
        estimated velocity

    Notes
    -----
    .. math:: \log dt_{Normal} = a - bz

    is transformed to

    .. math:: v = e^{bz - a}

    **Note** that the exponential relation is unphysical, especially at
    depths below the interval within which the equation is calibrated.

    References
    ----------
    .. [1] <NAME>, <NAME>, and others, "Estimation of formation pressures
       from log-derived shale properties," Journal of Petroleum Technology,
       vol. 17, no. 6, pp. 717-722, 1965.
    """
    return np.exp(x*b - a)


def normal_log(vel_log, a, b):
    """
    Compute the normal velocity trend along the depth samples of a log.

    Parameters
    ----------
    vel_log : Log
        velocity log whose depth samples are used
    a, b : scalar
        coefficients of the normal trend

    Returns
    -------
    Log
        normal velocity log
    """
    normal_vel = normal(np.array(vel_log.depth), a, b)
    mask = np.isnan(np.array(vel_log.data))
    normal_vel[mask] = np.nan
    log = Log()
    log.depth = np.array(vel_log.depth)
    log.data = normal_vel
    log.name = 'normal_vel_log'
    log.descr = "Velocity_normal"
    log.units = "m/s"
    return log


def slotnick(x, k):
    """
    Linear relation between velocity and depth

    Parameters
    ----------
    x : 1-d ndarray
        depth to convert
    k : scalar
        velocity gradient

    Notes
    -----
    Typical values of the velocity gradient k fall in the range 0.6-1.0 s^-1.

    References
    ----------
    .. [1] <NAME>, "On seismic computations, with applications, I,"
       Geophysics, vol. 1, no. 1, pp. 9-22, 1936.
    """
    global v0
    return v0 + k*x


def normal_dt(x, a, b):
    """
    Normal trend of transit time

    Parameters
    ----------
    x : 1-d ndarray
        depth to convert
    a, b : scalar
        coefficients of the normal trend
    """
    return a - b * x
[ "numpy.array", "numpy.exp", "pygeopressure.basic.well_log.Log" ]
[((1209, 1226), 'numpy.exp', 'np.exp', (['(x * b - a)'], {}), '(x * b - a)\n', (1215, 1226), True, 'import numpy as np\n'), ((1474, 1479), 'pygeopressure.basic.well_log.Log', 'Log', ([], {}), '()\n', (1477, 1479), False, 'from pygeopressure.basic.well_log import Log\n'), ((1496, 1519), 'numpy.array', 'np.array', (['vel_log.depth'], {}), '(vel_log.depth)\n', (1504, 1519), True, 'import numpy as np\n'), ((1359, 1382), 'numpy.array', 'np.array', (['vel_log.depth'], {}), '(vel_log.depth)\n', (1367, 1382), True, 'import numpy as np\n'), ((1410, 1432), 'numpy.array', 'np.array', (['vel_log.data'], {}), '(vel_log.data)\n', (1418, 1432), True, 'import numpy as np\n')]
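A short numeric check of the two trends above. The import path is assumed, and the coefficients are illustrative rather than calibrated values:

import numpy as np
from pygeopressure.velocity.extrapolate import normal, set_v0, slotnick  # assumed path

depth = np.linspace(0, 3000, 4)           # metres
v_exp = normal(depth, a=-7.3, b=1.0e-4)   # exp(b*z - a): ~1480 to ~2000 m/s
set_v0(1600)
v_lin = slotnick(depth, k=0.8)            # v0 + k*z: 1600 to 4000 m/s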
#python # Quixel stuff from here: # https://github.com/Quixel/Bridge-Python-Plugin import json, sys, socket, time, threading import lx import lxifc import lxu try: #py3 import queue as q except ImportError: import Queue as q import modo com_listener = None g_bNewMeshAdded = False g_newMaskAdded = False g_meshNames = [] g_matGroupsAdded = [] callback_queue = q.Queue() host, port = '127.0.0.1', 24981 # The port number here is just an arbitrary number that's > 20000 threadServer = None #Background_Server is driven from Thread class in order to make it run in the background. class ms_Init(threading.Thread): #Initialize the thread and assign the method (i.e. importer) to be called when it receives JSON data. def __init__(self, importer): threading.Thread.__init__(self) self.importer = importer self.stopThread = False #Start the thread to start listing to the port. def run(self): #Adding a little delay to the thread doesn't get called in an infinite loop. time.sleep(0.1) try: #Making a socket object. socket_ = socket.socket(socket.AF_INET, socket.SOCK_STREAM) #Binding the socket to host and port number mentioned at the start. socket_.bind((host, port)) #Run until the thread starts receiving data. while True: if self.stopThread: return socket_.listen(5) #Accept connection request. client, addr = socket_.accept() data = "" buffer_size = 4096*2 #Receive data from the client. data = client.recv(buffer_size) #If any data is received over the port. if data != "": self.TotalData = b"" self.TotalData += data #Append the previously received data to the Total Data. #Keep running until the connection is open and we are receiving data. while True: if self.stopThread: return #Keep receiving data from client. data = client.recv(4096*2) #if we are getting data keep appending it to the Total data. if data : self.TotalData += data else: #Once the data transmission is over call the importer method and send the collected TotalData. self.importer(self.TotalData) break except: pass def ms_asset_importer (imported_data): try: #Array of assets in case of Batch export. imported_assets_array = [] #Parsing JSON data that we received earlier. json_array = json.loads(imported_data) #For each asset data in the received array of assets (Multiple in case of batch export) for jData in json_array: packed_textures_list = [] #Channel packed texture list. textures_list = [] #All of the other textures list. geo_list = [] #Geometry list will contain data about meshes and LODs. #Get and store textures in the textures_list. for item in jData['components']: if 'path' in item: textures_list.append([item['path'], item['type']]) #Get and store the geometry in the geo_list. for item in jData['meshList']: if 'path' in item: geo_list.append(item['path']) #Get and store the channel packed textures in the packed_texture_list. for item in jData['packedTextures']: if 'path' in item: packed_textures_list.append([item['path'], item['type']]) #Reading other variables from JSON data. export_ = dict({ "AssetID": jData['id'], "FolderPath": jData['path'], "MeshList": geo_list, "TextureList": textures_list, "packedTextures": packed_textures_list, "Resolution": jData['resolution'], "activeLOD": jData['activeLOD'] }) callback_queue.put(export_) #Exception handling. 
except Exception as e: print ("Failed") print('Error Line : {}'.format(sys.exc_info()[-1].tb_lineno), type(e).__name__, e) pass class doTheWork(lxifc.Visitor): def __init__(self): self.importData = None pass def vis_Evaluate(self): if self.importData is not None: bAddedMesh = False tSrv = lx.service.Thread() tSrv.InitThread() textstring = "" bDoMaterialMask = False bDoSelectMesh = False bDoImport = False if "NewMats" in self.importData: bDoMaterialMask = True if "SelectList" in self.importData: bDoSelectMesh = True if not bDoSelectMesh and not bDoMaterialMask: bDoImport = True # set the material mask to our new item. if bDoMaterialMask: selectedMeshes = modo.Scene().selectedByType("mesh") if len(selectedMeshes) > 0: meshName = selectedMeshes[0].name for material in self.importData["NewMats"]: lx.eval("select.item {%s}" % material) lx.eval("mask.setMesh {%s}" % meshName) if bDoSelectMesh: for mesh in self.importData["SelectList"]: lx.eval("select.item {%s}" % mesh) mMesh = modo.Mesh(mesh) allUVs = mMesh.geometry.vmaps.uvMaps[0].name lx.eval("vertMap.list txuv {%s}" % allUVs) if bDoImport: userSelectMesh = lx.eval("user.value quixelBridge.selectMesh ?") userSetMask = lx.eval("user.value quixelBridge.setMaskMesh ?") for texture in self.importData["TextureList"]: textstring = textstring + texture[0] + ";" # not supported in the pbr command in 14.1 for texture in self.importData["packedTextures"]: textstring = textstring + texture[0] + ";" global g_bNewMeshAdded if "MeshList" in self.importData and len(self.importData["MeshList"]) > 0: for mesh in self.importData["MeshList"]: g_bNewMeshAdded = True lx.eval("!!scene.open {%s} import" % mesh) bAddedMesh = True matCall = dict({"TextureList": self.importData["TextureList"], "packedTextures": self.importData["packedTextures"]}) global g_meshNames #interval = 3000 if userSelectMesh: if len(g_meshNames) > 0: selectCall = dict({ "SelectList": g_meshNames } ) callback_queue.put(selectCall) # we still want to try and bring in the materials callback_queue.put(matCall) g_meshNames = [] else: global g_matGroupsAdded, g_newMaskAdded g_newMaskAdded = True lx.eval("shader.loadPBR path:{%s}" % textstring) # If the user has turned off "select mesh", if they import a mesh, the mask gets set to whatever they had selected, which is probably undesirable. # So for the mask flag to work, select also should be on. if userSetMask and userSelectMesh: matCallback = dict({ "NewMats": g_matGroupsAdded } ) callback_queue.put(matCallback) g_matGroupsAdded = [] tSrv.CleanupThread() #g_bNewMeshAdded = False class visIdle (lxifc.Visitor): # checks to see if we have any new incoming things when user is idle, run on a timer. Currently every second, could be expanded. 
def __init__(self):
        pass

    def vis_Evaluate(self):
        pSrv = lx.service.Platform()
        callback = None
        global interval, stepInterval
        try:
            callback = callback_queue.get(False) #doesn't block
            interval = stepInterval
        except q.Empty:
            interval = mainInterval
            pass

        if callback is not None:
            idleVis = doTheWork()
            idleVis.importData = callback
            doTheWork_com = lx.object.Unknown(idleVis)
            pSrv.DoWhenUserIsIdle(doTheWork_com, lx.symbol.iUSERIDLE_ALWAYS)
        else:
            global g_bNewMeshAdded, g_meshNames, g_newMaskAdded
            g_meshNames = []
            g_bNewMeshAdded = False
            g_newMaskAdded = False

        # wait for next idle again
        myVis = self
        com_myVis = lx.object.Unknown(myVis)
        pSrv.TimerStart(com_myVis, interval, lx.symbol.iUSERIDLE_ALWAYS)

def StartThread():
    tSrv = lx.service.Thread()
    tSrv.InitThread()
    global threadServer
    threadServer = ms_Init(ms_asset_importer)
    threadServer.daemon = True
    threadServer.start()

    pSrv = lx.service.Platform()
    vis = visIdle()
    com_visitor = lx.object.Unknown(vis)
    pSrv.TimerStart(com_visitor, interval, lx.symbol.iUSERIDLE_ALWAYS)
    tSrv.CleanupThread()

def StopThread():
    global threadServer
    # Guard against "stop" being invoked before the bridge was started.
    if threadServer is not None:
        threadServer.stopThread = True
        threadServer = None
        print ("Stopping Quixel Bridge.")

class ItemAddedListener(lxifc.SceneItemListener):
    def sil_ItemAdd(self,item):
        global g_bNewMeshAdded, g_newMaskAdded
        global g_meshNames
        if g_bNewMeshAdded == True:
            myItem = modo.Item(item)
            if myItem.type == "mesh":
                g_meshNames.append(myItem.name)
        if g_newMaskAdded == True:
            myItem = modo.Item(item)
            if myItem.type == "mask":
                g_matGroupsAdded.append(myItem.name)

class StartBridgeCMD(lxu.command.BasicCommand):
    def __init__(self):
        lxu.command.BasicCommand.__init__(self)

    def cmd_Flags(self):
        return 0

    def basic_Enable(self, msg):
        return True

    def basic_Execute(self, msg, flags):
        print ("Starting Quixel Bridge")
        StartThread()
        listenerService = lx.service.Listener()
        MyListen = ItemAddedListener()
        global com_listener
        if com_listener is None:
            com_listener = lx.object.Unknown(MyListen)
            listenerService.AddListener(com_listener)
        return True

class StopBridgeCMD(lxu.command.BasicCommand):
    def __init__(self):
        lxu.command.BasicCommand.__init__(self)

    def cmd_Flags(self):
        return 0

    def basic_Enable(self, msg):
        return True

    def basic_Execute(self, msg, flags):
        StopThread()
        listenerService = lx.service.Listener()
        global com_listener
        if com_listener is not None:
            listenerService.RemoveListener(com_listener)
            com_listener = None  # allow the listener to be re-added on restart
        return True

lx.bless(StartBridgeCMD, "quixelBridge.start")
lx.bless(StopBridgeCMD, "quixelBridge.stop")

mainInterval = lx.eval("user.value quixelBridge.bridgeInterval ?")
stepInterval = lx.eval("user.value quixelBridge.bridgeStageInterval ?")
interval = mainInterval
[ "threading.Thread.__init__", "lx.object.Unknown", "json.loads", "lx.eval", "modo.Mesh", "lx.service.Platform", "Queue.Queue", "socket.socket", "lx.service.Thread", "time.sleep", "lx.bless", "lx.service.Listener", "lxu.command.BasicCommand.__init__", "modo.Item", "modo.Scene", "sys.exc_info" ]
[((370, 379), 'Queue.Queue', 'q.Queue', ([], {}), '()\n', (377, 379), True, 'import Queue as q\n'), ((9252, 9298), 'lx.bless', 'lx.bless', (['StartBridgeCMD', '"""quixelBridge.start"""'], {}), "(StartBridgeCMD, 'quixelBridge.start')\n", (9260, 9298), False, 'import lx\n'), ((9299, 9343), 'lx.bless', 'lx.bless', (['StopBridgeCMD', '"""quixelBridge.stop"""'], {}), "(StopBridgeCMD, 'quixelBridge.stop')\n", (9307, 9343), False, 'import lx\n'), ((9361, 9412), 'lx.eval', 'lx.eval', (['"""user.value quixelBridge.bridgeInterval ?"""'], {}), "('user.value quixelBridge.bridgeInterval ?')\n", (9368, 9412), False, 'import lx\n'), ((9428, 9484), 'lx.eval', 'lx.eval', (['"""user.value quixelBridge.bridgeStageInterval ?"""'], {}), "('user.value quixelBridge.bridgeStageInterval ?')\n", (9435, 9484), False, 'import lx\n'), ((7478, 7497), 'lx.service.Thread', 'lx.service.Thread', ([], {}), '()\n', (7495, 7497), False, 'import lx\n'), ((7642, 7663), 'lx.service.Platform', 'lx.service.Platform', ([], {}), '()\n', (7661, 7663), False, 'import lx\n'), ((7696, 7718), 'lx.object.Unknown', 'lx.object.Unknown', (['vis'], {}), '(vis)\n', (7713, 7718), False, 'import lx\n'), ((763, 794), 'threading.Thread.__init__', 'threading.Thread.__init__', (['self'], {}), '(self)\n', (788, 794), False, 'import json, sys, socket, time, threading\n'), ((999, 1014), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (1009, 1014), False, 'import json, sys, socket, time, threading\n'), ((2375, 2400), 'json.loads', 'json.loads', (['imported_data'], {}), '(imported_data)\n', (2385, 2400), False, 'import json, sys, socket, time, threading\n'), ((6745, 6766), 'lx.service.Platform', 'lx.service.Platform', ([], {}), '()\n', (6764, 6766), False, 'import lx\n'), ((7357, 7381), 'lx.object.Unknown', 'lx.object.Unknown', (['myVis'], {}), '(myVis)\n', (7374, 7381), False, 'import lx\n'), ((8414, 8453), 'lxu.command.BasicCommand.__init__', 'lxu.command.BasicCommand.__init__', (['self'], {}), '(self)\n', (8447, 8453), False, 'import lxu\n'), ((8640, 8661), 'lx.service.Listener', 'lx.service.Listener', ([], {}), '()\n', (8659, 8661), False, 'import lx\n'), ((8920, 8959), 'lxu.command.BasicCommand.__init__', 'lxu.command.BasicCommand.__init__', (['self'], {}), '(self)\n', (8953, 8959), False, 'import lxu\n'), ((9110, 9131), 'lx.service.Listener', 'lx.service.Listener', ([], {}), '()\n', (9129, 9131), False, 'import lx\n'), ((1066, 1115), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (1079, 1115), False, 'import json, sys, socket, time, threading\n'), ((3901, 3920), 'lx.service.Thread', 'lx.service.Thread', ([], {}), '()\n', (3918, 3920), False, 'import lx\n'), ((7067, 7093), 'lx.object.Unknown', 'lx.object.Unknown', (['idleVis'], {}), '(idleVis)\n', (7084, 7093), False, 'import lx\n'), ((8121, 8136), 'modo.Item', 'modo.Item', (['item'], {}), '(item)\n', (8130, 8136), False, 'import modo\n'), ((8246, 8261), 'modo.Item', 'modo.Item', (['item'], {}), '(item)\n', (8255, 8261), False, 'import modo\n'), ((8762, 8789), 'lx.object.Unknown', 'lx.object.Unknown', (['MyListen'], {}), '(MyListen)\n', (8779, 8789), False, 'import lx\n'), ((4851, 4898), 'lx.eval', 'lx.eval', (['"""user.value quixelBridge.selectMesh ?"""'], {}), "('user.value quixelBridge.selectMesh ?')\n", (4858, 4898), False, 'import lx\n'), ((4917, 4965), 'lx.eval', 'lx.eval', (['"""user.value quixelBridge.setMaskMesh ?"""'], {}), "('user.value quixelBridge.setMaskMesh ?')\n", (4924, 4965), False, 'import lx\n'), 
((4647, 4681), 'lx.eval', 'lx.eval', (["('select.item {%s}' % mesh)"], {}), "('select.item {%s}' % mesh)\n", (4654, 4681), False, 'import lx\n'), ((4695, 4710), 'modo.Mesh', 'modo.Mesh', (['mesh'], {}), '(mesh)\n', (4704, 4710), False, 'import modo\n'), ((4766, 4808), 'lx.eval', 'lx.eval', (["('vertMap.list txuv {%s}' % allUVs)"], {}), "('vertMap.list txuv {%s}' % allUVs)\n", (4773, 4808), False, 'import lx\n'), ((6014, 6062), 'lx.eval', 'lx.eval', (["('shader.loadPBR path:{%s}' % textstring)"], {}), "('shader.loadPBR path:{%s}' % textstring)\n", (6021, 6062), False, 'import lx\n'), ((4325, 4337), 'modo.Scene', 'modo.Scene', ([], {}), '()\n', (4335, 4337), False, 'import modo\n'), ((4487, 4525), 'lx.eval', 'lx.eval', (["('select.item {%s}' % material)"], {}), "('select.item {%s}' % material)\n", (4494, 4525), False, 'import lx\n'), ((4532, 4571), 'lx.eval', 'lx.eval', (["('mask.setMesh {%s}' % meshName)"], {}), "('mask.setMesh {%s}' % meshName)\n", (4539, 4571), False, 'import lx\n'), ((5403, 5445), 'lx.eval', 'lx.eval', (["('!!scene.open {%s} import' % mesh)"], {}), "('!!scene.open {%s} import' % mesh)\n", (5410, 5445), False, 'import lx\n'), ((3664, 3678), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (3676, 3678), False, 'import json, sys, socket, time, threading\n')]
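A minimal test client for the listener above. The payload mirrors the fields ms_asset_importer reads, but every value is invented:

import json
import socket

payload = [{
    "id": "asset_001",                                  # invented id
    "path": "C:/Quixel/exports/asset_001",              # invented folder
    "components": [{"path": "C:/Quixel/exports/asset_001/albedo.jpg",
                    "type": "albedo"}],
    "meshList": [{"path": "C:/Quixel/exports/asset_001/asset_001.fbx"}],
    "packedTextures": [],
    "resolution": "2048x2048",
    "activeLOD": "lod0",
}]

s = socket.create_connection(("127.0.0.1", 24981))
s.sendall(json.dumps(payload).encode("utf-8"))
s.close()  # closing the socket ends the recv loop and fires the importer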
import time

import numpy as np
import matplotlib.pyplot as plt

from test_farfield import make_meshes
from tectosaur.ops.sparse_integral_op import (
    SparseIntegralOp, RegularizedSparseIntegralOp)
from tectosaur.ops.dense_integral_op import RegularizedDenseIntegralOp
from tectosaur.ops.sparse_farfield_op import (
    TriToTriDirectFarfieldOp, PtToPtDirectFarfieldOp, PtToPtFMMFarfieldOp)
from tectosaur.ops.mass_op import MassOp
from tectosaur.ops.neg_op import MultOp
from tectosaur.ops.sum_op import SumOp
from tectosaur.nearfield.nearfield_op import any_nearfield
from tectosaur.util.test_decorators import kernel
from tectosaur.util.timer import Timer

def plot_fnc(m, surf1_idxs, surf2_idxs, x, outs):
    def plot_at_pts(idxs, f):
        pts_f = np.full(m[0].shape[0], np.nan)
        pts_f[m[1][idxs]] = f

        pts_f_not_nan = pts_f[np.logical_not(np.isnan(pts_f))]
        min_f = np.min(pts_f_not_nan)
        max_f = np.max(pts_f_not_nan)

        plt.figure()
        plt.tricontourf(
            m[0][:,0], m[0][:,2], m[1], pts_f,
            levels = np.linspace(min_f, max_f, 21),
            extend = 'both'
        )
        plt.colorbar()

    for d in range(3):
        plot_at_pts(surf2_idxs, x[:,d].reshape((-1,3)))
        for o in outs:
            plot_at_pts(surf1_idxs, o.reshape(-1,3,3)[:,:,d])
        plt.show()

def build_x_field(m, surf1_idxs, surf2_idxs):
    dof_pts = m[0][m[1][surf2_idxs]]
    dof_pts[:,:,1] -= dof_pts[0,0,1]

    def gaussian(a, b, c, x):
        return a * np.exp(-((x - b) ** 2) / (2 * c ** 2))

    dist = np.linalg.norm(dof_pts.reshape(-1,3), axis = 1)
    x = np.zeros((dof_pts.shape[0] * 3, 3))
    for d in range(3):
        x[:,d] = gaussian(0.1 * (d + 1), 0.0, 0.3, dist)

    return x

def x_ones_field(m, surf1_idxs, surf2_idxs):
    x = np.ones(surf2_idxs.shape[0] * 9).reshape((-1,3,3))

    from tectosaur.constraint_builders import find_free_edges
    free_edges = find_free_edges(m[1])
    first_tri_idx = np.min(surf2_idxs)
    last_tri_idx = np.max(surf2_idxs)
    for tri_idx, edge_idx in free_edges:
        if tri_idx < first_tri_idx or tri_idx > last_tri_idx:
            continue
        for v in range(2):
            pt_idx = m[1][tri_idx, (edge_idx + v) % 3]
            tris_touching = np.where(m[1] == pt_idx)
            x[tris_touching[0] - first_tri_idx, tris_touching[1], :] = 0.0
    return x

def regularized_tester(K, sep, continuity, mass_op_factor = 0.0, which = None):
    if which is None:
        raise Exception('select some operators!')

    n_m = 30
    full_K_name = f'elastic{K}3'
    full_RK_name = f'elasticR{K}3'
    m, surf1_idxs, surf2_idxs = make_meshes(n_m = n_m, sep = sep)
    if sep == 0.0:
        surf2_idxs = surf1_idxs

    near_threshold = 2.0
    nq_near = 5
    nq_far = 2

    if any_nearfield(m[0], m[1], surf1_idxs, surf2_idxs, near_threshold):
        nearfield = True
    else:
        nearfield = False

    def sparse_unregularized(far_op, Kn):
        return SparseIntegralOp(
            6, nq_far, nq_near, near_threshold, Kn, [1.0, 0.25],
            m[0], m[1], np.float32, farfield_op_type = far_op,
            obs_subset = surf1_idxs, src_subset = surf2_idxs
        )

    def change_K_tri_tri(to):
        def f(*args, to = to):
            args = list(args)
            args[1] = to
            return TriToTriDirectFarfieldOp(*args)
        return f

    def add_sparse_reg(farfield_K, farfield_type):
        ops.append(SumOp([
            RegularizedSparseIntegralOp(
                10, 10, 6, nq_far, nq_near, near_threshold,
                full_RK_name, farfield_K, [1.0, 0.25], m[0], m[1],
                np.float32, farfield_type,
                obs_subset = surf1_idxs, src_subset = surf2_idxs
            ),
            MultOp(MassOp(3, m[0], m[1][surf1_idxs]), mass_op_factor)
        ]))

    ops = [
        sparse_unregularized(PtToPtDirectFarfieldOp, full_K_name)
    ]

    if 'pt_to_pt_fmm' in which:
        ops.append(sparse_unregularized(PtToPtFMMFarfieldOp(150, 2.5, 5), full_K_name))

    if 'tri_farfield_regularized' in which:
        ops.append(sparse_unregularized(change_K_tri_tri(full_RK_name), full_K_name))

    if 'dense_regularized' in which:
        ops.append(SumOp([
            RegularizedDenseIntegralOp(
                10, 10, 6, nq_far, nq_near, near_threshold,
                full_RK_name, full_RK_name,
                [1.0, 0.25], m[0], m[1], np.float32,
                obs_subset = surf1_idxs, src_subset = surf2_idxs
            ),
            MultOp(MassOp(3, m[0], m[1][surf1_idxs]), mass_op_factor)
        ]))

    if 'sparse_regularized' in which:
        add_sparse_reg(full_RK_name, TriToTriDirectFarfieldOp)

    if 'sparse_regularized_fmm' in which:
        add_sparse_reg(full_K_name, PtToPtFMMFarfieldOp(150, 2.5, 5))

    if 'sparse_regularized_but_unregularized_far' in which:
        add_sparse_reg(full_K_name, change_K_tri_tri(full_K_name))

    print('built ops')

    x = build_x_field(m, surf1_idxs, surf2_idxs)
    x_flat = x.flatten()
    outs = [o.dot(x_flat) for o in ops]

    if continuity:
        from tectosaur.constraint_builders import continuity_constraints, \
            free_edge_constraints
        from tectosaur.constraints import build_constraint_matrix
        cs = continuity_constraints(m[1][surf1_idxs], np.array([]))
        cs.extend(free_edge_constraints(m[1][surf1_idxs]))
        cm, c_rhs = build_constraint_matrix(cs, outs[0].shape[0])
        final_outs = [cm.T.dot(v) for v in outs]
        plot_outs = [cm.dot(v) for v in final_outs]
    else:
        plot_outs = outs
        final_outs = outs

    should_plot = True
    if should_plot:
        plot_fnc(m, surf1_idxs, surf2_idxs, x, plot_outs)

    for i in range(len(final_outs)):
        for j in range(i + 1, len(final_outs)):
            print(i,j,final_outs[i] / final_outs[j])
            np.testing.assert_almost_equal(final_outs[i], final_outs[j], 6)

def test_regularized_T_farfield():
    regularized_tester('T', 2.0, False, which = ['tri_farfield_regularized'])

def test_regularized_A_farfield():
    regularized_tester('A', 2.0, True, which = ['tri_farfield_regularized'])

def test_regularized_H_farfield():
    regularized_tester('H', 4.0, True, which = ['tri_farfield_regularized'])

def test_regularized_T_nearfield():
    regularized_tester(
        'T', 0.4, False,
        which = ['dense_regularized', 'sparse_regularized']
    )

def test_regularized_A_nearfield():
    regularized_tester(
        'A', 0.4, True,
        which = ['dense_regularized', 'sparse_regularized']
    )

def test_regularized_H_nearfield():
    regularized_tester(
        'H', 0.4, True,
        which = ['dense_regularized', 'sparse_regularized']
    )

def test_regularized_T_self():
    regularized_tester(
        'T', 0.0, False, -0.5,
        which = [
            'dense_regularized',
            'sparse_regularized',
            # 'sparse_regularized_fmm',
            # 'sparse_regularized_but_unregularized_far'
        ]
    )

def test_regularized_A_self():
    regularized_tester(
        'A', 0.0, True, 0.5,
        which = ['dense_regularized', 'sparse_regularized']
    )

def test_regularized_H_self():
    regularized_tester(
        'H', 0.0, True,
        which = ['dense_regularized', 'sparse_regularized']
    )

def test_benchmark_far_tris():
    n = 100
    m, surf1_idxs, surf2_idxs = make_meshes(n_m = n, sep = 4.0)
    op = TriToTriDirectFarfieldOp(
        2, 'elasticRH3', [1.0, 0.25], m[0], m[1],
        np.float32, surf1_idxs, surf2_idxs
    )
    x = build_x_field(m, surf1_idxs, surf2_idxs)
    x_flat = x.flatten()
    op.dot(x_flat)

    import tectosaur, logging
    tectosaur.logger.setLevel(logging.INFO)

    n = 2
    for j in range(n):
        start = time.time()
        print(op.dot(x_flat)[0])
        took = time.time() - start
        print('op.dot took', took)

    total_interactions = surf1_idxs.shape[0] * surf2_idxs.shape[0]
    inter_per_sec = total_interactions / took
    print('total interactions', total_interactions)
    print('billion interactions/sec', inter_per_sec / 1e9)

if __name__ == "__main__":
    test_benchmark_far_tris()
[ "numpy.ones", "numpy.isnan", "matplotlib.pyplot.figure", "numpy.exp", "tectosaur.ops.sparse_integral_op.RegularizedSparseIntegralOp", "tectosaur.logger.setLevel", "numpy.full", "tectosaur.ops.dense_integral_op.RegularizedDenseIntegralOp", "numpy.testing.assert_almost_equal", "matplotlib.pyplot.colorbar", "numpy.max", "numpy.linspace", "matplotlib.pyplot.show", "tectosaur.constraint_builders.find_free_edges", "numpy.min", "tectosaur.nearfield.nearfield_op.any_nearfield", "test_farfield.make_meshes", "numpy.zeros", "tectosaur.constraints.build_constraint_matrix", "tectosaur.ops.mass_op.MassOp", "time.time", "numpy.where", "numpy.array", "tectosaur.ops.sparse_farfield_op.TriToTriDirectFarfieldOp", "tectosaur.constraint_builders.free_edge_constraints" ]
[((1550, 1585), 'numpy.zeros', 'np.zeros', (['(dof_pts.shape[0] * 3, 3)'], {}), '((dof_pts.shape[0] * 3, 3))\n', (1558, 1585), True, 'import numpy as np\n'), ((1863, 1884), 'tectosaur.constraint_builders.find_free_edges', 'find_free_edges', (['m[1]'], {}), '(m[1])\n', (1878, 1884), False, 'from tectosaur.constraint_builders import find_free_edges\n'), ((1905, 1923), 'numpy.min', 'np.min', (['surf2_idxs'], {}), '(surf2_idxs)\n', (1911, 1923), True, 'import numpy as np\n'), ((1943, 1961), 'numpy.max', 'np.max', (['surf2_idxs'], {}), '(surf2_idxs)\n', (1949, 1961), True, 'import numpy as np\n'), ((2576, 2605), 'test_farfield.make_meshes', 'make_meshes', ([], {'n_m': 'n_m', 'sep': 'sep'}), '(n_m=n_m, sep=sep)\n', (2587, 2605), False, 'from test_farfield import make_meshes\n'), ((2726, 2791), 'tectosaur.nearfield.nearfield_op.any_nearfield', 'any_nearfield', (['m[0]', 'm[1]', 'surf1_idxs', 'surf2_idxs', 'near_threshold'], {}), '(m[0], m[1], surf1_idxs, surf2_idxs, near_threshold)\n', (2739, 2791), False, 'from tectosaur.nearfield.nearfield_op import any_nearfield\n'), ((7292, 7319), 'test_farfield.make_meshes', 'make_meshes', ([], {'n_m': 'n', 'sep': '(4.0)'}), '(n_m=n, sep=4.0)\n', (7303, 7319), False, 'from test_farfield import make_meshes\n'), ((7334, 7441), 'tectosaur.ops.sparse_farfield_op.TriToTriDirectFarfieldOp', 'TriToTriDirectFarfieldOp', (['(2)', '"""elasticRH3"""', '[1.0, 0.25]', 'm[0]', 'm[1]', 'np.float32', 'surf1_idxs', 'surf2_idxs'], {}), "(2, 'elasticRH3', [1.0, 0.25], m[0], m[1], np.\n float32, surf1_idxs, surf2_idxs)\n", (7358, 7441), False, 'from tectosaur.ops.sparse_farfield_op import TriToTriDirectFarfieldOp\n'), ((7587, 7626), 'tectosaur.logger.setLevel', 'tectosaur.logger.setLevel', (['logging.INFO'], {}), '(logging.INFO)\n', (7612, 7626), False, 'import tectosaur, logging\n'), ((680, 710), 'numpy.full', 'np.full', (['m[0].shape[0]', 'np.nan'], {}), '(m[0].shape[0], np.nan)\n', (687, 710), True, 'import numpy as np\n'), ((821, 842), 'numpy.min', 'np.min', (['pts_f_not_nan'], {}), '(pts_f_not_nan)\n', (827, 842), True, 'import numpy as np\n'), ((859, 880), 'numpy.max', 'np.max', (['pts_f_not_nan'], {}), '(pts_f_not_nan)\n', (865, 880), True, 'import numpy as np\n'), ((890, 902), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (900, 902), True, 'import matplotlib.pyplot as plt\n'), ((1073, 1087), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (1085, 1087), True, 'import matplotlib.pyplot as plt\n'), ((1262, 1272), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1270, 1272), True, 'import matplotlib.pyplot as plt\n'), ((5327, 5372), 'tectosaur.constraints.build_constraint_matrix', 'build_constraint_matrix', (['cs', 'outs[0].shape[0]'], {}), '(cs, outs[0].shape[0])\n', (5350, 5372), False, 'from tectosaur.constraints import build_constraint_matrix\n'), ((7676, 7687), 'time.time', 'time.time', ([], {}), '()\n', (7685, 7687), False, 'import time\n'), ((1444, 1480), 'numpy.exp', 'np.exp', (['(-(x - b) ** 2 / (2 * c ** 2))'], {}), '(-(x - b) ** 2 / (2 * c ** 2))\n', (1450, 1480), True, 'import numpy as np\n'), ((1733, 1765), 'numpy.ones', 'np.ones', (['(surf2_idxs.shape[0] * 9)'], {}), '(surf2_idxs.shape[0] * 9)\n', (1740, 1765), True, 'import numpy as np\n'), ((2196, 2220), 'numpy.where', 'np.where', (['(m[1] == pt_idx)'], {}), '(m[1] == pt_idx)\n', (2204, 2220), True, 'import numpy as np\n'), ((3265, 3296), 'tectosaur.ops.sparse_farfield_op.TriToTriDirectFarfieldOp', 'TriToTriDirectFarfieldOp', (['*args'], {}), '(*args)\n', (3289, 3296), 
False, 'from tectosaur.ops.sparse_farfield_op import TriToTriDirectFarfieldOp\n'), ((5234, 5246), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (5242, 5246), True, 'import numpy as np\n'), ((5266, 5305), 'tectosaur.constraint_builders.free_edge_constraints', 'free_edge_constraints', (['m[1][surf1_idxs]'], {}), '(m[1][surf1_idxs])\n', (5287, 5305), False, 'from tectosaur.constraint_builders import continuity_constraints, free_edge_constraints\n'), ((5788, 5851), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (['final_outs[i]', 'final_outs[j]', '(6)'], {}), '(final_outs[i], final_outs[j], 6)\n', (5818, 5851), True, 'import numpy as np\n'), ((7736, 7747), 'time.time', 'time.time', ([], {}), '()\n', (7745, 7747), False, 'import time\n'), ((787, 802), 'numpy.isnan', 'np.isnan', (['pts_f'], {}), '(pts_f)\n', (795, 802), True, 'import numpy as np\n'), ((996, 1025), 'numpy.linspace', 'np.linspace', (['min_f', 'max_f', '(21)'], {}), '(min_f, max_f, 21)\n', (1007, 1025), True, 'import numpy as np\n'), ((3405, 3608), 'tectosaur.ops.sparse_integral_op.RegularizedSparseIntegralOp', 'RegularizedSparseIntegralOp', (['(10)', '(10)', '(6)', 'nq_far', 'nq_near', 'near_threshold', 'full_RK_name', 'farfield_K', '[1.0, 0.25]', 'm[0]', 'm[1]', 'np.float32', 'farfield_type'], {'obs_subset': 'surf1_idxs', 'src_subset': 'surf2_idxs'}), '(10, 10, 6, nq_far, nq_near, near_threshold,\n full_RK_name, farfield_K, [1.0, 0.25], m[0], m[1], np.float32,\n farfield_type, obs_subset=surf1_idxs, src_subset=surf2_idxs)\n', (3432, 3608), False, 'from tectosaur.ops.sparse_integral_op import RegularizedSparseIntegralOp\n'), ((4180, 4369), 'tectosaur.ops.dense_integral_op.RegularizedDenseIntegralOp', 'RegularizedDenseIntegralOp', (['(10)', '(10)', '(6)', 'nq_far', 'nq_near', 'near_threshold', 'full_RK_name', 'full_RK_name', '[1.0, 0.25]', 'm[0]', 'm[1]', 'np.float32'], {'obs_subset': 'surf1_idxs', 'src_subset': 'surf2_idxs'}), '(10, 10, 6, nq_far, nq_near, near_threshold,\n full_RK_name, full_RK_name, [1.0, 0.25], m[0], m[1], np.float32,\n obs_subset=surf1_idxs, src_subset=surf2_idxs)\n', (4206, 4369), False, 'from tectosaur.ops.dense_integral_op import RegularizedDenseIntegralOp\n'), ((3703, 3736), 'tectosaur.ops.mass_op.MassOp', 'MassOp', (['(3)', 'm[0]', 'm[1][surf1_idxs]'], {}), '(3, m[0], m[1][surf1_idxs])\n', (3709, 3736), False, 'from tectosaur.ops.mass_op import MassOp\n'), ((4448, 4481), 'tectosaur.ops.mass_op.MassOp', 'MassOp', (['(3)', 'm[0]', 'm[1][surf1_idxs]'], {}), '(3, m[0], m[1][surf1_idxs])\n', (4454, 4481), False, 'from tectosaur.ops.mass_op import MassOp\n')]
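The self-interaction cases above pair each integral operator with a scaled mass matrix. That composition pattern, reduced to a helper whose arguments are placeholders to be built as in regularized_tester:

from tectosaur.ops.mass_op import MassOp
from tectosaur.ops.neg_op import MultOp
from tectosaur.ops.sum_op import SumOp

def self_interaction_op(integral_op, pts, tris, mass_factor):
    """Integral term plus scaled mass matrix, e.g. mass_factor=-0.5 for T."""
    return SumOp([integral_op, MultOp(MassOp(3, pts, tris), mass_factor)])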
# -*- coding: utf-8 -*- """International Site - Industries page""" import logging from requests import Response, Session from directory_tests_shared import PageType, Service, URLs from tests.functional.utils.request import Method, check_response, make_request SERVICE = Service.INTERNATIONAL NAME = "Industries" TYPE = PageType.LISTING URL = URLs.INTERNATIONAL_INDUSTRIES.absolute EXPECTED_STRINGS = ["Industries", "Great.gov.uk International"] INDUSTRY_NAMES = { "english": [ "aerospace", # "agritech", "automotive", # "business & government partnerships", # "consumer & retail", # "creative services", "creative industries", # "cyber security", "education", "energy", # "engineering", "engineering and manufacturing", # "food and drink", # "healthcare", "financial services", "healthcare and life sciences", # "infrastructure", # "innovation", "legal services", # "life sciences", # "marine", # "professional & financial services", "real estate", "space", # "sports economy", "technology", ], "german": [ "agrartechnologie", "automobilbranche", "bildung", "cybersicherheit", "einzelhandel", "energie", "finanz- und fachdienstleistungen", "gesundheitsversorgung", "handels- und regierungspartnerschaften", "infrastruktur", "ingenieurwesen", "innovation", "kreativbranche", "lebensmittel und getränke", "life sciences", "luft- und raumfahrt", "marine und schifffahrt", "raumfahrt", "rechtsdienstleistungen", "sportwirtschaft", "technologie", ], "french": [ "aéronautique", "agritech", "automobile", "partenariats entre les entreprises", "commerce de détail", "industries créatives", "cybersécurité", "éducation", "énergie", "ingénierie", "produits alimentaires et boissons", "santé", "infrastructures", "innovation", "services juridiques", "sciences de la vie", "maritime", "services professionnels et financiers", "espace", "économie du sport", "technologie", ], "chinese": [ "体育经济", "农业技术", "创意产业", "创新", "医疗保健", "商业和政府伙伴关系", "基础设施", "太空业", "工程", "教育", "汽车", "法律服务", "海事", "消费品零售", "生命科学", "科技", "网络安全", "能源", "航空航天", "食品与饮料", ], "japanese": [ "アグリテック", "イノベーション", "インフラストラクチャー", "エネルギー", "エンジニアリング", "クリエイティブ", "サイバーセキュリティ", "スポーツ経済", "テクノロジー", "ライフサイエンス", "医療サービス", "宇宙", "専門サービスと金融サービス", "教育", "法律サービス", "海事", "消費者小売", "産官協力", "自動車", "航空宇宙", "飲食料品", ], "portuguese": [ "aeroespacial", "tecnologias agrícolas", "automotivo", "parcerias", "varejo consumidor", "indústrias criativas", "segurança cibernética", "educação", "energia", "engenharia", "alimentos e bebidas", "assistência médica", "infraestrutura", "inovação", "serviços legais", "ciências da vida", "marítimo", "serviços profissionais e financeiros", "espacial", "economia esportiva", "tecnologia", ], "spanish": [ "aeroespacial", "agrotecnología", "automotriz", "colaboración público-privada", "venta al por menor", "sectores creativos", "seguridad cibernética", "enseñanza", "energía", "ingeniería", "alimentos y bebidas", "sanidad", "infraestructura", "innovación", "servicios jurídicos", "ciencias biológicas", "marítimo", "servicios profesionales y financieros", "espacio", "economía del deporte", "tecnología", ], "arabic": [ "صناعة الفضاء الجوي", "التقنية الزراعية", "صناعة السيارات", "الشراكات الحكومية والتجارية", "التجزئة للمستهلك", "الصناعات الإبداعية", "الأمن السيبراني", "لتعليم", "الطاقة", "الهندسة", "الطعام والشراب", "العناية الصحية", "البنية التحتية", "الابتكار", "الخدمات القانونية", "علوم الحياة", "الخدمات البحرية", "الخدمات المالية والمهنية", "لفضاء", "الاقتصاد الرياضي", "التقنية", ], } def go_to(session: Session) -> Response: return make_request(Method.GET, URL, 
session=session) def should_be_here(response: Response): check_response(response, 200, body_contains=EXPECTED_STRINGS) logging.debug("Successfully got to the International - Industries page") def should_see_links_to_industry_pages(response: Response, language: str): industry_links = INDUSTRY_NAMES[language.lower()] check_response(response, 200, body_contains=industry_links)
[ "tests.functional.utils.request.make_request", "logging.debug", "tests.functional.utils.request.check_response" ]
[((4971, 5017), 'tests.functional.utils.request.make_request', 'make_request', (['Method.GET', 'URL'], {'session': 'session'}), '(Method.GET, URL, session=session)\n', (4983, 5017), False, 'from tests.functional.utils.request import Method, check_response, make_request\n'), ((5064, 5125), 'tests.functional.utils.request.check_response', 'check_response', (['response', '(200)'], {'body_contains': 'EXPECTED_STRINGS'}), '(response, 200, body_contains=EXPECTED_STRINGS)\n', (5078, 5125), False, 'from tests.functional.utils.request import Method, check_response, make_request\n'), ((5130, 5202), 'logging.debug', 'logging.debug', (['"""Successfully got to the International - Industries page"""'], {}), "('Successfully got to the International - Industries page')\n", (5143, 5202), False, 'import logging\n'), ((5338, 5397), 'tests.functional.utils.request.check_response', 'check_response', (['response', '(200)'], {'body_contains': 'industry_links'}), '(response, 200, body_contains=industry_links)\n', (5352, 5397), False, 'from tests.functional.utils.request import Method, check_response, make_request\n')]
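A typical flow for the page helpers above, assuming the three functions are imported from this module into a functional test:

from requests import Session

session = Session()
response = go_to(session)
should_be_here(response)
should_see_links_to_industry_pages(response, "German")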
import requests

PROXY_POOL_URL = 'http://localhost:5556/countavailable'


def get_proxy():
    try:
        response = requests.get(PROXY_POOL_URL)
        if response.status_code == 200:
            return response.text
    except requests.exceptions.ConnectionError:
        # requests raises its own ConnectionError type (a RequestException),
        # not the builtin one, so catch the requests variant here.
        return None


if __name__ == '__main__':
    print(get_proxy())
[ "requests.get" ]
[((119, 147), 'requests.get', 'requests.get', (['PROXY_POOL_URL'], {}), '(PROXY_POOL_URL)\n', (131, 147), False, 'import requests\n')]
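Despite its name, get_proxy() above returns the pool's available-proxy count (the URL ends in /countavailable). Fetching and using an actual proxy usually goes through a separate endpoint; '/get' below is an assumption about this particular pool, not something the snippet confirms:

import requests

count = get_proxy()  # text body of /countavailable, or None
if count is not None:
    print("proxies available:", count)

resp = requests.get("http://localhost:5556/get")  # assumed endpoint
proxy = resp.text.strip()
requests.get("http://httpbin.org/ip",
             proxies={"http": "http://" + proxy}, timeout=5)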
# Copyright 2012 Alyseo. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Desc : Driver to store volumes on Coraid Appliances. Require : Coraid EtherCloud ESM, Coraid VSX and Coraid SRX. Author : <NAME> <<EMAIL>> Author : <NAME> <<EMAIL>> Author : <NAME> <<EMAIL>> Contrib : <NAME> <<EMAIL>> """ import cookielib import math import urllib import urllib2 from oslo.config import cfg import six.moves.urllib.parse as urlparse from cinder import exception from cinder.openstack.common.gettextutils import _ from cinder.openstack.common import jsonutils from cinder.openstack.common import lockutils from cinder.openstack.common import log as logging from cinder.openstack.common import units from cinder.volume import driver from cinder.volume import volume_types LOG = logging.getLogger(__name__) coraid_opts = [ cfg.StrOpt('coraid_esm_address', default='', help='IP address of Coraid ESM'), cfg.StrOpt('coraid_user', default='admin', help='User name to connect to Coraid ESM'), cfg.StrOpt('coraid_group', default='admin', help='Name of group on Coraid ESM to which coraid_user belongs' ' (must have admin privilege)'), cfg.StrOpt('coraid_password', default='password', help='Password to connect to Coraid ESM'), cfg.StrOpt('coraid_repository_key', default='coraid_repository', help='Volume Type key name to store ESM Repository Name'), ] CONF = cfg.CONF CONF.register_opts(coraid_opts) ESM_SESSION_EXPIRED_STATES = ['GeneralAdminFailure', 'passwordInactivityTimeout', 'passwordAbsoluteTimeout'] class CoraidRESTClient(object): """Executes REST RPC requests on Coraid ESM EtherCloud Appliance.""" def __init__(self, esm_url): self._check_esm_url(esm_url) self._esm_url = esm_url self._cookie_jar = cookielib.CookieJar() self._url_opener = urllib2.build_opener( urllib2.HTTPCookieProcessor(self._cookie_jar)) def _check_esm_url(self, esm_url): splitted = urlparse.urlsplit(esm_url) if splitted.scheme != 'https': raise ValueError( _('Invalid ESM url scheme "%s". Supported https only.') % splitted.scheme) @lockutils.synchronized('coraid_rpc', 'cinder-', False) def rpc(self, handle, url_params, data, allow_empty_response=False): return self._rpc(handle, url_params, data, allow_empty_response) def _rpc(self, handle, url_params, data, allow_empty_response): """Execute REST RPC using url <esm_url>/handle?url_params. Send JSON encoded data in body of POST request. Exceptions: urllib2.URLError 1. Name or service not found (e.reason is socket.gaierror) 2. Socket blocking operation timeout (e.reason is socket.timeout) 3. Network IO error (e.reason is socket.error) urllib2.HTTPError 1. HTTP 404, HTTP 500 etc. CoraidJsonEncodeFailure - bad REST response """ # Handle must be simple path, for example: # /configure if '?' in handle or '&' in handle: raise ValueError(_('Invalid REST handle name. Expected path.')) # Request url includes base ESM url, handle path and optional # URL params. rest_url = urlparse.urljoin(self._esm_url, handle) encoded_url_params = urllib.urlencode(url_params) if encoded_url_params: rest_url += '?' 
+ encoded_url_params if data is None: json_request = None else: json_request = jsonutils.dumps(data) request = urllib2.Request(rest_url, json_request) response = self._url_opener.open(request).read() try: if not response and allow_empty_response: reply = {} else: reply = jsonutils.loads(response) except (TypeError, ValueError) as exc: msg = (_('Call to json.loads() failed: %(ex)s.' ' Response: %(resp)s') % {'ex': exc, 'resp': response}) raise exception.CoraidJsonEncodeFailure(msg) return reply def to_coraid_kb(gb): return math.ceil(float(gb) * units.Gi / 1000) def coraid_volume_size(gb): return '{0}K'.format(to_coraid_kb(gb)) class CoraidAppliance(object): def __init__(self, rest_client, username, password, group): self._rest_client = rest_client self._username = username self._password = password self._group = group self._logined = False def _login(self): """Login into ESM. Perform login request and return available groups. :returns: dict -- map with group_name to group_id """ ADMIN_GROUP_PREFIX = 'admin group:' url_params = {'op': 'login', 'username': self._username, 'password': self._password} reply = self._rest_client.rpc('admin', url_params, 'Login') if reply['state'] != 'adminSucceed': raise exception.CoraidESMBadCredentials() # Read groups map from login reply. groups_map = {} for group_info in reply.get('values', []): full_group_name = group_info['fullPath'] if full_group_name.startswith(ADMIN_GROUP_PREFIX): group_name = full_group_name[len(ADMIN_GROUP_PREFIX):] groups_map[group_name] = group_info['groupId'] return groups_map def _set_effective_group(self, groups_map, group): """Set effective group. Use groups_map returned from _login method. """ try: group_id = groups_map[group] except KeyError: raise exception.CoraidESMBadGroup(group_name=group) url_params = {'op': 'setRbacGroup', 'groupId': group_id} reply = self._rest_client.rpc('admin', url_params, 'Group') if reply['state'] != 'adminSucceed': raise exception.CoraidESMBadCredentials() self._logined = True def _ensure_session(self): if not self._logined: groups_map = self._login() self._set_effective_group(groups_map, self._group) def _relogin(self): self._logined = False self._ensure_session() def rpc(self, handle, url_params, data, allow_empty_response=False): self._ensure_session() relogin_attempts = 3 # Do action, relogin if needed and repeat action. while True: reply = self._rest_client.rpc(handle, url_params, data, allow_empty_response) if self._is_session_expired(reply): relogin_attempts -= 1 if relogin_attempts <= 0: raise exception.CoraidESMReloginFailed() LOG.debug('Session is expired. 
Relogin on ESM.') self._relogin() else: return reply def _is_session_expired(self, reply): return ('state' in reply and reply['state'] in ESM_SESSION_EXPIRED_STATES and reply['metaCROp'] == 'reboot') def _is_bad_config_state(self, reply): return (not reply or 'configState' not in reply or reply['configState'] != 'completedSuccessfully') def configure(self, json_request): reply = self.rpc('configure', {}, json_request) if self._is_bad_config_state(reply): # Calculate error message if not reply: reason = _('Reply is empty.') else: reason = reply.get('message', _('Error message is empty.')) raise exception.CoraidESMConfigureError(reason=reason) return reply def esm_command(self, request): request['data'] = jsonutils.dumps(request['data']) return self.configure([request]) def get_volume_info(self, volume_name): """Retrieve volume information for a given volume name.""" url_params = {'shelf': 'cms', 'orchStrRepo': '', 'lv': volume_name} reply = self.rpc('fetch', url_params, None) try: volume_info = reply[0][1]['reply'][0] except (IndexError, KeyError): raise exception.VolumeNotFound(volume_id=volume_name) return {'pool': volume_info['lv']['containingPool'], 'repo': volume_info['repoName'], 'lun': volume_info['lv']['lvStatus']['exportedLun']['lun'], 'shelf': volume_info['lv']['lvStatus']['exportedLun']['shelf']} def get_volume_repository(self, volume_name): volume_info = self.get_volume_info(volume_name) return volume_info['repo'] def get_all_repos(self): reply = self.rpc('fetch', {'orchStrRepo': ''}, None) try: return reply[0][1]['reply'] except (IndexError, KeyError): return [] def ping(self): try: self.rpc('fetch', {}, None, allow_empty_response=True) except Exception as e: LOG.debug('Coraid Appliance ping failed: %s', e) raise exception.CoraidESMNotAvailable(reason=e) def create_lun(self, repository_name, volume_name, volume_size_in_gb): request = {'addr': 'cms', 'data': { 'servers': [], 'repoName': repository_name, 'lvName': volume_name, 'size': coraid_volume_size(volume_size_in_gb)}, 'op': 'orchStrLun', 'args': 'add'} esm_result = self.esm_command(request) LOG.debug('Volume "%(name)s" created with VSX LUN "%(lun)s"' % {'name': volume_name, 'lun': esm_result['firstParam']}) return esm_result def delete_lun(self, volume_name): repository_name = self.get_volume_repository(volume_name) request = {'addr': 'cms', 'data': { 'repoName': repository_name, 'lvName': volume_name}, 'op': 'orchStrLun/verified', 'args': 'delete'} esm_result = self.esm_command(request) LOG.debug('Volume "%s" deleted.', volume_name) return esm_result def resize_volume(self, volume_name, new_volume_size_in_gb): LOG.debug('Resize volume "%(name)s" to %(size)s GB.' % {'name': volume_name, 'size': new_volume_size_in_gb}) repository = self.get_volume_repository(volume_name) LOG.debug('Repository for volume "%(name)s" found: "%(repo)s"' % {'name': volume_name, 'repo': repository}) request = {'addr': 'cms', 'data': { 'lvName': volume_name, 'newLvName': volume_name + '-resize', 'size': coraid_volume_size(new_volume_size_in_gb), 'repoName': repository}, 'op': 'orchStrLunMods', 'args': 'resize'} esm_result = self.esm_command(request) LOG.debug('Volume "%(name)s" resized. New size is %(size)s GB.' 
% {'name': volume_name, 'size': new_volume_size_in_gb}) return esm_result def create_snapshot(self, volume_name, snapshot_name): volume_repository = self.get_volume_repository(volume_name) request = {'addr': 'cms', 'data': { 'repoName': volume_repository, 'lvName': volume_name, 'newLvName': snapshot_name}, 'op': 'orchStrLunMods', 'args': 'addClSnap'} esm_result = self.esm_command(request) return esm_result def delete_snapshot(self, snapshot_name): repository_name = self.get_volume_repository(snapshot_name) request = {'addr': 'cms', 'data': { 'repoName': repository_name, 'lvName': snapshot_name}, 'op': 'orchStrLunMods', 'args': 'delClSnap'} esm_result = self.esm_command(request) return esm_result def create_volume_from_snapshot(self, snapshot_name, volume_name, dest_repository_name): snapshot_repo = self.get_volume_repository(snapshot_name) request = {'addr': 'cms', 'data': { 'lvName': snapshot_name, 'repoName': snapshot_repo, 'newLvName': volume_name, 'newRepoName': dest_repository_name}, 'op': 'orchStrLunMods', 'args': 'addClone'} esm_result = self.esm_command(request) return esm_result def clone_volume(self, src_volume_name, dst_volume_name, dst_repository_name): src_volume_info = self.get_volume_info(src_volume_name) if src_volume_info['repo'] != dst_repository_name: raise exception.CoraidException( _('Cannot create clone volume in different repository.')) request = {'addr': 'cms', 'data': { 'shelfLun': '{0}.{1}'.format(src_volume_info['shelf'], src_volume_info['lun']), 'lvName': src_volume_name, 'repoName': src_volume_info['repo'], 'newLvName': dst_volume_name, 'newRepoName': dst_repository_name}, 'op': 'orchStrLunMods', 'args': 'addClone'} return self.esm_command(request) class CoraidDriver(driver.VolumeDriver): """This is the Class to set in cinder.conf (volume_driver).""" VERSION = '1.0.0' def __init__(self, *args, **kwargs): super(CoraidDriver, self).__init__(*args, **kwargs) self.configuration.append_config_values(coraid_opts) self._stats = {'driver_version': self.VERSION, 'free_capacity_gb': 'unknown', 'reserved_percentage': 0, 'storage_protocol': 'aoe', 'total_capacity_gb': 'unknown', 'vendor_name': 'Coraid'} backend_name = self.configuration.safe_get('volume_backend_name') self._stats['volume_backend_name'] = backend_name or 'EtherCloud ESM' @property def appliance(self): # NOTE(nsobolevsky): This is workaround for bug in the ESM appliance. # If there is a lot of request with the same session/cookie/connection, # the appliance could corrupt all following request in session. # For that purpose we just create a new appliance. esm_url = "https://{0}:8443".format( self.configuration.coraid_esm_address) return CoraidAppliance(CoraidRESTClient(esm_url), self.configuration.coraid_user, self.configuration.coraid_password, self.configuration.coraid_group) def check_for_setup_error(self): """Return an error if prerequisites aren't met.""" self.appliance.ping() def _get_repository(self, volume_type): """Get the ESM Repository from the Volume Type. The ESM Repository is stored into a volume_type_extra_specs key. 
""" volume_type_id = volume_type['id'] repository_key_name = self.configuration.coraid_repository_key repository = volume_types.get_volume_type_extra_specs( volume_type_id, repository_key_name) # Remove <in> keyword from repository name if needed if repository.startswith('<in> '): return repository[len('<in> '):] else: return repository def create_volume(self, volume): """Create a Volume.""" repository = self._get_repository(volume['volume_type']) self.appliance.create_lun(repository, volume['name'], volume['size']) def create_cloned_volume(self, volume, src_vref): dst_volume_repository = self._get_repository(volume['volume_type']) self.appliance.clone_volume(src_vref['name'], volume['name'], dst_volume_repository) if volume['size'] != src_vref['size']: self.appliance.resize_volume(volume['name'], volume['size']) def delete_volume(self, volume): """Delete a Volume.""" try: self.appliance.delete_lun(volume['name']) except exception.VolumeNotFound: self.appliance.ping() def create_snapshot(self, snapshot): """Create a Snapshot.""" volume_name = snapshot['volume_name'] snapshot_name = snapshot['name'] self.appliance.create_snapshot(volume_name, snapshot_name) def delete_snapshot(self, snapshot): """Delete a Snapshot.""" snapshot_name = snapshot['name'] self.appliance.delete_snapshot(snapshot_name) def create_volume_from_snapshot(self, volume, snapshot): """Create a Volume from a Snapshot.""" snapshot_name = snapshot['name'] repository = self._get_repository(volume['volume_type']) self.appliance.create_volume_from_snapshot(snapshot_name, volume['name'], repository) if volume['size'] > snapshot['volume_size']: self.appliance.resize_volume(volume['name'], volume['size']) def extend_volume(self, volume, new_size): """Extend an existing volume.""" self.appliance.resize_volume(volume['name'], new_size) def initialize_connection(self, volume, connector): """Return connection information.""" volume_info = self.appliance.get_volume_info(volume['name']) shelf = volume_info['shelf'] lun = volume_info['lun'] LOG.debug('Initialize connection %(shelf)s/%(lun)s for %(name)s' % {'shelf': shelf, 'lun': lun, 'name': volume['name']}) aoe_properties = {'target_shelf': shelf, 'target_lun': lun} return {'driver_volume_type': 'aoe', 'data': aoe_properties} def _get_repository_capabilities(self): repos_list = map(lambda i: i['profile']['fullName'] + ':' + i['name'], self.appliance.get_all_repos()) return ' '.join(repos_list) def update_volume_stats(self): capabilities = self._get_repository_capabilities() self._stats[self.configuration.coraid_repository_key] = capabilities def get_volume_stats(self, refresh=False): """Return Volume Stats.""" if refresh: self.update_volume_stats() return self._stats def local_path(self, volume): pass def create_export(self, context, volume): pass def remove_export(self, context, volume): pass def terminate_connection(self, volume, connector, **kwargs): pass def ensure_export(self, context, volume): pass
[ "cookielib.CookieJar", "cinder.openstack.common.gettextutils._", "cinder.exception.CoraidESMBadCredentials", "oslo.config.cfg.StrOpt", "six.moves.urllib.parse.urljoin", "cinder.openstack.common.jsonutils.loads", "urllib2.Request", "cinder.exception.CoraidESMReloginFailed", "cinder.volume.volume_types.get_volume_type_extra_specs", "cinder.exception.CoraidESMBadGroup", "urllib.urlencode", "cinder.openstack.common.lockutils.synchronized", "cinder.exception.CoraidESMNotAvailable", "cinder.openstack.common.log.getLogger", "cinder.exception.VolumeNotFound", "cinder.openstack.common.jsonutils.dumps", "cinder.exception.CoraidESMConfigureError", "cinder.exception.CoraidJsonEncodeFailure", "urllib2.HTTPCookieProcessor", "six.moves.urllib.parse.urlsplit" ]
[((1329, 1356), 'cinder.openstack.common.log.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1346, 1356), True, 'from cinder.openstack.common import log as logging\n'), ((1378, 1455), 'oslo.config.cfg.StrOpt', 'cfg.StrOpt', (['"""coraid_esm_address"""'], {'default': '""""""', 'help': '"""IP address of Coraid ESM"""'}), "('coraid_esm_address', default='', help='IP address of Coraid ESM')\n", (1388, 1455), False, 'from oslo.config import cfg\n'), ((1491, 1581), 'oslo.config.cfg.StrOpt', 'cfg.StrOpt', (['"""coraid_user"""'], {'default': '"""admin"""', 'help': '"""User name to connect to Coraid ESM"""'}), "('coraid_user', default='admin', help=\n 'User name to connect to Coraid ESM')\n", (1501, 1581), False, 'from oslo.config import cfg\n'), ((1612, 1758), 'oslo.config.cfg.StrOpt', 'cfg.StrOpt', (['"""coraid_group"""'], {'default': '"""admin"""', 'help': '"""Name of group on Coraid ESM to which coraid_user belongs (must have admin privilege)"""'}), "('coraid_group', default='admin', help=\n 'Name of group on Coraid ESM to which coraid_user belongs (must have admin privilege)'\n )\n", (1622, 1758), False, 'from oslo.config import cfg\n'), ((1802, 1898), 'oslo.config.cfg.StrOpt', 'cfg.StrOpt', (['"""coraid_password"""'], {'default': '"""password"""', 'help': '"""Password to connect to Coraid ESM"""'}), "('coraid_password', default='password', help=\n 'Password to connect to Coraid ESM')\n", (1812, 1898), False, 'from oslo.config import cfg\n'), ((1929, 2056), 'oslo.config.cfg.StrOpt', 'cfg.StrOpt', (['"""coraid_repository_key"""'], {'default': '"""coraid_repository"""', 'help': '"""Volume Type key name to store ESM Repository Name"""'}), "('coraid_repository_key', default='coraid_repository', help=\n 'Volume Type key name to store ESM Repository Name')\n", (1939, 2056), False, 'from oslo.config import cfg\n'), ((2940, 2994), 'cinder.openstack.common.lockutils.synchronized', 'lockutils.synchronized', (['"""coraid_rpc"""', '"""cinder-"""', '(False)'], {}), "('coraid_rpc', 'cinder-', False)\n", (2962, 2994), False, 'from cinder.openstack.common import lockutils\n'), ((2542, 2563), 'cookielib.CookieJar', 'cookielib.CookieJar', ([], {}), '()\n', (2561, 2563), False, 'import cookielib\n'), ((2731, 2757), 'six.moves.urllib.parse.urlsplit', 'urlparse.urlsplit', (['esm_url'], {}), '(esm_url)\n', (2748, 2757), True, 'import six.moves.urllib.parse as urlparse\n'), ((4062, 4101), 'six.moves.urllib.parse.urljoin', 'urlparse.urljoin', (['self._esm_url', 'handle'], {}), '(self._esm_url, handle)\n', (4078, 4101), True, 'import six.moves.urllib.parse as urlparse\n'), ((4131, 4159), 'urllib.urlencode', 'urllib.urlencode', (['url_params'], {}), '(url_params)\n', (4147, 4159), False, 'import urllib\n'), ((4380, 4419), 'urllib2.Request', 'urllib2.Request', (['rest_url', 'json_request'], {}), '(rest_url, json_request)\n', (4395, 4419), False, 'import urllib2\n'), ((8641, 8673), 'cinder.openstack.common.jsonutils.dumps', 'jsonutils.dumps', (["request['data']"], {}), "(request['data'])\n", (8656, 8673), False, 'from cinder.openstack.common import jsonutils\n'), ((16686, 16763), 'cinder.volume.volume_types.get_volume_type_extra_specs', 'volume_types.get_volume_type_extra_specs', (['volume_type_id', 'repository_key_name'], {}), '(volume_type_id, repository_key_name)\n', (16726, 16763), False, 'from cinder.volume import volume_types\n'), ((2625, 2670), 'urllib2.HTTPCookieProcessor', 'urllib2.HTTPCookieProcessor', (['self._cookie_jar'], {}), '(self._cookie_jar)\n', (2652, 2670), False, 'import 
urllib2\n'), ((4339, 4360), 'cinder.openstack.common.jsonutils.dumps', 'jsonutils.dumps', (['data'], {}), '(data)\n', (4354, 4360), False, 'from cinder.openstack.common import jsonutils\n'), ((5826, 5861), 'cinder.exception.CoraidESMBadCredentials', 'exception.CoraidESMBadCredentials', ([], {}), '()\n', (5859, 5861), False, 'from cinder import exception\n'), ((6774, 6809), 'cinder.exception.CoraidESMBadCredentials', 'exception.CoraidESMBadCredentials', ([], {}), '()\n', (6807, 6809), False, 'from cinder import exception\n'), ((8508, 8556), 'cinder.exception.CoraidESMConfigureError', 'exception.CoraidESMConfigureError', ([], {'reason': 'reason'}), '(reason=reason)\n', (8541, 8556), False, 'from cinder import exception\n'), ((3903, 3948), 'cinder.openstack.common.gettextutils._', '_', (['"""Invalid REST handle name. Expected path."""'], {}), "('Invalid REST handle name. Expected path.')\n", (3904, 3948), False, 'from cinder.openstack.common.gettextutils import _\n'), ((4614, 4639), 'cinder.openstack.common.jsonutils.loads', 'jsonutils.loads', (['response'], {}), '(response)\n', (4629, 4639), False, 'from cinder.openstack.common import jsonutils\n'), ((4861, 4899), 'cinder.exception.CoraidJsonEncodeFailure', 'exception.CoraidJsonEncodeFailure', (['msg'], {}), '(msg)\n', (4894, 4899), False, 'from cinder import exception\n'), ((6509, 6554), 'cinder.exception.CoraidESMBadGroup', 'exception.CoraidESMBadGroup', ([], {'group_name': 'group'}), '(group_name=group)\n', (6536, 6554), False, 'from cinder import exception\n'), ((8375, 8395), 'cinder.openstack.common.gettextutils._', '_', (['"""Reply is empty."""'], {}), "('Reply is empty.')\n", (8376, 8395), False, 'from cinder.openstack.common.gettextutils import _\n'), ((9119, 9166), 'cinder.exception.VolumeNotFound', 'exception.VolumeNotFound', ([], {'volume_id': 'volume_name'}), '(volume_id=volume_name)\n', (9143, 9166), False, 'from cinder import exception\n'), ((9991, 10032), 'cinder.exception.CoraidESMNotAvailable', 'exception.CoraidESMNotAvailable', ([], {'reason': 'e'}), '(reason=e)\n', (10022, 10032), False, 'from cinder import exception\n'), ((14176, 14232), 'cinder.openstack.common.gettextutils._', '_', (['"""Cannot create clone volume in different repository."""'], {}), "('Cannot create clone volume in different repository.')\n", (14177, 14232), False, 'from cinder.openstack.common.gettextutils import _\n'), ((2843, 2898), 'cinder.openstack.common.gettextutils._', '_', (['"""Invalid ESM url scheme "%s". Supported https only."""'], {}), '(\'Invalid ESM url scheme "%s". Supported https only.\')\n', (2844, 2898), False, 'from cinder.openstack.common.gettextutils import _\n'), ((4706, 4766), 'cinder.openstack.common.gettextutils._', '_', (['"""Call to json.loads() failed: %(ex)s. Response: %(resp)s"""'], {}), "('Call to json.loads() failed: %(ex)s. Response: %(resp)s')\n", (4707, 4766), False, 'from cinder.openstack.common.gettextutils import _\n'), ((7590, 7624), 'cinder.exception.CoraidESMReloginFailed', 'exception.CoraidESMReloginFailed', ([], {}), '()\n', (7622, 7624), False, 'from cinder import exception\n'), ((8460, 8488), 'cinder.openstack.common.gettextutils._', '_', (['"""Error message is empty."""'], {}), "('Error message is empty.')\n", (8461, 8488), False, 'from cinder.openstack.common.gettextutils import _\n')]
#!/usr/bin/env python import vtk from vtk.test import Testing from vtk.util.misc import vtkGetDataRoot VTK_DATA_ROOT = vtkGetDataRoot() lut = vtk.vtkLookupTable() lut.SetHueRange(0.6, 0) lut.SetSaturationRange(1.0, 0) lut.SetValueRange(0.5, 1.0) # Read the data: a height field results demReader = vtk.vtkDEMReader() demReader.SetFileName(VTK_DATA_ROOT + "/Data/SainteHelens.dem") demReader.Update() lo = demReader.GetOutput().GetScalarRange()[0] hi = demReader.GetOutput().GetScalarRange()[1] # Decimate the terrain deci = vtk.vtkGreedyTerrainDecimation() deci.SetInputConnection(demReader.GetOutputPort()) deci.BoundaryVertexDeletionOn() # deci.SetErrorMeasureToSpecifiedReduction() # deci.SetReduction(0.95) deci.SetErrorMeasureToNumberOfTriangles() deci.SetNumberOfTriangles(5000) # deci.SetErrorMeasureToAbsoluteError() # deci.SetAbsoluteError(25.0) # deci.SetErrorMeasureToRelativeError() # deci.SetAbsoluteError(0.01) normals = vtk.vtkPolyDataNormals() normals.SetInputConnection(deci.GetOutputPort()) normals.SetFeatureAngle(60) normals.ConsistencyOn() normals.SplittingOff() demMapper = vtk.vtkPolyDataMapper() demMapper.SetInputConnection(normals.GetOutputPort()) demMapper.SetScalarRange(lo, hi) demMapper.SetLookupTable(lut) actor = vtk.vtkLODActor() actor.SetMapper(demMapper) # Create the RenderWindow, Renderer and both Actors # ren1 = vtk.vtkRenderer() renWin = vtk.vtkRenderWindow() renWin.AddRenderer(ren1) iren = vtk.vtkRenderWindowInteractor() iren.SetRenderWindow(renWin) # Add the actors to the renderer, set the background and size # ren1.AddActor(actor) ren1.SetBackground(.1, .2, .4) iren.SetDesiredUpdateRate(5) ren1.GetActiveCamera().SetViewUp(0, 0, 1) ren1.GetActiveCamera().SetPosition(-99900, -21354, 131801) ren1.GetActiveCamera().SetFocalPoint(41461, 41461, 2815) ren1.ResetCamera() ren1.GetActiveCamera().Dolly(1.2) ren1.ResetCameraClippingRange() renWin.Render() iren.Initialize() #iren.Start()
[ "vtk.vtkGreedyTerrainDecimation", "vtk.vtkRenderer", "vtk.vtkRenderWindow", "vtk.util.misc.vtkGetDataRoot", "vtk.vtkLookupTable", "vtk.vtkLODActor", "vtk.vtkDEMReader", "vtk.vtkRenderWindowInteractor", "vtk.vtkPolyDataNormals", "vtk.vtkPolyDataMapper" ]
[((123, 139), 'vtk.util.misc.vtkGetDataRoot', 'vtkGetDataRoot', ([], {}), '()\n', (137, 139), False, 'from vtk.util.misc import vtkGetDataRoot\n'), ((149, 169), 'vtk.vtkLookupTable', 'vtk.vtkLookupTable', ([], {}), '()\n', (167, 169), False, 'import vtk\n'), ((312, 330), 'vtk.vtkDEMReader', 'vtk.vtkDEMReader', ([], {}), '()\n', (328, 330), False, 'import vtk\n'), ((548, 580), 'vtk.vtkGreedyTerrainDecimation', 'vtk.vtkGreedyTerrainDecimation', ([], {}), '()\n', (578, 580), False, 'import vtk\n'), ((978, 1002), 'vtk.vtkPolyDataNormals', 'vtk.vtkPolyDataNormals', ([], {}), '()\n', (1000, 1002), False, 'import vtk\n'), ((1146, 1169), 'vtk.vtkPolyDataMapper', 'vtk.vtkPolyDataMapper', ([], {}), '()\n', (1167, 1169), False, 'import vtk\n'), ((1301, 1318), 'vtk.vtkLODActor', 'vtk.vtkLODActor', ([], {}), '()\n', (1316, 1318), False, 'import vtk\n'), ((1413, 1430), 'vtk.vtkRenderer', 'vtk.vtkRenderer', ([], {}), '()\n', (1428, 1430), False, 'import vtk\n'), ((1441, 1462), 'vtk.vtkRenderWindow', 'vtk.vtkRenderWindow', ([], {}), '()\n', (1460, 1462), False, 'import vtk\n'), ((1497, 1528), 'vtk.vtkRenderWindowInteractor', 'vtk.vtkRenderWindowInteractor', ([], {}), '()\n', (1526, 1528), False, 'import vtk\n')]
import asyncio from typing import Dict from aiohttp import web from aiohttp.web_exceptions import HTTPBadRequest, HTTPInternalServerError, HTTPOk from config import Config from log import get_logger from src.thread.concurrent_call import ConcurrentCall logger = get_logger() class AiohttpApi: def __init__(self, jobs: Dict[str, ConcurrentCall]): self.app = web.Application() self.jobs = jobs def init_routes(self): self.app.add_routes( [ web.get('/status', self.status), web.patch('/restart/{name}', self.restart_thread), ], ) def aiohttp_server(self): return web.AppRunner(self.app) def run_server(self, runner: web.AppRunner): loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) loop.run_until_complete(runner.setup()) site = web.TCPSite(runner, Config.api_host, Config.api_port) loop.run_until_complete(site.start()) logger.info(f'HTTP API serve at {Config.api_host}:{Config.api_port}') loop.run_forever() async def status(self, request): result = {} for name, job in self.jobs.items(): result.update( **{name: job.status()} ) return web.json_response(data=result) async def restart_thread(self, request): thread_name = request.match_info.get('name') thread = self.jobs.get(thread_name) if not thread_name: raise HTTPBadRequest thread.daemon = True thread.start() if not thread.status(): raise HTTPInternalServerError raise HTTPOk
[ "aiohttp.web.patch", "asyncio.set_event_loop", "aiohttp.web.TCPSite", "aiohttp.web.json_response", "log.get_logger", "aiohttp.web.get", "aiohttp.web.AppRunner", "aiohttp.web.Application", "asyncio.new_event_loop" ]
[((265, 277), 'log.get_logger', 'get_logger', ([], {}), '()\n', (275, 277), False, 'from log import get_logger\n'), ((375, 392), 'aiohttp.web.Application', 'web.Application', ([], {}), '()\n', (390, 392), False, 'from aiohttp import web\n'), ((676, 699), 'aiohttp.web.AppRunner', 'web.AppRunner', (['self.app'], {}), '(self.app)\n', (689, 699), False, 'from aiohttp import web\n'), ((765, 789), 'asyncio.new_event_loop', 'asyncio.new_event_loop', ([], {}), '()\n', (787, 789), False, 'import asyncio\n'), ((798, 826), 'asyncio.set_event_loop', 'asyncio.set_event_loop', (['loop'], {}), '(loop)\n', (820, 826), False, 'import asyncio\n'), ((890, 943), 'aiohttp.web.TCPSite', 'web.TCPSite', (['runner', 'Config.api_host', 'Config.api_port'], {}), '(runner, Config.api_host, Config.api_port)\n', (901, 943), False, 'from aiohttp import web\n'), ((1292, 1322), 'aiohttp.web.json_response', 'web.json_response', ([], {'data': 'result'}), '(data=result)\n', (1309, 1322), False, 'from aiohttp import web\n'), ((505, 536), 'aiohttp.web.get', 'web.get', (['"""/status"""', 'self.status'], {}), "('/status', self.status)\n", (512, 536), False, 'from aiohttp import web\n'), ((554, 603), 'aiohttp.web.patch', 'web.patch', (['"""/restart/{name}"""', 'self.restart_thread'], {}), "('/restart/{name}', self.restart_thread)\n", (563, 603), False, 'from aiohttp import web\n')]
""" @date 30.08.2019 @author Roman.Detinin<EMAIL> @details :copyright: 2003–2019 Acronis International GmbH, Rheinweg 9, 8200 Schaffhausen, Switzerland. All rights reserved. """ import json import logging import os import requests from jsonschema import validate from requests.auth import HTTPBasicAuth from tools import GrantType from tools import handle_error_response logging.basicConfig( filename=os.path.join(os.path.dirname(__file__), 'samples.log'), level=logging.DEBUG, format='[%(asctime)s] %(levelname)s: %(message)s', ) # OPTIONAL: These are sample exceptions, which can be more # descriptive in the context of our library and API. class ClientException(Exception): """Base exception for client""" pass class EmptyCredentials(ClientException): """Exception for client credentials""" pass class InvalidGrantType(ClientException): """Exception for invalid grant type""" pass # OPTIONAL: There can be even more complex implementation # with several types of clients. class Client: """Represents a user account or client. :param grant_type: Authorization grant type :raises EmptyCredentials: Empty credentials provided in config :raises InvalidGrantType: Invalid grant type provided """ # OPTIONAL: # https://docs.python.org/3/reference/datamodel.html#slots __slots__ = ( '_login', '_password', '_client_id', '_client_secret', '_router_url', '_base_url', '_auth_header', '_tenant_id', '_config', '_grant_type', ) # TODO: Maybe a user can provide a path to # the configuration file by himself. def __init__(self, grant_type, use_dr_config=False, use_grpm_config=False): # Read the configuration file and store it in a variable config_name = 'config.json' if use_grpm_config: config_name = 'grpm_' + config_name elif use_dr_config: config_name = 'dr_' + config_name self._config = self._read_config( os.path.join(os.path.dirname(__file__), config_name) ) # Define API router url # By default it will lead to the beta cloud (for development purposes) self._router_url = self._config.get('router_url') # The grant type will be used to check if proper # grant type is used to access certain method. self._grant_type = grant_type # Login is required to get base url to the API self._login = self._config.get('login') if not self._login: # This exception is mandatory for the production environment raise EmptyCredentials( 'A login must be provided in order to get the base url.' ) # Send a request to the router url to get the login-specific # server url (for production environment) response = requests.get( self._router_url, verify=not use_grpm_config, params=dict(login=self._login), ) handle_error_response(response) # Form base_url based on received server_url self._base_url = response.json().get("server_url") payload = dict(grant_type=self._grant_type.name, scope='openid') if self._grant_type == GrantType.password: # Get the password from the config self._password = self._config.get('password') if not self._password: raise EmptyCredentials( 'Password is not provided in the configuration file.' ) payload.update( dict( username=self._login, password=self._password, scope='offline_access', ) ) auth = None elif self._grant_type == GrantType.client_credentials: # Get the client_id and client_secret which are necessary # for 'client_credentials' grant_type self._client_id = self._config.get('client_id') self._client_secret = self._config.get('client_secret') if not self._client_id or not self._client_secret: raise EmptyCredentials( 'Client ID and/or client secret are ' 'not provided in the configuration file.' 
) auth = HTTPBasicAuth(self._client_id, self._client_secret) else: raise InvalidGrantType('Incorrect grant_type provided.') response = requests.post( f'{self.base_url}/api/2/idp/token', verify=not use_grpm_config, auth=auth, headers={'Content-Type': 'application/x-www-form-urlencoded'}, data=payload, ) handle_error_response(response) access_token = response.json().get('access_token') # Generate authorization header for further use in the requests self._auth_header = dict(Authorization=f'Bearer {access_token}') # OPTIONAL: Create and store more properties # of the client here if necessary response = requests.get( f'{self.base_url}/api/2/users/me', verify=not use_grpm_config, headers=self._auth_header, ) handle_error_response(response) self._tenant_id = response.json().get('tenant_id') # Functions and variables starting with '_' are # meant for internal use only. def _read_config(self, config_path: str) -> dict: """Opens a JSON configuration file and converts its content into a dictionary. :param config_path: Path to JSON configuration file :return: A dictionary with configuration data :rtype: dict """ config_schema = { 'type': 'object', 'properties': { 'login': {'type': 'string'}, 'password': {'type': 'string'}, 'router_url': {'type': 'string'}, 'client_id': {'type': 'string'}, 'client_secret': {'type': 'string'}, }, 'required': [ 'login', 'password', 'router_url', 'client_id', 'client_secret' ] } with open(config_path) as config_file: config = json.load(config_file) validate(instance=config, schema=config_schema) return config # These are class properties which can hold any # read-only values or computable values. @property def tenant_id(self): return self._tenant_id @property def base_url(self): return self._base_url @property def auth_header(self): return self._auth_header
[ "jsonschema.validate", "json.load", "tools.handle_error_response", "os.path.dirname", "requests.get", "requests.post", "requests.auth.HTTPBasicAuth" ]
[((3112, 3143), 'tools.handle_error_response', 'handle_error_response', (['response'], {}), '(response)\n', (3133, 3143), False, 'from tools import handle_error_response\n'), ((4668, 4841), 'requests.post', 'requests.post', (['f"""{self.base_url}/api/2/idp/token"""'], {'verify': '(not use_grpm_config)', 'auth': 'auth', 'headers': "{'Content-Type': 'application/x-www-form-urlencoded'}", 'data': 'payload'}), "(f'{self.base_url}/api/2/idp/token', verify=not\n use_grpm_config, auth=auth, headers={'Content-Type':\n 'application/x-www-form-urlencoded'}, data=payload)\n", (4681, 4841), False, 'import requests\n'), ((4920, 4951), 'tools.handle_error_response', 'handle_error_response', (['response'], {}), '(response)\n', (4941, 4951), False, 'from tools import handle_error_response\n'), ((5283, 5389), 'requests.get', 'requests.get', (['f"""{self.base_url}/api/2/users/me"""'], {'verify': '(not use_grpm_config)', 'headers': 'self._auth_header'}), "(f'{self.base_url}/api/2/users/me', verify=not use_grpm_config,\n headers=self._auth_header)\n", (5295, 5389), False, 'import requests\n'), ((5446, 5477), 'tools.handle_error_response', 'handle_error_response', (['response'], {}), '(response)\n', (5467, 5477), False, 'from tools import handle_error_response\n'), ((6527, 6574), 'jsonschema.validate', 'validate', ([], {'instance': 'config', 'schema': 'config_schema'}), '(instance=config, schema=config_schema)\n', (6535, 6574), False, 'from jsonschema import validate\n'), ((442, 467), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (457, 467), False, 'import os\n'), ((6493, 6515), 'json.load', 'json.load', (['config_file'], {}), '(config_file)\n', (6502, 6515), False, 'import json\n'), ((2095, 2120), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2110, 2120), False, 'import os\n'), ((4507, 4558), 'requests.auth.HTTPBasicAuth', 'HTTPBasicAuth', (['self._client_id', 'self._client_secret'], {}), '(self._client_id, self._client_secret)\n', (4520, 4558), False, 'from requests.auth import HTTPBasicAuth\n')]
from tests.test_util import print_objects from tests.libs_for_tests import prepare_yamllist_for_diff from kalc.model.search import HypothesisysNode, OptimisticRun from kalc.model.system.Scheduler import Scheduler from kalc.model.system.globals import GlobalVar from kalc.model.kinds.Service import Service from kalc.model.kinds.Node import Node from kalc.model.kinds.Pod import Pod from kalc.model.kinds.Deployment import Deployment from kalc.model.kinds.DaemonSet import DaemonSet from kalc.model.kinds.PriorityClass import PriorityClass from kalc.model.kubernetes import KubernetesCluster from kalc.misc.const import * import pytest import inspect from kalc.model.search import K8ServiceInterruptSearch from kalc.misc.object_factory import labelFactory from click.testing import CliRunner from kalc.model.scenario import Scenario from poodle import planned from tests.libs_for_tests import convert_space_to_yaml,print_objects_from_yaml,print_plan,load_yaml, print_objects_compare, checks_assert_conditions, reload_cluster_from_yaml, checks_assert_conditions_in_one_mode DEBUG_MODE = 2 # 0 - no debug, 1- debug with yaml load , 2 - debug without yaml load def build_running_pod(podName, cpuRequest, memRequest, atNode): pod_running_1 = Pod() pod_running_1.metadata_name = "pod"+str(podName) pod_running_1.cpuRequest = cpuRequest pod_running_1.memRequest = memRequest pod_running_1.atNode = atNode pod_running_1.status = STATUS_POD["Running"] pod_running_1.hasDeployment = False pod_running_1.hasService = False pod_running_1.hasDaemonset = False return pod_running_1 def build_running_pod_with_d(podName, cpuRequest, memRequest, atNode, d, ds): pod_running_1 = Pod() pod_running_1.metadata_name = "pod"+str(podName) pod_running_1.cpuRequest = cpuRequest pod_running_1.memRequest = memRequest pod_running_1.atNode = atNode pod_running_1.status = STATUS_POD["Running"] pod_running_1.hasDeployment = False pod_running_1.hasService = False pod_running_1.hasDaemonset = False if d is not None: d.podList.add(pod_running_1) d.amountOfActivePods += 1 pod_running_1.hasDeployment = True if ds is not None: ds.podList.add(pod_running_1) ds.amountOfActivePods += 1 pod_running_1.hasDaemonset = True atNode.currentFormalCpuConsumption += cpuRequest atNode.currentFormalMemConsumption += memRequest return pod_running_1 def build_pending_pod(podName, cpuRequest, memRequest, toNode): p = build_running_pod(podName, cpuRequest, memRequest, Node.NODE_NULL) p.status = STATUS_POD["Pending"] p.toNode = toNode p.hasDeployment = False p.hasService = False p.hasDaemonset = False return p def build_pending_pod_with_d(podName, cpuRequest, memRequest, toNode, d, ds): p = Pod() p.metadata_name = "pod"+str(podName) p.cpuRequest = cpuRequest p.memRequest = memRequest p.status = STATUS_POD["Pending"] p.hasDeployment = False p.hasService = False p.hasDaemonset = False if d is not None: d.podList.add(p) p.hasDeployment = True if ds is not None: ds.podList.add(p) p.hasDaemonset = True p.toNode = toNode return p def test_1_1pod_2nodes_Service_outage(): # Initialize scheduler, globalvar k = KubernetesCluster() globalVar = next(filter(lambda x: isinstance(x, GlobalVar), k.state_objects)) scheduler = next(filter(lambda x: isinstance(x, Scheduler), k.state_objects)) # initial node state i = 0 j = 0 nodes = [] pods = [] # Service to detecte eviction s = Service() s.metadata_name = "test-service" s.amountOfActivePods = 0 s2 = Service() s2.metadata_name = "test-service2" s2.amountOfActivePods = 0 # create Deploymnent that we're going to detect failure of... 
pod_id = 1 node_1 = Node("node 1") node_1.cpuCapacity = 4 node_1.memCapacity = 4 node_1.isNull = False node_1.status = STATUS_NODE["Active"] node_2 = Node("node 2") node_2.cpuCapacity = 4 node_2.memCapacity = 4 node_2.isNull = False node_2.status = STATUS_NODE["Active"] pod_running_1 = build_running_pod_with_d(pod_id,2,2,node_1,None,None) pod_running_1.hasService = True node_1.amountOfActivePods += 1 s.podList.add(pod_running_1) s.amountOfActivePods += 1 s.status = STATUS_SERV["Started"] # k.state_objects += [node_1,node_2,pod_running_1, s] k.state_objects += [node_1,node_2,pod_running_1, s, STATUS_POD["Pending"], STATUS_POD["Killing"], STATUS_POD["Running"]] create_objects = [] k._build_state() class HypothesisysNode_k1(HypothesisysNode): pass p = HypothesisysNode_k1(k.state_objects) HypothesisysNode_k1.__name__ = inspect.stack()[0].function not_assert_conditions = [] print_objects(k.state_objects) p.Initiate_node_outage(node_1,globalVar) # p.Initiate_killing_of_Pod_because_of_node_outage(node_1,pod_running_1,globalVar) # p.KillPod_IF_Deployment_isNUll_Service_isNotNull_Daemonset_isNull(pod_running_1,node_1,s,scheduler) # p.NodeOutageFinished(node_1,globalVar) # p.Mark_node_outage_event(node_1,globalVar) # p.SelectNode(pod_running_1,node_2,globalVar) # p.StartPod_IF_Deployment_isNUll_Service_isNotNull_Daemonset_isNull(pod_running_1,node_2,scheduler,s,globalVar) # p.SchedulerCleaneduler,globalVar) print(" >> changed state << ") print_objects(k.state_objects) p.run() print (" >> after << ") print_objects(k.state_objects) print_plan(p) # @pytest.mark.skip(reason="too early to test") def test_2_3pods_2nodes_Service_outage(): # Initialize scheduler, globalvar k = KubernetesCluster() globalVar = next(filter(lambda x: isinstance(x, GlobalVar), k.state_objects)) scheduler = next(filter(lambda x: isinstance(x, Scheduler), k.state_objects)) # initial node state i = 0 j = 0 nodes = [] pods = [] # Service to detecte eviction s = Service() s.metadata_name = "test-service" s.amountOfActivePods = 0 s2 = Service() s2.metadata_name = "test-service2" s2.amountOfActivePods = 0 # create Deploymnent that we're going to detect failure of... 
pod_id = 1 node_1 = Node("node 1") node_1.cpuCapacity = 7 node_1.memCapacity = 7 node_1.isNull = False node_1.status = STATUS_NODE["Active"] node_2 = Node("node 2") node_2.cpuCapacity = 7 node_2.memCapacity = 7 node_2.isNull = False node_2.status = STATUS_NODE["Active"] pod_running_1 = build_running_pod_with_d(1,2,2,node_1,None,None) pod_running_1.hasService = True node_1.amountOfActivePods += 1 s.podList.add(pod_running_1) s.amountOfActivePods += 1 pod_running_2 = build_running_pod_with_d(2,2,2,node_2,None,None) pod_running_2.hasService = True node_2.amountOfActivePods += 1 s.podList.add(pod_running_2) s.amountOfActivePods += 1 pod_running_3 = build_running_pod_with_d(3,2,2,node_1,None,None) pod_running_3.hasService = True node_1.amountOfActivePods += 1 s2.podList.add(pod_running_3) s2.amountOfActivePods += 1 k.state_objects += [node_1,node_2,pod_running_1, s, STATUS_POD["Pending"], STATUS_POD["Killing"], STATUS_POD["Running"], Node.NODE_NULL] create_objects = [] k._build_state() class test_2_3pods_2nodes_Service_outage(HypothesisysNode): pass p = test_2_3pods_2nodes_Service_outage(k.state_objects) not_assert_conditions = [] print_objects(k.state_objects) p.Initiate_node_outage(node_2,globalVar) p.Initiate_killing_of_Pod_because_of_node_outage(node_2,pod_running_2,globalVar) # p.KillPod_IF_Deployment_isNUll_Service_isNotNull_Daemonset_isNull(pod_running_2,node_2,s,scheduler) # p.NodeOutageFinished(node_2,globalVar) # p.Mark_node_outage_event(node_1,globalVar) # p.SelectNode(pod_running_1,node_2,globalVar) # p.StartPod_IF_Deployment_isNUll_Service_isNotNull_Daemonset_isNull(pod_running_1,node_2,scheduler,s,globalVar) # p.SchedulerCleaneduler,globalVar) print(" >> changed state << ") print_objects(k.state_objects) p.run() print (" >> after << ") print_objects(k.state_objects) print_plan(p)
[ "kalc.model.kinds.Node.Node", "tests.test_util.print_objects", "kalc.model.kinds.Service.Service", "kalc.model.kubernetes.KubernetesCluster", "kalc.model.kinds.Pod.Pod", "tests.libs_for_tests.print_plan", "inspect.stack" ]
[((1244, 1249), 'kalc.model.kinds.Pod.Pod', 'Pod', ([], {}), '()\n', (1247, 1249), False, 'from kalc.model.kinds.Pod import Pod\n'), ((1710, 1715), 'kalc.model.kinds.Pod.Pod', 'Pod', ([], {}), '()\n', (1713, 1715), False, 'from kalc.model.kinds.Pod import Pod\n'), ((2840, 2845), 'kalc.model.kinds.Pod.Pod', 'Pod', ([], {}), '()\n', (2843, 2845), False, 'from kalc.model.kinds.Pod import Pod\n'), ((3351, 3370), 'kalc.model.kubernetes.KubernetesCluster', 'KubernetesCluster', ([], {}), '()\n', (3368, 3370), False, 'from kalc.model.kubernetes import KubernetesCluster\n'), ((3656, 3665), 'kalc.model.kinds.Service.Service', 'Service', ([], {}), '()\n', (3663, 3665), False, 'from kalc.model.kinds.Service import Service\n'), ((3742, 3751), 'kalc.model.kinds.Service.Service', 'Service', ([], {}), '()\n', (3749, 3751), False, 'from kalc.model.kinds.Service import Service\n'), ((3915, 3929), 'kalc.model.kinds.Node.Node', 'Node', (['"""node 1"""'], {}), "('node 1')\n", (3919, 3929), False, 'from kalc.model.kinds.Node import Node\n'), ((4066, 4080), 'kalc.model.kinds.Node.Node', 'Node', (['"""node 2"""'], {}), "('node 2')\n", (4070, 4080), False, 'from kalc.model.kinds.Node import Node\n'), ((4885, 4915), 'tests.test_util.print_objects', 'print_objects', (['k.state_objects'], {}), '(k.state_objects)\n', (4898, 4915), False, 'from tests.test_util import print_objects\n'), ((5518, 5548), 'tests.test_util.print_objects', 'print_objects', (['k.state_objects'], {}), '(k.state_objects)\n', (5531, 5548), False, 'from tests.test_util import print_objects\n'), ((5623, 5653), 'tests.test_util.print_objects', 'print_objects', (['k.state_objects'], {}), '(k.state_objects)\n', (5636, 5653), False, 'from tests.test_util import print_objects\n'), ((5658, 5671), 'tests.libs_for_tests.print_plan', 'print_plan', (['p'], {}), '(p)\n', (5668, 5671), False, 'from tests.libs_for_tests import convert_space_to_yaml, print_objects_from_yaml, print_plan, load_yaml, print_objects_compare, checks_assert_conditions, reload_cluster_from_yaml, checks_assert_conditions_in_one_mode\n'), ((5814, 5833), 'kalc.model.kubernetes.KubernetesCluster', 'KubernetesCluster', ([], {}), '()\n', (5831, 5833), False, 'from kalc.model.kubernetes import KubernetesCluster\n'), ((6119, 6128), 'kalc.model.kinds.Service.Service', 'Service', ([], {}), '()\n', (6126, 6128), False, 'from kalc.model.kinds.Service import Service\n'), ((6205, 6214), 'kalc.model.kinds.Service.Service', 'Service', ([], {}), '()\n', (6212, 6214), False, 'from kalc.model.kinds.Service import Service\n'), ((6378, 6392), 'kalc.model.kinds.Node.Node', 'Node', (['"""node 1"""'], {}), "('node 1')\n", (6382, 6392), False, 'from kalc.model.kinds.Node import Node\n'), ((6529, 6543), 'kalc.model.kinds.Node.Node', 'Node', (['"""node 2"""'], {}), "('node 2')\n", (6533, 6543), False, 'from kalc.model.kinds.Node import Node\n'), ((7644, 7674), 'tests.test_util.print_objects', 'print_objects', (['k.state_objects'], {}), '(k.state_objects)\n', (7657, 7674), False, 'from tests.test_util import print_objects\n'), ((8275, 8305), 'tests.test_util.print_objects', 'print_objects', (['k.state_objects'], {}), '(k.state_objects)\n', (8288, 8305), False, 'from tests.test_util import print_objects\n'), ((8380, 8410), 'tests.test_util.print_objects', 'print_objects', (['k.state_objects'], {}), '(k.state_objects)\n', (8393, 8410), False, 'from tests.test_util import print_objects\n'), ((8415, 8428), 'tests.libs_for_tests.print_plan', 'print_plan', (['p'], {}), '(p)\n', (8425, 8428), False, 'from 
tests.libs_for_tests import convert_space_to_yaml, print_objects_from_yaml, print_plan, load_yaml, print_objects_compare, checks_assert_conditions, reload_cluster_from_yaml, checks_assert_conditions_in_one_mode\n'), ((4822, 4837), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (4835, 4837), False, 'import inspect\n')]
from rh_logger.api import logger import logging import numpy as np from scipy.optimize import least_squares import pickle import os import time import scipy.sparse as spp from scipy.sparse.linalg import lsqr import scipy.optimize from rh_renderer.models import RigidModel #import common EPS = 0.000001 class Rigid2DOptimizer(object): # TODO - make it a class def __init__(self, **kwargs): self._damping = float(kwargs.get("damping", 0.0)) self._huber_delta = float(kwargs.get("huber_delta", 15)) self._max_iterations = int(kwargs.get("max_iterations", 1000)) self._init_gamma = float(kwargs.get("init_gamma", 0.00000000001)) self._min_gamma = float(kwargs.get("min_gamma", 1e-30)) self._eps = float(kwargs.get("eps", 1e-9)) self._pre_translate = "pre_translate" in kwargs @staticmethod def apply_rigid_transform(pts, theta, t_x, t_y): cos_theta = np.cos(theta) sin_theta = np.sin(theta) return np.dot([[cos_theta, -sin_theta], [sin_theta, cos_theta]], pts.T).T + np.array([t_x, t_y]) @staticmethod def optimize_func(params, tile_names_map, matches, matches_num): # Compute the residuals of all the matches residuals = np.empty((matches_num, ), dtype=np.float32) start_idx = 0 for pair_name, pair_matches in matches.items(): pair_matches_len = len(pair_matches[0]) tile1_params_start_idx = tile_names_map[pair_name[0]] * 3 tile2_params_start_idx = tile_names_map[pair_name[1]] * 3 pts1_transformed = Rigid2DOptimizer.apply_rigid_transform(pair_matches[0], *params[tile1_params_start_idx:tile1_params_start_idx+3]) pts2_transformed = Rigid2DOptimizer.apply_rigid_transform(pair_matches[1], *params[tile2_params_start_idx:tile2_params_start_idx+3]) # compute the L2 distance between the two sets of points deltas = pts1_transformed - pts2_transformed residuals[start_idx:start_idx + pair_matches_len] = np.sqrt(np.sum(deltas**2, axis=1)) start_idx += pair_matches_len # Normalize the residuals by 2*median #med_residual = np.median(residuals) #residuals = residuals / (2*med_residual + EPS) return residuals @staticmethod def compute_all_dists(matches, transforms, matches_num): dists = np.empty((matches_num, ), dtype=np.float32) start_idx = 0 for pair_name, pair_matches in matches.items(): pair_matches_len = len(pair_matches[0]) transform1 = transforms[pair_name[0]] transform2 = transforms[pair_name[1]] pts1_transformed = Rigid2DOptimizer.apply_rigid_transform(pair_matches[0], *transform1) pts2_transformed = Rigid2DOptimizer.apply_rigid_transform(pair_matches[1], *transform2) # compute the L2 distance between the two sets of points deltas = pts1_transformed - pts2_transformed dists[start_idx:start_idx + pair_matches_len] = np.sqrt(np.sum(deltas**2, axis=1)) start_idx += pair_matches_len return dists @staticmethod def grad_F_huber(huber_delta, params, tile_names_map, matches, matches_num): # Compute the residuals of all the matches grad_f_result = np.zeros_like(params) #start_idx = 0 for pair_name, pair_matches in matches.items(): #pair_matches_len = len(pair_matches[0]) tile1_params_start_idx = tile_names_map[pair_name[0]] * 3 tile2_params_start_idx = tile_names_map[pair_name[1]] * 3 pts1_transformed = Rigid2DOptimizer.apply_rigid_transform(pair_matches[0], *params[tile1_params_start_idx:tile1_params_start_idx+3]) pts2_transformed = Rigid2DOptimizer.apply_rigid_transform(pair_matches[1], *params[tile2_params_start_idx:tile2_params_start_idx+3]) deltas = pts1_transformed - pts2_transformed delta_x = deltas[:, 0] delta_y = deltas[:, 1] residuals = np.sqrt(np.sum(deltas**2, axis=1)) residuals_huber_mask = residuals <= huber_delta 
# The gradient coefficient for anything that is below the huber delta is 1, and anything above should be: # (delta / R), where R is the distance between the two points grad_f_multiplier = np.ones_like(residuals) grad_f_multiplier[~residuals_huber_mask] = huber_delta / residuals[~residuals_huber_mask] # The current matches only add values to the gradient at the indices of the relevant parameters (don't change anything else) theta1_idx = tile1_params_start_idx tx1_idx = tile1_params_start_idx + 1 ty1_idx = tile1_params_start_idx + 2 theta2_idx = tile2_params_start_idx tx2_idx = tile2_params_start_idx + 1 ty2_idx = tile2_params_start_idx + 2 # Update grad(Theta_tile1) , grad(Tx_tile1), grad(Ty_tile1) grad_f_result[theta1_idx] += np.sum( np.dot(delta_x * (-pair_matches[0][:, 0]*params[theta1_idx] - pair_matches[0][:, 1]), grad_f_multiplier) ) + np.sum( np.dot(delta_y * (pair_matches[0][:, 0] - pair_matches[0][:, 1]*params[theta1_idx]), grad_f_multiplier) ) grad_f_result[tx1_idx] += np.sum(np.dot(delta_x, grad_f_multiplier)) grad_f_result[ty1_idx] += np.sum(np.dot(delta_y, grad_f_multiplier)) # Update grad(Theta_tile2) , grad(Tx_tile2), grad(Ty_tile2) grad_f_result[theta2_idx] += np.sum( np.dot(delta_x * (pair_matches[1][:, 0]*params[theta2_idx] + pair_matches[1][:, 1]), grad_f_multiplier) ) + np.sum( np.dot(delta_y * (-pair_matches[1][:, 0] + pair_matches[1][:, 1]*params[theta2_idx]), grad_f_multiplier) ) grad_f_result[tx2_idx] += -np.sum(np.dot(delta_x, grad_f_multiplier)) grad_f_result[ty2_idx] += -np.sum(np.dot(delta_y, grad_f_multiplier)) return grad_f_result def _gradient_descent(self, optimize_func, p0, grad_F_huber, args=None): def compute_cost_huber(optimize_func, cur_p, params, huber_delta): residuals = optimize_func(cur_p, *params) cost = np.empty_like(residuals) residuals_huber_mask = residuals <= huber_delta cost[residuals_huber_mask] = 0.5 * residuals[residuals_huber_mask]**2 cost[~residuals_huber_mask] = huber_delta * residuals[~residuals_huber_mask] - (0.5 * huber_delta**2) return np.sum(cost) cur_p = p0 #cur_cost = np.sum(optimize_func(cur_p, *args)) cur_cost = compute_cost_huber(optimize_func, cur_p, args, self._huber_delta) logger.report_event("Initial cost: {}".format(cur_cost), log_level=logging.INFO) gamma = self._init_gamma for it in range(self._max_iterations): #print("Iteration {}".format(it)) prev_p = cur_p prev_cost = cur_cost cur_p = prev_p - gamma * grad_F_huber(self._huber_delta, prev_p, *args) #print("New params: {}".format(cur_p)) #cur_cost = np.sum(optimize_func(cur_p, *args)) cur_cost = compute_cost_huber(optimize_func, cur_p, args, self._huber_delta) #print("New cost: {}".format(cur_cost)) if it % 100 == 0: logger.report_event("iter {}: C: {}".format(it, cur_cost), log_level=logging.INFO) if cur_cost > prev_cost: # we took a bad step: undo it, scale down gamma, and start over #print("Backtracking step") cur_p = prev_p cur_cost = prev_cost gamma *= 0.5 elif np.all(np.abs(cur_p - prev_p) <= self._eps): # We took a good step, but the change to the parameters vector is negligible break else: # We took a good step, try to increase the step size a bit gamma *= 1.1 if gamma < self._min_gamma: break #print("The local minimum occurs at", cur_p) logger.report_event("Post-opt cost: {}".format(cur_cost), log_level=logging.INFO) return cur_p def optimize(self, orig_locs, matches): """ The aim is to find for each tile a triplet: tetha, t_x, and t_y that will define the rigid transformation that needs to be applied to that tile. 
The transformation needs to minimize the L2 distance between the matches of pairs of tiles. To this end, we define our optimizations as a non-linear least squares problem. Given that the number of tiles is N, and the total number of matches is M, we want to find the values for 3*N parameters, s.t., the sum of all distances is minimized. Note that due to outliers, we would like to use a more robust method, such as huber loss. """ tile_names = sorted(list(orig_locs.keys())) tile_names_map = {name:idx for idx, name in enumerate(tile_names)} matches_num = np.sum([len(m[0]) for m in matches.values()]) p0 = np.empty((len(orig_locs)*3, ), dtype=np.float32) # all triplets [theta1, t_x1, t_y1, theta2, t_x2, t_y2, ...] if self._pre_translate: # For debug: solution1 = {name:[0, orig_locs[name][0], orig_locs[name][1]] for name, idx in tile_names_map.items()} dists = Rigid2DOptimizer.compute_all_dists(matches, solution1, matches_num) logger.report_event("pre optimization distances: min={}, mean={}, median={}, max={}".format(np.min(dists), np.mean(dists), np.median(dists), np.max(dists)), log_level=logging.INFO) st_time = time.time() # Find an initial translation only transformation for each tile (better than the initial assumption) # solve for X # Create a matrix A that is made of 1's, 0's and -1's of size matches_num*tiles_num, # and a vector b s.t. b = - matches[0].x + matches[1].x (actually b will be a matches_num*2 matrix, one column for x and the other for y) # We'll try to find x, s.t. A*x=b, and therefore each row (corresponding to a single match of a pair of tiles), # will have 1 for the first tile of the match, -1 for the second tile of the match, and 0 elsewhere #A = spp.csc_matrix( (matches_num, len(orig_locs)), dtype=np.float32 ) A = spp.lil_matrix( (matches_num, len(orig_locs)), dtype=np.float32 ) b = np.empty((matches_num, 2), dtype=np.float32) start_idx = 0 for pair_name, pair_matches in matches.items(): pair_matches_len = len(pair_matches[0]) tile1_params_idx = tile_names_map[pair_name[0]] tile2_params_idx = tile_names_map[pair_name[1]] A[start_idx:start_idx + pair_matches_len, tile1_params_idx] = 1 A[start_idx:start_idx + pair_matches_len, tile2_params_idx] = -1 b[start_idx:start_idx + pair_matches_len] = - pair_matches[0] + pair_matches[1] start_idx += pair_matches_len # convert A to row sparse matrix, for faster computations A = A.tocsr() #p0_translate_x = np.array([orig_locs[k][0] for k in tile_names]) # [t_x1, t_x2, ...] 
with the original locations Tx = lsqr(A, b[:, 0], damp=self._damping)[0] Ty = lsqr(A, b[:, 1], damp=self._damping)[0] logger.report_event("translation-only optimization time: {} seconds".format(time.time() - st_time), log_level=logging.INFO) # Normalize all deltas to (0, 0) Tx -= np.min(Tx) Ty -= np.min(Ty) p0[1::3] = Tx p0[2::3] = Ty # For debug: #solution2 = {name:[0, p0[1::3][idx], p0[2::3][idx]] for name, idx in tile_names_map.items()} solution2 = {name:[0, Tx[idx], Ty[idx]] for name, idx in tile_names_map.items()} dists = Rigid2DOptimizer.compute_all_dists(matches, solution2, matches_num) logger.report_event("post translation optimization distances: min={}, mean={}, median={}, max={}".format(np.min(dists), np.mean(dists), np.median(dists), np.max(dists)), log_level=logging.INFO) else: p0[1::3] = [orig_locs[k][0] for k in tile_names] # set default X to original location's X p0[2::3] = [orig_locs[k][1] for k in tile_names] # set default Y to original location's Y p0[::3] = 0 # Set default theta to 0 # Create a sparse matrix that has st_time = time.time() #res = least_squares(optimize_func, p0, args=(tile_names_map, matches, matches_num), verbose=2) #res = least_squares(optimize_func, p0, loss='huber', f_scale=15, args=(tile_names_map, matches, matches_num), verbose=2) #res = least_squares(optimize_func, p0, loss='soft_l1', f_scale=15, args=(tile_names_map, matches, matches_num), verbose=2) # stepsize = 0.0001 # max_iterations = 1000 # res = gradient_descent(optimize_func, p0, max_iterations, stepsize, args=(tile_names_map, matches, matches_num)) huber_delta = 15 # Maximal L2 distance for a match to be considered inlier res = self._gradient_descent(Rigid2DOptimizer.optimize_func, p0, Rigid2DOptimizer.grad_F_huber, args=(tile_names_map, matches, matches_num)) end_time = time.time() logger.report_event("non-linear optimization time: {} seconds".format(end_time - st_time), log_level=logging.INFO) solution = {} if res is not None: for name, idx in tile_names_map.items(): solution[name] = np.array(res[idx * 3:idx*3 + 3]) # Stores [theta, t_x, t_y] of the tile else: raise Exception("Could not find a valid solution to the optimization problem") dists = Rigid2DOptimizer.compute_all_dists(matches, solution, matches_num) logger.report_event("post optimization distances: min={}, mean={}, median={}, max={}".format(np.min(dists), np.mean(dists), np.median(dists), np.max(dists)), log_level=logging.INFO) # create the optimized models for each tile optimized_models = {name:RigidModel(res[idx*3], res[idx*3+1:idx*3+3]) for name, idx in tile_names_map.items()} return optimized_models # def fix_matches(orig_locs, matches, new_matches_num=4): # # # Create "false matches" in case non are there # # for pair_name, pair_matches in matches.values(): # # if len(pair_matches[0]) < 2: # # print("Creating made up matches for pair: {} -> {}".format(os.path.basename(pair_name[0]), os.path.basename(pair_name[1]))) # # pair_matches[0] = np.zeros((new_matches_num, 2)) # # pair_matches[1] = np.zeros((new_matches_num, 2)) # # Remove any pair of matched tiles that don't have matches # to_remove_keys = [] # for pair_name, pair_matches in matches.items(): # if len(pair_matches[0]) == 0: # print("Removing no matches for pair: {} -> {}".format(os.path.basename(pair_name[0]), os.path.basename(pair_name[1]))) # to_remove_keys.append(pair_name) # # for k in to_remove_keys: # del matches[k] if __name__ == '__main__': # in_orig_locs_fname = 'data/W05_Sec001_ROI466_mfovs_475_476_orig_locs.pkl' # in_matches_fname = 
'data/W05_Sec001_ROI466_mfovs_475_476.pkl' # in_ts_fname = 'data/W05_Sec001_ROI466_mfovs_475_476.json' # out_ts_fname = 'montaged_optimize3_W05_Sec001_ROI466_mfovs_475_476.json' in_orig_locs_fname = 'data/W05_Sec001_ROI466_orig_locs.pkl' in_matches_fname = 'data/W05_Sec001_ROI466.pkl' in_ts_fname = 'data/W05_Sec001_ROI466.json' out_ts_fname = 'montaged_optimize3_W05_Sec001_ROI466.json' # Read the files with open(in_orig_locs_fname, 'rb') as in_f: orig_locs = pickle.load(in_f) with open(in_matches_fname, 'rb') as in_f: matches = pickle.load(in_f) fix_matches(orig_locs, matches) solution = optimize(orig_locs, matches, pre_translate=True) #common.export_rigid_tilespec(in_ts_fname, out_ts_fname, solution)
[ "scipy.sparse.linalg.lsqr", "numpy.zeros_like", "numpy.ones_like", "numpy.sum", "numpy.abs", "numpy.median", "numpy.empty", "numpy.empty_like", "time.time", "numpy.min", "numpy.sin", "pickle.load", "numpy.array", "numpy.cos", "rh_renderer.models.RigidModel", "numpy.mean", "numpy.dot", "numpy.max" ]
[((939, 952), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (945, 952), True, 'import numpy as np\n'), ((973, 986), 'numpy.sin', 'np.sin', (['theta'], {}), '(theta)\n', (979, 986), True, 'import numpy as np\n'), ((1289, 1331), 'numpy.empty', 'np.empty', (['(matches_num,)'], {'dtype': 'np.float32'}), '((matches_num,), dtype=np.float32)\n', (1297, 1331), True, 'import numpy as np\n'), ((2441, 2483), 'numpy.empty', 'np.empty', (['(matches_num,)'], {'dtype': 'np.float32'}), '((matches_num,), dtype=np.float32)\n', (2449, 2483), True, 'import numpy as np\n'), ((3391, 3412), 'numpy.zeros_like', 'np.zeros_like', (['params'], {}), '(params)\n', (3404, 3412), True, 'import numpy as np\n'), ((13219, 13230), 'time.time', 'time.time', ([], {}), '()\n', (13228, 13230), False, 'import time\n'), ((14032, 14043), 'time.time', 'time.time', ([], {}), '()\n', (14041, 14043), False, 'import time\n'), ((16577, 16594), 'pickle.load', 'pickle.load', (['in_f'], {}), '(in_f)\n', (16588, 16594), False, 'import pickle\n'), ((16660, 16677), 'pickle.load', 'pickle.load', (['in_f'], {}), '(in_f)\n', (16671, 16677), False, 'import pickle\n'), ((1109, 1129), 'numpy.array', 'np.array', (['[t_x, t_y]'], {}), '([t_x, t_y])\n', (1117, 1129), True, 'import numpy as np\n'), ((4449, 4472), 'numpy.ones_like', 'np.ones_like', (['residuals'], {}), '(residuals)\n', (4461, 4472), True, 'import numpy as np\n'), ((6835, 6859), 'numpy.empty_like', 'np.empty_like', (['residuals'], {}), '(residuals)\n', (6848, 6859), True, 'import numpy as np\n'), ((7135, 7147), 'numpy.sum', 'np.sum', (['cost'], {}), '(cost)\n', (7141, 7147), True, 'import numpy as np\n'), ((10284, 10295), 'time.time', 'time.time', ([], {}), '()\n', (10293, 10295), False, 'import time\n'), ((11099, 11143), 'numpy.empty', 'np.empty', (['(matches_num, 2)'], {'dtype': 'np.float32'}), '((matches_num, 2), dtype=np.float32)\n', (11107, 11143), True, 'import numpy as np\n'), ((12268, 12278), 'numpy.min', 'np.min', (['Tx'], {}), '(Tx)\n', (12274, 12278), True, 'import numpy as np\n'), ((12297, 12307), 'numpy.min', 'np.min', (['Ty'], {}), '(Ty)\n', (12303, 12307), True, 'import numpy as np\n'), ((14842, 14896), 'rh_renderer.models.RigidModel', 'RigidModel', (['res[idx * 3]', 'res[idx * 3 + 1:idx * 3 + 3]'], {}), '(res[idx * 3], res[idx * 3 + 1:idx * 3 + 3])\n', (14852, 14896), False, 'from rh_renderer.models import RigidModel\n'), ((1002, 1066), 'numpy.dot', 'np.dot', (['[[cos_theta, -sin_theta], [sin_theta, cos_theta]]', 'pts.T'], {}), '([[cos_theta, -sin_theta], [sin_theta, cos_theta]], pts.T)\n', (1008, 1066), True, 'import numpy as np\n'), ((2093, 2120), 'numpy.sum', 'np.sum', (['(deltas ** 2)'], {'axis': '(1)'}), '(deltas ** 2, axis=1)\n', (2099, 2120), True, 'import numpy as np\n'), ((3111, 3138), 'numpy.sum', 'np.sum', (['(deltas ** 2)'], {'axis': '(1)'}), '(deltas ** 2, axis=1)\n', (3117, 3138), True, 'import numpy as np\n'), ((4138, 4165), 'numpy.sum', 'np.sum', (['(deltas ** 2)'], {'axis': '(1)'}), '(deltas ** 2, axis=1)\n', (4144, 4165), True, 'import numpy as np\n'), ((5671, 5705), 'numpy.dot', 'np.dot', (['delta_x', 'grad_f_multiplier'], {}), '(delta_x, grad_f_multiplier)\n', (5677, 5705), True, 'import numpy as np\n'), ((5752, 5786), 'numpy.dot', 'np.dot', (['delta_y', 'grad_f_multiplier'], {}), '(delta_y, grad_f_multiplier)\n', (5758, 5786), True, 'import numpy as np\n'), ((11959, 11995), 'scipy.sparse.linalg.lsqr', 'lsqr', (['A', 'b[:, 0]'], {'damp': 'self._damping'}), '(A, b[:, 0], damp=self._damping)\n', (11963, 11995), False, 'from scipy.sparse.linalg 
import lsqr\n'), ((12016, 12052), 'scipy.sparse.linalg.lsqr', 'lsqr', (['A', 'b[:, 1]'], {'damp': 'self._damping'}), '(A, b[:, 1], damp=self._damping)\n', (12020, 12052), False, 'from scipy.sparse.linalg import lsqr\n'), ((14305, 14339), 'numpy.array', 'np.array', (['res[idx * 3:idx * 3 + 3]'], {}), '(res[idx * 3:idx * 3 + 3])\n', (14313, 14339), True, 'import numpy as np\n'), ((14667, 14680), 'numpy.min', 'np.min', (['dists'], {}), '(dists)\n', (14673, 14680), True, 'import numpy as np\n'), ((14682, 14696), 'numpy.mean', 'np.mean', (['dists'], {}), '(dists)\n', (14689, 14696), True, 'import numpy as np\n'), ((14698, 14714), 'numpy.median', 'np.median', (['dists'], {}), '(dists)\n', (14707, 14714), True, 'import numpy as np\n'), ((14716, 14729), 'numpy.max', 'np.max', (['dists'], {}), '(dists)\n', (14722, 14729), True, 'import numpy as np\n'), ((5175, 5285), 'numpy.dot', 'np.dot', (['(delta_x * (-pair_matches[0][:, 0] * params[theta1_idx] - pair_matches[0][:,\n 1]))', 'grad_f_multiplier'], {}), '(delta_x * (-pair_matches[0][:, 0] * params[theta1_idx] -\n pair_matches[0][:, 1]), grad_f_multiplier)\n', (5181, 5285), True, 'import numpy as np\n'), ((5428, 5538), 'numpy.dot', 'np.dot', (['(delta_y * (pair_matches[0][:, 0] - pair_matches[0][:, 1] * params[theta1_idx])\n )', 'grad_f_multiplier'], {}), '(delta_y * (pair_matches[0][:, 0] - pair_matches[0][:, 1] * params[\n theta1_idx]), grad_f_multiplier)\n', (5434, 5538), True, 'import numpy as np\n'), ((5953, 6063), 'numpy.dot', 'np.dot', (['(delta_x * (pair_matches[1][:, 0] * params[theta2_idx] + pair_matches[1][:, 1])\n )', 'grad_f_multiplier'], {}), '(delta_x * (pair_matches[1][:, 0] * params[theta2_idx] + pair_matches\n [1][:, 1]), grad_f_multiplier)\n', (5959, 6063), True, 'import numpy as np\n'), ((6205, 6316), 'numpy.dot', 'np.dot', (['(delta_y * (-pair_matches[1][:, 0] + pair_matches[1][:, 1] * params[\n theta2_idx]))', 'grad_f_multiplier'], {}), '(delta_y * (-pair_matches[1][:, 0] + pair_matches[1][:, 1] * params[\n theta2_idx]), grad_f_multiplier)\n', (6211, 6316), True, 'import numpy as np\n'), ((6450, 6484), 'numpy.dot', 'np.dot', (['delta_x', 'grad_f_multiplier'], {}), '(delta_x, grad_f_multiplier)\n', (6456, 6484), True, 'import numpy as np\n'), ((6532, 6566), 'numpy.dot', 'np.dot', (['delta_y', 'grad_f_multiplier'], {}), '(delta_y, grad_f_multiplier)\n', (6538, 6566), True, 'import numpy as np\n'), ((10172, 10185), 'numpy.min', 'np.min', (['dists'], {}), '(dists)\n', (10178, 10185), True, 'import numpy as np\n'), ((10187, 10201), 'numpy.mean', 'np.mean', (['dists'], {}), '(dists)\n', (10194, 10201), True, 'import numpy as np\n'), ((10203, 10219), 'numpy.median', 'np.median', (['dists'], {}), '(dists)\n', (10212, 10219), True, 'import numpy as np\n'), ((10221, 10234), 'numpy.max', 'np.max', (['dists'], {}), '(dists)\n', (10227, 10234), True, 'import numpy as np\n'), ((12790, 12803), 'numpy.min', 'np.min', (['dists'], {}), '(dists)\n', (12796, 12803), True, 'import numpy as np\n'), ((12805, 12819), 'numpy.mean', 'np.mean', (['dists'], {}), '(dists)\n', (12812, 12819), True, 'import numpy as np\n'), ((12821, 12837), 'numpy.median', 'np.median', (['dists'], {}), '(dists)\n', (12830, 12837), True, 'import numpy as np\n'), ((12839, 12852), 'numpy.max', 'np.max', (['dists'], {}), '(dists)\n', (12845, 12852), True, 'import numpy as np\n'), ((8316, 8338), 'numpy.abs', 'np.abs', (['(cur_p - prev_p)'], {}), '(cur_p - prev_p)\n', (8322, 8338), True, 'import numpy as np\n'), ((12144, 12155), 'time.time', 'time.time', ([], {}), '()\n', (12153, 
12155), False, 'import time\n')]
import unittest import orca from setup.settings import * from pandas.util.testing import * class FunctionReorderLevelsTest(unittest.TestCase): @classmethod def setUpClass(cls): # connect to a DolphinDB server orca.connect(HOST, PORT, "admin", "123456") def test_function_reshaping_sorting_transposing_reorder_levels_dataframe(self): arrays = [np.array(['bar', 'bar', 'baz', 'baz', 'foo', 'foo', 'qux', 'qux']), np.array(['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two'])] pdf = pd.DataFrame(np.random.randn(8, 4), index=arrays) odf = orca.DataFrame(pdf) assert_frame_equal(odf.reorder_levels([1, 0], axis=0).to_pandas(), pdf.reorder_levels([1, 0], axis=0)) if __name__ == '__main__': unittest.main()
[ "unittest.main", "orca.connect", "orca.DataFrame" ]
[((778, 793), 'unittest.main', 'unittest.main', ([], {}), '()\n', (791, 793), False, 'import unittest\n'), ((235, 278), 'orca.connect', 'orca.connect', (['HOST', 'PORT', '"""admin"""', '"""123456"""'], {}), "(HOST, PORT, 'admin', '123456')\n", (247, 278), False, 'import orca\n'), ((614, 633), 'orca.DataFrame', 'orca.DataFrame', (['pdf'], {}), '(pdf)\n', (628, 633), False, 'import orca\n')]
#!/usr/bin/env python3 # ------------------------------------------------------------- # salt-get-config-dir command # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - # Project: salt-wrapper # Licence: BSD-2-Clause # ------------------------------------------------------------- import json import os import sys # ------------------------------------------------------------- # Load configuration # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - config_file_path = os.environ.get( "SALT_WRAPPER_CONF", "/usr/local/etc/salt-wrapper.conf" ) if not os.path.isfile(config_file_path): print( "The configuration file {0} doesn't exist.".format(config_file_path), file=sys.stderr ) print( "See https://docs.nasqueron.org/salt-wrapper/admin.html#configuration-file", # noqa file=sys.stderr ) sys.exit(2) try: with open(config_file_path) as config_file: config = json.load(config_file) except OSError as err: print("Can't open configuration file: {0}".format(err)) sys.exit(4) # ------------------------------------------------------------- # Find configuration directory # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - for root in config['roots']: if os.getcwd().startswith(root['states']): print(root['config']) sys.exit(0) sys.exit(1)
[ "json.load", "os.getcwd", "os.environ.get", "os.path.isfile", "sys.exit" ]
[((533, 604), 'os.environ.get', 'os.environ.get', (['"""SALT_WRAPPER_CONF"""', '"""/usr/local/etc/salt-wrapper.conf"""'], {}), "('SALT_WRAPPER_CONF', '/usr/local/etc/salt-wrapper.conf')\n", (547, 604), False, 'import os\n'), ((1415, 1426), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1423, 1426), False, 'import sys\n'), ((623, 655), 'os.path.isfile', 'os.path.isfile', (['config_file_path'], {}), '(config_file_path)\n', (637, 655), False, 'import os\n'), ((914, 925), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (922, 925), False, 'import sys\n'), ((997, 1019), 'json.load', 'json.load', (['config_file'], {}), '(config_file)\n', (1006, 1019), False, 'import json\n'), ((1107, 1118), 'sys.exit', 'sys.exit', (['(4)'], {}), '(4)\n', (1115, 1118), False, 'import sys\n'), ((1402, 1413), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1410, 1413), False, 'import sys\n'), ((1324, 1335), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1333, 1335), False, 'import os\n')]
import hmac import logging from hashlib import sha256 from typing import TYPE_CHECKING, Dict, List, Optional from django import forms from django.contrib.auth.models import AnonymousUser from oauth2_provider.contrib.rest_framework import TokenHasScope from oauth2_provider.models import get_access_token_model, get_application_model from rest_framework import routers, status, viewsets from rest_framework.authentication import BaseAuthentication from rest_framework.response import Response from drf_integrations.integrations.base import BaseIntegration, BaseIntegrationForm from drf_integrations.models import get_application_installation_model if TYPE_CHECKING: from rest_framework.request import Request logger = logging.getLogger(__name__) Application = get_application_model() ApplicationInstallation = get_application_installation_model() AccessToken = get_access_token_model() class ShopifyConfigForm(BaseIntegrationForm): shopify_shop = forms.CharField() shared_secret = forms.CharField() def set_initial_values( self, *, target, integration, application=None, **kwargs, ): if application: if application.internal_integration_name != ShopifyIntegration.name: raise ValueError( f"Cannot use {self.__class__.__name__} with " f"integration {application.internal_integration_name}" ) else: application = Application.objects.get_by_internal_integration(ShopifyIntegration) super().set_initial_values( target=target, integration=integration, application=application, **kwargs, ) @classmethod def clean_form_data(cls, installation_form: "forms.Form") -> Dict: data = super().clean_form_data(installation_form) shopify_shop = data["shopify_shop"] organisation_id = data.get("organisation_id") if organisation_id and ( ApplicationInstallation.objects.filter(config__shopify_shop=shopify_shop) .exclude(client_id=organisation_id) .exists() ): raise forms.ValidationError( { "shopify_shop": { f"There is already an existing installation with shop {shopify_shop}" } } ) return data class ShopifyIntegration(BaseIntegration): name = "shopify" display_name = "Shopify" config_form_class = ShopifyConfigForm default_scopes = ["purchase:shopify:write", "webhook:shopify:write"] def get_urls(self) -> List: router = routers.DefaultRouter() router.register("webhook", ShopifyWebhookViewSet, basename="shopify") return router.urls @classmethod def get_installation_lookup_from_request(cls, request: "Request", **kwargs) -> Dict: return cls.get_installation_lookup_from_config_values( shopify_shop=( request.headers.get("X-Shopify-Hmac-Sha256") or request.query_params.get("shop") ) ) class ShopifyBaseAuthBackend(BaseAuthentication): def _validate_required(self, request) -> bool: raise NotImplementedError() def _get_signature(self, request) -> Optional[bool]: raise NotImplementedError() def _get_signature_values(self, request) -> Optional[bool]: raise NotImplementedError() def authenticate(self, request): if not self._validate_required(request): logger.info("integrations.shopify.missing_params") return None try: installation = ApplicationInstallation.objects.select_related("application").get( **ShopifyIntegration.get_installation_lookup_from_request( request=request, application=None ) ) except ( ApplicationInstallation.DoesNotExist, ApplicationInstallation.MultipleObjectsReturned, ): logger.info("integrations.shopify.application_setup_not_found") return None context = installation.get_context() signature = self._get_signature(request) signature_values = self._get_signature_values(request) if not signature or not signature_values: logger.info( 
"integrations.shopify.missing_signature_values", extra=dict(signature=signature, signature_values=signature_values), ) return None new_signature = hmac.new( context.installation.get_config()["shared_secret"].encode(), signature_values, sha256, ) if not hmac.compare_digest(signature.encode("utf-8"), new_signature.encode("utf-8")): logger.info( "integrations.shopify.invalid_signature", extra=dict( request=request, headers=request.headers, queryparams=request.query_params, signature=signature, calculated_signature=new_signature, ), ) return None request.auth_context = context return ( AnonymousUser(), AccessToken.objects.create_for_internal_integration( application=installation.application ), ) class ShopifyProxyBackend(ShopifyBaseAuthBackend): required_queryparams = ( "shop", "signature", ) def _validate_required(self, request) -> bool: return all(request.query_params.get(key) for key in self.required_queryparams) def _get_signature(self, request): queryparams = { key: ",".join(sorted(values)) for key, values in request.query_params.lists() if key != "signature" } encoded_params = "&".join( (f"{key}={queryparams[key]}" for key in sorted(queryparams.keys())) ) return encoded_params def _get_signature_values(self, request): return request.query_params["signature"] class ShopifyWebhookBackend(ShopifyBaseAuthBackend): required_headers = ( "X-Shopify-Hmac-Sha256", "X-Shopify-Shop-Domain", ) def _validate_required(self, request) -> bool: return all(key in request.headers for key in self.required_headers) def _get_signature(self, request): return request.headers["X-Shopify-Hmac-Sha256"] def _get_signature_values(self, request): return request.body class ShopifyPermission(TokenHasScope): def has_permission(self, request, view): return isinstance( request.successful_authenticator, ShopifyBaseAuthBackend ) and super().has_permission(request, view) class ShopifyWebhookViewSet(viewsets.ViewSet): authentication_classes = (ShopifyWebhookBackend,) permission_classes = (ShopifyPermission,) required_scopes = ("webhook:shopify:write",) def create(self, request): logger.info( "integrations.shopify.webhook", extra=dict(data=request.data, installation=request.auth_context.installation), ) return Response(status=status.HTTP_200_OK)
[ "django.contrib.auth.models.AnonymousUser", "rest_framework.routers.DefaultRouter", "oauth2_provider.models.get_access_token_model", "oauth2_provider.models.get_application_model", "django.forms.ValidationError", "rest_framework.response.Response", "django.forms.CharField", "logging.getLogger", "drf_integrations.models.get_application_installation_model" ]
[((725, 752), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (742, 752), False, 'import logging\n'), ((769, 792), 'oauth2_provider.models.get_application_model', 'get_application_model', ([], {}), '()\n', (790, 792), False, 'from oauth2_provider.models import get_access_token_model, get_application_model\n'), ((819, 855), 'drf_integrations.models.get_application_installation_model', 'get_application_installation_model', ([], {}), '()\n', (853, 855), False, 'from drf_integrations.models import get_application_installation_model\n'), ((870, 894), 'oauth2_provider.models.get_access_token_model', 'get_access_token_model', ([], {}), '()\n', (892, 894), False, 'from oauth2_provider.models import get_access_token_model, get_application_model\n'), ((962, 979), 'django.forms.CharField', 'forms.CharField', ([], {}), '()\n', (977, 979), False, 'from django import forms\n'), ((1000, 1017), 'django.forms.CharField', 'forms.CharField', ([], {}), '()\n', (1015, 1017), False, 'from django import forms\n'), ((2632, 2655), 'rest_framework.routers.DefaultRouter', 'routers.DefaultRouter', ([], {}), '()\n', (2653, 2655), False, 'from rest_framework import routers, status, viewsets\n'), ((7208, 7243), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK'}), '(status=status.HTTP_200_OK)\n', (7216, 7243), False, 'from rest_framework.response import Response\n'), ((2124, 2241), 'django.forms.ValidationError', 'forms.ValidationError', (["{'shopify_shop': {\n f'There is already an existing installation with shop {shopify_shop}'}}"], {}), "({'shopify_shop': {\n f'There is already an existing installation with shop {shopify_shop}'}})\n", (2145, 2241), False, 'from django import forms\n'), ((5225, 5240), 'django.contrib.auth.models.AnonymousUser', 'AnonymousUser', ([], {}), '()\n', (5238, 5240), False, 'from django.contrib.auth.models import AnonymousUser\n')]
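The two backends above recompute an HMAC-SHA256 signature, over either the raw webhook body or the sorted query string, and compare it against the value Shopify supplied. A standalone sketch of that verification for the webhook case, assuming a hypothetical shared secret and payload; Shopify sends the digest base64-encoded in the X-Shopify-Hmac-Sha256 header, which the sketch reflects, and this is not the exact comparison code used in ShopifyBaseAuthBackend:

import base64
import hashlib
import hmac

def verify_webhook(body: bytes, header_signature: str, shared_secret: str) -> bool:
    # Recompute HMAC-SHA256 over the raw request body with the shared secret.
    digest = hmac.new(shared_secret.encode(), body, hashlib.sha256).digest()
    expected = base64.b64encode(digest).decode()
    # Constant-time comparison against the header value.
    return hmac.compare_digest(expected, header_signature)

# Illustrative values only.
secret = "hush"
body = b'{"id": 1}'
header = base64.b64encode(hmac.new(secret.encode(), body, hashlib.sha256).digest()).decode()
print(verify_webhook(body, header, secret))  # True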
from django.contrib import admin from .models import Notepad, SharedNotepad # Register your models here. admin.site.register(Notepad) admin.site.register(SharedNotepad)
[ "django.contrib.admin.site.register" ]
[((107, 135), 'django.contrib.admin.site.register', 'admin.site.register', (['Notepad'], {}), '(Notepad)\n', (126, 135), False, 'from django.contrib import admin\n'), ((136, 170), 'django.contrib.admin.site.register', 'admin.site.register', (['SharedNotepad'], {}), '(SharedNotepad)\n', (155, 170), False, 'from django.contrib import admin\n')]
from abc import ABC, abstractmethod, abstractclassmethod from typing import Dict, Optional import pandas as pd import numpy as np from wiseml.models.types.task_type import TaskType from wiseml.models.types.model_type import ModelType class TrainSet: def __init__(self, X: pd.DataFrame, y: pd.Series): if X.shape[0] != y.shape[0]: raise ValueError("Len of X and y should be equal") self.indices = np.arange(X.shape[0]) self.X = X self.y = y def __iter__(self): for i in self.indices: yield self.X.iloc[i], self.y.iloc[i] class Model(ABC): def __init__(self): self.task_type: Optional[TaskType] = None self.model_type: Optional[ModelType] = None @abstractmethod def fit(self, *args, **kwargs): pass @abstractmethod def predict(self, y_true, y_pred, *args, **kwargs): pass @classmethod @abstractmethod def save(cls, path): pass @classmethod @abstractmethod def load(cls, path): pass
[ "numpy.arange" ]
[((434, 455), 'numpy.arange', 'np.arange', (['X.shape[0]'], {}), '(X.shape[0])\n', (443, 455), True, 'import numpy as np\n')]
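TrainSet above pairs each row of X with the y value at the same position and yields them one by one. A tiny usage sketch of that contract with a toy DataFrame (column and label names are illustrative); the loop body restates what TrainSet.__iter__ does:

import numpy as np
import pandas as pd

X = pd.DataFrame({"a": [1.0, 2.0, 3.0], "b": [0.1, 0.2, 0.3]})
y = pd.Series([0, 1, 0], name="label")

# Same positional pairing TrainSet.__iter__ performs over its indices.
for i in np.arange(X.shape[0]):
    row, target = X.iloc[i], y.iloc[i]
    print(row["a"], row["b"], target)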
'''tzinfo timezone information for Africa/Nairobi.''' from pytz.tzinfo import DstTzInfo from pytz.tzinfo import memorized_datetime as d from pytz.tzinfo import memorized_ttinfo as i class Nairobi(DstTzInfo): '''Africa/Nairobi timezone definition. See datetime.tzinfo for details''' zone = 'Africa/Nairobi' _utc_transition_times = [ d(1,1,1,0,0,0), d(1928,6,30,21,32,44), d(1929,12,31,21,0,0), d(1939,12,31,21,30,0), d(1959,12,31,21,15,15), ] _transition_info = [ i(8820,0,'LMT'), i(10800,0,'EAT'), i(9000,0,'BEAT'), i(9900,0,'BEAUT'), i(10800,0,'EAT'), ] Nairobi = Nairobi()
[ "pytz.tzinfo.memorized_datetime", "pytz.tzinfo.memorized_ttinfo" ]
[((347, 366), 'pytz.tzinfo.memorized_datetime', 'd', (['(1)', '(1)', '(1)', '(0)', '(0)', '(0)'], {}), '(1, 1, 1, 0, 0, 0)\n', (348, 366), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((363, 389), 'pytz.tzinfo.memorized_datetime', 'd', (['(1928)', '(6)', '(30)', '(21)', '(32)', '(44)'], {}), '(1928, 6, 30, 21, 32, 44)\n', (364, 389), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((386, 411), 'pytz.tzinfo.memorized_datetime', 'd', (['(1929)', '(12)', '(31)', '(21)', '(0)', '(0)'], {}), '(1929, 12, 31, 21, 0, 0)\n', (387, 411), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((408, 434), 'pytz.tzinfo.memorized_datetime', 'd', (['(1939)', '(12)', '(31)', '(21)', '(30)', '(0)'], {}), '(1939, 12, 31, 21, 30, 0)\n', (409, 434), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((431, 458), 'pytz.tzinfo.memorized_datetime', 'd', (['(1959)', '(12)', '(31)', '(21)', '(15)', '(15)'], {}), '(1959, 12, 31, 21, 15, 15)\n', (432, 458), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((491, 508), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(8820)', '(0)', '"""LMT"""'], {}), "(8820, 0, 'LMT')\n", (492, 508), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((508, 526), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(10800)', '(0)', '"""EAT"""'], {}), "(10800, 0, 'EAT')\n", (509, 526), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((526, 544), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(9000)', '(0)', '"""BEAT"""'], {}), "(9000, 0, 'BEAT')\n", (527, 544), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((544, 563), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(9900)', '(0)', '"""BEAUT"""'], {}), "(9900, 0, 'BEAUT')\n", (545, 563), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((563, 581), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(10800)', '(0)', '"""EAT"""'], {}), "(10800, 0, 'EAT')\n", (564, 581), True, 'from pytz.tzinfo import memorized_ttinfo as i\n')]
######################################################################## # $Header: /var/local/cvsroot/4Suite/Ft/Lib/CommandLine/Arguments.py,v 1.4 2005/04/13 23:41:04 jkloth Exp $ """ Classes that support advanced arg processing for command-line scripts Copyright 2004 Fourthought, Inc. (USA). Detailed license and copyright information: http://4suite.org/COPYRIGHT Project home, documentation, distributions: http://4suite.org/ """ from CommandLineUtil import ArgumentError REQUIRED = 1 OPTIONAL = 2 ZERO_OR_MORE = 3 ONE_OR_MORE = 4 class Argument: def __init__(self, name, description, validationFunc=None): self.name = name self.description = description self.validationFunc = validationFunc or (lambda x: x) return class RequiredArgument(Argument): requirements = REQUIRED def gen_command_line(self): return self.name def validate(self,cmd,args): #Just the first if not len(args): raise ArgumentError(cmd, "missing required argument '%s'" % self.name) return self.validationFunc(args[0]),args[1:] class OptionalArgument(Argument): requirements = OPTIONAL def gen_command_line(self): return "[%s]" % (self.name) def validate(self,cmd,args): #Just the first, maybe if len(args): return self.validationFunc(args[0]),args[1:] return None,[] class ZeroOrMoreArgument(Argument): requirements = ZERO_OR_MORE def gen_command_line(self): return "[%s]..." % (self.name) def validate(self,cmd,args): #We take the rest eaten = map(lambda x,f=self.validationFunc:f(x),args) return eaten,[] class OneOrMoreArgument(Argument): requirements = ONE_OR_MORE def gen_command_line(self): return "%s [%s]..." % (self.name, self.name) def validate(self,cmd,args): #We take the rest if not len(args): raise ArgumentError(cmd, "missing required argument '%s'" % self.name) eaten = map(lambda x,f=self.validationFunc:f(x),args) return eaten,[] ## for cur_arg in cmd_args: ## if cur_arg[2] == '.': ## #A required arg ## if not len(cl_args): ## resArgs[cur_arg[0]] = self._translate_args(cur_arg[3],cur_arg[0],[cl_args[0]])[0] ## cl_args = cl_args[1:] ## elif cur_arg[2] == '?': ## if len(cl_args): ## resArgs[cur_arg[0]] = self._translate_args(cur_arg[3],cur_arg[0],[cl_args[0]])[0] ## cl_args = cl_args[1:] ## elif cur_arg[2] == '+': ## if not len(cl_args): ## raise ArgumentError("missing required argument '%s'" % cur_arg[0]) ## resArgs[cur_arg[0]] = self._translate_args(cur_arg[3],cur_arg[0],cl_args) ## cl_args = [] ## elif cur_arg[2] == '*': ## resArgs[cur_arg[0]] = self._translate_args(cur_arg[3],cur_arg[0],cl_args) ## cl_args = [] ## command[2] = resArgs ## return 1 ## def _translate_args(self,func,name,args): ## res = [] ## for arg in args: ## try: ## res.append(func(arg)) ## except: ## #import traceback ## #traceback.print_exc() ## raise ArgumentError('failed conversion for %s (%s)' % (arg, name)) ## return res
[ "CommandLineUtil.ArgumentError" ]
[((983, 1047), 'CommandLineUtil.ArgumentError', 'ArgumentError', (['cmd', '("missing required argument \'%s\'" % self.name)'], {}), '(cmd, "missing required argument \'%s\'" % self.name)\n', (996, 1047), False, 'from CommandLineUtil import ArgumentError\n'), ((1977, 2041), 'CommandLineUtil.ArgumentError', 'ArgumentError', (['cmd', '("missing required argument \'%s\'" % self.name)'], {}), '(cmd, "missing required argument \'%s\'" % self.name)\n', (1990, 2041), False, 'from CommandLineUtil import ArgumentError\n')]
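Each Argument subclass above consumes part of the remaining command-line arguments and returns a (validated value, leftover args) pair, raising ArgumentError when a required value is missing. A standalone illustration of that contract, using int as the validation function and a plain ValueError in place of ArgumentError so it runs without the 4Suite package:

def validate_required(name, args, convert=int):
    # Mirrors RequiredArgument.validate: take the first arg, return the rest.
    if not args:
        raise ValueError("missing required argument '%s'" % name)
    return convert(args[0]), args[1:]

value, rest = validate_required("count", ["3", "--verbose"])
print(value, rest)  # 3 ['--verbose']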
from typing import Iterable from eth2spec.test.helpers.constants import PHASE0, ALTAIR, BELLATRIX, MINIMAL, MAINNET from eth2spec.test.helpers.typing import SpecForkName, PresetBaseName from eth2spec.test.altair.fork import test_altair_fork_basic, test_altair_fork_random from eth2spec.test.bellatrix.fork import test_bellatrix_fork_basic, test_bellatrix_fork_random from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing from eth2spec.gen_helpers.gen_from_tests.gen import generate_from_tests def create_provider(tests_src, preset_name: PresetBaseName, phase: SpecForkName, fork_name: SpecForkName) -> gen_typing.TestProvider: def prepare_fn() -> None: return def cases_fn() -> Iterable[gen_typing.TestCase]: return generate_from_tests( runner_name='fork', handler_name='fork', src=tests_src, fork_name=fork_name, preset_name=preset_name, phase=phase, ) return gen_typing.TestProvider(prepare=prepare_fn, make_cases=cases_fn) def _get_fork_tests_providers(): for preset in [MINIMAL, MAINNET]: yield create_provider(test_altair_fork_basic, preset, PHASE0, ALTAIR) yield create_provider(test_altair_fork_random, preset, PHASE0, ALTAIR) yield create_provider(test_bellatrix_fork_basic, preset, ALTAIR, BELLATRIX) yield create_provider(test_bellatrix_fork_random, preset, ALTAIR, BELLATRIX) if __name__ == "__main__": gen_runner.run_generator("forks", list(_get_fork_tests_providers()))
[ "eth2spec.gen_helpers.gen_base.gen_typing.TestProvider", "eth2spec.gen_helpers.gen_from_tests.gen.generate_from_tests" ]
[((1006, 1070), 'eth2spec.gen_helpers.gen_base.gen_typing.TestProvider', 'gen_typing.TestProvider', ([], {'prepare': 'prepare_fn', 'make_cases': 'cases_fn'}), '(prepare=prepare_fn, make_cases=cases_fn)\n', (1029, 1070), False, 'from eth2spec.gen_helpers.gen_base import gen_runner, gen_typing\n'), ((776, 914), 'eth2spec.gen_helpers.gen_from_tests.gen.generate_from_tests', 'generate_from_tests', ([], {'runner_name': '"""fork"""', 'handler_name': '"""fork"""', 'src': 'tests_src', 'fork_name': 'fork_name', 'preset_name': 'preset_name', 'phase': 'phase'}), "(runner_name='fork', handler_name='fork', src=tests_src,\n fork_name=fork_name, preset_name=preset_name, phase=phase)\n", (795, 914), False, 'from eth2spec.gen_helpers.gen_from_tests.gen import generate_from_tests\n')]
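_get_fork_tests_providers above yields one provider per (preset, fork transition) combination: two presets times four fork test modules, eight providers in total. A small sketch of the same enumeration with plain strings standing in for the imported constants and test modules:

presets = ["minimal", "mainnet"]
transitions = [
    ("altair_fork_basic", "phase0", "altair"),
    ("altair_fork_random", "phase0", "altair"),
    ("bellatrix_fork_basic", "altair", "bellatrix"),
    ("bellatrix_fork_random", "altair", "bellatrix"),
]

providers = [(preset, name, phase, fork)
             for preset in presets
             for name, phase, fork in transitions]
print(len(providers))  # 8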
""" Trainer creates train ops and goes through all data to train or test. Author: <NAME> Date: Fall 2017 """ import tensorflow as tf import time import sys from random import random import numpy as np def magnitude(complex_spec): return tf.sqrt(complex_spec[:,0] ** 2 + complex_spec[:,1] ** 2) class Trainer: """ Train a model """ def __init__(self, models, learn_rate = 1e-4, lr_decay = 1., max_norm = 5.0, loss_weight = {'fidelity': 1.0}, lengths = False, verbose = False, batch_size = 1, ): """ Parameters ---------- models : dict All models need for training in the format: {<name of model>: {'model': model, 'train': bool, 'vars': list}} learn_rate : float Rate of gradient descent lr_decay : float Amount of decay for learning rate max_norm : float For clipping norm loss_weight : dict How much weight to assign to each loss, of the form: {<name of loss>: float, ...} lengths : bool Whether to pass the length of the input to the fd, for recurrent models. verbose : bool Whether to print debugging info """ self.verbose = verbose self.lengths = lengths self.learn_rate = learn_rate self.batch_size = batch_size if verbose: print("Generating compute graph") self.feed_dict = {} # Collect variables for training var_list = [] self.training = [] for model in models: if models[model]['train']: var_list += models[model]['vars'] self.training.append(models[model]['model'].training) else: self.feed_dict[models[model]['model'].training] = False self.initialize_inputs(models, loss_weight) # Losses self.loss = 0 self.losses = {} for loss in loss_weight: op = self.get_op(loss, models) self.losses[loss] = {'op': op } self.loss += loss_weight[loss] * op # Create primary train op self.losses['average'] = {'op': self.loss} self.train_op = self._create_train_op(self.loss, var_list, max_norm, self.learn_rate, lr_decay) def initialize_inputs(self, models, loss_weight): """ Create placeholders for all types of inputs """ self.irm, self.ibm_x, self.ibm_n, self.senone, self.trans = None, None, None, None, None if 'generator' in models: self.noisy = models['generator']['model'].inputs if 'masking' in loss_weight: shape = models['generator']['model'].outputs.get_shape().as_list() self.irm = tf.placeholder(tf.float32, shape = shape, name = 'irm') self.clean = None if 'speech-mask' in loss_weight: self.ibm_x = tf.placeholder(tf.float32, shape = shape, name = 'ibm_x') if 'noise-mask' in loss_weight: self.ibm_n = tf.placeholder(tf.float32, shape = shape, name = 'ibm_n') if 'teacher' in models: self.clean = models['teacher']['model'].inputs elif 'fidelity' in loss_weight: shape = models['generator']['model'].outputs.get_shape().as_list() self.clean = tf.placeholder(tf.float32, shape = shape, name = "clean") else: self.clean = None self.senone = None else: self.clean = models['student']['model'].inputs self.noisy = None if 'ctc' in loss_weight: self.trans = tf.sparse_placeholder(tf.int32) # The only time we need a senone placeholder is if theres a cross-entropy loss if 'cross-ent' in loss_weight: shape = models['student']['model'].outputs.get_shape().as_list()[:-1] self.senone = tf.placeholder(tf.int32, shape, name = "senone") def get_op(self, loss, models): """ This function creates many types of losses, Parameters ---------- * loss : string The type of loss to create * models : dict The models to use for making the loss """ if loss == 'cross-ent': return tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits( labels = self.senone, logits = models['student']['model'].outputs, )) elif loss == 'ctc': return 
tf.reduce_mean(tf.nn.ctc_loss( labels = self.trans, inputs = models['student']['model'].outputs, sequence_length = [models['student']['model'].seq_len], )) elif loss == 'cer': decoded, log_prob = tf.nn.ctc_beam_search_decoder( inputs = models['student']['model'].outputs, sequence_length = [models['student']['model'].seq_len], ) return tf.reduce_mean(tf.edit_distance( hypothesis = tf.cast(decoded[0], tf.int32), truth = self.trans, normalize = False, )) elif loss == 'fidelity': return tf.reduce_mean(tf.losses.mean_squared_error( labels = self.clean, predictions = models['generator']['model'].fidelity, )) elif loss == 'magnitude': return tf.reduce_mean(tf.losses.mean_squared_error( labels = magnitude(self.clean), predictions = magnitude(models['generator']['model'].outputs), )) elif loss == 'masking': return tf.reduce_mean(tf.losses.sigmoid_cross_entropy( multi_class_labels = self.irm, logits = models['generator']['model'].masking, )) elif loss == 'speech-mask': return tf.reduce_mean(tf.losses.sigmoid_cross_entropy( multi_class_labels = self.ibm_x, logits = models['generator']['model'].outputs, )) elif loss == 'mimic' or loss == 'map-as-mask-mimic': return tf.reduce_mean(tf.losses.mean_squared_error( labels = models['teacher']['model'].outputs, predictions = models['student']['model'].outputs, )) elif loss == 'generator': return self.make_adversarial_loss(models) else: raise ValueError def make_adversarial_loss(self, models): # Prepare for making the discriminator model epsilon = tf.random_uniform([], 0.0, 1.0) x_hat = self.generator.outputs * epsilon + self.clean * (1-epsilon) # Make 3 copies with different inputs #self.d_fake = discrimaker(self.generator.outputs) #self.d_real = discrimaker(self.clean, reuse=True) #d_hat = discrimaker(x_hat, reuse=True) #self.feed_dict[d_hat.training] = False # Make loss and gradient penalty self.discriminator_loss = tf.reduce_mean(self.d_fake.outputs) - tf.reduce_mean(self.d_real.outputs) self.discriminator_loss *= 0.005#loss_weight['generator'] #gradients = tf.gradients(d_hat.outputs, x_hat)[0] #slopes = tf.sqrt(tf.reduce_sum(tf.square(gradients), reduction_indices=[1])) #gradient_penalty = 10 * tf.reduce_mean((slopes - 1.0) ** 2) #self.discriminator_loss += gradient_penalty # Train op var_list = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='discriminator') self.discriminator_train_op = self._create_train_op(self.discriminator_loss, var_list) self.clip_op = [var.assign(tf.clip_by_value(var, -0.02, 0.02)) for var in var_list] # Generator loss return -tf.reduce_mean(self.d_fake.outputs) def _create_train_op(self, loss, var_list, max_norm, learn_rate, decay): """ Define the training op """ # Define train op global_step = tf.Variable(0, trainable=False) self.learn_rate_pl = tf.train.exponential_decay(learn_rate, global_step, 1e4, decay) opt = tf.train.AdamOptimizer(self.learn_rate_pl) return opt.minimize(loss, var_list = var_list, global_step = global_step) def run_ops(self, sess, loader, training = True, epoch = 1): """ Run one epoch of batches through the model """ for var in self.training: self.feed_dict[var] = training # Counters start_time = time.time() frames = 1 count = 1 # Prepare ops ops = self.get_ops(training) # Iterate dataset for batch in loader.batchify(epoch): if self.verbose: print("Batch", count) count += 1 # Count frames for loss calculation frames += batch['frames'] # Build the feed dict for this batch self.build_feed_dict(batch, training, sess, epoch) # Run all ops output = sess.run(ops, self.feed_dict) # Update losses for 
label in self.losses: self.losses[label]['loss'] += batch['frames'] * output[self.losses[label]['id']] if label == 'generator' and self.verbose: print("G loss", output[self.losses[label]['id']]) if self.verbose: for label in self.losses: print("{}: {}".format(label, self.losses[label]['loss'] / frames)) # Compute average average = {} for label in self.losses: average[label] = self.losses[label]['loss'] / frames duration = time.time() - start_time return average, duration def get_ops(self, training): ops = [] for i, loss in enumerate(self.losses): ops.append(self.losses[loss]['op']) self.losses[loss]['id'] = i self.losses[loss]['loss'] = 0 # Doesn't produce output, so no map needed if training: ops.append(self.train_op) return ops def build_feed_dict(self, batch, training, sess, epoch): #self.feed_dict[self.learn_rate_pl] = self.learn_rate # Always feed clean, and then either senone or noisy if self.clean is not None: self.feed_dict[self.clean] = batch['clean'] if self.senone is not None: self.feed_dict[self.senone] = batch['senone'] if self.noisy is not None: self.feed_dict[self.noisy] = batch['noisy'] if self.irm is not None: self.feed_dict[self.irm] = batch['irm'] if self.ibm_x is not None: self.feed_dict[self.ibm_x] = batch['ibm_x'] if self.ibm_n is not None: self.feed_dict[self.ibm_n] = batch['ibm_n'] if self.trans is not None: self.feed_dict[self.trans] = batch['trans'] # Count the frames in the batch if self.lengths: self.feed_dict[self.lengths] = [batch['frames']] # Train discriminator if 'generator' in self.losses: self.feed_dict[self.d_real.training] = training self.feed_dict[self.d_fake.training] = training self.feed_dict[self.generator_train] = False d_ops = [self.discriminator_loss, self.discriminator_train_op] d_loss, _ = sess.run(d_ops, self.feed_dict) sess.run(self.clip_op) if self.verbose: print("D loss", d_loss) self.feed_dict[self.d_real.training] = False self.feed_dict[self.d_fake.training] = False self.feed_dict[self.generator_train] = training
[ "tensorflow.random_uniform", "tensorflow.clip_by_value", "tensorflow.losses.mean_squared_error", "tensorflow.get_collection", "tensorflow.nn.ctc_loss", "tensorflow.losses.sigmoid_cross_entropy", "tensorflow.reduce_mean", "tensorflow.train.AdamOptimizer", "time.time", "tensorflow.placeholder", "tensorflow.cast", "tensorflow.Variable", "tensorflow.sparse_placeholder", "tensorflow.sqrt", "tensorflow.train.exponential_decay", "tensorflow.nn.sparse_softmax_cross_entropy_with_logits", "tensorflow.nn.ctc_beam_search_decoder" ]
[((244, 302), 'tensorflow.sqrt', 'tf.sqrt', (['(complex_spec[:, 0] ** 2 + complex_spec[:, 1] ** 2)'], {}), '(complex_spec[:, 0] ** 2 + complex_spec[:, 1] ** 2)\n', (251, 302), True, 'import tensorflow as tf\n'), ((6789, 6820), 'tensorflow.random_uniform', 'tf.random_uniform', (['[]', '(0.0)', '(1.0)'], {}), '([], 0.0, 1.0)\n', (6806, 6820), True, 'import tensorflow as tf\n'), ((7680, 7754), 'tensorflow.get_collection', 'tf.get_collection', (['tf.GraphKeys.TRAINABLE_VARIABLES'], {'scope': '"""discriminator"""'}), "(tf.GraphKeys.TRAINABLE_VARIABLES, scope='discriminator')\n", (7697, 7754), True, 'import tensorflow as tf\n'), ((8198, 8229), 'tensorflow.Variable', 'tf.Variable', (['(0)'], {'trainable': '(False)'}), '(0, trainable=False)\n', (8209, 8229), True, 'import tensorflow as tf\n'), ((8259, 8326), 'tensorflow.train.exponential_decay', 'tf.train.exponential_decay', (['learn_rate', 'global_step', '(10000.0)', 'decay'], {}), '(learn_rate, global_step, 10000.0, decay)\n', (8285, 8326), True, 'import tensorflow as tf\n'), ((8337, 8379), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', (['self.learn_rate_pl'], {}), '(self.learn_rate_pl)\n', (8359, 8379), True, 'import tensorflow as tf\n'), ((8715, 8726), 'time.time', 'time.time', ([], {}), '()\n', (8724, 8726), False, 'import time\n'), ((3766, 3797), 'tensorflow.sparse_placeholder', 'tf.sparse_placeholder', (['tf.int32'], {}), '(tf.int32)\n', (3787, 3797), True, 'import tensorflow as tf\n'), ((4033, 4079), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32', 'shape'], {'name': '"""senone"""'}), "(tf.int32, shape, name='senone')\n", (4047, 4079), True, 'import tensorflow as tf\n'), ((7234, 7269), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['self.d_fake.outputs'], {}), '(self.d_fake.outputs)\n', (7248, 7269), True, 'import tensorflow as tf\n'), ((7272, 7307), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['self.d_real.outputs'], {}), '(self.d_real.outputs)\n', (7286, 7307), True, 'import tensorflow as tf\n'), ((7988, 8023), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['self.d_fake.outputs'], {}), '(self.d_fake.outputs)\n', (8002, 8023), True, 'import tensorflow as tf\n'), ((9882, 9893), 'time.time', 'time.time', ([], {}), '()\n', (9891, 9893), False, 'import time\n'), ((2850, 2901), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': 'shape', 'name': '"""irm"""'}), "(tf.float32, shape=shape, name='irm')\n", (2864, 2901), True, 'import tensorflow as tf\n'), ((3015, 3068), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': 'shape', 'name': '"""ibm_x"""'}), "(tf.float32, shape=shape, name='ibm_x')\n", (3029, 3068), True, 'import tensorflow as tf\n'), ((3147, 3200), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': 'shape', 'name': '"""ibm_n"""'}), "(tf.float32, shape=shape, name='ibm_n')\n", (3161, 3200), True, 'import tensorflow as tf\n'), ((4439, 4553), 'tensorflow.nn.sparse_softmax_cross_entropy_with_logits', 'tf.nn.sparse_softmax_cross_entropy_with_logits', ([], {'labels': 'self.senone', 'logits': "models['student']['model'].outputs"}), "(labels=self.senone, logits=\n models['student']['model'].outputs)\n", (4485, 4553), True, 'import tensorflow as tf\n'), ((7885, 7919), 'tensorflow.clip_by_value', 'tf.clip_by_value', (['var', '(-0.02)', '(0.02)'], {}), '(var, -0.02, 0.02)\n', (7901, 7919), True, 'import tensorflow as tf\n'), ((3462, 3515), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': 'shape', 'name': '"""clean"""'}), "(tf.float32, 
shape=shape, name='clean')\n", (3476, 3515), True, 'import tensorflow as tf\n'), ((4664, 4798), 'tensorflow.nn.ctc_loss', 'tf.nn.ctc_loss', ([], {'labels': 'self.trans', 'inputs': "models['student']['model'].outputs", 'sequence_length': "[models['student']['model'].seq_len]"}), "(labels=self.trans, inputs=models['student']['model'].outputs,\n sequence_length=[models['student']['model'].seq_len])\n", (4678, 4798), True, 'import tensorflow as tf\n'), ((4926, 5056), 'tensorflow.nn.ctc_beam_search_decoder', 'tf.nn.ctc_beam_search_decoder', ([], {'inputs': "models['student']['model'].outputs", 'sequence_length': "[models['student']['model'].seq_len]"}), "(inputs=models['student']['model'].outputs,\n sequence_length=[models['student']['model'].seq_len])\n", (4955, 5056), True, 'import tensorflow as tf\n'), ((5377, 5480), 'tensorflow.losses.mean_squared_error', 'tf.losses.mean_squared_error', ([], {'labels': 'self.clean', 'predictions': "models['generator']['model'].fidelity"}), "(labels=self.clean, predictions=models[\n 'generator']['model'].fidelity)\n", (5405, 5480), True, 'import tensorflow as tf\n'), ((5186, 5215), 'tensorflow.cast', 'tf.cast', (['decoded[0]', 'tf.int32'], {}), '(decoded[0], tf.int32)\n', (5193, 5215), True, 'import tensorflow as tf\n'), ((5854, 5964), 'tensorflow.losses.sigmoid_cross_entropy', 'tf.losses.sigmoid_cross_entropy', ([], {'multi_class_labels': 'self.irm', 'logits': "models['generator']['model'].masking"}), "(multi_class_labels=self.irm, logits=models[\n 'generator']['model'].masking)\n", (5885, 5964), True, 'import tensorflow as tf\n'), ((6095, 6207), 'tensorflow.losses.sigmoid_cross_entropy', 'tf.losses.sigmoid_cross_entropy', ([], {'multi_class_labels': 'self.ibm_x', 'logits': "models['generator']['model'].outputs"}), "(multi_class_labels=self.ibm_x, logits=\n models['generator']['model'].outputs)\n", (6126, 6207), True, 'import tensorflow as tf\n'), ((6363, 6486), 'tensorflow.losses.mean_squared_error', 'tf.losses.mean_squared_error', ([], {'labels': "models['teacher']['model'].outputs", 'predictions': "models['student']['model'].outputs"}), "(labels=models['teacher']['model'].outputs,\n predictions=models['student']['model'].outputs)\n", (6391, 6486), True, 'import tensorflow as tf\n')]
##################################################################### # Copyright (c) The Caleydo Team, http://caleydo.org # # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. ##################################################################### def phovea(registry): """ register extension points :param registry: """ # generator-phovea:begin registry.append('namespace', 'phovea_security_store_generated_api', 'phovea_security_store_generated.api', { 'namespace': '/api/tdp/security_store_generated' }) registry.append('user_stores', 'phovea_security_store_generated_store', 'phovea_security_store_generated.store', {}) # generator-phovea:end pass def phovea_config(): """ :return: file pointer to config file """ from os import path here = path.abspath(path.dirname(__file__)) config_file = path.join(here, 'config.json') return config_file if path.exists(config_file) else None
[ "os.path.dirname", "os.path.join", "os.path.exists" ]
[((986, 1016), 'os.path.join', 'path.join', (['here', '"""config.json"""'], {}), "(here, 'config.json')\n", (995, 1016), False, 'from os import path\n'), ((946, 968), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (958, 968), False, 'from os import path\n'), ((1041, 1065), 'os.path.exists', 'path.exists', (['config_file'], {}), '(config_file)\n', (1052, 1065), False, 'from os import path\n')]
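phovea(registry) above only calls registry.append(kind, id, module, config) twice, once for the API namespace and once for the user store. A dummy registry class, purely for illustration and not the real generator API, makes it easy to see what would be registered:

class DummyRegistry:
    def __init__(self):
        self.entries = []

    def append(self, kind, identifier, module, config):
        # Record exactly what the plugin hands over.
        self.entries.append((kind, identifier, module, config))

registry = DummyRegistry()
registry.append('namespace', 'phovea_security_store_generated_api',
                'phovea_security_store_generated.api',
                {'namespace': '/api/tdp/security_store_generated'})
registry.append('user_stores', 'phovea_security_store_generated_store',
                'phovea_security_store_generated.store', {})
print(len(registry.entries))  # 2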
import os from distutils.dir_util import mkpath from distutils.file_util import copy_file from . import tools from . import plists from .util import copy_tree from .templates import InstallationCheck from .py3k import StringIO, u def write_template(script_strings, dest, mkpath=mkpath): script, strings = script_strings spath = os.path.join(dest, 'InstallationCheck') f = open(spath, 'wb') f.write(script.encode('utf8')) f.close() os.chmod(spath, os.stat(spath)[0] | 0x49) # 0111 octal lproj = os.path.join(dest, 'English.lproj') mkpath(lproj) spath = os.path.join(lproj, 'InstallationCheck.strings') f = open(spath, 'wb') f.write(strings.encode('utf16')) f.close() def write_sizes(count, size, compressed, pkgdir): f = open( os.path.join(pkgdir, 'Contents', 'Resources', 'Archive.sizes'), 'wb' ) f.write(('NumFiles %d\nInstalledSize %d\nCompressedSize %d' % (count, size, compressed)).encode('utf8')) f.close() TEXT_EXTS = '.rtfd', '.rtf', '.html', '.txt' IMAGE_EXTS = '.tiff', '.png', '.jpg' def write_pkginfo(pkgdir): f = open(os.path.join(pkgdir, 'Contents', 'PkgInfo'), 'wb') f.write('pmkrpkg1'.encode('utf8')) f.close() def try_exts(path, exts=TEXT_EXTS): path = os.path.splitext(path)[0] for ext in exts: npath = path + ext if os.path.exists(npath): return npath return None def copy_doc(path, name, pkgdir, exts=TEXT_EXTS, language=None, dry_run=0, copy_tree=copy_tree, copy_file=copy_file, mkpath=mkpath): if path is None: return is_string = hasattr(path, 'getvalue') if is_string: ext = '.txt' else: ext = os.path.splitext(path)[1].lower() if ext == '': ext = '.txt' if ext not in exts: raise ValueError('Invalid extension for %s' % (path,)) destdir = os.path.join(pkgdir, 'Contents', 'Resources') if language is not None: destdir = os.path.join(destdir, language + '.lproj') mkpath(destdir) dest = os.path.join(destdir, name + ext) if is_string: if not dry_run: f = open(dest, 'wb') f.write(path.getvalue().encode('utf8')) f.close() elif ext == '.rtfd': copy_tree(path, dest) else: copy_file(path, dest) def make_metapackage(cmd, name, version, packages, pkgdir, info=(), description=None): license = cmd.license readme = cmd.readme welcome = cmd.welcome background = cmd.background template = cmd.template dry_run = cmd.dry_run dist = cmd.distribution copy_tree = cmd.copy_tree copy_file = cmd.copy_file mkpath = cmd.mkpath if description is None: description = dist.get_description() if not description: description = u('%s %s' % (name, version)) mkpath(os.path.join(pkgdir, 'Contents', 'Resources')) if not dry_run: write_pkginfo(pkgdir) ninfo = plists.mpkg_info(name, version, packages) ninfo.update(dict(info)) if not dry_run: plists.write(ninfo, os.path.join(pkgdir, 'Contents', 'Info.plist')) desc = plists.common_description(name+' '+version, version) if description: desc['IFPkgDescriptionDescription'] = description if not dry_run: plists.write( desc, os.path.join(pkgdir, 'Contents', 'Resources', 'Description.plist') ) template_dest = os.path.join(pkgdir, 'Contents', 'Resources') if not os.path.exists(template) and template in InstallationCheck: write_template(InstallationCheck[template], template_dest, mkpath=mkpath) else: copy_tree(template, template_dest) if readme is None: readme_text = dist.get_long_description() if readme_text: readme = StringIO(readme_text) def doc(path, name, exts=TEXT_EXTS): copy_doc(path, name, pkgdir, exts=exts, dry_run=dry_run, mkpath=mkpath, copy_tree=copy_tree, copy_file=copy_file, ) doc(readme, 'ReadMe') doc(license, 'License') doc(welcome, 'Welcome') doc(background, 'Background', 
exts=IMAGE_EXTS) def make_package(cmd, name, version, files, common, prefix, pkgdir, info=(), description=None): license = cmd.license readme = cmd.readme welcome = cmd.welcome background = cmd.background template = cmd.template dry_run = cmd.dry_run dist = cmd.distribution copy_tree = cmd.copy_tree copy_file = cmd.copy_file mkpath = cmd.mkpath if description is None: description = dist.get_description() mkpath(os.path.join(pkgdir, 'Contents', 'Resources')) if not dry_run: write_pkginfo(pkgdir) tools.mkbom(common, pkgdir) count = len(files) admin = tools.admin_writable(prefix) size = tools.reduce_size(files) compressed = tools.pax(common, pkgdir) if not dry_run: write_sizes(count, size, compressed, pkgdir) if admin: auth = u('AdminAuthorization') else: auth = u('RootAuthorization') ninfo = plists.pkg_info(name, version) ninfo.update(dict( IFPkgFlagAuthorizationAction=auth, IFPkgFlagDefaultLocation=tools.unicode_path(prefix), )) ninfo.update(dict(info)) if not dry_run: plists.write(ninfo, os.path.join(pkgdir, 'Contents', 'Info.plist')) desc = plists.common_description(name, version) if description is not None: desc['IFPkgDescriptionDescription'] = description if not dry_run: plists.write( desc, os.path.join(pkgdir, 'Contents', 'Resources', 'Description.plist') ) template_dest = os.path.join(pkgdir, 'Contents', 'Resources') if not os.path.exists(template) and template in InstallationCheck: write_template(InstallationCheck[template], template_dest, mkpath=mkpath) else: copy_tree(template, template_dest) def doc(path, name, exts=TEXT_EXTS): copy_doc(path, name, pkgdir, exts=exts, dry_run=dry_run, mkpath=mkpath, copy_tree=copy_tree, copy_file=copy_file, ) doc(readme, 'ReadMe') doc(license, 'License') doc(welcome, 'Welcome') doc(background, 'Background', exts=IMAGE_EXTS)
[ "distutils.dir_util.mkpath", "os.stat", "os.path.exists", "os.path.splitext", "distutils.file_util.copy_file", "os.path.join" ]
[((339, 378), 'os.path.join', 'os.path.join', (['dest', '"""InstallationCheck"""'], {}), "(dest, 'InstallationCheck')\n", (351, 378), False, 'import os\n'), ((525, 560), 'os.path.join', 'os.path.join', (['dest', '"""English.lproj"""'], {}), "(dest, 'English.lproj')\n", (537, 560), False, 'import os\n'), ((565, 578), 'distutils.dir_util.mkpath', 'mkpath', (['lproj'], {}), '(lproj)\n', (571, 578), False, 'from distutils.dir_util import mkpath\n'), ((591, 639), 'os.path.join', 'os.path.join', (['lproj', '"""InstallationCheck.strings"""'], {}), "(lproj, 'InstallationCheck.strings')\n", (603, 639), False, 'import os\n'), ((1894, 1939), 'os.path.join', 'os.path.join', (['pkgdir', '"""Contents"""', '"""Resources"""'], {}), "(pkgdir, 'Contents', 'Resources')\n", (1906, 1939), False, 'import os\n'), ((2034, 2049), 'distutils.dir_util.mkpath', 'mkpath', (['destdir'], {}), '(destdir)\n', (2040, 2049), False, 'from distutils.dir_util import mkpath\n'), ((2061, 2094), 'os.path.join', 'os.path.join', (['destdir', '(name + ext)'], {}), '(destdir, name + ext)\n', (2073, 2094), False, 'import os\n'), ((3460, 3505), 'os.path.join', 'os.path.join', (['pkgdir', '"""Contents"""', '"""Resources"""'], {}), "(pkgdir, 'Contents', 'Resources')\n", (3472, 3505), False, 'import os\n'), ((5714, 5759), 'os.path.join', 'os.path.join', (['pkgdir', '"""Contents"""', '"""Resources"""'], {}), "(pkgdir, 'Contents', 'Resources')\n", (5726, 5759), False, 'import os\n'), ((790, 852), 'os.path.join', 'os.path.join', (['pkgdir', '"""Contents"""', '"""Resources"""', '"""Archive.sizes"""'], {}), "(pkgdir, 'Contents', 'Resources', 'Archive.sizes')\n", (802, 852), False, 'import os\n'), ((1128, 1171), 'os.path.join', 'os.path.join', (['pkgdir', '"""Contents"""', '"""PkgInfo"""'], {}), "(pkgdir, 'Contents', 'PkgInfo')\n", (1140, 1171), False, 'import os\n'), ((1280, 1302), 'os.path.splitext', 'os.path.splitext', (['path'], {}), '(path)\n', (1296, 1302), False, 'import os\n'), ((1365, 1386), 'os.path.exists', 'os.path.exists', (['npath'], {}), '(npath)\n', (1379, 1386), False, 'import os\n'), ((1987, 2029), 'os.path.join', 'os.path.join', (['destdir', "(language + '.lproj')"], {}), "(destdir, language + '.lproj')\n", (1999, 2029), False, 'import os\n'), ((2870, 2915), 'os.path.join', 'os.path.join', (['pkgdir', '"""Contents"""', '"""Resources"""'], {}), "(pkgdir, 'Contents', 'Resources')\n", (2882, 2915), False, 'import os\n'), ((4650, 4695), 'os.path.join', 'os.path.join', (['pkgdir', '"""Contents"""', '"""Resources"""'], {}), "(pkgdir, 'Contents', 'Resources')\n", (4662, 4695), False, 'import os\n'), ((2317, 2338), 'distutils.file_util.copy_file', 'copy_file', (['path', 'dest'], {}), '(path, dest)\n', (2326, 2338), False, 'from distutils.file_util import copy_file\n'), ((3099, 3145), 'os.path.join', 'os.path.join', (['pkgdir', '"""Contents"""', '"""Info.plist"""'], {}), "(pkgdir, 'Contents', 'Info.plist')\n", (3111, 3145), False, 'import os\n'), ((3362, 3428), 'os.path.join', 'os.path.join', (['pkgdir', '"""Contents"""', '"""Resources"""', '"""Description.plist"""'], {}), "(pkgdir, 'Contents', 'Resources', 'Description.plist')\n", (3374, 3428), False, 'import os\n'), ((3517, 3541), 'os.path.exists', 'os.path.exists', (['template'], {}), '(template)\n', (3531, 3541), False, 'import os\n'), ((5353, 5399), 'os.path.join', 'os.path.join', (['pkgdir', '"""Contents"""', '"""Info.plist"""'], {}), "(pkgdir, 'Contents', 'Info.plist')\n", (5365, 5399), False, 'import os\n'), ((5616, 5682), 'os.path.join', 'os.path.join', (['pkgdir', 
'"""Contents"""', '"""Resources"""', '"""Description.plist"""'], {}), "(pkgdir, 'Contents', 'Resources', 'Description.plist')\n", (5628, 5682), False, 'import os\n'), ((5771, 5795), 'os.path.exists', 'os.path.exists', (['template'], {}), '(template)\n', (5785, 5795), False, 'import os\n'), ((474, 488), 'os.stat', 'os.stat', (['spath'], {}), '(spath)\n', (481, 488), False, 'import os\n'), ((1712, 1734), 'os.path.splitext', 'os.path.splitext', (['path'], {}), '(path)\n', (1728, 1734), False, 'import os\n')]
"""Code for AMS 2019 short course.""" import copy import glob import errno import random import os.path import json import pickle import time import calendar import numpy import netCDF4 import keras from keras import backend as K from sklearn.metrics import auc as scikit_learn_auc import matplotlib.colors import matplotlib.pyplot as pyplot from module_4 import keras_metrics from module_4 import roc_curves from module_4 import performance_diagrams from module_4 import attributes_diagrams # Directories. # MODULE4_DIR_NAME = '.' # SHORT_COURSE_DIR_NAME = '..' MODULE4_DIR_NAME = os.path.dirname(__file__) SHORT_COURSE_DIR_NAME = os.path.dirname(MODULE4_DIR_NAME) DEFAULT_IMAGE_DIR_NAME = '{0:s}/data/track_data_ncar_ams_3km_nc_small'.format( SHORT_COURSE_DIR_NAME) # Plotting constants. FIGURE_WIDTH_INCHES = 15 FIGURE_HEIGHT_INCHES = 15 FIGURE_RESOLUTION_DPI = 300 BAR_GRAPH_FACE_COLOUR = numpy.array([166, 206, 227], dtype=float) / 255 BAR_GRAPH_EDGE_COLOUR = numpy.full(3, 0.) BAR_GRAPH_EDGE_WIDTH = 2. SALIENCY_COLOUR_MAP_OBJECT = pyplot.cm.Greys FONT_SIZE = 30 pyplot.rc('font', size=FONT_SIZE) pyplot.rc('axes', titlesize=FONT_SIZE) pyplot.rc('axes', labelsize=FONT_SIZE) pyplot.rc('xtick', labelsize=FONT_SIZE) pyplot.rc('ytick', labelsize=FONT_SIZE) pyplot.rc('legend', fontsize=FONT_SIZE) pyplot.rc('figure', titlesize=FONT_SIZE) # Naming constants. CSV_METADATA_COLUMNS = [ 'Step_ID', 'Track_ID', 'Ensemble_Name', 'Ensemble_Member', 'Run_Date', 'Valid_Date', 'Forecast_Hour', 'Valid_Hour_UTC' ] CSV_EXTRANEOUS_COLUMNS = [ 'Duration', 'Centroid_Lon', 'Centroid_Lat', 'Centroid_X', 'Centroid_Y', 'Storm_Motion_U', 'Storm_Motion_V', 'Matched', 'Max_Hail_Size', 'Num_Matches', 'Shape', 'Location', 'Scale' ] CSV_TARGET_NAME = 'RVORT1_MAX-future_max' TARGET_NAME = 'max_future_vorticity_s01' NETCDF_REFL_NAME = 'REFL_COM_curr' NETCDF_TEMP_NAME = 'T2_curr' NETCDF_U_WIND_NAME = 'U10_curr' NETCDF_V_WIND_NAME = 'V10_curr' NETCDF_PREDICTOR_NAMES = [ NETCDF_REFL_NAME, NETCDF_TEMP_NAME, NETCDF_U_WIND_NAME, NETCDF_V_WIND_NAME ] REFLECTIVITY_NAME = 'reflectivity_dbz' TEMPERATURE_NAME = 'temperature_kelvins' U_WIND_NAME = 'u_wind_m_s01' V_WIND_NAME = 'v_wind_m_s01' PREDICTOR_NAMES = [ REFLECTIVITY_NAME, TEMPERATURE_NAME, U_WIND_NAME, V_WIND_NAME ] NETCDF_TRACK_ID_NAME = 'track_id' NETCDF_TRACK_STEP_NAME = 'track_step' NETCDF_TARGET_NAME = 'RVORT1_MAX_future' NUM_VALUES_KEY = 'num_values' MEAN_VALUE_KEY = 'mean_value' MEAN_OF_SQUARES_KEY = 'mean_of_squares' STORM_IDS_KEY = 'storm_ids' STORM_STEPS_KEY = 'storm_steps' PREDICTOR_NAMES_KEY = 'predictor_names' PREDICTOR_MATRIX_KEY = 'predictor_matrix' TARGET_NAME_KEY = 'target_name' TARGET_MATRIX_KEY = 'target_matrix' TRAINING_FILES_KEY = 'training_file_names' NORMALIZATION_DICT_KEY = 'normalization_dict' BINARIZATION_THRESHOLD_KEY = 'binarization_threshold' NUM_EXAMPLES_PER_BATCH_KEY = 'num_examples_per_batch' NUM_TRAINING_BATCHES_KEY = 'num_training_batches_per_epoch' VALIDATION_FILES_KEY = 'validation_file_names' NUM_VALIDATION_BATCHES_KEY = 'num_validation_batches_per_epoch' CNN_FILE_KEY = 'cnn_file_name' CNN_FEATURE_LAYER_KEY = 'cnn_feature_layer_name' PERMUTED_PREDICTORS_KEY = 'permuted_predictor_name_by_step' HIGHEST_COSTS_KEY = 'highest_cost_by_step' ORIGINAL_COST_KEY = 'original_cost' STEP1_PREDICTORS_KEY = 'predictor_names_step1' STEP1_COSTS_KEY = 'costs_step1' EOF_MATRIX_KEY = 'eof_matrix' FEATURE_MEANS_KEY = 'feature_means' FEATURE_STDEVS_KEY = 'feature_standard_deviations' NOVEL_IMAGES_ACTUAL_KEY = 'novel_image_matrix_actual' NOVEL_IMAGES_UPCONV_KEY = 
'novel_image_matrix_upconv' NOVEL_IMAGES_UPCONV_SVD_KEY = 'novel_image_matrix_upconv_svd' # More plotting constants. THIS_COLOUR_LIST = [ numpy.array([4, 233, 231]), numpy.array([1, 159, 244]), numpy.array([3, 0, 244]), numpy.array([2, 253, 2]), numpy.array([1, 197, 1]), numpy.array([0, 142, 0]), numpy.array([253, 248, 2]), numpy.array([229, 188, 0]), numpy.array([253, 149, 0]), numpy.array([253, 0, 0]), numpy.array([212, 0, 0]), numpy.array([188, 0, 0]), numpy.array([248, 0, 253]), numpy.array([152, 84, 198]) ] for p in range(len(THIS_COLOUR_LIST)): THIS_COLOUR_LIST[p] = THIS_COLOUR_LIST[p].astype(float) / 255 REFL_COLOUR_MAP_OBJECT = matplotlib.colors.ListedColormap(THIS_COLOUR_LIST) REFL_COLOUR_MAP_OBJECT.set_under(numpy.ones(3)) PREDICTOR_TO_COLOUR_MAP_DICT = { TEMPERATURE_NAME: pyplot.cm.YlOrRd, REFLECTIVITY_NAME: REFL_COLOUR_MAP_OBJECT, U_WIND_NAME: pyplot.cm.seismic, V_WIND_NAME: pyplot.cm.seismic } THESE_COLOUR_BOUNDS = numpy.array( [0.1, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70]) REFL_COLOUR_NORM_OBJECT = matplotlib.colors.BoundaryNorm( THESE_COLOUR_BOUNDS, REFL_COLOUR_MAP_OBJECT.N) # Deep-learning constants. L1_WEIGHT = 0. L2_WEIGHT = 0.001 NUM_PREDICTORS_TO_FIRST_NUM_FILTERS = 8 NUM_CONV_LAYER_SETS = 2 NUM_CONV_LAYERS_PER_SET = 2 NUM_CONV_FILTER_ROWS = 3 NUM_CONV_FILTER_COLUMNS = 3 CONV_LAYER_DROPOUT_FRACTION = None USE_BATCH_NORMALIZATION = True SLOPE_FOR_RELU = 0.2 NUM_POOLING_ROWS = 2 NUM_POOLING_COLUMNS = 2 NUM_DENSE_LAYERS = 3 DENSE_LAYER_DROPOUT_FRACTION = 0.5 NUM_SMOOTHING_FILTER_ROWS = 5 NUM_SMOOTHING_FILTER_COLUMNS = 5 MIN_XENTROPY_DECREASE_FOR_EARLY_STOP = 0.005 MIN_MSE_DECREASE_FOR_EARLY_STOP = 0.005 NUM_EPOCHS_FOR_EARLY_STOPPING = 5 LIST_OF_METRIC_FUNCTIONS = [ keras_metrics.accuracy, keras_metrics.binary_accuracy, keras_metrics.binary_csi, keras_metrics.binary_frequency_bias, keras_metrics.binary_pod, keras_metrics.binary_pofd, keras_metrics.binary_peirce_score, keras_metrics.binary_success_ratio, keras_metrics.binary_focn ] METRIC_FUNCTION_DICT = { 'accuracy': keras_metrics.accuracy, 'binary_accuracy': keras_metrics.binary_accuracy, 'binary_csi': keras_metrics.binary_csi, 'binary_frequency_bias': keras_metrics.binary_frequency_bias, 'binary_pod': keras_metrics.binary_pod, 'binary_pofd': keras_metrics.binary_pofd, 'binary_peirce_score': keras_metrics.binary_peirce_score, 'binary_success_ratio': keras_metrics.binary_success_ratio, 'binary_focn': keras_metrics.binary_focn } DEFAULT_NUM_BWO_ITERATIONS = 200 DEFAULT_BWO_LEARNING_RATE = 0.01 # Misc constants. SEPARATOR_STRING = '\n\n' + '*' * 50 + '\n\n' MINOR_SEPARATOR_STRING = '\n\n' + '-' * 50 + '\n\n' DATE_FORMAT = '%Y%m%d' DATE_FORMAT_REGEX = '[0-9][0-9][0-9][0-9][0-1][0-9][0-3][0-9]' MIN_PROBABILITY = 1e-15 MAX_PROBABILITY = 1. - MIN_PROBABILITY METRES_PER_SECOND_TO_KT = 3.6 / 1.852 def time_string_to_unix(time_string, time_format): """Converts time from string to Unix format. Unix format = seconds since 0000 UTC 1 Jan 1970. :param time_string: Time string. :param time_format: Format of time string (example: "%Y%m%d" or "%Y-%m-%d-%H%M%S"). :return: unix_time_sec: Time in Unix format. """ return calendar.timegm(time.strptime(time_string, time_format)) def time_unix_to_string(unix_time_sec, time_format): """Converts time from Unix format to string. Unix format = seconds since 0000 UTC 1 Jan 1970. :param unix_time_sec: Time in Unix format. :param time_format: Desired format of time string (example: "%Y%m%d" or "%Y-%m-%d-%H%M%S"). :return: time_string: Time string. 
""" return time.strftime(time_format, time.gmtime(unix_time_sec)) def _image_file_name_to_date(netcdf_file_name): """Parses date from name of image (NetCDF) file. :param netcdf_file_name: Path to input file. :return: date_string: Date (format "yyyymmdd"). """ pathless_file_name = os.path.split(netcdf_file_name)[-1] date_string = pathless_file_name.replace( 'NCARSTORM_', '').replace('-0000_d01_model_patches.nc', '') # Verify. time_string_to_unix(time_string=date_string, time_format=DATE_FORMAT) return date_string def find_many_image_files(first_date_string, last_date_string, image_dir_name=DEFAULT_IMAGE_DIR_NAME): """Finds image (NetCDF) files in the given date range. :param first_date_string: First date ("yyyymmdd") in range. :param last_date_string: Last date ("yyyymmdd") in range. :param image_dir_name: Name of directory with image (NetCDF) files. :return: netcdf_file_names: 1-D list of paths to image files. """ first_time_unix_sec = time_string_to_unix( time_string=first_date_string, time_format=DATE_FORMAT) last_time_unix_sec = time_string_to_unix( time_string=last_date_string, time_format=DATE_FORMAT) netcdf_file_pattern = ( '{0:s}/NCARSTORM_{1:s}-0000_d01_model_patches.nc' ).format(image_dir_name, DATE_FORMAT_REGEX) netcdf_file_names = glob.glob(netcdf_file_pattern) netcdf_file_names.sort() file_date_strings = [_image_file_name_to_date(f) for f in netcdf_file_names] file_times_unix_sec = numpy.array([ time_string_to_unix(time_string=d, time_format=DATE_FORMAT) for d in file_date_strings ], dtype=int) good_indices = numpy.where(numpy.logical_and( file_times_unix_sec >= first_time_unix_sec, file_times_unix_sec <= last_time_unix_sec ))[0] return [netcdf_file_names[k] for k in good_indices] def read_image_file(netcdf_file_name): """Reads storm-centered images from NetCDF file. E = number of examples (storm objects) in file M = number of rows in each storm-centered grid N = number of columns in each storm-centered grid C = number of channels (predictor variables) :param netcdf_file_name: Path to input file. :return: image_dict: Dictionary with the following keys. image_dict['storm_ids']: length-E list of storm IDs (integers). image_dict['storm_steps']: length-E numpy array of storm steps (integers). image_dict['predictor_names']: length-C list of predictor names. image_dict['predictor_matrix']: E-by-M-by-N-by-C numpy array of predictor values. image_dict['target_name']: Name of target variable. image_dict['target_matrix']: E-by-M-by-N numpy array of target values. """ dataset_object = netCDF4.Dataset(netcdf_file_name) storm_ids = numpy.array( dataset_object.variables[NETCDF_TRACK_ID_NAME][:], dtype=int) storm_steps = numpy.array( dataset_object.variables[NETCDF_TRACK_STEP_NAME][:], dtype=int) predictor_matrix = None for this_predictor_name in NETCDF_PREDICTOR_NAMES: this_predictor_matrix = numpy.array( dataset_object.variables[this_predictor_name][:], dtype=float) this_predictor_matrix = numpy.expand_dims( this_predictor_matrix, axis=-1) if predictor_matrix is None: predictor_matrix = this_predictor_matrix + 0. 
else: predictor_matrix = numpy.concatenate( (predictor_matrix, this_predictor_matrix), axis=-1) target_matrix = numpy.array( dataset_object.variables[NETCDF_TARGET_NAME][:], dtype=float) return { STORM_IDS_KEY: storm_ids, STORM_STEPS_KEY: storm_steps, PREDICTOR_NAMES_KEY: PREDICTOR_NAMES, PREDICTOR_MATRIX_KEY: predictor_matrix, TARGET_NAME_KEY: TARGET_NAME, TARGET_MATRIX_KEY: target_matrix } def read_many_image_files(netcdf_file_names): """Reads storm-centered images from many NetCDF files. :param netcdf_file_names: 1-D list of paths to input files. :return: image_dict: See doc for `read_image_file`. """ image_dict = None keys_to_concat = [ STORM_IDS_KEY, STORM_STEPS_KEY, PREDICTOR_MATRIX_KEY, TARGET_MATRIX_KEY ] for this_file_name in netcdf_file_names: print('Reading data from: "{0:s}"...'.format(this_file_name)) this_image_dict = read_image_file(this_file_name) if image_dict is None: image_dict = copy.deepcopy(this_image_dict) continue for this_key in keys_to_concat: image_dict[this_key] = numpy.concatenate( (image_dict[this_key], this_image_dict[this_key]), axis=0) return image_dict def image_files_example1(): """Runs Example 1 for feature files.""" image_file_names = find_many_image_files( first_date_string='20150701', last_date_string='20150731') image_dict = read_many_image_files(image_file_names) print(MINOR_SEPARATOR_STRING) print('Variables in dictionary are as follows:') for this_key in image_dict.keys(): print(this_key) print('\nPredictor variables are as follows:') predictor_names = image_dict[PREDICTOR_NAMES_KEY] for this_name in predictor_names: print(this_name) these_predictor_values = image_dict[PREDICTOR_MATRIX_KEY][0, :5, :5, 0] print( ('\nSome values of predictor variable "{0:s}" for first storm object:' '\n{1:s}' ).format(predictor_names[0], str(these_predictor_values)) ) these_target_values = image_dict[TARGET_MATRIX_KEY][0, :5, :5] print( ('\nSome values of target variable "{0:s}" for first storm object:' '\n{1:s}' ).format(image_dict[TARGET_NAME_KEY], str(these_target_values)) ) def find_training_files_example(): """Finds training files.""" training_file_names = find_many_image_files( first_date_string='20100101', last_date_string='20141231') validation_file_names = find_many_image_files( first_date_string='20150101', last_date_string='20151231') def _init_figure_panels(num_rows, num_columns, horizontal_space_fraction=0.1, vertical_space_fraction=0.1): """Initializes paneled figure. :param num_rows: Number of panel rows. :param num_columns: Number of panel columns. :param horizontal_space_fraction: Horizontal space between panels (as fraction of panel size). :param vertical_space_fraction: Vertical space between panels (as fraction of panel size). :return: figure_object: Instance of `matplotlib.figure.Figure`. :return: axes_objects_2d_list: 2-D list, where axes_objects_2d_list[i][j] is the handle (instance of `matplotlib.axes._subplots.AxesSubplot`) for the [i]th row and [j]th column. 
""" figure_object, axes_objects_2d_list = pyplot.subplots( num_rows, num_columns, sharex=False, sharey=False, figsize=(FIGURE_WIDTH_INCHES, FIGURE_HEIGHT_INCHES) ) if num_rows == num_columns == 1: axes_objects_2d_list = [[axes_objects_2d_list]] elif num_columns == 1: axes_objects_2d_list = [[a] for a in axes_objects_2d_list] elif num_rows == 1: axes_objects_2d_list = [axes_objects_2d_list] pyplot.subplots_adjust( left=0.02, bottom=0.02, right=0.98, top=0.95, hspace=vertical_space_fraction, wspace=horizontal_space_fraction) return figure_object, axes_objects_2d_list def _add_colour_bar( axes_object, colour_map_object, values_to_colour, min_colour_value, max_colour_value, colour_norm_object=None, orientation_string='vertical', extend_min=True, extend_max=True): """Adds colour bar to existing axes. :param axes_object: Existing axes (instance of `matplotlib.axes._subplots.AxesSubplot`). :param colour_map_object: Colour scheme (instance of `matplotlib.pyplot.cm`). :param values_to_colour: numpy array of values to colour. :param min_colour_value: Minimum value in colour map. :param max_colour_value: Max value in colour map. :param colour_norm_object: Instance of `matplotlib.colors.BoundaryNorm`, defining the scale of the colour map. If `colour_norm_object is None`, will assume that scale is linear. :param orientation_string: Orientation of colour bar ("vertical" or "horizontal"). :param extend_min: Boolean flag. If True, the bottom of the colour bar will have an arrow. If False, it will be a flat line, suggesting that lower values are not possible. :param extend_max: Same but for top of colour bar. :return: colour_bar_object: Colour bar (instance of `matplotlib.pyplot.colorbar`) created by this method. """ if colour_norm_object is None: colour_norm_object = matplotlib.colors.Normalize( vmin=min_colour_value, vmax=max_colour_value, clip=False) scalar_mappable_object = pyplot.cm.ScalarMappable( cmap=colour_map_object, norm=colour_norm_object) scalar_mappable_object.set_array(values_to_colour) if extend_min and extend_max: extend_string = 'both' elif extend_min: extend_string = 'min' elif extend_max: extend_string = 'max' else: extend_string = 'neither' if orientation_string == 'horizontal': padding = 0.075 else: padding = 0.05 colour_bar_object = pyplot.colorbar( ax=axes_object, mappable=scalar_mappable_object, orientation=orientation_string, pad=padding, extend=extend_string) colour_bar_object.ax.tick_params(labelsize=FONT_SIZE) return colour_bar_object def plot_predictor_2d( predictor_matrix, colour_map_object, colour_norm_object=None, min_colour_value=None, max_colour_value=None, axes_object=None): """Plots predictor variable on 2-D grid. If `colour_norm_object is None`, both `min_colour_value` and `max_colour_value` must be specified. M = number of rows in grid N = number of columns in grid :param predictor_matrix: M-by-N numpy array of predictor values. :param colour_map_object: Instance of `matplotlib.pyplot.cm`. :param min_colour_value: Minimum value in colour scheme. :param max_colour_value: Max value in colour scheme. :param axes_object: Instance of `matplotlib.axes._subplots.AxesSubplot`. Will plot on these axes. :return: colour_bar_object: Colour bar (instance of `matplotlib.pyplot.colorbar`) created by this method. 
""" if axes_object is None: _, axes_object = pyplot.subplots( 1, 1, figsize=(FIGURE_WIDTH_INCHES, FIGURE_HEIGHT_INCHES) ) if colour_norm_object is not None: min_colour_value = colour_norm_object.boundaries[0] max_colour_value = colour_norm_object.boundaries[-1] axes_object.pcolormesh( predictor_matrix, cmap=colour_map_object, norm=colour_norm_object, vmin=min_colour_value, vmax=max_colour_value, shading='flat', edgecolors='None') axes_object.set_xticks([]) axes_object.set_yticks([]) return _add_colour_bar( axes_object=axes_object, colour_map_object=colour_map_object, values_to_colour=predictor_matrix, min_colour_value=min_colour_value, max_colour_value=max_colour_value) def plot_wind_2d(u_wind_matrix_m_s01, v_wind_matrix_m_s01, axes_object=None): """Plots wind velocity on 2-D grid. M = number of rows in grid N = number of columns in grid :param u_wind_matrix_m_s01: M-by-N numpy array of eastward components (metres per second). :param v_wind_matrix_m_s01: M-by-N numpy array of northward components (metres per second). :param axes_object: Instance of `matplotlib.axes._subplots.AxesSubplot`. Will plot on these axes. """ if axes_object is None: _, axes_object = pyplot.subplots( 1, 1, figsize=(FIGURE_WIDTH_INCHES, FIGURE_HEIGHT_INCHES) ) num_grid_rows = u_wind_matrix_m_s01.shape[0] num_grid_columns = u_wind_matrix_m_s01.shape[1] x_coords_unique = numpy.linspace( 0, num_grid_columns, num=num_grid_columns + 1, dtype=float) x_coords_unique = x_coords_unique[:-1] x_coords_unique = x_coords_unique + numpy.diff(x_coords_unique[:2]) / 2 y_coords_unique = numpy.linspace( 0, num_grid_rows, num=num_grid_rows + 1, dtype=float) y_coords_unique = y_coords_unique[:-1] y_coords_unique = y_coords_unique + numpy.diff(y_coords_unique[:2]) / 2 x_coord_matrix, y_coord_matrix = numpy.meshgrid(x_coords_unique, y_coords_unique) speed_matrix_m_s01 = numpy.sqrt(u_wind_matrix_m_s01 ** 2 + v_wind_matrix_m_s01 ** 2) axes_object.barbs( x_coord_matrix, y_coord_matrix, u_wind_matrix_m_s01 * METRES_PER_SECOND_TO_KT, v_wind_matrix_m_s01 * METRES_PER_SECOND_TO_KT, speed_matrix_m_s01 * METRES_PER_SECOND_TO_KT, color='k', length=6, sizes={'emptybarb': 0.1}, fill_empty=True, rounding=False) axes_object.set_xlim(0, num_grid_columns) axes_object.set_ylim(0, num_grid_rows) def plot_many_predictors_with_barbs( predictor_matrix, predictor_names, min_colour_temp_kelvins, max_colour_temp_kelvins): """Plots many predictor variables on 2-D grid with wind barbs overlain. M = number of rows in grid N = number of columns in grid C = number of predictors :param predictor_matrix: M-by-N-by-C numpy array of predictor values. :param predictor_names: length-C list of predictor names. :param min_colour_temp_kelvins: Minimum value in temperature colour scheme. :param max_colour_temp_kelvins: Max value in temperature colour scheme. :return: figure_object: See doc for `_init_figure_panels`. :return: axes_objects_2d_list: Same. 
""" u_wind_matrix_m_s01 = predictor_matrix[ ..., predictor_names.index(U_WIND_NAME)] v_wind_matrix_m_s01 = predictor_matrix[ ..., predictor_names.index(V_WIND_NAME)] non_wind_predictor_names = [ p for p in predictor_names if p not in [U_WIND_NAME, V_WIND_NAME] ] figure_object, axes_objects_2d_list = _init_figure_panels( num_rows=len(non_wind_predictor_names), num_columns=1) for m in range(len(non_wind_predictor_names)): this_predictor_index = predictor_names.index( non_wind_predictor_names[m]) if non_wind_predictor_names[m] == REFLECTIVITY_NAME: this_colour_norm_object = REFL_COLOUR_NORM_OBJECT this_min_colour_value = None this_max_colour_value = None else: this_colour_norm_object = None this_min_colour_value = min_colour_temp_kelvins + 0. this_max_colour_value = max_colour_temp_kelvins + 0. this_colour_bar_object = plot_predictor_2d( predictor_matrix=predictor_matrix[..., this_predictor_index], colour_map_object=PREDICTOR_TO_COLOUR_MAP_DICT[ non_wind_predictor_names[m]], colour_norm_object=this_colour_norm_object, min_colour_value=this_min_colour_value, max_colour_value=this_max_colour_value, axes_object=axes_objects_2d_list[m][0]) plot_wind_2d(u_wind_matrix_m_s01=u_wind_matrix_m_s01, v_wind_matrix_m_s01=v_wind_matrix_m_s01, axes_object=axes_objects_2d_list[m][0]) this_colour_bar_object.set_label(non_wind_predictor_names[m]) return figure_object, axes_objects_2d_list def plot_many_predictors_sans_barbs( predictor_matrix, predictor_names, min_colour_temp_kelvins, max_colour_temp_kelvins, max_colour_wind_speed_m_s01): """Plots many predictor variables on 2-D grid; no wind barbs overlain. In this case, both u-wind and v-wind are plotted as separate maps. M = number of rows in grid N = number of columns in grid C = number of predictors :param predictor_matrix: M-by-N-by-C numpy array of predictor values. :param predictor_names: length-C list of predictor names. :param min_colour_temp_kelvins: Minimum value in temperature colour scheme. :param max_colour_temp_kelvins: Max value in temperature colour scheme. :param max_colour_wind_speed_m_s01: Max wind speed (metres per second) in colour maps for both u- and v-components. The minimum wind speed be `-1 * max_colour_wind_speed_m_s01`, so the diverging colour scheme will be zero-centered. :return: figure_object: See doc for `_init_figure_panels`. :return: axes_objects_2d_list: Same. """ num_predictors = len(predictor_names) num_panel_rows = int(numpy.floor(numpy.sqrt(num_predictors))) num_panel_columns = int(numpy.ceil(float(num_predictors) / num_panel_rows)) figure_object, axes_objects_2d_list = _init_figure_panels( num_rows=num_panel_rows, num_columns=num_panel_columns) for i in range(num_panel_rows): for j in range(num_panel_columns): this_linear_index = i * num_panel_columns + j if this_linear_index >= num_predictors: break this_colour_map_object = PREDICTOR_TO_COLOUR_MAP_DICT[ predictor_names[this_linear_index]] if predictor_names[this_linear_index] == REFLECTIVITY_NAME: this_colour_norm_object = REFL_COLOUR_NORM_OBJECT this_min_colour_value = None this_max_colour_value = None elif predictor_names[this_linear_index] == TEMPERATURE_NAME: this_colour_norm_object = None this_min_colour_value = min_colour_temp_kelvins + 0. this_max_colour_value = max_colour_temp_kelvins + 0. else: this_colour_norm_object = None this_min_colour_value = -1 * max_colour_wind_speed_m_s01 this_max_colour_value = max_colour_wind_speed_m_s01 + 0. 
this_colour_bar_object = plot_predictor_2d( predictor_matrix=predictor_matrix[..., this_linear_index], colour_map_object=this_colour_map_object, colour_norm_object=this_colour_norm_object, min_colour_value=this_min_colour_value, max_colour_value=this_max_colour_value, axes_object=axes_objects_2d_list[i][j]) this_colour_bar_object.set_label(predictor_names[this_linear_index]) return figure_object, axes_objects_2d_list def plot_predictors_example1(validation_file_names): """Plots all predictors for random example (storm object). :param validation_file_names: 1-D list of paths to input files. """ validation_image_dict = read_many_image_files(validation_file_names) print(SEPARATOR_STRING) predictor_matrix = validation_image_dict[PREDICTOR_MATRIX_KEY][0, ...] predictor_names = validation_image_dict[PREDICTOR_NAMES_KEY] temperature_matrix_kelvins = predictor_matrix[ ..., predictor_names.index(TEMPERATURE_NAME)] plot_many_predictors_with_barbs( predictor_matrix=predictor_matrix, predictor_names=predictor_names, min_colour_temp_kelvins=numpy.percentile(temperature_matrix_kelvins, 1), max_colour_temp_kelvins=numpy.percentile(temperature_matrix_kelvins, 99) ) pyplot.show() def plot_predictors_example2(validation_image_dict): """Plots all predictors for example with greatest future vorticity. :param validation_image_dict: Dictionary created by `read_many_image_files`. """ target_matrix_s01 = validation_image_dict[TARGET_MATRIX_KEY] example_index = numpy.unravel_index( numpy.argmax(target_matrix_s01), target_matrix_s01.shape )[0] predictor_matrix = validation_image_dict[PREDICTOR_MATRIX_KEY][ example_index, ...] predictor_names = validation_image_dict[PREDICTOR_NAMES_KEY] temperature_matrix_kelvins = predictor_matrix[ ..., predictor_names.index(TEMPERATURE_NAME)] plot_many_predictors_with_barbs( predictor_matrix=predictor_matrix, predictor_names=predictor_names, min_colour_temp_kelvins=numpy.percentile(temperature_matrix_kelvins, 1), max_colour_temp_kelvins=numpy.percentile(temperature_matrix_kelvins, 99) ) pyplot.show() def _update_normalization_params(intermediate_normalization_dict, new_values): """Updates normalization params for one predictor. :param intermediate_normalization_dict: Dictionary with the following keys. intermediate_normalization_dict['num_values']: Number of values on which current estimates are based. intermediate_normalization_dict['mean_value']: Current estimate for mean. intermediate_normalization_dict['mean_of_squares']: Current mean of squared values. :param new_values: numpy array of new values (will be used to update `intermediate_normalization_dict`). :return: intermediate_normalization_dict: Same as input but with updated values. """ if MEAN_VALUE_KEY not in intermediate_normalization_dict: intermediate_normalization_dict = { NUM_VALUES_KEY: 0, MEAN_VALUE_KEY: 0., MEAN_OF_SQUARES_KEY: 0. 
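            # MEAN_OF_SQUARES tracks E[x^2]; `_get_standard_deviation` later
            # recovers the variance as E[x^2] - (E[x])^2, with a sample-size
            # bias correction.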
} these_means = numpy.array([ intermediate_normalization_dict[MEAN_VALUE_KEY], numpy.mean(new_values) ]) these_weights = numpy.array([ intermediate_normalization_dict[NUM_VALUES_KEY], new_values.size ]) intermediate_normalization_dict[MEAN_VALUE_KEY] = numpy.average( these_means, weights=these_weights) these_means = numpy.array([ intermediate_normalization_dict[MEAN_OF_SQUARES_KEY], numpy.mean(new_values ** 2) ]) intermediate_normalization_dict[MEAN_OF_SQUARES_KEY] = numpy.average( these_means, weights=these_weights) intermediate_normalization_dict[NUM_VALUES_KEY] += new_values.size return intermediate_normalization_dict def _get_standard_deviation(intermediate_normalization_dict): """Computes stdev from intermediate normalization params. :param intermediate_normalization_dict: See doc for `_update_normalization_params`. :return: standard_deviation: Standard deviation. """ num_values = float(intermediate_normalization_dict[NUM_VALUES_KEY]) multiplier = num_values / (num_values - 1) return numpy.sqrt(multiplier * ( intermediate_normalization_dict[MEAN_OF_SQUARES_KEY] - intermediate_normalization_dict[MEAN_VALUE_KEY] ** 2 )) def get_image_normalization_params(netcdf_file_names): """Computes normalization params (mean and stdev) for each predictor. :param netcdf_file_names: 1-D list of paths to input files. :return: normalization_dict: See input doc for `normalize_images`. """ predictor_names = None norm_dict_by_predictor = None for this_file_name in netcdf_file_names: print('Reading data from: "{0:s}"...'.format(this_file_name)) this_image_dict = read_image_file(this_file_name) if predictor_names is None: predictor_names = this_image_dict[PREDICTOR_NAMES_KEY] norm_dict_by_predictor = [{}] * len(predictor_names) for m in range(len(predictor_names)): norm_dict_by_predictor[m] = _update_normalization_params( intermediate_normalization_dict=norm_dict_by_predictor[m], new_values=this_image_dict[PREDICTOR_MATRIX_KEY][..., m]) print('\n') normalization_dict = {} for m in range(len(predictor_names)): this_mean = norm_dict_by_predictor[m][MEAN_VALUE_KEY] this_stdev = _get_standard_deviation(norm_dict_by_predictor[m]) normalization_dict[predictor_names[m]] = numpy.array( [this_mean, this_stdev]) print( ('Mean and standard deviation for "{0:s}" = {1:.4f}, {2:.4f}' ).format(predictor_names[m], this_mean, this_stdev) ) return normalization_dict def get_norm_params_example(training_file_names): """Gets normalization parameters. :param training_file_names: 1-D list of paths to input files. """ normalization_dict = get_image_normalization_params(training_file_names) def normalize_images( predictor_matrix, predictor_names, normalization_dict=None): """Normalizes images to z-scores. E = number of examples (storm objects) in file M = number of rows in each storm-centered grid N = number of columns in each storm-centered grid C = number of channels (predictor variables) :param predictor_matrix: E-by-M-by-N-by-C numpy array of predictor values. :param predictor_names: length-C list of predictor names. :param normalization_dict: Dictionary. Each key is the name of a predictor value, and the corresponding value is a length-2 numpy array with [mean, standard deviation]. If `normalization_dict is None`, mean and standard deviation will be computed for each predictor. :return: predictor_matrix: Normalized version of input. :return: normalization_dict: See doc for input variable. If input was None, this will be a newly created dictionary. Otherwise, this will be the same dictionary passed as input. 
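
    In symbols: each predictor value x is replaced by the z-score
    z = (x - mean) / stdev, with mean and stdev taken from
    `normalization_dict` (or, if that is None, computed from
    `predictor_matrix` itself).  `denormalize_images` inverts the transform
    as x = mean + stdev * z.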
""" num_predictors = len(predictor_names) if normalization_dict is None: normalization_dict = {} for m in range(num_predictors): this_mean = numpy.mean(predictor_matrix[..., m]) this_stdev = numpy.std(predictor_matrix[..., m], ddof=1) normalization_dict[predictor_names[m]] = numpy.array( [this_mean, this_stdev]) for m in range(num_predictors): this_mean = normalization_dict[predictor_names[m]][0] this_stdev = normalization_dict[predictor_names[m]][1] predictor_matrix[..., m] = ( (predictor_matrix[..., m] - this_mean) / float(this_stdev) ) return predictor_matrix, normalization_dict def denormalize_images(predictor_matrix, predictor_names, normalization_dict): """Denormalizes images from z-scores back to original scales. :param predictor_matrix: See doc for `normalize_images`. :param predictor_names: Same. :param normalization_dict: Same. :return: predictor_matrix: Denormalized version of input. """ num_predictors = len(predictor_names) for m in range(num_predictors): this_mean = normalization_dict[predictor_names[m]][0] this_stdev = normalization_dict[predictor_names[m]][1] predictor_matrix[..., m] = ( this_mean + this_stdev * predictor_matrix[..., m] ) return predictor_matrix def norm_denorm_example(training_file_names, normalization_dict): """Normalizes and denormalizes images. :param training_file_names: 1-D list of paths to input files. :param normalization_dict: Dictionary created by `get_image_normalization_params`. """ image_dict = read_image_file(training_file_names[0]) predictor_names = image_dict[PREDICTOR_NAMES_KEY] these_predictor_values = image_dict[PREDICTOR_MATRIX_KEY][0, :5, :5, 0] print('\nOriginal values of "{0:s}" for first storm object:\n{1:s}'.format( predictor_names[0], str(these_predictor_values) )) image_dict[PREDICTOR_MATRIX_KEY], _ = normalize_images( predictor_matrix=image_dict[PREDICTOR_MATRIX_KEY], predictor_names=predictor_names, normalization_dict=normalization_dict) these_predictor_values = image_dict[PREDICTOR_MATRIX_KEY][0, :5, :5, 0] print( '\nNormalized values of "{0:s}" for first storm object:\n{1:s}'.format( predictor_names[0], str(these_predictor_values)) ) image_dict[PREDICTOR_MATRIX_KEY] = denormalize_images( predictor_matrix=image_dict[PREDICTOR_MATRIX_KEY], predictor_names=predictor_names, normalization_dict=normalization_dict) these_predictor_values = image_dict[PREDICTOR_MATRIX_KEY][0, :5, :5, 0] print( ('\nDenormalized values of "{0:s}" for first storm object:\n{1:s}' ).format(predictor_names[0], str(these_predictor_values)) ) def get_binarization_threshold(netcdf_file_names, percentile_level): """Computes binarization threshold for target variable. Binarization threshold will be [q]th percentile of all image maxima, where q = `percentile_level`. :param netcdf_file_names: 1-D list of paths to input files. :param percentile_level: q in the above discussion. :return: binarization_threshold: Binarization threshold (used to turn each target image into a yes-or-no label). 
""" max_target_values = numpy.array([]) for this_file_name in netcdf_file_names: print('Reading data from: "{0:s}"...'.format(this_file_name)) this_image_dict = read_image_file(this_file_name) this_target_matrix = this_image_dict[TARGET_MATRIX_KEY] this_num_examples = this_target_matrix.shape[0] these_max_target_values = numpy.full(this_num_examples, numpy.nan) for i in range(this_num_examples): these_max_target_values[i] = numpy.max(this_target_matrix[i, ...]) max_target_values = numpy.concatenate(( max_target_values, these_max_target_values)) binarization_threshold = numpy.percentile( max_target_values, percentile_level) print('\nBinarization threshold for "{0:s}" = {1:.4e}'.format( TARGET_NAME, binarization_threshold)) return binarization_threshold def find_binarization_threshold_example(training_file_names): """Finds binarization threshold for target variable. :param training_file_names: 1-D list of paths to input files. """ binarization_threshold = get_binarization_threshold( netcdf_file_names=training_file_names, percentile_level=90.) def binarize_target_images(target_matrix, binarization_threshold): """Binarizes target images. Specifically, this method turns each target image into a binary label, depending on whether or not (max value in image) >= binarization_threshold. E = number of examples (storm objects) in file M = number of rows in each storm-centered grid N = number of columns in each storm-centered grid :param target_matrix: E-by-M-by-N numpy array of floats. :param binarization_threshold: Binarization threshold. :return: target_values: length-E numpy array of target values (integers in 0...1). """ num_examples = target_matrix.shape[0] target_values = numpy.full(num_examples, -1, dtype=int) for i in range(num_examples): target_values[i] = ( numpy.max(target_matrix[i, ...]) >= binarization_threshold ) return target_values def binarization_example(training_file_names, binarization_threshold): """Binarizes target images. :param training_file_names: 1-D list of paths to input files. :param binarization_threshold: Binarization threshold. """ image_dict = read_image_file(training_file_names[0]) these_max_target_values = numpy.array( [numpy.max(image_dict[TARGET_MATRIX_KEY][i, ...]) for i in range(10)] ) print( ('\nSpatial maxima of "{0:s}" for the first few storm objects:\n{1:s}' ).format(image_dict[TARGET_NAME_KEY], str(these_max_target_values)) ) target_values = binarize_target_images( target_matrix=image_dict[TARGET_MATRIX_KEY], binarization_threshold=binarization_threshold) print( ('\nBinarized target values for the first few storm objects:\n{0:s}' ).format(str(target_values[:10])) ) def _get_dense_layer_dimensions(num_input_units, num_classes, num_dense_layers): """Returns dimensions (number of input and output units) for each dense lyr. D = number of dense layers :param num_input_units: Number of input units (features created by flattening layer). :param num_classes: Number of output classes (possible values of target variable). :param num_dense_layers: Number of dense layers. :return: num_inputs_by_layer: length-D numpy array with number of input units by dense layer. :return: num_outputs_by_layer: length-D numpy array with number of output units by dense layer. 
""" if num_classes == 2: num_output_units = 1 else: num_output_units = num_classes + 0 e_folding_param = ( float(-1 * num_dense_layers) / numpy.log(float(num_output_units) / num_input_units) ) dense_layer_indices = numpy.linspace( 0, num_dense_layers - 1, num=num_dense_layers, dtype=float) num_inputs_by_layer = num_input_units * numpy.exp( -1 * dense_layer_indices / e_folding_param) num_inputs_by_layer = numpy.round(num_inputs_by_layer).astype(int) num_outputs_by_layer = numpy.concatenate(( num_inputs_by_layer[1:], numpy.array([num_output_units], dtype=int) )) return num_inputs_by_layer, num_outputs_by_layer def setup_cnn(num_grid_rows, num_grid_columns): """Sets up (but does not train) CNN (convolutional neural net). :param num_grid_rows: Number of rows in each predictor image. :param num_grid_columns: Number of columns in each predictor image. :return: cnn_model_object: Untrained instance of `keras.models.Model`. """ regularizer_object = keras.regularizers.l1_l2(l1=L1_WEIGHT, l2=L2_WEIGHT) num_predictors = len(NETCDF_PREDICTOR_NAMES) input_layer_object = keras.layers.Input( shape=(num_grid_rows, num_grid_columns, num_predictors) ) current_num_filters = None current_layer_object = None # Add convolutional layers. for _ in range(NUM_CONV_LAYER_SETS): for _ in range(NUM_CONV_LAYERS_PER_SET): if current_num_filters is None: current_num_filters = ( num_predictors * NUM_PREDICTORS_TO_FIRST_NUM_FILTERS) this_input_layer_object = input_layer_object else: current_num_filters *= 2 this_input_layer_object = current_layer_object current_layer_object = keras.layers.Conv2D( filters=current_num_filters, kernel_size=(NUM_CONV_FILTER_ROWS, NUM_CONV_FILTER_COLUMNS), strides=(1, 1), padding='valid', data_format='channels_last', dilation_rate=(1, 1), activation=None, use_bias=True, kernel_initializer='glorot_uniform', bias_initializer='zeros', kernel_regularizer=regularizer_object )(this_input_layer_object) current_layer_object = keras.layers.LeakyReLU( alpha=SLOPE_FOR_RELU )(current_layer_object) if CONV_LAYER_DROPOUT_FRACTION is not None: current_layer_object = keras.layers.Dropout( rate=CONV_LAYER_DROPOUT_FRACTION )(current_layer_object) if USE_BATCH_NORMALIZATION: current_layer_object = keras.layers.BatchNormalization( axis=-1, center=True, scale=True )(current_layer_object) current_layer_object = keras.layers.MaxPooling2D( pool_size=(NUM_POOLING_ROWS, NUM_POOLING_COLUMNS), strides=(NUM_POOLING_ROWS, NUM_POOLING_COLUMNS), padding='valid', data_format='channels_last' )(current_layer_object) these_dimensions = numpy.array( current_layer_object.get_shape().as_list()[1:], dtype=int) num_features = numpy.prod(these_dimensions) current_layer_object = keras.layers.Flatten()(current_layer_object) # Add intermediate dense layers. 
_, num_outputs_by_dense_layer = _get_dense_layer_dimensions( num_input_units=num_features, num_classes=2, num_dense_layers=NUM_DENSE_LAYERS) for k in range(NUM_DENSE_LAYERS - 1): current_layer_object = keras.layers.Dense( num_outputs_by_dense_layer[k], activation=None, use_bias=True, kernel_initializer='glorot_uniform', bias_initializer='zeros', kernel_regularizer=regularizer_object )(current_layer_object) current_layer_object = keras.layers.LeakyReLU( alpha=SLOPE_FOR_RELU )(current_layer_object) if DENSE_LAYER_DROPOUT_FRACTION is not None: current_layer_object = keras.layers.Dropout( rate=DENSE_LAYER_DROPOUT_FRACTION )(current_layer_object) if USE_BATCH_NORMALIZATION: current_layer_object = keras.layers.BatchNormalization( axis=-1, center=True, scale=True )(current_layer_object) # Add output layer (also dense). current_layer_object = keras.layers.Dense( 1, activation=None, use_bias=True, kernel_initializer='glorot_uniform', bias_initializer='zeros', kernel_regularizer=regularizer_object )(current_layer_object) current_layer_object = keras.layers.Activation( 'sigmoid' )(current_layer_object) if DENSE_LAYER_DROPOUT_FRACTION is not None and NUM_DENSE_LAYERS == 1: current_layer_object = keras.layers.Dropout( rate=DENSE_LAYER_DROPOUT_FRACTION )(current_layer_object) # Put the whole thing together and compile. cnn_model_object = keras.models.Model( inputs=input_layer_object, outputs=current_layer_object) cnn_model_object.compile( loss=keras.losses.binary_crossentropy, optimizer=keras.optimizers.Adam(), metrics=LIST_OF_METRIC_FUNCTIONS) cnn_model_object.summary() return cnn_model_object def setup_cnn_example(training_file_names): """Sets up CNN. :param training_file_names: 1-D list of paths to input files. """ this_image_dict = read_image_file(training_file_names[0]) cnn_model_object = setup_cnn( num_grid_rows=this_image_dict[PREDICTOR_MATRIX_KEY].shape[1], num_grid_columns=this_image_dict[PREDICTOR_MATRIX_KEY].shape[2]) def deep_learning_generator(netcdf_file_names, num_examples_per_batch, normalization_dict, binarization_threshold): """Generates training examples for deep-learning model on the fly. E = number of examples (storm objects) M = number of rows in each storm-centered grid N = number of columns in each storm-centered grid C = number of channels (predictor variables) :param netcdf_file_names: 1-D list of paths to input (NetCDF) files. :param num_examples_per_batch: Number of examples per training batch. :param normalization_dict: See doc for `normalize_images`. You cannot leave this as None. :param binarization_threshold: Binarization threshold for target variable. See `binarize_target_images` for details on what this does. :return: predictor_matrix: E-by-M-by-N-by-C numpy array of predictor values. :return: target_values: length-E numpy array of target values (integers in 0...1). :raises: TypeError: if `normalization_dict is None`. """ # TODO(thunderhoser): Maybe add upsampling or downsampling. if normalization_dict is None: error_string = 'normalization_dict cannot be None. Must be specified.' 
        raise TypeError(error_string)

    random.shuffle(netcdf_file_names)
    num_files = len(netcdf_file_names)
    file_index = 0

    num_examples_in_memory = 0
    full_predictor_matrix = None
    full_target_matrix = None
    predictor_names = None

    while True:
        while num_examples_in_memory < num_examples_per_batch:
            print('Reading data from: "{0:s}"...'.format(
                netcdf_file_names[file_index]))

            this_image_dict = read_image_file(netcdf_file_names[file_index])
            predictor_names = this_image_dict[PREDICTOR_NAMES_KEY]

            file_index += 1
            if file_index >= num_files:
                file_index = 0

            if full_target_matrix is None or full_target_matrix.size == 0:
                full_predictor_matrix = (
                    this_image_dict[PREDICTOR_MATRIX_KEY] + 0.
                )
                full_target_matrix = this_image_dict[TARGET_MATRIX_KEY] + 0.
            else:
                full_predictor_matrix = numpy.concatenate(
                    (full_predictor_matrix,
                     this_image_dict[PREDICTOR_MATRIX_KEY]),
                    axis=0)
                full_target_matrix = numpy.concatenate(
                    (full_target_matrix, this_image_dict[TARGET_MATRIX_KEY]),
                    axis=0)

            num_examples_in_memory = full_target_matrix.shape[0]

        batch_indices = numpy.linspace(
            0, num_examples_in_memory - 1, num=num_examples_in_memory,
            dtype=int)
        batch_indices = numpy.random.choice(
            batch_indices, size=num_examples_per_batch, replace=False)

        predictor_matrix, _ = normalize_images(
            predictor_matrix=full_predictor_matrix[batch_indices, ...],
            predictor_names=predictor_names,
            normalization_dict=normalization_dict)
        predictor_matrix = predictor_matrix.astype('float32')

        target_values = binarize_target_images(
            target_matrix=full_target_matrix[batch_indices, ...],
            binarization_threshold=binarization_threshold)

        print('Fraction of examples in positive class: {0:.4f}'.format(
            numpy.mean(target_values)))

        num_examples_in_memory = 0
        full_predictor_matrix = None
        full_target_matrix = None

        yield (predictor_matrix, target_values)


def train_cnn(
        cnn_model_object, training_file_names, normalization_dict,
        binarization_threshold, num_examples_per_batch, num_epochs,
        num_training_batches_per_epoch, output_model_file_name,
        validation_file_names=None, num_validation_batches_per_epoch=None):
    """Trains CNN (convolutional neural net).

    :param cnn_model_object: Untrained instance of `keras.models.Model` (may
        be created by `setup_cnn`).
    :param training_file_names: 1-D list of paths to training files (must be
        readable by `read_image_file`).
    :param normalization_dict: See doc for `deep_learning_generator`.
    :param binarization_threshold: Same.
    :param num_examples_per_batch: Same.
    :param num_epochs: Number of epochs.
    :param num_training_batches_per_epoch: Number of training batches
        furnished to model in each epoch.
    :param output_model_file_name: Path to output file.  The model will be
        saved as an HDF5 file (extension should be ".h5", but this is not
        enforced).
    :param validation_file_names: 1-D list of paths to validation files (must
        be readable by `read_image_file`).  If `validation_file_names is
        None`, will omit on-the-fly validation.
    :param num_validation_batches_per_epoch: [used only if
        `validation_file_names is not None`] Number of validation batches
        furnished to model in each epoch.
    :return: cnn_metadata_dict: Dictionary with the following keys.
    cnn_metadata_dict['training_file_names']: See input doc.
    cnn_metadata_dict['normalization_dict']: Same.
    cnn_metadata_dict['binarization_threshold']: Same.
    cnn_metadata_dict['num_examples_per_batch']: Same.
    cnn_metadata_dict['num_training_batches_per_epoch']: Same.
    cnn_metadata_dict['validation_file_names']: Same.
    cnn_metadata_dict['num_validation_batches_per_epoch']: Same.
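
    Example (a sketch; the batch/epoch numbers are the small values used by
    `train_cnn_example` below, and the output file name is a placeholder):

    >>> cnn_metadata_dict = train_cnn(
    ...     cnn_model_object=cnn_model_object,
    ...     training_file_names=training_file_names,
    ...     normalization_dict=normalization_dict,
    ...     binarization_threshold=binarization_threshold,
    ...     num_examples_per_batch=256, num_epochs=10,
    ...     num_training_batches_per_epoch=10,
    ...     output_model_file_name='cnn_model.h5')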
""" _create_directory(file_name=output_model_file_name) if validation_file_names is None: checkpoint_object = keras.callbacks.ModelCheckpoint( filepath=output_model_file_name, monitor='loss', verbose=1, save_best_only=False, save_weights_only=False, mode='min', period=1) else: checkpoint_object = keras.callbacks.ModelCheckpoint( filepath=output_model_file_name, monitor='val_loss', verbose=1, save_best_only=True, save_weights_only=False, mode='min', period=1) list_of_callback_objects = [checkpoint_object] cnn_metadata_dict = { TRAINING_FILES_KEY: training_file_names, NORMALIZATION_DICT_KEY: normalization_dict, BINARIZATION_THRESHOLD_KEY: binarization_threshold, NUM_EXAMPLES_PER_BATCH_KEY: num_examples_per_batch, NUM_TRAINING_BATCHES_KEY: num_training_batches_per_epoch, VALIDATION_FILES_KEY: validation_file_names, NUM_VALIDATION_BATCHES_KEY: num_validation_batches_per_epoch } training_generator = deep_learning_generator( netcdf_file_names=training_file_names, num_examples_per_batch=num_examples_per_batch, normalization_dict=normalization_dict, binarization_threshold=binarization_threshold) if validation_file_names is None: cnn_model_object.fit_generator( generator=training_generator, steps_per_epoch=num_training_batches_per_epoch, epochs=num_epochs, verbose=1, callbacks=list_of_callback_objects, workers=0) return cnn_metadata_dict early_stopping_object = keras.callbacks.EarlyStopping( monitor='val_loss', min_delta=MIN_XENTROPY_DECREASE_FOR_EARLY_STOP, patience=NUM_EPOCHS_FOR_EARLY_STOPPING, verbose=1, mode='min') list_of_callback_objects.append(early_stopping_object) validation_generator = deep_learning_generator( netcdf_file_names=validation_file_names, num_examples_per_batch=num_examples_per_batch, normalization_dict=normalization_dict, binarization_threshold=binarization_threshold) cnn_model_object.fit_generator( generator=training_generator, steps_per_epoch=num_training_batches_per_epoch, epochs=num_epochs, verbose=1, callbacks=list_of_callback_objects, workers=0, validation_data=validation_generator, validation_steps=num_validation_batches_per_epoch) return cnn_metadata_dict def _create_directory(directory_name=None, file_name=None): """Creates directory (along with parents if necessary). This method creates directories only when necessary, so you don't have to worry about it overwriting anything. :param directory_name: Name of desired directory. :param file_name: [used only if `directory_name is None`] Path to desired file. All directories in path will be created. """ if directory_name is None: directory_name = os.path.split(file_name)[0] try: os.makedirs(directory_name) except OSError as this_error: if this_error.errno == errno.EEXIST and os.path.isdir(directory_name): pass else: raise def read_keras_model(hdf5_file_name): """Reads Keras model from HDF5 file. :param hdf5_file_name: Path to input file. :return: model_object: Instance of `keras.models.Model`. """ return keras.models.load_model( hdf5_file_name, custom_objects=METRIC_FUNCTION_DICT) def find_model_metafile(model_file_name, raise_error_if_missing=False): """Finds metafile for machine-learning model. :param model_file_name: Path to file with trained model. :param raise_error_if_missing: Boolean flag. If True and metafile is not found, this method will error out. :return: model_metafile_name: Path to file with metadata. If file is not found and `raise_error_if_missing = False`, this will be the expected path. :raises: ValueError: if metafile is not found and `raise_error_if_missing = True`. 
""" model_directory_name, pathless_model_file_name = os.path.split( model_file_name) model_metafile_name = '{0:s}/{1:s}_metadata.json'.format( model_directory_name, os.path.splitext(pathless_model_file_name)[0] ) if not os.path.isfile(model_metafile_name) and raise_error_if_missing: error_string = 'Cannot find file. Expected at: "{0:s}"'.format( model_metafile_name) raise ValueError(error_string) return model_metafile_name def _metadata_numpy_to_list(model_metadata_dict): """Converts numpy arrays in model metadata to lists. This is needed so that the metadata can be written to a JSON file (JSON does not handle numpy arrays). This method does not overwrite the original dictionary. :param model_metadata_dict: Dictionary created by `train_cnn` or `train_ucn`. :return: new_metadata_dict: Same but with lists instead of numpy arrays. """ new_metadata_dict = copy.deepcopy(model_metadata_dict) if NORMALIZATION_DICT_KEY in new_metadata_dict.keys(): this_norm_dict = new_metadata_dict[NORMALIZATION_DICT_KEY] for this_key in this_norm_dict.keys(): if isinstance(this_norm_dict[this_key], numpy.ndarray): this_norm_dict[this_key] = this_norm_dict[this_key].tolist() return new_metadata_dict def _metadata_list_to_numpy(model_metadata_dict): """Converts lists in model metadata to numpy arrays. This method is the inverse of `_metadata_numpy_to_list`. This method overwrites the original dictionary. :param model_metadata_dict: Dictionary created by `train_cnn` or `train_ucn`. :return: model_metadata_dict: Same but numpy arrays instead of lists. """ if NORMALIZATION_DICT_KEY in model_metadata_dict.keys(): this_norm_dict = model_metadata_dict[NORMALIZATION_DICT_KEY] for this_key in this_norm_dict.keys(): this_norm_dict[this_key] = numpy.array(this_norm_dict[this_key]) return model_metadata_dict def write_model_metadata(model_metadata_dict, json_file_name): """Writes metadata for machine-learning model to JSON file. :param model_metadata_dict: Dictionary created by `train_cnn` or `train_ucn`. :param json_file_name: Path to output file. """ _create_directory(file_name=json_file_name) new_metadata_dict = _metadata_numpy_to_list(model_metadata_dict) with open(json_file_name, 'w') as this_file: json.dump(new_metadata_dict, this_file) def read_model_metadata(json_file_name): """Reads metadata for machine-learning model from JSON file. :param json_file_name: Path to output file. :return: model_metadata_dict: Dictionary with keys listed in doc for `train_cnn` or `train_ucn`. """ with open(json_file_name) as this_file: model_metadata_dict = json.load(this_file) return _metadata_list_to_numpy(model_metadata_dict) def train_cnn_example( cnn_model_object, training_file_names, validation_file_names, normalization_dict, binarization_threshold): """Actually trains the CNN. :param cnn_model_object: See doc for `train_cnn`. :param training_file_names: Same. :param validation_file_names: Same. :param normalization_dict: Same. :param binarization_threshold: Same. 
""" cnn_file_name = '{0:s}/cnn_model.h5'.format(MODULE4_DIR_NAME) cnn_metadata_dict = train_cnn( cnn_model_object=cnn_model_object, training_file_names=training_file_names, normalization_dict=normalization_dict, binarization_threshold=binarization_threshold, num_examples_per_batch=256, num_epochs=10, num_training_batches_per_epoch=10, validation_file_names=validation_file_names, num_validation_batches_per_epoch=10, output_model_file_name=cnn_file_name) def _apply_cnn(cnn_model_object, predictor_matrix, verbose=True, output_layer_name=None): """Applies trained CNN (convolutional neural net) to new data. E = number of examples (storm objects) in file M = number of rows in each storm-centered grid N = number of columns in each storm-centered grid C = number of channels (predictor variables) :param cnn_model_object: Trained instance of `keras.models.Model`. :param predictor_matrix: E-by-M-by-N-by-C numpy array of predictor values. :param verbose: Boolean flag. If True, progress messages will be printed. :param output_layer_name: Name of output layer. If `output_layer_name is None`, this method will use the actual output layer, so will return predictions. If `output_layer_name is not None`, will return "features" (outputs from the given layer). If `output_layer_name is None`... :return: forecast_probabilities: length-E numpy array with forecast probabilities of positive class (label = 1). If `output_layer_name is not None`... :return: feature_matrix: numpy array of features (outputs from the given layer). There is no guarantee on the shape of this array, except that the first axis has length E. """ num_examples = predictor_matrix.shape[0] num_examples_per_batch = 1000 if output_layer_name is None: model_object_to_use = cnn_model_object else: model_object_to_use = keras.models.Model( inputs=cnn_model_object.input, outputs=cnn_model_object.get_layer(name=output_layer_name).output) output_array = None for i in range(0, num_examples, num_examples_per_batch): this_first_index = i this_last_index = min( [i + num_examples_per_batch - 1, num_examples - 1] ) if verbose: print('Applying model to examples {0:d}-{1:d} of {2:d}...'.format( this_first_index, this_last_index, num_examples)) these_indices = numpy.linspace( this_first_index, this_last_index, num=this_last_index - this_first_index + 1, dtype=int) this_output_array = model_object_to_use.predict( predictor_matrix[these_indices, ...], batch_size=num_examples_per_batch) if output_layer_name is None: this_output_array = this_output_array[:, -1] if output_array is None: output_array = this_output_array + 0. else: output_array = numpy.concatenate( (output_array, this_output_array), axis=0) return output_array def evaluate_cnn( cnn_model_object, image_dict, cnn_metadata_dict, output_dir_name): """Evaluates trained CNN (convolutional neural net). :param cnn_model_object: Trained instance of `keras.models.Model`. :param image_dict: Dictionary created by `read_image_file` or `read_many_image_files`. Should contain validation or testing data (not training data), but this is not enforced. :param cnn_metadata_dict: Dictionary created by `train_cnn`. This will ensure that data in `image_dict` are processed the exact same way as the training data for `cnn_model_object`. :param output_dir_name: Path to output directory. Figures will be saved here. 
""" predictor_matrix, _ = normalize_images( predictor_matrix=image_dict[PREDICTOR_MATRIX_KEY] + 0., predictor_names=image_dict[PREDICTOR_NAMES_KEY], normalization_dict=cnn_metadata_dict[NORMALIZATION_DICT_KEY]) predictor_matrix = predictor_matrix.astype('float32') target_values = binarize_target_images( target_matrix=image_dict[TARGET_MATRIX_KEY], binarization_threshold=cnn_metadata_dict[BINARIZATION_THRESHOLD_KEY]) forecast_probabilities = _apply_cnn(cnn_model_object=cnn_model_object, predictor_matrix=predictor_matrix) print(MINOR_SEPARATOR_STRING) pofd_by_threshold, pod_by_threshold = roc_curves.plot_roc_curve( observed_labels=target_values, forecast_probabilities=forecast_probabilities) area_under_roc_curve = scikit_learn_auc(pofd_by_threshold, pod_by_threshold) title_string = 'Area under ROC curve: {0:.4f}'.format(area_under_roc_curve) pyplot.title(title_string) pyplot.show() _create_directory(directory_name=output_dir_name) roc_curve_file_name = '{0:s}/roc_curve.jpg'.format(output_dir_name) print('Saving figure to: "{0:s}"...'.format(roc_curve_file_name)) pyplot.savefig(roc_curve_file_name, dpi=FIGURE_RESOLUTION_DPI) pyplot.close() performance_diagrams.plot_performance_diagram( observed_labels=target_values, forecast_probabilities=forecast_probabilities) pyplot.show() perf_diagram_file_name = '{0:s}/performance_diagram.jpg'.format( output_dir_name) print('Saving figure to: "{0:s}"...'.format(perf_diagram_file_name)) pyplot.savefig(perf_diagram_file_name, dpi=FIGURE_RESOLUTION_DPI) pyplot.close() attributes_diagrams.plot_attributes_diagram( observed_labels=target_values, forecast_probabilities=forecast_probabilities, num_bins=20) pyplot.show() attr_diagram_file_name = '{0:s}/attributes_diagram.jpg'.format( output_dir_name) print('Saving figure to: "{0:s}"...'.format(attr_diagram_file_name)) pyplot.savefig(attr_diagram_file_name, dpi=FIGURE_RESOLUTION_DPI) pyplot.close() def evaluate_cnn_example(validation_image_dict): """Evaluates CNN on validation data. :param validation_image_dict: Dictionary created by `read_many_image_files`. """ cnn_file_name = '{0:s}/pretrained_cnn/pretrained_cnn.h5'.format( MODULE4_DIR_NAME) cnn_metafile_name = find_model_metafile(model_file_name=cnn_file_name) cnn_model_object = read_keras_model(cnn_file_name) cnn_metadata_dict = read_model_metadata(cnn_metafile_name) validation_dir_name = '{0:s}/validation'.format(MODULE4_DIR_NAME) evaluate_cnn( cnn_model_object=cnn_model_object, image_dict=validation_image_dict, cnn_metadata_dict=cnn_metadata_dict, output_dir_name=validation_dir_name) print(SEPARATOR_STRING) def _get_binary_xentropy(target_values, forecast_probabilities): """Computes binary cross-entropy. This function satisfies the requirements for `cost_function` in the input to `run_permutation_test`. E = number of examples :param: target_values: length-E numpy array of target values (integer class labels). :param: forecast_probabilities: length-E numpy array with predicted probabilities of positive class (target value = 1). :return: cross_entropy: Cross-entropy. 
""" forecast_probabilities[ forecast_probabilities < MIN_PROBABILITY] = MIN_PROBABILITY forecast_probabilities[ forecast_probabilities > MAX_PROBABILITY] = MAX_PROBABILITY return -1 * numpy.nanmean( target_values * numpy.log2(forecast_probabilities) + (1 - target_values) * numpy.log2(1 - forecast_probabilities) ) def permutation_test_for_cnn( cnn_model_object, image_dict, cnn_metadata_dict, output_pickle_file_name, cost_function=_get_binary_xentropy): """Runs permutation test on CNN (convolutional neural net). E = number of examples (storm objects) C = number of channels (predictor variables) :param cnn_model_object: Trained instance of `keras.models.Model`. :param image_dict: Dictionary created by `read_image_file` or `read_many_image_files`. Should contain validation data (rather than training data), but this is not enforced. :param cnn_metadata_dict: Dictionary created by `train_cnn`. This will ensure that data in `image_dict` are processed the exact same way as the training data for `cnn_model_object`. :param output_pickle_file_name: Path to output file. `result_dict` (the output variable) will be saved here. :param cost_function: Cost function (used to evaluate model predictions). Must be negatively oriented (lower values are better). Must have the following inputs and outputs. Input: target_values: length-E numpy array of target values (integer class labels). Input: forecast_probabilities: length-E numpy array with predicted probabilities of positive class (target value = 1). Output: cost: Scalar value. :return: result_dict: Dictionary with the following keys. result_dict['permuted_predictor_name_by_step']: length-C list with name of predictor permuted at each step. result_dict['highest_cost_by_step']: length-C numpy array with corresponding cost at each step. highest_cost_by_step[m] = cost after permuting permuted_predictor_name_by_step[m]. result_dict['original_cost']: Original cost (before any permutation). result_dict['predictor_names_step1']: length-C list of predictor names. result_dict['costs_step1']: length-C numpy array of corresponding costs. costs_step1[m] = cost after permuting only predictor_names_step1[m]. This key and "predictor_names_step1" correspond to the Breiman version of the permutation test, while "permuted_predictor_name_by_step" and "highest_cost_by_step" correspond to the Lakshmanan version. """ predictor_names = image_dict[PREDICTOR_NAMES_KEY] predictor_matrix, _ = normalize_images( predictor_matrix=image_dict[PREDICTOR_MATRIX_KEY] + 0., predictor_names=image_dict[PREDICTOR_NAMES_KEY], normalization_dict=cnn_metadata_dict[NORMALIZATION_DICT_KEY]) predictor_matrix = predictor_matrix.astype('float32') target_values = binarize_target_images( target_matrix=image_dict[TARGET_MATRIX_KEY], binarization_threshold=cnn_metadata_dict[BINARIZATION_THRESHOLD_KEY]) # Get original cost (before permutation). 
these_probabilities = _apply_cnn(cnn_model_object=cnn_model_object, predictor_matrix=predictor_matrix) print(MINOR_SEPARATOR_STRING) original_cost = cost_function(target_values, these_probabilities) print('Original cost (no permutation): {0:.4e}\n'.format(original_cost)) num_examples = len(target_values) remaining_predictor_names = predictor_names + [] current_step_num = 0 permuted_predictor_name_by_step = [] highest_cost_by_step = [] predictor_names_step1 = [] costs_step1 = [] while len(remaining_predictor_names) > 0: current_step_num += 1 highest_cost = -numpy.inf best_predictor_name = None best_predictor_permuted_values = None for this_predictor_name in remaining_predictor_names: print( ('Trying predictor "{0:s}" at step {1:d} of permutation test...' ).format(this_predictor_name, current_step_num) ) this_predictor_index = predictor_names.index(this_predictor_name) this_predictor_matrix = predictor_matrix + 0. for i in range(num_examples): this_predictor_matrix[i, ..., this_predictor_index] = ( numpy.random.permutation( this_predictor_matrix[i, ..., this_predictor_index]) ) print(MINOR_SEPARATOR_STRING) these_probabilities = _apply_cnn( cnn_model_object=cnn_model_object, predictor_matrix=this_predictor_matrix) print(MINOR_SEPARATOR_STRING) this_cost = cost_function(target_values, these_probabilities) print('Resulting cost = {0:.4e}'.format(this_cost)) if current_step_num == 1: predictor_names_step1.append(this_predictor_name) costs_step1.append(this_cost) if this_cost < highest_cost: continue highest_cost = this_cost + 0. best_predictor_name = this_predictor_name + '' best_predictor_permuted_values = this_predictor_matrix[ ..., this_predictor_index] permuted_predictor_name_by_step.append(best_predictor_name) highest_cost_by_step.append(highest_cost) # Remove best predictor from list. remaining_predictor_names.remove(best_predictor_name) # Leave values of best predictor permuted. this_predictor_index = predictor_names.index(best_predictor_name) predictor_matrix[ ..., this_predictor_index] = best_predictor_permuted_values print('\nBest predictor = "{0:s}" ... new cost = {1:.4e}\n'.format( best_predictor_name, highest_cost)) result_dict = { PERMUTED_PREDICTORS_KEY: permuted_predictor_name_by_step, HIGHEST_COSTS_KEY: numpy.array(highest_cost_by_step), ORIGINAL_COST_KEY: original_cost, STEP1_PREDICTORS_KEY: predictor_names_step1, STEP1_COSTS_KEY: numpy.array(costs_step1) } _create_directory(file_name=output_pickle_file_name) print('Writing results to: "{0:s}"...'.format(output_pickle_file_name)) file_handle = open(output_pickle_file_name, 'wb') pickle.dump(result_dict, file_handle) file_handle.close() return result_dict def permutation_test_example(cnn_model_object, validation_image_dict, cnn_metadata_dict): """Runs the permutation test on validation data. :param cnn_model_object: See doc for `permutation_test_for_cnn`. :param validation_image_dict: Same. :param cnn_metadata_dict: Same. """ permutation_dir_name = '{0:s}/permutation_test'.format(MODULE4_DIR_NAME) main_permutation_file_name = '{0:s}/permutation_results.p'.format( permutation_dir_name) permutation_dict = permutation_test_for_cnn( cnn_model_object=cnn_model_object, image_dict=validation_image_dict, cnn_metadata_dict=cnn_metadata_dict, output_pickle_file_name=main_permutation_file_name) def _label_bars_in_graph(axes_object, y_coords, y_strings): """Labels bars in graph. J = number of bars :param axes_object: Instance of `matplotlib.axes._subplots.AxesSubplot`. Will plot on these axes. 
:param y_coords: length-J numpy array with y-coordinates of bars. :param y_strings: length-J list of labels. """ x_min, x_max = pyplot.xlim() x_coord_for_text = x_min + 0.01 * (x_max - x_min) for j in range(len(y_coords)): axes_object.text( x_coord_for_text, y_coords[j], y_strings[j], color='k', horizontalalignment='left', verticalalignment='center') def plot_breiman_results( result_dict, output_file_name, plot_percent_increase=False): """Plots results of Breiman (single-pass) permutation test. :param result_dict: Dictionary created by `permutation_test_for_cnn`. :param output_file_name: Path to output file. Figure will be saved here. :param plot_percent_increase: Boolean flag. If True, x-axis will be percentage of original cost (before permutation). If False, will be actual cost. """ cost_values = result_dict[STEP1_COSTS_KEY] predictor_names = result_dict[STEP1_PREDICTORS_KEY] sort_indices = numpy.argsort(cost_values) cost_values = cost_values[sort_indices] predictor_names = [predictor_names[k] for k in sort_indices] x_coords = numpy.concatenate(( numpy.array([result_dict[ORIGINAL_COST_KEY]]), cost_values )) if plot_percent_increase: x_coords = 100 * x_coords / x_coords[0] y_strings = ['No permutation'] + predictor_names y_coords = numpy.linspace( 0, len(y_strings) - 1, num=len(y_strings), dtype=float) _, axes_object = pyplot.subplots( 1, 1, figsize=(FIGURE_WIDTH_INCHES, FIGURE_HEIGHT_INCHES) ) axes_object.barh( y_coords, x_coords, color=BAR_GRAPH_FACE_COLOUR, edgecolor=BAR_GRAPH_EDGE_COLOUR, linewidth=BAR_GRAPH_EDGE_WIDTH) pyplot.yticks([], []) pyplot.ylabel('Predictor permuted') if plot_percent_increase: pyplot.xlabel('Cost (percentage of original)') else: pyplot.xlabel('Cost') _label_bars_in_graph( axes_object=axes_object, y_coords=y_coords, y_strings=y_strings) pyplot.show() _create_directory(file_name=output_file_name) print('Saving figure to: "{0:s}"...'.format(output_file_name)) pyplot.savefig(output_file_name, dpi=FIGURE_RESOLUTION_DPI) pyplot.close() def plot_lakshmanan_results( result_dict, output_file_name, plot_percent_increase=False): """Plots results of Lakshmanan (multi-pass) permutation test. :param result_dict: See doc for `plot_breiman_results`. :param output_file_name: Same. :param plot_percent_increase: Same. """ x_coords = numpy.concatenate(( numpy.array([result_dict[ORIGINAL_COST_KEY]]), result_dict[HIGHEST_COSTS_KEY] )) if plot_percent_increase: x_coords = 100 * x_coords / x_coords[0] y_strings = ['No permutation'] + result_dict[PERMUTED_PREDICTORS_KEY] y_coords = numpy.linspace( 0, len(y_strings) - 1, num=len(y_strings), dtype=float )[::-1] _, axes_object = pyplot.subplots( 1, 1, figsize=(FIGURE_WIDTH_INCHES, FIGURE_HEIGHT_INCHES) ) axes_object.barh( y_coords, x_coords, color=BAR_GRAPH_FACE_COLOUR, edgecolor=BAR_GRAPH_EDGE_COLOUR, linewidth=BAR_GRAPH_EDGE_WIDTH) pyplot.yticks([], []) pyplot.ylabel('Predictor permuted') if plot_percent_increase: pyplot.xlabel('Cost (percentage of original)') else: pyplot.xlabel('Cost') _label_bars_in_graph( axes_object=axes_object, y_coords=y_coords, y_strings=y_strings) pyplot.show() _create_directory(file_name=output_file_name) print('Saving figure to: "{0:s}"...'.format(output_file_name)) pyplot.savefig(output_file_name, dpi=FIGURE_RESOLUTION_DPI) pyplot.close() def plot_breiman_results_example(permutation_dir_name, permutation_dict): """Plots results of Breiman permutation test. :param permutation_dir_name: Name of output directory. :param permutation_dict: Dictionary created by `permutation_test_for_cnn`. 
""" breiman_file_name = '{0:s}/breiman_results.jpg'.format(permutation_dir_name) plot_breiman_results( result_dict=permutation_dict, output_file_name=breiman_file_name, plot_percent_increase=False) def plot_lakshmanan_results_example(permutation_dir_name, permutation_dict): """Plots results of Lakshmanan permutation test. :param permutation_dir_name: Name of output directory. :param permutation_dict: Dictionary created by `permutation_test_for_cnn`. """ lakshmanan_file_name = '{0:s}/lakshmanan_results.jpg'.format( permutation_dir_name) plot_lakshmanan_results( result_dict=permutation_dict, output_file_name=lakshmanan_file_name, plot_percent_increase=False) def _gradient_descent_for_bwo( cnn_model_object, loss_tensor, init_function_or_matrices, num_iterations, learning_rate): """Does gradient descent (the nitty-gritty part) for backwards optimization. :param cnn_model_object: Trained instance of `keras.models.Model`. :param loss_tensor: Keras tensor, defining the loss function to be minimized. :param init_function_or_matrices: Either a function or list of numpy arrays. If function, will be used to initialize input matrices. See `create_gaussian_initializer` for an example. If list of numpy arrays, these are the input matrices themselves. Matrices should be processed in the exact same way that training data were processed (e.g., normalization method). Matrices must also be in the same order as training matrices, and the [q]th matrix in this list must have the same shape as the [q]th training matrix. :param num_iterations: Number of gradient-descent iterations (number of times that the input matrices are adjusted). :param learning_rate: Learning rate. At each iteration, each input value x will be decremented by `learning_rate * gradient`, where `gradient` is the gradient of the loss function with respect to x. :return: list_of_optimized_input_matrices: length-T list of optimized input matrices (numpy arrays), where T = number of input tensors to the model. If the input arg `init_function_or_matrices` is a list of numpy arrays (rather than a function), `list_of_optimized_input_matrices` will have the exact same shape, just with different values. 
""" if isinstance(cnn_model_object.input, list): list_of_input_tensors = cnn_model_object.input else: list_of_input_tensors = [cnn_model_object.input] num_input_tensors = len(list_of_input_tensors) list_of_gradient_tensors = K.gradients(loss_tensor, list_of_input_tensors) for i in range(num_input_tensors): list_of_gradient_tensors[i] /= K.maximum( K.sqrt(K.mean(list_of_gradient_tensors[i] ** 2)), K.epsilon() ) inputs_to_loss_and_gradients = K.function( list_of_input_tensors + [K.learning_phase()], ([loss_tensor] + list_of_gradient_tensors) ) if isinstance(init_function_or_matrices, list): list_of_optimized_input_matrices = copy.deepcopy( init_function_or_matrices) else: list_of_optimized_input_matrices = [None] * num_input_tensors for i in range(num_input_tensors): these_dimensions = numpy.array( [1] + list_of_input_tensors[i].get_shape().as_list()[1:], dtype=int) list_of_optimized_input_matrices[i] = init_function_or_matrices( these_dimensions) for j in range(num_iterations): these_outputs = inputs_to_loss_and_gradients( list_of_optimized_input_matrices + [0]) if numpy.mod(j, 100) == 0: print('Loss after {0:d} of {1:d} iterations: {2:.2e}'.format( j, num_iterations, these_outputs[0])) for i in range(num_input_tensors): list_of_optimized_input_matrices[i] -= ( these_outputs[i + 1] * learning_rate) print('Loss after {0:d} iterations: {1:.2e}'.format( num_iterations, these_outputs[0])) return list_of_optimized_input_matrices def bwo_for_class( cnn_model_object, target_class, init_function_or_matrices, num_iterations=DEFAULT_NUM_BWO_ITERATIONS, learning_rate=DEFAULT_BWO_LEARNING_RATE): """Does backwards optimization to maximize probability of target class. :param cnn_model_object: Trained instance of `keras.models.Model`. :param target_class: Synthetic input data will be created to maximize probability of this class. :param init_function_or_matrices: See doc for `_gradient_descent_for_bwo`. :param num_iterations: Same. :param learning_rate: Same. :return: list_of_optimized_input_matrices: Same. """ target_class = int(numpy.round(target_class)) num_iterations = int(numpy.round(num_iterations)) assert target_class >= 0 assert num_iterations > 0 assert learning_rate > 0. assert learning_rate < 1. num_output_neurons = ( cnn_model_object.layers[-1].output.get_shape().as_list()[-1] ) if num_output_neurons == 1: assert target_class <= 1 if target_class == 1: loss_tensor = K.mean( (cnn_model_object.layers[-1].output[..., 0] - 1) ** 2 ) else: loss_tensor = K.mean( cnn_model_object.layers[-1].output[..., 0] ** 2 ) else: assert target_class < num_output_neurons loss_tensor = K.mean( (cnn_model_object.layers[-1].output[..., target_class] - 1) ** 2 ) return _gradient_descent_for_bwo( cnn_model_object=cnn_model_object, loss_tensor=loss_tensor, init_function_or_matrices=init_function_or_matrices, num_iterations=num_iterations, learning_rate=learning_rate) def bwo_example1(validation_image_dict, normalization_dict, cnn_model_object): """Optimizes random example (storm object) for positive class. :param validation_image_dict: Dictionary created by `read_many_image_files`. :param normalization_dict: Dictionary created by `get_image_normalization_params`. :param cnn_model_object: Trained instance of `keras.models.Model`. """ orig_predictor_matrix = validation_image_dict[PREDICTOR_MATRIX_KEY][0, ...] 
predictor_names = validation_image_dict[PREDICTOR_NAMES_KEY] orig_predictor_matrix_norm, _ = normalize_images( predictor_matrix=orig_predictor_matrix + 0., predictor_names=predictor_names, normalization_dict=normalization_dict) orig_predictor_matrix_norm = numpy.expand_dims( orig_predictor_matrix_norm, axis=0) optimized_predictor_matrix_norm = bwo_for_class( cnn_model_object=cnn_model_object, target_class=1, init_function_or_matrices=[orig_predictor_matrix_norm] )[0][0, ...] optimized_predictor_matrix = denormalize_images( predictor_matrix=optimized_predictor_matrix_norm, predictor_names=predictor_names, normalization_dict=normalization_dict) temperature_index = predictor_names.index(TEMPERATURE_NAME) combined_temp_matrix_kelvins = numpy.concatenate( (orig_predictor_matrix[..., temperature_index], optimized_predictor_matrix[..., temperature_index]), axis=0) min_colour_temp_kelvins = numpy.percentile(combined_temp_matrix_kelvins, 1) max_colour_temp_kelvins = numpy.percentile(combined_temp_matrix_kelvins, 99) figure_object, _ = plot_many_predictors_with_barbs( predictor_matrix=orig_predictor_matrix, predictor_names=predictor_names, min_colour_temp_kelvins=min_colour_temp_kelvins, max_colour_temp_kelvins=max_colour_temp_kelvins) figure_object.suptitle('Real example (before optimization)') pyplot.show() figure_object, _ = plot_many_predictors_with_barbs( predictor_matrix=optimized_predictor_matrix, predictor_names=predictor_names, min_colour_temp_kelvins=min_colour_temp_kelvins, max_colour_temp_kelvins=max_colour_temp_kelvins) figure_object.suptitle('Synthetic example (after optimization)') pyplot.show() def bwo_example2(validation_image_dict, normalization_dict, cnn_model_object): """Optimizes random example (storm object) for negative class. :param validation_image_dict: Dictionary created by `read_many_image_files`. :param normalization_dict: Dictionary created by `get_image_normalization_params`. :param cnn_model_object: Trained instance of `keras.models.Model`. """ orig_predictor_matrix = validation_image_dict[PREDICTOR_MATRIX_KEY][0, ...] predictor_names = validation_image_dict[PREDICTOR_NAMES_KEY] orig_predictor_matrix_norm, _ = normalize_images( predictor_matrix=orig_predictor_matrix + 0., predictor_names=predictor_names, normalization_dict=normalization_dict) orig_predictor_matrix_norm = numpy.expand_dims( orig_predictor_matrix_norm, axis=0) optimized_predictor_matrix_norm = bwo_for_class( cnn_model_object=cnn_model_object, target_class=0, init_function_or_matrices=[orig_predictor_matrix_norm] )[0][0, ...] 
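    # bwo_for_class returns values in normalized space, so the optimized
    # example must be denormalized before plotting in physical units.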
optimized_predictor_matrix = denormalize_images( predictor_matrix=optimized_predictor_matrix_norm, predictor_names=predictor_names, normalization_dict=normalization_dict) temperature_index = predictor_names.index(TEMPERATURE_NAME) combined_temp_matrix_kelvins = numpy.concatenate( (orig_predictor_matrix[..., temperature_index], optimized_predictor_matrix[..., temperature_index]), axis=0) min_colour_temp_kelvins = numpy.percentile(combined_temp_matrix_kelvins, 1) max_colour_temp_kelvins = numpy.percentile(combined_temp_matrix_kelvins, 99) figure_object, _ = plot_many_predictors_with_barbs( predictor_matrix=orig_predictor_matrix, predictor_names=predictor_names, min_colour_temp_kelvins=min_colour_temp_kelvins, max_colour_temp_kelvins=max_colour_temp_kelvins) figure_object.suptitle('Real example (before optimization)') pyplot.show() figure_object, _ = plot_many_predictors_with_barbs( predictor_matrix=optimized_predictor_matrix, predictor_names=predictor_names, min_colour_temp_kelvins=min_colour_temp_kelvins, max_colour_temp_kelvins=max_colour_temp_kelvins) figure_object.suptitle('Synthetic example (after optimization)') pyplot.show() def bwo_example3(validation_image_dict, normalization_dict, cnn_model_object): """Optimizes extreme example (storm object) for positive class. :param validation_image_dict: Dictionary created by `read_many_image_files`. :param normalization_dict: Dictionary created by `get_image_normalization_params`. :param cnn_model_object: Trained instance of `keras.models.Model`. """ target_matrix_s01 = validation_image_dict[TARGET_MATRIX_KEY] example_index = numpy.unravel_index( numpy.argmax(target_matrix_s01), target_matrix_s01.shape )[0] orig_predictor_matrix = validation_image_dict[PREDICTOR_MATRIX_KEY][ example_index, ...] predictor_names = validation_image_dict[PREDICTOR_NAMES_KEY] orig_predictor_matrix_norm, _ = normalize_images( predictor_matrix=orig_predictor_matrix + 0., predictor_names=predictor_names, normalization_dict=normalization_dict) orig_predictor_matrix_norm = numpy.expand_dims( orig_predictor_matrix_norm, axis=0) optimized_predictor_matrix_norm = bwo_for_class( cnn_model_object=cnn_model_object, target_class=1, init_function_or_matrices=[orig_predictor_matrix_norm] )[0][0, ...] optimized_predictor_matrix = denormalize_images( predictor_matrix=optimized_predictor_matrix_norm, predictor_names=predictor_names, normalization_dict=normalization_dict) temperature_index = predictor_names.index(TEMPERATURE_NAME) combined_temp_matrix_kelvins = numpy.concatenate( (orig_predictor_matrix[..., temperature_index], optimized_predictor_matrix[..., temperature_index]), axis=0) min_colour_temp_kelvins = numpy.percentile(combined_temp_matrix_kelvins, 1) max_colour_temp_kelvins = numpy.percentile(combined_temp_matrix_kelvins, 99) figure_object, _ = plot_many_predictors_with_barbs( predictor_matrix=orig_predictor_matrix, predictor_names=predictor_names, min_colour_temp_kelvins=min_colour_temp_kelvins, max_colour_temp_kelvins=max_colour_temp_kelvins) figure_object.suptitle('Real example (before optimization)') pyplot.show() figure_object, _ = plot_many_predictors_with_barbs( predictor_matrix=optimized_predictor_matrix, predictor_names=predictor_names, min_colour_temp_kelvins=min_colour_temp_kelvins, max_colour_temp_kelvins=max_colour_temp_kelvins) figure_object.suptitle('Synthetic example (after optimization)') pyplot.show() def bwo_example4(validation_image_dict, normalization_dict, cnn_model_object): """Optimizes extreme example (storm object) for negative class. 
:param validation_image_dict: Dictionary created by `read_many_image_files`. :param normalization_dict: Dictionary created by `get_image_normalization_params`. :param cnn_model_object: Trained instance of `keras.models.Model`. """ target_matrix_s01 = validation_image_dict[TARGET_MATRIX_KEY] example_index = numpy.unravel_index( numpy.argmax(target_matrix_s01), target_matrix_s01.shape )[0] orig_predictor_matrix = validation_image_dict[PREDICTOR_MATRIX_KEY][ example_index, ...] predictor_names = validation_image_dict[PREDICTOR_NAMES_KEY] orig_predictor_matrix_norm, _ = normalize_images( predictor_matrix=orig_predictor_matrix + 0., predictor_names=predictor_names, normalization_dict=normalization_dict) orig_predictor_matrix_norm = numpy.expand_dims( orig_predictor_matrix_norm, axis=0) optimized_predictor_matrix_norm = bwo_for_class( cnn_model_object=cnn_model_object, target_class=0, init_function_or_matrices=[orig_predictor_matrix_norm] )[0][0, ...] optimized_predictor_matrix = denormalize_images( predictor_matrix=optimized_predictor_matrix_norm, predictor_names=predictor_names, normalization_dict=normalization_dict) temperature_index = predictor_names.index(TEMPERATURE_NAME) combined_temp_matrix_kelvins = numpy.concatenate( (orig_predictor_matrix[..., temperature_index], optimized_predictor_matrix[..., temperature_index]), axis=0) min_colour_temp_kelvins = numpy.percentile(combined_temp_matrix_kelvins, 1) max_colour_temp_kelvins = numpy.percentile(combined_temp_matrix_kelvins, 99) figure_object, _ = plot_many_predictors_with_barbs( predictor_matrix=orig_predictor_matrix, predictor_names=predictor_names, min_colour_temp_kelvins=min_colour_temp_kelvins, max_colour_temp_kelvins=max_colour_temp_kelvins) figure_object.suptitle('Real example (before optimization)') pyplot.show() figure_object, _ = plot_many_predictors_with_barbs( predictor_matrix=optimized_predictor_matrix, predictor_names=predictor_names, min_colour_temp_kelvins=min_colour_temp_kelvins, max_colour_temp_kelvins=max_colour_temp_kelvins) figure_object.suptitle('Synthetic example (after optimization)') pyplot.show() def _do_saliency_calculations( cnn_model_object, loss_tensor, list_of_input_matrices): """Does the nitty-gritty part of computing saliency maps. T = number of input tensors to the model E = number of examples (storm objects) :param cnn_model_object: Trained instance of `keras.models.Model`. :param loss_tensor: Keras tensor defining the loss function. :param list_of_input_matrices: length-T list of numpy arrays, comprising one or more examples (storm objects). list_of_input_matrices[i] must have the same dimensions as the [i]th input tensor to the model. :return: list_of_saliency_matrices: length-T list of numpy arrays, comprising the saliency map for each example. list_of_saliency_matrices[i] has the same dimensions as list_of_input_matrices[i] and defines the "saliency" of each value x, which is the gradient of the loss function with respect to x. 
""" if isinstance(cnn_model_object.input, list): list_of_input_tensors = cnn_model_object.input else: list_of_input_tensors = [cnn_model_object.input] list_of_gradient_tensors = K.gradients(loss_tensor, list_of_input_tensors) num_input_tensors = len(list_of_input_tensors) for i in range(num_input_tensors): list_of_gradient_tensors[i] /= K.maximum( K.std(list_of_gradient_tensors[i]), K.epsilon() ) inputs_to_gradients_function = K.function( list_of_input_tensors + [K.learning_phase()], list_of_gradient_tensors) list_of_saliency_matrices = inputs_to_gradients_function( list_of_input_matrices + [0]) for i in range(num_input_tensors): list_of_saliency_matrices[i] *= -1 return list_of_saliency_matrices def saliency_for_class(cnn_model_object, target_class, list_of_input_matrices): """For each input example, creates saliency map for prob of given class. :param cnn_model_object: Trained instance of `keras.models.Model`. :param target_class: Saliency maps will be created for probability of this class. :param list_of_input_matrices: See doc for `_do_saliency_calculations`. :return: list_of_saliency_matrices: Same. """ target_class = int(numpy.round(target_class)) assert target_class >= 0 num_output_neurons = ( cnn_model_object.layers[-1].output.get_shape().as_list()[-1] ) if num_output_neurons == 1: assert target_class <= 1 if target_class == 1: loss_tensor = K.mean( (cnn_model_object.layers[-1].output[..., 0] - 1) ** 2 ) else: loss_tensor = K.mean( cnn_model_object.layers[-1].output[..., 0] ** 2 ) else: assert target_class < num_output_neurons loss_tensor = K.mean( (cnn_model_object.layers[-1].output[..., target_class] - 1) ** 2 ) return _do_saliency_calculations( cnn_model_object=cnn_model_object, loss_tensor=loss_tensor, list_of_input_matrices=list_of_input_matrices) def plot_saliency_2d( saliency_matrix, axes_object, colour_map_object, max_absolute_contour_level, contour_interval, line_width=2): """Plots saliency map over 2-D grid (for one predictor). M = number of rows in grid N = number of columns in grid :param saliency_matrix: M-by-N numpy array of saliency values. :param axes_object: Instance of `matplotlib.axes._subplots.AxesSubplot`. Will plot on these axes. :param colour_map_object: Colour scheme (instance of `matplotlib.pyplot.cm`). :param max_absolute_contour_level: Max saliency to plot. The minimum saliency plotted will be `-1 * max_absolute_contour_level`. :param max_absolute_contour_level: Max absolute saliency value to plot. The min and max values, respectively, will be `-1 * max_absolute_contour_level` and `max_absolute_contour_level`. :param contour_interval: Saliency interval between successive contours. :param line_width: Width of contour lines. """ num_grid_rows = saliency_matrix.shape[0] num_grid_columns = saliency_matrix.shape[1] x_coords_unique = numpy.linspace( 0, num_grid_columns, num=num_grid_columns + 1, dtype=float) x_coords_unique = x_coords_unique[:-1] x_coords_unique = x_coords_unique + numpy.diff(x_coords_unique[:2]) / 2 y_coords_unique = numpy.linspace( 0, num_grid_rows, num=num_grid_rows + 1, dtype=float) y_coords_unique = y_coords_unique[:-1] y_coords_unique = y_coords_unique + numpy.diff(y_coords_unique[:2]) / 2 x_coord_matrix, y_coord_matrix = numpy.meshgrid(x_coords_unique, y_coords_unique) half_num_contours = int(numpy.round( 1 + max_absolute_contour_level / contour_interval )) # Plot positive values. 
these_contour_levels = numpy.linspace( 0., max_absolute_contour_level, num=half_num_contours) axes_object.contour( x_coord_matrix, y_coord_matrix, saliency_matrix, these_contour_levels, cmap=colour_map_object, vmin=numpy.min(these_contour_levels), vmax=numpy.max(these_contour_levels), linewidths=line_width, linestyles='solid', zorder=1e6) # Plot negative values. these_contour_levels = these_contour_levels[1:] axes_object.contour( x_coord_matrix, y_coord_matrix, -saliency_matrix, these_contour_levels, cmap=colour_map_object, vmin=numpy.min(these_contour_levels), vmax=numpy.max(these_contour_levels), linewidths=line_width, linestyles='dashed', zorder=1e6) def plot_many_saliency_maps( saliency_matrix, axes_objects_2d_list, colour_map_object, max_absolute_contour_level, contour_interval, line_width=2): """Plots 2-D saliency map for each predictor. M = number of rows in grid N = number of columns in grid C = number of predictors :param saliency_matrix: M-by-N-by-C numpy array of saliency values. :param axes_objects_2d_list: See doc for `_init_figure_panels`. :param colour_map_object: See doc for `plot_saliency_2d`. :param max_absolute_contour_level: Same. :param max_absolute_contour_level: Same. :param contour_interval: Same. :param line_width: Same. """ num_predictors = saliency_matrix.shape[-1] num_panel_rows = len(axes_objects_2d_list) num_panel_columns = len(axes_objects_2d_list[0]) for m in range(num_predictors): this_panel_row, this_panel_column = numpy.unravel_index( m, (num_panel_rows, num_panel_columns) ) plot_saliency_2d( saliency_matrix=saliency_matrix[..., m], axes_object=axes_objects_2d_list[this_panel_row][this_panel_column], colour_map_object=colour_map_object, max_absolute_contour_level=max_absolute_contour_level, contour_interval=contour_interval, line_width=line_width) def saliency_example1(validation_image_dict, normalization_dict, cnn_model_object): """Computes saliency map for random example wrt positive-class probability. :param validation_image_dict: Dictionary created by `read_many_image_files`. :param normalization_dict: Dictionary created by `get_image_normalization_params`. :param cnn_model_object: Trained instance of `keras.models.Model`. """ predictor_matrix = validation_image_dict[PREDICTOR_MATRIX_KEY][0, ...] predictor_names = validation_image_dict[PREDICTOR_NAMES_KEY] predictor_matrix_norm, _ = normalize_images( predictor_matrix=predictor_matrix + 0., predictor_names=predictor_names, normalization_dict=normalization_dict) predictor_matrix_norm = numpy.expand_dims(predictor_matrix_norm, axis=0) saliency_matrix = saliency_for_class( cnn_model_object=cnn_model_object, target_class=1, list_of_input_matrices=[predictor_matrix_norm] )[0][0, ...] 
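    # saliency_matrix has the same shape as the example (M x N x C).  Positive
    # values mean that increasing the predictor would increase the
    # target-class (here, positive-class) probability; negative values mean
    # the opposite.  The code below overlays saliency contours on the
    # predictor fields.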
temperature_index = predictor_names.index(TEMPERATURE_NAME) min_colour_temp_kelvins = numpy.percentile( predictor_matrix[..., temperature_index], 1) max_colour_temp_kelvins = numpy.percentile( predictor_matrix[..., temperature_index], 99) wind_indices = numpy.array([ predictor_names.index(U_WIND_NAME), predictor_names.index(V_WIND_NAME) ], dtype=int) max_colour_wind_speed_m_s01 = numpy.percentile( numpy.absolute(predictor_matrix[..., wind_indices]), 99) _, axes_objects_2d_list = plot_many_predictors_sans_barbs( predictor_matrix=predictor_matrix, predictor_names=predictor_names, min_colour_temp_kelvins=min_colour_temp_kelvins, max_colour_temp_kelvins=max_colour_temp_kelvins, max_colour_wind_speed_m_s01=max_colour_wind_speed_m_s01) max_absolute_contour_level = numpy.percentile( numpy.absolute(saliency_matrix), 99) contour_interval = max_absolute_contour_level / 10 plot_many_saliency_maps( saliency_matrix=saliency_matrix, axes_objects_2d_list=axes_objects_2d_list, colour_map_object=SALIENCY_COLOUR_MAP_OBJECT, max_absolute_contour_level=max_absolute_contour_level, contour_interval=contour_interval) pyplot.show() def saliency_example2(validation_image_dict, normalization_dict, cnn_model_object): """Computes saliency map for random example wrt negative-class probability. :param validation_image_dict: Dictionary created by `read_many_image_files`. :param normalization_dict: Dictionary created by `get_image_normalization_params`. :param cnn_model_object: Trained instance of `keras.models.Model`. """ predictor_matrix = validation_image_dict[PREDICTOR_MATRIX_KEY][0, ...] predictor_names = validation_image_dict[PREDICTOR_NAMES_KEY] predictor_matrix_norm, _ = normalize_images( predictor_matrix=predictor_matrix + 0., predictor_names=predictor_names, normalization_dict=normalization_dict) predictor_matrix_norm = numpy.expand_dims(predictor_matrix_norm, axis=0) saliency_matrix = saliency_for_class( cnn_model_object=cnn_model_object, target_class=0, list_of_input_matrices=[predictor_matrix_norm] )[0][0, ...] temperature_index = predictor_names.index(TEMPERATURE_NAME) min_colour_temp_kelvins = numpy.percentile( predictor_matrix[..., temperature_index], 1) max_colour_temp_kelvins = numpy.percentile( predictor_matrix[..., temperature_index], 99) wind_indices = numpy.array([ predictor_names.index(U_WIND_NAME), predictor_names.index(V_WIND_NAME) ], dtype=int) max_colour_wind_speed_m_s01 = numpy.percentile( numpy.absolute(predictor_matrix[..., wind_indices]), 99) _, axes_objects_2d_list = plot_many_predictors_sans_barbs( predictor_matrix=predictor_matrix, predictor_names=predictor_names, min_colour_temp_kelvins=min_colour_temp_kelvins, max_colour_temp_kelvins=max_colour_temp_kelvins, max_colour_wind_speed_m_s01=max_colour_wind_speed_m_s01) max_absolute_contour_level = numpy.percentile( numpy.absolute(saliency_matrix), 99) contour_interval = max_absolute_contour_level / 10 plot_many_saliency_maps( saliency_matrix=saliency_matrix, axes_objects_2d_list=axes_objects_2d_list, colour_map_object=SALIENCY_COLOUR_MAP_OBJECT, max_absolute_contour_level=max_absolute_contour_level, contour_interval=contour_interval) pyplot.show() def saliency_example3(validation_image_dict, normalization_dict, cnn_model_object): """Computes saliency map for extreme example wrt positive-class probability. :param validation_image_dict: Dictionary created by `read_many_image_files`. :param normalization_dict: Dictionary created by `get_image_normalization_params`. :param cnn_model_object: Trained instance of `keras.models.Model`. 
""" target_matrix_s01 = validation_image_dict[TARGET_MATRIX_KEY] example_index = numpy.unravel_index( numpy.argmax(target_matrix_s01), target_matrix_s01.shape )[0] predictor_matrix = validation_image_dict[PREDICTOR_MATRIX_KEY][ example_index, ...] predictor_names = validation_image_dict[PREDICTOR_NAMES_KEY] predictor_matrix_norm, _ = normalize_images( predictor_matrix=predictor_matrix + 0., predictor_names=predictor_names, normalization_dict=normalization_dict) predictor_matrix_norm = numpy.expand_dims(predictor_matrix_norm, axis=0) saliency_matrix = saliency_for_class( cnn_model_object=cnn_model_object, target_class=1, list_of_input_matrices=[predictor_matrix_norm] )[0][0, ...] temperature_index = predictor_names.index(TEMPERATURE_NAME) min_colour_temp_kelvins = numpy.percentile( predictor_matrix[..., temperature_index], 1) max_colour_temp_kelvins = numpy.percentile( predictor_matrix[..., temperature_index], 99) wind_indices = numpy.array([ predictor_names.index(U_WIND_NAME), predictor_names.index(V_WIND_NAME) ], dtype=int) max_colour_wind_speed_m_s01 = numpy.percentile( numpy.absolute(predictor_matrix[..., wind_indices]), 99) _, axes_objects_2d_list = plot_many_predictors_sans_barbs( predictor_matrix=predictor_matrix, predictor_names=predictor_names, min_colour_temp_kelvins=min_colour_temp_kelvins, max_colour_temp_kelvins=max_colour_temp_kelvins, max_colour_wind_speed_m_s01=max_colour_wind_speed_m_s01) max_absolute_contour_level = numpy.percentile( numpy.absolute(saliency_matrix), 99) contour_interval = max_absolute_contour_level / 10 plot_many_saliency_maps( saliency_matrix=saliency_matrix, axes_objects_2d_list=axes_objects_2d_list, colour_map_object=SALIENCY_COLOUR_MAP_OBJECT, max_absolute_contour_level=max_absolute_contour_level, contour_interval=contour_interval) pyplot.show() def saliency_example4(validation_image_dict, normalization_dict, cnn_model_object): """Computes saliency map for extreme example wrt negative-class probability. :param validation_image_dict: Dictionary created by `read_many_image_files`. :param normalization_dict: Dictionary created by `get_image_normalization_params`. :param cnn_model_object: Trained instance of `keras.models.Model`. """ target_matrix_s01 = validation_image_dict[TARGET_MATRIX_KEY] example_index = numpy.unravel_index( numpy.argmax(target_matrix_s01), target_matrix_s01.shape )[0] predictor_matrix = validation_image_dict[PREDICTOR_MATRIX_KEY][ example_index, ...] predictor_names = validation_image_dict[PREDICTOR_NAMES_KEY] predictor_matrix_norm, _ = normalize_images( predictor_matrix=predictor_matrix + 0., predictor_names=predictor_names, normalization_dict=normalization_dict) predictor_matrix_norm = numpy.expand_dims(predictor_matrix_norm, axis=0) saliency_matrix = saliency_for_class( cnn_model_object=cnn_model_object, target_class=0, list_of_input_matrices=[predictor_matrix_norm] )[0][0, ...] 
temperature_index = predictor_names.index(TEMPERATURE_NAME) min_colour_temp_kelvins = numpy.percentile( predictor_matrix[..., temperature_index], 1) max_colour_temp_kelvins = numpy.percentile( predictor_matrix[..., temperature_index], 99) wind_indices = numpy.array([ predictor_names.index(U_WIND_NAME), predictor_names.index(V_WIND_NAME) ], dtype=int) max_colour_wind_speed_m_s01 = numpy.percentile( numpy.absolute(predictor_matrix[..., wind_indices]), 99) _, axes_objects_2d_list = plot_many_predictors_sans_barbs( predictor_matrix=predictor_matrix, predictor_names=predictor_names, min_colour_temp_kelvins=min_colour_temp_kelvins, max_colour_temp_kelvins=max_colour_temp_kelvins, max_colour_wind_speed_m_s01=max_colour_wind_speed_m_s01) max_absolute_contour_level = numpy.percentile( numpy.absolute(saliency_matrix), 99) contour_interval = max_absolute_contour_level / 10 plot_many_saliency_maps( saliency_matrix=saliency_matrix, axes_objects_2d_list=axes_objects_2d_list, colour_map_object=SALIENCY_COLOUR_MAP_OBJECT, max_absolute_contour_level=max_absolute_contour_level, contour_interval=contour_interval) pyplot.show() def _create_smoothing_filter( smoothing_radius_px, num_half_filter_rows, num_half_filter_columns, num_channels): """Creates convolution filter for Gaussian smoothing. M = number of rows in filter N = number of columns in filter C = number of channels (or "variables" or "features") to smooth. Each channel will be smoothed independently. :param smoothing_radius_px: e-folding radius (pixels). :param num_half_filter_rows: Number of rows in one half of filter. Total number of rows will be 2 * `num_half_filter_rows` + 1. :param num_half_filter_columns: Same but for columns. :param num_channels: C in the above discussion. :return: weight_matrix: M-by-N-by-C-by-C numpy array of convolution weights. """ num_filter_rows = 2 * num_half_filter_rows + 1 num_filter_columns = 2 * num_half_filter_columns + 1 row_offsets_unique = numpy.linspace( -num_half_filter_rows, num_half_filter_rows, num=num_filter_rows, dtype=float) column_offsets_unique = numpy.linspace( -num_half_filter_columns, num_half_filter_columns, num=num_filter_columns, dtype=float) column_offset_matrix, row_offset_matrix = numpy.meshgrid( column_offsets_unique, row_offsets_unique) pixel_offset_matrix = numpy.sqrt( row_offset_matrix ** 2 + column_offset_matrix ** 2) small_weight_matrix = numpy.exp( -pixel_offset_matrix ** 2 / (2 * smoothing_radius_px ** 2) ) small_weight_matrix = small_weight_matrix / numpy.sum(small_weight_matrix) weight_matrix = numpy.zeros( (num_filter_rows, num_filter_columns, num_channels, num_channels) ) for k in range(num_channels): weight_matrix[..., k, k] = small_weight_matrix return weight_matrix def setup_ucn( num_input_features, first_num_rows, first_num_columns, upsampling_factors, num_output_channels, use_activation_for_out_layer=False, use_bn_for_out_layer=True, use_transposed_conv=False, smoothing_radius_px=None): """Creates (but does not train) upconvnet. L = number of conv or deconv layers :param num_input_features: Number of input features. :param first_num_rows: Number of rows in input to first deconv layer. The input features will be reshaped into a grid with this many rows. :param first_num_columns: Same but for columns. :param upsampling_factors: length-L numpy array of upsampling factors. Must all be positive integers. :param num_output_channels: Number of channels in output images. :param use_activation_for_out_layer: Boolean flag. If True, activation will be applied to output layer. 
:param use_bn_for_out_layer: Boolean flag. If True, batch normalization will be applied to output layer. :param use_transposed_conv: Boolean flag. If True, upsampling will be done with transposed-convolution layers. If False, each upsampling will be done with an upsampling layer followed by a conv layer. :param smoothing_radius_px: Smoothing radius (pixels). Gaussian smoothing with this e-folding radius will be done after each upsampling. If `smoothing_radius_px is None`, no smoothing will be done. :return: ucn_model_object: Untrained instance of `keras.models.Model`. """ if smoothing_radius_px is not None: num_half_smoothing_rows = int(numpy.round( (NUM_SMOOTHING_FILTER_ROWS - 1) / 2 )) num_half_smoothing_columns = int(numpy.round( (NUM_SMOOTHING_FILTER_COLUMNS - 1) / 2 )) regularizer_object = keras.regularizers.l1_l2(l1=L1_WEIGHT, l2=L2_WEIGHT) input_layer_object = keras.layers.Input(shape=(num_input_features,)) current_num_filters = int(numpy.round( num_input_features / (first_num_rows * first_num_columns) )) layer_object = keras.layers.Reshape( target_shape=(first_num_rows, first_num_columns, current_num_filters) )(input_layer_object) num_main_layers = len(upsampling_factors) for i in range(num_main_layers): this_upsampling_factor = upsampling_factors[i] if i == num_main_layers - 1: current_num_filters = num_output_channels + 0 elif this_upsampling_factor == 1: current_num_filters = int(numpy.round(current_num_filters / 2)) if use_transposed_conv: if this_upsampling_factor > 1: this_padding_arg = 'same' else: this_padding_arg = 'valid' layer_object = keras.layers.Conv2DTranspose( filters=current_num_filters, kernel_size=(NUM_CONV_FILTER_ROWS, NUM_CONV_FILTER_COLUMNS), strides=(this_upsampling_factor, this_upsampling_factor), padding=this_padding_arg, data_format='channels_last', dilation_rate=(1, 1), activation=None, use_bias=True, kernel_initializer='glorot_uniform', bias_initializer='zeros', kernel_regularizer=regularizer_object )(layer_object) else: if this_upsampling_factor > 1: try: layer_object = keras.layers.UpSampling2D( size=(this_upsampling_factor, this_upsampling_factor), data_format='channels_last', interpolation='nearest' )(layer_object) except: layer_object = keras.layers.UpSampling2D( size=(this_upsampling_factor, this_upsampling_factor), data_format='channels_last' )(layer_object) layer_object = keras.layers.Conv2D( filters=current_num_filters, kernel_size=(NUM_CONV_FILTER_ROWS, NUM_CONV_FILTER_COLUMNS), strides=(1, 1), padding='same', data_format='channels_last', dilation_rate=(1, 1), activation=None, use_bias=True, kernel_initializer='glorot_uniform', bias_initializer='zeros', kernel_regularizer=regularizer_object )(layer_object) if this_upsampling_factor == 1: layer_object = keras.layers.ZeroPadding2D( padding=(1, 1), data_format='channels_last' )(layer_object) if smoothing_radius_px is not None: this_weight_matrix = _create_smoothing_filter( smoothing_radius_px=smoothing_radius_px, num_half_filter_rows=num_half_smoothing_rows, num_half_filter_columns=num_half_smoothing_columns, num_channels=current_num_filters) this_bias_vector = numpy.zeros(current_num_filters) layer_object = keras.layers.Conv2D( filters=current_num_filters, kernel_size=(NUM_SMOOTHING_FILTER_ROWS, NUM_SMOOTHING_FILTER_COLUMNS), strides=(1, 1), padding='same', data_format='channels_last', dilation_rate=(1, 1), activation=None, use_bias=True, kernel_initializer='glorot_uniform', bias_initializer='zeros', kernel_regularizer=regularizer_object, trainable=False, weights=[this_weight_matrix, 
this_bias_vector] )(layer_object) if i < num_main_layers - 1 or use_activation_for_out_layer: layer_object = keras.layers.LeakyReLU( alpha=SLOPE_FOR_RELU )(layer_object) if i < num_main_layers - 1 or use_bn_for_out_layer: layer_object = keras.layers.BatchNormalization( axis=-1, center=True, scale=True )(layer_object) ucn_model_object = keras.models.Model( inputs=input_layer_object, outputs=layer_object) ucn_model_object.compile( loss=keras.losses.mean_squared_error, optimizer=keras.optimizers.Adam()) ucn_model_object.summary() return ucn_model_object def get_cnn_flatten_layer(cnn_model_object): """Finds flattening layer in CNN. This method assumes that there is only one flattening layer. If there are several, this method will return the first (shallowest). :param cnn_model_object: Instance of `keras.models.Model`. :return: layer_name: Name of flattening layer. :raises: TypeError: if flattening layer cannot be found. """ layer_names = [lyr.name for lyr in cnn_model_object.layers] flattening_flags = numpy.array( ['flatten' in n for n in layer_names], dtype=bool) flattening_indices = numpy.where(flattening_flags)[0] if len(flattening_indices) == 0: error_string = ( 'Cannot find flattening layer in model. Layer names are listed ' 'below.\n{0:s}' ).format(str(layer_names)) raise TypeError(error_string) return layer_names[flattening_indices[0]] def setup_ucn_example(cnn_model_object): """Example of UCN architecture (with transposed conv, no smoothing). :param cnn_model_object: Trained CNN (instance of `keras.models.Model`). """ cnn_feature_layer_name = get_cnn_flatten_layer(cnn_model_object) cnn_feature_layer_object = cnn_model_object.get_layer( name=cnn_feature_layer_name) cnn_feature_dimensions = numpy.array( cnn_feature_layer_object.input.shape[1:], dtype=int) num_input_features = numpy.prod(cnn_feature_dimensions) first_num_rows = cnn_feature_dimensions[0] first_num_columns = cnn_feature_dimensions[1] num_output_channels = numpy.array( cnn_model_object.input.shape[1:], dtype=int )[-1] upsampling_factors = numpy.array([2, 1, 1, 2, 1, 1], dtype=int) ucn_model_object = setup_ucn( num_input_features=num_input_features, first_num_rows=first_num_rows, first_num_columns=first_num_columns, upsampling_factors=upsampling_factors, num_output_channels=num_output_channels, use_transposed_conv=True, smoothing_radius_px=None) def ucn_generator(netcdf_file_names, num_examples_per_batch, normalization_dict, cnn_model_object, cnn_feature_layer_name): """Generates training examples for UCN (upconvolutional network) on the fly. E = number of examples (storm objects) M = number of rows in each storm-centered grid N = number of columns in each storm-centered grid C = number of channels (predictor variables) Z = number of scalar features (neurons in layer `cnn_feature_layer_name` of the CNN specified by `cnn_model_object`) :param netcdf_file_names: 1-D list of paths to input (NetCDF) files. :param num_examples_per_batch: Number of examples per training batch. :param normalization_dict: See doc for `normalize_images`. You cannot leave this as None. :param cnn_model_object: Trained CNN model (instance of `keras.models.Model`). This will be used to turn images stored in `netcdf_file_names` into scalar features. :param cnn_feature_layer_name: The "scalar features" will be the set of activations from this layer. :return: feature_matrix: E-by-Z numpy array of scalar features. These are the "predictors" for the upconv network. :return: target_matrix: E-by-M-by-N-by-C numpy array of target images. 
These are the predictors for the CNN and the targets for the upconv network. :raises: TypeError: if `normalization_dict is None`. """ if normalization_dict is None: error_string = 'normalization_dict cannot be None. Must be specified.' raise TypeError(error_string) random.shuffle(netcdf_file_names) num_files = len(netcdf_file_names) file_index = 0 num_examples_in_memory = 0 full_target_matrix = None predictor_names = None while True: while num_examples_in_memory < num_examples_per_batch: print('Reading data from: "{0:s}"...'.format( netcdf_file_names[file_index])) this_image_dict = read_image_file(netcdf_file_names[file_index]) predictor_names = this_image_dict[PREDICTOR_NAMES_KEY] file_index += 1 if file_index >= num_files: file_index = 0 if full_target_matrix is None or full_target_matrix.size == 0: full_target_matrix = this_image_dict[PREDICTOR_MATRIX_KEY] + 0. else: full_target_matrix = numpy.concatenate( (full_target_matrix, this_image_dict[PREDICTOR_MATRIX_KEY]), axis=0) num_examples_in_memory = full_target_matrix.shape[0] batch_indices = numpy.linspace( 0, num_examples_in_memory - 1, num=num_examples_in_memory, dtype=int) batch_indices = numpy.random.choice( batch_indices, size=num_examples_per_batch, replace=False) target_matrix, _ = normalize_images( predictor_matrix=full_target_matrix[batch_indices, ...], predictor_names=predictor_names, normalization_dict=normalization_dict) target_matrix = target_matrix.astype('float32') feature_matrix = _apply_cnn( cnn_model_object=cnn_model_object, predictor_matrix=target_matrix, verbose=False, output_layer_name=cnn_feature_layer_name) num_examples_in_memory = 0 full_target_matrix = None yield (feature_matrix, target_matrix) def train_ucn( ucn_model_object, training_file_names, normalization_dict, cnn_model_object, cnn_file_name, cnn_feature_layer_name, num_examples_per_batch, num_epochs, num_training_batches_per_epoch, output_model_file_name, validation_file_names=None, num_validation_batches_per_epoch=None): """Trains UCN (upconvolutional network). :param ucn_model_object: Untrained instance of `keras.models.Model` (may be created by `setup_ucn`), representing the upconv network. :param training_file_names: 1-D list of paths to training files (must be readable by `read_image_file`). :param normalization_dict: See doc for `ucn_generator`. :param cnn_model_object: Same. :param cnn_file_name: Path to file with trained CNN (represented by `cnn_model_object`). This is needed only for the output dictionary (metadata). :param cnn_feature_layer_name: Same. :param num_examples_per_batch: Same. :param num_epochs: Number of epochs. :param num_training_batches_per_epoch: Number of training batches furnished to model in each epoch. :param output_model_file_name: Path to output file. The model will be saved as an HDF5 file (extension should be ".h5", but this is not enforced). :param validation_file_names: 1-D list of paths to training files (must be readable by `read_image_file`). If `validation_file_names is None`, will omit on-the-fly validation. :param num_validation_batches_per_epoch: [used only if `validation_file_names is not None`] Number of validation batches furnished to model in each epoch. :return: ucn_metadata_dict: Dictionary with the following keys. ucn_metadata_dict['training_file_names']: See input doc. ucn_metadata_dict['normalization_dict']: Same. ucn_metadata_dict['cnn_file_name']: Same. ucn_metadata_dict['cnn_feature_layer_name']: Same. ucn_metadata_dict['num_examples_per_batch']: Same. 
ucn_metadata_dict['num_training_batches_per_epoch']: Same. ucn_metadata_dict['validation_file_names']: Same. ucn_metadata_dict['num_validation_batches_per_epoch']: Same. """ _create_directory(file_name=output_model_file_name) if validation_file_names is None: checkpoint_object = keras.callbacks.ModelCheckpoint( filepath=output_model_file_name, monitor='loss', verbose=1, save_best_only=False, save_weights_only=False, mode='min', period=1) else: checkpoint_object = keras.callbacks.ModelCheckpoint( filepath=output_model_file_name, monitor='val_loss', verbose=1, save_best_only=True, save_weights_only=False, mode='min', period=1) list_of_callback_objects = [checkpoint_object] ucn_metadata_dict = { TRAINING_FILES_KEY: training_file_names, NORMALIZATION_DICT_KEY: normalization_dict, CNN_FILE_KEY: cnn_file_name, CNN_FEATURE_LAYER_KEY: cnn_feature_layer_name, NUM_EXAMPLES_PER_BATCH_KEY: num_examples_per_batch, NUM_TRAINING_BATCHES_KEY: num_training_batches_per_epoch, VALIDATION_FILES_KEY: validation_file_names, NUM_VALIDATION_BATCHES_KEY: num_validation_batches_per_epoch } training_generator = ucn_generator( netcdf_file_names=training_file_names, num_examples_per_batch=num_examples_per_batch, normalization_dict=normalization_dict, cnn_model_object=cnn_model_object, cnn_feature_layer_name=cnn_feature_layer_name) if validation_file_names is None: ucn_model_object.fit_generator( generator=training_generator, steps_per_epoch=num_training_batches_per_epoch, epochs=num_epochs, verbose=1, callbacks=list_of_callback_objects, workers=0) return ucn_metadata_dict early_stopping_object = keras.callbacks.EarlyStopping( monitor='val_loss', min_delta=MIN_MSE_DECREASE_FOR_EARLY_STOP, patience=NUM_EPOCHS_FOR_EARLY_STOPPING, verbose=1, mode='min') list_of_callback_objects.append(early_stopping_object) validation_generator = ucn_generator( netcdf_file_names=validation_file_names, num_examples_per_batch=num_examples_per_batch, normalization_dict=normalization_dict, cnn_model_object=cnn_model_object, cnn_feature_layer_name=cnn_feature_layer_name) ucn_model_object.fit_generator( generator=training_generator, steps_per_epoch=num_training_batches_per_epoch, epochs=num_epochs, verbose=1, callbacks=list_of_callback_objects, workers=0, validation_data=validation_generator, validation_steps=num_validation_batches_per_epoch) return ucn_metadata_dict def train_ucn_example(ucn_model_object, training_file_names, normalization_dict, cnn_model_object, cnn_file_name): """Actually trains the UCN (upconvolutional network). :param ucn_model_object: See doc for `train_ucn`. :param training_file_names: Same. :param normalization_dict: Same. :param cnn_model_object: See doc for `cnn_model_object` in `train_ucn`. :param cnn_file_name: See doc for `train_ucn`. """ validation_file_names = find_many_image_files( first_date_string='20150101', last_date_string='20151231') ucn_file_name = '{0:s}/ucn_model.h5'.format(MODULE4_DIR_NAME) ucn_metadata_dict = train_ucn( ucn_model_object=ucn_model_object, training_file_names=training_file_names, normalization_dict=normalization_dict, cnn_model_object=cnn_model_object, cnn_file_name=cnn_file_name, cnn_feature_layer_name=get_cnn_flatten_layer(cnn_model_object), num_examples_per_batch=100, num_epochs=10, num_training_batches_per_epoch=10, output_model_file_name=ucn_file_name, validation_file_names=validation_file_names, num_validation_batches_per_epoch=10) def apply_ucn_example1( validation_image_dict, normalization_dict, cnn_model_object): """Uses upconvnet to reconstruct random validation example. 
:param validation_image_dict: Dictionary created by `read_many_image_files`. :param normalization_dict: Dictionary created by `get_image_normalization_params`. :param cnn_model_object: Trained instance of `keras.models.Model`, representing the CNN that goes with the upconvnet. """ ucn_file_name = '{0:s}/pretrained_cnn/pretrained_ucn.h5'.format( MODULE4_DIR_NAME) ucn_metafile_name = find_model_metafile(model_file_name=ucn_file_name) ucn_model_object = read_keras_model(ucn_file_name) ucn_metadata_dict = read_model_metadata(ucn_metafile_name) image_matrix = validation_image_dict[PREDICTOR_MATRIX_KEY][0, ...] predictor_names = validation_image_dict[PREDICTOR_NAMES_KEY] image_matrix_norm, _ = normalize_images( predictor_matrix=image_matrix + 0., predictor_names=predictor_names, normalization_dict=normalization_dict) image_matrix_norm = numpy.expand_dims(image_matrix_norm, axis=0) feature_matrix = _apply_cnn( cnn_model_object=cnn_model_object, predictor_matrix=image_matrix_norm, output_layer_name=get_cnn_flatten_layer(cnn_model_object), verbose=False) reconstructed_image_matrix_norm = ucn_model_object.predict( feature_matrix, batch_size=1) reconstructed_image_matrix = denormalize_images( predictor_matrix=reconstructed_image_matrix_norm, predictor_names=predictor_names, normalization_dict=normalization_dict )[0, ...] temperature_index = predictor_names.index(TEMPERATURE_NAME) combined_temp_matrix_kelvins = numpy.concatenate( (image_matrix[..., temperature_index], reconstructed_image_matrix[..., temperature_index]), axis=0) min_colour_temp_kelvins = numpy.percentile(combined_temp_matrix_kelvins, 1) max_colour_temp_kelvins = numpy.percentile(combined_temp_matrix_kelvins, 99) figure_object, _ = plot_many_predictors_with_barbs( predictor_matrix=image_matrix, predictor_names=predictor_names, min_colour_temp_kelvins=min_colour_temp_kelvins, max_colour_temp_kelvins=max_colour_temp_kelvins) figure_object.suptitle('Original image (CNN input)') pyplot.show() figure_object, _ = plot_many_predictors_with_barbs( predictor_matrix=reconstructed_image_matrix, predictor_names=predictor_names, min_colour_temp_kelvins=min_colour_temp_kelvins, max_colour_temp_kelvins=max_colour_temp_kelvins) figure_object.suptitle('Reconstructed image (upconvnet output)') pyplot.show() def apply_ucn_example2( validation_image_dict, normalization_dict, ucn_model_object, cnn_model_object): """Uses upconvnet to reconstruct extreme validation example. :param validation_image_dict: Dictionary created by `read_many_image_files`. :param normalization_dict: Dictionary created by `get_image_normalization_params`. :param ucn_model_object: Trained instance of `keras.models.Model`, representing the upconvnet. :param cnn_model_object: Trained instance of `keras.models.Model`, representing the CNN that goes with the upconvnet. """ target_matrix_s01 = validation_image_dict[TARGET_MATRIX_KEY] example_index = numpy.unravel_index( numpy.argmax(target_matrix_s01), target_matrix_s01.shape )[0] image_matrix = validation_image_dict[PREDICTOR_MATRIX_KEY][ example_index, ...] 
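    # Round trip below: the CNN ("encoder") maps the normalized image to
    # scalar features at its flattening layer, then the upconvnet ("decoder")
    # maps those features back to image space.  Comparing the two plots shows
    # how much information the CNN's internal representation preserves.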
predictor_names = validation_image_dict[PREDICTOR_NAMES_KEY] image_matrix_norm, _ = normalize_images( predictor_matrix=image_matrix + 0., predictor_names=predictor_names, normalization_dict=normalization_dict) image_matrix_norm = numpy.expand_dims(image_matrix_norm, axis=0) feature_matrix = _apply_cnn( cnn_model_object=cnn_model_object, predictor_matrix=image_matrix_norm, output_layer_name=get_cnn_flatten_layer(cnn_model_object), verbose=False) reconstructed_image_matrix_norm = ucn_model_object.predict( feature_matrix, batch_size=1) reconstructed_image_matrix = denormalize_images( predictor_matrix=reconstructed_image_matrix_norm, predictor_names=predictor_names, normalization_dict=normalization_dict )[0, ...] temperature_index = predictor_names.index(TEMPERATURE_NAME) combined_temp_matrix_kelvins = numpy.concatenate( (image_matrix[..., temperature_index], reconstructed_image_matrix[..., temperature_index]), axis=0) min_colour_temp_kelvins = numpy.percentile(combined_temp_matrix_kelvins, 1) max_colour_temp_kelvins = numpy.percentile(combined_temp_matrix_kelvins, 99) figure_object, _ = plot_many_predictors_with_barbs( predictor_matrix=image_matrix, predictor_names=predictor_names, min_colour_temp_kelvins=min_colour_temp_kelvins, max_colour_temp_kelvins=max_colour_temp_kelvins) figure_object.suptitle('Original image (CNN input)') pyplot.show() figure_object, _ = plot_many_predictors_with_barbs( predictor_matrix=reconstructed_image_matrix, predictor_names=predictor_names, min_colour_temp_kelvins=min_colour_temp_kelvins, max_colour_temp_kelvins=max_colour_temp_kelvins) figure_object.suptitle('Reconstructed image (upconvnet output)') pyplot.show() def _normalize_features(feature_matrix, feature_means=None, feature_standard_deviations=None): """Normalizes scalar features to z-scores. E = number of examples (storm objects) Z = number of features :param feature_matrix: E-by-Z numpy array of features. :param feature_means: length-Z numpy array of mean values. If `feature_means is None`, these will be computed on the fly from `feature_matrix`. :param feature_standard_deviations: Same but with standard deviations. :return: feature_matrix: Normalized version of input. :return: feature_means: See input doc. :return: feature_standard_deviations: See input doc. """ if feature_means is None or feature_standard_deviations is None: feature_means = numpy.mean(feature_matrix, axis=0) feature_standard_deviations = numpy.std(feature_matrix, axis=0, ddof=1) num_examples = feature_matrix.shape[0] num_features = feature_matrix.shape[1] mean_matrix = numpy.reshape(feature_means, (1, num_features)) mean_matrix = numpy.repeat(mean_matrix, repeats=num_examples, axis=0) stdev_matrix = numpy.reshape(feature_standard_deviations, (1, num_features)) stdev_matrix = numpy.repeat(stdev_matrix, repeats=num_examples, axis=0) feature_matrix = (feature_matrix - mean_matrix) / stdev_matrix return feature_matrix, feature_means, feature_standard_deviations def _fit_svd(baseline_feature_matrix, test_feature_matrix, percent_variance_to_keep): """Fits SVD (singular-value decomposition) model. B = number of baseline examples (storm objects) T = number of testing examples (storm objects) Z = number of scalar features (produced by dense layer of a CNN) K = number of modes (top eigenvectors) retained The SVD model will be fit only to the baseline set, but both the baseline and testing sets will be used to compute normalization parameters (means and standard deviations). 
Before, when only the baseline set was used to compute normalization params, the testing set had huge standard deviations, which caused the results of novelty detection to be physically unrealistic. :param baseline_feature_matrix: B-by-Z numpy array of features. :param test_feature_matrix: T-by-Z numpy array of features. :param percent_variance_to_keep: Percentage of variance to keep. Determines how many eigenvectors (K in the above discussion) will be used in the SVD model. :return: svd_dictionary: Dictionary with the following keys. svd_dictionary['eof_matrix']: Z-by-K numpy array, where each column is an EOF (empirical orthogonal function). svd_dictionary['feature_means']: length-Z numpy array with mean value of each feature (before transformation). svd_dictionary['feature_standard_deviations']: length-Z numpy array with standard deviation of each feature (before transformation). """ combined_feature_matrix = numpy.concatenate( (baseline_feature_matrix, test_feature_matrix), axis=0) combined_feature_matrix, feature_means, feature_standard_deviations = ( _normalize_features(feature_matrix=combined_feature_matrix) ) num_baseline_examples = baseline_feature_matrix.shape[0] baseline_feature_matrix = combined_feature_matrix[ :num_baseline_examples, ...] eigenvalues, eof_matrix = numpy.linalg.svd(baseline_feature_matrix)[1:] eigenvalues = eigenvalues ** 2 explained_variances = eigenvalues / numpy.sum(eigenvalues) cumulative_explained_variances = numpy.cumsum(explained_variances) fraction_of_variance_to_keep = 0.01 * percent_variance_to_keep num_modes_to_keep = 1 + numpy.where( cumulative_explained_variances >= fraction_of_variance_to_keep )[0][0] print( ('Number of modes required to explain {0:f}% of variance: {1:d}' ).format(percent_variance_to_keep, num_modes_to_keep) ) return { EOF_MATRIX_KEY: numpy.transpose(eof_matrix)[..., :num_modes_to_keep], FEATURE_MEANS_KEY: feature_means, FEATURE_STDEVS_KEY: feature_standard_deviations } def _apply_svd(feature_vector, svd_dictionary): """Applies SVD (singular-value decomposition) model to new example. Z = number of features :param feature_vector: length-Z numpy array with feature values for one example (storm object). :param svd_dictionary: Dictionary created by `_fit_svd`. :return: reconstructed_feature_vector: Reconstructed version of input. """ this_matrix = numpy.dot( svd_dictionary[EOF_MATRIX_KEY], numpy.transpose(svd_dictionary[EOF_MATRIX_KEY]) ) feature_vector_norm = ( (feature_vector - svd_dictionary[FEATURE_MEANS_KEY]) / svd_dictionary[FEATURE_STDEVS_KEY] ) reconstructed_feature_vector_norm = numpy.dot( this_matrix, feature_vector_norm) return ( svd_dictionary[FEATURE_MEANS_KEY] + reconstructed_feature_vector_norm * svd_dictionary[FEATURE_STDEVS_KEY] ) def do_novelty_detection( baseline_image_matrix, test_image_matrix, image_normalization_dict, predictor_names, cnn_model_object, cnn_feature_layer_name, ucn_model_object, num_novel_test_images, percent_svd_variance_to_keep=97.5): """Does novelty detection. Specifically, this method follows the procedure in Wagstaff et al. (2018) to determine which images in the test set are most novel with respect to the baseline set. NOTE: Both input and output images are (assumed to be) denormalized. 
B = number of baseline examples (storm objects) T = number of test examples (storm objects) M = number of rows in each storm-centered grid N = number of columns in each storm-centered grid C = number of channels (predictor variables) :param baseline_image_matrix: B-by-M-by-N-by-C numpy array of baseline images. :param test_image_matrix: T-by-M-by-N-by-C numpy array of test images. :param image_normalization_dict: See doc for `normalize_images`. :param predictor_names: length-C list of predictor names. :param cnn_model_object: Trained CNN model (instance of `keras.models.Model`). Will be used to turn images into scalar features. :param cnn_feature_layer_name: The "scalar features" will be the set of activations from this layer. :param ucn_model_object: Trained UCN model (instance of `keras.models.Model`). Will be used to turn scalar features into images. :param num_novel_test_images: Number of novel test images to find. :param percent_svd_variance_to_keep: See doc for `_fit_svd`. :return: novelty_dict: Dictionary with the following keys. In the following discussion, Q = number of novel test images found. novelty_dict['novel_image_matrix_actual']: Q-by-M-by-N-by-C numpy array of novel test images. novelty_dict['novel_image_matrix_upconv']: Same as "novel_image_matrix_actual" but reconstructed by the upconvnet. novelty_dict['novel_image_matrix_upconv_svd']: Same as "novel_image_matrix_actual" but reconstructed by SVD (singular-value decomposition) and the upconvnet. :raises: TypeError: if `image_normalization_dict is None`. """ if image_normalization_dict is None: error_string = ( 'image_normalization_dict cannot be None. Must be specified.') raise TypeError(error_string) num_test_examples = test_image_matrix.shape[0] baseline_image_matrix_norm, _ = normalize_images( predictor_matrix=baseline_image_matrix + 0., predictor_names=predictor_names, normalization_dict=image_normalization_dict) test_image_matrix_norm, _ = normalize_images( predictor_matrix=test_image_matrix + 0., predictor_names=predictor_names, normalization_dict=image_normalization_dict) baseline_feature_matrix = _apply_cnn( cnn_model_object=cnn_model_object, predictor_matrix=baseline_image_matrix_norm, verbose=False, output_layer_name=cnn_feature_layer_name) test_feature_matrix = _apply_cnn( cnn_model_object=cnn_model_object, predictor_matrix=test_image_matrix_norm, verbose=False, output_layer_name=cnn_feature_layer_name) novel_indices = [] novel_image_matrix_upconv = None novel_image_matrix_upconv_svd = None for k in range(num_novel_test_images): print('Finding {0:d}th of {1:d} novel test images...'.format( k + 1, num_novel_test_images)) if len(novel_indices) == 0: this_baseline_feature_matrix = baseline_feature_matrix + 0. this_test_feature_matrix = test_feature_matrix + 0. else: novel_indices_numpy = numpy.array(novel_indices, dtype=int) this_baseline_feature_matrix = numpy.concatenate( (baseline_feature_matrix, test_feature_matrix[novel_indices_numpy, ...]), axis=0) this_test_feature_matrix = numpy.delete( test_feature_matrix, obj=novel_indices_numpy, axis=0) svd_dictionary = _fit_svd( baseline_feature_matrix=this_baseline_feature_matrix, test_feature_matrix=this_test_feature_matrix, percent_variance_to_keep=percent_svd_variance_to_keep) svd_errors = numpy.full(num_test_examples, numpy.nan) test_feature_matrix_svd = numpy.full( test_feature_matrix.shape, numpy.nan) for i in range(num_test_examples): print(i) if i in novel_indices: continue test_feature_matrix_svd[i, ...] 
            test_feature_matrix_svd[i, ...] = _apply_svd(
                feature_vector=test_feature_matrix[i, ...],
                svd_dictionary=svd_dictionary)

            svd_errors[i] = numpy.linalg.norm(
                test_feature_matrix_svd[i, ...] - test_feature_matrix[i, ...]
            )

        new_novel_index = numpy.nanargmax(svd_errors)
        novel_indices.append(new_novel_index)

        new_image_matrix_upconv = ucn_model_object.predict(
            test_feature_matrix[[new_novel_index], ...], batch_size=1)

        new_image_matrix_upconv_svd = ucn_model_object.predict(
            test_feature_matrix_svd[[new_novel_index], ...], batch_size=1)

        if novel_image_matrix_upconv is None:
            novel_image_matrix_upconv = new_image_matrix_upconv + 0.
            novel_image_matrix_upconv_svd = new_image_matrix_upconv_svd + 0.
        else:
            novel_image_matrix_upconv = numpy.concatenate(
                (novel_image_matrix_upconv, new_image_matrix_upconv), axis=0)

            novel_image_matrix_upconv_svd = numpy.concatenate(
                (novel_image_matrix_upconv_svd, new_image_matrix_upconv_svd),
                axis=0)

    novel_indices = numpy.array(novel_indices, dtype=int)

    novel_image_matrix_upconv = denormalize_images(
        predictor_matrix=novel_image_matrix_upconv,
        predictor_names=predictor_names,
        normalization_dict=image_normalization_dict)

    novel_image_matrix_upconv_svd = denormalize_images(
        predictor_matrix=novel_image_matrix_upconv_svd,
        predictor_names=predictor_names,
        normalization_dict=image_normalization_dict)

    return {
        NOVEL_IMAGES_ACTUAL_KEY: test_image_matrix[novel_indices, ...],
        NOVEL_IMAGES_UPCONV_KEY: novel_image_matrix_upconv,
        NOVEL_IMAGES_UPCONV_SVD_KEY: novel_image_matrix_upconv_svd
    }


def _plot_novelty_for_many_predictors(
        novelty_matrix, predictor_names, max_absolute_temp_kelvins,
        max_absolute_refl_dbz):
    """Plots novelty for each predictor on 2-D grid with wind barbs overlain.

    M = number of rows in grid
    N = number of columns in grid
    C = number of predictors

    :param novelty_matrix: M-by-N-by-C numpy array of novelty values.
    :param predictor_names: length-C list of predictor names.
    :param max_absolute_temp_kelvins: Max absolute temperature in colour
        scheme.  Minimum temperature in colour scheme will be
        -1 * `max_absolute_temp_kelvins`, and this will be a diverging scheme
        centered at zero.
    :param max_absolute_refl_dbz: Same but for reflectivity.
    :return: figure_object: See doc for `_init_figure_panels`.
    :return: axes_objects_2d_list: Same.
    """

    u_wind_matrix_m_s01 = novelty_matrix[
        ..., predictor_names.index(U_WIND_NAME)]
    v_wind_matrix_m_s01 = novelty_matrix[
        ..., predictor_names.index(V_WIND_NAME)]

    non_wind_predictor_names = [
        p for p in predictor_names if p not in [U_WIND_NAME, V_WIND_NAME]
    ]

    figure_object, axes_objects_2d_list = _init_figure_panels(
        num_rows=len(non_wind_predictor_names), num_columns=1)

    for m in range(len(non_wind_predictor_names)):
        this_predictor_index = predictor_names.index(
            non_wind_predictor_names[m])

        if non_wind_predictor_names[m] == REFLECTIVITY_NAME:
            this_min_colour_value = -1 * max_absolute_refl_dbz
            this_max_colour_value = max_absolute_refl_dbz + 0.
            this_colour_map_object = pyplot.cm.PuOr
        else:
            this_min_colour_value = -1 * max_absolute_temp_kelvins
            this_max_colour_value = max_absolute_temp_kelvins + 0.
this_colour_map_object = pyplot.cm.bwr this_colour_bar_object = plot_predictor_2d( predictor_matrix=novelty_matrix[..., this_predictor_index], colour_map_object=this_colour_map_object, colour_norm_object=None, min_colour_value=this_min_colour_value, max_colour_value=this_max_colour_value, axes_object=axes_objects_2d_list[m][0]) plot_wind_2d(u_wind_matrix_m_s01=u_wind_matrix_m_s01, v_wind_matrix_m_s01=v_wind_matrix_m_s01, axes_object=axes_objects_2d_list[m][0]) this_colour_bar_object.set_label(non_wind_predictor_names[m]) return figure_object, axes_objects_2d_list def plot_novelty_detection(image_dict, novelty_dict, test_index): """Plots results of novelty detection. :param image_dict: Dictionary created by `read_many_image_files`, containing input data for novelty detection. :param novelty_dict: Dictionary created by `do_novelty_detection`, containing results. :param test_index: Array index. The [i]th-most novel test example will be plotted, where i = `test_index`. """ predictor_names = image_dict[PREDICTOR_NAMES_KEY] temperature_index = predictor_names.index(TEMPERATURE_NAME) reflectivity_index = predictor_names.index(REFLECTIVITY_NAME) image_matrix_actual = novelty_dict[NOVEL_IMAGES_ACTUAL_KEY][test_index, ...] image_matrix_upconv = novelty_dict[NOVEL_IMAGES_UPCONV_KEY][test_index, ...] image_matrix_upconv_svd = novelty_dict[ NOVEL_IMAGES_UPCONV_SVD_KEY][test_index, ...] combined_matrix_kelvins = numpy.concatenate( (image_matrix_actual[..., temperature_index], image_matrix_upconv[..., temperature_index]), axis=0) min_colour_temp_kelvins = numpy.percentile(combined_matrix_kelvins, 1) max_colour_temp_kelvins = numpy.percentile(combined_matrix_kelvins, 99) this_figure_object, _ = plot_many_predictors_with_barbs( predictor_matrix=image_matrix_actual, predictor_names=predictor_names, min_colour_temp_kelvins=min_colour_temp_kelvins, max_colour_temp_kelvins=max_colour_temp_kelvins) base_title_string = '{0:d}th-most novel example'.format(test_index + 1) this_title_string = '{0:s}: actual'.format(base_title_string) this_figure_object.suptitle(this_title_string) pyplot.show() this_figure_object, _ = plot_many_predictors_with_barbs( predictor_matrix=image_matrix_upconv, predictor_names=predictor_names, min_colour_temp_kelvins=min_colour_temp_kelvins, max_colour_temp_kelvins=max_colour_temp_kelvins) this_title_string = r'{0:s}: upconvnet reconstruction'.format( base_title_string) this_title_string += r' ($\mathbf{X}_{up}$)' this_figure_object.suptitle(this_title_string) pyplot.show() novelty_matrix = image_matrix_upconv - image_matrix_upconv_svd max_absolute_temp_kelvins = numpy.percentile( numpy.absolute(novelty_matrix[..., temperature_index]), 99) max_absolute_refl_dbz = numpy.percentile( numpy.absolute(novelty_matrix[..., reflectivity_index]), 99) this_figure_object, _ = _plot_novelty_for_many_predictors( novelty_matrix=novelty_matrix, predictor_names=predictor_names, max_absolute_temp_kelvins=max_absolute_temp_kelvins, max_absolute_refl_dbz=max_absolute_refl_dbz) this_title_string = r'{0:s}: novelty'.format( base_title_string) this_title_string += r' ($\mathbf{X}_{up} - \mathbf{X}_{up,svd}$)' this_figure_object.suptitle(this_title_string) pyplot.show() def do_novelty_detection_example( validation_image_dict, normalization_dict, cnn_model_object, ucn_model_object): """Runs novelty detection. The baseline images are a random set of 100 from the validation set, and the test images are the 100 storm objects with greatest vorticity in the validation set. 
:param validation_image_dict: Dictionary created by `read_many_image_files`. :param normalization_dict: Dictionary created by `get_image_normalization_params`. :param cnn_model_object: Trained instance of `keras.models.Model`, representing the CNN or "encoder". :param ucn_model_object: Trained instance of `keras.models.Model`, representing the UCN or "decoder". """ target_matrix_s01 = validation_image_dict[TARGET_MATRIX_KEY] num_examples = target_matrix_s01.shape[0] max_target_by_example_s01 = numpy.array( [numpy.max(target_matrix_s01[i, ...]) for i in range(num_examples)] ) test_indices = numpy.argsort(-1 * max_target_by_example_s01)[:100] test_indices = test_indices[test_indices >= 100] baseline_indices = numpy.linspace(0, 100, num=100, dtype=int) novelty_dict = do_novelty_detection( baseline_image_matrix=validation_image_dict[ PREDICTOR_MATRIX_KEY][baseline_indices, ...], test_image_matrix=validation_image_dict[ PREDICTOR_MATRIX_KEY][test_indices, ...], image_normalization_dict=normalization_dict, predictor_names=validation_image_dict[PREDICTOR_NAMES_KEY], cnn_model_object=cnn_model_object, cnn_feature_layer_name=get_cnn_flatten_layer(cnn_model_object), ucn_model_object=ucn_model_object, num_novel_test_images=4) def plot_novelty_detection_example1(validation_image_dict, novelty_dict): """Plots the most novel example, according to novelty detection. :param validation_image_dict: Dictionary created by `read_many_image_files`. :param novelty_dict: Dictionary created by `do_novelty_detection`. """ plot_novelty_detection(image_dict=validation_image_dict, novelty_dict=novelty_dict, test_index=0) def plot_novelty_detection_example2(validation_image_dict, novelty_dict): """Plots the second-most novel example, according to novelty detection. :param validation_image_dict: Dictionary created by `read_many_image_files`. :param novelty_dict: Dictionary created by `do_novelty_detection`. """ plot_novelty_detection(image_dict=validation_image_dict, novelty_dict=novelty_dict, test_index=1) def plot_novelty_detection_example3(validation_image_dict, novelty_dict): """Plots the third-most novel example, according to novelty detection. :param validation_image_dict: Dictionary created by `read_many_image_files`. :param novelty_dict: Dictionary created by `do_novelty_detection`. """ plot_novelty_detection(image_dict=validation_image_dict, novelty_dict=novelty_dict, test_index=2) def plot_novelty_detection_example4(validation_image_dict, novelty_dict): """Plots the fourth-most novel example, according to novelty detection. :param validation_image_dict: Dictionary created by `read_many_image_files`. :param novelty_dict: Dictionary created by `do_novelty_detection`. """ plot_novelty_detection(image_dict=validation_image_dict, novelty_dict=novelty_dict, test_index=3)
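The example functions above only wire existing pieces together, so the core novelty-ranking step can be easy to miss. The snippet below is a minimal, self-contained sketch of that step: rank test examples by how poorly their feature vectors are reconstructed from an SVD basis fit to the baseline set. It is not part of the module above; the helper name `rank_by_svd_error` and the synthetic data are illustrative assumptions, and it simplifies away details such as the module's `_apply_svd` helper and the iterative refitting after each newly flagged example.

# Minimal sketch (illustrative, not part of the module above) of ranking test
# examples by SVD-reconstruction error, the idea behind `do_novelty_detection`.
import numpy


def rank_by_svd_error(baseline_feature_matrix, test_feature_matrix,
                      num_modes=10):
    """Ranks test examples by SVD-reconstruction error (most novel first).

    E_b = number of baseline examples
    E_t = number of test examples
    Z = number of scalar features

    :param baseline_feature_matrix: E_b-by-Z numpy array of features.
    :param test_feature_matrix: E_t-by-Z numpy array of features.
    :param num_modes: Number of leading singular vectors to keep.
    :return: sort_indices: length-E_t numpy array of test-example indices,
        sorted from most to least novel.
    :return: svd_errors: length-E_t numpy array of reconstruction errors.
    """

    # Normalize both matrices with baseline statistics.
    feature_means = numpy.mean(baseline_feature_matrix, axis=0)
    feature_stdevs = numpy.std(baseline_feature_matrix, axis=0, ddof=1)
    baseline_matrix_norm = (
        (baseline_feature_matrix - feature_means) / feature_stdevs)
    test_matrix_norm = (test_feature_matrix - feature_means) / feature_stdevs

    # Right singular vectors of the baseline matrix span the "expected"
    # subspace.
    _, _, vh_matrix = numpy.linalg.svd(baseline_matrix_norm, full_matrices=False)
    eof_matrix = vh_matrix[:num_modes, :]

    # Project each test vector onto the baseline subspace and back; a large
    # residual means the example is poorly explained by the baseline set.
    reconstructed_matrix = numpy.dot(
        numpy.dot(test_matrix_norm, numpy.transpose(eof_matrix)), eof_matrix)
    svd_errors = numpy.linalg.norm(
        test_matrix_norm - reconstructed_matrix, axis=1)

    return numpy.argsort(-1 * svd_errors), svd_errors


if __name__ == '__main__':
    numpy.random.seed(6695)

    baseline_feature_matrix = numpy.random.normal(size=(100, 64))
    test_feature_matrix = numpy.random.normal(size=(20, 64))
    test_feature_matrix[3, :] += 5.  # Make one test example obviously novel.

    sort_indices, svd_errors = rank_by_svd_error(
        baseline_feature_matrix, test_feature_matrix, num_modes=10)
    print('Most novel test example: {0:d}'.format(int(sort_indices[0])))

In the module itself the same idea is applied repeatedly: as the concatenate/delete calls in `do_novelty_detection` suggest, each newly flagged example is moved into the baseline set before the next fit, so successive "novel" examples are novel relative to everything found so far.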
[ "numpy.sum", "keras.backend.epsilon", "numpy.ones", "keras.models.Model", "numpy.argsort", "numpy.linalg.svd", "numpy.exp", "keras.layers.Input", "netCDF4.Dataset", "keras.layers.Flatten", "numpy.max", "matplotlib.pyplot.rc", "matplotlib.pyplot.subplots", "keras.backend.gradients", "matplotlib.pyplot.show", "keras.backend.learning_phase", "keras.optimizers.Adam", "numpy.percentile", "numpy.min", "keras.layers.Conv2D", "numpy.delete", "time.gmtime", "numpy.unravel_index", "numpy.array", "keras.backend.std", "time.strptime", "pickle.dump", "module_4.attributes_diagrams.plot_attributes_diagram", "module_4.roc_curves.plot_roc_curve", "numpy.linalg.norm", "glob.glob", "keras.layers.Reshape", "numpy.full", "numpy.meshgrid", "matplotlib.pyplot.close", "numpy.transpose", "copy.deepcopy", "numpy.average", "keras.callbacks.ModelCheckpoint", "keras.layers.Dropout", "numpy.mod", "matplotlib.pyplot.subplots_adjust", "numpy.nanargmax", "numpy.expand_dims", "keras.layers.Dense", "module_4.performance_diagrams.plot_performance_diagram", "numpy.diff", "matplotlib.pyplot.xlabel", "keras.models.load_model", "matplotlib.pyplot.title", "numpy.argmax", "numpy.mean", "keras.layers.ZeroPadding2D", "numpy.prod", "matplotlib.pyplot.cm.ScalarMappable", "numpy.cumsum", "numpy.linspace", "keras.layers.MaxPooling2D", "numpy.repeat", "json.dump", "keras.layers.LeakyReLU", "keras.layers.Conv2DTranspose", "keras.layers.UpSampling2D", "numpy.dot", "keras.layers.BatchNormalization", "keras.layers.Activation", "numpy.zeros", "matplotlib.pyplot.savefig", "numpy.absolute", "random.shuffle", "keras.regularizers.l1_l2", "numpy.round", "numpy.std", "matplotlib.pyplot.yticks", "matplotlib.pyplot.colorbar", "numpy.reshape", "numpy.random.choice", "numpy.log2", "numpy.random.permutation", "matplotlib.pyplot.ylabel", "numpy.concatenate", "matplotlib.pyplot.xlim", "json.load", "numpy.logical_and", "sklearn.metrics.auc", "keras.callbacks.EarlyStopping", "keras.backend.mean", "numpy.where", "numpy.sqrt" ]
[((974, 992), 'numpy.full', 'numpy.full', (['(3)', '(0.0)'], {}), '(3, 0.0)\n', (984, 992), False, 'import numpy\n'), ((1080, 1113), 'matplotlib.pyplot.rc', 'pyplot.rc', (['"""font"""'], {'size': 'FONT_SIZE'}), "('font', size=FONT_SIZE)\n", (1089, 1113), True, 'import matplotlib.pyplot as pyplot\n'), ((1114, 1152), 'matplotlib.pyplot.rc', 'pyplot.rc', (['"""axes"""'], {'titlesize': 'FONT_SIZE'}), "('axes', titlesize=FONT_SIZE)\n", (1123, 1152), True, 'import matplotlib.pyplot as pyplot\n'), ((1153, 1191), 'matplotlib.pyplot.rc', 'pyplot.rc', (['"""axes"""'], {'labelsize': 'FONT_SIZE'}), "('axes', labelsize=FONT_SIZE)\n", (1162, 1191), True, 'import matplotlib.pyplot as pyplot\n'), ((1192, 1231), 'matplotlib.pyplot.rc', 'pyplot.rc', (['"""xtick"""'], {'labelsize': 'FONT_SIZE'}), "('xtick', labelsize=FONT_SIZE)\n", (1201, 1231), True, 'import matplotlib.pyplot as pyplot\n'), ((1232, 1271), 'matplotlib.pyplot.rc', 'pyplot.rc', (['"""ytick"""'], {'labelsize': 'FONT_SIZE'}), "('ytick', labelsize=FONT_SIZE)\n", (1241, 1271), True, 'import matplotlib.pyplot as pyplot\n'), ((1272, 1311), 'matplotlib.pyplot.rc', 'pyplot.rc', (['"""legend"""'], {'fontsize': 'FONT_SIZE'}), "('legend', fontsize=FONT_SIZE)\n", (1281, 1311), True, 'import matplotlib.pyplot as pyplot\n'), ((1312, 1352), 'matplotlib.pyplot.rc', 'pyplot.rc', (['"""figure"""'], {'titlesize': 'FONT_SIZE'}), "('figure', titlesize=FONT_SIZE)\n", (1321, 1352), True, 'import matplotlib.pyplot as pyplot\n'), ((4586, 4659), 'numpy.array', 'numpy.array', (['[0.1, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70]'], {}), '([0.1, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70])\n', (4597, 4659), False, 'import numpy\n'), ((902, 943), 'numpy.array', 'numpy.array', (['[166, 206, 227]'], {'dtype': 'float'}), '([166, 206, 227], dtype=float)\n', (913, 943), False, 'import numpy\n'), ((3734, 3760), 'numpy.array', 'numpy.array', (['[4, 233, 231]'], {}), '([4, 233, 231])\n', (3745, 3760), False, 'import numpy\n'), ((3762, 3788), 'numpy.array', 'numpy.array', (['[1, 159, 244]'], {}), '([1, 159, 244])\n', (3773, 3788), False, 'import numpy\n'), ((3794, 3818), 'numpy.array', 'numpy.array', (['[3, 0, 244]'], {}), '([3, 0, 244])\n', (3805, 3818), False, 'import numpy\n'), ((3820, 3844), 'numpy.array', 'numpy.array', (['[2, 253, 2]'], {}), '([2, 253, 2])\n', (3831, 3844), False, 'import numpy\n'), ((3850, 3874), 'numpy.array', 'numpy.array', (['[1, 197, 1]'], {}), '([1, 197, 1])\n', (3861, 3874), False, 'import numpy\n'), ((3876, 3900), 'numpy.array', 'numpy.array', (['[0, 142, 0]'], {}), '([0, 142, 0])\n', (3887, 3900), False, 'import numpy\n'), ((3906, 3932), 'numpy.array', 'numpy.array', (['[253, 248, 2]'], {}), '([253, 248, 2])\n', (3917, 3932), False, 'import numpy\n'), ((3934, 3960), 'numpy.array', 'numpy.array', (['[229, 188, 0]'], {}), '([229, 188, 0])\n', (3945, 3960), False, 'import numpy\n'), ((3966, 3992), 'numpy.array', 'numpy.array', (['[253, 149, 0]'], {}), '([253, 149, 0])\n', (3977, 3992), False, 'import numpy\n'), ((3994, 4018), 'numpy.array', 'numpy.array', (['[253, 0, 0]'], {}), '([253, 0, 0])\n', (4005, 4018), False, 'import numpy\n'), ((4024, 4048), 'numpy.array', 'numpy.array', (['[212, 0, 0]'], {}), '([212, 0, 0])\n', (4035, 4048), False, 'import numpy\n'), ((4050, 4074), 'numpy.array', 'numpy.array', (['[188, 0, 0]'], {}), '([188, 0, 0])\n', (4061, 4074), False, 'import numpy\n'), ((4080, 4106), 'numpy.array', 'numpy.array', (['[248, 0, 253]'], {}), '([248, 0, 253])\n', (4091, 4106), False, 'import numpy\n'), ((4108, 4135), 
'numpy.array', 'numpy.array', (['[152, 84, 198]'], {}), '([152, 84, 198])\n', (4119, 4135), False, 'import numpy\n'), ((4354, 4367), 'numpy.ones', 'numpy.ones', (['(3)'], {}), '(3)\n', (4364, 4367), False, 'import numpy\n'), ((8723, 8753), 'glob.glob', 'glob.glob', (['netcdf_file_pattern'], {}), '(netcdf_file_pattern)\n', (8732, 8753), False, 'import glob\n'), ((10128, 10161), 'netCDF4.Dataset', 'netCDF4.Dataset', (['netcdf_file_name'], {}), '(netcdf_file_name)\n', (10143, 10161), False, 'import netCDF4\n'), ((10179, 10252), 'numpy.array', 'numpy.array', (['dataset_object.variables[NETCDF_TRACK_ID_NAME][:]'], {'dtype': 'int'}), '(dataset_object.variables[NETCDF_TRACK_ID_NAME][:], dtype=int)\n', (10190, 10252), False, 'import numpy\n'), ((10280, 10355), 'numpy.array', 'numpy.array', (['dataset_object.variables[NETCDF_TRACK_STEP_NAME][:]'], {'dtype': 'int'}), '(dataset_object.variables[NETCDF_TRACK_STEP_NAME][:], dtype=int)\n', (10291, 10355), False, 'import numpy\n'), ((10914, 10987), 'numpy.array', 'numpy.array', (['dataset_object.variables[NETCDF_TARGET_NAME][:]'], {'dtype': 'float'}), '(dataset_object.variables[NETCDF_TARGET_NAME][:], dtype=float)\n', (10925, 10987), False, 'import numpy\n'), ((14277, 14401), 'matplotlib.pyplot.subplots', 'pyplot.subplots', (['num_rows', 'num_columns'], {'sharex': '(False)', 'sharey': '(False)', 'figsize': '(FIGURE_WIDTH_INCHES, FIGURE_HEIGHT_INCHES)'}), '(num_rows, num_columns, sharex=False, sharey=False, figsize=\n (FIGURE_WIDTH_INCHES, FIGURE_HEIGHT_INCHES))\n', (14292, 14401), True, 'import matplotlib.pyplot as pyplot\n'), ((14690, 14829), 'matplotlib.pyplot.subplots_adjust', 'pyplot.subplots_adjust', ([], {'left': '(0.02)', 'bottom': '(0.02)', 'right': '(0.98)', 'top': '(0.95)', 'hspace': 'vertical_space_fraction', 'wspace': 'horizontal_space_fraction'}), '(left=0.02, bottom=0.02, right=0.98, top=0.95, hspace\n =vertical_space_fraction, wspace=horizontal_space_fraction)\n', (14712, 14829), True, 'import matplotlib.pyplot as pyplot\n'), ((16384, 16457), 'matplotlib.pyplot.cm.ScalarMappable', 'pyplot.cm.ScalarMappable', ([], {'cmap': 'colour_map_object', 'norm': 'colour_norm_object'}), '(cmap=colour_map_object, norm=colour_norm_object)\n', (16408, 16457), True, 'import matplotlib.pyplot as pyplot\n'), ((16860, 16995), 'matplotlib.pyplot.colorbar', 'pyplot.colorbar', ([], {'ax': 'axes_object', 'mappable': 'scalar_mappable_object', 'orientation': 'orientation_string', 'pad': 'padding', 'extend': 'extend_string'}), '(ax=axes_object, mappable=scalar_mappable_object,\n orientation=orientation_string, pad=padding, extend=extend_string)\n', (16875, 16995), True, 'import matplotlib.pyplot as pyplot\n'), ((19558, 19632), 'numpy.linspace', 'numpy.linspace', (['(0)', 'num_grid_columns'], {'num': '(num_grid_columns + 1)', 'dtype': 'float'}), '(0, num_grid_columns, num=num_grid_columns + 1, dtype=float)\n', (19572, 19632), False, 'import numpy\n'), ((19784, 19852), 'numpy.linspace', 'numpy.linspace', (['(0)', 'num_grid_rows'], {'num': '(num_grid_rows + 1)', 'dtype': 'float'}), '(0, num_grid_rows, num=num_grid_rows + 1, dtype=float)\n', (19798, 19852), False, 'import numpy\n'), ((20019, 20067), 'numpy.meshgrid', 'numpy.meshgrid', (['x_coords_unique', 'y_coords_unique'], {}), '(x_coords_unique, y_coords_unique)\n', (20033, 20067), False, 'import numpy\n'), ((20146, 20209), 'numpy.sqrt', 'numpy.sqrt', (['(u_wind_matrix_m_s01 ** 2 + v_wind_matrix_m_s01 ** 2)'], {}), '(u_wind_matrix_m_s01 ** 2 + v_wind_matrix_m_s01 ** 2)\n', (20156, 20209), False, 'import numpy\n'), 
((26898, 26911), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (26909, 26911), True, 'import matplotlib.pyplot as pyplot\n'), ((27872, 27885), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (27883, 27885), True, 'import matplotlib.pyplot as pyplot\n'), ((28966, 29045), 'numpy.array', 'numpy.array', (['[intermediate_normalization_dict[NUM_VALUES_KEY], new_values.size]'], {}), '([intermediate_normalization_dict[NUM_VALUES_KEY], new_values.size])\n', (28977, 29045), False, 'import numpy\n'), ((29115, 29164), 'numpy.average', 'numpy.average', (['these_means'], {'weights': 'these_weights'}), '(these_means, weights=these_weights)\n', (29128, 29164), False, 'import numpy\n'), ((29372, 29421), 'numpy.average', 'numpy.average', (['these_means'], {'weights': 'these_weights'}), '(these_means, weights=these_weights)\n', (29385, 29421), False, 'import numpy\n'), ((29962, 30105), 'numpy.sqrt', 'numpy.sqrt', (['(multiplier * (intermediate_normalization_dict[MEAN_OF_SQUARES_KEY] - \n intermediate_normalization_dict[MEAN_VALUE_KEY] ** 2))'], {}), '(multiplier * (intermediate_normalization_dict[\n MEAN_OF_SQUARES_KEY] - intermediate_normalization_dict[MEAN_VALUE_KEY] **\n 2))\n', (29972, 30105), False, 'import numpy\n'), ((36284, 36299), 'numpy.array', 'numpy.array', (['[]'], {}), '([])\n', (36295, 36299), False, 'import numpy\n'), ((36929, 36982), 'numpy.percentile', 'numpy.percentile', (['max_target_values', 'percentile_level'], {}), '(max_target_values, percentile_level)\n', (36945, 36982), False, 'import numpy\n'), ((38165, 38204), 'numpy.full', 'numpy.full', (['num_examples', '(-1)'], {'dtype': 'int'}), '(num_examples, -1, dtype=int)\n', (38175, 38204), False, 'import numpy\n'), ((40196, 40270), 'numpy.linspace', 'numpy.linspace', (['(0)', '(num_dense_layers - 1)'], {'num': 'num_dense_layers', 'dtype': 'float'}), '(0, num_dense_layers - 1, num=num_dense_layers, dtype=float)\n', (40210, 40270), False, 'import numpy\n'), ((41017, 41069), 'keras.regularizers.l1_l2', 'keras.regularizers.l1_l2', ([], {'l1': 'L1_WEIGHT', 'l2': 'L2_WEIGHT'}), '(l1=L1_WEIGHT, l2=L2_WEIGHT)\n', (41041, 41069), False, 'import keras\n'), ((41145, 41220), 'keras.layers.Input', 'keras.layers.Input', ([], {'shape': '(num_grid_rows, num_grid_columns, num_predictors)'}), '(shape=(num_grid_rows, num_grid_columns, num_predictors))\n', (41163, 41220), False, 'import keras\n'), ((43209, 43237), 'numpy.prod', 'numpy.prod', (['these_dimensions'], {}), '(these_dimensions)\n', (43219, 43237), False, 'import numpy\n'), ((44995, 45070), 'keras.models.Model', 'keras.models.Model', ([], {'inputs': 'input_layer_object', 'outputs': 'current_layer_object'}), '(inputs=input_layer_object, outputs=current_layer_object)\n', (45013, 45070), False, 'import keras\n'), ((46961, 46994), 'random.shuffle', 'random.shuffle', (['netcdf_file_names'], {}), '(netcdf_file_names)\n', (46975, 46994), False, 'import random\n'), ((52825, 52995), 'keras.callbacks.EarlyStopping', 'keras.callbacks.EarlyStopping', ([], {'monitor': '"""val_loss"""', 'min_delta': 'MIN_XENTROPY_DECREASE_FOR_EARLY_STOP', 'patience': 'NUM_EPOCHS_FOR_EARLY_STOPPING', 'verbose': '(1)', 'mode': '"""min"""'}), "(monitor='val_loss', min_delta=\n MIN_XENTROPY_DECREASE_FOR_EARLY_STOP, patience=\n NUM_EPOCHS_FOR_EARLY_STOPPING, verbose=1, mode='min')\n", (52854, 52995), False, 'import keras\n'), ((54615, 54691), 'keras.models.load_model', 'keras.models.load_model', (['hdf5_file_name'], {'custom_objects': 'METRIC_FUNCTION_DICT'}), '(hdf5_file_name, 
custom_objects=METRIC_FUNCTION_DICT)\n', (54638, 54691), False, 'import keras\n'), ((56255, 56289), 'copy.deepcopy', 'copy.deepcopy', (['model_metadata_dict'], {}), '(model_metadata_dict)\n', (56268, 56289), False, 'import copy\n'), ((63260, 63367), 'module_4.roc_curves.plot_roc_curve', 'roc_curves.plot_roc_curve', ([], {'observed_labels': 'target_values', 'forecast_probabilities': 'forecast_probabilities'}), '(observed_labels=target_values,\n forecast_probabilities=forecast_probabilities)\n', (63285, 63367), False, 'from module_4 import roc_curves\n'), ((63409, 63462), 'sklearn.metrics.auc', 'scikit_learn_auc', (['pofd_by_threshold', 'pod_by_threshold'], {}), '(pofd_by_threshold, pod_by_threshold)\n', (63425, 63462), True, 'from sklearn.metrics import auc as scikit_learn_auc\n'), ((63548, 63574), 'matplotlib.pyplot.title', 'pyplot.title', (['title_string'], {}), '(title_string)\n', (63560, 63574), True, 'import matplotlib.pyplot as pyplot\n'), ((63579, 63592), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (63590, 63592), True, 'import matplotlib.pyplot as pyplot\n'), ((63795, 63857), 'matplotlib.pyplot.savefig', 'pyplot.savefig', (['roc_curve_file_name'], {'dpi': 'FIGURE_RESOLUTION_DPI'}), '(roc_curve_file_name, dpi=FIGURE_RESOLUTION_DPI)\n', (63809, 63857), True, 'import matplotlib.pyplot as pyplot\n'), ((63862, 63876), 'matplotlib.pyplot.close', 'pyplot.close', ([], {}), '()\n', (63874, 63876), True, 'import matplotlib.pyplot as pyplot\n'), ((63882, 64009), 'module_4.performance_diagrams.plot_performance_diagram', 'performance_diagrams.plot_performance_diagram', ([], {'observed_labels': 'target_values', 'forecast_probabilities': 'forecast_probabilities'}), '(observed_labels=target_values,\n forecast_probabilities=forecast_probabilities)\n', (63927, 64009), False, 'from module_4 import performance_diagrams\n'), ((64027, 64040), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (64038, 64040), True, 'import matplotlib.pyplot as pyplot\n'), ((64214, 64279), 'matplotlib.pyplot.savefig', 'pyplot.savefig', (['perf_diagram_file_name'], {'dpi': 'FIGURE_RESOLUTION_DPI'}), '(perf_diagram_file_name, dpi=FIGURE_RESOLUTION_DPI)\n', (64228, 64279), True, 'import matplotlib.pyplot as pyplot\n'), ((64284, 64298), 'matplotlib.pyplot.close', 'pyplot.close', ([], {}), '()\n', (64296, 64298), True, 'import matplotlib.pyplot as pyplot\n'), ((64304, 64442), 'module_4.attributes_diagrams.plot_attributes_diagram', 'attributes_diagrams.plot_attributes_diagram', ([], {'observed_labels': 'target_values', 'forecast_probabilities': 'forecast_probabilities', 'num_bins': '(20)'}), '(observed_labels=target_values,\n forecast_probabilities=forecast_probabilities, num_bins=20)\n', (64347, 64442), False, 'from module_4 import attributes_diagrams\n'), ((64460, 64473), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (64471, 64473), True, 'import matplotlib.pyplot as pyplot\n'), ((64646, 64711), 'matplotlib.pyplot.savefig', 'pyplot.savefig', (['attr_diagram_file_name'], {'dpi': 'FIGURE_RESOLUTION_DPI'}), '(attr_diagram_file_name, dpi=FIGURE_RESOLUTION_DPI)\n', (64660, 64711), True, 'import matplotlib.pyplot as pyplot\n'), ((64716, 64730), 'matplotlib.pyplot.close', 'pyplot.close', ([], {}), '()\n', (64728, 64730), True, 'import matplotlib.pyplot as pyplot\n'), ((72495, 72532), 'pickle.dump', 'pickle.dump', (['result_dict', 'file_handle'], {}), '(result_dict, file_handle)\n', (72506, 72532), False, 'import pickle\n'), ((73691, 73704), 'matplotlib.pyplot.xlim', 'pyplot.xlim', ([], {}), '()\n', 
(73702, 73704), True, 'import matplotlib.pyplot as pyplot\n'), ((74574, 74600), 'numpy.argsort', 'numpy.argsort', (['cost_values'], {}), '(cost_values)\n', (74587, 74600), False, 'import numpy\n'), ((75070, 75144), 'matplotlib.pyplot.subplots', 'pyplot.subplots', (['(1)', '(1)'], {'figsize': '(FIGURE_WIDTH_INCHES, FIGURE_HEIGHT_INCHES)'}), '(1, 1, figsize=(FIGURE_WIDTH_INCHES, FIGURE_HEIGHT_INCHES))\n', (75085, 75144), True, 'import matplotlib.pyplot as pyplot\n'), ((75317, 75338), 'matplotlib.pyplot.yticks', 'pyplot.yticks', (['[]', '[]'], {}), '([], [])\n', (75330, 75338), True, 'import matplotlib.pyplot as pyplot\n'), ((75343, 75378), 'matplotlib.pyplot.ylabel', 'pyplot.ylabel', (['"""Predictor permuted"""'], {}), "('Predictor permuted')\n", (75356, 75378), True, 'import matplotlib.pyplot as pyplot\n'), ((75609, 75622), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (75620, 75622), True, 'import matplotlib.pyplot as pyplot\n'), ((75745, 75804), 'matplotlib.pyplot.savefig', 'pyplot.savefig', (['output_file_name'], {'dpi': 'FIGURE_RESOLUTION_DPI'}), '(output_file_name, dpi=FIGURE_RESOLUTION_DPI)\n', (75759, 75804), True, 'import matplotlib.pyplot as pyplot\n'), ((75809, 75823), 'matplotlib.pyplot.close', 'pyplot.close', ([], {}), '()\n', (75821, 75823), True, 'import matplotlib.pyplot as pyplot\n'), ((76553, 76627), 'matplotlib.pyplot.subplots', 'pyplot.subplots', (['(1)', '(1)'], {'figsize': '(FIGURE_WIDTH_INCHES, FIGURE_HEIGHT_INCHES)'}), '(1, 1, figsize=(FIGURE_WIDTH_INCHES, FIGURE_HEIGHT_INCHES))\n', (76568, 76627), True, 'import matplotlib.pyplot as pyplot\n'), ((76800, 76821), 'matplotlib.pyplot.yticks', 'pyplot.yticks', (['[]', '[]'], {}), '([], [])\n', (76813, 76821), True, 'import matplotlib.pyplot as pyplot\n'), ((76826, 76861), 'matplotlib.pyplot.ylabel', 'pyplot.ylabel', (['"""Predictor permuted"""'], {}), "('Predictor permuted')\n", (76839, 76861), True, 'import matplotlib.pyplot as pyplot\n'), ((77092, 77105), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (77103, 77105), True, 'import matplotlib.pyplot as pyplot\n'), ((77228, 77287), 'matplotlib.pyplot.savefig', 'pyplot.savefig', (['output_file_name'], {'dpi': 'FIGURE_RESOLUTION_DPI'}), '(output_file_name, dpi=FIGURE_RESOLUTION_DPI)\n', (77242, 77287), True, 'import matplotlib.pyplot as pyplot\n'), ((77292, 77306), 'matplotlib.pyplot.close', 'pyplot.close', ([], {}), '()\n', (77304, 77306), True, 'import matplotlib.pyplot as pyplot\n'), ((80242, 80289), 'keras.backend.gradients', 'K.gradients', (['loss_tensor', 'list_of_input_tensors'], {}), '(loss_tensor, list_of_input_tensors)\n', (80253, 80289), True, 'from keras import backend as K\n'), ((84274, 84327), 'numpy.expand_dims', 'numpy.expand_dims', (['orig_predictor_matrix_norm'], {'axis': '(0)'}), '(orig_predictor_matrix_norm, axis=0)\n', (84291, 84327), False, 'import numpy\n'), ((84822, 84952), 'numpy.concatenate', 'numpy.concatenate', (['(orig_predictor_matrix[..., temperature_index], optimized_predictor_matrix[\n ..., temperature_index])'], {'axis': '(0)'}), '((orig_predictor_matrix[..., temperature_index],\n optimized_predictor_matrix[..., temperature_index]), axis=0)\n', (84839, 84952), False, 'import numpy\n'), ((85006, 85055), 'numpy.percentile', 'numpy.percentile', (['combined_temp_matrix_kelvins', '(1)'], {}), '(combined_temp_matrix_kelvins, 1)\n', (85022, 85055), False, 'import numpy\n'), ((85086, 85136), 'numpy.percentile', 'numpy.percentile', (['combined_temp_matrix_kelvins', '(99)'], {}), '(combined_temp_matrix_kelvins, 99)\n', (85102, 
85136), False, 'import numpy\n'), ((85467, 85480), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (85478, 85480), True, 'import matplotlib.pyplot as pyplot\n'), ((85820, 85833), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (85831, 85833), True, 'import matplotlib.pyplot as pyplot\n'), ((86605, 86658), 'numpy.expand_dims', 'numpy.expand_dims', (['orig_predictor_matrix_norm'], {'axis': '(0)'}), '(orig_predictor_matrix_norm, axis=0)\n', (86622, 86658), False, 'import numpy\n'), ((87153, 87283), 'numpy.concatenate', 'numpy.concatenate', (['(orig_predictor_matrix[..., temperature_index], optimized_predictor_matrix[\n ..., temperature_index])'], {'axis': '(0)'}), '((orig_predictor_matrix[..., temperature_index],\n optimized_predictor_matrix[..., temperature_index]), axis=0)\n', (87170, 87283), False, 'import numpy\n'), ((87337, 87386), 'numpy.percentile', 'numpy.percentile', (['combined_temp_matrix_kelvins', '(1)'], {}), '(combined_temp_matrix_kelvins, 1)\n', (87353, 87386), False, 'import numpy\n'), ((87417, 87467), 'numpy.percentile', 'numpy.percentile', (['combined_temp_matrix_kelvins', '(99)'], {}), '(combined_temp_matrix_kelvins, 99)\n', (87433, 87467), False, 'import numpy\n'), ((87798, 87811), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (87809, 87811), True, 'import matplotlib.pyplot as pyplot\n'), ((88151, 88164), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (88162, 88164), True, 'import matplotlib.pyplot as pyplot\n'), ((89139, 89192), 'numpy.expand_dims', 'numpy.expand_dims', (['orig_predictor_matrix_norm'], {'axis': '(0)'}), '(orig_predictor_matrix_norm, axis=0)\n', (89156, 89192), False, 'import numpy\n'), ((89687, 89817), 'numpy.concatenate', 'numpy.concatenate', (['(orig_predictor_matrix[..., temperature_index], optimized_predictor_matrix[\n ..., temperature_index])'], {'axis': '(0)'}), '((orig_predictor_matrix[..., temperature_index],\n optimized_predictor_matrix[..., temperature_index]), axis=0)\n', (89704, 89817), False, 'import numpy\n'), ((89871, 89920), 'numpy.percentile', 'numpy.percentile', (['combined_temp_matrix_kelvins', '(1)'], {}), '(combined_temp_matrix_kelvins, 1)\n', (89887, 89920), False, 'import numpy\n'), ((89951, 90001), 'numpy.percentile', 'numpy.percentile', (['combined_temp_matrix_kelvins', '(99)'], {}), '(combined_temp_matrix_kelvins, 99)\n', (89967, 90001), False, 'import numpy\n'), ((90332, 90345), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (90343, 90345), True, 'import matplotlib.pyplot as pyplot\n'), ((90685, 90698), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (90696, 90698), True, 'import matplotlib.pyplot as pyplot\n'), ((91673, 91726), 'numpy.expand_dims', 'numpy.expand_dims', (['orig_predictor_matrix_norm'], {'axis': '(0)'}), '(orig_predictor_matrix_norm, axis=0)\n', (91690, 91726), False, 'import numpy\n'), ((92221, 92351), 'numpy.concatenate', 'numpy.concatenate', (['(orig_predictor_matrix[..., temperature_index], optimized_predictor_matrix[\n ..., temperature_index])'], {'axis': '(0)'}), '((orig_predictor_matrix[..., temperature_index],\n optimized_predictor_matrix[..., temperature_index]), axis=0)\n', (92238, 92351), False, 'import numpy\n'), ((92405, 92454), 'numpy.percentile', 'numpy.percentile', (['combined_temp_matrix_kelvins', '(1)'], {}), '(combined_temp_matrix_kelvins, 1)\n', (92421, 92454), False, 'import numpy\n'), ((92485, 92535), 'numpy.percentile', 'numpy.percentile', (['combined_temp_matrix_kelvins', '(99)'], {}), '(combined_temp_matrix_kelvins, 
99)\n', (92501, 92535), False, 'import numpy\n'), ((92866, 92879), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (92877, 92879), True, 'import matplotlib.pyplot as pyplot\n'), ((93219, 93232), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (93230, 93232), True, 'import matplotlib.pyplot as pyplot\n'), ((94395, 94442), 'keras.backend.gradients', 'K.gradients', (['loss_tensor', 'list_of_input_tensors'], {}), '(loss_tensor, list_of_input_tensors)\n', (94406, 94442), True, 'from keras import backend as K\n'), ((97472, 97546), 'numpy.linspace', 'numpy.linspace', (['(0)', 'num_grid_columns'], {'num': '(num_grid_columns + 1)', 'dtype': 'float'}), '(0, num_grid_columns, num=num_grid_columns + 1, dtype=float)\n', (97486, 97546), False, 'import numpy\n'), ((97698, 97766), 'numpy.linspace', 'numpy.linspace', (['(0)', 'num_grid_rows'], {'num': '(num_grid_rows + 1)', 'dtype': 'float'}), '(0, num_grid_rows, num=num_grid_rows + 1, dtype=float)\n', (97712, 97766), False, 'import numpy\n'), ((97933, 97981), 'numpy.meshgrid', 'numpy.meshgrid', (['x_coords_unique', 'y_coords_unique'], {}), '(x_coords_unique, y_coords_unique)\n', (97947, 97981), False, 'import numpy\n'), ((98197, 98267), 'numpy.linspace', 'numpy.linspace', (['(0.0)', 'max_absolute_contour_level'], {'num': 'half_num_contours'}), '(0.0, max_absolute_contour_level, num=half_num_contours)\n', (98211, 98267), False, 'import numpy\n'), ((101068, 101116), 'numpy.expand_dims', 'numpy.expand_dims', (['predictor_matrix_norm'], {'axis': '(0)'}), '(predictor_matrix_norm, axis=0)\n', (101085, 101116), False, 'import numpy\n'), ((101386, 101447), 'numpy.percentile', 'numpy.percentile', (['predictor_matrix[..., temperature_index]', '(1)'], {}), '(predictor_matrix[..., temperature_index], 1)\n', (101402, 101447), False, 'import numpy\n'), ((101487, 101549), 'numpy.percentile', 'numpy.percentile', (['predictor_matrix[..., temperature_index]', '(99)'], {}), '(predictor_matrix[..., temperature_index], 99)\n', (101503, 101549), False, 'import numpy\n'), ((102566, 102579), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (102577, 102579), True, 'import matplotlib.pyplot as pyplot\n'), ((103371, 103419), 'numpy.expand_dims', 'numpy.expand_dims', (['predictor_matrix_norm'], {'axis': '(0)'}), '(predictor_matrix_norm, axis=0)\n', (103388, 103419), False, 'import numpy\n'), ((103689, 103750), 'numpy.percentile', 'numpy.percentile', (['predictor_matrix[..., temperature_index]', '(1)'], {}), '(predictor_matrix[..., temperature_index], 1)\n', (103705, 103750), False, 'import numpy\n'), ((103790, 103852), 'numpy.percentile', 'numpy.percentile', (['predictor_matrix[..., temperature_index]', '(99)'], {}), '(predictor_matrix[..., temperature_index], 99)\n', (103806, 103852), False, 'import numpy\n'), ((104869, 104882), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (104880, 104882), True, 'import matplotlib.pyplot as pyplot\n'), ((105877, 105925), 'numpy.expand_dims', 'numpy.expand_dims', (['predictor_matrix_norm'], {'axis': '(0)'}), '(predictor_matrix_norm, axis=0)\n', (105894, 105925), False, 'import numpy\n'), ((106195, 106256), 'numpy.percentile', 'numpy.percentile', (['predictor_matrix[..., temperature_index]', '(1)'], {}), '(predictor_matrix[..., temperature_index], 1)\n', (106211, 106256), False, 'import numpy\n'), ((106296, 106358), 'numpy.percentile', 'numpy.percentile', (['predictor_matrix[..., temperature_index]', '(99)'], {}), '(predictor_matrix[..., temperature_index], 99)\n', (106312, 106358), False, 'import 
numpy\n'), ((107375, 107388), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (107386, 107388), True, 'import matplotlib.pyplot as pyplot\n'), ((108383, 108431), 'numpy.expand_dims', 'numpy.expand_dims', (['predictor_matrix_norm'], {'axis': '(0)'}), '(predictor_matrix_norm, axis=0)\n', (108400, 108431), False, 'import numpy\n'), ((108701, 108762), 'numpy.percentile', 'numpy.percentile', (['predictor_matrix[..., temperature_index]', '(1)'], {}), '(predictor_matrix[..., temperature_index], 1)\n', (108717, 108762), False, 'import numpy\n'), ((108802, 108864), 'numpy.percentile', 'numpy.percentile', (['predictor_matrix[..., temperature_index]', '(99)'], {}), '(predictor_matrix[..., temperature_index], 99)\n', (108818, 108864), False, 'import numpy\n'), ((109881, 109894), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (109892, 109894), True, 'import matplotlib.pyplot as pyplot\n'), ((110812, 110910), 'numpy.linspace', 'numpy.linspace', (['(-num_half_filter_rows)', 'num_half_filter_rows'], {'num': 'num_filter_rows', 'dtype': 'float'}), '(-num_half_filter_rows, num_half_filter_rows, num=\n num_filter_rows, dtype=float)\n', (110826, 110910), False, 'import numpy\n'), ((110951, 111058), 'numpy.linspace', 'numpy.linspace', (['(-num_half_filter_columns)', 'num_half_filter_columns'], {'num': 'num_filter_columns', 'dtype': 'float'}), '(-num_half_filter_columns, num_half_filter_columns, num=\n num_filter_columns, dtype=float)\n', (110965, 111058), False, 'import numpy\n'), ((111118, 111175), 'numpy.meshgrid', 'numpy.meshgrid', (['column_offsets_unique', 'row_offsets_unique'], {}), '(column_offsets_unique, row_offsets_unique)\n', (111132, 111175), False, 'import numpy\n'), ((111212, 111274), 'numpy.sqrt', 'numpy.sqrt', (['(row_offset_matrix ** 2 + column_offset_matrix ** 2)'], {}), '(row_offset_matrix ** 2 + column_offset_matrix ** 2)\n', (111222, 111274), False, 'import numpy\n'), ((111311, 111380), 'numpy.exp', 'numpy.exp', (['(-pixel_offset_matrix ** 2 / (2 * smoothing_radius_px ** 2))'], {}), '(-pixel_offset_matrix ** 2 / (2 * smoothing_radius_px ** 2))\n', (111320, 111380), False, 'import numpy\n'), ((111495, 111573), 'numpy.zeros', 'numpy.zeros', (['(num_filter_rows, num_filter_columns, num_channels, num_channels)'], {}), '((num_filter_rows, num_filter_columns, num_channels, num_channels))\n', (111506, 111573), False, 'import numpy\n'), ((113554, 113606), 'keras.regularizers.l1_l2', 'keras.regularizers.l1_l2', ([], {'l1': 'L1_WEIGHT', 'l2': 'L2_WEIGHT'}), '(l1=L1_WEIGHT, l2=L2_WEIGHT)\n', (113578, 113606), False, 'import keras\n'), ((113632, 113679), 'keras.layers.Input', 'keras.layers.Input', ([], {'shape': '(num_input_features,)'}), '(shape=(num_input_features,))\n', (113650, 113679), False, 'import keras\n'), ((117709, 117776), 'keras.models.Model', 'keras.models.Model', ([], {'inputs': 'input_layer_object', 'outputs': 'layer_object'}), '(inputs=input_layer_object, outputs=layer_object)\n', (117727, 117776), False, 'import keras\n'), ((118456, 118520), 'numpy.array', 'numpy.array', (["[('flatten' in n) for n in layer_names]"], {'dtype': 'bool'}), "([('flatten' in n) for n in layer_names], dtype=bool)\n", (118467, 118520), False, 'import numpy\n'), ((119273, 119337), 'numpy.array', 'numpy.array', (['cnn_feature_layer_object.input.shape[1:]'], {'dtype': 'int'}), '(cnn_feature_layer_object.input.shape[1:], dtype=int)\n', (119284, 119337), False, 'import numpy\n'), ((119373, 119407), 'numpy.prod', 'numpy.prod', (['cnn_feature_dimensions'], {}), '(cnn_feature_dimensions)\n', 
(119383, 119407), False, 'import numpy\n'), ((119632, 119674), 'numpy.array', 'numpy.array', (['[2, 1, 1, 2, 1, 1]'], {'dtype': 'int'}), '([2, 1, 1, 2, 1, 1], dtype=int)\n', (119643, 119674), False, 'import numpy\n'), ((121610, 121643), 'random.shuffle', 'random.shuffle', (['netcdf_file_names'], {}), '(netcdf_file_names)\n', (121624, 121643), False, 'import random\n'), ((127355, 127519), 'keras.callbacks.EarlyStopping', 'keras.callbacks.EarlyStopping', ([], {'monitor': '"""val_loss"""', 'min_delta': 'MIN_MSE_DECREASE_FOR_EARLY_STOP', 'patience': 'NUM_EPOCHS_FOR_EARLY_STOPPING', 'verbose': '(1)', 'mode': '"""min"""'}), "(monitor='val_loss', min_delta=\n MIN_MSE_DECREASE_FOR_EARLY_STOP, patience=NUM_EPOCHS_FOR_EARLY_STOPPING,\n verbose=1, mode='min')\n", (127384, 127519), False, 'import keras\n'), ((130523, 130567), 'numpy.expand_dims', 'numpy.expand_dims', (['image_matrix_norm'], {'axis': '(0)'}), '(image_matrix_norm, axis=0)\n', (130540, 130567), False, 'import numpy\n'), ((131179, 131300), 'numpy.concatenate', 'numpy.concatenate', (['(image_matrix[..., temperature_index], reconstructed_image_matrix[...,\n temperature_index])'], {'axis': '(0)'}), '((image_matrix[..., temperature_index],\n reconstructed_image_matrix[..., temperature_index]), axis=0)\n', (131196, 131300), False, 'import numpy\n'), ((131354, 131403), 'numpy.percentile', 'numpy.percentile', (['combined_temp_matrix_kelvins', '(1)'], {}), '(combined_temp_matrix_kelvins, 1)\n', (131370, 131403), False, 'import numpy\n'), ((131434, 131484), 'numpy.percentile', 'numpy.percentile', (['combined_temp_matrix_kelvins', '(99)'], {}), '(combined_temp_matrix_kelvins, 99)\n', (131450, 131484), False, 'import numpy\n'), ((131798, 131811), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (131809, 131811), True, 'import matplotlib.pyplot as pyplot\n'), ((132151, 132164), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (132162, 132164), True, 'import matplotlib.pyplot as pyplot\n'), ((133307, 133351), 'numpy.expand_dims', 'numpy.expand_dims', (['image_matrix_norm'], {'axis': '(0)'}), '(image_matrix_norm, axis=0)\n', (133324, 133351), False, 'import numpy\n'), ((133963, 134084), 'numpy.concatenate', 'numpy.concatenate', (['(image_matrix[..., temperature_index], reconstructed_image_matrix[...,\n temperature_index])'], {'axis': '(0)'}), '((image_matrix[..., temperature_index],\n reconstructed_image_matrix[..., temperature_index]), axis=0)\n', (133980, 134084), False, 'import numpy\n'), ((134138, 134187), 'numpy.percentile', 'numpy.percentile', (['combined_temp_matrix_kelvins', '(1)'], {}), '(combined_temp_matrix_kelvins, 1)\n', (134154, 134187), False, 'import numpy\n'), ((134218, 134268), 'numpy.percentile', 'numpy.percentile', (['combined_temp_matrix_kelvins', '(99)'], {}), '(combined_temp_matrix_kelvins, 99)\n', (134234, 134268), False, 'import numpy\n'), ((134582, 134595), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (134593, 134595), True, 'import matplotlib.pyplot as pyplot\n'), ((134935, 134948), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (134946, 134948), True, 'import matplotlib.pyplot as pyplot\n'), ((135969, 136016), 'numpy.reshape', 'numpy.reshape', (['feature_means', '(1, num_features)'], {}), '(feature_means, (1, num_features))\n', (135982, 136016), False, 'import numpy\n'), ((136035, 136090), 'numpy.repeat', 'numpy.repeat', (['mean_matrix'], {'repeats': 'num_examples', 'axis': '(0)'}), '(mean_matrix, repeats=num_examples, axis=0)\n', (136047, 136090), False, 'import numpy\n'), 
((136111, 136172), 'numpy.reshape', 'numpy.reshape', (['feature_standard_deviations', '(1, num_features)'], {}), '(feature_standard_deviations, (1, num_features))\n', (136124, 136172), False, 'import numpy\n'), ((136192, 136248), 'numpy.repeat', 'numpy.repeat', (['stdev_matrix'], {'repeats': 'num_examples', 'axis': '(0)'}), '(stdev_matrix, repeats=num_examples, axis=0)\n', (136204, 136248), False, 'import numpy\n'), ((137967, 138040), 'numpy.concatenate', 'numpy.concatenate', (['(baseline_feature_matrix, test_feature_matrix)'], {'axis': '(0)'}), '((baseline_feature_matrix, test_feature_matrix), axis=0)\n', (137984, 138040), False, 'import numpy\n'), ((138568, 138601), 'numpy.cumsum', 'numpy.cumsum', (['explained_variances'], {}), '(explained_variances)\n', (138580, 138601), False, 'import numpy\n'), ((139861, 139904), 'numpy.dot', 'numpy.dot', (['this_matrix', 'feature_vector_norm'], {}), '(this_matrix, feature_vector_norm)\n', (139870, 139904), False, 'import numpy\n'), ((145914, 145951), 'numpy.array', 'numpy.array', (['novel_indices'], {'dtype': 'int'}), '(novel_indices, dtype=int)\n', (145925, 145951), False, 'import numpy\n'), ((150049, 150170), 'numpy.concatenate', 'numpy.concatenate', (['(image_matrix_actual[..., temperature_index], image_matrix_upconv[...,\n temperature_index])'], {'axis': '(0)'}), '((image_matrix_actual[..., temperature_index],\n image_matrix_upconv[..., temperature_index]), axis=0)\n', (150066, 150170), False, 'import numpy\n'), ((150224, 150268), 'numpy.percentile', 'numpy.percentile', (['combined_matrix_kelvins', '(1)'], {}), '(combined_matrix_kelvins, 1)\n', (150240, 150268), False, 'import numpy\n'), ((150299, 150344), 'numpy.percentile', 'numpy.percentile', (['combined_matrix_kelvins', '(99)'], {}), '(combined_matrix_kelvins, 99)\n', (150315, 150344), False, 'import numpy\n'), ((150798, 150811), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (150809, 150811), True, 'import matplotlib.pyplot as pyplot\n'), ((151274, 151287), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (151285, 151287), True, 'import matplotlib.pyplot as pyplot\n'), ((152043, 152056), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (152054, 152056), True, 'import matplotlib.pyplot as pyplot\n'), ((153196, 153238), 'numpy.linspace', 'numpy.linspace', (['(0)', '(100)'], {'num': '(100)', 'dtype': 'int'}), '(0, 100, num=100, dtype=int)\n', (153210, 153238), False, 'import numpy\n'), ((6913, 6952), 'time.strptime', 'time.strptime', (['time_string', 'time_format'], {}), '(time_string, time_format)\n', (6926, 6952), False, 'import time\n'), ((7350, 7376), 'time.gmtime', 'time.gmtime', (['unix_time_sec'], {}), '(unix_time_sec)\n', (7361, 7376), False, 'import time\n'), ((10482, 10556), 'numpy.array', 'numpy.array', (['dataset_object.variables[this_predictor_name][:]'], {'dtype': 'float'}), '(dataset_object.variables[this_predictor_name][:], dtype=float)\n', (10493, 10556), False, 'import numpy\n'), ((10602, 10651), 'numpy.expand_dims', 'numpy.expand_dims', (['this_predictor_matrix'], {'axis': '(-1)'}), '(this_predictor_matrix, axis=-1)\n', (10619, 10651), False, 'import numpy\n'), ((18028, 18102), 'matplotlib.pyplot.subplots', 'pyplot.subplots', (['(1)', '(1)'], {'figsize': '(FIGURE_WIDTH_INCHES, FIGURE_HEIGHT_INCHES)'}), '(1, 1, figsize=(FIGURE_WIDTH_INCHES, FIGURE_HEIGHT_INCHES))\n', (18043, 18102), True, 'import matplotlib.pyplot as pyplot\n'), ((19336, 19410), 'matplotlib.pyplot.subplots', 'pyplot.subplots', (['(1)', '(1)'], {'figsize': '(FIGURE_WIDTH_INCHES, 
FIGURE_HEIGHT_INCHES)'}), '(1, 1, figsize=(FIGURE_WIDTH_INCHES, FIGURE_HEIGHT_INCHES))\n', (19351, 19410), True, 'import matplotlib.pyplot as pyplot\n'), ((31336, 31372), 'numpy.array', 'numpy.array', (['[this_mean, this_stdev]'], {}), '([this_mean, this_stdev])\n', (31347, 31372), False, 'import numpy\n'), ((36629, 36669), 'numpy.full', 'numpy.full', (['this_num_examples', 'numpy.nan'], {}), '(this_num_examples, numpy.nan)\n', (36639, 36669), False, 'import numpy\n'), ((36822, 36885), 'numpy.concatenate', 'numpy.concatenate', (['(max_target_values, these_max_target_values)'], {}), '((max_target_values, these_max_target_values))\n', (36839, 36885), False, 'import numpy\n'), ((40324, 40377), 'numpy.exp', 'numpy.exp', (['(-1 * dense_layer_indices / e_folding_param)'], {}), '(-1 * dense_layer_indices / e_folding_param)\n', (40333, 40377), False, 'import numpy\n'), ((43266, 43288), 'keras.layers.Flatten', 'keras.layers.Flatten', ([], {}), '()\n', (43286, 43288), False, 'import keras\n'), ((44409, 44574), 'keras.layers.Dense', 'keras.layers.Dense', (['(1)'], {'activation': 'None', 'use_bias': '(True)', 'kernel_initializer': '"""glorot_uniform"""', 'bias_initializer': '"""zeros"""', 'kernel_regularizer': 'regularizer_object'}), "(1, activation=None, use_bias=True, kernel_initializer=\n 'glorot_uniform', bias_initializer='zeros', kernel_regularizer=\n regularizer_object)\n", (44427, 44574), False, 'import keras\n'), ((44645, 44679), 'keras.layers.Activation', 'keras.layers.Activation', (['"""sigmoid"""'], {}), "('sigmoid')\n", (44668, 44679), False, 'import keras\n'), ((48347, 48435), 'numpy.linspace', 'numpy.linspace', (['(0)', '(num_examples_in_memory - 1)'], {'num': 'num_examples_in_memory', 'dtype': 'int'}), '(0, num_examples_in_memory - 1, num=num_examples_in_memory,\n dtype=int)\n', (48361, 48435), False, 'import numpy\n'), ((48481, 48559), 'numpy.random.choice', 'numpy.random.choice', (['batch_indices'], {'size': 'num_examples_per_batch', 'replace': '(False)'}), '(batch_indices, size=num_examples_per_batch, replace=False)\n', (48500, 48559), False, 'import numpy\n'), ((51306, 51476), 'keras.callbacks.ModelCheckpoint', 'keras.callbacks.ModelCheckpoint', ([], {'filepath': 'output_model_file_name', 'monitor': '"""loss"""', 'verbose': '(1)', 'save_best_only': '(False)', 'save_weights_only': '(False)', 'mode': '"""min"""', 'period': '(1)'}), "(filepath=output_model_file_name, monitor=\n 'loss', verbose=1, save_best_only=False, save_weights_only=False, mode=\n 'min', period=1)\n", (51337, 51476), False, 'import keras\n'), ((51542, 51714), 'keras.callbacks.ModelCheckpoint', 'keras.callbacks.ModelCheckpoint', ([], {'filepath': 'output_model_file_name', 'monitor': '"""val_loss"""', 'verbose': '(1)', 'save_best_only': '(True)', 'save_weights_only': '(False)', 'mode': '"""min"""', 'period': '(1)'}), "(filepath=output_model_file_name, monitor=\n 'val_loss', verbose=1, save_best_only=True, save_weights_only=False,\n mode='min', period=1)\n", (51573, 51714), False, 'import keras\n'), ((57777, 57816), 'json.dump', 'json.dump', (['new_metadata_dict', 'this_file'], {}), '(new_metadata_dict, this_file)\n', (57786, 57816), False, 'import json\n'), ((58166, 58186), 'json.load', 'json.load', (['this_file'], {}), '(this_file)\n', (58175, 58186), False, 'import json\n'), ((61230, 61338), 'numpy.linspace', 'numpy.linspace', (['this_first_index', 'this_last_index'], {'num': '(this_last_index - this_first_index + 1)', 'dtype': 'int'}), '(this_first_index, this_last_index, num=this_last_index -\n this_first_index + 
1, dtype=int)\n', (61244, 61338), False, 'import numpy\n'), ((72116, 72149), 'numpy.array', 'numpy.array', (['highest_cost_by_step'], {}), '(highest_cost_by_step)\n', (72127, 72149), False, 'import numpy\n'), ((72271, 72295), 'numpy.array', 'numpy.array', (['costs_step1'], {}), '(costs_step1)\n', (72282, 72295), False, 'import numpy\n'), ((75418, 75464), 'matplotlib.pyplot.xlabel', 'pyplot.xlabel', (['"""Cost (percentage of original)"""'], {}), "('Cost (percentage of original)')\n", (75431, 75464), True, 'import matplotlib.pyplot as pyplot\n'), ((75483, 75504), 'matplotlib.pyplot.xlabel', 'pyplot.xlabel', (['"""Cost"""'], {}), "('Cost')\n", (75496, 75504), True, 'import matplotlib.pyplot as pyplot\n'), ((76901, 76947), 'matplotlib.pyplot.xlabel', 'pyplot.xlabel', (['"""Cost (percentage of original)"""'], {}), "('Cost (percentage of original)')\n", (76914, 76947), True, 'import matplotlib.pyplot as pyplot\n'), ((76966, 76987), 'matplotlib.pyplot.xlabel', 'pyplot.xlabel', (['"""Cost"""'], {}), "('Cost')\n", (76979, 76987), True, 'import matplotlib.pyplot as pyplot\n'), ((80731, 80771), 'copy.deepcopy', 'copy.deepcopy', (['init_function_or_matrices'], {}), '(init_function_or_matrices)\n', (80744, 80771), False, 'import copy\n'), ((82444, 82469), 'numpy.round', 'numpy.round', (['target_class'], {}), '(target_class)\n', (82455, 82469), False, 'import numpy\n'), ((82496, 82523), 'numpy.round', 'numpy.round', (['num_iterations'], {}), '(num_iterations)\n', (82507, 82523), False, 'import numpy\n'), ((83172, 83244), 'keras.backend.mean', 'K.mean', (['((cnn_model_object.layers[-1].output[..., target_class] - 1) ** 2)'], {}), '((cnn_model_object.layers[-1].output[..., target_class] - 1) ** 2)\n', (83178, 83244), True, 'from keras import backend as K\n'), ((95491, 95516), 'numpy.round', 'numpy.round', (['target_class'], {}), '(target_class)\n', (95502, 95516), False, 'import numpy\n'), ((96073, 96145), 'keras.backend.mean', 'K.mean', (['((cnn_model_object.layers[-1].output[..., target_class] - 1) ** 2)'], {}), '((cnn_model_object.layers[-1].output[..., target_class] - 1) ** 2)\n', (96079, 96145), True, 'from keras import backend as K\n'), ((98063, 98125), 'numpy.round', 'numpy.round', (['(1 + max_absolute_contour_level / contour_interval)'], {}), '(1 + max_absolute_contour_level / contour_interval)\n', (98074, 98125), False, 'import numpy\n'), ((99848, 99907), 'numpy.unravel_index', 'numpy.unravel_index', (['m', '(num_panel_rows, num_panel_columns)'], {}), '(m, (num_panel_rows, num_panel_columns))\n', (99867, 99907), False, 'import numpy\n'), ((101751, 101802), 'numpy.absolute', 'numpy.absolute', (['predictor_matrix[..., wind_indices]'], {}), '(predictor_matrix[..., wind_indices])\n', (101765, 101802), False, 'import numpy\n'), ((102187, 102218), 'numpy.absolute', 'numpy.absolute', (['saliency_matrix'], {}), '(saliency_matrix)\n', (102201, 102218), False, 'import numpy\n'), ((104054, 104105), 'numpy.absolute', 'numpy.absolute', (['predictor_matrix[..., wind_indices]'], {}), '(predictor_matrix[..., wind_indices])\n', (104068, 104105), False, 'import numpy\n'), ((104490, 104521), 'numpy.absolute', 'numpy.absolute', (['saliency_matrix'], {}), '(saliency_matrix)\n', (104504, 104521), False, 'import numpy\n'), ((106560, 106611), 'numpy.absolute', 'numpy.absolute', (['predictor_matrix[..., wind_indices]'], {}), '(predictor_matrix[..., wind_indices])\n', (106574, 106611), False, 'import numpy\n'), ((106996, 107027), 'numpy.absolute', 'numpy.absolute', (['saliency_matrix'], {}), '(saliency_matrix)\n', 
(107010, 107027), False, 'import numpy\n'), ((109066, 109117), 'numpy.absolute', 'numpy.absolute', (['predictor_matrix[..., wind_indices]'], {}), '(predictor_matrix[..., wind_indices])\n', (109080, 109117), False, 'import numpy\n'), ((109502, 109533), 'numpy.absolute', 'numpy.absolute', (['saliency_matrix'], {}), '(saliency_matrix)\n', (109516, 109533), False, 'import numpy\n'), ((111443, 111473), 'numpy.sum', 'numpy.sum', (['small_weight_matrix'], {}), '(small_weight_matrix)\n', (111452, 111473), False, 'import numpy\n'), ((113711, 113781), 'numpy.round', 'numpy.round', (['(num_input_features / (first_num_rows * first_num_columns))'], {}), '(num_input_features / (first_num_rows * first_num_columns))\n', (113722, 113781), False, 'import numpy\n'), ((113817, 113912), 'keras.layers.Reshape', 'keras.layers.Reshape', ([], {'target_shape': '(first_num_rows, first_num_columns, current_num_filters)'}), '(target_shape=(first_num_rows, first_num_columns,\n current_num_filters))\n', (113837, 113912), False, 'import keras\n'), ((118553, 118582), 'numpy.where', 'numpy.where', (['flattening_flags'], {}), '(flattening_flags)\n', (118564, 118582), False, 'import numpy\n'), ((119531, 119587), 'numpy.array', 'numpy.array', (['cnn_model_object.input.shape[1:]'], {'dtype': 'int'}), '(cnn_model_object.input.shape[1:], dtype=int)\n', (119542, 119587), False, 'import numpy\n'), ((122652, 122740), 'numpy.linspace', 'numpy.linspace', (['(0)', '(num_examples_in_memory - 1)'], {'num': 'num_examples_in_memory', 'dtype': 'int'}), '(0, num_examples_in_memory - 1, num=num_examples_in_memory,\n dtype=int)\n', (122666, 122740), False, 'import numpy\n'), ((122786, 122864), 'numpy.random.choice', 'numpy.random.choice', (['batch_indices'], {'size': 'num_examples_per_batch', 'replace': '(False)'}), '(batch_indices, size=num_examples_per_batch, replace=False)\n', (122805, 122864), False, 'import numpy\n'), ((125771, 125941), 'keras.callbacks.ModelCheckpoint', 'keras.callbacks.ModelCheckpoint', ([], {'filepath': 'output_model_file_name', 'monitor': '"""loss"""', 'verbose': '(1)', 'save_best_only': '(False)', 'save_weights_only': '(False)', 'mode': '"""min"""', 'period': '(1)'}), "(filepath=output_model_file_name, monitor=\n 'loss', verbose=1, save_best_only=False, save_weights_only=False, mode=\n 'min', period=1)\n", (125802, 125941), False, 'import keras\n'), ((126007, 126179), 'keras.callbacks.ModelCheckpoint', 'keras.callbacks.ModelCheckpoint', ([], {'filepath': 'output_model_file_name', 'monitor': '"""val_loss"""', 'verbose': '(1)', 'save_best_only': '(True)', 'save_weights_only': '(False)', 'mode': '"""min"""', 'period': '(1)'}), "(filepath=output_model_file_name, monitor=\n 'val_loss', verbose=1, save_best_only=True, save_weights_only=False,\n mode='min', period=1)\n", (126038, 126179), False, 'import keras\n'), ((135748, 135782), 'numpy.mean', 'numpy.mean', (['feature_matrix'], {'axis': '(0)'}), '(feature_matrix, axis=0)\n', (135758, 135782), False, 'import numpy\n'), ((135821, 135862), 'numpy.std', 'numpy.std', (['feature_matrix'], {'axis': '(0)', 'ddof': '(1)'}), '(feature_matrix, axis=0, ddof=1)\n', (135830, 135862), False, 'import numpy\n'), ((138386, 138427), 'numpy.linalg.svd', 'numpy.linalg.svd', (['baseline_feature_matrix'], {}), '(baseline_feature_matrix)\n', (138402, 138427), False, 'import numpy\n'), ((138508, 138530), 'numpy.sum', 'numpy.sum', (['eigenvalues'], {}), '(eigenvalues)\n', (138517, 138530), False, 'import numpy\n'), ((139625, 139672), 'numpy.transpose', 'numpy.transpose', 
(['svd_dictionary[EOF_MATRIX_KEY]'], {}), '(svd_dictionary[EOF_MATRIX_KEY])\n', (139640, 139672), False, 'import numpy\n'), ((144443, 144483), 'numpy.full', 'numpy.full', (['num_test_examples', 'numpy.nan'], {}), '(num_test_examples, numpy.nan)\n', (144453, 144483), False, 'import numpy\n'), ((144518, 144566), 'numpy.full', 'numpy.full', (['test_feature_matrix.shape', 'numpy.nan'], {}), '(test_feature_matrix.shape, numpy.nan)\n', (144528, 144566), False, 'import numpy\n'), ((145038, 145065), 'numpy.nanargmax', 'numpy.nanargmax', (['svd_errors'], {}), '(svd_errors)\n', (145053, 145065), False, 'import numpy\n'), ((151414, 151468), 'numpy.absolute', 'numpy.absolute', (['novelty_matrix[..., temperature_index]'], {}), '(novelty_matrix[..., temperature_index])\n', (151428, 151468), False, 'import numpy\n'), ((151528, 151583), 'numpy.absolute', 'numpy.absolute', (['novelty_matrix[..., reflectivity_index]'], {}), '(novelty_matrix[..., reflectivity_index])\n', (151542, 151583), False, 'import numpy\n'), ((153068, 153113), 'numpy.argsort', 'numpy.argsort', (['(-1 * max_target_by_example_s01)'], {}), '(-1 * max_target_by_example_s01)\n', (153081, 153113), False, 'import numpy\n'), ((9058, 9167), 'numpy.logical_and', 'numpy.logical_and', (['(file_times_unix_sec >= first_time_unix_sec)', '(file_times_unix_sec <= last_time_unix_sec)'], {}), '(file_times_unix_sec >= first_time_unix_sec, \n file_times_unix_sec <= last_time_unix_sec)\n', (9075, 9167), False, 'import numpy\n'), ((10806, 10875), 'numpy.concatenate', 'numpy.concatenate', (['(predictor_matrix, this_predictor_matrix)'], {'axis': '(-1)'}), '((predictor_matrix, this_predictor_matrix), axis=-1)\n', (10823, 10875), False, 'import numpy\n'), ((11861, 11891), 'copy.deepcopy', 'copy.deepcopy', (['this_image_dict'], {}), '(this_image_dict)\n', (11874, 11891), False, 'import copy\n'), ((11989, 12065), 'numpy.concatenate', 'numpy.concatenate', (['(image_dict[this_key], this_image_dict[this_key])'], {'axis': '(0)'}), '((image_dict[this_key], this_image_dict[this_key]), axis=0)\n', (12006, 12065), False, 'import numpy\n'), ((19725, 19756), 'numpy.diff', 'numpy.diff', (['x_coords_unique[:2]'], {}), '(x_coords_unique[:2])\n', (19735, 19756), False, 'import numpy\n'), ((19945, 19976), 'numpy.diff', 'numpy.diff', (['y_coords_unique[:2]'], {}), '(y_coords_unique[:2])\n', (19955, 19976), False, 'import numpy\n'), ((24245, 24271), 'numpy.sqrt', 'numpy.sqrt', (['num_predictors'], {}), '(num_predictors)\n', (24255, 24271), False, 'import numpy\n'), ((26757, 26804), 'numpy.percentile', 'numpy.percentile', (['temperature_matrix_kelvins', '(1)'], {}), '(temperature_matrix_kelvins, 1)\n', (26773, 26804), False, 'import numpy\n'), ((26838, 26886), 'numpy.percentile', 'numpy.percentile', (['temperature_matrix_kelvins', '(99)'], {}), '(temperature_matrix_kelvins, 99)\n', (26854, 26886), False, 'import numpy\n'), ((27244, 27275), 'numpy.argmax', 'numpy.argmax', (['target_matrix_s01'], {}), '(target_matrix_s01)\n', (27256, 27275), False, 'import numpy\n'), ((27731, 27778), 'numpy.percentile', 'numpy.percentile', (['temperature_matrix_kelvins', '(1)'], {}), '(temperature_matrix_kelvins, 1)\n', (27747, 27778), False, 'import numpy\n'), ((27812, 27860), 'numpy.percentile', 'numpy.percentile', (['temperature_matrix_kelvins', '(99)'], {}), '(temperature_matrix_kelvins, 99)\n', (27828, 27860), False, 'import numpy\n'), ((28916, 28938), 'numpy.mean', 'numpy.mean', (['new_values'], {}), '(new_values)\n', (28926, 28938), False, 'import numpy\n'), ((29277, 29304), 'numpy.mean', 
'numpy.mean', (['(new_values ** 2)'], {}), '(new_values ** 2)\n', (29287, 29304), False, 'import numpy\n'), ((33045, 33081), 'numpy.mean', 'numpy.mean', (['predictor_matrix[..., m]'], {}), '(predictor_matrix[..., m])\n', (33055, 33081), False, 'import numpy\n'), ((33107, 33150), 'numpy.std', 'numpy.std', (['predictor_matrix[..., m]'], {'ddof': '(1)'}), '(predictor_matrix[..., m], ddof=1)\n', (33116, 33150), False, 'import numpy\n'), ((33205, 33241), 'numpy.array', 'numpy.array', (['[this_mean, this_stdev]'], {}), '([this_mean, this_stdev])\n', (33216, 33241), False, 'import numpy\n'), ((36755, 36792), 'numpy.max', 'numpy.max', (['this_target_matrix[i, ...]'], {}), '(this_target_matrix[i, ...])\n', (36764, 36792), False, 'import numpy\n'), ((38281, 38313), 'numpy.max', 'numpy.max', (['target_matrix[i, ...]'], {}), '(target_matrix[i, ...])\n', (38290, 38313), False, 'import numpy\n'), ((38725, 38773), 'numpy.max', 'numpy.max', (['image_dict[TARGET_MATRIX_KEY][i, ...]'], {}), '(image_dict[TARGET_MATRIX_KEY][i, ...])\n', (38734, 38773), False, 'import numpy\n'), ((40413, 40445), 'numpy.round', 'numpy.round', (['num_inputs_by_layer'], {}), '(num_inputs_by_layer)\n', (40424, 40445), False, 'import numpy\n'), ((40547, 40589), 'numpy.array', 'numpy.array', (['[num_output_units]'], {'dtype': 'int'}), '([num_output_units], dtype=int)\n', (40558, 40589), False, 'import numpy\n'), ((42846, 43025), 'keras.layers.MaxPooling2D', 'keras.layers.MaxPooling2D', ([], {'pool_size': '(NUM_POOLING_ROWS, NUM_POOLING_COLUMNS)', 'strides': '(NUM_POOLING_ROWS, NUM_POOLING_COLUMNS)', 'padding': '"""valid"""', 'data_format': '"""channels_last"""'}), "(pool_size=(NUM_POOLING_ROWS, NUM_POOLING_COLUMNS),\n strides=(NUM_POOLING_ROWS, NUM_POOLING_COLUMNS), padding='valid',\n data_format='channels_last')\n", (42871, 43025), False, 'import keras\n'), ((43584, 43776), 'keras.layers.Dense', 'keras.layers.Dense', (['num_outputs_by_dense_layer[k]'], {'activation': 'None', 'use_bias': '(True)', 'kernel_initializer': '"""glorot_uniform"""', 'bias_initializer': '"""zeros"""', 'kernel_regularizer': 'regularizer_object'}), "(num_outputs_by_dense_layer[k], activation=None, use_bias\n =True, kernel_initializer='glorot_uniform', bias_initializer='zeros',\n kernel_regularizer=regularizer_object)\n", (43602, 43776), False, 'import keras\n'), ((43868, 43912), 'keras.layers.LeakyReLU', 'keras.layers.LeakyReLU', ([], {'alpha': 'SLOPE_FOR_RELU'}), '(alpha=SLOPE_FOR_RELU)\n', (43890, 43912), False, 'import keras\n'), ((44823, 44878), 'keras.layers.Dropout', 'keras.layers.Dropout', ([], {'rate': 'DENSE_LAYER_DROPOUT_FRACTION'}), '(rate=DENSE_LAYER_DROPOUT_FRACTION)\n', (44843, 44878), False, 'import keras\n'), ((45175, 45198), 'keras.optimizers.Adam', 'keras.optimizers.Adam', ([], {}), '()\n', (45196, 45198), False, 'import keras\n'), ((57255, 57292), 'numpy.array', 'numpy.array', (['this_norm_dict[this_key]'], {}), '(this_norm_dict[this_key])\n', (57266, 57292), False, 'import numpy\n'), ((61736, 61796), 'numpy.concatenate', 'numpy.concatenate', (['(output_array, this_output_array)'], {'axis': '(0)'}), '((output_array, this_output_array), axis=0)\n', (61753, 61796), False, 'import numpy\n'), ((74754, 74799), 'numpy.array', 'numpy.array', (['[result_dict[ORIGINAL_COST_KEY]]'], {}), '([result_dict[ORIGINAL_COST_KEY]])\n', (74765, 74799), False, 'import numpy\n'), ((76178, 76223), 'numpy.array', 'numpy.array', (['[result_dict[ORIGINAL_COST_KEY]]'], {}), '([result_dict[ORIGINAL_COST_KEY]])\n', (76189, 76223), False, 'import numpy\n'), ((80454, 
80465), 'keras.backend.epsilon', 'K.epsilon', ([], {}), '()\n', (80463, 80465), True, 'from keras import backend as K\n'), ((81321, 81338), 'numpy.mod', 'numpy.mod', (['j', '(100)'], {}), '(j, 100)\n', (81330, 81338), False, 'import numpy\n'), ((82872, 82933), 'keras.backend.mean', 'K.mean', (['((cnn_model_object.layers[-1].output[..., 0] - 1) ** 2)'], {}), '((cnn_model_object.layers[-1].output[..., 0] - 1) ** 2)\n', (82878, 82933), True, 'from keras import backend as K\n'), ((83004, 83059), 'keras.backend.mean', 'K.mean', (['(cnn_model_object.layers[-1].output[..., 0] ** 2)'], {}), '(cnn_model_object.layers[-1].output[..., 0] ** 2)\n', (83010, 83059), True, 'from keras import backend as K\n'), ((88685, 88716), 'numpy.argmax', 'numpy.argmax', (['target_matrix_s01'], {}), '(target_matrix_s01)\n', (88697, 88716), False, 'import numpy\n'), ((91219, 91250), 'numpy.argmax', 'numpy.argmax', (['target_matrix_s01'], {}), '(target_matrix_s01)\n', (91231, 91250), False, 'import numpy\n'), ((94596, 94630), 'keras.backend.std', 'K.std', (['list_of_gradient_tensors[i]'], {}), '(list_of_gradient_tensors[i])\n', (94601, 94630), True, 'from keras import backend as K\n'), ((94632, 94643), 'keras.backend.epsilon', 'K.epsilon', ([], {}), '()\n', (94641, 94643), True, 'from keras import backend as K\n'), ((95773, 95834), 'keras.backend.mean', 'K.mean', (['((cnn_model_object.layers[-1].output[..., 0] - 1) ** 2)'], {}), '((cnn_model_object.layers[-1].output[..., 0] - 1) ** 2)\n', (95779, 95834), True, 'from keras import backend as K\n'), ((95905, 95960), 'keras.backend.mean', 'K.mean', (['(cnn_model_object.layers[-1].output[..., 0] ** 2)'], {}), '(cnn_model_object.layers[-1].output[..., 0] ** 2)\n', (95911, 95960), True, 'from keras import backend as K\n'), ((97639, 97670), 'numpy.diff', 'numpy.diff', (['x_coords_unique[:2]'], {}), '(x_coords_unique[:2])\n', (97649, 97670), False, 'import numpy\n'), ((97859, 97890), 'numpy.diff', 'numpy.diff', (['y_coords_unique[:2]'], {}), '(y_coords_unique[:2])\n', (97869, 97890), False, 'import numpy\n'), ((98426, 98457), 'numpy.min', 'numpy.min', (['these_contour_levels'], {}), '(these_contour_levels)\n', (98435, 98457), False, 'import numpy\n'), ((98472, 98503), 'numpy.max', 'numpy.max', (['these_contour_levels'], {}), '(these_contour_levels)\n', (98481, 98503), False, 'import numpy\n'), ((98800, 98831), 'numpy.min', 'numpy.min', (['these_contour_levels'], {}), '(these_contour_levels)\n', (98809, 98831), False, 'import numpy\n'), ((98846, 98877), 'numpy.max', 'numpy.max', (['these_contour_levels'], {}), '(these_contour_levels)\n', (98855, 98877), False, 'import numpy\n'), ((105443, 105474), 'numpy.argmax', 'numpy.argmax', (['target_matrix_s01'], {}), '(target_matrix_s01)\n', (105455, 105474), False, 'import numpy\n'), ((107949, 107980), 'numpy.argmax', 'numpy.argmax', (['target_matrix_s01'], {}), '(target_matrix_s01)\n', (107961, 107980), False, 'import numpy\n'), ((113340, 113388), 'numpy.round', 'numpy.round', (['((NUM_SMOOTHING_FILTER_ROWS - 1) / 2)'], {}), '((NUM_SMOOTHING_FILTER_ROWS - 1) / 2)\n', (113351, 113388), False, 'import numpy\n'), ((113453, 113504), 'numpy.round', 'numpy.round', (['((NUM_SMOOTHING_FILTER_COLUMNS - 1) / 2)'], {}), '((NUM_SMOOTHING_FILTER_COLUMNS - 1) / 2)\n', (113464, 113504), False, 'import numpy\n'), ((116670, 116702), 'numpy.zeros', 'numpy.zeros', (['current_num_filters'], {}), '(current_num_filters)\n', (116681, 116702), False, 'import numpy\n'), ((117872, 117895), 'keras.optimizers.Adam', 'keras.optimizers.Adam', ([], {}), '()\n', 
(117893, 117895), False, 'import keras\n'), ((132889, 132920), 'numpy.argmax', 'numpy.argmax', (['target_matrix_s01'], {}), '(target_matrix_s01)\n', (132901, 132920), False, 'import numpy\n'), ((138986, 139013), 'numpy.transpose', 'numpy.transpose', (['eof_matrix'], {}), '(eof_matrix)\n', (139001, 139013), False, 'import numpy\n'), ((143839, 143876), 'numpy.array', 'numpy.array', (['novel_indices'], {'dtype': 'int'}), '(novel_indices, dtype=int)\n', (143850, 143876), False, 'import numpy\n'), ((143920, 144024), 'numpy.concatenate', 'numpy.concatenate', (['(baseline_feature_matrix, test_feature_matrix[novel_indices_numpy, ...])'], {'axis': '(0)'}), '((baseline_feature_matrix, test_feature_matrix[\n novel_indices_numpy, ...]), axis=0)\n', (143937, 144024), False, 'import numpy\n'), ((144110, 144176), 'numpy.delete', 'numpy.delete', (['test_feature_matrix'], {'obj': 'novel_indices_numpy', 'axis': '(0)'}), '(test_feature_matrix, obj=novel_indices_numpy, axis=0)\n', (144122, 144176), False, 'import numpy\n'), ((144900, 144985), 'numpy.linalg.norm', 'numpy.linalg.norm', (['(test_feature_matrix_svd[i, ...] - test_feature_matrix[i, ...])'], {}), '(test_feature_matrix_svd[i, ...] - test_feature_matrix[i, ...]\n )\n', (144917, 144985), False, 'import numpy\n'), ((145631, 145710), 'numpy.concatenate', 'numpy.concatenate', (['(novel_image_matrix_upconv, new_image_matrix_upconv)'], {'axis': '(0)'}), '((novel_image_matrix_upconv, new_image_matrix_upconv), axis=0)\n', (145648, 145710), False, 'import numpy\n'), ((145772, 145863), 'numpy.concatenate', 'numpy.concatenate', (['(novel_image_matrix_upconv_svd, new_image_matrix_upconv_svd)'], {'axis': '(0)'}), '((novel_image_matrix_upconv_svd,\n new_image_matrix_upconv_svd), axis=0)\n', (145789, 145863), False, 'import numpy\n'), ((152975, 153011), 'numpy.max', 'numpy.max', (['target_matrix_s01[i, ...]'], {}), '(target_matrix_s01[i, ...])\n', (152984, 153011), False, 'import numpy\n'), ((41801, 42147), 'keras.layers.Conv2D', 'keras.layers.Conv2D', ([], {'filters': 'current_num_filters', 'kernel_size': '(NUM_CONV_FILTER_ROWS, NUM_CONV_FILTER_COLUMNS)', 'strides': '(1, 1)', 'padding': '"""valid"""', 'data_format': '"""channels_last"""', 'dilation_rate': '(1, 1)', 'activation': 'None', 'use_bias': '(True)', 'kernel_initializer': '"""glorot_uniform"""', 'bias_initializer': '"""zeros"""', 'kernel_regularizer': 'regularizer_object'}), "(filters=current_num_filters, kernel_size=(\n NUM_CONV_FILTER_ROWS, NUM_CONV_FILTER_COLUMNS), strides=(1, 1), padding\n ='valid', data_format='channels_last', dilation_rate=(1, 1), activation\n =None, use_bias=True, kernel_initializer='glorot_uniform',\n bias_initializer='zeros', kernel_regularizer=regularizer_object)\n", (41820, 42147), False, 'import keras\n'), ((42300, 42344), 'keras.layers.LeakyReLU', 'keras.layers.LeakyReLU', ([], {'alpha': 'SLOPE_FOR_RELU'}), '(alpha=SLOPE_FOR_RELU)\n', (42322, 42344), False, 'import keras\n'), ((44046, 44101), 'keras.layers.Dropout', 'keras.layers.Dropout', ([], {'rate': 'DENSE_LAYER_DROPOUT_FRACTION'}), '(rate=DENSE_LAYER_DROPOUT_FRACTION)\n', (44066, 44101), False, 'import keras\n'), ((44226, 44291), 'keras.layers.BatchNormalization', 'keras.layers.BatchNormalization', ([], {'axis': '(-1)', 'center': '(True)', 'scale': '(True)'}), '(axis=-1, center=True, scale=True)\n', (44257, 44291), False, 'import keras\n'), ((47941, 48035), 'numpy.concatenate', 'numpy.concatenate', (['(full_predictor_matrix, this_image_dict[PREDICTOR_MATRIX_KEY])'], {'axis': '(0)'}), '((full_predictor_matrix, 
this_image_dict[\n PREDICTOR_MATRIX_KEY]), axis=0)\n', (47958, 48035), False, 'import numpy\n'), ((48131, 48218), 'numpy.concatenate', 'numpy.concatenate', (['(full_target_matrix, this_image_dict[TARGET_MATRIX_KEY])'], {'axis': '(0)'}), '((full_target_matrix, this_image_dict[TARGET_MATRIX_KEY]),\n axis=0)\n', (48148, 48218), False, 'import numpy\n'), ((49111, 49136), 'numpy.mean', 'numpy.mean', (['target_values'], {}), '(target_values)\n', (49121, 49136), False, 'import numpy\n'), ((70499, 70576), 'numpy.random.permutation', 'numpy.random.permutation', (['this_predictor_matrix[i, ..., this_predictor_index]'], {}), '(this_predictor_matrix[i, ..., this_predictor_index])\n', (70523, 70576), False, 'import numpy\n'), ((80399, 80439), 'keras.backend.mean', 'K.mean', (['(list_of_gradient_tensors[i] ** 2)'], {}), '(list_of_gradient_tensors[i] ** 2)\n', (80405, 80439), True, 'from keras import backend as K\n'), ((80557, 80575), 'keras.backend.learning_phase', 'K.learning_phase', ([], {}), '()\n', (80573, 80575), True, 'from keras import backend as K\n'), ((94735, 94753), 'keras.backend.learning_phase', 'K.learning_phase', ([], {}), '()\n', (94751, 94753), True, 'from keras import backend as K\n'), ((114504, 114914), 'keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', ([], {'filters': 'current_num_filters', 'kernel_size': '(NUM_CONV_FILTER_ROWS, NUM_CONV_FILTER_COLUMNS)', 'strides': '(this_upsampling_factor, this_upsampling_factor)', 'padding': 'this_padding_arg', 'data_format': '"""channels_last"""', 'dilation_rate': '(1, 1)', 'activation': 'None', 'use_bias': '(True)', 'kernel_initializer': '"""glorot_uniform"""', 'bias_initializer': '"""zeros"""', 'kernel_regularizer': 'regularizer_object'}), "(filters=current_num_filters, kernel_size=(\n NUM_CONV_FILTER_ROWS, NUM_CONV_FILTER_COLUMNS), strides=(\n this_upsampling_factor, this_upsampling_factor), padding=\n this_padding_arg, data_format='channels_last', dilation_rate=(1, 1),\n activation=None, use_bias=True, kernel_initializer='glorot_uniform',\n bias_initializer='zeros', kernel_regularizer=regularizer_object)\n", (114532, 114914), False, 'import keras\n'), ((115646, 115991), 'keras.layers.Conv2D', 'keras.layers.Conv2D', ([], {'filters': 'current_num_filters', 'kernel_size': '(NUM_CONV_FILTER_ROWS, NUM_CONV_FILTER_COLUMNS)', 'strides': '(1, 1)', 'padding': '"""same"""', 'data_format': '"""channels_last"""', 'dilation_rate': '(1, 1)', 'activation': 'None', 'use_bias': '(True)', 'kernel_initializer': '"""glorot_uniform"""', 'bias_initializer': '"""zeros"""', 'kernel_regularizer': 'regularizer_object'}), "(filters=current_num_filters, kernel_size=(\n NUM_CONV_FILTER_ROWS, NUM_CONV_FILTER_COLUMNS), strides=(1, 1), padding\n ='same', data_format='channels_last', dilation_rate=(1, 1), activation=\n None, use_bias=True, kernel_initializer='glorot_uniform',\n bias_initializer='zeros', kernel_regularizer=regularizer_object)\n", (115665, 115991), False, 'import keras\n'), ((116731, 117154), 'keras.layers.Conv2D', 'keras.layers.Conv2D', ([], {'filters': 'current_num_filters', 'kernel_size': '(NUM_SMOOTHING_FILTER_ROWS, NUM_SMOOTHING_FILTER_COLUMNS)', 'strides': '(1, 1)', 'padding': '"""same"""', 'data_format': '"""channels_last"""', 'dilation_rate': '(1, 1)', 'activation': 'None', 'use_bias': '(True)', 'kernel_initializer': '"""glorot_uniform"""', 'bias_initializer': '"""zeros"""', 'kernel_regularizer': 'regularizer_object', 'trainable': '(False)', 'weights': '[this_weight_matrix, this_bias_vector]'}), "(filters=current_num_filters, 
kernel_size=(\n NUM_SMOOTHING_FILTER_ROWS, NUM_SMOOTHING_FILTER_COLUMNS), strides=(1, 1\n ), padding='same', data_format='channels_last', dilation_rate=(1, 1),\n activation=None, use_bias=True, kernel_initializer='glorot_uniform',\n bias_initializer='zeros', kernel_regularizer=regularizer_object,\n trainable=False, weights=[this_weight_matrix, this_bias_vector])\n", (116750, 117154), False, 'import keras\n'), ((117398, 117442), 'keras.layers.LeakyReLU', 'keras.layers.LeakyReLU', ([], {'alpha': 'SLOPE_FOR_RELU'}), '(alpha=SLOPE_FOR_RELU)\n', (117420, 117442), False, 'import keras\n'), ((117575, 117640), 'keras.layers.BatchNormalization', 'keras.layers.BatchNormalization', ([], {'axis': '(-1)', 'center': '(True)', 'scale': '(True)'}), '(axis=-1, center=True, scale=True)\n', (117606, 117640), False, 'import keras\n'), ((122433, 122524), 'numpy.concatenate', 'numpy.concatenate', (['(full_target_matrix, this_image_dict[PREDICTOR_MATRIX_KEY])'], {'axis': '(0)'}), '((full_target_matrix, this_image_dict[PREDICTOR_MATRIX_KEY\n ]), axis=0)\n', (122450, 122524), False, 'import numpy\n'), ((138698, 138773), 'numpy.where', 'numpy.where', (['(cumulative_explained_variances >= fraction_of_variance_to_keep)'], {}), '(cumulative_explained_variances >= fraction_of_variance_to_keep)\n', (138709, 138773), False, 'import numpy\n'), ((42493, 42547), 'keras.layers.Dropout', 'keras.layers.Dropout', ([], {'rate': 'CONV_LAYER_DROPOUT_FRACTION'}), '(rate=CONV_LAYER_DROPOUT_FRACTION)\n', (42513, 42547), False, 'import keras\n'), ((42688, 42753), 'keras.layers.BatchNormalization', 'keras.layers.BatchNormalization', ([], {'axis': '(-1)', 'center': '(True)', 'scale': '(True)'}), '(axis=-1, center=True, scale=True)\n', (42719, 42753), False, 'import keras\n'), ((66260, 66294), 'numpy.log2', 'numpy.log2', (['forecast_probabilities'], {}), '(forecast_probabilities)\n', (66270, 66294), False, 'import numpy\n'), ((66327, 66365), 'numpy.log2', 'numpy.log2', (['(1 - forecast_probabilities)'], {}), '(1 - forecast_probabilities)\n', (66337, 66365), False, 'import numpy\n'), ((114259, 114295), 'numpy.round', 'numpy.round', (['(current_num_filters / 2)'], {}), '(current_num_filters / 2)\n', (114270, 114295), False, 'import numpy\n'), ((116173, 116244), 'keras.layers.ZeroPadding2D', 'keras.layers.ZeroPadding2D', ([], {'padding': '(1, 1)', 'data_format': '"""channels_last"""'}), "(padding=(1, 1), data_format='channels_last')\n", (116199, 116244), False, 'import keras\n'), ((115146, 115289), 'keras.layers.UpSampling2D', 'keras.layers.UpSampling2D', ([], {'size': '(this_upsampling_factor, this_upsampling_factor)', 'data_format': '"""channels_last"""', 'interpolation': '"""nearest"""'}), "(size=(this_upsampling_factor,\n this_upsampling_factor), data_format='channels_last', interpolation=\n 'nearest')\n", (115171, 115289), False, 'import keras\n'), ((115424, 115537), 'keras.layers.UpSampling2D', 'keras.layers.UpSampling2D', ([], {'size': '(this_upsampling_factor, this_upsampling_factor)', 'data_format': '"""channels_last"""'}), "(size=(this_upsampling_factor,\n this_upsampling_factor), data_format='channels_last')\n", (115449, 115537), False, 'import keras\n')]
# libraries import numpy as np from bio_embeddings.embed import ProtTransT5BFDEmbedder import pandas as pd embedder = ProtTransT5BFDEmbedder() ds = pd.read_csv('Sequences_Predict.csv') sequences_Example = list(ds["Sequence"]) num_seq = len(sequences_Example) i = 0 length = 1000 while i < num_seq: print("Doing", i, num_seq) start = i end = i + length sequences = sequences_Example[start:end] embeddings = [] for seq in sequences: embeddings.append(np.mean(np.asarray(embedder.embed(seq)), axis=0)) s_no = start / length filename = 'Embeddings/' + 'T5_' + str(s_no) + '.npz' embeddings = np.asarray(embeddings) # print(embeddings.shape) np.savez_compressed(filename, embeddings) i += length
[ "pandas.read_csv", "numpy.asarray", "numpy.savez_compressed", "bio_embeddings.embed.ProtTransT5BFDEmbedder" ]
[((120, 144), 'bio_embeddings.embed.ProtTransT5BFDEmbedder', 'ProtTransT5BFDEmbedder', ([], {}), '()\n', (142, 144), False, 'from bio_embeddings.embed import ProtTransT5BFDEmbedder\n'), ((151, 187), 'pandas.read_csv', 'pd.read_csv', (['"""Sequences_Predict.csv"""'], {}), "('Sequences_Predict.csv')\n", (162, 187), True, 'import pandas as pd\n'), ((608, 630), 'numpy.asarray', 'np.asarray', (['embeddings'], {}), '(embeddings)\n', (618, 630), True, 'import numpy as np\n'), ((659, 700), 'numpy.savez_compressed', 'np.savez_compressed', (['filename', 'embeddings'], {}), '(filename, embeddings)\n', (678, 700), True, 'import numpy as np\n')]
"""Console script for yandex_checkout_payout.""" import argparse import sys def main(): """Console script for yandex_checkout_payout.""" parser = argparse.ArgumentParser() # parser.add_argument('_', nargs='*') parser.version = '1.0' # parser.add_argument('generate') parser.add_argument('-a', action='append', choices=['generate', 'balance']) parser.add_argument('-p', action='store') # parser.add_argument('-a', action='store') # parser.add_argument('-b', action='store_const', const=42) # parser.add_argument('-c', action='store_true') # parser.add_argument('-d', action='store_false') # parser.add_argument('-e', action='append') # parser.add_argument('-f', action='append_const', const=42) # parser.add_argument('-g', action='count') # parser.add_argument('-h', action='help') parser.add_argument('-v', action='version') args = parser.parse_args() print("Arguments: " + str(args.a)) # print("There are several actions that are already defined and ready to be used. Let’s analyze them in detail:\n" # "store - stores the input value to the Namespace object. (This is the default action.)\n" # "store_const - stores a constant value when the corresponding optional arguments are specified.\n" # "store_true - stores the Boolean value True when the corresponding optional argument is specified and " # "stores a False elsewhere.\n" # "store_false - stores the Boolean value False when the corresponding optional argument is specified and " # "stores True elsewhere.\n" # "append - stores a list, appending a value to the list each time the option is provided.\n" # "append_const - stores a list appending a constant value to the list each time the option is provided.\n" # "count - stores an int that is equal to the times the option has been provided.\n" # "help - shows a help text and exits.\n" # "version - shows the version of the program and exits.\n") return 0 if __name__ == "__main__": sys.exit(main()) # pragma: no cover
[ "argparse.ArgumentParser" ]
[((156, 181), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (179, 181), False, 'import argparse\n')]
from flask_script import Manager, Shell, Server from app import create_app, logger import os # create the application configuration app = create_app(os.environ.get("FLASK_CONFIG", "default")) PORT = 5000 manager = Manager(app) server = Server(host="127.0.0.1", port=PORT) public_server = Server(host="0.0.0.0", port=PORT) def make_shell_context(): """ Makes a shell context :return dictionary object :rtype: dict """ return dict(app=app) manager.add_command("shell", Shell(make_context=make_shell_context)) manager.add_command("runserver", server) manager.add_command("publicserver", public_server) @manager.command def test(cover=False): """Run the unit tests.""" import coverage cov = coverage.coverage(branch=True, include="app/*") cov.start() if cover and not os.environ.get("FLASK_COVERAGE"): import sys os.environ["FLASK_COVERAGE"] = "1" os.execvp(sys.executable, [sys.executable] + sys.argv) import unittest tests = unittest.TestLoader().discover("tests") unittest.TextTestRunner(verbosity=2).run(tests) if cover: cov.stop() cov.save() logger.debug("Coverage Summary:") cov.report() basedir = os.path.abspath(os.path.dirname(__file__)) covdir = os.path.join(basedir, "coverage") # generate html report cov.html_report(directory=covdir) # generate xml report cov.xml_report() logger.debug(f"HTML version: file://{covdir}/index.html") logger.debug(f"XML version: file://{covdir}") cov.erase() @manager.command def profile(length=25, profile_dir=None): """ This module provides a simple WSGI profiler middleware for finding bottlenecks in web application. It uses the profile or cProfile module to do the profiling and writes the stats to the stream provided see: http://werkzeug.pocoo.org/docs/0.9/contrib/profiler/ """ from werkzeug.contrib.profiler import ProfilerMiddleware app.config["PROFILE"] = True app.wsgi_app = ProfilerMiddleware( app.wsgi_app, restrictions=[length], profile_dir=profile_dir ) app.run() if __name__ == "__main__": manager.run()
[ "app.logger.debug", "os.path.join", "unittest.TextTestRunner", "coverage.coverage", "flask_script.Manager", "os.path.dirname", "os.environ.get", "unittest.TestLoader", "flask_script.Shell", "werkzeug.contrib.profiler.ProfilerMiddleware", "os.execvp", "flask_script.Server" ]
[((216, 228), 'flask_script.Manager', 'Manager', (['app'], {}), '(app)\n', (223, 228), False, 'from flask_script import Manager, Shell, Server\n'), ((238, 273), 'flask_script.Server', 'Server', ([], {'host': '"""127.0.0.1"""', 'port': 'PORT'}), "(host='127.0.0.1', port=PORT)\n", (244, 273), False, 'from flask_script import Manager, Shell, Server\n'), ((290, 323), 'flask_script.Server', 'Server', ([], {'host': '"""0.0.0.0"""', 'port': 'PORT'}), "(host='0.0.0.0', port=PORT)\n", (296, 323), False, 'from flask_script import Manager, Shell, Server\n'), ((150, 191), 'os.environ.get', 'os.environ.get', (['"""FLASK_CONFIG"""', '"""default"""'], {}), "('FLASK_CONFIG', 'default')\n", (164, 191), False, 'import os\n'), ((497, 535), 'flask_script.Shell', 'Shell', ([], {'make_context': 'make_shell_context'}), '(make_context=make_shell_context)\n', (502, 535), False, 'from flask_script import Manager, Shell, Server\n'), ((732, 779), 'coverage.coverage', 'coverage.coverage', ([], {'branch': '(True)', 'include': '"""app/*"""'}), "(branch=True, include='app/*')\n", (749, 779), False, 'import coverage\n'), ((2071, 2156), 'werkzeug.contrib.profiler.ProfilerMiddleware', 'ProfilerMiddleware', (['app.wsgi_app'], {'restrictions': '[length]', 'profile_dir': 'profile_dir'}), '(app.wsgi_app, restrictions=[length], profile_dir=profile_dir\n )\n', (2089, 2156), False, 'from werkzeug.contrib.profiler import ProfilerMiddleware\n'), ((923, 977), 'os.execvp', 'os.execvp', (['sys.executable', '([sys.executable] + sys.argv)'], {}), '(sys.executable, [sys.executable] + sys.argv)\n', (932, 977), False, 'import os\n'), ((1164, 1197), 'app.logger.debug', 'logger.debug', (['"""Coverage Summary:"""'], {}), "('Coverage Summary:')\n", (1176, 1197), False, 'from app import create_app, logger\n'), ((1298, 1331), 'os.path.join', 'os.path.join', (['basedir', '"""coverage"""'], {}), "(basedir, 'coverage')\n", (1310, 1331), False, 'import os\n'), ((1471, 1528), 'app.logger.debug', 'logger.debug', (['f"""HTML version: file://{covdir}/index.html"""'], {}), "(f'HTML version: file://{covdir}/index.html')\n", (1483, 1528), False, 'from app import create_app, logger\n'), ((1537, 1582), 'app.logger.debug', 'logger.debug', (['f"""XML version: file://{covdir}"""'], {}), "(f'XML version: file://{covdir}')\n", (1549, 1582), False, 'from app import create_app, logger\n'), ((818, 850), 'os.environ.get', 'os.environ.get', (['"""FLASK_COVERAGE"""'], {}), "('FLASK_COVERAGE')\n", (832, 850), False, 'import os\n'), ((1012, 1033), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (1031, 1033), False, 'import unittest\n'), ((1056, 1092), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (1079, 1092), False, 'import unittest\n'), ((1254, 1279), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1269, 1279), False, 'import os\n')]
import asyncio
import string

import aioredis


async def go(r, key, value):
    await r.set(key, value)
    val = await r.get(key)
    print(f"Got {key} -> {val}")


async def main(loop):
    # Create the pool before entering try/finally so that `r` is always bound
    # when the finally block runs; previously a failed connection raised
    # UnboundLocalError from `r.close()`.
    r = await aioredis.create_redis_pool(
        "redis://localhost", minsize=5, maxsize=10, loop=loop
    )
    try:
        return await asyncio.gather(
            *(
                go(r, i, j)
                for i, j in zip(string.ascii_uppercase, string.ascii_lowercase)
            ),
            return_exceptions=True,
        )
    finally:
        r.close()
        await r.wait_closed()


if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    res = loop.run_until_complete(main(loop))
[ "asyncio.get_event_loop", "aioredis.create_redis_pool" ]
[((642, 666), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (664, 666), False, 'import asyncio\n'), ((216, 302), 'aioredis.create_redis_pool', 'aioredis.create_redis_pool', (['"""redis://localhost"""'], {'minsize': '(5)', 'maxsize': '(10)', 'loop': 'loop'}), "('redis://localhost', minsize=5, maxsize=10, loop\n =loop)\n", (242, 302), False, 'import aioredis\n')]
# *****************************************************************************
# * Copyright 2019 Amazon.com, Inc. and its affiliates. All Rights Reserved. *
# *
# Licensed under the Amazon Software License (the "License"). *
# You may not use this file except in compliance with the License. *
# A copy of the License is located at *
# *
# http://aws.amazon.com/asl/ *
# *
# or in the "license" file accompanying this file. This file is distributed *
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either *
# express or implied. See the License for the specific language governing *
# permissions and limitations under the License. *
# *****************************************************************************
import glob
import os

from predict import Predict

"""
These are SageMaker-compatible functions for inference.
"""


def input_fn(request_body, request_content_type):
    """An input_fn that processes the request body to a tensor"""
    if request_content_type == 'application/binary':
        return request_body
    else:
        # Handle other content-types here or raise an exception if the content
        # type is not supported. Raising a bare string is invalid in Python 3,
        # so raise a proper exception instead.
        raise ValueError("Unsupported content type {}".format(request_content_type))


def model_fn(model_dir):
    """
    Loads the model from disk and returns a model object
    :param model_dir: The directory in which the model is located
    :return: Model object
    """
    # Use os.path.join with a glob pattern so the lookup works on POSIX paths
    model_path = list(glob.glob(os.path.join(model_dir, "*.pth")))[0]

    num_classes = os.environ.get("NUM_CLASSES", None)
    model_factory_name = os.environ.get("MODEL_FACTORY_NAME", None)

    # Validate that the model factory name and number of classes are available as environment variables
    if num_classes is None or model_factory_name is None:
        raise Exception(
            "Please set the environment variable MODEL_FACTORY_NAME to indicate the model factory name {} and NUM_CLASSES to specify the number of classes {}. This setting should match the training values used to train the model".format(
                model_factory_name, num_classes))

    return Predict(model_dict_path=model_path, num_classes=num_classes, model_factory_name=model_factory_name)


def predict_fn(input_data, model):
    """Predict using input and model"""
    return model(input_data)


def output_fn(prediction, content_type):
    """Return prediction formatted according to the content type"""
    return prediction
[ "os.environ.get", "os.path.join", "predict.Predict" ]
[((1918, 1953), 'os.environ.get', 'os.environ.get', (['"""NUM_CLASSES"""', 'None'], {}), "('NUM_CLASSES', None)\n", (1932, 1953), False, 'import os\n'), ((1979, 2021), 'os.environ.get', 'os.environ.get', (['"""MODEL_FACTORY_NAME"""', 'None'], {}), "('MODEL_FACTORY_NAME', None)\n", (1993, 2021), False, 'import os\n'), ((2504, 2607), 'predict.Predict', 'Predict', ([], {'model_dict_path': 'model_path', 'num_classes': 'num_classes', 'model_factory_name': 'model_factory_name'}), '(model_dict_path=model_path, num_classes=num_classes,\n model_factory_name=model_factory_name)\n', (2511, 2607), False, 'from predict import Predict\n'), ((1870, 1893), 'os.path.join', 'os.path.join', (['model_dir'], {}), '(model_dir)\n', (1882, 1893), False, 'import os\n')]
#!/usr/bin/env python3 import sys import re import argparse from collections import defaultdict # This script loads frequently used words in a language, looks up their # pronunciations in a CMU dictionary, then prints an example word + # pronunciation for each phoneme. def main(): parser = argparse.ArgumentParser() parser.add_argument("frequent_words", help="Path to text file with frequent words") parser.add_argument("dictionary", help="Path to CMU dictionary") args = parser.parse_args() # Download frequently used words in the given language with open(args.frequent_words, "r") as word_file: words = set([w.strip().upper() for w in word_file.read().splitlines()]) # phoneme -> [(word, pronunciation), ...] examples = defaultdict(list) # Find pronunciations for each frequently used word with open(args.dictionary, "r") as dict_file: for line in dict_file: line = line.strip() if not line: continue parts = re.split(r"[\t ]+", line) word = parts[0] if "(" in word: word = word[: word.index("(")] # Record example words for each phoneme upper_word = word.upper() if upper_word in words: pronunciation = parts[1:] for phoneme in pronunciation: examples[phoneme].append((word, pronunciation)) # Pick unique example words for every phoneme used_words = set() for phoneme in sorted(examples): # Choose the shortest, unused example word for this phoneme. # Exclude words with 3 or fewer letters. for word, pron in sorted(examples[phoneme], key=lambda kv: len(kv[0])): if len(word) > 3 and (word not in used_words): # Output format is: # phoneme word pronunciation print(phoneme, word, " ".join(pron)) used_words.add(word) break # ----------------------------------------------------------------------------- if __name__ == "__main__": main()
[ "collections.defaultdict", "re.split", "argparse.ArgumentParser" ]
[((298, 323), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (321, 323), False, 'import argparse\n'), ((768, 785), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (779, 785), False, 'from collections import defaultdict\n'), ((1027, 1052), 're.split', 're.split', (['"""[\\\\t ]+"""', 'line'], {}), "('[\\\\t ]+', line)\n", (1035, 1052), False, 'import re\n')]
from math import isclose import numpy as np from scipy.spatial.transform.rotation import Rotation from scipy.spatial.distance import cityblock MIN_OVERLAPPING_BEACONS = 12 scanner_positions = [] class Beacon: def __init__(self, x: int, y: int, z: int): self.pos = np.array([x, y, z]) class Scanner: def __init__(self, index: int): self.index = index self.beacons = None self.differences = None def set_beacons(self, beacons: list[Beacon]): self.beacons = beacons self.differences: list[list[float]] = [] for i in range(len(beacons)): row = [] for j in range(i): difference = np.linalg.norm(np.array(beacons[i].pos) - np.array(beacons[j].pos)) row.append(difference) self.differences.append(row) def get_max_overlaps(scanner1: Scanner, scanner2: Scanner): num_matching_differences = 0 indices_1 = set() indices_2 = set() for i1 in range(len(scanner1.beacons)): # check if there are at least 12 euclidean distance matches for j1 in range(i1): for i2 in range(len(scanner2.beacons)): for j2 in range(i2): if isclose(scanner1.differences[i1][j1], scanner2.differences[i2][j2]): indices_1.add(i1) indices_1.add(j1) indices_2.add(i2) indices_2.add(j2) num_matching_differences += 1 return num_matching_differences, indices_1, indices_2 def is_close(one: list[float], other: list[float]): value = True for i in range(len(one)): if not isclose(one[i], other[i]): value = False return value def get_possible_beacon_rotations(beacons: list[Beacon]): rotations = [] for x_degrees in range(0, 360, 90): for y_degrees in range(0, 360, 90): for z_degrees in range(0, 360, 90): rotation = [] for beacon in beacons: rotation.append(rotate_point(beacon.pos, x_degrees, y_degrees, z_degrees)) rotations.append(rotation) return rotations def rotate_point(point: np.array, x_degrees: int, y_degrees: int, z_degrees: int): x_radians = np.radians(x_degrees) rotation_axis = np.array([1, 0, 0]) rotation_vector = x_radians * rotation_axis rotation = Rotation.from_rotvec(rotation_vector) rotated_point = rotation.apply(point) y_radians = np.radians(y_degrees) rotation_axis = np.array([0, 1, 0]) rotation_vector = y_radians * rotation_axis rotation = Rotation.from_rotvec(rotation_vector) rotated_point = rotation.apply(rotated_point) z_radians = np.radians(z_degrees) rotation_axis = np.array([0, 0, 1]) rotation_vector = z_radians * rotation_axis rotation = Rotation.from_rotvec(rotation_vector) rotated_point = rotation.apply(rotated_point) return np.array(rotated_point) def get_position_pair(positions_1, positions_2, diff): for position_1 in positions_1: for position_2 in positions_2: if isclose(np.linalg.norm(position_1 - position_2), diff): return position_1, position_2 def get_new_beacon_positions(unified_scanner, indices_1, new_scanner): global scanner_positions relevant_unified_beacons = \ [beacon for idx, beacon in enumerate(unified_scanner.beacons) if idx in indices_1] possible_rotations = get_possible_beacon_rotations(new_scanner.beacons) for possible_rotation in possible_rotations: diffs = {} for relevant_unified_beacon in relevant_unified_beacons: for beacon_pos in possible_rotation: diff = np.linalg.norm(relevant_unified_beacon.pos - beacon_pos) key_found = False for key in diffs.keys(): if isclose(diff, key): diffs[key] += 1 key_found = True if not key_found: diffs[diff] = 1 for diff in diffs.keys(): matching_diff_found = False if not matching_diff_found and diffs[diff] >= 12: matching_diff_found = True unified_positions = [beacon.pos for beacon in relevant_unified_beacons] position_pair = get_position_pair(unified_positions, 
possible_rotation, diff) translation_vector = position_pair[0] - position_pair[1] scanner_positions.append(np.array([translation_vector])) for i in range(len(possible_rotation)): possible_rotation[i] = possible_rotation[i] + translation_vector new_beacon_positions = [] for beacon_pos in possible_rotation: already_exists = False for relevant_unified_beacon in relevant_unified_beacons: if is_close(beacon_pos, relevant_unified_beacon.pos): already_exists = True if not already_exists: new_beacon_positions.append(beacon_pos) return new_beacon_positions def main(): global scanner_positions lines = open('input.txt', 'r').readlines() scanners = [] scanner = None beacons = [] same_distance_threshold = (MIN_OVERLAPPING_BEACONS * (MIN_OVERLAPPING_BEACONS - 1)) / 2 for line in lines: if 'scanner' in line: index = int(line.strip()[len('--- scanner '):-len(' ---')]) if scanner is not None: scanner.set_beacons(beacons) scanners.append(scanner) scanner = Scanner(index) beacons = [] else: comma_split = line.strip().split(',') if len(comma_split) == 3: x = int(comma_split[0]) y = int(comma_split[1]) z = int(comma_split[2]) beacon = Beacon(x, y, z) beacons.append(beacon) scanner.set_beacons(beacons) scanners.append(scanner) unified_scanner = scanners[0] scanner_positions.append(np.array([0, 0, 0])) scanners.remove(scanners[0]) while len(scanners) > 0: print(f'{len(scanners)} scanners yet to be unified') step_completed = False for j in range(len(scanners)): if j < len(scanners): if not step_completed: num_overlapping_beacons, indices_1, indices_2 = get_max_overlaps(unified_scanner, scanners[j]) if num_overlapping_beacons >= same_distance_threshold / 2: new_beacon_positions = get_new_beacon_positions(unified_scanner, indices_1, scanners[j]) if new_beacon_positions is not None: unified_beacons = unified_scanner.beacons for pos in new_beacon_positions: unified_beacons.append(Beacon(pos[0], pos[1], pos[2])) unified_scanner.set_beacons(unified_beacons) scanners.remove(scanners[j]) step_completed = True print(f'Unified scanner and scanner at index {j} overlap - added to unified map') max_distance = 0 for i in range(len(scanner_positions)): for j in range(len(scanner_positions)): distance = cityblock(scanner_positions[i], scanner_positions[j]) if distance > max_distance: max_distance = distance print('Max distance between scanners:', round(max_distance)) if __name__ == '__main__': main()
[ "numpy.radians", "scipy.spatial.distance.cityblock", "numpy.array", "math.isclose", "numpy.linalg.norm", "scipy.spatial.transform.rotation.Rotation.from_rotvec" ]
[((2299, 2320), 'numpy.radians', 'np.radians', (['x_degrees'], {}), '(x_degrees)\n', (2309, 2320), True, 'import numpy as np\n'), ((2341, 2360), 'numpy.array', 'np.array', (['[1, 0, 0]'], {}), '([1, 0, 0])\n', (2349, 2360), True, 'import numpy as np\n'), ((2424, 2461), 'scipy.spatial.transform.rotation.Rotation.from_rotvec', 'Rotation.from_rotvec', (['rotation_vector'], {}), '(rotation_vector)\n', (2444, 2461), False, 'from scipy.spatial.transform.rotation import Rotation\n'), ((2521, 2542), 'numpy.radians', 'np.radians', (['y_degrees'], {}), '(y_degrees)\n', (2531, 2542), True, 'import numpy as np\n'), ((2563, 2582), 'numpy.array', 'np.array', (['[0, 1, 0]'], {}), '([0, 1, 0])\n', (2571, 2582), True, 'import numpy as np\n'), ((2646, 2683), 'scipy.spatial.transform.rotation.Rotation.from_rotvec', 'Rotation.from_rotvec', (['rotation_vector'], {}), '(rotation_vector)\n', (2666, 2683), False, 'from scipy.spatial.transform.rotation import Rotation\n'), ((2751, 2772), 'numpy.radians', 'np.radians', (['z_degrees'], {}), '(z_degrees)\n', (2761, 2772), True, 'import numpy as np\n'), ((2793, 2812), 'numpy.array', 'np.array', (['[0, 0, 1]'], {}), '([0, 0, 1])\n', (2801, 2812), True, 'import numpy as np\n'), ((2876, 2913), 'scipy.spatial.transform.rotation.Rotation.from_rotvec', 'Rotation.from_rotvec', (['rotation_vector'], {}), '(rotation_vector)\n', (2896, 2913), False, 'from scipy.spatial.transform.rotation import Rotation\n'), ((2976, 2999), 'numpy.array', 'np.array', (['rotated_point'], {}), '(rotated_point)\n', (2984, 2999), True, 'import numpy as np\n'), ((281, 300), 'numpy.array', 'np.array', (['[x, y, z]'], {}), '([x, y, z])\n', (289, 300), True, 'import numpy as np\n'), ((6195, 6214), 'numpy.array', 'np.array', (['[0, 0, 0]'], {}), '([0, 0, 0])\n', (6203, 6214), True, 'import numpy as np\n'), ((1689, 1714), 'math.isclose', 'isclose', (['one[i]', 'other[i]'], {}), '(one[i], other[i])\n', (1696, 1714), False, 'from math import isclose\n'), ((7494, 7547), 'scipy.spatial.distance.cityblock', 'cityblock', (['scanner_positions[i]', 'scanner_positions[j]'], {}), '(scanner_positions[i], scanner_positions[j])\n', (7503, 7547), False, 'from scipy.spatial.distance import cityblock\n'), ((3154, 3193), 'numpy.linalg.norm', 'np.linalg.norm', (['(position_1 - position_2)'], {}), '(position_1 - position_2)\n', (3168, 3193), True, 'import numpy as np\n'), ((3755, 3811), 'numpy.linalg.norm', 'np.linalg.norm', (['(relevant_unified_beacon.pos - beacon_pos)'], {}), '(relevant_unified_beacon.pos - beacon_pos)\n', (3769, 3811), True, 'import numpy as np\n'), ((1224, 1291), 'math.isclose', 'isclose', (['scanner1.differences[i1][j1]', 'scanner2.differences[i2][j2]'], {}), '(scanner1.differences[i1][j1], scanner2.differences[i2][j2])\n', (1231, 1291), False, 'from math import isclose\n'), ((3910, 3928), 'math.isclose', 'isclose', (['diff', 'key'], {}), '(diff, key)\n', (3917, 3928), False, 'from math import isclose\n'), ((4556, 4586), 'numpy.array', 'np.array', (['[translation_vector]'], {}), '([translation_vector])\n', (4564, 4586), True, 'import numpy as np\n'), ((706, 730), 'numpy.array', 'np.array', (['beacons[i].pos'], {}), '(beacons[i].pos)\n', (714, 730), True, 'import numpy as np\n'), ((733, 757), 'numpy.array', 'np.array', (['beacons[j].pos'], {}), '(beacons[j].pos)\n', (741, 757), True, 'import numpy as np\n')]
""" Molecular function callers. A harness for calling functions defined over Molecules. Makes use of the mols/mol_functions.py """ from argparse import Namespace from copy import deepcopy import numpy as np from time import sleep from dragonfly.exd.experiment_caller import CPFunctionCaller from dragonfly.exd.exd_core import EVAL_ERROR_CODE from dragonfly.utils.reporters import get_reporter from dragonfly.exd.domains import CartesianProductDomain # Local imports from mols.mol_domains import MolDomain def get_cp_func_caller_args(domain_config): index_ordering = [0] kernel_ordering = [""] # "" or None for not having a kernel associated with func caller, will use `chemist_args` name_ordering = ["molecule"] dim_ordering = [1] raw_name_ordering = ["molecule"] orderings = Namespace(index_ordering=index_ordering, kernel_ordering=kernel_ordering, dim_ordering=dim_ordering, name_ordering=name_ordering, raw_name_ordering=raw_name_ordering) list_of_domains = [MolDomain(**domain_config)] # Create a namespace with additional information domain_info = Namespace() domain_info.config_orderings = orderings # domain_info.constraints = None # Create a cartesian product domain cp_domain = CartesianProductDomain(list_of_domains, domain_info) fidel_space, fidel_to_opt, fidel_space_orderings = None, None, None ret = { 'domain': cp_domain, 'domain_orderings': orderings, 'fidel_space': fidel_space, 'fidel_to_opt': fidel_to_opt, 'fidel_space_orderings': fidel_space_orderings } return ret class MolFunctionCaller(CPFunctionCaller): """ Function Caller for Mol evaluations. """ def __init__(self, objective, domain_config, descr='', reporter='silent'): constructor_args = get_cp_func_caller_args(domain_config) super(MolFunctionCaller, self).__init__(objective, descr=descr, **constructor_args) self.reporter = get_reporter(reporter) @classmethod def is_mf(cls): """ Returns True if Multi-fidelity. """ return False if __name__ == "__main__": from mols.molecule import Molecule mol = Molecule("C=C1NC(N(C)C)=NC12CCN(CC(C)c1ccccc1)CC2") print("---") domain_config = {'data_source': 'chembl', 'constraint_checker': 'organic', 'sampling_seed': 42} cp_domain = get_cp_func_caller_args(domain_config)['domain'] print(cp_domain.is_a_member(mol)) print(cp_domain.is_a_member([mol])) print(cp_domain.is_a_member([None]))
[ "argparse.Namespace", "mols.mol_domains.MolDomain", "mols.molecule.Molecule", "dragonfly.utils.reporters.get_reporter", "dragonfly.exd.domains.CartesianProductDomain" ]
[((808, 982), 'argparse.Namespace', 'Namespace', ([], {'index_ordering': 'index_ordering', 'kernel_ordering': 'kernel_ordering', 'dim_ordering': 'dim_ordering', 'name_ordering': 'name_ordering', 'raw_name_ordering': 'raw_name_ordering'}), '(index_ordering=index_ordering, kernel_ordering=kernel_ordering,\n dim_ordering=dim_ordering, name_ordering=name_ordering,\n raw_name_ordering=raw_name_ordering)\n', (817, 982), False, 'from argparse import Namespace\n'), ((1202, 1213), 'argparse.Namespace', 'Namespace', ([], {}), '()\n', (1211, 1213), False, 'from argparse import Namespace\n'), ((1353, 1405), 'dragonfly.exd.domains.CartesianProductDomain', 'CartesianProductDomain', (['list_of_domains', 'domain_info'], {}), '(list_of_domains, domain_info)\n', (1375, 1405), False, 'from dragonfly.exd.domains import CartesianProductDomain\n'), ((2273, 2324), 'mols.molecule.Molecule', 'Molecule', (['"""C=C1NC(N(C)C)=NC12CCN(CC(C)c1ccccc1)CC2"""'], {}), "('C=C1NC(N(C)C)=NC12CCN(CC(C)c1ccccc1)CC2')\n", (2281, 2324), False, 'from mols.molecule import Molecule\n'), ((1102, 1128), 'mols.mol_domains.MolDomain', 'MolDomain', ([], {}), '(**domain_config)\n', (1111, 1128), False, 'from mols.mol_domains import MolDomain\n'), ((2065, 2087), 'dragonfly.utils.reporters.get_reporter', 'get_reporter', (['reporter'], {}), '(reporter)\n', (2077, 2087), False, 'from dragonfly.utils.reporters import get_reporter\n')]
# This file is part of the Reproducible Open Benchmarks for Data Analysis # Platform (ROB). # # Copyright (C) 2019-2021 NYU. # # ROB is free software; you can redistribute it and/or modify it under the # terms of the MIT License; see LICENSE file for more details. """Helper methods to access configuration parameters. Following the Twelve-Factor App methodology all configuration parameters are maintained in environment variables. The name of methods that provide access to values from environment variables are in upper case to emphasize that they access configuration values that are expected to remain constant throughout the lifespan of a running application. """ import os from flowserv.config import FLOWSERV_BASEDIR, FLOWSERV_API_PATH """Environment variables that contain configuration parameters for the Web API. """ # Path to the optional build files for the ROB user-interface to be served # by the Flask app. ROB_UI_PATH = 'ROB_UI_PATH' # Directory path for API logs ROB_WEBAPI_LOG = 'ROB_WEBAPI_LOG' # Maximum size of uploaded files (in bytes) ROB_WEBAPI_CONTENTLENGTH = 'ROB_WEBAPI_CONTENTLENGTH' # -- Helper methods to access configutation parameters ------------------------ def API_PATH() -> str: """Get the API application path value from the service configuration. Returns ------- string """ from robflask.service import service return service.get(FLOWSERV_API_PATH) def LOG_DIR() -> str: """Get the logging directory for the Web API from the respective environment variable 'ROB_WEBAPI_LOG'. If the variable is not set a sub-folder 'log' in the API base directory use used as the default. Returns ------- string """ log_dir = os.environ.get(ROB_WEBAPI_LOG) # If the variable is not set create a sub-folder in the API base directory if log_dir is None: from robflask.service import service log_dir = os.path.join(service.get(FLOWSERV_BASEDIR), 'log') return os.path.abspath(log_dir) def MAX_CONTENT_LENGTH() -> str: """Get the maximum size for uploaded files from the respective environment variable 'ROB_WEBAPI_CONTENTLENGTH'. If the variable is not set the default value that is equal to 16MB is used. Returns ------- string Raises ------ ValueError """ value = os.environ.get(ROB_WEBAPI_CONTENTLENGTH) # If the variable is not set use a default of 16MB return 16 * 1024 * 1024 if value is None else int(value)
[ "os.environ.get", "os.path.abspath", "robflask.service.service.get" ]
[((1395, 1425), 'robflask.service.service.get', 'service.get', (['FLOWSERV_API_PATH'], {}), '(FLOWSERV_API_PATH)\n', (1406, 1425), False, 'from robflask.service import service\n'), ((1721, 1751), 'os.environ.get', 'os.environ.get', (['ROB_WEBAPI_LOG'], {}), '(ROB_WEBAPI_LOG)\n', (1735, 1751), False, 'import os\n'), ((1980, 2004), 'os.path.abspath', 'os.path.abspath', (['log_dir'], {}), '(log_dir)\n', (1995, 2004), False, 'import os\n'), ((2334, 2374), 'os.environ.get', 'os.environ.get', (['ROB_WEBAPI_CONTENTLENGTH'], {}), '(ROB_WEBAPI_CONTENTLENGTH)\n', (2348, 2374), False, 'import os\n'), ((1931, 1960), 'robflask.service.service.get', 'service.get', (['FLOWSERV_BASEDIR'], {}), '(FLOWSERV_BASEDIR)\n', (1942, 1960), False, 'from robflask.service import service\n')]
from jinja2.exceptions import TemplateNotFound from flask import request, render_template from flaskcbv.response import Response from flaskcbv.view import View, TemplateIsAjaxView from flaskcbv.view.mixins import JSONMixin, getArgumentMixin from flaskcbv.conf import settings class DefaultContextVars(object): def get_context_data(self, *args, **kwargs): context_ = {} context_['STATIC_URL'] = settings.STATIC_URL try: context_['AUTH_SESSION'] = self.session_id except: context_['AUTH_SESSION'] = None context_['REQUEST'] = self.request context = super(DefaultContextVars, self).get_context_data(*args, **kwargs) context_.update(context) return context_ class myTemplateView(DefaultContextVars, getArgumentMixin, TemplateIsAjaxView): def render_template(self, *args, **kwargs): try: return super(TemplateIsAjaxView, self).render_template(*args, **kwargs) except TemplateNotFound: context = self.get_context_data(*args, **kwargs) context['ajax_content_tpl'] = self.get_template_name(is_ajax=True) return render_template('misc/static.tpl', **context) class JSONView(getArgumentMixin, JSONMixin, View): def get_json_indent(self): return self.__json_indent def dispatch(self, request, *args, **kwargs): try: self.__json_indent = int(request.args['json_indent']) except: self.__json_indent = None return Response(self.get_as_json())
[ "flask.render_template" ]
[((1147, 1192), 'flask.render_template', 'render_template', (['"""misc/static.tpl"""'], {}), "('misc/static.tpl', **context)\n", (1162, 1192), False, 'from flask import request, render_template\n')]
import os import sys FILE_DIR = os.path.dirname(os.path.abspath(__file__)) # avoid annoying import errors... sys.path.append(FILE_DIR) from p4z3.base import * from p4z3.expressions import * from p4z3.statements import * from p4z3.parser import * from p4z3.callables import *
[ "sys.path.append", "os.path.abspath" ]
[((110, 135), 'sys.path.append', 'sys.path.append', (['FILE_DIR'], {}), '(FILE_DIR)\n', (125, 135), False, 'import sys\n'), ((48, 73), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (63, 73), False, 'import os\n')]
import pytest import asyncio import collections import contextlib import random import hat.event.common import hat.event.server.common import hat.event.client import hat.event.server.main import hat.gateway.engine from hat.util import aio from test_unit.test_gateway import mock_device @pytest.fixture def event_server_port(unused_tcp_port_factory): return unused_tcp_port_factory() @pytest.fixture async def event_server(event_server_port): conf = { 'backend_engine': { 'server_id': 1, 'backend': { 'module': 'test_unit.test_event.backends.memory_backend'}}, 'module_engine': {'modules': []}, 'communication': { 'address': f'tcp+sbs://127.0.0.1:{event_server_port}'}} async with aio.Group() as group: group.spawn(hat.event.server.main.run, conf, None) await asyncio.sleep(0.01) # Wait for event server to start yield @pytest.fixture async def engine_factory(event_server, event_server_port): @contextlib.asynccontextmanager async def f(gateway_name, devices=[]): event_server_address = f'tcp+sbs://127.0.0.1:{event_server_port}' conf = {'gateway_name': gateway_name, 'devices': devices} client = await hat.event.client.connect( event_server_address, subscriptions=[ ['gateway', gateway_name, '?', '?', 'system', '*']]) engine = await hat.gateway.engine.create_engine(conf, client) yield engine await engine.async_close() await client.async_close() return f @pytest.fixture def device_queue(monkeypatch): async def create_wrapper(*args, **kwargs): device = await create(*args, **kwargs) queue.put_nowait(device) return device queue = aio.Queue() create = mock_device.create monkeypatch.setattr(mock_device, 'create', create_wrapper) yield queue async def set_enable(client, gateway_name, device_type, device_name, enable): await client.register_with_response([hat.event.common.RegisterEvent( event_type=['gateway', gateway_name, device_type, device_name, 'system', 'enable'], source_timestamp=None, payload=hat.event.common.EventPayload( type=hat.event.common.EventPayloadType.JSON, data=enable))]) @pytest.mark.parametrize("device_count", [1, 2, 10]) @pytest.mark.asyncio async def test_create_device(event_server, event_server_port, engine_factory, device_queue, device_count): event_server_address = f'tcp+sbs://127.0.0.1:{event_server_port}' client = await hat.event.client.connect(event_server_address) device_confs = [{'module': 'test_unit.test_gateway.mock_device', 'name': f'mock {i}'} for i in range(device_count)] devices = collections.deque() async with engine_factory('gateway 0', device_confs): for i in device_confs: await set_enable(client, 'gateway 0', mock_device.device_type, i['name'], True) device = await device_queue.get() assert not device.closed.done() devices.append(device) for i in device_confs: device = devices.popleft() assert not device.closed.done() await set_enable(client, 'gateway 0', mock_device.device_type, i['name'], False) await device.closed assert device.closed.done() @pytest.mark.asyncio async def test_device_enable(event_server, event_server_port, engine_factory, device_queue): event_server_address = f'tcp+sbs://127.0.0.1:{event_server_port}' device_type = mock_device.device_type device_name = 'mock 0' client = await hat.event.client.connect( event_server_address, subscriptions=[ ['gateway', 'gateway 0', device_type, device_name, 'gateway', 'running']]) device_confs = [{'module': 'test_unit.test_gateway.mock_device', 'name': device_name}] async with engine_factory('gateway 0', device_confs): assert device_queue.empty() running_event = (await client.receive())[0] assert running_event.payload.data 
is False await set_enable(client, 'gateway 0', device_type, device_name, True) device = await device_queue.get() assert not device.closed.done() running_event = (await client.receive())[0] assert running_event.payload.data is True await set_enable(client, 'gateway 0', device_type, device_name, False) await device.closed assert device.closed.done() running_event = (await client.receive())[0] assert running_event.payload.data is False await set_enable(client, 'gateway 0', device_type, device_name, True) device = await device_queue.get() assert not device.closed.done() running_event = (await client.receive())[0] assert running_event.payload.data is True await device.closed assert device.closed.done() running_event = (await client.receive())[0] assert running_event.payload.data is False async with engine_factory('gateway 0', device_confs): running_event = (await client.receive())[0] assert running_event.payload.data is False device = await device_queue.get() assert not device.closed.done() running_event = (await client.receive())[0] assert running_event.payload.data is True await client.async_close() @pytest.mark.parametrize("device_count", [1, 2, 10]) @pytest.mark.asyncio async def test_device_client(event_server, event_server_port, engine_factory, device_queue, device_count): event_server_address = f'tcp+sbs://127.0.0.1:{event_server_port}' device_type = mock_device.device_type prefix = ['gateway', 'gateway 0', device_type] client = await hat.event.client.connect( event_server_address, subscriptions=[ [*prefix, '?', 'gateway', 'test']]) device_confs = [{'module': 'test_unit.test_gateway.mock_device', 'name': f'mock {i}'} for i in range(device_count)] devices = {} async with engine_factory('gateway 0', device_confs): for i in device_confs: await set_enable(client, 'gateway 0', device_type, i['name'], True) device = await device_queue.get() assert device.event_type_prefix == [*prefix, i['name'], 'gateway'] devices[i['name']] = device # Test receive # Predictable shuffle just to make it out-of-order # Seed chosen by fair dice roll shuffled_names = random.Random(4).sample(list(devices), k=len(devices)) reg_events = [hat.event.common.RegisterEvent( event_type=[*prefix, name, 'system', 'test'], source_timestamp=None, payload=None ) for name in shuffled_names] events = await client.register_with_response(reg_events) for name, event in zip(shuffled_names, events): received = await devices[name].client.receive() assert len(received) == 1 assert received[0] == event # Test query for device in devices.values(): result = await device.client.query(hat.event.common.QueryData( event_types=[[*prefix, '?', 'system', 'test']])) assert ({tuple(i.event_type): i for i in events} == {tuple(i.event_type): i for i in result}) # Test register with response for name, device in devices.items(): event = await device.client.register_with_response([ hat.event.common.RegisterEvent( event_type=[*prefix, name, 'gateway', 'test'], source_timestamp=hat.event.common.now(), payload=hat.event.common.EventPayload( type=hat.event.common.EventPayloadType.JSON, data=f'{name} test'))]) assert await client.receive() == event # Test register for name, device in devices.items(): reg_event = hat.event.common.RegisterEvent( event_type=[*prefix, name, 'gateway', 'test'], source_timestamp=None, payload=None) device.client.register([reg_event]) received = await client.receive() assert len(received) == 1 event = received[0] assert (reg_event.event_type == event.event_type and reg_event.source_timestamp == event.source_timestamp and 
reg_event.payload == event.payload) @pytest.mark.parametrize("device_count", [1, 2, 10]) @pytest.mark.asyncio async def test_device_close(event_server, event_server_port, engine_factory, device_queue, device_count): event_server_address = f'tcp+sbs://127.0.0.1:{event_server_port}' device_type = mock_device.device_type client = await hat.event.client.connect(event_server_address) device_confs = [{'module': 'test_unit.test_gateway.mock_device', 'name': f'mock {i}'} for i in range(device_count)] devices = [] async with engine_factory('gateway 0', device_confs) as engine: for i in device_confs: await set_enable(client, 'gateway 0', device_type, i['name'], True) devices.append(await device_queue.get()) # Close "random" device await devices[random.Random(4).randrange(len(devices))].async_close() await asyncio.gather(*(i.closed for i in devices)) await engine.closed assert engine.closed.done() @pytest.mark.parametrize("register_event", [ hat.event.common.RegisterEvent( event_type=['gateway', 'gateway 0', 'mock', 'mock 0', 'system', 'enable'], source_timestamp=None, payload=None), hat.event.common.RegisterEvent( event_type=['gateway', 'gateway 0', 'mock', 'mock 0', 'system', 'enable'], source_timestamp=None, payload=hat.event.common.EventPayload( type=hat.event.common.EventPayloadType.BINARY, data=b'test')), hat.event.common.RegisterEvent( event_type=['gateway'], source_timestamp=None, payload=None), hat.event.common.RegisterEvent( event_type=['gateway', 'gateway 0'], source_timestamp=None, payload=None), hat.event.common.RegisterEvent( event_type=['gateway', 'gateway 0', 'mock'], source_timestamp=None, payload=None), hat.event.common.RegisterEvent( event_type=['gateway', 'gateway 0', 'mock', 'mock 0'], source_timestamp=None, payload=None), hat.event.common.RegisterEvent( event_type=['gateway', 'gateway 0', 'mock', 'mock 0', 'system', 'enable', 'too long'], source_timestamp=None, payload=hat.event.common.EventPayload( type=hat.event.common.EventPayloadType.JSON, data=False)), hat.event.common.RegisterEvent( event_type=['gateway', 'gateway 0', 'spock', 'mock 0', 'system', 'enable'], source_timestamp=None, payload=hat.event.common.EventPayload( type=hat.event.common.EventPayloadType.JSON, data=False)), hat.event.common.RegisterEvent( event_type=['gateway', 'gateway 0', 'mock', 'spock 0', 'system', 'enable'], source_timestamp=None, payload=hat.event.common.EventPayload( type=hat.event.common.EventPayloadType.JSON, data=False)), hat.event.common.RegisterEvent( event_type=['gateway', 'gateway 0', 'spock', 'mock 0', 'system', 'test'], source_timestamp=None, payload=None), hat.event.common.RegisterEvent( event_type=['gateway', 'gateway 0', 'mock', 'spock 0', 'system', 'test'], source_timestamp=None, payload=None)]) @pytest.mark.asyncio async def test_malformed_event(event_server, event_server_port, engine_factory, device_queue, register_event): event_server_address = f'tcp+sbs://127.0.0.1:{event_server_port}' device_type = mock_device.device_type client = await hat.event.client.connect(event_server_address) device_confs = [{'module': 'test_unit.test_gateway.mock_device', 'name': 'mock 0'}] async with engine_factory('gateway 0', device_confs) as engine: await set_enable(client, 'gateway 0', device_type, 'mock 0', True) device = await device_queue.get() await client.register_with_response([register_event]) await asyncio.sleep(0.1) assert not device.closed.done() assert not engine.closed.done()
[ "asyncio.gather", "asyncio.sleep", "random.Random", "collections.deque", "pytest.mark.parametrize", "hat.util.aio.Group", "hat.util.aio.Queue" ]
[((2359, 2410), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""device_count"""', '[1, 2, 10]'], {}), "('device_count', [1, 2, 10])\n", (2382, 2410), False, 'import pytest\n'), ((5616, 5667), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""device_count"""', '[1, 2, 10]'], {}), "('device_count', [1, 2, 10])\n", (5639, 5667), False, 'import pytest\n'), ((8795, 8846), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""device_count"""', '[1, 2, 10]'], {}), "('device_count', [1, 2, 10])\n", (8818, 8846), False, 'import pytest\n'), ((1805, 1816), 'hat.util.aio.Queue', 'aio.Queue', ([], {}), '()\n', (1814, 1816), False, 'from hat.util import aio\n'), ((2860, 2879), 'collections.deque', 'collections.deque', ([], {}), '()\n', (2877, 2879), False, 'import collections\n'), ((774, 785), 'hat.util.aio.Group', 'aio.Group', ([], {}), '()\n', (783, 785), False, 'from hat.util import aio\n'), ((869, 888), 'asyncio.sleep', 'asyncio.sleep', (['(0.01)'], {}), '(0.01)\n', (882, 888), False, 'import asyncio\n'), ((9698, 9742), 'asyncio.gather', 'asyncio.gather', (['*(i.closed for i in devices)'], {}), '(*(i.closed for i in devices))\n', (9712, 9742), False, 'import asyncio\n'), ((12917, 12935), 'asyncio.sleep', 'asyncio.sleep', (['(0.1)'], {}), '(0.1)\n', (12930, 12935), False, 'import asyncio\n'), ((6768, 6784), 'random.Random', 'random.Random', (['(4)'], {}), '(4)\n', (6781, 6784), False, 'import random\n'), ((9627, 9643), 'random.Random', 'random.Random', (['(4)'], {}), '(4)\n', (9640, 9643), False, 'import random\n')]
#coding=utf-8
import csv
import pymysql
import DataUtil
import xlrd


class DBUtil:
    def __init__(self):
        print('init DBUtil')

    def query_mysql(self):
        # Open the database connection
        db = pymysql.connect("10.188.40.12", "udata", "123456", "udata_privilege", charset="utf8")

        # Use the cursor() method to get an operation cursor
        cursor = db.cursor()

        # SQL query statement
        sql = "SELECT * FROM account"
        try:
            print('hello')
            # Execute the SQL statement
            cursor.execute(sql)
            # Fetch the list of all records
            results = cursor.fetchall()
            print(len(results))
            for row in results:
                fname = row[0]
                lname = row[1]
                age = row[2]
                sex = row[3]
                income = row[4]
                # Print the result
                print("id=%s,uid=%s,uname=%s,status=%s,operator=%s" % \
                      (fname, lname, age, sex, income))
        except Exception as err:
            print(err)
        finally:
            db.close()

    def read_csv_to_db(self, csv_file_path):
        csv_reader = csv.reader(open(csv_file_path, encoding='utf-8'))
        # Open the database connection
        db = pymysql.connect("10.188.40.12", "udata", "123456", "udata_business", charset="utf8")
        # Use the cursor() method to get an operation cursor
        cursor = db.cursor()
        # SQL insert statement
        sql = " insert into life_service_0502(shop_id, shop_name, province, city, district, address, lat, lon," \
              "classification, phone, business_hours, type, branch_code) " \
              "VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s) "
        try:
            list = []
            i = 0
            for row in csv_reader:
                shop_type = DataUtil.get_shop_type(row[8])
                data = (row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10],
                        shop_type, 0
                        )
                list.append(data)
                if len(list) > 5000:
                    cursor.executemany(sql, list)
                    db.commit()
                    list.clear()
                    i = i + 1
                    print(i * 5000)

            cursor.executemany(sql, list)
            db.commit()
        except Exception as err:
            print(err)
            db.rollback()
        finally:
            # Close the database connection
            cursor.close()
            db.close()

    def read_xlsx_to_db(self, xlsx_file_path):
        # Open the database connection
        db = pymysql.connect("10.188.40.12", "udata", "123456", "udata_business", charset="utf8")
        # Use the cursor() method to get an operation cursor
        cursor = db.cursor()
        # SQL insert statements
        sql_fence = " insert into digit_fence(branch_code, branch_type, lon, lat)" \
                    " VALUES (%s,%s,%s,%s) "
        sql_branch = " insert into branch(branch_code, branch_name, branch_type, branch_superior, superior_name, superior_type, area_code, area_name," \
                     "region_code, region_name, effect_time, account_no, city_code,province,city,city_level) " \
                     "VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s) "
        try:
            data = xlrd.open_workbook(xlsx_file_path)
            sheets = len(data.sheets())
            print(sheets)
            for sheet in range(0, sheets):
                sql = sql_fence
                if sheet % 2 == 0:
                    print(sheet)
                    sql = sql_branch
                table = data.sheets()[sheet]
                nrows = table.nrows  # number of rows
                print(nrows)
                for i in range(0, nrows):
                    row = table.row_values(i)  # data of a single row
                    print(row)
                    cursor.execute(sql, row)
                    print(i)
                db.commit()
                print('Done ' + str(sheet))
        except Exception as err:
            print(err)
            db.rollback()
        finally:
            cursor.close()
            db.close()

    def read_takeout_csv_to_db(self, csv_file_path):
        csv_reader = csv.reader(open(csv_file_path, encoding='utf-8'))
        # Open the database connection
        db = pymysql.connect("10.188.40.12", "udata", "123456", "udata_business", charset="utf8")
        # Use the cursor() method to get an operation cursor
        cursor = db.cursor()
        # SQL insert statement
        sql = " insert into takeout_shop_0510(shop_id, shop_name, classification,province, city, address, lon_lat, business_hours,sales, is_brand," \
              "avg_deliver_time, begin_price, deliver_fee, deliver_service, company,remark) " \
              " VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s) "
        try:
            list = []
            i = 0
            j = 0
            for row in csv_reader:
                list.append(row)
                if len(list) > 5000:
                    cursor.executemany(sql, list)
                    db.commit()
                    list.clear()
                    i = i + 1
                    print(i * 5000)

            cursor.executemany(sql, list)
            db.commit()
        except Exception as err:
            print(err)
            db.rollback()
        finally:
            # Close the database connection
            cursor.close()
            db.close()
    def digit_fence(self):
        db = pymysql.connect("10.188.40.12", "udata", "123456", "udata_business", charset="utf8")
        # Use the cursor() method to get an operation cursor
        cursor = db.cursor()
        sql = "select * from digit_fence"
        dict = {}
        try:
            # Execute the SQL statement
            cursor.execute(sql)
            # Fetch the list of all records
            results = cursor.fetchall()
            print("the length of records in digit_fence:" + str(len(results)))
            for row in results:
                if row[0] in dict:
                    dict[row[0]].append(row)
                else:
                    dict[row[0]] = [row]
            print("the length of dict is:" + str(len(dict)))

            sql = "SELECT * FROM life_service_0502_bsgsh_3type"
            # Execute the SQL statement
            cursor.execute(sql)
            # Fetch the list of all records
            results = cursor.fetchall()
            print("the length of records in life_service_0502_bsgsh_3type:" + str(len(results)))
            for row in results:
                lat = row[7]
                lon = row[8]
                for key in dict:
                    try:
                        if DataUtil.is_in_fence(lon, lat, dict[key]):
                            code = dict[key][0][0]
                            name = dict[key][0][1]
                            if code is None:
                                continue
                            if name is None:
                                continue
                            sql = "update life_service_0502_bsgsh_3type set branch_code='" + code + "', branch_name='" + name + "' where id=" + str(row[0])
                            print(sql)
                            cursor.execute(sql)
                            db.commit()
                            break
                    except Exception as err:
                        print("Error in circle:" + str(err))
        except Exception as err:
            print(err)
        finally:
            db.close()
[ "DataUtil.get_shop_type", "xlrd.open_workbook", "pymysql.connect", "DataUtil.is_in_fence" ]
[((197, 286), 'pymysql.connect', 'pymysql.connect', (['"""10.188.40.12"""', '"""udata"""', '"""123456"""', '"""udata_privilege"""'], {'charset': '"""utf8"""'}), "('10.188.40.12', 'udata', '123456', 'udata_privilege',\n charset='utf8')\n", (212, 286), False, 'import pymysql\n'), ((1170, 1258), 'pymysql.connect', 'pymysql.connect', (['"""10.188.40.12"""', '"""udata"""', '"""123456"""', '"""udata_business"""'], {'charset': '"""utf8"""'}), "('10.188.40.12', 'udata', '123456', 'udata_business',\n charset='utf8')\n", (1185, 1258), False, 'import pymysql\n'), ((2433, 2521), 'pymysql.connect', 'pymysql.connect', (['"""10.188.40.12"""', '"""udata"""', '"""123456"""', '"""udata_business"""'], {'charset': '"""utf8"""'}), "('10.188.40.12', 'udata', '123456', 'udata_business',\n charset='utf8')\n", (2448, 2521), False, 'import pymysql\n'), ((4066, 4154), 'pymysql.connect', 'pymysql.connect', (['"""10.188.40.12"""', '"""udata"""', '"""123456"""', '"""udata_business"""'], {'charset': '"""utf8"""'}), "('10.188.40.12', 'udata', '123456', 'udata_business',\n charset='utf8')\n", (4081, 4154), False, 'import pymysql\n'), ((5188, 5276), 'pymysql.connect', 'pymysql.connect', (['"""10.188.40.12"""', '"""udata"""', '"""123456"""', '"""udata_business"""'], {'charset': '"""utf8"""'}), "('10.188.40.12', 'udata', '123456', 'udata_business',\n charset='utf8')\n", (5203, 5276), False, 'import pymysql\n'), ((3086, 3120), 'xlrd.open_workbook', 'xlrd.open_workbook', (['xlsx_file_path'], {}), '(xlsx_file_path)\n', (3104, 3120), False, 'import xlrd\n'), ((1705, 1735), 'DataUtil.get_shop_type', 'DataUtil.get_shop_type', (['row[8]'], {}), '(row[8])\n', (1727, 1735), False, 'import DataUtil\n'), ((6296, 6337), 'DataUtil.is_in_fence', 'DataUtil.is_in_fence', (['lon', 'lat', 'dict[key]'], {}), '(lon, lat, dict[key])\n', (6316, 6337), False, 'import DataUtil\n')]
# -*- coding: utf-8 -*- """ Created on July 6 2017 @author: <EMAIL> """ import os import sys import time from osgeo import ogr from osgeo import osr import pandas as pd path = os.path.dirname(os.path.dirname(os.path.dirname(os.path. abspath(__file__)))) if path not in sys.path: sys.path.append(path) ''' This module creates a shapefile with attributes which exist in the input shapefile of shp2csv.py module and assigns the calculated values to the features of this shapefile. ''' def update_building_lyr(inputCSV, inShapefile, outShapefile, epsg=3035): # fields in CSV are as follows: # ['hotmaps_ID', 'inputLyr_ID', 'Type', 'Year_Construction', 'Address', # 'Footprint', 'NrFloor', 'GFA', 'spec_demand', 'demand', 'X_3035', # 'Y_3035'] ifile = pd.read_csv(inputCSV) ifile = ifile.sort_values(["hotmaps_ID"]) csv_cols = ifile.columns.values col_dtype = ifile.dtypes.values # Get the input layer driver = ogr.GetDriverByName('ESRI Shapefile') inDataSource = driver.Open(inShapefile) inLayer = inDataSource.GetLayer() # set CRS srs = osr.SpatialReference() srs.ImportFromEPSG(epsg) outDriver = ogr.GetDriverByName("ESRI Shapefile") if os.path.exists(outShapefile): outDriver.DeleteDataSource(outShapefile) # Create the output shapefile outDataSource = outDriver.CreateDataSource(outShapefile) geom_typ = inLayer.GetGeomType() geom_typ_dict = {1: ogr.wkbPoint, 2: ogr.wkbLineString, 3: ogr.wkbPolygon} if geom_typ not in list(geom_typ_dict.keys()): raise Exception("Geometry type of the input layer is not supported!") outLayer = outDataSource.CreateLayer("Building_lyr_updated", srs, geom_type=geom_typ_dict[geom_typ]) for i, item in enumerate(csv_cols): # shapefile's field name should not exceed 10 characters. O.W. warning # will be printed. To avoid waning, the following is applied: item = item[:10] if i > 0: if col_dtype[i] == object: Field = ogr.FieldDefn(item, ogr.OFTString) elif col_dtype[i] == int: Field = ogr.FieldDefn(item, ogr.OFTInteger) else: Field = ogr.FieldDefn(item, ogr.OFTReal) outLayer.CreateField(Field) outLayerDefn = outLayer.GetLayerDefn() # loop through the input features inFeature = inLayer.GetNextFeature() while inFeature: fid = inFeature.GetFID() outFeature = ogr.Feature(outLayerDefn) # get the input geometry for i, item in enumerate(csv_cols): if i > 0: outFeature.SetField(outLayerDefn.GetFieldDefn(i-1).GetNameRef(), (ifile[item].values)[fid]) geom = inFeature.GetGeometryRef() outFeature.SetGeometry(geom) outLayer.CreateFeature(outFeature) outFeature = None inFeature = inLayer.GetNextFeature() # Save and close DataSources inDataSource = None outDataSource = None
[ "sys.path.append", "os.path.abspath", "pandas.read_csv", "osgeo.ogr.GetDriverByName", "os.path.exists", "osgeo.ogr.FieldDefn", "osgeo.osr.SpatialReference", "osgeo.ogr.Feature" ]
[((339, 360), 'sys.path.append', 'sys.path.append', (['path'], {}), '(path)\n', (354, 360), False, 'import sys\n'), ((829, 850), 'pandas.read_csv', 'pd.read_csv', (['inputCSV'], {}), '(inputCSV)\n', (840, 850), True, 'import pandas as pd\n'), ((1008, 1045), 'osgeo.ogr.GetDriverByName', 'ogr.GetDriverByName', (['"""ESRI Shapefile"""'], {}), "('ESRI Shapefile')\n", (1027, 1045), False, 'from osgeo import ogr\n'), ((1153, 1175), 'osgeo.osr.SpatialReference', 'osr.SpatialReference', ([], {}), '()\n', (1173, 1175), False, 'from osgeo import osr\n'), ((1221, 1258), 'osgeo.ogr.GetDriverByName', 'ogr.GetDriverByName', (['"""ESRI Shapefile"""'], {}), "('ESRI Shapefile')\n", (1240, 1258), False, 'from osgeo import ogr\n'), ((1266, 1294), 'os.path.exists', 'os.path.exists', (['outShapefile'], {}), '(outShapefile)\n', (1280, 1294), False, 'import os\n'), ((2571, 2596), 'osgeo.ogr.Feature', 'ogr.Feature', (['outLayerDefn'], {}), '(outLayerDefn)\n', (2582, 2596), False, 'from osgeo import ogr\n'), ((225, 250), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (240, 250), False, 'import os\n'), ((2126, 2160), 'osgeo.ogr.FieldDefn', 'ogr.FieldDefn', (['item', 'ogr.OFTString'], {}), '(item, ogr.OFTString)\n', (2139, 2160), False, 'from osgeo import ogr\n'), ((2223, 2258), 'osgeo.ogr.FieldDefn', 'ogr.FieldDefn', (['item', 'ogr.OFTInteger'], {}), '(item, ogr.OFTInteger)\n', (2236, 2258), False, 'from osgeo import ogr\n'), ((2301, 2333), 'osgeo.ogr.FieldDefn', 'ogr.FieldDefn', (['item', 'ogr.OFTReal'], {}), '(item, ogr.OFTReal)\n', (2314, 2333), False, 'from osgeo import ogr\n')]
from sklearn.datasets import load_boston from tensorflow.keras.optimizers import Adam from tensorflow.keras.optimizers import RMSprop from artificial_neural_network_model_automation.hyper_parameter_tuning import ANNRandomizedSearchConfig from artificial_neural_network_model_automation.hyper_parameter_tuning import ANNRandomizedSearch # loading data data = load_boston() features = data.data target = data.target # creating ANNRandomizedSearchConfig object machine_learning_task = "regression" neural_network_architecture_list = [ [13, 20, 20, 1], [13, 40, 1], [13, 20, 20, 20, 1], [13, 20, 30, 20, 1], [13, 20, 25, 20, 1], [13, 25, 25, 25, 1], [13, 25, 1], [13, 50, 1], [13, 50, 50, 1] ] hidden_layers_activation_function_list = ["relu", "selu", "elu"] dropout_rate_list = [None, 0.0001, 0.001, 0.01, 0.02, 0.03, 0.04] optimizer1 = Adam() optimizer2 = Adam(learning_rate=0.01) optimizer3 = Adam(learning_rate=0.02) optimizer4 = RMSprop() optimizer5 = RMSprop(learning_rate=0.01) optimizer6 = RMSprop(learning_rate=0.02) optimizer_list = [optimizer1, optimizer2, optimizer3, optimizer4, optimizer5, optimizer6] metric_list = ["mean_squared_error", "mean_absolute_error", "mean_absolute_percentage_error", "mean_squared_logarithmic_error", "cosine_similarity", "logcosh"] batch_size_list = [10, 20, 30, 40, 50] epochs_list = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100] neural_network_config_list_dict = { "machine_learning_task": machine_learning_task, "neural_network_architecture_list": neural_network_architecture_list, "hidden_layers_activation_function_list": hidden_layers_activation_function_list, "dropout_rate_list": dropout_rate_list, "optimizer_list": optimizer_list, "metric_list": metric_list, "batch_size_list": batch_size_list, "epochs_list": epochs_list } ann_randomized_search_config = ANNRandomizedSearchConfig(neural_network_config_list_dict) # create artificial neural network randomized search object ann_randomized_search = ANNRandomizedSearch(ann_randomized_search_config, n_iter=100, n_jobs=-1) # perform randomized search ann_randomized_search.fit(features, target)
[ "artificial_neural_network_model_automation.hyper_parameter_tuning.ANNRandomizedSearch", "artificial_neural_network_model_automation.hyper_parameter_tuning.ANNRandomizedSearchConfig", "sklearn.datasets.load_boston", "tensorflow.keras.optimizers.Adam", "tensorflow.keras.optimizers.RMSprop" ]
[((359, 372), 'sklearn.datasets.load_boston', 'load_boston', ([], {}), '()\n', (370, 372), False, 'from sklearn.datasets import load_boston\n'), ((873, 879), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {}), '()\n', (877, 879), False, 'from tensorflow.keras.optimizers import Adam\n'), ((893, 917), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {'learning_rate': '(0.01)'}), '(learning_rate=0.01)\n', (897, 917), False, 'from tensorflow.keras.optimizers import Adam\n'), ((931, 955), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {'learning_rate': '(0.02)'}), '(learning_rate=0.02)\n', (935, 955), False, 'from tensorflow.keras.optimizers import Adam\n'), ((969, 978), 'tensorflow.keras.optimizers.RMSprop', 'RMSprop', ([], {}), '()\n', (976, 978), False, 'from tensorflow.keras.optimizers import RMSprop\n'), ((992, 1019), 'tensorflow.keras.optimizers.RMSprop', 'RMSprop', ([], {'learning_rate': '(0.01)'}), '(learning_rate=0.01)\n', (999, 1019), False, 'from tensorflow.keras.optimizers import RMSprop\n'), ((1033, 1060), 'tensorflow.keras.optimizers.RMSprop', 'RMSprop', ([], {'learning_rate': '(0.02)'}), '(learning_rate=0.02)\n', (1040, 1060), False, 'from tensorflow.keras.optimizers import RMSprop\n'), ((1890, 1948), 'artificial_neural_network_model_automation.hyper_parameter_tuning.ANNRandomizedSearchConfig', 'ANNRandomizedSearchConfig', (['neural_network_config_list_dict'], {}), '(neural_network_config_list_dict)\n', (1915, 1948), False, 'from artificial_neural_network_model_automation.hyper_parameter_tuning import ANNRandomizedSearchConfig\n'), ((2034, 2106), 'artificial_neural_network_model_automation.hyper_parameter_tuning.ANNRandomizedSearch', 'ANNRandomizedSearch', (['ann_randomized_search_config'], {'n_iter': '(100)', 'n_jobs': '(-1)'}), '(ann_randomized_search_config, n_iter=100, n_jobs=-1)\n', (2053, 2106), False, 'from artificial_neural_network_model_automation.hyper_parameter_tuning import ANNRandomizedSearch\n')]
import logging from flask import Flask from nisse.services.reminder_job import remind from nisse.utils.configs import load_config application = Flask(__name__, instance_relative_config=True) load_config(application) remind(application.logger, application.config)
[ "flask.Flask", "nisse.utils.configs.load_config", "nisse.services.reminder_job.remind" ]
[((145, 191), 'flask.Flask', 'Flask', (['__name__'], {'instance_relative_config': '(True)'}), '(__name__, instance_relative_config=True)\n', (150, 191), False, 'from flask import Flask\n'), ((193, 217), 'nisse.utils.configs.load_config', 'load_config', (['application'], {}), '(application)\n', (204, 217), False, 'from nisse.utils.configs import load_config\n'), ((219, 265), 'nisse.services.reminder_job.remind', 'remind', (['application.logger', 'application.config'], {}), '(application.logger, application.config)\n', (225, 265), False, 'from nisse.services.reminder_job import remind\n')]
# -*- coding: utf-8 -*- __author__ = 'Jinkey' import time import numpy as np import h5py import matplotlib.pyplot as plt import scipy from PIL import Image from scipy import ndimage from dnn_app_utils import * from keras.models import Sequential, load_model from keras.layers import Dense, Dropout, Activation np.random.seed(1) class DeepClassifier: def __init__(self): self.train_x, self.train_y, self.test_x, self.test_y, self.classes = load_data() self.sample_amount = self.train_x.shape[0] self.test_amount = self.test_x.shape[0] self.model = Sequential() self.parameters = {} self.is_trained = False def show_data_info(self): print ("Number of training examples: " + str(self.sample_amount)) print ("Number of testing examples: " + str(self.test_amount)) print ("Each image is of size: (" + str(self.train_x.shape[1]) + ", " + str(self.train_x.shape[1]) + ", 3)") print ("train_x_orig shape: " + str(self.train_x.shape)) print ("train_y shape: " + str(self.train_y.shape)) print ("test_x_orig shape: " + str(self.test_x.shape)) print ("test_y shape: " + str(self.test_y.shape)) return self def flattern_x(self): self.train_x = self.train_x.reshape(self.sample_amount, -1).T self.test_x = self.test_x.reshape(self.test_amount, -1).T assert self.train_x.shape == (12288, self.sample_amount) assert self.test_x.shape == (12288, self.test_amount) return self def standardize_x(self): self.train_x = self.train_x / 255.0 self.test_x = self.test_x / 255.0 return self def L_layer_model(self, learning_rate=0.0075, num_iterations=3000): # lr was 0.009 np.random.seed(1) costs = [] # keep track of cost # Parameters initialization. ### START CODE HERE ### layers_dims = [12288, 20, 7, 5, 1] parameters = initialize_parameters_deep(layers_dims) ### END CODE HERE ### # Loop (gradient descent) for i in range(0, num_iterations): # Forward propagation: [LINEAR -> RELU]*(L-1) -> LINEAR -> SIGMOID. ### START CODE HERE ### (≈ 1 line of code) AL, caches = L_model_forward(self.train_x, parameters) ### END CODE HERE ### # Compute cost. ### START CODE HERE ### (≈ 1 line of code) cost = compute_cost(AL, self.train_y) ### END CODE HERE ### # Backward propagation. ### START CODE HERE ### (≈ 1 line of code) grads = L_model_backward(AL, self.train_y, caches) ### END CODE HERE ### # Update parameters. 
            ### START CODE HERE ### (≈ 1 line of code)
            self.parameters = update_parameters(parameters, grads, learning_rate)
            ### END CODE HERE ###

            # Print the cost every 100 training examples
            if i % 100 == 0:
                costs.append(cost)
                print ("Cost after iteration %i: %f" % (i, cost))

        self.is_trained = True
        # plot the cost
        plt.plot(np.squeeze(costs))
        plt.ylabel('cost')
        plt.xlabel('iterations (per tens)')
        plt.title("Learning rate =" + str(learning_rate))
        plt.show()
        return self

    def save_model(self):
        f = h5py.File("iscat-deep.h5", "w")
        f.create_dataset("layers", data=5)
        for key, value in self.parameters.items():
            f.create_dataset(key, data=value)

    def load_model(self):
        f = h5py.File("iscat-deep.h5", "r")
        number_of_layers = np.squeeze(f["layers"])
        for i in range(1, number_of_layers):
            self.parameters["W"+str(i)] = np.array(f["W"+str(i)])
            self.parameters["b"+str(i)] = np.array(f["b"+str(i)])
        self.is_trained = True
        return self

    # The course assignment only walks through forward and backward propagation for a multi-layer
    # network, with the forward/backward functions wrapped inside dnn_app_utils, so here I use Keras
    # directly to implement the 5-layer network the assignment asks for.
    def L_layer_model_with_keras(self):
        model = Sequential()
        model.add(Dense(output_dim=20, activation="relu", input_dim=12288))
        model.add(Dense(output_dim=7, activation="relu", input_dim=13))
        model.add(Dense(output_dim=5, activation="relu", input_dim=7))
        model.add(Dense(output_dim=1, activation="sigmoid", input_dim=5))
        model.compile(loss="binary_crossentropy", optimizer='sgd', metrics=["accuracy"])
        model.fit(self.train_x.T, self.train_y.T, nb_epoch=5000)
        model.save("iscat-keras.h5")
        score = model.evaluate(self.test_x.T, self.test_y.T)
        print(score)
        return self

    def load_keras_model(self):
        self.model = load_model('iscat-keras.h5')
        return self

    def predict_with_keras(self, image_path):
        image = np.array(ndimage.imread(image_path, flatten=False))
        image_flatten = scipy.misc.imresize(image, size=(64, 64)).reshape((64*64*3, 1))
        result = np.squeeze(self.model.predict(image_flatten.T))
        print("This %s a cat" % ("is" if result == 1 else "is not"))

    def predict_standard(self, image_path):
        print("============== Accuracy on the test set ==============")
        predict(self.test_x, self.test_y, self.parameters)
        print("============== Predicting a single image ==============")
        image = np.array(ndimage.imread(image_path, flatten=False))
        my_image = scipy.misc.imresize(image, size=(64, 64)).reshape((64 * 64 * 3, 1))
        my_predicted_image = predict(X=my_image, y=[1], parameters=self.parameters)
        print("This %s a cat" % ("is" if my_predicted_image == 1 else "is not"))
        plt.imshow(image)


if __name__ == '__main__':

    # Train the model with the course-assignment implementation
    # DeepClassifier().flattern_x().standardize_x().L_layer_model(learning_rate=0.0075, num_iterations=3000).save_model()
    # Predict with the saved course-assignment model
    DeepClassifier().load_model().flattern_x().standardize_x().predict_standard("images/cat.jpg")

    # Train the model with Keras
    # DeepClassifier().flattern_x().standardize_x().L_layer_model_with_keras()
    # Predict with the Keras model
    # DeepClassifier().load_keras_model().predict_with_keras("images/cat.jpg")
[ "keras.models.load_model", "h5py.File", "numpy.random.seed", "matplotlib.pyplot.show", "keras.models.Sequential", "matplotlib.pyplot.imshow", "keras.layers.Dense", "scipy.misc.imresize", "numpy.squeeze", "matplotlib.pyplot.ylabel", "matplotlib.pyplot.xlabel", "scipy.ndimage.imread" ]
[((312, 329), 'numpy.random.seed', 'np.random.seed', (['(1)'], {}), '(1)\n', (326, 329), True, 'import numpy as np\n'), ((587, 599), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (597, 599), False, 'from keras.models import Sequential, load_model\n'), ((1764, 1781), 'numpy.random.seed', 'np.random.seed', (['(1)'], {}), '(1)\n', (1778, 1781), True, 'import numpy as np\n'), ((3192, 3210), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""cost"""'], {}), "('cost')\n", (3202, 3210), True, 'import matplotlib.pyplot as plt\n'), ((3219, 3254), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""iterations (per tens)"""'], {}), "('iterations (per tens)')\n", (3229, 3254), True, 'import matplotlib.pyplot as plt\n'), ((3321, 3331), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3329, 3331), True, 'import matplotlib.pyplot as plt\n'), ((3392, 3423), 'h5py.File', 'h5py.File', (['"""iscat-deep.h5"""', '"""w"""'], {}), "('iscat-deep.h5', 'w')\n", (3401, 3423), False, 'import h5py\n'), ((3603, 3634), 'h5py.File', 'h5py.File', (['"""iscat-deep.h5"""', '"""r"""'], {}), "('iscat-deep.h5', 'r')\n", (3612, 3634), False, 'import h5py\n'), ((3662, 3685), 'numpy.squeeze', 'np.squeeze', (["f['layers']"], {}), "(f['layers'])\n", (3672, 3685), True, 'import numpy as np\n'), ((4062, 4074), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (4072, 4074), False, 'from keras.models import Sequential, load_model\n'), ((4717, 4745), 'keras.models.load_model', 'load_model', (['"""iscat-keras.h5"""'], {}), "('iscat-keras.h5')\n", (4727, 4745), False, 'from keras.models import Sequential, load_model\n'), ((5617, 5634), 'matplotlib.pyplot.imshow', 'plt.imshow', (['image'], {}), '(image)\n', (5627, 5634), True, 'import matplotlib.pyplot as plt\n'), ((3165, 3182), 'numpy.squeeze', 'np.squeeze', (['costs'], {}), '(costs)\n', (3175, 3182), True, 'import numpy as np\n'), ((4093, 4149), 'keras.layers.Dense', 'Dense', ([], {'output_dim': '(20)', 'activation': '"""relu"""', 'input_dim': '(12288)'}), "(output_dim=20, activation='relu', input_dim=12288)\n", (4098, 4149), False, 'from keras.layers import Dense, Dropout, Activation\n'), ((4169, 4221), 'keras.layers.Dense', 'Dense', ([], {'output_dim': '(7)', 'activation': '"""relu"""', 'input_dim': '(13)'}), "(output_dim=7, activation='relu', input_dim=13)\n", (4174, 4221), False, 'from keras.layers import Dense, Dropout, Activation\n'), ((4241, 4292), 'keras.layers.Dense', 'Dense', ([], {'output_dim': '(5)', 'activation': '"""relu"""', 'input_dim': '(7)'}), "(output_dim=5, activation='relu', input_dim=7)\n", (4246, 4292), False, 'from keras.layers import Dense, Dropout, Activation\n'), ((4312, 4366), 'keras.layers.Dense', 'Dense', ([], {'output_dim': '(1)', 'activation': '"""sigmoid"""', 'input_dim': '(5)'}), "(output_dim=1, activation='sigmoid', input_dim=5)\n", (4317, 4366), False, 'from keras.layers import Dense, Dropout, Activation\n'), ((4838, 4879), 'scipy.ndimage.imread', 'ndimage.imread', (['image_path'], {'flatten': '(False)'}), '(image_path, flatten=False)\n', (4852, 4879), False, 'from scipy import ndimage\n'), ((5328, 5369), 'scipy.ndimage.imread', 'ndimage.imread', (['image_path'], {'flatten': '(False)'}), '(image_path, flatten=False)\n', (5342, 5369), False, 'from scipy import ndimage\n'), ((4905, 4946), 'scipy.misc.imresize', 'scipy.misc.imresize', (['image'], {'size': '(64, 64)'}), '(image, size=(64, 64))\n', (4924, 4946), False, 'import scipy\n'), ((5390, 5431), 'scipy.misc.imresize', 'scipy.misc.imresize', (['image'], {'size': '(64, 64)'}), 
'(image, size=(64, 64))\n', (5409, 5431), False, 'import scipy\n')]
from PyPDF2 import PdfFileMerger, PdfFileReader
import os


def ask(question, cursor='>> ', default_choice=None):
    '''This is a shortcut function to ask questions and receive a string
    answer back. The `question` is a string which is printed out in the
    console. `cursor` is the delimiter used to prompt the user to enter an
    answer. If provided, `default_choice` is returned if no input is
    provided.'''
    actual_question = question
    if default_choice:
        actual_question = '{} [Default: {}]'.format(question, default_choice)
    print(actual_question)
    answer = input(cursor)
    if answer:
        return answer
    elif default_choice:
        return default_choice
    else:
        return ''


def ask_for_directory():
    '''Prompt the user which directory to work on. Defaults to the present
    working directory.'''
    return ask('What directory contains your PDFs? Please enter the absolute '
               'path.', default_choice=os.getcwd())


def is_valid_directory(directory):
    '''Verify if `directory` is indeed a directory.'''
    return directory and os.path.isdir(directory)


def list_files(directory):
    '''Returns a list of files within `directory` that end with `.pdf`.'''
    files = [file_name for file_name in os.listdir(directory)
             if file_name.endswith('pdf')]
    files.sort()
    return files


def should_include_file(_file):
    '''Identifies whether or not the user wants to include `_file`.'''
    response = None
    while not response:
        response = ask('Include {}? [y/n]'.format(_file)).lower()
        if response != 'y' and response != 'n':
            response = None
    return response == 'y'


def ask_for_selection(directory, files):
    '''Lists the `directory` files and returns a list of the files that the
    user wants to include.'''
    return [os.path.join(directory, _file)
            for _file in files
            if should_include_file(_file)]


def ask_for_new_file_name():
    '''Asks for the file name of the new merged file.'''
    return ask('What would you like to name the combined file?')


def build_file_name(file_name):
    '''Returns the file name entered, with a `.pdf` extension.'''
    return '{}.pdf'.format(file_name)


def merge(file_names, new_file_name):
    '''Merges the list of `file_names` into one file called
    `new_file_name`.'''
    merger = PdfFileMerger()
    for file_name in file_names:
        merger.append(PdfFileReader(file_name, 'rb'))
    merger.write(new_file_name)


def ask_for_destination(file_name, default=os.getcwd()):
    '''Asks the user where they would like to save the `file_name`. It
    defaults to saving `file_name` to the directory `default`.'''
    destination = ask('Where would you like to save {}?'.format(file_name),
                      default_choice=default)
    if not is_valid_directory(destination):
        destination = default
    return os.path.join(destination, file_name)


def ask_where_to_work():
    '''Asks the user what directory to look for files in. It expects an
    absolute path.'''
    directory = None
    while not is_valid_directory(directory):
        directory = ask_for_directory()
    return directory


def go_for_it(workspace, files):
    '''This is the main logic to merge the files.'''
    selection = ask_for_selection(workspace, files)
    new_file_name = build_file_name(ask_for_new_file_name())
    destination = ask_for_destination(new_file_name, workspace)
    merge(selection, destination)
    print('Created `{}`'.format(destination))


def main():
    '''Executes the program. Try it out. :D '''
    directory = ask_where_to_work()
    files = list_files(directory)
    if len(files) > 0:
        go_for_it(directory, files)
    else:
        print('No files were found in `{}`'.format(directory))


if __name__ == '__main__':
    main()
[ "os.getcwd", "os.path.isdir", "PyPDF2.PdfFileReader", "PyPDF2.PdfFileMerger", "os.path.join", "os.listdir" ]
[((2356, 2371), 'PyPDF2.PdfFileMerger', 'PdfFileMerger', ([], {}), '()\n', (2369, 2371), False, 'from PyPDF2 import PdfFileMerger, PdfFileReader\n'), ((2538, 2549), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2547, 2549), False, 'import os\n'), ((2859, 2895), 'os.path.join', 'os.path.join', (['destination', 'file_name'], {}), '(destination, file_name)\n', (2871, 2895), False, 'import os\n'), ((1101, 1125), 'os.path.isdir', 'os.path.isdir', (['directory'], {}), '(directory)\n', (1114, 1125), False, 'import os\n'), ((1863, 1893), 'os.path.join', 'os.path.join', (['directory', '_file'], {}), '(directory, _file)\n', (1875, 1893), False, 'import os\n'), ((971, 982), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (980, 982), False, 'import os\n'), ((1283, 1304), 'os.listdir', 'os.listdir', (['directory'], {}), '(directory)\n', (1293, 1304), False, 'import os\n'), ((2428, 2458), 'PyPDF2.PdfFileReader', 'PdfFileReader', (['file_name', '"""rb"""'], {}), "(file_name, 'rb')\n", (2441, 2458), False, 'from PyPDF2 import PdfFileMerger, PdfFileReader\n')]
''' This module provides database interfaces to postgres SQL (c) Copyright 2013 Mark V Systems Limited, California US, All rights reserved. Mark V copyright applies to this software, which is licensed according to the terms of Arelle(r). ''' import sys, os, io, glob, time, re, datetime from math import isnan, isinf from decimal import Decimal from arelle.ModelValue import dateTime from arelle.PythonUtil import flattenSequence import socket TRACESQLFILE = None #TRACESQLFILE = r"z:\temp\sqltraceWin.log" # uncomment to trace SQL on connection (very big file!!!) #TRACESQLFILE = "/Users/hermf/temp/sqltraceUnx.log" # uncomment to trace SQL on connection (very big file!!!) def noop(*args, **kwargs): return class NoopException(Exception): pass try: import pg8000 hasPostgres = True pgConnect = pg8000.connect pgOperationalError = pg8000.OperationalError pgProgrammingError = pg8000.ProgrammingError pgInterfaceError = pg8000.InterfaceError except ImportError: hasPostgres = False pgConnect = noop pgOperationalError = pgProgrammingError = pgInterfaceError = NoopException try: import pymysql # MIT License but not installed at GAE hasMySql = True mysqlConnect = pymysql.connect mysqlProgrammingError = pymysql.ProgrammingError mysqlInterfaceError = pymysql.InterfaceError mysqlInternalError = pymysql.InternalError except ImportError: try : import MySQLdb # LGPL License and used on GAE, Python 2.7 only hasMySql = True mysqlConnect = MySQLdb.connect mysqlProgrammingError = MySQLdb.ProgrammingError mysqlInterfaceError = MySQLdb.InterfaceError mysqlInternalError = MySQLdb.InternalError except ImportError: hasMySql = False mysqlConnect = noop mysqlProgrammingError = mysqlInterfaceError = mysqlInternalError = NoopException try: # requires NLS_LANG to be UTF-8 os.environ["NLS_LANG"] = ".UTF8" os.environ['ORA_NCHAR_LITERAL_REPLACE'] = 'TRUE' import cx_Oracle hasOracle = True oracleConnect = cx_Oracle.connect oracleDatabaseError = cx_Oracle.DatabaseError oracleInterfaceError = cx_Oracle.InterfaceError oracleNCLOB = cx_Oracle.NCLOB except ImportError: # also requires "Oracle Instant Client" hasOracle = False oracleConnect = noop oracleDatabaseError = oracleInterfaceError = NoopException oracleCLOB = None try: import pyodbc hasMSSql = True mssqlConnect = pyodbc.connect mssqlOperationalError = pyodbc.OperationalError mssqlProgrammingError = pyodbc.ProgrammingError mssqlInterfaceError = pyodbc.InterfaceError mssqlInternalError = pyodbc.InternalError mssqlDataError = pyodbc.DataError mssqlIntegrityError = pyodbc.IntegrityError except ImportError: hasMSSql = False mssqlConnect = noop mssqlOperationalError = mssqlProgrammingError = mssqlInterfaceError = mssqlInternalError = \ mssqlDataError = mssqlIntegrityError = NoopException try: import sqlite3 hasSQLite = True sqliteConnect = sqlite3.connect sqliteParseDecltypes = sqlite3.PARSE_DECLTYPES sqliteOperationalError = sqlite3.OperationalError sqliteProgrammingError = sqlite3.ProgrammingError sqliteInterfaceError = sqlite3.InterfaceError sqliteInternalError = sqlite3.InternalError sqliteDataError = sqlite3.DataError sqliteIntegrityError = sqlite3.IntegrityError except ImportError: hasSQLite = False sqliteConnect = noop sqliteParseDecltypes = None sqliteOperationalError = sqliteProgrammingError = sqliteInterfaceError = sqliteInternalError = \ sqliteDataError = sqliteIntegrityError = NoopException def isSqlConnection(host, port, timeout=10, product=None): # determine if postgres port t = 2 while t < timeout: try: if product == "postgres" and hasPostgres: pgConnect(user='', 
host=host, port=int(port or 5432), timeout=t) elif product == "mysql" and hasMySql: mysqlConnect(user='', host=host, port=int(port or 5432), socket_timeout=t) elif product == "orcl" and hasOracle: orclConnect = oracleConnect('{}/{}@{}:{}' .format("", "", host, ":{}".format(port) if port else "")) elif product == "mssql" and hasMSSql: mssqlConnect(user='', host=host, socket_timeout=t) elif product == "sqlite" and hasSQLite: sqliteConnect("", t) # needs a database specified for this test except (pgProgrammingError, mysqlProgrammingError, oracleDatabaseError, sqliteProgrammingError): return True # success, this is really a postgres socket, wants user name except (pgInterfaceError, mysqlInterfaceError, oracleInterfaceError, mssqlOperationalError, mssqlInterfaceError, sqliteOperationalError, sqliteInterfaceError): return False # something is there but not postgres except socket.timeout: t = t + 2 # relax - try again with longer timeout return False class XPDBException(Exception): def __init__(self, code, message, **kwargs ): self.code = code self.message = message self.kwargs = kwargs self.args = ( self.__repr__(), ) def __repr__(self): return _('[{0}] exception: {1}').format(self.code, self.message % self.kwargs) class SqlDbConnection(): def __init__(self, modelXbrl, user, password, host, port, database, timeout, product): self.modelXbrl = modelXbrl self.disclosureSystem = modelXbrl.modelManager.disclosureSystem if product == "postgres": if not hasPostgres: raise XPDBException("xpgDB:MissingPostgresInterface", _("Postgres interface is not installed")) self.conn = pgConnect(user=user, password=password, host=host, port=int(port or 5432), database=database, timeout=timeout or 60) self.product = product elif product == "mysql": if not hasMySql: raise XPDBException("xpgDB:MissingMySQLInterface", _("MySQL interface is not installed")) self.conn = mysqlConnect(user=user, passwd=password, host=host, port=int(port or 5432), db=database, # pymysql takes database or db but MySQLdb only takes db connect_timeout=timeout or 60, charset='utf8') self.product = product elif product == "orcl": if not hasOracle: raise XPDBException("xpgDB:MissingOracleInterface", _("Oracle interface is not installed")) self.conn = oracleConnect('{}/{}@{}{}' .format(user, password, host, ":{}".format(port) if port else "")) # self.conn.paramstyle = 'named' self.product = product elif product == "mssql": if not hasMSSql: raise XPDBException("xpgDB:MissingMSSQLInterface", _("MSSQL server interface is not installed")) self.conn = mssqlConnect('DRIVER={{SQL Server Native Client 11.0}};SERVER={2};DATABASE={3};UID={0};PWD={1};CHARSET=UTF8' .format(user, password, host, # e.g., localhost\\SQLEXPRESS database)) self.product = product elif product == "sqlite": if not hasSQLite: raise XPDBException("xpgDB:MissingSQLiteInterface", _("SQLite interface is not installed")) self.conn = sqliteConnect(database, (timeout or 60), detect_types=sqliteParseDecltypes) self.product = product self.syncSequences = False # for object_id coordination of autoincrement values else: self.product = None self.tableColTypes = {} self.tableColDeclaration = {} self.accessionId = "(None)" self.tempInputTableName = "input{}".format(os.getpid()) def close(self, rollback=False): if not self.isClosed: try: self.closeCursor() if rollback: self.rollback() self.conn.close() self.__dict__.clear() # dereference everything except Exception as ex: self.__dict__.clear() # dereference everything if sys.version[0] >= '3': raise ex.with_traceback(ex.__traceback__) else: raise ex 
@property def isClosed(self): return not bool(self.__dict__) # closed when dict is empty def showStatus(self, msg, clearAfter=None): self.modelXbrl.modelManager.showStatus(msg, clearAfter) def pyStrFromDbStr(self, str): if self.product == "postgres": return str.replace("%%", "%") return str def pyBoolFromDbBool(self, str): return str in ("TRUE", "t", True) # may be DB string or Python boolean (preconverted) def pyNoneFromDbNULL(self, str): return None def dbNum(self, num): if isinstance(num, (int,float)): if isinf(num) or isnan(num): return None # not legal in SQL return num return None def dbStr(self, s): if self.product == "orcl": return "'" + str(s).replace("'","''") + "'" elif self.product == "mysql": return "N" + self.conn.escape(str(s)) elif self.product == "sqlite": return "'" + str(s).replace("'","''") + "'" else: return "'" + str(s).replace("'","''").replace('%', '%%') + "'" def dbTableName(self, tableName): if self.product == "orcl": return '"' + tableName + '"' else: return tableName @property def cursor(self): try: return self._cursor except AttributeError: self._cursor = self.conn.cursor() return self._cursor def closeCursor(self): try: self._cursor.close() del self._cursor except (AttributeError, pgOperationalError, mysqlProgrammingError, oracleDatabaseError): if hasattr(self, '_cursor'): del self._cursor def commit(self): self.conn.commit() def rollback(self): try: self.conn.rollback() except (pg8000.ConnectionClosedError): pass def dropTemporaryTable(self): if self.product == "orcl": self.execute(""" BEGIN EXECUTE IMMEDIATE 'drop table {}'; EXCEPTION WHEN OTHERS THEN NULL; END; """.format(self.tempInputTableName), close=True, commit=False, fetch=False, action="dropping temporary table") elif self.product == "mssql": self.execute(""" IF OBJECT_ID('tempdb..#{0}', 'U') IS NOT NULL DROP TABLE "#{0}"; """.format(self.tempInputTableName), close=True, commit=False, fetch=False, action="dropping temporary table") def lockTables(self, tableNames, isSessionTransaction=False): ''' lock for an entire transaction has isSessionTransaction=True, locks until commit some databases require locks per operation (such as MySQL), when isSessionTransaction=False ''' if self.product in ("postgres", "orcl") and isSessionTransaction: result = self.execute('LOCK {} IN SHARE ROW EXCLUSIVE MODE'.format(', '.join(tableNames)), close=False, commit=False, fetch=False, action="locking table") elif self.product in ("mysql",): result = self.execute('LOCK TABLES {}' .format(', '.join(['{} WRITE'.format(t) for t in tableNames])), close=False, commit=False, fetch=False, action="locking table") elif self.product in ("sqlite",) and isSessionTransaction: result = self.execute('BEGIN TRANSACTION', close=False, commit=False, fetch=False, action="locking table") # note, there is no lock for MS SQL (as far as I could find) def unlockAllTables(self): if self.product in ("mysql",): result = self.execute('UNLOCK TABLES', close=False, commit=False, fetch=False, action="locking table") elif self.product in ("sqlite",): result = self.execute('COMMIT TRANSACTION', close=False, commit=False, fetch=False, action="locking table") def execute(self, sql, commit=False, close=True, fetch=True, params=None, action="execute"): cursor = self.cursor try: if isinstance(params, dict): cursor.execute(sql, **params) elif isinstance(params, (tuple,list)): cursor.execute(sql, params) else: cursor.execute(sql) except (pgProgrammingError, mysqlProgrammingError, mysqlInternalError, oracleDatabaseError, mssqlOperationalError, 
mssqlInterfaceError, mssqlDataError, mssqlProgrammingError, mssqlIntegrityError, sqliteOperationalError, sqliteInterfaceError, sqliteDataError, socket.timeout, ValueError) as ex: # something wrong with SQL if TRACESQLFILE: with io.open(TRACESQLFILE, "a", encoding='utf-8') as fh: fh.write("\n\n>>> EXCEPTION {} error {}\n sql {}\n" .format(action, str(ex), sql)) raise if fetch: result = cursor.fetchall() else: #if cursor.rowcount > 0: # cursor.fetchall() # must get anyway result = None if commit: self.conn.commit() if close: self.closeCursor() return result def create(self, ddlFiles, dropPriorTables=True): # ddl Files may be a sequence (or not) of file names, glob wildcards ok, relative ok if dropPriorTables: # drop tables startedAt = time.time() self.showStatus("Dropping prior tables") for table in self.tablesInDB(): result = self.execute('DROP TABLE %s' % self.dbTableName(table), close=False, commit=False, fetch=False, action="dropping table") self.showStatus("Dropping prior sequences") for sequence in self.sequencesInDB(): result = self.execute('DROP SEQUENCE %s' % sequence, close=False, commit=False, fetch=False, action="dropping sequence") self.modelXbrl.profileStat(_("XbrlPublicDB: drop prior tables"), time.time() - startedAt) startedAt = time.time() # process ddlFiles to make absolute and de-globbed _ddlFiles = [] for ddlFile in flattenSequence(ddlFiles): if not os.path.isabs(ddlFile): ddlFile = os.path.join(os.path.dirname(__file__), ddlFile) for _ddlFile in glob.glob(ddlFile): _ddlFiles.append(_ddlFile) for ddlFile in _ddlFiles: with io.open(ddlFile, 'rt', encoding='utf-8') as fh: sql = fh.read().replace('%', '%%') # separate dollar-quoted bodies and statement lines sqlstatements = [] def findstatements(start, end, laststatement): for line in sql[start:end].split('\n'): stmt, comment1, comment2 = line.partition("--") laststatement += stmt + '\n' if ';' in stmt: sqlstatements.append(laststatement) laststatement = '' return laststatement stmt = '' i = 0 patternDollarEsc = re.compile(r"([$]\w*[$])", re.DOTALL + re.MULTILINE) while i < len(sql): # preserve $$ function body escaping match = patternDollarEsc.search(sql, i) if not match: stmt = findstatements(i, len(sql), stmt) sqlstatements.append(stmt) break # found match dollarescape = match.group() j = match.end() stmt = findstatements(i, j, stmt) # accumulate statements before match i = sql.find(dollarescape, j) if i > j: # found end of match if self.product == "mysql": # mysql doesn't want DELIMITER over the interface stmt = sql[j:i] i += len(dollarescape) else: # postgres and others want the delimiter in the sql sent i += len(dollarescape) stmt += sql[j:i] sqlstatements.append(stmt) # problem with driver and $$ statements, skip them (for now) stmt = '' action = "executing ddl in {}".format(os.path.basename(ddlFile)) for i, sql in enumerate(sqlstatements): if any(cmd in sql for cmd in ('CREATE TABLE', 'CREATE SEQUENCE', 'INSERT INTO', 'CREATE TYPE', 'CREATE FUNCTION', 'DROP' 'SET', 'CREATE INDEX', 'CREATE UNIQUE INDEX', # 'ALTER TABLE ONLY' 'CREATE VIEW', 'CREATE OR REPLACE VIEW', 'CREATE MATERIALIZED VIEW' )): statusMsg, sep, rest = sql.strip().partition('\n') self.showStatus(statusMsg[0:50]) result = self.execute(sql, close=False, commit=False, fetch=False, action=action) if TRACESQLFILE: with io.open(TRACESQLFILE, "a", encoding='utf-8') as fh: fh.write("\n\n>>> ddl {0}: \n{1} \n\n>>> result: \n{2}\n" .format(i, sql, result)) fh.write(sql) self.showStatus("") self.conn.commit() self.modelXbrl.profileStat(_("XbrlPublicDB: create 
tables"), time.time() - startedAt) self.closeCursor() def databasesInDB(self): return self.execute({"postgres":"SELECT datname FROM pg_database;", "mysql": "SHOW databases;", "mssql": "SELECT name FROM master..sysdatabases", "orcl": "SELECT DISTINCT OWNER FROM ALL_OBJECTS" }[self.product], action="listing tables in database") def dropAllTablesInDB(self): # drop all tables (clean out database) if self.product == "postgres": self.execute("drop schema public cascade") self.execute("create schema public;", commit=True, action="recreating schema") elif self.product in ("mysql", "mssql", "orcl"): for tableName in self.tablesInDB(): self.execute("DROP TABLE {}".format( self.dbTableName(tableName) ), action="dropping tables") def tablesInDB(self): return set(tableRow[0] for tableRow in self.execute({"postgres":"SELECT tablename FROM pg_tables WHERE schemaname = 'public';", "mysql": "SHOW tables;", "mssql": "SELECT name FROM sys.TABLES;", "orcl": "SELECT table_name FROM user_tables", "sqlite": "SELECT name FROM sqlite_master WHERE type='table';" }[self.product])) def sequencesInDB(self): try: return set(sequenceRow[0] for sequenceRow in self.execute({"postgres":"SELECT c.relname FROM pg_class c WHERE c.relkind = 'S';", "mysql": "SHOW triggers;", "mssql": "SELECT name FROM sys.triggers;", "orcl": "SHOW trigger_name FROM user_triggers"\ }[self.product])) except KeyError: return set() def columnTypeFunctions(self, table): if table not in self.tableColTypes: if self.product == "orcl": colTypesResult = self.execute("SELECT column_name, data_type, data_precision, char_col_decl_length " "FROM user_tab_columns " "WHERE table_name = '{0}'" .format( table )) # table name is not " " quoted here colTypes = [] for name, fulltype, dataPrecision, charColDeclLength in colTypesResult: name = name.lower() fulltype = fulltype.lower() if fulltype in ("varchar", "varchar2"): colDecl = "{}({})".format(fulltype, charColDeclLength) elif fulltype == "number" and dataPrecision: colDecl = "{}({})".format(fulltype, dataPrecision) else: colDecl = fulltype colTypes.append( (name, fulltype, colDecl) ) # print ("col types for {} = {} ".format(table, colTypes)) elif self.product == "mssql": colTypesResult = self.execute("SELECT column_name, data_type, character_maximum_length " "FROM information_schema.columns " "WHERE table_name = '{0}'" .format( table )) # table name is not " " quoted here colTypes = [] for name, fulltype, characterMaxLength in colTypesResult: name = name.lower() if fulltype in ("char", "varchar", "nvarchar"): if characterMaxLength == -1: characterMaxLength = "max" colDecl = "{}({})".format(fulltype, characterMaxLength) else: colDecl = fulltype colTypes.append( (name, fulltype, colDecl) ) # print ("col types for {} = {} ".format(table, colTypes)) elif self.product == "sqlite": colTypesResult = self.execute("PRAGMA table_info('{0}')" .format( table )) # table name is not " " quoted here colTypes = [] for cid, name, type, notnull, dflt_value, pk in colTypesResult: name = name.lower() type = type.lower() colTypes.append( (name, type, type) ) # print ("col types for {} = {} ".format(table, colTypes)) else: colTypes = self.execute("SELECT c.column_name, c.data_type, {0} " "FROM information_schema.columns c " "WHERE c.table_name = '{1}' " "ORDER BY c.ordinal_position;" .format('c.column_type' if self.product == 'mysql' else 'c.data_type', self.dbTableName(table))) self.tableColTypes[table] = dict((name, # (type cast, conversion function) ('::' + typename if typename in # takes first word of full type {"integer", 
"smallint", "int", "bigint", "real", "numeric", "int2", "int4", "int8", "float4", "float8", "boolean", "date", "timestamp"} else "::double precision" if fulltype.startswith("double precision") else '', int if typename in ("integer", "smallint", "int", "bigint", "number") else float if typename in ("double precision", "real", "numeric") else self.pyBoolFromDbBool if typename in ("bit", "boolean") else dateTime if typename in ("date","timestamp") else # ModelValue.datetime !!! not python class str)) for name, fulltype, colDecl in colTypes for typename in (fulltype.partition(' ')[0],)) if self.product in ('mysql', 'mssql', 'orcl', 'sqlite'): self.tableColDeclaration[table] = dict((name, colDecl) for name, fulltype, colDecl in colTypes) return self.tableColTypes[table] def getTable(self, table, idCol, newCols=None, matchCols=None, data=None, commit=False, comparisonOperator='=', checkIfExisting=False, insertIfNotMatched=True, returnMatches=True, returnExistenceStatus=False): # generate SQL # note: comparison by = will never match NULL fields # use 'IS NOT DISTINCT FROM' to match nulls, but this is not indexed and verrrrry slooooow if not data or not newCols or not matchCols: # nothing can be done, just return return () # place breakpoint here to debug isOracle = self.product == "orcl" isMSSql = self.product == "mssql" isPostgres = self.product == "postgres" isSQLite = self.product == "sqlite" newCols = [newCol.lower() for newCol in newCols] matchCols = [matchCol.lower() for matchCol in matchCols] returningCols = [] if idCol: # idCol is the first returned column if present returningCols.append(idCol.lower()) for matchCol in matchCols: if matchCol not in returningCols: # allow idCol to be specified or default assigned returningCols.append(matchCol) colTypeFunctions = self.columnTypeFunctions(table) colDeclarations = self.tableColDeclaration.get(table) try: colTypeCast = tuple(colTypeFunctions[colName][0] for colName in newCols) colTypeFunction = [colTypeFunctions[colName][1] for colName in returningCols] if returnExistenceStatus: colTypeFunction.append(self.pyBoolFromDbBool) # existence is a boolean except KeyError as err: raise XPDBException("xpgDB:MissingColumnDefinition", _("Table %(table)s column definition missing: %(missingColumnName)s"), table=table, missingColumnName=str(err)) rowValues = [] rowLongValues = [] # contains None if no parameters, else {} parameter dict if isOracle: longColValues = {} else: longColValues = [] for row in data: colValues = [] for col in row: if isinstance(col, bool): if isOracle or isMSSql or isSQLite: colValues.append('1' if col else '0') else: colValues.append('TRUE' if col else 'FALSE') elif isinstance(col, int): colValues.append(str(col)) elif isinstance(col, float): if _ISFINITE(col): colValues.append(str(col)) else: # no NaN, INF, in SQL implementations (Postgres has it but not IEEE implementation) colValues.append('NULL') elif isinstance(col, Decimal): if col.is_finite(): colValues.append(str(col)) else: # no NaN, INF, in SQL implementations (Postgres has it but not IEEE implementation) colValues.append('NULL') elif isinstance(col, (datetime.date, datetime.datetime)) and self.product == "orcl": colValues.append("DATE '{:04}-{:02}-{:02}'".format(col.year, col.month, col.day)) elif isinstance(col, datetime.datetime) and (isMSSql or isSQLite): colValues.append("'{:04}-{:02}-{:02} {:02}:{:02}:{:02}'".format(col.year, col.month, col.day, col.hour, col.minute, col.second)) elif isinstance(col, datetime.date) and (isMSSql or isSQLite): 
colValues.append("'{:04}-{:02}-{:02}'".format(col.year, col.month, col.day)) elif col is None: colValues.append('NULL') elif isinstance(col, _STR_BASE) and len(col) >= 4000 and (isOracle or isMSSql): if isOracle: colName = "col{}".format(len(colValues)) longColValues[colName] = col colValues.append(":" + colName) else: longColValues.append(col) colValues.append("?") else: colValues.append(self.dbStr(col)) if not rowValues and isPostgres: # first row for i, cast in enumerate(colTypeCast): if cast: colValues[i] = colValues[i] + cast rowColValues = ", ".join(colValues) rowValues.append("(" + rowColValues + ")" if not isOracle else rowColValues) if longColValues: rowLongValues.append(longColValues) if isOracle: longColValues = {} # must be new instance of dict else: longColValues = [] else: rowLongValues.append(None) values = ", \n".join(rowValues) _table = self.dbTableName(table) _inputTableName = self.tempInputTableName if self.product == "postgres": # insert new rows, return id and cols of new and existing rows # use IS NOT DISTINCT FROM instead of = to compare NULL usefully sql = [((''' WITH row_values (%(newCols)s) AS ( VALUES %(values)s )''' + (''', insertions AS ( INSERT INTO %(table)s (%(newCols)s) SELECT %(newCols)s FROM row_values v''' + (''' WHERE NOT EXISTS (SELECT 1 FROM %(table)s x WHERE %(match)s)''' if checkIfExisting else '') + ''' RETURNING %(returningCols)s ) ''' if insertIfNotMatched else '') + ''' (''' + ((''' SELECT %(x_returningCols)s %(statusIfExisting)s FROM %(table)s x JOIN row_values v ON (%(match)s) ''' if checkIfExisting else '') + (''' ) UNION ( ''' if (checkIfExisting and insertIfNotMatched) else '') + (''' SELECT %(returningCols)s %(statusIfInserted)s FROM insertions''' if insertIfNotMatched else '')) + ''' );''') % {"table": _table, "idCol": idCol, "newCols": ', '.join(newCols), "returningCols": ', '.join(returningCols), "x_returningCols": ', '.join('x.{0}'.format(c) for c in returningCols), "match": ' AND '.join('x.{0} {1} v.{0}'.format(col, comparisonOperator) for col in matchCols), "values": values, "statusIfInserted": ", FALSE" if returnExistenceStatus else "", "statusIfExisting": ", TRUE" if returnExistenceStatus else "" }, None, True)] elif self.product == "mysql": sql = [("CREATE TEMPORARY TABLE %(inputTable)s ( %(inputCols)s );" % {"inputTable": _inputTableName, "inputCols": ', '.join('{0} {1}'.format(newCol, colDeclarations[newCol]) for newCol in newCols)}, None, False), ("INSERT INTO %(inputTable)s ( %(newCols)s ) VALUES %(values)s;" % {"inputTable": _inputTableName, "newCols": ', '.join(newCols), "values": values}, None, False)] if insertIfNotMatched: if checkIfExisting: _where = ('WHERE NOT EXISTS (SELECT 1 FROM %(table)s x WHERE %(match)s)' % {"table": _table, "match": ' AND '.join('x.{0} {1} i.{0}'.format(col, comparisonOperator) for col in matchCols)}) _whereLock = (", %(table)s AS x READ" % {"table": _table}) else: _where = ""; _whereLock = "" sql.append( ("LOCK TABLES %(table)s WRITE %(whereLock)s" % {"table": _table, "whereLock": _whereLock}, None, False) ) sql.append( ("INSERT INTO %(table)s ( %(newCols)s ) SELECT %(newCols)s FROM %(inputTable)s i %(where)s;" % {"inputTable": _inputTableName, "table": _table, "newCols": ', '.join(newCols), "where": _where}, None, False) ) elif returnMatches or returnExistenceStatus: sql.append( ("LOCK TABLES %(table)s READ" % {"table": _table}, None, False) ) # don't know how to get status if existing if returnMatches or returnExistenceStatus: sql.append( ("SELECT %(returningCols)s %(statusIfExisting)s from 
%(inputTable)s JOIN %(table)s ON ( %(match)s );" % {"inputTable": _inputTableName, "table": _table, "newCols": ', '.join(newCols), "match": ' AND '.join('{0}.{2} = {1}.{2}'.format(_table,_inputTableName,col) for col in matchCols), "statusIfExisting": ", FALSE" if returnExistenceStatus else "", "returningCols": ', '.join('{0}.{1}'.format(_table,col) for col in returningCols)}, None, True) ) sql.append( ("DROP TEMPORARY TABLE %(inputTable)s;" % {"inputTable": _inputTableName}, None, False) ) elif self.product == "mssql": sql = [("CREATE TABLE #%(inputTable)s ( %(inputCols)s );" % {"inputTable": _inputTableName, "inputCols": ', '.join('{0} {1}'.format(newCol, colDeclarations[newCol]) for newCol in newCols)}, None, False)] # break values insertion into 1000's each def insertMSSqlRows(i, j, params): sql.append(("INSERT INTO #%(inputTable)s ( %(newCols)s ) VALUES %(values)s;" % {"inputTable": _inputTableName, "newCols": ', '.join(newCols), "values": ", ".join(rowValues[i:j])}, params, False)) iMax = len(rowValues) i = 0 while (i < iMax): for j in range(i, min(i+1000, iMax)): if rowLongValues[j] is not None: if j > i: insertMSSqlRows(i, j, None) insertMSSqlRows(j, j+1, rowLongValues[j]) i = j + 1 break if i < j+1 and i < iMax: insertMSSqlRows(i, j+1, None) i = j+1 if insertIfNotMatched: sql.append(("MERGE INTO %(table)s USING #%(inputTable)s ON (%(match)s) " "WHEN NOT MATCHED THEN INSERT (%(newCols)s) VALUES (%(values)s);" % {"inputTable": _inputTableName, "table": _table, "newCols": ', '.join(newCols), "match": ' AND '.join('{0}.{2} = #{1}.{2}'.format(_table,_inputTableName,col) for col in matchCols), "values": ', '.join("#{0}.{1}".format(_inputTableName,newCol) for newCol in newCols)}, None, False)) if returnMatches or returnExistenceStatus: sql.append(# don't know how to get status if existing ("SELECT %(returningCols)s %(statusIfExisting)s from #%(inputTable)s JOIN %(table)s ON ( %(match)s );" % {"inputTable": _inputTableName, "table": _table, "newCols": ', '.join(newCols), "match": ' AND '.join('{0}.{2} = #{1}.{2}'.format(_table,_inputTableName,col) for col in matchCols), "statusIfExisting": ", 0" if returnExistenceStatus else "", "returningCols": ', '.join('{0}.{1}'.format(_table,col) for col in returningCols)}, None, True)) sql.append(("DROP TABLE #%(inputTable)s;" % {"inputTable": _inputTableName}, None, False)) elif self.product == "orcl": sql = [("CREATE GLOBAL TEMPORARY TABLE %(inputTable)s ( %(inputCols)s )" % {"inputTable": _inputTableName, "inputCols": ', '.join('{0} {1}'.format(newCol, colDeclarations[newCol]) for newCol in newCols)}, None, False)] # break values insertion into 1000's each def insertOrclRows(i, j, params): sql.append(("INSERT INTO %(inputTable)s ( %(newCols)s ) %(values)s" % {"inputTable": _inputTableName, "newCols": ', '.join(newCols), "values": "\nUNION ALL".join(" SELECT {} FROM dual ".format(r) for r in rowValues[i:j])}, params, False)) iMax = len(rowValues) i = 0 while (i < iMax): for j in range(i, min(i+1000, iMax)): if rowLongValues[j] is not None: if j > i: insertOrclRows(i, j, None) insertOrclRows(j, j+1, rowLongValues[j]) i = j + 1 break if i < j+1 and i < iMax: insertOrclRows(i, j+1, None) i = j+1 if insertIfNotMatched: sql.append(("MERGE INTO %(table)s USING %(inputTable)s ON (%(match)s) " "WHEN NOT MATCHED THEN INSERT (%(newCols)s) VALUES (%(values)s)" % {"inputTable": _inputTableName, "table": _table, "newCols": ', '.join(newCols), "match": ' AND '.join('{0}.{2} = {1}.{2}'.format(_table,_inputTableName,col) for col in matchCols), "values": ', 
'.join("{0}.{1}".format(_inputTableName,newCol) for newCol in newCols)}, None, False)) if returnMatches or returnExistenceStatus: sql.append(# don't know how to get status if existing ("SELECT %(returningCols)s %(statusIfExisting)s from %(inputTable)s JOIN %(table)s ON ( %(match)s )" % {"inputTable": _inputTableName, "table": _table, "newCols": ', '.join(newCols), "match": ' AND '.join('{0}.{2} = {1}.{2}'.format(_table,_inputTableName,col) for col in matchCols), "statusIfExisting": ", 0" if returnExistenceStatus else "", "returningCols": ', '.join('{0}.{1}'.format(_table,col) for col in returningCols)}, None, True)) sql.append(("DROP TABLE %(inputTable)s" % {"inputTable": _inputTableName}, None, False)) elif self.product == "sqlite": sql = [("CREATE TEMP TABLE %(inputTable)s ( %(inputCols)s );" % {"inputTable": _inputTableName, "inputCols": ', '.join('{0} {1}'.format(newCol, colDeclarations[newCol]) for newCol in newCols)}, None, False)] # break values insertion into 1000's each def insertSQLiteRows(i, j, params): sql.append(("INSERT INTO %(inputTable)s ( %(newCols)s ) VALUES %(values)s;" % {"inputTable": _inputTableName, "newCols": ', '.join(newCols), "values": ", ".join(rowValues[i:j])}, params, False)) iMax = len(rowValues) i = 0 while (i < iMax): for j in range(i, min(i+500, iMax)): if rowLongValues[j] is not None: if j > i: insertSQLiteRows(i, j, None) insertSQLiteRows(j, j+1, rowLongValues[j]) i = j + 1 break if i < j+1 and i < iMax: insertSQLiteRows(i, j+1, None) i = j+1 if insertIfNotMatched: if checkIfExisting: _where = ('WHERE NOT EXISTS (SELECT 1 FROM %(table)s x WHERE %(match)s)' % {"table": _table, "match": ' AND '.join('x.{0} {1} i.{0}'.format(col, comparisonOperator) for col in matchCols)}) else: _where = ""; sql.append( ("INSERT INTO %(table)s ( %(newCols)s ) SELECT %(newCols)s FROM %(inputTable)s i %(where)s;" % {"inputTable": _inputTableName, "table": _table, "newCols": ', '.join(newCols), "where": _where}, None, False) ) if returnMatches or returnExistenceStatus: sql.append(# don't know how to get status if existing ("SELECT %(returningCols)s %(statusIfExisting)s from %(inputTable)s JOIN %(table)s ON ( %(match)s );" % {"inputTable": _inputTableName, "table": _table, "newCols": ', '.join(newCols), "match": ' AND '.join('{0}.{2} = {1}.{2}'.format(_table,_inputTableName,col) for col in matchCols), "statusIfExisting": ", 0" if returnExistenceStatus else "", "returningCols": ', '.join('{0}.{1}'.format(_table,col) for col in returningCols)}, None, True)) sql.append(("DROP TABLE %(inputTable)s;" % {"inputTable": _inputTableName}, None, False)) if insertIfNotMatched and self.syncSequences: sql.append( ("update sqlite_sequence " "set seq = (select seq from sqlite_sequence where name = '%(table)s') " "where name != '%(table)s';" % {"table": _table}, None, False) ) if TRACESQLFILE: with io.open(TRACESQLFILE, "a", encoding='utf-8') as fh: fh.write("\n\n>>> accession {0} table {1} sql length {2} row count {3}\n" .format(self.accessionId, table, len(sql), len(data))) for sqlStmt, params, fetch in sql: fh.write("\n " + sqlStmt + "\n {}".format(params if params else "")) tableRows = [] for sqlStmt, params, fetch in sql: if params and isOracle: self.cursor.setinputsizes(**dict((name,oracleNCLOB) for name in params)) result = self.execute(sqlStmt,commit=commit, close=False, fetch=fetch, params=params) if fetch and result: tableRows.extend(result) if TRACESQLFILE: with io.open(TRACESQLFILE, "a", encoding='utf-8') as fh: fh.write("\n\n>>> accession {0} table {1} result row count {2}\n{3}\n" 
.format(self.accessionId, table, len(tableRows), '\n'.join(str(r) for r in tableRows))) return tuple(tuple(None if colValue == "NULL" or colValue is None else colTypeFunction[i](colValue) # convert to int, datetime, etc for i, colValue in enumerate(row)) for row in tableRows) def updateTable(self, table, cols=None, data=None, commit=False): # generate SQL # note: comparison by = will never match NULL fields # use 'IS NOT DISTINCT FROM' to match nulls, but this is not indexed and verrrrry slooooow if not cols or not data: # nothing can be done, just return return () # place breakpoint here to debug isOracle = self.product == "orcl" isSQLite = self.product == "sqlite" idCol = cols[0] colTypeFunctions = self.columnTypeFunctions(table) colDeclarations = self.tableColDeclaration.get(table) try: colTypeCast = tuple(colTypeFunctions[colName][0] for colName in cols) except KeyError as err: raise XPDBException("xpgDB:MissingColumnDefinition", _("Table %(table)s column definition missing: %(missingColumnName)s"), table=table, missingColumnName=str(err)) rowValues = [] for row in data: colValues = [] for col in row: if isinstance(col, bool): colValues.append('TRUE' if col else 'FALSE') elif isinstance(col, (int,float)): colValues.append(str(col)) elif col is None: colValues.append('NULL') else: colValues.append(self.dbStr(col)) if not rowValues and self.product == "postgres": # first row for i, cast in enumerate(colTypeCast): if cast: colValues[i] = colValues[i] + cast rowColValues = ", ".join(colValues) if isOracle: rowValues.append(rowColValues) elif isSQLite: rowValues.append(colValues) else: rowValues.append("(" + rowColValues + ")") if not isOracle and not isSQLite: values = ", \n".join(rowValues) _table = self.dbTableName(table) _inputTableName = self.tempInputTableName if self.product == "postgres": # insert new rows, return id and cols of new and existing rows # use IS NOT DISTINCT FROM instead of = to compare NULL usefully sql = [(''' WITH input (%(valCols)s) AS ( VALUES %(values)s ) UPDATE %(table)s t SET %(settings)s FROM input i WHERE i.%(idCol)s = t.%(idCol)s ;''') % {"table": _table, "idCol": idCol, "valCols": ', '.join(col for col in cols), "settings": ', '.join('{0} = i.{0}'.format(cols[i]) for i, col in enumerate(cols) if i > 0), "values": values}] elif self.product == "mysql": sql = ["CREATE TEMPORARY TABLE %(inputTable)s ( %(valCols)s );" % {"inputTable": _inputTableName, "valCols": ', '.join('{0} {1}'.format(col, colDeclarations[col]) for col in cols)}, "INSERT INTO %(inputTable)s ( %(newCols)s ) VALUES %(values)s;" % {"inputTable": _inputTableName, "newCols": ', '.join(cols), "values": values}, "LOCK TABLES %(inputTable)s AS i READ, %(table)s AS t WRITE;" % {"inputTable": _inputTableName, "table": _table}, "UPDATE %(inputTable)s i, %(table)s t SET %(settings)s WHERE i.%(idCol)s = t.%(idCol)s;" % {"inputTable": _inputTableName, "table": _table, "idCol": idCol, "settings": ', '.join('t.{0} = i.{0}'.format(cols[i]) for i, col in enumerate(cols) if i > 0)}, "DROP TEMPORARY TABLE %(inputTable)s;" % {"inputTable": _inputTableName}] elif self.product == "mssql": sql = ["CREATE TABLE #%(inputTable)s ( %(valCols)s );" % {"inputTable": _inputTableName, "valCols": ', '.join('{0} {1}'.format(col, colDeclarations[col]) for col in cols)}] # must break values insertion into 1000's each for i in range(0, len(rowValues), 950): values = ", \n".join(rowValues[i: i+950]) sql.append("INSERT INTO #%(inputTable)s ( %(cols)s ) VALUES %(values)s;" % {"inputTable": _inputTableName, "cols": ', 
'.join(cols), "values": values}) sql.append("MERGE INTO %(table)s USING #%(inputTable)s ON (#%(inputTable)s.%(idCol)s = %(table)s.%(idCol)s) " "WHEN MATCHED THEN UPDATE SET %(settings)s;" % {"inputTable": _inputTableName, "table": _table, "idCol": idCol, "settings": ', '.join('{0}.{2} = #{1}.{2}'.format(_table, _inputTableName, cols[i]) for i, col in enumerate(cols) if i > 0)}) sql.append("DROP TABLE #%(inputTable)s;" % {"inputTable": _inputTableName}) elif self.product == "orcl": sql = ["CREATE GLOBAL TEMPORARY TABLE %(inputTable)s ( %(valCols)s )" % {"inputTable": _inputTableName, "valCols": ', '.join('{0} {1}'.format(col, colDeclarations[col]) for col in cols)}] for i in range(0, len(rowValues), 500): sql.append( "INSERT INTO %(inputTable)s ( %(cols)s ) %(values)s" % {"inputTable": _inputTableName, "cols": ', '.join(cols), "values": "\nUNION ALL".join(" SELECT {} FROM dual ".format(r) for r in rowValues[i:i+500])}) sql.append("MERGE INTO %(table)s USING %(inputTable)s ON (%(inputTable)s.%(idCol)s = %(table)s.%(idCol)s) " "WHEN MATCHED THEN UPDATE SET %(settings)s" % {"inputTable": _inputTableName, "table": _table, "idCol": idCol, "settings": ', '.join('{0}.{2} = {1}.{2}'.format(_table, _inputTableName, cols[i]) for i, col in enumerate(cols) if i > 0)}) sql.append("DROP TABLE %(inputTable)s" % {"inputTable": _inputTableName}) elif self.product == "sqlite": sql = ["UPDATE %(table)s SET %(settings)s WHERE %(idCol)s = %(idVal)s;" % {"table": _table, "idCol": idCol, "idVal": rowValue[0], "settings": ', '.join('{0} = {1}'.format(col,rowValue[i]) for i, col in enumerate(cols) if i > 0)} for rowValue in rowValues] if TRACESQLFILE: with io.open(TRACESQLFILE, "a", encoding='utf-8') as fh: fh.write("\n\n>>> accession {0} table {1} sql length {2} row count {3}\n" .format(self.accessionId, table, len(sql), len(data))) for sqlStmt in sql: fh.write(sqlStmt) for sqlStmt in sql: self.execute(sqlStmt,commit=commit, fetch=False, close=False)
[ "math.isnan", "math.isinf", "os.getpid", "os.path.isabs", "os.path.basename", "os.path.dirname", "time.time", "io.open", "glob.glob", "arelle.PythonUtil.flattenSequence", "re.compile" ]
[((15850, 15861), 'time.time', 'time.time', ([], {}), '()\n', (15859, 15861), False, 'import sys, os, io, glob, time, re, datetime\n'), ((15967, 15992), 'arelle.PythonUtil.flattenSequence', 'flattenSequence', (['ddlFiles'], {}), '(ddlFiles)\n', (15982, 15992), False, 'from arelle.PythonUtil import flattenSequence\n'), ((8624, 8635), 'os.getpid', 'os.getpid', ([], {}), '()\n', (8633, 8635), False, 'import sys, os, io, glob, time, re, datetime\n'), ((15133, 15144), 'time.time', 'time.time', ([], {}), '()\n', (15142, 15144), False, 'import sys, os, io, glob, time, re, datetime\n'), ((16140, 16158), 'glob.glob', 'glob.glob', (['ddlFile'], {}), '(ddlFile)\n', (16149, 16158), False, 'import sys, os, io, glob, time, re, datetime\n'), ((16927, 16979), 're.compile', 're.compile', (['"""([$]\\\\w*[$])"""', '(re.DOTALL + re.MULTILINE)'], {}), "('([$]\\\\w*[$])', re.DOTALL + re.MULTILINE)\n", (16937, 16979), False, 'import sys, os, io, glob, time, re, datetime\n'), ((9855, 9865), 'math.isinf', 'isinf', (['num'], {}), '(num)\n', (9860, 9865), False, 'from math import isnan, isinf\n'), ((9869, 9879), 'math.isnan', 'isnan', (['num'], {}), '(num)\n', (9874, 9879), False, 'from math import isnan, isinf\n'), ((16013, 16035), 'os.path.isabs', 'os.path.isabs', (['ddlFile'], {}), '(ddlFile)\n', (16026, 16035), False, 'import sys, os, io, glob, time, re, datetime\n'), ((16254, 16294), 'io.open', 'io.open', (['ddlFile', '"""rt"""'], {'encoding': '"""utf-8"""'}), "(ddlFile, 'rt', encoding='utf-8')\n", (16261, 16294), False, 'import sys, os, io, glob, time, re, datetime\n'), ((18170, 18195), 'os.path.basename', 'os.path.basename', (['ddlFile'], {}), '(ddlFile)\n', (18186, 18195), False, 'import sys, os, io, glob, time, re, datetime\n'), ((19417, 19428), 'time.time', 'time.time', ([], {}), '()\n', (19426, 19428), False, 'import sys, os, io, glob, time, re, datetime\n'), ((45839, 45883), 'io.open', 'io.open', (['TRACESQLFILE', '"""a"""'], {'encoding': '"""utf-8"""'}), "(TRACESQLFILE, 'a', encoding='utf-8')\n", (45846, 45883), False, 'import sys, os, io, glob, time, re, datetime\n'), ((46614, 46658), 'io.open', 'io.open', (['TRACESQLFILE', '"""a"""'], {'encoding': '"""utf-8"""'}), "(TRACESQLFILE, 'a', encoding='utf-8')\n", (46621, 46658), False, 'import sys, os, io, glob, time, re, datetime\n'), ((54727, 54771), 'io.open', 'io.open', (['TRACESQLFILE', '"""a"""'], {'encoding': '"""utf-8"""'}), "(TRACESQLFILE, 'a', encoding='utf-8')\n", (54734, 54771), False, 'import sys, os, io, glob, time, re, datetime\n'), ((15784, 15795), 'time.time', 'time.time', ([], {}), '()\n', (15793, 15795), False, 'import sys, os, io, glob, time, re, datetime\n'), ((16076, 16101), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (16091, 16101), False, 'import sys, os, io, glob, time, re, datetime\n'), ((14399, 14443), 'io.open', 'io.open', (['TRACESQLFILE', '"""a"""'], {'encoding': '"""utf-8"""'}), "(TRACESQLFILE, 'a', encoding='utf-8')\n", (14406, 14443), False, 'import sys, os, io, glob, time, re, datetime\n'), ((19051, 19095), 'io.open', 'io.open', (['TRACESQLFILE', '"""a"""'], {'encoding': '"""utf-8"""'}), "(TRACESQLFILE, 'a', encoding='utf-8')\n", (19058, 19095), False, 'import sys, os, io, glob, time, re, datetime\n')]
""" ******************************************************************************** make figures ******************************************************************************** """ import numpy as np from scipy.interpolate import griddata import matplotlib.pyplot as plt def plt_sol0(XY, u, width, height, cmap): lb = XY.min(0) ub = XY.max(0) nx = 200 x = np.linspace(lb[0], ub[0], nx) y = np.linspace(lb[1], ub[1], nx) X, Y = np.meshgrid(x, y) U = griddata(XY, u.flatten(), (X, Y), method = "linear") plt.figure(figsize = (width, height)) plt.pcolor(X, Y, U, cmap = cmap, shading = "auto") plt.colorbar() plt.xlabel("x") plt.ylabel("y") plt.title(r"$ \phi $") plt.show() def plt_sol1(XY, u, width, height, cmap, v0, v1, vt): lb = XY.min(0) ub = XY.max(0) nx = 200 x = np.linspace(lb[0], ub[0], nx) y = np.linspace(lb[1], ub[1], nx) X, Y = np.meshgrid(x, y) U = griddata(XY, u.flatten(), (X, Y), method = "cubic") plt.figure(figsize = (width, height)) plt.pcolor(X, Y, U, cmap = cmap, shading = "auto", vmin = v0, vmax = v1) plt.colorbar(ticks = np.arange(v0, v1 + .001, vt)) plt.xlabel("x") plt.ylabel("y") plt.title(r"$ \phi $") plt.show() def plt_diff(x, u1, u2, height, width, cmap, v0, v1, vt): lb = x.min(0) ub = x.max(0) nx = 200 x = np.linspace(lb[0], ub[0], nx) y = np.linspace(lb[1], ub[1], nx) X, Y = np.meshgrid(x, y) U1 = griddata(x, u1.flatten(), (X, Y), method = "cubic") U2 = griddata(x, u2.flatten(), (X, Y), method = "cubic") U3 = griddata(x, (u1 - u2).flatten(), (X, Y), method = "cubic") plt.figure(figsize = (height, width)) plt.subplot(1, 3, 1) plt.pcolor(X, Y, U1, cmap = cmap, shading = "auto", vmin = v0, vmax = v1) plt.colorbar(ticks = np.arange(v0, v1 + .001, vt)) plt.xlabel("x") plt.ylabel("y") plt.title(r"$ \phi_1 $") plt.subplot(1, 3, 2) plt.pcolor(X, Y, U2, cmap = cmap, shading = "auto", vmin = v0, vmax = v1) plt.colorbar(ticks = np.arange(v0, v1 + .001, vt)) plt.xlabel("x") plt.ylabel("y") plt.title(r"$ \phi_2 $") plt.subplot(1, 3, 3) plt.pcolor(X, Y, U3, cmap = cmap, shading = "auto", vmin = v0, vmax = v1) plt.colorbar(ticks = np.arange(v0 / 10, (v1 + .001) / 10, vt / 10)) plt.xlabel("x") plt.ylabel("y") plt.title(r"$ \phi_1 - \phi_2 $")
[ "matplotlib.pyplot.title", "matplotlib.pyplot.pcolor", "matplotlib.pyplot.subplot", "numpy.meshgrid", "matplotlib.pyplot.show", "matplotlib.pyplot.colorbar", "matplotlib.pyplot.figure", "numpy.arange", "numpy.linspace", "matplotlib.pyplot.ylabel", "matplotlib.pyplot.xlabel" ]
[((376, 405), 'numpy.linspace', 'np.linspace', (['lb[0]', 'ub[0]', 'nx'], {}), '(lb[0], ub[0], nx)\n', (387, 405), True, 'import numpy as np\n'), ((414, 443), 'numpy.linspace', 'np.linspace', (['lb[1]', 'ub[1]', 'nx'], {}), '(lb[1], ub[1], nx)\n', (425, 443), True, 'import numpy as np\n'), ((455, 472), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {}), '(x, y)\n', (466, 472), True, 'import numpy as np\n'), ((538, 573), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(width, height)'}), '(figsize=(width, height))\n', (548, 573), True, 'import matplotlib.pyplot as plt\n'), ((580, 626), 'matplotlib.pyplot.pcolor', 'plt.pcolor', (['X', 'Y', 'U'], {'cmap': 'cmap', 'shading': '"""auto"""'}), "(X, Y, U, cmap=cmap, shading='auto')\n", (590, 626), True, 'import matplotlib.pyplot as plt\n'), ((635, 649), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (647, 649), True, 'import matplotlib.pyplot as plt\n'), ((654, 669), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""x"""'], {}), "('x')\n", (664, 669), True, 'import matplotlib.pyplot as plt\n'), ((674, 689), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""y"""'], {}), "('y')\n", (684, 689), True, 'import matplotlib.pyplot as plt\n'), ((694, 716), 'matplotlib.pyplot.title', 'plt.title', (['"""$ \\\\phi $"""'], {}), "('$ \\\\phi $')\n", (703, 716), True, 'import matplotlib.pyplot as plt\n'), ((721, 731), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (729, 731), True, 'import matplotlib.pyplot as plt\n'), ((846, 875), 'numpy.linspace', 'np.linspace', (['lb[0]', 'ub[0]', 'nx'], {}), '(lb[0], ub[0], nx)\n', (857, 875), True, 'import numpy as np\n'), ((884, 913), 'numpy.linspace', 'np.linspace', (['lb[1]', 'ub[1]', 'nx'], {}), '(lb[1], ub[1], nx)\n', (895, 913), True, 'import numpy as np\n'), ((925, 942), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {}), '(x, y)\n', (936, 942), True, 'import numpy as np\n'), ((1007, 1042), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(width, height)'}), '(figsize=(width, height))\n', (1017, 1042), True, 'import matplotlib.pyplot as plt\n'), ((1049, 1113), 'matplotlib.pyplot.pcolor', 'plt.pcolor', (['X', 'Y', 'U'], {'cmap': 'cmap', 'shading': '"""auto"""', 'vmin': 'v0', 'vmax': 'v1'}), "(X, Y, U, cmap=cmap, shading='auto', vmin=v0, vmax=v1)\n", (1059, 1113), True, 'import matplotlib.pyplot as plt\n'), ((1181, 1196), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""x"""'], {}), "('x')\n", (1191, 1196), True, 'import matplotlib.pyplot as plt\n'), ((1201, 1216), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""y"""'], {}), "('y')\n", (1211, 1216), True, 'import matplotlib.pyplot as plt\n'), ((1221, 1243), 'matplotlib.pyplot.title', 'plt.title', (['"""$ \\\\phi $"""'], {}), "('$ \\\\phi $')\n", (1230, 1243), True, 'import matplotlib.pyplot as plt\n'), ((1248, 1258), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1256, 1258), True, 'import matplotlib.pyplot as plt\n'), ((1375, 1404), 'numpy.linspace', 'np.linspace', (['lb[0]', 'ub[0]', 'nx'], {}), '(lb[0], ub[0], nx)\n', (1386, 1404), True, 'import numpy as np\n'), ((1413, 1442), 'numpy.linspace', 'np.linspace', (['lb[1]', 'ub[1]', 'nx'], {}), '(lb[1], ub[1], nx)\n', (1424, 1442), True, 'import numpy as np\n'), ((1454, 1471), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {}), '(x, y)\n', (1465, 1471), True, 'import numpy as np\n'), ((1666, 1701), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(height, width)'}), '(figsize=(height, width))\n', (1676, 1701), True, 'import matplotlib.pyplot as plt\n'), 
((1708, 1728), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(3)', '(1)'], {}), '(1, 3, 1)\n', (1719, 1728), True, 'import matplotlib.pyplot as plt\n'), ((1733, 1798), 'matplotlib.pyplot.pcolor', 'plt.pcolor', (['X', 'Y', 'U1'], {'cmap': 'cmap', 'shading': '"""auto"""', 'vmin': 'v0', 'vmax': 'v1'}), "(X, Y, U1, cmap=cmap, shading='auto', vmin=v0, vmax=v1)\n", (1743, 1798), True, 'import matplotlib.pyplot as plt\n'), ((1866, 1881), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""x"""'], {}), "('x')\n", (1876, 1881), True, 'import matplotlib.pyplot as plt\n'), ((1886, 1901), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""y"""'], {}), "('y')\n", (1896, 1901), True, 'import matplotlib.pyplot as plt\n'), ((1906, 1930), 'matplotlib.pyplot.title', 'plt.title', (['"""$ \\\\phi_1 $"""'], {}), "('$ \\\\phi_1 $')\n", (1915, 1930), True, 'import matplotlib.pyplot as plt\n'), ((1935, 1955), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(3)', '(2)'], {}), '(1, 3, 2)\n', (1946, 1955), True, 'import matplotlib.pyplot as plt\n'), ((1960, 2025), 'matplotlib.pyplot.pcolor', 'plt.pcolor', (['X', 'Y', 'U2'], {'cmap': 'cmap', 'shading': '"""auto"""', 'vmin': 'v0', 'vmax': 'v1'}), "(X, Y, U2, cmap=cmap, shading='auto', vmin=v0, vmax=v1)\n", (1970, 2025), True, 'import matplotlib.pyplot as plt\n'), ((2093, 2108), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""x"""'], {}), "('x')\n", (2103, 2108), True, 'import matplotlib.pyplot as plt\n'), ((2113, 2128), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""y"""'], {}), "('y')\n", (2123, 2128), True, 'import matplotlib.pyplot as plt\n'), ((2133, 2157), 'matplotlib.pyplot.title', 'plt.title', (['"""$ \\\\phi_2 $"""'], {}), "('$ \\\\phi_2 $')\n", (2142, 2157), True, 'import matplotlib.pyplot as plt\n'), ((2162, 2182), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(3)', '(3)'], {}), '(1, 3, 3)\n', (2173, 2182), True, 'import matplotlib.pyplot as plt\n'), ((2187, 2252), 'matplotlib.pyplot.pcolor', 'plt.pcolor', (['X', 'Y', 'U3'], {'cmap': 'cmap', 'shading': '"""auto"""', 'vmin': 'v0', 'vmax': 'v1'}), "(X, Y, U3, cmap=cmap, shading='auto', vmin=v0, vmax=v1)\n", (2197, 2252), True, 'import matplotlib.pyplot as plt\n'), ((2337, 2352), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""x"""'], {}), "('x')\n", (2347, 2352), True, 'import matplotlib.pyplot as plt\n'), ((2357, 2372), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""y"""'], {}), "('y')\n", (2367, 2372), True, 'import matplotlib.pyplot as plt\n'), ((2377, 2411), 'matplotlib.pyplot.title', 'plt.title', (['"""$ \\\\phi_1 - \\\\phi_2 $"""'], {}), "('$ \\\\phi_1 - \\\\phi_2 $')\n", (2386, 2411), True, 'import matplotlib.pyplot as plt\n'), ((1147, 1176), 'numpy.arange', 'np.arange', (['v0', '(v1 + 0.001)', 'vt'], {}), '(v0, v1 + 0.001, vt)\n', (1156, 1176), True, 'import numpy as np\n'), ((1832, 1861), 'numpy.arange', 'np.arange', (['v0', '(v1 + 0.001)', 'vt'], {}), '(v0, v1 + 0.001, vt)\n', (1841, 1861), True, 'import numpy as np\n'), ((2059, 2088), 'numpy.arange', 'np.arange', (['v0', '(v1 + 0.001)', 'vt'], {}), '(v0, v1 + 0.001, vt)\n', (2068, 2088), True, 'import numpy as np\n'), ((2286, 2332), 'numpy.arange', 'np.arange', (['(v0 / 10)', '((v1 + 0.001) / 10)', '(vt / 10)'], {}), '(v0 / 10, (v1 + 0.001) / 10, vt / 10)\n', (2295, 2332), True, 'import numpy as np\n')]
"""Functions to assign and manipulate link capacities of a topology. Link capacities can be assigned either deterministically or randomly, according to various models. """ from distutils.version import LooseVersion import networkx as nx from fnss.util import random_from_pdf from fnss.units import capacity_units __all__ = [ 'set_capacities_constant', 'set_capacities_random', 'set_capacities_random_uniform', 'set_capacities_random_power_law', 'set_capacities_random_zipf', 'set_capacities_random_zipf_mandelbrot', 'set_capacities_degree_gravity', 'set_capacities_betweenness_gravity', 'set_capacities_eigenvector_gravity', 'set_capacities_communicability_gravity', 'set_capacities_pagerank_gravity', 'set_capacities_edge_betweenness', 'set_capacities_edge_communicability', 'get_capacities', 'clear_capacities' ] def set_capacities_constant(topology, capacity, capacity_unit='Mbps', links=None): """ Set constant link capacities Parameters ---------- topology : Topology The topology to which link capacities will be set capacity : float The value of capacity to set links : iterable, optional Iterable container of links, represented as (u, v) tuples to which capacity will be set. If None or not specified, the capacity will be applied to all links. capacity_unit : str, optional The unit in which capacity value is expressed (e.g. Mbps, Gbps etc..) Examples -------- >>> import fnss >>> topology = fnss.erdos_renyi_topology(50, 0.1) >>> fnss.set_capacities_constant(topology, 10, 'Mbps') """ if capacity <= 0: raise ValueError('Capacity must be positive') if not capacity_unit in capacity_units: raise ValueError("The capacity_unit argument is not valid") conversion_factor = 1 if 'capacity_unit' in topology.graph and links is not None: # If a capacity_unit is set, that means that some links have already # been assigned capacities, so set these capacity using the same unit # already used curr_capacity_unit = topology.graph['capacity_unit'] if curr_capacity_unit != capacity_unit: conversion_factor = float(capacity_units[capacity_unit]) \ / capacity_units[curr_capacity_unit] else: topology.graph['capacity_unit'] = capacity_unit edges = links or topology.edges() for u, v in edges: topology.adj[u][v]['capacity'] = capacity * conversion_factor return def set_capacities_random(topology, capacity_pdf, capacity_unit='Mbps'): """ Set random link capacities according to a given probability density function Parameters ---------- topology : Topology The topology to which link capacities will be set capacity_pdf : dict A dictionary representing the probability that a capacity value is assigned to a link capacity_unit : str, optional The unit in which capacity value is expressed (e.g. Mbps, Gbps etc..) links : list, optional List of links, represented as (u, v) tuples to which capacity will be set. If None or not specified, the capacity will be applied to all links. 
Examples -------- >>> import fnss >>> topology = fnss.erdos_renyi_topology(50, 0.1) >>> pdf = {10: 0.5, 100: 0.2, 1000: 0.3} >>> fnss.set_capacities_constant(topology, pdf, 'Mbps') """ if not capacity_unit in capacity_units: raise ValueError("The capacity_unit argument is not valid") if any((capacity < 0 for capacity in capacity_pdf.keys())): raise ValueError('All capacities in capacity_pdf must be positive') topology.graph['capacity_unit'] = capacity_unit for u, v in topology.edges(): topology.adj[u][v]['capacity'] = random_from_pdf(capacity_pdf) return def set_capacities_random_power_law(topology, capacities, capacity_unit='Mbps', alpha=1.1): """ Set random link capacities according to a power-law probability density function. The probability that a capacity :math:`c_i` is assigned to a link is: .. math:: p(c_i) = \\frac{{c_i}^{-\\alpha}}{\\sum_{c_k \\in C}{{c_k}^{-\\alpha}}}. Where :math:`C` is the set of allowed capacity, i.e. the ``capacities`` argument Note that this capacity assignment differs from ``set_capacities_random_zipf`` because, while in Zipf assignment the power law relationship is between the rank of a capacity and the probability of being assigned to a link, in this assignment, the power law is between the value of the capacity and the probability of being assigned to a link. Parameters ---------- topology : Topology The topology to which link capacities will be set capacities : list A list of all possible capacity values capacity_unit : str, optional The unit in which capacity value is expressed (e.g. Mbps, Gbps etc..) """ if alpha <= 0.0: raise ValueError('alpha must be positive') capacities = sorted(capacities) pdf = [capacities[i] ** (-alpha) for i in range(len(capacities))] norm_factor = sum(pdf) norm_pdf = {cap: pdf[i] / norm_factor for i, cap in enumerate(capacities)} set_capacities_random(topology, norm_pdf, capacity_unit=capacity_unit) def set_capacities_random_zipf_mandelbrot(topology, capacities, capacity_unit='Mbps', alpha=1.1, q=0.0, reverse=False): """ Set random link capacities according to a Zipf-Mandelbrot probability density function. This capacity allocation consists in the following steps: 1. All capacities are sorted in descending or order (or ascending if reverse is True) 2. The i-th value of the sorted capacities list is then assigned to a link with probability .. math:: p(i) = \\frac{1/(i + q)^\\alpha}{\\sum_{i = 1}^{N}{1/(i + q)^\\alpha}}. Parameters ---------- topology : Topology The topology to which link capacities will be set capacities : list A list of all possible capacity values capacity_unit : str, optional The unit in which capacity value is expressed (e.g. Mbps, Gbps etc..) 
alpha : float, default 1.1 The :math`\alpha` parameter of the Zipf-Mandlebrot density function q : float, default 0 The :math`q` parameter of the Zipf-Mandlebrot density function reverse : bool, optional If False, lower capacity links are the most frequent, if True, higher capacity links are more frequent """ if alpha <= 0.0: raise ValueError('alpha must be positive') if q < 0.0: raise ValueError('q must be >= 0') capacities = sorted(capacities, reverse=reverse) pdf = {cap: 1.0 / (i + 1.0 + q) ** alpha for i, cap in enumerate(capacities)} norm_factor = sum(pdf.values()) norm_pdf = {capacity: pdf[capacity] / norm_factor for capacity in pdf} set_capacities_random(topology, norm_pdf, capacity_unit=capacity_unit) def set_capacities_random_zipf(topology, capacities, capacity_unit='Mbps', alpha=1.1, reverse=False): """ Set random link capacities according to a Zipf probability density function. The same objective can be achieved by invoking the function ``set_capacities_random_zipf_mandlebrot`` with parameter q set to 0. This capacity allocation consists in the following steps: 1. All capacities are sorted in descending or order (or ascending if reverse is True) 2. The i-th value of the sorted capacities list is then assigned to a link with probability .. math:: p(i) = \\frac{1/i^\\alpha}{\\sum_{i = 1}^{N}{1/i^\\alpha}}. Parameters ---------- topology : Topology The topology to which link capacities will be set capacities : list A list of all possible capacity values capacity_unit : str, optional The unit in which capacity value is expressed (e.g. Mbps, Gbps etc..) alpha : float, default 1.1 The :math`\alpha` parameter of the Zipf density function reverse : bool, optional If False, lower capacity links are the most frequent, if True, higher capacity links are more frequent """ set_capacities_random_zipf_mandelbrot(topology, capacities, alpha=alpha, q=0.0, reverse=reverse, capacity_unit=capacity_unit) def set_capacities_random_uniform(topology, capacities, capacity_unit='Mbps'): """ Set random link capacities according to a uniform probability density function. Parameters ---------- topology : Topology The topology to which link capacities will be set capacities : list A list of all possible capacity values capacity_unit : str, optional The unit in which capacity value is expressed (e.g. Mbps, Gbps etc..) """ capacity_pdf = {capacity: 1.0 / len(capacities) for capacity in capacities} set_capacities_random(topology, capacity_pdf, capacity_unit=capacity_unit) def set_capacities_degree_gravity(topology, capacities, capacity_unit='Mbps'): """ Set link capacities proportionally to the product of the degrees of the two end-points of the link Parameters ---------- topology : Topology The topology to which link capacities will be set capacities : list A list of all possible capacity values capacity_unit : str, optional The unit in which capacity value is expressed (e.g. Mbps, Gbps etc..) 
""" if topology.is_directed(): in_degree = nx.in_degree_centrality(topology) out_degree = nx.out_degree_centrality(topology) gravity = {(u, v): out_degree[u] * in_degree[v] for (u, v) in topology.edges()} else: degree = nx.degree_centrality(topology) gravity = {(u, v): degree[u] * degree[v] for (u, v) in topology.edges()} _set_capacities_proportionally(topology, capacities, gravity, capacity_unit=capacity_unit) def set_capacities_betweenness_gravity(topology, capacities, capacity_unit='Mbps', weighted=True): """ Set link capacities proportionally to the product of the betweenness centralities of the two end-points of the link Parameters ---------- topology : Topology The topology to which link capacities will be set capacities : list A list of all possible capacity values capacity_unit : str, optional The unit in which capacity value is expressed (e.g. Mbps, Gbps etc..) weighted : bool, optional Indicate whether link weights need to be used to compute shortest paths. If links do not have link weights or this parameter is False, shortest paths are calculated based on hop count. """ weight = 'weight' if weighted else None centrality = nx.betweenness_centrality(topology, normalized=False, weight=weight) _set_capacities_gravity(topology, capacities, centrality, capacity_unit) def set_capacities_eigenvector_gravity(topology, capacities, capacity_unit='Mbps', max_iter=1000): """ Set link capacities proportionally to the product of the eigenvector centralities of the two end-points of the link Parameters ---------- topology : Topology The topology to which link capacities will be set capacities : list A list of all possible capacity values capacity_unit : str, optional The unit in which capacity value is expressed (e.g. Mbps, Gbps etc..) max_iter : int, optional The max number of iteration of the algorithm allowed. If a solution is not found within this period Raises ------ RuntimeError : if the algorithm does not converge in max_iter iterations """ try: centrality = nx.eigenvector_centrality(topology, max_iter=max_iter) except nx.NetworkXError: raise RuntimeError('Algorithm did not converge in %d iterations' % max_iter) _set_capacities_gravity(topology, capacities, centrality, capacity_unit) def set_capacities_pagerank_gravity(topology, capacities, capacity_unit='Mbps', alpha=0.85, weight=None): """ Set link capacities proportionally to the product of the Pagerank centralities of the two end-points of the link Parameters ---------- topology : Topology The topology to which link capacities will be set capacities : list A list of all possible capacity values capacity_unit : str, optional The unit in which capacity value is expressed (e.g. Mbps, Gbps etc..) alpha : float, optional The apha parameter of the PageRank algorithm weight : str, optional The name of the link attribute to use for the PageRank algorithm. Valid attributes include *capacity* *delay* and *weight*. If ``None``, all links are assigned the same weight. 
""" centrality = nx.pagerank_numpy(topology, alpha=alpha, personalization=None, weight=weight) _set_capacities_gravity(topology, capacities, centrality, capacity_unit) def set_capacities_communicability_gravity(topology, capacities, capacity_unit='Mbps'): """ Set link capacities proportionally to the product of the communicability centralities of the two end-points of the link Parameters ---------- topology : Topology The topology to which link capacities will be set capacities : list A list of all possible capacity values capacity_unit : str, optional The unit in which capacity value is expressed (e.g. Mbps, Gbps etc..) """ if LooseVersion(nx.__version__) < LooseVersion("2.0"): centrality = nx.communicability_centrality(topology) else: centrality = nx.subgraph_centrality(topology) _set_capacities_gravity(topology, capacities, centrality, capacity_unit) def set_capacities_edge_betweenness(topology, capacities, capacity_unit='Mbps', weighted=True): """ Set link capacities proportionally to edge betweenness centrality of the link. Parameters ---------- topology : Topology The topology to which link capacities will be set capacities : list A list of all possible capacity values capacity_unit : str, optional The unit in which capacity value is expressed (e.g. Mbps, Gbps etc..) weighted : bool, optional Indicate whether link weights need to be used to compute shortest paths. If links do not have link weights or this parameter is False, shortest paths are calculated based on hop count. """ weight = 'weight' if weighted else None centrality = nx.edge_betweenness_centrality(topology, normalized=False, weight=weight) _set_capacities_proportionally(topology, capacities, centrality, capacity_unit=capacity_unit) def set_capacities_edge_communicability(topology, capacities, capacity_unit='Mbps'): """ Set link capacities proportionally to edge communicability centrality of the link. Parameters ---------- topology : Topology The topology to which link capacities will be set capacities : list A list of all possible capacity values capacity_unit : str, optional The unit in which capacity value is expressed (e.g. Mbps, Gbps etc..) """ communicability = nx.communicability(topology) centrality = {(u, v): communicability[u][v] for (u, v) in topology.edges()} _set_capacities_proportionally(topology, capacities, centrality, capacity_unit=capacity_unit) def _set_capacities_gravity(topology, capacities, node_metric, capacity_unit='Mbps'): """ Set link capacities proportionally to the product of the values of a given node metric of the two end-points of the link Parameters ---------- topology : Topology The topology to which link capacities will be set capacities : list A list of all possible capacity values node_metric : dict A dictionary with all values of the given node metric, keyed by node name capacity_unit : str, optional The unit in which capacity value is expressed (e.g. Mbps, Gbps etc..) """ gravity = {(u, v): node_metric[u] * node_metric[v] for (u, v) in topology.edges()} _set_capacities_proportionally(topology, capacities, gravity, capacity_unit=capacity_unit) def _set_capacities_proportionally(topology, capacities, metric, capacity_unit='Mbps'): """ Set link capacities proportionally to the value of a given edge metric. 
Parameters ---------- topology : Topology The topology to which link capacities will be set capacities : list A list of all possible capacity values metric : dict A dictionary with all values of the given edge metric, keyed by edge name capacity_unit : str, optional The unit in which capacity value is expressed (e.g. Mbps, Gbps etc..) """ if not capacity_unit in capacity_units: raise ValueError("The capacity_unit argument is not valid") if any((capacity < 0 for capacity in capacities)): raise ValueError('All capacities must be positive') if len(capacities) == 0: raise ValueError('The list of capacities cannot be empty') topology.graph['capacity_unit'] = capacity_unit # If there is only one capacity the capacities list then all links are # assigned the same capacity if len(capacities) == 1: set_capacities_constant(topology, capacities[0], capacity_unit) return # get min and max of selected edge metric min_metric = min(metric.values()) max_metric = max(metric.values()) capacities = sorted(capacities) min_capacity = capacities[0] - 0.5 * (capacities[1] - capacities[0]) max_capacity = capacities[-1] + 0.5 * (capacities[-1] - capacities[-2]) capacity_boundaries = [0.5 * (capacities[i] + capacities[i + 1]) for i in range(len(capacities) - 1)] capacity_boundaries.append(max_capacity) metric_boundaries = [(capacity_boundary - min_capacity) * ((max_metric - min_metric) / (max_capacity - min_capacity)) + min_metric for capacity_boundary in capacity_boundaries] # to prevent float rounding errors metric_boundaries[-1] = max_metric + 0.1 for (u, v), metric_value in metric.items(): for i, boundary in enumerate(metric_boundaries): if metric_value <= boundary: capacity = capacities[i] topology.adj[u][v]['capacity'] = capacity break # if the loop is not stopped yet, it means that because of float # rounding error, max_capacity < metric_boundaries[-1], so we set the # greatest capacity value. # Anyway, the code should never reach this point, because before the # for loop we are already adjusting the value of metric_boundaries[-1] # to make it > max_capacity else: topology.adj[u][v]['capacity'] = capacities[-1] def get_capacities(topology): """ Returns a dictionary with all link capacities. Parameters ---------- topology : Topology The topology whose link delays are requested Returns ------- capacities : dict Dictionary of link capacities keyed by link. Examples -------- >>> import fnss >>> topology = fnss.Topology() >>> topology.add_path([1,2,3]) >>> fnss.set_capacities_constant(topology, 10, 'Mbps') >>> capacity = get_capacities(topology) >>> capacity[(1,2)] 10 """ return nx.get_edge_attributes(topology, 'capacity') def clear_capacities(topology): """ Remove all capacities from the topology. Parameters ---------- topology : Topology """ topology.graph.pop('capacity_unit', None) for u, v in topology.edges(): topology.adj[u][v].pop('capacity', None)
[ "networkx.edge_betweenness_centrality", "networkx.out_degree_centrality", "networkx.communicability", "networkx.betweenness_centrality", "fnss.util.random_from_pdf", "distutils.version.LooseVersion", "networkx.get_edge_attributes", "networkx.pagerank_numpy", "networkx.eigenvector_centrality", "networkx.subgraph_centrality", "networkx.degree_centrality", "networkx.communicability_centrality", "networkx.in_degree_centrality" ]
[((11282, 11350), 'networkx.betweenness_centrality', 'nx.betweenness_centrality', (['topology'], {'normalized': '(False)', 'weight': 'weight'}), '(topology, normalized=False, weight=weight)\n', (11307, 11350), True, 'import networkx as nx\n'), ((13494, 13571), 'networkx.pagerank_numpy', 'nx.pagerank_numpy', (['topology'], {'alpha': 'alpha', 'personalization': 'None', 'weight': 'weight'}), '(topology, alpha=alpha, personalization=None, weight=weight)\n', (13511, 13571), True, 'import networkx as nx\n'), ((15341, 15414), 'networkx.edge_betweenness_centrality', 'nx.edge_betweenness_centrality', (['topology'], {'normalized': '(False)', 'weight': 'weight'}), '(topology, normalized=False, weight=weight)\n', (15371, 15414), True, 'import networkx as nx\n'), ((16146, 16174), 'networkx.communicability', 'nx.communicability', (['topology'], {}), '(topology)\n', (16164, 16174), True, 'import networkx as nx\n'), ((20634, 20678), 'networkx.get_edge_attributes', 'nx.get_edge_attributes', (['topology', '"""capacity"""'], {}), "(topology, 'capacity')\n", (20656, 20678), True, 'import networkx as nx\n'), ((3928, 3957), 'fnss.util.random_from_pdf', 'random_from_pdf', (['capacity_pdf'], {}), '(capacity_pdf)\n', (3943, 3957), False, 'from fnss.util import random_from_pdf\n'), ((9923, 9956), 'networkx.in_degree_centrality', 'nx.in_degree_centrality', (['topology'], {}), '(topology)\n', (9946, 9956), True, 'import networkx as nx\n'), ((9978, 10012), 'networkx.out_degree_centrality', 'nx.out_degree_centrality', (['topology'], {}), '(topology)\n', (10002, 10012), True, 'import networkx as nx\n'), ((10147, 10177), 'networkx.degree_centrality', 'nx.degree_centrality', (['topology'], {}), '(topology)\n', (10167, 10177), True, 'import networkx as nx\n'), ((12320, 12374), 'networkx.eigenvector_centrality', 'nx.eigenvector_centrality', (['topology'], {'max_iter': 'max_iter'}), '(topology, max_iter=max_iter)\n', (12345, 12374), True, 'import networkx as nx\n'), ((14256, 14284), 'distutils.version.LooseVersion', 'LooseVersion', (['nx.__version__'], {}), '(nx.__version__)\n', (14268, 14284), False, 'from distutils.version import LooseVersion\n'), ((14287, 14306), 'distutils.version.LooseVersion', 'LooseVersion', (['"""2.0"""'], {}), "('2.0')\n", (14299, 14306), False, 'from distutils.version import LooseVersion\n'), ((14329, 14368), 'networkx.communicability_centrality', 'nx.communicability_centrality', (['topology'], {}), '(topology)\n', (14358, 14368), True, 'import networkx as nx\n'), ((14400, 14432), 'networkx.subgraph_centrality', 'nx.subgraph_centrality', (['topology'], {}), '(topology)\n', (14422, 14432), True, 'import networkx as nx\n')]
from urllib import request, parse

# urlencode the form fields, then convert to bytes: urlopen only accepts a bytes payload.
data = bytes(parse.urlencode({'word': 'hello'}), encoding='utf8')
# Supplying data= makes urlopen issue a POST request; httpbin.org/post echoes the request back.
response = request.urlopen('http://httpbin.org/post', data=data)
print(response.read().decode('utf-8'))
[ "urllib.request.urlopen", "urllib.parse.urlencode" ]
[((113, 166), 'urllib.request.urlopen', 'request.urlopen', (['"""http://httpbin.org/post"""'], {'data': 'data'}), "('http://httpbin.org/post', data=data)\n", (128, 166), False, 'from urllib import request, parse\n'), ((48, 82), 'urllib.parse.urlencode', 'parse.urlencode', (["{'word': 'hello'}"], {}), "({'word': 'hello'})\n", (63, 82), False, 'from urllib import request, parse\n')]
import os, sys, pathlib from dotenv import dotenv_values cwd_path = os.path.join(os.getcwd(), '.env') global_path = os.path.join(pathlib.Path.home(), '.config/tconnectsync/.env') values = {} if os.path.exists(cwd_path): values = dotenv_values(cwd_path) elif os.path.exists(global_path): values = dotenv_values(global_path) else: values = dotenv_values() def get(val, default=None): return os.environ.get(val, values.get(val, default)) def get_number(name, default): val = get(name, default) try: return int(val) except ValueError: print("Error: %s must be a number." % name) print("Current value: %s" % val) sys.exit(1) def get_bool(name, default): return str(get(name, default) or '').lower() in ('true', '1') TCONNECT_EMAIL = get('TCONNECT_EMAIL', '<EMAIL>') TCONNECT_PASSWORD = get('TCONNECT_PASSWORD', 'password') PUMP_SERIAL_NUMBER = get_number('PUMP_SERIAL_NUMBER', '11111111') NS_URL = get('NS_URL', 'https://yournightscouturl/') NS_SECRET = get('NS_SECRET', 'apisecret') TIMEZONE_NAME = get('TIMEZONE_NAME', 'America/New_York') # Optional configuration AUTOUPDATE_DEFAULT_SLEEP_SECONDS = get_number('AUTOUPDATE_DEFAULT_SLEEP_SECONDS', '300') # 5 minutes AUTOUPDATE_MAX_SLEEP_SECONDS = get_number('AUTOUPDATE_MAX_SLEEP_SECONDS', '1500') # 25 minutes AUTOUPDATE_USE_FIXED_SLEEP = get_bool('AUTOUPDATE_USE_FIXED_SLEEP', 'false') AUTOUPDATE_FAILURE_MINUTES = get_number('AUTOUPDATE_FAILURE_MINUTES', '180') # 3 hours AUTOUPDATE_RESTART_ON_FAILURE = get_bool('AUTOUPDATE_RESTART_ON_FAILURE', 'false') ENABLE_TESTING_MODES = get_bool('ENABLE_TESTING_MODES', 'false') SKIP_NS_LAST_UPLOADED_CHECK = get_bool('SKIP_NS_LAST_UPLOADED_CHECK', 'false') if __name__ == '__main__': for k in locals(): print("{} = {}".format(k, locals().get(k)))
[ "pathlib.Path.home", "os.getcwd", "os.path.exists", "dotenv.dotenv_values", "sys.exit" ]
[((197, 221), 'os.path.exists', 'os.path.exists', (['cwd_path'], {}), '(cwd_path)\n', (211, 221), False, 'import os, sys, pathlib\n'), ((82, 93), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (91, 93), False, 'import os, sys, pathlib\n'), ((130, 149), 'pathlib.Path.home', 'pathlib.Path.home', ([], {}), '()\n', (147, 149), False, 'import os, sys, pathlib\n'), ((236, 259), 'dotenv.dotenv_values', 'dotenv_values', (['cwd_path'], {}), '(cwd_path)\n', (249, 259), False, 'from dotenv import dotenv_values\n'), ((265, 292), 'os.path.exists', 'os.path.exists', (['global_path'], {}), '(global_path)\n', (279, 292), False, 'import os, sys, pathlib\n'), ((307, 333), 'dotenv.dotenv_values', 'dotenv_values', (['global_path'], {}), '(global_path)\n', (320, 333), False, 'from dotenv import dotenv_values\n'), ((353, 368), 'dotenv.dotenv_values', 'dotenv_values', ([], {}), '()\n', (366, 368), False, 'from dotenv import dotenv_values\n'), ((673, 684), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (681, 684), False, 'import os, sys, pathlib\n')]
"Setup.py for GeodesyML Converter" #pylint: disable=line-too-long from distutils.command.build import build from subprocess import check_call from setuptools import setup class GenerateBindings(build): "Generate GeodesyML bindings." def run(self): check_call(['cd GeodesyMLToSiteLog && ./generate-bindings-for-geodesymltositelog.sh'], shell=True) check_call(['cd SiteLogToGeodesyML && ./generate-bindings-for-sitelogtogeodesyml.sh'], shell=True) build.run(self) setup( name='geodesyml_converter', version='0.1', license='Creative Commons 4.0', packages=['GeodesyMLToSiteLog', 'SiteLogToGeodesyML'], cmdclass={ 'build': GenerateBindings, }, entry_points={ 'console_scripts': [ 'geodesyml-to-sitelog=GeodesyMLToSiteLog.geodesymltositelog:main', 'sitelog-to-geodesyml=SiteLogToGeodesyML.sitelogtogeodesyml:main', ], }, setup_requires=['pytest-runner'], tests_require=[ 'pytest', 'pytest-console-scripts' ], install_requires=[ 'iso3166', 'pyxb' ], )
[ "setuptools.setup", "subprocess.check_call", "distutils.command.build.build.run" ]
[((499, 996), 'setuptools.setup', 'setup', ([], {'name': '"""geodesyml_converter"""', 'version': '"""0.1"""', 'license': '"""Creative Commons 4.0"""', 'packages': "['GeodesyMLToSiteLog', 'SiteLogToGeodesyML']", 'cmdclass': "{'build': GenerateBindings}", 'entry_points': "{'console_scripts': [\n 'geodesyml-to-sitelog=GeodesyMLToSiteLog.geodesymltositelog:main',\n 'sitelog-to-geodesyml=SiteLogToGeodesyML.sitelogtogeodesyml:main']}", 'setup_requires': "['pytest-runner']", 'tests_require': "['pytest', 'pytest-console-scripts']", 'install_requires': "['iso3166', 'pyxb']"}), "(name='geodesyml_converter', version='0.1', license=\n 'Creative Commons 4.0', packages=['GeodesyMLToSiteLog',\n 'SiteLogToGeodesyML'], cmdclass={'build': GenerateBindings},\n entry_points={'console_scripts': [\n 'geodesyml-to-sitelog=GeodesyMLToSiteLog.geodesymltositelog:main',\n 'sitelog-to-geodesyml=SiteLogToGeodesyML.sitelogtogeodesyml:main']},\n setup_requires=['pytest-runner'], tests_require=['pytest',\n 'pytest-console-scripts'], install_requires=['iso3166', 'pyxb'])\n", (504, 996), False, 'from setuptools import setup\n'), ((268, 376), 'subprocess.check_call', 'check_call', (["['cd GeodesyMLToSiteLog && ./generate-bindings-for-geodesymltositelog.sh']"], {'shell': '(True)'}), "([\n 'cd GeodesyMLToSiteLog && ./generate-bindings-for-geodesymltositelog.sh'\n ], shell=True)\n", (278, 376), False, 'from subprocess import check_call\n'), ((375, 483), 'subprocess.check_call', 'check_call', (["['cd SiteLogToGeodesyML && ./generate-bindings-for-sitelogtogeodesyml.sh']"], {'shell': '(True)'}), "([\n 'cd SiteLogToGeodesyML && ./generate-bindings-for-sitelogtogeodesyml.sh'\n ], shell=True)\n", (385, 483), False, 'from subprocess import check_call\n'), ((482, 497), 'distutils.command.build.build.run', 'build.run', (['self'], {}), '(self)\n', (491, 497), False, 'from distutils.command.build import build\n')]
#### feed in the obs seq line by line #### quantity becomes all one #### feed with all obs seq import numpy as np import pandas as pd import random from sklearn.model_selection import KFold from hmm_class import hmm from sklearn.preprocessing import normalize import time import matplotlib.pyplot as plt # load the obs and split into k fold. def split_load_data(filename, k_splits): obs_seq = np.loadtxt(filename, dtype = int) kf = KFold(n_splits = k_splits, shuffle = False) for train_index, test_index in kf.split(obs_seq): obs_train, obs_test = obs_seq[train_index], obs_seq[test_index] return obs_train, obs_test def load_data(filename): obs_seq = np.loadtxt(filename, dtype = int) return obs_seq # generate random states for the len of data def sts_seq_generate(N, size_data, len_obs): # N states sts_seq = np.zeros((size_data, len_obs), dtype = int) for i in range(size_data): for j in range(len_obs): sts_seq[i][j] = random.randint(0,N-1) return sts_seq # generate emission probability randomly # return as matrix def em_prob_generate(n, m): # n:# states, m: # obs em_prob = np.zeros((n,m)) for i in range(n): for j in range(m): em_prob[i][j] = np.random.uniform(0,1) em_prob = normalize(em_prob, axis = 1, norm = 'l1') return np.asmatrix(em_prob) def trans_prob_generate(n): # n:# states trans_prob = np.zeros((n,n)) for i in range(n): for j in range(n): trans_prob[i][j] = np.random.uniform(0,1) trans_prob = normalize(trans_prob, axis = 1, norm = 'l1') return np.asmatrix(trans_prob) def pi_generate(n): pi = np.zeros(n) for i in range(n): pi[i] = np.random.uniform(0,1) pi = normalize([pi], axis = 1, norm = 'l1') return np.asmatrix(pi) # useful parameter for later use def param_generate(n, obs_seq): size_data = len(obs_seq) # cal the line of obs 1000 len_obs = len(obs_seq[0]) # cal the len of each obs. 
only works for the same length sts_seq = sts_seq_generate(n, size_data, len_obs) return size_data, len_obs, sts_seq # output all the std file for this project def outfile(filename,N = None, ep = None, tp = None, hidden_sts = None, distribution = None): f = open(filename, "w+") if N: f.write(str(N)) f.write("\n") if np.any(ep): [n, m] = np.shape(ep) for i in range(n*m): f.write(str(ep.item(i))) if i % m == m-1: f.write("\n") else: f.write(",") for j in range(n*n): f.write(str(tp.item(j))) if j % n == n-1: f.write("\n") else: f.write(",") if hidden_sts: size_data = len(hidden_sts) len_seq = len(hidden_sts[0]) for i in range(size_data): for j in range(len_seq): f.write(str(hidden_sts[i][j])) if j % len_seq == len_seq-1: f.write("\n") else: f.write(",") if distribution: size_data = len(distribution) len_seq = len(distribution[0]) for i in range(size_data): for j in range(len_seq): f.write(str(distribution[i][j])) if j % len_seq == len_seq-1: f.write("\n") else: f.write(",") f.close() # compute the predicted output prob distribution # input the hidden states list and unique states outlook # return the distribution list def predic_prob(hidden_sts, uniq_sts): distribution = [] each_prob = [] size_data = len(hidden_sts) len_seq = len(hidden_sts[0]) dis_dict = dict.fromkeys(uniq_sts, 0) for i in range(size_data): for j in range(len_seq): dis_dict[hidden_sts[i][j]] += 1 if j % len_seq == len_seq-1: # change line each_prob = prob_cal(dis_dict) distribution.append(each_prob) dis_dict.clear() dis_dict = dict.fromkeys(uniq_sts, 0) return distribution # probability computation from the dictionary def prob_cal(dictionary): prob_lst = [] length = len(dictionary) total = [] for i in range(length): total.append(dictionary[i]) total = sum(total) for i in range(length): prob_lst.append(dictionary[i]/total) return prob_lst # Given a seq of output, predict the next output and the next state, and test it on the data. def predict_next_sts(hidden_sts, tp): size_data = len(hidden_sts) len_seq = len(hidden_sts[0]) next_sts = [] for i in range(size_data): next_sts.append(np.argmax(tp[hidden_sts[i][len_seq-1],:])) for i in range(size_data): hidden_sts[i].append(next_sts[i]) return hidden_sts if __name__ == '__main__': n = 5 # number of states m = 4 # number of observation k = 5 # k fold num_iter = 1000 # number of iteration tolerance = 10**(-5) obs_seq = load_data('train534.dat') size_data, len_obs, sts_seq = param_generate(n, obs_seq) uniq_sts = list(np.unique(sts_seq)) # the function need to feed in a list of uniq states uniq_obs = list(np.unique(obs_seq)) pi = pi_generate(n) # start prob em_prob = em_prob_generate(n, m) # generate uniform distribution em prob trans_prob = trans_prob_generate(n) # generate uniform distribution trans prob. model = hmm(uniq_sts, uniq_obs, pi, trans_prob, em_prob) # init the model # Number of times, each corresponding item in ‘observation_list’ occurs quantities = np.ones(size_data) prob = model.log_prob(obs_seq, quantities) print("prob of seq with original param %f" %(prob)) # run EM/ Baum_welch to train the data. 
# use Baum_welch to maximize the likelihood # get the transition matrix A and emission probability matrix B ep, tp, sp, prob_lst, iter_count, loss_lst = model.train_hmm(obs_seq, num_iter, quantities, tolerance) print("emission_prob\n", ep) # pd.DataFrame(model.em_prob, index = uniq_sts, columns = uniq_obs) print("pi\n", sp) print("transition\n", tp) prob = model.log_prob(obs_seq, quantities) print("prob of seq after %d iterations: %f" %(num_iter, prob)) # use viterbi to compute the most likely sequence. Report the time it took. tr_start_t = time.perf_counter() hidden_states = [] for i in range(size_data): hidden_states.append(model.viterbi(obs_seq[i])) tr_end_t = time.perf_counter() print("time for get all the hidden states from training data:", tr_end_t - tr_start_t) # print('hidden states:\n', hidden_states) ###### calculate the log likelihood of test set ###### predict the output from test seq test_obs = load_data('test1_534.dat') size_data_test, len_obs_test, test_sts_seq = param_generate(n, test_obs) test_quant = np.ones(size_data_test) test_prob = model.log_prob(test_obs, test_quant) print("The log likelihood of test set: %f" %(test_prob)) ##### output the hidden states of test set te_start_t = time.perf_counter() test_hidden_sts = [] for i in range(size_data_test): test_hidden_sts.append(model.viterbi(test_obs[i])) te_end_t = time.perf_counter() print("time for get all the hidden states from test data:", te_end_t - te_start_t) test_hidden_sts = [list(map(int, lst)) for lst in test_hidden_sts] # cast the data to int # comput the next state for every sequnece. T=40 to 41 test_hidden_sts = predict_next_sts(test_hidden_sts, tp) # print("test set hidden states:\n", test_hidden_sts) distribution = predic_prob(test_hidden_sts, uniq_sts) ####### output file ########## outfile("modelpars.dat", N = n, ep = ep, tp = tp) outfile("loglik.dat", N = test_prob) outfile("viterbi.dat", hidden_sts = test_hidden_sts) outfile("predict.dat", distribution = distribution) ####### plot ########### x = np.arange(0, iter_count) plt.figure() plt.plot(x, prob_lst, color = 'r') plt.xlabel('iteration times') plt.ylabel('log likelihood') plt.title('Learning curve') plt.show() plt.figure() plt.plot(x, loss_lst, color = 'b') plt.xlabel('iteration times') plt.ylabel('loss') plt.title('Loss from each iteration') plt.show() # ep_test, tp_test, sp_test = model.train(obs_test, 2, quantities_test) # run the baum-welch algo to obtain the A, B matrix and start prob. # def em_prob_generate(): # return np.matrix('0.1 0.2 0.3 0.4; 0.2 0.3 0.1 0.4; 0.4 0.3 0.2 0.1; 0.2 0.1 0.4 0.3; 0.3 0.1 0.2 0.4') # def trans_prob_generate(): # return np.matrix('0.2 0.1 0.3 0.2 0.2; 0.1 0.2 0.2 0.1 0.4; 0.3 0.1 0.1 0.2 0.3; 0.2 0.1 0.1 0.2 0.4; 0.3 0.3 0.2 0.1 0.1') # def pi_generate(): # return np.matrix('0.1 0.2 0.3 0.1 0.3')
[ "matplotlib.pyplot.title", "numpy.argmax", "hmm_class.hmm", "numpy.ones", "numpy.shape", "matplotlib.pyplot.figure", "numpy.arange", "numpy.unique", "random.randint", "numpy.loadtxt", "matplotlib.pyplot.show", "time.perf_counter", "sklearn.preprocessing.normalize", "matplotlib.pyplot.ylabel", "numpy.random.uniform", "matplotlib.pyplot.plot", "numpy.zeros", "sklearn.model_selection.KFold", "numpy.any", "numpy.asmatrix", "matplotlib.pyplot.xlabel" ]
[((411, 442), 'numpy.loadtxt', 'np.loadtxt', (['filename'], {'dtype': 'int'}), '(filename, dtype=int)\n', (421, 442), True, 'import numpy as np\n'), ((452, 491), 'sklearn.model_selection.KFold', 'KFold', ([], {'n_splits': 'k_splits', 'shuffle': '(False)'}), '(n_splits=k_splits, shuffle=False)\n', (457, 491), False, 'from sklearn.model_selection import KFold\n'), ((686, 717), 'numpy.loadtxt', 'np.loadtxt', (['filename'], {'dtype': 'int'}), '(filename, dtype=int)\n', (696, 717), True, 'import numpy as np\n'), ((854, 895), 'numpy.zeros', 'np.zeros', (['(size_data, len_obs)'], {'dtype': 'int'}), '((size_data, len_obs), dtype=int)\n', (862, 895), True, 'import numpy as np\n'), ((1144, 1160), 'numpy.zeros', 'np.zeros', (['(n, m)'], {}), '((n, m))\n', (1152, 1160), True, 'import numpy as np\n'), ((1258, 1295), 'sklearn.preprocessing.normalize', 'normalize', (['em_prob'], {'axis': '(1)', 'norm': '"""l1"""'}), "(em_prob, axis=1, norm='l1')\n", (1267, 1295), False, 'from sklearn.preprocessing import normalize\n'), ((1309, 1329), 'numpy.asmatrix', 'np.asmatrix', (['em_prob'], {}), '(em_prob)\n', (1320, 1329), True, 'import numpy as np\n'), ((1391, 1407), 'numpy.zeros', 'np.zeros', (['(n, n)'], {}), '((n, n))\n', (1399, 1407), True, 'import numpy as np\n'), ((1511, 1551), 'sklearn.preprocessing.normalize', 'normalize', (['trans_prob'], {'axis': '(1)', 'norm': '"""l1"""'}), "(trans_prob, axis=1, norm='l1')\n", (1520, 1551), False, 'from sklearn.preprocessing import normalize\n'), ((1565, 1588), 'numpy.asmatrix', 'np.asmatrix', (['trans_prob'], {}), '(trans_prob)\n', (1576, 1588), True, 'import numpy as np\n'), ((1619, 1630), 'numpy.zeros', 'np.zeros', (['n'], {}), '(n)\n', (1627, 1630), True, 'import numpy as np\n'), ((1693, 1727), 'sklearn.preprocessing.normalize', 'normalize', (['[pi]'], {'axis': '(1)', 'norm': '"""l1"""'}), "([pi], axis=1, norm='l1')\n", (1702, 1727), False, 'from sklearn.preprocessing import normalize\n'), ((1741, 1756), 'numpy.asmatrix', 'np.asmatrix', (['pi'], {}), '(pi)\n', (1752, 1756), True, 'import numpy as np\n'), ((2274, 2284), 'numpy.any', 'np.any', (['ep'], {}), '(ep)\n', (2280, 2284), True, 'import numpy as np\n'), ((5002, 5050), 'hmm_class.hmm', 'hmm', (['uniq_sts', 'uniq_obs', 'pi', 'trans_prob', 'em_prob'], {}), '(uniq_sts, uniq_obs, pi, trans_prob, em_prob)\n', (5005, 5050), False, 'from hmm_class import hmm\n'), ((5160, 5178), 'numpy.ones', 'np.ones', (['size_data'], {}), '(size_data)\n', (5167, 5178), True, 'import numpy as np\n'), ((5900, 5919), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (5917, 5919), False, 'import time\n'), ((6034, 6053), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (6051, 6053), False, 'import time\n'), ((6417, 6440), 'numpy.ones', 'np.ones', (['size_data_test'], {}), '(size_data_test)\n', (6424, 6440), True, 'import numpy as np\n'), ((6613, 6632), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (6630, 6632), False, 'import time\n'), ((6757, 6776), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (6774, 6776), False, 'import time\n'), ((7453, 7477), 'numpy.arange', 'np.arange', (['(0)', 'iter_count'], {}), '(0, iter_count)\n', (7462, 7477), True, 'import numpy as np\n'), ((7480, 7492), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (7490, 7492), True, 'import matplotlib.pyplot as plt\n'), ((7495, 7527), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'prob_lst'], {'color': '"""r"""'}), "(x, prob_lst, color='r')\n", (7503, 7527), True, 'import matplotlib.pyplot as plt\n'), ((7532, 
7561), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""iteration times"""'], {}), "('iteration times')\n", (7542, 7561), True, 'import matplotlib.pyplot as plt\n'), ((7564, 7592), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""log likelihood"""'], {}), "('log likelihood')\n", (7574, 7592), True, 'import matplotlib.pyplot as plt\n'), ((7595, 7622), 'matplotlib.pyplot.title', 'plt.title', (['"""Learning curve"""'], {}), "('Learning curve')\n", (7604, 7622), True, 'import matplotlib.pyplot as plt\n'), ((7625, 7635), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7633, 7635), True, 'import matplotlib.pyplot as plt\n'), ((7640, 7652), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (7650, 7652), True, 'import matplotlib.pyplot as plt\n'), ((7655, 7687), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'loss_lst'], {'color': '"""b"""'}), "(x, loss_lst, color='b')\n", (7663, 7687), True, 'import matplotlib.pyplot as plt\n'), ((7692, 7721), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""iteration times"""'], {}), "('iteration times')\n", (7702, 7721), True, 'import matplotlib.pyplot as plt\n'), ((7724, 7742), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""loss"""'], {}), "('loss')\n", (7734, 7742), True, 'import matplotlib.pyplot as plt\n'), ((7745, 7782), 'matplotlib.pyplot.title', 'plt.title', (['"""Loss from each iteration"""'], {}), "('Loss from each iteration')\n", (7754, 7782), True, 'import matplotlib.pyplot as plt\n'), ((7785, 7795), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7793, 7795), True, 'import matplotlib.pyplot as plt\n'), ((1663, 1686), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1)'], {}), '(0, 1)\n', (1680, 1686), True, 'import numpy as np\n'), ((2298, 2310), 'numpy.shape', 'np.shape', (['ep'], {}), '(ep)\n', (2306, 2310), True, 'import numpy as np\n'), ((4687, 4705), 'numpy.unique', 'np.unique', (['sts_seq'], {}), '(sts_seq)\n', (4696, 4705), True, 'import numpy as np\n'), ((4778, 4796), 'numpy.unique', 'np.unique', (['obs_seq'], {}), '(obs_seq)\n', (4787, 4796), True, 'import numpy as np\n'), ((975, 999), 'random.randint', 'random.randint', (['(0)', '(N - 1)'], {}), '(0, N - 1)\n', (989, 999), False, 'import random\n'), ((1223, 1246), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1)'], {}), '(0, 1)\n', (1240, 1246), True, 'import numpy as np\n'), ((1473, 1496), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1)'], {}), '(0, 1)\n', (1490, 1496), True, 'import numpy as np\n'), ((4268, 4312), 'numpy.argmax', 'np.argmax', (['tp[hidden_sts[i][len_seq - 1], :]'], {}), '(tp[hidden_sts[i][len_seq - 1], :])\n', (4277, 4312), True, 'import numpy as np\n')]
'''
Kattis - prsteni

Simple fraction question. Look around for the pattern, the key insight is
radius and number of turns are inversely proportional. Just take the ratio of
the radius of the first and the ith circle to get the ratio of the turns of
the ith circle to the first circle.

Time: O(1)
Space: O(1)
'''
from fractions import Fraction

n = int(input())
arr = list(map(int, input().split()))

for i in range(1, n):
    f = Fraction(arr[0], arr[i])
    print(str(f.numerator) + "/" + str(f.denominator))
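# Hedged sketch (the sample radii are assumed for illustration, not taken from
# the Kattis statement): the same reduction packaged as a helper, showing how
# Fraction puts each turn ratio into lowest terms.
def turn_ratios(radii):
    """Return 'p/q' strings where p/q = radii[0] / radii[i] in lowest terms."""
    return ["{}/{}".format(Fraction(radii[0], r).numerator,
                            Fraction(radii[0], r).denominator)
            for r in radii[1:]]

# turn_ratios([4, 2, 1]) gives ['2/1', '4/1']: the third ring makes four turns
# for every single turn of the first ring.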
[ "fractions.Fraction" ]
[((430, 454), 'fractions.Fraction', 'Fraction', (['arr[0]', 'arr[i]'], {}), '(arr[0], arr[i])\n', (438, 454), False, 'from fractions import Fraction\n')]
# Python code for object-oriented concepts using
# the abstract factory design pattern.
import random


class Cars_avaliable:
    def __init__(self, Car=None):
        self.cars = Car

    def show_car(self):
        # Instantiate whatever car class (or factory callable) was injected.
        rental = self.cars()
        print(f'Name of car {rental}')
        print(f'Its priced at {rental.Fee()} /hour')


class BMW_330Ci:
    def Fee(self):
        return 56

    def __str__(self):
        return "BMW_330Ci"


class Audi_A4:
    def Fee(self):
        return 30

    def __str__(self):
        return "Audi_A4"


class Merc_S550:
    def Fee(self):
        return 90

    def __str__(self):
        return 'Merc_S550'


def r_choice():
    # Pick a random car class and return an instance of it.
    return random.choice([BMW_330Ci, Audi_A4, Merc_S550])()


if __name__ == "__main__":
    rental = Cars_avaliable(r_choice)
    for x in range(7):
        rental.show_car()
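# Hedged usage note (show_specific_car is not part of the original snippet): the
# factory callable passed to Cars_avaliable does not have to be random; a
# concrete car class can be injected directly, which makes the output
# deterministic and is a common way to exercise this kind of factory in tests.
def show_specific_car(car_cls=Audi_A4):
    Cars_avaliable(car_cls).show_car()

# show_specific_car()  # would print "Name of car Audi_A4" and "Its priced at 30 /hour"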
[ "random.choice" ]
[((764, 810), 'random.choice', 'random.choice', (['[BMW_330Ci, Audi_A4, Merc_S550]'], {}), '([BMW_330Ci, Audi_A4, Merc_S550])\n', (777, 810), False, 'import random\n')]
# Copyright 2021, <NAME>, mailto:<EMAIL>
#
# Python tests originally created or extracted from other people's work. The
# parts were too small to be protected.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests uncompiled and compiled functions' responses to inspect and isinstance."""

import inspect
import types

# nuitka-project: --python-flag=no_warnings


async def compiledAsyncgen():
    yield 1


print(type(compiledAsyncgen()))

assert inspect.isfunction(compiledAsyncgen) is True
assert inspect.isgeneratorfunction(compiledAsyncgen) is False
assert inspect.iscoroutinefunction(compiledAsyncgen) is False
assert inspect.isasyncgenfunction(compiledAsyncgen) is True

assert isinstance(compiledAsyncgen(), types.GeneratorType) is False
assert isinstance(compiledAsyncgen(), types.CoroutineType) is False
assert isinstance(compiledAsyncgen(), types.AsyncGeneratorType) is True

assert type(compiledAsyncgen()) == types.AsyncGeneratorType, type(compiledAsyncgen())

assert isinstance(compiledAsyncgen, types.AsyncGeneratorType) is False
[ "inspect.iscoroutinefunction", "inspect.isasyncgenfunction", "inspect.isgeneratorfunction", "inspect.isfunction" ]
[((1008, 1044), 'inspect.isfunction', 'inspect.isfunction', (['compiledAsyncgen'], {}), '(compiledAsyncgen)\n', (1026, 1044), False, 'import inspect\n'), ((1060, 1105), 'inspect.isgeneratorfunction', 'inspect.isgeneratorfunction', (['compiledAsyncgen'], {}), '(compiledAsyncgen)\n', (1087, 1105), False, 'import inspect\n'), ((1122, 1167), 'inspect.iscoroutinefunction', 'inspect.iscoroutinefunction', (['compiledAsyncgen'], {}), '(compiledAsyncgen)\n', (1149, 1167), False, 'import inspect\n'), ((1184, 1228), 'inspect.isasyncgenfunction', 'inspect.isasyncgenfunction', (['compiledAsyncgen'], {}), '(compiledAsyncgen)\n', (1210, 1228), False, 'import inspect\n')]
"""Provides MacroCheckerFile, a subclassable type that validates a single file in the spec.""" # Copyright (c) 2018-2019 Collabora, Ltd. # # SPDX-License-Identifier: Apache-2.0 # # Author(s): <NAME> <<EMAIL>> import logging import re from collections import OrderedDict, namedtuple from enum import Enum from inspect import currentframe from .shared import (AUTO_FIX_STRING, CATEGORIES_WITH_VALIDITY, EXTENSION_CATEGORY, NON_EXISTENT_MACROS, EntityData, Message, MessageContext, MessageId, MessageType, generateInclude, toNameAndLine) # Code blocks may start and end with any number of ---- CODE_BLOCK_DELIM = '----' # Mostly for ref page blocks, but also used elsewhere? REF_PAGE_LIKE_BLOCK_DELIM = '--' # For insets/blocks like the implicit valid usage # TODO think it must start with this - does it have to be exactly this? BOX_BLOCK_DELIM = '****' INTERNAL_PLACEHOLDER = re.compile( r'(?P<delim>__+)([a-zA-Z]+)(?P=delim)' ) # Matches a generated (api or validity) include line. INCLUDE = re.compile( r'include::(?P<directory_traverse>((../){1,4}|\{(INCS-VAR|generated)\}/)(generated/)?)(?P<generated_type>(api|validity))/(?P<category>\w+)/(?P<entity_name>[^./]+).txt[\[][\]]') # Matches an [[AnchorLikeThis]] ANCHOR = re.compile(r'\[\[(?P<entity_name>[^\]]+)\]\]') # Looks for flink:foo:: or slink::foo:: at the end of string: # used to detect explicit pname context. PRECEDING_MEMBER_REFERENCE = re.compile( r'\b(?P<macro>[fs](text|link)):(?P<entity_name>[\w*]+)::$') # Matches something like slink:foo::pname:bar as well as # the under-marked-up slink:foo::bar. MEMBER_REFERENCE = re.compile( r'\b(?P<first_part>(?P<scope_macro>[fs](text|link)):(?P<scope>[\w*]+))(?P<double_colons>::)(?P<second_part>(?P<member_macro>pname:?)(?P<entity_name>[\w]+))\b' ) # Matches if a string ends while a link is still "open". # (first half of a link being broken across two lines, # or containing our interested area when matched against the text preceding). # Used to skip checking in some places. OPEN_LINK = re.compile( r'.*(?<!`)<<[^>]*$' ) # Matches if a string begins and is followed by a link "close" without a matching open. # (second half of a link being broken across two lines) # Used to skip checking in some places. CLOSE_LINK = re.compile( r'[^<]*>>.*$' ) # Matches if a line should be skipped without further considering. # Matches lines starting with: # - `ifdef:` # - `endif:` # - `todo` (followed by something matching \b, like : or (. capitalization ignored) SKIP_LINE = re.compile( r'^(ifdef:)|(endif:)|([tT][oO][dD][oO]\b).*' ) # Matches the whole inside of a refpage tag. BRACKETS = re.compile(r'\[(?P<tags>.*)\]') # Matches a key='value' pair from a ref page tag. 
REF_PAGE_ATTRIB = re.compile( r"(?P<key>[a-z]+)='(?P<value>[^'\\]*(?:\\.[^'\\]*)*)'") class Attrib(Enum): """Attributes of a ref page.""" REFPAGE = 'refpage' DESC = 'desc' TYPE = 'type' ALIAS = 'alias' XREFS = 'xrefs' ANCHOR = 'anchor' VALID_REF_PAGE_ATTRIBS = set( (e.value for e in Attrib)) AttribData = namedtuple('AttribData', ['match', 'key', 'value']) def makeAttribFromMatch(match): """Turn a match of REF_PAGE_ATTRIB into an AttribData value.""" return AttribData(match=match, key=match.group( 'key'), value=match.group('value')) def parseRefPageAttribs(line): """Parse a ref page tag into a dictionary of attribute_name: AttribData.""" return {m.group('key'): makeAttribFromMatch(m) for m in REF_PAGE_ATTRIB.finditer(line)} def regenerateIncludeFromMatch(match, generated_type): """Create an include directive from an INCLUDE match and a (new or replacement) generated_type.""" return generateInclude( match.group('directory_traverse'), generated_type, match.group('category'), match.group('entity_name')) BlockEntry = namedtuple( 'BlockEntry', ['delimiter', 'context', 'block_type', 'refpage']) class BlockType(Enum): """Enumeration of the various distinct block types known.""" CODE = 'code' REF_PAGE_LIKE = 'ref-page-like' # with or without a ref page tag before BOX = 'box' @classmethod def lineToBlockType(self, line): """Return a BlockType if the given line is a block delimiter. Returns None otherwise. """ if line == REF_PAGE_LIKE_BLOCK_DELIM: return BlockType.REF_PAGE_LIKE if line.startswith(CODE_BLOCK_DELIM): return BlockType.CODE if line.startswith(BOX_BLOCK_DELIM): return BlockType.BOX return None def _pluralize(word, num): if num == 1: return word if word.endswith('y'): return word[:-1] + 'ies' return word + 's' def _s_suffix(num): """Simplify pluralization.""" if num > 1: return 's' return '' def shouldEntityBeText(entity, subscript): """Determine if an entity name appears to use placeholders, wildcards, etc. and thus merits use of a *text macro. Call with the entity and subscript groups from a match of MacroChecker.macro_re. """ entity_only = entity if subscript: if subscript == '[]' or subscript == '[i]' or subscript.startswith( '[_') or subscript.endswith('_]'): return True entity_only = entity[:-len(subscript)] if ('*' in entity) or entity.startswith('_') or entity_only.endswith('_'): return True if INTERNAL_PLACEHOLDER.search(entity): return True return False class MacroCheckerFile(object): """Object performing processing of a single AsciiDoctor file from a specification. For testing purposes, may also process a string as if it were a file. """ def __init__(self, checker, filename, enabled_messages, stream_maker): """Construct a MacroCheckerFile object. Typically called by MacroChecker.processFile or MacroChecker.processString(). Arguments: checker -- A MacroChecker object. filename -- A string to use in messages to refer to this checker, typically the file name. enabled_messages -- A set() of MessageId values that should be considered "enabled" and thus stored. stream_maker -- An object with a makeStream() method that returns a stream. 
""" self.checker = checker self.filename = filename self.stream_maker = stream_maker self.enabled_messages = enabled_messages self.missing_validity_suppressions = set( self.getMissingValiditySuppressions()) self.logger = logging.getLogger(__name__) self.logger.addHandler(logging.NullHandler()) self.fixes = set() self.messages = [] self.pname_data = None self.pname_mentions = {} self.refpage_includes = {} self.lines = [] # For both of these: # keys: entity name # values: MessageContext self.fs_api_includes = {} self.validity_includes = {} self.in_code_block = False self.in_ref_page = False self.prev_line_ref_page_tag = None self.current_ref_page = None # Stack of block-starting delimiters. self.block_stack = [] # Regexes that are members because they depend on the name prefix. self.suspected_missing_macro_re = self.checker.suspected_missing_macro_re self.heading_command_re = self.checker.heading_command_re ### # Main process/checking methods, arranged roughly from largest scope to smallest scope. ### def process(self): """Check the stream (file, string) created by the streammaker supplied to the constructor. This is the top-level method for checking a spec file. """ self.logger.info("processing file %s", self.filename) # File content checks - performed line-by-line with self.stream_maker.make_stream() as f: # Iterate through lines, calling processLine on each. for lineIndex, line in enumerate(f): trimmedLine = line.rstrip() self.lines.append(trimmedLine) self.processLine(lineIndex + 1, trimmedLine) # End of file checks follow: # Check "state" at end of file: should have blocks closed. if self.prev_line_ref_page_tag: self.error(MessageId.REFPAGE_BLOCK, "Reference page tag seen, but block not opened before end of file.", context=self.storeMessageContext(match=None)) if self.block_stack: locations = (x.context for x in self.block_stack) formatted_locations = ['{} opened at {}'.format(x.delimiter, self.getBriefLocation(x.context)) for x in self.block_stack] self.logger.warning("Unclosed blocks: %s", ', '.join(formatted_locations)) self.error(MessageId.UNCLOSED_BLOCK, ["Reached end of page, with these unclosed blocks remaining:"] + formatted_locations, context=self.storeMessageContext(match=None), see_also=locations) # Check that every include of an /api/ file in the protos or structs category # had a matching /validity/ include for entity, includeContext in self.fs_api_includes.items(): if not self.checker.entity_db.entityHasValidity(entity): continue if entity in self.missing_validity_suppressions: continue if entity not in self.validity_includes: self.warning(MessageId.MISSING_VALIDITY_INCLUDE, ['Saw /api/ include for {}, but no matching /validity/ include'.format(entity), 'Expected a line with ' + regenerateIncludeFromMatch(includeContext.match, 'validity')], context=includeContext) # Check that we never include a /validity/ file # without a matching /api/ include for entity, includeContext in self.validity_includes.items(): if entity not in self.fs_api_includes: self.error(MessageId.MISSING_API_INCLUDE, ['Saw /validity/ include for {}, but no matching /api/ include'.format(entity), 'Expected a line with ' + regenerateIncludeFromMatch(includeContext.match, 'api')], context=includeContext) if not self.numDiagnostics(): # no problems, exit quietly return print('\nFor file {}:'.format(self.filename)) self.printMessageCounts() numFixes = len(self.fixes) if numFixes > 0: fixes = ', '.join(('{} -> {}'.format(search, replace) for search, replace in self.fixes)) print('{} unique auto-fix {} 
recorded: {}'.format(numFixes, _pluralize('pattern', numFixes), fixes)) def processLine(self, lineNum, line): """Check the contents of a single line from a file. Eventually populates self.match, self.entity, self.macro, before calling processMatch. """ self.lineNum = lineNum self.line = line self.match = None self.entity = None self.macro = None self.logger.debug("processing line %d", lineNum) if self.processPossibleBlockDelimiter(): # This is a block delimiter - proceed to next line. # Block-type-specific stuff goes in processBlockOpen and processBlockClosed. return if self.in_code_block: # We do no processing in a code block. return ### # Detect if the previous line was [open,...] starting a refpage # but this line isn't -- # If the line is some other block delimiter, # the related code in self.processPossibleBlockDelimiter() # would have handled it. # (because execution would never get to here for that line) if self.prev_line_ref_page_tag: self.handleExpectedRefpageBlock() ### # Detect headings if line.startswith('=='): # Headings cause us to clear our pname_context self.pname_data = None command = self.heading_command_re.match(line) if command: data = self.checker.findEntity(command) if data: self.pname_data = data return ### # Detect [open, lines for manpages if line.startswith('[open,'): self.checkRefPage() return ### # Skip comments if line.lstrip().startswith('//'): return ### # Skip ifdef/endif if SKIP_LINE.match(line): return ### # Detect include:::....[] lines match = INCLUDE.match(line) if match: self.match = match entity = match.group('entity_name') data = self.checker.findEntity(entity) if not data: self.error(MessageId.UNKNOWN_INCLUDE, 'Saw include for {}, but that entity is unknown.'.format(entity)) self.pname_data = None return self.pname_data = data if match.group('generated_type') == 'api': self.recordInclude(self.checker.apiIncludes) # Set mentions to None. The first time we see something like `* pname:paramHere`, # we will set it to an empty set self.pname_mentions[entity] = None if match.group('category') in CATEGORIES_WITH_VALIDITY: self.fs_api_includes[entity] = self.storeMessageContext() if entity in self.validity_includes: name_and_line = toNameAndLine( self.validity_includes[entity], root_path=self.checker.root_path) self.error(MessageId.API_VALIDITY_ORDER, ['/api/ include found for {} after a corresponding /validity/ include'.format(entity), 'Validity include located at {}'.format(name_and_line)]) elif match.group('generated_type') == 'validity': self.recordInclude(self.checker.validityIncludes) self.validity_includes[entity] = self.storeMessageContext() if entity not in self.pname_mentions: self.error(MessageId.API_VALIDITY_ORDER, '/validity/ include found for {} without a preceding /api/ include'.format(entity)) return if self.pname_mentions[entity]: # Got a validity include and we have seen at least one * pname: line # since we got the API include # so we can warn if we haven't seen a reference to every # parameter/member. members = self.checker.getMemberNames(entity) missing = [member for member in members if member not in self.pname_mentions[entity]] if missing: self.error(MessageId.UNDOCUMENTED_MEMBER, ['Validity include found for {}, but not all members/params apparently documented'.format(entity), 'Members/params not mentioned with pname: {}'.format(', '.join(missing))]) # If we found an include line, we're done with this line. 
return if self.pname_data is not None and '* pname:' in line: context_entity = self.pname_data.entity if self.pname_mentions[context_entity] is None: # First time seeing * pname: after an api include, prepare the set that # tracks self.pname_mentions[context_entity] = set() ### # Detect [[Entity]] anchors for match in ANCHOR.finditer(line): entity = match.group('entity_name') if self.checker.findEntity(entity): # We found an anchor with the same name as an entity: # treat it (mostly) like an API include self.match = match self.recordInclude(self.checker.apiIncludes, generated_type='api (manual anchor)') ### # Detect :: without pname for match in MEMBER_REFERENCE.finditer(line): if not match.group('member_macro'): self.match = match # Got :: but not followed by pname search = match.group() replacement = match.group( 'first_part') + '::pname:' + match.group('second_part') self.error(MessageId.MEMBER_PNAME_MISSING, 'Found a function parameter or struct member reference with :: but missing pname:', group='double_colons', replacement='::pname:', fix=(search, replacement)) # check pname here because it won't come up in normal iteration below # because of the missing macro self.entity = match.group('entity_name') self.checkPname(match.group('scope')) ### # Look for things that seem like a missing macro. for match in self.suspected_missing_macro_re.finditer(line): if OPEN_LINK.match(line, endpos=match.start()): # this is in a link, skip it. continue if CLOSE_LINK.match(line[match.end():]): # this is in a link, skip it. continue entity = match.group('entity_name') self.match = match self.entity = entity data = self.checker.findEntity(entity) if data: if data.category == EXTENSION_CATEGORY: # Ah, this is an extension self.warning(MessageId.EXTENSION, "Seems like this is an extension name that was not linked.", group='entity_name', replacement=self.makeExtensionLink()) else: self.warning(MessageId.MISSING_MACRO, ['Seems like a "{}" macro was omitted for this reference to a known entity in category "{}".'.format(data.macro, data.category), 'Wrap in ` ` to silence this if you do not want a verified macro here.'], group='entity_name', replacement=self.makeMacroMarkup(data.macro)) else: dataArray = self.checker.findEntityCaseInsensitive(entity) # We might have found the goof... if dataArray: if len(dataArray) == 1: # Yep, found the goof: # incorrect macro and entity capitalization data = dataArray[0] if data.category == EXTENSION_CATEGORY: # Ah, this is an extension self.warning(MessageId.EXTENSION, "Seems like this is an extension name that was not linked.", group='entity_name', replacement=self.makeExtensionLink(data.entity)) else: self.warning(MessageId.MISSING_MACRO, 'Seems like a macro was omitted for this reference to a known entity in category "{}", found by searching case-insensitively.'.format( data.category), replacement=self.makeMacroMarkup(data=data)) else: # Ugh, more than one resolution self.warning(MessageId.MISSING_MACRO, ['Seems like a macro was omitted for this reference to a known entity, found by searching case-insensitively.', 'More than one apparent match.'], group='entity_name', see_also=dataArray[:]) ### # Main operations: detect markup macros for match in self.checker.macro_re.finditer(line): self.match = match self.macro = match.group('macro') self.entity = match.group('entity_name') self.subscript = match.group('subscript') self.processMatch() def processPossibleBlockDelimiter(self): """Look at the current line, and if it's a delimiter, update the block stack. 
Calls self.processBlockDelimiter() as required. Returns True if a delimiter was processed, False otherwise. """ line = self.line new_block_type = BlockType.lineToBlockType(line) if not new_block_type: return False ### # Detect if the previous line was [open,...] starting a refpage # but this line is some block delimiter other than -- # Must do this here because if we get a different block open instead of the one we want, # the order of block opening will be wrong. if new_block_type != BlockType.REF_PAGE_LIKE and self.prev_line_ref_page_tag: self.handleExpectedRefpageBlock() # Delegate to the main process for delimiters. self.processBlockDelimiter(line, new_block_type) return True def processBlockDelimiter(self, line, new_block_type, context=None): """Update the block stack based on the current or supplied line. Calls self.processBlockOpen() or self.processBlockClosed() as required. Called by self.processPossibleBlockDelimiter() both in normal operation, as well as when "faking" a ref page block open. Returns BlockProcessResult. """ if not context: context = self.storeMessageContext() location = self.getBriefLocation(context) top = self.getInnermostBlockEntry() top_delim = self.getInnermostBlockDelimiter() if top_delim == line: self.processBlockClosed() return if top and top.block_type == new_block_type: # Same block type, but not matching - might be an error? # TODO maybe create a diagnostic here? self.logger.warning( "processPossibleBlockDelimiter: %s: Matched delimiter type %s, but did not exactly match current delim %s to top of stack %s, may be a typo?", location, new_block_type, line, top_delim) # Empty stack, or top doesn't match us. self.processBlockOpen(new_block_type, delimiter=line) def processBlockOpen(self, block_type, context=None, delimiter=None): """Do any block-type-specific processing and push the new block. Must call self.pushBlock(). May be overridden (carefully) or extended. Called by self.processBlockDelimiter(). """ if block_type == BlockType.REF_PAGE_LIKE: if self.prev_line_ref_page_tag: if self.current_ref_page: refpage = self.current_ref_page else: refpage = '?refpage-with-invalid-tag?' self.logger.info( 'processBlockOpen: Opening refpage for %s', refpage) # Opening of refpage block "consumes" the preceding ref # page context self.prev_line_ref_page_tag = None self.pushBlock(block_type, refpage=refpage, context=context, delimiter=delimiter) self.in_ref_page = True return if block_type == BlockType.CODE: self.in_code_block = True self.pushBlock(block_type, context=context, delimiter=delimiter) def processBlockClosed(self): """Do any block-type-specific processing and pop the top block. Must call self.popBlock(). May be overridden (carefully) or extended. Called by self.processPossibleBlockDelimiter(). """ old_top = self.popBlock() if old_top.block_type == BlockType.CODE: self.in_code_block = False elif old_top.block_type == BlockType.REF_PAGE_LIKE and old_top.refpage: self.logger.info( 'processBlockClosed: Closing refpage for %s', old_top.refpage) # leaving a ref page so reset associated state. self.current_ref_page = None self.prev_line_ref_page_tag = None self.in_ref_page = False def processMatch(self): """Process a match of the macro:entity regex for correctness.""" match = self.match entity = self.entity macro = self.macro ### # Track entities that we're actually linking to. 
### if self.checker.entity_db.isLinkedMacro(macro): self.checker.addLinkToEntity(entity, self.storeMessageContext()) ### # Link everything that should be, and nothing that shouldn't be ### if self.checkRecognizedEntity(): # if this returns true, # then there is no need to do the remaining checks on this match return ### # Non-existent macros if macro in NON_EXISTENT_MACROS: self.error(MessageId.BAD_MACRO, '{} is not a macro provided in the specification, despite resembling other macros.'.format( macro), group='macro') ### # Wildcards (or leading underscore, or square brackets) # if and only if a 'text' macro self.checkText() # Do some validation of pname references. if macro == 'pname': # See if there's an immediately-preceding entity preceding = self.line[:match.start()] scope = PRECEDING_MEMBER_REFERENCE.search(preceding) if scope: # Yes there is, check it out. self.checkPname(scope.group('entity_name')) elif self.current_ref_page is not None: # No, but there is a current ref page: very reliable self.checkPnameImpliedContext(self.current_ref_page) elif self.pname_data is not None: # No, but there is a pname_context - better than nothing. self.checkPnameImpliedContext(self.pname_data) else: # no, and no existing context we can imply: # can't check this. pass def checkRecognizedEntity(self): """Check the current macro:entity match to see if it is recognized. Returns True if there is no need to perform further checks on this match. Helps avoid duplicate warnings/errors: typically each macro should have at most one of this class of errors. """ entity = self.entity macro = self.macro if self.checker.findMacroAndEntity(macro, entity) is not None: # We know this macro-entity combo return True # We don't know this macro-entity combo. possibleCats = self.checker.entity_db.getCategoriesForMacro(macro) if possibleCats is None: possibleCats = ['???'] msg = ['Definition of link target {} with macro {} (used for {} {}) does not exist.'.format( entity, macro, _pluralize('category', len(possibleCats)), ', '.join(possibleCats))] data = self.checker.findEntity(entity) if data: # We found the goof: incorrect macro msg.append('Apparently matching entity in category {} found.'.format( data.category)) self.handleWrongMacro(msg, data) return True see_also = [] dataArray = self.checker.findEntityCaseInsensitive(entity) if dataArray: # We might have found the goof... if len(dataArray) == 1: # Yep, found the goof: # incorrect macro and entity capitalization data = dataArray[0] msg.append('Apparently matching entity in category {} found by searching case-insensitively.'.format( data.category)) self.handleWrongMacro(msg, data) return True else: # Ugh, more than one resolution msg.append( 'More than one apparent match found by searching case-insensitively, cannot auto-fix.') see_also = dataArray[:] # OK, so we don't recognize this entity (and couldn't auto-fix it). if self.checker.entity_db.shouldBeRecognized(macro, entity): # We should know the target - it's a link macro, # or there's some reason the entity DB thinks we should know it. if self.checker.likelyRecognizedEntity(entity): # Should be linked and it matches our pattern, # so probably not wrong macro. # Human brains required. if not self.checkText(): self.error(MessageId.BAD_ENTITY, msg + ['Might be a misspelling, or, less likely, the wrong macro.'], see_also=see_also) else: # Doesn't match our pattern, # so probably should be name instead of link. 
newMacro = macro[0] + 'name' if self.checker.entity_db.isValidMacro(newMacro): self.error(MessageId.BAD_ENTITY, msg + ['Entity name does not fit the pattern for this API, which would mean it should be a "name" macro instead of a "link" macro'], group='macro', replacement=newMacro, fix=self.makeFix(newMacro=newMacro), see_also=see_also) else: self.error(MessageId.BAD_ENTITY, msg + ['Entity name does not fit the pattern for this API, which would mean it should be a "name" macro instead of a "link" macro', 'However, {} is not a known macro so cannot auto-fix.'.format(newMacro)], see_also=see_also) elif macro == 'ename': # TODO This might be an ambiguity in the style guide - ename might be a known enumerant value, # or it might be an enumerant value in an external library, etc. that we don't know about - so # hard to check this. if self.checker.likelyRecognizedEntity(entity): if not self.checkText(): self.warning(MessageId.BAD_ENUMERANT, msg + ['Unrecognized ename:{} that we would expect to recognize since it fits the pattern for this API.'.format(entity)], see_also=see_also) else: # This is fine: # it doesn't need to be recognized since it's not linked. pass # Don't skip other tests. return False def checkText(self): """Evaluate the usage (or non-usage) of a *text macro. Wildcards (or leading or trailing underscore, or square brackets with nothing or a placeholder) if and only if a 'text' macro. Called by checkRecognizedEntity() when appropriate. """ macro = self.macro entity = self.entity shouldBeText = shouldEntityBeText(entity, self.subscript) if shouldBeText and not self.macro.endswith( 'text') and not self.macro == 'code': newMacro = macro[0] + 'text' if self.checker.entity_db.getCategoriesForMacro(newMacro): self.error(MessageId.MISSING_TEXT, ['Asterisk/leading or trailing underscore/bracket found - macro should end with "text:", probably {}:'.format(newMacro), AUTO_FIX_STRING], group='macro', replacement=newMacro, fix=self.makeFix(newMacro=newMacro)) else: self.error(MessageId.MISSING_TEXT, ['Asterisk/leading or trailing underscore/bracket found, so macro should end with "text:".', 'However {}: is not a known macro so cannot auto-fix.'.format(newMacro)], group='macro') return True elif macro.endswith('text') and not shouldBeText: msg = [ "No asterisk/leading or trailing underscore/bracket in the entity, so this might be a mistaken use of the 'text' macro {}:".format(macro)] data = self.checker.findEntity(entity) if data: # We found the goof: incorrect macro msg.append('Apparently matching entity in category {} found.'.format( data.category)) msg.append(AUTO_FIX_STRING) replacement = self.makeFix(data=data) if data.category == EXTENSION_CATEGORY: self.error(MessageId.EXTENSION, msg, replacement=replacement, fix=replacement) else: self.error(MessageId.WRONG_MACRO, msg, group='macro', replacement=data.macro, fix=replacement) else: if self.checker.likelyRecognizedEntity(entity): # This is a use of *text: for something that fits the pattern but isn't in the spec. # This is OK. return False msg.append('Entity not found in spec, either.') if macro[0] != 'e': # Only suggest a macro if we aren't in elink/ename/etext, # since ename and elink are not related in an equivalent way # to the relationship between flink and fname. 
newMacro = macro[0] + 'name' if self.checker.entity_db.getCategoriesForMacro(newMacro): msg.append( 'Consider if {}: might be the correct macro to use here.'.format(newMacro)) else: msg.append( 'Cannot suggest a new macro because {}: is not a known macro.'.format(newMacro)) self.warning(MessageId.MISUSED_TEXT, msg) return True return False def checkPnameImpliedContext(self, pname_context): """Handle pname: macros not immediately preceded by something like flink:entity or slink:entity. Also records pname: mentions of members/parameters for completeness checking in doc blocks. Contains call to self.checkPname(). Called by self.processMatch() """ self.checkPname(pname_context.entity) if pname_context.entity in self.pname_mentions and \ self.pname_mentions[pname_context.entity] is not None: # Record this mention, # in case we're in the documentation block. self.pname_mentions[pname_context.entity].add(self.entity) def checkPname(self, pname_context): """Check the current match (as a pname: usage) with the given entity as its 'pname context', if possible. e.g. slink:foo::pname:bar, pname_context would be 'foo', while self.entity would be 'bar', etc. Called by self.processLine(), self.processMatch(), as well as from self.checkPnameImpliedContext(). """ if '*' in pname_context: # This context has a placeholder, can't verify it. return entity = self.entity context_data = self.checker.findEntity(pname_context) members = self.checker.getMemberNames(pname_context) if context_data and not members: # This is a recognized parent entity that doesn't have detectable member names, # skip validation # TODO: Annotate parameters of function pointer types with <name> # and <param>? return if not members: self.warning(MessageId.UNRECOGNIZED_CONTEXT, 'pname context entity was un-recognized {}'.format(pname_context)) return if entity not in members: self.warning(MessageId.UNKNOWN_MEMBER, ["Could not find member/param named '{}' in {}".format(entity, pname_context), 'Known {} mamber/param names are: {}'.format( pname_context, ', '.join(members))], group='entity_name') def checkIncludeRefPageRelation(self, entity, generated_type): """Identify if our current ref page (or lack thereof) is appropriate for an include just recorded. Called by self.recordInclude(). """ if not self.in_ref_page: # Not in a ref page block: This probably means this entity needs a # ref-page block added. self.handleIncludeMissingRefPage(entity, generated_type) return if not isinstance(self.current_ref_page, EntityData): # This isn't a fully-valid ref page, so can't check the includes any better. return ref_page_entity = self.current_ref_page.entity if ref_page_entity not in self.refpage_includes: self.refpage_includes[ref_page_entity] = set() expected_ref_page_entity = self.computeExpectedRefPageFromInclude( entity) self.refpage_includes[ref_page_entity].add((generated_type, entity)) if ref_page_entity == expected_ref_page_entity: # OK, this is a total match. pass elif self.checker.entity_db.areAliases(expected_ref_page_entity, ref_page_entity): # This appears to be a promoted synonym which is OK. pass else: # OK, we are in a ref page block that doesn't match self.handleIncludeMismatchRefPage(entity, generated_type) def checkRefPage(self): """Check if the current line (a refpage tag) meets requirements. Called by self.processLine(). 
""" line = self.line # Should always be found self.match = BRACKETS.match(line) data = None directory = None if self.in_ref_page: msg = ["Found reference page markup, but we are already in a refpage block.", "The block before the first message of this type is most likely not closed.", ] # Fake-close the previous ref page, if it's trivial to do so. if self.getInnermostBlockEntry().block_type == BlockType.REF_PAGE_LIKE: msg.append( "Pretending that there was a line with `--` immediately above to close that ref page, for more readable messages.") self.processBlockDelimiter( REF_PAGE_LIKE_BLOCK_DELIM, BlockType.REF_PAGE_LIKE) else: msg.append( "Ref page wasn't the last block opened, so not pretending to auto-close it for more readable messages.") self.error(MessageId.REFPAGE_BLOCK, msg) attribs = parseRefPageAttribs(line) unknown_attribs = set(attribs.keys()).difference( VALID_REF_PAGE_ATTRIBS) if unknown_attribs: self.error(MessageId.REFPAGE_UNKNOWN_ATTRIB, "Found unknown attrib(s) in reference page markup: " + ','.join(unknown_attribs)) # Required field: refpage='xrValidEntityHere' if Attrib.REFPAGE.value in attribs: attrib = attribs[Attrib.REFPAGE.value] text = attrib.value self.entity = text context = self.storeMessageContext( group='value', match=attrib.match) if self.checker.seenRefPage(text): self.error(MessageId.REFPAGE_DUPLICATE, ["Found reference page markup when we already saw refpage='{}' elsewhere.".format( text), "This (or the other mention) may be a copy-paste error."], context=context) self.checker.addRefPage(text) data = self.checker.findEntity(text) if data: # OK, this is a known entity that we're seeing a refpage for. directory = data.directory self.current_ref_page = data else: # TODO suggest fixes here if applicable self.error(MessageId.REFPAGE_NAME, "Found reference page markup, but refpage='{}' does not refer to a recognized entity".format( text), context=context) else: self.error(MessageId.REFPAGE_TAG, "Found apparent reference page markup, but missing refpage='...'", group=None) # Required field: desc='preferably non-empty' if Attrib.DESC.value in attribs: attrib = attribs[Attrib.DESC.value] text = attrib.value if not text: context = self.storeMessageContext( group=None, match=attrib.match) self.warning(MessageId.REFPAGE_MISSING_DESC, "Found reference page markup, but desc='' is empty", context=context) else: self.error(MessageId.REFPAGE_TAG, "Found apparent reference page markup, but missing desc='...'", group=None) # Required field: type='protos' for example # (used by genRef.py to compute the macro to use) if Attrib.TYPE.value in attribs: attrib = attribs[Attrib.TYPE.value] text = attrib.value if directory and not text == directory: context = self.storeMessageContext( group='value', match=attrib.match) self.error(MessageId.REFPAGE_TYPE, "Found reference page markup, but type='{}' is not the expected value '{}'".format( text, directory), context=context) else: self.error(MessageId.REFPAGE_TAG, "Found apparent reference page markup, but missing type='...'", group=None) # Optional field: alias='spaceDelimited validEntities' # Currently does nothing. 
Could modify checkRefPageXrefs to also # check alias= attribute value # if Attrib.ALIAS.value in attribs: # # This field is optional # self.checkRefPageXrefs(attribs[Attrib.XREFS.value]) # Optional field: xrefs='spaceDelimited validEntities' if Attrib.XREFS.value in attribs: # This field is optional self.checkRefPageXrefs(attribs[Attrib.XREFS.value]) self.prev_line_ref_page_tag = self.storeMessageContext() def checkRefPageXrefs(self, xrefs_attrib): """Check all cross-refs indicated in an xrefs attribute for a ref page. Called by self.checkRefPage(). Argument: xrefs_attrib -- A match of REF_PAGE_ATTRIB where the group 'key' is 'xrefs'. """ text = xrefs_attrib.value context = self.storeMessageContext( group='value', match=xrefs_attrib.match) def splitRefs(s): """Split the string on whitespace, into individual references.""" return s.split() # [x for x in s.split() if x] def remakeRefs(refs): """Re-create a xrefs string from something list-shaped.""" return ' '.join(refs) refs = splitRefs(text) # Pre-checking if messages are enabled, so that we can correctly determine # the current string following any auto-fixes: # the fixes for messages directly in this method would interact, # and thus must be in the order specified here. if self.messageEnabled(MessageId.REFPAGE_XREFS_COMMA) and ',' in text: old_text = text # Re-split after replacing commas. refs = splitRefs(text.replace(',', ' ')) # Re-create the space-delimited text. text = remakeRefs(refs) self.error(MessageId.REFPAGE_XREFS_COMMA, "Found reference page markup, with an unexpected comma in the (space-delimited) xrefs attribute", context=context, replacement=text, fix=(old_text, text)) # We could conditionally perform this creation, but the code complexity would increase substantially, # for presumably minimal runtime improvement. unique_refs = OrderedDict.fromkeys(refs) if self.messageEnabled(MessageId.REFPAGE_XREF_DUPE) and len(unique_refs) != len(refs): # TODO is it safe to auto-fix here? old_text = text text = remakeRefs(unique_refs.keys()) self.warning(MessageId.REFPAGE_XREF_DUPE, ["Reference page for {} contains at least one duplicate in its cross-references.".format( self.entity), "Look carefully to see if this is a copy and paste error and should be changed to a different but related entity:", "auto-fix simply removes the duplicate."], context=context, replacement=text, fix=(old_text, text)) if self.messageEnabled(MessageId.REFPAGE_SELF_XREF) and self.entity and self.entity in unique_refs: # Not modifying unique_refs here because that would accidentally affect the whitespace auto-fix. new_text = remakeRefs( [x for x in unique_refs.keys() if x != self.entity]) # DON'T AUTOFIX HERE because these are likely copy-paste between related entities: # e.g. a Create function and the associated CreateInfo struct. 
self.warning(MessageId.REFPAGE_SELF_XREF, ["Reference page for {} included itself in its cross-references.".format(self.entity), "This is typically a copy and paste error, and the dupe should likely be changed to a different but related entity.", "Not auto-fixing for this reason."], context=context, replacement=new_text,) # We didn't have another reason to replace the whole attribute value, # so let's make sure it doesn't have any extra spaces if self.messageEnabled(MessageId.REFPAGE_WHITESPACE) and xrefs_attrib.value == text: old_text = text text = remakeRefs(unique_refs.keys()) if old_text != text: self.warning(MessageId.REFPAGE_WHITESPACE, ["Cross-references for reference page for {} had non-minimal whitespace,".format(self.entity), "and no other enabled message has re-constructed this value already."], context=context, replacement=text, fix=(old_text, text)) for entity in unique_refs.keys(): self.checkRefPageXref(entity, context) def checkRefPageXref(self, referenced_entity, line_context): """Check a single cross-reference entry for a refpage. Called by self.checkRefPageXrefs(). Arguments: referenced_entity -- The individual entity under consideration from the xrefs='...' string. line_context -- A MessageContext referring to the entire line. """ data = self.checker.findEntity(referenced_entity) if data: # This is OK return context = line_context match = re.search(r'\b{}\b'.format(referenced_entity), self.line) if match: context = self.storeMessageContext( group=None, match=match) msg = ["Found reference page markup, with an unrecognized entity listed: {}".format( referenced_entity)] see_also = None dataArray = self.checker.findEntityCaseInsensitive( referenced_entity) if dataArray: # We might have found the goof... if len(dataArray) == 1: # Yep, found the goof - incorrect entity capitalization data = dataArray[0] new_entity = data.entity self.error(MessageId.REFPAGE_XREFS, msg + [ 'Apparently matching entity in category {} found by searching case-insensitively.'.format( data.category), AUTO_FIX_STRING], replacement=new_entity, fix=(referenced_entity, new_entity), context=context) return # Ugh, more than one resolution msg.append( 'More than one apparent match found by searching case-insensitively, cannot auto-fix.') see_also = dataArray[:] # Multiple or no resolutions found self.error(MessageId.REFPAGE_XREFS, msg, see_also=see_also, context=context) ### # Message-related methods. ### def warning(self, message_id, messageLines, context=None, group=None, replacement=None, fix=None, see_also=None, frame=None): """Log a warning for the file, if the message ID is enabled. Wrapper around self.diag() that automatically sets severity as well as frame. Arguments: message_id -- A MessageId value. messageLines -- A string or list of strings containing a human-readable error description. Optional, named arguments: context -- A MessageContext. If None, will be constructed from self.match and group. group -- The name of the regex group in self.match that contains the problem. Only used if context is None. If needed and is None, self.group is used instead. replacement -- The string, if any, that should be suggested as a replacement for the group in question. Does not create an auto-fix: sometimes we want to show a possible fix but aren't confident enough (or can't easily phrase a regex) to do it automatically. fix -- A (old text, new text) pair if this error is auto-fixable safely. see_also -- An optional array of other MessageContext locations relevant to this message. 
frame -- The 'inspect' stack frame corresponding to the location that raised this message. If None, will assume it is the direct caller of self.warning(). """ if not frame: frame = currentframe().f_back self.diag(MessageType.WARNING, message_id, messageLines, group=group, replacement=replacement, context=context, fix=fix, see_also=see_also, frame=frame) def error(self, message_id, messageLines, group=None, replacement=None, context=None, fix=None, see_also=None, frame=None): """Log an error for the file, if the message ID is enabled. Wrapper around self.diag() that automatically sets severity as well as frame. Arguments: message_id -- A MessageId value. messageLines -- A string or list of strings containing a human-readable error description. Optional, named arguments: context -- A MessageContext. If None, will be constructed from self.match and group. group -- The name of the regex group in self.match that contains the problem. Only used if context is None. If needed and is None, self.group is used instead. replacement -- The string, if any, that should be suggested as a replacement for the group in question. Does not create an auto-fix: sometimes we want to show a possible fix but aren't confident enough (or can't easily phrase a regex) to do it automatically. fix -- A (old text, new text) pair if this error is auto-fixable safely. see_also -- An optional array of other MessageContext locations relevant to this message. frame -- The 'inspect' stack frame corresponding to the location that raised this message. If None, will assume it is the direct caller of self.error(). """ if not frame: frame = currentframe().f_back self.diag(MessageType.ERROR, message_id, messageLines, group=group, replacement=replacement, context=context, fix=fix, see_also=see_also, frame=frame) def diag(self, severity, message_id, messageLines, context=None, group=None, replacement=None, fix=None, see_also=None, frame=None): """Log a diagnostic for the file, if the message ID is enabled. Also records the auto-fix, if applicable. Arguments: severity -- A MessageType value. message_id -- A MessageId value. messageLines -- A string or list of strings containing a human-readable error description. Optional, named arguments: context -- A MessageContext. If None, will be constructed from self.match and group. group -- The name of the regex group in self.match that contains the problem. Only used if context is None. If needed and is None, self.group is used instead. replacement -- The string, if any, that should be suggested as a replacement for the group in question. Does not create an auto-fix: sometimes we want to show a possible fix but aren't confident enough (or can't easily phrase a regex) to do it automatically. fix -- A (old text, new text) pair if this error is auto-fixable safely. see_also -- An optional array of other MessageContext locations relevant to this message. frame -- The 'inspect' stack frame corresponding to the location that raised this message. If None, will assume it is the direct caller of self.diag(). """ if not self.messageEnabled(message_id): self.logger.debug( 'Discarding a %s message because it is disabled.', message_id) return if isinstance(messageLines, str): messageLines = [messageLines] self.logger.info('Recording a %s message: %s', message_id, ' '.join(messageLines)) # Ensure all auto-fixes are marked as such. 
if fix is not None and AUTO_FIX_STRING not in messageLines: messageLines.append(AUTO_FIX_STRING) if not frame: frame = currentframe().f_back if context is None: message = Message(message_id=message_id, message_type=severity, message=messageLines, context=self.storeMessageContext(group=group), replacement=replacement, see_also=see_also, fix=fix, frame=frame) else: message = Message(message_id=message_id, message_type=severity, message=messageLines, context=context, replacement=replacement, see_also=see_also, fix=fix, frame=frame) if fix is not None: self.fixes.add(fix) self.messages.append(message) def messageEnabled(self, message_id): """Return true if the given message ID is enabled.""" return message_id in self.enabled_messages ### # Accessors for externally-interesting information def numDiagnostics(self): """Count the total number of diagnostics (errors or warnings) for this file.""" return len(self.messages) def numErrors(self): """Count the total number of errors for this file.""" return self.numMessagesOfType(MessageType.ERROR) def numMessagesOfType(self, message_type): """Count the number of messages of a particular type (severity).""" return len( [msg for msg in self.messages if msg.message_type == message_type]) def hasFixes(self): """Return True if any messages included auto-fix patterns.""" return len(self.fixes) > 0 ### # Assorted internal methods. def printMessageCounts(self): """Print a simple count of each MessageType of diagnostics.""" for message_type in [MessageType.ERROR, MessageType.WARNING]: count = self.numMessagesOfType(message_type) if count > 0: print('{num} {mtype}{s} generated.'.format( num=count, mtype=message_type, s=_s_suffix(count))) def dumpInternals(self): """Dump internal variables to screen, for debugging.""" print('self.lineNum: ', self.lineNum) print('self.line:', self.line) print('self.prev_line_ref_page_tag: ', self.prev_line_ref_page_tag) print('self.current_ref_page:', self.current_ref_page) def getMissingValiditySuppressions(self): """Return an enumerable of entity names that we shouldn't warn about missing validity. May override. """ return [] def recordInclude(self, include_dict, generated_type=None): """Store the current line as being the location of an include directive or equivalent. Reports duplicate include errors, as well as include/ref-page mismatch or missing ref-page, by calling self.checkIncludeRefPageRelation() for "actual" includes (where generated_type is None). Arguments: include_dict -- The include dictionary to update: one of self.apiIncludes or self.validityIncludes. generated_type -- The type of include (e.g. 'api', 'valid', etc). By default, extracted from self.match. """ entity = self.match.group('entity_name') if generated_type is None: generated_type = self.match.group('generated_type') # Only checking the ref page relation if it's retrieved from regex. # Otherwise it might be a manual anchor recorded as an include, # etc. 
self.checkIncludeRefPageRelation(entity, generated_type) if entity in include_dict: self.error(MessageId.DUPLICATE_INCLUDE, "Included {} docs for {} when they were already included.".format(generated_type, entity), see_also=include_dict[entity]) include_dict[entity].append(self.storeMessageContext()) else: include_dict[entity] = [self.storeMessageContext()] def getInnermostBlockEntry(self): """Get the BlockEntry for the top block delim on our stack.""" if not self.block_stack: return None return self.block_stack[-1] def getInnermostBlockDelimiter(self): """Get the delimiter for the top block on our stack.""" top = self.getInnermostBlockEntry() if not top: return None return top.delimiter def pushBlock(self, block_type, refpage=None, context=None, delimiter=None): """Push a new entry on the block stack.""" if not delimiter: self.logger.info("pushBlock: not given delimiter") delimiter = self.line if not context: context = self.storeMessageContext() old_top_delim = self.getInnermostBlockDelimiter() self.block_stack.append(BlockEntry( delimiter=delimiter, context=context, refpage=refpage, block_type=block_type)) location = self.getBriefLocation(context) self.logger.info( "pushBlock: %s: Pushed %s delimiter %s, previous top was %s, now %d elements on the stack", location, block_type.value, delimiter, old_top_delim, len(self.block_stack)) self.dumpBlockStack() def popBlock(self): """Pop and return the top entry from the block stack.""" old_top = self.block_stack.pop() location = self.getBriefLocation(old_top.context) self.logger.info( "popBlock: %s: popping %s delimiter %s, now %d elements on the stack", location, old_top.block_type.value, old_top.delimiter, len(self.block_stack)) self.dumpBlockStack() return old_top def dumpBlockStack(self): self.logger.debug('Block stack, top first:') for distFromTop, x in enumerate(reversed(self.block_stack)): self.logger.debug(' - block_stack[%d]: Line %d: "%s" refpage=%s', -1 - distFromTop, x.context.lineNum, x.delimiter, x.refpage) def getBriefLocation(self, context): """Format a context briefly - omitting the filename if it has newlines in it.""" if '\n' in context.filename: return 'input string line {}'.format(context.lineNum) return '{}:{}'.format( context.filename, context.lineNum) ### # Handlers for a variety of diagnostic-meriting conditions # # Split out for clarity and for allowing fine-grained override on a per-project basis. ### def handleIncludeMissingRefPage(self, entity, generated_type): """Report a message about an include outside of a ref-page block.""" msg = ["Found {} include for {} outside of a reference page block.".format(generated_type, entity), "This is probably a missing reference page block."] refpage = self.computeExpectedRefPageFromInclude(entity) data = self.checker.findEntity(refpage) if data: msg.append('Expected ref page block might start like:') msg.append(self.makeRefPageTag(refpage, data=data)) else: msg.append( "But, expected ref page entity name {} isn't recognized...".format(refpage)) self.warning(MessageId.REFPAGE_MISSING, msg) def handleIncludeMismatchRefPage(self, entity, generated_type): """Report a message about an include not matching its containing ref-page block.""" self.warning(MessageId.REFPAGE_MISMATCH, "Found {} include for {}, inside the reference page block of {}".format( generated_type, entity, self.current_ref_page.entity)) def handleWrongMacro(self, msg, data): """Report an appropriate message when we found that the macro used is incorrect. 
May be overridden depending on each API's behavior regarding macro misuse: e.g. in some cases, it may be considered a MessageId.LEGACY warning rather than a MessageId.WRONG_MACRO or MessageId.EXTENSION. """ message_type = MessageType.WARNING message_id = MessageId.WRONG_MACRO group = 'macro' if data.category == EXTENSION_CATEGORY: # Ah, this is an extension msg.append( 'This is apparently an extension name, which should be marked up as a link.') message_id = MessageId.EXTENSION group = None # replace the whole thing else: # Non-extension, we found the macro though. message_type = MessageType.ERROR msg.append(AUTO_FIX_STRING) self.diag(message_type, message_id, msg, group=group, replacement=self.makeMacroMarkup(data=data), fix=self.makeFix(data=data)) def handleExpectedRefpageBlock(self): """Handle expecting to see -- to start a refpage block, but not seeing that at all.""" self.error(MessageId.REFPAGE_BLOCK, ["Expected, but did not find, a line containing only -- following a reference page tag,", "Pretending to insert one, for more readable messages."], see_also=[self.prev_line_ref_page_tag]) # Fake "in ref page" regardless, to avoid spurious extra errors. self.processBlockDelimiter('--', BlockType.REF_PAGE_LIKE, context=self.prev_line_ref_page_tag) ### # Construct related values (typically named tuples) based on object state and supplied arguments. # # Results are typically supplied to another method call. ### def storeMessageContext(self, group=None, match=None): """Create message context from corresponding instance variables. Arguments: group -- The regex group name, if any, identifying the part of the match to highlight. match -- The regex match. If None, will use self.match. """ if match is None: match = self.match return MessageContext(filename=self.filename, lineNum=self.lineNum, line=self.line, match=match, group=group) def makeFix(self, newMacro=None, newEntity=None, data=None): """Construct a fix pair for replacing the old macro:entity with new. Wrapper around self.makeSearch() and self.makeMacroMarkup(). """ return (self.makeSearch(), self.makeMacroMarkup( newMacro, newEntity, data)) def makeSearch(self): """Construct the string self.macro:self.entity, for use in the old text part of a fix pair.""" return '{}:{}'.format(self.macro, self.entity) def makeMacroMarkup(self, newMacro=None, newEntity=None, data=None): """Construct appropriate markup for referring to an entity. Typically constructs macro:entity, but can construct `<<EXTENSION_NAME>>` if the supplied entity is identified as an extension. Arguments: newMacro -- The macro to use. Defaults to data.macro (if available), otherwise self.macro. newEntity -- The entity to use. Defaults to data.entity (if available), otherwise self.entity. data -- An EntityData value corresponding to this entity. If not provided, will be looked up by newEntity. """ if not newEntity: if data: newEntity = data.entity else: newEntity = self.entity if not newMacro: if data: newMacro = data.macro else: newMacro = self.macro if not data: data = self.checker.findEntity(newEntity) if data and data.category == EXTENSION_CATEGORY: return self.makeExtensionLink(newEntity) return '{}:{}'.format(newMacro, newEntity) def makeExtensionLink(self, newEntity=None): """Create a correctly-formatted link to an extension. Result takes the form `<<EXTENSION_NAME>>`. Argument: newEntity -- The extension name to link to. Defaults to self.entity. 
""" if not newEntity: newEntity = self.entity return '`<<{}>>`'.format(newEntity) def computeExpectedRefPageFromInclude(self, entity): """Compute the expected ref page entity based on an include entity name.""" # No-op in general. return entity def makeRefPageTag(self, entity, data=None, ref_type=None, desc='', xrefs=None): """Construct a ref page tag string from attribute values.""" if ref_type is None and data is not None: ref_type = data.directory if ref_type is None: ref_type = "????" return "[open,refpage='{}',type='{}',desc='{}',xrefs='{}']".format( entity, ref_type, desc, ' '.join(xrefs or []))
[ "collections.OrderedDict.fromkeys", "collections.namedtuple", "logging.NullHandler", "inspect.currentframe", "logging.getLogger", "re.compile" ]
[((949, 998), 're.compile', 're.compile', (['"""(?P<delim>__+)([a-zA-Z]+)(?P=delim)"""'], {}), "('(?P<delim>__+)([a-zA-Z]+)(?P=delim)')\n", (959, 998), False, 'import re\n'), ((1071, 1272), 're.compile', 're.compile', (['"""include::(?P<directory_traverse>((../){1,4}|\\\\{(INCS-VAR|generated)\\\\}/)(generated/)?)(?P<generated_type>(api|validity))/(?P<category>\\\\w+)/(?P<entity_name>[^./]+).txt[\\\\[][\\\\]]"""'], {}), "(\n 'include::(?P<directory_traverse>((../){1,4}|\\\\{(INCS-VAR|generated)\\\\}/)(generated/)?)(?P<generated_type>(api|validity))/(?P<category>\\\\w+)/(?P<entity_name>[^./]+).txt[\\\\[][\\\\]]'\n )\n", (1081, 1272), False, 'import re\n'), ((1306, 1356), 're.compile', 're.compile', (['"""\\\\[\\\\[(?P<entity_name>[^\\\\]]+)\\\\]\\\\]"""'], {}), "('\\\\[\\\\[(?P<entity_name>[^\\\\]]+)\\\\]\\\\]')\n", (1316, 1356), False, 'import re\n'), ((1486, 1557), 're.compile', 're.compile', (['"""\\\\b(?P<macro>[fs](text|link)):(?P<entity_name>[\\\\w*]+)::$"""'], {}), "('\\\\b(?P<macro>[fs](text|link)):(?P<entity_name>[\\\\w*]+)::$')\n", (1496, 1557), False, 'import re\n'), ((1677, 1860), 're.compile', 're.compile', (['"""\\\\b(?P<first_part>(?P<scope_macro>[fs](text|link)):(?P<scope>[\\\\w*]+))(?P<double_colons>::)(?P<second_part>(?P<member_macro>pname:?)(?P<entity_name>[\\\\w]+))\\\\b"""'], {}), "(\n '\\\\b(?P<first_part>(?P<scope_macro>[fs](text|link)):(?P<scope>[\\\\w*]+))(?P<double_colons>::)(?P<second_part>(?P<member_macro>pname:?)(?P<entity_name>[\\\\w]+))\\\\b'\n )\n", (1687, 1860), False, 'import re\n'), ((2097, 2127), 're.compile', 're.compile', (['""".*(?<!`)<<[^>]*$"""'], {}), "('.*(?<!`)<<[^>]*$')\n", (2107, 2127), False, 'import re\n'), ((2333, 2357), 're.compile', 're.compile', (['"""[^<]*>>.*$"""'], {}), "('[^<]*>>.*$')\n", (2343, 2357), False, 'import re\n'), ((2586, 2642), 're.compile', 're.compile', (['"""^(ifdef:)|(endif:)|([tT][oO][dD][oO]\\\\b).*"""'], {}), "('^(ifdef:)|(endif:)|([tT][oO][dD][oO]\\\\b).*')\n", (2596, 2642), False, 'import re\n'), ((2706, 2738), 're.compile', 're.compile', (['"""\\\\[(?P<tags>.*)\\\\]"""'], {}), "('\\\\[(?P<tags>.*)\\\\]')\n", (2716, 2738), False, 'import re\n'), ((2807, 2878), 're.compile', 're.compile', (['"""(?P<key>[a-z]+)=\'(?P<value>[^\'\\\\\\\\]*(?:\\\\\\\\.[^\'\\\\\\\\]*)*)\'"""'], {}), '("(?P<key>[a-z]+)=\'(?P<value>[^\'\\\\\\\\]*(?:\\\\\\\\.[^\'\\\\\\\\]*)*)\'")\n', (2817, 2878), False, 'import re\n'), ((3137, 3188), 'collections.namedtuple', 'namedtuple', (['"""AttribData"""', "['match', 'key', 'value']"], {}), "('AttribData', ['match', 'key', 'value'])\n", (3147, 3188), False, 'from collections import OrderedDict, namedtuple\n'), ((3943, 4018), 'collections.namedtuple', 'namedtuple', (['"""BlockEntry"""', "['delimiter', 'context', 'block_type', 'refpage']"], {}), "('BlockEntry', ['delimiter', 'context', 'block_type', 'refpage'])\n", (3953, 4018), False, 'from collections import OrderedDict, namedtuple\n'), ((6655, 6682), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (6672, 6682), False, 'import logging\n'), ((45581, 45607), 'collections.OrderedDict.fromkeys', 'OrderedDict.fromkeys', (['refs'], {}), '(refs)\n', (45601, 45607), False, 'from collections import OrderedDict, namedtuple\n'), ((6714, 6735), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (6733, 6735), False, 'import logging\n'), ((51733, 51747), 'inspect.currentframe', 'currentframe', ([], {}), '()\n', (51745, 51747), False, 'from inspect import currentframe\n'), ((53389, 53403), 'inspect.currentframe', 
'currentframe', ([], {}), '()\n', (53401, 53403), False, 'from inspect import currentframe\n'), ((55609, 55623), 'inspect.currentframe', 'currentframe', ([], {}), '()\n', (55621, 55623), False, 'from inspect import currentframe\n')]
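The (code, apis, extract_api) rows above can be consumed programmatically. The sketch below is a minimal, non-authoritative reader: it assumes each extract_api entry leads with a (start, end) character span followed by the fully qualified API name, which is what the tuples printed above suggest; the helper name call_sites and the row variable are ours, not part of any record.

# Minimal sketch (assumed field order): slice each call site out of the code string.
def call_sites(code, extract_api):
    """Yield (api_name, source_snippet) pairs recovered from the offset spans."""
    for entry in extract_api:
        (start, end), api_name = entry[0], entry[1]
        yield api_name, code[start:end]

# Hypothetical usage with one row of this dataset:
# for name, snippet in call_sites(row["code"], row["extract_api"]):
#     print(name, "->", snippet.splitlines()[0])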
import unittest import mock from tethys_apps.cli.list_command import list_command try: from StringIO import StringIO except ImportError: from io import StringIO # noqa: F401 class ListCommandTests(unittest.TestCase): def setUp(self): pass def tearDown(self): pass @mock.patch('tethys_apps.cli.list_command.print') @mock.patch('tethys_apps.cli.list_command.get_installed_tethys_extensions') @mock.patch('tethys_apps.cli.list_command.get_installed_tethys_apps') def test_list_command_installed_apps(self, mock_installed_apps, mock_installed_extensions, mock_print): mock_args = mock.MagicMock() mock_installed_apps.return_value = {'foo': '/foo', 'bar': "/bar"} mock_installed_extensions.return_value = {} list_command(mock_args) mock_installed_apps.assert_called_once() # Check if print is called correctly rts_call_args = mock_print.call_args_list check_list = [] for i in range(len(rts_call_args)): check_list.append(rts_call_args[i][0][0]) self.assertIn('Apps:', check_list) self.assertIn(' foo', check_list) self.assertIn(' bar', check_list) @mock.patch('tethys_apps.cli.list_command.print') @mock.patch('tethys_apps.cli.list_command.get_installed_tethys_extensions') @mock.patch('tethys_apps.cli.list_command.get_installed_tethys_apps') def test_list_command_installed_extensions(self, mock_installed_apps, mock_installed_extensions, mock_print): mock_args = mock.MagicMock() mock_installed_apps.return_value = {} mock_installed_extensions.return_value = {'baz': '/baz'} list_command(mock_args) # Check if print is called correctly rts_call_args = mock_print.call_args_list check_list = [] for i in range(len(rts_call_args)): check_list.append(rts_call_args[i][0][0]) self.assertIn('Extensions:', check_list) self.assertIn(' baz', check_list) @mock.patch('tethys_apps.cli.list_command.print') @mock.patch('tethys_apps.cli.list_command.get_installed_tethys_extensions') @mock.patch('tethys_apps.cli.list_command.get_installed_tethys_apps') def test_list_command_installed_both(self, mock_installed_apps, mock_installed_extensions, mock_print): mock_args = mock.MagicMock() mock_installed_apps.return_value = {'foo': '/foo', 'bar': "/bar"} mock_installed_extensions.return_value = {'baz': '/baz'} list_command(mock_args) # Check if print is called correctly rts_call_args = mock_print.call_args_list check_list = [] for i in range(len(rts_call_args)): check_list.append(rts_call_args[i][0][0]) self.assertIn('Apps:', check_list) self.assertIn(' foo', check_list) self.assertIn(' bar', check_list) self.assertIn('Extensions:', check_list) self.assertIn(' baz', check_list)
[ "mock.MagicMock", "tethys_apps.cli.list_command.list_command", "mock.patch" ]
[((308, 356), 'mock.patch', 'mock.patch', (['"""tethys_apps.cli.list_command.print"""'], {}), "('tethys_apps.cli.list_command.print')\n", (318, 356), False, 'import mock\n'), ((362, 436), 'mock.patch', 'mock.patch', (['"""tethys_apps.cli.list_command.get_installed_tethys_extensions"""'], {}), "('tethys_apps.cli.list_command.get_installed_tethys_extensions')\n", (372, 436), False, 'import mock\n'), ((442, 510), 'mock.patch', 'mock.patch', (['"""tethys_apps.cli.list_command.get_installed_tethys_apps"""'], {}), "('tethys_apps.cli.list_command.get_installed_tethys_apps')\n", (452, 510), False, 'import mock\n'), ((1220, 1268), 'mock.patch', 'mock.patch', (['"""tethys_apps.cli.list_command.print"""'], {}), "('tethys_apps.cli.list_command.print')\n", (1230, 1268), False, 'import mock\n'), ((1274, 1348), 'mock.patch', 'mock.patch', (['"""tethys_apps.cli.list_command.get_installed_tethys_extensions"""'], {}), "('tethys_apps.cli.list_command.get_installed_tethys_extensions')\n", (1284, 1348), False, 'import mock\n'), ((1354, 1422), 'mock.patch', 'mock.patch', (['"""tethys_apps.cli.list_command.get_installed_tethys_apps"""'], {}), "('tethys_apps.cli.list_command.get_installed_tethys_apps')\n", (1364, 1422), False, 'import mock\n'), ((2035, 2083), 'mock.patch', 'mock.patch', (['"""tethys_apps.cli.list_command.print"""'], {}), "('tethys_apps.cli.list_command.print')\n", (2045, 2083), False, 'import mock\n'), ((2089, 2163), 'mock.patch', 'mock.patch', (['"""tethys_apps.cli.list_command.get_installed_tethys_extensions"""'], {}), "('tethys_apps.cli.list_command.get_installed_tethys_extensions')\n", (2099, 2163), False, 'import mock\n'), ((2169, 2237), 'mock.patch', 'mock.patch', (['"""tethys_apps.cli.list_command.get_installed_tethys_apps"""'], {}), "('tethys_apps.cli.list_command.get_installed_tethys_apps')\n", (2179, 2237), False, 'import mock\n'), ((639, 655), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (653, 655), False, 'import mock\n'), ((791, 814), 'tethys_apps.cli.list_command.list_command', 'list_command', (['mock_args'], {}), '(mock_args)\n', (803, 814), False, 'from tethys_apps.cli.list_command import list_command\n'), ((1557, 1573), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (1571, 1573), False, 'import mock\n'), ((1694, 1717), 'tethys_apps.cli.list_command.list_command', 'list_command', (['mock_args'], {}), '(mock_args)\n', (1706, 1717), False, 'from tethys_apps.cli.list_command import list_command\n'), ((2366, 2382), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (2380, 2382), False, 'import mock\n'), ((2531, 2554), 'tethys_apps.cli.list_command.list_command', 'list_command', (['mock_args'], {}), '(mock_args)\n', (2543, 2554), False, 'from tethys_apps.cli.list_command import list_command\n')]
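One detail worth noting in the test record above is the decorator order: stacked mock.patch decorators are applied bottom-up, so the patch written closest to the test function arrives as the first mock argument. The self-contained sketch below illustrates that rule with generic targets (builtins.print and os.getcwd); it is an illustration of unittest.mock behaviour, not part of the tethys test module.

import os
from unittest import mock

@mock.patch('builtins.print')   # outermost decorator -> last mock argument
@mock.patch('os.getcwd')        # innermost decorator -> first mock argument
def demo(mock_getcwd, mock_print):
    mock_getcwd.return_value = '/tmp'
    print(os.getcwd())            # both patches are active inside the function
    mock_print.assert_called_once_with('/tmp')

demo()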
from pathlib import Path from fhir.resources.codesystem import CodeSystem from oops_fhir.utils import CodeSystemConcept __all__ = ["v3AcknowledgementType"] _resource = CodeSystem.parse_file(Path(__file__).with_suffix(".json")) class v3AcknowledgementType: """ v3 Code System AcknowledgementType This attribute contains an acknowledgement code as described in the HL7 message processing rules. OpenIssue: Description was copied from attribute and needs to be improved to be appropriate for a code system. Status: active - Version: 2018-08-12 Copyright None http://terminology.hl7.org/CodeSystem/v3-AcknowledgementType """ aa = CodeSystemConcept( { "code": "AA", "definition": "Receiving application successfully processed message.", "display": "Application Acknowledgement Accept", } ) """ Application Acknowledgement Accept Receiving application successfully processed message. """ ae = CodeSystemConcept( { "code": "AE", "definition": "Receiving application found error in processing message. Sending error response with additional error detail information.", "display": "Application Acknowledgement Error", } ) """ Application Acknowledgement Error Receiving application found error in processing message. Sending error response with additional error detail information. """ ar = CodeSystemConcept( { "code": "AR", "definition": "Receiving application failed to process message for reason unrelated to content or format. Original message sender must decide on whether to automatically send message again.", "display": "Application Acknowledgement Reject", } ) """ Application Acknowledgement Reject Receiving application failed to process message for reason unrelated to content or format. Original message sender must decide on whether to automatically send message again. """ ca = CodeSystemConcept( { "code": "CA", "definition": "Receiving message handling service accepts responsibility for passing message onto receiving application.", "display": "Accept Acknowledgement Commit Accept", } ) """ Accept Acknowledgement Commit Accept Receiving message handling service accepts responsibility for passing message onto receiving application. """ ce = CodeSystemConcept( { "code": "CE", "definition": "Receiving message handling service cannot accept message for any other reason (e.g. message sequence number, etc.).", "display": "Accept Acknowledgement Commit Error", } ) """ Accept Acknowledgement Commit Error Receiving message handling service cannot accept message for any other reason (e.g. message sequence number, etc.). """ cr = CodeSystemConcept( { "code": "CR", "definition": "Receiving message handling service rejects message if interaction identifier, version or processing mode is incompatible with known receiving application role information.", "display": "Accept Acknowledgement Commit Reject", } ) """ Accept Acknowledgement Commit Reject Receiving message handling service rejects message if interaction identifier, version or processing mode is incompatible with known receiving application role information. """ class Meta: resource = _resource
[ "pathlib.Path", "oops_fhir.utils.CodeSystemConcept" ]
[((673, 834), 'oops_fhir.utils.CodeSystemConcept', 'CodeSystemConcept', (["{'code': 'AA', 'definition':\n 'Receiving application successfully processed message.', 'display':\n 'Application Acknowledgement Accept'}"], {}), "({'code': 'AA', 'definition':\n 'Receiving application successfully processed message.', 'display':\n 'Application Acknowledgement Accept'})\n", (690, 834), False, 'from oops_fhir.utils import CodeSystemConcept\n'), ((1012, 1242), 'oops_fhir.utils.CodeSystemConcept', 'CodeSystemConcept', (["{'code': 'AE', 'definition':\n 'Receiving application found error in processing message. Sending error response with additional error detail information.'\n , 'display': 'Application Acknowledgement Error'}"], {}), "({'code': 'AE', 'definition':\n 'Receiving application found error in processing message. Sending error response with additional error detail information.'\n , 'display': 'Application Acknowledgement Error'})\n", (1029, 1242), False, 'from oops_fhir.utils import CodeSystemConcept\n'), ((1487, 1771), 'oops_fhir.utils.CodeSystemConcept', 'CodeSystemConcept', (["{'code': 'AR', 'definition':\n 'Receiving application failed to process message for reason unrelated to content or format. Original message sender must decide on whether to automatically send message again.'\n , 'display': 'Application Acknowledgement Reject'}"], {}), "({'code': 'AR', 'definition':\n 'Receiving application failed to process message for reason unrelated to content or format. Original message sender must decide on whether to automatically send message again.'\n , 'display': 'Application Acknowledgement Reject'})\n", (1504, 1771), False, 'from oops_fhir.utils import CodeSystemConcept\n'), ((2070, 2286), 'oops_fhir.utils.CodeSystemConcept', 'CodeSystemConcept', (["{'code': 'CA', 'definition':\n 'Receiving message handling service accepts responsibility for passing message onto receiving application.'\n , 'display': 'Accept Acknowledgement Commit Accept'}"], {}), "({'code': 'CA', 'definition':\n 'Receiving message handling service accepts responsibility for passing message onto receiving application.'\n , 'display': 'Accept Acknowledgement Commit Accept'})\n", (2087, 2286), False, 'from oops_fhir.utils import CodeSystemConcept\n'), ((2517, 2742), 'oops_fhir.utils.CodeSystemConcept', 'CodeSystemConcept', (["{'code': 'CE', 'definition':\n 'Receiving message handling service cannot accept message for any other reason (e.g. message sequence number, etc.).'\n , 'display': 'Accept Acknowledgement Commit Error'}"], {}), "({'code': 'CE', 'definition':\n 'Receiving message handling service cannot accept message for any other reason (e.g. 
message sequence number, etc.).'\n , 'display': 'Accept Acknowledgement Commit Error'})\n", (2534, 2742), False, 'from oops_fhir.utils import CodeSystemConcept\n'), ((2982, 3264), 'oops_fhir.utils.CodeSystemConcept', 'CodeSystemConcept', (["{'code': 'CR', 'definition':\n 'Receiving message handling service rejects message if interaction identifier, version or processing mode is incompatible with known receiving application role information.'\n , 'display': 'Accept Acknowledgement Commit Reject'}"], {}), "({'code': 'CR', 'definition':\n 'Receiving message handling service rejects message if interaction identifier, version or processing mode is incompatible with known receiving application role information.'\n , 'display': 'Accept Acknowledgement Commit Reject'})\n", (2999, 3264), False, 'from oops_fhir.utils import CodeSystemConcept\n'), ((195, 209), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (199, 209), False, 'from pathlib import Path\n')]
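For readers who only need the code-to-display mapping defined in the record above and do not want to pull in fhir.resources or oops_fhir, the same information can be kept as a plain dictionary. The sketch below is a standalone reduction of the six concepts listed in the record; the names ACK_DISPLAY and describe_ack are ours.

# Plain-Python reduction of the v3 AcknowledgementType concepts shown above.
ACK_DISPLAY = {
    "AA": "Application Acknowledgement Accept",
    "AE": "Application Acknowledgement Error",
    "AR": "Application Acknowledgement Reject",
    "CA": "Accept Acknowledgement Commit Accept",
    "CE": "Accept Acknowledgement Commit Error",
    "CR": "Accept Acknowledgement Commit Reject",
}

def describe_ack(code):
    """Return the human-readable display for an HL7 v3 acknowledgement code."""
    return ACK_DISPLAY.get(code.upper(), "unknown acknowledgement code")

assert describe_ack("aa") == "Application Acknowledgement Accept"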
from config import get_arguments from SinGAN.manipulate import * from SinGAN.training import * from SinGAN.imresize import imresize from SinGAN.imresize import imresize_to_shape import SinGAN.functions as functions import os import copy def put_mask(image, n_pixels=30, offset_x=None, offset_y=None): image = copy.deepcopy(image) mask_size = (n_pixels, n_pixels) if offset_x is None: offset_x = random.randint(1, max(image.shape[2] - mask_size[0] - 1, 2)) if offset_y is None: offset_y = random.randint(1, max(image.shape[3] - mask_size[1] - 1, 2)) image[:, :, offset_x:offset_x + mask_size[0], offset_y:offset_y + mask_size[1]] = -1 mask = { 'xmin': offset_x, 'xmax': offset_x + mask_size[0] - 1, 'ymin': offset_y, 'ymax': offset_y + mask_size[1] - 1 } return image, mask if __name__ == '__main__': parser = get_arguments() parser.add_argument('--input_dir', help='input image dir (image on which singan was trained)', default='Input/Images') parser.add_argument('--input_name', help='training image name (image on which singan was trained)', required=True) parser.add_argument('--fake_input_dir', help='input image dir (image used for the reconstruction, e.g the same' ' image' 'as the image used for trained but with a missing part)', default='Input/Images') parser.add_argument('--fake_input_name', help='training image name', required=True) parser.add_argument('--reg', help='regularization parameter', type=float, default=0) parser.add_argument('--disc_loss', help='discrimination loss weight', type=float, default=0.01) parser.add_argument('--use_zopt', help='use z_opt to initialize z', type=bool, default=False) parser.add_argument('--use_mask', help='fake input has mask for inpainting', type=bool, default=False) parser.add_argument('--mask_size', help='mask is a square, specify side size', type=int, default=30) parser.add_argument('--mask_xmin', help='mask is a square, specify offset x (else random)', type=int, default=None) parser.add_argument('--mask_ymin', help='mask is a square, specify offset y (else random)', type=int, default=None) parser.add_argument('--mse_neigh', help='use neighbouring pixels only for MSE reconstruction', type=bool, default=True) parser.add_argument('--prio_neigh', help='use neighbouring pixels only for prio', type=bool, default=True) parser.add_argument('--ss_prio_neigh', help='start scale for using neighbouring pixels only for prio', type=int, default=4) opt = parser.parse_args() opt.mode = 'train' opt = functions.post_config(opt) print(f"Using only neighboring pixels for reconstruction MSE: {opt.mse_neigh}") print(f"Using only neighboring pixels for discimination prior: {opt.prio_neigh}") # LOAD MODEL # input_name = opt.input_name if torch.cuda.is_available(): map_location = lambda storage, loc: storage.cuda() else: map_location = 'cpu' input_name = opt.input_name[:-4] Gs = torch.load(f'TrainedModels/{input_name}/scale_factor=0.750000,alpha=10/Gs.pth', map_location=map_location) Zs = torch.load(f'TrainedModels/{input_name}/scale_factor=0.750000,alpha=10/Zs.pth', map_location=map_location) Ds = torch.load(f'TrainedModels/{input_name}/scale_factor=0.750000,alpha=10/Ds.pth', map_location=map_location) reals = torch.load(f'TrainedModels/{input_name}/scale_factor=0.750000,alpha=10/reals.pth', map_location=map_location) NoiseAmp = torch.load(f'TrainedModels/{input_name}/scale_factor=0.750000,alpha=10/NoiseAmp.pth', map_location=map_location) optbis = torch.load(f'TrainedModels/{input_name}/scale_factor=0.750000,alpha=10/opt.pth', map_location=map_location) opt.scale_factor = 
optbis.scale_factor opt.scale1 = optbis.scale1 opt.stop_scale = optbis.stop_scale print(f"previous stop scale: {optbis.stop_scale}") print(f"previous scale factor: {optbis.scale_factor}") fake = img.imread('%s/%s' % (opt.fake_input_dir, opt.fake_input_name)) fake = functions.np2torch(fake, opt) functions.adjust_scales2image(fake, opt) fake = imresize(fake, opt.scale1, opt) print(f"new stop scale: {opt.stop_scale}") print(f"new scale factor: {opt.scale_factor}") if opt.use_mask: fake, mask = put_mask(fake, n_pixels=opt.mask_size, offset_x=opt.mask_xmin, offset_y=opt.mask_ymin) fakes, masks = functions.creat_reals_pyramid(fake, opt, mask=mask) # For testing purposes fake_without_mask = img.imread('%s/%s' % (opt.fake_input_dir, opt.fake_input_name)) fake_without_mask = functions.np2torch(fake_without_mask, opt) fake_without_mask = imresize(fake_without_mask, opt.scale1, opt) fakes_without_mask, _ = functions.creat_reals_pyramid(fake_without_mask, opt, mask=mask) else: fakes, masks = functions.creat_reals_pyramid(fake, opt, mask=None) assert masks == [] in_s = torch.full(fakes[0].shape, 0, device=opt.device) image_cur = None pad1 = ((opt.ker_size - 1) * opt.num_layer) / 2 m = nn.ZeroPad2d(int(pad1)) n = 0 Z_stars = [] # Output dir dir_name = f'Recover/{opt.input_name[:-4]}_{opt.fake_input_name[:-4]}_dl-{opt.disc_loss}_' \ f'mse_neigh-{opt.mse_neigh}_prio_neigh-{opt.prio_neigh}' os.makedirs(dir_name, exist_ok=True) for G, Z_opt, noise_amp, fake, D in zip(Gs, Zs, NoiseAmp, fakes, Ds): print(f"\n\n******* Scale {n} ***********\n") nzx = fake.shape[2] nzy = fake.shape[3] image_prev = image_cur if opt.use_zopt: z_curr = Z_opt else: if n == 0: z_curr = functions.generate_noise([1, nzx, nzy], device=opt.device) z_curr = z_curr.expand(1, 3, z_curr.shape[2], z_curr.shape[3]) z_curr = m(z_curr) else: z_curr = functions.generate_noise([opt.nc_z, nzx, nzy], device=opt.device) z_curr = m(z_curr) if image_prev is None: I_prev = m(in_s) else: I_prev = image_prev I_prev = imresize(I_prev.detach(), 1/opt.scale_factor, opt) I_prev = I_prev[:, :, 0:round(1 * fake.shape[2]), 0:round(1 * fake.shape[3])] I_prev = m(I_prev) I_prev = I_prev[:, :, 0:z_curr.shape[2], 0:z_curr.shape[3]] I_prev = functions.upsampling(I_prev, z_curr.shape[2], z_curr.shape[3]) z_curr.requires_grad_() optimizer_z = optim.Adam([z_curr], lr=opt.lr_d) scheduler_z = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizer_z, milestones=[1600], gamma=opt.gamma) def custom_loss(tensor_list_1, tensor_list_2): a = sum([((t1-t2)**2).sum() for t1, t2 in zip(tensor_list_1, tensor_list_2)]) norm = sum([i*j for i, j in [(t.shape[2], t.shape[3]) for t in tensor_list_1]]) return a/norm def custom_loss_bis(tensor_list_1, tensor_list_2): a = sum([((t1-t2)**2).sum() for t1, t2 in zip(tensor_list_1, tensor_list_2)]) norm = sum([j for j in [t.shape[-1] for t in tensor_list_1]]) return a/norm os.mkdir(f"{dir_name}/{n}") disc_mask_zone, disc_output_shape = None, None for i in range(10000): image_cur = G(noise_amp*z_curr + I_prev, I_prev) loss = nn.MSELoss() if opt.use_mask: mask = masks[n] xmin = mask['xmin'] xmax = mask['xmax'] ymin = mask['ymin'] ymax = mask['ymax'] if opt.mse_neigh: fake_parts = [] image_cur_parts = [] max_distance = 7 for dist in range(1, max_distance + 1): coeff = 1 - (dist - 1)/max_distance if xmin - dist >= 0: fake_parts.append(coeff * fake[:, :, xmin - dist, max(0, ymin - dist):(ymax + dist + 1)]) image_cur_parts.append(coeff * image_cur[:, :, xmin - dist, max(0, ymin - dist):(ymax + dist + 1)]) if xmax + dist < image_cur.shape[2]: 
fake_parts.append(coeff * fake[:, :, xmax + dist, max(0, ymin - dist):(ymax + dist + 1)]) image_cur_parts.append(coeff * image_cur[:, :, xmax + dist, max(0, ymin - dist):(ymax + dist + 1)]) if ymin - dist >= 0: fake_parts.append(coeff * fake[:, :, max(0, xmin - dist + 1):(xmax + dist), ymin - dist]) image_cur_parts.append(coeff * image_cur[:, :, max(0, xmin - dist + 1):(xmax + dist), ymin - dist]) if ymax + dist < image_cur.shape[3]: fake_parts.append(coeff * fake[:, :, max(0, xmin - dist + 1):(xmax + dist), ymax + dist]) image_cur_parts.append(coeff * image_cur[:, :, max(0, xmin - dist + 1):(xmax + dist), ymax + dist]) diff = custom_loss_bis(fake_parts, image_cur_parts) else: diff1 = loss(fake[:, :, 0:mask['xmin'], :], image_cur[:, :, 0:mask['xmin'], :]) diff2 = loss(fake[:, :, mask['xmax']+1:, :], image_cur[:, :, mask['xmax']+1:, :]) diff3 = loss(fake[:, :, mask['xmin']:mask['xmax']+1, mask['ymax']+1:], image_cur[:, :, mask['xmin']:mask['xmax']+1, mask['ymax']+1:]) diff4 = loss(fake[:, :, mask['xmin']:mask['xmax']+1, :mask['ymin']], image_cur[:, :, mask['xmin']:mask['xmax']+1, :mask['ymin']]) diff = diff1 + diff2 + diff3 + diff4 else: diff = loss(fake, image_cur) if opt.use_mask and opt.prio_neigh and n >= opt.ss_prio_neigh: mask = masks[n] disc_mask_zone, disc_output_shape = models.get_mask_discriminator(fake, mask, opt, expand_mask_by=7) xmin, xmax = disc_mask_zone['xmin'], disc_mask_zone['xmax'] ymin, ymax = disc_mask_zone['ymin'], disc_mask_zone['ymax'] errD = - D(image_cur)[:, :, xmin:xmax+1, ymin:ymax+1].mean() else: errD = - D(image_cur).mean() (diff + opt.reg * z_curr.abs().mean() + opt.disc_loss * errD).backward(retain_graph=True) optimizer_z.step() # print(z_curr[0,0,10:15,10:15]) if i % 1000 == 0: print(f"** Iteration {i} ** (reg: {opt.reg}; disc loss weight: {opt.disc_loss}; use zopt:" f" {opt.use_zopt})") print(f"MSE Loss: {diff}") print(f"Mean |z|: {z_curr.abs().mean()}") print(f"Max |z|: {z_curr.abs().max()}") print(f"Error Discriminator: {- D(image_cur).mean()}") print(f"Image shape: {fake.shape}") print(f"Mask: {mask}") if disc_mask_zone is not None: print(f"Discriminator output shape: {disc_output_shape}") print(f"Discriminator mask influence zone: {disc_mask_zone}") with open(f"{dir_name}/{n}/report.txt", 'a') as txt_f: txt_f.write(f'Iteration {i} (reg: {opt.reg}; disc loss weight: {opt.disc_loss};' f' use zopt: {opt.use_zopt})\n' f'MSE loss: {diff}\n' f'Mean |z|: {z_curr.abs().mean()}\n' f'Max |z|: {z_curr.abs().max()}\n' f'Error Discriminator: {- D(image_cur).mean()}\n\n\n') plt.imsave(f'{dir_name}/{n}/reconstructed_image.png', functions.convert_image_np(image_cur.detach()), vmin=0, vmax=1) plt.imsave(f'{dir_name}/{n}/target_image.png', functions.convert_image_np(fake), vmin=0, vmax=1) if opt.use_mask: plt.imsave(f'{dir_name}/{n}/target_image_without_mask.png', functions.convert_image_np(fakes_without_mask[n]), vmin=0, vmax=1) # Full reconstruction: copy paste the square in the good zone if opt.use_mask: new_image = copy.deepcopy(fake) mask = masks[n] for i in range(mask['xmin'], mask['xmax']+1): for j in range(mask['ymin'], mask['ymax']+1): new_image[:,:,i,j] = image_cur[:,:,i,j] plt.imsave(f'{dir_name}/{n}/full_reconstruction_after_copy_paste.png', functions.convert_image_np(new_image.detach()), vmin=0, vmax=1) Z_stars.append(z_curr) torch.save(Z_stars, f'{dir_name}/Z_stars.pth') if opt.use_mask: torch.save(masks, f'{dir_name}/masks.pth') n += 1 def SinGAN_generate(Gs, Zs, reals, NoiseAmp, opt, in_s=None, scale_v=1, scale_h=1, n=0, gen_start_scale=0): if in_s is 
None: in_s = torch.full(reals[0].shape, 0, device=opt.device) image_cur = None pad1 = ((opt.ker_size - 1) * opt.num_layer) / 2 m = nn.ZeroPad2d(int(pad1)) for G, Z_opt, noise_amp in zip(Gs, Zs, NoiseAmp): nzx = (Z_opt.shape[2]-pad1*2)*scale_v nzy = (Z_opt.shape[3]-pad1*2)*scale_h image_prev = image_cur if n == 0: z_curr = functions.generate_noise([1, nzx, nzy], device=opt.device) z_curr = z_curr.expand(1, 3, z_curr.shape[2], z_curr.shape[3]) z_curr = m(z_curr) else: z_curr = functions.generate_noise([opt.nc_z,nzx,nzy], device=opt.device) z_curr = m(z_curr) if image_prev is None: I_prev = m(in_s) else: I_prev = image_prev I_prev = imresize(I_prev, 1/opt.scale_factor, opt) I_prev = I_prev[:, :, 0:round(scale_v * reals[n].shape[2]), 0:round(scale_h * reals[n].shape[3])] I_prev = m(I_prev) I_prev = I_prev[:, :, 0:z_curr.shape[2], 0:z_curr.shape[3]] I_prev = functions.upsampling(I_prev, z_curr.shape[2], z_curr.shape[3]) if n < gen_start_scale: z_curr = Z_opt z_in = noise_amp*z_curr + I_prev image_cur = G(z_in.detach(), I_prev) n += 1 return image_cur.detach()
[ "SinGAN.functions.adjust_scales2image", "os.mkdir", "copy.deepcopy", "SinGAN.functions.post_config", "os.makedirs", "SinGAN.imresize.imresize", "SinGAN.functions.upsampling", "SinGAN.functions.creat_reals_pyramid", "SinGAN.functions.convert_image_np", "config.get_arguments", "SinGAN.functions.np2torch", "SinGAN.functions.generate_noise" ]
[((315, 335), 'copy.deepcopy', 'copy.deepcopy', (['image'], {}), '(image)\n', (328, 335), False, 'import copy\n'), ((901, 916), 'config.get_arguments', 'get_arguments', ([], {}), '()\n', (914, 916), False, 'from config import get_arguments\n'), ((2867, 2893), 'SinGAN.functions.post_config', 'functions.post_config', (['opt'], {}), '(opt)\n', (2888, 2893), True, 'import SinGAN.functions as functions\n'), ((4321, 4350), 'SinGAN.functions.np2torch', 'functions.np2torch', (['fake', 'opt'], {}), '(fake, opt)\n', (4339, 4350), True, 'import SinGAN.functions as functions\n'), ((4355, 4395), 'SinGAN.functions.adjust_scales2image', 'functions.adjust_scales2image', (['fake', 'opt'], {}), '(fake, opt)\n', (4384, 4395), True, 'import SinGAN.functions as functions\n'), ((4407, 4438), 'SinGAN.imresize.imresize', 'imresize', (['fake', 'opt.scale1', 'opt'], {}), '(fake, opt.scale1, opt)\n', (4415, 4438), False, 'from SinGAN.imresize import imresize\n'), ((5598, 5634), 'os.makedirs', 'os.makedirs', (['dir_name'], {'exist_ok': '(True)'}), '(dir_name, exist_ok=True)\n', (5609, 5634), False, 'import os\n'), ((4691, 4742), 'SinGAN.functions.creat_reals_pyramid', 'functions.creat_reals_pyramid', (['fake', 'opt'], {'mask': 'mask'}), '(fake, opt, mask=mask)\n', (4720, 4742), True, 'import SinGAN.functions as functions\n'), ((4895, 4937), 'SinGAN.functions.np2torch', 'functions.np2torch', (['fake_without_mask', 'opt'], {}), '(fake_without_mask, opt)\n', (4913, 4937), True, 'import SinGAN.functions as functions\n'), ((4966, 5010), 'SinGAN.imresize.imresize', 'imresize', (['fake_without_mask', 'opt.scale1', 'opt'], {}), '(fake_without_mask, opt.scale1, opt)\n', (4974, 5010), False, 'from SinGAN.imresize import imresize\n'), ((5043, 5107), 'SinGAN.functions.creat_reals_pyramid', 'functions.creat_reals_pyramid', (['fake_without_mask', 'opt'], {'mask': 'mask'}), '(fake_without_mask, opt, mask=mask)\n', (5072, 5107), True, 'import SinGAN.functions as functions\n'), ((5142, 5193), 'SinGAN.functions.creat_reals_pyramid', 'functions.creat_reals_pyramid', (['fake', 'opt'], {'mask': 'None'}), '(fake, opt, mask=None)\n', (5171, 5193), True, 'import SinGAN.functions as functions\n'), ((7475, 7502), 'os.mkdir', 'os.mkdir', (['f"""{dir_name}/{n}"""'], {}), "(f'{dir_name}/{n}')\n", (7483, 7502), False, 'import os\n'), ((6680, 6742), 'SinGAN.functions.upsampling', 'functions.upsampling', (['I_prev', 'z_curr.shape[2]', 'z_curr.shape[3]'], {}), '(I_prev, z_curr.shape[2], z_curr.shape[3])\n', (6700, 6742), True, 'import SinGAN.functions as functions\n'), ((12354, 12386), 'SinGAN.functions.convert_image_np', 'functions.convert_image_np', (['fake'], {}), '(fake)\n', (12380, 12386), True, 'import SinGAN.functions as functions\n'), ((12711, 12730), 'copy.deepcopy', 'copy.deepcopy', (['fake'], {}), '(fake)\n', (12724, 12730), False, 'import copy\n'), ((13862, 13920), 'SinGAN.functions.generate_noise', 'functions.generate_noise', (['[1, nzx, nzy]'], {'device': 'opt.device'}), '([1, nzx, nzy], device=opt.device)\n', (13886, 13920), True, 'import SinGAN.functions as functions\n'), ((14062, 14127), 'SinGAN.functions.generate_noise', 'functions.generate_noise', (['[opt.nc_z, nzx, nzy]'], {'device': 'opt.device'}), '([opt.nc_z, nzx, nzy], device=opt.device)\n', (14086, 14127), True, 'import SinGAN.functions as functions\n'), ((14285, 14328), 'SinGAN.imresize.imresize', 'imresize', (['I_prev', '(1 / opt.scale_factor)', 'opt'], {}), '(I_prev, 1 / opt.scale_factor, opt)\n', (14293, 14328), False, 'from SinGAN.imresize import imresize\n'), ((14561, 
14623), 'SinGAN.functions.upsampling', 'functions.upsampling', (['I_prev', 'z_curr.shape[2]', 'z_curr.shape[3]'], {}), '(I_prev, z_curr.shape[2], z_curr.shape[3])\n', (14581, 14623), True, 'import SinGAN.functions as functions\n'), ((5970, 6028), 'SinGAN.functions.generate_noise', 'functions.generate_noise', (['[1, nzx, nzy]'], {'device': 'opt.device'}), '([1, nzx, nzy], device=opt.device)\n', (5994, 6028), True, 'import SinGAN.functions as functions\n'), ((6186, 6251), 'SinGAN.functions.generate_noise', 'functions.generate_noise', (['[opt.nc_z, nzx, nzy]'], {'device': 'opt.device'}), '([opt.nc_z, nzx, nzy], device=opt.device)\n', (6210, 6251), True, 'import SinGAN.functions as functions\n'), ((12501, 12550), 'SinGAN.functions.convert_image_np', 'functions.convert_image_np', (['fakes_without_mask[n]'], {}), '(fakes_without_mask[n])\n', (12527, 12550), True, 'import SinGAN.functions as functions\n')]
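The inpainting script above hinges on put_mask: blank a square region with -1 and remember its bounding box so the reconstruction losses can treat masked and unmasked pixels differently. The sketch below restates that idea with NumPy instead of torch so it runs without the SinGAN code; the function name put_mask_np and the toy tensor shape are ours, not the author's.

import numpy as np

def put_mask_np(image, n_pixels=3, x0=1, y0=1):
    """Blank an n_pixels x n_pixels square with -1 and return its bounding box."""
    out = image.copy()
    out[..., x0:x0 + n_pixels, y0:y0 + n_pixels] = -1
    box = {'xmin': x0, 'xmax': x0 + n_pixels - 1,
           'ymin': y0, 'ymax': y0 + n_pixels - 1}
    return out, box

img = np.ones((1, 3, 6, 6))                      # (batch, channel, height, width)
masked, box = put_mask_np(img)
region = masked[..., box['xmin']:box['xmax'] + 1, box['ymin']:box['ymax'] + 1]
assert (region == -1).all() and (img == 1).all()   # the input array is left untouched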
# -*- coding: utf-8 -*- """ Test of the DOS/bandstructure visualizations """ import os import pytest from matplotlib.pyplot import gcf CURRENT_DIR = os.path.dirname(os.path.abspath(__file__)) HDFTEST_DIR = os.path.join(CURRENT_DIR, 'files/hdf5_reader') @pytest.mark.mpl_image_compare(baseline_dir='files/fleur_vis/', filename='bands_defaults.png') def test_plot_bands_defaults_mpl(): from masci_tools.io.parsers.hdf5 import HDF5Reader from masci_tools.io.parsers.hdf5.recipes import FleurBands from masci_tools.vis.fleur import plot_fleur_bands TEST_BANDDOS_FILE = os.path.join(HDFTEST_DIR, 'banddos_bands.hdf') with HDF5Reader(TEST_BANDDOS_FILE) as h5reader: data, attributes = h5reader.read(recipe=FleurBands) gcf().clear() plot_fleur_bands(data, attributes, show=False, markersize=30) return gcf() @pytest.mark.mpl_image_compare(baseline_dir='files/fleur_vis/', filename='bands_weighted_non_spinpol.png') def test_plot_bands_weighted_non_spinpol_mpl(): from masci_tools.io.parsers.hdf5 import HDF5Reader from masci_tools.io.parsers.hdf5.recipes import FleurBands from masci_tools.vis.fleur import plot_fleur_bands TEST_BANDDOS_FILE = os.path.join(HDFTEST_DIR, 'banddos_bands.hdf') with HDF5Reader(TEST_BANDDOS_FILE) as h5reader: data, attributes = h5reader.read(recipe=FleurBands) gcf().clear() plot_fleur_bands(data, attributes, show=False, weight='MT:1d') return gcf() @pytest.mark.mpl_image_compare(baseline_dir='files/fleur_vis/', filename='bands_defaults_spinpol.png') def test_plot_bands_spinpol_defaults_mpl(): from masci_tools.io.parsers.hdf5 import HDF5Reader from masci_tools.io.parsers.hdf5.recipes import FleurBands from masci_tools.vis.fleur import plot_fleur_bands TEST_BANDDOS_FILE = os.path.join(HDFTEST_DIR, 'banddos_spinpol_bands.hdf') with HDF5Reader(TEST_BANDDOS_FILE) as h5reader: data, attributes = h5reader.read(recipe=FleurBands) gcf().clear() plot_fleur_bands(data, attributes, show=False, markersize=30) return gcf() @pytest.mark.mpl_image_compare(baseline_dir='files/fleur_vis/', filename='bands_weighted_spinpol.png') def test_plot_bands_weighted_spinpol_mpl(): from masci_tools.io.parsers.hdf5 import HDF5Reader from masci_tools.io.parsers.hdf5.recipes import FleurBands from masci_tools.vis.fleur import plot_fleur_bands TEST_BANDDOS_FILE = os.path.join(HDFTEST_DIR, 'banddos_spinpol_bands.hdf') with HDF5Reader(TEST_BANDDOS_FILE) as h5reader: data, attributes = h5reader.read(recipe=FleurBands) gcf().clear() plot_fleur_bands(data, attributes, show=False, weight='MT:1d') return gcf() @pytest.mark.mpl_image_compare(baseline_dir='files/fleur_vis/', filename='bands_spinpol_hide.png') def test_plot_bands_spinpol_no_spinpol_mpl(): from masci_tools.io.parsers.hdf5 import HDF5Reader from masci_tools.io.parsers.hdf5.recipes import FleurBands from masci_tools.vis.fleur import plot_fleur_bands TEST_BANDDOS_FILE = os.path.join(HDFTEST_DIR, 'banddos_spinpol_bands.hdf') with HDF5Reader(TEST_BANDDOS_FILE) as h5reader: data, attributes = h5reader.read(recipe=FleurBands) gcf().clear() plot_fleur_bands(data, attributes, show=False, markersize=30, spinpol=False) return gcf() @pytest.mark.mpl_image_compare(baseline_dir='files/fleur_vis/', filename='bands_only_spin.png') def test_plot_bands_spinpol_only_spin_mpl(): from masci_tools.io.parsers.hdf5 import HDF5Reader from masci_tools.io.parsers.hdf5.recipes import FleurBands from masci_tools.vis.fleur import plot_fleur_bands TEST_BANDDOS_FILE = os.path.join(HDFTEST_DIR, 'banddos_spinpol_bands.hdf') with HDF5Reader(TEST_BANDDOS_FILE) as h5reader: data, attributes = 
h5reader.read(recipe=FleurBands) gcf().clear() plot_fleur_bands(data, attributes, show=False, markersize=30, only_spin='up') return gcf() @pytest.mark.mpl_image_compare(baseline_dir='files/fleur_vis/', filename='dos_defaults.png') def test_plot_dos_defaults_mpl(): from masci_tools.io.parsers.hdf5 import HDF5Reader from masci_tools.io.parsers.hdf5.recipes import FleurDOS from masci_tools.vis.fleur import plot_fleur_dos TEST_BANDDOS_FILE = os.path.join(HDFTEST_DIR, 'banddos_dos.hdf') with HDF5Reader(TEST_BANDDOS_FILE) as h5reader: data, attributes = h5reader.read(recipe=FleurDOS) gcf().clear() plot_fleur_dos(data, attributes, show=False) return gcf() @pytest.mark.mpl_image_compare(baseline_dir='files/fleur_vis/', filename='dos_param_by_label.png') def test_plot_dos_param_change_by_label_mpl(): from masci_tools.io.parsers.hdf5 import HDF5Reader from masci_tools.io.parsers.hdf5.recipes import FleurDOS from masci_tools.vis.fleur import plot_fleur_dos TEST_BANDDOS_FILE = os.path.join(HDFTEST_DIR, 'banddos_dos.hdf') with HDF5Reader(TEST_BANDDOS_FILE) as h5reader: data, attributes = h5reader.read(recipe=FleurDOS) gcf().clear() plot_fleur_dos(data, attributes, show=False, color={'MT:1_up': 'red'}, linewidth={'Total_up': 6}) return gcf() @pytest.mark.mpl_image_compare(baseline_dir='files/fleur_vis/', filename='dos_param_by_label_with_general_params.png') def test_plot_dos_param_change_by_label_general_dicts_mpl(): from masci_tools.io.parsers.hdf5 import HDF5Reader from masci_tools.io.parsers.hdf5.recipes import FleurDOS from masci_tools.vis.fleur import plot_fleur_dos TEST_BANDDOS_FILE = os.path.join(HDFTEST_DIR, 'banddos_dos.hdf') with HDF5Reader(TEST_BANDDOS_FILE) as h5reader: data, attributes = h5reader.read(recipe=FleurDOS) gcf().clear() plot_fleur_dos(data, attributes, show=False, color={'MT:1_up': 'red'}, linewidth={'Total_up': 6}, limits={'energy': (-5, 5)}, lines={'vertical': [-1, 0, 1]}) return gcf() @pytest.mark.mpl_image_compare(baseline_dir='files/fleur_vis/', filename='spinpol_dos_defaults.png') def test_plot_spinpol_dos_defaults_mpl(): from masci_tools.io.parsers.hdf5 import HDF5Reader from masci_tools.io.parsers.hdf5.recipes import FleurDOS from masci_tools.vis.fleur import plot_fleur_dos TEST_BANDDOS_FILE = os.path.join(HDFTEST_DIR, 'banddos_spinpol_dos.hdf') with HDF5Reader(TEST_BANDDOS_FILE) as h5reader: data, attributes = h5reader.read(recipe=FleurDOS) gcf().clear() plot_fleur_dos(data, attributes, show=False) return gcf() @pytest.mark.mpl_image_compare(baseline_dir='files/fleur_vis/', filename='dos_selection.png') def test_plot_dos_selection_mpl(): from masci_tools.io.parsers.hdf5 import HDF5Reader from masci_tools.io.parsers.hdf5.recipes import FleurDOS from masci_tools.vis.fleur import plot_fleur_dos TEST_BANDDOS_FILE = os.path.join(HDFTEST_DIR, 'banddos_dos.hdf') with HDF5Reader(TEST_BANDDOS_FILE) as h5reader: data, attributes = h5reader.read(recipe=FleurDOS) gcf().clear() plot_fleur_dos(data, attributes, show=False, show_total=False, show_interstitial=False, show_atoms=1, show_lresolved=2, plot_keys='MT:1p') return gcf() @pytest.mark.mpl_image_compare(baseline_dir='files/fleur_vis/', filename='bands_character.png') def test_plot_bands_characterize_mpl(): from masci_tools.io.parsers.hdf5 import HDF5Reader from masci_tools.io.parsers.hdf5.recipes import FleurBands from masci_tools.vis.fleur import plot_fleur_bands_characterize TEST_BANDDOS_FILE = os.path.join(HDFTEST_DIR, 'banddos_spinpol_bands.hdf') with HDF5Reader(TEST_BANDDOS_FILE) as h5reader: data, attributes = 
h5reader.read(recipe=FleurBands) gcf().clear() plot_fleur_bands_characterize(data, attributes, ['MT:1s', 'MT:1p', 'MT:1d', 'MT:1f'], ['darkblue', 'darkred', 'darkgreen', 'darkorange'], show=False, markersize=30, only_spin='up') return gcf()
[ "os.path.abspath", "masci_tools.io.parsers.hdf5.HDF5Reader", "masci_tools.vis.fleur.plot_fleur_dos", "masci_tools.vis.fleur.plot_fleur_bands_characterize", "pytest.mark.mpl_image_compare", "masci_tools.vis.fleur.plot_fleur_bands", "matplotlib.pyplot.gcf", "os.path.join" ]
[((207, 253), 'os.path.join', 'os.path.join', (['CURRENT_DIR', '"""files/hdf5_reader"""'], {}), "(CURRENT_DIR, 'files/hdf5_reader')\n", (219, 253), False, 'import os\n'), ((257, 355), 'pytest.mark.mpl_image_compare', 'pytest.mark.mpl_image_compare', ([], {'baseline_dir': '"""files/fleur_vis/"""', 'filename': '"""bands_defaults.png"""'}), "(baseline_dir='files/fleur_vis/', filename=\n 'bands_defaults.png')\n", (286, 355), False, 'import pytest\n'), ((852, 962), 'pytest.mark.mpl_image_compare', 'pytest.mark.mpl_image_compare', ([], {'baseline_dir': '"""files/fleur_vis/"""', 'filename': '"""bands_weighted_non_spinpol.png"""'}), "(baseline_dir='files/fleur_vis/', filename=\n 'bands_weighted_non_spinpol.png')\n", (881, 962), False, 'import pytest\n'), ((1472, 1578), 'pytest.mark.mpl_image_compare', 'pytest.mark.mpl_image_compare', ([], {'baseline_dir': '"""files/fleur_vis/"""', 'filename': '"""bands_defaults_spinpol.png"""'}), "(baseline_dir='files/fleur_vis/', filename=\n 'bands_defaults_spinpol.png')\n", (1501, 1578), False, 'import pytest\n'), ((2091, 2197), 'pytest.mark.mpl_image_compare', 'pytest.mark.mpl_image_compare', ([], {'baseline_dir': '"""files/fleur_vis/"""', 'filename': '"""bands_weighted_spinpol.png"""'}), "(baseline_dir='files/fleur_vis/', filename=\n 'bands_weighted_spinpol.png')\n", (2120, 2197), False, 'import pytest\n'), ((2711, 2813), 'pytest.mark.mpl_image_compare', 'pytest.mark.mpl_image_compare', ([], {'baseline_dir': '"""files/fleur_vis/"""', 'filename': '"""bands_spinpol_hide.png"""'}), "(baseline_dir='files/fleur_vis/', filename=\n 'bands_spinpol_hide.png')\n", (2740, 2813), False, 'import pytest\n'), ((3343, 3442), 'pytest.mark.mpl_image_compare', 'pytest.mark.mpl_image_compare', ([], {'baseline_dir': '"""files/fleur_vis/"""', 'filename': '"""bands_only_spin.png"""'}), "(baseline_dir='files/fleur_vis/', filename=\n 'bands_only_spin.png')\n", (3372, 3442), False, 'import pytest\n'), ((3972, 4068), 'pytest.mark.mpl_image_compare', 'pytest.mark.mpl_image_compare', ([], {'baseline_dir': '"""files/fleur_vis/"""', 'filename': '"""dos_defaults.png"""'}), "(baseline_dir='files/fleur_vis/', filename=\n 'dos_defaults.png')\n", (4001, 4068), False, 'import pytest\n'), ((4538, 4640), 'pytest.mark.mpl_image_compare', 'pytest.mark.mpl_image_compare', ([], {'baseline_dir': '"""files/fleur_vis/"""', 'filename': '"""dos_param_by_label.png"""'}), "(baseline_dir='files/fleur_vis/', filename=\n 'dos_param_by_label.png')\n", (4567, 4640), False, 'import pytest\n'), ((5176, 5298), 'pytest.mark.mpl_image_compare', 'pytest.mark.mpl_image_compare', ([], {'baseline_dir': '"""files/fleur_vis/"""', 'filename': '"""dos_param_by_label_with_general_params.png"""'}), "(baseline_dir='files/fleur_vis/', filename=\n 'dos_param_by_label_with_general_params.png')\n", (5205, 5298), False, 'import pytest\n'), ((6022, 6126), 'pytest.mark.mpl_image_compare', 'pytest.mark.mpl_image_compare', ([], {'baseline_dir': '"""files/fleur_vis/"""', 'filename': '"""spinpol_dos_defaults.png"""'}), "(baseline_dir='files/fleur_vis/', filename=\n 'spinpol_dos_defaults.png')\n", (6051, 6126), False, 'import pytest\n'), ((6612, 6709), 'pytest.mark.mpl_image_compare', 'pytest.mark.mpl_image_compare', ([], {'baseline_dir': '"""files/fleur_vis/"""', 'filename': '"""dos_selection.png"""'}), "(baseline_dir='files/fleur_vis/', filename=\n 'dos_selection.png')\n", (6641, 6709), False, 'import pytest\n'), ((7407, 7506), 'pytest.mark.mpl_image_compare', 'pytest.mark.mpl_image_compare', ([], {'baseline_dir': '"""files/fleur_vis/"""', 
'filename': '"""bands_character.png"""'}), "(baseline_dir='files/fleur_vis/', filename=\n 'bands_character.png')\n", (7436, 7506), False, 'import pytest\n'), ((166, 191), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (181, 191), False, 'import os\n'), ((585, 631), 'os.path.join', 'os.path.join', (['HDFTEST_DIR', '"""banddos_bands.hdf"""'], {}), "(HDFTEST_DIR, 'banddos_bands.hdf')\n", (597, 631), False, 'import os\n'), ((769, 830), 'masci_tools.vis.fleur.plot_fleur_bands', 'plot_fleur_bands', (['data', 'attributes'], {'show': '(False)', 'markersize': '(30)'}), '(data, attributes, show=False, markersize=30)\n', (785, 830), False, 'from masci_tools.vis.fleur import plot_fleur_bands\n'), ((843, 848), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (846, 848), False, 'from matplotlib.pyplot import gcf\n'), ((1204, 1250), 'os.path.join', 'os.path.join', (['HDFTEST_DIR', '"""banddos_bands.hdf"""'], {}), "(HDFTEST_DIR, 'banddos_bands.hdf')\n", (1216, 1250), False, 'import os\n'), ((1388, 1450), 'masci_tools.vis.fleur.plot_fleur_bands', 'plot_fleur_bands', (['data', 'attributes'], {'show': '(False)', 'weight': '"""MT:1d"""'}), "(data, attributes, show=False, weight='MT:1d')\n", (1404, 1450), False, 'from masci_tools.vis.fleur import plot_fleur_bands\n'), ((1463, 1468), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (1466, 1468), False, 'from matplotlib.pyplot import gcf\n'), ((1816, 1870), 'os.path.join', 'os.path.join', (['HDFTEST_DIR', '"""banddos_spinpol_bands.hdf"""'], {}), "(HDFTEST_DIR, 'banddos_spinpol_bands.hdf')\n", (1828, 1870), False, 'import os\n'), ((2008, 2069), 'masci_tools.vis.fleur.plot_fleur_bands', 'plot_fleur_bands', (['data', 'attributes'], {'show': '(False)', 'markersize': '(30)'}), '(data, attributes, show=False, markersize=30)\n', (2024, 2069), False, 'from masci_tools.vis.fleur import plot_fleur_bands\n'), ((2082, 2087), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (2085, 2087), False, 'from matplotlib.pyplot import gcf\n'), ((2435, 2489), 'os.path.join', 'os.path.join', (['HDFTEST_DIR', '"""banddos_spinpol_bands.hdf"""'], {}), "(HDFTEST_DIR, 'banddos_spinpol_bands.hdf')\n", (2447, 2489), False, 'import os\n'), ((2627, 2689), 'masci_tools.vis.fleur.plot_fleur_bands', 'plot_fleur_bands', (['data', 'attributes'], {'show': '(False)', 'weight': '"""MT:1d"""'}), "(data, attributes, show=False, weight='MT:1d')\n", (2643, 2689), False, 'from masci_tools.vis.fleur import plot_fleur_bands\n'), ((2702, 2707), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (2705, 2707), False, 'from matplotlib.pyplot import gcf\n'), ((3053, 3107), 'os.path.join', 'os.path.join', (['HDFTEST_DIR', '"""banddos_spinpol_bands.hdf"""'], {}), "(HDFTEST_DIR, 'banddos_spinpol_bands.hdf')\n", (3065, 3107), False, 'import os\n'), ((3245, 3321), 'masci_tools.vis.fleur.plot_fleur_bands', 'plot_fleur_bands', (['data', 'attributes'], {'show': '(False)', 'markersize': '(30)', 'spinpol': '(False)'}), '(data, attributes, show=False, markersize=30, spinpol=False)\n', (3261, 3321), False, 'from masci_tools.vis.fleur import plot_fleur_bands\n'), ((3334, 3339), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (3337, 3339), False, 'from matplotlib.pyplot import gcf\n'), ((3681, 3735), 'os.path.join', 'os.path.join', (['HDFTEST_DIR', '"""banddos_spinpol_bands.hdf"""'], {}), "(HDFTEST_DIR, 'banddos_spinpol_bands.hdf')\n", (3693, 3735), False, 'import os\n'), ((3873, 3950), 'masci_tools.vis.fleur.plot_fleur_bands', 'plot_fleur_bands', (['data', 'attributes'], {'show': '(False)', 
'markersize': '(30)', 'only_spin': '"""up"""'}), "(data, attributes, show=False, markersize=30, only_spin='up')\n", (3889, 3950), False, 'from masci_tools.vis.fleur import plot_fleur_bands\n'), ((3963, 3968), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (3966, 3968), False, 'from matplotlib.pyplot import gcf\n'), ((4292, 4336), 'os.path.join', 'os.path.join', (['HDFTEST_DIR', '"""banddos_dos.hdf"""'], {}), "(HDFTEST_DIR, 'banddos_dos.hdf')\n", (4304, 4336), False, 'import os\n'), ((4472, 4516), 'masci_tools.vis.fleur.plot_fleur_dos', 'plot_fleur_dos', (['data', 'attributes'], {'show': '(False)'}), '(data, attributes, show=False)\n', (4486, 4516), False, 'from masci_tools.vis.fleur import plot_fleur_dos\n'), ((4529, 4534), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (4532, 4534), False, 'from matplotlib.pyplot import gcf\n'), ((4877, 4921), 'os.path.join', 'os.path.join', (['HDFTEST_DIR', '"""banddos_dos.hdf"""'], {}), "(HDFTEST_DIR, 'banddos_dos.hdf')\n", (4889, 4921), False, 'import os\n'), ((5057, 5158), 'masci_tools.vis.fleur.plot_fleur_dos', 'plot_fleur_dos', (['data', 'attributes'], {'show': '(False)', 'color': "{'MT:1_up': 'red'}", 'linewidth': "{'Total_up': 6}"}), "(data, attributes, show=False, color={'MT:1_up': 'red'},\n linewidth={'Total_up': 6})\n", (5071, 5158), False, 'from masci_tools.vis.fleur import plot_fleur_dos\n'), ((5167, 5172), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (5170, 5172), False, 'from matplotlib.pyplot import gcf\n'), ((5549, 5593), 'os.path.join', 'os.path.join', (['HDFTEST_DIR', '"""banddos_dos.hdf"""'], {}), "(HDFTEST_DIR, 'banddos_dos.hdf')\n", (5561, 5593), False, 'import os\n'), ((5729, 5895), 'masci_tools.vis.fleur.plot_fleur_dos', 'plot_fleur_dos', (['data', 'attributes'], {'show': '(False)', 'color': "{'MT:1_up': 'red'}", 'linewidth': "{'Total_up': 6}", 'limits': "{'energy': (-5, 5)}", 'lines': "{'vertical': [-1, 0, 1]}"}), "(data, attributes, show=False, color={'MT:1_up': 'red'},\n linewidth={'Total_up': 6}, limits={'energy': (-5, 5)}, lines={\n 'vertical': [-1, 0, 1]})\n", (5743, 5895), False, 'from masci_tools.vis.fleur import plot_fleur_dos\n'), ((6013, 6018), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (6016, 6018), False, 'from matplotlib.pyplot import gcf\n'), ((6358, 6410), 'os.path.join', 'os.path.join', (['HDFTEST_DIR', '"""banddos_spinpol_dos.hdf"""'], {}), "(HDFTEST_DIR, 'banddos_spinpol_dos.hdf')\n", (6370, 6410), False, 'import os\n'), ((6546, 6590), 'masci_tools.vis.fleur.plot_fleur_dos', 'plot_fleur_dos', (['data', 'attributes'], {'show': '(False)'}), '(data, attributes, show=False)\n', (6560, 6590), False, 'from masci_tools.vis.fleur import plot_fleur_dos\n'), ((6603, 6608), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (6606, 6608), False, 'from matplotlib.pyplot import gcf\n'), ((6934, 6978), 'os.path.join', 'os.path.join', (['HDFTEST_DIR', '"""banddos_dos.hdf"""'], {}), "(HDFTEST_DIR, 'banddos_dos.hdf')\n", (6946, 6978), False, 'import os\n'), ((7114, 7256), 'masci_tools.vis.fleur.plot_fleur_dos', 'plot_fleur_dos', (['data', 'attributes'], {'show': '(False)', 'show_total': '(False)', 'show_interstitial': '(False)', 'show_atoms': '(1)', 'show_lresolved': '(2)', 'plot_keys': '"""MT:1p"""'}), "(data, attributes, show=False, show_total=False,\n show_interstitial=False, show_atoms=1, show_lresolved=2, plot_keys='MT:1p')\n", (7128, 7256), False, 'from masci_tools.vis.fleur import plot_fleur_dos\n'), ((7398, 7403), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (7401, 7403), False, 'from 
matplotlib.pyplot import gcf\n'), ((7753, 7807), 'os.path.join', 'os.path.join', (['HDFTEST_DIR', '"""banddos_spinpol_bands.hdf"""'], {}), "(HDFTEST_DIR, 'banddos_spinpol_bands.hdf')\n", (7765, 7807), False, 'import os\n'), ((7945, 8134), 'masci_tools.vis.fleur.plot_fleur_bands_characterize', 'plot_fleur_bands_characterize', (['data', 'attributes', "['MT:1s', 'MT:1p', 'MT:1d', 'MT:1f']", "['darkblue', 'darkred', 'darkgreen', 'darkorange']"], {'show': '(False)', 'markersize': '(30)', 'only_spin': '"""up"""'}), "(data, attributes, ['MT:1s', 'MT:1p', 'MT:1d',\n 'MT:1f'], ['darkblue', 'darkred', 'darkgreen', 'darkorange'], show=\n False, markersize=30, only_spin='up')\n", (7974, 8134), False, 'from masci_tools.vis.fleur import plot_fleur_bands_characterize\n'), ((8308, 8313), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (8311, 8313), False, 'from matplotlib.pyplot import gcf\n'), ((642, 671), 'masci_tools.io.parsers.hdf5.HDF5Reader', 'HDF5Reader', (['TEST_BANDDOS_FILE'], {}), '(TEST_BANDDOS_FILE)\n', (652, 671), False, 'from masci_tools.io.parsers.hdf5 import HDF5Reader\n'), ((1261, 1290), 'masci_tools.io.parsers.hdf5.HDF5Reader', 'HDF5Reader', (['TEST_BANDDOS_FILE'], {}), '(TEST_BANDDOS_FILE)\n', (1271, 1290), False, 'from masci_tools.io.parsers.hdf5 import HDF5Reader\n'), ((1881, 1910), 'masci_tools.io.parsers.hdf5.HDF5Reader', 'HDF5Reader', (['TEST_BANDDOS_FILE'], {}), '(TEST_BANDDOS_FILE)\n', (1891, 1910), False, 'from masci_tools.io.parsers.hdf5 import HDF5Reader\n'), ((2500, 2529), 'masci_tools.io.parsers.hdf5.HDF5Reader', 'HDF5Reader', (['TEST_BANDDOS_FILE'], {}), '(TEST_BANDDOS_FILE)\n', (2510, 2529), False, 'from masci_tools.io.parsers.hdf5 import HDF5Reader\n'), ((3118, 3147), 'masci_tools.io.parsers.hdf5.HDF5Reader', 'HDF5Reader', (['TEST_BANDDOS_FILE'], {}), '(TEST_BANDDOS_FILE)\n', (3128, 3147), False, 'from masci_tools.io.parsers.hdf5 import HDF5Reader\n'), ((3746, 3775), 'masci_tools.io.parsers.hdf5.HDF5Reader', 'HDF5Reader', (['TEST_BANDDOS_FILE'], {}), '(TEST_BANDDOS_FILE)\n', (3756, 3775), False, 'from masci_tools.io.parsers.hdf5 import HDF5Reader\n'), ((4347, 4376), 'masci_tools.io.parsers.hdf5.HDF5Reader', 'HDF5Reader', (['TEST_BANDDOS_FILE'], {}), '(TEST_BANDDOS_FILE)\n', (4357, 4376), False, 'from masci_tools.io.parsers.hdf5 import HDF5Reader\n'), ((4932, 4961), 'masci_tools.io.parsers.hdf5.HDF5Reader', 'HDF5Reader', (['TEST_BANDDOS_FILE'], {}), '(TEST_BANDDOS_FILE)\n', (4942, 4961), False, 'from masci_tools.io.parsers.hdf5 import HDF5Reader\n'), ((5604, 5633), 'masci_tools.io.parsers.hdf5.HDF5Reader', 'HDF5Reader', (['TEST_BANDDOS_FILE'], {}), '(TEST_BANDDOS_FILE)\n', (5614, 5633), False, 'from masci_tools.io.parsers.hdf5 import HDF5Reader\n'), ((6421, 6450), 'masci_tools.io.parsers.hdf5.HDF5Reader', 'HDF5Reader', (['TEST_BANDDOS_FILE'], {}), '(TEST_BANDDOS_FILE)\n', (6431, 6450), False, 'from masci_tools.io.parsers.hdf5 import HDF5Reader\n'), ((6989, 7018), 'masci_tools.io.parsers.hdf5.HDF5Reader', 'HDF5Reader', (['TEST_BANDDOS_FILE'], {}), '(TEST_BANDDOS_FILE)\n', (6999, 7018), False, 'from masci_tools.io.parsers.hdf5 import HDF5Reader\n'), ((7818, 7847), 'masci_tools.io.parsers.hdf5.HDF5Reader', 'HDF5Reader', (['TEST_BANDDOS_FILE'], {}), '(TEST_BANDDOS_FILE)\n', (7828, 7847), False, 'from masci_tools.io.parsers.hdf5 import HDF5Reader\n'), ((750, 755), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (753, 755), False, 'from matplotlib.pyplot import gcf\n'), ((1369, 1374), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (1372, 1374), False, 'from 
matplotlib.pyplot import gcf\n'), ((1989, 1994), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (1992, 1994), False, 'from matplotlib.pyplot import gcf\n'), ((2608, 2613), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (2611, 2613), False, 'from matplotlib.pyplot import gcf\n'), ((3226, 3231), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (3229, 3231), False, 'from matplotlib.pyplot import gcf\n'), ((3854, 3859), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (3857, 3859), False, 'from matplotlib.pyplot import gcf\n'), ((4453, 4458), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (4456, 4458), False, 'from matplotlib.pyplot import gcf\n'), ((5038, 5043), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (5041, 5043), False, 'from matplotlib.pyplot import gcf\n'), ((5710, 5715), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (5713, 5715), False, 'from matplotlib.pyplot import gcf\n'), ((6527, 6532), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (6530, 6532), False, 'from matplotlib.pyplot import gcf\n'), ((7095, 7100), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (7098, 7100), False, 'from matplotlib.pyplot import gcf\n'), ((7926, 7931), 'matplotlib.pyplot.gcf', 'gcf', ([], {}), '()\n', (7929, 7931), False, 'from matplotlib.pyplot import gcf\n')]
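The extraction tuples above record calls from masci-tools' Fleur plotting tests. A minimal sketch of the pattern they capture, reading a banddos file and plotting the DOS; the recipe import path and the test-data directory are assumptions:

import os
from masci_tools.io.parsers.hdf5 import HDF5Reader
from masci_tools.io.parsers.hdf5.recipes import FleurDOS  # assumed recipe location
from masci_tools.vis.fleur import plot_fleur_dos

HDFTEST_DIR = 'files/hdf5_reader'  # hypothetical test-data directory
TEST_BANDDOS_FILE = os.path.join(HDFTEST_DIR, 'banddos_dos.hdf')

with HDF5Reader(TEST_BANDDOS_FILE) as h5reader:
    # reads the datasets and attributes needed for a DOS plot
    data, attributes = h5reader.read(recipe=FleurDOS)

# keyword usage mirrors the recorded call
plot_fleur_dos(data, attributes, show=False)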
# -*- coding: utf-8 -*-
from collections import namedtuple
from logging import getLogger

from kafka.common import OffsetRequest, check_error, OffsetFetchRequest, UnknownTopicOrPartitionError

logger = getLogger("offset-fetcher")
OffsetsStruct = namedtuple("OffsetsStruct", ["commit", "produced"])


class OffsetsFetcher(object):
    def __init__(self, client, topic, group_id):
        self._client = client
        self._topic = topic
        self._group_id = group_id
        self._client.load_metadata_for_topics()

        self._offsets = OffsetsStruct(commit=dict(),
                                     produced=dict())

        self._update_group_offsets()
        self._update_produced_offsets()

    def _update_produced_offsets(self):
        """
        Fetch the latest produced offset for every partition of the topic.

        The -1 passed to OffsetRequest asks for the latest offset (i.e. the
        offset of the next coming message); -2 would ask for the earliest
        available offset instead. Note that because offsets are pulled in
        descending order, asking for the earliest offset will always return
        a single element.
        """
        for partition in self._client.get_partition_ids_for_topic(self._topic):
            reqs = [OffsetRequest(self._topic, partition, -1, 1)]

            (resp,) = self._client.send_offset_request(reqs)

            check_error(resp)
            assert resp.topic == self._topic
            assert resp.partition == partition
            self._offsets.produced[partition] = resp.offsets[0]

    def _update_group_offsets(self):
        logger.info("Consumer fetching stored offsets")
        for partition in self._client.get_partition_ids_for_topic(self._topic):
            (resp,) = self._client.send_offset_fetch_request(
                self._group_id,
                [OffsetFetchRequest(self._topic, partition)],
                fail_on_error=False)
            try:
                check_error(resp)
            except UnknownTopicOrPartitionError:
                pass

            if resp.offset == -1:
                self._offsets.commit[partition] = None
            else:
                self._offsets.commit[partition] = resp.offset

    def get(self):
        """
        :return: dict Lags per partition
        """
        self._update_produced_offsets()
        self._update_group_offsets()
        lags = {}
        for partition in self._client.get_partition_ids_for_topic(self._topic):
            produced = self._offsets.produced[partition]
            committed = self._offsets.commit[partition]
            # A committed offset of 0 is valid, so test against None explicitly
            # instead of relying on truthiness.
            lag = (produced - committed) if committed is not None else 0.0
            lags[partition] = lag
        return lags
[ "kafka.common.OffsetFetchRequest", "kafka.common.check_error", "kafka.common.OffsetRequest", "collections.namedtuple", "logging.getLogger" ]
[((202, 229), 'logging.getLogger', 'getLogger', (['"""offset-fetcher"""'], {}), "('offset-fetcher')\n", (211, 229), False, 'from logging import getLogger\n'), ((246, 297), 'collections.namedtuple', 'namedtuple', (['"""OffsetsStruct"""', "['commit', 'produced']"], {}), "('OffsetsStruct', ['commit', 'produced'])\n", (256, 297), False, 'from collections import namedtuple\n'), ((1459, 1476), 'kafka.common.check_error', 'check_error', (['resp'], {}), '(resp)\n', (1470, 1476), False, 'from kafka.common import OffsetRequest, check_error, OffsetFetchRequest, UnknownTopicOrPartitionError\n'), ((1338, 1382), 'kafka.common.OffsetRequest', 'OffsetRequest', (['self._topic', 'partition', '(-1)', '(1)'], {}), '(self._topic, partition, -1, 1)\n', (1351, 1382), False, 'from kafka.common import OffsetRequest, check_error, OffsetFetchRequest, UnknownTopicOrPartitionError\n'), ((2033, 2050), 'kafka.common.check_error', 'check_error', (['resp'], {}), '(resp)\n', (2044, 2050), False, 'from kafka.common import OffsetRequest, check_error, OffsetFetchRequest, UnknownTopicOrPartitionError\n'), ((1918, 1960), 'kafka.common.OffsetFetchRequest', 'OffsetFetchRequest', (['self._topic', 'partition'], {}), '(self._topic, partition)\n', (1936, 1960), False, 'from kafka.common import OffsetRequest, check_error, OffsetFetchRequest, UnknownTopicOrPartitionError\n')]
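A minimal usage sketch for the OffsetsFetcher above, assuming a legacy kafka-python release (one that still ships kafka.common.OffsetRequest); the broker address, topic, and group id are placeholders:

from kafka import KafkaClient  # import path is an assumption for old kafka-python

client = KafkaClient("localhost:9092")
fetcher = OffsetsFetcher(client, topic="events", group_id="reporting")

# lag per partition: latest produced offset minus last committed offset
for partition, lag in fetcher.get().items():
    print("partition %d lags by %d messages" % (partition, lag))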
import os import pandas as pd import src.utils.metrics as metrics_module from src.utils.metrics import smp_metrics import src.viz.eval as viz_eval_module import torch import wandb def log_to_wandb(figures_dict, phase, train_metrics=None, train_loss=None, val_metrics=None, val_loss=None, test_metrics=None, test_loss=None, epochID=0): if phase not in ["train", "inference"]: raise Exception( "Invalid phase! Please choose one of ['train', 'inference']") if figures_dict is not None: new_fig_dict = convert_to_wandb_images(figures_dict) wandb.log(new_fig_dict, step=epochID) if phase == "train": wandb.log(train_metrics, step=epochID) wandb.log(val_metrics, step=epochID) wandb.log({"train_loss": train_loss}, step=epochID) wandb.log({"val_loss": val_loss}, step=epochID) else: wandb.log(test_metrics, step=epochID) wandb.log({"test_loss": test_loss}, step=epochID) return None def evaluate(gt, preds, metric_type, cfg, phase): """ Support only for classification metrics in this function. """ return_dict = {} for metric in cfg["eval"]["logging_metrics"]["{}_metrics".format(metric_type)]: callable_metric = getattr(metrics_module, metric) return_dict["{}_{}".format(phase, metric)] = callable_metric(gt, preds) return return_dict def calculate_metrics(cfg, gt_dict, pred_dict, phase, dataloader): metrics_dict = {} if (cfg["task_type"] == "binary-classification" or cfg["task_type"] == "multiclass-classification"): gt_labels = gt_dict['gt_labels'] pred_labels = pred_dict['pred_labels'] pred_scores = pred_dict['pred_scores'] # TODO: Make scores implementation compatible for multiple classes metrics_dict.update(evaluate(gt_labels, pred_scores, "score", cfg, phase)) metrics_dict.update(evaluate(gt_labels, pred_labels, "label", cfg, phase)) elif cfg["task_type"] == "multilabel-classification": gt_labels = gt_dict['gt_labels'] pred_labels = pred_dict['pred_labels'] pred_scores = pred_dict['pred_scores'] name_to_code_mapping = dataloader.dataset.name_to_feature_code_mapping code_to_name_mapping = {v: k for k, v in name_to_code_mapping.items()} df_dict = {} for i in range(gt_labels.shape[1]): feature_name = code_to_name_mapping[i] feature_dict = evaluate( gt_labels[:, i], pred_scores[:, i], "score", cfg, phase ) feature_dict.update( evaluate(gt_labels[:, i], pred_labels[:, i], "label", cfg, phase) ) df_dict[feature_name] = feature_dict feature_dict = {f"{feature_name}_{k}": v for k, v in feature_dict.items()} metrics_dict.update(feature_dict) metrics_df = pd.DataFrame.from_dict(df_dict).T metrics_df.reset_index(inplace=True) metrics_dict.update( {phase + "_summary_table": wandb.Table(dataframe=metrics_df)} ) elif cfg["task_type"] == "semantic-segmentation": # Support for only mask point metrics for now. Will add support for mask metrics later. gt_masks = gt_dict['gt_masks'] pred_masks = pred_dict['pred_masks'] mode = metrics_dict['mask_metrics']['params']['mode'] threshold = metrics_dict['mask_metrics']['params']['threshold'] tp, fp, fn, tn = smp_metrics.get_stats(pred_masks, gt_masks, mode=mode, threshold=threshold) metrics_dict = cfg["eval"]["logging_metrics"] for metric in metrics_dict['mask_metrics']['metrics_list']: func = getattr(smp_metrics, metric) reduction = metrics_dict['mask_metrics']['params']['reduction'] value = func(tp, fp, fn, tn, reduction=reduction) metrics_dict.update({metric: value}) else: raise ValueError( "Support for given task_type is not yet present in the metrics module." 
+ "Please choose one of - `binary-classification`, `multiclass-classification`, or `multilabel-classification`" ) return metrics_dict def create_figures(cfg, phase, train_gt_labels=None, train_scores=None, val_gt_labels=None, val_scores=None, test_gt_labels=None, test_scores=None, labels_dict=None): """ Plot figures to be logged to wandb. phase = "train" plots both the training and validation figures to allow better analysis. Phase = "inference" may be used for either standalone inference on validation set or on test set. The variable names use "test*" format since this will be create plots for a single forward pass (no training). Args: cfg (dict): Configuration file used to run the code. phase (str): Run a training loop or inference loop. Should be one of "train" or "inference". train_gt_labels (list, optional): Ground truth labels from the train set. Defaults to None. train_scores (list, optional): Class wise scores predicted by the model for the training set. Defaults to None. val_gt_labels (list, optional): Ground truth labels from the validation set. Defaults to None. val_scores (list, optional): Class wise scores predicted by the model for the validation set. Defaults to None. test_gt_labels (list, optional): Ground truth labels from the test set. Defaults to None. test_scores (list, optional): Class wise scores predicted by the model for the test set. Defaults to None. Raises: Exception: Incorrect phase values like "val" or "test" may lead to incorrect performance. Also ensures that case sensitive phase flags are passed. Returns: dict: A dictionary containing mappings to all figures that need to be plotted according to the config file """ if phase not in ["train", "inference"]: raise Exception("Invalid phase! Please choose one of ['train', 'inference']") figures_dict = {} if phase == "train": if cfg["task_type"] == "binary-classification": y_true_arr = [train_gt_labels, val_gt_labels] y_pred_proba_arr = [train_scores, val_scores] labels_arr = ["Train", "Val"] plot_thres_for_idx = [1] elif cfg["task_type"] == "multilabel-classification": y_true_arr, y_pred_proba_arr, labels_arr, plot_thres_for_idx = [[] for _ in range(4)] for i in range(train_gt_labels.shape[1]): y_true_arr.append(train_gt_labels[:, i]) y_pred_proba_arr.append(train_scores[:, i]) labels_arr.append(labels_dict[i]) plot_thres_for_idx.append(i) else: if cfg["task_type"] == "binary-classification": y_true_arr = [test_gt_labels] y_pred_proba_arr = [test_scores] labels_arr = ["Test"] plot_thres_for_idx = [0] elif cfg["task_type"] == "multilabel-classification": y_true_arr, y_pred_proba_arr, labels_arr, plot_thres_for_idx = [[] for i in range(4)] for i in range(test_gt_labels.shape[1]): y_true_arr.append(test_gt_labels[:, i]) y_pred_proba_arr.append(test_scores[:, i]) labels_arr.append(labels_dict[i]) plot_thres_for_idx.append(i) for func in cfg["viz"]["eval"]: if func == "plot_pr_curve": fig, _ = getattr(viz_eval_module, func)( y_true_arr, y_pred_proba_arr, labels_arr, plot_thres_for_idx=plot_thres_for_idx, pos_label=1, plot_prevalance_for_idx=plot_thres_for_idx) else: fig, _ = getattr(viz_eval_module, func)(y_true_arr, y_pred_proba_arr, labels_arr, plot_thres_for_idx=plot_thres_for_idx, pos_label=1) figures_dict[func] = fig return figures_dict def convert_to_wandb_images(figures_dict): new_fig_dict = dict() for fig in figures_dict: new_fig_dict[fig] = [wandb.Image(figures_dict[fig])] return new_fig_dict def save_model_checkpoints(cfg, phase, metrics, ckpt_metric, ckpt_dir, model, optimizer, epochID): state_dict = { 
"epoch": epochID, "model_state_dict": model.state_dict(), "optimizer_state_dict": optimizer.state_dict(), "metric": ckpt_metric, } metric_key = cfg["eval"]["ckpt_metric"] if metrics[metric_key] > ckpt_metric: ckpt_metric = metrics[metric_key] best_model_path = os.path.join(ckpt_dir, "best_model.pth.tar") torch.save(state_dict, best_model_path) model_name = "checkpoint-{}.pth.tar".format(epochID) model_path = os.path.join(ckpt_dir, model_name) torch.save(state_dict, model_path)
[ "wandb.log", "src.utils.metrics.smp_metrics.get_stats", "pandas.DataFrame.from_dict", "torch.save", "wandb.Image", "wandb.Table", "os.path.join" ]
[((8744, 8778), 'os.path.join', 'os.path.join', (['ckpt_dir', 'model_name'], {}), '(ckpt_dir, model_name)\n', (8756, 8778), False, 'import os\n'), ((8783, 8817), 'torch.save', 'torch.save', (['state_dict', 'model_path'], {}), '(state_dict, model_path)\n', (8793, 8817), False, 'import torch\n'), ((600, 637), 'wandb.log', 'wandb.log', (['new_fig_dict'], {'step': 'epochID'}), '(new_fig_dict, step=epochID)\n', (609, 637), False, 'import wandb\n'), ((671, 709), 'wandb.log', 'wandb.log', (['train_metrics'], {'step': 'epochID'}), '(train_metrics, step=epochID)\n', (680, 709), False, 'import wandb\n'), ((718, 754), 'wandb.log', 'wandb.log', (['val_metrics'], {'step': 'epochID'}), '(val_metrics, step=epochID)\n', (727, 754), False, 'import wandb\n'), ((763, 814), 'wandb.log', 'wandb.log', (["{'train_loss': train_loss}"], {'step': 'epochID'}), "({'train_loss': train_loss}, step=epochID)\n", (772, 814), False, 'import wandb\n'), ((823, 870), 'wandb.log', 'wandb.log', (["{'val_loss': val_loss}"], {'step': 'epochID'}), "({'val_loss': val_loss}, step=epochID)\n", (832, 870), False, 'import wandb\n'), ((889, 926), 'wandb.log', 'wandb.log', (['test_metrics'], {'step': 'epochID'}), '(test_metrics, step=epochID)\n', (898, 926), False, 'import wandb\n'), ((935, 984), 'wandb.log', 'wandb.log', (["{'test_loss': test_loss}"], {'step': 'epochID'}), "({'test_loss': test_loss}, step=epochID)\n", (944, 984), False, 'import wandb\n'), ((8576, 8620), 'os.path.join', 'os.path.join', (['ckpt_dir', '"""best_model.pth.tar"""'], {}), "(ckpt_dir, 'best_model.pth.tar')\n", (8588, 8620), False, 'import os\n'), ((8629, 8668), 'torch.save', 'torch.save', (['state_dict', 'best_model_path'], {}), '(state_dict, best_model_path)\n', (8639, 8668), False, 'import torch\n'), ((8077, 8107), 'wandb.Image', 'wandb.Image', (['figures_dict[fig]'], {}), '(figures_dict[fig])\n', (8088, 8107), False, 'import wandb\n'), ((2893, 2924), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['df_dict'], {}), '(df_dict)\n', (2915, 2924), True, 'import pandas as pd\n'), ((3479, 3554), 'src.utils.metrics.smp_metrics.get_stats', 'smp_metrics.get_stats', (['pred_masks', 'gt_masks'], {'mode': 'mode', 'threshold': 'threshold'}), '(pred_masks, gt_masks, mode=mode, threshold=threshold)\n', (3500, 3554), False, 'from src.utils.metrics import smp_metrics\n'), ((3040, 3073), 'wandb.Table', 'wandb.Table', ([], {'dataframe': 'metrics_df'}), '(dataframe=metrics_df)\n', (3051, 3073), False, 'import wandb\n')]
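A hypothetical training-loop call into the logging module above; the metric names and loss values are placeholders, and a wandb run is assumed to be initialised first:

import wandb

wandb.init(project="demo-project")  # placeholder project name

log_to_wandb(
    figures_dict=None,                 # no figures this epoch
    phase="train",
    train_metrics={"train_f1": 0.82},
    train_loss=0.41,
    val_metrics={"val_f1": 0.78},
    val_loss=0.47,
    epochID=3,
)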
from functools import partial import numpy as np import matplotlib.pyplot as plt from mne.utils import _TempDir from pactools.dar_model import AR, DAR, HAR, StableDAR from pactools.utils.testing import assert_equal, assert_greater from pactools.utils.testing import assert_raises, assert_array_equal from pactools.utils.testing import assert_true, assert_array_almost_equal from pactools.comodulogram import Comodulogram, read_comodulogram from pactools.comodulogram import ALL_PAC_METRICS, BICOHERENCE_PAC_METRICS from pactools.simulate_pac import simulate_pac # Parameters used for the simulated signal in the test low_fq_range = [1., 3., 5., 7.] high_fq_range = [25., 50., 75.] n_low = len(low_fq_range) n_high = len(high_fq_range) high_fq = high_fq_range[1] low_fq = low_fq_range[1] n_points = 1024 fs = 200. signal = simulate_pac(n_points=n_points, fs=fs, high_fq=high_fq, low_fq=low_fq, low_fq_width=1., noise_level=0.1, random_state=0) signal_copy = signal.copy() class ComodTest(Comodulogram): # A comodulogram call with default params used for testing def __init__(self, fs=fs, low_fq_range=low_fq_range, low_fq_width=1., high_fq_range=high_fq_range, high_fq_width='auto', method='tort', n_surrogates=0, vmin=None, vmax=None, progress_bar=False, ax_special=None, minimum_shift=1.0, random_state=0, coherence_params=dict(), low_fq_width_2=4.0): super(ComodTest, self).__init__( fs=fs, low_fq_range=low_fq_range, low_fq_width=low_fq_width, high_fq_range=high_fq_range, high_fq_width=high_fq_width, method=method, n_surrogates=n_surrogates, vmin=vmin, vmax=vmax, progress_bar=progress_bar, ax_special=ax_special, minimum_shift=minimum_shift, random_state=random_state, coherence_params=coherence_params, low_fq_width_2=low_fq_width_2) def fast_comod(low_sig=signal, high_sig=None, mask=None, *args, **kwargs): return ComodTest(*args, **kwargs).fit(low_sig=low_sig, high_sig=high_sig, mask=mask).comod_ def test_input_checking(): # test that we have a ValueError for bad parameters func = partial(fast_comod, method='wrong') assert_raises(ValueError, func) func = partial(fast_comod, fs='wrong') assert_raises(ValueError, func) func = partial(fast_comod, low_sig='wrong') assert_raises(ValueError, func) func = partial(fast_comod, high_sig='wrong') assert_raises(ValueError, func) def test_different_dimension_in_input(): # Test that 1D or 2D signals are accepted, but not 3D for dim in [ (4, -1), (-1, ), (1, -1), ]: fast_comod(signal.reshape(*dim)) dim = (2, 2, -1) assert_raises(ValueError, fast_comod, signal.reshape(*dim)) def test_high_sig_identical(): # Test that we have the same result with high_sig=low_sig and high_sig=None for method in ALL_PAC_METRICS: if method in BICOHERENCE_PAC_METRICS: continue comod_0 = fast_comod(method=method) comod_1 = fast_comod(high_sig=signal, method=method) assert_array_equal(comod_0, comod_1) def test_comod_correct_maximum(): # Test that the PAC is maximum at the correct location in the comodulogram for method in ALL_PAC_METRICS: est = ComodTest(method=method, progress_bar=True).fit(signal) comod = est.comod_ # test the shape of the comodulogram assert_array_equal(comod.shape, (n_low, n_high)) # the bicoherence metrics fail this test with current parameters if method in BICOHERENCE_PAC_METRICS or method == 'jiang': continue low_fq_0, high_fq_0, max_pac = est.get_maximum_pac() assert_equal(low_fq_0, low_fq) assert_equal(high_fq_0, high_fq) assert_equal(max_pac, comod.max()) assert_true(np.all(comod > 0)) def test_empty_mask(): # Test that using an empty mask does not change the results mask 
= np.zeros(n_points, dtype=bool) for method in ALL_PAC_METRICS: comod_0 = fast_comod(mask=mask, method=method) comod_1 = fast_comod(low_sig=signal[~mask], method=method) assert_array_almost_equal(comod_0, comod_1, decimal=7) def test_surrogates(): # Test the surrogates comodulogram for method in ALL_PAC_METRICS: msg = 'with method=%s' % method if method in BICOHERENCE_PAC_METRICS or method == 'jiang': continue n_surrogates = 10 est = ComodTest(method=method, n_surrogates=n_surrogates).fit(signal) assert_array_equal(est.comod_.shape, (n_low, n_high), err_msg=msg) assert_array_equal(est.surrogates_.shape, (n_surrogates, n_low, n_high), err_msg=msg) # z-score z_score = est.comod_z_score_ assert_array_equal(z_score.shape, (n_low, n_high), err_msg=msg) if method != 'jiang': # 'jiang' method does not estimate CFC but CFD assert_greater(z_score[1, 1], z_score[-1, -1], msg=msg) # surrogate_max surrogate_max = est.surrogate_max_ assert_array_equal(surrogate_max.shape, (n_surrogates, )) assert_greater(est.comod_[1, 1], surrogate_max.max(), msg=msg) assert_greater(surrogate_max.max(), est.comod_[-1, -1], msg=msg) # Smoke test with contours in the plotting function est.plot(contour_level=0.01, contour_method='comod_max') est.plot(contour_level=3, contour_method='z_score') plt.close('all') def test_no_surrogate(): # Test the errors when n_surrogates == 0 for method in ALL_PAC_METRICS: est = ComodTest(method=method, n_surrogates=0).fit(signal) with assert_raises(ValueError): est.comod_z_score_ with assert_raises(ValueError): est.surrogate_max_ with assert_raises(ValueError): est.plot(contour_level=0.01) plt.close('all') def test_comodulogram_dar_models(): # Smoke test with DAR models for klass in (AR, DAR, HAR, StableDAR): if klass is StableDAR: model = klass(ordar=10, ordriv=2, iter_newton=10) else: model = klass(ordar=10, ordriv=2) comod = fast_comod(method=model) assert_true(~np.any(np.isnan(comod))) def test_plot_comodulogram(): # Smoke test with the standard plotting function est = ComodTest().fit(signal) est.plot() # Smoke test with the special plotting functions ax = plt.figure().gca() for method in ALL_PAC_METRICS: est = ComodTest(low_fq_range=[low_fq], method=method, ax_special=ax).fit(signal) # Test that it raises an error if ax_special is not None and low_fq_range # has more than one element func = partial(fast_comod, ax_special=ax) assert_raises(ValueError, func) plt.close('all') def test_signal_unchanged(): # Test that signal has not been changed during the test assert_array_equal(signal_copy, signal) def _compare_values(v, v2): if isinstance(v, np.ndarray): assert_array_equal(v, v2) elif isinstance(v, dict): for key, value in v.items(): _compare_values(v[key], v2[key]) elif isinstance(v, np.random.RandomState): for s, s2 in zip(v.get_state(), v2.get_state()): _compare_values(s, s2) else: assert_equal(v, v2) def _compare_instance(inst1, inst2): for k, v in vars(inst1).items(): v2 = getattr(inst2, k) _compare_values(v, v2) def test_save(): # Test File IO tmp = _TempDir() est = ComodTest() fname = tmp + '/test.hdf5' est.save(fname) est2 = read_comodulogram(fname) _compare_instance(est, est2) # Now fit and save est.fit(signal) est.save(fname, overwrite=True) est3 = read_comodulogram(fname) _compare_instance(est, est3)
[ "functools.partial", "pactools.simulate_pac.simulate_pac", "pactools.utils.testing.assert_raises", "mne.utils._TempDir", "matplotlib.pyplot.close", "pactools.utils.testing.assert_array_equal", "pactools.comodulogram.read_comodulogram", "numpy.zeros", "pactools.utils.testing.assert_equal", "pactools.utils.testing.assert_array_almost_equal", "pactools.utils.testing.assert_greater", "numpy.isnan", "matplotlib.pyplot.figure", "numpy.all" ]
[((827, 952), 'pactools.simulate_pac.simulate_pac', 'simulate_pac', ([], {'n_points': 'n_points', 'fs': 'fs', 'high_fq': 'high_fq', 'low_fq': 'low_fq', 'low_fq_width': '(1.0)', 'noise_level': '(0.1)', 'random_state': '(0)'}), '(n_points=n_points, fs=fs, high_fq=high_fq, low_fq=low_fq,\n low_fq_width=1.0, noise_level=0.1, random_state=0)\n', (839, 952), False, 'from pactools.simulate_pac import simulate_pac\n'), ((2237, 2272), 'functools.partial', 'partial', (['fast_comod'], {'method': '"""wrong"""'}), "(fast_comod, method='wrong')\n", (2244, 2272), False, 'from functools import partial\n'), ((2277, 2308), 'pactools.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'func'], {}), '(ValueError, func)\n', (2290, 2308), False, 'from pactools.utils.testing import assert_raises, assert_array_equal\n'), ((2320, 2351), 'functools.partial', 'partial', (['fast_comod'], {'fs': '"""wrong"""'}), "(fast_comod, fs='wrong')\n", (2327, 2351), False, 'from functools import partial\n'), ((2356, 2387), 'pactools.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'func'], {}), '(ValueError, func)\n', (2369, 2387), False, 'from pactools.utils.testing import assert_raises, assert_array_equal\n'), ((2399, 2435), 'functools.partial', 'partial', (['fast_comod'], {'low_sig': '"""wrong"""'}), "(fast_comod, low_sig='wrong')\n", (2406, 2435), False, 'from functools import partial\n'), ((2440, 2471), 'pactools.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'func'], {}), '(ValueError, func)\n', (2453, 2471), False, 'from pactools.utils.testing import assert_raises, assert_array_equal\n'), ((2483, 2520), 'functools.partial', 'partial', (['fast_comod'], {'high_sig': '"""wrong"""'}), "(fast_comod, high_sig='wrong')\n", (2490, 2520), False, 'from functools import partial\n'), ((2525, 2556), 'pactools.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'func'], {}), '(ValueError, func)\n', (2538, 2556), False, 'from pactools.utils.testing import assert_raises, assert_array_equal\n'), ((4059, 4089), 'numpy.zeros', 'np.zeros', (['n_points'], {'dtype': 'bool'}), '(n_points, dtype=bool)\n', (4067, 4089), True, 'import numpy as np\n'), ((5593, 5609), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (5602, 5609), True, 'import matplotlib.pyplot as plt\n'), ((6005, 6021), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (6014, 6021), True, 'import matplotlib.pyplot as plt\n'), ((6863, 6897), 'functools.partial', 'partial', (['fast_comod'], {'ax_special': 'ax'}), '(fast_comod, ax_special=ax)\n', (6870, 6897), False, 'from functools import partial\n'), ((6902, 6933), 'pactools.utils.testing.assert_raises', 'assert_raises', (['ValueError', 'func'], {}), '(ValueError, func)\n', (6915, 6933), False, 'from pactools.utils.testing import assert_raises, assert_array_equal\n'), ((6938, 6954), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (6947, 6954), True, 'import matplotlib.pyplot as plt\n'), ((7050, 7089), 'pactools.utils.testing.assert_array_equal', 'assert_array_equal', (['signal_copy', 'signal'], {}), '(signal_copy, signal)\n', (7068, 7089), False, 'from pactools.utils.testing import assert_raises, assert_array_equal\n'), ((7663, 7673), 'mne.utils._TempDir', '_TempDir', ([], {}), '()\n', (7671, 7673), False, 'from mne.utils import _TempDir\n'), ((7758, 7782), 'pactools.comodulogram.read_comodulogram', 'read_comodulogram', (['fname'], {}), '(fname)\n', (7775, 7782), False, 'from pactools.comodulogram import 
Comodulogram, read_comodulogram\n'), ((7907, 7931), 'pactools.comodulogram.read_comodulogram', 'read_comodulogram', (['fname'], {}), '(fname)\n', (7924, 7931), False, 'from pactools.comodulogram import Comodulogram, read_comodulogram\n'), ((3187, 3223), 'pactools.utils.testing.assert_array_equal', 'assert_array_equal', (['comod_0', 'comod_1'], {}), '(comod_0, comod_1)\n', (3205, 3223), False, 'from pactools.utils.testing import assert_raises, assert_array_equal\n'), ((3524, 3572), 'pactools.utils.testing.assert_array_equal', 'assert_array_equal', (['comod.shape', '(n_low, n_high)'], {}), '(comod.shape, (n_low, n_high))\n', (3542, 3572), False, 'from pactools.utils.testing import assert_raises, assert_array_equal\n'), ((3805, 3835), 'pactools.utils.testing.assert_equal', 'assert_equal', (['low_fq_0', 'low_fq'], {}), '(low_fq_0, low_fq)\n', (3817, 3835), False, 'from pactools.utils.testing import assert_equal, assert_greater\n'), ((3844, 3876), 'pactools.utils.testing.assert_equal', 'assert_equal', (['high_fq_0', 'high_fq'], {}), '(high_fq_0, high_fq)\n', (3856, 3876), False, 'from pactools.utils.testing import assert_equal, assert_greater\n'), ((4256, 4310), 'pactools.utils.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['comod_0', 'comod_1'], {'decimal': '(7)'}), '(comod_0, comod_1, decimal=7)\n', (4281, 4310), False, 'from pactools.utils.testing import assert_true, assert_array_almost_equal\n'), ((4651, 4717), 'pactools.utils.testing.assert_array_equal', 'assert_array_equal', (['est.comod_.shape', '(n_low, n_high)'], {'err_msg': 'msg'}), '(est.comod_.shape, (n_low, n_high), err_msg=msg)\n', (4669, 4717), False, 'from pactools.utils.testing import assert_raises, assert_array_equal\n'), ((4726, 4815), 'pactools.utils.testing.assert_array_equal', 'assert_array_equal', (['est.surrogates_.shape', '(n_surrogates, n_low, n_high)'], {'err_msg': 'msg'}), '(est.surrogates_.shape, (n_surrogates, n_low, n_high),\n err_msg=msg)\n', (4744, 4815), False, 'from pactools.utils.testing import assert_raises, assert_array_equal\n'), ((4927, 4990), 'pactools.utils.testing.assert_array_equal', 'assert_array_equal', (['z_score.shape', '(n_low, n_high)'], {'err_msg': 'msg'}), '(z_score.shape, (n_low, n_high), err_msg=msg)\n', (4945, 4990), False, 'from pactools.utils.testing import assert_raises, assert_array_equal\n'), ((5213, 5269), 'pactools.utils.testing.assert_array_equal', 'assert_array_equal', (['surrogate_max.shape', '(n_surrogates,)'], {}), '(surrogate_max.shape, (n_surrogates,))\n', (5231, 5269), False, 'from pactools.utils.testing import assert_raises, assert_array_equal\n'), ((5937, 5962), 'pactools.utils.testing.assert_raises', 'assert_raises', (['ValueError'], {}), '(ValueError)\n', (5950, 5962), False, 'from pactools.utils.testing import assert_raises, assert_array_equal\n'), ((7162, 7187), 'pactools.utils.testing.assert_array_equal', 'assert_array_equal', (['v', 'v2'], {}), '(v, v2)\n', (7180, 7187), False, 'from pactools.utils.testing import assert_raises, assert_array_equal\n'), ((3940, 3957), 'numpy.all', 'np.all', (['(comod > 0)'], {}), '(comod > 0)\n', (3946, 3957), True, 'import numpy as np\n'), ((5081, 5136), 'pactools.utils.testing.assert_greater', 'assert_greater', (['z_score[1, 1]', 'z_score[-1, -1]'], {'msg': 'msg'}), '(z_score[1, 1], z_score[-1, -1], msg=msg)\n', (5095, 5136), False, 'from pactools.utils.testing import assert_equal, assert_greater\n'), ((5798, 5823), 'pactools.utils.testing.assert_raises', 'assert_raises', (['ValueError'], {}), '(ValueError)\n', 
(5811, 5823), False, 'from pactools.utils.testing import assert_raises, assert_array_equal\n'), ((5869, 5894), 'pactools.utils.testing.assert_raises', 'assert_raises', (['ValueError'], {}), '(ValueError)\n', (5882, 5894), False, 'from pactools.utils.testing import assert_raises, assert_array_equal\n'), ((6574, 6586), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (6584, 6586), True, 'import matplotlib.pyplot as plt\n'), ((7457, 7476), 'pactools.utils.testing.assert_equal', 'assert_equal', (['v', 'v2'], {}), '(v, v2)\n', (7469, 7476), False, 'from pactools.utils.testing import assert_equal, assert_greater\n'), ((6359, 6374), 'numpy.isnan', 'np.isnan', (['comod'], {}), '(comod)\n', (6367, 6374), True, 'import numpy as np\n')]
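The test module above exercises the public pactools API; the same flow outside the tests looks like this, with values taken from the test parameters:

from pactools.comodulogram import Comodulogram
from pactools.simulate_pac import simulate_pac

fs = 200.
signal = simulate_pac(n_points=1024, fs=fs, high_fq=50., low_fq=3.,
                     low_fq_width=1., noise_level=0.1, random_state=0)

estimator = Comodulogram(fs=fs, low_fq_range=[1., 3., 5., 7.],
                         method='tort', progress_bar=False)
estimator.fit(signal)

# location and strength of the strongest phase-amplitude coupling
low_fq, high_fq, max_pac = estimator.get_maximum_pac()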
# Copyright (c) 2013 Shotgun Software Inc. # # CONFIDENTIAL AND PROPRIETARY # # This work is provided "AS IS" and subject to the Shotgun Pipeline Toolkit # Source Code License included in this distribution package. See LICENSE. # By accessing, using, copying or modifying this work you indicate your # agreement to the Shotgun Pipeline Toolkit Source Code License. All rights # not expressly granted therein are reserved by Shotgun Software Inc. import os from datetime import datetime from itertools import chain import sgtk from tank_vendor.shotgun_api3 import sg_timezone from sgtk import TankError from .file_item import FileItem from .users import UserCache class FileFinder(object): """ Helper class to find work and publish files for a specified context and set of templates """ def __init__(self, app, user_cache=None): """ Construction :param app: The Workfiles app instance :param user_cache: An UserCache instance used to retrieve Shotgun user information """ self.__app = app self.__user_cache = user_cache or UserCache(app) # cache the valid file extensions that can be found self.__visible_file_extensions = [".%s" % ext if not ext.startswith(".") else ext for ext in self.__app.get_setting("file_extensions", [])] # and cache any fields that should be ignored when comparing work files: self.__version_compare_ignore_fields = self.__app.get_setting("version_compare_ignore_fields", []) def find_files(self, work_template, publish_template, context, filter_file_key=None, require_path=False): """ Find files using the specified context, work and publish templates :param work_template: The template to use when searching for work files :param publish_template: The template to use when searching for publish files :param context: The context to search for file with :param filter_file_key: A unique file 'key' that if specified will limit the returned list of files to just those that match. This 'key' should be generated using the FileItem.build_file_key() method. :param require_path: If True, ensures that all fields of the template can be resolved. Defaults to False. :returns: A list of FileItem instances, one for each unique version of a file found in either the work or publish areas """ # can't find anything without a work template! if not work_template: return [] # find all work & publish files and filter out any that should be ignored: work_files = self.__find_work_files(context, work_template, require_path) work_files = [wf for wf in work_files if not self.__ignore_file_path(wf["path"])] published_files = self.__find_publishes(context, publish_template) published_files = [pf for pf in published_files if not self.__ignore_file_path(pf["path"])] # now amalgamate the two lists together: files = {} key_to_name_map = {} # first, process work files: for work_file in work_files: # always have the work path: work_path = work_file["path"] # get fields for work file: wf_fields = work_template.get_fields(work_path) # build the unique file key for the work path. All files that share the same key are considered # to be different versions of the same file. # file_key = FileItem.build_file_key(wf_fields, work_template, self.__version_compare_ignore_fields + ["version"]) if filter_file_key and file_key != filter_file_key: # we can ignore this file completely! 
continue # copy common fields from work_file: # file_details = dict([(k, v) for k, v in work_file.iteritems() if k != "path"]) # get version from fields if not specified in work file: if not file_details["version"]: file_details["version"] = wf_fields.get("version", 0) # if no task try to determine from context or path: if not file_details["task"]: if context.task: file_details["task"] = context.task else: # try to create a context from the path and see if that contains a task: wf_ctx = self.__app.sgtk.context_from_path(work_path, context) if wf_ctx and wf_ctx.task: file_details["task"] = wf_ctx.task # add additional fields: # # entity: file_details["entity"] = context.entity # file modified details: if not file_details["modified_at"]: file_details["modified_at"] = datetime.fromtimestamp(os.path.getmtime(work_path), tz=sg_timezone.local) if not file_details["modified_by"]: file_details["modified_by"] = self.__user_cache.get_file_last_modified_user(work_path) # make sure all files with the same key have the same name: update_name_map = True if file_key in key_to_name_map: # always use the same name update_name_map = False file_details["name"] = key_to_name_map[file_key] elif not file_details["name"]: # ensure we have a name: file_details["name"] = self.__get_file_display_name(work_path, work_template, wf_fields) # add new file item file_item = FileItem(work_path, None, True, False, file_details, file_key) files[(file_key, file_details["version"])] = file_item if update_name_map: # update name map with name: key_to_name_map[file_key] = file_item.name # and add in publish details: ctx_fields = context.as_template_fields(work_template) for published_file in published_files: file_details = {} # always have a path: publish_path = published_file["path"] # determine the work path fields from the publish fields + ctx fields: # The order is important as it ensures that the user is correct if the # publish file is in a user sandbox but we also need to be careful not # to overrwrite fields that are being ignored when comparing work files publish_fields = publish_template.get_fields(publish_path) wp_fields = publish_fields.copy() for k, v in ctx_fields.iteritems(): if k not in self.__version_compare_ignore_fields: wp_fields[k] = v # build the unique file key for the publish path. All files that share the same key are considered # to be different versions of the same file. file_key = FileItem.build_file_key(wp_fields, work_template, self.__version_compare_ignore_fields + ["version"]) if filter_file_key and file_key != filter_file_key: # we can ignore this file completely! 
continue # resolve the work path: work_path = work_template.apply_fields(wp_fields) # copy common fields from published_file: # file_details = dict([(k, v) for k, v in published_file.iteritems() if k != "path"]) # get version from fields if not specified in publish file: if file_details["version"] == None: file_details["version"] = publish_fields.get("version", 0) # look to see if we have a matching work file for this published file have_work_file = False existing_file_item = files.get((file_key, file_details["version"])) if existing_file_item and existing_file_item.is_local: # we do so check the paths match: if existing_file_item.path != work_path: raise TankError("Work file mismatch when finding files!") # and copy the work file details - giving precedence to the published details: file_details = dict([(k,v) for k, v in chain(existing_file_item.details.iteritems(), file_details.iteritems()) if v != None]) have_work_file = True else: # no work file so just use publish details: # entity file_details["entity"] = context.entity # local file modified details: if os.path.exists(publish_path): file_details["modified_at"] = datetime.fromtimestamp(os.path.getmtime(publish_path), tz=sg_timezone.local) file_details["modified_by"] = self.__user_cache.get_file_last_modified_user(publish_path) else: # just use the publish info file_details["modified_at"] = published_file.get("published_at") file_details["modified_by"] = published_file.get("published_by") # make sure all files with the same key have the same name: update_name_map = True if file_key in key_to_name_map: # always use the same name update_name_map = False file_details["name"] = key_to_name_map[file_key] elif not file_details["name"]: # ensure we have a name: file_details["name"] = self.__get_file_display_name(publish_path, publish_template, publish_fields) # add new file item file_item = FileItem(work_path, publish_path, have_work_file, True, file_details, file_key) files[(file_key, file_details["version"])] = file_item if update_name_map: # update name map with name: key_to_name_map[file_key] = file_item.name # return list of FileItems return files.values() def __find_publishes(self, context, publish_template): """ Find all publishes for the specified context and publish template :param context: The context to find publishes for :param publish_template: The publish template to match found publishes against :returns: List of dictionaries, each one containing the details of an individual published file """ # get list of published files for the context from Shotgun: sg_filters = [["entity", "is", context.entity or context.project]] if context.task: sg_filters.append(["task", "is", context.task]) published_file_entity_type = sgtk.util.get_published_file_entity_type(self.__app.sgtk) sg_fields = ["id", "description", "version_number", "image", "created_at", "created_by", "name", "path", "task"] sg_res = self.__app.shotgun.find(published_file_entity_type, sg_filters, sg_fields) # build list of publishes to send to the filter_publishes hook: hook_publishes = [{"sg_publish":sg_publish} for sg_publish in sg_res] # execute the hook - this will return a list of filtered publishes: hook_result = self.__app.execute_hook("hook_filter_publishes", publishes = hook_publishes) if not isinstance(hook_result, list): self.__app.log_error("hook_filter_publishes returned an unexpected result type '%s' - ignoring!" 
% type(hook_result).__name__) hook_result = [] # split back out publishes: published_files = [] for item in hook_result: sg_publish = item.get("sg_publish") if not sg_publish: continue # all publishes should have a local path: path = sg_publish.get("path", {}).get("local_path") if not path: continue # make sure path matches the publish template: if not publish_template.validate(path): continue # build file details for this publish: file_details = {"path":path} # add in details from sg record: file_details["version"] = sg_publish.get("version_number") file_details["name"] = sg_publish.get("name") file_details["task"] = sg_publish.get("task") file_details["publish_description"] = sg_publish.get("description") file_details["thumbnail"] = sg_publish.get("image") file_details["published_at"] = sg_publish.get("created_at") file_details["published_by"] = sg_publish.get("created_by", {}) file_details["published_file_entity_id"] = sg_publish.get("id") # find additional information: editable_info = item.get("editable") if editable_info and isinstance(editable_info, dict): file_details["editable"] = editable_info.get("can_edit", True) file_details["editable_reason"] = editable_info.get("reason", "") # append to published files list: published_files.append(file_details) return published_files def __find_work_files(self, context, work_template, require_path): """ Find all work files for the specified context and work template :param context: The context to find work files for :param publish_template: The work template to match found files against :param require_path: If True, ensures that all fields of the template can be resolved. :returns: List of dictionaries, each one containing the details of an individual work file :raises TankError: Raised when not all fields of the template can be resolved. """ # find work files that match the current work template: work_fields = [] try: work_fields = context.as_template_fields(work_template, validate=True) except TankError: if require_path: self.__app.log_exception("Unable to resolve all template fields.") self.__app.log_debug("Template: %s" % work_template) self.__app.log_debug("Context: %s" % context) raise TankError( "Unable to resolve template fields! This could mean there is a mismatch " "between your folder schema and templates or that not all entities could " "be matched to a folder. Please email <EMAIL> if you " "need help fixing this." ) # could not resolve fields from this context. This typically happens # when the context object does not have any corresponding objects on # disk / in the path cache. In this case, we cannot continue with any # file system resolution, so just exit early insted. return [] # build list of fields to ignore when looking for files: skip_fields = list(self.__version_compare_ignore_fields) # Skip any keys from work_fields that are _only_ optional in the template. This is to # ensure we find as wide a range of files as possible considering all optional keys. # Note, this may be better as a general change to the paths_from_template method... skip_fields += [n for n in work_fields.keys() if work_template.is_optional(n)] # Find all versions so skip the 'version' key if it's present: skip_fields += ["version"] # find paths: work_file_paths = self.__app.sgtk.paths_from_template(work_template, work_fields, skip_fields, skip_missing_optional_keys=True) # paths_from_template may have returned additional files that we don't want (aren't valid within this # work area) if any of the fields were populated by the context. 
Filter the list to remove these # extra files. filtered_paths = [] for p in work_file_paths: # (AD) TODO - this should be optimized as it's doing 'get_fields' again # when this method returns! fields = work_template.get_fields(p) is_match = True for wfn, wfv in work_fields.iteritems(): if wfn in fields: if fields[wfn] != wfv: is_match = False break elif wfn not in skip_fields: is_match = False break if is_match: filtered_paths.append(p) work_file_paths = filtered_paths # build list of work files to send to the filter_work_files hook: hook_work_files = [{"work_file":{"path":path}} for path in work_file_paths] # execute the hook - this will return a list of filtered publishes: hook_result = self.__app.execute_hook("hook_filter_work_files", work_files = hook_work_files) if not isinstance(hook_result, list): self.__app.log_error("hook_filter_work_files returned an unexpected result type '%s' - ignoring!" % type(hook_result).__name__) hook_result = [] # split back out work files: work_files = [] for item in hook_result: work_file = item.get("work_file") if not work_file: continue path = work_file.get("path") if not path: continue file_details = {"path":path} file_details["version"] = work_file.get("version_number") file_details["name"] = work_file.get("name") file_details["task"] = work_file.get("task") file_details["description"] = work_file.get("description") file_details["thumbnail"] = work_file.get("thumbnail") file_details["modified_at"] = work_file.get("modified_at") file_details["modified_by"] = work_file.get("modified_by", {}) # find additional information: editable_info = item.get("editable") if editable_info and isinstance(editable_info, dict): file_details["editable"] = editable_info.get("can_edit", True) file_details["editable_reason"] = editable_info.get("reason", "") work_files.append(file_details) return work_files def __ignore_file_path(self, path): """ Determine if this file should be ignored when finding files :param path: Path to check :returns: True if the path should be ignored. """ if self.__visible_file_extensions: _, ext = os.path.splitext(path) if ext and ext not in self.__visible_file_extensions: # we want to ignore this file! return True return False def __get_file_display_name(self, path, template, fields=None): """ Return the 'name' to be used for the file - if possible this will return a 'versionless' name """ # first, extract the fields from the path using the template: fields = fields.copy() if fields else template.get_fields(path) if "name" in fields and fields["name"]: # well, that was easy! name = fields["name"] else: # find out if version is used in the file name: template_name, _ = os.path.splitext(os.path.basename(template.definition)) version_in_name = "{version}" in template_name # extract the file name from the path: name, _ = os.path.splitext(os.path.basename(path)) delims_str = "_-. " if version_in_name: # looks like version is part of the file name so we # need to isolate it so that we can remove it safely. 
# First, find a dummy version whose string representation # doesn't exist in the name string version_key = template.keys["version"] dummy_version = 9876 while True: test_str = version_key.str_from_value(dummy_version) if test_str not in name: break dummy_version += 1 # now use this dummy version and rebuild the path fields["version"] = dummy_version path = template.apply_fields(fields) name, _ = os.path.splitext(os.path.basename(path)) # we can now locate the version in the name and remove it dummy_version_str = version_key.str_from_value(dummy_version) v_pos = name.find(dummy_version_str) # remove any preceeding 'v' pre_v_str = name[:v_pos].rstrip("v") post_v_str = name[v_pos + len(dummy_version_str):] if (pre_v_str and post_v_str and pre_v_str[-1] in delims_str and post_v_str[0] in delims_str): # only want one delimiter - strip the second one: post_v_str = post_v_str.lstrip(delims_str) versionless_name = pre_v_str + post_v_str versionless_name = versionless_name.strip(delims_str) if versionless_name: # great - lets use this! name = versionless_name else: # likely that version is only thing in the name so # instead, replace the dummy version with #'s: zero_version_str = version_key.str_from_value(0) new_version_str = "#" * len(zero_version_str) name = name.replace(dummy_version_str, new_version_str) return name
[ "sgtk.util.get_published_file_entity_type", "os.path.basename", "os.path.exists", "sgtk.TankError", "os.path.splitext", "os.path.getmtime" ]
[((11624, 11681), 'sgtk.util.get_published_file_entity_type', 'sgtk.util.get_published_file_entity_type', (['self.__app.sgtk'], {}), '(self.__app.sgtk)\n', (11664, 11681), False, 'import sgtk\n'), ((20159, 20181), 'os.path.splitext', 'os.path.splitext', (['path'], {}), '(path)\n', (20175, 20181), False, 'import os\n'), ((9412, 9440), 'os.path.exists', 'os.path.exists', (['publish_path'], {}), '(publish_path)\n', (9426, 9440), False, 'import os\n'), ((20941, 20978), 'os.path.basename', 'os.path.basename', (['template.definition'], {}), '(template.definition)\n', (20957, 20978), False, 'import os\n'), ((21138, 21160), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (21154, 21160), False, 'import os\n'), ((5333, 5360), 'os.path.getmtime', 'os.path.getmtime', (['work_path'], {}), '(work_path)\n', (5349, 5360), False, 'import os\n'), ((8713, 8764), 'sgtk.TankError', 'TankError', (['"""Work file mismatch when finding files!"""'], {}), "('Work file mismatch when finding files!')\n", (8722, 8764), False, 'from sgtk import TankError\n'), ((15414, 15656), 'sgtk.TankError', 'TankError', (['"""Unable to resolve template fields! This could mean there is a mismatch between your folder schema and templates or that not all entities could be matched to a folder. Please email <EMAIL> if you need help fixing this."""'], {}), "(\n 'Unable to resolve template fields! This could mean there is a mismatch between your folder schema and templates or that not all entities could be matched to a folder. Please email <EMAIL> if you need help fixing this.'\n )\n", (15423, 15656), False, 'from sgtk import TankError\n'), ((22035, 22057), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (22051, 22057), False, 'import os\n'), ((9515, 9545), 'os.path.getmtime', 'os.path.getmtime', (['publish_path'], {}), '(publish_path)\n', (9531, 9545), False, 'import os\n')]
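A hypothetical wiring of the finder above from inside a Toolkit app (self is the running app instance); the template setting names are placeholders:

finder = FileFinder(self)
work_template = self.get_template("template_work")
publish_template = self.get_template("template_publish")

for file_item in finder.find_files(work_template, publish_template, self.context):
    # each FileItem represents one version of a file in the work/publish areas
    self.log_debug("%s v%03d (local: %s)" % (file_item.name,
                                             file_item.details["version"],
                                             file_item.is_local))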
import os from typing import Dict from enum import Enum from match_pattern import Pattern from zcommon.serialization import DictionarySerializableMixin from zcommon.textops import ( json_dump_with_types, json_load_with_types, yaml_dump_with_types, yaml_load_with_types, ) def is_iterable(obj): """Returns true of the object is iterable. """ try: iter(obj) except Exception: return False else: return True def to_enum(val: str, enum_type): """Convert a value to its enum type. Args: val: The value enum_type (type): The type of enum Returns: enum_type: The enum """ if isinstance(val, enum_type): return val if isinstance(val, Enum): assert val.value in enum_type, ValueError(f"Cannot convert {val} to enum {enum_type}") return enum_type[val.value] elif isinstance(val, str): if val in enum_type.__members__: return enum_type[val] return None else: raise ValueError(f"Cannot convert {type(val)} to {enum_type}") def as_object(_unqiue_object_name="anon_obj", **kwargs) -> object: """Converts a list of kwargs to an object with the same attributes. Args: _unqiue_object_name (str, optional): The name of the object class to create. Defaults to "anon_obj". Returns: object: The generated object. """ return type(_unqiue_object_name, (object,), kwargs) class StringEnum(Enum): """A common string enum structure. """ def __str__(self): return self.value def __repr__(self): return self.__str__() def __hash__(self): return self.value.__hash__() @classmethod def parse(cls, val: str): """Parse the enum value from string or None. Args: val (str): The value to parse. Returns: Enum(of current type): the enum. """ if val is None: return None return to_enum(val, cls) class SerializableDictFormat(StringEnum): yaml: str = "yaml" json: str = "json" FILE_DICT_AUTO_DETECT_PATTERNS: Dict[SerializableDictFormat, Pattern] = dict() FILE_DICT_AUTO_DETECT_PATTERNS[SerializableDictFormat.yaml] = Pattern("*.yml|*.yaml") class JsonSerializableMixin: """Mixin functions to allow object conversion to json. The result of this functions must represent the object. Works with DictionarySerializableMixin to auto convert the object. 
""" def __to_json__(self): val = self if isinstance(self, DictionarySerializableMixin): val = self.__to_dictionary__() elif not isinstance(self, (dict, list, tuple)): raise NotImplementedError() return json_dump_with_types(val) @classmethod def __from_json__(cls, as_json: str): parsed = json_load_with_types(as_json) if issubclass(cls, DictionarySerializableMixin): return cls.__from_dictionary__(parsed) elif issubclass(cls, dict) and isinstance(parsed, dict): o: dict = cls() o.update(parsed) return o elif issubclass(cls, (dict, tuple)) and isinstance(parsed, list): return cls(*parsed) raise NotImplementedError() class YamlSerializedMixin: def __to_yaml__(self): val = self if isinstance(self, DictionarySerializableMixin): val = self.__to_dictionary__() elif not isinstance(self, (dict, list, tuple)): raise NotImplementedError() return yaml_dump_with_types(val) @classmethod def __from_yaml__(cls, as_json: str): parsed = yaml_load_with_types(as_json) if issubclass(cls, DictionarySerializableMixin): return cls.__from_dictionary__(parsed) elif issubclass(cls, dict) and isinstance(parsed, dict): o: dict = cls() o.update(parsed) return o elif issubclass(cls, (dict, tuple)) and isinstance(parsed, list): return cls(*parsed) raise NotImplementedError() class SerializableDict(dict, DictionarySerializableMixin, JsonSerializableMixin, YamlSerializedMixin): _autodetect_patterns: Dict[SerializableDictFormat, Pattern] = FILE_DICT_AUTO_DETECT_PATTERNS def __init__(self, **kwargs): """Creates a dictionary that can be written to file and allows parameters to be sent as variables. """ super().__init__() self.update(kwargs) def __auto_detect_file_format(self, filepath: str) -> SerializableDictFormat: for file_format in self._autodetect_patterns.keys(): if self._autodetect_patterns[file_format].test(filepath): return file_format return SerializableDictFormat.json def save(self, filepath: str, file_format: SerializableDictFormat = None): """Saves the dictionary to file. Args: filepath (str): The file name format (SerializableDictFormat, optional): The format to write the file in. if None, autodetect by ext or json. Defaults to None. """ file_format = file_format or self.__auto_detect_file_format(filepath) with open(filepath, "w") as raw: if file_format == SerializableDictFormat.yaml: raw.write(yaml_dump_with_types(self)) elif file_format == SerializableDictFormat.json: raw.write(json_dump_with_types(self)) else: raise BufferError(f"Invalid file format {file_format} for file {filepath}") def load(self, filepath: str, file_format: SerializableDictFormat = None): """Loads the dictionary from file. Args: filepath (str): The file name format (SerializableDictFormat, optional): The format to write the file in. if None, autodetect by ext or json. Defaults to None. """ file_format = file_format or self.__auto_detect_file_format(filepath) with open(filepath, "r") as raw: as_dict = None if file_format == SerializableDictFormat.yaml: as_dict = yaml_load_with_types(raw.read()) as_dict = as_dict or {} elif file_format == SerializableDictFormat.json: as_dict = json_load_with_types(raw.read()) as_dict = as_dict or {} assert isinstance(as_dict, dict), BufferError(f"Invalid file format {file_format} for file {filepath}") self.update(as_dict) def copy(self, merge_with: dict = None): """Copies the current object and makes a clone with the same type. Args: merge_with (dict, optional): If not null, merge with the current object when copying. Defaults to None. Returns: a copy of the current object and type. 
""" copy = self.__class__() copy.update(self) if merge_with is not None: copy.update(merge_with) return copy class SerializeableEnvConfig(SerializableDict): """A serializable collection that loads keys also from os.environ. """ def __getitem_or_env_default__(self, key, default=None): if key in self: return super().get(key) return os.environ.get(key, default) def __setitem__(self, key, value): super().__setitem__(key, value) def __contains__(self, key): return key in self.__dict__ or super().__contains__(key) def _validate_default_and_parse(self, key: str, otype: type, default=None): # use when default is hard to calculate. val = self.__getitem_or_env_default__(key, default) val = self._parse(key, otype, default, val) if key not in self: self[key] = val return val def _parse(self, key: str, otype: type, default=None, val=None): if val is None: val = self.__getitem_or_env_default__(key, default) if val is None: return val elif isinstance(val, otype): return val elif issubclass(otype, int): return int(val) elif issubclass(otype, float): return float(val) elif issubclass(otype, bool): return val if isinstance(val, bool) else str(val).strip().lower() == "true" return val
[ "zcommon.textops.json_dump_with_types", "zcommon.textops.yaml_load_with_types", "zcommon.textops.yaml_dump_with_types", "os.environ.get", "match_pattern.Pattern", "zcommon.textops.json_load_with_types" ]
[((2258, 2281), 'match_pattern.Pattern', 'Pattern', (['"""*.yml|*.yaml"""'], {}), "('*.yml|*.yaml')\n", (2265, 2281), False, 'from match_pattern import Pattern\n'), ((2771, 2796), 'zcommon.textops.json_dump_with_types', 'json_dump_with_types', (['val'], {}), '(val)\n', (2791, 2796), False, 'from zcommon.textops import json_dump_with_types, json_load_with_types, yaml_dump_with_types, yaml_load_with_types\n'), ((2874, 2903), 'zcommon.textops.json_load_with_types', 'json_load_with_types', (['as_json'], {}), '(as_json)\n', (2894, 2903), False, 'from zcommon.textops import json_dump_with_types, json_load_with_types, yaml_dump_with_types, yaml_load_with_types\n'), ((3584, 3609), 'zcommon.textops.yaml_dump_with_types', 'yaml_dump_with_types', (['val'], {}), '(val)\n', (3604, 3609), False, 'from zcommon.textops import json_dump_with_types, json_load_with_types, yaml_dump_with_types, yaml_load_with_types\n'), ((3687, 3716), 'zcommon.textops.yaml_load_with_types', 'yaml_load_with_types', (['as_json'], {}), '(as_json)\n', (3707, 3716), False, 'from zcommon.textops import json_dump_with_types, json_load_with_types, yaml_dump_with_types, yaml_load_with_types\n'), ((7338, 7366), 'os.environ.get', 'os.environ.get', (['key', 'default'], {}), '(key, default)\n', (7352, 7366), False, 'import os\n'), ((5373, 5399), 'zcommon.textops.yaml_dump_with_types', 'yaml_dump_with_types', (['self'], {}), '(self)\n', (5393, 5399), False, 'from zcommon.textops import json_dump_with_types, json_load_with_types, yaml_dump_with_types, yaml_load_with_types\n'), ((5488, 5514), 'zcommon.textops.json_dump_with_types', 'json_dump_with_types', (['self'], {}), '(self)\n', (5508, 5514), False, 'from zcommon.textops import json_dump_with_types, json_load_with_types, yaml_dump_with_types, yaml_load_with_types\n')]
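A round-trip sketch for the SerializableDict defined above; the .yml extension routes through the YAML branch via FILE_DICT_AUTO_DETECT_PATTERNS, and the file name is a placeholder:

config = SerializableDict(model="unet", lr=1e-3, classes=2)
config.save("run_config.yml")      # auto-detected as YAML from the extension

restored = SerializableDict()
restored.load("run_config.yml")
assert restored["lr"] == 1e-3

# copy() preserves the subclass type and can merge overrides in one step
tweaked = config.copy(merge_with={"lr": 5e-4})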
#!/usr/bin/env python3 # Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import enum import os import pathlib from typing import Optional from fbpcs.onedocker_binary_names import OneDockerBinaryNames from fbpcs.private_computation.service.run_binary_base_service import ( RunBinaryBaseService, ) CPP_SHARDER_PATH = pathlib.Path(os.environ.get("CPP_SHARDER_PATH", os.getcwd())) CPP_SHARDER_HASHED_FOR_PID_PATH = pathlib.Path( os.environ.get("CPP_SHARDER_HASHED_FOR_PID_PATH", "cpp_bin/sharder_hashed_for_pid") ) # 10800 s = 3 hrs DEFAULT_CONTAINER_TIMEOUT_IN_SEC = 10800 class ShardType(enum.Enum): ROUND_ROBIN = 1 HASHED_FOR_PID = 2 class ShardingService(RunBinaryBaseService): @staticmethod def build_args( filepath: str, output_base_path: str, file_start_index: int, num_output_files: int, tmp_directory: str = "/tmp/", hmac_key: Optional[str] = None, ) -> str: cmd_args = " ".join( [ f"--input_filename={filepath}", f"--output_base_path={output_base_path}", f"--file_start_index={file_start_index}", f"--num_output_files={num_output_files}", f"--tmp_directory={tmp_directory}", ] ) if hmac_key: cmd_args += f" --hmac_base64_key={hmac_key}" return cmd_args @staticmethod def get_binary_name( shard_type: ShardType, ): # TODO: Probably put exe in an env variable? # Try to align with existing paths if shard_type is ShardType.ROUND_ROBIN: return OneDockerBinaryNames.SHARDER.value elif shard_type is ShardType.HASHED_FOR_PID: return OneDockerBinaryNames.SHARDER_HASHED_FOR_PID.value else: raise RuntimeError(f"Unsupported ShardType passed: {shard_type}")
[ "os.environ.get", "os.getcwd" ]
[((566, 653), 'os.environ.get', 'os.environ.get', (['"""CPP_SHARDER_HASHED_FOR_PID_PATH"""', '"""cpp_bin/sharder_hashed_for_pid"""'], {}), "('CPP_SHARDER_HASHED_FOR_PID_PATH',\n 'cpp_bin/sharder_hashed_for_pid')\n", (580, 653), False, 'import os\n'), ((500, 511), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (509, 511), False, 'import os\n')]
#!C:\Users\579860\Desktop\Steer-Clear-Backend-login\env\Scripts\python.exe # EASY-INSTALL-ENTRY-SCRIPT: 'distribute==0.6.24','console_scripts','easy_install-2.7' __requires__ = 'distribute==0.6.24' import sys from pkg_resources import load_entry_point if __name__ == '__main__': sys.exit( load_entry_point('distribute==0.6.24', 'console_scripts', 'easy_install-2.7')() )
[ "pkg_resources.load_entry_point" ]
[((302, 379), 'pkg_resources.load_entry_point', 'load_entry_point', (['"""distribute==0.6.24"""', '"""console_scripts"""', '"""easy_install-2.7"""'], {}), "('distribute==0.6.24', 'console_scripts', 'easy_install-2.7')\n", (318, 379), False, 'from pkg_resources import load_entry_point\n')]
import os os.environ['CUDA_VISIBLE_DEVICES'] = '3' import onnx import torch import numpy as np from utils import get_path_with_annotation,preprocess_image,postprocess from model import unet_2d import tensorrt as trt from torch2trt import torch2trt class ImageCalibDataset(): def __init__(self, data_list): self.data_list = data_list def __len__(self): return len(self.data_list) def __getitem__(self, idx): image, _ = preprocess_image(self.data_list[idx]) # image = image[None, ...] # add batch dimension # print(image.size()) image = image[0] return [image] def main(): # load pre-trained model ------------------------------------------------------------------------------------------- csv_path = 'test.csv' weight_path = 'unet_bladder.pth' data_list = get_path_with_annotation(csv_path,'path','Bladder') model = unet_2d(n_channels=1, n_classes=2) checkpoint = torch.load(weight_path) model.load_state_dict(checkpoint['state_dict']) # preprocessing stage ---------------------------------------------------------------------------------------------- input, target = preprocess_image(data_list[0]) input = input.cuda() input = input.float() # inference stage -------------------------------------------------------------------------------------------------- model = model.float() model.eval() model.cuda() output = model(input) # post-processing stage -------------------------------------------------------------------------------------------- dice = postprocess(output, target) # convert to trt -------------------------------------------------------------------------------------------------- # p40----------------------------------------- # TRT_FILE_PATH = "./p40/unet_bladder_trt_fp16_p40.pth" # TRT_FILE_PATH = "./p40/unet_bladder_trt_fp16_bs8_p40.pth" # TRT_FILE_PATH = "./p40/unet_bladder_trt_int8_bs8_p40.pth" # TRT_FILE_PATH = "./p40/unet_bladder_trt_cali_int8_bs8_p40.pth" # v100----------------------------------------- # TRT_FILE_PATH = "./v100/unet_bladder_trt_fp16.pth" # TRT_FILE_PATH = "./v100/unet_bladder_trt_fp16_bs4.pth" # TRT_FILE_PATH = "./v100/unet_bladder_trt_fp16_bs8.pth" # TRT_FILE_PATH = "./v100/unet_bladder_trt_int8_bs4.pth" # TRT_FILE_PATH = "./v100/unet_bladder_trt_int8_bs8.pth" # TRT_FILE_PATH = "./v100/unet_bladder_trt_cali_int8_bs4.pth" TRT_FILE_PATH = "./v100/unet_bladder_trt_cali_int8_bs8.pth" data = torch.randn(1, 1, 512, 512).cuda() # FP16 # model_trt = torch2trt(model,[data], max_batch_size=8, fp16_mode=True) # Int 8 # Plan A -------------------------------------------------------------------------------------------------- # It's OK # model_trt = torch2trt(model, [data], max_batch_size=8, int8_mode=True, int8_calib_algorithm=trt.CalibrationAlgoType.MINMAX_CALIBRATION, int8_calib_batch_size=32) # Plan B -------------------------------------------------------------------------------------------------- dataset = ImageCalibDataset(data_list=data_list[:128]) model_trt = torch2trt(model, [data], max_batch_size=8, int8_calib_dataset=dataset, int8_calib_algorithm=trt.CalibrationAlgoType.MINMAX_CALIBRATION, int8_mode=True, int8_calib_batch_size=32) # Note: # use data calibrator can improve the accuracy of the quantized model # Plan B is better!! torch.save(model_trt.state_dict(), TRT_FILE_PATH) output = model_trt(input) # post-processing stage -------------------------------------------------------------------------------------------- dice = postprocess(output, target) '''Failed !!! why? 
# convert to onnx -------------------------------------------------------------------------------------------------- ONNX_FILE_PATH = "unet_bladder_trt_int8.onnx" torch.onnx._export(model_trt, input, ONNX_FILE_PATH, input_names=["input"], output_names=["output"], export_params=True, opset_version=11) onnx_model = onnx.load(ONNX_FILE_PATH) # check that the model converted fine onnx.checker.check_model(onnx_model) print("Model was successfully converted to ONNX format.") print("It was saved to", ONNX_FILE_PATH) ''' if __name__ == '__main__': main()
[ "torch.load", "torch.randn", "utils.preprocess_image", "utils.postprocess", "model.unet_2d", "torch2trt.torch2trt", "utils.get_path_with_annotation" ]
[((858, 911), 'utils.get_path_with_annotation', 'get_path_with_annotation', (['csv_path', '"""path"""', '"""Bladder"""'], {}), "(csv_path, 'path', 'Bladder')\n", (882, 911), False, 'from utils import get_path_with_annotation, preprocess_image, postprocess\n'), ((924, 958), 'model.unet_2d', 'unet_2d', ([], {'n_channels': '(1)', 'n_classes': '(2)'}), '(n_channels=1, n_classes=2)\n', (931, 958), False, 'from model import unet_2d\n'), ((976, 999), 'torch.load', 'torch.load', (['weight_path'], {}), '(weight_path)\n', (986, 999), False, 'import torch\n'), ((1194, 1224), 'utils.preprocess_image', 'preprocess_image', (['data_list[0]'], {}), '(data_list[0])\n', (1210, 1224), False, 'from utils import get_path_with_annotation, preprocess_image, postprocess\n'), ((1616, 1643), 'utils.postprocess', 'postprocess', (['output', 'target'], {}), '(output, target)\n', (1627, 1643), False, 'from utils import get_path_with_annotation, preprocess_image, postprocess\n'), ((3208, 3393), 'torch2trt.torch2trt', 'torch2trt', (['model', '[data]'], {'max_batch_size': '(8)', 'int8_calib_dataset': 'dataset', 'int8_calib_algorithm': 'trt.CalibrationAlgoType.MINMAX_CALIBRATION', 'int8_mode': '(True)', 'int8_calib_batch_size': '(32)'}), '(model, [data], max_batch_size=8, int8_calib_dataset=dataset,\n int8_calib_algorithm=trt.CalibrationAlgoType.MINMAX_CALIBRATION,\n int8_mode=True, int8_calib_batch_size=32)\n', (3217, 3393), False, 'from torch2trt import torch2trt\n'), ((3725, 3752), 'utils.postprocess', 'postprocess', (['output', 'target'], {}), '(output, target)\n', (3736, 3752), False, 'from utils import get_path_with_annotation, preprocess_image, postprocess\n'), ((462, 499), 'utils.preprocess_image', 'preprocess_image', (['self.data_list[idx]'], {}), '(self.data_list[idx])\n', (478, 499), False, 'from utils import get_path_with_annotation, preprocess_image, postprocess\n'), ((2585, 2612), 'torch.randn', 'torch.randn', (['(1)', '(1)', '(512)', '(512)'], {}), '(1, 1, 512, 512)\n', (2596, 2612), False, 'import torch\n')]
import numpy as np from ..base import NCMBase from sklearn.neighbors import NearestNeighbors class KNeighborsMean(NCMBase): def __init__(self, **sklearn): if "n_neighbors" in sklearn: sklearn["n_neighbors"] += 1 else: sklearn["n_neighbors"] = 6 self.clf = NearestNeighbors(**sklearn) self.y = [] def fit(self, X, y): self.clf.fit(X) self.y = y def scores(self, X, y, X_eq_fit): if X_eq_fit: pass else: ind = self.clf.kneighbors(X, return_distance=False) y_pred = self.y[ind] res = [] for label, row in zip(y, y_pred): count_label = [count for label_, count in zip(*np.unique(row, return_counts=True)) if label_ == label] if len(count_label) == 1: res.append( (len(row) - count_label[0]) / len(row) ) else: res.append(1.0) return np.array(res)
[ "numpy.array", "sklearn.neighbors.NearestNeighbors", "numpy.unique" ]
[((312, 339), 'sklearn.neighbors.NearestNeighbors', 'NearestNeighbors', ([], {}), '(**sklearn)\n', (328, 339), False, 'from sklearn.neighbors import NearestNeighbors\n'), ((1084, 1097), 'numpy.array', 'np.array', (['res'], {}), '(res)\n', (1092, 1097), True, 'import numpy as np\n'), ((768, 802), 'numpy.unique', 'np.unique', (['row'], {'return_counts': '(True)'}), '(row, return_counts=True)\n', (777, 802), True, 'import numpy as np\n')]
# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- # pylint: disable=line-too-long import os import unittest from azure.cli.testsdk import ScenarioTest, ResourceGroupPreparer TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..')) class DnsScenarioTest(ScenarioTest): @ResourceGroupPreparer(name_prefix='cli_test_dns') def test_dns(self, resource_group): # pylint: disable=unused-argument self.kwargs['zone'] = 'myzone.com' self.cmd('network dns zone list') # just verify is works (no Exception raised) self.cmd('network dns zone create -n {zone} -g {rg}') self.cmd('network dns zone list -g {rg}', checks=self.check('length(@)', 1)) base_record_sets = 2 self.cmd('network dns zone show -n {zone} -g {rg}', checks=self.check('numberOfRecordSets', base_record_sets)) args = { 'a': '--ipv4-address 10.0.0.10', 'aaaa': '--ipv6-address 2001:db8:0:1:1:1:1:1', 'caa': '--flags 0 --tag foo --value "my value"', 'cname': '--cname mycname', 'mx': '--exchange 12 --preference 13', 'ns': '--nsdname foobar.com', 'ptr': '--ptrdname foobar.com', 'soa': '--email foo.com --expire-time 30 --minimum-ttl 20 --refresh-time 60 --retry-time 90 --serial-number 123', 'srv': '--port 1234 --priority 1 --target target.com --weight 50', 'txt': '--value some_text' } record_types = ['a', 'aaaa', 'caa', 'cname', 'mx', 'ns', 'ptr', 'srv', 'txt'] for t in record_types: # test creating the record set and then adding records self.cmd('network dns record-set {0} create -n myrs{0} -g {{rg}} --zone-name {{zone}}'.format(t)) add_command = 'set-record' if t == 'cname' else 'add-record' self.cmd('network dns record-set {0} {2} -g {{rg}} --zone-name {{zone}} --record-set-name myrs{0} {1}'.format(t, args[t], add_command)) # test creating the record set at the same time you add records self.cmd('network dns record-set {0} {2} -g {{rg}} --zone-name {{zone}} --record-set-name myrs{0}alt {1}'.format(t, args[t], add_command)) self.cmd('network dns record-set a add-record -g {rg} --zone-name {zone} --record-set-name myrsa --ipv4-address 10.0.0.11') self.cmd('network dns record-set soa update -g {{rg}} --zone-name {{zone}} {0}'.format(args['soa'])) long_value = '0123456789' * 50 self.cmd('network dns record-set txt add-record -g {{rg}} -z {{zone}} -n longtxt -v {0}'.format(long_value)) typed_record_sets = 2 * len(record_types) + 1 self.cmd('network dns zone show -n {zone} -g {rg}', checks=self.check('numberOfRecordSets', base_record_sets + typed_record_sets)) self.cmd('network dns record-set a show -n myrsa -g {rg} --zone-name {zone}', checks=self.check('length(arecords)', 2)) # test list vs. 
list type self.cmd('network dns record-set list -g {rg} -z {zone}', checks=self.check('length(@)', base_record_sets + typed_record_sets)) self.cmd('network dns record-set txt list -g {rg} -z {zone}', checks=self.check('length(@)', 3)) for t in record_types: self.cmd('network dns record-set {0} remove-record -g {{rg}} --zone-name {{zone}} --record-set-name myrs{0} {1}'.format(t, args[t])) self.cmd('network dns record-set a show -n myrsa -g {rg} --zone-name {zone}', checks=self.check('length(arecords)', 1)) self.cmd('network dns record-set a remove-record -g {rg} --zone-name {zone} --record-set-name myrsa --ipv4-address 10.0.0.11') self.cmd('network dns record-set a show -n myrsa -g {rg} --zone-name {zone}', checks=self.is_empty()) self.cmd('network dns record-set a delete -n myrsa -g {rg} --zone-name {zone} -y') self.cmd('network dns record-set a show -n myrsa -g {rg} --zone-name {zone}') self.cmd('network dns zone delete -g {rg} -n {zone} -y', checks=self.is_empty()) @ResourceGroupPreparer(name_prefix='cli_test_dns') def test_private_dns(self, resource_group): # pylint: disable=unused-argument self.kwargs['zone'] = 'myprivatezone.com' self.kwargs['regvnet'] = 'regvnet' self.kwargs['resvnet'] = 'resvnet' self.cmd('network vnet create -n {regvnet} -g {rg}') self.cmd('network vnet create -n {resvnet} -g {rg}') self.cmd('network dns zone list') # just verify is works (no Exception raised) self.cmd('network dns zone create -n {zone} -g {rg} --zone-type Private --registration-vnets {regvnet} --resolution-vnets {resvnet}') self.cmd('network dns zone list -g {rg}', checks=self.check('length(@)', 1)) self.cmd('network dns zone update -n {zone} -g {rg} --zone-type Private --registration-vnets "" --resolution-vnets ""') self.cmd('network dns zone update -n {zone} -g {rg} --zone-type Private --registration-vnets {regvnet} --resolution-vnets {resvnet}') base_record_sets = 1 self.cmd('network dns zone show -n {zone} -g {rg}', checks=self.check('numberOfRecordSets', base_record_sets)) args = { 'a': '--ipv4-address 10.0.0.10', 'aaaa': '--ipv6-address 2001:db8:0:1:1:1:1:1', 'caa': '--flags 0 --tag foo --value "my value"', 'cname': '--cname mycname', 'mx': '--exchange 12 --preference 13', 'ptr': '--ptrdname foobar.com', 'soa': '--email foo.com --expire-time 30 --minimum-ttl 20 --refresh-time 60 --retry-time 90 --serial-number 123', 'srv': '--port 1234 --priority 1 --target target.com --weight 50', 'txt': '--value some_text' } # Private Zones do NOT support delegation through NS records record_types = ['a', 'aaaa', 'caa', 'cname', 'mx', 'ptr', 'srv', 'txt'] for t in record_types: # test creating the record set and then adding records self.cmd('network dns record-set {0} create -n myrs{0} -g {{rg}} --zone-name {{zone}}'.format(t)) add_command = 'set-record' if t == 'cname' else 'add-record' self.cmd('network dns record-set {0} {2} -g {{rg}} --zone-name {{zone}} --record-set-name myrs{0} {1}'.format(t, args[t], add_command)) # test creating the record set at the same time you add records self.cmd('network dns record-set {0} {2} -g {{rg}} --zone-name {{zone}} --record-set-name myrs{0}alt {1}'.format(t, args[t], add_command)) self.cmd('network dns record-set a add-record -g {rg} --zone-name {zone} --record-set-name myrsa --ipv4-address 10.0.0.11') self.cmd('network dns record-set soa update -g {{rg}} --zone-name {{zone}} {0}'.format(args['soa'])) long_value = '0123456789' * 50 self.cmd('network dns record-set txt add-record -g {{rg}} -z {{zone}} -n longtxt -v {0}'.format(long_value)) typed_record_sets = 2 * 
len(record_types) + 1 self.cmd('network dns zone show -n {zone} -g {rg}', checks=self.check('numberOfRecordSets', base_record_sets + typed_record_sets)) self.cmd('network dns record-set a show -n myrsa -g {rg} --zone-name {zone}', checks=self.check('length(arecords)', 2)) # test list vs. list type self.cmd('network dns record-set list -g {rg} -z {zone}', checks=self.check('length(@)', base_record_sets + typed_record_sets)) self.cmd('network dns record-set txt list -g {rg} -z {zone}', checks=self.check('length(@)', 3)) for t in record_types: self.cmd('network dns record-set {0} remove-record -g {{rg}} --zone-name {{zone}} --record-set-name myrs{0} {1}'.format(t, args[t])) self.cmd('network dns record-set a show -n myrsa -g {rg} --zone-name {zone}', checks=self.check('length(arecords)', 1)) self.cmd('network dns record-set a remove-record -g {rg} --zone-name {zone} --record-set-name myrsa --ipv4-address 10.0.0.11') self.cmd('network dns record-set a show -n myrsa -g {rg} --zone-name {zone}', checks=self.is_empty()) self.cmd('network dns record-set a delete -n myrsa -g {rg} --zone-name {zone} -y') self.cmd('network dns record-set a show -n myrsa -g {rg} --zone-name {zone}') self.cmd('network dns zone delete -g {rg} -n {zone} -y', checks=self.is_empty()) if __name__ == '__main__': unittest.main()
[ "unittest.main", "os.path.abspath", "azure.cli.testsdk.ResourceGroupPreparer" ]
[((591, 640), 'azure.cli.testsdk.ResourceGroupPreparer', 'ResourceGroupPreparer', ([], {'name_prefix': '"""cli_test_dns"""'}), "(name_prefix='cli_test_dns')\n", (612, 640), False, 'from azure.cli.testsdk import ScenarioTest, ResourceGroupPreparer\n'), ((4481, 4530), 'azure.cli.testsdk.ResourceGroupPreparer', 'ResourceGroupPreparer', ([], {'name_prefix': '"""cli_test_dns"""'}), "(name_prefix='cli_test_dns')\n", (4502, 4530), False, 'from azure.cli.testsdk import ScenarioTest, ResourceGroupPreparer\n'), ((8994, 9009), 'unittest.main', 'unittest.main', ([], {}), '()\n', (9007, 9009), False, 'import unittest\n'), ((512, 537), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (527, 537), False, 'import os\n')]
########################### # Reset working directory # ########################### import os os.chdir("/home/btrabucco/research/multiattend") ########################### # MultiAttend Package.... # ########################### from multiattend.dataset.io_synthesis.io_synthesis_args import IOSynthesisArgs from multiattend.dataset.io_synthesis.io_synthesis_utils import IOSynthesisUtils class IOSynthesis(object): def __init__( self): self.io_synthesis_args = IOSynthesisArgs() self.io_synthesis_utils = IOSynthesisUtils() def get_train_dataset( self): args = self.io_synthesis_args() return self.io_synthesis_utils.get_dataset( args.range, args.length, args.train_instances) def get_val_dataset( self): args = self.io_synthesis_args() return self.io_synthesis_utils.get_dataset( args.range, args.length, args.val_instances) def __call__( self): train_dataset = self.get_train_dataset() val_dataset = self.get_val_dataset() return train_dataset, val_dataset
[ "multiattend.dataset.io_synthesis.io_synthesis_utils.IOSynthesisUtils", "multiattend.dataset.io_synthesis.io_synthesis_args.IOSynthesisArgs", "os.chdir" ]
[((94, 142), 'os.chdir', 'os.chdir', (['"""/home/btrabucco/research/multiattend"""'], {}), "('/home/btrabucco/research/multiattend')\n", (102, 142), False, 'import os\n'), ((486, 503), 'multiattend.dataset.io_synthesis.io_synthesis_args.IOSynthesisArgs', 'IOSynthesisArgs', ([], {}), '()\n', (501, 503), False, 'from multiattend.dataset.io_synthesis.io_synthesis_args import IOSynthesisArgs\n'), ((538, 556), 'multiattend.dataset.io_synthesis.io_synthesis_utils.IOSynthesisUtils', 'IOSynthesisUtils', ([], {}), '()\n', (554, 556), False, 'from multiattend.dataset.io_synthesis.io_synthesis_utils import IOSynthesisUtils\n')]
from wtforms.validators import InputRequired from wtforms.widgets import TextArea from eNMS import app from eNMS.forms import BaseForm from eNMS.forms.fields import ( BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField, ) def configure_pool_form(cls): cls.device_properties = app.properties["filtering"]["device"] cls.link_properties = app.properties["filtering"]["link"] for cls_name, properties in ( ("device", app.properties["filtering"]["device"]), ("link", app.properties["filtering"]["link"]), ): for property in properties: match_field = f"{cls_name}_{property}_match" setattr(cls, f"{cls_name}_{property}", StringField(property)) setattr( cls, match_field, SelectField( choices=( ("inclusion", "Inclusion"), ("equality", "Equality"), ("regex", "Regular Expression"), ) ), ) return cls class DeviceConnectionForm(BaseForm): template = "device_connection" form_type = HiddenField(default="device_connection") address_choices = [("ip_address", "IP address"), ("name", "Name")] + [ (property, values["pretty_name"]) for property, values in app.properties["custom"]["device"].items() if values.get("is_address", False) ] address = SelectField(choices=address_choices) username = StringField("Username") password = PasswordField("Password") class ObjectForm(BaseForm): form_type = HiddenField(default="object") name = StringField("Name", [InputRequired()]) public = BooleanField("Public", default=False) description = StringField("Description") subtype = StringField("Subtype") location = StringField("Location") vendor = StringField("Vendor") model = StringField("Model") class DeviceForm(ObjectForm): template = "object" form_type = HiddenField(default="device") id = HiddenField() icon = SelectField( "Icon", choices=( ("antenna", "Antenna"), ("firewall", "Firewall"), ("host", "Host"), ("optical_switch", "Optical switch"), ("regenerator", "Regenerator"), ("router", "Router"), ("server", "Server"), ("switch", "Switch"), ), ) ip_address = StringField("IP address") port = IntegerField("Port", default=22) operating_system = StringField("Operating System") os_version = StringField("OS Version") longitude = StringField("Longitude", default=0.0) latitude = StringField("Latitude", default=0.0) username = StringField("Username") password = PasswordField("Password") enable_password = PasswordField("'<PASSWORD>") napalm_driver = SelectField( "NAPALM Driver", choices=app.NAPALM_DRIVERS, default="ios" ) netmiko_driver = SelectField( "Netmiko Driver", choices=app.NETMIKO_DRIVERS, default="cisco_ios" ) class DeviceDataForm(BaseForm): template = "device_data" form_type = HiddenField(default="device_data") data_type = SelectField("Display", choices=app.configuration_properties) class LinkForm(ObjectForm): template = "object" form_type = HiddenField(default="link") id = HiddenField() source = InstanceField("Source", [InputRequired()], model="device") destination = InstanceField("Destination", [InputRequired()], model="device") color = StringField("Color") @configure_pool_form class PoolForm(BaseForm): template = "pool" form_type = HiddenField(default="pool") id = HiddenField() name = StringField("Name", [InputRequired()]) public = BooleanField("Public", default=False) description = StringField("Description") longitude = StringField("Longitude", default=0.0) latitude = StringField("Latitude", default=0.0) operator = SelectField( "Type of match", choices=( ("all", "Match if all properties match"), ("any", "Match if any property matches"), ), ) manually_defined = 
BooleanField("Manually defined (won't be automatically updated)") class PoolObjectsForm(BaseForm): template = "pool_objects" form_type = HiddenField(default="pool_objects") devices = MultipleInstanceField("Devices") links = MultipleInstanceField("Links") string_devices = StringField(widget=TextArea(), render_kw={"rows": 5}) string_links = StringField(widget=TextArea(), render_kw={"rows": 5}) class ExcelImportForm(BaseForm): template = "topology_import" form_type = HiddenField(default="excel_import") replace = BooleanField("Replace Existing Topology") class ExportForm(BaseForm): action = "eNMS.inventory.exportTopology" form_type = HiddenField(default="excel_export") export_filename = StringField("Filename")
[ "wtforms.widgets.TextArea", "wtforms.validators.InputRequired", "eNMS.forms.fields.StringField", "eNMS.forms.fields.MultipleInstanceField", "eNMS.forms.fields.PasswordField", "eNMS.forms.fields.BooleanField", "eNMS.forms.fields.HiddenField", "eNMS.forms.fields.SelectField", "eNMS.forms.fields.IntegerField" ]
[((1254, 1294), 'eNMS.forms.fields.HiddenField', 'HiddenField', ([], {'default': '"""device_connection"""'}), "(default='device_connection')\n", (1265, 1294), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((1550, 1586), 'eNMS.forms.fields.SelectField', 'SelectField', ([], {'choices': 'address_choices'}), '(choices=address_choices)\n', (1561, 1586), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((1602, 1625), 'eNMS.forms.fields.StringField', 'StringField', (['"""Username"""'], {}), "('Username')\n", (1613, 1625), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((1641, 1666), 'eNMS.forms.fields.PasswordField', 'PasswordField', (['"""Password"""'], {}), "('Password')\n", (1654, 1666), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((1713, 1742), 'eNMS.forms.fields.HiddenField', 'HiddenField', ([], {'default': '"""object"""'}), "(default='object')\n", (1724, 1742), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((1806, 1843), 'eNMS.forms.fields.BooleanField', 'BooleanField', (['"""Public"""'], {'default': '(False)'}), "('Public', default=False)\n", (1818, 1843), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((1862, 1888), 'eNMS.forms.fields.StringField', 'StringField', (['"""Description"""'], {}), "('Description')\n", (1873, 1888), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((1903, 1925), 'eNMS.forms.fields.StringField', 'StringField', (['"""Subtype"""'], {}), "('Subtype')\n", (1914, 1925), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((1941, 1964), 'eNMS.forms.fields.StringField', 'StringField', (['"""Location"""'], {}), "('Location')\n", (1952, 1964), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((1978, 1999), 'eNMS.forms.fields.StringField', 'StringField', (['"""Vendor"""'], {}), "('Vendor')\n", (1989, 1999), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((2012, 2032), 'eNMS.forms.fields.StringField', 'StringField', (['"""Model"""'], {}), "('Model')\n", (2023, 2032), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((2105, 2134), 'eNMS.forms.fields.HiddenField', 'HiddenField', ([], {'default': '"""device"""'}), "(default='device')\n", (2116, 2134), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((2144, 2157), 
'eNMS.forms.fields.HiddenField', 'HiddenField', ([], {}), '()\n', (2155, 2157), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((2169, 2415), 'eNMS.forms.fields.SelectField', 'SelectField', (['"""Icon"""'], {'choices': "(('antenna', 'Antenna'), ('firewall', 'Firewall'), ('host', 'Host'), (\n 'optical_switch', 'Optical switch'), ('regenerator', 'Regenerator'), (\n 'router', 'Router'), ('server', 'Server'), ('switch', 'Switch'))"}), "('Icon', choices=(('antenna', 'Antenna'), ('firewall',\n 'Firewall'), ('host', 'Host'), ('optical_switch', 'Optical switch'), (\n 'regenerator', 'Regenerator'), ('router', 'Router'), ('server',\n 'Server'), ('switch', 'Switch')))\n", (2180, 2415), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((2550, 2575), 'eNMS.forms.fields.StringField', 'StringField', (['"""IP address"""'], {}), "('IP address')\n", (2561, 2575), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((2587, 2619), 'eNMS.forms.fields.IntegerField', 'IntegerField', (['"""Port"""'], {'default': '(22)'}), "('Port', default=22)\n", (2599, 2619), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((2643, 2674), 'eNMS.forms.fields.StringField', 'StringField', (['"""Operating System"""'], {}), "('Operating System')\n", (2654, 2674), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((2692, 2717), 'eNMS.forms.fields.StringField', 'StringField', (['"""OS Version"""'], {}), "('OS Version')\n", (2703, 2717), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((2734, 2771), 'eNMS.forms.fields.StringField', 'StringField', (['"""Longitude"""'], {'default': '(0.0)'}), "('Longitude', default=0.0)\n", (2745, 2771), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((2787, 2823), 'eNMS.forms.fields.StringField', 'StringField', (['"""Latitude"""'], {'default': '(0.0)'}), "('Latitude', default=0.0)\n", (2798, 2823), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((2839, 2862), 'eNMS.forms.fields.StringField', 'StringField', (['"""Username"""'], {}), "('Username')\n", (2850, 2862), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((2878, 2903), 'eNMS.forms.fields.PasswordField', 'PasswordField', (['"""Password"""'], {}), "('Password')\n", (2891, 2903), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((2926, 2954), 'eNMS.forms.fields.PasswordField', 'PasswordField', (['"""\'<PASSWORD>"""'], {}), '("\'<PASSWORD>")\n', (2939, 2954), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, 
IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((2975, 3046), 'eNMS.forms.fields.SelectField', 'SelectField', (['"""NAPALM Driver"""'], {'choices': 'app.NAPALM_DRIVERS', 'default': '"""ios"""'}), "('NAPALM Driver', choices=app.NAPALM_DRIVERS, default='ios')\n", (2986, 3046), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((3082, 3161), 'eNMS.forms.fields.SelectField', 'SelectField', (['"""Netmiko Driver"""'], {'choices': 'app.NETMIKO_DRIVERS', 'default': '"""cisco_ios"""'}), "('Netmiko Driver', choices=app.NETMIKO_DRIVERS, default='cisco_ios')\n", (3093, 3161), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((3255, 3289), 'eNMS.forms.fields.HiddenField', 'HiddenField', ([], {'default': '"""device_data"""'}), "(default='device_data')\n", (3266, 3289), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((3306, 3366), 'eNMS.forms.fields.SelectField', 'SelectField', (['"""Display"""'], {'choices': 'app.configuration_properties'}), "('Display', choices=app.configuration_properties)\n", (3317, 3366), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((3437, 3464), 'eNMS.forms.fields.HiddenField', 'HiddenField', ([], {'default': '"""link"""'}), "(default='link')\n", (3448, 3464), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((3474, 3487), 'eNMS.forms.fields.HiddenField', 'HiddenField', ([], {}), '()\n', (3485, 3487), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((3654, 3674), 'eNMS.forms.fields.StringField', 'StringField', (['"""Color"""'], {}), "('Color')\n", (3665, 3674), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((3762, 3789), 'eNMS.forms.fields.HiddenField', 'HiddenField', ([], {'default': '"""pool"""'}), "(default='pool')\n", (3773, 3789), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((3799, 3812), 'eNMS.forms.fields.HiddenField', 'HiddenField', ([], {}), '()\n', (3810, 3812), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((3876, 3913), 'eNMS.forms.fields.BooleanField', 'BooleanField', (['"""Public"""'], {'default': '(False)'}), "('Public', default=False)\n", (3888, 3913), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((3932, 3958), 'eNMS.forms.fields.StringField', 'StringField', (['"""Description"""'], {}), "('Description')\n", (3943, 3958), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((3975, 4012), 
'eNMS.forms.fields.StringField', 'StringField', (['"""Longitude"""'], {'default': '(0.0)'}), "('Longitude', default=0.0)\n", (3986, 4012), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((4028, 4064), 'eNMS.forms.fields.StringField', 'StringField', (['"""Latitude"""'], {'default': '(0.0)'}), "('Latitude', default=0.0)\n", (4039, 4064), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((4080, 4211), 'eNMS.forms.fields.SelectField', 'SelectField', (['"""Type of match"""'], {'choices': "(('all', 'Match if all properties match'), ('any',\n 'Match if any property matches'))"}), "('Type of match', choices=(('all',\n 'Match if all properties match'), ('any', 'Match if any property matches'))\n )\n", (4091, 4211), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((4284, 4349), 'eNMS.forms.fields.BooleanField', 'BooleanField', (['"""Manually defined (won\'t be automatically updated)"""'], {}), '("Manually defined (won\'t be automatically updated)")\n', (4296, 4349), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((4431, 4466), 'eNMS.forms.fields.HiddenField', 'HiddenField', ([], {'default': '"""pool_objects"""'}), "(default='pool_objects')\n", (4442, 4466), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((4481, 4513), 'eNMS.forms.fields.MultipleInstanceField', 'MultipleInstanceField', (['"""Devices"""'], {}), "('Devices')\n", (4502, 4513), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((4526, 4556), 'eNMS.forms.fields.MultipleInstanceField', 'MultipleInstanceField', (['"""Links"""'], {}), "('Links')\n", (4547, 4556), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((4789, 4824), 'eNMS.forms.fields.HiddenField', 'HiddenField', ([], {'default': '"""excel_import"""'}), "(default='excel_import')\n", (4800, 4824), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((4839, 4880), 'eNMS.forms.fields.BooleanField', 'BooleanField', (['"""Replace Existing Topology"""'], {}), "('Replace Existing Topology')\n", (4851, 4880), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((4972, 5007), 'eNMS.forms.fields.HiddenField', 'HiddenField', ([], {'default': '"""excel_export"""'}), "(default='excel_export')\n", (4983, 5007), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((5030, 5053), 'eNMS.forms.fields.StringField', 'StringField', (['"""Filename"""'], {}), "('Filename')\n", (5041, 5053), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, 
MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((1775, 1790), 'wtforms.validators.InputRequired', 'InputRequired', ([], {}), '()\n', (1788, 1790), False, 'from wtforms.validators import InputRequired\n'), ((3526, 3541), 'wtforms.validators.InputRequired', 'InputRequired', ([], {}), '()\n', (3539, 3541), False, 'from wtforms.validators import InputRequired\n'), ((3608, 3623), 'wtforms.validators.InputRequired', 'InputRequired', ([], {}), '()\n', (3621, 3623), False, 'from wtforms.validators import InputRequired\n'), ((3845, 3860), 'wtforms.validators.InputRequired', 'InputRequired', ([], {}), '()\n', (3858, 3860), False, 'from wtforms.validators import InputRequired\n'), ((4597, 4607), 'wtforms.widgets.TextArea', 'TextArea', ([], {}), '()\n', (4605, 4607), False, 'from wtforms.widgets import TextArea\n'), ((4670, 4680), 'wtforms.widgets.TextArea', 'TextArea', ([], {}), '()\n', (4678, 4680), False, 'from wtforms.widgets import TextArea\n'), ((781, 802), 'eNMS.forms.fields.StringField', 'StringField', (['property'], {}), '(property)\n', (792, 802), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n'), ((891, 1003), 'eNMS.forms.fields.SelectField', 'SelectField', ([], {'choices': "(('inclusion', 'Inclusion'), ('equality', 'Equality'), ('regex',\n 'Regular Expression'))"}), "(choices=(('inclusion', 'Inclusion'), ('equality', 'Equality'),\n ('regex', 'Regular Expression')))\n", (902, 1003), False, 'from eNMS.forms.fields import BooleanField, HiddenField, InstanceField, IntegerField, MultipleInstanceField, PasswordField, SelectField, StringField\n')]
#! /usr/bin/env python """ A script to generate a flat subproject directory from a tree of dependency directories """ import os import subprocess import textwrap import json import shutil import sys import warnings def project_name(): return os.path.split( os.getcwd() )[1] def dependency_directory(): """ A function isolating the path specification to the project's dependency directory """ return os.path.join( os.getcwd(), "dependencies" ) def clone_submodule( relative_path ): """ A function to clone a git submodule """ print( textwrap.dedent( """ ---------------------------------------- Fetching {relative_path}... ---------------------------------------- """.format( relative_path = relative_path ) ) ) invocation = [ "git", "submodule", "update", "-q","--init", "--", relative_path ] # if os.name == "nt": # invocation.insert( 0, "powershell" ) clone = subprocess.Popen( invocation ) clone.communicate() def update_repository( git ): """ A function to update a submodule to lastest commit of the master branch """ if project_name() not in git: print("Updating to master branch...\n") invocation = ["git", "pull", "-q", "origin", "master"] else: print("Checking out revision {}...\n".format( git[ project_name() ] ) ) invocation = ["git", "pull", "-q", "origin", git[ project_name() ] ] # if os.name == "nt": # invocation.insert( 0, "powershell" ) update = subprocess.Popen( invocation ) update.communicate() def traverse_dependencies( destination, traversed, git ): """ Clone and update dependencies uniquely and collect links to dependency projects in a destination folder """ if not os.path.isdir( dependency_directory() ): return os.chdir( dependency_directory() ) for dependency in os.listdir( os.getcwd() ) : if os.path.isdir( dependency ) and not dependency in traversed : traversed.add( dependency ) clone_submodule( dependency ) os.chdir( dependency ) update_repository( git ) if not os.path.isdir( os.path.join( destination, dependency ) ): try: os.symlink( os.getcwd(), os.path.join( destination, dependency ) ) except OSError: warnings.warn( "Could not create symbolic link " "from {} to subprojects directory."\ .format( os.getcwd() ) ) warnings.warn( "Copying directory contents instead" ) shutil.copytree( os.getcwd(), os.path.join( destination, dependency ), ignore = shutil.ignore_patterns("dependencies") ) traverse_dependencies( destination, traversed, git ) os.chdir( ".." ) os.chdir( os.path.join( ".." ) ) def collect_subprojects( git ): if git: update_repository( git ) destination = os.path.join( os.getcwd(), "subprojects" ) if not os.path.isdir( destination ): os.makedirs( destination ) traverse_dependencies( destination, set(), git ) git = {} if len(sys.argv) > 1: with open ( sys.argv[1], "r" ) as json_file: signature = json.loads( json_file.read() ) git = signature['git'] collect_subprojects( git )
[ "shutil.ignore_patterns", "subprocess.Popen", "os.makedirs", "os.getcwd", "os.path.isdir", "warnings.warn", "os.path.join", "os.chdir" ]
[((950, 978), 'subprocess.Popen', 'subprocess.Popen', (['invocation'], {}), '(invocation)\n', (966, 978), False, 'import subprocess\n'), ((1524, 1552), 'subprocess.Popen', 'subprocess.Popen', (['invocation'], {}), '(invocation)\n', (1540, 1552), False, 'import subprocess\n'), ((435, 446), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (444, 446), False, 'import os\n'), ((1915, 1926), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1924, 1926), False, 'import os\n'), ((3020, 3038), 'os.path.join', 'os.path.join', (['""".."""'], {}), "('..')\n", (3032, 3038), False, 'import os\n'), ((3158, 3169), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3167, 3169), False, 'import os\n'), ((3198, 3224), 'os.path.isdir', 'os.path.isdir', (['destination'], {}), '(destination)\n', (3211, 3224), False, 'import os\n'), ((3236, 3260), 'os.makedirs', 'os.makedirs', (['destination'], {}), '(destination)\n', (3247, 3260), False, 'import os\n'), ((262, 273), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (271, 273), False, 'import os\n'), ((1942, 1967), 'os.path.isdir', 'os.path.isdir', (['dependency'], {}), '(dependency)\n', (1955, 1967), False, 'import os\n'), ((2098, 2118), 'os.chdir', 'os.chdir', (['dependency'], {}), '(dependency)\n', (2106, 2118), False, 'import os\n'), ((2976, 2990), 'os.chdir', 'os.chdir', (['""".."""'], {}), "('..')\n", (2984, 2990), False, 'import os\n'), ((2192, 2229), 'os.path.join', 'os.path.join', (['destination', 'dependency'], {}), '(destination, dependency)\n', (2204, 2229), False, 'import os\n'), ((2288, 2299), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2297, 2299), False, 'import os\n'), ((2333, 2370), 'os.path.join', 'os.path.join', (['destination', 'dependency'], {}), '(destination, dependency)\n', (2345, 2370), False, 'import os\n'), ((2629, 2680), 'warnings.warn', 'warnings.warn', (['"""Copying directory contents instead"""'], {}), "('Copying directory contents instead')\n", (2642, 2680), False, 'import warnings\n'), ((2720, 2731), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2729, 2731), False, 'import os\n'), ((2770, 2807), 'os.path.join', 'os.path.join', (['destination', 'dependency'], {}), '(destination, dependency)\n', (2782, 2807), False, 'import os\n'), ((2593, 2604), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2602, 2604), False, 'import os\n'), ((2857, 2895), 'shutil.ignore_patterns', 'shutil.ignore_patterns', (['"""dependencies"""'], {}), "('dependencies')\n", (2879, 2895), False, 'import shutil\n')]
""" Coursework for Generative Systems for Design, Fall 2019, Carnegie Mellon University Author: <NAME> <EMAIL> L-system with node rewriting This code rewrites the l-system string. The axiom, productions (rules), number of iterations and random seed are defined by user input. Length and angle parameters can be included in the rules if scalar modification of the modular dimensions is desired. Branch length can be limited by specifying the maximum quantity of a given node in the branch. """ __author__ = "mpste" __version__ = "2019.10.02" import random random.seed(seed) def rollDie(): return random.randint(0,9) def getBranch(s, i): lefts = 0 rights = 0 j = i #find open right bracket while rights <= lefts: j += 1 if s[j] == "[": lefts += 1 if s[j] == "]": rights += 1 end = j+1 lefts = 0 rights = 1 k = j #find matching left bracket while lefts < rights: k -= 1 if s[k] == "[": lefts += 1 if s[k] == "]": rights += 1 start = k branch = s[start:end] return branch def numNodes(s, node): count = 0 for c in s: if c == node: count +=1 return count def replaceString(s,n): for i in range(n): subS = "" for j in range(len(s)): if s[j] == "A": branch = getBranch(s,j) nodes = numNodes(branch, "A") + numNodes(branch, "Z") if nodes <= wing_length: subS += rule_2[rollDie()] else: subS += rule_2[0] elif s[j] == "B": branch = getBranch(s,j) nodes = numNodes(branch, "B") + numNodes(branch, "D") if nodes <= trunk_length: subS += rule_1[rollDie()] else: subS += rule_4[0] elif s[j] == "D": subS += rule_3[rollDie()] else: subS += s[j] s = subS return s outString = replaceString(axiom, iterations)
[ "random.seed", "random.randint" ]
[((583, 600), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (594, 600), False, 'import random\n'), ((631, 651), 'random.randint', 'random.randint', (['(0)', '(9)'], {}), '(0, 9)\n', (645, 651), False, 'import random\n')]
import argparse from ...context import mango from ...fakes import fake_context, fake_model_state, fake_loaded_market, fake_order from decimal import Decimal from mango.marketmaking.orderchain.preventpostonlycrossingbookelement import ( PreventPostOnlyCrossingBookElement, ) # The top bid is the highest price someone is willing to pay to BUY top_bid: mango.Order = fake_order( price=Decimal(90), side=mango.Side.BUY, order_type=mango.OrderType.POST_ONLY ) # The top ask is the lowest price someone is willing to pay to SELL top_ask: mango.Order = fake_order( price=Decimal(110), side=mango.Side.SELL, order_type=mango.OrderType.POST_ONLY ) orderbook: mango.OrderBook = mango.OrderBook( "TEST", mango.NullLotSizeConverter(), [top_bid], [top_ask] ) model_state = fake_model_state(market=fake_loaded_market(), orderbook=orderbook) def test_from_args() -> None: args: argparse.Namespace = argparse.Namespace() actual: PreventPostOnlyCrossingBookElement = ( PreventPostOnlyCrossingBookElement.from_command_line_parameters(args) ) assert actual is not None def test_not_crossing_results_in_no_change() -> None: context = fake_context() order: mango.Order = fake_order( price=Decimal(100), order_type=mango.OrderType.POST_ONLY ) actual: PreventPostOnlyCrossingBookElement = PreventPostOnlyCrossingBookElement() result = actual.process(context, model_state, [order]) assert result == [order] def test_bid_too_high_results_in_new_bid() -> None: context = fake_context() order: mango.Order = fake_order( price=Decimal(120), side=mango.Side.BUY, order_type=mango.OrderType.POST_ONLY ) actual: PreventPostOnlyCrossingBookElement = PreventPostOnlyCrossingBookElement() result = actual.process(context, model_state, [order]) assert result[0].price == 109 def test_bid_too_low_results_in_no_change() -> None: context = fake_context() order: mango.Order = fake_order( price=Decimal(80), side=mango.Side.BUY, order_type=mango.OrderType.POST_ONLY ) actual: PreventPostOnlyCrossingBookElement = PreventPostOnlyCrossingBookElement() result = actual.process(context, model_state, [order]) assert result == [order] def test_ask_too_low_results_in_new_ask() -> None: context = fake_context() order: mango.Order = fake_order( price=Decimal(80), side=mango.Side.SELL, order_type=mango.OrderType.POST_ONLY ) actual: PreventPostOnlyCrossingBookElement = PreventPostOnlyCrossingBookElement() result = actual.process(context, model_state, [order]) assert result[0].price == 91 def test_ask_too_high_results_in_no_change() -> None: context = fake_context() order: mango.Order = fake_order( price=Decimal(120), side=mango.Side.SELL, order_type=mango.OrderType.POST_ONLY ) actual: PreventPostOnlyCrossingBookElement = PreventPostOnlyCrossingBookElement() result = actual.process(context, model_state, [order]) assert result == [order] def test_bid_too_high_no_bid_results_in_new_bid() -> None: context = fake_context() order: mango.Order = fake_order( price=Decimal(120), side=mango.Side.BUY, order_type=mango.OrderType.POST_ONLY ) actual: PreventPostOnlyCrossingBookElement = PreventPostOnlyCrossingBookElement() orderbook: mango.OrderBook = mango.OrderBook( "TEST", mango.NullLotSizeConverter(), [], [top_ask] ) model_state = fake_model_state(market=fake_loaded_market(), orderbook=orderbook) result = actual.process(context, model_state, [order]) assert result[0].price == 109 def test_ask_too_low_no_ask_results_in_new_ask() -> None: context = fake_context() order: mango.Order = fake_order( price=Decimal(80), side=mango.Side.SELL, order_type=mango.OrderType.POST_ONLY ) actual: 
PreventPostOnlyCrossingBookElement = PreventPostOnlyCrossingBookElement() orderbook: mango.OrderBook = mango.OrderBook( "TEST", mango.NullLotSizeConverter(), [top_bid], [] ) model_state = fake_model_state(market=fake_loaded_market(), orderbook=orderbook) result = actual.process(context, model_state, [order]) assert result[0].price == 91 def test_ask_no_orderbook_results_in_no_change() -> None: context = fake_context() order: mango.Order = fake_order( price=Decimal(120), side=mango.Side.SELL, order_type=mango.OrderType.POST_ONLY ) actual: PreventPostOnlyCrossingBookElement = PreventPostOnlyCrossingBookElement() orderbook: mango.OrderBook = mango.OrderBook( "TEST", mango.NullLotSizeConverter(), [], [] ) model_state = fake_model_state(market=fake_loaded_market(), orderbook=orderbook) result = actual.process(context, model_state, [order]) assert result == [order] def test_bid_no_orderbook_results_in_no_change() -> None: context = fake_context() order: mango.Order = fake_order( price=Decimal(80), side=mango.Side.BUY, order_type=mango.OrderType.POST_ONLY ) actual: PreventPostOnlyCrossingBookElement = PreventPostOnlyCrossingBookElement() orderbook: mango.OrderBook = mango.OrderBook( "TEST", mango.NullLotSizeConverter(), [], [] ) model_state = fake_model_state(market=fake_loaded_market(), orderbook=orderbook) result = actual.process(context, model_state, [order]) assert result == [order]
[ "argparse.Namespace", "decimal.Decimal", "mango.marketmaking.orderchain.preventpostonlycrossingbookelement.PreventPostOnlyCrossingBookElement", "mango.marketmaking.orderchain.preventpostonlycrossingbookelement.PreventPostOnlyCrossingBookElement.from_command_line_parameters" ]
[((914, 934), 'argparse.Namespace', 'argparse.Namespace', ([], {}), '()\n', (932, 934), False, 'import argparse\n'), ((994, 1063), 'mango.marketmaking.orderchain.preventpostonlycrossingbookelement.PreventPostOnlyCrossingBookElement.from_command_line_parameters', 'PreventPostOnlyCrossingBookElement.from_command_line_parameters', (['args'], {}), '(args)\n', (1057, 1063), False, 'from mango.marketmaking.orderchain.preventpostonlycrossingbookelement import PreventPostOnlyCrossingBookElement\n'), ((1343, 1379), 'mango.marketmaking.orderchain.preventpostonlycrossingbookelement.PreventPostOnlyCrossingBookElement', 'PreventPostOnlyCrossingBookElement', ([], {}), '()\n', (1377, 1379), False, 'from mango.marketmaking.orderchain.preventpostonlycrossingbookelement import PreventPostOnlyCrossingBookElement\n'), ((1731, 1767), 'mango.marketmaking.orderchain.preventpostonlycrossingbookelement.PreventPostOnlyCrossingBookElement', 'PreventPostOnlyCrossingBookElement', ([], {}), '()\n', (1765, 1767), False, 'from mango.marketmaking.orderchain.preventpostonlycrossingbookelement import PreventPostOnlyCrossingBookElement\n'), ((2124, 2160), 'mango.marketmaking.orderchain.preventpostonlycrossingbookelement.PreventPostOnlyCrossingBookElement', 'PreventPostOnlyCrossingBookElement', ([], {}), '()\n', (2158, 2160), False, 'from mango.marketmaking.orderchain.preventpostonlycrossingbookelement import PreventPostOnlyCrossingBookElement\n'), ((2511, 2547), 'mango.marketmaking.orderchain.preventpostonlycrossingbookelement.PreventPostOnlyCrossingBookElement', 'PreventPostOnlyCrossingBookElement', ([], {}), '()\n', (2545, 2547), False, 'from mango.marketmaking.orderchain.preventpostonlycrossingbookelement import PreventPostOnlyCrossingBookElement\n'), ((2906, 2942), 'mango.marketmaking.orderchain.preventpostonlycrossingbookelement.PreventPostOnlyCrossingBookElement', 'PreventPostOnlyCrossingBookElement', ([], {}), '()\n', (2940, 2942), False, 'from mango.marketmaking.orderchain.preventpostonlycrossingbookelement import PreventPostOnlyCrossingBookElement\n'), ((3301, 3337), 'mango.marketmaking.orderchain.preventpostonlycrossingbookelement.PreventPostOnlyCrossingBookElement', 'PreventPostOnlyCrossingBookElement', ([], {}), '()\n', (3335, 3337), False, 'from mango.marketmaking.orderchain.preventpostonlycrossingbookelement import PreventPostOnlyCrossingBookElement\n'), ((3902, 3938), 'mango.marketmaking.orderchain.preventpostonlycrossingbookelement.PreventPostOnlyCrossingBookElement', 'PreventPostOnlyCrossingBookElement', ([], {}), '()\n', (3936, 3938), False, 'from mango.marketmaking.orderchain.preventpostonlycrossingbookelement import PreventPostOnlyCrossingBookElement\n'), ((4502, 4538), 'mango.marketmaking.orderchain.preventpostonlycrossingbookelement.PreventPostOnlyCrossingBookElement', 'PreventPostOnlyCrossingBookElement', ([], {}), '()\n', (4536, 4538), False, 'from mango.marketmaking.orderchain.preventpostonlycrossingbookelement import PreventPostOnlyCrossingBookElement\n'), ((5089, 5125), 'mango.marketmaking.orderchain.preventpostonlycrossingbookelement.PreventPostOnlyCrossingBookElement', 'PreventPostOnlyCrossingBookElement', ([], {}), '()\n', (5123, 5125), False, 'from mango.marketmaking.orderchain.preventpostonlycrossingbookelement import PreventPostOnlyCrossingBookElement\n'), ((396, 407), 'decimal.Decimal', 'Decimal', (['(90)'], {}), '(90)\n', (403, 407), False, 'from decimal import Decimal\n'), ((582, 594), 'decimal.Decimal', 'Decimal', (['(110)'], {}), '(110)\n', (589, 594), False, 'from decimal import 
Decimal\n'), ((1236, 1248), 'decimal.Decimal', 'Decimal', (['(100)'], {}), '(100)\n', (1243, 1248), False, 'from decimal import Decimal\n'), ((1603, 1615), 'decimal.Decimal', 'Decimal', (['(120)'], {}), '(120)\n', (1610, 1615), False, 'from decimal import Decimal\n'), ((1997, 2008), 'decimal.Decimal', 'Decimal', (['(80)'], {}), '(80)\n', (2004, 2008), False, 'from decimal import Decimal\n'), ((2383, 2394), 'decimal.Decimal', 'Decimal', (['(80)'], {}), '(80)\n', (2390, 2394), False, 'from decimal import Decimal\n'), ((2777, 2789), 'decimal.Decimal', 'Decimal', (['(120)'], {}), '(120)\n', (2784, 2789), False, 'from decimal import Decimal\n'), ((3173, 3185), 'decimal.Decimal', 'Decimal', (['(120)'], {}), '(120)\n', (3180, 3185), False, 'from decimal import Decimal\n'), ((3774, 3785), 'decimal.Decimal', 'Decimal', (['(80)'], {}), '(80)\n', (3781, 3785), False, 'from decimal import Decimal\n'), ((4373, 4385), 'decimal.Decimal', 'Decimal', (['(120)'], {}), '(120)\n', (4380, 4385), False, 'from decimal import Decimal\n'), ((4962, 4973), 'decimal.Decimal', 'Decimal', (['(80)'], {}), '(80)\n', (4969, 4973), False, 'from decimal import Decimal\n')]
import torch
import torch.nn as nn
from torch import sigmoid
from torch.nn.init import xavier_uniform_, zeros_


def conv(in_planes, out_planes, kernel_size=3):
    return nn.Sequential(
        nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size,
                  padding=(kernel_size - 1) // 2,  # for half output
                  # padding=1,
                  stride=2),
        nn.ReLU(inplace=True)
    )


def upconv(in_planes, out_planes):
    return nn.Sequential(
        nn.ConvTranspose2d(in_planes, out_planes, kernel_size=4, stride=2, padding=1),
        nn.ReLU(inplace=True)
    )


class PoseNet(nn.Module):
    def __init__(self):
        super(PoseNet, self).__init__()
        conv_planes = [16, 32, 64, 128, 256, 256, 256]
        self.conv1 = conv(6, conv_planes[0], kernel_size=7)  # 6,16,7
        self.conv2 = conv(conv_planes[0], conv_planes[1], kernel_size=5)  # 16,32,5
        self.conv3 = conv(conv_planes[1], conv_planes[2])
        self.conv4 = conv(conv_planes[2], conv_planes[3])
        self.conv5 = conv(conv_planes[3], conv_planes[4])
        self.conv6 = conv(conv_planes[4], conv_planes[5])
        self.conv7 = conv(conv_planes[5], conv_planes[6])
        # stride=1, padding=0 --> spatial size unchanged, only the channel count changes
        self.pose_pred = nn.Conv2d(conv_planes[6], 6, kernel_size=1, padding=0)

    def init_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d):
                xavier_uniform_(m.weight.data)
                if m.bias is not None:
                    zeros_(m.bias)

    def forward(self, frames):
        out_conv1 = self.conv1(frames)     # out_conv1.shape = 1,16,128,416
        out_conv2 = self.conv2(out_conv1)  # out_conv2.shape = 1,32,64,208
        out_conv3 = self.conv3(out_conv2)  # out_conv3.shape = 1,64,32,104
        out_conv4 = self.conv4(out_conv3)  # out_conv4.shape = 1,128,16,52
        out_conv5 = self.conv5(out_conv4)  # out_conv5.shape = 1,256,8,26
        out_conv6 = self.conv6(out_conv5)  # out_conv6.shape = 1,256,4,13
        out_conv7 = self.conv7(out_conv6)  # out_conv7.shape = 1,256,2,7

        pose = self.pose_pred(out_conv7)   # pose.shape = 1,6,2,7
        pose = pose.mean(3).mean(2)        # pose.shape = 1,6
        pose = 0.01 * pose.view(pose.size(0), 6)
        # pose = 0.01 * pose.reshape(1, 6)
        return pose
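# --- Minimal smoke-test sketch (illustration only, not part of the original file).
# --- It only uses names defined above; the 1x6x256x832 input size is an assumption
# --- inferred from the shape comments in forward() (two stacked RGB frames).
if __name__ == "__main__":
    net = PoseNet()
    net.init_weights()
    frames = torch.rand(1, 6, 256, 832)   # batch of one two-frame pair
    print(net(frames).shape)               # expected: torch.Size([1, 6])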
[ "torch.nn.ReLU", "torch.nn.ConvTranspose2d", "torch.nn.init.xavier_uniform_", "torch.nn.Conv2d", "torch.nn.init.zeros_" ]
[((195, 299), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_planes', 'out_planes'], {'kernel_size': 'kernel_size', 'padding': '((kernel_size - 1) // 2)', 'stride': '(2)'}), '(in_planes, out_planes, kernel_size=kernel_size, padding=(\n kernel_size - 1) // 2, stride=2)\n', (204, 299), True, 'import torch.nn as nn\n'), ((391, 412), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (398, 412), True, 'import torch.nn as nn\n'), ((490, 567), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['in_planes', 'out_planes'], {'kernel_size': '(4)', 'stride': '(2)', 'padding': '(1)'}), '(in_planes, out_planes, kernel_size=4, stride=2, padding=1)\n', (508, 567), True, 'import torch.nn as nn\n'), ((577, 598), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (584, 598), True, 'import torch.nn as nn\n'), ((1224, 1278), 'torch.nn.Conv2d', 'nn.Conv2d', (['conv_planes[6]', '(6)'], {'kernel_size': '(1)', 'padding': '(0)'}), '(conv_planes[6], 6, kernel_size=1, padding=0)\n', (1233, 1278), True, 'import torch.nn as nn\n'), ((1517, 1547), 'torch.nn.init.xavier_uniform_', 'xavier_uniform_', (['m.weight.data'], {}), '(m.weight.data)\n', (1532, 1547), False, 'from torch.nn.init import xavier_uniform_, zeros_\n'), ((1607, 1621), 'torch.nn.init.zeros_', 'zeros_', (['m.bias'], {}), '(m.bias)\n', (1613, 1621), False, 'from torch.nn.init import xavier_uniform_, zeros_\n')]
""" .. _pyvista_demo_ref: 3D Visualization with PyVista ============================= The example demonstrates the how to use the VTK interface via the `pyvista library <http://docs.pyvista.org>`__ . To run this example, you will need to `install pyvista <http://docs.pyvista.org/getting-started/installation.html>`__ . - contributed by `@banesullivan <https://github.com/banesullivan>`_ Using the inversion result from the example notebook `plot_laguna_del_maule_inversion.ipynb <http://docs.simpeg.xyz/content/examples/20-published/plot_laguna_del_maule_inversion.html>`_ """ # sphinx_gallery_thumbnail_number = 2 import os import shutil import tarfile import shelve import tarfile import discretize import pyvista as pv import numpy as np # Set a documentation friendly plotting theme pv.set_plot_theme('document') print('PyVista Version: {}'.format(pv.__version__)) ############################################################################### # Download and load data # ---------------------- # # In the following we load the :code:`mesh` and :code:`Lpout` that you would # get from running the laguna-del-maule inversion notebook as well as some of # the raw data for the topography surface and gravity observations. # Download Topography and Observed gravity data url = "https://storage.googleapis.com/simpeg/Chile_GRAV_4_Miller/Chile_GRAV_4_Miller.tar.gz" downloads = discretize.utils.download(url, overwrite=True) basePath = downloads.split(".")[0] # unzip the tarfile tar = tarfile.open(downloads, "r") tar.extractall() tar.close() # Download the inverted model f = discretize.utils.download( "https://storage.googleapis.com/simpeg/laguna_del_maule_slicer.tar.gz" ) tar = tarfile.open(f, "r") tar.extractall() tar.close() with shelve.open('./laguna_del_maule_slicer/laguna_del_maule-result') as db: mesh = db['mesh'] Lpout = db['Lpout'] # Load the mesh/data mesh = discretize.TensorMesh.copy(mesh) models = {'Lpout':Lpout} ############################################################################### # Create PyVista data objects # --------------------------- # # Here we start making PyVista data objects of all the spatially referenced # data. 
# Get the PyVista dataset of the inverted model
dataset = mesh.to_vtk(models)

###############################################################################
# Load topography points from text file as XYZ numpy array
topo_pts = np.loadtxt('Chile_GRAV_4_Miller/LdM_topo.topo', skiprows=1)
# Create the topography points and apply an elevation filter
topo = pv.PolyData(topo_pts).delaunay_2d().elevation()

###############################################################################
# Load the gravity data from text file as XYZ+attributes numpy array
grav_data = np.loadtxt('Chile_GRAV_4_Miller/LdM_grav_obs.grv', skiprows=1)
print('gravity file shape: ', grav_data.shape)
# Use the points to create PolyData
grav = pv.PolyData(grav_data[:, 0:3])
# Add the data arrays
grav.point_arrays['comp-1'] = grav_data[:, 3]
grav.point_arrays['comp-2'] = grav_data[:, 4]

###############################################################################
# Plot the topographic surface and the gravity data
p = pv.Plotter()
p.add_mesh(topo, color='grey')
p.add_mesh(grav, stitle='Observed Gravity Data', point_size=15,
           render_points_as_spheres=True)
# Use a non-photorealistic shading technique to show topographic relief
p.enable_eye_dome_lighting()
p.show(window_size=[1024, 768])

###############################################################################
# Visualize Using PyVista
# -----------------------
#
# Here we visualize all the data in 3D!

# Create display parameters for inverted model
dparams = dict(
    show_edges=False,
    cmap='bwr',
    clim=[-0.6, 0.6],
)

# Apply a threshold filter to remove topography
# no arguments will remove the NaN values
dataset_t = dataset.threshold()

# Extract volumetric threshold
threshed = dataset_t.threshold(-0.2, invert=True)

# Create the rendering scene
p = pv.Plotter()
# add a grid axes
p.show_grid()

# Add spatially referenced data to the scene
p.add_mesh(dataset_t.slice('x'), **dparams)
p.add_mesh(dataset_t.slice('y'), **dparams)
p.add_mesh(threshed, **dparams)
p.add_mesh(topo, opacity=0.75, color='grey',
           # cmap='gist_earth', clim=[1.7e+03, 3.104e+03],
           )
p.add_mesh(grav, cmap='viridis', point_size=15,
           render_points_as_spheres=True)

# Here is a nice camera position we manually found:
cpos = [(395020.7332989303, 6039949.0452080015, 20387.583125699253),
        (364528.3152860675, 6008839.363092581, -3776.318305935185),
        (-0.3423732500124074, -0.34364514928896667, 0.8744647328772646)]
p.camera_position = cpos

# Render the scene!
p.show(window_size=[1024, 768])
[ "pyvista.set_plot_theme", "discretize.utils.download", "shelve.open", "pyvista.Plotter", "numpy.loadtxt", "tarfile.open", "discretize.TensorMesh.copy", "pyvista.PolyData" ]
[((794, 823), 'pyvista.set_plot_theme', 'pv.set_plot_theme', (['"""document"""'], {}), "('document')\n", (811, 823), True, 'import pyvista as pv\n'), ((1387, 1433), 'discretize.utils.download', 'discretize.utils.download', (['url'], {'overwrite': '(True)'}), '(url, overwrite=True)\n', (1412, 1433), False, 'import discretize\n'), ((1496, 1524), 'tarfile.open', 'tarfile.open', (['downloads', '"""r"""'], {}), "(downloads, 'r')\n", (1508, 1524), False, 'import tarfile\n'), ((1589, 1691), 'discretize.utils.download', 'discretize.utils.download', (['"""https://storage.googleapis.com/simpeg/laguna_del_maule_slicer.tar.gz"""'], {}), "(\n 'https://storage.googleapis.com/simpeg/laguna_del_maule_slicer.tar.gz')\n", (1614, 1691), False, 'import discretize\n'), ((1699, 1719), 'tarfile.open', 'tarfile.open', (['f', '"""r"""'], {}), "(f, 'r')\n", (1711, 1719), False, 'import tarfile\n'), ((1902, 1934), 'discretize.TensorMesh.copy', 'discretize.TensorMesh.copy', (['mesh'], {}), '(mesh)\n', (1928, 1934), False, 'import discretize\n'), ((2419, 2478), 'numpy.loadtxt', 'np.loadtxt', (['"""Chile_GRAV_4_Miller/LdM_topo.topo"""'], {'skiprows': '(1)'}), "('Chile_GRAV_4_Miller/LdM_topo.topo', skiprows=1)\n", (2429, 2478), True, 'import numpy as np\n'), ((2758, 2820), 'numpy.loadtxt', 'np.loadtxt', (['"""Chile_GRAV_4_Miller/LdM_grav_obs.grv"""'], {'skiprows': '(1)'}), "('Chile_GRAV_4_Miller/LdM_grav_obs.grv', skiprows=1)\n", (2768, 2820), True, 'import numpy as np\n'), ((2911, 2941), 'pyvista.PolyData', 'pv.PolyData', (['grav_data[:, 0:3]'], {}), '(grav_data[:, 0:3])\n', (2922, 2941), True, 'import pyvista as pv\n'), ((3191, 3203), 'pyvista.Plotter', 'pv.Plotter', ([], {}), '()\n', (3201, 3203), True, 'import pyvista as pv\n'), ((4016, 4028), 'pyvista.Plotter', 'pv.Plotter', ([], {}), '()\n', (4026, 4028), True, 'import pyvista as pv\n'), ((1755, 1819), 'shelve.open', 'shelve.open', (['"""./laguna_del_maule_slicer/laguna_del_maule-result"""'], {}), "('./laguna_del_maule_slicer/laguna_del_maule-result')\n", (1766, 1819), False, 'import shelve\n'), ((2547, 2568), 'pyvista.PolyData', 'pv.PolyData', (['topo_pts'], {}), '(topo_pts)\n', (2558, 2568), True, 'import pyvista as pv\n')]
#!/usr/bin/python # Copyright 2015 Mirantis, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sys import time import click from solar.cli.uids_history import remember_uid from solar.cli.uids_history import SOLARUID from solar import errors from solar.orchestration import filters from solar.orchestration import graph from solar.orchestration import tasks from solar.orchestration.traversal import states from solar.orchestration import utils @click.group(name='orch') def orchestration(): """\b create solar/orchestration/examples/multi.yaml <id> run-once <id> report <id> <task> -> <status> restart <id> --reset """ @orchestration.command() @click.argument('plan') def create(plan): uid = graph.create_plan(plan).graph['uid'] remember_uid(uid) click.echo(uid) def wait_report(uid, timeout, interval=3): try: if timeout: for summary in graph.wait_finish(uid, timeout=timeout): stringified_summary = '\r' + ' '.join( ['{}: {}'.format(state, count) for state, count in summary.items()]) click.echo(stringified_summary, nl=False) sys.stdout.flush() pending = states.PENDING.name in_progress = states.INPROGRESS.name if summary[pending] + summary[in_progress] != 0: time.sleep(interval) except errors.SolarError: click.echo('') click_report(uid) sys.exit(1) else: click.echo('') click_report(uid) def click_report(uid): colors = { 'PENDING': 'cyan', 'ERROR': 'red', 'SUCCESS': 'green', 'INPROGRESS': 'yellow', 'SKIPPED': 'blue', 'NOOP': 'black'} total = 0.0 report = graph.report_topo(uid) for item in report: msg = '{} -> {}'.format(item[0], item[1]) if item[2]: msg += ' :: {}'.format(item[2]) if item[4] and item[3]: delta = float(item[4]) - float(item[3]) total += delta msg += ' D: {}'.format(delta) click.echo(click.style(msg, fg=colors[item[1]])) click.echo('Delta SUM: {}'.format(total)) @orchestration.command() @click.argument('uid', type=SOLARUID, default='last') @click.option('-w', 'wait', default=0) def report(uid, wait): wait_report(uid, wait) @orchestration.command() @click.argument('uid', type=SOLARUID) @click.option('--start', '-s', multiple=True) @click.option('--end', '-e', multiple=True) def filter(uid, start, end): graph.reset_filtered(uid) plan = graph.get_graph(uid) errors = filters.filter(plan, start=start, end=end) if errors: raise click.ClickException('\n'.join(errors)) graph.update_graph(plan) utils.write_graph(plan) click.echo('Created {name}.png'.format(name=plan.graph['name'])) @orchestration.command(help='Used to mark task as executed') @click.argument('uid', type=SOLARUID) @click.option('--task', '-t', multiple=True) def noop(uid, task): graph.set_states(uid, task) @orchestration.command(name='run-once') @click.argument('uid', type=SOLARUID, default='last') @click.option('-w', 'wait', default=0) def run_once(uid, wait): tasks.schedule_start.apply_async( args=[uid], queue='scheduler') wait_report(uid, wait) @orchestration.command() @click.argument('uid', type=SOLARUID) @click.option('-w', 'wait', default=0) def restart(uid, wait): graph.reset_by_uid(uid) tasks.schedule_start.apply_async(args=[uid], 
queue='scheduler') wait_report(uid, wait) @orchestration.command() @click.argument('uid', type=SOLARUID) def stop(uid): # TODO(dshulyak) how to do "hard" stop? # using revoke(terminate=True) will lead to inability to restart execution # research possibility of customizations # app.control and Panel.register in celery tasks.soft_stop.apply_async(args=[uid], queue='scheduler') @orchestration.command() @click.argument('uid', type=SOLARUID) def reset(uid): graph.reset_by_uid(uid) @orchestration.command() @click.argument('uid', type=SOLARUID) def resume(uid): graph.reset_by_uid(uid, state_list=['SKIPPED']) tasks.schedule_start.apply_async(args=[uid], queue='scheduler') @orchestration.command() @click.argument('uid', type=SOLARUID) def retry(uid): graph.reset_by_uid(uid, state_list=['ERROR']) tasks.schedule_start.apply_async(args=[uid], queue='scheduler') @orchestration.command() @click.argument('uid', type=SOLARUID) @click.option('--start', '-s', multiple=True) @click.option('--end', '-e', multiple=True) def dg(uid, start, end): plan = graph.get_graph(uid) if start or end: errors = filters.filter(plan, start=start, end=end) if errors: raise click.ClickException('\n'.join(errors)) utils.write_graph(plan) click.echo('Created {name}.svg'.format(name=plan.graph['name'])) @orchestration.command() @click.argument('uid', type=SOLARUID) def show(uid): click.echo(graph.show(uid))
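# Hypothetical command-line session sketch (illustration only). The ``solar``
# entry-point name is an assumption; the sub-command names below come from the
# click decorators defined in this module, and the plan path is illustrative:
#   solar orch create /path/to/plan.yaml   # prints and remembers the new plan uid
#   solar orch run-once last -w 60         # schedule the plan, wait up to 60s for a report
#   solar orch report last                 # print the per-task status report
#   solar orch retry last                  # reset ERROR tasks and re-run the plan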
[ "solar.orchestration.graph.reset_by_uid", "click.option", "click.echo", "sys.stdout.flush", "solar.orchestration.graph.update_graph", "solar.orchestration.graph.set_states", "click.group", "solar.orchestration.graph.show", "click.style", "solar.orchestration.utils.write_graph", "solar.orchestration.graph.get_graph", "time.sleep", "solar.orchestration.graph.report_topo", "solar.cli.uids_history.remember_uid", "sys.exit", "solar.orchestration.graph.create_plan", "click.argument", "solar.orchestration.graph.wait_finish", "solar.orchestration.tasks.schedule_start.apply_async", "solar.orchestration.filters.filter", "solar.orchestration.tasks.soft_stop.apply_async", "solar.orchestration.graph.reset_filtered" ]
[((989, 1013), 'click.group', 'click.group', ([], {'name': '"""orch"""'}), "(name='orch')\n", (1000, 1013), False, 'import click\n'), ((1224, 1246), 'click.argument', 'click.argument', (['"""plan"""'], {}), "('plan')\n", (1238, 1246), False, 'import click\n'), ((2798, 2850), 'click.argument', 'click.argument', (['"""uid"""'], {'type': 'SOLARUID', 'default': '"""last"""'}), "('uid', type=SOLARUID, default='last')\n", (2812, 2850), False, 'import click\n'), ((2852, 2889), 'click.option', 'click.option', (['"""-w"""', '"""wait"""'], {'default': '(0)'}), "('-w', 'wait', default=0)\n", (2864, 2889), False, 'import click\n'), ((2968, 3004), 'click.argument', 'click.argument', (['"""uid"""'], {'type': 'SOLARUID'}), "('uid', type=SOLARUID)\n", (2982, 3004), False, 'import click\n'), ((3006, 3050), 'click.option', 'click.option', (['"""--start"""', '"""-s"""'], {'multiple': '(True)'}), "('--start', '-s', multiple=True)\n", (3018, 3050), False, 'import click\n'), ((3052, 3094), 'click.option', 'click.option', (['"""--end"""', '"""-e"""'], {'multiple': '(True)'}), "('--end', '-e', multiple=True)\n", (3064, 3094), False, 'import click\n'), ((3501, 3537), 'click.argument', 'click.argument', (['"""uid"""'], {'type': 'SOLARUID'}), "('uid', type=SOLARUID)\n", (3515, 3537), False, 'import click\n'), ((3539, 3582), 'click.option', 'click.option', (['"""--task"""', '"""-t"""'], {'multiple': '(True)'}), "('--task', '-t', multiple=True)\n", (3551, 3582), False, 'import click\n'), ((3679, 3731), 'click.argument', 'click.argument', (['"""uid"""'], {'type': 'SOLARUID', 'default': '"""last"""'}), "('uid', type=SOLARUID, default='last')\n", (3693, 3731), False, 'import click\n'), ((3733, 3770), 'click.option', 'click.option', (['"""-w"""', '"""wait"""'], {'default': '(0)'}), "('-w', 'wait', default=0)\n", (3745, 3770), False, 'import click\n'), ((3936, 3972), 'click.argument', 'click.argument', (['"""uid"""'], {'type': 'SOLARUID'}), "('uid', type=SOLARUID)\n", (3950, 3972), False, 'import click\n'), ((3974, 4011), 'click.option', 'click.option', (['"""-w"""', '"""wait"""'], {'default': '(0)'}), "('-w', 'wait', default=0)\n", (3986, 4011), False, 'import click\n'), ((4187, 4223), 'click.argument', 'click.argument', (['"""uid"""'], {'type': 'SOLARUID'}), "('uid', type=SOLARUID)\n", (4201, 4223), False, 'import click\n'), ((4545, 4581), 'click.argument', 'click.argument', (['"""uid"""'], {'type': 'SOLARUID'}), "('uid', type=SOLARUID)\n", (4559, 4581), False, 'import click\n'), ((4654, 4690), 'click.argument', 'click.argument', (['"""uid"""'], {'type': 'SOLARUID'}), "('uid', type=SOLARUID)\n", (4668, 4690), False, 'import click\n'), ((4856, 4892), 'click.argument', 'click.argument', (['"""uid"""'], {'type': 'SOLARUID'}), "('uid', type=SOLARUID)\n", (4870, 4892), False, 'import click\n'), ((5055, 5091), 'click.argument', 'click.argument', (['"""uid"""'], {'type': 'SOLARUID'}), "('uid', type=SOLARUID)\n", (5069, 5091), False, 'import click\n'), ((5093, 5137), 'click.option', 'click.option', (['"""--start"""', '"""-s"""'], {'multiple': '(True)'}), "('--start', '-s', multiple=True)\n", (5105, 5137), False, 'import click\n'), ((5139, 5181), 'click.option', 'click.option', (['"""--end"""', '"""-e"""'], {'multiple': '(True)'}), "('--end', '-e', multiple=True)\n", (5151, 5181), False, 'import click\n'), ((5522, 5558), 'click.argument', 'click.argument', (['"""uid"""'], {'type': 'SOLARUID'}), "('uid', type=SOLARUID)\n", (5536, 5558), False, 'import click\n'), ((1316, 1333), 'solar.cli.uids_history.remember_uid', 'remember_uid', 
(['uid'], {}), '(uid)\n', (1328, 1333), False, 'from solar.cli.uids_history import remember_uid\n'), ((1338, 1353), 'click.echo', 'click.echo', (['uid'], {}), '(uid)\n', (1348, 1353), False, 'import click\n'), ((2353, 2375), 'solar.orchestration.graph.report_topo', 'graph.report_topo', (['uid'], {}), '(uid)\n', (2370, 2375), False, 'from solar.orchestration import graph\n'), ((3128, 3153), 'solar.orchestration.graph.reset_filtered', 'graph.reset_filtered', (['uid'], {}), '(uid)\n', (3148, 3153), False, 'from solar.orchestration import graph\n'), ((3165, 3185), 'solar.orchestration.graph.get_graph', 'graph.get_graph', (['uid'], {}), '(uid)\n', (3180, 3185), False, 'from solar.orchestration import graph\n'), ((3199, 3241), 'solar.orchestration.filters.filter', 'filters.filter', (['plan'], {'start': 'start', 'end': 'end'}), '(plan, start=start, end=end)\n', (3213, 3241), False, 'from solar.orchestration import filters\n'), ((3315, 3339), 'solar.orchestration.graph.update_graph', 'graph.update_graph', (['plan'], {}), '(plan)\n', (3333, 3339), False, 'from solar.orchestration import graph\n'), ((3344, 3367), 'solar.orchestration.utils.write_graph', 'utils.write_graph', (['plan'], {}), '(plan)\n', (3361, 3367), False, 'from solar.orchestration import utils\n'), ((3608, 3635), 'solar.orchestration.graph.set_states', 'graph.set_states', (['uid', 'task'], {}), '(uid, task)\n', (3624, 3635), False, 'from solar.orchestration import graph\n'), ((3800, 3863), 'solar.orchestration.tasks.schedule_start.apply_async', 'tasks.schedule_start.apply_async', ([], {'args': '[uid]', 'queue': '"""scheduler"""'}), "(args=[uid], queue='scheduler')\n", (3832, 3863), False, 'from solar.orchestration import tasks\n'), ((4040, 4063), 'solar.orchestration.graph.reset_by_uid', 'graph.reset_by_uid', (['uid'], {}), '(uid)\n', (4058, 4063), False, 'from solar.orchestration import graph\n'), ((4068, 4131), 'solar.orchestration.tasks.schedule_start.apply_async', 'tasks.schedule_start.apply_async', ([], {'args': '[uid]', 'queue': '"""scheduler"""'}), "(args=[uid], queue='scheduler')\n", (4100, 4131), False, 'from solar.orchestration import tasks\n'), ((4458, 4516), 'solar.orchestration.tasks.soft_stop.apply_async', 'tasks.soft_stop.apply_async', ([], {'args': '[uid]', 'queue': '"""scheduler"""'}), "(args=[uid], queue='scheduler')\n", (4485, 4516), False, 'from solar.orchestration import tasks\n'), ((4602, 4625), 'solar.orchestration.graph.reset_by_uid', 'graph.reset_by_uid', (['uid'], {}), '(uid)\n', (4620, 4625), False, 'from solar.orchestration import graph\n'), ((4712, 4759), 'solar.orchestration.graph.reset_by_uid', 'graph.reset_by_uid', (['uid'], {'state_list': "['SKIPPED']"}), "(uid, state_list=['SKIPPED'])\n", (4730, 4759), False, 'from solar.orchestration import graph\n'), ((4764, 4827), 'solar.orchestration.tasks.schedule_start.apply_async', 'tasks.schedule_start.apply_async', ([], {'args': '[uid]', 'queue': '"""scheduler"""'}), "(args=[uid], queue='scheduler')\n", (4796, 4827), False, 'from solar.orchestration import tasks\n'), ((4913, 4958), 'solar.orchestration.graph.reset_by_uid', 'graph.reset_by_uid', (['uid'], {'state_list': "['ERROR']"}), "(uid, state_list=['ERROR'])\n", (4931, 4958), False, 'from solar.orchestration import graph\n'), ((4963, 5026), 'solar.orchestration.tasks.schedule_start.apply_async', 'tasks.schedule_start.apply_async', ([], {'args': '[uid]', 'queue': '"""scheduler"""'}), "(args=[uid], queue='scheduler')\n", (4995, 5026), False, 'from solar.orchestration import tasks\n'), ((5218, 5238), 
'solar.orchestration.graph.get_graph', 'graph.get_graph', (['uid'], {}), '(uid)\n', (5233, 5238), False, 'from solar.orchestration import graph\n'), ((5401, 5424), 'solar.orchestration.utils.write_graph', 'utils.write_graph', (['plan'], {}), '(plan)\n', (5418, 5424), False, 'from solar.orchestration import utils\n'), ((2079, 2093), 'click.echo', 'click.echo', (['""""""'], {}), "('')\n", (2089, 2093), False, 'import click\n'), ((5277, 5319), 'solar.orchestration.filters.filter', 'filters.filter', (['plan'], {'start': 'start', 'end': 'end'}), '(plan, start=start, end=end)\n', (5291, 5319), False, 'from solar.orchestration import filters\n'), ((5589, 5604), 'solar.orchestration.graph.show', 'graph.show', (['uid'], {}), '(uid)\n', (5599, 5604), False, 'from solar.orchestration import graph\n'), ((1275, 1298), 'solar.orchestration.graph.create_plan', 'graph.create_plan', (['plan'], {}), '(plan)\n', (1292, 1298), False, 'from solar.orchestration import graph\n'), ((1455, 1494), 'solar.orchestration.graph.wait_finish', 'graph.wait_finish', (['uid'], {'timeout': 'timeout'}), '(uid, timeout=timeout)\n', (1472, 1494), False, 'from solar.orchestration import graph\n'), ((2000, 2014), 'click.echo', 'click.echo', (['""""""'], {}), "('')\n", (2010, 2014), False, 'import click\n'), ((2049, 2060), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2057, 2060), False, 'import sys\n'), ((2686, 2722), 'click.style', 'click.style', (['msg'], {'fg': 'colors[item[1]]'}), '(msg, fg=colors[item[1]])\n', (2697, 2722), False, 'import click\n'), ((1680, 1721), 'click.echo', 'click.echo', (['stringified_summary'], {'nl': '(False)'}), '(stringified_summary, nl=False)\n', (1690, 1721), False, 'import click\n'), ((1738, 1756), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1754, 1756), False, 'import sys\n'), ((1941, 1961), 'time.sleep', 'time.sleep', (['interval'], {}), '(interval)\n', (1951, 1961), False, 'import time\n')]
"""Global logging helpers.""" import logging import os import sys from distutils.util import strtobool import colorama from pythonjsonlogger import jsonlogger from six import iteritems CONSOLE_FORMAT = "{}%(levelname)s:{} %(message)s" JSON_FORMAT = "(asctime) (levelname) (message)" def to_bool(string): return bool(strtobool(str(string))) def _should_do_markup(): py_colors = os.environ.get("PY_COLORS", None) if py_colors is not None: return to_bool(py_colors) return sys.stdout.isatty() and os.environ.get("TERM") != "dumb" colorama.init(autoreset=True, strip=(not _should_do_markup())) def flag_extra(extra): """Ensure extra args are prefixed.""" flagged = dict() if isinstance(extra, dict): for key, value in iteritems(extra): flagged["later_" + key] = value return flagged class LogFilter(object): """A custom log filter which excludes log messages above the logged level.""" def __init__(self, level): """ Initialize a new custom log filter. :param level: Log level limit :returns: None """ self.__level = level def filter(self, logRecord): # noqa # https://docs.python.org/3/library/logging.html#logrecord-attributes return logRecord.levelno <= self.__level class MultilineFormatter(logging.Formatter): """Logging Formatter to reset color after newline characters.""" def format(self, record): # noqa record.msg = record.msg.replace("\n", "\n{}... ".format(colorama.Style.RESET_ALL)) record.msg = record.msg + "\n" return logging.Formatter.format(self, record) class MultilineJsonFormatter(jsonlogger.JsonFormatter): """Logging Formatter to remove newline characters.""" def format(self, record): # noqa record.msg = record.msg.replace("\n", " ") return jsonlogger.JsonFormatter.format(self, record) def get_logger(name=None, level=logging.DEBUG, json=False): """ Build a logger with the given name and returns the logger. :param name: The name for the logger. This is usually the module name, `__name__`. :param level: Initialize the new logger with given log level. :param json: Boolean flag to enable json formatted log output. 
:return: logger object """ logger = logging.getLogger(name) logger.setLevel(level) logger.addHandler(_get_error_handler(json=json)) logger.addHandler(_get_warn_handler(json=json)) logger.addHandler(_get_info_handler(json=json)) logger.addHandler(_get_critical_handler(json=json)) logger.propagate = False return logger def update_logger(logger, level=None, json=None): """Update logger configuration to change logging settings.""" for handler in logger.handlers[:]: logger.removeHandler(handler) logger.setLevel(level) logger.addHandler(_get_error_handler(json=json)) logger.addHandler(_get_warn_handler(json=json)) logger.addHandler(_get_info_handler(json=json)) logger.addHandler(_get_critical_handler(json=json)) def _get_error_handler(json=False): handler = logging.StreamHandler(sys.stderr) handler.setLevel(logging.ERROR) handler.addFilter(LogFilter(logging.ERROR)) handler.setFormatter(MultilineFormatter(error(CONSOLE_FORMAT))) if json: handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT)) return handler def _get_warn_handler(json=False): handler = logging.StreamHandler(sys.stdout) handler.setLevel(logging.WARN) handler.addFilter(LogFilter(logging.WARN)) handler.setFormatter(MultilineFormatter(warn(CONSOLE_FORMAT))) if json: handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT)) return handler def _get_info_handler(json=False): handler = logging.StreamHandler(sys.stdout) handler.setLevel(logging.INFO) handler.addFilter(LogFilter(logging.INFO)) handler.setFormatter(MultilineFormatter(info(CONSOLE_FORMAT))) if json: handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT)) return handler def _get_critical_handler(json=False): handler = logging.StreamHandler(sys.stderr) handler.setLevel(logging.CRITICAL) handler.addFilter(LogFilter(logging.CRITICAL)) handler.setFormatter(MultilineFormatter(critical(CONSOLE_FORMAT))) if json: handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT)) return handler def critical(message): """Format critical messages and return string.""" return color_text(colorama.Fore.RED, message) def error(message): """Format error messages and return string.""" return color_text(colorama.Fore.RED, message) def warn(message): """Format warn messages and return string.""" return color_text(colorama.Fore.YELLOW, message) def info(message): """Format info messages and return string.""" return color_text(colorama.Fore.BLUE, message) def color_text(color, msg): """ Colorize strings. :param color: colorama color settings :param msg: string to colorize :returns: string """ msg = msg.format(colorama.Style.BRIGHT, colorama.Style.NORMAL) return "{}{}{}".format(color, msg, colorama.Style.RESET_ALL)
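# Minimal usage sketch (illustration only, not part of the original module);
# it only calls helpers defined above with their actual signatures.
if __name__ == "__main__":
    log = get_logger(name=__name__, level=logging.INFO, json=False)
    log.info("informational message")      # stdout, blue, via the INFO handler
    log.error("something went wrong")      # stderr, red, via the ERROR handler
    update_logger(log, level=logging.DEBUG, json=True)  # rebuild handlers with JSON output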
[ "logging.Formatter.format", "logging.StreamHandler", "pythonjsonlogger.jsonlogger.JsonFormatter.format", "os.environ.get", "sys.stdout.isatty", "six.iteritems", "logging.getLogger" ]
[((393, 426), 'os.environ.get', 'os.environ.get', (['"""PY_COLORS"""', 'None'], {}), "('PY_COLORS', None)\n", (407, 426), False, 'import os\n'), ((2333, 2356), 'logging.getLogger', 'logging.getLogger', (['name'], {}), '(name)\n', (2350, 2356), False, 'import logging\n'), ((3133, 3166), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stderr'], {}), '(sys.stderr)\n', (3154, 3166), False, 'import logging\n'), ((3470, 3503), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (3491, 3503), False, 'import logging\n'), ((3804, 3837), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (3825, 3837), False, 'import logging\n'), ((4142, 4175), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stderr'], {}), '(sys.stderr)\n', (4163, 4175), False, 'import logging\n'), ((503, 522), 'sys.stdout.isatty', 'sys.stdout.isatty', ([], {}), '()\n', (520, 522), False, 'import sys\n'), ((772, 788), 'six.iteritems', 'iteritems', (['extra'], {}), '(extra)\n', (781, 788), False, 'from six import iteritems\n'), ((1624, 1662), 'logging.Formatter.format', 'logging.Formatter.format', (['self', 'record'], {}), '(self, record)\n', (1648, 1662), False, 'import logging\n'), ((1884, 1929), 'pythonjsonlogger.jsonlogger.JsonFormatter.format', 'jsonlogger.JsonFormatter.format', (['self', 'record'], {}), '(self, record)\n', (1915, 1929), False, 'from pythonjsonlogger import jsonlogger\n'), ((527, 549), 'os.environ.get', 'os.environ.get', (['"""TERM"""'], {}), "('TERM')\n", (541, 549), False, 'import os\n')]
import os import joblib from a2c_ppo_acktr.multi_agent.utils import plot_statistics, plot_agent_statistics, get_remote_file, remote_listdir # SMOOTH_ARGS = { # "window_length": 53, # "polyorder": 3, # } def plot_full(statistics, max_iter=None): plot_statistics(statistics, "reward", max_iter=max_iter) # plot_statistics(statistics, "reward_prediction_loss", max_iter=max_iter) plot_statistics(statistics, "efficiency", max_iter=max_iter) # plot_statistics(statistics, "grad_norm", max_iter=max_iter) # plot_statistics(statistics, "value_loss", max_iter=max_iter) # plot_statistics(statistics, "action_loss", max_iter=max_iter) # plot_statistics(statistics, "dist_entropy", max_iter=max_iter) # plot_statistics(statistics, "dist_penalty", max_iter=max_iter) def plot_agent(statistics): plot_agent_statistics(statistics, "reward") # plot_agent_statistics(statistics, "dist_entropy") # plot_agent_statistics(statistics, "efficiency") # plot_agent_statistics(statistics, "grad_norm") # plot_agent_statistics(statistics, "value_loss") plot_agent_statistics(statistics, "likelihood") # plot_agent_statistics(statistics, "action_loss") # plot_agent_statistics(statistics, "dvd_loss") # plot_agent_statistics(statistics, "pd") def get_statistics(path, remote): path = os.path.join(path, "statistics.obj") if remote: path = get_remote_file(path) if path is not None: return joblib.load(path) else: return None def plot_single(path, agent, remote, max_iter=None): if agent is not None: path = os.path.join(path, str(agent)) statistics = get_statistics(path, remote) if agent is not None: plot_agent(statistics) else: plot_full(statistics, max_iter=max_iter) def plot_all(path, agent, remote): assert agent is None statistics = [] if not remote: folders = os.listdir(path) else: folders = remote_listdir(path) # print(folders) # return for folder in folders: _path = os.path.join(path, folder) if agent is not None: _path = os.path.join(_path, str(agent)) _statistics = get_statistics(_path, remote) if _statistics is not None: statistics.append(_statistics) if agent is not None: plot_agent(statistics) else: plot_full(statistics) if __name__ == "__main__": plot_single("./sync-results/walker2d/parallel/2021-08-08T23:36:03.452837#90bb/copy-0", "0", False, max_iter=None) # plot_all("./sync-results/escalation-gw/1-prediction-new-2", None, True)
[ "a2c_ppo_acktr.multi_agent.utils.plot_statistics", "a2c_ppo_acktr.multi_agent.utils.plot_agent_statistics", "a2c_ppo_acktr.multi_agent.utils.remote_listdir", "joblib.load", "a2c_ppo_acktr.multi_agent.utils.get_remote_file", "os.path.join", "os.listdir" ]
[((261, 317), 'a2c_ppo_acktr.multi_agent.utils.plot_statistics', 'plot_statistics', (['statistics', '"""reward"""'], {'max_iter': 'max_iter'}), "(statistics, 'reward', max_iter=max_iter)\n", (276, 317), False, 'from a2c_ppo_acktr.multi_agent.utils import plot_statistics, plot_agent_statistics, get_remote_file, remote_listdir\n'), ((401, 461), 'a2c_ppo_acktr.multi_agent.utils.plot_statistics', 'plot_statistics', (['statistics', '"""efficiency"""'], {'max_iter': 'max_iter'}), "(statistics, 'efficiency', max_iter=max_iter)\n", (416, 461), False, 'from a2c_ppo_acktr.multi_agent.utils import plot_statistics, plot_agent_statistics, get_remote_file, remote_listdir\n'), ((835, 878), 'a2c_ppo_acktr.multi_agent.utils.plot_agent_statistics', 'plot_agent_statistics', (['statistics', '"""reward"""'], {}), "(statistics, 'reward')\n", (856, 878), False, 'from a2c_ppo_acktr.multi_agent.utils import plot_statistics, plot_agent_statistics, get_remote_file, remote_listdir\n'), ((1100, 1147), 'a2c_ppo_acktr.multi_agent.utils.plot_agent_statistics', 'plot_agent_statistics', (['statistics', '"""likelihood"""'], {}), "(statistics, 'likelihood')\n", (1121, 1147), False, 'from a2c_ppo_acktr.multi_agent.utils import plot_statistics, plot_agent_statistics, get_remote_file, remote_listdir\n'), ((1348, 1384), 'os.path.join', 'os.path.join', (['path', '"""statistics.obj"""'], {}), "(path, 'statistics.obj')\n", (1360, 1384), False, 'import os\n'), ((1415, 1436), 'a2c_ppo_acktr.multi_agent.utils.get_remote_file', 'get_remote_file', (['path'], {}), '(path)\n', (1430, 1436), False, 'from a2c_ppo_acktr.multi_agent.utils import plot_statistics, plot_agent_statistics, get_remote_file, remote_listdir\n'), ((1477, 1494), 'joblib.load', 'joblib.load', (['path'], {}), '(path)\n', (1488, 1494), False, 'import joblib\n'), ((1945, 1961), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (1955, 1961), False, 'import os\n'), ((1990, 2010), 'a2c_ppo_acktr.multi_agent.utils.remote_listdir', 'remote_listdir', (['path'], {}), '(path)\n', (2004, 2010), False, 'from a2c_ppo_acktr.multi_agent.utils import plot_statistics, plot_agent_statistics, get_remote_file, remote_listdir\n'), ((2088, 2114), 'os.path.join', 'os.path.join', (['path', 'folder'], {}), '(path, folder)\n', (2100, 2114), False, 'import os\n')]
import copy import logging import random from couchbase.bucket import Bucket from couchbase_helper.data import FIRST_NAMES, COUNTRIES from couchbase_helper.documentgenerator import DocumentGenerator from couchbase_helper.tuq_generators import TuqGenerators from couchbase_helper.query_definitions import QueryDefinition, SQLDefinitionGenerator from remote.remote_util import RemoteMachineShellConnection from membase.api.rest_client import RestConnection, RestHelper from upgrade_2i import UpgradeSecondaryIndex from base_2i import BaseSecondaryIndexingTests log = logging.getLogger(__name__) INT64_VALUES = [-9223372036854775808, 9223372036854775807, 9223372036854770000, 9000000000000000000, -9000000000000000000, -9223372036854770000, 5464748874972.17865, -5464748874972.17865, -2147483648, 2147483647, -2147483600, 2147483600, 32767, -32768, 30000, -30000, 100, -100, 0, 110000000003421999, 9223372036852775807, 9007199254740991] UPGRADE_VERS = ["5.0.0", "5.0.1", "5.1.0"] class UpgradeSecondaryIndexInt64(UpgradeSecondaryIndex): def setUp(self): super(UpgradeSecondaryIndexInt64, self).setUp() self.disable_plasma_upgrade = self.input.param("disable_plasma_upgrade", False) self.rebalance_empty_node = self.input.param("rebalance_empty_node", True) self.initial_version = self.input.param('initial_version', '4.6.0-3653') self.post_upgrade_gsi_type = self.input.param('post_upgrade_gsi_type', 'memory_optimized') self.upgrade_to = self.input.param("upgrade_to") self.int64_verify_results = self.input.param("int64_verify_results", False) self.index_batch_size = self.input.param("index_batch_size", -1) self.query_results = {} self._create_int64_dataset() query_definition_generator = QueryDefs() self.query_definitions = query_definition_generator.generate_query_definition_for_aggr_data() self.query_definitions = query_definition_generator.filter_by_group(self.groups, self.query_definitions) self._create_indexes() self._query_index("pre_upgrade") def tearDown(self): super(UpgradeSecondaryIndexInt64, self).tearDown() def test_offline_upgrade(self): upgrade_nodes = self.servers[:self.nodes_init] if self.disable_plasma_upgrade: self._install(self.nodes_in_list, version=self.upgrade_to) rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], [self.nodes_in_list[0]], [], services=["index"]) rebalance.result() self.sleep(100) self.disable_upgrade_to_plasma(self.nodes_in_list[0]) for server in upgrade_nodes: remote = RemoteMachineShellConnection(server) remote.stop_server() remote.disconnect() upgrade_threads = self._async_update(self.upgrade_to, [server]) for upgrade_thread in upgrade_threads: upgrade_thread.join() self.upgrade_servers.append(server) self.sleep(100) msg = "Cluster is not healthy after upgrade" self.assertTrue(self.wait_until_cluster_is_healthy(), msg) log.info("Cluster is healthy") self.assertTrue(self.wait_until_indexes_online(), "Some indexes are not online") log.info("All indexes are online") self.add_built_in_server_user() self.sleep(20) if self.initial_version.split("-")[0] in UPGRADE_VERS: self.multi_drop_index() self.sleep(100) self._create_indexes() self.sleep(100) self._query_index("post_upgrade") self._verify_post_upgrade_results() self._update_int64_dataset() self._query_for_long_num() def test_online_upgrade_with_failover(self): upgrade_nodes = self.servers[:self.nodes_init] if self.disable_plasma_upgrade: self._install(self.nodes_in_list, version=self.upgrade_to) rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init], [self.nodes_in_list[0]], [], 
services=["index"]) rebalance.result() self.sleep(100) self.disable_upgrade_to_plasma(self.nodes_in_list[0]) for node in upgrade_nodes: node_rest = RestConnection(node) node_info = "{0}:{1}".format(node.ip, node.port) node_services_list = node_rest.get_nodes_services()[node_info] if "index" in node_services_list: self._create_equivalent_indexes(node) failover_task = self.cluster.async_failover([self.master], failover_nodes=[node], graceful=False) failover_task.result() self.sleep(100) log.info("Node Failed over...") upgrade_th = self._async_update(self.upgrade_to, [node]) for th in upgrade_th: th.join() log.info("==== Upgrade Complete ====") self.sleep(120) rest = RestConnection(self.master) nodes_all = rest.node_statuses() for cluster_node in nodes_all: if cluster_node.ip == node.ip: log.info("Adding Back: {0}".format(node)) rest.add_back_node(cluster_node.id) rest.set_recovery_type(otpNode=cluster_node.id, recoveryType="full") log.info("Adding node back to cluster...") active_nodes = [srvr for srvr in self.servers if srvr.ip != node.ip] rebalance = self.cluster.async_rebalance(active_nodes, [], []) rebalance.result() self.sleep(100) self._remove_equivalent_indexes(node) self.sleep(60) msg = "Cluster is not healthy after upgrade" self.assertTrue(self.wait_until_cluster_is_healthy(), msg) log.info("Cluster is healthy") self.add_built_in_server_user() self.sleep(20) if self.initial_version.split("-")[0] in UPGRADE_VERS: self.multi_drop_index() self.sleep(100) self._create_indexes() self.sleep(100) self.assertTrue(self.wait_until_indexes_online(), "Some indexes are not online") log.info("All indexes are online") self._query_index("post_upgrade") self._verify_post_upgrade_results() self._update_int64_dataset() self._query_for_long_num() def test_online_upgrade_with_rebalance(self): upgrade_nodes = self.servers[:self.nodes_init] if self.disable_plasma_upgrade: self._install(self.nodes_in_list, version=self.upgrade_to) rebalance = self.cluster.async_rebalance( self.servers[:self.nodes_init], [self.nodes_in_list[0]], [], services=["index"]) rebalance.result() self.sleep(100) self.disable_upgrade_to_plasma(self.nodes_in_list[0]) for node in upgrade_nodes: node_rest = RestConnection(node) node_info = "{0}:{1}".format(node.ip, node.port) node_services_list = node_rest.get_nodes_services()[node_info] node_services = [",".join(node_services_list)] log.info("Rebalancing the node out...") rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init],[], [node]) rebalance.result() self.sleep(100) active_nodes = [srvr for srvr in self.servers if srvr.ip != node.ip] log.info("Upgrading the node...") upgrade_th = self._async_update(self.upgrade_to, [node]) for th in upgrade_th: th.join() self.sleep(120) log.info("==== Upgrade Complete ====") log.info("Adding node back to cluster...") rebalance = self.cluster.async_rebalance(active_nodes, [node], [], services=node_services) rebalance.result() self.sleep(100) node_version = RestConnection(node).get_nodes_versions() log.info("{0} node Upgraded to: {1}".format(node.ip, node_version)) msg = "Cluster is not healthy after upgrade" self.assertTrue(self.wait_until_cluster_is_healthy(), msg) log.info("Cluster is healthy") self.add_built_in_server_user() self.sleep(20) if self.initial_version.split("-")[0] in UPGRADE_VERS: self.multi_drop_index() self.sleep(100) self._create_indexes() self.sleep(100) else: self._create_indexes() self.sleep(100) self.assertTrue(self.wait_until_indexes_online(), "Some indexes are not online") log.info("All indexes are 
online") self._query_index("post_upgrade") self._verify_post_upgrade_results() self._update_int64_dataset() self._query_for_long_num() def test_online_upgrade_with_swap_rebalance(self): """ :return: """ old_servers = self.servers[:self.nodes_init] if self.disable_plasma_upgrade: self._install(self.nodes_in_list, version=self.upgrade_to) rebalance = self.cluster.async_rebalance( self.servers[:self.nodes_init], [self.nodes_in_list[1]], [], services=["index"]) rebalance.result() self.sleep(100) self.disable_upgrade_to_plasma(self.nodes_in_list[1]) old_servers.append(self.nodes_in_list[1]) log.info("Swapping servers...") service_map = self.get_nodes_services() i = 0 swap_server = self.nodes_in_list[0] upgrade_nodes_list = self.servers[:self.nodes_init] for node in upgrade_nodes_list: self._install([swap_server], version=self.upgrade_to) service_on_node = service_map[node.ip] log.info("Swapping %s with %s with %s services" % (node,swap_server,service_on_node)) servers = old_servers rebalance = self.cluster.async_rebalance(servers, [swap_server], [node], services=[service_on_node]) rebalance.result() self.sleep(30) i += 1 old_servers.append(swap_server) old_servers.remove(node) if (self.master not in old_servers) and ("kv" in service_on_node): self.master = swap_server swap_server = node self.n1ql_node = self.get_nodes_from_services_map( service_type="n1ql",master=old_servers[0]) log.info("Master : %s",self.master) log.info("===== Nodes Swapped with Upgraded versions =====") self.upgrade_servers = self.nodes_in_list if self.initial_version.split("-")[0] in UPGRADE_VERS: self.multi_drop_index() self.sleep(100) self._create_indexes() self.sleep(100) else: self._create_indexes() self.sleep(100) msg = "Cluster is not healthy after upgrade" self.assertTrue(self.wait_until_cluster_is_healthy(), msg) log.info("Cluster is healthy") self.add_built_in_server_user() self.sleep(20) self.assertTrue(self.wait_until_indexes_online(), "Some indexes are not online") log.info("All indexes are online") self._query_index("post_upgrade") self._verify_post_upgrade_results() self._update_int64_dataset() self._query_for_long_num() def test_rolling_upgrade(self): upgrade_versions = ["5.0.0-5003", self.upgrade_to] for ver in upgrade_versions: for server in self.servers: remote = RemoteMachineShellConnection(server) remote.stop_server() remote.disconnect() self.upgrade_servers.append(server) upgrade_threads = self._async_update(self.upgrade_to, self.servers) for upgrade_thread in upgrade_threads: upgrade_thread.join() self.sleep(120) self.add_built_in_server_user() self.sleep(20) if ver.startswith("5.5") or ver.startswith("5.1"): self.multi_drop_index() self.sleep(100) self._create_indexes() self.sleep(100) msg = "Cluster is not healthy after upgrade" self.assertTrue(self.wait_until_cluster_is_healthy(), msg) log.info("Cluster is healthy") self.add_built_in_server_user() self.sleep(20) self.assertTrue(self.wait_until_indexes_online(), "Some indexes are not online") log.info("All indexes are online") self._query_index("post_upgrade") if ver == self.upgrade_to: self._verify_post_upgrade_results() self._update_int64_dataset() self._query_for_long_num() def test_online_upgrade_with_rebalance_stats(self): upgrade_nodes = self.servers[:self.nodes_init] create_index_query1 = "CREATE INDEX idx ON default(name) USING GSI WITH {'nodes': ['%s:%s']}" % (self.servers[1].ip,self.servers[1].port) self.n1ql_helper.run_cbq_query(query=create_index_query1, server=self.n1ql_node) if self.disable_plasma_upgrade: 
self._install(self.nodes_in_list, version=self.upgrade_to) rebalance = self.cluster.async_rebalance( self.servers[:self.nodes_init], [self.nodes_in_list[0]], [], services=["index"]) rebalance.result() self.sleep(30) self.disable_upgrade_to_plasma(self.nodes_in_list[0]) for node in upgrade_nodes: node_rest = RestConnection(node) node_info = "{0}:{1}".format(node.ip, node.port) node_services_list = node_rest.get_nodes_services()[node_info] node_services = [",".join(node_services_list)] log.info("Rebalancing the node out...") rebalance = self.cluster.async_rebalance(self.servers[:self.nodes_init],[], [node]) rebalance.result() self.sleep(30) active_nodes = [srvr for srvr in self.servers if srvr.ip != node.ip] log.info("Upgrading the node...") upgrade_th = self._async_update(self.upgrade_to, [node]) for th in upgrade_th: th.join() self.sleep(60) log.info("==== Upgrade Complete ====") log.info("Adding node back to cluster...") rebalance = self.cluster.async_rebalance(active_nodes, [node], [], services=node_services) rebalance.result() self.sleep(30) node_version = RestConnection(node).get_nodes_versions() log.info("{0} node Upgraded to: {1}".format(node.ip, node_version)) msg = "Cluster is not healthy after upgrade" self.assertTrue(self.wait_until_cluster_is_healthy(), msg) log.info("Cluster is healthy") self.add_built_in_server_user() self.sleep(20) self.assertTrue(self.wait_until_indexes_online(), "Some indexes are not online") log.info("All indexes are online") index_map = self.get_index_stats() self.log.info(index_map) for index in index_map['default']: self.assertTrue("key_size_distribution" in str(index_map['default'][index])) self.rest.flush_bucket("default") self.sleep(60) string_70 = "x" * 70 string_3000 = "x" * 3000 string_103000 = "x" * 103000 insert_query1 = 'INSERT INTO default (KEY, VALUE) VALUES ("id1", { "name" : "%s" })' % string_70 insert_query2 = 'INSERT INTO default (KEY, VALUE) VALUES ("id2", { "name" : {"name": "%s", "fake": "%s"} })' % ( string_70, string_3000) insert_query5 = 'INSERT INTO default (KEY, VALUE) VALUES ("id5", { "name" : "%s" })' % string_103000 self.n1ql_helper.run_cbq_query(query=insert_query1, server=self.n1ql_node) self.n1ql_helper.run_cbq_query(query=insert_query2, server=self.n1ql_node) self.n1ql_helper.run_cbq_query(query=insert_query5, server=self.n1ql_node) index_map = self.get_index_stats() self.log.info(index_map) for index in index_map['default']: if index == 'idx': self.log.info(index_map['default'][index]['key_size_distribution']) self.assertTrue(str(index_map['default'][index]['key_size_distribution']) == "{u'(0-64)': 0, u'(257-1024)': 0, u'(65-256)': 1, u'(4097-102400)': 0, u'(1025-4096)': 1, u'(102401-max)': 1}") def _create_int64_dataset(self): generators = [] document_template = '{{"name":"{0}", "int_num": {1}, "long_num":{2}, "long_num_partial":{3}, "long_arr": {4},' \ '"int_arr": {5}}}' num_items = len(self.full_docs_list) for i in range(num_items): name = random.choice(FIRST_NAMES) int_num = random.randint(-100, 100) long_num = random.choice(INT64_VALUES) long_arr = [random.choice(INT64_VALUES) for i in range(10)] int_arr = [random.randint(-100, 100) for i in range(10)] doc_id = "int64_" + str(random.random()*100000) generators.append(DocumentGenerator(doc_id, document_template, [name], [int_num], [long_num], [long_num], [long_arr], [int_arr], start=0, end=1)) self.load(generators, buckets=self.buckets, flag=self.item_flag, verify_data=False, batch_size=self.batch_size) self.full_docs_list = self.generate_full_docs_list(generators) 
self.gen_results = TuqGenerators(self.log, self.full_docs_list) def _update_int64_dataset(self): testuser = [] rolelist = [] for bucket in self.buckets: testuser.append({'id': bucket.name, 'name': bucket.name, 'password': 'password'}) rolelist.append({'id': bucket.name, 'name': bucket.name, 'roles': 'admin'}) self.add_built_in_server_user(testuser=testuser, rolelist=rolelist) for doc in self.full_docs_list: doc["name"] = random.choice(FIRST_NAMES) self._update_document(doc["_id"], doc) def _update_document(self, key, document): url = 'couchbase://{ip}/default'.format(ip=self.master.ip) if self.upgrade_to.startswith("4"): bucket = Bucket(url) else: bucket = Bucket(url, username="default", password="password") bucket.upsert(key, document) def _query_for_long_num(self): wrong_results = [] for query_def in self.query_definitions: if query_def.index_name.endswith("_long_num"): query_def.query_template.extend("select long_num from default use index ({0}) where long_num = " + str(t) for t in INT64_VALUES) query_def.query_template.extend("select long_num from default use index ({0}) where long_num > " + str(t) for t in INT64_VALUES) query_def.query_template.extend("select long_num from default use index ({0}) where long_num > " + str(t-1) + " and long_num < " + str(t+1) for t in INT64_VALUES) query_def.query_template.extend("select long_num from default use index ({0}) where long_num between " + str(t-1) + " and " + str(t+1) for t in INT64_VALUES) for query in query_def.query_template: query = query.format(query_def.index_name) if " = " in query: if_cond = 'num["long_num"]' + query.split("where long_num")[1].replace("=", "==") elif ">" in query: if_cond = query.split("where ")[1].replace("long_num", "num['long_num']") elif "between" in query: if_cond = 'num["long_num"] > ' + query.split("where long_num between ")[1].split("and")[0] \ + ' and num["long_num"] < ' + query.split("where long_num between ")[1].split("and")[1] elif "null" in query: if_cond = query.split("where ")[1].replace("long_num", "num['long_num']").replace("null", "None") self.gen_results.query = query expected_result = [{"long_num": num["long_num"]} for num in self.full_docs_list if eval(if_cond)] msg, check = self.n1ql_helper.run_query_and_verify_result(query=query, server=self.n1ql_node, timeout=500, expected_result=expected_result, scan_consistency="request_plus", verify_results=self.int64_verify_results) if not check: wrong_results.append(query) self.assertEqual(len(wrong_results), 0, str(wrong_results)) def _create_indexes(self): for query_def in self.query_definitions: for bucket in self.buckets: self.create_index(bucket.name, query_def) def _query_index(self, phase): if phase not in self.query_results.keys(): self.query_results[phase] = {} query_results = {} for query_def in self.query_definitions: if query_def.index_name not in query_results.keys(): query_results[query_def.index_name] = [] for query in query_def.query_template: query = query.format(query_def.index_name) results = self.n1ql_helper.run_cbq_query(query=query, server=self.n1ql_node) query_results[query_def.index_name].append(results["results"]) self.query_results[phase] = query_results def _verify_post_upgrade_results(self): wrong_results = {} for query_def in self.query_definitions: index_name = query_def.index_name if "long_num" in index_name or "long_arr" in index_name: continue elif not index_name.endswith("_long_num_name"): for i in range(len(self.query_results["pre_upgrade"][index_name])): if sorted(self.query_results["pre_upgrade"][index_name][i]) != 
                        sorted(self.query_results["post_upgrade"][index_name][i]):
                    if index_name not in wrong_results.keys():
                        # take a copy so the extend() below cannot mutate the shared template list
                        wrong_results[index_name] = list(query_def.query_template)
                    else:
                        wrong_results[index_name].extend(query_def.query_template)
        self.assertEqual(len(wrong_results), 0, str(wrong_results))


class SecondaryIndexIndexInt64(BaseSecondaryIndexingTests):
    def setUp(self):
        super(SecondaryIndexIndexInt64, self).setUp()
        self.query_results = {}
        self._create_int64_dataset()
        query_definition_generator = QueryDefs()
        self.query_definitions = query_definition_generator.generate_query_definition_for_aggr_data()
        self.query_definitions = query_definition_generator.filter_by_group(self.groups, self.query_definitions)

    def tearDown(self):
        super(SecondaryIndexIndexInt64, self).tearDown()

    def test_fresh_install_int64(self):
        for query_def in self.query_definitions:
            for bucket in self.buckets:
                self.create_index(bucket.name, query_def)
        for query_def in self.query_definitions:
            for query in query_def.query_template:
                index_query = query.format(query_def.index_name)
                query_results = self.n1ql_helper.run_cbq_query(query=index_query, server=self.n1ql_node)
                primary_query = query.format("#primary")
                primary_results = self.n1ql_helper.run_cbq_query(query=primary_query, server=self.n1ql_node)
                self._verify_aggregate_pushdown_results(query_def.index_name, index_query,
                                                        query_results, primary_results)

    def _verify_aggregate_pushdown_results(self, index_name, query, query_results, primary_results):
        wrong_results = {}
        if "long_num" in index_name or "long_arr" in index_name or not index_name.endswith("_long_num_name"):
            return
        if sorted(query_results) != sorted(primary_results):
            if index_name not in wrong_results.keys():
                wrong_results[index_name] = [query]
            else:
                wrong_results[index_name].append(query)
        self.assertEqual(len(wrong_results), 0, str(wrong_results))


class QueryDefs(SQLDefinitionGenerator):
    def generate_query_definition_for_aggr_data(self):
        definitions_list = []
        index_name_prefix = "int_64_" + str(random.randint(100000, 999999))
        # simple index on string
        definitions_list.append(
            QueryDefinition(index_name=index_name_prefix + "_name", index_fields=["name"],
                            query_template=["SELECT name FROM default use index ({0}) where name = 'Ciara'",
                                            "SELECT name FROM default use index ({0}) where name > 'Ciara'",
                                            "SELECT name FROM default where name is not null"],
                            groups=["all", "simple_index"]))
        definitions_list.append(
            QueryDefinition(index_name=index_name_prefix + "_long_num", index_fields=["long_num"],
                            query_template=["SELECT long_num FROM default use index ({0}) where long_num = 2147483600",
                                            "SELECT long_num FROM default use index ({0}) where long_num > 2147483600",
                                            "SELECT long_num FROM default use index ({0}) where long_num > 2147483599 and long_num < 2147483601",
                                            "SELECT long_num FROM default where long_num is not null"],
                            groups=["all", "simple_index"]))
        definitions_list.append(
            QueryDefinition(index_name=index_name_prefix + "_long_num_partial", index_fields=["long_num_partial"],
                            query_template=["SELECT long_num_partial FROM default use index ({0}) where long_num = 2147483600",
                                            "SELECT long_num_partial FROM default use index ({0}) where long_num > 2147483600",
                                            "SELECT long_num_partial FROM default use index ({0}) where (long_num % 10 != 0)",
                                            "SELECT long_num_partial FROM default where long_num is not null"],
                            groups=["all", "simple_index"], index_where_clause=" long_num > 20 "))
        definitions_list.append(
            QueryDefinition(index_name=index_name_prefix + "_long_arr",
                            index_fields=["ALL ARRAY t FOR t in `long_arr` END"],
                            query_template=["SELECT t FROM default use index ({0}) where any t in `long_arr` satisfies t = 2147483600 END",
                                            "SELECT t FROM default use index ({0}) where any t in `long_arr` satisfies t > 2147483600 END",
                                            "SELECT t FROM default use index ({0}) where any t in `long_arr` satisfies t is not null END"],
                            groups=["all", "simple_index"]))
        definitions_list.append(
            QueryDefinition(index_name=index_name_prefix + "_long_num_name", index_fields=["long_num", "name"],
                            query_template=[
                                # Commented out because of MB-30207
                                # "SELECT sum(long_num) as long_num, name FROM default use index ({0}) where long_num > 2147483600 group by name",
                                "SELECT min(long_num) as long_num, name FROM default use index ({0}) where long_num > 2147483600 group by name",
                                "SELECT long_num, name FROM default use index ({0}) where long_num > 2147483600"],
                            groups=["all", "simple_index"]))
        return definitions_list
[ "couchbase.bucket.Bucket", "random.randint", "random.choice", "random.random", "remote.remote_util.RemoteMachineShellConnection", "membase.api.rest_client.RestConnection", "couchbase_helper.tuq_generators.TuqGenerators", "couchbase_helper.query_definitions.QueryDefinition", "logging.getLogger", "couchbase_helper.documentgenerator.DocumentGenerator" ]
[((567, 594), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (584, 594), False, 'import logging\n'), ((18484, 18528), 'couchbase_helper.tuq_generators.TuqGenerators', 'TuqGenerators', (['self.log', 'self.full_docs_list'], {}), '(self.log, self.full_docs_list)\n', (18497, 18528), False, 'from couchbase_helper.tuq_generators import TuqGenerators\n'), ((2842, 2878), 'remote.remote_util.RemoteMachineShellConnection', 'RemoteMachineShellConnection', (['server'], {}), '(server)\n', (2870, 2878), False, 'from remote.remote_util import RemoteMachineShellConnection\n'), ((4523, 4543), 'membase.api.rest_client.RestConnection', 'RestConnection', (['node'], {}), '(node)\n', (4537, 4543), False, 'from membase.api.rest_client import RestConnection, RestHelper\n'), ((5224, 5251), 'membase.api.rest_client.RestConnection', 'RestConnection', (['self.master'], {}), '(self.master)\n', (5238, 5251), False, 'from membase.api.rest_client import RestConnection, RestHelper\n'), ((7227, 7247), 'membase.api.rest_client.RestConnection', 'RestConnection', (['node'], {}), '(node)\n', (7241, 7247), False, 'from membase.api.rest_client import RestConnection, RestHelper\n'), ((14268, 14288), 'membase.api.rest_client.RestConnection', 'RestConnection', (['node'], {}), '(node)\n', (14282, 14288), False, 'from membase.api.rest_client import RestConnection, RestHelper\n'), ((17715, 17741), 'random.choice', 'random.choice', (['FIRST_NAMES'], {}), '(FIRST_NAMES)\n', (17728, 17741), False, 'import random\n'), ((17764, 17789), 'random.randint', 'random.randint', (['(-100)', '(100)'], {}), '(-100, 100)\n', (17778, 17789), False, 'import random\n'), ((17813, 17840), 'random.choice', 'random.choice', (['INT64_VALUES'], {}), '(INT64_VALUES)\n', (17826, 17840), False, 'import random\n'), ((18971, 18997), 'random.choice', 'random.choice', (['FIRST_NAMES'], {}), '(FIRST_NAMES)\n', (18984, 18997), False, 'import random\n'), ((19229, 19240), 'couchbase.bucket.Bucket', 'Bucket', (['url'], {}), '(url)\n', (19235, 19240), False, 'from couchbase.bucket import Bucket\n'), ((19276, 19328), 'couchbase.bucket.Bucket', 'Bucket', (['url'], {'username': '"""default"""', 'password': '"""password"""'}), "(url, username='default', password='password')\n", (19282, 19328), False, 'from couchbase.bucket import Bucket\n'), ((25809, 26139), 'couchbase_helper.query_definitions.QueryDefinition', 'QueryDefinition', ([], {'index_name': "(index_name_prefix + '_name')", 'index_fields': "['name']", 'query_template': '["SELECT name FROM default use index ({0}) where name = \'Ciara\'",\n "SELECT name FROM default use index ({0}) where name > \'Ciara\'",\n \'SELECT name FROM default where name is not null\']', 'groups': "['all', 'simple_index']"}), '(index_name=index_name_prefix + \'_name\', index_fields=[\n \'name\'], query_template=[\n "SELECT name FROM default use index ({0}) where name = \'Ciara\'",\n "SELECT name FROM default use index ({0}) where name > \'Ciara\'",\n \'SELECT name FROM default where name is not null\'], groups=[\'all\',\n \'simple_index\'])\n', (25824, 26139), False, 'from couchbase_helper.query_definitions import QueryDefinition, SQLDefinitionGenerator\n'), ((26336, 26812), 'couchbase_helper.query_definitions.QueryDefinition', 'QueryDefinition', ([], {'index_name': "(index_name_prefix + '_long_num')", 'index_fields': "['long_num']", 'query_template': "['SELECT long_num FROM default use index ({0}) where long_num = 2147483600',\n 'SELECT long_num FROM default use index ({0}) where long_num > 2147483600',\n 'SELECT 
long_num FROM default use index ({0}) where long_num > 2147483599 and long_num < 2147483601'\n , 'SELECT long_num FROM default where long_num is not null']", 'groups': "['all', 'simple_index']"}), "(index_name=index_name_prefix + '_long_num', index_fields=[\n 'long_num'], query_template=[\n 'SELECT long_num FROM default use index ({0}) where long_num = 2147483600',\n 'SELECT long_num FROM default use index ({0}) where long_num > 2147483600',\n 'SELECT long_num FROM default use index ({0}) where long_num > 2147483599 and long_num < 2147483601'\n , 'SELECT long_num FROM default where long_num is not null'], groups=[\n 'all', 'simple_index'])\n", (26351, 26812), False, 'from couchbase_helper.query_definitions import QueryDefinition, SQLDefinitionGenerator\n'), ((27047, 27590), 'couchbase_helper.query_definitions.QueryDefinition', 'QueryDefinition', ([], {'index_name': "(index_name_prefix + '_long_num_partial')", 'index_fields': "['long_num_partial']", 'query_template': "['SELECT long_num_partial FROM default use index ({0}) where long_num = 2147483600'\n ,\n 'SELECT long_num_partial FROM default use index ({0}) where long_num > 2147483600'\n ,\n 'SELECT long_num_partial FROM default use index ({0}) where (long_num % 10 != 0)'\n , 'SELECT long_num_partial FROM default where long_num is not null']", 'groups': "['all', 'simple_index']", 'index_where_clause': '""" long_num > 20 """'}), "(index_name=index_name_prefix + '_long_num_partial',\n index_fields=['long_num_partial'], query_template=[\n 'SELECT long_num_partial FROM default use index ({0}) where long_num = 2147483600'\n ,\n 'SELECT long_num_partial FROM default use index ({0}) where long_num > 2147483600'\n ,\n 'SELECT long_num_partial FROM default use index ({0}) where (long_num % 10 != 0)'\n , 'SELECT long_num_partial FROM default where long_num is not null'],\n groups=['all', 'simple_index'], index_where_clause=' long_num > 20 ')\n", (27062, 27590), False, 'from couchbase_helper.query_definitions import QueryDefinition, SQLDefinitionGenerator\n'), ((27845, 28327), 'couchbase_helper.query_definitions.QueryDefinition', 'QueryDefinition', ([], {'index_name': "(index_name_prefix + '_long_arr')", 'index_fields': "['ALL ARRAY t FOR t in `long_arr` END']", 'query_template': "['SELECT t FROM default use index ({0}) where any t in `long_arr` satisfies t = 2147483600 END'\n ,\n 'SELECT t FROM default use index ({0}) where any t in `long_arr` satisfies t > 2147483600 END'\n ,\n 'SELECT t FROM default use index ({0}) where any t in `long_arr` satisfies t is not null END'\n ]", 'groups': "['all', 'simple_index']"}), "(index_name=index_name_prefix + '_long_arr', index_fields=[\n 'ALL ARRAY t FOR t in `long_arr` END'], query_template=[\n 'SELECT t FROM default use index ({0}) where any t in `long_arr` satisfies t = 2147483600 END'\n ,\n 'SELECT t FROM default use index ({0}) where any t in `long_arr` satisfies t > 2147483600 END'\n ,\n 'SELECT t FROM default use index ({0}) where any t in `long_arr` satisfies t is not null END'\n ], groups=['all', 'simple_index'])\n", (27860, 28327), False, 'from couchbase_helper.query_definitions import QueryDefinition, SQLDefinitionGenerator\n'), ((28513, 28879), 'couchbase_helper.query_definitions.QueryDefinition', 'QueryDefinition', ([], {'index_name': "(index_name_prefix + '_long_num_name')", 'index_fields': "['long_num', 'name']", 'query_template': "['SELECT min(long_num) as long_num, name FROM default use index ({0}) where long_num > 2147483600 group by name'\n ,\n 'SELECT long_num, name FROM default use index 
({0}) where long_num > 2147483600'\n ]", 'groups': "['all', 'simple_index']"}), "(index_name=index_name_prefix + '_long_num_name',\n index_fields=['long_num', 'name'], query_template=[\n 'SELECT min(long_num) as long_num, name FROM default use index ({0}) where long_num > 2147483600 group by name'\n ,\n 'SELECT long_num, name FROM default use index ({0}) where long_num > 2147483600'\n ], groups=['all', 'simple_index'])\n", (28528, 28879), False, 'from couchbase_helper.query_definitions import QueryDefinition, SQLDefinitionGenerator\n'), ((12161, 12197), 'remote.remote_util.RemoteMachineShellConnection', 'RemoteMachineShellConnection', (['server'], {}), '(server)\n', (12189, 12197), False, 'from remote.remote_util import RemoteMachineShellConnection\n'), ((17865, 17892), 'random.choice', 'random.choice', (['INT64_VALUES'], {}), '(INT64_VALUES)\n', (17878, 17892), False, 'import random\n'), ((17936, 17961), 'random.randint', 'random.randint', (['(-100)', '(100)'], {}), '(-100, 100)\n', (17950, 17961), False, 'import random\n'), ((18072, 18202), 'couchbase_helper.documentgenerator.DocumentGenerator', 'DocumentGenerator', (['doc_id', 'document_template', '[name]', '[int_num]', '[long_num]', '[long_num]', '[long_arr]', '[int_arr]'], {'start': '(0)', 'end': '(1)'}), '(doc_id, document_template, [name], [int_num], [long_num],\n [long_num], [long_arr], [int_arr], start=0, end=1)\n', (18089, 18202), False, 'from couchbase_helper.documentgenerator import DocumentGenerator\n'), ((25699, 25729), 'random.randint', 'random.randint', (['(100000)', '(999999)'], {}), '(100000, 999999)\n', (25713, 25729), False, 'import random\n'), ((8320, 8340), 'membase.api.rest_client.RestConnection', 'RestConnection', (['node'], {}), '(node)\n', (8334, 8340), False, 'from membase.api.rest_client import RestConnection, RestHelper\n'), ((15358, 15378), 'membase.api.rest_client.RestConnection', 'RestConnection', (['node'], {}), '(node)\n', (15372, 15378), False, 'from membase.api.rest_client import RestConnection, RestHelper\n'), ((18018, 18033), 'random.random', 'random.random', ([], {}), '()\n', (18031, 18033), False, 'import random\n')]
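The query templates in the record above keep the index name as a `{0}` placeholder, which the test fills in twice — once with the secondary index name and once with `#primary` — so the two result sets can be compared. A minimal sketch of that pattern, independent of the Couchbase test harness (`FakeQueryDef` below is a hypothetical stand-in for `QueryDefinition`, not part of the library):

# Sketch of the template/format pattern used by the tests above.
class FakeQueryDef(object):
    def __init__(self, index_name, query_template):
        self.index_name = index_name
        self.query_template = query_template

query_def = FakeQueryDef(
    index_name="int_64_123456_long_num",
    query_template=["SELECT long_num FROM default use index ({0}) "
                    "where long_num > 2147483600"])

for query in query_def.query_template:
    index_query = query.format(query_def.index_name)  # run against the secondary index
    primary_query = query.format("#primary")          # same query against the primary index
    print(index_query)
    print(primary_query)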
import numpy as np

# time  occ1  occ2
mctdh_data = np.array(
    [[5.0000000e-01, 1.2083970e-02, 9.8791603e-01],
     [1.0000000e+00, 4.3008830e-02, 9.5699117e-01],
     [1.5000000e+00, 7.9675930e-02, 9.2032407e-01],
     [2.0000000e+00, 1.0804013e-01, 8.9195987e-01],
     [2.5000000e+00, 1.1972252e-01, 8.8027748e-01],
     [3.0000000e+00, 1.1480491e-01, 8.8519509e-01],
     [3.5000000e+00, 9.9963810e-02, 9.0003619e-01],
     [4.0000000e+00, 8.3915230e-02, 9.1608477e-01],
     [4.5000000e+00, 7.3173540e-02, 9.2682646e-01],
     [5.0000000e+00, 7.0139510e-02, 9.2986049e-01],
     [5.5000000e+00, 7.3678880e-02, 9.2632112e-01],
     [6.0000000e+00, 8.1009600e-02, 9.1899040e-01],
     [6.5000000e+00, 8.9498230e-02, 9.1050177e-01],
     [7.0000000e+00, 9.7581630e-02, 9.0241837e-01],
     [7.5000000e+00, 1.0480220e-01, 8.9519780e-01],
     [8.0000000e+00, 1.1138269e-01, 8.8861731e-01],
     [8.5000000e+00, 1.1778152e-01, 8.8221848e-01],
     [9.0000000e+00, 1.2444794e-01, 8.7555206e-01],
     [9.5000000e+00, 1.3177114e-01, 8.6822886e-01],
     [1.0000000e+01, 1.4010714e-01, 8.5989286e-01],
     [1.0500000e+01, 1.4975263e-01, 8.5024737e-01],
     [1.1000000e+01, 1.6089050e-01, 8.3910950e-01],
     [1.1500000e+01, 1.7348692e-01, 8.2651308e-01],
     [1.2000000e+01, 1.8720687e-01, 8.1279313e-01],
     [1.2500000e+01, 2.0153796e-01, 7.9846204e-01],
     [1.3000000e+01, 2.1602325e-01, 7.8397675e-01],
     [1.3500000e+01, 2.3037238e-01, 7.6962762e-01],
     [1.4000000e+01, 2.4446829e-01, 7.5553171e-01],
     [1.4500000e+01, 2.5835149e-01, 7.4164851e-01],
     [1.5000000e+01, 2.7219119e-01, 7.2780881e-01],
     [1.5500000e+01, 2.8613236e-01, 7.1386764e-01],
     [1.6000000e+01, 3.0009972e-01, 6.9990028e-01],
     [1.6500000e+01, 3.1382441e-01, 6.8617559e-01],
     [1.7000000e+01, 3.2698213e-01, 6.7301787e-01],
     [1.7500000e+01, 3.3937635e-01, 6.6062365e-01],
     [1.8000000e+01, 3.5102877e-01, 6.4897123e-01],
     [1.8500000e+01, 3.6206939e-01, 6.3793061e-01],
     [1.9000000e+01, 3.7267412e-01, 6.2732588e-01],
     [1.9500000e+01, 3.8298746e-01, 6.1701254e-01],
     [2.0000000e+01, 3.9308095e-01, 6.0691905e-01],
     [2.0500000e+01, 4.0301861e-01, 5.9698139e-01],
     [2.1000000e+01, 4.1282250e-01, 5.8717750e-01],
     [2.1500000e+01, 4.2245660e-01, 5.7754340e-01],
     [2.2000000e+01, 4.3190030e-01, 5.6809970e-01],
     [2.2500000e+01, 4.4112886e-01, 5.5887114e-01],
     [2.3000000e+01, 4.5010529e-01, 5.4989471e-01],
     [2.3500000e+01, 4.5883535e-01, 5.4116465e-01],
     [2.4000000e+01, 4.6722627e-01, 5.3277373e-01],
     [2.4500000e+01, 4.7498362e-01, 5.2501638e-01],
     [2.5000000e+01, 4.8179766e-01, 5.1820234e-01],
     [2.5500000e+01, 4.8751389e-01, 5.1248611e-01],
     [2.6000000e+01, 4.9209686e-01, 5.0790314e-01],
     [2.6500000e+01, 4.9574730e-01, 5.0425270e-01],
     [2.7000000e+01, 4.9904364e-01, 5.0095636e-01],
     [2.7500000e+01, 5.0257806e-01, 4.9742194e-01],
     [2.8000000e+01, 5.0676072e-01, 4.9323928e-01],
     [2.8500000e+01, 5.1170749e-01, 4.8829251e-01],
     [2.9000000e+01, 5.1722842e-01, 4.8277158e-01],
     [2.9500000e+01, 5.2295475e-01, 4.7704525e-01],
     [3.0000000e+01, 5.2852166e-01, 4.7147834e-01],
     [3.0500000e+01, 5.3372943e-01, 4.6627057e-01],
     [3.1000000e+01, 5.3829162e-01, 4.6170838e-01],
     [3.1500000e+01, 5.4233712e-01, 4.5766288e-01],
     [3.2000000e+01, 5.4618162e-01, 4.5381838e-01],
     [3.2500000e+01, 5.5001448e-01, 4.4998552e-01],
     [3.3000000e+01, 5.5421102e-01, 4.4578898e-01],
     [3.3500000e+01, 5.5877238e-01, 4.4122762e-01],
     [3.4000000e+01, 5.6366982e-01, 4.3633018e-01],
     [3.4500000e+01, 5.6899077e-01, 4.3100923e-01],
     [3.5000000e+01, 5.7434030e-01, 4.2565970e-01],
     [3.5500000e+01, 5.7964648e-01, 4.2035352e-01],
     [3.6000000e+01, 5.8496463e-01, 4.1503537e-01],
     [3.6500000e+01, 5.9025535e-01, 4.0974465e-01],
     [3.7000000e+01, 5.9588610e-01, 4.0411390e-01],
     [3.7500000e+01, 
6.0174132e-01, 3.9825868e-01], [3.8000000e+01, 6.0794735e-01, 3.9205265e-01], [3.8500000e+01, 6.1445039e-01, 3.8554961e-01], [3.9000000e+01, 6.2091842e-01, 3.7908158e-01], [3.9500000e+01, 6.2772288e-01, 3.7227712e-01], [4.0000000e+01, 6.3477112e-01, 3.6522888e-01], [4.0500000e+01, 6.4184368e-01, 3.5815632e-01], [4.1000000e+01, 6.4928782e-01, 3.5071218e-01], [4.1500000e+01, 6.5735624e-01, 3.4264376e-01], [4.2000000e+01, 6.6578903e-01, 3.3421097e-01], [4.2500000e+01, 6.7432732e-01, 3.2567268e-01], [4.3000000e+01, 6.8295713e-01, 3.1704287e-01], [4.3500000e+01, 6.9150497e-01, 3.0849503e-01], [4.4000000e+01, 6.9983774e-01, 3.0016226e-01], [4.4500000e+01, 7.0817274e-01, 2.9182726e-01], [4.5000000e+01, 7.1672047e-01, 2.8327953e-01], [4.5500000e+01, 7.2537341e-01, 2.7462659e-01], [4.6000000e+01, 7.3393667e-01, 2.6606333e-01], [4.6500000e+01, 7.4239650e-01, 2.5760350e-01], [4.7000000e+01, 7.5084866e-01, 2.4915134e-01], [4.7500000e+01, 7.5929325e-01, 2.4070675e-01], [4.8000000e+01, 7.6748428e-01, 2.3251572e-01], [4.8500000e+01, 7.7499907e-01, 2.2500093e-01], [4.9000000e+01, 7.8147300e-01, 2.1852700e-01], [4.9500000e+01, 7.8661012e-01, 2.1338988e-01], [5.0000000e+01, 7.9006035e-01, 2.0993965e-01], [5.0500000e+01, 7.9159180e-01, 2.0840820e-01], [5.1000000e+01, 7.9141817e-01, 2.0858183e-01], [5.1500000e+01, 7.9027855e-01, 2.0972145e-01], [5.2000000e+01, 7.8918090e-01, 2.1081910e-01], [5.2500000e+01, 7.8902032e-01, 2.1097968e-01], [5.3000000e+01, 7.9041607e-01, 2.0958393e-01], [5.3500000e+01, 7.9373225e-01, 2.0626775e-01], [5.4000000e+01, 7.9883965e-01, 2.0116035e-01], [5.4500000e+01, 8.0470534e-01, 1.9529466e-01], [5.5000000e+01, 8.0938464e-01, 1.9061536e-01], [5.5500000e+01, 8.1079180e-01, 1.8920820e-01], [5.6000000e+01, 8.0799382e-01, 1.9200618e-01], [5.6500000e+01, 8.0187198e-01, 1.9812802e-01], [5.7000000e+01, 7.9445757e-01, 2.0554243e-01], [5.7500000e+01, 7.8781232e-01, 2.1218768e-01], [5.8000000e+01, 7.8320459e-01, 2.1679541e-01], [5.8500000e+01, 7.8067931e-01, 2.1932069e-01], [5.9000000e+01, 7.7953602e-01, 2.2046398e-01], [5.9500000e+01, 7.7929186e-01, 2.2070814e-01], [6.0000000e+01, 7.7972442e-01, 2.2027558e-01], [6.0500000e+01, 7.8047304e-01, 2.1952696e-01], [6.1000000e+01, 7.8136710e-01, 2.1863290e-01], [6.1500000e+01, 7.8251678e-01, 2.1748322e-01], [6.2000000e+01, 7.8353249e-01, 2.1646751e-01], [6.2500000e+01, 7.8336467e-01, 2.1663533e-01], [6.3000000e+01, 7.8158438e-01, 2.1841562e-01], [6.3500000e+01, 7.7916625e-01, 2.2083375e-01], [6.4000000e+01, 7.7744298e-01, 2.2255702e-01], [6.4500000e+01, 7.7732909e-01, 2.2267091e-01], [6.5000000e+01, 7.7936981e-01, 2.2063019e-01], [6.5500000e+01, 7.8319636e-01, 2.1680364e-01], [6.6000000e+01, 7.8739037e-01, 2.1260963e-01], [6.6500000e+01, 7.9077882e-01, 2.0922118e-01], [6.7000000e+01, 7.9332419e-01, 2.0667581e-01], [6.7500000e+01, 7.9557840e-01, 2.0442160e-01], [6.8000000e+01, 7.9825531e-01, 2.0174469e-01], [6.8500000e+01, 8.0211292e-01, 1.9788708e-01], [6.9000000e+01, 8.0710085e-01, 1.9289915e-01], [6.9500000e+01, 8.1213965e-01, 1.8786035e-01], [7.0000000e+01, 8.1624836e-01, 1.8375164e-01], [7.0500000e+01, 8.1922441e-01, 1.8077559e-01], [7.1000000e+01, 8.2136937e-01, 1.7863063e-01], [7.1500000e+01, 8.2308043e-01, 1.7691957e-01], [7.2000000e+01, 8.2440015e-01, 1.7559985e-01], [7.2500000e+01, 8.2492880e-01, 1.7507120e-01], [7.3000000e+01, 8.2421052e-01, 1.7578948e-01], [7.3500000e+01, 8.2197299e-01, 1.7802701e-01], [7.4000000e+01, 8.1841193e-01, 1.8158807e-01], [7.4500000e+01, 8.1409910e-01, 1.8590090e-01], [7.5000000e+01, 8.0929874e-01, 
1.9070126e-01], [7.5500000e+01, 8.0380650e-01, 1.9619350e-01], [7.6000000e+01, 7.9742293e-01, 2.0257707e-01], [7.6500000e+01, 7.9001137e-01, 2.0998863e-01], [7.7000000e+01, 7.8124159e-01, 2.1875841e-01], [7.7500000e+01, 7.7098038e-01, 2.2901962e-01], [7.8000000e+01, 7.5977139e-01, 2.4022861e-01], [7.8500000e+01, 7.4845065e-01, 2.5154935e-01], [7.9000000e+01, 7.3754115e-01, 2.6245885e-01], [7.9500000e+01, 7.2720729e-01, 2.7279271e-01], [8.0000000e+01, 7.1757244e-01, 2.8242756e-01], [8.0500000e+01, 7.0883123e-01, 2.9116877e-01], [8.1000000e+01, 7.0085659e-01, 2.9914341e-01], [8.1500000e+01, 6.9303527e-01, 3.0696473e-01], [8.2000000e+01, 6.8486829e-01, 3.1513171e-01], [8.2500000e+01, 6.7646527e-01, 3.2353473e-01], [8.3000000e+01, 6.6835282e-01, 3.3164718e-01], [8.3500000e+01, 6.6088327e-01, 3.3911673e-01], [8.4000000e+01, 6.5393189e-01, 3.4606811e-01], [8.4500000e+01, 6.4749414e-01, 3.5250586e-01], [8.5000000e+01, 6.4213779e-01, 3.5786221e-01], [8.5500000e+01, 6.3822323e-01, 3.6177677e-01], [8.6000000e+01, 6.3508024e-01, 3.6491976e-01], [8.6500000e+01, 6.3187553e-01, 3.6812447e-01], [8.7000000e+01, 6.2866410e-01, 3.7133590e-01], [8.7500000e+01, 6.2567578e-01, 3.7432422e-01], [8.8000000e+01, 6.2327737e-01, 3.7672263e-01], [8.8500000e+01, 6.2202450e-01, 3.7797550e-01], [8.9000000e+01, 6.2142673e-01, 3.7857327e-01], [8.9500000e+01, 6.2085314e-01, 3.7914686e-01], [9.0000000e+01, 6.2039036e-01, 3.7960964e-01], [9.0500000e+01, 6.1980805e-01, 3.8019195e-01], [9.1000000e+01, 6.1892296e-01, 3.8107704e-01], [9.1500000e+01, 6.1828657e-01, 3.8171343e-01], [9.2000000e+01, 6.1839927e-01, 3.8160073e-01], [9.2500000e+01, 6.1910003e-01, 3.8089997e-01], [9.3000000e+01, 6.2054513e-01, 3.7945487e-01], [9.3500000e+01, 6.2310521e-01, 3.7689479e-01], [9.4000000e+01, 6.2607344e-01, 3.7392656e-01], [9.4500000e+01, 6.2886337e-01, 3.7113663e-01], [9.5000000e+01, 6.3217429e-01, 3.6782571e-01], [9.5500000e+01, 6.3663281e-01, 3.6336719e-01], [9.6000000e+01, 6.4211005e-01, 3.5788995e-01], [9.6500000e+01, 6.4845972e-01, 3.5154028e-01], [9.7000000e+01, 6.5566634e-01, 3.4433366e-01], [9.7500000e+01, 6.6344440e-01, 3.3655560e-01], [9.8000000e+01, 6.7139138e-01, 3.2860862e-01], [9.8500000e+01, 6.7944561e-01, 3.2055439e-01], [9.9000000e+01, 6.8767516e-01, 3.1232484e-01], [9.9500000e+01, 6.9606729e-01, 3.0393271e-01], [1.0000000e+02, 7.0464076e-01, 2.9535924e-01], [1.0050000e+02, 7.1352864e-01, 2.8647136e-01], [1.0100000e+02, 7.2275077e-01, 2.7724923e-01], [1.0150000e+02, 7.3202915e-01, 2.6797085e-01], [1.0200000e+02, 7.4071962e-01, 2.5928038e-01], [1.0250000e+02, 7.4834850e-01, 2.5165150e-01], [1.0300000e+02, 7.5504078e-01, 2.4495922e-01], [1.0350000e+02, 7.6135113e-01, 2.3864887e-01], [1.0400000e+02, 7.6773182e-01, 2.3226818e-01], [1.0450000e+02, 7.7409993e-01, 2.2590007e-01], [1.0500000e+02, 7.8042367e-01, 2.1957633e-01], [1.0550000e+02, 7.8673749e-01, 2.1326251e-01], [1.0600000e+02, 7.9254266e-01, 2.0745734e-01], [1.0650000e+02, 7.9707885e-01, 2.0292115e-01], [1.0700000e+02, 8.0012517e-01, 1.9987483e-01], [1.0750000e+02, 8.0232982e-01, 1.9767018e-01], [1.0800000e+02, 8.0447387e-01, 1.9552613e-01], [1.0850000e+02, 8.0691381e-01, 1.9308619e-01], [1.0900000e+02, 8.0991568e-01, 1.9008432e-01], [1.0950000e+02, 8.1386778e-01, 1.8613222e-01], [1.1000000e+02, 8.1882018e-01, 1.8117982e-01], [1.1050000e+02, 8.2400922e-01, 1.7599078e-01], [1.1100000e+02, 8.2818942e-01, 1.7181058e-01], [1.1150000e+02, 8.3038865e-01, 1.6961135e-01], [1.1200000e+02, 8.3033898e-01, 1.6966102e-01], [1.1250000e+02, 8.2863678e-01, 1.7136322e-01], 
[1.1300000e+02, 8.2653080e-01, 1.7346920e-01], [1.1350000e+02, 8.2536814e-01, 1.7463186e-01], [1.1400000e+02, 8.2604248e-01, 1.7395752e-01], [1.1450000e+02, 8.2854491e-01, 1.7145509e-01], [1.1500000e+02, 8.3199930e-01, 1.6800070e-01], [1.1550000e+02, 8.3538285e-01, 1.6461715e-01], [1.1600000e+02, 8.3816342e-01, 1.6183658e-01], [1.1650000e+02, 8.4022613e-01, 1.5977387e-01], [1.1700000e+02, 8.4147193e-01, 1.5852807e-01], [1.1750000e+02, 8.4185527e-01, 1.5814473e-01], [1.1800000e+02, 8.4185298e-01, 1.5814702e-01], [1.1850000e+02, 8.4241659e-01, 1.5758341e-01], [1.1900000e+02, 8.4415570e-01, 1.5584430e-01], [1.1950000e+02, 8.4685108e-01, 1.5314892e-01], [1.2000000e+02, 8.4990233e-01, 1.5009767e-01]] )
[ "numpy.array" ]
[((76, 8320), 'numpy.array', 'np.array', (['[[0.5, 0.01208397, 0.98791603], [1.0, 0.04300883, 0.95699117], [1.5, \n 0.07967593, 0.92032407], [2.0, 0.10804013, 0.89195987], [2.5, \n 0.11972252, 0.88027748], [3.0, 0.11480491, 0.88519509], [3.5, \n 0.09996381, 0.90003619], [4.0, 0.08391523, 0.91608477], [4.5, \n 0.07317354, 0.92682646], [5.0, 0.07013951, 0.92986049], [5.5, \n 0.07367888, 0.92632112], [6.0, 0.0810096, 0.9189904], [6.5, 0.08949823,\n 0.91050177], [7.0, 0.09758163, 0.90241837], [7.5, 0.1048022, 0.8951978],\n [8.0, 0.11138269, 0.88861731], [8.5, 0.11778152, 0.88221848], [9.0, \n 0.12444794, 0.87555206], [9.5, 0.13177114, 0.86822886], [10.0, \n 0.14010714, 0.85989286], [10.5, 0.14975263, 0.85024737], [11.0, \n 0.1608905, 0.8391095], [11.5, 0.17348692, 0.82651308], [12.0, \n 0.18720687, 0.81279313], [12.5, 0.20153796, 0.79846204], [13.0, \n 0.21602325, 0.78397675], [13.5, 0.23037238, 0.76962762], [14.0, \n 0.24446829, 0.75553171], [14.5, 0.25835149, 0.74164851], [15.0, \n 0.27219119, 0.72780881], [15.5, 0.28613236, 0.71386764], [16.0, \n 0.30009972, 0.69990028], [16.5, 0.31382441, 0.68617559], [17.0, \n 0.32698213, 0.67301787], [17.5, 0.33937635, 0.66062365], [18.0, \n 0.35102877, 0.64897123], [18.5, 0.36206939, 0.63793061], [19.0, \n 0.37267412, 0.62732588], [19.5, 0.38298746, 0.61701254], [20.0, \n 0.39308095, 0.60691905], [20.5, 0.40301861, 0.59698139], [21.0, \n 0.4128225, 0.5871775], [21.5, 0.4224566, 0.5775434], [22.0, 0.4319003, \n 0.5680997], [22.5, 0.44112886, 0.55887114], [23.0, 0.45010529, \n 0.54989471], [23.5, 0.45883535, 0.54116465], [24.0, 0.46722627, \n 0.53277373], [24.5, 0.47498362, 0.52501638], [25.0, 0.48179766, \n 0.51820234], [25.5, 0.48751389, 0.51248611], [26.0, 0.49209686, \n 0.50790314], [26.5, 0.4957473, 0.5042527], [27.0, 0.49904364, \n 0.50095636], [27.5, 0.50257806, 0.49742194], [28.0, 0.50676072, \n 0.49323928], [28.5, 0.51170749, 0.48829251], [29.0, 0.51722842, \n 0.48277158], [29.5, 0.52295475, 0.47704525], [30.0, 0.52852166, \n 0.47147834], [30.5, 0.53372943, 0.46627057], [31.0, 0.53829162, \n 0.46170838], [31.5, 0.54233712, 0.45766288], [32.0, 0.54618162, \n 0.45381838], [32.5, 0.55001448, 0.44998552], [33.0, 0.55421102, \n 0.44578898], [33.5, 0.55877238, 0.44122762], [34.0, 0.56366982, \n 0.43633018], [34.5, 0.56899077, 0.43100923], [35.0, 0.5743403, \n 0.4256597], [35.5, 0.57964648, 0.42035352], [36.0, 0.58496463, \n 0.41503537], [36.5, 0.59025535, 0.40974465], [37.0, 0.5958861, \n 0.4041139], [37.5, 0.60174132, 0.39825868], [38.0, 0.60794735, \n 0.39205265], [38.5, 0.61445039, 0.38554961], [39.0, 0.62091842, \n 0.37908158], [39.5, 0.62772288, 0.37227712], [40.0, 0.63477112, \n 0.36522888], [40.5, 0.64184368, 0.35815632], [41.0, 0.64928782, \n 0.35071218], [41.5, 0.65735624, 0.34264376], [42.0, 0.66578903, \n 0.33421097], [42.5, 0.67432732, 0.32567268], [43.0, 0.68295713, \n 0.31704287], [43.5, 0.69150497, 0.30849503], [44.0, 0.69983774, \n 0.30016226], [44.5, 0.70817274, 0.29182726], [45.0, 0.71672047, \n 0.28327953], [45.5, 0.72537341, 0.27462659], [46.0, 0.73393667, \n 0.26606333], [46.5, 0.7423965, 0.2576035], [47.0, 0.75084866, \n 0.24915134], [47.5, 0.75929325, 0.24070675], [48.0, 0.76748428, \n 0.23251572], [48.5, 0.77499907, 0.22500093], [49.0, 0.781473, 0.218527],\n [49.5, 0.78661012, 0.21338988], [50.0, 0.79006035, 0.20993965], [50.5, \n 0.7915918, 0.2084082], [51.0, 0.79141817, 0.20858183], [51.5, \n 0.79027855, 0.20972145], [52.0, 0.7891809, 0.2108191], [52.5, \n 0.78902032, 0.21097968], [53.0, 0.79041607, 0.20958393], [53.5, \n 
0.79373225, 0.20626775], [54.0, 0.79883965, 0.20116035], [54.5, \n 0.80470534, 0.19529466], [55.0, 0.80938464, 0.19061536], [55.5, \n 0.8107918, 0.1892082], [56.0, 0.80799382, 0.19200618], [56.5, \n 0.80187198, 0.19812802], [57.0, 0.79445757, 0.20554243], [57.5, \n 0.78781232, 0.21218768], [58.0, 0.78320459, 0.21679541], [58.5, \n 0.78067931, 0.21932069], [59.0, 0.77953602, 0.22046398], [59.5, \n 0.77929186, 0.22070814], [60.0, 0.77972442, 0.22027558], [60.5, \n 0.78047304, 0.21952696], [61.0, 0.7813671, 0.2186329], [61.5, \n 0.78251678, 0.21748322], [62.0, 0.78353249, 0.21646751], [62.5, \n 0.78336467, 0.21663533], [63.0, 0.78158438, 0.21841562], [63.5, \n 0.77916625, 0.22083375], [64.0, 0.77744298, 0.22255702], [64.5, \n 0.77732909, 0.22267091], [65.0, 0.77936981, 0.22063019], [65.5, \n 0.78319636, 0.21680364], [66.0, 0.78739037, 0.21260963], [66.5, \n 0.79077882, 0.20922118], [67.0, 0.79332419, 0.20667581], [67.5, \n 0.7955784, 0.2044216], [68.0, 0.79825531, 0.20174469], [68.5, \n 0.80211292, 0.19788708], [69.0, 0.80710085, 0.19289915], [69.5, \n 0.81213965, 0.18786035], [70.0, 0.81624836, 0.18375164], [70.5, \n 0.81922441, 0.18077559], [71.0, 0.82136937, 0.17863063], [71.5, \n 0.82308043, 0.17691957], [72.0, 0.82440015, 0.17559985], [72.5, \n 0.8249288, 0.1750712], [73.0, 0.82421052, 0.17578948], [73.5, \n 0.82197299, 0.17802701], [74.0, 0.81841193, 0.18158807], [74.5, \n 0.8140991, 0.1859009], [75.0, 0.80929874, 0.19070126], [75.5, 0.8038065,\n 0.1961935], [76.0, 0.79742293, 0.20257707], [76.5, 0.79001137, \n 0.20998863], [77.0, 0.78124159, 0.21875841], [77.5, 0.77098038, \n 0.22901962], [78.0, 0.75977139, 0.24022861], [78.5, 0.74845065, \n 0.25154935], [79.0, 0.73754115, 0.26245885], [79.5, 0.72720729, \n 0.27279271], [80.0, 0.71757244, 0.28242756], [80.5, 0.70883123, \n 0.29116877], [81.0, 0.70085659, 0.29914341], [81.5, 0.69303527, \n 0.30696473], [82.0, 0.68486829, 0.31513171], [82.5, 0.67646527, \n 0.32353473], [83.0, 0.66835282, 0.33164718], [83.5, 0.66088327, \n 0.33911673], [84.0, 0.65393189, 0.34606811], [84.5, 0.64749414, \n 0.35250586], [85.0, 0.64213779, 0.35786221], [85.5, 0.63822323, \n 0.36177677], [86.0, 0.63508024, 0.36491976], [86.5, 0.63187553, \n 0.36812447], [87.0, 0.6286641, 0.3713359], [87.5, 0.62567578, \n 0.37432422], [88.0, 0.62327737, 0.37672263], [88.5, 0.6220245, \n 0.3779755], [89.0, 0.62142673, 0.37857327], [89.5, 0.62085314, \n 0.37914686], [90.0, 0.62039036, 0.37960964], [90.5, 0.61980805, \n 0.38019195], [91.0, 0.61892296, 0.38107704], [91.5, 0.61828657, \n 0.38171343], [92.0, 0.61839927, 0.38160073], [92.5, 0.61910003, \n 0.38089997], [93.0, 0.62054513, 0.37945487], [93.5, 0.62310521, \n 0.37689479], [94.0, 0.62607344, 0.37392656], [94.5, 0.62886337, \n 0.37113663], [95.0, 0.63217429, 0.36782571], [95.5, 0.63663281, \n 0.36336719], [96.0, 0.64211005, 0.35788995], [96.5, 0.64845972, \n 0.35154028], [97.0, 0.65566634, 0.34433366], [97.5, 0.6634444, \n 0.3365556], [98.0, 0.67139138, 0.32860862], [98.5, 0.67944561, \n 0.32055439], [99.0, 0.68767516, 0.31232484], [99.5, 0.69606729, \n 0.30393271], [100.0, 0.70464076, 0.29535924], [100.5, 0.71352864, \n 0.28647136], [101.0, 0.72275077, 0.27724923], [101.5, 0.73202915, \n 0.26797085], [102.0, 0.74071962, 0.25928038], [102.5, 0.7483485, \n 0.2516515], [103.0, 0.75504078, 0.24495922], [103.5, 0.76135113, \n 0.23864887], [104.0, 0.76773182, 0.23226818], [104.5, 0.77409993, \n 0.22590007], [105.0, 0.78042367, 0.21957633], [105.5, 0.78673749, \n 0.21326251], [106.0, 0.79254266, 0.20745734], [106.5, 0.79707885, 
\n 0.20292115], [107.0, 0.80012517, 0.19987483], [107.5, 0.80232982, \n 0.19767018], [108.0, 0.80447387, 0.19552613], [108.5, 0.80691381, \n 0.19308619], [109.0, 0.80991568, 0.19008432], [109.5, 0.81386778, \n 0.18613222], [110.0, 0.81882018, 0.18117982], [110.5, 0.82400922, \n 0.17599078], [111.0, 0.82818942, 0.17181058], [111.5, 0.83038865, \n 0.16961135], [112.0, 0.83033898, 0.16966102], [112.5, 0.82863678, \n 0.17136322], [113.0, 0.8265308, 0.1734692], [113.5, 0.82536814, \n 0.17463186], [114.0, 0.82604248, 0.17395752], [114.5, 0.82854491, \n 0.17145509], [115.0, 0.8319993, 0.1680007], [115.5, 0.83538285, \n 0.16461715], [116.0, 0.83816342, 0.16183658], [116.5, 0.84022613, \n 0.15977387], [117.0, 0.84147193, 0.15852807], [117.5, 0.84185527, \n 0.15814473], [118.0, 0.84185298, 0.15814702], [118.5, 0.84241659, \n 0.15758341], [119.0, 0.8441557, 0.1558443], [119.5, 0.84685108, \n 0.15314892], [120.0, 0.84990233, 0.15009767]]'], {}), '([[0.5, 0.01208397, 0.98791603], [1.0, 0.04300883, 0.95699117], [\n 1.5, 0.07967593, 0.92032407], [2.0, 0.10804013, 0.89195987], [2.5, \n 0.11972252, 0.88027748], [3.0, 0.11480491, 0.88519509], [3.5, \n 0.09996381, 0.90003619], [4.0, 0.08391523, 0.91608477], [4.5, \n 0.07317354, 0.92682646], [5.0, 0.07013951, 0.92986049], [5.5, \n 0.07367888, 0.92632112], [6.0, 0.0810096, 0.9189904], [6.5, 0.08949823,\n 0.91050177], [7.0, 0.09758163, 0.90241837], [7.5, 0.1048022, 0.8951978],\n [8.0, 0.11138269, 0.88861731], [8.5, 0.11778152, 0.88221848], [9.0, \n 0.12444794, 0.87555206], [9.5, 0.13177114, 0.86822886], [10.0, \n 0.14010714, 0.85989286], [10.5, 0.14975263, 0.85024737], [11.0, \n 0.1608905, 0.8391095], [11.5, 0.17348692, 0.82651308], [12.0, \n 0.18720687, 0.81279313], [12.5, 0.20153796, 0.79846204], [13.0, \n 0.21602325, 0.78397675], [13.5, 0.23037238, 0.76962762], [14.0, \n 0.24446829, 0.75553171], [14.5, 0.25835149, 0.74164851], [15.0, \n 0.27219119, 0.72780881], [15.5, 0.28613236, 0.71386764], [16.0, \n 0.30009972, 0.69990028], [16.5, 0.31382441, 0.68617559], [17.0, \n 0.32698213, 0.67301787], [17.5, 0.33937635, 0.66062365], [18.0, \n 0.35102877, 0.64897123], [18.5, 0.36206939, 0.63793061], [19.0, \n 0.37267412, 0.62732588], [19.5, 0.38298746, 0.61701254], [20.0, \n 0.39308095, 0.60691905], [20.5, 0.40301861, 0.59698139], [21.0, \n 0.4128225, 0.5871775], [21.5, 0.4224566, 0.5775434], [22.0, 0.4319003, \n 0.5680997], [22.5, 0.44112886, 0.55887114], [23.0, 0.45010529, \n 0.54989471], [23.5, 0.45883535, 0.54116465], [24.0, 0.46722627, \n 0.53277373], [24.5, 0.47498362, 0.52501638], [25.0, 0.48179766, \n 0.51820234], [25.5, 0.48751389, 0.51248611], [26.0, 0.49209686, \n 0.50790314], [26.5, 0.4957473, 0.5042527], [27.0, 0.49904364, \n 0.50095636], [27.5, 0.50257806, 0.49742194], [28.0, 0.50676072, \n 0.49323928], [28.5, 0.51170749, 0.48829251], [29.0, 0.51722842, \n 0.48277158], [29.5, 0.52295475, 0.47704525], [30.0, 0.52852166, \n 0.47147834], [30.5, 0.53372943, 0.46627057], [31.0, 0.53829162, \n 0.46170838], [31.5, 0.54233712, 0.45766288], [32.0, 0.54618162, \n 0.45381838], [32.5, 0.55001448, 0.44998552], [33.0, 0.55421102, \n 0.44578898], [33.5, 0.55877238, 0.44122762], [34.0, 0.56366982, \n 0.43633018], [34.5, 0.56899077, 0.43100923], [35.0, 0.5743403, \n 0.4256597], [35.5, 0.57964648, 0.42035352], [36.0, 0.58496463, \n 0.41503537], [36.5, 0.59025535, 0.40974465], [37.0, 0.5958861, \n 0.4041139], [37.5, 0.60174132, 0.39825868], [38.0, 0.60794735, \n 0.39205265], [38.5, 0.61445039, 0.38554961], [39.0, 0.62091842, \n 0.37908158], [39.5, 0.62772288, 0.37227712], 
[40.0, 0.63477112, \n 0.36522888], [40.5, 0.64184368, 0.35815632], [41.0, 0.64928782, \n 0.35071218], [41.5, 0.65735624, 0.34264376], [42.0, 0.66578903, \n 0.33421097], [42.5, 0.67432732, 0.32567268], [43.0, 0.68295713, \n 0.31704287], [43.5, 0.69150497, 0.30849503], [44.0, 0.69983774, \n 0.30016226], [44.5, 0.70817274, 0.29182726], [45.0, 0.71672047, \n 0.28327953], [45.5, 0.72537341, 0.27462659], [46.0, 0.73393667, \n 0.26606333], [46.5, 0.7423965, 0.2576035], [47.0, 0.75084866, \n 0.24915134], [47.5, 0.75929325, 0.24070675], [48.0, 0.76748428, \n 0.23251572], [48.5, 0.77499907, 0.22500093], [49.0, 0.781473, 0.218527],\n [49.5, 0.78661012, 0.21338988], [50.0, 0.79006035, 0.20993965], [50.5, \n 0.7915918, 0.2084082], [51.0, 0.79141817, 0.20858183], [51.5, \n 0.79027855, 0.20972145], [52.0, 0.7891809, 0.2108191], [52.5, \n 0.78902032, 0.21097968], [53.0, 0.79041607, 0.20958393], [53.5, \n 0.79373225, 0.20626775], [54.0, 0.79883965, 0.20116035], [54.5, \n 0.80470534, 0.19529466], [55.0, 0.80938464, 0.19061536], [55.5, \n 0.8107918, 0.1892082], [56.0, 0.80799382, 0.19200618], [56.5, \n 0.80187198, 0.19812802], [57.0, 0.79445757, 0.20554243], [57.5, \n 0.78781232, 0.21218768], [58.0, 0.78320459, 0.21679541], [58.5, \n 0.78067931, 0.21932069], [59.0, 0.77953602, 0.22046398], [59.5, \n 0.77929186, 0.22070814], [60.0, 0.77972442, 0.22027558], [60.5, \n 0.78047304, 0.21952696], [61.0, 0.7813671, 0.2186329], [61.5, \n 0.78251678, 0.21748322], [62.0, 0.78353249, 0.21646751], [62.5, \n 0.78336467, 0.21663533], [63.0, 0.78158438, 0.21841562], [63.5, \n 0.77916625, 0.22083375], [64.0, 0.77744298, 0.22255702], [64.5, \n 0.77732909, 0.22267091], [65.0, 0.77936981, 0.22063019], [65.5, \n 0.78319636, 0.21680364], [66.0, 0.78739037, 0.21260963], [66.5, \n 0.79077882, 0.20922118], [67.0, 0.79332419, 0.20667581], [67.5, \n 0.7955784, 0.2044216], [68.0, 0.79825531, 0.20174469], [68.5, \n 0.80211292, 0.19788708], [69.0, 0.80710085, 0.19289915], [69.5, \n 0.81213965, 0.18786035], [70.0, 0.81624836, 0.18375164], [70.5, \n 0.81922441, 0.18077559], [71.0, 0.82136937, 0.17863063], [71.5, \n 0.82308043, 0.17691957], [72.0, 0.82440015, 0.17559985], [72.5, \n 0.8249288, 0.1750712], [73.0, 0.82421052, 0.17578948], [73.5, \n 0.82197299, 0.17802701], [74.0, 0.81841193, 0.18158807], [74.5, \n 0.8140991, 0.1859009], [75.0, 0.80929874, 0.19070126], [75.5, 0.8038065,\n 0.1961935], [76.0, 0.79742293, 0.20257707], [76.5, 0.79001137, \n 0.20998863], [77.0, 0.78124159, 0.21875841], [77.5, 0.77098038, \n 0.22901962], [78.0, 0.75977139, 0.24022861], [78.5, 0.74845065, \n 0.25154935], [79.0, 0.73754115, 0.26245885], [79.5, 0.72720729, \n 0.27279271], [80.0, 0.71757244, 0.28242756], [80.5, 0.70883123, \n 0.29116877], [81.0, 0.70085659, 0.29914341], [81.5, 0.69303527, \n 0.30696473], [82.0, 0.68486829, 0.31513171], [82.5, 0.67646527, \n 0.32353473], [83.0, 0.66835282, 0.33164718], [83.5, 0.66088327, \n 0.33911673], [84.0, 0.65393189, 0.34606811], [84.5, 0.64749414, \n 0.35250586], [85.0, 0.64213779, 0.35786221], [85.5, 0.63822323, \n 0.36177677], [86.0, 0.63508024, 0.36491976], [86.5, 0.63187553, \n 0.36812447], [87.0, 0.6286641, 0.3713359], [87.5, 0.62567578, \n 0.37432422], [88.0, 0.62327737, 0.37672263], [88.5, 0.6220245, \n 0.3779755], [89.0, 0.62142673, 0.37857327], [89.5, 0.62085314, \n 0.37914686], [90.0, 0.62039036, 0.37960964], [90.5, 0.61980805, \n 0.38019195], [91.0, 0.61892296, 0.38107704], [91.5, 0.61828657, \n 0.38171343], [92.0, 0.61839927, 0.38160073], [92.5, 0.61910003, \n 0.38089997], [93.0, 0.62054513, 0.37945487], 
[93.5, 0.62310521, \n 0.37689479], [94.0, 0.62607344, 0.37392656], [94.5, 0.62886337, \n 0.37113663], [95.0, 0.63217429, 0.36782571], [95.5, 0.63663281, \n 0.36336719], [96.0, 0.64211005, 0.35788995], [96.5, 0.64845972, \n 0.35154028], [97.0, 0.65566634, 0.34433366], [97.5, 0.6634444, \n 0.3365556], [98.0, 0.67139138, 0.32860862], [98.5, 0.67944561, \n 0.32055439], [99.0, 0.68767516, 0.31232484], [99.5, 0.69606729, \n 0.30393271], [100.0, 0.70464076, 0.29535924], [100.5, 0.71352864, \n 0.28647136], [101.0, 0.72275077, 0.27724923], [101.5, 0.73202915, \n 0.26797085], [102.0, 0.74071962, 0.25928038], [102.5, 0.7483485, \n 0.2516515], [103.0, 0.75504078, 0.24495922], [103.5, 0.76135113, \n 0.23864887], [104.0, 0.76773182, 0.23226818], [104.5, 0.77409993, \n 0.22590007], [105.0, 0.78042367, 0.21957633], [105.5, 0.78673749, \n 0.21326251], [106.0, 0.79254266, 0.20745734], [106.5, 0.79707885, \n 0.20292115], [107.0, 0.80012517, 0.19987483], [107.5, 0.80232982, \n 0.19767018], [108.0, 0.80447387, 0.19552613], [108.5, 0.80691381, \n 0.19308619], [109.0, 0.80991568, 0.19008432], [109.5, 0.81386778, \n 0.18613222], [110.0, 0.81882018, 0.18117982], [110.5, 0.82400922, \n 0.17599078], [111.0, 0.82818942, 0.17181058], [111.5, 0.83038865, \n 0.16961135], [112.0, 0.83033898, 0.16966102], [112.5, 0.82863678, \n 0.17136322], [113.0, 0.8265308, 0.1734692], [113.5, 0.82536814, \n 0.17463186], [114.0, 0.82604248, 0.17395752], [114.5, 0.82854491, \n 0.17145509], [115.0, 0.8319993, 0.1680007], [115.5, 0.83538285, \n 0.16461715], [116.0, 0.83816342, 0.16183658], [116.5, 0.84022613, \n 0.15977387], [117.0, 0.84147193, 0.15852807], [117.5, 0.84185527, \n 0.15814473], [118.0, 0.84185298, 0.15814702], [118.5, 0.84241659, \n 0.15758341], [119.0, 0.8441557, 0.1558443], [119.5, 0.84685108, \n 0.15314892], [120.0, 0.84990233, 0.15009767]])\n', (84, 8320), True, 'import numpy as np\n')]
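Each row of `mctdh_data` holds a time stamp followed by the two occupation numbers, so downstream code can slice it by column. A minimal plotting sketch, assuming matplotlib is available (the axis labels are inferred from the `# time occ1 occ2` column comment; the time unit is not stated in the data):

import matplotlib.pyplot as plt

t = mctdh_data[:, 0]     # first column: time
occ1 = mctdh_data[:, 1]  # second column: occupation of state 1
occ2 = mctdh_data[:, 2]  # third column: occupation of state 2

plt.plot(t, occ1, label="occ1")
plt.plot(t, occ2, label="occ2")
plt.xlabel("time")       # unit not given in the source data
plt.ylabel("occupation")
plt.legend()
plt.show()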
#MenuTitle: Guides through All Selected Nodes
# -*- coding: utf-8 -*-
__doc__="""
Creates guides through all selected nodes.
"""

import math

thisFont = Glyphs.font # frontmost font
selectedLayers = thisFont.selectedLayers # active layers of selected glyphs

def angle( firstPoint, secondPoint ):
	"""
	Returns the angle (in degrees) of the straight line between firstPoint and secondPoint,
	0 degrees being the second point to the right of first point.
	firstPoint, secondPoint: must be NSPoint or GSNode
	"""
	xDiff = secondPoint.x - firstPoint.x
	yDiff = secondPoint.y - firstPoint.y
	return math.degrees(math.atan2(yDiff,xDiff))

def newGuide( position, angle ):
	newGuide = GSGuideLine()
	newGuide.position = position
	newGuide.angle = angle
	return newGuide

def isThereAlreadyAGuideWithTheseProperties(thisLayer,guideposition,guideangle):
	for thisGuide in thisLayer.guides:
		if thisGuide.angle == guideangle:
			if thisGuide.position == guideposition:
				return True
			elif angle(guideposition,thisGuide.position) % 180 == guideangle % 180:
				return True
	return False

if len(selectedLayers) == 1:
	thisLayer = selectedLayers[0]
	thisGlyph = thisLayer.parent
	currentPointSelection = [point.position for point in thisLayer.selection if type(point) == GSNode]

	# clear selection:
	Layer.clearSelection()

	thisGlyph.beginUndo() # begin undo grouping

	if len(currentPointSelection) > 1:
		currentPointSelection.append(currentPointSelection[0])
		for i,j in enumerate(range(1,len(currentPointSelection))):
			point1 = currentPointSelection[i]
			point2 = currentPointSelection[j]
			angleBetweenPoints = angle(point1,point2)
			middlePoint = addPoints(point1,point2)
			middlePoint.x *= 0.5
			middlePoint.y *= 0.5

			# create guide and add it to layer:
			if not isThereAlreadyAGuideWithTheseProperties(thisLayer, middlePoint, angleBetweenPoints):
				guideBetweenPoints = newGuide(middlePoint, angleBetweenPoints)
				thisLayer.guides.append( guideBetweenPoints )

				# select it:
				thisLayer.selection.append(guideBetweenPoints)

	thisGlyph.endUndo() # end undo grouping
[ "math.atan2" ]
[((609, 633), 'math.atan2', 'math.atan2', (['yDiff', 'xDiff'], {}), '(yDiff, xDiff)\n', (619, 633), False, 'import math\n')]
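The guide placement above reduces to two pieces of plain math: the angle of the segment between two nodes and its midpoint (`addPoints` followed by the two `*= 0.5` lines). A small self-contained check of that math, using tuples instead of Glyphs objects:

import math

def segment_angle(p1, p2):
    # same formula as angle() above, 0 degrees pointing right
    return math.degrees(math.atan2(p2[1] - p1[1], p2[0] - p1[0]))

def segment_midpoint(p1, p2):
    # equivalent of addPoints() followed by the two *= 0.5 lines
    return ((p1[0] + p2[0]) * 0.5, (p1[1] + p2[1]) * 0.5)

print(segment_angle((0, 0), (10, 10)))     # 45.0
print(segment_midpoint((0, 0), (10, 10)))  # (5.0, 5.0)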
import re
import sys

keymap = {
    '2': 'abc',
    '3': 'def',
    '4': 'ghi',
    '5': 'jkl',
    '6': 'mno',
    '7': 'pqrs',
    '8': 'tuv',
    '9': 'wxyz',
}


def ask_for_numbers():
    while True:
        response = input('What numbers have you pressed? ').strip()
        if len(response) < 3:
            print('You need to enter at least three numbers.', file=sys.stderr)
        elif re.search("[^2-9]", response):
            print("You entered a character that isn't one of 2, 3, 4, 5, 6, 7, 8, or 9. Please try again.", file=sys.stderr)
        else:
            return response


def read_content(filename='words.txt'):
    with open(filename, 'r') as f:
        return f.read().strip().replace(',', ' ')
[ "re.search" ]
[((398, 427), 're.search', 're.search', (['"""[^2-9]"""', 'response'], {}), "('[^2-9]', response)\n", (407, 427), False, 'import re\n')]
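The snippet above collects a digit sequence and a word list but stops short of matching them against each other. One way to close that gap — a sketch only, where the regular-expression approach and the `find_words` helper are one possible design rather than the program's actual continuation:

def find_words(numbers, words, keymap=keymap):
    # build a pattern such as "^[abc][abc][tuv]$" from the pressed digits
    pattern = '^' + ''.join('[{}]'.format(keymap[d]) for d in numbers) + '$'
    return [w for w in words.split() if re.match(pattern, w)]

# e.g. find_words('228', read_content()) could return words such as 'act' and 'cat'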
#!/usr/bin/env python

###############################################################################
# Copyright Kitware Inc. and Contributors
# Distributed under the Apache License, 2.0 (apache.org/licenses/LICENSE-2.0)
# See accompanying Copyright.txt and LICENSE files for details
###############################################################################

import argparse
import cv2
import gdal
import logging
import numpy
import ogr
import osr
import os
import subprocess
import shutil

from danesfield import gdal_utils
from danesfield import rasterize

VECTOR_TYPES = ["buildings", "roads"]


def shift_vector(inputFeatures, outputVectorFile, outputLayerName, outProjection, offsetGeo):
    outDriver = ogr.GetDriverByName("ESRI Shapefile")
    print("Shifting vector -> {}".format(os.path.basename(outputVectorFile)))
    outVector = outDriver.CreateDataSource(outputVectorFile)
    outSrs = osr.SpatialReference(outProjection)
    # create layer
    outLayer = outVector.CreateLayer(os.path.basename(outputLayerName), srs=outSrs, geom_type=ogr.wkbPolygon)
    outFeatureDef = outLayer.GetLayerDefn()
    # create rings from input rings by shifting points
    for feature in inputFeatures:
        # create the poly
        outPoly = ogr.Geometry(ogr.wkbPolygon)
        poly = feature.GetGeometryRef()
        for ring_idx in range(poly.GetGeometryCount()):
            ring = poly.GetGeometryRef(ring_idx)
            # create the ring
            outRing = ogr.Geometry(ogr.wkbLinearRing)
            for i in range(0, ring.GetPointCount()):
                pt = ring.GetPoint(i)
                outRing.AddPoint(pt[0] + offsetGeo[0], pt[1] + offsetGeo[1])
            outPoly.AddGeometry(outRing)
        # create feature
        outFeature = ogr.Feature(outFeatureDef)
        outFeature.SetGeometry(outPoly)
        outLayer.CreateFeature(outFeature)


def copy_shapefile(input, output):
    inputNoExt = os.path.splitext(input)[0]
    outputNoExt = os.path.splitext(output)[0]
    for ext in ['.dbf', '.prj', '.shp', '.shx']:
        shutil.copyfile(inputNoExt + ext, outputNoExt + ext)


def remove_shapefile(input):
    inputNoExt = os.path.splitext(input)[0]
    for ext in ['.dbf', '.prj', '.shp', '.shx']:
        os.remove(inputNoExt + ext)


# project a vector point to image
def ProjectPoint(model, pt):
    # simplest projection model
    px = int((pt[0]-model['corners'][0])/model['project_model'][1]*model['scale'])
    py = int((pt[1]-model['corners'][1])/model['project_model'][5]*model['scale'])
    return [px, py]


def computeMatchingPoints(check_point_list, edge_img, dx, dy):
    img_height = edge_img.shape[0]
    img_width = edge_img.shape[1]
    total_value = 0
    # find overlap mask
    for pt in check_point_list:
        if pt[1]+dy < 0 or pt[1]+dy >= img_height or\
           pt[0]+dx < 0 or pt[0]+dx >= img_width:
            continue
        if edge_img[pt[1]+dy, pt[0]+dx] > 200:
            total_value += 1
    return total_value


def spat_vectors(inputVectorFileNames, inputImageCorners, inputImageSrs, outputMaskFileName, debug=False):
    """
    Returns building features and optionally road features.
    """
    global VECTOR_TYPES
    geometryTypes = [ogr.wkbPolygon, ogr.wkbLineString]
    resultList = []
    for typeIndex in range(len(inputVectorFileNames)):
        inputVectorFileName = inputVectorFileNames[typeIndex]
        inputVector = gdal_utils.ogr_open(inputVectorFileName)
        inputLayer = gdal_utils.ogr_get_layer(inputVector, geometryTypes[typeIndex])
        inputVectorSrs = inputLayer.GetSpatialRef()
        imageVectorDifferentSrs = False if inputVectorSrs.IsSame(inputImageSrs) else True
        layerDefinition = inputLayer.GetLayerDefn()
        hasBuildingField = False
        for i in range(layerDefinition.GetFieldCount()):
            if layerDefinition.GetFieldDefn(i).GetName() == "building":
                hasBuildingField = True
                break
        # clip the shape file first
        outputNoExt = os.path.splitext(outputMaskFileName)[0]
        if imageVectorDifferentSrs:
            outputVectorFile = outputNoExt + "_" + VECTOR_TYPES[typeIndex] + "_original.shp"
        else:
            outputVectorFile = outputNoExt + "_" + VECTOR_TYPES[typeIndex] + "_spat_not_aligned.shp"
        ogr2ogr_args = ["ogr2ogr",
                        "-spat", str(inputImageCorners[0]), str(inputImageCorners[2]),
                        str(inputImageCorners[1]), str(inputImageCorners[3])]
        if imageVectorDifferentSrs:
            ogr2ogr_args.extend(["-spat_srs", str(inputImageSrs)])
        if hasBuildingField:
            ogr2ogr_args.extend(["-where", "building is not null"])
        ogr2ogr_args.extend([outputVectorFile, inputVectorFileName])
        ogr2ogr_args.append(inputLayer.GetName())
        print("Spatial query (clip): {} -> {}".format(
            os.path.basename(inputVectorFileName), os.path.basename(outputVectorFile)))
        response = subprocess.run(ogr2ogr_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        if debug:
            print(*ogr2ogr_args)
            print("{}\n{}".format(response.stdout, response.stderr))
        if imageVectorDifferentSrs:
            # convert to the same SRS as the image file
            inputVectorFileName = outputNoExt + "_" + VECTOR_TYPES[typeIndex] + "_original.shp"
            outputVectorFile = outputNoExt + "_" + VECTOR_TYPES[typeIndex] + "_spat_not_aligned.shp"
            ogr2ogr_args = ["ogr2ogr", "-t_srs", str(inputImageSrs),
                            outputVectorFile, inputVectorFileName]
            print("Convert SRS: {} -> {}".format(
                os.path.basename(inputVectorFileName), os.path.basename(outputVectorFile)))
            response = subprocess.run(ogr2ogr_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            if debug:
                print(*ogr2ogr_args)
                print("{}\n{}".format(response.stdout, response.stderr))
            else:
                remove_shapefile(inputVectorFileName)
        inputVectorFileName = outputVectorFile
        inputLayerName = os.path.splitext(os.path.basename(inputVectorFileName))[0]
        inputVector = gdal_utils.ogr_open(inputVectorFileName)
        inputLayer = inputVector.GetLayer(inputLayerName)
        inputList = list(inputLayer)
        resultList.append(inputList)
    return resultList


def main(args):
    global VECTOR_TYPES
    parser = argparse.ArgumentParser(
        description="Generate building mask aligned with image. To do that we shift input "
                    "vector to match edges generated from image.")
    parser.add_argument('output_mask',
                        help="Output image mask base name. <output_mask>_buildings.shp, "
                             "<output_mask>_buildings.tif are generated. Optionally "
                             "<output_mask>_roads.tif and <output_mask>_roads.shp are "
                             "also generated. See --input_vectors parameter.")
    parser.add_argument('input_image', help='Orthorectified 8-bit image file')
    parser.add_argument('input_vectors', nargs='+',
                        help='Buildings and optionally road vector files with OSM or '
                             'US Cities data. A polygon layer is chosen for buildings and a '
                             'line string layer is chosen for roads. '
                             'If both building and road layers are in the same vector file just '
                             'pass the file twice. Only elevated bridges are rendered '
                             'by default. If all roads need to be rendered pass --render_roads')
    parser.add_argument('--render_cls', action="store_true",
                        help='Output a CLS image')
    parser.add_argument('--render_roads', action="store_true",
                        help='Render all roads, not only elevated bridges')
    parser.add_argument('--scale', type=float, default=0.2,
                        help='Scale factor. '
                             'We cannot deal with the images with original resolution')
    parser.add_argument('--move_thres', type=float, default=5,
                        help='Distance for edge matching')
    parser.add_argument("--offset", type=float, nargs=2,
                        help="Shift the mask using the offset specified "
                             "(using the SRS of the input_image) instead of the computed offset.")
    parser.add_argument("--debug", action="store_true",
                        help="Print debugging information")
    args = parser.parse_args(args)

    scale = args.scale
    inputImage = gdal_utils.gdal_open(args.input_image, gdal.GA_ReadOnly)
    band = inputImage.GetRasterBand(1)
    if (not band.DataType == gdal.GDT_Byte):
        raise RuntimeError(
            "Input image {} does not have Byte type. Use msi-to-rgb.py to-8bit.py "
            "to convert it.".format(args.input_image))
    projection = inputImage.GetProjection()
    inputImageSrs = osr.SpatialReference(projection)
    gt = inputImage.GetGeoTransform()  # captures origin and pixel size
    left, top = gdal.ApplyGeoTransform(gt, 0, 0)
    right, bottom = gdal.ApplyGeoTransform(gt, inputImage.RasterXSize, inputImage.RasterYSize)
    band = None

    print("Resize and edge detection: {}".format(os.path.basename(args.input_image)))
    color_image = cv2.imread(args.input_image)
    small_color_image = numpy.zeros(
        (int(color_image.shape[0]*scale), int(color_image.shape[1]*scale), 3),
        dtype=numpy.uint8)
    if scale != 1.0:
        small_color_image = cv2.resize(color_image, None, fx=scale, fy=scale)
        color_image = small_color_image
    grayimg = cv2.cvtColor(color_image, cv2.COLOR_BGR2GRAY)
    edge_img = cv2.Canny(grayimg, 100, 200)
    if args.debug:
        cv2.imwrite(os.path.splitext(args.output_mask)[0] + '_edge.tif', edge_img)

    model = {}
    model['corners'] = [left, top, right, bottom]
    model['project_model'] = gt
    model['scale'] = scale

    inputImageCorners = [left, right, bottom, top]
    features = spat_vectors(
        args.input_vectors, inputImageCorners, inputImageSrs, args.output_mask)

    print("Aligning {} buildings ...".format(len(features[0])))
    tmp_img = numpy.zeros([int(color_image.shape[0]), int(color_image.shape[1])], dtype=numpy.uint8)
    for feature in features[0]:
        poly = feature.GetGeometryRef()
        for ring_idx in range(poly.GetGeometryCount()):
            ring = poly.GetGeometryRef(ring_idx)
            rp = []
            for i in range(0, ring.GetPointCount()):
                pt = ring.GetPoint(i)
                rp.append(ProjectPoint(model, pt))
            ring_points = numpy.array(rp)
            ring_points = ring_points.reshape((-1, 1, 2))
            # edge mask of the building cluster
            cv2.polylines(tmp_img, [ring_points], True, (255), thickness=2)

    check_point_list = []
    # build a sparse set to fast process
    for y in range(0, tmp_img.shape[0]):
        for x in range(0, tmp_img.shape[1]):
            if tmp_img[y, x] > 200:
                check_point_list.append([x, y])
    print("Checking {} points ...".format(len(check_point_list)))

    max_value = 0
    index_max_value = 0
    offsetGeo = [0.0, 0.0]
    current = [0, 0]
    if not args.offset:
        offset = [0, 0]
        # shift moves possible from [0, 0]
        moves = [
            [1, 0],    # 0
            [1, 1],    # 1
            [0, 1],    # 2
            [-1, 1],   # 3
            [-1, 0],   # 4
            [-1, -1],  # 5
            [0, -1],   # 6
            [1, -1]]   # 7
        initial_cases = range(8)
        # cases[i] shows shift moves possible after the previous move was cases[i][0]
        # we change direction with at most 45 degrees.
        next_cases = [
            [0, 7, 1],
            [1, 0, 2],
            [2, 1, 3],
            [3, 2, 4],
            [4, 3, 5],
            [5, 4, 6],
            [6, 5, 7],
            [7, 6, 0]
        ]
        # move the mask to match
        cases = initial_cases
        old_max_value = 0
        total_value = computeMatchingPoints(check_point_list, edge_img, 0, 0)
        max_value = total_value
        if args.debug:
            print("Total value for ({}, {}) is: {} (max value: {})".format(
                0, 0, total_value, max_value))
        for i in range(args.move_thres):
            if args.debug:
                print("===== {} =====".format(i))
            while (max_value > old_max_value):
                old_max_value = max_value
                for i in cases:
                    [dx, dy] = moves[i]
                    total_value = computeMatchingPoints(check_point_list, edge_img,
                                                        current[0] + dx, current[1] + dy)
                    if args.debug:
                        print("Total value for ({}, {}) is: {} (max value: {})".format(
                            dx, dy, total_value, max_value))
                    if total_value > max_value:
                        max_value = total_value
                        index_max_value = i
                if (max_value > old_max_value):
                    [dx, dy] = moves[index_max_value]
                    current = [current[0] + dx, current[1] + dy]
                    if args.debug:
                        print("Current: {}".format(current))
                    offset = current
                    cases = next_cases[index_max_value]
                    break
        offsetGeo = gdal.ApplyGeoTransform(gt, offset[0] / scale, offset[1] / scale)
        offsetGeo[0] = offsetGeo[0] - left
        offsetGeo[1] = top - offsetGeo[1]
        print("Using offset: {} ({})".format(offsetGeo, offset))
        if max_value/float(len(check_point_list)) < 0.05:
            print("Fewer than 5% of points match {} / {}. This may happen because of "
                  "missing areas in the orthorectified image. "
                  "Increasing scale may increase the number of points that match.".format(
                      max_value, len(check_point_list)))
    else:
        # use the offset given on the command line
        offsetGeo = args.offset
        print("Using offset: {}".format(offsetGeo))

    for i in range(len(features)):
        outputNoExt = os.path.splitext(args.output_mask)[0]
        outputVectorFile = outputNoExt + "_" + VECTOR_TYPES[i] + "_spat.shp"
        if not (offsetGeo[0] == 0.0 and offsetGeo[1] == 0.0):
            shift_vector(features[i], outputVectorFile, outputNoExt, projection, offsetGeo)
        else:
            inputVectorFileName = outputNoExt + "_" + VECTOR_TYPES[i] + "_spat_not_aligned.shp"
            print("Copy vector -> {}".format(os.path.basename(outputVectorFile)))
            copy_shapefile(inputVectorFileName, outputVectorFile)
        if not args.debug:
            remove_shapefile(outputNoExt + "_" + VECTOR_TYPES[i] + "_spat_not_aligned.shp")

        ogr2ogr_args = ["ogr2ogr",
                        "-clipsrc", str(inputImageCorners[0]), str(inputImageCorners[2]),
                        str(inputImageCorners[1]), str(inputImageCorners[3])]
        outputNoExt = os.path.splitext(args.output_mask)[0]
        ogr2ogr_args.extend([outputNoExt + "_" + VECTOR_TYPES[i] + ".shp",
                             outputNoExt + "_" + VECTOR_TYPES[i] + "_spat.shp"])
        print("Clipping vector file {} -> {}".format(
            os.path.basename(outputNoExt + "_" + VECTOR_TYPES[i] + "_spat.shp"),
            os.path.basename(outputNoExt + "_" + VECTOR_TYPES[i] + ".shp")))
        response = subprocess.run(ogr2ogr_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        if args.debug:
            print(*ogr2ogr_args)
            print("{}\n{}".format(response.stdout, response.stderr))
        remove_shapefile(outputNoExt + "_" + VECTOR_TYPES[i] + "_spat.shp")

        if i == 0:
            print("Rasterizing buildings ...")
            if args.render_cls:
                rasterize_args = ["gdal_rasterize", "-ot", "Byte",
                                  "-init", "2", "-burn", "6",
                                  "-ts", str(inputImage.RasterXSize), str(inputImage.RasterYSize),
                                  "-te", str(inputImageCorners[0]), str(inputImageCorners[2]),
                                  str(inputImageCorners[1]), str(inputImageCorners[3])]
            else:
                # make buildings red
                rasterize_args = ["gdal_rasterize", "-ot", "Byte",
                                  "-burn", "255", "-burn", "0", "-burn", "0", "-burn", "255",
                                  "-ts", str(inputImage.RasterXSize), str(inputImage.RasterYSize),
                                  "-te", str(inputImageCorners[0]), str(inputImageCorners[2]),
                                  str(inputImageCorners[1]), str(inputImageCorners[3])]
            outputNoExt = os.path.splitext(args.output_mask)[0]
            rasterize_args.extend([outputNoExt + "_" + VECTOR_TYPES[i] + ".shp",
                                   outputNoExt + "_" + VECTOR_TYPES[i] + ".tif"])
            print("Rasterizing {} -> {}".format(
                os.path.basename(outputNoExt + "_" + VECTOR_TYPES[i] + ".shp"),
                os.path.basename(outputNoExt + "_" + VECTOR_TYPES[i] + ".tif")))
            response = subprocess.run(
                rasterize_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            if args.debug:
                print(*rasterize_args)
                print("{}\n{}".format(response.stdout, response.stderr))
        else:
            print("Rasterizing bridges ...")
            outputNoExt = os.path.splitext(args.output_mask)[0]
            input = os.path.basename(outputNoExt + "_" + VECTOR_TYPES[i] + ".shp")
            output = os.path.basename(outputNoExt + "_" + VECTOR_TYPES[i] + "_bridges.tif")
            bridges = rasterize.rasterize_file_dilated_line(
                input, inputImage, output,
                numpy.ones((3, 3)), dilation_iterations=20,
                query=rasterize.ELEVATED_ROADS_QUERY,
            )
            if not args.debug:
                os.remove(output)
            if args.render_roads:
                output = os.path.basename(outputNoExt + "_" + VECTOR_TYPES[i] + "_roads.tif")
                roads = rasterize.rasterize_file_dilated_line(
                    input, inputImage, output,
                    numpy.ones((3, 3)), dilation_iterations=20,
                    query=rasterize.ROADS_QUERY)
                if not args.debug:
                    os.remove(output)

    buildingsData = gdal_utils.gdal_open(
        os.path.basename(outputNoExt + "_" + VECTOR_TYPES[0] + ".tif"), gdal.GA_ReadOnly)
    if args.render_cls:
        cls = buildingsData.GetRasterBand(1).ReadAsArray()
        if args.render_roads:
            cls[roads] = 11
        cls[bridges] = 17
        gdal_utils.gdal_save(cls, inputImage, os.path.basename(outputNoExt + ".tif"),
                             gdal.GDT_Byte, options=['COMPRESS=DEFLATE'])
    else:
        red = buildingsData.GetRasterBand(1).ReadAsArray()
        green = buildingsData.GetRasterBand(2).ReadAsArray()
        blue = buildingsData.GetRasterBand(3).ReadAsArray()
        opacity = buildingsData.GetRasterBand(4).ReadAsArray()
        if args.render_roads:
            red[roads] = 0
            green[roads] = 255
            blue[roads] = 0
            opacity[roads] = 255
        red[bridges] = 0
        green[bridges] = 0
        blue[bridges] = 255
        opacity[bridges] = 255
        gdal_utils.gdal_save([red, green, blue, opacity], inputImage,
                             os.path.basename(outputNoExt + ".tif"),
                             gdal.GDT_Byte, options=['COMPRESS=DEFLATE'])
    if not args.debug:
        os.remove(os.path.basename(outputNoExt + "_" + VECTOR_TYPES[0] + ".tif"))


if __name__ == '__main__':
    import sys
    try:
        main(sys.argv[1:])
    except Exception as e:
        logging.exception(e)
        sys.exit(1)
[ "os.remove", "argparse.ArgumentParser", "numpy.ones", "danesfield.gdal_utils.ogr_open", "ogr.Feature", "danesfield.gdal_utils.gdal_open", "cv2.cvtColor", "danesfield.gdal_utils.ogr_get_layer", "shutil.copyfile", "cv2.resize", "cv2.Canny", "os.path.basename", "ogr.GetDriverByName", "gdal.ApplyGeoTransform", "ogr.Geometry", "sys.exit", "subprocess.run", "logging.exception", "cv2.polylines", "osr.SpatialReference", "cv2.imread", "numpy.array", "os.path.splitext" ]
[((717, 754), 'ogr.GetDriverByName', 'ogr.GetDriverByName', (['"""ESRI Shapefile"""'], {}), "('ESRI Shapefile')\n", (736, 754), False, 'import ogr\n'), ((907, 942), 'osr.SpatialReference', 'osr.SpatialReference', (['outProjection'], {}), '(outProjection)\n', (927, 942), False, 'import osr\n'), ((6512, 6673), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Generate building mask aligned with image. To do that we shift input vector to match edges generated from image."""'}), "(description=\n 'Generate building mask aligned with image. To do that we shift input vector to match edges generated from image.'\n )\n", (6535, 6673), False, 'import argparse\n'), ((8734, 8790), 'danesfield.gdal_utils.gdal_open', 'gdal_utils.gdal_open', (['args.input_image', 'gdal.GA_ReadOnly'], {}), '(args.input_image, gdal.GA_ReadOnly)\n', (8754, 8790), False, 'from danesfield import gdal_utils\n'), ((9107, 9139), 'osr.SpatialReference', 'osr.SpatialReference', (['projection'], {}), '(projection)\n', (9127, 9139), False, 'import osr\n'), ((9229, 9261), 'gdal.ApplyGeoTransform', 'gdal.ApplyGeoTransform', (['gt', '(0)', '(0)'], {}), '(gt, 0, 0)\n', (9251, 9261), False, 'import gdal\n'), ((9282, 9356), 'gdal.ApplyGeoTransform', 'gdal.ApplyGeoTransform', (['gt', 'inputImage.RasterXSize', 'inputImage.RasterYSize'], {}), '(gt, inputImage.RasterXSize, inputImage.RasterYSize)\n', (9304, 9356), False, 'import gdal\n'), ((9478, 9506), 'cv2.imread', 'cv2.imread', (['args.input_image'], {}), '(args.input_image)\n', (9488, 9506), False, 'import cv2\n'), ((9804, 9849), 'cv2.cvtColor', 'cv2.cvtColor', (['color_image', 'cv2.COLOR_BGR2GRAY'], {}), '(color_image, cv2.COLOR_BGR2GRAY)\n', (9816, 9849), False, 'import cv2\n'), ((9865, 9893), 'cv2.Canny', 'cv2.Canny', (['grayimg', '(100)', '(200)'], {}), '(grayimg, 100, 200)\n', (9874, 9893), False, 'import cv2\n'), ((999, 1032), 'os.path.basename', 'os.path.basename', (['outputLayerName'], {}), '(outputLayerName)\n', (1015, 1032), False, 'import os\n'), ((1286, 1314), 'ogr.Geometry', 'ogr.Geometry', (['ogr.wkbPolygon'], {}), '(ogr.wkbPolygon)\n', (1298, 1314), False, 'import ogr\n'), ((1799, 1825), 'ogr.Feature', 'ogr.Feature', (['outFeatureDef'], {}), '(outFeatureDef)\n', (1810, 1825), False, 'import ogr\n'), ((1963, 1986), 'os.path.splitext', 'os.path.splitext', (['input'], {}), '(input)\n', (1979, 1986), False, 'import os\n'), ((2008, 2032), 'os.path.splitext', 'os.path.splitext', (['output'], {}), '(output)\n', (2024, 2032), False, 'import os\n'), ((2093, 2145), 'shutil.copyfile', 'shutil.copyfile', (['(inputNoExt + ext)', '(outputNoExt + ext)'], {}), '(inputNoExt + ext, outputNoExt + ext)\n', (2108, 2145), False, 'import shutil\n'), ((2194, 2217), 'os.path.splitext', 'os.path.splitext', (['input'], {}), '(input)\n', (2210, 2217), False, 'import os\n'), ((2278, 2305), 'os.remove', 'os.remove', (['(inputNoExt + ext)'], {}), '(inputNoExt + ext)\n', (2287, 2305), False, 'import os\n'), ((3470, 3510), 'danesfield.gdal_utils.ogr_open', 'gdal_utils.ogr_open', (['inputVectorFileName'], {}), '(inputVectorFileName)\n', (3489, 3510), False, 'from danesfield import gdal_utils\n'), ((3532, 3595), 'danesfield.gdal_utils.ogr_get_layer', 'gdal_utils.ogr_get_layer', (['inputVector', 'geometryTypes[typeIndex]'], {}), '(inputVector, geometryTypes[typeIndex])\n', (3556, 3595), False, 'from danesfield import gdal_utils\n'), ((5039, 5115), 'subprocess.run', 'subprocess.run', (['ogr2ogr_args'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(ogr2ogr_args, 
stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n', (5053, 5115), False, 'import subprocess\n'), ((6262, 6302), 'danesfield.gdal_utils.ogr_open', 'gdal_utils.ogr_open', (['inputVectorFileName'], {}), '(inputVectorFileName)\n', (6281, 6302), False, 'from danesfield import gdal_utils\n'), ((9700, 9749), 'cv2.resize', 'cv2.resize', (['color_image', 'None'], {'fx': 'scale', 'fy': 'scale'}), '(color_image, None, fx=scale, fy=scale)\n', (9710, 9749), False, 'import cv2\n'), ((13684, 13748), 'gdal.ApplyGeoTransform', 'gdal.ApplyGeoTransform', (['gt', '(offset[0] / scale)', '(offset[1] / scale)'], {}), '(gt, offset[0] / scale, offset[1] / scale)\n', (13706, 13748), False, 'import gdal\n'), ((15702, 15778), 'subprocess.run', 'subprocess.run', (['ogr2ogr_args'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(ogr2ogr_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n', (15716, 15778), False, 'import subprocess\n'), ((796, 830), 'os.path.basename', 'os.path.basename', (['outputVectorFile'], {}), '(outputVectorFile)\n', (812, 830), False, 'import os\n'), ((1512, 1543), 'ogr.Geometry', 'ogr.Geometry', (['ogr.wkbLinearRing'], {}), '(ogr.wkbLinearRing)\n', (1524, 1543), False, 'import ogr\n'), ((4074, 4110), 'os.path.splitext', 'os.path.splitext', (['outputMaskFileName'], {}), '(outputMaskFileName)\n', (4090, 4110), False, 'import os\n'), ((5826, 5902), 'subprocess.run', 'subprocess.run', (['ogr2ogr_args'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(ogr2ogr_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n', (5840, 5902), False, 'import subprocess\n'), ((9423, 9457), 'os.path.basename', 'os.path.basename', (['args.input_image'], {}), '(args.input_image)\n', (9439, 9457), False, 'import os\n'), ((10846, 10861), 'numpy.array', 'numpy.array', (['rp'], {}), '(rp)\n', (10857, 10861), False, 'import numpy\n'), ((10981, 11042), 'cv2.polylines', 'cv2.polylines', (['tmp_img', '[ring_points]', '(True)', '(255)'], {'thickness': '(2)'}), '(tmp_img, [ring_points], True, 255, thickness=2)\n', (10994, 11042), False, 'import cv2\n'), ((14406, 14440), 'os.path.splitext', 'os.path.splitext', (['args.output_mask'], {}), '(args.output_mask)\n', (14422, 14440), False, 'import os\n'), ((15278, 15312), 'os.path.splitext', 'os.path.splitext', (['args.output_mask'], {}), '(args.output_mask)\n', (15294, 15312), False, 'import os\n'), ((17516, 17594), 'subprocess.run', 'subprocess.run', (['rasterize_args'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(rasterize_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n', (17530, 17594), False, 'import subprocess\n'), ((17906, 17968), 'os.path.basename', 'os.path.basename', (["(outputNoExt + '_' + VECTOR_TYPES[i] + '.shp')"], {}), "(outputNoExt + '_' + VECTOR_TYPES[i] + '.shp')\n", (17922, 17968), False, 'import os\n'), ((17994, 18064), 'os.path.basename', 'os.path.basename', (["(outputNoExt + '_' + VECTOR_TYPES[i] + '_bridges.tif')"], {}), "(outputNoExt + '_' + VECTOR_TYPES[i] + '_bridges.tif')\n", (18010, 18064), False, 'import os\n'), ((20624, 20644), 'logging.exception', 'logging.exception', (['e'], {}), '(e)\n', (20641, 20644), False, 'import logging\n'), ((20653, 20664), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (20661, 20664), False, 'import sys\n'), ((4944, 4981), 'os.path.basename', 'os.path.basename', (['inputVectorFileName'], {}), '(inputVectorFileName)\n', (4960, 4981), False, 'import os\n'), ((4983, 5017), 'os.path.basename', 'os.path.basename', (['outputVectorFile'], {}), '(outputVectorFile)\n', (4999, 
5017), False, 'import os\n'), ((6197, 6234), 'os.path.basename', 'os.path.basename', (['inputVectorFileName'], {}), '(inputVectorFileName)\n', (6213, 6234), False, 'import os\n'), ((15537, 15604), 'os.path.basename', 'os.path.basename', (["(outputNoExt + '_' + VECTOR_TYPES[i] + '_spat.shp')"], {}), "(outputNoExt + '_' + VECTOR_TYPES[i] + '_spat.shp')\n", (15553, 15604), False, 'import os\n'), ((15618, 15680), 'os.path.basename', 'os.path.basename', (["(outputNoExt + '_' + VECTOR_TYPES[i] + '.shp')"], {}), "(outputNoExt + '_' + VECTOR_TYPES[i] + '.shp')\n", (15634, 15680), False, 'import os\n'), ((17083, 17117), 'os.path.splitext', 'os.path.splitext', (['args.output_mask'], {}), '(args.output_mask)\n', (17099, 17117), False, 'import os\n'), ((17844, 17878), 'os.path.splitext', 'os.path.splitext', (['args.output_mask'], {}), '(args.output_mask)\n', (17860, 17878), False, 'import os\n'), ((18197, 18215), 'numpy.ones', 'numpy.ones', (['(3, 3)'], {}), '((3, 3))\n', (18207, 18215), False, 'import numpy\n'), ((18372, 18389), 'os.remove', 'os.remove', (['output'], {}), '(output)\n', (18381, 18389), False, 'import os\n'), ((18457, 18525), 'os.path.basename', 'os.path.basename', (["(outputNoExt + '_' + VECTOR_TYPES[i] + '_roads.tif')"], {}), "(outputNoExt + '_' + VECTOR_TYPES[i] + '_roads.tif')\n", (18473, 18525), False, 'import os\n'), ((18896, 18958), 'os.path.basename', 'os.path.basename', (["(outputNoExt + '_' + VECTOR_TYPES[0] + '.tif')"], {}), "(outputNoExt + '_' + VECTOR_TYPES[0] + '.tif')\n", (18912, 18958), False, 'import os\n'), ((5727, 5764), 'os.path.basename', 'os.path.basename', (['inputVectorFileName'], {}), '(inputVectorFileName)\n', (5743, 5764), False, 'import os\n'), ((5766, 5800), 'os.path.basename', 'os.path.basename', (['outputVectorFile'], {}), '(outputVectorFile)\n', (5782, 5800), False, 'import os\n'), ((9933, 9967), 'os.path.splitext', 'os.path.splitext', (['args.output_mask'], {}), '(args.output_mask)\n', (9949, 9967), False, 'import os\n'), ((14830, 14864), 'os.path.basename', 'os.path.basename', (['outputVectorFile'], {}), '(outputVectorFile)\n', (14846, 14864), False, 'import os\n'), ((17348, 17410), 'os.path.basename', 'os.path.basename', (["(outputNoExt + '_' + VECTOR_TYPES[i] + '.shp')"], {}), "(outputNoExt + '_' + VECTOR_TYPES[i] + '.shp')\n", (17364, 17410), False, 'import os\n'), ((17428, 17490), 'os.path.basename', 'os.path.basename', (["(outputNoExt + '_' + VECTOR_TYPES[i] + '.tif')"], {}), "(outputNoExt + '_' + VECTOR_TYPES[i] + '.tif')\n", (17444, 17490), False, 'import os\n'), ((18668, 18686), 'numpy.ones', 'numpy.ones', (['(3, 3)'], {}), '((3, 3))\n', (18678, 18686), False, 'import numpy\n'), ((18804, 18821), 'os.remove', 'os.remove', (['output'], {}), '(output)\n', (18813, 18821), False, 'import os\n'), ((19324, 19362), 'os.path.basename', 'os.path.basename', (["(outputNoExt + '.tif')"], {}), "(outputNoExt + '.tif')\n", (19340, 19362), False, 'import os\n'), ((20254, 20292), 'os.path.basename', 'os.path.basename', (["(outputNoExt + '.tif')"], {}), "(outputNoExt + '.tif')\n", (20270, 20292), False, 'import os\n'), ((20445, 20507), 'os.path.basename', 'os.path.basename', (["(outputNoExt + '_' + VECTOR_TYPES[0] + '.tif')"], {}), "(outputNoExt + '_' + VECTOR_TYPES[0] + '.tif')\n", (20461, 20507), False, 'import os\n')]
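The geographic offset in the record above comes from pushing the best pixel shift through the image's six-element affine geotransform. A minimal pure-Python sketch of what gdal.ApplyGeoTransform computes; the geotransform, shift, and scale values here are illustrative, not taken from the record:

def apply_geotransform(gt, px, py):
    # X = gt[0] + px * gt[1] + py * gt[2]
    # Y = gt[3] + px * gt[4] + py * gt[5]
    return [gt[0] + px * gt[1] + py * gt[2],
            gt[3] + px * gt[4] + py * gt[5]]

# Hypothetical values: 0.5 m pixels anchored at (500000, 4100000).
gt = (500000.0, 0.5, 0.0, 4100000.0, 0.0, -0.5)
offset = [12, -7]   # best pixel shift found by the search
scale = 0.5         # downscale factor applied before matching
print(apply_geotransform(gt, offset[0] / scale, offset[1] / scale))
# -> [500012.0, 4100007.0]

Dividing the pixel offset by the scale undoes the downsampling applied before edge matching, so the shift is expressed in full-resolution pixels before it is converted to map units.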
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2021 4Paradigm
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from testcasebase import TestCaseBase
import libs.ddt as ddt
from libs.test_loader import load


@ddt.ddt
class TestSetLimit(TestCaseBase):

    @ddt.data(
        ('setlimit', 'Put', 10, 'Set Limit ok'),
        ('setlimit', 'Put', 0, 'Set Limit ok'),
        ('setlimit', 'Put', 1, 'Set Limit ok'),
        ('setlimit', 'Put', 2147483647, 'Set Limit ok'),
        ('setlimit', 'Put', -1, 'Fail to set limit'),
        ('setlimit', 'Put', 1.5, 'Bad set limit format'),
        ('setlimit', 'Get', 10, 'Set Limit ok'),
        ('setlimit', 'Get', 0, 'Set Limit ok'),
        ('setlimit', 'Get', 1, 'Set Limit ok'),
        ('setlimit', 'Get', 2147483647, 'Set Limit ok'),
        ('setlimit', 'Get', -1, 'Fail to set limit'),
        ('setlimit', 'Get', 1.5, 'Bad set limit format'),
        ('setlimit', 'Scan', 10, 'Set Limit ok'),
        ('setlimit', 'Scan', 0, 'Set Limit ok'),
        ('setlimit', 'Scan', 1, 'Set Limit ok'),
        ('setlimit', 'Scan', 2147483647, 'Set Limit ok'),
        ('setlimit', 'Scan', -1, 'Fail to set limit'),
        ('setlimit', 'Scan', 1.5, 'Bad set limit format'),
        ('setlimit', 'Server', 10, 'Set Limit ok'),
        ('setlimit', 'Server', 0, 'Set Limit ok'),
        ('setlimit', 'Server', 1, 'Set Limit ok'),
        ('setlimit', 'Server', 2147483647, 'Set Limit ok'),
        ('setlimit', 'Server', -1, 'Fail to set limit'),
        ('setlimit', 'Server', 1.5, 'Bad set limit format'),
        ('setLimit', 'Scan', 1, 'unsupported cmd')
    )
    @ddt.unpack
    def test_set_max_concurrency(self, command, method, max_concurrency_limit, rsp_msg):
        """
        Modify the value of the max concurrency limit.
        :param self:
        :param max_concurrency_limit:
        :return:
        """
        rs1 = self.ns_setlimit(self.leader, command, method, max_concurrency_limit)
        self.assertIn(rsp_msg, rs1)


if __name__ == "__main__":
    load(TestSetLimit)
[ "libs.ddt.data", "libs.test_loader.load" ]
[((789, 1993), 'libs.ddt.data', 'ddt.data', (["('setlimit', 'Put', 10, 'Set Limit ok')", "('setlimit', 'Put', 0, 'Set Limit ok')", "('setlimit', 'Put', 1, 'Set Limit ok')", "('setlimit', 'Put', 2147483647, 'Set Limit ok')", "('setlimit', 'Put', -1, 'Fail to set limit')", "('setlimit', 'Put', 1.5, 'Bad set limit format')", "('setlimit', 'Get', 10, 'Set Limit ok')", "('setlimit', 'Get', 0, 'Set Limit ok')", "('setlimit', 'Get', 1, 'Set Limit ok')", "('setlimit', 'Get', 2147483647, 'Set Limit ok')", "('setlimit', 'Get', -1, 'Fail to set limit')", "('setlimit', 'Get', 1.5, 'Bad set limit format')", "('setlimit', 'Scan', 10, 'Set Limit ok')", "('setlimit', 'Scan', 0, 'Set Limit ok')", "('setlimit', 'Scan', 1, 'Set Limit ok')", "('setlimit', 'Scan', 2147483647, 'Set Limit ok')", "('setlimit', 'Scan', -1, 'Fail to set limit')", "('setlimit', 'Scan', 1.5, 'Bad set limit format')", "('setlimit', 'Server', 10, 'Set Limit ok')", "('setlimit', 'Server', 0, 'Set Limit ok')", "('setlimit', 'Server', 1, 'Set Limit ok')", "('setlimit', 'Server', 2147483647, 'Set Limit ok')", "('setlimit', 'Server', -1, 'Fail to set limit')", "('setlimit', 'Server', 1.5, 'Bad set limit format')", "('setLimit', 'Scan', 1, 'unsupported cmd')"], {}), "(('setlimit', 'Put', 10, 'Set Limit ok'), ('setlimit', 'Put', 0,\n 'Set Limit ok'), ('setlimit', 'Put', 1, 'Set Limit ok'), ('setlimit',\n 'Put', 2147483647, 'Set Limit ok'), ('setlimit', 'Put', -1,\n 'Fail to set limit'), ('setlimit', 'Put', 1.5, 'Bad set limit format'),\n ('setlimit', 'Get', 10, 'Set Limit ok'), ('setlimit', 'Get', 0,\n 'Set Limit ok'), ('setlimit', 'Get', 1, 'Set Limit ok'), ('setlimit',\n 'Get', 2147483647, 'Set Limit ok'), ('setlimit', 'Get', -1,\n 'Fail to set limit'), ('setlimit', 'Get', 1.5, 'Bad set limit format'),\n ('setlimit', 'Scan', 10, 'Set Limit ok'), ('setlimit', 'Scan', 0,\n 'Set Limit ok'), ('setlimit', 'Scan', 1, 'Set Limit ok'), ('setlimit',\n 'Scan', 2147483647, 'Set Limit ok'), ('setlimit', 'Scan', -1,\n 'Fail to set limit'), ('setlimit', 'Scan', 1.5, 'Bad set limit format'),\n ('setlimit', 'Server', 10, 'Set Limit ok'), ('setlimit', 'Server', 0,\n 'Set Limit ok'), ('setlimit', 'Server', 1, 'Set Limit ok'), ('setlimit',\n 'Server', 2147483647, 'Set Limit ok'), ('setlimit', 'Server', -1,\n 'Fail to set limit'), ('setlimit', 'Server', 1.5,\n 'Bad set limit format'), ('setLimit', 'Scan', 1, 'unsupported cmd'))\n", (797, 1993), True, 'import libs.ddt as ddt\n'), ((2512, 2530), 'libs.test_loader.load', 'load', (['TestSetLimit'], {}), '(TestSetLimit)\n', (2516, 2530), False, 'from libs.test_loader import load\n')]
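The test above leans on ddt's @data/@unpack decorators to fan one method out into 25 named cases. A dependency-free sketch of the same table-driven pattern using only unittest.subTest; the fake_setlimit responder below is a stand-in for the real ns_setlimit call, and the case table is a subset of the record's:

import unittest


class TestSetLimitCases(unittest.TestCase):
    # (command, method, limit, expected substring) -- illustrative subset only
    CASES = [
        ('setlimit', 'Put', 10, 'Set Limit ok'),
        ('setlimit', 'Put', -1, 'Fail to set limit'),
        ('setlimit', 'Put', 1.5, 'Bad set limit format'),
        ('setLimit', 'Scan', 1, 'unsupported cmd'),
    ]

    def fake_setlimit(self, command, method, limit):
        # Stand-in for ns_setlimit; the real test talks to a name server.
        if command != 'setlimit':
            return 'unsupported cmd'
        if not isinstance(limit, int) or isinstance(limit, bool):
            return 'Bad set limit format'
        if limit < 0:
            return 'Fail to set limit'
        return 'Set Limit ok'

    def test_set_max_concurrency(self):
        for command, method, limit, expected in self.CASES:
            with self.subTest(command=command, method=method, limit=limit):
                self.assertIn(expected, self.fake_setlimit(command, method, limit))


if __name__ == '__main__':
    unittest.main()

subTest keeps all failures visible in one run, which is the main property the ddt decorators provide; ddt additionally generates one named test method per tuple.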
#!/usr/bin/env python3

import argparse
import enum
import json
import os.path
import re
import urllib.request

DOC_URL_BASE = "https://raw.githubusercontent.com/mist64/c64ref/master/6502/"

doc_files = {f"{DOC_URL_BASE}{filename}": cpu_type
             for filename, cpu_type in {
                 "cpu_6502.txt": "6502",
                 "cpu_65c02.txt": "65c02",
             }.items()}

mode_change_regex = re.compile(r"\[(?P<mode_name>.*)\]")
comment_regex = re.compile(r"##")
mnemonic_regex = re.compile(r"(?P<mnemonic>\S+)\s+(?P<name>.*)")
description_start_regex = re.compile(r"(?P<mnemonic>\S+)\s+(?P<long_name>.*)")
description_continue_regex = re.compile(r"\s+(?P<description>.*)")


class ParseMode(enum.Enum):
    IGNORE = enum.auto()
    MNEMONICS = enum.auto()
    DESCRIPTIONS = enum.auto()


class Instruction:
    def __init__(self, mnemonic, cpu_type):
        self.mnemonic = mnemonic
        self.cpu_type = cpu_type
        self.name = ""
        self.long_name = ""
        self.description = []

    def html_description(self):
        if self.description:
            html = ""
            for desc_line in self.description:
                html += f"<p>{escape_quotes(desc_line)}</p>"
            return html
        elif self.long_name:
            return f"<p>{escape_quotes(self.long_name)}</p>"
        elif self.name:
            return f"<p>{escape_quotes(self.name)}</p>"
        else:
            return f"<p>{self.mnemonic}</p>"


def get_instructions():
    """Gathers all instruction data and returns it in a dictionary."""
    instructions = {}
    for f, t in doc_files.items():
        instructions_from_file(f, t, instructions)
    return instructions


def instructions_from_file(filename, cpu_type, instructions):
    """Gathers instruction data from a file and adds it to the dictionary."""
    with open_file(filename) as response:
        print(f"Reading from {filename}...")
        parse_mode = ParseMode.IGNORE
        parse_funcs = {ParseMode.MNEMONICS: parse_mnemonics,
                       ParseMode.DESCRIPTIONS: parse_descriptions}
        for line_num, line in enumerate(response_to_lines(response), start=1):
            #print(str(line_num) + "\t" + str(line))
            line = remove_comments(line)
            if not line or line.isspace():
                continue
            regex_match = mode_change_regex.match(line)
            if regex_match:
                parse_mode = mode_change(regex_match.group("mode_name"))
                continue
            if parse_mode == ParseMode.IGNORE:
                continue
            parse_funcs[parse_mode](line, line_num, cpu_type, instructions)


def open_file(filename):
    """Opens a documentation file from the internet."""
    return urllib.request.urlopen(filename)


def response_to_lines(response):
    """Converts an HTTP response to a list containing each line of text."""
    return response.read().decode("utf-8").replace("\xad", "").split("\n")


def remove_comments(line):
    """Removes comments from a line of a documentation file."""
    regex_match = comment_regex.search(line)
    if regex_match:
        return line[:regex_match.start()]
    else:
        return line


def mode_change(mode_name):
    if mode_name == "mnemos":
        return ParseMode.MNEMONICS
    elif mode_name == "documentation-mnemos":
        return ParseMode.DESCRIPTIONS
    else:
        return ParseMode.IGNORE


def parse_mnemonics(line, line_num, cpu_type, instructions):
    regex_match = mnemonic_regex.match(line)
    if regex_match:
        mnemonic = regex_match.group("mnemonic")
        name = regex_match.group("name")
        if mnemonic not in instructions:
            instructions[mnemonic] = Instruction(mnemonic, cpu_type)
        instructions[mnemonic].name = name
    else:
        print(f"Mnemonic parsing: Match failure on line {str(line_num)}")
        print("    " + line)


def parse_descriptions(line, line_num, cpu_type, instructions):
    start_match = description_start_regex.match(line)
    continue_match = description_continue_regex.match(line)
    if start_match:
        mnemonic = start_match.group("mnemonic")
        parse_descriptions.last_mnemonic = mnemonic
        long_name = start_match.group("long_name")
        if mnemonic not in instructions:
            instructions[mnemonic] = Instruction(mnemonic, cpu_type)
        instructions[mnemonic].long_name = long_name
    elif continue_match:
        mnemonic = parse_descriptions.last_mnemonic
        description = continue_match.group("description")
        instructions[mnemonic].description.append(description)


def write_script(filename, instructions):
    script = ["export function getAsmOpcode(opcode) {",
              "    if (!opcode) return;",
              "    switch (opcode.toUpperCase()) {"]
    for inst in instructions.values():
        script.append(f"        case \"{inst.mnemonic}\":")
        script.append("            return {")

        html = f"{16 * ' '}\"html\": \""
        html += inst.html_description()
        html += "\","
        script.append(html)

        if inst.long_name:
            safe_ln = escape_quotes(inst.long_name)
            script.append(f"{16 * ' '}\"tooltip\": \"{safe_ln}\",")
        elif inst.name:
            safe_n = escape_quotes(inst.name)
            script.append(f"{16 * ' '}\"tooltip\": \"{safe_n}\",")
        else:
            script.append(f"{16 * ' '}\"tooltip\": \"{inst.mnemonic}\",")

        # Will need to be replaced when other 65xx CPUs are added
        s = "https://www.pagetable.com/c64ref/6502/?cpu="
        e = "&tab=2#"
        t = inst.cpu_type
        m = inst.mnemonic
        script.append(f"{16 * ' '}\"url\": \"{s}{t}{e}{m}\",")

        script.append(12 * " " + "};")
        script.append("")
    script.append("    }")
    script.append("}")

    with open(filename, "w") as f:
        print(f"Writing output to {filename}...")
        f.write("\n".join(script))
    #print("\n".join(script))


def escape_quotes(string):
    return string.replace("\"", "\\\"")


def get_arguments():
    parser = argparse.ArgumentParser()
    help_text = "the location to which the script will be written"
    relative_path = "/../../../lib/handlers/asm-docs-6502.js"
    script_path = os.path.realpath(__file__)
    script_dir = os.path.dirname(script_path)
    default_path = os.path.normpath(script_dir + relative_path)
    parser.add_argument("-o", "--output", help=help_text, default=default_path)
    return parser.parse_args()


def main():
    args = get_arguments()
    instructions = get_instructions()
    #for inst in instructions.values():
        #print(inst.__dict__)
    write_script(args.output, instructions)


if __name__ == "__main__":
    main()
[ "enum.auto", "argparse.ArgumentParser", "re.compile" ]
[((364, 401), 're.compile', 're.compile', (['"""\\\\[(?P<mode_name>.*)\\\\]"""'], {}), "('\\\\[(?P<mode_name>.*)\\\\]')\n", (374, 401), False, 'import re\n'), ((417, 433), 're.compile', 're.compile', (['"""##"""'], {}), "('##')\n", (427, 433), False, 'import re\n'), ((452, 500), 're.compile', 're.compile', (['"""(?P<mnemonic>\\\\S+)\\\\s+(?P<name>.*)"""'], {}), "('(?P<mnemonic>\\\\S+)\\\\s+(?P<name>.*)')\n", (462, 500), False, 'import re\n'), ((526, 579), 're.compile', 're.compile', (['"""(?P<mnemonic>\\\\S+)\\\\s+(?P<long_name>.*)"""'], {}), "('(?P<mnemonic>\\\\S+)\\\\s+(?P<long_name>.*)')\n", (536, 579), False, 'import re\n'), ((608, 645), 're.compile', 're.compile', (['"""\\\\s+(?P<description>.*)"""'], {}), "('\\\\s+(?P<description>.*)')\n", (618, 645), False, 'import re\n'), ((689, 700), 'enum.auto', 'enum.auto', ([], {}), '()\n', (698, 700), False, 'import enum\n'), ((717, 728), 'enum.auto', 'enum.auto', ([], {}), '()\n', (726, 728), False, 'import enum\n'), ((748, 759), 'enum.auto', 'enum.auto', ([], {}), '()\n', (757, 759), False, 'import enum\n'), ((6051, 6076), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (6074, 6076), False, 'import argparse\n')]
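The generator's parsing loop is a small state machine: a [section] header flips the mode, and named-group regexes pull fields out of each line. A self-contained sketch of that style on made-up input lines, reusing two of the record's patterns:

import re

mode_re = re.compile(r"\[(?P<mode_name>.*)\]")
mnemonic_re = re.compile(r"(?P<mnemonic>\S+)\s+(?P<name>.*)")

sample = [
    "[mnemos]",
    "LDA  Load Accumulator with Memory",
    "STA  Store Accumulator in Memory",
    "[other]",
    "ignored line",
]

parsed, mode = {}, None
for line in sample:
    m = mode_re.match(line)
    if m:
        mode = m.group("mode_name")   # section headers switch parsing mode
        continue
    if mode == "mnemos":
        m = mnemonic_re.match(line)
        if m:
            parsed[m.group("mnemonic")] = m.group("name")

print(parsed)
# {'LDA': 'Load Accumulator with Memory', 'STA': 'Store Accumulator in Memory'}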
# Copyright 2015 Internap. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import __builtin__ import importlib import json import uuid import warnings import requests from netman import raw_or_json from netman.api import NETMAN_API_VERSION from netman.api.objects import bond from netman.api.objects import interface from netman.api.objects import vlan from netman.core.objects.access_groups import IN, OUT from netman.core.objects.exceptions import NetmanException, UnknownSession from netman.core.objects.interface_states import OFF, ON from netman.core.objects.switch_base import SwitchBase def factory(switch_descriptor): warnings.warn("Use SwitchFactory.get_switch_by_descriptor directly to instanciate a switch", DeprecationWarning) return RemoteSwitch(switch_descriptor) class RemoteSwitch(SwitchBase): max_version = NETMAN_API_VERSION def __init__(self, switch_descriptor): super(RemoteSwitch, self).__init__(switch_descriptor) self.requests = requests self.session_id = None if isinstance(self.switch_descriptor.netman_server, list): self._proxy = self.switch_descriptor.netman_server[0] self._next_proxies = self.switch_descriptor.netman_server[1:] else: self._proxy = self.switch_descriptor.netman_server self._next_proxies = [] def _connect(self): self.session_id = str(uuid.uuid4()) self.logger.info("Requesting session {}".format(self.session_id)) url = "{netman}/switches-sessions/{session_id}".format(netman=self._proxy, session_id=self.session_id) details = self.request() details['headers']['Netman-Session-Id'] = self.session_id self.validated(self.requests.post( url=url, data=json.dumps({'hostname': self.switch_descriptor.hostname}), headers=details['headers']) ) self.logger.info("Obtained session {}".format(self.session_id)) def _disconnect(self): self.logger.info("Ending session {}".format(self.session_id)) url = "{netman}/switches-sessions/{session_id}".format(netman=self._proxy, session_id=self.session_id) session_id = self.session_id self.session_id = None self.validated(self.requests.delete(url=url, headers={'Netman-Verbose-Errors': "yes", 'Netman-Max-Version': str(self.max_version), 'Netman-Session-Id': session_id})) self.logger.info("Ended session {}".format(self.session_id)) def _start_transaction(self): self.logger.info("Starting Transaction for session_id: {}".format(self.session_id)) url = "{netman}/switches-sessions/{session_id}/actions".format(netman=self._proxy, session_id=self.session_id) self.validated(self.requests.post(url=url, headers={'Netman-Verbose-Errors': "yes", 'Netman-Max-Version': str(self.max_version), 'Netman-Session-Id': self.session_id}, data='start_transaction')) self.logger.info("Started Transaction for session_id: {}".format(self.session_id)) def commit_transaction(self): self.logger.info("Commiting {}".format(self.session_id)) url = "{netman}/switches-sessions/{session_id}/actions".format(netman=self._proxy, session_id=self.session_id) self.validated(self.requests.post(url=url, headers={'Netman-Verbose-Errors': "yes", 'Netman-Max-Version': str(self.max_version), 
'Netman-Session-Id': self.session_id}, data='commit')) self.logger.info("Commited {}".format(self.session_id)) def rollback_transaction(self): self.logger.info("Rollbacking {}".format(self.session_id)) url = "{netman}/switches-sessions/{session_id}/actions".format(netman=self._proxy, session_id=self.session_id) self.validated(self.requests.post(url=url, headers={'Netman-Verbose-Errors': "yes", 'Netman-Max-Version': str(self.max_version), 'Netman-Session-Id': self.session_id}, data='rollback')) self.logger.info("Rollbacked {}".format(self.session_id)) def _end_transaction(self): self.logger.info("Ending Transaction for session_id: {}".format(self.session_id)) url = "{netman}/switches-sessions/{session_id}/actions".format(netman=self._proxy, session_id=self.session_id) self.validated(self.requests.post(url=url, headers={'Netman-Verbose-Errors': "yes", 'Netman-Max-Version': str(self.max_version), 'Netman-Session-Id': self.session_id}, data='end_transaction')) self.logger.info("Transaction ended for session_id: {}".format(self.session_id)) def get_vlan(self, number): return vlan.to_core(self.get("/vlans/{}".format(number)).json()) def get_vlans(self): return [vlan.to_core(row) for row in self.get("/vlans").json()] def get_interface(self, interface_id): return interface.to_core(self.get("/interfaces/{}".format(interface_id)).json()) def get_interfaces(self): return [interface.to_core(row) for row in self.get("/interfaces").json()] def get_bond(self, number): reply = self.get('/bonds/{}'.format(number)) return bond.to_core(reply.json(), version=reply.headers.get('Netman-Version')) def get_bonds(self): reply = self.get("/bonds") return [bond.to_core(row, version=reply.headers.get('Netman-Version')) for row in reply.json()] def add_vlan(self, number, name=None): data = {'number': number} if name is not None: data['name'] = name self.post("/vlans", data=data) def remove_vlan(self, number): self.delete("/vlans/{0}".format(str(number))) def get_vlan_interfaces(self, vlan_number): return self.get("/vlans/{}/interfaces".format(vlan_number)).json() def set_vlan_access_group(self, vlan_number, direction, name): self.put('/vlans/{vlan_number}/access-groups/{direction}'.format( vlan_number=vlan_number, direction={IN: 'in', OUT: 'out'}[direction] ), raw_data=name) def unset_vlan_access_group(self, vlan_number, direction): self.delete('/vlans/{vlan_number}/access-groups/{direction}'.format( vlan_number=vlan_number, direction={IN: 'in', OUT: 'out'}[direction] )) def add_ip_to_vlan(self, vlan_number, ip_network): self.post('/vlans/{vlan_number}/ips'.format( vlan_number=vlan_number ), raw_data=str(ip_network)) def remove_ip_from_vlan(self, vlan_number, ip_network): self.delete('/vlans/{vlan_number}/ips/{ip_network}'.format( vlan_number=vlan_number, ip_network=ip_network )) def set_vlan_vrf(self, vlan_number, vrf_name): self.put('/vlans/{vlan_number}/vrf-forwarding'.format( vlan_number=vlan_number ), raw_data=str(vrf_name)) def unset_vlan_vrf(self, vlan_number): self.delete('/vlans/{vlan_number}/vrf-forwarding'.format(vlan_number=vlan_number)) def set_vlan_load_interval(self, vlan_number, time_interval): self.put('/vlans/{vlan_number}/load-interval'.format( vlan_number=vlan_number ), raw_data=str(time_interval)) def unset_vlan_load_interval(self, vlan_number): self.delete('/vlans/{vlan_number}/load-interval'.format(vlan_number=vlan_number)) def set_vlan_mpls_ip_state(self, vlan_number, state): self.put('/vlans/{vlan_number}/mpls-ip'.format( vlan_number=vlan_number ), data=state) def set_access_mode(self, 
interface_id): self.put("/interfaces/" + interface_id + '/port-mode', raw_data='access') def set_trunk_mode(self, interface_id): self.put("/interfaces/" + interface_id + '/port-mode', raw_data='trunk') def set_bond_access_mode(self, bond_number): self.put("/bonds/" + str(bond_number) + '/port-mode', raw_data='access') def set_bond_trunk_mode(self, bond_number): self.put("/bonds/" + str(bond_number) + '/port-mode', raw_data='trunk') def set_access_vlan(self, interface_id, vlan): self.put("/interfaces/" + interface_id + '/access-vlan', raw_data=str(vlan)) def reset_interface(self, interface_id): self.put("/interfaces/" + interface_id) def unset_interface_access_vlan(self, interface_id): self.delete("/interfaces/" + interface_id + '/access-vlan') def set_interface_native_vlan(self, interface_id, vlan): self.put("/interfaces/" + interface_id + '/trunk-native-vlan', raw_data=str(vlan)) def unset_interface_native_vlan(self, interface_id): self.delete("/interfaces/" + interface_id + '/trunk-native-vlan') def set_bond_native_vlan(self, bond_number, vlan): self.put("/bonds/" + str(bond_number) + '/trunk-native-vlan', raw_data=str(vlan)) def unset_bond_native_vlan(self, bond_number): self.delete("/bonds/" + str(bond_number) + '/trunk-native-vlan') def add_trunk_vlan(self, interface_id, vlan): self.post("/interfaces/" + interface_id + '/trunk-vlans', raw_data=str(vlan)) def remove_trunk_vlan(self, interface_id, vlan): self.delete("/interfaces/" + interface_id + '/trunk-vlans/' + str(vlan)) def add_bond_trunk_vlan(self, bond_number, vlan): self.post("/bonds/" + str(bond_number) + '/trunk-vlans', raw_data=str(vlan)) def remove_bond_trunk_vlan(self, bond_number, vlan): self.delete("/bonds/" + str(bond_number) + '/trunk-vlans/' + str(vlan)) def set_interface_description(self, interface_id, description): self.put("/interfaces/" + interface_id + '/description', raw_data=description) def unset_interface_description(self, interface_id): self.delete("/interfaces/" + interface_id + '/description') def set_bond_description(self, bond_number, description): self.put("/bonds/" + str(bond_number) + '/description', raw_data=description) def unset_bond_description(self, bond_number): self.delete("/bonds/" + str(bond_number) + '/description') def set_interface_mtu(self, interface_id, size): self.put("/interfaces/" + interface_id + '/mtu', raw_data=str(size)) def unset_interface_mtu(self, interface_id): self.delete("/interfaces/" + interface_id + '/mtu') def set_bond_mtu(self, bond_number, size): self.put("/bonds/" + str(bond_number) + '/mtu', raw_data=str(size)) def unset_bond_mtu(self, bond_number): self.delete("/bonds/" + str(bond_number) + '/mtu') def edit_interface_spanning_tree(self, interface_id, edge=None): data = {} if edge is not None: data["edge"] = edge self.put("/interfaces/" + interface_id + '/spanning-tree', data=data) def set_interface_state(self, interface_id, state): self.put("/interfaces/" + interface_id + '/shutdown', raw_data='true' if state is OFF else 'false') def unset_interface_state(self, interface_id): self.delete("/interfaces/" + interface_id + '/shutdown') def set_interface_auto_negotiation_state(self, interface_id, state): self.put("/interfaces/" + interface_id + '/auto-negotiation', raw_data='true' if state is ON else 'false') def unset_interface_auto_negotiation_state(self, interface_id): self.delete("/interfaces/" + interface_id + '/auto-negotiation') def add_bond(self, number): self.post("/bonds", data={'number': number}) def remove_bond(self, number): self.delete("/bonds/" + 
str(number)) def add_interface_to_bond(self, interface, bond_number): self.put("/interfaces/" + interface + '/bond-master', raw_data=str(bond_number)) def remove_interface_from_bond(self, interface): self.delete("/interfaces/" + interface + '/bond-master') def set_bond_link_speed(self, number, speed): self.put("/bonds/{0}/link-speed".format(number), raw_data=speed) def edit_bond_spanning_tree(self, number, edge=None): data = {} if edge is not None: data["edge"] = edge self.put("/bonds/{0}/spanning-tree".format(number), data=data) def add_vrrp_group(self, vlan_number, group_id, ips=None, priority=None, hello_interval=None, dead_interval=None, track_id=None, track_decrement=None): self.post("/vlans/{}/vrrp-groups".format(vlan_number), data=dict(id=group_id, ips=[str(ip) for ip in ips], priority=priority, hello_interval=hello_interval, dead_interval=dead_interval, track_id=track_id, track_decrement=track_decrement)) def remove_vrrp_group(self, vlan_number, group_id): self.delete("/vlans/{}/vrrp-groups/{}".format(vlan_number, group_id)) def add_vlan_varp_ip(self, vlan_number, ip_network): self.post('/vlans/{vlan_number}/varp-ips'.format( vlan_number=vlan_number, ), raw_data=str(ip_network)) def remove_vlan_varp_ip(self, vlan_number, ip_network): self.delete('/vlans/{vlan_number}/varp-ips/{ip_network}'.format( vlan_number=vlan_number, ip_network=ip_network )) def add_dhcp_relay_server(self, vlan_number, ip_address): self.post("/vlans/{}/dhcp-relay-server".format( vlan_number), raw_data=str(ip_address)) def remove_dhcp_relay_server(self, vlan_number, ip_address): self.delete("/vlans/{}/dhcp-relay-server/{}".format( vlan_number, ip_address)) def set_interface_lldp_state(self, interface_id, enabled): self.put("/interfaces/{}/lldp".format(interface_id), raw_data=_get_json_boolean(enabled)) def set_vlan_arp_routing_state(self, vlan_number, state): self.put('/vlans/{}/arp-routing'.format(vlan_number), raw_data='true' if state is ON else 'false') def set_vlan_icmp_redirects_state(self, vlan_number, state): self.put('/vlans/{}/icmp-redirects'.format(vlan_number), raw_data=_get_json_boolean(state)) def set_vlan_ntp_state(self, vlan_number, state): self.put('/vlans/{}/ntp'.format(vlan_number), raw_data=_get_json_boolean(state)) def set_vlan_unicast_rpf_mode(self, vlan_number, mode): self.put('/vlans/{}/unicast-rpf-mode'.format(vlan_number), raw_data=str(mode)) def unset_vlan_unicast_rpf_mode(self, vlan_number): self.delete('/vlans/{}/unicast-rpf-mode'.format(vlan_number)) def get_mac_addresses(self): return self.get("/interfaces/mac-addresses") def get_versions(self): return self.get("/versions").json() def get(self, relative_url): return self._retry_on_unknown_session( lambda: self.validated( self.requests.get(**self.request(relative_url)))) def post(self, relative_url, data=None, raw_data=None): return self._retry_on_unknown_session( lambda: self.validated( self.requests.post( data=raw_or_json(raw_data, data), **self.request(relative_url)))) def put(self, relative_url, data=None, raw_data=None): return self._retry_on_unknown_session( lambda: self.validated( self.requests.put( data=raw_or_json(raw_data, data), **self.request(relative_url)))) def delete(self, relative_url): return self._retry_on_unknown_session( lambda: self.validated( self.requests.delete(**self.request(relative_url)))) def request(self, relative_url=''): headers = { 'Netman-Model': self.switch_descriptor.model, 'Netman-Username': self.switch_descriptor.username, 'Netman-Password': self.switch_descriptor.password, 'Netman-Port': 
str(self.switch_descriptor.port), 'Netman-Max-Version': str(self.max_version), 'Netman-Verbose-Errors': "yes" } if len(self._next_proxies) > 0: headers["Netman-Proxy-Server"] = ",".join(self._next_proxies) if self.session_id: url = "{netman_url}/switches-sessions/{session_id}{path}".format( netman_url=self._proxy, session_id=self.session_id, path=relative_url ) headers['Netman-Session-Id'] = self.session_id else: url = "{netman_url}/switches/{switch}{path}".format( netman_url=self._proxy, switch=self.switch_descriptor.hostname, path=relative_url) self.logger.info("Querying " + url) return { "url": url, "headers": headers, } def validated(self, req): if req.status_code >= 400: try: error = req.json() except Exception as e: self.logger.exception(e) raise Exception("{0}: {1}".format(req.status_code, req.content)) if "error-class" in error: if "error-module" in error: try: module = importlib.import_module(error["error-module"]) exception = getattr(module, error["error-class"])() exception.args = (error["error"], ) exception.message = error["error"] except: exception = NetmanException('{error-module}.{error-class}: {error}'.format(**error)) else: exception = getattr(__builtin__, error["error-class"])(error["error"]) else: exception = Exception(error["error"]) raise exception return req def _retry_on_unknown_session(self, operation): try: return operation() except UnknownSession as e: self.logger.warning("Could not perform operation, {}... " "Requesting a new session".format(e)) self._connect() return operation() def _get_json_boolean(state): return {True: "true", False: "false"}[state]
[ "netman.api.objects.interface.to_core", "uuid.uuid4", "importlib.import_module", "json.dumps", "netman.api.objects.vlan.to_core", "netman.raw_or_json", "warnings.warn" ]
[((1133, 1255), 'warnings.warn', 'warnings.warn', (['"""Use SwitchFactory.get_switch_by_descriptor directly to instanciate a switch"""', 'DeprecationWarning'], {}), "(\n 'Use SwitchFactory.get_switch_by_descriptor directly to instanciate a switch'\n , DeprecationWarning)\n", (1146, 1255), False, 'import warnings\n'), ((1906, 1918), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1916, 1918), False, 'import uuid\n'), ((5768, 5785), 'netman.api.objects.vlan.to_core', 'vlan.to_core', (['row'], {}), '(row)\n', (5780, 5785), False, 'from netman.api.objects import vlan\n'), ((6004, 6026), 'netman.api.objects.interface.to_core', 'interface.to_core', (['row'], {}), '(row)\n', (6021, 6026), False, 'from netman.api.objects import interface\n'), ((2285, 2342), 'json.dumps', 'json.dumps', (["{'hostname': self.switch_descriptor.hostname}"], {}), "({'hostname': self.switch_descriptor.hostname})\n", (2295, 2342), False, 'import json\n'), ((18217, 18263), 'importlib.import_module', 'importlib.import_module', (["error['error-module']"], {}), "(error['error-module'])\n", (18240, 18263), False, 'import importlib\n'), ((16078, 16105), 'netman.raw_or_json', 'raw_or_json', (['raw_data', 'data'], {}), '(raw_data, data)\n', (16089, 16105), False, 'from netman import raw_or_json\n'), ((16362, 16389), 'netman.raw_or_json', 'raw_or_json', (['raw_data', 'data'], {}), '(raw_data, data)\n', (16373, 16389), False, 'from netman import raw_or_json\n')]
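A pattern worth noting in the RemoteSwitch record is _retry_on_unknown_session: every HTTP verb is wrapped so that a stale server-side session is re-established once and the original call replayed. Reduced to a dependency-free sketch, where the exception class and the reconnect hook stand in for netman's own:

class UnknownSession(Exception):
    pass


def retry_on_unknown_session(operation, reconnect):
    try:
        return operation()
    except UnknownSession:
        reconnect()           # re-establish the session once
        return operation()    # replay the call; a second failure propagates

Passing the call as a zero-argument callable (as the record does with lambdas) is what lets one wrapper cover GET, POST, PUT, and DELETE without duplicating the retry logic.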