index
int64
repo_name
string
branch_name
string
path
string
content
string
import_graph
string
36,377
nuagenetworks/topology-collector
refs/heads/master
/nuage_topology_collector/library/bridgeinfo.py
#!/usr/bin/python
# Copyright 2020 NOKIA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from ansible.module_utils.basic import AnsibleModule

ANSIBLE_METADATA = {
    'metadata_version': '1.0',
    'status': ['preview'],
    'supported_by': 'community'
}

DOCUMENTATION = '''
---
module: bridgeinfo
short_description: Query Open vSwitch bridges
version_added: "2.4"
description:
    - "Query Open vSwitch bridges"
author: Vlad Gridin (vladyslav.gridin@nokia.com)
options:
    host:
        default: '127.0.0.1'
        description:
            - OVS Manager address
    port:
        default: '6640'
        description:
            - OVS Manager port
    bridge_mappings:
        default: {}
        description:
            - Dict of physnet/bridge relations from OVS agent
'''

EXAMPLES = '''
'''

RETURN = '''
brinfo:
    description: Dict with phy interface to ovs bridge mapping
    returned: always
    type: dict
    sample: {
        "eth0": "br-ex",
        "eth1": "br-public",
        "eth2": None
    }
'''


class OvsdbQuery(object):
    """Thin wrapper around an ovsdbapp client for querying OVS bridges.

    Connects to the OVSDB manager described by the module's ``host`` and
    ``port`` parameters and exposes a single query, get_ovs_topology().
    """

    def __init__(self, module):
        # module: the AnsibleModule instance; kept for params and fail_json.
        self.module = module
        self.ovsdbclient = self._get_ovsdb_client(module)

    def _get_ovsdb_client(self, module):
        """Build and return a connected ovsdbapp client.

        Imports ovsdbapp lazily so the module can fail_json() with a clear
        message when the library is missing instead of crashing at import
        time.  Returns None only if fail_json() did not exit (normally it
        terminates the module).
        """
        try:
            from ovsdbapp.backend.ovs_idl import command
            from ovsdbapp.backend.ovs_idl import connection
            from ovsdbapp.backend.ovs_idl import idlutils
            from ovsdbapp.schema.open_vswitch import impl_idl
        except ImportError as e:
            self.module.log(msg=str(e))
            self.module.fail_json(msg="ovsdbapp module is required")

        class GetIfaceCommand(command.ReadOnlyCommand):
            """Read-only OVSDB command: look up one Interface row by name."""

            def __init__(self, api, iface):
                super(GetIfaceCommand, self).__init__(api)
                self.iface = iface

            def run_idl(self, txn):
                # row_by_value raises through ovsdbapp if no matching row;
                # the caller uses execute(check_error=True) to surface that.
                iface = idlutils.row_by_value(self.api.idl, 'Interface',
                                              'name', self.iface)
                self.result = iface

        class TcOvsdbIdl(impl_idl.OvsdbIdl):
            """OvsdbIdl extended with a get_iface() command factory."""

            def __init__(self, connection):
                super(TcOvsdbIdl, self).__init__(connection)

            def get_iface(self, iface):
                return GetIfaceCommand(self, iface)

        endpoint = ("tcp:%(host)s:%(port)s" % module.params)
        client = None
        try:
            idl = connection.OvsdbIdl.from_server(endpoint, 'Open_vSwitch')
            # NOTE(review): this rebinds the imported `connection` module to
            # a Connection instance; harmless here since the module name is
            # not used again, but worth renaming if this code is touched.
            connection = connection.Connection(idl=idl, timeout=3)
            client = TcOvsdbIdl(connection)
        except Exception as e:
            self.module.fail_json(msg=("could not connect to openvswitch. "
                                       "error: %s") % str(e))
        return client

    def get_ovs_topology(self):
        """Map interface names to their bridge and interface type.

        Only bridges that appear as values in the ``bridge_mappings``
        module parameter are inspected.  Returns a dict of
        {ifname: {'bridge': <bridge name>, 'type': <ovs iface type>}}.
        """
        ovs_topology = dict()
        bridge_mappings = self.module.params['bridge_mappings']
        bridges = self.ovsdbclient.list_br().execute(check_error=True)
        for br in bridges:
            if br in bridge_mappings.values():
                ifaces = self.ovsdbclient.list_ifaces(br).execute(
                    check_error=True)
                for ifname in ifaces:
                    iface = self.ovsdbclient.get_iface(ifname).execute(
                        check_error=True)
                    ovs_topology[ifname] = {'bridge': br,
                                            'type': iface.type}
        return ovs_topology


def run_module():
    """Entry point for the Ansible module run.

    In check mode the module exits immediately with an empty result;
    otherwise it queries OVS and returns the topology under ``brinfo``.
    """
    module_args = dict(
        host=dict(type='str', required=False, default='127.0.0.1'),
        port=dict(type='str', required=False, default='6640'),
        bridge_mappings=dict(type='dict', required=False, default=dict())
    )

    result = dict(
        changed=False,
        brinfo=dict()
    )

    module = AnsibleModule(
        argument_spec=module_args,
        supports_check_mode=True
    )

    if module.check_mode:
        module.exit_json(**result)

    query = OvsdbQuery(module)
    bridgeinfo = query.get_ovs_topology()
    result['brinfo'] = bridgeinfo
    module.exit_json(**result)


def main():
    run_module()


if __name__ == '__main__':
    main()
{"/nuage_topology_collector/scripts/topology_import.py": ["/nuage_topology_collector/scripts/helper/osclient.py", "/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_topology_import.py": ["/nuage_topology_collector/scripts/topology_import.py"], "/nuage_topology_collector/scripts/compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py", "/nuage_topology_collector/scripts/helper/osclient.py"], "/nuage_topology_collector/scripts/generate_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/scripts/helper/osclient.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"]}
36,378
nuagenetworks/topology-collector
refs/heads/master
/nuage_topology_collector/scripts/topology_import.py
#!/usr/bin/env python
# Copyright 2017 NOKIA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import json
import logging
import os
import sys

from neutronclient.common.exceptions import Conflict

# TODO(OPENSTACK-2892) :
# This is temporary code for dealing with py2/py3 compatibility and have
# unit tests pass, while the production code isn't deployed as a true
# python package. This will be worked on in a subsequent release.
try:
    from .helper import constants
    from .helper import script_logging
    from .helper.osclient import NeutronClient
    from .helper.utils import Utils
except (ImportError, ValueError):
    from helper import constants
    from helper import script_logging
    from helper.osclient import NeutronClient
    from helper.utils import Utils

script_name = 'topology_import'
LOG = logging.getLogger(script_name)

# Accumulates {(compute_name, interface_name): [failed mappings]} so a
# failure summary can be logged at the end of the run.
interface_of_compute_with_error = {}
compute_host_name = None


class TopologyReader(object):
    """Reads a topology JSON report and yields its interface records."""

    def __init__(self, path):
        super(TopologyReader, self).__init__()
        self.path = path
        self.json_data = self._load_json()

    def _load_json(self):
        """Load and return the JSON document at self.path."""
        with open(self.path) as topology_file:
            return json.load(topology_file)

    def interfaces(self):
        """Yield every interface dict of every compute host.

        Side effects: logs per-host/per-interface progress, updates the
        module-level ``compute_host_name``, and stamps each yielded
        interface with its host's 'service_host name' under 'host_id'.
        """
        global compute_host_name
        compute_host_index = 0
        total_compute_host = len(self.json_data['compute-hosts'])
        for compute_host in self.json_data['compute-hosts']:
            compute_host_name = str(self.json_data['compute-hosts']
                                    [compute_host_index]
                                    ['service_host name'])
            msg = "\n Processing Compute Host - " + compute_host_name
            total_compute_host_left = total_compute_host - compute_host_index
            msg = msg + ", Total Computes Left: %s" % total_compute_host_left
            LOG.user(msg)
            compute_host_index = compute_host_index + 1
            total_interfaces_within_compute = len(compute_host['interfaces'])
            interfaces_within_compute_index = 0
            for interface in compute_host['interfaces']:
                with script_logging.indentation():
                    # fixed typo: was "Procesing"
                    msg = " Processing Interface - " + interface["name"]
                    total_interfaces_within_compute_left = \
                        total_interfaces_within_compute - \
                        interfaces_within_compute_index
                    msg = msg + (", Total Interfaces Left: %s"
                                 % total_interfaces_within_compute_left)
                    LOG.user(msg)
                    interfaces_within_compute_index = \
                        interfaces_within_compute_index + 1
                interface['host_id'] = compute_host['service_host name']
                yield interface


class TopologyConverter(object):
    """Converts interface records into neutron switchport mappings."""

    def __init__(self, neutronclient):
        super(TopologyConverter, self).__init__()
        self.neutron = neutronclient

    def interface_to_mappings(self, interface):
        """Return the switchport mappings for one interface.

        One mapping is produced per virtual function (keyed by its PCI id)
        plus one for the physical interface itself (keyed by its name).
        """
        base_mapping = {
            'switch_info': interface['neighbor-system-name'],
            'switch_id': interface['neighbor-system-mgmt-ip'],
            'port_id': interface['neighbor-system-port'],
            'host_id': interface['host_id'],
            'bridge': interface['ovs-bridge']
        }
        interface_mappings = []
        for virtual_function in interface['vf_info']:
            vf_mapping = self.function_to_mapping(virtual_function)
            interface_mapping = dict(base_mapping, **vf_mapping)
            interface_mappings.append(interface_mapping)
        vf_mapping = {'pci_slot': interface['name']}
        interface_mapping = dict(base_mapping, **vf_mapping)
        interface_mappings.append(interface_mapping)
        with script_logging.indentation():
            if not interface_mappings:
                # fixed typo: was "exsits"
                LOG.user("No Interface mapping exists for %s "
                         % interface["name"])
            else:
                LOG.user("Processing %s mappings" % len(interface_mappings))
        return interface_mappings

    def function_to_mapping(self, virtual_function):
        """Return the mapping fragment for a single virtual function."""
        return {'pci_slot': virtual_function['pci-id']}


def create_or_update(converter, switchport_mapping):
    """Create a switchport mapping, falling back to update on Conflict.

    On Conflict the existing mapping for the same host/pci_slot is looked
    up and updated; if none is found the original Conflict is re-raised.
    """
    try:
        body = {'switchport_mapping': switchport_mapping}
        converter.neutron.create_switchport_mapping(body)
    except Conflict:
        # mapping for host/slot might exist already
        # we attempt to retrieve it and do update
        filters = {
            'host_id': switchport_mapping.get('host_id'),
            'pci_slot': switchport_mapping.get('pci_slot')
        }
        mappings = converter.neutron.get_switchport_mapping(
            **filters)['switchport_mappings']
        if not mappings:
            raise
        converter.neutron.update_switchport_mapping(
            mappings[0]['id'], body)


@script_logging.step(description="importing topology")
def import_interfaces(reader, converter):
    """Import every interface mapping from the report into neutron.

    Failures are collected per (compute, interface) and summarised at
    debug level at the end instead of aborting the whole import.
    """
    global compute_host_name, interface_of_compute_with_error
    for interface in reader.interfaces():
        with script_logging.indentation():
            mapping = []
            for switchport_mapping in converter.interface_to_mappings(
                    interface):
                if switchport_mapping:
                    LOG.debug("Sending %s",
                              {'switchport_mapping': switchport_mapping})
                    try:
                        create_or_update(converter, switchport_mapping)
                        LOG.debug("Successfully imported the "
                                  "SwitchPort Mapping")
                    except Exception as e:
                        with script_logging.indentation():
                            msg_arg = {
                                # BUG FIX: was `e.message`, which does not
                                # exist on Python 3 exceptions and raised
                                # AttributeError inside this handler,
                                # masking the real error.
                                "error_msg": str(e),
                                "switchport_mapping": switchport_mapping,
                                "interface": interface["name"]
                            }
                            LOG.user("Failed to import SwitchPort Mapping:"
                                     "%(switchport_mapping)s" % msg_arg,
                                     exc_info=True)
                            LOG.user("ERROR: %(error_msg)s" % msg_arg)
                            mapping.append(switchport_mapping)
                            interface_of_compute_with_error.update({
                                (compute_host_name,
                                 interface["name"]): mapping})
    LOG.debug("\n")
    LOG.debug("-----------------")
    LOG.debug(" Failure summary")
    LOG.debug("-------------------")
    LOG.debug("Errors occurred in:")
    for (compute_name, interface_name), switchport_mapping in \
            interface_of_compute_with_error.items():
        with script_logging.indentation():
            LOG.debug("Compute Host %s" % compute_name)
            with script_logging.indentation():
                LOG.debug("Interface Name: %s" % interface_name)
                with script_logging.indentation():
                    for mapping in switchport_mapping:
                        LOG.debug("SwitchPort Mapping: %s" % mapping)
    LOG.debug("\n")
    log_dir = os.path.expanduser('~') + '/nuage_logs'
    LOG.user("Complete!! "
             "Please check the log file %s for summary" % log_dir)


def main(argv):
    """Validate the environment and run the topology import.

    argv[1] must be the path to the topology report; the script must run
    as the stack user with a readable overcloudrc file.
    """
    if len(argv) != 2:
        sys.stdout.write("ERROR: Please pass the new report as an argument.\n")
        sys.exit(1)
    if not script_logging.log_file:
        script_logging.init_logging(script_name)
    if not os.path.exists(argv[1]):
        sys.stdout.write("ERROR: The report %s does not exist. \n" % argv[1])
        sys.exit(1)
    if not Utils.check_user(constants.STACK_USER):
        sys.stdout.write("ERROR: Run the script as %s "
                         "user.\n" % constants.STACK_USER)
        sys.exit(1)
    if not os.path.isfile(constants.OVERCLOUDRC_FILE):
        sys.stdout.write("ERROR: %s does not exist."
                         "\n" % constants.OVERCLOUDRC_FILE)
        sys.exit(1)
    Utils.source_rc_files(constants.OVERCLOUDRC_FILE)
    neutron_client = NeutronClient()
    neutron_client.authenticate()
    reader = TopologyReader(argv[1])
    converter = TopologyConverter(neutron_client)
    import_interfaces(reader, converter)


if __name__ == '__main__':
    main(sys.argv)
{"/nuage_topology_collector/scripts/topology_import.py": ["/nuage_topology_collector/scripts/helper/osclient.py", "/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_topology_import.py": ["/nuage_topology_collector/scripts/topology_import.py"], "/nuage_topology_collector/scripts/compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py", "/nuage_topology_collector/scripts/helper/osclient.py"], "/nuage_topology_collector/scripts/generate_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/scripts/helper/osclient.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"]}
36,379
nuagenetworks/topology-collector
refs/heads/master
/nuage_topology_collector/tests/test_topology_import.py
import testtools

# test the imports
from nuage_topology_collector.scripts.topology_import import TopologyConverter
from nuage_topology_collector.scripts.topology_import import TopologyReader


class TestTopologyImport(testtools.TestCase):
    """Smoke test: topology_import and its public classes are importable."""

    def test_import(self):
        # BUG FIX: the original method used `yield`, turning it into a
        # generator whose body is never executed by the test runner --
        # the "test" silently passed without running at all.  Plain
        # assertions actually execute while still referencing the
        # imports for pep8 compatibility.
        self.assertIsNotNone(TopologyReader)
        self.assertIsNotNone(TopologyConverter)
{"/nuage_topology_collector/scripts/topology_import.py": ["/nuage_topology_collector/scripts/helper/osclient.py", "/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_topology_import.py": ["/nuage_topology_collector/scripts/topology_import.py"], "/nuage_topology_collector/scripts/compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py", "/nuage_topology_collector/scripts/helper/osclient.py"], "/nuage_topology_collector/scripts/generate_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/scripts/helper/osclient.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"]}
36,380
nuagenetworks/topology-collector
refs/heads/master
/nuage_topology_collector/scripts/helper/utils.py
# Copyright 2020 NOKIA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import getpass
import os
import subprocess
import sys
import traceback

from ansible.parsing.dataloader import DataLoader
from ansible.vars.manager import VariableManager
from ansible.inventory.manager import InventoryManager
from ansible.executor.playbook_executor import PlaybookExecutor
from oslo_utils import uuidutils

try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()


class OSCredentials(object):
    """Container for OpenStack auth credentials.

    Normalizes the auth URL to end in '/v3' or '/v2.0' depending on the
    identity API version; domain attributes are only set for v3.
    """

    def __init__(self, auth_url, username, password, project_name,
                 identity_api_version,
                 user_domain_id=None, user_domain_name=None,
                 project_domain_id=None, project_domain_name=None,
                 verify_ca=True, ca_cert=None):
        self.auth_url = auth_url
        self.username = username
        self.password = password
        self.project_name = project_name
        self.verify_ca = verify_ca
        # A CA bundle only makes sense when verification is enabled.
        self.ca_cert = ca_cert if verify_ca else None
        self.identity_api_version = identity_api_version
        if identity_api_version == 3:
            self.auth_url = self.assure_endswith(self.auth_url, '/v3')
            # NOTE(review): domain attributes exist only on v3 instances;
            # code reading them must check identity_api_version first.
            self.user_domain_id = user_domain_id
            self.user_domain_name = user_domain_name
            self.project_domain_id = project_domain_id
            self.project_domain_name = project_domain_name
        else:
            self.auth_url = self.assure_endswith(self.auth_url, '/v2.0')

    @staticmethod
    def assure_endswith(url, endswith):
        """Return url, appending the given suffix if not already present."""
        return url if url.endswith(endswith) else (url + endswith)


class Utils(object):
    """Static helpers: env-var access, user checks, shell commands."""

    @staticmethod
    def env_error(msg, *args):
        """Raise EnvironmentError with msg %-formatted against args."""
        raise EnvironmentError((msg % tuple(args)) if args else msg)

    @staticmethod
    def report_traceback(reporter):
        """Send the current traceback to the given reporter object."""
        reporter.report(traceback.format_exc())

    @staticmethod
    def get_env_var(name, default=None, required=False):
        """Return os.environ[name], or default; error out if required.

        A required variable that is unset raises via env_error(); note
        that this function then implicitly returns None only if
        env_error were ever changed not to raise.
        """
        assert default is None or not required  # don't set default and
        # required at same time
        try:
            if os.environ[name] is not None:
                return os.environ[name]
            else:
                return default
        except KeyError:
            if not required:
                return default
            else:
                Utils.env_error('Please set %s. Aborting.', name)

    @staticmethod
    def get_env_bool(name, default=False):
        """Interpret an env var as boolean ('t'/'true'/'yes'/'y'/'1')."""
        return (str(Utils.get_env_var(name, default)).lower()
                in ['t', 'true', 'yes', 'y', '1'])

    @staticmethod
    def is_uuid(uuid):
        """True if the value looks like a UUID (per oslo uuidutils)."""
        return uuidutils.is_uuid_like(uuid)

    @staticmethod
    def check_user(required_user):
        """True if the current login user matches required_user."""
        current_user = getpass.getuser()
        if current_user == required_user:
            return True
        else:
            return False

    @staticmethod
    def get_os_credentials():
        """Build OSCredentials from the standard OS_* environment.

        Raises EnvironmentError (via env_error) when mandatory settings
        are missing.  Supports both identity v2 and v3 variable sets.
        """
        auth_url = Utils.get_env_var('OS_AUTH_URL', required=True)
        username = Utils.get_env_var('OS_USERNAME', required=True)
        password = Utils.get_env_var('OS_PASSWORD', required=True)
        project_name = Utils.get_env_var(
            'OS_PROJECT_NAME', Utils.get_env_var('OS_TENANT_NAME'))
        if not project_name:
            Utils.env_error('OS_PROJECT_NAME nor OS_TENANT_NAME '
                            'is defined. Please set either of both.')
        identity_api_version = float(  # deal with version '2.0' e.g.
            Utils.get_env_var('OS_IDENTITY_API_VERSION', 2))
        if identity_api_version == 3:
            user_domain_id = Utils.get_env_var('OS_USER_DOMAIN_ID')
            user_domain_name = Utils.get_env_var('OS_USER_DOMAIN_NAME')
            if not user_domain_name and not user_domain_id:
                Utils.env_error('OS_USER_DOMAIN_ID '
                                'nor OS_USER_DOMAIN_NAME '
                                'is defined. Please set either of both.')
            project_domain_id = Utils.get_env_var('OS_PROJECT_DOMAIN_ID')
            project_domain_name = Utils.get_env_var('OS_PROJECT_DOMAIN_NAME')
            if not project_domain_name and not project_domain_id:
                Utils.env_error('OS_PROJECT_DOMAIN_ID '
                                'nor OS_PROJECT_DOMAIN_NAME '
                                'is defined. Please set either of both.')
        else:
            user_domain_id = user_domain_name = None
            project_domain_id = project_domain_name = None
        # below is not a standard OS env setting -> to be documented
        verify_ca = Utils.get_env_bool('OS_VERIFY_CA', True)
        # below is standard --
        ca_cert = Utils.get_env_var('OS_CACERT')
        return OSCredentials(
            auth_url, username, password, project_name,
            identity_api_version,
            user_domain_id, user_domain_name,
            project_domain_id, project_domain_name,
            verify_ca, ca_cert)

    @staticmethod
    def source_rc_files(rc_file_path):
        """Source an rc file in a clean shell and import its env vars.

        NOTE(review): values containing newlines would be split across
        entries by the '\n' split below — assumed not to occur in rc
        files; verify if exotic values are expected.
        """
        rc_lines = Utils.cmds_run(["env -i bash -c 'source "
                                   "%s && env'" % rc_file_path])
        rc_variables = rc_lines.split('\n')
        for variable in rc_variables:
            (key, _, value) = variable.partition("=")
            if key != '':
                os.environ[key] = value

    @staticmethod
    def cmds_run(cmds):
        """Run shell commands; return their stdout.

        Returns a single string for one command, a list for several,
        or None for an empty command list.  Exits the process if a
        command returns non-zero with non-blank stderr.
        NOTE(review): stdout/stderr are read line-by-line in lockstep;
        a command producing much more stderr than stdout could fill the
        stderr pipe — assumed benign for the short commands used here.
        """
        if not cmds:
            return
        output_list = []
        for cmd in cmds:
            proc = subprocess.Popen(cmd,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE,
                                    shell=True,
                                    close_fds=True)
            out = ""
            err = ""
            while True:
                output = proc.stdout.readline().decode()
                err = err + proc.stderr.readline().decode()
                if output == '' and proc.poll() is not None:
                    break
                if output:
                    out = out + output
            proc.poll()
            if proc.returncode and err and err.split():
                sys.stdout.write("error occurred during command:\n"
                                 " %s\n error:\n %s "
                                 "\n exiting" % (cmd, err))
                sys.exit(1)
            output_list.append(out)
        if len(cmds) == 1:
            return output_list[0]
        else:
            return output_list


def run_ansible(ansible_playbook_path, opts):
    """Run an ansible playbook with an empty inventory source.

    Handles both ansible >= 2.8 (context.CLIARGS) and older releases
    (Options namedtuple) — the ImportError/ValueError fallback below.
    opts must provide: verbosity, check, subset.
    """
    loader = DataLoader()
    inventory = InventoryManager(loader=loader, sources=[])
    variable_manager = VariableManager(loader=loader, inventory=inventory)
    passwords = {}
    display.verbosity = opts.verbosity
    # Since ansible has deprecated the options and introduced
    # new library context starting from 2.8
    try:
        from ansible import context
        from ansible.cli import CLI
        from ansible.module_utils.common.collections import ImmutableDict
        context.CLIARGS = ImmutableDict(
            listtags=False, listtasks=False, listhosts=False, syntax=False,
            connection='smart', module_path=None, forks=100,
            remote_user='slotlocker', timeout=10, become=False,
            become_ask_pass=False, ask_pass=False, become_method='sudo',
            become_user='root', verbosity=opts.verbosity, check=opts.check,
            diff=False, subset=opts.subset, step=False, start_at_task=None)
        CLI.get_host_list(inventory, context.CLIARGS['subset'])
        playbook = PlaybookExecutor(
            playbooks=[ansible_playbook_path],
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            passwords=passwords)
    except (ImportError, ValueError):
        # Pre-2.8 ansible: pass an Options namedtuple instead of CLIARGS.
        from collections import namedtuple
        from ansible.cli import CLI
        Options = namedtuple(
            'Options', [
                'listtags', 'listtasks', 'listhosts', 'syntax', 'connection',
                'module_path', 'forks', 'remote_user', 'private_key_file',
                'ssh_common_args', 'ssh_extra_args', 'sftp_extra_args',
                'scp_extra_args', 'become', 'become_method', 'become_user',
                'verbosity', 'check', 'diff'])
        options = Options(
            listtags=False, listtasks=False, listhosts=False, syntax=False,
            connection='ssh', module_path=None, forks=100,
            remote_user='slotlocker', private_key_file=None,
            ssh_common_args=None, ssh_extra_args=None, sftp_extra_args=None,
            scp_extra_args=None, become=False, become_method='sudo',
            become_user='root', verbosity=opts.verbosity, check=opts.check,
            diff=False)
        CLI.get_host_list(inventory, opts.subset)
        playbook = PlaybookExecutor(
            playbooks=[ansible_playbook_path],
            inventory=inventory,
            variable_manager=variable_manager,
            loader=loader,
            options=options,
            passwords=passwords)
    rc = playbook.run()
    if rc == 0:
        sys.stdout.write("\n Running Ansible Completed !! \n")
    else:
        sys.stdout.write("\n Running Ansible seems to have some error !! \n")
{"/nuage_topology_collector/scripts/topology_import.py": ["/nuage_topology_collector/scripts/helper/osclient.py", "/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_topology_import.py": ["/nuage_topology_collector/scripts/topology_import.py"], "/nuage_topology_collector/scripts/compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py", "/nuage_topology_collector/scripts/helper/osclient.py"], "/nuage_topology_collector/scripts/generate_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/scripts/helper/osclient.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"]}
36,381
nuagenetworks/topology-collector
refs/heads/master
/nuage_topology_collector/library/topology.py
#!/usr/bin/python # Copyright 2018 NOKIA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import binascii import datetime import json import re from ansible.module_utils.basic import AnsibleModule from abc import abstractmethod import construct from construct import core import functools import netaddr DOCUMENTATION = ''' --- module: topology short_description: Decodes lldp tlvs and produces json report with interface to TOR switch port mapping options: system_name: description: - The system name or IP address of the compute node being processed required: true interfaces: description: - Dict with lldp and VF information per interface required: true ovs_bridges: description: - Dict of interface to bridge mappings required: false ''' EXAMPLES = ''' - topoloy: system_name: 192.168.1.1 interfaces: [eth0] ovs_bridges: {'eth0': { 'bridge':'br-ex', 'type': 'internal'} } ''' """ Link Layer Discovery Protocol TLVs """ # TLV types we are interested in LLDP_TLV_END_LLDPPDU = 0 LLDP_TLV_PORT_ID = 2 LLDP_TLV_SYS_NAME = 5 LLDP_TLV_SYS_DESCRIPTION = 6 LLDP_TLV_MGMT_ADDRESS = 8 def bytes_to_int(obj): """Convert bytes to an integer :param: obj - array of bytes """ return functools.reduce(lambda x, y: x << 8 | y, obj) def mapping_for_enum(mapping): """Return tuple used for keys as a dict :param: mapping - dict with tuple as keys """ return dict(mapping.keys()) def mapping_for_switch(mapping): """Return dict from values :param: mapping - dict with tuple as keys """ return {key[0]: value for key, 
value in mapping.items()} IPv4Address = core.ExprAdapter( core.Byte[4], encoder=lambda obj, ctx: netaddr.IPAddress(obj).words, decoder=lambda obj, ctx: str(netaddr.IPAddress(bytes_to_int(obj))) ) IPv6Address = core.ExprAdapter( core.Byte[16], encoder=lambda obj, ctx: netaddr.IPAddress(obj).words, decoder=lambda obj, ctx: str(netaddr.IPAddress(bytes_to_int(obj))) ) MACAddress = core.ExprAdapter( core.Byte[6], encoder=lambda obj, ctx: netaddr.EUI(obj).words, decoder=lambda obj, ctx: str(netaddr.EUI(bytes_to_int(obj), dialect=netaddr.mac_unix_expanded)) ) IANA_ADDRESS_FAMILY_ID_MAPPING = { ('ipv4', 1): IPv4Address, ('ipv6', 2): IPv6Address, ('mac', 6): MACAddress, } IANAAddress = core.Embedded(core.Struct( 'family' / core.Enum(core.Int8ub, **mapping_for_enum( IANA_ADDRESS_FAMILY_ID_MAPPING)), 'value' / core.Switch(construct.this.family, mapping_for_switch( IANA_ADDRESS_FAMILY_ID_MAPPING)))) # Note that 'GreedyString()' is used in cases where string len is not defined CHASSIS_ID_MAPPING = { ('entPhysAlias_c', 1): core.Struct('value' / core.GreedyString("utf8")), ('ifAlias', 2): core.Struct('value' / core.GreedyString("utf8")), ('entPhysAlias_p', 3): core.Struct('value' / core.GreedyString("utf8")), ('mac_address', 4): core.Struct('value' / MACAddress), ('IANA_address', 5): IANAAddress, ('ifName', 6): core.Struct('value' / core.GreedyString("utf8")), ('local', 7): core.Struct('value' / core.GreedyString("utf8")) } # # Basic Management Set TLV field definitions # # Chassis ID value is based on the subtype ChassisId = core.Struct( 'subtype' / core.Enum(core.Byte, **mapping_for_enum( CHASSIS_ID_MAPPING)), 'value' / core.Embedded(core.Switch(construct.this.subtype, mapping_for_switch(CHASSIS_ID_MAPPING))) ) PORT_ID_MAPPING = { ('ifAlias', 1): core.Struct('value' / core.GreedyString("utf8")), ('entPhysicalAlias', 2): core.Struct('value' / core.GreedyString("utf8")), ('mac_address', 3): core.Struct('value' / MACAddress), ('IANA_address', 4): IANAAddress, ('ifName', 5): 
core.Struct('value' / core.GreedyString("utf8")), ('local', 7): core.Struct('value' / core.GreedyString("utf8")) } # Port ID value is based on the subtype PortId = core.Struct( 'subtype' / core.Enum(core.Byte, **mapping_for_enum( PORT_ID_MAPPING)), 'value' / core.Embedded(core.Switch(construct.this.subtype, mapping_for_switch(PORT_ID_MAPPING))) ) PortDesc = core.Struct('value' / core.GreedyString("utf8")) SysName = core.Struct('value' / core.GreedyString("utf8")) SysDesc = core.Struct('value' / core.GreedyString("utf8")) MgmtAddress = core.Struct( 'len' / core.Int8ub, 'family' / core.Enum(core.Int8ub, **mapping_for_enum( IANA_ADDRESS_FAMILY_ID_MAPPING)), 'address' / core.Switch(construct.this.family, mapping_for_switch( IANA_ADDRESS_FAMILY_ID_MAPPING)) ) class LLDPBaseException(Exception): message = "An unknown exception occurred." def __init__(self, **kwargs): try: super(LLDPBaseException, self).__init__(self.message % kwargs) self.msg = self.message % kwargs except Exception: # at least get the core message out if something happened super(LLDPBaseException, self).__init__(self.message) def __str__(self): return self.msg class TlvNotFound(LLDPBaseException): message = 'Required %(tlv)s TLV not found in lldp: %(lldp)s.' class SwitchTypeNotSupported(LLDPBaseException): message = ('Could not find any supported switch type ' 'in System Description TLV: %(tlv)s') class Switch(object): def __init__(self, name): self.name = name # generate_json() is a function that takes two input strings of # specific syntax and creates a JSON string from specific portions # of those outputs. The input strings are generated from two specific # commands. As such, this function is tightly comupled to the outputs # of those commands. The first command is lldptool. The second is ls. # The exact syntax of these commands is shown in the main() function # in this file. If the outputs or commands change, the code in this # function must change with them. 
@abstractmethod def generate_json(self, interface, lldpinfo, vfinfo, ovsapi=None): pass def validate_lldp(self, lldpout): name = addr = port = None for tlv_type, tlv_data in lldpout: try: data = bytearray(binascii.a2b_hex(tlv_data)) except TypeError: # invalid data, not in hex, skipping continue if tlv_type == LLDP_TLV_SYS_NAME: name = SysName.parse(data) elif tlv_type == LLDP_TLV_MGMT_ADDRESS: mgmtaddr = MgmtAddress.parse(data) if mgmtaddr.family == 'ipv4': addr = mgmtaddr elif tlv_type == LLDP_TLV_PORT_ID: port = PortId.parse(data) if not addr: raise TlvNotFound(tlv='Management address (ipv4)', lldp=lldpout) if not port: raise TlvNotFound(tlv='Port ID', lldp=lldpout) return name, addr, port @staticmethod def create_system_json(vfinfo, neighborname, neighborip, neighborport, bridge=None): res = vfinfo entry = { 'neighbor-system-name': neighborname, 'neighbor-system-mgmt-ip': neighborip, 'neighbor-system-port': neighborport, 'ovs-bridge': bridge } res.update(entry) return res class NokiaSwitch(Switch): def __init__(self): super(NokiaSwitch, self).__init__('nokia') # convert_ifindex_to_ifname() is a function that converts the ifindex # we get from the Port ID TLV output of lldptool into the ifname # of the form x/y/z # The following schemes are supported: # 32 bit unsigned integer # Scheme B # None-connector 0110|Zero(5)|Slot(5)|MDA(4)|0|Zero(2)|Port(8)|Zero(3) # Connector 0110|Zero(5)|Slot(5)|MDA(4)|1|Zero(1)|Conn(6)|ConnPort(6) # Scheme C # None-connector 000|Slot(4)|Port-Hi(2)|MDA(2)|Port-Lo(6)|0|Zero(14) # Connector 000|Slot(4)|Zero(2)|MDA(2)|Conn(6)|1|Zero(8)|ConnPort(6) # Scheme D # None-connector 0x4D|isChannel(1)|0|slot(3)|mda(4)|0|0|Zero(5)|Port(8) # Connector 0x4D|isChannel(1)|0|slot(3)|mda(4)|0|1|0|Conn(6)|ConnPort(6) def convert_ifindex_to_ifname(self, ifindex): if not ifindex.isdigit(): return 'None' ifindex = int(ifindex) scheme, connector = self._get_scheme_decode_format(ifindex) # Scheme B if scheme == 3: slot = (ifindex >> 18) & 0x1f mda = (ifindex 
>> 14) & 0x0f if connector: return "%s/%s/c%s/%s" % ( slot, mda, (ifindex >> 6) & 0x3f, ifindex & 0x3f) else: return "%s/%s/%s" % ( slot, mda, (ifindex >> 3) & 0xff) # Scheme C elif scheme == 0: slot = ifindex >> 25 mda = (ifindex >> 21) & 0x03 if connector: return "%s/%s/c%s/%s" % ( slot, mda, (ifindex >> 15) & 0x3f, ifindex & 0x3f) else: return "%s/%s/%s" % ( slot, mda, (ifindex >> 15) & 0x3f | (ifindex >> 17) & 0xc0) # Scheme D elif scheme == 2: slot = (ifindex >> 19) & 0x07 mda = (ifindex >> 15) & 0x0f if connector: return "%s/%s/c%s/%s" % ( slot, mda, (ifindex >> 6) & 0x3f, ifindex & 0x3f) else: return "%s/%s/%s" % ( slot, mda, ifindex & 0xff) else: return 'None' @staticmethod def _get_scheme_decode_format(ifindex): scheme = ifindex >> 29 # Connector Bit - Masks 16384 (Scheme C) & 8192 (Scheme B,D) connector = ifindex & 16384 if not scheme else ifindex & 8192 return scheme, connector def generate_json(self, interface, lldpinfo, vfinfo, bridge=None): name, addr, port = self.validate_lldp(lldpinfo) if 'local' in port.subtype: neighborport = self.convert_ifindex_to_ifname(port.value) else: neighborport = port.value return self.create_system_json(vfinfo, name.value, addr.address, neighborport, bridge) class CiscoSwitch(Switch): def __init__(self, switch_type): super(CiscoSwitch, self).__init__('cisco') self.switch_type = switch_type def retrieve_port_number(self, neighborport): scratch = re.search(r'(\w+)([0-9]+(/[0-9]+)*)', str(neighborport)) if not scratch: return "None" if 'NX-OS' in self.switch_type: return str(scratch.group(1)[0:3].lower() + scratch.group(2)) elif 'NCS' in self.switch_type: return neighborport else: return "None" def generate_json(self, interface, lldpinfo, vfinfo, bridge=None): name, addr, port = self.validate_lldp(lldpinfo) # just get the port number neighborport = self.retrieve_port_number(port.value) return self.create_system_json(vfinfo, name.value, addr.address, neighborport, bridge) def get_switch(lldp_packet): switch = None sdtlv = 
next((tlv for tlv in lldp_packet if tlv[0] == LLDP_TLV_SYS_DESCRIPTION), None) if not sdtlv: raise TlvNotFound(tlv='System description', lldp=lldp_packet) data = bytearray(binascii.a2b_hex(sdtlv[1])) sysdesc = SysDesc.parse(data).value if re.search(r"Nokia|SRLinux|srlinux", sysdesc): switch = NokiaSwitch() else: cisco = re.search(r"NX-OS|NCS-55", sysdesc) if cisco: switch = CiscoSwitch(cisco.group(0)) if not switch: raise SwitchTypeNotSupported(tlv=sysdesc) return switch def main(): arg_spec = dict( system_name=dict(required=True), interfaces=dict(type='dict', required=True), ovs_bridges=dict(type='dict', required=False) ) module = AnsibleModule(argument_spec=arg_spec) system_name = module.params['system_name'] interfaces = module.params['interfaces'] ovs_bridges = module.params['ovs_bridges'] startd = datetime.datetime.now() # Determining the switch type from the LLDP output itself # get_switch() method will raise LLDPBaseException in case # - no System Description TLV in lldp packet # - System Description TLV does not contain any recognized # switch type patterns itf_list = [] for interface, data in interfaces.items(): try: switch = get_switch(data['lldp']) ovs_bridge = ovs_bridges.get(interface) itf_list.append(switch.generate_json( interface, data.get('lldp'), data.get('vfinfo'), ovs_bridge.get('bridge') if ovs_bridge else None)) except LLDPBaseException as e: module.fail_json(msg="Failed to process LLDP data " "for interface: %s" % interface, stdout=None, stderr=str(e)) module.exit_json(system_name=system_name, interfaces=interfaces, stdout=json.dumps(itf_list, indent=4), start=str(startd), end=str(datetime.datetime.now()), delta=str(datetime.datetime.now() - startd), changed=True) if __name__ == '__main__': main()
{"/nuage_topology_collector/scripts/topology_import.py": ["/nuage_topology_collector/scripts/helper/osclient.py", "/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_topology_import.py": ["/nuage_topology_collector/scripts/topology_import.py"], "/nuage_topology_collector/scripts/compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py", "/nuage_topology_collector/scripts/helper/osclient.py"], "/nuage_topology_collector/scripts/generate_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/scripts/helper/osclient.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"]}
36,382
nuagenetworks/topology-collector
refs/heads/master
/nuage_topology_collector/scripts/helper/script_logging.py
# Copyright 2017 NOKIA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections import contextlib import functools import logging import os import sys import time USER_LOGGING_LEVEL = logging.INFO + 1 CONSOLE_FORMAT = '%(spaces)s%%(message)s' FILE_FORMAT = '%%(asctime)s %%(levelname)s %(spaces)s%%(message)s' LOG = None log_file = "" indentations = 0 console_formatter = None file_formatter = None class SingleLevelFilter(logging.Filter): def __init__(self, passlevel): super(SingleLevelFilter, self).__init__() self.passlevel = passlevel def filter(self, record): return record.levelno == self.passlevel def init_logging(name): global log_file, console_formatter, file_formatter, LOG log_dir = os.path.expanduser('~') + '/nuage_logs' if not os.path.exists(log_dir): os.makedirs(log_dir) root_logger = logging.getLogger() root_logger.setLevel(logging.DEBUG) for handler in root_logger.handlers: root_logger.removeHandler(handler) console_formatter = logging.Formatter() stdout = logging.StreamHandler(sys.stdout) stdout.setFormatter(console_formatter) stdout.setLevel(USER_LOGGING_LEVEL) stdout.addFilter(SingleLevelFilter(USER_LOGGING_LEVEL)) root_logger.addHandler(stdout) def user(self, message, *args, **kws): exc_info = kws.pop('exc_info', False) message = message.replace('\n', '\n%s' % (indentations * ' ')) args = tuple([arg.replace('\n', '\n%s' % (indentations * ' ')) for arg in args]) kws = {k: v.replace('\n', '\n%s' % (indentations * ' ')) for k, v in kws} if 
self.isEnabledFor(USER_LOGGING_LEVEL): self._log(USER_LOGGING_LEVEL, message, args, **kws) if exc_info: self._log(logging.ERROR, "", [], exc_info=True, **kws) logging.addLevelName(USER_LOGGING_LEVEL, "USER") logging.Logger.user = user file_formatter = logging.Formatter() log_file = log_dir + '/%s_%s.log' % ( name, time.strftime("%d-%m-%Y_%H:%M:%S")) hdlr = logging.FileHandler(log_file) hdlr.setFormatter(file_formatter) hdlr.setLevel(logging.NOTSET) root_logger.addHandler(hdlr) _update_formatters() root_logger.user("Logfile created at %s" % log_file) LOG = logging.getLogger() def _update_formatters(): global console_formatter, file_formatter console_formatter._fmt = CONSOLE_FORMAT % {'spaces': indentations * ' '} file_formatter._fmt = FILE_FORMAT % {'spaces': indentations * ' '} def _update_indentation(delta): global indentations indentations += delta _update_formatters() def indent(): _update_indentation(1) def unindent(): _update_indentation(-1) def step(description=None): """Decorator for indenting all logging for the duration of the method. :param description: if present, will log "start <description>" and end. :return: decorated function """ def decorator(fn): @functools.wraps(fn) def wrapped(*args, **kwargs): exception = False try: if description is not None: LOG.user('Start %s' % description) indent() return fn(*args, **kwargs) except BaseException: exception = True raise finally: unindent() if description is not None and not exception: LOG.user('Finished %s\n' % description) return wrapped return decorator @contextlib.contextmanager def indentation(): """contextmanager to indent logging for the duration of the context. Usage: with indentation(): # code where logging will be indented 1 level """ try: indent() yield finally: unindent() def iterate(iterable, resource_name_plural, message='%(total)s%(resource_name_plural)s remaining', newline=False): """helper method for loops to provide automatic progress logging. 
At the start of the loop it will print | Start <message> At the end of the loop it will print | Finished <message> Advised not to use when you may 'break' the loop early or return in it. The estimted time logging would be pointless as well as the 'Finished' message will not print. Any logging happening inside the loop will be automatically indented. Every 5 seconds a message will print on the console indicating the progress in the loop. Assuming not a stream was passed or certain generator without length available. It will print: | <time in loop> <message> (<percentage complete>, <estimated time left>) When a stream was passed as parameter, it will just print | <total time in loop> <msg> Usage: for port in nuage_logging.iterate(ports, "ports"): # code """ msg_args = {} try: total = len(iterable) msg_args['total'] = str(total) + ' ' except TypeError: # Streams or some generators won't have len property. total = None msg_args['total'] = '' message = 'processing %(total)s%(resource_name_plural)s' msg_args['resource_name_plural'] = resource_name_plural main_message = message[0].lower() + message[1:] main_message %= msg_args last_log_index = -1 start = time.time() last_log_time = start time_history = collections.deque(maxlen=5) LOG.user("Processing: " "%(resource_name_plural)s " % msg_args) LOG.user("Total resource left: %(total)s" % msg_args) try: with indentation(): for i, x in enumerate(iterable): yield x now = time.time() seconds_passed = now - last_log_time if (seconds_passed > 5 or i == 0) and i + 1 != total: _log_iter_progress(i + 1, total, resource_name_plural, message, start, now, seconds_passed, i - last_log_index, time_history) last_log_time = now last_log_index = i except Exception: raise else: LOG.user("Finished processing %(total)s%(resource_name_plural)s" % msg_args + ('\n' if newline else '')) def _log_iter_progress(i, total, resource_name_plural, message, start, now, seconds_passed, processed_items, time_history): message = "%(time_taken)s | " + message 
total_seconds_taken = now - start msg_args = {'resource_name_plural': resource_name_plural, 'time_taken': _seconds_to_hms_str(total_seconds_taken), 'total': str(total - i) + ' ' if total is not None else ''} if total is None: LOG.user(message % msg_args + ' (unknown time left)') else: percent_complete = int(round(100.0 * i / total)) seconds_per_item = float(seconds_passed) / processed_items time_history.append(seconds_per_item) avg_time_per_item = sum(time_history) / len(time_history) seconds_left = avg_time_per_item * (total - i) message += " (%(percentage)s%%, time left: ~%(time_left)s)" msg_args['percentage'] = percent_complete msg_args['time_left'] = _seconds_to_hms_str(seconds_left) LOG.user(message % msg_args) def _seconds_to_hms_str(seconds): m, s = divmod(seconds, 60) h, m = divmod(m, 60) return "%02d:%02d:%02d" % (h, m, s)
{"/nuage_topology_collector/scripts/topology_import.py": ["/nuage_topology_collector/scripts/helper/osclient.py", "/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_topology_import.py": ["/nuage_topology_collector/scripts/topology_import.py"], "/nuage_topology_collector/scripts/compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py", "/nuage_topology_collector/scripts/helper/osclient.py"], "/nuage_topology_collector/scripts/generate_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/scripts/helper/osclient.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"]}
36,383
nuagenetworks/topology-collector
refs/heads/master
/nuage_topology_collector/filter_plugins/items_to_dict.py
#!/usr/bin/python
# Copyright 2020 Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

'''
OSP13 has ansible 2.6 which does not have items2dict
'''


def items_to_dict(mylist, key_name='key', value_name='value'):
    '''Reverse of dict2items: collapse a list of key/value dicts.

    Each element of *mylist* must provide the *key_name* and *value_name*
    keys; the result maps each element's key entry to its value entry.
    '''
    return {entry[key_name]: entry[value_name] for entry in mylist}


class FilterModule(object):
    ''' Query filter '''

    def filters(self):
        available = {'items_to_dict': items_to_dict}
        return available
{"/nuage_topology_collector/scripts/topology_import.py": ["/nuage_topology_collector/scripts/helper/osclient.py", "/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_topology_import.py": ["/nuage_topology_collector/scripts/topology_import.py"], "/nuage_topology_collector/scripts/compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py", "/nuage_topology_collector/scripts/helper/osclient.py"], "/nuage_topology_collector/scripts/generate_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/scripts/helper/osclient.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"]}
36,384
nuagenetworks/topology-collector
refs/heads/master
/nuage_topology_collector/scripts/populate_topology.py
#!/usr/bin/env python
# Copyright 2020 NOKIA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import glob
import os
import sys

# TODO(OPENSTACK-2892) :
# This is temporary code for dealing with py2/py3 compatibility and have
# unit tests pass, while the production code isn't deployed as a true
# python package. This will be worked on in a subsequent release.
try:
    from .helper import constants
except (ImportError, ValueError):
    from helper import constants

import topology_import


def main():
    """Locate the newest generated topology report and import it."""
    # BUG FIX: the original used 'and', so a missing directory crashed in
    # os.listdir() and an existing-but-empty directory fell through; either
    # condition means there is nothing to import.
    if (not os.path.isdir(constants.OUTPUT_DIR)
            or not os.listdir(constants.OUTPUT_DIR)):
        sys.stdout.write('ERROR: No report to import. Please generate the '
                         'topology report first.\n')
        sys.exit(1)
    output_file_regex_path = constants.OUTPUT_DIR + '/' + \
        constants.OUTPUT_FILE_PREFIX + '*json'
    list_of_files = glob.glob(output_file_regex_path)
    if len(list_of_files) == 0:
        sys.stdout.write('ERROR: No files found under dir '
                         '%s' % constants.OUTPUT_DIR)
        # BUG FIX: without exiting here, max([]) below raised ValueError.
        sys.exit(1)
    # get the latest report file created in /tmp/topo-coll dir
    topo_repo_file_path = max(list_of_files, key=os.path.getctime)
    if os.path.exists(topo_repo_file_path):
        sys.stdout.write('Processing %s\n\n' % topo_repo_file_path)
        topology_import.main(['topology_import.py', topo_repo_file_path])
    else:
        sys.stdout.write('ERROR: No file named %s found under '
                         '%s \n' % (topo_repo_file_path,
                                    constants.OUTPUT_DIR))


if __name__ == "__main__":
    main()
{"/nuage_topology_collector/scripts/topology_import.py": ["/nuage_topology_collector/scripts/helper/osclient.py", "/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_topology_import.py": ["/nuage_topology_collector/scripts/topology_import.py"], "/nuage_topology_collector/scripts/compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py", "/nuage_topology_collector/scripts/helper/osclient.py"], "/nuage_topology_collector/scripts/generate_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/scripts/helper/osclient.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"]}
36,385
nuagenetworks/topology-collector
refs/heads/master
/nuage_topology_collector/roles/tc-inventory/files/get_overcloud_agents.py
#!/usr/bin/python
"""Query Neutron for 'NIC Switch' (SR-IOV) and 'Open vSwitch' agents and
print a JSON map of short hostname -> agent host + mapping configurations.

Credentials are taken from the usual OS_* environment variables.
"""

import json
import os

from keystoneauth1.identity import v3
from keystoneauth1 import session
from neutronclient.v2_0 import client

try:
    from urllib.parse import urljoin
except ImportError:
    from urlparse import urljoin

auth_url = os.environ["OS_AUTH_URL"]
if os.environ.get("OS_IDENTITY_API_VERSION") == "3":
    if 'v3' not in auth_url:
        auth_url = urljoin(auth_url, 'v3')
username = os.environ.get("OS_USERNAME")
password = os.environ.get("OS_PASSWORD")
project_name = os.environ.get("OS_TENANT_NAME",
                              os.environ.get("OS_PROJECT_NAME"))
user_domain_name = os.environ.get("OS_USER_DOMAIN_NAME")
project_domain_name = os.environ.get("OS_PROJECT_DOMAIN_NAME")

auth = v3.Password(auth_url=auth_url,
                   username=username,
                   password=password,
                   project_name=project_name,
                   user_domain_name=user_domain_name,
                   project_domain_name=project_domain_name,
                   )
# NOTE(review): verify=False disables TLS certificate verification;
# consider honoring OS_CACERT instead of hardcoding this.
# Renamed from 'session' to avoid rebinding the imported module name.
sess = session.Session(auth=auth, verify=False)
neutron = client.Client(session=sess)

# Renamed from 'filter' to avoid shadowing the builtin.
agent_filter = {
    'agent_type': 'NIC Switch agent'
}
agents = neutron.list_agents(**agent_filter)['agents']
agent_filter = {
    'agent_type': 'Open vSwitch agent'
}
agents.extend(neutron.list_agents(**agent_filter)['agents'])

result = dict()
for agent in agents:
    confs = dict()
    if agent['agent_type'] == 'NIC Switch agent':
        confs.update(
            {'device_mappings': agent['configurations']['device_mappings']})
    else:
        confs.update(
            {'bridge_mappings': agent['configurations']['bridge_mappings']})
    # Index by short hostname; a host can run both agent types, in which
    # case their configurations are merged.
    hostname = agent.get('host').split('.')[0]
    if hostname in result:
        result[hostname]['configurations'].update(confs)
    else:
        result[hostname] = {
            'host': agent['host'],
            'configurations': confs
        }

print(json.dumps(result, indent=4))
{"/nuage_topology_collector/scripts/topology_import.py": ["/nuage_topology_collector/scripts/helper/osclient.py", "/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_topology_import.py": ["/nuage_topology_collector/scripts/topology_import.py"], "/nuage_topology_collector/scripts/compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py", "/nuage_topology_collector/scripts/helper/osclient.py"], "/nuage_topology_collector/scripts/generate_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/scripts/helper/osclient.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"]}
36,386
nuagenetworks/topology-collector
refs/heads/master
/nuage_topology_collector/scripts/compare_topology.py
#!/usr/bin/env python
"""Compare a freshly generated topology report against the switchport
mappings currently imported in Neutron and print added / deleted /
modified ports."""

import json
import os
import sys


def _import_helpers():
    """Resolve the py2/py3 helper imports lazily.

    TODO(OPENSTACK-2892): temporary code for py2/py3 compatibility while
    the production code isn't deployed as a true python package.  Deferred
    into a function (like create_old_report's NeutronClient import) so the
    pure functions in this module remain importable without the project
    package on sys.path.
    """
    try:
        from .helper import constants
        from .helper.utils import Utils
    except (ImportError, ValueError):
        from helper import constants
        from helper.utils import Utils
    return constants, Utils


def create_old_report():
    """Fetch the currently imported switchport mappings from Neutron."""
    # TODO(OPENSTACK-2892) :
    # This is temporary code for dealing with py2/py3 compatibility and
    # have unit tests pass, while the production code isn't deployed as a
    # true python package. This will be worked on in a subsequent release.
    try:
        from .helper.osclient import NeutronClient
    except (ImportError, ValueError):
        from helper.osclient import NeutronClient
    neutron_client = NeutronClient()
    neutron_client.authenticate()
    sw_maps = neutron_client.get_switchport_mapping()
    return sw_maps["switchport_mappings"]


def generate_new_report_map(new_report_json):
    """Flatten a generated report into {(host, pci-id): (switch-ip, port)}.

    :param new_report_json: parsed report with a 'compute-hosts' list
    """
    new_report_map = {}
    for compute in new_report_json["compute-hosts"]:
        for interface in compute["interfaces"]:
            for vf in interface["vf_info"]:
                new_report_map[(compute["service_host name"], vf["pci-id"])] \
                    = (interface["neighbor-system-mgmt-ip"],
                       interface["neighbor-system-port"])
    return new_report_map


def print_tuple(pair):
    """Render a tuple as 'a, b' for human-readable output."""
    return ", ".join(str(x) for x in pair)


def compare_reports(old_report_json, new_report_map):
    """Print the diff between the imported and freshly generated topology.

    Matched entries are popped from new_report_map (mutated in place);
    whatever remains afterwards is a newly added port.
    """
    for port in old_report_json:
        new_port_info = new_report_map.pop((port["host_id"],
                                            port["pci_slot"]), None)
        if new_port_info is None:
            print("Port deleted : " + print_tuple((port["host_id"],
                                                   port["pci_slot"])) + "\n")
        elif new_port_info != (port["switch_id"], port["port_id"]):
            print("Port Modified : "
                  "" + print_tuple((port["host_id"], port["pci_slot"])))
            print(print_tuple((port["switch_id"], port["port_id"])) +
                  " ===> "
                  "" + print_tuple(new_port_info) + "\n")
    for port in new_report_map.keys():
        print("New Port added: " + print_tuple(port) + " ===> "
              "" + print_tuple(new_report_map[port]) + "\n")


def main(argv):
    # BUG FIX: the original validated len(sys.argv) while reading the path
    # from argv[1]; use the argv parameter consistently so programmatic
    # callers (who pass their own argv) are handled correctly.
    if len(argv) != 2:
        sys.stdout.write("Please pass the new report as an argument.\n")
        sys.exit(1)
    new_report = argv[1]
    if not os.path.exists(new_report):
        sys.stdout.write("ERROR: The report %s does not exist.\n" % argv[1])
        sys.exit(1)
    constants, Utils = _import_helpers()
    if not Utils.check_user(constants.STACK_USER):
        sys.stdout.write("ERROR: Run the script as %s "
                         "user.\n" % constants.STACK_USER)
        sys.exit(1)
    if not os.path.isfile(constants.OVERCLOUDRC_FILE):
        sys.stdout.write("ERROR: %s does not exist."
                         "\n" % constants.OVERCLOUDRC_FILE)
        sys.exit(1)
    Utils.source_rc_files(constants.OVERCLOUDRC_FILE)
    with open(new_report) as new_report_data:
        new_report_json = json.load(new_report_data)
    new_report_map = generate_new_report_map(new_report_json)
    old_report_json = create_old_report()
    if not old_report_json:
        sys.stdout.write("No existing imported topology.\n")
        return
    compare_reports(old_report_json, new_report_map)


if __name__ == "__main__":
    main(sys.argv)
{"/nuage_topology_collector/scripts/topology_import.py": ["/nuage_topology_collector/scripts/helper/osclient.py", "/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_topology_import.py": ["/nuage_topology_collector/scripts/topology_import.py"], "/nuage_topology_collector/scripts/compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py", "/nuage_topology_collector/scripts/helper/osclient.py"], "/nuage_topology_collector/scripts/generate_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/scripts/helper/osclient.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"]}
36,387
nuagenetworks/topology-collector
refs/heads/master
/nuage_topology_collector/library/lldp.py
#!/usr/bin/python
# Copyright 2018 NOKIA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import binascii
import ctypes
import errno
import fcntl
import json
import os
import re
import select
import socket
import struct
import sys

from ansible.module_utils.basic import AnsibleModule

DOCUMENTATION = '''
---
module: lldp
short_description: Given an interface list, returns ToR port information
                   and VF PCI info in JSON
options:
    interfaces:
        description:
            - List of network interfaces to query
        required: true
    ovs_bridges:
        description:
            - Dict with interface to bridge mappings
        required: true
    lldp_timeout:
        default: 30
        description:
            - Max time to wait for LLDP packet to arrive
'''

EXAMPLES = '''
- topology:
    interfaces: [eth0]
'''

ANY_ETHERTYPE = 0x0003
IFF_PROMISC = 0x100
SIOCGIFFLAGS = 0x8913
SIOCSIFFLAGS = 0x8914

# TLV types
LLDP_TLV_PORT_ID = 2
LLDP_TLV_SYS_NAME = 5
LLDP_TLV_SYS_DESCRIPTION = 6
LLDP_TLV_MGMT_ADDRESS = 8

SOL_SOCKET = 1
SO_ATTACH_FILTER = 26


class ifreq(ctypes.Structure):
    """Class for setting flags on a socket."""
    _fields_ = [("ifr_ifrn", ctypes.c_char * 16),
                ("ifr_flags", ctypes.c_short)]


class bpf_insn(ctypes.Structure):
    """"The BPF instruction data structure"""
    _fields_ = [("code", ctypes.c_ushort),
                ("jt", ctypes.c_ubyte),
                ("jf", ctypes.c_ubyte),
                ("k", ctypes.c_uint32)]


class bpf_program(ctypes.Structure):
    """"Structure for BIOCSETF"""
    _fields_ = [("bf_len", ctypes.c_uint),
                ("bf_insns", ctypes.POINTER(bpf_insn))]


# Shamelessly copied/modified from
# ironic-python-agent
class RawPromiscuousSockets(object):

    def __init__(self, interface_names, protocol, module):
        """Initialize context manager.

        :param interface_names: a list of interface names to bind to
        :param protocol: the protocol to listen for
        :returns: A list of tuple of (interface_name, bound_socket), or
                  [] if there is an exception binding or putting the
                  sockets in promiscuous mode
        """
        if not interface_names:
            raise ValueError('interface_names must be a non-empty list of '
                             'network interface names to bind to.')
        self.protocol = protocol
        self.module = module
        self.ovs_bridges = module.params['ovs_bridges']
        # A 4-tuple of (interface_name, socket, ifreq object, sink)
        self.interfaces = [(name, self._get_socket(), ifreq(),
                            self._get_iface_sink(name))
                           for name in interface_names]

    def __enter__(self):
        for interface_name, sock, ifr, sink in self.interfaces:
            # For DPDK ports under an OVS bridge we listen on a dedicated
            # internal 'sink' port instead of the physical interface.
            iface = sink or interface_name
            try:
                self.module.log('Interface {} entering promiscuous '
                                'mode to capture '.format(iface))
                ifr.ifr_ifrn = iface.encode()
                # Get current flags
                fcntl.ioctl(sock.fileno(), SIOCGIFFLAGS, ifr)  # G for Get
                # bitwise or the flags with promiscuous mode, set the new
                # flags
                ifr.ifr_flags |= IFF_PROMISC
                fcntl.ioctl(sock.fileno(), SIOCSIFFLAGS, ifr)  # S for Set
                # Bind the socket so it can be used
                self.module.log('Binding interface {} for protocol '
                                '{}'.format(iface, self.protocol))
                sock.bind((iface, self.protocol))
                # Attach kernel packet filter for lldp protocol
                bpf = self._get_bpf_filter()
                sock.setsockopt(SOL_SOCKET, SO_ATTACH_FILTER, bpf)
                # Drain the queue
                while True:
                    try:
                        sock.recv(1, socket.MSG_DONTWAIT)
                    except socket.error as serr:
                        if serr.errno == errno.EWOULDBLOCK:
                            # assume no data to read
                            break
                        else:
                            raise
            except Exception:
                self.module.log('Failed to open all RawPromiscuousSockets, '
                                'attempting to close any opened sockets.')
                self.__exit__(*sys.exc_info())
                raise
        # No need to return each interfaces ifreq.
        return [(sock[0], sock[1]) for sock in self.interfaces]

    def __exit__(self, exception_type, exception_val, trace):
        for name, sock, ifr, sink in self.interfaces:
            # bitwise or with the opposite of promiscuous mode to remove
            ifr.ifr_flags &= ~IFF_PROMISC
            try:
                fcntl.ioctl(sock.fileno(), SIOCSIFFLAGS, ifr)
                sock.close()
                if sink:
                    bridge = self.ovs_bridges.get(name)
                    self._clean_lldp_config(sink, bridge.get('bridge'))
            except Exception:
                self.module.log('Failed to close raw socket for interface '
                                '{}'.format(sink or name))

    def _get_socket(self):
        return socket.socket(socket.AF_PACKET, socket.SOCK_RAW,
                             self.protocol)

    def _get_bpf_filter(self):
        """ Kernel packet filter for lldp proto.

        Unfortunately instantiation of the class with lldp proto
        does not work for interfaces under OVS bridge.
        Black magic using kernel BPF filter follows
        /sbin/tcpdump -i <itf> -ddd -s 1600 \
        'ether proto 0x88cc and ether dst 01:80:c2:00:00:0e'
        """
        # Renamed from 'filter' to avoid shadowing the builtin.
        bpf_text = ['8\n', '40 0 0 12\n', '21 0 5 35020\n', '32 0 0 2\n',
                    '21 0 3 3254779918\n', '40 0 0 0\n', '21 0 1 384\n',
                    '6 0 0 1600\n', '6 0 0 0\n']
        # Allocate BPF instructions
        size = int(bpf_text[0])
        bpf_insn_a = bpf_insn * size
        bip = bpf_insn_a()
        # Fill the BPF instruction structures with the byte code
        bpf_text = bpf_text[1:]
        i = 0
        for line in bpf_text:
            values = [int(v) for v in line.split()]
            bip[i].code = ctypes.c_ushort(values[0])
            bip[i].jt = ctypes.c_ubyte(values[1])
            bip[i].jf = ctypes.c_ubyte(values[2])
            bip[i].k = ctypes.c_uint(values[3])
            i += 1
        # Create the BPF program
        return bpf_program(size, bip)

    def _get_iface_sink(self, interface):
        """Return the LLDP sink port name for a dpdk interface, else None.

        Best-effort: any failure is logged and swallowed.
        """
        sink = None
        bridge = self.ovs_bridges.get(interface)
        try:
            if bridge and bridge['type'] == 'dpdk':
                sink = self._prepare_itf_for_lldp(interface,
                                                  bridge['bridge'])
        except Exception:
            self.module.log('failed to create sink for interface '
                            '{}'.format(interface))
        finally:
            return sink

    def _prepare_itf_for_lldp(self, interface, bridge):
        """Create an OVS internal 'lldp.<itf>' port plus a flow that
        forwards LLDP frames from the physical port to it."""
        if not bridge:
            return None
        # Setup a lldp sink port and ovs flow
        sink = 'lldp.' + interface
        params = {
            'vsctl': self.module.get_bin_path("ovs-vsctl", True),
            'ofctl': self.module.get_bin_path("ovs-ofctl", True),
            'ip': self.module.get_bin_path("ip", True),
            'br': bridge,
            'ovsif': sink
        }
        # Setup a lldp sink port
        cmd = ("%(vsctl)s --may-exist add-port %(br)s %(ovsif)s -- "
               "set interface %(ovsif)s type=internal" % params)
        self.module.run_command(cmd, check_rc=True)
        # add flow to lldp port
        cmd = ("%(ofctl)s dump-ports-desc %(br)s" % params)
        _, out, _ = self.module.run_command(cmd, check_rc=True)
        # in port
        b = re.search(r'\s+(\d+)\({0}\):'.format(interface), out)
        params['in'] = b.group(1) if b else None
        # out port
        b = re.search(r'\s+(\d+)\({0}\):'.format(sink), out)
        params['out'] = b.group(1) if b else None
        cmd = ("%(ofctl)s add-flow %(br)s in_port=%(in)s,"
               "dl_dst=01:80:c2:00:00:0e,dl_type=0x88cc,"
               "actions=output:%(out)s" % params)
        self.module.run_command(cmd, check_rc=True)
        cmd = ("%(ip)s link set up dev %(ovsif)s" % params)
        self.module.run_command(cmd, check_rc=True)
        return sink

    def _clean_lldp_config(self, sink, bridge):
        """Remove the LLDP flow and sink port created for a dpdk itf."""
        params = {
            'vsctl': self.module.get_bin_path("ovs-vsctl", True),
            'ofctl': self.module.get_bin_path("ovs-ofctl", True),
            'br': bridge,
            'ovsif': sink
        }
        command = ("%(ofctl)s del-flows %(br)s "
                   "dl_dst=01:80:c2:00:00:0e" % params)
        self.module.run_command(command, check_rc=True)
        cmd = ("%(vsctl)s del-port %(br)s %(ovsif)s" % params)
        self.module.run_command(cmd, check_rc=True)


def get_lldp_info(interface_names, module):
    """Get LLDP info from the switch(es).

    Listens on either a single or all interfaces for LLDP packets, then
    parses them. If no LLDP packets are received before lldp_timeout,
    returns a dictionary in the form {'interface': [],...}.

    :param interface_names: The interface to listen for packets on. If
                            None, will listen on each interface.
    :return: A dictionary in the form
             {'interface': [(lldp_type, lldp_data)],...}
    """
    with RawPromiscuousSockets(interface_names, ANY_ETHERTYPE,
                               module) as interfaces:
        try:
            return _get_lldp_info(interfaces, module)
        except Exception as e:
            module.log('Error while getting LLDP info: %s', str(e))
            raise


def _parse_tlv(buff):
    """Iterate over a buffer and generate structured TLV data.

    :param buff: An ethernet packet with the header trimmed off (first
                 14 bytes)
    """
    lldp_info = []
    while len(buff) >= 2:
        # TLV structure: type (7 bits), length (9 bits), val (0-511 bytes)
        tlvhdr = struct.unpack('!H', buff[:2])[0]
        tlvtype = (tlvhdr & 0xfe00) >> 9
        tlvlen = (tlvhdr & 0x01ff)
        tlvdata = buff[2:tlvlen + 2]
        buff = buff[tlvlen + 2:]
        lldp_info.append((tlvtype, binascii.hexlify(tlvdata).decode()))
    return lldp_info


def _receive_lldp_packets(sock):
    """Receive LLDP packets and process them.

    :param sock: A bound socket
    :return: A list of tuples in the form (lldp_type, lldp_data)
    """
    pkt, sa_ll = sock.recvfrom(1600)
    # Filter outgoing packets
    if sa_ll[2] == socket.PACKET_OUTGOING:
        return []
    # Filter invalid packets
    if not pkt or len(pkt) < 14:
        return []
    # Skip header (dst MAC, src MAC, ethertype)
    pkt = pkt[14:]
    return _parse_tlv(pkt)


def _get_lldp_info(interfaces, module):
    """Wait for packets on each socket, parse the received LLDP packets.

    :param interfaces: list of (interface_name, bound_socket) tuples;
                       entries are removed as their LLDP data arrives
    :return: dict mapping interface name to [(tlv_type, tlv_data), ...];
             interfaces that timed out map to []
    """
    module.log('Getting LLDP info for interfaces {}'.format(interfaces))
    lldp_info = {}
    if not interfaces:
        return {}
    while interfaces:
        module.log('Waiting on LLDP info for interfaces: {}, '
                   'timeout: {}'.format(interfaces,
                                        module.params['lldp_timeout']))
        socks = [interface[1] for interface in interfaces]
        # rlist is a list of sockets ready for reading
        rlist, _, _ = select.select(
            socks, [], [], module.params['lldp_timeout'])
        if not rlist:
            # Empty read list means timeout on all interfaces
            module.log('LLDP timed out, remaining interfaces: {}'.format(
                interfaces))
            break
        # BUG FIX: the original contained a second, duplicated
        # select.select() nested inside 'for s in rlist:', which shadowed
        # 'rlist' while iterating it and broke out of the wrong scope.
        # One select per while-iteration is correct (and matches the
        # ironic-python-agent code this was derived from).
        for s in rlist:
            # Find interface name matching socket ready for read
            # Create a copy of interfaces to avoid deleting while
            # iterating.
            for index, interface in enumerate(list(interfaces)):
                if s == interface[1]:
                    try:
                        lldp_info[interface[0]] = _receive_lldp_packets(s)
                    except socket.error:
                        module.log('Socket for network interface {} said '
                                   'that it was ready to read we were '
                                   'unable to read from the socket while '
                                   'trying to get LLDP packet. Skipping '
                                   'this network interface.'.format(
                                       interface[0]))
                        del interfaces[index]
                    else:
                        # Remove interface from the list, if pkt is not
                        # outgoing/short
                        if lldp_info[interface[0]]:
                            module.log(
                                'Found LLDP info for interface: {}'.format(
                                    interface[0]))
                            del interfaces[index]
    # Add any interfaces that didn't get a packet as empty lists
    for name, _sock in interfaces:
        lldp_info[name] = []
    return lldp_info


def get_vf_devices(dev_name):
    """List the SR-IOV virtual functions of a device from sysfs.

    :param dev_name: network device name, e.g. 'eth0'
    :return: {'name': dev_name, 'vf_info': [{'device-name', 'pci-id'}...]};
             vf_info is [] when the device does not exist or has no VFs.
    """
    VF_DEVICE_PATH = "/sys/class/net/%s/device"
    VIRTFN_FORMAT = r"^virtfn(?P<vf_index>\d+)"
    VIRTFN_REG_EX = re.compile(VIRTFN_FORMAT)
    devices = {
        "name": dev_name,
        "vf_info": []
    }
    dev_path = VF_DEVICE_PATH % dev_name
    if os.path.isdir(dev_path):
        file_list = os.listdir(dev_path)
        for file_name in file_list:
            pattern_match = VIRTFN_REG_EX.match(file_name)
            if pattern_match:
                vf_name = pattern_match.group(0)
                file_path = os.path.join(dev_path, file_name)
                if os.path.islink(file_path):
                    # virtfnN is a symlink to the VF's PCI device dir;
                    # its basename is the PCI address.
                    file_link = os.readlink(file_path)
                    pci_slot = os.path.basename(file_link)
                    entry = {
                        'device-name': vf_name,
                        'pci-id': pci_slot,
                    }
                    devices['vf_info'].append(entry)
    return devices


def main():
    arg_spec = dict(
        interfaces=dict(type='list', required=True),
        ovs_bridges=dict(type='dict', required=True),
        lldp_timeout=dict(type='int', required=False, default=30),
    )
    module = AnsibleModule(argument_spec=arg_spec)
    interfaces = module.params['interfaces']
    lldpinfo
= get_lldp_info(interfaces, module) itfinfo = dict() for interface in interfaces: vfinfo = get_vf_devices(interface) itfinfo[interface] = { 'lldp': lldpinfo.get(interface), 'vfinfo': vfinfo } module.exit_json(interfaces=interfaces, stdout=json.dumps(itfinfo, indent=4), changed=True) if __name__ == '__main__': main()
{"/nuage_topology_collector/scripts/topology_import.py": ["/nuage_topology_collector/scripts/helper/osclient.py", "/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_topology_import.py": ["/nuage_topology_collector/scripts/topology_import.py"], "/nuage_topology_collector/scripts/compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py", "/nuage_topology_collector/scripts/helper/osclient.py"], "/nuage_topology_collector/scripts/generate_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/scripts/helper/osclient.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"]}
36,388
nuagenetworks/topology-collector
refs/heads/master
/nuage_topology_collector/scripts/generate_topology.py
#!/usr/bin/env python
# Copyright 2020 NOKIA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import argparse
import os
import sys

from ansible.utils.display import Display

# TODO(OPENSTACK-2892) :
# This is temporary code for dealing with py2/py3 compatibility and have
# unit tests pass, while the production code isn't deployed as a true
# python package. This will be worked on in a subsequent release.
try:
    from .helper import constants
    from .helper.utils import Utils
    from .helper.utils import run_ansible
except (ImportError, ValueError):
    from helper import constants
    from helper.utils import Utils
    from helper.utils import run_ansible


def create_base_parser(prog, usage="", desc=None, epilog=None):
    """Create an options parser for all ansible scripts.

    :param prog: program name shown in the usage string
    :param usage: unused; kept for interface compatibility
    :param desc: description text for --help output
    :param epilog: epilog text for --help output
    :returns: a configured argparse.ArgumentParser
    """
    parser = argparse.ArgumentParser(
        prog=prog,
        epilog=epilog,
        description=desc,
        conflict_handler='resolve',
    )
    parser.add_argument('-v', '--verbose', dest='verbosity', default=0,
                        action="count",
                        help="verbose mode (-vvv for more, "
                             "-vvvv to enable connection debugging)")
    parser.add_argument("-C", "--check", default=False, dest='check',
                        action='store_true',
                        help="don't make any changes; instead, try to predict "
                             "some of the changes that may occur")
    parser.add_argument('-l', '--limit', default=None, dest='subset',
                        help="further limit selected hosts "
                             "to an additional pattern")
    return parser


def main(options):
    """Validate the environment and run the topology-collection playbook.

    Exits with status 1 when not run as the stack user or when either the
    undercloud or overcloud rc file is missing.
    """
    if not Utils.check_user(constants.STACK_USER):
        sys.stdout.write("ERROR: Run the script as %s user.\n" %
                         constants.STACK_USER)
        sys.exit(1)
    if not os.path.isfile(constants.STACKRC_FILE):
        # Bug fix: this message used to report OVERCLOUDRC_FILE even though
        # the check is for the stackrc file.
        sys.stdout.write("ERROR: %s does not exist."
                         "\n" % constants.STACKRC_FILE)
        sys.exit(1)
    if not os.path.isfile(constants.OVERCLOUDRC_FILE):
        sys.stdout.write("ERROR: %s does not exist."
                         "\n" % constants.OVERCLOUDRC_FILE)
        sys.exit(1)
    topo_playbook_path = os.path.join(constants.NUAGE_TC_PATH, "get_topo.yml")
    run_ansible(topo_playbook_path, options)


if __name__ == "__main__":
    parser = create_base_parser(os.path.basename(sys.argv[0]))
    options = parser.parse_args(sys.argv[1:])
    if options.subset:
        options.subset += ',localhost'
    display = Display()
    display.verbosity = options.verbosity
    display.debug("starting run")
    main(options)
{"/nuage_topology_collector/scripts/topology_import.py": ["/nuage_topology_collector/scripts/helper/osclient.py", "/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_topology_import.py": ["/nuage_topology_collector/scripts/topology_import.py"], "/nuage_topology_collector/scripts/compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py", "/nuage_topology_collector/scripts/helper/osclient.py"], "/nuage_topology_collector/scripts/generate_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/scripts/helper/osclient.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"]}
36,389
nuagenetworks/topology-collector
refs/heads/master
/nuage_topology_collector/scripts/helper/osclient.py
# TODO(OPENSTACK-2892) :
# This is temporary code for dealing with py2/py3 compatibility and have
# unit tests pass, while the production code isn't deployed as a true
# python package. This will be worked on in a subsequent release.
try:
    from .utils import Utils
except (ImportError, ValueError):
    from utils import Utils


class KeystoneClient(object):
    """Builds an authenticated keystone session (and optionally a client)."""

    def __init__(self):
        self.client = None
        self.session = None
        self.credentials = Utils.get_os_credentials()

    def authenticate(self, init_client=True):
        """Authenticate against keystone v2 or v3 with the stored credentials.

        :param init_client: when True, also instantiate a keystone client
                            bound to the new session.
        :returns: self, with ``session`` (and possibly ``client``) populated.
        :raises EnvironmentError: when keystone rejects the credentials.
        """
        from keystoneauth1.exceptions.auth import AuthorizationFailure \
            as KeyStoneAuthorizationFailure
        from keystoneauth1.identity import v2 as keystone_v2
        from keystoneauth1.identity import v3 as keystone_v3
        from keystoneauth1 import session as keystone_session
        from keystoneclient.v2_0 import client as keystone_v2_client
        from keystoneclient.v3 import client as keystone_client
        from osc_lib.exceptions import AuthorizationFailure
        from osc_lib.exceptions import Unauthorized

        creds = self.credentials
        try:
            if creds.identity_api_version == 3:
                auth = keystone_v3.Password(
                    auth_url=creds.auth_url,
                    username=creds.username,
                    password=creds.password,
                    project_name=creds.project_name,
                    project_domain_id=creds.project_domain_id,
                    project_domain_name=creds.project_domain_name,
                    user_domain_id=creds.user_domain_id,
                    user_domain_name=creds.user_domain_name)
                # Use the CA bundle only when verification is enabled and a
                # bundle was actually supplied; otherwise pass the flag.
                verify = creds.verify_ca
                if creds.verify_ca and creds.ca_cert:
                    verify = creds.ca_cert
                self.session = keystone_session.Session(auth=auth,
                                                        verify=verify)
                if init_client:
                    self.client = keystone_client.Client(session=self.session)
            else:
                auth = keystone_v2.Password(
                    auth_url=creds.auth_url,
                    username=creds.username,
                    password=creds.password,
                    tenant_name=creds.project_name)
                self.session = keystone_session.Session(auth=auth)
                if init_client:
                    self.client = keystone_v2_client.Client(
                        username=creds.username,
                        password=creds.password,
                        tenant_name=creds.project_name,
                        auth_url=creds.auth_url)
            return self
        except (AuthorizationFailure, KeyStoneAuthorizationFailure,
                Unauthorized) as e:
            raise EnvironmentError('Authentication failure: ' + str(e))


class NeutronClient(object):
    """Neutron client exposing the Nuage net-topology extension endpoints."""

    def __init__(self):
        self.client = None
        self.switchport_mapping_path = "/net-topology/switchport_mappings"
        self.switchport_mappings_path = "/net-topology/switchport_mappings/%s"

    def authenticate(self):
        """Build a neutron client on top of an authenticated keystone session."""
        from neutronclient.neutron import client as neutron_client
        from neutronclient.v2_0 import client as neutron_client_v2

        keystone = KeystoneClient().authenticate(init_client=False)
        if keystone.session:
            self.client = neutron_client.Client(
                api_version='2.0', session=keystone.session)
        else:
            # No session means the v2 path was taken without one; fall back
            # to direct credential-based construction.
            self.client = neutron_client_v2.Client(
                username=keystone.credentials.username,
                password=keystone.credentials.password,
                tenant_name=keystone.credentials.project_name,
                auth_url=keystone.credentials.auth_url)
        return self

    def get_switchport_mapping(self, retrieve_all=True, **_params):
        # Delegates to the neutron client's generic list helper.
        return self.client.list('switchport_mappings',
                                self.switchport_mapping_path,
                                retrieve_all, **_params)

    def create_switchport_mapping(self, body):
        return self.client.post(self.switchport_mapping_path, body)

    def update_switchport_mapping(self, id, body):
        return self.client.put(self.switchport_mappings_path % id, body)
{"/nuage_topology_collector/scripts/topology_import.py": ["/nuage_topology_collector/scripts/helper/osclient.py", "/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_topology_import.py": ["/nuage_topology_collector/scripts/topology_import.py"], "/nuage_topology_collector/scripts/compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py", "/nuage_topology_collector/scripts/helper/osclient.py"], "/nuage_topology_collector/scripts/generate_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/scripts/helper/osclient.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"]}
36,390
nuagenetworks/topology-collector
refs/heads/master
/nuage_topology_collector/library/linuxbond.py
#!/usr/bin/python
# Copyright 2020 NOKIA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import re

from ansible.module_utils.basic import AnsibleModule

ANSIBLE_METADATA = {
    'metadata_version': '1.0',
    'status': ['preview'],
    'supported_by': 'community'
}

DOCUMENTATION = '''
---
module: linuxbond

short_description: Query linux bonds

version_added: "2.4"

description:
    - "Query linux bonds"

author:
    Vlad Gridin (vladyslav.gridin@nokia.com)

options:
    brinfo:
        default: {}
        description:
          - Dict of phy itf/bridge relations
'''

EXAMPLES = '''
'''

RETURN = '''
brinfo:
    description: Dict with phy interface to ovs bridge mapping
    returned: always
    type: dict
    sample: {
        "eth0": "br-ex",
        "eth1": "br-public",
        "eth2": None
    }
'''


def check_linux_bond(iface):
    """Return the slave interface names of a linux bond.

    Reads /proc/net/bonding/<iface>; an interface that is not a bond has no
    such file, which is reported as "no slaves".

    :param iface: interface name to inspect
    :returns: list of slave interface names, empty when *iface* is not a bond
    """
    slaves = list()
    try:
        # Bug fix: the original called open(...).read() and never closed the
        # file handle; a context manager closes it deterministically.
        with open('/proc/net/bonding/%s' % iface) as bond_file:
            for line in bond_file:
                m = re.match('^Slave Interface: (.*)', line)
                if m:
                    slaves.append(m.group(1))
    except IOError:
        # No bonding entry for this interface: not a bond.
        pass
    return slaves


def run_module():
    """Ansible entry point: expand bond masters in brinfo with their slaves."""
    module_args = dict(
        brinfo=dict(type='dict', required=False, default=dict())
    )

    result = dict(
        changed=False,
        brinfo=dict()
    )

    module = AnsibleModule(
        argument_spec=module_args,
        supports_check_mode=True
    )

    if module.check_mode:
        module.exit_json(**result)

    bridgeinfo = dict()
    for k, v in module.params['brinfo'].items():
        bridgeinfo[k] = v
        # Each bond slave inherits the bridge of its master; the slave's own
        # interface type is unknown at this point.
        for slave in check_linux_bond(k):
            bridgeinfo[slave] = {'bridge': v.get('bridge'), 'type': None}
    result['brinfo'] = bridgeinfo
    module.exit_json(**result)


def main():
    run_module()


if __name__ == '__main__':
    main()
{"/nuage_topology_collector/scripts/topology_import.py": ["/nuage_topology_collector/scripts/helper/osclient.py", "/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_topology_import.py": ["/nuage_topology_collector/scripts/topology_import.py"], "/nuage_topology_collector/scripts/compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py", "/nuage_topology_collector/scripts/helper/osclient.py"], "/nuage_topology_collector/scripts/generate_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/scripts/helper/osclient.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"]}
36,391
nuagenetworks/topology-collector
refs/heads/master
/nuage_topology_collector/tests/test_compare_topology.py
import filecmp
import json
import mock
import os
import sys
import testtools

try:
    from StringIO import StringIO  # for Python 2
except ImportError:
    from io import StringIO  # for Python 3

from nuage_topology_collector.scripts import compare_topology
from nuage_topology_collector.scripts.helper.utils import Utils

TESTS_PATH = 'nuage_topology_collector/tests/'
INPUTS_PATH = TESTS_PATH + 'inputs/'
OUTPUT_PATH = TESTS_PATH + 'outputs/'


def mock_old_report():
    """Load the canned 'old' topology report used as comparison input."""
    old_report_path = os.path.join(
        os.getcwd(), INPUTS_PATH + 'compare_topology_old.json')
    with open(old_report_path) as old_report_data:
        return json.load(old_report_data)


old_report_json = mock_old_report()


class Capturing(list):
    """Context manager that collects everything written to stdout as lines."""

    def __enter__(self):
        self._stdout = sys.stdout
        sys.stdout = self._stringio = StringIO()
        return self

    def __exit__(self, *args):
        self.extend(self._stringio.getvalue().splitlines())
        del self._stringio  # free up some memory
        sys.stdout = self._stdout


class CompareTopology(testtools.TestCase):

    @mock.patch.object(compare_topology, 'create_old_report',
                       return_value=old_report_json)
    @mock.patch.object(Utils, 'check_user', return_value=True)
    @mock.patch.object(os.path, 'isfile', return_value=True)
    @mock.patch.object(Utils, 'source_rc_files', return_value=None)
    def test_module_main(self, *mock):
        """Captured stdout of main() must match the expected report file."""
        cwd = os.getcwd()
        new_report_path = os.path.join(
            cwd, INPUTS_PATH + 'compare_topology_new.json')
        mock_generated_output_path = os.path.join(
            cwd, OUTPUT_PATH + 'generated_output.txt')
        mock_expected_output_path = os.path.join(
            cwd, OUTPUT_PATH + 'test_compare_topology')

        with Capturing() as output:
            compare_topology.main([self, new_report_path])

        with open(mock_generated_output_path, 'w') as generated_output:
            generated_output.writelines(line + "\n" for line in output)

        self.assertTrue(filecmp.cmp(mock_generated_output_path,
                                    mock_expected_output_path),
                        'The output does not match the expected output')
{"/nuage_topology_collector/scripts/topology_import.py": ["/nuage_topology_collector/scripts/helper/osclient.py", "/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_topology_import.py": ["/nuage_topology_collector/scripts/topology_import.py"], "/nuage_topology_collector/scripts/compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py", "/nuage_topology_collector/scripts/helper/osclient.py"], "/nuage_topology_collector/scripts/generate_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/scripts/helper/osclient.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"]}
36,392
nuagenetworks/topology-collector
refs/heads/master
/nuage_topology_collector/filter_plugins/dict_to_items.py
#!/usr/bin/python
# Copyright 2020 Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from ansible.errors import AnsibleFilterError

# Bug fix: ``collections.Mapping`` was deprecated in Python 3.3 and removed
# in Python 3.10; import from collections.abc with a Python 2 fallback.
try:
    from collections.abc import Mapping
except ImportError:
    from collections import Mapping

'''
Cbis is on ansible 2.5 which does not have dict2items
'''


def dict_to_items(mydict, key_name='key', value_name='value'):
    '''takes a dictionary and transforms it into a list of dictionaries,
    with each having a 'key' and 'value' keys that correspond to the keys and
    values of the original
    '''
    if not isinstance(mydict, Mapping):
        raise AnsibleFilterError("dict_to_items requires a dictionary, "
                                 "got %s instead." % type(mydict))
    # One entry per mapping item, preserving the dict's iteration order.
    return [{key_name: key, value_name: value}
            for key, value in mydict.items()]


class FilterModule(object):
    ''' Query filter '''

    def filters(self):
        return {
            'dict_to_items': dict_to_items,
        }
{"/nuage_topology_collector/scripts/topology_import.py": ["/nuage_topology_collector/scripts/helper/osclient.py", "/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_topology_import.py": ["/nuage_topology_collector/scripts/topology_import.py"], "/nuage_topology_collector/scripts/compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py", "/nuage_topology_collector/scripts/helper/osclient.py"], "/nuage_topology_collector/scripts/generate_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/scripts/helper/osclient.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"]}
36,393
nuagenetworks/topology-collector
refs/heads/master
/nuage_topology_collector/roles/tc-inventory/files/get_overcloud_nodes.py
#!/usr/bin/python
# Print a JSON map of overcloud node names to their ctlplane IP address,
# using credentials taken from the OS_* environment variables.
import json
import os

from keystoneauth1.identity import v3
from keystoneauth1 import session
from novaclient import client

try:
    from urllib.parse import urljoin  # Python 3
except ImportError:
    from urlparse import urljoin  # Python 2

auth_url = os.environ["OS_AUTH_URL"]
if os.environ.get("OS_IDENTITY_API_VERSION") == "3":
    # Make sure the endpoint targets the v3 identity API.
    if 'v3' not in auth_url:
        auth_url = urljoin(auth_url, 'v3')

username = os.environ.get("OS_USERNAME")
password = os.environ.get("OS_PASSWORD")
# OS_TENANT_NAME takes precedence; OS_PROJECT_NAME is the newer spelling.
project_name = os.environ.get("OS_TENANT_NAME",
                              os.environ.get("OS_PROJECT_NAME"))
user_domain_name = os.environ.get("OS_USER_DOMAIN_NAME")
project_domain_name = os.environ.get("OS_PROJECT_DOMAIN_NAME")

auth = v3.Password(auth_url=auth_url,
                   username=username,
                   password=password,
                   project_name=project_name,
                   user_domain_name=user_domain_name,
                   project_domain_name=project_domain_name,
                   )
session = session.Session(auth=auth, verify=False)
nova = client.Client(2, session=session)

# Only nodes attached to the ctlplane network are of interest; take the
# first ctlplane address of each.
oc_servers = {server.name: server.networks['ctlplane'][0]
              for server in nova.servers.list()
              if server.networks.get('ctlplane')}

print(json.dumps(oc_servers, indent=4))
{"/nuage_topology_collector/scripts/topology_import.py": ["/nuage_topology_collector/scripts/helper/osclient.py", "/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_topology_import.py": ["/nuage_topology_collector/scripts/topology_import.py"], "/nuage_topology_collector/scripts/compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py", "/nuage_topology_collector/scripts/helper/osclient.py"], "/nuage_topology_collector/scripts/generate_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/scripts/helper/osclient.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"]}
36,394
nuagenetworks/topology-collector
refs/heads/master
/nuage_topology_collector/scripts/helper/constants.py
import os
import yaml

# Installation root of the topology collector.
NUAGE_TC_PATH = '/opt/nuage/topology-collector/nuage_topology_collector'


def get_env_variable(variable):
    """Return *variable* from user_vars.yml, or its built-in default.

    Any problem with the overrides file (missing, unparsable YAML, missing
    or empty value) silently falls back to the default — this is a
    deliberate best-effort lookup.
    """
    USER_VARS = os.path.join(NUAGE_TC_PATH, "user_vars.yml")
    defaults = {
        'output_dir': '/tmp/topo-coll/reports',
        'output_file_prefix': 'topo_report',
        'undercloud_env_file': str(os.getenv('HOME')) + '/stackrc',
        'osc_env_file': str(os.getenv('HOME')) + '/overcloudrc'
    }
    if os.path.exists(USER_VARS):
        with open(USER_VARS, 'r') as stream:
            try:
                extra_vars = yaml.safe_load(stream)
                value = extra_vars.get(variable)
                if value and len(value) > 0:
                    return value
            except Exception:
                # Best effort: any parse/lookup failure means "use default".
                pass
    return defaults[variable]


STACK_USER = 'stack'
STACKRC_FILE = get_env_variable('undercloud_env_file')
OVERCLOUDRC_FILE = get_env_variable('osc_env_file')
OUTPUT_DIR = get_env_variable('output_dir')
OUTPUT_FILE_PREFIX = get_env_variable('output_file_prefix')
{"/nuage_topology_collector/scripts/topology_import.py": ["/nuage_topology_collector/scripts/helper/osclient.py", "/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_topology_import.py": ["/nuage_topology_collector/scripts/topology_import.py"], "/nuage_topology_collector/scripts/compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py", "/nuage_topology_collector/scripts/helper/osclient.py"], "/nuage_topology_collector/scripts/generate_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/scripts/helper/osclient.py": ["/nuage_topology_collector/scripts/helper/utils.py"], "/nuage_topology_collector/tests/test_compare_topology.py": ["/nuage_topology_collector/scripts/helper/utils.py"]}
36,436
MaEvGoR/datamining_project
refs/heads/master
/test.py
import datetime

# Two sample tick timestamps in HHMMSSffffff form; the gap between them is
# probed against the aggregation periods below.
timestart = datetime.datetime.strptime('181013123123', "%H%M%S%f")
new_time = datetime.datetime.strptime('181028123123', "%H%M%S%f")
diff = new_time - timestart

# Identical timestamps: nothing to classify.
if timestart == new_time:
    exit(0)

# Only whole-second gaps align with any period; report every period that
# divides the gap (T = 60 requires the gap to be a whole number of minutes
# with zero leftover seconds).
whole_seconds = diff.microseconds == 0
if whole_seconds:
    print('T = 1')
if whole_seconds and diff.seconds % 5 == 0:
    print('T = 5')
if whole_seconds and diff.seconds % 15 == 0:
    print('T = 15')
if whole_seconds and diff.seconds % 30 == 0:
    print('T = 30')
if whole_seconds and diff.seconds == 0:
    print('T = 60')
{"/MaximOrderBookSpectrum.py": ["/Spectrum.py"], "/main.py": ["/MaximOrderBookSpectrum.py"]}
36,437
MaEvGoR/datamining_project
refs/heads/master
/MaximOrderBookSpectrum.py
from Spectrum import Spectrum
import numpy as np
import datetime


class OrderBookSpectrum():
    """Maintains an order book and writes normalized buy/sell spectra to file.

    Orders are keyed by order number; every processed event updates the
    underlying Spectrum and appends a normalized snapshot to the output file.
    """

    def __init__(self, price_step, spectrum_filename):
        self.ob_df = {}           # orderno -> {volume, buysell, price, time}
        self.collisions = 0       # count of inconsistent log events seen
        self.spectrum = Spectrum(price_step)
        self.spectrum_file = open(spectrum_filename, "w+")
        self.start_time = None    # timestamp of the first processed order

    def write_to_file(self, time):
        """Append the normalized sell+buy spectrum for *time* to the file.

        Skips writing until both sides carry volume, since normalization
        divides by each side's total.
        """
        buy_spect = np.array(self.spectrum.spectrum_buy)
        sell_spect = np.array(self.spectrum.spectrum_sell)
        if buy_spect.sum() > 0 and sell_spect.sum() > 0:
            buy_norm = buy_spect / buy_spect.sum()
            sell_norm = sell_spect / sell_spect.sum()
            buy = ', '.join([str(i) for i in buy_norm])
            sell = ', '.join([str(i) for i in sell_norm])
            to_write = str(time) + ', ' + sell + ', ' + buy + '\n'
            self.spectrum_file.write(to_write)

    def post_order(self, orderno, volume, buysell, price, time):
        """Record a newly posted order and add its volume to the spectrum."""
        self.ob_df[orderno] = {'volume': volume,
                               'buysell': buysell,
                               'price': price,
                               'time': time}
        if buysell == 'B':
            self.spectrum.new_buy_order(price, volume)
        else:
            self.spectrum.new_sell_order(price, volume)

    def revoke_order(self, orderno, volume, buysell, price, time):
        """Remove (part of) a revoked order; inconsistent events are counted
        as collisions and the offending order is dropped from the book."""
        if orderno in self.ob_df:
            if volume == self.ob_df[orderno]['volume']:
                # Full revoke: forget the order entirely.
                self.ob_df.pop(orderno, None)
                if buysell == 'B':
                    self.spectrum.delete_buy_order(price, volume)
                else:
                    self.spectrum.delete_sell_order(price, volume)
            elif volume < self.ob_df[orderno]['volume']:
                # Partial revoke: reduce the resting volume.
                self.ob_df[orderno]['volume'] -= volume
                if buysell == 'B':
                    self.spectrum.delete_buy_order(price, volume)
                else:
                    self.spectrum.delete_sell_order(price, volume)
            else:
                print('\nException: not possible volume for match: \n',
                      orderno)
                self.collisions += 1
                # Bug fix: was ``self.delete_collision(order)`` — the name
                # ``order`` is undefined and raised NameError at runtime.
                self.delete_collision(orderno)
        else:
            print('\nException: orderno does not exist: \n', orderno)
            self.collisions += 1
            # Bug fix: same undefined-name issue as above.
            self.delete_collision(orderno)

    def match_order(self, orderno, volume, buysell, price, time):
        """Apply a trade (match) against a resting order.

        NOTE(review): unlike revoke_order, inconsistent matches only count
        a collision and do not call delete_collision — presumably
        intentional, but worth confirming.
        """
        if orderno in self.ob_df:
            if volume == self.ob_df[orderno]['volume']:
                self.ob_df.pop(orderno, None)
                if buysell == 'B':
                    self.spectrum.delete_buy_order(price, volume)
                else:
                    self.spectrum.delete_sell_order(price, volume)
            elif volume < self.ob_df[orderno]['volume']:
                self.ob_df[orderno]['volume'] -= volume
                if buysell == 'B':
                    self.spectrum.delete_buy_order(price, volume)
                else:
                    self.spectrum.delete_sell_order(price, volume)
            else:
                print('\nException: not possible volume for match: \n',
                      orderno)
                self.collisions += 1
        else:
            print('\nException: orderno does not exist: \n', orderno)
            self.collisions += 1

    def delete_collision(self, orderno):
        """Drop an order involved in an inconsistent event from the book."""
        # Robustness fix: str() so a non-string orderno cannot raise
        # TypeError during concatenation; output unchanged for str input.
        print('Delete collisioned orders with ORDERNO: ' + str(orderno))
        self.ob_df.pop(orderno, None)
        print('Current number of collisions: {}\n'.format(self.collisions))

    def new_order(self, action, orderno, volume, buysell, price, time):
        """Dispatch one order-log event (1=post, 0=revoke, 2=match) and
        write the resulting spectrum snapshot."""
        if not self.start_time:
            self.start_time = datetime.datetime.strptime(time, "%H%M%S%f")
        new_time = datetime.datetime.strptime(time, "%H%M%S%f")
        time_diff = new_time - self.start_time
        # NOTE(review): exiting the whole process when a timestamp equals
        # the first one looks like leftover debug logic — confirm intent.
        if self.start_time == new_time:
            exit(0)
        if time_diff.microseconds == 0:
            print('T = 1')
        if time_diff.microseconds == 0 and time_diff.seconds % 5 == 0:
            print('T = 5')
        if time_diff.microseconds == 0 and time_diff.seconds % 15 == 0:
            print('T = 15')
        if time_diff.microseconds == 0 and time_diff.seconds % 30 == 0:
            print('T = 30')
        if time_diff.microseconds == 0 and time_diff.seconds == 0:
            print('T = 60')
        if action == 1:
            self.post_order(orderno, volume, buysell, price, time)
        elif action == 0:
            self.revoke_order(orderno, volume, buysell, price, time)
        elif action == 2:
            self.match_order(orderno, volume, buysell, price, time)
        else:
            # Unknown action codes are counted as collisions.
            self.collisions += 1
        self.write_to_file(time)
{"/MaximOrderBookSpectrum.py": ["/Spectrum.py"], "/main.py": ["/MaximOrderBookSpectrum.py"]}
36,438
MaEvGoR/datamining_project
refs/heads/master
/data/data_separator.py
import os
from glob import glob

import pandas as pd
from tqdm import tqdm

# FX instruments whose order-log rows get split into per-instrument files.
instruments = ['USD000000TOD', 'USD000UTSTOM', 'EUR_RUB__TOD',
               'EUR_RUB__TOM', 'EURUSD000TOM', 'EURUSD000TOD']

# One-time directory setup, kept for reference:
# for inst in instruments:
#     os.mkdir(f'fx_separated/{inst}')

for ol_fn in tqdm(glob('./*/Order*')):
    ol_df = pd.read_csv(ol_fn)
    base_name = ol_fn.split("/")[-1]
    for inst in instruments:
        # Keep only this instrument's rows and write them out under its dir.
        inst_df = ol_df[ol_df['SECCODE'] == inst]
        inst_df.to_csv(f'fx_separated/{inst}/{base_name}')
{"/MaximOrderBookSpectrum.py": ["/Spectrum.py"], "/main.py": ["/MaximOrderBookSpectrum.py"]}
36,439
MaEvGoR/datamining_project
refs/heads/master
/Spectrum.py
import math
import numpy as np


class Spectrum:
    """Maintains buy/sell depth histograms for an order book.

    Prices are bucketed into bins of width ``price_step``.  Two resolutions
    are kept per side: a 50-bin window anchored at the current best price
    (``buy_volumes`` / ``sell_volumes``) and a coarse 10-bucket "spectrum"
    where each bucket aggregates 5 fine bins.  ``all_*_bins`` /
    ``all_*_volumes`` additionally record every price seen so far, including
    prices that fall outside the 50-bin window.

    BUG FIX (see ``delete_buy_order``): the out-of-window branch tested
    ``price > best_buy + 50 steps`` — copy-pasted from the sell side — so
    deletions below the buy window were silently dropped.
    """

    def __init__(self, price_step):
        self.price_step = price_step
        # 50-bin windows anchored at the best price on each side
        self.buy_bins = []
        self.sell_bins = []
        # full histograms over every price seen so far
        self.all_buy_bins = []
        self.all_buy_volumes = []
        self.all_sell_bins = []
        self.all_sell_volumes = []
        # -1 means "no order recorded yet on this side"
        self.best_sell = -1
        self.best_buy = -1
        self.buy_volumes = [0 for i in range(50)]
        self.sell_volumes = [0 for i in range(50)]
        # coarse 10-bucket views (5 fine bins per bucket)
        self.spectrum_sell_bins = []
        self.spectrum_buy_bins = []
        self.spectrum_sell = [0 for i in range(10)]
        self.spectrum_buy = [0 for i in range(10)]

    def insert_to_buy(self, price, volume):
        """Add ``volume`` at ``price`` to the full buy histogram.

        Extends ``all_buy_bins`` upward or downward as needed and returns the
        bin index that received the volume.
        """
        max_price = self.all_buy_bins[0]
        # new one is greater than the currently recorded maximum
        if price > max_price:
            dist = math.ceil((price - max_price) / self.price_step)
            self.all_buy_bins = [i for i in np.arange(price, max_price, -self.price_step)] + self.all_buy_bins
            self.all_buy_volumes = [0 for i in range(
                len(self.all_buy_bins) - len(self.all_buy_volumes))] + self.all_buy_volumes
            self.all_buy_volumes[0] += volume
            return 0
        else:
            idx = math.ceil((max_price - price) / self.price_step)
            if idx < len(self.all_buy_bins):
                self.all_buy_volumes[idx] += volume
                return idx
            else:
                # price lies below the recorded range: grow the tail
                dist = idx - len(self.all_buy_bins) + 1
                self.all_buy_bins = self.all_buy_bins + [i for i in np.arange(
                    self.all_buy_bins[-1] - self.price_step, price - 1, -self.price_step)]
                self.all_buy_volumes = self.all_buy_volumes + [0 for i in range(
                    len(self.all_buy_bins) - len(self.all_buy_volumes))]
                self.all_buy_volumes[idx] += volume
                return idx

    def insert_to_sell(self, price, volume):
        """Add ``volume`` at ``price`` to the full sell histogram.

        Mirror image of :meth:`insert_to_buy`; returns the bin index used.
        """
        min_price = self.all_sell_bins[0]
        # new one is less than the currently recorded minimum
        if price < min_price:
            dist = math.ceil((min_price - price) / self.price_step)
            self.all_sell_bins = [i for i in np.arange(price, min_price, self.price_step)] + self.all_sell_bins
            self.all_sell_volumes = [0 for i in range(
                len(self.all_sell_bins) - len(self.all_sell_volumes))] + self.all_sell_volumes
            self.all_sell_volumes[0] += volume
            return 0
        else:
            idx = math.ceil((price - min_price) / self.price_step)
            if idx < len(self.all_sell_bins):
                self.all_sell_volumes[idx] += volume
                return idx
            else:
                # price lies above the recorded range: grow the tail
                dist = idx - len(self.all_sell_bins) + 1
                self.all_sell_bins = self.all_sell_bins + [i for i in np.arange(
                    self.all_sell_bins[-1] + self.price_step, price + 1, self.price_step)]
                self.all_sell_volumes = self.all_sell_volumes + [0 for i in range(
                    len(self.all_sell_bins) - len(self.all_sell_volumes))]
                self.all_sell_volumes[idx] += volume
                return idx

    def delete_from_buy(self, price, volume):
        """Remove ``volume`` at ``price`` from the full buy histogram.

        Returns the index of the first non-empty bin at or below the hit bin
        when the bin is emptied, the hit bin otherwise, or -1 when the price
        is outside the recorded range.
        """
        max_price = self.all_buy_bins[0]
        idx = math.ceil((max_price - price) / self.price_step)
        if 0 <= idx < len(self.all_buy_bins):
            if volume < self.all_buy_volumes[idx]:
                self.all_buy_volumes[idx] -= volume
                return idx
            # find first non-zero element
            else:
                self.all_buy_volumes[idx] = 0
                while self.all_buy_volumes[idx] == 0:
                    if (idx == len(self.all_buy_volumes) - 1):
                        break
                    idx += 1
                return idx
        else:
            return -1

    def delete_from_sell(self, price, volume):
        """Remove ``volume`` at ``price`` from the full sell histogram.

        Mirror image of :meth:`delete_from_buy`.
        """
        min_price = self.all_sell_bins[0]
        idx = math.ceil((price - min_price) / self.price_step)
        if 0 <= idx < len(self.all_sell_bins):
            if volume < self.all_sell_volumes[idx]:
                self.all_sell_volumes[idx] -= volume
                return idx
            # find first non-zero element
            else:
                self.all_sell_volumes[idx] = 0
                while self.all_sell_volumes[idx] == 0:
                    if (idx == len(self.all_sell_volumes) - 1):
                        break
                    idx += 1
                return idx
        else:
            return -1

    def find_idx_sell(self, price):
        """Index of ``price`` inside the 50-bin sell window (clamped to 49)."""
        k = math.ceil((price - self.best_sell) / self.price_step)
        if k == 50:
            k = 49
        return int(k)

    def find_idx_buy(self, price):
        """Index of ``price`` inside the 50-bin buy window (clamped to 49)."""
        k = math.ceil((self.best_buy - price) / self.price_step)
        if k == 50:
            k = 49
        return int(k)

    def find_idx_spectrum_sell(self, price):
        """Index of ``price`` in the coarse 10-bucket sell spectrum."""
        k = math.ceil((price - self.best_sell) / self.price_step) // 5
        if k == 10:
            k = 9
        return k

    def find_idx_spectrum_buy(self, price):
        """Index of ``price`` in the coarse 10-bucket buy spectrum."""
        k = math.ceil((self.best_buy - price) / self.price_step) // 5
        if k == 10:
            k = 9
        return k

    def recalc_spectrum_sell(self):
        """Rebuild the 10-bucket sell spectrum from the 50-bin window."""
        self.spectrum_sell_bins = [self.sell_bins[i] for i in range(0, 50, 5)]
        self.spectrum_sell = [sum(self.sell_volumes[i:i + 5]) for i in range(0, 50, 5)]

    def recalc_spectrum_buy(self):
        """Rebuild the 10-bucket buy spectrum from the 50-bin window."""
        self.spectrum_buy_bins = [self.buy_bins[i] for i in range(0, 50, 5)]
        self.spectrum_buy = [sum(self.buy_volumes[i:i + 5]) for i in range(0, 50, 5)]

    def new_sell_order(self, price, volume):
        """Record a new sell order, updating window, spectrum and full view."""
        # no sell orders recorded yet
        if self.best_sell == -1:
            self.best_sell = price
            max_sell = self.best_sell + 50 * self.price_step
            self.sell_bins = [p for p in np.arange(self.best_sell, max_sell, self.price_step)]
            self.spectrum_sell_bins = [p for p in np.arange(self.best_sell, max_sell, self.price_step * 5)]
            self.sell_volumes[0] = volume
            self.spectrum_sell[0] = volume
            self.all_sell_bins = self.sell_bins.copy()
            self.all_sell_volumes = self.sell_volumes.copy()
        else:
            # sell order falls somewhere in the existing bins
            if self.best_sell <= price < self.best_sell + 50 * self.price_step:
                idx = self.find_idx_sell(price)
                if idx == 50:
                    idx = 49
                self.sell_volumes[idx] += volume
                spect_idx = self.find_idx_spectrum_sell(price)
                self.spectrum_sell[spect_idx] += volume
                _ = self.insert_to_sell(price, volume)
            else:
                # found new best, update everything
                if self.best_sell > price:
                    idx = self.insert_to_sell(price, volume)
                    self.best_sell = price
                    if idx + 50 < len(self.all_sell_bins):
                        self.sell_bins = self.all_sell_bins[idx:idx + 50]
                        self.sell_volumes = self.all_sell_volumes[idx:idx + 50]
                    else:
                        self.sell_bins = [p for p in np.arange(
                            self.best_sell, self.best_sell + 50 * self.price_step, self.price_step)]
                        self.sell_volumes = self.all_sell_volumes[idx:] + [0 for i in range(
                            50 - len(self.all_sell_volumes) + idx)]
                    self.recalc_spectrum_sell()
                # save for the later usage (price above the 50-bin window)
                else:
                    _ = self.insert_to_sell(price, volume)

    def new_buy_order(self, price, volume):
        """Record a new buy order, updating window, spectrum and full view."""
        # no buy orders recorded yet
        if self.best_buy == -1:
            self.best_buy = price
            min_buy = self.best_buy - 50 * self.price_step
            self.buy_bins = [p for p in np.arange(self.best_buy, min_buy, -self.price_step)]
            self.spectrum_buy_bins = [p for p in np.arange(self.best_buy, min_buy, -self.price_step * 5)]
            self.buy_volumes[0] = volume
            self.spectrum_buy[0] = volume
            self.all_buy_bins = self.buy_bins.copy()
            self.all_buy_volumes = self.buy_volumes.copy()
        else:
            # buy order falls somewhere in the existing bins
            if self.best_buy >= price > self.best_buy - 50 * self.price_step:
                idx = self.find_idx_buy(price)
                if idx == 50:
                    idx = 49
                self.buy_volumes[idx] += volume
                spect_idx = self.find_idx_spectrum_buy(price)
                self.spectrum_buy[spect_idx] += volume
                _ = self.insert_to_buy(price, volume)
            else:
                # found new best, update everything
                if self.best_buy < price:
                    idx = self.insert_to_buy(price, volume)
                    self.best_buy = price
                    if idx + 50 < len(self.all_buy_bins):
                        self.buy_bins = self.all_buy_bins[idx:idx + 50]
                        self.buy_volumes = self.all_buy_volumes[idx:idx + 50]
                    else:
                        self.buy_bins = [p for p in np.arange(
                            self.best_buy, self.best_buy - 50 * self.price_step, -self.price_step)]
                        self.buy_volumes = self.all_buy_volumes[idx:] + [0 for i in range(
                            50 - len(self.all_buy_volumes) + idx)]
                    self.recalc_spectrum_buy()
                # save for the later usage (price below the 50-bin window)
                else:
                    _ = self.insert_to_buy(price, volume)

    def delete_sell_order(self, price, volume):
        """Remove a sell order, re-anchoring the window if the best empties."""
        # does not remove current best
        if self.best_sell + 50 * self.price_step > price > self.best_sell or price == self.best_sell and volume < \
                self.sell_volumes[0]:
            idx = self.find_idx_sell(price)
            self.sell_volumes[idx] = max(0, self.sell_volumes[idx] - volume)
            spect_idx = self.find_idx_spectrum_sell(price)
            self.spectrum_sell[spect_idx] = max(0, self.spectrum_sell[spect_idx] - volume)
        else:
            # if removes current best
            if price == self.best_sell and volume >= self.sell_volumes[0]:
                idx = self.delete_from_sell(price, volume)
                self.best_sell = self.all_sell_bins[idx]
                if idx + 50 < len(self.all_sell_bins):
                    self.sell_bins = self.all_sell_bins[idx:idx + 50]
                    self.sell_volumes = self.all_sell_volumes[idx:idx + 50]
                else:
                    self.sell_bins = [p for p in np.arange(
                        self.best_sell, self.best_sell + 50 * self.price_step, self.price_step)]
                    self.sell_volumes = self.all_sell_volumes[idx:] + [0 for i in range(
                        50 - len(self.all_sell_volumes) + idx)]
                self.recalc_spectrum_sell()
            # if does not fall in 50 steps
            elif price > self.best_sell + 50 * self.price_step:
                _ = self.delete_from_sell(price, volume)

    def delete_buy_order(self, price, volume):
        """Remove a buy order, re-anchoring the window if the best empties."""
        # does not remove current best
        if self.best_buy - 50 * self.price_step < price < self.best_buy or price == self.best_buy and volume < \
                self.buy_volumes[0]:
            idx = self.find_idx_buy(price)
            self.buy_volumes[idx] = max(0, self.buy_volumes[idx] - volume)
            spect_idx = self.find_idx_spectrum_buy(price)
            self.spectrum_buy[spect_idx] = max(0, self.spectrum_buy[spect_idx] - volume)
        else:
            # if removes current best
            if price == self.best_buy and volume >= self.buy_volumes[0]:
                idx = self.delete_from_buy(price, volume)
                self.best_buy = self.all_buy_bins[idx]
                if idx + 50 < len(self.all_buy_bins):
                    self.buy_bins = self.all_buy_bins[idx:idx + 50]
                    self.buy_volumes = self.all_buy_volumes[idx:idx + 50]
                else:
                    self.buy_bins = [p for p in np.arange(
                        self.best_buy, self.best_buy - 50 * self.price_step, -self.price_step)]
                    self.buy_volumes = self.all_buy_volumes[idx:] + [0 for i in range(
                        50 - len(self.all_buy_volumes) + idx)]
                self.recalc_spectrum_buy()
            # if does not fall in 50 steps
            # BUG FIX: an out-of-window *buy* price lies BELOW
            # best_buy - 50 steps; the original condition
            # (`price > best_buy + 50 steps`) was copy-pasted from the sell
            # side and never fired, so below-window volume was never removed.
            elif price < self.best_buy - 50 * self.price_step:
                _ = self.delete_from_buy(price, volume)
{"/MaximOrderBookSpectrum.py": ["/Spectrum.py"], "/main.py": ["/MaximOrderBookSpectrum.py"]}
36,440
MaEvGoR/datamining_project
refs/heads/master
/main.py
from MaximOrderBookSpectrum import OrderBookSpectrum
import pandas as pd


def generate_orderbook(orderlog_path, output_path, instrument, price_step, filename):
    """Replay an order-log CSV through an OrderBookSpectrum and return it.

    :param orderlog_path: CSV with ACTION/ORDERNO/VOLUME/BUYSELL/PRICE/TIME
        columns
    :param output_path: unused — kept for backward compatibility (the dump
        that used it was dead commented-out code and has been removed)
    :param instrument: unused — kept for backward compatibility
    :param price_step: bin width forwarded to OrderBookSpectrum
    :param filename: output file name forwarded to OrderBookSpectrum
    :returns: the populated OrderBookSpectrum instance
    """
    df = pd.read_csv(orderlog_path)
    ob = OrderBookSpectrum(price_step, filename)
    # Iterate the rows in lockstep instead of indexing six parallel arrays
    # with `for i in range(len(df))`.
    rows = zip(df['ACTION'].to_numpy(), df['ORDERNO'].to_numpy(),
               df['VOLUME'].to_numpy(), df['BUYSELL'].to_numpy(),
               df['PRICE'].to_numpy(), df['TIME'].to_numpy())
    for action, orderno, volume, buysell, price, time in rows:
        ob.new_order(action, orderno, volume, buysell, price, time)
    return ob
{"/MaximOrderBookSpectrum.py": ["/Spectrum.py"], "/main.py": ["/MaximOrderBookSpectrum.py"]}
36,444
emmikoivisto/codeclan-karaoke
refs/heads/master
/src/room.py
class Room:
    """A karaoke room with a name, a guest list and a playlist."""

    def __init__(self, name, guest_list):
        """Open a room under ``name`` with an initial ``guest_list``."""
        self.name = name
        self.guest_list = guest_list
        self.playlist = []

    def add_songs_to_playlist(self, new_song):
        """Queue one more song at the end of the playlist."""
        self.playlist += [new_song]

    def guest_count(self):
        """How many guests are currently in the room."""
        return len(self.guest_list)

    def add_guest_to_guestlist(self, new_guest):
        """Let a guest in; returns None (mirrors list.append), as before."""
        self.guest_list.append(new_guest)
        return None

    def remove_guest_from_guestlist(self, guest_to_remove):
        """Drop the first matching guest (ValueError if they are absent)."""
        self.guest_list.remove(guest_to_remove)
{"/tests/room_test.py": ["/src/room.py"], "/run_tests.py": ["/tests/guest_test.py", "/tests/room_test.py", "/tests/songs_test.py"]}
36,445
emmikoivisto/codeclan-karaoke
refs/heads/master
/tests/room_test.py
import unittest

from src.room import Room
from src.songs import Songs
from src.guest import Guest


class TestRoom(unittest.TestCase):
    """Unit tests covering the Room model's guest and playlist behaviour."""

    def setUp(self):
        """Build a fresh two-guest room plus a spare song fixture."""
        self.song_to_add_1 = Songs("Move", "Taemin")
        self.guest_1 = Guest("Tim", 30)
        self.guest_2 = Guest("John", 18)
        self.room = Room("Jam Jar", [self.guest_1, self.guest_2])

    def test_room_has_name(self):
        self.assertEqual("Jam Jar", self.room.name)

    def test_playlist_has_songs(self):
        self.room.add_songs_to_playlist(self.song_to_add_1)
        self.assertEqual(1, len(self.room.playlist))

    def test_room_has_guestlist(self):
        self.assertEqual(2, self.room.guest_count())

    def test_add_guest(self):
        self.room.add_guest_to_guestlist(Guest("Jess", 28))
        self.assertEqual(3, self.room.guest_count())

    def test_remove_guest(self):
        self.room.remove_guest_from_guestlist(self.guest_1)
        self.assertEqual(1, self.room.guest_count())
{"/tests/room_test.py": ["/src/room.py"], "/run_tests.py": ["/tests/guest_test.py", "/tests/room_test.py", "/tests/songs_test.py"]}
36,446
emmikoivisto/codeclan-karaoke
refs/heads/master
/run_tests.py
import unittest
# These imports look unused, but unittest.main() discovers TestCase
# subclasses by scanning this module's globals — keep them.
from tests.guest_test import TestGuest
from tests.room_test import TestRoom
from tests.songs_test import TestSongs

# Run every imported test case when executed directly: `python run_tests.py`.
if __name__ == '__main__':
    unittest.main()
{"/tests/room_test.py": ["/src/room.py"], "/run_tests.py": ["/tests/guest_test.py", "/tests/room_test.py", "/tests/songs_test.py"]}
36,447
emmikoivisto/codeclan-karaoke
refs/heads/master
/tests/songs_test.py
import unittest

from src.songs import Songs


class TestSongs(unittest.TestCase):
    """Sanity checks on the song fixture values.

    NOTE(review): these tests exercise plain strings only — the Songs class
    is imported but never instantiated here.
    """

    def setUp(self):
        # Fixture values shared by both checks.
        self.title, self.artist = "Move", "Taemin"

    def test_song_has_title(self):
        self.assertEqual(self.title, "Move")

    def test_song_has_artist(self):
        self.assertEqual(self.artist, "Taemin")
{"/tests/room_test.py": ["/src/room.py"], "/run_tests.py": ["/tests/guest_test.py", "/tests/room_test.py", "/tests/songs_test.py"]}
36,448
emmikoivisto/codeclan-karaoke
refs/heads/master
/tests/guest_test.py
import unittest

from src.guest import Guest


class TestGuest(unittest.TestCase):
    """Unit tests for the Guest model's constructor attributes."""

    def setUp(self):
        # One fresh guest fixture per test.
        self.guest = Guest("Jack", 25)

    def test_guest_has_name(self):
        self.assertEqual(self.guest.name, "Jack")

    def test_guest_has_age(self):
        self.assertEqual(self.guest.age, 25)

    def test_guest_has_wallet(self):
        self.assertEqual(self.guest.money, [])
{"/tests/room_test.py": ["/src/room.py"], "/run_tests.py": ["/tests/guest_test.py", "/tests/room_test.py", "/tests/songs_test.py"]}
36,449
Pixelapse/pyunicode
refs/heads/master
/pyunicode/api.py
# -*- coding: utf-8 -*- # Default libs import logging # Installed libs # Project modules logger = logging.getLogger(__name__) def safely_decode(unicode_or_str, encoding='utf-8'): ''' Decodes byte <str> into <unicode>. Ignores any non-utf8 chars in <str>s ''' if isinstance(unicode_or_str, unicode): ustr = unicode_or_str elif isinstance(unicode_or_str, str): ustr = unicode_or_str.decode(encoding, 'ignore') else: raise Exception(u'Not of type unicode or str') return ustr def safely_encode(unicode_or_str, encoding='utf-8'): ''' Encodes <unicode> into byte <str>. Replaces any non utf8 chars ''' if isinstance(unicode_or_str, unicode): rstr = unicode_or_str.encode(encoding, 'replace') elif isinstance(unicode_or_str, str): rstr = unicode_or_str else: raise Exception(u'Not of type unicode or str') return rstr
{"/pyunicode/__init__.py": ["/pyunicode/api.py"]}
36,450
Pixelapse/pyunicode
refs/heads/master
/pyunicode/__init__.py
# Package metadata
__title__ = 'pyunicode'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Pixelapse'

# Re-export the public API at package level.
from .api import safely_decode, safely_encode
{"/pyunicode/__init__.py": ["/pyunicode/api.py"]}
36,471
matiasechaharria/Calibracion_termometros
refs/heads/master
/run.py
import datetime import os import socket from flask import Flask, request, flash, url_for, redirect, render_template from flask_sqlalchemy import SQLAlchemy from models import Equipo, Persona, Sonda, Medicion, Certificado,Certificado_INTI from app_things import app, db from flask_wtf import FlaskForm from wtforms import SelectField from calculos import error_tipico, interpolacion_patron, interpolacion_patron, temp_calibracion, cifra_correccion, incertidumbre, valores_para_certificado import crear_pdf.crear_pdf as PDF import numpy as np @app.route('/') def Indice(): return render_template('Indice.html') @app.route('/Cartificado_pdf') def Certificado_pdf(patron,ebp,medicion): obj = Certificado_INTI.query.order_by(Certificado_INTI.id.desc()).first() # print("obj") # print(obj.temp_0) certif_INTI=[[ obj.temp_n30,obj.correcion_n30,obj.incert_n30, 2], [obj.temp_0,obj.correcion_0,obj.incert_0, 2], [obj.temp_37,obj.correcion_37,obj.incert_37, 2], [obj.temp_100,obj.correcion_100,obj.incert_100, 2], [obj.temp_200,obj.correcion_200,obj.incert_200, 2]] print("average ebp =",np.average(ebp)) print("average patron =",np.average(patron)) temp,correccion,incert= valores_para_certificado(certif_INTI,patron,ebp) print("Temperatura = "+str(temp) +" Correccion = " +str(correccion) +" Incertidumbre = "+ str( incert)) PDF.export_certificado(temp,correccion,incert) #PDF.export_certificado(equipos = Equipo.query.all()) try: certificado=Certificado(medicion,temp,correccion,incert) db.session.add(certificado) except Exception as error: print("error al poner el certificado en la base") print(str(error)) db.session.rollback() flash('error al poner el certificado en la base', 'error') else: db.session.commit() flash('El certificado fue cargado correctamente') # return redirect(url_for('Listar_todos_los_certificados')) # # return render_template('Indice.html') @app.route('/Listar_todos_los_certificados') def Listar_todos_los_certificados(): result = db.engine.execute("SELECT Certificado.id as 
certificado_id, \ Equipo.nombre as nombre_equipo,\ Sonda.nombre as Sonda_nombre, \ Certificado.temp as certificado_temp, \ Certificado.correcion as certificado_correcion, \ Certificado.incert as certificado_incert \ FROM Equipo \ join Medicion on (Medicion.equipo_id = Equipo.id )\ join Sonda on (Sonda.id = Medicion.sonda_id )\ join Certificado on (Certificado.id = Medicion.id )\ ORDER BY Equipo.id;") print("imprimo lo que sale de la mega consulta") certificados = [] for lista in result: certificados.append(lista[:]) print(certificados) return render_template('Listar_todos_los_certificados2.html', certificados=certificados ) #return render_template('Listar_todos_los_certificados.html', certificados = Certificado.query.all() ) @app.route('/Alta_certificado', methods = ['GET', 'POST']) def Alta_certificado(): """Carga los certificados del INTI en la base de datos""" if request.method == 'POST': if( not request.form['temp_n30'] \ or not request.form['correcion_n30']\ or not request.form['incert_n30']\ or not request.form['temp_0']\ or not request.form['correcion_0']\ or not request.form['incert_0']\ or not request.form['temp_37']\ or not request.form['correcion_37']\ or not request.form['incert_37']\ or not request.form['temp_100']\ or not request.form['correcion_100']\ or not request.form['incert_100']\ or not request.form['temp_200']\ or not request.form['correcion_200']\ or not request.form['incert_200']\ or not request.form['fecha']): flash('Ingrese todos los campos', 'error') else: try: certificado_INTI = Certificado_INTI(float(request.form['temp_n30']), \ float(request.form['correcion_n30']),\ float(request.form['incert_n30']),\ float(request.form['temp_0']),\ float(request.form['correcion_0']),\ float(request.form['incert_0']),\ float(request.form['temp_37']),\ float(request.form['correcion_37']),\ float(request.form['incert_37']),\ float(request.form['temp_100']),\ float(request.form['correcion_100']),\ float(request.form['incert_100']),\ 
float(request.form['temp_200']),\ float(request.form['correcion_200']),\ float(request.form['incert_200']),\ request.form['fecha']) db.session.add(certificado_INTI) except Exception as error: print("error al poner el certificado del INTI en la base") print(str(error)) db.session.rollback() flash('error al poner el certificado del INTI en la base', 'error') else: db.session.commit() flash('El certificado fue cargado correctamente') return redirect(url_for('Listar_todos_los_certificados')) return render_template('Alta_certificado.html') @app.route('/Listar_todas_las_mediciones') def Listar_todas_las_mediciones(): return render_template('Listar_todas_las_mediciones.html', mediciones = Medicion.query.all() ) @app.route('/Alta_medicion', methods = ['GET', 'POST']) def Alta_medicion(): if request.method == 'POST': if( not request.form['temp_ambiente'] \ or not request.form['humedad'] \ or not request.form['persona_id'] \ or not request.form['ebp'] \ or not request.form['sonda'] \ or not request.form['temp_pat0']\ or not request.form['temp_pat1']\ or not request.form['temp_pat2']\ or not request.form['temp_pat3']\ or not request.form['temp_pat4']\ or not request.form['temp_pat5']\ or not request.form['temp_pat6']\ or not request.form['temp_pat7']\ or not request.form['temp_pat8']\ or not request.form['temp_pat9']\ or not request.form['temp_ebp0']\ or not request.form['temp_ebp1']\ or not request.form['temp_ebp2']\ or not request.form['temp_ebp3']\ or not request.form['temp_ebp4']\ or not request.form['temp_ebp5']\ or not request.form['temp_ebp6']\ or not request.form['temp_ebp7']\ or not request.form['temp_ebp8']\ or not request.form['temp_ebp9'] ): flash('Ingrese todos los campos', 'error') else: temp_ambiente=request.form['temp_ambiente'] humedad= request.form['humedad'] persona_id= request.form['persona_id'] ebp= request.form['ebp'] sonda= request.form['sonda'] temp_pat=[request.form['temp_pat0'],\ request.form['temp_pat1'],\ request.form['temp_pat2'],\ 
request.form['temp_pat3'],\ request.form['temp_pat4'],\ request.form['temp_pat5'],\ request.form['temp_pat6'],\ request.form['temp_pat7'],\ request.form['temp_pat8'],\ request.form['temp_pat9']] temp_ebp=[request.form['temp_ebp0'],\ request.form['temp_ebp1'],\ request.form['temp_ebp2'],\ request.form['temp_ebp3'],\ request.form['temp_ebp4'],\ request.form['temp_ebp5'],\ request.form['temp_ebp6'],\ request.form['temp_ebp7'],\ request.form['temp_ebp8'],\ request.form['temp_ebp9']] existe = Persona.query.filter(Persona.nombre == persona_id).first() if not existe: flash('"La persona no existe !"') else: existe = Equipo.query.filter(Equipo.nombre == ebp).first() if not existe: flash('"El equipo no existe !"') else: existe = Sonda.query.filter(Sonda.nombre == sonda).first() if not existe: flash('"La sonda no existe !"') else: try: print("cargando las mediciones") medicion = Medicion(temp_ambiente, humedad, persona_id, ebp, sonda,\ request.form['temp_pat0'],\ request.form['temp_pat1'],\ request.form['temp_pat2'],\ request.form['temp_pat3'],\ request.form['temp_pat4'],\ request.form['temp_pat5'],\ request.form['temp_pat6'],\ request.form['temp_pat7'],\ request.form['temp_pat8'],\ request.form['temp_pat9'],\ request.form['temp_ebp0'],\ request.form['temp_ebp1'],\ request.form['temp_ebp2'],\ request.form['temp_ebp3'],\ request.form['temp_ebp4'],\ request.form['temp_ebp5'],\ request.form['temp_ebp6'],\ request.form['temp_ebp7'],\ request.form['temp_ebp8'],\ request.form['temp_ebp9']) db.session.add(medicion) except Exception as error: print("error al cargar la medicion") print(str(error)) db.session.rollback() flash('error al cargar la medicion', 'error') else: db.session.commit() temp_pat=[float(request.form['temp_pat0']),\ float(request.form['temp_pat1']),\ float(request.form['temp_pat2']),\ float(request.form['temp_pat3']),\ float(request.form['temp_pat4']),\ float(request.form['temp_pat5']),\ float(request.form['temp_pat6']),\ float(request.form['temp_pat7']),\ 
float(request.form['temp_pat8']),\ float(request.form['temp_pat9'])] temp_ebp=[float(request.form['temp_ebp0']),\ float(request.form['temp_ebp1']),\ float(request.form['temp_ebp2']),\ float(request.form['temp_ebp3']),\ float(request.form['temp_ebp4']),\ float(request.form['temp_ebp5']),\ float(request.form['temp_ebp6']),\ float(request.form['temp_ebp7']),\ float(request.form['temp_ebp8']),\ float(request.form['temp_ebp9'])] Certificado_pdf(temp_pat,temp_ebp,medicion) flash('La medicion fue cargada correctamente') return redirect(url_for('Listar_todas_las_mediciones')) return render_template('Alta_medicion.html') @app.route('/Listar_todos_los_equipos') def Listar_todos_los_equipos(): return render_template('Listar_todos_los_equipos.html', equipos = Equipo.query.all() ) @app.route('/Alta_equipo', methods = ['GET', 'POST']) def Alta_equipo(): if request.method == 'POST': if not request.form['nombre'] or not request.form['marca']or not request.form['modelo']or not request.form['n_serie']or not request.form['fecha']: flash('Ingrese todos los campos', 'error') else: marca= request.form['marca'] nombre_equipo =request.form['nombre'] modelo=request.form['modelo'] n_serie= request.form['n_serie'] fecha_alta= request.form['fecha'] observaciones= request.form['observaciones'] existe = Equipo.query.filter(Equipo.nombre == nombre_equipo).first() if existe: flash('"El equipo ya existe !"') else: try: equipo = Equipo(nombre_equipo, marca, modelo, n_serie, fecha_alta, observaciones) db.session.add(equipo) except Exception as error: print("ERROR ! al cargar el equipo !") print(str(error)) db.session.rollback() flash('"ERROR ! 
al cargar el equipo !"') else: db.session.commit() flash('El equipo fue cargado correctamente') return redirect(url_for('Listar_todos_los_equipos')) return render_template('Alta_equipo.html') @app.route('/Alta_persona', methods = ['GET', 'POST']) def Alta_persona(): if request.method == 'POST': if not request.form['name'] or not request.form['city'] or not request.form['addr']: flash('Ingrese todos los campos', 'error') else: nombre =request.form['name'] email =request.form['city'] direccion =request.form['addr'] telefono =request.form['pin'] existe = Persona.query.filter(Persona.nombre == nombre).first() if existe: flash('"La persona ya existe !"') else: try: persona = Persona(nombre,email,direccion,telefono) db.session.add(persona) except Exception as error: print("ERROR ! al cargar la persona!") print(str(error)) db.session.rollback() flash('"ERROR ! al cargar la persona !"') else: db.session.commit() flash('La persona fue agregada correctamente') return redirect(url_for('Listar_todas_las_personas')) return render_template('Alta_persona.html') @app.route('/Listar_todas_las_personas') def Listar_todas_las_personas(): return render_template('Listar_todas_las_personas.html', personas = Persona.query.all() ) @app.route('/Alta_sonda', methods = ['GET', 'POST']) def Alta_sonda(): if request.method == 'POST': if not request.form['nombre'] or not request.form['fecha_alta']: flash('Ingrese todos los campos', 'error') else: nombre =request.form['nombre'] fecha =request.form['fecha_alta'] observaciones =request.form['observaciones'] existe = Sonda.query.filter(Sonda.nombre == nombre).first() if existe: flash('"La sonda ya existe !"') else: try: sonda = Sonda(nombre,fecha,observaciones) db.session.add(sonda) except Exception as error: print("ERROR ! al cargar la sonda!") print(str(error)) db.session.rollback() flash('"ERROR ! 
al cargar la sonda !"') else: db.session.commit() flash('La sonda fue agregada correctamente') return redirect(url_for('Listar_todas_las_sondas')) return render_template('Alta_sonda.html') @app.route('/Listar_todas_las_sondas') def Listar_todas_las_sondas(): return render_template('Listar_todas_las_sondas.html', sondas = Sonda.query.all() ) @app.route('/Historico_sondas') def Historico_sondas(): """"Genero el reporte de mediciones de equipos""" result = db.engine.execute("SELECT Equipo.nombre as nombre_equipo,\ Sonda.nombre as Sonda_nombre, \ Medicion.id as medcion_id, \ Certificado.id as certificado_id, \ Certificado.temp as certificado_temp, \ Certificado.correcion as certificado_correcion, \ Certificado.incert as certificado_incert \ FROM Equipo \ join Medicion on (Medicion.equipo_id = Equipo.id )\ join Sonda on (Sonda.id = Medicion.sonda_id )\ join Certificado on (Certificado.id = Medicion.id )\ ORDER BY Equipo.id;") print("imprimo lo que sale de la mega consulta") reporte = [] for lista in result: reporte.append(lista[:]) print(reporte) print("-----------") PDF.Historico_pdf_2(reporte) return render_template('Indice.html') #return render_template('Historico_sondas.html', ) if __name__ == '__main__': #creo el directorio para los certificados# now = datetime.datetime.now() nowAux = str(now) carpeta = nowAux[0:10] if not os.path.exists("certificados/"): os.makedirs("certificados/") print ("Carpeta creada:"+ "certificados/") if not os.path.exists("Historico_sondas/"): os.makedirs("Historico_sondas/") print ("Carpeta creada:"+ "Historico_sondas/") db.create_all() #app.run(debug = True)# para ambiente local #busco la ip local para poner en lan la app gw = os.popen("ip -4 route show default").read().split() s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.connect((gw[2], 0)) ipaddr = s.getsockname()[0] gateway = gw[2] host = socket.gethostname() print("---Datos de configuracion LAN---") print ("IP:", ipaddr, " GW:", gateway, " Host:", host) 
print("--------------------------------") #app.run(host='127.0.1.1')# poner la iplan ejemple app.run(host=ipaddr)
{"/run.py": ["/models.py", "/app_things.py", "/calculos.py", "/crear_pdf/crear_pdf.py"], "/calculos.py": ["/crear_pdf/crear_pdf.py"], "/models.py": ["/app_things.py"]}
36,472
matiasechaharria/Calibracion_termometros
refs/heads/master
/crear_pdf/__init__.py
# Package initialiser: expose the crear_pdf submodule as an attribute of the
# package so `import crear_pdf.crear_pdf as PDF` (see run.py) keeps working.
# FIX: the original `from crear_pdf import crear_pdf` only resolves when the
# package directory happens to be importable top-level; PEP 328's explicit
# relative form works regardless of where the package is mounted.
from . import crear_pdf
{"/run.py": ["/models.py", "/app_things.py", "/calculos.py", "/crear_pdf/crear_pdf.py"], "/calculos.py": ["/crear_pdf/crear_pdf.py"], "/models.py": ["/app_things.py"]}
36,473
matiasechaharria/Calibracion_termometros
refs/heads/master
/app_things.py
# Flask application and SQLAlchemy handle shared by the whole project
# (imported by models.py and run.py).
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
# MySQL connection via the PyMySQL driver.
# NOTE(review): database credentials and SECRET_KEY are hard-coded in source
# control — they should come from environment variables or a config file.
app.config['SQLALCHEMY_DATABASE_URI'] = "mysql+pymysql://flask_app:flask_pass@localhost/calibracion"
app.config['MYSQL_DATABASE_USER'] = 'flask_app'
app.config['MYSQL_DATABASE_PASSWORD'] = 'flask_pass'
app.config['SECRET_KEY'] = "random string"
db = SQLAlchemy(app)
{"/run.py": ["/models.py", "/app_things.py", "/calculos.py", "/crear_pdf/crear_pdf.py"], "/calculos.py": ["/crear_pdf/crear_pdf.py"], "/models.py": ["/app_things.py"]}
36,474
matiasechaharria/Calibracion_termometros
refs/heads/master
/crear_pdf/crear_pdf.py
#crea pdf con de los certificados #https://www.blog.pythonlibrary.org/2018/06/05/creating-pdfs-with-pyfpdf-and-python/ #relatorio # tambien sirve para exportar en distintos formatos from fpdf import FPDF, HTMLMixin import datetime import time class HTML2PDF(FPDF, HTMLMixin): pass class CustomPDF(FPDF): pass #def export_certificado(equipos): def export_certificado(temp,correccion,incertidumbre): now = datetime.datetime.now() nowAux = str(now) nombre_certificado = nowAux[0:20] pdf=FPDF(format='A4', unit='mm') pdf.add_page() pdf.set_font('Arial','',10.0) epw = pdf.w - 2*pdf.l_margin col_width = epw/4 # Since we do not need to draw lines anymore, there is no need to separate # headers from data matrix. th = pdf.font_size pdf.set_font('Times','B',14.0) pdf.cell(epw, 0.0, '', align='C') pdf.set_font('Times','',10.0) pdf.ln(0.5) # Here we add more padding by passing 2*th as height pdf.cell(col_width, 2*th, str( nombre_certificado ), border=1) pdf.cell(col_width, 2*th, str( temp ), border=1) pdf.cell(col_width, 2*th, str( correccion ), border=1) pdf.cell(col_width, 2*th, str( incertidumbre ), border=1) name = "certificados/"+"Certificado_"+str(nombre_certificado)+".pdf" pdf.output(name) def export_certificado2(temp,correccion,incertidumbre): pdf = HTML2PDF() now = datetime.datetime.now() nowAux = str(now) nombre_certificado = nowAux[0:10] print(temp) print(correccion) print(incertidumbre) table = ''' <h1 align="center">CERTIFICADO DE CALIBRACION</h1> <table border="0" align="center" width="50%"> <thead> <tr> <th width="30%">Temperatura</th> <th width="70%">Correccion</th> <th width="30%">Incertidumbre</th> </tr> </thead> <tbody> <tr><td>cell 1</td><td>cell 2</td><td>cell 3</td></tr> <tr><td>cell 4</td><td>cell 5</td><td>cell 6</td></tr> <tr><td>cell 7</td><td>cell 7</td><td>cell 9</td></tr> </tbody> </table> ''' pdf.add_page() pdf.write_html(table) name = str(nombre_certificado)+".pdf" pdf.output(name) def Historico_pdf(spacing=1): now = datetime.datetime.now() nowAux = 
str(now) nombre_certificado = nowAux[0:10] # data = [['First Name', 'Last Name', 'email', 'zip'], # ['Mike', 'Driscoll', 'mike@somewhere.com', '55555'], # ['John', 'Doe', 'jdoe@doe.com', '12345'], # ['Nina', 'Ma', 'inane@where.com', '54321'] # ] pdf = FPDF() pdf.set_font("Arial", size=12) pdf.add_page() col_width = pdf.w / 4.5 row_height = pdf.font_size for row in data: for item in row: pdf.cell(col_width, row_height*spacing, txt=item, border=1) pdf.ln(row_height*spacing) name = str(nombre_certificado)+"Historico_sondas"+".pdf" pdf.output(name) def simple_table(spacing=1): now = datetime.datetime.now() nowAux = str(now) nombre_certificado = nowAux[0:10] # data = [['First Name', 'Last Name', 'email', 'zip'], # ['Mike', 'Driscoll', 'mike@somewhere.com', '55555'], # ['John', 'Doe', 'jdoe@doe.com', '12345'], # ['Nina', 'Ma', 'inane@where.com', '54321'] # ] pdf = FPDF() pdf.set_font("Arial", size=12) pdf.add_page() col_width = pdf.w / 4.5 row_height = pdf.font_size for row in data: for item in row: pdf.cell(col_width, row_height*spacing, txt=item, border=1) pdf.ln(row_height*spacing) name = str(nombre_certificado)+".pdf" pdf.output(name) def Historico_pdf_2(data): pdf=FPDF(orientation = 'L',format='A4', unit='mm') pdf.add_page() pdf.set_font('Times','',10.0) # Effective page width, or just epw epw = pdf.w - 2*pdf.l_margin # Set column width to 1/4 of effective page width to distribute content # evenly across table and page col_width = epw/8 pdf.set_font('Times','B',14.0) pdf.cell(epw, 0.0, 'Historico de sondas', align='C') pdf.set_font('Times','',10.0) pdf.ln(5) th = pdf.font_size pdf.cell(col_width, 2*th, "Equipo", border=1) pdf.cell(col_width, 2*th, "Sonda", border=1) pdf.cell(col_width, 2*th, "Medicion", border=1) pdf.cell(col_width, 2*th, "Certificados", border=1) pdf.cell(col_width, 2*th, "Temperatura", border=1) pdf.cell(col_width, 2*th, "Correccion", border=1) pdf.cell(col_width, 2*th, "Incertidumbre", border=1) pdf.ln(th) pdf.ln(th) for row in data: for datum in 
row: pdf.cell(col_width, th, str(datum), border=1) pdf.ln(th) now = datetime.datetime.now() nowAux = str(now) nombre_pdf = nowAux[0:20] name = "Historico_sondas/"+str(nombre_pdf)+"Historico_pdf_2"+".pdf" pdf.output(name) if __name__ == '__main__': simple_table()
{"/run.py": ["/models.py", "/app_things.py", "/calculos.py", "/crear_pdf/crear_pdf.py"], "/calculos.py": ["/crear_pdf/crear_pdf.py"], "/models.py": ["/app_things.py"]}
36,475
matiasechaharria/Calibracion_termometros
refs/heads/master
/calculos.py
# Imports necesarios import numpy as np # importando numpy import matplotlib.pyplot as plt import pandas as pd # importando pandas import crear_pdf.crear_pdf as PDF #recta de regresion lineal def error_tipico(Certificado): """Esta funcion devuelve el valor del error tipico del equipo""" N=5 T=0.04 Sx=np.sum(Certificado, axis=0)[:1] Sy=np.sum(Certificado, axis=0)[1:2] Sxx_aux=np.power(Certificado[:],2) Sxx=np.sum(Sxx_aux, axis=0)[:1] #Sxy Sxy= Certificado[0][0]*Certificado[0][1]+Certificado[1][0]*Certificado[1][1]+ Certificado[2][0]*Certificado[2][1]+ Certificado[3][0]*Certificado[3][1]+Certificado[4][0]*Certificado[4][1] #Syy Syy_aux=np.power(Certificado,2) Syy=np.sum(Syy_aux, axis=0)[1:2] a_n_aux=N*Sxy-Sx*Sy b_n_aux=N*Sxx-Sx*Sx m=a_n_aux/b_n_aux a_m_aux=Sxx*Sy-Sx*Sxy n=a_m_aux/b_n_aux Cc=m*T+n #(n+m xi - yi)2 error_tipico= (n+m* Certificado[0][0]-Certificado[0][1])*(n+m* Certificado[0][0]-Certificado[0][1])+(n+m*Certificado[1][0]-Certificado[1][1])*(n+m*Certificado[1][0]-Certificado[1][1])+(n+m*Certificado[2][0]-Certificado[2][1])*(n+m*Certificado[2][0]-Certificado[2][1])+(n+m*Certificado[3][0]-Certificado[3][1])*(n+m*Certificado[3][0]-Certificado[3][1])+(n+m*Certificado[4][0]-Certificado[4][1])*(n+m*Certificado[4][0]-Certificado[4][1]) return(error_tipico) def interpolacion_patron(Certificado): """ busca el factor de correccion interpolado del certificado de calibracion""" #http://cs231n.github.io/python-numpy-tutorial/#numpy-arrays N=5 T=0.04 Sx=np.sum(Certificado, axis=0)[:1] Sy=np.sum(Certificado, axis=0)[1:2] Sxx_aux=np.power(Certificado[:],2) Sxx=np.sum(Sxx_aux, axis=0)[:1] #Sxy Sxy= Certificado[0][0]*Certificado[0][1]+Certificado[1][0]*Certificado[1][1]+ Certificado[2][0]*Certificado[2][1]+ Certificado[3][0]*Certificado[3][1]+Certificado[4][0]*Certificado[4][1] #Syy Syy_aux=np.power(Certificado,2) Syy=np.sum(Syy_aux, axis=0)[1:2] a_n_aux=N*Sxy-Sx*Sy b_n_aux=N*Sxx-Sx*Sx m=a_n_aux/b_n_aux a_m_aux=Sxx*Sy-Sx*Sxy n=a_m_aux/b_n_aux Cc=m*T+n #print("Cc= ", Cc) 
#print("recta de m*x+n = ", m,"T",n ) return(m,n) def temp_calibracion(Certificado,patron): """regresa el valor de Temperatura real de calibracion""" m,n=interpolacion_patron(Certificado) tcp=m*np.average(patron)+n return(tcp+np.average(patron)) def cifra_correccion(ebp,temp): """calcula la cifra de correccion""" return(temp-np.average(ebp)) def incertidumbre(Certificado,tcp): """busco valor de la incertidumbre tipo b""" if tcp<= Certificado[0][0] : return(Certificado[0][2]/2) elif Certificado[0][0] <= tcp and tcp<= Certificado[1][0]: return(Certificado[1][2]/2) elif Certificado[1][0] <= tcp and tcp<= Certificado[2][0]: return(Certificado[2][2]/2) elif Certificado[2][0] <= tcp and tcp<= Certificado[3][0]: return(Certificado[3][2]/2) elif Certificado[3][0] <= tcp and tcp<= Certificado[4][0]: return(Certificado[3][2]/2) elif Certificado[4][0] <= tcp: return(Certificado[4][2]/2) def valores_para_certificado(Certificado,patron,ebp,): """Retorna el valor de Temperatura, correccion e incertidumbre para el certificado de calibracion""" tcp=temp_calibracion(Certificado,patron) coorrecion=cifra_correccion(ebp,tcp) print("temp_calibracion",tcp) tipob=(incertidumbre(Certificado,tcp)) errort=error_tipico(Certificado) estabilidad=0.1/((12)**1/2) uniformidad=0.1/((12)**1/2) resolusion_patron=0.01/((12)**0.5) tipob_combinada=((tipob**2+estabilidad**2+uniformidad**2+resolusion_patron**2+errort**2)**0.5) resolusion_ebp=0.01/((12)**0.5) tipo_AB_combinada=(np.std(ebp)**2+resolusion_ebp**2+tipob_combinada**2 )**0.5 tipo_AB_combinada=tipo_AB_combinada*2 return(np.average(ebp),coorrecion ,tipo_AB_combinada) if __name__ == '__main__': patron=[-19.34, -19.34 ,-19.34 ,-19.34, -19.34 ,-19.34 ,-19.34, -19.34 ,-19.34 ,-19.34] ebp=[-18.42 ,-18.42, -18.42 ,-18.43, -18.43 ,-18.43, -18.43, -18.43, -18.43, -18.43] Certificado_INTI=[[ -31.14 , 0.27 ,0.04, 2], [-0.09, 0.09 ,0.05, 2], [37.14, -0.13 ,0.1, 2], [100.49 ,-0.35 ,0.04, 2], [200.74, -0.55 ,0.07, 2]] print("average ebp =",np.average(ebp)) 
print("average patron =",np.average(patron)) temp,correccion,incert= valores_para_certificado(Certificado_INTI,patron,ebp) print("Temperatura = "+str(temp) +" Correccion = " +str(correccion) +" Incertidumbre = "+ str( incert)) PDF.export_certificado(temp,correccion,incert) #PDF.simple_table()
{"/run.py": ["/models.py", "/app_things.py", "/calculos.py", "/crear_pdf/crear_pdf.py"], "/calculos.py": ["/crear_pdf/crear_pdf.py"], "/models.py": ["/app_things.py"]}
36,476
matiasechaharria/Calibracion_termometros
refs/heads/master
/models.py
from sqlalchemy import Table, Column, Integer, ForeignKey from sqlalchemy.orm import relationship from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import mapper from app_things import db ##----Definiciones de class ##http://flask-sqlalchemy.pocoo.org/2.3/models/ class Sonda(db.Model): #__tablename__ = 'Sonda' id = db.Column( db.Integer, primary_key = True) nombre = db.Column(db.String(100)) observaciones = db.Column(db.String(100)) fecha_alta = db.Column(db.String(100)) fecha_baja = db.Column(db.String(100)) activa = db.Column(db.Boolean, default=True) mediciones=db.relationship('Medicion', back_populates='sonda') def __init__(self, nombre, fecha, observaciones): self.nombre = nombre self.fecha_alta = fecha self.observaciones = observaciones class Equipo(db.Model): #__tablename__ = 'equipo' #id = db.Column('Equipo.id', db.Integer, primary_key = True) id = db.Column(db.Integer, primary_key = True) nombre =db.Column(db.String(50)) marca = db.Column(db.String(500)) modelo = db.Column(db.String(50)) n_serie = db.Column(db.String(50)) observaciones = db.Column(db.String(10)) fecha_alta = db.Column(db.String(100)) fecha_baja = db.Column(db.String(100)) activo = db.Column(db.Boolean, default=True) mediciones=db.relationship('Medicion', back_populates='equipo') def __init__(self, name, marca, modelo,n_serie,fecha,observaciones): self.nombre=name self.marca = marca self.modelo = modelo self.n_serie = n_serie self.observaciones = observaciones self.fecha_alta=fecha class Persona(db.Model): #__tablename__ = 'persona' id = db.Column( db.Integer, primary_key = True) nombre = db.Column(db.String(100),nullable=False) email = db.Column(db.String(50)) direccion = db.Column(db.String(200)) telefono = db.Column(db.String(10)) mediciones=db.relationship('Medicion', back_populates='midio') fecha_alta = db.Column(db.String(100)) fecha_baja = db.Column(db.String(100)) activo = db.Column(db.Boolean, default=True) def __init__(self, name= None, city= None, addr= 
None, telefono = None): self.nombre = name self.email = city self.direccion = addr self.telefono= telefono class Certificado_INTI(db.Model): #__tablename__ = 'Certificado' id = db.Column(db.Integer, primary_key = True) Fecha_de_calibracion = db.Column(db.String(100)) temp_n30=db.Column(db.Float) correcion_n30=db.Column(db.Float) incert_n30=db.Column(db.Float) temp_0=db.Column(db.Float) correcion_0=db.Column(db.Float) incert_0=db.Column(db.Float) temp_37=db.Column(db.Float) correcion_37=db.Column(db.Float) incert_37=db.Column(db.Float) temp_100=db.Column(db.Float) correcion_100=db.Column(db.Float) incert_100=db.Column(db.Float) temp_200=db.Column(db.Float) correcion_200=db.Column(db.Float) incert_200=db.Column(db.Float) certificados_internos=db.relationship('Certificado', back_populates='certificado_INTI') def __init__(self, \ temp_n30,\ correcion_n30,\ incert_n30,\ temp_0,\ correcion_0,\ incert_0,\ temp_37,\ correcion_37,\ incert_37,\ temp_100,\ correcion_100,\ incert_100,\ temp_200,\ correcion_200,\ incert_200 , Fecha_de_calibracion ): self.Fecha_de_calibracion=Fecha_de_calibracion self.temp_n30=temp_n30 self.correcion_n30=correcion_n30 self.incert_n30=incert_n30 self.temp_0=temp_0 self.correcion_0=correcion_0 self.incert_0=incert_0 self.temp_37=temp_37 self.correcion_37=correcion_37 self.incert_37=incert_37 self.temp_100=temp_100 self.correcion_100=correcion_100 self.incert_100=incert_100 self.temp_200=temp_200 self.correcion_200=correcion_200 self.incert_200=incert_200 class Certificado(db.Model): #__tablename__ = 'Certificado' id = db.Column(db.Integer, primary_key = True) Fecha_de_calibracion = db.Column(db.String(100)) temp=db.Column(db.Float) correcion=db.Column(db.Float) incert=db.Column(db.Float) medicion_id = db.Column(db.Integer, db.ForeignKey('medicion.id')) medicion = db.relationship("Medicion", back_populates='certificado') certificado_INTI_id = db.Column(db.Integer, db.ForeignKey('certificado_INTI.id')) certificado_INTI = 
db.relationship("Certificado_INTI", back_populates='certificados_internos') def __init__(self, medicion=None,temp=None,correcion=None,incertidumbre=None): #self.Fecha_de_calibracion=Fecha_de_calibracion self.temp=temp self.correcion=correcion self.incert=incertidumbre self.medicion=Medicion.query.filter(Medicion.id == medicion.id).one() self.certificado_INTI=Certificado_INTI.query.order_by(Certificado_INTI.id.desc()).first() class Medicion(db.Model): #__tablename__ = 'medicion' id = db.Column(db.Integer, primary_key = True) temp_ambiente = db.Column(db.Integer) humedad = db.Column(db.Integer) midio_id = db.Column(db.Integer, db.ForeignKey('persona.id')) midio = db.relationship("Persona", back_populates='mediciones') equipo_id = db.Column(db.Integer, db.ForeignKey('equipo.id')) equipo = db.relationship("Equipo", back_populates='mediciones') sonda_id = db.Column(db.Integer, db.ForeignKey('sonda.id')) sonda = db.relationship("Sonda", back_populates='mediciones') certificado=db.relationship('Certificado',uselist=False, back_populates='medicion') temp_pat0=db.Column(db.Float) temp_pat1=db.Column(db.Float) temp_pat2=db.Column(db.Float) temp_pat3=db.Column(db.Float) temp_pat4=db.Column(db.Float) temp_pat5=db.Column(db.Float) temp_pat6=db.Column(db.Float) temp_pat7=db.Column(db.Float) temp_pat8=db.Column(db.Float) temp_pat9=db.Column(db.Float) temp_ebp0=db.Column(db.Float) temp_ebp1=db.Column(db.Float) temp_ebp2=db.Column(db.Float) temp_ebp3=db.Column(db.Float) temp_ebp4=db.Column(db.Float) temp_ebp5=db.Column(db.Float) temp_ebp6=db.Column(db.Float) temp_ebp7=db.Column(db.Float) temp_ebp8=db.Column(db.Float) temp_ebp9=db.Column(db.Float) def __init__(self, temp_ambiente= None, humedad= None, persona=None, edp=None, sonda= None,\ temp_pat0=None,\ temp_pat1=None,\ temp_pat2=None,\ temp_pat3=None,\ temp_pat4=None,\ temp_pat5=None,\ temp_pat6=None,\ temp_pat7=None,\ temp_pat8=None,\ temp_pat9=None,\ temp_ebp0=None,\ temp_ebp1=None,\ temp_ebp2=None,\ temp_ebp3=None,\ 
temp_ebp4=None,\ temp_ebp5=None,\ temp_ebp6=None,\ temp_ebp7=None,\ temp_ebp8=None,\ temp_ebp9=None): self.temp_ambiente =temp_ambiente self.humedad=humedad self.midio = Persona.query.filter(Persona.nombre == persona).one() self.equipo= Equipo.query.filter(Equipo.nombre == edp).one() self.sonda= Sonda.query.filter(Sonda.nombre == sonda).one() self.temp_pat0=temp_pat0 self.temp_pat1=temp_pat1 self.temp_pat2=temp_pat2 self.temp_pat3=temp_pat3 self.temp_pat4=temp_pat4 self.temp_pat5=temp_pat5 self.temp_pat6=temp_pat6 self.temp_pat7=temp_pat7 self.temp_pat8=temp_pat8 self.temp_pat9=temp_pat9 self.temp_ebp0=temp_ebp0 self.temp_ebp1=temp_ebp1 self.temp_ebp2=temp_ebp2 self.temp_ebp3=temp_ebp3 self.temp_ebp4=temp_ebp4 self.temp_ebp5=temp_ebp5 self.temp_ebp6=temp_ebp6 self.temp_ebp7=temp_ebp7 self.temp_ebp8=temp_ebp8 self.temp_ebp9=temp_ebp9
{"/run.py": ["/models.py", "/app_things.py", "/calculos.py", "/crear_pdf/crear_pdf.py"], "/calculos.py": ["/crear_pdf/crear_pdf.py"], "/models.py": ["/app_things.py"]}
36,488
nicecore/Work-Log-DB
refs/heads/master
/worklogdb.py
""" Work Log With Database A Treehouse Tech Degree Project =============================== Allows a user to enter work logs into a database and search existing logs by employee name, task name, date, or keyword. By Adam D Cameron May-June 2017 """ import peewee import datetime import sys import os import unittest import doctest db = peewee.SqliteDatabase('employees.db') class Employee(peewee.Model): """Database model""" name = peewee.CharField(max_length=255) task_name = peewee.CharField(max_length=255) date_time = peewee.DateTimeField(default=datetime.datetime.now) minutes = peewee.IntegerField(default=0) notes = peewee.TextField() class Meta: database = db def __str__(self): return """Employee Name: {} Task: {} Minutes Spent: {} Notes: {} """.format(self.name.title(), self.task_name, self.minutes, self.notes) employees = Employee.select().order_by(Employee.date_time.desc()) def c_s(): """Clear screen.""" os.system('cls' if os.name == 'nt' else 'clear') def printer(results, paginated=True): """Print out search results""" if results: c_s() for i in results: timestamp = i.date_time.strftime('%A %B %d, %Y %I:%M %p') print(timestamp) print('=' * len(timestamp)) print(i) if paginated: print("\nFor next entry, hit ENTER.") next_action = input( "To return to main menu, press q and ENTER.\n> ") if next_action == 'q': main_menu() c_s() else: input( "There were no results! Press ENTER to return to the main menu...") def add_entry(): """Add a new entry to the work log.""" still_entering = True while still_entering: c_s() name = input("Please enter your name:\n> ").lower().strip() c_s() task_name = input("Enter the task name:\n> ") c_s() minutes = int( input("Enter timehe time to complete task, in minutes:\n> ")) c_s() print( "Please enter any notes for this task and press ctrl+d when finished.") note = sys.stdin.read().strip() if note: if input( """\n\nDo you want to save this entry? Please enter Y for YES and N for NO. If you select NO you will be returned to the main menu. 
""").lower() != 'n': Employee.create( name=name, task_name=task_name, minutes=minutes, notes=note) if input("Create another entry? [Yn] ").lower() != 'n': add_entry() else: main_menu() else: main_menu() def search_menu(): """Allow users to select a type of search.""" searching = True while searching: c_s() print("Please select from one of the following search options:") choice = input(""" [a] Find by employee name [b] Find by date [c] Find by time spent [d] Find by search term > """).lower() if choice == 'a': return name_search elif choice == 'b': return date_search elif choice == 'c': return time_search elif choice == 'd': return term_search def display_names(): """Display all names for whom posts already exist.""" employees = Employee.select().order_by(Employee.date_time.desc()) c_s() names = [] for employee in employees: if employee.name not in names: names.append(employee.name) print("Here are the employees with existing records:\n") for name in names: print(name.title()) def name_search(): """Prompt user to provide a name for search.""" employees = Employee.select().order_by(Employee.date_time.desc()) c_s() print("Here are the employees with existing records:\n") display_names() search = input("\nPlease enter a name from the list above:\n> ").lower() printer(name_query(search)) def name_query(search): """Query the database for a name.""" employees = Employee.select().order_by(Employee.date_time.desc()) return employees.where(Employee.name.contains(search)) def display_dates(): """Display all dates for which posts already exist.""" employees = Employee.select().order_by(Employee.date_time.desc()) dates = [] for i in employees: date = i.date_time.strftime("%m/%d/%Y") if date not in dates: dates.append(date) return dates def date_search(): """Prompt user to provide a date in a specific format.""" c_s() print("There are entries available for the following dates:") for i in display_dates(): print(i) try: search = input("\nPlease enter a date in MM/DD/YYYY 
format:\n> ") except EOFError: search = None if search not in display_dates(): c_s() input("There aren't any posts for that date!" "Press ENTER to return to the main menu...") date_search = datetime.datetime.strptime(search, "%m/%d/%Y").date() printer(date_query(date_search)) def date_query(search): """Query the database for a date.""" employees = Employee.select().order_by(Employee.date_time.desc()) return employees.where(Employee.date_time.contains(search)) def time_search(): """Prompt user to provide a length of time in minutes.""" c_s() employees = Employee.select().order_by(Employee.date_time.desc()) search = input("Please enter a number of minutes:\n> ") printer(time_query(search)) def time_query(search): """Query the database for a length of time in minutes.""" c_s() employees = Employee.select().order_by(Employee.date_time.desc()) return employees.where(Employee.minutes == search) def term_search(): """Prompt user to provide a term to search.""" c_s() search = input("Please enter any word or phrase to search:\n> ") printer(term_db_query(search)) def term_db_query(search): """Query the database for a search term.""" employees = Employee.select().order_by(Employee.date_time.desc()) return employees.where( (Employee.task_name.contains(search)) | (Employee.task_name.contains(search.lower()) | (Employee.notes.contains(search)) | (Employee.notes.contains(search.lower())) )) def main_menu(): """Main program prompt.""" choosing = True while choosing: c_s() choice = input("""Welcome, wage slave! Keep reaching for that rainbow! This work log has been provided by your benevolent masters. Please input one of the options below and hit ENTER. 
[a] Create new work log entry [b] Search older work log entries [q] Exit the work log and get back to work > """).lower() if choice == 'a': add_entry() elif choice == 'b': # output of search_menu() will be stored in search_choice search_choice = search_menu() search_choice() elif choice == 'q': quit() if __name__ == '__main__': db.connect() db.create_tables([Employee], safe=True) main_menu()
{"/tests.py": ["/worklogdb.py"]}
36,489
nicecore/Work-Log-DB
refs/heads/master
/holder.py
def user_select(): """Allow user to select their name.""" users = Employee.select().order_by(Employee.name.desc()) def determine_user(): """Determine whether active user is existing or new.""" c_s() print( """Welcome, wage slave! Keep reaching for that rainbow!\n This work log has been provided by your benevolent masters.\n Please confirm your identity or select New User to open your account. """) if input("Please enter N for new user and ENTER for existing user:\n> ").lower() == 'n': c_s() global active_user active_user = input("Please enter your name:\n> ") main_menu() else: print("Please enter the NUMBER corresponding with your name and hit ENTER") employees = Employee.select().order_by(Employee.name.desc()) # The above var contains an iterable of ALL records - they are just SORTED by name. They don't actually contain just the name. # Below the records are looped through as an enumerate() object, with the start being 1 instead of 0. # This necessitates subtracting one from whatever choice the user enters to retrieve the proper name by index. holder = [] for employee in enumerate(employees, start=1): # The index+1 is printed out, followed by the .name attribute of index 1 of each tuple, which is still the whole record itself. holder.append(employee) print("[{}] {}".format(employee[0], employee[1].name)) choice = int(input("\n> ")) global active_user active_user = holder[choice-1][1] main_menu()
{"/tests.py": ["/worklogdb.py"]}
36,490
nicecore/Work-Log-DB
refs/heads/master
/tests.py
import unittest from test import support import peewee import datetime import sys import os import worklogdb from worklogdb import * from worklogdb import Employee employees = Employee.select().order_by(Employee.date_time.desc()) class DatabaseModelTests(unittest.TestCase): def test_class_str_method(self): entry = Employee(name="Chico", task_name="Sing a song", date_time=peewee.DateTimeField(default=datetime.datetime.now), notes="na") assert 'Chico' in str(entry) def test_employees_is_instance(self): self.assertIsInstance(worklogdb.employees, peewee.SelectQuery) class SearchTests(unittest.TestCase): def setUp(self): # Datetime date object self.chico_date = datetime.date(2017, 6, 7) # Temporary 'self.chico' instance with self.chico_date as the date_time of post self.chico = Employee(name="Chico Buarque", task_name="Sing a song", date_time=self.chico_date, minutes=30, notes="A note here") self.chico.save() def test_name_search(self): self.assertIn(self.chico, worklogdb.name_query('chico buarque')) def test_time_search(self): self.assertIn(self.chico, worklogdb.time_query('30')) def test_term_db_query(self): self.assertIn(self.chico, worklogdb.term_db_query('note')) def test_date_search(self): # Assert if a call of date_query() containing a date object reflecting today's date self.assertIn(self.chico, worklogdb.date_query(self.chico_date)) def tearDown(self): self.chico.delete_instance() class DisplayDatesTest(unittest.TestCase): def setUp(self): self.dates = ['06/07/2017'] def test_display_dates(self): assert '06/07/2017' in worklogdb.display_dates() class DisplayAllRecordsTest(unittest.TestCase): def setUp(self): self.chico = Employee(name="Chico Buarque", task_name="Sing a song", minutes=30, notes="A note here") self.chico.save() def test_printer(self): with support.captured_stdout() as stdout: queryset = worklogdb.name_query('chico buarque') worklogdb.printer(queryset, paginated=False) assert "Chico" in stdout.getvalue() def tearDown(self): 
self.chico.delete_instance() class SearchMenuTest(unittest.TestCase): def test_search_menu_name_return(self): with support.captured_stdin() as stdin: stdin.write('a\n') stdin.seek(0) next_step = worklogdb.search_menu() self.assertIs(next_step, name_search) def test_search_date_return(self): with support.captured_stdin() as stdin: stdin.write('b\n') stdin.seek(0) next_step = worklogdb.search_menu() self.assertIs(next_step, date_search) def test_search_time_return(self): with support.captured_stdin() as stdin: stdin.write('c\n') stdin.seek(0) next_step = worklogdb.search_menu() self.assertIs(next_step, time_search) def test_search_term_return(self): with support.captured_stdin() as stdin: stdin.write('d\n') stdin.seek(0) next_step = worklogdb.search_menu() self.assertIs(next_step, term_search) if __name__ == '__main__': unittest.main()
{"/tests.py": ["/worklogdb.py"]}
36,501
P0labrD/inbac
refs/heads/master
/tests/test_inbac.py
import os import unittest import unittest.mock as mock from inbac.inbac import Application class TestInbac(unittest.TestCase): @mock.patch('inbac.inbac.os.path.isfile') def test_find_available_name_returns_passed_name_if_file_does_not_exist(self, mock_path_isfile): directory = "/home/test/" filename = "test.jpg" mock_path_isfile.return_value = False returned_filename = Application.find_available_name( directory, filename) mock_path_isfile.assert_called_with(os.path.join(directory, filename)) self.assertEqual(filename, returned_filename) @mock.patch('inbac.inbac.os.path.isfile') def test_find_available_name_returns_name_with_number_if_file_exists(self, mock_path_isfile): directory = "/home/test/" filename = "test.jpg" new_filename = "test2.jpg" mock_path_isfile.side_effect = file_exist returned_filename = Application.find_available_name( directory, filename) calls = [mock.call(os.path.join(directory, filename)), mock.call(os.path.join(directory, new_filename))] mock_path_isfile.assert_has_calls(calls) self.assertEqual(new_filename, returned_filename) def test_selection_box_for_aspect_ratio_returns_box_with_aspect_ratio(self): aspect_ratio = 16.0/9.0 mouse_press_coord = (0.0, 0.0) mouse_move_coord = (15.0, 9.0) selection_box = (0.0, 0.0, 15.0, 9.0) expected_selection_box = (0.0, 0.0, 16.0, 9.0) returned_selection_box = Application.get_selection_box_for_aspect_ratio(selection_box, aspect_ratio, mouse_press_coord, mouse_move_coord) self.assertEqual(expected_selection_box, returned_selection_box) def test_get_selected_box_returns_correct_selection_box_when_selecting_from_upper_left_to_bottom_right(self): mouse_press_coord = (0.0, 0.0) mouse_move_coord = (15.0, 9.0) expected_selection_box = (0.0, 0.0, 15.0, 9.0) returned_selection_box = Application.get_selected_box( mouse_press_coord, mouse_move_coord, None) self.assertEqual(expected_selection_box, returned_selection_box) def 
test_get_selected_box_returns_correct_selection_box_when_selecting_from_bottom_right_to_upper_left(self): mouse_press_coord = (15.0, 9.0) mouse_move_coord = (0.0, 0.0) expected_selection_box = (0.0, 0.0, 15.0, 9.0) returned_selection_box = Application.get_selected_box( mouse_press_coord, mouse_move_coord, None) self.assertEqual(expected_selection_box, returned_selection_box) def test_get_real_box(self): selected_box = (2, 2, 4, 4) expected_real_box = (4, 4, 8, 8) returned_real_box = Application.get_real_box( selected_box, (10, 10), (5, 5)) self.assertEqual(expected_real_box, returned_real_box) @mock.patch('inbac.inbac.os.listdir') def test_load_images_with_wrong_filetype(self, mock_listdir): mock_listdir.return_value = ["test.txt", "test2"] returned_images = Application.load_image_list("/home/test/") self.assertListEqual([], returned_images) @mock.patch('inbac.inbac.os.listdir') def test_load_images(self, mock_listdir): mock_listdir.return_value = ["test.txt", "test2.jpg"] directory = "/home/test/" returned_images = Application.load_image_list(directory) self.assertListEqual(["test2.jpg"], returned_images) def file_exist(x): if x == "/home/test/test.jpg": return True return False def main(): unittest.main(module='tests.test_inbac') if __name__ == '__main__': main()
{"/tests/test_inbac.py": ["/inbac/inbac.py"]}
36,502
P0labrD/inbac
refs/heads/master
/inbac/inbac.py
"""inbac: interactive batch image cropper built on tkinter + Pillow."""
import itertools
import mimetypes
import os
import tkinter as tk
from tkinter import filedialog

from PIL import Image, ImageTk

# NOTE: this module-level name is shadowed by the `args` parameter of
# Application.__init__; inside the class, `args` refers to parsed options.
import inbac.parse_arguments as args


class Application(tk.Frame):
    """Main window: displays one image at a time on a canvas and lets the
    user draw/move a selection rectangle and save cropped copies.

    Key bindings: z = save & next, x = save, arrows / mouse buttons 2 & 3 =
    navigate, left mouse = draw selection, Shift/Ctrl held = move an
    existing selection instead of drawing a new one.
    """

    def __init__(self, args, master=None):
        super().__init__(master)
        self.master = master
        self.args = args  # parsed CLI options (input_dir, output_dir, resize, ...)
        self.pack(fill=tk.BOTH, expand=tk.YES)
        self.image_canvas = tk.Canvas(self, highlightthickness=0)
        self.image_canvas.pack(fill=tk.BOTH, expand=tk.YES)
        self.master.geometry(
            str(self.args.window_size[0]) + "x" + str(self.args.window_size[1]))
        self.master.update()
        # Fall back to an interactive directory picker when no input dir given.
        if args.input_dir is None:
            args.input_dir = filedialog.askdirectory(parent=master)
        # Default output directory is <input_dir>/crops.
        # NOTE(review): getattr only fills the default when the attribute is
        # missing entirely; an explicit output_dir=None is kept as None — confirm
        # against parse_arguments.
        args.output_dir = getattr(args, "output_dir", os.path.join(args.input_dir, "crops"))
        self.images = self.load_image_list(self.args.input_dir)
        if not os.path.exists(self.args.output_dir):
            os.makedirs(self.args.output_dir)
        self.selection_box = None       # canvas rectangle item id, or None
        self.mouse_press_coord = (0, 0)
        self.mouse_move_coord = (0, 0)
        self.canvas_image = None        # canvas image item id, or None
        self.current_image = None       # PIL.Image currently loaded
        self.enabled_selection_mode = False  # True while Shift/Ctrl is held
        self.box_selected = False       # True while dragging an existing box
        self.master.bind('z', self.save_next)
        self.master.bind('x', self.save)
        self.master.bind('<Left>', self.previous_image)
        self.master.bind('<Right>', self.next_image)
        self.master.bind('<ButtonPress-3>', self.next_image)
        self.master.bind('<ButtonPress-2>', self.previous_image)
        self.master.bind('<ButtonPress-1>', self.on_mouse_down)
        self.master.bind('<B1-Motion>', self.on_mouse_drag)
        self.master.bind('<ButtonRelease-1>', self.on_mouse_up)
        self.master.bind('<KeyPress-Shift_L>', self.enable_selection_mode)
        self.master.bind('<KeyPress-Control_L>', self.enable_selection_mode)
        self.master.bind('<KeyRelease-Shift_L>', self.disable_selection_mode)
        self.master.bind('<KeyRelease-Control_L>', self.disable_selection_mode)
        self.current_file = 0  # index into self.images
        self.load_image(self.images[self.current_file])
        self.image_canvas.bind('<Configure>', self.on_resize)

    def display_image_on_canvas(self, image):
        """Show `image` on the canvas, downscaled to fit if necessary."""
        self.clear_canvas(self.image_canvas)
        self.current_image = image
        width = self.current_image.size[0]
        height = self.current_image.size[1]
        # Shrink (never enlarge) to fit the canvas while keeping aspect ratio.
        if width > self.image_canvas.winfo_width() or height > self.image_canvas.winfo_height():
            width_ratio = float(
                self.image_canvas.winfo_width()) / float(width)
            height_ratio = float(
                self.image_canvas.winfo_height()) / float(height)
            ratio = min(width_ratio, height_ratio)
            width = int(float(width) * float(ratio))
            height = int(float(height) * float(ratio))
        self.displayed_image = self.current_image.copy()
        # NOTE(review): Image.ANTIALIAS is deprecated/removed in newer Pillow
        # releases (use Image.LANCZOS) — confirm the pinned Pillow version.
        self.displayed_image.thumbnail(
            (width, height), Image.ANTIALIAS)
        self.displayed_image = ImageTk.PhotoImage(self.displayed_image)
        self.canvas_image = self.image_canvas.create_image(
            0, 0, anchor=tk.NW, image=self.displayed_image)

    def enable_selection_mode(self, event=None):
        # Entered while Shift/Ctrl is held: clicks grab the existing box.
        self.enabled_selection_mode = True

    def disable_selection_mode(self, event=None):
        self.enabled_selection_mode = False

    def clear_canvas(self, widget):
        """Remove both the selection rectangle and the displayed image."""
        self.clear_selection_box(widget)
        if self.canvas_image is not None:
            widget.delete(self.canvas_image)
            self.canvas_image = None

    def on_resize(self, event=None):
        # Re-render at the new canvas size (recomputes the fit ratio).
        self.display_image_on_canvas(self.current_image)

    def save_next(self, event=None):
        # check if image was selected then go to the next
        if self.save():
            self.next_image()

    def save(self, event=None):
        """Crop the current image to the selection and save it.

        Returns True when a crop was saved, False when there is no selection.
        """
        if self.selection_box is None:
            return False
        selected_box = self.image_canvas.coords(self.selection_box)
        displayed_image_size = (
            self.displayed_image.width(), self.displayed_image.height())
        # Map canvas coordinates back onto the full-resolution image.
        box = self.get_real_box(
            selected_box, self.current_image.size, displayed_image_size)
        new_filename = self.find_available_name(
            self.args.output_dir, self.images[self.current_file])
        saved_image = self.current_image.copy().crop(box)
        if self.args.resize:
            saved_image = saved_image.resize(
                (self.args.resize[0], self.args.resize[1]), Image.LANCZOS)
        if self.args.image_format:
            # NOTE(review): the extension is stripped and not replaced, so the
            # file is written without one; Pillow still encodes using the
            # explicit format argument — confirm this is intended.
            new_filename, _ = os.path.splitext(new_filename)
        saved_image.save(os.path.join(self.args.output_dir, new_filename),
                         self.args.image_format,
                         quality=self.args.image_quality)
        self.clear_selection_box(self.image_canvas)
        return True

    def next_image(self, event=None):
        """Advance to the next image; skip over unreadable files."""
        if self.current_file + 1 >= len(self.images):
            return
        self.current_file += 1
        try:
            self.load_image(self.images[self.current_file])
        except IOError:
            # Unreadable file: keep advancing.
            self.next_image()

    def previous_image(self, event=None):
        """Go back one image; skip over unreadable files."""
        if self.current_file - 1 < 0:
            return
        self.current_file -= 1
        try:
            self.load_image(self.images[self.current_file])
        except IOError:
            self.previous_image()

    def load_image(self, image_name):
        """Open `image_name` from the input dir and display it; sets title."""
        if self.current_image is not None:
            self.current_image.close()
            self.current_image = None
        image = Image.open(os.path.join(self.args.input_dir, image_name))
        self.display_image_on_canvas(image)
        self.master.title(image_name)

    def on_mouse_down(self, event):
        self.mouse_press_coord = (event.x, event.y)
        self.mouse_move_coord = (event.x, event.y)
        if self.enabled_selection_mode and self.selection_box is not None:
            # In selection mode, a click inside the existing box grabs it.
            selected_box = event.widget.coords(self.selection_box)
            self.box_selected = self.coordinates_in_selection_box(
                self.mouse_press_coord, selected_box)
        else:
            # Normal mode: a fresh click discards any previous selection.
            self.clear_selection_box(event.widget)

    def on_mouse_drag(self, event):
        # In selection mode, dragging only does something when a box was
        # grabbed on mouse-down; otherwise the drag is ignored.
        if self.enabled_selection_mode and not self.box_selected:
            return
        prev_mouse_move_coord = self.mouse_move_coord
        self.mouse_move_coord = (event.x, event.y)
        if self.box_selected:
            # Move the existing rectangle by the drag delta.
            x_delta = self.mouse_move_coord[0] - prev_mouse_move_coord[0]
            y_delta = self.mouse_move_coord[1] - prev_mouse_move_coord[1]
            event.widget.move(self.selection_box, x_delta, y_delta)
        else:
            # Draw / resize a new rectangle from press point to current point.
            self.update_selection_box(event.widget)

    def on_mouse_up(self, event):
        self.box_selected = False

    def clear_selection_box(self, widget):
        if self.selection_box is not None:
            widget.delete(self.selection_box)
            self.selection_box = None

    def update_selection_box(self, widget):
        """Create or reshape the selection rectangle on `widget`."""
        selected_box = self.get_selected_box(
            self.mouse_press_coord, self.mouse_move_coord, self.args.aspect_ratio)
        if self.selection_box is None:
            self.selection_box = widget.create_rectangle(
                selected_box,
                outline=self.args.selection_box_color)
        else:
            widget.coords(self.selection_box, selected_box)

    @staticmethod
    def get_real_box(selected_box, original_image_size, displayed_image_size):
        """Scale a canvas-space box up to original-image coordinates."""
        return (int(selected_box[0] * original_image_size[0]/displayed_image_size[0]),
                int(selected_box[1] * original_image_size[1] / displayed_image_size[1]),
                int(selected_box[2] * original_image_size[0] / displayed_image_size[0]),
                int(selected_box[3] * original_image_size[1]/displayed_image_size[1]))

    @staticmethod
    def get_selected_box(mouse_press_coord, mouse_move_coord, aspect_ratio):
        """Normalize two drag endpoints into a (left, top, right, bottom) box,
        optionally adjusted to `aspect_ratio` (a (w, h) pair or None)."""
        selection_top_left_x = min(mouse_press_coord[0], mouse_move_coord[0])
        selection_top_left_y = min(mouse_press_coord[1], mouse_move_coord[1])
        selection_bottom_right_x = max(
            mouse_press_coord[0], mouse_move_coord[0])
        selection_bottom_right_y = max(
            mouse_press_coord[1], mouse_move_coord[1])
        selection_box = (selection_top_left_x, selection_top_left_y,
                         selection_bottom_right_x, selection_bottom_right_y)
        if aspect_ratio is not None:
            aspect_ratio = float(aspect_ratio[0])/float(aspect_ratio[1])
            try:
                selection_box = Application.get_selection_box_for_aspect_ratio(
                    selection_box, aspect_ratio, mouse_press_coord, mouse_move_coord)
            except ZeroDivisionError:
                # Degenerate (zero-height) drag: keep the unadjusted box.
                pass
        # Round every coordinate to the nearest integer pixel.
        return tuple((lambda x: int(round(x)))(x) for x in selection_box)

    @staticmethod
    def load_image_list(directory):
        """Return filenames in `directory` whose MIME type is image/*."""
        images = []
        for filename in os.listdir(directory):
            filetype, _ = mimetypes.guess_type(filename)
            if filetype is None or filetype.split("/")[0] != "image":
                continue
            images.append(filename)
        return images

    @staticmethod
    def get_selection_box_for_aspect_ratio(selection_box, aspect_ratio,
                                           mouse_press_coord, mouse_move_coord):
        """Grow the box along one axis so width/height == aspect_ratio,
        extending in the direction of the drag."""
        selection_box = list(selection_box)
        width = selection_box[2] - selection_box[0]
        height = selection_box[3] - selection_box[1]
        if float(width)/float(height) > aspect_ratio:
            # Too wide: stretch the height.
            height = width / aspect_ratio
            if mouse_move_coord[1] > mouse_press_coord[1]:
                selection_box[3] = selection_box[1] + height
            else:
                selection_box[1] = selection_box[3] - height
        else:
            # Too tall: stretch the width.
            width = height * aspect_ratio
            if mouse_move_coord[0] > mouse_press_coord[0]:
                selection_box[2] = selection_box[0] + width
            else:
                selection_box[0] = selection_box[2] - width
        return tuple(selection_box)

    @staticmethod
    def find_available_name(directory, filename):
        """Return `filename`, or the first `name<N>ext` (N >= 2) that does not
        already exist in `directory`."""
        name, extension = os.path.splitext(filename)
        if not os.path.isfile(os.path.join(directory, filename)):
            return filename
        for num in itertools.count(2):
            if not os.path.isfile(os.path.join(directory, name + str(num) + extension)):
                return name + str(num) + extension

    @staticmethod
    def coordinates_in_selection_box(coordinates, selection_box):
        """True when `coordinates` lies strictly inside `selection_box`."""
        if (coordinates[0] > selection_box[0] and coordinates[0] < selection_box[2]
                and coordinates[1] > selection_box[1] and coordinates[1] < selection_box[3]):
            return True
        else:
            return False


def main():
    root = tk.Tk()
    app = Application(args.parse_arguments(), master=root)
    app.mainloop()


if __name__ == "__main__":
    main()
{"/tests/test_inbac.py": ["/inbac/inbac.py"]}
36,503
AndrewHawes/techdegree-project-8
refs/heads/master
/minerals/models.py
from django.db import models
from django.db.models import CharField, TextField


class DisplayField:
    """A single displayable model field: a human-readable name paired with
    its value, both reachable via dot notation (handy in template loops)."""

    def __init__(self, name, value):
        self.name = name
        self.value = value


class Mineral(models.Model):
    """A mineral and its physical/chemical properties.

    Every descriptive field except ``formula`` is nullable, since the source
    data is incomplete for many minerals.
    """

    name = CharField(max_length=255, unique=True)
    image_filename = CharField(max_length=200, null=True)
    image_caption = TextField(null=True)
    category = CharField(max_length=255, null=True)
    formula = TextField()
    group = CharField(max_length=255, null=True)
    strunz_classification = CharField(max_length=255, null=True)
    color = CharField(max_length=255, null=True)
    crystal_system = CharField(max_length=255, null=True)
    unit_cell = CharField(max_length=255, null=True)
    crystal_symmetry = CharField(max_length=255, null=True)
    cleavage = CharField(max_length=255, null=True)
    mohs_scale_hardness = CharField(max_length=255, null=True)
    luster = CharField(max_length=255, null=True)
    streak = CharField(max_length=255, null=True)
    diaphaneity = CharField(max_length=255, null=True)
    optical_properties = CharField(max_length=255, null=True)
    refractive_index = CharField(max_length=255, null=True)
    crystal_habit = CharField(max_length=255, null=True)
    specific_gravity = CharField(max_length=255, null=True)

    def __str__(self):
        return self.name

    @property
    def display_fields(self):
        """Ordered list of DisplayField objects for template rendering.

        Fields with empty/None values are omitted. The display order below
        is deliberate (most relevant attributes first).
        """
        pairs = (
            ('category', self.category),
            ('group', self.group),
            ('formula', self.formula),
            ('strunz classification', self.strunz_classification),
            ('crystal system', self.crystal_system),
            ('mohs scale hardness', self.mohs_scale_hardness),
            ('luster', self.luster),
            ('color', self.color),
            ('specific gravity', self.specific_gravity),
            ('cleavage', self.cleavage),
            ('diaphaneity', self.diaphaneity),
            ('crystal habit', self.crystal_habit),
            ('streak', self.streak),
            ('optical properties', self.optical_properties),
            ('refractive index', self.refractive_index),
            ('unit cell', self.unit_cell),
            ('crystal symmetry', self.crystal_symmetry),
        )
        return [DisplayField(label, value) for label, value in pairs if value]
{"/minerals/admin.py": ["/minerals/models.py"], "/minerals/tests.py": ["/minerals/models.py", "/minerals/templatetags/mineral_filters.py"], "/minerals/views.py": ["/minerals/models.py"]}
36,504
AndrewHawes/techdegree-project-8
refs/heads/master
/minerals/templatetags/mineral_filters.py
import re from django import template register = template.Library() @register.filter(name='base_name') def base_name(string): """ Removes suffixes and returns single base mineral name, leaving hyphenated names intact. ("Agardite-(Y)" will become "Agardite", but "Fluor-uvite" is unchanged.) """ pattern = r'^[\w-]+[\w]' match = re.match(pattern, string) if match: return match[0] else: return string
{"/minerals/admin.py": ["/minerals/models.py"], "/minerals/tests.py": ["/minerals/models.py", "/minerals/templatetags/mineral_filters.py"], "/minerals/views.py": ["/minerals/models.py"]}
36,505
AndrewHawes/techdegree-project-8
refs/heads/master
/minerals/context_processors.py
def mineral_filters(request): mineral_groups = [ 'Arsenates', 'Borates', 'Carbonates', 'Halides', 'Native Elements', 'Organic Minerals', 'Oxides', 'Phosphates', 'Silicates', 'Sulfates', 'Sulfides', 'Sulfosalts', 'Other' ] mineral_colors = [ 'Black', 'Blue', 'Bronze', 'Brown', 'Golden', 'Gray', 'Green', 'Orange', 'Olive', 'Pink', 'Purple', 'Red', 'Silver', 'Violet', 'White', 'Yellow' ] mineral_lusters = [ 'Adamantine', 'Brilliant', 'Dull', 'Earthy', 'Glassy', 'Greasy', 'Metallic', 'Pearly', 'Resinous', 'Silky', 'Splendent', 'Subadamantine', 'Submetallic', 'Subresinous', 'Subvitreous', 'Vitreous', 'Waxy' ] return {'mineral_groups': mineral_groups, 'mineral_colors': mineral_colors, 'mineral_lusters': mineral_lusters}
{"/minerals/admin.py": ["/minerals/models.py"], "/minerals/tests.py": ["/minerals/models.py", "/minerals/templatetags/mineral_filters.py"], "/minerals/views.py": ["/minerals/models.py"]}
36,506
AndrewHawes/techdegree-project-8
refs/heads/master
/minerals/migrations/0002_populate_database.py
# Generated by Django 2.2.3 on 2019-07-15 21:46 import sys from django.db import migrations def load_data(apps, schema_editor): import json with open('minerals/data/minerals.json') as file: data = json.load(file) Mineral = apps.get_model('minerals', 'Mineral') minerals = [] names = [] for entry in data: if entry['name'] in names: continue else: mineral = Mineral(**entry) names.append(mineral.name) minerals.append(mineral) try: Mineral.objects.bulk_create(minerals) except Exception as e: print("ERROR LOADING DATA:\n", e) class Migration(migrations.Migration): dependencies = [ ('minerals', '0001_initial'), ] operations = [ migrations.RunPython(load_data) ] if 'test' not in sys.argv[1:] else []
{"/minerals/admin.py": ["/minerals/models.py"], "/minerals/tests.py": ["/minerals/models.py", "/minerals/templatetags/mineral_filters.py"], "/minerals/views.py": ["/minerals/models.py"]}
36,507
AndrewHawes/techdegree-project-8
refs/heads/master
/minerals/admin.py
from django.contrib import admin from minerals.models import Mineral admin.site.register(Mineral)
{"/minerals/admin.py": ["/minerals/models.py"], "/minerals/tests.py": ["/minerals/models.py", "/minerals/templatetags/mineral_filters.py"], "/minerals/views.py": ["/minerals/models.py"]}
36,508
AndrewHawes/techdegree-project-8
refs/heads/master
/minerals/urls.py
from django.urls import path, include from . import views app_name = 'minerals' urlpatterns = [ path('', views.index, name='index'), path('minerals/<int:mineral_id>/', views.detail, name='detail'), path('random_mineral/', views.random_mineral, name='random_mineral'), path('search/', views.search, name='search'), path('letter_filter/<str:letter>/', views.letter_filter, name='letter_filter'), path('property_filter/<str:property>/<str:value>/', views.property_filter, name='property_filter'), ]
{"/minerals/admin.py": ["/minerals/models.py"], "/minerals/tests.py": ["/minerals/models.py", "/minerals/templatetags/mineral_filters.py"], "/minerals/views.py": ["/minerals/models.py"]}
36,509
AndrewHawes/techdegree-project-8
refs/heads/master
/minerals/tests.py
"""Tests for the minerals app: models, views, and template tags."""
from django.shortcuts import reverse
from django.test import TestCase

from minerals.models import Mineral, DisplayField
from minerals.templatetags.mineral_filters import base_name

# Fictional mineral names reused across view tests.
mineral_names = [
    'Arkhamite',
    'Azraelite',
    'Alfredite',
    'Ambienite',
    'Gordonite',
    'Gothamite',
    'Kalelite',
    'Kryptonite',
    'Karaite',
    'Zorelite',
]


def create_mineral(
        name,
        all_fields=True,
        image_filename='test.jpg',
        image_caption='caption test',
        group='group test',
        category='category test',
        formula='formula test',
        strunz_classification='strunz test',
        crystal_system='crystal system test',
        unit_cell='unit cell test',
        color='color test',
        crystal_symmetry='crystal symmetry test',
        cleavage='cleavage test',
        mohs_scale_hardness='mohs scale hardness test',
        luster='luster test',
        streak='streak test',
        diaphaneity='diaphaneity test',
        optical_properties='optical properties test',
        refractive_index='refractive index test',
        crystal_habit='crystal_habit test',
        specific_gravity='specific gravity test'):
    """Create and return a Mineral test fixture.

    With ``all_fields=False`` only image, category, and formula fields are
    populated, so tests can exercise the sparse-data path.
    """
    if all_fields:
        return Mineral.objects.create(
            name=name,
            image_filename=image_filename,
            image_caption=image_caption,
            category=category,
            formula=formula,
            strunz_classification=strunz_classification,
            crystal_system=crystal_system,
            unit_cell=unit_cell,
            color=color,
            crystal_symmetry=crystal_symmetry,
            cleavage=cleavage,
            mohs_scale_hardness=mohs_scale_hardness,
            luster=luster,
            streak=streak,
            diaphaneity=diaphaneity,
            optical_properties=optical_properties,
            refractive_index=refractive_index,
            crystal_habit=crystal_habit,
            specific_gravity=specific_gravity,
            group=group
        )
    else:
        # BUG FIX: previously hard-coded name='name', ignoring the `name`
        # argument — a second sparse fixture would have violated the unique
        # constraint on Mineral.name.
        return Mineral.objects.create(
            name=name,
            image_filename=image_filename,
            image_caption=image_caption,
            category='category test limited fields',
            formula='formula test limited fields'
        )


class ModelsTests(TestCase):
    def test_display_field_class(self):
        """
        DisplayField returns fields with dot notation accessible
        field name and value.
        """
        gothamite = create_mineral('Gothamite')
        field = DisplayField('formula', gothamite.formula)
        self.assertEqual(field.name, 'formula')
        self.assertEqual(field.value, 'formula test')

    def test_mineral_model_display_fields_all_fields(self):
        """
        Mineral model's display_fields returns list of fields with field
        names and values accessible by dot notation.
        """
        gothamite = create_mineral('Gothamite')
        fields = gothamite.display_fields
        self.assertEqual(len(fields), 17)
        self.assertEqual(fields[4].name, 'crystal system')
        self.assertEqual(fields[4].value, 'crystal system test')

    def test_mineral_model_display_fields_not_all_fields(self):
        """
        Mineral model's display_fields excludes fields without a value.
        """
        gothamite = create_mineral('Gothamite', all_fields=False)
        fields = gothamite.display_fields
        self.assertEqual(len(fields), 2)
        self.assertEqual(fields[0].name, 'category')
        self.assertEqual(fields[0].value, 'category test limited fields')
        self.assertEqual(fields[1].name, 'formula')
        self.assertEqual(fields[1].value, 'formula test limited fields')


class IndexViewTests(TestCase):
    def setUp(self):
        create_mineral('Gothamite')
        create_mineral('Kryptonite')

    def test_mineral_list(self):
        """Index view's queryset is equal to list of created minerals."""
        response = self.client.get(reverse('minerals:index'))
        self.assertQuerysetEqual(
            response.context['mineral_list'],
            ['<Mineral: Gothamite>', '<Mineral: Kryptonite>'],
            ordered=False
        )

    def test_display_minerals(self):
        """Created minerals are listed on the index page."""
        response = self.client.get(reverse('minerals:index'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Gothamite')
        self.assertContains(response, 'Kryptonite')


class DetailViewTests(TestCase):
    def test_no_wrap(self):
        """
        Display template splits mineral names with multiple variants
        to prevent wrapping on hyphen.
        """
        mineral = create_mineral("Gothamite-(Y), Gothamite-(Ce), Gothamite-(Nd)")
        url = reverse('minerals:detail', args=(mineral.id,))
        response = self.client.get(url)
        self.assertContains(response, 'white-space: nowrap;')

    def test_detail_display_all_fields(self):
        mineral = create_mineral("Gothamite")
        url = reverse('minerals:detail', args=(mineral.id,))
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Crystal Habit')

    def test_detail_display_not_all_fields(self):
        mineral = create_mineral("Gothamite", all_fields=False)
        url = reverse('minerals:detail', args=(mineral.id,))
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Formula')
        self.assertNotContains(response, 'Crystal Habit')


class RandomMineralViewTests(TestCase):
    def setUp(self):
        for mineral_name in mineral_names:
            create_mineral(mineral_name)

    def test_random_mineral_redirects(self):
        """Random mineral returns existing mineral."""
        response = self.client.get(reverse('minerals:random_mineral'))
        self.assertEqual(response.status_code, 302)


class SingleParameterSearchViewTests(TestCase):
    def setUp(self):
        for mineral_name in mineral_names:
            create_mineral(mineral_name)

    def test_search_name_single_result(self):
        query = 'krypt'
        url = reverse('minerals:search')
        context = {'query': query}
        response = self.client.get(url, context)
        self.assertEqual(response.status_code, 200)
        self.assertQuerysetEqual(
            response.context['mineral_list'],
            ['<Mineral: Kryptonite>'],
        )

    def test_search_name_multiple_results(self):
        query = 'nite'
        url = reverse('minerals:search')
        context = {'query': query}
        response = self.client.get(url, context)
        self.assertEqual(response.status_code, 200)
        self.assertQuerysetEqual(
            response.context['mineral_list'],
            ['<Mineral: Ambienite>', '<Mineral: Gordonite>',
             '<Mineral: Kryptonite>'],
            ordered=False
        )


class MultiParameterSearchViewTests(TestCase):
    def setUp(self):
        create_mineral('Arkhamite', group='Silicates',
                       color='greenish blue')
        create_mineral('Kalelite', category='Silicate',
                       luster='Brilliant')
        create_mineral('Gothamite', group='Arsenates',
                       color='reddish orange, crimson')
        create_mineral('Kryptonite', group='Other',
                       color='Usually green or red.')
        create_mineral('Robinite',
                       image_caption='A very silly looking mineral.')

    def test_search_single_result(self):
        context = {'query': 'Kryp', 'all_fields': True}
        url = reverse('minerals:search')
        response = self.client.get(url, context)
        self.assertEqual(response.status_code, 200)
        self.assertQuerysetEqual(
            response.context['mineral_list'],
            ['<Mineral: Kryptonite>'],
        )

    def test_search_multiple_results(self):
        context = {'query': 'Sil', 'all_fields': True}
        url = reverse('minerals:search')
        response = self.client.get(url, context)
        self.assertEqual(response.status_code, 200)
        self.assertQuerysetEqual(
            response.context['mineral_list'],
            ['<Mineral: Arkhamite>', '<Mineral: Kalelite>',
             '<Mineral: Robinite>'],
            ordered=False
        )

    def test_search_no_results(self):
        context = {'query': 'Revenge!!', 'all_fields': True}
        url = reverse('minerals:search')
        response = self.client.get(url, context)
        self.assertEqual(response.status_code, 200)
        mineral_list = response.context['mineral_list']
        self.assertEqual(mineral_list.count(), 0)


class LetterFilterViewTests(TestCase):
    def setUp(self):
        for mineral_name in mineral_names:
            create_mineral(mineral_name)

    def test_filter_a(self):
        url = reverse('minerals:letter_filter', args=('A',))
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        mineral_list = response.context['mineral_list']
        for mineral_name in mineral_list:
            self.assertTrue(mineral_name.name.startswith('A'))

    def test_filter_z(self):
        url = reverse('minerals:letter_filter', args=('Z',))
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        mineral_list = response.context['mineral_list']
        for mineral_name in mineral_list:
            self.assertTrue(mineral_name.name.startswith('Z'))

    def test_filter_no_results(self):
        url = reverse('minerals:letter_filter', args=('Q',))
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        mineral_list = response.context['mineral_list']
        self.assertEqual(mineral_list.count(), 0)


class PropertyFilterViewTests(TestCase):
    def setUp(self):
        create_mineral('Arkhamite', group='Silicates',
                       color='greenish blue')
        create_mineral('Ambienite', group='Sulfosalts',
                       color='green')
        create_mineral('Gothamite', group='Silicates',
                       color='reddish orange, crimson')
        create_mineral('Kryptonite', group='Other',
                       color='Usually green or red.')

    def test_group_filter_multiple_results(self):
        kwargs = {'property': 'group', 'value': 'Silicates'}
        url = reverse('minerals:property_filter', kwargs=kwargs)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertQuerysetEqual(
            response.context['mineral_list'],
            ['<Mineral: Arkhamite>',
             '<Mineral: Gothamite>',
             ],
            ordered=False
        )

    def test_group_filter_single_result(self):
        kwargs = {'property': 'group', 'value': 'Other'}
        url = reverse('minerals:property_filter', kwargs=kwargs)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertQuerysetEqual(
            response.context['mineral_list'],
            ['<Mineral: Kryptonite>']
        )

    def test_group_filter_no_results(self):
        kwargs = {'property': 'group', 'value': 'Ambien'}
        url = reverse('minerals:property_filter', kwargs=kwargs)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        mineral_list = response.context['mineral_list']
        self.assertEqual(mineral_list.count(), 0)

    def test_color_filter_multiple_results(self):
        kwargs = {'property': 'color', 'value': 'green'}
        url = reverse('minerals:property_filter', kwargs=kwargs)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertQuerysetEqual(
            response.context['mineral_list'],
            ['<Mineral: Arkhamite>',
             '<Mineral: Ambienite>',
             '<Mineral: Kryptonite>'
             ],
            ordered=False
        )

    def test_color_filter_single_result(self):
        kwargs = {'property': 'color', 'value': 'orange'}
        url = reverse('minerals:property_filter', kwargs=kwargs)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertQuerysetEqual(
            response.context['mineral_list'],
            ['<Mineral: Gothamite>']
        )

    def test_color_filter_no_results(self):
        kwargs = {'property': 'color', 'value': 'Steve'}
        url = reverse('minerals:property_filter', kwargs=kwargs)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        mineral_list = response.context['mineral_list']
        self.assertEqual(mineral_list.count(), 0)


class TemplateTagTests(TestCase):
    # (class name typo "TemplateTagTets" fixed; test discovery is unaffected)
    def test_base_name_no_hyphen(self):
        mineral_name = 'Gothamite-(Y), Gothamite-(Ce), Gothamite-(Nd)'
        mineral_base_name = base_name(mineral_name)
        self.assertEqual(mineral_base_name, 'Gothamite')

    def test_base_name_with_hyphen(self):
        mineral_name = 'Fluor-uvite-(Y), Fluor-uvite-(Hello)'
        mineral_base_name = base_name(mineral_name)
        self.assertEqual(mineral_base_name, 'Fluor-uvite')
{"/minerals/admin.py": ["/minerals/models.py"], "/minerals/tests.py": ["/minerals/models.py", "/minerals/templatetags/mineral_filters.py"], "/minerals/views.py": ["/minerals/models.py"]}
36,510
AndrewHawes/techdegree-project-8
refs/heads/master
/minerals/views.py
from random import choice from django.db.models import Q, AutoField from django.shortcuts import get_object_or_404, render, reverse from django.http import HttpResponseRedirect from .models import Mineral def index(request): mineral_list = Mineral.objects.all() return render(request, 'minerals/index.html', {'mineral_list': mineral_list}) def detail(request, mineral_id=None): mineral = get_object_or_404(Mineral, pk=mineral_id) return render(request, 'minerals/detail.html', {'mineral': mineral}) def random_mineral(request): mineral_list = Mineral.objects.all() mineral = choice(mineral_list) return HttpResponseRedirect(reverse('minerals:detail', args=(mineral.id,))) def search(request): search_term = request.GET.get('query') all_fields = request.GET.get('all_fields') if all_fields: fields = [f for f in Mineral._meta.fields if not isinstance(f, AutoField)] queries = [Q(**{field.name + '__icontains': search_term}) for field in fields] qs = Q() for query in queries: qs = qs | query mineral_list = Mineral.objects.filter(qs) else: mineral_list = Mineral.objects.filter(name__icontains=search_term) return render(request, 'minerals/index.html', {'mineral_list': mineral_list}) def letter_filter(request, letter): mineral_list = Mineral.objects.filter(name__startswith=letter) context = {'mineral_list': mineral_list, 'active_filter': letter} return render(request, 'minerals/index.html', context) def property_filter(request, property, value): query = Q(**{property + '__icontains': value}) mineral_list = Mineral.objects.filter(query) context = {'mineral_list': mineral_list, 'active_filter': value} return render(request, 'minerals/index.html', context)
{"/minerals/admin.py": ["/minerals/models.py"], "/minerals/tests.py": ["/minerals/models.py", "/minerals/templatetags/mineral_filters.py"], "/minerals/views.py": ["/minerals/models.py"]}
36,511
ArtoLord/yandex_delivery
refs/heads/master
/app/views.py
from rest_framework.viewsets import ModelViewSet from rest_framework import serializers from rest_framework.views import Response from .serializers import CourierSerializer, UpdateCourierSerializer, OrderSerializer, AssignationSerializer from .models import Courier, Order, Assignation from rest_framework.decorators import action from .logic import get_possible_orders, assign_to, complete from django.utils.timezone import datetime class CourierViewSet(ModelViewSet): queryset = Courier.objects.all() def get_serializer_class(self): if self.action == 'partial_update': return UpdateCourierSerializer return CourierSerializer def create(self, request, *args, **kwargs): data = request.data.get('data') couriers_to_save = [] errors = [] if not data or not isinstance(data, list): raise serializers.ValidationError('Data not present or is not a list') for courier_data in data: courier = CourierSerializer(data=courier_data) if courier.is_valid(): couriers_to_save.append(courier) continue errors.append({'id': courier_data.get('courier_id'), 'errors': {**courier.errors}}) if errors: raise serializers.ValidationError({"validation_error": {"couriers": errors}}) for courier in couriers_to_save: courier.save() return Response({"couriers": [{"id": courier.data['courier_id']} for courier in couriers_to_save]}, status=201) class OrderViewSet(ModelViewSet): queryset = Order.objects.all() serializer_class = OrderSerializer @action(["POST"], detail=False) def complete(self, request): courier = Courier.objects.filter(courier_id=request.data.get('courier_id')) order = Order.objects.filter(order_id=request.data.get('order_id')) complete_time = request.data.get('complete_time') if not complete_time or not courier or not order: return Response(status=400) courier = courier.first() order = order.first() if courier != order.assigned_to.courier: return Response(status=400) if order.complete_time: return Response({"order_id": order.order_id}) complete(order, courier, complete_time) return 
Response({"order_id": order.order_id}) @action(["POST"], detail=False) def assign(self, request): courier_id = request.data.get("courier_id") courier = Courier.objects.filter(courier_id=courier_id) if not courier: return Response({"error": "wrong courier id"}, status=400) courier = courier.first() orders = get_possible_orders(courier) assign = assign_to(courier, orders) if not assign.orders.all(): return Response({"orders": []}) return Response(AssignationSerializer(assign).data) def create(self, request, *args, **kwargs): data = request.data.get('data') orders_to_save = [] errors = [] if not data or not isinstance(data, list): raise serializers.ValidationError('Data not present or is not a list') for order_data in data: order = OrderSerializer(data=order_data) if order.is_valid(): orders_to_save.append(order) continue errors.append({'id': order_data.get('order_id'), 'errors': {**order.errors}}) if errors: raise serializers.ValidationError({"validation_error": {"orders": errors}}) for order in orders_to_save: order.save() return Response({"orders": [{"id": order.data['order_id']} for order in orders_to_save]}, status=201)
{"/app/views.py": ["/app/serializers.py", "/app/models.py", "/app/logic.py"], "/app/serializers.py": ["/app/models.py"], "/app/tests.py": ["/app/logic.py"], "/app/logic.py": ["/app/models.py"]}
36,512
ArtoLord/yandex_delivery
refs/heads/master
/app/migrations/0001_initial.py
# Generated by Django 3.1.7 on 2021-03-06 07:53 import django.contrib.postgres.fields from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Region', fields=[ ('region_id', models.IntegerField(primary_key=True, serialize=False, unique=True)), ], ), migrations.CreateModel( name='Order', fields=[ ('order_id', models.IntegerField(primary_key=True, serialize=False, unique=True)), ('weight', models.FloatField()), ('delivery_hours', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=16), size=None)), ('region', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='orders', to='app.region')), ], ), migrations.CreateModel( name='Courier', fields=[ ('courier_id', models.IntegerField(primary_key=True, serialize=False, unique=True)), ('courier_type', models.CharField(choices=[('foot', 'Foot'), ('bike', 'Bike'), ('car', 'Car')], max_length=4)), ('working_hours', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=16), size=None)), ('rating', models.FloatField()), ('earnings', models.IntegerField()), ('regions', models.ManyToManyField(related_name='couriers', to='app.Region')), ], ), ]
{"/app/views.py": ["/app/serializers.py", "/app/models.py", "/app/logic.py"], "/app/serializers.py": ["/app/models.py"], "/app/tests.py": ["/app/logic.py"], "/app/logic.py": ["/app/models.py"]}
36,513
ArtoLord/yandex_delivery
refs/heads/master
/app/migrations/0005_auto_20210311_0949.py
# Generated by Django 3.1.7 on 2021-03-11 09:49 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('app', '0004_auto_20210311_0911'), ] operations = [ migrations.RemoveField( model_name='order', name='delivery_time' ), migrations.AddField( model_name='order', name='delivery_time', field=models.DurationField(default=None, null=True), ), ]
{"/app/views.py": ["/app/serializers.py", "/app/models.py", "/app/logic.py"], "/app/serializers.py": ["/app/models.py"], "/app/tests.py": ["/app/logic.py"], "/app/logic.py": ["/app/models.py"]}
36,514
ArtoLord/yandex_delivery
refs/heads/master
/app/serializers.py
from .models import Courier, Region, Order, Assignation from rest_framework import serializers from django.core import validators class CourierSerializer(serializers.ModelSerializer): class Meta: model = Courier fields = ['courier_id', 'courier_type', 'working_hours', 'regions', 'rating', 'earnings'] read_only_fields = ['rating', 'earnings'] working_hours = serializers.ListSerializer( allow_empty=True, child=serializers.CharField( max_length=16, validators=(validators.RegexValidator(r'^\d\d:\d\d-\d\d:\d\d$', message="Wrong timerange format"),) ) ) def to_internal_value(self, data): if not data.get('regions'): return super(CourierSerializer, self).to_internal_value(data) for region_id in data['regions']: if not isinstance(region_id, int): continue Region.objects.get_or_create(region_id=region_id) return super(CourierSerializer, self).to_internal_value(data) class UpdateCourierSerializer(CourierSerializer): class Meta: model = Courier fields = ['courier_id', 'courier_type', 'working_hours', 'regions'] read_only_fields = ['courier_id'] class OrderSerializer(serializers.ModelSerializer): class Meta: model = Order fields = ['order_id', 'weight', 'region', 'delivery_hours'] delivery_hours = serializers.ListSerializer( allow_empty=False, child=serializers.CharField( max_length=16, validators=(validators.RegexValidator(r'^\d\d:\d\d-\d\d:\d\d$', message="Wrong timerange format"),) ) ) def to_internal_value(self, data): region_id = data.get('region') if isinstance(region_id, int): Region.objects.get_or_create(region_id=region_id) return super(OrderSerializer, self).to_internal_value(data) class OrderIdSerializer(serializers.ModelSerializer): class Meta: model = Order fields = ['id'] id = serializers.IntegerField(source='order_id') class AssignationSerializer(serializers.ModelSerializer): class Meta: model = Assignation fields = ['orders', 'assign_time'] orders = OrderIdSerializer(many=True, source="not_completed_orders")
{"/app/views.py": ["/app/serializers.py", "/app/models.py", "/app/logic.py"], "/app/serializers.py": ["/app/models.py"], "/app/tests.py": ["/app/logic.py"], "/app/logic.py": ["/app/models.py"]}
36,515
ArtoLord/yandex_delivery
refs/heads/master
/app/migrations/0002_auto_20210310_1545.py
# Generated by Django 3.1.7 on 2021-03-10 15:45 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('app', '0001_initial'), ] operations = [ migrations.AddField( model_name='order', name='assigned_to', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='assigned_orders', to='app.courier'), ), migrations.AlterField( model_name='courier', name='earnings', field=models.IntegerField(default=0), ), migrations.AlterField( model_name='courier', name='rating', field=models.FloatField(default=0), ), ]
{"/app/views.py": ["/app/serializers.py", "/app/models.py", "/app/logic.py"], "/app/serializers.py": ["/app/models.py"], "/app/tests.py": ["/app/logic.py"], "/app/logic.py": ["/app/models.py"]}
36,516
ArtoLord/yandex_delivery
refs/heads/master
/app/migrations/0003_auto_20210310_1612.py
# Generated by Django 3.1.7 on 2021-03-10 16:12 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('app', '0002_auto_20210310_1545'), ] operations = [ migrations.AddField( model_name='order', name='complete_time', field=models.DateTimeField(default=None, null=True), ), migrations.CreateModel( name='Assignation', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('assign_time', models.DateTimeField(auto_now=True)), ('courier', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='assignation', to='app.courier')), ], ), migrations.AlterField( model_name='order', name='assigned_to', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='orders', to='app.assignation'), ), ]
{"/app/views.py": ["/app/serializers.py", "/app/models.py", "/app/logic.py"], "/app/serializers.py": ["/app/models.py"], "/app/tests.py": ["/app/logic.py"], "/app/logic.py": ["/app/models.py"]}
36,517
ArtoLord/yandex_delivery
refs/heads/master
/app/models.py
from django.db import models from django.db.models import Avg, Min from django.contrib.postgres.fields import ArrayField, RangeField class CourierTypes(models.TextChoices): FOOT = "foot" BIKE = "bike" CAR = "car" class Region(models.Model): region_id = models.IntegerField(primary_key=True, unique=True) def __repr__(self): return str(self.region_id) class Courier(models.Model): courier_id = models.IntegerField(primary_key=True, unique=True) courier_type = models.CharField( choices=CourierTypes.choices, max_length=4 ) working_hours = ArrayField(models.CharField(max_length=16)) regions = models.ManyToManyField(Region, related_name="couriers") assigned_now = models.BooleanField(default=False) __type_to_max_weight_dict = { CourierTypes.FOOT: 10, CourierTypes.BIKE: 15, CourierTypes.CAR: 50 } @property def current_assignation(self): if self.assigned_now: return self.assignations.order_by('-assign_time').first() return None @property def max_weight(self): return self.__type_to_max_weight_dict[CourierTypes(self.courier_type)] @property def rating(self): if not self.completed_orders: return None rating = self.completed_orders.values('region').annotate( avg=Avg('delivery_time') ).aggregate(Min('avg')).get('avg_min', 0) return (60 * 60 - min(rating, 60 * 60))/(60 * 60) * 5 __type_to_c_dict = { CourierTypes.FOOT: 2, CourierTypes.BIKE: 5, CourierTypes.CAR: 9 } @property def earnings(self): n = self.assignations.exclude(pk=self.current_assignation.pk if self.assigned_now else -1).count() return n * 500 * self.__type_to_c_dict[CourierTypes(self.courier_type)] @property def completed_orders(self): return Order.objects.filter( complete_time__isnull=False, assigned_to__courier=self ) class Assignation(models.Model): courier = models.ForeignKey(Courier, on_delete=models.CASCADE, related_name="assignations") assign_time = models.DateTimeField(auto_now=True) @property def not_completed_orders(self): return self.orders.filter(complete_time=None) class Order(models.Model): assigned_to = 
models.ForeignKey(Assignation, on_delete=models.SET_NULL, null=True, related_name="orders") complete_time = models.DateTimeField(null=True, default=None) order_id = models.IntegerField(primary_key=True, unique=True) weight = models.FloatField() region = models.ForeignKey(Region, on_delete=models.CASCADE, related_name="orders") delivery_hours = ArrayField(models.CharField(max_length=16)) delivery_time = models.DurationField(null=True, default=None)
{"/app/views.py": ["/app/serializers.py", "/app/models.py", "/app/logic.py"], "/app/serializers.py": ["/app/models.py"], "/app/tests.py": ["/app/logic.py"], "/app/logic.py": ["/app/models.py"]}
36,518
ArtoLord/yandex_delivery
refs/heads/master
/app/tests.py
from rest_framework.test import APITestCase from django.test import Client import json from .logic import TimeRange class CourierApiTestCase(APITestCase): def setUp(self): self.client = Client() def test_creation(self): data = { "data": [ { "courier_id": 1, "courier_type": "car", "regions": [ 1, 2 ], "working_hours": [ "10:20-11:30" ] }, { "courier_id": 2, "courier_type": "foot", "regions": [ 3, 4 ], "working_hours": [ ] } ] } response = self.client.post( '/couriers', json.dumps(data), content_type='application/json') self.assertEqual(response.status_code, 201) self.assertEqual(json.loads(response.content), { "couriers": [ { "id": 1 }, { "id": 2 } ] }) def test_not_unique_id(self): data = { "data": [ { "courier_id": 1, "courier_type": "car", "regions": [], "working_hours": [] }, { "courier_id": 1, "courier_type": "foot", "regions": [], "working_hours": [] } ] } response = self.client.post( '/couriers', json.dumps(data), content_type='application/json') self.assertEquals(response.status_code, 400) def test_wrong_time_format(self): data = { "data": [ { "courier_id": 1, "courier_type": "car", "regions": [], "working_hours": ["abc"] } ] } response = self.client.post( '/couriers', json.dumps(data), content_type='application/json') self.assertEquals(response.status_code, 400) def test_get(self): data = { "data": [ { "courier_id": 1, "courier_type": "car", "regions": [1], "working_hours": [] } ] } response = self.client.post( '/couriers', json.dumps(data), content_type='application/json') self.assertEquals(response.status_code, 201) response = self.client.get('/couriers/1') self.assertEquals(response.status_code, 200) response = self.client.get('/couriers/2') self.assertEquals(response.status_code, 404) def test_patch(self): data = { "data": [ { "courier_id": 1, "courier_type": "car", "regions": [1], "working_hours": [] } ] } response = self.client.post( '/couriers', json.dumps(data), content_type='application/json') self.assertEquals(response.status_code, 201) response = 
self.client.patch( '/couriers/1', json.dumps( { "courier_type": "bike", "regions": [1, 3, 4], "working_hours": ["10:20-11:30"] } ), content_type='application/json' ) self.assertEquals(response.status_code, 200) self.assertEquals( { "courier_id": 1, "courier_type": "bike", "regions": [1, 3, 4], "working_hours": ["10:20-11:30"] }, json.loads(response.content) ) class TimeRangeTestCase(APITestCase): def test_intersect(self): data1 = ["10:20-10:40", "10:45-10:50"] data2 = ["10:25-10:35"] self.assertTrue(TimeRange(data1).intersect(TimeRange(data2))) data2 = ["10:40-10:44"] self.assertTrue(TimeRange(data1).intersect(TimeRange(data2))) data2 = ["10:41-10:45"] self.assertTrue(TimeRange(data1).intersect(TimeRange(data2))) data2 = ["10:41-10:44"] self.assertFalse(TimeRange(data1).intersect(TimeRange(data2))) class OrderTestCase(APITestCase): def setUp(self): self.client = Client() def couriers_setUp(self): couriers = { "data": [ { "courier_id": 1, "courier_type": "foot", "regions": [100], "working_hours": ["11:35-14:05", "09:00-11:00"] }, { "courier_id": 2, "courier_type": "bike", "regions": [1, 12], "working_hours": ["09:00-18:00"] }, { "courier_id": 3, "courier_type": "car", "regions": [12, 22, 23, 33], "working_hours": [] } ] } self.client.post( '/couriers', json.dumps(couriers), content_type='application/json' ) def orders_SetUp(self): orders = { "data": [ { "order_id": 1, "weight": 0.23, "region": 12, "delivery_hours": ["09:00-18:00"] }, { "order_id": 2, "weight": 15, "region": 1, "delivery_hours": ["09:00-18:00"] }, { "order_id": 3, "weight": 0.01, "region": 22, "delivery_hours": ["09:00-12:00", "16:00-21:30"] } ] } self.client.post( "/orders", json.dumps(orders), content_type='application/json' ) def test_creation(self): data = { "data": [ { "order_id": 1, "weight": 0.23, "region": 12, "delivery_hours": ["09:00-18:00"] }, { "order_id": 2, "weight": 15, "region": 1, "delivery_hours": ["09:00-18:00"] }, { "order_id": 3, "weight": 0.01, "region": 22, "delivery_hours": 
["09:00-12:00", "16:00-21:30"] } ] } response = self.client.post("/orders", json.dumps(data), content_type='application/json') self.assertEquals(response.status_code, 201) self.assertEquals(json.loads(response.content), {"orders": [{"id": 1}, {"id": 2}, {"id": 3}]}) def test_assign(self): self.couriers_setUp() self.orders_SetUp() data = { "courier_id": 2 } response = self.client.post( "/orders/assign", json.dumps(data), content_type='application/json' ) self.assertEquals(response.status_code, 200) self.assertEquals( json.loads(response.content)['orders'], [{"id": 1}, {"id": 2}] ) self.assertIn("assign_time", json.loads(response.content).keys()) def test_complete(self): self.test_assign() data = { "courier_id": 2, "order_id": 2, "complete_time": "2021-01-10T10:33:01.42Z" } response = self.client.post( "/orders/complete", json.dumps(data), content_type='application/json' ) self.assertEquals(response.status_code, 200) self.assertEquals( json.loads(response.content), { "order_id": 2 } ) def test_assign_wrong_courier(self): self.orders_SetUp() self.couriers_setUp() data = { "courier_id": 110 } response = self.client.post( "/orders/assign", json.dumps(data), content_type='application/json' ) self.assertEquals(response.status_code, 400) def test_assign_not_found(self): self.orders_SetUp() self.couriers_setUp() data = { "courier_id": 1 } response = self.client.post( "/orders/assign", json.dumps(data), content_type='application/json' ) self.assertEquals(response.status_code, 200) self.assertEquals( json.loads(response.content), { "orders": [] } )
{"/app/views.py": ["/app/serializers.py", "/app/models.py", "/app/logic.py"], "/app/serializers.py": ["/app/models.py"], "/app/tests.py": ["/app/logic.py"], "/app/logic.py": ["/app/models.py"]}
36,519
ArtoLord/yandex_delivery
refs/heads/master
/app/migrations/0004_auto_20210311_0911.py
# Generated by Django 3.1.7 on 2021-03-11 09:11 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('app', '0003_auto_20210310_1612'), ] operations = [ migrations.RemoveField( model_name='courier', name='rating', ), migrations.AddField( model_name='courier', name='assigned_now', field=models.BooleanField(default=False), ), migrations.AddField( model_name='order', name='delivery_time', field=models.DateTimeField(default=None, null=True), ), migrations.AlterField( model_name='assignation', name='courier', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='assignations', to='app.courier'), ), ]
{"/app/views.py": ["/app/serializers.py", "/app/models.py", "/app/logic.py"], "/app/serializers.py": ["/app/models.py"], "/app/tests.py": ["/app/logic.py"], "/app/logic.py": ["/app/models.py"]}
36,520
ArtoLord/yandex_delivery
refs/heads/master
/app/logic.py
from .models import Order, Assignation, Courier from sortedcontainers import SortedList from django.utils.dateparse import parse_datetime from django.utils.timezone import datetime from django.db.models.signals import pre_save from django.dispatch import receiver class TimeRange: @staticmethod def __parse_time(timerange): time1 = int(timerange[:2]) * 60 + int(timerange[3:5]) timerange = timerange[6:] time2 = int(timerange[:2]) * 60 + int(timerange[3:5]) return time1, time2 def __init__(self, times): self.times = SortedList() for time1, time2 in map(self.__parse_time, times): self.times.update([(time1, -1), (time2, 1)]) def intersect(self, other): new = SortedList() new.update(self.times) # copy times count = [0, 0] for time, typ in other.times: new.add((time, 2 * typ)) for time, typ in new: count[typ % 2] += typ if count[0] * count[1] != 0: return True return False def get_possible_orders(courier, assigned_to=None): if courier.assigned_now and not assigned_to: return [] orders = Order.objects.filter( region__in=courier.regions.all(), weight__lte=courier.max_weight, assigned_to=assigned_to ) courier_times = TimeRange(courier.working_hours) return [order for order in orders if courier_times.intersect(TimeRange(order.delivery_hours))] def assign_to(courier, orders): if courier.assigned_now: return courier.current_assignation if not orders: if not courier.assigned_now: return Assignation(courier=courier, assign_time=datetime.now()) assign = Assignation.objects.create(courier=courier) for order in orders: order.assigned_to = assign order.save() courier.assigned_now = True courier.save() return assign def complete(order, courier, complete_time): time = parse_datetime(complete_time) orders = courier.completed_orders.order_by('-complete_time') if not orders: order.delivery_time = time - courier.current_assignation.assign_time order.complete_time = time order.save() return order order.delivery_time = time - orders.first().complete_time order.complete_time = complete_time 
order.save() courier.assigned_now = bool(courier.current_assignation.not_completed_orders) courier.save() return order @receiver(pre_save, sender=Courier) def courier_post_save(sender, instance, *args, **kwargs): if assignation := instance.current_assignation: orders = get_possible_orders(instance, assigned_to=assignation) for order in assignation.orders.all(): if order not in orders: order.assigned_to = None order.save() if assignation.orders.count() == 0: assignation.delete() instance.assigned_now = False
{"/app/views.py": ["/app/serializers.py", "/app/models.py", "/app/logic.py"], "/app/serializers.py": ["/app/models.py"], "/app/tests.py": ["/app/logic.py"], "/app/logic.py": ["/app/models.py"]}
36,561
65usami/transferbyftp-python
refs/heads/master
/dirwatch.py
#!/usr/bin/env python """ 実行コマンド  $ python dirwatch.py -d ${監視対象のディレクトリ} """ from __future__ import print_function import sys import time import subprocess import os import zipfile import argparse from watchdog.observers import Observer from watchdog.events import PatternMatchingEventHandler # パーサーを作る parser = argparse.ArgumentParser( prog='dirwatch', # プログラム名 usage='ディレクトリを監視し、ファイル転送をトリガーに任意の処理を実行する', # プログラムの利用方法 description='description', # 引数のヘルプの前に表示 epilog='end', # 引数のヘルプの後で表示 add_help=True, # -h/–help オプションの追加 ) # 引数の追加 parser.add_argument('-d', '--watch_dir', help='監視するディレクトリを指定する', required=True) # 引数を解析する args = parser.parse_args() class MyHandler(PatternMatchingEventHandler): def __init__(self, patterns): super(MyHandler, self).__init__(patterns=patterns) def _run_command(self): # ファイル更新をトリガーに実行するpythonを記述 print("Hello World!!") def on_created(self, event): filepath = event.src_path filename = os.path.basename(filepath) file_save_dir = "/home/ubuntu/files/." # 解凍したファイルの保存先 print("クライアントからzipファイルを受信しました") print(filepath) with zipfile.ZipFile(filepath,'r') as zf: zf.extractall(file_save_dir) print(file_save_dir+"へzipファイルを解凍しました") self._run_command() # ファイル監視の開始(watchdogを使用) def watch(target_dir, extension): event_handler = MyHandler(extension) observer = Observer() observer.schedule(event_handler, target_dir, recursive=True) observer.start() # 処理が終了しないようスリープを挟んで無限ループ try: while True: time.sleep(0.1) except KeyboardInterrupt: observer.stop() observer.join() if __name__ == "__main__": if 2 > len(sys.argv): print("監視対象のディレクトリを第1引数に設定してください") else: watch(args.watch_dir, ["*zip"])
{"/file_transfer_tkinter.py": ["/file_transfer.py"]}
36,562
65usami/transferbyftp-python
refs/heads/master
/file_transfer_tkinter.py
import argparse import logging import os from tkinter import * from tkinter import ttk from tkinter import filedialog from tkinter import messagebox import file_transfer logging.basicConfig(filename='log/file_transfer.log', level=logging.DEBUG) # GUIプログラムtkinter # button1(参照)クリック時の処理 def button1_clicked(): iDir = os.path.abspath(os.path.dirname(__file__)) filepath = filedialog.askopenfilename(initialdir = iDir) file1.set(filepath) # button2(start)クリック時の処理 def button2_clicked(parser): if file1.get(): parser.add_argument('-local_path', default=file1.get()) argparse_args = parser.parse_args() #ファイル転送 result_flag = file_transfer.file_transfer(argparse_args) if result_flag: messagebox.showinfo('転送完了', '対象ファイルをサーバへ転送しました。\n対象ファイル:' + file1.get()) else: messagebox.showinfo('転送失敗','転送に失敗しました。\n選択したファイルが正しいか確認してください。') else: messagebox.showinfo('ファイル未選択','ファイルを選択してください。') def tkinter_start(parser): # rootの作成 root = Tk() root.title('ファイル転送') root.resizable(False, False) # Frame1の作成 frame1 = ttk.Frame(root, padding=10) frame1.grid() # 参照ボタンの作成 button1 = ttk.Button(root, text=u'参照', command=button1_clicked) button1.grid(row=0, column=3) # 「ファイル」ラベルの作成 s = StringVar() s.set('対象ファイル>>') label1 = ttk.Label(frame1, textvariable=s) label1.grid(row=0, column=0) # 参照ファイルパス表示ラベルの作成 global file1 file1 = StringVar() file1_entry = ttk.Entry(frame1, textvariable=file1, width=50) file1_entry.grid(row=0, column=2) # Frame2の作成 frame2 = ttk.Frame(root, padding=(0,5)) frame2.grid(row=1) # Startボタンの作成 button2 = ttk.Button(frame2, text='Start', command=lambda:button2_clicked(parser)) button2.pack(side=LEFT) # Cancelボタンの作成 button3 = ttk.Button(frame2, text='Cancel', command=quit) button3.pack(side=LEFT) root.mainloop() if __name__ == "__main__": parser = argparse.ArgumentParser(conflict_handler='resolve') parser.add_argument('-remote_path', default="/home/ubuntu/") parser.add_argument('-u', default='ubuntu') parser.add_argument('-I', default='') parser.add_argument('-P', default=22) 
parser.add_argument('-i', default='') tkinter_start(parser)
{"/file_transfer_tkinter.py": ["/file_transfer.py"]}
36,563
65usami/transferbyftp-python
refs/heads/master
/file_transfer.py
import argparse import paramiko import zipfile import logging import datetime import random import os logging.basicConfig(filename='log/file_transfer.log', level=logging.DEBUG) random.seed(24) def get_random_number_str(random_size: int = 1) -> str: random_num_str = '' for i in range(random_size): random_num_str += str(int(random.uniform(0, 9))) return random_num_str def file_transfer(argparse_args:"ArgumentParser オブジェクト") ->"実行結果": local_path = argparse_args.local_path remote_path = argparse_args.remote_path user = argparse_args.u host = argparse_args.I port = argparse_args.P rsa_key_file = argparse_args.i # 秘密鍵ファイルからキーを取得 rsa_key = paramiko.RSAKey.from_private_key_file(rsa_key_file) current_time = datetime.datetime.today().strftime("%Y%m%d-%H%M%S") logging.info(f"現在時刻: {current_time}") logging.info(f"配信対象ファイル: {local_path}") logging.info(f"配信先PATH: {remote_path}") random_number_str = get_random_number_str(4) if local_path is None or remote_path is None: logging.error("local_pathとremote_pathを設定してください。") print("local_pathとremote_pathを設定してください。") return # ファイル名を作成、対N通信を想定し同時送信対策とファイル名が被らないよう日付+乱数でユニークにする unique_suffix = "file" + current_time + "-" + random_number_str file_name = os.path.splitext(os.path.basename(local_path))[0] + "_" + unique_suffix zip_file = file_name + ".zip" csv_file = file_name + ".csv" try: client = paramiko.SSHClient() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) client.connect(host, port, user, pkey=rsa_key) sftp = client.open_sftp() # ZIPファイルを作成/ZIPファイルに追加 with zipfile.ZipFile(zip_file, 'w', zipfile.ZIP_DEFLATED) as f: f.write(local_path, csv_file) logging.info(f"配信先ZIPファイル: {zip_file}") sftp.put(zip_file, remote_path + "/" + zip_file) logging.info(f"転送完了しました: {zip_file}") result_flag = True except Exception as e: logging.error(f"転送失敗!!: {e}") print("転送に失敗しました。\n選択したファイルが正しいか確認してください。") result_flag = False finally: sftp.close() client.close() #ファイル転送後、ローカルのzipファイル削除 os.remove(zip_file) return result_flag if __name__ == 
"__main__": parser = argparse.ArgumentParser() parser.add_argument('-local_path', default=None) parser.add_argument('-remote_path', default="/home/ubuntu/") parser.add_argument('-u', default='ubuntu') parser.add_argument('-I', default='') parser.add_argument('-P', default=22) parser.add_argument('-i', default='') argparse_args = parser.parse_args() #ファイル転送 file_transfer(argparse_args)
{"/file_transfer_tkinter.py": ["/file_transfer.py"]}
36,564
michalkurdziel/sudoku
refs/heads/master
/tests/test_main.py
# Unit tests for the sudoku solver in src/main.py.
from unittest import TestCase
from unittest import mock
from src.consts import puzzle
from src.consts import puzzle_results
from src.main import Cell
from src.main import CellList
from src.main import get_border

# All digits 1..9 — the candidate set for an empty cell.
FULL_SET = [x for x in range(1, 10)]

# 9x9 puzzle; 0 marks an unknown cell.
TEST_MATRIX = [
    [0, 3, 6, 0, 4, 7, 5, 2, 0],
    [0, 4, 0, 6, 2, 5, 0, 0, 8],
    [0, 0, 0, 3, 1, 0, 0, 7, 0],
    [0, 1, 0, 5, 0, 6, 7, 0, 0],
    [3, 0, 0, 0, 0, 0, 0, 0, 5],
    [0, 0, 5, 7, 0, 4, 0, 8, 0],
    [0, 2, 0, 0, 6, 8, 0, 0, 0],
    [4, 0, 0, 2, 5, 3, 0, 9, 0],
    [0, 5, 9, 4, 7, 0, 6, 3, 0]
]

# The unique solution of TEST_MATRIX, used as the expected result.
CORRECT_MATRIX = [
    [1, 3, 6, 8, 4, 7, 5, 2, 9],
    [9, 4, 7, 6, 2, 5, 3, 1, 8],
    [5, 8, 2, 3, 1, 9, 4, 7, 6],
    [2, 1, 8, 5, 9, 6, 7, 4, 3],
    [3, 7, 4, 1, 8, 2, 9, 6, 5],
    [6, 9, 5, 7, 3, 4, 2, 8, 1],
    [7, 2, 3, 9, 6, 8, 1, 5, 4],
    [4, 6, 1, 2, 5, 3, 8, 9, 7],
    [8, 5, 9, 4, 7, 1, 6, 3, 2]
]


class TestGrubAllvalues(TestCase):
    """Tests for the row/column/square value collectors and get_border."""

    def setUp(self):
        self.cells = CellList(TEST_MATRIX)

    def tearDown(self):
        self.cells = None

    def test_get_values_from_row(self):
        # Only already-solved (int) values of row 1 are returned.
        expected_results = [4, 6, 2, 5, 8]
        results = self.cells.get_values_from_row(1)
        assert expected_results == results, 'Expected {} but results is {}'.format(expected_results, results)

    def test_get_values_from_column(self):
        expected_results = [6, 5, 9]
        results = self.cells.get_values_from_column(2)
        assert expected_results == results, 'Expected {} but results is {}'.format(expected_results, results)

    def test_get_first_small_square_indexes(self):
        # Top-left 3x3 square of TEST_MATRIX contains 3, 6 and 4.
        expected_results = [3, 6, 4]
        self.cells1 = CellList(TEST_MATRIX)
        values_in_square = self.cells1.get_values_in_small_square(0, 0)
        self.assertListEqual(values_in_square, expected_results)

    def test_get_fourth_small_square_indexes(self):
        expected_results = [5, 6, 7, 4]
        results = self.cells.get_values_in_small_square(4, 4)
        assert expected_results == results, 'Expected {} but results is {}'.format(expected_results, results)

    def test_get_border(self):
        # Every index maps onto its 3-wide band [0-2], [3-5] or [6-8].
        self.assertListEqual([0, 1, 2], get_border(1))
        self.assertListEqual([0, 1, 2], get_border(2))
        self.assertListEqual([3, 4, 5], get_border(3))
        self.assertListEqual([3, 4, 5], get_border(4))
        self.assertListEqual([3, 4, 5], get_border(5))
        self.assertListEqual([6, 7, 8], get_border(6))
        self.assertListEqual([6, 7, 8], get_border(7))
        self.assertListEqual([6, 7, 8], get_border(8))


class TestBaseConfiguration(TestCase):
    """Sanity-checks the FULL_SET constant."""

    full_set = [1, 2, 3, 4, 5, 6, 7, 8, 9]

    def test_full_set(self):
        assert self.full_set == FULL_SET


class TestCellValidator(TestCase):
    """Tests for the candidate-elimination helpers on CellList."""

    def setUp(self):
        self.cells = CellList(TEST_MATRIX)

    def test_validate_by_row(self):
        cell = Cell(0, 0, 0)
        self.cells._set_possible_values_by_row(cell, [3, 6, 4])
        self.assertListEqual(cell.value, [1, 2, 5, 7, 8, 9])

    def test_validate_by_column(self):
        cell = Cell(0, 0, 0)
        self.cells._set_possible_values_by_column(cell, [3, 4])
        self.assertListEqual(cell.value, [1, 2, 5, 6, 7, 8, 9])

    def test_validate_by_square(self):
        cell = Cell(0, 0, 0)
        self.cells._set_possible_values_by_square(cell, [3, 4, 6])
        self.assertListEqual(cell.value, [1, 2, 5, 7, 8, 9])

    def test_validate_by_three_dimenssions(self):
        # run() applies row, column and square elimination together.
        cell = Cell(0, 0, 0)
        self.cells.run(cell)
        self.assertListEqual(cell.value, [1, 8, 9])


class TestCellList(TestCase):
    """Tests for CellList construction and completion detection."""

    @mock.patch('src.main.Cell', autospec=True)
    def test_list_stores_proper_values(self, mock_cell):
        cells = CellList([[mock_cell, mock_cell, mock_cell]])
        self.assertEqual(len(cells), 3, len(cells))

    def test_status_check(self):
        cells = CellList(TEST_MATRIX)
        cells.print()
        res = cells.isFinished()
        self.assertFalse(res, "res: " + str(res))
        cells = CellList(CORRECT_MATRIX)
        res = cells.isFinished()
        self.assertTrue(res)


class TestCell(TestCase):
    def test_create_intsance(self):
        cell = Cell(1, 1, 1)
        self.assertIsInstance(cell, Cell)


class TestCheckInputMatrixes(TestCase):
    """End-to-end: solving the fixtures must reproduce the known solutions."""

    def test_compare_two_matrix(self):
        # Every given clue in TEST_MATRIX must match the solution matrix.
        for i in range(9):
            for j in range(9):
                if TEST_MATRIX[i][j] != 0:
                    self.assertEqual(TEST_MATRIX[i][j], CORRECT_MATRIX[i][j], "i: {}, j {}".format(i, j))

    def test_general_validation_1(self):
        cells = CellList(TEST_MATRIX)
        while not cells.isFinished():
            cells.validate()
        matrix = cells.generate_matrix()
        for i in range(9):
            for j in range(9):
                self.assertEqual(matrix[i][j], CORRECT_MATRIX[i][j], "i= {}, j= {}".format(i, j))

    def test_general_validation_2(self):
        cells = CellList(puzzle)
        while not cells.isFinished():
            cells.validate()
        matrix = cells.generate_matrix()
        for i in range(9):
            for j in range(9):
                self.assertEqual(matrix[i][j], puzzle_results[i][j], "i= {}, j= {}".format(i, j))
{"/tests/test_main.py": ["/src/main.py"], "/tests/__init__.py": ["/src/main.py"]}
36,565
michalkurdziel/sudoku
refs/heads/master
/tests/__init__.py
# Test-package initializer: re-exports the solver and puzzle constants.
# NOTE(review): star imports obscure the namespace and can shadow names;
# consider explicit imports — confirm nothing relies on the re-exports.
from src.main import *
from src.consts import *
{"/tests/test_main.py": ["/src/main.py"], "/tests/__init__.py": ["/src/main.py"]}
36,566
michalkurdziel/sudoku
refs/heads/master
/src/main.py
# Sudoku solver based on repeated candidate elimination by row, column and
# 3x3 square until every cell holds a single integer.

# All digits 1..9 — the initial candidate set for an empty cell.
FULL_SET = [x for x in range(1, 10)]


def get_border(value):
    """Return the 3-cell index band containing `value`: [0-2], [3-5] or [6-8]."""
    if value <= 2:
        return [0, 1, 2]
    elif 3 <= value <= 5:
        return [3, 4, 5]
    else:
        return [6, 7, 8]


class Cell:
    """One sudoku cell.

    `value` is an int once solved, otherwise a sorted list of candidates.
    """

    _value = None

    def __init__(self, row, col, value):
        self.row = row
        self.col = col
        self.value = value
        # 0 in the input matrix means "unknown": start from all candidates.
        if self.value == 0:
            self.value = FULL_SET

    def __str__(self):
        return "row: {} col: {} value: {}".format(self.row, self.col, self.value)

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, value):
        # Collapse a single-candidate list to a plain int; keep longer
        # candidate lists sorted for deterministic comparisons.
        # NOTE(review): an empty list is silently ignored (the previous value
        # is kept) — only reachable on contradictory puzzles; confirm before
        # changing this behavior.
        if isinstance(value, list):
            if len(value) == 1:
                self._value = value[0]
            elif len(value) > 1:
                self._value = sorted(value)
        else:
            self._value = value


class CellList(list):
    """Flat list of the 81 Cells of a puzzle, with elimination helpers."""

    CELL_TYPE = Cell

    def __init__(self, matrix):
        # Flatten the 9x9 matrix row by row into Cell objects.
        for row_no, row in enumerate(matrix):
            for column_no, cell in enumerate(row):
                self.append(Cell(row_no, column_no, cell))

    def append(self, item):
        # Type-guard so only Cells ever enter the list.
        if not isinstance(item, self.CELL_TYPE):
            raise TypeError('item is not of type {}'.format(self.CELL_TYPE))
        super(CellList, self).append(item)

    def validate(self):
        """Run one elimination pass over every cell."""
        for cell in self:
            self.run(cell)

    def print(self):
        for i in self:
            print(i)

    def check_state(self):
        """Print whether the puzzle is fully solved."""
        if self.isFinished():
            print("That's ok!")
        else:
            print("Still missing values")

    def isFinished(self):
        """True when every cell holds a single nonzero integer."""
        for item in self:
            if item.value == 0 or isinstance(item.value, list):
                return False
        return True

    def get_values_from_row(self, row):
        """Solved (int) values already placed in `row`."""
        return [cell.value for cell in self
                if cell.row == row and isinstance(cell.value, int)]

    def get_values_from_column(self, col):
        """Solved (int) values already placed in column `col`."""
        return [cell.value for cell in self
                if cell.col == col and isinstance(cell.value, int)]

    def get_values_in_small_square(self, row, col):
        """Solved values in the 3x3 square containing (row, col)."""
        rows_numbers = get_border(row)
        cols_numbers = get_border(col)
        return [cell.value for cell in self
                if cell.row in rows_numbers and cell.col in cols_numbers
                and isinstance(cell.value, int)]

    def _set_possible_values_by_row(self, cell, set_to_compare):
        cell.value = self.minus_two_sets(cell.value, set_to_compare)

    def _set_possible_values_by_column(self, cell, set_to_compare):
        cell.value = self.minus_two_sets(cell.value, set_to_compare)

    def _set_possible_values_by_square(self, cell, set_to_compare):
        cell.value = self.minus_two_sets(cell.value, set_to_compare)

    def minus_two_sets(self, first_set, second_set):
        """Candidates of `first_set` minus the values in `second_set`."""
        if isinstance(first_set, int):
            first_set = [first_set]
        return list(set(first_set) - set(second_set))

    def run(self, cell):
        """Eliminate candidates of an unsolved cell by row, column and square."""
        if not isinstance(cell.value, int):
            self._set_possible_values_by_row(cell, self.get_values_from_row(cell.row))
            self._set_possible_values_by_column(cell, self.get_values_from_column(cell.col))
            self._set_possible_values_by_square(cell, self.get_values_in_small_square(cell.row, cell.col))

    def generate_matrix(self):
        """Rebuild a 9x9 matrix of current cell values."""
        matrix = [[0 for i in range(9)] for j in range(9)]
        for cell in self:
            matrix[cell.row][cell.col] = cell.value
        return matrix


if __name__ == '__main__':
    # Imported here rather than at module top: `puzzle` is only needed when
    # running as a script, and the top-level import prevented importing this
    # module (e.g. for tests) without src.consts being present.
    from src.consts import puzzle

    cells = CellList(puzzle)
    while not cells.isFinished():
        cells.validate()
    cells.check_state()
    cells.print()
    print(cells.generate_matrix())
{"/tests/test_main.py": ["/src/main.py"], "/tests/__init__.py": ["/src/main.py"]}
36,589
siddsar/wf_classifier
refs/heads/master
/collect.py
# Capture per-domain traffic with tcpdump while wget fetches each site, then
# convert the pcaps to frame-size CSVs with tshark.
import argparse
import json
import os
import signal
import subprocess


def rmdir_recursive(dir):
    """Remove a directory, and all its contents if it is not already empty."""
    for name in os.listdir(dir):
        full_name = os.path.join(dir, name)
        # on Windows, if we don't have write permission we can't remove
        # the file/directory either, so turn that on
        if not os.access(full_name, os.W_OK):
            # 0600 was Python-2 octal syntax (a SyntaxError on Python 3);
            # 0o600 is the equivalent literal.
            os.chmod(full_name, 0o600)
        if os.path.isdir(full_name):
            rmdir_recursive(full_name)
        else:
            os.remove(full_name)
    os.rmdir(dir)


def main():
    """Capture 40 traffic instances per configured domain and emit CSVs."""
    parser = argparse.ArgumentParser(description='Covert to json.')
    parser.add_argument('--link', default='eth1',
                        help='The connection on which packets are to be captured')
    args = parser.parse_args()
    link = args.link

    with open('config.json') as fp:
        print("Reading closed world websites...")
        cw = json.load(fp)

    j = 0
    for domain in cw['pcaps']:
        # The original checked "./pcaps/pcap-{}" but created "pcaps-{}", so
        # the existence check never matched the directory actually used.
        if not os.path.exists("./pcaps/pcaps-{}".format(j)):
            os.makedirs("./pcaps/pcaps-{}".format(j))
        if not os.path.exists("./csv/csv-{}".format(j)):
            os.makedirs("./csv/csv-{}".format(j))
        for i in range(1, 41):
            fname = str(i)
            # NOTE(review): link/domain are interpolated into shell strings —
            # safe only for trusted config input (shell-injection risk).
            proc = subprocess.Popen(
                ["sudo tcpdump -vv -x -X -i %s -A tcp and port not 22 -w ./pcaps/pcaps-%s/%s.pcap &"
                 % (link, str(j), fname)], shell=True)
            subprocess.call("wget -p %s" % (domain), shell=True)
            proc.terminate()
            subprocess.call(
                "tshark -r ./pcaps/pcaps-%s/%s.pcap -T fields -e frame.len -e ip.src -e ip.dst -E separator=, > ./csv/csv-%s/%s.csv"
                % (str(j), fname, str(j), fname), shell=True)
        # Pcaps are no longer needed once converted to CSV.
        rmdir_recursive("pcaps/pcaps-%s" % (str(j)))
        j = j + 1


if __name__ == '__main__':
    main()
{"/train.py": ["/fingerprint.py"]}
36,590
siddsar/wf_classifier
refs/heads/master
/getsite.py
# Fetch a local copy of every closed-world site with wget (used to prime
# the capture environment before traffic collection).
import json
import subprocess
import os
import signal

with open ('closed_world.json') as fp:
    print("Reading closed world websites...")
    cw = json.load(fp)

# NOTE(review): domain is interpolated into a shell command string — safe
# only for trusted config input (shell-injection risk otherwise).
for domain in cw['pcaps']:
    subprocess.call("sudo wget -e robots=off --wait 1 -H -p -k %s" %(domain),shell =True)
{"/train.py": ["/fingerprint.py"]}
36,591
siddsar/wf_classifier
refs/heads/master
/fingerprint.py
# This python script takes in a tshark traffic instance saved as a csv file,
# and creates a fingerprint of that instance by filtering out unneeded information
#
# NOTE(review): this module appears to target Python 2 — the csv file is
# opened in 'rb' mode (py3 csv.reader needs text mode) and all the
# `x/610`, `x/10000`, `x/2`, `x/15` expressions rely on integer division
# (they produce floats on py3, changing every marker key) — confirm the
# intended interpreter before porting.
import csv
import argparse
import numpy as np
import pandas as pd

def make_fingerprint(filename,ip):
    """Build a feature dict ("fingerprint") from a tshark CSV trace.

    Each CSV row is (frame length, src ip, dst ip).  Packets whose source
    is `ip` are incoming ('-'), all others outgoing ('+').  The features
    are bucketed counts of burst sizes, direction changes, totals, unique
    packet sizes and packet-count ratios.
    """
    # NOTE(review): never called, and references mpatches/pyp which are not
    # imported here — calling it would raise NameError; likely a leftover
    # from a plotting script.
    def custom_legend(colors,labels, legend_location = 'upper left', legend_boundary = (1,1)):
        # Create custom legend for colors
        recs = []
        for i in range(0,len(colors)):
            recs.append(mpatches.Rectangle((0,0),1,1,fc=colors[i]))
        pyp.legend(recs,labels,loc=legend_location, bbox_to_anchor=legend_boundary)
    sizelist = []
    instance = {}
    # Open packet capture file and read it in and then close it
    with open(filename, 'rb') as csvfile:
        filereader = csv.reader(csvfile,delimiter= ',')
        for row in filereader:
            # Identify direction
            size = int(row[0])
            direction = "+"
            if row[2] == ip:
                direction = "-"
            sizelist.append((direction, size))
    # Filter out packets with size 52 - actually, 66
    filterlist = []
    for sizetuple in sizelist:
        size = sizetuple[1]
        if not size == 66:
            filterlist.append(sizetuple)
    # Insert size markers at every direction change
    sizemarkerlist = []
    previousDirection = '+'
    sizeMarker = 0
    for sizetuple in filterlist:
        direction = sizetuple[0]
        size = sizetuple[1]
        if direction == previousDirection:
            # Same burst: keep accumulating bytes.
            sizeMarker += size
        else: # if the direction has changed
            # Bucket the burst size to a multiple of 600 and count it.
            sizemarkerlist.append(('S', (sizeMarker/610+1)*600))
            datakey = 'S' + previousDirection + str((sizeMarker/610+1)*600)
            if not instance.get(datakey):
                instance[datakey] = 0
            instance[datakey]+=1
            sizeMarker = size
            previousDirection = direction
        sizemarkerlist.append(sizetuple)
    # Append size marker for the last set of packets after going through the list
    sizemarkerlist.append(('S', (sizeMarker/610+1)*600))
    # Insert total transmitted byte markers at the end
    totalByteList = []
    totalSizeP = 0 # total byte count for outgoing packets
    totalSizeN = 0 # total byte count for incoming packets
    for sizetuple in sizemarkerlist:
        direction = sizetuple[0]
        size = sizetuple[1]
        if not direction in ['+', '-']:
            pass
        elif direction == '+':
            totalSizeP += size
        elif direction == '-':
            totalSizeN += size
        totalByteList.append(sizetuple)
    totalByteList.append(('TS+', ((totalSizeP-1)/10000+1)*10000)) # Append total number of bytes marker
    totalByteList.append(('TS-', ((totalSizeN-1)/10000+1)*10000))
    instance['0-B'+ str(((totalSizeP-1)/10000+1)*10000)] = 1 #Bandwidth up
    instance['1-B'+ str(((totalSizeN-1)/10000+1)*10000)] = 1#Bandwidth down
    # Insert HTML marker
    htmlMarkerList = []
    previousDirection = '+'
    htmlMarker = 0
    htmlFlagStart = 0
    htmlFlagEnd = 0
    htmlMarkerSize = 0
    for sizetuple in totalByteList:
        direction = sizetuple[0]
        size = sizetuple[1]
        if not direction in ['+', '-']: # If the row is a marker
            pass # do nothing
        elif direction in ['+', '-'] and htmlFlagStart != 3:
            # Skip the first three data packets (presumably the TLS/TCP
            # handshake — TODO confirm) before measuring the HTML document.
            htmlFlagStart += 1
        elif direction == '-' and htmlFlagEnd == 0 and htmlFlagStart == 3: #If the packet is part of the html document
            htmlMarker += size
            previousDirection = '-'
        # After the last html packet has been received
        elif direction == '+' and htmlFlagEnd == 0 and previousDirection == '-':
            htmlMarkerList.append(('H', (htmlMarker/610+1)*600))
            instance['H'+str((htmlMarker/610+1)*600)] = 1 # Append the html marker
            htmlFlagEnd = 1 # Reading html request has finished
        htmlMarkerList.append(sizetuple)
        htmlMarkerSize = size
    # Collapse nearby direction-change counts into a coarser bucket.
    def roundNumberMarker(n):
        if n==4 or n==5:
            return 3
        elif n==7 or n==8:
            return 6
        elif n==10 or n==11 or n==12 or n==13:
            return 9
        else:
            return n
    # Insert number markers
    numberMarkerList = []
    onlyNumberMarkerList = []
    previousDirection = '+'
    numberCount = 0
    for sizetuple in htmlMarkerList:
        direction = sizetuple[0]
        size = sizetuple[1]
        if not direction in ['+', '-']:
            pass
        elif direction != previousDirection: #Change in direction, insert number marker
            numberMarkerList.append(('N', numberCount))
            onlyNumberMarkerList.append(('N', numberCount))
            numberkey = 'N'+ previousDirection + str(roundNumberMarker(numberCount))
            if not instance.get(numberkey):
                instance[numberkey] = 0
            instance[numberkey] += 1
            previousDirection = direction
            numberCount = 0
        if direction in ['+', '-']:
            numberCount += 1
        numberMarkerList.append(sizetuple)
    newList = [tup for tup in numberMarkerList if tup[0] in ['S', 'N', '+', '-']]
    newNewList = []
    for newTup in newList:
        # Incoming sizes become negative so one column encodes both
        # size and direction.
        if newTup[0]=='-':
            newNewList.append(('Size and Direction',-1*newTup[1]))
        elif newTup[0]=='+':
            newNewList.append(('Size and Direction',newTup[1]))
        elif newTup[0]=='N':
            newNewList.append(('Number Marker', newTup[1]))
        elif newTup[0]=='S':
            newNewList.append(('Size Marker', newTup[1]))
    filename = filename.replace(".csv", "")
    # NOTE(review): this DataFrame (and the one at the end) is built but
    # never used or returned — dead code kept for reference.
    df = pd.DataFrame(newNewList, columns = ['Header', 'Value'])
    df['Index'] = range(1, len(df) + 1)
    # This list will be for markers that are appended at the end, for creating a
    # table of useful marker information as part of the fingerprint
    endListMarkers = []
    # Append HTML marker
    endListMarkers.append(('HTML', (htmlMarker/610+1)*600))
    # Append total number of bytes markers
    endListMarkers.append(('TS+', ((totalSizeP-1)/10000+1)*10000))
    endListMarkers.append(('TS-', ((totalSizeN-1)/10000+1)*10000))
    # Insert occurring packet size markers
    occurringList = []
    uniqueP = []
    uniquePFlag = 0
    uniqueN = []
    uniqueNFlag = 0
    for sizetuple in numberMarkerList:
        direction = sizetuple[0]
        size = sizetuple[1]
        if not direction in ['+', '-']:
            pass
        elif direction == '+':
            for A in uniqueP:
                if(A == size): # If we find a match, raise a flag and stop
                    uniquePFlag = 1
                    break
            if(uniquePFlag == 0): # If there was no match in the list, append
                uniqueP.append(size)
            else:
                uniquePFlag = 0
        elif direction == '-':
            for A in uniqueN:
                if(A == size): # If we find a match, raise a flag and stop
                    uniqueNFlag = 1
                    break
            if(uniqueNFlag == 0): # If there was no match in the list, append
                uniqueN.append(size)
            else:
                uniqueNFlag = 0
        occurringList.append(sizetuple)
    occurringList.append(('OP+', (((len(uniqueP)-1)/2)+1)*2)) # Append occurring packet marker
    instance['uniquePacketSizesUp'] =((((len(uniqueP)-1)/2)+1)*2)
    occurringList.append(('OP-', (((len(uniqueN)-1)/2)+1)*2))
    instance['uniquePacketSizesDown'] = ((((len(uniqueN)-1)/2)+1)*2)
    endListMarkers.append(('OP+', (((len(uniqueP)-1)/2)+1)*2))
    endListMarkers.append(('OP-', (((len(uniqueN)-1)/2)+1)*2))
    # Insert percent incoming/outgoing packet marker and total number of packets markers
    packetList = []
    nPacketsP = 0
    nPacketsN = 0
    for sizetuple in occurringList:
        size = sizetuple[1]
        direction = sizetuple[0]
        if not direction in ['+', '-']:
            pass
        elif direction == '+':
            nPacketsP += 1
        elif direction == '-':
            nPacketsN += 1
        packetList.append(sizetuple)
    # NOTE(review): raises ZeroDivisionError when no incoming packets were
    # seen — assumes every trace has inbound traffic; confirm.
    percentPoverN = float(nPacketsP)/nPacketsN # calculate incoming/outgoing percentage
    # Append the incoming/outgoing percent marker
    packetList.append(('PP-', "%.2f" % (float((int(((((percentPoverN-.01)*100))/5)+1)*5))/100)))
    instance['PP-'] = (float((int(((((percentPoverN-.01)*100))/5)+1)*5))/100)
    # Append the total number of packet markers for both outgoing and incoming traffic
    packetList.append(('NP+', (((nPacketsP-1)/15)+1)*15))
    instance['numberUp'] = ((((nPacketsP-1)/15)+1)*15)
    packetList.append(('NP-', (((nPacketsN-1)/15)+1)*15))
    instance['numberDown'] = ((((nPacketsN-1)/15)+1)*15)
    endListMarkers.append(('PP-', "%.2f" % (float((int(((((percentPoverN-.01)*100))/5)+1)*5))/100)))
    endListMarkers.append(('NP+', (((nPacketsP-1)/15)+1)*15))
    endListMarkers.append(('NP-', (((nPacketsN-1)/15)+1)*15))
    # Create a table for the special markers that are appended at the end of the list of tuples
    df = pd.DataFrame(endListMarkers, columns = ['Marker', 'Packet Information'])
    return instance
{"/train.py": ["/fingerprint.py"]}
36,592
siddsar/wf_classifier
refs/heads/master
/capture.py
# Capture per-domain traffic directly with tshark while proxychains+wget
# fetches each site; emits frame-size CSVs under ./csv/csv-<site index>/.
import json
import subprocess
import os
import signal
import argparse

parser = argparse.ArgumentParser(description='Covert to json.')
parser.add_argument('--link', default='eth1',
                    help='The connection on which packets are to be captured')
args = parser.parse_args()
link = args.link

with open ('config.json') as fp:
    print("Reading closed world websites...")
    cw = json.load(fp)

j=0;
for domain in cw['pcaps']:
    if not os.path.exists("./csv/csv-{}".format(j)):
        os.makedirs("./csv/csv-{}".format(j))
    # 40 capture instances per site (i = 1..40).
    for i in range(1,41):
        fname = str(i)
        # NOTE(review): link/domain are interpolated into shell strings —
        # safe only for trusted config input (shell-injection risk).
        proc = subprocess.Popen(["sudo tshark -i %s -n -T fields -e frame.len -e ip.src -e ip.dst -E separator=, > ./csv/csv-%s/%s.csv &" %(link,str(j),fname)],shell =True)
        subprocess.call("proxychains wget -p %s" %(domain),shell =True)
        # Stop the background capture once the fetch has completed.
        proc.terminate()
    j= j+1
{"/train.py": ["/fingerprint.py"]}
36,593
siddsar/wf_classifier
refs/heads/master
/makeJSON.py
# Convert the first N rows of an Alexa-style "rank,domain" CSV into the
# config.json consumed by the capture scripts.
import json
import csv
import argparse


def extract_domain(row):
    """Return the domain from a "rank,domain" CSV row.

    The original used lstrip('0123456789.,'), which also stripped leading
    digits/dots belonging to the domain itself (e.g. "123movies.com" ->
    "movies.com").  Splitting on the first comma keeps the domain intact.
    """
    return row.split(',', 1)[-1].rstrip('\n')


def main():
    """Parse CLI args and write the first `num` domains to config.json."""
    parser = argparse.ArgumentParser(description='Convert to json.')
    parser.add_argument('--filename', default='top-1m.csv',
                        help='Name of packet capture file.')
    # Help text fixed: this argument is a count, not an IP address.
    parser.add_argument('--num', default='20',
                        help='Number of domains to include.')
    args = parser.parse_args()
    filename = args.filename
    num = int(args.num)

    # Renamed from `dict`, which shadowed the builtin.
    config = {'pcaps': []}
    i = 0
    with open(filename, 'r') as file:
        for row in file:
            config['pcaps'].append(extract_domain(row))
            i += 1
            if i == num:
                break

    with open('config.json', 'w') as outfile:
        json.dump(config, outfile, sort_keys=True, indent=4, ensure_ascii=False)


if __name__ == '__main__':
    main()
{"/train.py": ["/fingerprint.py"]}
36,594
siddsar/wf_classifier
refs/heads/master
/train.py
# Train an SVM website-fingerprinting classifier on the captured CSV traces
# and classify one new trace given on the command line.
#
# NOTE(review): this script appears to target Python 2 — on Python 3,
# `trainingcount = 70 * int(datacount) / 100` is a float, so the
# range()/slice expressions below raise TypeError; `//` would be needed.
import os
import fingerprint
import json
import numpy as np
from sklearn.feature_extraction import DictVectorizer
from sklearn import svm
from sklearn.metrics import accuracy_score
import argparse
import subprocess

parser = argparse.ArgumentParser(description='Process a packet capture.')
parser.add_argument('--thisIP',default='192.168.3.100', help='IP address of this computer.')
parser.add_argument('--ip', default='192.168.3.100', help='IP address of client.')
parser.add_argument('--predict',default='trace.csv', help ="*.pcap file which needs to be processed")
parser.add_argument('--datacount',default = 40 , help="total number of training and test instances for each website")
args = parser.parse_args()
ip_t = args.thisIP
ip = args.ip
filename = args.predict
datacount = args.datacount
print(filename)
print(ip)
print(ip_t)

with open('closed_world.json') as fp:
    cw=json.load(fp)

j=0
Data = []
labels = []
instance = {}
websites={}
# 70/30 train/test split per website.
trainingcount = 70 * int(datacount) / 100
datacount = int(datacount)

# First pass: fingerprint the training instances of every site.
for domain in cw['pcaps']:
    for i in range(1,trainingcount):
        instance = fingerprint.make_fingerprint("./csv/csv-%s/%s.csv"%(str(j),str(i)),(ip_t))
        Data.append(instance)
        labels.append(j)
    websites[j] = domain
    j+=1

# Second pass: fingerprint the held-out test instances.
j=0
for domain in cw['pcaps']:
    for i in range(trainingcount,datacount+1):
        instance = fingerprint.make_fingerprint("./csv/csv-%s/%s.csv"%(str(j),str(i)),(ip_t))
        Data.append(instance)
        labels.append(j)
    j+=1

# The trace to classify is appended last; it has NO entry in `labels`,
# which is why y_predict has one more element than y_test below.
instance = fingerprint.make_fingerprint("%s"%(filename),(ip))
Data.append(instance)

# One-hot/numeric encoding of the sparse fingerprint dicts.
v = DictVectorizer(sparse=False)
data = v.fit_transform(Data)
X = data
y = np.array(labels)
# Training rows: trainingcount-1 instances for each of the j sites.
X_train = X[:trainingcount*j-j,:]
Y_train = y[:trainingcount*j-j]
print(X)
print(X_train)
print(Y_train)
print(trainingcount)

classifier = svm.SVC(decision_function_shape = 'ovo',probability=True)
classifier.fit(X_train,Y_train)

# Everything after the training rows: test instances + the predict trace.
x_test = X[trainingcount*j-j:,:]
y_test = y[trainingcount*j-j:]
print(x_test.shape)
print(y_test.shape)
y_predict = classifier.predict(x_test)
print(y_predict,y_test)
# Drop the last prediction (the unlabeled trace) when scoring accuracy.
print("Accuracy: %s%%" % (accuracy_score(y_test, y_predict[:-1]) * 100,))
print("Website detected: %s" %(websites[y_predict[-1]]) )
probability = classifier.predict_proba(x_test)
print("confidence %.2f" %(probability[-1,y_predict[-1]]*100))
{"/train.py": ["/fingerprint.py"]}
36,596
UniversityProjectsAtUnisa/greenist-backend_afp
refs/heads/master
/jwtmessages.py
# Centralized JWT error handling: each flask-jwt-extended callback returns a
# consistent {"message", "error"} JSON body with a 401 status.
from flask import jsonify
from blacklist import BLACKLIST
from app import jwt


@jwt.expired_token_loader
def expired_token_callback():
    # Token signature is valid but past its expiry time.
    return jsonify({
        "message": "The token has expired",
        "error": "token_expired"
    }), 401


@jwt.invalid_token_loader
def invalid_token_callback(error):
    # Token is malformed or its signature does not verify.
    return jsonify({
        "message": "Signature verification failed",
        "error": "invalid_token"
    }), 401


@jwt.unauthorized_loader
def missing_token_callback(error):
    # No Authorization header / token supplied at all.
    return jsonify({
        "message": "Request does not contain an access token",
        "error":"authorization_required"
    }), 401


@jwt.needs_fresh_token_loader
def needs_fresh_token_callback(error):
    # Endpoint requires a fresh (non-refreshed) token.
    return jsonify({
        "message":"The token is not fresh",
        "error":"fresh_token_required"
    }), 401


@jwt.revoked_token_loader
def revoked_token_callback():
    # Token was explicitly revoked (e.g. on logout).
    return jsonify({
        "message":"The token has been revoked",
        "error":"token_revoked"
    }), 401


@jwt.token_in_blacklist_loader
def check_if_token_in_blacklist(decrypted_token):
    # Reject any token whose unique id (jti) has been blacklisted.
    return decrypted_token["jti"] in BLACKLIST
{"/jwtmessages.py": ["/app.py"], "/models/category.py": ["/config.py"], "/resources/category.py": ["/models/category.py", "/config.py"], "/resources/task.py": ["/models/task.py", "/config.py"], "/resources/user.py": ["/config.py"], "/app.py": ["/resources/image.py", "/resources/category.py", "/resources/task.py", "/resources/achievement.py", "/resources/update.py", "/resources/user.py"], "/models/achievement.py": ["/models/category.py", "/config.py"], "/resources/image.py": ["/models/image.py"], "/config.py": ["/common/utils.py"], "/models/task.py": ["/models/category.py", "/config.py"], "/models/image.py": ["/models/category.py"], "/resources/achievement.py": ["/models/achievement.py", "/config.py"], "/resources/update.py": ["/resources/task.py", "/resources/achievement.py", "/resources/category.py"]}
36,597
UniversityProjectsAtUnisa/greenist-backend_afp
refs/heads/master
/models/category.py
from db import db
from datetime import datetime
from sqlalchemy.exc import IntegrityError
from config import DEBUG


class CategoryModel(db.Model):
    """Category table with soft deletion.

    Rows are never physically removed: delete_from_db() stamps `deleted`,
    and the `find_*existing*` queries filter stamped rows out.  The
    `find_new/deleted/updated` queries support incremental client syncing
    against a `last_fetch` unix timestamp.
    """

    __tablename__ = "categories"

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(20), nullable=False)
    # Audit timestamps; `deleted` stays NULL while the row is live.
    created = db.Column(db.DateTime, server_default=db.func.now())
    updated = db.Column(db.DateTime, server_default=db.func.now())
    deleted = db.Column(db.DateTime, server_default=None)
    image_name = db.Column(db.String(20), db.ForeignKey("images.name"))
    image = db.relationship("ImageModel")
    tasks = db.relationship("TaskModel", lazy="dynamic")
    achievements = db.relationship("AchievementModel", lazy="dynamic")

    def __init__(self, name, image):
        self.name = name
        self.image_name = image

    def json(self):
        """Serialize for the API; DEBUG mode adds id and audit timestamps."""
        if DEBUG:
            return {
                "id": self.id,
                "name": self.name,
                "image": self.image_name,
                "created": self.created.timestamp(),
                "updated": self.updated.timestamp(),
                "deleted": None if self.deleted is None else self.deleted.timestamp()
            }
        return {
            "name": self.name,
            "image": self.image_name
        }

    @classmethod
    def find_by_id(cls, id):
        """Lookup by id, including soft-deleted rows."""
        return cls.query.filter_by(id=id).first()

    @classmethod
    def find_existing_by_id(cls, id):
        """Lookup by id, excluding soft-deleted rows."""
        return cls.query.filter_by(id=id).filter_by(deleted=None).first()

    @classmethod
    def find_by_name(cls, name):
        """Lookup by name, including soft-deleted rows."""
        return cls.query.filter_by(name=name).first()

    @classmethod
    def find_existing_by_name(cls, name):
        """Lookup by name, excluding soft-deleted rows."""
        return cls.query.filter_by(name=name).filter_by(deleted=None).first()

    @classmethod
    def find_all(cls):
        return cls.query.all()

    @classmethod
    def find_all_existing(cls):
        return cls.query.filter_by(deleted=None).all()

    @classmethod
    def find_new(cls, last_fetch):
        """Rows created after `last_fetch` and still live.

        NOTE: `cls.deleted == None` is intentional — SQLAlchemy compiles it
        to IS NULL; do not "fix" it to `is None`.
        """
        return cls.query.filter(
            cls.created > datetime.fromtimestamp(last_fetch),
            cls.deleted == None
        )

    @classmethod
    def find_deleted(cls, last_fetch):
        """Rows the client already has that were soft-deleted since then."""
        return cls.query.filter(
            cls.created <= datetime.fromtimestamp(last_fetch),
            cls.deleted > datetime.fromtimestamp(last_fetch)
        )

    @classmethod
    def find_updated(cls, last_fetch):
        """Rows the client already has that changed since `last_fetch`."""
        return cls.query.filter(
            cls.created <= datetime.fromtimestamp(last_fetch),
            cls.deleted == None,
            cls.updated > datetime.fromtimestamp(last_fetch)
        )

    def update(self, data):
        """Apply a dict of changes; "image" maps to the image_name column."""
        for k in data:
            if k == "image":
                setattr(self, "image_name", data[k])
            else:
                setattr(self, k, data[k])
        # Bump the audit timestamp on every update.
        setattr(self, "updated", datetime.now())

    def save_to_db(self):
        db.session.add(self)
        db.session.commit()

    def delete_from_db(self):
        """Soft-delete; refuses while live tasks/achievements reference it."""
        if self.achievements.filter_by(deleted=None).count() or self.tasks.filter_by(deleted=None).count():
            raise IntegrityError(
                "Cannot delete a category if it's associated with tasks or achievements", params=None, orig=None)
        self.deleted = datetime.now()
        db.session.add(self)
        db.session.commit()
{"/jwtmessages.py": ["/app.py"], "/models/category.py": ["/config.py"], "/resources/category.py": ["/models/category.py", "/config.py"], "/resources/task.py": ["/models/task.py", "/config.py"], "/resources/user.py": ["/config.py"], "/app.py": ["/resources/image.py", "/resources/category.py", "/resources/task.py", "/resources/achievement.py", "/resources/update.py", "/resources/user.py"], "/models/achievement.py": ["/models/category.py", "/config.py"], "/resources/image.py": ["/models/image.py"], "/config.py": ["/common/utils.py"], "/models/task.py": ["/models/category.py", "/config.py"], "/models/image.py": ["/models/category.py"], "/resources/achievement.py": ["/models/achievement.py", "/config.py"], "/resources/update.py": ["/resources/task.py", "/resources/achievement.py", "/resources/category.py"]}
36,598
UniversityProjectsAtUnisa/greenist-backend_afp
refs/heads/master
/resources/category.py
# REST resources for categories: single-item CRUD (Category) and the
# listing/sync endpoint (CategoryList).
from flask_restful import Resource, reqparse
from flask_jwt_extended import jwt_required, jwt_optional, get_jwt_identity
from models.category import CategoryModel
from sqlalchemy.exc import IntegrityError
from datetime import datetime
from config import DEBUG


class Category(Resource):
    """CRUD on a single category addressed by name; writes require a JWT."""

    parser = reqparse.RequestParser()
    parser.add_argument("image",
                        type=str,
                        required=True,
                        help="This field must not be empty."
                        )

    @classmethod
    def get(cls, name):
        category = CategoryModel.find_existing_by_name(name)
        if not category:
            return {"message": "Category not found."}, 404
        return category.json(), 200

    @classmethod
    @jwt_required
    def post(cls, name):
        # Reject duplicates among non-deleted categories.
        category = CategoryModel.find_existing_by_name(name)
        if category:
            return {"message": "Category with name '{}' already exists".format(name)}, 400
        data = cls.parser.parse_args()
        category = CategoryModel(name, **data)
        try:
            category.save_to_db()
        except IntegrityError as e:
            return {"database_exception": str(e)}, 400
        # NOTE(review): bare except swallows all other errors — at minimum
        # this should be logged before returning the 500.
        except:
            return {"message": "Internal error occurred during insertion."}, 500
        return category.json(), 201

    @classmethod
    @jwt_required
    def put(cls, name):
        # Upsert: create when missing, otherwise apply the changes.
        category = CategoryModel.find_existing_by_name(name)
        data = cls.parser.parse_args()
        if not category:
            category = CategoryModel(name, **data)
        else:
            category.update(data)
        try:
            category.save_to_db()
        except IntegrityError as e:
            return {"database_exception": str(e)}, 400
        except:
            return {"message": "Internal error occurred during the update."}, 500
        return category.json(), 201

    @classmethod
    @jwt_required
    def delete(cls, name):
        category = CategoryModel.find_existing_by_name(name)
        if not category:
            return {"message": "Category not found"}, 404
        try:
            # Soft delete; raises IntegrityError while tasks/achievements
            # still reference the category.
            category.delete_from_db()
        except IntegrityError as e:
            return {"database_exception": str(e)}, 400
        except Exception as e:
            return {"message": "Internal error occurred during deletion."}, 500
        return {"message": "Category deleted from database."}, 200


class CategoryList(Resource):
    """Full listing for authenticated users; incremental sync otherwise."""

    parser = reqparse.RequestParser()
    parser.add_argument("last_fetch",
                        type=float,
                        required=False,
                        help="This field must be a date in unix timestamp in float format.",
                        default=0.0
                        )

    @classmethod
    @jwt_optional
    def get(cls, last_fetch=None):
        user = get_jwt_identity()
        if user:
            # Authenticated clients get the complete live list.
            return {
                "categories": [category.json() for category in CategoryModel.find_all_existing()]
            }
        # Anonymous clients sync incrementally from `last_fetch` (URL value
        # takes precedence over the request-body argument).
        last_fetch = last_fetch if last_fetch is not None else cls.parser.parse_args()[
            "last_fetch"]
        if DEBUG:
            return {"new": [category.json() for category in CategoryModel.find_new(last_fetch)],
                    "deleted": [category.json() for category in CategoryModel.find_deleted(last_fetch)],
                    "updated": [category.json() for category in CategoryModel.find_updated(last_fetch)],
                    "all": [category.json() for category in CategoryModel.find_all()]
                    }
        return {"new": [category.json() for category in CategoryModel.find_new(last_fetch)],
                "deleted": [category.json() for category in CategoryModel.find_deleted(last_fetch)],
                "updated": [category.json() for category in CategoryModel.find_updated(last_fetch)]
                }
{"/jwtmessages.py": ["/app.py"], "/models/category.py": ["/config.py"], "/resources/category.py": ["/models/category.py", "/config.py"], "/resources/task.py": ["/models/task.py", "/config.py"], "/resources/user.py": ["/config.py"], "/app.py": ["/resources/image.py", "/resources/category.py", "/resources/task.py", "/resources/achievement.py", "/resources/update.py", "/resources/user.py"], "/models/achievement.py": ["/models/category.py", "/config.py"], "/resources/image.py": ["/models/image.py"], "/config.py": ["/common/utils.py"], "/models/task.py": ["/models/category.py", "/config.py"], "/models/image.py": ["/models/category.py"], "/resources/achievement.py": ["/models/achievement.py", "/config.py"], "/resources/update.py": ["/resources/task.py", "/resources/achievement.py", "/resources/category.py"]}
36,599
UniversityProjectsAtUnisa/greenist-backend_afp
refs/heads/master
/resources/task.py
from flask_restful import Resource, reqparse
from models.task import TaskModel
from flask_jwt_extended import get_jwt_identity, jwt_optional, jwt_required
from sqlalchemy.exc import IntegrityError
from datetime import datetime
from config import DEBUG

# Shared request parser for task create/update payloads.
parser = reqparse.RequestParser()
parser.add_argument("desc", type=str, required=True, help="Missing or incorrect field")
parser.add_argument("ecoPoints", type=int, required=True, help="Missing or incorrect field")
parser.add_argument("savings", type=float, required=True, help="Missing or incorrect field")
parser.add_argument("weekly", type=bool, required=True, help="Missing or incorrect field")
parser.add_argument("category", type=str, required=True, help="Missing or incorrect field")


class Task(Resource):
    """CRUD endpoints for a single task, addressed by integer id."""

    @classmethod
    def get(cls, id):
        """Return the non-deleted task as JSON, or 404 if absent."""
        task = TaskModel.find_existing_by_id(id)
        if not task:
            return {"message": "Task not found"}, 404
        return task.json(), 200

    @classmethod
    @jwt_required
    def put(cls, id):
        """Create the task if missing, otherwise update it in place."""
        data = parser.parse_args()
        task = TaskModel.find_existing_by_id(id)
        if not task:
            task = TaskModel(**data)
        else:
            task.update(data)
        try:
            task.save_to_db()
        except IntegrityError as e:
            return {"database_exception": str(e)}, 400
        except Exception:
            # was a bare `except:` -- narrowed so SystemExit/KeyboardInterrupt
            # are not silently swallowed
            return {"message": "Internal error occurred during the update."}, 500
        return task.json(), 201

    @classmethod
    @jwt_required
    def delete(cls, id):
        """Soft-delete the task; 404 if it does not exist."""
        task = TaskModel.find_existing_by_id(id)
        if not task:
            return {"message": "Task not found"}, 404
        try:
            task.delete_from_db()
        except IntegrityError as e:
            return {"database_exception": str(e)}, 400
        except Exception:
            # bug fix: message previously said "during the update" on deletion
            return {"message": "Internal error occurred during deletion."}, 500
        return {"message": "Task deleted from database"}, 200


class TaskCreator(Resource):
    """POST-only endpoint to create a new task (id assigned by the DB)."""

    @classmethod
    @jwt_required
    def post(cls):
        data = parser.parse_args()
        try:
            task = TaskModel(**data)
            task.save_to_db()
        except IntegrityError as e:
            return {"database_exception": str(e)}, 400
        except Exception:
            return {"message": "Internal error occurred during insertion."}, 500
        return task.json(), 201


class TaskList(Resource):
    """List endpoint: full list for authenticated users, deltas otherwise."""

    parser = reqparse.RequestParser()
    parser.add_argument("last_fetch",
                        type=float,
                        required=False,
                        help="This field must be a date in unix timestamp in float format.",
                        default=0.0
                        )

    @classmethod
    @jwt_optional
    def get(cls, last_fetch=None):
        user = get_jwt_identity()
        if user:
            return {
                "tasks": [task.json() for task in TaskModel.find_all_existing()]
            }
        # `last_fetch` is either passed directly (internal call from
        # resources/update.py) or parsed from the request arguments.
        if last_fetch is None:
            last_fetch = cls.parser.parse_args()["last_fetch"]
        result = {
            "new": [task.json() for task in TaskModel.find_new(last_fetch)],
            "deleted": [task.json() for task in TaskModel.find_deleted(last_fetch)],
            "updated": [task.json() for task in TaskModel.find_updated(last_fetch)]
        }
        if DEBUG:
            # debug builds additionally expose the complete table
            result["all"] = [task.json() for task in TaskModel.find_all()]
        return result
{"/jwtmessages.py": ["/app.py"], "/models/category.py": ["/config.py"], "/resources/category.py": ["/models/category.py", "/config.py"], "/resources/task.py": ["/models/task.py", "/config.py"], "/resources/user.py": ["/config.py"], "/app.py": ["/resources/image.py", "/resources/category.py", "/resources/task.py", "/resources/achievement.py", "/resources/update.py", "/resources/user.py"], "/models/achievement.py": ["/models/category.py", "/config.py"], "/resources/image.py": ["/models/image.py"], "/config.py": ["/common/utils.py"], "/models/task.py": ["/models/category.py", "/config.py"], "/models/image.py": ["/models/category.py"], "/resources/achievement.py": ["/models/achievement.py", "/config.py"], "/resources/update.py": ["/resources/task.py", "/resources/achievement.py", "/resources/category.py"]}
36,600
UniversityProjectsAtUnisa/greenist-backend_afp
refs/heads/master
/resources/user.py
from config import MASTER_PASSWORD
from models.user import UserModel
from flask_restful import Resource, reqparse
from werkzeug.security import generate_password_hash, check_password_hash
from flask_jwt_extended import (
    create_access_token,
    create_refresh_token,
    jwt_required,
    jwt_refresh_token_required,
    get_jwt_identity,
    get_raw_jwt
)
from blacklist import BLACKLIST


class User(Resource):
    """Read/delete endpoints for a single user, addressed by integer id."""

    @classmethod
    @jwt_required
    def get(cls, id):
        user = UserModel.find_by_id(id)
        if not user:
            return {"message": "User not found"}, 404
        # bug fix: previously returned 201 (Created) on a plain GET
        return user.json(), 200

    @classmethod
    @jwt_required
    def delete(cls, id):
        user = UserModel.find_by_id(id)
        if not user:
            return {"message": "User not found"}, 404
        user.delete_from_db()
        return {"message": "User deleted"}, 200


# Shared parser for register/login payloads.
_user_parser = reqparse.RequestParser()
_user_parser.add_argument("username",
                          type=str,
                          required=True,
                          help="Username should be non-empty string"
                          )
_user_parser.add_argument("password",
                          type=str,
                          required=True,
                          help="Password should be non-empty string"
                          )
_user_parser.add_argument("master_password",
                          type=str,
                          required=False,
                          help="This field must be a string")


class UserList(Resource):
    @classmethod
    @jwt_required
    def get(cls):
        """Return every user as a JSON list (authenticated access only)."""
        return [user.json() for user in UserModel.find_all()]


class UserRegister(Resource):
    @classmethod
    def post(cls):
        """Create a new user, gated by a shared master password."""
        data = _user_parser.parse_args()
        if UserModel.find_by_username(data["username"]):
            return {"message": "User with username '{}' already exists".format(data["username"])}, 400
        # `master_password` is optional in the parser so it may be None;
        # check_password_hash would raise a TypeError on None, hence the guard.
        if not data["master_password"] or \
                not check_password_hash(MASTER_PASSWORD, data["master_password"]):
            # bug fix: previously returned without a status code (implicit 200)
            return {"message": "Wrong master_password, please try again or contact your administrator"}, 401
        user = UserModel(
            data["username"],
            generate_password_hash(data["password"], "sha256")
        )
        user.save_to_db()
        return {"message": "User created successfully"}, 201


class UserLogin(Resource):
    @classmethod
    def post(cls):
        """Verify credentials and issue a fresh access + refresh token pair."""
        data = _user_parser.parse_args()
        user = UserModel.find_by_username(data['username'])
        if not user:
            return {"message": "User not found"}, 404
        if not check_password_hash(user.password, data["password"]):
            return {"message": "Incorrect password"}, 401  # Not authorized
        access_token = create_access_token(identity=user.id, fresh=True)
        refresh_token = create_refresh_token(identity=user.id)
        return {"access_token": access_token, "refresh_token": refresh_token}


class UserLogout(Resource):
    @classmethod
    @jwt_required
    def post(cls):
        """Blacklist the current token so it can no longer be used."""
        # jti is "JWT ID", a unique identifier for a JWT
        jti = get_raw_jwt()['jti']
        BLACKLIST.add(jti)
        return {"message": "Successfully logged out"}


class TokenRefresh(Resource):
    @classmethod
    @jwt_refresh_token_required
    def post(cls):
        """Exchange a valid refresh token for a new, non-fresh access token."""
        current_user = get_jwt_identity()
        new_token = create_access_token(identity=current_user, fresh=False)
        return {"access_token": new_token}
{"/jwtmessages.py": ["/app.py"], "/models/category.py": ["/config.py"], "/resources/category.py": ["/models/category.py", "/config.py"], "/resources/task.py": ["/models/task.py", "/config.py"], "/resources/user.py": ["/config.py"], "/app.py": ["/resources/image.py", "/resources/category.py", "/resources/task.py", "/resources/achievement.py", "/resources/update.py", "/resources/user.py"], "/models/achievement.py": ["/models/category.py", "/config.py"], "/resources/image.py": ["/models/image.py"], "/config.py": ["/common/utils.py"], "/models/task.py": ["/models/category.py", "/config.py"], "/models/image.py": ["/models/category.py"], "/resources/achievement.py": ["/models/achievement.py", "/config.py"], "/resources/update.py": ["/resources/task.py", "/resources/achievement.py", "/resources/category.py"]}
36,601
UniversityProjectsAtUnisa/greenist-backend_afp
refs/heads/master
/app.py
from flask_restful import Api from flask_jwt_extended import JWTManager from blacklist import BLACKLIST from resources.image import Image, ImageList from resources.category import Category, CategoryList from resources.task import Task, TaskCreator, TaskList from resources.achievement import Achievement, AchievementList from resources.update import Update from resources.user import UserRegister, UserLogin, UserLogout, User, UserList, TokenRefresh def create_app(): from flask import Flask app = Flask(__name__) app.config.from_object('config.Config') return app app = create_app() api = Api(app) @app.route("/") def home(): return "Hello World" jwt = JWTManager(app) api.add_resource(ImageList, "/images") api.add_resource(Image, "/image/<string:name>") api.add_resource(CategoryList, "/categories") api.add_resource(Category, "/category/<string:name>") api.add_resource(TaskList, "/tasks") api.add_resource(TaskCreator, "/task") api.add_resource(Task, "/task/<int:id>") api.add_resource(AchievementList, "/achievements") api.add_resource(Achievement, "/achievement/<string:name>") api.add_resource(Update, "/update") api.add_resource(UserRegister, "/register") api.add_resource(UserLogin, "/login") api.add_resource(UserLogout, "/logout") api.add_resource(User, "/user/<int:id>") api.add_resource(UserList, "/users") api.add_resource(TokenRefresh, "/refresh") @jwt.token_in_blacklist_loader def check_if_token_in_blacklist(decrypted_token): jti = decrypted_token['jti'] return jti in BLACKLIST if __name__ == "__main__": from db import db db.init_app(app) @app.before_first_request def create_tables(): db.create_all() app.run()
{"/jwtmessages.py": ["/app.py"], "/models/category.py": ["/config.py"], "/resources/category.py": ["/models/category.py", "/config.py"], "/resources/task.py": ["/models/task.py", "/config.py"], "/resources/user.py": ["/config.py"], "/app.py": ["/resources/image.py", "/resources/category.py", "/resources/task.py", "/resources/achievement.py", "/resources/update.py", "/resources/user.py"], "/models/achievement.py": ["/models/category.py", "/config.py"], "/resources/image.py": ["/models/image.py"], "/config.py": ["/common/utils.py"], "/models/task.py": ["/models/category.py", "/config.py"], "/models/image.py": ["/models/category.py"], "/resources/achievement.py": ["/models/achievement.py", "/config.py"], "/resources/update.py": ["/resources/task.py", "/resources/achievement.py", "/resources/category.py"]}
36,602
UniversityProjectsAtUnisa/greenist-backend_afp
refs/heads/master
/models/achievement.py
from db import db
from datetime import datetime
from models.category import CategoryModel
from config import DEBUG


class AchievementModel(db.Model):
    """Achievement row with soft-delete (`deleted` timestamp) semantics."""

    __tablename__ = "achievements"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(20), nullable=False)
    desc = db.Column(db.String(128), nullable=False)
    goal = db.Column(db.Integer, nullable=False)
    below = db.Column(db.Boolean, nullable=False)
    created = db.Column(db.DateTime, server_default=db.func.now())
    updated = db.Column(db.DateTime, server_default=db.func.now())
    deleted = db.Column(db.DateTime, server_default=None)
    category_id = db.Column(db.Integer,
                            db.ForeignKey("categories.id"),
                            nullable=False)
    category = db.relationship("CategoryModel")
    __table_args__ = (db.CheckConstraint(goal > 0, name="positive_goal"),)

    def __init__(self, name, desc, goal, below, category):
        self.name = name
        self.desc = desc
        self.goal = goal
        self.below = below
        # Resolve the category by name; None if it does not exist (the
        # NOT NULL constraint will then fail at commit time).
        self.category_id = getattr(
            CategoryModel.find_existing_by_name(category), "id", None)

    def json(self):
        """Serialize; DEBUG builds additionally expose id and timestamps."""
        if DEBUG:
            return {
                "id": self.id,
                "name": self.name,
                "desc": self.desc,
                "goal": self.goal,
                "below": self.below,
                "category": getattr(self.category, "name", None),
                "image": getattr(self.category, "image_name", None),
                "created": self.created.timestamp(),
                "updated": self.updated.timestamp(),
                "deleted": None if self.deleted is None else self.deleted.timestamp()
            }
        return {
            "name": self.name,
            "desc": self.desc,
            "goal": self.goal,
            "below": self.below,
            "category": getattr(self.category, "name", None),
            "image": getattr(self.category, "image_name", None)
        }

    @classmethod
    def find_by_name(cls, name):
        return cls.query.filter_by(name=name).first()

    @classmethod
    def find_existing_by_name(cls, name):
        """First non-soft-deleted row with this name, or None."""
        return cls.query.filter_by(name=name).filter_by(deleted=None).first()

    @classmethod
    def find_all(cls):
        return cls.query.all()

    @classmethod
    def find_all_existing(cls):
        # Bug fix: this method was missing although AchievementList.get in
        # resources/achievement.py calls it for authenticated users.
        return cls.query.filter_by(deleted=None).all()

    @classmethod
    def find_new(cls, last_fetch):
        # `== None` is intentional: SQLAlchemy translates it to IS NULL.
        return cls.query.filter(
            cls.created > datetime.fromtimestamp(last_fetch),
            cls.deleted == None
        )

    @classmethod
    def find_deleted(cls, last_fetch):
        """Rows the client has seen that were soft-deleted since last_fetch."""
        return cls.query.filter(
            cls.created <= datetime.fromtimestamp(last_fetch),
            cls.deleted > datetime.fromtimestamp(last_fetch)
        )

    @classmethod
    def find_updated(cls, last_fetch):
        """Live rows the client has seen that changed since last_fetch."""
        return cls.query.filter(
            cls.created <= datetime.fromtimestamp(last_fetch),
            cls.deleted == None,
            cls.updated > datetime.fromtimestamp(last_fetch)
        )

    def update(self, data):
        """Apply parsed request fields; 'category' is resolved to its id."""
        for k in data:
            if k == "category":
                setattr(self, "category_id",
                        getattr(CategoryModel.find_existing_by_name(data[k]), "id", None))
            else:
                setattr(self, k, data[k])
        setattr(self, "updated", datetime.now())

    def save_to_db(self):
        db.session.add(self)
        db.session.commit()

    def delete_from_db(self):
        # Soft delete: mark the row instead of removing it.
        self.deleted = datetime.now()
        db.session.add(self)
        db.session.commit()
{"/jwtmessages.py": ["/app.py"], "/models/category.py": ["/config.py"], "/resources/category.py": ["/models/category.py", "/config.py"], "/resources/task.py": ["/models/task.py", "/config.py"], "/resources/user.py": ["/config.py"], "/app.py": ["/resources/image.py", "/resources/category.py", "/resources/task.py", "/resources/achievement.py", "/resources/update.py", "/resources/user.py"], "/models/achievement.py": ["/models/category.py", "/config.py"], "/resources/image.py": ["/models/image.py"], "/config.py": ["/common/utils.py"], "/models/task.py": ["/models/category.py", "/config.py"], "/models/image.py": ["/models/category.py"], "/resources/achievement.py": ["/models/achievement.py", "/config.py"], "/resources/update.py": ["/resources/task.py", "/resources/achievement.py", "/resources/category.py"]}
36,603
UniversityProjectsAtUnisa/greenist-backend_afp
refs/heads/master
/resources/image.py
from flask_restful import Resource from models.image import ImageModel from flask_jwt_extended import jwt_required, jwt_optional, get_jwt_identity from sqlalchemy.exc import IntegrityError class Image(Resource): @classmethod def get(cls, name): image = ImageModel.find_by_name(name) if not image: return {"message": "Image not found."}, 404 return image.json(), 200 @classmethod @jwt_required def post(cls, name): image = ImageModel.find_by_name(name) if image: return {"message": "An image with name '{}' already exists.".format(name)}, 400 image = ImageModel(name) try: image.save_to_db() except IntegrityError as e: return {"database_exception": str(e)}, 400 except: return {"message": "Internal error occurred during insertion."}, 500 return image.json(), 201 @classmethod @jwt_required def put(cls, name): image = ImageModel.find_by_name(name) if not image: image = ImageModel(name) else: image.name = name try: image.save_to_db() except IntegrityError as e: return {"database_exception": str(e)}, 400 except: return {"message": "Internal error occurred during the update."}, 500 return image.json(), 201 @classmethod @jwt_required def delete(cls, name): image = ImageModel.find_by_name(name) if not image: return {"message": "Image not found"}, 404 try: image.delete_from_db() except IntegrityError as e: return {"database_exception": str(e)}, 400 except Exception as e: return {"message": "Internal error occurred during deletion."+str(e)}, 500 return {"message": "Image deleted from database."}, 200 class ImageList(Resource): @classmethod def get(cls): return {"images": [image.json() for image in ImageModel.find_all()]}, 200
{"/jwtmessages.py": ["/app.py"], "/models/category.py": ["/config.py"], "/resources/category.py": ["/models/category.py", "/config.py"], "/resources/task.py": ["/models/task.py", "/config.py"], "/resources/user.py": ["/config.py"], "/app.py": ["/resources/image.py", "/resources/category.py", "/resources/task.py", "/resources/achievement.py", "/resources/update.py", "/resources/user.py"], "/models/achievement.py": ["/models/category.py", "/config.py"], "/resources/image.py": ["/models/image.py"], "/config.py": ["/common/utils.py"], "/models/task.py": ["/models/category.py", "/config.py"], "/models/image.py": ["/models/category.py"], "/resources/achievement.py": ["/models/achievement.py", "/config.py"], "/resources/update.py": ["/resources/task.py", "/resources/achievement.py", "/resources/category.py"]}
36,604
UniversityProjectsAtUnisa/greenist-backend_afp
refs/heads/master
/config.py
from os import getenv from common.utils import get_env_variable from dotenv import load_dotenv from pathlib import Path # python3 only env_path = Path('.') / '.env' load_dotenv(dotenv_path=env_path) # Dynamic (Environmental) configurations # Crashes if not present in environment SECRET_KEY = get_env_variable('SECRET_KEY') MASTER_PASSWORD = get_env_variable('MASTER_PASSWORD') # DB_URL is postgres one if launched from Heroku or sqlite one for local testing DB_URL = getenv("DATABASE_URL", 'sqlite:///data.db') # Debug and testing only if explicitly stated in environment DEBUG = getenv("DEBUG") == "TRUE" TESTING = getenv("TESTING") == "TRUE" ENVIRONMENT = "development" if getenv("DEVELOPMENT") == "TRUE" else "production" class Config: """Flask config class.""" SQLALCHEMY_DATABASE_URI = DB_URL # silence the deprecation warning SQLALCHEMY_TRACK_MODIFICATIONS = False PROPAGATE_EXCEPTIONS = True # jwt configs JWT_BLACKLIST_ENABLED = True # enable blacklist feature # allow blacklisting for access and refresh tokens JWT_BLACKLIST_TOKEN_CHECKS = ["access", "refresh"] # token expire time in seconds JWT_TOKEN_EXPIRES = 3600 JWT_SECRET_KEY = SECRET_KEY DEBUG = DEBUG TESTING = TESTING ENV = "development"
{"/jwtmessages.py": ["/app.py"], "/models/category.py": ["/config.py"], "/resources/category.py": ["/models/category.py", "/config.py"], "/resources/task.py": ["/models/task.py", "/config.py"], "/resources/user.py": ["/config.py"], "/app.py": ["/resources/image.py", "/resources/category.py", "/resources/task.py", "/resources/achievement.py", "/resources/update.py", "/resources/user.py"], "/models/achievement.py": ["/models/category.py", "/config.py"], "/resources/image.py": ["/models/image.py"], "/config.py": ["/common/utils.py"], "/models/task.py": ["/models/category.py", "/config.py"], "/models/image.py": ["/models/category.py"], "/resources/achievement.py": ["/models/achievement.py", "/config.py"], "/resources/update.py": ["/resources/task.py", "/resources/achievement.py", "/resources/category.py"]}
36,605
UniversityProjectsAtUnisa/greenist-backend_afp
refs/heads/master
/common/utils.py
def get_env_variable(name): """Makes the system crash if the environment is not set correctly Arguments: name {string} -- the environment variable to retrieve Raises: Exception: if the chosen environment variable is missing Returns: string -- the chosen environment variable """ from os import environ try: return environ[name] except KeyError: message = "Expected environment variable '{}' not set.".format(name) print(message) return ""
{"/jwtmessages.py": ["/app.py"], "/models/category.py": ["/config.py"], "/resources/category.py": ["/models/category.py", "/config.py"], "/resources/task.py": ["/models/task.py", "/config.py"], "/resources/user.py": ["/config.py"], "/app.py": ["/resources/image.py", "/resources/category.py", "/resources/task.py", "/resources/achievement.py", "/resources/update.py", "/resources/user.py"], "/models/achievement.py": ["/models/category.py", "/config.py"], "/resources/image.py": ["/models/image.py"], "/config.py": ["/common/utils.py"], "/models/task.py": ["/models/category.py", "/config.py"], "/models/image.py": ["/models/category.py"], "/resources/achievement.py": ["/models/achievement.py", "/config.py"], "/resources/update.py": ["/resources/task.py", "/resources/achievement.py", "/resources/category.py"]}
36,606
UniversityProjectsAtUnisa/greenist-backend_afp
refs/heads/master
/models/task.py
from db import db
from datetime import datetime
from models.category import CategoryModel
from config import DEBUG


class TaskModel(db.Model):
    """Task row with soft-delete (`deleted` timestamp) semantics."""

    __tablename__ = "tasks"
    id = db.Column(db.Integer, primary_key=True)
    desc = db.Column(db.String(128), nullable=False)
    ecoPoints = db.Column(db.Integer, nullable=False)
    # fixed-point money-like value; serialized with 2 decimals in json()
    savings = db.Column(db.DECIMAL(precision=8, scale=2), nullable=False)
    weekly = db.Column(db.Boolean, nullable=False)
    created = db.Column(db.DateTime, server_default=db.func.now())
    updated = db.Column(db.DateTime, server_default=db.func.now())
    # NULL means "not deleted"; a timestamp marks a soft delete
    deleted = db.Column(db.DateTime, server_default=None)
    category_id = db.Column(db.Integer,
                            db.ForeignKey("categories.id"),
                            nullable=False)
    category = db.relationship("CategoryModel")

    def __init__(self, desc, ecoPoints, savings, weekly, category):
        self.desc = desc
        self.ecoPoints = ecoPoints
        self.savings = savings
        self.weekly = weekly
        # resolve the category by name; None if it does not exist (the
        # NOT NULL constraint will then fail at commit time)
        self.category_id = getattr(CategoryModel.find_existing_by_name(category),
                                   "id", None)

    def json(self):
        """Serialize; DEBUG builds additionally expose the timestamps."""
        if DEBUG:
            return {
                "id": self.id,
                "desc": self.desc,
                "ecoPoints": self.ecoPoints,
                "savings": format(float(self.savings), ".2f"),
                "weekly": self.weekly,
                "category": getattr(CategoryModel.find_existing_by_id(self.category_id),
                                    "name", None),
                "created": self.created.timestamp(),
                "updated": self.updated.timestamp(),
                "deleted": None if self.deleted is None else self.deleted.timestamp()
            }
        return {
            "id": self.id,
            "desc": self.desc,
            "ecoPoints": self.ecoPoints,
            "savings": format(float(self.savings), ".2f"),
            "weekly": self.weekly,
            "category": getattr(CategoryModel.find_existing_by_id(self.category_id),
                                "name", None)
        }

    @classmethod
    def find_by_id(cls, id):
        return cls.query.filter_by(id=id).first()

    @classmethod
    def find_existing_by_id(cls, id):
        # only rows that have not been soft-deleted
        return cls.query.filter_by(id=id).filter_by(deleted=None).first()

    @classmethod
    def find_all(cls):
        return cls.query.all()

    @classmethod
    def find_all_existing(cls):
        return cls.query.filter_by(deleted=None).all()

    @classmethod
    def find_new(cls, last_fetch):
        # returns a lazy Query (callers iterate it directly);
        # `== None` is intentional: SQLAlchemy translates it to IS NULL
        return cls.query.filter(
            cls.created > datetime.fromtimestamp(last_fetch),
            cls.deleted == None
        )

    @classmethod
    def find_deleted(cls, last_fetch):
        # rows the client has already seen that were soft-deleted since
        return cls.query.filter(
            cls.created <= datetime.fromtimestamp(last_fetch),
            cls.deleted > datetime.fromtimestamp(last_fetch)
        )

    @classmethod
    def find_updated(cls, last_fetch):
        # live rows the client has already seen that changed since
        return cls.query.filter(
            cls.created <= datetime.fromtimestamp(last_fetch),
            cls.deleted == None,
            cls.updated > datetime.fromtimestamp(last_fetch)
        )

    def update(self, data):
        """Apply parsed request fields; 'category' is resolved to its id."""
        for k in data:
            if k == "category":
                setattr(self, "category_id",
                        getattr(CategoryModel.find_existing_by_name(data[k]), "id", None))
            else:
                setattr(self, k, data[k])
        setattr(self, "updated", datetime.now())

    def save_to_db(self):
        db.session.add(self)
        db.session.commit()

    def delete_from_db(self):
        # soft delete: mark the row instead of removing it
        self.deleted = datetime.now()
        db.session.add(self)
        db.session.commit()
{"/jwtmessages.py": ["/app.py"], "/models/category.py": ["/config.py"], "/resources/category.py": ["/models/category.py", "/config.py"], "/resources/task.py": ["/models/task.py", "/config.py"], "/resources/user.py": ["/config.py"], "/app.py": ["/resources/image.py", "/resources/category.py", "/resources/task.py", "/resources/achievement.py", "/resources/update.py", "/resources/user.py"], "/models/achievement.py": ["/models/category.py", "/config.py"], "/resources/image.py": ["/models/image.py"], "/config.py": ["/common/utils.py"], "/models/task.py": ["/models/category.py", "/config.py"], "/models/image.py": ["/models/category.py"], "/resources/achievement.py": ["/models/achievement.py", "/config.py"], "/resources/update.py": ["/resources/task.py", "/resources/achievement.py", "/resources/category.py"]}
36,607
UniversityProjectsAtUnisa/greenist-backend_afp
refs/heads/master
/models/image.py
from db import db from models.category import CategoryModel from sqlalchemy.exc import IntegrityError class ImageModel(db.Model): __tablename__ = "images" name = db.Column(db.String(20), primary_key=True) categories = db.relationship("CategoryModel") def __init__(self, name): self.name = name def json(self): return {"name": self.name} def is_taken(self): return len(categories) > 0 @classmethod def find_by_name(cls, name): return cls.query.filter_by(name=name).first() @classmethod def find_all(cls): return cls.query.all() @classmethod def find_all_existing(cls): return cls.query.filter_by(deleted=None).all() def save_to_db(self): db.session.add(self) db.session.commit() def delete_from_db(self): for category in self.categories: if not category.deleted: raise IntegrityError("Cannot delete an image if it's associated with existing categories", params=None, orig=None) for category in self.categories: category.image = None db.session.delete(self) db.session.commit()
{"/jwtmessages.py": ["/app.py"], "/models/category.py": ["/config.py"], "/resources/category.py": ["/models/category.py", "/config.py"], "/resources/task.py": ["/models/task.py", "/config.py"], "/resources/user.py": ["/config.py"], "/app.py": ["/resources/image.py", "/resources/category.py", "/resources/task.py", "/resources/achievement.py", "/resources/update.py", "/resources/user.py"], "/models/achievement.py": ["/models/category.py", "/config.py"], "/resources/image.py": ["/models/image.py"], "/config.py": ["/common/utils.py"], "/models/task.py": ["/models/category.py", "/config.py"], "/models/image.py": ["/models/category.py"], "/resources/achievement.py": ["/models/achievement.py", "/config.py"], "/resources/update.py": ["/resources/task.py", "/resources/achievement.py", "/resources/category.py"]}
36,608
UniversityProjectsAtUnisa/greenist-backend_afp
refs/heads/master
/resources/achievement.py
from flask_restful import Resource, reqparse
from models.achievement import AchievementModel
from flask_jwt_extended import jwt_required, jwt_optional, get_jwt_identity
from sqlalchemy.exc import IntegrityError
from datetime import datetime
from config import DEBUG


class Achievement(Resource):
    """CRUD endpoints for a single achievement, addressed by name."""

    parser = reqparse.RequestParser()
    parser.add_argument("desc", type=str, required=True, help="Missing or incorrect field")
    parser.add_argument("goal", type=int, required=True, help="Missing or incorrect field")
    parser.add_argument("below", type=bool, required=True, help="Missing or incorrect field")
    parser.add_argument("category", type=str, required=True, help="Missing or incorrect field")

    @classmethod
    def get(cls, name):
        """Return the non-deleted achievement, or 404 if absent."""
        achievement = AchievementModel.find_existing_by_name(name)
        if not achievement:
            return {"message": "Achievement not found"}, 404
        return achievement.json(), 200

    @classmethod
    @jwt_required
    def post(cls, name):
        """Create a new achievement; 400 if one with this name exists."""
        achievement = AchievementModel.find_existing_by_name(name)
        if achievement:
            return {"message": "An Achievement with name '{}' already exists".format(name)}, 400
        data = cls.parser.parse_args()
        achievement = AchievementModel(name, **data)
        try:
            achievement.save_to_db()
        except IntegrityError as e:
            return {"database_exception": str(e)}, 400
        except Exception:
            # was a bare `except:` -- narrowed so SystemExit/KeyboardInterrupt
            # are not silently swallowed
            return {"message": "Internal error occurred during insertion."}, 500
        return achievement.json(), 201

    @classmethod
    @jwt_required
    def put(cls, name):
        """Create the achievement if missing, otherwise update in place."""
        data = cls.parser.parse_args()
        achievement = AchievementModel.find_existing_by_name(name)
        if not achievement:
            # removed leftover debug print(data)
            achievement = AchievementModel(name, **data)
        else:
            achievement.update(data)
        try:
            achievement.save_to_db()
        except IntegrityError as e:
            return {"database_exception": str(e)}, 400
        except Exception:
            return {"message": "Internal error occurred during the update."}, 500
        return achievement.json(), 201

    @classmethod
    @jwt_required
    def delete(cls, name):
        """Soft-delete the achievement; 404 if it does not exist."""
        achievement = AchievementModel.find_existing_by_name(name)
        if not achievement:
            return {"message": "Achievement not found"}, 404
        try:
            achievement.delete_from_db()
        except IntegrityError as e:
            return {"database_exception": str(e)}, 400
        except Exception:
            # bug fix: message previously said "during the update" on deletion
            return {"message": "Internal error occurred during deletion."}, 500
        return {"message": "Achievement deleted from database"}, 200


class AchievementList(Resource):
    """List endpoint: full list for authenticated users, deltas otherwise."""

    parser = reqparse.RequestParser()
    parser.add_argument("last_fetch",
                        type=float,
                        required=False,
                        help="This field must be a date in unix timestamp in float format.",
                        default=0.0
                        )

    @classmethod
    @jwt_optional
    def get(cls, last_fetch=None):
        user = get_jwt_identity()
        if user:
            return {
                "achievement": [achievement.json() for achievement in AchievementModel.find_all_existing()]
            }
        # `last_fetch` is either passed directly (internal call from
        # resources/update.py) or parsed from the request arguments.
        if last_fetch is None:
            last_fetch = cls.parser.parse_args()["last_fetch"]
        result = {
            "new": [achievement.json() for achievement in AchievementModel.find_new(last_fetch)],
            "deleted": [achievement.json() for achievement in AchievementModel.find_deleted(last_fetch)],
            "updated": [achievement.json() for achievement in AchievementModel.find_updated(last_fetch)]
        }
        if DEBUG:
            # debug builds additionally expose the complete table
            result["all"] = [achievement.json() for achievement in AchievementModel.find_all()]
        return result
{"/jwtmessages.py": ["/app.py"], "/models/category.py": ["/config.py"], "/resources/category.py": ["/models/category.py", "/config.py"], "/resources/task.py": ["/models/task.py", "/config.py"], "/resources/user.py": ["/config.py"], "/app.py": ["/resources/image.py", "/resources/category.py", "/resources/task.py", "/resources/achievement.py", "/resources/update.py", "/resources/user.py"], "/models/achievement.py": ["/models/category.py", "/config.py"], "/resources/image.py": ["/models/image.py"], "/config.py": ["/common/utils.py"], "/models/task.py": ["/models/category.py", "/config.py"], "/models/image.py": ["/models/category.py"], "/resources/achievement.py": ["/models/achievement.py", "/config.py"], "/resources/update.py": ["/resources/task.py", "/resources/achievement.py", "/resources/category.py"]}
36,609
UniversityProjectsAtUnisa/greenist-backend_afp
refs/heads/master
/resources/update.py
from flask_restful import Resource, reqparse from resources.task import TaskList from resources.achievement import AchievementList from resources.category import CategoryList from datetime import datetime class Update(Resource): parser = reqparse.RequestParser() parser.add_argument("last_fetch", type=float, required=False, help="This field must be a date in unix timestamp in float format.", default=0.0 ) @classmethod def post(cls): data = cls.parser.parse_args() print(data["last_fetch"]) categories = CategoryList.get(data["last_fetch"]) achievements = AchievementList.get(data["last_fetch"]) tasks = TaskList.get(data["last_fetch"]) return {"last_fetch": datetime.now().timestamp(), "categories": categories, "achievements": achievements, "tasks": tasks}
{"/jwtmessages.py": ["/app.py"], "/models/category.py": ["/config.py"], "/resources/category.py": ["/models/category.py", "/config.py"], "/resources/task.py": ["/models/task.py", "/config.py"], "/resources/user.py": ["/config.py"], "/app.py": ["/resources/image.py", "/resources/category.py", "/resources/task.py", "/resources/achievement.py", "/resources/update.py", "/resources/user.py"], "/models/achievement.py": ["/models/category.py", "/config.py"], "/resources/image.py": ["/models/image.py"], "/config.py": ["/common/utils.py"], "/models/task.py": ["/models/category.py", "/config.py"], "/models/image.py": ["/models/category.py"], "/resources/achievement.py": ["/models/achievement.py", "/config.py"], "/resources/update.py": ["/resources/task.py", "/resources/achievement.py", "/resources/category.py"]}
36,610
maybeluwang/pytorch_FFTNet
refs/heads/master
/FFTNet_vocoder.py
"""Train an FFTNet vocoder on the torchaudio YesNo dataset and synthesize audio.

Pipeline: extract WORLD f0 + mel-cepstrum features, mu-law quantize the
waveform, train with cross-entropy on next-sample prediction, then run the
trained net autoregressively to write a wav file.
"""
import torch
import torch.backends.cudnn as cudnn
import torch.optim as optim
import torch.nn.functional as F
from torch.utils.data import DataLoader, TensorDataset
import torchaudio
from torchaudio import transforms
import numpy as np
from scipy.signal import resample
from scipy.interpolate import interp1d
from sklearn.preprocessing import StandardScaler
from datetime import datetime
import argparse
# import matplotlib.pyplot as plt
from models import general_FFTNet
from python_speech_features import mfcc
import pyworld as pw
import pysptk as sptk

parser = argparse.ArgumentParser(description='FFTNet vocoder.')
parser.add_argument('outfile', type=str, help='output file name')
parser.add_argument('--seq_M', type=int, default=2500, help='training sequence length')
parser.add_argument('--depth', type=int, default=10, help='model depth. The receptive field will be 2^depth.')
parser.add_argument('--batch_size', type=int, default=10)
parser.add_argument('--channels', type=int, default=256, help='quantization channels')
parser.add_argument('--lr', type=float, default=0.001, help='learning rate')
parser.add_argument('--steps', type=int, default=50000, help='iteration number')
parser.add_argument('-c', type=float, default=2., help='a constant multiply before softmax layer in generation')
parser.add_argument('--file_size', type=float, default=5., help='generated wav file size (in seconds)')
parser.add_argument('--feature_size', type=int, default=26, help='generated wav file size (in seconds)')

# Analysis settings: 8 kHz audio, 10 ms hop, 25 ms window.
sr = 8000
winstep = 0.01
winlen = 0.025

if __name__ == '__main__':
    args = parser.parse_args()
    seq_M = args.seq_M
    batch_size = args.batch_size
    depth = args.depth
    radixs = [2] * depth          # all-radix-2 stack -> receptive field 2^depth
    N = np.prod(radixs)
    channels = args.channels
    lr = args.lr
    steps = args.steps
    c = args.c
    generation_time = args.file_size
    filename = args.outfile
    features_size = args.feature_size

    print('==> Downloading YesNo Dataset..')
    transform = transforms.Compose([transforms.Scale()])
    data = torchaudio.datasets.YESNO('./data', download=True, transform=transform)
    data_loader = DataLoader(data, batch_size=1, num_workers=2)

    print('==> Extracting features..')
    train_wav = []
    train_features = []
    train_targets = []
    for batch_idx, (inputs, _) in enumerate(data_loader):
        inputs = inputs.view(-1).numpy()
        # Target is the next sample (shifted by one).
        targets = np.roll(inputs, shift=-1)
        #h = mfcc(inputs, sr, winlen=winlen, winstep=winstep, numcep=features_size - 1, winfunc=np.hamming)
        x = inputs.astype(float)
        # WORLD analysis: f0 track + spectral envelope -> mel-cepstrum.
        f0, t = pw.dio(x, sr, f0_floor=40, f0_ceil=500, frame_period=winstep * 1000)
        f0 = pw.stonemask(x, f0, t, sr)
        spc = pw.cheaptrick(x, f0, t, sr)
        mcep = sptk.sp2mc(spc, features_size - 2, 0.31)
        # Conditioning feature = mcep frames with f0 appended as last column.
        h = np.hstack((mcep, f0[:, None]))

        # interpolation: upsample frame-rate features to sample rate.
        x = np.arange(h.shape[0]) * winstep * sr
        f = interp1d(x, h, copy=False, axis=0)
        inputs = inputs[:x[-1].astype(int)]
        targets = targets[:x[-1].astype(int)]
        # Truncate to a multiple of seq_M so sequences reshape cleanly.
        inputs = inputs[:len(inputs) // seq_M * seq_M]
        targets = targets[:len(targets) // seq_M * seq_M]
        h = f(np.arange(1, len(inputs) + 1))

        train_wav.append(inputs)
        train_features.append(h)
        train_targets.append(targets)

    train_wav = np.concatenate(train_wav)
    train_features = np.vstack(train_features)
    train_targets = np.concatenate(train_targets)

    # Mu-law quantize waveform/targets; standardize conditioning features.
    enc = transforms.MuLawEncoding(channels)
    dec = transforms.MuLawExpanding(channels)
    train_wav = enc(train_wav)
    train_targets = enc(train_targets)
    scaler = StandardScaler()
    train_features = scaler.fit_transform(train_features)

    # Reshape to (num_seq, seq_M) / (num_seq, features_size, seq_M).
    train_wav = train_wav.reshape(-1, seq_M)
    train_features = np.rollaxis(train_features.reshape(-1, seq_M, features_size), 2, 1)
    train_targets = train_targets.reshape(-1, seq_M)

    train_wav = torch.from_numpy(train_wav).long()
    train_features = torch.from_numpy(train_features).float()
    train_targets = torch.from_numpy(train_targets).long()
    print(train_features.shape, train_wav.shape, train_targets.shape)

    # Keep a prefix of the training features as conditioning for generation.
    test_features = train_features[:int(sr * generation_time / seq_M)]
    test_features = test_features.transpose(0, 1).contiguous().view(1, features_size, -1).cuda()
    print(test_features.shape)

    print('==> Construct Tensor Dataloader...')
    dataset = TensorDataset(train_wav, train_features, train_targets)
    data_loader = DataLoader(dataset, batch_size=batch_size, num_workers=2, shuffle=True)

    print('==> Building model..')
    net = general_FFTNet(radixs, 128, channels, aux_channels=features_size).cuda()
    print(sum(p.numel() for p in net.parameters() if p.requires_grad), "of parameters.")

    optimizer = optim.Adam(net.parameters(), lr=lr)
    criterion = torch.nn.CrossEntropyLoss()

    print("Start Training.")
    a = datetime.now().replace(microsecond=0)
    step = 0
    while step < steps:
        for batch_idx, (inputs, features, targets) in enumerate(data_loader):
            inputs, features, targets = inputs.cuda(), features.cuda(), targets.cuda()

            optimizer.zero_grad()
            logits = net(inputs, features)
            # CrossEntropyLoss over (batch, classes, time, 1).
            loss = criterion(logits.unsqueeze(-1), targets.unsqueeze(-1))
            loss.backward()
            optimizer.step()

            print(step, "{:.4f}".format(loss.item()))

            step += 1
            if step > steps:
                break

    print("Training time cost:", datetime.now().replace(microsecond=0) - a)
    print("Start to generate some noise...")
    # Generation runs on CPU in eval mode.
    net = net.cpu()
    net.eval()
    with torch.no_grad():
        a = datetime.now().replace(microsecond=0)
        # NOTE(review): fast_generate is not defined in the visible models.py
        # (only init_buf/one_sample_generate are) — confirm it exists upstream.
        generation = net.fast_generate(h=test_features, c=c)
        generation = dec(generation)
        torchaudio.save(filename, generation, sr)
        cost = datetime.now().replace(microsecond=0) - a
        print("Generation time cost:", cost, ". Speed:", generation.size(0) / cost.total_seconds(), "samples/sec.")
{"/FFTNet_vocoder.py": ["/models.py"], "/train.py": ["/preprocess.py", "/models.py", "/dataset.py", "/hparams.py"], "/decode.py": ["/utils.py", "/preprocess.py", "/hparams.py"], "/preprocess.py": ["/utils.py"], "/FFTNet_generator.py": ["/models.py"]}
36,611
maybeluwang/pytorch_FFTNet
refs/heads/master
/hparams.py
class hparams:
    """Global hyper-parameter namespace, read as plain class attributes."""

    seed = 0  # RNG seed for reproducibility

    # ---- Audio front-end ----
    num_mels = 80        # mel filterbank channels
    num_freq = 1025      # linear-frequency bins
    sample_rate = 22050  # Hz
    frame_shift = 200    # hop size, in samples
    frame_length = 800   # analysis window, in samples
    preemphasis = 0.97   # pre-emphasis filter coefficient
    min_level_db = -100  # spectrogram floor (dB)
    ref_level_db = 20    # reference level (dB)
    gl_iters = 100       # iteration count (presumably Griffin-Lim — confirm)
    power = 2            # spectrogram magnitude exponent
    seg_l = 16000        # training segment length, in samples
{"/FFTNet_vocoder.py": ["/models.py"], "/train.py": ["/preprocess.py", "/models.py", "/dataset.py", "/hparams.py"], "/decode.py": ["/utils.py", "/preprocess.py", "/hparams.py"], "/preprocess.py": ["/utils.py"], "/FFTNet_generator.py": ["/models.py"]}
36,612
maybeluwang/pytorch_FFTNet
refs/heads/master
/train.py
"""Train general_FFTNet on a CMU ARCTIC-style dataset.

Optionally preprocesses wavs first, then runs the cross-entropy training
loop with periodic checkpointing.

Fix: batch tensors were moved with unconditional ``.cuda()`` even though
the model itself is placed with ``.to(device)`` — on a CPU-only host this
raised at the first batch. Batches now follow the same ``device``.
"""
import torch
from torch.utils.data import DataLoader
import torch.backends.cudnn as cudnn
import argparse
import os
from preprocess import preprocess_cmu
from models import general_FFTNet
from dataset import CMU_Dataset
from datetime import datetime
from hparams import hparams

parser = argparse.ArgumentParser()
parser.add_argument('--preprocess', action='store_true')
parser.add_argument('--wav_dir', type=str, default='/host/data_dsk1/dataset/CMU_ARCTIC_Databases/cmu_us_slt_arctic/wav')
parser.add_argument('data_dir', type=str)
parser.add_argument('--feature_type', type=str, default='melspectrogram')
parser.add_argument('--feature_dim', type=int, default=25, help='number of mcc coefficients')
parser.add_argument('--mcep_alpha', type=float, default=0.42, help='''all-pass filter constant. 16khz: 0.42, 10khz: 0.35, 8khz: 0.31.''')
parser.add_argument('--window_length', type=float, default=0.025)
parser.add_argument('--window_step', type=float, default=0.01)
parser.add_argument('--minimum_f0', type=float, default=71)
parser.add_argument('--maximum_f0', type=float, default=800)
parser.add_argument('--q_channels', type=int, default=256, help='quantization channels')
parser.add_argument('--interp_method', type=str, default='linear')
parser.add_argument('--fft_channels', type=int, default=128, help='fftnet layer channels')
parser.add_argument('--seq_M', type=int, default=5000, help='training sequence length')
parser.add_argument('--radixs', nargs='+', type=int, default=[2] * 11)
parser.add_argument('--batch_size', type=int, default=5)
parser.add_argument('--lr', type=float, default=0.001, help='learning rate')
parser.add_argument('--steps', type=int, default=100000, help='iteration number')
parser.add_argument('--injected_noise', action='store_true')
parser.add_argument('--model_file', type=str, default='slt_fftnet')
parser.add_argument('checkpoint_dir', type=str, help='Directory to save checkpoints.')
parser.add_argument('--checkpoint_step', type=int, default=5000)
parser.add_argument('--transpose', action='store_true')
parser.add_argument('--predict_dist', type=int, default=1)


def main():
    """Parse args, optionally preprocess, then train and save the model."""
    args = parser.parse_args()
    if args.preprocess:
        print('==> Preprocessing data ...')
        preprocess_cmu(args.wav_dir, args.data_dir, q_channels=args.q_channels,
                       winlen=args.window_length, winstep=args.window_step,
                       n_mcep=args.feature_dim, mcep_alpha=args.mcep_alpha,
                       minf0=args.minimum_f0, maxf0=args.maximum_f0,
                       type=args.feature_type)

    print('==> Loading Dataset..')
    # NOTE(review): the 256 here is hard-coded rather than args.q_channels —
    # they agree at the default but will diverge if --q_channels is changed;
    # left as-is to preserve behavior, but worth confirming upstream.
    training_dataset = CMU_Dataset(args.data_dir, args.seq_M, 256,
                                   hopsize=hparams.frame_shift,
                                   interp_method=args.interp_method,
                                   injected_noise=args.injected_noise,
                                   predict_dist=args.predict_dist)
    training_loader = DataLoader(training_dataset, batch_size=args.batch_size,
                                 num_workers=4, shuffle=True)

    print('==> Building model..')
    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    net = general_FFTNet(radixs=args.radixs, fft_channels=args.fft_channels,
                         classes=args.q_channels, aux_channels=hparams.num_mels,
                         transpose=args.transpose,
                         predict_dist=args.predict_dist).to(device)
    if torch.cuda.device_count() > 1:
        net = torch.nn.DataParallel(net)
    if device == 'cuda':
        cudnn.benchmark = True
    print(sum(p.numel() for p in net.parameters() if p.requires_grad), "of parameters.")

    criterion = torch.nn.CrossEntropyLoss()
    optimizer = torch.optim.Adam(net.parameters(), lr=args.lr)

    os.makedirs(args.checkpoint_dir, exist_ok=True)

    print("Start Training.")
    a = datetime.now().replace(microsecond=0)
    global_step = 0
    while global_step < args.steps:
        for batch_idx, (inputs, targets, features) in enumerate(training_loader):
            # Bug fix: follow the model's device instead of forcing .cuda().
            inputs = inputs.to(device)
            targets = targets.to(device)
            features = features.to(device)

            optimizer.zero_grad()
            logits = net(inputs, features)
            loss = criterion(logits.unsqueeze(-1), targets.unsqueeze(-1))
            loss.backward()
            optimizer.step()

            print(global_step, "{:.4f}".format(loss.item()))
            global_step += 1
            if global_step > args.steps:
                break
            if global_step % args.checkpoint_step == 0:
                # Unwrap DataParallel so the checkpoint is loadable anywhere.
                model = net.module if isinstance(net, torch.nn.DataParallel) else net
                torch.save(model, os.path.join(args.checkpoint_dir,
                                               args.model_file + "_{}.pth".format(global_step)))
                print("Checkpoint saved.")

    print("Training time cost:", datetime.now().replace(microsecond=0) - a)
    net = net.module if isinstance(net, torch.nn.DataParallel) else net
    torch.save(net, args.model_file + ".pth")
    print("Model saved to", args.model_file)


if __name__ == '__main__':
    main()
{"/FFTNet_vocoder.py": ["/models.py"], "/train.py": ["/preprocess.py", "/models.py", "/dataset.py", "/hparams.py"], "/decode.py": ["/utils.py", "/preprocess.py", "/hparams.py"], "/preprocess.py": ["/utils.py"], "/FFTNet_generator.py": ["/models.py"]}
36,613
maybeluwang/pytorch_FFTNet
refs/heads/master
/utils.py
"""Signal utilities: mu-law coding helpers, padding, log-MMSE denoising and
energy-based voice activity detection (VAD)."""
import numpy as np
import torch
from torch.nn import functional as F
from scipy.special import expn
from torchaudio.transforms import MuLawEncoding, MuLawDecoding


def np_mulaw_quantized(x, quantization_channels):
    """Mu-law compress x (numpy, in [-1, 1]) and map to integer classes."""
    x_mu = np_mulaw(x, quantization_channels)
    return float2class(x_mu, quantization_channels)


def encoder(quantization_channels):
    """Return a torchaudio mu-law encoder transform."""
    return MuLawEncoding(quantization_channels)


def decoder(quantization_channels):
    """Return a torchaudio mu-law decoder transform."""
    return MuLawDecoding(quantization_channels)


def np_mulaw(x, quantization_channels):
    """Mu-law compress (numpy): sign(x) * log(1 + mu|x|) / log(1 + mu)."""
    mu = quantization_channels - 1
    x_mu = np.sign(x) * np.log1p(mu * np.abs(x)) / np.log1p(mu)
    return x_mu


def np_inv_mulaw(x, quantization_channels):
    """Inverse of np_mulaw."""
    mu = quantization_channels - 1
    x = np.sign(x) * (np.exp(np.abs(x) * np.log1p(mu)) - 1.) / mu
    return x


def float2class(x, classes):
    """Map floats in [-1, 1] to integer class ids in [0, classes-1]."""
    mu = classes - 1
    return np.rint((x + 1) / 2 * mu).astype(np.uint8)


def class2float(x, classes):
    """Inverse of float2class: class ids back to floats in [-1, 1]."""
    mu = classes - 1
    return x.astype(float) / mu * 2 - 1.


def zero_padding(x, maxlen, dim=0):
    """Right-pad x with zeros along `dim` up to length `maxlen` (no-op if longer)."""
    diff = maxlen - x.shape[dim]
    if diff <= 0:
        return x
    else:
        pad_shape = ()
        for i in range(len(x.shape)):
            if i != dim:
                pad_shape += ((0, 0),)
            else:
                pad_shape += ((0, diff),)
        return np.pad(x, pad_shape, 'constant')


def repeat_last_padding(x, maxlen):
    """Pad x along the last axis to `maxlen` by repeating its last frame."""
    diff = maxlen - x.shape[-1]
    if diff <= 0:
        return x
    else:
        pad_value = np.tile(x[..., [-1]], diff)
        return np.concatenate((x, pad_value), axis=-1)


# this function is copied from https://github.com/braindead/logmmse/blob/master/logmmse.py
# change numpy to tensor
def logmmse(x, sr, noise_std=1 / 256):
    """Log-MMSE speech enhancement of waveform x (tensor), blended by VAD.

    The noise model is synthetic Gaussian noise of std `noise_std`; frames
    judged non-speech by the VAD keep more of the original signal.
    """
    window_size = int(0.02 * sr)
    if window_size % 2 == 1:
        window_size += 1
    # noverlap = len1; hop_size = len2; window_size = len
    noverlap = int(window_size * 0.75)
    hop_size = window_size - noverlap
    win = torch.hann_window(window_size)
    # Scale window so overlap-add reconstruction sums to unity gain.
    win *= hop_size / win.sum()
    nfft = 2 ** (window_size - 1).bit_length()
    pad_pos = (nfft - window_size) // 2
    # Estimate the noise spectrum from 6 synthetic noise frames.
    noise = torch.randn(6, window_size) * noise_std
    noise_fft = torch.rfft(F.pad(win * noise, (pad_pos, pad_pos)), 1)
    noise_mean = noise_fft.pow(2).sum(-1).sqrt()
    noise_mu = noise_mean.mean(0)
    noise_mu2 = noise_mu.pow(2)
    spec = torch.stft(x, nfft, hop_length=hop_size, win_length=window_size, window=win, center=False)
    spec_copy = spec.clone()
    sig2 = spec.pow(2).sum(-1)
    vad_curve = vad(x, S=spec).float()
    aa = 0.98                      # decision-directed smoothing factor
    ksi_min = 10 ** (-25 / 10)     # a priori SNR floor (-25 dB)
    gammak = torch.min(sig2 / noise_mu2.unsqueeze(-1), torch.Tensor([40]))
    for n in range(spec.size(1)):
        gammak_n = gammak[:, n]
        if n == 0:
            ksi = aa + (1 - aa) * F.relu(gammak_n - 1)
        else:
            # Decision-directed a priori SNR estimate from the previous
            # enhanced frame, floored at ksi_min.
            ksi = aa * spec_copy[:, n - 1].pow(2).sum(-1) / noise_mu2 + (1 - aa) * F.relu(gammak_n - 1)
            ksi = torch.max(ksi, torch.Tensor([ksi_min]))
        A = ksi / (1 + ksi)
        vk = A * gammak_n
        ei_vk = 0.5 * expint(vk)
        hw = A * ei_vk.exp()       # log-MMSE gain
        spec_copy[:, n] *= hw.unsqueeze(-1)
    xi_w = torch.irfft(spec_copy.transpose(0, 1), 1, signal_sizes=torch.Size([nfft]))[:, pad_pos:-pad_pos]
    origin = torch.irfft(spec.transpose(0, 1), 1, signal_sizes=torch.Size([nfft]))[:, pad_pos:-pad_pos]
    # Blend enhanced and original frames by the (0/1) VAD curve.
    xi_w_mask = vad_curve / 2 + 0.5
    orign_mask = (1 - vad_curve) / 2
    final_framed = xi_w * xi_w_mask.unsqueeze(-1) + origin * orign_mask.unsqueeze(-1)
    # Overlap-add reconstruction.
    xfinal = torch.zeros(final_framed.size(0) * hop_size + noverlap)
    k = 0
    for n in range(final_framed.size(0)):
        xfinal[k:k + window_size] += final_framed[n]
        k += hop_size
    return xfinal


def expint(x):
    """Elementwise exponential integral E1 via scipy (tensor in/out)."""
    x = x.detach().cpu().numpy()
    x = expn(1, x)
    return torch.from_numpy(x).float()


def vad(x, hop_size=256, S=None, k=5, med_num=9):
    """Energy-based VAD on waveform x; reuses spectrogram S if given."""
    if S is None:
        S = torch.stft(x, hop_size * 4, hop_length=hop_size)
    energy = S.pow(2).sum(-1).mean(0).sqrt()
    energy /= energy.max()
    return _vad_energy(energy, k, med_num)


def vad_mel(melspec, k=5, med_num=9):
    """VAD from a mel spectrogram; like _vad_energy but median-smoothed."""
    energy = melspec.pow(2).sum(-1).mean(0).sqrt()
    energy /= energy.max()
    sorted_E, _ = energy.sort()
    # Find the first peak in the smoothed derivative of the sorted energy
    # curve; everything below it is treated as noise floor.
    sorted_E_d = sorted_E[2:] - sorted_E[:-2]
    smoothed = F.pad(sorted_E_d, (7, 7)).unfold(0, 15, 1).mean(-1)
    sorted_E_d_peak = F.relu(smoothed[1:-1] - smoothed[:-2]) * F.relu(smoothed[1:-1] - smoothed[2:])
    first, *dummy = torch.nonzero(sorted_E_d_peak) + 2
    E_th = sorted_E[:first].mean() * k
    decision = torch.gt(energy, E_th)
    # Median filter of width med_num to remove isolated flips.
    pad = (med_num // 2, med_num // 2)
    decision = F.pad(decision, pad)
    decision = decision.unfold(0, med_num, 1)
    decision, _ = decision.median(dim=-1)
    return decision


def _vad_energy(energy, k, med_num):
    """Threshold normalized frame energies at k * (noise-floor mean)."""
    sorted_E, _ = energy.sort()
    sorted_E_d = sorted_E[2:] - sorted_E[:-2]
    smoothed = F.pad(sorted_E_d, (7, 7)).unfold(0, 15, 1).mean(-1)
    sorted_E_d_peak = F.relu(smoothed[1:-1] - smoothed[:-2]) * F.relu(smoothed[1:-1] - smoothed[2:])
    first, *dummy = torch.nonzero(sorted_E_d_peak) + 2
    E_th = sorted_E[:first].mean() * k
    decision = torch.gt(energy, E_th)
    pad = (med_num // 2, med_num // 2)
    decision = F.pad(decision, pad)
    decision = decision.unfold(0, med_num, 1)
    # sort-then-bool acts as a window vote (differs from vad_mel's median).
    decision = decision.int().sort()[0].bool()
    return decision
{"/FFTNet_vocoder.py": ["/models.py"], "/train.py": ["/preprocess.py", "/models.py", "/dataset.py", "/hparams.py"], "/decode.py": ["/utils.py", "/preprocess.py", "/hparams.py"], "/preprocess.py": ["/utils.py"], "/FFTNet_generator.py": ["/models.py"]}
36,614
maybeluwang/pytorch_FFTNet
refs/heads/master
/models.py
"""FFTNet model: one-hot input embedding, dilated split-radix conv layers,
and an autoregressive generation path with rolling sample buffers."""
import torch
import torch.nn as nn
import torch.nn.functional as F
from operator import mul
from functools import reduce


class One_Hot(nn.Module):
    """One-hot encode integer tensors to a trailing `depth` dimension."""

    def __init__(self, depth):
        super().__init__()
        self.depth = depth
        # Identity matrix as a frozen lookup table (rows = one-hot vectors).
        self.ones = nn.Parameter(torch.eye(depth).float(), requires_grad=False)

    def forward(self, x):
        return self.ones.index_select(0, x.view(-1)).view(x.size() + torch.Size([self.depth]))


class general_FFTLayer(nn.Module):
    """One FFTNet layer: dilated conv over `radix` taps plus optional
    conditioning conv, followed by a 1x1 output conv; ReLU throughout."""

    def __init__(self, in_channels, out_channels, N, *, radix=2, aux_channels=None):
        super().__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.radix = radix
        self.aux_channels = aux_channels
        # this should mathematically equal to having 2 1x1 kernel
        self.W_lr = nn.Conv1d(in_channels, out_channels, kernel_size=radix, dilation=N // radix)
        if aux_channels is not None:
            # Conditioning path shares the same receptive geometry.
            self.V_lr = nn.Conv1d(aux_channels, out_channels, kernel_size=radix, dilation=N // radix)
        self.W_o = nn.Conv1d(out_channels, out_channels, kernel_size=1)
        # Left-pad so output length equals input length when zeropad=True.
        self.pad = nn.ConstantPad1d((N - N // radix, 0), 0.)

    def forward(self, x, h=None, zeropad=True, input_onehot=False):
        M = x.size(-1)
        x = self.pad(x) if zeropad else x
        if input_onehot:
            # Make the zero padding a valid one-hot (middle class = silence).
            x[:, self.in_channels // 2, :x.size(2) - M] = 1
        if h is None:
            z = F.relu(self.W_lr(x))
        else:
            # Align conditioning to the last M input samples.
            h = self.pad(h[:, :, -M:]) if zeropad else h[:, :, -M:]
            z = F.relu(self.W_lr(x) + self.V_lr(h))
        return F.relu(self.W_o(z))


class general_FFTNet(nn.Module):
    """Stack of general_FFTLayer blocks with a linear classifier head.

    NOTE(review): callers elsewhere in this repo invoke `net.fast_generate`,
    which is not defined in this visible file — confirm it exists upstream.
    """

    def __init__(self, radixs=[2] * 11, fft_channels=128, classes=256, *,
                 aux_channels=None, transpose=False, predict_dist=1):
        super().__init__()
        self.channels = fft_channels
        self.aux_channels = aux_channels
        self.classes = classes
        self.predict_dist = predict_dist
        # Per-layer dilation spans: cumulative products of the radixes,
        # ascending (transpose) or descending (default).
        if transpose:
            N_seq = [reduce(mul, radixs[:i + 1]) for i in range(len(radixs))]
        else:
            N_seq = [reduce(mul, radixs[i:]) for i in range(len(radixs))]
        self.r_field = reduce(mul, radixs)   # total receptive field
        self.radixs = radixs
        self.N_seq = N_seq

        # transform input to one hot
        self.one_hot = One_Hot(classes)

        self.fft_layers = nn.ModuleList()
        in_channels = classes
        for N, r in zip(N_seq, radixs):
            self.fft_layers.append(general_FFTLayer(in_channels, fft_channels, N, radix=r, aux_channels=aux_channels))
            in_channels = fft_channels
        self.fc_out = nn.Linear(in_channels, classes)

    def forward(self, x, h=None, zeropad=True):
        """x: integer sample classes (batch, time); returns logits
        (batch, classes, time)."""
        x = self.one_hot(x).transpose(1, 2)
        first_layer = True
        for fft_layer in self.fft_layers:
            # Only the first layer's padding is re-marked as one-hot.
            x = fft_layer(x, h, zeropad, first_layer)
            first_layer = False
        x = self.fc_out(x.transpose(1, 2))
        return x.transpose(1, 2)

    def get_receptive_field(self):
        return self.r_field

    def get_predict_distance(self):
        return self.predict_dist

    def conditional_sampling(self, logits):
        """Sample a class per step from softmax(logits)."""
        probs = F.softmax(logits, dim=1)
        dist = torch.distributions.Categorical(probs)
        return dist.sample()

    def argmax(self, logits):
        """Greedy class selection per step."""
        _, sample = logits.max(1)
        return sample

    def init_buf(self):
        """(Re)initialize the per-layer rolling input buffers used by
        one_sample_generate, on the model's current device."""
        if next(self.parameters()).is_cuda:
            device = 'cuda'
        else:
            device = 'cpu'
        if hasattr(self, "buffers_"):
            for buf in self.buffers_:
                buf.fill_(0.).to(device)
        else:
            # First buffer holds one-hot samples; later buffers hold layer
            # activations. Each keeps (N - N//r + predict_dist) timesteps.
            self.buffers_ = [torch.zeros(1, self.classes,
                                         self.N_seq[0] - self.N_seq[0] // self.radixs[0] + self.predict_dist).float().to(device)]
            self.buffers_ += [torch.zeros(1, self.channels, N - N // r + self.predict_dist).float().to(device)
                              for N, r in zip(self.N_seq[1:], self.radixs[1:])]
        # Seed the sample buffer with the "silence" class one-hot.
        self.buffers_[0][:, self.classes // 2] = 1

    def one_sample_generate(self, samples, h=None, c=1., method='sampling'):
        """Advance generation by predict_dist samples given the previous
        samples and (optionally) conditioning h; requires init_buf first."""
        samples = self.one_hot(samples).t()
        for i in range(len(self.buffers_)):
            # Shift the buffer left and append the newest activations.
            torch.cat((self.buffers_[i][:, :, self.predict_dist:],
                       samples.view(1, -1, self.predict_dist)), 2, out=self.buffers_[i])
            samples = self.fft_layers[i](self.buffers_[i], h, False)
        # c sharpens (c > 1) or flattens the output distribution.
        logits = self.fc_out(samples.transpose(1, 2)).view(self.predict_dist, self.classes) * c
        if method == 'argmax':
            samples = self.argmax(logits)
        else:
            samples = self.conditional_sampling(logits)
        return samples
{"/FFTNet_vocoder.py": ["/models.py"], "/train.py": ["/preprocess.py", "/models.py", "/dataset.py", "/hparams.py"], "/decode.py": ["/utils.py", "/preprocess.py", "/hparams.py"], "/preprocess.py": ["/utils.py"], "/FFTNet_generator.py": ["/models.py"]}
36,615
maybeluwang/pytorch_FFTNet
refs/heads/master
/decode.py
"""Decode (synthesize) a wav from a trained FFTNet model: extract mel
features from an input wav, upsample them to sample rate, then generate
autoregressively with VAD-gated sampling sharpness."""
import librosa
import torch
from torch.nn import functional as F
from tqdm import tqdm
import os
import argparse
import numpy as np
from torchaudio import save
from sklearn.preprocessing import StandardScaler
from scipy.interpolate import interp1d
from datetime import datetime
from utils import decoder
from logmmse import logmmse
from preprocess import get_features
import audio
from hparams import hparams

parser = argparse.ArgumentParser()
parser.add_argument('--scaler_file', type=str, default=None)
parser.add_argument('--infile', type=str, default=None)
parser.add_argument('--save_path', type=str, default=None)
parser.add_argument('--data_dir', type=str, default='slt_mcc_data')
parser.add_argument('--feature_type', type=str, default='mcc')
parser.add_argument('--feature_dim', type=int, default=25, help='number of mcc coefficients')
parser.add_argument('--mcep_alpha', type=float, default=0.42, help='''all-pass filter constant. 16khz: 0.42, 10khz: 0.35, 8khz: 0.31.''')
parser.add_argument('--window_length', type=float, default=0.025)
parser.add_argument('--window_step', type=float, default=0.01)
parser.add_argument('--minimum_f0', type=float, default=71)
parser.add_argument('--maximum_f0', type=float, default=800)
parser.add_argument('--q_channels', type=int, default=256, help='quantization channels')
parser.add_argument('--interp_method', type=str, default='linear')
parser.add_argument('-c', type=float, default=2., help='a constant multiply before softmax.')
parser.add_argument('--model_file', type=str, default='slt_fftnet.pth')
parser.add_argument('--cuda', action='store_true')
parser.add_argument('--denoise', action='store_true')
parser.add_argument('--noise_std', type=float, default=0.005)

if __name__ == '__main__':
    args = parser.parse_args()
    # Loads the whole pickled model object (not just a state_dict).
    net = torch.load(args.model_file)
    # Rebuild the feature scaler from saved mean/scale arrays.
    scaler = StandardScaler()
    scaler_info = np.load(args.scaler_file)
    scaler.mean_ = scaler_info['mean']
    scaler.scale_ = scaler_info['scale']
    filename = args.infile

    net.eval()
    if not args.cuda:
        net = net.cpu()
    else:
        net = net.cuda()
    print(args.model_file, "has", sum(p.numel() for p in net.parameters() if p.requires_grad), "of parameters.")

    with torch.no_grad():
        if args.infile is None:
            # haven't implement
            pass
        elif args.save_path is not None:
            x = audio.load_wav(filename)
            h = audio.melspectrogram(x)
            id = os.path.basename(filename).replace(".wav", "")
            h = scaler.transform(h.T).T

            # interpolation: upsample frame-rate features to sample rate.
            hopsize = hparams.frame_shift
            if args.interp_method == 'linear':
                xx = np.arange(h.shape[1]) * hopsize
                f = interp1d(xx, h, copy=False, axis=1, fill_value="extrapolate")
                h = f(np.arange(xx[-1]))
            elif args.interp_method == 'repeat':
                h = np.repeat(h, hopsize, axis=1)
            else:
                print("interpolation method", args.interp_method, "is not implemented.")
                exit(1)

            h = torch.from_numpy(h).unsqueeze(0).float()
            r_field = net.get_receptive_field()
            pred_dist = net.get_predict_distance()
            # Cheap VAD from zero-crossing rate, upsampled to sample rate.
            zcr = librosa.feature.zero_crossing_rate(x, frame_length=hparams.frame_length, hop_length=hparams.frame_shift)
            vad_curve = (zcr <= 0.2)
            vad_curve = np.repeat(vad_curve, hopsize)
            output_buf = torch.empty(h.size(2)).long()
            # Left-pad conditioning by the receptive field for warm-up.
            h = F.pad(h, (r_field, 0))
            samples = torch.zeros(pred_dist).long()
            if args.cuda:
                h = h.cuda()
                samples = samples.cuda()
            net.init_buf()
            a = datetime.now().replace(microsecond=0)
            for pos in tqdm(range(r_field + pred_dist, h.size(2) + 1, pred_dist)):
                out_pos = pos - r_field - pred_dist
                # Sharpen the softmax (factor c) only on voiced regions.
                decision = np.mean(vad_curve[out_pos:out_pos + pred_dist])
                if decision > 0.5:
                    samples = net.one_sample_generate(samples, h=h[:, :, :pos], c=args.c)
                else:
                    samples = net.one_sample_generate(samples, h=h[:, :, :pos])
                output_buf[out_pos:out_pos + pred_dist] = samples
            cost = datetime.now().replace(microsecond=0) - a

            # Mu-law decode back to a waveform and optionally denoise.
            dec = decoder(args.q_channels)
            generation = dec(output_buf)
            result = generation.cpu().numpy()
            if args.denoise:
                result = logmmse(result, hparams.sample_rate)
            audio.save_wav(result, args.save_path)
            print("Speed:", generation.size(0) / cost.total_seconds(), "samples/sec.")
            print('file saved in', args.save_path)
        else:
            print("Please enter output file name.")
{"/FFTNet_vocoder.py": ["/models.py"], "/train.py": ["/preprocess.py", "/models.py", "/dataset.py", "/hparams.py"], "/decode.py": ["/utils.py", "/preprocess.py", "/hparams.py"], "/preprocess.py": ["/utils.py"], "/FFTNet_generator.py": ["/models.py"]}
36,616
maybeluwang/pytorch_FFTNet
refs/heads/master
/preprocess.py
"""Dataset preprocessing: extract mu-law quantized waveforms and acoustic
features (WORLD mcc / MFCC / mel spectrogram) into train/test .npz archives,
plus feature normalization statistics.

Fix: `_process_wav_melspectrogram` called `ceil(...)` without importing it,
raising NameError for any wav shorter than 5000 samples; `math.ceil` is now
imported.
"""
import os
import sys
from math import ceil
from multiprocessing import Pool, cpu_count
from concurrent.futures import ProcessPoolExecutor
from functools import partial
from tqdm import tqdm
from itertools import repeat
from librosa.core import load, stft
from librosa.feature import mfcc
from librosa.util import frame
import pyworld as world
import pysptk as sptk
import numpy as np
from utils import repeat_last_padding, encoder, np_mulaw_quantized
from sklearn.preprocessing import StandardScaler
import argparse
import audio


def get_features(filename, *, winlen, winstep, n_mcep, mcep_alpha, minf0, maxf0, type):
    """Extract (file id, raw samples, feature matrix) for one wav.

    Features are mel-cepstrum ('mcc', via WORLD + SPTK) or MFCC, with the
    WORLD f0 track stacked on as the last row, padded to cover the wav.
    """
    wav, sr = load(filename, sr=None)
    # get f0
    x = wav.astype(float)
    _f0, t = world.harvest(x, sr, f0_floor=minf0, f0_ceil=maxf0, frame_period=winstep * 1000)
    f0 = world.stonemask(x, _f0, t, sr)
    window_size = int(sr * winlen)
    hop_size = int(sr * winstep)
    # get mel
    if type == 'mcc':
        spec = world.cheaptrick(x, f0, t, sr, f0_floor=minf0)
        h = sptk.sp2mc(spec, n_mcep - 1, mcep_alpha).T
    else:
        h = mfcc(x, sr, n_mfcc=n_mcep, n_fft=window_size, hop_length=hop_size)
    h = np.vstack((h, f0))
    # Repeat the last frame so the feature track spans the whole waveform.
    maxlen = len(x) // hop_size + 2
    h = repeat_last_padding(h, maxlen)
    id = os.path.basename(filename).replace(".wav", "")
    return (id, x, h)


def calc_stats(npzfile, out_dir):
    """Compute per-dimension mean/scale over all '*_h' feature arrays in
    the archive and save them to <out_dir>/scaler.npz."""
    scaler = StandardScaler()
    data_dict = np.load(npzfile)
    for name, x in data_dict.items():
        if name[-2:] == '_h':
            scaler.partial_fit(x.T)
    mean = scaler.mean_
    scale = scaler.scale_
    np.savez(os.path.join(out_dir, 'scaler.npz'), mean=np.float32(mean), scale=np.float32(scale))


def preprocess_cmu(wav_dir, output, *, q_channels, winlen, winstep, n_mcep,
                   mcep_alpha, minf0, maxf0, type):
    """Preprocess a CMU ARCTIC-style wav directory in parallel.

    The first 1032 (sorted) files become train.npz, the rest test.npz;
    waveforms are mu-law encoded to uint8, features stored alongside under
    '<id>_h' keys. Normalization stats are computed from the training set.
    """
    in_dir = os.path.join(wav_dir)
    out_dir = os.path.join(output)
    train_data = os.path.join(out_dir, 'train.npz')
    test_data = os.path.join(out_dir, 'test.npz')
    os.makedirs(out_dir, exist_ok=True)
    files = [os.path.join(in_dir, f) for f in os.listdir(in_dir)]
    files.sort()
    train_files = files[:1032]
    test_files = files[1032:]
    feature_fn = partial(get_features, winlen=winlen, winstep=winstep,
                         n_mcep=n_mcep, mcep_alpha=mcep_alpha,
                         minf0=minf0, maxf0=maxf0, type=type)
    n_workers = cpu_count() // 2
    print("Running", n_workers, "processes.")
    data_dict = {}
    enc = encoder(q_channels)
    print("Processing training data ...")
    with ProcessPoolExecutor(n_workers) as executor:
        futures = [executor.submit(feature_fn, f) for f in train_files]
        for future in tqdm(futures):
            name, data, feature = future.result()
            data_dict[name] = enc(data).astype(np.uint8)
            data_dict[name + '_h'] = feature
    np.savez(train_data, **data_dict)
    data_dict = {}
    print("Processing test data ...")
    with ProcessPoolExecutor(n_workers) as executor:
        futures = [executor.submit(feature_fn, f) for f in test_files]
        for future in tqdm(futures):
            name, data, feature = future.result()
            data_dict[name] = enc(data).astype(np.uint8)
            data_dict[name + '_h'] = feature
    np.savez(test_data, **data_dict)
    calc_stats(train_data, out_dir)


def _process_wav(file_list, wav_dir, outfile, winlen, winstep, n_mcep,
                 mcep_alpha, minf0, maxf0, q_channels, vocoderinput):
    """Serial variant: mu-law encode wavs and extract mcc/MFCC features
    into a single npz archive at `outfile`."""
    data_dict = {}
    enc = encoder(q_channels)
    for f in tqdm(file_list):
        file = os.path.join(wav_dir, f)
        wav, sr = load(file, sr=None)
        x = wav.astype(float)
        # can't adjust window size
        _f0, t = world.harvest(x, sr, f0_floor=minf0, f0_ceil=maxf0, frame_period=winstep * 1000)
        f0 = world.stonemask(x, _f0, t, sr)
        window_size = int(sr * winlen)
        hop_size = int(sr * winstep)
        # get mel
        if vocoderinput == 'mcc':
            nfft = 2 ** (window_size - 1).bit_length()
            spec = np.abs(stft(x, n_fft=nfft, hop_length=hop_size, win_length=window_size, window='blackman')) ** 2
            h = sptk.mcep(spec, n_mcep - 1, mcep_alpha, eps=-60, etype=2, itype=4).T
        else:
            h = mfcc(x, sr, n_mfcc=n_mcep, n_fft=int(sr * winlen), hop_length=int(sr * winstep))
        h = np.vstack((h, f0))
        # mulaw encode
        wav = enc(x).astype(np.uint8)
        id = os.path.basename(f).replace(".wav", "")
        data_dict[id] = wav
        data_dict[id + "_h"] = h
    np.savez(outfile, **data_dict)


def _process_wav_melspectrogram(file_list, wav_dir, out_dir, q_channels=256):
    """Mel-spectrogram variant: quantize wavs with np_mulaw_quantized and
    store mel spectrograms; short wavs are tiled up to 5000 samples."""
    data_dict = {}
    for file_id in tqdm(file_list):
        filepath = os.path.join(wav_dir, file_id + '.wav')
        wav = audio.load_wav(filepath)
        if len(wav) < 5000:
            # Bug fix: `ceil` was previously an undefined name here.
            wav = np.tile(wav, ceil(5000 / len(wav)))
        melspectrogram = audio.melspectrogram(wav)
        wav_quantized = np_mulaw_quantized(wav, q_channels)
        data_dict[file_id] = wav_quantized
        data_dict[file_id + '_h'] = melspectrogram
    np.savez(out_dir, **data_dict)


def get_wavfiles_list(listfile):
    """Return the first '|'-separated field of every line in `listfile`."""
    with open(listfile, 'r') as f:
        all_lines = f.readlines()
    wav_files = [line.split('|')[0] for line in all_lines]
    return wav_files


def melspectrogram_preprocess(in_dir, out_dir, vocoderinput="melspectrogram", **kwargs):
    """LJSpeech-style preprocessing driver: read train.csv/test.csv file
    lists under `in_dir`, write train.npz/test.npz plus scaler stats."""
    os.makedirs(out_dir, exist_ok=True)
    wav_dir = os.path.join(in_dir, "wavs")
    train_list_path = os.path.join(in_dir, "train.csv")
    test_list_path = os.path.join(in_dir, "test.csv")
    train_data = os.path.join(out_dir, 'train.npz')
    test_data = os.path.join(out_dir, 'test.npz')
    train_files = get_wavfiles_list(train_list_path)
    test_files = get_wavfiles_list(test_list_path)
    print("Processing testing data ...")
    _process_wav_melspectrogram(test_files, wav_dir, test_data, **kwargs)
    print("Processing training data ...")
    _process_wav_melspectrogram(train_files, wav_dir, train_data, **kwargs)
    calc_stats(train_data, out_dir)


if __name__ == '__main__':
    melspectrogram_preprocess(sys.argv[1], sys.argv[2], "melspectrogram", q_channels=256)
{"/FFTNet_vocoder.py": ["/models.py"], "/train.py": ["/preprocess.py", "/models.py", "/dataset.py", "/hparams.py"], "/decode.py": ["/utils.py", "/preprocess.py", "/hparams.py"], "/preprocess.py": ["/utils.py"], "/FFTNet_generator.py": ["/models.py"]}
36,617
maybeluwang/pytorch_FFTNet
refs/heads/master
/FFTNet_generator.py
import torch import torch.optim as optim import torch.nn.functional as F from torch.utils.data import DataLoader import torchaudio from torchaudio import transforms import numpy as np from datetime import datetime import argparse # import matplotlib.pyplot as plt from models import general_FFTNet parser = argparse.ArgumentParser(description='FFTNet audio generation.') parser.add_argument('outfile', type=str, help='output file name') parser.add_argument('--seq_M', type=int, default=2500, help='training sequence length') parser.add_argument('--depth', type=int, default=10, help='model depth. The receptive field will be 2^depth.') parser.add_argument('--batch_size', type=int, default=10) parser.add_argument('--channels', type=int, default=256, help='quantization channels') parser.add_argument('--lr', type=float, default=0.001, help='learning rate') parser.add_argument('--steps', type=int, default=10000, help='iteration number') parser.add_argument('-c', type=float, default=2., help='a constant multiply before softmax layer in generation') parser.add_argument('--file_size', type=float, default=5., help='generated wav file size (in seconds)') sr = 8000 if __name__ == '__main__': args = parser.parse_args() seq_M = args.seq_M batch_size = args.batch_size depth = args.depth radixs = [2] * depth N = np.prod(radixs) channels = args.channels lr = args.lr steps = args.steps c = args.c generation_time = args.file_size filename = args.outfile maxlen = 50000 print('==> Downloading YesNo Dataset..') transform = transforms.Compose( [transforms.Scale(), transforms.PadTrim(maxlen), transforms.MuLawEncoding(quantization_channels=channels)]) data = torchaudio.datasets.YESNO('./data', download=True, transform=transform) data_loader = DataLoader(data, batch_size=batch_size, num_workers=4, shuffle=True) print('==> Building model..') net = general_FFTNet(radixs, 128, channels).cuda() print(sum(p.numel() for p in net.parameters() if p.requires_grad), "of parameters.") optimizer = 
optim.Adam(net.parameters(), lr=lr) criterion = torch.nn.CrossEntropyLoss() print("Start Training.") a = datetime.now().replace(microsecond=0) step = 0 seq_idx = torch.arange(seq_M).view(1, -1) while step < steps: for batch_idx, (inputs, _) in enumerate(data_loader): inputs = inputs.squeeze(-1) targets = torch.cat((inputs[:, 1:], inputs[:, 0:1]), 1) # random sample segments from batch randn_idx = torch.LongTensor(inputs.size(0)).random_(maxlen - seq_M) randn_seq_idx = seq_idx.expand(inputs.size(0), -1) + randn_idx.unsqueeze(-1) inputs = torch.gather(inputs, 1, randn_seq_idx).long().cuda() targets = torch.gather(targets, 1, randn_seq_idx).long().cuda() optimizer.zero_grad() logits = net(inputs) loss = criterion(logits.unsqueeze(-1), targets.unsqueeze(-1)) loss.backward() optimizer.step() print(step, "{:.4f}".format(loss.item())) step += 1 if step > steps: break """ x_sequences = list(torch.split(inputs, seq_M, 2)) y_sequences = list(torch.split(targets, seq_M, 1)) for x, y in zip(x_sequences, y_sequences): x, y = x.cuda(), y.cuda() optimizer.zero_grad() logits = net(x)[:, :, 1:] loss = criterion(logits.unsqueeze(-1), y.unsqueeze(-1)) loss.backward() optimizer.step() print(step, "{:.4f}".format(loss.item())) step += 1 if step > steps: break """ print("Training time cost:", datetime.now().replace(microsecond=0) - a) print("Start to generate some noise...") net = net.cpu() net.eval() with torch.no_grad(): a = datetime.now().replace(microsecond=0) generation = net.fast_generate(int(sr * generation_time), c=c) decoder = transforms.MuLawExpanding(channels) generation = decoder(generation) torchaudio.save(filename, generation, sr) print("Generation time cost:", datetime.now().replace(microsecond=0) - a)
{"/FFTNet_vocoder.py": ["/models.py"], "/train.py": ["/preprocess.py", "/models.py", "/dataset.py", "/hparams.py"], "/decode.py": ["/utils.py", "/preprocess.py", "/hparams.py"], "/preprocess.py": ["/utils.py"], "/FFTNet_generator.py": ["/models.py"]}
36,618
maybeluwang/pytorch_FFTNet
refs/heads/master
/dataset.py
from torch.utils.data import Dataset import torch import os import numpy as np from sklearn.preprocessing import StandardScaler from scipy.interpolate import interp1d class CMU_Dataset(Dataset): def __init__(self, folder, sample_size, quantization_channels, hopsize, interp_method, *, predict_dist=1, train=True, injected_noise=True): self.train = train self.sample_size = sample_size self.channels = quantization_channels self.hopsize = hopsize self.interp_method = interp_method self.injected_noise = injected_noise self.predict_dist = predict_dist if train: npzfile = os.path.join(folder, "train.npz") else: npzfile = os.path.join(folder, "test.npz") scaler = StandardScaler() scaler_info = np.load(os.path.join(folder, 'scaler.npz')) scaler.mean_ = scaler_info['mean'] scaler.scale_ = scaler_info['scale'] self.transform_fn = scaler.transform self.names_list = [] data_dict = np.load(npzfile) self.data_buffer = {} for name, x in data_dict.items(): if name[-2:] != '_h': self.names_list.append(name) self.data_buffer[name] = x else: self.data_buffer[name] = self.transform_fn(x.T).T def __len__(self): return len(self.names_list) def __getitem__(self, index): name = self.names_list[index] audio = self.data_buffer[name] local_condition = self.data_buffer[name + '_h'] #print(name) if self.train: rand_pos = np.random.randint(0, len(audio) - self.sample_size - self.predict_dist) target = audio[rand_pos + self.predict_dist:rand_pos + self.predict_dist + self.sample_size] audio = audio[rand_pos:rand_pos + self.sample_size] if self.injected_noise: audio = np.clip(audio+np.rint(np.random.randn(self.sample_size)), 0, self.channels-1) # interpolation if self.interp_method == 'linear': x = np.arange(local_condition.shape[1]) * self.hopsize f = interp1d(x, local_condition, copy=False, axis=1, fill_value="extrapolate") local_condition = f( np.arange(rand_pos + self.predict_dist, rand_pos + self.predict_dist + self.sample_size)) elif self.interp_method == 'repeat': local_condition = 
np.repeat(local_condition, self.hopsize, axis=1) local_condition = local_condition[:, rand_pos + self.predict_dist:rand_pos + self.predict_dist + self.sample_size] else: print("interpolation method", self.interp_method, "is not implemented.") exit(1) return torch.from_numpy(audio).long(), torch.from_numpy(target).long(), torch.from_numpy( local_condition).float() else: name_code = [ord(c) for c in name] # the batch size should be 1 in test mode return torch.LongTensor(name_code), torch.from_numpy(audio).long(), torch.from_numpy( local_condition).float()
{"/FFTNet_vocoder.py": ["/models.py"], "/train.py": ["/preprocess.py", "/models.py", "/dataset.py", "/hparams.py"], "/decode.py": ["/utils.py", "/preprocess.py", "/hparams.py"], "/preprocess.py": ["/utils.py"], "/FFTNet_generator.py": ["/models.py"]}
36,621
edose/photrix
refs/heads/master
/photrix/scratch2.py
__author__ = "Eric Dose, Albuquerque" """ This module: """ # Python core: import os import imaplib import email from email.header import decode_header # External packages: # Author's packages: THIS_PACKAGE_ROOT_DIRECTORY = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) INI_DIRECTORY = os.path.join(THIS_PACKAGE_ROOT_DIRECTORY, 'ini')
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
36,622
edose/photrix
refs/heads/master
/test/test_web.py
import os import pytest # so we can have the @pytest... decorator import ephem from photrix.web import * from photrix.user import Astronight __author__ = "Eric Dose :: Bois d'Arc Observatory, Kansas" PHOTRIX_ROOT_DIRECTORY = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) TEST_DATA_DIRECTORY = os.path.join(PHOTRIX_ROOT_DIRECTORY, "test", "$data_for_test") @pytest.mark.webtest def test_get_aavso_webobs_raw_table(): # ======================================================== # Valid star case (for reasonableness only, as webobs data may become updated at any time). star_name = "ST Tri" num_obs = 60 df = get_aavso_webobs_raw_table(star_name, num_obs) assert 25 <= len(df) <= MAX_WEBOBS_LINES # webobs API always returns at least 25 lines. assert len(df[df['filter'] == "V"]) >= 1 required_cols = ['target_name', 'jd', 'date_string', 'mag', 'error', 'filter', 'observer'] assert all([colname in list(df.columns) for colname in required_cols]) assert all(name.lower() == "st tri" for name in df['target_name']) assert all(10 < mag < 20 for mag in df['mag']) assert all(0 <= err < 1 for err in df['error']) assert 2457372 < df['jd'].max() < ephem.julian_date() # reasonable JD values. assert df['jd'].min() > 2457372 # day before my own known obs date assert all([df['jd'].iloc[i] >= df['jd'].iloc[i+1] for i in range(len(df)-1)]) # sorted JDs. assert all(observer.isalpha() and (2 <= len(observer) <= 4) for observer in df['observer']) # Test invalid star (no data in AAVSO webobs): star_name = "Not a Star" df = get_aavso_webobs_raw_table(star_name) assert len(df) == 0 # empty dataframe required_cols = ['target_name', 'jd', 'date_string', 'mag', 'error', 'filter', 'observer'] assert all([colname in list(df.columns) for colname in required_cols]) # but w/required cols. # Test valid star with start and end Julian Dates (but enough to cover num_results): star_name = "ST Tri" num_obs = 150 jd_start = 2455190.87 # webobs has 192 obs between these two Julian Dates (1/20/2017). 
jd_end = 2457400 df = get_aavso_webobs_raw_table(star_name, num_obs, jd_start, jd_end) assert len(df) == 150 assert jd_start <= min(df['jd']) < max(df['jd']) < jd_end assert all([df['jd'].iloc[i] >= df['jd'].iloc[i+1] for i in range(len(df)-1)]) # sorted JDs. assert set(df['observer']) == set(['DERA']) data_fullpath = os.path.join(TEST_DATA_DIRECTORY, "ST_Tri_150.csv") df150 = pd.read_csv(data_fullpath, index_col=0) assert len(df) == len(df150) assert all([df['date_string'].iloc[irow] == df150['date_string'].iloc[irow] for irow in range(len(df))]) # data_fullpath = os.path.join(TEST_DATA_DIRECTORY, "ST_Tri_150.csv") # df.to_csv(data_fullpath) # Test valid star with start and end Julian Dates (but NOT enough to cover num_results): star_name = "ST Tri" num_obs = 200 jd_start = 2455190.87 # webobs has 192 obs between these two Julian Dates (1/20/2017). jd_end = 2457400 df = get_aavso_webobs_raw_table(star_name, num_obs, jd_start, jd_end) assert len(df) == 192 assert jd_start <= min(df['jd']) < max(df['jd']) < jd_end assert all([df['jd'].iloc[i] >= df['jd'].iloc[i+1] for i in range(len(df)-1)]) # sorted JDs. 
assert set(df['observer']) == set(['DERA', 'SX']) data_fullpath = os.path.join(TEST_DATA_DIRECTORY, "ST_Tri_192.csv") df192 = pd.read_csv(data_fullpath, index_col=0) assert len(df) == len(df192) assert all([df['date_string'].iloc[irow] == df192['date_string'].iloc[irow] for irow in range(len(df))]) # data_fullpath = os.path.join(TEST_DATA_DIRECTORY, "ST_Tri_192.csv") # df.to_csv(data_fullpath) # # Test .most_recent_jd_mag(): # filter_code = "V" # filter_bools = df['filter'].str.lower() == filter_code.lower() # df2 = df[filter_bools] # jd_target = df2['jd'].max() # jd_bools = df2['jd'] == jd_target # df3 = df2[jd_bools] # assert obj.most_recent_jd_mag(filter='V')[0] == jd_target # assert obj.most_recent_jd_mag(filter='V')[1] == df3['mag'].iloc[0] # # Test .days_gap_jd(): # an_date_string = "20160910" # site_string = "BDO_Kansas" # an = Astronight(an_date_string, site_string) # days_target = an.local_middark_jd - jd_target # from prev test section. # assert obj.days_gap_jd(an.local_middark_jd, filter='V') == days_target # # # Test .days_gap_an(): # jd_an = an.local_middark_jd # assert obj.days_gap_an(an, filter='V') == jd_an - obj.most_recent_jd_mag(filter='V')[0]
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
36,623
edose/photrix
refs/heads/master
/photrix/fov.py
import os import sys import pandas as pd import json from collections import Counter from datetime import datetime, timezone from .util import * from .web import get_aavso_vsp_chart __author__ = "Eric Dose :: New Mexico Mira Project, Albuquerque" FOV_DIRECTORY = "C:/Dev/Photometry/FOV/" CHART_DIRECTORY = 'C:/Dev/Photometry/Chart' DEFAULT_PUNCHES_TEXT_PATH = 'C:/Dev/Photometry/punches.txt' VALID_FOV_OBSERVING_STYLES = ["Standard", "Stare", "Monitor", "LPV"] CURRENT_SCHEMA_VERSION = "1.5" LPV_MAG_SINE_FRACTION = 0.5 VR_FRACTION_OF_VI = 0.5 class Fov: """ Object: holds info for one Field Of View (generally maps to one AAVSO sequence or chart). For Schema 1.5 of April 2017 (first version defined initially in python/photrix). Observation types must be one of: "Stare", "Monitor", "LPV", or "Standard" Usage: fov = FOV("ST Tri") or fov = FOV("ST Tri", "C:/Dev/Photometry/FOV1_5/") """ def __init__(self, fov_name, fov_directory=FOV_DIRECTORY, warn_on_no_fov_file=True): fov_fullpath = os.path.join(fov_directory, fov_name + ".txt") if os.path.exists(fov_fullpath) and os.path.isfile(fov_fullpath): with open(fov_fullpath) as fov_file: lines = fov_file.readlines() self.is_valid = True # conditional on parsing in rest of __init__() else: # TODO: replace error print stmts by a new field "reason_invalid", # TODO: and a new signature parameter 'print_errors=False.' if warn_on_no_fov_file: print('>>>>> FOV file \'' + fov_fullpath + '\' not found. FOV object invalid.') self.is_valid = False return lines = [line.split(";")[0] for line in lines] # remove all comments. lines = [line.strip() for line in lines] # remove leading and trailing whitespace. # ---------- Header section (all directives are required). self.fov_name = Fov._directive_value(lines, "#FOV_NAME") if self.fov_name != fov_name: print(fov_name + ': Fov name doesn\'t match file name. 
Fov object invalid.') self.is_valid = False return self.format_version = Fov._directive_value(lines, '#FORMAT_VERSION') if self.format_version != CURRENT_SCHEMA_VERSION: print(fov_name + ': Fov Version Error. Fov object invalid.') self.is_valid = False return ra_str, dec_str = Fov._directive_words(lines, "#CENTER")[:2] self.ra = ra_as_degrees(ra_str) self.dec = dec_as_degrees(dec_str) self.chart = Fov._directive_value(lines, "#CHART") self.fov_date = Fov._directive_words(lines, "#DATE")[0] # ---------- Main-target section. self.main_target = Fov._directive_value(lines, "#MAIN_TARGET") self.target_type = Fov._directive_value(lines, "#TARGET_TYPE") self.motive = Fov._directive_value(lines, '#MOTIVE', default_value='') self.acp_comments = Fov._directive_value(lines, "#ACP_COMMENTS", default_value='') words = Fov._directive_words(lines, "#PERIOD") if words is not None: self.period = float(words[0]) else: self.period = None # As of schema v 1.4, require 2 or 3 values for JD, Mag_V, Color_VI (lengths must match). self.JD_bright, self.JD_faint, self.JD_second = (None, None, None) # default words = Fov._directive_words(lines, "#JD") if words is not None: self.JD_bright = float(words[0]) self.JD_faint = float(words[1]) if len(words) >= 3: self.JD_second = float(words[2]) self.mag_V_bright, self.mag_V_faint, self.mag_V_second = (None, None, None) # default words = Fov._directive_words(lines, "#MAG_V") if words is not None: self.mag_V_bright = float(words[0]) self.mag_V_faint = float(words[1]) if len(words) >= 3: self.mag_V_second = float(words[2]) self.color_VI_bright, self.color_VI_faint, \ self.color_VI_second = (None, None, None) # default words = Fov._directive_words(lines, "#COLOR_VI") if words is not None: self.color_VI_bright = float(words[0]) self.color_VI_faint = float(words[1]) if len(words) >= 3: self.color_VI_second = float(words[2]) # ---------- Observing section. 
obs_style_words = Fov._directive_words(lines, "#OBSERVING_STYLE") obs_style = obs_style_words[0] obs_values = obs_style_words[1:] if obs_style not in VALID_FOV_OBSERVING_STYLES: print('FOV \'' + self.fov_name + ': obs_style +' + obs_style + '\' is not a valid Observing Style. Fov object not valid.') self.is_valid = False self.observing_style = obs_style self.alert = None # default self.observing_list = [] for val in obs_values: items = val.split("=") tag = items[0] # Handle non-filter entries on #OBSERVING_STYLE line. if tag == "ALERT" and len(items) >= 2: self.alert = float(items[1]) continue # (here, insert any additional future non-filter values like "ALERT=n") # Handle filter entries on #OBSERVING_STYLE line. this_filter, this_mag, this_count = None, None, None if len(items) == 1: # cases without specified magnitude bits = items[0].split("(") if len(bits) == 1: # case "V" for LPVs, one exposure this_filter = bits[0] this_mag = None this_count = 1 elif len(bits) == 2: # case "V(2) for LPVs, 2 exposures, e.g. 
this_filter = bits[0] this_mag = None this_count = int(bits[1].replace(")", "")) elif len(items) == 2: # cases with specified magnitude bits = items[1].split("(") this_filter = items[0] if len(bits) == 1: # case "V=13.2" for monitors etc, one exposure this_mag = float(bits[0]) this_count = 1 elif len(bits) == 2: # case "V=13.2(2)" for stares this_mag = float(bits[0]) this_count = int(bits[1].replace(")", "")) self.observing_list.append((this_filter, this_mag, this_count)) if this_filter is None: raise FovError max_exp_value = Fov._directive_value(lines, "#MAX_EXPOSURE") if max_exp_value is not None: self.max_exposure = float(max_exp_value) if self.max_exposure <= 0: self.max_exposure = None else: self.max_exposure = None value = Fov._directive_value(lines, "#PRIORITY") if value is not None: self.priority = float(value) else: self.priority = None value = Fov._directive_value(lines, "#GAP_SCORE_DAYS") if value is not None: gap_score_words = value.split() if len(gap_score_words) >= 3: self.gap_score_days = [float(word) for word in gap_score_words[:3]] else: self.gap_score_days = [self.period * fraction for fraction in [0.01, 0.02, 0.05]] else: self.gap_score_days = None # ---------- AAVSO Sequence section. self.punches = Fov._get_punch_values(lines) self.aavso_stars = Fov._get_aavso_stars(lines) # ---------- Diagnostics and messages before finalizing object. if not self.observing_style.lower() == 'standard': star_is_check = [star.star_type == "check" for star in self.aavso_stars] if not any(star_is_check): print(">>>>> WARNING: FOV file ", self.fov_name, " seems not to be a Standard FOV, but has NO CHECK STAR.") # TODO: Add final diagnostics to set final fov.is_valid. @staticmethod def _directive_value(lines, directive_string, default_value=None): for line in lines: if line.upper().startswith(directive_string): return line[len(directive_string):].strip() return default_value # if directive absent. 
    @staticmethod
    def _directive_words(lines, directive_string):
        """ Return the directive's value split into words, or None if absent. """
        value = Fov._directive_value(lines, directive_string, default_value=None)
        if value is None:
            return None
        return value.split()

    @staticmethod
    def _get_punch_values(lines):
        """ Parse all #PUNCH lines; each yields (star_id, d_north, d_east). """
        punch_values = []
        for line in lines:
            if line.upper().startswith("#PUNCH"):
                value_string = line[len("#PUNCH"):].strip()
                star_id = (value_string.split(":")[0])
                terms = (value_string.split(":")[1]).split()
                if len(terms) == 2:
                    punch_item = (star_id.strip(), float(terms[0]), float(terms[1]))  # tuple
                    punch_values.append(punch_item)
        return punch_values  # list of tuples

    @staticmethod
    def _get_aavso_stars(lines):
        """ Parse every line after the #STARS directive as one sequence star;
            keep only stars that parse as valid.
        """
        aavso_stars = []
        stars_line_found = False
        for line in lines:
            if line.upper().startswith("#STARS"):
                stars_line_found = True
            else:
                if stars_line_found:
                    aavso_star = AavsoSequenceStar_WithMagErrors(line)
                    # aavso_star = AavsoSequenceStar_MagsOnly(line)  # legacy, from FOV Schema 1.4
                    if aavso_star.is_valid:
                        aavso_stars.append(aavso_star)
        return aavso_stars  # = list of AavsoSequenceStar_WithMagError objects, one per star.

    def calc_gap_score(self, days):
        """ Piecewise-linear score vs days since last observation:
            0 below gap_score_days[0], ramps 0->1 to [1], 1->2 to [2], capped at 2.
        """
        if days < self.gap_score_days[0]:
            return 0
        elif days < self.gap_score_days[1]:
            return (days - self.gap_score_days[0]) / \
                   (self.gap_score_days[1] - self.gap_score_days[0])
        elif days < self.gap_score_days[2]:
            return 1 + (days - self.gap_score_days[1]) / \
                   (self.gap_score_days[2] - self.gap_score_days[1])
        else:
            return 2

    def calc_priority_score(self, days):
        """ Priority weighted by the gap score for the given elapsed days. """
        return self.priority * self.calc_gap_score(days)

    def estimate_lpv_mags(self, jd):
        """ Estimate V, R, I magnitudes of this LPV at Julian date jd, by blending
            linear and sine interpolation between the bright and faint extrema.
            Returns dict {'V':..., 'R':..., 'I':...}, or None if not estimable.
        """
        if self.period <= 0 or self.mag_V_bright >= self.mag_V_faint:
            return None
        jd_bright, jd_faint = self.JD_bright, self.JD_faint
        v_bright, v_faint = self.mag_V_bright, self.mag_V_faint
        color_bright, color_faint = self.color_VI_bright, self.color_VI_faint
        period = self.period
        # First, determine whether brightness is increasing or decreasing at jd.
        # phases must be in [0,1], where phase = 0 means max brightness.
        phase_jd = get_phase(jd, jd_bright, period)
        phase_faint = get_phase(jd_faint, jd_bright, period)
        if 0 <= phase_jd <= phase_faint:  # brightness decreasing at jd
            time_fract = phase_jd / phase_faint
            v_start = v_bright
            color_start = color_bright
            v_change = v_faint - v_bright
            color_change = color_faint - color_bright
        elif phase_faint <= phase_jd <= 1:  # brightness increasing at jd
            time_fract = (phase_jd - phase_faint) / (1 - phase_faint)
            v_start = v_faint
            color_start = color_faint
            v_change = v_bright - v_faint
            color_change = color_bright - color_faint
        else:
            return None  # phase_jd must be outside [0,1]
        # Now, calculate linear and sine components and blend them (each filter).
        linear_mag_fract = time_fract
        sine_mag_fract = (1 + math.sin((time_fract-0.5)*math.pi)) / 2
        mag_fract = LPV_MAG_SINE_FRACTION * sine_mag_fract + \
            (1 - LPV_MAG_SINE_FRACTION) * linear_mag_fract
        # Render mag in each filter.
        mags = dict()
        mags['V'] = v_start + mag_fract * v_change
        # I from V and the interpolated V-I color:
        mags['I'] = (v_start - color_start) + mag_fract * (v_change - color_change)
        # R interpolated as a fixed fraction of the way from V to I:
        mags['R'] = mags['V'] + VR_FRACTION_OF_VI * (mags['I'] - mags['V'])
        return mags

    def __str__(self):
        return "FOV '" + self.fov_name + "' with " + \
               str(len(self.aavso_stars)) + " sequence stars."


class AavsoSequenceStar_MagsOnly:
    """ Object: holds parsed mag info (NO mag error info) for one star of an AAVSO sequence.
        FOV 1.4 and earlier only. For FOV 1.5+, this is still needed to read
        (1.4-compatible) pre-FOVs to be merged with chart data (by insert_chart_data()).
    """
    def __init__(self, star_string):
        words = star_string.strip().split("\t")  # AAVSO sequence star lines have tab delimiters.
        words = [word.strip() for word in words]  # strip whitespace from all items.
        self.star_id, self.star_type = ("", "")  # invalid defaults.
        if len(words) >= 7:
            # Get raw data strings from line.
            self.star_id = words[0]
            self.ra = float(words[1])  # already in degrees RA
            self.dec = float(words[2])  # already in degrees Dec
            # Extract star type.
            star_type_char = words[4]
            star_type_dict = {"C": "comp", "H": "check", "T": "target"}
            if star_type_char in star_type_dict.keys():
                self.star_type = star_type_dict[star_type_char]
            else:
                self.star_type = None
            # Extract magnitudes. (This code probably sub-optimal.)
            # Mag field format: '|'-separated entries of 'BAND_MAG', where BAND is a
            # bitmask-style code: 1=B, 2=V, 4=R, 8=I, 1024=U. Zero mags mean 'absent'.
            mag_string = words[3]
            self.magB, self.magV, self.magR, self.magI, self.magU = \
                (None, None, None, None, None)
            if self.star_type != "target":
                mag_words = mag_string.split("|")
                mag_words = [mag_word.strip() for mag_word in mag_words]  # strip whitespace
                for mag_word in mag_words:
                    mag_split = mag_word.split("_")
                    this_mag = float(mag_split[1])
                    if this_mag != 0:
                        if mag_split[0] == "1":
                            self.magB = this_mag
                        if mag_split[0] == "2":
                            self.magV = this_mag
                        if mag_split[0] == "4":
                            self.magR = this_mag
                        if mag_split[0] == "8":
                            self.magI = this_mag
                        if mag_split[0] == "1024":
                            self.magU = this_mag
        # Finally, validate this object, or not.
        # NOTE(review): bitwise '&' on two bools works here but 'and' was probably meant.
        self.is_valid = (len(self.star_id) >= 2) & (self.star_type is not None)

    def __str__(self):
        return "AAVSO Sequence Star '" + self.star_id + "', is_valid=" + str(self.is_valid)


class AavsoSequenceStar_WithMagErrors:
    """ Object: holds parsed mag AND mag_error info for ONE star of an AAVSO sequence.
        For FOV 1.5+.
    """
    def __init__(self, star_string):
        words = star_string.strip().split("\t")  # AAVSO sequence star lines have tab delimiters.
        words = [word.strip() for word in words]  # strip whitespace from all items.
        self.star_id, self.star_type = "", ""  # invalid defaults.
        self.mags = {}  # dict of filter:(mag,error); empty will suffice for target stars.
        if len(words) >= 4:
            # Get raw data strings from line:
            self.star_id = words[0]
            self.ra = float(words[1])  # already in degrees RA
            self.dec = float(words[2])  # already in degrees Dec
            if words[3].lower() in ['check', 'target', 'comp']:
                self.star_type = words[3].lower()
            # Get magnitudes and errors for check and comp stars:
            # Each entry looks like 'V=13.200(12)': mag, then error in milli-mags.
            if self.star_type in ['check', 'comp']:
                mag_words = words[4].split(" ")
                mag_words = [mag_word.strip() for mag_word in mag_words]  # strip whitespace
                mag_words = [mag_word for mag_word in mag_words if mag_word != '']  # delete empties
                for mag_word in mag_words:
                    mag_split = mag_word.split("=")
                    this_filter = mag_split[0]
                    try:
                        mag_error = mag_split[1].split("(")
                    except IndexError:
                        # NOTE(review): exit(0) kills the whole process on one bad star
                        # line; raising FovError would be kinder to callers.
                        print('Error reading star string: \'' + star_string + '\'')
                        exit(0)
                    this_mag = float(mag_error[0])
                    this_error = float(mag_error[1].replace(")", "")) / 1000.0  # mMag to Mag
                    if this_mag > 0.0:  # mag missing from mags means no data available.
                        self.mags[this_filter] = (this_mag, this_error)
        # Finally, validate this object, or not. NB: mag_error[0] is the magnitude.
        num_positive_mags = sum([mag_error[0] > 0.0 for mag_error in self.mags.values()])
        self.is_valid = (len(self.star_id) >= 2) and \
                        (self.star_type is not None) and \
                        ((num_positive_mags >= 1) or (self.star_type == 'target'))

    def __str__(self):
        return "AAVSO Sequence Star '" + self.star_id + "', is_valid=" + str(self.is_valid)


class FovError(Exception):
    """ Raised when a FOV file cannot be parsed. """
    pass


class FovVersionError(Exception):
    """ Raised when a FOV file's format version is not usable. """
    pass


def all_fov_names(fov_directory=FOV_DIRECTORY):
    """ Returns list of all FOV names (from filenames in FOV_DIRECTORY). """
    # Names are .txt filenames minus extension; '$'-prefixed files are skipped.
    fov_names = [fname[:-4] for fname in os.listdir(fov_directory)
                 if (fname.endswith(".txt")) and (not fname.startswith("$"))]
    return fov_names


def make_fov_dict(fov_directory=FOV_DIRECTORY, fov_names_selected=None):
    """ Returns dict of FOVs, as: FOV_name:FOV object.
        Usage: d = make_fov_dict()  --> returns dict of *all* FOVs.
        Usage: d = make_fov_dict(fov_names_selected=name_list) --> returns selected FOVs.
""" fov_all_names = all_fov_names(fov_directory) if fov_names_selected is None: fov_names = fov_all_names else: fov_names = list(set(fov_all_names) & set(fov_names_selected)) fov_dict = {fov_name: Fov(fov_name, fov_directory) for fov_name in fov_names} return fov_dict def print_fov_one_directive_line(fov_directory, all_names, directive, one_line_only=True): print("\n\nDirective = '" + directive + '":') for fov_name in all_names: fov_fullpath = os.path.join(fov_directory, fov_name + ".txt") with open(fov_fullpath) as fov_file: lines = fov_file.readlines() lines = [line.split(";")[0] for line in lines] # remove all comments lines = [line.strip() for line in lines] directive_lines = [line for line in lines if line.startswith(directive)] error_prefix = "ERROR >>>>>" spaces = len(error_prefix) * ' ' if one_line_only is True: if len(directive_lines) == 0: print(error_prefix, fov_name, "has NO directive lines for", directive) if len(directive_lines) > 1: for line in directive_lines: print(error_prefix, fov_name, "MULTIPLE", line) if len(directive_lines) == 1: print(spaces, fov_name, directive_lines[0]) else: for line in directive_lines: print(spaces, fov_name, line) def fov_diag(fov_directory=FOV_DIRECTORY): """ Comprehensive diagnostics for FOV files. This code is for FOV format 1.4. :param fov_directory: all .txt files in this directory_path will be checked [string] :return: Number of errors found [integer] """ # Collect all FOVs into a dictionary: fd = make_fov_dict(fov_directory=fov_directory) fov_names = list(fd.keys()) fov_names.sort() print(len(fov_names), ' FOV files found to test in directory_path \'', fov_directory, "\'.") # Make empty error dictionary: error_dict = [] # key=fov_name, value=list of error messages for name in fov_names: error_dict[name] = [] # First, verify format versions. 
print("FOV format version required to be : \'" + CURRENT_SCHEMA_VERSION + '\'') for name in fov_names: fov = fd[name] if fov.format_version != CURRENT_SCHEMA_VERSION: error_dict[name].append('Format version \'' + fov.format_version + '\'') # Verify reasonable JD values: jd_min = 2451544.5 # January 1 2000 jd_max = jd_from_datetime_utc(datetime.now(timezone.utc)) # time at this check print("JD limits applied: " + '{0:.3f}'.format(jd_min) + ' to ' + '{0:.3f}'.format(jd_max)) for name in fov_names: fov = fd[name] if fov.target_type.lower() != "standard": if not jd_min <= fov.JD_bright <= jd_max: error_dict[name].append("JD_bright '" + '{0:.3f}'.format(fov.JD_bright) + "' unreasonable.") if not jd_min <= fov.JD_faint <= jd_max: error_dict[name].append(": JD_faint '" + '{0:.3f}'.format(fov.JD_faint) + "' unreasonable.") if fov.JD_second is not None: if not jd_min <= fov.JD_second <= jd_max: error_dict[name].append(": JD_second '" + '{0:.3f}'.format(fov.JD_second) + "' unreasonable.") # Verify reasonable mag and color values: mag_bright = 5.0 mag_faint = 18.0 color_min = -0.2 color_max = +7.5 print("Mag limits: " + '{0:.3f}'.format(mag_bright) + ' to ' + '{0:.3f}'.format(mag_faint)) print("Color limits: " + '{0:.3f}'.format(color_min) + ' to ' + '{0:.3f}'.format(color_max)) for name in fov_names: fov = fd[name] if fov.target_type.lower() != "standard": if not mag_bright <= fov.mag_V_bright <= mag_faint: error_dict[name].append("mag_V_bright '" + '{0:.3f}'.format(fov.mag_V_bright) + "' unreasonable.") if not mag_bright <= fov.mag_V_faint <= mag_faint: error_dict[name].append("mag_V_faint '" + '{0:.3f}'.format(fov.mag_V_faint) + "' unreasonable.") if fov.mag_V_second is not None: if not mag_bright <= fov.mag_V_second <= mag_faint: error_dict[name].append("mag_V_second '" + '{0:.3f}'.format(fov.mag_V_second) + "' seems unreasonable.") if not color_min <= fov.color_VI_bright <= color_max: error_dict[name].append("color_VI_bright '" + '{0:.3f}'.format(fov.color_VI_bright) + 
"' seems unreasonable.") if not color_min <= fov.color_VI_faint <= color_max: error_dict[name].append("color_VI_faint '" + '{0:.3f}'.format(fov.color_VI_faint) + "' seems unreasonable.") if fov.color_VI_second is not None: if not color_min <= fov.color_VI_second <= color_max: error_dict[name].append("color_VI_second '" + '{0:.3f}'.format(fov.color_VI_second) + "' seems unreasonable.") # Ensure main target is in star list, as a target (skip standard FOVs): print(" Ensure main_target in star list, as a target [skip standard FOVs]") for name in fov_names: fov = fd[name] if fov.target_type.lower() != "standard": main_target_star_type = [star.star_type for star in fov.aavso_stars if star.star_id.lower() == fov.main_target.lower()] if len(main_target_star_type) <= 0: error_dict[name].append("main_target '" + fov.main_target + "' absent from star list.") if len(main_target_star_type) > 1: error_dict[name].append("main_target '" + fov.main_target + "' in star list more than once.") if len(main_target_star_type) == 1: if main_target_star_type[0] != "target": error_dict[name].append("main_target '" + fov.main_target + "' is in star list once but not as type 'target'.") # Ensure Observing styles are valid: print("Ensure Observing styles are valid.") valid_obs_styles_lower = \ [valid_obs_style.lower() for valid_obs_style in VALID_FOV_OBSERVING_STYLES] for name in fov_names: fov = fd[name] if fov.observing_style.lower() not in valid_obs_styles_lower: error_dict[name].append("invalid obs_style \'" + fov.observing_style + "\'") # Ensure JD, mag, color are consistent (skip standard FOVs): print("\n", 10*"=", " Ensure mutual consistency of: JD, mag, color (skip standard FOVs)") for name in fov_names: fov = fd[name] if fov.target_type.lower() != "standard": # Ensure all present with at least 2 values: if None in [fov.JD_bright, fov.JD_faint]: error_dict[name].append('missing JD') if None in [fov.mag_V_bright, fov.mag_V_faint]: error_dict[name].append('missing mag.') if None in 
[fov.color_VI_bright, fov.color_VI_faint]: error_dict[name].append('missing color.') # Ensure secondary min values are either all present or all absent: all_present = None not in [fov.JD_second, fov.mag_V_second, fov.color_VI_second] all_absent = fov.JD_second is None and \ fov.mag_V_second is None and \ fov.color_VI_second is None if not (all_present or all_absent): error_dict[name].append('mismatched JD, mag, color (secondary min?).') # Alert on out-of-spec phases, or non-positive periods (Eclipser-like only): print("Alert on out-of-spec phases & non-positive periods (Eclipser-like only)") for name in fov_names: fov = fd[name] if fov.target_type.lower() in ['eclipser', 'exoplanet']: if fov.period <= 0: error_dict[name].append("PERIOD=" + '{0:8.3f}'.format(fov.period)) else: # Verify that max JD is reasonable. phase_max = ((fov.JD_bright - fov.JD_faint) / fov.period) % 1.0 if abs(phase_max-0.25) > 0.05 or abs(phase_max-0.75) > 0.05: error_dict[name].append('Max phase of ' + '{0:.3f}'.format(phase_max) + ' unreasonable.') if fov.JD_second is not None: phase_second = ((fov.JD_second - fov.JD_faint) / fov.period) % 1.0 if abs(phase_second-0.5) > 0.02: error_dict[name].append('Secondary phase of ' + '{0:.3f}'.format(phase_second) + ' unreasonable.') # Finally, write out all errors, by fov name: num_errors = 0 for name in fov_names: num_errors += len(error_dict[name]) print(str(num_errors) + ' errors found.') for name in fov_names: fov_errors = error_dict[name] if len(fov_errors) >= 1: print('\n' + name + ':') for error in fov_errors: print(4*'' + error) def fovs_by_ra(fov_directory=FOV_DIRECTORY): fov_dict = make_fov_dict(fov_directory=fov_directory) fov_names = list(fov_dict.keys()) df_fov = pd.DataFrame({'fov_name': fov_names}) # 1 column ('fov_name') only. 
df_fov['obs_style'] = [fov_dict[name].observing_style for name in fov_names] df_fov['ra_hours'] = [fov_dict[name].ra/15.0 for name in fov_names] dict_category = {'Stare':['Stare'], 'LPV/Monitor':['LPV', 'Monitor']} print('\nFOV Counts as of ' + '{:%Y-%m-%d %H:%M UTC}'.format(datetime.now(timezone.utc))) for category, styles in dict_category.items(): styles_lower = [style.lower() for style in styles] matches_style = [s.lower() in styles_lower for s in df_fov['obs_style']] df_style = df_fov[matches_style] ra_list = df_style['ra_hours'] hist = 24*[0] for ra in ra_list: int_ra = int(min(23.0, floor(ra))) hist[int_ra] += 1 print('\n\nObs category \'' + category + '\' (' + str(len(ra_list)) + ' FOVs):') for i_ra in range(24): print(' ' + '{:2d}'.format(i_ra) + ': ' + '{:4d}'.format(hist[i_ra])) def delete_directive(fov_directory=FOV_DIRECTORY, out_fov_directory=None, directive_to_remove=None): """ Not Tested. :param fov_directory: :param out_fov_directory: :param directive_to_remove: :return: [None] """ if out_fov_directory is None or directive_to_remove is None: print('\n\nPlease give a new FOV directory_path and a directive to remove.\n\n') return names = all_fov_names(fov_directory) print(str(len(names)) + ' FOVs to adjust.') os.makedirs(out_fov_directory, exist_ok=True) # make output directory_path if doesn't exist. 
for name in names: in_fullpath = os.path.join(fov_directory, name + ".txt") with open(in_fullpath) as fov_file: lines = fov_file.readlines() out_lines = [] for line in lines: if not line.startswith(directive_to_remove): out_lines.append(line) out_fullpath = os.path.join(out_fov_directory, name + ".txt") with open(out_fullpath, 'w') as out_file: out_file.writelines(out_lines) print('Done.') def move_directive(fov_directory=FOV_DIRECTORY, out_fov_directory=None, directive_to_move=None, directive_before_new_position=None): """ :param fov_directory: :param out_fov_directory: :param directive_to_move: :param directive_before_new_position: :return: [None] """ if out_fov_directory is None or directive_to_move is None or \ directive_before_new_position is None: print('\n\nPlease give a new FOV directory_path. directive to move, and ' 'directive to insert after.\n\n') return names = all_fov_names(fov_directory) print(str(len(names)) + ' FOVs to adjust.') os.makedirs(out_fov_directory, exist_ok=True) # make output directory_path if doesn't exist. for name in names: in_fullpath = os.path.join(fov_directory, name + ".txt") with open(in_fullpath) as fov_file: lines = fov_file.readlines() # Extract line to move: for line in lines: if line.startswith(directive_to_move): line_to_move = line break lines.remove(line_to_move) # Find new location and do insertion. new_lines = [] for line in lines: new_lines.append(line) if line.startswith(directive_before_new_position): new_lines.append(line_to_move) out_fullpath = os.path.join(out_fov_directory, name + ".txt") with open(out_fullpath, 'w') as out_file: out_file.writelines(new_lines) print('Done.') def change_directive_value(fov_directory=FOV_DIRECTORY, out_fov_directory=None, directive_to_change=None, new_value=None, new_comment=None): """ May be used to update FORMAT_VERSION, especially. 
:param fov_directory: source directory_path [string] :param out_fov_directory: output directory_path, may be same as source directory_path [string] :param directive_to_change: e.g., '#FORMAT_VERSION' :param new_value: e.g., '1.5' [string] :param new_comment: [string, or None] :return: [nothing] """ if fov_directory is None or directive_to_change is None or \ new_value is None: print('\n\nPlease give a new FOV directory_path. directive to change, and ' 'new directive value.\n\n') return names = all_fov_names(fov_directory) os.makedirs(out_fov_directory, exist_ok=True) # make output directory_path if doesn't exist. for name in names: new_lines = [] in_fullpath = os.path.join(fov_directory, name + ".txt") with open(in_fullpath) as fov_file: lines = fov_file.readlines() for line in lines: if line.startswith(directive_to_change): if ';' in line: halves = line.split(';', maxsplit=1) value = (halves[0])[len(directive_to_change):] comment = halves[1] else: value = line[len(directive_to_change):] comment = None num_leading_spaces = len(value) - len(value.lstrip()) if new_comment is not None: comment = new_comment new_line = directive_to_change + (num_leading_spaces * ' ') +\ new_value + ' ;' + comment + '\n' new_lines.append(new_line) else: new_lines.append(line) out_fullpath = os.path.join(out_fov_directory, name + ".txt") with open(out_fullpath, 'w') as out_file: out_file.writelines(new_lines) # print(name) print('Done.') def insert_chart_data(fov_name, fov_directory=FOV_DIRECTORY): """ Takes one pre-FOV file already having VPhot sequence mags, adds VSP chart data (mag errors) to render a valid FOV 1.5+ file which overwrites the pre-FOV file. Gets chart ID from FOV #CHART directive. Looks in CHART_DIRECTORY for chart JSON file, or failing that downloads it from AAVSO and saves it. Handles only filters U,B,V,R,I as of 7/1/2017. 
    :param fov_name: one FOV name [string]
    :param fov_directory: input and output directory_path (same) [string]
    :return: list of warning lines [list of strings]
    """
    # global mag_error_string
    warning_lines = []
    # Load FOV text file with VPhot data only, divide into top lines & #STARS lists:
    fov_fullpath = os.path.join(fov_directory, fov_name + ".txt")
    with open(fov_fullpath) as fov_file:
        lines = fov_file.readlines()
    top_lines = []
    stars_lines = []
    above_stars_directive = True
    for line in lines:
        if above_stars_directive:
            top_lines.append(line)  # top_lines includes the #STARS line.
            if line.startswith("#STARS"):
                above_stars_directive = False
        else:
            stars_lines.append(line)
    if len(stars_lines) <= 0:
        error_line = "Either no #STARS line or no star lines after #STARS."
        print(error_line)
        warning_lines.append(error_line)
        return warning_lines
    # Read JSON chart file, or download from VSP (JSON format) and cache it:
    os.makedirs(CHART_DIRECTORY, exist_ok=True)  # create directory_path if doesn't already exist.
    chart_id = None
    for line in top_lines:
        if line.startswith("#CHART"):
            chart_id = line.split(";")[0][6:].strip()
            break
    if chart_id is None:
        error_line = "No #CHART directive in fov " + fov_name
        print(error_line)
        warning_lines.append(error_line)
        return warning_lines
    chart_fullpath = os.path.join(CHART_DIRECTORY, chart_id + ".txt")
    if os.path.exists(chart_fullpath):
        # Cached chart JSON found on disk; use it.
        with open(chart_fullpath, 'r') as chart_file:
            chart_json_text = chart_file.read()
    else:
        chart_json_text = get_aavso_vsp_chart(chart_id)
        print('Downloading chart \'' + chart_id + '\' for FOV \'' + fov_name + '\'')
        if chart_json_text == '':
            error_line = '>>>>> No chart \'' + chart_id +\
                '\' in AAVSO VSP (or chart directory_path. No change made to fov \'' + \
                fov_name + '\'.'
            print(error_line)
            warning_lines.append(error_line)
            return warning_lines
        with open(chart_fullpath, 'w') as fov_file:
            fov_file.write(chart_json_text)  # cache json
    json_obj = json.loads(chart_json_text)
    chart_stars = json_obj['photometry']
    # Match a chart star to each FOV star, make a new text line with both mags and errors:
    new_star_lines = []
    for line in stars_lines:
        if line.split(";")[0].strip() == '':
            new_star_lines.append(line)  # a comment line: copy as is.
            continue
        fov_star = AavsoSequenceStar_MagsOnly(line.strip())
        # NOTE(review): fov_star.star_type may be None for an unrecognized type
        # code, which would raise AttributeError on .lower() here — confirm inputs.
        if fov_star.star_type.lower() in ['comp', 'check']:
            star_prefix = int(fov_star.star_id.split("_")[0])  # int from before any "_".
            star_radec = RaDec(fov_star.ra, fov_star.dec)
            filter_dict = dict()
            filter_dict['U'] = (fov_star.magU, 0.0)  # mag, default mag_error
            filter_dict['B'] = (fov_star.magB, 0.0)
            filter_dict['V'] = (fov_star.magV, 0.0)
            filter_dict['R'] = (fov_star.magR, 0.0)
            filter_dict['I'] = (fov_star.magI, 0.0)
            # Redefine mag=None to mag=0.0 (FOV 1.5+ convention).
            for filter_name in filter_dict.keys():
                mag, error = filter_dict[filter_name]
                filter_dict[filter_name] = (mag if mag is not None else 0.0, error)
            # Find chart star entry (if any) that matches FOV star line.
            # Match on chart label == numeric star_id prefix, AND sky position
            # within 20 arcseconds.
            chart_star_found = False
            for chart_star in chart_stars:
                if chart_star['label'] == star_prefix:
                    chart_star_radec = RaDec(chart_star['ra'], chart_star['dec'])
                    if star_radec.degrees_from(chart_star_radec) < 20.0 / 3600.0:
                        # Here, we've found the chart star matching the FOV star.
                        chart_star_found = True
                        # Take union of FOV filters & chart bands for this star, parse mags, errors:
                        # Then update with chart mag errors, adding a dict entry if absent:
                        for band in chart_star['bands']:  # which is a list of dicts
                            band_translate = {'Ic': 'I', 'Rc': 'R'}  # chart -> FOV band names
                            fov_filter_name = band_translate.get(band['band'], band['band'])
                            fov_filter_data = filter_dict.get(fov_filter_name, None)
                            if fov_filter_data is not None:
                                new_mag = filter_dict[fov_filter_name][0]
                            else:
                                new_mag = band['mag']
                            # Chart may have either null or 0 to signify 'no error data':
                            new_error = band['error'] if band['error'] is not None else 0.0
                            filter_dict[fov_filter_name] = (new_mag, new_error)
                        break  # go to next chart star.
            if not chart_star_found:
                warning_lines.append('FOV ' + fov_name + ', star ' + fov_star.star_id +
                                     ': no matching star found in chart ' + chart_id +
                                     ' w/ maglimit ' + str(json_obj['maglimit']))
            # Mags & errors for this star are ready; add text to this output line:
            mag_error_list = []
            for filter_name in filter_dict.keys():
                mag, error = filter_dict[filter_name]
                if error is None:
                    # TODO: add error handling
                    dummy = 0
                if mag == 0 and error == 0:
                    mag_error_list.append(filter_name + '=0(0)')  # abbreviated
                else:
                    # Error is written in integer milli-mags, e.g. 'V=13.200(12)'.
                    mag_error_list.append(filter_name + '={0:.3f}'.format(mag) +
                                          '({0:d})'.format(round(1000 * error)))
            mag_error_string = ' '.join(mag_error_list)
        else:
            mag_error_string = ''  # for a target star.
        new_star_text_list = [str(fov_star.star_id),
                              '{0:9.5f}'.format(fov_star.ra),
                              '{0:9.5f}'.format(fov_star.dec),
                              fov_star.star_type,
                              mag_error_string]
        new_star_line = '\t'.join(new_star_text_list) + '\n'
        new_star_lines.append(new_star_line)
    # Build list of all output lines, then write to new FOV file:
    new_lines = top_lines.copy()
    new_lines.extend(new_star_lines)
    with open(fov_fullpath, 'w') as fov_file:
        fov_file.writelines(new_lines)
    return warning_lines


def get_chart_error(bands, chart_filter_name):
    """ Helper fn for insert_chart_data().
:param bands: list of band dicts, as extracted from chart json. :param chart_filter_name: chart-style filter name, as "B" or "Ic" (not "I"). :return: error if found, zero otherwise [float] """ chart_error = 0 # default if entry for filter not found. for i in range(len(bands)): if bands[i]['band'] == chart_filter_name: chart_error = bands[i]['error'] if bands[i]['error'] is not None else 0 break return chart_error def add_punches_from_text(punches_txt_path=DEFAULT_PUNCHES_TEXT_PATH, delim='\t'): # TESTED OK (exhaustive, manual) September 26, 2017. """ User fn to add all new punches from punches.txt into appropriate FOV. DOES CHANGE FOV FILES. :param punches_txt_path: text file containing punch information, gen from Excel file [string] :param delim: delimiter between fields in punches.txt, prob always tab character [string] :return: [null] """ FOV_STAR_TOLERANCE = 5 # in arcseconds MIN_PUNCH_DIST_FROM_TARGET = 2 # in arcseconds MAX_PUNCH_DIST_FROM_TARGET = 20 # in arcseconds df_in = pd.read_table('C:/Dev/Photometry/punches.txt', sep='\t', header=0, dtype='str', comment=';')[['FOV', 'Target', 'RA', 'Dec']] df_in = df_in[pd.notnull(df_in['RA'])] # remove blank lines (those without a RA entry). punch_list = [] for i in range(len(df_in)): line_fov = df_in['FOV'].iloc[i] if pd.isnull(line_fov): line_fov = '' line_target = df_in['Target'].iloc[i] if pd.isnull(line_target): line_target = '' line_ra = df_in['RA'].iloc[i] line_dec = df_in['Dec'].iloc[i] if len(line_fov) >= 1: # This is a target line. Set target variables for punch lines to follow. target_fov, target, target_ra, target_dec = \ line_fov, line_target, ra_as_degrees(line_ra), dec_as_degrees(line_dec) continue # This target line is now parsed. Move on. if line_fov == '' and line_target == '' and len(line_ra) >= 1 and len(line_dec) >= 1: # This is a punch line. Add row to df_punches. 
            punch_ra, punch_dec = ra_as_degrees(line_ra), dec_as_degrees(line_dec)
            # Distance and offsets in arcseconds; d_east corrected for declination.
            distance = 3600 * RaDec(punch_ra, punch_dec).degrees_from(RaDec(target_ra, target_dec))
            d_north = round(3600 * (punch_dec - target_dec), 2)
            d_east = round(3600 * (punch_ra - target_ra) *
                           math.cos((math.pi / 180.0) * target_dec), 2)
            this_punch = {'i': i, 'FOV': target_fov, 'Target': target,
                          'Target_RA': target_ra, 'Target_Dec': target_dec,
                          'Punch_RA': punch_ra, 'Punch_Dec': punch_dec,
                          'D_North': d_north, 'D_East': d_east, 'Distance': distance}
            punch_list.append(this_punch)
            continue  # This punch line has been added to dict that will construct df_punches.
        print('>>>>> Cannot parse input line ' + str(i) + ': ' +
              ', '.join([line_fov, line_target, line_ra, line_dec]))
        sys.exit()
    df_punches = pd.DataFrame(punch_list)
    # --> BEFORE writing into FOV files, verify here that the master data frame df_punches:
    #     (1) each FOV entry actually has a valid FOV file associated with it,
    #     (2) each punch target is present in the FOV file, and
    #     (3) each punch target RA and Dec is close to that in the FOV file.
    all_ok = True  # default value to be falsified by any failure to verify.
    fov_list = df_punches['FOV'].unique()
    for fov_name in fov_list:
        print(fov_name)
        this_fov = Fov(fov_name)
        if not this_fov.is_valid:
            print('>>>>> FOV file ' + fov_name + ' does not exist.')
            all_ok = False
            continue
        fov_stars = this_fov.aavso_stars
        df_this_fov = df_punches.loc[df_punches['FOV'] == fov_name, :]
        for i_target in range(len(df_this_fov)):
            target = df_this_fov['Target'].iloc[i_target]
            target_ra = df_this_fov['Target_RA'].iloc[i_target]
            target_dec = df_this_fov['Target_Dec'].iloc[i_target]
            fov_star_ids = [star.star_id for star in this_fov.aavso_stars]
            # Verify this target in present in this FOV:
            if target not in fov_star_ids:
                print('>>>>> Target ' + target + ' is missing from FOV file ' + fov_name)
                all_ok = False
                continue
            # Verify target's RA,Dec position in punch file is very close to that in FOV file:
            target_index = fov_star_ids.index(target)
            fov_target_star = this_fov.aavso_stars[target_index]
            distance = 3600.0 * RaDec(target_ra, target_dec)\
                .degrees_from(RaDec(fov_target_star.ra, fov_target_star.dec))
            if distance > FOV_STAR_TOLERANCE:
                # NOTE(review): target_ra/target_dec and the star's ra/dec are floats;
                # concatenating them with '+' here would raise TypeError if this
                # branch fires — confirm and wrap in str() if so.
                print('>>>>> Target ' + target + ' in FOV ' + fov_name +
                      'has punch list RA,Dec=(' + target_ra + ',' + target_dec +
                      ') which does not match FOV file\'s RA,Dec=(' +
                      fov_target_star.ra + ',' + fov_target_star.dec + ').')
                all_ok = False
                continue
    # Print any warnings:
    df_too_close = df_punches.loc[df_punches['Distance'] < MIN_PUNCH_DIST_FROM_TARGET, :]
    num_too_close = len(df_too_close)
    if num_too_close >= 1:
        print('\n>>>>> WARNING: ' + str(len(df_too_close)) +
              'requested punches have punches too close to targets:')
        for i in df_too_close.index:
            # NOTE(review): 'Distance' is a float; str.join over it would raise
            # TypeError if this branch fires — confirm and wrap in str() if so.
            print(' '.join([df_too_close.loc[i, 'FOV'],
                            df_too_close.loc[i, 'Target'],
                            df_too_close.loc[i, 'Distance']]))
    # Print summary:
    print('\n' + str(len(df_punches)) + ' punches read in from text file.')
    print('Largest Target-Punch distance = ' +
          '{:.2f}'.format(max(df_punches['Distance'])) + ' arcsec.')
    # If user approves it, write all #PUNCH lines into the FOV files:
    recommend_proceed = all_ok \
        and num_too_close == 0 \
        and max(df_punches['Distance']) <= MAX_PUNCH_DIST_FROM_TARGET
    answer = input('Proceed to write punches into FOV files? Recommend ' +
                   ('Yes' if recommend_proceed else 'NO!!!') + ' (y/n):')
    if answer.strip().lower()[0] == 'y':
        for i in df_punches.index:
            radec_target = RaDec(df_punches.loc[i, 'Target_RA'], df_punches.loc[i, 'Target_Dec'])
            radec_punch = RaDec(df_punches.loc[i, 'Punch_RA'], df_punches.loc[i, 'Punch_Dec'])
            add_one_punch(df_punches.loc[i, 'FOV'], df_punches.loc[i, 'Target'],
                          radec_target, radec_punch, False)
        print('All ' + str(len(df_punches)) + ' punches written into ' +
              str(len(fov_list)) + ' FOV files.')
    else:
        print('No change to FOV files.')
    # return df_punches


def add_one_punch(fov_name, star_id, star_ra_dec, punch_ra_dec, user_must_confirm=False):
    """ Service fn that inserts one punch line into one FOV file (usually called by add_punches()).
    :param fov_name: (string)
    :param star_id: (string)
    :param star_ra_dec: sky location of star as found during user det of punch (RaDec object).
    :param punch_ra_dec: punch location of star as found during user det of punch (RaDec object).
    :param user_must_confirm: if True, fn stops to ask user to confirm writing into FOV file.
    :return: True if insertion happened reasonably, else False.
    """
    if fov_name is None or star_id is None:
        return False
    star_id = star_id.strip()
    fov_fullpath = os.path.join(FOV_DIRECTORY, fov_name + '.txt')
    with open(fov_fullpath) as fov_file:
        lines = fov_file.readlines()
    # Find line beginning with '#STARS':
    # NOTE(review): i_stars stays unbound (NameError at insert below) if the file
    # has no '#STARS' line — confirm all FOV files are guaranteed to have one.
    for i, line in enumerate(lines):
        if line.upper().startswith('#STARS'):
            i_stars = i
            break
    # Verify that star_id is in FOV's list of stars:
    fov = Fov(fov_name)
    stars_list = fov.aavso_stars
    star_id_list = [s.star_id for s in stars_list]
    if star_id not in star_id_list:
        return False
    # Build text line to insert into FOV file:
    d_north = 3600.0 * (punch_ra_dec.dec - star_ra_dec.dec)  # arcseconds
    d_east = 3600.0 * (punch_ra_dec.ra - star_ra_dec.ra) * \
        math.cos((math.pi / 180.0) * star_ra_dec.dec)
    new_punch_line = '#PUNCH ' + star_id + ' : ' +\
        '{:.2f}'.format(d_north).rjust(8) + '{:.2f}'.format(d_east).rjust(8) + \
        ' ; dNorth dEast of punch center vs target star, in arcsec' + '\n'
    this_print_line = fov_name + ' ::: ' + '#PUNCH ' + star_id + ' : ' +\
        '{:.2f}'.format(d_north).rjust(8) + '{:.2f}'.format(d_east).rjust(8)
    # Confirm if required, then write in to FOV file:
    if user_must_confirm:
        answer = input('FOV ' + fov_name + ':\n' + new_punch_line)
        ok_to_write = (answer.strip().lower()[0] == 'y')
    else:
        ok_to_write = True
    if ok_to_write:
        lines.insert(i_stars, new_punch_line)  # insert just before #STARS line, then exit.
        with open(fov_fullpath, 'w') as fov_file:
            fov_file.writelines(lines)
        print(this_print_line)


def change_directive_value2(fov_directory=FOV_DIRECTORY, out_fov_directory=None):
    """ Updates Mira FOVs' longest GAP_SCORE_DAYS.
    :param fov_directory: source directory_path [string]
    :param out_fov_directory: output directory_path, may be same as source directory_path [string]
    :return: [nothing]
    """
    NEW_LONGEST_GSD = 4  # probably was 5% of period
    if out_fov_directory is None:
        print('\n\nPlease give a new output FOV directory_path.\n\n')
        return
    names = all_fov_names(fov_directory)
    os.makedirs(out_fov_directory, exist_ok=True)  # make output directory_path if doesn't exist.
    for name in names:
        fov = Fov(fov_name=name)
        if fov.target_type.lower() == 'mira':
            new_lines = []
            in_fullpath = os.path.join(fov_directory, name + ".txt")
            with open(in_fullpath) as fov_file:
                lines = fov_file.readlines()
            for line in lines:
                if line.startswith('#GAP_SCORE_DAYS'):
                    if ';' in line:
                        halves = line.split(';', maxsplit=1)
                        comment = halves[1]
                    else:
                        comment = ''
                    # Cap the longest gap-score threshold at NEW_LONGEST_GSD % of period.
                    new_gsd = fov.gap_score_days.copy()
                    new_gsd[2] = min(new_gsd[2], (NEW_LONGEST_GSD / 100.0) * fov.period)
                    gsd_strings = []
                    for i in range(3):
                        gsd = '{:.2f}'.format(new_gsd[i])
                        # if gsd.endswith('.00'):
                        #     gsd = gsd[:-3]
                        gsd_strings.append(gsd)
                    # NOTE(review): when the original line had no ';' comment, this
                    # rebuilt line carries no trailing '\n' — confirm intended.
                    new_line = '#GAP_SCORE_DAYS'.ljust(20) + ' '.join(gsd_strings) +\
                        ' ;' + comment
                    new_lines.append(new_line)
                    print(name.ljust(30) + ': ' + str(fov.gap_score_days[2]) +
                          ' ' + gsd_strings[2])
                else:
                    new_lines.append(line)
            out_fullpath = os.path.join(out_fov_directory, name + ".txt")
            with open(out_fullpath, 'w') as out_file:
                out_file.writelines(new_lines)
    print('Done.')
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
36,624
edose/photrix
refs/heads/master
/photrix/acps.py
from .user import Instrument

__author__ = "Eric Dose :: New Mexico Mira Project, Albuquerque"

# package photrix.ACPS: generate RTML for ACP Scheduler
# Typical usage:
#
#    from photrix.acps import *
#    project = ACPS_project("AN20160630-BOREA")  # for the whole night on one instrument.
#    for plan_source in plan_sources:
#        plan = project.make_plan(id)
#        plan.horizon = 30                       # override class defaults (optional)
#        plan.priority = 4                       #    "
#        obs = ACPS_observation(id, RA_deg, dec_deg)
#        obs.add_imageset(name1, count1, exposure1, filter1)
#        obs.add_imageset(name2, count2, exposure2, filter2) ...
#        plan.add_observation(obs)
#        project.add_plan(plan)
#    rtml_text = project.rtml()


class ACPS_project:
    """One night's ACP Scheduler project for one instrument; renders to RTML 2.3 text.

    :param project_name: required, non-empty project identifier. [string]
    :param instrument_name: key into photrix Instrument data; also used as Telescope name. [string]
    :param user: contact user name written into RTML header. [string]
    :param email: contact e-mail written into RTML header. [string]
    :param organization: contact organization written into RTML header. [string]
    :raises ValueError: if project_name is None or zero-length.
    """
    def __init__(self, project_name=None, instrument_name="Borea", user="Eric",
                 email="astro@ericdose.com", organization="Bois d'Arc Observatory, Kansas"):
        if project_name is None:
            raise ValueError('project name may not be null.')
        if len(project_name) <= 0:
            raise ValueError('project name may not be zero-length.')
        self.project_name = project_name
        self.instrument = Instrument(instrument_name)
        self.telescope = instrument_name
        self.user = user
        self.email = email
        self.organization = organization
        self.plan_list = []

    def make_plan(self, plan_id):
        """Return a new ACPS_plan pre-loaded with this instrument's observing constraints."""
        plan = ACPS_plan(plan_id, self.project_name, self.telescope, self.user)
        plan.horizon = self.instrument.min_altitude
        plan.moon_distance = self.instrument.min_distance_full_moon
        plan.min_exposure = self.instrument.camera["shortest_exposure"]
        return plan

    def add_plan(self, new_acps_plan):
        """Append a plan to this project; non-ACPS_plan objects are silently ignored."""
        if isinstance(new_acps_plan, ACPS_plan):
            self.plan_list.append(new_acps_plan)

    def rtml(self):
        """Render the whole project (XML header, Contact block, all plans) as one RTML string."""
        rtml_text = '<?xml version="1.0"?>\n' + \
            '<RTML xmlns:xs="http://www.w3.org/2001/XMLSchema-instance" version="2.3">\n' + \
            xml_open(1, "Contact") + \
            xml_line(2, "User", self.user) + \
            xml_line(2, "Email", self.email) + \
            xml_line(2, "Organization", self.organization) + \
            xml_close(1, "Contact")
        for plan in self.plan_list:
            rtml_text += plan.rtml()
        rtml_text += xml_close(0, "RTML")
        return rtml_text


class ACPS_plan:
    """One RTML <Request>: scheduling constraints plus a list of observations (targets)."""
    def __init__(self, plan_id, project_name, telescope, user):
        if plan_id is None:
            raise ValueError('plan id may not be null.')
        if len(plan_id) <= 0:
            raise ValueError('plan id may not be zero-length.')
        self.plan_id = plan_id
        self.user_name = user
        self.description = ""
        self.reason = ""
        self.project_name = project_name
        self.airmass = None                 # default
        self.airmass_range_minimum = None   # default
        self.airmass_range_maximum = None   # default
        self.horizon = 28                   # default, in degrees
        self.hour_angle_range_east = None   # default
        self.hour_angle_range_west = None   # default
        self.sky_condition = "Fair"         # default
        self.moon_distance = 50             # default, in degrees
        self.moon_width = 10                # default, in days
        self.earliest = None
        self.latest = None
        self.priority = 4   # default; NB: mean for all plans post-normalized to 0.5
        self.telescope = telescope
        self.min_exposure = 0               # default
        self.observation_list = []

    def add_observation(self, ACPS_observation_to_add):
        """Append one ACPS_observation (RTML Target) to this plan."""
        self.observation_list.append(ACPS_observation_to_add)

    def rtml(self):
        """Render this plan as one RTML <Request> element string."""
        rtml_text = xml_open(1, "Request") + \
            xml_line(2, "ID", self.plan_id) + \
            xml_line(2, "UserName", self.user_name) + \
            xml_line(2, "Description", self.description) + \
            xml_line(2, "Reason", self.reason) + \
            xml_line(2, "Project", self.project_name) + \
            xml_open(2, "Schedule")
        # TODO: airmass vs airmass range vs horizon probably s/b handled with @property
        #    ...(so that setting one turns off the others).
        this_airmass_minimum = self.airmass_range_minimum
        this_airmass_maximum = self.airmass
        if (self.airmass is not None) and (self.airmass_range_maximum is not None):
            this_airmass_maximum = min(self.airmass, self.airmass_range_maximum)
        if this_airmass_minimum is not None:
            # BUG FIX: limits are numeric; str() them before concatenation (raw numbers
            # raised TypeError). Also close AirmassRange unconditionally once opened
            # (it was previously left unclosed when only a minimum was given).
            rtml_text += xml_open(3, "AirmassRange") + \
                xml_line(4, "Minimum", str(this_airmass_minimum))
            if this_airmass_maximum is not None:
                rtml_text += xml_line(4, "Maximum", str(this_airmass_maximum))
            rtml_text += xml_close(3, "AirmassRange")
        else:
            if this_airmass_maximum is not None:
                rtml_text += xml_open(3, "Airmass") + \
                    xml_line(4, "Airmass", str(this_airmass_maximum)) + \
                    xml_close(3, "Airmass")
        if self.horizon is not None:
            rtml_text += xml_line(3, "Horizon", str(self.horizon))
        if (self.hour_angle_range_east is not None) and (self.hour_angle_range_west is not None):
            # BUG FIX: closing tag was written with xml_line(), which emitted
            # '<HourAngleRange></HourAngleRange>' instead of '</HourAngleRange>',
            # producing malformed RTML.
            rtml_text += xml_open(3, "HourAngleRange") + \
                xml_line(4, "East", "{0:.2f}".format(self.hour_angle_range_east)) + \
                xml_line(4, "West", "{0:.2f}".format(self.hour_angle_range_west)) + \
                xml_close(3, "HourAngleRange")
        rtml_text += xml_line(3, "SkyCondition", self.sky_condition)
        rtml_text += xml_open(3, "Moon") + \
            xml_line(4, "Distance", "{0:.2f}".format(self.moon_distance)) + \
            xml_line(4, "Width", "{0:.2f}".format(self.moon_width)) + \
            xml_close(3, "Moon")
        if (self.earliest is not None) or (self.latest is not None):
            rtml_text += xml_open(3, "TimeRange")
            if self.earliest is not None:
                rtml_text += xml_line(4, "Earliest", self.earliest)
            if self.latest is not None:
                rtml_text += xml_line(4, "Latest", self.latest)
            rtml_text += xml_close(3, "TimeRange")
        rtml_text += xml_line(3, "Priority", "{0:d}".format(self.priority))
        rtml_text += xml_close(2, "Schedule")
        rtml_text += xml_line(2, "Telescope", self.telescope)
        for observation in self.observation_list:
            rtml_text += observation.rtml()
        rtml_text += xml_close(1, "Request")
        return rtml_text


class ACPS_observation:
    """ Object: holds one observation (RTML Target)
        Usage: obs = ACPS_observation('ST Tri', 34.555, +21.334)
               obs.add_imageset('ST Tri', 3, 120, 'V')
    """
    def __init__(self, obs_id, RA_deg, dec_deg, autofocus=False, count=1):
        if obs_id is None:
            raise ValueError('observation id may not be null.')
        if len(obs_id) <= 0:
            raise ValueError('observation id may not be zero-length.')
        self.id = obs_id
        self.ra = RA_deg
        self.dec = dec_deg
        self.autofocus = autofocus
        self.count = count
        self.imageset_list = []

    def add_imageset(self, name, count, exposure, filter):
        """Append one image set (RTML Picture) to this observation."""
        # TODO: construct default name if given imageset name == "" or None
        imageset_to_add = ACPS_imageset(name, count, exposure, filter)
        self.imageset_list.append(imageset_to_add)

    def rtml(self):
        """Render this observation as one RTML <Target> element string."""
        open_text = 'Target count=\"' + "{0:d}".format(self.count) + '\"'
        if self.autofocus is True:
            open_text += ' autofocus=\"True\"'
        # CONSISTENCY FIX: open/close indent levels now match (Target at level 2
        # under Request, Coordinates closed at its own open level 3); previously
        # Target opened at 3 but closed at 2, and Coordinates closed at 4.
        # Whitespace only -- no effect on XML semantics.
        rtml_text = xml_open(2, open_text)
        rtml_text += xml_line(3, "ID", self.id)
        rtml_text += xml_line(3, "Name", self.id)   # ID == Name, for now.
        rtml_text += xml_open(3, "Coordinates") + \
            xml_line(4, "RightAscension", "{0:.4f}".format(self.ra)) + \
            xml_line(4, "Declination", "{0:.4f}".format(self.dec)) + \
            xml_close(3, "Coordinates")
        for imageset in self.imageset_list:
            rtml_text += imageset.rtml()
        rtml_text += xml_close(2, 'Target')
        return rtml_text


class ACPS_imageset:
    """ Pseudo-private class. Object: holds one image set (RTML Picture)
        Usage: is = ACPS_imageset('ST Tri', 3, 120, 'V')
               acps_obs.add_imageset(is)
    """
    def __init__(self, name, count, exposure, filter):
        self.name = name
        self.count = count
        self.exposure = max(0, exposure)  # exposure in seconds; negatives clamped to 0.
        self.binning = 1                  # ALWAYS BINNING == 1
        self.filter = filter

    def rtml(self):
        """Render this image set as one RTML <Picture> element string."""
        rtml_text = xml_open(3, 'Picture count=\"' + "{0:d}".format(self.count) + '\"')
        rtml_text += xml_line(4, 'Name', self.name)
        rtml_text += xml_line(4, 'Description', self.name)  # Name == Description, for now.
        # BUG FIX: was "{0:d}".format(self.exposure), which raised TypeError for
        # float exposures; str() gives identical output for ints.
        rtml_text += xml_line(4, 'ExposureTime', str(self.exposure))
        rtml_text += xml_line(4, 'Binning', "{0:d}".format(self.binning))
        rtml_text += xml_line(4, 'Filter', self.filter)
        rtml_text += xml_line(4, 'Dither', '0')             # Dither always zero, for now.
        rtml_text += xml_close(3, 'Picture')
        return rtml_text


def xml_line(n_tabs, tag, content=''):
    """Return one indented XML element line: <tag>content</tag>."""
    return n_tabs * '\t' + '<' + tag + '>' + content + '</' + tag + '>\n'


def xml_open(n_tabs, tag):
    """Return one indented XML opening-tag line: <tag>."""
    return n_tabs * '\t' + '<' + tag + '>\n'


def xml_close(n_tabs, tag):
    """Return one indented XML closing-tag line: </tag>."""
    return n_tabs * '\t' + '</' + tag + '>\n'
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
36,625
edose/photrix
refs/heads/master
/photrix/roof_nms.py
__author__ = "Eric Dose, Albuquerque"

""" This module: monitor New Mexico Skies observatory open/closed status, via the
    NMS weather web page and/or NMS status e-mails, sounding audible alerts on change. """

# Python core:
import os
from time import sleep
import winsound
from collections import defaultdict
from datetime import datetime, timezone

# External packages:
import requests
from bs4 import BeautifulSoup

# Author's packages:
from util import hhmm_from_datetime_utc
from imap import get_most_recent_relevant_email

THIS_PACKAGE_ROOT_DIRECTORY = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
INI_DIRECTORY = os.path.join(THIS_PACKAGE_ROOT_DIRECTORY, 'ini')

NMS_STATUS_URL = 'https://www.nmskies.com/weather.php'
# Maps weather-page image filename -> observatory status.
NMS_STATUS_DICT = defaultdict(lambda: 'unknown',  # default if given image name is unknown.
                              {'daylight.jpg': 'closed',
                               'clouds.jpg': 'closed',
                               'snow.jpg': 'closed',
                               'fog.jpg': 'closed',
                               'wind.jpg': 'closed',
                               # 'gust.jpg': 'closed',
                               'rain.jpg': 'closed',
                               'open.jpg': 'open'})

# HTML CONSTANTS:
HTTP_OK_CODE = 200  # "OK. The request has succeeded."
SECONDS_BETWEEN_HTML_QUERIES = 120
MAX_CONSECUTIVE_HTML_TIMEOUTS = 3
SECONDS_BETWEEN_CONSECUTIVE_HTML_TIMEOUTS = 20  # retry cadence.
STR_LEFT_OF_IMAGE_NAME = 'images/'
STR_RIGHT_OF_IMAGE_NAME = '?image='

# E-MAIL CONSTANTS:
SECONDS_BETWEEN_EMAIL_QUERIES = 180
NMS_WEATHER_SUBJECT_START = '[NMSWX]'
NMS_TECHSUPPORT_FROM_STRINGS = ['tech', '@nmskies.com']
NMS_OPEN = ['OK to open']
NMS_CLOSED = ['opening delayed', 'closed due to']
NMS_EMAIL_SOUND = 'SystemExclamation'
SOUND_REPETITIONS_ON_STATUS_CHANGE = 40
SOUND_ON_OPENING = 'SystemAsterisk'  # = Win 10 Asterisk; use Sonata/Windows Error.wav
SOUND_ON_CLOSING = 'SystemHand'  # = Win 10 Critical Stop; use Sonata/Windows Critical Stop.wav
SOUND_REPETITIONS_ON_NEW_EMAIL_ONLY = 1


def play_sound_alias(sound, count):
    """Play a Windows system-sound alias, count times in a row (blocking)."""
    for i in range(count):
        winsound.PlaySound(sound, winsound.SND_ALIAS)


def play_sound_file(sound_file, count):
    """Play a .wav file by path, count times in a row (blocking)."""
    for i in range(count):
        winsound.PlaySound(sound_file, winsound.SND_FILENAME | winsound.SND_NOWAIT)


__________HTML_ACCESS________________________________________________________ = 0


def get_nms_weather_html_request(url):
    """ Get and return HTML (request object) representing current NMS Weather webpage.
    Retries up to MAX_CONSECUTIVE_HTML_TIMEOUTS times on timeout.
    :return: requests.Response object, or None on failure.
    """
    # BUG FIX: this previously returned an error *string* on RequestException and
    # (implicitly) None after exhausting timeouts, while callers unconditionally read
    # .status_code. Failure is now uniformly signaled by returning None.
    for n_timeouts in range(MAX_CONSECUTIVE_HTML_TIMEOUTS):
        try:
            r = requests.get(url)
        except requests.exceptions.Timeout:
            print(' >>>>> Warning:', str(n_timeouts), 'consecutive timeouts.')
            sleep(SECONDS_BETWEEN_CONSECUTIVE_HTML_TIMEOUTS)
            continue
        except requests.exceptions.RequestException as e:
            print(e)
            print('ERROR: RequestException (URL not found).')
            return None
        if r.status_code != HTTP_OK_CODE:
            print(' >>>>> Could not get', NMS_STATUS_URL)
        return r
    return None  # all attempts timed out.


def get_nms_status_image_name():
    """ Extract image name (e.g., 'daylight.jpg') from HTML of NMS Weather webpage.
    :return: image filename, or '' if page unavailable or name not found. [string]
    """
    image_name = ''
    r = get_nms_weather_html_request(NMS_STATUS_URL)
    if r is not None and r.status_code == HTTP_OK_CODE:
        soup = BeautifulSoup(r.text, 'html.parser')
        img_tags = soup.find_all('div', class_='img')  # NB: "class_" not "class" (reserved).
        s = [x for x in img_tags][0].contents[1]['src']  # source code of page's very first image.
        i_left = s.find(STR_LEFT_OF_IMAGE_NAME, 0)
        i_right = s.find(STR_RIGHT_OF_IMAGE_NAME, len(STR_LEFT_OF_IMAGE_NAME))
        if i_left >= 0 and i_right >= len(STR_LEFT_OF_IMAGE_NAME):
            # BUG FIX: slice previously offset by len(STR_RIGHT_OF_IMAGE_NAME); correct
            # offset is the length of the *left* marker (they only coincidentally match).
            image_name = s[i_left + len(STR_LEFT_OF_IMAGE_NAME):i_right].strip()
    return image_name


def monitor_nms_status_via_html():
    """ Make noise if NMS *observatory* (not just a roof) opens or closes,
        according to New Mexico Skies' weather web page.
        (logic here is simpler than for Deep Sky West polling,
        as NMS web page needs no fault tolerance.)
    """
    print('Playing OPENED sound twice, then CLOSED sound twice...')
    play_sound_alias(SOUND_ON_OPENING, 2)
    play_sound_alias(SOUND_ON_CLOSING, 2)
    # STATUS LOOP:
    previous_status = None
    last_event_string = ''
    while True:
        image_name = get_nms_status_image_name()
        status = NMS_STATUS_DICT[image_name]  # 'unknown' for unrecognized/empty names.
        hhmm = hhmm_from_datetime_utc(datetime.now(timezone.utc))
        print(hhmm + ': NMS is', status.upper(), last_event_string)
        if previous_status is not None:
            if status == 'open' and previous_status == 'closed':
                print(32 * '*', '\n >>>>> OPENED at', hhmm)
                last_event_string = ' (since ' + hhmm + ')'
                play_sound_alias(SOUND_ON_OPENING, SOUND_REPETITIONS_ON_STATUS_CHANGE)
            elif status == 'closed' and previous_status == 'open':
                print(32 * '*', '\n >>>>> CLOSED at', hhmm)
                last_event_string = ' (since ' + hhmm + ')'
                play_sound_alias(SOUND_ON_CLOSING, SOUND_REPETITIONS_ON_STATUS_CHANGE)
        elif status not in ['closed', 'open']:
            # only get here if there is a problem.
            play_sound_alias(SOUND_ON_CLOSING, 4)
            print('STOPPING ON ERROR: status is >' + status + '<')
            break
        previous_status = status
        sleep(SECONDS_BETWEEN_HTML_QUERIES)


__________E_MAIL_ACCESS________________________________________________________ = 0


def test_monitor():
    """Ad-hoc manual check: print the most recent weather and non-weather e-mails."""
    print()
    wx_dict = get_most_recent_relevant_email(subject_start=NMS_WEATHER_SUBJECT_START,
                                             from_strings=NMS_TECHSUPPORT_FROM_STRINGS)
    print('WX:', wx_dict)
    other_dict = get_most_recent_relevant_email(subject_start=None,
                                                from_strings=NMS_TECHSUPPORT_FROM_STRINGS)
    print('Other:', other_dict)


def monitor_nms_status_via_e_mail():
    """ Make specific noise whenever NMS techsupport sends an e-mail."""
    print('Playing OPENED sound twice, then CLOSED sound twice...')
    play_sound_alias(NMS_EMAIL_SOUND, 4)
    wx_dict = get_most_recent_relevant_email(subject_start=NMS_WEATHER_SUBJECT_START,
                                             from_strings=NMS_TECHSUPPORT_FROM_STRINGS)
    other_dict = get_most_recent_relevant_email(subject_start=None,
                                                from_strings=NMS_TECHSUPPORT_FROM_STRINGS)
    if other_dict == wx_dict:
        other_dict = None
    most_recent_wx_dict = wx_dict
    most_recent_other_dict = other_dict
    # Display most recent WX subject, if any WX message exists:
    if wx_dict is not None:
        print('\nMOST RECENT WEATHER E-MAIL at',
              wx_dict['date'].strip() + '\n' + wx_dict['subject'])
    else:
        print('\nNo most recent weather e-mail found.')
    # Display most recent Other subject, if any non-WX message exists:
    if other_dict != wx_dict:
        if other_dict is not None:
            print('\nMOST RECENT NON-WEATHER E-MAIL at',
                  other_dict['date'].strip() + '\n' + other_dict['subject'])
    # Monitoring loop:
    while True:
        sleep(SECONDS_BETWEEN_EMAIL_QUERIES)
        wx_dict = get_most_recent_relevant_email(subject_start=NMS_WEATHER_SUBJECT_START,
                                                 from_strings=NMS_TECHSUPPORT_FROM_STRINGS)
        other_dict = get_most_recent_relevant_email(subject_start=None,
                                                    from_strings=NMS_TECHSUPPORT_FROM_STRINGS)
        # If new weather e-mail, display subject and sound long audible alarm:
        if wx_dict is not None and wx_dict != most_recent_wx_dict:
            print('\nNEW WEATHER E-MAIL at',
                  wx_dict['date'].strip() + '\n' + wx_dict['subject'])
            play_sound_alias(NMS_EMAIL_SOUND, 20)
            most_recent_wx_dict = wx_dict.copy()
        # If new non-weather e-mail, display subject and sound short audible alarm:
        if other_dict is not None and other_dict != wx_dict \
                and other_dict != most_recent_other_dict:
            print('\nNEW NON-WEATHER E-MAIL at',
                  other_dict['date'].strip() + '\n' + other_dict['subject'])
            play_sound_alias(NMS_EMAIL_SOUND, 5)
            most_recent_other_dict = other_dict.copy()


__________OTHER_FUNCTIONS___________________________________________________ = 0


def record_nms_status_image_name():
    """ Repeatedly get NMS observatory status image name from NMS weather page,
        so that we eventually learn all image names. Appends timestamped lines to a log file.
    """
    current_interval = 240  # seconds.
    fullpath = 'C:/Astro/NMS/record_nms_status_image_name.txt'
    with open(fullpath, 'a') as f:  # will append to file.
        start_string = '\nSTARTING:\n'
        print(start_string)
        f.write(start_string)
        while True:
            image_name = get_nms_status_image_name()
            utc_latest_capture = datetime.now(timezone.utc)
            utc_string = utc_latest_capture.strftime('%Y-%m-%d %H:%M:%S')
            write_string = utc_string + ': NMS image file >' + image_name + '<'
            print(write_string)
            f.write(write_string + '\n')
            f.flush()  # keep log current even though loop never cleanly exits.
            sleep(current_interval)


if __name__ == '__main__':
    monitor_nms_status_via_e_mail()
    # monitor_nms_status_via_html()
    # record_nms_status_image_name()
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}
36,626
edose/photrix
refs/heads/master
/photrix/imap.py
__author__ = "Eric Dose, Albuquerque"

""" This module: imap.py   Various e-mail access routines. """

# Python core:
import os
import imaplib
import email
from email.header import decode_header

THIS_PACKAGE_ROOT_DIRECTORY = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
HTTP_OK_CODE = 200  # "OK. The request has succeeded."
MAX_WEBOBS_LINES = 200  # for safety (& as dictated in any case by webobs API (as of Jan 2017)).

__________E_MAIL_ACCESS______________________________________________________ = 0


def get_field_string(msg, field_name):
    """Return the decoded text of one header field of an e-mail message.

    :param msg: e-mail message. [email.message.Message]
    :param field_name: header field to read, e.g. 'Subject'. [string]
    :return: decoded header text. [string]
    """
    content, encoding = decode_header(msg[field_name])[0]
    if isinstance(content, bytes):
        # BUG FIX: decode_header() reports encoding=None for raw (unencoded) byte
        # headers; .decode(None) raised TypeError. Fall back to utf-8.
        return content.decode(encoding or 'utf-8')
    return content


def get_imap_credentials():
    """Read IMAP credentials from local creds file; return dict of key->value.

    File format: one 'key: value' pair per line; malformed lines are ignored.
    """
    fullpath = os.path.join('C:/Astro/NMS/creds', 'creds.txt')
    with open(fullpath) as f:
        lines = f.readlines()
    creds_dict = {}
    for line in lines:
        items = [s.strip() for s in line.split(':')]
        if len(items) == 2:
            creds_dict[items[0]] = items[1]
    return creds_dict


def get_most_recent_relevant_email(subject_start, from_strings):
    """Scan INBOX newest-first; return header fields of the first matching message.

    :param subject_start: required Subject prefix (case-insensitive), or None for any. [string]
    :param from_strings: string or list of strings that must ALL appear in the From
        field (case-insensitive), or None for any. [string or list of strings]
    :return: dict with keys 'i', 'date', 'subject', 'from_str'; or None if no match.
    """
    if isinstance(from_strings, str):
        from_strings = [from_strings]
    creds = get_imap_credentials()
    imap = imaplib.IMAP4_SSL(creds['imap'])
    try:
        # authenticate
        imap.login(creds['login'], creds['fargelbnurrr'])
        status, messages = imap.select('INBOX')
        n_in_inbox = int(messages[0])
        n_to_retrieve = n_in_inbox
        for i in range(n_in_inbox, n_in_inbox - n_to_retrieve, -1):  # latest to earliest.
            # TODO: try/except around imap.fetch() call, try again a few times
            #       before raising own exception, **or** use imap.search().
            res, msg = imap.fetch(str(i), '(BODY.PEEK[HEADER])')  # fetches headers only.
            for response in msg:
                if res == 'OK' and isinstance(response, tuple):
                    this_msg = email.message_from_bytes(response[1])
                    date = get_field_string(this_msg, 'Date')
                    subject = get_field_string(this_msg, 'Subject')
                    from_str = get_field_string(this_msg, 'From')
                    subject_start_relevant = subject_start is None or \
                        subject.lower().startswith(subject_start.lower())
                    from_field_relevant = from_strings is None or \
                        all([f.lower() in from_str.lower() for f in from_strings])
                    if subject_start_relevant and from_field_relevant:
                        return {'i': i, 'date': date, 'subject': subject,
                                'from_str': from_str}
        return None
    finally:
        # BUG FIX: connection was never closed; the early return leaked the socket.
        try:
            imap.logout()
        except (imaplib.IMAP4.error, OSError):
            pass  # best-effort cleanup; result already determined.
{"/test/test_web.py": ["/photrix/web.py", "/photrix/user.py"], "/photrix/fov.py": ["/photrix/util.py", "/photrix/web.py"], "/photrix/acps.py": ["/photrix/user.py"], "/test/test_util.py": ["/photrix/__init__.py"], "/test/test_planning.py": ["/photrix/__init__.py", "/photrix/fov.py", "/photrix/user.py"], "/photrix/image.py": ["/photrix/util.py"], "/photrix/web.py": ["/photrix/util.py"], "/test/test_image.py": ["/photrix/__init__.py", "/photrix/util.py"], "/photrix/user.py": ["/photrix/util.py"], "/photrix/process.py": ["/photrix/image.py", "/photrix/user.py", "/photrix/util.py", "/photrix/fov.py"], "/test/test_user.py": ["/photrix/__init__.py", "/photrix/util.py", "/photrix/fov.py"], "/photrix/planning.py": ["/photrix/fov.py", "/photrix/user.py", "/photrix/util.py", "/photrix/web.py"], "/test/test_fov.py": ["/photrix/__init__.py"], "/test/test_process.py": ["/photrix/__init__.py", "/photrix/user.py", "/photrix/util.py"], "/test/test_acps.py": ["/photrix/__init__.py"]}