code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
import math
def oversample(all_paths, per_class_split, oversample_ids, class_names):
union = set()
all_sum = 0
print('Oversample stats:')
print('Total images before =', len(all_paths[0]))
for i in oversample_ids:
duplicates = 1
print(f'id = {i} -> {class_names[i]} : num of oversampled =', len(per_class_split[i]))
all_sum += len(per_class_split[i])
for idx in per_class_split[i]:
if idx not in union:
union.add(idx)
for j in range(duplicates):
for paths in all_paths:
paths.append(paths[idx])
print('Total oversampled =', all_sum, '/ union =', len(union))
print('Total images after =', len(all_paths[0]))
def oversample_end(all_paths, num):
for paths in all_paths:
oversample = []
for i in range(num):
oversample.append(paths[-1-i])
paths.extend(oversample)
def print_class_colors(dataset):
for color, name in zip(dataset.class_colors, dataset.class_names):
print(color, '\t', name)
def get_pyramid_loss_scales(downsampling_factor, upsampling_factor):
num_scales = int(math.log2(downsampling_factor // upsampling_factor))
scales = [downsampling_factor]
for i in range(num_scales - 1):
assert scales[-1] % 2 == 0
scales.append(scales[-1] // 2)
return scales
def get_data_bound(dataset):
min_val = (-dataset.mean.max()) / dataset.std.min()
max_val = (255-dataset.mean.min()) / dataset.std.min()
return float(min_val), float(max_val)
| [
"math.log2"
] | [((1073, 1124), 'math.log2', 'math.log2', (['(downsampling_factor // upsampling_factor)'], {}), '(downsampling_factor // upsampling_factor)\n', (1082, 1124), False, 'import math\n')] |
from cdm.objectmodel import CdmCorpusDefinition, CdmManifestDefinition
from cdm.storage import LocalAdapter
from cdm.enums import CdmObjectType
def generate_manifest(local_root_path: str) -> 'CdmManifestDefinition':
"""
Creates a manifest used for the tests.
"""
cdmCorpus = CdmCorpusDefinition()
cdmCorpus.storage.default_namespace = 'local'
adapter = LocalAdapter(root=local_root_path)
cdmCorpus.storage.mount('local', adapter)
# add cdm namespace
cdmCorpus.storage.mount('cdm', adapter)
manifest = CdmManifestDefinition(cdmCorpus.ctx, 'manifest')
manifest.folder_path = '/'
manifest.namespace = 'local'
return manifest
def create_document_for_entity(cdm_corpus: 'CdmCorpusDefinition', entity: 'CdmEntityDefinition', nameSpace: str = 'local'):
"""
For an entity, it creates a document that will contain the entity.
"""
cdm_folder_def = cdm_corpus.storage.fetch_root_folder(nameSpace)
entity_doc = cdm_corpus.ctx.corpus.make_object(CdmObjectType.DOCUMENT_DEF, '{}.cdm.json'.format(entity.entity_name), False)
cdm_folder_def.documents.append(entity_doc)
entity_doc.definitions.append(entity)
return entity_doc
| [
"cdm.storage.LocalAdapter",
"cdm.objectmodel.CdmManifestDefinition",
"cdm.objectmodel.CdmCorpusDefinition"
] | [((297, 318), 'cdm.objectmodel.CdmCorpusDefinition', 'CdmCorpusDefinition', ([], {}), '()\n', (316, 318), False, 'from cdm.objectmodel import CdmCorpusDefinition, CdmManifestDefinition\n'), ((383, 417), 'cdm.storage.LocalAdapter', 'LocalAdapter', ([], {'root': 'local_root_path'}), '(root=local_root_path)\n', (395, 417), False, 'from cdm.storage import LocalAdapter\n'), ((548, 596), 'cdm.objectmodel.CdmManifestDefinition', 'CdmManifestDefinition', (['cdmCorpus.ctx', '"""manifest"""'], {}), "(cdmCorpus.ctx, 'manifest')\n", (569, 596), False, 'from cdm.objectmodel import CdmCorpusDefinition, CdmManifestDefinition\n')] |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 UnitedStack Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Copyright (c) 2013-2018 Wind River Systems, Inc.
#
import jsonpatch
import pecan
from pecan import rest
import wsme
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from sysinv.api.controllers.v1 import base
from sysinv.api.controllers.v1 import collection
from sysinv.api.controllers.v1 import link
from sysinv.api.controllers.v1 import types
from sysinv.api.controllers.v1 import utils
from sysinv.common import constants
from sysinv.common import exception
from sysinv.common import health
from sysinv.common import utils as cutils
from sysinv import objects
from sysinv.openstack.common import log
from sysinv.openstack.common.gettextutils import _
from fm_api import constants as fm_constants
from sysinv.common.storage_backend_conf import StorageBackendConfig
LOG = log.getLogger(__name__)
class ControllerFsPatchType(types.JsonPatchType):
@staticmethod
def mandatory_attrs():
return []
class ControllerFs(base.APIBase):
"""API representation of a controller_fs.
This class enforces type checking and value constraints, and converts
between the internal object model and the API representation of
a ControllerFs.
The database GiB of controller_fs - maps to
/var/lib/postgresql (pgsql-lv)
The image GiB of controller_fs - maps to
/opt/cgcs (cgcs-lv)
The image conversion GiB of controller_fs - maps to
/opt/img-conversions (img-conversions-lv)
The backup GiB of controller_fs - maps to
/opt/backups (backup-lv)
The scratch GiB of controller_fs - maps to
/scratch (scratch-lv)
The extension GiB of controller_fs - maps to
/opt/extension (extension-lv)
The gnocchi GiB of controller_fs - maps to
/opt/gnocchi (gnocchi-lv)
"""
uuid = types.uuid
"Unique UUID for this controller_fs"
name = wsme.wsattr(wtypes.text, mandatory=True)
size = int
logical_volume = wsme.wsattr(wtypes.text)
replicated = bool
state = wtypes.text
"The state of controller_fs indicates a drbd file system resize operation"
forisystemid = int
"The isystemid that this controller_fs belongs to"
isystem_uuid = types.uuid
"The UUID of the system this controller_fs belongs to"
action = wtypes.text
"Represent the action on the controller_fs"
links = [link.Link]
"A list containing a self link and associated controller_fs links"
created_at = wtypes.datetime.datetime
updated_at = wtypes.datetime.datetime
def __init__(self, **kwargs):
self.fields = list(objects.controller_fs.fields.keys())
for k in self.fields:
setattr(self, k, kwargs.get(k))
# API-only attribute)
self.fields.append('action')
setattr(self, 'action', kwargs.get('action', None))
@classmethod
def convert_with_links(cls, rpc_controller_fs, expand=True):
controller_fs = ControllerFs(**rpc_controller_fs.as_dict())
if not expand:
controller_fs.unset_fields_except(['created_at',
'updated_at',
'uuid',
'name',
'size',
'logical_volume',
'replicated',
'state',
'isystem_uuid'])
# never expose the isystem_id attribute
controller_fs.isystem_id = wtypes.Unset
# we display the cgcs file system as glance to the customer
if controller_fs.name == constants.FILESYSTEM_NAME_CGCS:
controller_fs.name = constants.FILESYSTEM_DISPLAY_NAME_CGCS
# never expose the isystem_id attribute, allow exposure for now
# controller_fs.forisystemid = wtypes.Unset
controller_fs.links = [
link.Link.make_link('self', pecan.request.host_url,
'controller_fs', controller_fs.uuid),
link.Link.make_link('bookmark', pecan.request.host_url,
'controller_fs', controller_fs.uuid,
bookmark=True)
]
return controller_fs
class ControllerFsCollection(collection.Collection):
"""API representation of a collection of ControllerFs."""
controller_fs = [ControllerFs]
"A list containing ControllerFs objects"
def __init__(self, **kwargs):
self._type = 'controller_fs'
@classmethod
def convert_with_links(cls, rpc_controller_fs, limit, url=None,
expand=False, **kwargs):
collection = ControllerFsCollection()
collection.controller_fs = [ControllerFs.convert_with_links(p, expand)
for p in rpc_controller_fs]
collection.next = collection.get_next(limit, url=url, **kwargs)
return collection
def _total_size_controller_multi_fs(controller_fs_new_list):
"""This function is called to verify file system capability on
controller with primary (initial) storage backend already configured
calling from initial config (config_controller stage) will result in
failure
"""
total_size = 0
for fs in controller_fs_new_list:
if fs.name == constants.FILESYSTEM_NAME_DATABASE:
total_size += (2 * fs.size)
else:
total_size += fs.size
return total_size
def _total_size_controller_fs(controller_fs_new, controller_fs_list):
"""This function is called to verify file system capability on
controller with primary (initial) storage backend already configured
calling from initial config (config_controller stage) will result in
failure
"""
total_size = 0
for fs in controller_fs_list:
size = fs['size']
if controller_fs_new and fs['name'] == controller_fs_new['name']:
size = controller_fs_new['size']
if fs['name'] == "database":
size = size * 2
total_size += size
LOG.info(
"_total_size_controller_fs total filesysem size %s" % total_size)
return total_size
def _check_relative_controller_multi_fs(controller_fs_new_list):
"""
This function verifies the relative controller_fs sizes.
:param controller_fs_new_list:
:return: None. Raise Client exception on failure.
"""
if cutils.is_virtual():
return
backup_gib_min = constants.BACKUP_OVERHEAD
for fs in controller_fs_new_list:
if fs.name == constants.FILESYSTEM_NAME_DATABASE:
database_gib = fs.size
backup_gib_min += fs.size
elif fs.name == constants.FILESYSTEM_NAME_CGCS:
cgcs_gib = fs.size
backup_gib_min += fs.size
elif fs.name == constants.FILESYSTEM_NAME_BACKUP:
backup_gib = fs.size
if backup_gib < backup_gib_min:
raise wsme.exc.ClientSideError(_("backup size of %d is "
"insufficient. "
"Minimum backup size of %d is "
"required based upon glance size %d "
"and database size %d. "
"Rejecting modification "
"request." %
(backup_gib,
backup_gib_min,
cgcs_gib,
database_gib
)))
def _check_controller_multi_fs(controller_fs_new_list,
ceph_mon_gib_new=None,
cgtsvg_growth_gib=None):
ceph_mons = pecan.request.dbapi.ceph_mon_get_list()
if not ceph_mon_gib_new:
if ceph_mons:
ceph_mon_gib_new = ceph_mons[0].ceph_mon_gib
else:
ceph_mon_gib_new = 0
LOG.info("_check_controller__multi_fs ceph_mon_gib_new = %s" % ceph_mon_gib_new)
cgtsvg_max_free_GiB = _get_controller_cgtsvg_limit()
LOG.info("_check_controller_multi_fs cgtsvg_max_free_GiB = %s " %
cgtsvg_max_free_GiB)
_check_relative_controller_multi_fs(controller_fs_new_list)
LOG.info("_check_controller_multi_fs ceph_mon_gib_new = %s" % ceph_mon_gib_new)
rootfs_configured_size_GiB = \
_total_size_controller_multi_fs(controller_fs_new_list) + ceph_mon_gib_new
LOG.info("_check_controller_multi_fs rootfs_configured_size_GiB = %s" %
rootfs_configured_size_GiB)
if cgtsvg_growth_gib and (cgtsvg_growth_gib > cgtsvg_max_free_GiB):
if ceph_mon_gib_new:
msg = _(
"Total target growth size %s GiB for database "
"(doubled for upgrades), glance, img-conversions, "
"scratch, backup, extension and ceph-mon exceeds "
"growth limit of %s GiB." %
(cgtsvg_growth_gib, cgtsvg_max_free_GiB)
)
else:
msg = _(
"Total target growth size %s GiB for database "
"(doubled for upgrades), glance, img-conversions, scratch, "
"backup and extension exceeds growth limit of %s GiB." %
(cgtsvg_growth_gib, cgtsvg_max_free_GiB)
)
raise wsme.exc.ClientSideError(msg)
def _check_relative_controller_fs(controller_fs_new, controller_fs_list):
"""
This function verifies the relative controller_fs sizes.
:param controller_fs_new:
:param controller_fs_list:
:return: None. Raise Client exception on failure.
"""
if cutils.is_virtual():
return
backup_gib = 0
database_gib = 0
cgcs_gib = 0
for fs in controller_fs_list:
if controller_fs_new and fs['name'] == controller_fs_new['name']:
fs['size'] = controller_fs_new['size']
if fs['name'] == "backup":
backup_gib = fs['size']
elif fs['name'] == constants.DRBD_CGCS:
cgcs_gib = fs['size']
elif fs['name'] == "database":
database_gib = fs['size']
if backup_gib == 0:
LOG.info(
"_check_relative_controller_fs backup filesystem not yet setup")
return
# Required mininum backup filesystem size
backup_gib_min = cgcs_gib + database_gib + constants.BACKUP_OVERHEAD
if backup_gib < backup_gib_min:
raise wsme.exc.ClientSideError(_("backup size of %d is "
"insufficient. "
"Minimum backup size of %d is "
"required based on upon "
"glance=%d and database=%d and "
"backup overhead of %d. "
"Rejecting modification "
"request." %
(backup_gib,
backup_gib_min,
cgcs_gib,
database_gib,
constants.BACKUP_OVERHEAD
)))
def _check_controller_state():
"""
This function verifies the administrative, operational, availability of
each controller.
"""
chosts = pecan.request.dbapi.ihost_get_by_personality(
constants.CONTROLLER)
for chost in chosts:
if (chost.administrative != constants.ADMIN_UNLOCKED or
chost.availability != constants.AVAILABILITY_AVAILABLE or
chost.operational != constants.OPERATIONAL_ENABLED):
# A node can become degraded due to not free space available in a FS
# and thus block the resize operation. If the only alarm that degrades
# a controller node is a filesystem alarm, we shouldn't block the resize
# as the resize itself will clear the degrade.
health_helper = health.Health(pecan.request.dbapi)
degrade_alarms = health_helper.get_alarms_degrade(
pecan.request.context,
alarm_ignore_list=[fm_constants.FM_ALARM_ID_FS_USAGE],
entity_instance_id_filter="controller-")
allowed_resize = False
if (not degrade_alarms and
chost.availability == constants.AVAILABILITY_DEGRADED):
allowed_resize = True
if not allowed_resize:
alarm_explanation = ""
if degrade_alarms:
alarm_explanation = "Check alarms with the following IDs: %s" % str(degrade_alarms)
raise wsme.exc.ClientSideError(
_("This operation requires controllers to be %s, %s, %s. "
"Current status is %s, %s, %s. %s." %
(constants.ADMIN_UNLOCKED, constants.OPERATIONAL_ENABLED,
constants.AVAILABILITY_AVAILABLE,
chost.administrative, chost.operational,
chost.availability, alarm_explanation)))
return True
def _get_controller_cgtsvg_limit():
"""Calculate space for controller fs
returns: cgtsvg_max_free_GiB
"""
cgtsvg0_free_mib = 0
cgtsvg1_free_mib = 0
cgtsvg_max_free_GiB = 0
chosts = pecan.request.dbapi.ihost_get_by_personality(
constants.CONTROLLER)
for chost in chosts:
if chost.hostname == constants.CONTROLLER_0_HOSTNAME:
ipvs = pecan.request.dbapi.ipv_get_by_ihost(chost.uuid)
for ipv in ipvs:
if (ipv.lvm_vg_name == constants.LVG_CGTS_VG and
ipv.pv_state != constants.PROVISIONED):
msg = _("Cannot resize filesystem. There are still "
"unprovisioned physical volumes on controller-0.")
raise wsme.exc.ClientSideError(msg)
ilvgs = pecan.request.dbapi.ilvg_get_by_ihost(chost.uuid)
for ilvg in ilvgs:
if (ilvg.lvm_vg_name == constants.LVG_CGTS_VG and
ilvg.lvm_vg_size and ilvg.lvm_vg_total_pe):
cgtsvg0_free_mib = (int(ilvg.lvm_vg_size) *
int(ilvg.lvm_vg_free_pe) / int(
ilvg.lvm_vg_total_pe)) / (1024 * 1024)
break
else:
ipvs = pecan.request.dbapi.ipv_get_by_ihost(chost.uuid)
for ipv in ipvs:
if (ipv.lvm_vg_name == constants.LVG_CGTS_VG and
ipv.pv_state != constants.PROVISIONED):
msg = _("Cannot resize filesystem. There are still "
"unprovisioned physical volumes on controller-1.")
raise wsme.exc.ClientSideError(msg)
ilvgs = pecan.request.dbapi.ilvg_get_by_ihost(chost.uuid)
for ilvg in ilvgs:
if (ilvg.lvm_vg_name == constants.LVG_CGTS_VG and
ilvg.lvm_vg_size and ilvg.lvm_vg_total_pe):
cgtsvg1_free_mib = (int(ilvg.lvm_vg_size) *
int(ilvg.lvm_vg_free_pe) / int(
ilvg.lvm_vg_total_pe)) / (1024 * 1024)
break
LOG.info("_get_controller_cgtsvg_limit cgtsvg0_free_mib=%s, "
"cgtsvg1_free_mib=%s" % (cgtsvg0_free_mib, cgtsvg1_free_mib))
if cgtsvg0_free_mib > 0 and cgtsvg1_free_mib > 0:
cgtsvg_max_free_GiB = min(cgtsvg0_free_mib, cgtsvg1_free_mib) / 1024
LOG.info("min of cgtsvg0_free_mib=%s and cgtsvg1_free_mib=%s is "
"cgtsvg_max_free_GiB=%s" %
(cgtsvg0_free_mib, cgtsvg1_free_mib, cgtsvg_max_free_GiB))
elif cgtsvg1_free_mib > 0:
cgtsvg_max_free_GiB = cgtsvg1_free_mib / 1024
else:
cgtsvg_max_free_GiB = cgtsvg0_free_mib / 1024
LOG.info("SYS_I filesystem limits cgtsvg0_free_mib=%s, "
"cgtsvg1_free_mib=%s, cgtsvg_max_free_GiB=%s"
% (cgtsvg0_free_mib, cgtsvg1_free_mib, cgtsvg_max_free_GiB))
return cgtsvg_max_free_GiB
def _check_controller_fs(controller_fs_new=None,
ceph_mon_gib_new=None,
cgtsvg_growth_gib=None,
controller_fs_list=None):
ceph_mons = pecan.request.dbapi.ceph_mon_get_list()
if not controller_fs_list:
controller_fs_list = pecan.request.dbapi.controller_fs_get_list()
if not ceph_mon_gib_new:
if ceph_mons:
ceph_mon_gib_new = ceph_mons[0].ceph_mon_gib
else:
ceph_mon_gib_new = 0
else:
if ceph_mons:
cgtsvg_growth_gib = ceph_mon_gib_new - ceph_mons[0].ceph_mon_gib
else:
cgtsvg_growth_gib = ceph_mon_gib_new
cgtsvg_max_free_GiB = _get_controller_cgtsvg_limit()
LOG.info("_check_controller_fs ceph_mon_gib_new = %s" % ceph_mon_gib_new)
LOG.info("_check_controller_fs cgtsvg_growth_gib = %s" % cgtsvg_growth_gib)
LOG.info("_check_controller_fs cgtsvg_max_free_GiB = %s" % cgtsvg_max_free_GiB)
_check_relative_controller_fs(controller_fs_new, controller_fs_list)
rootfs_configured_size_GiB = \
_total_size_controller_fs(controller_fs_new,
controller_fs_list) + ceph_mon_gib_new
LOG.info("_check_controller_fs rootfs_configured_size_GiB = %s" %
rootfs_configured_size_GiB)
if cgtsvg_growth_gib and (cgtsvg_growth_gib > cgtsvg_max_free_GiB):
if ceph_mon_gib_new:
msg = _(
"Total target growth size %s GiB for database "
"(doubled for upgrades), glance, img-conversions, "
"scratch, backup, extension and ceph-mon exceeds "
"growth limit of %s GiB." %
(cgtsvg_growth_gib, cgtsvg_max_free_GiB)
)
else:
msg = _(
"Total target growth size %s GiB for database "
"(doubled for upgrades), glance, img-conversions, scratch, "
"backup and extension exceeds growth limit of %s GiB." %
(cgtsvg_growth_gib, cgtsvg_max_free_GiB)
)
raise wsme.exc.ClientSideError(msg)
def _check_controller_multi_fs_data(context, controller_fs_list_new,
modified_fs):
""" Check controller filesystem data and return growth
returns: cgtsvg_growth_gib
"""
cgtsvg_growth_gib = 0
# Check if we need img_conversions
img_conversion_required = False
lvdisplay_keys = [constants.FILESYSTEM_LV_DICT[constants.FILESYSTEM_NAME_DATABASE],
constants.FILESYSTEM_LV_DICT[constants.FILESYSTEM_NAME_CGCS],
constants.FILESYSTEM_LV_DICT[constants.FILESYSTEM_NAME_BACKUP],
constants.FILESYSTEM_LV_DICT[constants.FILESYSTEM_NAME_SCRATCH],
constants.FILESYSTEM_LV_DICT[constants.FILESYSTEM_NAME_GNOCCHI]]
# On primary region, img-conversions always exists in controller_fs DB table.
# On secondary region, if both glance and cinder are sharing from the primary
# region, img-conversions won't exist in controller_fs DB table. We already
# have semantic check not to allow img-conversions resizing.
if (StorageBackendConfig.has_backend(pecan.request.dbapi, constants.SB_TYPE_LVM) or
StorageBackendConfig.has_backend(pecan.request.dbapi, constants.SB_TYPE_CEPH)):
img_conversion_required = True
lvdisplay_keys.append(constants.FILESYSTEM_LV_DICT[constants.FILESYSTEM_NAME_IMG_CONVERSIONS])
if (constants.FILESYSTEM_NAME_IMG_CONVERSIONS in modified_fs and
not img_conversion_required):
raise wsme.exc.ClientSideError(
_("%s is not modifiable: no cinder backend is "
"currently configured.") % constants.FILESYSTEM_NAME_IMG_CONVERSIONS)
lvdisplay_dict = pecan.request.rpcapi.get_controllerfs_lv_sizes(context)
for key in lvdisplay_keys:
if not lvdisplay_dict.get(key, None):
raise wsme.exc.ClientSideError(_("Unable to determine the "
"current size of %s. "
"Rejecting modification "
"request." % key))
for fs in controller_fs_list_new:
lv = fs.logical_volume
if lvdisplay_dict.get(lv, None):
orig = int(float(lvdisplay_dict[lv]))
new = int(fs.size)
if fs.name == constants.FILESYSTEM_NAME_DATABASE:
orig = orig / 2
if orig > new:
raise wsme.exc.ClientSideError(_("'%s' must be at least: "
"%s" % (fs.name, orig)))
if fs.name == constants.FILESYSTEM_NAME_DATABASE:
cgtsvg_growth_gib += 2 * (new - orig)
else:
cgtsvg_growth_gib += (new - orig)
LOG.info("_check_controller_multi_fs_data cgtsvg_growth_gib=%s" %
cgtsvg_growth_gib)
return cgtsvg_growth_gib
LOCK_NAME = 'ControllerFsController'
class ControllerFsController(rest.RestController):
"""REST controller for ControllerFs."""
_custom_actions = {
'detail': ['GET'],
'update_many': ['PUT'],
}
def __init__(self, from_isystems=False):
self._from_isystems = from_isystems
def _get_controller_fs_collection(self, isystem_uuid, marker, limit,
sort_key, sort_dir, expand=False,
resource_url=None):
if self._from_isystems and not isystem_uuid:
raise exception.InvalidParameterValue(_(
"System id not specified."))
limit = utils.validate_limit(limit)
sort_dir = utils.validate_sort_dir(sort_dir)
marker_obj = None
if marker:
marker_obj = objects.controller_fs.get_by_uuid(
pecan.request.context, marker)
if isystem_uuid:
controller_fs = pecan.request.dbapi.controller_fs_get_by_isystem(
isystem_uuid, limit,
marker_obj,
sort_key=sort_key,
sort_dir=sort_dir)
else:
controller_fs = \
pecan.request.dbapi.controller_fs_get_list(limit, marker_obj,
sort_key=sort_key,
sort_dir=sort_dir)
return ControllerFsCollection.convert_with_links(controller_fs, limit,
url=resource_url,
expand=expand,
sort_key=sort_key,
sort_dir=sort_dir)
@wsme_pecan.wsexpose(ControllerFsCollection, types.uuid, types.uuid, int,
wtypes.text, wtypes.text)
def get_all(self, isystem_uuid=None, marker=None, limit=None,
sort_key='id', sort_dir='asc'):
"""Retrieve a list of controller_fs."""
return self._get_controller_fs_collection(isystem_uuid, marker, limit,
sort_key, sort_dir)
@wsme_pecan.wsexpose(ControllerFsCollection, types.uuid, types.uuid, int,
wtypes.text, wtypes.text)
def detail(self, isystem_uuid=None, marker=None, limit=None,
sort_key='id', sort_dir='asc'):
"""Retrieve a list of controller_fs with detail."""
parent = pecan.request.path.split('/')[:-1][-1]
if parent != "controller_fs":
raise exception.HTTPNotFound
expand = True
resource_url = '/'.join(['controller_fs', 'detail'])
return self._get_controller_fs_collection(isystem_uuid, marker, limit,
sort_key, sort_dir,
expand, resource_url)
@wsme_pecan.wsexpose(ControllerFs, types.uuid)
def get_one(self, controller_fs_uuid):
"""Retrieve information about the given controller_fs."""
if self._from_isystems:
raise exception.OperationNotPermitted
rpc_controller_fs = \
objects.controller_fs.get_by_uuid(pecan.request.context,
controller_fs_uuid)
return ControllerFs.convert_with_links(rpc_controller_fs)
@cutils.synchronized(LOCK_NAME)
@wsme.validate(types.uuid, [ControllerFsPatchType])
@wsme_pecan.wsexpose(ControllerFs, types.uuid,
body=[ControllerFsPatchType])
def patch(self, controller_fs_uuid, patch):
"""Update the current controller_fs configuration."""
raise exception.OperationNotPermitted
@cutils.synchronized(LOCK_NAME)
@wsme.validate(types.uuid, [ControllerFsPatchType])
@wsme_pecan.wsexpose(ControllerFs, types.uuid, body=[[ControllerFsPatchType]])
def update_many(self, isystem_uuid, patch):
"""Update the current controller_fs configuration."""
if self._from_isystems and not isystem_uuid:
raise exception.InvalidParameterValue(_(
"System id not specified."))
# Validate input filesystem names
controller_fs_list = pecan.request.dbapi.controller_fs_get_list()
valid_fs_list = []
if controller_fs_list:
valid_fs_list = {fs.name: fs.size for fs in controller_fs_list}
reinstall_required = False
reboot_required = False
force_resize = False
modified_fs = []
for p_list in patch:
p_obj_list = jsonpatch.JsonPatch(p_list)
for p_obj in p_obj_list:
if p_obj['path'] == '/action':
value = p_obj['value']
patch.remove(p_list)
if value == constants.FORCE_ACTION:
force_resize = True
LOG.info("Force action resize selected")
break
for p_list in patch:
p_obj_list = jsonpatch.JsonPatch(p_list)
for p_obj in p_obj_list:
if p_obj['path'] == '/name':
fs_display_name = p_obj['value']
if fs_display_name == constants.FILESYSTEM_DISPLAY_NAME_CGCS:
fs_name = constants.FILESYSTEM_NAME_CGCS
else:
fs_name = fs_display_name
elif p_obj['path'] == '/size':
size = p_obj['value']
if fs_name not in valid_fs_list.keys() or fs_display_name == constants.FILESYSTEM_NAME_CGCS:
msg = _("ControllerFs update failed: invalid filesystem "
"'%s' " % fs_display_name)
raise wsme.exc.ClientSideError(msg)
elif not cutils.is_int_like(size):
msg = _("ControllerFs update failed: filesystem '%s' "
"size must be an integer " % fs_display_name)
raise wsme.exc.ClientSideError(msg)
elif int(size) <= int(valid_fs_list[fs_name]):
msg = _("ControllerFs update failed: size for filesystem '%s' "
"should be bigger than %s " % (
fs_display_name, valid_fs_list[fs_name]))
raise wsme.exc.ClientSideError(msg)
elif (fs_name == constants.FILESYSTEM_NAME_CGCS and
StorageBackendConfig.get_backend(pecan.request.dbapi,
constants.CINDER_BACKEND_CEPH)):
if force_resize:
LOG.warn("Force resize ControllerFs: %s, though Ceph "
"storage backend is configured" % fs_display_name)
else:
raise wsme.exc.ClientSideError(
_("ControllerFs %s size is not modifiable as Ceph is "
"configured. Update size via Ceph Storage Pools." %
fs_display_name))
if fs_name in constants.SUPPORTED_REPLICATED_FILEYSTEM_LIST:
if utils.is_drbd_fs_resizing():
raise wsme.exc.ClientSideError(
_("A drbd sync operation is currently in progress. "
"Retry again later.")
)
modified_fs += [fs_name]
controller_fs_list_new = []
for fs in controller_fs_list:
replaced = False
for p_list in patch:
p_obj_list = jsonpatch.JsonPatch(p_list)
for p_obj in p_obj_list:
if p_obj['path'] == '/name':
if p_obj['value'] == constants.FILESYSTEM_DISPLAY_NAME_CGCS:
p_obj['value'] = constants.FILESYSTEM_NAME_CGCS
if p_obj['value'] == fs['name']:
try:
controller_fs_list_new += [ControllerFs(
**jsonpatch.apply_patch(fs.as_dict(), p_obj_list))]
replaced = True
break
except utils.JSONPATCH_EXCEPTIONS as e:
raise exception.PatchError(patch=p_list, reason=e)
if replaced:
break
if not replaced:
controller_fs_list_new += [fs]
cgtsvg_growth_gib = _check_controller_multi_fs_data(
pecan.request.context,
controller_fs_list_new,
modified_fs)
if _check_controller_state():
_check_controller_multi_fs(controller_fs_list_new,
cgtsvg_growth_gib=cgtsvg_growth_gib)
for fs in controller_fs_list_new:
if fs.name in modified_fs:
value = {'size': fs.size}
if fs.replicated:
value.update({'state': constants.CONTROLLER_FS_RESIZING_IN_PROGRESS})
pecan.request.dbapi.controller_fs_update(fs.uuid, value)
try:
# perform rpc to conductor to perform config apply
pecan.request.rpcapi.update_storage_config(
pecan.request.context,
update_storage=False,
reinstall_required=reinstall_required,
reboot_required=reboot_required,
filesystem_list=modified_fs
)
except Exception as e:
msg = _("Failed to update filesystem size ")
LOG.error("%s with patch %s with exception %s" % (msg, patch, e))
raise wsme.exc.ClientSideError(msg)
@wsme_pecan.wsexpose(None, types.uuid, status_code=204)
def delete(self, controller_fs_uuid):
"""Delete a controller_fs."""
raise exception.OperationNotPermitted
@cutils.synchronized(LOCK_NAME)
@wsme_pecan.wsexpose(ControllerFs, body=ControllerFs)
def post(self, controllerfs):
"""Create a new controller_fs."""
raise exception.OperationNotPermitted
| [
"pecan.request.dbapi.ilvg_get_by_ihost",
"sysinv.openstack.common.gettextutils._",
"wsme.wsattr",
"sysinv.api.controllers.v1.link.Link.make_link",
"sysinv.api.controllers.v1.utils.validate_sort_dir",
"pecan.request.dbapi.controller_fs_get_by_isystem",
"sysinv.objects.controller_fs.get_by_uuid",
"wsme.... | [((1471, 1494), 'sysinv.openstack.common.log.getLogger', 'log.getLogger', (['__name__'], {}), '(__name__)\n', (1484, 1494), False, 'from sysinv.openstack.common import log\n'), ((2544, 2584), 'wsme.wsattr', 'wsme.wsattr', (['wtypes.text'], {'mandatory': '(True)'}), '(wtypes.text, mandatory=True)\n', (2555, 2584), False, 'import wsme\n'), ((2623, 2647), 'wsme.wsattr', 'wsme.wsattr', (['wtypes.text'], {}), '(wtypes.text)\n', (2634, 2647), False, 'import wsme\n'), ((7189, 7208), 'sysinv.common.utils.is_virtual', 'cutils.is_virtual', ([], {}), '()\n', (7206, 7208), True, 'from sysinv.common import utils as cutils\n'), ((8606, 8645), 'pecan.request.dbapi.ceph_mon_get_list', 'pecan.request.dbapi.ceph_mon_get_list', ([], {}), '()\n', (8643, 8645), False, 'import pecan\n'), ((10516, 10535), 'sysinv.common.utils.is_virtual', 'cutils.is_virtual', ([], {}), '()\n', (10533, 10535), True, 'from sysinv.common import utils as cutils\n'), ((12311, 12377), 'pecan.request.dbapi.ihost_get_by_personality', 'pecan.request.dbapi.ihost_get_by_personality', (['constants.CONTROLLER'], {}), '(constants.CONTROLLER)\n', (12355, 12377), False, 'import pecan\n'), ((14296, 14362), 'pecan.request.dbapi.ihost_get_by_personality', 'pecan.request.dbapi.ihost_get_by_personality', (['constants.CONTROLLER'], {}), '(constants.CONTROLLER)\n', (14340, 14362), False, 'import pecan\n'), ((17314, 17353), 'pecan.request.dbapi.ceph_mon_get_list', 'pecan.request.dbapi.ceph_mon_get_list', ([], {}), '()\n', (17351, 17353), False, 'import pecan\n'), ((20946, 21001), 'pecan.request.rpcapi.get_controllerfs_lv_sizes', 'pecan.request.rpcapi.get_controllerfs_lv_sizes', (['context'], {}), '(context)\n', (20992, 21001), False, 'import pecan\n'), ((23960, 24062), 'wsmeext.pecan.wsexpose', 'wsme_pecan.wsexpose', (['ControllerFsCollection', 'types.uuid', 'types.uuid', 'int', 'wtypes.text', 'wtypes.text'], {}), '(ControllerFsCollection, types.uuid, types.uuid, int,\n wtypes.text, wtypes.text)\n', (23979, 24062), True, 'import wsmeext.pecan as wsme_pecan\n'), ((24402, 24504), 'wsmeext.pecan.wsexpose', 'wsme_pecan.wsexpose', (['ControllerFsCollection', 'types.uuid', 'types.uuid', 'int', 'wtypes.text', 'wtypes.text'], {}), '(ControllerFsCollection, types.uuid, types.uuid, int,\n wtypes.text, wtypes.text)\n', (24421, 24504), True, 'import wsmeext.pecan as wsme_pecan\n'), ((25145, 25190), 'wsmeext.pecan.wsexpose', 'wsme_pecan.wsexpose', (['ControllerFs', 'types.uuid'], {}), '(ControllerFs, types.uuid)\n', (25164, 25190), True, 'import wsmeext.pecan as wsme_pecan\n'), ((25620, 25650), 'sysinv.common.utils.synchronized', 'cutils.synchronized', (['LOCK_NAME'], {}), '(LOCK_NAME)\n', (25639, 25650), True, 'from sysinv.common import utils as cutils\n'), ((25656, 25706), 'wsme.validate', 'wsme.validate', (['types.uuid', '[ControllerFsPatchType]'], {}), '(types.uuid, [ControllerFsPatchType])\n', (25669, 25706), False, 'import wsme\n'), ((25712, 25787), 'wsmeext.pecan.wsexpose', 'wsme_pecan.wsexpose', (['ControllerFs', 'types.uuid'], {'body': '[ControllerFsPatchType]'}), '(ControllerFs, types.uuid, body=[ControllerFsPatchType])\n', (25731, 25787), True, 'import wsmeext.pecan as wsme_pecan\n'), ((25975, 26005), 'sysinv.common.utils.synchronized', 'cutils.synchronized', (['LOCK_NAME'], {}), '(LOCK_NAME)\n', (25994, 26005), True, 'from sysinv.common import utils as cutils\n'), ((26011, 26061), 'wsme.validate', 'wsme.validate', (['types.uuid', '[ControllerFsPatchType]'], {}), '(types.uuid, [ControllerFsPatchType])\n', (26024, 26061), False, 'import wsme\n'), ((26067, 26144), 'wsmeext.pecan.wsexpose', 'wsme_pecan.wsexpose', (['ControllerFs', 'types.uuid'], {'body': '[[ControllerFsPatchType]]'}), '(ControllerFs, types.uuid, body=[[ControllerFsPatchType]])\n', (26086, 26144), True, 'import wsmeext.pecan as wsme_pecan\n'), ((32028, 32082), 'wsmeext.pecan.wsexpose', 'wsme_pecan.wsexpose', (['None', 'types.uuid'], {'status_code': '(204)'}), '(None, types.uuid, status_code=204)\n', (32047, 32082), True, 'import wsmeext.pecan as wsme_pecan\n'), ((32215, 32245), 'sysinv.common.utils.synchronized', 'cutils.synchronized', (['LOCK_NAME'], {}), '(LOCK_NAME)\n', (32234, 32245), True, 'from sysinv.common import utils as cutils\n'), ((32251, 32303), 'wsmeext.pecan.wsexpose', 'wsme_pecan.wsexpose', (['ControllerFs'], {'body': 'ControllerFs'}), '(ControllerFs, body=ControllerFs)\n', (32270, 32303), True, 'import wsmeext.pecan as wsme_pecan\n'), ((5647, 5692), 'sysinv.api.controllers.v1.collection.get_next', 'collection.get_next', (['limit'], {'url': 'url'}), '(limit, url=url, **kwargs)\n', (5666, 5692), False, 'from sysinv.api.controllers.v1 import collection\n'), ((10209, 10238), 'wsme.exc.ClientSideError', 'wsme.exc.ClientSideError', (['msg'], {}), '(msg)\n', (10233, 10238), False, 'import wsme\n'), ((17415, 17459), 'pecan.request.dbapi.controller_fs_get_list', 'pecan.request.dbapi.controller_fs_get_list', ([], {}), '()\n', (17457, 17459), False, 'import pecan\n'), ((19208, 19237), 'wsme.exc.ClientSideError', 'wsme.exc.ClientSideError', (['msg'], {}), '(msg)\n', (19232, 19237), False, 'import wsme\n'), ((20314, 20390), 'sysinv.common.storage_backend_conf.StorageBackendConfig.has_backend', 'StorageBackendConfig.has_backend', (['pecan.request.dbapi', 'constants.SB_TYPE_LVM'], {}), '(pecan.request.dbapi, constants.SB_TYPE_LVM)\n', (20346, 20390), False, 'from sysinv.common.storage_backend_conf import StorageBackendConfig\n'), ((20406, 20483), 'sysinv.common.storage_backend_conf.StorageBackendConfig.has_backend', 'StorageBackendConfig.has_backend', (['pecan.request.dbapi', 'constants.SB_TYPE_CEPH'], {}), '(pecan.request.dbapi, constants.SB_TYPE_CEPH)\n', (20438, 20483), False, 'from sysinv.common.storage_backend_conf import StorageBackendConfig\n'), ((22825, 22852), 'sysinv.api.controllers.v1.utils.validate_limit', 'utils.validate_limit', (['limit'], {}), '(limit)\n', (22845, 22852), False, 'from sysinv.api.controllers.v1 import utils\n'), ((22872, 22905), 'sysinv.api.controllers.v1.utils.validate_sort_dir', 'utils.validate_sort_dir', (['sort_dir'], {}), '(sort_dir)\n', (22895, 22905), False, 'from sysinv.api.controllers.v1 import utils\n'), ((25425, 25501), 'sysinv.objects.controller_fs.get_by_uuid', 'objects.controller_fs.get_by_uuid', (['pecan.request.context', 'controller_fs_uuid'], {}), '(pecan.request.context, controller_fs_uuid)\n', (25458, 25501), False, 'from sysinv import objects\n'), ((26479, 26523), 'pecan.request.dbapi.controller_fs_get_list', 'pecan.request.dbapi.controller_fs_get_list', ([], {}), '()\n', (26521, 26523), False, 'import pecan\n'), ((3261, 3296), 'sysinv.objects.controller_fs.fields.keys', 'objects.controller_fs.fields.keys', ([], {}), '()\n', (3294, 3296), False, 'from sysinv import objects\n'), ((4679, 4771), 'sysinv.api.controllers.v1.link.Link.make_link', 'link.Link.make_link', (['"""self"""', 'pecan.request.host_url', '"""controller_fs"""', 'controller_fs.uuid'], {}), "('self', pecan.request.host_url, 'controller_fs',\n controller_fs.uuid)\n", (4698, 4771), False, 'from sysinv.api.controllers.v1 import link\n'), ((4813, 4924), 'sysinv.api.controllers.v1.link.Link.make_link', 'link.Link.make_link', (['"""bookmark"""', 'pecan.request.host_url', '"""controller_fs"""', 'controller_fs.uuid'], {'bookmark': '(True)'}), "('bookmark', pecan.request.host_url, 'controller_fs',\n controller_fs.uuid, bookmark=True)\n", (4832, 4924), False, 'from sysinv.api.controllers.v1 import link\n'), ((7734, 7956), 'sysinv.openstack.common.gettextutils._', '_', (["('backup size of %d is insufficient. Minimum backup size of %d is required based upon glance size %d and database size %d. Rejecting modification request.'\n % (backup_gib, backup_gib_min, cgcs_gib, database_gib))"], {}), "(\n 'backup size of %d is insufficient. Minimum backup size of %d is required based upon glance size %d and database size %d. Rejecting modification request.'\n % (backup_gib, backup_gib_min, cgcs_gib, database_gib))\n", (7735, 7956), False, 'from sysinv.openstack.common.gettextutils import _\n'), ((9558, 9781), 'sysinv.openstack.common.gettextutils._', '_', (["('Total target growth size %s GiB for database (doubled for upgrades), glance, img-conversions, scratch, backup, extension and ceph-mon exceeds growth limit of %s GiB.'\n % (cgtsvg_growth_gib, cgtsvg_max_free_GiB))"], {}), "(\n 'Total target growth size %s GiB for database (doubled for upgrades), glance, img-conversions, scratch, backup, extension and ceph-mon exceeds growth limit of %s GiB.'\n % (cgtsvg_growth_gib, cgtsvg_max_free_GiB))\n", (9559, 9781), False, 'from sysinv.openstack.common.gettextutils import _\n'), ((9907, 10120), 'sysinv.openstack.common.gettextutils._', '_', (["('Total target growth size %s GiB for database (doubled for upgrades), glance, img-conversions, scratch, backup and extension exceeds growth limit of %s GiB.'\n % (cgtsvg_growth_gib, cgtsvg_max_free_GiB))"], {}), "(\n 'Total target growth size %s GiB for database (doubled for upgrades), glance, img-conversions, scratch, backup and extension exceeds growth limit of %s GiB.'\n % (cgtsvg_growth_gib, cgtsvg_max_free_GiB))\n", (9908, 10120), False, 'from sysinv.openstack.common.gettextutils import _\n'), ((11332, 11605), 'sysinv.openstack.common.gettextutils._', '_', (["('backup size of %d is insufficient. Minimum backup size of %d is required based on upon glance=%d and database=%d and backup overhead of %d. Rejecting modification request.'\n % (backup_gib, backup_gib_min, cgcs_gib, database_gib, constants.\n BACKUP_OVERHEAD))"], {}), "(\n 'backup size of %d is insufficient. Minimum backup size of %d is required based on upon glance=%d and database=%d and backup overhead of %d. Rejecting modification request.'\n % (backup_gib, backup_gib_min, cgcs_gib, database_gib, constants.\n BACKUP_OVERHEAD))\n", (11333, 11605), False, 'from sysinv.openstack.common.gettextutils import _\n'), ((12957, 12991), 'sysinv.common.health.Health', 'health.Health', (['pecan.request.dbapi'], {}), '(pecan.request.dbapi)\n', (12970, 12991), False, 'from sysinv.common import health\n'), ((14478, 14526), 'pecan.request.dbapi.ipv_get_by_ihost', 'pecan.request.dbapi.ipv_get_by_ihost', (['chost.uuid'], {}), '(chost.uuid)\n', (14514, 14526), False, 'import pecan\n'), ((14914, 14963), 'pecan.request.dbapi.ilvg_get_by_ihost', 'pecan.request.dbapi.ilvg_get_by_ihost', (['chost.uuid'], {}), '(chost.uuid)\n', (14951, 14963), False, 'import pecan\n'), ((15383, 15431), 'pecan.request.dbapi.ipv_get_by_ihost', 'pecan.request.dbapi.ipv_get_by_ihost', (['chost.uuid'], {}), '(chost.uuid)\n', (15419, 15431), False, 'import pecan\n'), ((15819, 15868), 'pecan.request.dbapi.ilvg_get_by_ihost', 'pecan.request.dbapi.ilvg_get_by_ihost', (['chost.uuid'], {}), '(chost.uuid)\n', (15856, 15868), False, 'import pecan\n'), ((18557, 18780), 'sysinv.openstack.common.gettextutils._', '_', (["('Total target growth size %s GiB for database (doubled for upgrades), glance, img-conversions, scratch, backup, extension and ceph-mon exceeds growth limit of %s GiB.'\n % (cgtsvg_growth_gib, cgtsvg_max_free_GiB))"], {}), "(\n 'Total target growth size %s GiB for database (doubled for upgrades), glance, img-conversions, scratch, backup, extension and ceph-mon exceeds growth limit of %s GiB.'\n % (cgtsvg_growth_gib, cgtsvg_max_free_GiB))\n", (18558, 18780), False, 'from sysinv.openstack.common.gettextutils import _\n'), ((18906, 19119), 'sysinv.openstack.common.gettextutils._', '_', (["('Total target growth size %s GiB for database (doubled for upgrades), glance, img-conversions, scratch, backup and extension exceeds growth limit of %s GiB.'\n % (cgtsvg_growth_gib, cgtsvg_max_free_GiB))"], {}), "(\n 'Total target growth size %s GiB for database (doubled for upgrades), glance, img-conversions, scratch, backup and extension exceeds growth limit of %s GiB.'\n % (cgtsvg_growth_gib, cgtsvg_max_free_GiB))\n", (18907, 19119), False, 'from sysinv.openstack.common.gettextutils import _\n'), ((22977, 23041), 'sysinv.objects.controller_fs.get_by_uuid', 'objects.controller_fs.get_by_uuid', (['pecan.request.context', 'marker'], {}), '(pecan.request.context, marker)\n', (23010, 23041), False, 'from sysinv import objects\n'), ((23112, 23235), 'pecan.request.dbapi.controller_fs_get_by_isystem', 'pecan.request.dbapi.controller_fs_get_by_isystem', (['isystem_uuid', 'limit', 'marker_obj'], {'sort_key': 'sort_key', 'sort_dir': 'sort_dir'}), '(isystem_uuid, limit,\n marker_obj, sort_key=sort_key, sort_dir=sort_dir)\n', (23160, 23235), False, 'import pecan\n'), ((23357, 23461), 'pecan.request.dbapi.controller_fs_get_list', 'pecan.request.dbapi.controller_fs_get_list', (['limit', 'marker_obj'], {'sort_key': 'sort_key', 'sort_dir': 'sort_dir'}), '(limit, marker_obj, sort_key=\n sort_key, sort_dir=sort_dir)\n', (23399, 23461), False, 'import pecan\n'), ((26835, 26862), 'jsonpatch.JsonPatch', 'jsonpatch.JsonPatch', (['p_list'], {}), '(p_list)\n', (26854, 26862), False, 'import jsonpatch\n'), ((27282, 27309), 'jsonpatch.JsonPatch', 'jsonpatch.JsonPatch', (['p_list'], {}), '(p_list)\n', (27301, 27309), False, 'import jsonpatch\n'), ((31504, 31700), 'pecan.request.rpcapi.update_storage_config', 'pecan.request.rpcapi.update_storage_config', (['pecan.request.context'], {'update_storage': '(False)', 'reinstall_required': 'reinstall_required', 'reboot_required': 'reboot_required', 'filesystem_list': 'modified_fs'}), '(pecan.request.context,\n update_storage=False, reinstall_required=reinstall_required,\n reboot_required=reboot_required, filesystem_list=modified_fs)\n', (31546, 31700), False, 'import pecan\n'), ((20792, 20861), 'sysinv.openstack.common.gettextutils._', '_', (['"""%s is not modifiable: no cinder backend is currently configured."""'], {}), "('%s is not modifiable: no cinder backend is currently configured.')\n", (20793, 20861), False, 'from sysinv.openstack.common.gettextutils import _\n'), ((21123, 21219), 'sysinv.openstack.common.gettextutils._', '_', (["('Unable to determine the current size of %s. Rejecting modification request.'\n % key)"], {}), "(\n 'Unable to determine the current size of %s. Rejecting modification request.'\n % key)\n", (21124, 21219), False, 'from sysinv.openstack.common.gettextutils import _\n'), ((22760, 22789), 'sysinv.openstack.common.gettextutils._', '_', (['"""System id not specified."""'], {}), "('System id not specified.')\n", (22761, 22789), False, 'from sysinv.openstack.common.gettextutils import _\n'), ((24716, 24745), 'pecan.request.path.split', 'pecan.request.path.split', (['"""/"""'], {}), "('/')\n", (24740, 24745), False, 'import pecan\n'), ((26359, 26388), 'sysinv.openstack.common.gettextutils._', '_', (['"""System id not specified."""'], {}), "('System id not specified.')\n", (26360, 26388), False, 'from sysinv.openstack.common.gettextutils import _\n'), ((27885, 27960), 'sysinv.openstack.common.gettextutils._', '_', (['("ControllerFs update failed: invalid filesystem \'%s\' " % fs_display_name)'], {}), '("ControllerFs update failed: invalid filesystem \'%s\' " % fs_display_name)\n', (27886, 27960), False, 'from sysinv.openstack.common.gettextutils import _\n'), ((28010, 28039), 'wsme.exc.ClientSideError', 'wsme.exc.ClientSideError', (['msg'], {}), '(msg)\n', (28034, 28039), False, 'import wsme\n'), ((29373, 29400), 'sysinv.api.controllers.v1.utils.is_drbd_fs_resizing', 'utils.is_drbd_fs_resizing', ([], {}), '()\n', (29398, 29400), False, 'from sysinv.api.controllers.v1 import utils\n'), ((29805, 29832), 'jsonpatch.JsonPatch', 'jsonpatch.JsonPatch', (['p_list'], {}), '(p_list)\n', (29824, 29832), False, 'import jsonpatch\n'), ((31857, 31895), 'sysinv.openstack.common.gettextutils._', '_', (['"""Failed to update filesystem size """'], {}), "('Failed to update filesystem size ')\n", (31858, 31895), False, 'from sysinv.openstack.common.gettextutils import _\n'), ((31992, 32021), 'wsme.exc.ClientSideError', 'wsme.exc.ClientSideError', (['msg'], {}), '(msg)\n', (32016, 32021), False, 'import wsme\n'), ((13692, 13978), 'sysinv.openstack.common.gettextutils._', '_', (["('This operation requires controllers to be %s, %s, %s. Current status is %s, %s, %s. %s.'\n % (constants.ADMIN_UNLOCKED, constants.OPERATIONAL_ENABLED, constants.\n AVAILABILITY_AVAILABLE, chost.administrative, chost.operational, chost.\n availability, alarm_explanation))"], {}), "(\n 'This operation requires controllers to be %s, %s, %s. Current status is %s, %s, %s. %s.'\n % (constants.ADMIN_UNLOCKED, constants.OPERATIONAL_ENABLED, constants.\n AVAILABILITY_AVAILABLE, chost.administrative, chost.operational, chost.\n availability, alarm_explanation))\n", (13693, 13978), False, 'from sysinv.openstack.common.gettextutils import _\n'), ((14711, 14810), 'sysinv.openstack.common.gettextutils._', '_', (['"""Cannot resize filesystem. There are still unprovisioned physical volumes on controller-0."""'], {}), "('Cannot resize filesystem. There are still unprovisioned physical volumes on controller-0.'\n )\n", (14712, 14810), False, 'from sysinv.openstack.common.gettextutils import _\n'), ((14863, 14892), 'wsme.exc.ClientSideError', 'wsme.exc.ClientSideError', (['msg'], {}), '(msg)\n', (14887, 14892), False, 'import wsme\n'), ((15616, 15715), 'sysinv.openstack.common.gettextutils._', '_', (['"""Cannot resize filesystem. There are still unprovisioned physical volumes on controller-1."""'], {}), "('Cannot resize filesystem. There are still unprovisioned physical volumes on controller-1.'\n )\n", (15617, 15715), False, 'from sysinv.openstack.common.gettextutils import _\n'), ((15768, 15797), 'wsme.exc.ClientSideError', 'wsme.exc.ClientSideError', (['msg'], {}), '(msg)\n', (15792, 15797), False, 'import wsme\n'), ((21716, 21765), 'sysinv.openstack.common.gettextutils._', '_', (['("\'%s\' must be at least: %s" % (fs.name, orig))'], {}), '("\'%s\' must be at least: %s" % (fs.name, orig))\n', (21717, 21765), False, 'from sysinv.openstack.common.gettextutils import _\n'), ((28061, 28085), 'sysinv.common.utils.is_int_like', 'cutils.is_int_like', (['size'], {}), '(size)\n', (28079, 28085), True, 'from sysinv.common import utils as cutils\n'), ((28109, 28204), 'sysinv.openstack.common.gettextutils._', '_', (['("ControllerFs update failed: filesystem \'%s\' size must be an integer " %\n fs_display_name)'], {}), '("ControllerFs update failed: filesystem \'%s\' size must be an integer " %\n fs_display_name)\n', (28110, 28204), False, 'from sysinv.openstack.common.gettextutils import _\n'), ((28250, 28279), 'wsme.exc.ClientSideError', 'wsme.exc.ClientSideError', (['msg'], {}), '(msg)\n', (28274, 28279), False, 'import wsme\n'), ((31358, 31414), 'pecan.request.dbapi.controller_fs_update', 'pecan.request.dbapi.controller_fs_update', (['fs.uuid', 'value'], {}), '(fs.uuid, value)\n', (31398, 31414), False, 'import pecan\n'), ((28361, 28498), 'sysinv.openstack.common.gettextutils._', '_', (['("ControllerFs update failed: size for filesystem \'%s\' should be bigger than %s "\n % (fs_display_name, valid_fs_list[fs_name]))'], {}), '(\n "ControllerFs update failed: size for filesystem \'%s\' should be bigger than %s "\n % (fs_display_name, valid_fs_list[fs_name]))\n', (28362, 28498), False, 'from sysinv.openstack.common.gettextutils import _\n'), ((28567, 28596), 'wsme.exc.ClientSideError', 'wsme.exc.ClientSideError', (['msg'], {}), '(msg)\n', (28591, 28596), False, 'import wsme\n'), ((29478, 29549), 'sysinv.openstack.common.gettextutils._', '_', (['"""A drbd sync operation is currently in progress. Retry again later."""'], {}), "('A drbd sync operation is currently in progress. Retry again later.')\n", (29479, 29549), False, 'from sysinv.openstack.common.gettextutils import _\n'), ((28679, 28768), 'sysinv.common.storage_backend_conf.StorageBackendConfig.get_backend', 'StorageBackendConfig.get_backend', (['pecan.request.dbapi', 'constants.CINDER_BACKEND_CEPH'], {}), '(pecan.request.dbapi, constants.\n CINDER_BACKEND_CEPH)\n', (28711, 28768), False, 'from sysinv.common.storage_backend_conf import StorageBackendConfig\n'), ((30502, 30546), 'sysinv.common.exception.PatchError', 'exception.PatchError', ([], {'patch': 'p_list', 'reason': 'e'}), '(patch=p_list, reason=e)\n', (30522, 30546), False, 'from sysinv.common import exception\n'), ((29103, 29233), 'sysinv.openstack.common.gettextutils._', '_', (["('ControllerFs %s size is not modifiable as Ceph is configured. Update size via Ceph Storage Pools.'\n % fs_display_name)"], {}), "(\n 'ControllerFs %s size is not modifiable as Ceph is configured. Update size via Ceph Storage Pools.'\n % fs_display_name)\n", (29104, 29233), False, 'from sysinv.openstack.common.gettextutils import _\n')] |
import scrapy
from locations.items import GeojsonPointItem
from urllib.parse import urlencode
from scrapy.selector import Selector
from locations.hours import OpeningHours
Days = ["Su", "Mo", "Tu", "We", "Th", "Fr", "Sa"]
class DollaramaSpider(scrapy.Spider):
name = "dollarama"
item_attributes = {"brand": "Dollarama"}
allowed_domains = ["dollarama.com"]
def start_requests(self):
base_url = "https://www.dollarama.com/en-CA/locations/anydata-api?"
params = {"distance": "100", "units": "miles"}
with open(
"./locations/searchable_points/ca_centroids_100mile_radius.csv"
) as points:
next(points)
for point in points:
_, lat, lon = point.strip().split(",")
params.update({"latitude": lat, "longitude": lon})
yield scrapy.Request(url=base_url + urlencode(params))
def parse_hours(self, hours):
hrs = hours.split("|")
opening_hours = OpeningHours()
for day, hour in zip(Days, hrs):
if hour == "Closed":
continue
open_time, close_time = hour.split("-")
opening_hours.add_range(
day=day,
open_time=open_time,
close_time=close_time,
time_format="%I:%M%p",
)
return opening_hours.as_opening_hours()
def parse(self, response):
data = response.json()
for row in data.get("StoreLocations", []):
properties = {
"ref": row["LocationNumber"],
"name": row["Name"],
"addr_full": row["ExtraData"]["Address"]["AddressNonStruct_Line1"],
"city": row["ExtraData"]["Address"]["Locality"],
"state": row["ExtraData"]["Address"]["Region"],
"postcode": row["ExtraData"]["Address"]["PostalCode"],
"lat": row["Location"]["coordinates"][1],
"lon": row["Location"]["coordinates"][0],
"phone": row["ExtraData"]["Phone"],
}
hours = self.parse_hours(row["ExtraData"]["Hours of operations"])
if hours:
properties["opening_hours"] = hours
yield GeojsonPointItem(**properties)
| [
"urllib.parse.urlencode",
"locations.hours.OpeningHours",
"locations.items.GeojsonPointItem"
] | [((993, 1007), 'locations.hours.OpeningHours', 'OpeningHours', ([], {}), '()\n', (1005, 1007), False, 'from locations.hours import OpeningHours\n'), ((2264, 2294), 'locations.items.GeojsonPointItem', 'GeojsonPointItem', ([], {}), '(**properties)\n', (2280, 2294), False, 'from locations.items import GeojsonPointItem\n'), ((883, 900), 'urllib.parse.urlencode', 'urlencode', (['params'], {}), '(params)\n', (892, 900), False, 'from urllib.parse import urlencode\n')] |
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import datasets.registry
from foundations import hparams
from foundations.step import Step
from lottery.branch import base
import models.registry
from pruning.mask import Mask
from pruning.pruned_model import PrunedModel
from training import train
class Branch(base.Branch):
def branch_function(
self,
retrain_d: hparams.DatasetHparams,
retrain_t: hparams.TrainingHparams,
start_at_step_zero: bool = False
):
# Get the mask and model.
m = models.registry.load(self.level_root, self.lottery_desc.train_start_step, self.lottery_desc.model_hparams)
m = PrunedModel(m, Mask.load(self.level_root))
start_step = Step.from_iteration(0 if start_at_step_zero else self.lottery_desc.train_start_step.iteration,
datasets.registry.iterations_per_epoch(retrain_d))
train.standard_train(m, self.branch_root, retrain_d, retrain_t, start_step=start_step, verbose=self.verbose)
@staticmethod
def description():
return "Retrain the model with different hyperparameters."
@staticmethod
def name():
return 'retrain'
| [
"training.train.standard_train",
"pruning.mask.Mask.load"
] | [((1052, 1165), 'training.train.standard_train', 'train.standard_train', (['m', 'self.branch_root', 'retrain_d', 'retrain_t'], {'start_step': 'start_step', 'verbose': 'self.verbose'}), '(m, self.branch_root, retrain_d, retrain_t, start_step=\n start_step, verbose=self.verbose)\n', (1072, 1165), False, 'from training import train\n'), ((808, 834), 'pruning.mask.Mask.load', 'Mask.load', (['self.level_root'], {}), '(self.level_root)\n', (817, 834), False, 'from pruning.mask import Mask\n')] |
"""
Created on Thu Mar 22 15:07:43 2018
@author: Tanvee
First attempt at an program for the EV3 bot.
The main aim of this is to develop an algorithm to search clockwise for and identify
close objects, before rushing to meet them.
"""
print(0)
from time import sleep
import sys, os
# Import the ev3dev specific library
from ev3dev.ev3 import *
print(1)
# Connect motors
rightMotor = LargeMotor(OUTPUT_C)
assert rightMotor.connected
leftMotor = LargeMotor(OUTPUT_B)
assert leftMotor.connected
# Connect sensors
print(2)
tsRIGHT = TouchSensor(INPUT_3)
assert tsRIGHT.connected
tsLEFT = TouchSensor(INPUT_2)
assert tsLEFT.connected
us = UltrasonicSensor()
assert us.connected
cs = ColorSensor(INPUT_4)
assert cs.connected
print("All Connected")
# The gyro is reset when the mode is changed, so the first line is extra, just so we
# can change the mode the 'GYRO-ANGLE', which is what we want
# gs.mode = 'GYRO-RATE' # Changing the mode resets the gyro
# gs.mode = 'GYRO-ANG' # Set gyro mode to return compass angle
# We will need to check EV3 buttons state.
btn = Button()
# FUNCTION DEFINITIONS
def drive(left, right):
"""
Start both motors at the given speeds.
"""
leftMotor.run_direct(duty_cycle_sp=left)
rightMotor.run_direct(duty_cycle_sp=right)
def stop():
# Stop both motors
leftMotor.stop(stop_action='brake')
rightMotor.stop(stop_action='brake')
def main():
print(btn.buttons_pressed)
if btn.left:
stop()
if btn.right:
print("The button was pressed")
drive(100, -100)
sleep(3)
stop()
"""
The default action is to spin around in an attempt to detect any object
within a certain radius using the ultrasonic sensor.
If the ultrasonic detects anything within 500mm the robot's reacts by "charging" at the object
"""
while True:
main()
| [
"time.sleep"
] | [((1569, 1577), 'time.sleep', 'sleep', (['(3)'], {}), '(3)\n', (1574, 1577), False, 'from time import sleep\n')] |
from unittest import TestCase, mock
from modelgen import ModelGenerator, Base
from os import getcwd, path
class TestModelgen(TestCase):
@classmethod
def setUpClass(self):
self.yaml = {'tables': {'userinfo':{'columns':
[{'name': 'firstname', 'type': 'varchar'},
{'name': 'lastname', 'type': 'varchar'},
{'name': 'dob', 'type': 'date'},
{'name': 'contact', 'type': 'numeric'},
{'name': 'address', 'type': 'varchar'}]}}}
self.logger = Base().logger
@mock.patch('modelgen.modelgenerator.Validate')
@mock.patch('modelgen.ModelGenerator.__init__')
@mock.patch('modelgen.modelgenerator.Helper.write_to_file')
@mock.patch('modelgen.modelgenerator.Path')
@mock.patch('modelgen.modelgenerator.Parser')
@mock.patch('modelgen.modelgenerator.Template')
def test_create_model_wo_alembic(self, mock_templt, mock_prsr, mock_pth,
mock_wrtf, mock_init, mock_validate):
'''
Test create_model function without setting alembic
support to True
'''
mock_init.return_value = None
mock_validate.validate.return_value = True
mock_wrtf.return_value = True
mock_prsr.data.return_value = self.yaml
model_obj = ModelGenerator()
response = model_obj._create_model('test')
self.assertEqual(True, response)
mock_prsr.assert_called_with(filepath=path.join(getcwd(), 'templates/test.yaml'))
mock_wrtf.assert_called_with(path=path.join(getcwd(), 'models/test.py'),
data=mock_templt().render())
@mock.patch('modelgen.modelgenerator.ModelGenerator._create_alembic_meta')
@mock.patch('modelgen.modelgenerator.Validate')
@mock.patch('modelgen.ModelGenerator.__init__')
@mock.patch('modelgen.modelgenerator.Helper.write_to_file')
@mock.patch('modelgen.modelgenerator.Path')
@mock.patch('modelgen.modelgenerator.Parser')
@mock.patch('modelgen.modelgenerator.Template')
def test_create_model_w_alembic(self, mock_templt, mock_prsr, mock_pth,
mock_wrtf, mock_init, mock_validate, mock_cam):
'''
Test _create_model function with setting alembic
support to True
'''
mock_init.return_value = None
mock_validate.validate.return_value = True
mock_wrtf.return_value = True
mock_prsr.data.return_value = self.yaml
mock_cam.return_value = True
model_obj = ModelGenerator()
response = model_obj._create_model(datasource='./test', alembic=True)
self.assertEqual(True, response)
mock_prsr.assert_called_with(filepath=path.join(getcwd(), 'templates/./test.yaml'))
mock_wrtf.assert_called_with(path=path.join(getcwd(), 'models/./test.py'),
data=mock_templt().render())
@mock.patch('modelgen.modelgenerator.Validate')
@mock.patch('modelgen.ModelGenerator.__init__')
@mock.patch('modelgen.modelgenerator.Helper.write_to_file')
@mock.patch('modelgen.modelgenerator.Path')
@mock.patch('modelgen.modelgenerator.Parser')
@mock.patch('modelgen.modelgenerator.Template')
def test_create_alembic_meta(self, mock_templt, mock_prsr, mock_pth,
mock_wrtf, mock_init, mock_validate):
'''
Test _create_alembic_meta function. Function creates
alembic support by a folder called metadata and
a file __init__.py in the folder. This file contains
sqlalchemy metadata imported from all the sqlalchemy
model files
'''
mock_init.return_value = None
mock_validate.validate.return_value = True
mock_wrtf.return_value = True
mock_prsr.data.return_value = self.yaml
model_obj = ModelGenerator()
response = model_obj._create_alembic_meta()
self.assertEqual(True, response)
mock_wrtf.assert_called_with(path=path.join(getcwd(), 'metadata/__init__.py'),
data=mock_templt().render())
@mock.patch('modelgen.modelgenerator.path')
@mock.patch('modelgen.modelgenerator.Path')
@mock.patch('modelgen.modelgenerator.copyfile')
def test_create_template_folder(self, mock_cpyfile, mock_pth, mock_ospth):
'''
Test _create_template_folder function. Function creates
templates folder structure when modelgen is initialized
'''
mock_ospth.join.side_effects = ['./test', './test', './test', './test']
mock_ospth.exists.return_value = False
mock_pth.mkdir.return_value = True
mock_cpyfile.return_value = True
model_obj = ModelGenerator()
response = model_obj._create_template_folder(init='./testfolder')
self.assertEqual(response, True)
mock_cpyfile.assert_called_with(mock_ospth.join(), mock_ospth.join())
@mock.patch('modelgen.ModelGenerator._create_alembic_folder')
@mock.patch('modelgen.modelgenerator.Path')
@mock.patch('modelgen.modelgenerator.path')
@mock.patch('modelgen.modelgenerator.copyfile')
def test_create_template_folder_exists(self, mock_cpyfile, mock_ospth, mock_pth, mock_caf):
'''
Test _create_template_folder function when folder already exists
Function throws FileExistsError.
'''
mock_pth.mkdir.return_value = FileExistsError
mock_caf.return_value = True
mock_ospth.join.side_effects = ['./test', './test', './test', './test']
mock_ospth.exists.return_value = True
mock_cpyfile.return_value = True
model_obj = ModelGenerator()
with self.assertRaises(FileExistsError) as err:
model_obj._create_template_folder(init='./models')
@mock.patch('modelgen.modelgenerator.copytree')
@mock.patch('modelgen.modelgenerator.path')
@mock.patch('modelgen.modelgenerator.Path')
@mock.patch('modelgen.modelgenerator.copyfile')
def test_create_alembic_folder(self, mock_cpyfile, mock_pth, mock_ospth,
mock_cptr):
'''
Test _create_alembic_folder function. Tests the
creation of folders alembic/versions, alembic/alembic.ini,
alembic/env.py. Relative path is passed in this
test
'''
mock_cptr.return_value = True
mock_ospth.join.return_value = './testfolder'
mock_ospth.isabs.return_value = False
mock_ospth.exists.return_value = False
mock_pth.mkdir.return_value = True
mock_cpyfile.return_value = True
model_obj = ModelGenerator()
response = model_obj._create_alembic_folder(init='./testfolder')
self.assertEqual(response, True)
mock_cptr.assert_called_with(mock_ospth.join(), mock_ospth.join())
@mock.patch('modelgen.modelgenerator.copytree')
@mock.patch('modelgen.modelgenerator.path')
@mock.patch('modelgen.modelgenerator.Path')
@mock.patch('modelgen.modelgenerator.copyfile')
def test_create_alembic_folder_absolute_path(self, mock_cpyfile, mock_pth, mock_ospth,
mock_cptr):
'''
Test _create_alembic_folder function. Tests the
creation of folders alembic/versions, alembic/alembic.ini,
alembic/env.py. Absolute path is passed in this
test.
'''
mock_cptr.return_value = True
mock_ospth.join.return_value = '/testfolder'
mock_ospth.exists.return_value = False
mock_pth.mkdir.return_value = True
mock_cpyfile.return_value = True
model_obj = ModelGenerator()
response = model_obj._create_alembic_folder(init='/testfolder')
self.assertEqual(response, True)
mock_cptr.assert_called_with(mock_ospth.join(), mock_ospth.join())
@mock.patch('modelgen.ModelGenerator._create_template_folder')
@mock.patch('modelgen.modelgenerator.path')
@mock.patch('modelgen.modelgenerator.copytree')
@mock.patch('modelgen.modelgenerator.copyfile')
def test_create_alembic_folder_exists(self, mock_cpyfile, mock_cptr, mock_ospth, mock_ctf):
'''
Test _create_alembic_folder function when folder
already exists. The function raises FileExistsError
'''
mock_ctf.return_value = True
mock_cptr.return_value = True
mock_ospth.join.side_effects = ['./test', './test', './test', './test']
mock_ospth.exists.return_value = True
mock_cpyfile.return_value = True
model_obj = ModelGenerator()
with self.assertRaises(FileExistsError) as err:
model_obj._create_alembic_folder(init='./docs')
@mock.patch('modelgen.modelgenerator.ModelGenerator._create_alembic_folder')
@mock.patch('modelgen.modelgenerator.ModelGenerator._create_template_folder')
@mock.patch('modelgen.modelgenerator.ModelGenerator._create_checkpoint_file')
def test_modelgenerator_init(self, mock_cafldr, mock_ctfldr, mock_cchk):
obj = ModelGenerator(init='./test')
mock_cafldr.assert_called_with(init='./test')
mock_cchk.assert_called_with(init='./test')
mock_ctfldr.assert_called_with(init='./test')
@mock.patch('modelgen.modelgenerator.path')
@mock.patch('modelgen.modelgenerator.ModelGenerator._create_model')
@mock.patch('modelgen.modelgenerator.ModelGenerator._find_checkpoint_file')
def test_modelgenerator_init_create_model_elif_w_yaml_extn(self, mock_fcf,
mock_cm, mock_ospth):
'''
Test modelgen/modelgenerator.py file's __init__ method
when schema yaml file with extension .yaml is passed
'''
mock_ospth.return_value = True
mock_cm.return_value = True
mock_fcf = True
obj = ModelGenerator(createmodel=True, file='./test.yaml')
@mock.patch('modelgen.modelgenerator.path')
@mock.patch('modelgen.modelgenerator.ModelGenerator._create_model')
@mock.patch('modelgen.modelgenerator.ModelGenerator._find_checkpoint_file')
def test_modelgenerator_init_create_model_elif_w_yml_extn(self, mock_fcf,
mock_cm, mock_ospth):
'''
Test modelgen/modelgenerator.py file's __init__ method
when schema yaml file with extension .yml is passed
'''
mock_ospth.return_value = True
mock_cm.return_value = True
mock_fcf = True
obj = ModelGenerator(createmodel=True, file='./test.yml')
@mock.patch('modelgen.modelgenerator.path')
@mock.patch('modelgen.modelgenerator.ModelGenerator._create_model')
@mock.patch('modelgen.modelgenerator.ModelGenerator._find_checkpoint_file')
def test_modelgenerator_init_create_model_elif_wo_yaml_extn(self, mock_fcf, mock_cm, mock_ospth):
'''
Test modelgen/modelgenerator.py file's __init__ method
when schema file without .yaml or .yml is passed. The
function will throw NameError
'''
mock_ospth.return_value = True
mock_cm.return_value = True
mock_fcf = True
with self.assertRaises(NameError) as err:
obj = ModelGenerator(createmodel=True, file='./test.txt')
@mock.patch('modelgen.modelgenerator.path')
@mock.patch('modelgen.modelgenerator.ModelGenerator._create_model')
@mock.patch('modelgen.modelgenerator.ModelGenerator._find_checkpoint_file')
def test_modelgenerator_createmodel_find_checkpoint_file_true(self, mock_fcf,
mock_cm, mock_ospth):
'''
Test _find_checkpoint_file_ when the checkpoint file,
.modelgen, exists.
'''
mock_ospth.return_value = True
mock_cm.return_value = True
mock_fcf = True
obj = ModelGenerator(createmodel=True, file='./test.yaml')
@mock.patch('modelgen.modelgenerator.path')
@mock.patch('modelgen.modelgenerator.ModelGenerator._create_model')
@mock.patch('modelgen.modelgenerator.ModelGenerator._find_checkpoint_file')
def test_modelgenerator_createmodel_find_checkpoint_file_false(self, mock_fcf,
mock_cm, mock_ospth):
'''
Test _find_checkpoint_file_ when the checkpoint file,
.modelgen, doesn't exists.
'''
mock_ospth.return_value = True
mock_cm.return_value = True
mock_fcf.return_value = False
obj = ModelGenerator(createmodel=True, file='./test.yaml')
mock_fcf.assert_called_with()
@mock.patch('modelgen.modelgenerator.Helper.write_to_file')
def test_create_checkpoint_file(self, mock_wrtf):
'''
Test _create_checkpoint_file. The checkpoint file
is created when the modelgen is initialized for the
first time
'''
mock_wrtf.return_value = True
obj = ModelGenerator()
obj._create_checkpoint_file(init='./dummy')
mock_wrtf.assert_called_with(path='./dummy/.modelgen', data='')
@mock.patch('modelgen.modelgenerator.path')
def test_find_checkpoint_file_exists(self, mock_ospth):
mock_ospth.exists.return_value = True
obj = ModelGenerator()
response = obj._find_checkpoint_file()
self.assertEqual(response, True)
mock_ospth.exists.assert_called_with(mock_ospth.join())
@mock.patch('modelgen.modelgenerator.path')
def test_find_checkpoint_file_not_found(self, mock_ospth):
mock_ospth.exists.return_value = False
obj = ModelGenerator()
with self.assertRaises(FileNotFoundError) as err:
obj._find_checkpoint_file()
@classmethod
def tearDownClass(self):
pass | [
"modelgen.ModelGenerator",
"os.getcwd",
"unittest.mock.patch",
"modelgen.Base"
] | [((688, 734), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.Validate"""'], {}), "('modelgen.modelgenerator.Validate')\n", (698, 734), False, 'from unittest import TestCase, mock\n'), ((740, 786), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.ModelGenerator.__init__"""'], {}), "('modelgen.ModelGenerator.__init__')\n", (750, 786), False, 'from unittest import TestCase, mock\n'), ((792, 850), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.Helper.write_to_file"""'], {}), "('modelgen.modelgenerator.Helper.write_to_file')\n", (802, 850), False, 'from unittest import TestCase, mock\n'), ((856, 898), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.Path"""'], {}), "('modelgen.modelgenerator.Path')\n", (866, 898), False, 'from unittest import TestCase, mock\n'), ((904, 948), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.Parser"""'], {}), "('modelgen.modelgenerator.Parser')\n", (914, 948), False, 'from unittest import TestCase, mock\n'), ((954, 1000), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.Template"""'], {}), "('modelgen.modelgenerator.Template')\n", (964, 1000), False, 'from unittest import TestCase, mock\n'), ((1803, 1876), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.ModelGenerator._create_alembic_meta"""'], {}), "('modelgen.modelgenerator.ModelGenerator._create_alembic_meta')\n", (1813, 1876), False, 'from unittest import TestCase, mock\n'), ((1882, 1928), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.Validate"""'], {}), "('modelgen.modelgenerator.Validate')\n", (1892, 1928), False, 'from unittest import TestCase, mock\n'), ((1934, 1980), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.ModelGenerator.__init__"""'], {}), "('modelgen.ModelGenerator.__init__')\n", (1944, 1980), False, 'from unittest import TestCase, mock\n'), ((1986, 2044), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.Helper.write_to_file"""'], {}), "('modelgen.modelgenerator.Helper.write_to_file')\n", (1996, 2044), False, 'from unittest import TestCase, mock\n'), ((2050, 2092), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.Path"""'], {}), "('modelgen.modelgenerator.Path')\n", (2060, 2092), False, 'from unittest import TestCase, mock\n'), ((2098, 2142), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.Parser"""'], {}), "('modelgen.modelgenerator.Parser')\n", (2108, 2142), False, 'from unittest import TestCase, mock\n'), ((2148, 2194), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.Template"""'], {}), "('modelgen.modelgenerator.Template')\n", (2158, 2194), False, 'from unittest import TestCase, mock\n'), ((3072, 3118), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.Validate"""'], {}), "('modelgen.modelgenerator.Validate')\n", (3082, 3118), False, 'from unittest import TestCase, mock\n'), ((3124, 3170), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.ModelGenerator.__init__"""'], {}), "('modelgen.ModelGenerator.__init__')\n", (3134, 3170), False, 'from unittest import TestCase, mock\n'), ((3176, 3234), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.Helper.write_to_file"""'], {}), "('modelgen.modelgenerator.Helper.write_to_file')\n", (3186, 3234), False, 'from unittest import TestCase, mock\n'), ((3240, 3282), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.Path"""'], {}), "('modelgen.modelgenerator.Path')\n", (3250, 3282), False, 'from unittest import TestCase, mock\n'), ((3288, 3332), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.Parser"""'], {}), "('modelgen.modelgenerator.Parser')\n", (3298, 3332), False, 'from unittest import TestCase, mock\n'), ((3338, 3384), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.Template"""'], {}), "('modelgen.modelgenerator.Template')\n", (3348, 3384), False, 'from unittest import TestCase, mock\n'), ((4287, 4329), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.path"""'], {}), "('modelgen.modelgenerator.path')\n", (4297, 4329), False, 'from unittest import TestCase, mock\n'), ((4335, 4377), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.Path"""'], {}), "('modelgen.modelgenerator.Path')\n", (4345, 4377), False, 'from unittest import TestCase, mock\n'), ((4383, 4429), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.copyfile"""'], {}), "('modelgen.modelgenerator.copyfile')\n", (4393, 4429), False, 'from unittest import TestCase, mock\n'), ((5108, 5168), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.ModelGenerator._create_alembic_folder"""'], {}), "('modelgen.ModelGenerator._create_alembic_folder')\n", (5118, 5168), False, 'from unittest import TestCase, mock\n'), ((5174, 5216), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.Path"""'], {}), "('modelgen.modelgenerator.Path')\n", (5184, 5216), False, 'from unittest import TestCase, mock\n'), ((5222, 5264), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.path"""'], {}), "('modelgen.modelgenerator.path')\n", (5232, 5264), False, 'from unittest import TestCase, mock\n'), ((5270, 5316), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.copyfile"""'], {}), "('modelgen.modelgenerator.copyfile')\n", (5280, 5316), False, 'from unittest import TestCase, mock\n'), ((5972, 6018), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.copytree"""'], {}), "('modelgen.modelgenerator.copytree')\n", (5982, 6018), False, 'from unittest import TestCase, mock\n'), ((6024, 6066), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.path"""'], {}), "('modelgen.modelgenerator.path')\n", (6034, 6066), False, 'from unittest import TestCase, mock\n'), ((6072, 6114), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.Path"""'], {}), "('modelgen.modelgenerator.Path')\n", (6082, 6114), False, 'from unittest import TestCase, mock\n'), ((6120, 6166), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.copyfile"""'], {}), "('modelgen.modelgenerator.copyfile')\n", (6130, 6166), False, 'from unittest import TestCase, mock\n'), ((7010, 7056), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.copytree"""'], {}), "('modelgen.modelgenerator.copytree')\n", (7020, 7056), False, 'from unittest import TestCase, mock\n'), ((7062, 7104), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.path"""'], {}), "('modelgen.modelgenerator.path')\n", (7072, 7104), False, 'from unittest import TestCase, mock\n'), ((7110, 7152), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.Path"""'], {}), "('modelgen.modelgenerator.Path')\n", (7120, 7152), False, 'from unittest import TestCase, mock\n'), ((7158, 7204), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.copyfile"""'], {}), "('modelgen.modelgenerator.copyfile')\n", (7168, 7204), False, 'from unittest import TestCase, mock\n'), ((8015, 8076), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.ModelGenerator._create_template_folder"""'], {}), "('modelgen.ModelGenerator._create_template_folder')\n", (8025, 8076), False, 'from unittest import TestCase, mock\n'), ((8082, 8124), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.path"""'], {}), "('modelgen.modelgenerator.path')\n", (8092, 8124), False, 'from unittest import TestCase, mock\n'), ((8130, 8176), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.copytree"""'], {}), "('modelgen.modelgenerator.copytree')\n", (8140, 8176), False, 'from unittest import TestCase, mock\n'), ((8182, 8228), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.copyfile"""'], {}), "('modelgen.modelgenerator.copyfile')\n", (8192, 8228), False, 'from unittest import TestCase, mock\n'), ((8867, 8942), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.ModelGenerator._create_alembic_folder"""'], {}), "('modelgen.modelgenerator.ModelGenerator._create_alembic_folder')\n", (8877, 8942), False, 'from unittest import TestCase, mock\n'), ((8948, 9024), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.ModelGenerator._create_template_folder"""'], {}), "('modelgen.modelgenerator.ModelGenerator._create_template_folder')\n", (8958, 9024), False, 'from unittest import TestCase, mock\n'), ((9030, 9106), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.ModelGenerator._create_checkpoint_file"""'], {}), "('modelgen.modelgenerator.ModelGenerator._create_checkpoint_file')\n", (9040, 9106), False, 'from unittest import TestCase, mock\n'), ((9394, 9436), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.path"""'], {}), "('modelgen.modelgenerator.path')\n", (9404, 9436), False, 'from unittest import TestCase, mock\n'), ((9442, 9508), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.ModelGenerator._create_model"""'], {}), "('modelgen.modelgenerator.ModelGenerator._create_model')\n", (9452, 9508), False, 'from unittest import TestCase, mock\n'), ((9514, 9588), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.ModelGenerator._find_checkpoint_file"""'], {}), "('modelgen.modelgenerator.ModelGenerator._find_checkpoint_file')\n", (9524, 9588), False, 'from unittest import TestCase, mock\n'), ((10076, 10118), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.path"""'], {}), "('modelgen.modelgenerator.path')\n", (10086, 10118), False, 'from unittest import TestCase, mock\n'), ((10124, 10190), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.ModelGenerator._create_model"""'], {}), "('modelgen.modelgenerator.ModelGenerator._create_model')\n", (10134, 10190), False, 'from unittest import TestCase, mock\n'), ((10196, 10270), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.ModelGenerator._find_checkpoint_file"""'], {}), "('modelgen.modelgenerator.ModelGenerator._find_checkpoint_file')\n", (10206, 10270), False, 'from unittest import TestCase, mock\n'), ((10755, 10797), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.path"""'], {}), "('modelgen.modelgenerator.path')\n", (10765, 10797), False, 'from unittest import TestCase, mock\n'), ((10803, 10869), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.ModelGenerator._create_model"""'], {}), "('modelgen.modelgenerator.ModelGenerator._create_model')\n", (10813, 10869), False, 'from unittest import TestCase, mock\n'), ((10875, 10949), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.ModelGenerator._find_checkpoint_file"""'], {}), "('modelgen.modelgenerator.ModelGenerator._find_checkpoint_file')\n", (10885, 10949), False, 'from unittest import TestCase, mock\n'), ((11473, 11515), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.path"""'], {}), "('modelgen.modelgenerator.path')\n", (11483, 11515), False, 'from unittest import TestCase, mock\n'), ((11521, 11587), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.ModelGenerator._create_model"""'], {}), "('modelgen.modelgenerator.ModelGenerator._create_model')\n", (11531, 11587), False, 'from unittest import TestCase, mock\n'), ((11593, 11667), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.ModelGenerator._find_checkpoint_file"""'], {}), "('modelgen.modelgenerator.ModelGenerator._find_checkpoint_file')\n", (11603, 11667), False, 'from unittest import TestCase, mock\n'), ((12122, 12164), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.path"""'], {}), "('modelgen.modelgenerator.path')\n", (12132, 12164), False, 'from unittest import TestCase, mock\n'), ((12170, 12236), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.ModelGenerator._create_model"""'], {}), "('modelgen.modelgenerator.ModelGenerator._create_model')\n", (12180, 12236), False, 'from unittest import TestCase, mock\n'), ((12242, 12316), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.ModelGenerator._find_checkpoint_file"""'], {}), "('modelgen.modelgenerator.ModelGenerator._find_checkpoint_file')\n", (12252, 12316), False, 'from unittest import TestCase, mock\n'), ((12832, 12890), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.Helper.write_to_file"""'], {}), "('modelgen.modelgenerator.Helper.write_to_file')\n", (12842, 12890), False, 'from unittest import TestCase, mock\n'), ((13305, 13347), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.path"""'], {}), "('modelgen.modelgenerator.path')\n", (13315, 13347), False, 'from unittest import TestCase, mock\n'), ((13643, 13685), 'unittest.mock.patch', 'mock.patch', (['"""modelgen.modelgenerator.path"""'], {}), "('modelgen.modelgenerator.path')\n", (13653, 13685), False, 'from unittest import TestCase, mock\n'), ((1446, 1462), 'modelgen.ModelGenerator', 'ModelGenerator', ([], {}), '()\n', (1460, 1462), False, 'from modelgen import ModelGenerator, Base\n'), ((2684, 2700), 'modelgen.ModelGenerator', 'ModelGenerator', ([], {}), '()\n', (2698, 2700), False, 'from modelgen import ModelGenerator, Base\n'), ((4011, 4027), 'modelgen.ModelGenerator', 'ModelGenerator', ([], {}), '()\n', (4025, 4027), False, 'from modelgen import ModelGenerator, Base\n'), ((4892, 4908), 'modelgen.ModelGenerator', 'ModelGenerator', ([], {}), '()\n', (4906, 4908), False, 'from modelgen import ModelGenerator, Base\n'), ((5830, 5846), 'modelgen.ModelGenerator', 'ModelGenerator', ([], {}), '()\n', (5844, 5846), False, 'from modelgen import ModelGenerator, Base\n'), ((6798, 6814), 'modelgen.ModelGenerator', 'ModelGenerator', ([], {}), '()\n', (6812, 6814), False, 'from modelgen import ModelGenerator, Base\n'), ((7804, 7820), 'modelgen.ModelGenerator', 'ModelGenerator', ([], {}), '()\n', (7818, 7820), False, 'from modelgen import ModelGenerator, Base\n'), ((8728, 8744), 'modelgen.ModelGenerator', 'ModelGenerator', ([], {}), '()\n', (8742, 8744), False, 'from modelgen import ModelGenerator, Base\n'), ((9198, 9227), 'modelgen.ModelGenerator', 'ModelGenerator', ([], {'init': '"""./test"""'}), "(init='./test')\n", (9212, 9227), False, 'from modelgen import ModelGenerator, Base\n'), ((10017, 10069), 'modelgen.ModelGenerator', 'ModelGenerator', ([], {'createmodel': '(True)', 'file': '"""./test.yaml"""'}), "(createmodel=True, file='./test.yaml')\n", (10031, 10069), False, 'from modelgen import ModelGenerator, Base\n'), ((10697, 10748), 'modelgen.ModelGenerator', 'ModelGenerator', ([], {'createmodel': '(True)', 'file': '"""./test.yml"""'}), "(createmodel=True, file='./test.yml')\n", (10711, 10748), False, 'from modelgen import ModelGenerator, Base\n'), ((12063, 12115), 'modelgen.ModelGenerator', 'ModelGenerator', ([], {'createmodel': '(True)', 'file': '"""./test.yaml"""'}), "(createmodel=True, file='./test.yaml')\n", (12077, 12115), False, 'from modelgen import ModelGenerator, Base\n'), ((12735, 12787), 'modelgen.ModelGenerator', 'ModelGenerator', ([], {'createmodel': '(True)', 'file': '"""./test.yaml"""'}), "(createmodel=True, file='./test.yaml')\n", (12749, 12787), False, 'from modelgen import ModelGenerator, Base\n'), ((13158, 13174), 'modelgen.ModelGenerator', 'ModelGenerator', ([], {}), '()\n', (13172, 13174), False, 'from modelgen import ModelGenerator, Base\n'), ((13468, 13484), 'modelgen.ModelGenerator', 'ModelGenerator', ([], {}), '()\n', (13482, 13484), False, 'from modelgen import ModelGenerator, Base\n'), ((13810, 13826), 'modelgen.ModelGenerator', 'ModelGenerator', ([], {}), '()\n', (13824, 13826), False, 'from modelgen import ModelGenerator, Base\n'), ((668, 674), 'modelgen.Base', 'Base', ([], {}), '()\n', (672, 674), False, 'from modelgen import ModelGenerator, Base\n'), ((11407, 11458), 'modelgen.ModelGenerator', 'ModelGenerator', ([], {'createmodel': '(True)', 'file': '"""./test.txt"""'}), "(createmodel=True, file='./test.txt')\n", (11421, 11458), False, 'from modelgen import ModelGenerator, Base\n'), ((1614, 1622), 'os.getcwd', 'getcwd', ([], {}), '()\n', (1620, 1622), False, 'from os import getcwd, path\n'), ((1701, 1709), 'os.getcwd', 'getcwd', ([], {}), '()\n', (1707, 1709), False, 'from os import getcwd, path\n'), ((2879, 2887), 'os.getcwd', 'getcwd', ([], {}), '()\n', (2885, 2887), False, 'from os import getcwd, path\n'), ((2968, 2976), 'os.getcwd', 'getcwd', ([], {}), '()\n', (2974, 2976), False, 'from os import getcwd, path\n'), ((4175, 4183), 'os.getcwd', 'getcwd', ([], {}), '()\n', (4181, 4183), False, 'from os import getcwd, path\n')] |
from heisen.config import settings
from jsonrpclib.request import ConnectionPool
def get_rpc_connection():
if settings.CREDENTIALS:
username, passowrd = settings.CREDENTIALS[0]
else:
username = passowrd = None
servers = {'self': []}
for instance_number in range(settings.INSTANCE_COUNT):
servers['self'].append((
'localhost', settings.RPC_PORT + instance_number, username, passowrd
))
servers.update(getattr(settings, 'RPC_SERVERS', {}))
return ConnectionPool(servers, 'heisen', settings.APP_NAME)
rpc_call = get_rpc_connection()
| [
"jsonrpclib.request.ConnectionPool"
] | [((517, 569), 'jsonrpclib.request.ConnectionPool', 'ConnectionPool', (['servers', '"""heisen"""', 'settings.APP_NAME'], {}), "(servers, 'heisen', settings.APP_NAME)\n", (531, 569), False, 'from jsonrpclib.request import ConnectionPool\n')] |
import logging
import discord
import discord.ext.commands as commands
_LOG = logging.getLogger('discord-util').getChild("uwu")
class Uwu(commands.Cog):
@commands.Cog.listener()
async def on_message(self, message: discord.Message):
if message.content.lower().startswith('hello bot') or message.content.lower().startswith('hewwo bot'):
await message.channel.send('Hewwo uwu')
return
if message.content.lower().startswith('good bot'):
await message.add_reaction("\N{FLUSHED FACE}")
| [
"discord.ext.commands.Cog.listener",
"logging.getLogger"
] | [((160, 183), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (181, 183), True, 'import discord.ext.commands as commands\n'), ((79, 112), 'logging.getLogger', 'logging.getLogger', (['"""discord-util"""'], {}), "('discord-util')\n", (96, 112), False, 'import logging\n')] |
import math
import os
from curses_tools import draw_frame, get_frame_size
def _limit(value, min_value, max_value):
"""Limit value by min_value and max_value."""
if value < min_value:
return min_value
if value > max_value:
return max_value
return value
def _apply_acceleration(speed, speed_limit, forward=True):
"""Change speed — accelerate or brake — according to force direction."""
speed_limit = abs(speed_limit)
speed_fraction = speed / speed_limit
# если корабль стоит на месте, дергаем резко
# если корабль уже летит быстро, прибавляем медленно
delta = math.cos(speed_fraction) * 0.75
if forward:
result_speed = speed + delta
else:
result_speed = speed - delta
result_speed = _limit(result_speed, -speed_limit, speed_limit)
# если скорость близка к нулю, то останавливаем корабль
if abs(result_speed) < 0.1:
result_speed = 0
return result_speed
class Rocket:
def __init__(self, canvas, init_x, init_y, delay):
self.canvas = canvas
self.x = init_x
self.y = init_y
frames = []
for n in 1, 2:
with open(os.path.join('rocket', f'rocket_frame_{n}.txt')) as f:
frames.append(f.read())
self.frames = []
for frame in frames:
for i in range(delay):
self.frames.append(frame)
self.current_frame = 0
self.height, self.width = get_frame_size(self.frames[0])
self.row_speed = 0
self.column_speed = 0
def update_speed(self, rows_direction, columns_direction, row_speed_limit=2, column_speed_limit=2, fading=0.9):
"""Update speed smootly to make control handy for player. Return new speed value (row_speed, column_speed)
rows_direction — is a force direction by rows axis. Possible values:
-1 — if force pulls up
0 — if force has no effect
1 — if force pulls down
columns_direction — is a force direction by colums axis. Possible values:
-1 — if force pulls left
0 — if force has no effect
1 — if force pulls right
"""
if rows_direction not in (-1, 0, 1):
raise ValueError(f'Wrong rows_direction value {rows_direction}. Expects -1, 0 or 1.')
if columns_direction not in (-1, 0, 1):
raise ValueError(f'Wrong columns_direction value {columns_direction}. Expects -1, 0 or 1.')
if fading < 0 or fading > 1:
raise ValueError(f'Wrong fading value {fading}. Expects float between 0 and 1.')
# гасим скорость, чтобы корабль останавливался со временем
self.row_speed *= fading
self.column_speed *= fading
row_speed_limit, column_speed_limit = abs(row_speed_limit), abs(column_speed_limit)
if rows_direction != 0:
self.row_speed = _apply_acceleration(self.row_speed, row_speed_limit, rows_direction > 0)
if columns_direction != 0:
self.column_speed = _apply_acceleration(self.column_speed, column_speed_limit, columns_direction > 0)
def update(self):
h, w = self.canvas.getmaxyx()
draw_frame(
self.canvas,
self.y,
self.x,
self.frames[int(self.current_frame)],
negative=True
)
self.x += self.column_speed
if not 0 < self.x < w - self.width:
self.x -= self.column_speed
self.y += self.row_speed
if not 0 < self.y < h - self.height:
self.y -= self.row_speed
self.current_frame = (self.current_frame + 0.5) % len(self.frames)
draw_frame(
self.canvas,
self.y,
self.x,
self.frames[int(self.current_frame)]
)
def destroy(self):
draw_frame(
self.canvas,
self.y,
self.x,
self.frames[int(self.current_frame)],
negative=True
)
| [
"math.cos",
"curses_tools.get_frame_size",
"os.path.join"
] | [((623, 647), 'math.cos', 'math.cos', (['speed_fraction'], {}), '(speed_fraction)\n', (631, 647), False, 'import math\n'), ((1472, 1502), 'curses_tools.get_frame_size', 'get_frame_size', (['self.frames[0]'], {}), '(self.frames[0])\n', (1486, 1502), False, 'from curses_tools import draw_frame, get_frame_size\n'), ((1181, 1228), 'os.path.join', 'os.path.join', (['"""rocket"""', 'f"""rocket_frame_{n}.txt"""'], {}), "('rocket', f'rocket_frame_{n}.txt')\n", (1193, 1228), False, 'import os\n')] |
import secrets
import asyncio
from datetime import datetime, timedelta
import discord
from discord.ext import commands
from database import DatabasePersonality, DatabaseDeck
class Roll(commands.Cog):
def __init__(self, bot):
"""Initial the cog with the bot."""
self.bot = bot
#### Commands ####
@commands.command(description='Roll a random idom and get the possibility to claim it.')
async def roll(self, ctx):
minutes = min_until_next_roll(ctx.guild.id, ctx.author.id)
if minutes != 0:
await ctx.send(f'You cannot roll right now. '
f'Next rolls reset **<t:{int((datetime.now().replace(minute=0) + timedelta(hours=1)).timestamp())}:R>**.')
return
perso = None
id_perso = None
msg_embed = ''
while not perso:
id_perso = DatabasePersonality.get().get_random_perso_id()
perso = DatabasePersonality.get().get_perso_information(id_perso)
# Update roll information in database
DatabaseDeck.get().update_last_roll(ctx.guild.id, ctx.author.id)
user_nb_rolls = DatabaseDeck.get().get_nb_rolls(ctx.guild.id, ctx.author.id)
DatabaseDeck.get().set_nb_rolls(ctx.guild.id, ctx.author.id, user_nb_rolls + 1)
max_rolls = DatabaseDeck.get().get_rolls_per_hour(ctx.guild.id)
if max_rolls - user_nb_rolls - 1 == 2:
msg_embed += f'{ctx.author.name if ctx.author.nick is None else ctx.author.nick}, 2 uses left.\n'
# Get badges information
badges_with_perso = DatabaseDeck.get().get_badges_with(ctx.guild.id, id_perso)
if badges_with_perso:
msg_embed += f'**Required for {",".join([badge["name"] for badge in badges_with_perso])}' \
f' badge{"" if len(badges_with_perso) == 1 else "s"}!**\n'
current_image = DatabaseDeck.get().get_perso_current_image(ctx.guild.id, id_perso)
embed = discord.Embed(title=perso['name'], description=perso['group'], colour=secrets.randbelow(0xffffff))
if current_image:
embed.set_image(url=current_image)
id_owner = DatabaseDeck.get().perso_belongs_to(ctx.guild.id, id_perso)
if id_owner:
owner = ctx.guild.get_member(id_owner)
# Could be None if the user left the server
if owner:
text = f'Belongs to {owner.name if not owner.nick else owner.nick}'
if owner.avatar:
embed.set_footer(icon_url=owner.avatar.url, text=text)
else:
embed.set_footer(text=text)
# Mention users if they wish for this personality
id_members = DatabaseDeck.get().get_wished_by(ctx.guild.id, id_perso)
wish_msg = ''
for id_member in id_members:
member = ctx.guild.get_member(id_member)
# Could be None if the user left the server
if member:
wish_msg += f'{member.mention} '
if wish_msg:
msg_embed += f'Wished by {wish_msg}'
class ClaimButton(discord.ui.View):
def __init__(self, timeout: int):
super().__init__(timeout=timeout)
self.is_claimed = False
self.user_claim = None
@discord.ui.button(label="Claim", emoji='💕', style=discord.ButtonStyle.green)
async def claim(self, button: discord.ui.Button, interaction: discord.Interaction):
self.user_claim = interaction.user
self.is_claimed = True
self.disable()
async def interaction_check(self, interaction: discord.Interaction) -> bool:
time_until_claim = min_until_next_claim(interaction.guild.id, interaction.user.id)
if time_until_claim != 0:
cant_claiming_username = interaction.user.name if interaction.user.nick is None else interaction.user.nick
await interaction.response.send_message(f'{cant_claiming_username}, you can\'t claim right now. '
f'Ready **<t:{int((datetime.now() + timedelta(minutes=time_until_claim)).timestamp())}:R>**.')
return False
return True
def disable(self):
for child in self.children:
child.disabled = True
self.stop()
claim_timeout = DatabaseDeck.get().get_server_configuration(ctx.guild.id)["time_to_claim"]
claim_button_view = ClaimButton(timeout=claim_timeout)
# Cannot claim if perso already claim
if id_owner:
await ctx.send(msg_embed, embed=embed)
return
msg = await ctx.send(msg_embed, embed=embed, view=claim_button_view)
await claim_button_view.wait()
# Timeout
if not claim_button_view.is_claimed:
claim_button_view.disable()
await msg.edit(view=claim_button_view)
else:
user = claim_button_view.user_claim
username = user.name if user.nick is None else user.nick
DatabaseDeck.get().add_to_deck(ctx.guild.id, perso['id'], user.id)
await ctx.send(f'{username} claims {perso["name"]}!')
if user.avatar:
embed.set_footer(icon_url=user.avatar.url, text=f'Belongs to {username}')
else:
embed.set_footer(text=f'Belongs to {username}')
await msg.edit(embed=embed, view=claim_button_view)
if badges_with_perso:
ids_deck = DatabaseDeck.get().get_user_deck(ctx.guild.id, user.id)
msg_badges_progression = ''
for badge in badges_with_perso:
perso_in_badge = DatabaseDeck.get().get_perso_in_badge(badge['id'])
count = sum([id_perso in ids_deck for id_perso in perso_in_badge])
nb_perso = len(perso_in_badge)
if perso['id'] in perso_in_badge and count == nb_perso:
await ctx.send(f'**{user.mention}, you have just unlocked {badge["name"]} badge!**')
msg_badges_progression += f'{badge["name"]} {count}/{nb_perso}\n'
badge_embed = discord.Embed(title=f'Badges progression with {perso["name"]}',
description=msg_badges_progression)
await ctx.send(embed=badge_embed)
#### Utilities functions ####
def min_until_next_claim(id_server, id_user):
"""Return minutes until next claim (0 if the user can claim now)."""
last_claim = DatabaseDeck.get().get_last_claim(id_server, id_user)
time_until_claim = 0
if last_claim:
claim_interval = DatabaseDeck.get().get_server_configuration(id_server)['claim_interval']
date_last_claim = datetime.strptime(last_claim, '%Y-%m-%d %H:%M:%S')
minute_since_last_claim = int(divmod((datetime.now() - date_last_claim).total_seconds(), 60)[0])
if minute_since_last_claim < claim_interval:
time_until_claim = claim_interval - minute_since_last_claim
return time_until_claim
def min_until_next_roll(id_server, id_user):
"""Return minutes until next roll (0 if the user can roll now)."""
last_roll = DatabaseDeck.get().get_last_roll(id_server, id_user)
if not last_roll:
return 0
last_roll = datetime.strptime(last_roll, '%Y-%m-%d %H:%M:%S')
now = datetime.now()
# If a new hour began
if now.date() != last_roll.date() or (now.date() == last_roll.date() and now.hour != last_roll.hour):
DatabaseDeck.get().set_nb_rolls(id_server, id_user, 0)
return 0
max_rolls = DatabaseDeck.get().get_rolls_per_hour(id_server)
user_nb_rolls = DatabaseDeck.get().get_nb_rolls(id_server, id_user)
if user_nb_rolls < max_rolls:
return 0
else:
return 60 - now.minute
| [
"database.DatabaseDeck.get",
"database.DatabasePersonality.get",
"secrets.randbelow",
"datetime.datetime.strptime",
"discord.ui.button",
"datetime.datetime.now",
"datetime.timedelta",
"discord.Embed",
"discord.ext.commands.command"
] | [((330, 422), 'discord.ext.commands.command', 'commands.command', ([], {'description': '"""Roll a random idom and get the possibility to claim it."""'}), "(description=\n 'Roll a random idom and get the possibility to claim it.')\n", (346, 422), False, 'from discord.ext import commands\n'), ((7444, 7493), 'datetime.datetime.strptime', 'datetime.strptime', (['last_roll', '"""%Y-%m-%d %H:%M:%S"""'], {}), "(last_roll, '%Y-%m-%d %H:%M:%S')\n", (7461, 7493), False, 'from datetime import datetime, timedelta\n'), ((7504, 7518), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7516, 7518), False, 'from datetime import datetime, timedelta\n'), ((6889, 6939), 'datetime.datetime.strptime', 'datetime.strptime', (['last_claim', '"""%Y-%m-%d %H:%M:%S"""'], {}), "(last_claim, '%Y-%m-%d %H:%M:%S')\n", (6906, 6939), False, 'from datetime import datetime, timedelta\n'), ((3317, 3393), 'discord.ui.button', 'discord.ui.button', ([], {'label': '"""Claim"""', 'emoji': '"""💕"""', 'style': 'discord.ButtonStyle.green'}), "(label='Claim', emoji='💕', style=discord.ButtonStyle.green)\n", (3334, 3393), False, 'import discord\n'), ((6665, 6683), 'database.DatabaseDeck.get', 'DatabaseDeck.get', ([], {}), '()\n', (6681, 6683), False, 'from database import DatabasePersonality, DatabaseDeck\n'), ((7334, 7352), 'database.DatabaseDeck.get', 'DatabaseDeck.get', ([], {}), '()\n', (7350, 7352), False, 'from database import DatabasePersonality, DatabaseDeck\n'), ((7749, 7767), 'database.DatabaseDeck.get', 'DatabaseDeck.get', ([], {}), '()\n', (7765, 7767), False, 'from database import DatabasePersonality, DatabaseDeck\n'), ((7818, 7836), 'database.DatabaseDeck.get', 'DatabaseDeck.get', ([], {}), '()\n', (7834, 7836), False, 'from database import DatabasePersonality, DatabaseDeck\n'), ((1052, 1070), 'database.DatabaseDeck.get', 'DatabaseDeck.get', ([], {}), '()\n', (1068, 1070), False, 'from database import DatabasePersonality, DatabaseDeck\n'), ((1141, 1159), 'database.DatabaseDeck.get', 'DatabaseDeck.get', ([], {}), '()\n', (1157, 1159), False, 'from database import DatabasePersonality, DatabaseDeck\n'), ((1210, 1228), 'database.DatabaseDeck.get', 'DatabaseDeck.get', ([], {}), '()\n', (1226, 1228), False, 'from database import DatabasePersonality, DatabaseDeck\n'), ((1311, 1329), 'database.DatabaseDeck.get', 'DatabaseDeck.get', ([], {}), '()\n', (1327, 1329), False, 'from database import DatabasePersonality, DatabaseDeck\n'), ((1582, 1600), 'database.DatabaseDeck.get', 'DatabaseDeck.get', ([], {}), '()\n', (1598, 1600), False, 'from database import DatabasePersonality, DatabaseDeck\n'), ((1884, 1902), 'database.DatabaseDeck.get', 'DatabaseDeck.get', ([], {}), '()\n', (1900, 1902), False, 'from database import DatabasePersonality, DatabaseDeck\n'), ((2038, 2065), 'secrets.randbelow', 'secrets.randbelow', (['(16777215)'], {}), '(16777215)\n', (2055, 2065), False, 'import secrets\n'), ((2161, 2179), 'database.DatabaseDeck.get', 'DatabaseDeck.get', ([], {}), '()\n', (2177, 2179), False, 'from database import DatabasePersonality, DatabaseDeck\n'), ((2714, 2732), 'database.DatabaseDeck.get', 'DatabaseDeck.get', ([], {}), '()\n', (2730, 2732), False, 'from database import DatabasePersonality, DatabaseDeck\n'), ((6302, 6406), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Badges progression with {perso[\'name\']}"""', 'description': 'msg_badges_progression'}), '(title=f"Badges progression with {perso[\'name\']}", description\n =msg_badges_progression)\n', (6315, 6406), False, 'import discord\n'), ((7660, 7678), 'database.DatabaseDeck.get', 'DatabaseDeck.get', ([], {}), '()\n', (7676, 7678), False, 'from database import DatabasePersonality, DatabaseDeck\n'), ((871, 896), 'database.DatabasePersonality.get', 'DatabasePersonality.get', ([], {}), '()\n', (894, 896), False, 'from database import DatabasePersonality, DatabaseDeck\n'), ((939, 964), 'database.DatabasePersonality.get', 'DatabasePersonality.get', ([], {}), '()\n', (962, 964), False, 'from database import DatabasePersonality, DatabaseDeck\n'), ((4475, 4493), 'database.DatabaseDeck.get', 'DatabaseDeck.get', ([], {}), '()\n', (4491, 4493), False, 'from database import DatabasePersonality, DatabaseDeck\n'), ((5167, 5185), 'database.DatabaseDeck.get', 'DatabaseDeck.get', ([], {}), '()\n', (5183, 5185), False, 'from database import DatabasePersonality, DatabaseDeck\n'), ((6790, 6808), 'database.DatabaseDeck.get', 'DatabaseDeck.get', ([], {}), '()\n', (6806, 6808), False, 'from database import DatabasePersonality, DatabaseDeck\n'), ((5627, 5645), 'database.DatabaseDeck.get', 'DatabaseDeck.get', ([], {}), '()\n', (5643, 5645), False, 'from database import DatabasePersonality, DatabaseDeck\n'), ((5812, 5830), 'database.DatabaseDeck.get', 'DatabaseDeck.get', ([], {}), '()\n', (5828, 5830), False, 'from database import DatabasePersonality, DatabaseDeck\n'), ((6986, 7000), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6998, 7000), False, 'from datetime import datetime, timedelta\n'), ((691, 709), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (700, 709), False, 'from datetime import datetime, timedelta\n'), ((656, 670), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (668, 670), False, 'from datetime import datetime, timedelta\n'), ((4166, 4180), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4178, 4180), False, 'from datetime import datetime, timedelta\n'), ((4183, 4218), 'datetime.timedelta', 'timedelta', ([], {'minutes': 'time_until_claim'}), '(minutes=time_until_claim)\n', (4192, 4218), False, 'from datetime import datetime, timedelta\n')] |
from __future__ import print_function
import time
from sr.robot import *
SEARCHING = "SEARCHING"
DRIVING = "DRIVING"
R = Robot()
def drive(speed, seconds):
R.motors[0].m0.power = speed
R.motors[0].m1.power = speed
time.sleep(seconds)
R.motors[0].m0.power = 0
R.motors[0].m1.power = 0
def turn(speed, seconds):
R.motors[0].m0.power = speed
R.motors[0].m1.power = -speed
time.sleep(seconds)
R.motors[0].m0.power = 0
R.motors[0].m1.power = 0
state = SEARCHING
def get_gold_tokens():
gold_tokens = []
for token in R.see():
if token.info.marker_type is MARKER_TOKEN_GOLD:
gold_tokens.append(token)
# Sort list with the closest token first
gold_tokens.sort(key=lambda m: m.dist)
return gold_tokens
while True:
if state == SEARCHING:
print("Searching for gold tokens...")
tokens = get_gold_tokens()
print(tokens)
if len(tokens) > 0:
m = tokens[0]
# TODO: Pick the closest token, not just any token.
print("Token sighted. {0} is {1}m away, bearing {2} degrees." \
.format(m.info.offset, m.dist, m.rot_y))
state = DRIVING
else:
print("Can't see anything.")
turn(25, 0.3)
time.sleep(0.2)
elif state == DRIVING:
print("Aligning...")
tokens = get_gold_tokens()
if len(tokens) == 0:
state = SEARCHING
else:
m = tokens[0]
if m.dist < 0.4:
print("Found it!")
if R.grab():
print("Gotcha!")
turn(50, 0.5)
drive(50, 1)
R.release()
drive(-50, 0.5)
else:
print("Aww, I'm not close enough.")
exit()
elif -15 <= m.rot_y <= 15:
print("Ah, that'll do.")
drive(50, 0.5)
elif m.rot_y < -15:
print("Left a bit...")
turn(-12.5, 0.5)
elif m.rot_y > 15:
print("Right a bit...")
turn(12.5, 0.5)
| [
"time.sleep"
] | [((231, 250), 'time.sleep', 'time.sleep', (['seconds'], {}), '(seconds)\n', (241, 250), False, 'import time\n'), ((407, 426), 'time.sleep', 'time.sleep', (['seconds'], {}), '(seconds)\n', (417, 426), False, 'import time\n'), ((1300, 1315), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (1310, 1315), False, 'import time\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ======================================================================
# Copyright 2017 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ======================================================================
import pytest
from supvisors.mainloop import *
from supvisors.ttypes import AddressStates
from supvisors.utils import DeferredRequestHeaders
from threading import Thread
from unittest.mock import call, patch, Mock, DEFAULT
from .base import DummyRpcInterface
@pytest.fixture
def mocked_rpc():
""" Fixture for the instance to test. """
rpc_patch = patch('supvisors.mainloop.getRPCInterface')
mocked_rpc = rpc_patch.start()
yield mocked_rpc
rpc_patch.stop()
@pytest.fixture
def main_loop(supvisors):
return SupvisorsMainLoop(supvisors)
def test_creation(supvisors, mocked_rpc, main_loop):
""" Test the values set at construction. """
assert isinstance(main_loop, Thread)
assert main_loop.supvisors is supvisors
assert not main_loop.stop_event.is_set()
assert main_loop.env == {'SUPERVISOR_SERVER_URL': 'http://127.0.0.1:65000',
'SUPERVISOR_USERNAME': '',
'SUPERVISOR_PASSWORD': ''}
assert mocked_rpc.call_args_list == [call('localhost', main_loop.env)]
def test_stopping(mocked_rpc, main_loop):
""" Test the get_loop method. """
assert not main_loop.stopping()
main_loop.stop_event.set()
assert main_loop.stopping()
def test_stop(mocker, mocked_rpc, main_loop):
""" Test the stopping of the main loop thread. """
mocked_join = mocker.patch.object(main_loop, 'join')
# try to stop main loop before it is started
main_loop.stop()
assert not main_loop.stop_event.is_set()
assert not mocked_join.called
# stop main loop when alive
mocker.patch.object(main_loop, 'is_alive', return_value=True)
main_loop.stop()
assert main_loop.stop_event.is_set()
assert mocked_join.call_count == 1
def test_run(mocker, main_loop):
""" Test the running of the main loop thread. """
mocked_evt = mocker.patch('supvisors.mainloop.SupvisorsMainLoop.check_events')
mocked_req = mocker.patch('supvisors.mainloop.SupvisorsMainLoop.check_requests')
mocked_poll = mocker.patch('supvisors.supvisorszmq.SupvisorsZmq.poll')
# patch one loops
mocker.patch.object(main_loop, 'stopping', side_effect=[False, False, True])
main_loop.run()
# test that poll was called once
assert mocked_poll.call_args_list == [call()]
# test that check_requests was called once
assert mocked_evt.call_count == 1
# test that check_events was called once
assert mocked_req.call_count == 1
def test_check_events(mocker, main_loop):
""" Test the processing of the events received. """
mocked_send = mocker.patch('supvisors.mainloop.SupvisorsMainLoop.send_remote_comm_event')
# prepare context
mocked_sockets = Mock(**{'check_subscriber.return_value': None})
# test with empty socks
main_loop.check_events(mocked_sockets, 'poll result')
assert mocked_sockets.check_subscriber.call_args_list == [call('poll result')]
assert not mocked_send.called
# reset mocks
mocked_sockets.check_subscriber.reset_mock()
# test with appropriate socks but with exception
mocked_sockets.check_subscriber.return_value = 'a message'
main_loop.check_events(mocked_sockets, 'poll result')
assert mocked_sockets.check_subscriber.call_args_list == [call('poll result')]
assert mocked_send.call_args_list == [call('event', '"a message"')]
def test_check_requests(mocker, main_loop):
""" Test the processing of the requests received. """
mocked_send = mocker.patch('supvisors.mainloop.SupvisorsMainLoop.send_request')
# prepare context
mocked_sockets = Mock(**{'check_puller.return_value': None})
# test with empty socks
main_loop.check_requests(mocked_sockets, 'poll result')
assert mocked_sockets.check_puller.call_args_list == [call('poll result')]
assert not mocked_sockets.disconnect_subscriber.called
assert not mocked_send.called
# reset mocks
mocked_sockets.check_puller.reset_mock()
# test with appropriate socks but with exception
mocked_sockets.check_puller.return_value = DeferredRequestHeaders.ISOLATE_NODES, 'a message'
main_loop.check_requests(mocked_sockets, 'poll result')
assert mocked_sockets.check_puller.call_args_list == [call('poll result')]
assert mocked_sockets.disconnect_subscriber.call_args_list == [call('a message')]
assert not mocked_send.called
# reset mocks
mocked_sockets.check_puller.reset_mock()
mocked_sockets.disconnect_subscriber.reset_mock()
# test with appropriate socks but with exception
mocked_sockets.check_puller.return_value = 'event', 'a message'
main_loop.check_requests(mocked_sockets, 'poll result')
assert mocked_sockets.check_puller.call_args_list == [call('poll result')]
assert not mocked_sockets.disconnect_subscriber.called
assert mocked_send.call_args_list == [call('event', 'a message')]
def test_check_node(mocker, mocked_rpc, main_loop):
""" Test the protocol to get the processes handled by a remote Supervisor. """
mocker.patch('supvisors.mainloop.stderr')
mocked_evt = mocker.patch('supvisors.mainloop.SupvisorsMainLoop.send_remote_comm_event')
# test rpc error: no event is sent to local Supervisor
mocked_rpc.side_effect = ValueError
main_loop.check_node('10.0.0.1')
assert mocked_rpc.call_count == 2
assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
assert mocked_evt.call_count == 0
# test with a mocked rpc interface
dummy_info = [{'name': 'proc', 'group': 'appli', 'state': 10, 'start': 5,
'now': 10, 'pid': 1234, 'spawnerr': ''}]
rpc_intf = DummyRpcInterface()
mocked_all = rpc_intf.supervisor.getAllProcessInfo = Mock()
mocked_local = rpc_intf.supvisors.get_all_local_process_info = Mock(return_value=dummy_info)
mocked_addr = rpc_intf.supvisors.get_address_info = Mock()
rpc_intf.supvisors.get_master_address = Mock(return_value='10.0.0.5')
rpc_intf.supvisors.get_supvisors_state = Mock(return_value={'statename': 'RUNNING'})
mocked_rpc.return_value = rpc_intf
mocked_rpc.side_effect = None
mocked_rpc.reset_mock()
# test with address in isolation
for state in [AddressStates.ISOLATING, AddressStates.ISOLATED]:
mocked_addr.return_value = {'statecode': state}
main_loop.check_node('10.0.0.1')
assert mocked_rpc.call_args_list == [call('10.0.0.1', main_loop.env)]
expected = 'node_name:10.0.0.1 authorized:False master_node_name:10.0.0.5 supvisors_state:RUNNING'
assert mocked_evt.call_args_list == [call('auth', expected)]
assert not mocked_all.called
# reset counters
mocked_evt.reset_mock()
mocked_rpc.reset_mock()
# test with address not in isolation
for state in [AddressStates.UNKNOWN, AddressStates.CHECKING, AddressStates.RUNNING, AddressStates.SILENT]:
mocked_addr.return_value = {'statecode': state}
main_loop.check_node('10.0.0.1')
assert mocked_rpc.call_count == 1
assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
assert mocked_evt.call_count == 2
assert mocked_local.call_count == 1
# reset counters
mocked_evt.reset_mock()
mocked_local.reset_mock()
mocked_rpc.reset_mock()
def test_start_process(mocker, mocked_rpc, main_loop):
""" Test the protocol to start a process handled by a remote Supervisor. """
mocker.patch('supvisors.mainloop.stderr')
# test rpc error
mocked_rpc.side_effect = KeyError
main_loop.start_process('10.0.0.1', 'dummy_process', 'extra args')
assert mocked_rpc.call_count == 2
assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
# test with a mocked rpc interface
rpc_intf = DummyRpcInterface()
mocked_rpc.side_effect = None
mocked_rpc.return_value = rpc_intf
mocked_supvisors = mocker.patch.object(rpc_intf.supvisors, 'start_args')
main_loop.start_process('10.0.0.1', 'dummy_process', 'extra args')
assert mocked_rpc.call_count == 3
assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
assert mocked_supvisors.call_count == 1
assert mocked_supvisors.call_args == call('dummy_process', 'extra args', False)
def test_stop_process(mocker, mocked_rpc, main_loop):
""" Test the protocol to stop a process handled by a remote Supervisor. """
mocker.patch('supvisors.mainloop.stderr')
# test rpc error
mocked_rpc.side_effect = ConnectionResetError
main_loop.stop_process('10.0.0.1', 'dummy_process')
assert mocked_rpc.call_count == 2
assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
# test with a mocked rpc interface
rpc_intf = DummyRpcInterface()
mocked_rpc.side_effect = None
mocked_rpc.return_value = rpc_intf
mocked_supervisor = mocker.patch.object(rpc_intf.supervisor, 'stopProcess')
main_loop.stop_process('10.0.0.1', 'dummy_process')
assert mocked_rpc.call_count == 3
assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
assert mocked_supervisor.call_count == 1
assert mocked_supervisor.call_args == call('dummy_process', False)
def test_restart(mocker, mocked_rpc, main_loop):
""" Test the protocol to restart a remote Supervisor. """
mocker.patch('supvisors.mainloop.stderr')
# test rpc error
mocked_rpc.side_effect = OSError
main_loop.restart('10.0.0.1')
assert mocked_rpc.call_count == 2
assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
# test with a mocked rpc interface
rpc_intf = DummyRpcInterface()
mocked_rpc.side_effect = None
mocked_rpc.return_value = rpc_intf
mocked_supervisor = mocker.patch.object(rpc_intf.supervisor, 'restart')
main_loop.restart('10.0.0.1')
assert mocked_rpc.call_count == 3
assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
assert mocked_supervisor.call_count == 1
assert mocked_supervisor.call_args == call()
def test_shutdown(mocker, mocked_rpc, main_loop):
""" Test the protocol to shutdown a remote Supervisor. """
mocker.patch('supvisors.mainloop.stderr')
# test rpc error
mocked_rpc.side_effect = RPCError(12)
main_loop.shutdown('10.0.0.1')
assert mocked_rpc.call_count == 2
assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
# test with a mocked rpc interface
rpc_intf = DummyRpcInterface()
mocked_rpc.side_effect = None
mocked_rpc.return_value = rpc_intf
mocked_shutdown = mocker.patch.object(rpc_intf.supervisor, 'shutdown')
main_loop.shutdown('10.0.0.1')
assert mocked_rpc.call_count == 3
assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
assert mocked_shutdown.call_count == 1
assert mocked_shutdown.call_args == call()
def test_restart_all(mocker, mocked_rpc, main_loop):
""" Test the protocol to restart Supvisors. """
mocker.patch('supvisors.mainloop.stderr')
# test rpc error
mocked_rpc.side_effect = OSError
main_loop.restart_all('10.0.0.1')
assert mocked_rpc.call_count == 2
assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
# test with a mocked rpc interface
rpc_intf = DummyRpcInterface()
mocked_rpc.side_effect = None
mocked_rpc.return_value = rpc_intf
mocked_supervisor = mocker.patch.object(rpc_intf.supvisors, 'restart')
main_loop.restart_all('10.0.0.1')
assert mocked_rpc.call_count == 3
assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
assert mocked_supervisor.call_count == 1
assert mocked_supervisor.call_args == call()
def test_shutdown_all(mocker, mocked_rpc, main_loop):
""" Test the protocol to shutdown Supvisors. """
mocker.patch('supvisors.mainloop.stderr')
# test rpc error
mocked_rpc.side_effect = RPCError(12)
main_loop.shutdown_all('10.0.0.1')
assert mocked_rpc.call_count == 2
assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
# test with a mocked rpc interface
rpc_intf = DummyRpcInterface()
mocked_rpc.side_effect = None
mocked_rpc.return_value = rpc_intf
mocked_shutdown = mocker.patch.object(rpc_intf.supvisors, 'shutdown')
main_loop.shutdown_all('10.0.0.1')
assert mocked_rpc.call_count == 3
assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
assert mocked_shutdown.call_count == 1
assert mocked_shutdown.call_args == call()
def test_comm_event(mocker, mocked_rpc, main_loop):
""" Test the protocol to send a comm event to the local Supervisor. """
mocker.patch('supvisors.mainloop.stderr')
# test rpc error
mocker.patch.object(main_loop.proxy.supervisor, 'sendRemoteCommEvent', side_effect=RPCError(100))
main_loop.send_remote_comm_event('event type', 'event data')
# test with a mocked rpc interface
mocked_supervisor = mocker.patch.object(main_loop.proxy.supervisor, 'sendRemoteCommEvent')
main_loop.send_remote_comm_event('event type', 'event data')
assert mocked_supervisor.call_args_list == [call('event type', 'event data')]
def check_call(main_loop, mocked_loop, method_name, request, args):
""" Perform a main loop request and check what has been called. """
# send request
main_loop.send_request(request.value, args)
# test mocked main loop
for key, mocked in mocked_loop.items():
if key == method_name:
assert mocked.call_count == 1
assert mocked.call_args == call(*args)
mocked.reset_mock()
else:
assert not mocked.called
def test_send_request(mocker, main_loop):
""" Test the execution of a deferred Supervisor request. """
# patch main loop subscriber
mocked_loop = mocker.patch.multiple(main_loop, check_node=DEFAULT,
start_process=DEFAULT, stop_process=DEFAULT,
restart=DEFAULT, shutdown=DEFAULT,
restart_all=DEFAULT, shutdown_all=DEFAULT)
# test check address
check_call(main_loop, mocked_loop, 'check_node',
DeferredRequestHeaders.CHECK_NODE, ('10.0.0.2',))
# test start process
check_call(main_loop, mocked_loop, 'start_process',
DeferredRequestHeaders.START_PROCESS, ('10.0.0.2', 'dummy_process', 'extra args'))
# test stop process
check_call(main_loop, mocked_loop, 'stop_process',
DeferredRequestHeaders.STOP_PROCESS, ('10.0.0.2', 'dummy_process'))
# test restart
check_call(main_loop, mocked_loop, 'restart',
DeferredRequestHeaders.RESTART, ('10.0.0.2',))
# test shutdown
check_call(main_loop, mocked_loop, 'shutdown',
DeferredRequestHeaders.SHUTDOWN, ('10.0.0.2',))
# test restart_all
check_call(main_loop, mocked_loop, 'restart_all',
DeferredRequestHeaders.RESTART_ALL, ('10.0.0.2',))
# test shutdown
check_call(main_loop, mocked_loop, 'shutdown_all',
DeferredRequestHeaders.SHUTDOWN_ALL, ('10.0.0.2',))
| [
"unittest.mock.call",
"unittest.mock.patch",
"unittest.mock.Mock"
] | [((1120, 1163), 'unittest.mock.patch', 'patch', (['"""supvisors.mainloop.getRPCInterface"""'], {}), "('supvisors.mainloop.getRPCInterface')\n", (1125, 1163), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((3462, 3509), 'unittest.mock.Mock', 'Mock', ([], {}), "(**{'check_subscriber.return_value': None})\n", (3466, 3509), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((4340, 4383), 'unittest.mock.Mock', 'Mock', ([], {}), "(**{'check_puller.return_value': None})\n", (4344, 4383), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((6446, 6452), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (6450, 6452), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((6520, 6549), 'unittest.mock.Mock', 'Mock', ([], {'return_value': 'dummy_info'}), '(return_value=dummy_info)\n', (6524, 6549), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((6606, 6612), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (6610, 6612), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((6657, 6686), 'unittest.mock.Mock', 'Mock', ([], {'return_value': '"""10.0.0.5"""'}), "(return_value='10.0.0.5')\n", (6661, 6686), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((6732, 6775), 'unittest.mock.Mock', 'Mock', ([], {'return_value': "{'statename': 'RUNNING'}"}), "(return_value={'statename': 'RUNNING'})\n", (6736, 6775), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((6107, 6138), 'unittest.mock.call', 'call', (['"""10.0.0.1"""', 'main_loop.env'], {}), "('10.0.0.1', main_loop.env)\n", (6111, 6138), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((8417, 8448), 'unittest.mock.call', 'call', (['"""10.0.0.1"""', 'main_loop.env'], {}), "('10.0.0.1', main_loop.env)\n", (8421, 8448), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((8817, 8848), 'unittest.mock.call', 'call', (['"""10.0.0.1"""', 'main_loop.env'], {}), "('10.0.0.1', main_loop.env)\n", (8821, 8848), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((8934, 8976), 'unittest.mock.call', 'call', (['"""dummy_process"""', '"""extra args"""', '(False)'], {}), "('dummy_process', 'extra args', False)\n", (8938, 8976), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((9359, 9390), 'unittest.mock.call', 'call', (['"""10.0.0.1"""', 'main_loop.env'], {}), "('10.0.0.1', main_loop.env)\n", (9363, 9390), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((9747, 9778), 'unittest.mock.call', 'call', (['"""10.0.0.1"""', 'main_loop.env'], {}), "('10.0.0.1', main_loop.env)\n", (9751, 9778), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((9866, 9894), 'unittest.mock.call', 'call', (['"""dummy_process"""', '(False)'], {}), "('dummy_process', False)\n", (9870, 9894), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((10219, 10250), 'unittest.mock.call', 'call', (['"""10.0.0.1"""', 'main_loop.env'], {}), "('10.0.0.1', main_loop.env)\n", (10223, 10250), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((10581, 10612), 'unittest.mock.call', 'call', (['"""10.0.0.1"""', 'main_loop.env'], {}), "('10.0.0.1', main_loop.env)\n", (10585, 10612), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((10700, 10706), 'unittest.mock.call', 'call', ([], {}), '()\n', (10704, 10706), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((11039, 11070), 'unittest.mock.call', 'call', (['"""10.0.0.1"""', 'main_loop.env'], {}), "('10.0.0.1', main_loop.env)\n", (11043, 11070), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((11401, 11432), 'unittest.mock.call', 'call', (['"""10.0.0.1"""', 'main_loop.env'], {}), "('10.0.0.1', main_loop.env)\n", (11405, 11432), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((11516, 11522), 'unittest.mock.call', 'call', ([], {}), '()\n', (11520, 11522), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((11845, 11876), 'unittest.mock.call', 'call', (['"""10.0.0.1"""', 'main_loop.env'], {}), "('10.0.0.1', main_loop.env)\n", (11849, 11876), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((12210, 12241), 'unittest.mock.call', 'call', (['"""10.0.0.1"""', 'main_loop.env'], {}), "('10.0.0.1', main_loop.env)\n", (12214, 12241), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((12329, 12335), 'unittest.mock.call', 'call', ([], {}), '()\n', (12333, 12335), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((12666, 12697), 'unittest.mock.call', 'call', (['"""10.0.0.1"""', 'main_loop.env'], {}), "('10.0.0.1', main_loop.env)\n", (12670, 12697), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((13031, 13062), 'unittest.mock.call', 'call', (['"""10.0.0.1"""', 'main_loop.env'], {}), "('10.0.0.1', main_loop.env)\n", (13035, 13062), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((13146, 13152), 'unittest.mock.call', 'call', ([], {}), '()\n', (13150, 13152), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((1792, 1824), 'unittest.mock.call', 'call', (['"""localhost"""', 'main_loop.env'], {}), "('localhost', main_loop.env)\n", (1796, 1824), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((3049, 3055), 'unittest.mock.call', 'call', ([], {}), '()\n', (3053, 3055), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((3658, 3677), 'unittest.mock.call', 'call', (['"""poll result"""'], {}), "('poll result')\n", (3662, 3677), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((4016, 4035), 'unittest.mock.call', 'call', (['"""poll result"""'], {}), "('poll result')\n", (4020, 4035), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((4079, 4107), 'unittest.mock.call', 'call', (['"""event"""', '""""a message\\""""'], {}), '(\'event\', \'"a message"\')\n', (4083, 4107), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((4530, 4549), 'unittest.mock.call', 'call', (['"""poll result"""'], {}), "('poll result')\n", (4534, 4549), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((4975, 4994), 'unittest.mock.call', 'call', (['"""poll result"""'], {}), "('poll result')\n", (4979, 4994), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((5063, 5080), 'unittest.mock.call', 'call', (['"""a message"""'], {}), "('a message')\n", (5067, 5080), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((5472, 5491), 'unittest.mock.call', 'call', (['"""poll result"""'], {}), "('poll result')\n", (5476, 5491), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((5594, 5620), 'unittest.mock.call', 'call', (['"""event"""', '"""a message"""'], {}), "('event', 'a message')\n", (5598, 5620), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((7789, 7820), 'unittest.mock.call', 'call', (['"""10.0.0.1"""', 'main_loop.env'], {}), "('10.0.0.1', main_loop.env)\n", (7793, 7820), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((13764, 13796), 'unittest.mock.call', 'call', (['"""event type"""', '"""event data"""'], {}), "('event type', 'event data')\n", (13768, 13796), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((7124, 7155), 'unittest.mock.call', 'call', (['"""10.0.0.1"""', 'main_loop.env'], {}), "('10.0.0.1', main_loop.env)\n", (7128, 7155), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((7309, 7331), 'unittest.mock.call', 'call', (['"""auth"""', 'expected'], {}), "('auth', expected)\n", (7313, 7331), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n'), ((14191, 14202), 'unittest.mock.call', 'call', (['*args'], {}), '(*args)\n', (14195, 14202), False, 'from unittest.mock import call, patch, Mock, DEFAULT\n')] |
#!/usr/bin/env python
import rospy
from geometry_msgs.msg import Twist, TransformStamped
from std_msgs.msg import String
from enum import Enum
import tf2_ros
import math
class mission_states(Enum):
STOP = -1
SUBMERGE = 0
MOVE_TO_GATE = 1
MOVE_THROUGH_GATE = 2
def checkTolerance(current, wanted):
tolerance = 0.1
return current < wanted + tolerance and current > wanted - tolerance
def mission():
rospy.init_node('mission_controller', anonymous=True)
state = mission_states.SUBMERGE
goal_pub = rospy.Publisher('wolf_control/goal', Twist, queue_size=10)
state_pub = rospy.Publisher('wolf_control/mission_state', String, queue_size=10)
tf_buffer = tf2_ros.Buffer()
listener = tf2_ros.TransformListener(tf_buffer)
rate = rospy.Rate(10) # 10hz
submerge_depth = -1.5
timer = 0
saved_goal = None
while not rospy.is_shutdown():
try:
odom: TransformStamped = tf_buffer.lookup_transform("odom", "base_link", rospy.Time(0))
if state == mission_states.STOP:
goal = Twist()
goal.linear.z = submerge_depth
goal_pub.publish(goal)
if state == mission_states.SUBMERGE:
goal = Twist()
goal.linear.z = submerge_depth
goal.angular.z = odom.transform.rotation.z
goal_pub.publish(goal)
if checkTolerance(odom.transform.translation.z, submerge_depth):
state = mission_states.MOVE_TO_GATE
timer = 0
saved_goal = None
elif state == mission_states.MOVE_TO_GATE:
gate_vector: TransformStamped = tf_buffer.lookup_transform("odom", "gate", rospy.Time(0))
goal = Twist()
goal.linear.x = gate_vector.transform.translation.x * 0.1
goal.linear.y = gate_vector.transform.translation.y * 0.1
goal.linear.z = submerge_depth
goal_pub.publish(goal)
if timer > 80:
saved_goal = goal
state = mission_states.MOVE_THROUGH_GATE
timer = 0
elif state == mission_states.MOVE_THROUGH_GATE:
goal_pub.publish(saved_goal)
if timer > 170:
timer = 0
saved_goal = None
state = mission_states.STOP
timer += 1
state_pub.publish(state.name)
except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException):
rospy.logerr("mission_code: error finding frame")
rate.sleep()
if __name__ == '__main__':
try:
mission()
except rospy.ROSInterruptException:
pass
| [
"rospy.logerr",
"rospy.is_shutdown",
"tf2_ros.TransformListener",
"rospy.init_node",
"geometry_msgs.msg.Twist",
"tf2_ros.Buffer",
"rospy.Time",
"rospy.Rate",
"rospy.Publisher"
] | [((436, 489), 'rospy.init_node', 'rospy.init_node', (['"""mission_controller"""'], {'anonymous': '(True)'}), "('mission_controller', anonymous=True)\n", (451, 489), False, 'import rospy\n'), ((541, 599), 'rospy.Publisher', 'rospy.Publisher', (['"""wolf_control/goal"""', 'Twist'], {'queue_size': '(10)'}), "('wolf_control/goal', Twist, queue_size=10)\n", (556, 599), False, 'import rospy\n'), ((616, 684), 'rospy.Publisher', 'rospy.Publisher', (['"""wolf_control/mission_state"""', 'String'], {'queue_size': '(10)'}), "('wolf_control/mission_state', String, queue_size=10)\n", (631, 684), False, 'import rospy\n'), ((701, 717), 'tf2_ros.Buffer', 'tf2_ros.Buffer', ([], {}), '()\n', (715, 717), False, 'import tf2_ros\n'), ((733, 769), 'tf2_ros.TransformListener', 'tf2_ros.TransformListener', (['tf_buffer'], {}), '(tf_buffer)\n', (758, 769), False, 'import tf2_ros\n'), ((781, 795), 'rospy.Rate', 'rospy.Rate', (['(10)'], {}), '(10)\n', (791, 795), False, 'import rospy\n'), ((880, 899), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (897, 899), False, 'import rospy\n'), ((999, 1012), 'rospy.Time', 'rospy.Time', (['(0)'], {}), '(0)\n', (1009, 1012), False, 'import rospy\n'), ((1082, 1089), 'geometry_msgs.msg.Twist', 'Twist', ([], {}), '()\n', (1087, 1089), False, 'from geometry_msgs.msg import Twist, TransformStamped\n'), ((1248, 1255), 'geometry_msgs.msg.Twist', 'Twist', ([], {}), '()\n', (1253, 1255), False, 'from geometry_msgs.msg import Twist, TransformStamped\n'), ((2637, 2686), 'rospy.logerr', 'rospy.logerr', (['"""mission_code: error finding frame"""'], {}), "('mission_code: error finding frame')\n", (2649, 2686), False, 'import rospy\n'), ((1800, 1807), 'geometry_msgs.msg.Twist', 'Twist', ([], {}), '()\n', (1805, 1807), False, 'from geometry_msgs.msg import Twist, TransformStamped\n'), ((1762, 1775), 'rospy.Time', 'rospy.Time', (['(0)'], {}), '(0)\n', (1772, 1775), False, 'import rospy\n')] |
#!/usr/bin/python
# Copyright 2021 Northern.tech AS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from common import (
init_users,
init_users_f,
init_users_mt,
init_users_mt_f,
cli,
api_client_mgmt,
mongo,
make_auth,
)
import bravado
import pytest
import tenantadm
class TestManagementApiPostUsersBase:
def _do_test_ok(self, api_client_mgmt, init_users, new_user, tenant_id=None):
auth = None
if tenant_id is not None:
auth = make_auth("foo", tenant_id)
_, r = api_client_mgmt.create_user(new_user, auth)
assert r.status_code == 201
users = api_client_mgmt.get_users(auth)
assert len(users) == len(init_users) + 1
found_user = [u for u in users if u.email == new_user["email"]]
assert len(found_user) == 1
found_user = found_user[0]
def _do_test_fail_unprocessable_entity(
self, api_client_mgmt, init_users, new_user, tenant_id=None
):
auth = None
if tenant_id is not None:
auth = make_auth("foo", tenant_id)
try:
api_client_mgmt.create_user(new_user, auth)
except bravado.exception.HTTPError as e:
assert e.response.status_code == 422
class TestManagementApiPostUsers(TestManagementApiPostUsersBase):
def test_ok(self, api_client_mgmt, init_users):
new_user = {"email": "<EMAIL>", "password": "<PASSWORD>"}
self._do_test_ok(api_client_mgmt, init_users, new_user)
def test_fail_malformed_body(self, api_client_mgmt):
new_user = {"foo": "bar"}
try:
api_client_mgmt.create_user(new_user)
except bravado.exception.HTTPError as e:
assert e.response.status_code == 400
def test_fail_no_password(self, api_client_mgmt):
new_user = {"email": "foobar"}
try:
api_client_mgmt.create_user(new_user)
except bravado.exception.HTTPError as e:
assert e.response.status_code == 400
def test_fail_no_email(self, api_client_mgmt):
new_user = {"password": "<PASSWORD>"}
try:
api_client_mgmt.create_user(new_user)
except bravado.exception.HTTPError as e:
assert e.response.status_code == 400
def test_fail_not_an_email(self, api_client_mgmt):
new_user = {"email": "foobar", "password": "<PASSWORD>"}
try:
api_client_mgmt.create_user(new_user)
except bravado.exception.HTTPError as e:
assert e.response.status_code == 400
def test_fail_pwd_too_short(self, api_client_mgmt):
new_user = {"email": "<EMAIL>", "password": "<PASSWORD>"}
try:
api_client_mgmt.create_user(new_user)
except bravado.exception.HTTPError as e:
assert e.response.status_code == 422
def test_fail_duplicate_email(self, api_client_mgmt, init_users):
new_user = {"email": "<EMAIL>", "password": "<PASSWORD>"}
self._do_test_fail_unprocessable_entity(api_client_mgmt, init_users, new_user)
class TestManagementApiPostUsersEnterprise(TestManagementApiPostUsersBase):
@pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
def test_ok(self, tenant_id, api_client_mgmt, init_users_mt):
new_user = {"email": "<EMAIL>", "password": "<PASSWORD>"}
with tenantadm.run_fake_create_user(new_user):
self._do_test_ok(
api_client_mgmt, init_users_mt[tenant_id], new_user, tenant_id
)
@pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
def test_fail_duplicate_email(self, tenant_id, api_client_mgmt, init_users_mt):
new_user = {"email": "<EMAIL>", "password": "<PASSWORD>"}
with tenantadm.run_fake_create_user(new_user, 422):
self._do_test_fail_unprocessable_entity(
api_client_mgmt, init_users_mt[tenant_id], new_user, tenant_id
)
class TestManagementApiGetUserBase:
def _do_test_ok(self, api_client_mgmt, init_users, tenant_id=None):
auth = None
if tenant_id is not None:
auth = make_auth("foo", tenant_id)
for u in init_users:
found = api_client_mgmt.get_user(u.id, auth)
assert found.id == u.id
assert found.email == u.email
assert found.created_ts == u.created_ts
assert found.updated_ts == u.updated_ts
def _do_test_fail_not_found(self, api_client_mgmt, init_users, tenant_id=None):
auth = None
if tenant_id is not None:
auth = make_auth("foo", tenant_id)
try:
not_found = api_client_mgmt.get_user("madeupid", auth)
except bravado.exception.HTTPError as e:
assert e.response.status_code == 404
class TestManagementApiGetUser(TestManagementApiGetUserBase):
def test_ok(self, api_client_mgmt, init_users):
self._do_test_ok(api_client_mgmt, init_users)
def test_fail_not_found(self, api_client_mgmt, init_users):
self._do_test_fail_not_found(api_client_mgmt, init_users)
class TestManagementApiGetUserEnterprise(TestManagementApiGetUserBase):
@pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
def test_ok(self, tenant_id, api_client_mgmt, init_users_mt):
self._do_test_ok(api_client_mgmt, init_users_mt[tenant_id], tenant_id)
@pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
def test_fail_not_found(self, tenant_id, api_client_mgmt, init_users_mt):
self._do_test_fail_not_found(
api_client_mgmt, init_users_mt[tenant_id], tenant_id
)
class TestManagementApiGetUsersBase:
def _do_test_ok(self, api_client_mgmt, init_users, tenant_id=None):
auth = None
if tenant_id is not None:
auth = make_auth("foo", tenant_id)
users = api_client_mgmt.get_users(auth)
assert len(users) == len(init_users)
def _do_test_no_users(self, api_client_mgmt, tenant_id=None):
auth = None
if tenant_id is not None:
auth = make_auth("foo", tenant_id)
users = api_client_mgmt.get_users(auth)
assert len(users) == 0
class TestManagementApiGetUsersOk(TestManagementApiGetUsersBase):
def test_ok(self, api_client_mgmt, init_users):
self._do_test_ok(api_client_mgmt, init_users)
class TestManagementApiGetUsersNoUsers(TestManagementApiGetUsersBase):
def test_no_users(self, api_client_mgmt):
self._do_test_no_users(api_client_mgmt)
class TestManagementApiGetUsersEnterprise(TestManagementApiGetUsersBase):
@pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
def test_ok(self, tenant_id, api_client_mgmt, init_users_mt):
self._do_test_ok(api_client_mgmt, init_users_mt[tenant_id], tenant_id)
@pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
def test_no_users(self, tenant_id, api_client_mgmt, init_users_mt):
self._do_test_no_users(api_client_mgmt, "non_existing_tenant_id")
class TestManagementApiDeleteUserBase:
def _do_test_ok(self, api_client_mgmt, init_users, tenant_id=None):
auth = None
if tenant_id is not None:
auth = make_auth("foo", tenant_id)
rsp = api_client_mgmt.delete_user(init_users[0]["id"], auth)
assert rsp.status_code == 204
users = api_client_mgmt.get_users(auth)
assert len(users) == len(init_users) - 1
found = [u for u in users if u.id == init_users[0]["id"]]
assert len(found) == 0
def _do_test_not_found(self, api_client_mgmt, tenant_id=None):
auth = None
if tenant_id is not None:
auth = make_auth("foo", tenant_id)
rsp = api_client_mgmt.delete_user("nonexistent_id", auth)
assert rsp.status_code == 204
class TestManagementApiDeleteUser(TestManagementApiDeleteUserBase):
def test_ok(self, api_client_mgmt, init_users):
self._do_test_ok(api_client_mgmt, init_users)
def test_not_found(self, api_client_mgmt, init_users):
self._do_test_not_found(api_client_mgmt)
class TestManagementApiDeleteUserEnterprise(TestManagementApiDeleteUserBase):
@pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
def test_ok(self, tenant_id, api_client_mgmt, init_users_mt):
with tenantadm.run_fake_delete_user(
tenant_id, init_users_mt[tenant_id][0]["id"]
):
self._do_test_ok(api_client_mgmt, init_users_mt[tenant_id], tenant_id)
@pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
def test_not_found(self, tenant_id, api_client_mgmt):
with tenantadm.run_fake_delete_user():
self._do_test_not_found(api_client_mgmt, tenant_id)
class TestManagementApiPutUserBase:
def _do_test_ok_email(
self, api_client_mgmt, init_users, user, update, tenant_id=None
):
_, r = api_client_mgmt.login(user.email, "correcthorsebatterystaple")
assert r.status_code == 200
token = r.text
auth = {"Authorization": "Bearer " + token}
# test update
_, r = api_client_mgmt.update_user(user.id, update, auth)
assert r.status_code == 204
# get/verify users
users = api_client_mgmt.get_users(auth)
assert len(users) == len(init_users)
found = [u for u in users if u.email == update["email"]]
assert len(found) == 1
def _do_test_ok_email_or_pass(
self, api_client_mgmt, init_users, user, update, tenant_id=None
):
_, r = api_client_mgmt.login(user.email, "correcthorsebatterystaple")
assert r.status_code == 200
token = r.text
auth = {"Authorization": "Bearer " + token}
# test update
_, r = api_client_mgmt.update_user(user.id, update, auth)
assert r.status_code == 204
# get/verify users
users = api_client_mgmt.get_users(auth)
assert len(users) == len(init_users)
# find the user via (new?) email
email = user.email
new_email = update.get("email", None)
if new_email != None and new_email != user.email:
email = new_email
found = [u for u in users if u.email == email]
assert len(found) == 1
# try if login still works
_, r = api_client_mgmt.login(email, update["password"])
assert r.status_code == 200
def _do_test_fail_not_found(
self, api_client_mgmt, init_users, update, tenant_id=None
):
_, r = api_client_mgmt.login(init_users[0].email, "correcthorsebatterystaple")
assert r.status_code == 200
token = r.text
auth = {"Authorization": "Bearer " + token}
try:
_, r = api_client_mgmt.update_user("madeupid", update, auth)
except bravado.exception.HTTPError as e:
assert e.response.status_code == 404
def _do_test_fail_bad_update(self, api_client_mgmt, init_users, tenant_id=None):
try:
_, r = api_client_mgmt.update_user(init_users[0].id, {"foo": "bar"})
except bravado.exception.HTTPError as e:
assert e.response.status_code == 400
def _do_test_fail_unprocessable_entity(
self, api_client_mgmt, init_users, user, update, tenant_id=None
):
_, r = api_client_mgmt.login(user.email, "correcthorsebatterystaple")
assert r.status_code == 200
token = r.text
auth = {"Authorization": "Bearer " + token}
try:
_, r = api_client_mgmt.update_user(user.id, update, auth)
except bravado.exception.HTTPError as e:
assert e.response.status_code == 422
class TestManagementApiPutUser(TestManagementApiPutUserBase):
def test_ok_email(self, api_client_mgmt, init_users_f):
update = {"email": "<EMAIL>"}
self._do_test_ok_email(api_client_mgmt, init_users_f, init_users_f[0], update)
def test_ok_pass(self, api_client_mgmt, init_users_f):
update = {
"current_password": "<PASSWORD>",
"password": "<PASSWORD>",
}
self._do_test_ok_email_or_pass(
api_client_mgmt, init_users_f, init_users_f[0], update
)
def test_ok_email_and_pass(self, api_client_mgmt, init_users_f):
update = {
"email": "<EMAIL>",
"current_password": "<PASSWORD>",
"password": "<PASSWORD>",
}
self._do_test_ok_email_or_pass(
api_client_mgmt, init_users_f, init_users_f[0], update
)
def test_fail_password_mismatch(self, api_client_mgmt, init_users_f):
update = {"current_password": "<PASSWORD>", "password": "<PASSWORD>"}
self._do_test_fail_unprocessable_entity(
api_client_mgmt, init_users_f, init_users_f[0], update
)
def test_fail_not_found(self, api_client_mgmt, init_users_f):
update = {"email": "<EMAIL>", "password": "<PASSWORD>"}
self._do_test_fail_not_found(api_client_mgmt, init_users_f, update)
def test_fail_bad_update(self, api_client_mgmt, init_users_f):
self._do_test_fail_bad_update(api_client_mgmt, init_users_f)
def test_fail_duplicate_email(self, api_client_mgmt, init_users_f):
update = {"email": init_users_f[1].email, "password": "<PASSWORD>"}
self._do_test_fail_unprocessable_entity(
api_client_mgmt, init_users_f, init_users_f[0], update
)
class TestManagementApiPutUserEnterprise(TestManagementApiPutUserBase):
@pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
def test_ok_email(self, api_client_mgmt, init_users_mt_f, tenant_id):
user = init_users_mt_f[tenant_id][0]
update = {"email": "<EMAIL>"}
with tenantadm.run_fake_update_user(tenant_id, user.id, update):
self._do_test_ok_email(
api_client_mgmt, init_users_mt_f[tenant_id], user, update, tenant_id
)
@pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
def test_ok_pass(self, api_client_mgmt, init_users_mt_f, tenant_id):
user = init_users_mt_f[tenant_id][1]
with tenantadm.run_fake_get_tenants(tenant_id):
update = {
"password": "<PASSWORD>",
"current_password": "<PASSWORD>",
}
self._do_test_ok_email_or_pass(
api_client_mgmt, init_users_mt_f[tenant_id], user, update, tenant_id
)
@pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
def test_ok_email_and_pass(self, api_client_mgmt, init_users_mt_f, tenant_id):
user = init_users_mt_f[tenant_id][2]
update = {
"email": "<EMAIL>",
"current_password": "<PASSWORD>",
"password": "<PASSWORD>",
}
with tenantadm.run_fake_update_user(tenant_id, user.id, update):
self._do_test_ok_email_or_pass(
api_client_mgmt, init_users_mt_f[tenant_id], user, update, tenant_id
)
@pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
def test_fail_not_found(self, api_client_mgmt, init_users_mt_f, tenant_id):
user = init_users_mt_f[tenant_id][3]
update = {
"email": "<EMAIL>",
"current_password": "<PASSWORD>",
"password": "<PASSWORD>",
}
with tenantadm.run_fake_update_user(tenant_id, user.id, update, 404):
self._do_test_fail_not_found(
api_client_mgmt, init_users_mt_f[tenant_id], update, tenant_id
)
@pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
def test_fail_bad_update(self, api_client_mgmt, init_users_mt_f, tenant_id):
self._do_test_fail_bad_update(api_client_mgmt, init_users_mt_f[tenant_id])
@pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
def test_fail_duplicate_email(self, api_client_mgmt, init_users_mt_f, tenant_id):
user = init_users_mt_f[tenant_id][0]
update = {
"email": init_users_mt_f[tenant_id][1].email,
"password": "<PASSWORD>",
}
with tenantadm.run_fake_update_user(tenant_id, user.id, update, 422):
self._do_test_fail_unprocessable_entity(
api_client_mgmt, init_users_mt_f[tenant_id], user, update, tenant_id
)
class TestManagementApiSettingsBase:
def _do_test_ok(self, api_client_mgmt, tenant_id=None):
auth = None
if tenant_id is not None:
auth = make_auth("foo", tenant_id)
# nonempty
self._set_and_verify(
{"foo": "foo-val", "bar": "bar-val"}, api_client_mgmt, auth
)
# empty
self._set_and_verify({}, api_client_mgmt, auth)
def _do_test_no_settings(self, api_client_mgmt, tenant_id=None):
auth = None
if tenant_id is not None:
auth = make_auth("foo", tenant_id)
found = api_client_mgmt.get_settings(auth)
assert found.json() == {}
def _set_and_verify(self, settings, api_client_mgmt, auth):
r = api_client_mgmt.post_settings(settings, auth)
assert r.status_code == 201
found = api_client_mgmt.get_settings(auth)
assert found.json() == settings
def _do_test_fail_bad_request(self, api_client_mgmt, tenant_id=None):
auth = None
if tenant_id is not None:
auth = make_auth("foo", tenant_id)
try:
r = api_client_mgmt.post_settings("asdf", auth)
except bravado.exception.HTTPError as e:
assert e.response.status_code == 400
class TestManagementApiSettings(TestManagementApiSettingsBase):
def test_ok(self, api_client_mgmt):
self._do_test_ok(api_client_mgmt)
def test_no_settings(self, api_client_mgmt):
self._do_test_no_settings(api_client_mgmt)
def test_bad_request(self, api_client_mgmt):
self._do_test_fail_bad_request(api_client_mgmt)
class TestManagementApiSettingsEnterprise(TestManagementApiSettingsBase):
@pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
def test_ok(self, api_client_mgmt, init_users_mt_f, tenant_id):
self._do_test_ok(api_client_mgmt, tenant_id)
@pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
def test_bad_request(self, api_client_mgmt, tenant_id):
self._do_test_fail_bad_request(api_client_mgmt, tenant_id)
| [
"common.api_client_mgmt.get_users",
"common.api_client_mgmt.update_user",
"tenantadm.run_fake_delete_user",
"common.api_client_mgmt.get_settings",
"common.api_client_mgmt.get_user",
"pytest.mark.parametrize",
"common.api_client_mgmt.login",
"common.api_client_mgmt.create_user",
"common.make_auth",
... | [((3664, 3728), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""tenant_id"""', "['tenant1id', 'tenant2id']"], {}), "('tenant_id', ['tenant1id', 'tenant2id'])\n", (3687, 3728), False, 'import pytest\n'), ((4045, 4109), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""tenant_id"""', "['tenant1id', 'tenant2id']"], {}), "('tenant_id', ['tenant1id', 'tenant2id'])\n", (4068, 4109), False, 'import pytest\n'), ((5691, 5755), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""tenant_id"""', "['tenant1id', 'tenant2id']"], {}), "('tenant_id', ['tenant1id', 'tenant2id'])\n", (5714, 5755), False, 'import pytest\n'), ((5907, 5971), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""tenant_id"""', "['tenant1id', 'tenant2id']"], {}), "('tenant_id', ['tenant1id', 'tenant2id'])\n", (5930, 5971), False, 'import pytest\n'), ((7139, 7203), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""tenant_id"""', "['tenant1id', 'tenant2id']"], {}), "('tenant_id', ['tenant1id', 'tenant2id'])\n", (7162, 7203), False, 'import pytest\n'), ((7355, 7419), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""tenant_id"""', "['tenant1id', 'tenant2id']"], {}), "('tenant_id', ['tenant1id', 'tenant2id'])\n", (7378, 7419), False, 'import pytest\n'), ((8728, 8792), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""tenant_id"""', "['tenant1id', 'tenant2id']"], {}), "('tenant_id', ['tenant1id', 'tenant2id'])\n", (8751, 8792), False, 'import pytest\n'), ((9061, 9125), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""tenant_id"""', "['tenant1id', 'tenant2id']"], {}), "('tenant_id', ['tenant1id', 'tenant2id'])\n", (9084, 9125), False, 'import pytest\n'), ((14059, 14123), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""tenant_id"""', "['tenant1id', 'tenant2id']"], {}), "('tenant_id', ['tenant1id', 'tenant2id'])\n", (14082, 14123), False, 'import pytest\n'), ((14495, 14559), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""tenant_id"""', "['tenant1id', 'tenant2id']"], {}), "('tenant_id', ['tenant1id', 'tenant2id'])\n", (14518, 14559), False, 'import pytest\n'), ((15012, 15076), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""tenant_id"""', "['tenant1id', 'tenant2id']"], {}), "('tenant_id', ['tenant1id', 'tenant2id'])\n", (15035, 15076), False, 'import pytest\n'), ((15572, 15636), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""tenant_id"""', "['tenant1id', 'tenant2id']"], {}), "('tenant_id', ['tenant1id', 'tenant2id'])\n", (15595, 15636), False, 'import pytest\n'), ((16126, 16190), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""tenant_id"""', "['tenant1id', 'tenant2id']"], {}), "('tenant_id', ['tenant1id', 'tenant2id'])\n", (16149, 16190), False, 'import pytest\n'), ((16361, 16425), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""tenant_id"""', "['tenant1id', 'tenant2id']"], {}), "('tenant_id', ['tenant1id', 'tenant2id'])\n", (16384, 16425), False, 'import pytest\n'), ((18609, 18673), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""tenant_id"""', "['tenant1id', 'tenant2id']"], {}), "('tenant_id', ['tenant1id', 'tenant2id'])\n", (18632, 18673), False, 'import pytest\n'), ((18801, 18865), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""tenant_id"""', "['tenant1id', 'tenant2id']"], {}), "('tenant_id', ['tenant1id', 'tenant2id'])\n", (18824, 18865), False, 'import pytest\n'), ((1063, 1106), 'common.api_client_mgmt.create_user', 'api_client_mgmt.create_user', (['new_user', 'auth'], {}), '(new_user, auth)\n', (1090, 1106), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((1160, 1191), 'common.api_client_mgmt.get_users', 'api_client_mgmt.get_users', (['auth'], {}), '(auth)\n', (1185, 1191), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((6392, 6423), 'common.api_client_mgmt.get_users', 'api_client_mgmt.get_users', (['auth'], {}), '(auth)\n', (6417, 6423), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((6654, 6685), 'common.api_client_mgmt.get_users', 'api_client_mgmt.get_users', (['auth'], {}), '(auth)\n', (6679, 6685), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((7795, 7849), 'common.api_client_mgmt.delete_user', 'api_client_mgmt.delete_user', (["init_users[0]['id']", 'auth'], {}), "(init_users[0]['id'], auth)\n", (7822, 7849), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((7905, 7936), 'common.api_client_mgmt.get_users', 'api_client_mgmt.get_users', (['auth'], {}), '(auth)\n', (7930, 7936), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((8268, 8319), 'common.api_client_mgmt.delete_user', 'api_client_mgmt.delete_user', (['"""nonexistent_id"""', 'auth'], {}), "('nonexistent_id', auth)\n", (8295, 8319), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((9454, 9516), 'common.api_client_mgmt.login', 'api_client_mgmt.login', (['user.email', '"""correcthorsebatterystaple"""'], {}), "(user.email, 'correcthorsebatterystaple')\n", (9475, 9516), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((9666, 9716), 'common.api_client_mgmt.update_user', 'api_client_mgmt.update_user', (['user.id', 'update', 'auth'], {}), '(user.id, update, auth)\n', (9693, 9716), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((9797, 9828), 'common.api_client_mgmt.get_users', 'api_client_mgmt.get_users', (['auth'], {}), '(auth)\n', (9822, 9828), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((10101, 10163), 'common.api_client_mgmt.login', 'api_client_mgmt.login', (['user.email', '"""correcthorsebatterystaple"""'], {}), "(user.email, 'correcthorsebatterystaple')\n", (10122, 10163), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((10313, 10363), 'common.api_client_mgmt.update_user', 'api_client_mgmt.update_user', (['user.id', 'update', 'auth'], {}), '(user.id, update, auth)\n', (10340, 10363), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((10444, 10475), 'common.api_client_mgmt.get_users', 'api_client_mgmt.get_users', (['auth'], {}), '(auth)\n', (10469, 10475), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((10862, 10910), 'common.api_client_mgmt.login', 'api_client_mgmt.login', (['email', "update['password']"], {}), "(email, update['password'])\n", (10883, 10910), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((11070, 11141), 'common.api_client_mgmt.login', 'api_client_mgmt.login', (['init_users[0].email', '"""correcthorsebatterystaple"""'], {}), "(init_users[0].email, 'correcthorsebatterystaple')\n", (11091, 11141), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((11855, 11917), 'common.api_client_mgmt.login', 'api_client_mgmt.login', (['user.email', '"""correcthorsebatterystaple"""'], {}), "(user.email, 'correcthorsebatterystaple')\n", (11876, 11917), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((17505, 17539), 'common.api_client_mgmt.get_settings', 'api_client_mgmt.get_settings', (['auth'], {}), '(auth)\n', (17533, 17539), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((17651, 17696), 'common.api_client_mgmt.post_settings', 'api_client_mgmt.post_settings', (['settings', 'auth'], {}), '(settings, auth)\n', (17680, 17696), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((17750, 17784), 'common.api_client_mgmt.get_settings', 'api_client_mgmt.get_settings', (['auth'], {}), '(auth)\n', (17778, 17784), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((1019, 1046), 'common.make_auth', 'make_auth', (['"""foo"""', 'tenant_id'], {}), "('foo', tenant_id)\n", (1028, 1046), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((1578, 1605), 'common.make_auth', 'make_auth', (['"""foo"""', 'tenant_id'], {}), "('foo', tenant_id)\n", (1587, 1605), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((1632, 1675), 'common.api_client_mgmt.create_user', 'api_client_mgmt.create_user', (['new_user', 'auth'], {}), '(new_user, auth)\n', (1659, 1675), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((2141, 2178), 'common.api_client_mgmt.create_user', 'api_client_mgmt.create_user', (['new_user'], {}), '(new_user)\n', (2168, 2178), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((2396, 2433), 'common.api_client_mgmt.create_user', 'api_client_mgmt.create_user', (['new_user'], {}), '(new_user)\n', (2423, 2433), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((2655, 2692), 'common.api_client_mgmt.create_user', 'api_client_mgmt.create_user', (['new_user'], {}), '(new_user)\n', (2682, 2692), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((2937, 2974), 'common.api_client_mgmt.create_user', 'api_client_mgmt.create_user', (['new_user'], {}), '(new_user)\n', (2964, 2974), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((3221, 3258), 'common.api_client_mgmt.create_user', 'api_client_mgmt.create_user', (['new_user'], {}), '(new_user)\n', (3248, 3258), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((3874, 3914), 'tenantadm.run_fake_create_user', 'tenantadm.run_fake_create_user', (['new_user'], {}), '(new_user)\n', (3904, 3914), False, 'import tenantadm\n'), ((4273, 4318), 'tenantadm.run_fake_create_user', 'tenantadm.run_fake_create_user', (['new_user', '(422)'], {}), '(new_user, 422)\n', (4303, 4318), False, 'import tenantadm\n'), ((4649, 4676), 'common.make_auth', 'make_auth', (['"""foo"""', 'tenant_id'], {}), "('foo', tenant_id)\n", (4658, 4676), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((4727, 4763), 'common.api_client_mgmt.get_user', 'api_client_mgmt.get_user', (['u.id', 'auth'], {}), '(u.id, auth)\n', (4751, 4763), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((5104, 5131), 'common.make_auth', 'make_auth', (['"""foo"""', 'tenant_id'], {}), "('foo', tenant_id)\n", (5113, 5131), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((5170, 5212), 'common.api_client_mgmt.get_user', 'api_client_mgmt.get_user', (['"""madeupid"""', 'auth'], {}), "('madeupid', auth)\n", (5194, 5212), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((6347, 6374), 'common.make_auth', 'make_auth', (['"""foo"""', 'tenant_id'], {}), "('foo', tenant_id)\n", (6356, 6374), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((6609, 6636), 'common.make_auth', 'make_auth', (['"""foo"""', 'tenant_id'], {}), "('foo', tenant_id)\n", (6618, 6636), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((7752, 7779), 'common.make_auth', 'make_auth', (['"""foo"""', 'tenant_id'], {}), "('foo', tenant_id)\n", (7761, 7779), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((8225, 8252), 'common.make_auth', 'make_auth', (['"""foo"""', 'tenant_id'], {}), "('foo', tenant_id)\n", (8234, 8252), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((8872, 8948), 'tenantadm.run_fake_delete_user', 'tenantadm.run_fake_delete_user', (['tenant_id', "init_users_mt[tenant_id][0]['id']"], {}), "(tenant_id, init_users_mt[tenant_id][0]['id'])\n", (8902, 8948), False, 'import tenantadm\n'), ((9197, 9229), 'tenantadm.run_fake_delete_user', 'tenantadm.run_fake_delete_user', ([], {}), '()\n', (9227, 9229), False, 'import tenantadm\n'), ((11286, 11339), 'common.api_client_mgmt.update_user', 'api_client_mgmt.update_user', (['"""madeupid"""', 'update', 'auth'], {}), "('madeupid', update, auth)\n", (11313, 11339), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((11556, 11617), 'common.api_client_mgmt.update_user', 'api_client_mgmt.update_user', (['init_users[0].id', "{'foo': 'bar'}"], {}), "(init_users[0].id, {'foo': 'bar'})\n", (11583, 11617), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((12062, 12112), 'common.api_client_mgmt.update_user', 'api_client_mgmt.update_user', (['user.id', 'update', 'auth'], {}), '(user.id, update, auth)\n', (12089, 12112), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((14294, 14352), 'tenantadm.run_fake_update_user', 'tenantadm.run_fake_update_user', (['tenant_id', 'user.id', 'update'], {}), '(tenant_id, user.id, update)\n', (14324, 14352), False, 'import tenantadm\n'), ((14691, 14732), 'tenantadm.run_fake_get_tenants', 'tenantadm.run_fake_get_tenants', (['tenant_id'], {}), '(tenant_id)\n', (14721, 14732), False, 'import tenantadm\n'), ((15363, 15421), 'tenantadm.run_fake_update_user', 'tenantadm.run_fake_update_user', (['tenant_id', 'user.id', 'update'], {}), '(tenant_id, user.id, update)\n', (15393, 15421), False, 'import tenantadm\n'), ((15920, 15983), 'tenantadm.run_fake_update_user', 'tenantadm.run_fake_update_user', (['tenant_id', 'user.id', 'update', '(404)'], {}), '(tenant_id, user.id, update, 404)\n', (15950, 15983), False, 'import tenantadm\n'), ((16695, 16758), 'tenantadm.run_fake_update_user', 'tenantadm.run_fake_update_user', (['tenant_id', 'user.id', 'update', '(422)'], {}), '(tenant_id, user.id, update, 422)\n', (16725, 16758), False, 'import tenantadm\n'), ((17084, 17111), 'common.make_auth', 'make_auth', (['"""foo"""', 'tenant_id'], {}), "('foo', tenant_id)\n", (17093, 17111), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((17460, 17487), 'common.make_auth', 'make_auth', (['"""foo"""', 'tenant_id'], {}), "('foo', tenant_id)\n", (17469, 17487), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((17973, 18000), 'common.make_auth', 'make_auth', (['"""foo"""', 'tenant_id'], {}), "('foo', tenant_id)\n", (17982, 18000), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n'), ((18031, 18074), 'common.api_client_mgmt.post_settings', 'api_client_mgmt.post_settings', (['"""asdf"""', 'auth'], {}), "('asdf', auth)\n", (18060, 18074), False, 'from common import init_users, init_users_f, init_users_mt, init_users_mt_f, cli, api_client_mgmt, mongo, make_auth\n')] |
import os
chrom_bins = {}
with open("GSE88952_Sc_Su.32000.bed") as in_file:
for line in in_file:
line = line.strip().split()
chrom_bins[line[3]] = "{}\t{}\t{}".format(line[0], line[1], line[2])
in_file.close()
if not os.path.isfile("ctrl_32kb.bed"):
with open("ctrl_32kb.bed", "w") as out_file:
with open("ctrl_32kb_matrix.txt") as in_file:
for line in in_file:
line = line.strip().split()
bin1 = line[0]
chrom_string1 = chrom_bins[bin1]
bin2 = line[1]
chrom_string2 = chrom_bins[bin2]
if float(line[3]) != 0:
out_file.write("\t".join((chrom_string1, chrom_string2, line[3])))
out_file.write("\n")
in_file.close()
out_file.close()
if not os.path.isfile("galactose_32kb.bed"):
with open("galactose_32kb.bed", "w") as out_file:
with open("galactose_32kb_matrix.txt") as in_file:
for line in in_file:
line = line.strip().split()
bin1 = line[0]
chrom_string1 = chrom_bins[bin1]
bin2 = line[1]
chrom_string2 = chrom_bins[bin2]
if float(line[3]) != 0:
out_file.write("\t".join((chrom_string1, chrom_string2, line[3])))
out_file.write("\n")
in_file.close()
out_file.close()
| [
"os.path.isfile"
] | [((226, 257), 'os.path.isfile', 'os.path.isfile', (['"""ctrl_32kb.bed"""'], {}), "('ctrl_32kb.bed')\n", (240, 257), False, 'import os\n'), ((693, 729), 'os.path.isfile', 'os.path.isfile', (['"""galactose_32kb.bed"""'], {}), "('galactose_32kb.bed')\n", (707, 729), False, 'import os\n')] |
# Author: <NAME>
# email: <EMAIL>
import matplotlib.pyplot as plt, numpy as np
# import seaborn as sns
# from pandas import DataFrame
# from sklearn.neighbors import NearestNeighbors
from terminaltables import AsciiTable
from collections import Counter
from .private import save_vis_close_helper, get_fig_ax_helper
from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple
from xinshuo_math import calculate_truncated_mse
color_set = ['r', 'b', 'g', 'c', 'm', 'y', 'k', 'w', 'lime', 'cyan', 'aqua']
linestyle_set = ['-', '--', '-.', ':', None, ' ', 'solid', 'dashed']
dpi = 80
def visualize_ced(normed_mean_error_dict, error_threshold, normalized=True, truncated_list=None, display2terminal=True, display_list=None, title='2D PCK curve', debug=True, vis=False, pck_savepath=None, table_savepath=None, closefig=True):
'''
visualize the cumulative error distribution curve (alse called NME curve or pck curve)
all parameters are represented by percentage
parameter:
normed_mean_error_dict: a dictionary whose keys are the method name and values are (N, ) numpy array to represent error in evaluation
error_threshold: threshold to display in x axis
return:
AUC: area under the curve
MSE: mean square error
'''
if debug:
assert isdict(normed_mean_error_dict), 'the input normalized mean error dictionary is not correct'
assert islogical(normalized), 'the normalization flag should be logical'
if normalized: assert error_threshold > 0 and error_threshold < 100, 'threshold percentage is not well set'
if save:
assert is_path_exists_or_creatable(pck_savepath), 'please provide a valid path to save the pck results'
assert is_path_exists_or_creatable(table_savepath), 'please provide a valid path to save the table results'
assert isstring(title), 'title is not correct'
if truncated_list is not None: assert islistofscalar(truncated_list), 'the input truncated list is not correct'
if display_list is not None:
assert islist(display_list) and len(display_list) == len(normed_mean_error_dict), 'the input display list is not correct'
assert CHECK_EQ_LIST_UNORDERED(display_list, normed_mean_error_dict.keys(), debug=debug), 'the input display list does not match the error dictionary key list'
else: display_list = normed_mean_error_dict.keys()
# set display parameters
width, height = 1000, 800
legend_fontsize = 10
scale_distance = 48.8
line_index, color_index = 0, 0
figsize = width / float(dpi), height / float(dpi)
fig = plt.figure(figsize=figsize)
# set figure handle
num_bins = 1000
if normalized:
maximum_x = 1
scale = num_bins / 100
else:
maximum_x = error_threshold + 1
scale = num_bins / maximum_x
x_axis = np.linspace(0, maximum_x, num_bins) # error axis, percentage of normalization factor
y_axis = np.zeros(num_bins)
interval_y = 10
interval_x = 1
plt.xlim(0, error_threshold)
plt.ylim(0, 100)
plt.yticks(np.arange(0, 100 + interval_y, interval_y))
plt.xticks(np.arange(0, error_threshold + interval_x, interval_x))
plt.grid()
plt.title(title, fontsize=20)
if normalized: plt.xlabel('Normalized error euclidean distance (%)', fontsize=16)
else: plt.xlabel('Absolute error euclidean distance', fontsize=16)
# calculate metrics for each method
num_methods = len(normed_mean_error_dict)
num_images = len(normed_mean_error_dict.values()[0])
metrics_dict = dict()
metrics_table = list()
table_title = ['Method Name / Metrics', 'AUC', 'MSE']
append2title = False
assert num_images > 0, 'number of error array should be larger than 0'
for ordered_index in range(num_methods):
method_name = display_list[ordered_index]
normed_mean_error = normed_mean_error_dict[method_name]
if debug:
assert isnparray(normed_mean_error) and normed_mean_error.ndim == 1, 'shape of error distance is not good'
assert len(normed_mean_error) == num_images, 'number of testing images should be equal for all methods'
assert len(linestyle_set) * len(color_set) >= len(normed_mean_error_dict)
color_tmp = color_set[color_index]
line_tmp = linestyle_set[line_index]
for i in range(num_bins):
y_axis[i] = float((normed_mean_error < x_axis[i]).sum()) / num_images # percentage of error
# calculate area under the curve and mean square error
entry = dict()
entry['AUC'] = np.sum(y_axis[:error_threshold * scale]) / (error_threshold * scale) # bigger, better
entry['MSE'] = np.mean(normed_mean_error) # smaller, better
metrics_table_tmp = [str(method_name), '%.2f' % (entry['AUC']), '%.1f' % (entry['MSE'])]
if truncated_list is not None:
tmse_dict = calculate_truncated_mse(normed_mean_error.tolist(), truncated_list, debug=debug)
for threshold in truncated_list:
entry['AUC/%s'%threshold] = np.sum(y_axis[:error_threshold * scale]) / (error_threshold * scale) # bigger, better
entry['MSE/%s'%threshold] = tmse_dict[threshold]['T-MSE']
entry['percentage/%s'%threshold] = tmse_dict[threshold]['percentage']
if not append2title:
table_title.append('AUC/%s'%threshold)
table_title.append('MSE/%s'%threshold)
table_title.append('pct/%s'%threshold)
metrics_table_tmp.append('%.2f' % (entry['AUC/%s'%threshold]))
metrics_table_tmp.append('%.1f' % (entry['MSE/%s'%threshold]))
metrics_table_tmp.append('%.1f' % (100 * entry['percentage/%s'%threshold]) + '%')
# print metrics_table_tmp
metrics_table.append(metrics_table_tmp)
append2title = True
metrics_dict[method_name] = entry
# draw
label = '%s, AUC: %.2f, MSE: %.1f (%.0f um)' % (method_name, entry['AUC'], entry['MSE'], entry['MSE'] * scale_distance)
if normalized: plt.plot(x_axis*100, y_axis*100, color=color_tmp, linestyle=line_tmp, label=label, lw=3)
else: plt.plot(x_axis, y_axis*100, color=color_tmp, linestyle=line_tmp, label=label, lw=3)
plt.legend(loc=4, fontsize=legend_fontsize)
color_index += 1
if color_index / len(color_set) == 1:
line_index += 1
color_index = color_index % len(color_set)
# plt.grid()
plt.ylabel('{} Test Images (%)'.format(num_images), fontsize=16)
save_vis_close_helper(fig=fig, ax=None, vis=vis, transparent=False, save_path=pck_savepath, debug=debug, closefig=closefig)
# reorder the table
order_index_list = [display_list.index(method_name_tmp) for method_name_tmp in normed_mean_error_dict.keys()]
order_index_list = [0] + [order_index_tmp + 1 for order_index_tmp in order_index_list]
# print table to terminal
metrics_table = [table_title] + metrics_table
# metrics_table = list_reorder([table_title] + metrics_table, order_index_list, debug=debug)
table = AsciiTable(metrics_table)
if display2terminal:
print('\nprint detailed metrics')
print(table.table)
# save table to file
if table_savepath is not None:
table_file = open(table_savepath, 'w')
table_file.write(table.table)
table_file.close()
if display2terminal: print('\nsave detailed metrics to %s' % table_savepath)
return metrics_dict, metrics_table
def visualize_nearest_neighbor(featuremap_dict, num_neighbor=5, top_number=5, vis=True, save_csv=False, csv_save_path=None, save_vis=False, save_img=False, save_thumb_name='nearest_neighbor.png', img_src_folder=None, ext_filter='.jpg', nn_save_folder=None, debug=True):
'''
visualize nearest neighbor for featuremap from images
parameter:
featuremap_dict: a dictionary contains image path as key, and featuremap as value, the featuremap needs to be numpy array with any shape. No flatten needed
num_neighbor: number of neighbor to visualize, the first nearest is itself
top_number: number of top to visualize, since there might be tons of featuremap (length of dictionary), we choose the top ten with lowest distance with their nearest neighbor
csv_save_path: path to save .csv file which contains indices and distance array for all elements
nn_save_folder: save the nearest neighbor images for top featuremap
return:
all_sorted_nearest_id: a 2d matrix, each row is a feature followed by its nearest neighbor in whole feature dataset, the column is sorted by the distance of all nearest neighbor each row
selected_nearest_id: only top number of sorted nearest id
'''
print('processing feature map to nearest neightbor.......')
if debug:
assert isdict(featuremap_dict), 'featuremap should be dictionary'
assert all(isnparray(featuremap_tmp) for featuremap_tmp in featuremap_dict.values()), 'value of dictionary should be numpy array'
assert isinteger(num_neighbor) and num_neighbor > 1, 'number of neighborhodd is an integer larger than 1'
if save_csv and csv_save_path is not None:
assert is_path_exists_or_creatable(csv_save_path), 'path to save .csv file is not correct'
if save_vis or save_img:
if nn_save_folder is not None: # save image directly
assert isstring(ext_filter), 'extension filter is not correct'
assert is_path_exists(img_src_folder), 'source folder for image is not correct'
assert all(isstring(path_tmp) for path_tmp in featuremap_dict.keys()) # key should be the path for the image
assert is_path_exists_or_creatable(nn_save_folder), 'folder to save top visualized images is not correct'
assert isstring(save_thumb_name), 'name of thumbnail is not correct'
if ext_filter.find('.') == -1:
ext_filter = '.%s' % ext_filter
# flatten the feature map
nn_feature_dict = dict()
for key, featuremap_tmp in featuremap_dict.items():
nn_feature_dict[key] = featuremap_tmp.flatten()
num_features = len(nn_feature_dict)
# nearest neighbor
featuremap = np.array(nn_feature_dict.values())
nearbrs = NearestNeighbors(n_neighbors=num_neighbor, algorithm='ball_tree').fit(featuremap)
distances, indices = nearbrs.kneighbors(featuremap)
if debug:
assert featuremap.shape[0] == num_features, 'shape of feature map is not correct'
assert indices.shape == (num_features, num_neighbor), 'shape of indices is not correct'
assert distances.shape == (num_features, num_neighbor), 'shape of indices is not correct'
# convert the nearest indices for all featuremap to the key accordingly
id_list = nn_feature_dict.keys()
max_length = len(max(id_list, key=len)) # find the maximum length of string in the key
nearest_id = np.chararray(indices.shape, itemsize=max_length+1)
for x in range(nearest_id.shape[0]):
for y in range(nearest_id.shape[1]):
nearest_id[x, y] = id_list[indices[x, y]]
if debug:
assert list(nearest_id[:, 0]) == id_list, 'nearest neighbor has problem'
# sort the feature based on distance
print('sorting the feature based on distance')
featuremap_distance = np.sum(distances, axis=1)
if debug:
assert featuremap_distance.shape == (num_features, ), 'distance is not correct'
sorted_indices = np.argsort(featuremap_distance)
all_sorted_nearest_id = nearest_id[sorted_indices, :]
# save to the csv file
if save_csv and csv_save_path is not None:
print('Saving nearest neighbor result as .csv to path: %s' % csv_save_path)
with open(csv_save_path, 'w+') as file:
np.savetxt(file, distances, delimiter=',', fmt='%f')
np.savetxt(file, all_sorted_nearest_id, delimiter=',', fmt='%s')
file.close()
# choose the best to visualize
selected_sorted_indices = sorted_indices[0:top_number]
if debug:
for i in range(num_features-1):
assert featuremap_distance[sorted_indices[i]] < featuremap_distance[sorted_indices[i+1]], 'feature map is not well sorted based on distance'
selected_nearest_id = nearest_id[selected_sorted_indices, :]
if save_vis:
fig, axarray = plt.subplots(top_number, num_neighbor)
for index in range(top_number):
for nearest_index in range(num_neighbor):
img_path = os.path.join(img_src_folder, '%s%s'%(selected_nearest_id[index, nearest_index], ext_filter))
if debug:
print('loading image from %s'%img_path)
img = imread(img_path)
if isgrayimage_dimension(img):
axarray[index, nearest_index].imshow(img, cmap='gray')
elif iscolorimage_dimension(img):
axarray[index, nearest_index].imshow(img)
else:
assert False, 'unknown error'
axarray[index, nearest_index].axis('off')
save_thumb = os.path.join(nn_save_folder, save_thumb_name)
fig.savefig(save_thumb)
if vis:
plt.show()
plt.close(fig)
# save top visualization to the folder
if save_img and nn_save_folder is not None:
for top_index in range(top_number):
file_list = selected_nearest_id[top_index]
save_subfolder = os.path.join(nn_save_folder, file_list[0])
mkdir_if_missing(save_subfolder)
for file_tmp in file_list:
file_src = os.path.join(img_src_folder, '%s%s'%(file_tmp, ext_filter))
save_path = os.path.join(save_subfolder, '%s%s'%(file_tmp, ext_filter))
if debug:
print('saving %s to %s' % (file_src, save_path))
shutil.copyfile(file_src, save_path)
return all_sorted_nearest_id, selected_nearest_id
def visualize_distribution(data, bin_size=None, vis=False, save_path=None, debug=True, closefig=True):
'''
visualize the histogram of a data, which can be a dictionary or list or numpy array or tuple or a list of list
'''
if debug:
assert istuple(data) or isdict(data) or islist(data) or isnparray(data), 'input data is not correct'
# convert data type
if istuple(data):
data = list(data)
elif isdict(data):
data = data.values()
elif isnparray(data):
data = data.tolist()
num_bins = 1000.0
fig, ax = get_fig_ax_helper(fig=None, ax=None)
# calculate bin size
if bin_size is None:
if islistoflist(data):
max_value = np.max(np.max(data))
min_value = np.min(np.min(data))
else:
max_value = np.max(data)
min_value = np.min(data)
bin_size = (max_value - min_value) / num_bins
else:
try:
bin_size = float(bin_size)
except TypeError:
print('size of bin should be an float value')
# plot
if islistoflist(data):
max_value = np.max(np.max(data))
min_value = np.min(np.min(data))
bins = np.arange(min_value - bin_size, max_value + bin_size, bin_size) # fixed bin size
plt.xlim([min_value - bin_size, max_value + bin_size])
for data_list_tmp in data:
if debug:
assert islist(data_list_tmp), 'the nested list is not correct!'
# plt.hist(data_list_tmp, bins=bins, alpha=0.3)
sns.distplot(data_list_tmp, bins=bins, kde=False)
# sns.distplot(data_list_tmp, bins=bins, kde=False)
else:
bins = np.arange(min(data) - 10 * bin_size, max(data) + 10 * bin_size, bin_size) # fixed bin size
plt.xlim([min(data) - bin_size, max(data) + bin_size])
plt.hist(data, bins=bins, alpha=0.5)
plt.title('distribution of data')
plt.xlabel('data (bin size = %f)' % bin_size)
plt.ylabel('count')
return save_vis_close_helper(fig=fig, ax=ax, vis=vis, save_path=save_path, debug=debug, closefig=closefig)
def visualize_bar(data, bin_size=2.0, title='Bar Graph of Key-Value Pair', xlabel='index', ylabel='count', vis=True, save_path=None, debug=True, closefig=True):
'''
visualize the bar graph of a data, which can be a dictionary or list of dictionary
different from function of visualize_bar_graph, this function does not depend on panda and dataframe, it's simpler but with less functionality
also the key of this function takes continuous scalar variable
'''
if debug:
assert isstring(title) and isstring(xlabel) and isstring(ylabel), 'title/xlabel/ylabel is not correct'
assert isdict(data) or islist(data), 'input data is not correct'
assert isscalar(bin_size), 'the bin size is not a floating number'
if isdict(data):
index_list = data.keys()
if debug:
assert islistofscalar(index_list), 'the input dictionary does not contain a scalar key'
frequencies = data.values()
else:
index_list = range(len(data))
frequencies = data
index_str_list = scalarlist2strlist(index_list, debug=debug)
index_list = np.array(index_list)
fig, ax = get_fig_ax_helper(fig=None, ax=None)
# ax.set_xticks(index_list)
# ax.set_xticklabels(index_str_list)
plt.bar(index_list, frequencies, bin_size, color='r', alpha=0.5)
plt.title(title, fontsize=20)
plt.xlabel(xlabel)
plt.ylabel(ylabel)
return save_vis_close_helper(fig=fig, ax=ax, vis=vis, save_path=save_path, debug=debug, transparent=False, closefig=closefig)
def visualize_bar_graph(data, title='Bar Graph of Key-Value Pair', xlabel='pixel error', ylabel='keypoint index', label=False, label_list=None, vis=True, save_path=None, debug=True, closefig=True):
'''
visualize the bar graph of a data, which can be a dictionary or list of dictionary
inside each dictionary, the keys (string) should be the same which is the y label, the values should be scalar
'''
if debug:
assert isstring(title) and isstring(xlabel) and isstring(ylabel), 'title/xlabel/ylabel is not correct'
assert isdict(data) or islistofdict(data), 'input data is not correct'
if isdict(data):
assert all(isstring(key_tmp) for key_tmp in data.keys()), 'the keys are not all strings'
assert all(isscalar(value_tmp) for value_tmp in data.values()), 'the keys are not all strings'
else:
assert len(data) <= len(color_set), 'number of data set is larger than number of color to use'
keys = sorted(data[0].keys())
for dict_tmp in data:
if not (sorted(dict_tmp.keys()) == keys):
print(dict_tmp.keys())
print(keys)
assert False, 'the keys are not equal across different input set'
assert all(isstring(key_tmp) for key_tmp in dict_tmp.keys()), 'the keys are not all strings'
assert all(isscalar(value_tmp) for value_tmp in dict_tmp.values()), 'the values are not all scalars'
# convert dictionary to DataFrame
data_new = dict()
if isdict(data):
key_list = data.keys()
sorted_index = sorted(range(len(key_list)), key=lambda k: key_list[k])
data_new['names'] = (np.asarray(key_list)[sorted_index]).tolist()
data_new['values'] = (np.asarray(data.values())[sorted_index]).tolist()
else:
key_list = data[0].keys()
sorted_index = sorted(range(len(key_list)), key=lambda k: key_list[k])
data_new['names'] = (np.asarray(key_list)[sorted_index]).tolist()
num_sets = len(data)
for set_index in range(num_sets):
data_new['value_%03d'%set_index] = (np.asarray(data[set_index].values())[sorted_index]).tolist()
dataframe = DataFrame(data_new)
# plot
width = 2000
height = 2000
alpha = 0.5
figsize = width / float(dpi), height / float(dpi)
fig = plt.figure(figsize=figsize)
sns.set(style='whitegrid')
# fig, ax = get_fig_ax_helper(fig=None, ax=None)
if isdict(data):
g = sns.barplot(x='values', y='names', data=dataframe, label='data', color='b')
plt.legend(ncol=1, loc='lower right', frameon=True, fontsize=5)
else:
num_sets = len(data)
for set_index in range(num_sets):
if set_index == 0:
sns.set_color_codes('pastel')
else:
sns.set_color_codes('muted')
if label:
sns.barplot(x='value_%03d'%set_index, y='names', data=dataframe, label=label_list[set_index], color=color_set[set_index], alpha=alpha)
else:
sns.barplot(x='value_%03d'%set_index, y='names', data=dataframe, color=solor_set[set_index], alpha=alpha)
plt.legend(ncol=len(data), loc='lower right', frameon=True, fontsize=5)
sns.despine(left=True, bottom=True)
plt.title(title, fontsize=20)
plt.xlim([0, 50])
plt.xlabel(xlabel)
plt.ylabel(ylabel)
num_yticks = len(data_new['names'])
adaptive_fontsize = -0.0555556 * num_yticks + 15.111
plt.yticks(fontsize=adaptive_fontsize)
return save_vis_close_helper(fig=fig, vis=vis, save_path=save_path, debug=debug, closefig=closefig) | [
"matplotlib.pyplot.grid",
"matplotlib.pyplot.hist",
"matplotlib.pyplot.ylabel",
"numpy.argsort",
"numpy.array",
"xinshuo_miscellaneous.iscolorimage_dimension",
"xinshuo_miscellaneous.isinteger",
"numpy.arange",
"numpy.mean",
"xinshuo_miscellaneous.isnparray",
"matplotlib.pyplot.xlabel",
"matpl... | [((2895, 2922), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (2905, 2922), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((3148, 3183), 'numpy.linspace', 'np.linspace', (['(0)', 'maximum_x', 'num_bins'], {}), '(0, maximum_x, num_bins)\n', (3159, 3183), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((3261, 3279), 'numpy.zeros', 'np.zeros', (['num_bins'], {}), '(num_bins)\n', (3269, 3279), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((3323, 3351), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', 'error_threshold'], {}), '(0, error_threshold)\n', (3331, 3351), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((3356, 3372), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0)', '(100)'], {}), '(0, 100)\n', (3364, 3372), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((3507, 3517), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (3515, 3517), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((3522, 3551), 'matplotlib.pyplot.title', 'plt.title', (['title'], {'fontsize': '(20)'}), '(title, fontsize=20)\n', (3531, 3551), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((7604, 7629), 'terminaltables.AsciiTable', 'AsciiTable', (['metrics_table'], {}), '(metrics_table)\n', (7614, 7629), False, 'from terminaltables import AsciiTable\n'), ((11504, 11556), 'numpy.chararray', 'np.chararray', (['indices.shape'], {'itemsize': '(max_length + 1)'}), '(indices.shape, itemsize=max_length + 1)\n', (11516, 11556), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((11914, 11939), 'numpy.sum', 'np.sum', (['distances'], {'axis': '(1)'}), '(distances, axis=1)\n', (11920, 11939), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((12063, 12094), 'numpy.argsort', 'np.argsort', (['featuremap_distance'], {}), '(featuremap_distance)\n', (12073, 12094), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((14954, 14967), 'xinshuo_miscellaneous.istuple', 'istuple', (['data'], {}), '(data)\n', (14961, 14967), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((15655, 15673), 'xinshuo_miscellaneous.islistoflist', 'islistoflist', (['data'], {}), '(data)\n', (15667, 15673), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((16482, 16515), 'matplotlib.pyplot.title', 'plt.title', (['"""distribution of data"""'], {}), "('distribution of data')\n", (16491, 16515), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((16520, 16565), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (["('data (bin size = %f)' % bin_size)"], {}), "('data (bin size = %f)' % bin_size)\n", (16530, 16565), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((16570, 16589), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""count"""'], {}), "('count')\n", (16580, 16589), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((17463, 17475), 'xinshuo_miscellaneous.isdict', 'isdict', (['data'], {}), '(data)\n', (17469, 17475), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((17761, 17804), 'xinshuo_miscellaneous.scalarlist2strlist', 'scalarlist2strlist', (['index_list'], {'debug': 'debug'}), '(index_list, debug=debug)\n', (17779, 17804), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((17822, 17842), 'numpy.array', 'np.array', (['index_list'], {}), '(index_list)\n', (17830, 17842), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((17971, 18035), 'matplotlib.pyplot.bar', 'plt.bar', (['index_list', 'frequencies', 'bin_size'], {'color': '"""r"""', 'alpha': '(0.5)'}), "(index_list, frequencies, bin_size, color='r', alpha=0.5)\n", (17978, 18035), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((18040, 18069), 'matplotlib.pyplot.title', 'plt.title', (['title'], {'fontsize': '(20)'}), '(title, fontsize=20)\n', (18049, 18069), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((18074, 18092), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['xlabel'], {}), '(xlabel)\n', (18084, 18092), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((18097, 18115), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['ylabel'], {}), '(ylabel)\n', (18107, 18115), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((19813, 19825), 'xinshuo_miscellaneous.isdict', 'isdict', (['data'], {}), '(data)\n', (19819, 19825), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((20639, 20666), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (20649, 20666), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((20758, 20770), 'xinshuo_miscellaneous.isdict', 'isdict', (['data'], {}), '(data)\n', (20764, 20770), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((21592, 21621), 'matplotlib.pyplot.title', 'plt.title', (['title'], {'fontsize': '(20)'}), '(title, fontsize=20)\n', (21601, 21621), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((21626, 21643), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[0, 50]'], {}), '([0, 50])\n', (21634, 21643), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((21648, 21666), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['xlabel'], {}), '(xlabel)\n', (21658, 21666), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((21671, 21689), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['ylabel'], {}), '(ylabel)\n', (21681, 21689), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((21792, 21830), 'matplotlib.pyplot.yticks', 'plt.yticks', ([], {'fontsize': 'adaptive_fontsize'}), '(fontsize=adaptive_fontsize)\n', (21802, 21830), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((1563, 1593), 'xinshuo_miscellaneous.isdict', 'isdict', (['normed_mean_error_dict'], {}), '(normed_mean_error_dict)\n', (1569, 1593), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((1670, 1691), 'xinshuo_miscellaneous.islogical', 'islogical', (['normalized'], {}), '(normalized)\n', (1679, 1691), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((2122, 2137), 'xinshuo_miscellaneous.isstring', 'isstring', (['title'], {}), '(title)\n', (2130, 2137), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((3388, 3430), 'numpy.arange', 'np.arange', (['(0)', '(100 + interval_y)', 'interval_y'], {}), '(0, 100 + interval_y, interval_y)\n', (3397, 3430), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((3447, 3501), 'numpy.arange', 'np.arange', (['(0)', '(error_threshold + interval_x)', 'interval_x'], {}), '(0, error_threshold + interval_x, interval_x)\n', (3456, 3501), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((3571, 3637), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Normalized error euclidean distance (%)"""'], {'fontsize': '(16)'}), "('Normalized error euclidean distance (%)', fontsize=16)\n", (3581, 3637), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((3648, 3708), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Absolute error euclidean distance"""'], {'fontsize': '(16)'}), "('Absolute error euclidean distance', fontsize=16)\n", (3658, 3708), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((5048, 5074), 'numpy.mean', 'np.mean', (['normed_mean_error'], {}), '(normed_mean_error)\n', (5055, 5074), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((6750, 6793), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '(4)', 'fontsize': 'legend_fontsize'}), '(loc=4, fontsize=legend_fontsize)\n', (6760, 6793), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((9375, 9398), 'xinshuo_miscellaneous.isdict', 'isdict', (['featuremap_dict'], {}), '(featuremap_dict)\n', (9381, 9398), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((12936, 12974), 'matplotlib.pyplot.subplots', 'plt.subplots', (['top_number', 'num_neighbor'], {}), '(top_number, num_neighbor)\n', (12948, 12974), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((13824, 13838), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (13833, 13838), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((15004, 15016), 'xinshuo_miscellaneous.isdict', 'isdict', (['data'], {}), '(data)\n', (15010, 15016), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((15238, 15256), 'xinshuo_miscellaneous.islistoflist', 'islistoflist', (['data'], {}), '(data)\n', (15250, 15256), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((15772, 15835), 'numpy.arange', 'np.arange', (['(min_value - bin_size)', '(max_value + bin_size)', 'bin_size'], {}), '(min_value - bin_size, max_value + bin_size, bin_size)\n', (15781, 15835), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((15866, 15920), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[min_value - bin_size, max_value + bin_size]'], {}), '([min_value - bin_size, max_value + bin_size])\n', (15874, 15920), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((16436, 16472), 'matplotlib.pyplot.hist', 'plt.hist', (['data'], {'bins': 'bins', 'alpha': '(0.5)'}), '(data, bins=bins, alpha=0.5)\n', (16444, 16472), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((17395, 17413), 'xinshuo_miscellaneous.isscalar', 'isscalar', (['bin_size'], {}), '(bin_size)\n', (17403, 17413), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((18878, 18890), 'xinshuo_miscellaneous.isdict', 'isdict', (['data'], {}), '(data)\n', (18884, 18890), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((20868, 20931), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'ncol': '(1)', 'loc': '"""lower right"""', 'frameon': '(True)', 'fontsize': '(5)'}), "(ncol=1, loc='lower right', frameon=True, fontsize=5)\n", (20878, 20931), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((1888, 1929), 'xinshuo_miscellaneous.is_path_exists_or_creatable', 'is_path_exists_or_creatable', (['pck_savepath'], {}), '(pck_savepath)\n', (1915, 1929), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((2005, 2048), 'xinshuo_miscellaneous.is_path_exists_or_creatable', 'is_path_exists_or_creatable', (['table_savepath'], {}), '(table_savepath)\n', (2032, 2048), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((4931, 4971), 'numpy.sum', 'np.sum', (['y_axis[:error_threshold * scale]'], {}), '(y_axis[:error_threshold * scale])\n', (4937, 4971), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((6554, 6650), 'matplotlib.pyplot.plot', 'plt.plot', (['(x_axis * 100)', '(y_axis * 100)'], {'color': 'color_tmp', 'linestyle': 'line_tmp', 'label': 'label', 'lw': '(3)'}), '(x_axis * 100, y_axis * 100, color=color_tmp, linestyle=line_tmp,\n label=label, lw=3)\n', (6562, 6650), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((6657, 6748), 'matplotlib.pyplot.plot', 'plt.plot', (['x_axis', '(y_axis * 100)'], {'color': 'color_tmp', 'linestyle': 'line_tmp', 'label': 'label', 'lw': '(3)'}), '(x_axis, y_axis * 100, color=color_tmp, linestyle=line_tmp, label=\n label, lw=3)\n', (6665, 6748), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((9587, 9610), 'xinshuo_miscellaneous.isinteger', 'isinteger', (['num_neighbor'], {}), '(num_neighbor)\n', (9596, 9610), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((9756, 9798), 'xinshuo_miscellaneous.is_path_exists_or_creatable', 'is_path_exists_or_creatable', (['csv_save_path'], {}), '(csv_save_path)\n', (9783, 9798), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((12372, 12424), 'numpy.savetxt', 'np.savetxt', (['file', 'distances'], {'delimiter': '""","""', 'fmt': '"""%f"""'}), "(file, distances, delimiter=',', fmt='%f')\n", (12382, 12424), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((12437, 12501), 'numpy.savetxt', 'np.savetxt', (['file', 'all_sorted_nearest_id'], {'delimiter': '""","""', 'fmt': '"""%s"""'}), "(file, all_sorted_nearest_id, delimiter=',', fmt='%s')\n", (12447, 12501), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((13805, 13815), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (13813, 13815), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((14828, 14841), 'xinshuo_miscellaneous.istuple', 'istuple', (['data'], {}), '(data)\n', (14835, 14841), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((14845, 14857), 'xinshuo_miscellaneous.isdict', 'isdict', (['data'], {}), '(data)\n', (14851, 14857), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((14861, 14873), 'xinshuo_miscellaneous.islist', 'islist', (['data'], {}), '(data)\n', (14867, 14873), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((14877, 14892), 'xinshuo_miscellaneous.isnparray', 'isnparray', (['data'], {}), '(data)\n', (14886, 14892), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((15056, 15071), 'xinshuo_miscellaneous.isnparray', 'isnparray', (['data'], {}), '(data)\n', (15065, 15071), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((15386, 15398), 'numpy.max', 'np.max', (['data'], {}), '(data)\n', (15392, 15398), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((15423, 15435), 'numpy.min', 'np.min', (['data'], {}), '(data)\n', (15429, 15435), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((15702, 15714), 'numpy.max', 'np.max', (['data'], {}), '(data)\n', (15708, 15714), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((15743, 15755), 'numpy.min', 'np.min', (['data'], {}), '(data)\n', (15749, 15755), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((17211, 17226), 'xinshuo_miscellaneous.isstring', 'isstring', (['title'], {}), '(title)\n', (17219, 17226), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((17231, 17247), 'xinshuo_miscellaneous.isstring', 'isstring', (['xlabel'], {}), '(xlabel)\n', (17239, 17247), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((17252, 17268), 'xinshuo_miscellaneous.isstring', 'isstring', (['ylabel'], {}), '(ylabel)\n', (17260, 17268), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((17322, 17334), 'xinshuo_miscellaneous.isdict', 'isdict', (['data'], {}), '(data)\n', (17328, 17334), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((17338, 17350), 'xinshuo_miscellaneous.islist', 'islist', (['data'], {}), '(data)\n', (17344, 17350), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((18692, 18707), 'xinshuo_miscellaneous.isstring', 'isstring', (['title'], {}), '(title)\n', (18700, 18707), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((18712, 18728), 'xinshuo_miscellaneous.isstring', 'isstring', (['xlabel'], {}), '(xlabel)\n', (18720, 18728), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((18733, 18749), 'xinshuo_miscellaneous.isstring', 'isstring', (['ylabel'], {}), '(ylabel)\n', (18741, 18749), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((18803, 18815), 'xinshuo_miscellaneous.isdict', 'isdict', (['data'], {}), '(data)\n', (18809, 18815), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((2338, 2358), 'xinshuo_miscellaneous.islist', 'islist', (['display_list'], {}), '(display_list)\n', (2344, 2358), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((4261, 4289), 'xinshuo_miscellaneous.isnparray', 'isnparray', (['normed_mean_error'], {}), '(normed_mean_error)\n', (4270, 4289), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((9453, 9478), 'xinshuo_miscellaneous.isnparray', 'isnparray', (['featuremap_tmp'], {}), '(featuremap_tmp)\n', (9462, 9478), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((9971, 9991), 'xinshuo_miscellaneous.isstring', 'isstring', (['ext_filter'], {}), '(ext_filter)\n', (9979, 9991), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((10050, 10080), 'xinshuo_miscellaneous.is_path_exists', 'is_path_exists', (['img_src_folder'], {}), '(img_src_folder)\n', (10064, 10080), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((10275, 10318), 'xinshuo_miscellaneous.is_path_exists_or_creatable', 'is_path_exists_or_creatable', (['nn_save_folder'], {}), '(nn_save_folder)\n', (10302, 10318), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((10397, 10422), 'xinshuo_miscellaneous.isstring', 'isstring', (['save_thumb_name'], {}), '(save_thumb_name)\n', (10405, 10422), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((13333, 13359), 'xinshuo_miscellaneous.isgrayimage_dimension', 'isgrayimage_dimension', (['img'], {}), '(img)\n', (13354, 13359), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((15289, 15301), 'numpy.max', 'np.max', (['data'], {}), '(data)\n', (15295, 15301), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((15334, 15346), 'numpy.min', 'np.min', (['data'], {}), '(data)\n', (15340, 15346), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((16001, 16022), 'xinshuo_miscellaneous.islist', 'islist', (['data_list_tmp'], {}), '(data_list_tmp)\n', (16007, 16022), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((5492, 5532), 'numpy.sum', 'np.sum', (['y_axis[:error_threshold * scale]'], {}), '(y_axis[:error_threshold * scale])\n', (5498, 5532), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((13457, 13484), 'xinshuo_miscellaneous.iscolorimage_dimension', 'iscolorimage_dimension', (['img'], {}), '(img)\n', (13479, 13484), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((18915, 18932), 'xinshuo_miscellaneous.isstring', 'isstring', (['key_tmp'], {}), '(key_tmp)\n', (18923, 18932), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((19016, 19035), 'xinshuo_miscellaneous.isscalar', 'isscalar', (['value_tmp'], {}), '(value_tmp)\n', (19024, 19035), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((19966, 19986), 'numpy.asarray', 'np.asarray', (['key_list'], {}), '(key_list)\n', (19976, 19986), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((20243, 20263), 'numpy.asarray', 'np.asarray', (['key_list'], {}), '(key_list)\n', (20253, 20263), True, 'import matplotlib.pyplot as plt, numpy as np\n'), ((10150, 10168), 'xinshuo_miscellaneous.isstring', 'isstring', (['path_tmp'], {}), '(path_tmp)\n', (10158, 10168), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((19543, 19560), 'xinshuo_miscellaneous.isstring', 'isstring', (['key_tmp'], {}), '(key_tmp)\n', (19551, 19560), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n'), ((19652, 19671), 'xinshuo_miscellaneous.isscalar', 'isscalar', (['value_tmp'], {}), '(value_tmp)\n', (19660, 19671), False, 'from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple\n')] |
import sys
N = int(sys.stdin.readline())
dis = list(map(int, sys.stdin.readline().split()))
coin = list(map(int, sys.stdin.readline().split()))
use_coin = coin[0]
tot = dis[0] * use_coin
for i in range(1, N - 1):
if coin[i] < use_coin:
use_coin = coin[i]
tot += dis[i] * use_coin
print(tot) | [
"sys.stdin.readline"
] | [((20, 40), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (38, 40), False, 'import sys\n'), ((62, 82), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (80, 82), False, 'import sys\n'), ((114, 134), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (132, 134), False, 'import sys\n')] |
import napari
import time
from napari._qt.qthreading import thread_worker
import numpy as np
# create a viewer window
viewer = napari.Viewer()
# https://napari.org/guides/stable/threading.html
@thread_worker
def loop_run():
while True: # endless loop
print("Hello world", time.time())
time.sleep(0.5)
yield np.random.random((2, 2))
def update_layer(image):
"""
Updates the image in the layer 'result'
or adds this layer.
"""
try:
viewer.layers['result'].data = image
except KeyError:
viewer.add_image(image, name='result')
# Start the loop
worker = loop_run()
worker.yielded.connect(update_layer)
worker.start()
# Start napari
napari.run()
| [
"napari.Viewer",
"numpy.random.random",
"time.sleep",
"napari.run",
"time.time"
] | [((128, 143), 'napari.Viewer', 'napari.Viewer', ([], {}), '()\n', (141, 143), False, 'import napari\n'), ((701, 713), 'napari.run', 'napari.run', ([], {}), '()\n', (711, 713), False, 'import napari\n'), ((307, 322), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (317, 322), False, 'import time\n'), ((286, 297), 'time.time', 'time.time', ([], {}), '()\n', (295, 297), False, 'import time\n'), ((337, 361), 'numpy.random.random', 'np.random.random', (['(2, 2)'], {}), '((2, 2))\n', (353, 361), True, 'import numpy as np\n')] |
from math import erf, sqrt
from functools import partial
from ..library.multinomial import multinomial, to_multinomial
def gaussian_cdf(x, mu, sigma):
y = (1.0 + erf((x - mu) / (sigma * sqrt(2.0)))) / 2.0
y = (1.0 + erf((x) / (sqrt(2.0)))) / 2.0
assert y >= 0 and y <= 1.0, 'y is not a valid probability: y={}'.format(y)
return y
def gaussian_cdfp(mu, sigma):
return partial(gaussian_cdf, mu=mu, sigma=sigma)
def gaussian(mu, sigma, block, kernel=None):
'''
Construct to create a discrete approximation of the gaussian distribution using mu and sigma
(gaussian 0 1 blocka)
'''
return multinomial(*multinomial(-3, 3, 64, gaussian_cdfp(float(mu), float(sigma))), offset=block, definitions=kernel.definitions)
| [
"math.sqrt",
"functools.partial"
] | [((390, 431), 'functools.partial', 'partial', (['gaussian_cdf'], {'mu': 'mu', 'sigma': 'sigma'}), '(gaussian_cdf, mu=mu, sigma=sigma)\n', (397, 431), False, 'from functools import partial\n'), ((237, 246), 'math.sqrt', 'sqrt', (['(2.0)'], {}), '(2.0)\n', (241, 246), False, 'from math import erf, sqrt\n'), ((192, 201), 'math.sqrt', 'sqrt', (['(2.0)'], {}), '(2.0)\n', (196, 201), False, 'from math import erf, sqrt\n')] |
from arguments import get_args
import numpy as np
from network.models import MLP_Net
from utils.utils import get_env_params
import torch
import os, gym
"""
script to watch the demo of the ESIL
"""
# process the inputs
def process_inputs(o, g, o_mean, o_std, g_mean, g_std, args):
o_clip = np.clip(o, -args.clip_obs, args.clip_obs)
g_clip = np.clip(g, -args.clip_obs, args.clip_obs)
o_norm = np.clip((o_clip - o_mean) / (o_std), -args.clip_range, args.clip_range)
g_norm = np.clip((g_clip - g_mean) / (g_std), -args.clip_range, args.clip_range)
inputs = np.concatenate([o_norm, g_norm])
inputs = torch.tensor(inputs, dtype=torch.float32).unsqueeze(0)
return inputs
if __name__ == '__main__':
args = get_args()
# create environment
env = gym.make(args.env_name)
# get the environment parameters
env_params = get_env_params(env)
# start to create model
model_path = '{}/{}/model.pt'.format(args.save_dir, args.env_name)
network = MLP_Net(env_params['obs'] + env_params['goal'], env_params['action'], args.dist)
network_model, obs_mean, obs_std, g_mean, g_std = torch.load(model_path, map_location='cpu')
network.load_state_dict(network_model)
network.eval()
# start to do the testing
for i in range(args.demo_length):
observation = env.reset()
# start to do the demo
obs, g = observation['observation'], observation['desired_goal']
for t in range(env._max_episode_steps):
if args.render:
env.render()
inputs = process_inputs(obs, g, obs_mean, obs_std, g_mean, g_std, args)
with torch.no_grad():
_, pi = network(inputs)
if args.dist == 'gauss':
mean, std = pi
input_actions = mean.detach().cpu().numpy().squeeze()
else:
raise NotImplementedError
# put actions into the environment
observation_new, reward, _, info = env.step(input_actions)
obs = observation_new['observation']
print('the episode is: {}, is success: {}'.format(i, info['is_success']))
| [
"numpy.clip",
"utils.utils.get_env_params",
"torch.load",
"torch.tensor",
"torch.no_grad",
"network.models.MLP_Net",
"numpy.concatenate",
"arguments.get_args",
"gym.make"
] | [((295, 336), 'numpy.clip', 'np.clip', (['o', '(-args.clip_obs)', 'args.clip_obs'], {}), '(o, -args.clip_obs, args.clip_obs)\n', (302, 336), True, 'import numpy as np\n'), ((350, 391), 'numpy.clip', 'np.clip', (['g', '(-args.clip_obs)', 'args.clip_obs'], {}), '(g, -args.clip_obs, args.clip_obs)\n', (357, 391), True, 'import numpy as np\n'), ((405, 474), 'numpy.clip', 'np.clip', (['((o_clip - o_mean) / o_std)', '(-args.clip_range)', 'args.clip_range'], {}), '((o_clip - o_mean) / o_std, -args.clip_range, args.clip_range)\n', (412, 474), True, 'import numpy as np\n'), ((490, 559), 'numpy.clip', 'np.clip', (['((g_clip - g_mean) / g_std)', '(-args.clip_range)', 'args.clip_range'], {}), '((g_clip - g_mean) / g_std, -args.clip_range, args.clip_range)\n', (497, 559), True, 'import numpy as np\n'), ((575, 607), 'numpy.concatenate', 'np.concatenate', (['[o_norm, g_norm]'], {}), '([o_norm, g_norm])\n', (589, 607), True, 'import numpy as np\n'), ((733, 743), 'arguments.get_args', 'get_args', ([], {}), '()\n', (741, 743), False, 'from arguments import get_args\n'), ((779, 802), 'gym.make', 'gym.make', (['args.env_name'], {}), '(args.env_name)\n', (787, 802), False, 'import os, gym\n'), ((857, 876), 'utils.utils.get_env_params', 'get_env_params', (['env'], {}), '(env)\n', (871, 876), False, 'from utils.utils import get_env_params\n'), ((990, 1075), 'network.models.MLP_Net', 'MLP_Net', (["(env_params['obs'] + env_params['goal'])", "env_params['action']", 'args.dist'], {}), "(env_params['obs'] + env_params['goal'], env_params['action'], args.dist\n )\n", (997, 1075), False, 'from network.models import MLP_Net\n'), ((1125, 1167), 'torch.load', 'torch.load', (['model_path'], {'map_location': '"""cpu"""'}), "(model_path, map_location='cpu')\n", (1135, 1167), False, 'import torch\n'), ((621, 662), 'torch.tensor', 'torch.tensor', (['inputs'], {'dtype': 'torch.float32'}), '(inputs, dtype=torch.float32)\n', (633, 662), False, 'import torch\n'), ((1642, 1657), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1655, 1657), False, 'import torch\n')] |
from datetime import datetime
from kickbase_api.models._transforms import parse_date, parse_key_value_array_to_dict
from kickbase_api.models.base_model import BaseModel
from kickbase_api.models.league_user_season_stats import LeagueUserSeasonStats
class LeagueUserStats(BaseModel):
name: str = None
profile_image_path: str = None
cover_image_path: str = None
flags: int = None
placement: int = None
points: int = None
team_value: float = None
seasons: [LeagueUserSeasonStats] = None
team_values: {datetime: float}
def __init__(self, d: dict = {}):
self._json_transform = {
"teamValues": parse_key_value_array_to_dict(lambda o: parse_date(o["d"]), lambda o: o["v"]),
"seasons": lambda v: [LeagueUserSeasonStats(_d) for _d in v]
}
self._json_mapping = {
"profileUrl": "profile_image_path",
"coverUrl": "cover_image_path",
"teamValue": "team_value",
"teamValues": "team_values"
}
super().__init__(d)
| [
"kickbase_api.models.league_user_season_stats.LeagueUserSeasonStats",
"kickbase_api.models._transforms.parse_date"
] | [((700, 718), 'kickbase_api.models._transforms.parse_date', 'parse_date', (["o['d']"], {}), "(o['d'])\n", (710, 718), False, 'from kickbase_api.models._transforms import parse_date, parse_key_value_array_to_dict\n'), ((773, 798), 'kickbase_api.models.league_user_season_stats.LeagueUserSeasonStats', 'LeagueUserSeasonStats', (['_d'], {}), '(_d)\n', (794, 798), False, 'from kickbase_api.models.league_user_season_stats import LeagueUserSeasonStats\n')] |
from django.contrib import admin
from .models import Post, Comment, UserProfile
class ProfileAdmin(admin.ModelAdmin):
filter_horizontal =("followers", "following",)
# Register your models here.
admin.site.register(Post)
admin.site.register(Comment)
admin.site.register(UserProfile, admin_class=ProfileAdmin)
| [
"django.contrib.admin.site.register"
] | [((200, 225), 'django.contrib.admin.site.register', 'admin.site.register', (['Post'], {}), '(Post)\n', (219, 225), False, 'from django.contrib import admin\n'), ((226, 254), 'django.contrib.admin.site.register', 'admin.site.register', (['Comment'], {}), '(Comment)\n', (245, 254), False, 'from django.contrib import admin\n'), ((255, 313), 'django.contrib.admin.site.register', 'admin.site.register', (['UserProfile'], {'admin_class': 'ProfileAdmin'}), '(UserProfile, admin_class=ProfileAdmin)\n', (274, 313), False, 'from django.contrib import admin\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-11-27 14:43
from __future__ import unicode_literals
import ckeditor.fields
from django.conf import settings
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
import django_countries.fields
import jagdreisencheck.custom_fields
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('accounts', '0006_auto_20181121_2205'),
('cms', '0020_old_tree_cleanup'),
]
operations = [
migrations.CreateModel(
name='AccommodationPrice',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('acc_type', models.CharField(choices=[('S', 'Self Organized'), ('C', 'Camping Sight'), ('B', 'Bungalow/Simple Accomodation'), ('BB', 'Bed & Breakfast'), ('H', 'Hotel')], max_length=2, null=True, verbose_name='Accommodation')),
('price_hunter', models.FloatField(null=True, verbose_name='Price per hunter')),
('price_non_hunter', models.FloatField(null=True, verbose_name='Price per accompanying person')),
('calc_base', models.CharField(choices=[('DAY', 'Per day')], max_length=3, null=True, verbose_name='Calculation base')),
],
),
migrations.CreateModel(
name='Game',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=150, unique=True, verbose_name='Name')),
('pub_date', models.DateTimeField(auto_now_add=True, verbose_name='Date of Creation')),
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Creator')),
],
options={
'verbose_name': 'Game',
'verbose_name_plural': 'Games',
},
),
migrations.CreateModel(
name='GamePrice',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('type', models.CharField(blank=True, max_length=75, null=True, verbose_name='Gender/Type')),
('calc_base', models.CharField(choices=[('CIC', 'CIC points'), ('PCS', 'Pieces'), ('KGS', 'Per kg'), ('AGE', 'Age class')], max_length=3, null=True, verbose_name='Calculation base')),
('base_range', models.CharField(max_length=20, null=True, verbose_name='Range')),
('trophy_costs', models.FloatField(null=True, verbose_name='Trophy costs')),
('wounded_costs', models.FloatField(null=True, verbose_name='Wounded but not found - costs')),
('private_notes', models.TextField(blank=True, null=True, verbose_name='Private notes')),
('public_notes', models.TextField(blank=True, null=True, verbose_name='Public notes')),
('game', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='travelling.Game', verbose_name='Game')),
],
),
migrations.CreateModel(
name='PriceList',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('last_modified', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Last modified')),
],
),
migrations.CreateModel(
name='Rating',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('language', models.CharField(max_length=6, null=True, verbose_name='Language')),
('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Creation Date')),
('last_modified', models.DateTimeField(blank=True, null=True, verbose_name='Last Modified')),
('agree_to_rules_of_contribution', models.BooleanField(default=False, verbose_name='Agree to Rules of Contribution')),
('name', models.CharField(max_length=90, null=True, verbose_name='Title')),
('description', models.TextField(blank=True, max_length=3000, null=True, verbose_name='Detailed Trip Description')),
('nps_indication', models.PositiveIntegerField(choices=[(1, 'No recommendation'), (2, 'Rather no recommendation'), (3, 'Indifferent'), (4, 'Recommendation'), (5, 'Definite recommendation')], default=3, null=True, verbose_name='Would you recommend the trip?')),
('trophies', django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True, verbose_name='Trophies')),
('meal_option', models.CharField(choices=[('N', 'No Meals Included'), ('B', 'Breakfast Included'), ('H', 'Breakfast & Dinner Included'), ('A', 'All Inclusive')], max_length=2, null=True, verbose_name='Catering Option')),
('meal_quality', models.IntegerField(choices=[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'), (4, 'Rather Good'), (5, 'Good')], null=True, verbose_name='Catering Quality')),
('accommodation_type', models.CharField(choices=[('S', 'Self Organized'), ('C', 'Camping Sight'), ('B', 'Bungalow/Simple Accomodation'), ('BB', 'Bed & Breakfast'), ('H', 'Hotel')], default='S', max_length=2, null=True, verbose_name='Accommodation Type')),
('accommodation_rating', models.IntegerField(choices=[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'), (4, 'Rather Good'), (5, 'Good')], null=True, verbose_name='Accommodation Rating')),
('support_with_issues', models.IntegerField(choices=[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'), (4, 'Rather Good'), (5, 'Good')], null=True, verbose_name='Operator Support with Issues')),
('price_utility', models.IntegerField(choices=[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'), (4, 'Rather Good'), (5, 'Good')], null=True, verbose_name='Price/Utility')),
('use_of_dogs', models.BooleanField(choices=[(False, 'No'), (True, 'Yes')], default=False, verbose_name='Did you make use of dogs?')),
('dog_purpose', models.CharField(blank=True, choices=[('NO', 'No Dogs were needed'), ('NH', 'Chasing Dogs'), ('DR', 'Joint Hunt'), ('PI', 'Deerstalking Support')], max_length=3, null=True, verbose_name='What did you use the dogs for?')),
('dog_quality', models.IntegerField(blank=True, choices=[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'), (4, 'Rather Good'), (5, 'Good')], null=True, verbose_name='Quality of dogs')),
('game_density', models.IntegerField(choices=[(1, 'Too sparse'), (3, 'Rather too sparse'), (5, 'Optimal density'), (3, 'Rather too dense'), (1, 'Too dense')], null=True, verbose_name='How dense was the wildlife?')),
('game_age_dist', models.IntegerField(choices=[(1, 'Too young'), (3, 'Rather too young'), (5, 'Optimal'), (3, 'Rather too old'), (1, 'Too old'), (0, 'Unknown')], null=True, verbose_name="How was the wildlife's age distributed?")),
('game_gender_dist', models.IntegerField(choices=[(1, 'Predominantly female game'), (3, 'Slight overweight of female game'), (5, 'Good gender distribution'), (3, 'Slight overweight of male game'), (1, 'Predominantly male game'), (0, 'Unknown')], null=True, verbose_name="How did you experience the wildlife's gender distribution?")),
('hunt_in_wilderness', models.BooleanField(choices=[(False, 'No'), (True, 'Yes')], default=False, verbose_name='Did you hunt in the wilderness?')),
('check_strike_pos', models.BooleanField(choices=[(False, 'No'), (True, 'Yes')], default=False, verbose_name='Was the strike position of your rifle checked?')),
('check_hunt_license', models.BooleanField(choices=[(False, 'No'), (True, 'Yes')], default=False, verbose_name='Was your hunting license validated?')),
('professional_hunter_quality', models.IntegerField(choices=[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'), (4, 'Rather Good'), (5, 'Good')], null=True, verbose_name='Quality of the professional hunter')),
('customer_support', models.IntegerField(choices=[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'), (4, 'Rather Good'), (5, 'Good')], null=True, verbose_name='Customer Support')),
('hunting_introduction', models.IntegerField(choices=[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'), (4, 'Rather Good'), (5, 'Good')], null=True, verbose_name='Introduction to local hunting conditions')),
('staff_languages', jagdreisencheck.custom_fields.ListField(null=True, verbose_name='Languages spoken at the hunting site')),
('communication_quality', models.IntegerField(choices=[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'), (4, 'Rather Good'), (5, 'Good')], null=True, verbose_name='Communication between staff and yourself')),
('alternative_program', models.BooleanField(choices=[(False, 'No'), (True, 'Yes')], default=False, verbose_name='Did you make use of alternative program')),
('quality_alternative_program', models.IntegerField(blank=True, choices=[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'), (4, 'Rather Good'), (5, 'Good')], null=True, verbose_name='Quality of the alternative program')),
('economic_rating', models.DecimalField(decimal_places=4, max_digits=5, null=True, verbose_name='Economic Rating')),
('ecologic_rating', models.DecimalField(decimal_places=4, max_digits=5, null=True, verbose_name='Ecologic Rating')),
('social_rating', models.DecimalField(decimal_places=4, max_digits=5, null=True, verbose_name='Socio-Cultural Rating')),
('overall_rating', models.DecimalField(decimal_places=4, max_digits=5, null=True, verbose_name='Total Rating')),
],
),
migrations.CreateModel(
name='TravelInquiry',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=150, null=True, verbose_name='Name')),
('email', models.EmailField(max_length=254, null=True, verbose_name='E-Mail')),
('kind_of_inquiry', models.CharField(choices=[('S', 'Solo Travel'), ('HG', 'Group Travel (Only Hunters)'), ('MG', 'Group Travel (Hunters/Non-Hunters)'), ('OT', 'Other')], max_length=2, null=True, verbose_name='Kind of Inquiry')),
('inquiry', ckeditor.fields.RichTextField(null=True, verbose_name='Travel Inquiry')),
('consent_to_be_contacted', models.BooleanField(default=False, verbose_name='Consent to be contacted')),
('date', models.DateTimeField(auto_now_add=True, verbose_name='Date of Inquiry')),
('status', models.BooleanField(default=True, verbose_name='Status')),
],
),
migrations.CreateModel(
name='Trip',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('consent_to_travel_rules', models.BooleanField(default=False, verbose_name='Consent to Publishing Rules')),
('name', models.CharField(blank=True, max_length=150, null=True, verbose_name='Name')),
('country', django_countries.fields.CountryField(max_length=2, verbose_name='Country')),
('region', models.CharField(max_length=300, verbose_name='Region / Territory')),
('available_accommodation_types', jagdreisencheck.custom_fields.ListField(blank=True, null=True, verbose_name='Available Accommodations')),
('private_parking', models.BooleanField(choices=[(False, 'No'), (True, 'Yes')], default=False, verbose_name='Private Parking')),
('airport_transfer', models.NullBooleanField(choices=[(False, 'No'), (True, 'Yes'), (None, 'Unknown')], default=None, verbose_name='Airport Transfer')),
('available_hunting_types', jagdreisencheck.custom_fields.ListField(verbose_name='Hunting Types')),
('rifle_rentals', models.NullBooleanField(choices=[(False, 'No'), (True, 'Yes'), (None, 'Unknown')], default=None, verbose_name='Rifle Rentals')),
('hunting_start_time', models.IntegerField(choices=[(1, 'January'), (2, 'February'), (3, 'March'), (4, 'April'), (5, 'May'), (6, 'June'), (7, 'July'), (8, 'August'), (9, 'September'), (10, 'October'), (11, 'November'), (12, 'December')], default=5, verbose_name='Start of Season')),
('hunting_end_time', models.IntegerField(choices=[(1, 'January'), (2, 'February'), (3, 'March'), (4, 'April'), (5, 'May'), (6, 'June'), (7, 'July'), (8, 'August'), (9, 'September'), (10, 'October'), (11, 'November'), (12, 'December')], default=10, verbose_name='End of Season')),
('family_offers', models.NullBooleanField(choices=[(False, 'No'), (True, 'Yes'), (None, 'Unknown')], default=None, verbose_name='Family Offers')),
('alternative_activities', models.NullBooleanField(choices=[(False, 'No'), (True, 'Yes'), (None, 'Unknown')], default=None, verbose_name='Alternative Offers')),
('available_meal_options', jagdreisencheck.custom_fields.ListField(blank=True, null=True, verbose_name='Catering Options')),
('staff_languages', jagdreisencheck.custom_fields.ListField(verbose_name='Staff Languages')),
('interpreter_at_site', models.NullBooleanField(choices=[(False, 'No'), (True, 'Yes'), (None, 'Unknown')], default=None, verbose_name='Interpreting Service')),
('wireless_coverage', models.NullBooleanField(choices=[(False, 'No'), (True, 'Yes'), (None, 'Unknown')], default=None, verbose_name='Wireless Coverage')),
('broadband_internet', models.NullBooleanField(choices=[(False, 'No'), (True, 'Yes'), (None, 'Unknown')], default=None, verbose_name='Broadband Internet')),
('vendor_link', models.URLField(blank=True, null=True, verbose_name='Vendor Link')),
('description', ckeditor.fields.RichTextField(blank=True, max_length=8000, null=True, verbose_name='Trip Description')),
('featured', models.BooleanField(choices=[(False, 'No'), (True, 'Yes')], default=False, verbose_name='Featured')),
('featured_start_date', models.DateTimeField(auto_now=True, null=True, verbose_name='Featuring Start')),
('featured_end_date', models.DateTimeField(blank=True, null=True, verbose_name='Featuring End')),
('sponsored', models.BooleanField(choices=[(False, 'No'), (True, 'Yes')], default=False, verbose_name='Sponsored')),
('sponsored_start_date', models.DateTimeField(auto_now=True, null=True, verbose_name='Sponsoring Start')),
('sponsored_end_date', models.DateTimeField(blank=True, null=True, verbose_name='Sponsoring End')),
('reviewed', models.BooleanField(choices=[(False, 'No'), (True, 'Yes')], default=False, verbose_name='Reviewed')),
('overall_rating', models.DecimalField(decimal_places=4, max_digits=6, null=True, verbose_name='Overall Rating')),
('rating_economic', models.DecimalField(decimal_places=4, max_digits=6, null=True, verbose_name='Economic Rating')),
('rating_ecologic', models.DecimalField(decimal_places=4, max_digits=6, null=True, verbose_name='Ecologic Rating')),
('rating_sociocultural', models.DecimalField(decimal_places=4, max_digits=6, null=True, verbose_name='Socio-Cultural Rating')),
('slogan', models.CharField(blank=True, max_length=75, null=True, verbose_name='Slogan')),
('pub_date', models.DateTimeField(auto_now=True, verbose_name='Publication Date')),
('last_modified', models.DateTimeField(auto_now=True, null=True, verbose_name='Last Modified')),
('views', models.IntegerField(default=0, verbose_name='Views')),
('tech_name', models.CharField(max_length=30, null=True, verbose_name='Technical Name')),
('slug', models.SlugField(null=True, verbose_name='Absolute URL')),
('headline_image', models.ImageField(blank=True, null=True, upload_to='trips/headline_images/', verbose_name='Title Image')),
('company', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accounts.CompanyName', verbose_name='Company')),
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='creator', to=settings.AUTH_USER_MODEL, verbose_name='Creator')),
('game', models.ManyToManyField(to='travelling.Game', verbose_name='Game')),
('reviewed_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='reviewer', to=settings.AUTH_USER_MODEL, verbose_name='Reviewed By')),
],
options={
'verbose_name': 'Trip',
'verbose_name_plural': 'Trips',
},
),
migrations.CreateModel(
name='TripBestOfModel',
fields=[
('cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, related_name='travelling_tripbestofmodel', serialize=False, to='cms.CMSPlugin')),
('name', models.CharField(max_length=75, verbose_name='Name')),
('num_objects', models.IntegerField(default=10, verbose_name='Number of Entries')),
('set_featured', models.BooleanField(default=False, verbose_name='Show Featured Only')),
('set_sponsored', models.BooleanField(default=False, verbose_name='Show Sponsored Only')),
('template', models.CharField(choices=[('travelling/components/trip-thumbnail.html', 'Standard Template')], default='travelling/components/trip-thumbnail.html', max_length=300, verbose_name='Template')),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
migrations.CreateModel(
name='TripCarouselConfig',
fields=[
('cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, related_name='travelling_tripcarouselconfig', serialize=False, to='cms.CMSPlugin')),
('name', models.CharField(max_length=75, verbose_name='Name')),
('application', models.CharField(max_length=75, verbose_name='Application')),
('model', models.CharField(max_length=75, verbose_name='Database Model')),
('num_objects', models.IntegerField(default=10, verbose_name='Number of Entries')),
('set_featured', models.BooleanField(default=False, verbose_name='Show Featured Only')),
('set_sponsored', models.BooleanField(default=False, verbose_name='Show Sponsored Only')),
('selection_criteria', models.CharField(blank=True, max_length=450, null=True, verbose_name='Selection Criteria')),
('template', models.CharField(choices=[('travelling/components/trip-thumbnail.html', 'Default Template')], max_length=300, verbose_name='Template')),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
migrations.CreateModel(
name='TripCatalogueModel',
fields=[
('cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, related_name='travelling_tripcataloguemodel', serialize=False, to='cms.CMSPlugin')),
('name', models.CharField(max_length=75, verbose_name='Name')),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
migrations.CreateModel(
name='Trophy',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('weight', models.DecimalField(blank=True, decimal_places=4, max_digits=8, null=True, verbose_name='Weight (kg)')),
('length', models.DecimalField(blank=True, decimal_places=4, max_digits=8, null=True, verbose_name='Length (cm)')),
('cic_pt', models.DecimalField(blank=True, decimal_places=4, max_digits=8, null=True, verbose_name='CIC Points')),
('game', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='travelling.Game', verbose_name='Game')),
('rating', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='trophy_rating', to='travelling.Rating', verbose_name='Associated Rating')),
('trip', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='travelling.Trip', verbose_name='Assiciated Trip')),
],
),
migrations.CreateModel(
name='AccommodationPriceList',
fields=[
('pricelist_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='travelling.PriceList')),
('name', models.CharField(max_length=75, verbose_name='Price list name')),
],
bases=('travelling.pricelist',),
),
migrations.CreateModel(
name='GamePriceList',
fields=[
('pricelist_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='travelling.PriceList')),
('name', models.CharField(max_length=75, verbose_name='Price list name')),
],
bases=('travelling.pricelist',),
),
migrations.AddField(
model_name='travelinquiry',
name='trip',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='travelling.Trip', verbose_name='Trip'),
),
migrations.AddField(
model_name='travelinquiry',
name='user',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.IndividualProfile', verbose_name='User'),
),
migrations.AddField(
model_name='rating',
name='trip',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='travelling.Trip', verbose_name='Assiciated Trip'),
),
migrations.AddField(
model_name='rating',
name='user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='Author'),
),
migrations.AddField(
model_name='pricelist',
name='last_modified_by',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='Author'),
),
migrations.AddField(
model_name='pricelist',
name='trip',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='travelling.Trip', verbose_name='Associated Trip'),
),
migrations.AddField(
model_name='pricelist',
name='user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='inital_creator', to=settings.AUTH_USER_MODEL, verbose_name='Author'),
),
migrations.AddField(
model_name='gameprice',
name='price_list',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='travelling.PriceList', verbose_name='Associated Price List'),
),
migrations.AddField(
model_name='accommodationprice',
name='price_list',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='travelling.PriceList', verbose_name='Associated Price List'),
),
migrations.AlterUniqueTogether(
name='trip',
unique_together=set([('company', 'country', 'region')]),
),
migrations.AlterUniqueTogether(
name='rating',
unique_together=set([('user', 'trip')]),
),
]
| [
"django.db.models.EmailField",
"django.db.models.OneToOneField",
"django.db.models.FloatField",
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"django.db.models.NullBooleanField",
"django.db.models.ManyToManyField",
"django.db.models.BooleanField",
"d... | [((448, 505), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (479, 505), False, 'from django.db import migrations, models\n'), ((22575, 22685), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""travelling.Trip"""', 'verbose_name': '"""Trip"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'travelling.Trip', verbose_name='Trip')\n", (22592, 22685), False, 'from django.db import migrations, models\n'), ((22805, 22950), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""accounts.IndividualProfile"""', 'verbose_name': '"""User"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.SET_NULL, to='accounts.IndividualProfile', verbose_name='User')\n", (22822, 22950), False, 'from django.db import migrations, models\n'), ((23063, 23194), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""travelling.Trip"""', 'verbose_name': '"""Assiciated Trip"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='travelling.Trip', verbose_name='Assiciated Trip')\n", (23080, 23194), False, 'from django.db import migrations, models\n'), ((23308, 23438), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""Author"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n to=settings.AUTH_USER_MODEL, verbose_name='Author')\n", (23325, 23438), False, 'from django.db import migrations, models\n'), ((23567, 23697), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""Author"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n to=settings.AUTH_USER_MODEL, verbose_name='Author')\n", (23584, 23697), False, 'from django.db import migrations, models\n'), ((23814, 23935), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""travelling.Trip"""', 'verbose_name': '"""Associated Trip"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'travelling.Trip', verbose_name='Associated Trip')\n", (23831, 23935), False, 'from django.db import migrations, models\n'), ((24051, 24216), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""inital_creator"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""Author"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n related_name='inital_creator', to=settings.AUTH_USER_MODEL,\n verbose_name='Author')\n", (24068, 24216), False, 'from django.db import migrations, models\n'), ((24335, 24477), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""travelling.PriceList"""', 'verbose_name': '"""Associated Price List"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='travelling.PriceList', verbose_name='Associated Price List')\n", (24352, 24477), False, 'from django.db import migrations, models\n'), ((24609, 24751), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""travelling.PriceList"""', 'verbose_name': '"""Associated Price List"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='travelling.PriceList', verbose_name='Associated Price List')\n", (24626, 24751), False, 'from django.db import migrations, models\n'), ((739, 832), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (755, 832), False, 'from django.db import migrations, models\n'), ((860, 1080), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('S', 'Self Organized'), ('C', 'Camping Sight'), ('B',\n 'Bungalow/Simple Accomodation'), ('BB', 'Bed & Breakfast'), ('H', 'Hotel')]", 'max_length': '(2)', 'null': '(True)', 'verbose_name': '"""Accommodation"""'}), "(choices=[('S', 'Self Organized'), ('C', 'Camping Sight'),\n ('B', 'Bungalow/Simple Accomodation'), ('BB', 'Bed & Breakfast'), ('H',\n 'Hotel')], max_length=2, null=True, verbose_name='Accommodation')\n", (876, 1080), False, 'from django.db import migrations, models\n'), ((1108, 1169), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'verbose_name': '"""Price per hunter"""'}), "(null=True, verbose_name='Price per hunter')\n", (1125, 1169), False, 'from django.db import migrations, models\n'), ((1209, 1283), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'verbose_name': '"""Price per accompanying person"""'}), "(null=True, verbose_name='Price per accompanying person')\n", (1226, 1283), False, 'from django.db import migrations, models\n'), ((1316, 1424), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('DAY', 'Per day')]", 'max_length': '(3)', 'null': '(True)', 'verbose_name': '"""Calculation base"""'}), "(choices=[('DAY', 'Per day')], max_length=3, null=True,\n verbose_name='Calculation base')\n", (1332, 1424), False, 'from django.db import migrations, models\n'), ((1550, 1643), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1566, 1643), False, 'from django.db import migrations, models\n'), ((1667, 1733), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(150)', 'unique': '(True)', 'verbose_name': '"""Name"""'}), "(max_length=150, unique=True, verbose_name='Name')\n", (1683, 1733), False, 'from django.db import migrations, models\n'), ((1765, 1837), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""Date of Creation"""'}), "(auto_now_add=True, verbose_name='Date of Creation')\n", (1785, 1837), False, 'from django.db import migrations, models\n'), ((1871, 2001), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""Creator"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n to=settings.AUTH_USER_MODEL, verbose_name='Creator')\n", (1888, 2001), False, 'from django.db import migrations, models\n'), ((2257, 2350), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (2273, 2350), False, 'from django.db import migrations, models\n'), ((2374, 2461), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(75)', 'null': '(True)', 'verbose_name': '"""Gender/Type"""'}), "(blank=True, max_length=75, null=True, verbose_name=\n 'Gender/Type')\n", (2390, 2461), False, 'from django.db import migrations, models\n'), ((2489, 2665), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('CIC', 'CIC points'), ('PCS', 'Pieces'), ('KGS', 'Per kg'), ('AGE',\n 'Age class')]", 'max_length': '(3)', 'null': '(True)', 'verbose_name': '"""Calculation base"""'}), "(choices=[('CIC', 'CIC points'), ('PCS', 'Pieces'), ('KGS',\n 'Per kg'), ('AGE', 'Age class')], max_length=3, null=True, verbose_name\n ='Calculation base')\n", (2505, 2665), False, 'from django.db import migrations, models\n'), ((2690, 2754), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'null': '(True)', 'verbose_name': '"""Range"""'}), "(max_length=20, null=True, verbose_name='Range')\n", (2706, 2754), False, 'from django.db import migrations, models\n'), ((2790, 2847), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'verbose_name': '"""Trophy costs"""'}), "(null=True, verbose_name='Trophy costs')\n", (2807, 2847), False, 'from django.db import migrations, models\n'), ((2884, 2958), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'verbose_name': '"""Wounded but not found - costs"""'}), "(null=True, verbose_name='Wounded but not found - costs')\n", (2901, 2958), False, 'from django.db import migrations, models\n'), ((2995, 3064), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Private notes"""'}), "(blank=True, null=True, verbose_name='Private notes')\n", (3011, 3064), False, 'from django.db import migrations, models\n'), ((3100, 3168), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Public notes"""'}), "(blank=True, null=True, verbose_name='Public notes')\n", (3116, 3168), False, 'from django.db import migrations, models\n'), ((3196, 3306), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""travelling.Game"""', 'verbose_name': '"""Game"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'travelling.Game', verbose_name='Game')\n", (3213, 3306), False, 'from django.db import migrations, models\n'), ((3436, 3529), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (3452, 3529), False, 'from django.db import migrations, models\n'), ((3562, 3647), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'null': '(True)', 'verbose_name': '"""Last modified"""'}), "(auto_now_add=True, null=True, verbose_name='Last modified'\n )\n", (3582, 3647), False, 'from django.db import migrations, models\n'), ((3774, 3867), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (3790, 3867), False, 'from django.db import migrations, models\n'), ((3895, 3961), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(6)', 'null': '(True)', 'verbose_name': '"""Language"""'}), "(max_length=6, null=True, verbose_name='Language')\n", (3911, 3961), False, 'from django.db import migrations, models\n'), ((3997, 4082), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'null': '(True)', 'verbose_name': '"""Creation Date"""'}), "(auto_now_add=True, null=True, verbose_name='Creation Date'\n )\n", (4017, 4082), False, 'from django.db import migrations, models\n'), ((4114, 4187), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Last Modified"""'}), "(blank=True, null=True, verbose_name='Last Modified')\n", (4134, 4187), False, 'from django.db import migrations, models\n'), ((4241, 4327), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Agree to Rules of Contribution"""'}), "(default=False, verbose_name=\n 'Agree to Rules of Contribution')\n", (4260, 4327), False, 'from django.db import migrations, models\n'), ((4350, 4414), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(90)', 'null': '(True)', 'verbose_name': '"""Title"""'}), "(max_length=90, null=True, verbose_name='Title')\n", (4366, 4414), False, 'from django.db import migrations, models\n'), ((4449, 4552), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'max_length': '(3000)', 'null': '(True)', 'verbose_name': '"""Detailed Trip Description"""'}), "(blank=True, max_length=3000, null=True, verbose_name=\n 'Detailed Trip Description')\n", (4465, 4552), False, 'from django.db import migrations, models\n'), ((4585, 4837), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'choices': "[(1, 'No recommendation'), (2, 'Rather no recommendation'), (3,\n 'Indifferent'), (4, 'Recommendation'), (5, 'Definite recommendation')]", 'default': '(3)', 'null': '(True)', 'verbose_name': '"""Would you recommend the trip?"""'}), "(choices=[(1, 'No recommendation'), (2,\n 'Rather no recommendation'), (3, 'Indifferent'), (4, 'Recommendation'),\n (5, 'Definite recommendation')], default=3, null=True, verbose_name=\n 'Would you recommend the trip?')\n", (4612, 4837), False, 'from django.db import migrations, models\n'), ((4985, 5195), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('N', 'No Meals Included'), ('B', 'Breakfast Included'), ('H',\n 'Breakfast & Dinner Included'), ('A', 'All Inclusive')]", 'max_length': '(2)', 'null': '(True)', 'verbose_name': '"""Catering Option"""'}), "(choices=[('N', 'No Meals Included'), ('B',\n 'Breakfast Included'), ('H', 'Breakfast & Dinner Included'), ('A',\n 'All Inclusive')], max_length=2, null=True, verbose_name='Catering Option')\n", (5001, 5195), False, 'from django.db import migrations, models\n'), ((5223, 5385), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'), (4, 'Rather Good'), (5, 'Good')\n ]", 'null': '(True)', 'verbose_name': '"""Catering Quality"""'}), "(choices=[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'),\n (4, 'Rather Good'), (5, 'Good')], null=True, verbose_name=\n 'Catering Quality')\n", (5242, 5385), False, 'from django.db import migrations, models\n'), ((5418, 5661), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('S', 'Self Organized'), ('C', 'Camping Sight'), ('B',\n 'Bungalow/Simple Accomodation'), ('BB', 'Bed & Breakfast'), ('H', 'Hotel')]", 'default': '"""S"""', 'max_length': '(2)', 'null': '(True)', 'verbose_name': '"""Accommodation Type"""'}), "(choices=[('S', 'Self Organized'), ('C', 'Camping Sight'),\n ('B', 'Bungalow/Simple Accomodation'), ('BB', 'Bed & Breakfast'), ('H',\n 'Hotel')], default='S', max_length=2, null=True, verbose_name=\n 'Accommodation Type')\n", (5434, 5661), False, 'from django.db import migrations, models\n'), ((5692, 5858), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'), (4, 'Rather Good'), (5, 'Good')\n ]", 'null': '(True)', 'verbose_name': '"""Accommodation Rating"""'}), "(choices=[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'),\n (4, 'Rather Good'), (5, 'Good')], null=True, verbose_name=\n 'Accommodation Rating')\n", (5711, 5858), False, 'from django.db import migrations, models\n'), ((5892, 6066), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'), (4, 'Rather Good'), (5, 'Good')\n ]", 'null': '(True)', 'verbose_name': '"""Operator Support with Issues"""'}), "(choices=[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'),\n (4, 'Rather Good'), (5, 'Good')], null=True, verbose_name=\n 'Operator Support with Issues')\n", (5911, 6066), False, 'from django.db import migrations, models\n'), ((6094, 6248), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'), (4, 'Rather Good'), (5, 'Good')\n ]", 'null': '(True)', 'verbose_name': '"""Price/Utility"""'}), "(choices=[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'),\n (4, 'Rather Good'), (5, 'Good')], null=True, verbose_name='Price/Utility')\n", (6113, 6248), False, 'from django.db import migrations, models\n'), ((6279, 6399), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'choices': "[(False, 'No'), (True, 'Yes')]", 'default': '(False)', 'verbose_name': '"""Did you make use of dogs?"""'}), "(choices=[(False, 'No'), (True, 'Yes')], default=False,\n verbose_name='Did you make use of dogs?')\n", (6298, 6399), False, 'from django.db import migrations, models\n'), ((6430, 6657), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'choices': "[('NO', 'No Dogs were needed'), ('NH', 'Chasing Dogs'), ('DR', 'Joint Hunt'\n ), ('PI', 'Deerstalking Support')]", 'max_length': '(3)', 'null': '(True)', 'verbose_name': '"""What did you use the dogs for?"""'}), "(blank=True, choices=[('NO', 'No Dogs were needed'), ('NH',\n 'Chasing Dogs'), ('DR', 'Joint Hunt'), ('PI', 'Deerstalking Support')],\n max_length=3, null=True, verbose_name='What did you use the dogs for?')\n", (6446, 6657), False, 'from django.db import migrations, models\n'), ((6684, 6857), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'choices': "[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'), (4, 'Rather Good'), (5, 'Good')\n ]", 'null': '(True)', 'verbose_name': '"""Quality of dogs"""'}), "(blank=True, choices=[(1, 'Bad'), (2, 'Rather Bad'), (3,\n 'Neutral'), (4, 'Rather Good'), (5, 'Good')], null=True, verbose_name=\n 'Quality of dogs')\n", (6703, 6857), False, 'from django.db import migrations, models\n'), ((6884, 7090), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(1, 'Too sparse'), (3, 'Rather too sparse'), (5, 'Optimal density'), (3,\n 'Rather too dense'), (1, 'Too dense')]", 'null': '(True)', 'verbose_name': '"""How dense was the wildlife?"""'}), "(choices=[(1, 'Too sparse'), (3, 'Rather too sparse'), (\n 5, 'Optimal density'), (3, 'Rather too dense'), (1, 'Too dense')], null\n =True, verbose_name='How dense was the wildlife?')\n", (6903, 7090), False, 'from django.db import migrations, models\n'), ((7117, 7335), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(1, 'Too young'), (3, 'Rather too young'), (5, 'Optimal'), (3,\n 'Rather too old'), (1, 'Too old'), (0, 'Unknown')]", 'null': '(True)', 'verbose_name': '"""How was the wildlife\'s age distributed?"""'}), '(choices=[(1, \'Too young\'), (3, \'Rather too young\'), (5,\n \'Optimal\'), (3, \'Rather too old\'), (1, \'Too old\'), (0, \'Unknown\')],\n null=True, verbose_name="How was the wildlife\'s age distributed?")\n', (7136, 7335), False, 'from django.db import migrations, models\n'), ((7367, 7696), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(1, 'Predominantly female game'), (3, 'Slight overweight of female game'),\n (5, 'Good gender distribution'), (3, 'Slight overweight of male game'),\n (1, 'Predominantly male game'), (0, 'Unknown')]", 'null': '(True)', 'verbose_name': '"""How did you experience the wildlife\'s gender distribution?"""'}), '(choices=[(1, \'Predominantly female game\'), (3,\n \'Slight overweight of female game\'), (5, \'Good gender distribution\'), (\n 3, \'Slight overweight of male game\'), (1, \'Predominantly male game\'), (\n 0, \'Unknown\')], null=True, verbose_name=\n "How did you experience the wildlife\'s gender distribution?")\n', (7386, 7696), False, 'from django.db import migrations, models\n'), ((7719, 7845), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'choices': "[(False, 'No'), (True, 'Yes')]", 'default': '(False)', 'verbose_name': '"""Did you hunt in the wilderness?"""'}), "(choices=[(False, 'No'), (True, 'Yes')], default=False,\n verbose_name='Did you hunt in the wilderness?')\n", (7738, 7845), False, 'from django.db import migrations, models\n'), ((7881, 8022), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'choices': "[(False, 'No'), (True, 'Yes')]", 'default': '(False)', 'verbose_name': '"""Was the strike position of your rifle checked?"""'}), "(choices=[(False, 'No'), (True, 'Yes')], default=False,\n verbose_name='Was the strike position of your rifle checked?')\n", (7900, 8022), False, 'from django.db import migrations, models\n'), ((8060, 8190), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'choices': "[(False, 'No'), (True, 'Yes')]", 'default': '(False)', 'verbose_name': '"""Was your hunting license validated?"""'}), "(choices=[(False, 'No'), (True, 'Yes')], default=False,\n verbose_name='Was your hunting license validated?')\n", (8079, 8190), False, 'from django.db import migrations, models\n'), ((8237, 8417), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'), (4, 'Rather Good'), (5, 'Good')\n ]", 'null': '(True)', 'verbose_name': '"""Quality of the professional hunter"""'}), "(choices=[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'),\n (4, 'Rather Good'), (5, 'Good')], null=True, verbose_name=\n 'Quality of the professional hunter')\n", (8256, 8417), False, 'from django.db import migrations, models\n'), ((8448, 8610), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'), (4, 'Rather Good'), (5, 'Good')\n ]", 'null': '(True)', 'verbose_name': '"""Customer Support"""'}), "(choices=[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'),\n (4, 'Rather Good'), (5, 'Good')], null=True, verbose_name=\n 'Customer Support')\n", (8467, 8610), False, 'from django.db import migrations, models\n'), ((8645, 8831), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'), (4, 'Rather Good'), (5, 'Good')\n ]", 'null': '(True)', 'verbose_name': '"""Introduction to local hunting conditions"""'}), "(choices=[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'),\n (4, 'Rather Good'), (5, 'Good')], null=True, verbose_name=\n 'Introduction to local hunting conditions')\n", (8664, 8831), False, 'from django.db import migrations, models\n'), ((9009, 9195), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'), (4, 'Rather Good'), (5, 'Good')\n ]", 'null': '(True)', 'verbose_name': '"""Communication between staff and yourself"""'}), "(choices=[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'),\n (4, 'Rather Good'), (5, 'Good')], null=True, verbose_name=\n 'Communication between staff and yourself')\n", (9028, 9195), False, 'from django.db import migrations, models\n'), ((9229, 9363), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'choices': "[(False, 'No'), (True, 'Yes')]", 'default': '(False)', 'verbose_name': '"""Did you make use of alternative program"""'}), "(choices=[(False, 'No'), (True, 'Yes')], default=False,\n verbose_name='Did you make use of alternative program')\n", (9248, 9363), False, 'from django.db import migrations, models\n'), ((9410, 9602), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'choices': "[(1, 'Bad'), (2, 'Rather Bad'), (3, 'Neutral'), (4, 'Rather Good'), (5, 'Good')\n ]", 'null': '(True)', 'verbose_name': '"""Quality of the alternative program"""'}), "(blank=True, choices=[(1, 'Bad'), (2, 'Rather Bad'), (3,\n 'Neutral'), (4, 'Rather Good'), (5, 'Good')], null=True, verbose_name=\n 'Quality of the alternative program')\n", (9429, 9602), False, 'from django.db import migrations, models\n'), ((9632, 9731), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(4)', 'max_digits': '(5)', 'null': '(True)', 'verbose_name': '"""Economic Rating"""'}), "(decimal_places=4, max_digits=5, null=True, verbose_name\n ='Economic Rating')\n", (9651, 9731), False, 'from django.db import migrations, models\n'), ((9765, 9864), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(4)', 'max_digits': '(5)', 'null': '(True)', 'verbose_name': '"""Ecologic Rating"""'}), "(decimal_places=4, max_digits=5, null=True, verbose_name\n ='Ecologic Rating')\n", (9784, 9864), False, 'from django.db import migrations, models\n'), ((9896, 10001), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(4)', 'max_digits': '(5)', 'null': '(True)', 'verbose_name': '"""Socio-Cultural Rating"""'}), "(decimal_places=4, max_digits=5, null=True, verbose_name\n ='Socio-Cultural Rating')\n", (9915, 10001), False, 'from django.db import migrations, models\n'), ((10034, 10130), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(4)', 'max_digits': '(5)', 'null': '(True)', 'verbose_name': '"""Total Rating"""'}), "(decimal_places=4, max_digits=5, null=True, verbose_name\n ='Total Rating')\n", (10053, 10130), False, 'from django.db import migrations, models\n'), ((10264, 10357), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (10280, 10357), False, 'from django.db import migrations, models\n'), ((10381, 10445), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(150)', 'null': '(True)', 'verbose_name': '"""Name"""'}), "(max_length=150, null=True, verbose_name='Name')\n", (10397, 10445), False, 'from django.db import migrations, models\n'), ((10474, 10541), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(254)', 'null': '(True)', 'verbose_name': '"""E-Mail"""'}), "(max_length=254, null=True, verbose_name='E-Mail')\n", (10491, 10541), False, 'from django.db import migrations, models\n'), ((10580, 10799), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('S', 'Solo Travel'), ('HG', 'Group Travel (Only Hunters)'), ('MG',\n 'Group Travel (Hunters/Non-Hunters)'), ('OT', 'Other')]", 'max_length': '(2)', 'null': '(True)', 'verbose_name': '"""Kind of Inquiry"""'}), "(choices=[('S', 'Solo Travel'), ('HG',\n 'Group Travel (Only Hunters)'), ('MG',\n 'Group Travel (Hunters/Non-Hunters)'), ('OT', 'Other')], max_length=2,\n null=True, verbose_name='Kind of Inquiry')\n", (10596, 10799), False, 'from django.db import migrations, models\n'), ((10936, 11010), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Consent to be contacted"""'}), "(default=False, verbose_name='Consent to be contacted')\n", (10955, 11010), False, 'from django.db import migrations, models\n'), ((11038, 11109), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""Date of Inquiry"""'}), "(auto_now_add=True, verbose_name='Date of Inquiry')\n", (11058, 11109), False, 'from django.db import migrations, models\n'), ((11139, 11195), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)', 'verbose_name': '"""Status"""'}), "(default=True, verbose_name='Status')\n", (11158, 11195), False, 'from django.db import migrations, models\n'), ((11325, 11418), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (11341, 11418), False, 'from django.db import migrations, models\n'), ((11461, 11539), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Consent to Publishing Rules"""'}), "(default=False, verbose_name='Consent to Publishing Rules')\n", (11480, 11539), False, 'from django.db import migrations, models\n'), ((11567, 11643), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(150)', 'null': '(True)', 'verbose_name': '"""Name"""'}), "(blank=True, max_length=150, null=True, verbose_name='Name')\n", (11583, 11643), False, 'from django.db import migrations, models\n'), ((11778, 11845), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'verbose_name': '"""Region / Territory"""'}), "(max_length=300, verbose_name='Region / Territory')\n", (11794, 11845), False, 'from django.db import migrations, models\n'), ((12040, 12150), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'choices': "[(False, 'No'), (True, 'Yes')]", 'default': '(False)', 'verbose_name': '"""Private Parking"""'}), "(choices=[(False, 'No'), (True, 'Yes')], default=False,\n verbose_name='Private Parking')\n", (12059, 12150), False, 'from django.db import migrations, models\n'), ((12186, 12319), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'choices': "[(False, 'No'), (True, 'Yes'), (None, 'Unknown')]", 'default': 'None', 'verbose_name': '"""Airport Transfer"""'}), "(choices=[(False, 'No'), (True, 'Yes'), (None,\n 'Unknown')], default=None, verbose_name='Airport Transfer')\n", (12209, 12319), False, 'from django.db import migrations, models\n'), ((12468, 12598), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'choices': "[(False, 'No'), (True, 'Yes'), (None, 'Unknown')]", 'default': 'None', 'verbose_name': '"""Rifle Rentals"""'}), "(choices=[(False, 'No'), (True, 'Yes'), (None,\n 'Unknown')], default=None, verbose_name='Rifle Rentals')\n", (12491, 12598), False, 'from django.db import migrations, models\n'), ((12636, 12905), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(1, 'January'), (2, 'February'), (3, 'March'), (4, 'April'), (5, 'May'), (\n 6, 'June'), (7, 'July'), (8, 'August'), (9, 'September'), (10,\n 'October'), (11, 'November'), (12, 'December')]", 'default': '(5)', 'verbose_name': '"""Start of Season"""'}), "(choices=[(1, 'January'), (2, 'February'), (3, 'March'),\n (4, 'April'), (5, 'May'), (6, 'June'), (7, 'July'), (8, 'August'), (9,\n 'September'), (10, 'October'), (11, 'November'), (12, 'December')],\n default=5, verbose_name='Start of Season')\n", (12655, 12905), False, 'from django.db import migrations, models\n'), ((12933, 13201), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(1, 'January'), (2, 'February'), (3, 'March'), (4, 'April'), (5, 'May'), (\n 6, 'June'), (7, 'July'), (8, 'August'), (9, 'September'), (10,\n 'October'), (11, 'November'), (12, 'December')]", 'default': '(10)', 'verbose_name': '"""End of Season"""'}), "(choices=[(1, 'January'), (2, 'February'), (3, 'March'),\n (4, 'April'), (5, 'May'), (6, 'June'), (7, 'July'), (8, 'August'), (9,\n 'September'), (10, 'October'), (11, 'November'), (12, 'December')],\n default=10, verbose_name='End of Season')\n", (12952, 13201), False, 'from django.db import migrations, models\n'), ((13226, 13356), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'choices': "[(False, 'No'), (True, 'Yes'), (None, 'Unknown')]", 'default': 'None', 'verbose_name': '"""Family Offers"""'}), "(choices=[(False, 'No'), (True, 'Yes'), (None,\n 'Unknown')], default=None, verbose_name='Family Offers')\n", (13249, 13356), False, 'from django.db import migrations, models\n'), ((13398, 13533), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'choices': "[(False, 'No'), (True, 'Yes'), (None, 'Unknown')]", 'default': 'None', 'verbose_name': '"""Alternative Offers"""'}), "(choices=[(False, 'No'), (True, 'Yes'), (None,\n 'Unknown')], default=None, verbose_name='Alternative Offers')\n", (13421, 13533), False, 'from django.db import migrations, models\n'), ((13823, 13960), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'choices': "[(False, 'No'), (True, 'Yes'), (None, 'Unknown')]", 'default': 'None', 'verbose_name': '"""Interpreting Service"""'}), "(choices=[(False, 'No'), (True, 'Yes'), (None,\n 'Unknown')], default=None, verbose_name='Interpreting Service')\n", (13846, 13960), False, 'from django.db import migrations, models\n'), ((13997, 14131), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'choices': "[(False, 'No'), (True, 'Yes'), (None, 'Unknown')]", 'default': 'None', 'verbose_name': '"""Wireless Coverage"""'}), "(choices=[(False, 'No'), (True, 'Yes'), (None,\n 'Unknown')], default=None, verbose_name='Wireless Coverage')\n", (14020, 14131), False, 'from django.db import migrations, models\n'), ((14169, 14304), 'django.db.models.NullBooleanField', 'models.NullBooleanField', ([], {'choices': "[(False, 'No'), (True, 'Yes'), (None, 'Unknown')]", 'default': 'None', 'verbose_name': '"""Broadband Internet"""'}), "(choices=[(False, 'No'), (True, 'Yes'), (None,\n 'Unknown')], default=None, verbose_name='Broadband Internet')\n", (14192, 14304), False, 'from django.db import migrations, models\n'), ((14335, 14401), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Vendor Link"""'}), "(blank=True, null=True, verbose_name='Vendor Link')\n", (14350, 14401), False, 'from django.db import migrations, models\n'), ((14570, 14673), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'choices': "[(False, 'No'), (True, 'Yes')]", 'default': '(False)', 'verbose_name': '"""Featured"""'}), "(choices=[(False, 'No'), (True, 'Yes')], default=False,\n verbose_name='Featured')\n", (14589, 14673), False, 'from django.db import migrations, models\n'), ((14712, 14790), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'null': '(True)', 'verbose_name': '"""Featuring Start"""'}), "(auto_now=True, null=True, verbose_name='Featuring Start')\n", (14732, 14790), False, 'from django.db import migrations, models\n'), ((14831, 14904), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Featuring End"""'}), "(blank=True, null=True, verbose_name='Featuring End')\n", (14851, 14904), False, 'from django.db import migrations, models\n'), ((14937, 15041), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'choices': "[(False, 'No'), (True, 'Yes')]", 'default': '(False)', 'verbose_name': '"""Sponsored"""'}), "(choices=[(False, 'No'), (True, 'Yes')], default=False,\n verbose_name='Sponsored')\n", (14956, 15041), False, 'from django.db import migrations, models\n'), ((15081, 15160), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'null': '(True)', 'verbose_name': '"""Sponsoring Start"""'}), "(auto_now=True, null=True, verbose_name='Sponsoring Start')\n", (15101, 15160), False, 'from django.db import migrations, models\n'), ((15202, 15276), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Sponsoring End"""'}), "(blank=True, null=True, verbose_name='Sponsoring End')\n", (15222, 15276), False, 'from django.db import migrations, models\n'), ((15308, 15411), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'choices': "[(False, 'No'), (True, 'Yes')]", 'default': '(False)', 'verbose_name': '"""Reviewed"""'}), "(choices=[(False, 'No'), (True, 'Yes')], default=False,\n verbose_name='Reviewed')\n", (15327, 15411), False, 'from django.db import migrations, models\n'), ((15445, 15543), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(4)', 'max_digits': '(6)', 'null': '(True)', 'verbose_name': '"""Overall Rating"""'}), "(decimal_places=4, max_digits=6, null=True, verbose_name\n ='Overall Rating')\n", (15464, 15543), False, 'from django.db import migrations, models\n'), ((15577, 15676), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(4)', 'max_digits': '(6)', 'null': '(True)', 'verbose_name': '"""Economic Rating"""'}), "(decimal_places=4, max_digits=6, null=True, verbose_name\n ='Economic Rating')\n", (15596, 15676), False, 'from django.db import migrations, models\n'), ((15710, 15809), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(4)', 'max_digits': '(6)', 'null': '(True)', 'verbose_name': '"""Ecologic Rating"""'}), "(decimal_places=4, max_digits=6, null=True, verbose_name\n ='Ecologic Rating')\n", (15729, 15809), False, 'from django.db import migrations, models\n'), ((15848, 15953), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(4)', 'max_digits': '(6)', 'null': '(True)', 'verbose_name': '"""Socio-Cultural Rating"""'}), "(decimal_places=4, max_digits=6, null=True, verbose_name\n ='Socio-Cultural Rating')\n", (15867, 15953), False, 'from django.db import migrations, models\n'), ((15978, 16055), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(75)', 'null': '(True)', 'verbose_name': '"""Slogan"""'}), "(blank=True, max_length=75, null=True, verbose_name='Slogan')\n", (15994, 16055), False, 'from django.db import migrations, models\n'), ((16087, 16155), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Publication Date"""'}), "(auto_now=True, verbose_name='Publication Date')\n", (16107, 16155), False, 'from django.db import migrations, models\n'), ((16192, 16268), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'null': '(True)', 'verbose_name': '"""Last Modified"""'}), "(auto_now=True, null=True, verbose_name='Last Modified')\n", (16212, 16268), False, 'from django.db import migrations, models\n'), ((16297, 16349), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)', 'verbose_name': '"""Views"""'}), "(default=0, verbose_name='Views')\n", (16316, 16349), False, 'from django.db import migrations, models\n'), ((16382, 16455), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)', 'null': '(True)', 'verbose_name': '"""Technical Name"""'}), "(max_length=30, null=True, verbose_name='Technical Name')\n", (16398, 16455), False, 'from django.db import migrations, models\n'), ((16483, 16539), 'django.db.models.SlugField', 'models.SlugField', ([], {'null': '(True)', 'verbose_name': '"""Absolute URL"""'}), "(null=True, verbose_name='Absolute URL')\n", (16499, 16539), False, 'from django.db import migrations, models\n'), ((16577, 16685), 'django.db.models.ImageField', 'models.ImageField', ([], {'blank': '(True)', 'null': '(True)', 'upload_to': '"""trips/headline_images/"""', 'verbose_name': '"""Title Image"""'}), "(blank=True, null=True, upload_to='trips/headline_images/',\n verbose_name='Title Image')\n", (16594, 16685), False, 'from django.db import migrations, models\n'), ((16712, 16830), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""accounts.CompanyName"""', 'verbose_name': '"""Company"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'accounts.CompanyName', verbose_name='Company')\n", (16729, 16830), False, 'from django.db import migrations, models\n'), ((16859, 17018), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""creator"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""Creator"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n related_name='creator', to=settings.AUTH_USER_MODEL, verbose_name='Creator'\n )\n", (16876, 17018), False, 'from django.db import migrations, models\n'), ((17037, 17102), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'to': '"""travelling.Game"""', 'verbose_name': '"""Game"""'}), "(to='travelling.Game', verbose_name='Game')\n", (17059, 17102), False, 'from django.db import migrations, models\n'), ((17137, 17314), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""reviewer"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""Reviewed By"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.SET_NULL, related_name='reviewer', to=settings.AUTH_USER_MODEL,\n verbose_name='Reviewed By')\n", (17154, 17314), False, 'from django.db import migrations, models\n'), ((17582, 17792), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'auto_created': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'parent_link': '(True)', 'primary_key': '(True)', 'related_name': '"""travelling_tripbestofmodel"""', 'serialize': '(False)', 'to': '"""cms.CMSPlugin"""'}), "(auto_created=True, on_delete=django.db.models.deletion\n .CASCADE, parent_link=True, primary_key=True, related_name=\n 'travelling_tripbestofmodel', serialize=False, to='cms.CMSPlugin')\n", (17602, 17792), False, 'from django.db import migrations, models\n'), ((17810, 17862), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(75)', 'verbose_name': '"""Name"""'}), "(max_length=75, verbose_name='Name')\n", (17826, 17862), False, 'from django.db import migrations, models\n'), ((17897, 17962), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(10)', 'verbose_name': '"""Number of Entries"""'}), "(default=10, verbose_name='Number of Entries')\n", (17916, 17962), False, 'from django.db import migrations, models\n'), ((17998, 18067), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Show Featured Only"""'}), "(default=False, verbose_name='Show Featured Only')\n", (18017, 18067), False, 'from django.db import migrations, models\n'), ((18104, 18174), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Show Sponsored Only"""'}), "(default=False, verbose_name='Show Sponsored Only')\n", (18123, 18174), False, 'from django.db import migrations, models\n'), ((18206, 18407), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('travelling/components/trip-thumbnail.html', 'Standard Template')]", 'default': '"""travelling/components/trip-thumbnail.html"""', 'max_length': '(300)', 'verbose_name': '"""Template"""'}), "(choices=[('travelling/components/trip-thumbnail.html',\n 'Standard Template')], default=\n 'travelling/components/trip-thumbnail.html', max_length=300,\n verbose_name='Template')\n", (18222, 18407), False, 'from django.db import migrations, models\n'), ((18659, 18872), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'auto_created': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'parent_link': '(True)', 'primary_key': '(True)', 'related_name': '"""travelling_tripcarouselconfig"""', 'serialize': '(False)', 'to': '"""cms.CMSPlugin"""'}), "(auto_created=True, on_delete=django.db.models.deletion\n .CASCADE, parent_link=True, primary_key=True, related_name=\n 'travelling_tripcarouselconfig', serialize=False, to='cms.CMSPlugin')\n", (18679, 18872), False, 'from django.db import migrations, models\n'), ((18890, 18942), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(75)', 'verbose_name': '"""Name"""'}), "(max_length=75, verbose_name='Name')\n", (18906, 18942), False, 'from django.db import migrations, models\n'), ((18977, 19036), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(75)', 'verbose_name': '"""Application"""'}), "(max_length=75, verbose_name='Application')\n", (18993, 19036), False, 'from django.db import migrations, models\n'), ((19065, 19127), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(75)', 'verbose_name': '"""Database Model"""'}), "(max_length=75, verbose_name='Database Model')\n", (19081, 19127), False, 'from django.db import migrations, models\n'), ((19162, 19227), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(10)', 'verbose_name': '"""Number of Entries"""'}), "(default=10, verbose_name='Number of Entries')\n", (19181, 19227), False, 'from django.db import migrations, models\n'), ((19263, 19332), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Show Featured Only"""'}), "(default=False, verbose_name='Show Featured Only')\n", (19282, 19332), False, 'from django.db import migrations, models\n'), ((19369, 19439), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Show Sponsored Only"""'}), "(default=False, verbose_name='Show Sponsored Only')\n", (19388, 19439), False, 'from django.db import migrations, models\n'), ((19481, 19576), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(450)', 'null': '(True)', 'verbose_name': '"""Selection Criteria"""'}), "(blank=True, max_length=450, null=True, verbose_name=\n 'Selection Criteria')\n", (19497, 19576), False, 'from django.db import migrations, models\n'), ((19603, 19741), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('travelling/components/trip-thumbnail.html', 'Default Template')]", 'max_length': '(300)', 'verbose_name': '"""Template"""'}), "(choices=[('travelling/components/trip-thumbnail.html',\n 'Default Template')], max_length=300, verbose_name='Template')\n", (19619, 19741), False, 'from django.db import migrations, models\n'), ((20002, 20215), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'auto_created': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'parent_link': '(True)', 'primary_key': '(True)', 'related_name': '"""travelling_tripcataloguemodel"""', 'serialize': '(False)', 'to': '"""cms.CMSPlugin"""'}), "(auto_created=True, on_delete=django.db.models.deletion\n .CASCADE, parent_link=True, primary_key=True, related_name=\n 'travelling_tripcataloguemodel', serialize=False, to='cms.CMSPlugin')\n", (20022, 20215), False, 'from django.db import migrations, models\n'), ((20233, 20285), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(75)', 'verbose_name': '"""Name"""'}), "(max_length=75, verbose_name='Name')\n", (20249, 20285), False, 'from django.db import migrations, models\n'), ((20527, 20620), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (20543, 20620), False, 'from django.db import migrations, models\n'), ((20646, 20752), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'blank': '(True)', 'decimal_places': '(4)', 'max_digits': '(8)', 'null': '(True)', 'verbose_name': '"""Weight (kg)"""'}), "(blank=True, decimal_places=4, max_digits=8, null=True,\n verbose_name='Weight (kg)')\n", (20665, 20752), False, 'from django.db import migrations, models\n'), ((20778, 20884), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'blank': '(True)', 'decimal_places': '(4)', 'max_digits': '(8)', 'null': '(True)', 'verbose_name': '"""Length (cm)"""'}), "(blank=True, decimal_places=4, max_digits=8, null=True,\n verbose_name='Length (cm)')\n", (20797, 20884), False, 'from django.db import migrations, models\n'), ((20910, 21015), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'blank': '(True)', 'decimal_places': '(4)', 'max_digits': '(8)', 'null': '(True)', 'verbose_name': '"""CIC Points"""'}), "(blank=True, decimal_places=4, max_digits=8, null=True,\n verbose_name='CIC Points')\n", (20929, 21015), False, 'from django.db import migrations, models\n'), ((21039, 21159), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""travelling.Game"""', 'verbose_name': '"""Game"""'}), "(null=True, on_delete=django.db.models.deletion.CASCADE,\n to='travelling.Game', verbose_name='Game')\n", (21056, 21159), False, 'from django.db import migrations, models\n'), ((21185, 21367), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""trophy_rating"""', 'to': '"""travelling.Rating"""', 'verbose_name': '"""Associated Rating"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, related_name='trophy_rating', to='travelling.Rating',\n verbose_name='Associated Rating')\n", (21202, 21367), False, 'from django.db import migrations, models\n'), ((21386, 21530), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""travelling.Trip"""', 'verbose_name': '"""Assiciated Trip"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='travelling.Trip', verbose_name='Assiciated Trip')\n", (21403, 21530), False, 'from django.db import migrations, models\n'), ((21684, 21858), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'auto_created': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'parent_link': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'to': '"""travelling.PriceList"""'}), "(auto_created=True, on_delete=django.db.models.deletion\n .CASCADE, parent_link=True, primary_key=True, serialize=False, to=\n 'travelling.PriceList')\n", (21704, 21858), False, 'from django.db import migrations, models\n'), ((21876, 21939), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(75)', 'verbose_name': '"""Price list name"""'}), "(max_length=75, verbose_name='Price list name')\n", (21892, 21939), False, 'from django.db import migrations, models\n'), ((22134, 22308), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'auto_created': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'parent_link': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'to': '"""travelling.PriceList"""'}), "(auto_created=True, on_delete=django.db.models.deletion\n .CASCADE, parent_link=True, primary_key=True, serialize=False, to=\n 'travelling.PriceList')\n", (22154, 22308), False, 'from django.db import migrations, models\n'), ((22326, 22389), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(75)', 'verbose_name': '"""Price list name"""'}), "(max_length=75, verbose_name='Price list name')\n", (22342, 22389), False, 'from django.db import migrations, models\n')] |
from Bio.SeqIO import parse
from Bio import SeqIO
from Bio.SeqRecord import SeqRecord
from Bio.Seq import Seq
from Bio.Alphabet import IUPAC, generic_dna
from Bio import Alphabet, Entrez
from Bio.SeqUtils import GC
from Bio.Blast import NCBIWWW, NCBIXML
from Bio import pairwise2
from Bio.pairwise2 import format_alignment
from Bio import motifs
__author__ = '<NAME>'
__version__ = '09122019'
class CurrentSession:
def __init__(self, sequence = None, comp_seq = None, transcribed_seq = None,
translated_seq = None, id = None, name = None, desc = None, gc_perc = None,
record = None):
self.sequence = None
self.comp_seq = None
self.transcribed_seq = None
self.translated_seq = None
self.id = None
self.name = None
self.desc = None
self.gc_perc = None
self.record = SeqRecord(self.sequence, id = self.id)
self.file_session = None
def openFile(self, filename):
file = open(filename + '.fasta')
self.file_session = str(filename + '.fasta')
return file
def closeFile(self, file_handle):
file_handle.close()
def getSequenceInfo(self, file_handle):
records = parse(file_handle, "fasta")
for record in records:
self.id = record.id
self.name = record.name
self.desc = record.description
self.sequence = Seq(str(record.seq), IUPAC.ambiguous_dna)
print("ID: {}".format(self.id))
print("Name: {}".format(self.name))
print("Description: {}".format(self.desc))
print("Sequence: {}".format(self.sequence))
# print("Complementary sequence: {}".format(sequence.complement()))
print("------------------------------------------------------------")
return
def getComplementarySequence(self, file_handle):
records = parse(file_handle, "fasta")
for record in records:
self.sequence = Seq(str(record.seq), IUPAC.unambiguous_dna)
self.name = record.name
self.comp_seq = self.sequence.complement()
print("Name: {}".format(self.name))
if Alphabet._verify_alphabet(self.sequence) == True:
print("Sequence: {}".format(self.sequence))
print("Complementary sequence: {}".format(self.comp_seq))
print("------------------------------------------------------------")
else:
print("This sequence is not a DNA, can't get a complementary of that. Load correct sequence.")
return
def transcribeSequence(self, file_handle):
records = parse(file_handle, "fasta")
for record in records:
self.sequence = Seq(str(record.seq), IUPAC.unambiguous_dna)
self.name = record.name
self.transcribed_seq = self.sequence.transcribe()
print("Name: {}".format(self.name))
if Alphabet._verify_alphabet(self.sequence) == True:
print("Sequence: {}".format(self.sequence))
print("Transcribed sequence: {}".format(self.transcribed_seq))
print("------------------------------------------------------------")
else:
print("This sequence is not a DNA, can't get a complementary of that. Load correct sequence.")
def translateSequence(self, file_handle, stop):
records = parse(file_handle, "fasta")
for record in records:
self.sequence = Seq(str(record.seq), IUPAC.unambiguous_rna)
self.name = record.name
print("Name: {}".format(self.name))
if Alphabet._verify_alphabet(self.sequence) == True and stop == 'y':
self.translated_seq = self.sequence.translate(to_stop=True)
print("Sequence: {}".format(self.sequence))
print("Translated sequence: {}".format(self.translated_seq))
print("------------------------------------------------------------")
elif Alphabet._verify_alphabet(self.sequence) == True and stop == 'n':
self.translated_seq = self.sequence.translate()
print("Sequence: {}".format(self.sequence))
print("Translated sequence: {}".format(self.translated_seq))
print("------------------------------------------------------------")
else:
print("This sequence is not a RNA, can't translate that. Load correct sequence.")
def get_GC_Content(self, file_handle):
records = parse(file_handle, "fasta")
for record in records:
self.sequence = Seq(str(record.seq), IUPAC.unambiguous_dna)
self.name = record.name
self.gc_perc = GC(self.sequence)
print("Name {}".format(self.name))
if Alphabet._verify_alphabet(self.sequence) == True:
print("Sequence: {}".format(self.sequence))
print("GC content: {}%".format(self.gc_perc))
print("------------------------------------------------------------")
else:
print("This sequence is not a DNA, only calculate GC content in DNA. Load correct sequence.")
def fetchRecord(self, db, accession):
Entrez.email = "<EMAIL>"
handle = Entrez.efetch(db = db, id = accession, rettype = "fasta")
#print(handle.read())
record = SeqIO.read(handle, "fasta")
print(record)
filename = record.name
file = open(filename + ".fasta", "w")
SeqIO.write(record, file, "fasta")
return filename
def runBlast(self, type, database):
seq_record = next(SeqIO.parse(open(self.file_session), 'fasta'))
print("Requesting BLAST (might take a few minutes...)")
request_handle = NCBIWWW.qblast(type, database, seq_record.seq)
print("BLAST succeeded.")
with open("{}_blast.xml".format(self.file_session), "w") as save_f:
save_f.write(request_handle.read())
request_handle.close()
print("BLAST results saved.")
def alignPairwise(self, file_handle, alignment_type):
try:
records = parse(file_handle, "fasta")
number = 1
seq1 = None
seq2 = None
for record in records:
if number == 1:
seq1 = record.seq
elif number == 2:
seq2 = record.seq
number += 1
if seq2 is None:
print("Error: There is only one sequence in the file.")
return
if alignment_type == str(1):
alignments = pairwise2.align.globalxx(seq1, seq2)
elif alignment_type == str(2):
alignments = pairwise2.align.localxx(seq1, seq2)
elif alignment_type == str(3):
match = int(input("Define points given for match: "))
mismatch = int(input("Define points deduced for mismatch: "))
o_gap = int(input("Define penalty for gap opening: "))
ext_gap = int(input("Define penalty for gap extension: "))
alignments = pairwise2.align.globalms(seq1, seq2, match, mismatch,
o_gap, ext_gap)
for alignment in alignments:
print("RAW ALIGNMENT: ")
print(alignment)
print("FORMATTED ALIGNMENT: ")
print(format_alignment(*alignment))
except Exception as e1:
print("Error, problably there is only one sequence.")
def createMotif(self, file_handle):
records = parse(file_handle, "fasta")
logofile = self.file_session + "_logo.png"
seqs_motif = []
for record in records:
self.sequence = Seq(str(record.seq))
seqs_motif.append(self.sequence)
seqs = motifs.create(seqs_motif)
print(seqs.counts)
seqs.weblogo(logofile)
print("Weblogo saved.")
def getElems(self, length):
for base in range(len(self.sequence)):
if base + length > len(self.sequence):
break
else:
yield self.sequence[base:base + length]
def saveActions(self):
if self.file_session != None:
with open("{}_actions.txt".format(self.id), "w") as save_f:
save_f.write("""ID: {}
NAME: {}
DESCRIPTION: {}
ORIGINAL SEQUENCE: {}
COMPLEMENTARY SEQUENCE: {}
TRANSCRIBED SEQUENCE: {}
TRANSLATED SEQUENCE: {}
G/C PERCENTAGE: {}%""".format(self.id, self.name, self.desc, self.sequence, self.comp_seq,
self.transcribed_seq, self.translated_seq, self.gc_perc))
save_f.close()
print("Your actions were saved!")
else:
print("Nothing to save, probably you haven't loaded any file before.")
# def convertFASTAtoGENBANK(self, filename):
# file = open(filename + ".fasta")
# record = SeqIO.read(file, "fasta")
# record.seq.alphabet = generic_dna
# file_genbank = open(filename + ".gbk", "w")
# SeqIO.write(record, file_genbank, "genbank")
# file_genbank.close()
# file.close()
if __name__ == '__main__':
session = CurrentSession()
while True:
print("""////Bio Python////
1. Load FASTA file
2. Load record info
3. Get complementary sequence
4. Transcribe sequence
5. Translate sequence
6. Get GC content
7. Fetch and load FASTA from Entrez
*8. Convert FASTA to GenBank
9. Run BLAST
10. Perform pairwise aligment
11. Create motifs and weblogo
12. Save your actions made on FASTA file to txt file
13. Print sequence substrings
=== Current session file: {} ===
Type 'help' for help.
Type 'quit' to exit.""".format(session.file_session))
menu_pos = input('>>').lower()
if menu_pos == str(1):
try:
print("Type name of FASTA file to process: ")
filename = input()
file_handle = session.openFile(filename)
print("FASTA loaded!")
except Exception as e:
print("No such file or directory.")
elif menu_pos == str(2):
try:
file_handle = session.openFile(filename)
session.getSequenceInfo(file_handle)
session.closeFile(file_handle)
except Exception as e:
print("File is not loaded.")
elif menu_pos == str(3):
try:
file_handle = session.openFile(filename)
session.getComplementarySequence(file_handle)
session.closeFile(file_handle)
except Exception as e:
print("File is not loaded.")
elif menu_pos == str(4):
try:
file_handle = session.openFile(filename)
session.transcribeSequence(file_handle)
session.closeFile(file_handle)
except Exception as e:
print("File is not loaded.")
elif menu_pos == str(5):
stop = input('Stop translating at first stop codon? [y/n]').lower()
try:
file_handle = session.openFile(filename)
session.translateSequence(file_handle, stop)
session.closeFile(file_handle)
except Exception as e:
print("File is not loaded.")
elif menu_pos == str(6):
try:
file_handle = session.openFile(filename)
session.get_GC_Content(file_handle)
session.closeFile(file_handle)
except Exception as e:
print("File is not loaded.")
elif menu_pos == str(7):
try:
db = input("Type database name: ").lower()
accession = input("Type accession to find: ")
filename = session.fetchRecord(db, accession)
file_handle = session.openFile(filename)
except Exception as e:
print("File is not loaded.")
elif menu_pos == str(8):
try:
print("Type name of FASTA file to process: ")
filename = input()
# session.convertFASTAtoGENBANK(filename)
except Exception as e:
print("File is not loaded.")
elif menu_pos == str(9):
try:
file_handle = session.openFile(filename)
type = input("Type the type of BLAST: ")
database = input("Type database name: ")
session.runBlast(type, database)
except Exception as e:
print("File is not loaded.")
elif menu_pos == str(10):
try:
print("""Choose type of aligment:
1. Global Pairwise (default parameters)
2. Local Pairwise (default parameters)
3. Global Pairwise with custom parameters""")
alignment_type = input('>>')
file_handle = session.openFile(filename)
session.alignPairwise(file_handle, alignment_type)
session.closeFile(file_handle)
except Exception as e:
print("File is not loaded.")
elif menu_pos == str(11):
try:
file_handle = session.openFile(filename)
session.createMotif(file_handle)
session.closeFile(file_handle)
except Exception as e:
print("File is not loaded")
elif menu_pos == str(12):
session.saveActions()
elif menu_pos == str(13):
try:
length = int(input("Length of substrings:"))
iterator = session.getElems(length)
print(session.sequence)
i = 0
for base in iterator:
print(' ' * i + base)
i += 1
print(' ' * i + next(iterator))
i += 1
except StopIteration:
pass
except Exception as e:
print("File is not loaded")
elif menu_pos == 'debug':
print("{}\n{}\n{}\n{}\n{}\n{}\n{}\n{}\n".format(session.id, session.name,
session.desc,session.sequence,
session.comp_seq, session.transcribed_seq,
session.translated_seq, session.gc_perc))
elif menu_pos == 'quit':
break
elif menu_pos == 'help':
print("""
quickHELP:
Indent operations in menu needs file to be opened first.
Be patient while doing BLAST.
If in menu something is marked with an asterisk, then it is not usable.
Have fun!
""")
else:
print("Unknown command.")
| [
"Bio.pairwise2.align.globalms",
"Bio.motifs.create",
"Bio.Blast.NCBIWWW.qblast",
"Bio.SeqRecord.SeqRecord",
"Bio.pairwise2.align.localxx",
"Bio.SeqUtils.GC",
"Bio.pairwise2.align.globalxx",
"Bio.SeqIO.parse",
"Bio.SeqIO.write",
"Bio.SeqIO.read",
"Bio.Entrez.efetch",
"Bio.pairwise2.format_align... | [((877, 913), 'Bio.SeqRecord.SeqRecord', 'SeqRecord', (['self.sequence'], {'id': 'self.id'}), '(self.sequence, id=self.id)\n', (886, 913), False, 'from Bio.SeqRecord import SeqRecord\n'), ((1231, 1258), 'Bio.SeqIO.parse', 'parse', (['file_handle', '"""fasta"""'], {}), "(file_handle, 'fasta')\n", (1236, 1258), False, 'from Bio.SeqIO import parse\n'), ((1925, 1952), 'Bio.SeqIO.parse', 'parse', (['file_handle', '"""fasta"""'], {}), "(file_handle, 'fasta')\n", (1930, 1952), False, 'from Bio.SeqIO import parse\n'), ((2692, 2719), 'Bio.SeqIO.parse', 'parse', (['file_handle', '"""fasta"""'], {}), "(file_handle, 'fasta')\n", (2697, 2719), False, 'from Bio.SeqIO import parse\n'), ((3461, 3488), 'Bio.SeqIO.parse', 'parse', (['file_handle', '"""fasta"""'], {}), "(file_handle, 'fasta')\n", (3466, 3488), False, 'from Bio.SeqIO import parse\n'), ((4606, 4633), 'Bio.SeqIO.parse', 'parse', (['file_handle', '"""fasta"""'], {}), "(file_handle, 'fasta')\n", (4611, 4633), False, 'from Bio.SeqIO import parse\n'), ((5361, 5412), 'Bio.Entrez.efetch', 'Entrez.efetch', ([], {'db': 'db', 'id': 'accession', 'rettype': '"""fasta"""'}), "(db=db, id=accession, rettype='fasta')\n", (5374, 5412), False, 'from Bio import Alphabet, Entrez\n'), ((5467, 5494), 'Bio.SeqIO.read', 'SeqIO.read', (['handle', '"""fasta"""'], {}), "(handle, 'fasta')\n", (5477, 5494), False, 'from Bio import SeqIO\n'), ((5602, 5636), 'Bio.SeqIO.write', 'SeqIO.write', (['record', 'file', '"""fasta"""'], {}), "(record, file, 'fasta')\n", (5613, 5636), False, 'from Bio import SeqIO\n'), ((5865, 5911), 'Bio.Blast.NCBIWWW.qblast', 'NCBIWWW.qblast', (['type', 'database', 'seq_record.seq'], {}), '(type, database, seq_record.seq)\n', (5879, 5911), False, 'from Bio.Blast import NCBIWWW, NCBIXML\n'), ((7747, 7774), 'Bio.SeqIO.parse', 'parse', (['file_handle', '"""fasta"""'], {}), "(file_handle, 'fasta')\n", (7752, 7774), False, 'from Bio.SeqIO import parse\n'), ((7991, 8016), 'Bio.motifs.create', 'motifs.create', (['seqs_motif'], {}), '(seqs_motif)\n', (8004, 8016), False, 'from Bio import motifs\n'), ((4801, 4818), 'Bio.SeqUtils.GC', 'GC', (['self.sequence'], {}), '(self.sequence)\n', (4803, 4818), False, 'from Bio.SeqUtils import GC\n'), ((6235, 6262), 'Bio.SeqIO.parse', 'parse', (['file_handle', '"""fasta"""'], {}), "(file_handle, 'fasta')\n", (6240, 6262), False, 'from Bio.SeqIO import parse\n'), ((2211, 2251), 'Bio.Alphabet._verify_alphabet', 'Alphabet._verify_alphabet', (['self.sequence'], {}), '(self.sequence)\n', (2236, 2251), False, 'from Bio import Alphabet, Entrez\n'), ((2985, 3025), 'Bio.Alphabet._verify_alphabet', 'Alphabet._verify_alphabet', (['self.sequence'], {}), '(self.sequence)\n', (3010, 3025), False, 'from Bio import Alphabet, Entrez\n'), ((4881, 4921), 'Bio.Alphabet._verify_alphabet', 'Alphabet._verify_alphabet', (['self.sequence'], {}), '(self.sequence)\n', (4906, 4921), False, 'from Bio import Alphabet, Entrez\n'), ((6735, 6771), 'Bio.pairwise2.align.globalxx', 'pairwise2.align.globalxx', (['seq1', 'seq2'], {}), '(seq1, seq2)\n', (6759, 6771), False, 'from Bio import pairwise2\n'), ((3692, 3732), 'Bio.Alphabet._verify_alphabet', 'Alphabet._verify_alphabet', (['self.sequence'], {}), '(self.sequence)\n', (3717, 3732), False, 'from Bio import Alphabet, Entrez\n'), ((6844, 6879), 'Bio.pairwise2.align.localxx', 'pairwise2.align.localxx', (['seq1', 'seq2'], {}), '(seq1, seq2)\n', (6867, 6879), False, 'from Bio import pairwise2\n'), ((7559, 7587), 'Bio.pairwise2.format_alignment', 'format_alignment', (['*alignment'], {}), '(*alignment)\n', (7575, 7587), False, 'from Bio.pairwise2 import format_alignment\n'), ((4074, 4114), 'Bio.Alphabet._verify_alphabet', 'Alphabet._verify_alphabet', (['self.sequence'], {}), '(self.sequence)\n', (4099, 4114), False, 'from Bio import Alphabet, Entrez\n'), ((7246, 7315), 'Bio.pairwise2.align.globalms', 'pairwise2.align.globalms', (['seq1', 'seq2', 'match', 'mismatch', 'o_gap', 'ext_gap'], {}), '(seq1, seq2, match, mismatch, o_gap, ext_gap)\n', (7270, 7315), False, 'from Bio import pairwise2\n')] |
import pymongo
client = pymongo.MongoClient('''mongodb://dakotapope:<PASSWORD>wrd@
cluster0-shard-00-00-iaoct.mongodb.net:27017,cluster0-shard-00
-01-iaoct.mongodb.net:27017,cluster0-shard-00-02-iaoct.mongodb.
net:27017/test?ssl=true&replicaSet=Cluster0-shard-0&authSource=
admin&retryWrites=true''')
# investigate the databases already initialized
client.database_names()
# -->['rpg_data', 'test', 'admin', 'local']
# since I created the table on the Mongo Atlas dashboard I wil use it here
rpgs = client.rpg_data.rpg
# loadout the json file to prep for dumping into a mongo db table
with open('''C:/Users/dakot/Documents/GitHub/DS-Unit-3-Sprint-2-SQL-and-
Databases/module3-nosql-and-document-oriented-databases/rpg.json''') as f:
file_data = json.load(f)
# make a space for the data to go
rpg_table = rpg['rpg_data']
# Dump the json data into the mongodb cloud.
rpg_table.insert_many(file_data)
# <pymongo.results.InsertManyResult at 0x2c80a7c8688>
# And DONE!
| [
"pymongo.MongoClient"
] | [((25, 311), 'pymongo.MongoClient', 'pymongo.MongoClient', (['"""mongodb://dakotapope:<PASSWORD>wrd@\ncluster0-shard-00-00-iaoct.mongodb.net:27017,cluster0-shard-00\n-01-iaoct.mongodb.net:27017,cluster0-shard-00-02-iaoct.mongodb.\nnet:27017/test?ssl=true&replicaSet=Cluster0-shard-0&authSource=\nadmin&retryWrites=true"""'], {}), '(\n """mongodb://dakotapope:<PASSWORD>wrd@\ncluster0-shard-00-00-iaoct.mongodb.net:27017,cluster0-shard-00\n-01-iaoct.mongodb.net:27017,cluster0-shard-00-02-iaoct.mongodb.\nnet:27017/test?ssl=true&replicaSet=Cluster0-shard-0&authSource=\nadmin&retryWrites=true"""\n )\n', (44, 311), False, 'import pymongo\n')] |
"""Promises, promises, promises."""
from __future__ import absolute_import, unicode_literals
import re
from collections import namedtuple
from .abstract import Thenable
from .promises import promise
from .synchronization import barrier
from .funtools import (
maybe_promise, ensure_promise,
ppartial, preplace, starpromise, transform, wrap,
)
__version__ = '1.3.0'
__author__ = '<NAME>'
__contact__ = '<EMAIL>'
__homepage__ = 'http://github.com/celery/vine'
__docformat__ = 'restructuredtext'
# -eof meta-
version_info_t = namedtuple('version_info_t', (
'major', 'minor', 'micro', 'releaselevel', 'serial',
))
# bump version can only search for {current_version}
# so we have to parse the version here.
_temp = re.match(
r'(\d+)\.(\d+).(\d+)(.+)?', __version__).groups()
VERSION = version_info = version_info_t(
int(_temp[0]), int(_temp[1]), int(_temp[2]), _temp[3] or '', '')
del(_temp)
del(re)
__all__ = [
'Thenable', 'promise', 'barrier',
'maybe_promise', 'ensure_promise',
'ppartial', 'preplace', 'starpromise', 'transform', 'wrap',
]
| [
"collections.namedtuple",
"re.match"
] | [((538, 625), 'collections.namedtuple', 'namedtuple', (['"""version_info_t"""', "('major', 'minor', 'micro', 'releaselevel', 'serial')"], {}), "('version_info_t', ('major', 'minor', 'micro', 'releaselevel',\n 'serial'))\n", (548, 625), False, 'from collections import namedtuple\n'), ((730, 782), 're.match', 're.match', (['"""(\\\\d+)\\\\.(\\\\d+).(\\\\d+)(.+)?"""', '__version__'], {}), "('(\\\\d+)\\\\.(\\\\d+).(\\\\d+)(.+)?', __version__)\n", (738, 782), False, 'import re\n')] |
#!/usr/bin/env python
import numpy as np
import matplotlib.pyplot as plt
from LLC_Membranes.timeseries.forecast_ctrw import System
from LLC_Membranes.llclib import file_rw
import names
residues = ["GCL", "SOH"]
wt = 10
path = "/home/bcoscia/Documents/Gromacs/Transport/NaGA3C11"
colors = ['blue', 'red']
opacity = 1
nbins = 25
lw = 2
fig, ax = plt.subplots(1, 2, figsize=(10, 5))
for j, r in enumerate(residues):
obj = file_rw.load_object('%s/%s/%swt/forecast_%s.pl' % (path, r, wt, r))
hops = []
for i in obj.hop_lengths:
hops += i
print(max(hops))
if j == 0:
hop_hist, edges = np.histogram(hops, density=True, bins=nbins)
bounds = [edges[0], edges[-1]]
else:
hop_hist, edges = np.histogram(hops, density=True, bins=np.linspace(bounds[0], bounds[1], nbins + 1))
hop_outline = np.zeros([len(hop_hist)*2 + 2, 2])
hop_outline[::2, 0] = edges
hop_outline[1::2, 0] = edges
hop_outline[1:-1:2, 1] = hop_hist
hop_outline[2:-1:2, 1] = hop_hist
if j == 0:
dwell_hist, edges = np.histogram(obj.dwell_times, density=True, bins=nbins)
bounds_power = [edges[0], edges[-1]]
else:
dwell_hist, edges = np.histogram(obj.dwell_times, density=True, bins=np.linspace(bounds_power[0], bounds_power[1], nbins + 1))
dwell_outline = np.zeros([len(dwell_hist)*2 + 2, 2])
dwell_outline[::2, 0] = edges
dwell_outline[1::2, 0] = edges
dwell_outline[1:-1:2, 1] = dwell_hist
dwell_outline[2:-1:2, 1] = dwell_hist
ax[0].plot(hop_outline[:, 0], hop_outline[:, 1], color=colors[j], alpha=opacity, linewidth=lw)
ax[1].plot(dwell_outline[:, 0], dwell_outline[:, 1], color=colors[j], alpha=opacity, label=names.res_to_name[r], linewidth=lw)
ax[0].tick_params(labelsize=14)
ax[1].tick_params(labelsize=14)
ax[1].legend(fontsize=14)
ax[0].set_ylabel('Frequency', fontsize=14)
ax[0].set_xlabel('Hop Length (nm)', fontsize=14)
ax[1].set_xlabel('Dwell Time (ns)', fontsize=14)
plt.tight_layout()
plt.savefig('dwell_hop_%s.pdf' % '_'.join(residues))
plt.show()
| [
"numpy.histogram",
"LLC_Membranes.llclib.file_rw.load_object",
"numpy.linspace",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((348, 383), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(2)'], {'figsize': '(10, 5)'}), '(1, 2, figsize=(10, 5))\n', (360, 383), True, 'import matplotlib.pyplot as plt\n'), ((1882, 1900), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (1898, 1900), True, 'import matplotlib.pyplot as plt\n'), ((1954, 1964), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1962, 1964), True, 'import matplotlib.pyplot as plt\n'), ((425, 492), 'LLC_Membranes.llclib.file_rw.load_object', 'file_rw.load_object', (["('%s/%s/%swt/forecast_%s.pl' % (path, r, wt, r))"], {}), "('%s/%s/%swt/forecast_%s.pl' % (path, r, wt, r))\n", (444, 492), False, 'from LLC_Membranes.llclib import file_rw\n'), ((593, 637), 'numpy.histogram', 'np.histogram', (['hops'], {'density': '(True)', 'bins': 'nbins'}), '(hops, density=True, bins=nbins)\n', (605, 637), True, 'import numpy as np\n'), ((998, 1053), 'numpy.histogram', 'np.histogram', (['obj.dwell_times'], {'density': '(True)', 'bins': 'nbins'}), '(obj.dwell_times, density=True, bins=nbins)\n', (1010, 1053), True, 'import numpy as np\n'), ((736, 780), 'numpy.linspace', 'np.linspace', (['bounds[0]', 'bounds[1]', '(nbins + 1)'], {}), '(bounds[0], bounds[1], nbins + 1)\n', (747, 780), True, 'import numpy as np\n'), ((1171, 1227), 'numpy.linspace', 'np.linspace', (['bounds_power[0]', 'bounds_power[1]', '(nbins + 1)'], {}), '(bounds_power[0], bounds_power[1], nbins + 1)\n', (1182, 1227), True, 'import numpy as np\n')] |
"""Added tariff
Revision ID: 4dbaa3104f4
Revises: <PASSWORD>
Create Date: 2015-05-27 16:00:09.343862
"""
# revision identifiers, used by Alembic.
revision = '<KEY>'
down_revision = '<PASSWORD>'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_account():
### commands auto generated by Alembic - please adjust! ###
op.create_table('tariff',
sa.Column('tariff_id', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('currency', sa.String(length=3), nullable=True),
sa.Column('parent_id', sa.Integer(), nullable=True),
sa.Column('deleted', sa.DateTime(), nullable=True),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('modified', sa.DateTime(), nullable=True),
sa.Column('mutable', sa.Boolean(), nullable=True),
sa.ForeignKeyConstraint(['parent_id'], ['tariff.tariff_id'], ),
sa.PrimaryKeyConstraint('tariff_id')
)
op.create_table('service_price',
sa.Column('service_id', sa.String(length=32), nullable=False),
sa.Column('price', sa.DECIMAL(precision=20, scale=6), nullable=True),
sa.Column('tariff_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['tariff_id'], ['tariff.tariff_id'], ),
sa.PrimaryKeyConstraint('service_id', 'tariff_id')
)
op.create_table('tariff_history',
sa.Column('history_id', sa.Integer(), nullable=False),
sa.Column('event', sa.String(length=8), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('tariff_id', sa.Integer(), nullable=True),
sa.Column('customer_id', sa.Integer(), nullable=True),
sa.Column('date', sa.DateTime(), nullable=True),
sa.Column('snapshot', sa.Text(), nullable=True),
sa.ForeignKeyConstraint(['customer_id'], ['customer.customer_id'], ),
sa.ForeignKeyConstraint(['tariff_id'], ['tariff.tariff_id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.user_id'], ),
sa.PrimaryKeyConstraint('history_id')
)
op.create_table('tariff_localization',
sa.Column('language', sa.String(length=2), nullable=False),
sa.Column('parent_id', sa.Integer(), nullable=False),
sa.Column('localized_name', sa.String(length=254), nullable=True),
sa.ForeignKeyConstraint(['parent_id'], ['tariff.tariff_id'], ),
sa.PrimaryKeyConstraint('language', 'parent_id'),
sa.UniqueConstraint('localized_name', 'language', name='uix_tariff_localization')
)
### end Alembic commands ###
def downgrade_account():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('tariff_localization')
op.drop_table('tariff_history')
op.drop_table('service_price')
op.drop_table('tariff')
### end Alembic commands ###
def upgrade_fitter():
pass
def downgrade_fitter():
pass
| [
"sqlalchemy.DECIMAL",
"sqlalchemy.ForeignKeyConstraint",
"sqlalchemy.DateTime",
"alembic.op.drop_table",
"sqlalchemy.Text",
"sqlalchemy.Boolean",
"sqlalchemy.PrimaryKeyConstraint",
"sqlalchemy.Integer",
"sqlalchemy.UniqueConstraint",
"sqlalchemy.String"
] | [((2756, 2792), 'alembic.op.drop_table', 'op.drop_table', (['"""tariff_localization"""'], {}), "('tariff_localization')\n", (2769, 2792), False, 'from alembic import op\n'), ((2797, 2828), 'alembic.op.drop_table', 'op.drop_table', (['"""tariff_history"""'], {}), "('tariff_history')\n", (2810, 2828), False, 'from alembic import op\n'), ((2833, 2863), 'alembic.op.drop_table', 'op.drop_table', (['"""service_price"""'], {}), "('service_price')\n", (2846, 2863), False, 'from alembic import op\n'), ((2868, 2891), 'alembic.op.drop_table', 'op.drop_table', (['"""tariff"""'], {}), "('tariff')\n", (2881, 2891), False, 'from alembic import op\n'), ((1016, 1076), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['parent_id']", "['tariff.tariff_id']"], {}), "(['parent_id'], ['tariff.tariff_id'])\n", (1039, 1076), True, 'import sqlalchemy as sa\n'), ((1084, 1120), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""tariff_id"""'], {}), "('tariff_id')\n", (1107, 1120), True, 'import sqlalchemy as sa\n'), ((1367, 1427), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['tariff_id']", "['tariff.tariff_id']"], {}), "(['tariff_id'], ['tariff.tariff_id'])\n", (1390, 1427), True, 'import sqlalchemy as sa\n'), ((1435, 1485), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""service_id"""', '"""tariff_id"""'], {}), "('service_id', 'tariff_id')\n", (1458, 1485), True, 'import sqlalchemy as sa\n'), ((1930, 1996), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['customer_id']", "['customer.customer_id']"], {}), "(['customer_id'], ['customer.customer_id'])\n", (1953, 1996), True, 'import sqlalchemy as sa\n'), ((2004, 2064), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['tariff_id']", "['tariff.tariff_id']"], {}), "(['tariff_id'], ['tariff.tariff_id'])\n", (2027, 2064), True, 'import sqlalchemy as sa\n'), ((2072, 2126), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['user_id']", "['user.user_id']"], {}), "(['user_id'], ['user.user_id'])\n", (2095, 2126), True, 'import sqlalchemy as sa\n'), ((2134, 2171), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""history_id"""'], {}), "('history_id')\n", (2157, 2171), True, 'import sqlalchemy as sa\n'), ((2418, 2478), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['parent_id']", "['tariff.tariff_id']"], {}), "(['parent_id'], ['tariff.tariff_id'])\n", (2441, 2478), True, 'import sqlalchemy as sa\n'), ((2486, 2534), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""language"""', '"""parent_id"""'], {}), "('language', 'parent_id')\n", (2509, 2534), True, 'import sqlalchemy as sa\n'), ((2540, 2626), 'sqlalchemy.UniqueConstraint', 'sa.UniqueConstraint', (['"""localized_name"""', '"""language"""'], {'name': '"""uix_tariff_localization"""'}), "('localized_name', 'language', name=\n 'uix_tariff_localization')\n", (2559, 2626), True, 'import sqlalchemy as sa\n'), ((581, 593), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (591, 593), True, 'import sqlalchemy as sa\n'), ((641, 650), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (648, 650), True, 'import sqlalchemy as sa\n'), ((694, 713), 'sqlalchemy.String', 'sa.String', ([], {'length': '(3)'}), '(length=3)\n', (703, 713), True, 'import sqlalchemy as sa\n'), ((758, 770), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (768, 770), True, 'import sqlalchemy as sa\n'), ((813, 826), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (824, 826), True, 'import sqlalchemy as sa\n'), ((869, 882), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (880, 882), True, 'import sqlalchemy as sa\n'), ((926, 939), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (937, 939), True, 'import sqlalchemy as sa\n'), ((982, 994), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (992, 994), True, 'import sqlalchemy as sa\n'), ((1192, 1212), 'sqlalchemy.String', 'sa.String', ([], {'length': '(32)'}), '(length=32)\n', (1201, 1212), True, 'import sqlalchemy as sa\n'), ((1254, 1287), 'sqlalchemy.DECIMAL', 'sa.DECIMAL', ([], {'precision': '(20)', 'scale': '(6)'}), '(precision=20, scale=6)\n', (1264, 1287), True, 'import sqlalchemy as sa\n'), ((1332, 1344), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1342, 1344), True, 'import sqlalchemy as sa\n'), ((1558, 1570), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1568, 1570), True, 'import sqlalchemy as sa\n'), ((1612, 1631), 'sqlalchemy.String', 'sa.String', ([], {'length': '(8)'}), '(length=8)\n', (1621, 1631), True, 'import sqlalchemy as sa\n'), ((1674, 1686), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1684, 1686), True, 'import sqlalchemy as sa\n'), ((1731, 1743), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1741, 1743), True, 'import sqlalchemy as sa\n'), ((1790, 1802), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1800, 1802), True, 'import sqlalchemy as sa\n'), ((1842, 1855), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1853, 1855), True, 'import sqlalchemy as sa\n'), ((1899, 1908), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (1906, 1908), True, 'import sqlalchemy as sa\n'), ((2247, 2266), 'sqlalchemy.String', 'sa.String', ([], {'length': '(2)'}), '(length=2)\n', (2256, 2266), True, 'import sqlalchemy as sa\n'), ((2312, 2324), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (2322, 2324), True, 'import sqlalchemy as sa\n'), ((2375, 2396), 'sqlalchemy.String', 'sa.String', ([], {'length': '(254)'}), '(length=254)\n', (2384, 2396), True, 'import sqlalchemy as sa\n')] |
import ctypes
import struct
import time
from fixate.core.common import bits
from fixate.core.exceptions import InstrumentError, InstrumentNotConnected
import fixate.config
import fixate.core.discover
def open(ftdi_description=""):
"""Open is the public api for the bit bang driver for discovering and opening a connection
to a valid bit bang device
:param restrictions:
A dictionary containing the extents of the required equipment
:return:
A instantiated class connected to a valid bit_bang class
"""
devices = fixate.core.discover.discover_ftdi()
for dev in devices:
if ftdi_description.encode() == dev.Description or ftdi_description == "":
return FTDI2xx(dev.Description)
raise InstrumentNotConnected("No valid ftdi found by description '{}'".format(ftdi_description))
# Definitions
UCHAR = ctypes.c_ubyte
PCHAR = ctypes.POINTER(ctypes.c_char)
PUCHAR = ctypes.POINTER(ctypes.c_ubyte)
DWORD = ctypes.c_ulong
LPDWORD = ctypes.POINTER(ctypes.c_ulong)
FT_HANDLE = DWORD
class FT_STATUS(object):
FT_OK = DWORD(0)
FT_INVALID_HANDLE = DWORD(1)
FT_DEVICE_NOT_FOUND = DWORD(2)
FT_DEVICE_NOT_OPENED = DWORD(3)
FT_IO_ERROR = DWORD(4)
FT_INSUFFICIENT_RESOURCES = DWORD(5)
FT_INVALID_PARAMETER = DWORD(6)
FT_INVALID_BAUD_RATE = DWORD(7)
FT_DEVICE_NOT_OPENED_FOR_ERASE = DWORD(8)
FT_DEVICE_NOT_OPENED_FOR_WRITE = DWORD(9)
FT_FAILED_TO_WRITE_DEVICE = DWORD(10)
FT_EEPROM_READ_FAILED = DWORD(11)
FT_EEPROM_WRITE_FAILED = DWORD(12)
FT_EEPROM_ERASE_FAILED = DWORD(13)
FT_EEPROM_NOT_PRESENT = DWORD(14)
FT_EEPROM_NOT_PROGRAMMED = DWORD(15)
FT_INVALID_ARGS = DWORD(16)
FT_NOT_SUPPORTED = DWORD(17)
FT_OTHER_ERROR = DWORD(18)
class FT_DEVICE(object):
FT_DEVICE_232BM = DWORD(0)
FT_DEVICE_232AM = DWORD(1)
FT_DEVICE_100AX = DWORD(2)
FT_DEVICE_UNKNOWN = DWORD(3)
FT_DEVICE_2232C = DWORD(4)
FT_DEVICE_232R = DWORD(5)
FT_DEVICE_2232H = DWORD(6)
FT_DEVICE_4232H = DWORD(7)
FT_DEVICE_232H = DWORD(8)
FT_DEVICE_X_SERIES = DWORD(9)
class FLAGS(object):
FT_OPEN_BY_SERIAL_NUMBER = DWORD(1)
FT_OPEN_BY_DESCRIPTION = DWORD(2)
FT_OPEN_BY_LOCATION = DWORD(4)
class BIT_MODE(object):
FT_BITMODE_RESET = DWORD(0x00)
FT_BITMODE_ASYNC_BITBANG = DWORD(0x01)
FT_BITMODE_MPSSE = DWORD(0x02)
FT_BITMODE_SYNC_BITBANG = DWORD(0x04)
FT_BITMODE_MCU_HOST = DWORD(0x08)
FT_BITMODE_FAST_SERIAL = DWORD(0x10)
FT_BITMODE_CBUS_BITBANG = DWORD(0x20)
FT_BITMODE_SYNC_FIFO = DWORD(0x40)
# Add null padding if 64 bit
if struct.calcsize("P") == 8:
class FT_DEVICE_LIST_INFO_NODE(ctypes.Structure):
_fields_ = [("Flags", DWORD),
("Type", DWORD),
("ID", DWORD),
("LocId", DWORD),
("SerialNumber", ctypes.c_char * 16),
("Description", ctypes.c_char * 64),
("ftHandle", DWORD),
("null_padding", DWORD)]
else: # 32 bit
class FT_DEVICE_LIST_INFO_NODE(ctypes.Structure):
_fields_ = [("Flags", DWORD),
("Type", DWORD),
("ID", DWORD),
("LocId", DWORD),
("SerialNumber", ctypes.c_char * 16),
("Description", ctypes.c_char * 64),
("ftHandle", DWORD)]
class WORD_LENGTH(object):
FT_BITS_8 = UCHAR(8)
FT_BITS_7 = UCHAR(7)
class STOP_BITS(object):
FT_STOP_BITS_1 = UCHAR(0)
FT_STOP_BITS_2 = UCHAR(2)
class PARITY(object):
FT_PARITY_NONE = UCHAR(0)
FT_PARITY_ODD = UCHAR(1)
FT_PARITY_EVEN = UCHAR(2)
FT_PARITY_MARK = UCHAR(3)
FT_PARITY_SPACE = UCHAR(4)
try:
ftdI2xx = ctypes.WinDLL("FTD2XX.dll")
except Exception as e:
raise ImportError("Unable to find FTD2XX.dll.\nPlugging in FDTI device will install DLL.") from e
_ipdwNumDevs = DWORD(0)
_p_ipdwNumDevs = LPDWORD(_ipdwNumDevs)
def create_device_info_list():
# FT_CreateDeviceInfoList needs to be called before info can be retrieved
stat = DWORD()
stat.value = ftdI2xx.FT_CreateDeviceInfoList(_p_ipdwNumDevs)
# print(stat)
if stat.value != FT_STATUS.FT_OK.value:
pass
# print(stat)
# print(type(stat))
# print(type(FT_STATUS.FT_OK))
# print(ipdwNumDevs)
def _get_device_info_detail(pDest):
# FT_GetDeviceInfoDetail
stat = DWORD()
dev = pDest[0]
handle = DWORD()
flags = DWORD()
typeid = DWORD()
id = DWORD()
locid = DWORD()
sn = ctypes.create_string_buffer(16)
desc = ctypes.create_string_buffer(64)
stat.value = ftdI2xx.FT_GetDeviceInfoDetail(dev,
flags,
typeid,
id,
locid,
sn,
desc,
ctypes.byref(handle))
if stat.value != FT_STATUS.FT_OK.value:
raise Exception("FT_GetDeviceInfoDetail failed")
# print("flags {}".format(flags))
# print("typeid {}".format(typeid))
# print("id {}".format(id))
# print("locid {}".format(locid))
# print("sn {}".format(sn))
# print("desc {}".format(desc))
# print("handle {}".format(handle))
# FT_GetDeviceInfoList
def get_device_info_list():
stat = DWORD()
pDest = (FT_DEVICE_LIST_INFO_NODE * _ipdwNumDevs.value)()
# for num in range(_ipdwNumDevs.value):
# print(dir(pDest[num]))
# print(pDest[num].Flags)
stat.value = ftdI2xx.FT_GetDeviceInfoList(pDest, ctypes.byref(_ipdwNumDevs))
if stat.value != FT_STATUS.FT_OK.value:
raise Exception("FT_GetDeviceInfoList failed")
# for field in FT_DEVICE_LIST_INFO_NODE._fields_:
# print("{}: {} - {}".format(field[0].upper(), getattr(pDest[0], field[0]), type(getattr(pDest[0], field[0]))))
return pDest
class FTDI2xx(object):
INSTR_TYPE = "FTDI"
def __init__(self, ftdi_description):
"""
:param handle:
handle from device info
:param flag:
FLAGS
FLAGS.FT_OPEN_BY_SERIAL_NUMBER
FLAGS.FT_OPEN_BY_DESCRIPTION
FLAGS.FT_OPEN_BY_LOCATION
:param search_term:
Accompanying search term set by the flag
:return:
"""
self.handle = DWORD()
self.ftdi_description = ftdi_description
self.cmd_status = DWORD()
self._connect()
self._baud_rate = None
self.baud_rate = 9600
self.bit_mode = BIT_MODE.FT_BITMODE_CBUS_BITBANG
self.pin_value_mask = 0b111
self.std_delay = 0.01
self.delay = time.sleep
# Data characteristics
self._word_length = WORD_LENGTH.FT_BITS_8
self._stop_bits = STOP_BITS.FT_STOP_BITS_1
self._parity = PARITY.FT_PARITY_NONE
self._data_characteristics_set = False
self.bb_data = 1 << 0
self.bb_clk = 1 << 1
self.bb_latch = 1 << 2
self.bb_bytes = 1
self.bb_inv_mask = 0
def _connect(self):
self.cmd_status.value = ftdI2xx.FT_OpenEx(ctypes.c_char_p(self.ftdi_description),
FLAGS.FT_OPEN_BY_DESCRIPTION,
ctypes.byref(self.handle))
if self.cmd_status.value != FT_STATUS.FT_OK.value:
raise InstrumentError("FT_OpenEx failed")
def close(self):
self.cmd_status.value = ftdI2xx.FT_Close(self.handle)
if self.cmd_status.value != FT_STATUS.FT_OK.value:
raise InstrumentError("FT_Close failed {}".format(self.cmd_status.value))
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
@property
def word_length(self):
return self._word_length
@word_length.setter
def word_length(self, val):
if str(val) == '8':
self._word_length = WORD_LENGTH.FT_BITS_8
elif str(val) == '7':
self._word_length = WORD_LENGTH.FT_BITS_7
else:
raise ValueError("Word Length must be either 7 or 8")
self._data_characteristics_set = False
@property
def stop_bits(self):
return self._stop_bits
@stop_bits.setter
def stop_bits(self, val):
if str(val) == '1':
self._stop_bits = STOP_BITS.FT_STOP_BITS_1
elif str(val) == '2':
self._stop_bits = STOP_BITS.FT_STOP_BITS_2
else:
raise ValueError("Stop bits must be either 1 or 2")
self._data_characteristics_set = False
@property
def parity(self):
return self._parity
@parity.setter
def parity(self, val):
try:
parity = [itm for itm in PARITY.__dict__ if itm.startswith('FT_PARITY') and val.upper() in itm][0]
except IndexError:
raise ValueError("Invalid parity: Please select from {}".
format(','.join([itm for itm in PARITY.__dict__ if itm.startswith('FT_PARITY')])))
self._parity = getattr(PARITY, parity)
self._data_characteristics_set = False
@property
def baud_rate(self):
return self._baud_rate
@baud_rate.setter
def baud_rate(self, rate):
try:
self.cmd_status.value = ftdI2xx.FT_SetBaudRate(self.handle, DWORD(rate))
if self.cmd_status.value != FT_STATUS.FT_OK.value:
raise InstrumentError("FT_SetBaudRate failed")
self._baud_rate = rate
except:
self._baud_rate = None
raise
def write_bit_mode(self, mask, validate=False):
"""
handle; gained from device info
mask; value to write for the mask
for BIT_MODE.FT_BITMODE_CBUS_BITBANG
upper nibble is input (0) output (1)
lower nibble is pin value low (0) high (1)
bit_mode; Type BIT_MODE
"""
self.cmd_status.value = ftdI2xx.FT_SetBitMode(self.handle, UCHAR(mask), self.bit_mode)
if self.cmd_status.value != FT_STATUS.FT_OK.value:
raise InstrumentError("FT_SetBitMode failed")
data_bus = UCHAR()
if validate:
self.cmd_status.value = ftdI2xx.FT_GetBitMode(self.handle, ctypes.byref(data_bus))
if self.cmd_status.value != FT_STATUS.FT_OK.value:
raise InstrumentError("FT_GetBitMode failed")
return data_bus.value & self.pin_value_mask == mask & self.pin_value_mask
def get_cbus_pins(self):
try:
self.cmd_status.value = ftdI2xx.FT_SetBitMode(self.handle, UCHAR(0), BIT_MODE.FT_BITMODE_CBUS_BITBANG)
if self.cmd_status.value != FT_STATUS.FT_OK.value:
raise InstrumentError("FT_SetBitMode failed")
data_bus = UCHAR()
self.cmd_status.value = ftdI2xx.FT_GetBitMode(self.handle, ctypes.byref(data_bus))
if self.cmd_status.value != FT_STATUS.FT_OK.value:
raise InstrumentError("FT_GetBitMode failed")
finally:
self.cmd_status.value = ftdI2xx.FT_SetBitMode(self.handle, UCHAR(self.pin_value_mask), self.bit_mode)
return data_bus.value
# self.write_bit_mode(self.pin_value_mask)
def write(self, data, size=None):
if not self._data_characteristics_set:
self._set_data_characteristics()
if size is None:
size = len(data)
buffer = ctypes.create_string_buffer(bytes(data), size)
bytes_written = DWORD()
self.cmd_status.value = ftdI2xx.FT_Write(self.handle,
buffer,
ctypes.sizeof(buffer),
ctypes.byref(bytes_written))
if self.cmd_status.value != FT_STATUS.FT_OK.value:
raise InstrumentError("FT_Write failed")
def read(self):
buffer = self._read()
return buffer.value
def read_raw(self):
buffer = self._read()
return buffer.raw
def _read(self):
if not self._data_characteristics_set:
self._set_data_characteristics()
amount_in_rx_queue = DWORD()
amount_in_tx_queue = DWORD()
status = DWORD()
self.cmd_status.value = ftdI2xx.FT_GetStatus(self.handle,
ctypes.byref(amount_in_rx_queue),
ctypes.byref(amount_in_tx_queue),
ctypes.byref(status))
if self.cmd_status.value != FT_STATUS.FT_OK.value:
raise InstrumentError("FT_GetStatus failed")
buffer = ctypes.create_string_buffer(amount_in_rx_queue.value)
bytes_read = DWORD()
self.cmd_status.value = ftdI2xx.FT_Read(self.handle,
ctypes.byref(buffer),
amount_in_rx_queue,
ctypes.byref(bytes_read))
if self.cmd_status.value != FT_STATUS.FT_OK.value:
raise InstrumentError("FT_Read failed")
return buffer
def _set_data_characteristics(self):
if not [x for x in [self.word_length, self.stop_bits, self.parity] if x is None]:
self.cmd_status.value = ftdI2xx.FT_SetDataCharacteristics(self.handle,
self.word_length,
self.stop_bits,
self.parity)
if self.cmd_status.value != FT_STATUS.FT_OK.value:
raise InstrumentError("FT_SetDatCharacteristics failed")
self._data_characteristics_set = True
return
raise ValueError("Please ensure that word length, stop bits and parity are set")
def serial_shift_bit_bang(self, data, bytes_required=None):
bytes_required = bytes_required or self.bb_bytes
if self.bit_mode == BIT_MODE.FT_BITMODE_CBUS_BITBANG:
bit_bang = self._serial_shift_bit_bang(data, bytes_required,
bb_mask=(self.bb_clk + self.bb_data + self.bb_latch) << 4)
for byte in bit_bang:
self.write_bit_mode(byte)
else:
bit_bang = self._serial_shift_bit_bang(data, bytes_required, bb_mask=0)
self.write(bit_bang)
def configure_bit_bang(self, bit_mode, bytes_required, latch_mask=1, clk_mask=2, data_mask=4, invert_mask=0b000):
"""
:param bit_mode:
:param bytes_required:
:param latch_mask: CBUS Pin for latch. 1 Default for Relay Matrix
:param clk_mask: CBUS Pin for clock. 2 Default for Relay Matrix
:param data_mask: CBUS Pin for data. 4 Default for Relay Matrix
:param invert_mask: Mask for inverting. 0b111 For all inverted 0b000 for all non inverted
based on MSB 0b<latch><clock><data> LSB
:return:
"""
self.bb_bytes = bytes_required
self.bit_mode = bit_mode
self.write_bit_mode(self.pin_value_mask)
self.bb_data = data_mask
self.bb_clk = clk_mask
self.bb_latch = latch_mask
self.bb_inv_mask = 0
if (1 << 2) & invert_mask:
self.bb_inv_mask += self.bb_latch
if (1 << 1) & invert_mask:
self.bb_inv_mask += self.bb_clk
if 1 & invert_mask:
self.bb_inv_mask += self.bb_data
def _serial_shift_bit_bang(self, data, bytes_required, bb_mask):
data_out = bytearray()
data_out.append(bb_mask + self.bb_inv_mask)
for b in bits(data, num_bytes=bytes_required):
# Write Data
if b:
data_out.append(bb_mask + self.bb_data ^ self.bb_inv_mask)
# Clock Up
data_out.append(bb_mask + (self.bb_data + self.bb_clk) ^ self.bb_inv_mask)
else:
data_out.append(bb_mask + self.bb_inv_mask)
# Clock Up
data_out.append(bb_mask + self.bb_clk ^ self.bb_inv_mask)
# Latch to output
data_out.append(bb_mask + self.bb_inv_mask)
data_out.append(bb_mask + self.bb_latch ^ self.bb_inv_mask)
data_out.append(bb_mask + self.bb_inv_mask)
return data_out
| [
"struct.calcsize",
"ctypes.byref",
"ctypes.POINTER",
"ctypes.create_string_buffer",
"ctypes.WinDLL",
"fixate.core.exceptions.InstrumentError",
"ctypes.c_char_p",
"fixate.core.common.bits",
"ctypes.sizeof"
] | [((882, 911), 'ctypes.POINTER', 'ctypes.POINTER', (['ctypes.c_char'], {}), '(ctypes.c_char)\n', (896, 911), False, 'import ctypes\n'), ((921, 951), 'ctypes.POINTER', 'ctypes.POINTER', (['ctypes.c_ubyte'], {}), '(ctypes.c_ubyte)\n', (935, 951), False, 'import ctypes\n'), ((985, 1015), 'ctypes.POINTER', 'ctypes.POINTER', (['ctypes.c_ulong'], {}), '(ctypes.c_ulong)\n', (999, 1015), False, 'import ctypes\n'), ((2602, 2622), 'struct.calcsize', 'struct.calcsize', (['"""P"""'], {}), "('P')\n", (2617, 2622), False, 'import struct\n'), ((3767, 3794), 'ctypes.WinDLL', 'ctypes.WinDLL', (['"""FTD2XX.dll"""'], {}), "('FTD2XX.dll')\n", (3780, 3794), False, 'import ctypes\n'), ((4585, 4616), 'ctypes.create_string_buffer', 'ctypes.create_string_buffer', (['(16)'], {}), '(16)\n', (4612, 4616), False, 'import ctypes\n'), ((4628, 4659), 'ctypes.create_string_buffer', 'ctypes.create_string_buffer', (['(64)'], {}), '(64)\n', (4655, 4659), False, 'import ctypes\n'), ((5085, 5105), 'ctypes.byref', 'ctypes.byref', (['handle'], {}), '(handle)\n', (5097, 5105), False, 'import ctypes\n'), ((5785, 5811), 'ctypes.byref', 'ctypes.byref', (['_ipdwNumDevs'], {}), '(_ipdwNumDevs)\n', (5797, 5811), False, 'import ctypes\n'), ((12989, 13042), 'ctypes.create_string_buffer', 'ctypes.create_string_buffer', (['amount_in_rx_queue.value'], {}), '(amount_in_rx_queue.value)\n', (13016, 13042), False, 'import ctypes\n'), ((16051, 16087), 'fixate.core.common.bits', 'bits', (['data'], {'num_bytes': 'bytes_required'}), '(data, num_bytes=bytes_required)\n', (16055, 16087), False, 'from fixate.core.common import bits\n'), ((7332, 7370), 'ctypes.c_char_p', 'ctypes.c_char_p', (['self.ftdi_description'], {}), '(self.ftdi_description)\n', (7347, 7370), False, 'import ctypes\n'), ((7502, 7527), 'ctypes.byref', 'ctypes.byref', (['self.handle'], {}), '(self.handle)\n', (7514, 7527), False, 'import ctypes\n'), ((7606, 7641), 'fixate.core.exceptions.InstrumentError', 'InstrumentError', (['"""FT_OpenEx failed"""'], {}), "('FT_OpenEx failed')\n", (7621, 7641), False, 'from fixate.core.exceptions import InstrumentError, InstrumentNotConnected\n'), ((10364, 10403), 'fixate.core.exceptions.InstrumentError', 'InstrumentError', (['"""FT_SetBitMode failed"""'], {}), "('FT_SetBitMode failed')\n", (10379, 10403), False, 'from fixate.core.exceptions import InstrumentError, InstrumentNotConnected\n'), ((11954, 11975), 'ctypes.sizeof', 'ctypes.sizeof', (['buffer'], {}), '(buffer)\n', (11967, 11975), False, 'import ctypes\n'), ((12026, 12053), 'ctypes.byref', 'ctypes.byref', (['bytes_written'], {}), '(bytes_written)\n', (12038, 12053), False, 'import ctypes\n'), ((12132, 12166), 'fixate.core.exceptions.InstrumentError', 'InstrumentError', (['"""FT_Write failed"""'], {}), "('FT_Write failed')\n", (12147, 12166), False, 'from fixate.core.exceptions import InstrumentError, InstrumentNotConnected\n'), ((12660, 12692), 'ctypes.byref', 'ctypes.byref', (['amount_in_rx_queue'], {}), '(amount_in_rx_queue)\n', (12672, 12692), False, 'import ctypes\n'), ((12747, 12779), 'ctypes.byref', 'ctypes.byref', (['amount_in_tx_queue'], {}), '(amount_in_tx_queue)\n', (12759, 12779), False, 'import ctypes\n'), ((12834, 12854), 'ctypes.byref', 'ctypes.byref', (['status'], {}), '(status)\n', (12846, 12854), False, 'import ctypes\n'), ((12933, 12971), 'fixate.core.exceptions.InstrumentError', 'InstrumentError', (['"""FT_GetStatus failed"""'], {}), "('FT_GetStatus failed')\n", (12948, 12971), False, 'from fixate.core.exceptions import InstrumentError, InstrumentNotConnected\n'), ((13181, 13201), 'ctypes.byref', 'ctypes.byref', (['buffer'], {}), '(buffer)\n', (13193, 13201), False, 'import ctypes\n'), ((13319, 13343), 'ctypes.byref', 'ctypes.byref', (['bytes_read'], {}), '(bytes_read)\n', (13331, 13343), False, 'import ctypes\n'), ((13422, 13455), 'fixate.core.exceptions.InstrumentError', 'InstrumentError', (['"""FT_Read failed"""'], {}), "('FT_Read failed')\n", (13437, 13455), False, 'from fixate.core.exceptions import InstrumentError, InstrumentNotConnected\n'), ((9679, 9719), 'fixate.core.exceptions.InstrumentError', 'InstrumentError', (['"""FT_SetBaudRate failed"""'], {}), "('FT_SetBaudRate failed')\n", (9694, 9719), False, 'from fixate.core.exceptions import InstrumentError, InstrumentNotConnected\n'), ((10523, 10545), 'ctypes.byref', 'ctypes.byref', (['data_bus'], {}), '(data_bus)\n', (10535, 10545), False, 'import ctypes\n'), ((10632, 10671), 'fixate.core.exceptions.InstrumentError', 'InstrumentError', (['"""FT_GetBitMode failed"""'], {}), "('FT_GetBitMode failed')\n", (10647, 10671), False, 'from fixate.core.exceptions import InstrumentError, InstrumentNotConnected\n'), ((11001, 11040), 'fixate.core.exceptions.InstrumentError', 'InstrumentError', (['"""FT_SetBitMode failed"""'], {}), "('FT_SetBitMode failed')\n", (11016, 11040), False, 'from fixate.core.exceptions import InstrumentError, InstrumentNotConnected\n'), ((11143, 11165), 'ctypes.byref', 'ctypes.byref', (['data_bus'], {}), '(data_bus)\n', (11155, 11165), False, 'import ctypes\n'), ((11252, 11291), 'fixate.core.exceptions.InstrumentError', 'InstrumentError', (['"""FT_GetBitMode failed"""'], {}), "('FT_GetBitMode failed')\n", (11267, 11291), False, 'from fixate.core.exceptions import InstrumentError, InstrumentNotConnected\n'), ((14035, 14085), 'fixate.core.exceptions.InstrumentError', 'InstrumentError', (['"""FT_SetDatCharacteristics failed"""'], {}), "('FT_SetDatCharacteristics failed')\n", (14050, 14085), False, 'from fixate.core.exceptions import InstrumentError, InstrumentNotConnected\n')] |
import unittest
from nose.tools import eq_, ok_, raises
from sqlalchemy import create_engine, MetaData, Column, Integer, func
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from geoalchemy2 import Geometry
from sqlalchemy.exc import DataError, IntegrityError, InternalError
engine = create_engine('postgresql://gis:gis@localhost/gis', echo=True)
metadata = MetaData(engine)
Base = declarative_base(metadata=metadata)
class Lake(Base):
__tablename__ = 'lake'
id = Column(Integer, primary_key=True)
geom = Column(Geometry(geometry_type='LINESTRING', srid=4326))
def __init__(self, geom):
self.geom = geom
session = sessionmaker(bind=engine)()
postgis_version = session.execute(func.postgis_version()).scalar()
if not postgis_version.startswith('2.'):
# With PostGIS 1.x the AddGeometryColumn and DropGeometryColumn
# management functions should be used.
Lake.__table__.c.geom.type.management = True
class IndexTest(unittest.TestCase):
def setUp(self):
metadata.drop_all(checkfirst=True)
metadata.create_all()
def tearDown(self):
session.rollback()
metadata.drop_all()
def test_LakeIndex(self):
""" Make sure the Lake table has an index on the geom column """
from sqlalchemy.engine import reflection
inspector = reflection.Inspector.from_engine(engine)
indices = inspector.get_indexes(Lake.__tablename__)
eq_(len(indices), 1)
index = indices[0]
eq_(index.get('unique'), False)
eq_(index.get('column_names'), [u'geom'])
class InsertionTest(unittest.TestCase):
def setUp(self):
metadata.drop_all(checkfirst=True)
metadata.create_all()
def tearDown(self):
session.rollback()
metadata.drop_all()
@raises(DataError, IntegrityError)
def test_WKT(self):
# With PostGIS 1.5:
# IntegrityError: (IntegrityError) new row for relation "lake" violates
# check constraint "enforce_srid_geom"
#
# With PostGIS 2.0:
# DataError: (DataError) Geometry SRID (0) does not match column SRID
# (4326)
l = Lake('LINESTRING(0 0,1 1)')
session.add(l)
session.flush()
def test_WKTElement(self):
from geoalchemy2 import WKTElement, WKBElement
l = Lake(WKTElement('LINESTRING(0 0,1 1)', srid=4326))
session.add(l)
session.flush()
session.expire(l)
ok_(isinstance(l.geom, WKBElement))
wkt = session.execute(l.geom.ST_AsText()).scalar()
eq_(wkt, 'LINESTRING(0 0,1 1)')
srid = session.execute(l.geom.ST_SRID()).scalar()
eq_(srid, 4326)
class CallFunctionTest(unittest.TestCase):
def setUp(self):
metadata.drop_all(checkfirst=True)
metadata.create_all()
def tearDown(self):
session.rollback()
metadata.drop_all()
def _create_one(self):
from geoalchemy2 import WKTElement
l = Lake(WKTElement('LINESTRING(0 0,1 1)', srid=4326))
session.add(l)
session.flush()
return l.id
def test_ST_GeometryType(self):
from sqlalchemy.sql import select, func
lake_id = self._create_one()
s = select([func.ST_GeometryType(Lake.__table__.c.geom)])
r1 = session.execute(s).scalar()
eq_(r1, 'ST_LineString')
lake = session.query(Lake).get(lake_id)
r2 = session.execute(lake.geom.ST_GeometryType()).scalar()
eq_(r2, 'ST_LineString')
r3 = session.query(Lake.geom.ST_GeometryType()).scalar()
eq_(r3, 'ST_LineString')
r4 = session.query(Lake).filter(
Lake.geom.ST_GeometryType() == 'ST_LineString').one()
ok_(isinstance(r4, Lake))
eq_(r4.id, lake_id)
def test_ST_Buffer(self):
from sqlalchemy.sql import select, func
from geoalchemy2 import WKBElement, WKTElement
lake_id = self._create_one()
s = select([func.ST_Buffer(Lake.__table__.c.geom, 2)])
r1 = session.execute(s).scalar()
ok_(isinstance(r1, WKBElement))
lake = session.query(Lake).get(lake_id)
r2 = session.execute(lake.geom.ST_Buffer(2)).scalar()
ok_(isinstance(r2, WKBElement))
r3 = session.query(Lake.geom.ST_Buffer(2)).scalar()
ok_(isinstance(r3, WKBElement))
ok_(r1.data == r2.data == r3.data)
r4 = session.query(Lake).filter(
func.ST_Within(WKTElement('POINT(0 0)', srid=4326),
Lake.geom.ST_Buffer(2))).one()
ok_(isinstance(r4, Lake))
eq_(r4.id, lake_id)
@raises(InternalError)
def test_ST_Buffer_Mixed_SRID(self):
from sqlalchemy.sql import func
self._create_one()
session.query(Lake).filter(
func.ST_Within('POINT(0 0)',
Lake.geom.ST_Buffer(2))).one()
class ReflectionTest(unittest.TestCase):
def setUp(self):
metadata.drop_all(checkfirst=True)
metadata.create_all()
def tearDown(self):
metadata.drop_all()
def test_reflection(self):
from sqlalchemy import Table
from geoalchemy2 import Geometry
t = Table('lake', MetaData(), autoload=True, autoload_with=engine)
type_ = t.c.geom.type
ok_(isinstance(type_, Geometry))
if not postgis_version.startswith('2.'):
eq_(type_.geometry_type, 'GEOMETRY')
eq_(type_.srid, -1)
else:
eq_(type_.geometry_type, 'LINESTRING')
eq_(type_.srid, 4326)
| [
"sqlalchemy.orm.sessionmaker",
"nose.tools.eq_",
"sqlalchemy.sql.func.postgis_version",
"sqlalchemy.create_engine",
"sqlalchemy.engine.reflection.Inspector.from_engine",
"sqlalchemy.sql.func.ST_Buffer",
"sqlalchemy.MetaData",
"nose.tools.raises",
"sqlalchemy.ext.declarative.declarative_base",
"geo... | [((336, 398), 'sqlalchemy.create_engine', 'create_engine', (['"""postgresql://gis:gis@localhost/gis"""'], {'echo': '(True)'}), "('postgresql://gis:gis@localhost/gis', echo=True)\n", (349, 398), False, 'from sqlalchemy import create_engine, MetaData, Column, Integer, func\n'), ((410, 426), 'sqlalchemy.MetaData', 'MetaData', (['engine'], {}), '(engine)\n', (418, 426), False, 'from sqlalchemy import create_engine, MetaData, Column, Integer, func\n'), ((434, 469), 'sqlalchemy.ext.declarative.declarative_base', 'declarative_base', ([], {'metadata': 'metadata'}), '(metadata=metadata)\n', (450, 469), False, 'from sqlalchemy.ext.declarative import declarative_base\n'), ((526, 559), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (532, 559), False, 'from sqlalchemy import create_engine, MetaData, Column, Integer, func\n'), ((695, 720), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', ([], {'bind': 'engine'}), '(bind=engine)\n', (707, 720), False, 'from sqlalchemy.orm import sessionmaker\n'), ((1850, 1883), 'nose.tools.raises', 'raises', (['DataError', 'IntegrityError'], {}), '(DataError, IntegrityError)\n', (1856, 1883), False, 'from nose.tools import eq_, ok_, raises\n'), ((4679, 4700), 'nose.tools.raises', 'raises', (['InternalError'], {}), '(InternalError)\n', (4685, 4700), False, 'from nose.tools import eq_, ok_, raises\n'), ((578, 625), 'geoalchemy2.Geometry', 'Geometry', ([], {'geometry_type': '"""LINESTRING"""', 'srid': '(4326)'}), "(geometry_type='LINESTRING', srid=4326)\n", (586, 625), False, 'from geoalchemy2 import Geometry\n'), ((1379, 1419), 'sqlalchemy.engine.reflection.Inspector.from_engine', 'reflection.Inspector.from_engine', (['engine'], {}), '(engine)\n', (1411, 1419), False, 'from sqlalchemy.engine import reflection\n'), ((2617, 2648), 'nose.tools.eq_', 'eq_', (['wkt', '"""LINESTRING(0 0,1 1)"""'], {}), "(wkt, 'LINESTRING(0 0,1 1)')\n", (2620, 2648), False, 'from nose.tools import eq_, ok_, raises\n'), ((2715, 2730), 'nose.tools.eq_', 'eq_', (['srid', '(4326)'], {}), '(srid, 4326)\n', (2718, 2730), False, 'from nose.tools import eq_, ok_, raises\n'), ((3391, 3415), 'nose.tools.eq_', 'eq_', (['r1', '"""ST_LineString"""'], {}), "(r1, 'ST_LineString')\n", (3394, 3415), False, 'from nose.tools import eq_, ok_, raises\n'), ((3540, 3564), 'nose.tools.eq_', 'eq_', (['r2', '"""ST_LineString"""'], {}), "(r2, 'ST_LineString')\n", (3543, 3564), False, 'from nose.tools import eq_, ok_, raises\n'), ((3639, 3663), 'nose.tools.eq_', 'eq_', (['r3', '"""ST_LineString"""'], {}), "(r3, 'ST_LineString')\n", (3642, 3663), False, 'from nose.tools import eq_, ok_, raises\n'), ((3814, 3833), 'nose.tools.eq_', 'eq_', (['r4.id', 'lake_id'], {}), '(r4.id, lake_id)\n', (3817, 3833), False, 'from nose.tools import eq_, ok_, raises\n'), ((4412, 4446), 'nose.tools.ok_', 'ok_', (['(r1.data == r2.data == r3.data)'], {}), '(r1.data == r2.data == r3.data)\n', (4415, 4446), False, 'from nose.tools import eq_, ok_, raises\n'), ((4653, 4672), 'nose.tools.eq_', 'eq_', (['r4.id', 'lake_id'], {}), '(r4.id, lake_id)\n', (4656, 4672), False, 'from nose.tools import eq_, ok_, raises\n'), ((758, 780), 'sqlalchemy.sql.func.postgis_version', 'func.postgis_version', ([], {}), '()\n', (778, 780), False, 'from sqlalchemy.sql import func\n'), ((2387, 2431), 'geoalchemy2.WKTElement', 'WKTElement', (['"""LINESTRING(0 0,1 1)"""'], {'srid': '(4326)'}), "('LINESTRING(0 0,1 1)', srid=4326)\n", (2397, 2431), False, 'from geoalchemy2 import WKBElement, WKTElement\n'), ((3039, 3083), 'geoalchemy2.WKTElement', 'WKTElement', (['"""LINESTRING(0 0,1 1)"""'], {'srid': '(4326)'}), "('LINESTRING(0 0,1 1)', srid=4326)\n", (3049, 3083), False, 'from geoalchemy2 import WKBElement, WKTElement\n'), ((5272, 5282), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (5280, 5282), False, 'from sqlalchemy import create_engine, MetaData, Column, Integer, func\n'), ((5453, 5489), 'nose.tools.eq_', 'eq_', (['type_.geometry_type', '"""GEOMETRY"""'], {}), "(type_.geometry_type, 'GEOMETRY')\n", (5456, 5489), False, 'from nose.tools import eq_, ok_, raises\n'), ((5502, 5521), 'nose.tools.eq_', 'eq_', (['type_.srid', '(-1)'], {}), '(type_.srid, -1)\n', (5505, 5521), False, 'from nose.tools import eq_, ok_, raises\n'), ((5548, 5586), 'nose.tools.eq_', 'eq_', (['type_.geometry_type', '"""LINESTRING"""'], {}), "(type_.geometry_type, 'LINESTRING')\n", (5551, 5586), False, 'from nose.tools import eq_, ok_, raises\n'), ((5599, 5620), 'nose.tools.eq_', 'eq_', (['type_.srid', '(4326)'], {}), '(type_.srid, 4326)\n', (5602, 5620), False, 'from nose.tools import eq_, ok_, raises\n'), ((3296, 3339), 'sqlalchemy.sql.func.ST_GeometryType', 'func.ST_GeometryType', (['Lake.__table__.c.geom'], {}), '(Lake.__table__.c.geom)\n', (3316, 3339), False, 'from sqlalchemy.sql import func\n'), ((4027, 4067), 'sqlalchemy.sql.func.ST_Buffer', 'func.ST_Buffer', (['Lake.__table__.c.geom', '(2)'], {}), '(Lake.__table__.c.geom, 2)\n', (4041, 4067), False, 'from sqlalchemy.sql import func\n'), ((4516, 4551), 'geoalchemy2.WKTElement', 'WKTElement', (['"""POINT(0 0)"""'], {'srid': '(4326)'}), "('POINT(0 0)', srid=4326)\n", (4526, 4551), False, 'from geoalchemy2 import WKBElement, WKTElement\n')] |
from dateutil.relativedelta import relativedelta
from uuid import uuid4
import unittest
import pytz
from django.test import TestCase
from django.utils.timezone import datetime
from core.tests.helpers import CoreProviderMachineHelper, CoreMachineRequestHelper, CoreInstanceHelper
from service.machine import process_machine_request
class CoreMachineRequestTestCase(unittest.TestCase):
"""
Add here any specific assertions to a 'MachineRequest' test case
"""
# Super-helpful private methods
def _new_instance_of(self, machine, start_date):
# Create an instance of this machine
instance_helper = CoreInstanceHelper(
"Mock Instance", uuid4(),
start_date, machine=machine)
instance = instance_helper.to_core_instance()
return instance
def _process_new_fork_request(
self,
machine,
new_name,
new_version,
uuid_suffix,
fork_date=None):
if not fork_date:
fork_date = self.start_time
instance = self._new_instance_of(machine, fork_date)
# Create a MachineRequest for newly created Instance
new_app_request_helper = CoreMachineRequestHelper(
new_name, fork_date, new_version, True, instance)
new_app_request = new_app_request_helper.to_core_machine_request()
process_machine_request(new_app_request, 'machine-%s' % uuid_suffix,
update_cloud=False)
new_machine = new_app_request.new_machine
return new_machine
def _process_new_update_request(
self,
machine,
new_name,
new_version,
uuid_suffix,
update_date=None):
if not update_date:
update_date = self.start_time
instance = self._new_instance_of(machine, update_date)
update_request_helper = CoreMachineRequestHelper(
new_name, update_date, new_version, False, instance)
core_request = update_request_helper.to_core_machine_request()
process_machine_request(core_request, 'machine-%s' % uuid_suffix,
update_cloud=False)
new_machine = core_request.new_machine
return new_machine
# Custom assertions
def assertMachineVersionEquals(self, machine, version_test):
self.assertEqual(machine.version, version_test)
def assertApplicationNameEquals(self, machine, name_test):
self.assertEqual(machine.application.name, name_test)
class TestVersionAndForking(CoreMachineRequestTestCase):
def setUp(self):
self.start_time = datetime(2015, 1, 1, tzinfo=pytz.utc)
provider_machine_helper = CoreProviderMachineHelper(
'First machine', 'machine-1', 'openstack', self.start_time)
self.machine_1 = provider_machine_helper.to_core_machine()
self.instance_helper = CoreInstanceHelper(
"test_instance", "1234-1234-1234-1234",
self.start_time, machine=self.machine_1)
self.instance_1 = self.instance_helper.to_core_instance()
pass
def test_single_version_updating(self):
"""
This test meant to represent which rules will succed/fail as
'acceptable' versions. Currently, all version strings are acceptable.
As these rules change, the tests will change/grow..
"""
provider_machine_helper = CoreProviderMachineHelper(
'Test Versioning',
'machine-version-1',
'openstack',
self.start_time)
machine_1 = provider_machine_helper.to_core_machine()
machine_1.update_version('1')
self.assertMachineVersionEquals(machine_1, '1')
machine_1.update_version('1.2.1')
self.assertMachineVersionEquals(machine_1, '1.2.1')
machine_1.update_version('one-two-two')
self.assertMachineVersionEquals(machine_1, 'one-two-two')
machine_1.update_version('man-bear-pig')
self.assertMachineVersionEquals(machine_1, 'man-bear-pig')
pass
def test_update_then_fork(self):
provider_machine_helper = CoreProviderMachineHelper(
'New Machine', 'new-machine-1', 'openstack', self.start_time)
machine_1 = provider_machine_helper.to_core_machine()
machine_2 = self._process_new_update_request(
machine_1,
"New Name, Same Version",
"2.0",
2)
self.assertApplicationNameEquals(machine_2, "New Name, Same Version")
self.assertMachineVersionEquals(machine_2, "2.0")
machine_3 = self._process_new_fork_request(
machine_2,
"Totally different",
"1.0",
3)
self.assertApplicationNameEquals(machine_3, "Totally different")
self.assertMachineVersionEquals(machine_3, "1.0")
pass
def test_complex_fork_tree(self):
# Boot strap the first machine
provider_machine_helper = CoreProviderMachineHelper(
'Complex Fork Test-New Machine',
'new-machine-1234', 'openstack', self.start_time)
machine_1 = provider_machine_helper.to_core_machine()
machine_2 = self._process_new_update_request(
machine_1, machine_1.application.name, "2.0", 2)
self.assertApplicationNameEquals(machine_2, machine_1.application.name)
self.assertMachineVersionEquals(machine_2, "2.0")
machine_3 = self._process_new_update_request(
machine_1, machine_1.application.name, "3.0", 3)
self.assertApplicationNameEquals(machine_3, machine_1.application.name)
self.assertMachineVersionEquals(machine_3, "3.0")
machine_4 = self._process_new_update_request(
machine_1, machine_1.application.name, "4.0", 4)
self.assertApplicationNameEquals(machine_4, machine_1.application.name)
self.assertMachineVersionEquals(machine_4, "4.0")
self.assertApplicationNameEquals(machine_1, machine_4.application.name)
fork_level_2 = self._process_new_fork_request(
machine_2, "I am not machine 2", "1.0.0", 5)
self.assertNotEqual(fork_level_2.application.name,
machine_2.application.name)
update_fork_2 = self._process_new_update_request(
fork_level_2, "not machine 2, but an update", "2.0.0", 6)
self.assertApplicationNameEquals(
fork_level_2,
"not machine 2, but an update")
self.assertApplicationNameEquals(
update_fork_2,
"not machine 2, but an update")
self.assertMachineVersionEquals(fork_level_2, "1.0.0")
self.assertMachineVersionEquals(update_fork_2, "2.0.0")
fork_level_3 = self._process_new_fork_request(
machine_3, "I am different from machine 3", "3.0.5", 7)
self.assertNotEqual(fork_level_3.application.name,
machine_3.application.name)
update_fork_3 = self._process_new_update_request(
fork_level_3, fork_level_3.application.name, "3.0.6", 8)
self.assertApplicationNameEquals(
fork_level_3,
"I am different from machine 3")
self.assertApplicationNameEquals(
update_fork_3,
"I am different from machine 3")
self.assertMachineVersionEquals(fork_level_3, "3.0.5")
self.assertMachineVersionEquals(update_fork_3, "3.0.6")
pass
| [
"service.machine.process_machine_request",
"core.tests.helpers.CoreInstanceHelper",
"django.utils.timezone.datetime",
"uuid.uuid4",
"core.tests.helpers.CoreMachineRequestHelper",
"core.tests.helpers.CoreProviderMachineHelper"
] | [((1211, 1285), 'core.tests.helpers.CoreMachineRequestHelper', 'CoreMachineRequestHelper', (['new_name', 'fork_date', 'new_version', '(True)', 'instance'], {}), '(new_name, fork_date, new_version, True, instance)\n', (1235, 1285), False, 'from core.tests.helpers import CoreProviderMachineHelper, CoreMachineRequestHelper, CoreInstanceHelper\n'), ((1382, 1474), 'service.machine.process_machine_request', 'process_machine_request', (['new_app_request', "('machine-%s' % uuid_suffix)"], {'update_cloud': '(False)'}), "(new_app_request, 'machine-%s' % uuid_suffix,\n update_cloud=False)\n", (1405, 1474), False, 'from service.machine import process_machine_request\n'), ((1925, 2002), 'core.tests.helpers.CoreMachineRequestHelper', 'CoreMachineRequestHelper', (['new_name', 'update_date', 'new_version', '(False)', 'instance'], {}), '(new_name, update_date, new_version, False, instance)\n', (1949, 2002), False, 'from core.tests.helpers import CoreProviderMachineHelper, CoreMachineRequestHelper, CoreInstanceHelper\n'), ((2095, 2184), 'service.machine.process_machine_request', 'process_machine_request', (['core_request', "('machine-%s' % uuid_suffix)"], {'update_cloud': '(False)'}), "(core_request, 'machine-%s' % uuid_suffix,\n update_cloud=False)\n", (2118, 2184), False, 'from service.machine import process_machine_request\n'), ((2666, 2703), 'django.utils.timezone.datetime', 'datetime', (['(2015)', '(1)', '(1)'], {'tzinfo': 'pytz.utc'}), '(2015, 1, 1, tzinfo=pytz.utc)\n', (2674, 2703), False, 'from django.utils.timezone import datetime\n'), ((2739, 2829), 'core.tests.helpers.CoreProviderMachineHelper', 'CoreProviderMachineHelper', (['"""First machine"""', '"""machine-1"""', '"""openstack"""', 'self.start_time'], {}), "('First machine', 'machine-1', 'openstack', self.\n start_time)\n", (2764, 2829), False, 'from core.tests.helpers import CoreProviderMachineHelper, CoreMachineRequestHelper, CoreInstanceHelper\n'), ((2937, 3040), 'core.tests.helpers.CoreInstanceHelper', 'CoreInstanceHelper', (['"""test_instance"""', '"""1234-1234-1234-1234"""', 'self.start_time'], {'machine': 'self.machine_1'}), "('test_instance', '1234-1234-1234-1234', self.start_time,\n machine=self.machine_1)\n", (2955, 3040), False, 'from core.tests.helpers import CoreProviderMachineHelper, CoreMachineRequestHelper, CoreInstanceHelper\n'), ((3451, 3550), 'core.tests.helpers.CoreProviderMachineHelper', 'CoreProviderMachineHelper', (['"""Test Versioning"""', '"""machine-version-1"""', '"""openstack"""', 'self.start_time'], {}), "('Test Versioning', 'machine-version-1',\n 'openstack', self.start_time)\n", (3476, 3550), False, 'from core.tests.helpers import CoreProviderMachineHelper, CoreMachineRequestHelper, CoreInstanceHelper\n'), ((4169, 4261), 'core.tests.helpers.CoreProviderMachineHelper', 'CoreProviderMachineHelper', (['"""New Machine"""', '"""new-machine-1"""', '"""openstack"""', 'self.start_time'], {}), "('New Machine', 'new-machine-1', 'openstack', self\n .start_time)\n", (4194, 4261), False, 'from core.tests.helpers import CoreProviderMachineHelper, CoreMachineRequestHelper, CoreInstanceHelper\n'), ((5015, 5127), 'core.tests.helpers.CoreProviderMachineHelper', 'CoreProviderMachineHelper', (['"""Complex Fork Test-New Machine"""', '"""new-machine-1234"""', '"""openstack"""', 'self.start_time'], {}), "('Complex Fork Test-New Machine',\n 'new-machine-1234', 'openstack', self.start_time)\n", (5040, 5127), False, 'from core.tests.helpers import CoreProviderMachineHelper, CoreMachineRequestHelper, CoreInstanceHelper\n'), ((686, 693), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (691, 693), False, 'from uuid import uuid4\n')] |
from rest_framework.views import APIView
from rest_framework.response import Response
from django.shortcuts import render
from django.http.response import JsonResponse
from nitmis_admin.serializers.UserSerializer import UserSerializer
def create_user(role="Guest"):
"""
"""
def fun_wrapper(func):
def wrapper(*args, **kwargs):
serializer = UserSerializer(data=args[1].data)
#
# If the data is valid, create a new user
# and return the access token details.
if serializer.is_valid():
serializer.save(role=role)
return JsonResponse(serializer.data)
return JsonResponse({"errors": serializer.errors}, status=422)
return wrapper
return fun_wrapper
class Register(APIView):
'''
Parent register controller. Post requests create
a general Guest account
'''
def get(self, request):
'''
Renders the base layout on GET request. Frontend
handles the rendering of forms
'''
return render(request, 'base.html')
@create_user()
def post(self, request):
'''
Registers a new user and assigns the user
a Guest role.
'''
class AdminRegister(Register):
'''
Register controller for administrators.
'''
@create_user(role="Administrator")
def post(self, request):
'''
Overriden post function. Registers the user as
an administrator
'''
| [
"django.shortcuts.render",
"nitmis_admin.serializers.UserSerializer.UserSerializer",
"django.http.response.JsonResponse"
] | [((1070, 1098), 'django.shortcuts.render', 'render', (['request', '"""base.html"""'], {}), "(request, 'base.html')\n", (1076, 1098), False, 'from django.shortcuts import render\n'), ((374, 407), 'nitmis_admin.serializers.UserSerializer.UserSerializer', 'UserSerializer', ([], {'data': 'args[1].data'}), '(data=args[1].data)\n', (388, 407), False, 'from nitmis_admin.serializers.UserSerializer import UserSerializer\n'), ((680, 735), 'django.http.response.JsonResponse', 'JsonResponse', (["{'errors': serializer.errors}"], {'status': '(422)'}), "({'errors': serializer.errors}, status=422)\n", (692, 735), False, 'from django.http.response import JsonResponse\n'), ((631, 660), 'django.http.response.JsonResponse', 'JsonResponse', (['serializer.data'], {}), '(serializer.data)\n', (643, 660), False, 'from django.http.response import JsonResponse\n')] |
from django.utils.timezone import now
class NotHistorical(TypeError):
"""No related history model found."""
def get_history_model_for_model(model):
"""Find the history model for a given app model."""
try:
manager_name = model._meta.simple_history_manager_attribute
except AttributeError:
raise NotHistorical("Cannot find a historical model for "
"{model}.".format(model=model))
return getattr(model, manager_name).model
def bulk_history_create(model, history_model):
"""Save a copy of all instances to the historical model."""
historical_instances = [
history_model(
history_date=getattr(instance, '_history_date', now()),
history_user=getattr(instance, '_history_user', None),
**dict((field.attname, getattr(instance, field.attname))
for field in instance._meta.fields)
) for instance in model.objects.all()]
history_model.objects.bulk_create(historical_instances)
| [
"django.utils.timezone.now"
] | [((713, 718), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (716, 718), False, 'from django.utils.timezone import now\n')] |
from flask_login import LoginManager
from PhoenixNow.model import User
login_manager = LoginManager()
login_manager.login_view = "regular.signin"
@login_manager.user_loader
def load_user(user_id):
user = User.query.filter_by(id=user_id).first()
return user
| [
"flask_login.LoginManager",
"PhoenixNow.model.User.query.filter_by"
] | [((88, 102), 'flask_login.LoginManager', 'LoginManager', ([], {}), '()\n', (100, 102), False, 'from flask_login import LoginManager\n'), ((210, 242), 'PhoenixNow.model.User.query.filter_by', 'User.query.filter_by', ([], {'id': 'user_id'}), '(id=user_id)\n', (230, 242), False, 'from PhoenixNow.model import User\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-16 07:49
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Game',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('start_time', models.DateTimeField(auto_now_add=True)),
('last_active', models.DateTimeField(auto_now=True)),
('first_player', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='game_first_player', to=settings.AUTH_USER_MODEL)),
('next_to_move', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='games_to_move', to=settings.AUTH_USER_MODEL)),
('second_player', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='game_second_player', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Move',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('x', models.IntegerField()),
('y', models.IntegerField()),
('comment', models.CharField(max_length=300)),
('game', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.Game')),
],
),
]
| [
"django.db.models.IntegerField",
"django.db.models.ForeignKey",
"django.db.models.AutoField",
"django.db.models.DateTimeField",
"django.db.migrations.swappable_dependency",
"django.db.models.CharField"
] | [((312, 369), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (343, 369), False, 'from django.db import migrations, models\n'), ((498, 591), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (514, 591), False, 'from django.db import migrations, models\n'), ((621, 660), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (641, 660), False, 'from django.db import migrations, models\n'), ((695, 730), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (715, 730), False, 'from django.db import migrations, models\n'), ((766, 896), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""game_first_player"""', 'to': 'settings.AUTH_USER_MODEL'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='game_first_player', to=settings.AUTH_USER_MODEL)\n", (783, 896), False, 'from django.db import migrations, models\n'), ((927, 1053), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""games_to_move"""', 'to': 'settings.AUTH_USER_MODEL'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='games_to_move', to=settings.AUTH_USER_MODEL)\n", (944, 1053), False, 'from django.db import migrations, models\n'), ((1085, 1216), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""game_second_player"""', 'to': 'settings.AUTH_USER_MODEL'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='game_second_player', to=settings.AUTH_USER_MODEL)\n", (1102, 1216), False, 'from django.db import migrations, models\n'), ((1341, 1434), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1357, 1434), False, 'from django.db import migrations, models\n'), ((1455, 1476), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (1474, 1476), False, 'from django.db import migrations, models\n'), ((1501, 1522), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (1520, 1522), False, 'from django.db import migrations, models\n'), ((1553, 1585), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (1569, 1585), False, 'from django.db import migrations, models\n'), ((1613, 1691), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""main.Game"""'}), "(on_delete=django.db.models.deletion.CASCADE, to='main.Game')\n", (1630, 1691), False, 'from django.db import migrations, models\n')] |
import math
import unittest
import torch
from nuscenes.prediction.models import mtp
class TestMTPLoss(unittest.TestCase):
"""
Test each component of MTPLoss as well as the
__call__ method.
"""
def test_get_trajectories_and_modes(self):
loss_n_modes_5 = mtp.MTPLoss(5, 0, 0)
loss_n_modes_1 = mtp.MTPLoss(1, 0, 0)
xy_pred = torch.arange(60).view(1, -1).repeat(1, 5).view(-1, 60)
mode_pred = torch.arange(5).view(1, -1)
prediction_bs_1 = torch.cat([xy_pred.reshape(1, -1), mode_pred], dim=1)
prediction_bs_2 = prediction_bs_1.repeat(2, 1)
# Testing many modes with batch size 1.
traj, modes = loss_n_modes_5._get_trajectory_and_modes(prediction_bs_1)
self.assertTrue(torch.allclose(traj, xy_pred.unsqueeze(0).reshape(1, 5, 30, 2)))
self.assertTrue(torch.allclose(modes, mode_pred))
# Testing many modes with batch size > 1.
traj, modes = loss_n_modes_5._get_trajectory_and_modes(prediction_bs_2)
self.assertTrue(torch.allclose(traj, xy_pred.repeat(1, 2).unsqueeze(0).reshape(2, 5, 30, 2)))
self.assertTrue(torch.allclose(modes, mode_pred.repeat(2, 1)))
xy_pred = torch.arange(60).view(1, -1).repeat(1, 1).view(-1, 60)
mode_pred = torch.arange(1).view(1, -1)
prediction_bs_1 = torch.cat([xy_pred.reshape(1, -1), mode_pred], dim=1)
prediction_bs_2 = prediction_bs_1.repeat(2, 1)
# Testing one mode with batch size 1.
traj, modes = loss_n_modes_1._get_trajectory_and_modes(prediction_bs_1)
self.assertTrue(torch.allclose(traj, xy_pred.unsqueeze(0).reshape(1, 1, 30, 2)))
self.assertTrue(torch.allclose(modes, mode_pred))
# Testing one mode with batch size > 1.
traj, modes = loss_n_modes_1._get_trajectory_and_modes(prediction_bs_2)
self.assertTrue(torch.allclose(traj, xy_pred.repeat(1, 2).unsqueeze(0).reshape(2, 1, 30, 2)))
self.assertTrue(torch.allclose(modes, mode_pred.repeat(2, 1)))
def test_angle_between_trajectories(self):
def make_trajectory(last_point):
traj = torch.zeros((12, 2))
traj[-1] = torch.Tensor(last_point)
return traj
loss = mtp.MTPLoss(0, 0, 0)
# test angle is 0.
self.assertEqual(loss._angle_between(make_trajectory([0, 0]), make_trajectory([0, 0])), 0.)
self.assertEqual(loss._angle_between(make_trajectory([15, 15]), make_trajectory([15, 15])), 0.)
# test angle is 15.
self.assertAlmostEqual(loss._angle_between(make_trajectory([1, 1]),
make_trajectory([math.sqrt(3)/2, 0.5])), 15., places=4)
# test angle is 30.
self.assertAlmostEqual(loss._angle_between(make_trajectory([1, 0]),
make_trajectory([math.sqrt(3)/2, 0.5])), 30., places=4)
# test angle is 45.
self.assertAlmostEqual(loss._angle_between(make_trajectory([1, 1]),
make_trajectory([0, 1])), 45., places=4)
# test angle is 90.
self.assertAlmostEqual(loss._angle_between(make_trajectory([1, 1]),
make_trajectory([-1, 1])), 90., places=4)
self.assertAlmostEqual(loss._angle_between(make_trajectory([1, 0]),
make_trajectory([0, 1])), 90., places=4)
# test angle is 180.
self.assertAlmostEqual(loss._angle_between(make_trajectory([1, 0]),
make_trajectory([-1, 0])), 180., places=4)
self.assertAlmostEqual(loss._angle_between(make_trajectory([0, 1]),
make_trajectory([0, -1])), 180., places=4)
self.assertAlmostEqual(loss._angle_between(make_trajectory([3, 1]),
make_trajectory([-3, -1])), 180., places=4)
def test_compute_best_mode_nothing_below_threshold(self):
angles = [(90, 0), (80, 1), (70, 2)]
target = None
traj = None
loss = mtp.MTPLoss(3, 0, 5)
self.assertTrue(loss._compute_best_mode(angles, target, traj) in {0, 1, 2})
loss = mtp.MTPLoss(3, 0, 65)
self.assertTrue(loss._compute_best_mode(angles, target, traj) in {0, 1, 2})
def test_compute_best_mode_only_one_below_threshold(self):
angles = [(30, 1), (3, 0), (25, 2)]
target = torch.ones((1, 6, 2))
trajectory = torch.zeros((3, 6, 2))
loss = mtp.MTPLoss(3, 0, 5)
self.assertEqual(loss._compute_best_mode(angles, target, trajectory), 0)
def test_compute_best_mode_multiple_below_threshold(self):
angles = [(2, 2), (4, 1), (10, 0)]
target = torch.ones((1, 6, 2))
trajectory = torch.zeros((3, 6, 2))
trajectory[1] = 1
loss = mtp.MTPLoss(3, 0, 5)
self.assertEqual(loss._compute_best_mode(angles, target, trajectory), 1)
def test_compute_best_mode_only_one_mode(self):
angles = [(25, 0)]
target = torch.ones((1, 6, 2))
trajectory = torch.zeros((1, 6, 2))
loss = mtp.MTPLoss(1, 0, 5)
self.assertEqual(loss._compute_best_mode(angles, target, trajectory), 0)
trajectory[0] = 1
self.assertEqual(loss._compute_best_mode(angles, target, trajectory), 0)
def test_loss_single_mode(self):
targets = torch.zeros((16, 1, 30, 2))
targets[:, :, :, 1] = torch.arange(start=0, end=3, step=0.1)
predictions = torch.ones((16, 61))
predictions[:, :60] = targets[0, 0, :, :].reshape(-1, 60)
predictions[:, 60] = 1/10
loss = mtp.MTPLoss(1, 1, angle_threshold_degrees=20)
# Only regression loss in single mode case.
self.assertAlmostEqual(float(loss(predictions, targets).detach().numpy()),
0, places=4)
# Now the best mode differs by 1 from the ground truth.
# Smooth l1 loss subtracts 0.5 from l1 norm if diff >= 1.
predictions[:, :60] += 1
self.assertAlmostEqual(float(loss(predictions, targets).detach().numpy()), 0.5,
places=4)
# In this case, one element has perfect regression, the others are off by 1.
predictions[1, :60] -= 1
self.assertAlmostEqual(float(loss(predictions, targets).detach().numpy()),
(15/16)*0.5,
places=4)
def test_loss_many_modes(self):
targets = torch.zeros((16, 1, 30, 2))
targets[:, :, :, 1] = torch.arange(start=0, end=3, step=0.1)
predictions = torch.ones((16, 610))
predictions[:, 540:600] = targets[0, 0, :, :].reshape(-1, 60)
predictions[:, -10:] = 1/10
loss = mtp.MTPLoss(10, 1, angle_threshold_degrees=20)
# Since one mode exactly matches gt, loss should only be classification error.
self.assertAlmostEqual(float(loss(predictions, targets).detach().numpy()),
-math.log(1/10), places=4)
# Now the best mode differs by 1 from the ground truth.
# Smooth l1 loss subtracts 0.5 from l1 norm if diff >= 1.
predictions[:, 540:600] += 1
self.assertAlmostEqual(float(loss(predictions, targets).detach().numpy()),
-math.log(1/10) + 0.5,
places=4)
# In this case, one element has perfect regression, the others are off by 1.
predictions[1, 540:600] -= 1
self.assertAlmostEqual(float(loss(predictions, targets).detach().numpy()),
-math.log(1/10) + (15/16)*0.5,
places=4)
| [
"torch.Tensor",
"math.sqrt",
"math.log",
"nuscenes.prediction.models.mtp.MTPLoss",
"torch.allclose",
"torch.zeros",
"torch.arange",
"torch.ones"
] | [((288, 308), 'nuscenes.prediction.models.mtp.MTPLoss', 'mtp.MTPLoss', (['(5)', '(0)', '(0)'], {}), '(5, 0, 0)\n', (299, 308), False, 'from nuscenes.prediction.models import mtp\n'), ((334, 354), 'nuscenes.prediction.models.mtp.MTPLoss', 'mtp.MTPLoss', (['(1)', '(0)', '(0)'], {}), '(1, 0, 0)\n', (345, 354), False, 'from nuscenes.prediction.models import mtp\n'), ((2245, 2265), 'nuscenes.prediction.models.mtp.MTPLoss', 'mtp.MTPLoss', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (2256, 2265), False, 'from nuscenes.prediction.models import mtp\n'), ((4172, 4192), 'nuscenes.prediction.models.mtp.MTPLoss', 'mtp.MTPLoss', (['(3)', '(0)', '(5)'], {}), '(3, 0, 5)\n', (4183, 4192), False, 'from nuscenes.prediction.models import mtp\n'), ((4293, 4314), 'nuscenes.prediction.models.mtp.MTPLoss', 'mtp.MTPLoss', (['(3)', '(0)', '(65)'], {}), '(3, 0, 65)\n', (4304, 4314), False, 'from nuscenes.prediction.models import mtp\n'), ((4525, 4546), 'torch.ones', 'torch.ones', (['(1, 6, 2)'], {}), '((1, 6, 2))\n', (4535, 4546), False, 'import torch\n'), ((4568, 4590), 'torch.zeros', 'torch.zeros', (['(3, 6, 2)'], {}), '((3, 6, 2))\n', (4579, 4590), False, 'import torch\n'), ((4607, 4627), 'nuscenes.prediction.models.mtp.MTPLoss', 'mtp.MTPLoss', (['(3)', '(0)', '(5)'], {}), '(3, 0, 5)\n', (4618, 4627), False, 'from nuscenes.prediction.models import mtp\n'), ((4833, 4854), 'torch.ones', 'torch.ones', (['(1, 6, 2)'], {}), '((1, 6, 2))\n', (4843, 4854), False, 'import torch\n'), ((4876, 4898), 'torch.zeros', 'torch.zeros', (['(3, 6, 2)'], {}), '((3, 6, 2))\n', (4887, 4898), False, 'import torch\n'), ((4941, 4961), 'nuscenes.prediction.models.mtp.MTPLoss', 'mtp.MTPLoss', (['(3)', '(0)', '(5)'], {}), '(3, 0, 5)\n', (4952, 4961), False, 'from nuscenes.prediction.models import mtp\n'), ((5140, 5161), 'torch.ones', 'torch.ones', (['(1, 6, 2)'], {}), '((1, 6, 2))\n', (5150, 5161), False, 'import torch\n'), ((5183, 5205), 'torch.zeros', 'torch.zeros', (['(1, 6, 2)'], {}), '((1, 6, 2))\n', (5194, 5205), False, 'import torch\n'), ((5222, 5242), 'nuscenes.prediction.models.mtp.MTPLoss', 'mtp.MTPLoss', (['(1)', '(0)', '(5)'], {}), '(1, 0, 5)\n', (5233, 5242), False, 'from nuscenes.prediction.models import mtp\n'), ((5488, 5515), 'torch.zeros', 'torch.zeros', (['(16, 1, 30, 2)'], {}), '((16, 1, 30, 2))\n', (5499, 5515), False, 'import torch\n'), ((5546, 5584), 'torch.arange', 'torch.arange', ([], {'start': '(0)', 'end': '(3)', 'step': '(0.1)'}), '(start=0, end=3, step=0.1)\n', (5558, 5584), False, 'import torch\n'), ((5608, 5628), 'torch.ones', 'torch.ones', (['(16, 61)'], {}), '((16, 61))\n', (5618, 5628), False, 'import torch\n'), ((5745, 5790), 'nuscenes.prediction.models.mtp.MTPLoss', 'mtp.MTPLoss', (['(1)', '(1)'], {'angle_threshold_degrees': '(20)'}), '(1, 1, angle_threshold_degrees=20)\n', (5756, 5790), False, 'from nuscenes.prediction.models import mtp\n'), ((6606, 6633), 'torch.zeros', 'torch.zeros', (['(16, 1, 30, 2)'], {}), '((16, 1, 30, 2))\n', (6617, 6633), False, 'import torch\n'), ((6664, 6702), 'torch.arange', 'torch.arange', ([], {'start': '(0)', 'end': '(3)', 'step': '(0.1)'}), '(start=0, end=3, step=0.1)\n', (6676, 6702), False, 'import torch\n'), ((6726, 6747), 'torch.ones', 'torch.ones', (['(16, 610)'], {}), '((16, 610))\n', (6736, 6747), False, 'import torch\n'), ((6870, 6916), 'nuscenes.prediction.models.mtp.MTPLoss', 'mtp.MTPLoss', (['(10)', '(1)'], {'angle_threshold_degrees': '(20)'}), '(10, 1, angle_threshold_degrees=20)\n', (6881, 6916), False, 'from nuscenes.prediction.models import mtp\n'), ((855, 887), 'torch.allclose', 'torch.allclose', (['modes', 'mode_pred'], {}), '(modes, mode_pred)\n', (869, 887), False, 'import torch\n'), ((1691, 1723), 'torch.allclose', 'torch.allclose', (['modes', 'mode_pred'], {}), '(modes, mode_pred)\n', (1705, 1723), False, 'import torch\n'), ((2136, 2156), 'torch.zeros', 'torch.zeros', (['(12, 2)'], {}), '((12, 2))\n', (2147, 2156), False, 'import torch\n'), ((2180, 2204), 'torch.Tensor', 'torch.Tensor', (['last_point'], {}), '(last_point)\n', (2192, 2204), False, 'import torch\n'), ((449, 464), 'torch.arange', 'torch.arange', (['(5)'], {}), '(5)\n', (461, 464), False, 'import torch\n'), ((1287, 1302), 'torch.arange', 'torch.arange', (['(1)'], {}), '(1)\n', (1299, 1302), False, 'import torch\n'), ((7120, 7136), 'math.log', 'math.log', (['(1 / 10)'], {}), '(1 / 10)\n', (7128, 7136), False, 'import math\n'), ((7429, 7445), 'math.log', 'math.log', (['(1 / 10)'], {}), '(1 / 10)\n', (7437, 7445), False, 'import math\n'), ((7730, 7746), 'math.log', 'math.log', (['(1 / 10)'], {}), '(1 / 10)\n', (7738, 7746), False, 'import math\n'), ((2671, 2683), 'math.sqrt', 'math.sqrt', (['(3)'], {}), '(3)\n', (2680, 2683), False, 'import math\n'), ((2883, 2895), 'math.sqrt', 'math.sqrt', (['(3)'], {}), '(3)\n', (2892, 2895), False, 'import math\n'), ((374, 390), 'torch.arange', 'torch.arange', (['(60)'], {}), '(60)\n', (386, 390), False, 'import torch\n'), ((1212, 1228), 'torch.arange', 'torch.arange', (['(60)'], {}), '(60)\n', (1224, 1228), False, 'import torch\n')] |
from Tkinter import *
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torch.autograd as autograd
from torch.autograd import Variable
master = Tk()
goal = 0
var_goal = StringVar()
GAMMA = 0.9
last_state = Variable(torch.Tensor([0,0,0,0,0,0])).unsqueeze(0)
last_action = 0
last_reward = 0
class Policy(nn.Module):
def __init__(self):
super(Policy, self).__init__()
self.lstm = nn.LSTMCell(6, 6)
self.fc = nn.Linear(6, 2)
#self.softmax = nn.LogSoftmax()
self.states = []
self.next_states = []
self.actions = []
self.rewards = []
self.hiddens = []
self.cells = []
def forward(self, input, hidden):
hx,cx = self.lstm(input,hidden)
output = self.fc(hx)
#output = self.softmax(output)
return output, hx, cx
def initHidden(self):
self.cell_state = Variable(torch.zeros(1,6))
self.hidden_state = Variable(torch.zeros(1,6))
model = Policy()
model.initHidden()
last_hidden = model.hidden_state
last_cell = model.cell_state
optimizer = optim.Adam(model.parameters(), lr=0.01)
def select_action(state):
output, model.hidden_state, model.cell_state = model(state, [model.hidden_state, model.cell_state])
print('val '+str(output.data))
probs = F.softmax(output)
print('probs '+str(probs.data))
action = probs.multinomial()
return action.data[0,0]
def learn(indice):
state = model.states[indice]
next_state = model.next_states[indice].detach()
action = model.actions[indice]
reward = model.rewards[indice]
hidden = model.hiddens[indice]
cell = model.cells[indice]
output, next_hidden, next_cell = model(state, [hidden, cell])
value = output[0,action]
output,_,_ = model(next_state, [next_hidden.detach(), next_hidden.detach()])
#'''
next_action_probs = F.softmax(output)
next_action = next_action_probs.multinomial().data[0,0]
next_value = output[0,next_action]
'''
next_value = output.max(1)[0]
#'''
expected = GAMMA*next_value + reward
td_loss = F.smooth_l1_loss(value, expected)
optimizer.zero_grad()
td_loss.backward(retain_variables=True)
optimizer.step()
def update(signal):
global last_action
global last_state
global last_reward
global last_hidden
global last_cell
state = Variable(torch.Tensor([signal,signal,signal,signal,signal,signal]).float()).unsqueeze(0)
if np.abs(last_reward)>0 or np.random.rand()>0.9 or len(model.states)<10:
model.states.append(last_state)
model.next_states.append(state)
model.rewards.append(last_reward)
model.actions.append(last_action)
model.hiddens.append(last_hidden)
model.cells.append(last_cell)
last_hidden = model.hidden_state
last_cell = model.cell_state
action = select_action(state)
print(action)
reward = 0
if action==1 and goal==1:
reward = 1
if action==1 and goal==0:
reward = -1
if action==0:
learn(np.random.choice(len(model.states)))
else:
learn(-1)
last_action = action
last_state = state
last_reward = reward
def set_goal(new_goal):
global goal
goal = new_goal
print("goal = "+str(goal))
var_goal.set('goal = '+str(goal))
Button(master, text='S1', height = 10, width = 30, command=lambda:update(0)).grid(row=0, column=0, sticky=W, pady=4)
Button(master, text='S2', height = 10, width = 30, command=lambda:update(1)).grid(row=0, column=1, sticky=W, pady=4)
Button(master, text='goal 0', height = 10, width = 30, command=lambda:set_goal(0)).grid(row=1, column=0, sticky=W, pady=4)
Button(master, text='goal 1', height = 10, width = 30, command=lambda:set_goal(1)).grid(row=1, column=1, sticky=W, pady=4)
Label(master, height = 10, textvariable = var_goal).grid(row=2, sticky=EW, pady=4)
mainloop( )
| [
"numpy.abs",
"numpy.random.rand",
"torch.nn.LSTMCell",
"torch.Tensor",
"torch.nn.functional.smooth_l1_loss",
"torch.nn.Linear",
"torch.zeros",
"torch.nn.functional.softmax"
] | [((1365, 1382), 'torch.nn.functional.softmax', 'F.softmax', (['output'], {}), '(output)\n', (1374, 1382), True, 'import torch.nn.functional as F\n'), ((1931, 1948), 'torch.nn.functional.softmax', 'F.softmax', (['output'], {}), '(output)\n', (1940, 1948), True, 'import torch.nn.functional as F\n'), ((2154, 2187), 'torch.nn.functional.smooth_l1_loss', 'F.smooth_l1_loss', (['value', 'expected'], {}), '(value, expected)\n', (2170, 2187), True, 'import torch.nn.functional as F\n'), ((474, 491), 'torch.nn.LSTMCell', 'nn.LSTMCell', (['(6)', '(6)'], {}), '(6, 6)\n', (485, 491), True, 'import torch.nn as nn\n'), ((510, 525), 'torch.nn.Linear', 'nn.Linear', (['(6)', '(2)'], {}), '(6, 2)\n', (519, 525), True, 'import torch.nn as nn\n'), ((290, 322), 'torch.Tensor', 'torch.Tensor', (['[0, 0, 0, 0, 0, 0]'], {}), '([0, 0, 0, 0, 0, 0])\n', (302, 322), False, 'import torch\n'), ((963, 980), 'torch.zeros', 'torch.zeros', (['(1)', '(6)'], {}), '(1, 6)\n', (974, 980), False, 'import torch\n'), ((1018, 1035), 'torch.zeros', 'torch.zeros', (['(1)', '(6)'], {}), '(1, 6)\n', (1029, 1035), False, 'import torch\n'), ((2523, 2542), 'numpy.abs', 'np.abs', (['last_reward'], {}), '(last_reward)\n', (2529, 2542), True, 'import numpy as np\n'), ((2548, 2564), 'numpy.random.rand', 'np.random.rand', ([], {}), '()\n', (2562, 2564), True, 'import numpy as np\n'), ((2435, 2497), 'torch.Tensor', 'torch.Tensor', (['[signal, signal, signal, signal, signal, signal]'], {}), '([signal, signal, signal, signal, signal, signal])\n', (2447, 2497), False, 'import torch\n')] |
import docker
import os
import sys
import pandas as pd
import warnings
from src.PRM import PRM
from pathlib import Path
from src.util import prepare_path_docker
__all__ = ['PathLinker']
class PathLinker(PRM):
required_inputs = ['nodetypes', 'network']
@staticmethod
def generate_inputs(data, filename_map):
"""
Access fields from the dataset and write the required input files
@param data: dataset
@param filename_map: a dict mapping file types in the required_inputs to the filename for that type
@return:
"""
for input_type in PathLinker.required_inputs:
if input_type not in filename_map:
raise ValueError(f"{input_type} filename is missing")
#Get sources and targets for node input file
sources_targets = data.request_node_columns(["sources", "targets"])
if sources_targets is None:
return False
both_series = sources_targets.sources & sources_targets.targets
for index,row in sources_targets[both_series].iterrows():
warn_msg = row.NODEID+" has been labeled as both a source and a target."
warnings.warn(warn_msg)
#Create nodetype file
input_df = sources_targets[["NODEID"]].copy()
input_df.columns = ["#Node"]
input_df.loc[sources_targets["sources"] == True,"Node type"]="source"
input_df.loc[sources_targets["targets"] == True,"Node type"]="target"
input_df.to_csv(filename_map["nodetypes"],sep="\t",index=False,columns=["#Node","Node type"])
#This is pretty memory intensive. We might want to keep the interactome centralized.
data.get_interactome().to_csv(filename_map["network"],sep="\t",index=False,columns=["Interactor1","Interactor2","Weight"],header=["#Interactor1","Interactor2","Weight"])
# Skips parameter validation step
@staticmethod
def run(nodetypes=None, network=None, output_file=None, k=None):
"""
Run PathLinker with Docker
@param nodetypes: input node types with sources and targets (required)
@param network: input network file (required)
@param output_file: path to the output pathway file (required)
@param k: path length (optional)
"""
# Add additional parameter validation
# Do not require k
# Use the PathLinker default
# Could consider setting the default here instead
if not nodetypes or not network or not output_file:
raise ValueError('Required PathLinker arguments are missing')
# Initialize a Docker client using environment variables
client = docker.from_env()
# work dir set as the root of the repository
work_dir = Path(__file__).parent.parent.absolute()
# create path objects for input files
node_file = Path(nodetypes)
network_file = Path(network)
out_dir = Path(output_file).parent
# When renaming the output file, the output directory must already exist
Path(work_dir, out_dir).mkdir(parents=True, exist_ok=True)
command = ['python', '/home/run.py', '/home/spras/'+network_file.as_posix(),
'/home/spras/'+node_file.as_posix()]
# Add optional argument
if k is not None:
command.extend(['-k', str(k)])
#Don't perform this step on systems where permissions aren't an issue like windows
need_chown = True
try:
uid = os.getuid()
except AttributeError:
need_chown = False
try:
container_output = client.containers.run(
'reedcompbio/pathlinker',
command,
stderr=True,
volumes={
prepare_path_docker(work_dir): {'bind': '/home/spras', 'mode': 'rw'}
},
working_dir='/home/spras/')
print(container_output.decode('utf-8'))
if need_chown:
#This command changes the ownership of output files so we don't
# get a permissions error when snakemake tries to touch the files
# PathLinker writes output files to the working directory
chown_command = " ".join(['chown',str(uid),'./out*-ranked-edges.txt'])
client.containers.run('reedcompbio/pathlinker',
chown_command,
stderr=True,
volumes={prepare_path_docker(work_dir): {'bind': '/home/spras', 'mode': 'rw'}},
working_dir='/home/spras/')
finally:
# Not sure whether this is needed
client.close()
# Rename the primary output file to match the desired output filename
# Currently PathLinker only writes one output file so we do not need to delete others
Path(output_file).unlink(missing_ok=True)
# We may not know the value of k that was used
output_edges = Path(next(work_dir.glob('out*-ranked-edges.txt')))
output_edges.rename(output_file)
@staticmethod
def parse_output(raw_pathway_file, standardized_pathway_file):
"""
Convert a predicted pathway into the universal format
@param raw_pathway_file: pathway file produced by an algorithm's run function
@param standardized_pathway_file: the same pathway written in the universal format
"""
# Questions: should there be a header/optional columns?
# What about multiple raw_pathway_files
# We should not allow spaces in the node names if we use space separator.
df = pd.read_csv(raw_pathway_file,sep='\t').take([0,1,2],axis=1)
df.to_csv(standardized_pathway_file, header=False,index=False,sep=' ')
| [
"pandas.read_csv",
"pathlib.Path",
"os.getuid",
"docker.from_env",
"warnings.warn",
"src.util.prepare_path_docker"
] | [((2666, 2683), 'docker.from_env', 'docker.from_env', ([], {}), '()\n', (2681, 2683), False, 'import docker\n'), ((2864, 2879), 'pathlib.Path', 'Path', (['nodetypes'], {}), '(nodetypes)\n', (2868, 2879), False, 'from pathlib import Path\n'), ((2903, 2916), 'pathlib.Path', 'Path', (['network'], {}), '(network)\n', (2907, 2916), False, 'from pathlib import Path\n'), ((1171, 1194), 'warnings.warn', 'warnings.warn', (['warn_msg'], {}), '(warn_msg)\n', (1184, 1194), False, 'import warnings\n'), ((2936, 2953), 'pathlib.Path', 'Path', (['output_file'], {}), '(output_file)\n', (2940, 2953), False, 'from pathlib import Path\n'), ((3508, 3519), 'os.getuid', 'os.getuid', ([], {}), '()\n', (3517, 3519), False, 'import os\n'), ((3050, 3073), 'pathlib.Path', 'Path', (['work_dir', 'out_dir'], {}), '(work_dir, out_dir)\n', (3054, 3073), False, 'from pathlib import Path\n'), ((4950, 4967), 'pathlib.Path', 'Path', (['output_file'], {}), '(output_file)\n', (4954, 4967), False, 'from pathlib import Path\n'), ((5718, 5757), 'pandas.read_csv', 'pd.read_csv', (['raw_pathway_file'], {'sep': '"""\t"""'}), "(raw_pathway_file, sep='\\t')\n", (5729, 5757), True, 'import pandas as pd\n'), ((2757, 2771), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (2761, 2771), False, 'from pathlib import Path\n'), ((3792, 3821), 'src.util.prepare_path_docker', 'prepare_path_docker', (['work_dir'], {}), '(work_dir)\n', (3811, 3821), False, 'from src.util import prepare_path_docker\n'), ((4541, 4570), 'src.util.prepare_path_docker', 'prepare_path_docker', (['work_dir'], {}), '(work_dir)\n', (4560, 4570), False, 'from src.util import prepare_path_docker\n')] |
import pandas as pd
import numpy as np
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing import StandardScaler
from sklearn.cross_validation import train_test_split
import utils
import glob, os
import pca.dataanalyzer as da, pca.pca as pca
from sklearn.metrics import accuracy_score
# visulaize the important characteristics of the dataset
import matplotlib.pyplot as plt
seed = 0
num_headers = 16
data_len = 54*num_headers #1460
dirs = ["C:/Users/salik/Documents/Data/LinuxChrome/{}/".format(num_headers),
"C:/Users/salik/Documents/Data/WindowsFirefox/{}/".format(num_headers),
"C:/Users/salik/Documents/Data/WindowsChrome/{}/".format(num_headers),
"C:/Users/salik/Documents/Data/WindowsSalik/{}/".format(num_headers),
"C:/Users/salik/Documents/Data/WindowsAndreas/{}/".format(num_headers)]
# dirs = ["E:/Data/h5/https/", "E:/Data/h5/netflix/"]
# step 1: get the data
dataframes = []
num_examples = 0
for dir in dirs:
for fullname in glob.iglob(dir + '*.h5'):
filename = os.path.basename(fullname)
df = utils.load_h5(dir, filename)
dataframes.append(df)
num_examples = len(df.values)
# create one large dataframe
data = pd.concat(dataframes)
data.sample(frac=1, random_state=seed).reset_index(drop=True)
num_rows = data.shape[0]
columns = data.columns
print(columns)
# step 2: get features (x) and convert it to numpy array
x = da.getbytes(data, data_len)
# step 3: get class labels y and then encode it into number
# get class label data
y = data['label'].values
# encode the class label
class_labels = np.unique(y)
label_encoder = LabelEncoder()
y = label_encoder.fit_transform(y)
# step 4: split the data into training set and test set
test_percentage = 0.5
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=test_percentage, random_state=seed)
plot_savename = "histogram_payload"
from matplotlib import rcParams
# Make room for xlabel which is otherwise cut off
rcParams.update({'figure.autolayout': True})
# scatter plot the sample points among 5 classes
# markers = ('s', 'd', 'o', '^', 'v', ".", ",", "<", ">", "8", "p", "P", "*", "h", "H", "+", "x", "X", "D", "|", "_")
color_map = {0: '#487fff', 1: '#d342ff', 2: '#4eff4e', 3: '#2ee3ff', 4: '#ffca43', 5:'#ff365e', 6:'#626663'}
plt.figure()
for idx, cl in enumerate(np.unique(y_test)):
# Get count of unique values
values, counts = np.unique(x_test[y_test == cl], return_counts=True)
# Maybe remove zero as there is a lot of zeros in the header
# values = values[1:]
# counts = counts[1:]
n, bins, patches = plt.hist(values, weights=counts, bins=256, facecolor=color_map[idx], label=class_labels[cl], alpha=0.8)
plt.legend(loc='upper right')
plt.title('Histogram of : {}'.format(class_labels))
plt.tight_layout()
# plt.savefig('{0}{1}.png'.format(plot_savename, int(perplexity)), dpi=300)
plt.show() | [
"sklearn.preprocessing.LabelEncoder",
"utils.load_h5",
"matplotlib.pyplot.hist",
"numpy.unique",
"matplotlib.rcParams.update",
"glob.iglob",
"matplotlib.pyplot.figure",
"pca.dataanalyzer.getbytes",
"sklearn.cross_validation.train_test_split",
"matplotlib.pyplot.tight_layout",
"os.path.basename",... | [((1219, 1240), 'pandas.concat', 'pd.concat', (['dataframes'], {}), '(dataframes)\n', (1228, 1240), True, 'import pandas as pd\n'), ((1428, 1455), 'pca.dataanalyzer.getbytes', 'da.getbytes', (['data', 'data_len'], {}), '(data, data_len)\n', (1439, 1455), True, 'import pca.dataanalyzer as da, pca.pca as pca\n'), ((1606, 1618), 'numpy.unique', 'np.unique', (['y'], {}), '(y)\n', (1615, 1618), True, 'import numpy as np\n'), ((1635, 1649), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (1647, 1649), False, 'from sklearn.preprocessing import LabelEncoder\n'), ((1799, 1867), 'sklearn.cross_validation.train_test_split', 'train_test_split', (['x', 'y'], {'test_size': 'test_percentage', 'random_state': 'seed'}), '(x, y, test_size=test_percentage, random_state=seed)\n', (1815, 1867), False, 'from sklearn.cross_validation import train_test_split\n'), ((1988, 2032), 'matplotlib.rcParams.update', 'rcParams.update', (["{'figure.autolayout': True}"], {}), "({'figure.autolayout': True})\n", (2003, 2032), False, 'from matplotlib import rcParams\n'), ((2312, 2324), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2322, 2324), True, 'import matplotlib.pyplot as plt\n'), ((2722, 2751), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""upper right"""'}), "(loc='upper right')\n", (2732, 2751), True, 'import matplotlib.pyplot as plt\n'), ((2804, 2822), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (2820, 2822), True, 'import matplotlib.pyplot as plt\n'), ((2899, 2909), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2907, 2909), True, 'import matplotlib.pyplot as plt\n'), ((997, 1021), 'glob.iglob', 'glob.iglob', (["(dir + '*.h5')"], {}), "(dir + '*.h5')\n", (1007, 1021), False, 'import glob, os\n'), ((2350, 2367), 'numpy.unique', 'np.unique', (['y_test'], {}), '(y_test)\n', (2359, 2367), True, 'import numpy as np\n'), ((2424, 2475), 'numpy.unique', 'np.unique', (['x_test[y_test == cl]'], {'return_counts': '(True)'}), '(x_test[y_test == cl], return_counts=True)\n', (2433, 2475), True, 'import numpy as np\n'), ((2616, 2724), 'matplotlib.pyplot.hist', 'plt.hist', (['values'], {'weights': 'counts', 'bins': '(256)', 'facecolor': 'color_map[idx]', 'label': 'class_labels[cl]', 'alpha': '(0.8)'}), '(values, weights=counts, bins=256, facecolor=color_map[idx], label=\n class_labels[cl], alpha=0.8)\n', (2624, 2724), True, 'import matplotlib.pyplot as plt\n'), ((1042, 1068), 'os.path.basename', 'os.path.basename', (['fullname'], {}), '(fullname)\n', (1058, 1068), False, 'import glob, os\n'), ((1082, 1110), 'utils.load_h5', 'utils.load_h5', (['dir', 'filename'], {}), '(dir, filename)\n', (1095, 1110), False, 'import utils\n')] |
import time
import uuid
from random import random
def now():
return int(time.time() * 1000)
def uuid1():
return str(uuid.uuid1())
def millis(s):
return s * 1000
def seconds(ms):
return ms / 1000
def exponential_backoff(
attempts,
base_delay,
max_delay=None,
jitter=True,
):
"""
Get the next delay for retries in exponential backoff.
attempts: Number of attempts so far
base_delay: Base delay, in seconds
max_delay: Max delay, in seconds. If None (default), there is no max.
jitter: If True, add a random jitter to the delay
"""
if max_delay is None:
max_delay = float("inf")
backoff = min(max_delay, base_delay * 2 ** max(attempts - 1, 0))
if jitter:
backoff = backoff * random()
return backoff
| [
"uuid.uuid1",
"random.random",
"time.time"
] | [((128, 140), 'uuid.uuid1', 'uuid.uuid1', ([], {}), '()\n', (138, 140), False, 'import uuid\n'), ((78, 89), 'time.time', 'time.time', ([], {}), '()\n', (87, 89), False, 'import time\n'), ((770, 778), 'random.random', 'random', ([], {}), '()\n', (776, 778), False, 'from random import random\n')] |
from dagster import job, op
@op
def get_name():
return "dagster"
@op
def hello(name: str):
print(f"Hello, {name}!")
@job(description="Hello world Dagster pipeline")
def hello_dagster():
hello(get_name()) | [
"dagster.job"
] | [((131, 178), 'dagster.job', 'job', ([], {'description': '"""Hello world Dagster pipeline"""'}), "(description='Hello world Dagster pipeline')\n", (134, 178), False, 'from dagster import job, op\n')] |
# -*- coding: utf-8 -*-
import time
import numpy as np
from qulab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector
class Driver(BaseDriver):
error_command = '*ESR?'
support_models = ['AFG3102']
quants = [
QOption('Output',ch=1,
set_cmd='OUTP%(ch)d %(option)s', get_cmd='OUTP%(ch)d?',
options=[('OFF', 'OFF'), ('ON', 'ON')]), # must set chanel
QOption('Function',ch=1,set_cmd='SOUR%(ch)d:FUNC %(option)s',get_cmd='SOUR%(ch)d:FUNC?',
options=[('Sin','SIN'),('Square','SQU'),('Pulse','PULS'),('Ramp','RAMP'),
('PRNoise','PRN'),('DC','DC'),('SINC','SINC'),('Gaussian','GAUS'),
('Lorentz','LOR'),('Erise','ERIS'),('Edecay','EDEC'),('Haversine','HAV'),
('User','USER'),('User2','USER2')]),
QReal('Frequency',unit='Hz',ch=1,set_cmd='SOUR%(ch)d:FREQ %(value)e%(unit)s',get_cmd='SOUR%(ch)d:FREQ?'),
QReal('Phase',unit='rad',ch=1,set_cmd='SOUR%(ch)d:PHAS %(value)f%(unit)s',get_cmd='SOUR%(ch)d:PHAS?'),
QReal('Pulse Delay',unit='s',ch=1,set_cmd='SOUR%(ch)d:PULS:DEL %(value).9e%(unit)s',get_cmd='SOUR%(ch)d:PULS:DEL?'),
QReal('Pulse Period',unit='s',ch=1,set_cmd='SOUR%(ch)d:PULS:PER %(value).9e%(unit)s',get_cmd='SOUR%(ch)d:PULS:PER?'),
QReal('Pulse Width',unit='s',ch=1,set_cmd='SOUR%(ch)d:PULS:WIDT %(value).9e%(unit)s',get_cmd='SOUR%(ch)d:PULS:WIDT?'),
#Burst Mode
QReal('Burst Tdelay',unit='s',ch=1,set_cmd='SOUR%(ch)d:BURS:TDEL %(value).9e%(unit)s',get_cmd='SOUR%(ch)d:BURS:TDEL?'),
QReal('Burst Ncycles',ch=1,set_cmd='SOUR%(ch)d:BURS:NCYC %(value)d',get_cmd='SOUR%(ch)d:BURS:NCYC?'),
##
QReal('Frequency',unit='Hz',ch=1,set_cmd='SOUR%(ch)d:FREQ %(value)e%(unit)s',get_cmd='SOUR%(ch)d:FREQ?'),
QReal('Phase',unit='DEG',ch=1,set_cmd='SOUR%(ch)d:PHAS %(value)f%(unit)s',get_cmd='SOUR%(ch)d:PHAS?'),
QReal('High Level',unit='V',ch=1,set_cmd='SOUR%(ch)d:VOLT:HIGH %(value)f%(unit)s',get_cmd='SOUR%(ch)d:VOLT:HIGH?'),
QReal('Low Level',unit='V',ch=1,set_cmd='SOUR%(ch)d:VOLT:LOW %(value)f%(unit)s',get_cmd='SOUR%(ch)d:VOLT:LOW?'),
QReal('Offset',unit='V',ch=1,set_cmd='SOUR%(ch)d:VOLT:OFFS %(value)f%(unit)s',get_cmd='SOUR%(ch)d:VOLT:OFFS?'),
QReal('Amplitude',unit='VPP',ch=1,set_cmd='SOUR%(ch)d:VOLT:AMPL %(value)f%(unit)s',get_cmd='SOUR%(ch)d:VOLT:AMPL?'),
]
def reset(self,delay1=0,delay2=0):
#init
self.write('*CLS')
self.write('*RST')
#set external clock;external source;burst mode&cycle=1&trigdelay=0
self.write('SOURce:ROSCillator:SOURce EXT')
self.write('TRIGger:SEQuence:SOURce EXTernal')
self.write('SOURce1:BURSt:STATe ON')
self.write('SOURce1:BURSt:NCYCles 1')
self.write('SOURce1:BURSt:MODE TRIGgered')
self.write('SOURce1:BURSt:DELay %fus' %delay1)
self.write('SOURce2:BURSt:STATe ON')
self.write('SOURce2:BURSt:NCYCles 1')
self.write('SOURce2:BURSt:MODE TRIGgered')
self.write('SOURce2:BURSt:TDELay %fns' %delay2)
#在创建好的波形文件中,写入或者更新具体波形
def upwave(self,points,ch=1,T0=100):
pointslen=len(points)
pointslen2=2*pointslen
#写入波形数据
self.write('DATA:DEFine EMEMory,%d' %pointslen)
self.write('DATA:POINts EMEMory, %d' %pointslen)
message=':DATA:DATA EMEMory,'# % (len(str(pointslen2)),pointslen2)
points = points.clip(-1,1)
values=np.zeros(pointslen).astype(np.uint16)
#乘积选用8191是为了防止最终值大于16383
values = (points * 8191).astype(np.uint16)+8192 #.astype(np.uint16)
byte=np.zeros(pointslen2).astype(np.uint8)
#将原先的两比特数据点,分割为高低两个比特
byte[1:pointslen2:2]=(values & 0b11111111).astype(np.uint8)
byte[0:pointslen2:2]=((values & 0b11111100000000) >> 8).astype(np.uint8)
#write_binary_value中的message参数不要包括#42048的信息,因为pyvisa可以自动算出结果。详见pyvisa中util.py内的to_binary_block
#AFG3102选用big_endian。这表示程序按照我给的顺序将二进制包写进去
self.write_binary_values(message, byte, datatype='B',is_big_endian=False,termination=None, encoding=None)
# self.write('enable' )
self.write('TRAC:COPY USER%d,EMEM' %ch)
self.write('SOURce%d:FUNCTION USER%d' %(ch,ch))
#set frequency:because the wave total length is set by this parameter,typical for 1Mhz means the wave length is set to 1us!!
self.write('SOURce%d:FREQuency:FIXed %fkHz' %(ch,1e3/T0))
self.write('OUTPut%d:STATe ON' %ch)
| [
"qulab.device.QReal",
"qulab.device.QOption",
"numpy.zeros"
] | [((252, 376), 'qulab.device.QOption', 'QOption', (['"""Output"""'], {'ch': '(1)', 'set_cmd': '"""OUTP%(ch)d %(option)s"""', 'get_cmd': '"""OUTP%(ch)d?"""', 'options': "[('OFF', 'OFF'), ('ON', 'ON')]"}), "('Output', ch=1, set_cmd='OUTP%(ch)d %(option)s', get_cmd=\n 'OUTP%(ch)d?', options=[('OFF', 'OFF'), ('ON', 'ON')])\n", (259, 376), False, 'from qulab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((416, 806), 'qulab.device.QOption', 'QOption', (['"""Function"""'], {'ch': '(1)', 'set_cmd': '"""SOUR%(ch)d:FUNC %(option)s"""', 'get_cmd': '"""SOUR%(ch)d:FUNC?"""', 'options': "[('Sin', 'SIN'), ('Square', 'SQU'), ('Pulse', 'PULS'), ('Ramp', 'RAMP'), (\n 'PRNoise', 'PRN'), ('DC', 'DC'), ('SINC', 'SINC'), ('Gaussian', 'GAUS'),\n ('Lorentz', 'LOR'), ('Erise', 'ERIS'), ('Edecay', 'EDEC'), ('Haversine',\n 'HAV'), ('User', 'USER'), ('User2', 'USER2')]"}), "('Function', ch=1, set_cmd='SOUR%(ch)d:FUNC %(option)s', get_cmd=\n 'SOUR%(ch)d:FUNC?', options=[('Sin', 'SIN'), ('Square', 'SQU'), (\n 'Pulse', 'PULS'), ('Ramp', 'RAMP'), ('PRNoise', 'PRN'), ('DC', 'DC'), (\n 'SINC', 'SINC'), ('Gaussian', 'GAUS'), ('Lorentz', 'LOR'), ('Erise',\n 'ERIS'), ('Edecay', 'EDEC'), ('Haversine', 'HAV'), ('User', 'USER'), (\n 'User2', 'USER2')])\n", (423, 806), False, 'from qulab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((826, 939), 'qulab.device.QReal', 'QReal', (['"""Frequency"""'], {'unit': '"""Hz"""', 'ch': '(1)', 'set_cmd': '"""SOUR%(ch)d:FREQ %(value)e%(unit)s"""', 'get_cmd': '"""SOUR%(ch)d:FREQ?"""'}), "('Frequency', unit='Hz', ch=1, set_cmd=\n 'SOUR%(ch)d:FREQ %(value)e%(unit)s', get_cmd='SOUR%(ch)d:FREQ?')\n", (831, 939), False, 'from qulab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((940, 1050), 'qulab.device.QReal', 'QReal', (['"""Phase"""'], {'unit': '"""rad"""', 'ch': '(1)', 'set_cmd': '"""SOUR%(ch)d:PHAS %(value)f%(unit)s"""', 'get_cmd': '"""SOUR%(ch)d:PHAS?"""'}), "('Phase', unit='rad', ch=1, set_cmd=\n 'SOUR%(ch)d:PHAS %(value)f%(unit)s', get_cmd='SOUR%(ch)d:PHAS?')\n", (945, 1050), False, 'from qulab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((1051, 1175), 'qulab.device.QReal', 'QReal', (['"""Pulse Delay"""'], {'unit': '"""s"""', 'ch': '(1)', 'set_cmd': '"""SOUR%(ch)d:PULS:DEL %(value).9e%(unit)s"""', 'get_cmd': '"""SOUR%(ch)d:PULS:DEL?"""'}), "('Pulse Delay', unit='s', ch=1, set_cmd=\n 'SOUR%(ch)d:PULS:DEL %(value).9e%(unit)s', get_cmd='SOUR%(ch)d:PULS:DEL?')\n", (1056, 1175), False, 'from qulab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((1176, 1301), 'qulab.device.QReal', 'QReal', (['"""Pulse Period"""'], {'unit': '"""s"""', 'ch': '(1)', 'set_cmd': '"""SOUR%(ch)d:PULS:PER %(value).9e%(unit)s"""', 'get_cmd': '"""SOUR%(ch)d:PULS:PER?"""'}), "('Pulse Period', unit='s', ch=1, set_cmd=\n 'SOUR%(ch)d:PULS:PER %(value).9e%(unit)s', get_cmd='SOUR%(ch)d:PULS:PER?')\n", (1181, 1301), False, 'from qulab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((1302, 1433), 'qulab.device.QReal', 'QReal', (['"""Pulse Width"""'], {'unit': '"""s"""', 'ch': '(1)', 'set_cmd': '"""SOUR%(ch)d:PULS:WIDT %(value).9e%(unit)s"""', 'get_cmd': '"""SOUR%(ch)d:PULS:WIDT?"""'}), "('Pulse Width', unit='s', ch=1, set_cmd=\n 'SOUR%(ch)d:PULS:WIDT %(value).9e%(unit)s', get_cmd='SOUR%(ch)d:PULS:WIDT?'\n )\n", (1307, 1433), False, 'from qulab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((1449, 1581), 'qulab.device.QReal', 'QReal', (['"""Burst Tdelay"""'], {'unit': '"""s"""', 'ch': '(1)', 'set_cmd': '"""SOUR%(ch)d:BURS:TDEL %(value).9e%(unit)s"""', 'get_cmd': '"""SOUR%(ch)d:BURS:TDEL?"""'}), "('Burst Tdelay', unit='s', ch=1, set_cmd=\n 'SOUR%(ch)d:BURS:TDEL %(value).9e%(unit)s', get_cmd='SOUR%(ch)d:BURS:TDEL?'\n )\n", (1454, 1581), False, 'from qulab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((1577, 1684), 'qulab.device.QReal', 'QReal', (['"""Burst Ncycles"""'], {'ch': '(1)', 'set_cmd': '"""SOUR%(ch)d:BURS:NCYC %(value)d"""', 'get_cmd': '"""SOUR%(ch)d:BURS:NCYC?"""'}), "('Burst Ncycles', ch=1, set_cmd='SOUR%(ch)d:BURS:NCYC %(value)d',\n get_cmd='SOUR%(ch)d:BURS:NCYC?')\n", (1582, 1684), False, 'from qulab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((1698, 1811), 'qulab.device.QReal', 'QReal', (['"""Frequency"""'], {'unit': '"""Hz"""', 'ch': '(1)', 'set_cmd': '"""SOUR%(ch)d:FREQ %(value)e%(unit)s"""', 'get_cmd': '"""SOUR%(ch)d:FREQ?"""'}), "('Frequency', unit='Hz', ch=1, set_cmd=\n 'SOUR%(ch)d:FREQ %(value)e%(unit)s', get_cmd='SOUR%(ch)d:FREQ?')\n", (1703, 1811), False, 'from qulab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((1812, 1922), 'qulab.device.QReal', 'QReal', (['"""Phase"""'], {'unit': '"""DEG"""', 'ch': '(1)', 'set_cmd': '"""SOUR%(ch)d:PHAS %(value)f%(unit)s"""', 'get_cmd': '"""SOUR%(ch)d:PHAS?"""'}), "('Phase', unit='DEG', ch=1, set_cmd=\n 'SOUR%(ch)d:PHAS %(value)f%(unit)s', get_cmd='SOUR%(ch)d:PHAS?')\n", (1817, 1922), False, 'from qulab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((1923, 2046), 'qulab.device.QReal', 'QReal', (['"""High Level"""'], {'unit': '"""V"""', 'ch': '(1)', 'set_cmd': '"""SOUR%(ch)d:VOLT:HIGH %(value)f%(unit)s"""', 'get_cmd': '"""SOUR%(ch)d:VOLT:HIGH?"""'}), "('High Level', unit='V', ch=1, set_cmd=\n 'SOUR%(ch)d:VOLT:HIGH %(value)f%(unit)s', get_cmd='SOUR%(ch)d:VOLT:HIGH?')\n", (1928, 2046), False, 'from qulab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((2047, 2167), 'qulab.device.QReal', 'QReal', (['"""Low Level"""'], {'unit': '"""V"""', 'ch': '(1)', 'set_cmd': '"""SOUR%(ch)d:VOLT:LOW %(value)f%(unit)s"""', 'get_cmd': '"""SOUR%(ch)d:VOLT:LOW?"""'}), "('Low Level', unit='V', ch=1, set_cmd=\n 'SOUR%(ch)d:VOLT:LOW %(value)f%(unit)s', get_cmd='SOUR%(ch)d:VOLT:LOW?')\n", (2052, 2167), False, 'from qulab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((2168, 2287), 'qulab.device.QReal', 'QReal', (['"""Offset"""'], {'unit': '"""V"""', 'ch': '(1)', 'set_cmd': '"""SOUR%(ch)d:VOLT:OFFS %(value)f%(unit)s"""', 'get_cmd': '"""SOUR%(ch)d:VOLT:OFFS?"""'}), "('Offset', unit='V', ch=1, set_cmd=\n 'SOUR%(ch)d:VOLT:OFFS %(value)f%(unit)s', get_cmd='SOUR%(ch)d:VOLT:OFFS?')\n", (2173, 2287), False, 'from qulab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((2288, 2412), 'qulab.device.QReal', 'QReal', (['"""Amplitude"""'], {'unit': '"""VPP"""', 'ch': '(1)', 'set_cmd': '"""SOUR%(ch)d:VOLT:AMPL %(value)f%(unit)s"""', 'get_cmd': '"""SOUR%(ch)d:VOLT:AMPL?"""'}), "('Amplitude', unit='VPP', ch=1, set_cmd=\n 'SOUR%(ch)d:VOLT:AMPL %(value)f%(unit)s', get_cmd='SOUR%(ch)d:VOLT:AMPL?')\n", (2293, 2412), False, 'from qulab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((3485, 3504), 'numpy.zeros', 'np.zeros', (['pointslen'], {}), '(pointslen)\n', (3493, 3504), True, 'import numpy as np\n'), ((3645, 3665), 'numpy.zeros', 'np.zeros', (['pointslen2'], {}), '(pointslen2)\n', (3653, 3665), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
import torch
import torch.nn as nn
"""
输入3个句子,每个句子由5个单词构成,每个单词词向量10维
batch=3, seq_len=5, Embedding=10
"""
# 设置LSTM参数,词向量维数10,隐藏元维度20,2个LSTM隐藏层,双向LSTM
bilstm = nn.LSTM(input_size=10, hidden_size=20, num_layers=2, bidirectional=True)
# 如下表示输入句子
input = torch.randn(5, 3, 10)
# 初始化的隐藏元和记忆元,通常维度一样
h0 = torch.randn(4, 3, 20) # [bidirection*num_layers, batch_size, hidden_size]
c0 = torch.randn(4, 3, 20) # [bidirection*num_layers, batch_size, hidden_size]
# 这里有2层lstm,output是最后一层lstm的每个词向量对应隐藏层的输出,与层数无关,只与序列长度有关
output, (hn, cn) = bilstm(input, (h0, c0))
print("output shape:", output.shape) # shape:torch.Size([5,3,40]),[seq_len,batch_size,2*hidden_size]
print("hn shape:", hn.shape) # shape:torch.Size([4,3,20]),[bidirection*num_layers,batch_size,hidden_size]
print("cn shape:", cn.shape) # shape:torch.Size([4,3,20]),[bidirection*num_layers,batch_size,hidden_size]
# 将输出数据做一个二分类
output = output.permute(1, 0, 2) # torch.Size([3,5,40]),[batch_size,seq_len,2*hidden_size]
output = output.contiguous() # torch.view()前做了permute需要contiguous,因为view需要tensor在连续的内存
batch_size = output.size(0)
output = output.view(batch_size, -1) # torch.Size([3,200]),[batch_size,seq_len*2*hidden_size]
fully_connected = nn.Linear(200, 2)
output = fully_connected(output)
print(output.shape) # torch.Size([3,2]),[batch_size,class]
print(output)
| [
"torch.nn.LSTM",
"torch.randn",
"torch.nn.Linear"
] | [((184, 256), 'torch.nn.LSTM', 'nn.LSTM', ([], {'input_size': '(10)', 'hidden_size': '(20)', 'num_layers': '(2)', 'bidirectional': '(True)'}), '(input_size=10, hidden_size=20, num_layers=2, bidirectional=True)\n', (191, 256), True, 'import torch.nn as nn\n'), ((277, 298), 'torch.randn', 'torch.randn', (['(5)', '(3)', '(10)'], {}), '(5, 3, 10)\n', (288, 298), False, 'import torch\n'), ((325, 346), 'torch.randn', 'torch.randn', (['(4)', '(3)', '(20)'], {}), '(4, 3, 20)\n', (336, 346), False, 'import torch\n'), ((405, 426), 'torch.randn', 'torch.randn', (['(4)', '(3)', '(20)'], {}), '(4, 3, 20)\n', (416, 426), False, 'import torch\n'), ((1232, 1249), 'torch.nn.Linear', 'nn.Linear', (['(200)', '(2)'], {}), '(200, 2)\n', (1241, 1249), True, 'import torch.nn as nn\n')] |
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from apps.meiduo_admin.serializers.order import OrderInfoSerializer
from apps.meiduo_admin.utils import PageNum
from apps.orders.models import OrderInfo
class OrderModelViewSet(ModelViewSet):
queryset = OrderInfo.objects.all()
serializer_class = OrderInfoSerializer
pagination_class = PageNum
def destroy(self, request, *args, **kwargs):
return Response({'msg': '妖怪,吃俺老孙一棒,敢删除我的数据!'})
@action(methods=['PUT'],detail=True)
def status(self,request,pk):
# 1.查询订单
try:
order=OrderInfo.objects.get(order_id=pk)
except OrderInfo.DoesNotExist:
from rest_framework import status
return Response(status=status.HTTP_400_BAD_REQUEST)
# order=self.get_object()
# 2.修改订单状态
order.status=request.data.get('status')
order.save()
#3.返回相应
return Response({
'order_id':pk,
'status':order.status
})
"""
GET
{
"order_id": "20190909155657000000003",
"create_time": "2019-09-09T15:56:57.524510+08:00",
"update_time": "2019-09-09T15:57:02.595491+08:00",
"total_count": 1,
"total_amount": "11.00",
"freight": "10.00",
"pay_method": 2,
"status": 1,
"user": 3,
"address": 4,
"goods":[{},{},{},{}]
}
""" | [
"apps.orders.models.OrderInfo.objects.get",
"rest_framework.response.Response",
"rest_framework.decorators.action",
"apps.orders.models.OrderInfo.objects.all"
] | [((350, 373), 'apps.orders.models.OrderInfo.objects.all', 'OrderInfo.objects.all', ([], {}), '()\n', (371, 373), False, 'from apps.orders.models import OrderInfo\n'), ((562, 598), 'rest_framework.decorators.action', 'action', ([], {'methods': "['PUT']", 'detail': '(True)'}), "(methods=['PUT'], detail=True)\n", (568, 598), False, 'from rest_framework.decorators import action\n'), ((516, 555), 'rest_framework.response.Response', 'Response', (["{'msg': '妖怪,吃俺老孙一棒,敢删除我的数据!'}"], {}), "({'msg': '妖怪,吃俺老孙一棒,敢删除我的数据!'})\n", (524, 555), False, 'from rest_framework.response import Response\n'), ((1017, 1067), 'rest_framework.response.Response', 'Response', (["{'order_id': pk, 'status': order.status}"], {}), "({'order_id': pk, 'status': order.status})\n", (1025, 1067), False, 'from rest_framework.response import Response\n'), ((679, 713), 'apps.orders.models.OrderInfo.objects.get', 'OrderInfo.objects.get', ([], {'order_id': 'pk'}), '(order_id=pk)\n', (700, 713), False, 'from apps.orders.models import OrderInfo\n'), ((818, 862), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_400_BAD_REQUEST'}), '(status=status.HTTP_400_BAD_REQUEST)\n', (826, 862), False, 'from rest_framework.response import Response\n')] |
import nekos
from ..utils import admin_cmd
@borg.on(admin_cmd(pattern = "tcat$"))
async def hmm(cat):
if cat.fwd_from:
return
reactcat = nekos.textcat()
await cat.edit(reactcat)
@borg.on(admin_cmd(pattern = "why$"))
async def hmm(cat):
if cat.fwd_from:
return
whycat = nekos.why()
await cat.edit(whycat)
@borg.on(admin_cmd(pattern = "fact$"))
async def hmm(cat):
if cat.fwd_from:
return
factcat = nekos.fact()
await cat.edit(factcat)
| [
"nekos.textcat",
"nekos.fact",
"nekos.why"
] | [((154, 169), 'nekos.textcat', 'nekos.textcat', ([], {}), '()\n', (167, 169), False, 'import nekos\n'), ((315, 326), 'nekos.why', 'nekos.why', ([], {}), '()\n', (324, 326), False, 'import nekos\n'), ((472, 484), 'nekos.fact', 'nekos.fact', ([], {}), '()\n', (482, 484), False, 'import nekos\n')] |
import copy
import torch.nn as nn
from .transformer import (Encoder,
EncoderLayer,
MultiHeadedAttention,
PositionwiseFeedforward,
PositionalEncoding)
class TransformerEncoder(nn.Module):
"""Transformer Encoder"""
def __init__(self, embedding_dim, hidden_sizes, num_layers=6, num_heads=8,
dropout=0.1, batch_first=True, use_cuda=True):
"""Take a batch of representations and add context transformer-style
Parameters
----------
embedding_dim : TODO
hidden_sizes : TODO
num_layers : TODO, optional
num_heads : TODO, optional
dropout : TODO, optional
batch_first: TODO, optional
use_cuda : TODO, optional
"""
if not batch_first:
raise NotImplementedError
super(TransformerEncoder, self).__init__()
self.embedding_dim = embedding_dim
self.hidden_sizes = hidden_sizes
self.num_layers = num_layers
self.num_heads = num_heads
self.dropout = dropout
self.use_cuda = use_cuda
self.out_dim = embedding_dim
# FIXME: I don't know how will deepcopies work within a pytorch module
# <2018-06-25 12:06:59, <NAME>>
c = copy.deepcopy
attn = MultiHeadedAttention(self.num_heads, self.embedding_dim)
ff = PositionwiseFeedforward(self.embedding_dim, self.hidden_sizes,
self.dropout)
position = PositionalEncoding(self.embedding_dim, self.dropout)
self.encoder = Encoder(
EncoderLayer(embedding_dim, c(attn), c(ff), dropout), self.num_layers
)
self.positional_embedding = c(position)
for p in self.parameters():
if p.dim() > 1:
nn.init.xavier_uniform_(p)
def forward(self, emb_batch, masks=None, lengths=None):
"""Add context to a batch of vectors
Parameters
----------
emb_batch : torch.FloatTensor, dim(batch_size, seq_len, hidden_dim)
mask : torch.Floattensor, dim(batch_size, seq_len)
lengths : kept for compatibility with other layers
Returns
-------
A torch.FloatTensor of dim(batch_size, seq_len, hidden_dim) containing
context-enriched vectors
"""
# for compatibility with Annotated Transformer implementation
masks = masks.unsqueeze(1)
return self.encoder(self.positional_embedding(emb_batch), masks)
| [
"torch.nn.init.xavier_uniform_"
] | [((1878, 1904), 'torch.nn.init.xavier_uniform_', 'nn.init.xavier_uniform_', (['p'], {}), '(p)\n', (1901, 1904), True, 'import torch.nn as nn\n')] |
# -*- coding: utf-8 -*-
from flask import make_response, Blueprint
from app import derive_import_root, add_url_rules_for_blueprint
from application import exception
from application.model.service import Service
from application.model.service_template import ServiceTemplate
from application.util.database import session_scope
from application.views.base_api import BaseNeedLoginAPI, ApiResult
class ServiceAPI(BaseNeedLoginAPI):
methods = ['GET', 'PATCH']
def get(self):
service_uuid = self.get_data('uuid')
if self.valid_data(service_uuid):
return self.get_service_by_uuid(service_uuid)
else:
return self.get_user_services(self.user_uuid)
def get_user_services(self, user_uuid):
with session_scope() as db_session:
query = db_session.query(Service, ServiceTemplate.title) \
.outerjoin(ServiceTemplate, Service.template_uuid == ServiceTemplate.uuid) \
.filter(Service.user_uuid == user_uuid) \
.filter(Service.status != Service.STATUS.DELETED) \
.order_by(Service.created_at)
page, page_size, offset, max_page = self._derive_page_parameter(query.count())
services = query.offset(offset).limit(page_size).all()
service_list = []
for record in services:
service = record.Service
service_dict = service.to_dict()
service_dict['title'] = record.title
service_list.append(service_dict)
result = ApiResult('获取用户学术服务信息成功', payload={
'page': page,
'page_size': page_size,
'max_page': max_page,
'services': service_list
})
return result.to_response()
def get_service_by_uuid(self, service_uuid):
with session_scope() as session:
service = session.query(Service).filter(Service.uuid == service_uuid,
Service.status != Service.STATUS.DELETED).first()
if service is None:
raise exception.api.NotFound('套餐不存在')
if service.user_uuid != self.user_uuid:
raise exception.api.Forbidden('无权查看其他用户的套餐信息')
template = session.query(ServiceTemplate) \
.filter(ServiceTemplate.uuid == service.template_uuid).first()
service_dict = service.to_dict()
service_dict['title'] = template.title
service_dict['price'] = float(template.price)
result = ApiResult('获取学术服务详情成功', payload={
'service': service_dict
})
return make_response(result.to_response())
def patch(self):
with session_scope() as session:
uuid = self.get_post_data('uuid', require=True, error_message='缺少uuid字段')
service = session.query(Service).filter(Service.uuid == uuid).first()
if service is None:
raise exception.api.NotFound('学术服务不存在')
if service.user_uuid != self.user_uuid:
raise exception.api.Forbidden('无权修改其他用户的学术服务')
auto_renew = self.get_post_data('auto_renew')
if self.valid_data(auto_renew):
self.patch_service_auto_renew(service)
result = ApiResult('修改套餐成功', 201)
return result.to_response()
@staticmethod
def patch_service_auto_renew(service: Service):
if service.type == service.TYPE.MONTHLY:
latter_auto_renew_status = 1 if service.auto_renew == 0 else 0
service.auto_renew = latter_auto_renew_status
view = ServiceAPI
bp = Blueprint(__name__.split('.')[-1], __name__)
root = derive_import_root(__name__)
add_url_rules_for_blueprint(root, bp)
| [
"application.exception.api.Forbidden",
"application.util.database.session_scope",
"application.views.base_api.ApiResult",
"app.add_url_rules_for_blueprint",
"application.exception.api.NotFound",
"app.derive_import_root"
] | [((3752, 3780), 'app.derive_import_root', 'derive_import_root', (['__name__'], {}), '(__name__)\n', (3770, 3780), False, 'from app import derive_import_root, add_url_rules_for_blueprint\n'), ((3781, 3818), 'app.add_url_rules_for_blueprint', 'add_url_rules_for_blueprint', (['root', 'bp'], {}), '(root, bp)\n', (3808, 3818), False, 'from app import derive_import_root, add_url_rules_for_blueprint\n'), ((758, 773), 'application.util.database.session_scope', 'session_scope', ([], {}), '()\n', (771, 773), False, 'from application.util.database import session_scope\n'), ((1567, 1692), 'application.views.base_api.ApiResult', 'ApiResult', (['"""获取用户学术服务信息成功"""'], {'payload': "{'page': page, 'page_size': page_size, 'max_page': max_page, 'services':\n service_list}"}), "('获取用户学术服务信息成功', payload={'page': page, 'page_size': page_size,\n 'max_page': max_page, 'services': service_list})\n", (1576, 1692), False, 'from application.views.base_api import BaseNeedLoginAPI, ApiResult\n'), ((1870, 1885), 'application.util.database.session_scope', 'session_scope', ([], {}), '()\n', (1883, 1885), False, 'from application.util.database import session_scope\n'), ((2596, 2654), 'application.views.base_api.ApiResult', 'ApiResult', (['"""获取学术服务详情成功"""'], {'payload': "{'service': service_dict}"}), "('获取学术服务详情成功', payload={'service': service_dict})\n", (2605, 2654), False, 'from application.views.base_api import BaseNeedLoginAPI, ApiResult\n'), ((2775, 2790), 'application.util.database.session_scope', 'session_scope', ([], {}), '()\n', (2788, 2790), False, 'from application.util.database import session_scope\n'), ((3356, 3380), 'application.views.base_api.ApiResult', 'ApiResult', (['"""修改套餐成功"""', '(201)'], {}), "('修改套餐成功', 201)\n", (3365, 3380), False, 'from application.views.base_api import BaseNeedLoginAPI, ApiResult\n'), ((2136, 2167), 'application.exception.api.NotFound', 'exception.api.NotFound', (['"""套餐不存在"""'], {}), "('套餐不存在')\n", (2158, 2167), False, 'from application import exception\n'), ((2243, 2283), 'application.exception.api.Forbidden', 'exception.api.Forbidden', (['"""无权查看其他用户的套餐信息"""'], {}), "('无权查看其他用户的套餐信息')\n", (2266, 2283), False, 'from application import exception\n'), ((3026, 3059), 'application.exception.api.NotFound', 'exception.api.NotFound', (['"""学术服务不存在"""'], {}), "('学术服务不存在')\n", (3048, 3059), False, 'from application import exception\n'), ((3135, 3175), 'application.exception.api.Forbidden', 'exception.api.Forbidden', (['"""无权修改其他用户的学术服务"""'], {}), "('无权修改其他用户的学术服务')\n", (3158, 3175), False, 'from application import exception\n')] |
# ----------------------------------------------------------------------
# |
# | CastExpressionParserInfo_UnitTest.py
# |
# | <NAME> <<EMAIL>>
# | 2021-10-04 09:14:16
# |
# ----------------------------------------------------------------------
# |
# | Copyright <NAME> 2021
# | Distributed under the Boost Software License, Version 1.0. See
# | accompanying file LICENSE_1_0.txt or copy at
# | http://www.boost.org/LICENSE_1_0.txt.
# |
# ----------------------------------------------------------------------
"""Unit test for CastExpressionParserInfo.py"""
import os
import pytest
import CommonEnvironment
from CommonEnvironmentEx.Package import InitRelativeImports
# ----------------------------------------------------------------------
_script_fullpath = CommonEnvironment.ThisFullpath()
_script_dir, _script_name = os.path.split(_script_fullpath)
# ----------------------------------------------------------------------
with InitRelativeImports():
from ..CastExpressionParserInfo import *
from ...Common.AutomatedTests import RegionCreator
from ...Types.StandardTypeParserInfo import StandardTypeParserInfo
# ----------------------------------------------------------------------
def test_TypeWithModifierError():
region_creator = RegionCreator()
with pytest.raises(TypeWithModifierError) as ex:
CastExpressionParserInfo(
[
region_creator(container=True),
region_creator(),
region_creator(),
],
ExpressionParserInfo([region_creator(container=True)]),
StandardTypeParserInfo(
[
region_creator(container=True),
region_creator(),
region_creator(expected_error=True),
],
"TheType",
TypeModifier.val,
),
)
ex = ex.value
assert str(ex) == "Cast expressions may specify a type or a modifier, but not both."
assert ex.Region == region_creator.ExpectedErrorRegion()
# ----------------------------------------------------------------------
def test_InvalidModifierError():
region_creator = RegionCreator()
with pytest.raises(InvalidModifierError) as ex:
CastExpressionParserInfo(
[
region_creator(container=True),
region_creator(),
region_creator(expected_error=True),
],
ExpressionParserInfo([region_creator(container=True),]),
TypeModifier.mutable,
)
ex = ex.value
assert str(ex) == "'mutable' cannot be used in cast expressions; supported values are 'ref', 'val', 'view'."
assert ex.Region == region_creator.ExpectedErrorRegion()
| [
"CommonEnvironment.ThisFullpath",
"CommonEnvironmentEx.Package.InitRelativeImports",
"pytest.raises",
"os.path.split"
] | [((827, 859), 'CommonEnvironment.ThisFullpath', 'CommonEnvironment.ThisFullpath', ([], {}), '()\n', (857, 859), False, 'import CommonEnvironment\n'), ((907, 938), 'os.path.split', 'os.path.split', (['_script_fullpath'], {}), '(_script_fullpath)\n', (920, 938), False, 'import os\n'), ((1021, 1042), 'CommonEnvironmentEx.Package.InitRelativeImports', 'InitRelativeImports', ([], {}), '()\n', (1040, 1042), False, 'from CommonEnvironmentEx.Package import InitRelativeImports\n'), ((1381, 1417), 'pytest.raises', 'pytest.raises', (['TypeWithModifierError'], {}), '(TypeWithModifierError)\n', (1394, 1417), False, 'import pytest\n'), ((2332, 2367), 'pytest.raises', 'pytest.raises', (['InvalidModifierError'], {}), '(InvalidModifierError)\n', (2345, 2367), False, 'import pytest\n')] |
from django.shortcuts import render
def contact(request):
return render(request, 'contacts/contact.html')
| [
"django.shortcuts.render"
] | [((75, 115), 'django.shortcuts.render', 'render', (['request', '"""contacts/contact.html"""'], {}), "(request, 'contacts/contact.html')\n", (81, 115), False, 'from django.shortcuts import render\n')] |
# -*- coding: utf-8 -*-
"""
Created on Thu May 3 18:33:28 2018
@author: malopez
"""
import pandas as pd
import matplotlib.pyplot as plt
import cv2
images_folder = "C:/Users/malopez/Desktop/disksMD/images"
data_folder = "C:/Users/malopez/Desktop/disksMD/data"
output_video = './video4.mp4'
particle_radius = 1.0
n_particles = 90 # TODO: Why 3 is the minimun number of particles?
desired_collisions_per_particle = 10
n_collisions = n_particles*desired_collisions_per_particle
size_X = 60 # System size X
size_Y = 30 # System size Y
size_X_inches = 6*(size_X/size_Y)
size_Y_inches = 6
size_figure = (size_X_inches, size_Y_inches)
# Fenomenological constant ;p
circle_size = 11875*size_X_inches*size_Y_inches / (size_X*size_Y)
# circle_size = particle_radius*427500 / (size_X*size_Y)
for i in range(n_collisions):
file_name_pos = data_folder + "/xy"+'{0:05d}'.format(i)+".dat"
pos = pd.read_table(file_name_pos, sep='\s+',
header = None, names =['x', 'y'])
img_name = images_folder+'/img'+'{0:05d}'.format(i)+".png"
fig, ax = plt.subplots(figsize=size_figure, dpi=250)
ax.set_xlim([0,size_X])
ax.set_ylim([0,size_Y])
plt.scatter(pos.x, pos.y, s=circle_size)
fig.savefig(img_name)
print('Saving img nº: '+str(i))
plt.close()
images = []
for i in range(n_collisions):
images.append(images_folder+'/img'+'{0:05d}'.format(i)+".png")
# Height and Width from first image
frame = cv2.imread(images[0])
height, width, channels = frame.shape
# Definimos el codec y creamos un objeto VideoWriter
fourcc = cv2.VideoWriter_fourcc(*'mp4v') # Be sure to use lower case
out = cv2.VideoWriter(output_video, fourcc, 30.0, (width, height))
print('Generating video, please wait')
for image in images:
frame = cv2.imread(image)
# Write out frame to video
out.write(frame)
# Release everything if job is finished
out.release()
print("The output video is {}".format(output_video)) | [
"cv2.VideoWriter",
"matplotlib.pyplot.close",
"pandas.read_table",
"cv2.VideoWriter_fourcc",
"matplotlib.pyplot.scatter",
"cv2.imread",
"matplotlib.pyplot.subplots"
] | [((1454, 1475), 'cv2.imread', 'cv2.imread', (['images[0]'], {}), '(images[0])\n', (1464, 1475), False, 'import cv2\n'), ((1577, 1608), 'cv2.VideoWriter_fourcc', 'cv2.VideoWriter_fourcc', (["*'mp4v'"], {}), "(*'mp4v')\n", (1599, 1608), False, 'import cv2\n'), ((1643, 1703), 'cv2.VideoWriter', 'cv2.VideoWriter', (['output_video', 'fourcc', '(30.0)', '(width, height)'], {}), '(output_video, fourcc, 30.0, (width, height))\n', (1658, 1703), False, 'import cv2\n'), ((893, 964), 'pandas.read_table', 'pd.read_table', (['file_name_pos'], {'sep': '"""\\\\s+"""', 'header': 'None', 'names': "['x', 'y']"}), "(file_name_pos, sep='\\\\s+', header=None, names=['x', 'y'])\n", (906, 964), True, 'import pandas as pd\n'), ((1074, 1116), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': 'size_figure', 'dpi': '(250)'}), '(figsize=size_figure, dpi=250)\n', (1086, 1116), True, 'import matplotlib.pyplot as plt\n'), ((1177, 1217), 'matplotlib.pyplot.scatter', 'plt.scatter', (['pos.x', 'pos.y'], {'s': 'circle_size'}), '(pos.x, pos.y, s=circle_size)\n', (1188, 1217), True, 'import matplotlib.pyplot as plt\n'), ((1284, 1295), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (1293, 1295), True, 'import matplotlib.pyplot as plt\n'), ((1778, 1795), 'cv2.imread', 'cv2.imread', (['image'], {}), '(image)\n', (1788, 1795), False, 'import cv2\n')] |
import sys
import difflib
import errno
import json
import logging
import functools
import os
import pytest
from shell import shell
from diag_paranoia import diag_paranoia, filtered_overview, sanitize_errors
VALIDATOR_IMAGE = "datawire/ambassador-envoy-alpine:v1.5.0-116-g7ccb25882"
DIR = os.path.dirname(__file__)
EXCLUDES = [ "__pycache__" ]
# TESTDIR = os.path.join(DIR, "tests")
TESTDIR = DIR
DEFAULT_CONFIG = os.path.join(DIR, "..", "default-config")
MATCHES = [ n for n in os.listdir(TESTDIR)
if (n.startswith('0') and os.path.isdir(os.path.join(TESTDIR, n)) and (n not in EXCLUDES)) ]
os.environ['SCOUT_DISABLE'] = "1"
#### decorators
def standard_setup(f):
func_name = getattr(f, '__name__', '<anonymous>')
# @functools.wraps(f)
def wrapper(directory, *args, **kwargs):
print("%s: directory %s" % (func_name, directory))
dirpath = os.path.join(TESTDIR, directory)
testname = os.path.basename(dirpath)
configdir = os.path.join(dirpath, 'config')
if os.path.exists(os.path.join(dirpath, 'TEST_DEFAULT_CONFIG')):
configdir = DEFAULT_CONFIG
print("%s: using config %s" % (testname, configdir))
return f(testname, dirpath, configdir, *args, **kwargs)
return wrapper
#### Utilities
def unified_diff(gold_path, current_path):
gold = json.dumps(json.load(open(gold_path, "r")), indent=4, sort_keys=True)
current = json.dumps(json.load(open(current_path, "r")), indent=4, sort_keys=True)
udiff = list(difflib.unified_diff(gold.split("\n"), current.split("\n"),
fromfile=os.path.basename(gold_path),
tofile=os.path.basename(current_path),
lineterm=""))
return udiff
#### Test functions
@pytest.mark.parametrize("directory", MATCHES)
@standard_setup
def test_config(testname, dirpath, configdir):
errors = []
if not os.path.isdir(configdir):
errors.append("configdir %s is not a directory" % configdir)
print("==== checking intermediate output")
ambassador = shell([ 'ambassador', 'dump', configdir ])
if ambassador.code != 0:
errors.append('ambassador dump failed! %s' % ambassador.code)
else:
current_raw = ambassador.output(raw=True)
current = None
gold = None
try:
current = sanitize_errors(json.loads(current_raw))
except json.decoder.JSONDecodeError as e:
errors.append("current intermediate was unparseable?")
if current:
current['envoy_config'] = filtered_overview(current['envoy_config'])
current_path = os.path.join(dirpath, "intermediate.json")
json.dump(current, open(current_path, "w"), sort_keys=True, indent=4)
gold_path = os.path.join(dirpath, "gold.intermediate.json")
if os.path.exists(gold_path):
udiff = unified_diff(gold_path, current_path)
if udiff:
errors.append("gold.intermediate.json and intermediate.json do not match!\n\n%s" % "\n".join(udiff))
print("==== checking config generation")
envoy_json_out = os.path.join(dirpath, "envoy.json")
try:
os.unlink(envoy_json_out)
except OSError as e:
if e.errno != errno.ENOENT:
raise
ambassador = shell([ 'ambassador', 'config', '--check', configdir, envoy_json_out ])
print(ambassador.errors(raw=True))
if ambassador.code != 0:
errors.append('ambassador failed! %s' % ambassador.code)
else:
envoy = shell([ 'docker', 'run',
'--rm',
'-v', '%s:/etc/ambassador-config' % dirpath,
VALIDATOR_IMAGE,
'/usr/local/bin/envoy',
'--base-id', '1',
'--mode', 'validate',
'-c', '/etc/ambassador-config/envoy.json' ],
verbose=True)
envoy_succeeded = (envoy.code == 0)
if not envoy_succeeded:
errors.append('envoy failed! %s' % envoy.code)
envoy_output = list(envoy.output())
if envoy_succeeded:
if not envoy_output[-1].strip().endswith(' OK'):
errors.append('envoy validation failed!')
gold_path = os.path.join(dirpath, "gold.json")
if os.path.exists(gold_path):
udiff = unified_diff(gold_path, envoy_json_out)
if udiff:
errors.append("gold.json and envoy.json do not match!\n\n%s" % "\n".join(udiff))
print("==== checking short-circuit with existing config")
ambassador = shell([ 'ambassador', 'config', '--check', configdir, envoy_json_out ])
print(ambassador.errors(raw=True))
if ambassador.code != 0:
errors.append('ambassador repeat check failed! %s' % ambassador.code)
if 'Output file exists' not in ambassador.errors(raw=True):
errors.append('ambassador repeat check did not short circuit??')
if errors:
print("---- ERRORS")
print("%s" % "\n".join(errors))
assert not errors, ("failing, errors: %d" % len(errors))
@pytest.mark.parametrize("directory", MATCHES)
@standard_setup
def test_diag(testname, dirpath, configdir):
errors = []
errorcount = 0
if not os.path.isdir(configdir):
errors.append("configdir %s is not a directory" % configdir)
errorcount += 1
results = diag_paranoia(configdir, dirpath)
if results['warnings']:
errors.append("[DIAG WARNINGS]\n%s" % "\n".join(results['warnings']))
if results['errors']:
errors.append("[DIAG ERRORS]\n%s" % "\n".join(results['errors']))
errorcount += len(results['errors'])
if errors:
print("---- ERRORS")
print("%s" % "\n".join(errors))
print("---- OVERVIEW ----")
print("%s" % results['overview'])
print("---- RECONSTITUTED ----")
print("%s" % results['reconstituted'])
assert errorcount == 0, ("failing, errors: %d" % errorcount)
| [
"diag_paranoia.diag_paranoia",
"os.path.exists",
"os.listdir",
"json.loads",
"os.path.join",
"os.path.dirname",
"pytest.mark.parametrize",
"os.path.isdir",
"os.path.basename",
"os.unlink",
"diag_paranoia.filtered_overview",
"shell.shell"
] | [((293, 318), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (308, 318), False, 'import os\n'), ((420, 461), 'os.path.join', 'os.path.join', (['DIR', '""".."""', '"""default-config"""'], {}), "(DIR, '..', 'default-config')\n", (432, 461), False, 'import os\n'), ((1833, 1878), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""directory"""', 'MATCHES'], {}), "('directory', MATCHES)\n", (1856, 1878), False, 'import pytest\n'), ((5273, 5318), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""directory"""', 'MATCHES'], {}), "('directory', MATCHES)\n", (5296, 5318), False, 'import pytest\n'), ((2131, 2171), 'shell.shell', 'shell', (["['ambassador', 'dump', configdir]"], {}), "(['ambassador', 'dump', configdir])\n", (2136, 2171), False, 'from shell import shell\n'), ((3220, 3255), 'os.path.join', 'os.path.join', (['dirpath', '"""envoy.json"""'], {}), "(dirpath, 'envoy.json')\n", (3232, 3255), False, 'import os\n'), ((3397, 3466), 'shell.shell', 'shell', (["['ambassador', 'config', '--check', configdir, envoy_json_out]"], {}), "(['ambassador', 'config', '--check', configdir, envoy_json_out])\n", (3402, 3466), False, 'from shell import shell\n'), ((4766, 4835), 'shell.shell', 'shell', (["['ambassador', 'config', '--check', configdir, envoy_json_out]"], {}), "(['ambassador', 'config', '--check', configdir, envoy_json_out])\n", (4771, 4835), False, 'from shell import shell\n'), ((5561, 5594), 'diag_paranoia.diag_paranoia', 'diag_paranoia', (['configdir', 'dirpath'], {}), '(configdir, dirpath)\n', (5574, 5594), False, 'from diag_paranoia import diag_paranoia, filtered_overview, sanitize_errors\n'), ((485, 504), 'os.listdir', 'os.listdir', (['TESTDIR'], {}), '(TESTDIR)\n', (495, 504), False, 'import os\n'), ((891, 923), 'os.path.join', 'os.path.join', (['TESTDIR', 'directory'], {}), '(TESTDIR, directory)\n', (903, 923), False, 'import os\n'), ((943, 968), 'os.path.basename', 'os.path.basename', (['dirpath'], {}), '(dirpath)\n', (959, 968), False, 'import os\n'), ((989, 1020), 'os.path.join', 'os.path.join', (['dirpath', '"""config"""'], {}), "(dirpath, 'config')\n", (1001, 1020), False, 'import os\n'), ((1970, 1994), 'os.path.isdir', 'os.path.isdir', (['configdir'], {}), '(configdir)\n', (1983, 1994), False, 'import os\n'), ((3274, 3299), 'os.unlink', 'os.unlink', (['envoy_json_out'], {}), '(envoy_json_out)\n', (3283, 3299), False, 'import os\n'), ((3634, 3857), 'shell.shell', 'shell', (["['docker', 'run', '--rm', '-v', '%s:/etc/ambassador-config' % dirpath,\n VALIDATOR_IMAGE, '/usr/local/bin/envoy', '--base-id', '1', '--mode',\n 'validate', '-c', '/etc/ambassador-config/envoy.json']"], {'verbose': '(True)'}), "(['docker', 'run', '--rm', '-v', '%s:/etc/ambassador-config' % dirpath,\n VALIDATOR_IMAGE, '/usr/local/bin/envoy', '--base-id', '1', '--mode',\n 'validate', '-c', '/etc/ambassador-config/envoy.json'], verbose=True)\n", (3639, 3857), False, 'from shell import shell\n'), ((4431, 4465), 'os.path.join', 'os.path.join', (['dirpath', '"""gold.json"""'], {}), "(dirpath, 'gold.json')\n", (4443, 4465), False, 'import os\n'), ((4478, 4503), 'os.path.exists', 'os.path.exists', (['gold_path'], {}), '(gold_path)\n', (4492, 4503), False, 'import os\n'), ((5427, 5451), 'os.path.isdir', 'os.path.isdir', (['configdir'], {}), '(configdir)\n', (5440, 5451), False, 'import os\n'), ((1048, 1092), 'os.path.join', 'os.path.join', (['dirpath', '"""TEST_DEFAULT_CONFIG"""'], {}), "(dirpath, 'TEST_DEFAULT_CONFIG')\n", (1060, 1092), False, 'import os\n'), ((2630, 2672), 'diag_paranoia.filtered_overview', 'filtered_overview', (["current['envoy_config']"], {}), "(current['envoy_config'])\n", (2647, 2672), False, 'from diag_paranoia import diag_paranoia, filtered_overview, sanitize_errors\n'), ((2701, 2743), 'os.path.join', 'os.path.join', (['dirpath', '"""intermediate.json"""'], {}), "(dirpath, 'intermediate.json')\n", (2713, 2743), False, 'import os\n'), ((2851, 2898), 'os.path.join', 'os.path.join', (['dirpath', '"""gold.intermediate.json"""'], {}), "(dirpath, 'gold.intermediate.json')\n", (2863, 2898), False, 'import os\n'), ((2915, 2940), 'os.path.exists', 'os.path.exists', (['gold_path'], {}), '(gold_path)\n', (2929, 2940), False, 'import os\n'), ((558, 582), 'os.path.join', 'os.path.join', (['TESTDIR', 'n'], {}), '(TESTDIR, n)\n', (570, 582), False, 'import os\n'), ((1634, 1661), 'os.path.basename', 'os.path.basename', (['gold_path'], {}), '(gold_path)\n', (1650, 1661), False, 'import os\n'), ((1708, 1738), 'os.path.basename', 'os.path.basename', (['current_path'], {}), '(current_path)\n', (1724, 1738), False, 'import os\n'), ((2429, 2452), 'json.loads', 'json.loads', (['current_raw'], {}), '(current_raw)\n', (2439, 2452), False, 'import json\n')] |
#!/usr/bin/env python3
import json
import googlemaps
import sys
import os
gmaps = googlemaps.Client(key=os.environ["GOOGLE_API_KEY"])
print(gmaps)
filename = sys.argv[1]
with open(filename) as f:
data = json.load(f)
for d in data:
if d.get("address") and not d.get("latitude"):
result = gmaps.geocode(d["address"])
print(result)
result = result[0]["geometry"]["location"]
d["latitude"] = result["lat"]
d["longitude"] = result["lng"]
with open(filename, "w") as f:
json.dump(data, f) | [
"json.load",
"googlemaps.Client",
"json.dump"
] | [((83, 134), 'googlemaps.Client', 'googlemaps.Client', ([], {'key': "os.environ['GOOGLE_API_KEY']"}), "(key=os.environ['GOOGLE_API_KEY'])\n", (100, 134), False, 'import googlemaps\n'), ((209, 221), 'json.load', 'json.load', (['f'], {}), '(f)\n', (218, 221), False, 'import json\n'), ((520, 538), 'json.dump', 'json.dump', (['data', 'f'], {}), '(data, f)\n', (529, 538), False, 'import json\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###################################################################
# Author: <NAME>
# Date : 2019.2
# Email : <EMAIL>
###################################################################
from dayu_widgets.avatar import MAvatar
from dayu_widgets.divider import MDivider
from dayu_widgets.field_mixin import MFieldMixin
from dayu_widgets.label import MLabel
from dayu_widgets.push_button import MPushButton
from dayu_widgets import dayu_theme
from dayu_widgets.qt import QWidget, QVBoxLayout, MPixmap, QFormLayout, Qt, QHBoxLayout
class AvatarExample(QWidget, MFieldMixin):
def __init__(self, parent=None):
super(AvatarExample, self).__init__(parent)
self.setWindowTitle('Example for MAvatar')
main_lay = QVBoxLayout()
main_lay.addWidget(MDivider('different size'))
size_list = [('Huge', MAvatar.huge),
('Large', MAvatar.large),
('Medium', MAvatar.medium),
('Small', MAvatar.small),
('Tiny', MAvatar.tiny)]
self.pix_map_list = [None, MPixmap('avatar.png'),
MPixmap('app-maya.png'),
MPixmap('app-nuke.png'),
MPixmap('app-houdini.png')]
form_lay = QFormLayout()
form_lay.setLabelAlignment(Qt.AlignRight)
for label, cls in size_list:
h_lay = QHBoxLayout()
for image in self.pix_map_list:
avatar_tmp = cls(image)
h_lay.addWidget(avatar_tmp)
h_lay.addStretch()
form_lay.addRow(MLabel(label), h_lay)
main_lay.addLayout(form_lay)
self.register_field('image', None)
main_lay.addWidget(MDivider('different image'))
avatar = MAvatar()
self.bind('image', avatar, 'dayu_image')
button = MPushButton(text='Change Avatar Image').primary()
button.clicked.connect(self.slot_change_image)
main_lay.addWidget(avatar)
main_lay.addWidget(button)
main_lay.addStretch()
self.setLayout(main_lay)
def slot_change_image(self):
"""Set the Avatar image random by data bind."""
import random
self.set_field('image', random.choice(self.pix_map_list))
if __name__ == '__main__':
import sys
from dayu_widgets.qt import QApplication
app = QApplication(sys.argv)
test = AvatarExample()
dayu_theme.apply(test)
test.show()
sys.exit(app.exec_())
| [
"dayu_widgets.qt.MPixmap",
"random.choice",
"dayu_widgets.qt.QApplication",
"dayu_widgets.qt.QHBoxLayout",
"dayu_widgets.divider.MDivider",
"dayu_widgets.qt.QFormLayout",
"dayu_widgets.label.MLabel",
"dayu_widgets.push_button.MPushButton",
"dayu_widgets.avatar.MAvatar",
"dayu_widgets.dayu_theme.ap... | [((2418, 2440), 'dayu_widgets.qt.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (2430, 2440), False, 'from dayu_widgets.qt import QApplication\n'), ((2472, 2494), 'dayu_widgets.dayu_theme.apply', 'dayu_theme.apply', (['test'], {}), '(test)\n', (2488, 2494), False, 'from dayu_widgets import dayu_theme\n'), ((781, 794), 'dayu_widgets.qt.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (792, 794), False, 'from dayu_widgets.qt import QWidget, QVBoxLayout, MPixmap, QFormLayout, Qt, QHBoxLayout\n'), ((1327, 1340), 'dayu_widgets.qt.QFormLayout', 'QFormLayout', ([], {}), '()\n', (1338, 1340), False, 'from dayu_widgets.qt import QWidget, QVBoxLayout, MPixmap, QFormLayout, Qt, QHBoxLayout\n'), ((1825, 1834), 'dayu_widgets.avatar.MAvatar', 'MAvatar', ([], {}), '()\n', (1832, 1834), False, 'from dayu_widgets.avatar import MAvatar\n'), ((822, 848), 'dayu_widgets.divider.MDivider', 'MDivider', (['"""different size"""'], {}), "('different size')\n", (830, 848), False, 'from dayu_widgets.divider import MDivider\n'), ((1120, 1141), 'dayu_widgets.qt.MPixmap', 'MPixmap', (['"""avatar.png"""'], {}), "('avatar.png')\n", (1127, 1141), False, 'from dayu_widgets.qt import QWidget, QVBoxLayout, MPixmap, QFormLayout, Qt, QHBoxLayout\n'), ((1172, 1195), 'dayu_widgets.qt.MPixmap', 'MPixmap', (['"""app-maya.png"""'], {}), "('app-maya.png')\n", (1179, 1195), False, 'from dayu_widgets.qt import QWidget, QVBoxLayout, MPixmap, QFormLayout, Qt, QHBoxLayout\n'), ((1226, 1249), 'dayu_widgets.qt.MPixmap', 'MPixmap', (['"""app-nuke.png"""'], {}), "('app-nuke.png')\n", (1233, 1249), False, 'from dayu_widgets.qt import QWidget, QVBoxLayout, MPixmap, QFormLayout, Qt, QHBoxLayout\n'), ((1280, 1306), 'dayu_widgets.qt.MPixmap', 'MPixmap', (['"""app-houdini.png"""'], {}), "('app-houdini.png')\n", (1287, 1306), False, 'from dayu_widgets.qt import QWidget, QVBoxLayout, MPixmap, QFormLayout, Qt, QHBoxLayout\n'), ((1449, 1462), 'dayu_widgets.qt.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (1460, 1462), False, 'from dayu_widgets.qt import QWidget, QVBoxLayout, MPixmap, QFormLayout, Qt, QHBoxLayout\n'), ((1779, 1806), 'dayu_widgets.divider.MDivider', 'MDivider', (['"""different image"""'], {}), "('different image')\n", (1787, 1806), False, 'from dayu_widgets.divider import MDivider\n'), ((2284, 2316), 'random.choice', 'random.choice', (['self.pix_map_list'], {}), '(self.pix_map_list)\n', (2297, 2316), False, 'import random\n'), ((1650, 1663), 'dayu_widgets.label.MLabel', 'MLabel', (['label'], {}), '(label)\n', (1656, 1663), False, 'from dayu_widgets.label import MLabel\n'), ((1901, 1940), 'dayu_widgets.push_button.MPushButton', 'MPushButton', ([], {'text': '"""Change Avatar Image"""'}), "(text='Change Avatar Image')\n", (1912, 1940), False, 'from dayu_widgets.push_button import MPushButton\n')] |
from bs4 import BeautifulSoup
from difflib import SequenceMatcher
class _Check:
"""
Parent check class. All other checks should be a subclass of this class.
"""
key = "check"
name = "Check"
description = "Parent check"
class _BlindCheck(_Check):
"""
These checks identify issues in internal systems. The payloads include a host that listens for callbacks from the
internal system. These checks do not raise issues directly. The listener server maintains issue information.
"""
_payloads = [] # attribute should be populated by children's __init___ method
example = "Payload example"
def payloads(self, url, target, value):
"""
Returns the check's payloads. Children can override to provide dynamic payloads.
:param url: url value
:param target: target name
:param value: target value
:return: list of payloads
"""
# return
return self._payloads
def _check_payloads(self, payloads):
"""
Checks if the payloads are adoptable for this class and modify the payloads to adjust to check function.
InvalidFormatException is raised, if a payload is not adoptable.
Children can override.
:param payloads: list of payloads
:return: list of modified payloads
"""
return payloads
def set_payloads(self, payloads):
"""
Overwrite the check's payloads.
:param payloads: list of payloads
"""
self._payloads = self._check_payloads(payloads)
def add_payloads(self, payloads):
"""
Add payloads to the check's payloads.
:param payloads: list of payloads
"""
self._payloads += self._check_payloads(payloads)
class _PassiveCheck(_Check):
"""
These checks identify sensitive information in responses. The response from the server may be checked for social
security numbers, credit card numbers, email addresses, etc. These checks do not have payloads.
"""
def check(self, response):
"""Method should be implemented by children"""
pass
class _ActiveCheck(_Check):
"""
Parent class for active checks. Subclasses include simple, differential, and timing checks.
"""
_payloads = [] # attribute should be populated by children's __init___ method
example = "Payload example"
def payloads(self, url, target, value):
"""
Returns the check's payloads. Children can override to provide dynamic payloads.
:param url: url value
:param target: target name
:param value: target value
:return: list of payloads
"""
return self._payloads
def _check_payloads(self, payloads):
"""
Checks if the payloads are adoptable for this class and modify the payloads to adjust to check function.
InvalidFormatException is raised, if a payload is not adoptable.
Children can override.
:param payloads: list of payloads
:return: list of modified payloads
"""
return payloads
def set_payloads(self, payloads):
"""
Overwrite the check's payloads.
:param payloads: list of payloads
"""
self._payloads = self._check_payloads(payloads)
def add_payloads(self, payloads):
"""
Add payloads to the check's payloads.
:param payloads: list of payloads
"""
self._payloads += self._check_payloads(payloads)
class _ValueCheck(_ActiveCheck):
"""
These checks perform value analysis to identify issues. For instance, they may analyze text in HTML bodies or
values in HTTP headers. These checks audit using a single payload and single response.
"""
def check(self, response, payload):
"""Method should be implemented by children"""
pass
class _DifferentialCheck(_ActiveCheck):
"""
These checks perform differential analysis between true and false payloads to identify issues. If the difference
between the payloads' responses is below a threshold, then an issue is raised. These checks audit using two
payloads and two responses.
"""
_threshold = 0.90
def check(self, responses, payload):
"""
Checks for issues by looking for the difference between response bodies. HTML script and style tags are
removed from HTML responses.
:param responses: response objects from server
:param payload: payload value
:return: true if vulnerable, false otherwise
"""
# extract
true_response = responses['true']
false_response = responses['false']
# check response
if not true_response.text or not false_response.text:
return False
# check status code
if true_response.status_code != false_response.status_code:
return False
# soup
true_soup = BeautifulSoup(true_response.text, "html.parser")
false_soup = BeautifulSoup(false_response.text, "html.parser")
# remove script and style tags
excludes = ["script", "style"]
true_tags = [tag for tag in true_soup.find_all(text=True) if tag.parent.name not in excludes]
false_tags = [tag for tag in false_soup.find_all(text=True) if tag.parent.name not in excludes]
# join back
true_text = ' '.join(true_tags)
false_text = ' '.join(false_tags)
# calculate ratio
sequence = SequenceMatcher(None, true_text, false_text)
ratio = sequence.quick_ratio()
# check difference
if ratio < _DifferentialCheck._threshold:
return True
else:
return False
class _TimingCheck(_ActiveCheck):
"""
These checks perform timing analysis from delays to identify issues. If the response's elapsed time is above a
threshold, then an issue is raised. These checks audit using one payload, the payload's delay, and one response.
"""
_padding = 0.50
def check(self, responses, payload, delay):
"""
Checks for issues by measuring the elapsed time of the response. It uses a padding to prevent slow endpoints
from producing false positives.
:param responses: response objects from server
:param payload: payload value
:param delay: time as float
:return: true if vulnerable, false otherwise
"""
# extract
original_response = responses['original']
timing_response = responses['timing']
# calculate elapsed time
original_elapsed = original_response.elapsed.seconds + (original_response.elapsed.microseconds / 1000000)
timing_elapsed = timing_response.elapsed.seconds + (timing_response.elapsed.microseconds / 1000000)
# calculate padding
padding = original_elapsed * _TimingCheck._padding
# check time
if timing_elapsed > (delay + padding):
return True
else:
return False
def _check_payloads(self, payloads):
"""
Checks if the payloads are adoptable for this class and modify the payloads to adjust to check function.
InvalidFormatException is raised, if a payload is not adoptable.
Children can override.
:param payloads: list of payloads
:return: list of modified payloads
"""
return [(payload, 9) for payload in payloads]
| [
"bs4.BeautifulSoup",
"difflib.SequenceMatcher"
] | [((4960, 5008), 'bs4.BeautifulSoup', 'BeautifulSoup', (['true_response.text', '"""html.parser"""'], {}), "(true_response.text, 'html.parser')\n", (4973, 5008), False, 'from bs4 import BeautifulSoup\n'), ((5030, 5079), 'bs4.BeautifulSoup', 'BeautifulSoup', (['false_response.text', '"""html.parser"""'], {}), "(false_response.text, 'html.parser')\n", (5043, 5079), False, 'from bs4 import BeautifulSoup\n'), ((5514, 5558), 'difflib.SequenceMatcher', 'SequenceMatcher', (['None', 'true_text', 'false_text'], {}), '(None, true_text, false_text)\n', (5529, 5558), False, 'from difflib import SequenceMatcher\n')] |
from os.path import abspath, join, dirname
from colibris.conf import settings
STATIC_PATH = abspath(join(dirname(__file__), 'swagger'))
UI_URL = settings.API_DOCS_URL
STATIC_URL = '{}/static'.format(UI_URL)
APISPEC_URL = '{}/apispec'.format(UI_URL)
| [
"os.path.dirname"
] | [((107, 124), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (114, 124), False, 'from os.path import abspath, join, dirname\n')] |
"""
Step Chart
-----------------
This example shows Google's stock price over time.
"""
import altair as alt
from vega_datasets import data
source = data.stocks()
chart = alt.Chart(source).mark_line(interpolate = 'step-after').encode(
x = 'date',
y = 'price'
)
chart.transform = [{"filter": "datum.symbol==='GOOG'"}]
| [
"altair.Chart",
"vega_datasets.data.stocks"
] | [((151, 164), 'vega_datasets.data.stocks', 'data.stocks', ([], {}), '()\n', (162, 164), False, 'from vega_datasets import data\n'), ((174, 191), 'altair.Chart', 'alt.Chart', (['source'], {}), '(source)\n', (183, 191), True, 'import altair as alt\n')] |
# -*- coding: utf-8 -*-
"""
file: graph_networkx.py
Provides a NetworkX compliant Graph class.
"""
from graphit.graph import GraphBase
from graphit.graph_exceptions import GraphitException, GraphitNodeNotFound
from graphit.graph_algorithms import degree, size
from graphit.graph_utils.graph_utilities import graph_undirectional_to_directional, graph_directional_to_undirectional
class NetworkXGraph(GraphBase):
def __init__(self, *args, **kwargs):
"""
Init a NetworkX graph type
Differences with regular Graph:
- 'auto_nid' is unknown in NetworkX, set to False
:param args: arguments to Graph __init__
:param kwargs: keyword arguments to Graph __init__
"""
kwargs['auto_nid'] = False
super(NetworkXGraph, self).__init__(*args, **kwargs)
def __contains__(self, node):
return self.has_node(node)
def __getitem__(self, key):
"""
Implement class __getitem__
Return adjacency based on node ID or edge on edge ID.
:return: adjacency nodes or an edge
:rtype: :py:list
"""
# Return edge using edge ID
if isinstance(key, tuple):
return self.edges[key]
# Return adjacency nodes
# TODO: this should return a view but that is not fully compliant to NetworkX yet
else:
return dict([(nid, self.nodes[nid]) for nid in self.adjacency[key]])
def __iter__(self):
"""
Implement class __iter__
Iterate over nodes IDs
:return: Node identifier (nid)
"""
# Always reset node view
for nid in self.nodes:
yield nid
@property
def adj(self):
return self.adjacency
def add_nodes_from(self, nodes, **kwargs):
return self.add_nodes(nodes, **kwargs)
def add_edges_from(self, edges, **kwargs):
return self.add_edges(edges, **kwargs)
def add_weighted_edges_from(self, edges, weight='weight', **kwargs):
"""
Add edges with a numeric weight factor
:param edges: edges as iterable of tuples with length 3 containing
(node1, node2, weight value)
:param weight: edge weight attribute name
:type weight: :py:str
:param kwargs: additional keyword arguments passed to add_edge
:return: list of edge ids for the objects added in
the same order as th input iterable.
:rtype: :py:list
"""
return self.add_edges(edges, weight=weight, **kwargs)
@property
def degree(self):
return degree(self, self.nodes.keys())
def get_edge_data(self, n1, n2, default=None):
edge = (n1, n2)
if edge not in self.edges:
return default
return self.edges[edge]
def has_edge(self, n1, n2):
return (n1, n2) in self.edges
def has_node(self, node):
return node in self.nodes
def is_directed(self):
"""
Return graph directionality
A graph with mixed edges (partly directed, partly undirected) is
considered a directed graph.
:return: directed or undirected graph
:rtype: :py:bool
"""
return self.directed
def nbunch_iter(self, nodes=None):
if nodes:
nodes = [node for node in nodes if node in self.nodes]
else:
nodes = self.nodes.keys()
return self.iternodes(nodes)
def neighbors(self, node):
if node not in self.nodes:
raise GraphitNodeNotFound()
return iter(self.adjacency[node])
def number_of_edges(self, first=None, second=None):
if first is None:
return int(self.size())
if second is not None and second in self.adjacency[first]:
return 1
return 0
def order(self):
"""
Return the number of nodes in the graph similar to __len__
:return: Number of nodes
:rtype: :py:int
"""
return len(self)
number_of_nodes = order
def remove_nodes_from(self, *args, **kwargs):
return self.remove_nodes(*args, **kwargs)
def remove_edges_from(self, *args, **kwargs):
return self.remove_edges(*args, **kwargs)
def size(self, weight=None):
return size(self, weight=weight)
def subgraph(self, nodes):
return self.getnodes(nodes)
def edge_subgraph(self, edges):
return self.getedges(edges)
def to_directed(self):
return graph_undirectional_to_directional(self)
def to_undirected(self):
return graph_directional_to_undirectional(self)
def update(self, edges=None, nodes=None):
if edges is not None:
if nodes is not None:
self.add_nodes(nodes)
self.add_edges(edges)
else:
if hasattr(edges, 'nodes') and hasattr(edges, 'edges'):
for node, attr in edges.nodes.items():
self.add_node(node, **attr)
for edge, attr in edges.edges.items():
self.add_edge(*edge, **attr)
else:
self.add_edges(edges)
elif nodes is not None:
self.add_nodes(nodes)
else:
raise GraphitException("update needs nodes or edges input")
| [
"graphit.graph_utils.graph_utilities.graph_directional_to_undirectional",
"graphit.graph_utils.graph_utilities.graph_undirectional_to_directional",
"graphit.graph_exceptions.GraphitNodeNotFound",
"graphit.graph_exceptions.GraphitException",
"graphit.graph_algorithms.size"
] | [((4371, 4396), 'graphit.graph_algorithms.size', 'size', (['self'], {'weight': 'weight'}), '(self, weight=weight)\n', (4375, 4396), False, 'from graphit.graph_algorithms import degree, size\n'), ((4584, 4624), 'graphit.graph_utils.graph_utilities.graph_undirectional_to_directional', 'graph_undirectional_to_directional', (['self'], {}), '(self)\n', (4618, 4624), False, 'from graphit.graph_utils.graph_utilities import graph_undirectional_to_directional, graph_directional_to_undirectional\n'), ((4671, 4711), 'graphit.graph_utils.graph_utilities.graph_directional_to_undirectional', 'graph_directional_to_undirectional', (['self'], {}), '(self)\n', (4705, 4711), False, 'from graphit.graph_utils.graph_utilities import graph_undirectional_to_directional, graph_directional_to_undirectional\n'), ((3601, 3622), 'graphit.graph_exceptions.GraphitNodeNotFound', 'GraphitNodeNotFound', ([], {}), '()\n', (3620, 3622), False, 'from graphit.graph_exceptions import GraphitException, GraphitNodeNotFound\n'), ((5375, 5428), 'graphit.graph_exceptions.GraphitException', 'GraphitException', (['"""update needs nodes or edges input"""'], {}), "('update needs nodes or edges input')\n", (5391, 5428), False, 'from graphit.graph_exceptions import GraphitException, GraphitNodeNotFound\n')] |
r"""
This is the base module for all other objects of the package.
+ `LaTeX` returns a LaTeX string out of an `Irene` object.
+ `base` is the parent of all `Irene` objects.
"""
def LaTeX(obj):
r"""
Returns LaTeX representation of Irene's objects.
"""
from sympy.core.core import all_classes
from Irene import SDPRelaxations, SDRelaxSol, Mom
inst = isinstance(obj, SDPRelaxations) or isinstance(
obj, SDRelaxSol) or isinstance(obj, Mom)
if inst:
return obj.__latex__()
elif isinstance(obj, tuple(all_classes)):
from sympy import latex
return latex(obj)
class base(object):
r"""
All the modules in `Irene` extend this class which perform some common
tasks such as checking existence of certain softwares.
"""
def __init__(self):
from sys import platform
self.os = platform
if self.os == 'win32':
import os
BASE = os.sep.join(os.path.dirname(os.path.realpath(__file__)).split(os.sep)) + os.sep
self.Path = dict(csdp=BASE+"csdp.exe", sdpa=BASE+"sdpa.exe")
else:
self.Path = dict(csdp="csdp", sdpa="sdpa")
def which(self, program):
r"""
Check the availability of the `program` system-wide.
Returns the path of the program if exists and returns
'None' otherwise.
"""
import os
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
def AvailableSDPSolvers(self):
r"""
find the existing sdp solvers.
"""
existsing = []
# CVXOPT
try:
import cvxopt
existsing.append('CVXOPT')
except ImportError:
pass
if self.os == 'win32':
from os.path import isfile
# DSDP
if ('dsdp' in self.Path):
if isfile(self.Path['dsdp']):
existsing.append('DSDP')
# SDPA
if ('sdpa' in self.Path):
if isfile(self.Path['sdpa']):
existsing.append('SDPA')
if ('csdp' in self.Path):
if isfile(self.Path['csdp']):
existsing.append('CSDP')
else:
# DSDP
if self.which('dsdp5') is not None:
existsing.append('DSDP')
# SDPA
if self.which('sdpa') is not None:
existsing.append('SDPA')
# CSDP
if self.which('csdp') is not None:
existsing.append('CSDP')
return existsing
| [
"os.access",
"sympy.latex",
"os.path.join",
"os.path.split",
"os.path.isfile",
"os.path.realpath"
] | [((1526, 1548), 'os.path.split', 'os.path.split', (['program'], {}), '(program)\n', (1539, 1548), False, 'import os\n'), ((616, 626), 'sympy.latex', 'latex', (['obj'], {}), '(obj)\n', (621, 626), False, 'from sympy import latex\n'), ((1450, 1471), 'os.path.isfile', 'os.path.isfile', (['fpath'], {}), '(fpath)\n', (1464, 1471), False, 'import os\n'), ((1476, 1501), 'os.access', 'os.access', (['fpath', 'os.X_OK'], {}), '(fpath, os.X_OK)\n', (1485, 1501), False, 'import os\n'), ((1772, 1799), 'os.path.join', 'os.path.join', (['path', 'program'], {}), '(path, program)\n', (1784, 1799), False, 'import os\n'), ((2302, 2327), 'os.path.isfile', 'isfile', (["self.Path['dsdp']"], {}), "(self.Path['dsdp'])\n", (2308, 2327), False, 'from os.path import isfile\n'), ((2450, 2475), 'os.path.isfile', 'isfile', (["self.Path['sdpa']"], {}), "(self.Path['sdpa'])\n", (2456, 2475), False, 'from os.path import isfile\n'), ((2579, 2604), 'os.path.isfile', 'isfile', (["self.Path['csdp']"], {}), "(self.Path['csdp'])\n", (2585, 2604), False, 'from os.path import isfile\n'), ((985, 1011), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1001, 1011), False, 'import os\n')] |
#from https://github.com/mfurukawa/imu_sensor/tree/master/src/Python
# September 03, 2020
# <NAME>
from __future__ import unicode_literals ,print_function
import serial
from time import sleep
import numpy as np
import matplotlib.pyplot as plt
import io
import csv
import time
import datetime
import struct
import tensorflow as tf
from tensorflow import keras
# Variable to get real value
MPU9250A_2g = 0.000061035156 # 0.000061035156 g/LSB
MPU9250A_4g = 0.000122070312 # 0.000122070312 g/LSB
MPU9250A_8g = 0.000244140625 # 0.000244140625 g/LSB
MPU9250A_16g = 0.000488281250 # 0.000488281250 g/LSB
MPU9250G_250dps = 0.007633587786 # 0.007633587786 dps/LSB
MPU9250G_500dps = 0.015267175572 # 0.015267175572 dps/LSB
MPU9250G_1000dps = 0.030487804878 # 0.030487804878 dps/LSB
MPU9250G_2000dps = 0.060975609756 # 0.060975609756 dps/LSB
MPU9250M_4800uT = 0.6 # 0.6 uT/LSB
MPU9250T_85degC = 0.002995177763 # 0.002995177763 degC/LSB
Magnetometer_Sensitivity_Scale_Factor = 0.15
# number of axis
numVariable = 24 # 4ch * 6acc
# Maximum time for measure
minuteLength = 25
# sampling rate
smplHz = 500
# Variable to count number of sampling
smpl_cnt = 0
# Variable to count number of fail
fail_cnt_byte = 0
fail_cnt_head = 0
# Array to store data
buf = [[0 for i in range(numVariable + 2)] for j in range(smplHz*60*minuteLength)]
# Array to store real value
buf_f = [[0 for i in range(numVariable + 2)] for j in range(smplHz*60*minuteLength)]
# define serial port
ser = serial.Serial("COM3",921600,timeout=1)
# Check serial connection
if ser.is_open:
print("Start Serial Connection")
else:
print("PORT ERROR")
ser.close()
exit()
# Function to create csv file
def writeCSV():
global ser
global smpl_cnt
global buf
global buf_f
ser.write(b'r')
print("Start Create CSV File")
head = ["sample_cc","ms","ACC_X1","ACC_Y1","ACC_Z1","GYRO_X1","GYRO_Y1","GYRO_Z1","ACC_X2","ACC_Y2","ACC_Z2","GYRO_X2","GYRO_Y2","GYRO_Z2","ACC_X3","ACC_Y3","ACC_Z3","GYRO_X3","GYRO_Y3","GYRO_Z3","ACC_X4","ACC_Y4","ACC_Z4","GYRO_X4","GYRO_Y4","GYRO_Z4"]
dt_now = datetime.datetime.now()
year = dt_now.year
month = dt_now.month
day = dt_now.day
hour = dt_now.hour
minute = dt_now.minute
t = str(year)+str(month)+str(day)+str(hour)+str(minute)
title_int = "acc_data"+str(t)+"_int"+".csv"
FILE_int = open(title_int,"w",newline="")
title_float="acc_data"+str(t)+"_float"+".csv"
FILE_float = open(title_float,"w",newline="")
wi = csv.writer(FILE_int)
wi.writerow(head)
wf = csv.writer(FILE_float)
wf.writerow(head)
for i in range(smpl_cnt):
wi.writerow(buf[i])
wf.writerow(buf_f[i])
FILE_int.close()
FILE_float.close()
print()
print(title_int+" "+"created")
print(title_float+" "+"created")
print()
print("Done Create CSV File")
# Function to Measure
def readByte():
global ser
global smpl_cnt
global buf
global buf_f
global xl
global yl
global fail_cnt_byte
global fail_cnt_head
ser.write(b"r")
time.sleep(0.01)
ser.write(b"s")
time.sleep(0.01)
state = 0
store = []
while(1):
res = ser.read()
if state == 0 and res == b'\r':
res=ser.read()
if res == b'\n':
state = 1
store = []
else:
#print("End byte set error")
fail_cnt_byte += 1
#time.sleep(2)
elif state == 1:
store.append(res)
if len(store)==50:
# check header
if store[0]==b'*':
del store[0]
else:
#print("header error")
#time.sleep(2)
fail_cnt_head += 1
state = 0
store = []
continue
#add time stamp
if smpl_cnt==0:
#start_time = int.from_bytes(res[-1],"big")
start_time = struct.unpack("b",store[-1])[0]
#start_time = store[-1]
#print(start_time)
tmp_time = 0
else:
#now_time = int.from_bytes(res[-1],"big")
add_time = struct.unpack("b",store[-1])[0]
#add_time = store[-1]
tmp_time += add_time
buf[smpl_cnt][1] = tmp_time
buf_f[smpl_cnt][1] = tmp_time
buf[smpl_cnt][0] = smpl_cnt
buf_f[smpl_cnt][0] = smpl_cnt
# store data
for i in range(0,48,2):
res2 = store[i:i+2]
tup = struct.unpack('>h', b''.join(res2))
val = tup[0]
num = i%12
ch = i//12
buf[smpl_cnt][6*ch + num//2 + 2]=val
if (num//2)>=3:
buf_f[smpl_cnt][6*ch + num//2 + 2]=val * MPU9250G_500dps
else:
buf_f[smpl_cnt][6*ch + num//2 + 2]=val * MPU9250A_4g
smpl_cnt += 1
store = []
state = 0
if smpl_cnt>=16000:
break
# Start
print("ready? --> press s key")
while(1):
ready_s = input()
if ready_s == "s":
break
if ready_s == "r":
print("over")
ser.close()
exit()
# Measure the start time
p_time = time.time()
# Function to measure
readByte()
# Measure the end time
e_time = time.time()
# The time it took
print("time: ",e_time - p_time)
# Function to create csv file
writeCSV()
# close serial port
ser.close()
print("number of data: ",smpl_cnt)
print("number of byte fail: ",fail_cnt_byte)
print("number of header fail: ",fail_cnt_head)
print("END") | [
"csv.writer",
"time.sleep",
"datetime.datetime.now",
"struct.unpack",
"serial.Serial",
"time.time"
] | [((1527, 1567), 'serial.Serial', 'serial.Serial', (['"""COM3"""', '(921600)'], {'timeout': '(1)'}), "('COM3', 921600, timeout=1)\n", (1540, 1567), False, 'import serial\n'), ((5693, 5704), 'time.time', 'time.time', ([], {}), '()\n', (5702, 5704), False, 'import time\n'), ((5775, 5786), 'time.time', 'time.time', ([], {}), '()\n', (5784, 5786), False, 'import time\n'), ((2157, 2180), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2178, 2180), False, 'import datetime\n'), ((2575, 2595), 'csv.writer', 'csv.writer', (['FILE_int'], {}), '(FILE_int)\n', (2585, 2595), False, 'import csv\n'), ((2627, 2649), 'csv.writer', 'csv.writer', (['FILE_float'], {}), '(FILE_float)\n', (2637, 2649), False, 'import csv\n'), ((3169, 3185), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (3179, 3185), False, 'import time\n'), ((3211, 3227), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (3221, 3227), False, 'import time\n'), ((4197, 4226), 'struct.unpack', 'struct.unpack', (['"""b"""', 'store[-1]'], {}), "('b', store[-1])\n", (4210, 4226), False, 'import struct\n'), ((4461, 4490), 'struct.unpack', 'struct.unpack', (['"""b"""', 'store[-1]'], {}), "('b', store[-1])\n", (4474, 4490), False, 'import struct\n')] |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test results APIs."""
import typing
# Non-standard docstrings are used to generate the API documentation.
import endpoints
from protorpc import message_types
from protorpc import messages
from protorpc import remote
from multitest_transport.api import base
from multitest_transport.models import messages as mtt_messages
from multitest_transport.models import ndb_models
from multitest_transport.models import sql_models
from multitest_transport.util import tfc_client
from multitest_transport.util import xts_result
@base.MTT_API.api_class(resource_name='test_result', path='test_results')
class TestResultApi(remote.Service):
"""Test results API handler."""
@base.ApiMethod(
endpoints.ResourceContainer(
message_types.VoidMessage,
attempt_id=messages.StringField(1),
test_run_id=messages.StringField(2)),
mtt_messages.TestModuleResultList,
path='modules', http_method='GET', name='list_modules')
def ListTestModuleResults(self, request):
"""Fetches a page of test module results.
Incomplete modules are returned first, and results are then sorted by
descending failure count.
Parameters:
attempt_id: Test run attempt ID
test_run_id: Test run ID (used if attempt_id not provided)
"""
if request.attempt_id:
# Use attempt_id directly if provided
return mtt_messages.TestModuleResultList(
results=self._GetTestModuleResults(request.attempt_id))
if not request.test_run_id:
# Invalid request (test_run_id and attempt_id not provided)
raise endpoints.BadRequestException('Test run ID or attempt ID required')
# Determine attempt_id from test_run_id (latest finished attempt)
test_run_id = request.test_run_id
test_run = ndb_models.TestRun.get_by_id(test_run_id)
if not test_run:
raise endpoints.NotFoundException('Test run %s not found' % test_run_id)
if not test_run.request_id:
return mtt_messages.TestModuleResultList(
extra_info='Test run %s not started' % test_run_id)
attempts = tfc_client.GetLatestFinishedAttempts(test_run.request_id)
result_list = mtt_messages.TestModuleResultList()
for attempt in attempts:
result_list.results.extend(self._GetTestModuleResults(attempt.attempt_id))
if not result_list.results:
# No results found for latest attempt, try fetching legacy results instead
return self._GetLegacyTestModuleResults(test_run.request_id)
return result_list
def _GetTestModuleResults(
self, attempt_id: str) -> typing.List[mtt_messages.TestModuleResult]:
"""Fetch test module results from the DB."""
with sql_models.db.Session() as session:
query = session.query(sql_models.TestModuleResult)
query = query.order_by(sql_models.TestModuleResult.complete,
sql_models.TestModuleResult.failed_tests.desc(),
sql_models.TestModuleResult.id)
query = query.filter_by(attempt_id=attempt_id)
modules = query.all()
return mtt_messages.ConvertList(modules, mtt_messages.TestModuleResult)
def _GetLegacyTestModuleResults(
self, request_id: int) -> mtt_messages.TestModuleResultList:
"""Fetch legacy test module results from TFC."""
invocation_status = tfc_client.GetRequestInvocationStatus(request_id)
test_group_statuses = sorted(
invocation_status.test_group_statuses or [],
key=lambda s: (s.is_complete, -s.failed_test_count))
results = mtt_messages.ConvertList(test_group_statuses,
mtt_messages.TestModuleResult)
return mtt_messages.TestModuleResultList(
results=results,
extra_info='Legacy test results from request %s' % request_id)
@base.ApiMethod(
endpoints.ResourceContainer(
message_types.VoidMessage,
module_id=messages.StringField(1, required=True),
max_results=messages.IntegerField(
2, default=base.DEFAULT_MAX_RESULTS),
page_token=messages.StringField(3),
status=messages.EnumField(xts_result.TestStatus, 4, repeated=True),
name=messages.StringField(5)),
mtt_messages.TestCaseResultList,
path='modules/{module_id}/test_cases',
http_method='GET', name='list_test_cases')
def ListTestCaseResults(self, request):
"""Fetches a page of test case results.
Parameters:
module_id: Test module ID
max_results: Maximum number of test results to return
page_token: Token for pagination
status: Set of statuses to include
name: Partial name filter (case-insensitive)
"""
with sql_models.db.Session() as session:
# Find parent module
module = session.query(sql_models.TestModuleResult).get(request.module_id)
if not module:
raise endpoints.NotFoundException(
'Module %s not found' % request.module_id)
# Initialize query (ordered by insertion order)
query = session.query(sql_models.TestCaseResult)
query = query.order_by(sql_models.TestCaseResult.id).with_parent(module)
# Apply page token (<last id>)
if request.page_token:
query = query.filter(
sql_models.TestCaseResult.id > int(request.page_token))
# Apply filters
if request.status:
query = query.filter(
sql_models.TestCaseResult.status.in_(request.status))
if request.name:
query = query.filter(
sql_models.TestCaseResult.name.contains(request.name))
# Fetch at most N + 1 results
if request.max_results and request.max_results > 0:
query = query.limit(request.max_results + 1)
test_cases = query.all()
# Generate next page token and response
next_page_token = None
if request.max_results and 0 < request.max_results < len(test_cases):
test_cases = test_cases[:-1]
next_page_token = str(test_cases[-1].id)
results = mtt_messages.ConvertList(test_cases, mtt_messages.TestCaseResult)
return mtt_messages.TestCaseResultList(
results=results, next_page_token=next_page_token)
| [
"multitest_transport.models.sql_models.TestModuleResult.failed_tests.desc",
"multitest_transport.models.sql_models.db.Session",
"multitest_transport.models.messages.ConvertList",
"protorpc.messages.EnumField",
"endpoints.BadRequestException",
"multitest_transport.util.tfc_client.GetLatestFinishedAttempts"... | [((1101, 1173), 'multitest_transport.api.base.MTT_API.api_class', 'base.MTT_API.api_class', ([], {'resource_name': '"""test_result"""', 'path': '"""test_results"""'}), "(resource_name='test_result', path='test_results')\n", (1123, 1173), False, 'from multitest_transport.api import base\n'), ((2345, 2386), 'multitest_transport.models.ndb_models.TestRun.get_by_id', 'ndb_models.TestRun.get_by_id', (['test_run_id'], {}), '(test_run_id)\n', (2373, 2386), False, 'from multitest_transport.models import ndb_models\n'), ((2646, 2703), 'multitest_transport.util.tfc_client.GetLatestFinishedAttempts', 'tfc_client.GetLatestFinishedAttempts', (['test_run.request_id'], {}), '(test_run.request_id)\n', (2682, 2703), False, 'from multitest_transport.util import tfc_client\n'), ((2722, 2757), 'multitest_transport.models.messages.TestModuleResultList', 'mtt_messages.TestModuleResultList', ([], {}), '()\n', (2755, 2757), True, 'from multitest_transport.models import messages as mtt_messages\n'), ((3626, 3690), 'multitest_transport.models.messages.ConvertList', 'mtt_messages.ConvertList', (['modules', 'mtt_messages.TestModuleResult'], {}), '(modules, mtt_messages.TestModuleResult)\n', (3650, 3690), True, 'from multitest_transport.models import messages as mtt_messages\n'), ((3871, 3920), 'multitest_transport.util.tfc_client.GetRequestInvocationStatus', 'tfc_client.GetRequestInvocationStatus', (['request_id'], {}), '(request_id)\n', (3908, 3920), False, 'from multitest_transport.util import tfc_client\n'), ((4083, 4159), 'multitest_transport.models.messages.ConvertList', 'mtt_messages.ConvertList', (['test_group_statuses', 'mtt_messages.TestModuleResult'], {}), '(test_group_statuses, mtt_messages.TestModuleResult)\n', (4107, 4159), True, 'from multitest_transport.models import messages as mtt_messages\n'), ((4210, 4328), 'multitest_transport.models.messages.TestModuleResultList', 'mtt_messages.TestModuleResultList', ([], {'results': 'results', 'extra_info': "('Legacy test results from request %s' % request_id)"}), "(results=results, extra_info=\n 'Legacy test results from request %s' % request_id)\n", (4243, 4328), True, 'from multitest_transport.models import messages as mtt_messages\n'), ((6529, 6594), 'multitest_transport.models.messages.ConvertList', 'mtt_messages.ConvertList', (['test_cases', 'mtt_messages.TestCaseResult'], {}), '(test_cases, mtt_messages.TestCaseResult)\n', (6553, 6594), True, 'from multitest_transport.models import messages as mtt_messages\n'), ((6606, 6692), 'multitest_transport.models.messages.TestCaseResultList', 'mtt_messages.TestCaseResultList', ([], {'results': 'results', 'next_page_token': 'next_page_token'}), '(results=results, next_page_token=\n next_page_token)\n', (6637, 6692), True, 'from multitest_transport.models import messages as mtt_messages\n'), ((2153, 2220), 'endpoints.BadRequestException', 'endpoints.BadRequestException', (['"""Test run ID or attempt ID required"""'], {}), "('Test run ID or attempt ID required')\n", (2182, 2220), False, 'import endpoints\n'), ((2420, 2486), 'endpoints.NotFoundException', 'endpoints.NotFoundException', (["('Test run %s not found' % test_run_id)"], {}), "('Test run %s not found' % test_run_id)\n", (2447, 2486), False, 'import endpoints\n'), ((2533, 2622), 'multitest_transport.models.messages.TestModuleResultList', 'mtt_messages.TestModuleResultList', ([], {'extra_info': "('Test run %s not started' % test_run_id)"}), "(extra_info='Test run %s not started' %\n test_run_id)\n", (2566, 2622), True, 'from multitest_transport.models import messages as mtt_messages\n'), ((3235, 3258), 'multitest_transport.models.sql_models.db.Session', 'sql_models.db.Session', ([], {}), '()\n', (3256, 3258), False, 'from multitest_transport.models import sql_models\n'), ((5231, 5254), 'multitest_transport.models.sql_models.db.Session', 'sql_models.db.Session', ([], {}), '()\n', (5252, 5254), False, 'from multitest_transport.models import sql_models\n'), ((1358, 1381), 'protorpc.messages.StringField', 'messages.StringField', (['(1)'], {}), '(1)\n', (1378, 1381), False, 'from protorpc import messages\n'), ((1405, 1428), 'protorpc.messages.StringField', 'messages.StringField', (['(2)'], {}), '(2)\n', (1425, 1428), False, 'from protorpc import messages\n'), ((3424, 3471), 'multitest_transport.models.sql_models.TestModuleResult.failed_tests.desc', 'sql_models.TestModuleResult.failed_tests.desc', ([], {}), '()\n', (3469, 3471), False, 'from multitest_transport.models import sql_models\n'), ((5410, 5480), 'endpoints.NotFoundException', 'endpoints.NotFoundException', (["('Module %s not found' % request.module_id)"], {}), "('Module %s not found' % request.module_id)\n", (5437, 5480), False, 'import endpoints\n'), ((4453, 4491), 'protorpc.messages.StringField', 'messages.StringField', (['(1)'], {'required': '(True)'}), '(1, required=True)\n', (4473, 4491), False, 'from protorpc import messages\n'), ((4515, 4573), 'protorpc.messages.IntegerField', 'messages.IntegerField', (['(2)'], {'default': 'base.DEFAULT_MAX_RESULTS'}), '(2, default=base.DEFAULT_MAX_RESULTS)\n', (4536, 4573), False, 'from protorpc import messages\n'), ((4611, 4634), 'protorpc.messages.StringField', 'messages.StringField', (['(3)'], {}), '(3)\n', (4631, 4634), False, 'from protorpc import messages\n'), ((4653, 4712), 'protorpc.messages.EnumField', 'messages.EnumField', (['xts_result.TestStatus', '(4)'], {'repeated': '(True)'}), '(xts_result.TestStatus, 4, repeated=True)\n', (4671, 4712), False, 'from protorpc import messages\n'), ((4729, 4752), 'protorpc.messages.StringField', 'messages.StringField', (['(5)'], {}), '(5)\n', (4749, 4752), False, 'from protorpc import messages\n'), ((5935, 5987), 'multitest_transport.models.sql_models.TestCaseResult.status.in_', 'sql_models.TestCaseResult.status.in_', (['request.status'], {}), '(request.status)\n', (5971, 5987), False, 'from multitest_transport.models import sql_models\n'), ((6054, 6107), 'multitest_transport.models.sql_models.TestCaseResult.name.contains', 'sql_models.TestCaseResult.name.contains', (['request.name'], {}), '(request.name)\n', (6093, 6107), False, 'from multitest_transport.models import sql_models\n')] |
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Device Stats Monitor
====================
Monitors and logs device stats during training.
"""
from typing import Any, Dict, Optional
import pytorch_lightning as pl
from pytorch_lightning.callbacks.base import Callback
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.types import STEP_OUTPUT
class DeviceStatsMonitor(Callback):
r"""
Automatically monitors and logs device stats during training stage. ``DeviceStatsMonitor``
is a special callback as it requires a ``logger`` to passed as argument to the ``Trainer``.
Raises:
MisconfigurationException:
If ``Trainer`` has no logger.
Example:
>>> from pytorch_lightning import Trainer
>>> from pytorch_lightning.callbacks import DeviceStatsMonitor
>>> device_stats = DeviceStatsMonitor() # doctest: +SKIP
>>> trainer = Trainer(callbacks=[device_stats]) # doctest: +SKIP
"""
def setup(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", stage: Optional[str] = None) -> None:
if not trainer.logger:
raise MisconfigurationException("Cannot use DeviceStatsMonitor callback with Trainer that has no logger.")
def on_train_batch_start(
self,
trainer: "pl.Trainer",
pl_module: "pl.LightningModule",
batch: Any,
batch_idx: int,
unused: Optional[int] = 0,
) -> None:
if not trainer.logger_connector.should_update_logs:
return
device_stats = trainer.accelerator.get_device_stats(pl_module.device)
prefixed_device_stats = prefix_metrics_keys(device_stats, "on_train_batch_start")
trainer.logger.log_metrics(prefixed_device_stats, step=trainer.global_step)
def on_train_batch_end(
self,
trainer: "pl.Trainer",
pl_module: "pl.LightningModule",
outputs: STEP_OUTPUT,
batch: Any,
batch_idx: int,
unused: Optional[int] = 0,
) -> None:
if not trainer.logger_connector.should_update_logs:
return
device_stats = trainer.accelerator.get_device_stats(pl_module.device)
prefixed_device_stats = prefix_metrics_keys(device_stats, "on_train_batch_end")
trainer.logger.log_metrics(prefixed_device_stats, step=trainer.global_step)
def prefix_metrics_keys(metrics_dict: Dict[str, float], prefix: str) -> Dict[str, float]:
return {prefix + "." + k: v for k, v in metrics_dict.items()}
| [
"pytorch_lightning.utilities.exceptions.MisconfigurationException"
] | [((1719, 1824), 'pytorch_lightning.utilities.exceptions.MisconfigurationException', 'MisconfigurationException', (['"""Cannot use DeviceStatsMonitor callback with Trainer that has no logger."""'], {}), "(\n 'Cannot use DeviceStatsMonitor callback with Trainer that has no logger.')\n", (1744, 1824), False, 'from pytorch_lightning.utilities.exceptions import MisconfigurationException\n')] |
import pickle
import pytest
import numpy as np
from astropy.coordinates import Longitude
from astropy import coordinates as coord
from astropy.tests.helper import pickle_protocol, check_pickling_recovery # noqa
# Can't test distances without scipy due to cosmology deps
from astropy.utils.compat.optional_deps import HAS_SCIPY # noqa
def test_basic():
lon1 = Longitude(1.23, "radian", wrap_angle='180d')
s = pickle.dumps(lon1)
lon2 = pickle.loads(s)
def test_pickle_longitude_wrap_angle():
a = Longitude(1.23, "radian", wrap_angle='180d')
s = pickle.dumps(a)
b = pickle.loads(s)
assert a.rad == b.rad
assert a.wrap_angle == b.wrap_angle
_names = [coord.Angle,
coord.Distance,
coord.DynamicMatrixTransform,
coord.ICRS,
coord.Latitude,
coord.Longitude,
coord.StaticMatrixTransform,
]
_xfail = [False,
not HAS_SCIPY,
True,
True,
False,
True,
False]
_args = [[0.0],
[],
[lambda *args: np.identity(3), coord.ICRS, coord.ICRS],
[0, 0],
[0],
[0],
[np.identity(3), coord.ICRS, coord.ICRS],
]
_kwargs = [{'unit': 'radian'},
{'z': 0.23},
{},
{'unit': ['radian', 'radian']},
{'unit': 'radian'},
{'unit': 'radian'},
{},
]
@pytest.mark.parametrize(("name", "args", "kwargs", "xfail"),
zip(_names, _args, _kwargs, _xfail))
def test_simple_object(pickle_protocol, name, args, kwargs, xfail):
# Tests easily instantiated objects
if xfail:
pytest.xfail()
original = name(*args, **kwargs)
check_pickling_recovery(original, pickle_protocol)
| [
"numpy.identity",
"pickle.dumps",
"astropy.coordinates.Longitude",
"pickle.loads",
"astropy.tests.helper.check_pickling_recovery",
"pytest.xfail"
] | [((369, 413), 'astropy.coordinates.Longitude', 'Longitude', (['(1.23)', '"""radian"""'], {'wrap_angle': '"""180d"""'}), "(1.23, 'radian', wrap_angle='180d')\n", (378, 413), False, 'from astropy.coordinates import Longitude\n'), ((422, 440), 'pickle.dumps', 'pickle.dumps', (['lon1'], {}), '(lon1)\n', (434, 440), False, 'import pickle\n'), ((452, 467), 'pickle.loads', 'pickle.loads', (['s'], {}), '(s)\n', (464, 467), False, 'import pickle\n'), ((518, 562), 'astropy.coordinates.Longitude', 'Longitude', (['(1.23)', '"""radian"""'], {'wrap_angle': '"""180d"""'}), "(1.23, 'radian', wrap_angle='180d')\n", (527, 562), False, 'from astropy.coordinates import Longitude\n'), ((571, 586), 'pickle.dumps', 'pickle.dumps', (['a'], {}), '(a)\n', (583, 586), False, 'import pickle\n'), ((595, 610), 'pickle.loads', 'pickle.loads', (['s'], {}), '(s)\n', (607, 610), False, 'import pickle\n'), ((1738, 1788), 'astropy.tests.helper.check_pickling_recovery', 'check_pickling_recovery', (['original', 'pickle_protocol'], {}), '(original, pickle_protocol)\n', (1761, 1788), False, 'from astropy.tests.helper import pickle_protocol, check_pickling_recovery\n'), ((1170, 1184), 'numpy.identity', 'np.identity', (['(3)'], {}), '(3)\n', (1181, 1184), True, 'import numpy as np\n'), ((1682, 1696), 'pytest.xfail', 'pytest.xfail', ([], {}), '()\n', (1694, 1696), False, 'import pytest\n'), ((1074, 1088), 'numpy.identity', 'np.identity', (['(3)'], {}), '(3)\n', (1085, 1088), True, 'import numpy as np\n')] |
#!/usr/bin/env python
#
# Copyright (C) 2017 ShadowMan
#
import operator
import numpy as np
group = np.array([
[1.0, 1.1],
[1.0, 1.0],
[0.0, 0.0],
[0.0, 0.1]
])
labels = ['A', 'A', 'B','B']
def auto_normal(data_set):
# 寻找一行/列中的最小值
# axis:表示行(1)或者列(0)
min_values = data_set.min(axis = 0)
# 寻找一行/列中的最大值
# axis:表示行(1)或者列(0)
max_values = data_set.max(axis = 0)
# 计算最大值和最小值之间的差值
diff_range = max_values - min_values
# 归一化的数组
normal_array = np.zeros(data_set.shape)
# 获得所有行的数目
row_count = data_set.shape[0]
# 得到减去最小值之后的数据集合
normal_array = data_set - np.tile(min_values, (row_count, 1))
# 得到归一化的数组
# 将 [1, 2, 3, 4, 5] 转化到 0 - 1 范围内
# 首先获取最小值为1,最大值为5,差值为4
# 将每个值减去这个最小值得到: [0, 1, 2, 3, 4]
# 最后将每个值除以差值: [0, .25, .5, .75, 1]
# 直接除以最大值是不对的,因为最小值应该转化之后为0,最大值转化之后为1
# 所以需要先把最小值变成0,然后再除以最小值变成0后的最大值(就是最大值和最小值的差值)
normal_array = normal_array / diff_range
# 返回结果
return normal_array, min_values, diff_range
def knn_classify(inX, data_set, labels, k):
# 将输入数据集进行归一化
data_set, min_values, diff_range = auto_normal(data_set)
# 将将要预测值进行归一化
inX = (inX - min_values) / diff_range
# shape 或者该 array/matrix 的大小, 结果为 [行数, 列数]
data_set_row_size = data_set.shape[0] # >>> 4
# 扩展 array/matrix
# 第二个参数如果是 int, 那就在列方向上重复 第一个参数 n 次
# [[1,2],[3,4]] 2 -> [[1,2,1,2],[3,4,3,4]]
# 第二个参数是元组,那就在列方向上重复 t[0] 次,在行方向上重复 t[1] 次
# [[1,2],[3,4]] (2, 3) -> [[1,2,1,2],[3,4,3,4],[1,2,1,2],[3,4,3,4],[1,2,1,2],[3,4,3,4]]
extra_array = np.tile(inX, (data_set_row_size, 1)) # >>> [[1.0, 0.9], [1.0, 0.9], [1.0, 0.9], [1.0, 0.9]]
# 将扩展的输入与每个数据进行对比,计算距离
# 上面已经将输入扩展为和原有数据集相同的行数
# 所以直接将 扩展的数据数组 - 已有数据数组 => 输入数据与原有每条数据对应的差值
# [[1.0, 1.1], [1.0, 1.0], [0.0, 0.0], [0.0, 0.1]]
# [[1.0, 0.9], [1.0, 0.9], [1.0, 0.9], [1.0, 0.9]]
#--------------------------------------------------
# [[0.0, 0.2], [0.0, 0.1], [-1., -.9], [-1., -.8]]
difference_array = extra_array - data_set # >>> [[0.0, 0.2], [0.0, 0.1], [-1., -.9], [-1., -.8]]
# 计算差值的平方
square_difference_array = difference_array ** 2 # >>> [[0.0, 0.04], [0.0, 0.01], [1.0, 0.81], [1.0, 0.64]]
# 计算差值平方和
# axis = 1 => 同行相加,[[1,2],[3,4]] => [3,7]
# axis = 0 -> 同列相加,[[1,2],[3,4]] => [4,6]
# 所以这是将每个数据行的差值的平方加起来
square_difference_matrix_sum = square_difference_array.sum(axis=1) # >>> [0.04, 0.01, 1.81, 1.64]
# 计算距离:将每个平方和开根号
# 计算距离的公式为:sqrt( sum( ((X1 - X2) ** 2) + ((Y1 - Y2) ** 2) ) )
# 其实就是在坐标轴上计算两点距离,即计算三角形第三条边
distances = square_difference_matrix_sum ** 0.5 # >>> [0.2, 0.1, 1.3453624, 1.28062485]
# 根据距离进行排序
# 返回值是一个数组,数组里是根据输入数组的值从小到大排序的索引
# np.array([2, 1, 3]) => [1, 0, 2]
# 最小的值的索引是1,其次是0,最后是2
sorted_distances = distances.argsort() # >>> [1, 0, 3, 2]
# 用于储存前k个最佳匹配的标签
# label => occurs_count
vote_labels = {}
for i in range(k):
# 获取前 i 个最佳匹配的标签
# sorted_distances[i] => 第 i 个最近距离的标签的索引
label = labels[sorted_distances[i]]
# 设置最佳匹配对应的标签的出现次数
vote_labels[label] = vote_labels.get(label, 0) + 1
# 根据标签出现次数进行投票
# operator.itemgetter(1) <===> lambda el: el[1]
sorted_vote_labels = sorted(vote_labels.items(), key=operator.itemgetter(1), reverse=True)
# 获取最佳的标签
return sorted_vote_labels[0][0]
if __name__ == '__main__':
print(knn_classify((1.0, 0.5), group, labels, 2))
print(knn_classify((18, 90), np.array([
[3, 104], [2, 100], [1, 81], [101, 10], [99, 5], [98, 2]
]), [ 'M', 'M', 'M', 'A', 'A', 'A'], 5))
| [
"numpy.array",
"numpy.zeros",
"numpy.tile",
"operator.itemgetter"
] | [((101, 159), 'numpy.array', 'np.array', (['[[1.0, 1.1], [1.0, 1.0], [0.0, 0.0], [0.0, 0.1]]'], {}), '([[1.0, 1.1], [1.0, 1.0], [0.0, 0.0], [0.0, 0.1]])\n', (109, 159), True, 'import numpy as np\n'), ((497, 521), 'numpy.zeros', 'np.zeros', (['data_set.shape'], {}), '(data_set.shape)\n', (505, 521), True, 'import numpy as np\n'), ((1606, 1642), 'numpy.tile', 'np.tile', (['inX', '(data_set_row_size, 1)'], {}), '(inX, (data_set_row_size, 1))\n', (1613, 1642), True, 'import numpy as np\n'), ((622, 657), 'numpy.tile', 'np.tile', (['min_values', '(row_count, 1)'], {}), '(min_values, (row_count, 1))\n', (629, 657), True, 'import numpy as np\n'), ((3358, 3380), 'operator.itemgetter', 'operator.itemgetter', (['(1)'], {}), '(1)\n', (3377, 3380), False, 'import operator\n'), ((3562, 3630), 'numpy.array', 'np.array', (['[[3, 104], [2, 100], [1, 81], [101, 10], [99, 5], [98, 2]]'], {}), '([[3, 104], [2, 100], [1, 81], [101, 10], [99, 5], [98, 2]])\n', (3570, 3630), True, 'import numpy as np\n')] |
import os
from typing import Tuple, List
from controller.invoker.invoker_task_exporting import TaskExportingInvoker
from controller import config
from controller.label_model.label_studio import LabelStudio
from controller.utils.app_logger import logger
def prepare_label_dir(working_dir: str, task_id: str) -> Tuple[str, str, str, str, str]:
asset_dir = os.path.join(working_dir, f"label_{task_id}", "Images")
os.makedirs(asset_dir, exist_ok=True)
export_path = os.path.join(working_dir, "label_{task_id}".format(task_id=task_id), "label_studio_output")
os.makedirs(export_path, exist_ok=True)
# keep same name as other task
monitor_file_path = os.path.join(working_dir, "out", "monitor.txt")
export_work_dir = os.path.join(working_dir, "export_work_dir")
os.makedirs(export_work_dir, exist_ok=True)
import_work_dir = os.path.join(working_dir, "import_work_dir")
os.makedirs(import_work_dir, exist_ok=True)
return asset_dir, export_path, monitor_file_path, export_work_dir, import_work_dir
def trigger_ymir_export(repo_root: str, dataset_id: str, asset_dir: str, media_location: str, export_work_dir: str) -> None:
# trigger ymir export, so that we can get pictures from ymir
TaskExportingInvoker.exporting_cmd(
repo_root=repo_root,
dataset_id=dataset_id,
format="none",
asset_dir=asset_dir,
annotation_dir=asset_dir,
media_location=media_location,
work_dir=export_work_dir
)
def gen_index_file(asset_dir: str) -> str:
# generate index file for mir command, keep same name as other task
media_files = [os.path.join(asset_dir, f) for f in os.listdir(asset_dir) if f.endswith(".jpeg")]
index_file = os.path.join(asset_dir, "index.txt")
with open(index_file, "w") as f:
f.write("\n".join(media_files))
return index_file
def start_label_task(
repo_root: str,
working_dir: str,
media_location: str,
task_id: str,
project_name: str,
dataset_id: str,
keywords: List,
collaborators: List,
expert_instruction: str,
) -> None:
logger.info("start label task!!!")
# set your lable tools name
if config.LABEL_STUDIO == config.LABEL_TOOL:
label_instance = LabelStudio()
else:
raise ValueError("Error! Please setting your label tools")
asset_dir, export_path, monitor_file_path, export_work_dir, import_work_dir = prepare_label_dir(
working_dir, task_id
)
trigger_ymir_export(repo_root, dataset_id, asset_dir, media_location, export_work_dir)
index_file = gen_index_file(asset_dir)
label_instance.run(
task_id=task_id,
project_name=project_name,
keywords=keywords,
collaborators=collaborators,
expert_instruction=expert_instruction,
asset_dir=asset_dir,
export_path=export_path,
monitor_file_path=monitor_file_path,
repo_root=repo_root,
index_file=index_file,
media_location=media_location,
import_work_dir=import_work_dir
)
logger.info("finish label task!!!")
| [
"os.listdir",
"controller.utils.app_logger.logger.info",
"os.makedirs",
"controller.invoker.invoker_task_exporting.TaskExportingInvoker.exporting_cmd",
"os.path.join",
"controller.label_model.label_studio.LabelStudio"
] | [((361, 416), 'os.path.join', 'os.path.join', (['working_dir', 'f"""label_{task_id}"""', '"""Images"""'], {}), "(working_dir, f'label_{task_id}', 'Images')\n", (373, 416), False, 'import os\n'), ((421, 458), 'os.makedirs', 'os.makedirs', (['asset_dir'], {'exist_ok': '(True)'}), '(asset_dir, exist_ok=True)\n', (432, 458), False, 'import os\n'), ((574, 613), 'os.makedirs', 'os.makedirs', (['export_path'], {'exist_ok': '(True)'}), '(export_path, exist_ok=True)\n', (585, 613), False, 'import os\n'), ((674, 721), 'os.path.join', 'os.path.join', (['working_dir', '"""out"""', '"""monitor.txt"""'], {}), "(working_dir, 'out', 'monitor.txt')\n", (686, 721), False, 'import os\n'), ((745, 789), 'os.path.join', 'os.path.join', (['working_dir', '"""export_work_dir"""'], {}), "(working_dir, 'export_work_dir')\n", (757, 789), False, 'import os\n'), ((794, 837), 'os.makedirs', 'os.makedirs', (['export_work_dir'], {'exist_ok': '(True)'}), '(export_work_dir, exist_ok=True)\n', (805, 837), False, 'import os\n'), ((860, 904), 'os.path.join', 'os.path.join', (['working_dir', '"""import_work_dir"""'], {}), "(working_dir, 'import_work_dir')\n", (872, 904), False, 'import os\n'), ((909, 952), 'os.makedirs', 'os.makedirs', (['import_work_dir'], {'exist_ok': '(True)'}), '(import_work_dir, exist_ok=True)\n', (920, 952), False, 'import os\n'), ((1237, 1444), 'controller.invoker.invoker_task_exporting.TaskExportingInvoker.exporting_cmd', 'TaskExportingInvoker.exporting_cmd', ([], {'repo_root': 'repo_root', 'dataset_id': 'dataset_id', 'format': '"""none"""', 'asset_dir': 'asset_dir', 'annotation_dir': 'asset_dir', 'media_location': 'media_location', 'work_dir': 'export_work_dir'}), "(repo_root=repo_root, dataset_id=\n dataset_id, format='none', asset_dir=asset_dir, annotation_dir=\n asset_dir, media_location=media_location, work_dir=export_work_dir)\n", (1271, 1444), False, 'from controller.invoker.invoker_task_exporting import TaskExportingInvoker\n'), ((1732, 1768), 'os.path.join', 'os.path.join', (['asset_dir', '"""index.txt"""'], {}), "(asset_dir, 'index.txt')\n", (1744, 1768), False, 'import os\n'), ((2111, 2145), 'controller.utils.app_logger.logger.info', 'logger.info', (['"""start label task!!!"""'], {}), "('start label task!!!')\n", (2122, 2145), False, 'from controller.utils.app_logger import logger\n'), ((3065, 3100), 'controller.utils.app_logger.logger.info', 'logger.info', (['"""finish label task!!!"""'], {}), "('finish label task!!!')\n", (3076, 3100), False, 'from controller.utils.app_logger import logger\n'), ((1633, 1659), 'os.path.join', 'os.path.join', (['asset_dir', 'f'], {}), '(asset_dir, f)\n', (1645, 1659), False, 'import os\n'), ((2252, 2265), 'controller.label_model.label_studio.LabelStudio', 'LabelStudio', ([], {}), '()\n', (2263, 2265), False, 'from controller.label_model.label_studio import LabelStudio\n'), ((1669, 1690), 'os.listdir', 'os.listdir', (['asset_dir'], {}), '(asset_dir)\n', (1679, 1690), False, 'import os\n')] |
from math import pi, sin, cos
from panda3d.core import *
from direct.showbase.ShowBase import ShowBase
from direct.task import Task
from floorplan import Floorplan
import numpy as np
import random
import copy
class Viewer(ShowBase):
def __init__(self):
ShowBase.__init__(self)
#self.scene = self.loader.loadModel("floorplan_1.txt-floor.obj")
#self.scene = base.loader.loadModel("floorplan_1.txt-floor.egg")
#self.scene = base.loader.loadModel("panda.egg")
#self.scene = base.loader.loadModel("environment")
base.setBackgroundColor(0, 0, 0)
self.angle = 0.0
lens = PerspectiveLens()
lens.setFov(60)
lens.setNear(0.01)
lens.setFar(100000)
base.cam.node().setLens(lens)
floorplan = Floorplan('test/floorplan_7')
#floorplan.setFilename('test/floorplan_2')
floorplan.read()
self.scene = floorplan.generateEggModel()
self.scene.reparentTo(self.render)
#self.scene.setScale(0.01, 0.01, 0.01)
#self.scene.setTwoSided(True)
self.scene.setTwoSided(True)
#self.scene.setPos(0, 0, 3)
#texture = loader.loadTexture("floorplan_1.png")
#self.scene.setTexture(texture)
#self.scene.setHpr(0, 0, 0)
# angleDegrees = 0
# angleRadians = angleDegrees * (pi / 180.0)
# self.camera.setPos(20 * sin(angleRadians), -20 * cos(angleRadians), 3)
# self.camera.setHpr(angleDegrees, 0, 0)
#self.camera.lookAt(0, 0, 0)
self.alight = AmbientLight('alight')
self.alight.setColor(VBase4(0.2, 0.2, 0.2, 1))
self.alnp = self.render.attachNewNode(self.alight)
self.render.setLight(self.alnp)
dlight = DirectionalLight('dlight')
dlight.setColor(VBase4(1, 1, 1, 1))
dlnp = self.render.attachNewNode(dlight)
#dlnp.setHpr(0, -90, 0)
dlnp.setPos(0.5, 0.5, 3)
dlnp.lookAt(0.5, 0.5, 2)
self.render.setLight(dlnp)
for i in xrange(10):
plight = PointLight('plight')
plight.setAttenuation((1, 0, 1))
color = random.randint(10, 15)
plight.setColor(VBase4(color, color, color, 1))
plnp = self.render.attachNewNode(plight)
if i == 0:
plnp.setPos(0.5, 0.5, 3)
else:
plnp.setPos(1 * random.random(), 1 * random.random(), 0.3)
pass
self.render.setLight(plnp)
#base.useTrackball()
#base.trackball.node().setPos(2.0, 0, 3)
#base.trackball.node().setHpr(0, 0, 3)
#base.enableMouse()
#base.useDrive()
base.disableMouse()
self.taskMgr.add(self.spinCameraTask, "SpinCameraTask")
#self.accept('arrow_up', self.moveForward)
#self.accept('arrow_up_-repeat', self.moveForward)
self.topDownCameraPos = [0.5, 0.5, 1.5]
self.topDownTarget = [0.5, 0.499, 0.5]
self.topDownH = 0
self.startCameraPos = floorplan.startCameraPos
self.startTarget = floorplan.startTarget
self.startH = 0
self.cameraPos = self.topDownCameraPos
self.target = self.topDownTarget
self.H = self.topDownH
self.accept('space', self.openDoor)
self.accept('enter', self.startChangingView)
self.viewMode = 'T'
self.viewChangingProgress = 1.02
ceiling = self.scene.find("**/ceiling")
ceiling.hide()
return
def moveForward(self):
self.cameraPos[0] -= 0.1
def openDoor(self):
minDistance = 10000
doors = self.scene.find("**/doors")
for door in doors.getChildren():
mins, maxs = door.getTightBounds()
vec_1 = (mins + maxs) / 2 - Vec3(self.target[0], self.target[1], (mins[2] + maxs[2]) / 2)
vec_2 = (mins + maxs) / 2 - Vec3(self.cameraPos[0], self.cameraPos[1], (mins[2] + maxs[2]) / 2)
if (vec_1.dot(vec_2) > 0 and vec_1.length() > vec_2.length()) or np.arccos(abs(vec_1.dot(vec_2)) / (vec_1.length() * vec_2.length())) > np.pi / 4:
continue
distance = pow(pow(self.cameraPos[0] - (mins[0] + maxs[0]) / 2, 2) + pow(self.cameraPos[1] - (mins[1] + maxs[1]) / 2, 2) + pow(self.cameraPos[2] - (mins[2] + maxs[2]) / 2, 2), 0.5)
if distance < minDistance:
minDistanceDoor = door
minDistance = distance
pass
continue
if minDistance > 1:
return
mins, maxs = minDistanceDoor.getTightBounds()
if abs(maxs[0] - mins[0]) > abs(maxs[1] - mins[1]):
minsExpected = Vec3(mins[0] - (maxs[1] - mins[1]), mins[1], mins[2])
maxsExpected = Vec3(mins[0], mins[1] + (maxs[0] - mins[0]), maxs[2])
else:
minsExpected = Vec3(mins[0] - (maxs[1] - mins[1]) + (maxs[0] - mins[0]), mins[1] - (maxs[0] - mins[0]), mins[2])
maxsExpected = Vec3(mins[0] + (maxs[0] - mins[0]), mins[1] + (maxs[0] - mins[0]) - (maxs[0] - mins[0]), maxs[2])
pass
minDistanceDoor.setH(minDistanceDoor, 90)
mins, maxs = minDistanceDoor.getTightBounds()
minDistanceDoor.setPos(minDistanceDoor, minsExpected[1] - mins[1], -minsExpected[0] + mins[0], 0)
#print(scene.findAllMatches('doors'))
return
def startChangingView(self):
self.viewChangingProgress = 0
self.prevCameraPos = copy.deepcopy(self.cameraPos)
self.prevTarget = copy.deepcopy(self.target)
self.prevH = self.camera.getR()
if self.viewMode == 'T':
self.newCameraPos = self.startCameraPos
self.newTarget = self.startTarget
self.newH = self.startH
self.viewMode = 'C'
else:
self.newCameraPos = self.topDownCameraPos
self.newTarget = self.topDownTarget
self.newH = self.topDownH
self.startCameraPos = copy.deepcopy(self.cameraPos)
self.startTarget = copy.deepcopy(self.target)
self.startH = self.camera.getR()
self.viewMode = 'T'
pass
return
def changeView(self):
self.cameraPos = []
self.target = []
for c in xrange(3):
self.cameraPos.append(self.prevCameraPos[c] + (self.newCameraPos[c] - self.prevCameraPos[c]) * self.viewChangingProgress)
self.target.append(self.prevTarget[c] + (self.newTarget[c] - self.prevTarget[c]) * self.viewChangingProgress)
continue
self.H = self.prevH + (self.newH - self.prevH) * self.viewChangingProgress
if self.viewChangingProgress + 0.02 >= 1 and self.viewMode == 'C':
ceiling = self.scene.find("**/ceiling")
ceiling.show()
pass
if self.viewChangingProgress <= 0.02 and self.viewMode == 'T':
ceiling = self.scene.find("**/ceiling")
ceiling.hide()
pass
return
def spinCameraTask(self, task):
#print(task.time)
#angleDegrees = task.time * 6.0
movementStep = 0.003
if self.viewChangingProgress <= 1.01:
self.changeView()
self.viewChangingProgress += 0.02
pass
if base.mouseWatcherNode.is_button_down('w'):
for c in xrange(2):
step = movementStep * (self.target[c] - self.cameraPos[c])
self.cameraPos[c] += step
self.target[c] += step
continue
pass
if base.mouseWatcherNode.is_button_down('s'):
for c in xrange(2):
step = movementStep * (self.target[c] - self.cameraPos[c])
self.cameraPos[c] -= step
self.target[c] -= step
continue
pass
if base.mouseWatcherNode.is_button_down('a'):
step = movementStep * (self.target[0] - self.cameraPos[0])
self.cameraPos[1] += step
self.target[1] += step
step = movementStep * (self.target[1] - self.cameraPos[1])
self.cameraPos[0] -= step
self.target[0] -= step
pass
if base.mouseWatcherNode.is_button_down('d'):
step = movementStep * (self.target[0] - self.cameraPos[0])
self.cameraPos[1] -= step
self.target[1] -= step
step = movementStep * (self.target[1] - self.cameraPos[1])
self.cameraPos[0] += step
self.target[0] += step
pass
rotationStep = 0.02
if base.mouseWatcherNode.is_button_down('arrow_left'):
angle = np.angle(complex(self.target[0] - self.cameraPos[0], self.target[1] - self.cameraPos[1]))
angle += rotationStep
self.target[0] = self.cameraPos[0] + np.cos(angle)
self.target[1] = self.cameraPos[1] + np.sin(angle)
pass
if base.mouseWatcherNode.is_button_down('arrow_right'):
angle = np.angle(complex(self.target[0] - self.cameraPos[0], self.target[1] - self.cameraPos[1]))
angle -= rotationStep
self.target[0] = self.cameraPos[0] + np.cos(angle)
self.target[1] = self.cameraPos[1] + np.sin(angle)
pass
if base.mouseWatcherNode.is_button_down('arrow_up'):
angle = np.arcsin(self.target[2] - self.cameraPos[2])
angle += rotationStep
self.target[2] = self.cameraPos[2] + np.sin(angle)
pass
if base.mouseWatcherNode.is_button_down('arrow_down'):
angle = np.arcsin(self.target[2] - self.cameraPos[2])
angle -= rotationStep
self.target[2] = self.cameraPos[2] + np.sin(angle)
pass
angleDegrees = self.angle
angleRadians = angleDegrees * (pi / 180.0)
#self.camera.setPos(2.0 * sin(angleRadians), -2.0 * cos(angleRadians), 3)
self.camera.setPos(self.cameraPos[0], self.cameraPos[1], self.cameraPos[2])
#self.camera.setHpr(angleDegrees, 0, 0)
#self.camera.lookAt(0, 0, 0)
self.camera.lookAt(self.target[0], self.target[1], self.target[2])
self.camera.setR(self.H)
#if base.mouseWatcherNode.hasMouse()
return Task.cont
app = Viewer()
app.run()
| [
"numpy.arcsin",
"floorplan.Floorplan",
"numpy.cos",
"copy.deepcopy",
"numpy.sin",
"direct.showbase.ShowBase.ShowBase.__init__",
"random.random",
"random.randint"
] | [((261, 284), 'direct.showbase.ShowBase.ShowBase.__init__', 'ShowBase.__init__', (['self'], {}), '(self)\n', (278, 284), False, 'from direct.showbase.ShowBase import ShowBase\n'), ((737, 766), 'floorplan.Floorplan', 'Floorplan', (['"""test/floorplan_7"""'], {}), "('test/floorplan_7')\n", (746, 766), False, 'from floorplan import Floorplan\n'), ((4975, 5004), 'copy.deepcopy', 'copy.deepcopy', (['self.cameraPos'], {}), '(self.cameraPos)\n', (4988, 5004), False, 'import copy\n'), ((5027, 5053), 'copy.deepcopy', 'copy.deepcopy', (['self.target'], {}), '(self.target)\n', (5040, 5053), False, 'import copy\n'), ((1965, 1987), 'random.randint', 'random.randint', (['(10)', '(15)'], {}), '(10, 15)\n', (1979, 1987), False, 'import random\n'), ((5421, 5450), 'copy.deepcopy', 'copy.deepcopy', (['self.cameraPos'], {}), '(self.cameraPos)\n', (5434, 5450), False, 'import copy\n'), ((5476, 5502), 'copy.deepcopy', 'copy.deepcopy', (['self.target'], {}), '(self.target)\n', (5489, 5502), False, 'import copy\n'), ((8404, 8449), 'numpy.arcsin', 'np.arcsin', (['(self.target[2] - self.cameraPos[2])'], {}), '(self.target[2] - self.cameraPos[2])\n', (8413, 8449), True, 'import numpy as np\n'), ((8619, 8664), 'numpy.arcsin', 'np.arcsin', (['(self.target[2] - self.cameraPos[2])'], {}), '(self.target[2] - self.cameraPos[2])\n', (8628, 8664), True, 'import numpy as np\n'), ((7933, 7946), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (7939, 7946), True, 'import numpy as np\n'), ((7990, 8003), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (7996, 8003), True, 'import numpy as np\n'), ((8250, 8263), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (8256, 8263), True, 'import numpy as np\n'), ((8307, 8320), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (8313, 8320), True, 'import numpy as np\n'), ((8521, 8534), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (8527, 8534), True, 'import numpy as np\n'), ((8736, 8749), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (8742, 8749), True, 'import numpy as np\n'), ((2175, 2190), 'random.random', 'random.random', ([], {}), '()\n', (2188, 2190), False, 'import random\n'), ((2196, 2211), 'random.random', 'random.random', ([], {}), '()\n', (2209, 2211), False, 'import random\n')] |
import numpy as np
class KF1D:
# this EKF assumes constant covariance matrix, so calculations are much simpler
# the Kalman gain also needs to be precomputed using the control module
def __init__(self, x0, A, C, K):
self.x = x0
self.A = A
self.C = C
self.K = K
self.A_K = self.A - np.dot(self.K, self.C)
# K matrix needs to be pre-computed as follow:
# import control
# (x, l, K) = control.dare(np.transpose(self.A), np.transpose(self.C), Q, R)
# self.K = np.transpose(K)
def update(self, meas):
self.x = np.dot(self.A_K, self.x) + np.dot(self.K, meas)
return self.x
| [
"numpy.dot"
] | [((311, 333), 'numpy.dot', 'np.dot', (['self.K', 'self.C'], {}), '(self.K, self.C)\n', (317, 333), True, 'import numpy as np\n'), ((560, 584), 'numpy.dot', 'np.dot', (['self.A_K', 'self.x'], {}), '(self.A_K, self.x)\n', (566, 584), True, 'import numpy as np\n'), ((587, 607), 'numpy.dot', 'np.dot', (['self.K', 'meas'], {}), '(self.K, meas)\n', (593, 607), True, 'import numpy as np\n')] |
import datetime
import logging
import os
import boto3
from qiskit import IBMQ
from qiskit import QuantumCircuit, execute
logger = logging.getLogger()
logger.setLevel(logging.INFO)
backend_candidates = [
"ibmq_athens",
"ibmq_santiago",
"ibmq_belem",
"ibmq_quito",
"ibmq_lima",
]
def _get_provider():
if IBMQ.active_account() is None:
ibmq_token = os.environ["IBMQ_TOKEN"]
provider = IBMQ.enable_account(ibmq_token)
else:
provider = IBMQ.get_provider(hub="ibm-q", group="open", project="main")
return provider
def execute_circuit(event: dict, context) -> dict:
provider = _get_provider()
devices = provider.backends(
filters=lambda x: x.configuration().n_qubits >= 4
and not x.configuration().simulator
and x.status().status_msg == "active"
)
logger.info(f"active backend={devices}")
backend_name = None
for backend_candidate in backend_candidates:
for device in devices:
if backend_candidate == device.name():
backend_name = device.name()
break
if backend_name is not None:
break
if backend_name is None:
message = f"active backend is not found in {backend_candidates}. can not execute circuit."
logger.error(message)
raise Exception(message)
else:
logger.info(f"use backend={backend_name}")
# build quantum circuit
circuit = QuantumCircuit(4, 4)
circuit.h([0, 1, 2, 3])
circuit.measure([0, 1, 2, 3], [0, 1, 2, 3])
# execute
backend = provider.get_backend(backend_name)
job = execute(circuit, backend)
job_id = job.job_id()
response = {"backend_name": backend_name, "job_id": job_id}
logger.info(f"response={response}")
return response
def get_job_status(event: dict, context) -> dict:
logger.info(f"event={event}")
# get job status
provider = _get_provider()
job = provider.get_backend(event["backend_name"]).retrieve_job(event["job_id"])
logger.info(f"job={job}")
response = {
"backend_name": event["backend_name"],
"job_id": event["job_id"],
"job_status": job.status().name,
}
logger.info(f"response={response}")
return response
def store_result(event: dict, context) -> dict:
logger.info(f"event={event}")
# get job result
provider = _get_provider()
job = provider.get_backend(event["backend_name"]).retrieve_job(event["job_id"])
counts = job.result().get_counts()
result = dict([("num_of_" + key, value) for key, value in counts.items()])
result["id"] = "latest"
result["backend_name"] = event["backend_name"]
result["job_id"] = event["job_id"]
result["creation_date"] = job.creation_date()
# store the execution result to DynamoDB
dynamodb = boto3.resource("dynamodb")
table_name = os.environ["DYNAMODB_TABLE"]
table = dynamodb.Table(table_name)
# store latest record
table.put_item(Item=result)
# store history record
dt = datetime.datetime.strptime(result["creation_date"], "%Y-%m-%dT%H:%M:%S.%fZ")
result["id"] = dt.strftime("%Y%m%d-%H%M%S") # UTC
table.put_item(Item=result)
logger.info(f"result={result}")
return result
def invoke_update_horoscope(event: dict, context) -> dict:
logger.info(f"event={event}")
sfn = boto3.client("stepfunctions")
result = None
# get the arn of UpdateHoroscope Step Functions
state_machine_list = sfn.list_state_machines()
for state_machine in state_machine_list["stateMachines"]:
name = state_machine["name"]
if name == f"UpdateHoroscope-{os.environ['STAGE']}":
# execute UpdateHoroscope Step Functions
arn = state_machine["stateMachineArn"]
sfn.start_execution(stateMachineArn=arn)
result = {"name": name, "arn": arn}
logger.info(f"sfn.start_execution: result={result}")
break
return result
| [
"logging.getLogger",
"boto3.client",
"qiskit.execute",
"datetime.datetime.strptime",
"qiskit.IBMQ.enable_account",
"qiskit.IBMQ.active_account",
"boto3.resource",
"qiskit.IBMQ.get_provider",
"qiskit.QuantumCircuit"
] | [((132, 151), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (149, 151), False, 'import logging\n'), ((1460, 1480), 'qiskit.QuantumCircuit', 'QuantumCircuit', (['(4)', '(4)'], {}), '(4, 4)\n', (1474, 1480), False, 'from qiskit import QuantumCircuit, execute\n'), ((1631, 1656), 'qiskit.execute', 'execute', (['circuit', 'backend'], {}), '(circuit, backend)\n', (1638, 1656), False, 'from qiskit import QuantumCircuit, execute\n'), ((2836, 2862), 'boto3.resource', 'boto3.resource', (['"""dynamodb"""'], {}), "('dynamodb')\n", (2850, 2862), False, 'import boto3\n'), ((3042, 3118), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (["result['creation_date']", '"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {}), "(result['creation_date'], '%Y-%m-%dT%H:%M:%S.%fZ')\n", (3068, 3118), False, 'import datetime\n'), ((3366, 3395), 'boto3.client', 'boto3.client', (['"""stepfunctions"""'], {}), "('stepfunctions')\n", (3378, 3395), False, 'import boto3\n'), ((331, 352), 'qiskit.IBMQ.active_account', 'IBMQ.active_account', ([], {}), '()\n', (350, 352), False, 'from qiskit import IBMQ\n'), ((427, 458), 'qiskit.IBMQ.enable_account', 'IBMQ.enable_account', (['ibmq_token'], {}), '(ibmq_token)\n', (446, 458), False, 'from qiskit import IBMQ\n'), ((488, 548), 'qiskit.IBMQ.get_provider', 'IBMQ.get_provider', ([], {'hub': '"""ibm-q"""', 'group': '"""open"""', 'project': '"""main"""'}), "(hub='ibm-q', group='open', project='main')\n", (505, 548), False, 'from qiskit import IBMQ\n')] |
"""
@author: mkowalska
"""
import os
import numpy as np
from numpy.linalg import LinAlgError
import matplotlib.pyplot as plt
from figure_properties import *
import matplotlib.gridspec as gridspec
from kcsd import KCSD1D
import targeted_basis as tb
__abs_file__ = os.path.abspath(__file__)
def _html(r, g, b):
return "#{:02X}{:02X}{:02X}".format(r, g, b)
def stability_M(n_src, total_ele, ele_pos, pots, R_init=0.23):
"""
Investigates stability of reconstruction for different number of basis
sources
Parameters
----------
n_src: int
Number of basis sources.
total_ele: int
Number of electrodes.
ele_pos: numpy array
Electrodes positions.
pots: numpy array
Values of potentials at ele_pos.
R_init: float
Initial value of R parameter - width of basis source
Default: 0.23.
Returns
-------
obj_all: class object
eigenvalues: numpy array
Eigenvalues of k_pot matrix.
eigenvectors: numpy array
Eigen vectors of k_pot matrix.
"""
obj_all = []
eigenvectors = np.zeros((len(n_src), total_ele, total_ele))
eigenvalues = np.zeros((len(n_src), total_ele))
for i, value in enumerate(n_src):
pots = pots.reshape((len(ele_pos), 1))
obj = KCSD1D(ele_pos, pots, src_type='gauss', sigma=0.3, h=0.25,
gdx=0.01, n_src_init=n_src[i], ext_x=0, xmin=0, xmax=1,
R_init=R_init)
try:
eigenvalue, eigenvector = np.linalg.eigh(obj.k_pot +
obj.lambd *
np.identity
(obj.k_pot.shape[0]))
except LinAlgError:
raise LinAlgError('EVD is failing - try moving the electrodes'
'slightly')
idx = eigenvalue.argsort()[::-1]
eigenvalues[i] = eigenvalue[idx]
eigenvectors[i] = eigenvector[:, idx]
obj_all.append(obj)
return obj_all, eigenvalues, eigenvectors
def set_axis(ax, x, y, letter=None):
"""
Formats the plot's caption.
Parameters
----------
ax: Axes object.
x: float
X-position of caption.
y: float
Y-position of caption.
letter: string
Caption of the plot.
Default: None.
Returns
-------
ax: modyfied Axes object.
"""
ax.text(
x,
y,
letter,
fontsize=15,
weight='bold',
transform=ax.transAxes)
return ax
def generate_figure(csd_profile, R, MU, true_csd_xlims, total_ele, ele_lims,
noise=0, R_init=0.23):
"""
Generates figure for spectral structure decomposition.
Parameters
----------
csd_profile: function
Function to produce csd profile.
R: float
Thickness of the groundtruth source.
Default: 0.2.
MU: float
Central position of Gaussian source
Default: 0.25.
true_csd_xlims: list
Boundaries for ground truth space.
total_ele: int
Number of electrodes.
ele_lims: list
Electrodes limits.
noise: float
Determines the level of noise in the data.
Default: 0.
R_init: float
Initial value of R parameter - width of basis source
Default: 0.23.
Returns
-------
None
"""
csd_at, true_csd, ele_pos, pots, val = tb.simulate_data(csd_profile,
true_csd_xlims,
R, MU, total_ele,
ele_lims,
noise=noise)
n_src_M = [2, 4, 8, 16, 32, 64, 128, 256, 512]
OBJ_M, eigenval_M, eigenvec_M = stability_M(n_src_M,
total_ele, ele_pos, pots,
R_init=R_init)
plt_cord = [(2, 0), (2, 2), (2, 4),
(3, 0), (3, 2), (3, 4),
(4, 0), (4, 2), (4, 4),
(5, 0), (5, 2), (5, 4)]
letters = ['C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N']
BLACK = _html(0, 0, 0)
ORANGE = _html(230, 159, 0)
SKY_BLUE = _html(86, 180, 233)
GREEN = _html(0, 158, 115)
YELLOW = _html(240, 228, 66)
BLUE = _html(0, 114, 178)
VERMILION = _html(213, 94, 0)
PURPLE = _html(204, 121, 167)
colors = [BLUE, ORANGE, GREEN, PURPLE, VERMILION, SKY_BLUE, YELLOW, BLACK]
fig = plt.figure(figsize=(18, 16))
# heights = [1, 1, 1, 0.2, 1, 1, 1, 1]
heights = [4, 0.3, 1, 1, 1, 1]
markers = ['^', '.', '*', 'x', ',']
# linestyles = [':', '--', '-.', '-']
linestyles = ['-', '-', '-', '-']
src_idx = [0, 2, 3, 8]
gs = gridspec.GridSpec(6, 6, height_ratios=heights, hspace=0.3, wspace=0.6)
ax = fig.add_subplot(gs[0, :3])
for indx, i in enumerate(src_idx):
ax.plot(np.arange(1, total_ele + 1), eigenval_M[i],
linestyle=linestyles[indx], color=colors[indx],
marker=markers[indx], label='M='+str(n_src_M[i]),
markersize=10)
ht, lh = ax.get_legend_handles_labels()
set_axis(ax, -0.05, 1.05, letter='A')
ax.set_xlabel('Number of components')
ax.set_ylabel('Eigenvalues')
ax.set_yscale('log')
ax.set_ylim([1e-6, 1])
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)
ax = fig.add_subplot(gs[0, 3:])
ax.plot(n_src_M, eigenval_M[:, 0], marker='s', color='k', markersize=5,
linestyle=' ')
set_axis(ax, -0.05, 1.05, letter='B')
ax.set_xlabel('Number of basis sources')
ax.set_xscale('log')
ax.set_ylabel('Eigenvalues')
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)
for i in range(OBJ_M[0].k_interp_cross.shape[1]):
ax = fig.add_subplot(gs[plt_cord[i][0],
plt_cord[i][1]:plt_cord[i][1]+2])
for idx, j in enumerate(src_idx):
ax.plot(np.linspace(0, 1, 100), np.dot(OBJ_M[j].k_interp_cross,
eigenvec_M[j, :, i]),
linestyle=linestyles[idx], color=colors[idx],
label='M='+str(n_src_M[j]), lw=2)
ax.text(0.5, 1., r"$\tilde{K}\cdot{v_{{%(i)d}}}$" % {'i': i+1},
horizontalalignment='center', transform=ax.transAxes,
fontsize=20)
set_axis(ax, -0.10, 1.1, letter=letters[i])
if i < 9:
ax.get_xaxis().set_visible(False)
ax.spines['bottom'].set_visible(False)
else:
ax.set_xlabel('Depth ($mm$)')
if i % 3 == 0:
ax.set_ylabel('CSD ($mA/mm$)')
ax.yaxis.set_label_coords(-0.18, 0.5)
ax.ticklabel_format(style='sci', axis='y', scilimits=((0.0, 0.0)))
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)
fig.legend(ht, lh, loc='lower center', ncol=5, frameon=False)
fig.savefig(os.path.join('vectors_' + '_noise_' +
str(noise) + 'R0_2' + '.png'), dpi=300)
plt.show()
if __name__ == '__main__':
ELE_LIMS = [0, 1.]
TRUE_CSD_XLIMS = [0., 1.]
TOTAL_ELE = 12
CSD_PROFILE = tb.csd_profile
R = 0.2
MU = 0.25
R_init = 0.2
generate_figure(CSD_PROFILE, R, MU, TRUE_CSD_XLIMS, TOTAL_ELE, ELE_LIMS,
noise=None, R_init=R_init)
| [
"numpy.identity",
"numpy.linalg.LinAlgError",
"targeted_basis.simulate_data",
"kcsd.KCSD1D",
"matplotlib.pyplot.figure",
"matplotlib.gridspec.GridSpec",
"numpy.linspace",
"numpy.dot",
"os.path.abspath",
"numpy.arange",
"matplotlib.pyplot.show"
] | [((265, 290), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (280, 290), False, 'import os\n'), ((3478, 3568), 'targeted_basis.simulate_data', 'tb.simulate_data', (['csd_profile', 'true_csd_xlims', 'R', 'MU', 'total_ele', 'ele_lims'], {'noise': 'noise'}), '(csd_profile, true_csd_xlims, R, MU, total_ele, ele_lims,\n noise=noise)\n', (3494, 3568), True, 'import targeted_basis as tb\n'), ((4635, 4663), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(18, 16)'}), '(figsize=(18, 16))\n', (4645, 4663), True, 'import matplotlib.pyplot as plt\n'), ((4897, 4967), 'matplotlib.gridspec.GridSpec', 'gridspec.GridSpec', (['(6)', '(6)'], {'height_ratios': 'heights', 'hspace': '(0.3)', 'wspace': '(0.6)'}), '(6, 6, height_ratios=heights, hspace=0.3, wspace=0.6)\n', (4914, 4967), True, 'import matplotlib.gridspec as gridspec\n'), ((7305, 7315), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7313, 7315), True, 'import matplotlib.pyplot as plt\n'), ((1296, 1429), 'kcsd.KCSD1D', 'KCSD1D', (['ele_pos', 'pots'], {'src_type': '"""gauss"""', 'sigma': '(0.3)', 'h': '(0.25)', 'gdx': '(0.01)', 'n_src_init': 'n_src[i]', 'ext_x': '(0)', 'xmin': '(0)', 'xmax': '(1)', 'R_init': 'R_init'}), "(ele_pos, pots, src_type='gauss', sigma=0.3, h=0.25, gdx=0.01,\n n_src_init=n_src[i], ext_x=0, xmin=0, xmax=1, R_init=R_init)\n", (1302, 1429), False, 'from kcsd import KCSD1D\n'), ((5060, 5087), 'numpy.arange', 'np.arange', (['(1)', '(total_ele + 1)'], {}), '(1, total_ele + 1)\n', (5069, 5087), True, 'import numpy as np\n'), ((1797, 1862), 'numpy.linalg.LinAlgError', 'LinAlgError', (['"""EVD is failing - try moving the electrodesslightly"""'], {}), "('EVD is failing - try moving the electrodesslightly')\n", (1808, 1862), False, 'from numpy.linalg import LinAlgError\n'), ((6158, 6180), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', '(100)'], {}), '(0, 1, 100)\n', (6169, 6180), True, 'import numpy as np\n'), ((6182, 6234), 'numpy.dot', 'np.dot', (['OBJ_M[j].k_interp_cross', 'eigenvec_M[j, :, i]'], {}), '(OBJ_M[j].k_interp_cross, eigenvec_M[j, :, i])\n', (6188, 6234), True, 'import numpy as np\n'), ((1664, 1695), 'numpy.identity', 'np.identity', (['obj.k_pot.shape[0]'], {}), '(obj.k_pot.shape[0])\n', (1675, 1695), True, 'import numpy as np\n')] |
import re
COMMENT_REGEX = re.compile(r'(checkov:skip=|bridgecrew:skip=) *([A-Z_\d]+)(:[^\n]+)?')
| [
"re.compile"
] | [((27, 98), 're.compile', 're.compile', (['"""(checkov:skip=|bridgecrew:skip=) *([A-Z_\\\\d]+)(:[^\\\\n]+)?"""'], {}), "('(checkov:skip=|bridgecrew:skip=) *([A-Z_\\\\d]+)(:[^\\\\n]+)?')\n", (37, 98), False, 'import re\n')] |
# Generated by Django 2.1.4 on 2020-04-20 23:20
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('plugins', '0033_pluginparameter_short_flag'),
]
operations = [
migrations.AlterField(
model_name='computeresource',
name='compute_resource_identifier',
field=models.CharField(max_length=100, unique=True),
),
]
| [
"django.db.models.CharField"
] | [((374, 419), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'unique': '(True)'}), '(max_length=100, unique=True)\n', (390, 419), False, 'from django.db import migrations, models\n')] |
'''
Created on Dec 21, 2014
@author: Milos
'''
'''
Forma za eventualna prosirenja djangovog user-a
'''
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404
from django.views.generic.detail import DetailView
from django.views.generic.edit import UpdateView, CreateView
from tasks.models import UserExtend
class UserExtendForm(forms.ModelForm):
class Meta:
model = UserExtend
fields = ['picture']
class RegistrationForm(UserCreationForm):
email = forms.EmailField(required=True)
first_name = forms.CharField(required = False)
last_name = forms.CharField(required = False)
class Meta:
model = UserExtend
fields = ['first_name','last_name', 'username', 'email', 'password1', 'password2','picture']
def save(self, commit=True):
user = super(RegistrationForm, self).save(commit = False)
user.email = self.cleaned_data['email']
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.picture = self.cleaned_data['picture']
if commit:
user.save()
return user
def __init__(self, *args, **kwargs):
super(RegistrationForm, self).__init__(*args, **kwargs)
self.fields["first_name"].widget.attrs['class']='form-control'
self.fields["last_name"].widget.attrs['class']='form-control'
self.fields["username"].widget.attrs['class']='form-control'
self.fields["email"].widget.attrs['class']='form-control'
self.fields["password1"].widget.attrs['class']='form-control'
self.fields["password2"].widget.attrs['class']='form-control'
self.fields["picture"].widget.attrs['class']='form-control'
class UserForm(forms.ModelForm):
class Meta:
model = UserExtend
fields = ['first_name','last_name', 'username', 'email', 'picture']
def __init__(self, *args, **kwargs):
super(UserForm, self).__init__(*args, **kwargs)
self.fields["first_name"].widget.attrs['class']='form-control'
self.fields["last_name"].widget.attrs['class']='form-control'
self.fields["username"].widget.attrs['class']='form-control'
self.fields["email"].widget.attrs['class']='form-control'
self.fields["picture"].widget.attrs['class']='form-control'
class UserCreate(CreateView):
model = UserExtend
template_name = 'tasks/register.html'
form_class = RegistrationForm
def get_success_url(self):
return reverse('home')
def get_context_data(self, **kwargs):
context = super(UserCreate, self).get_context_data(**kwargs)
#KeyError
try:
context["back"] = self.request.META["HTTP_REFERER"]
except(KeyError):
context["back"]="/"
return context
class UserUpdate(UpdateView):
model = UserExtend
template_name = 'tasks/uupdate.html'
form_class = UserForm
def get_success_url(self):
return reverse('udetail')
def get_context_data(self, **kwargs):
context = super(UserUpdate, self).get_context_data(**kwargs)
#KeyError
try:
context["back"] = self.request.META["HTTP_REFERER"]
except(KeyError):
context["back"]="/"
return context
class DetailUser(DetailView):
model = UserExtend
template_name = 'tasks/udetail.html'
context_object_name='user'
def get_object(self):
return get_object_or_404(User, pk=self.request.user.pk)
def get_context_data(self, **kwargs):
context = super(DetailUser, self).get_context_data(**kwargs)
#KeyError
try:
context["back"] = self.request.META["HTTP_REFERER"]
except(KeyError):
context["back"]="/"
#context["user"] = self.request.user
return context | [
"django.shortcuts.get_object_or_404",
"django.forms.EmailField",
"django.forms.CharField",
"django.core.urlresolvers.reverse"
] | [((656, 687), 'django.forms.EmailField', 'forms.EmailField', ([], {'required': '(True)'}), '(required=True)\n', (672, 687), False, 'from django import forms\n'), ((705, 736), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(False)'}), '(required=False)\n', (720, 736), False, 'from django import forms\n'), ((755, 786), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(False)'}), '(required=False)\n', (770, 786), False, 'from django import forms\n'), ((2699, 2714), 'django.core.urlresolvers.reverse', 'reverse', (['"""home"""'], {}), "('home')\n", (2706, 2714), False, 'from django.core.urlresolvers import reverse\n'), ((3202, 3220), 'django.core.urlresolvers.reverse', 'reverse', (['"""udetail"""'], {}), "('udetail')\n", (3209, 3220), False, 'from django.core.urlresolvers import reverse\n'), ((3709, 3757), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['User'], {'pk': 'self.request.user.pk'}), '(User, pk=self.request.user.pk)\n', (3726, 3757), False, 'from django.shortcuts import get_object_or_404\n')] |
import os
import sys
from loguru import logger
from rich.console import Console
console_args = {}
if "pytest" in sys.modules:
console_args["width"] = 120
console = Console(**console_args)
cpu_count = None
def escape_logging(s):
return str(s).replace("<", "\\<").replace("{", "{{").replace("}", "}}")
def CPUs():
"""
Detects the number of CPUs on a system. Cribbed from pp.
"""
global cpu_count
if cpu_count is None:
cpu_count = 1 # default
# Linux, Unix and MacOS:
if hasattr(os, "sysconf"):
if "SC_NPROCESSORS_ONLN" in os.sysconf_names:
# Linux & Unix:
ncpus = os.sysconf("SC_NPROCESSORS_ONLN")
if isinstance(ncpus, int) and ncpus > 0:
cpu_count = ncpus
else: # OSX: pragma: no cover
cpu_count = int(
os.popen2("sysctl -n hw.ncpu")[1].read()
) # pragma: no cover
# Windows:
if "NUMBER_OF_PROCESSORS" in os.environ: # pragma: no cover
ncpus = int(os.environ["NUMBER_OF_PROCESSORS"])
if ncpus > 0:
cpu_count = ncpus
return cpu_count
def job_or_filename(job_or_filename, invariant_class=None):
"""Take a filename, or a job. Return Path(filename), dependency-for-that-file
ie. either the job, or a invariant_class (default: FileInvariant)"""
from .jobs import Job, FileInvariant
from pathlib import Path
if invariant_class is None: # pragma: no cover
invariant_class = FileInvariant
if isinstance(job_or_filename, Job):
filename = job_or_filename.files[0]
deps = [job_or_filename]
elif job_or_filename is not None:
filename = Path(job_or_filename)
deps = [invariant_class(filename)]
else:
filename = None
deps = []
return filename, deps
def assert_uniqueness_of_object(
object_with_name_attribute, pipegraph=None, also_check=None
):
"""Makes certain there is only one object with this class & .name.
This is necessary so the pipegraph jobs assign their data only to the
objects you're actually working with."""
if pipegraph is None: # pragma: no branch
from pypipegraph2 import global_pipegraph
pipegraph = global_pipegraph
if object_with_name_attribute.name.find("/") != -1:
raise ValueError(
"Names must not contain /, it confuses the directory calculations"
)
if pipegraph is None: # pragma: no cover
return
if not hasattr(pipegraph, "object_uniquifier"):
pipegraph.object_uniquifier = {}
typ = object_with_name_attribute.__class__
if typ not in pipegraph.object_uniquifier:
pipegraph.object_uniquifier[typ] = {}
if object_with_name_attribute.name in pipegraph.object_uniquifier[typ]:
raise ValueError(
"Doublicate object: %s, %s" % (typ, object_with_name_attribute.name)
)
if also_check:
if not isinstance(also_check, list):
also_check = [also_check]
for other_typ in also_check:
if (
other_typ in pipegraph.object_uniquifier
and object_with_name_attribute.name
in pipegraph.object_uniquifier[other_typ]
):
raise ValueError(
"Doublicate object: %s, %s"
% (other_typ, object_with_name_attribute.name)
)
object_with_name_attribute.unique_id = len(pipegraph.object_uniquifier[typ])
pipegraph.object_uniquifier[typ][object_with_name_attribute.name] = True
def flatten_jobs(j):
"""Take an arbitrary deeply nested list of lists of jobs
and return just the jobs"""
from .jobs import Job
if isinstance(j, Job):
yield j
else:
for sj in j:
yield from flatten_jobs(sj)
do_jobtrace_log = False
def log_warning(msg):
logger.opt(depth=1).warning(msg)
def log_error(msg):
logger.opt(depth=1).error(msg)
def log_info(msg):
logger.opt(depth=1).info(msg)
def log_debug(msg):
logger.opt(depth=1).debug(msg)
def log_job_trace(msg):
if do_jobtrace_log:
logger.opt(depth=1).log("JT", msg)
def log_trace(msg):
if do_jobtrace_log: # pragma: no cover
logger.opt(depth=1).trace(msg)
def shorten_job_id(job_id):
dotdotcount = job_id.count(":::")
if dotdotcount:
return job_id[: job_id.find(":::") + 3] + "+" + str(dotdotcount)
else:
return job_id
def pretty_log_errors(func):
"""capture exceptions (on a function outside of ppg)
and format it with our fancy local logging exception logger
"""
def inner(*args, **kwargs):
try:
func(*args, **kwargs)
except Exception as e:
exception_type, exception_value, tb = sys.exc_info()
captured_tb = ppg2.ppg_traceback.Trace(exception_type, exception_value, tb)
logger.error(
captured_tb._format_rich_traceback_fallback(
include_locals=True, include_formating=True
)
)
raise
return inner
| [
"os.popen2",
"pathlib.Path",
"rich.console.Console",
"sys.exc_info",
"os.sysconf",
"loguru.logger.opt"
] | [((169, 192), 'rich.console.Console', 'Console', ([], {}), '(**console_args)\n', (176, 192), False, 'from rich.console import Console\n'), ((1754, 1775), 'pathlib.Path', 'Path', (['job_or_filename'], {}), '(job_or_filename)\n', (1758, 1775), False, 'from pathlib import Path\n'), ((3958, 3977), 'loguru.logger.opt', 'logger.opt', ([], {'depth': '(1)'}), '(depth=1)\n', (3968, 3977), False, 'from loguru import logger\n'), ((4017, 4036), 'loguru.logger.opt', 'logger.opt', ([], {'depth': '(1)'}), '(depth=1)\n', (4027, 4036), False, 'from loguru import logger\n'), ((4073, 4092), 'loguru.logger.opt', 'logger.opt', ([], {'depth': '(1)'}), '(depth=1)\n', (4083, 4092), False, 'from loguru import logger\n'), ((4129, 4148), 'loguru.logger.opt', 'logger.opt', ([], {'depth': '(1)'}), '(depth=1)\n', (4139, 4148), False, 'from loguru import logger\n'), ((665, 698), 'os.sysconf', 'os.sysconf', (['"""SC_NPROCESSORS_ONLN"""'], {}), "('SC_NPROCESSORS_ONLN')\n", (675, 698), False, 'import os\n'), ((4218, 4237), 'loguru.logger.opt', 'logger.opt', ([], {'depth': '(1)'}), '(depth=1)\n', (4228, 4237), False, 'from loguru import logger\n'), ((4327, 4346), 'loguru.logger.opt', 'logger.opt', ([], {'depth': '(1)'}), '(depth=1)\n', (4337, 4346), False, 'from loguru import logger\n'), ((4872, 4886), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (4884, 4886), False, 'import sys\n'), ((890, 920), 'os.popen2', 'os.popen2', (['"""sysctl -n hw.ncpu"""'], {}), "('sysctl -n hw.ncpu')\n", (899, 920), False, 'import os\n')] |
from keras.models import Model, Sequential
from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation
import numpy as np
from os import listdir,path
from os.path import isfile, join
from PIL import Image
from keras.preprocessing.image import load_img, save_img, img_to_array
from keras.applications.imagenet_utils import preprocess_input
from keras.preprocessing import image
# import matplotlib.pyplot as plt
import cv2 as cv
import boto3
# import sounddevice as sd
model = Sequential()
model.add(ZeroPadding2D((1,1),input_shape=(224,224, 3)))
model.add(Convolution2D(64, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(128, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(128, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(256, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(256, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(256, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(512, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(512, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(512, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(512, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(512, (3, 3), activation='relu'))
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(512, (3, 3), activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
model.add(Convolution2D(4096, (7, 7), activation='relu'))
model.add(Dropout(0.5))
model.add(Convolution2D(4096, (1, 1), activation='relu'))
model.add(Dropout(0.5))
model.add(Convolution2D(2622, (1, 1)))
model.add(Flatten())
model.add(Activation('softmax'))
model.load_weights('vgg_face_weights.h5')
vgg_face_descriptor = Model(inputs=model.layers[0].input, outputs=model.layers[-2].output)
def preprocess_image(image_path):
img = load_img(image_path, target_size=(224, 224))
img = img_to_array(img)
img = np.expand_dims(img, axis=0)
img = preprocess_input(img)
return img
def preprocess_loaded_image(img):
img = img_to_array(img)
img = np.expand_dims(img, axis=0)
img = preprocess_input(img)
return img
def findCosineSimilarity(source_representation, target_representation):
a = np.matmul(np.transpose(source_representation), target_representation)
b = np.sum(np.multiply(source_representation, source_representation))
c = np.sum(np.multiply(target_representation, target_representation))
return 1 - (a / (np.sqrt(b) * np.sqrt(c)))
def findEuclideanDistance(source_representation, target_representation):
euclidean_distance = source_representation - target_representation
euclidean_distance = np.sum(np.multiply(euclidean_distance, euclidean_distance))
euclidean_distance = np.sqrt(euclidean_distance)
return euclidean_distance
face_cascade = cv.CascadeClassifier(join('haarcascades','haarcascade_frontalface_default.xml'))
faces_dir='faces'
faces={}
face_imgs = [f for f in listdir(faces_dir) if isfile(join(faces_dir, f))]
for face_file in face_imgs:
face_label=path.splitext(face_file)[0]
print(face_label)
face_representation= vgg_face_descriptor.predict(preprocess_image(join(faces_dir,face_file)))[0,:]
faces[face_label]=face_representation
def detect_face(img):
gray = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
faces = face_cascade.detectMultiScale(gray, 1.3, 5)
if(len(faces)>0):
(x,y,w,h)=faces[0]
roi = img[y:y+h, x:x+w]
return roi
vc = cv.VideoCapture(0)
if vc.isOpened():
is_capturing, frame = vc.read()
frame = cv.cvtColor(frame, cv.COLOR_BGR2RGB)
vc.release()
face=detect_face(frame)
# plt.imshow(face)
face=cv.resize(face,(224,224))
face = face[...,::-1]
face_representation= vgg_face_descriptor.predict(preprocess_loaded_image(face))[0,:]
min_sim=2
candidate=''
for key in faces.keys():
candidate_representation=faces[key]
cosine_similarity = findCosineSimilarity(face_representation, candidate_representation) # Should be less then 0.40
euclidean_distance = findEuclideanDistance(face_representation, candidate_representation) #Less then 120
print("Candidate {} CosineSimularity: {}, EuclideanDistance: {}" .format(key, cosine_similarity, euclidean_distance))
if cosine_similarity<min_sim:
min_sim=cosine_similarity
candidate=key
print(candidate)
# speak('Hello '+candidate+'. May I help you?')
# def speak(text):
# response = polly_client.synthesize_speech(VoiceId='Brian',OutputFormat='pcm',SampleRate="8000",Text = text)
# stream=response['AudioStream'].read()
# sound=np.frombuffer(stream,dtype=np.int16)
# sd.play(sound, 8000) | [
"keras.preprocessing.image.img_to_array",
"numpy.sqrt",
"keras.layers.Activation",
"numpy.multiply",
"os.listdir",
"keras.models.Model",
"keras.applications.imagenet_utils.preprocess_input",
"keras.layers.ZeroPadding2D",
"keras.layers.Convolution2D",
"keras.layers.Flatten",
"keras.layers.MaxPool... | [((529, 541), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (539, 541), False, 'from keras.models import Model, Sequential\n'), ((2285, 2353), 'keras.models.Model', 'Model', ([], {'inputs': 'model.layers[0].input', 'outputs': 'model.layers[-2].output'}), '(inputs=model.layers[0].input, outputs=model.layers[-2].output)\n', (2290, 2353), False, 'from keras.models import Model, Sequential\n'), ((4037, 4055), 'cv2.VideoCapture', 'cv.VideoCapture', (['(0)'], {}), '(0)\n', (4052, 4055), True, 'import cv2 as cv\n'), ((552, 600), 'keras.layers.ZeroPadding2D', 'ZeroPadding2D', (['(1, 1)'], {'input_shape': '(224, 224, 3)'}), '((1, 1), input_shape=(224, 224, 3))\n', (565, 600), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((609, 653), 'keras.layers.Convolution2D', 'Convolution2D', (['(64)', '(3, 3)'], {'activation': '"""relu"""'}), "(64, (3, 3), activation='relu')\n", (622, 653), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((665, 686), 'keras.layers.ZeroPadding2D', 'ZeroPadding2D', (['(1, 1)'], {}), '((1, 1))\n', (678, 686), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((697, 741), 'keras.layers.Convolution2D', 'Convolution2D', (['(64)', '(3, 3)'], {'activation': '"""relu"""'}), "(64, (3, 3), activation='relu')\n", (710, 741), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((753, 789), 'keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2, 2)'], {'strides': '(2, 2)'}), '((2, 2), strides=(2, 2))\n', (765, 789), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((801, 822), 'keras.layers.ZeroPadding2D', 'ZeroPadding2D', (['(1, 1)'], {}), '((1, 1))\n', (814, 822), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((833, 878), 'keras.layers.Convolution2D', 'Convolution2D', (['(128)', '(3, 3)'], {'activation': '"""relu"""'}), "(128, (3, 3), activation='relu')\n", (846, 878), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((890, 911), 'keras.layers.ZeroPadding2D', 'ZeroPadding2D', (['(1, 1)'], {}), '((1, 1))\n', (903, 911), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((922, 967), 'keras.layers.Convolution2D', 'Convolution2D', (['(128)', '(3, 3)'], {'activation': '"""relu"""'}), "(128, (3, 3), activation='relu')\n", (935, 967), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((979, 1015), 'keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2, 2)'], {'strides': '(2, 2)'}), '((2, 2), strides=(2, 2))\n', (991, 1015), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1027, 1048), 'keras.layers.ZeroPadding2D', 'ZeroPadding2D', (['(1, 1)'], {}), '((1, 1))\n', (1040, 1048), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1059, 1104), 'keras.layers.Convolution2D', 'Convolution2D', (['(256)', '(3, 3)'], {'activation': '"""relu"""'}), "(256, (3, 3), activation='relu')\n", (1072, 1104), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1116, 1137), 'keras.layers.ZeroPadding2D', 'ZeroPadding2D', (['(1, 1)'], {}), '((1, 1))\n', (1129, 1137), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1148, 1193), 'keras.layers.Convolution2D', 'Convolution2D', (['(256)', '(3, 3)'], {'activation': '"""relu"""'}), "(256, (3, 3), activation='relu')\n", (1161, 1193), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1205, 1226), 'keras.layers.ZeroPadding2D', 'ZeroPadding2D', (['(1, 1)'], {}), '((1, 1))\n', (1218, 1226), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1237, 1282), 'keras.layers.Convolution2D', 'Convolution2D', (['(256)', '(3, 3)'], {'activation': '"""relu"""'}), "(256, (3, 3), activation='relu')\n", (1250, 1282), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1294, 1330), 'keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2, 2)'], {'strides': '(2, 2)'}), '((2, 2), strides=(2, 2))\n', (1306, 1330), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1342, 1363), 'keras.layers.ZeroPadding2D', 'ZeroPadding2D', (['(1, 1)'], {}), '((1, 1))\n', (1355, 1363), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1374, 1419), 'keras.layers.Convolution2D', 'Convolution2D', (['(512)', '(3, 3)'], {'activation': '"""relu"""'}), "(512, (3, 3), activation='relu')\n", (1387, 1419), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1431, 1452), 'keras.layers.ZeroPadding2D', 'ZeroPadding2D', (['(1, 1)'], {}), '((1, 1))\n', (1444, 1452), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1463, 1508), 'keras.layers.Convolution2D', 'Convolution2D', (['(512)', '(3, 3)'], {'activation': '"""relu"""'}), "(512, (3, 3), activation='relu')\n", (1476, 1508), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1520, 1541), 'keras.layers.ZeroPadding2D', 'ZeroPadding2D', (['(1, 1)'], {}), '((1, 1))\n', (1533, 1541), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1552, 1597), 'keras.layers.Convolution2D', 'Convolution2D', (['(512)', '(3, 3)'], {'activation': '"""relu"""'}), "(512, (3, 3), activation='relu')\n", (1565, 1597), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1609, 1645), 'keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2, 2)'], {'strides': '(2, 2)'}), '((2, 2), strides=(2, 2))\n', (1621, 1645), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1657, 1678), 'keras.layers.ZeroPadding2D', 'ZeroPadding2D', (['(1, 1)'], {}), '((1, 1))\n', (1670, 1678), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1689, 1734), 'keras.layers.Convolution2D', 'Convolution2D', (['(512)', '(3, 3)'], {'activation': '"""relu"""'}), "(512, (3, 3), activation='relu')\n", (1702, 1734), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1746, 1767), 'keras.layers.ZeroPadding2D', 'ZeroPadding2D', (['(1, 1)'], {}), '((1, 1))\n', (1759, 1767), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1778, 1823), 'keras.layers.Convolution2D', 'Convolution2D', (['(512)', '(3, 3)'], {'activation': '"""relu"""'}), "(512, (3, 3), activation='relu')\n", (1791, 1823), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1835, 1856), 'keras.layers.ZeroPadding2D', 'ZeroPadding2D', (['(1, 1)'], {}), '((1, 1))\n', (1848, 1856), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1867, 1912), 'keras.layers.Convolution2D', 'Convolution2D', (['(512)', '(3, 3)'], {'activation': '"""relu"""'}), "(512, (3, 3), activation='relu')\n", (1880, 1912), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1924, 1960), 'keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2, 2)'], {'strides': '(2, 2)'}), '((2, 2), strides=(2, 2))\n', (1936, 1960), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((1972, 2018), 'keras.layers.Convolution2D', 'Convolution2D', (['(4096)', '(7, 7)'], {'activation': '"""relu"""'}), "(4096, (7, 7), activation='relu')\n", (1985, 2018), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((2030, 2042), 'keras.layers.Dropout', 'Dropout', (['(0.5)'], {}), '(0.5)\n', (2037, 2042), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((2054, 2100), 'keras.layers.Convolution2D', 'Convolution2D', (['(4096)', '(1, 1)'], {'activation': '"""relu"""'}), "(4096, (1, 1), activation='relu')\n", (2067, 2100), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((2112, 2124), 'keras.layers.Dropout', 'Dropout', (['(0.5)'], {}), '(0.5)\n', (2119, 2124), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((2136, 2163), 'keras.layers.Convolution2D', 'Convolution2D', (['(2622)', '(1, 1)'], {}), '(2622, (1, 1))\n', (2149, 2163), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((2175, 2184), 'keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (2182, 2184), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((2196, 2217), 'keras.layers.Activation', 'Activation', (['"""softmax"""'], {}), "('softmax')\n", (2206, 2217), False, 'from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation\n'), ((2399, 2443), 'keras.preprocessing.image.load_img', 'load_img', (['image_path'], {'target_size': '(224, 224)'}), '(image_path, target_size=(224, 224))\n', (2407, 2443), False, 'from keras.preprocessing.image import load_img, save_img, img_to_array\n'), ((2454, 2471), 'keras.preprocessing.image.img_to_array', 'img_to_array', (['img'], {}), '(img)\n', (2466, 2471), False, 'from keras.preprocessing.image import load_img, save_img, img_to_array\n'), ((2482, 2509), 'numpy.expand_dims', 'np.expand_dims', (['img'], {'axis': '(0)'}), '(img, axis=0)\n', (2496, 2509), True, 'import numpy as np\n'), ((2520, 2541), 'keras.applications.imagenet_utils.preprocess_input', 'preprocess_input', (['img'], {}), '(img)\n', (2536, 2541), False, 'from keras.applications.imagenet_utils import preprocess_input\n'), ((2602, 2619), 'keras.preprocessing.image.img_to_array', 'img_to_array', (['img'], {}), '(img)\n', (2614, 2619), False, 'from keras.preprocessing.image import load_img, save_img, img_to_array\n'), ((2630, 2657), 'numpy.expand_dims', 'np.expand_dims', (['img'], {'axis': '(0)'}), '(img, axis=0)\n', (2644, 2657), True, 'import numpy as np\n'), ((2668, 2689), 'keras.applications.imagenet_utils.preprocess_input', 'preprocess_input', (['img'], {}), '(img)\n', (2684, 2689), False, 'from keras.applications.imagenet_utils import preprocess_input\n'), ((3306, 3333), 'numpy.sqrt', 'np.sqrt', (['euclidean_distance'], {}), '(euclidean_distance)\n', (3313, 3333), True, 'import numpy as np\n'), ((3401, 3460), 'os.path.join', 'join', (['"""haarcascades"""', '"""haarcascade_frontalface_default.xml"""'], {}), "('haarcascades', 'haarcascade_frontalface_default.xml')\n", (3405, 3460), False, 'from os.path import isfile, join\n'), ((3838, 3873), 'cv2.cvtColor', 'cv.cvtColor', (['img', 'cv.COLOR_BGR2GRAY'], {}), '(img, cv.COLOR_BGR2GRAY)\n', (3849, 3873), True, 'import cv2 as cv\n'), ((4124, 4160), 'cv2.cvtColor', 'cv.cvtColor', (['frame', 'cv.COLOR_BGR2RGB'], {}), '(frame, cv.COLOR_BGR2RGB)\n', (4135, 4160), True, 'import cv2 as cv\n'), ((4238, 4265), 'cv2.resize', 'cv.resize', (['face', '(224, 224)'], {}), '(face, (224, 224))\n', (4247, 4265), True, 'import cv2 as cv\n'), ((2796, 2831), 'numpy.transpose', 'np.transpose', (['source_representation'], {}), '(source_representation)\n', (2808, 2831), True, 'import numpy as np\n'), ((2871, 2928), 'numpy.multiply', 'np.multiply', (['source_representation', 'source_representation'], {}), '(source_representation, source_representation)\n', (2882, 2928), True, 'import numpy as np\n'), ((2945, 3002), 'numpy.multiply', 'np.multiply', (['target_representation', 'target_representation'], {}), '(target_representation, target_representation)\n', (2956, 3002), True, 'import numpy as np\n'), ((3228, 3279), 'numpy.multiply', 'np.multiply', (['euclidean_distance', 'euclidean_distance'], {}), '(euclidean_distance, euclidean_distance)\n', (3239, 3279), True, 'import numpy as np\n'), ((3515, 3533), 'os.listdir', 'listdir', (['faces_dir'], {}), '(faces_dir)\n', (3522, 3533), False, 'from os import listdir, path\n'), ((3609, 3633), 'os.path.splitext', 'path.splitext', (['face_file'], {}), '(face_file)\n', (3622, 3633), False, 'from os import listdir, path\n'), ((3544, 3562), 'os.path.join', 'join', (['faces_dir', 'f'], {}), '(faces_dir, f)\n', (3548, 3562), False, 'from os.path import isfile, join\n'), ((3025, 3035), 'numpy.sqrt', 'np.sqrt', (['b'], {}), '(b)\n', (3032, 3035), True, 'import numpy as np\n'), ((3038, 3048), 'numpy.sqrt', 'np.sqrt', (['c'], {}), '(c)\n', (3045, 3048), True, 'import numpy as np\n'), ((3729, 3755), 'os.path.join', 'join', (['faces_dir', 'face_file'], {}), '(faces_dir, face_file)\n', (3733, 3755), False, 'from os.path import isfile, join\n')] |
"""Module with view functions that serve each uri."""
from datetime import datetime
from learning_journal.models.mymodel import Journal
from learning_journal.security import is_authenticated
from pyramid.httpexceptions import HTTPFound, HTTPNotFound
from pyramid.security import NO_PERMISSION_REQUIRED, forget, remember
from pyramid.view import view_config
@view_config(route_name='home', renderer='learning_journal:templates/index.jinja2', permission='view')
def list_view(request):
"""Pass response to send to index.html page with all entries."""
entries = request.dbsession.query(Journal).all()
entries = [entry.to_dict() for entry in entries]
return {
'entries': entries
}
@view_config(route_name='detail', renderer='learning_journal:templates/detail.jinja2', permission='view')
def detail_view(request):
"""Pass response to send to detail page for individual entries."""
target_id = int(request.matchdict['id'])
entry = request.dbsession.query(Journal).get(target_id)
if not entry:
raise HTTPNotFound
if request.method == 'GET':
return {
'entry': entry.to_dict()
}
if request.method == "POST":
return HTTPFound(request.route_url('edit', id=entry.id))
@view_config(route_name='create', renderer='learning_journal:templates/new.jinja2', permission='secret')
def create_view(request):
"""Pass response to send to new page."""
if request.method == 'GET':
return{
'textarea': 'New Entry'
}
if request.method == 'POST':
new_entry = Journal(
title=request.POST['title'],
text=request.POST['text'],
created=datetime.now()
)
request.dbsession.add(new_entry)
return HTTPFound(request.route_url('home'))
@view_config(route_name='edit', renderer='learning_journal:templates/edit.jinja2', permission='secret')
def update_view(request):
"""Pass response to send to edit page."""
target_id = int(request.matchdict['id'])
entry = request.dbsession.query(Journal).get(target_id)
if not entry:
raise HTTPNotFound
if request.method == 'GET':
return {
'entry': entry.to_dict()
}
if request.method == 'POST' and request.POST:
entry.title = request.POST['title']
entry.text = request.POST['body']
entry.created = datetime.now()
request.dbsession.add(entry)
request.dbsession.flush()
return HTTPFound(request.route_url('detail', id=entry.id))
@view_config(route_name='delete', permission='secret')
def delete_view(request):
"""Delete a specific entry."""
target_id = int(request.matchdict['id'])
entry = request.dbsession.query(Journal).get(target_id)
if entry:
request.dbsession.delete(entry)
return HTTPFound(request.route_url('home'))
raise HTTPNotFound
@view_config(
route_name='login', renderer="learning_journal:templates/login.jinja2", permission=NO_PERMISSION_REQUIRED
)
def login(request):
"""Login view config to authenticate username/password."""
if request.authenticated_userid:
return HTTPFound(request.route_url('home'))
if request.method == "GET":
return {}
if request.method == "POST":
username = request.POST['username']
password = request.POST['password']
if is_authenticated(username, password):
headers = remember(request, username)
return HTTPFound(request.route_url('home'), headers=headers)
return {
'error': 'Invalid username/password combination.'
}
@view_config(route_name='logout', permission=NO_PERMISSION_REQUIRED)
def logout(request):
"""Logout view config to redirect to home view."""
headers = forget(request)
return HTTPFound(request.route_url('home'), headers=headers)
| [
"learning_journal.security.is_authenticated",
"pyramid.security.forget",
"datetime.datetime.now",
"pyramid.view.view_config",
"pyramid.security.remember"
] | [((365, 471), 'pyramid.view.view_config', 'view_config', ([], {'route_name': '"""home"""', 'renderer': '"""learning_journal:templates/index.jinja2"""', 'permission': '"""view"""'}), "(route_name='home', renderer=\n 'learning_journal:templates/index.jinja2', permission='view')\n", (376, 471), False, 'from pyramid.view import view_config\n'), ((715, 824), 'pyramid.view.view_config', 'view_config', ([], {'route_name': '"""detail"""', 'renderer': '"""learning_journal:templates/detail.jinja2"""', 'permission': '"""view"""'}), "(route_name='detail', renderer=\n 'learning_journal:templates/detail.jinja2', permission='view')\n", (726, 824), False, 'from pyramid.view import view_config\n'), ((1264, 1372), 'pyramid.view.view_config', 'view_config', ([], {'route_name': '"""create"""', 'renderer': '"""learning_journal:templates/new.jinja2"""', 'permission': '"""secret"""'}), "(route_name='create', renderer=\n 'learning_journal:templates/new.jinja2', permission='secret')\n", (1275, 1372), False, 'from pyramid.view import view_config\n'), ((1816, 1923), 'pyramid.view.view_config', 'view_config', ([], {'route_name': '"""edit"""', 'renderer': '"""learning_journal:templates/edit.jinja2"""', 'permission': '"""secret"""'}), "(route_name='edit', renderer=\n 'learning_journal:templates/edit.jinja2', permission='secret')\n", (1827, 1923), False, 'from pyramid.view import view_config\n'), ((2553, 2606), 'pyramid.view.view_config', 'view_config', ([], {'route_name': '"""delete"""', 'permission': '"""secret"""'}), "(route_name='delete', permission='secret')\n", (2564, 2606), False, 'from pyramid.view import view_config\n'), ((2905, 3033), 'pyramid.view.view_config', 'view_config', ([], {'route_name': '"""login"""', 'renderer': '"""learning_journal:templates/login.jinja2"""', 'permission': 'NO_PERMISSION_REQUIRED'}), "(route_name='login', renderer=\n 'learning_journal:templates/login.jinja2', permission=\n NO_PERMISSION_REQUIRED)\n", (2916, 3033), False, 'from pyramid.view import view_config\n'), ((3640, 3707), 'pyramid.view.view_config', 'view_config', ([], {'route_name': '"""logout"""', 'permission': 'NO_PERMISSION_REQUIRED'}), "(route_name='logout', permission=NO_PERMISSION_REQUIRED)\n", (3651, 3707), False, 'from pyramid.view import view_config\n'), ((3798, 3813), 'pyramid.security.forget', 'forget', (['request'], {}), '(request)\n', (3804, 3813), False, 'from pyramid.security import NO_PERMISSION_REQUIRED, forget, remember\n'), ((2397, 2411), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2409, 2411), False, 'from datetime import datetime\n'), ((3386, 3422), 'learning_journal.security.is_authenticated', 'is_authenticated', (['username', 'password'], {}), '(username, password)\n', (3402, 3422), False, 'from learning_journal.security import is_authenticated\n'), ((3446, 3473), 'pyramid.security.remember', 'remember', (['request', 'username'], {}), '(request, username)\n', (3454, 3473), False, 'from pyramid.security import NO_PERMISSION_REQUIRED, forget, remember\n'), ((1695, 1709), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1707, 1709), False, 'from datetime import datetime\n')] |
# Generated by Django 3.1.3 on 2021-02-10 01:10
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('labels', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='species',
name='de',
field=models.CharField(default='', max_length=100),
preserve_default=False,
),
migrations.AddField(
model_name='species',
name='es',
field=models.CharField(default='', max_length=100),
preserve_default=False,
),
migrations.AddField(
model_name='species',
name='fr',
field=models.CharField(default='', max_length=100),
preserve_default=False,
),
migrations.AddField(
model_name='species',
name='pt',
field=models.CharField(default='', max_length=100),
preserve_default=False,
),
]
| [
"django.db.models.CharField"
] | [((319, 363), 'django.db.models.CharField', 'models.CharField', ([], {'default': '""""""', 'max_length': '(100)'}), "(default='', max_length=100)\n", (335, 363), False, 'from django.db import migrations, models\n'), ((516, 560), 'django.db.models.CharField', 'models.CharField', ([], {'default': '""""""', 'max_length': '(100)'}), "(default='', max_length=100)\n", (532, 560), False, 'from django.db import migrations, models\n'), ((713, 757), 'django.db.models.CharField', 'models.CharField', ([], {'default': '""""""', 'max_length': '(100)'}), "(default='', max_length=100)\n", (729, 757), False, 'from django.db import migrations, models\n'), ((910, 954), 'django.db.models.CharField', 'models.CharField', ([], {'default': '""""""', 'max_length': '(100)'}), "(default='', max_length=100)\n", (926, 954), False, 'from django.db import migrations, models\n')] |
#!/usr/bin/env python3
import argparse
import getpass
import os
import sys
import argcomplete
from datetime import datetime
from forest import cmake_tools
from forest.common.eval_handler import EvalHandler
from forest.common.install import install_package, write_setup_file, write_ws_file, check_ws_file, uninstall_package, \
clean
from forest.common.recipe import RecipeSource, Cookbook
from forest.common import sudo_refresh
from pprint import pprint
# define directories for source, build, install, and recipes
from forest.common.forest_dirs import *
# just a try-except wrapper to catch ctrl+c
def main():
try:
if not do_main():
print('(failed)')
sys.exit(1)
sys.exit(0)
except KeyboardInterrupt:
print('\nfailed (interrupted by user)')
sys.exit(1)
# actual main
def do_main():
# set recipe dir
Cookbook.set_recipe_path(recipesdir)
# available recipes
available_recipes = Cookbook.get_available_recipes()
if len(available_recipes) == 0:
available_recipes = None
# parse cmd line args
buildtypes = ['None', 'RelWithDebInfo', 'Release', 'Debug']
cloneprotos = ['ssh', 'https']
dfl_log_file = datetime.now().strftime("/tmp/forest_%Y_%m_%d_%H_%M_%S.log")
parser = argparse.ArgumentParser(description='forest automatizes cloning and building of software packages')
parser.add_argument('--list', '-l', required=False, action='store_true', help='list available recipes')
parser.add_argument('--log-file', default=dfl_log_file, help='log file for non-verbose mode')
parser.add_argument('--verbose', '-v', required=False, action='store_true', help='print additional information')
subparsers = parser.add_subparsers(dest='command')
init_cmd = 'init'
init_parser = subparsers.add_parser(init_cmd, help='initialize the current folder as a forest workspace')
grow_cmd = 'grow'
grow_parser = subparsers.add_parser(grow_cmd, help='clone, configure, and build a recipe')
grow_parser.add_argument('recipe', nargs='?', metavar='RECIPE', choices=available_recipes, help='name of recipe with fetch and build information')
grow_parser.add_argument('--jobs', '-j', default=1, help='parallel jobs for building')
grow_parser.add_argument('--mode', '-m', nargs='+', required=False, help='specify modes that are used to set conditional compilation flags (e.g., cmake args)')
grow_parser.add_argument('--config', '-c', nargs='+', required=False, help='specify configuration variables that can be used inside recipes')
grow_parser.add_argument('--default-build-type', '-t', default=buildtypes[1], choices=buildtypes, help='build type for cmake, it is overridden by recipe')
grow_parser.add_argument('--force-reconfigure', required=False, action='store_true', help='force calling cmake before building with args from the recipe')
grow_parser.add_argument('--list-eval-locals', required=False, action='store_true', help='print available attributes when using conditional build args')
grow_parser.add_argument('--clone-protocol', required=False, choices=cloneprotos, help='override clone protocol')
grow_parser.add_argument('--clone-depth', required=False, type=int, help='set maximum history depth to save bandwidth')
grow_parser.add_argument('--cmake-args', nargs='+', required=False, help='specify additional cmake args to be appended to each recipe (leading -D must be omitted)')
grow_parser.add_argument('--no-deps', '-n', required=False, action='store_true', help='skip dependency fetch and build step')
grow_parser.add_argument('--uninstall', required=False, action='store_true', help='uninstall recipe')
grow_parser.add_argument('--clean', required=False, action='store_true', help='uninstall recipe and remove build')
grow_parser.add_argument('--pwd', '-p', required=False, help='user password to be used when sudo permission is required (if empty, user is prompted for password); note: to be used with care, as exposing your password might be harmful!')
grow_parser.add_argument('--verbose', '-v', required=False, action='store_true', help='print additional information')
recipes_cmd = 'add-recipes'
recipes_parser = subparsers.add_parser(recipes_cmd, help='add recipes from git remote')
recipes_parser.add_argument('url', help='url of the remote (e.g. git@github.com:<username>/<reponame>.git)')
recipes_parser.add_argument('--tag', '-t', required=False, default='master')
recipes_parser.add_argument('--subdir-path', '-s', required=False, default='recipes', help='relative path to the folder in which recipes are contained')
recipes_parser.add_argument('--recipes', '-r', required=False, nargs='+', help='specify which recipes to add, otherwise all recipes in subdir-path are added')
recipes_parser.add_argument('--allow-overwrite', '-o', required=False, action='store_true', help='allow overwritng local recipes with new ones')
recipes_parser.add_argument('--verbose', '-v', required=False, action='store_true', help='print additional information')
recipes_parser.add_argument('--clone-protocol', required=False, choices=cloneprotos, help='override clone protocol')
argcomplete.autocomplete(parser)
args = parser.parse_args()
# initialize workspace
if args.command == init_cmd:
# create marker file
write_ws_file(rootdir=rootdir) # note: error on failure?
# check ws
if not check_ws_file(rootdir=rootdir):
print(f'current directory {rootdir} is not a forest workspace.. \
have you called forest init ?', file=sys.stderr)
return False
# create directories (if do not exist)
for dir in (buildroot, installdir, srcroot, recipesdir):
if not os.path.exists(dir):
os.mkdir(dir)
# create setup.bash if does not exist
write_setup_file()
# verbose mode will show output of any called process
if args.verbose:
from forest.common import proc_utils
proc_utils.call_process_verbose = True
if not args.verbose:
from forest.common import print_utils
print_utils.log_file = open(args.log_file, 'w')
# sudo handling
if args.command == grow_cmd and args.pwd is not None:
sudo_refresher = sudo_refresh.SudoRefresher(pwd=args.pwd)
# print available packages
if args.list:
print(' '.join(Cookbook.get_available_recipes()))
return True
# set config vars
if args.command == grow_cmd and args.config:
from forest.common import config_handler
ch = config_handler.ConfigHandler.instance()
ch.set_config_variables(args.config)
# print available local attributes for conditional args
if args.command == grow_cmd and args.list_eval_locals:
from forest.common import eval_handler
eval_handler.EvalHandler.print_available_locals()
return True
# clone proto
if args.command in (grow_cmd, recipes_cmd) and args.clone_protocol is not None:
from forest.common.fetch_handler import GitFetcher
GitFetcher.proto_override = args.clone_protocol
# clone proto
if args.command == grow_cmd and args.clone_depth is not None:
from forest.common.fetch_handler import GitFetcher
GitFetcher.depth_override = args.clone_depth
# if required, add a recipe repository to the list of remotes
if args.command == recipes_cmd:
print('adding recipes...')
recipe_source = RecipeSource.FromUrl(args.url, args.tag)
return Cookbook.add_recipes(recipe_source, args.recipes, args.subdir_path, args.allow_overwrite)
# no recipe to install, exit
if args.command == grow_cmd and args.recipe is None:
print('no recipe to build, exiting...')
return True
# uninstall functionality
if args.command == grow_cmd and args.uninstall:
return uninstall_package(pkg=args.recipe,
buildroot=buildroot,
installdir=installdir,
verbose=args.verbose)
# clean functionality
if args.command == grow_cmd and args.clean:
return clean(pkg=args.recipe,
buildroot=buildroot,
installdir=installdir,
verbose=args.verbose)
# handle modes
if args.command == grow_cmd and args.mode is not None:
EvalHandler.modes = set(args.mode)
# default cmake args
if args.command == grow_cmd and args.cmake_args:
cmake_tools.CmakeTools.set_default_args(['-D' + a for a in args.cmake_args])
# print jobs
if args.command == grow_cmd:
# check ws is sourced
if rootdir not in os.environ.get('HHCM_FOREST_PATH', '').split(':'):
print('[warn] forest workspace does not appear to be sourced')
print(f'building {args.recipe} with {args.jobs} parallel job{"s" if int(args.jobs) > 1 else ""}')
# perform required installation
success = install_package(pkg=args.recipe,
srcroot=srcroot,
buildroot=buildroot,
installdir=installdir,
buildtype=args.default_build_type,
jobs=args.jobs,
reconfigure=args.force_reconfigure,
no_deps=args.no_deps
)
return success
return True
if __name__ == '__main__':
main() | [
"forest.common.install.clean",
"sys.exit",
"os.path.exists",
"forest.common.install.write_setup_file",
"argparse.ArgumentParser",
"forest.common.install.check_ws_file",
"forest.common.recipe.Cookbook.add_recipes",
"os.mkdir",
"forest.common.recipe.Cookbook.get_available_recipes",
"forest.common.in... | [((883, 919), 'forest.common.recipe.Cookbook.set_recipe_path', 'Cookbook.set_recipe_path', (['recipesdir'], {}), '(recipesdir)\n', (907, 919), False, 'from forest.common.recipe import RecipeSource, Cookbook\n'), ((969, 1001), 'forest.common.recipe.Cookbook.get_available_recipes', 'Cookbook.get_available_recipes', ([], {}), '()\n', (999, 1001), False, 'from forest.common.recipe import RecipeSource, Cookbook\n'), ((1291, 1395), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""forest automatizes cloning and building of software packages"""'}), "(description=\n 'forest automatizes cloning and building of software packages')\n", (1314, 1395), False, 'import argparse\n'), ((5217, 5249), 'argcomplete.autocomplete', 'argcomplete.autocomplete', (['parser'], {}), '(parser)\n', (5241, 5249), False, 'import argcomplete\n'), ((5858, 5876), 'forest.common.install.write_setup_file', 'write_setup_file', ([], {}), '()\n', (5874, 5876), False, 'from forest.common.install import install_package, write_setup_file, write_ws_file, check_ws_file, uninstall_package, clean\n'), ((715, 726), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (723, 726), False, 'import sys\n'), ((5379, 5409), 'forest.common.install.write_ws_file', 'write_ws_file', ([], {'rootdir': 'rootdir'}), '(rootdir=rootdir)\n', (5392, 5409), False, 'from forest.common.install import install_package, write_setup_file, write_ws_file, check_ws_file, uninstall_package, clean\n'), ((5464, 5494), 'forest.common.install.check_ws_file', 'check_ws_file', ([], {'rootdir': 'rootdir'}), '(rootdir=rootdir)\n', (5477, 5494), False, 'from forest.common.install import install_package, write_setup_file, write_ws_file, check_ws_file, uninstall_package, clean\n'), ((6282, 6322), 'forest.common.sudo_refresh.SudoRefresher', 'sudo_refresh.SudoRefresher', ([], {'pwd': 'args.pwd'}), '(pwd=args.pwd)\n', (6308, 6322), False, 'from forest.common import sudo_refresh\n'), ((6585, 6624), 'forest.common.config_handler.ConfigHandler.instance', 'config_handler.ConfigHandler.instance', ([], {}), '()\n', (6622, 6624), False, 'from forest.common import config_handler\n'), ((6845, 6894), 'forest.common.eval_handler.EvalHandler.print_available_locals', 'eval_handler.EvalHandler.print_available_locals', ([], {}), '()\n', (6892, 6894), False, 'from forest.common import eval_handler\n'), ((7492, 7532), 'forest.common.recipe.RecipeSource.FromUrl', 'RecipeSource.FromUrl', (['args.url', 'args.tag'], {}), '(args.url, args.tag)\n', (7512, 7532), False, 'from forest.common.recipe import RecipeSource, Cookbook\n'), ((7548, 7642), 'forest.common.recipe.Cookbook.add_recipes', 'Cookbook.add_recipes', (['recipe_source', 'args.recipes', 'args.subdir_path', 'args.allow_overwrite'], {}), '(recipe_source, args.recipes, args.subdir_path, args.\n allow_overwrite)\n', (7568, 7642), False, 'from forest.common.recipe import RecipeSource, Cookbook\n'), ((7895, 8000), 'forest.common.install.uninstall_package', 'uninstall_package', ([], {'pkg': 'args.recipe', 'buildroot': 'buildroot', 'installdir': 'installdir', 'verbose': 'args.verbose'}), '(pkg=args.recipe, buildroot=buildroot, installdir=\n installdir, verbose=args.verbose)\n', (7912, 8000), False, 'from forest.common.install import install_package, write_setup_file, write_ws_file, check_ws_file, uninstall_package, clean\n'), ((8185, 8278), 'forest.common.install.clean', 'clean', ([], {'pkg': 'args.recipe', 'buildroot': 'buildroot', 'installdir': 'installdir', 'verbose': 'args.verbose'}), '(pkg=args.recipe, buildroot=buildroot, installdir=installdir, verbose=\n args.verbose)\n', (8190, 8278), False, 'from forest.common.install import install_package, write_setup_file, write_ws_file, check_ws_file, uninstall_package, clean\n'), ((8546, 8624), 'forest.cmake_tools.CmakeTools.set_default_args', 'cmake_tools.CmakeTools.set_default_args', (["[('-D' + a) for a in args.cmake_args]"], {}), "([('-D' + a) for a in args.cmake_args])\n", (8585, 8624), False, 'from forest import cmake_tools\n'), ((9031, 9242), 'forest.common.install.install_package', 'install_package', ([], {'pkg': 'args.recipe', 'srcroot': 'srcroot', 'buildroot': 'buildroot', 'installdir': 'installdir', 'buildtype': 'args.default_build_type', 'jobs': 'args.jobs', 'reconfigure': 'args.force_reconfigure', 'no_deps': 'args.no_deps'}), '(pkg=args.recipe, srcroot=srcroot, buildroot=buildroot,\n installdir=installdir, buildtype=args.default_build_type, jobs=args.\n jobs, reconfigure=args.force_reconfigure, no_deps=args.no_deps)\n', (9046, 9242), False, 'from forest.common.install import install_package, write_setup_file, write_ws_file, check_ws_file, uninstall_package, clean\n'), ((695, 706), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (703, 706), False, 'import sys\n'), ((813, 824), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (821, 824), False, 'import sys\n'), ((1216, 1230), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1228, 1230), False, 'from datetime import datetime\n'), ((5764, 5783), 'os.path.exists', 'os.path.exists', (['dir'], {}), '(dir)\n', (5778, 5783), False, 'import os\n'), ((5797, 5810), 'os.mkdir', 'os.mkdir', (['dir'], {}), '(dir)\n', (5805, 5810), False, 'import os\n'), ((6396, 6428), 'forest.common.recipe.Cookbook.get_available_recipes', 'Cookbook.get_available_recipes', ([], {}), '()\n', (6426, 6428), False, 'from forest.common.recipe import RecipeSource, Cookbook\n'), ((8731, 8769), 'os.environ.get', 'os.environ.get', (['"""HHCM_FOREST_PATH"""', '""""""'], {}), "('HHCM_FOREST_PATH', '')\n", (8745, 8769), False, 'import os\n')] |
"""
Given an array of numbers, find the subarray that maximizes the sum of all
elements in the array. Note that these numbers can be negative
"""
import random
def createArray(n):
nums = []
for i in range(n):
nums.append(random.randint(-10, 10))
return nums
def bruteForceBest(a):
best = a
bestTotal = total(a)
for end in range(len(a)):
for start in range(end + 1):
t = total(a[start:(end + 1)])
if bestTotal < t:
bestTotal = t
best = a[start:(end + 1)]
return best
def total(a):
sum = 0
for n in a:
sum = sum + n
return sum
def maxSubArray(a):
return impl(a, 0, len(a))
# Returns max subarray within a[i:j] (inclusive of i, exclusive of j)
def impl(a, i, j):
#print(i, j)
if j - i <= 0:
return []
if j - i == 1:
return a[i:j]
# ignore non-positive elements at the edges WRONG: [1, -999999, 2]
# need to check how far we can spread from the center instead
while i < j - 1 and a[i] <= 0:
i += 1
while i < j - 1 and a[j - 1] <= 0:
j -= 1
#print(a, i, j)
bestUnbroken = total(a[i:j])
# break in half
mid = int((i + j) / 2)
left = impl(a, i, mid)
right = impl(a, mid, j)
leftTotal = total(left)
rightTotal = total(right)
# return either a half, or the total, whichever is max
if (max(leftTotal, rightTotal, bestUnbroken) == bestUnbroken):
return a[i:j]
elif (max(leftTotal, rightTotal, bestUnbroken) == rightTotal):
return right
else:
return left
if __name__ == "__main__":
a = createArray(100)
print(a)
bruteForce = bruteForceBest(a)
print(f'Brute force found {bruteForce} {total(bruteForce)}')
best = maxSubArray(a)
print(f'Best: {best} {total(best)}')
| [
"random.randint"
] | [((243, 266), 'random.randint', 'random.randint', (['(-10)', '(10)'], {}), '(-10, 10)\n', (257, 266), False, 'import random\n')] |
from datetime import datetime
from typing import Optional, List, Dict
import sys
sys.path.append("../../")
from backend.server.pd_model import *
from backend.queue.services import add_task, stop_all_ws_task
from backend.database.main import gis_stac
__all__ = ["preview_processing", "vector_processing", "refuse_processing"]
def get_items(time_interval: List[datetime], bbox: List[float]) -> List[dict]:
# response = get(DATABASE_URL + "get_preview") # TODO: Запрос к базе данных через url
items = gis_stac.filter(time_intervals=[time_interval], bboxes=[bbox])
items = sorted(map(lambda item: item.to_dict(), items),
key=lambda item: item["datetime"])
return list(items)
def preview_interval(timestamp: int) -> List[datetime]:
return [datetime.fromtimestamp(timestamp),
datetime.fromtimestamp(timestamp + 24 * 60 * 60)]
def preview_processing(data: PreviewData) -> Dict[str, List[PreviewData]]:
items = get_items(preview_interval(data.datetime),
[data.bbox[0].lat, data.bbox[0].lon,
data.bbox[1].lat, data.bbox[1].lon])
res = []
for item in items:
for asset in item["assets"]:
if asset["type"] == "img":
res.append(PreviewData(img=asset["href"],
datetime=item["properties"]["datetime"],
bbox=item["bbox"]))
break
res.sort(key=lambda el: el.datetime)
return {"imgs": res}
def get_item_url(iid: Optional[str]) -> str:
item = gis_stac.root_catalog.get_child(iid, recursive=True)
return item.href
def vector_processing(ws_id: Optional[str],
data: Optional[VectorsRequest]) -> None:
files = (get_item_url(data.ids[0]), get_item_url(data.ids[1]))
params = [files[0], files[1], data.points, data.window_size, data.vicinity_size]
add_to_queue(ws_id=ws_id, *params)
def add_to_queue(ws_id: Optional[str],
task_type: Optional[str] = "high",
*params, **kwargs) -> None:
# request_data = {"task_type": task_type,
# "params": params,
# "kwargs": kwargs}
# if ws_id is not None:
# request_data["ws_id"] = ws_id
add_task(ws_id=ws_id, args=params,
kwargs=kwargs, task_type=task_type)
# TODO: Microservices
def delete_work_to_queue(ws_id: Optional[str]) -> None:
stop_all_ws_task(ws_id) # TODO: Microservices
def refuse_processing(ws_id: Optional[str]) -> None:
delete_work_to_queue(ws_id)
| [
"backend.queue.services.add_task",
"datetime.datetime.fromtimestamp",
"backend.queue.services.stop_all_ws_task",
"backend.database.main.gis_stac.root_catalog.get_child",
"backend.database.main.gis_stac.filter",
"sys.path.append"
] | [((81, 106), 'sys.path.append', 'sys.path.append', (['"""../../"""'], {}), "('../../')\n", (96, 106), False, 'import sys\n'), ((510, 572), 'backend.database.main.gis_stac.filter', 'gis_stac.filter', ([], {'time_intervals': '[time_interval]', 'bboxes': '[bbox]'}), '(time_intervals=[time_interval], bboxes=[bbox])\n', (525, 572), False, 'from backend.database.main import gis_stac\n'), ((1585, 1637), 'backend.database.main.gis_stac.root_catalog.get_child', 'gis_stac.root_catalog.get_child', (['iid'], {'recursive': '(True)'}), '(iid, recursive=True)\n', (1616, 1637), False, 'from backend.database.main import gis_stac\n'), ((2295, 2365), 'backend.queue.services.add_task', 'add_task', ([], {'ws_id': 'ws_id', 'args': 'params', 'kwargs': 'kwargs', 'task_type': 'task_type'}), '(ws_id=ws_id, args=params, kwargs=kwargs, task_type=task_type)\n', (2303, 2365), False, 'from backend.queue.services import add_task, stop_all_ws_task\n'), ((2467, 2490), 'backend.queue.services.stop_all_ws_task', 'stop_all_ws_task', (['ws_id'], {}), '(ws_id)\n', (2483, 2490), False, 'from backend.queue.services import add_task, stop_all_ws_task\n'), ((780, 813), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['timestamp'], {}), '(timestamp)\n', (802, 813), False, 'from datetime import datetime\n'), ((827, 875), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['(timestamp + 24 * 60 * 60)'], {}), '(timestamp + 24 * 60 * 60)\n', (849, 875), False, 'from datetime import datetime\n')] |
"""
Adds a ubt command which adds basic block counts to frames within a backtrace.
Usage: ubt
Contributors: <NAME>, <NAME>
Copyright (C) 2019 Undo Ltd
"""
import gdb
from undodb.debugger_extensions import (
debugger_utils,
udb,
)
class BacktraceWithTime(gdb.Command):
def __init__(self):
super().__init__("ubt", gdb.COMMAND_USER)
@staticmethod
def invoke(arg, from_tty):
# We disable all breakpoints, so we can reverse up the stack without
# hitting anything we shouldn't.
with udb.time.auto_reverting(), debugger_utils.suspend_breakpoints():
# Get the whole backtrace.
backtrace = debugger_utils.execute_to_string("where")
backtrace = backtrace.splitlines()
exception_hit = False
for line in backtrace:
if not exception_hit:
# Print time at start of each backtrace line.
time = udb.time.get()
print("[{}]\t{}".format(str(time.bbcount), line))
try:
# Go back to previous frame
debugger_utils.execute_to_string("rf")
except gdb.error:
# Can't figure out any further - perhaps stack frame is
# not available, or we have reached the start.
exception_hit = True
else:
print(f"[?]\t{line}")
BacktraceWithTime()
| [
"undodb.debugger_extensions.debugger_utils.execute_to_string",
"undodb.debugger_extensions.debugger_utils.suspend_breakpoints",
"undodb.debugger_extensions.udb.time.get",
"undodb.debugger_extensions.udb.time.auto_reverting"
] | [((539, 564), 'undodb.debugger_extensions.udb.time.auto_reverting', 'udb.time.auto_reverting', ([], {}), '()\n', (562, 564), False, 'from undodb.debugger_extensions import debugger_utils, udb\n'), ((566, 602), 'undodb.debugger_extensions.debugger_utils.suspend_breakpoints', 'debugger_utils.suspend_breakpoints', ([], {}), '()\n', (600, 602), False, 'from undodb.debugger_extensions import debugger_utils, udb\n'), ((667, 708), 'undodb.debugger_extensions.debugger_utils.execute_to_string', 'debugger_utils.execute_to_string', (['"""where"""'], {}), "('where')\n", (699, 708), False, 'from undodb.debugger_extensions import debugger_utils, udb\n'), ((957, 971), 'undodb.debugger_extensions.udb.time.get', 'udb.time.get', ([], {}), '()\n', (969, 971), False, 'from undodb.debugger_extensions import debugger_utils, udb\n'), ((1143, 1181), 'undodb.debugger_extensions.debugger_utils.execute_to_string', 'debugger_utils.execute_to_string', (['"""rf"""'], {}), "('rf')\n", (1175, 1181), False, 'from undodb.debugger_extensions import debugger_utils, udb\n')] |
"""
Integration test for remove_participants_under_18years module
Original Issues: DC-1724
The intent is to remove data for participants under 18 years old from all the domain tables."""
# Python Imports
import os
import datetime
# Project Imports
from common import VISIT_OCCURRENCE, OBSERVATION
from common import JINJA_ENV
from app_identity import PROJECT_ID
from cdr_cleaner.cleaning_rules.remove_participants_under_18years import (
RemoveParticipantsUnder18Years, AFFECTED_TABLES)
from tests.integration_tests.data_steward.cdr_cleaner.cleaning_rules.bigquery_tests_base import \
BaseTest
PERSON_DATA_TEMPLATE = JINJA_ENV.from_string("""
INSERT INTO `{{project_id}}.{{dataset_id}}.person`
(person_id, birth_datetime, gender_concept_id, year_of_birth, race_concept_id, ethnicity_concept_id)
VALUES
/* Adding participans with different ranges of birthdays.*/
/* Participant 4's birth_datetime was set to 2021.*/
/* The data belonging to this participant from all the domain tables should be dropped.*/
(1, '1970-01-01 00:00:00 UTC', 0, 1970, 0, 0),
(2, '2002-01-01 00:00:00 UTC', 0, 2002, 0, 0),
(3, '2003-01-01 00:00:00 UTC', 0, 2003, 0, 0),
(4, '2021-01-01 00:00:00 UTC', 0, 2015, 0, 0)
""")
VISIT_OCCURRENCE_DATA_TEMPLATE = JINJA_ENV.from_string("""
INSERT INTO `{{project_id}}.{{dataset_id}}.visit_occurrence`
(visit_occurrence_id, person_id, visit_start_date, visit_end_date, visit_concept_id, visit_type_concept_id)
VALUES
(1, 1, '2020-01-01', '2020-01-02', 0, 0),
(2, 3, '2020-01-02', '2020-01-03', 0, 0),
(3, 2, '2020-01-01', '2020-03-01', 0, 0),
(4, 4, '2020-01-02', '2022-01-03', 0, 0)
""")
OBSERVATION_DATA_TEMPLATE = JINJA_ENV.from_string("""
INSERT INTO `{{project_id}}.{{dataset_id}}.observation`
(observation_id, person_id, observation_date, observation_concept_id, observation_type_concept_id)
VALUES
(1, 1, '2020-01-01', 0, 0),
(2, 2, '2020-01-02', 0, 0),
(3, 3, '2020-03-01', 0, 0),
(4, 4, '2020-01-05', 0, 0),
(5, 3, '2020-05-05', 0, 0)
""")
class RemoveParticipantsUnder18YearsTest(BaseTest.CleaningRulesTestBase):
@classmethod
def setUpClass(cls):
print('**************************************************************')
print(cls.__name__)
print('**************************************************************')
super().initialize_class_vars()
# Set the test project identifier
cls.project_id = os.environ.get(PROJECT_ID)
# Set the expected test datasets
cls.dataset_id = os.environ.get('COMBINED_DATASET_ID')
cls.sandbox_id = cls.dataset_id + '_sandbox'
cls.rule_instance = RemoveParticipantsUnder18Years(
cls.project_id, cls.dataset_id, cls.sandbox_id)
# Generates list of fully qualified table names and their corresponding sandbox table names
# adding death table name for setup/cleanup operations
for table_name in AFFECTED_TABLES:
cls.fq_table_names.append(
f'{cls.project_id}.{cls.dataset_id}.{table_name}')
sandbox_table_name = cls.rule_instance.get_sandbox_tablenames(
table_name)
cls.fq_sandbox_table_names.append(
f'{cls.project_id}.{cls.sandbox_id}.{sandbox_table_name}')
# call super to set up the client, create datasets
cls.up_class = super().setUpClass()
def setUp(self):
"""
Create empty tables for the rule to run on
"""
# Create the observation, concept, and concept_relationship tables required for the test
super().setUp()
person_data_query = PERSON_DATA_TEMPLATE.render(
project_id=self.project_id, dataset_id=self.dataset_id)
visit_occurrence_data_query = VISIT_OCCURRENCE_DATA_TEMPLATE.render(
project_id=self.project_id, dataset_id=self.dataset_id)
observation_data_query = OBSERVATION_DATA_TEMPLATE.render(
project_id=self.project_id, dataset_id=self.dataset_id)
# Load test data
self.load_test_data([
f'''{person_data_query};
{visit_occurrence_data_query};
{observation_data_query}'''
])
def test_remove_participants_under_18years(self):
# Expected results list
tables_and_counts = [{
'fq_table_name':
f'{self.project_id}.{self.dataset_id}.{VISIT_OCCURRENCE}',
'fq_sandbox_table_name':
f'{self.project_id}.{self.sandbox_id}.{self.rule_instance.sandbox_table_for(VISIT_OCCURRENCE)}',
'loaded_ids': [1, 2, 3, 4],
'sandboxed_ids': [4],
'fields': [
'visit_occurrence_id', 'person_id', 'visit_start_date',
'visit_end_date'
],
'cleaned_values': [
(1, 1, datetime.datetime.strptime('2020-01-01',
'%Y-%m-%d').date(),
datetime.datetime.strptime('2020-01-02', '%Y-%m-%d').date()),
(2, 3, datetime.datetime.strptime('2020-01-02',
'%Y-%m-%d').date(),
datetime.datetime.strptime('2020-01-03', '%Y-%m-%d').date()),
(3, 2, datetime.datetime.strptime('2020-01-01',
'%Y-%m-%d').date(),
datetime.datetime.strptime('2020-03-01', '%Y-%m-%d').date())
]
}, {
'fq_table_name':
f'{self.project_id}.{self.dataset_id}.{OBSERVATION}',
'fq_sandbox_table_name':
f'{self.project_id}.{self.sandbox_id}.{self.rule_instance.sandbox_table_for(OBSERVATION)}',
'loaded_ids': [1, 2, 3, 4, 5],
'sandboxed_ids': [4],
'fields': ['observation_id', 'person_id', 'observation_date'],
'cleaned_values': [
(1, 1, datetime.datetime.strptime('2020-01-01',
'%Y-%m-%d').date()),
(2, 2, datetime.datetime.strptime('2020-01-02',
'%Y-%m-%d').date()),
(3, 3, datetime.datetime.strptime('2020-03-01',
'%Y-%m-%d').date()),
(5, 3, datetime.datetime.strptime('2020-05-05',
'%Y-%m-%d').date())
]
}]
self.default_test(tables_and_counts)
| [
"datetime.datetime.strptime",
"cdr_cleaner.cleaning_rules.remove_participants_under_18years.RemoveParticipantsUnder18Years",
"os.environ.get",
"common.JINJA_ENV.from_string"
] | [((630, 1263), 'common.JINJA_ENV.from_string', 'JINJA_ENV.from_string', (['"""\nINSERT INTO `{{project_id}}.{{dataset_id}}.person`\n(person_id, birth_datetime, gender_concept_id, year_of_birth, race_concept_id, ethnicity_concept_id)\nVALUES\n /* Adding participans with different ranges of birthdays.*/\n /* Participant 4\'s birth_datetime was set to 2021.*/\n /* The data belonging to this participant from all the domain tables should be dropped.*/ \n (1, \'1970-01-01 00:00:00 UTC\', 0, 1970, 0, 0),\n (2, \'2002-01-01 00:00:00 UTC\', 0, 2002, 0, 0),\n (3, \'2003-01-01 00:00:00 UTC\', 0, 2003, 0, 0),\n (4, \'2021-01-01 00:00:00 UTC\', 0, 2015, 0, 0)\n"""'], {}), '(\n """\nINSERT INTO `{{project_id}}.{{dataset_id}}.person`\n(person_id, birth_datetime, gender_concept_id, year_of_birth, race_concept_id, ethnicity_concept_id)\nVALUES\n /* Adding participans with different ranges of birthdays.*/\n /* Participant 4\'s birth_datetime was set to 2021.*/\n /* The data belonging to this participant from all the domain tables should be dropped.*/ \n (1, \'1970-01-01 00:00:00 UTC\', 0, 1970, 0, 0),\n (2, \'2002-01-01 00:00:00 UTC\', 0, 2002, 0, 0),\n (3, \'2003-01-01 00:00:00 UTC\', 0, 2003, 0, 0),\n (4, \'2021-01-01 00:00:00 UTC\', 0, 2015, 0, 0)\n"""\n )\n', (651, 1263), False, 'from common import JINJA_ENV\n'), ((1287, 1700), 'common.JINJA_ENV.from_string', 'JINJA_ENV.from_string', (['"""\nINSERT INTO `{{project_id}}.{{dataset_id}}.visit_occurrence`\n (visit_occurrence_id, person_id, visit_start_date, visit_end_date, visit_concept_id, visit_type_concept_id)\nVALUES\n (1, 1, \'2020-01-01\', \'2020-01-02\', 0, 0),\n (2, 3, \'2020-01-02\', \'2020-01-03\', 0, 0),\n (3, 2, \'2020-01-01\', \'2020-03-01\', 0, 0),\n (4, 4, \'2020-01-02\', \'2022-01-03\', 0, 0)\n"""'], {}), '(\n """\nINSERT INTO `{{project_id}}.{{dataset_id}}.visit_occurrence`\n (visit_occurrence_id, person_id, visit_start_date, visit_end_date, visit_concept_id, visit_type_concept_id)\nVALUES\n (1, 1, \'2020-01-01\', \'2020-01-02\', 0, 0),\n (2, 3, \'2020-01-02\', \'2020-01-03\', 0, 0),\n (3, 2, \'2020-01-01\', \'2020-03-01\', 0, 0),\n (4, 4, \'2020-01-02\', \'2022-01-03\', 0, 0)\n"""\n )\n', (1308, 1700), False, 'from common import JINJA_ENV\n'), ((1720, 2091), 'common.JINJA_ENV.from_string', 'JINJA_ENV.from_string', (['"""\nINSERT INTO `{{project_id}}.{{dataset_id}}.observation`\n(observation_id, person_id, observation_date, observation_concept_id, observation_type_concept_id)\nVALUES\n (1, 1, \'2020-01-01\', 0, 0),\n (2, 2, \'2020-01-02\', 0, 0),\n (3, 3, \'2020-03-01\', 0, 0),\n (4, 4, \'2020-01-05\', 0, 0),\n (5, 3, \'2020-05-05\', 0, 0)\n"""'], {}), '(\n """\nINSERT INTO `{{project_id}}.{{dataset_id}}.observation`\n(observation_id, person_id, observation_date, observation_concept_id, observation_type_concept_id)\nVALUES\n (1, 1, \'2020-01-01\', 0, 0),\n (2, 2, \'2020-01-02\', 0, 0),\n (3, 3, \'2020-03-01\', 0, 0),\n (4, 4, \'2020-01-05\', 0, 0),\n (5, 3, \'2020-05-05\', 0, 0)\n"""\n )\n', (1741, 2091), False, 'from common import JINJA_ENV\n'), ((2498, 2524), 'os.environ.get', 'os.environ.get', (['PROJECT_ID'], {}), '(PROJECT_ID)\n', (2512, 2524), False, 'import os\n'), ((2592, 2629), 'os.environ.get', 'os.environ.get', (['"""COMBINED_DATASET_ID"""'], {}), "('COMBINED_DATASET_ID')\n", (2606, 2629), False, 'import os\n'), ((2712, 2790), 'cdr_cleaner.cleaning_rules.remove_participants_under_18years.RemoveParticipantsUnder18Years', 'RemoveParticipantsUnder18Years', (['cls.project_id', 'cls.dataset_id', 'cls.sandbox_id'], {}), '(cls.project_id, cls.dataset_id, cls.sandbox_id)\n', (2742, 2790), False, 'from cdr_cleaner.cleaning_rules.remove_participants_under_18years import RemoveParticipantsUnder18Years, AFFECTED_TABLES\n'), ((4910, 4962), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['"""2020-01-01"""', '"""%Y-%m-%d"""'], {}), "('2020-01-01', '%Y-%m-%d')\n", (4936, 4962), False, 'import datetime\n'), ((5038, 5090), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['"""2020-01-02"""', '"""%Y-%m-%d"""'], {}), "('2020-01-02', '%Y-%m-%d')\n", (5064, 5090), False, 'import datetime\n'), ((5123, 5175), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['"""2020-01-02"""', '"""%Y-%m-%d"""'], {}), "('2020-01-02', '%Y-%m-%d')\n", (5149, 5175), False, 'import datetime\n'), ((5251, 5303), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['"""2020-01-03"""', '"""%Y-%m-%d"""'], {}), "('2020-01-03', '%Y-%m-%d')\n", (5277, 5303), False, 'import datetime\n'), ((5336, 5388), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['"""2020-01-01"""', '"""%Y-%m-%d"""'], {}), "('2020-01-01', '%Y-%m-%d')\n", (5362, 5388), False, 'import datetime\n'), ((5464, 5516), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['"""2020-03-01"""', '"""%Y-%m-%d"""'], {}), "('2020-03-01', '%Y-%m-%d')\n", (5490, 5516), False, 'import datetime\n'), ((6003, 6055), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['"""2020-01-01"""', '"""%Y-%m-%d"""'], {}), "('2020-01-01', '%Y-%m-%d')\n", (6029, 6055), False, 'import datetime\n'), ((6138, 6190), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['"""2020-01-02"""', '"""%Y-%m-%d"""'], {}), "('2020-01-02', '%Y-%m-%d')\n", (6164, 6190), False, 'import datetime\n'), ((6273, 6325), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['"""2020-03-01"""', '"""%Y-%m-%d"""'], {}), "('2020-03-01', '%Y-%m-%d')\n", (6299, 6325), False, 'import datetime\n'), ((6408, 6460), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['"""2020-05-05"""', '"""%Y-%m-%d"""'], {}), "('2020-05-05', '%Y-%m-%d')\n", (6434, 6460), False, 'import datetime\n')] |
import pandas as pd
canucks = pd.read_csv('data/canucks.csv')
# Identify any columns with null values with .info()
# Save this dataframe as canucks_info
canucks_info = canucks.info()
canucks_info
# Create a new column in the dataframe named Wealth
# where all the values equal "comfortable"
# Name the new dataframe canucks_comf
canucks_comf = canucks.assign(Wealth = "comfortable")
canucks_comf
# Do conditional replacement, where if the value in the salary column is null,
# we replace "comfortable" with "unknown"
canucks_comf.loc[canucks_comf['Salary'].isnull(), "Wealth"] = "unknown"
canucks_comf | [
"pandas.read_csv"
] | [((31, 62), 'pandas.read_csv', 'pd.read_csv', (['"""data/canucks.csv"""'], {}), "('data/canucks.csv')\n", (42, 62), True, 'import pandas as pd\n')] |
# This file is part of the Edison Project.
# Please refer to the LICENSE document that was supplied with this software for information on how it can be used.
# Create your views here.
from django.http import Http404, HttpResponse
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.forms import ModelForm
from models import *
# Project specific imports
from models import *
def custom_proc(request):
"A context processor that provides 'app', 'user' and 'ip_address'."
return {
'app': 'edison',
'user': request.user,
'ip_address': request.META['REMOTE_ADDR']
}
@login_required
def home(request):
title = 'Configuration Database Home'
section_item_name = 'Configuration Item'
return render_to_response('cmdb/home.tpl',
locals(),
context_instance=RequestContext(request, processors=[custom_proc]))
@login_required
def listdata(request):
link_desc = 'Configuration Item'
cfgitems = ConfigurationItem.objects.all().order_by('Hostname')
return render_to_response('list.tpl',{'data_list':cfgitems,'link_desc':link_desc,},context_instance=RequestContext(request)) #{'data_list':cfgitems,locals()})
# Setup the 'edit' form
class EditForm(ModelForm):
class Meta:
model = ConfigurationItem
@login_required
def edit(request,cfgid):
title = 'Edit an Item'
if request.method == "POST":
cfgitem = ConfigurationItem.objects.get(pk=cfgid)
form = EditForm(request.POST,instance=cfgitem)
if form.is_valid():
form.save()
request.user.message_set.create(message='The Configuration Item was updated sucessfully')
else:
cfgitem = ConfigurationItem.objects.get(pk=cfgid)
form = EditForm(instance=cfgitem)
return render_to_response('cmdb/edit.tpl',{'form':form},context_instance=RequestContext(request, processors=[custom_proc]))
@login_required
def add(request):
title = 'Add a new Item'
return render_to_response('cmdb/add.tpl',{'form':form},context_instance=RequestContent(request, processors=[custom_proc]))
| [
"django.template.RequestContext"
] | [((947, 996), 'django.template.RequestContext', 'RequestContext', (['request'], {'processors': '[custom_proc]'}), '(request, processors=[custom_proc])\n', (961, 996), False, 'from django.template import RequestContext\n'), ((1247, 1270), 'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (1261, 1270), False, 'from django.template import RequestContext\n'), ((1975, 2024), 'django.template.RequestContext', 'RequestContext', (['request'], {'processors': '[custom_proc]'}), '(request, processors=[custom_proc])\n', (1989, 2024), False, 'from django.template import RequestContext\n')] |
from django.conf import settings
from django.conf.urls.static import static
from django.conf.urls import include,url
from . import views
urlpatterns=[
url(r'api/user/user-id/(?P<pk>[0-9]+)/$',
views.UserDescription.as_view()),
url(r'api/project/project-id/(?P<pk>[0-9]+)/$',
views.ProjectDescription.as_view()),
url(r'^profile/',views.profile,name='profile'),
url('^$',views.index,name ='index'),
url(r'^search/', views.search_results, name='search_results'),
url(r'^user/',views.user,name ='user'),
url(r'^tinymce/', include('tinymce.urls')),
url(r'^api/profile/', views.UserList.as_view()),
url(r'^api/project/', views.ProjectList.as_view()),
url(r'^project/',views.new_project,name ='newproject'),
url(r'^ajax/newsletter/$', views.newsletter, name='newsletter')
]
if settings.DEBUG:
urlpatterns+= static(settings.MEDIA_URL, document_root = settings.MEDIA_ROOT)
| [
"django.conf.urls.static.static",
"django.conf.urls.include",
"django.conf.urls.url"
] | [((344, 391), 'django.conf.urls.url', 'url', (['"""^profile/"""', 'views.profile'], {'name': '"""profile"""'}), "('^profile/', views.profile, name='profile')\n", (347, 391), False, 'from django.conf.urls import include, url\n'), ((396, 432), 'django.conf.urls.url', 'url', (['"""^$"""', 'views.index'], {'name': '"""index"""'}), "('^$', views.index, name='index')\n", (399, 432), False, 'from django.conf.urls import include, url\n'), ((437, 497), 'django.conf.urls.url', 'url', (['"""^search/"""', 'views.search_results'], {'name': '"""search_results"""'}), "('^search/', views.search_results, name='search_results')\n", (440, 497), False, 'from django.conf.urls import include, url\n'), ((504, 542), 'django.conf.urls.url', 'url', (['"""^user/"""', 'views.user'], {'name': '"""user"""'}), "('^user/', views.user, name='user')\n", (507, 542), False, 'from django.conf.urls import include, url\n'), ((706, 760), 'django.conf.urls.url', 'url', (['"""^project/"""', 'views.new_project'], {'name': '"""newproject"""'}), "('^project/', views.new_project, name='newproject')\n", (709, 760), False, 'from django.conf.urls import include, url\n'), ((766, 828), 'django.conf.urls.url', 'url', (['"""^ajax/newsletter/$"""', 'views.newsletter'], {'name': '"""newsletter"""'}), "('^ajax/newsletter/$', views.newsletter, name='newsletter')\n", (769, 828), False, 'from django.conf.urls import include, url\n'), ((870, 931), 'django.conf.urls.static.static', 'static', (['settings.MEDIA_URL'], {'document_root': 'settings.MEDIA_ROOT'}), '(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n', (876, 931), False, 'from django.conf.urls.static import static\n'), ((566, 589), 'django.conf.urls.include', 'include', (['"""tinymce.urls"""'], {}), "('tinymce.urls')\n", (573, 589), False, 'from django.conf.urls import include, url\n')] |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2018 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
import pytest
from dci.common.exceptions import DCIException
from dci.common.schemas import check_json_is_valid, tag_schema
def test_create_tags(admin):
pt = admin.post('/api/v1/tags', data={'name': 'my tag'})
assert pt.status_code == 201
assert pt.data['tag']['name'] == 'my tag'
def test_get_tags(admin):
gt = admin.get('/api/v1/tags')
count = gt.data['_meta']['count']
for x in range(3):
admin.post('/api/v1/tags', data={'name': 'my tag %s' % x})
gt = admin.get('/api/v1/tags')
assert gt.status_code == 200
assert len(gt.data['tags']) == count + 3
def test_delete_tag_by_id(admin):
gt = admin.get('/api/v1/tags')
count = gt.data['_meta']['count']
pt = admin.post('/api/v1/tags',
data={'name': 'my tag to delete'})
pt_etag = pt.headers.get("ETag")
pt_id = pt.data['tag']['id']
assert pt.status_code == 201
deleted_t = admin.delete('/api/v1/tags/%s' % pt_id,
headers={'If-match': pt_etag})
assert deleted_t.status_code == 204
gt = admin.get('/api/v1/tags')
assert len(gt.data['tags']) == count
def test_post_schema():
try:
check_json_is_valid(tag_schema, {"name": "tag"})
except DCIException:
pytest.fail("tag_schema is invalid")
| [
"pytest.fail",
"dci.common.schemas.check_json_is_valid"
] | [((1824, 1872), 'dci.common.schemas.check_json_is_valid', 'check_json_is_valid', (['tag_schema', "{'name': 'tag'}"], {}), "(tag_schema, {'name': 'tag'})\n", (1843, 1872), False, 'from dci.common.schemas import check_json_is_valid, tag_schema\n'), ((1906, 1942), 'pytest.fail', 'pytest.fail', (['"""tag_schema is invalid"""'], {}), "('tag_schema is invalid')\n", (1917, 1942), False, 'import pytest\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Logging Handler integrating RabbitMQ and
Graylog Extended Log Format (GELF)"""
import json
from logging import Filter
from logging.handlers import SocketHandler
from amqplib import client_0_8 as amqp # pylint: disable=import-error
from graypy.handler import BaseGELFHandler
try:
from urllib.parse import urlparse, unquote
except ImportError:
from urlparse import urlparse
from urllib import unquote
_ifnone = lambda v, x: x if v is None else v
class GELFRabbitHandler(BaseGELFHandler, SocketHandler):
"""RabbitMQ / GELF handler
.. note::
This handler ignores all messages logged by amqplib.
"""
def __init__(self, url, exchange='logging.gelf', exchange_type='fanout',
virtual_host='/', routing_key='', **kwargs):
"""Initialize the GELFRabbitHandler
:param url: RabbitMQ URL (ex: amqp://guest:guest@localhost:5672/)
:type url: str
:param exchange: RabbitMQ exchange. A queue binding must be defined
on the server to prevent GELF logs from being dropped.
:type exchange: str
:param exchange_type: RabbitMQ exchange type.
:type exchange_type: str
:param virtual_host:
:type virtual_host: str
:param routing_key:
:type routing_key: str
"""
self.url = url
parsed = urlparse(url)
if parsed.scheme != 'amqp':
raise ValueError('invalid URL scheme (expected "amqp"): %s' % url)
host = parsed.hostname or 'localhost'
port = _ifnone(parsed.port, 5672)
self.virtual_host = virtual_host if not unquote(
parsed.path[1:]) else unquote(parsed.path[1:])
self.cn_args = {
'host': '%s:%s' % (host, port),
'userid': _ifnone(parsed.username, 'guest'),
'password': _ifnone(parsed.password, '<PASSWORD>'),
'virtual_host': self.virtual_host,
'insist': False,
}
self.exchange = exchange
self.exchange_type = exchange_type
self.routing_key = routing_key
BaseGELFHandler.__init__(
self,
**kwargs
)
SocketHandler.__init__(self, host, port)
self.addFilter(ExcludeFilter('amqplib'))
def makeSocket(self, timeout=1):
return RabbitSocket(self.cn_args, timeout, self.exchange,
self.exchange_type, self.routing_key)
def makePickle(self, record):
message_dict = self._make_gelf_dict(record)
return json.dumps(message_dict)
class RabbitSocket(object):
def __init__(self, cn_args, timeout, exchange, exchange_type, routing_key):
self.cn_args = cn_args
self.timeout = timeout
self.exchange = exchange
self.exchange_type = exchange_type
self.routing_key = routing_key
self.connection = amqp.Connection(
connection_timeout=timeout, **self.cn_args)
self.channel = self.connection.channel()
self.channel.exchange_declare(
exchange=self.exchange,
type=self.exchange_type,
durable=True,
auto_delete=False,
)
def sendall(self, data):
msg = amqp.Message(data, delivery_mode=2)
self.channel.basic_publish(
msg,
exchange=self.exchange,
routing_key=self.routing_key
)
def close(self):
"""Close the connection to the RabbitMQ socket"""
try:
self.connection.close()
except Exception:
pass
class ExcludeFilter(Filter):
"""A subclass of :class:`logging.Filter` which should be instantiated
with the name of the logger which, together with its children, will have
its events excluded (filtered out)"""
def __init__(self, name):
"""Initialize the ExcludeFilter
:param name: Name to match for within a :class:`logging.LogRecord`'s
``name`` field for filtering.
:type name: str
"""
if not name:
raise ValueError('ExcludeFilter requires a non-empty name')
Filter.__init__(self, name)
def filter(self, record):
return not (record.name.startswith(self.name) and (
len(record.name) == self.nlen or record.name[self.nlen] == "."))
| [
"logging.handlers.SocketHandler.__init__",
"urlparse.urlparse",
"urllib.unquote",
"json.dumps",
"logging.Filter.__init__",
"amqplib.client_0_8.Message",
"amqplib.client_0_8.Connection",
"graypy.handler.BaseGELFHandler.__init__"
] | [((1404, 1417), 'urlparse.urlparse', 'urlparse', (['url'], {}), '(url)\n', (1412, 1417), False, 'from urlparse import urlparse\n'), ((2136, 2176), 'graypy.handler.BaseGELFHandler.__init__', 'BaseGELFHandler.__init__', (['self'], {}), '(self, **kwargs)\n', (2160, 2176), False, 'from graypy.handler import BaseGELFHandler\n'), ((2219, 2259), 'logging.handlers.SocketHandler.__init__', 'SocketHandler.__init__', (['self', 'host', 'port'], {}), '(self, host, port)\n', (2241, 2259), False, 'from logging.handlers import SocketHandler\n'), ((2581, 2605), 'json.dumps', 'json.dumps', (['message_dict'], {}), '(message_dict)\n', (2591, 2605), False, 'import json\n'), ((2919, 2978), 'amqplib.client_0_8.Connection', 'amqp.Connection', ([], {'connection_timeout': 'timeout'}), '(connection_timeout=timeout, **self.cn_args)\n', (2934, 2978), True, 'from amqplib import client_0_8 as amqp\n'), ((3264, 3299), 'amqplib.client_0_8.Message', 'amqp.Message', (['data'], {'delivery_mode': '(2)'}), '(data, delivery_mode=2)\n', (3276, 3299), True, 'from amqplib import client_0_8 as amqp\n'), ((4164, 4191), 'logging.Filter.__init__', 'Filter.__init__', (['self', 'name'], {}), '(self, name)\n', (4179, 4191), False, 'from logging import Filter\n'), ((1712, 1736), 'urllib.unquote', 'unquote', (['parsed.path[1:]'], {}), '(parsed.path[1:])\n', (1719, 1736), False, 'from urllib import unquote\n'), ((1669, 1693), 'urllib.unquote', 'unquote', (['parsed.path[1:]'], {}), '(parsed.path[1:])\n', (1676, 1693), False, 'from urllib import unquote\n')] |
import boto3
comprehend = boto3.client(service_name='comprehend')
translate = boto3.client(service_name='translate')
def detect_language(text):
"""
Detects the dominant language in a text
Parameters
----------
text: string, required
Input text
Returns
-------
string
Representing language code of the dominant language
"""
# Sending call to get language
result = comprehend.detect_dominant_language(Text = text)['Languages']
# Since the result can contain more than one language find the one with the highest score.
high_score = 0
best_guess = ''
for lang in range(len(result)):
if result[lang]['Score'] > high_score:
high_score = result[lang]['Score']
best_guess = result[lang]['LanguageCode']
return best_guess
def translate_text(text, source_lang, destination_lang):
"""
Translates given text from source language into destination language
Parameters
----------
text: string, required
Input text in source language
Returns
-------
string
Translated text in destination language
"""
result = translate.translate_text(Text=text,
SourceLanguageCode=source_lang, TargetLanguageCode=destination_lang)
return result.get('TranslatedText')
| [
"boto3.client"
] | [((27, 66), 'boto3.client', 'boto3.client', ([], {'service_name': '"""comprehend"""'}), "(service_name='comprehend')\n", (39, 66), False, 'import boto3\n'), ((79, 117), 'boto3.client', 'boto3.client', ([], {'service_name': '"""translate"""'}), "(service_name='translate')\n", (91, 117), False, 'import boto3\n')] |
# 2020.05.10
# update topNscore
# learner on subspace
# particular designed for encounter missing class in this subspace
# if one class do not exists in training data, probability for this class would be zeros under anytime
#
# learner: a regressor or classifier, must have methods named 'predict'
# num_class: total number of class in dataset
import numpy as np
from sklearn.metrics import accuracy_score
class myLearner():
def __init__(self, learner, num_class):
self.learner = learner
self.num_class = num_class
self.class_list = {}
self.oneclass = False
self.trained = False
def mapping(self, Y, train=True, probability=False):
c, res = 0, []
Y = Y.reshape(Y.shape[0], -1)
if train == True:
self.class_list = {}
for i in range(np.array(Y).shape[0]):
if Y[i, 0] not in self.class_list.keys():
self.class_list[Y[i,0]] = c
c += 1
res.append(self.class_list[Y[i, 0]])
else:
if probability == False:
for i in range(np.array(Y).shape[0]):
for d in self.class_list.keys():
if self.class_list[d] == Y[i, 0]:
res.append(d)
else:
res = np.zeros((Y.shape[0], self.num_class))
for i in range(np.array(Y).shape[0]):
c = 0
for j in range(self.num_class):
if j in self.class_list.keys():
res[i, j] = Y[i, self.class_list[j]]
c += 1
return np.array(res)
def fit(self, X, Y):
Y = self.mapping(Y, train=True)
if np.unique(Y).shape[0] == 1:
self.oneclass = True
else:
self.learner.fit(X, Y)
self.trained = True
return self
def predict(self, X):
assert (self.trained == True), "Must call fit first!"
if self.oneclass == False:
tmp_pred = self.learner.predict(X).reshape(-1)
else:
tmp_pred = np.zeros((X.shape[0]))
return self.mapping(tmp_pred, train=False)
def predict_proba(self, X):
assert (self.trained == True), "Must call fit first!"
if self.oneclass == False:
tmp_pred = self.learner.predict_proba(X)
else:
tmp_pred = np.ones((X.shape[0], 1))
return self.mapping(tmp_pred, train=False, probability=True)
def score(self, X, Y):
assert (self.trained == True), "Must call fit first!"
return accuracy_score(Y, self.predict(X))
def topNscore(self, X, Y, N=3):
prob = self.predict_proba(X)
idx = np.argsort(prob, axis=1)
ct = 0.
Y = Y.astype('int16')
for i in range(len(Y)):
if Y[i] in (list)(idx[i, -N:]):
ct+=1
return ct/(float)(len(Y))
if __name__ == "__main__":
from sklearn.svm import SVC
from sklearn import datasets
from sklearn.model_selection import train_test_split
print(" > This is a test example: ")
digits = datasets.load_digits()
X = digits.images.reshape((len(digits.images), -1))
print(" input feature shape: %s"%str(X.shape))
X_train, X_test, y_train, y_test = train_test_split(X, digits.target, test_size=0.2, stratify=digits.target)
clf = myLearner(SVC(gamma='scale', probability=True), 10)
clf.fit(X_train, y_train)
print(" --> train acc: %s"%str(clf.score(X_train, y_train)))
print(" --> test acc.: %s"%str(clf.score(X_test, y_test)))
print(" --> test top3 acc.: %s"%str(clf.topNscore(X_test, y_test, 3)))
print("------- DONE -------\n")
| [
"numpy.ones",
"numpy.unique",
"sklearn.model_selection.train_test_split",
"sklearn.datasets.load_digits",
"numpy.argsort",
"numpy.array",
"numpy.zeros",
"sklearn.svm.SVC"
] | [((3189, 3211), 'sklearn.datasets.load_digits', 'datasets.load_digits', ([], {}), '()\n', (3209, 3211), False, 'from sklearn import datasets\n'), ((3358, 3431), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'digits.target'], {'test_size': '(0.2)', 'stratify': 'digits.target'}), '(X, digits.target, test_size=0.2, stratify=digits.target)\n', (3374, 3431), False, 'from sklearn.model_selection import train_test_split\n'), ((1689, 1702), 'numpy.array', 'np.array', (['res'], {}), '(res)\n', (1697, 1702), True, 'import numpy as np\n'), ((2777, 2801), 'numpy.argsort', 'np.argsort', (['prob'], {'axis': '(1)'}), '(prob, axis=1)\n', (2787, 2801), True, 'import numpy as np\n'), ((3457, 3493), 'sklearn.svm.SVC', 'SVC', ([], {'gamma': '"""scale"""', 'probability': '(True)'}), "(gamma='scale', probability=True)\n", (3460, 3493), False, 'from sklearn.svm import SVC\n'), ((2159, 2179), 'numpy.zeros', 'np.zeros', (['X.shape[0]'], {}), '(X.shape[0])\n', (2167, 2179), True, 'import numpy as np\n'), ((2454, 2478), 'numpy.ones', 'np.ones', (['(X.shape[0], 1)'], {}), '((X.shape[0], 1))\n', (2461, 2478), True, 'import numpy as np\n'), ((1347, 1385), 'numpy.zeros', 'np.zeros', (['(Y.shape[0], self.num_class)'], {}), '((Y.shape[0], self.num_class))\n', (1355, 1385), True, 'import numpy as np\n'), ((1780, 1792), 'numpy.unique', 'np.unique', (['Y'], {}), '(Y)\n', (1789, 1792), True, 'import numpy as np\n'), ((840, 851), 'numpy.array', 'np.array', (['Y'], {}), '(Y)\n', (848, 851), True, 'import numpy as np\n'), ((1131, 1142), 'numpy.array', 'np.array', (['Y'], {}), '(Y)\n', (1139, 1142), True, 'import numpy as np\n'), ((1417, 1428), 'numpy.array', 'np.array', (['Y'], {}), '(Y)\n', (1425, 1428), True, 'import numpy as np\n')] |
"""Contains tests for the functions found in ``controls/templates_helpers.py``
To run this particular test file use the following command line:
nose2 -v app.tests.controls.tests_template_helpers
"""
from app import db, create_app
import unittest
from unittest import TestCase
from config import Config
from app.tests.utils import (dummy_post, control_categories,
control_search_bar,
add_three_dummy_widget_positions)
from app.models import SearchBarControls, CategoriesControls
from app.controls.template_helpers import (ordered_widgets,
categories_presence,
sidebar_widget_count,
search_bar_placement)
class TestConfig(Config):
""" Custom configuration for our tests.
Attributes
----------
TESTING : bool
Enable testing mode. Exceptions are propagated rather than handled by
the app’s error handlers.
Must be set to True to prevent the mail logger from sending email
warnings.
WHOOSHEE_MEMORY_STORAGE : bool
When set to True use the memory as storage. We need that during our
tests so the data that we write in the in-memory SQLite database do
not become indexed.
SQLALCHEMY_DATABASE_URI : str
Make SQLAlchemy to use an in-memory SQLite database during the tests,
so this way we are not writing dummy test data to our production
database.
"""
TESTING = True
WHOOSHEE_MEMORY_STORAGE = True
SQLALCHEMY_DATABASE_URI = 'sqlite://'
class TemplateGlobal(TestCase):
"""Contains tests for the blueprint's custom template global functions.
These functions can be found in the ``controls/templates_helpers.py``. They
are decorated with ``app_template_global``.
"""
def setUp(self):
self.app = create_app(TestConfig)
self.app_context = self.app.app_context()
self.app_context.push()
db.drop_all()
db.create_all()
dummy_post()
control_categories("no_posts")
control_search_bar('navbar')
def tearDown(self):
self.app_context.pop()
def test_search_bar_placement(self):
"""Testing of the ``search_bar_placement`` function.
"""
search_bar_placement()
query = SearchBarControls.query.first()
self.assertEqual(query.placement, 'navbar',
"Function was not capable to get the value "
"representing the search bar placement.")
def test_categories_presence(self):
categories_presence()
query = CategoriesControls.query.first()
self.assertEqual(query.presence, 'no_posts',
"Function was not capable to get the value "
"representing where the categories can be found on "
"the page.")
def test_sidebar_widget_count(self):
add_three_dummy_widget_positions()
self.assertEqual(sidebar_widget_count(), 3,
"Total number of entry in the the table of the"
" ``WidgetOrder`` is not what is expected.")
def test_ordered_widgets(self):
add_three_dummy_widget_positions()
ow = ordered_widgets()
expected = ['Search Bar Widget', 'Category Widget',
'Dummy Content Widget']
self.assertEqual(expected, ow, "List of widgets name was not returned"
"with the expected order.")
if __name__ == '__main__':
unittest.main(verbosity=2) | [
"app.controls.template_helpers.sidebar_widget_count",
"app.tests.utils.control_search_bar",
"app.controls.template_helpers.categories_presence",
"app.db.create_all",
"app.create_app",
"app.controls.template_helpers.ordered_widgets",
"app.models.SearchBarControls.query.first",
"app.tests.utils.add_thre... | [((3649, 3675), 'unittest.main', 'unittest.main', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (3662, 3675), False, 'import unittest\n'), ((1928, 1950), 'app.create_app', 'create_app', (['TestConfig'], {}), '(TestConfig)\n', (1938, 1950), False, 'from app import db, create_app\n'), ((2041, 2054), 'app.db.drop_all', 'db.drop_all', ([], {}), '()\n', (2052, 2054), False, 'from app import db, create_app\n'), ((2063, 2078), 'app.db.create_all', 'db.create_all', ([], {}), '()\n', (2076, 2078), False, 'from app import db, create_app\n'), ((2087, 2099), 'app.tests.utils.dummy_post', 'dummy_post', ([], {}), '()\n', (2097, 2099), False, 'from app.tests.utils import dummy_post, control_categories, control_search_bar, add_three_dummy_widget_positions\n'), ((2108, 2138), 'app.tests.utils.control_categories', 'control_categories', (['"""no_posts"""'], {}), "('no_posts')\n", (2126, 2138), False, 'from app.tests.utils import dummy_post, control_categories, control_search_bar, add_three_dummy_widget_positions\n'), ((2147, 2175), 'app.tests.utils.control_search_bar', 'control_search_bar', (['"""navbar"""'], {}), "('navbar')\n", (2165, 2175), False, 'from app.tests.utils import dummy_post, control_categories, control_search_bar, add_three_dummy_widget_positions\n'), ((2355, 2377), 'app.controls.template_helpers.search_bar_placement', 'search_bar_placement', ([], {}), '()\n', (2375, 2377), False, 'from app.controls.template_helpers import ordered_widgets, categories_presence, sidebar_widget_count, search_bar_placement\n'), ((2394, 2425), 'app.models.SearchBarControls.query.first', 'SearchBarControls.query.first', ([], {}), '()\n', (2423, 2425), False, 'from app.models import SearchBarControls, CategoriesControls\n'), ((2664, 2685), 'app.controls.template_helpers.categories_presence', 'categories_presence', ([], {}), '()\n', (2683, 2685), False, 'from app.controls.template_helpers import ordered_widgets, categories_presence, sidebar_widget_count, search_bar_placement\n'), ((2702, 2734), 'app.models.CategoriesControls.query.first', 'CategoriesControls.query.first', ([], {}), '()\n', (2732, 2734), False, 'from app.models import SearchBarControls, CategoriesControls\n'), ((3024, 3058), 'app.tests.utils.add_three_dummy_widget_positions', 'add_three_dummy_widget_positions', ([], {}), '()\n', (3056, 3058), False, 'from app.tests.utils import dummy_post, control_categories, control_search_bar, add_three_dummy_widget_positions\n'), ((3300, 3334), 'app.tests.utils.add_three_dummy_widget_positions', 'add_three_dummy_widget_positions', ([], {}), '()\n', (3332, 3334), False, 'from app.tests.utils import dummy_post, control_categories, control_search_bar, add_three_dummy_widget_positions\n'), ((3348, 3365), 'app.controls.template_helpers.ordered_widgets', 'ordered_widgets', ([], {}), '()\n', (3363, 3365), False, 'from app.controls.template_helpers import ordered_widgets, categories_presence, sidebar_widget_count, search_bar_placement\n'), ((3085, 3107), 'app.controls.template_helpers.sidebar_widget_count', 'sidebar_widget_count', ([], {}), '()\n', (3105, 3107), False, 'from app.controls.template_helpers import ordered_widgets, categories_presence, sidebar_widget_count, search_bar_placement\n')] |
import torch
import torch.nn as nn
from ..networks.basic.util import check
from ..networks.basic.predict import PredictNet, PredictLayer, OutLayer
from ..utils.util import get_shape_from_obs_space
class OneHot:
def __init__(self, out_dim):
self.out_dim = out_dim
def transform(self, tensor):
y_onehot = tensor.new(*tensor.shape[:-1], self.out_dim).zero_()
y_onehot.scatter_(-1, tensor.long(), 1)
return y_onehot.float()
class Predict(nn.Module):
def __init__(self, args, obs_space, action_space, cent_obs_space, device=torch.device("cpu")):
super(Predict, self).__init__()
self.hidden_size = args.hidden_size
self._gain = args.gain
self._use_orthogonal = args.use_orthogonal
self.tpdv = dict(dtype=torch.float32, device=device)
obs_shape = get_shape_from_obs_space(obs_space)
cent_obs_shape = get_shape_from_obs_space(cent_obs_space)
self.net = PredictNet(args, obs_shape, action_space, cent_obs_shape, use_projector=True)
self.onehot = OneHot(action_space.n)
self.to(device)
def forward(self, obs, actions):
obs = check(obs).to(**self.tpdv)
actions = self.onehot.transform(check(actions)).to(**self.tpdv)
x = torch.cat((obs, actions), dim=-1)
out = self.net(x)
return out
class Projector(nn.Module):
def __init__(self, args, cent_obs_space, device=torch.device("cpu")):
super(Projector, self).__init__()
self.args = args
self.hidden_size = args.hidden_size
self._gain = args.gain
self._use_orthogonal = args.use_orthogonal
self.tpdv = dict(dtype=torch.float32, device=device)
cent_obs_shape = get_shape_from_obs_space(cent_obs_space)
if args.predict_dim:
self.mlp = PredictLayer(cent_obs_shape[0], args.predict_dim,
1, self._use_orthogonal, False)
# self.out = OutLayer(cent_obs_shape[0] // 4, self.hidden_size, self._use_orthogonal, False)
self.to(device)
def forward(self, cent_obs):
x = check(cent_obs).to(**self.tpdv)
if self.args.predict_dim:
out = self.mlp(x)
# out = self.out(out)
else:
out = x
return out | [
"torch.cat",
"torch.device"
] | [((566, 585), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (578, 585), False, 'import torch\n'), ((1271, 1304), 'torch.cat', 'torch.cat', (['(obs, actions)'], {'dim': '(-1)'}), '((obs, actions), dim=-1)\n', (1280, 1304), False, 'import torch\n'), ((1431, 1450), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (1443, 1450), False, 'import torch\n')] |
"""Compare evolution matrices to v0.1 numerics"""
import wetrunner
import unittest
from pkg_resources import resource_filename
import numpy as np
import numpy.testing as npt
def getUs_new(classname):
arg = (0.56, 5, 0.12, 1/127, 0, 0, 0, 1.2, 4.2, 0, 0, 1.8)
return wetrunner.rge.getUs(classname, *arg)
def getUe_new(classname):
arg = (0.56, 5, 0.12, 1/127, 0, 0, 0, 1.2, 4.2, 0, 0, 1.8)
return wetrunner.rge.getUe(classname, *arg)
class TestEvMat(unittest.TestCase):
def test_evmat(self):
Usold = dict(np.load(resource_filename('wetrunner', 'tests/data/evmat_s_old.npz')))
Ueold = dict(np.load(resource_filename('wetrunner', 'tests/data/evmat_e_old.npz')))
Usnew = {k: getUs_new(k) for k in ['I', 'II', 'III', 'IV', 'Vb']}
Uenew = {k: getUe_new(k) for k in ['I', 'II', 'III', 'IV', 'Vb']}
Usnew['V'] = getUs_new('Vsb')
Uenew['V'] = getUe_new('Vsb')
for k in ['I', 'II', 'III', 'IV', 'V', 'Vb']:
npt.assert_array_almost_equal(Usold[k], Usnew[k],
err_msg="Failed for {} QCD".format(k))
for k in ['I', 'II', 'III', 'IV', 'Vb']: # NB, skipping V!
npt.assert_array_almost_equal(100*Ueold[k], 100*Uenew[k],
decimal=2,
err_msg="Failed for {} QED".format(k))
| [
"wetrunner.rge.getUs",
"wetrunner.rge.getUe",
"pkg_resources.resource_filename"
] | [((277, 313), 'wetrunner.rge.getUs', 'wetrunner.rge.getUs', (['classname', '*arg'], {}), '(classname, *arg)\n', (296, 313), False, 'import wetrunner\n'), ((416, 452), 'wetrunner.rge.getUe', 'wetrunner.rge.getUe', (['classname', '*arg'], {}), '(classname, *arg)\n', (435, 452), False, 'import wetrunner\n'), ((546, 606), 'pkg_resources.resource_filename', 'resource_filename', (['"""wetrunner"""', '"""tests/data/evmat_s_old.npz"""'], {}), "('wetrunner', 'tests/data/evmat_s_old.npz')\n", (563, 606), False, 'from pkg_resources import resource_filename\n'), ((638, 698), 'pkg_resources.resource_filename', 'resource_filename', (['"""wetrunner"""', '"""tests/data/evmat_e_old.npz"""'], {}), "('wetrunner', 'tests/data/evmat_e_old.npz')\n", (655, 698), False, 'from pkg_resources import resource_filename\n')] |
"""
Copyright (c) 2020 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
from argparse import ArgumentParser
from typing import NamedTuple, Any
import torch
from os import listdir, makedirs
from os.path import isfile, join, exists
from shutil import copyfile
from nncf.torch.quantization.layers import SymmetricQuantizer, AsymmetricQuantizer
class ParameterToAdd(NamedTuple):
name: str
value: Any
def main(argv):
parser = ArgumentParser()
parser.add_argument('-i', '--input-folder', help='Path to directory with given checkpoints to modify',
required=True)
parser.add_argument('-o', '--output-folder', help='Path to directory to save modified checkpoints', required=True)
parser.add_argument('-b', '--bitwidth', help='Bitwidth to initialize quantizer',
required=False, default=8, type=int)
parser.add_argument('-v', '--verbose', help='Print all new names of parameters', required=False,
action='store_true')
args = parser.parse_args(args=argv)
src_dir = args.input_folder
dst_dir = args.output_folder
if not exists(dst_dir):
makedirs(dst_dir)
param_list = [ParameterToAdd('_num_bits', torch.IntTensor([args.bitwidth])),
ParameterToAdd('enabled', torch.IntTensor([1]))]
pth_files = [(join(src_dir, f), join(dst_dir, f)) for f in listdir(src_dir) if
isfile(join(src_dir, f)) and ('.pth' in f or '.sd' in f)]
files_to_copy = []
for pair in pth_files:
src_file, dst_file = pair
if 'binarization' in src_file:
files_to_copy.append(pair)
continue
sd = pth = torch.load(src_file)
if 'state_dict' in pth:
sd = pth['state_dict']
hooks = [SymmetricQuantizer.SCALE_PARAM_NAME, AsymmetricQuantizer.INPUT_LOW_PARAM_NAME]
new_keys = {}
for new_parameter in param_list:
old_keys = list(sd.keys())
for k in sd.keys():
for h in hooks:
new_key = k.replace(h, new_parameter.name)
if ('.' + h in k) and ('.' + new_parameter.name not in k) and (new_key not in old_keys):
new_keys[new_key] = new_parameter.value
if new_keys:
print(f'\nAdding #{len(new_keys)} of new keys')
if args.verbose:
print('New keys:', new_keys, sep='\n')
for new_key, value in new_keys.items():
sd[new_key] = value
pth['state_dict'] = sd
torch.save(pth, dst_file)
else:
files_to_copy.append(pair)
for src_file, dst_file in files_to_copy:
print("\nCopying {}".format(dst_file))
copyfile(src_file, dst_file)
if __name__ == '__main__':
main(sys.argv[1:])
| [
"os.path.exists",
"os.listdir",
"os.makedirs",
"argparse.ArgumentParser",
"torch.load",
"os.path.join",
"shutil.copyfile",
"torch.save",
"torch.IntTensor"
] | [((957, 973), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (971, 973), False, 'from argparse import ArgumentParser\n'), ((1648, 1663), 'os.path.exists', 'exists', (['dst_dir'], {}), '(dst_dir)\n', (1654, 1663), False, 'from os.path import isfile, join, exists\n'), ((1673, 1690), 'os.makedirs', 'makedirs', (['dst_dir'], {}), '(dst_dir)\n', (1681, 1690), False, 'from os import listdir, makedirs\n'), ((2202, 2222), 'torch.load', 'torch.load', (['src_file'], {}), '(src_file)\n', (2212, 2222), False, 'import torch\n'), ((3269, 3297), 'shutil.copyfile', 'copyfile', (['src_file', 'dst_file'], {}), '(src_file, dst_file)\n', (3277, 3297), False, 'from shutil import copyfile\n'), ((1738, 1770), 'torch.IntTensor', 'torch.IntTensor', (['[args.bitwidth]'], {}), '([args.bitwidth])\n', (1753, 1770), False, 'import torch\n'), ((1817, 1837), 'torch.IntTensor', 'torch.IntTensor', (['[1]'], {}), '([1])\n', (1832, 1837), False, 'import torch\n'), ((1859, 1875), 'os.path.join', 'join', (['src_dir', 'f'], {}), '(src_dir, f)\n', (1863, 1875), False, 'from os.path import isfile, join, exists\n'), ((1877, 1893), 'os.path.join', 'join', (['dst_dir', 'f'], {}), '(dst_dir, f)\n', (1881, 1893), False, 'from os.path import isfile, join, exists\n'), ((1904, 1920), 'os.listdir', 'listdir', (['src_dir'], {}), '(src_dir)\n', (1911, 1920), False, 'from os import listdir, makedirs\n'), ((3089, 3114), 'torch.save', 'torch.save', (['pth', 'dst_file'], {}), '(pth, dst_file)\n', (3099, 3114), False, 'import torch\n'), ((1948, 1964), 'os.path.join', 'join', (['src_dir', 'f'], {}), '(src_dir, f)\n', (1952, 1964), False, 'from os.path import isfile, join, exists\n')] |
#!/usr/bin/python3
import subprocess
import yaml
try:
from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
from yaml import Loader, Dumper
#MAC address of the smart plug
MAC_ADDRESS = '84:f3:eb:32:e3:b4'
# netmask of your network
NET_MASK = '192.168.1.1/24'
#octopi config
OCTOPI_CONFIG = '/home/pi/.octoprint/config.yaml'
if __name__ == "__main__":
subprocess.check_call(['/usr/bin/nmap','-sP','-T4',NET_MASK])
p = subprocess.Popen(['/usr/sbin/arp','-n'], stdout=subprocess.PIPE)
for line in iter(p.stdout.readline, b''):
str = line.decode('utf-8')
if(MAC_ADDRESS in str):
ipaddress = str.split(' ')[0]
print('Found Plug with address %s' % ipaddress)
config = open(OCTOPI_CONFIG)
configy = yaml.load(config, Loader=Loader)
config.close()
# print(configy)
print(configy['plugins']['tuyasmartplug']['arrSmartplugs'][0]['ip'])
if configy['plugins']['tuyasmartplug']['arrSmartplugs'][0]['ip'] != ipaddress:
configy['plugins']['tuyasmartplug']['arrSmartplugs'][0]['ip'] = ipaddress
config = open(OCTOPI_CONFIG, mode='w')
config.write(yaml.safe_dump(configy, allow_unicode=False))
config.close()
break
| [
"subprocess.Popen",
"yaml.safe_dump",
"yaml.load",
"subprocess.check_call"
] | [((384, 448), 'subprocess.check_call', 'subprocess.check_call', (["['/usr/bin/nmap', '-sP', '-T4', NET_MASK]"], {}), "(['/usr/bin/nmap', '-sP', '-T4', NET_MASK])\n", (405, 448), False, 'import subprocess\n'), ((453, 518), 'subprocess.Popen', 'subprocess.Popen', (["['/usr/sbin/arp', '-n']"], {'stdout': 'subprocess.PIPE'}), "(['/usr/sbin/arp', '-n'], stdout=subprocess.PIPE)\n", (469, 518), False, 'import subprocess\n'), ((763, 795), 'yaml.load', 'yaml.load', (['config'], {'Loader': 'Loader'}), '(config, Loader=Loader)\n', (772, 795), False, 'import yaml\n'), ((1151, 1195), 'yaml.safe_dump', 'yaml.safe_dump', (['configy'], {'allow_unicode': '(False)'}), '(configy, allow_unicode=False)\n', (1165, 1195), False, 'import yaml\n')] |
from tkinter_gui_builder.panel_templates.widget_panel.widget_panel import AbstractWidgetPanel
from tkinter_gui_builder.widgets import basic_widgets
class CanvasDemoButtonPanel(AbstractWidgetPanel):
fname_select = basic_widgets.Button
zoom_in = basic_widgets.Button
zoom_out = basic_widgets.Button
rect_select = basic_widgets.Button
update_rect_image = basic_widgets.Button
pan = basic_widgets.Button
draw_line_w_drag = basic_widgets.Button
draw_line_w_click = basic_widgets.Button
draw_arrow_w_drag = basic_widgets.Button
draw_arrow_w_click = basic_widgets.Button
draw_rect_w_drag = basic_widgets.Button
draw_rect_w_click = basic_widgets.Button
draw_polygon_w_click = basic_widgets.Button
draw_point_w_click = basic_widgets.Button
modify_existing_shape = basic_widgets.Button
color_selector = basic_widgets.Button
save_kml = basic_widgets.Button
select_existing_shape = basic_widgets.Combobox # type: basic_widgets.Combobox
remap_dropdown = basic_widgets.Combobox # type: basic_widgets.Combobox
def __init__(self, parent):
AbstractWidgetPanel.__init__(self, parent)
controls = ["fname_select",
"zoom_in",
"zoom_out",
"pan",
"draw_line_w_drag",
"draw_line_w_click",
"draw_arrow_w_drag",
"draw_arrow_w_click",
"draw_rect_w_drag",
"draw_rect_w_click",
"draw_polygon_w_click",
"draw_point_w_click",
"select_existing_shape",
"modify_existing_shape",
"save_kml",
"color_selector",
"rect_select",
"update_rect_image",
"remap_dropdown"]
self.init_w_box_layout(controls, 4, column_widths=20)
self.remap_dropdown.update_combobox_values(["density",
"brighter",
"darker",
"high contrast",
"linear",
"log",
"pedf",
"nrl"])
self.set_label_text("taser buttons")
if __name__ == '__main__':
print(dir(AbstractWidgetPanel))
| [
"tkinter_gui_builder.panel_templates.widget_panel.widget_panel.AbstractWidgetPanel.__init__"
] | [((1123, 1165), 'tkinter_gui_builder.panel_templates.widget_panel.widget_panel.AbstractWidgetPanel.__init__', 'AbstractWidgetPanel.__init__', (['self', 'parent'], {}), '(self, parent)\n', (1151, 1165), False, 'from tkinter_gui_builder.panel_templates.widget_panel.widget_panel import AbstractWidgetPanel\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import numpy as np
import mechkit
import mechmean
class KanataniFactory(object):
def __init__(self, N):
self.con = mechkit.notation.Converter()
self._I2 = mechkit.tensors.Basic().I2
self.N = N = self.con.to_tensor(N)
self.degree = len(N.shape)
degrees = [x for x in range(1, self.degree + 1) if x % 2 == 0]
for degree in reversed(degrees):
N = self.first_kind(degree)
setattr(self, "N{}".format(degree), N)
setattr(self, "F{}".format(degree), self.second_kind(N))
setattr(self, "D{}".format(degree), self.third_kind(N))
def __getitem__(self, key):
"""Make attributes accessible dict-like."""
return getattr(self, key)
def first_kind(self, degree):
nbr_times_decrease = int((self.degree - degree) / 2)
N = self.N
for i in range(nbr_times_decrease):
N = self.decrease_first_kind_by_one_degree(N)
return N
def decrease_first_kind_by_one_degree(self, N):
return np.einsum("...ij, ...ij->...", N, self._I2)
def second_kind(self, N):
degree = len(N.shape)
func = self._get_func_second_kind(degree=degree)
return func(N)
def _get_func_second_kind(self, degree):
funcs = {
2: self.second_kind_N2,
4: self.second_kind_N4,
}
return funcs[degree]
def second_kind_N2(self, N):
return 15.0 / 2.0 * (N - 1.0 / 5.0 * self._I2)
def second_kind_N4(self, N):
return (
315.0
/ 8.0
* (
N
- 2.0
/ 3.0
* mechmean.operators.sym(
np.multiply.outer(self._I2, self.first_kind(degree=2))
)
+ 1.0
/ 21.0
* mechmean.operators.sym(np.multiply.outer(self._I2, self._I2))
)
)
def third_kind(self, N):
degree = len(N.shape)
func = self._get_func_third_kind(degree=degree)
return func(N)
def _get_func_third_kind(self, degree):
funcs = {2: self.third_kind_N2, 4: self.third_kind_N4}
return funcs[degree]
def third_kind_N2(self, N):
return 15.0 / 2.0 * (N - 1.0 / 3.0 * self._I2)
def third_kind_N4(self, N):
return (
315.0
/ 8.0
* (
N
- 6.0
/ 7.0
* mechmean.operators.sym(
np.multiply.outer(self._I2, self.first_kind(degree=2))
)
+ 3.0
/ 35.0
* mechmean.operators.sym(np.multiply.outer(self._I2, self._I2))
)
)
def evenly_distributed_vectors_on_sphere(nbr_vectors=1000):
"""
Define nbr_vectors evenly distributed vectors on a sphere
Using the golden spiral method kindly provided by
stackoverflow-user "<NAME>"
https://stackoverflow.com/a/44164075/8935243
"""
from numpy import pi, cos, sin, arccos, arange
indices = arange(0, nbr_vectors, dtype=float) + 0.5
phi = arccos(1 - 2 * indices / nbr_vectors)
theta = pi * (1 + 5 ** 0.5) * indices
x, y, z = cos(theta) * sin(phi), sin(theta) * sin(phi), cos(phi)
orientations = np.column_stack((x, y, z))
return orientations
def first_kind_discrete(orientations, order=4):
"""
Calc orientation tensors of ... kind
"""
# Normalize orientations
orientations = [np.array(v) / np.linalg.norm(v) for v in orientations]
# Symmetrize orientations
# orientations_reversed = [-v for v in orientations]
# orientations = orientations + orientations_reversed
einsumStrings = {
1: "ij -> j",
2: "ij, ik -> jk",
3: "ij, ik, il -> jkl",
4: "ij, ik, il, im -> jklm",
5: "ij, ik, il, im, in -> jklmn",
6: "ij, ik, il, im, in, ip -> jklmnp",
}
if order > 6:
einsumStrings[order] = einsum_str_fabric_tensor_first_kind_discrete(order=order)
einsumArgs = [orientations for i in range(order)]
N = 1.0 / len(orientations) * np.einsum(einsumStrings[order], *einsumArgs)
return N
def einsum_str_fabric_tensor_first_kind_discrete(order):
"""
Generalize to higher orders:
N = sum_i 'order'-times_dyad_product(vector)
=
1: 'ij -> j',
2: 'ij, ik -> jk',
3: 'ij, ik, il -> jkl',
4: 'ij, ik, il, im -> jklm',
5: 'ij, ik, il, im, in -> jklmn',
6: 'ij, ik, il, im, in, ip -> jklmnp',
...
"""
# Get list of all available characters
import string
letters = list(string.ascii_letters)
letters.remove("i")
# Create einsum string and arguments
einsumInput = ",".join(["i" + letters[index] for index in range(order)])
einsumOut = "".join(letters[0:order])
einsumString = einsumInput + "->" + einsumOut
return einsumString
| [
"numpy.arccos",
"mechkit.notation.Converter",
"mechkit.tensors.Basic",
"numpy.column_stack",
"numpy.multiply.outer",
"numpy.array",
"numpy.einsum",
"numpy.cos",
"numpy.linalg.norm",
"numpy.sin",
"numpy.arange"
] | [((3198, 3235), 'numpy.arccos', 'arccos', (['(1 - 2 * indices / nbr_vectors)'], {}), '(1 - 2 * indices / nbr_vectors)\n', (3204, 3235), False, 'from numpy import pi, cos, sin, arccos, arange\n'), ((3367, 3393), 'numpy.column_stack', 'np.column_stack', (['(x, y, z)'], {}), '((x, y, z))\n', (3382, 3393), True, 'import numpy as np\n'), ((177, 205), 'mechkit.notation.Converter', 'mechkit.notation.Converter', ([], {}), '()\n', (203, 205), False, 'import mechkit\n'), ((1093, 1136), 'numpy.einsum', 'np.einsum', (['"""...ij, ...ij->..."""', 'N', 'self._I2'], {}), "('...ij, ...ij->...', N, self._I2)\n", (1102, 1136), True, 'import numpy as np\n'), ((3145, 3180), 'numpy.arange', 'arange', (['(0)', 'nbr_vectors'], {'dtype': 'float'}), '(0, nbr_vectors, dtype=float)\n', (3151, 3180), False, 'from numpy import pi, cos, sin, arccos, arange\n'), ((3339, 3347), 'numpy.cos', 'cos', (['phi'], {}), '(phi)\n', (3342, 3347), False, 'from numpy import pi, cos, sin, arccos, arange\n'), ((4243, 4287), 'numpy.einsum', 'np.einsum', (['einsumStrings[order]', '*einsumArgs'], {}), '(einsumStrings[order], *einsumArgs)\n', (4252, 4287), True, 'import numpy as np\n'), ((225, 248), 'mechkit.tensors.Basic', 'mechkit.tensors.Basic', ([], {}), '()\n', (246, 248), False, 'import mechkit\n'), ((3293, 3303), 'numpy.cos', 'cos', (['theta'], {}), '(theta)\n', (3296, 3303), False, 'from numpy import pi, cos, sin, arccos, arange\n'), ((3306, 3314), 'numpy.sin', 'sin', (['phi'], {}), '(phi)\n', (3309, 3314), False, 'from numpy import pi, cos, sin, arccos, arange\n'), ((3316, 3326), 'numpy.sin', 'sin', (['theta'], {}), '(theta)\n', (3319, 3326), False, 'from numpy import pi, cos, sin, arccos, arange\n'), ((3329, 3337), 'numpy.sin', 'sin', (['phi'], {}), '(phi)\n', (3332, 3337), False, 'from numpy import pi, cos, sin, arccos, arange\n'), ((3574, 3585), 'numpy.array', 'np.array', (['v'], {}), '(v)\n', (3582, 3585), True, 'import numpy as np\n'), ((3588, 3605), 'numpy.linalg.norm', 'np.linalg.norm', (['v'], {}), '(v)\n', (3602, 3605), True, 'import numpy as np\n'), ((1928, 1965), 'numpy.multiply.outer', 'np.multiply.outer', (['self._I2', 'self._I2'], {}), '(self._I2, self._I2)\n', (1945, 1965), True, 'import numpy as np\n'), ((2740, 2777), 'numpy.multiply.outer', 'np.multiply.outer', (['self._I2', 'self._I2'], {}), '(self._I2, self._I2)\n', (2757, 2777), True, 'import numpy as np\n')] |
from setuptools import setup, find_packages
setup(
name="fancylog",
version="0.2.6",
description="Fancier logging in Python",
install_requires=["packaging", "rich"],
extras_require={
"dev": [
"black",
"pytest-cov",
"pytest",
"coverage",
"bump2version",
"pre-commit",
"flake8",
]
},
python_requires=">=3.6",
packages=find_packages(),
include_package_data=True,
url="https://github.com/adamltyson/fancylog",
author="<NAME>",
author_email="<EMAIL>",
classifiers=[
"Development Status :: 3 - Alpha",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Intended Audience :: Developers",
],
)
| [
"setuptools.find_packages"
] | [((450, 465), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (463, 465), False, 'from setuptools import setup, find_packages\n')] |
from ymmsl import Identifier, Reference
import pytest
import yatiml
def test_create_identifier() -> None:
part = Identifier('testing')
assert str(part) == 'testing'
part = Identifier('CapiTaLs')
assert str(part) == 'CapiTaLs'
part = Identifier('under_score')
assert str(part) == 'under_score'
part = Identifier('_underscore')
assert str(part) == '_underscore'
part = Identifier('digits123')
assert str(part) == 'digits123'
with pytest.raises(ValueError):
Identifier('1initialdigit')
with pytest.raises(ValueError):
Identifier('test.period')
with pytest.raises(ValueError):
Identifier('test-hyphen')
with pytest.raises(ValueError):
Identifier('test space')
with pytest.raises(ValueError):
Identifier('test/slash')
def test_compare_identifier() -> None:
assert Identifier('test') == Identifier('test')
assert Identifier('test1') != Identifier('test2')
assert Identifier('test') == 'test'
assert 'test' == Identifier('test') # pylint: disable=C0122
assert Identifier('test') != 'test2'
assert 'test2' != Identifier('test') # pylint: disable=C0122
def test_identifier_dict_key() -> None:
test_dict = {Identifier('test'): 1}
assert test_dict[Identifier('test')] == 1
def test_create_reference() -> None:
test_ref = Reference('_testing')
assert str(test_ref) == '_testing'
assert len(test_ref) == 1
assert isinstance(test_ref[0], Identifier)
assert str(test_ref[0]) == '_testing'
with pytest.raises(ValueError):
Reference('1test')
test_ref = Reference('test.testing')
assert len(test_ref) == 2
assert isinstance(test_ref[0], Identifier)
assert str(test_ref[0]) == 'test'
assert isinstance(test_ref[1], Identifier)
assert str(test_ref[1]) == 'testing'
assert str(test_ref) == 'test.testing'
test_ref = Reference('test[12]')
assert len(test_ref) == 2
assert isinstance(test_ref[0], Identifier)
assert str(test_ref[0]) == 'test'
assert isinstance(test_ref[1], int)
assert test_ref[1] == 12
assert str(test_ref) == 'test[12]'
test_ref = Reference('test[12].testing.ok.index[3][5]')
assert len(test_ref) == 7
assert isinstance(test_ref[0], Identifier)
assert str(test_ref[0]) == 'test'
assert isinstance(test_ref[1], int)
assert test_ref[1] == 12
assert isinstance(test_ref[2], Identifier)
assert str(test_ref[2]) == 'testing'
assert isinstance(test_ref[3], Identifier)
assert str(test_ref[3]) == 'ok'
assert isinstance(test_ref[4], Identifier)
assert str(test_ref[4]) == 'index'
assert isinstance(test_ref[5], int)
assert test_ref[5] == 3
assert isinstance(test_ref[6], int)
assert test_ref[6] == 5
assert str(test_ref) == 'test[12].testing.ok.index[3][5]'
with pytest.raises(ValueError):
Reference([4])
with pytest.raises(ValueError):
Reference([3, Identifier('test')])
with pytest.raises(ValueError):
Reference('ua",.u8[')
with pytest.raises(ValueError):
Reference('test[4')
with pytest.raises(ValueError):
Reference('test4]')
with pytest.raises(ValueError):
Reference('test[_t]')
with pytest.raises(ValueError):
Reference('testing_{3}')
with pytest.raises(ValueError):
Reference('test.(x)')
with pytest.raises(ValueError):
Reference('[3]test')
with pytest.raises(ValueError):
Reference('[4].test')
def test_reference_slicing() -> None:
test_ref = Reference('test[12].testing.ok.index[3][5]')
assert test_ref[0] == 'test'
assert test_ref[1] == 12
assert test_ref[3] == 'ok'
assert test_ref[:3] == 'test[12].testing'
assert test_ref[2:] == 'testing.ok.index[3][5]'
with pytest.raises(RuntimeError):
test_ref[0] = 'test2'
with pytest.raises(ValueError):
test_ref[1:] # pylint: disable=pointless-statement
def test_reference_dict_key() -> None:
test_dict = {Reference('test[4]'): 1}
assert test_dict[Reference('test[4]')] == 1
def test_reference_equivalence() -> None:
assert Reference('test.test[3]') == Reference('test.test[3]')
assert Reference('test.test[3]') != Reference('test1.test[3]')
assert Reference('test.test[3]') == 'test.test[3]'
assert Reference('test.test[3]') != 'test1.test[3]'
assert 'test.test[3]' == Reference('test.test[3]') # pylint: disable=C0122
assert 'test1.test[3]' != Reference(
'test.test[3]') # pylint: disable=C0122
def test_reference_concatenation() -> None:
assert Reference('test') + Reference('test2') == 'test.test2'
assert Reference('test') + Identifier('test2') == 'test.test2'
assert Reference('test') + 5 == 'test[5]'
assert Reference('test') + [Identifier('test2'), 5] == 'test.test2[5]'
assert Reference('test[5]') + Reference('test2[3]') == 'test[5].test2[3]'
assert Reference('test[5]') + Identifier('test2') == 'test[5].test2'
assert Reference('test[5]') + 3 == 'test[5][3]'
assert (Reference('test[5]') + [3, Identifier('test2')] ==
'test[5][3].test2')
def test_reference_io() -> None:
load_reference = yatiml.load_function(Reference, Identifier)
text = 'test[12]'
doc = load_reference(text)
assert str(doc[0]) == 'test'
assert doc[1] == 12
dump_reference = yatiml.dumps_function(Identifier, Reference)
doc = Reference('test[12].testing.ok.index[3][5]')
text = dump_reference(doc)
assert text == 'test[12].testing.ok.index[3][5]\n...\n'
| [
"yatiml.load_function",
"ymmsl.Identifier",
"yatiml.dumps_function",
"pytest.raises",
"ymmsl.Reference"
] | [((120, 141), 'ymmsl.Identifier', 'Identifier', (['"""testing"""'], {}), "('testing')\n", (130, 141), False, 'from ymmsl import Identifier, Reference\n'), ((188, 210), 'ymmsl.Identifier', 'Identifier', (['"""CapiTaLs"""'], {}), "('CapiTaLs')\n", (198, 210), False, 'from ymmsl import Identifier, Reference\n'), ((258, 283), 'ymmsl.Identifier', 'Identifier', (['"""under_score"""'], {}), "('under_score')\n", (268, 283), False, 'from ymmsl import Identifier, Reference\n'), ((334, 359), 'ymmsl.Identifier', 'Identifier', (['"""_underscore"""'], {}), "('_underscore')\n", (344, 359), False, 'from ymmsl import Identifier, Reference\n'), ((410, 433), 'ymmsl.Identifier', 'Identifier', (['"""digits123"""'], {}), "('digits123')\n", (420, 433), False, 'from ymmsl import Identifier, Reference\n'), ((1372, 1393), 'ymmsl.Reference', 'Reference', (['"""_testing"""'], {}), "('_testing')\n", (1381, 1393), False, 'from ymmsl import Identifier, Reference\n'), ((1632, 1657), 'ymmsl.Reference', 'Reference', (['"""test.testing"""'], {}), "('test.testing')\n", (1641, 1657), False, 'from ymmsl import Identifier, Reference\n'), ((1920, 1941), 'ymmsl.Reference', 'Reference', (['"""test[12]"""'], {}), "('test[12]')\n", (1929, 1941), False, 'from ymmsl import Identifier, Reference\n'), ((2181, 2225), 'ymmsl.Reference', 'Reference', (['"""test[12].testing.ok.index[3][5]"""'], {}), "('test[12].testing.ok.index[3][5]')\n", (2190, 2225), False, 'from ymmsl import Identifier, Reference\n'), ((3594, 3638), 'ymmsl.Reference', 'Reference', (['"""test[12].testing.ok.index[3][5]"""'], {}), "('test[12].testing.ok.index[3][5]')\n", (3603, 3638), False, 'from ymmsl import Identifier, Reference\n'), ((5251, 5294), 'yatiml.load_function', 'yatiml.load_function', (['Reference', 'Identifier'], {}), '(Reference, Identifier)\n', (5271, 5294), False, 'import yatiml\n'), ((5428, 5472), 'yatiml.dumps_function', 'yatiml.dumps_function', (['Identifier', 'Reference'], {}), '(Identifier, Reference)\n', (5449, 5472), False, 'import yatiml\n'), ((5484, 5528), 'ymmsl.Reference', 'Reference', (['"""test[12].testing.ok.index[3][5]"""'], {}), "('test[12].testing.ok.index[3][5]')\n", (5493, 5528), False, 'from ymmsl import Identifier, Reference\n'), ((480, 505), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (493, 505), False, 'import pytest\n'), ((515, 542), 'ymmsl.Identifier', 'Identifier', (['"""1initialdigit"""'], {}), "('1initialdigit')\n", (525, 542), False, 'from ymmsl import Identifier, Reference\n'), ((553, 578), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (566, 578), False, 'import pytest\n'), ((588, 613), 'ymmsl.Identifier', 'Identifier', (['"""test.period"""'], {}), "('test.period')\n", (598, 613), False, 'from ymmsl import Identifier, Reference\n'), ((624, 649), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (637, 649), False, 'import pytest\n'), ((659, 684), 'ymmsl.Identifier', 'Identifier', (['"""test-hyphen"""'], {}), "('test-hyphen')\n", (669, 684), False, 'from ymmsl import Identifier, Reference\n'), ((695, 720), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (708, 720), False, 'import pytest\n'), ((730, 754), 'ymmsl.Identifier', 'Identifier', (['"""test space"""'], {}), "('test space')\n", (740, 754), False, 'from ymmsl import Identifier, Reference\n'), ((765, 790), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (778, 790), False, 'import pytest\n'), ((800, 824), 'ymmsl.Identifier', 'Identifier', (['"""test/slash"""'], {}), "('test/slash')\n", (810, 824), False, 'from ymmsl import Identifier, Reference\n'), ((877, 895), 'ymmsl.Identifier', 'Identifier', (['"""test"""'], {}), "('test')\n", (887, 895), False, 'from ymmsl import Identifier, Reference\n'), ((899, 917), 'ymmsl.Identifier', 'Identifier', (['"""test"""'], {}), "('test')\n", (909, 917), False, 'from ymmsl import Identifier, Reference\n'), ((929, 948), 'ymmsl.Identifier', 'Identifier', (['"""test1"""'], {}), "('test1')\n", (939, 948), False, 'from ymmsl import Identifier, Reference\n'), ((952, 971), 'ymmsl.Identifier', 'Identifier', (['"""test2"""'], {}), "('test2')\n", (962, 971), False, 'from ymmsl import Identifier, Reference\n'), ((984, 1002), 'ymmsl.Identifier', 'Identifier', (['"""test"""'], {}), "('test')\n", (994, 1002), False, 'from ymmsl import Identifier, Reference\n'), ((1034, 1052), 'ymmsl.Identifier', 'Identifier', (['"""test"""'], {}), "('test')\n", (1044, 1052), False, 'from ymmsl import Identifier, Reference\n'), ((1092, 1110), 'ymmsl.Identifier', 'Identifier', (['"""test"""'], {}), "('test')\n", (1102, 1110), False, 'from ymmsl import Identifier, Reference\n'), ((1144, 1162), 'ymmsl.Identifier', 'Identifier', (['"""test"""'], {}), "('test')\n", (1154, 1162), False, 'from ymmsl import Identifier, Reference\n'), ((1249, 1267), 'ymmsl.Identifier', 'Identifier', (['"""test"""'], {}), "('test')\n", (1259, 1267), False, 'from ymmsl import Identifier, Reference\n'), ((1562, 1587), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1575, 1587), False, 'import pytest\n'), ((1597, 1615), 'ymmsl.Reference', 'Reference', (['"""1test"""'], {}), "('1test')\n", (1606, 1615), False, 'from ymmsl import Identifier, Reference\n'), ((2875, 2900), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2888, 2900), False, 'import pytest\n'), ((2910, 2924), 'ymmsl.Reference', 'Reference', (['[4]'], {}), '([4])\n', (2919, 2924), False, 'from ymmsl import Identifier, Reference\n'), ((2935, 2960), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2948, 2960), False, 'import pytest\n'), ((3015, 3040), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3028, 3040), False, 'import pytest\n'), ((3050, 3071), 'ymmsl.Reference', 'Reference', (['"""ua",.u8["""'], {}), '(\'ua",.u8[\')\n', (3059, 3071), False, 'from ymmsl import Identifier, Reference\n'), ((3082, 3107), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3095, 3107), False, 'import pytest\n'), ((3117, 3136), 'ymmsl.Reference', 'Reference', (['"""test[4"""'], {}), "('test[4')\n", (3126, 3136), False, 'from ymmsl import Identifier, Reference\n'), ((3147, 3172), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3160, 3172), False, 'import pytest\n'), ((3182, 3201), 'ymmsl.Reference', 'Reference', (['"""test4]"""'], {}), "('test4]')\n", (3191, 3201), False, 'from ymmsl import Identifier, Reference\n'), ((3212, 3237), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3225, 3237), False, 'import pytest\n'), ((3247, 3268), 'ymmsl.Reference', 'Reference', (['"""test[_t]"""'], {}), "('test[_t]')\n", (3256, 3268), False, 'from ymmsl import Identifier, Reference\n'), ((3279, 3304), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3292, 3304), False, 'import pytest\n'), ((3314, 3338), 'ymmsl.Reference', 'Reference', (['"""testing_{3}"""'], {}), "('testing_{3}')\n", (3323, 3338), False, 'from ymmsl import Identifier, Reference\n'), ((3349, 3374), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3362, 3374), False, 'import pytest\n'), ((3384, 3405), 'ymmsl.Reference', 'Reference', (['"""test.(x)"""'], {}), "('test.(x)')\n", (3393, 3405), False, 'from ymmsl import Identifier, Reference\n'), ((3416, 3441), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3429, 3441), False, 'import pytest\n'), ((3451, 3471), 'ymmsl.Reference', 'Reference', (['"""[3]test"""'], {}), "('[3]test')\n", (3460, 3471), False, 'from ymmsl import Identifier, Reference\n'), ((3482, 3507), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3495, 3507), False, 'import pytest\n'), ((3517, 3538), 'ymmsl.Reference', 'Reference', (['"""[4].test"""'], {}), "('[4].test')\n", (3526, 3538), False, 'from ymmsl import Identifier, Reference\n'), ((3841, 3868), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (3854, 3868), False, 'import pytest\n'), ((3910, 3935), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3923, 3935), False, 'import pytest\n'), ((4057, 4077), 'ymmsl.Reference', 'Reference', (['"""test[4]"""'], {}), "('test[4]')\n", (4066, 4077), False, 'from ymmsl import Identifier, Reference\n'), ((4185, 4210), 'ymmsl.Reference', 'Reference', (['"""test.test[3]"""'], {}), "('test.test[3]')\n", (4194, 4210), False, 'from ymmsl import Identifier, Reference\n'), ((4214, 4239), 'ymmsl.Reference', 'Reference', (['"""test.test[3]"""'], {}), "('test.test[3]')\n", (4223, 4239), False, 'from ymmsl import Identifier, Reference\n'), ((4251, 4276), 'ymmsl.Reference', 'Reference', (['"""test.test[3]"""'], {}), "('test.test[3]')\n", (4260, 4276), False, 'from ymmsl import Identifier, Reference\n'), ((4280, 4306), 'ymmsl.Reference', 'Reference', (['"""test1.test[3]"""'], {}), "('test1.test[3]')\n", (4289, 4306), False, 'from ymmsl import Identifier, Reference\n'), ((4319, 4344), 'ymmsl.Reference', 'Reference', (['"""test.test[3]"""'], {}), "('test.test[3]')\n", (4328, 4344), False, 'from ymmsl import Identifier, Reference\n'), ((4374, 4399), 'ymmsl.Reference', 'Reference', (['"""test.test[3]"""'], {}), "('test.test[3]')\n", (4383, 4399), False, 'from ymmsl import Identifier, Reference\n'), ((4448, 4473), 'ymmsl.Reference', 'Reference', (['"""test.test[3]"""'], {}), "('test.test[3]')\n", (4457, 4473), False, 'from ymmsl import Identifier, Reference\n'), ((4529, 4554), 'ymmsl.Reference', 'Reference', (['"""test.test[3]"""'], {}), "('test.test[3]')\n", (4538, 4554), False, 'from ymmsl import Identifier, Reference\n'), ((1293, 1311), 'ymmsl.Identifier', 'Identifier', (['"""test"""'], {}), "('test')\n", (1303, 1311), False, 'from ymmsl import Identifier, Reference\n'), ((4103, 4123), 'ymmsl.Reference', 'Reference', (['"""test[4]"""'], {}), "('test[4]')\n", (4112, 4123), False, 'from ymmsl import Identifier, Reference\n'), ((4653, 4670), 'ymmsl.Reference', 'Reference', (['"""test"""'], {}), "('test')\n", (4662, 4670), False, 'from ymmsl import Identifier, Reference\n'), ((4673, 4691), 'ymmsl.Reference', 'Reference', (['"""test2"""'], {}), "('test2')\n", (4682, 4691), False, 'from ymmsl import Identifier, Reference\n'), ((4719, 4736), 'ymmsl.Reference', 'Reference', (['"""test"""'], {}), "('test')\n", (4728, 4736), False, 'from ymmsl import Identifier, Reference\n'), ((4739, 4758), 'ymmsl.Identifier', 'Identifier', (['"""test2"""'], {}), "('test2')\n", (4749, 4758), False, 'from ymmsl import Identifier, Reference\n'), ((4786, 4803), 'ymmsl.Reference', 'Reference', (['"""test"""'], {}), "('test')\n", (4795, 4803), False, 'from ymmsl import Identifier, Reference\n'), ((4832, 4849), 'ymmsl.Reference', 'Reference', (['"""test"""'], {}), "('test')\n", (4841, 4849), False, 'from ymmsl import Identifier, Reference\n'), ((4908, 4928), 'ymmsl.Reference', 'Reference', (['"""test[5]"""'], {}), "('test[5]')\n", (4917, 4928), False, 'from ymmsl import Identifier, Reference\n'), ((4931, 4952), 'ymmsl.Reference', 'Reference', (['"""test2[3]"""'], {}), "('test2[3]')\n", (4940, 4952), False, 'from ymmsl import Identifier, Reference\n'), ((4986, 5006), 'ymmsl.Reference', 'Reference', (['"""test[5]"""'], {}), "('test[5]')\n", (4995, 5006), False, 'from ymmsl import Identifier, Reference\n'), ((5009, 5028), 'ymmsl.Identifier', 'Identifier', (['"""test2"""'], {}), "('test2')\n", (5019, 5028), False, 'from ymmsl import Identifier, Reference\n'), ((5059, 5079), 'ymmsl.Reference', 'Reference', (['"""test[5]"""'], {}), "('test[5]')\n", (5068, 5079), False, 'from ymmsl import Identifier, Reference\n'), ((5112, 5132), 'ymmsl.Reference', 'Reference', (['"""test[5]"""'], {}), "('test[5]')\n", (5121, 5132), False, 'from ymmsl import Identifier, Reference\n'), ((2984, 3002), 'ymmsl.Identifier', 'Identifier', (['"""test"""'], {}), "('test')\n", (2994, 3002), False, 'from ymmsl import Identifier, Reference\n'), ((4853, 4872), 'ymmsl.Identifier', 'Identifier', (['"""test2"""'], {}), "('test2')\n", (4863, 4872), False, 'from ymmsl import Identifier, Reference\n'), ((5139, 5158), 'ymmsl.Identifier', 'Identifier', (['"""test2"""'], {}), "('test2')\n", (5149, 5158), False, 'from ymmsl import Identifier, Reference\n')] |
import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
@pytest.mark.parametrize("installed_packages", [
("haproxy20"),
("socat"),
("keepalived"),
("bind"),
])
def test_packages_installed(host, installed_packages):
rpackage = host.package(installed_packages)
assert rpackage.is_installed
@pytest.mark.parametrize("services", [
("haproxy"),
# ("keepalive"),
("named"),
])
def test_services_running_and_enabled(host, services):
service = host.service(services)
assert service.is_enabled
assert service.is_running
@pytest.mark.parametrize("files", [
("/etc/pki/haproxy/star_haproxy.pem"),
])
def test_star_haproxy_pem(host, files):
star_haproxy_pem = host.file(files)
assert star_haproxy_pem.user == "root"
assert star_haproxy_pem.group == "root"
assert star_haproxy_pem.mode == 0o600
assert star_haproxy_pem.contains('-----BEGIN CERTIFICATE-----')
assert star_haproxy_pem.contains('-----BEGIN RSA PRIVATE KEY-----')
def test_sysctl_non_local_bind(host):
non_local_bind = host.sysctl("net.ipv4.ip_nonlocal_bind")
assert non_local_bind == 1
| [
"pytest.mark.parametrize"
] | [((191, 286), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""installed_packages"""', "['haproxy20', 'socat', 'keepalived', 'bind']"], {}), "('installed_packages', ['haproxy20', 'socat',\n 'keepalived', 'bind'])\n", (214, 286), False, 'import pytest\n'), ((449, 506), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""services"""', "['haproxy', 'named']"], {}), "('services', ['haproxy', 'named'])\n", (472, 506), False, 'import pytest\n'), ((698, 769), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""files"""', "['/etc/pki/haproxy/star_haproxy.pem']"], {}), "('files', ['/etc/pki/haproxy/star_haproxy.pem'])\n", (721, 769), False, 'import pytest\n')] |
import sys
from util.Timer import Timer
from util.FileOpener import FileOpener
from util.Logger import Logger
from util.PathExtractor import PathExtractor
from util.PathValidator import PathValidator
from service import SpacyModel
def lemmatize_text(file_path: str, timer: Timer):
logger = Logger()
output_file = FileOpener().get_new_file("wiki.en.lemmatized.txt", "a")
with open(file_path, "r") as file:
for line in file:
lemmatized_list = [word.lemma_ for word in SpacyModel.instance.get_en_spacy_line(line)]
lemmazized_line = " ".join(lemmatized_list)
output_file.write(lemmazized_line)
logger.every_n_wiki_status(10, timer.get_duration())
logger.every_n_wiki_status(1)
def main():
script_name: str = PathExtractor().get_file_name(sys.argv[0])
if len(sys.argv) != 2:
Logger().usage(f"python {script_name} <wiki.en.filtered.txt>")
return
file_path = sys.argv[1]
if PathValidator().is_valid_files([file_path]):
Logger().info(f'Input file: "{file_path}"')
Logger().info("Starting to lemmatize text")
timer = Timer()
lemmatize_text(file_path, timer)
Logger().finish_script(timer.get_duration(), script_name)
if __name__ == '__main__':
main()
| [
"util.PathExtractor.PathExtractor",
"util.FileOpener.FileOpener",
"util.Logger.Logger",
"service.SpacyModel.instance.get_en_spacy_line",
"util.PathValidator.PathValidator",
"util.Timer.Timer"
] | [((292, 300), 'util.Logger.Logger', 'Logger', ([], {}), '()\n', (298, 300), False, 'from util.Logger import Logger\n'), ((1046, 1053), 'util.Timer.Timer', 'Timer', ([], {}), '()\n', (1051, 1053), False, 'from util.Timer import Timer\n'), ((316, 328), 'util.FileOpener.FileOpener', 'FileOpener', ([], {}), '()\n', (326, 328), False, 'from util.FileOpener import FileOpener\n'), ((726, 741), 'util.PathExtractor.PathExtractor', 'PathExtractor', ([], {}), '()\n', (739, 741), False, 'from util.PathExtractor import PathExtractor\n'), ((899, 914), 'util.PathValidator.PathValidator', 'PathValidator', ([], {}), '()\n', (912, 914), False, 'from util.PathValidator import PathValidator\n'), ((796, 804), 'util.Logger.Logger', 'Logger', ([], {}), '()\n', (802, 804), False, 'from util.Logger import Logger\n'), ((946, 954), 'util.Logger.Logger', 'Logger', ([], {}), '()\n', (952, 954), False, 'from util.Logger import Logger\n'), ((992, 1000), 'util.Logger.Logger', 'Logger', ([], {}), '()\n', (998, 1000), False, 'from util.Logger import Logger\n'), ((1091, 1099), 'util.Logger.Logger', 'Logger', ([], {}), '()\n', (1097, 1099), False, 'from util.Logger import Logger\n'), ((476, 519), 'service.SpacyModel.instance.get_en_spacy_line', 'SpacyModel.instance.get_en_spacy_line', (['line'], {}), '(line)\n', (513, 519), False, 'from service import SpacyModel\n')] |
from app import app
if __name__ == '__main__':
app.run_server(port=5000, debug=True, host='localhost') | [
"app.app.run_server"
] | [((52, 107), 'app.app.run_server', 'app.run_server', ([], {'port': '(5000)', 'debug': '(True)', 'host': '"""localhost"""'}), "(port=5000, debug=True, host='localhost')\n", (66, 107), False, 'from app import app\n')] |