code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heatclient.common import template_utils
from unittest import mock
import testtools
import time
from senlinclient.common import exc
from senlinclient.common.i18n import _
from senlinclient.common import utils
class UtilTest(testtools.TestCase):
def test_format_parameter(self):
params = ['status=ACTIVE;name=cluster1']
format_params = {'status': 'ACTIVE', 'name': 'cluster1'}
self.assertEqual(format_params,
utils.format_parameters(params))
def test_format_parameter_split(self):
params = ['status=ACTIVE', 'name=cluster1']
format_params = {'status': 'ACTIVE', 'name': 'cluster1'}
self.assertEqual(format_params,
utils.format_parameters(params))
def test_format_parameter_none_dict(self):
params = ['{}']
self.assertEqual({}, utils.format_parameters(params))
def test_format_parameter_none(self):
self.assertEqual({}, utils.format_parameters(None))
def test_format_parameter_bad_format(self):
params = ['status:ACTIVE;name:cluster1']
ex = self.assertRaises(exc.CommandError,
utils.format_parameters,
params)
msg = _('Malformed parameter(status:ACTIVE). '
'Use the key=value format.')
self.assertEqual(msg, str(ex))
@mock.patch.object(template_utils,
'process_multiple_environments_and_files')
@mock.patch.object(template_utils, 'get_template_contents')
def test_process_stack_spec(self, mock_get_temp, mock_process):
spec = {
'template': 'temp.yaml',
'disable_rollback': True,
'context': {
'region_name': 'RegionOne'
},
}
tpl_files = {'fake_key1': 'fake_value1'}
template = mock.Mock()
mock_get_temp.return_value = tpl_files, template
env_files = {'fake_key2': 'fake_value2'}
env = mock.Mock()
mock_process.return_value = env_files, env
new_spec = utils.process_stack_spec(spec)
stack_spec = {
'disable_rollback': True,
'context': {
'region_name': 'RegionOne',
},
'parameters': {},
'timeout': 60,
'template': template,
'files': {
'fake_key1': 'fake_value1',
'fake_key2': 'fake_value2',
},
'environment': env
}
self.assertEqual(stack_spec, new_spec)
mock_get_temp.assert_called_once_with(template_file='temp.yaml')
mock_process.assert_called_once_with(env_paths=None)
def test_json_formatter_with_empty_json(self):
params = {}
self.assertEqual('{}', utils.json_formatter(params))
def test_list_formatter_with_list(self):
params = ['foo', 'bar']
self.assertEqual('foo\nbar', utils.list_formatter(params))
def test_list_formatter_with_empty_list(self):
params = []
self.assertEqual('', utils.list_formatter(params))
@mock.patch.object(utils, '_check')
def test_await_cluster_action(self, mock_check):
utils.await_action('fake-client', 'test-action-id')
mock_check.assert_called_once()
@mock.patch.object(utils, '_check')
def test_await_cluster_status(self, mock_check):
utils.await_cluster_status('fake-client', 'ACTIVE')
mock_check.assert_called_once()
@mock.patch.object(utils, '_check')
def test_await_cluster_delete(self, mock_check):
utils.await_cluster_delete('fake-client', 'test-cluster-id')
mock_check.assert_called_once()
def test_check(self):
check_func = mock.Mock(return_value=True)
try:
utils._check(check_func)
except Exception:
self.fail("_check() unexpectedly raised an exception")
check_func.assert_called()
@mock.patch.object(time, 'sleep')
def test_check_raises(self, mock_sleep):
mock_check_func = mock.Mock(return_value=False)
poll_count = 2
poll_interval = 1
self.assertRaises(exc.PollingExceededError, utils._check,
mock_check_func, poll_count, poll_interval)
mock_check_func.assert_called()
mock_sleep.assert_called()
| [
"unittest.mock.Mock",
"senlinclient.common.utils.json_formatter",
"senlinclient.common.utils.await_action",
"senlinclient.common.utils.process_stack_spec",
"senlinclient.common.utils.format_parameters",
"senlinclient.common.utils._check",
"senlinclient.common.i18n._",
"senlinclient.common.utils.list_f... | [((1935, 2011), 'unittest.mock.patch.object', 'mock.patch.object', (['template_utils', '"""process_multiple_environments_and_files"""'], {}), "(template_utils, 'process_multiple_environments_and_files')\n", (1952, 2011), False, 'from unittest import mock\n'), ((2040, 2098), 'unittest.mock.patch.object', 'mock.patch.object', (['template_utils', '"""get_template_contents"""'], {}), "(template_utils, 'get_template_contents')\n", (2057, 2098), False, 'from unittest import mock\n'), ((3664, 3698), 'unittest.mock.patch.object', 'mock.patch.object', (['utils', '"""_check"""'], {}), "(utils, '_check')\n", (3681, 3698), False, 'from unittest import mock\n'), ((3858, 3892), 'unittest.mock.patch.object', 'mock.patch.object', (['utils', '"""_check"""'], {}), "(utils, '_check')\n", (3875, 3892), False, 'from unittest import mock\n'), ((4052, 4086), 'unittest.mock.patch.object', 'mock.patch.object', (['utils', '"""_check"""'], {}), "(utils, '_check')\n", (4069, 4086), False, 'from unittest import mock\n'), ((4512, 4544), 'unittest.mock.patch.object', 'mock.patch.object', (['time', '"""sleep"""'], {}), "(time, 'sleep')\n", (4529, 4544), False, 'from unittest import mock\n'), ((1804, 1870), 'senlinclient.common.i18n._', '_', (['"""Malformed parameter(status:ACTIVE). Use the key=value format."""'], {}), "('Malformed parameter(status:ACTIVE). 
Use the key=value format.')\n", (1805, 1870), False, 'from senlinclient.common.i18n import _\n'), ((2420, 2431), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (2429, 2431), False, 'from unittest import mock\n'), ((2552, 2563), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (2561, 2563), False, 'from unittest import mock\n'), ((2634, 2664), 'senlinclient.common.utils.process_stack_spec', 'utils.process_stack_spec', (['spec'], {}), '(spec)\n', (2658, 2664), False, 'from senlinclient.common import utils\n'), ((3760, 3811), 'senlinclient.common.utils.await_action', 'utils.await_action', (['"""fake-client"""', '"""test-action-id"""'], {}), "('fake-client', 'test-action-id')\n", (3778, 3811), False, 'from senlinclient.common import utils\n'), ((3954, 4005), 'senlinclient.common.utils.await_cluster_status', 'utils.await_cluster_status', (['"""fake-client"""', '"""ACTIVE"""'], {}), "('fake-client', 'ACTIVE')\n", (3980, 4005), False, 'from senlinclient.common import utils\n'), ((4148, 4208), 'senlinclient.common.utils.await_cluster_delete', 'utils.await_cluster_delete', (['"""fake-client"""', '"""test-cluster-id"""'], {}), "('fake-client', 'test-cluster-id')\n", (4174, 4208), False, 'from senlinclient.common import utils\n'), ((4297, 4325), 'unittest.mock.Mock', 'mock.Mock', ([], {'return_value': '(True)'}), '(return_value=True)\n', (4306, 4325), False, 'from unittest import mock\n'), ((4616, 4645), 'unittest.mock.Mock', 'mock.Mock', ([], {'return_value': '(False)'}), '(return_value=False)\n', (4625, 4645), False, 'from unittest import mock\n'), ((1019, 1050), 'senlinclient.common.utils.format_parameters', 'utils.format_parameters', (['params'], {}), '(params)\n', (1042, 1050), False, 'from senlinclient.common import utils\n'), ((1278, 1309), 'senlinclient.common.utils.format_parameters', 'utils.format_parameters', (['params'], {}), '(params)\n', (1301, 1309), False, 'from senlinclient.common import utils\n'), ((1412, 1443), 
'senlinclient.common.utils.format_parameters', 'utils.format_parameters', (['params'], {}), '(params)\n', (1435, 1443), False, 'from senlinclient.common import utils\n'), ((1517, 1546), 'senlinclient.common.utils.format_parameters', 'utils.format_parameters', (['None'], {}), '(None)\n', (1540, 1546), False, 'from senlinclient.common import utils\n'), ((3352, 3380), 'senlinclient.common.utils.json_formatter', 'utils.json_formatter', (['params'], {}), '(params)\n', (3372, 3380), False, 'from senlinclient.common import utils\n'), ((3497, 3525), 'senlinclient.common.utils.list_formatter', 'utils.list_formatter', (['params'], {}), '(params)\n', (3517, 3525), False, 'from senlinclient.common import utils\n'), ((3628, 3656), 'senlinclient.common.utils.list_formatter', 'utils.list_formatter', (['params'], {}), '(params)\n', (3648, 3656), False, 'from senlinclient.common import utils\n'), ((4352, 4376), 'senlinclient.common.utils._check', 'utils._check', (['check_func'], {}), '(check_func)\n', (4364, 4376), False, 'from senlinclient.common import utils\n')] |
#!/usr/bin/env python
# Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not
# use this file except in compliance with the License. A copy of the License is
# located at
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""
GGD Button
This GGD will send "green", "red", or "white" button messages.
"""
import os
import json
import time
import socket
import argparse
import datetime
import logging
from gpiozero import PWMLED, Button
from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTClient, DROP_OLDEST
from AWSIoTPythonSDK.core.greengrass.discovery.providers import \
DiscoveryInfoProvider
import utils
from gg_group_setup import GroupConfigFile
dir_path = os.path.dirname(os.path.realpath(__file__))
log = logging.getLogger('button')
handler = logging.StreamHandler()
formatter = logging.Formatter(
'%(asctime)s|%(name)-8s|%(levelname)s: %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
log.setLevel(logging.INFO)
GGD_BUTTON_TOPIC = "button"
hostname = socket.gethostname()
green_led = PWMLED(4)
green_button = Button(5)
red_led = PWMLED(17)
red_button = Button(6)
white_led = PWMLED(27)
white_button = Button(13)
mqttc = None
ggd_name = None
def button(sensor_id, toggle):
now = datetime.datetime.now()
if toggle:
val = "on"
else:
val = "off"
msg = {
"version": "2017-07-05", # YYYY-MM-DD
"ggd_id": ggd_name,
"hostname": hostname,
"data": [
{
"sensor_id": sensor_id,
"ts": now.isoformat(),
"value": val
}
]
}
mqttc.publish(GGD_BUTTON_TOPIC, json.dumps(msg), 0)
return msg
def red_push():
msg = button(sensor_id="red-button", toggle=True)
log.info("[red_push] publishing button msg: {0}".format(msg))
red_led.on()
green_led.off()
red_led.pulse()
def red_release():
msg = button(sensor_id="red-button", toggle=False)
log.info("[red_release] publishing button msg: {0}".format(msg))
def green_push():
msg = button(sensor_id="green-button", toggle=True)
log.info("[green_push] publishing button msg: {0}".format(msg))
green_led.on()
red_led.off()
green_led.pulse()
def green_release():
msg = button(sensor_id="green-button", toggle=False)
log.info("[green_release] publishing button msg: {0}".format(msg))
def white_push():
msg = button(sensor_id="white-button", toggle=True)
log.info("[white_push] publishing button msg: {0}".format(msg))
white_led.pulse()
def white_release():
msg = button(sensor_id="white-button", toggle=False)
log.info("[white_release] publishing button msg: {0}".format(msg))
white_led.on()
def use_box(cli):
log.info("[use_box] configuring magic buttons.")
red_button.when_pressed = red_push
red_button.when_released = red_release
green_button.when_pressed = green_push
green_button.when_released = green_release
white_button.when_pressed = white_push
white_button.when_released = white_release
white_led.on()
log.info("[use_box] configured buttons. White LED should now be on.")
try:
while 1:
time.sleep(0.2)
except KeyboardInterrupt:
log.info(
"[use_box] KeyboardInterrupt ... exiting box monitoring loop")
red_led.off()
green_led.off()
white_led.off()
def button_green(cli):
if cli.light:
green_led.on()
msg = button(sensor_id="green-button", toggle=cli.toggle)
print("[cli.button_green] publishing button msg: {0}".format(msg))
def button_red(cli):
if cli.light:
red_led.on()
msg = button(sensor_id="red-button", toggle=cli.toggle)
print("[cli.button_red] publishing button msg: {0}".format(msg))
def button_white(cli):
if cli.light:
white_led.on()
msg = button(sensor_id="white-button", toggle=cli.toggle)
print("[cli.button_white] publishing button msg: {0}".format(msg))
def core_connect(device_name, config_file, root_ca, certificate, private_key,
group_ca_path):
global ggd_name, mqttc
cfg = GroupConfigFile(config_file)
ggd_name = cfg['devices'][device_name]['thing_name']
iot_endpoint = cfg['misc']['iot_endpoint']
dip = DiscoveryInfoProvider()
dip.configureEndpoint(iot_endpoint)
dip.configureCredentials(
caPath=root_ca, certPath=certificate, keyPath=private_key
)
dip.configureTimeout(10) # 10 sec
logging.info("[button] Discovery using CA:{0} cert:{1} prv_key:{2}".format(
root_ca, certificate, private_key
))
gg_core, discovery_info = utils.discover_configured_core(
device_name=device_name, dip=dip, config_file=config_file,
)
if not gg_core:
raise EnvironmentError("[button] Couldn't find the Core")
ca_list = discovery_info.getAllCas()
group_id, ca = ca_list[0]
group_ca_file = utils.save_group_ca(ca, group_ca_path, group_id)
mqttc = AWSIoTMQTTClient(ggd_name)
# local Greengrass Core discovered, now connect to Core from this Device
log.info("[button] gca_file:{0} cert:{1}".format(
group_ca_file, certificate))
mqttc.configureCredentials(group_ca_file, private_key, certificate)
mqttc.configureOfflinePublishQueueing(10, DROP_OLDEST)
return mqttc, gg_core
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Mini Fulfillment GGD and CLI button',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('device_name',
help="The GGD device_name in the config file.")
parser.add_argument('config_file',
help="The config file.")
parser.add_argument('root_ca',
help="Root CA File Path of Cloud Server Certificate.")
parser.add_argument('certificate',
help="File Path of GGD Certificate.")
parser.add_argument('private_key',
help="File Path of GGD Private Key.")
parser.add_argument('group_ca_path',
help="The directory path where the discovered Group CA "
"will be saved.")
subparsers = parser.add_subparsers()
box_parser = subparsers.add_parser(
'box', description='Use the physical button box to drive.')
box_parser.add_argument('--on', action='store_true',
help="Toggle box ON")
box_parser.set_defaults(func=use_box, on=True)
green_parser = subparsers.add_parser(
'green',
description='Virtual GREEN button pushed')
green_parser.add_argument('--on', dest='toggle', action='store_true',
help="Virtual toggle ON")
green_parser.add_argument('--off', dest='toggle', action='store_false',
help="Virtual toggle OFF")
green_parser.add_argument('--light', action='store_true')
green_parser.set_defaults(func=button_green, toggle=True)
red_parser = subparsers.add_parser(
'red',
description='Virtual RED button pushed')
red_parser.add_argument('--on', dest='toggle', action='store_true',
help="Virtual toggle ON")
red_parser.add_argument('--off', dest='toggle', action='store_false',
help="Virtual toggle OFF")
red_parser.add_argument('--light', action='store_true')
red_parser.set_defaults(func=button_red, toggle=True)
white_parser = subparsers.add_parser(
'white',
description='Virtual WHITE button toggled')
white_parser.add_argument('--on', dest='toggle', action='store_true',
help="Virtual toggle ON")
white_parser.add_argument('--off', dest='toggle', action='store_false',
help="Virtual toggle OFF")
white_parser.add_argument('--light', action='store_true')
white_parser.set_defaults(func=button_white, toggle=True)
pa = parser.parse_args()
client, core = core_connect(
device_name=pa.device_name,
config_file=pa.config_file, root_ca=pa.root_ca,
certificate=pa.certificate, private_key=pa.private_key,
group_ca_path=pa.group_ca_path
)
if utils.mqtt_connect(mqtt_client=client, core_info=core):
pa.func(pa)
time.sleep(0.5)
mqttc.disconnect()
time.sleep(1)
| [
"logging.getLogger",
"logging.StreamHandler",
"argparse.ArgumentParser",
"AWSIoTPythonSDK.core.greengrass.discovery.providers.DiscoveryInfoProvider",
"logging.Formatter",
"json.dumps",
"gpiozero.Button",
"gg_group_setup.GroupConfigFile",
"os.path.realpath",
"datetime.datetime.now",
"utils.discov... | [((1079, 1106), 'logging.getLogger', 'logging.getLogger', (['"""button"""'], {}), "('button')\n", (1096, 1106), False, 'import logging\n'), ((1117, 1140), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (1138, 1140), False, 'import logging\n'), ((1153, 1223), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s|%(name)-8s|%(levelname)s: %(message)s"""'], {}), "('%(asctime)s|%(name)-8s|%(levelname)s: %(message)s')\n", (1170, 1223), False, 'import logging\n'), ((1353, 1373), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (1371, 1373), False, 'import socket\n'), ((1386, 1395), 'gpiozero.PWMLED', 'PWMLED', (['(4)'], {}), '(4)\n', (1392, 1395), False, 'from gpiozero import PWMLED, Button\n'), ((1411, 1420), 'gpiozero.Button', 'Button', (['(5)'], {}), '(5)\n', (1417, 1420), False, 'from gpiozero import PWMLED, Button\n'), ((1431, 1441), 'gpiozero.PWMLED', 'PWMLED', (['(17)'], {}), '(17)\n', (1437, 1441), False, 'from gpiozero import PWMLED, Button\n'), ((1455, 1464), 'gpiozero.Button', 'Button', (['(6)'], {}), '(6)\n', (1461, 1464), False, 'from gpiozero import PWMLED, Button\n'), ((1477, 1487), 'gpiozero.PWMLED', 'PWMLED', (['(27)'], {}), '(27)\n', (1483, 1487), False, 'from gpiozero import PWMLED, Button\n'), ((1503, 1513), 'gpiozero.Button', 'Button', (['(13)'], {}), '(13)\n', (1509, 1513), False, 'from gpiozero import PWMLED, Button\n'), ((1044, 1070), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1060, 1070), False, 'import os\n'), ((1586, 1609), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1607, 1609), False, 'import datetime\n'), ((4468, 4496), 'gg_group_setup.GroupConfigFile', 'GroupConfigFile', (['config_file'], {}), '(config_file)\n', (4483, 4496), False, 'from gg_group_setup import GroupConfigFile\n'), ((4612, 4635), 'AWSIoTPythonSDK.core.greengrass.discovery.providers.DiscoveryInfoProvider', 'DiscoveryInfoProvider', ([], {}), '()\n', (4633, 
4635), False, 'from AWSIoTPythonSDK.core.greengrass.discovery.providers import DiscoveryInfoProvider\n'), ((4977, 5070), 'utils.discover_configured_core', 'utils.discover_configured_core', ([], {'device_name': 'device_name', 'dip': 'dip', 'config_file': 'config_file'}), '(device_name=device_name, dip=dip,\n config_file=config_file)\n', (5007, 5070), False, 'import utils\n'), ((5260, 5308), 'utils.save_group_ca', 'utils.save_group_ca', (['ca', 'group_ca_path', 'group_id'], {}), '(ca, group_ca_path, group_id)\n', (5279, 5308), False, 'import utils\n'), ((5322, 5348), 'AWSIoTPythonSDK.MQTTLib.AWSIoTMQTTClient', 'AWSIoTMQTTClient', (['ggd_name'], {}), '(ggd_name)\n', (5338, 5348), False, 'from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTClient, DROP_OLDEST\n'), ((5717, 5851), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Mini Fulfillment GGD and CLI button"""', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), "(description='Mini Fulfillment GGD and CLI button',\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n", (5740, 5851), False, 'import argparse\n'), ((8605, 8659), 'utils.mqtt_connect', 'utils.mqtt_connect', ([], {'mqtt_client': 'client', 'core_info': 'core'}), '(mqtt_client=client, core_info=core)\n', (8623, 8659), False, 'import utils\n'), ((8686, 8701), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (8696, 8701), False, 'import time\n'), ((8729, 8742), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (8739, 8742), False, 'import time\n'), ((1998, 2013), 'json.dumps', 'json.dumps', (['msg'], {}), '(msg)\n', (2008, 2013), False, 'import json\n'), ((3529, 3544), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (3539, 3544), False, 'import time\n')] |
import argparse
import sys
class HelpOnFailArgumentParser(argparse.ArgumentParser):
"""
Prints help whenever the command-line arguments could not be parsed.
"""
def error(self, message):
sys.stderr.write("Error: %s\n\n" % message)
self.print_help()
sys.exit(2)
| [
"sys.stderr.write",
"sys.exit"
] | [((214, 257), 'sys.stderr.write', 'sys.stderr.write', (["('Error: %s\\n\\n' % message)"], {}), "('Error: %s\\n\\n' % message)\n", (230, 257), False, 'import sys\n'), ((292, 303), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (300, 303), False, 'import sys\n')] |
from __future__ import division
import itertools
from pyoperators.iterative.stopconditions import StopCondition
from pyoperators.utils.testing import assert_eq, assert_raises
class A():
pass
sc1 = StopCondition(lambda s: s.a > 2, 'a>2')
sc2 = StopCondition(lambda s: s.b > 2, 'b>2')
sc3 = StopCondition(lambda s: s.c > 2, 'c>2')
def test_stop_condition():
values = (1, 3)
def func(v):
a = A()
a.a = v
if v > 2:
assert_raises(StopIteration, sc1, a)
for v in values:
yield func, v
def test_stop_condition_or():
sc = sc1 or sc2 or sc2
def func(v):
a = A()
a.a, a.b, a.c = v
if any(_ > 2 for _ in v):
try:
sc(a)
except StopIteration as e:
if a.a > 2:
assert_eq(str(e), str(sc1))
elif a.b > 2:
assert_eq(str(e), str(sc2))
else:
assert_eq(str(e), str(sc3))
for v in itertools.product((1, 3), repeat=3):
yield func, v
| [
"itertools.product",
"pyoperators.iterative.stopconditions.StopCondition",
"pyoperators.utils.testing.assert_raises"
] | [((206, 245), 'pyoperators.iterative.stopconditions.StopCondition', 'StopCondition', (['(lambda s: s.a > 2)', '"""a>2"""'], {}), "(lambda s: s.a > 2, 'a>2')\n", (219, 245), False, 'from pyoperators.iterative.stopconditions import StopCondition\n'), ((252, 291), 'pyoperators.iterative.stopconditions.StopCondition', 'StopCondition', (['(lambda s: s.b > 2)', '"""b>2"""'], {}), "(lambda s: s.b > 2, 'b>2')\n", (265, 291), False, 'from pyoperators.iterative.stopconditions import StopCondition\n'), ((298, 337), 'pyoperators.iterative.stopconditions.StopCondition', 'StopCondition', (['(lambda s: s.c > 2)', '"""c>2"""'], {}), "(lambda s: s.c > 2, 'c>2')\n", (311, 337), False, 'from pyoperators.iterative.stopconditions import StopCondition\n'), ((1015, 1050), 'itertools.product', 'itertools.product', (['(1, 3)'], {'repeat': '(3)'}), '((1, 3), repeat=3)\n', (1032, 1050), False, 'import itertools\n'), ((467, 503), 'pyoperators.utils.testing.assert_raises', 'assert_raises', (['StopIteration', 'sc1', 'a'], {}), '(StopIteration, sc1, a)\n', (480, 503), False, 'from pyoperators.utils.testing import assert_eq, assert_raises\n')] |
from ctypes import c_ushort
class DOMException(Exception):
"""Exception `DOMException`
DOM operations only raise exceptions in "exceptional" circumstances, i.e., when an operation is impossible to perform
(either for logical reasons, because data is lost, or because the implementation has become unstable).
In general, DOM methods return specific error values in ordinary processing situation, such as out-of-bound errors when using `NodeList`.
Implementations may raise other exceptions under other circumstances.
For example, implementations may raise an implementation-dependent exception if a `None` argument is passed.
Some languages and object systems do not support the concept of exceptions.
For such systems, error conditions may be indicated using native error reporting mechanisms.
For some bindings, for example, methods may return error codes similar to those listed in the corresponding method descriptions.
"""
# Definition group `ExceptionCode`
# An integer indicating the type of error generated.
INDEX_SIZE_ERR: c_ushort = c_ushort(1)
DOMSTRING_SIZE_ERR: c_ushort = c_ushort(2)
HIERARCHY_REQUEST_ERR: c_ushort = c_ushort(3)
WRONG_DOCUMENT_ERR: c_ushort = c_ushort(4)
INVALID_CHARACTER_ERR: c_ushort = c_ushort(5)
NO_DATA_ALLOWED_ERR: c_ushort = c_ushort(6)
NO_MODIFICATION_ALLOWED_ERR: c_ushort = c_ushort(7)
NOT_FOUND_ERR: c_ushort = c_ushort(8)
NOT_SUPPORTED_ERR: c_ushort = c_ushort(9)
INUSE_ATTRIBUTE_ERR: c_ushort = c_ushort(10)
def __init__(self, error_code: c_ushort, *args: object) -> None:
super().__init__(*args)
self.code: c_ushort = error_code
| [
"ctypes.c_ushort"
] | [((1100, 1111), 'ctypes.c_ushort', 'c_ushort', (['(1)'], {}), '(1)\n', (1108, 1111), False, 'from ctypes import c_ushort\n'), ((1147, 1158), 'ctypes.c_ushort', 'c_ushort', (['(2)'], {}), '(2)\n', (1155, 1158), False, 'from ctypes import c_ushort\n'), ((1197, 1208), 'ctypes.c_ushort', 'c_ushort', (['(3)'], {}), '(3)\n', (1205, 1208), False, 'from ctypes import c_ushort\n'), ((1244, 1255), 'ctypes.c_ushort', 'c_ushort', (['(4)'], {}), '(4)\n', (1252, 1255), False, 'from ctypes import c_ushort\n'), ((1294, 1305), 'ctypes.c_ushort', 'c_ushort', (['(5)'], {}), '(5)\n', (1302, 1305), False, 'from ctypes import c_ushort\n'), ((1342, 1353), 'ctypes.c_ushort', 'c_ushort', (['(6)'], {}), '(6)\n', (1350, 1353), False, 'from ctypes import c_ushort\n'), ((1398, 1409), 'ctypes.c_ushort', 'c_ushort', (['(7)'], {}), '(7)\n', (1406, 1409), False, 'from ctypes import c_ushort\n'), ((1440, 1451), 'ctypes.c_ushort', 'c_ushort', (['(8)'], {}), '(8)\n', (1448, 1451), False, 'from ctypes import c_ushort\n'), ((1486, 1497), 'ctypes.c_ushort', 'c_ushort', (['(9)'], {}), '(9)\n', (1494, 1497), False, 'from ctypes import c_ushort\n'), ((1534, 1546), 'ctypes.c_ushort', 'c_ushort', (['(10)'], {}), '(10)\n', (1542, 1546), False, 'from ctypes import c_ushort\n')] |
# Generated by Django 3.2b1 on 2021-03-16 23:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('videos', '0012_alter_video_video_id'),
('playlists', '0005_remove_playlist_videos'),
]
operations = [
migrations.CreateModel(
name='PlaylistItem',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('order', models.IntegerField(default=1)),
('timestamp', models.DateTimeField(auto_now_add=True)),
('playlist', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='playlists.playlist')),
('video', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='videos.video')),
],
options={
'ordering': ['order', '-timestamp'],
},
),
migrations.AddField(
model_name='playlist',
name='videos',
field=models.ManyToManyField(blank=True, related_name='playlist_item', through='playlists.PlaylistItem', to='videos.Video'),
),
]
| [
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"django.db.models.ManyToManyField",
"django.db.models.BigAutoField",
"django.db.models.DateTimeField"
] | [((1104, 1226), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'related_name': '"""playlist_item"""', 'through': '"""playlists.PlaylistItem"""', 'to': '"""videos.Video"""'}), "(blank=True, related_name='playlist_item', through=\n 'playlists.PlaylistItem', to='videos.Video')\n", (1126, 1226), False, 'from django.db import migrations, models\n'), ((424, 520), 'django.db.models.BigAutoField', 'models.BigAutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (443, 520), False, 'from django.db import migrations, models\n'), ((545, 575), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (564, 575), False, 'from django.db import migrations, models\n'), ((608, 647), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (628, 647), False, 'from django.db import migrations, models\n'), ((679, 771), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""playlists.playlist"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'playlists.playlist')\n", (696, 771), False, 'from django.db import migrations, models\n'), ((795, 881), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""videos.video"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'videos.video')\n", (812, 881), False, 'from django.db import migrations, models\n')] |
"""
Copyright 2010-2018 University Of Southern California
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Created on Jul 27, 2012
@author: maechlin
Utility to convert PEER format seismogram (acceleration) to SCEC bbp
format. Expects a pathname to directory containing 3 component PEER
files. Outputs .bbp format file for each set of 3 PEER files. Output
bbp files are based on the station name, which may be a station name
or a record sequence number rsn.
"""
from __future__ import division, print_function
import os
import bbp_formatter
def main():
#
# Logic is
#
# ls the LOMAP dir
# find each RNS
# for each RSN create three names of the three components
# for each RNS create name of output file based on RNS
# call bbp_formatter
#
#
#
path = './LOMAP'
listing = os.listdir(path)
for infile in listing:
print("current file is: " + infile)
sta_list = []
for infile in listing:
if infile.endswith("_E.acc" or "_N.acc" or "_Z.acc"):
fields = infile.split("_")
id = int(fields[0])
print("Found RECID %d" % (id))
sta_list.append(id)
print("next infile")
for sta in sta_list:
e_fname = os.path.join(path, "%s_E.acc" % (sta))
n_fname = os.path.join(path, "%s_N.acc" % (sta))
z_fname = os.path.join(path, "%s_Z.acc" % (sta))
bbp_fname = os.path.join(path, "%s.bbp" % (sta))
print(e_fname)
print(n_fname)
print(z_fname)
print(bbp_fname)
bbp_formatter.peer2bbp(n_fname, e_fname, z_fname, bbp_fname)
print("Created BBP file: %s" % (bbp_fname))
if __name__ == '__main__':
main()
| [
"os.listdir",
"os.path.join",
"bbp_formatter.peer2bbp"
] | [((1255, 1271), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (1265, 1271), False, 'import os\n'), ((1670, 1706), 'os.path.join', 'os.path.join', (['path', "('%s_E.acc' % sta)"], {}), "(path, '%s_E.acc' % sta)\n", (1682, 1706), False, 'import os\n'), ((1727, 1763), 'os.path.join', 'os.path.join', (['path', "('%s_N.acc' % sta)"], {}), "(path, '%s_N.acc' % sta)\n", (1739, 1763), False, 'import os\n'), ((1784, 1820), 'os.path.join', 'os.path.join', (['path', "('%s_Z.acc' % sta)"], {}), "(path, '%s_Z.acc' % sta)\n", (1796, 1820), False, 'import os\n'), ((1843, 1877), 'os.path.join', 'os.path.join', (['path', "('%s.bbp' % sta)"], {}), "(path, '%s.bbp' % sta)\n", (1855, 1877), False, 'import os\n'), ((1982, 2042), 'bbp_formatter.peer2bbp', 'bbp_formatter.peer2bbp', (['n_fname', 'e_fname', 'z_fname', 'bbp_fname'], {}), '(n_fname, e_fname, z_fname, bbp_fname)\n', (2004, 2042), False, 'import bbp_formatter\n')] |
#!/usr/bin/env python
# coding: utf-8
# # Publications markdown generator for academicpages
#
# Takes a set of bibtex of publications and converts them for use with [academicpages.github.io](academicpages.github.io). This is an interactive Jupyter notebook ([see more info here](http://jupyter-notebook-beginner-guide.readthedocs.io/en/latest/what_is_jupyter.html)).
#
# The core python code is also in `pubsFromBibs.py`.
# Run either from the `markdown_generator` folder after replacing updating the publist dictionary with:
# * bib file names
# * specific venue keys based on your bib file preferences
# * any specific pre-text for specific files
# * Collection Name (future feature)
#
# TODO: Make this work with other databases of citations,
# TODO: Merge this with the existing TSV parsing solution
from pybtex.database.input import bibtex
import pybtex.database.input.bibtex
from time import strptime
import string
import html
import os
import re
from pylatexenc.latex2text import LatexNodes2Text
#todo: incorporate different collection types rather than a catch all publications, requires other changes to template
# Configuration: one entry per bib file, mapping it to the bibtex field that
# holds the venue name and to the academicpages collection the generated
# markdown pages belong to.
publist = {
    "proceeding": {
        "file" : "proceedings.bib",
        "venuekey": "booktitle",
        "venue-pretext": "In ",
        "collection" : {"name":"proceedings",
                        "permalink":"/proceeding/"}
    },
    "journal":{
        "file": "pubs.bib",
        "venuekey" : "journal",
        "venue-pretext" : "",
        "collection" : {"name":"publications",
                        "permalink":"/publication/"}
    }
}

# Characters that must be escaped before being embedded in the YAML/HTML
# front matter of the generated pages.
html_escape_table = {
    "&": "&amp;",
    '"': "&quot;",
    "'": "&apos;"
    }
def html_escape(text):
    """Replace &, double and single quotes in *text* with HTML entities."""
    escaped_chars = []
    for ch in text:
        escaped_chars.append(html_escape_table.get(ch, ch))
    return "".join(escaped_chars)
# Main generation loop: parse each configured bib file and emit one markdown
# page per bibliography entry under ../_<collection>/.
for pubsource in publist:
    parser = bibtex.Parser()
    bibdata = parser.parse_file(publist[pubsource]["file"])
    #loop through the individual references in a given bibtex file
    for bib_id in bibdata.entries:
        #reset default date
        pub_year = "1900"
        pub_month = "01"
        pub_day = "01"
        b = bibdata.entries[bib_id].fields
        try:
            if not "year" in b.keys():
                print(b)
            pub_year = f'{b["year"]}'
            #todo: this hack for month and day needs some cleanup
            #if "month" in b.keys():
            #    if(len(b["month"])<3):
            #        pub_month = "0"+b["month"]
            #        pub_month = pub_month[-2:]
            #    elif(b["month"] not in range(12)):
            #        tmnth = strptime(b["month"][:3],'%b').tm_mon
            #        pub_month = "{:02d}".format(tmnth)
            #    else:
            #        pub_month = str(b["month"])
            # Month matching is a loose substring test that accepts both
            # English and Spanish abbreviations (jan/ene, aug/ago, dec/dic).
            if "month" in b.keys():
                mes = b["month"]
                if "jan" in mes.lower() or "ene" in mes.lower():
                    pub_month = "01"
                if "feb" in mes.lower():
                    pub_month = "02"
                if "mar" in mes.lower():
                    pub_month = "03"
                if "apr" in mes.lower() or "abr" in mes.lower():
                    pub_month = "04"
                if "may" in mes.lower():
                    pub_month = "05"
                if "jun" in mes.lower():
                    pub_month = "06"
                if "jul" in mes.lower():
                    pub_month = "07"
                if "aug" in mes.lower() or "ago" in mes.lower():
                    pub_month = "08"
                if "sep" in mes.lower():
                    pub_month = "09"
                if "oct" in mes.lower():
                    pub_month = "10"
                if "nov" in mes.lower():
                    pub_month = "11"
                if "dec" in mes.lower() or "dic" in mes.lower():
                    pub_month = "12"
            if "day" in b.keys():
                pub_day = str(b["day"])
            pub_date = pub_year+"-"+pub_month+"-"+pub_day
            #strip out {} as needed (some bibtex entries that maintain formatting)
            # Also transliterate accented Spanish characters so the slug is
            # filesystem/URL safe.
            clean_title = LatexNodes2Text().latex_to_text(b["title"]).replace("{", "").replace("}","").replace("\\","").replace(" ","-").replace("á","a").replace("é","e").replace("í","i").replace("ó","o").replace("ú","u").replace("Á","A").replace("É","E").replace("Í","I").replace("Ó","O").replace("Ú","U").replace("ñ","n").replace("ñ","Ñ")
            url_slug = re.sub("\\[.*\\]|[^a-zA-Z0-9_-]", "", clean_title)
            url_slug = url_slug.replace("--","-")
            # Cap the slug at 100 characters to keep filenames reasonable.
            url_slug = (url_slug[:100]) if len(url_slug) > 100 else url_slug
            md_filename = (str(pub_date) + "-" + url_slug + ".md").replace("--","-")
            html_filename = (str(pub_date) + "-" + url_slug).replace("--","-")
            #Build Citation from text
            citation = ""
            #citation authors - todo - add highlighting for primary author?
            # One specific author (Pedro Antonio Gutiérrez) is wrapped in
            # <strong> tags to highlight him in the rendered citation.
            for author in bibdata.entries[bib_id].persons["author"]:
                if "pedro" in LatexNodes2Text().latex_to_text(author.first_names[0]).lower() and "antonio" in LatexNodes2Text().latex_to_text(author.middle_names[0]).lower() and "gutiérrez" in LatexNodes2Text().latex_to_text(author.last_names[0]).lower():
                    citation += "<strong>"
                if author.middle_names:
                    citation += LatexNodes2Text().latex_to_text(author.first_names[0])+" "+LatexNodes2Text().latex_to_text(author.middle_names[0])+" "+LatexNodes2Text().latex_to_text(author.last_names[0])
                else:
                    citation += LatexNodes2Text().latex_to_text(author.first_names[0])+" "+LatexNodes2Text().latex_to_text(author.last_names[0])
                if "pedro" in LatexNodes2Text().latex_to_text(author.first_names[0]).lower() and "antonio" in LatexNodes2Text().latex_to_text(author.middle_names[0]).lower() and "gutiérrez" in LatexNodes2Text().latex_to_text(author.last_names[0]).lower():
                    citation += "</strong>"
                citation += ", "
            citation = citation.replace("Á","A").replace("É","E").replace("Í","I").replace("Ó","O").replace("Ú","U")
            #citation title
            citation += "\"" + html_escape(LatexNodes2Text().latex_to_text(b["title"]).replace("{", "").replace("}","").replace("\\","")) + ".\""
            #add venue logic depending on citation type
            venue = publist[pubsource]["venue-pretext"]+LatexNodes2Text().latex_to_text(b[publist[pubsource]["venuekey"]]).replace("{", "").replace("}","").replace("\\","")
            citation += " " + html_escape(venue)
            # Optional bibtex fields appended to the citation when present.
            if "series" in b.keys() and b["series"]:
                citation += ", " + html_escape(LatexNodes2Text().latex_to_text(b["series"]))
            if "volume" in b.keys() and b["volume"]:
                citation += ", Vol. " + b["volume"]
            if "number" in b.keys() and b["number"]:
                citation += "(" + b["number"] + ")"
            citation += ", " + pub_year
            if "organization" in b.keys() and b["organization"]:
                citation += ", " + html_escape(LatexNodes2Text().latex_to_text(b["organization"]))
            if "pages" in b.keys() and b["pages"]:
                citation += ", pp." + b["pages"]
            citation += "."
            ## YAML variables
            # Assemble the YAML front matter expected by academicpages.
            md = "---\ntitle: \"" + html_escape(LatexNodes2Text().latex_to_text(b["title"]).replace("{", "").replace("}","").replace("\\","")) + '"\n'
            md += """collection: """ + LatexNodes2Text().latex_to_text(publist[pubsource]["collection"]["name"])
            md += """\npermalink: """ + publist[pubsource]["collection"]["permalink"] + html_filename
            note = False
            if "note" in b.keys():
                if len(str(b["note"])) > 5:
                    md += "\nexcerpt: '" + html_escape(LatexNodes2Text().latex_to_text(b["note"])) + "'"
                    note = True
            md += "\ndate: " + str(pub_date)
            md += "\nvenue: '" + html_escape(venue) + "'"
            url = False
            if "url" in b.keys():
                if len(str(b["url"])) > 5:
                    # Prepend http:// when the stored url lacks a scheme.
                    if b["url"].startswith('http://') or b["url"].startswith('https://'):
                        md += "\npaperurl: '" + b["url"] + "'"
                    else:
                        md += "\npaperurl: 'http://" + b["url"] + "'"
                    url = True
            md += "\ncitation: '" + html_escape(citation) + "'"
            md += "\n---"
            ## Markdown description for individual page
            if note:
                md += "\n" + html_escape(b["note"]) + "\n"
            if url:
                if b["url"].startswith('http://') or b["url"].startswith('https://'):
                    md += "\n[Access paper here](" + b["url"] + "){:target=\"_blank\"}\n"
                else:
                    md += "\n[Access paper here](http://" + b["url"] + "){:target=\"_blank\"}\n"
            else:
                # No direct link: point readers at a Google Scholar search.
                md += "\nUse [Google Scholar](https://scholar.google.com/scholar?q="+html.escape(clean_title.replace("-","+"))+"){:target=\"_blank\"} for full citation"
            md_filename = os.path.basename(md_filename)
            # Write the page into the collection folder, e.g. ../_publications/.
            with open("../_"+publist[pubsource]["collection"]["name"]+"/" + md_filename, 'w') as f:
                f.write(md)
            print(f'SUCESSFULLY PARSED {bib_id}: \"', b["title"][:60],"..."*(len(b['title'])>60),"\"")
        # field may not exist for a reference
        except KeyError as e:
            print(f'WARNING Missing Expected Field {e} from entry {bib_id}: \"', b["title"][:30],"..."*(len(b['title'])>30),"\"")
            continue
| [
"re.sub",
"pylatexenc.latex2text.LatexNodes2Text",
"os.path.basename",
"pybtex.database.input.bibtex.Parser"
] | [((1846, 1861), 'pybtex.database.input.bibtex.Parser', 'bibtex.Parser', ([], {}), '()\n', (1859, 1861), False, 'from pybtex.database.input import bibtex\n'), ((4515, 4565), 're.sub', 're.sub', (['"""\\\\[.*\\\\]|[^a-zA-Z0-9_-]"""', '""""""', 'clean_title'], {}), "('\\\\[.*\\\\]|[^a-zA-Z0-9_-]', '', clean_title)\n", (4521, 4565), False, 'import re\n'), ((9332, 9361), 'os.path.basename', 'os.path.basename', (['md_filename'], {}), '(md_filename)\n', (9348, 9361), False, 'import os\n'), ((7626, 7643), 'pylatexenc.latex2text.LatexNodes2Text', 'LatexNodes2Text', ([], {}), '()\n', (7641, 7643), False, 'from pylatexenc.latex2text import LatexNodes2Text\n'), ((5560, 5577), 'pylatexenc.latex2text.LatexNodes2Text', 'LatexNodes2Text', ([], {}), '()\n', (5575, 5577), False, 'from pylatexenc.latex2text import LatexNodes2Text\n'), ((5727, 5744), 'pylatexenc.latex2text.LatexNodes2Text', 'LatexNodes2Text', ([], {}), '()\n', (5742, 5744), False, 'from pylatexenc.latex2text import LatexNodes2Text\n'), ((6786, 6803), 'pylatexenc.latex2text.LatexNodes2Text', 'LatexNodes2Text', ([], {}), '()\n', (6801, 6803), False, 'from pylatexenc.latex2text import LatexNodes2Text\n'), ((7194, 7211), 'pylatexenc.latex2text.LatexNodes2Text', 'LatexNodes2Text', ([], {}), '()\n', (7209, 7211), False, 'from pylatexenc.latex2text import LatexNodes2Text\n'), ((5668, 5685), 'pylatexenc.latex2text.LatexNodes2Text', 'LatexNodes2Text', ([], {}), '()\n', (5683, 5685), False, 'from pylatexenc.latex2text import LatexNodes2Text\n'), ((5100, 5117), 'pylatexenc.latex2text.LatexNodes2Text', 'LatexNodes2Text', ([], {}), '()\n', (5115, 5117), False, 'from pylatexenc.latex2text import LatexNodes2Text\n'), ((5180, 5197), 'pylatexenc.latex2text.LatexNodes2Text', 'LatexNodes2Text', ([], {}), '()\n', (5195, 5197), False, 'from pylatexenc.latex2text import LatexNodes2Text\n'), ((5264, 5281), 'pylatexenc.latex2text.LatexNodes2Text', 'LatexNodes2Text', ([], {}), '()\n', (5279, 5281), False, 'from pylatexenc.latex2text 
import LatexNodes2Text\n'), ((5500, 5517), 'pylatexenc.latex2text.LatexNodes2Text', 'LatexNodes2Text', ([], {}), '()\n', (5515, 5517), False, 'from pylatexenc.latex2text import LatexNodes2Text\n'), ((5811, 5828), 'pylatexenc.latex2text.LatexNodes2Text', 'LatexNodes2Text', ([], {}), '()\n', (5826, 5828), False, 'from pylatexenc.latex2text import LatexNodes2Text\n'), ((5891, 5908), 'pylatexenc.latex2text.LatexNodes2Text', 'LatexNodes2Text', ([], {}), '()\n', (5906, 5908), False, 'from pylatexenc.latex2text import LatexNodes2Text\n'), ((5975, 5992), 'pylatexenc.latex2text.LatexNodes2Text', 'LatexNodes2Text', ([], {}), '()\n', (5990, 5992), False, 'from pylatexenc.latex2text import LatexNodes2Text\n'), ((7976, 7993), 'pylatexenc.latex2text.LatexNodes2Text', 'LatexNodes2Text', ([], {}), '()\n', (7991, 7993), False, 'from pylatexenc.latex2text import LatexNodes2Text\n'), ((5441, 5458), 'pylatexenc.latex2text.LatexNodes2Text', 'LatexNodes2Text', ([], {}), '()\n', (5456, 5458), False, 'from pylatexenc.latex2text import LatexNodes2Text\n'), ((6519, 6536), 'pylatexenc.latex2text.LatexNodes2Text', 'LatexNodes2Text', ([], {}), '()\n', (6534, 6536), False, 'from pylatexenc.latex2text import LatexNodes2Text\n'), ((6303, 6320), 'pylatexenc.latex2text.LatexNodes2Text', 'LatexNodes2Text', ([], {}), '()\n', (6318, 6320), False, 'from pylatexenc.latex2text import LatexNodes2Text\n'), ((7470, 7487), 'pylatexenc.latex2text.LatexNodes2Text', 'LatexNodes2Text', ([], {}), '()\n', (7485, 7487), False, 'from pylatexenc.latex2text import LatexNodes2Text\n'), ((4173, 4190), 'pylatexenc.latex2text.LatexNodes2Text', 'LatexNodes2Text', ([], {}), '()\n', (4188, 4190), False, 'from pylatexenc.latex2text import LatexNodes2Text\n')] |
import os
from pathlib import Path
import pytest
from ploomber.util import default
from ploomber.exceptions import DAGSpecNotFound
@pytest.fixture
def pkg_location():
    # Create a package-style layout (src/package_a/pipeline.yaml) in the
    # current directory and return the spec path as a string.
    parent = Path('src', 'package_a')
    parent.mkdir(parents=True)
    pkg_location = (parent / 'pipeline.yaml')
    pkg_location.touch()
    return str(pkg_location)
def test_entry_point_env_var(monkeypatch, tmp_directory, pkg_location):
    # The ENTRY_POINT environment variable takes precedence over spec files
    # on disk (the pkg_location fixture creates one).
    monkeypatch.setenv('ENTRY_POINT', 'some.entry.point')
    assert default.entry_point() == 'some.entry.point'
def test_entry_point_pkg_location(tmp_directory, pkg_location):
    # With only src/package_a/pipeline.yaml present, that spec is returned.
    assert default.entry_point() == str(pkg_location)
def test_entry_point_pkg_location_and_yaml(tmp_directory, pkg_location):
    # A pipeline.yaml in the current directory wins over the one in src/.
    Path('pipeline.yaml').touch()
    assert default.entry_point() == 'pipeline.yaml'
def test_entry_point_pkg_location_ignore_egg_info(tmp_directory):
    # Specs under *.egg-info (build artifacts) must be ignored; only
    # package_b's spec should be picked up.
    for pkg in ['package_a.egg-info', 'package_b']:
        parent = Path('src', pkg)
        parent.mkdir(parents=True)
        pkg_location = (parent / 'pipeline.yaml')
        pkg_location.touch()
    assert default.entry_point() == str(
        Path('src', 'package_b', 'pipeline.yaml'))
def test_entry_point_pkg_location_multiple_pkgs(tmp_directory):
    # With two packages under src/, the first one (package_a) wins.
    for pkg in ['package_a', 'package_b']:
        parent = Path('src', pkg)
        parent.mkdir(parents=True)
        pkg_location = (parent / 'pipeline.yaml')
        pkg_location.touch()
    assert default.entry_point() == str(
        Path('src', 'package_a', 'pipeline.yaml'))
def test_entry_point():
    # Default when nothing else is configured: the plain pipeline.yaml name.
    assert default.entry_point() == 'pipeline.yaml'
def test_entry_point_in_parent_folder(tmp_directory):
    # From a subdirectory, the spec in the parent folder is discovered and
    # returned as a relative path.
    Path('dir').mkdir()
    Path('pipeline.yaml').touch()
    os.chdir('dir')
    assert default.entry_point() == str(Path('..', 'pipeline.yaml'))
def test_entry_point_in_src_while_in_sibling_folder(tmp_directory):
    # From tests/, the spec under ../src/package/ is found (setup.py marks
    # the project root).
    Path('setup.py').touch()
    pkg = Path('src', 'package')
    pkg.mkdir(parents=True)
    (pkg / 'pipeline.yaml').touch()
    Path('tests').mkdir()
    os.chdir('tests')
    assert default.entry_point() == str(
        Path('..', 'src', 'package', 'pipeline.yaml'))
def test_path_to_env_local(tmp_directory):
    # env.yaml in the current directory wins over dir/env.yaml.
    Path('env.yaml').touch()
    Path('dir').mkdir()
    Path('dir', 'env.yaml').touch()
    assert default.path_to_env('dir') == str(Path('env.yaml').resolve())
def test_path_to_parent_sibling(tmp_directory):
    # Without a local env.yaml, fall back to the one inside the target dir.
    Path('dir').mkdir()
    Path('dir', 'env.yaml').touch()
    assert default.path_to_env('dir') == str(Path('dir', 'env.yaml').resolve())
@pytest.mark.parametrize('arg', ['dir', None])
def test_path_to_env_none(tmp_directory, arg):
    # No env.yaml anywhere: path_to_env returns None.
    Path('dir').mkdir()
    assert default.path_to_env(arg) is None
@pytest.mark.parametrize(
    'to_create, to_move',
    [
        [
            ['environment.yml'],
            '.',
        ],
        [
            ['requirements.txt'],
            '.',
        ],
        [
            ['setup.py'],
            '.',
        ],
        [
            ['setup.py', 'subdir/'],
            'subdir',
        ],
        [
            # environment.yml has higher priority than setup.py
            ['environment.yml', 'package/setup.py', 'package/nested/'],
            'package/nested/',
        ],
        [
            # requirements.txt has higher priority than setup.py
            ['requirements.txt', 'package/setup.py', 'package/nested/'],
            'package/nested/',
        ],
    ])
def test_find_root_recursively(tmp_directory, to_create, to_move):
    # Create the marker files (environment.yml / requirements.txt /
    # setup.py), cd into to_move, and verify the root is found by walking
    # up the directory tree. Paths ending in '/' are directories.
    expected = Path().resolve()
    for f in to_create:
        Path(f).parent.mkdir(exist_ok=True, parents=True)
        if f.endswith('/'):
            Path(f).mkdir()
        else:
            Path(f).touch()
    os.chdir(to_move)
    assert default.find_root_recursively() == expected
def test_raise_if_no_project_root(tmp_directory):
    # raise_=True turns "no root found" into a ValueError with a clear
    # message.
    with pytest.raises(ValueError) as excinfo:
        default.find_root_recursively(raise_=True)
    expected = "Could not determine project's root directory"
    assert expected in str(excinfo.value)
@pytest.mark.parametrize('to_create, to_move', [
    [
        ['environment.yml', 'src/my_package/pipeline.yaml'],
        '.',
    ],
])
def test_find_package_name(tmp_directory, to_create, to_move):
    # The package name is inferred from the src/<name>/pipeline.yaml layout.
    for f in to_create:
        Path(f).parent.mkdir(exist_ok=True, parents=True)
        if f.endswith('/'):
            Path(f).mkdir()
        else:
            Path(f).touch()
    os.chdir(to_move)
    assert default.find_package_name() == 'my_package'
def test_error_if_no_package(tmp_directory):
    # Without a src/ layout there is no package to detect: ValueError.
    Path('environment.yml').touch()
    with pytest.raises(ValueError) as excinfo:
        default.find_package_name()
    expected = "Could not find a valid package"
    assert expected in str(excinfo.value)
@pytest.mark.parametrize('filename, name', [
    ['pipeline.yaml', None],
    ['pipeline.serve.yaml', 'serve'],
    [Path('src', 'my_pkg', 'pipeline.yaml'), None],
    [Path('src', 'my_pkg', 'pipeline.serve.yaml'), 'serve'],
])
def test_entry_point_relative(tmp_directory, filename, name):
    # entry_point_relative finds pipeline[.name].yaml either at the top
    # level or under src/<pkg>/.
    Path(filename).parent.mkdir(parents=True, exist_ok=True)
    Path(filename).touch()
    assert default.entry_point_relative(name=name) == str(filename)
def test_entry_point_relative_error_if_both_exist(tmp_directory):
    # Ambiguous layout: a spec both at the top level and under src/ raises.
    Path('pipeline.yaml').touch()
    dir_ = Path('src', 'some_pkg')
    dir_.mkdir(parents=True)
    (dir_ / 'pipeline.yaml').touch()
    with pytest.raises(ValueError):
        default.entry_point_relative()
def test_entry_point_relative_error_if_doesnt_exist(tmp_directory):
    # No spec anywhere: DAGSpecNotFound.
    with pytest.raises(DAGSpecNotFound):
        default.entry_point_relative()
| [
"ploomber.util.default.path_to_env",
"ploomber.util.default.find_package_name",
"ploomber.util.default.entry_point_relative",
"pathlib.Path",
"ploomber.util.default.find_root_recursively",
"os.chdir",
"pytest.mark.parametrize",
"pytest.raises",
"ploomber.util.default.entry_point"
] | [((2543, 2588), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""arg"""', "['dir', None]"], {}), "('arg', ['dir', None])\n", (2566, 2588), False, 'import pytest\n'), ((2708, 3049), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""to_create, to_move"""', "[[['environment.yml'], '.'], [['requirements.txt'], '.'], [['setup.py'],\n '.'], [['setup.py', 'subdir/'], 'subdir'], [['environment.yml',\n 'package/setup.py', 'package/nested/'], 'package/nested/'], [[\n 'requirements.txt', 'package/setup.py', 'package/nested/'],\n 'package/nested/']]"], {}), "('to_create, to_move', [[['environment.yml'], '.'],\n [['requirements.txt'], '.'], [['setup.py'], '.'], [['setup.py',\n 'subdir/'], 'subdir'], [['environment.yml', 'package/setup.py',\n 'package/nested/'], 'package/nested/'], [['requirements.txt',\n 'package/setup.py', 'package/nested/'], 'package/nested/']])\n", (2731, 3049), False, 'import pytest\n'), ((4056, 4167), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""to_create, to_move"""', "[[['environment.yml', 'src/my_package/pipeline.yaml'], '.']]"], {}), "('to_create, to_move', [[['environment.yml',\n 'src/my_package/pipeline.yaml'], '.']])\n", (4079, 4167), False, 'import pytest\n'), ((184, 208), 'pathlib.Path', 'Path', (['"""src"""', '"""package_a"""'], {}), "('src', 'package_a')\n", (188, 208), False, 'from pathlib import Path\n'), ((1715, 1730), 'os.chdir', 'os.chdir', (['"""dir"""'], {}), "('dir')\n", (1723, 1730), False, 'import os\n'), ((1909, 1931), 'pathlib.Path', 'Path', (['"""src"""', '"""package"""'], {}), "('src', 'package')\n", (1913, 1931), False, 'from pathlib import Path\n'), ((2026, 2043), 'os.chdir', 'os.chdir', (['"""tests"""'], {}), "('tests')\n", (2034, 2043), False, 'import os\n'), ((3724, 3741), 'os.chdir', 'os.chdir', (['to_move'], {}), '(to_move)\n', (3732, 3741), False, 'import os\n'), ((4444, 4461), 'os.chdir', 'os.chdir', (['to_move'], {}), '(to_move)\n', (4452, 4461), False, 'import os\n'), ((5338, 
5361), 'pathlib.Path', 'Path', (['"""src"""', '"""some_pkg"""'], {}), "('src', 'some_pkg')\n", (5342, 5361), False, 'from pathlib import Path\n'), ((483, 504), 'ploomber.util.default.entry_point', 'default.entry_point', ([], {}), '()\n', (502, 504), False, 'from ploomber.util import default\n'), ((604, 625), 'ploomber.util.default.entry_point', 'default.entry_point', ([], {}), '()\n', (623, 625), False, 'from ploomber.util import default\n'), ((767, 788), 'ploomber.util.default.entry_point', 'default.entry_point', ([], {}), '()\n', (786, 788), False, 'from ploomber.util import default\n'), ((945, 961), 'pathlib.Path', 'Path', (['"""src"""', 'pkg'], {}), "('src', pkg)\n", (949, 961), False, 'from pathlib import Path\n'), ((1088, 1109), 'ploomber.util.default.entry_point', 'default.entry_point', ([], {}), '()\n', (1107, 1109), False, 'from ploomber.util import default\n'), ((1295, 1311), 'pathlib.Path', 'Path', (['"""src"""', 'pkg'], {}), "('src', pkg)\n", (1299, 1311), False, 'from pathlib import Path\n'), ((1438, 1459), 'ploomber.util.default.entry_point', 'default.entry_point', ([], {}), '()\n', (1457, 1459), False, 'from ploomber.util import default\n'), ((1556, 1577), 'ploomber.util.default.entry_point', 'default.entry_point', ([], {}), '()\n', (1575, 1577), False, 'from ploomber.util import default\n'), ((1742, 1763), 'ploomber.util.default.entry_point', 'default.entry_point', ([], {}), '()\n', (1761, 1763), False, 'from ploomber.util import default\n'), ((2055, 2076), 'ploomber.util.default.entry_point', 'default.entry_point', ([], {}), '()\n', (2074, 2076), False, 'from ploomber.util import default\n'), ((2287, 2313), 'ploomber.util.default.path_to_env', 'default.path_to_env', (['"""dir"""'], {}), "('dir')\n", (2306, 2313), False, 'from ploomber.util import default\n'), ((2471, 2497), 'ploomber.util.default.path_to_env', 'default.path_to_env', (['"""dir"""'], {}), "('dir')\n", (2490, 2497), False, 'from ploomber.util import default\n'), ((2672, 2696), 
'ploomber.util.default.path_to_env', 'default.path_to_env', (['arg'], {}), '(arg)\n', (2691, 2696), False, 'from ploomber.util import default\n'), ((3754, 3785), 'ploomber.util.default.find_root_recursively', 'default.find_root_recursively', ([], {}), '()\n', (3783, 3785), False, 'from ploomber.util import default\n'), ((3859, 3884), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3872, 3884), False, 'import pytest\n'), ((3905, 3947), 'ploomber.util.default.find_root_recursively', 'default.find_root_recursively', ([], {'raise_': '(True)'}), '(raise_=True)\n', (3934, 3947), False, 'from ploomber.util import default\n'), ((4474, 4501), 'ploomber.util.default.find_package_name', 'default.find_package_name', ([], {}), '()\n', (4499, 4501), False, 'from ploomber.util import default\n'), ((4611, 4636), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (4624, 4636), False, 'import pytest\n'), ((4657, 4684), 'ploomber.util.default.find_package_name', 'default.find_package_name', ([], {}), '()\n', (4682, 4684), False, 'from ploomber.util import default\n'), ((5168, 5207), 'ploomber.util.default.entry_point_relative', 'default.entry_point_relative', ([], {'name': 'name'}), '(name=name)\n', (5196, 5207), False, 'from ploomber.util import default\n'), ((5438, 5463), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (5451, 5463), False, 'import pytest\n'), ((5473, 5503), 'ploomber.util.default.entry_point_relative', 'default.entry_point_relative', ([], {}), '()\n', (5501, 5503), False, 'from ploomber.util import default\n'), ((5583, 5613), 'pytest.raises', 'pytest.raises', (['DAGSpecNotFound'], {}), '(DAGSpecNotFound)\n', (5596, 5613), False, 'import pytest\n'), ((5623, 5653), 'ploomber.util.default.entry_point_relative', 'default.entry_point_relative', ([], {}), '()\n', (5651, 5653), False, 'from ploomber.util import default\n'), ((726, 747), 'pathlib.Path', 'Path', (['"""pipeline.yaml"""'], {}), 
"('pipeline.yaml')\n", (730, 747), False, 'from pathlib import Path\n'), ((1126, 1167), 'pathlib.Path', 'Path', (['"""src"""', '"""package_b"""', '"""pipeline.yaml"""'], {}), "('src', 'package_b', 'pipeline.yaml')\n", (1130, 1167), False, 'from pathlib import Path\n'), ((1476, 1517), 'pathlib.Path', 'Path', (['"""src"""', '"""package_a"""', '"""pipeline.yaml"""'], {}), "('src', 'package_a', 'pipeline.yaml')\n", (1480, 1517), False, 'from pathlib import Path\n'), ((1657, 1668), 'pathlib.Path', 'Path', (['"""dir"""'], {}), "('dir')\n", (1661, 1668), False, 'from pathlib import Path\n'), ((1681, 1702), 'pathlib.Path', 'Path', (['"""pipeline.yaml"""'], {}), "('pipeline.yaml')\n", (1685, 1702), False, 'from pathlib import Path\n'), ((1771, 1798), 'pathlib.Path', 'Path', (['""".."""', '"""pipeline.yaml"""'], {}), "('..', 'pipeline.yaml')\n", (1775, 1798), False, 'from pathlib import Path\n'), ((1874, 1890), 'pathlib.Path', 'Path', (['"""setup.py"""'], {}), "('setup.py')\n", (1878, 1890), False, 'from pathlib import Path\n'), ((2000, 2013), 'pathlib.Path', 'Path', (['"""tests"""'], {}), "('tests')\n", (2004, 2013), False, 'from pathlib import Path\n'), ((2093, 2138), 'pathlib.Path', 'Path', (['""".."""', '"""src"""', '"""package"""', '"""pipeline.yaml"""'], {}), "('..', 'src', 'package', 'pipeline.yaml')\n", (2097, 2138), False, 'from pathlib import Path\n'), ((2189, 2205), 'pathlib.Path', 'Path', (['"""env.yaml"""'], {}), "('env.yaml')\n", (2193, 2205), False, 'from pathlib import Path\n'), ((2219, 2230), 'pathlib.Path', 'Path', (['"""dir"""'], {}), "('dir')\n", (2223, 2230), False, 'from pathlib import Path\n'), ((2243, 2266), 'pathlib.Path', 'Path', (['"""dir"""', '"""env.yaml"""'], {}), "('dir', 'env.yaml')\n", (2247, 2266), False, 'from pathlib import Path\n'), ((2403, 2414), 'pathlib.Path', 'Path', (['"""dir"""'], {}), "('dir')\n", (2407, 2414), False, 'from pathlib import Path\n'), ((2427, 2450), 'pathlib.Path', 'Path', (['"""dir"""', '"""env.yaml"""'], {}), 
"('dir', 'env.yaml')\n", (2431, 2450), False, 'from pathlib import Path\n'), ((2640, 2651), 'pathlib.Path', 'Path', (['"""dir"""'], {}), "('dir')\n", (2644, 2651), False, 'from pathlib import Path\n'), ((3519, 3525), 'pathlib.Path', 'Path', ([], {}), '()\n', (3523, 3525), False, 'from pathlib import Path\n'), ((4569, 4592), 'pathlib.Path', 'Path', (['"""environment.yml"""'], {}), "('environment.yml')\n", (4573, 4592), False, 'from pathlib import Path\n'), ((5133, 5147), 'pathlib.Path', 'Path', (['filename'], {}), '(filename)\n', (5137, 5147), False, 'from pathlib import Path\n'), ((4895, 4933), 'pathlib.Path', 'Path', (['"""src"""', '"""my_pkg"""', '"""pipeline.yaml"""'], {}), "('src', 'my_pkg', 'pipeline.yaml')\n", (4899, 4933), False, 'from pathlib import Path\n'), ((4947, 4991), 'pathlib.Path', 'Path', (['"""src"""', '"""my_pkg"""', '"""pipeline.serve.yaml"""'], {}), "('src', 'my_pkg', 'pipeline.serve.yaml')\n", (4951, 4991), False, 'from pathlib import Path\n'), ((5297, 5318), 'pathlib.Path', 'Path', (['"""pipeline.yaml"""'], {}), "('pipeline.yaml')\n", (5301, 5318), False, 'from pathlib import Path\n'), ((5072, 5086), 'pathlib.Path', 'Path', (['filename'], {}), '(filename)\n', (5076, 5086), False, 'from pathlib import Path\n'), ((2321, 2337), 'pathlib.Path', 'Path', (['"""env.yaml"""'], {}), "('env.yaml')\n", (2325, 2337), False, 'from pathlib import Path\n'), ((2505, 2528), 'pathlib.Path', 'Path', (['"""dir"""', '"""env.yaml"""'], {}), "('dir', 'env.yaml')\n", (2509, 2528), False, 'from pathlib import Path\n'), ((3570, 3577), 'pathlib.Path', 'Path', (['f'], {}), '(f)\n', (3574, 3577), False, 'from pathlib import Path\n'), ((3661, 3668), 'pathlib.Path', 'Path', (['f'], {}), '(f)\n', (3665, 3668), False, 'from pathlib import Path\n'), ((3703, 3710), 'pathlib.Path', 'Path', (['f'], {}), '(f)\n', (3707, 3710), False, 'from pathlib import Path\n'), ((4290, 4297), 'pathlib.Path', 'Path', (['f'], {}), '(f)\n', (4294, 4297), False, 'from pathlib import Path\n'), 
((4381, 4388), 'pathlib.Path', 'Path', (['f'], {}), '(f)\n', (4385, 4388), False, 'from pathlib import Path\n'), ((4423, 4430), 'pathlib.Path', 'Path', (['f'], {}), '(f)\n', (4427, 4430), False, 'from pathlib import Path\n')] |
import turtle

# Draw a white spiral on a black background: each step moves one unit
# further than the last and turns 91 degrees, so the square slowly rotates.
turtle.bgcolor("black")
pen = turtle.Turtle()
pen.speed(20)
pen.color("white")
for step in range(500):
    pen.forward(step)
    pen.left(91)
| [
"turtle.bgcolor",
"turtle.Turtle"
] | [((15, 38), 'turtle.bgcolor', 'turtle.bgcolor', (['"""black"""'], {}), "('black')\n", (29, 38), False, 'import turtle\n'), ((44, 59), 'turtle.Turtle', 'turtle.Turtle', ([], {}), '()\n', (57, 59), False, 'import turtle\n')] |
"""
@author: <NAME>
@title: SmartSearch - An Intelligent Search Engine.
@date: 05/06/2019
"""
import requests
from uuid import uuid4
from bs4 import BeautifulSoup
from urllib.parse import urlsplit
DOMAIN = "uic.edu"
def check_goodness(url):
    """
    Function to check if the url is a dead end (pdf, doc, docx, etc.).
    :param url: Link to be checked.
    :return True/False: Flag if dead end or not.
    """
    # Documents and media files are dead ends for a text crawler.
    # NOTE: matching is substring-based (urls may carry query strings),
    # so e.g. '.js' also rejects '.json' urls — this mirrors the original
    # behavior. The duplicate '.pdf' entry from the original list was removed.
    unnecessary_extensions = (
        '.pdf',
        '.doc',
        '.docx',
        '.xls',
        '.avi',
        '.mp4',
        '.xlsx',
        '.jpg',
        '.png',
        '.gif',
        '.gz',
        '.rar',
        '.tar',
        '.rv',
        '.tgz',
        '.zip',
        '.exe',
        '.js',
        '.css',
        '.ppt',
    )
    if any(extension in url for extension in unnecessary_extensions):
        return False
    # Sometimes, the urls contain '@' to indicate a phone number or email
    # address. Remove such urls.
    if '@' in url:
        return False
    # If everything is alright, return True.
    return True
def check_domain(url, main_domain):
    """
    Function to check the crawling is limited to UIC domain.
    :param url: URL to check.
    :return True/False: Flag to indicate whether the site is in the domain.
    """
    # An empty domain means "no restriction"; otherwise the domain string
    # must appear somewhere in the url.
    return main_domain == "" or main_domain in url
def crawl_for_sites(main_url, number_of_pages_to_crawl, main_domain):
    """
    Funtion to initialize the queue to store all the websites.
    :param main_url: Initial point to start crawling.
    :param number_of_pages_to_crawl: Minimum number of pages to crawl.
    :param main_domain: The domain to stick with while crawling.
    :return sites: List of websites.
    """
    # List to store crawled sites and Queue for BFS.
    sites_list = list()
    helper_queue = list()
    unknown_urls = list()
    broken_urls = list()
    # Map to store the document and words in the document.
    documents_words_count_map = dict()
    # Map to store the outgoing urls from the parent url.
    parent_children_url_map = dict()
    # Check if main url is responding with a 200 OK.
    try:
        # Minor preprocessing.
        # Remove "www." from the main url
        main_url = main_url.replace("www.", "")
        # Generating a random user agent as client
        # So, even though many requests are sent, it is not considered as spamming.
        # Basically, IP Spoofing.
        main_url_response = requests.get(
            main_url,
            headers = {'User-agent': 'Some-Anonymous-User-{}'.format(str(uuid4()))}
        )
        # If the main url can not be crawled, raise exception.
        if main_url_response.status_code != 200:
            raise Exception('\nThe main URL ({url}) can not be reached.\n'.format(url=main_url))
        # NOTE(review): unreachable — the raise above exits this branch first.
        broken_urls.append(main_url)
    # The main url could be broken.
    except:
        raise Exception('\nThe main URL ({url}) provided is broken.\n'.format(url=main_url))
        # NOTE(review): unreachable after the raise above.
        broken_urls.append(main_url)
    # Add the main url to our queue and sites list.
    sites_list.append(main_url)
    helper_queue.append(main_url)
    # Index of number of crawled websites.
    crawled_sites_number = 0
    # Operation to crawl only 3000 sites
    # NOTE(review): 'or not helper_queue' keeps looping when the queue is
    # empty, making the pop() below raise IndexError — presumably
    # 'and helper_queue' was intended; confirm before changing.
    while crawled_sites_number < number_of_pages_to_crawl or not helper_queue:
        # Store the local outgoing urls.
        local_urls = list()
        # Pop the url to crawl.
        url = helper_queue.pop()
        # Minor preprocessing.
        # Remove "www." from the main url
        url = url.replace("www.", "")
        # Extract base url to resolve relative links.
        # Source: https://medium.freecodecamp.org/how-to-build-a-url-crawler-to-map-a-website-using-python-6a287be1da11.
        url_parts = urlsplit(url)
        url_base = (url_parts.netloc).replace("www.", "")
        url_base = url_parts.scheme + "://" + url_parts.netloc
        # If URL = "https://somewebsite.com/something",
        # then path should only be "https://somewebsite.com/"
        # NOTE(review): url_path is computed but never used below.
        if '/' in url:
            url_path = url[:url.rfind('/') + 1]
        else:
            url_path = url
        # Hit the site.
        try:
            # Generating a random user agent as client
            # So, even though many requests are sent, it is not considered as spamming.
            # Basically, IP Spoofing.
            url_response = requests.get(
                url,
                headers = {'User-agent': 'Some-Anonymous-User-{}'.format(uuid4())}
            )
            # Continue if the site is unresponsive.
            if url_response.status_code != 200:
                print("\nThe URL ({url}) is unresponsive. Moving on with next site.\n".format(url=url))
                broken_urls.append(url)
                continue
        # Continue if the site url is broken.
        except Exception as e:
            print("\nThe URL ({url}) is broken. Moving on with next site.".format(url=url))
            print("Error: {error_description}".format(error_description=e))
            broken_urls.append(url)
            continue
        # Get the soup of the site.
        site_soup = BeautifulSoup(url_response.text, "lxml")
        # Get all link in the url.
        all_outgoing_links = site_soup.find_all('a')
        # If dead end (no outgoing links), continue.
        if not all_outgoing_links:
            continue
        # Fill the queue with the outgoing links now.
        for link in all_outgoing_links:
            anchor = link.attrs["href"] if "href" in link.attrs else ''
            # Get the fragment and append to the base.
            if anchor.startswith('/') or anchor.startswith("#"):
                local_link = url_base + anchor
                if check_domain(local_link, main_domain):
                    local_urls.append(local_link)
            # If the base is already there, append it.
            elif url_base in anchor:
                # Check the domain so no foreign urls are considered.
                if check_domain(anchor, main_domain):
                    local_urls.append(anchor)
            # If the anchor starts with "http", add it to local urls.
            elif anchor.startswith("http"):
                # Check the domain so no foreign urls are considered.
                if check_domain(anchor, main_domain):
                    local_urls.append(anchor)
            # If all above conditions fail, it might be an unknown URL.
            else:
                unknown_urls.append(anchor)
        # Push the url and all the outgoing urls to the document
        parent_children_url_map[url] = local_urls
        # Add the outgoing urls to the queue from left.
        # Adding from left obeys BFS traversal.
        helper_queue = local_urls + helper_queue
        for l in local_urls:
            # Check if the URL follows the "GOODNESS" rules mentioned above.
            if check_goodness(l) is False:
                continue
            # To avoid duplicated, check if it is already present in the queue.
            if l not in sites_list:
                print("Crawled Page Information -> Number {number} and URL {site}".format(
                    number=crawled_sites_number,
                    site=l)
                )
                crawled_sites_number += 1
                sites_list.append(l)
    return sites_list, unknown_urls, broken_urls, parent_children_url_map
| [
"bs4.BeautifulSoup",
"uuid.uuid4",
"urllib.parse.urlsplit"
] | [((3974, 3987), 'urllib.parse.urlsplit', 'urlsplit', (['url'], {}), '(url)\n', (3982, 3987), False, 'from urllib.parse import urlsplit\n'), ((5363, 5403), 'bs4.BeautifulSoup', 'BeautifulSoup', (['url_response.text', '"""lxml"""'], {}), "(url_response.text, 'lxml')\n", (5376, 5403), False, 'from bs4 import BeautifulSoup\n'), ((2764, 2771), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (2769, 2771), False, 'from uuid import uuid4\n'), ((4695, 4702), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (4700, 4702), False, 'from uuid import uuid4\n')] |
from abc import ABC, abstractmethod
import requests
import time
import os
class BaseCollector(ABC):
    """Base class for collectors that pull vCenter inventory data from the
    local inventory API and cache the decoded JSON on the instance.

    Every ``get_*`` accessor stores its result on an attribute of the same
    name (e.g. ``self.vcenters``) before returning it, so existing callers
    that read those attributes keep working.
    """

    # Root URL of the local inventory REST API all helpers talk to.
    _BASE_URL = "http://localhost:8000"

    @abstractmethod
    def collect(self):
        """Produce this collector's metrics. Implemented by subclasses."""
        pass

    def _get_json(self, path):
        """GET ``<_BASE_URL>/<path>`` and return the decoded JSON body."""
        request = requests.get(url="{}/{}".format(self._BASE_URL, path))
        return request.json()

    def _get_iteration_resource(self, resource):
        """Fetch the *resource* collection for the current inventory iteration."""
        current_iteration = self.get_iteration()
        return self._get_json("{}/{}".format(resource, current_iteration))

    def get_vcenters(self):
        """Return (and cache) all vcenters of the current iteration."""
        self.vcenters = self._get_iteration_resource("vcenters")
        return self.vcenters

    def get_datacenters(self):
        """Return (and cache) all datacenters of the current iteration."""
        self.datacenters = self._get_iteration_resource("datacenters")
        return self.datacenters

    def get_clusters(self):
        """Return (and cache) all clusters of the current iteration."""
        self.clusters = self._get_iteration_resource("clusters")
        return self.clusters

    def get_hosts(self):
        """Return (and cache) all hosts of the current iteration."""
        self.hosts = self._get_iteration_resource("hosts")
        return self.hosts

    def get_datastores(self):
        """Return (and cache) all datastores of the current iteration."""
        self.datastores = self._get_iteration_resource("datastores")
        return self.datastores

    def get_vms(self):
        """Return (and cache) all vms of the current iteration."""
        self.vms = self._get_iteration_resource("vms")
        return self.vms

    def get_iteration(self):
        """Return (and cache) the current inventory iteration number."""
        self.iteration = self._get_json("iteration")
        return self.iteration

    def get_targets(self):
        """Return (and cache) the list of vrops targets."""
        self.target = self._get_json("vrops_list")
        return self.target

    def get_target_tokens(self):
        """Return (and cache) the auth tokens per vrops target."""
        self.target_tokens = self._get_json("target_tokens")
        return self.target_tokens

    def post_registered_collector(self, collector, *metric_names):
        """Register *collector* together with the metric names it exports."""
        payload = {
            'collector': collector,
            'metric_names': list(metric_names)
        }
        request = requests.post(json=payload, url="http://localhost:8000/register")
        if request.status_code != 200:
            print("request failed with status: {}".format(request.status_code))

    def get_registered_collectors(self):
        """Return (and cache) the collectors currently registered."""
        self.collectors_up = self._get_json("register")
        return self.collectors_up

    def post_metrics(self, metric):
        """Announce a single metric name to the inventory API."""
        payload = {
            'metric_name': metric
        }
        r = requests.post(json=payload, url="http://localhost:8000/metrics")
        if r.status_code != 200:
            print("request failed with status: {}".format(r.status_code))

    def get_metrics(self):
        """Return (and cache) the metric names known to the inventory API."""
        self.metrics = self._get_json("metrics")
        return self.metrics

    def delete_metrics(self):
        """Delete all metrics registered with the inventory API."""
        request = requests.delete(url="http://localhost:8000/metrics")
        if request.status_code != 200:
            print("request failed with status: {}".format(request.status_code))

    def _group_uuids_by_target(self, cache_attr, fetch):
        """Group resource uuids by their 'target' field.

        The grouping is cached on *cache_attr* so the inventory API is
        queried at most once per attribute.
        """
        if not hasattr(self, cache_attr):
            grouped = dict()
            for uuid, item in fetch().items():
                grouped.setdefault(item['target'], list()).append(uuid)
            setattr(self, cache_attr, grouped)
        return getattr(self, cache_attr)

    def get_clusters_by_target(self):
        """Return {target: [cluster uuid, ...]}, cached after first call."""
        return self._group_uuids_by_target('target_clusters', self.get_clusters)

    def get_hosts_by_target(self):
        """Return {target: [host uuid, ...]}, cached after first call."""
        return self._group_uuids_by_target('target_hosts', self.get_hosts)

    def get_datastores_by_target(self):
        """Return {target: [datastore uuid, ...]}, cached after first call."""
        return self._group_uuids_by_target('target_datastores', self.get_datastores)

    def get_vms_by_target(self):
        """Return {target: [vm uuid, ...]}, cached after first call."""
        return self._group_uuids_by_target('target_vms', self.get_vms)

    def wait_for_inventory_data(self):
        """Block (polling every 5s) until a non-zero iteration is reported."""
        iteration = 0
        while not iteration:
            time.sleep(5)
            iteration = self.get_iteration()
            # os.environ['DEBUG'] raised KeyError when DEBUG was unset;
            # default to '0' (quiet) so the poll loop cannot crash.
            if os.environ.get('DEBUG', '0') >= '1':
                print("waiting for initial iteration: " + type(self).__name__)
        print("done: initial query " + type(self).__name__)
        return
| [
"requests.post",
"time.sleep",
"requests.get",
"requests.delete"
] | [((330, 347), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (342, 347), False, 'import requests\n'), ((594, 611), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (606, 611), False, 'import requests\n'), ((858, 875), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (870, 875), False, 'import requests\n'), ((1110, 1127), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1122, 1127), False, 'import requests\n'), ((1366, 1383), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1378, 1383), False, 'import requests\n'), ((1618, 1635), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1630, 1635), False, 'import requests\n'), ((1742, 1793), 'requests.get', 'requests.get', ([], {'url': '"""http://localhost:8000/iteration"""'}), "(url='http://localhost:8000/iteration')\n", (1754, 1793), False, 'import requests\n'), ((1912, 1964), 'requests.get', 'requests.get', ([], {'url': '"""http://localhost:8000/vrops_list"""'}), "(url='http://localhost:8000/vrops_list')\n", (1924, 1964), False, 'import requests\n'), ((2081, 2136), 'requests.get', 'requests.get', ([], {'url': '"""http://localhost:8000/target_tokens"""'}), "(url='http://localhost:8000/target_tokens')\n", (2093, 2136), False, 'import requests\n'), ((2414, 2479), 'requests.post', 'requests.post', ([], {'json': 'payload', 'url': '"""http://localhost:8000/register"""'}), "(json=payload, url='http://localhost:8000/register')\n", (2427, 2479), False, 'import requests\n'), ((2659, 2709), 'requests.get', 'requests.get', ([], {'url': '"""http://localhost:8000/register"""'}), "(url='http://localhost:8000/register')\n", (2671, 2709), False, 'import requests\n'), ((2901, 2965), 'requests.post', 'requests.post', ([], {'json': 'payload', 'url': '"""http://localhost:8000/metrics"""'}), "(json=payload, url='http://localhost:8000/metrics')\n", (2914, 2965), False, 'import requests\n'), ((3119, 3168), 'requests.get', 'requests.get', ([], {'url': 
'"""http://localhost:8000/metrics"""'}), "(url='http://localhost:8000/metrics')\n", (3131, 3168), False, 'import requests\n'), ((3284, 3336), 'requests.delete', 'requests.delete', ([], {'url': '"""http://localhost:8000/metrics"""'}), "(url='http://localhost:8000/metrics')\n", (3299, 3336), False, 'import requests\n'), ((5356, 5369), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (5366, 5369), False, 'import time\n')] |
import unittest
from rdbtools3.intset import unpack_intset
from rdbtools3.exceptions import RDBValueError
class TestIntset(unittest.TestCase):
    """Tests for unpack_intset: 16/32/64-bit encodings and malformed input."""

    def _raises_regex(self, exc, pattern):
        """Return an assertRaisesRegex context manager, portably.

        The pre-3.2 spelling ``assertRaisesRegexp`` must be looked up
        lazily: on Python >= 3.12 the alias was removed, so evaluating it
        eagerly (as a ``getattr`` default) would itself raise
        AttributeError even when ``assertRaisesRegex`` exists.
        """
        checker = getattr(self, 'assertRaisesRegex', None)
        if checker is None:
            checker = getattr(self, 'assertRaisesRegexp')
        return checker(exc, pattern)

    def test_3x2bytes(self):
        val = (b'\x02\x00\x00\x00'  # int size
               b'\x03\x00\x00\x00'  # set length
               b'\x01\x00'          # item 1
               b'\x02\x00'          # item 2
               b'\x00\x01')         # item 3
        ret = list(unpack_intset(val))
        self.assertEqual([
            1, 2, 256
        ], ret)

    def test_2x4bytes(self):
        val = (b'\x04\x00\x00\x00'
               b'\x02\x00\x00\x00'
               b'\x01\x00\x00\x00'
               b'\x00\x00\x00\x80')
        ret = list(unpack_intset(val))
        self.assertEqual([
            1, 2**31
        ], ret)

    def test_2x8bytes(self):
        val = (b'\x08\x00\x00\x00'
               b'\x02\x00\x00\x00'
               b'\x01\x00\x00\x00\x00\x00\x00\x00'
               b'\x00\x00\x00\x00\x00\x00\x00\x80')
        ret = list(unpack_intset(val))
        self.assertEqual([
            1, 2**63
        ], ret)

    def test_bad_length(self):
        val = (b'\x02\x00\x00\x00'
               b'\x01\x00\x00\x00'
               b'\x01\x00'
               b'\x02\x00\x00')
        # Raw string so the escaped parens reach the regex engine intact
        # (a plain "\(" is an invalid string escape in modern Python).
        with self._raises_regex(RDBValueError,
                                r"Bad content size 5 \(expected 2\)"):
            list(unpack_intset(val))

    def test_bad_size_encoding(self):
        val = (b'\x03\x00\x00\x00'
               b'\x01\x00\x00\x00'
               b'\x00\x00\x00')
        with self._raises_regex(RDBValueError,
                                "Unexpected size encoding 0x3"):
            list(unpack_intset(val))

    def test_zero_len(self):
        val = (b'\x02\x00\x00\x00'
               b'\x00\x00\x00\x00')
        ret = list(unpack_intset(val))
        self.assertEqual([], ret)


if __name__ == "__main__":
    unittest.main()
| [
"unittest.main",
"rdbtools3.intset.unpack_intset"
] | [((2043, 2058), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2056, 2058), False, 'import unittest\n'), ((426, 444), 'rdbtools3.intset.unpack_intset', 'unpack_intset', (['val'], {}), '(val)\n', (439, 444), False, 'from rdbtools3.intset import unpack_intset\n'), ((705, 723), 'rdbtools3.intset.unpack_intset', 'unpack_intset', (['val'], {}), '(val)\n', (718, 723), False, 'from rdbtools3.intset import unpack_intset\n'), ((1015, 1033), 'rdbtools3.intset.unpack_intset', 'unpack_intset', (['val'], {}), '(val)\n', (1028, 1033), False, 'from rdbtools3.intset import unpack_intset\n'), ((1956, 1974), 'rdbtools3.intset.unpack_intset', 'unpack_intset', (['val'], {}), '(val)\n', (1969, 1974), False, 'from rdbtools3.intset import unpack_intset\n'), ((1462, 1480), 'rdbtools3.intset.unpack_intset', 'unpack_intset', (['val'], {}), '(val)\n', (1475, 1480), False, 'from rdbtools3.intset import unpack_intset\n'), ((1816, 1834), 'rdbtools3.intset.unpack_intset', 'unpack_intset', (['val'], {}), '(val)\n', (1829, 1834), False, 'from rdbtools3.intset import unpack_intset\n')] |
"""
Predicts which tiles are of good quality in WBS.
Usage:
python3 quality_control.py --help
"""
import argparse
from tqdm import tqdm
import tensorflow as tf
from tensorflow import keras
from glob import glob
from quality_net_utilities import *
from image_generator import *
# Number of colour channels expected in every tile.
n_channels = 3

if __name__ == "__main__":
    arg_parser = argparse.ArgumentParser(
        description='Predicts which tiles are of good quality in WBS.')
    arg_parser.add_argument(
        '--slide_path', dest='slide_path', action='store', type=str,
        default=None, help="Path to slide.")
    arg_parser.add_argument(
        '--input_height', dest='input_height', action='store', type=int,
        default=512, help='The file extension for all images.')
    arg_parser.add_argument(
        '--input_width', dest='input_width', action='store', type=int,
        default=512, help='The file extension for all images.')
    arg_parser.add_argument(
        '--checkpoint_path', dest='checkpoint_path', action='store', type=str,
        default='summaries', help='Path to checkpoint.')
    arg_parser.add_argument(
        '--batch_size', dest='batch_size', action='store', type=int,
        default=4, help='Size of mini batch.')
    args = arg_parser.parse_args()

    # Restore the trained quality-control network from its checkpoint.
    model = keras.models.load_model(args.checkpoint_path)

    def tile_generator():
        # Stream (tile, coordinate) pairs from the slide, scaling pixel
        # values from [0, 255] into [0, 1].
        for tile, tile_coords in image_generator_slide(
                args.slide_path, args.input_height, args.input_width):
            yield tile / 255., tile_coords

    dataset = tf.data.Dataset.from_generator(
        tile_generator,
        output_types=(tf.float32, tf.string),
        output_shapes=([args.input_height, args.input_width, n_channels], []))
    dataset = dataset.batch(args.batch_size, drop_remainder=False)
    dataset = dataset.prefetch(5)

    # Emit one CSV-ish line per tile: coordinates, hard label, raw score.
    for batch, batch_coords in tqdm(dataset):
        scores = model(batch)
        for coord, score in zip(batch_coords.numpy(), scores.numpy()):
            print('OUT,{},{},{}'.format(coord.decode(), int(score > 0.5), float(score)))
"tqdm.tqdm",
"tensorflow.keras.models.load_model",
"tensorflow.data.Dataset.from_generator",
"argparse.ArgumentParser"
] | [((341, 433), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Predicts which tiles are of good quality in WBS."""'}), "(description=\n 'Predicts which tiles are of good quality in WBS.')\n", (364, 433), False, 'import argparse\n'), ((1432, 1477), 'tensorflow.keras.models.load_model', 'keras.models.load_model', (['args.checkpoint_path'], {}), '(args.checkpoint_path)\n', (1455, 1477), False, 'from tensorflow import keras\n'), ((1836, 1937), 'tensorflow.data.Dataset.from_generator', 'tf.data.Dataset.from_generator', (['generator'], {'output_types': 'output_types', 'output_shapes': 'output_shapes'}), '(generator, output_types=output_types,\n output_shapes=output_shapes)\n', (1866, 1937), True, 'import tensorflow as tf\n'), ((2078, 2094), 'tqdm.tqdm', 'tqdm', (['tf_dataset'], {}), '(tf_dataset)\n', (2082, 2094), False, 'from tqdm import tqdm\n')] |
"""
Square
======
"""
import numpy as np
from ..topology_graph import Edge
from .cof import Cof
from .vertices import LinearVertex, NonLinearVertex
class Square(Cof):
    """
    Represents a square COF topology graph.
    Unoptimized construction
    .. moldoc::
        import moldoc.molecule as molecule
        import stk
        cof = stk.ConstructedMolecule(
            topology_graph=stk.cof.PeriodicSquare(
                building_blocks=(
                    stk.BuildingBlock(
                        smiles='BrCC(Br)',
                        functional_groups=[stk.BromoFactory()],
                    ),
                    stk.BuildingBlock(
                        smiles='BrC1=C(Br)C(Br)=C1Br',
                        functional_groups=[stk.BromoFactory()],
                    ),
                ),
                lattice_size=(3, 3, 1),
            ),
        )
        moldoc_display_molecule = molecule.Molecule(
            atoms=(
                molecule.Atom(
                    atomic_number=atom.get_atomic_number(),
                    position=position,
                ) for atom, position in zip(
                    cof.get_atoms(),
                    cof.get_position_matrix(),
                )
            ),
            bonds=(
                molecule.Bond(
                    atom1_id=bond.get_atom1().get_id(),
                    atom2_id=bond.get_atom2().get_id(),
                    order=bond.get_order(),
                ) for bond in cof.get_bonds()
                if all(p == 0 for p in bond.get_periodicity())
            ),
        )
    ``Collapser(scale_steps=False)`` optimized construction
    .. moldoc::
        import moldoc.molecule as molecule
        import stk
        cof = stk.ConstructedMolecule(
            topology_graph=stk.cof.PeriodicSquare(
                building_blocks=(
                    stk.BuildingBlock(
                        smiles='BrCC(Br)',
                        functional_groups=[stk.BromoFactory()],
                    ),
                    stk.BuildingBlock(
                        smiles='BrC1=C(Br)C(Br)=C1Br',
                        functional_groups=[stk.BromoFactory()],
                    ),
                ),
                lattice_size=(3, 3, 1),
                optimizer=stk.Collapser(scale_steps=False),
            ),
        )
        moldoc_display_molecule = molecule.Molecule(
            atoms=(
                molecule.Atom(
                    atomic_number=atom.get_atomic_number(),
                    position=position,
                ) for atom, position in zip(
                    cof.get_atoms(),
                    cof.get_position_matrix(),
                )
            ),
            bonds=(
                molecule.Bond(
                    atom1_id=bond.get_atom1().get_id(),
                    atom2_id=bond.get_atom2().get_id(),
                    order=bond.get_order(),
                ) for bond in cof.get_bonds()
                if all(p == 0 for p in bond.get_periodicity())
            ),
        )
    Building blocks with four and two functional groups are required
    for this topology graph.
    When using a :class:`dict` for the `building_blocks` parameter,
    as in :ref:`cof-topology-graph-examples`:
    *Multi-Building Block COF Construction*, a
    :class:`.BuildingBlock`, with the following number of functional
    groups, needs to be assigned to each of the following vertex ids:
    | 4-functional groups: 0
    | 2-functional groups: 1 to 2
    See :class:`.Cof` for more details and examples.
    """
    # Orthonormal unit-cell basis vectors (a, b, c) of the periodic lattice.
    _lattice_constants = _a, _b, _c = (
        np.array([1., 0., 0.]),
        np.array([0., 1., 0.]),
        np.array([0., 0., 1.])
    )
    # The single tetratopic (4-functional-group) vertex, placed at the
    # centre of the unit cell.
    _non_linears = (
        NonLinearVertex(0, (0.5)*_a + (0.5)*_b + (0.5)*_c),
    )
    # Ditopic linker vertices placed halfway between the central vertex
    # and its periodic images along the a and b directions.
    _vertex_prototypes = (
        *_non_linears,
        LinearVertex.init_at_shifted_center(
            id=1,
            vertices=(_non_linears[0], _non_linears[0]),
            cell_shifts=((0, 0, 0), (1, 0, 0)),
            lattice_constants=_lattice_constants,
        ),
        LinearVertex.init_at_shifted_center(
            id=2,
            vertices=(_non_linears[0], _non_linears[0]),
            cell_shifts=((0, 0, 0), (0, 1, 0)),
            lattice_constants=_lattice_constants,
        ),
    )
    # Each linker bonds to the central vertex twice: once within the unit
    # cell and once across the periodic boundary (non-zero periodicity).
    _edge_prototypes = (
        Edge(0, _vertex_prototypes[1], _vertex_prototypes[0]),
        Edge(
            id=1,
            vertex1=_vertex_prototypes[1],
            vertex2=_vertex_prototypes[0],
            periodicity=(1, 0, 0),
        ),
        Edge(2, _vertex_prototypes[2], _vertex_prototypes[0]),
        Edge(
            id=3,
            vertex1=_vertex_prototypes[2],
            vertex2=_vertex_prototypes[0],
            periodicity=(0, 1, 0),
        ),
    )
| [
"numpy.array"
] | [((3649, 3674), 'numpy.array', 'np.array', (['[1.0, 0.0, 0.0]'], {}), '([1.0, 0.0, 0.0])\n', (3657, 3674), True, 'import numpy as np\n'), ((3681, 3706), 'numpy.array', 'np.array', (['[0.0, 1.0, 0.0]'], {}), '([0.0, 1.0, 0.0])\n', (3689, 3706), True, 'import numpy as np\n'), ((3713, 3738), 'numpy.array', 'np.array', (['[0.0, 0.0, 1.0]'], {}), '([0.0, 0.0, 1.0])\n', (3721, 3738), True, 'import numpy as np\n')] |
# Scripts for modeling ATF data.
def atf_article_uri(url, post_id):
    """Article URI: the hash of the thread URL followed by the post id."""
    thread_part = get_url_hash(url)
    return thread_part + "/" + post_id
def atf_thread_uri(url):
    """Thread URI: the hash of the thread URL (get_url_hash is defined
    elsewhere in this project)."""
    return get_url_hash(url)
# Sample phpBB-style timestamp used to exercise atf_date_created.
test_date = "Wed Feb 11, 2015 10:31 am"
def atf_date_created(date, format="%a %b %d, %Y %I:%M %p"):
    """Put the date in ISO format (iso8601date is defined elsewhere)."""
    return iso8601date(date, format)
def atf_joined_date(date, format="%a %b %d, %Y %I:%M %p"):
    """Put the date in ISO format (iso8601date is defined elsewhere)."""
    return iso8601date(date, format)
# Additional sample date strings/formats observed in the scraped data.
test_date2 = "Wednesday, March 18, 2015 10:33 AM"
test_format2 = "%A, %B %d, %Y %I:%M %p"
test_date3 = "2014-01-14 02:52:44"
from HTMLParser import HTMLParser
class HTMLStripper(HTMLParser):
    """HTML parser that collects only text content, substituting a single
    space for every tag so adjacent words do not run together."""

    def __init__(self):
        # Initialise the base parser properly. Python 3's html.parser sets
        # required state (e.g. convert_charrefs) in __init__, so calling
        # only self.reset() leaves the instance half-initialised and
        # feed() fails there; HTMLParser.__init__ also calls reset() on
        # Python 2, preserving the original behaviour.
        HTMLParser.__init__(self)
        self.fed = []  # accumulated text fragments

    def handle_data(self, d):
        """Keep raw text content."""
        self.fed.append(d)

    def handle_starttag(self, tag, attrs):
        # Replace the tag with a space so words around it stay separated.
        self.fed.append(" ")

    def handle_endtag(self, tag):
        self.fed.append(" ")

    def handle_startendtag(self, tag, attrs):
        self.fed.append(" ")

    def get_data(self):
        """Return all collected text as a single string."""
        return ''.join(self.fed)
def strip_tags(html):
    """Strip all HTML tags from *html*, returning only its text content
    (tags are replaced with spaces by HTMLStripper)."""
    stripper = HTMLStripper()
    stripper.feed(html)
    return stripper.get_data()
# Sample forum-signature markup used to exercise signature_clean.
test_signature = "<span style=\"font-style: italic\">Precision Combat Arms<br />1710 E Trent, Unit 1<br />Spokane, WA 99202<br />509-535-0655<br />M-F 9-5</span></div>"
def signature_clean(text):
    """Strip HTML tags from a signature and trim surrounding whitespace."""
    return strip_tags(text).strip()
def atf_fc_uri(article_uri):
    """URI of the feature collection belonging to *article_uri*."""
    suffix = "/featurecollection"
    return article_uri + suffix
def atf_get_city(city_state):
    """City portion of a "City, State" string; the whole string if there
    is no comma."""
    return city_state.split(",")[0] if "," in city_state else city_state
def atf_get_state(city_state):
    """State portion of a "City, State" string (keeps any leading space);
    empty string when no comma is present."""
    parts = city_state.split(",")
    if len(parts) > 1:
        return parts[1]
    return ""
def atf_address_uri(city, state, country):
    """URI for an address; thin wrapper around address_uri (defined
    elsewhere in this project)."""
    return address_uri(city, state, country)
def atf_clean_post_count(post_count):
    """Normalise a post count via numericOnly (defined elsewhere;
    presumably keeps only the digits — verify against its definition)."""
    return numericOnly(post_count)
def atf_clean_from_user(user):
    """Trim whitespace and map the placeholder values "-" and "N/A" to ''."""
    cleaned = user.strip()
    return '' if cleaned in ("-", "N/A") else cleaned
import re
# Keyword/phrase list used to flag weapon-related text. Kept verbatim from
# the original curation: duplicates and overlaps are intentional, and the
# bare numeric entries are filtered out / selectively re-added further below.
WEAPONS_PHRASES = ['gun',
                   'rifle',
                   'missile',
                   'mark',
                   'tank',
                   'mk',
                   'torpedo',
                   'naval',
                   'vehicle',
                   'remington',
                   'smith',
                   'pistol',
                   'wesson',
                   'grenade',
                   'howitzer',
                   'mine',
                   'mortar',
                   'colt',
                   'submachine',
                   'canon',
                   'cannon',
                   'mod\xe8le',
                   'ruger',
                   'koch',
                   'heckler',
                   'weapon',
                   'bomb',
                   'armoured',
                   'carbine',
                   'beretta',
                   'missile',
                   'armored',
                   'winchester',
                   'springfield',
                   'revolver',
                   'launcher',
                   'caliber',
                   'assault',
                   'sig',
                   '45',
                   'ordnance',
                   'zastava',
                   'rocket',
                   'anti-tank',
                   'walther',
                   'combat',
                   'benelli',
                   'sniper',
                   'series',
                   'mle',
                   'browning',
                   'schneider',
                   'm1',
                   'carrier',
                   'kanone',
                   'defense',
                   'artillery',
                   'tank',
                   'steyr',
                   'rml',
                   'mowag',
                   'wz.',
                   'mauser',
                   'm3',
                   'vehicle',
                   'vickers',
                   'taurus',
                   'tactical',
                   'sword',
                   'infantry',
                   'panzer',
                   'marlin',
                   'hotchkiss',
                   'fk',
                   'barrett',
                   'weapon',
                   'sauer',
                   'modello',
                   'explosive',
                   'aircraft',
                   'tractor',
                   'skoda',
                   'self-propelled',
                   'rheinmetall',
                   'reconnaissance',
                   'minenwerfer',
                   'm4',
                   'kel-tec',
                   'fighting',
                   'daewoo',
                   'bofors',
                   'rocket',
                   'sd.kfz.',
                   'scout',
                   'pindad',
                   'knife',
                   'carriage',
                   'bliss-leavitt',
                   'arms',
                   'advanced',
                   'storm',
                   'sdkfz',
                   'savage',
                   'saurer',
                   'renault',
                   'nuclear',
                   'missile',
                   'bayonet',
                   'arsenal',
                   'sword',
                   'armoured',
                   'weapons',
                   'weapon-stub',
                   'war',
                   'strike',
                   'spartan',
                   'oerlikon',
                   'obusier',
                   'nebelwerfer',
                   'm\xF6rser',
                   'munition',
                   'military',
                   'marksman',
                   'krupp',
                   'flamethrower',
                   'feldhaubitze',
                   'eagle',
                   'crosman',
                   'cobra',
                   'carrier',
                   'bushmaster',
                   'breda',
                   'army',
                   'amphibious',
                   'afv',
                   'wolf',
                   'vektor',
                   'vehicle',
                   'turret',
                   'tanks',
                   'stridsvagn',
                   'soltam',
                   'siege',
                   'shotgun',
                   'sg',
                   'schwerer',
                   'schwere',
                   'pdw',
                   'panhard',
                   'nambu',
                   'mortier',
                   'magnum',
                   'm8',
                   'm60',
                   'm1918',
                   'm1895',
                   'luftminenwerfer',
                   'leopard',
                   'kbk',
                   'kanon',
                   'imbel',
                   'humber',
                   'hi-point',
                   'guns',
                   'gryazev-shipunov',
                   'explosives',
                   'denel',
                   'battle',
                   'axe',
                   'automag',
                   'attack',
                   'armory',
                   'armalite',
                   'alfa',
                   'pistol',
                   'bomb',
                   'artillery']
def isInt(s):
    """Return True when int(s) succeeds, False otherwise.

    Only conversion failures mean "not an int": a bare ``except:`` (as
    before) would also swallow KeyboardInterrupt/SystemExit.
    """
    try:
        int(s)
        return True
    except (TypeError, ValueError):
        pass
    return False
# Drop the bare integers picked up above (they match far too broadly) ...
WEAPONS_PHRASES = [w for w in WEAPONS_PHRASES if not isInt(w)]
# restore a few numbers
WEAPONS_PHRASES = WEAPONS_PHRASES + ["45", ".45", "38", "50", "3006", ".22", "22", "357"]
# add a few missing popular items
WEAPONS_PHRASES = WEAPONS_PHRASES + ['uzi', 'ammo', 'ammunition', 'stoner', 'scar17', 'taser', 'tazer',
                                     'Tokarev', 'glock', 'AK-47', 'AK 47', 'luger', 'P38', 'spdmstr',
                                     'AR15', 'AR-15', 'AMT', 'Trejo', 'Armatix', 'Astra', 'Bechowiec',
                                     'Bauer', 'Benelli', 'Versa', 'Browning', 'BUL', 'Caracal', 'Zamorana',
                                     'Wesson', 'Danuvia', 'Musgrave', 'Vektor', 'Enfield',
                                     'FEG', 'FN', 'Herstal', 'Gabilondo', 'Urresti', 'Makarov', 'Izhevsk',
                                     'Sauer', 'KBP', 'Kimber', 'MAB', 'Mauser', 'MAC-10', 'MAC-11',
                                     'MAC10', 'MAC11', 'Pindad',
                                     'RPC', 'Bonifacio', 'Steyr',
                                     'Tanfoglio', 'Tula', "CZ",
                                     "\x010CZ", 'RSAF', 'Webley',
                                     'Norinco', 'Akdal', 'Famars',
                                     'Marlin']
# FROM SPREADSHEET 28 May 2015
# Phrases used to flag weapons-of-mass-destruction-related text.
# NOTE(review): entries such as "Antrax" and "Woompher" look misspelled but
# are kept verbatim — downstream matching may rely on the exact strings.
WMD_RELATED = [
    "Abrin",
    "Agent 15",
    "Antrax",
    "Arsines",
    "Backpack nuke",
    "Balancing Machines",
    "Bambi",
    "Beryllium",
    "biohazard",
    "biological",
    "biological weapon",
    "Bioreactors",
    "bonus damage",
    "bush",
    "Capacitors",
    "Chemical",
    "chemical weapon",
    "cheney",
    "chlorine",
    "Crucibles",
    "cyanide",
    "Cyanogen chloride",
    "cyclosarin",
    "Devices",
    "Dirty Bomb",
    "dirty bomber",
    "Discharge",
    "Electrical",
    "face-shot",
    "FAE",
    "Fermenters",
    "Filament",
    "Flow Form Machines",
    "Focal Plane Sensors",
    "Frequency Changers",
    "Generators",
    "Gravity Gradiometers",
    "Gravity Meters",
    "Hafnium",
    "Hafnium",
    "Heat Exchanges",
    "hydrogen chloride",
    "hydrogen cyanide",
    "IED",
    "IID",
    "incendiary",
    "Inspection",
    "Isostatic Presses",
    "Lasers",
    "Lathes",
    "lewisite",
    "Magnetic gradiometers",
    "Maraging Steel",
    "nerve gas",
    "Niobium",
    "nitrogen mustard",
    "nitrogen oxide",
    "Novichok",
    "nuclear",
    "nuclear weapon",
    "Outlet pipes",
    "phosgene",
    "phosgene oxime",
    "Pressure Transducers",
    "Radioactive fallout device",
    "radiological",
    "radiological weapon",
    "ricin",
    "Sarin",
    "soman",
    "Spectrometers",
    "spud gun",
    "sulfur mustard",
    "Switching Devices",
    "tabun",
    "Tantalum",
    "tear gas",
    "TOW",
    "toxin",
    "VR",
    "VX",
    "Weapons of Mass Destruction",
    "weapons-grade",
    "Winding",
    "WMD",
    "Wooden",
    "Woompher",
    "Zirconium"
]
EXPLOSIVES_RELATED = [
"Acetylides of heavy metals",
"aerial shell",
"aluminum",
"Aluminum containing polymeric propellant",
"Aluminum ophorite explosive",
"Amatex",
"Amatol",
"Ammonal",
"ammonia",
"ammonium nitrate",
# "Ammonium nitrate explosive mixtures (cap sensitive).",
"Ammonium nitrate explosive mixtures",
"cap sensitive",
# "Ammonium nitrate explosive mixtures (non-cap sensitive)",
"Ammonium nitrate explosive mixtures",
"non-cap sensitive",
#"Ammonium perchlorate explosive mixtures (excluding ammonium perchlorate composite propellant (APCP))",
"Ammonium perchlorate",
"explosive mixtures",
"APCP",
# "Ammonium perchlorate having particle size less than 15 microns",
"Ammonium perchlorate",
# "Ammonium picrate [picrate of ammonia, Explosive D].",
"Ammonium picrate",
"picrate of ammonia",
"Explosive D",
# "Ammonium salt lattice with isomorphously substituted inorganic salts",
"Ammonium salt lattice",
"isomorphously substituted inorganic salts",
# "ANFO [ammonium nitrate-fuel oil]",
"ANFO",
"ammonium nitrate-fuel oil",
"fuel oil",
"Aromatic nitro-compound explosive mixtures",
"articles pyrotechnic",
"ATF",
"ATFE",
"Azide explosives",
"bang",
"Baranol",
"Baratol",
"BATFE",
# "BEAF [1, 2-bis (2, 2-difluoro-2-nitroacetoxyethane)]",
"BEAF",
"1, 2-bis (2, 2-difluoro-2-nitroacetoxyethane)",
"binary",
"Black powder",
"black powder",
"Black powder based explosive mixtures",
"black powder bomb",
# "Blasting agents, nitro-carbo-nitrates, including non-cap sensitive slurry and water gel explosives",
"Blasting agents",
"nitro-carbo-nitrates",
"non-cap sensitive slurry",
"water gel explosives",
"Blasting caps",
"Blasting gelatin",
"Blasting powder",
"blow up",
"bomb",
"boom",
"booster",
# "BTNEC [bis (trinitroethyl) carbonate]",
"BTNEC",
"bis (trinitroethyl) carbonate",
# "BTNEN [bis (trinitroethyl) nitramine]",
"BTNEN",
"bis (trinitroethyl) nitramine",
# "BTTN [1,2,4 butanetriol trinitrate]",
"BTTN",
"1,2,4 butanetriol trinitrate",
"Bulk salutes",
"Bureau of ATF",
"butane bomb",
"Butyl tetryl",
"c-4",
"cake",
# "Calcium nitrate explosive mixture",
"Calcium nitrate explosive mixture",
"Calcium nitrate",
"cap sensitive",
# "Cellulose hexanitrate explosive mixture",
"Cellulose hexanitrate explosive mixture",
"Cellulose hexanitrate",
"charge",
"Chlorate explosive mixtures",
"Class B",
"Class C",
# "Composition A and variations",
"Composition A and variations",
"Composition A",
# "Composition B and variations",
"Composition B and variations",
"Composition B",
# "Composition C and variations",
"Composition C and variations",
"Composition C",
"Copper acetylide",
"cord",
"Cyanuric triazide",
# "Cyclonite [RDX]",
"Cyclonite",
"RDX",
# "Cyclotetramethylenetetranitramine [HMX]",
"Cyclotetramethylenetetranitramine",
"HMX",
"Cyclotol",
# "Cyclotrimethylenetrinitramine",
"Cyclotrimethylenetrinitramine",
"RDX",
# "DATB [diaminotrinitrobenzene]",
"DATB",
"diaminotrinitrobenzene",
# "DATB [diaminotrinitrobenzene]",
"DATB",
"diaminotrinitrobenzene",
# "DDNP [diazodinitrophenol]",
"DDNP",
"diazodinitrophenol",
# "DEGDN [diethyleneglycol dinitrate]",
"DEGDN",
"diethyleneglycol dinitrate",
"depth charge",
"det cord",
"Detonating cord",
"detonator",
"detonator",
"Detonators",
"dets",
"diesel",
"diesel fuel",
"Dimethylol dimethyl methane dinitrate composition",
"Dinitroethyleneurea",
# "Dinitroglycerine [glycerol dinitrate]",
"Dinitroglycerine",
"glycerol dinitrate",
"Dinitrophenol",
"Dinitrophenolates",
"Dinitrophenyl hydrazine",
"Dinitroresorcinol",
"Dinitrotoluene-sodium nitrate explosive mixtures",
# "DIPAM [dipicramide; diaminohexanitrobiphenyl]",
"DIPAM",
"dipicramide; diaminohexanitrobiphenyl",
"Dipicryl sulfone",
"Dipicrylamine",
"Display fireworks",
# "DNPA [2,2-dinitropropyl acrylate]",
"DNPA",
"2,2-dinitropropyl acrylate",
# "DNPD [dinitropentano nitrile]",
"DNPD",
"dinitropentano nitrile",
"Dynamite",
# "EDDN [ethylene diamine dinitrate]",
"EDDN",
"ethylene diamine dinitrate",
# "EDNA [ethylenedinitramine]",
"EDNA",
"ethylenedinitramine",
"Ednatol",
# "EDNP [ethyl 4,4-dinitropentanoate]",
"EDNP",
"ethyl 4,4-dinitropentanoate",
# "EGDN [ethylene glycol dinitrate]",
"EGDN",
"ethylene glycol dinitrate",
"Emulsion",
"Erythritol tetranitrate explosives",
"Esters of nitro-substituted alcohols",
"Ethyl-tetryl",
"EX Number",
"explosive",
"Explosive conitrates",
"Explosive gelatins",
"Explosive liquids",
"Explosive mixtures containing oxygenreleasing inorganic salts and hydrocarbons",
"Explosive mixtures containing oxygenreleasing inorganic salts and nitro bodies",
"Explosive mixtures containing oxygenreleasing inorganic salts and water insoluble fuels",
"Explosive mixtures containing oxygenreleasing inorganic salts and water soluble fuels",
"Explosive mixtures containing sensitized nitromethane",
"Explosive mixtures containing tetranitromethane (nitroform)",
"Explosive nitro compounds of aromatic hydrocarbons",
"Explosive organic nitrate mixtures",
"Explosive powders",
"Federal Explosives Licensee",
"Federal Explosives Permit",
"FEL",
"FEP",
"fertilizer",
"firework",
"Flash powder",
"flash powder bomb",
"fuel",
"fuel air mixture bomb",
"Fulminate of mercury",
"Fulminate of silver",
"Fulminating gold",
"Fulminating mercury",
"Fulminating platinum",
"Fulminating silver",
"fuse",
"Gelatinized nitrocellulose",
"Gem-dinitro aliphatic explosive mixtures",
"grenade",
"Guanyl nitrosamino guanyl tetrazene",
"Guanyl nitrosamino guanylidene hydrazine",
"Guncotton",
"Heavy metal azides",
"helix",
"Hexanite",
"Hexanitrodiphenylamine",
"Hexanitrostilbene",
# "Hexogen [RDX]",
"Hexogen",
"RDX",
"Hexogene or octogene and a nitrated Nmethylaniline",
"Hexolites",
"high explosive",
"HME",
# "HMTD [hexamethylenetriperoxidediamine]",
"HMTD",
"hexamethylenetriperoxidediamine",
# "HMX [cyclo-1,3,5,7-tetramethylene 2,4,6,8-tetranitramine; Octogen]",
"HMX",
"cyclo-1,3,5,7-tetramethylene 2,4,6,8-tetranitramine",
"Octogen",
"homemade bomb",
"homemade explosive",
# "Hydrazinium nitrate/hydrazine/aluminum explosive system",
"Hydrazinium nitrate",
"hydrazine",
"aluminum explosive system",
"Hydrazoic acid",
"Hydrogen peroxide",
"igniter",
"Igniter cord",
"Igniters",
"incendiary",
"Initiating tube systems",
"instant mix",
# "KDNBF [potassium dinitrobenzo-furoxane]",
"KDNBF",
"potassium dinitrobenzo-furoxane",
"land mine",
"Lead azide",
"Lead mannite",
"Lead mononitroresorcinate",
"Lead picrate",
"Lead salts, explosive",
# "Lead styphnate [styphnate of lead, lead trinitroresorcinate]",
"Lead styphnate",
"styphnate of lead",
"lead trinitroresorcinate",
"Liquid nitrated polyol and trimethylolethane",
"Liquid oxygen explosives",
"low explosive",
"M/S",
"M-80",
"Magnesium ophorite explosives",
"Mannitol hexanitrate",
"mass destruction",
"mass detonate",
"mass detonation",
"mass explosion",
"mass explosive",
# "MDNP [methyl 4,4-dinitropentanoate]",
"MDNP",
"methyl 4,4-dinitropentanoate",
# "MEAN [monoethanolamine nitrate]",
"MEAN",
"monoethanolamine nitrate",
"Mercuric fulminate",
"Mercury oxalate",
"Mercury tartrate",
"methane",
"Metriol trinitrate",
"millisecond",
# "Minol-2 [40% TNT, 40% ammonium nitrate, 20% aluminum]",
"Minol-2",
"40% TNT, 40% ammonium nitrate, 20% aluminum",
# "MMAN [monomethylamine nitrate]; methylamine nitrate",
"MMAN",
"monomethylamine nitrate",
"methylamine nitrate",
"molotov",
"molotov cocktail",
"Mononitrotoluene-nitroglycerin mixture",
"Monopropellants",
# "NIBTN [nitroisobutametriol trinitrate]",
"NIBTN",
"nitroisobutametriol trinitrate",
"Nitrate explosive mixtures",
"Nitrate sensitized with gelled nitroparaffin",
"Nitrated carbohydrate explosive",
"Nitrated glucoside explosive",
"Nitrated polyhydric alcohol explosives",
"Nitric acid and a nitro aromatic compound explosive",
"Nitric acid and carboxylic fuel explosive",
"Nitric acid explosive mixtures",
"nitro",
"Nitro aromatic explosive mixtures",
"Nitro compounds of furane explosive mixtures",
"Nitrocellulose explosive",
"Nitroderivative of urea explosive mixture",
"Nitrogelatin explosive",
"Nitrogen trichloride",
"Nitrogen tri-iodide",
# "Nitroglycerine [NG, RNG, nitro, glyceryl trinitrate, trinitroglycerine",
"Nitroglycerine",
"NG",
"RNG",
"nitro",
"glyceryl trinitrate",
"trinitroglycerine",
"Nitroglycide",
# "Nitroglycol [ethylene glycol dinitrate, EGDN",
"Nitroglycol",
"ethylene glycol dinitrate",
"EGDN",
"Nitroguanidine explosives",
"Nitronium perchlorate propellant mixtures",
# "Nitroparaffins Explosive Grade and ammonium nitrate mixtures",
"Nitroparaffins",
"Nitrostarch",
"Nitro-substituted carboxylic acids",
"Nitrourea",
# "Octogen [HMX",
"Octogen",
"HMX",
# "Octol [75 percent HMX, 25 percent TNT]",
"Octol",
"75 percent HMX, 25 percent TNT",
"oil perforator",
"Organic amine nitrates",
"Organic nitramines",
"oxidizer",
# "PBX [plastic bonded explosives]",
"PBX",
"plastic bonded explosives",
"Pellet powder",
"Penthrinite composition",
"Pentolite",
"Perchlorate explosive mixtures",
"Peroxide based explosive mixtures",
# "PETN [nitropentaerythrite, pentaerythrite tetranitrate, pentaerythritol tetranitrate]",
"PETN",
"nitropentaerythrite",
"pentaerythrite tetranitrate",
"pentaerythritol tetranitrate",
"Picramic acid and its salts",
"Picramide",
"Picrate explosives",
"Picrate of potassium explosive mixtures",
"Picratol",
# "Picric acid (manufactured as an explosive)",
"Picric acid",
"Picryl chloride",
"Picryl fluoride",
"pipe bomb",
"plastic explosive",
# "PLX [95% nitromethane, 5% ethylenediamine]",
"PLX",
"95% nitromethane, 5% ethylenediamine",
"Polynitro aliphatic compounds",
"Polyolpolynitrate-nitrocellulose explosive gels",
# "Potassium chlorate and lead sulfocyanate explosive",
"Potassium chlorate",
"lead sulfocyanate",
# "Potassium nitrate explosive mixtures",
"Potassium nitrate",
"Potassium nitroaminotetrazole",
"pounds",
"propane bomb",
"pyrotechnic",
"Pyrotechnic compositions",
# "PYX [2,6-bis(picrylamino)] 3,5-dinitropyridine",
"PYX",
"2,6-bis(picrylamino) 3,5-dinitropyridine",
"Quarter stick",
# "RDX [cyclonite, hexogen, T4, cyclo-1,3,5,-trimethylene-2,4,6,-trinitramine; hexahydro-1,3,5-trinitro-S-triazine]",
"RDX",
"cyclonite",
"hexogen",
"T4",
"cyclo-1,3,5,-trimethylene-2,4,6,-trinitramine",
"hexahydro-1,3,5-trinitro-S-triazine",
"Safety fuse",
"Salts of organic amino sulfonic acid explosive mixture",
"salute",
"Salutes (bulk)",
"bulk salutes",
"shape charge",
"shell",
"Silver acetylide",
"Silver azide",
"Silver fulminate",
# "Silver oxalate explosive mixtures",
"Silver oxalate explosive mixtures",
"Silver oxalate",
"Silver styphnate",
# "Silver tartrate explosive mixtures",
"Silver tartrate explosive mixtures",
"Silver tartrate",
"Silver tetrazene",
"Slurried explosive mixtures of water, inorganic oxidizing salt, gelling agent, fuel, and sensitizer (cap sensitive)",
"Smokeless powder",
"Sodatol",
"Sodium amatol",
"Sodium azide explosive mixture",
"Sodium dinitro-ortho-cresolate",
"Sodium nitrate explosive mixtures",
"Sodium nitrate-potassium nitrate explosive mixture",
"Sodium picramate",
"Special fireworks",
"Squibs",
"Styphnic acid explosives",
# "Tacot [tetranitro-2,3,5,6-dibenzo-1,3a,4,6a tetrazapentalene]",
"Tacot",
"tetranitro-2,3,5,6-dibenzo-1,3a,4,6a tetrazapentalene",
"tannerite",
# "TATB [triaminotrinitrobenzene]",
"TATB",
"triaminotrinitrobenzene",
# "TATP [triacetonetriperoxide]",
"TATP",
"triacetonetriperoxide",
# "TEGDN [triethylene glycol dinitrate]",
"TEGDN",
"triethylene glycol dinitrate",
"Tetranitrocarbazole",
# "Tetrazene [tetracene, tetrazine, 1(5-tetrazolyl)-4-guanyl tetrazene hydrate]",
"Tetrazene",
"tetracene",
"tetrazine",
"1(5-tetrazolyl)-4-guanyl tetrazene hydrate",
# "Tetrazole explosives",
"Tetrazole explosives",
"Tetrazole",
# "Tetryl [2,4,6 tetranitro-N-methylaniline]",
"Tetrytol",
"thermobaric bomb",
"Thickened inorganic oxidizer salt slurried explosive mixture",
# "TMETN [trimethylolethane trinitrate]",
# "TNEF [trinitroethyl formal]",
# "TNEOC [trinitroethylorthocarbonate]",
# "TNEOF [trinitroethylorthoformate]",
# "TNT [trinitrotoluene, trotyl, trilite, triton]",
"Torpex",
"Tridite",
"Trimethylol ethyl methane trinitrate composition",
"Trimethylolthane trinitrate-nitrocellulose",
"Trimonite",
"Trinitroanisole",
"Trinitrobenzene",
"Trinitrobenzoic acid",
"Trinitrocresol",
"Trinitro-meta-cresol",
"Trinitronaphthalene",
"Trinitrophenetol",
"Trinitrophloroglucinol",
"Trinitroresorcinol",
"Tritonal",
"UN Number",
"Urea nitrate",
# "Water-bearing explosives having salts of oxidizing acids and nitrogen bases, sulfates, or sulfamates (cap sensitive)",
"Water-in-oil emulsion explosive compositions",
# "Xanthamonas hydrophilic colloid explosive mixture",
"Xanthamonas",
"hydrophilic colloid",
"Quarterstick",
"Halfstick"
]
NFA_RELATED = [
"11.5 inch",
'11.5"',
"14 inch",
'14"',
"any other weapon",
"AOW",
"ATF",
"ATFE",
"auto sear",
"automatic",
"autosear",
"AW-SIM",
"BATFE",
"Bureau of ATF",
"can",
"cane gun",
"class 3",
"class III",
"conversion",
"conversion kit",
"destructive device",
"DIAS",
"drop in auto sear",
"drop in autosear",
"FA",
"flash bang",
"flashbang",
"forward grip",
"full auto",
"grenade",
"homemade silencer",
"homemade suppressor",
"incendiary",
"land mine",
"Lightning Link",
"machine gun",
"machinegun",
"missile",
"molotov",
"molotov cocktail",
"muffler",
"nfa",
"pen gun",
"pistol grip",
"poison gas",
"RLL",
"rocket",
"rocket launcher",
"sawed off",
"sbr",
"sbs",
"sear",
"short barrel rifle",
"short barrel shotgun",
"short barreled rifle",
"short barreled shotgun",
"silencer",
"smooth bore",
"SOT",
"Special Occupational Tax",
"street sweeper",
"suppressor",
"umbrella gun",
"wallet gun",
"weapon made from",
"Zip gun",
"Zipgun"
]
GANG_RELATED = [
"boom stick",
"burner",
"cash",
"money",
"heroin",
"chopper",
"clap-clap",
"dat fire",
"dat thang",
"davy crocket",
"deuce",
"deuce deuce",
"duece",
"fofo",
"four nickle",
"gat",
"gatt",
"gauge",
"ghost load",
"hammer",
"heater",
"hog leg",
"jammy",
"lead spitta",
"lil buddy",
"long pumps",
"mac",
"mack",
"mr. 9mm",
"narco",
"nina",
"nine",
"ol'betsy",
"ol' betsy",
"ooh wop",
"piece",
"pocket rocket",
"rod",
"roscoe",
"sawdy",
"shotty",
"smoke wagon",
"strap",
"throw down",
"thunder stick",
"toaster",
"tres",
"widow maker",
"yeezy"
]
NON_ENGLISH_RELATED = [
"arma larga",
"armas de fuego",
"bala",
"cahuetas",
"Cartucho",
"Cebolla",
"Cebollas",
"cortas",
"cuerno de chivo",
"cuerno de chivo",
"Cuerno de Chivo",
"Cuerno",
"cuete",
"El Subfusil ametralladora",
"escopete",
"escuadra",
"escuadra",
"explosivo",
"fusca",
"gat",
"la ametralladora",
"la escopeta",
"Lanza papas",
"largas",
"municion",
"Municiones",
"Papa",
"Papas",
"penas",
"Pertrechos",
"pistola",
"Polvora",
"proyectil",
"r quinze",
"Tolba",
"trick or treat",
"tricki-tricki",
"Vaina"
]
FIREARMS_RELATED = [
"5.56",
"7.62",
"1911",
".45 caliber",
".50 caliber",
"37 mm",
"37mm",
"3-D firearm",
"3-D print",
"3-D printed",
"80 percent receiver",
"80% receiver",
"9mm",
"AK",
"AK-47",
"AK-74",
"ammo",
"ammunition",
"APA",
"AR",
"AR-15",
"Armor piercing",
"Armor piercing ammuntion",
"assault rifle",
"ATF",
"ATFE",
"Barrett",
"BATFE",
"Build party",
"bullet proof vest",
"bump fire",
"bump stock",
"Bureau of ATF",
"caliber",
"carbine",
"cash only",
"centerfire",
"Chopper",
"clip",
"CNC",
"concealable",
"derringer",
"F2F",
"face to face",
"Federal Firearms License",
"FFL",
"fixed stock",
"flare gun",
"FMJ",
"folding stock",
"frame",
"full metal jacket",
"gas mask",
"ghost gun",
"glock",
"handgun",
"hi point",
"high capacity",
"homemade gun",
"jimenez",
"keltec",
"kevlar",
"long gun",
"lower",
"mag",
"magazine",
"MP4",
"MP5",
"no 4473",
"no background",
"no check",
"no paper work",
"no paperwork",
"no questions asked",
"oblit",
"obliterated",
"off book",
"pistol",
"receiver",
"revolver",
"rifle",
"rimfire",
"RPG",
"S&W",
"SA",
"scratched",
"semi-auto",
"semi-automatic",
"shotgun",
"shotty",
"shoty",
"sig brace",
"stripped lower",
"stun gun",
"Taurus",
"untraceable",
"uzi",
"vest"
]
FIREARMS_TECHNOLOGY_BRANCH = [
"Glock Switch",
"Glock Chip",
# "Chip (since this is so inclusive - I would only search in conjunction with other firearm words)",
"Chip",
"Baffle",
"Baffle Stack",
"Monocore",
"Anarchist",
"Solvent Trap",
"LDC",
"Belt Fed",
"Crew served",
"Hip Whip",
"Polymer",
"CNC machine",
"Green tip",
"Black tip",
"FAL",
"Outer tube",
"trigger"
]
WEAPONS_PHRASES.extend(FIREARMS_RELATED)
WEAPONS_PHRASES.extend(FIREARMS_TECHNOLOGY_BRANCH)
WEAPONS_PHRASES = list(set([w.lower() for w in WEAPONS_PHRASES]))
test_text = """New In Box Walther UZI .22LR RIFLE 20+1 $349.99"""
WEAPONS_PATTERNS = [re.compile(r"""\b%s\b""" % ph, re.IGNORECASE) for ph in WEAPONS_PHRASES]
def weapons_words(text, patterns=WEAPONS_PATTERNS, phrases=WEAPONS_PHRASES):
matches = set()
for (pattern, phrase) in zip(patterns, phrases):
for match in re.finditer(pattern, text):
matches.add(phrase)
matches = list(matches)
matches.sort()
return matches
# print weapons_words(test_text)
def get_atf_weapons(*texts):
all_text = " ".join([strip_tags(t) for t in texts])
return "|".join(weapons_words(all_text))
##################################################################
KEYWORDS_PHRASES = WMD_RELATED + EXPLOSIVES_RELATED + NFA_RELATED + GANG_RELATED + NON_ENGLISH_RELATED
KEYWORDS_PHRASES = list(set([k.lower() for k in KEYWORDS_PHRASES]))
KEYWORDS_PATTERNS = [re.compile(r"""\b%s\b""" % ph, re.IGNORECASE) for ph in KEYWORDS_PHRASES]
def keywords_words(text, patterns=KEYWORDS_PATTERNS, phrases=KEYWORDS_PHRASES):
matches = set()
for (pattern, phrase) in zip(patterns, phrases):
for match in re.finditer(pattern, text):
matches.add(phrase)
matches = list(matches)
matches.sort()
return matches
# print keywords_words(test_text)
def get_keywords(*texts):
all_text = " ".join([strip_tags(t) for t in texts])
return "|".join(keywords_words(all_text))
##################################################################
test_prices = ["I like to spend $50 for a sword, $75.00 for ammo, $ 100.00 for rifle, $ 1,000 for lunch, and BTC 2.468 to donate to Edward Snowden.",
"I make $60K a year on Herbalife. Ask me how!",
"JPY 500000 is more than CHF 200.5",
"2.5 BTC or BTC 4.5"]
DOLLAR_PRICE_REGEXPS = [re.compile(r'''\$\s*(?:\d{1,3},\s?)*\d{1,3}(?:(?:\.\d+)|[KkMm])?''', re.IGNORECASE),
re.compile(r'''USD\s*\d{1,7}(?:\.\d+)?''', re.IGNORECASE),
re.compile(r'''\d{1,7}(?:\.\d+)?\s*USD''', re.IGNORECASE)
]
BITCOIN_PRICE_REGEXPS = [re.compile(r'''(?:BTC|XBT|XBC)\s*\d{1,7}(?:\.\d+)?''', re.IGNORECASE),
re.compile(r'''\d{1,7}(?:\.\d+)?\s*(?:BTC|XBT|XBC)''', re.IGNORECASE)
]
def get_dollar_prices(*texts):
matches = []
for t in texts:
for r in DOLLAR_PRICE_REGEXPS:
for m in r.findall(t):
matches.append(m.replace('$ ','$').replace(',','').replace('$','').replace('K',"000").replace('k',"000").replace("M","000").replace('m',"000"))
return "|".join(matches)
def get_prices(*texts):
return get_dollar_prices(*texts)
def get_bitcoin_prices(*texts):
matches = []
for t in texts:
for r in BITCOIN_PRICE_REGEXPS:
for m in r.findall(t):
matches.append(m.replace('BTC','').replace('XBT','').replace('XBC','').replace(' ',''))
return "|".join(matches)
# print get_prices(*test_prices)
def atf_body_clean(text):
"""Strip HTML"""
return strip_tags(text).strip()
def onion_name_to_provider_name(onion):
if onion in ["k5zq47j6wd3wdvjq.onion"]:
return "evolution"
elif onion in ["i25c62nvu4cgeqyz.onion"]:
return "evolution-forums"
else:
return onion
def atf_provider_name(uri):
domain = getWebsiteDomain(uri)
if domain.endswith('backpage.com'):
return "backpage.com"
elif domain.endswith('.onion'):
return onion_name_to_provider_name(domain)
else:
return domain
def person_userid_uri(cleaned):
if cleaned:
cleaned = cleaned.strip().replace(" ", "_").lower()
return "person_userid/%s" % cleaned
return ''
def person_postcount_uri(cleaned):
if cleaned:
cleaned = cleaned.strip().replace(" ", "_").lower()
return "person_postcount/%s" % cleaned
return ''
def enrollment_date_uri(cleaned):
if cleaned:
cleaned = cleaned.strip().replace(" ", "_").replace(":", "-").lower()
return "enrollment_date/%s" % cleaned
return ''
def fromUser_uri(cleaned):
if cleaned:
cleaned = cleaned.strip().replace(" ", "_").lower()
return "fromUser/%s" % cleaned
return ''
def weaponsMentioned_uri(cleaned):
if cleaned:
cleaned = cleaned.strip().replace(" ", "_").lower()
return "weaponsMentioned/%s" % cleaned
return ''
def keywordsMentioned_uri(cleaned):
if cleaned:
cleaned = cleaned.strip().replace(" ", "_").lower()
return "keywordsMentioned/%s" % cleaned
return ''
def place_postalAddress_uri(cleaned):
if cleaned:
cleaned = cleaned.strip().replace(" ", "_").lower()
return "place_postalAddress/%s" % cleaned
return ''
def ar15_user_uri(userid):
return "person/ar15/" + userid
def calguns_user_uri(userid):
return "person/calguns/" + userid
def glocktalk_user_uri(userid):
return "person/glocktalk/" + userid
def ohioccw_user_uri(userid):
return "person/ohioccwforums/" + userid
def postal_address_uri(location):
return "address/" + location.replace(" ", "_").replace(".","_").replace(",", "_")
# print test_prices, get_dollar_prices(*test_prices)
# print test_prices, get_bitcoin_prices(*test_prices)
##################################################################
def get_weapons(*texts):
atf_weapons = get_atf_weapons(*texts)
keywords = get_keywords(*texts)
return ("%s|%s" % (atf_weapons, keywords)).strip("|")
def floridaguntrader_availability_starts(date):
"""Return the date in iso format"""
d = translate_date(date,"%m/%d","2015-%m-%d")
if d == '':
d = translate_date(date,"%b %d, %Y","%Y-%m-%d")
return d
| [
"re.finditer",
"re.compile"
] | [((26507, 26549), 're.compile', 're.compile', (["('\\\\b%s\\\\b' % ph)", 're.IGNORECASE'], {}), "('\\\\b%s\\\\b' % ph, re.IGNORECASE)\n", (26517, 26549), False, 'import re\n'), ((27304, 27346), 're.compile', 're.compile', (["('\\\\b%s\\\\b' % ph)", 're.IGNORECASE'], {}), "('\\\\b%s\\\\b' % ph, re.IGNORECASE)\n", (27314, 27346), False, 'import re\n'), ((28241, 28331), 're.compile', 're.compile', (['"""\\\\$\\\\s*(?:\\\\d{1,3},\\\\s?)*\\\\d{1,3}(?:(?:\\\\.\\\\d+)|[KkMm])?"""', 're.IGNORECASE'], {}), "('\\\\$\\\\s*(?:\\\\d{1,3},\\\\s?)*\\\\d{1,3}(?:(?:\\\\.\\\\d+)|[KkMm])?', re.\n IGNORECASE)\n", (28251, 28331), False, 'import re\n'), ((28350, 28406), 're.compile', 're.compile', (['"""USD\\\\s*\\\\d{1,7}(?:\\\\.\\\\d+)?"""', 're.IGNORECASE'], {}), "('USD\\\\s*\\\\d{1,7}(?:\\\\.\\\\d+)?', re.IGNORECASE)\n", (28360, 28406), False, 'import re\n'), ((28433, 28489), 're.compile', 're.compile', (['"""\\\\d{1,7}(?:\\\\.\\\\d+)?\\\\s*USD"""', 're.IGNORECASE'], {}), "('\\\\d{1,7}(?:\\\\.\\\\d+)?\\\\s*USD', re.IGNORECASE)\n", (28443, 28489), False, 'import re\n'), ((28543, 28611), 're.compile', 're.compile', (['"""(?:BTC|XBT|XBC)\\\\s*\\\\d{1,7}(?:\\\\.\\\\d+)?"""', 're.IGNORECASE'], {}), "('(?:BTC|XBT|XBC)\\\\s*\\\\d{1,7}(?:\\\\.\\\\d+)?', re.IGNORECASE)\n", (28553, 28611), False, 'import re\n'), ((28639, 28707), 're.compile', 're.compile', (['"""\\\\d{1,7}(?:\\\\.\\\\d+)?\\\\s*(?:BTC|XBT|XBC)"""', 're.IGNORECASE'], {}), "('\\\\d{1,7}(?:\\\\.\\\\d+)?\\\\s*(?:BTC|XBT|XBC)', re.IGNORECASE)\n", (28649, 28707), False, 'import re\n'), ((26752, 26778), 're.finditer', 're.finditer', (['pattern', 'text'], {}), '(pattern, text)\n', (26763, 26778), False, 'import re\n'), ((27553, 27579), 're.finditer', 're.finditer', (['pattern', 'text'], {}), '(pattern, text)\n', (27564, 27579), False, 'import re\n')] |
#
# test_model
#
# Copyright (c) 2011-2021 <NAME> <<EMAIL>>
#
# SPDX-License-Identifier: MIT
#
import ayame
from ayame import model
from base import AyameTestCase
class ModelTestCase(AyameTestCase):
def test_model(self):
m = model.Model(None)
self.assertIsNone(m.object)
m.object = ''
self.assertEqual(m.object, '')
def test_nested_model(self):
m = model.Model(model.Model(None))
self.assertIsNone(m.object)
m.object = model.Model('')
self.assertEqual(m.object, '')
def test_inheritable_model(self):
class InheritableModel(model.InheritableModel):
def wrap(self, component):
return super().wrap(component)
m = InheritableModel(None)
self.assertIsNone(m.wrap(None))
def test_wrap_model(self):
class WrapModel(model.WrapModel):
@property
def object(self):
return super().object
m = WrapModel(None)
self.assertIsNone(m.object)
def test_compound_model_attr(self):
class Object:
attr = 'value'
o = Object()
m = model.CompoundModel(o)
mc = ayame.MarkupContainer('a', m)
mc.add(ayame.Component('attr'))
self.assertEqual(len(mc.children), 1)
self.assertEqual(o.attr, 'value')
self.assertEqual(mc.find('attr').model.object, 'value')
mc.find('attr').model.object = 'new_value'
self.assertEqual(o.attr, 'new_value')
self.assertEqual(mc.find('attr').model.object, 'new_value')
def test_compound_model_property(self):
class Object:
def __init__(self):
self.__attr = 'value'
def attr():
def fget(self):
return self.__attr
def fset(self, attr):
self.__attr = attr
return locals()
attr = property(**attr())
o = Object()
m = model.CompoundModel(o)
mc = ayame.MarkupContainer('a', m)
mc.add(ayame.Component('attr'))
self.assertEqual(len(mc.children), 1)
self.assertEqual(o.attr, 'value')
self.assertEqual(mc.find('attr').model.object, 'value')
mc.find('attr').model.object = 'new_value'
self.assertEqual(o.attr, 'new_value')
self.assertEqual(mc.find('attr').model.object, 'new_value')
def test_compound_model_method(self):
class Object:
def __init__(self):
self.__method = 'value'
def get_method(self):
return self.__method
def set_method(self, method):
self.__method = method
o = Object()
m = model.CompoundModel(o)
mc = ayame.MarkupContainer('a', m)
mc.add(ayame.Component('method'))
self.assertEqual(len(mc.children), 1)
self.assertEqual(o.get_method(), 'value')
self.assertEqual(mc.find('method').model.object, 'value')
mc.find('method').model.object = 'new_value'
self.assertEqual(o.get_method(), 'new_value')
self.assertEqual(mc.find('method').model.object, 'new_value')
def test_compound_model_method_noncallable(self):
class Object:
get_method = set_method = None
o = Object()
m = model.CompoundModel(o)
mc = ayame.MarkupContainer('a', m)
mc.add(ayame.Component('method'))
self.assertEqual(len(mc.children), 1)
self.assertIsNone(mc.find('method').model.object)
with self.assertRaisesRegex(AttributeError, r'^method$'):
mc.find('method').model.object = 'new_value'
def test_compound_model_dict(self):
o = {'mapping': 'value'}
m = model.CompoundModel(o)
mc = ayame.MarkupContainer('a', m)
mc.add(ayame.Component('mapping'))
self.assertEqual(len(mc.children), 1)
self.assertEqual(o['mapping'], 'value')
self.assertEqual(mc.find('mapping').model.object, 'value')
mc.find('mapping').model.object = 'new_value'
self.assertEqual(o['mapping'], 'new_value')
self.assertEqual(mc.find('mapping').model.object, 'new_value')
def test_compound_model_replace(self):
o = {
'b': 'b',
'c': 'c',
}
m = model.CompoundModel(o)
mc = ayame.MarkupContainer('a', m)
mc.add(ayame.MarkupContainer('b'))
self.assertEqual(len(mc.children), 1)
self.assertEqual(mc.find('b').model.object, 'b')
mc.find('b').add(ayame.Component('c'))
self.assertEqual(len(mc.children), 1)
self.assertEqual(len(mc.find('b').children), 1)
self.assertEqual(mc.find('b:c').model.object, 'c')
mc.model = model.CompoundModel(object())
self.assertIsNone(mc.find('b').model.object)
self.assertIsNone(mc.find('b:c').model.object)
with self.assertRaisesRegex(AttributeError, r'^b$'):
setattr(mc.find('b').model, 'object', '')
with self.assertRaisesRegex(AttributeError, r'^c$'):
setattr(mc.find('b:c').model, 'object', '')
self.assertEqual(mc.render(''), '')
| [
"ayame.model.CompoundModel",
"ayame.Component",
"ayame.model.Model",
"ayame.MarkupContainer"
] | [((245, 262), 'ayame.model.Model', 'model.Model', (['None'], {}), '(None)\n', (256, 262), False, 'from ayame import model\n'), ((494, 509), 'ayame.model.Model', 'model.Model', (['""""""'], {}), "('')\n", (505, 509), False, 'from ayame import model\n'), ((1159, 1181), 'ayame.model.CompoundModel', 'model.CompoundModel', (['o'], {}), '(o)\n', (1178, 1181), False, 'from ayame import model\n'), ((1195, 1224), 'ayame.MarkupContainer', 'ayame.MarkupContainer', (['"""a"""', 'm'], {}), "('a', m)\n", (1216, 1224), False, 'import ayame\n'), ((2000, 2022), 'ayame.model.CompoundModel', 'model.CompoundModel', (['o'], {}), '(o)\n', (2019, 2022), False, 'from ayame import model\n'), ((2036, 2065), 'ayame.MarkupContainer', 'ayame.MarkupContainer', (['"""a"""', 'm'], {}), "('a', m)\n", (2057, 2065), False, 'import ayame\n'), ((2749, 2771), 'ayame.model.CompoundModel', 'model.CompoundModel', (['o'], {}), '(o)\n', (2768, 2771), False, 'from ayame import model\n'), ((2785, 2814), 'ayame.MarkupContainer', 'ayame.MarkupContainer', (['"""a"""', 'm'], {}), "('a', m)\n", (2806, 2814), False, 'import ayame\n'), ((3351, 3373), 'ayame.model.CompoundModel', 'model.CompoundModel', (['o'], {}), '(o)\n', (3370, 3373), False, 'from ayame import model\n'), ((3387, 3416), 'ayame.MarkupContainer', 'ayame.MarkupContainer', (['"""a"""', 'm'], {}), "('a', m)\n", (3408, 3416), False, 'import ayame\n'), ((3773, 3795), 'ayame.model.CompoundModel', 'model.CompoundModel', (['o'], {}), '(o)\n', (3792, 3795), False, 'from ayame import model\n'), ((3809, 3838), 'ayame.MarkupContainer', 'ayame.MarkupContainer', (['"""a"""', 'm'], {}), "('a', m)\n", (3830, 3838), False, 'import ayame\n'), ((4345, 4367), 'ayame.model.CompoundModel', 'model.CompoundModel', (['o'], {}), '(o)\n', (4364, 4367), False, 'from ayame import model\n'), ((4381, 4410), 'ayame.MarkupContainer', 'ayame.MarkupContainer', (['"""a"""', 'm'], {}), "('a', m)\n", (4402, 4410), False, 'import ayame\n'), ((419, 436), 'ayame.model.Model', 
'model.Model', (['None'], {}), '(None)\n', (430, 436), False, 'from ayame import model\n'), ((1240, 1263), 'ayame.Component', 'ayame.Component', (['"""attr"""'], {}), "('attr')\n", (1255, 1263), False, 'import ayame\n'), ((2081, 2104), 'ayame.Component', 'ayame.Component', (['"""attr"""'], {}), "('attr')\n", (2096, 2104), False, 'import ayame\n'), ((2830, 2855), 'ayame.Component', 'ayame.Component', (['"""method"""'], {}), "('method')\n", (2845, 2855), False, 'import ayame\n'), ((3432, 3457), 'ayame.Component', 'ayame.Component', (['"""method"""'], {}), "('method')\n", (3447, 3457), False, 'import ayame\n'), ((3854, 3880), 'ayame.Component', 'ayame.Component', (['"""mapping"""'], {}), "('mapping')\n", (3869, 3880), False, 'import ayame\n'), ((4426, 4452), 'ayame.MarkupContainer', 'ayame.MarkupContainer', (['"""b"""'], {}), "('b')\n", (4447, 4452), False, 'import ayame\n'), ((4583, 4603), 'ayame.Component', 'ayame.Component', (['"""c"""'], {}), "('c')\n", (4598, 4603), False, 'import ayame\n')] |
# coding: utf-8
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from config import config
app = Flask(__name__)
"""
config
-- 'default': DevelopmentConfig
-- 'develop': DevelopmentConfig
-- 'testing': TestingConfig
-- 'production': ProductionConfig
you can edit this in config.py
"""
config_name = 'default'
app.config.from_object(config[config_name])
config[config_name].init_app(app)
db = SQLAlchemy(app)
login_manager = LoginManager(app)
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
# admin site
from admin import views
"""
blueprint
you can register a <blueprint> by run:
-- mana blueprint <blueprint>
under app folder
"""
from main import main
app.register_blueprint(main, url_prefix='/main')
from auth import auth
app.register_blueprint(auth, url_prefix="/auth")
from api import api
app.register_blueprint(api, url_prefix="/api")
| [
"flask_sqlalchemy.SQLAlchemy",
"flask_login.LoginManager",
"flask.Flask"
] | [((152, 167), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (157, 167), False, 'from flask import Flask\n'), ((457, 472), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (467, 472), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((489, 506), 'flask_login.LoginManager', 'LoginManager', (['app'], {}), '(app)\n', (501, 506), False, 'from flask_login import LoginManager\n')] |
import unittest
from app.models import User,Posts,Comments,Subscribe
class UserModelTest(unittest.TestCase):
def setUp(self):
self.new_user = User(password = '<PASSWORD>')
def test_password_setter(self):
self.assertTrue(self.new_user.pass_secure is not None)
def test_no_access_password(self):
with self.assertRaises(AttributeError):
self.new_user.password
def test_password_verification(self):
self.assertTrue(self.new_user.verify_password('<PASSWORD>'))
class PostsModelTest(unittest.TestCase):
def setUp(self):
self.new_post = Posts(title='Terabyte', body='This is a new post',category='Technology')
def test_instance(self):
'''
Test case to check if new_post is an instance of Posts class
'''
self.assertTrue( isinstance( self.new_post, Posts) )
def test_save_post(self):
'''
Test case to check if a post is saved to the database
'''
self.new_post.save_post()
self.assertTrue( len(Posts.query.all()) > 0 )
class TestComments(unittest.TestCase):
'''
Test class to test behaviours of the Comments class
Args:
unittest.TestCase : Test case class that helps create test cases
'''
def setUp(self):
'''
Set up method that will run before every Test
'''
self.new_comment = Comments(the_comment="This is a test comment")
def test_instance(self):
'''
Test to check if new_comment is an instance of Comments
'''
self.assertTrue( isinstance( self.new_comment, Comments) )
def test_save_comment(self):
'''
Test case to check if comment is saved to the database
'''
self.new_comment.save_comment()
self.assertTrue( len(Comments.query.all()) > 0)
class TestSubscribe(unittest.TestCase):
'''
Test class to test behaviours of the Comments class
Args:
unittest.TestCase : Test case class that helps create test cases
'''
def setUp(self):
'''
Set up method that will run before every Test
'''
self.new_subscriber = Subscribe(name="<NAME>",email="<EMAIL>")
def test_instance(self):
'''
Test to check if new_comment is an instance of Comments
'''
self.assertTrue( isinstance( self.new_subscriber, Subscribe) )
def test_save_subscriber(self):
'''
Test case to check if comment is saved to the database
'''
self.new_subscriber.save_subscriber()
self.assertTrue( len(Subscribe.query.all()) > 0)
| [
"app.models.Subscribe.query.all",
"app.models.Posts",
"app.models.User",
"app.models.Subscribe",
"app.models.Posts.query.all",
"app.models.Comments.query.all",
"app.models.Comments"
] | [((156, 183), 'app.models.User', 'User', ([], {'password': '"""<PASSWORD>"""'}), "(password='<PASSWORD>')\n", (160, 183), False, 'from app.models import User, Posts, Comments, Subscribe\n'), ((618, 691), 'app.models.Posts', 'Posts', ([], {'title': '"""Terabyte"""', 'body': '"""This is a new post"""', 'category': '"""Technology"""'}), "(title='Terabyte', body='This is a new post', category='Technology')\n", (623, 691), False, 'from app.models import User, Posts, Comments, Subscribe\n'), ((1403, 1449), 'app.models.Comments', 'Comments', ([], {'the_comment': '"""This is a test comment"""'}), "(the_comment='This is a test comment')\n", (1411, 1449), False, 'from app.models import User, Posts, Comments, Subscribe\n'), ((2183, 2224), 'app.models.Subscribe', 'Subscribe', ([], {'name': '"""<NAME>"""', 'email': '"""<EMAIL>"""'}), "(name='<NAME>', email='<EMAIL>')\n", (2192, 2224), False, 'from app.models import User, Posts, Comments, Subscribe\n'), ((1056, 1073), 'app.models.Posts.query.all', 'Posts.query.all', ([], {}), '()\n', (1071, 1073), False, 'from app.models import User, Posts, Comments, Subscribe\n'), ((1828, 1848), 'app.models.Comments.query.all', 'Comments.query.all', ([], {}), '()\n', (1846, 1848), False, 'from app.models import User, Posts, Comments, Subscribe\n'), ((2615, 2636), 'app.models.Subscribe.query.all', 'Subscribe.query.all', ([], {}), '()\n', (2634, 2636), False, 'from app.models import User, Posts, Comments, Subscribe\n')] |
import csv
import os
class csvReader:
def __init__(self, address) -> None:
self.address = address
self.data = []
def read(self, delimiter=",") -> None:
with open(self.address) as csvfile:
reader = csv.reader(csvfile, delimiter=delimiter)
self.fields = next(reader)
for row in reader:
self.data.append(row)
def write(self, address, delimiter=",") -> None:
with open(address, "a+") as csvfile:
writer = csv.writer(csvfile,
delimiter=delimiter,
quotechar='"',
quoting=csv.QUOTE_MINIMAL)
for row in self.data:
writer.writerow(row)
def writeRows(self, address, rows, delimiter=",") -> None:
with open(address, "a+") as csvfile:
writer = csv.writer(csvfile,
delimiter=delimiter,
quotechar='"',
quoting=csv.QUOTE_MINIMAL)
writer.writerows(rows)
def readDict(self, delimiter=",") -> None:
with open(self.address) as csvfile:
reader = csv.DictReader(csvfile, delimiter=delimiter)
self.fields = reader.fieldnames
for row in reader:
self.data.append(row)
def writeDictRow(self, address, delimiter=",") -> None:
with open(address, "w") as csvfile:
writer = csv.DictWriter(csvfile,
delimiter=delimiter,
quotechar='"',
quoting=csv.QUOTE_MINIMAL,
fieldnames=self.data[0].keys())
for row in self.data:
writer.writerow(row)
def writeDictRows(self, address, rows, delimiter=",") -> None:
with open(address, "w") as csvfile:
writer = csv.DictWriter(csvfile,
delimiter=delimiter,
quotechar='"',
quoting=csv.QUOTE_MINIMAL,
fieldnames=rows[0].keys())
writer.writerows(rows)
def getData(self) -> list:
return self.data
| [
"csv.writer",
"csv.reader",
"csv.DictReader"
] | [((244, 284), 'csv.reader', 'csv.reader', (['csvfile'], {'delimiter': 'delimiter'}), '(csvfile, delimiter=delimiter)\n', (254, 284), False, 'import csv\n'), ((513, 600), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': 'delimiter', 'quotechar': '"""\\""""', 'quoting': 'csv.QUOTE_MINIMAL'}), '(csvfile, delimiter=delimiter, quotechar=\'"\', quoting=csv.\n QUOTE_MINIMAL)\n', (523, 600), False, 'import csv\n'), ((893, 980), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': 'delimiter', 'quotechar': '"""\\""""', 'quoting': 'csv.QUOTE_MINIMAL'}), '(csvfile, delimiter=delimiter, quotechar=\'"\', quoting=csv.\n QUOTE_MINIMAL)\n', (903, 980), False, 'import csv\n'), ((1220, 1264), 'csv.DictReader', 'csv.DictReader', (['csvfile'], {'delimiter': 'delimiter'}), '(csvfile, delimiter=delimiter)\n', (1234, 1264), False, 'import csv\n')] |
# Made by @dyphen12
from flask import Flask, request
from flask_cors import CORS
from flask_restful import reqparse, abort, Api, Resource
import json
import os
from vibra.api.core import api_version
from vibra.api.users import handler as uhd
app = Flask(__name__)
api = Api(app)
CORS(app)
class Hello(Resource):
def get(self):
return api_version()
api.add_resource(Hello, '/')
################# Login Api #######################
CREDENTIAL = {
'token1':{'user': "admin",
'pass': "<PASSWORD>"}
}
def abort_if_credential_doesnt_exist(token_id):
if token_id not in CREDENTIAL:
abort(404, message="Token {} doesn't exist".format(token_id))
parserauth = reqparse.RequestParser()
parserauth.add_argument('user')
parserauth.add_argument('pass')
class Login(Resource):
def post(self):
args = parserauth.parse_args()
token_id = int(max(CREDENTIAL.keys()).lstrip('token')) + 1
token_id = 'token%i' % token_id
CREDENTIAL[token_id] = {'user': args['user'],
'pass': args['pass']}
token = CREDENTIAL[token_id]
x, auth = uhd.user_login_ryzen(token['user'],token['pass'])
try:
ids = x['id'].values[0]
ssid = ids
#print('auth success')
return int(ssid)
except TypeError:
ids = 0
print('auth failed')
return 'fail'
api.add_resource(Login, '/auth')
class getuserName(Resource):
def get(self, todo_id):
x = uhd.get_username_ryzen(int(todo_id))
return x.values[0]
api.add_resource(getuserName, '/user/<string:todo_id>')
class SignUp(Resource):
def get(self, todo_id):
query = json.loads(todo_id)
uname = query['results']['name']
ulastname = query['results']['lastname']
uemail = query['results']['email']
upass = query['results']['password']
resulta = uhd.user_signup_ryzen(uname, ulastname, upass, uemail)
return resulta
api.add_resource(SignUp, '/signup/<string:todo_id>')
if __name__ == '__main__':
# #app.run(host=os.getenv('IP', '0.0.0.0'), port=int(os.getenv('PORT', 8080)))
app.run() | [
"json.loads",
"vibra.api.users.handler.user_login_ryzen",
"flask_restful.reqparse.RequestParser",
"flask_cors.CORS",
"flask_restful.Api",
"flask.Flask",
"vibra.api.users.handler.user_signup_ryzen",
"vibra.api.core.api_version"
] | [((252, 267), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (257, 267), False, 'from flask import Flask, request\n'), ((274, 282), 'flask_restful.Api', 'Api', (['app'], {}), '(app)\n', (277, 282), False, 'from flask_restful import reqparse, abort, Api, Resource\n'), ((283, 292), 'flask_cors.CORS', 'CORS', (['app'], {}), '(app)\n', (287, 292), False, 'from flask_cors import CORS\n'), ((702, 726), 'flask_restful.reqparse.RequestParser', 'reqparse.RequestParser', ([], {}), '()\n', (724, 726), False, 'from flask_restful import reqparse, abort, Api, Resource\n'), ((352, 365), 'vibra.api.core.api_version', 'api_version', ([], {}), '()\n', (363, 365), False, 'from vibra.api.core import api_version\n'), ((1149, 1199), 'vibra.api.users.handler.user_login_ryzen', 'uhd.user_login_ryzen', (["token['user']", "token['pass']"], {}), "(token['user'], token['pass'])\n", (1169, 1199), True, 'from vibra.api.users import handler as uhd\n'), ((1750, 1769), 'json.loads', 'json.loads', (['todo_id'], {}), '(todo_id)\n', (1760, 1769), False, 'import json\n'), ((1966, 2020), 'vibra.api.users.handler.user_signup_ryzen', 'uhd.user_signup_ryzen', (['uname', 'ulastname', 'upass', 'uemail'], {}), '(uname, ulastname, upass, uemail)\n', (1987, 2020), True, 'from vibra.api.users import handler as uhd\n')] |
from allauth.account.views import confirm_email as confirm_email_view
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import include, path, re_path
from django.views import defaults as default_views
from django.views.generic import TemplateView
from drf_yasg import openapi
from drf_yasg.views import get_schema_view
schema_view = get_schema_view(
openapi.Info(
title="API Docs",
default_version="v1",
)
)
urlpatterns = [
path("", TemplateView.as_view(template_name="pages/home.html"), name="home"),
path(
"about/", TemplateView.as_view(template_name="pages/about.html"), name="about"
),
# Django Admin, use {% url 'admin:index' %}
path(settings.ADMIN_URL, admin.site.urls),
# User management
path("users/", include("my_receipts.apps.users.urls", namespace="users")),
path("accounts/", include("allauth.urls")),
# Your stuff: custom urls includes go here
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# API URLS
urlpatterns += [
path("api/auth/", include("dj_rest_auth.urls")), # Auth
re_path(
r"api/registration/account-confirm-email/(?P<key>[-:\w]+)/$",
confirm_email_view,
name="account_confirm_email",
), # Email confirmation
path(
"api/registration/", include("dj_rest_auth.registration.urls")
), # Registration
path("api/v1/", include("config.api_routers.v1")), # API v1
path(
"api/docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs"
), # Docs
]
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
path(
"400/",
default_views.bad_request,
kwargs={"exception": Exception("Bad Request!")},
),
path(
"403/",
default_views.permission_denied,
kwargs={"exception": Exception("Permission Denied")},
),
path(
"404/",
default_views.page_not_found,
kwargs={"exception": Exception("Page not Found")},
),
path("500/", default_views.server_error),
]
if "debug_toolbar" in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns = [path("__debug__/", include(debug_toolbar.urls))] + urlpatterns
| [
"django.urls.include",
"django.views.generic.TemplateView.as_view",
"django.conf.urls.static.static",
"django.urls.re_path",
"django.urls.path",
"drf_yasg.openapi.Info"
] | [((431, 483), 'drf_yasg.openapi.Info', 'openapi.Info', ([], {'title': '"""API Docs"""', 'default_version': '"""v1"""'}), "(title='API Docs', default_version='v1')\n", (443, 483), False, 'from drf_yasg import openapi\n'), ((1007, 1068), 'django.conf.urls.static.static', 'static', (['settings.MEDIA_URL'], {'document_root': 'settings.MEDIA_ROOT'}), '(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n', (1013, 1068), False, 'from django.conf.urls.static import static\n'), ((1163, 1286), 'django.urls.re_path', 're_path', (['"""api/registration/account-confirm-email/(?P<key>[-:\\\\w]+)/$"""', 'confirm_email_view'], {'name': '"""account_confirm_email"""'}), "('api/registration/account-confirm-email/(?P<key>[-:\\\\w]+)/$',\n confirm_email_view, name='account_confirm_email')\n", (1170, 1286), False, 'from django.urls import include, path, re_path\n'), ((764, 805), 'django.urls.path', 'path', (['settings.ADMIN_URL', 'admin.site.urls'], {}), '(settings.ADMIN_URL, admin.site.urls)\n', (768, 805), False, 'from django.urls import include, path, re_path\n'), ((1120, 1148), 'django.urls.include', 'include', (['"""dj_rest_auth.urls"""'], {}), "('dj_rest_auth.urls')\n", (1127, 1148), False, 'from django.urls import include, path, re_path\n'), ((1376, 1417), 'django.urls.include', 'include', (['"""dj_rest_auth.registration.urls"""'], {}), "('dj_rest_auth.registration.urls')\n", (1383, 1417), False, 'from django.urls import include, path, re_path\n'), ((1461, 1493), 'django.urls.include', 'include', (['"""config.api_routers.v1"""'], {}), "('config.api_routers.v1')\n", (1468, 1493), False, 'from django.urls import include, path, re_path\n'), ((2266, 2306), 'django.urls.path', 'path', (['"""500/"""', 'default_views.server_error'], {}), "('500/', default_views.server_error)\n", (2270, 2306), False, 'from django.urls import include, path, re_path\n'), ((539, 592), 'django.views.generic.TemplateView.as_view', 'TemplateView.as_view', ([], {'template_name': 
'"""pages/home.html"""'}), "(template_name='pages/home.html')\n", (559, 592), False, 'from django.views.generic import TemplateView\n'), ((636, 690), 'django.views.generic.TemplateView.as_view', 'TemplateView.as_view', ([], {'template_name': '"""pages/about.html"""'}), "(template_name='pages/about.html')\n", (656, 690), False, 'from django.views.generic import TemplateView\n'), ((848, 905), 'django.urls.include', 'include', (['"""my_receipts.apps.users.urls"""'], {'namespace': '"""users"""'}), "('my_receipts.apps.users.urls', namespace='users')\n", (855, 905), False, 'from django.urls import include, path, re_path\n'), ((930, 953), 'django.urls.include', 'include', (['"""allauth.urls"""'], {}), "('allauth.urls')\n", (937, 953), False, 'from django.urls import include, path, re_path\n'), ((2437, 2464), 'django.urls.include', 'include', (['debug_toolbar.urls'], {}), '(debug_toolbar.urls)\n', (2444, 2464), False, 'from django.urls import include, path, re_path\n')] |
'''
Module for using jyserver in Flask. This module provides to new
decorators.
Decorators
-----------
* @use
Link an application object to the Flask app
* @task
Helper that wraps a function inside a separate thread so that
it can execute concurrently.
Example
-------------
```html
<p id="time">TIME</p>
<button id="reset" onclick="server.reset()">Reset</button>
```
```python
import jyserver.Flask as js
import time
from flask import Flask, render_template, request
app = Flask(__name__)
@js.use(app)
class App():
def reset(self):
self.start0 = time.time()
self.js.dom.time.innerHTML = "{:.1f}".format(0)
@js.task
def main(self):
self.start0 = time.time()
while True:
t = "{:.1f}".format(time.time() - self.start0)
self.js.dom.time.innerHTML = t
time.sleep(0.1)
@app.route('/')
def index_page(name=None):
App.main()
return App.render(render_template('flask-simple.html')
'''
from flask import Flask, request
import json
import jyserver
import threading
def task(func):
'''
Decorator wraps the function in a separate thread for concurrent
execution.
'''
def wrapper(*args):
server_thread = threading.Thread(target=func, args=args, daemon=True)
server_thread.start()
return wrapper
def use(flaskapp):
'''
Link a class to an app object. Pass Flask's `app` object.
'''
def decorator(appClass):
global context
context = jyserver.ClientContext(appClass)
@flaskapp.route('/_process_srv0', methods=['GET', 'POST'])
def process():
if request.method == 'POST':
req = json.loads(request.data)
result = context.processCommand(req)
if result is None:
return ''
return result
else:
return "GET reqeust not allowed"
return context
return decorator | [
"threading.Thread",
"json.loads",
"jyserver.ClientContext"
] | [((1232, 1285), 'threading.Thread', 'threading.Thread', ([], {'target': 'func', 'args': 'args', 'daemon': '(True)'}), '(target=func, args=args, daemon=True)\n', (1248, 1285), False, 'import threading\n'), ((1503, 1535), 'jyserver.ClientContext', 'jyserver.ClientContext', (['appClass'], {}), '(appClass)\n', (1525, 1535), False, 'import jyserver\n'), ((1690, 1714), 'json.loads', 'json.loads', (['request.data'], {}), '(request.data)\n', (1700, 1714), False, 'import json\n')] |
import pandas as pd
import matplotlib.cm as cm
import numpy as np
import matplotlib.pyplot as plt
def plot(problemVariants, *, zero, outfile, numThreads):
columns = ['Problem', 'NotTriedYet', 'Scheduled', 'Success', 'Timeout', 'Stopped', 'Ended']
colors = ['w', 'tab:purple', 'tab:green', 'tab:orange', 'tab:red', 'w']
problems = {}
for problemVariant in problemVariants:
problem = problemVariant.problem
if not (problem.filePattern in problems):
problems[problem.filePattern] = problem
variants = {}
for problemVariant in problemVariants:
v = problemVariant.variant
if not (v in variants):
variants[v] = []
'''
Overall time used
'''
t_max = 0
for problemVariant in problemVariants:
t_max = max(t_max, problemVariant.process.timer.getEnd(zero))
for k, problem in sorted(problems.items(), reverse=True):
for v in sorted(variants.keys(), reverse=True):
if not (v in problem.variants):
variants[v].append([
problem.filePattern,
t_max, # time waiting
0, # time scheduled
0, # time success
0, # time timeout
0, # time error
0, # time ended
])
else:
problemVariant = problem.variants[v]
scheduled = problemVariant.process.timer.getScheduled(zero)
started = problemVariant.process.timer.getStart(zero)
ended = problemVariant.process.timer.getEnd(zero)
if problemVariant.isSuccessful():
state = 'Success'
elif problemVariant.szsStatus == 'Timeout':
state = 'Timeout'
else:
state = 'Stopped'
variants[v].append([
problem.filePattern,
scheduled, # time waiting
started - scheduled, # time scheduled
ended - started if state == 'Success' else 0, # time success
ended - started if state == 'Timeout' else 0, # time timeout
ended - started if state == 'Stopped' else 0, # time error
t_max - ended,
])
dfs = []
labels = []
for v, vd in variants.items():
df = pd.DataFrame(vd,
columns=columns,
).set_index('Problem')
dfs.append(df)
labels.append("v"+v)
ax = plot_grouped_stacked_bar(dfs, labels, title='LTB Scheduler - Problem Timings using {} Threads'.format(numThreads), color=colors)
ax.set_ylabel("Problems")
ax.set_xlabel("Time in s")
fig = ax.get_figure()
fig.set_size_inches(15, 1*len(problems))
fig.savefig(outfile)
def plot_grouped_stacked_bar(dfall, labels, *, title, H="/", **kwargs):
'''
Given a list of dataframes, with identical columns and index, create a clustered stacked bar plot.
Args:
* labels: is a list of the names of the dataframe, used for the legend
* title: a string for the title of the plot
* H: is the hatch used for identification of the different dataframe
Shamelessly taken and modified version of https://stackoverflow.com/a/22845857 thank you jrjc
'''
n_df = len(dfall)
n_col = len(dfall[0].columns)
n_ind = len(dfall[0].index)
axe = plt.subplot(111)
for df in dfall:
axe = df.plot(
kind="barh",
linewidth=0,
stacked=True,
ax=axe,
legend=False,
grid=False,
**kwargs
) # single bar plots
h,l = axe.get_legend_handles_labels()
for i in range(0, n_df * n_col, n_col): # len(h) = n_col * n_df
for j, pa in enumerate(h[i:i+n_col]):
for rect in pa.patches: # for each index
rect.set_y(rect.get_y() + 1 / float(n_df + 1) * i / float(n_col))
rect.set_hatch(H * int(i / n_col)) #edited part
rect.set_height(1 / float(n_df + 1))
axe.set_yticks((np.arange(0, 2 * n_ind, 2) + 1 / float(n_df + 1)) / 2.)
axe.set_yticklabels(df.index, rotation = 0)
axe.set_title(title)
# Add invisible data to add another legend
n=[]
for i in range(n_df):
n.append(axe.bar(0, 0, color="gray", hatch=H * i))
l1 = axe.legend(h[:n_col], l[:n_col], loc=[1.01, 0.5])
if labels is not None:
l2 = plt.legend(n, labels, loc=[1.01, 0.1])
axe.add_artist(l1)
return axe | [
"pandas.DataFrame",
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.legend",
"numpy.arange"
] | [((3598, 3614), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (3609, 3614), True, 'import matplotlib.pyplot as plt\n'), ((4671, 4709), 'matplotlib.pyplot.legend', 'plt.legend', (['n', 'labels'], {'loc': '[1.01, 0.1]'}), '(n, labels, loc=[1.01, 0.1])\n', (4681, 4709), True, 'import matplotlib.pyplot as plt\n'), ((2572, 2605), 'pandas.DataFrame', 'pd.DataFrame', (['vd'], {'columns': 'columns'}), '(vd, columns=columns)\n', (2584, 2605), True, 'import pandas as pd\n'), ((4292, 4318), 'numpy.arange', 'np.arange', (['(0)', '(2 * n_ind)', '(2)'], {}), '(0, 2 * n_ind, 2)\n', (4301, 4318), True, 'import numpy as np\n')] |
#!/usr/bin/python
from __future__ import print_function
import sys
import rubrik_cdm
import getopt
import getpass
import urllib3
urllib3.disable_warnings()
def usage():
sys.stderr.write("Usage: rbk_share_grab.py [-h] [-c creds] [-p protocol] [-t token] [-o outfile] rubrik\n")
sys.stderr.write("-h | --help: Prints this message\n")
sys.stderr.write("-c | --creds : Enter cluster credentials on the CLI [user:password]\n")
sys.stderr.write("-p | --protocol : Only grab shares of the given protocol [NFS | SMB]\n")
sys.stderr.write("-t | --token : Authenticate via token\n")
sys.stderr.write("-o | --output : Write output to a file\n")
sys.stderr.write("rubrik : Hostname or IP of a Rubrik Cluster\n")
exit(0)
def python_input(message):
if int(sys.version[0]) > 2:
value = input(message)
else:
value = raw_input(message)
return(value)
if __name__ == "__main__":
user = ""
password = ""
token = ""
protocol = ""
outfile = ""
timeout = 60
optlist, args = getopt.getopt(sys.argv[1:], 'c:t:p:ho:', ['creds=', 'token=', 'protocol=', 'help', 'output='])
for opt, a in optlist:
if opt in ('-c', '--creds'):
(user, password) = a.split(':')
if opt in ('-t', '--token'):
token = a
if opt in ('-p', '--protocol'):
protocol = a.upper()
if opt in ('-h', '--help'):
usage()
if opt in ('-o', '--output'):
outfile = a
try:
rubrik_node = args[0]
except:
usage()
if not user:
user = python_input("User: ")
if not password:
password = getpass.getpass("Password: ")
if token != "":
rubrik = rubrik_cdm.Connect(rubrik_node, api_token=token)
else:
rubrik = rubrik_cdm.Connect(rubrik_node, user, password)
hs_data = rubrik.get('internal', '/host/share', timeout=timeout)
if outfile:
fp = open(outfile, "w")
for hs in hs_data['data']:
if protocol != "" and protocol != hs['shareType']:
continue
if hs['status'] != "REPLICATION_TARGET":
if outfile:
fp.write(hs['hostname'] + ":" + hs['exportPoint'] + "\n")
else:
print(hs['hostname'] + ":" + hs['exportPoint'])
if outfile:
fp.close()
| [
"getopt.getopt",
"rubrik_cdm.Connect",
"getpass.getpass",
"urllib3.disable_warnings",
"sys.stderr.write"
] | [((130, 156), 'urllib3.disable_warnings', 'urllib3.disable_warnings', ([], {}), '()\n', (154, 156), False, 'import urllib3\n'), ((176, 296), 'sys.stderr.write', 'sys.stderr.write', (['"""Usage: rbk_share_grab.py [-h] [-c creds] [-p protocol] [-t token] [-o outfile] rubrik\n"""'], {}), '(\n """Usage: rbk_share_grab.py [-h] [-c creds] [-p protocol] [-t token] [-o outfile] rubrik\n"""\n )\n', (192, 296), False, 'import sys\n'), ((288, 342), 'sys.stderr.write', 'sys.stderr.write', (['"""-h | --help: Prints this message\n"""'], {}), "('-h | --help: Prints this message\\n')\n", (304, 342), False, 'import sys\n'), ((347, 441), 'sys.stderr.write', 'sys.stderr.write', (['"""-c | --creds : Enter cluster credentials on the CLI [user:password]\n"""'], {}), "(\n '-c | --creds : Enter cluster credentials on the CLI [user:password]\\n')\n", (363, 441), False, 'import sys\n'), ((441, 536), 'sys.stderr.write', 'sys.stderr.write', (['"""-p | --protocol : Only grab shares of the given protocol [NFS | SMB]\n"""'], {}), "(\n '-p | --protocol : Only grab shares of the given protocol [NFS | SMB]\\n')\n", (457, 536), False, 'import sys\n'), ((536, 595), 'sys.stderr.write', 'sys.stderr.write', (['"""-t | --token : Authenticate via token\n"""'], {}), "('-t | --token : Authenticate via token\\n')\n", (552, 595), False, 'import sys\n'), ((600, 660), 'sys.stderr.write', 'sys.stderr.write', (['"""-o | --output : Write output to a file\n"""'], {}), "('-o | --output : Write output to a file\\n')\n", (616, 660), False, 'import sys\n'), ((665, 730), 'sys.stderr.write', 'sys.stderr.write', (['"""rubrik : Hostname or IP of a Rubrik Cluster\n"""'], {}), "('rubrik : Hostname or IP of a Rubrik Cluster\\n')\n", (681, 730), False, 'import sys\n'), ((1045, 1143), 'getopt.getopt', 'getopt.getopt', (['sys.argv[1:]', '"""c:t:p:ho:"""', "['creds=', 'token=', 'protocol=', 'help', 'output=']"], {}), "(sys.argv[1:], 'c:t:p:ho:', ['creds=', 'token=', 'protocol=',\n 'help', 'output='])\n", (1058, 1143), False, 
'import getopt\n'), ((1660, 1689), 'getpass.getpass', 'getpass.getpass', (['"""Password: """'], {}), "('Password: ')\n", (1675, 1689), False, 'import getpass\n'), ((1727, 1775), 'rubrik_cdm.Connect', 'rubrik_cdm.Connect', (['rubrik_node'], {'api_token': 'token'}), '(rubrik_node, api_token=token)\n', (1745, 1775), False, 'import rubrik_cdm\n'), ((1803, 1850), 'rubrik_cdm.Connect', 'rubrik_cdm.Connect', (['rubrik_node', 'user', 'password'], {}), '(rubrik_node, user, password)\n', (1821, 1850), False, 'import rubrik_cdm\n')] |
"""Realtime rate limiting tests."""
import sched
import threading
import time
import pytest
from pytest import approx
from redbucket import (InMemoryRateLimiter, RedisScriptRateLimiter,
RedisTransactionalRateLimiter, RateLimit, Zone)
@pytest.fixture
def in_memory_rate_limiter():
return InMemoryRateLimiter()
@pytest.fixture
def redis_tx_rate_limiter(redis, key_format):
return RedisTransactionalRateLimiter(redis, key_format=key_format)
@pytest.fixture
def redis_script_rate_limiter(redis, redis_version_check, key_format):
redis_version_check(RedisScriptRateLimiter.MIN_REDIS_VERSION)
return RedisScriptRateLimiter(redis, key_format=key_format)
@pytest.fixture(params=('in_memory', 'redis_tx', 'redis_script'))
def rate_limiter(request):
return request.getfixturevalue(f'{request.param}_rate_limiter')
def test_basic(rate_limiter):
rate_limiter.configure(k1=RateLimit(Zone('z1', 5)))
results = [None] * 12
def req(i):
results[i] = rate_limiter.request(k1='foo')
sch = sched.scheduler()
for i in range(12):
sch.enter(0.1 + i/12, 0, req, (i,))
sch.run()
accepted = [int(s) for s, d in results]
assert accepted == [1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0]
def test_burst(rate_limiter):
rate_limiter.configure(k1=RateLimit(Zone('z1', 5), burst=2))
results = [None] * 12
def req(i):
results[i] = rate_limiter.request(k1='foo')
sch = sched.scheduler()
for i in range(12):
sch.enter(0.1 + i/12, 0, req, (i,))
sch.run()
accepted = [int(s) for s, d in results]
assert accepted == [1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0]
def test_delay(rate_limiter):
rate_limiter.configure(k1=RateLimit(Zone('z1', 5), delay=2))
results = [None] * 12
def req(i):
results[i] = rate_limiter.request(k1='foo')
sch = sched.scheduler()
for i in range(12):
sch.enter(0.1 + i/12, 0, req, (i,))
sch.run()
accepted = [int(s) for s, d in results]
assert accepted == [1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0]
dreq = [i/12 + d for i, (s, d) in enumerate(results) if s]
assert dreq == approx([0, 1/5, 2/5, 3/5, 4/5, 1, 6/5], abs=0.05)
def test_burst_delay(rate_limiter):
rate_limiter.configure(k1=RateLimit(Zone('z1', 5), burst=1, delay=1))
results = [None] * 12
def req(i):
results[i] = rate_limiter.request(k1='foo')
sch = sched.scheduler()
for i in range(12):
sch.enter(0.1 + i/12, 0, req, (i,))
sch.run()
accepted = [int(s) for s, d in results]
assert accepted == [1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0]
dreq = [i/12 + d for i, (s, d) in enumerate(results) if s]
assert dreq == approx([0, 1/12, 1/5, 2/5, 3/5, 4/5, 1], abs=0.05)
def test_multi_zone(rate_limiter):
rate_limiter.configure(k1=RateLimit(Zone('z1', 1), burst=4),
k2=RateLimit(Zone('z2', 5), delay=2))
results = [None] * 12
def req(i):
results[i] = rate_limiter.request(k1='foo', k2='bar')
sch = sched.scheduler()
for i in range(12):
sch.enter(0.1 + i/12, 0, req, (i,))
sch.run()
accepted = [int(s) for s, d in results]
assert accepted == [1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0]
dreq = [i/12 + d for i, (s, d) in enumerate(results) if s]
assert dreq == approx([0, 1/5, 2/5, 3/5, 4/5], abs=0.05)
def test_multithreaded(rate_limiter):
rate_limiter.configure(k1=RateLimit(Zone('z1', 7)))
tstamps = [[], [], []]
start = time.monotonic() + 0.1
end = start + .95
def thread_fn(i):
time.sleep(max(start - time.monotonic(), 0))
while time.monotonic() < end:
s, d = rate_limiter.request(k1='foo')
if s:
tstamps[i].append(time.monotonic())
time.sleep(0)
threads = [threading.Thread(target=thread_fn, args=(i,)) for i in range(3)]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
all_ts = sorted(tstamps[0] + tstamps[1] + tstamps[2])
stime = all_ts[0]
for i in range(len(all_ts)):
all_ts[i] -= stime
assert all_ts == approx([0, 1/7, 2/7, 3/7, 4/7, 5/7, 6/7], abs=0.05)
def test_request_invalid_key(rate_limiter):
rate_limiter.configure(k1=RateLimit(Zone('z1', 1)))
with pytest.raises(KeyError) as ei:
rate_limiter.request(k2='bar')
assert str(ei.value) == repr('k2')
def test_request_no_keys(rate_limiter):
rate_limiter.configure(k1=RateLimit(Zone('z1', 1)))
assert rate_limiter.request() == (True, 0)
| [
"pytest.approx",
"time.monotonic",
"sched.scheduler",
"time.sleep",
"redbucket.InMemoryRateLimiter",
"pytest.raises",
"redbucket.Zone",
"redbucket.RedisScriptRateLimiter",
"pytest.fixture",
"threading.Thread",
"redbucket.RedisTransactionalRateLimiter"
] | [((698, 762), 'pytest.fixture', 'pytest.fixture', ([], {'params': "('in_memory', 'redis_tx', 'redis_script')"}), "(params=('in_memory', 'redis_tx', 'redis_script'))\n", (712, 762), False, 'import pytest\n'), ((319, 340), 'redbucket.InMemoryRateLimiter', 'InMemoryRateLimiter', ([], {}), '()\n', (338, 340), False, 'from redbucket import InMemoryRateLimiter, RedisScriptRateLimiter, RedisTransactionalRateLimiter, RateLimit, Zone\n'), ((416, 475), 'redbucket.RedisTransactionalRateLimiter', 'RedisTransactionalRateLimiter', (['redis'], {'key_format': 'key_format'}), '(redis, key_format=key_format)\n', (445, 475), False, 'from redbucket import InMemoryRateLimiter, RedisScriptRateLimiter, RedisTransactionalRateLimiter, RateLimit, Zone\n'), ((642, 694), 'redbucket.RedisScriptRateLimiter', 'RedisScriptRateLimiter', (['redis'], {'key_format': 'key_format'}), '(redis, key_format=key_format)\n', (664, 694), False, 'from redbucket import InMemoryRateLimiter, RedisScriptRateLimiter, RedisTransactionalRateLimiter, RateLimit, Zone\n'), ((1053, 1070), 'sched.scheduler', 'sched.scheduler', ([], {}), '()\n', (1068, 1070), False, 'import sched\n'), ((1462, 1479), 'sched.scheduler', 'sched.scheduler', ([], {}), '()\n', (1477, 1479), False, 'import sched\n'), ((1871, 1888), 'sched.scheduler', 'sched.scheduler', ([], {}), '()\n', (1886, 1888), False, 'import sched\n'), ((2428, 2445), 'sched.scheduler', 'sched.scheduler', ([], {}), '()\n', (2443, 2445), False, 'import sched\n'), ((3051, 3068), 'sched.scheduler', 'sched.scheduler', ([], {}), '()\n', (3066, 3068), False, 'import sched\n'), ((2159, 2218), 'pytest.approx', 'approx', (['[0, 1 / 5, 2 / 5, 3 / 5, 4 / 5, 1, 6 / 5]'], {'abs': '(0.05)'}), '([0, 1 / 5, 2 / 5, 3 / 5, 4 / 5, 1, 6 / 5], abs=0.05)\n', (2165, 2218), False, 'from pytest import approx\n'), ((2716, 2776), 'pytest.approx', 'approx', (['[0, 1 / 12, 1 / 5, 2 / 5, 3 / 5, 4 / 5, 1]'], {'abs': '(0.05)'}), '([0, 1 / 12, 1 / 5, 2 / 5, 3 / 5, 4 / 5, 1], abs=0.05)\n', (2722, 2776), 
False, 'from pytest import approx\n'), ((3339, 3388), 'pytest.approx', 'approx', (['[0, 1 / 5, 2 / 5, 3 / 5, 4 / 5]'], {'abs': '(0.05)'}), '([0, 1 / 5, 2 / 5, 3 / 5, 4 / 5], abs=0.05)\n', (3345, 3388), False, 'from pytest import approx\n'), ((3517, 3533), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (3531, 3533), False, 'import time\n'), ((3838, 3883), 'threading.Thread', 'threading.Thread', ([], {'target': 'thread_fn', 'args': '(i,)'}), '(target=thread_fn, args=(i,))\n', (3854, 3883), False, 'import threading\n'), ((4164, 4227), 'pytest.approx', 'approx', (['[0, 1 / 7, 2 / 7, 3 / 7, 4 / 7, 5 / 7, 6 / 7]'], {'abs': '(0.05)'}), '([0, 1 / 7, 2 / 7, 3 / 7, 4 / 7, 5 / 7, 6 / 7], abs=0.05)\n', (4170, 4227), False, 'from pytest import approx\n'), ((4328, 4351), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (4341, 4351), False, 'import pytest\n'), ((3652, 3668), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (3666, 3668), False, 'import time\n'), ((3808, 3821), 'time.sleep', 'time.sleep', (['(0)'], {}), '(0)\n', (3818, 3821), False, 'import time\n'), ((930, 943), 'redbucket.Zone', 'Zone', (['"""z1"""', '(5)'], {}), "('z1', 5)\n", (934, 943), False, 'from redbucket import InMemoryRateLimiter, RedisScriptRateLimiter, RedisTransactionalRateLimiter, RateLimit, Zone\n'), ((1330, 1343), 'redbucket.Zone', 'Zone', (['"""z1"""', '(5)'], {}), "('z1', 5)\n", (1334, 1343), False, 'from redbucket import InMemoryRateLimiter, RedisScriptRateLimiter, RedisTransactionalRateLimiter, RateLimit, Zone\n'), ((1739, 1752), 'redbucket.Zone', 'Zone', (['"""z1"""', '(5)'], {}), "('z1', 5)\n", (1743, 1752), False, 'from redbucket import InMemoryRateLimiter, RedisScriptRateLimiter, RedisTransactionalRateLimiter, RateLimit, Zone\n'), ((2287, 2300), 'redbucket.Zone', 'Zone', (['"""z1"""', '(5)'], {}), "('z1', 5)\n", (2291, 2300), False, 'from redbucket import InMemoryRateLimiter, RedisScriptRateLimiter, RedisTransactionalRateLimiter, RateLimit, Zone\n'), ((2844, 
2857), 'redbucket.Zone', 'Zone', (['"""z1"""', '(1)'], {}), "('z1', 1)\n", (2848, 2857), False, 'from redbucket import InMemoryRateLimiter, RedisScriptRateLimiter, RedisTransactionalRateLimiter, RateLimit, Zone\n'), ((2909, 2922), 'redbucket.Zone', 'Zone', (['"""z2"""', '(5)'], {}), "('z2', 5)\n", (2913, 2922), False, 'from redbucket import InMemoryRateLimiter, RedisScriptRateLimiter, RedisTransactionalRateLimiter, RateLimit, Zone\n'), ((3461, 3474), 'redbucket.Zone', 'Zone', (['"""z1"""', '(7)'], {}), "('z1', 7)\n", (3465, 3474), False, 'from redbucket import InMemoryRateLimiter, RedisScriptRateLimiter, RedisTransactionalRateLimiter, RateLimit, Zone\n'), ((4302, 4315), 'redbucket.Zone', 'Zone', (['"""z1"""', '(1)'], {}), "('z1', 1)\n", (4306, 4315), False, 'from redbucket import InMemoryRateLimiter, RedisScriptRateLimiter, RedisTransactionalRateLimiter, RateLimit, Zone\n'), ((4520, 4533), 'redbucket.Zone', 'Zone', (['"""z1"""', '(1)'], {}), "('z1', 1)\n", (4524, 4533), False, 'from redbucket import InMemoryRateLimiter, RedisScriptRateLimiter, RedisTransactionalRateLimiter, RateLimit, Zone\n'), ((3616, 3632), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (3630, 3632), False, 'import time\n'), ((3778, 3794), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (3792, 3794), False, 'import time\n')] |
import os
from .vendored import colorconv
import numpy as np
import vispy.color
_matplotlib_list_file = os.path.join(os.path.dirname(__file__),
'matplotlib_cmaps.txt')
with open(_matplotlib_list_file) as fin:
matplotlib_colormaps = [line.rstrip() for line in fin]
def _all_rgb():
"""Return all 256**3 valid rgb tuples."""
base = np.arange(256, dtype=np.uint8)
r, g, b = np.meshgrid(base, base, base, indexing='ij')
return np.stack((r, g, b), axis=-1).reshape((-1, 3))
# obtained with colorconv.rgb2luv(_all_rgb().reshape((-1, 256, 3)))
LUVMIN = np.array([0., -83.07790815, -134.09790293])
LUVMAX = np.array([100., 175.01447356, 107.39905336])
LUVRNG = LUVMAX - LUVMIN
# obtained with colorconv.rgb2lab(_all_rgb().reshape((-1, 256, 3)))
LABMIN = np.array([0., -86.18302974, -107.85730021])
LABMAX = np.array([100., 98.23305386, 94.47812228])
LABRNG = LABMAX - LABMIN
def _validate_rgb(colors, *, tolerance=0.):
"""Return the subset of colors that is in [0, 1] for all channels.
Parameters
----------
colors : array of float, shape (N, 3)
Input colors in RGB space.
Other Parameters
----------------
tolerance : float, optional
Values outside of the range by less than ``tolerance`` are allowed and
clipped to be within the range.
Returns
-------
filtered_colors : array of float, shape (M, 3), M <= N
The subset of colors that are in valid RGB space.
Examples
--------
>>> colors = np.array([[ 0. , 1., 1. ],
... [ 1.1, 0., -0.03],
... [ 1.2, 1., 0.5 ]])
>>> _validate_rgb(colors)
array([[0., 1., 1.]])
>>> _validate_rgb(colors, tolerance=0.15)
array([[0., 1., 1.],
[1., 0., 0.]])
"""
lo = 0 - tolerance
hi = 1 + tolerance
valid = np.all((colors > lo) & (colors < hi), axis=1)
filtered_colors = np.clip(colors[valid], 0, 1)
return filtered_colors
def _low_discrepancy(dim, n, seed=0.5):
"""Generate a 1d, 2d, or 3d low discrepancy sequence of coordinates.
Parameters
----------
dim : one of {1, 2, 3}
The dimensionality of the sequence.
n : int
How many points to generate.
seed : float or array of float, shape (dim,)
The seed from which to start the quasirandom sequence.
Returns
-------
pts : array of float, shape (n, dim)
The sampled points.
References
----------
..[1]: http://extremelearning.com.au/unreasonable-effectiveness-of-quasirandom-sequences/
"""
phi1 = 1.6180339887498948482
phi2 = 1.32471795724474602596
phi3 = 1.22074408460575947536
seed = np.broadcast_to(seed, (1, dim))
phi = np.array([phi1, phi2, phi3])
g = 1 / phi
n = np.reshape(np.arange(n), (n, 1))
pts = (seed + (n * g[:dim])) % 1
return pts
def _color_random(n, *, colorspace='lab', tolerance=0.0, seed=0.5):
"""Generate n random RGB colors uniformly from LAB or LUV space.
Parameters
----------
n : int
Number of colors to generate.
colorspace : str, one of {'lab', 'luv', 'rgb'}
The colorspace from which to get random colors.
tolerance : float
How much margin to allow for out-of-range RGB values (these are
clipped to be in-range).
seed : float or array of float, shape (3,)
Value from which to start the quasirandom sequence.
Returns
-------
rgb : array of float, shape (n, 3)
RGB colors chosen uniformly at random from given colorspace.
"""
factor = 6 # about 1/5 of random LUV tuples are inside the space
expand_factor = 2
rgb = np.zeros((0, 3))
while len(rgb) < n:
random = _low_discrepancy(3, n * factor, seed=seed)
if colorspace == 'luv':
raw_rgb = colorconv.luv2rgb(random * LUVRNG + LUVMIN)
elif colorspace == 'rgb':
raw_rgb = random
else: # 'lab' by default
raw_rgb = colorconv.lab2rgb(random * LABRNG + LABMIN)
rgb = _validate_rgb(raw_rgb, tolerance=tolerance)
factor *= expand_factor
return rgb[:n]
def label_colormap(labels, seed=0.5, max_label=None):
"""Produce a colormap suitable for use with a given label set.
Parameters
----------
labels : array of int
A set of labels or label image.
seed : float or array of float, length 3
The seed for the low discrepancy sequence generator.
max_label : int, optional
The maximum label in `labels`. Computed if not given.
Returns
-------
cmap : vispy.color.Colormap
A colormap for use with ``labels``. The labels are remapped so that
the maximum label falls on 1.0, since vispy requires colormaps to map
within [0, 1].
Notes
-----
0 always maps to fully transparent.
"""
unique_labels = np.unique(labels)
if unique_labels[0] != 0:
unique_labels = np.concatenate([[0], unique_labels])
n = len(unique_labels)
max_label = max_label or np.max(unique_labels)
unique_labels_float = unique_labels / max_label
midpoints = np.convolve(unique_labels_float, [0.5, 0.5], mode='valid')
control_points = np.concatenate(([0.], midpoints, [1.]))
# make sure to add an alpha channel to the colors
colors = np.concatenate((_color_random(n, seed=seed),
np.full((n, 1), 0.7)), axis=1)
colors[0, :] = 0 # ensure alpha is 0 for label 0
cmap = vispy.color.Colormap(colors=colors, controls=control_points,
interpolation='zero')
return cmap
| [
"numpy.clip",
"numpy.convolve",
"numpy.unique",
"numpy.full",
"numpy.max",
"os.path.dirname",
"numpy.array",
"numpy.zeros",
"numpy.stack",
"numpy.concatenate",
"numpy.meshgrid",
"numpy.all",
"numpy.broadcast_to",
"numpy.arange"
] | [((609, 653), 'numpy.array', 'np.array', (['[0.0, -83.07790815, -134.09790293]'], {}), '([0.0, -83.07790815, -134.09790293])\n', (617, 653), True, 'import numpy as np\n'), ((662, 707), 'numpy.array', 'np.array', (['[100.0, 175.01447356, 107.39905336]'], {}), '([100.0, 175.01447356, 107.39905336])\n', (670, 707), True, 'import numpy as np\n'), ((810, 854), 'numpy.array', 'np.array', (['[0.0, -86.18302974, -107.85730021]'], {}), '([0.0, -86.18302974, -107.85730021])\n', (818, 854), True, 'import numpy as np\n'), ((863, 906), 'numpy.array', 'np.array', (['[100.0, 98.23305386, 94.47812228]'], {}), '([100.0, 98.23305386, 94.47812228])\n', (871, 906), True, 'import numpy as np\n'), ((120, 145), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (135, 145), False, 'import os\n'), ((383, 413), 'numpy.arange', 'np.arange', (['(256)'], {'dtype': 'np.uint8'}), '(256, dtype=np.uint8)\n', (392, 413), True, 'import numpy as np\n'), ((428, 472), 'numpy.meshgrid', 'np.meshgrid', (['base', 'base', 'base'], {'indexing': '"""ij"""'}), "(base, base, base, indexing='ij')\n", (439, 472), True, 'import numpy as np\n'), ((1880, 1925), 'numpy.all', 'np.all', (['((colors > lo) & (colors < hi))'], {'axis': '(1)'}), '((colors > lo) & (colors < hi), axis=1)\n', (1886, 1925), True, 'import numpy as np\n'), ((1948, 1976), 'numpy.clip', 'np.clip', (['colors[valid]', '(0)', '(1)'], {}), '(colors[valid], 0, 1)\n', (1955, 1976), True, 'import numpy as np\n'), ((2721, 2752), 'numpy.broadcast_to', 'np.broadcast_to', (['seed', '(1, dim)'], {}), '(seed, (1, dim))\n', (2736, 2752), True, 'import numpy as np\n'), ((2763, 2791), 'numpy.array', 'np.array', (['[phi1, phi2, phi3]'], {}), '([phi1, phi2, phi3])\n', (2771, 2791), True, 'import numpy as np\n'), ((3705, 3721), 'numpy.zeros', 'np.zeros', (['(0, 3)'], {}), '((0, 3))\n', (3713, 3721), True, 'import numpy as np\n'), ((4917, 4934), 'numpy.unique', 'np.unique', (['labels'], {}), '(labels)\n', (4926, 4934), True, 'import numpy 
as np\n'), ((5172, 5230), 'numpy.convolve', 'np.convolve', (['unique_labels_float', '[0.5, 0.5]'], {'mode': '"""valid"""'}), "(unique_labels_float, [0.5, 0.5], mode='valid')\n", (5183, 5230), True, 'import numpy as np\n'), ((5252, 5293), 'numpy.concatenate', 'np.concatenate', (['([0.0], midpoints, [1.0])'], {}), '(([0.0], midpoints, [1.0]))\n', (5266, 5293), True, 'import numpy as np\n'), ((2827, 2839), 'numpy.arange', 'np.arange', (['n'], {}), '(n)\n', (2836, 2839), True, 'import numpy as np\n'), ((4989, 5025), 'numpy.concatenate', 'np.concatenate', (['[[0], unique_labels]'], {}), '([[0], unique_labels])\n', (5003, 5025), True, 'import numpy as np\n'), ((5082, 5103), 'numpy.max', 'np.max', (['unique_labels'], {}), '(unique_labels)\n', (5088, 5103), True, 'import numpy as np\n'), ((484, 512), 'numpy.stack', 'np.stack', (['(r, g, b)'], {'axis': '(-1)'}), '((r, g, b), axis=-1)\n', (492, 512), True, 'import numpy as np\n'), ((5433, 5453), 'numpy.full', 'np.full', (['(n, 1)', '(0.7)'], {}), '((n, 1), 0.7)\n', (5440, 5453), True, 'import numpy as np\n')] |
#!/usr/bin/env python3
"""
pass.py
Find hardcoded passwords on source code of your project.
python pass.py path/to/project
"""
import os
import sys
import re
import fnmatch
import json
from argparse import ArgumentParser
# Default credential-related keywords to hunt for; build_bad_words() turns
# these into case-insensitive regex alternations.
DEFAULT_BAD_WORDS = ['token', 'oauth', 'secret', 'pass', 'password', '<PASSWORD>']
# Default value patterns appended after a bad word: a quoted string value,
# or an unquoted URL-style value.
DEFAULT_ANALYZERS = [r' *[:=] *["\'][^"\']{4,}["\']', r'[:=][^"\'& ,;{()<\n]{4,}'] # str and url based.
def check_exclude_pattern(checkers, line):
    """Return True when any compiled regex in *checkers* matches *line*.

    Used to discard findings that are known false positives.
    """
    return any(pattern.match(line) for pattern in checkers)
def can_analyze_file(include_paths, exclude_paths, path):
    """Decide whether *path* should be analyzed, using glob patterns.

    A non-empty include list restricts analysis to matching paths; a
    non-empty exclude list then discards matching paths.
    """
    def _matches_any(patterns):
        return any(fnmatch.fnmatch(path, pattern) for pattern in patterns)

    if include_paths and not _matches_any(include_paths):
        return False
    if exclude_paths and _matches_any(exclude_paths):
        return False
    return True
def build_bad_words(words):
    """Build a case-insensitive regex alternation covering all given words."""
    alternatives = []
    for word in words:
        # Turn each character into a [Xx] class so matching ignores case.
        char_classes = ''.join(f'[{upper}{lower}]'
                               for upper, lower in zip(word.upper(), word.lower()))
        alternatives.append(f'(?:{char_classes})')
    return '|'.join(alternatives)
def build_regex_analyzers(rules, bad_words):
    """
    Merges the regex patterns from the bad words
    with the analyzers in order to create the
    final regex pattern to be used.
    """
    return [
        re.compile(f'(?:{bad_words})(?:[a-zA-Z_][a-zA-Z0-9_]*)?{rule}')
        for rule in rules
    ]
def check_file_handler(path, max_length, analyzers, patterns):
    """
    Check all lines of a single file.
    Also checks for max line length and for false positives.

    Args:
        path: File to scan.
        max_length: Lines longer than this are skipped (e.g. minified files).
        analyzers: Compiled regexes that detect hardcoded secrets.
        patterns: Compiled regexes marking false positives to ignore.

    Returns:
        A list of dicts {'file', 'target', 'line', 'string'} for every
        suspicious line found, with 1-based line numbers.
    """
    result = []
    try:
        with open(path, 'r') as handler:
            # start=1 reports human-friendly 1-based line numbers
            # (previously 0-based, off by one in the printed report).
            for i, line in enumerate(handler, start=1):
                # Checking for max line length.
                if len(line) > max_length:
                    continue
                for checker in analyzers: # All analyzers run in every line.
                    data = checker.findall(line)
                    # Check if it's a false positive.
                    if data and not check_exclude_pattern(patterns, line):
                        result.append({
                            'file': path,
                            'target': data[0],
                            'line': i,
                            'string': line.strip(),
                        })
    except UnicodeDecodeError:
        # Ignore non text files (binary content fails to decode).
        pass
    return result
def start_digging(root_path, limit, max_length, analyzers, patterns, include_paths, exclude_paths):
    """Walk all folders and subfolders under *root_path* and analyze files.

    Args:
        root_path: Directory to walk recursively.
        limit: Maximum number of files to analyze.
        max_length/analyzers/patterns: Forwarded to check_file_handler.
        include_paths/exclude_paths: Glob filters for can_analyze_file.

    Returns:
        (counter, result): number of files analyzed and combined findings.
    """
    counter = 0
    result = []
    for root, _subfolders, file_list in os.walk(root_path):
        for file in file_list:
            path = os.path.join(root, file)
            # Apply include/exclude glob rules.
            if not can_analyze_file(include_paths, exclude_paths, path):
                continue
            # Stop once `limit` files have been analyzed.
            # (Previously `counter > limit`, which analyzed limit + 1 files.)
            if counter >= limit:
                return counter, result
            counter += 1
            # Send file to be analyzed by the handler.
            result += check_file_handler(path, max_length, analyzers, patterns)
    return counter, result
if __name__ == "__main__":
    # CLI entry point: parse options, scan the project tree, print findings.
    parser = ArgumentParser(description='Check for hardcoded passwords and tokens in your project.')
    # type=open makes argparse open the file itself; handles are closed below.
    parser.add_argument('--bad-words', type=open, dest='bad_words',
        help='File containing which WORDS to analyze, one word per line. \
            If not provided, will fallback to the default bad words list.'
    )
    parser.add_argument('--ignore-patterns', type=open, dest='ignore_patterns',
        help='File containing regex patterns of which TARGETS to ignore.'
    )
    # NOTE(review): the help text claims include has precedence, but
    # can_analyze_file still lets --exclude-paths veto included files —
    # confirm the intended precedence.
    parser.add_argument('--include-paths', type=open, dest='include_file',
        help='File containing glob patterns of which FILES to analyze. \
            WARNING: This option has precedence over the option "--exclude-paths".'
    )
    parser.add_argument('--exclude-paths', type=open, dest='exclude_file',
        help='File containing glob patterns of which FILES to ignore.'
    )
    parser.add_argument('--max-length', type=int, default=1000, dest='max_length',
        help='The maximun length of a line to analyze.'
    )
    parser.add_argument('--max-checks', type=int, default=sys.maxsize, dest='max_checks',
        help='Max number of files to analize.'
    )
    parser.add_argument('--json', action='store_true', dest='json',
        help='Output result in a pretty JSON format.'
    )
    parser.add_argument('path', type=str,
        help='Path to the project.'
    )
    args = parser.parse_args()
    # Preparing the bad word list.
    bad_words = []
    if args.bad_words:
        bad_words = args.bad_words.read().splitlines()
        args.bad_words.close()
    # Preparing for target patterns to ignore.
    ignore_patterns = []
    if args.ignore_patterns:
        for pattern in args.ignore_patterns:
            ignore_patterns.append(re.compile(pattern))
        args.ignore_patterns.close()
    # Checking for paths to include in the results.
    include_paths = []
    if args.include_file:
        include_paths = args.include_file.read().splitlines()
        args.include_file.close()
    # Checking for paths to exclude from results.
    exclude_paths = []
    if args.exclude_file:
        exclude_paths = args.exclude_file.read().splitlines()
        args.exclude_file.close()
    # Building bad words.
    bad_words = build_bad_words(bad_words or DEFAULT_BAD_WORDS)
    # Building regex analyzers.
    analyzers = build_regex_analyzers(DEFAULT_ANALYZERS, bad_words)
    # Start the digging!!
    counter, result = start_digging(
        args.path,
        args.max_checks,
        args.max_length,
        analyzers,
        ignore_patterns,
        include_paths,
        exclude_paths
    )
    # Outputs to JSON or to stdout.
    if args.json:
        print(json.dumps(result, indent=2))
    elif counter == 0:
        print('No file found.')
        print('STATUS: FAILED')
    else:
        for r in result:
            print('File:\t', r['file'])
            print('Line:\t', r['line'])
            print('Target:\t', r['target'], '\n')
            print(r['string'])
            print('\n--------------------------------------------------------------------------------\n')
        print('Found: {} | Files Checked: {} | (Hit Upper Limit? {})'.format(len(result), counter, 'Yes' if counter >= args.max_checks else 'No'))
        print('STATUS: {}'.format('FAILED' if result else 'OK'))
    # For CI/CD purposes.
    # Exit non-zero when findings exist so CI pipelines fail the build.
    sys.exit(1 if result else 0)
| [
"argparse.ArgumentParser",
"re.compile",
"json.dumps",
"os.path.join",
"fnmatch.fnmatch",
"sys.exit",
"os.walk"
] | [((2964, 2982), 'os.walk', 'os.walk', (['root_path'], {}), '(root_path)\n', (2971, 2982), False, 'import os\n'), ((3537, 3629), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""Check for hardcoded passwords and tokens in your project."""'}), "(description=\n 'Check for hardcoded passwords and tokens in your project.')\n", (3551, 3629), False, 'from argparse import ArgumentParser\n'), ((6884, 6912), 'sys.exit', 'sys.exit', (['(1 if result else 0)'], {}), '(1 if result else 0)\n', (6892, 6912), False, 'import sys\n'), ((1596, 1659), 're.compile', 're.compile', (['f"""(?:{bad_words})(?:[a-zA-Z_][a-zA-Z0-9_]*)?{rule}"""'], {}), "(f'(?:{bad_words})(?:[a-zA-Z_][a-zA-Z0-9_]*)?{rule}')\n", (1606, 1659), False, 'import re\n'), ((3034, 3058), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (3046, 3058), False, 'import os\n'), ((6221, 6249), 'json.dumps', 'json.dumps', (['result'], {'indent': '(2)'}), '(result, indent=2)\n', (6231, 6249), False, 'import json\n'), ((893, 917), 'fnmatch.fnmatch', 'fnmatch.fnmatch', (['path', 'p'], {}), '(path, p)\n', (908, 917), False, 'import fnmatch\n'), ((5280, 5299), 're.compile', 're.compile', (['pattern'], {}), '(pattern)\n', (5290, 5299), False, 'import re\n'), ((792, 816), 'fnmatch.fnmatch', 'fnmatch.fnmatch', (['path', 'p'], {}), '(path, p)\n', (807, 816), False, 'import fnmatch\n')] |
from django.contrib.auth.models import User
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from django.contrib.auth import authenticate
class AuthenticationUserTestCase(APITestCase):
    """Authentication and access-control tests for the Company list endpoint."""
    def setUp(self):
        # Endpoint under test plus a superuser for authenticated requests.
        self.list_url = reverse('Company-list')
        self.user = User.objects.create_superuser('root', password='<PASSWORD>')
    def test_authentication_user_credentials(self):
        """User credentials verification test"""
        user = authenticate(username='root', password='<PASSWORD>')
        self.assertTrue((user is not None) and user.is_authenticated)
    def test_get_request_with_not_authenticated_user(self):
        """Verification test of get request to user without authentication"""
        response = self.client.get(self.list_url)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    def test_authentication_incorrect_username(self):
        """Incorrect username verification test"""
        # 'rot' deliberately misspells the username created in setUp.
        user = authenticate(username='rot', password='<PASSWORD>')
        self.assertFalse((user is not None) and user.is_authenticated)
    def test_authentication_incorrect_password(self):
        """Incorrect password verification test"""
        # NOTE(review): the password literal looks identical to the one used in
        # setUp — confirm this test really passes a wrong password.
        user = authenticate(username='root', password='<PASSWORD>')
        self.assertFalse((user is not None) and user.is_authenticated)
    def test_get_request_with_authenticated_user(self):
        """Verification test of get request to user authenticated"""
        self.client.force_authenticate(self.user)
        response = self.client.get(self.list_url)
        self.assertEqual(response.status_code, status.HTTP_200_OK) | [
"django.contrib.auth.models.User.objects.create_superuser",
"django.contrib.auth.authenticate",
"django.urls.reverse"
] | [((293, 316), 'django.urls.reverse', 'reverse', (['"""Company-list"""'], {}), "('Company-list')\n", (300, 316), False, 'from django.urls import reverse\n'), ((337, 397), 'django.contrib.auth.models.User.objects.create_superuser', 'User.objects.create_superuser', (['"""root"""'], {'password': '"""<PASSWORD>"""'}), "('root', password='<PASSWORD>')\n", (366, 397), False, 'from django.contrib.auth.models import User\n'), ((515, 567), 'django.contrib.auth.authenticate', 'authenticate', ([], {'username': '"""root"""', 'password': '"""<PASSWORD>"""'}), "(username='root', password='<PASSWORD>')\n", (527, 567), False, 'from django.contrib.auth import authenticate\n'), ((1025, 1076), 'django.contrib.auth.authenticate', 'authenticate', ([], {'username': '"""rot"""', 'password': '"""<PASSWORD>"""'}), "(username='rot', password='<PASSWORD>')\n", (1037, 1076), False, 'from django.contrib.auth import authenticate\n'), ((1269, 1321), 'django.contrib.auth.authenticate', 'authenticate', ([], {'username': '"""root"""', 'password': '"""<PASSWORD>"""'}), "(username='root', password='<PASSWORD>')\n", (1281, 1321), False, 'from django.contrib.auth import authenticate\n')] |
#! python
import PyPDF2
# Open the (possibly encrypted) PDF for binary reading.
pdf = open('encrypted.pdf', 'rb')
pdfRead = PyPDF2.PdfFileReader(pdf)
if pdfRead.isEncrypted:
    # if encrypted, decrypt with the known password
    pdfRead.decrypt('rosebud')
# Dump the extracted text of every page to stdout.
for i in range(pdfRead.getNumPages()):
    data = pdfRead.getPage(i)
    print(data.extractText())
pdf.close()
| [
"PyPDF2.PdfFileReader"
] | [((75, 100), 'PyPDF2.PdfFileReader', 'PyPDF2.PdfFileReader', (['pdf'], {}), '(pdf)\n', (95, 100), False, 'import PyPDF2\n')] |
from dipy.denoise.nlmeans import nlmeans_3d, nlmeans
from dipy.denoise.noise_estimate import estimate_sigma
import cv2 as cv
import numpy as np
import nibabel as nib
def preprocess(nifti, name):
    """Denoise a 3D MRI volume with non-local means and save it as clean_<name>."""
    volume = nifti.get_fdata()
    # N: number of coils in the receiver of the MRI scanner
    noise_sigma = estimate_sigma(volume, N=16)
    cleaned = nlmeans(volume, noise_sigma)
    nib.save(nib.Nifti1Image(cleaned, nifti.affine),
             f'lab4/data/clean_{name}.nii.gz')
def cluster(nifti, name):
    """Segment the 3D image slice by slice, then merge all slices and save as nifti"""
    n_cluster = 7 # number of clusters
    image = nifti.get_fdata(dtype=np.float32)
    for i, slice in enumerate(image):
        # Flatten the slice into a single column of intensities for k-means.
        data = slice.reshape((-1, 1))
        vessel, vessel_id = max(data), np.argmax(data) # vessel is the brightest pixel
        if vessel < 10: # slice has no vessels (perhaps outside the brain)
            image[i, ...] = 0 # enforce binary property so as to view polygon model in imeka
            continue
        criteria = (cv.TERM_CRITERIA_EPS + cv.TERM_CRITERIA_MAX_ITER, 30, 1) # (type, max_iter, epsilon)
        _, labels, _ = cv.kmeans(data, n_cluster, None, criteria, 10, cv.KMEANS_RANDOM_CENTERS)
        cluster_id = labels[vessel_id] # cluster id of all vessels
        # Binarize: pixels in the vessel cluster -> 255, everything else -> 0.
        data[labels == cluster_id] = 255
        data[labels != cluster_id] = 0
        # Write the segmented slice back into the volume in place.
        image[i, ...] = data.reshape(slice.shape)
    output = nib.Nifti1Image(image, nifti.affine)
    nib.save(output, f'lab4/data/out_{name}.nii.gz')
def run():
    """Denoise the SWI and TOF volumes, then segment the cleaned images."""
    swi_img = nib.load('lab4/data/invert_swi.nii.gz')
    tof_img = nib.load('lab4/data/bet_tof.nii.gz')
    preprocess(swi_img, 'swi')
    preprocess(tof_img, 'tof')
    # Segmentation reads the denoised files written by preprocess().
    for name in ('swi', 'tof'):
        cluster(nib.load(f'lab4/data/clean_{name}.nii.gz'), name)
| [
"dipy.denoise.noise_estimate.estimate_sigma",
"dipy.denoise.nlmeans.nlmeans",
"nibabel.save",
"nibabel.load",
"cv2.kmeans",
"numpy.argmax",
"nibabel.Nifti1Image"
] | [((303, 330), 'dipy.denoise.noise_estimate.estimate_sigma', 'estimate_sigma', (['image'], {'N': '(16)'}), '(image, N=16)\n', (317, 330), False, 'from dipy.denoise.noise_estimate import estimate_sigma\n'), ((403, 424), 'dipy.denoise.nlmeans.nlmeans', 'nlmeans', (['image', 'sigma'], {}), '(image, sigma)\n', (410, 424), False, 'from dipy.denoise.nlmeans import nlmeans_3d, nlmeans\n'), ((446, 485), 'nibabel.Nifti1Image', 'nib.Nifti1Image', (['denoised', 'nifti.affine'], {}), '(denoised, nifti.affine)\n', (461, 485), True, 'import nibabel as nib\n'), ((490, 548), 'nibabel.save', 'nib.save', (['denoised_nifti', 'f"""lab4/data/clean_{name}.nii.gz"""'], {}), "(denoised_nifti, f'lab4/data/clean_{name}.nii.gz')\n", (498, 548), True, 'import nibabel as nib\n'), ((1522, 1558), 'nibabel.Nifti1Image', 'nib.Nifti1Image', (['image', 'nifti.affine'], {}), '(image, nifti.affine)\n', (1537, 1558), True, 'import nibabel as nib\n'), ((1563, 1611), 'nibabel.save', 'nib.save', (['output', 'f"""lab4/data/out_{name}.nii.gz"""'], {}), "(output, f'lab4/data/out_{name}.nii.gz')\n", (1571, 1611), True, 'import nibabel as nib\n'), ((1635, 1674), 'nibabel.load', 'nib.load', (['"""lab4/data/invert_swi.nii.gz"""'], {}), "('lab4/data/invert_swi.nii.gz')\n", (1643, 1674), True, 'import nibabel as nib\n'), ((1685, 1721), 'nibabel.load', 'nib.load', (['"""lab4/data/bet_tof.nii.gz"""'], {}), "('lab4/data/bet_tof.nii.gz')\n", (1693, 1721), True, 'import nibabel as nib\n'), ((1236, 1308), 'cv2.kmeans', 'cv.kmeans', (['data', 'n_cluster', 'None', 'criteria', '(10)', 'cv.KMEANS_RANDOM_CENTERS'], {}), '(data, n_cluster, None, criteria, 10, cv.KMEANS_RANDOM_CENTERS)\n', (1245, 1308), True, 'import cv2 as cv\n'), ((1789, 1827), 'nibabel.load', 'nib.load', (['"""lab4/data/clean_swi.nii.gz"""'], {}), "('lab4/data/clean_swi.nii.gz')\n", (1797, 1827), True, 'import nibabel as nib\n'), ((1848, 1886), 'nibabel.load', 'nib.load', (['"""lab4/data/clean_tof.nii.gz"""'], {}), "('lab4/data/clean_tof.nii.gz')\n", 
(1856, 1886), True, 'import nibabel as nib\n'), ((866, 881), 'numpy.argmax', 'np.argmax', (['data'], {}), '(data)\n', (875, 881), True, 'import numpy as np\n')] |
from conans import ConanFile, CMake, tools
class PionConan(ConanFile):
    """Conan recipe that builds the Pion HTTP library as a static library."""
    name = "pion"
    version = "5.0.7+12"
    license = "Boost Software License 1.0 - https://raw.githubusercontent.com/splunk/pion/develop/COPYING"
    description = "C++ framework for building lightweight HTTP interfaces"
    url = "https://github.com/odant/conan-pion"
    settings = {
        "os": ["Windows", "Linux"],
        "compiler": ["Visual Studio", "gcc"],
        "build_type": ["Debug", "Release"],
        "arch": ["x86_64", "x86", "mips", "armv7"]
    }
    generators = "cmake"
    exports_sources = "src/*", "CMakeLists.txt", "FindPion.cmake"
    no_copy_source = True
    build_policy = "missing"
    def configure(self):
        # Only C++11
        # Reject the pre-C++11 libstdc++ ABI; the package needs libstdc++11.
        if "libcxx" in self.settings.compiler.fields:
            if self.settings.compiler.libcxx == "libstdc++":
                raise Exception("This package is only compatible with libstdc++11")
    def requirements(self):
        # Dependencies are pulled from the same user channel as this recipe.
        self.requires("zlib/[>=1.2.3]@%s/stable" % self.user)
        #self.requires("openssl/[~=1.1.0g]@%s/testing" % self.user)
        self.requires("boost/[>=1.70.0]@%s/testing" % self.user)
    def package_id(self):
        # Re-id this package whenever the exact boost binary package changes.
        self.info.requires["boost"].full_package_mode()
    def build(self):
        # Release builds keep debug info (RelWithDebInfo) so PDBs can ship.
        build_type = "RelWithDebInfo" if self.settings.build_type == "Release" else "Debug"
        cmake = CMake(self, build_type=build_type)
        cmake.verbose = True
        #
        cmake.definitions["CMAKE_INSTALL_PREFIX:STRING"] = self.package_folder.replace("\\", "/")
        cmake.definitions["CMAKE_POSITION_INDEPENDENT_CODE:BOOL"] = "ON"
        cmake.definitions["BUILD_SHARED_LIBS:BOOL"] = "OFF"
        #
        # Disable optional components, sample servers and all logging backends.
        cmake.definitions["BUILD_SPDY:BOOL"] = "OFF"
        cmake.definitions["BUILD_UT:BOOL"] = "OFF"
        cmake.definitions["BUILD_PIOND:BOOL"] = "OFF"
        cmake.definitions["BUILD_HELLOSERVER:BOOL"] = "OFF"
        cmake.definitions["USE_LOG4CPLUS:BOOL"] = "OFF"
        cmake.definitions["USE_LOG4CXX:BOOL"] = "OFF"
        cmake.definitions["USE_LOG4CPP:BOOL"] = "OFF"
        cmake.definitions["DISABLE_LOGGING:BOOL"] = "ON"
        #
        cmake.configure()
        cmake.build()
        cmake.install()
    def package(self):
        # Ship the CMake find module and (on MSVC) the PDB debug symbols.
        self.copy("FindPion.cmake", dst=".", src=".", keep_path=False)
        self.copy("pion.pdb", dst="bin", src="lib", keep_path=False)
        self.copy("pion_services.pdb", dst="bin", src="lib", keep_path=False)
    def package_info(self):
        self.cpp_info.libs = tools.collect_libs(self)
        if self.settings.os == "Linux":
            self.cpp_info.libs.append("dl")
        # Consumers link statically, so disable dllimport/dllexport decoration.
        self.cpp_info.defines = ["PION_STATIC_LINKING"]
| [
"conans.tools.collect_libs",
"conans.CMake"
] | [((1375, 1409), 'conans.CMake', 'CMake', (['self'], {'build_type': 'build_type'}), '(self, build_type=build_type)\n', (1380, 1409), False, 'from conans import ConanFile, CMake, tools\n'), ((2511, 2535), 'conans.tools.collect_libs', 'tools.collect_libs', (['self'], {}), '(self)\n', (2529, 2535), False, 'from conans import ConanFile, CMake, tools\n')] |
from sqlalchemy.inspection import inspect
from . import db
class CRUDMixin(object):
    """Implements methods to create, read, update, and delete."""

    @classmethod
    def create(cls, commit=True, **kwargs):
        """Instantiate the model with **kwargs and persist it."""
        instance = cls(**kwargs)
        return instance.save(commit=commit)

    @classmethod
    def get(cls, id):
        """Return the instance with the given primary key, or None."""
        return cls.query.get(id)

    @classmethod
    def get_or_create(cls, id, commit=True, **kwargs):
        """Fetch by primary key (creating if missing) and apply **kwargs."""
        obj = cls.query.get(id) or cls(id)
        obj.update(commit=False, **kwargs)
        return obj.save(commit=commit)

    @classmethod
    def _filter(cls, **kwargs):
        """Return the first instance matching all keyword filters, or None."""
        query = cls.query
        # Py3 fix: dict.iteritems() no longer exists; items() works on 2 and 3.
        for key, value in kwargs.items():
            query = query.filter_by(**{key: value})
        return query.first()

    @classmethod
    def filter_or_create(cls, commit=True, **kwargs):
        """Return the first instance matching **kwargs, creating one if absent."""
        self = cls._filter(**kwargs)
        if not self:
            self = cls.create(commit, **kwargs)
        return self

    def save(self, commit=True):
        """Add the instance to the session; commit when requested."""
        db.session.add(self)
        if commit:
            db.session.commit()
        return self

    def delete(self, commit=True):
        """Delete the instance; commits (and returns the commit result) if asked."""
        db.session.delete(self)
        return commit and db.session.commit()

    def update(self, commit=True, **kwargs):
        """Set each keyword argument as an attribute; save when commit is True."""
        # Py3 fix: iteritems() -> items().
        for attr, value in kwargs.items():
            setattr(self, attr, value)
        return commit and self.save() or self
class Serializer(object):
    """Mixin that renders a mapped model instance as a dict of strings."""

    serialized_fields = ()

    def json(self):
        """Return {field: str(value)} for the configured (or all) columns.

        Raises ValueError if a primary key is unset (object not loaded) or a
        configured field is not a column of the model.
        """
        chosen_fields = self.serialized_fields
        column_names = {column.name
                        for column in self.__class__.__table__.columns}
        # An unset primary key means the row was never loaded.
        for primary_key in inspect(self.__class__).primary_key:
            if not getattr(self, primary_key.name):
                raise ValueError("The object hasn't been loaded yet.")
        if chosen_fields:
            # Validate the explicit field list against the real columns.
            for field in chosen_fields:
                if field not in column_names:
                    raise ValueError(
                        "The field `%s` isn't in `%s`"
                        % (field, self.__class__.__name__)
                    )
        else:
            chosen_fields = column_names
        serialized = {}
        for field in chosen_fields:
            try:
                serialized[field] = str(getattr(self, field))
            except UnicodeEncodeError:
                # Fall back to the raw value when it cannot be stringified.
                serialized[field] = getattr(self, field)
        return serialized
| [
"sqlalchemy.inspection.inspect"
] | [((1682, 1705), 'sqlalchemy.inspection.inspect', 'inspect', (['self.__class__'], {}), '(self.__class__)\n', (1689, 1705), False, 'from sqlalchemy.inspection import inspect\n')] |
import bpy
import os
import sys
import argparse
## Example call from commandline: blender -b -P decimate_mesh_blender.py -- -f mesh.obj -o mesh_dec.obj -r 0.5 -i 2 -n 4 -l 0.5
## Blender will ignore all options after -- so parameters can be passed to python script.
# get the args passed to blender after "--", all of which are ignored by
# blender so scripts may receive their own arguments
argv = sys.argv
if "--" not in argv:
    argv = [] # as if no args are passed
else:
    argv = argv[argv.index("--") + 1:] # get all args after "--"
path = os.path.abspath('.')
path = path + '/'
parser = argparse.ArgumentParser(description="Mesh Decimation",formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--filename','-f',type=str)
parser.add_argument('--output_filename','-o',type=str,default="")
parser.add_argument('--decimate_ratio','-r',type=float,default=0.4)
parser.add_argument('--decimate_iterations','-i',type=int,default=2)
parser.add_argument('--smooth_iterations','-n',type=int,default=4)
parser.add_argument('--smooth_lambda','-l',type=float,default=0.5)
# NOTE(review): type=bool parses any non-empty string as True
# (bool('False') is True) — confirm -d/-s behave as intended from the CLI.
parser.add_argument('--decimate','-d',type=bool,default=True)
parser.add_argument('--smooth','-s',type=bool,default=True)
args = parser.parse_args(argv)
#Get rid of blender default objects
for o in bpy.data.objects:
    o.select=True
    bpy.ops.object.delete()
#Import Mesh
bpy.ops.import_scene.obj(filepath= path + args.filename)
Mesh = bpy.context.selected_objects[0]
if (args.decimate):
    #add mesh decimate modifier
    # Each pass adds another DECIMATE modifier with a progressively
    # smaller ratio: 1 - decimate_ratio * (i + 1).
    modifierName='DecimateMod'
    for i in range(0,args.decimate_iterations):
        modifier=Mesh.modifiers.new(modifierName,'DECIMATE')
        modifier.ratio=1-args.decimate_ratio*(i+1)
        modifier.use_collapse_triangulate=True
#add smooth modifier
if (args.smooth):
    # NOTE(review): `obj.select = True` is the pre-2.80 Blender API;
    # 2.80+ uses select_set(True) — confirm the targeted Blender version.
    Mesh.select = True
    modifier_s = Mesh.modifiers.new("laplacesmooth",'LAPLACIANSMOOTH')
    modifier_s.iterations = args.smooth_iterations
    modifier_s.lambda_factor = args.smooth_lambda
#Export as .obj file
Mesh.select = True
if (args.output_filename == ""):
    # No explicit output name: derive "<input>_decimated.obj".
    print(args.filename)
    output_filename = path + str.split(args.filename,".obj")[0] + "_decimated.obj"
    print( str.split(args.filename,".obj")[0])
    print(output_filename)
else:
    output_filename = path + args.output_filename
#Save file as .obj
bpy.ops.export_scene.obj(filepath=output_filename,use_materials=False)
| [
"bpy.ops.object.delete",
"bpy.ops.export_scene.obj",
"argparse.ArgumentParser",
"bpy.ops.import_scene.obj",
"os.path.abspath"
] | [((545, 565), 'os.path.abspath', 'os.path.abspath', (['"""."""'], {}), "('.')\n", (560, 565), False, 'import os\n'), ((594, 709), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Mesh Decimation"""', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), "(description='Mesh Decimation', formatter_class=\n argparse.ArgumentDefaultsHelpFormatter)\n", (617, 709), False, 'import argparse\n'), ((1322, 1345), 'bpy.ops.object.delete', 'bpy.ops.object.delete', ([], {}), '()\n', (1343, 1345), False, 'import bpy\n'), ((1360, 1415), 'bpy.ops.import_scene.obj', 'bpy.ops.import_scene.obj', ([], {'filepath': '(path + args.filename)'}), '(filepath=path + args.filename)\n', (1384, 1415), False, 'import bpy\n'), ((2261, 2332), 'bpy.ops.export_scene.obj', 'bpy.ops.export_scene.obj', ([], {'filepath': 'output_filename', 'use_materials': '(False)'}), '(filepath=output_filename, use_materials=False)\n', (2285, 2332), False, 'import bpy\n')] |
#!/usr/bin/python
from Solution import Solution
# Run the project's Solution.numberOfPatterns for pattern lengths A..B —
# presumably the "Android Unlock Patterns" problem (counts between 1 and 9
# keys); confirm against the Solution implementation.
obj = Solution()
A = 1
B = 9
print(obj.numberOfPatterns(A, B)) | [
"Solution.Solution"
] | [((55, 65), 'Solution.Solution', 'Solution', ([], {}), '()\n', (63, 65), False, 'from Solution import Solution\n')] |
"""Miscellaneous inspection tools
"""
from tempfile import NamedTemporaryFile
def disassemble_elf_to_cfg(elf):
    """
    Gets the CFG of the disassembly of an ELF object, elf, and renders it
    appropriately depending on the execution environment (terminal/notebook).

    Args:
        elf: Raw bytes of an ELF object to hand to radare2.

    Returns:
        A DisasmCFG object: repr() gives the ANSI terminal rendering,
        notebooks get an SVG via _repr_svg_.

    Raises:
        RuntimeError: if r2pipe/graphviz are missing or radare2 is not on PATH.
    """
    try:
        import r2pipe
    except ImportError:
        raise RuntimeError("r2pipe package needed for disasm CFG")

    def get_rendering(cmd=None):
        # Runs `af;<cmd>` on the ELF blob through radare2 and returns stdout.
        if cmd is None:
            raise ValueError("No command given")
        # TODO(review): delete=False leaks the temp file — consider removing
        # it after use (mind Windows file-locking semantics).
        with NamedTemporaryFile(delete=False) as f:
            f.write(elf)
            f.flush()  # force write, radare2 needs a binary blob on disk
            # catch if r2pipe can actually talk to radare2
            try:
                flags = ['-e io.cache=true',  # fix relocations in disassembly
                         '-e scr.color=1',  # 16 bit ANSI colour terminal
                         ]
                r = r2pipe.open(f.name, flags=flags)
                data = r.cmd('af;%s' % cmd)
                r.quit()
            except Exception as e:
                if "radare2 in PATH" in str(e):
                    msg = ("This feature requires 'radare2' to be "
                           "installed and available on the system see: "
                           "https://github.com/radareorg/radare2. "
                           "Cannot find 'radare2' in $PATH.")
                    raise RuntimeError(msg)
                else:
                    raise e
        return data

    class DisasmCFG(object):

        def _repr_svg_(self):
            try:
                import graphviz
            except ImportError:
                raise RuntimeError("graphviz package needed for disasm CFG")
            jupyter_rendering = get_rendering(cmd='agfd')
            # str.replace returns a new string — the previous code discarded
            # the result, so the fontsize tweak was never applied.
            # This just makes it read slightly better in jupyter notebooks.
            jupyter_rendering = jupyter_rendering.replace(
                'fontname="Courier",', 'fontname="Courier",fontsize=6,')
            src = graphviz.Source(jupyter_rendering)
            return src.pipe('svg').decode('UTF-8')

        def __repr__(self):
            return get_rendering(cmd='agf')

    return DisasmCFG()
| [
"r2pipe.open",
"graphviz.Source",
"tempfile.NamedTemporaryFile"
] | [((524, 556), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'delete': '(False)'}), '(delete=False)\n', (542, 556), False, 'from tempfile import NamedTemporaryFile\n'), ((2012, 2046), 'graphviz.Source', 'graphviz.Source', (['jupyter_rendering'], {}), '(jupyter_rendering)\n', (2027, 2046), False, 'import graphviz\n'), ((939, 971), 'r2pipe.open', 'r2pipe.open', (['f.name'], {'flags': 'flags'}), '(f.name, flags=flags)\n', (950, 971), False, 'import r2pipe\n')] |
from django.conf.urls import url
from . import views
# URL routes for the exploits app: add, edit (CVE/name), detail view,
# removal, and listings (optionally filtered by an option segment).
urlpatterns = [
    url(r'^add/$', views.add_exploit, name='crits-exploits-views-add_exploit'),
    url(r'^edit/cve/$', views.edit_exploit_cve, name='crits-exploits-views-edit_exploit_cve'),
    url(r'^edit/name/(?P<id_>\S+)/$', views.edit_exploit_name, name='crits-exploits-views-edit_exploit_name'),
    url(r'^details/(?P<id_>\S+)/$', views.exploit_detail, name='crits-exploits-views-exploit_detail'),
    url(r'^remove/(?P<id_>\S+)/$', views.remove_exploit, name='crits-exploits-views-remove_exploit'),
    url(r'^list/$', views.exploits_listing, name='crits-exploits-views-exploits_listing'),
    url(r'^list/(?P<option>\S+)/$', views.exploits_listing, name='crits-exploits-views-exploits_listing'),
]
| [
"django.conf.urls.url"
] | [((75, 148), 'django.conf.urls.url', 'url', (['"""^add/$"""', 'views.add_exploit'], {'name': '"""crits-exploits-views-add_exploit"""'}), "('^add/$', views.add_exploit, name='crits-exploits-views-add_exploit')\n", (78, 148), False, 'from django.conf.urls import url\n'), ((155, 248), 'django.conf.urls.url', 'url', (['"""^edit/cve/$"""', 'views.edit_exploit_cve'], {'name': '"""crits-exploits-views-edit_exploit_cve"""'}), "('^edit/cve/$', views.edit_exploit_cve, name=\n 'crits-exploits-views-edit_exploit_cve')\n", (158, 248), False, 'from django.conf.urls import url\n'), ((250, 360), 'django.conf.urls.url', 'url', (['"""^edit/name/(?P<id_>\\\\S+)/$"""', 'views.edit_exploit_name'], {'name': '"""crits-exploits-views-edit_exploit_name"""'}), "('^edit/name/(?P<id_>\\\\S+)/$', views.edit_exploit_name, name=\n 'crits-exploits-views-edit_exploit_name')\n", (253, 360), False, 'from django.conf.urls import url\n'), ((361, 463), 'django.conf.urls.url', 'url', (['"""^details/(?P<id_>\\\\S+)/$"""', 'views.exploit_detail'], {'name': '"""crits-exploits-views-exploit_detail"""'}), "('^details/(?P<id_>\\\\S+)/$', views.exploit_detail, name=\n 'crits-exploits-views-exploit_detail')\n", (364, 463), False, 'from django.conf.urls import url\n'), ((464, 565), 'django.conf.urls.url', 'url', (['"""^remove/(?P<id_>\\\\S+)/$"""', 'views.remove_exploit'], {'name': '"""crits-exploits-views-remove_exploit"""'}), "('^remove/(?P<id_>\\\\S+)/$', views.remove_exploit, name=\n 'crits-exploits-views-remove_exploit')\n", (467, 565), False, 'from django.conf.urls import url\n'), ((566, 655), 'django.conf.urls.url', 'url', (['"""^list/$"""', 'views.exploits_listing'], {'name': '"""crits-exploits-views-exploits_listing"""'}), "('^list/$', views.exploits_listing, name=\n 'crits-exploits-views-exploits_listing')\n", (569, 655), False, 'from django.conf.urls import url\n'), ((657, 763), 'django.conf.urls.url', 'url', (['"""^list/(?P<option>\\\\S+)/$"""', 'views.exploits_listing'], {'name': 
'"""crits-exploits-views-exploits_listing"""'}), "('^list/(?P<option>\\\\S+)/$', views.exploits_listing, name=\n 'crits-exploits-views-exploits_listing')\n", (660, 763), False, 'from django.conf.urls import url\n')] |
"""Grades Model
"""
from django.core.exceptions import ValidationError
from django.db import models
from cs28.models import Student
from ..convert_to_ttpt import to_ttpt
class Grade(models.Model):
    """A single course grade for a student, with an optional updated grade."""
    # Code of the course this grade was awarded for.
    courseCode = models.CharField("Course Code",
                                  max_length=30)
    matricNo = models.ForeignKey(Student,
                                 on_delete=models.CASCADE,
                                 db_column="matricNo")
    # Alphanumeric band (A1..H) or a special code (CW/CR/MV).
    alphanum = models.CharField("Alphanumeric Grade",
                                max_length=2,
                                choices=[("A1", "A1"), ("A2", "A2"),
                                         ("A3", "A3"), ("A4", "A4"),
                                         ("A5", "A5"), ("B1", "B1"),
                                         ("B2", "B2"), ("B3", "B3"),
                                         ("C1", "C1"), ("C2", "C2"),
                                         ("C3", "C3"), ("D1", "D1"),
                                         ("D2", "D2"), ("D3", "D3"),
                                         ("E1", "E1"), ("E2", "E2"),
                                         ("E3", "E3"), ("F1", "F1"),
                                         ("F2", "F2"), ("F3", "F3"),
                                         ("G1", "G1"), ("G2", "G2"),
                                         ("H", "H"), ("CW", "CW"),
                                         ("CR", "CR"), ("MV", "MV")])
    # "-1" is a sentinel meaning "no updated grade recorded".
    updatedGrade = models.CharField("Alphanumeric Grade",
                                    max_length=2,
                                    default="-1",
                                    blank=True)
    notes = models.TextField(default="", blank=True)
    class Meta:
        # One grade row per (course, student) pair.
        constraints = [models.UniqueConstraint(fields=["courseCode",
                                                        "matricNo"],
                                                name='composite_key')]
        verbose_name_plural = "Grades"
        app_label = "cs28"
    def get_alphanum_as_num(self):
        # Convert the letter grade to its numeric (ttpt) equivalent.
        return to_ttpt(self.alphanum)
    def get_updated_as_num(self):
        # "-1" sentinel passes through unchanged when no update exists.
        tt = to_ttpt(self.updatedGrade) if self.updatedGrade != "-1" else "-1"
        return tt
    def is_grade_a_special_code(self):
        # The updated grade, when present, takes precedence over the original.
        alpha = self.alphanum
        updated = self.updatedGrade
        grade = updated if updated != "-1" else alpha
        return grade in ["MV", "CW", "CR"]
    def course_does_not_exist(self):
        # True when the course is not part of the student's academic plan.
        plan = self.matricNo.academicPlan
        return self.courseCode not in plan.get_courses()
    def clean(self):
        if self.course_does_not_exist():
            raise ValidationError(("Course code does not exist in student's "
                                   "academic plan."))
    def save(self, *args, **kwargs):
        # Validate and flag the student's grade data as stale before saving.
        self.clean()
        self.matricNo.set_grade_data_updated()
        super(Grade, self).save(*args, **kwargs)
    def delete(self, *args, **kwargs):
        self.matricNo.set_grade_data_updated()
        super(Grade, self).delete(*args, **kwargs)
    def __str__(self):
        return (self.matricNo.matricNo +
                " : " + self.courseCode +
                " " +
                self.alphanum)
| [
"django.db.models.UniqueConstraint",
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.core.exceptions.ValidationError",
"django.db.models.CharField"
] | [((217, 263), 'django.db.models.CharField', 'models.CharField', (['"""Course Code"""'], {'max_length': '(30)'}), "('Course Code', max_length=30)\n", (233, 263), False, 'from django.db import models\n'), ((314, 388), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Student'], {'on_delete': 'models.CASCADE', 'db_column': '"""matricNo"""'}), "(Student, on_delete=models.CASCADE, db_column='matricNo')\n", (331, 388), False, 'from django.db import models\n'), ((471, 919), 'django.db.models.CharField', 'models.CharField', (['"""Alphanumeric Grade"""'], {'max_length': '(2)', 'choices': "[('A1', 'A1'), ('A2', 'A2'), ('A3', 'A3'), ('A4', 'A4'), ('A5', 'A5'), (\n 'B1', 'B1'), ('B2', 'B2'), ('B3', 'B3'), ('C1', 'C1'), ('C2', 'C2'), (\n 'C3', 'C3'), ('D1', 'D1'), ('D2', 'D2'), ('D3', 'D3'), ('E1', 'E1'), (\n 'E2', 'E2'), ('E3', 'E3'), ('F1', 'F1'), ('F2', 'F2'), ('F3', 'F3'), (\n 'G1', 'G1'), ('G2', 'G2'), ('H', 'H'), ('CW', 'CW'), ('CR', 'CR'), (\n 'MV', 'MV')]"}), "('Alphanumeric Grade', max_length=2, choices=[('A1', 'A1'),\n ('A2', 'A2'), ('A3', 'A3'), ('A4', 'A4'), ('A5', 'A5'), ('B1', 'B1'), (\n 'B2', 'B2'), ('B3', 'B3'), ('C1', 'C1'), ('C2', 'C2'), ('C3', 'C3'), (\n 'D1', 'D1'), ('D2', 'D2'), ('D3', 'D3'), ('E1', 'E1'), ('E2', 'E2'), (\n 'E3', 'E3'), ('F1', 'F1'), ('F2', 'F2'), ('F3', 'F3'), ('G1', 'G1'), (\n 'G2', 'G2'), ('H', 'H'), ('CW', 'CW'), ('CR', 'CR'), ('MV', 'MV')])\n", (487, 919), False, 'from django.db import models\n'), ((1472, 1550), 'django.db.models.CharField', 'models.CharField', (['"""Alphanumeric Grade"""'], {'max_length': '(2)', 'default': '"""-1"""', 'blank': '(True)'}), "('Alphanumeric Grade', max_length=2, default='-1', blank=True)\n", (1488, 1550), False, 'from django.db import models\n'), ((1672, 1712), 'django.db.models.TextField', 'models.TextField', ([], {'default': '""""""', 'blank': '(True)'}), "(default='', blank=True)\n", (1688, 1712), False, 'from django.db import models\n'), ((1753, 1838), 'django.db.models.UniqueConstraint', 
'models.UniqueConstraint', ([], {'fields': "['courseCode', 'matricNo']", 'name': '"""composite_key"""'}), "(fields=['courseCode', 'matricNo'], name='composite_key'\n )\n", (1776, 1838), False, 'from django.db import models\n'), ((2630, 2703), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""Course code does not exist in student\'s academic plan."""'], {}), '("Course code does not exist in student\'s academic plan.")\n', (2645, 2703), False, 'from django.core.exceptions import ValidationError\n')] |
"""
Installs Arch-Linux when called from a live-iso
"""
import argparse
from pyscripts import s000_detect_hardware as hardware
from pyscripts import s00_user_input as user_input
from pyscripts import s01_partitions as partitions
from pyscripts import s02_basic_arch as basic_arch
from pyscripts import s03_package_manager as package_manager
from pyscripts import s04_packages as packages
from pyscripts import s05_languages as languages
from pyscripts import s06_bootloader as bootloader
from pyscripts import s07_fstab as fstab
from pyscripts import s08_timezone as timezone
from pyscripts import s09_hostname as hostname
from pyscripts import s10_desktop as desktop
from pyscripts import s11_autostart as autostart
from pyscripts import s12_shell as shell
from pyscripts import s13_pacman_reflector_hook as pacman_reflector_hook
from pyscripts import s14_users as users
import pyscripts.utilities as install_utilities
print(">>>> ARCH INSTALLER STARTED <<<<")
# Allow for additional info being printed during setup
parser = argparse.ArgumentParser()
parser.add_argument('--debug', action='store_true',
help='Print additional info during setup.')
args = parser.parse_args()
install_utilities.DEBUG = args.debug
if args.debug:
print('--- Debug info-printing enabled ---')
# Try to auto-detect the hardware currently installed
print(' >> Autodecting hardware...')
detected_hardware = {}
detected_hardware['cpu'] = hardware.get_cpu_vendor_id()
detected_hardware['gpu'] = hardware.get_gpu_vendor()
print(' >> Detected:')
print(' >> Graphics card vendor: ', detected_hardware['gpu'])
print(' >> Processor vendor: ', detected_hardware['cpu'])
print('')
ui = user_input.get_user_input(detected_hardware)
# Go through all the installation functions
partitions.create_and_mount()
basic_arch.install_basic_arch()
with install_utilities.fake_install_user() as user:
package_manager.install_package_manager(user)
packages.install_packages(ui, user)
languages.setup_languages(ui)
bootloader.configure_bootloader()
fstab.generate_fstab()
timezone.setup_timezone(ui)
hostname.setup_hostname(ui)
desktop.configure_desktop(ui)
autostart.autostart_add_services(ui)
shell.configure_shell()
users.configure_users(ui)
pacman_reflector_hook.configure_pacman_reflector_hook()
print(">>>> ARCH INSTALLER FINISHED <<<<") | [
"pyscripts.s07_fstab.generate_fstab",
"pyscripts.s10_desktop.configure_desktop",
"pyscripts.utilities.fake_install_user",
"pyscripts.s04_packages.install_packages",
"pyscripts.s000_detect_hardware.get_gpu_vendor",
"pyscripts.s000_detect_hardware.get_cpu_vendor_id",
"pyscripts.s02_basic_arch.install_basi... | [((1035, 1060), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1058, 1060), False, 'import argparse\n'), ((1446, 1474), 'pyscripts.s000_detect_hardware.get_cpu_vendor_id', 'hardware.get_cpu_vendor_id', ([], {}), '()\n', (1472, 1474), True, 'from pyscripts import s000_detect_hardware as hardware\n'), ((1502, 1527), 'pyscripts.s000_detect_hardware.get_gpu_vendor', 'hardware.get_gpu_vendor', ([], {}), '()\n', (1525, 1527), True, 'from pyscripts import s000_detect_hardware as hardware\n'), ((1688, 1732), 'pyscripts.s00_user_input.get_user_input', 'user_input.get_user_input', (['detected_hardware'], {}), '(detected_hardware)\n', (1713, 1732), True, 'from pyscripts import s00_user_input as user_input\n'), ((1778, 1807), 'pyscripts.s01_partitions.create_and_mount', 'partitions.create_and_mount', ([], {}), '()\n', (1805, 1807), True, 'from pyscripts import s01_partitions as partitions\n'), ((1808, 1839), 'pyscripts.s02_basic_arch.install_basic_arch', 'basic_arch.install_basic_arch', ([], {}), '()\n', (1837, 1839), True, 'from pyscripts import s02_basic_arch as basic_arch\n'), ((1982, 2011), 'pyscripts.s05_languages.setup_languages', 'languages.setup_languages', (['ui'], {}), '(ui)\n', (2007, 2011), True, 'from pyscripts import s05_languages as languages\n'), ((2012, 2045), 'pyscripts.s06_bootloader.configure_bootloader', 'bootloader.configure_bootloader', ([], {}), '()\n', (2043, 2045), True, 'from pyscripts import s06_bootloader as bootloader\n'), ((2046, 2068), 'pyscripts.s07_fstab.generate_fstab', 'fstab.generate_fstab', ([], {}), '()\n', (2066, 2068), True, 'from pyscripts import s07_fstab as fstab\n'), ((2069, 2096), 'pyscripts.s08_timezone.setup_timezone', 'timezone.setup_timezone', (['ui'], {}), '(ui)\n', (2092, 2096), True, 'from pyscripts import s08_timezone as timezone\n'), ((2097, 2124), 'pyscripts.s09_hostname.setup_hostname', 'hostname.setup_hostname', (['ui'], {}), '(ui)\n', (2120, 2124), 
True, 'from pyscripts import s09_hostname as hostname\n'), ((2125, 2154), 'pyscripts.s10_desktop.configure_desktop', 'desktop.configure_desktop', (['ui'], {}), '(ui)\n', (2150, 2154), True, 'from pyscripts import s10_desktop as desktop\n'), ((2155, 2191), 'pyscripts.s11_autostart.autostart_add_services', 'autostart.autostart_add_services', (['ui'], {}), '(ui)\n', (2187, 2191), True, 'from pyscripts import s11_autostart as autostart\n'), ((2192, 2215), 'pyscripts.s12_shell.configure_shell', 'shell.configure_shell', ([], {}), '()\n', (2213, 2215), True, 'from pyscripts import s12_shell as shell\n'), ((2216, 2241), 'pyscripts.s14_users.configure_users', 'users.configure_users', (['ui'], {}), '(ui)\n', (2237, 2241), True, 'from pyscripts import s14_users as users\n'), ((2242, 2297), 'pyscripts.s13_pacman_reflector_hook.configure_pacman_reflector_hook', 'pacman_reflector_hook.configure_pacman_reflector_hook', ([], {}), '()\n', (2295, 2297), True, 'from pyscripts import s13_pacman_reflector_hook as pacman_reflector_hook\n'), ((1845, 1882), 'pyscripts.utilities.fake_install_user', 'install_utilities.fake_install_user', ([], {}), '()\n', (1880, 1882), True, 'import pyscripts.utilities as install_utilities\n'), ((1896, 1941), 'pyscripts.s03_package_manager.install_package_manager', 'package_manager.install_package_manager', (['user'], {}), '(user)\n', (1935, 1941), True, 'from pyscripts import s03_package_manager as package_manager\n'), ((1946, 1981), 'pyscripts.s04_packages.install_packages', 'packages.install_packages', (['ui', 'user'], {}), '(ui, user)\n', (1971, 1981), True, 'from pyscripts import s04_packages as packages\n')] |
"""
@author: <NAME>
@contact: <EMAIL>
"""
import random
import copy
import numpy as np
from torch.utils.data.dataset import ConcatDataset
from torch.utils.data.sampler import Sampler
class DefaultSampler(Sampler):
    r"""Traverse all :math:`N` domains, randomly select :math:`K` samples in each domain to form a mini-batch of size
    :math:`N\times K`.

    Args:
        data_source (ConcatDataset): dataset that contains data from multiple domains
        batch_size (int): mini-batch size (:math:`N\times K` here)
    """
    def __init__(self, data_source: ConcatDataset, batch_size: int):
        super(Sampler, self).__init__()
        self.num_all_domains = len(data_source.cumulative_sizes)
        # Derive each domain's index pool from the concatenated dataset's
        # cumulative sizes: domain d owns [cumulative[d-1], cumulative[d]).
        self.sample_idxes_per_domain = []
        lower = 0
        for upper in data_source.cumulative_sizes:
            self.sample_idxes_per_domain.append(list(range(lower, upper)))
            lower = upper
        assert batch_size % self.num_all_domains == 0
        self.batch_size_per_domain = batch_size // self.num_all_domains
        # One full pass is materialized once so __len__ is cheap afterwards.
        self.length = len(list(self.__iter__()))

    def __iter__(self):
        # Work on private copies so repeated iteration starts fresh.
        pools = copy.deepcopy(self.sample_idxes_per_domain)
        final_idxes = []
        exhausted = False
        while not exhausted:
            # Visit every domain in order, drawing K samples without replacement.
            for domain in range(self.num_all_domains):
                chosen = random.sample(pools[domain], self.batch_size_per_domain)
                final_idxes += chosen
                for idx in chosen:
                    pools[domain].remove(idx)
                # Stop after this round once any domain can no longer fill a batch.
                if len(pools[domain]) < self.batch_size_per_domain:
                    exhausted = True
        return iter(final_idxes)

    def __len__(self):
        return self.length
class RandomDomainSampler(Sampler):
    r"""Randomly sample :math:`N` domains, then randomly select :math:`K` samples in each domain to form a mini-batch of
    size :math:`N\times K`.

    Args:
        data_source (ConcatDataset): dataset that contains data from multiple domains
        batch_size (int): mini-batch size (:math:`N\times K` here)
        n_domains_per_batch (int): number of domains to select in a single mini-batch (:math:`N` here)
    """
    def __init__(self, data_source: ConcatDataset, batch_size: int, n_domains_per_batch: int):
        super(Sampler, self).__init__()
        self.n_domains_in_dataset = len(data_source.cumulative_sizes)
        self.n_domains_per_batch = n_domains_per_batch
        assert self.n_domains_in_dataset >= self.n_domains_per_batch
        # Per-domain index pools derived from the cumulative dataset sizes.
        self.sample_idxes_per_domain = []
        lower = 0
        for upper in data_source.cumulative_sizes:
            self.sample_idxes_per_domain.append(list(range(lower, upper)))
            lower = upper
        assert batch_size % n_domains_per_batch == 0
        self.batch_size_per_domain = batch_size // n_domains_per_batch
        # One full pass is materialized once so __len__ is cheap afterwards.
        self.length = len(list(self.__iter__()))

    def __iter__(self):
        # Work on private copies so repeated iteration starts fresh.
        pools = copy.deepcopy(self.sample_idxes_per_domain)
        all_domains = [d for d in range(self.n_domains_in_dataset)]
        final_idxes = []
        exhausted = False
        while not exhausted:
            # Pick N domains at random for this round, then K samples per domain.
            for domain in random.sample(all_domains, self.n_domains_per_batch):
                pool = pools[domain]
                if len(pool) < self.batch_size_per_domain:
                    # Pool too small for a full batch: fall back to drawing
                    # with replacement so the batch is still filled.
                    chosen = np.random.choice(pool, self.batch_size_per_domain, replace=True)
                else:
                    chosen = random.sample(pool, self.batch_size_per_domain)
                final_idxes.extend(chosen)
                for idx in chosen:
                    # Membership check guards against duplicates from the
                    # with-replacement branch above.
                    if idx in pool:
                        pool.remove(idx)
                # Stop after this round once any visited domain runs low.
                if len(pool) < self.batch_size_per_domain:
                    exhausted = True
        return iter(final_idxes)

    def __len__(self):
        return self.length
| [
"numpy.random.choice",
"random.sample",
"copy.deepcopy"
] | [((1183, 1226), 'copy.deepcopy', 'copy.deepcopy', (['self.sample_idxes_per_domain'], {}), '(self.sample_idxes_per_domain)\n', (1196, 1226), False, 'import copy\n'), ((3199, 3242), 'copy.deepcopy', 'copy.deepcopy', (['self.sample_idxes_per_domain'], {}), '(self.sample_idxes_per_domain)\n', (3212, 3242), False, 'import copy\n'), ((3427, 3480), 'random.sample', 'random.sample', (['domain_idxes', 'self.n_domains_per_batch'], {}), '(domain_idxes, self.n_domains_per_batch)\n', (3440, 3480), False, 'import random\n'), ((1458, 1513), 'random.sample', 'random.sample', (['sample_idxes', 'self.batch_size_per_domain'], {}), '(sample_idxes, self.batch_size_per_domain)\n', (1471, 1513), False, 'import random\n'), ((3693, 3765), 'numpy.random.choice', 'np.random.choice', (['sample_idxes', 'self.batch_size_per_domain'], {'replace': '(True)'}), '(sample_idxes, self.batch_size_per_domain, replace=True)\n', (3709, 3765), True, 'import numpy as np\n'), ((3825, 3880), 'random.sample', 'random.sample', (['sample_idxes', 'self.batch_size_per_domain'], {}), '(sample_idxes, self.batch_size_per_domain)\n', (3838, 3880), False, 'import random\n')] |
from flask import Blueprint, render_template
from ..logic.crud import get_all_targets
pages = Blueprint('web', __name__, template_folder='../templates')
@pages.route('/targets/', methods=['GET'])
def get_web_targets():
    """Fetch every stored target and render the targets overview page."""
    all_targets = get_all_targets()
    return render_template('targets.html', targets=all_targets)
| [
"flask.render_template",
"flask.Blueprint"
] | [((96, 154), 'flask.Blueprint', 'Blueprint', (['"""web"""', '__name__'], {'template_folder': '"""../templates"""'}), "('web', __name__, template_folder='../templates')\n", (105, 154), False, 'from flask import Blueprint, render_template\n'), ((266, 314), 'flask.render_template', 'render_template', (['"""targets.html"""'], {'targets': 'targets'}), "('targets.html', targets=targets)\n", (281, 314), False, 'from flask import Blueprint, render_template\n')] |
#!/usr/bin/env python
import argparse
import logging
import json
import re
from collections import defaultdict
from ensembl.rest_client import RestClient
from ensembl.server_utils import assert_mysql_uri, assert_mysql_db_uri
class DatacheckClient(RestClient):
    """Client for checking databases using the datacheck service"""
    def submit_job(self, server_url, dbname, species, division, db_type,
                   datacheck_names, datacheck_groups, datacheck_types,
                   email, tag):
        """
        Run datachecks on a given server, for one or more species.

        Parameter requirements are complicated, because only the server_url is
        absolutely required; for lots of other parameters you need one from a
        set, but it doesn't matter which one.

        Arguments:
          server_url - location of server, in URI format
          dbname - name of a database to check
          species - name of a species to check
          division - name of a division to check
          db_type - type of database to check, defaults to 'core'
          datacheck_names - names of datacheck(s) to run, multiple values must be comma-separated
          datacheck_groups - datacheck group(s) to run, multiple values must be comma-separated
          datacheck_types - optional filter on type, 'critical' or 'advisory'
          email - optional address for an email on job completion
          tag - optional text for grouping datacheck submissions

        Returns the job identifier assigned by the REST service.
        """
        # Fail fast if the server location is not a valid MySQL URI.
        assert_mysql_uri(server_url)
        payload = {
            'server_url': server_url,
            'dbname': dbname,
            'species': species,
            'division': division,
            'db_type': db_type,
            'datacheck_names': [],
            'datacheck_groups': [],
            'datacheck_types': [],
            'email': email,
            'tag': tag
        }
        # Comma-separated CLI values are exploded into lists for the service;
        # unset filters are submitted as empty lists (the defaults above).
        if datacheck_names is not None:
            payload['datacheck_names'] = datacheck_names.split(',')
        if datacheck_groups is not None:
            payload['datacheck_groups'] = datacheck_groups.split(',')
        if datacheck_types is not None:
            payload['datacheck_types'] = datacheck_types.split(',')
        return RestClient.submit_job(self, payload)
    def list_jobs(self, output_file, pattern, failure_only=False):
        """
        Find jobs and print results

        Arguments:
          output_file - optional file to write report
          pattern - optional regular expression to filter jobs by tag
          failure_only - only report failed jobs
        """
        jobs = super(DatacheckClient, self).list_jobs()
        # No pattern means "match every tag" (including the empty tag).
        if pattern is None:
            pattern = '.*'
        tag_pattern = re.compile(pattern)
        output = []
        for job in jobs:
            if 'tag' in job['input']:
                tag = job['input']['tag']
            else:
                tag = ''
            if tag_pattern.search(tag):
                # Jobs without an 'output' key (still running) are always
                # included; finished jobs are filtered by failure_only.
                if 'output' in job:
                    if failure_only is True:
                        if job['output']['failed_total'] > 0:
                            output.append(job)
                    else:
                        output.append(job)
                else:
                    output.append(job)
        if output_file is None:
            print(json.dumps(output, indent=2))
        else:
            output_file.write(json.dumps(output))
    def print_job(self, job, print_results=False, print_input=False):
        """
        Render a job to logging

        Arguments:
          job : job to print
          print_results : set to True to print detailed results
          print_input : set to True to print input for job
        """
        logging.info("Job %s - %s" % (job['id'], job['status']))
        if print_input is True:
            self.print_inputs(job['input'])
        if job['status'] == 'complete':
            if print_results is True:
                logging.info("Submission status: " + str(job['status']))
                logging.info("Database passed: " + str(job['output']['passed_total']))
                logging.info("Database failed: " + str(job['output']['failed_total']))
                logging.info("Output directory: " + str(job['output']['output_dir']))
                logging.info("Per database results: ")
                logging.info(json.dumps(job['output']['databases'], indent=2))
        elif job['status'] == 'incomplete':
            if print_results is True:
                logging.info("Submission status: " + str(job['status']))
        elif job['status'] == 'failed':
            # Failure details are not fetched here; the commented-out calls
            # below show the intended retrieval (kept for reference).
            logging.info("Submission status: " + str(job['status']))
            # failures = self.retrieve_job_failure(job['id'])
            # logging.info("Error: " + str(failures))
        else:
            raise ValueError("Unknown status {}".format(job['status']))
    def print_inputs(self, i):
        """Utility to render a job input dict to logging"""
        # NOTE(review): 'registry_file' is read unconditionally while every
        # other key is guarded — presumably the service always adds it to the
        # stored input; confirm, otherwise this raises KeyError.
        logging.info("Registry file: " + i['registry_file'])
        if 'dbname' in i:
            for dbname in i['dbname']:
                logging.info("Database name: " + dbname)
        if 'species' in i:
            for species in i['species']:
                logging.info("Species name: " + species)
        if 'division' in i:
            for division in i['division']:
                logging.info("Division name: " + division)
        if 'db_type' in i:
            logging.info("Database type: " + i['db_type'])
        if 'datacheck_names' in i:
            for name in i['datacheck_names']:
                logging.info("Datacheck: " + name)
        if 'datacheck_groups' in i:
            for group in i['datacheck_groups']:
                logging.info("Datacheck group: " + group)
        if 'datacheck_types' in i:
            for datacheck_type in i['datacheck_types']:
                logging.info("Datacheck type: " + datacheck_type)
        if 'email' in i:
            logging.info("Email: " + i['email'])
        if 'tag' in i:
            logging.info("Tag: " + i['tag'])
if __name__ == '__main__':
    # Command-line front end for DatacheckClient: submit a job, retrieve one
    # by id, or list jobs filtered by tag / failure status.
    parser = argparse.ArgumentParser(description='Run datachecks via a REST service')
    parser.add_argument('-u', '--uri', help='Datacheck REST service URI', required=True)
    parser.add_argument('-a', '--action', help='Action to take',
                        choices=['submit', 'retrieve', 'list'], required=True)
    parser.add_argument('-i', '--job_id', help='Datacheck job identifier to retrieve')
    parser.add_argument('-v', '--verbose', help='Verbose output', action='store_true')
    parser.add_argument('-o', '--output_file', help='File to write output as JSON', type=argparse.FileType('w'))
    parser.add_argument('-s', '--server_url', help='URL of database server', required=True)
    parser.add_argument('-db', '--dbname', help='Database name')
    parser.add_argument('-sp', '--species', help='Species production name')
    parser.add_argument('-div', '--division', help='Division')
    parser.add_argument('-dbt', '--db_type', help='Database type')
    parser.add_argument('-n', '--datacheck_names', help='Datacheck names, multiple names comma-separated')
    parser.add_argument('-g', '--datacheck_groups', help='Datacheck groups, multiple names comma-separated')
    parser.add_argument('-dct', '--datacheck_types', help='Datacheck type (advisory or critical)')
    parser.add_argument('-e', '--email', help='Email address for pipeline reports')
    parser.add_argument('-t', '--tag', help='Tag to collate results and facilitate filtering')
    parser.add_argument('-f', '--failure_only', help='Show failures only', action='store_true')
    args = parser.parse_args()
    # --verbose switches root logging from INFO to DEBUG.
    if args.verbose is True:
        logging.basicConfig(level=logging.DEBUG, format='%(message)s')
    else:
        logging.basicConfig(level=logging.INFO, format='%(message)s')
    client = DatacheckClient(args.uri)
    if args.action == 'submit':
        job_id = client.submit_job(args.server_url, args.dbname, args.species, args.division, args.db_type,
                                   args.datacheck_names, args.datacheck_groups, args.datacheck_types,
                                   args.email, args.tag)
        logging.info('Job submitted with ID ' + str(job_id))
    elif args.action == 'retrieve':
        job = client.retrieve_job(args.job_id)
        client.print_job(job, print_results=True, print_input=True)
    elif args.action == 'list':
        jobs = client.list_jobs(args.output_file, args.tag, args.failure_only)
| [
"logging.basicConfig",
"argparse.FileType",
"argparse.ArgumentParser",
"re.compile",
"json.dumps",
"ensembl.server_utils.assert_mysql_uri",
"ensembl.rest_client.RestClient.submit_job",
"logging.info"
] | [((5963, 6035), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Run datachecks via a REST service"""'}), "(description='Run datachecks via a REST service')\n", (5986, 6035), False, 'import argparse\n'), ((1433, 1461), 'ensembl.server_utils.assert_mysql_uri', 'assert_mysql_uri', (['server_url'], {}), '(server_url)\n', (1449, 1461), False, 'from ensembl.server_utils import assert_mysql_uri, assert_mysql_db_uri\n'), ((2160, 2196), 'ensembl.rest_client.RestClient.submit_job', 'RestClient.submit_job', (['self', 'payload'], {}), '(self, payload)\n', (2181, 2196), False, 'from ensembl.rest_client import RestClient\n'), ((2611, 2630), 're.compile', 're.compile', (['pattern'], {}), '(pattern)\n', (2621, 2630), False, 'import re\n'), ((3587, 3643), 'logging.info', 'logging.info', (["('Job %s - %s' % (job['id'], job['status']))"], {}), "('Job %s - %s' % (job['id'], job['status']))\n", (3599, 3643), False, 'import logging\n'), ((4831, 4883), 'logging.info', 'logging.info', (["('Registry file: ' + i['registry_file'])"], {}), "('Registry file: ' + i['registry_file'])\n", (4843, 4883), False, 'import logging\n'), ((7580, 7642), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'format': '"""%(message)s"""'}), "(level=logging.DEBUG, format='%(message)s')\n", (7599, 7642), False, 'import logging\n'), ((7661, 7722), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': '"""%(message)s"""'}), "(level=logging.INFO, format='%(message)s')\n", (7680, 7722), False, 'import logging\n'), ((5300, 5346), 'logging.info', 'logging.info', (["('Database type: ' + i['db_type'])"], {}), "('Database type: ' + i['db_type'])\n", (5312, 5346), False, 'import logging\n'), ((5815, 5851), 'logging.info', 'logging.info', (["('Email: ' + i['email'])"], {}), "('Email: ' + i['email'])\n", (5827, 5851), False, 'import logging\n'), ((5887, 5919), 'logging.info', 'logging.info', (["('Tag: ' + i['tag'])"], {}), "('Tag: 
' + i['tag'])\n", (5899, 5919), False, 'import logging\n'), ((6533, 6555), 'argparse.FileType', 'argparse.FileType', (['"""w"""'], {}), "('w')\n", (6550, 6555), False, 'import argparse\n'), ((3210, 3238), 'json.dumps', 'json.dumps', (['output'], {'indent': '(2)'}), '(output, indent=2)\n', (3220, 3238), False, 'import json\n'), ((3284, 3302), 'json.dumps', 'json.dumps', (['output'], {}), '(output)\n', (3294, 3302), False, 'import json\n'), ((4147, 4185), 'logging.info', 'logging.info', (['"""Per database results: """'], {}), "('Per database results: ')\n", (4159, 4185), False, 'import logging\n'), ((4965, 5005), 'logging.info', 'logging.info', (["('Database name: ' + dbname)"], {}), "('Database name: ' + dbname)\n", (4977, 5005), False, 'import logging\n'), ((5090, 5130), 'logging.info', 'logging.info', (["('Species name: ' + species)"], {}), "('Species name: ' + species)\n", (5102, 5130), False, 'import logging\n'), ((5218, 5260), 'logging.info', 'logging.info', (["('Division name: ' + division)"], {}), "('Division name: ' + division)\n", (5230, 5260), False, 'import logging\n'), ((5444, 5478), 'logging.info', 'logging.info', (["('Datacheck: ' + name)"], {}), "('Datacheck: ' + name)\n", (5456, 5478), False, 'import logging\n'), ((5579, 5620), 'logging.info', 'logging.info', (["('Datacheck group: ' + group)"], {}), "('Datacheck group: ' + group)\n", (5591, 5620), False, 'import logging\n'), ((5728, 5777), 'logging.info', 'logging.info', (["('Datacheck type: ' + datacheck_type)"], {}), "('Datacheck type: ' + datacheck_type)\n", (5740, 5777), False, 'import logging\n'), ((4215, 4263), 'json.dumps', 'json.dumps', (["job['output']['databases']"], {'indent': '(2)'}), "(job['output']['databases'], indent=2)\n", (4225, 4263), False, 'import json\n')] |
import requests
import json
from .osvc_python_file_handling import OSvCPythonFileHandler
from .osvc_python_config import OSvCPythonConfig
from .osvc_python_validations import OSvCPythonValidations
from .osvc_python_examples import CLIENT_NOT_DEFINED,CLIENT_NO_INTERFACE_SET_EXAMPLE,CLIENT_NO_USERNAME_SET_EXAMPLE,CLIENT_NO_PASSWORD_SET_EXAMPLE
class OSvCPythonConnect:
    """Thin HTTP wrapper for the Oracle Service Cloud REST API.

    Each public method tags the keyword arguments with a 'verb' and delegates
    to the shared request pipeline, which validates the client, builds the
    request dict, and normalizes the response.
    """
    def __init__(self):
        pass
    def get(self,**kwargs):
        # 'url' is optional for GET; default to the interface root.
        if "url" not in kwargs:
            kwargs["url"] = ""
        kwargs['verb'] = "get"
        return self.__generic_http_request(kwargs)
    def post(self,**kwargs):
        kwargs['verb'] = "post"
        return self.__generic_http_request(kwargs)
    def patch(self,**kwargs):
        kwargs['verb'] = "patch"
        return self.__generic_http_request(kwargs)
    def delete(self,**kwargs):
        kwargs['verb'] = "delete"
        return self.__generic_http_request(kwargs)
    def options(self,**kwargs):
        kwargs['verb'] = "options"
        return self.__generic_http_request(kwargs)
    def build_request_data(self, kwargs):
        """Assemble the keyword dict passed to requests.request().

        Validates the client configuration, then derives SSL verification,
        the final URL, headers, and (when a username is set) basic auth.
        """
        client = self.__check_client(kwargs)
        request_data = {
            "verify" : not client.no_ssl_verify,
            "url" : OSvCPythonConfig().url_format(kwargs),
            "headers": OSvCPythonConfig().headers_check(kwargs)
        }
        if client.username!="":
            request_data["auth"] = (client.username,client.password)
        return request_data
    def __generic_http_request(self,kwargs):
        # Shared pipeline: build request data, apply verb-specific handling,
        # send, and normalize the response.
        final_request_data = self.build_request_data(kwargs)
        download_local = None
        if kwargs['verb'] == "get":
            # GETs may be file downloads; stream the body if so.
            download_local = self.__download_check(kwargs)
            final_request_data["stream"] = download_local["stream"]
        elif kwargs['verb'] in ["post","patch"]:
            # NOTE(review): PATCH is rewritten to POST here — presumably the
            # headers built by OSvCPythonConfig carry an override (e.g.
            # X-HTTP-Method-Override) so the server still treats it as a
            # PATCH; confirm against osvc_python_config.
            kwargs['original_verb'] = kwargs['verb']
            kwargs['verb'] = "post"
            final_request_data["data"] = json.dumps(OSvCPythonFileHandler().upload_check(kwargs))
        kwargs['download'] = download_local
        try:
            return self.__print_response(requests.request(kwargs['verb'],**final_request_data), kwargs)
        except requests.exceptions.ConnectionError as e:
            print("\n\033[31mError: Cannot connect to %s \033[0m" % final_request_data["url"])
            print("\n\nYou should check the 'interface' value set in the OSvCPythonClient\nor check your internet connection\n\n")
    def __print_response(self,response,kwargs):
        # Shape the return value per verb: file downloads are written to disk,
        # OPTIONS returns headers, DELETE/PATCH return raw bytes, everything
        # else is decoded as JSON. debug=True returns the raw Response object.
        if kwargs['verb'] == "get" and "download" in kwargs and kwargs["download"]["stream"] == True:
            return OSvCPythonFileHandler().download_file(response,kwargs["download"])
        if kwargs.get("debug") == True:
            return response
        if kwargs['verb'] == "options":
            return response.headers
        if kwargs['verb'] == "delete" or ('original_verb' in kwargs and kwargs['original_verb'] == "patch"):
            return response.content
        else:
            return response.json()
    def __download_check(self,kwargs):
        # A '?download' suffix marks the request as a file download: fetch the
        # file metadata first to derive a local file name, then stream.
        if kwargs.get("url").find("?download") > -1:
            resource_url = kwargs.get("url").replace("?download","")
            file_data = self.get(client=kwargs.get("client"),url=resource_url)
            file_name = OSvCPythonFileHandler().set_file_name(file_data)
            return {"file_name" : file_name, "stream" : True}
        else:
            return {"file_name" : None, "stream" : False }
    def __check_client(self,kwargs):
        # Every call must carry a 'client' keyword argument.
        if 'client' in kwargs:
            return self.__check_client_props(kwargs.get('client'))
        else:
            return OSvCPythonValidations().custom_error("Client must be defined in keyword arguments",CLIENT_NOT_DEFINED)
    def __check_client_props(self, client):
        # Validate the client: an interface is mandatory, and username /
        # password must be set together or not at all.
        if client.interface == None:
            return OSvCPythonValidations().custom_error("Client interface cannot be undefined.",CLIENT_NO_INTERFACE_SET_EXAMPLE)
        if client.username == None and client.password != None:
            return OSvCPythonValidations().custom_error("Password is set but username is not.",CLIENT_NO_USERNAME_SET_EXAMPLE)
        if client.password == None and client.username != None:
            return OSvCPythonValidations().custom_error("Username is set but password is not.",CLIENT_NO_PASSWORD_SET_EXAMPLE)
        return client
"requests.request"
] | [((1855, 1909), 'requests.request', 'requests.request', (["kwargs['verb']"], {}), "(kwargs['verb'], **final_request_data)\n", (1871, 1909), False, 'import requests\n')] |
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the 'License'); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import simplejson as json
except ImportError:
import json
import mock
import requests
from st2common.services.access import delete_token
from st2common.triggers import InternalTriggerTypesRegistrar
from st2common.util import date as date_utils
from st2tests.base import (DbTestCase, FakeResponse)
from st2tests import config as tests_config
# Register st2 test configuration options before DbTestCase subclasses run.
tests_config.parse_args()
# Minimal trigger-type payload used as the mocked HTTP response body below.
FAKE_TRIGGER = {
    'name': 'foo',
    'pack': 'bar',
    'parameters': {}
}
class InternalTriggerTypesTests(DbTestCase):
    """Tests for InternalTriggerTypesRegistrar's auth token handling and
    trigger-type lookups (HTTP calls are mocked where needed).
    """
    def test_token_successfully_obtained(self):
        # The registrar should acquire auth credentials on construction.
        time_now = date_utils.get_datetime_utc_now()
        registrar = InternalTriggerTypesRegistrar()
        self.assertTrue(registrar._auth_creds is not None)
        # TTL is at least 10 mins
        self.assertTrue((registrar._auth_creds.expiry - time_now).seconds > 10 * 60)
        # Clean up the token so it does not leak into other tests.
        delete_token(registrar._auth_creds.token)
    def test_get_trigger_type_url(self):
        registrar = InternalTriggerTypesRegistrar()
        url = registrar._get_trigger_type_url('foo.bar')
        self.assertEqual(url, 'http://localhost:9101/v1/triggertypes/foo.bar')
        delete_token(registrar._auth_creds.token)
    # NOTE(review): 'triger' typo in the two test names below — harmless, but
    # renaming would be a separate cleanup.
    @mock.patch.object(
        requests, 'get',
        mock.MagicMock(return_value=FakeResponse(json.dumps(FAKE_TRIGGER), 200, 'OK')))
    def test_is_triger_type_exists_happy_case(self):
        # A 200 response means the trigger type exists.
        registrar = InternalTriggerTypesRegistrar()
        is_exists = registrar._is_triggertype_exists('bar.foo')
        self.assertEqual(is_exists, True)
        delete_token(registrar._auth_creds.token)
    @mock.patch.object(
        requests, 'get',
        mock.MagicMock(return_value=FakeResponse(json.dumps('trigger not found'), 404,
                                                 'NOT FOUND')))
    def test_is_triger_type_exists_sad_case(self):
        # A 404 response means the trigger type does not exist.
        registrar = InternalTriggerTypesRegistrar()
        is_exists = registrar._is_triggertype_exists('bar.foo')
        self.assertEqual(is_exists, False)
        delete_token(registrar._auth_creds.token)
| [
"st2common.util.date.get_datetime_utc_now",
"json.dumps",
"st2common.services.access.delete_token",
"st2common.triggers.InternalTriggerTypesRegistrar",
"st2tests.config.parse_args"
] | [((1138, 1163), 'st2tests.config.parse_args', 'tests_config.parse_args', ([], {}), '()\n', (1161, 1163), True, 'from st2tests import config as tests_config\n'), ((1358, 1391), 'st2common.util.date.get_datetime_utc_now', 'date_utils.get_datetime_utc_now', ([], {}), '()\n', (1389, 1391), True, 'from st2common.util import date as date_utils\n'), ((1412, 1443), 'st2common.triggers.InternalTriggerTypesRegistrar', 'InternalTriggerTypesRegistrar', ([], {}), '()\n', (1441, 1443), False, 'from st2common.triggers import InternalTriggerTypesRegistrar\n'), ((1630, 1671), 'st2common.services.access.delete_token', 'delete_token', (['registrar._auth_creds.token'], {}), '(registrar._auth_creds.token)\n', (1642, 1671), False, 'from st2common.services.access import delete_token\n'), ((1734, 1765), 'st2common.triggers.InternalTriggerTypesRegistrar', 'InternalTriggerTypesRegistrar', ([], {}), '()\n', (1763, 1765), False, 'from st2common.triggers import InternalTriggerTypesRegistrar\n'), ((1910, 1951), 'st2common.services.access.delete_token', 'delete_token', (['registrar._auth_creds.token'], {}), '(registrar._auth_creds.token)\n', (1922, 1951), False, 'from st2common.services.access import delete_token\n'), ((2163, 2194), 'st2common.triggers.InternalTriggerTypesRegistrar', 'InternalTriggerTypesRegistrar', ([], {}), '()\n', (2192, 2194), False, 'from st2common.triggers import InternalTriggerTypesRegistrar\n'), ((2309, 2350), 'st2common.services.access.delete_token', 'delete_token', (['registrar._auth_creds.token'], {}), '(registrar._auth_creds.token)\n', (2321, 2350), False, 'from st2common.services.access import delete_token\n'), ((2623, 2654), 'st2common.triggers.InternalTriggerTypesRegistrar', 'InternalTriggerTypesRegistrar', ([], {}), '()\n', (2652, 2654), False, 'from st2common.triggers import InternalTriggerTypesRegistrar\n'), ((2770, 2811), 'st2common.services.access.delete_token', 'delete_token', (['registrar._auth_creds.token'], {}), '(registrar._auth_creds.token)\n', 
(2782, 2811), False, 'from st2common.services.access import delete_token\n'), ((2051, 2075), 'json.dumps', 'json.dumps', (['FAKE_TRIGGER'], {}), '(FAKE_TRIGGER)\n', (2061, 2075), False, 'import json\n'), ((2450, 2481), 'json.dumps', 'json.dumps', (['"""trigger not found"""'], {}), "('trigger not found')\n", (2460, 2481), False, 'import json\n')] |
import os
def touch_dir(wanted_dir):
    """Create ``wanted_dir`` and any missing parent directories.

    Behaves like ``mkdir -p``: directories that already exist are left
    untouched, and an empty path is a no-op (matching the behaviour of the
    original hand-rolled split/mkdir loop).

    :param wanted_dir: path of the directory to create
    """
    if wanted_dir:
        # os.makedirs replaces the manual split/mkdir loop. exist_ok=True
        # removes the race between the existence check and creation, and
        # fixes the original's double-mkdir failure on paths with a
        # trailing separator (where os.path.split yields an empty step).
        os.makedirs(wanted_dir, exist_ok=True)
"os.path.exists",
"os.path.join",
"os.mkdir",
"os.path.split"
] | [((167, 192), 'os.path.split', 'os.path.split', (['wanted_dir'], {}), '(wanted_dir)\n', (180, 192), False, 'import os\n'), ((354, 384), 'os.path.join', 'os.path.join', (['wanted_dir', 'step'], {}), '(wanted_dir, step)\n', (366, 384), False, 'import os\n'), ((393, 413), 'os.mkdir', 'os.mkdir', (['wanted_dir'], {}), '(wanted_dir)\n', (401, 413), False, 'import os\n'), ((79, 105), 'os.path.exists', 'os.path.exists', (['wanted_dir'], {}), '(wanted_dir)\n', (93, 105), False, 'import os\n')] |
"""
Module contains all functions working on users page.
Functions:
users_page()
edit_user(id)
delete_user(id)
check_session()
"""
import os
import sys
import urllib.parse
from flask_login import login_user, login_required
from flask import render_template, request, redirect, Blueprint, session
# Make the parent directory importable so the `models` package can be found
# when this module is imported from inside the package directory.
sys.path.append(os.path.abspath(os.path.join('..')))
from models.users import User
# NOTE(review): assumes the user with primary key 1 is the admin account and
# that the users table is already populated at import time — verify; an empty
# table would make this raise AttributeError on import.
ADMIN = User.query.get(1).login
# Base URL of the local development server (used by API redirects).
BASE_URL = 'http://127.0.0.1:5000/'
# Blueprint grouping all user-management routes of this module.
api_users = Blueprint('api_users', __name__)
@api_users.route('/users', methods=['POST', 'GET'])
@login_required
def users_page():
    """
    Render the users page.

    Admin session: on POST, forward the submitted credentials to the
    user-creation API endpoint; on GET, show the full user table.
    Regular session: show only the current user's own page.
    :return: rendered template or a redirect to the add-user API
    """
    current = session.get('user')
    if current and current[0] == ADMIN:
        if request.method == 'POST':
            new_login = request.form.get('login')
            new_password = request.form.get('password')
            # NOTE(review): credentials travel in the query string here —
            # they may end up in server logs; confirm this is intended.
            query = (f'?login={session["user"][0]}&password={session["user"][1]}'
                     f'&new_login={urllib.parse.quote(new_login)}&new_password={urllib.parse.quote(new_password)}&page=True')
            return redirect('/api/users/add' + query)
        return render_template('users_for_admin.html', users=User.query.all())
    user = User.query.filter_by(login=current[0]).first()
    return render_template('users.html', user=user)
@api_users.route('/users/<int:id>/edit', methods=['GET', 'POST'])
@login_required
def edit_user(id):
    """
    Edit a specific user (admin only).

    :param id: primary key of the user being edited
    :return: the admin users template on GET, a redirect to the edit API
        on POST, or a redirect back to /users for non-admins / unknown ids
    """
    current = session.get('user')
    # Guard clauses: only an admin session editing an existing user proceeds.
    if not (current and current[0] == ADMIN):
        return redirect('/users')
    if not User.query.get(id):
        return redirect('/users')
    if request.method == 'POST':
        new_login = request.form.get('new_login')
        new_password = request.form.get('new_password')
        query = (f'?login={session["user"][0]}&password={session["user"][1]}'
                 f'&id={id}&new_login={urllib.parse.quote(new_login)}&new_password={urllib.parse.quote(new_password)}&page=True')
        return redirect('/api/users/edit' + query)
    return render_template('users_for_admin.html', id=id, users=User.query.all())
@api_users.route('/users/<int:id>/del')
@login_required
def delete_user(id):
    """
    Delete a specific user by id (admin only).

    :param id: primary key of the user to delete
    :return: a redirect to the delete API for an admin session, otherwise
        a redirect back to the users page
    """
    if session.get('user') and session.get('user')[0] == ADMIN:
        data = f'?login={session["user"][0]}&password={session["user"][1]}' \
               f'&id={id}&page=True'
        return redirect('/api/users/del' + data)
    # Bug fix: the original fell through and returned None here, which makes
    # Flask raise a TypeError (HTTP 500) for non-admin users.  Redirect back
    # to the users page instead, consistent with edit_user().
    return redirect('/users')
@api_users.before_request
def check_session():
    """
    Log the session's user in before every request on this blueprint.

    If no user is stored in the session, redirect to the main page;
    otherwise look the user up by login and sign them in via flask-login
    with a non-permanent session.
    :return: None on success, or a redirect to '/'
    """
    stored = session.get('user')
    if not stored:
        return redirect('/')
    matches = User.query.filter_by(login=stored[0]).all()
    login_user(matches[0])
    session.permanent = False
| [
"flask.render_template",
"models.users.User.query.get",
"flask.session.get",
"flask_login.login_user",
"os.path.join",
"flask.request.form.get",
"flask.redirect",
"flask.Blueprint",
"models.users.User.query.all"
] | [((479, 511), 'flask.Blueprint', 'Blueprint', (['"""api_users"""', '__name__'], {}), "('api_users', __name__)\n", (488, 511), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((407, 424), 'models.users.User.query.get', 'User.query.get', (['(1)'], {}), '(1)\n', (421, 424), False, 'from models.users import User\n'), ((1464, 1504), 'flask.render_template', 'render_template', (['"""users.html"""'], {'user': 'user'}), "('users.html', user=user)\n", (1479, 1504), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((3197, 3216), 'flask.session.get', 'session.get', (['"""user"""'], {}), "('user')\n", (3208, 3216), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((346, 364), 'os.path.join', 'os.path.join', (['""".."""'], {}), "('..')\n", (358, 364), False, 'import os\n'), ((840, 859), 'flask.session.get', 'session.get', (['"""user"""'], {}), "('user')\n", (851, 859), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((913, 929), 'models.users.User.query.all', 'User.query.all', ([], {}), '()\n', (927, 929), False, 'from models.users import User\n'), ((1330, 1382), 'flask.render_template', 'render_template', (['"""users_for_admin.html"""'], {'users': 'users'}), "('users_for_admin.html', users=users)\n", (1345, 1382), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((1841, 1860), 'flask.session.get', 'session.get', (['"""user"""'], {}), "('user')\n", (1852, 1860), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((1914, 1930), 'models.users.User.query.all', 'User.query.all', ([], {}), '()\n', (1928, 1930), False, 'from models.users import User\n'), ((1942, 1960), 'models.users.User.query.get', 'User.query.get', (['id'], {}), '(id)\n', (1956, 1960), False, 'from models.users import User\n'), ((2482, 2500), 'flask.redirect', 'redirect', (['"""/users"""'], 
{}), "('/users')\n", (2490, 2500), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((2759, 2778), 'flask.session.get', 'session.get', (['"""user"""'], {}), "('user')\n", (2770, 2778), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((2946, 2979), 'flask.redirect', 'redirect', (["('/api/users/del' + data)"], {}), "('/api/users/del' + data)\n", (2954, 2979), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((3299, 3319), 'flask_login.login_user', 'login_user', (['users[0]'], {}), '(users[0])\n', (3309, 3319), False, 'from flask_login import login_user, login_required\n'), ((3379, 3392), 'flask.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (3387, 3392), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((987, 1012), 'flask.request.form.get', 'request.form.get', (['"""login"""'], {}), "('login')\n", (1003, 1012), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((1036, 1064), 'flask.request.form.get', 'request.form.get', (['"""password"""'], {}), "('password')\n", (1052, 1064), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((1281, 1314), 'flask.redirect', 'redirect', (["('/api/users/add' + data)"], {}), "('/api/users/add' + data)\n", (1289, 1314), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((2407, 2466), 'flask.render_template', 'render_template', (['"""users_for_admin.html"""'], {'id': 'id', 'users': 'users'}), "('users_for_admin.html', id=id, users=users)\n", (2422, 2466), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((864, 883), 'flask.session.get', 'session.get', (['"""user"""'], {}), "('user')\n", (875, 883), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((1865, 1884), 'flask.session.get', 'session.get', 
(['"""user"""'], {}), "('user')\n", (1876, 1884), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((2027, 2056), 'flask.request.form.get', 'request.form.get', (['"""new_login"""'], {}), "('new_login')\n", (2043, 2056), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((2084, 2116), 'flask.request.form.get', 'request.form.get', (['"""new_password"""'], {}), "('new_password')\n", (2100, 2116), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((2353, 2387), 'flask.redirect', 'redirect', (["('/api/users/edit' + data)"], {}), "('/api/users/edit' + data)\n", (2361, 2387), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((2783, 2802), 'flask.session.get', 'session.get', (['"""user"""'], {}), "('user')\n", (2794, 2802), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((1421, 1440), 'flask.session.get', 'session.get', (['"""user"""'], {}), "('user')\n", (1432, 1440), False, 'from flask import render_template, request, redirect, Blueprint, session\n'), ((3261, 3280), 'flask.session.get', 'session.get', (['"""user"""'], {}), "('user')\n", (3272, 3280), False, 'from flask import render_template, request, redirect, Blueprint, session\n')] |
# Generated by Django 3.0.8 on 2020-11-23 11:30
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial migration: creates the Equipment and Data tables."""

    # First migration of this app.
    initial = True
    # Requires the account app's User model (referenced by the M2M below).
    dependencies = [
        ('account', '0003_confirmstring'),
    ]
    operations = [
        # Equipment: a named device shared by many users (M2M to account.User).
        migrations.CreateModel(
            name='Equipment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=128, unique=True)),
                ('descript', models.CharField(max_length=256)),
                ('created_time', models.DateTimeField(auto_now_add=True)),
                ('user', models.ManyToManyField(to='account.User')),
            ],
            options={
                'verbose_name': 'Equipment',
                'verbose_name_plural': 'Equipments',
                'ordering': ['-created_time'],
            },
        ),
        # Data: a key/value measurement attached to one Equipment
        # (rows are deleted in cascade with their equipment).
        migrations.CreateModel(
            name='Data',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('key', models.CharField(max_length=256)),
                ('value', models.FloatField()),
                ('descript', models.CharField(max_length=256)),
                ('created_time', models.DateTimeField(auto_now_add=True)),
                ('equipment', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='polls.Equipment')),
            ],
            options={
                'verbose_name': 'Data',
                'verbose_name_plural': 'Datas',
                'ordering': ['-created_time'],
            },
        ),
    ]
| [
"django.db.models.FloatField",
"django.db.models.ForeignKey",
"django.db.models.ManyToManyField",
"django.db.models.AutoField",
"django.db.models.DateTimeField",
"django.db.models.CharField"
] | [((381, 474), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (397, 474), False, 'from django.db import migrations, models\n'), ((498, 543), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)', 'unique': '(True)'}), '(max_length=128, unique=True)\n', (514, 543), False, 'from django.db import migrations, models\n'), ((575, 607), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)'}), '(max_length=256)\n', (591, 607), False, 'from django.db import migrations, models\n'), ((643, 682), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (663, 682), False, 'from django.db import migrations, models\n'), ((710, 751), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'to': '"""account.User"""'}), "(to='account.User')\n", (732, 751), False, 'from django.db import migrations, models\n'), ((1063, 1156), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1079, 1156), False, 'from django.db import migrations, models\n'), ((1179, 1211), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)'}), '(max_length=256)\n', (1195, 1211), False, 'from django.db import migrations, models\n'), ((1240, 1259), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (1257, 1259), False, 'from django.db import migrations, models\n'), ((1291, 1323), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)'}), '(max_length=256)\n', (1307, 1323), False, 'from django.db import migrations, models\n'), ((1359, 1398), 
'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1379, 1398), False, 'from django.db import migrations, models\n'), ((1431, 1520), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""polls.Equipment"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'polls.Equipment')\n", (1448, 1520), False, 'from django.db import migrations, models\n')] |
# -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/Contract
Release: STU3
Version: 3.0.2
Revision: 11917
Last updated: 2019-10-24T11:53:00+11:00
"""
import io
import json
import os
import unittest
import pytest
from .. import contract
from ..fhirdate import FHIRDate
from .fixtures import force_bytes
@pytest.mark.usefixtures("base_settings")
class ContractTests(unittest.TestCase):
    """Round-trip tests for the STU3 Contract resource.

    Each testContractN loads a canned example JSON file, verifies the
    parsed field values (implContractN), serialises the instance back to
    JSON, re-parses it, and verifies the same values again.
    """
    def instantiate_from(self, filename):
        """Load *filename* from FHIR_UNITTEST_DATADIR and return a Contract."""
        datadir = os.environ.get("FHIR_UNITTEST_DATADIR") or ""
        with io.open(os.path.join(datadir, filename), "r", encoding="utf-8") as handle:
            js = json.load(handle)
            self.assertEqual("Contract", js["resourceType"])
        return contract.Contract(js)
    def testContract1(self):
        """Round-trip the pcd-example-notOrg.json example."""
        inst = self.instantiate_from("pcd-example-notOrg.json")
        self.assertIsNotNone(inst, "Must have instantiated a Contract instance")
        self.implContract1(inst)
        js = inst.as_json()
        self.assertEqual("Contract", js["resourceType"])
        inst2 = contract.Contract(js)
        self.implContract1(inst2)
    def implContract1(self, inst):
        """Check field values of the pcd-example-notOrg example."""
        self.assertEqual(
            force_bytes(inst.friendly[0].contentAttachment.title),
            force_bytes("The terms of the consent in friendly consumer speak."),
        )
        self.assertEqual(force_bytes(inst.id), force_bytes("pcd-example-notOrg"))
        self.assertEqual(inst.issued.date, FHIRDate("2015-11-18").date)
        self.assertEqual(inst.issued.as_json(), "2015-11-18")
        self.assertEqual(
            force_bytes(inst.legal[0].contentAttachment.title),
            force_bytes("The terms of the consent in lawyer speak."),
        )
        self.assertEqual(
            force_bytes(inst.subType[0].coding[0].code), force_bytes("Opt-In")
        )
        self.assertEqual(
            force_bytes(inst.subType[0].coding[0].display),
            force_bytes("Default Authorization with exceptions."),
        )
        self.assertEqual(
            force_bytes(inst.subType[0].coding[0].system),
            force_bytes("http://www.infoway-inforoute.ca.org/Consent-subtype-codes"),
        )
        self.assertEqual(
            force_bytes(inst.term[0].text),
            force_bytes(
                "Withhold this order and any results or related objects from any provider."
            ),
        )
        self.assertEqual(
            force_bytes(inst.term[0].type.coding[0].code), force_bytes("withhold-from")
        )
        self.assertEqual(
            force_bytes(inst.term[0].type.coding[0].display),
            force_bytes("Withhold all data from specified actor entity."),
        )
        self.assertEqual(
            force_bytes(inst.term[0].type.coding[0].system),
            force_bytes("http://example.org/fhir/consent-term-type-codes"),
        )
        self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
        self.assertEqual(force_bytes(inst.type.coding[0].code), force_bytes("57016-8"))
        self.assertEqual(
            force_bytes(inst.type.coding[0].system), force_bytes("http://loinc.org")
        )
    def testContract2(self):
        """Round-trip the contract-example-42cfr-part2.json example."""
        inst = self.instantiate_from("contract-example-42cfr-part2.json")
        self.assertIsNotNone(inst, "Must have instantiated a Contract instance")
        self.implContract2(inst)
        js = inst.as_json()
        self.assertEqual("Contract", js["resourceType"])
        inst2 = contract.Contract(js)
        self.implContract2(inst2)
    def implContract2(self, inst):
        """Check field values of the 42 CFR part 2 consent-directive example."""
        self.assertEqual(
            force_bytes(inst.agent[0].role[0].coding[0].code), force_bytes("IR")
        )
        self.assertEqual(
            force_bytes(inst.agent[0].role[0].coding[0].display),
            force_bytes("Recipient"),
        )
        self.assertEqual(
            force_bytes(inst.agent[0].role[0].coding[0].system),
            force_bytes("http://org.mdhhs.fhir.consent-actor-type"),
        )
        self.assertEqual(
            force_bytes(inst.agent[0].role[0].text),
            force_bytes("Recipient of restricted health information"),
        )
        self.assertEqual(
            force_bytes(inst.agent[1].role[0].coding[0].code), force_bytes("IS")
        )
        self.assertEqual(
            force_bytes(inst.agent[1].role[0].coding[0].display), force_bytes("Sender")
        )
        self.assertEqual(
            force_bytes(inst.agent[1].role[0].coding[0].system),
            force_bytes("http://org.mdhhs.fhir.consent-actor-type"),
        )
        self.assertEqual(
            force_bytes(inst.agent[1].role[0].text),
            force_bytes("Sender of restricted health information"),
        )
        self.assertEqual(force_bytes(inst.id), force_bytes("C-2121"))
        self.assertEqual(inst.issued.date, FHIRDate("2031-11-01T21:18:27-04:00").date)
        self.assertEqual(inst.issued.as_json(), "2031-11-01T21:18:27-04:00")
        self.assertEqual(
            force_bytes(inst.legal[0].contentAttachment.contentType),
            force_bytes("application/pdf"),
        )
        self.assertEqual(
            force_bytes(inst.legal[0].contentAttachment.language), force_bytes("en-US")
        )
        self.assertEqual(
            force_bytes(inst.legal[0].contentAttachment.title),
            force_bytes("MDHHS-5515 Consent To Share Your Health Information"),
        )
        self.assertEqual(
            force_bytes(inst.legal[0].contentAttachment.url),
            force_bytes("http://org.mihin.ecms/ConsentDirective-2121"),
        )
        self.assertEqual(
            inst.meta.lastUpdated.date, FHIRDate("2016-07-19T18:18:42.108-04:00").date
        )
        self.assertEqual(
            inst.meta.lastUpdated.as_json(), "2016-07-19T18:18:42.108-04:00"
        )
        self.assertEqual(force_bytes(inst.meta.versionId), force_bytes("1"))
        # Nine security labels: confidentiality, sensitivity, purpose-of-use
        # and handling instructions.
        self.assertEqual(force_bytes(inst.securityLabel[0].code), force_bytes("R"))
        self.assertEqual(
            force_bytes(inst.securityLabel[0].display), force_bytes("Restricted")
        )
        self.assertEqual(
            force_bytes(inst.securityLabel[0].system),
            force_bytes("http://hl7.org/fhir/v3/Confidentiality"),
        )
        self.assertEqual(force_bytes(inst.securityLabel[1].code), force_bytes("ETH"))
        self.assertEqual(
            force_bytes(inst.securityLabel[1].display),
            force_bytes("substance abuse information sensitivity"),
        )
        self.assertEqual(
            force_bytes(inst.securityLabel[1].system),
            force_bytes("http://hl7.org/fhir/v3/ActCode"),
        )
        self.assertEqual(
            force_bytes(inst.securityLabel[2].code), force_bytes("42CFRPart2")
        )
        self.assertEqual(
            force_bytes(inst.securityLabel[2].system),
            force_bytes("http://hl7.org/fhir/v3/ActCode"),
        )
        self.assertEqual(force_bytes(inst.securityLabel[3].code), force_bytes("TREAT"))
        self.assertEqual(
            force_bytes(inst.securityLabel[3].display), force_bytes("treatment")
        )
        self.assertEqual(
            force_bytes(inst.securityLabel[3].system),
            force_bytes("http://hl7.org/fhir/v3/ActReason"),
        )
        self.assertEqual(force_bytes(inst.securityLabel[4].code), force_bytes("HPAYMT"))
        self.assertEqual(
            force_bytes(inst.securityLabel[4].display),
            force_bytes("healthcare payment"),
        )
        self.assertEqual(
            force_bytes(inst.securityLabel[4].system),
            force_bytes("http://hl7.org/fhir/v3/ActReason"),
        )
        self.assertEqual(
            force_bytes(inst.securityLabel[5].code), force_bytes("HOPERAT")
        )
        self.assertEqual(
            force_bytes(inst.securityLabel[5].display),
            force_bytes("healthcare operations"),
        )
        self.assertEqual(
            force_bytes(inst.securityLabel[5].system),
            force_bytes("http://hl7.org/fhir/v3/ActReason"),
        )
        self.assertEqual(
            force_bytes(inst.securityLabel[6].code), force_bytes("PERSISTLABEL")
        )
        self.assertEqual(
            force_bytes(inst.securityLabel[6].display),
            force_bytes("persist security label"),
        )
        self.assertEqual(
            force_bytes(inst.securityLabel[6].system),
            force_bytes("http://hl7.org/fhir/v3/ActCode"),
        )
        self.assertEqual(
            force_bytes(inst.securityLabel[7].code), force_bytes("PRIVMARK")
        )
        self.assertEqual(
            force_bytes(inst.securityLabel[7].display), force_bytes("privacy mark")
        )
        self.assertEqual(
            force_bytes(inst.securityLabel[7].system),
            force_bytes("http://hl7.org/fhir/v3/ActCode"),
        )
        self.assertEqual(
            force_bytes(inst.securityLabel[8].code), force_bytes("NORDSCLCD")
        )
        self.assertEqual(
            force_bytes(inst.securityLabel[8].display),
            force_bytes("no redisclosure without consent directive"),
        )
        self.assertEqual(
            force_bytes(inst.securityLabel[8].system),
            force_bytes("http://hl7.org/fhir/v3/ActCode"),
        )
        self.assertEqual(
            force_bytes(inst.signer[0].signature[0].type[0].code),
            force_bytes("1.2.840.10065.1.12.1.1"),
        )
        self.assertEqual(
            force_bytes(inst.signer[0].signature[0].type[0].system),
            force_bytes("urn:iso-astm:E1762-95:2013"),
        )
        self.assertEqual(
            inst.signer[0].signature[0].when.date,
            FHIRDate("2017-02-08T10:57:34+01:00").date,
        )
        self.assertEqual(
            inst.signer[0].signature[0].when.as_json(), "2017-02-08T10:57:34+01:00"
        )
        self.assertEqual(force_bytes(inst.signer[0].type.code), force_bytes("SELF"))
        self.assertEqual(
            force_bytes(inst.signer[0].type.system),
            force_bytes("http://org.mdhhs.fhir.consent-signer-type"),
        )
        self.assertEqual(
            force_bytes(inst.subType[0].coding[0].code), force_bytes("MDHHS-5515")
        )
        self.assertEqual(
            force_bytes(inst.subType[0].coding[0].display),
            force_bytes(
                "Michigan MDHHS-5515 Consent to Share Behavioral Health Information for Care Coordination Purposes"
            ),
        )
        self.assertEqual(
            force_bytes(inst.subType[0].coding[0].system),
            force_bytes("http://hl7.org/fhir/consentcategorycodes"),
        )
        self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
        self.assertEqual(force_bytes(inst.type.coding[0].code), force_bytes("OPTIN"))
        self.assertEqual(
            force_bytes(inst.type.coding[0].system),
            force_bytes("http://org.mdhhs.fhir.consentdirective-type"),
        )
        self.assertEqual(
            force_bytes(inst.type.text), force_bytes("Opt-in consent directive")
        )
    def testContract3(self):
        """Round-trip the pcd-example-notLabs.json example."""
        inst = self.instantiate_from("pcd-example-notLabs.json")
        self.assertIsNotNone(inst, "Must have instantiated a Contract instance")
        self.implContract3(inst)
        js = inst.as_json()
        self.assertEqual("Contract", js["resourceType"])
        inst2 = contract.Contract(js)
        self.implContract3(inst2)
    def implContract3(self, inst):
        """Check field values of the pcd-example-notLabs example."""
        self.assertEqual(
            force_bytes(inst.friendly[0].contentAttachment.title),
            force_bytes("The terms of the consent in friendly consumer speak."),
        )
        self.assertEqual(force_bytes(inst.id), force_bytes("pcd-example-notLabs"))
        self.assertEqual(inst.issued.date, FHIRDate("2014-08-17").date)
        self.assertEqual(inst.issued.as_json(), "2014-08-17")
        self.assertEqual(
            force_bytes(inst.legal[0].contentAttachment.title),
            force_bytes("The terms of the consent in lawyer speak."),
        )
        self.assertEqual(
            force_bytes(inst.subType[0].coding[0].code), force_bytes("Opt-In")
        )
        self.assertEqual(
            force_bytes(inst.subType[0].coding[0].display),
            force_bytes("Default Authorization with exceptions."),
        )
        self.assertEqual(
            force_bytes(inst.subType[0].coding[0].system),
            force_bytes("http://www.infoway-inforoute.ca.org/Consent-subtype-codes"),
        )
        # Two terms: withhold orders (ProcedureRequest) and their results
        # (DiagnosticReport).
        self.assertEqual(
            force_bytes(inst.term[0].subType.coding[0].code),
            force_bytes("ProcedureRequest"),
        )
        self.assertEqual(
            force_bytes(inst.term[0].subType.coding[0].system),
            force_bytes("http://hl7.org/fhir/resource-types"),
        )
        self.assertEqual(
            force_bytes(inst.term[0].text),
            force_bytes("Withhold orders from any provider."),
        )
        self.assertEqual(
            force_bytes(inst.term[0].type.coding[0].code),
            force_bytes("withhold-object-type"),
        )
        self.assertEqual(
            force_bytes(inst.term[0].type.coding[0].system),
            force_bytes("http://example.org/fhir/consent-term-type-codes"),
        )
        self.assertEqual(
            force_bytes(inst.term[1].subType.coding[0].code),
            force_bytes("DiagnosticReport"),
        )
        self.assertEqual(
            force_bytes(inst.term[1].subType.coding[0].system),
            force_bytes("http://hl7.org/fhir/resource-types"),
        )
        self.assertEqual(
            force_bytes(inst.term[1].text),
            force_bytes("Withhold order results from any provider."),
        )
        self.assertEqual(
            force_bytes(inst.term[1].type.coding[0].code),
            force_bytes("withhold-object-type"),
        )
        self.assertEqual(
            force_bytes(inst.term[1].type.coding[0].system),
            force_bytes("http://example.org/fhir/consent-term-type-codes"),
        )
        self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
        self.assertEqual(force_bytes(inst.type.coding[0].code), force_bytes("57016-8"))
        self.assertEqual(
            force_bytes(inst.type.coding[0].system), force_bytes("http://loinc.org")
        )
    def testContract4(self):
        """Round-trip the pcd-example-notThem.json example."""
        inst = self.instantiate_from("pcd-example-notThem.json")
        self.assertIsNotNone(inst, "Must have instantiated a Contract instance")
        self.implContract4(inst)
        js = inst.as_json()
        self.assertEqual("Contract", js["resourceType"])
        inst2 = contract.Contract(js)
        self.implContract4(inst2)
    def implContract4(self, inst):
        """Check field values of the pcd-example-notThem example."""
        self.assertEqual(
            force_bytes(inst.friendly[0].contentAttachment.title),
            force_bytes("The terms of the consent in friendly consumer speak."),
        )
        self.assertEqual(force_bytes(inst.id), force_bytes("pcd-example-notThem"))
        self.assertEqual(inst.issued.date, FHIRDate("2015-11-18").date)
        self.assertEqual(inst.issued.as_json(), "2015-11-18")
        self.assertEqual(
            force_bytes(inst.legal[0].contentAttachment.title),
            force_bytes("The terms of the consent in lawyer speak."),
        )
        self.assertEqual(
            force_bytes(inst.signer[0].signature[0].type[0].code),
            force_bytes("1.2.840.10065.1.12.1.1"),
        )
        self.assertEqual(
            force_bytes(inst.signer[0].signature[0].type[0].system),
            force_bytes("urn:iso-astm:E1762-95:2013"),
        )
        self.assertEqual(
            inst.signer[0].signature[0].when.date,
            FHIRDate("2013-06-08T10:57:34-07:00").date,
        )
        self.assertEqual(
            inst.signer[0].signature[0].when.as_json(), "2013-06-08T10:57:34-07:00"
        )
        self.assertEqual(force_bytes(inst.signer[0].type.code), force_bytes("COVPTY"))
        self.assertEqual(
            force_bytes(inst.signer[0].type.system),
            force_bytes("http://www.hl7.org/fhir/contractsignertypecodes"),
        )
        self.assertEqual(
            force_bytes(inst.subType[0].coding[0].code), force_bytes("Opt-In")
        )
        self.assertEqual(
            force_bytes(inst.subType[0].coding[0].display),
            force_bytes("Default Authorization with exceptions."),
        )
        self.assertEqual(
            force_bytes(inst.subType[0].coding[0].system),
            force_bytes("http://www.infoway-inforoute.ca.org/Consent-subtype-codes"),
        )
        self.assertEqual(
            force_bytes(inst.term[0].text),
            force_bytes(
                "Withhold this order and any results or related objects from specified nurse provider."
            ),
        )
        self.assertEqual(
            force_bytes(inst.term[0].type.coding[0].code), force_bytes("withhold-from")
        )
        self.assertEqual(
            force_bytes(inst.term[0].type.coding[0].display),
            force_bytes("Withhold all data from specified actor entity."),
        )
        self.assertEqual(
            force_bytes(inst.term[0].type.coding[0].system),
            force_bytes("http://example.org/fhir/consent-term-type-codes"),
        )
        self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
        self.assertEqual(force_bytes(inst.type.coding[0].code), force_bytes("57016-8"))
        self.assertEqual(
            force_bytes(inst.type.coding[0].system), force_bytes("http://loinc.org")
        )
    def testContract5(self):
        """Round-trip the pcd-example-notAuthor.json example."""
        inst = self.instantiate_from("pcd-example-notAuthor.json")
        self.assertIsNotNone(inst, "Must have instantiated a Contract instance")
        self.implContract5(inst)
        js = inst.as_json()
        self.assertEqual("Contract", js["resourceType"])
        inst2 = contract.Contract(js)
        self.implContract5(inst2)
    def implContract5(self, inst):
        """Check field values of the pcd-example-notAuthor example."""
        self.assertEqual(
            force_bytes(inst.friendly[0].contentAttachment.title),
            force_bytes("The terms of the consent in friendly consumer speak."),
        )
        self.assertEqual(force_bytes(inst.id), force_bytes("pcd-example-notAuthor"))
        self.assertEqual(inst.issued.date, FHIRDate("2015-11-18").date)
        self.assertEqual(inst.issued.as_json(), "2015-11-18")
        self.assertEqual(
            force_bytes(inst.legal[0].contentAttachment.title),
            force_bytes("The terms of the consent in lawyer speak."),
        )
        self.assertEqual(
            force_bytes(inst.subType[0].coding[0].code), force_bytes("Opt-In")
        )
        self.assertEqual(
            force_bytes(inst.subType[0].coding[0].display),
            force_bytes("Default Authorization with exceptions."),
        )
        self.assertEqual(
            force_bytes(inst.subType[0].coding[0].system),
            force_bytes("http://www.infoway-inforoute.ca.org/Consent-subtype-codes"),
        )
        self.assertEqual(
            force_bytes(inst.term[0].text),
            force_bytes("Withhold all data authored by Good Health provider."),
        )
        self.assertEqual(
            force_bytes(inst.term[0].type.coding[0].code),
            force_bytes("withhold-authored-by"),
        )
        self.assertEqual(
            force_bytes(inst.term[0].type.coding[0].display),
            force_bytes("Withhold all data authored by specified actor entity."),
        )
        self.assertEqual(
            force_bytes(inst.term[0].type.coding[0].system),
            force_bytes("http://example.org/fhir/consent-term-type-codes"),
        )
        self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
        self.assertEqual(force_bytes(inst.type.coding[0].code), force_bytes("57016-8"))
        self.assertEqual(
            force_bytes(inst.type.coding[0].system), force_bytes("http://loinc.org")
        )
    def testContract6(self):
        """Round-trip the contract-example.json example."""
        inst = self.instantiate_from("contract-example.json")
        self.assertIsNotNone(inst, "Must have instantiated a Contract instance")
        self.implContract6(inst)
        js = inst.as_json()
        self.assertEqual("Contract", js["resourceType"])
        inst2 = contract.Contract(js)
        self.implContract6(inst2)
    def implContract6(self, inst):
        """Check field values of the generic contract-example."""
        self.assertEqual(force_bytes(inst.id), force_bytes("C-123"))
        self.assertEqual(
            force_bytes(inst.identifier.system),
            force_bytes("http://happyvalley.com/contract"),
        )
        self.assertEqual(force_bytes(inst.identifier.value), force_bytes("12347"))
        self.assertEqual(
            force_bytes(inst.text.div),
            force_bytes(
                '<div xmlns="http://www.w3.org/1999/xhtml">A human-readable rendering of the contract</div>'
            ),
        )
        self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
    def testContract7(self):
        """Round-trip the pcd-example-notThis.json example."""
        inst = self.instantiate_from("pcd-example-notThis.json")
        self.assertIsNotNone(inst, "Must have instantiated a Contract instance")
        self.implContract7(inst)
        js = inst.as_json()
        self.assertEqual("Contract", js["resourceType"])
        inst2 = contract.Contract(js)
        self.implContract7(inst2)
    def implContract7(self, inst):
        """Check field values of the pcd-example-notThis example."""
        self.assertEqual(
            force_bytes(inst.friendly[0].contentAttachment.title),
            force_bytes("The terms of the consent in friendly consumer speak."),
        )
        self.assertEqual(force_bytes(inst.id), force_bytes("pcd-example-notThis"))
        self.assertEqual(inst.issued.date, FHIRDate("2015-11-18").date)
        self.assertEqual(inst.issued.as_json(), "2015-11-18")
        self.assertEqual(
            force_bytes(inst.legal[0].contentAttachment.title),
            force_bytes("The terms of the consent in lawyer speak."),
        )
        self.assertEqual(
            force_bytes(inst.subType[0].coding[0].code), force_bytes("Opt-In")
        )
        self.assertEqual(
            force_bytes(inst.subType[0].coding[0].display),
            force_bytes("Default Authorization with exceptions."),
        )
        self.assertEqual(
            force_bytes(inst.subType[0].coding[0].system),
            force_bytes("http://www.infoway-inforoute.ca.org/Consent-subtype-codes"),
        )
        self.assertEqual(
            force_bytes(inst.term[0].text),
            force_bytes(
                "Withhold this order and any results or related objects from any provider."
            ),
        )
        self.assertEqual(
            force_bytes(inst.term[0].type.coding[0].code),
            force_bytes("withhold-identified-object-and-related"),
        )
        self.assertEqual(
            force_bytes(inst.term[0].type.coding[0].display),
            force_bytes(
                "Withhold the identified object and any other resources that are related to this object."
            ),
        )
        self.assertEqual(
            force_bytes(inst.term[0].type.coding[0].system),
            force_bytes("http://example.org/fhir/consent-term-type-codes"),
        )
        self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
        self.assertEqual(force_bytes(inst.type.coding[0].code), force_bytes("57016-8"))
        self.assertEqual(
            force_bytes(inst.type.coding[0].system), force_bytes("http://loinc.org")
        )
| [
"json.load",
"os.environ.get",
"pytest.mark.usefixtures",
"os.path.join"
] | [((333, 373), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""base_settings"""'], {}), "('base_settings')\n", (356, 373), False, 'import pytest\n'), ((474, 513), 'os.environ.get', 'os.environ.get', (['"""FHIR_UNITTEST_DATADIR"""'], {}), "('FHIR_UNITTEST_DATADIR')\n", (488, 513), False, 'import os\n'), ((625, 642), 'json.load', 'json.load', (['handle'], {}), '(handle)\n', (634, 642), False, 'import json\n'), ((541, 572), 'os.path.join', 'os.path.join', (['datadir', 'filename'], {}), '(datadir, filename)\n', (553, 572), False, 'import os\n')] |
from django.contrib import admin
from django.urls import path
# See: https://docs.djangoproject.com/en/dev/ref/contrib/admin/#hooking-adminsite-instances-into-your-urlconf
# Auto-discover each installed app's admin.py so its models are registered
# with the default admin site.
admin.autodiscover()
# See: https://docs.djangoproject.com/en/dev/topics/http/urls/
# Root URLconf: only the Django admin interface is exposed.
urlpatterns = [
    path('admin/', admin.site.urls),
]
| [
"django.urls.path",
"django.contrib.admin.autodiscover"
] | [((174, 194), 'django.contrib.admin.autodiscover', 'admin.autodiscover', ([], {}), '()\n', (192, 194), False, 'from django.contrib import admin\n'), ((280, 311), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (284, 311), False, 'from django.urls import path\n')] |
from stuff import *
# Get weekday pattern from case data in order to identify exact date on SGTF graph
# 0 mod 7 is Thursday in daytodate notation (being 1970-01-01)
# nc maps day number (datetoday) -> daily new case count, parsed from 'SAcases'.
nc={}
with open('SAcases','r') as fp:
  for x in fp:
    y=x.split()
    nc[datetoday(y[0])]=int(y[1])
minday=min(nc)
maxday=max(nc)
# Day-of-week multipliers: c1[i]/c0[i] is the mean ratio of a day's count to its
# centred 7-day average, restricted to windows whose minimum count is >= 50.
c0=[0]*7
c1=[0]*7
for d in range(minday+3,maxday-3):
  ex=[nc[r] for r in range(d-3,d+4)]
  if min(ex)>=50:
    i=d%7
    c0[i]+=1
    c1[i]+=nc[d]*7/sum(ex)
#for i in range(7):
#  print(i,c1[i]/c0[i])
# Thur 1.184
# Fri 1.170
# Sat 1.122
# Sun 0.913
# Mon 0.655
# Tue 0.766
# Wed 1.158
# Per-image calibration: plot-area pixel bounds (row0/row1, col0/col1) plus two
# reference points each for the SGTF axis (y0,y1) and the count axis (z0,z1),
# given as (data value, pixel row) pairs used for linear interpolation below.
if 0:
  infile='OmicronSGTF.png'
  dateorigin=datetoday('2021-10-01')-564
  row0,row1=23,359
  col0,col1=81,614
  y0=(0,358);y1=(50,43)
  z0=(0,357);z1=(1600,126)
if 1:
  infile='OmicronSGTF_frompdf.png'
  dateorigin=datetoday('2021-10-01')-564
  row0,row1=11,345
  col0,col1=81,614
  y0=(0,344.5);y1=(50,32)
  z0=(0,344.5);z1=(2000,57.5)
# SGTF image from slide 12 of https://sacoronavirus.co.za/2021/11/25/sars-cov-2-sequencing-new-variant-update-25-november-2021/
# resized down by a factor of 2/3 in order to get 1 horizontal pixel = 1 day.
from PIL import Image
import numpy as np
im_frame = Image.open(infile)
cc = np.array(im_frame,dtype=int)
im_frame.close()
# Top-leftian, row before column
r=cc.shape[0]
c=cc.shape[1]
# Get blueness: blue channel minus the other two (high where the curve is drawn).
bb=cc[:,:,2]*2-(cc[:,:,0]+cc[:,:,1])
def process(bb,name):
  """Trace the strongest-response curve in image `bb` and dump the result.

  For each pixel column, the row with the maximal value of bb (within the
  calibrated plot area row0:row1) is taken as the curve position.  Saves a
  normalised greyscale copy (`name`_filtered.png), an overlay with the
  detected curve marked in red (`name`_sgtf.png), and a text file `name`_sgtf
  mapping date -> SGTF percentage (pixel rows converted via the y0/y1 axis
  calibration).  Returns (mm, sgtf): per-column pixel rows and the
  date -> value dict.
  """
  bb1=bb[row0:row1,:]
  # mm[x] = image row of the strongest response in column x (absolute coords).
  mm=row0+np.argmax(bb1,axis=0)
  im=Image.fromarray(((bb-bb.min())/(bb.max()-bb.min())*255.999+0.0005).astype(np.dtype('uint8')))
  im.save(name+'_filtered.png')
  oo=cc.astype(np.dtype('uint8'))
  for x in range(col0,col1): oo[mm[x],x]=[255,0,0]
  im=Image.fromarray(oo)
  im.save(name+'_sgtf.png')
  sgtf={}
  for x in range(col0,col1):
    # Linear interpolation between the (value, pixel-row) calibration points.
    sgtf[daytodate(dateorigin+x)]=(mm[x]-y1[1])/(y0[1]-y1[1])*(y0[0]-y1[0])+y1[0]
  with open(name+'_sgtf','w') as fp:
    for date in sorted(list(sgtf)):
      print(date,"%6.2f"%sgtf[date],file=fp)
  return mm,sgtf
process(bb,'simple')
# De-antialias horizontally: subtract the brighter of the two horizontal
# neighbours, which sharpens near-vertical features of the curve.
lrantialias=bb-np.maximum(np.roll(bb,1,1),np.roll(bb,-1,1))
process(lrantialias,'LRantialias')
# Hybrid because deantialiasing method is likely to work well for the vertical spike, but not when derivative is low.
spike=605
hybrid=np.concatenate([bb[:,:spike],lrantialias[:,spike:]],axis=1)
mm,sgtf=process(hybrid,'hybrid')
# Redness of each pixel: red channel minus the stronger of green/blue.
dd=cc[:,:,0]-np.maximum(cc[:,:,1],cc[:,:,2])
oo=(dd>3).astype(np.dtype('uint8'))*255
im=Image.fromarray(oo)
im.save('temp.png')
ee=(dd>3)*1000+np.tile(np.arange(r-1,-1,-1)[:,None],(1,c))
process(ee,'simplered')
oo=cc.astype(np.dtype('uint8'))
# nn[x]: scanning upwards, last row where the exponentially-smoothed redness
# ratio s1/s0 exceeds 5 -- presumably the top of the red per-day count bars;
# verify against the source image.
nn=np.zeros(c)
for x in range(col0,col1):
  s0=1
  s1=10
  f=0.5
  mx=0
  for y in range(row1-1,row0-1,-1):
    if abs(y-mm[x])>1:
      s0=(1-f)*s0+f*1
      s1=(1-f)*s1+f*dd[y,x]
      #print(y,dd[y,x],s1/s0)
      if s1/s0>5: mx=y
  nn[x]=mx
  oo[mx,x]=[0,255,0]
  oo[mm[x],x]=[255,0,0]
im=Image.fromarray(oo)
im.save('sgtf+counts.png')
# Combine the curve (SGTF %) with bar heights (test counts via z0/z1
# calibration) into a per-day table of S-gene dropout vs S-positive counts.
with open('SA_sgtf','w') as fp:
  print("# Date %SGTF Tests num(S-) num(S+)",file=fp)
  for x in range(col0,col1):
    if nn[x]>0:
      date=daytodate(dateorigin+x)
      n=max((nn[x]-z1[1])/(z0[1]-z1[1])*(z0[0]-z1[0])+z1[0],0)
      s=sgtf[date]
      print(date,"%6.2f %6.1f %6.1f %6.1f"%(s,n,s/100*n,(1-s/100)*n),file=fp)
| [
"numpy.dtype",
"PIL.Image.fromarray",
"PIL.Image.open",
"numpy.roll",
"numpy.argmax",
"numpy.array",
"numpy.zeros",
"numpy.concatenate",
"numpy.maximum",
"numpy.arange"
] | [((1195, 1213), 'PIL.Image.open', 'Image.open', (['infile'], {}), '(infile)\n', (1205, 1213), False, 'from PIL import Image\n'), ((1219, 1248), 'numpy.array', 'np.array', (['im_frame'], {'dtype': 'int'}), '(im_frame, dtype=int)\n', (1227, 1248), True, 'import numpy as np\n'), ((2239, 2302), 'numpy.concatenate', 'np.concatenate', (['[bb[:, :spike], lrantialias[:, spike:]]'], {'axis': '(1)'}), '([bb[:, :spike], lrantialias[:, spike:]], axis=1)\n', (2253, 2302), True, 'import numpy as np\n'), ((2421, 2440), 'PIL.Image.fromarray', 'Image.fromarray', (['oo'], {}), '(oo)\n', (2436, 2440), False, 'from PIL import Image\n'), ((2581, 2592), 'numpy.zeros', 'np.zeros', (['c'], {}), '(c)\n', (2589, 2592), True, 'import numpy as np\n'), ((2872, 2891), 'PIL.Image.fromarray', 'Image.fromarray', (['oo'], {}), '(oo)\n', (2887, 2891), False, 'from PIL import Image\n'), ((1679, 1698), 'PIL.Image.fromarray', 'Image.fromarray', (['oo'], {}), '(oo)\n', (1694, 1698), False, 'from PIL import Image\n'), ((2346, 2382), 'numpy.maximum', 'np.maximum', (['cc[:, :, 1]', 'cc[:, :, 2]'], {}), '(cc[:, :, 1], cc[:, :, 2])\n', (2356, 2382), True, 'import numpy as np\n'), ((2559, 2576), 'numpy.dtype', 'np.dtype', (['"""uint8"""'], {}), "('uint8')\n", (2567, 2576), True, 'import numpy as np\n'), ((1435, 1457), 'numpy.argmax', 'np.argmax', (['bb1'], {'axis': '(0)'}), '(bb1, axis=0)\n', (1444, 1457), True, 'import numpy as np\n'), ((1604, 1621), 'numpy.dtype', 'np.dtype', (['"""uint8"""'], {}), "('uint8')\n", (1612, 1621), True, 'import numpy as np\n'), ((2034, 2051), 'numpy.roll', 'np.roll', (['bb', '(1)', '(1)'], {}), '(bb, 1, 1)\n', (2041, 2051), True, 'import numpy as np\n'), ((2050, 2068), 'numpy.roll', 'np.roll', (['bb', '(-1)', '(1)'], {}), '(bb, -1, 1)\n', (2057, 2068), True, 'import numpy as np\n'), ((2395, 2412), 'numpy.dtype', 'np.dtype', (['"""uint8"""'], {}), "('uint8')\n", (2403, 2412), True, 'import numpy as np\n'), ((1536, 1553), 'numpy.dtype', 'np.dtype', (['"""uint8"""'], {}), 
"('uint8')\n", (1544, 1553), True, 'import numpy as np\n'), ((2485, 2509), 'numpy.arange', 'np.arange', (['(r - 1)', '(-1)', '(-1)'], {}), '(r - 1, -1, -1)\n', (2494, 2509), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
"""
Path handling
"""
import pathlib
from pathlib import Path
import simplejson as json
customdir = Path.home() / ".komoog"
def _prepare():
    """Ensure the ``~/.komoog`` config directory and credential file exist.

    Creates the directory if missing and, when no ``komoot.json`` is present,
    writes a template with empty ``email``/``password``/``clientid`` fields so
    the user knows which keys to fill in.  An existing file is left untouched.
    """
    customdir.mkdir(exist_ok=True)
    cred_file = customdir / "komoot.json"
    if not cred_file.exists():
        data = {
            "email": "",
            "password": "",
            "clientid": "",
        }
        # json.dump writes to the file and returns None, so the previous
        # ``credentials = json.dump(...)`` assignment was meaningless.
        with open(cred_file, 'w') as f:
            json.dump(data, f)
def get_credentials():
    """
    Returns credentials for komoot login in structure

    .. code:: python

        {
            "email" : "",
            "password" : "",
            "clientid" : ""
        }

    from the file ``~/.komoog/komoot.json``
    """
    _prepare()
    path = customdir / "komoot.json"
    with open(path, 'r') as fh:
        credentials = json.load(fh)
    required = ["email", "password", "clientid"]
    assert all([key in credentials.keys() for key in required])
    assert not any([credentials[key] == '' for key in required])
    return credentials
# Smoke test: load and validate the stored credentials when run directly.
if __name__ == "__main__":
    get_credentials()
| [
"simplejson.load",
"pathlib.Path.home",
"simplejson.dump"
] | [((126, 137), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (135, 137), False, 'from pathlib import Path\n'), ((884, 896), 'simplejson.load', 'json.load', (['f'], {}), '(f)\n', (893, 896), True, 'import simplejson as json\n'), ((483, 501), 'simplejson.dump', 'json.dump', (['data', 'f'], {}), '(data, f)\n', (492, 501), True, 'import simplejson as json\n')] |
"""
Test.py 10/10/2021
MIT License
Copyright (c) 2021 http-samc
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from Py2048_Engine.Game import Game
from Py2048_Engine.Exceptions import GameLostException, GameWonException
def runTest():
    """Run a basic CLI test of the Py2048 Engine.

    Repeatedly prompts for a movement, applies it to the board and prints
    the new state.  The loop ends when the engine raises GameLostException
    or GameWonException; any other error (e.g. non-integer input) prints a
    reminder and the prompt repeats.
    """
    game = Game()
    print(game)

    while True:
        try:
            choice = int(input("Choose a movement:\n\t[1] Left\n\t[2] Right\n\t[3] Up\n\t[4] Down\n\nChoice: "))

            if choice == 1: game.left()
            elif choice == 2: game.right()
            elif choice == 3: game.up()
            elif choice == 4: game.down()

            print(f"\n\n{game}\n")

        except Exception as e:
            if isinstance(e, GameLostException):
                print(f"Game lost after {e.numMoves} moves!\nFinal Board:")
                print(e.board)
                break
            elif isinstance(e, GameWonException):
                # Bug fix: this branch previously printed "Game lost" on a win.
                print(f"Game won after {e.numMoves} moves!\nFinal Board:")
                print(e.board)
                break
            else:
                print("Please make sure to enter a valid selection (integer from [1, 4])!")
# Allow running this module directly as an interactive smoke test.
if __name__ == "__main__":
    runTest()
"Py2048_Engine.Game.Game"
] | [((1353, 1359), 'Py2048_Engine.Game.Game', 'Game', ([], {}), '()\n', (1357, 1359), False, 'from Py2048_Engine.Game import Game\n')] |
from megastone.util import round_up
import threading
import logging
import io
import enum
import dataclasses
import abc
from megastone.errors import UnsupportedError
from megastone.mem import SegmentMemory, MemoryAccessError
from megastone.debug import Debugger, StopReason, StopType, HookType, CPUError, InvalidInsnError, MemFaultError
from .connection import RSPConnection, Signal, parse_ascii, parse_hex_int, parse_hexint_list, parse_list, encode_hex, parse_hex, ParsingError
from .stream import EndOfStreamError, TCPStreamServer
from .target import load_gdb_regs
logger = logging.getLogger(__name__)

# How often (seconds) blocking waits wake up to poll the stop flag.
STOP_POLL_TIME = 0.25

# Megastone hook type -> GDB stop-reply reason keyword.
HOOK_TYPE_TO_STOP_REASON = {
    HookType.CODE: 'hwbreak',
    HookType.WRITE: 'watch',
    HookType.READ: 'rwatch',
    HookType.ACCESS: 'awatch'
}

# Canned RSP replies.
ERROR_RESPONSE = b'E01'
OK_RESPONSE = b'OK'

# GDB Z/z packet breakpoint-type number -> Megastone hook type
# (both software (0) and hardware (1) breakpoints map to CODE hooks).
GDB_TYPE_TO_HOOK_TYPE = {
    0: HookType.CODE,
    1: HookType.CODE,
    2: HookType.WRITE,
    3: HookType.READ,
    4: HookType.ACCESS
}
class ServerStopReason(enum.Enum):
    """Why the server's serve loop returned (see GDBServer.run)."""
    STOPPED = enum.auto()   # GDBServer.stop() was called from another thread
    KILLED = enum.auto()    # client sent a kill ('k') packet
    DETACHED = enum.auto()  # client detached ('D') or disconnected
@dataclasses.dataclass
class _MonitorCommand(abc.ABC):
    """A ``monitor <name>`` command exposed to GDB clients via qRcmd."""
    name: str          # command name, matched by unambiguous prefix
    handler: callable  # zero-argument callable returning the reply string
    help: str          # one-line description shown by `monitor help`
class GDBServer:
    """GDB Server implementation. Exposes a Debugger to external GDB clients."""

    def __init__(self, dbg: Debugger, port=1234, host='localhost'):
        """Prepare a server for `dbg` on host:port; call run() to start serving."""
        if not dbg.arch.gdb_supported:
            raise UnsupportedError('Architecture doesn\'t support GDB')

        self.dbg = dbg
        # Final reason run() returned; set when the serve loop ends.
        self.stop_reason: ServerStopReason = None

        self._regs = load_gdb_regs(dbg.arch)
        self._server = TCPStreamServer(host, port)
        self._stopped = threading.Event()
        self._listening = threading.Event()
        self._cmd_stop_reason: ServerStopReason = None
        self._cpu_stop_reason: StopReason = None
        self._stop_exception: CPUError = None
        self._hooks = {} #HookType => address => Hook

        # RSP packet prefix (bytes) => bound handler method.
        self._handlers = {
            b'?': self._handle_stop_reason,
            b'D': self._handle_detach,
            b'k': self._handle_kill,
            b'qAttached': self._handle_attached,
            b'qSupported': self._handle_supported,
            b'qXfer:features:read:target.xml:': self._handle_read_features,
            b'qXfer:memory-map:read::': self._handle_read_memory_map,
            b'g': self._handle_read_regs,
            b'G': self._handle_write_regs,
            b'm': self._handle_read_mem,
            b'M': self._handle_write_mem,
            b's': self._handle_step,
            b'c': self._handle_continue,
            b'S': self._handle_step_signal,
            b'C': self._handle_continue_signal,
            b'Z': self._handle_add_breakpoint,
            b'z': self._handle_remove_breakpoint,
            b'qRcmd,' : self._handle_monitor_command
        }
        # Commands reachable from the client via `monitor <name>`.
        self._monitor_commands = [
            _MonitorCommand('help', self._handle_help, 'Print a list of monitor commands.'),
            _MonitorCommand('megastone', self._handle_megastone, 'Check whether the server is a Megastone server.'),
            _MonitorCommand('segments', self._handle_segments, 'Print the list of Segments.'),
            _MonitorCommand('info', self._handle_info, 'Print information about the Megastone configuration.'),
            _MonitorCommand('error', self._handle_error, 'Print information about the last CPU error.'),
            _MonitorCommand('endian', self._handle_endian, 'Print the endian of the server.')
        ]

    def run(self, *, persistent=False):
        """Run the server. Blocks until the client exits or an error occurs.

        With persistent=True the server keeps accepting new clients after a
        detach; it still returns after stop() or a client kill packet.
        """
        self._stopped.clear()
        self._server.initialize()
        self._listening.set()
        with self._server:
            self._server.set_timeout(STOP_POLL_TIME)
            while True:
                reason = self._run_once()
                if reason is ServerStopReason.STOPPED or reason is ServerStopReason.KILLED or not persistent:
                    self.stop_reason = reason
                    break
        self._listening.clear()

    def stop(self):
        """
        Stop the server.

        This can be safely called from a different thread than the one running the server.
        """
        self._stopped.set()

    def _run_once(self):
        """Accept and serve a single client connection; return the stop reason."""
        conn = self._wait_for_connection()
        if conn is None:
            return ServerStopReason.STOPPED
        with conn:
            return self._main_loop(conn)

    def _wait_for_connection(self):
        """Block until a client connects; return None if stop() was called first."""
        logger.info('waiting for client connection')
        while True:
            try:
                stream = self._server.get_stream()
            except TimeoutError:
                if self._check_stopped():
                    return None
            else:
                return RSPConnection(stream)

    def _main_loop(self, conn: RSPConnection):
        """Receive and answer RSP packets until detach/kill/stop/disconnect."""
        self._cmd_stop_reason = None
        while True:
            try:
                command = conn.receive_packet(timeout=STOP_POLL_TIME)
            except EndOfStreamError:
                logger.warning('client disconnected')
                return ServerStopReason.DETACHED
            if self._check_stopped():
                return ServerStopReason.STOPPED
            if command is None:
                continue

            logger.debug(f'received packet: {command}')
            response = self._handle_command(command)
            if response is not None:
                logger.debug(f'sending response: {response}')
                conn.send_packet(response)
            # A handler may have requested shutdown (detach/kill).
            if self._cmd_stop_reason is not None:
                return self._cmd_stop_reason

    def _handle_command(self, command):
        """Dispatch `command` to the first handler whose prefix matches."""
        for prefix, handler in self._handlers.items():
            if command.startswith(prefix):
                args = command[len(prefix):]
                return handler(args)
        return b''  # empty response means "packet not supported"

    def _check_stopped(self):
        if self._stopped.is_set():
            logger.info('server stopped by thread')
            return True
        return False

    def _handle_stop_reason(self, args):
        return self._get_stop_response()

    def _handle_detach(self, args):
        logger.info('client detached')
        self._cmd_stop_reason = ServerStopReason.DETACHED
        return b'OK'

    def _handle_kill(self, args):
        logger.info('killed by client')
        self._cmd_stop_reason = ServerStopReason.KILLED
        return None  # the kill packet gets no reply

    def _handle_attached(self, args):
        return b'1'  # report "attached to an existing process"

    def _handle_read_regs(self, args):
        return self._encode_regs()

    def _handle_write_regs(self, args):
        self._parse_regs(args)
        return OK_RESPONSE

    def _handle_read_mem(self, args):
        address, size = parse_hexint_list(args, 2)
        try:
            data = self.dbg.mem.read(address, size)
        except MemoryAccessError as e:
            logger.error(str(e))
            return ERROR_RESPONSE
        return encode_hex(data)

    def _handle_write_mem(self, args):
        addresses, hex_data = parse_list(args, 2, b':')
        address, _ = parse_hexint_list(addresses, 2)
        data = parse_hex(hex_data)
        logger.info(f'Write memory: 0x{address:X} +0x{len(data):X}')
        try:
            self.dbg.mem.write(address, data)
        except MemoryAccessError as e:
            logger.error(str(e))
            return ERROR_RESPONSE
        return OK_RESPONSE

    def _handle_continue(self, args):
        return self._handle_run(args, None)

    def _handle_step(self, args):
        return self._handle_run(args, 1)

    def _handle_add_breakpoint(self, args):
        type, address, size = self._parse_hook(args)
        logger.debug(f'adding hook: {type} 0x{address:X} +0x{size:X}')
        hook = self.dbg.add_breakpoint(address, size, type)
        self._add_hook(hook)
        return OK_RESPONSE

    def _handle_remove_breakpoint(self, args):
        type, address, _ = self._parse_hook(args)
        logger.debug(f'remove hook: {type} 0x{address:X}')
        hook = self._pop_hook(type, address)
        self.dbg.remove_hook(hook)
        return OK_RESPONSE

    def _handle_continue_signal(self, args):
        return self._handle_run_signal(args, None)

    def _handle_step_signal(self, args):
        return self._handle_run_signal(args, 1)

    def _handle_run_signal(self, args, count):
        # The signal number before ';' is ignored; only the optional
        # resume address is used.
        _, _, address = args.partition(b';')
        return self._handle_run(address, count)

    def _handle_run(self, args, count):
        """Resume the debuggee (count=None: continue, count=1: single step)."""
        if len(args) == 0:
            address = None
        else:
            address = parse_hex_int(args)
        self._cpu_stop_reason = None
        self._stop_exception = None
        logger.debug(f'run: address={address}, count={count}')
        try:
            self._cpu_stop_reason = self.dbg.run(count=count, address=address)
        except CPUError as e:
            self._stop_exception = e
            logger.info(f'stopped: {e}')
        else:
            logger.debug(f'stopped: {self._cpu_stop_reason.type.name}')
        return self._get_stop_response()

    def _handle_supported(self, args):
        return b'swbreak+;hwbreak+;qXfer:features:read+;qXfer:memory-map:read+;multiprocess-'

    def _handle_read_features(self, args):
        features = f'<target version="1.0"><architecture>{self.dbg.arch.gdb_name}</architecture></target>'
        file = io.BytesIO(features.encode())
        return self._handle_xfer(file, args)

    def _handle_read_memory_map(self, args):
        file = io.BytesIO()
        if isinstance(self.dbg.mem, SegmentMemory):
            self._build_memory_map(file)
        return self._handle_xfer(file, args)

    def _build_memory_map(self, fileobj):
        assert isinstance(self.dbg.mem, SegmentMemory)
        fileobj.write(b'<memory-map>')
        for segment in self.dbg.mem.segments:
            fileobj.write(f'<memory type="ram" start="0x{segment.address:x}" length="0x{segment.size:x}"/>'.encode())
        fileobj.write(b'</memory-map>')

    def _handle_xfer(self, fileobj, args):
        """Serve one chunk of a qXfer read ('l' = last chunk, 'm' = more follows)."""
        offset, length = parse_hexint_list(args, 2)
        fileobj.seek(offset)
        data = fileobj.read(length)
        if len(data) < length:
            return b'l' + data
        return b'm' + data

    def _add_hook(self, hook):
        address_hooks = self._hooks.setdefault(hook.type, {})
        address_hooks[hook.address] = hook

    def _pop_hook(self, type, address):
        if type not in self._hooks or address not in self._hooks[type]:
            raise ParsingError(f'Hook of type {type} does not exist at 0x{address:X}')
        return self._hooks[type].pop(address)

    def _parse_hook(self, args):
        """Parse a Z/z packet body into (HookType, address, size)."""
        type, address, size = parse_hexint_list(args, 3)
        htype = GDB_TYPE_TO_HOOK_TYPE.get(type)
        if htype is None:
            raise ParsingError(f'Invalid hook type {type}')
        if size == 0:
            size = 1
        return htype, address, size

    def _get_stop_response(self):
        """Build a 'T<signal><info>' stop-reply packet for the last stop."""
        info = ''
        if self._stop_exception is None:
            signum = Signal.SIGTRAP
            if self._cpu_stop_reason is not None:
                info = self._get_stop_info(self._cpu_stop_reason)
        elif isinstance(self._stop_exception, MemFaultError):
            signum = Signal.SIGSEGV
        elif isinstance(self._stop_exception, InvalidInsnError):
            signum = Signal.SIGILL
        else:
            signum = Signal.SIGABRT
        return f'T{signum.value:02X}{info}'.encode()

    def _get_stop_info(self, reason: StopReason):
        """Return the extra 'key:value;' detail for a hook stop ('' otherwise)."""
        if reason.type is not StopType.HOOK:
            return ''
        hook = reason.hook
        key = HOOK_TYPE_TO_STOP_REASON.get(hook.type)
        if key is None:
            return ''
        if hook.type.is_data:
            # Watchpoints report the faulting data address.
            value = f'{hook.address:X}'
        else:
            value = ''
        return f'{key}:{value};'

    def _encode_reg(self, gdb_reg):
        if gdb_reg.is_dummy:
            value = 0
        else:
            value = self.dbg.regs[gdb_reg.name]
        return self.dbg.arch.endian.encode_int(value, gdb_reg.size)

    def _parse_reg(self, data):
        return self.dbg.arch.endian.decode_int(data)

    def _encode_regs(self):
        reg_data = b''.join(self._encode_reg(reg) for reg in self._regs)
        return encode_hex(reg_data)

    def _parse_regs(self, data):
        """Write a 'G' packet's register blob into the debuggee's registers."""
        stream = io.BytesIO(parse_hex(data))
        for reg in self._regs:
            reg_data = stream.read(reg.size)
            if len(reg_data) < reg.size:
                raise ParsingError('Received register packet is too short')
            if reg.is_dummy:
                continue
            value = self._parse_reg(reg_data)
            if value != self.dbg.regs[reg.name]:
                logger.debug(f'Setting register {reg.name} to 0x{value:X}')
                self.dbg.regs[reg.name] = value

    def _handle_monitor_command(self, args):
        # qRcmd payload is the hex-encoded ASCII command line.
        cmd = parse_ascii(parse_hex(args))
        response = self._handle_monitor_command_string(cmd) + '\n'
        return encode_hex(response.encode())

    def _handle_monitor_command_string(self, s):
        """Resolve `s` by unambiguous name prefix and run the monitor command."""
        if s == '':
            s = 'help'
        commands = [cmd for cmd in self._monitor_commands if cmd.name.startswith(s)]
        if len(commands) == 0:
            return f'Unknown monitor command {s}. Type "monitor help" for a list of commands.'
        elif len(commands) > 1:
            names = ', '.join(cmd.name for cmd in commands)
            return f'Ambiguous monitor command {s}: could be {names}.'
        else:
            logger.debug(f'monitor command: {commands[0].name}')
            return commands[0].handler()

    def _handle_help(self):
        lines = ['Megastone monitor commands:']
        for command in self._monitor_commands:
            lines.append(f'{command.name} - {command.help}')
        return '\n'.join(lines)

    def _handle_megastone(self):
        return 'true'

    def _handle_segments(self):
        """Render the debugger's segment list as an aligned text table."""
        if not isinstance(self.dbg.mem, SegmentMemory):
            return 'Current Memory doesn\'t support segments.'
        segs = sorted(self.dbg.mem.segments, key=lambda s: s.start)
        if len(segs) == 0:
            return 'No segments information is available.'
        addr_width = _get_field_width((seg.address for seg in segs), 'Address')
        size_width = _get_field_width((seg.size for seg in segs), 'Size')
        lines = [f'{"Address":{addr_width}} {"Size":{size_width}} Perms Name']
        for seg in segs:
            lines.append(f'{seg.address:#{addr_width}x} {seg.size:#{size_width}x} {seg.perms:<5} {seg.name}')
        return '\n'.join(lines)

    def _handle_info(self):
        return (
            f'Architecture: {self.dbg.arch.name}\n'
            f'InstructionSet: {self.dbg.isa.name}\n'
            f'Endian: {self.dbg.arch.endian.name.lower()}\n'
            f'Debugger class: {self.dbg.__class__.__name__}\n'
            f'Memory class: {self.dbg.mem.__class__.__name__}\n'
            f'Server address: {self._server.host}:{self._server.port}'
        )

    def _handle_error(self):
        if self._stop_exception is None:
            return 'No CPU error occurred.'
        return str(self._stop_exception)

    def _handle_endian(self):
        return self.dbg.arch.endian.name
def _get_field_width(values, title):
    """Return the column width needed to show `values` in hex under `title`."""
    largest = max(values)
    hex_digits = round_up(largest.bit_length(), 4) // 4
    # +2 leaves room for the '0x' prefix; never narrower than the header text.
    return max(len(title), hex_digits + 2)
"logging.getLogger",
"enum.auto",
"megastone.errors.UnsupportedError",
"io.BytesIO",
"threading.Event"
] | [((579, 606), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (596, 606), False, 'import logging\n'), ((1018, 1029), 'enum.auto', 'enum.auto', ([], {}), '()\n', (1027, 1029), False, 'import enum\n'), ((1043, 1054), 'enum.auto', 'enum.auto', ([], {}), '()\n', (1052, 1054), False, 'import enum\n'), ((1070, 1081), 'enum.auto', 'enum.auto', ([], {}), '()\n', (1079, 1081), False, 'import enum\n'), ((1665, 1682), 'threading.Event', 'threading.Event', ([], {}), '()\n', (1680, 1682), False, 'import threading\n'), ((1709, 1726), 'threading.Event', 'threading.Event', ([], {}), '()\n', (1724, 1726), False, 'import threading\n'), ((9511, 9523), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (9521, 9523), False, 'import io\n'), ((1415, 1467), 'megastone.errors.UnsupportedError', 'UnsupportedError', (['"""Architecture doesn\'t support GDB"""'], {}), '("Architecture doesn\'t support GDB")\n', (1431, 1467), False, 'from megastone.errors import UnsupportedError\n')] |
#!/usr/bin/python
from mininet.topo import Topo
from mininet.cli import CLI
from mininet.net import Mininet
from mininet.util import dumpNodeConnections
from mininet.log import setLogLevel
from mininet.node import RemoteController
# Traffic Control
from mininet.link import TCLink
REMOTE_CONTROLLER_IP = "172.21.22.1"
def simpleTest():
    """Build a 4-host single-switch network and run a ping-all connectivity check."""
    topology = SingleSwitchTopo(n=4)
    network = Mininet(topo=topology, controller=RemoteController, autoStaticArp=True)
    network.addController("c0", controller=RemoteController,
                          ip=REMOTE_CONTROLLER_IP, port=6633)

    network.start()
    print("Dumping host connections")
    dumpNodeConnections(network.hosts)
    print("Testing network connectivity")
    network.pingAll()
    network.stop()
def perfTest():
    """Build the 4-host single-switch network with traffic-controlled links
    and measure UDP bandwidth between h1 and h4 with iperf."""
    topology = SingleSwitchTopo(n=4)
    network = Mininet(topo=topology, link=TCLink,
                      controller=RemoteController,
                      autoStaticArp=True)
    network.addController("c0",
                          controller=RemoteController,
                          ip=REMOTE_CONTROLLER_IP,
                          port=6633)

    network.start()
    print("Dumping host connections")
    dumpNodeConnections(network.hosts)
    print("Testing network connectivity")
    # network.pingAll()
    print("Testing bandwidth between h1 and h4")
    h1, h4 = network.get('h1', 'h4')
    network.iperf((h1, h4), l4Type='UDP')
    network.stop()
class SingleSwitchTopo(Topo):
    """Topology with one OpenFlow 1.3 switch and n attached hosts (h1..hn)."""

    def __init__(self, n=2, **opts):
        Topo.__init__(self, **opts)
        switch = self.addSwitch('s1', protocols='OpenFlow13')
        for index in range(1, n + 1):
            host = self.addHost('h%s' % index)
            # 10 Mbit/s links; delay/loss shaping is left disabled.
            self.addLink(host, switch, bw=10)
class TwoSwitchTwoHost(Topo):
    """Two hosts joined by a pair of linked OpenFlow 1.3 switches."""

    def __init__(self):
        Topo.__init__(self)

        # Nodes
        h1 = self.addHost('h1')
        h2 = self.addHost('h2')
        s3 = self.addSwitch('s3', protocols='OpenFlow13')
        s4 = self.addSwitch('s4', protocols='OpenFlow13')

        # Wire the chain h1 - s3 - s4 - h2
        self.addLink(h1, s3)
        self.addLink(s3, s4)
        self.addLink(s4, h2)
if __name__ == '__main__':
    # Tell mininet to print useful information
    setLogLevel('info')
    # simpleTest()
    # perfTest()
    topo = TwoSwitchTwoHost()
    # controller=None: do not build a default controller; the remote
    # controller is attached explicitly below.
    net = Mininet(topo=topo, link=TCLink,
                controller=None,
                autoStaticArp=True)
    net.addController("c0",
                      controller=RemoteController,
                      ip=REMOTE_CONTROLLER_IP,
                      port=6633)
    net.start()
    print("Dumping host connections")
    dumpNodeConnections(net.hosts)
    # Drop into the interactive mininet CLI until the user exits.
    CLI(net)
    net.stop()
"mininet.topo.Topo.__init__",
"mininet.util.dumpNodeConnections",
"mininet.cli.CLI",
"mininet.log.setLogLevel",
"mininet.net.Mininet"
] | [((426, 493), 'mininet.net.Mininet', 'Mininet', ([], {'topo': 'topo', 'controller': 'RemoteController', 'autoStaticArp': '(True)'}), '(topo=topo, controller=RemoteController, autoStaticArp=True)\n', (433, 493), False, 'from mininet.net import Mininet\n'), ((668, 698), 'mininet.util.dumpNodeConnections', 'dumpNodeConnections', (['net.hosts'], {}), '(net.hosts)\n', (687, 698), False, 'from mininet.util import dumpNodeConnections\n'), ((889, 974), 'mininet.net.Mininet', 'Mininet', ([], {'topo': 'topo', 'link': 'TCLink', 'controller': 'RemoteController', 'autoStaticArp': '(True)'}), '(topo=topo, link=TCLink, controller=RemoteController, autoStaticArp=True\n )\n', (896, 974), False, 'from mininet.net import Mininet\n'), ((1224, 1254), 'mininet.util.dumpNodeConnections', 'dumpNodeConnections', (['net.hosts'], {}), '(net.hosts)\n', (1243, 1254), False, 'from mininet.util import dumpNodeConnections\n'), ((2540, 2559), 'mininet.log.setLogLevel', 'setLogLevel', (['"""info"""'], {}), "('info')\n", (2551, 2559), False, 'from mininet.log import setLogLevel\n'), ((2637, 2705), 'mininet.net.Mininet', 'Mininet', ([], {'topo': 'topo', 'link': 'TCLink', 'controller': 'None', 'autoStaticArp': '(True)'}), '(topo=topo, link=TCLink, controller=None, autoStaticArp=True)\n', (2644, 2705), False, 'from mininet.net import Mininet\n'), ((2959, 2989), 'mininet.util.dumpNodeConnections', 'dumpNodeConnections', (['net.hosts'], {}), '(net.hosts)\n', (2978, 2989), False, 'from mininet.util import dumpNodeConnections\n'), ((2994, 3002), 'mininet.cli.CLI', 'CLI', (['net'], {}), '(net)\n', (2997, 3002), False, 'from mininet.cli import CLI\n'), ((1620, 1647), 'mininet.topo.Topo.__init__', 'Topo.__init__', (['self'], {}), '(self, **opts)\n', (1633, 1647), False, 'from mininet.topo import Topo\n'), ((2043, 2062), 'mininet.topo.Topo.__init__', 'Topo.__init__', (['self'], {}), '(self)\n', (2056, 2062), False, 'from mininet.topo import Topo\n')] |
from datetime import tzinfo, timedelta, datetime
from dateutil.zoneinfo import getzoneinfofile_stream, ZoneInfoFile
class GeneralTZ(tzinfo):
    """Fixed-offset timezone `hour_offset` hours ahead of (or behind) UTC.

    The zone has no daylight-saving transitions, so dst() is always zero;
    utcoffset() alone carries the full offset.
    """
    def __init__(self, hour_offset):
        self.hour_offset = hour_offset

    def __repr__(self):
        return "<TZ{0}>".format(self.hour_offset)

    def utcoffset(self, dt):
        return timedelta(hours=self.hour_offset)

    def tzname(self, dt):
        return "TZ{0}".format(self.hour_offset)

    def dst(self, dt):
        # Bug fix: dst() must return only the DST component of the offset,
        # which is zero for a fixed-offset zone -- not the whole UTC offset
        # (per the tzinfo contract and the FixedOffset example in the
        # Python datetime docs).
        return timedelta(0)
class UTC(tzinfo):
    """Concrete tzinfo for Coordinated Universal Time.

    Based on the example in Python's documentation; used only as a
    fallback when pytz isn't available.
    """
    def utcoffset(self, dt):
        return timedelta(0)

    def dst(self, dt):
        return timedelta(0)

    def tzname(self, dt):
        return "UTC"

    def __repr__(self):
        return "<UTC>"
# Unix epoch (1970-01-01T00:00:00) as an aware UTC datetime; reference point
# for the client-timestamp conversions below.
EPOCH_TIME = datetime.utcfromtimestamp(0).replace(tzinfo=UTC())


def get_timezone_by_name(tzname):
    """Look up a tzinfo by zone name via dateutil; returns None if unknown."""
    zone_info_file = ZoneInfoFile(getzoneinfofile_stream())
    return zone_info_file.zones.get(tzname)
def get_total_seconds_since(date_time=None, since=None):
    """Return the seconds elapsed between `since` and `date_time`.

    Args:
        date_time: aware datetime to measure up to; defaults to the current
            UTC time.  (Bug fix: the old default ``datetime.utcnow()...`` was
            evaluated once at import time, so "now" was frozen forever.)
        since: aware datetime to measure from; defaults to the Unix epoch.

    Returns:
        float: elapsed seconds (negative if date_time precedes since).
    """
    if date_time is None:
        date_time = datetime.utcnow().replace(tzinfo=UTC())
    if since is None:
        since = EPOCH_TIME
    delta = date_time - since
    return delta.total_seconds()
def to_client_support_time_str(time_value):
    """Format a datetime as an ISO-8601 string with microseconds dropped."""
    truncated = time_value.replace(microsecond=0)
    return truncated.isoformat()
def to_client_timestamp(date_time):
    """Convert a datetime to a client-style timestamp (milliseconds since epoch).

    Naive datetimes are interpreted as UTC.

    Args:
        date_time: datetime (aware or naive), or None.

    Returns:
        int milliseconds since the Unix epoch, or None if date_time is None.

    Raises:
        TypeError: if date_time is neither a datetime nor None.
    """
    # Bug fix: the None check must come before the isinstance() check.
    # Previously None failed isinstance(None, datetime) and raised
    # TypeError, making the `return None` branch unreachable.
    if date_time is None:
        return None
    if not isinstance(date_time, datetime):
        raise TypeError("Argument type should be datetime or date.")
    if date_time.tzinfo is None:
        time_value = date_time.replace(tzinfo=UTC())
    else:
        time_value = date_time
    delta = (time_value - EPOCH_TIME)
    return int(delta.total_seconds() * 1000)
def from_client_timestamp(timestamp):
    """
    Convert timestamp from client to UTC datetime.

    :param timestamp: timestamp from clients.
    :return: UTC datetime.
    """
    if isinstance(timestamp, str):
        timestamp = int(timestamp)
    # Client timestamps are in milliseconds; utcfromtimestamp wants seconds.
    seconds = timestamp / 1000.0
    return datetime.utcfromtimestamp(seconds).replace(tzinfo=UTC())
def from_utc_timestamp(timestamp):
    """Convert a POSIX timestamp (seconds) to an aware UTC datetime."""
    naive = datetime.utcfromtimestamp(timestamp)
    return naive.replace(tzinfo=UTC())
def beginning_of_today():
    """Midnight of the current local day, as a naive datetime."""
    today = datetime.now()
    return today.replace(hour=0, minute=0, second=0, microsecond=0)
def end_of_today():
    """Last representable microsecond of the current local day (naive)."""
    now = datetime.now()
    return now.replace(hour=23, minute=59, second=59, microsecond=999999)
def beginning_of_week():
    """Midnight of the current week's Monday, local time (naive datetime)."""
    midnight = beginning_of_today()
    # weekday() is 0 for Monday, so stepping back that many days lands on Monday.
    return midnight - timedelta(days=midnight.weekday())
def end_of_week():
    """Last representable microsecond of the current local week (naive)."""
    one_week_minus_tick = timedelta(days=7, microseconds=-1)
    return beginning_of_week() + one_week_minus_tick
def utc_beginning_of_today(timezone_offset_hours):
    """UTC instant of the client's local midnight today (aware datetime)."""
    offset = timedelta(hours=timezone_offset_hours)
    # Shift to client-local time so year/month/day match the client's "today".
    client_now = datetime.utcnow() + offset
    local_midnight = client_now.replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=UTC())
    return local_midnight - offset
def utc_end_of_today(timezone_offset_hours):
    """UTC instant of the last microsecond of the client's local day."""
    offset = timedelta(hours=timezone_offset_hours)
    # Shift to client-local time so year/month/day match the client's "today".
    client_now = datetime.utcnow() + offset
    local_day_end = client_now.replace(hour=23, minute=59, second=59, microsecond=999999, tzinfo=UTC())
    return local_day_end - offset
def utc_beginning_of_week(timezone_offset_hours):
    """UTC instant of the client's local Monday-midnight of this week."""
    today_start = utc_beginning_of_today(timezone_offset_hours)
    client_now = datetime.utcnow() + timedelta(hours=timezone_offset_hours)
    # weekday() is 0 for Monday; step back to the start of the client's week.
    return today_start - timedelta(days=client_now.weekday())
def utc_end_of_week(timezone_offset_hours):
    """UTC instant of the last microsecond of the client's local week."""
    week_start = utc_beginning_of_week(timezone_offset_hours)
    return week_start + timedelta(days=7, microseconds=-1)
| [
"datetime.datetime.utcfromtimestamp",
"dateutil.zoneinfo.getzoneinfofile_stream",
"datetime.datetime.utcnow",
"datetime.datetime.now",
"datetime.timedelta"
] | [((2203, 2239), 'datetime.datetime.utcfromtimestamp', 'datetime.utcfromtimestamp', (['timestamp'], {}), '(timestamp)\n', (2228, 2239), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((394, 427), 'datetime.timedelta', 'timedelta', ([], {'hours': 'self.hour_offset'}), '(hours=self.hour_offset)\n', (403, 427), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((542, 575), 'datetime.timedelta', 'timedelta', ([], {'hours': 'self.hour_offset'}), '(hours=self.hour_offset)\n', (551, 575), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((797, 809), 'datetime.timedelta', 'timedelta', (['(0)'], {}), '(0)\n', (806, 809), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((897, 909), 'datetime.timedelta', 'timedelta', (['(0)'], {}), '(0)\n', (906, 909), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((925, 953), 'datetime.datetime.utcfromtimestamp', 'datetime.utcfromtimestamp', (['(0)'], {}), '(0)\n', (950, 953), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((1046, 1070), 'dateutil.zoneinfo.getzoneinfofile_stream', 'getzoneinfofile_stream', ([], {}), '()\n', (1068, 1070), False, 'from dateutil.zoneinfo import getzoneinfofile_stream, ZoneInfoFile\n'), ((2679, 2706), 'datetime.timedelta', 'timedelta', ([], {'days': 'day_of_week'}), '(days=day_of_week)\n', (2688, 2706), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((2795, 2829), 'datetime.timedelta', 'timedelta', ([], {'days': '(7)', 'microseconds': '(-1)'}), '(days=7, microseconds=-1)\n', (2804, 2829), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((2949, 2966), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2964, 2966), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((2969, 3007), 'datetime.timedelta', 'timedelta', ([], {'hours': 'timezone_offset_hours'}), '(hours=timezone_offset_hours)\n', (2978, 3007), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((3112, 
3150), 'datetime.timedelta', 'timedelta', ([], {'hours': 'timezone_offset_hours'}), '(hours=timezone_offset_hours)\n', (3121, 3150), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((3276, 3293), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (3291, 3293), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((3296, 3334), 'datetime.timedelta', 'timedelta', ([], {'hours': 'timezone_offset_hours'}), '(hours=timezone_offset_hours)\n', (3305, 3334), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((3481, 3519), 'datetime.timedelta', 'timedelta', ([], {'hours': 'timezone_offset_hours'}), '(hours=timezone_offset_hours)\n', (3490, 3519), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((3666, 3683), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (3681, 3683), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((3686, 3724), 'datetime.timedelta', 'timedelta', ([], {'hours': 'timezone_offset_hours'}), '(hours=timezone_offset_hours)\n', (3695, 3724), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((3820, 3847), 'datetime.timedelta', 'timedelta', ([], {'days': 'day_of_week'}), '(days=day_of_week)\n', (3829, 3847), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((3986, 4020), 'datetime.timedelta', 'timedelta', ([], {'days': '(7)', 'microseconds': '(-1)'}), '(days=7, microseconds=-1)\n', (3995, 4020), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((1156, 1173), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1171, 1173), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((2092, 2128), 'datetime.datetime.utcfromtimestamp', 'datetime.utcfromtimestamp', (['timestamp'], {}), '(timestamp)\n', (2117, 2128), False, 'from datetime import tzinfo, timedelta, datetime\n'), ((2321, 2335), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2333, 2335), False, 'from datetime import tzinfo, timedelta, 
datetime\n'), ((2420, 2434), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2432, 2434), False, 'from datetime import tzinfo, timedelta, datetime\n')] |
import sys
from os.path import dirname
sys.path.append(dirname("../src/"))
from src.PhaseIdentification.common import *
from src.VisualizePhaseIdentification.visualization import *
from src.PhaseIdentification.voltageBasedPhaseIdentification import *
from src.PhaseIdentification.powerBasedPhaseIdentification import *
import matplotlib.lines as lines

# --- Voltage-correlation scatter for a single feeder ------------------------
feeder = Feeder("1351982_1596442",include_three_phase=True)
phaseID = PhaseIdentification(feeder, ErrorClass(0.1))
phaseID.voltage_correlation()
voltage_data = phaseID.voltage_features
print(phaseID.phase_labels)
fig = plt.figure(figsize=(8, 6))
SIZE = 18
SMALL_SIZE = 14
plt.rc('xtick', labelsize=SMALL_SIZE)    # fontsize of the tick labels
plt.rc('ytick', labelsize=SMALL_SIZE)
plt.rc('font', size=SIZE)  # controls default text sizes
plt.rc('axes', titlesize=SIZE)  # fontsize of the axes title
plt.rc('axes', labelsize=SIZE)
plt.scatter(voltage_data[6],voltage_data[8])
x = np.linspace(0.998,1,100)
plt.plot(x, x, '-r', label='y=2x+1')  # identity reference line
plt.xticks(np.arange(0.998, 1.000, step=0.001))
plt.yticks(np.arange(0.998, 1.000, step=0.001))
plt.xlabel("Voltage at customer A (p.u.)")
plt.ylabel("Voltage at customer B (p.u.)")
plt.show()

# --- Imbalance vs. distance scatter across several feeders ------------------
included_feeders = ["86315_785383", "65028_84566", "1076069_1274129", "1351982_1596442", "65025_80035", "1076069_1274125"]
markers = ["s", "o", "D", ">", "<", "v", "+"]
plt.figure(figsize=(14, 8))
plt.rc('font', size=18)
for id in included_feeders:
    feeder = Feeder(id, include_three_phase=True)
    phaseID = PhaseIdentification(feeder, ErrorClass(1.0))
    phaseID.voltage_correlation_transfo_ref()
    viz = WrongLabels(phaseID)
    mav_imbalance = viz.get_mav_imbalance()
    customer_path_length = viz.get_customer_path_length()
    correct = viz.phase_labels == viz.partial_phase_labels
    wrong = viz.phase_labels != viz.partial_phase_labels
    plt.scatter(mav_imbalance[correct], customer_path_length[correct], color='tab:green',marker="+")
    plt.scatter(mav_imbalance[wrong], customer_path_length[wrong], color='tab:red',marker="x")
plt.xlabel("Relative voltage variation (%)")
plt.ylabel("Distance from transformer (m)")
# Bug fix: legend entries are matched to artists in plotting order, and the
# green '+' (correctly allocated) series is plotted first — so its label must
# come first, otherwise the two series are mislabeled.
plt.legend(["Correctly allocated", "Wrong allocated"])
plt.show()

# --- Histogram of imbalance for correct vs. wrong allocations ---------------
mav_imbalance_correct = np.array([])
mav_imbalance_wrong = np.array([])
for rep in range(0,10):
    for id in included_feeders:
        feeder = Feeder(id, include_three_phase=True)
        phaseID = PhaseIdentification(feeder, ErrorClass(1.0))
        phaseID.voltage_correlation_transfo_ref()
        viz = WrongLabels(phaseID)
        mav_imbalance_id = viz.get_mav_imbalance()
        correct = viz.phase_labels == viz.partial_phase_labels
        wrong = viz.phase_labels != viz.partial_phase_labels
        mav_imbalance_correct = np.append(mav_imbalance_correct, mav_imbalance_id[correct])
        mav_imbalance_wrong = np.append(mav_imbalance_wrong, mav_imbalance_id[wrong])
# Assign colors for each series and the names
colors = [ '#009E73', '#D55E00']
names = [ "Correct allocated", "Wrong allocated" ]
# Make the histogram using a list of lists
plt.figure(figsize=(8, 6))
plt.hist([mav_imbalance_correct, mav_imbalance_wrong], density=False,
         color=colors, bins=np.arange(0, 1, 0.02), label=names)
plt.xticks(np.arange(0.1, 1.000, step=0.1))
plt.yticks(np.arange(0, 600, step=100))
plt.ylim([0, 505])
plt.xlim([0, 0.91])
# Plot formatting
plt.legend()
plt.xlabel('Relative voltage variation (%)')
plt.ylabel('Frequency')
plt.show()
"os.path.dirname"
] | [((55, 73), 'os.path.dirname', 'dirname', (['"""../src/"""'], {}), "('../src/')\n", (62, 73), False, 'from os.path import dirname\n')] |
################################################################################
# @file ReversiPlayTest.py
# @brief リバーシプレイテストクラス実装ファイル
# @author <NAME>
# @date 2018.11.13
# $Version: $
# $Revision: $
#
# (c) 2018 <NAME>.
#
# - 本ソフトウェアの一部又は全てを無断で複写複製(コピー)することは、
# 著作権侵害にあたりますので、これを禁止します。
# - 本製品の使用に起因する侵害または特許権その他権利の侵害に関しては
# 当方は一切その責任を負いません。
#
################################################################################
from model import ReversiPlay
from model import Reversi
from model import ReversiConst
from model import ReversiSetting
################################################################################
# @class ReversiPlayTest
# @brief リバーシプレイテストクラス
#
################################################################################
class ReversiPlayTest:
    """Manual smoke tests for ReversiPlay.

    This object registers itself as the UI delegate of the ReversiPlay
    instance under test, so that all UI callbacks are printed to stdout
    instead of being rendered.
    """
    def __init__(self):
        # Create the target under test and hook this object in as delegate.
        self.tgt = ReversiPlay.ReversiPlay()
        self.tgt.mDelegate = self
    def ViewMsgDlg(self, title, msg):
        """Delegate callback: show a message dialog (logged to stdout)."""
        print('ViewMsgDlg : title = ' + str(title) + ' msg = ' + str(msg))
    def DrawSingle(self, y, x, sts, bk, text):
        """Delegate callback: draw one board square (logged to stdout)."""
        print('DrawSingle : y = ' + str(y) + ' x = ' + str(x) + ' sts = ' + str(sts) +
              ' bk = ' + str(bk) + ' text = ' + str(text))
    def CurColMsg(self, text):
        """Delegate callback: report the current-colour message."""
        print('CurColMsg : text = ' + str(text))
    def CurStsMsg(self, text):
        """Delegate callback: report the current-status message."""
        print('CurStsMsg : text = ' + str(text))
    def Wait(self, time):
        """Delegate callback: wait for the given time (msec); logged only."""
        print('Wait : time = ' + str(time))
    # --- Attribute smoke tests: each case sets one member and verifies it ---
    def test_case1(self):
        # Reversi board object is constructible with the default board size.
        self.tgt.mReversi = Reversi.Reversi(
            ReversiConst.ReversiConst.DEF_MASU_CNT_MAX_VAL, ReversiConst.ReversiConst.DEF_MASU_CNT_MAX_VAL)
        if self.tgt.mReversi != None:
            print("TEST CASE1 OK")
        else:
            print("TEST CASE1 NG " + str(self.tgt.mReversi))
    def test_case2(self):
        # Settings object is constructible.
        self.tgt.mSetting = ReversiSetting.ReversiSetting()
        if self.tgt.mSetting != None:
            print("TEST CASE2 OK")
        else:
            print("TEST CASE2 NG " + str(self.tgt.mSetting))
    def test_case3(self):
        self.tgt.mCurColor = 1
        if self.tgt.mCurColor == 1:
            print("TEST CASE3 OK")
        else:
            print("TEST CASE3 NG " + str(self.tgt.mCurColor))
    def test_case4(self):
        # CPU work buffer sized to the full (max x max) board.
        self.tgt.mCpu = [0 for i in range(
            ReversiConst.ReversiConst.DEF_MASU_CNT_MAX_VAL * ReversiConst.ReversiConst.DEF_MASU_CNT_MAX_VAL)]
        if self.tgt.mCpu != None:
            print("TEST CASE4 OK")
        else:
            print("TEST CASE4 NG " + str(self.tgt.mCpu))
    def test_case5(self):
        # Edge work buffer sized to the full (max x max) board.
        self.tgt.mEdge = [0 for i in range(
            ReversiConst.ReversiConst.DEF_MASU_CNT_MAX_VAL * ReversiConst.ReversiConst.DEF_MASU_CNT_MAX_VAL)]
        if self.tgt.mEdge != None:
            print("TEST CASE5 OK")
        else:
            print("TEST CASE5 NG " + str(self.tgt.mEdge))
    def test_case6(self):
        self.tgt.mPassEnaB = 1
        if self.tgt.mPassEnaB == 1:
            print("TEST CASE6 OK")
        else:
            print("TEST CASE6 NG " + str(self.tgt.mPassEnaB))
    def test_case7(self):
        self.tgt.mPassEnaW = 1
        if self.tgt.mPassEnaW == 1:
            print("TEST CASE7 OK")
        else:
            print("TEST CASE7 NG " + str(self.tgt.mPassEnaW))
    def test_case8(self):
        self.tgt.mGameEndSts = 1
        if self.tgt.mGameEndSts == 1:
            print("TEST CASE8 OK")
        else:
            print("TEST CASE8 NG " + str(self.tgt.mGameEndSts))
    def test_case9(self):
        self.tgt.mPlayLock = 1
        if self.tgt.mPlayLock == 1:
            print("TEST CASE9 OK")
        else:
            print("TEST CASE9 NG " + str(self.tgt.mPlayLock))
    def test_case10(self):
        # Exercise all the *Local delegate-forwarding helpers.
        self.tgt.mDelegate = self
        self.tgt.ViewMsgDlgLocal('テスト', 'メッセージ')
        self.tgt.DrawSingleLocal(0, 1, 2, 3, 'テキスト')
        self.tgt.CurColMsgLocal('テキスト')
        self.tgt.CurStsMsgLocal('テキスト')
        self.tgt.WaitLocal(500)
        if self.tgt.mDelegate != None:
            print("TEST CASE10 OK")
        else:
            print("TEST CASE10 NG " + str(self.tgt.mDelegate))
    # --- API smoke tests: each case just calls one public entry point -------
    def test_case11(self):
        # A move must also work without any delegate attached.
        self.tgt = ReversiPlay.ReversiPlay()
        self.tgt.mDelegate = None
        self.tgt.reversiPlay(2,4)
        print("TEST CASE11 OK")
    def test_case12(self):
        self.tgt.reversiPlaySub(1,ReversiConst.ReversiConst.REVERSI_STS_WHITE)
        print("TEST CASE12 OK")
    def test_case13(self):
        self.tgt.reversiPlayEnd()
        print("TEST CASE13 OK")
    def test_case14(self):
        self.tgt.reversiPlayPass(ReversiConst.ReversiConst.REVERSI_STS_BLACK)
        print("TEST CASE14 OK")
    def test_case15(self):
        self.tgt.reversiPlayCpu(ReversiConst.ReversiConst.REVERSI_STS_WHITE,1)
        print("TEST CASE15 OK")
    def test_case16(self):
        self.tgt.drawUpdate(ReversiConst.ReversiConst.DEF_ASSIST_ON)
        print("TEST CASE16 OK")
    def test_case17(self):
        self.tgt.drawUpdateForcibly(ReversiConst.ReversiConst.DEF_ASSIST_ON)
        print("TEST CASE17 OK")
    def test_case18(self):
        self.tgt.reset()
        print("TEST CASE18 OK")
    def test_case19(self):
        self.tgt.gameEndAnimExec()
        print("TEST CASE19 OK")
    def test_case20(self):
        self.tgt.sendDrawMsg(2,4)
        print("TEST CASE20 OK")
    def test_case21(self):
        self.tgt.sendDrawInfoMsg(2,4)
        print("TEST CASE21 OK")
    def test_case22(self):
        self.tgt.execMessage(ReversiConst.ReversiConst.LC_MSG_ERASE_ALL, None)
        print("TEST CASE22 OK")
# Run every test scenario in order on a single fixture instance.
obj = ReversiPlayTest()
for case_number in range(1, 23):
    getattr(obj, 'test_case{}'.format(case_number))()
| [
"model.ReversiPlay.ReversiPlay",
"model.Reversi.Reversi",
"model.ReversiSetting.ReversiSetting"
] | [((1176, 1201), 'model.ReversiPlay.ReversiPlay', 'ReversiPlay.ReversiPlay', ([], {}), '()\n', (1199, 1201), False, 'from model import ReversiPlay\n'), ((3809, 3924), 'model.Reversi.Reversi', 'Reversi.Reversi', (['ReversiConst.ReversiConst.DEF_MASU_CNT_MAX_VAL', 'ReversiConst.ReversiConst.DEF_MASU_CNT_MAX_VAL'], {}), '(ReversiConst.ReversiConst.DEF_MASU_CNT_MAX_VAL,\n ReversiConst.ReversiConst.DEF_MASU_CNT_MAX_VAL)\n', (3824, 3924), False, 'from model import Reversi\n'), ((4145, 4176), 'model.ReversiSetting.ReversiSetting', 'ReversiSetting.ReversiSetting', ([], {}), '()\n', (4174, 4176), False, 'from model import ReversiSetting\n'), ((6544, 6569), 'model.ReversiPlay.ReversiPlay', 'ReversiPlay.ReversiPlay', ([], {}), '()\n', (6567, 6569), False, 'from model import ReversiPlay\n')] |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'mainWindow.ui'
#
# Created by: PyQt5 UI code generator 5.6
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    """UI class for the RTGraph main window, generated by pyuic5.

    NOTE: this code was generated from mainWindow.ui; manual edits are lost
    on regeneration — change the .ui file instead.
    """
    def setupUi(self, MainWindow):
        """Build the widget tree and layouts on the given main window."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(704, 558)
        MainWindow.setMinimumSize(QtCore.QSize(0, 0))
        MainWindow.setStyleSheet("")
        MainWindow.setTabShape(QtWidgets.QTabWidget.Rounded)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.gridLayout = QtWidgets.QGridLayout(self.centralwidget)
        self.gridLayout.setObjectName("gridLayout")
        # Bottom control strip: port/speed selectors, sample count, buttons.
        self.Layout_controls = QtWidgets.QGridLayout()
        self.Layout_controls.setObjectName("Layout_controls")
        self.cBox_Speed = QtWidgets.QComboBox(self.centralwidget)
        self.cBox_Speed.setEditable(True)
        self.cBox_Speed.setObjectName("cBox_Speed")
        self.Layout_controls.addWidget(self.cBox_Speed, 1, 1, 1, 1)
        self.pButton_Stop = QtWidgets.QPushButton(self.centralwidget)
        self.pButton_Stop.setObjectName("pButton_Stop")
        self.Layout_controls.addWidget(self.pButton_Stop, 1, 3, 1, 1)
        self.cBox_Port = QtWidgets.QComboBox(self.centralwidget)
        self.cBox_Port.setEditable(True)
        self.cBox_Port.setObjectName("cBox_Port")
        self.Layout_controls.addWidget(self.cBox_Port, 0, 1, 1, 1)
        self.cBox_Source = QtWidgets.QComboBox(self.centralwidget)
        self.cBox_Source.setObjectName("cBox_Source")
        self.Layout_controls.addWidget(self.cBox_Source, 0, 0, 1, 1)
        self.pButton_Start = QtWidgets.QPushButton(self.centralwidget)
        self.pButton_Start.setMinimumSize(QtCore.QSize(0, 0))
        self.pButton_Start.setObjectName("pButton_Start")
        self.Layout_controls.addWidget(self.pButton_Start, 0, 3, 1, 1)
        self.sBox_Samples = QtWidgets.QSpinBox(self.centralwidget)
        self.sBox_Samples.setMinimum(1)
        self.sBox_Samples.setMaximum(100000)
        self.sBox_Samples.setProperty("value", 500)
        self.sBox_Samples.setObjectName("sBox_Samples")
        self.Layout_controls.addWidget(self.sBox_Samples, 0, 2, 1, 1)
        self.chBox_export = QtWidgets.QCheckBox(self.centralwidget)
        self.chBox_export.setEnabled(True)
        self.chBox_export.setObjectName("chBox_export")
        self.Layout_controls.addWidget(self.chBox_export, 1, 2, 1, 1)
        self.gridLayout.addLayout(self.Layout_controls, 7, 0, 1, 2)
        # Plot area: a pyqtgraph GraphicsLayoutWidget fills the window.
        self.Layout_graphs = QtWidgets.QGridLayout()
        self.Layout_graphs.setObjectName("Layout_graphs")
        self.plt = GraphicsLayoutWidget(self.centralwidget)
        self.plt.setAutoFillBackground(False)
        self.plt.setStyleSheet("border: 0px;")
        self.plt.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.plt.setFrameShadow(QtWidgets.QFrame.Plain)
        self.plt.setLineWidth(0)
        self.plt.setObjectName("plt")
        self.Layout_graphs.addWidget(self.plt, 0, 0, 1, 1)
        self.gridLayout.addLayout(self.Layout_graphs, 2, 1, 1, 1)
        MainWindow.setCentralWidget(self.centralwidget)
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Set all user-visible strings (Qt translation hook)."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "RTGraph"))
        self.pButton_Stop.setText(_translate("MainWindow", "Stop"))
        self.pButton_Start.setText(_translate("MainWindow", "Start"))
        self.sBox_Samples.setSuffix(_translate("MainWindow", " samples"))
        self.sBox_Samples.setPrefix(_translate("MainWindow", "Show "))
        self.chBox_export.setText(_translate("MainWindow", "Export to CSV"))
from pyqtgraph import GraphicsLayoutWidget
| [
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtWidgets.QSpinBox",
"PyQt5.QtWidgets.QComboBox",
"PyQt5.QtCore.QMetaObject.connectSlotsByName",
"PyQt5.QtWidgets.QGridLayout",
"pyqtgraph.GraphicsLayoutWidget",
"PyQt5.QtWidgets.QCheckBox",
"PyQt5.QtWidgets.QPushButton",
"PyQt5.QtCore.QSize"
] | [((569, 598), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['MainWindow'], {}), '(MainWindow)\n', (586, 598), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((683, 724), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', (['self.centralwidget'], {}), '(self.centralwidget)\n', (704, 724), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((808, 831), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', ([], {}), '()\n', (829, 831), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((920, 959), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (939, 959), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1150, 1191), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1171, 1191), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1343, 1382), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1362, 1382), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1568, 1607), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1587, 1607), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1760, 1801), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1781, 1801), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2021, 2059), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2039, 2059), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2351, 2390), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2370, 2390), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2657, 2680), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', ([], {}), '()\n', (2678, 2680), False, 'from PyQt5 
import QtCore, QtGui, QtWidgets\n'), ((2758, 2798), 'pyqtgraph.GraphicsLayoutWidget', 'GraphicsLayoutWidget', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2778, 2798), False, 'from pyqtgraph import GraphicsLayoutWidget\n'), ((3309, 3358), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['MainWindow'], {}), '(MainWindow)\n', (3346, 3358), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((422, 440), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(0)', '(0)'], {}), '(0, 0)\n', (434, 440), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1844, 1862), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(0)', '(0)'], {}), '(0, 0)\n', (1856, 1862), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')] |
"""
Prevent the system from shutting down if you have
no admin control and need it to run for many hours
"""
import pyautogui
import time
import sys
from datetime import datetime
# quickly move the mouse to the upper left corner to exit
pyautogui.FAILSAFE=True
numMin = 3
run = True
while(run == True):
x=0
print('Keep Awake: move mouse quickly to top left to exit')
# # in_put = input('Enter q to quit, anything else to continue: ')
# if in_put == 'q':
# run == False
# break
# else:
while(x < numMin):
print(x)
time.sleep(50)
x += 1
# for i in range(0,200, 10):
# pyautogui.moveTo(500,250 + i, 5) # x, y, over 5 seconds
pyautogui.press('volumedown')
time.sleep(5)
pyautogui.press('volumeup')
print('Volume up down at {}'.format(datetime.now().time()))
time.sleep(5)
# for i in range(0,3):
pyautogui.press("shift")
print('Key pressed at {}'.format(datetime.now().time()))
# used to keep the cmd window open when run on the desktop
input("Press enter to exit ;)")
| [
"datetime.datetime.now",
"pyautogui.press",
"time.sleep"
] | [((710, 739), 'pyautogui.press', 'pyautogui.press', (['"""volumedown"""'], {}), "('volumedown')\n", (725, 739), False, 'import pyautogui\n'), ((744, 757), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (754, 757), False, 'import time\n'), ((762, 789), 'pyautogui.press', 'pyautogui.press', (['"""volumeup"""'], {}), "('volumeup')\n", (777, 789), False, 'import pyautogui\n'), ((858, 871), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (868, 871), False, 'import time\n'), ((903, 927), 'pyautogui.press', 'pyautogui.press', (['"""shift"""'], {}), "('shift')\n", (918, 927), False, 'import pyautogui\n'), ((577, 591), 'time.sleep', 'time.sleep', (['(50)'], {}), '(50)\n', (587, 591), False, 'import time\n'), ((830, 844), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (842, 844), False, 'from datetime import datetime\n'), ((965, 979), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (977, 979), False, 'from datetime import datetime\n')] |
import json
import random
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import Column, Integer, String, MetaData, Table
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.sql import select
import monitor_db
import monitor_logger
import monitor_util
# Declarative base for the plain-SQLAlchemy ORM mapping (Monitor) below.
Base = declarative_base()
# SECURITY: database credentials are hard-coded in the URL; move them to
# configuration / secrets management before deploying.
url = 'mysql+mysqlconnector://hawkeye:Hawkeye#Pwd123@10.214.168.25:3306/wingx_hawkeye'
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = url
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
db = SQLAlchemy(app)
logger = monitor_logger.get_logger(__name__)
# http://flask-sqlalchemy.pocoo.org/2.3/
# http://docs.sqlalchemy.org/en/latest/
# SQLAlchemy orm
class Monitor(Base):
    """Plain-SQLAlchemy declarative mapping of the t_credit_monitor table.

    The same table is also mapped as FlaskMonitor (Flask-SQLAlchemy) and
    T_Monitor (SQLAlchemy Core) in this module, as examples of the three
    access styles.
    """
    __tablename__ = 't_credit_monitor'
    # All business columns are stored as strings of up to 128 characters.
    id = Column('id', Integer, primary_key=True)
    credit_type = Column('credit_type', String(128))
    query_type = Column('query_type', String(128))
    credit_status = Column('credit_status', String(128))
    monitor_time = Column('monitor_time', String(128))
    elapsed_time = Column('elapsed_time', String(128))
    create_time = Column('create_time', String(128))
    def __init__(self, id, credit_type, query_type, credit_status, monitor_time, elapsed_time, create_time):
        self.id = id
        self.credit_type = credit_type
        self.query_type = query_type
        self.credit_status = credit_status
        self.monitor_time = monitor_time
        self.elapsed_time = elapsed_time
        self.create_time = create_time
    def __repr__(self):
        return '<id is %s, creditType is %s, queryType is %s, creditStatus is %s, monitorTime is %s, elapsedTime is %s>' % (
            self.id, self.credit_type, self.query_type, self.credit_status, self.monitor_time, self.elapsed_time)
# Flask-SQLAlchemy
class FlaskMonitor(db.Model):
    """Flask-SQLAlchemy model for t_credit_monitor.

    Duplicates the Monitor mapping above on purpose, so that the module can
    demonstrate the Flask-SQLAlchemy query API (see
    get_monitor_flask_sqlalchemy).
    """
    __tablename__ = 't_credit_monitor'
    # All business columns are stored as strings of up to 128 characters.
    id = Column('id', Integer, primary_key=True)
    credit_type = Column('credit_type', String(128))
    query_type = Column('query_type', String(128))
    credit_status = Column('credit_status', String(128))
    monitor_time = Column('monitor_time', String(128))
    elapsed_time = Column('elapsed_time', String(128))
    create_time = Column('create_time', String(128))
    def __init__(self, id, credit_type, query_type, credit_status, monitor_time, elapsed_time, create_time):
        self.id = id
        self.credit_type = credit_type
        self.query_type = query_type
        self.credit_status = credit_status
        self.monitor_time = monitor_time
        self.elapsed_time = elapsed_time
        self.create_time = create_time
    def __repr__(self):
        return '<id is %s, creditType is %s, queryType is %s, creditStatus is %s, monitorTime is %s, elapsedTime is %s>' % (
            self.id, self.credit_type, self.query_type, self.credit_status, self.monitor_time, self.elapsed_time)
# SQLAlchemy core
# SQLAlchemy Core definition of the same t_credit_monitor table, used by the
# Core-style helpers (get_monitor_with_core / add_monitor).
metadata = MetaData()
T_Monitor = Table('t_credit_monitor', metadata, Column('id', Integer, primary_key=True)
                 , Column('credit_type', String(128))
                 , Column('query_type', String(128))
                 , Column('credit_status', String(128))
                 , Column('monitor_time', String(128))
                 , Column('elapsed_time', String(128))
                 , Column('create_time', String(128)))
# http://docs.sqlalchemy.org/en/latest/
# SQLAlchemy orm
def get_monitor_with_orm():
    """Demo of the plain-ORM access style: print a few query results."""
    session = monitor_db.get_connection_session(url)
    monitor_query = session.query(Monitor)
    print(monitor_query.limit(2).all())
    print(monitor_query.first())
    print(type(monitor_query))
    print(monitor_query.count())
# SQLAlchemy core
def get_monitor_with_core():
    """Demo of the Core access style: SELECT all rows and print the count."""
    connection = monitor_db.get_connection_with_url(url)
    result_proxy = connection.execute(select([T_Monitor]))
    print(result_proxy.rowcount)
    print(type(result_proxy.fetchall()))
# using flask_sqlalchemy
def get_monitor_flask_sqlalchemy(page=1, limit=10):
    """Return a Flask-SQLAlchemy Pagination of monitor rows, or None on error.

    :param page: 1-based page number.
    :param limit: number of rows per page.
    """
    try:
        logger.debug('get_monitor_flask_sqlalchemy: page is %s, limit is %s' % (page, limit))
        return FlaskMonitor.query.paginate(page, limit)
    except Exception:
        # Fix: log the full traceback at error level via logger.exception
        # instead of a debug-level message that silently hides failures.
        # Callers still receive None on failure, as before.
        logger.exception('Exception in get_monitor_flask_sqlalchemy')
        return None
# add monitor
def add_monitor(d):
    """Insert one monitor record via the Core table.

    :param d: JSON string (request body with Content-Type application/json)
              containing the keys credit_type, query_type and credit_status.
    """
    logger.debug('add monitor is %s' % d)
    conn = monitor_db.get_connection_with_url(url)
    d = json.loads(d)
    # Content-Type: application/json
    conn.execute(T_Monitor.insert(), [{
        'credit_type': d['credit_type']
        , 'query_type': d['query_type']
        , 'credit_status': d['credit_status']
        # elapsed_time is currently faked with a random value (demo data).
        , 'elapsed_time': int(random.random() * 100)
    }])
    # (A commented-out variant for application/x-www-form-urlencoded payloads
    # used to live here; recover it from version control if ever needed.)
if __name__ == '__main__':
    # Ad-hoc smoke test: print the items of page 1 with 2 rows per page.
    print(get_monitor_flask_sqlalchemy(1, 2).items)
| [
"monitor_db.get_connection_session",
"json.loads",
"monitor_logger.get_logger",
"monitor_db.get_connection_with_url",
"sqlalchemy.sql.select",
"flask.Flask",
"sqlalchemy.MetaData",
"sqlalchemy.String",
"sqlalchemy.ext.declarative.declarative_base",
"flask_sqlalchemy.SQLAlchemy",
"random.random",... | [((314, 332), 'sqlalchemy.ext.declarative.declarative_base', 'declarative_base', ([], {}), '()\n', (330, 332), False, 'from sqlalchemy.ext.declarative import declarative_base\n'), ((428, 443), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (433, 443), False, 'from flask import Flask\n'), ((545, 560), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (555, 560), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((571, 606), 'monitor_logger.get_logger', 'monitor_logger.get_logger', (['__name__'], {}), '(__name__)\n', (596, 606), False, 'import monitor_logger\n'), ((2906, 2916), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (2914, 2916), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((777, 816), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (783, 816), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((1876, 1915), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (1882, 1915), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((2965, 3004), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (2971, 3004), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((3434, 3472), 'monitor_db.get_connection_session', 'monitor_db.get_connection_session', (['url'], {}), '(url)\n', (3467, 3472), False, 'import monitor_db\n'), ((3682, 3721), 'monitor_db.get_connection_with_url', 'monitor_db.get_connection_with_url', (['url'], {}), '(url)\n', (3716, 3721), False, 'import monitor_db\n'), ((3732, 3751), 'sqlalchemy.sql.select', 'select', (['[T_Monitor]'], {}), '([T_Monitor])\n', (3738, 3751), False, 'from sqlalchemy.sql import select\n'), ((4292, 4331), 'monitor_db.get_connection_with_url', 
'monitor_db.get_connection_with_url', (['url'], {}), '(url)\n', (4326, 4331), False, 'import monitor_db\n'), ((4340, 4353), 'json.loads', 'json.loads', (['d'], {}), '(d)\n', (4350, 4353), False, 'import json\n'), ((857, 868), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (863, 868), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((908, 919), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (914, 919), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((965, 976), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (971, 976), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((1020, 1031), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (1026, 1031), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((1075, 1086), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (1081, 1086), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((1128, 1139), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (1134, 1139), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((1956, 1967), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (1962, 1967), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((2007, 2018), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (2013, 2018), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((2064, 2075), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (2070, 2075), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((2119, 2130), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (2125, 2130), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((2174, 2185), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (2180, 2185), False, 'from sqlalchemy import Column, Integer, String, 
MetaData, Table\n'), ((2227, 2238), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (2233, 2238), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((3047, 3058), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (3053, 3058), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((3101, 3112), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (3107, 3112), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((3158, 3169), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (3164, 3169), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((3214, 3225), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (3220, 3225), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((3270, 3281), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (3276, 3281), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((3325, 3336), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (3331, 3336), False, 'from sqlalchemy import Column, Integer, String, MetaData, Table\n'), ((4587, 4602), 'random.random', 'random.random', ([], {}), '()\n', (4600, 4602), False, 'import random\n')] |
# author: badf00d21
import os
from os.path import dirname, join
from textx import metamodel_from_file
from textx.export import metamodel_export, model_export
from datetime import datetime
from distutils.dir_util import copy_tree
CURRENT_DIR = dirname(__file__)  # directory containing this generator module
# Maps logical directory names (root/main/model/...) to output paths;
# populated by init_project_directory_tree().
PROJECT_DIRECTORY_TREE = {}
# General project metadata (group/artifact/name/version/...);
# populated by init_general_info().
PROJECT_GENERAL_INFO = {}
def init_general_info(projectModel):
    """Populate PROJECT_GENERAL_INFO from the model's Gradle build section."""
    global PROJECT_GENERAL_INFO
    build = projectModel.gradleBuildModel
    group_id = build.groupId
    project_name = build.projectName
    # Fall back to 1.0.0 when no explicit application version is given.
    version = build.appVersion if build.appVersion != '' else '1.0.0'
    PROJECT_GENERAL_INFO = {
        'author': 'JSD SpringBoot generator by <NAME>',
        'date': datetime.now().strftime('%d.%m.%y'),
        'packageRoot': '{}.{}'.format(group_id, project_name.lower()),
        'groupId': group_id,
        'artifactId': build.artifactId,
        'name': project_name,
        'version': version
    }
def init_project_directory_tree(output_path):
    """Fill the module-level PROJECT_DIRECTORY_TREE with the generated
    project's directory layout rooted at output_path/<project name>."""
    global PROJECT_DIRECTORY_TREE
    package_path = PROJECT_GENERAL_INFO['packageRoot'].replace('.', '/')
    root = join(output_path, PROJECT_GENERAL_INFO['name'])
    main = join(root, 'src/main/java/' + package_path)
    generated = join(main, 'generated')
    PROJECT_DIRECTORY_TREE['root'] = root
    PROJECT_DIRECTORY_TREE['main'] = main
    PROJECT_DIRECTORY_TREE['resources'] = join(root, 'src/main/resources/')
    PROJECT_DIRECTORY_TREE['test'] = join(root, 'src/test/java/' + package_path)
    PROJECT_DIRECTORY_TREE['generated'] = generated
    # remaining folders hang either off the generated tree or off main
    for key, parent, leaf in (
            ('model', generated, 'model'),
            ('service_gen', generated, 'service'),
            ('service', main, 'service'),
            ('config', main, 'config'),
            ('repository', main, 'repository'),
            ('controller_gen', generated, 'controller'),
            ('controller', main, 'controller')):
        PROJECT_DIRECTORY_TREE[key] = join(parent, leaf)
def copy_static_files():
    """Copy the bundled gradle-wrapper files into the generated project root."""
    wrapper_dir = join(CURRENT_DIR, './static_files/gradle_wrapper')
    copy_tree(wrapper_dir, PROJECT_DIRECTORY_TREE['root'])
def prepare_env(projectModel, output_path):
    """Create the output directory layout for the generated project, copy the
    static gradle files, and return the populated PROJECT_GENERAL_INFO dict."""
    if not os.path.exists(output_path):
        os.makedirs(output_path)
        print('Created directories on path: ' + output_path + ' for generating project & dotexport.')
    init_general_info(projectModel)
    init_project_directory_tree(output_path)
    copy_static_files()
    # materialise every directory of the tree that does not exist yet
    for key, directory in PROJECT_DIRECTORY_TREE.items():
        if not os.path.exists(directory):
            os.makedirs(directory)
            print('Generated project directory on path: ', directory)
    return PROJECT_GENERAL_INFO
class BaseType(object):
    """Named builtin type registered with the textX metamodel.

    Instances stringify to their bare type name (e.g. ``int``, ``String``).
    """

    def __init__(self, parent, name):
        self.parent = parent  # required by textX's custom-class convention
        self.name = name

    def __str__(self):
        return self.name
def get_metamodel(path_to_grammar):
    """Load the textX metamodel from the given grammar file, registering the
    simple builtin types (int, String, Long, boolean) as BaseType instances."""
    builtin_names = ('int', 'String', 'Long', 'boolean')
    simple_types = {name: BaseType(None, name) for name in builtin_names}
    print('Loading metamodel_from_file: ' + path_to_grammar)
    return metamodel_from_file(path_to_grammar,
                               classes=[BaseType],
                               builtins=simple_types)
def export_to_dot(mm, mff, path):
    """Export .dot visualisations of the metamodel and model into <path>/dotexport."""
    target = join(path, 'dotexport')
    if not os.path.exists(target):
        os.mkdir(target)
    metamodel_export(mm, join(target, 'meta-model.dot'))
    model_export(mff, join(target, 'model.dot'))
    print('.dot files generated in:' + target)
| [
"os.path.exists",
"distutils.dir_util.copy_tree",
"os.makedirs",
"os.path.join",
"textx.metamodel_from_file",
"os.path.dirname",
"datetime.datetime.now",
"os.mkdir"
] | [((244, 261), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (251, 261), False, 'from os.path import dirname, join\n'), ((1157, 1204), 'os.path.join', 'join', (['output_path', "PROJECT_GENERAL_INFO['name']"], {}), "(output_path, PROJECT_GENERAL_INFO['name'])\n", (1161, 1204), False, 'from os.path import dirname, join\n'), ((1395, 1454), 'os.path.join', 'join', (["PROJECT_DIRECTORY_TREE['root']", '"""src/main/resources/"""'], {}), "(PROJECT_DIRECTORY_TREE['root'], 'src/main/resources/')\n", (1399, 1454), False, 'from os.path import dirname, join\n'), ((1645, 1694), 'os.path.join', 'join', (["PROJECT_DIRECTORY_TREE['main']", '"""generated"""'], {}), "(PROJECT_DIRECTORY_TREE['main'], 'generated')\n", (1649, 1694), False, 'from os.path import dirname, join\n'), ((1733, 1783), 'os.path.join', 'join', (["PROJECT_DIRECTORY_TREE['generated']", '"""model"""'], {}), "(PROJECT_DIRECTORY_TREE['generated'], 'model')\n", (1737, 1783), False, 'from os.path import dirname, join\n'), ((1828, 1880), 'os.path.join', 'join', (["PROJECT_DIRECTORY_TREE['generated']", '"""service"""'], {}), "(PROJECT_DIRECTORY_TREE['generated'], 'service')\n", (1832, 1880), False, 'from os.path import dirname, join\n'), ((1921, 1968), 'os.path.join', 'join', (["PROJECT_DIRECTORY_TREE['main']", '"""service"""'], {}), "(PROJECT_DIRECTORY_TREE['main'], 'service')\n", (1925, 1968), False, 'from os.path import dirname, join\n'), ((2008, 2054), 'os.path.join', 'join', (["PROJECT_DIRECTORY_TREE['main']", '"""config"""'], {}), "(PROJECT_DIRECTORY_TREE['main'], 'config')\n", (2012, 2054), False, 'from os.path import dirname, join\n'), ((2098, 2148), 'os.path.join', 'join', (["PROJECT_DIRECTORY_TREE['main']", '"""repository"""'], {}), "(PROJECT_DIRECTORY_TREE['main'], 'repository')\n", (2102, 2148), False, 'from os.path import dirname, join\n'), ((2196, 2251), 'os.path.join', 'join', (["PROJECT_DIRECTORY_TREE['generated']", '"""controller"""'], {}), "(PROJECT_DIRECTORY_TREE['generated'], 
'controller')\n", (2200, 2251), False, 'from os.path import dirname, join\n'), ((2295, 2345), 'os.path.join', 'join', (["PROJECT_DIRECTORY_TREE['main']", '"""controller"""'], {}), "(PROJECT_DIRECTORY_TREE['main'], 'controller')\n", (2299, 2345), False, 'from os.path import dirname, join\n'), ((2394, 2444), 'os.path.join', 'join', (['CURRENT_DIR', '"""./static_files/gradle_wrapper"""'], {}), "(CURRENT_DIR, './static_files/gradle_wrapper')\n", (2398, 2444), False, 'from os.path import dirname, join\n'), ((2499, 2538), 'distutils.dir_util.copy_tree', 'copy_tree', (['from_directory', 'to_directory'], {}), '(from_directory, to_directory)\n', (2508, 2538), False, 'from distutils.dir_util import copy_tree\n'), ((3614, 3693), 'textx.metamodel_from_file', 'metamodel_from_file', (['path_to_grammar'], {'classes': '[BaseType]', 'builtins': 'simple_types'}), '(path_to_grammar, classes=[BaseType], builtins=simple_types)\n', (3633, 3693), False, 'from textx import metamodel_from_file\n'), ((3840, 3863), 'os.path.join', 'join', (['path', '"""dotexport"""'], {}), "(path, 'dotexport')\n", (3844, 3863), False, 'from os.path import dirname, join\n'), ((2596, 2623), 'os.path.exists', 'os.path.exists', (['output_path'], {}), '(output_path)\n', (2610, 2623), False, 'import os\n'), ((2633, 2657), 'os.makedirs', 'os.makedirs', (['output_path'], {}), '(output_path)\n', (2644, 2657), False, 'import os\n'), ((3875, 3901), 'os.path.exists', 'os.path.exists', (['dot_folder'], {}), '(dot_folder)\n', (3889, 3901), False, 'import os\n'), ((3911, 3931), 'os.mkdir', 'os.mkdir', (['dot_folder'], {}), '(dot_folder)\n', (3919, 3931), False, 'import os\n'), ((3957, 3991), 'os.path.join', 'join', (['dot_folder', '"""meta-model.dot"""'], {}), "(dot_folder, 'meta-model.dot')\n", (3961, 3991), False, 'from os.path import dirname, join\n'), ((4015, 4044), 'os.path.join', 'join', (['dot_folder', '"""model.dot"""'], {}), "(dot_folder, 'model.dot')\n", (4019, 4044), False, 'from os.path import dirname, 
join\n'), ((2919, 2962), 'os.path.exists', 'os.path.exists', (['PROJECT_DIRECTORY_TREE[key]'], {}), '(PROJECT_DIRECTORY_TREE[key])\n', (2933, 2962), False, 'import os\n'), ((2976, 3016), 'os.makedirs', 'os.makedirs', (['PROJECT_DIRECTORY_TREE[key]'], {}), '(PROJECT_DIRECTORY_TREE[key])\n', (2987, 3016), False, 'import os\n'), ((841, 855), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (853, 855), False, 'from datetime import datetime\n')] |
from dataclasses import dataclass, field
from typing import Optional
__NAMESPACE__ = "http://www.opengis.net/ogc"
@dataclass
class FunctionNameType:
    """Binding class for the OGC FunctionName element: a text value plus a
    required ``nArgs`` XML attribute."""

    # element text content (required by the schema)
    value: str = field(default="", metadata={"required": True})
    # number-of-arguments attribute, serialized as "nArgs"
    n_args: Optional[str] = field(
        default=None,
        metadata={"name": "nArgs", "type": "Attribute", "required": True},
    )
| [
"dataclasses.field"
] | [((169, 215), 'dataclasses.field', 'field', ([], {'default': '""""""', 'metadata': "{'required': True}"}), "(default='', metadata={'required': True})\n", (174, 215), False, 'from dataclasses import dataclass, field\n'), ((290, 380), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'name': 'nArgs', 'type': 'Attribute', 'required': True}"}), "(default=None, metadata={'name': 'nArgs', 'type': 'Attribute',\n 'required': True})\n", (295, 380), False, 'from dataclasses import dataclass, field\n')] |
# coding: utf-8
"""
Selling Partner API for Pricing
The Selling Partner API for Pricing helps you programmatically retrieve product pricing and offer information for Amazon Marketplace products. # noqa: E501
OpenAPI spec version: v0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class OfferType(object):
    """A seller's offer for an Amazon Marketplace listing.

    Auto-generated Swagger model, lightly edited: ``to_dict`` no longer
    depends on the Python 2 compatibility shim ``six``.

    All six attributes are required; every property setter raises
    ``ValueError`` when given ``None``.
    """

    # Key: attribute name; value: attribute type (consumed by to_dict()).
    swagger_types = {
        'buying_price': 'PriceType',
        'regular_price': 'MoneyType',
        'fulfillment_channel': 'str',
        'item_condition': 'str',
        'item_sub_condition': 'str',
        'seller_sku': 'str'
    }

    # Key: attribute name; value: JSON key in the API definition.
    attribute_map = {
        'buying_price': 'BuyingPrice',
        'regular_price': 'RegularPrice',
        'fulfillment_channel': 'FulfillmentChannel',
        'item_condition': 'ItemCondition',
        'item_sub_condition': 'ItemSubCondition',
        'seller_sku': 'SellerSKU'
    }

    def __init__(self, buying_price=None, regular_price=None, fulfillment_channel=None, item_condition=None, item_sub_condition=None, seller_sku=None):  # noqa: E501
        """OfferType - a model defined in Swagger."""  # noqa: E501
        self._buying_price = None
        self._regular_price = None
        self._fulfillment_channel = None
        self._item_condition = None
        self._item_sub_condition = None
        self._seller_sku = None
        self.discriminator = None
        # Assign through the properties so the required-value checks run.
        self.buying_price = buying_price
        self.regular_price = regular_price
        self.fulfillment_channel = fulfillment_channel
        self.item_condition = item_condition
        self.item_sub_condition = item_sub_condition
        self.seller_sku = seller_sku

    @property
    def buying_price(self):
        """Gets the buying_price of this OfferType.

        :rtype: PriceType
        """
        return self._buying_price

    @buying_price.setter
    def buying_price(self, buying_price):
        """Sets the buying_price of this OfferType. Required; must not be None.

        :type: PriceType
        """
        if buying_price is None:
            raise ValueError("Invalid value for `buying_price`, must not be `None`")  # noqa: E501
        self._buying_price = buying_price

    @property
    def regular_price(self):
        """Gets the regular_price of this OfferType.

        :rtype: MoneyType
        """
        return self._regular_price

    @regular_price.setter
    def regular_price(self, regular_price):
        """Sets the regular_price of this OfferType. Required; must not be None.

        :type: MoneyType
        """
        if regular_price is None:
            raise ValueError("Invalid value for `regular_price`, must not be `None`")  # noqa: E501
        self._regular_price = regular_price

    @property
    def fulfillment_channel(self):
        """Gets the fulfillment_channel of this OfferType.

        The fulfillment channel for the offer listing. Possible values:
        * Amazon - Fulfilled by Amazon. * Merchant - Fulfilled by the seller.

        :rtype: str
        """
        return self._fulfillment_channel

    @fulfillment_channel.setter
    def fulfillment_channel(self, fulfillment_channel):
        """Sets the fulfillment_channel of this OfferType. Required; must not be None.

        :type: str
        """
        if fulfillment_channel is None:
            raise ValueError("Invalid value for `fulfillment_channel`, must not be `None`")  # noqa: E501
        self._fulfillment_channel = fulfillment_channel

    @property
    def item_condition(self):
        """Gets the item_condition of this OfferType.

        The item condition for the offer listing. Possible values:
        New, Used, Collectible, Refurbished, or Club.

        :rtype: str
        """
        return self._item_condition

    @item_condition.setter
    def item_condition(self, item_condition):
        """Sets the item_condition of this OfferType. Required; must not be None.

        :type: str
        """
        if item_condition is None:
            raise ValueError("Invalid value for `item_condition`, must not be `None`")  # noqa: E501
        self._item_condition = item_condition

    @property
    def item_sub_condition(self):
        """Gets the item_sub_condition of this OfferType.

        The item subcondition for the offer listing. Possible values:
        New, Mint, Very Good, Good, Acceptable, Poor, Club, OEM, Warranty,
        Refurbished Warranty, Refurbished, Open Box, or Other.

        :rtype: str
        """
        return self._item_sub_condition

    @item_sub_condition.setter
    def item_sub_condition(self, item_sub_condition):
        """Sets the item_sub_condition of this OfferType. Required; must not be None.

        :type: str
        """
        if item_sub_condition is None:
            raise ValueError("Invalid value for `item_sub_condition`, must not be `None`")  # noqa: E501
        self._item_sub_condition = item_sub_condition

    @property
    def seller_sku(self):
        """Gets the seller_sku of this OfferType.

        The seller stock keeping unit (SKU) of the item.

        :rtype: str
        """
        return self._seller_sku

    @seller_sku.setter
    def seller_sku(self, seller_sku):
        """Sets the seller_sku of this OfferType. Required; must not be None.

        :type: str
        """
        if seller_sku is None:
            raise ValueError("Invalid value for `seller_sku`, must not be `None`")  # noqa: E501
        self._seller_sku = seller_sku

    def to_dict(self):
        """Returns the model properties as a dict, recursing into nested models."""
        result = {}
        # Fix: iterate the dict directly instead of via the `six` Py2 shim.
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    x.to_dict() if hasattr(x, "to_dict") else x for x in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: v.to_dict() if hasattr(v, "to_dict") else v
                    for k, v in value.items()
                }
            else:
                result[attr] = value
        if issubclass(OfferType, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Returns the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal."""
        if not isinstance(other, OfferType):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal."""
        return not self == other
| [
"six.iteritems"
] | [((7458, 7491), 'six.iteritems', 'six.iteritems', (['self.swagger_types'], {}), '(self.swagger_types)\n', (7471, 7491), False, 'import six\n')] |
import numpy as np
from sklearn import multioutput
import xgboost as xgb
class Regressor():
    """Cascade regressor for chained optical modules (EDFA / SMF).

    ``fit`` trains one XGBoost multi-output model per known size-1 module
    configuration, plus per-component 'joker' fallback models trained on all
    data of that component type; size-2 cascades refine the second module's
    model on the first module's predictions. ``predict`` chains the per-module
    models, feeding each prediction forward as the next module's input.
    """

    def __init__(self):
        # Bug fix: this was misspelled `_init_`, so it was never invoked as
        # the constructor and `self.model` was never initialised.
        super().__init__()
        self.model = None

    def fit(self, X, y):
        """Train the model bag.

        :param X: iterable of (metadata, signal) pairs; metadata is a list of
            (module_name, (param0, param1)) tuples describing the cascade.
        :param y: target output signals, aligned with X.
        """
        self.model_bag = dict()
        self.data_bag = dict()
        max_cascade_size = 9
        for size in range(1, max_cascade_size):
            self.data_bag[size] = dict()
            self.model_bag[size] = dict()
        # Bucket the training pairs by cascade size and configuration string
        # (e.g. "EDFA_10_5-SMF_80_0" for a two-module cascade).
        for inp, out in zip(X, y):
            metadata, signal = inp[0], inp[1]
            metadata_str = "-".join(
                module[0] + "_" + str(module[1][0]) + "_" + str(module[1][1])
                for module in metadata)
            # Fix: was a bare try/except around the append; setdefault keeps
            # the same behaviour without swallowing unrelated errors.
            self.data_bag[len(metadata)].setdefault(metadata_str, []).append([signal, out])

        all_train_input_EDFA = []
        all_train_output_EDFA = []
        all_train_input_SMF = []
        all_train_output_SMF = []
        # Train one model per size-1 cascade configuration and accumulate the
        # per-component pools for the joker fallback models.
        for metadata_str, samples in self.data_bag[1].items():
            train_data = np.asarray(samples)
            train_input, train_output = train_data[:, 0], train_data[:, 1]
            if 'EDFA' in metadata_str:
                all_train_input_EDFA += list(train_input)
                all_train_output_EDFA += list(train_output)
            else:
                all_train_input_SMF += list(train_input)
                all_train_output_SMF += list(train_output)
            self.model_bag[1][metadata_str] = multioutput.MultiOutputRegressor(
                xgb.XGBRegressor()).fit(train_input, train_output)
        self.model_bag[1]['joker_EDFA'] = multioutput.MultiOutputRegressor(
            xgb.XGBRegressor()).fit(np.asarray(all_train_input_EDFA), np.asarray(all_train_output_EDFA))
        self.model_bag[1]['joker_SMF'] = multioutput.MultiOutputRegressor(
            xgb.XGBRegressor()).fit(np.asarray(all_train_input_SMF), np.asarray(all_train_output_SMF))

        # Refine with size-2 cascades: predict through the first module's
        # model, then fit the second module's model on that prediction.
        for metadata_str, samples in self.data_bag[2].items():
            first_module, second_module = metadata_str.split('-')
            # Fix: was a bare try/except; fall back explicitly to the
            # per-component joker model for unseen configurations.
            model = self.model_bag[1].get(first_module)
            if model is None:
                model = self.model_bag[1]['joker_EDFA' if 'EDFA' in first_module else 'joker_SMF']
            data = np.asarray(samples)
            train_inp, train_out = data[:, 0], data[:, 1]
            pred = model.predict(train_inp)
            pred = pred * (pred > 0)  # clamp negative predictions to zero
            if second_module in self.model_bag[1]:
                self.model_bag[1][second_module].fit(pred, train_out)
            elif 'EDFA' in second_module:
                self.model_bag[1]['joker_EDFA'].fit(pred, train_out)
            else:
                self.model_bag[1]['joker_SMF'].fit(pred, train_out)

    def predict(self, X):
        """Chain the per-module models over each input cascade.

        :param X: iterable of (metadata, signal) pairs as in ``fit``.
        :return: numpy array of final predicted signals, one row per input.
        """
        preds = []
        for metadata, signal in X:
            for module in metadata:
                metadata_str = module[0] + "_" + str(module[1][0]) + "_" + str(module[1][1])
                # Fix: was a bare try/except around the lookup.
                model = self.model_bag[1].get(metadata_str)
                if model is None:
                    model = self.model_bag[1]['joker_EDFA' if 'EDFA' in metadata_str else 'joker_SMF']
                pred = model.predict(np.asarray(signal).reshape(1, -1))
                pred = pred * (pred > 0)  # clamp negative predictions to zero
                # feed the prediction forward as the next module's input
                signal = pred
            preds.append(pred[0])
        return np.asarray(preds)
| [
"xgboost.XGBRegressor",
"numpy.asarray"
] | [((4365, 4382), 'numpy.asarray', 'np.asarray', (['preds'], {}), '(preds)\n', (4375, 4382), True, 'import numpy as np\n'), ((1460, 1502), 'numpy.asarray', 'np.asarray', (['self.data_bag[1][metadata_str]'], {}), '(self.data_bag[1][metadata_str])\n', (1470, 1502), True, 'import numpy as np\n'), ((2103, 2135), 'numpy.asarray', 'np.asarray', (['all_train_input_EDFA'], {}), '(all_train_input_EDFA)\n', (2113, 2135), True, 'import numpy as np\n'), ((2137, 2170), 'numpy.asarray', 'np.asarray', (['all_train_output_EDFA'], {}), '(all_train_output_EDFA)\n', (2147, 2170), True, 'import numpy as np\n'), ((2270, 2301), 'numpy.asarray', 'np.asarray', (['all_train_input_SMF'], {}), '(all_train_input_SMF)\n', (2280, 2301), True, 'import numpy as np\n'), ((2303, 2335), 'numpy.asarray', 'np.asarray', (['all_train_output_SMF'], {}), '(all_train_output_SMF)\n', (2313, 2335), True, 'import numpy as np\n'), ((2953, 2995), 'numpy.asarray', 'np.asarray', (['self.data_bag[2][metadata_str]'], {}), '(self.data_bag[2][metadata_str])\n', (2963, 2995), True, 'import numpy as np\n'), ((2079, 2097), 'xgboost.XGBRegressor', 'xgb.XGBRegressor', ([], {}), '()\n', (2095, 2097), True, 'import xgboost as xgb\n'), ((2246, 2264), 'xgboost.XGBRegressor', 'xgb.XGBRegressor', ([], {}), '()\n', (2262, 2264), True, 'import xgboost as xgb\n'), ((1952, 1970), 'xgboost.XGBRegressor', 'xgb.XGBRegressor', ([], {}), '()\n', (1968, 1970), True, 'import xgboost as xgb\n'), ((4142, 4160), 'numpy.asarray', 'np.asarray', (['signal'], {}), '(signal)\n', (4152, 4160), True, 'import numpy as np\n')] |
from __future__ import annotations
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import TypeVar
from typing import Iterable
T = TypeVar("T")
import os
BASE_PATH = os.path.dirname(__file__)
INPUT_PATH = os.path.join(BASE_PATH, "input.txt")
def median(iterable: "Sequence[T]") -> "T":
    """Return the middle element (upper middle for even lengths).

    The argument must already be sorted for this to be a true median, and it
    must support ``len()`` and indexing — hence the ``Sequence`` annotation
    (the previous ``Iterable`` hint was wrong: an arbitrary iterable cannot
    be indexed). Raises IndexError on an empty sequence.
    """
    # NOTE(review): `Sequence` should be added to the TYPE_CHECKING typing
    # imports at the top of the file for static checkers.
    return iterable[len(iterable) // 2]
# def average(iterable: Iterable[int | float]) -> int | float:
# return sum(iterable) / len(iterable)
def main() -> int:
    """Solve Advent of Code day 7 part 1: align all crab positions at the
    median and report the per-crab and total fuel cost."""
    print(f"Day №{os.path.basename(BASE_PATH)}")
    print("Part №1")
    with open(INPUT_PATH, mode="r") as f:
        positions = [int(tok) for tok in f.read().splitlines()[0].split(",")]
    print(f"Raw input: {positions}")
    ordered = sorted(positions)
    print(f"Sorted input: {ordered}")
    target_position = median(ordered)
    print(f"Target: {target_position}")
    # fuel for each crab is simply its distance to the target position
    fuels = [abs(target_position - pos) for pos in positions]
    for pos, fuel in zip(positions, fuels):
        print(f"Move from {pos} to {target_position}: {fuel} fuel")
    print(f"Total: {sum(fuels)}")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
| [
"os.path.dirname",
"os.path.join",
"os.path.basename",
"typing.TypeVar"
] | [((196, 221), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (211, 221), False, 'import os\n'), ((235, 271), 'os.path.join', 'os.path.join', (['BASE_PATH', '"""input.txt"""'], {}), "(BASE_PATH, 'input.txt')\n", (247, 271), False, 'import os\n'), ((158, 170), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (165, 170), False, 'from typing import TypeVar\n'), ((502, 529), 'os.path.basename', 'os.path.basename', (['BASE_PATH'], {}), '(BASE_PATH)\n', (518, 529), False, 'import os\n')] |
import pygame
import os
pygame.init()
# window geometry: height is 80% of the width
SCREEN_WIDTH = 800
SCREEN_HEIGHT = int(SCREEN_WIDTH * 0.8)
screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))
pygame.display.set_caption('Shooter')
#set framerate
clock = pygame.time.Clock()
FPS = 60
#define game variables
GRAVITY = 0.75  # downward acceleration applied to soldiers each frame
#define player action variables (toggled by the keyboard event loop below)
moving_left = False
moving_right = False
shoot = False
#load images
#bullet
#bullet_img = pygame.image.load('img/icons/bullet.png')
bullet_img = pygame.transform.scale(pygame.image.load('img/icons/bullet.png'),(60,45))
#define colours
BG = (184, 150, 120)  # sandy background colour (RGB)
RED = (255, 0, 0)  # floor line colour
def draw_bg():
    """Fill the screen with the background colour and draw the floor line at y=400."""
    screen.fill(BG)
    pygame.draw.line(screen, RED, (0, 400), (SCREEN_WIDTH, 400))
class Soldier(pygame.sprite.Sprite):
    """An animated character (player or enemy) with movement, gravity,
    jumping and shooting.

    Animation frames are loaded from img/<char_type>/<Idle|Run|Jump>/<i>.png.
    """

    def __init__(self, char_type, x, y, scale, speed, ammo):
        pygame.sprite.Sprite.__init__(self)
        self.alive = True
        self.char_type = char_type
        self.speed = speed
        self.ammo = ammo
        self.start_ammo = ammo
        self.shoot_cooldown = 0
        self.direction = 1  # 1 = facing right, -1 = facing left
        self.vel_y = 0
        self.jump = False
        self.in_air = True
        self.flip = False
        self.animation_list = []
        self.frame_index = 0
        self.action = 0  # 0: idle, 1: run, 2: jump
        self.update_time = pygame.time.get_ticks()
        #load all images for the players
        animation_types = ['Idle', 'Run', 'Jump']
        for animation in animation_types:
            #reset temporary list of images
            temp_list = []
            #count number of files in the folder
            num_of_frames = len(os.listdir(f'img/{self.char_type}/{animation}'))
            for i in range(num_of_frames):
                img = pygame.image.load(f'img/{self.char_type}/{animation}/{i}.png')
                img = pygame.transform.scale(img, (int(img.get_width() * scale), int(img.get_height() * scale)))
                temp_list.append(img)
            self.animation_list.append(temp_list)
        self.image = self.animation_list[self.action][self.frame_index]
        self.rect = self.image.get_rect()
        self.rect.center = (x, y)

    def update(self):
        """Per-frame bookkeeping: advance the animation and tick the shot cooldown."""
        self.update_animation()
        if self.shoot_cooldown > 0:
            self.shoot_cooldown -= 1

    def move(self, moving_left, moving_right):
        """Apply horizontal input, jumping and gravity, then move the rect."""
        #reset movement variables
        dx = 0
        dy = 0
        #assign movement variables if moving left or right
        if moving_left:
            dx = -self.speed
            self.flip = True
            self.direction = -1
        if moving_right:
            dx = self.speed
            self.flip = False
            self.direction = 1
        #jump (only when standing on the ground)
        if self.jump == True and self.in_air == False:
            self.vel_y = -11
            self.jump = False
            self.in_air = True
        #apply gravity, capped at a terminal fall speed of 10
        self.vel_y += GRAVITY
        if self.vel_y > 10:
            # Bug fix: this line was the no-op `self.vel_y`, so the terminal
            # velocity cap was never applied and fall speed grew unbounded.
            self.vel_y = 10
        dy += self.vel_y
        #check collision with floor (the ground line is at y = 400)
        if self.rect.bottom + dy > 400:
            dy = 400 - self.rect.bottom
            self.in_air = False
        #update rectangle position
        self.rect.x += dx
        self.rect.y += dy

    def shoot(self):
        """Spawn a bullet in front of the soldier if cooldown and ammo allow it."""
        if self.shoot_cooldown == 0 and self.ammo > 0:
            self.shoot_cooldown = 20
            bullet = Bullet(self.rect.centerx + (0.6 * self.rect.size[0] * self.direction), self.rect.centery + (-0.1 * self.rect.size[0]), self.direction)
            bullet_group.add(bullet)
            #reduce ammo
            self.ammo -= 1

    def update_animation(self):
        """Advance to the next animation frame every ANIMATION_COOLDOWN ms."""
        ANIMATION_COOLDOWN = 100
        #update image depending on current frame
        self.image = self.animation_list[self.action][self.frame_index]
        #check if enough time has passed since the last update
        if pygame.time.get_ticks() - self.update_time > ANIMATION_COOLDOWN:
            self.update_time = pygame.time.get_ticks()
            self.frame_index += 1
        #if the animation has run out then reset back to the start
        if self.frame_index >= len(self.animation_list[self.action]):
            self.frame_index = 0

    def update_action(self, new_action):
        """Switch to a new animation (0: idle, 1: run, 2: jump), restarting its frames."""
        #check if the new action is different to the previous one
        if new_action != self.action:
            self.action = new_action
            #update the animation settings
            self.frame_index = 0
            self.update_time = pygame.time.get_ticks()

    def draw(self):
        """Blit the current frame, horizontally flipped when facing left."""
        screen.blit(pygame.transform.flip(self.image, self.flip, False), self.rect)
class Bullet(pygame.sprite.Sprite):
    """A projectile travelling horizontally; removes itself once off screen."""

    def __init__(self, x, y, direction):
        pygame.sprite.Sprite.__init__(self)
        self.direction = direction  # 1 = right, -1 = left
        self.speed = 5
        self.flip = False
        self.image = bullet_img
        self.rect = self.image.get_rect()
        self.rect.center = (x, y)

    def update(self):
        # advance horizontally in the facing direction
        self.rect.x += self.direction * self.speed
        # discard the bullet once it has fully left the screen
        if self.rect.right < 0 or self.rect.left > SCREEN_WIDTH:
            self.kill()
#create sprite groups
#create sprite groups
bullet_group = pygame.sprite.Group()
hero = Soldier('player', 200, 200, 0.5, 3, 5)
enemy = Soldier('enemy', 400, 200, 0.5, 5, 5)
# main game loop: draw the scene, update actors, then handle input events
run = True
while run:
    clock.tick(FPS)
    draw_bg()
    hero.update()
    hero.draw()
    enemy.draw()
    #update and draw bullets
    bullet_group.update()
    bullet_group.draw(screen)
    #update player actions (pick animation from current movement state)
    if hero.alive:
        if shoot:
            hero.shoot()
        if hero.in_air:
            hero.update_action(2)#2: jump
        elif moving_left or moving_right:
            hero.update_action(1)#1: run
        else:
            hero.update_action(0)#0: idle
        hero.move(moving_left, moving_right)
    for event in pygame.event.get():
        #quit game
        if event.type == pygame.QUIT:
            run = False
        #keyboard presses (A/D move, SPACE shoots, W jumps, ESC quits)
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_a:
                moving_left = True
            if event.key == pygame.K_d:
                moving_right = True
            if event.key == pygame.K_SPACE:
                shoot = True
            if event.key == pygame.K_w and hero.alive:
                hero.jump = True
            if event.key == pygame.K_ESCAPE:
                run = False
        #keyboard button released
        if event.type == pygame.KEYUP:
            if event.key == pygame.K_a:
                moving_left = False
            if event.key == pygame.K_d:
                moving_right = False
            if event.key == pygame.K_SPACE:
                shoot = False
    pygame.display.update()
pygame.quit() | [
"pygame.transform.flip",
"pygame.display.set_caption",
"os.listdir",
"pygame.init",
"pygame.quit",
"pygame.draw.line",
"pygame.event.get",
"pygame.sprite.Group",
"pygame.display.set_mode",
"pygame.sprite.Sprite.__init__",
"pygame.time.get_ticks",
"pygame.time.Clock",
"pygame.image.load",
"... | [((25, 38), 'pygame.init', 'pygame.init', ([], {}), '()\n', (36, 38), False, 'import pygame\n'), ((110, 164), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(SCREEN_WIDTH, SCREEN_HEIGHT)'], {}), '((SCREEN_WIDTH, SCREEN_HEIGHT))\n', (133, 164), False, 'import pygame\n'), ((165, 202), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Shooter"""'], {}), "('Shooter')\n", (191, 202), False, 'import pygame\n'), ((227, 246), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (244, 246), False, 'import pygame\n'), ((5221, 5242), 'pygame.sprite.Group', 'pygame.sprite.Group', ([], {}), '()\n', (5240, 5242), False, 'import pygame\n'), ((6790, 6803), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (6801, 6803), False, 'import pygame\n'), ((498, 539), 'pygame.image.load', 'pygame.image.load', (['"""img/icons/bullet.png"""'], {}), "('img/icons/bullet.png')\n", (515, 539), False, 'import pygame\n'), ((645, 705), 'pygame.draw.line', 'pygame.draw.line', (['screen', 'RED', '(0, 400)', '(SCREEN_WIDTH, 400)'], {}), '(screen, RED, (0, 400), (SCREEN_WIDTH, 400))\n', (661, 705), False, 'import pygame\n'), ((5897, 5915), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (5913, 5915), False, 'import pygame\n'), ((6765, 6788), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (6786, 6788), False, 'import pygame\n'), ((815, 850), 'pygame.sprite.Sprite.__init__', 'pygame.sprite.Sprite.__init__', (['self'], {}), '(self)\n', (844, 850), False, 'import pygame\n'), ((1269, 1292), 'pygame.time.get_ticks', 'pygame.time.get_ticks', ([], {}), '()\n', (1290, 1292), False, 'import pygame\n'), ((4675, 4710), 'pygame.sprite.Sprite.__init__', 'pygame.sprite.Sprite.__init__', (['self'], {}), '(self)\n', (4704, 4710), False, 'import pygame\n'), ((3939, 3962), 'pygame.time.get_ticks', 'pygame.time.get_ticks', ([], {}), '()\n', (3960, 3962), False, 'import pygame\n'), ((4458, 4481), 'pygame.time.get_ticks', 'pygame.time.get_ticks', ([], {}), 
'()\n', (4479, 4481), False, 'import pygame\n'), ((4525, 4576), 'pygame.transform.flip', 'pygame.transform.flip', (['self.image', 'self.flip', '(False)'], {}), '(self.image, self.flip, False)\n', (4546, 4576), False, 'import pygame\n'), ((1587, 1634), 'os.listdir', 'os.listdir', (['f"""img/{self.char_type}/{animation}"""'], {}), "(f'img/{self.char_type}/{animation}')\n", (1597, 1634), False, 'import os\n'), ((1701, 1763), 'pygame.image.load', 'pygame.image.load', (['f"""img/{self.char_type}/{animation}/{i}.png"""'], {}), "(f'img/{self.char_type}/{animation}/{i}.png')\n", (1718, 1763), False, 'import pygame\n'), ((3843, 3866), 'pygame.time.get_ticks', 'pygame.time.get_ticks', ([], {}), '()\n', (3864, 3866), False, 'import pygame\n')] |
import pytest
from app.api.business import brief_overview_business
@pytest.fixture()
def publish_links():
    # Link texts expected in the 'publish' section of a brief overview page.
    return [
        'How long your brief will be open',
        'Description of work',
        'Location',
        'Review and publish your requirements',
        'Question and answer session details',
        'Role',
        'Shortlist and evaluation process',
        'Title',
        'Who can respond'
    ]
def test_publish_section_has_all_links_for_draft_specialist_brief(specialist_brief, publish_links):
    """Every publish link of a draft brief has a path and a recognised text."""
    links = brief_overview_business.get_publish_links(specialist_brief)
    for link in links:
        assert link['path']
        assert link['text'] in publish_links
def test_publish_section_links_are_disabled_when_specialist_brief_has_been_published(app, specialist_brief):
    """Publish links must have no path once the brief is live."""
    with app.app_context():
        specialist_brief.status = 'live'
        links = brief_overview_business.get_publish_links(specialist_brief)
        # Fix: the assertion was repeated once per link inside a loop that
        # ignored its loop variable; a single pass over all links suffices.
        assert all(not link['path'] for link in links)
| [
"pytest.fixture",
"app.api.business.brief_overview_business.get_publish_links"
] | [((71, 87), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (85, 87), False, 'import pytest\n'), ((535, 594), 'app.api.business.brief_overview_business.get_publish_links', 'brief_overview_business.get_publish_links', (['specialist_brief'], {}), '(specialist_brief)\n', (576, 594), False, 'from app.api.business import brief_overview_business\n'), ((910, 969), 'app.api.business.brief_overview_business.get_publish_links', 'brief_overview_business.get_publish_links', (['specialist_brief'], {}), '(specialist_brief)\n', (951, 969), False, 'from app.api.business import brief_overview_business\n')] |
import os
import cv2
import gc
import random
import time
from tqdm import tqdm
import numpy as np
import matplotlib.pyplot as plt
import argparse
from glob import glob
import torch
import torch.nn as nn
import torchvision.transforms as transforms
from PIL import Image, ImageFilter
from models.OEFT import OEFT
parser = argparse.ArgumentParser(description='Code to optimize')
parser.add_argument('--device', help='cuda | cuda:0 | cpu', default="cuda", type=str)
parser.add_argument('--device_num', help='which GPUs to use', default="0", type=str)
parser.add_argument('--sample_freq', help="sampling frequency of saving results", default=500, type=float)
""" optimizer.py setting """
parser.add_argument('--content_root', help='folder of content images', default="../OEFT/example_210905/input/", type=str)
parser.add_argument('--style_root', help='folder of style images', default="../OEFT/example_210905/style/", type=str)
parser.add_argument('--save_root', help='folder of saving results', default="../OEFT/example_210929/experiment", type=str)
parser.add_argument('--fileType', help='png|jpg', default="png", type=str)
parser.add_argument('--keys', help='vgg layer names', default=['r12', 'r22', 'r34', 'r44', 'r54'], nargs="+")
parser.add_argument('--iter', help="number of iteration for optimization", default=1000, type=int)
parser.add_argument('--img_size', help="size of input image", default=256, type=int)
parser.add_argument('--pretrained', help="use pre-trained network or not", action="store_false")
parser.add_argument('--denorm', help="size of input image", action="store_false")
parser.add_argument('--lr', help="learning rate", default=1e-4, type=float)
parser.add_argument('--beta1', help="optimizer parameter", default=0.5, type=float)
parser.add_argument('--beta2', help="optimizer parameter", default=0.999, type=float)
parser.add_argument('--weight_decay', help="weight_decay", default=1e-4, type=float)
""" OEPT.py setting """
parser.add_argument('--warpFeat', help="use warped feature as decoder input", action="store_true")
parser.add_argument('--warpMv', help="use warped feature as moving averaged feature with content feature", action="store_true")
parser.add_argument('--warpRes', help="use warped image as residual", action="store_true")
parser.add_argument('--cycle', help="use cycle consistency regularization", action="store_true")
parser.add_argument('--res_wt', help="weight between decoder output and residual warped img", default=8/9, type=float)
parser.add_argument('--cycle_wt', help="weight of cycle consistency regularization", default=1., type=float)
# 256, 128, 64, 32, 16
parser.add_argument('--nce_wt', help='nce loss weights from each layer[256-16]', default=[1/8*1/4, 1/4*1/4, 1/2*1/4, 1.*1/4, 1.*1/4], nargs="+")
parser.add_argument('--nns_wt', help='NN style loss weights from each layer[256-16]', default=[1/16*1/4, 1/8*1/4, 1/4*1/4, 1/2*1/4, 1.*1/4], nargs="+")
parser.add_argument('--nce_temp', help="temperature for nce", default=0.07, type=float)
parser.add_argument('--nns_temp', help="temperature for nns", default=0.05, type=float)
parser.add_argument('--content_style_wt', help="weight of between content and style", default=4/5, type=float)
""" corrnet.py setting """
parser.add_argument('--corr_temp', help="temperature of correlation module", default=0.01, type=float)
parser.add_argument('--mv_wt', help="weight of moving average", default=0.6, type=float)
mean = np.array([0.485, 0.456, 0.406]).reshape(1,1,3)
std = np.array([0.229, 0.224, 0.225]).reshape(1,1,3)
args = parser.parse_args()
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
GPU_list = args.device_num
len_GPU = len( GPU_list.split(","))
print("@@@@@@@@@@@@ len_GPU: ", len_GPU)
os.environ["CUDA_VISIBLE_DEVICES"] = GPU_list
# Load content and style
print(os.listdir('./'))
def img_load(path):
img = cv2.imread(path)[::,::,::-1] # BGR to RGB, [0-255]
return img
def toPIL(img):
# image range should be [0-255] for converting.
img_type = str(type(img))
if 'numpy' in img_type:
img = Image.fromarray(img)
elif 'torch' in img_type:
img = transforms.ToPILImage()(img).convert("RGB")
return img
if __name__ == "__main__":
# parse options
keys = args.keys
content_root = args.content_root
style_root = args.style_root
if not os.path.exists(args.content_root):
print("!!! args.content_root does not exist !!!")
exit()
if not os.path.exists(args.style_root):
print("!!! args.style_root does not exist !!!")
exit()
content_list = glob( os.path.join(content_root, "*.{}".format(args.fileType)) )
style_list = glob( os.path.join(style_root, "*.{}".format(args.fileType) ) )
if len(content_list) < 1 or len(style_list) < 1:
print("!!! The number of content and style images should be more than 1 !!!")
exit()
content_list.sort()
style_list.sort()
print("@@@@@@@@@ len(content_list): ", len(content_list))
for z in range(len(content_list)):
random_seed = 1006
os.environ['PYTHONHASHSEED'] = str(random_seed)
torch.manual_seed(random_seed)
torch.cuda.manual_seed(random_seed)
torch.cuda.manual_seed_all(random_seed) # if use multi-GPU
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
np.random.seed(random_seed)
random.seed(random_seed)
""" start iteration """
torch.cuda.empty_cache() # remove all caches
try:
""" content, style image path indexing """
content_path = content_list[z] # './examples/input/in11.png'
style_path = style_list[z] # './examples/style/tar11.png'
""" img load """
content = img_load(content_path)
style = img_load(style_path)
content_256 = content.copy()
style_256 = style.copy()
except Exception as e:
print("image loading error : ", e)
continue
""" Convert numpy array to PIL.Image format """
""" and modify the range (0-255) to [0-1] """
content = toPIL(content)
style = toPIL(style)
""" Make transform """
transform_list = []
img_size = (args.img_size, args.img_size)
transform_list.append(transforms.Resize(img_size, interpolation=2)) # @@@@ args.interpol-method = 2
transform_list += [transforms.ToTensor()]
transform_list += [transforms.Normalize( (0.485, 0.456, 0.406),
(0.229, 0.224, 0.225))]
transform = transforms.Compose(transform_list)
""" do transform """
content = transform(content)
style = transform(style)
content = torch.unsqueeze( content, dim=0 )
style = torch.unsqueeze( style, dim=0 )
""" Load model """
model = OEFT(args=args, pretrained=args.pretrained)
model = model.to(args.device)
""" Define optimizer """
e_optimizer = torch.optim.Adam(model.corrNet.parameters(), lr=args.lr,
betas=(args.beta1, args.beta2)
)
g_optimizer = torch.optim.Adam(model.decoder.parameters(), lr=args.lr,
betas=(args.beta1, args.beta2)
)
for param_group in e_optimizer.param_groups:
param_group['lr'] = 1e-4 #hparams.initial_learning_rate
for param_group in g_optimizer.param_groups:
param_group['lr'] = 1e-4 #hparams.initial_learning_rate
if "cuda" in args.device:
content = content.type(torch.cuda.FloatTensor).to(args.device).detach()
style = style.type(torch.cuda.FloatTensor).to(args.device).detach()
new_input = content.clone().type(torch.cuda.FloatTensor).to(args.device)
else:
content = content.type(torch.FloatTensor).to(args.device).detach()
style = style.type(torch.FloatTensor).to(args.device).detach()
new_input = content.clone().type(torch.FloatTensor).to(args.device)
warp_result = None
dec_result = None
cycle_result = None
count = 0
model.train()
start = time.time()
prog_bar = tqdm(range(args.iter))
for i in prog_bar:
if count == 0:
warped_s2c_feat, warped_s2c_imgs, tr_s2c_img_decs, loss, loss_dict, nce_dict, nns_dict = model(style, content, step=i)
else:
if args.warpMv:
warped_s2c_feat = warped_s2c_feat
for key in args.keys:
warped_s2c_feat[key] = warped_s2c_feat[key].detach()
warped_s2c_feat, warped_s2c_imgs, tr_s2c_img_decs, loss, loss_dict, nce_dict, nns_dict = model(style, content, warped_s2c_feat, step=i)
else:
warped_s2c_feat, warped_s2c_imgs, tr_s2c_img_decs, loss, loss_dict, nce_dict, nns_dict = model(style, content, step=i)
# summary_writer.add_scalars( "Total NCE and NNS" , loss_dict, i)
# summary_writer.add_scalars( "NCEs" , nce_dict, i)
# summary_writer.add_scalars( "NNSs" , nns_dict, i)
# nce_dict.update(nns_dict)
# summary_writer.add_scalars( "NCEs and NNSs" , nce_dict, i)
prog_bar.set_description("Pair:{}, iter:{}, loss_style:{}, loss_cont:{}, loss_cycle:{}".format(
z+1,
i+1,
loss_dict["L_style"],
loss_dict["L_content"],
loss_dict["L_cycle"])
)
e_optimizer.zero_grad()
g_optimizer.zero_grad()
loss.backward()
e_optimizer.step()
g_optimizer.step()
count += 1
""" generation result """
dec_result = tr_s2c_img_decs.clone().detach()
""" save the results """
if (i + 1) % args.sample_freq == 0 or i == 0 or i == args.iter-1:
c_img = os.path.basename(content_path) # 'in11.png'
s_img = os.path.basename(style_path) # 'tar11.png'
c_name = c_img.split('.')[0] # in11
s_name = s_img.split('.')[0] # tar11
pair_dir = '{}_'.format(z) + c_name + '_and_' + s_name
pair_iter_dir = '{}_'.format(z) + "iter" + str(i) + "_" +c_name + '_and_' + s_name
""" making folder to save results """
root_iter_path = os.path.join(args.save_root, pair_dir)
save_dir_path = os.path.join(root_iter_path, pair_iter_dir)
if not os.path.isdir(args.save_root):
os.makedirs(args.save_root)
if not os.path.isdir(save_dir_path):
os.makedirs(save_dir_path)
""" denormalization """
if args.denorm == True:
result = ( np.clip(( dec_result[0].permute(1,2,0).clone().detach().cpu().numpy()) *std + mean, 0.0, 1.0)*255.0).astype('uint8')[::,::,::-1]
else:
result = ( np.clip(( dec_result[0].permute(1,2,0).clone().detach().cpu().numpy()), 0.0, 1.0)*255.0).astype('uint8')[::,::,::-1]
""" change the form """
content_save = (cv2.resize(content_256, (256,256))).astype('uint8')[::,::,::-1]
style_save = (cv2.resize(style_256, (256,256))).astype('uint8')[::,::,::-1]
bundle_result = np.stack( (content_save, style_save, result), axis=1 )
bundle_result = bundle_result.reshape((256, 256*3, 3))
""" save the result """
cv2.imwrite( os.path.join(save_dir_path, c_name+'.png'), content_save)
cv2.imwrite( os.path.join(save_dir_path, s_name+'.png'), style_save)
cv2.imwrite( os.path.join(save_dir_path, 'result.png'), result)
cv2.imwrite( os.path.join(save_dir_path, c_name + '_' + s_name + '_' + 'result_bundle.png'), bundle_result)
# if args.denorm == True:
# warped_s2c_imgs= (np.clip( ( warped_s2c_imgs[0].clone().permute(1,2,0).detach().cpu().numpy()) *std + mean, 0.0, 1.0)*255.0).astype('uint8')[::,::,::-1]
# else:
# warped_s2c_imgs = (np.clip( ( warped_s2c_imgs[0].clone().permute(1,2,0).detach().cpu().numpy()), 0.0, 1.0)*255.0).astype('uint8')[::,::,::-1]
# resolution = ['256', '128', '64', '32', '16']
# cv2.imwrite( os.path.join(save_dir_path, 'warp_{0}.png'.format(resolution[2])) , warped_s2c_imgs ) # k=2 (64)
print("time :", time.time() - start)
print("root path: ", root_iter_path)
del model
gc.collect()
| [
"models.OEFT.OEFT",
"torchvision.transforms.ToPILImage",
"numpy.array",
"os.path.exists",
"os.listdir",
"argparse.ArgumentParser",
"torch.unsqueeze",
"numpy.stack",
"os.path.isdir",
"numpy.random.seed",
"torchvision.transforms.ToTensor",
"gc.collect",
"torchvision.transforms.Normalize",
"t... | [((324, 379), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Code to optimize"""'}), "(description='Code to optimize')\n", (347, 379), False, 'import argparse\n'), ((4596, 4612), 'os.listdir', 'os.listdir', (['"""./"""'], {}), "('./')\n", (4606, 4612), False, 'import os\n'), ((4238, 4269), 'numpy.array', 'np.array', (['[0.485, 0.456, 0.406]'], {}), '([0.485, 0.456, 0.406])\n', (4246, 4269), True, 'import numpy as np\n'), ((4292, 4323), 'numpy.array', 'np.array', (['[0.229, 0.224, 0.225]'], {}), '([0.229, 0.224, 0.225])\n', (4300, 4323), True, 'import numpy as np\n'), ((4644, 4660), 'cv2.imread', 'cv2.imread', (['path'], {}), '(path)\n', (4654, 4660), False, 'import cv2\n'), ((4851, 4871), 'PIL.Image.fromarray', 'Image.fromarray', (['img'], {}), '(img)\n', (4866, 4871), False, 'from PIL import Image, ImageFilter\n'), ((5132, 5165), 'os.path.exists', 'os.path.exists', (['args.content_root'], {}), '(args.content_root)\n', (5146, 5165), False, 'import os\n'), ((5251, 5282), 'os.path.exists', 'os.path.exists', (['args.style_root'], {}), '(args.style_root)\n', (5265, 5282), False, 'import os\n'), ((5916, 5946), 'torch.manual_seed', 'torch.manual_seed', (['random_seed'], {}), '(random_seed)\n', (5933, 5946), False, 'import torch\n'), ((5955, 5990), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['random_seed'], {}), '(random_seed)\n', (5977, 5990), False, 'import torch\n'), ((5999, 6038), 'torch.cuda.manual_seed_all', 'torch.cuda.manual_seed_all', (['random_seed'], {}), '(random_seed)\n', (6025, 6038), False, 'import torch\n'), ((6164, 6191), 'numpy.random.seed', 'np.random.seed', (['random_seed'], {}), '(random_seed)\n', (6178, 6191), True, 'import numpy as np\n'), ((6200, 6224), 'random.seed', 'random.seed', (['random_seed'], {}), '(random_seed)\n', (6211, 6224), False, 'import random\n'), ((6266, 6290), 'torch.cuda.empty_cache', 'torch.cuda.empty_cache', ([], {}), '()\n', (6288, 6290), False, 'import torch\n'), ((7453, 
7487), 'torchvision.transforms.Compose', 'transforms.Compose', (['transform_list'], {}), '(transform_list)\n', (7471, 7487), True, 'import torchvision.transforms as transforms\n'), ((7614, 7645), 'torch.unsqueeze', 'torch.unsqueeze', (['content'], {'dim': '(0)'}), '(content, dim=0)\n', (7629, 7645), False, 'import torch\n'), ((7666, 7695), 'torch.unsqueeze', 'torch.unsqueeze', (['style'], {'dim': '(0)'}), '(style, dim=0)\n', (7681, 7695), False, 'import torch\n'), ((7742, 7785), 'models.OEFT.OEFT', 'OEFT', ([], {'args': 'args', 'pretrained': 'args.pretrained'}), '(args=args, pretrained=args.pretrained)\n', (7746, 7785), False, 'from models.OEFT import OEFT\n'), ((9156, 9167), 'time.time', 'time.time', ([], {}), '()\n', (9165, 9167), False, 'import time\n'), ((14087, 14099), 'gc.collect', 'gc.collect', ([], {}), '()\n', (14097, 14099), False, 'import gc\n'), ((7161, 7205), 'torchvision.transforms.Resize', 'transforms.Resize', (['img_size'], {'interpolation': '(2)'}), '(img_size, interpolation=2)\n', (7178, 7205), True, 'import torchvision.transforms as transforms\n'), ((7266, 7287), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (7285, 7287), True, 'import torchvision.transforms as transforms\n'), ((7316, 7382), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['(0.485, 0.456, 0.406)', '(0.229, 0.224, 0.225)'], {}), '((0.485, 0.456, 0.406), (0.229, 0.224, 0.225))\n', (7336, 7382), True, 'import torchvision.transforms as transforms\n'), ((11349, 11379), 'os.path.basename', 'os.path.basename', (['content_path'], {}), '(content_path)\n', (11365, 11379), False, 'import os\n'), ((11417, 11445), 'os.path.basename', 'os.path.basename', (['style_path'], {}), '(style_path)\n', (11433, 11445), False, 'import os\n'), ((11826, 11864), 'os.path.join', 'os.path.join', (['args.save_root', 'pair_dir'], {}), '(args.save_root, pair_dir)\n', (11838, 11864), False, 'import os\n'), ((11897, 11940), 'os.path.join', 'os.path.join', 
(['root_iter_path', 'pair_iter_dir'], {}), '(root_iter_path, pair_iter_dir)\n', (11909, 11940), False, 'import os\n'), ((12823, 12875), 'numpy.stack', 'np.stack', (['(content_save, style_save, result)'], {'axis': '(1)'}), '((content_save, style_save, result), axis=1)\n', (12831, 12875), True, 'import numpy as np\n'), ((13993, 14004), 'time.time', 'time.time', ([], {}), '()\n', (14002, 14004), False, 'import time\n'), ((11965, 11994), 'os.path.isdir', 'os.path.isdir', (['args.save_root'], {}), '(args.save_root)\n', (11978, 11994), False, 'import os\n'), ((12016, 12043), 'os.makedirs', 'os.makedirs', (['args.save_root'], {}), '(args.save_root)\n', (12027, 12043), False, 'import os\n'), ((12067, 12095), 'os.path.isdir', 'os.path.isdir', (['save_dir_path'], {}), '(save_dir_path)\n', (12080, 12095), False, 'import os\n'), ((12117, 12143), 'os.makedirs', 'os.makedirs', (['save_dir_path'], {}), '(save_dir_path)\n', (12128, 12143), False, 'import os\n'), ((13020, 13064), 'os.path.join', 'os.path.join', (['save_dir_path', "(c_name + '.png')"], {}), "(save_dir_path, c_name + '.png')\n", (13032, 13064), False, 'import os\n'), ((13107, 13151), 'os.path.join', 'os.path.join', (['save_dir_path', "(s_name + '.png')"], {}), "(save_dir_path, s_name + '.png')\n", (13119, 13151), False, 'import os\n'), ((13192, 13233), 'os.path.join', 'os.path.join', (['save_dir_path', '"""result.png"""'], {}), "(save_dir_path, 'result.png')\n", (13204, 13233), False, 'import os\n'), ((13272, 13350), 'os.path.join', 'os.path.join', (['save_dir_path', "(c_name + '_' + s_name + '_' + 'result_bundle.png')"], {}), "(save_dir_path, c_name + '_' + s_name + '_' + 'result_bundle.png')\n", (13284, 13350), False, 'import os\n'), ((4916, 4939), 'torchvision.transforms.ToPILImage', 'transforms.ToPILImage', ([], {}), '()\n', (4937, 4939), True, 'import torchvision.transforms as transforms\n'), ((12630, 12665), 'cv2.resize', 'cv2.resize', (['content_256', '(256, 256)'], {}), '(content_256, (256, 256))\n', (12640, 
12665), False, 'import cv2\n'), ((12728, 12761), 'cv2.resize', 'cv2.resize', (['style_256', '(256, 256)'], {}), '(style_256, (256, 256))\n', (12738, 12761), False, 'import cv2\n')] |
import tensorflow as tf
import numpy as np
import random
import time
from math import exp
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.layers import Merge
from keras.optimizers import RMSprop, Adam
start_time = time.time()
class UAV_fire_extinguish(object):
n_w = 4 # width of grid world
n_uav = 2 # number of agents
n_fire = 3 # number of fires
u_loca = [0, 15] # initial location of agents
t_fail = [0.02, 0.04] # probability for automatical failure
t_emit = [0.5, 0.5] # probability for getting observation
l_fire = [2, 7, 12] # location of fires
r_fire = [5.0, 5.0, 50.0] # reward of putting down each fire
e_fire = [[0.9,0.9],
[0.9,0.9],
[0.0,0.9]] # fire extinguish probability for each fire [down by 1 agent, down by 2 agent]
l_bigf = [12] # location of big fire
l_smlf = [2,7] # location of small fire
s_init = u_loca + [1]*n_fire + [1]*n_uav # initial state of the system
n_visi = 3 # length of local vision
##### Sampling method #####
def sampling_events(event,prob):
n_length = len(event)
x_rand = np.random.random()
for i in range(n_length):
x_rand = x_rand - prob[i]
if x_rand <= 0:
return event[i]
def mix_distribution(event1,prob1,event2,prob2):
n_length_1 = len(event1)
n_length_2 = len(event2)
new_event = []
new_prob = []
for e1 in range(n_length_1):
for e2 in range(n_length_2):
e_new = event1[e1] + [event2[e2]]
new_event.append(e_new)
p_new = prob1[e1] * prob2[e2]
new_prob.append(p_new)
return (new_event,new_prob)
##### check boundary #####
def check_boundary(x,w):
if x < 0:
return 0
elif x > w-1:
return w-1
else:
return x
##################################
##### Mapping between states #####
##################################
def two_dim_to_one(l_cor,n_w):
x = l_cor[0]
y = l_cor[1]
l = n_w * y + x
return l
def one_dim_to_two(l,n_w):
x = l%n_w
y = (l-x)/n_w
return [x,y]
############################
##### TRANSITION MODEL #####
############################
### simple movement of one agent due to action
def move_location_single(l_1d,a,n_w):
if l_1d == n_w * n_w:
return l_1d
l = one_dim_to_two(l_1d,n_w)
x_next = l[0]
y_next = l[1]
if a == 0: # up
y_next = y_next + 1
elif a == 1: # down
y_next = y_next - 1
elif a == 2: # left
x_next = x_next - 1
elif a == 3:
x_next = x_next + 1
else:
pass
x_next = check_boundary(x_next,n_w)
y_next = check_boundary(y_next,n_w)
l_next = two_dim_to_one((x_next,y_next),n_w)
return l_next
######################################################
##### number of uavs at the location of the fire #####
######################################################
def fire_has_uavs(lf,l_tuple,n_cut):
num = 0
for i in range(len(l_tuple)):
if lf == l_tuple[i]:
num += 1
if num > n_cut:
num = n_cut
return num
######################################################################
##### Obtain all possible sets and the corresponding probability #####
######################################################################
def transition_model(sys_cart_product_and_time_delay,a_joint,UAV_fire_extinguish):
s_fail = UAV_fire_extinguish.n_w * UAV_fire_extinguish.n_w
cart_product = sys_cart_product_and_time_delay[0: UAV_fire_extinguish.n_uav + UAV_fire_extinguish.n_fire]
time_delay = sys_cart_product_and_time_delay[UAV_fire_extinguish.n_uav + UAV_fire_extinguish.n_fire :]
##### Terminal states #####
die_product = 1
### if all agents are broken ###
for i_uav in range(UAV_fire_extinguish.n_uav):
if cart_product[i_uav] == s_fail:
die_product = die_product * 1
else:
die_product = die_product * 0
if die_product == 1:
return ([UAV_fire_extinguish.u_loca + [1]*UAV_fire_extinguish.n_fire], [1.0], [1]*UAV_fire_extinguish.n_uav)
### if all fires are extinguished ###
if sum(cart_product[UAV_fire_extinguish.n_uav:UAV_fire_extinguish.n_uav + UAV_fire_extinguish.n_fire]) == 0:
return ([UAV_fire_extinguish.u_loca + [1]*UAV_fire_extinguish.n_fire], [1.0], [1]*UAV_fire_extinguish.n_uav)
##### Transition of the first UAV #####
if cart_product[0] == s_fail:
event_product = [[s_fail]]
prob_product = [1.0]
else:
l0_next = move_location_single(cart_product[0],a_joint[0],UAV_fire_extinguish.n_w)
event_product = [[l0_next],[s_fail]]
prob_product = [1.0 - UAV_fire_extinguish.t_fail[0], UAV_fire_extinguish.t_fail[0]]
##### Transition of the second UAV #####
for i_uav in range(1,UAV_fire_extinguish.n_uav):
if cart_product[i_uav] == s_fail:
event_set_1 = [s_fail]
prob_set_1 = [1.0]
else:
l1_next = move_location_single(cart_product[i_uav],a_joint[i_uav],UAV_fire_extinguish.n_w)
event_set_1 = [l1_next,s_fail]
prob_set_1 = [1.0 - UAV_fire_extinguish.t_fail[i_uav], UAV_fire_extinguish.t_fail[i_uav]]
(event_product,prob_product) = mix_distribution(event_product,prob_product,event_set_1,prob_set_1)
##### Transition of the fire states #####
for i_fire in range(UAV_fire_extinguish.n_fire):
the_fire_state = cart_product[UAV_fire_extinguish.n_uav + i_fire]
if the_fire_state == 0: # no fire
(event_product,prob_product) = mix_distribution(event_product,prob_product,[0],[1.0])
else:
l_f = UAV_fire_extinguish.l_fire[i_fire]
l_0 = cart_product[0]
l_1 = cart_product[1]
if fire_has_uavs(l_f,cart_product[0:UAV_fire_extinguish.n_uav],2) == 1:
rate_put_down = UAV_fire_extinguish.e_fire[i_fire][0]
(event_product,prob_product) = mix_distribution(event_product,prob_product,[0,1],[rate_put_down,1.0-rate_put_down])
elif fire_has_uavs(l_f,cart_product[0:UAV_fire_extinguish.n_uav],2) == 2:
rate_put_down = UAV_fire_extinguish.e_fire[i_fire][1]
(event_product,prob_product) = mix_distribution(event_product,prob_product,[0,1],[rate_put_down,1.0-rate_put_down])
else:
(event_product,prob_product) = mix_distribution(event_product,prob_product,[1],[1.0])
##### Consider the transition of time delay (Poisson Process) #####
for i_uav in range(UAV_fire_extinguish.n_uav):
random_p = random.random()
if random_p < UAV_fire_extinguish.t_emit[i_uav]:
time_delay[i_uav] = 1
else:
time_delay[i_uav] = time_delay[i_uav] + 1
return (event_product,prob_product,time_delay)
def global_observation(agent,sys_state,UAV_fire_extinguish):
s_fail = UAV_fire_extinguish.n_w * UAV_fire_extinguish.n_w
o_length = 2 * UAV_fire_extinguish.n_uav + UAV_fire_extinguish.n_fire + 1 + 1
# (x,y) coordinate of each agent + fire status of each fire + agent ID + time_delay
obs = ([agent] +
[0] * ( 2 * UAV_fire_extinguish.n_uav) +
sys_state[UAV_fire_extinguish.n_uav: UAV_fire_extinguish.n_uav + UAV_fire_extinguish.n_fire] +
[sys_state[UAV_fire_extinguish.n_uav + UAV_fire_extinguish.n_fire + agent]])
for j_agent in range(UAV_fire_extinguish.n_uav):
[x,y] = one_dim_to_two(sys_state[j_agent],UAV_fire_extinguish.n_w)
obs[1 + 2*j_agent] = x
obs[2 + 2*j_agent] = y
return obs
def local_observation(agent,sys_state,UAV_fire_extinguish):
s_fail = UAV_fire_extinguish.n_w * UAV_fire_extinguish.n_w
# agent = which agent is going to make the observation
vision_depth = UAV_fire_extinguish.n_visi
vision_area = (vision_depth * 2 + 1) ** 2
self_location_xy = one_dim_to_two(sys_state[agent],UAV_fire_extinguish.n_w)
# vision 1: other agents
vision_1 = [0]*vision_area
for other_agent in range(UAV_fire_extinguish.n_uav):
if other_agent != agent :
location_other_agent = sys_state[other_agent]
location_other_xy = one_dim_to_two(location_other_agent,UAV_fire_extinguish.n_w)
dx = location_other_xy[0] - self_location_xy[0]
dy = location_other_xy[1] - self_location_xy[1]
if (-1)*vision_depth <= dx <= vision_depth and (-1)*vision_depth <= dy <= vision_depth and sys_state[other_agent] != s_fail:
relative_location = two_dim_to_one((dx + vision_depth,dy + vision_depth), vision_depth * 2 + 1)
vision_1[relative_location] += 1
# vision 2: big fires
vision_2 = [0]*vision_area
# vision 3: small fires
vision_3 = [0]*vision_area
for i_fire in range(UAV_fire_extinguish.n_fire):
if sys_state[UAV_fire_extinguish.n_uav + i_fire] == 1:
if UAV_fire_extinguish.l_fire[i_fire] in UAV_fire_extinguish.l_bigf: # it is a big fire
big_location = one_dim_to_two(UAV_fire_extinguish.l_fire[i_fire],UAV_fire_extinguish.n_w)
dx = big_location[0] - self_location_xy[0]
dy = big_location[1] - self_location_xy[1]
if (-1)*vision_depth <= dx <= vision_depth and (-1)*vision_depth <= dy <= vision_depth:
relative_location = two_dim_to_one((dx + vision_depth,dy + vision_depth), vision_depth * 2 + 1)
vision_2[relative_location] += 1
else: # it is a small fire
sml_location = one_dim_to_two(UAV_fire_extinguish.l_fire[i_fire],UAV_fire_extinguish.n_w)
dx = sml_location[0] - self_location_xy[0]
dy = sml_location[1] - self_location_xy[1]
if (-1)*vision_depth <= dx <= vision_depth and (-1)*vision_depth <= dy <= vision_depth:
relative_location = two_dim_to_one((dx + vision_depth,dy + vision_depth), vision_depth * 2 + 1)
vision_3[relative_location] += 1
time_delay = sys_state[UAV_fire_extinguish.n_uav + UAV_fire_extinguish.n_fire + agent]
return (([agent] + self_location_xy + [time_delay]),(vision_1),(vision_2),(vision_3))
def transition_sample(
current_state,
a_joint, # tuple
info_list, # [info_1,info_2,....]
UAV_fire_extinguish):
n_w = UAV_fire_extinguish.n_w
reward = 0.0
(event,prob,time_delay) = transition_model(current_state,a_joint,UAV_fire_extinguish)
next_state = sampling_events(event,prob) + time_delay
# Collect rewards
for i_fire in range(UAV_fire_extinguish.n_fire):
if current_state[UAV_fire_extinguish.n_uav + i_fire] == 1 and next_state[UAV_fire_extinguish.n_uav + i_fire] == 0:
reward += UAV_fire_extinguish.r_fire[i_fire]
# Update information if time delay is 1.0
updated_info_list = info_list[:]
for i_agent in range(UAV_fire_extinguish.n_uav):
if next_state[UAV_fire_extinguish.n_uav + UAV_fire_extinguish.n_fire + i_agent] == 1:
updated_info_list[i_agent] = global_observation(i_agent,next_state,UAV_fire_extinguish)
else:
#updated_info_list[i_agent][3] = updated_info_list[i_agent][3] + 1
updated_info_list[i_agent][-1] = updated_info_list[i_agent][-1] + 1
return [next_state,updated_info_list,reward]
######## CODE FOR SIMULATOR IS FINISHED #########
######## CODE FOR SIMULATOR IS FINISHED #########
######## CODE FOR SIMULATOR IS FINISHED #########
######## CODE FOR SIMULATOR IS FINISHED #########
######## CODE FOR SIMULATOR IS FINISHED #########
######## CODE FOR SIMULATOR IS FINISHED #########
def samples_by_random_action(n_init_pool,UAV_fire_extinguish):
size = UAV_fire_extinguish.n_w
input_number = 4 + 3 *(2 * UAV_fire_extinguish.n_visi + 1)**2
o_pool = np.zeros((UAV_fire_extinguish.n_uav,n_init_pool,input_number),float)
a_pool = np.zeros((UAV_fire_extinguish.n_uav,n_init_pool,5),float)
r_pool = np.zeros((UAV_fire_extinguish.n_uav,n_init_pool,1),float)
op_pool = np.zeros((UAV_fire_extinguish.n_uav,n_init_pool,input_number),float)
s_current = UAV_fire_extinguish.s_init
last_info_list = []
for i_uav in range(UAV_fire_extinguish.n_uav):
last_info_list.append(local_observation(i_uav,s_current,UAV_fire_extinguish))
#print(last_info_list[i_uav])
next_info_list = last_info_list[:]
for i_event in range(n_init_pool):
a_joint = [0] * UAV_fire_extinguish.n_uav
for i_uav in range(UAV_fire_extinguish.n_uav):
a_joint[i_uav] = random.randint(0,4)
#print(s_current,a_joint)
outcome = transition_sample(s_current,a_joint,last_info_list,UAV_fire_extinguish)
next_state = outcome[0]
next_info_list = outcome[1]
reward = outcome[2]
for i_uav in range(UAV_fire_extinguish.n_uav):
o_pool[i_uav,i_event,:] = last_info_list[i_uav][:]
op_pool[i_uav,i_event,:] = next_info_list[i_uav][:]
a_pool[i_uav,i_event,a_joint[i_uav]] = 1.0
r_pool[i_uav,i_event,0] = reward
last_info_list = next_info_list[:]
s_current = next_state
return (o_pool,a_pool,r_pool,op_pool)
def samples_by_one_agent_random_action(n_init_pool,free_agent,UAV_fire_extinguish):
size = UAV_fire_extinguish.n_w
input_number = 4 + 3 *(2 * UAV_fire_extinguish.n_visi + 1)**2
o_pool = np.zeros((UAV_fire_extinguish.n_uav,n_init_pool,input_number),float)
a_pool = np.zeros((UAV_fire_extinguish.n_uav,n_init_pool,5),float)
r_pool = np.zeros((UAV_fire_extinguish.n_uav,n_init_pool,1),float)
op_pool = np.zeros((UAV_fire_extinguish.n_uav,n_init_pool,input_number),float)
s_current = UAV_fire_extinguish.s_init
last_info_list = []
for i_uav in range(UAV_fire_extinguish.n_uav):
last_info_list.append(local_observation(i_uav,s_current,UAV_fire_extinguish))
next_info_list = last_info_list[:]
for i_event in range(n_init_pool):
a_joint = [0] * UAV_fire_extinguish.n_uav
for i_uav in range(UAV_fire_extinguish.n_uav):
if i_uav == free_agent:
a_joint[i_uav] = random.randint(0,4)
else:
a_joint[i_uav] = es_greedy(sess.run(Q, feed_dict={last_info: [last_info_list[i_uav]]}),0.0)
outcome = transition_sample(s_current,a_joint,last_info_list,UAV_fire_extinguish)
next_state = outcome[0]
next_info_list = outcome[1]
reward = outcome[2]
for i_uav in range(UAV_fire_extinguish.n_uav):
o_pool[i_uav,i_event,:] = last_info_list[i_uav][:]
op_pool[i_uav,i_event,:] = next_info_list[i_uav][:]
a_pool[i_uav,i_event,a_joint[i_uav]] = 1.0
r_pool[i_uav,i_event,0] = reward
last_info_list = next_info_list[:]
s_current = next_state
return (o_pool,a_pool,r_pool,op_pool)
def truncate_dataset_multiagent(data_array,n_keep_size):
n_size = len(data_array[0])
if n_size <= n_keep_size:
return data_array
else:
return data_array[:,(n_size-n_keep_size):,:]
def batch_select_multiagent(inputs,n_uav,n_batch,seeds):
batch_set = np.zeros((n_uav,n_batch,len(inputs[0][1])))
for i in range(n_batch):
for i_uav in range(n_uav):
batch_set[i_uav,i,:] = inputs[i_uav,seeds[i],:]
return batch_set
def visualize_scenario_indp(current_state,h_print,r_explore,UAV_fire_extinguish):
last_info_list = []
for i_uav in range(UAV_fire_extinguish.n_uav):
last_info_list.append(local_observation(i_uav,current_state,UAV_fire_extinguish))
next_info_list = last_info_list[:]
for h in range(h_print):
a_joint = [0] * UAV_fire_extinguish.n_uav
for i_uav in range(UAV_fire_extinguish.n_uav):
(obs_0,obs_1,obs_2,obs_3) = last_info_list[i_uav][:]
old_qval = final_model.predict([np.array(obs_0).reshape(1,input_size_nn_sfinfo),
np.array(obs_1).reshape(1,input_size_nn_vision),
np.array(obs_2).reshape(1,input_size_nn_vision),
np.array(obs_3).reshape(1,input_size_nn_vision)], batch_size=1)
a_joint[i_uav] = es_greedy(old_qval,r_explore)
outcome = transition_sample(current_state,a_joint,last_info_list,UAV_fire_extinguish)
(next_state,next_info_list,reward_immed) = outcome
next_state = outcome[0]
next_info_list = outcome[1]
reward = outcome[2]
print(current_state,a_joint,reward)
current_state = next_state
last_info_list = next_info_list
##########################################
############ Neural Network ##############
##########################################
##### functions for nerual network #####
def es_greedy(inputs,epsi):
x_rand = np.random.random()
if x_rand < epsi:
return np.random.randint(0,4)
else:
return np.argmax(inputs)
def softmax(inputs,T):
    """Sample an index from the Boltzmann (softmax) distribution over `inputs`.

    Args:
        inputs: array-like of scores (e.g. Q-values).
        T: temperature; larger T flattens the distribution.

    Returns:
        Integer index in [0, len(inputs) - 1], sampled with probability
        proportional to exp(inputs[i] / T).

    Bug fixes: the original compared an undefined name `x` (NameError on the
    first loop iteration) and returned the running random value instead of
    the sampled index.  This version does standard inverse-CDF sampling and
    returns the index; scores are shifted by their maximum before
    exponentiation so exp() cannot overflow.
    """
    scores = np.asarray(inputs, dtype=float) / float(T)
    scores = scores - np.max(scores)  # numerical stability
    probs = np.exp(scores)
    probs = probs / probs.sum()
    x_rand = np.random.random()
    for i in range(len(probs)):
        if x_rand < probs[i]:
            return i
        x_rand -= probs[i]
    return len(probs) - 1  # guard against float round-off
##### target value #####
#####################################################################
#####################################################################
# Input sizes: each vision grid is a (2*n_visi+1)^2 window (presumably a
# square window of half-width n_visi around the UAV — TODO confirm); the
# self-information vector has 4 entries.
input_size_nn_vision = (2 * UAV_fire_extinguish.n_visi + 1)**2
input_size_nn_sfinfo = 4
# Q-network: four input branches (self info, other-UAV vision, small-fire
# vision, big-fire vision), merged by concatenation, feeding a 5-way linear
# head (one Q-value per action).
self_info_branch = Sequential()
self_info_branch.add(Dense(10, init='lecun_uniform', input_shape = (input_size_nn_sfinfo,)))
self_info_branch.add(Activation('relu'))
other_vision_branch = Sequential()
other_vision_branch.add(Dense(50, init='lecun_uniform', input_shape = (input_size_nn_vision,)))
other_vision_branch.add(Activation('relu'))
other_vision_branch.add(Dense(50, init='lecun_uniform'))
other_vision_branch.add(Activation('relu'))
smalf_vision_branch = Sequential()
smalf_vision_branch.add(Dense(50, init='lecun_uniform', input_shape = (input_size_nn_vision,)))
smalf_vision_branch.add(Activation('relu'))
smalf_vision_branch.add(Dense(50, init='lecun_uniform'))
smalf_vision_branch.add(Activation('relu'))
bigf_vision_branch = Sequential()
bigf_vision_branch.add(Dense(50, init='lecun_uniform', input_shape = (input_size_nn_vision,)))
bigf_vision_branch.add(Activation('relu'))
bigf_vision_branch.add(Dense(50, init='lecun_uniform'))
bigf_vision_branch.add(Activation('relu'))
# NOTE(review): the `Merge` layer class and the `init=` keyword are Keras 1.x
# APIs; this model definition will not run on Keras 2+.
merged = Merge([self_info_branch, other_vision_branch, smalf_vision_branch, bigf_vision_branch], mode='concat')
final_model = Sequential()
final_model.add(merged)
final_model.add(Activation('relu'))
final_model.add(Dense(5,init='lecun_uniform'))
final_model.add(Activation('linear'))
adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)
final_model.compile(loss='mse', optimizer=adam)
##############################
# Training hyper-parameters.
epochs = 60000  # environment steps per outer curriculum iteration
gamma = 0.9  # discount factor for the Q-learning target
epsilon = 0.2  # NOTE(review): unused below; the loop uses `epsi` instead
random_action_thresold = 1000  # steps of purely random warm-up actions
epsi = 0.2  # epsilon-greedy exploration rate (overwritten each epoch)
max_pool_size = 20000  # replay-pool capacity
n_batch_size = 5000  # minibatch size for each fit
# Initial environment state and the per-agent local observations of it.
current_state = UAV_fire_extinguish.s_init
last_info_list = []
for i_uav in range(UAV_fire_extinguish.n_uav):
 last_info_list.append(local_observation(i_uav,current_state,UAV_fire_extinguish))
# ---------------------------------------------------------------------------
# Curriculum training loop with experience replay.
# NOTE(review): `range(0)` means the body never executes; the inline comment
# suggests the intended count was 3.  Left disabled, as in the original.
# ---------------------------------------------------------------------------
for iteration_times in range(0):  # 3 not 0
    # Replay pools, kept index-aligned: entry k of every list describes the
    # same (agent, timestep) experience.
    obs_sfinfo = []
    obs_otheru = []
    obs_smallf = []
    obs_bigf = []
    reward_list = []
    target_list = []
    # Curriculum: each outer iteration makes the third fire harder to put out.
    if iteration_times == 0:
        pass
    elif iteration_times == 1:
        UAV_fire_extinguish.e_fire[2][0] = 0.2
    elif iteration_times == 2:
        UAV_fire_extinguish.e_fire[2][0] = 0.1
    else:
        UAV_fire_extinguish.e_fire[2][0] = 0.0
    print(UAV_fire_extinguish.e_fire[2][0])
    for ep in range(epochs):
        epsi = 0.1  # - 0.2 * (ep / epochs)
        if ep % 100 == 0:
            print("iteration times = ", ep, "===============================")
        # --- choose a joint action: random warm-up, then epsilon-greedy ---
        a_joint = [0] * UAV_fire_extinguish.n_uav
        if ep < random_action_thresold:
            for i_uav in range(UAV_fire_extinguish.n_uav):
                a_joint[i_uav] = random.randint(0, 4)
        else:
            for i_uav in range(UAV_fire_extinguish.n_uav):
                (obs_0, obs_1, obs_2, obs_3) = last_info_list[i_uav][:]
                old_qval = final_model.predict(
                    [np.array(obs_0).reshape(1, input_size_nn_sfinfo),
                     np.array(obs_1).reshape(1, input_size_nn_vision),
                     np.array(obs_2).reshape(1, input_size_nn_vision),
                     np.array(obs_3).reshape(1, input_size_nn_vision)],
                    batch_size=1)
                a_joint[i_uav] = es_greedy(old_qval, epsi)
        # --- step the environment ---
        outcome_transition = transition_sample(current_state, a_joint, last_info_list, UAV_fire_extinguish)
        next_state = outcome_transition[0]
        # --- store each agent's observation, reward and TD target ---
        for i_uav in range(UAV_fire_extinguish.n_uav):
            (obs_0, obs_1, obs_2, obs_3) = last_info_list[i_uav][:]
            obs_sfinfo.append(np.array(obs_0).reshape(1, input_size_nn_sfinfo))
            obs_otheru.append(np.array(obs_1).reshape(1, input_size_nn_vision))
            obs_smallf.append(np.array(obs_2).reshape(1, input_size_nn_vision))
            obs_bigf.append(np.array(obs_3).reshape(1, input_size_nn_vision))
            reward_list.append(outcome_transition[2])
            # Q-learning target: reward + gamma * max_a' Q(s', a') on the
            # taken action, current Q-values everywhere else.
            (obsp_0, obsp_1, obsp_2, obsp_3) = outcome_transition[1][i_uav][:]
            old_qval = final_model.predict(
                [np.array(obs_0).reshape(1, input_size_nn_sfinfo),
                 np.array(obs_1).reshape(1, input_size_nn_vision),
                 np.array(obs_2).reshape(1, input_size_nn_vision),
                 np.array(obs_3).reshape(1, input_size_nn_vision)],
                batch_size=1)
            new_qval = final_model.predict(
                [np.array(obsp_0).reshape(1, input_size_nn_sfinfo),
                 np.array(obsp_1).reshape(1, input_size_nn_vision),
                 np.array(obsp_2).reshape(1, input_size_nn_vision),
                 np.array(obsp_3).reshape(1, input_size_nn_vision)],
                batch_size=1)
            max_q_new = np.max(new_qval)
            y = np.zeros((1, 5))
            y[:] = old_qval[:]
            y[0][a_joint[i_uav]] = outcome_transition[2] + gamma * max_q_new
            target_list.append(y)
        # --- advance state / observations ---
        current_state = next_state
        last_info_list = outcome_transition[1][:]
        # --- bound the replay pools.  BUG FIX: the original never popped
        # target_list here, so it grew without bound and fell out of index
        # alignment with the observation pools once trimming began. ---
        if len(obs_sfinfo) > max_pool_size:
            obs_sfinfo.pop(0)
            obs_otheru.pop(0)
            obs_smallf.pop(0)
            obs_bigf.pop(0)
            reward_list.pop(0)
            target_list.pop(0)
        # --- periodically fit the network on a random minibatch ---
        if ep % 500 == 0 and ep > random_action_thresold:
            obs_0_array = np.zeros((n_batch_size, input_size_nn_sfinfo))
            obs_1_array = np.zeros((n_batch_size, input_size_nn_vision))
            obs_2_array = np.zeros((n_batch_size, input_size_nn_vision))
            obs_3_array = np.zeros((n_batch_size, input_size_nn_vision))
            targt_array = np.zeros((n_batch_size, 5))
            if len(obs_sfinfo) > n_batch_size + 1:
                # BUG FIX: `xrange` is Python 2 only; `range` behaves the
                # same here and also works on Python 3.
                seeds = random.sample(range(0, len(obs_sfinfo)), n_batch_size)
                for i_batch_sample in range(n_batch_size):
                    b_number = seeds[i_batch_sample]
                    obs_0_array[i_batch_sample, :] = obs_sfinfo[b_number][0][:]
                    obs_1_array[i_batch_sample, :] = obs_otheru[b_number][0][:]
                    obs_2_array[i_batch_sample, :] = obs_smallf[b_number][0][:]
                    obs_3_array[i_batch_sample, :] = obs_bigf[b_number][0][:]
                    targt_array[i_batch_sample, :] = target_list[b_number][0][:]
                # BUG FIX: `fit` was previously OUTSIDE this size guard, so a
                # too-small pool trained the network on all-zero batches.
                final_model.fit([obs_0_array, obs_1_array, obs_2_array, obs_3_array],
                                targt_array,
                                batch_size=n_batch_size,
                                nb_epoch=50,
                                verbose=1)
# Final evaluation: print five independent 30-step rollouts (epsilon = 0.2)
# from the initial state, separated by divider lines.  The original repeated
# the call/print pair five times verbatim; a loop is equivalent and DRY.
for _ in range(5):
    visualize_scenario_indp(UAV_fire_extinguish.s_init, 30, 0.2, UAV_fire_extinguish)
    print("=====================")
| [
"keras.optimizers.Adam",
"keras.layers.Merge",
"keras.layers.core.Activation",
"numpy.random.random",
"numpy.argmax",
"keras.models.Sequential",
"numpy.max",
"numpy.array",
"numpy.zeros",
"numpy.random.randint",
"random.random",
"time.time",
"random.randint",
"keras.layers.core.Dense"
] | [((271, 282), 'time.time', 'time.time', ([], {}), '()\n', (280, 282), False, 'import time\n'), ((16855, 16867), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (16865, 16867), False, 'from keras.models import Sequential\n'), ((17025, 17037), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (17035, 17037), False, 'from keras.models import Sequential\n'), ((17302, 17314), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (17312, 17314), False, 'from keras.models import Sequential\n'), ((17578, 17590), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (17588, 17590), False, 'from keras.models import Sequential\n'), ((17838, 17944), 'keras.layers.Merge', 'Merge', (['[self_info_branch, other_vision_branch, smalf_vision_branch, bigf_vision_branch\n ]'], {'mode': '"""concat"""'}), "([self_info_branch, other_vision_branch, smalf_vision_branch,\n bigf_vision_branch], mode='concat')\n", (17843, 17944), False, 'from keras.layers import Merge\n'), ((17956, 17968), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (17966, 17968), False, 'from keras.models import Sequential\n'), ((18122, 18188), 'keras.optimizers.Adam', 'Adam', ([], {'lr': '(0.001)', 'beta_1': '(0.9)', 'beta_2': '(0.999)', 'epsilon': '(1e-08)', 'decay': '(0.0)'}), '(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)\n', (18126, 18188), False, 'from keras.optimizers import RMSprop, Adam\n'), ((1144, 1162), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (1160, 1162), True, 'import numpy as np\n'), ((11346, 11417), 'numpy.zeros', 'np.zeros', (['(UAV_fire_extinguish.n_uav, n_init_pool, input_number)', 'float'], {}), '((UAV_fire_extinguish.n_uav, n_init_pool, input_number), float)\n', (11354, 11417), True, 'import numpy as np\n'), ((11426, 11486), 'numpy.zeros', 'np.zeros', (['(UAV_fire_extinguish.n_uav, n_init_pool, 5)', 'float'], {}), '((UAV_fire_extinguish.n_uav, n_init_pool, 5), float)\n', (11434, 11486), True, 'import numpy as np\n'), 
((11495, 11555), 'numpy.zeros', 'np.zeros', (['(UAV_fire_extinguish.n_uav, n_init_pool, 1)', 'float'], {}), '((UAV_fire_extinguish.n_uav, n_init_pool, 1), float)\n', (11503, 11555), True, 'import numpy as np\n'), ((11565, 11636), 'numpy.zeros', 'np.zeros', (['(UAV_fire_extinguish.n_uav, n_init_pool, input_number)', 'float'], {}), '((UAV_fire_extinguish.n_uav, n_init_pool, input_number), float)\n', (11573, 11636), True, 'import numpy as np\n'), ((12853, 12924), 'numpy.zeros', 'np.zeros', (['(UAV_fire_extinguish.n_uav, n_init_pool, input_number)', 'float'], {}), '((UAV_fire_extinguish.n_uav, n_init_pool, input_number), float)\n', (12861, 12924), True, 'import numpy as np\n'), ((12933, 12993), 'numpy.zeros', 'np.zeros', (['(UAV_fire_extinguish.n_uav, n_init_pool, 5)', 'float'], {}), '((UAV_fire_extinguish.n_uav, n_init_pool, 5), float)\n', (12941, 12993), True, 'import numpy as np\n'), ((13002, 13062), 'numpy.zeros', 'np.zeros', (['(UAV_fire_extinguish.n_uav, n_init_pool, 1)', 'float'], {}), '((UAV_fire_extinguish.n_uav, n_init_pool, 1), float)\n', (13010, 13062), True, 'import numpy as np\n'), ((13072, 13143), 'numpy.zeros', 'np.zeros', (['(UAV_fire_extinguish.n_uav, n_init_pool, input_number)', 'float'], {}), '((UAV_fire_extinguish.n_uav, n_init_pool, input_number), float)\n', (13080, 13143), True, 'import numpy as np\n'), ((16138, 16156), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (16154, 16156), True, 'import numpy as np\n'), ((16285, 16303), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (16301, 16303), True, 'import numpy as np\n'), ((16889, 16957), 'keras.layers.core.Dense', 'Dense', (['(10)'], {'init': '"""lecun_uniform"""', 'input_shape': '(input_size_nn_sfinfo,)'}), "(10, init='lecun_uniform', input_shape=(input_size_nn_sfinfo,))\n", (16894, 16957), False, 'from keras.layers.core import Dense, Dropout, Activation\n'), ((16982, 17000), 'keras.layers.core.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (16992, 
17000), False, 'from keras.layers.core import Dense, Dropout, Activation\n'), ((17062, 17130), 'keras.layers.core.Dense', 'Dense', (['(50)'], {'init': '"""lecun_uniform"""', 'input_shape': '(input_size_nn_vision,)'}), "(50, init='lecun_uniform', input_shape=(input_size_nn_vision,))\n", (17067, 17130), False, 'from keras.layers.core import Dense, Dropout, Activation\n'), ((17158, 17176), 'keras.layers.core.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (17168, 17176), False, 'from keras.layers.core import Dense, Dropout, Activation\n'), ((17202, 17233), 'keras.layers.core.Dense', 'Dense', (['(50)'], {'init': '"""lecun_uniform"""'}), "(50, init='lecun_uniform')\n", (17207, 17233), False, 'from keras.layers.core import Dense, Dropout, Activation\n'), ((17259, 17277), 'keras.layers.core.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (17269, 17277), False, 'from keras.layers.core import Dense, Dropout, Activation\n'), ((17339, 17407), 'keras.layers.core.Dense', 'Dense', (['(50)'], {'init': '"""lecun_uniform"""', 'input_shape': '(input_size_nn_vision,)'}), "(50, init='lecun_uniform', input_shape=(input_size_nn_vision,))\n", (17344, 17407), False, 'from keras.layers.core import Dense, Dropout, Activation\n'), ((17435, 17453), 'keras.layers.core.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (17445, 17453), False, 'from keras.layers.core import Dense, Dropout, Activation\n'), ((17479, 17510), 'keras.layers.core.Dense', 'Dense', (['(50)'], {'init': '"""lecun_uniform"""'}), "(50, init='lecun_uniform')\n", (17484, 17510), False, 'from keras.layers.core import Dense, Dropout, Activation\n'), ((17536, 17554), 'keras.layers.core.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (17546, 17554), False, 'from keras.layers.core import Dense, Dropout, Activation\n'), ((17614, 17682), 'keras.layers.core.Dense', 'Dense', (['(50)'], {'init': '"""lecun_uniform"""', 'input_shape': '(input_size_nn_vision,)'}), "(50, 
init='lecun_uniform', input_shape=(input_size_nn_vision,))\n", (17619, 17682), False, 'from keras.layers.core import Dense, Dropout, Activation\n'), ((17709, 17727), 'keras.layers.core.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (17719, 17727), False, 'from keras.layers.core import Dense, Dropout, Activation\n'), ((17752, 17783), 'keras.layers.core.Dense', 'Dense', (['(50)'], {'init': '"""lecun_uniform"""'}), "(50, init='lecun_uniform')\n", (17757, 17783), False, 'from keras.layers.core import Dense, Dropout, Activation\n'), ((17808, 17826), 'keras.layers.core.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (17818, 17826), False, 'from keras.layers.core import Dense, Dropout, Activation\n'), ((18009, 18027), 'keras.layers.core.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (18019, 18027), False, 'from keras.layers.core import Dense, Dropout, Activation\n'), ((18045, 18075), 'keras.layers.core.Dense', 'Dense', (['(5)'], {'init': '"""lecun_uniform"""'}), "(5, init='lecun_uniform')\n", (18050, 18075), False, 'from keras.layers.core import Dense, Dropout, Activation\n'), ((18092, 18112), 'keras.layers.core.Activation', 'Activation', (['"""linear"""'], {}), "('linear')\n", (18102, 18112), False, 'from keras.layers.core import Dense, Dropout, Activation\n'), ((6341, 6356), 'random.random', 'random.random', ([], {}), '()\n', (6354, 6356), False, 'import random\n'), ((16189, 16212), 'numpy.random.randint', 'np.random.randint', (['(0)', '(4)'], {}), '(0, 4)\n', (16206, 16212), True, 'import numpy as np\n'), ((16231, 16248), 'numpy.argmax', 'np.argmax', (['inputs'], {}), '(inputs)\n', (16240, 16248), True, 'import numpy as np\n'), ((12066, 12086), 'random.randint', 'random.randint', (['(0)', '(4)'], {}), '(0, 4)\n', (12080, 12086), False, 'import random\n'), ((21839, 21855), 'numpy.max', 'np.max', (['new_qval'], {}), '(new_qval)\n', (21845, 21855), True, 'import numpy as np\n'), ((21867, 21883), 'numpy.zeros', 'np.zeros', 
(['(1, 5)'], {}), '((1, 5))\n', (21875, 21883), True, 'import numpy as np\n'), ((22729, 22775), 'numpy.zeros', 'np.zeros', (['(n_batch_size, input_size_nn_sfinfo)'], {}), '((n_batch_size, input_size_nn_sfinfo))\n', (22737, 22775), True, 'import numpy as np\n'), ((22795, 22841), 'numpy.zeros', 'np.zeros', (['(n_batch_size, input_size_nn_vision)'], {}), '((n_batch_size, input_size_nn_vision))\n', (22803, 22841), True, 'import numpy as np\n'), ((22861, 22907), 'numpy.zeros', 'np.zeros', (['(n_batch_size, input_size_nn_vision)'], {}), '((n_batch_size, input_size_nn_vision))\n', (22869, 22907), True, 'import numpy as np\n'), ((22927, 22973), 'numpy.zeros', 'np.zeros', (['(n_batch_size, input_size_nn_vision)'], {}), '((n_batch_size, input_size_nn_vision))\n', (22935, 22973), True, 'import numpy as np\n'), ((22993, 23020), 'numpy.zeros', 'np.zeros', (['(n_batch_size, 5)'], {}), '((n_batch_size, 5))\n', (23001, 23020), True, 'import numpy as np\n'), ((13572, 13592), 'random.randint', 'random.randint', (['(0)', '(4)'], {}), '(0, 4)\n', (13586, 13592), False, 'import random\n'), ((19482, 19502), 'random.randint', 'random.randint', (['(0)', '(4)'], {}), '(0, 4)\n', (19496, 19502), False, 'import random\n'), ((20677, 20692), 'numpy.array', 'np.array', (['obs_0'], {}), '(obs_0)\n', (20685, 20692), True, 'import numpy as np\n'), ((20750, 20765), 'numpy.array', 'np.array', (['obs_1'], {}), '(obs_1)\n', (20758, 20765), True, 'import numpy as np\n'), ((20823, 20838), 'numpy.array', 'np.array', (['obs_2'], {}), '(obs_2)\n', (20831, 20838), True, 'import numpy as np\n'), ((20894, 20909), 'numpy.array', 'np.array', (['obs_3'], {}), '(obs_3)\n', (20902, 20909), True, 'import numpy as np\n'), ((15201, 15216), 'numpy.array', 'np.array', (['obs_0'], {}), '(obs_0)\n', (15209, 15216), True, 'import numpy as np\n'), ((15288, 15303), 'numpy.array', 'np.array', (['obs_1'], {}), '(obs_1)\n', (15296, 15303), True, 'import numpy as np\n'), ((15375, 15390), 'numpy.array', 'np.array', (['obs_2'], 
{}), '(obs_2)\n', (15383, 15390), True, 'import numpy as np\n'), ((15462, 15477), 'numpy.array', 'np.array', (['obs_3'], {}), '(obs_3)\n', (15470, 15477), True, 'import numpy as np\n'), ((21127, 21142), 'numpy.array', 'np.array', (['obs_0'], {}), '(obs_0)\n', (21135, 21142), True, 'import numpy as np\n'), ((21214, 21229), 'numpy.array', 'np.array', (['obs_1'], {}), '(obs_1)\n', (21222, 21229), True, 'import numpy as np\n'), ((21301, 21316), 'numpy.array', 'np.array', (['obs_2'], {}), '(obs_2)\n', (21309, 21316), True, 'import numpy as np\n'), ((21388, 21403), 'numpy.array', 'np.array', (['obs_3'], {}), '(obs_3)\n', (21396, 21403), True, 'import numpy as np\n'), ((21491, 21507), 'numpy.array', 'np.array', (['obsp_0'], {}), '(obsp_0)\n', (21499, 21507), True, 'import numpy as np\n'), ((21579, 21595), 'numpy.array', 'np.array', (['obsp_1'], {}), '(obsp_1)\n', (21587, 21595), True, 'import numpy as np\n'), ((21667, 21683), 'numpy.array', 'np.array', (['obsp_2'], {}), '(obsp_2)\n', (21675, 21683), True, 'import numpy as np\n'), ((21755, 21771), 'numpy.array', 'np.array', (['obsp_3'], {}), '(obsp_3)\n', (21763, 21771), True, 'import numpy as np\n'), ((19670, 19685), 'numpy.array', 'np.array', (['obs_0'], {}), '(obs_0)\n', (19678, 19685), True, 'import numpy as np\n'), ((19757, 19772), 'numpy.array', 'np.array', (['obs_1'], {}), '(obs_1)\n', (19765, 19772), True, 'import numpy as np\n'), ((19844, 19859), 'numpy.array', 'np.array', (['obs_2'], {}), '(obs_2)\n', (19852, 19859), True, 'import numpy as np\n'), ((19931, 19946), 'numpy.array', 'np.array', (['obs_3'], {}), '(obs_3)\n', (19939, 19946), True, 'import numpy as np\n')] |
from django import forms
class ArticleRecommendForm(forms.Form):
    """Form for recommending an article to someone by email.

    Collects the sender's name and address, the recipient's address, and an
    optional free-text message.  (Labels are user-facing Spanish strings.)
    """
    # Sender's display name.
    name = forms.CharField(
        label='Nombre'
    )
    # Sender's email address.
    from_email = forms.EmailField(
        label='Tu email',
        widget=forms.EmailInput()
    )
    # Recipient's email address.
    to_email = forms.EmailField(
        label='Email destinatario',
        widget=forms.EmailInput()
    )
    # Optional message body shown as a textarea.
    message = forms.CharField(
        label='Mensaje (Opcional)',
        required=False,
        widget=forms.Textarea()
    )
| [
"django.forms.Textarea",
"django.forms.EmailInput",
"django.forms.CharField"
] | [((125, 156), 'django.forms.CharField', 'forms.CharField', ([], {'label': '"""Nombre"""'}), "(label='Nombre')\n", (140, 156), False, 'from django import forms\n'), ((247, 265), 'django.forms.EmailInput', 'forms.EmailInput', ([], {}), '()\n', (263, 265), False, 'from django import forms\n'), ((356, 374), 'django.forms.EmailInput', 'forms.EmailInput', ([], {}), '()\n', (372, 374), False, 'from django import forms\n'), ((487, 503), 'django.forms.Textarea', 'forms.Textarea', ([], {}), '()\n', (501, 503), False, 'from django import forms\n')] |
from time import sleep
print('\033[1:31m-=-\033[m' * 6)
print('\033[1m FATORIAL \033[m')
print('\033[1:31m-=-\033[m' * 6)
sleep(1)
numero = int(input('\033[1:33mDigite um número: \033[m'))
# BUG FIX: the original started the accumulator at `numero`, so entering 0
# printed "0! = 0" (the loop never ran).  Starting from 1 and multiplying
# numero * (numero-1) * ... * 2 yields the empty product 1 for 0! and 1!,
# and the same result as before for larger inputs.
fatorial = 1
for c in range(numero, 1, -1):
    fatorial = fatorial * c
sleep(1)
print('O fatorial de {} é ...'.format(numero))
sleep(1)
print('{}! = \033[1:32m{}\033[m'.format(numero, fatorial))
sleep(1)
| [
"time.sleep"
] | [((129, 137), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (134, 137), False, 'from time import sleep\n'), ((288, 296), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (293, 296), False, 'from time import sleep\n'), ((344, 352), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (349, 352), False, 'from time import sleep\n'), ((412, 420), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (417, 420), False, 'from time import sleep\n')] |
#!/usr/bin/python
import math
import sys
#bValue = float(sys.argv[1])
#nomOhm = float(sys.argv[2])
#nomTemp = float(sys.argv[3])
#seriesR = float(sys.argv[4])
#adcRes = int(sys.argv[5])
# Thermistor ADC -> temperature lookup-table generator (Beta model).
# Temperatures appear to be in tenths of a degree Celsius (2731.5 = 273.15 K
# scaled by 10; nomTemp = 250 = 25.0 C) — TODO confirm against the consumer
# of the generated C table.
bValue = 3750    # thermistor Beta coefficient [K]
nomOhm = 10000   # nominal resistance at nomTemp [ohm]
nomTemp = 250    # nominal temperature, tenths of a degree C
seriesR = 4700   # series (pull-up) resistor [ohm]
adcRes = 10      # ADC resolution in bits
adcMax = 2**adcRes
vals = [0] * adcMax
for adcVal in range(adcMax):
    try:
        # Beta equation solved for T, in tenths of a degree C.
        vals[adcVal] = 10 / ((math.log((seriesR / (((adcMax - 1) / adcVal) - 1)) / nomOhm) / bValue) + (10 / (nomTemp + 2731.5))) - 2731
    except (ValueError, ZeroDivisionError):
        # BUG FIX: was a bare `except:` (which also swallows SystemExit /
        # KeyboardInterrupt).  adcVal == 0 and adcVal == adcMax-1 divide by
        # zero; extreme codes make the log argument non-positive.  Leave the
        # table entry at 0, as before.
        pass
print("int16_t tVals[{0}] = {{".format(adcMax))
# Emit the table eight entries per row (adcMax is a power of two >= 8).
for adcVal in range(0, adcMax, 8):
    print(" {0},{1},{2},{3},{4},{5},{6},{7},"
          .format(
              round(vals[adcVal]),
              round(vals[adcVal + 1]),
              round(vals[adcVal + 2]),
              round(vals[adcVal + 3]),
              round(vals[adcVal + 4]),
              round(vals[adcVal + 5]),
              round(vals[adcVal + 6]),
              round(vals[adcVal + 7]),
          ))
print("};\n")
"math.log"
] | [((374, 430), 'math.log', 'math.log', (['(seriesR / ((adcMax - 1) / adcVal - 1) / nomOhm)'], {}), '(seriesR / ((adcMax - 1) / adcVal - 1) / nomOhm)\n', (382, 430), False, 'import math\n')] |
# Copyright 2018 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functionality for working with probability spaces and random variables.
Basic recap of probability theory, and thus of classes in this file:
* A probability space is a (finite or infinite) set Omega with a probability
measure defined on this.
* A random variable is a mapping from a probability space to another measure
space.
* An event is a measurable set in a sample space.
For example, suppose a bag contains 3 balls: two red balls, and one white ball.
This could be represented by a discrete probability space of size 3 with
elements {1, 2, 3}, with equal measure assigned to all 3 elements; and a random
variable that maps 1->red, 2->red, and 3->white. Then the probability of drawing
a red ball is the measure in the probability space of the inverse under the
random variable mapping of {red}, i.e., of {1, 2}, which is 2/3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import itertools
# Dependency imports
import six
from six.moves import zip
import sympy
@six.add_metaclass(abc.ABCMeta)
class Event(object):
  """Represents an event in a measure space.

  Abstract marker base class; concrete subclasses (e.g. `DiscreteEvent`,
  `FiniteProductEvent`) define how the set of outcomes is represented.
  """
@six.add_metaclass(abc.ABCMeta)
class ProbabilitySpace(object):
  """Represents a probability space."""

  @abc.abstractmethod
  def probability(self, event):
    """Returns the probability of an event.

    Args:
      event: An `Event` measurable in this space.

    Returns:
      The measure of `event` (concrete subclasses in this module return
      sympy rationals).
    """
@six.add_metaclass(abc.ABCMeta)
class RandomVariable(object):
  """Random variable; a mapping from a probability space to a measure space."""

  @abc.abstractmethod
  def __call__(self, event):
    """Maps an `_Event` in the probability space to one in the sample space.

    Args:
      event: `Event` in the probability space.

    Returns:
      The image of `event` in the sample space.
    """

  @abc.abstractmethod
  def inverse(self, event):
    """Maps event in the sample space back to the inverse in the prob. space.

    Args:
      event: `Event` in the sample space.

    Returns:
      The preimage of `event` in the probability space.
    """
class DiscreteEvent(Event):
  """Event given by a finite collection of discrete values."""

  def __init__(self, values):
    # The collection is stored as passed in (set, list, ...), not copied.
    self._values = values

  @property
  def values(self):
    """The collection of outcomes making up this event."""
    return self._values
class FiniteProductEvent(Event):
  """Event formed as the cartesian product of component events."""

  def __init__(self, events):
    """Initializes a `FiniteProductEvent`.

    Args:
      events: Tuple of `Event`s; resulting event will be cartesian product of
        these.
    """
    self._events = events

  @property
  def events(self):
    return self._events

  def all_sequences(self):
    """Iterates over tuples formed by picking one value per component event.

    Every component must be a `DiscreteEvent`.

    Returns:
      Iterator over tuples of values.

    Raises:
      ValueError: If one of the component events is not a `DiscreteEvent`.
    """
    for component in self._events:
      if not isinstance(component, DiscreteEvent):
        raise ValueError('Not all component events are DiscreteEvents')
    return itertools.product(*[component.values for component in self._events])
class CountLevelSetEvent(Event):
  """Event of all sequences with fixed number of different values occurring."""

  def __init__(self, counts):
    """Initializes `CountLevelSetEvent`.
    E.g., to construct the event of getting two red balls and one green ball,
    pass `counts = {red: 2, green: 1}`. (Then `all_sequences()` would return
    `[(red, red, green), (red, green, red), (green, red, red)]`.
    Args:
      counts: Dictionary mapping values to the number of times they occur in a
        sequence.
    """
    self._counts = counts
    # Lazily computed by all_sequences(); None until first requested.
    self._all_sequences = None

  @property
  def counts(self):
    return self._counts

  def all_sequences(self):
    """Returns all sequences generated by this level set."""
    if self._all_sequences is None:
      # Generate via dynamic programming.
      cache = {}  # dict mapping tuple -> list of tuples
      labels = list(self._counts.keys())

      def generate(counts):
        """Returns list of tuples for given `counts` of labels."""
        # Base case: nothing left to place -> one empty sequence.
        if sum(counts) == 0:
          return [()]
        counts = tuple(counts)
        if counts in cache:
          return cache[counts]
        generated = []
        # Choose which label goes first, then recurse on the reduced counts
        # and prepend that label to every shorter sequence.
        for i, count in enumerate(counts):
          if count == 0:
            continue
          counts_minus = list(counts)
          counts_minus[i] -= 1
          counts_minus = tuple(counts_minus)
          extensions = generate(counts_minus)
          generated += [tuple([labels[i]] + list(extension))
                        for extension in extensions]
        cache[counts] = generated
        return generated

      self._all_sequences = generate(list(self._counts.values()))
    return self._all_sequences
class SequenceEvent(Event):
  """Event given by an explicitly enumerated collection of sequences."""

  def __init__(self, sequences):
    # Stored as passed in; all_sequences() exposes it unchanged.
    self._sequences = sequences

  def all_sequences(self):
    return self._sequences
def normalize_weights(weights):
  """Normalizes the weights (as sympy.Rational) in dictionary of weights."""
  total = sum(six.itervalues(weights))
  normalized = {}
  for value, weight in six.iteritems(weights):
    # Exact rational arithmetic, so the results sum to exactly 1.
    normalized[value] = sympy.Rational(weight, total)
  return normalized
class DiscreteProbabilitySpace(ProbabilitySpace):
  """Probability space over a finite set of discrete outcomes."""

  def __init__(self, weights=None):
    """Creates the space from a dict of (possibly unnormalized) weights.

    Args:
      weights: Dictionary mapping values to relative probability of selecting
        that value. This will be normalized.
    """
    self._weights = normalize_weights(weights)

  def probability(self, event):
    if not isinstance(event, DiscreteEvent):
      raise ValueError('Unhandled event type {}'.format(type(event)))
    # Values absent from the space contribute zero mass.
    total = 0
    for value in event.values:
      if value in self._weights:
        total += self._weights[value]
    return total

  @property
  def weights(self):
    """Returns dictionary of probability of each element."""
    return self._weights
class FiniteProductSpace(ProbabilitySpace):
  """Finite cartesian product of probability spaces."""

  def __init__(self, spaces):
    """Initializes a `FiniteProductSpace`.
    Args:
      spaces: List of `ProbabilitySpace`.
    """
    self._spaces = spaces

  def all_spaces_equal(self):
    # True iff every component space compares equal to the first one.
    return all([self._spaces[0] == space for space in self._spaces])

  def probability(self, event):
    # Specializations for optimization.
    if isinstance(event, FiniteProductEvent):
      # Components are independent: the product measure is the product of
      # the per-coordinate probabilities.
      assert len(self._spaces) == len(event.events)
      return sympy.prod([
          space.probability(event_slice)
          for space, event_slice in zip(self._spaces, event.events)])
    if isinstance(event, CountLevelSetEvent) and self.all_spaces_equal():
      # i.i.d. coordinates: multinomial formula
      #   P = (n! / prod_i c_i!) * prod_i p_i^{c_i}
      # where c_i are the requested counts and p_i the single-draw
      # probabilities.
      space = self._spaces[0]
      counts = event.counts
      probabilities = {
          value: space.probability(DiscreteEvent({value}))
          for value in six.iterkeys(counts)
      }
      num_events = sum(six.itervalues(counts))
      assert num_events == len(self._spaces)
      # Multinomial coefficient:
      coeff = (
          sympy.factorial(num_events) / sympy.prod(
              [sympy.factorial(i) for i in six.itervalues(counts)]))
      return coeff * sympy.prod([
          pow(probabilities[value], counts[value])
          for value in six.iterkeys(counts)
      ])
    raise ValueError('Unhandled event type {}'.format(type(event)))

  @property
  def spaces(self):
    """Returns list of spaces."""
    return self._spaces
class SampleWithoutReplacementSpace(ProbabilitySpace):
  """Probability space formed by sampling discrete space without replacement."""

  def __init__(self, weights, n_samples):
    """Initializes a `SampleWithoutReplacementSpace`.
    Args:
      weights: Dictionary mapping values to relative probability of selecting
        that value. This will be normalized.
      n_samples: Number of samples to draw.
    Raises:
      ValueError: If `n_samples > len(weights)`.
    """
    if n_samples > len(weights):
      raise ValueError('n_samples is more than number of discrete elements')
    self._weights = normalize_weights(weights)
    self._n_samples = n_samples

  @property
  def n_samples(self):
    """Number of samples to draw."""
    return self._n_samples

  def probability(self, event):
    # Duck-typed: any event exposing all_sequences() is accepted.
    try:
      all_sequences = event.all_sequences()
    except AttributeError:
      raise ValueError('Unhandled event type {}'.format(type(event)))
    probability_sum = 0
    for sequence in all_sequences:
      if len(sequence) != len(set(sequence)):
        continue  # not all unique, so not "without replacement".
      # Sequential-draw probability: at each step the chance of drawing i is
      # its weight renormalized over the mass still remaining in the urn.
      p_sequence = 1
      removed_prob = 0
      for i in sequence:
        p = self._weights[i] if i in self._weights else 0
        if p == 0:
          # A value outside the space makes the whole sequence impossible.
          p_sequence = 0
          break
        p_sequence *= p / (1 - removed_prob)
        removed_prob += p
      probability_sum += p_sequence
    return probability_sum
class IdentityRandomVariable(RandomVariable):
  """Random variable that maps every event to itself (identity map)."""

  def __call__(self, event):
    return event

  def inverse(self, event):
    return event
class DiscreteRandomVariable(RandomVariable):
  """Random variable defined by a dictionary lookup on discrete values.

  This is simply a mapping from a discrete space to a discrete space; the
  inverse image of each output value is precomputed at construction time.
  """

  def __init__(self, mapping):
    """Initializes `DiscreteRandomVariable` from `mapping` dict."""
    self._mapping = mapping
    self._inverse = {}
    for key, value in six.iteritems(mapping):
      # Group all keys sharing the same image value.
      self._inverse.setdefault(value, set()).add(key)

  def __call__(self, event):
    if not isinstance(event, DiscreteEvent):
      raise ValueError('Unhandled event type {}'.format(type(event)))
    return DiscreteEvent({self._mapping[value] for value in event.values})

  def inverse(self, event):
    if not isinstance(event, DiscreteEvent):
      raise ValueError('Unhandled event type {}'.format(type(event)))
    # Union of the preimages of every value in the event; values with no
    # preimage contribute nothing.
    preimage = set()
    for value in event.values:
      preimage.update(self._inverse.get(value, ()))
    return DiscreteEvent(preimage)
class FiniteProductRandomVariable(RandomVariable):
  """Product random variable.
  This has the following semantics. Let this be X = (X_1, ..., X_n). Then
  X(w) = (X_1(w_1), ..., X_n(w_n))
  (the sample space is assumed to be of sequence type).
  """

  def __init__(self, random_variables):
    """Initializes a `FiniteProductRandomVariable`.
    Args:
      random_variables: Tuple of `RandomVariable`.
    """
    self._random_variables = random_variables

  def __call__(self, event):
    if isinstance(event, FiniteProductEvent):
      assert len(event.events) == len(self._random_variables)
      # Apply each component variable to its matching component event.
      zipped = list(zip(self._random_variables, event.events))
      return FiniteProductEvent(
          [random_variable(sub_event)
           for random_variable, sub_event in zipped])
    else:
      raise ValueError('Unhandled event type {}'.format(type(event)))

  def inverse(self, event):
    # Specialization for `FiniteProductEvent`; don't need to take all sequences.
    if isinstance(event, FiniteProductEvent):
      assert len(event.events) == len(self._random_variables)
      zipped = list(zip(self._random_variables, event.events))
      return FiniteProductEvent(tuple(
          random_variable.inverse(sub_event)
          for random_variable, sub_event in zipped))
    # Try fallback of mapping each sequence separately.
    try:
      all_sequences = event.all_sequences()
    except AttributeError:
      raise ValueError('Unhandled event type {}'.format(type(event)))
    # Invert each concrete sequence element-wise, then take the union of all
    # resulting preimage sequences.
    mapped = set()
    for sequence in all_sequences:
      assert len(sequence) == len(self._random_variables)
      zipped = list(zip(self._random_variables, sequence))
      mapped_sequence = FiniteProductEvent(tuple(
          random_variable.inverse(DiscreteEvent({element}))
          for random_variable, element in zipped))
      mapped.update(mapped_sequence.all_sequences())
    return SequenceEvent(mapped)
| [
"sympy.factorial",
"six.itervalues",
"six.add_metaclass",
"itertools.product",
"six.iteritems",
"six.iterkeys",
"six.moves.zip",
"sympy.Rational"
] | [((1666, 1696), 'six.add_metaclass', 'six.add_metaclass', (['abc.ABCMeta'], {}), '(abc.ABCMeta)\n', (1683, 1696), False, 'import six\n'), ((1769, 1799), 'six.add_metaclass', 'six.add_metaclass', (['abc.ABCMeta'], {}), '(abc.ABCMeta)\n', (1786, 1799), False, 'import six\n'), ((1977, 2007), 'six.add_metaclass', 'six.add_metaclass', (['abc.ABCMeta'], {}), '(abc.ABCMeta)\n', (1994, 2007), False, 'import six\n'), ((3473, 3504), 'itertools.product', 'itertools.product', (['*values_list'], {}), '(*values_list)\n', (3490, 3504), False, 'import itertools\n'), ((5505, 5528), 'six.itervalues', 'six.itervalues', (['weights'], {}), '(weights)\n', (5519, 5528), False, 'import six\n'), ((5550, 5584), 'sympy.Rational', 'sympy.Rational', (['weight', 'weight_sum'], {}), '(weight, weight_sum)\n', (5564, 5584), False, 'import sympy\n'), ((9888, 9910), 'six.iteritems', 'six.iteritems', (['mapping'], {}), '(mapping)\n', (9901, 9910), False, 'import six\n'), ((5608, 5630), 'six.iteritems', 'six.iteritems', (['weights'], {}), '(weights)\n', (5621, 5630), False, 'import six\n'), ((7346, 7368), 'six.itervalues', 'six.itervalues', (['counts'], {}), '(counts)\n', (7360, 7368), False, 'import six\n'), ((7474, 7501), 'sympy.factorial', 'sympy.factorial', (['num_events'], {}), '(num_events)\n', (7489, 7501), False, 'import sympy\n'), ((11203, 11244), 'six.moves.zip', 'zip', (['self._random_variables', 'event.events'], {}), '(self._random_variables, event.events)\n', (11206, 11244), False, 'from six.moves import zip\n'), ((11689, 11730), 'six.moves.zip', 'zip', (['self._random_variables', 'event.events'], {}), '(self._random_variables, event.events)\n', (11692, 11730), False, 'from six.moves import zip\n'), ((12209, 12246), 'six.moves.zip', 'zip', (['self._random_variables', 'sequence'], {}), '(self._random_variables, sequence)\n', (12212, 12246), False, 'from six.moves import zip\n'), ((7293, 7313), 'six.iterkeys', 'six.iterkeys', (['counts'], {}), '(counts)\n', (7305, 7313), False, 'import 
six\n'), ((7020, 7051), 'six.moves.zip', 'zip', (['self._spaces', 'event.events'], {}), '(self._spaces, event.events)\n', (7023, 7051), False, 'from six.moves import zip\n'), ((7531, 7549), 'sympy.factorial', 'sympy.factorial', (['i'], {}), '(i)\n', (7546, 7549), False, 'import sympy\n'), ((7559, 7581), 'six.itervalues', 'six.itervalues', (['counts'], {}), '(counts)\n', (7573, 7581), False, 'import six\n'), ((7693, 7713), 'six.iterkeys', 'six.iterkeys', (['counts'], {}), '(counts)\n', (7705, 7713), False, 'import six\n')] |
###############################################################################
# Copyright (c) 2007-2018, National Research Foundation (Square Kilometre Array)
#
# Licensed under the BSD 3-Clause License (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy
# of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
"""Tests for non-linear least-squares fitter.
:author: <NAME>
:license: Modified BSD
"""
from __future__ import division
import numpy as np
from numpy.testing import TestCase, assert_almost_equal, run_module_suite
from scikits.fitting import (NonLinearLeastSquaresFit, LinearLeastSquaresFit,
vectorize_fit_func)
class TestNonLinearLeastSquaresFit(TestCase):
"""Check the NonLinearLeastSquaresFit class."""
def setUp(self):
# Quadratic function centred at p
self.vFunc = vectorize_fit_func(lambda p, x: ((x - p) ** 2).sum())
self.true_params = np.array([1, -4])
self.init_params = np.array([0, 0])
self.x = 4.0 * np.random.randn(2, 20)
self.y = self.vFunc(self.true_params, self.x)
# 2-D log Gaussian function
def lngauss_diagcov(p, x):
xminmu = x - p[:2, np.newaxis]
return p[4] - 0.5 * np.dot(p[2:4], xminmu * xminmu)
self.func2 = lngauss_diagcov
self.true_params2 = np.array([3, -2, 10, 10, 4])
self.init_params2 = np.array([0, 0, 1, 1, 0])
self.x2 = np.random.randn(2, 80)
self.y2 = lngauss_diagcov(self.true_params2, self.x2)
# Linear function
self.func3 = lambda p, x: np.dot(p, x)
self.jac3 = lambda p, x: x
self.true_params3 = np.array([-0.1, 0.2, -0.3, 0.0, 0.5])
self.init_params3 = np.zeros(5)
self.enabled_params_int = [0, 1, 2, 4]
self.enabled_params_bool = [True, True, True, False, True]
t = np.arange(0, 10., 10. / 100)
self.x3 = np.vander(t, 5).T
self.y3 = self.func3(self.true_params3, self.x3)
def test_fit_eval_func1(self):
"""NonLinearLeastSquaresFit: Basic function fitting and evaluation."""
interp = NonLinearLeastSquaresFit(self.vFunc, self.init_params)
interp.fit(self.x, self.y)
y = interp(self.x)
assert_almost_equal(interp.params, self.true_params, decimal=7)
assert_almost_equal(y, self.y, decimal=5)
def test_fit_eval_gauss(self):
"""NonLinearLeastSquaresFit: Check fit on 2-D log Gaussian function."""
interp2 = NonLinearLeastSquaresFit(self.func2, self.init_params2)
interp2.fit(self.x2, self.y2)
y2 = interp2(self.x2)
assert_almost_equal(interp2.params, self.true_params2, decimal=10)
assert_almost_equal(y2, self.y2, decimal=10)
def test_fit_eval_linear(self):
"""NonLinearLeastSquaresFit: Do linear problem and check Jacobian."""
lin = LinearLeastSquaresFit()
lin.fit(self.x3, self.y3, std_y=2.0)
nonlin = NonLinearLeastSquaresFit(self.func3, self.init_params3,
func_jacobian=self.jac3)
nonlin.fit(self.x3, self.y3, std_y=2.0)
# A correct Jacobian helps a lot...
assert_almost_equal(nonlin.params, self.true_params3, decimal=11)
assert_almost_equal(nonlin.cov_params, lin.cov_params, decimal=11)
nonlin_nojac = NonLinearLeastSquaresFit(self.func3, self.init_params3)
nonlin_nojac.fit(self.x3, self.y3, std_y=0.1)
assert_almost_equal(nonlin_nojac.params, self.true_params3, decimal=5)
# Covariance matrix is way smaller than linear one...
def test_enabled_params(self):
"""NonLinearLeastSquaresFit: Try to optimise subset of parameters."""
lin = LinearLeastSquaresFit()
lin.fit(self.x3[self.enabled_params_int, :], self.y3, std_y=2.0)
lin_cov_params = np.zeros((len(self.true_params3),
len(self.true_params3)))
subset = np.ix_(self.enabled_params_int, self.enabled_params_int)
lin_cov_params[subset] = lin.cov_params
nonlin = NonLinearLeastSquaresFit(self.func3, self.init_params3,
self.enabled_params_int, self.jac3)
nonlin.fit(self.x3, self.y3, std_y=2.0)
assert_almost_equal(nonlin.params, self.true_params3, decimal=11)
assert_almost_equal(nonlin.cov_params, lin_cov_params, decimal=11)
nonlin = NonLinearLeastSquaresFit(self.func3, self.init_params3,
self.enabled_params_bool, self.jac3)
nonlin.fit(self.x3, self.y3, std_y=2.0)
assert_almost_equal(nonlin.params, self.true_params3, decimal=11)
assert_almost_equal(nonlin.cov_params, lin_cov_params, decimal=11)
if __name__ == "__main__":
run_module_suite()
| [
"scikits.fitting.LinearLeastSquaresFit",
"numpy.vander",
"scikits.fitting.NonLinearLeastSquaresFit",
"numpy.ix_",
"numpy.array",
"numpy.zeros",
"numpy.testing.assert_almost_equal",
"numpy.dot",
"numpy.testing.run_module_suite",
"numpy.random.randn",
"numpy.arange"
] | [((5252, 5270), 'numpy.testing.run_module_suite', 'run_module_suite', ([], {}), '()\n', (5268, 5270), False, 'from numpy.testing import TestCase, assert_almost_equal, run_module_suite\n'), ((1392, 1409), 'numpy.array', 'np.array', (['[1, -4]'], {}), '([1, -4])\n', (1400, 1409), True, 'import numpy as np\n'), ((1437, 1453), 'numpy.array', 'np.array', (['[0, 0]'], {}), '([0, 0])\n', (1445, 1453), True, 'import numpy as np\n'), ((1798, 1826), 'numpy.array', 'np.array', (['[3, -2, 10, 10, 4]'], {}), '([3, -2, 10, 10, 4])\n', (1806, 1826), True, 'import numpy as np\n'), ((1855, 1880), 'numpy.array', 'np.array', (['[0, 0, 1, 1, 0]'], {}), '([0, 0, 1, 1, 0])\n', (1863, 1880), True, 'import numpy as np\n'), ((1899, 1921), 'numpy.random.randn', 'np.random.randn', (['(2)', '(80)'], {}), '(2, 80)\n', (1914, 1921), True, 'import numpy as np\n'), ((2120, 2157), 'numpy.array', 'np.array', (['[-0.1, 0.2, -0.3, 0.0, 0.5]'], {}), '([-0.1, 0.2, -0.3, 0.0, 0.5])\n', (2128, 2157), True, 'import numpy as np\n'), ((2186, 2197), 'numpy.zeros', 'np.zeros', (['(5)'], {}), '(5)\n', (2194, 2197), True, 'import numpy as np\n'), ((2324, 2354), 'numpy.arange', 'np.arange', (['(0)', '(10.0)', '(10.0 / 100)'], {}), '(0, 10.0, 10.0 / 100)\n', (2333, 2354), True, 'import numpy as np\n'), ((2578, 2632), 'scikits.fitting.NonLinearLeastSquaresFit', 'NonLinearLeastSquaresFit', (['self.vFunc', 'self.init_params'], {}), '(self.vFunc, self.init_params)\n', (2602, 2632), False, 'from scikits.fitting import NonLinearLeastSquaresFit, LinearLeastSquaresFit, vectorize_fit_func\n'), ((2703, 2766), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['interp.params', 'self.true_params'], {'decimal': '(7)'}), '(interp.params, self.true_params, decimal=7)\n', (2722, 2766), False, 'from numpy.testing import TestCase, assert_almost_equal, run_module_suite\n'), ((2775, 2816), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['y', 'self.y'], {'decimal': '(5)'}), '(y, self.y, decimal=5)\n', 
(2794, 2816), False, 'from numpy.testing import TestCase, assert_almost_equal, run_module_suite\n'), ((2951, 3006), 'scikits.fitting.NonLinearLeastSquaresFit', 'NonLinearLeastSquaresFit', (['self.func2', 'self.init_params2'], {}), '(self.func2, self.init_params2)\n', (2975, 3006), False, 'from scikits.fitting import NonLinearLeastSquaresFit, LinearLeastSquaresFit, vectorize_fit_func\n'), ((3083, 3149), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['interp2.params', 'self.true_params2'], {'decimal': '(10)'}), '(interp2.params, self.true_params2, decimal=10)\n', (3102, 3149), False, 'from numpy.testing import TestCase, assert_almost_equal, run_module_suite\n'), ((3158, 3202), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['y2', 'self.y2'], {'decimal': '(10)'}), '(y2, self.y2, decimal=10)\n', (3177, 3202), False, 'from numpy.testing import TestCase, assert_almost_equal, run_module_suite\n'), ((3332, 3355), 'scikits.fitting.LinearLeastSquaresFit', 'LinearLeastSquaresFit', ([], {}), '()\n', (3353, 3355), False, 'from scikits.fitting import NonLinearLeastSquaresFit, LinearLeastSquaresFit, vectorize_fit_func\n'), ((3418, 3503), 'scikits.fitting.NonLinearLeastSquaresFit', 'NonLinearLeastSquaresFit', (['self.func3', 'self.init_params3'], {'func_jacobian': 'self.jac3'}), '(self.func3, self.init_params3, func_jacobian=self.jac3\n )\n', (3442, 3503), False, 'from scikits.fitting import NonLinearLeastSquaresFit, LinearLeastSquaresFit, vectorize_fit_func\n'), ((3641, 3706), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['nonlin.params', 'self.true_params3'], {'decimal': '(11)'}), '(nonlin.params, self.true_params3, decimal=11)\n', (3660, 3706), False, 'from numpy.testing import TestCase, assert_almost_equal, run_module_suite\n'), ((3715, 3781), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['nonlin.cov_params', 'lin.cov_params'], {'decimal': '(11)'}), '(nonlin.cov_params, lin.cov_params, decimal=11)\n', (3734, 3781), 
False, 'from numpy.testing import TestCase, assert_almost_equal, run_module_suite\n'), ((3805, 3860), 'scikits.fitting.NonLinearLeastSquaresFit', 'NonLinearLeastSquaresFit', (['self.func3', 'self.init_params3'], {}), '(self.func3, self.init_params3)\n', (3829, 3860), False, 'from scikits.fitting import NonLinearLeastSquaresFit, LinearLeastSquaresFit, vectorize_fit_func\n'), ((3923, 3993), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['nonlin_nojac.params', 'self.true_params3'], {'decimal': '(5)'}), '(nonlin_nojac.params, self.true_params3, decimal=5)\n', (3942, 3993), False, 'from numpy.testing import TestCase, assert_almost_equal, run_module_suite\n'), ((4184, 4207), 'scikits.fitting.LinearLeastSquaresFit', 'LinearLeastSquaresFit', ([], {}), '()\n', (4205, 4207), False, 'from scikits.fitting import NonLinearLeastSquaresFit, LinearLeastSquaresFit, vectorize_fit_func\n'), ((4417, 4473), 'numpy.ix_', 'np.ix_', (['self.enabled_params_int', 'self.enabled_params_int'], {}), '(self.enabled_params_int, self.enabled_params_int)\n', (4423, 4473), True, 'import numpy as np\n'), ((4539, 4635), 'scikits.fitting.NonLinearLeastSquaresFit', 'NonLinearLeastSquaresFit', (['self.func3', 'self.init_params3', 'self.enabled_params_int', 'self.jac3'], {}), '(self.func3, self.init_params3, self.\n enabled_params_int, self.jac3)\n', (4563, 4635), False, 'from scikits.fitting import NonLinearLeastSquaresFit, LinearLeastSquaresFit, vectorize_fit_func\n'), ((4729, 4794), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['nonlin.params', 'self.true_params3'], {'decimal': '(11)'}), '(nonlin.params, self.true_params3, decimal=11)\n', (4748, 4794), False, 'from numpy.testing import TestCase, assert_almost_equal, run_module_suite\n'), ((4803, 4869), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['nonlin.cov_params', 'lin_cov_params'], {'decimal': '(11)'}), '(nonlin.cov_params, lin_cov_params, decimal=11)\n', (4822, 4869), False, 'from numpy.testing 
import TestCase, assert_almost_equal, run_module_suite\n'), ((4887, 4984), 'scikits.fitting.NonLinearLeastSquaresFit', 'NonLinearLeastSquaresFit', (['self.func3', 'self.init_params3', 'self.enabled_params_bool', 'self.jac3'], {}), '(self.func3, self.init_params3, self.\n enabled_params_bool, self.jac3)\n', (4911, 4984), False, 'from scikits.fitting import NonLinearLeastSquaresFit, LinearLeastSquaresFit, vectorize_fit_func\n'), ((5078, 5143), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['nonlin.params', 'self.true_params3'], {'decimal': '(11)'}), '(nonlin.params, self.true_params3, decimal=11)\n', (5097, 5143), False, 'from numpy.testing import TestCase, assert_almost_equal, run_module_suite\n'), ((5152, 5218), 'numpy.testing.assert_almost_equal', 'assert_almost_equal', (['nonlin.cov_params', 'lin_cov_params'], {'decimal': '(11)'}), '(nonlin.cov_params, lin_cov_params, decimal=11)\n', (5171, 5218), False, 'from numpy.testing import TestCase, assert_almost_equal, run_module_suite\n'), ((1477, 1499), 'numpy.random.randn', 'np.random.randn', (['(2)', '(20)'], {}), '(2, 20)\n', (1492, 1499), True, 'import numpy as np\n'), ((2044, 2056), 'numpy.dot', 'np.dot', (['p', 'x'], {}), '(p, x)\n', (2050, 2056), True, 'import numpy as np\n'), ((2371, 2386), 'numpy.vander', 'np.vander', (['t', '(5)'], {}), '(t, 5)\n', (2380, 2386), True, 'import numpy as np\n'), ((1701, 1732), 'numpy.dot', 'np.dot', (['p[2:4]', '(xminmu * xminmu)'], {}), '(p[2:4], xminmu * xminmu)\n', (1707, 1732), True, 'import numpy as np\n')] |
# Generated by Django 2.0.7 on 2018-09-19 18:38
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('hivs_administrative', '0005_set_extras_default_value_to_callable'),
]
operations = [
migrations.CreateModel(
name='AbstractAreaType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, unique=True, verbose_name='name')),
('timestamp', models.DateTimeField(auto_now_add=True, verbose_name='created')),
('last_modified', models.DateTimeField(auto_now=True, null=True, verbose_name='last modified')),
],
options={
'verbose_name': 'Area type',
'verbose_name_plural': 'Area types',
},
),
migrations.CreateModel(
name='AreaType',
fields=[
('abstractareatype_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='hivs_administrative.AbstractAreaType')),
],
bases=('hivs_administrative.abstractareatype',),
),
migrations.AddField(
model_name='area',
name='area_type',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='areas', to='hivs_administrative.AreaType', verbose_name='area type'),
),
]
| [
"django.db.models.OneToOneField",
"django.db.models.ForeignKey",
"django.db.models.AutoField",
"django.db.models.DateTimeField",
"django.db.models.CharField"
] | [((1460, 1626), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""areas"""', 'to': '"""hivs_administrative.AreaType"""', 'verbose_name': '"""area type"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n related_name='areas', to='hivs_administrative.AreaType', verbose_name=\n 'area type')\n", (1477, 1626), False, 'from django.db import migrations, models\n'), ((403, 496), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (419, 496), False, 'from django.db import migrations, models\n'), ((520, 586), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'unique': '(True)', 'verbose_name': '"""name"""'}), "(max_length=255, unique=True, verbose_name='name')\n", (536, 586), False, 'from django.db import migrations, models\n'), ((619, 682), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""created"""'}), "(auto_now_add=True, verbose_name='created')\n", (639, 682), False, 'from django.db import migrations, models\n'), ((719, 795), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'null': '(True)', 'verbose_name': '"""last modified"""'}), "(auto_now=True, null=True, verbose_name='last modified')\n", (739, 795), False, 'from django.db import migrations, models\n'), ((1082, 1272), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'auto_created': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'parent_link': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'to': '"""hivs_administrative.AbstractAreaType"""'}), "(auto_created=True, on_delete=django.db.models.deletion\n .CASCADE, parent_link=True, primary_key=True, 
serialize=False, to=\n 'hivs_administrative.AbstractAreaType')\n", (1102, 1272), False, 'from django.db import migrations, models\n')] |
from pygears import gear, datagear, alternative, module
from pygears.typing.qround import get_out_type, get_cut_bits
from pygears.typing import Uint, code, Bool, Int, Fixp, Ufixp
@datagear
def qround(din,
*,
fract=0,
cut_bits=b'get_cut_bits(din, fract)',
signed=b'din.signed') -> b'get_out_type(din, fract)':
res = code(din, Int if signed else Uint) + (Bool(1) << (cut_bits - 1))
return code(res >> cut_bits, module().tout)
# @datagear
# def qround_even(din,
# *,
# fract=0,
# cut_bits=b'get_cut_bits(din, fract)',
# signed=b'din.signed') -> b'get_out_type(din, fract)':
# val_coded = code(din, Int if signed else Uint)
# round_bit = val_coded[cut_bits]
# res = val_coded + Uint([round_bit] + [~round_bit] * (cut_bits - 1))
# return code(res[cut_bits:])
@gear
def truncate(din, *, nbits=2) -> b'din':
pass
@gear
def round_half_up(din, *, nbits=2) -> b'din':
pass
@gear
def round_to_zero(din, *, nbits=2) -> b'din':
pass
@gear
async def round_to_even(din, *, nbits=2) -> b'din':
async with din as d:
return round(float(d) / (2**nbits)) * (2**nbits)
| [
"pygears.typing.Bool",
"pygears.module",
"pygears.typing.code"
] | [((366, 400), 'pygears.typing.code', 'code', (['din', '(Int if signed else Uint)'], {}), '(din, Int if signed else Uint)\n', (370, 400), False, 'from pygears.typing import Uint, code, Bool, Int, Fixp, Ufixp\n'), ((404, 411), 'pygears.typing.Bool', 'Bool', (['(1)'], {}), '(1)\n', (408, 411), False, 'from pygears.typing import Uint, code, Bool, Int, Fixp, Ufixp\n'), ((464, 472), 'pygears.module', 'module', ([], {}), '()\n', (470, 472), False, 'from pygears import gear, datagear, alternative, module\n')] |
"""Main module."""
__authors__ = '<NAME>, <NAME>'
__version__ = '1.0'
__date__ = '9/10/2017'
import json
import os.path
import pickle
import random
import urllib
from bs4 import BeautifulSoup
from nltk.corpus import stopwords
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.model_selection import train_test_split
from sklearn import naive_bayes
from sklearn import svm
from sklearn.metrics import accuracy_score
from textblob import TextBlob
import matplotlib.pyplot as plt
import requests
import numpy as np
SETTINGS_PATH = 'settings.json'
RAW_PATH = 'data/raw.json'
STORIES_PATH = 'data/with_stories.json'
LABELS_PATH = 'data/with_labels.json'
SENTIMENTS_PATH = 'data/with_sentiments.json'
MNB_PATH = 'models/mnb.pkl'
SVM_PATH = 'models/svm.pkl'
COUNT_VECT_PATH = 'models/count_vect.pkl'
TFIDF_VECT_PATH = 'models/tfidf_vect.pkl'
BASE_URI = 'http://api.nytimes.com/svc/mostpopular/v2'
TYPE = 'mostviewed'
SECTION = 'all-sections'
TIME_PERIOD = '1'
RESPONSE_FORMAT = 'json'
def query(num_queries=1):
"""Request data from NYT and store it as a json file.
Args:
num_queries (int): The number of queries
"""
# Load API key
settings = json.load(open(SETTINGS_PATH))
API_KEY = settings['API_KEY']
# Send requests
URI = f'{BASE_URI}/{TYPE}/{SECTION}/{TIME_PERIOD}.{RESPONSE_FORMAT}'
articles = []
for k in range(num_queries):
print(f'Running query {k+1}...')
offset = k * 20
payload = {'api_key': API_KEY, 'offset': offset}
response = requests.get(URI, params=payload)
articles += response.json()['results']
# Save to file
with open(RAW_PATH, 'w') as output_file:
json.dump(articles, output_file)
def scrape_stories():
"""Get full document texts from urls."""
# Load articles
articles = json.load(open(RAW_PATH))
# Submit GET request and parse response content
for k, article in enumerate(articles):
print(f'Scraping article {k+1}...')
url = article['url']
f = urllib.request.urlopen(url)
soup = BeautifulSoup(f, 'html5lib')
story = ''
for par in soup.find_all('p', class_='story-body-text \
story-content'):
if par.string:
story += ' ' + par.string
article.update({'story': story})
# Save articles
with open(STORIES_PATH, 'w') as output_file:
json.dump(articles, output_file)
def label_articles(reset=False, relabel=False, start=0, rand_labels=False):
"""Run UI for sentiment labeling.
Loads all articles and presents those without a label.
Args:
reset (boolean): Delete all labels
relabel (boolean): Allow option to override existing labels
start (int): Article number to start from
rand_labels (boolean): Assign all random labels
"""
# Load articles
if reset or not os.path.isfile(LABELS_PATH):
articles = json.load(open(STORIES_PATH))
else:
articles = json.load(open(LABELS_PATH))
if start >= len(articles):
raise ValueError(f'Invalid starting point: {start}')
# Label articles
sentiments = [-1, 1]
print(f'Available sentiments: {sentiments}')
for k, article in enumerate(articles[start:]):
if not relabel and 'sentiment' in article:
continue
print(f'Article: {k+start+1}')
print(f"Title: {article['title']}")
print(f"Abstract: {article['abstract']}")
if rand_labels:
sent = random.choice(sentiments)
else:
try:
sent = int(input('Label: '))
except ValueError:
break
if sent not in sentiments:
break
article.update({'sentiment': sent})
print('----------------------------')
# Save articles
with open(LABELS_PATH, 'w') as output_file:
json.dump(articles, output_file)
def train_model(random_state=None):
"""Train a sentiment analyzer model.
Args:
random_state (int): Random seed for train_test_split used by numpy
"""
# Load articles
articles = json.load(open(LABELS_PATH))
# Extract data
articles = [article for article in articles if 'sentiment' in article]
stopset = set(stopwords.words('english'))
titles = [article['title'] for article in articles]
labels = [article['sentiment'] for article in articles]
# Vectorize data
count_vect = CountVectorizer(lowercase=True,
strip_accents='ascii',
stop_words=stopset,
decode_error='replace')
tfidf_vect = TfidfVectorizer(use_idf=True,
lowercase=True,
strip_accents='ascii',
stop_words=stopset,
decode_error='replace')
# Analyze and display relevant information
num_total = len(articles)
num_pos = sum(article['sentiment'] == 1 for article in articles)
num_neg = sum(article['sentiment'] == -1 for article in articles)
print(f'Found {num_total} labeled articles')
print(f'{num_pos} +, {num_neg} -')
# Train using count vectorizer
print('Vectorizing using bag of words...')
x = count_vect.fit_transform(titles)
y = labels
if random_state is not None:
x_train, x_test, y_train, y_test = train_test_split(
x, y, random_state=random_state)
else:
x_train, x_test, y_train, y_test = train_test_split(x, y)
mnb_clf = naive_bayes.MultinomialNB()
mnb_clf.fit(x_train, y_train)
y_pred = mnb_clf.predict(x_test)
mnb_acc = accuracy_score(y_test, y_pred) * 100
print('Naive Bayes: %.2f%% accuracy' % mnb_acc)
svm_clf = svm.SVC(probability=True)
svm_clf.fit(x_train, y_train)
y_pred = svm_clf.predict(x_test)
svm_acc = accuracy_score(y_test, y_pred) * 100
print('SVM: %.2f%% accuracy' % svm_acc)
# Train using tfidf vectorizer
print('Vectorizing using tfidf...')
x = tfidf_vect.fit_transform(titles)
y = labels
if random_state is not None:
x_train, x_test, y_train, y_test = train_test_split(
x, y, random_state=random_state)
else:
x_train, x_test, y_train, y_test = train_test_split(x, y)
mnb_clf = naive_bayes.MultinomialNB()
mnb_clf.fit(x_train, y_train)
y_pred = mnb_clf.predict(x_test)
mnb_acc = accuracy_score(y_test, y_pred) * 100
print('Naive Bayes: %.2f%% accuracy' % mnb_acc)
svm_clf = svm.SVC(probability=True)
svm_clf.fit(x_train, y_train)
y_pred = svm_clf.predict(x_test)
svm_acc = accuracy_score(y_test, y_pred) * 100
print('SVM: %.2f%% accuracy' % svm_acc)
# Store vectorizers and trained classifiers
with open(SVM_PATH, 'wb') as output_file:
pickle.dump(mnb_clf, output_file)
with open(MNB_PATH, 'wb') as output_file:
pickle.dump(svm_clf, output_file)
with open(COUNT_VECT_PATH, 'wb') as output_file:
pickle.dump(count_vect.vocabulary_, output_file)
with open(TFIDF_VECT_PATH, 'wb') as output_file:
pickle.dump(tfidf_vect.vocabulary_, output_file)
def analyze():
"""Analyze article data."""
# Calculate sentiment scores
articles = json.load(open(LABELS_PATH))
mnb_clf = pickle.load(open(MNB_PATH, 'rb'))
svm_clf = pickle.load(open(SVM_PATH, 'rb'))
count_vocabulary = pickle.load(open(COUNT_VECT_PATH, 'rb'))
tfidf_vocabulary = pickle.load(open(TFIDF_VECT_PATH, 'rb'))
stopset = set(stopwords.words('english'))
count_vect = CountVectorizer(lowercase=True,
strip_accents='ascii',
stop_words=stopset,
decode_error='replace',
vocabulary=count_vocabulary)
tfidf_vect = TfidfVectorizer(use_idf=True,
lowercase=True,
strip_accents='ascii',
stop_words=stopset,
decode_error='replace',
vocabulary=tfidf_vocabulary)
for k, article in enumerate(articles):
title = article['title']
abstract = article['abstract']
story = article['story']
print(f'{k+1}: {title}')
title_sent = TextBlob(title).sentiment
abstract_sent = TextBlob(abstract).sentiment
story_sent = TextBlob(story).sentiment
article.update({'title_sent': title_sent,
'abstract_sent': abstract_sent,
'story_sent': story_sent})
print(f'{title_sent} {abstract_sent} {story_sent}')
count = count_vect.fit_transform([title])
tfidf = tfidf_vect.fit_transform([title])
article.update({'count_mnb_sent': mnb_clf.predict(count).item(0),
'count_svm_sent': svm_clf.predict(count).item(0),
'tfidf_mnb_sent': mnb_clf.predict(tfidf).item(0),
'tfidf_svm_sent': svm_clf.predict(tfidf).item(0)})
# Test TextBlob performance
num_total = 0
num_correct = 0
for article in articles:
if 'sentiment' not in article:
continue
title_sent = article['title_sent'].polarity
true_sent = article['sentiment']
if title_sent == 0:
continue
if _sign(title_sent) == true_sent:
num_correct += 1
num_total += 1
acc = num_correct / num_total * 100
print('=========================')
print('TextBlob accuracy: %.2f' % acc)
print('=========================')
# Determine min, max, mean, and std
title_sents = np.array([a['title_sent'] for a in articles])
abstract_sents = np.array([a['abstract_sent'] for a in articles])
story_sents = np.array([a['story_sent'] for a in articles])
print('Title Sentiments')
print('----------------')
print(f'min: {np.min(title_sents)}')
print(f'max: {np.max(title_sents)}')
print(f'mean: {np.mean(title_sents)}')
print(f'std: {np.std(title_sents)}')
print()
print('Abstract Sentiments')
print('-------------------')
print(f'min: {np.min(abstract_sents)}')
print(f'max: {np.max(abstract_sents)}')
print(f'mean: {np.mean(abstract_sents)}')
print(f'std: {np.std(abstract_sents)}')
print()
print('Story Sentiments')
print('----------------')
print(f'min: {np.min(story_sents)}')
print(f'max: {np.max(story_sents)}')
print(f'mean: {np.mean(story_sents)}')
print(f'std: {np.std(story_sents)}')
print()
# Save to file
with open(SENTIMENTS_PATH, 'w') as output_file:
json.dump(articles, output_file)
def visualize():
"""Visualize the data."""
# Load data
articles = json.load(open(SENTIMENTS_PATH))
title_sents = [article['title_sent'][0] for article in articles]
abstract_sents = [article['abstract_sent'][0] for article in articles]
story_sents = [article['story_sent'][0] for article in articles]
count_mnb_sents = [article['count_mnb_sent'] for article in articles]
count_svm_sents = [article['count_svm_sent'] for article in articles]
tfidf_mnb_sents = [article['tfidf_mnb_sent'] for article in articles]
tfidf_svm_sents = [article['tfidf_svm_sent'] for article in articles]
view_rank = range(1, len(articles) + 1)
# Calculate trendlines
z1 = np.polyfit(view_rank, title_sents, 1)
p1 = np.poly1d(z1)
z2 = np.polyfit(view_rank, abstract_sents, 1)
p2 = np.poly1d(z2)
z3 = np.polyfit(view_rank, story_sents, 1)
p3 = np.poly1d(z3)
z4 = np.polyfit(view_rank, count_mnb_sents, 1)
p4 = np.poly1d(z4)
z5 = np.polyfit(view_rank, count_svm_sents, 1)
p5 = np.poly1d(z5)
z6 = np.polyfit(view_rank, tfidf_mnb_sents, 1)
p6 = np.poly1d(z6)
z7 = np.polyfit(view_rank, tfidf_svm_sents, 1)
p7 = np.poly1d(z7)
# Compute moving average
window_size = 10
window = np.ones(int(window_size))/float(window_size)
count_svm_sents_ma = np.convolve(count_svm_sents, window, 'same')
tfidf_svm_sents_ma = np.convolve(tfidf_svm_sents, window, 'same')
# Plot sentiment versus view rank
# TextBlob
plt.figure(1)
plt.subplot(1, 3, 1)
plt.scatter(view_rank, title_sents, s=5)
plt.plot(view_rank, p1(view_rank), 'r--')
plt.title('Title Sentiment')
plt.xlabel('View Rank')
plt.ylabel('Sentiment Score')
plt.ylim(-1.1, 1.1)
plt.subplot(1, 3, 2)
plt.scatter(view_rank, abstract_sents, s=5)
plt.plot(view_rank, p2(view_rank), 'r--')
plt.title('Abstract Sentiment')
plt.xlabel('View Rank')
plt.ylim(-1.1, 1.1)
plt.subplot(1, 3, 3)
plt.scatter(view_rank, story_sents, s=5)
plt.plot(view_rank, p3(view_rank), 'r--')
plt.title('Story Sentiment')
plt.xlabel('View Rank')
plt.ylim(-1.1, 1.1)
# sklearn classifiers
plt.figure(2)
plt.subplot(2, 2, 1)
plt.scatter(view_rank, count_mnb_sents, s=5)
plt.plot(view_rank, p4(view_rank), 'r--')
plt.title('Bag of Words + Naive Bayes')
plt.ylabel('Sentiment Score')
plt.ylim(-1.1, 1.1)
plt.subplot(2, 2, 2)
plt.scatter(view_rank, count_svm_sents, s=5)
plt.scatter(view_rank, count_svm_sents_ma, s=5, facecolor='0.5')
plt.plot(view_rank, p5(view_rank), 'r--')
plt.title('Bag of Words + SVM')
plt.ylim(-1.1, 1.1)
plt.subplot(2, 2, 3)
plt.scatter(view_rank, tfidf_mnb_sents, s=5)
plt.plot(view_rank, p6(view_rank), 'r--')
plt.title('Tfidf + Naive Bayes')
plt.xlabel('View Rank')
plt.ylabel('Sentiment Score')
plt.ylim(-1.1, 1.1)
plt.subplot(2, 2, 4)
plt.scatter(view_rank, tfidf_svm_sents, s=5)
plt.scatter(view_rank, tfidf_svm_sents_ma, s=5, facecolor='0.5')
plt.plot(view_rank, p7(view_rank), 'r--')
plt.title('Tfidf + SVM')
plt.xlabel('View Rank')
plt.ylim(-1.1, 1.1)
plt.show()
def _sign(x):
if x < 0:
return -1
elif x > 0:
return 1
else:
return 0
| [
"numpy.convolve",
"numpy.polyfit",
"matplotlib.pyplot.ylabel",
"numpy.array",
"numpy.poly1d",
"textblob.TextBlob",
"numpy.mean",
"nltk.corpus.stopwords.words",
"sklearn.feature_extraction.text.CountVectorizer",
"matplotlib.pyplot.xlabel",
"numpy.max",
"sklearn.naive_bayes.MultinomialNB",
"ma... | [((4568, 4670), 'sklearn.feature_extraction.text.CountVectorizer', 'CountVectorizer', ([], {'lowercase': '(True)', 'strip_accents': '"""ascii"""', 'stop_words': 'stopset', 'decode_error': '"""replace"""'}), "(lowercase=True, strip_accents='ascii', stop_words=stopset,\n decode_error='replace')\n", (4583, 4670), False, 'from sklearn.feature_extraction.text import CountVectorizer\n'), ((4783, 4899), 'sklearn.feature_extraction.text.TfidfVectorizer', 'TfidfVectorizer', ([], {'use_idf': '(True)', 'lowercase': '(True)', 'strip_accents': '"""ascii"""', 'stop_words': 'stopset', 'decode_error': '"""replace"""'}), "(use_idf=True, lowercase=True, strip_accents='ascii',\n stop_words=stopset, decode_error='replace')\n", (4798, 4899), False, 'from sklearn.feature_extraction.text import TfidfVectorizer\n'), ((5706, 5733), 'sklearn.naive_bayes.MultinomialNB', 'naive_bayes.MultinomialNB', ([], {}), '()\n', (5731, 5733), False, 'from sklearn import naive_bayes\n'), ((5923, 5948), 'sklearn.svm.SVC', 'svm.SVC', ([], {'probability': '(True)'}), '(probability=True)\n', (5930, 5948), False, 'from sklearn import svm\n'), ((6481, 6508), 'sklearn.naive_bayes.MultinomialNB', 'naive_bayes.MultinomialNB', ([], {}), '()\n', (6506, 6508), False, 'from sklearn import naive_bayes\n'), ((6698, 6723), 'sklearn.svm.SVC', 'svm.SVC', ([], {'probability': '(True)'}), '(probability=True)\n', (6705, 6723), False, 'from sklearn import svm\n'), ((7748, 7879), 'sklearn.feature_extraction.text.CountVectorizer', 'CountVectorizer', ([], {'lowercase': '(True)', 'strip_accents': '"""ascii"""', 'stop_words': 'stopset', 'decode_error': '"""replace"""', 'vocabulary': 'count_vocabulary'}), "(lowercase=True, strip_accents='ascii', stop_words=stopset,\n decode_error='replace', vocabulary=count_vocabulary)\n", (7763, 7879), False, 'from sklearn.feature_extraction.text import CountVectorizer\n'), ((8025, 8170), 'sklearn.feature_extraction.text.TfidfVectorizer', 'TfidfVectorizer', ([], {'use_idf': '(True)', 
'lowercase': '(True)', 'strip_accents': '"""ascii"""', 'stop_words': 'stopset', 'decode_error': '"""replace"""', 'vocabulary': 'tfidf_vocabulary'}), "(use_idf=True, lowercase=True, strip_accents='ascii',\n stop_words=stopset, decode_error='replace', vocabulary=tfidf_vocabulary)\n", (8040, 8170), False, 'from sklearn.feature_extraction.text import TfidfVectorizer\n'), ((9892, 9937), 'numpy.array', 'np.array', (["[a['title_sent'] for a in articles]"], {}), "([a['title_sent'] for a in articles])\n", (9900, 9937), True, 'import numpy as np\n'), ((9959, 10007), 'numpy.array', 'np.array', (["[a['abstract_sent'] for a in articles]"], {}), "([a['abstract_sent'] for a in articles])\n", (9967, 10007), True, 'import numpy as np\n'), ((10026, 10071), 'numpy.array', 'np.array', (["[a['story_sent'] for a in articles]"], {}), "([a['story_sent'] for a in articles])\n", (10034, 10071), True, 'import numpy as np\n'), ((11624, 11661), 'numpy.polyfit', 'np.polyfit', (['view_rank', 'title_sents', '(1)'], {}), '(view_rank, title_sents, 1)\n', (11634, 11661), True, 'import numpy as np\n'), ((11671, 11684), 'numpy.poly1d', 'np.poly1d', (['z1'], {}), '(z1)\n', (11680, 11684), True, 'import numpy as np\n'), ((11694, 11734), 'numpy.polyfit', 'np.polyfit', (['view_rank', 'abstract_sents', '(1)'], {}), '(view_rank, abstract_sents, 1)\n', (11704, 11734), True, 'import numpy as np\n'), ((11744, 11757), 'numpy.poly1d', 'np.poly1d', (['z2'], {}), '(z2)\n', (11753, 11757), True, 'import numpy as np\n'), ((11767, 11804), 'numpy.polyfit', 'np.polyfit', (['view_rank', 'story_sents', '(1)'], {}), '(view_rank, story_sents, 1)\n', (11777, 11804), True, 'import numpy as np\n'), ((11814, 11827), 'numpy.poly1d', 'np.poly1d', (['z3'], {}), '(z3)\n', (11823, 11827), True, 'import numpy as np\n'), ((11838, 11879), 'numpy.polyfit', 'np.polyfit', (['view_rank', 'count_mnb_sents', '(1)'], {}), '(view_rank, count_mnb_sents, 1)\n', (11848, 11879), True, 'import numpy as np\n'), ((11889, 11902), 'numpy.poly1d', 
'np.poly1d', (['z4'], {}), '(z4)\n', (11898, 11902), True, 'import numpy as np\n'), ((11912, 11953), 'numpy.polyfit', 'np.polyfit', (['view_rank', 'count_svm_sents', '(1)'], {}), '(view_rank, count_svm_sents, 1)\n', (11922, 11953), True, 'import numpy as np\n'), ((11963, 11976), 'numpy.poly1d', 'np.poly1d', (['z5'], {}), '(z5)\n', (11972, 11976), True, 'import numpy as np\n'), ((11986, 12027), 'numpy.polyfit', 'np.polyfit', (['view_rank', 'tfidf_mnb_sents', '(1)'], {}), '(view_rank, tfidf_mnb_sents, 1)\n', (11996, 12027), True, 'import numpy as np\n'), ((12037, 12050), 'numpy.poly1d', 'np.poly1d', (['z6'], {}), '(z6)\n', (12046, 12050), True, 'import numpy as np\n'), ((12060, 12101), 'numpy.polyfit', 'np.polyfit', (['view_rank', 'tfidf_svm_sents', '(1)'], {}), '(view_rank, tfidf_svm_sents, 1)\n', (12070, 12101), True, 'import numpy as np\n'), ((12111, 12124), 'numpy.poly1d', 'np.poly1d', (['z7'], {}), '(z7)\n', (12120, 12124), True, 'import numpy as np\n'), ((12259, 12303), 'numpy.convolve', 'np.convolve', (['count_svm_sents', 'window', '"""same"""'], {}), "(count_svm_sents, window, 'same')\n", (12270, 12303), True, 'import numpy as np\n'), ((12329, 12373), 'numpy.convolve', 'np.convolve', (['tfidf_svm_sents', 'window', '"""same"""'], {}), "(tfidf_svm_sents, window, 'same')\n", (12340, 12373), True, 'import numpy as np\n'), ((12432, 12445), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (12442, 12445), True, 'import matplotlib.pyplot as plt\n'), ((12450, 12470), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(3)', '(1)'], {}), '(1, 3, 1)\n', (12461, 12470), True, 'import matplotlib.pyplot as plt\n'), ((12475, 12515), 'matplotlib.pyplot.scatter', 'plt.scatter', (['view_rank', 'title_sents'], {'s': '(5)'}), '(view_rank, title_sents, s=5)\n', (12486, 12515), True, 'import matplotlib.pyplot as plt\n'), ((12566, 12594), 'matplotlib.pyplot.title', 'plt.title', (['"""Title Sentiment"""'], {}), "('Title Sentiment')\n", (12575, 12594), True, 
'import matplotlib.pyplot as plt\n'), ((12599, 12622), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""View Rank"""'], {}), "('View Rank')\n", (12609, 12622), True, 'import matplotlib.pyplot as plt\n'), ((12627, 12656), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Sentiment Score"""'], {}), "('Sentiment Score')\n", (12637, 12656), True, 'import matplotlib.pyplot as plt\n'), ((12661, 12680), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-1.1)', '(1.1)'], {}), '(-1.1, 1.1)\n', (12669, 12680), True, 'import matplotlib.pyplot as plt\n'), ((12686, 12706), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(3)', '(2)'], {}), '(1, 3, 2)\n', (12697, 12706), True, 'import matplotlib.pyplot as plt\n'), ((12711, 12754), 'matplotlib.pyplot.scatter', 'plt.scatter', (['view_rank', 'abstract_sents'], {'s': '(5)'}), '(view_rank, abstract_sents, s=5)\n', (12722, 12754), True, 'import matplotlib.pyplot as plt\n'), ((12805, 12836), 'matplotlib.pyplot.title', 'plt.title', (['"""Abstract Sentiment"""'], {}), "('Abstract Sentiment')\n", (12814, 12836), True, 'import matplotlib.pyplot as plt\n'), ((12841, 12864), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""View Rank"""'], {}), "('View Rank')\n", (12851, 12864), True, 'import matplotlib.pyplot as plt\n'), ((12869, 12888), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-1.1)', '(1.1)'], {}), '(-1.1, 1.1)\n', (12877, 12888), True, 'import matplotlib.pyplot as plt\n'), ((12894, 12914), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(3)', '(3)'], {}), '(1, 3, 3)\n', (12905, 12914), True, 'import matplotlib.pyplot as plt\n'), ((12919, 12959), 'matplotlib.pyplot.scatter', 'plt.scatter', (['view_rank', 'story_sents'], {'s': '(5)'}), '(view_rank, story_sents, s=5)\n', (12930, 12959), True, 'import matplotlib.pyplot as plt\n'), ((13010, 13038), 'matplotlib.pyplot.title', 'plt.title', (['"""Story Sentiment"""'], {}), "('Story Sentiment')\n", (13019, 13038), True, 'import matplotlib.pyplot as plt\n'), ((13043, 13066), 
'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""View Rank"""'], {}), "('View Rank')\n", (13053, 13066), True, 'import matplotlib.pyplot as plt\n'), ((13071, 13090), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-1.1)', '(1.1)'], {}), '(-1.1, 1.1)\n', (13079, 13090), True, 'import matplotlib.pyplot as plt\n'), ((13122, 13135), 'matplotlib.pyplot.figure', 'plt.figure', (['(2)'], {}), '(2)\n', (13132, 13135), True, 'import matplotlib.pyplot as plt\n'), ((13140, 13160), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(2)', '(1)'], {}), '(2, 2, 1)\n', (13151, 13160), True, 'import matplotlib.pyplot as plt\n'), ((13165, 13209), 'matplotlib.pyplot.scatter', 'plt.scatter', (['view_rank', 'count_mnb_sents'], {'s': '(5)'}), '(view_rank, count_mnb_sents, s=5)\n', (13176, 13209), True, 'import matplotlib.pyplot as plt\n'), ((13260, 13299), 'matplotlib.pyplot.title', 'plt.title', (['"""Bag of Words + Naive Bayes"""'], {}), "('Bag of Words + Naive Bayes')\n", (13269, 13299), True, 'import matplotlib.pyplot as plt\n'), ((13304, 13333), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Sentiment Score"""'], {}), "('Sentiment Score')\n", (13314, 13333), True, 'import matplotlib.pyplot as plt\n'), ((13338, 13357), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-1.1)', '(1.1)'], {}), '(-1.1, 1.1)\n', (13346, 13357), True, 'import matplotlib.pyplot as plt\n'), ((13363, 13383), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(2)', '(2)'], {}), '(2, 2, 2)\n', (13374, 13383), True, 'import matplotlib.pyplot as plt\n'), ((13388, 13432), 'matplotlib.pyplot.scatter', 'plt.scatter', (['view_rank', 'count_svm_sents'], {'s': '(5)'}), '(view_rank, count_svm_sents, s=5)\n', (13399, 13432), True, 'import matplotlib.pyplot as plt\n'), ((13437, 13501), 'matplotlib.pyplot.scatter', 'plt.scatter', (['view_rank', 'count_svm_sents_ma'], {'s': '(5)', 'facecolor': '"""0.5"""'}), "(view_rank, count_svm_sents_ma, s=5, facecolor='0.5')\n", (13448, 13501), True, 'import matplotlib.pyplot as plt\n'), 
((13552, 13583), 'matplotlib.pyplot.title', 'plt.title', (['"""Bag of Words + SVM"""'], {}), "('Bag of Words + SVM')\n", (13561, 13583), True, 'import matplotlib.pyplot as plt\n'), ((13588, 13607), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-1.1)', '(1.1)'], {}), '(-1.1, 1.1)\n', (13596, 13607), True, 'import matplotlib.pyplot as plt\n'), ((13613, 13633), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(2)', '(3)'], {}), '(2, 2, 3)\n', (13624, 13633), True, 'import matplotlib.pyplot as plt\n'), ((13638, 13682), 'matplotlib.pyplot.scatter', 'plt.scatter', (['view_rank', 'tfidf_mnb_sents'], {'s': '(5)'}), '(view_rank, tfidf_mnb_sents, s=5)\n', (13649, 13682), True, 'import matplotlib.pyplot as plt\n'), ((13733, 13765), 'matplotlib.pyplot.title', 'plt.title', (['"""Tfidf + Naive Bayes"""'], {}), "('Tfidf + Naive Bayes')\n", (13742, 13765), True, 'import matplotlib.pyplot as plt\n'), ((13770, 13793), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""View Rank"""'], {}), "('View Rank')\n", (13780, 13793), True, 'import matplotlib.pyplot as plt\n'), ((13798, 13827), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Sentiment Score"""'], {}), "('Sentiment Score')\n", (13808, 13827), True, 'import matplotlib.pyplot as plt\n'), ((13832, 13851), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-1.1)', '(1.1)'], {}), '(-1.1, 1.1)\n', (13840, 13851), True, 'import matplotlib.pyplot as plt\n'), ((13857, 13877), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(2)', '(4)'], {}), '(2, 2, 4)\n', (13868, 13877), True, 'import matplotlib.pyplot as plt\n'), ((13882, 13926), 'matplotlib.pyplot.scatter', 'plt.scatter', (['view_rank', 'tfidf_svm_sents'], {'s': '(5)'}), '(view_rank, tfidf_svm_sents, s=5)\n', (13893, 13926), True, 'import matplotlib.pyplot as plt\n'), ((13931, 13995), 'matplotlib.pyplot.scatter', 'plt.scatter', (['view_rank', 'tfidf_svm_sents_ma'], {'s': '(5)', 'facecolor': '"""0.5"""'}), "(view_rank, tfidf_svm_sents_ma, s=5, facecolor='0.5')\n", (13942, 13995), True, 
'import matplotlib.pyplot as plt\n'), ((14046, 14070), 'matplotlib.pyplot.title', 'plt.title', (['"""Tfidf + SVM"""'], {}), "('Tfidf + SVM')\n", (14055, 14070), True, 'import matplotlib.pyplot as plt\n'), ((14075, 14098), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""View Rank"""'], {}), "('View Rank')\n", (14085, 14098), True, 'import matplotlib.pyplot as plt\n'), ((14103, 14122), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-1.1)', '(1.1)'], {}), '(-1.1, 1.1)\n', (14111, 14122), True, 'import matplotlib.pyplot as plt\n'), ((14128, 14138), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (14136, 14138), True, 'import matplotlib.pyplot as plt\n'), ((1608, 1641), 'requests.get', 'requests.get', (['URI'], {'params': 'payload'}), '(URI, params=payload)\n', (1620, 1641), False, 'import requests\n'), ((1762, 1794), 'json.dump', 'json.dump', (['articles', 'output_file'], {}), '(articles, output_file)\n', (1771, 1794), False, 'import json\n'), ((2106, 2133), 'urllib.request.urlopen', 'urllib.request.urlopen', (['url'], {}), '(url)\n', (2128, 2133), False, 'import urllib\n'), ((2149, 2177), 'bs4.BeautifulSoup', 'BeautifulSoup', (['f', '"""html5lib"""'], {}), "(f, 'html5lib')\n", (2162, 2177), False, 'from bs4 import BeautifulSoup\n'), ((2512, 2544), 'json.dump', 'json.dump', (['articles', 'output_file'], {}), '(articles, output_file)\n', (2521, 2544), False, 'import json\n'), ((4003, 4035), 'json.dump', 'json.dump', (['articles', 'output_file'], {}), '(articles, output_file)\n', (4012, 4035), False, 'import json\n'), ((4385, 4411), 'nltk.corpus.stopwords.words', 'stopwords.words', (['"""english"""'], {}), "('english')\n", (4400, 4411), False, 'from nltk.corpus import stopwords\n'), ((5548, 5597), 'sklearn.model_selection.train_test_split', 'train_test_split', (['x', 'y'], {'random_state': 'random_state'}), '(x, y, random_state=random_state)\n', (5564, 5597), False, 'from sklearn.model_selection import train_test_split\n'), ((5668, 5690), 
'sklearn.model_selection.train_test_split', 'train_test_split', (['x', 'y'], {}), '(x, y)\n', (5684, 5690), False, 'from sklearn.model_selection import train_test_split\n'), ((5819, 5849), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (5833, 5849), False, 'from sklearn.metrics import accuracy_score\n'), ((6034, 6064), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (6048, 6064), False, 'from sklearn.metrics import accuracy_score\n'), ((6323, 6372), 'sklearn.model_selection.train_test_split', 'train_test_split', (['x', 'y'], {'random_state': 'random_state'}), '(x, y, random_state=random_state)\n', (6339, 6372), False, 'from sklearn.model_selection import train_test_split\n'), ((6443, 6465), 'sklearn.model_selection.train_test_split', 'train_test_split', (['x', 'y'], {}), '(x, y)\n', (6459, 6465), False, 'from sklearn.model_selection import train_test_split\n'), ((6594, 6624), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (6608, 6624), False, 'from sklearn.metrics import accuracy_score\n'), ((6809, 6839), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (6823, 6839), False, 'from sklearn.metrics import accuracy_score\n'), ((6993, 7026), 'pickle.dump', 'pickle.dump', (['mnb_clf', 'output_file'], {}), '(mnb_clf, output_file)\n', (7004, 7026), False, 'import pickle\n'), ((7081, 7114), 'pickle.dump', 'pickle.dump', (['svm_clf', 'output_file'], {}), '(svm_clf, output_file)\n', (7092, 7114), False, 'import pickle\n'), ((7176, 7224), 'pickle.dump', 'pickle.dump', (['count_vect.vocabulary_', 'output_file'], {}), '(count_vect.vocabulary_, output_file)\n', (7187, 7224), False, 'import pickle\n'), ((7286, 7334), 'pickle.dump', 'pickle.dump', (['tfidf_vect.vocabulary_', 'output_file'], {}), '(tfidf_vect.vocabulary_, output_file)\n', (7297, 7334), False, 'import 
pickle\n'), ((7703, 7729), 'nltk.corpus.stopwords.words', 'stopwords.words', (['"""english"""'], {}), "('english')\n", (7718, 7729), False, 'from nltk.corpus import stopwords\n'), ((10887, 10919), 'json.dump', 'json.dump', (['articles', 'output_file'], {}), '(articles, output_file)\n', (10896, 10919), False, 'import json\n'), ((3620, 3645), 'random.choice', 'random.choice', (['sentiments'], {}), '(sentiments)\n', (3633, 3645), False, 'import random\n'), ((8534, 8549), 'textblob.TextBlob', 'TextBlob', (['title'], {}), '(title)\n', (8542, 8549), False, 'from textblob import TextBlob\n'), ((8584, 8602), 'textblob.TextBlob', 'TextBlob', (['abstract'], {}), '(abstract)\n', (8592, 8602), False, 'from textblob import TextBlob\n'), ((8634, 8649), 'textblob.TextBlob', 'TextBlob', (['story'], {}), '(story)\n', (8642, 8649), False, 'from textblob import TextBlob\n'), ((10151, 10170), 'numpy.min', 'np.min', (['title_sents'], {}), '(title_sents)\n', (10157, 10170), True, 'import numpy as np\n'), ((10192, 10211), 'numpy.max', 'np.max', (['title_sents'], {}), '(title_sents)\n', (10198, 10211), True, 'import numpy as np\n'), ((10234, 10254), 'numpy.mean', 'np.mean', (['title_sents'], {}), '(title_sents)\n', (10241, 10254), True, 'import numpy as np\n'), ((10276, 10295), 'numpy.std', 'np.std', (['title_sents'], {}), '(title_sents)\n', (10282, 10295), True, 'import numpy as np\n'), ((10396, 10418), 'numpy.min', 'np.min', (['abstract_sents'], {}), '(abstract_sents)\n', (10402, 10418), True, 'import numpy as np\n'), ((10440, 10462), 'numpy.max', 'np.max', (['abstract_sents'], {}), '(abstract_sents)\n', (10446, 10462), True, 'import numpy as np\n'), ((10485, 10508), 'numpy.mean', 'np.mean', (['abstract_sents'], {}), '(abstract_sents)\n', (10492, 10508), True, 'import numpy as np\n'), ((10530, 10552), 'numpy.std', 'np.std', (['abstract_sents'], {}), '(abstract_sents)\n', (10536, 10552), True, 'import numpy as np\n'), ((10647, 10666), 'numpy.min', 'np.min', (['story_sents'], {}), 
'(story_sents)\n', (10653, 10666), True, 'import numpy as np\n'), ((10688, 10707), 'numpy.max', 'np.max', (['story_sents'], {}), '(story_sents)\n', (10694, 10707), True, 'import numpy as np\n'), ((10730, 10750), 'numpy.mean', 'np.mean', (['story_sents'], {}), '(story_sents)\n', (10737, 10750), True, 'import numpy as np\n'), ((10772, 10791), 'numpy.std', 'np.std', (['story_sents'], {}), '(story_sents)\n', (10778, 10791), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
# Copyright (c) 2022, <NAME> and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class SalesInvoice(Document):
    """Sales invoice that posts a balanced debit/credit pair of
    General Ledger Entries when it is submitted."""

    def validate(self):
        """Reject invoices whose posting date falls after the due date."""
        if self.posting_date > self.due_date:
            frappe.throw('Your selected <b>Posted Date</b> is more recent than <b>Due Date</b>!')

    def on_submit(self):
        """Create the two ledger entries for this invoice.

        Debits the receivable (asset) account and credits the income
        account with the same amount, keeping the ledger balanced.
        """
        self._create_gl_entry(
            title="Debit",
            account=self.debit_to,
            account_type="Asset",
            debit_amount=self.total_amount,
        )
        self._create_gl_entry(
            title="Credit",
            account=self.income_account,
            account_type="Income",
            profit=self.total_amount,
            credit_amount=self.total_amount,
        )

    def _create_gl_entry(self, **fields):
        """Insert one General Ledger Entry carrying this invoice's dates and customer.

        Any extra keyword arguments are copied verbatim onto the new document,
        so callers control the side-specific fields (debit/credit amounts, etc.).
        """
        doc = frappe.new_doc('General Ledger Entry')
        doc.posting_date = self.posting_date
        doc.due_date = self.due_date
        doc.party = self.customer
        for name, value in fields.items():
            setattr(doc, name, value)
        # NOTE(review): the original code committed *before* insert(); preserved
        # as-is, although frappe normally manages transactions itself — confirm.
        frappe.db.commit()
        doc.insert()
| [
"frappe.throw",
"frappe.db.commit",
"frappe.new_doc"
] | [((441, 479), 'frappe.new_doc', 'frappe.new_doc', (['"""General Ledger Entry"""'], {}), "('General Ledger Entry')\n", (455, 479), False, 'import frappe\n'), ((700, 718), 'frappe.db.commit', 'frappe.db.commit', ([], {}), '()\n', (716, 718), False, 'import frappe\n'), ((743, 781), 'frappe.new_doc', 'frappe.new_doc', (['"""General Ledger Entry"""'], {}), "('General Ledger Entry')\n", (757, 781), False, 'import frappe\n'), ((1044, 1062), 'frappe.db.commit', 'frappe.db.commit', ([], {}), '()\n', (1060, 1062), False, 'import frappe\n'), ((315, 405), 'frappe.throw', 'frappe.throw', (['"""Your selected <b>Posted Date</b> is more recent than <b>Due Date</b>!"""'], {}), "(\n 'Your selected <b>Posted Date</b> is more recent than <b>Due Date</b>!')\n", (327, 405), False, 'import frappe\n')] |
from datetime import datetime
from email.message import EmailMessage
from email.headerregistry import Address
from jinja2 import Markup
from premailer import transform
import smtplib
from kpireport.output import OutputDriver
class SMTPOutputDriver(OutputDriver):
    """Email a report's contents via SMTP to one or more recipients.

    Attributes:
        email_from (str): From email address.
        email_to (List[str]): Email addresses to send to.
        smtp_host (str): SMTP server to relay mail through. Defaults to
            "localhost".
        smtp_port (int): SMTP port to use. Defaults to 25.
        image_strategy (str): Strategy to use for including images in the mail
            contents. Two options are available:

            * ``embed``: embed the image directly in the mail using Content-ID
              (`RFC2392 <https://tools.ietf.org/html/rfc2392>`_) linked
              resources. These should be compatible with most modern desktop
              and web mail clients.
            * ``remote``: link the image to a remote resource. For this strategy
              to work, the image assets must exist on a server reachable via
              the public Internet (and not require authentication). Consider
              using the SMTP plugin in conjunction with e.g., the
              :ref:`S3 <s3-plugin>` or :ref:`SCP <scp-plugin>` plugins to
              accomplish this entirely within KPI reporter.

              .. note::

                 No tracking information is included when rendering remote
                 image URLs; if for some reason you need to track open rates,
                 consider using the :ref:`SendGrid <sendgrid-plugin>` plugin
                 to send the report instead.

        image_remote_base_url (str): When using the "remote" image strategy,
            the base URL for the image assets. Image blobs generated by Views
            are placed in folders named after the View ID; this base URL should
            point to the root path for all of these folders.
    """

    def init(
        self,
        email_from=None,
        email_to=None,
        smtp_host="localhost",
        smtp_port=25,
        image_strategy="embed",
        image_remote_base_url=None,
    ):
        # Use None instead of a mutable default list; an empty/missing
        # recipient list is rejected below either way.
        email_to = email_to or []
        if not (email_from and email_to):
            raise ValueError("Both 'from' and 'to' addresses are required")
        self.email_from = self._parse_address(email_from)
        self.email_to = [self._parse_address(to) for to in email_to]
        self.smtp_host = smtp_host
        self.smtp_port = smtp_port
        self.image_strategy = image_strategy
        if image_remote_base_url:
            # Allow report attributes to be interpolated into the base URL.
            self.image_remote_base_url = image_remote_base_url.format(
                **self.report.__dict__
            )
        else:
            self.image_remote_base_url = None
        # Query-string suffix that defeats client-side image caching.
        self.cache_buster = f"?_={datetime.now()}"

    def _parse_address(self, address):
        """Split ``user@domain`` into an :class:`email.headerregistry.Address`."""
        username, domain = address.split("@")
        return Address(username=username, domain=domain)

    def render_blob_inline(self, blob, fmt=None):
        """Render a blob as an ``<img>`` tag per the configured image strategy.

        Raises:
            ValueError: if the configured strategy is not recognized.
        """
        if self.image_strategy == "embed":
            return Markup(f"""<img src="cid:{blob.id}" />""")
        elif self.image_strategy == "remote":
            path = "/".join([self.image_remote_base_url, blob.id])
            return Markup(f"""<img src="{path}{self.cache_buster}" />""")
        else:
            raise ValueError(f"Unsupported image strategy '{self.image_strategy}'")

    def render_output(self, content, blobs):
        """Build the multipart message (plain text + inlined HTML) and send it.

        Raises:
            ValueError: if a blob has no MIME type when embedding images.
        """
        msg = EmailMessage()
        msg["Subject"] = self.report.title
        msg["From"] = self.email_from
        msg["To"] = self.email_to
        msg.set_content(content.get_format("md"))

        # premailer.transform inlines CSS so the HTML renders in mail clients.
        html = transform(content.get_format("html"))
        msg.add_alternative(html, subtype="html")

        if self.image_strategy == "embed":
            # The HTML alternative is the second payload part; attach each
            # image as a related resource addressable via its Content-ID.
            payload = msg.get_payload()[1]
            for blob in blobs:
                mime_type = blob.mime_type
                if not mime_type:
                    raise ValueError(f"No mime type specified for blob {blob.id}")
                maintype, subtype = mime_type.split("/")
                payload.add_related(
                    blob.content.getvalue(), maintype, subtype, cid=blob.id
                )

        # Send the message via local SMTP server.
        with smtplib.SMTP(self.smtp_host, port=self.smtp_port) as s:
            s.send_message(msg)
| [
"smtplib.SMTP",
"email.headerregistry.Address",
"datetime.datetime.now",
"jinja2.Markup",
"email.message.EmailMessage"
] | [((3012, 3053), 'email.headerregistry.Address', 'Address', ([], {'username': 'username', 'domain': 'domain'}), '(username=username, domain=domain)\n', (3019, 3053), False, 'from email.headerregistry import Address\n'), ((3555, 3569), 'email.message.EmailMessage', 'EmailMessage', ([], {}), '()\n', (3567, 3569), False, 'from email.message import EmailMessage\n'), ((3167, 3205), 'jinja2.Markup', 'Markup', (['f"""<img src="cid:{blob.id}" />"""'], {}), '(f\'<img src="cid:{blob.id}" />\')\n', (3173, 3205), False, 'from jinja2 import Markup\n'), ((4369, 4418), 'smtplib.SMTP', 'smtplib.SMTP', (['self.smtp_host'], {'port': 'self.smtp_port'}), '(self.smtp_host, port=self.smtp_port)\n', (4381, 4418), False, 'import smtplib\n'), ((2894, 2908), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2906, 2908), False, 'from datetime import datetime\n'), ((3342, 3392), 'jinja2.Markup', 'Markup', (['f"""<img src="{path}{self.cache_buster}" />"""'], {}), '(f\'<img src="{path}{self.cache_buster}" />\')\n', (3348, 3392), False, 'from jinja2 import Markup\n')] |
from tortoise import Tortoise
from tortoise.contrib import test
from tortoise.exceptions import ConfigurationError
from tortoise.tests.testmodels import Tournament
class TestInitErrors(test.SimpleTestCase):
    """Exercise the ConfigurationError paths of Tortoise's init/routing API.

    setUp snapshots and clears Tortoise's module-level state so each test
    starts from an uninitialised ORM; tearDown restores the snapshot.
    """

    async def setUp(self):
        # Save the global registry/flags so tearDown can restore them.
        self.apps = Tortoise.apps
        self.inited = Tortoise._inited
        # Reset Tortoise to a pristine, uninitialised state.
        Tortoise.apps = {}
        Tortoise._inited = False
        Tortoise._db_routing = None
        Tortoise._global_connection = None
        self.db = await self.getDB()

    async def tearDown(self):
        # Dispose of the per-test database, then restore the saved globals.
        await self.db.close()
        await self.db.db_delete()
        Tortoise.apps = self.apps
        Tortoise._inited = self.inited

    def test_dup_model(self):
        # Registering the same model twice under one app must be rejected.
        with self.assertRaisesRegex(ConfigurationError, 'duplicates in'):
            Tortoise.register_model('models', 'Tournament', Tournament)
            Tortoise.register_model('models', 'Tournament', Tournament)

    def test_missing_app_route(self):
        # With real apps registered, routing must cover every app.
        Tortoise.apps = self.apps
        with self.assertRaisesRegex(ConfigurationError, 'No db instanced for apps'):
            Tortoise._client_routing(db_routing={
                'models': self.db,
            })

    def test_exclusive_route_param(self):
        # db_routing and global_client are mutually exclusive.
        with self.assertRaisesRegex(ConfigurationError, 'You must pass either'):
            Tortoise._client_routing(db_routing={
                'models': self.db,
            }, global_client=self.db)

    def test_not_db(self):
        # global_client must be a BaseDBAsyncClient, not an arbitrary object.
        with self.assertRaisesRegex(ConfigurationError,
                                    'global_client must inherit from BaseDBAsyncClient'):
            Tortoise._client_routing(global_client='moo')

    def test_missing_param(self):
        # At least one of global_client / db_routing is required.
        with self.assertRaisesRegex(ConfigurationError,
                                    'You must pass either global_client or db_routing'):
            Tortoise._client_routing()

    def test_missing_app_route2(self):
        # Every db_routing value must also be a BaseDBAsyncClient.
        Tortoise.apps = self.apps
        with self.assertRaisesRegex(ConfigurationError,
                                    'All app values must inherit from BaseDBAsyncClient'):
            Tortoise._client_routing(db_routing={
                'models': 'moo',
            })

    def test_dup_init(self):
        # Calling init a second time must fail with "Already initialised".
        with self.assertRaisesRegex(ConfigurationError, 'Already initialised'):
            Tortoise.init(self.db)
            Tortoise.init(self.db)
| [
"tortoise.Tortoise.register_model",
"tortoise.Tortoise._client_routing",
"tortoise.Tortoise.init"
] | [((770, 829), 'tortoise.Tortoise.register_model', 'Tortoise.register_model', (['"""models"""', '"""Tournament"""', 'Tournament'], {}), "('models', 'Tournament', Tournament)\n", (793, 829), False, 'from tortoise import Tortoise\n'), ((842, 901), 'tortoise.Tortoise.register_model', 'Tortoise.register_model', (['"""models"""', '"""Tournament"""', 'Tournament'], {}), "('models', 'Tournament', Tournament)\n", (865, 901), False, 'from tortoise import Tortoise\n'), ((1072, 1128), 'tortoise.Tortoise._client_routing', 'Tortoise._client_routing', ([], {'db_routing': "{'models': self.db}"}), "(db_routing={'models': self.db})\n", (1096, 1128), False, 'from tortoise import Tortoise\n'), ((1296, 1375), 'tortoise.Tortoise._client_routing', 'Tortoise._client_routing', ([], {'db_routing': "{'models': self.db}", 'global_client': 'self.db'}), "(db_routing={'models': self.db}, global_client=self.db)\n", (1320, 1375), False, 'from tortoise import Tortoise\n'), ((1593, 1638), 'tortoise.Tortoise._client_routing', 'Tortoise._client_routing', ([], {'global_client': '"""moo"""'}), "(global_client='moo')\n", (1617, 1638), False, 'from tortoise import Tortoise\n'), ((1831, 1857), 'tortoise.Tortoise._client_routing', 'Tortoise._client_routing', ([], {}), '()\n', (1855, 1857), False, 'from tortoise import Tortoise\n'), ((2091, 2145), 'tortoise.Tortoise._client_routing', 'Tortoise._client_routing', ([], {'db_routing': "{'models': 'moo'}"}), "(db_routing={'models': 'moo'})\n", (2115, 2145), False, 'from tortoise import Tortoise\n'), ((2299, 2321), 'tortoise.Tortoise.init', 'Tortoise.init', (['self.db'], {}), '(self.db)\n', (2312, 2321), False, 'from tortoise import Tortoise\n'), ((2334, 2356), 'tortoise.Tortoise.init', 'Tortoise.init', (['self.db'], {}), '(self.db)\n', (2347, 2356), False, 'from tortoise import Tortoise\n')] |
import torch
import argparse
import os
import glob
from torch.utils.data import DataLoader, SequentialSampler
from tqdm import tqdm
from nsmc_modeling import RobertaForSequenceClassification
from bert.tokenizer import Tokenizer
from dataset import NSMCDataSet
def _get_parser():
parser = argparse.ArgumentParser()
parser.add_argument("--state_dict", type=str, required=True)
parser.add_argument("--bert_model", type=str, default='bert/')
parser.add_argument("--batch_size", type=int, default=32)
parser.add_argument("--max_seq_length", type=int, default=512)
parser.add_argument("--gpu_index", type=int, default=0)
parser.add_argument("--no_display", action="store_true")
return parser
if __name__ == "__main__":
    args = _get_parser().parse_args()

    # Tokenizer needs both the vocabulary file and the sentiment model assets.
    tokenizer = Tokenizer(os.path.join(args.bert_model, "senti_vocab.txt"),
                          os.path.join(args.bert_model, "RoBERTa_Sentiment_kor"))
    dataset = NSMCDataSet("test", tokenizer, max_seq_length=args.max_seq_length)
    # Sequential order keeps evaluation deterministic across runs.
    sampler = SequentialSampler(dataset)
    dataloader = DataLoader(dataset,
                            batch_size=args.batch_size,
                            sampler=sampler,
                            collate_fn=dataset.collate_fn)

    device = torch.device(type="cuda", index=args.gpu_index)
    model = RobertaForSequenceClassification()

    model_path = os.path.join('checkpoints/yaho/', '*.ckpt')
    model_path_list = glob.glob(model_path)
    for path in model_path_list:
        # Load on CPU first, then move; strict=False tolerates key mismatches.
        model.load_state_dict(state_dict=torch.load(path, map_location=torch.device('cpu')), strict=False)
        model.to(device)
        model.eval()

        match = 0
        progress = 0
        pbar = tqdm(dataloader, disable=args.no_display, desc="Eval")
        for batch in pbar:
            input_ids, attention_mask, labels = batch
            # BUG FIX: use .to(device) instead of .cuda() — .cuda() always
            # targets device 0, which crashes when --gpu_index selects
            # another GPU while the model lives there.
            inputs = {
                "input_ids": torch.tensor(input_ids, dtype=torch.long).to(device),
                "attention_mask": torch.tensor(attention_mask, dtype=torch.long).to(device)
            }
            with torch.no_grad():
                logits = model(**inputs)
            labels = torch.tensor(labels, dtype=torch.float).to(device)
            # A positive logit predicts the positive class (label == 1).
            match_seq = (logits.view(-1) >= 0.0) == (labels.view(-1) == 1)
            match += match_seq.sum().item()
            progress += labels.size(0)
            pbar.update()
            pbar.set_postfix(
                {"state_dict": path, "accuracy": f"{100.0 * match / progress:.2f}"}
            )
        pbar.close()

        # Append this checkpoint's accuracy to the run log; the context
        # manager guarantees the handle is closed even on error.
        with open('./output/10^5step_log.txt', 'a') as log_file:
            log_file.write(
                f"state_dict : {path} accuracy : {100 * match / progress}\n"
            )
        print({"state_dict": path, "accuracy": f"{100 * match / progress:.2f}"})
| [
"argparse.ArgumentParser",
"tqdm.tqdm",
"torch.utils.data.SequentialSampler",
"os.path.join",
"nsmc_modeling.RobertaForSequenceClassification",
"torch.tensor",
"dataset.NSMCDataSet",
"torch.utils.data.DataLoader",
"torch.no_grad",
"glob.glob",
"torch.device"
] | [((296, 321), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (319, 321), False, 'import argparse\n'), ((966, 1032), 'dataset.NSMCDataSet', 'NSMCDataSet', (['"""test"""', 'tokenizer'], {'max_seq_length': 'args.max_seq_length'}), "('test', tokenizer, max_seq_length=args.max_seq_length)\n", (977, 1032), False, 'from dataset import NSMCDataSet\n'), ((1047, 1073), 'torch.utils.data.SequentialSampler', 'SequentialSampler', (['dataset'], {}), '(dataset)\n', (1064, 1073), False, 'from torch.utils.data import DataLoader, SequentialSampler\n'), ((1091, 1191), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset'], {'batch_size': 'args.batch_size', 'sampler': 'sampler', 'collate_fn': 'dataset.collate_fn'}), '(dataset, batch_size=args.batch_size, sampler=sampler, collate_fn\n =dataset.collate_fn)\n', (1101, 1191), False, 'from torch.utils.data import DataLoader, SequentialSampler\n'), ((1285, 1332), 'torch.device', 'torch.device', ([], {'type': '"""cuda"""', 'index': 'args.gpu_index'}), "(type='cuda', index=args.gpu_index)\n", (1297, 1332), False, 'import torch\n'), ((1346, 1380), 'nsmc_modeling.RobertaForSequenceClassification', 'RobertaForSequenceClassification', ([], {}), '()\n', (1378, 1380), False, 'from nsmc_modeling import RobertaForSequenceClassification\n'), ((1398, 1441), 'os.path.join', 'os.path.join', (['"""checkpoints/yaho/"""', '"""*.ckpt"""'], {}), "('checkpoints/yaho/', '*.ckpt')\n", (1410, 1441), False, 'import os\n'), ((1464, 1485), 'glob.glob', 'glob.glob', (['model_path'], {}), '(model_path)\n', (1473, 1485), False, 'import glob\n'), ((819, 867), 'os.path.join', 'os.path.join', (['args.bert_model', '"""senti_vocab.txt"""'], {}), "(args.bert_model, 'senti_vocab.txt')\n", (831, 867), False, 'import os\n'), ((895, 949), 'os.path.join', 'os.path.join', (['args.bert_model', '"""RoBERTa_Sentiment_kor"""'], {}), "(args.bert_model, 'RoBERTa_Sentiment_kor')\n", (907, 949), False, 'import os\n'), ((1729, 1783), 'tqdm.tqdm', 'tqdm', 
(['dataloader'], {'disable': 'args.no_display', 'desc': '"""Eval"""'}), "(dataloader, disable=args.no_display, desc='Eval')\n", (1733, 1783), False, 'from tqdm import tqdm\n'), ((2088, 2103), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2101, 2103), False, 'import torch\n'), ((2168, 2207), 'torch.tensor', 'torch.tensor', (['labels'], {'dtype': 'torch.float'}), '(labels, dtype=torch.float)\n', (2180, 2207), False, 'import torch\n'), ((1590, 1609), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (1602, 1609), False, 'import torch\n'), ((1918, 1959), 'torch.tensor', 'torch.tensor', (['input_ids'], {'dtype': 'torch.long'}), '(input_ids, dtype=torch.long)\n', (1930, 1959), False, 'import torch\n'), ((2002, 2048), 'torch.tensor', 'torch.tensor', (['attention_mask'], {'dtype': 'torch.long'}), '(attention_mask, dtype=torch.long)\n', (2014, 2048), False, 'import torch\n')] |
import numpy as np
from numpy.testing import assert_allclose
import pytest
from mne._ola import _COLA, _Interp2, _Storer
def test_interp_2pt():
"""Test our two-point interpolator."""
n_pts = 200
assert n_pts % 50 == 0
feeds = [ # test a bunch of feeds to make sure they don't break things
[n_pts],
[50] * (n_pts // 50),
[10] * (n_pts // 10),
[5] * (n_pts // 5),
[2] * (n_pts // 2),
[1] * n_pts,
]
# ZOH
values = np.array([10, -10])
expected = np.full(n_pts, 10)
for feed in feeds:
expected[-1] = 10
interp = _Interp2([0, n_pts], values, 'zero')
out = np.concatenate([interp.feed(f)[0] for f in feed])
assert_allclose(out, expected)
interp = _Interp2([0, n_pts - 1], values, 'zero')
expected[-1] = -10
out = np.concatenate([interp.feed(f)[0] for f in feed])
assert_allclose(out, expected)
# linear and inputs of different sizes
values = [np.arange(2)[:, np.newaxis, np.newaxis], np.array([20, 10])]
expected = [
np.linspace(0, 1, n_pts, endpoint=False)[np.newaxis, np.newaxis, :],
np.linspace(20, 10, n_pts, endpoint=False)]
for feed in feeds:
interp = _Interp2([0, n_pts], values, 'linear')
outs = [interp.feed(f) for f in feed]
outs = [np.concatenate([o[0] for o in outs], axis=-1),
np.concatenate([o[1] for o in outs], axis=-1)]
assert_allclose(outs[0], expected[0], atol=1e-7)
assert_allclose(outs[1], expected[1], atol=1e-7)
# cos**2 and more interesting bounds
values = np.array([10, -10])
expected = np.full(n_pts, 10.)
expected[-5:] = -10
cos = np.cos(np.linspace(0, np.pi / 2., n_pts - 9,
endpoint=False))
expected[4:-5] = cos ** 2 * 20 - 10
for feed in feeds:
interp = _Interp2([4, n_pts - 5], values, 'cos2')
out = np.concatenate([interp.feed(f)[0] for f in feed])
assert_allclose(out, expected, atol=1e-7)
out = interp.feed(10)[0]
assert_allclose(out, [values[-1]] * 10, atol=1e-7)
# hann and broadcasting
n_hann = n_pts - 9
expected[4:-5] = np.hanning(2 * n_hann + 1)[n_hann:-1] * 20 - 10
expected = np.array([expected, expected[::-1] * 0.5])
values = np.array([values, values[::-1] * 0.5]).T
for feed in feeds:
interp = _Interp2([4, n_pts - 5], values, 'hann')
out = np.concatenate([interp.feed(f)[0] for f in feed], axis=-1)
assert_allclose(out, expected, atol=1e-7)
# one control point and None support
values = [np.array([10]), None]
for start in [0, 50, 99, 100, 1000]:
interp = _Interp2([start], values, 'zero')
out, none = interp.feed(n_pts)
assert none is None
expected = np.full(n_pts, 10.)
assert_allclose(out, expected)
@pytest.mark.parametrize('ndim', (1, 2, 3))
def test_cola(ndim):
"""Test COLA processing."""
sfreq = 1000.
rng = np.random.RandomState(0)
def processor(x):
return (x / 2.,) # halve the signal
for n_total in (999, 1000, 1001):
signal = rng.randn(n_total)
out = rng.randn(n_total) # shouldn't matter
for _ in range(ndim - 1):
signal = signal[np.newaxis]
out = out[np.newaxis]
for n_samples in (99, 100, 101, 102,
n_total - n_total // 2 + 1, n_total):
for window in ('hann', 'bartlett', 'boxcar', 'triang'):
# A few example COLA possibilities
n_overlaps = ()
if window in ('hann', 'bartlett') or n_samples % 2 == 0:
n_overlaps += ((n_samples + 1) // 2,)
if window == 'boxcar':
n_overlaps += (0,)
for n_overlap in n_overlaps:
# can pass callable or ndarray
for storer in (out, _Storer(out)):
cola = _COLA(processor, storer, n_total, n_samples,
n_overlap, sfreq, window)
n_input = 0
# feed data in an annoying way
while n_input < n_total:
next_len = min(rng.randint(1, 30),
n_total - n_input)
cola.feed(signal[..., n_input:n_input + next_len])
n_input += next_len
assert_allclose(out, signal / 2., atol=1e-7)
| [
"numpy.hanning",
"mne._ola._COLA",
"numpy.arange",
"numpy.testing.assert_allclose",
"mne._ola._Interp2",
"pytest.mark.parametrize",
"numpy.array",
"numpy.linspace",
"mne._ola._Storer",
"numpy.concatenate",
"numpy.full",
"numpy.random.RandomState"
] | [((2880, 2922), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""ndim"""', '(1, 2, 3)'], {}), "('ndim', (1, 2, 3))\n", (2903, 2922), False, 'import pytest\n'), ((493, 512), 'numpy.array', 'np.array', (['[10, -10]'], {}), '([10, -10])\n', (501, 512), True, 'import numpy as np\n'), ((528, 546), 'numpy.full', 'np.full', (['n_pts', '(10)'], {}), '(n_pts, 10)\n', (535, 546), True, 'import numpy as np\n'), ((1626, 1645), 'numpy.array', 'np.array', (['[10, -10]'], {}), '([10, -10])\n', (1634, 1645), True, 'import numpy as np\n'), ((1661, 1681), 'numpy.full', 'np.full', (['n_pts', '(10.0)'], {}), '(n_pts, 10.0)\n', (1668, 1681), True, 'import numpy as np\n'), ((2074, 2125), 'numpy.testing.assert_allclose', 'assert_allclose', (['out', '([values[-1]] * 10)'], {'atol': '(1e-07)'}), '(out, [values[-1]] * 10, atol=1e-07)\n', (2089, 2125), False, 'from numpy.testing import assert_allclose\n'), ((2261, 2303), 'numpy.array', 'np.array', (['[expected, expected[::-1] * 0.5]'], {}), '([expected, expected[::-1] * 0.5])\n', (2269, 2303), True, 'import numpy as np\n'), ((3004, 3028), 'numpy.random.RandomState', 'np.random.RandomState', (['(0)'], {}), '(0)\n', (3025, 3028), True, 'import numpy as np\n'), ((613, 649), 'mne._ola._Interp2', '_Interp2', (['[0, n_pts]', 'values', '"""zero"""'], {}), "([0, n_pts], values, 'zero')\n", (621, 649), False, 'from mne._ola import _COLA, _Interp2, _Storer\n'), ((722, 752), 'numpy.testing.assert_allclose', 'assert_allclose', (['out', 'expected'], {}), '(out, expected)\n', (737, 752), False, 'from numpy.testing import assert_allclose\n'), ((770, 810), 'mne._ola._Interp2', '_Interp2', (['[0, n_pts - 1]', 'values', '"""zero"""'], {}), "([0, n_pts - 1], values, 'zero')\n", (778, 810), False, 'from mne._ola import _COLA, _Interp2, _Storer\n'), ((910, 940), 'numpy.testing.assert_allclose', 'assert_allclose', (['out', 'expected'], {}), '(out, expected)\n', (925, 940), False, 'from numpy.testing import assert_allclose\n'), ((1040, 1058), 
'numpy.array', 'np.array', (['[20, 10]'], {}), '([20, 10])\n', (1048, 1058), True, 'import numpy as np\n'), ((1162, 1204), 'numpy.linspace', 'np.linspace', (['(20)', '(10)', 'n_pts'], {'endpoint': '(False)'}), '(20, 10, n_pts, endpoint=False)\n', (1173, 1204), True, 'import numpy as np\n'), ((1246, 1284), 'mne._ola._Interp2', '_Interp2', (['[0, n_pts]', 'values', '"""linear"""'], {}), "([0, n_pts], values, 'linear')\n", (1254, 1284), False, 'from mne._ola import _COLA, _Interp2, _Storer\n'), ((1465, 1514), 'numpy.testing.assert_allclose', 'assert_allclose', (['outs[0]', 'expected[0]'], {'atol': '(1e-07)'}), '(outs[0], expected[0], atol=1e-07)\n', (1480, 1514), False, 'from numpy.testing import assert_allclose\n'), ((1522, 1571), 'numpy.testing.assert_allclose', 'assert_allclose', (['outs[1]', 'expected[1]'], {'atol': '(1e-07)'}), '(outs[1], expected[1], atol=1e-07)\n', (1537, 1571), False, 'from numpy.testing import assert_allclose\n'), ((1722, 1776), 'numpy.linspace', 'np.linspace', (['(0)', '(np.pi / 2.0)', '(n_pts - 9)'], {'endpoint': '(False)'}), '(0, np.pi / 2.0, n_pts - 9, endpoint=False)\n', (1733, 1776), True, 'import numpy as np\n'), ((1886, 1926), 'mne._ola._Interp2', '_Interp2', (['[4, n_pts - 5]', 'values', '"""cos2"""'], {}), "([4, n_pts - 5], values, 'cos2')\n", (1894, 1926), False, 'from mne._ola import _COLA, _Interp2, _Storer\n'), ((1999, 2041), 'numpy.testing.assert_allclose', 'assert_allclose', (['out', 'expected'], {'atol': '(1e-07)'}), '(out, expected, atol=1e-07)\n', (2014, 2041), False, 'from numpy.testing import assert_allclose\n'), ((2317, 2355), 'numpy.array', 'np.array', (['[values, values[::-1] * 0.5]'], {}), '([values, values[::-1] * 0.5])\n', (2325, 2355), True, 'import numpy as np\n'), ((2398, 2438), 'mne._ola._Interp2', '_Interp2', (['[4, n_pts - 5]', 'values', '"""hann"""'], {}), "([4, n_pts - 5], values, 'hann')\n", (2406, 2438), False, 'from mne._ola import _COLA, _Interp2, _Storer\n'), ((2520, 2562), 
'numpy.testing.assert_allclose', 'assert_allclose', (['out', 'expected'], {'atol': '(1e-07)'}), '(out, expected, atol=1e-07)\n', (2535, 2562), False, 'from numpy.testing import assert_allclose\n'), ((2618, 2632), 'numpy.array', 'np.array', (['[10]'], {}), '([10])\n', (2626, 2632), True, 'import numpy as np\n'), ((2698, 2731), 'mne._ola._Interp2', '_Interp2', (['[start]', 'values', '"""zero"""'], {}), "([start], values, 'zero')\n", (2706, 2731), False, 'from mne._ola import _COLA, _Interp2, _Storer\n'), ((2818, 2838), 'numpy.full', 'np.full', (['n_pts', '(10.0)'], {}), '(n_pts, 10.0)\n', (2825, 2838), True, 'import numpy as np\n'), ((2846, 2876), 'numpy.testing.assert_allclose', 'assert_allclose', (['out', 'expected'], {}), '(out, expected)\n', (2861, 2876), False, 'from numpy.testing import assert_allclose\n'), ((999, 1011), 'numpy.arange', 'np.arange', (['(2)'], {}), '(2)\n', (1008, 1011), True, 'import numpy as np\n'), ((1085, 1125), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', 'n_pts'], {'endpoint': '(False)'}), '(0, 1, n_pts, endpoint=False)\n', (1096, 1125), True, 'import numpy as np\n'), ((1347, 1392), 'numpy.concatenate', 'np.concatenate', (['[o[0] for o in outs]'], {'axis': '(-1)'}), '([o[0] for o in outs], axis=-1)\n', (1361, 1392), True, 'import numpy as np\n'), ((1410, 1455), 'numpy.concatenate', 'np.concatenate', (['[o[1] for o in outs]'], {'axis': '(-1)'}), '([o[1] for o in outs], axis=-1)\n', (1424, 1455), True, 'import numpy as np\n'), ((2198, 2224), 'numpy.hanning', 'np.hanning', (['(2 * n_hann + 1)'], {}), '(2 * n_hann + 1)\n', (2208, 2224), True, 'import numpy as np\n'), ((3938, 3950), 'mne._ola._Storer', '_Storer', (['out'], {}), '(out)\n', (3945, 3950), False, 'from mne._ola import _COLA, _Interp2, _Storer\n'), ((3984, 4054), 'mne._ola._COLA', '_COLA', (['processor', 'storer', 'n_total', 'n_samples', 'n_overlap', 'sfreq', 'window'], {}), '(processor, storer, n_total, n_samples, n_overlap, sfreq, window)\n', (3989, 4054), False, 'from 
mne._ola import _COLA, _Interp2, _Storer\n'), ((4508, 4554), 'numpy.testing.assert_allclose', 'assert_allclose', (['out', '(signal / 2.0)'], {'atol': '(1e-07)'}), '(out, signal / 2.0, atol=1e-07)\n', (4523, 4554), False, 'from numpy.testing import assert_allclose\n')] |
"""
Copyright (C) 2004-2015 Pivotal Software, Inc. All rights reserved.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import tinctest
from tinctest.lib import local_path
from tinctest import TINCTestCase
from mpp.lib.PSQL import PSQL
from mpp.gpdb.tests.storage.lib.dbstate import DbStateClass
from gppylib.db import dbconn
class Verification(TINCTestCase):
dbname = 'mpp18816_db'
@classmethod
def setUpClass(cls):
tinctest.logger.info('Running Verification...')
def run_SQLQuery(self, exec_sql, dbname = 'template1'):
with dbconn.connect(dbconn.DbURL(dbname=dbname)) as conn:
curs = dbconn.execSQL(conn, exec_sql)
results = curs.fetchall()
return results
def test_checklog(self):
''' Select from gp_toolkit log message to see if the concurrent test run resulted in PANIC messages'''
log_sql = "select logseverity, logstate, substring(logmessage from 0 for 60) from gp_toolkit.__gp_log_master_ext where logmessage \
like '%Unrecognized DTX transaction context%' or logmessage like '%proclock table corrupted%' or logseverity = 'PANIC' ;"
result = self.run_SQLQuery(log_sql, dbname = Verification.dbname)
for (logsev, logstate, logmsg) in result:
if (logsev.strip() == 'PANIC' or 'Unrecognized DTX transaction context' in logmsg or 'proclock table corrupted' in logmsg ):
raise Exception('Master log shows PANIC or other error messages: Please check the master_log')
tinctest.logger.info('No PANIC messages found in logs')
def test_gpcheckcat(self):
dbstate = DbStateClass('run_validation')
dbstate.check_catalog(alldb = False, dbname = Verification.dbname)
| [
"tinctest.logger.info",
"gppylib.db.dbconn.execSQL",
"gppylib.db.dbconn.DbURL",
"mpp.gpdb.tests.storage.lib.dbstate.DbStateClass"
] | [((1008, 1055), 'tinctest.logger.info', 'tinctest.logger.info', (['"""Running Verification..."""'], {}), "('Running Verification...')\n", (1028, 1055), False, 'import tinctest\n'), ((2096, 2151), 'tinctest.logger.info', 'tinctest.logger.info', (['"""No PANIC messages found in logs"""'], {}), "('No PANIC messages found in logs')\n", (2116, 2151), False, 'import tinctest\n'), ((2207, 2237), 'mpp.gpdb.tests.storage.lib.dbstate.DbStateClass', 'DbStateClass', (['"""run_validation"""'], {}), "('run_validation')\n", (2219, 2237), False, 'from mpp.gpdb.tests.storage.lib.dbstate import DbStateClass\n'), ((1202, 1232), 'gppylib.db.dbconn.execSQL', 'dbconn.execSQL', (['conn', 'exec_sql'], {}), '(conn, exec_sql)\n', (1216, 1232), False, 'from gppylib.db import dbconn\n'), ((1145, 1172), 'gppylib.db.dbconn.DbURL', 'dbconn.DbURL', ([], {'dbname': 'dbname'}), '(dbname=dbname)\n', (1157, 1172), False, 'from gppylib.db import dbconn\n')] |
# encoding: utf-8
import pygame
from assets import constants as C
class Decor(pygame.sprite.Sprite):
def __init__(self, name, pos_tuple):
pygame.sprite.Sprite.__init__(self)
self.name = name
self.image = pygame.image.load(
self.full_path(C.IMAGES[name])
).convert()
self.rect = self.image.get_rect()
self.rect.x = pos_tuple[1] * C.A_MOVE
self.rect.y = pos_tuple[0] * C.A_MOVE
@staticmethod
def full_path(image: str):
return ''.join([C.IMAGE_FOLDER, image])
class Elements(Decor):
def __init__(self, elt):
super().__init__(elt.name, (elt.position_Y, elt.position_X))
self.rect.centerx = elt.position_X * C.A_MOVE + 20
self.rect.centery = elt.position_Y * C.A_MOVE + 20
class Player(Elements):
def __init__(self, maze, macgyver):
super().__init__(macgyver)
self.maze = maze
self.macgyver = macgyver
def right(self):
self.rect.x += C.A_MOVE
def left(self):
self.rect.x -= C.A_MOVE
def up(self):
self.rect.y -= C.A_MOVE
def down(self):
self.rect.y += C.A_MOVE
def update_pos(self, event, inter):
"""
Manage character movements in lab.
Args:
event : event in pygame.event.get()
inter : Instance of class Interaction
"""
keys = pygame.key.get_pressed()
mov_keys = (pygame.K_LEFT, pygame.K_RIGHT, pygame.K_UP, pygame.K_DOWN)
if (any(mov_keys) in keys) and event.type == pygame.KEYDOWN:
key = pygame.key.name(event.key)
if self.macgyver.move_char(key, self.maze.list_paths):
if inter.check_chars_pos():
self.maze.chars_meet_up()
elif self.maze.list_items:
inter.item_picking_process()
getattr(self, key)()
| [
"pygame.sprite.Sprite.__init__",
"pygame.key.get_pressed",
"pygame.key.name"
] | [((152, 187), 'pygame.sprite.Sprite.__init__', 'pygame.sprite.Sprite.__init__', (['self'], {}), '(self)\n', (181, 187), False, 'import pygame\n'), ((1399, 1423), 'pygame.key.get_pressed', 'pygame.key.get_pressed', ([], {}), '()\n', (1421, 1423), False, 'import pygame\n'), ((1590, 1616), 'pygame.key.name', 'pygame.key.name', (['event.key'], {}), '(event.key)\n', (1605, 1616), False, 'import pygame\n')] |
import argparse
import csv
from skyfield import api
from skyfield.api import EarthSatellite
from skyfield.constants import AU_KM, AU_M
from skyfield.sgp4lib import TEME_to_ITRF
from skyfield.api import Topos, load
# Read TLE file and write key parameters in CSV format
def readTLE(tleFilename):
# Open TLE filename
tleFile = open(tleFilename,'r')
# print("Opened TLE file: ",tleFilename)
# Read TLEs into catalog
catalog = []
line0 = None
line1 = None
line2 = None
for line in tleFile:
if line[0] == '0':
line0 = line
elif line[0] == '1':
line1 = line
elif line[0] == '2':
line2 = line
else:
# Error - TLE lines start with 0, 1 or 2
print("Error: line does not start with 0, 1 or 2: ",line)
if line1 and line2:
# Check if object number is same in both line 1 and 2
catalog.append(EarthSatellite(line1,line2))
line1 = None;
line2 = None;
# print("Read ", len(catalog), "TLEs into catalog")
return catalog
def writeSatelliteCSV(catalog, tleCSVFilename):
with open(tleCSVFilename, 'w', newline='') as csvfile:
fieldnames = ['satnum', 'epochyr', 'epochdays', 'jdsatepoch', 'ndot', \
'nddot', 'bstar', 'inclination', 'rightascension', 'eccentricity', \
'argofperigee', 'meanmotion', 'meananomaly']
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()
for satellite in catalog:
writer.writerow({\
'satnum':satellite.model.satnum, \
'epochyr':satellite.model.epochyr, \
'epochdays':satellite.model.epochdays, \
'jdsatepoch':satellite.model.jdsatepoch, \
'ndot':satellite.model.ndot, \
'nddot':satellite.model.nddot, \
'bstar':satellite.model.bstar, \
'inclination':satellite.model.inclo, \
'rightascension':satellite.model.nodeo, \
'eccentricity':satellite.model.ecco, \
'argofperigee':satellite.model.argpo, \
'meananomaly':satellite.model.mo, \
'meanmotion':satellite.model.no})
def getUniqueSats(catalog):
# Just get the first TLE for each satellite from catalog
# Assumes that the TLE are ordered by satellite number
uniqueSats = []
satnum = 0
for sat in catalog:
if sat.model.satnum != satnum:
uniqueSats.append(sat)
satnum = sat.model.satnum
return uniqueSats
def computeSchedule(site, catalog, tStart, tEnd):
tList = []
eventList = []
for sat in catalog:
t, events = sat.find_events(desertLaser, tStart, tEnd, altitude_degrees=5)
tList.append(t)
eventList.append(t)
return tList, eventList
tleFilename = 'catalogTest.txt'
tleCSVFilename = 'catalogTest.csv'
schedFilename = 'catalogSched.csv'
catalog = readTLE(tleFilename)
writeSatelliteCSV(catalog,tleCSVFilename)
uniqueSats = getUniqueSats(catalog)
ts = load.timescale()
tStart = ts.utc(2020,1,1)
tEnd = ts.utc(2020,1,4)
desertLaser = Topos(21.0, 16.5, 100)
eph = load('de421.bsp')
times, events = computeSchedule(desertLaser, uniqueSats, tStart, tEnd)
for i in range(0,len(times)):
satnum = uniqueSats[i].model.satnum
difference = uniqueSats[i] - desertLaser
t = times[i]
sunlit = uniqueSats[i].at(t).is_sunlit(eph)
for j in range(0,len(times[i])):
topocentric = difference.at(t[j])
alt, az, distance = topocentric.altaz()
if(sunlit[j]):
lit = 1
else:
lit = 0
print(f'{satnum:6}',f'{(t[j]-tStart):8.5f}',
f'{distance.km:8.2f}',
f'{az.degrees:8.2f}',
f'{alt.degrees:8.2f}',
lit)
| [
"csv.DictWriter",
"skyfield.api.EarthSatellite",
"skyfield.api.load.timescale",
"skyfield.api.Topos",
"skyfield.api.load"
] | [((3204, 3220), 'skyfield.api.load.timescale', 'load.timescale', ([], {}), '()\n', (3218, 3220), False, 'from skyfield.api import Topos, load\n'), ((3288, 3310), 'skyfield.api.Topos', 'Topos', (['(21.0)', '(16.5)', '(100)'], {}), '(21.0, 16.5, 100)\n', (3293, 3310), False, 'from skyfield.api import Topos, load\n'), ((3320, 3337), 'skyfield.api.load', 'load', (['"""de421.bsp"""'], {}), "('de421.bsp')\n", (3324, 3337), False, 'from skyfield.api import Topos, load\n'), ((1493, 1539), 'csv.DictWriter', 'csv.DictWriter', (['csvfile'], {'fieldnames': 'fieldnames'}), '(csvfile, fieldnames=fieldnames)\n', (1507, 1539), False, 'import csv\n'), ((981, 1009), 'skyfield.api.EarthSatellite', 'EarthSatellite', (['line1', 'line2'], {}), '(line1, line2)\n', (995, 1009), False, 'from skyfield.api import EarthSatellite\n')] |
from django.contrib import admin
from django.urls import path, include
from Blog import views
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('', include('home.urls'), name='home'),
path('aboutus/', include('aboutus.urls'), name='aboutus'),
path('accounts/', include('accounts.urls'), name='accounts'),
path('admin/', admin.site.urls),
path('Blog/', include('Blog.urls'), name='Blog'),
path('Events/', include('Events.urls'), name='Events'),
path('Organizer/', include('Organizer.urls'), name='Organizer'),
path('Participant/', include('Participant.urls'), name='Participant'),
path('Report/', include('Report.urls'), name='Report'),
path('Sponsor/', include('Sponsor.urls'), name='Sponsor'),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"django.conf.urls.static.static",
"django.urls.path",
"django.urls.include"
] | [((794, 855), 'django.conf.urls.static.static', 'static', (['settings.MEDIA_URL'], {'document_root': 'settings.MEDIA_ROOT'}), '(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n', (800, 855), False, 'from django.conf.urls.static import static\n'), ((372, 403), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (376, 403), False, 'from django.urls import path, include\n'), ((203, 223), 'django.urls.include', 'include', (['"""home.urls"""'], {}), "('home.urls')\n", (210, 223), False, 'from django.urls import path, include\n'), ((260, 283), 'django.urls.include', 'include', (['"""aboutus.urls"""'], {}), "('aboutus.urls')\n", (267, 283), False, 'from django.urls import path, include\n'), ((324, 348), 'django.urls.include', 'include', (['"""accounts.urls"""'], {}), "('accounts.urls')\n", (331, 348), False, 'from django.urls import path, include\n'), ((424, 444), 'django.urls.include', 'include', (['"""Blog.urls"""'], {}), "('Blog.urls')\n", (431, 444), False, 'from django.urls import path, include\n'), ((480, 502), 'django.urls.include', 'include', (['"""Events.urls"""'], {}), "('Events.urls')\n", (487, 502), False, 'from django.urls import path, include\n'), ((543, 568), 'django.urls.include', 'include', (['"""Organizer.urls"""'], {}), "('Organizer.urls')\n", (550, 568), False, 'from django.urls import path, include\n'), ((614, 641), 'django.urls.include', 'include', (['"""Participant.urls"""'], {}), "('Participant.urls')\n", (621, 641), False, 'from django.urls import path, include\n'), ((684, 706), 'django.urls.include', 'include', (['"""Report.urls"""'], {}), "('Report.urls')\n", (691, 706), False, 'from django.urls import path, include\n'), ((745, 768), 'django.urls.include', 'include', (['"""Sponsor.urls"""'], {}), "('Sponsor.urls')\n", (752, 768), False, 'from django.urls import path, include\n')] |
#!/usr/bin/env python3
#
# aimap.py
#
# This code is part of the aimap package, and is governed by its licence.
# Please see the LICENSE file that should have been included as part of
# this package.
import json
import logging
import logging.handlers
import os
import subprocess
import pandas as pd
import gffutils
import random
import shutil
import sys
import tarfile
import time
import traceback
from argparse import ArgumentParser
from aimap import (aimap_tools, aimap_config)
from aimap import __version__ as VERSION
# Process command-line arguments
def parse_cmdline():
"""Parse command-line arguments for script."""
parser = ArgumentParser(prog="aimap.py")
parser.add_argument('--version', action='version',
version='%(prog)s: aimap ' + VERSION)
parser.add_argument("-o", "--outdir", dest="outdirname",
action="store", default=None, required=True,
help="Output directory (required)")
parser.add_argument("-g", "--genome", dest="genomename",
action="store", default=None, required=True,
help="Genome file name (required)")
parser.add_argument("-a", "--annotation", dest="annotation",
action="store", default=None,required=True,
help="Genome annotation file, gff3 format")
parser.add_argument("--outfile_name", dest="outfile_name",
action="store", default=None,required=True,
help="output file name")
parser.add_argument( "-l","--length", dest="length",
action="store", default=80,
help="Discard reads that became shorter than length INT because of either quality or adapter trimming. A value of '0' effectively disables this behaviour. Default: 80 bp.")
parser.add_argument("-f", "--filename", dest="filename",
action="store", default=None,
help="Fastq file if model is single")
parser.add_argument("-f1", "--filename1", dest="filename1",
action="store", default=None,
help="Fastq1 file if model is paired")
parser.add_argument("-f2", "--filename2", dest="filename2",
action="store", default=None,
help="Fastq2 file if model is paired")
parser.add_argument("-m", "--model", dest="model",
action="store", default="paired",
choices=["single", "paired"],
help="aimap model (default paired)")
parser.add_argument("-t", "--threads", dest="threads",
action="store", default=1,
help="Number of additional threads to use (default 1)")
parser.add_argument("--logfile", dest="logfile",
action="store", default=None,
help="Logfile location")
parser.add_argument("--editing_level", dest="editing_level",
action="store", default=0.03,
help="A-I editing_level,discard position that became lower than editing_level. Default: 0.03")
parser.add_argument("--coverage", dest="coverage",
action="store", default=30,
help="Discard position that became lower than coverage INT. Default: 30")
return parser.parse_args()
# Report last exception as string
def last_exception():
""" Returns last exception as a string, or use in logging.
"""
exc_type, exc_value, exc_traceback = sys.exc_info()
return ''.join(traceback.format_exception(exc_type, exc_value,
exc_traceback))
# Create output directory if it doesn't exist
def make_outdir():
"""Make the output directory, if required.
This is a little involved. If the output directory already exists,
we take the safe option by default, and stop with an error. We can,
however, choose to force the program to go on, in which case we can
either clobber the existing directory, or not. The options turn out
as the following, if the directory exists:
DEFAULT: stop and report the collision
FORCE: continue, and remove the existing output directory
NOCLOBBER+FORCE: continue, but do not remove the existing output
"""
if os.path.exists(args.outdirname):
logger.error("Output directory %s would overwrite existing " +
"files (exiting)", args.outdirname)
sys.exit(1)
else:
logger.info("Creating directory %s", args.outdirname)
os.makedirs(args.outdirname)
# Run as script
if __name__ == '__main__':
# Parse command-line
args = parse_cmdline()
# Set up logging
logger = logging.getLogger('aimap.py: %s' %
time.asctime())
t0 = time.time()
logger.setLevel(logging.DEBUG)
err_handler = logging.StreamHandler(sys.stderr)
err_formatter = logging.Formatter('%(levelname)s: %(message)s')
err_handler.setFormatter(err_formatter)
# Was a logfile specified? If so, use it
if args.logfile is not None:
try:
logstream = open(args.logfile, 'w')
err_handler_file = logging.StreamHandler(logstream)
err_handler_file.setFormatter(err_formatter)
err_handler_file.setLevel(logging.INFO)
logger.addHandler(err_handler_file)
except:
logger.error("Could not open %s for logging",
args.logfile)
sys.exit(1)
# Have we got required args? If not, exit.
if args.genomename is None:
logger.error("No genome file name (exiting)")
sys.exit(1)
if args.annotation is None:
logger.error("No annotation file (exiting)")
sys.exit(1)
if args.outfile_name is None:
logger.error("No outfile name (exiting)")
sys.exit(1)
make_outdir()
logger.info("Output directory: %s", args.outdirname)
#Step1 Apply adapter and quality trimming to FastQ files.
logger.info("Step1 Apply adapter and quality trimming to FastQ files.")
if args.model == "single":
if args.filename is None:
logger.error("No fastq file name (exiting)")
sys.exit(1)
else:
os.system(aimap_tools.construct_trim_galore_single_cmdline(args.outdirname,args.length,args.filename))
if args.model == "paired":
if args.filename1 is None:
logger.error("No required fastq file1 name (exiting)")
sys.exit(1)
elif args.filename2 is None:
logger.error("No required fastq file2 name (exiting)")
sys.exit(1)
else:
os.system(aimap_tools.construct_trim_galore_paired_cmdline(args.outdirname,args.length,args.filename1,args.filename2))
#Step2 Index genome sequences in the FASTA format.
logger.info("Step2 Index sequences in the FASTA format.")
os.system(aimap_tools.construct_bwa_index_cmdline(args.genomename))
#Step3 Map reads to genome.
logger.info("Step3 Map reads to genome.")
if args.model == "single":
old_file=os.path.splitext(os.path.split(args.filename)[1])[0]
new_file=args.outdirname+"/"+old_file+"_trimmed.fq"
os.system(aimap_tools.construct_bwa_mem_single_cmdline(args.genomename, new_file,args.outdirname,args.outfile_name,args.threads))
if args.model == "paired":
old_file1=os.path.splitext(os.path.split(args.filename1)[1])[0]
old_file2=os.path.splitext(os.path.split(args.filename2)[1])[0]
new_file1=args.outdirname+"/"+old_file1+"_val_1.fq"
new_file2=args.outdirname+"/"+old_file2+"_val_2.fq"
os.system(aimap_tools.construct_bwa_mem_paired_cmdline(args.genomename, new_file1,new_file2,args.outdirname,args.outfile_name,args.threads))
#Step4 Convert sam file to bam file.
logger.info("Step4 Convert sam file to bam file.")
insamfile=args.outdirname+"/"+args.outfile_name+".sam"
os.system(aimap_tools.construct_samtools_view_cmdline(insamfile,args.outdirname,args.outfile_name,args.threads))
#Step5 Sort the bam file.
logger.info("Step5 Sort the bam file.")
inbamfile=args.outdirname+"/"+args.outfile_name+".bam"
os.system(aimap_tools.construct_samtools_sort_cmdline(inbamfile,args.outdirname,args.outfile_name,args.threads))
#Step6 Generate pileup for one or multiple BAM files.
logger.info("Step6 Generate pileup for one or multiple BAM files.")
sorted_bamfile=args.outdirname+"/"+args.outfile_name+"-sorted.bam"
os.system(aimap_tools.construct_samtools_mpileup_cmdline(sorted_bamfile,args.genomename,args.outdirname,args.outfile_name,args.threads))
#Step7 Convert pileup file to table.
logger.info("Step7 Convert pileup file to table.")
pileup_file=args.outdirname+"/"+args.outfile_name+".pileup"
aimap_tools.convert_pileup_to_table(pileup_file,args.outdirname,args.outfile_name)
#Step8 Get a_i position.
logger.info("Step8 Get a_i position.")
all_info_file=args.outdirname+"/"+args.outfile_name+"_all_position_info.txt"
aimap_tools.get_a_i_map(all_info_file,args.outdirname,args.outfile_name,args.coverage,args.editing_level)
#Step9 Confirm whether the amino acid has changed.
logger.info("Step9 Confirm whether the amino acid has changed.")
a_i_file=args.outdirname+"/"+args.outfile_name+"_a_i.txt"
aimap_tools.get_founction_Prokaryote(a_i_file,args.outdirname,args.outfile_name,args.genomename,args.annotation)
# Report that we've finished
logger.info("Done: %s.", time.asctime())
logger.info("Time taken: %.2fs", (time.time() - t0))
| [
"logging.StreamHandler",
"sys.exc_info",
"sys.exit",
"os.path.exists",
"argparse.ArgumentParser",
"aimap.aimap_tools.construct_bwa_mem_paired_cmdline",
"os.path.split",
"aimap.aimap_tools.construct_samtools_mpileup_cmdline",
"aimap.aimap_tools.construct_bwa_mem_single_cmdline",
"aimap.aimap_tools.... | [((644, 675), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'prog': '"""aimap.py"""'}), "(prog='aimap.py')\n", (658, 675), False, 'from argparse import ArgumentParser\n'), ((3623, 3637), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (3635, 3637), False, 'import sys\n'), ((4419, 4450), 'os.path.exists', 'os.path.exists', (['args.outdirname'], {}), '(args.outdirname)\n', (4433, 4450), False, 'import os\n'), ((4937, 4948), 'time.time', 'time.time', ([], {}), '()\n', (4946, 4948), False, 'import time\n'), ((5002, 5035), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stderr'], {}), '(sys.stderr)\n', (5023, 5035), False, 'import logging\n'), ((5056, 5103), 'logging.Formatter', 'logging.Formatter', (['"""%(levelname)s: %(message)s"""'], {}), "('%(levelname)s: %(message)s')\n", (5073, 5103), False, 'import logging\n'), ((8992, 9081), 'aimap.aimap_tools.convert_pileup_to_table', 'aimap_tools.convert_pileup_to_table', (['pileup_file', 'args.outdirname', 'args.outfile_name'], {}), '(pileup_file, args.outdirname, args.\n outfile_name)\n', (9027, 9081), False, 'from aimap import aimap_tools, aimap_config\n'), ((9238, 9351), 'aimap.aimap_tools.get_a_i_map', 'aimap_tools.get_a_i_map', (['all_info_file', 'args.outdirname', 'args.outfile_name', 'args.coverage', 'args.editing_level'], {}), '(all_info_file, args.outdirname, args.outfile_name,\n args.coverage, args.editing_level)\n', (9261, 9351), False, 'from aimap import aimap_tools, aimap_config\n'), ((9540, 9661), 'aimap.aimap_tools.get_founction_Prokaryote', 'aimap_tools.get_founction_Prokaryote', (['a_i_file', 'args.outdirname', 'args.outfile_name', 'args.genomename', 'args.annotation'], {}), '(a_i_file, args.outdirname, args.\n outfile_name, args.genomename, args.annotation)\n', (9576, 9661), False, 'from aimap import aimap_tools, aimap_config\n'), ((3657, 3719), 'traceback.format_exception', 'traceback.format_exception', (['exc_type', 'exc_value', 'exc_traceback'], {}), '(exc_type, 
exc_value, exc_traceback)\n', (3683, 3719), False, 'import traceback\n'), ((4592, 4603), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (4600, 4603), False, 'import sys\n'), ((4684, 4712), 'os.makedirs', 'os.makedirs', (['args.outdirname'], {}), '(args.outdirname)\n', (4695, 4712), False, 'import os\n'), ((5786, 5797), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5794, 5797), False, 'import sys\n'), ((5891, 5902), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5899, 5902), False, 'import sys\n'), ((5995, 6006), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6003, 6006), False, 'import sys\n'), ((7064, 7120), 'aimap.aimap_tools.construct_bwa_index_cmdline', 'aimap_tools.construct_bwa_index_cmdline', (['args.genomename'], {}), '(args.genomename)\n', (7103, 7120), False, 'from aimap import aimap_tools, aimap_config\n'), ((8120, 8228), 'aimap.aimap_tools.construct_samtools_view_cmdline', 'aimap_tools.construct_samtools_view_cmdline', (['insamfile', 'args.outdirname', 'args.outfile_name', 'args.threads'], {}), '(insamfile, args.outdirname,\n args.outfile_name, args.threads)\n', (8163, 8228), False, 'from aimap import aimap_tools, aimap_config\n'), ((8372, 8480), 'aimap.aimap_tools.construct_samtools_sort_cmdline', 'aimap_tools.construct_samtools_sort_cmdline', (['inbamfile', 'args.outdirname', 'args.outfile_name', 'args.threads'], {}), '(inbamfile, args.outdirname,\n args.outfile_name, args.threads)\n', (8415, 8480), False, 'from aimap import aimap_tools, aimap_config\n'), ((8696, 8830), 'aimap.aimap_tools.construct_samtools_mpileup_cmdline', 'aimap_tools.construct_samtools_mpileup_cmdline', (['sorted_bamfile', 'args.genomename', 'args.outdirname', 'args.outfile_name', 'args.threads'], {}), '(sorted_bamfile, args.\n genomename, args.outdirname, args.outfile_name, args.threads)\n', (8742, 8830), False, 'from aimap import aimap_tools, aimap_config\n'), ((9723, 9737), 'time.asctime', 'time.asctime', ([], {}), '()\n', (9735, 9737), False, 'import time\n'), ((4912, 
4926), 'time.asctime', 'time.asctime', ([], {}), '()\n', (4924, 4926), False, 'import time\n'), ((5317, 5349), 'logging.StreamHandler', 'logging.StreamHandler', (['logstream'], {}), '(logstream)\n', (5338, 5349), False, 'import logging\n'), ((6355, 6366), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6363, 6366), False, 'import sys\n'), ((6641, 6652), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6649, 6652), False, 'import sys\n'), ((7381, 7506), 'aimap.aimap_tools.construct_bwa_mem_single_cmdline', 'aimap_tools.construct_bwa_mem_single_cmdline', (['args.genomename', 'new_file', 'args.outdirname', 'args.outfile_name', 'args.threads'], {}), '(args.genomename, new_file,\n args.outdirname, args.outfile_name, args.threads)\n', (7425, 7506), False, 'from aimap import aimap_tools, aimap_config\n'), ((7814, 7951), 'aimap.aimap_tools.construct_bwa_mem_paired_cmdline', 'aimap_tools.construct_bwa_mem_paired_cmdline', (['args.genomename', 'new_file1', 'new_file2', 'args.outdirname', 'args.outfile_name', 'args.threads'], {}), '(args.genomename, new_file1,\n new_file2, args.outdirname, args.outfile_name, args.threads)\n', (7858, 7951), False, 'from aimap import aimap_tools, aimap_config\n'), ((9777, 9788), 'time.time', 'time.time', ([], {}), '()\n', (9786, 9788), False, 'import time\n'), ((5632, 5643), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5640, 5643), False, 'import sys\n'), ((6403, 6501), 'aimap.aimap_tools.construct_trim_galore_single_cmdline', 'aimap_tools.construct_trim_galore_single_cmdline', (['args.outdirname', 'args.length', 'args.filename'], {}), '(args.outdirname, args.\n length, args.filename)\n', (6451, 6501), False, 'from aimap import aimap_tools, aimap_config\n'), ((6769, 6780), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6777, 6780), False, 'import sys\n'), ((6816, 6931), 'aimap.aimap_tools.construct_trim_galore_paired_cmdline', 'aimap_tools.construct_trim_galore_paired_cmdline', (['args.outdirname', 'args.length', 'args.filename1', 
'args.filename2'], {}), '(args.outdirname, args.\n length, args.filename1, args.filename2)\n', (6864, 6931), False, 'from aimap import aimap_tools, aimap_config\n'), ((7267, 7295), 'os.path.split', 'os.path.split', (['args.filename'], {}), '(args.filename)\n', (7280, 7295), False, 'import os\n'), ((7567, 7596), 'os.path.split', 'os.path.split', (['args.filename1'], {}), '(args.filename1)\n', (7580, 7596), False, 'import os\n'), ((7639, 7668), 'os.path.split', 'os.path.split', (['args.filename2'], {}), '(args.filename2)\n', (7652, 7668), False, 'import os\n')] |
import csv
from _tkinter import TclError
import os.path as ospath
from enum import Enum
class Errors(Enum):
    """Status codes returned by IOUtilities operations.

    Members other than ``SUCCESS`` describe a failure; their values are the
    user-facing message templates (``{}`` is filled with a file path).
    """
    SUCCESS = 'Successfully completed the action.'
    FILE_NOT_FOUND = "The file, {}, couldn't be found."
    FILE_MADE = "The file, {}, didn't exist and so it has been created."
    FILE_CURRENTLY_OPEN = 'The file, {}, cannot be accessed because it is currently open.'
    CLIPBOARD_EMPTY = 'Your clipboard is currently empty.'
    DUPLICATE_HEADER = 'A header exists multiple times - This is not allowed.'

    @staticmethod
    def is_error(error):
        """Return True when *error* is an Errors member other than SUCCESS."""
        # isinstance is the idiomatic type check; it behaves identically to
        # `type(error) is Errors` here because an Enum with members cannot be
        # subclassed.
        return isinstance(error, Errors) and error is not Errors.SUCCESS
class IOUtilities:
    """Static helpers for moving tabular data between the Tk clipboard and CSV files.

    Methods report outcomes with :class:`Errors` members instead of raising,
    so callers can branch on the returned status.
    """

    @staticmethod
    def save_grid_list_to_clipboard(root, clipboard_list, item_to_list_converter,
                                    headers=None, print_clipboard=False):
        """Copy *clipboard_list* to the clipboard as tab-separated rows.

        :param root: Tk root widget providing clipboard access.
        :param clipboard_list: items to serialize, one per row.
        :param item_to_list_converter: callable mapping an item to a list of strings.
        :param headers: optional column headers written as the first row.
        :param print_clipboard: when True, echo the final clipboard contents.
        :return: ``Errors.SUCCESS``.
        """
        root.clipboard_clear()
        if headers is not None:
            root.clipboard_append('\t'.join(headers) + '\n')
        for item in clipboard_list:
            row = '\t'.join(item_to_list_converter(item))
            root.clipboard_append(row + '\n')
        if print_clipboard:
            print(root.clipboard_get())
        return Errors.SUCCESS

    @staticmethod
    def get_clipboard_as_dictionary_list(root, lower_headers=False):
        """Parse tab-separated clipboard text into a list of row dictionaries.

        The first clipboard row supplies the keys. Returns
        ``Errors.CLIPBOARD_EMPTY`` when the clipboard holds no text and
        ``Errors.DUPLICATE_HEADER`` when a header repeats.
        """
        try:
            clipboard = root.clipboard_get()
        except TclError:
            # clipboard_get raises TclError when nothing is on the clipboard.
            # The try block is kept minimal so unrelated bugs are not masked.
            return Errors.CLIPBOARD_EMPTY
        rows = clipboard.split('\n')
        headers = rows[0].split('\t')
        if lower_headers:
            headers = [h.lower() for h in headers]
        if len(set(headers)) != len(headers):
            return Errors.DUPLICATE_HEADER
        dictionary_list = []
        # rows[1:-1]: skip the header row and the empty element produced by
        # the trailing newline appended when the clipboard was written.
        for row in rows[1:-1]:
            columns = row.split('\t')
            dictionary_list.append(dict(zip(headers, columns)))
        return dictionary_list

    @staticmethod
    def get_csv_as_dictionary_list(path, not_found_callback=None, lower_headers=False):
        """Read *path* (``.csv`` appended if missing) into a list of row dicts.

        :param not_found_callback: invoked with the path when the file is
            missing; its return value is propagated. Without it,
            ``Errors.FILE_NOT_FOUND`` is returned.
        :param lower_headers: lowercase and strip the header names.
        """
        if path[-4:] != '.csv':
            path += '.csv'
        if not ospath.isfile(path):
            if not_found_callback is not None:
                return not_found_callback(path)
            return Errors.FILE_NOT_FOUND
        # newline='' is required by the csv module for correct newline and
        # quoting behavior (see the csv module docs).
        with open(path, 'r', newline='') as csvfile:
            reader = csv.DictReader(csvfile)
            if lower_headers:
                reader.fieldnames = [h.lower().strip() for h in reader.fieldnames]
            headers = reader.fieldnames
            if len(set(headers)) != len(headers):
                return Errors.DUPLICATE_HEADER
            return list(reader)

    @staticmethod
    def create_csv_file_callback(headers):
        """Return a callback that creates a CSV file with *headers* at a path.

        Suitable as ``not_found_callback`` for ``get_csv_as_dictionary_list``.
        """
        def create_csv_file(path):
            # The with statement closes the file; the previous explicit
            # close() inside the block was redundant.
            with open(path, 'w', newline='') as csvfile:
                writer = csv.DictWriter(csvfile, fieldnames=headers)
                writer.writeheader()
            return Errors.FILE_MADE
        return create_csv_file

    @staticmethod
    def write_csv_file_from_list(path, data_list, headers, item_to_list_converter):
        """Write *data_list* to *path* as CSV (``.csv`` appended if missing).

        :param item_to_list_converter: callable mapping an item to a list of
            column values, in the same order as *headers*.
        :return: ``Errors.SUCCESS``, or ``Errors.FILE_CURRENTLY_OPEN`` when
            the file is locked by another program (PermissionError).
        """
        if path[-4:] != '.csv':
            path += '.csv'
        try:
            with open(path, 'w', newline='') as csvfile:
                writer = csv.DictWriter(csvfile, fieldnames=headers)
                writer.writeheader()
                for item in data_list:
                    writer.writerow(dict(zip(headers, item_to_list_converter(item))))
        except PermissionError:
            return Errors.FILE_CURRENTLY_OPEN
        else:
            return Errors.SUCCESS
| [
"os.path.isfile",
"csv.DictWriter",
"csv.DictReader"
] | [((2135, 2154), 'os.path.isfile', 'ospath.isfile', (['path'], {}), '(path)\n', (2148, 2154), True, 'import os.path as ospath\n'), ((2377, 2400), 'csv.DictReader', 'csv.DictReader', (['csvfile'], {}), '(csvfile)\n', (2391, 2400), False, 'import csv\n'), ((2886, 2929), 'csv.DictWriter', 'csv.DictWriter', (['csvfile'], {'fieldnames': 'headers'}), '(csvfile, fieldnames=headers)\n', (2900, 2929), False, 'import csv\n'), ((3316, 3359), 'csv.DictWriter', 'csv.DictWriter', (['csvfile'], {'fieldnames': 'headers'}), '(csvfile, fieldnames=headers)\n', (3330, 3359), False, 'import csv\n')] |
from collections import deque
from functools import reduce
from inspect import getmembers, isfunction, signature
from typing import Any, Iterable, List
from deepmerge import always_merger
def _constructor(self, *parts: List[Iterable[Any]]) -> None:
self._parts = parts
def _make_iterator(cls):
    """Build the ``__iter__`` implementation for the generated composite *cls*.

    The returned generator walks the composite depth-first and yields only
    leaf objects: nested instances of exactly *cls* and plain iterables are
    flattened, while *cls* subclasses and non-iterables are yielded as-is.
    """
    def _iterator(self):
        # Simple depth-first composite Iterator
        # Recursive version did not work for some mysterious reason
        # This one proved to be more reliable
        # Credit: https://stackoverflow.com/questions/26145678/implementing-a-depth-first-tree-iterator-in-python
        stack = deque(self._parts)
        while stack:
            # Pop out the first element in the stack
            part = stack.popleft()
            if cls == type(part):  # The same composite exactly
                # extendleft reverses its argument, so pre-reversing keeps
                # the original left-to-right (depth-first) order.
                stack.extendleft(reversed(part._parts))
            elif isinstance(part, cls) or not isinstance(part, Iterable):
                yield part  # derived classes presumably have overloads
            else:  # Iterable
                stack.extendleft(reversed(part))
    return _iterator
def _make_initializer(rt: type) -> Any:
return getattr(rt, "__origin__", rt)()
def _make_method(name: str, func: callable) -> callable:
    """Wrap member method *name* so it runs over every leaf of a composite.

    The wrapper's behavior depends on *func*'s return annotation:
    ``None`` -> invoke the method on each leaf for its side effects;
    ``dict`` -> deep-merge the per-leaf results; anything else -> concatenate
    them with ``+`` into a freshly created container.
    """
    rt: type = signature(func).return_annotation

    if rt is None:
        def _foreach_parts(self, *args, **kwargs) -> callable:
            # self iterates depth first; results are discarded.
            for obj in self:
                getattr(obj, name)(*args, **kwargs)
        return _foreach_parts

    def _reduce_parts(self, *args, **kwargs) -> Any:
        # Start from an empty container of the annotated return type
        # (typing generics expose the runtime class via __origin__).
        acc = getattr(rt, "__origin__", rt)()
        for obj in self:
            result = getattr(obj, name)(*args, **kwargs)
            if rt is dict:
                acc = always_merger.merge(acc, result)
            else:
                acc = acc + result
        return acc
    return _reduce_parts
# TODO: type annotation for parts (have to be descendants from the original class)
def composite(cls: type) -> type:
    """
    Generic class decorator to create a Composite from original class.
    Notes:
        1. the constructor does not make copy, so do not pass generators,
           if you plan to invoke more than one operation.
        2. it will return always flattened results of any operation.
    :param cls: original class
    :return: Composite version of original class
    """
    # NOTE(review): this also replaces __init__ on the *original* class —
    # presumably intentional, but worth confirming.
    setattr(cls, "__init__", _constructor)
    parent = cls.__bases__[0]
    members = {}
    for member_name, member_func in getmembers(cls, predicate=isfunction):
        if member_name.startswith("_"):
            continue
        members[member_name] = _make_method(member_name, member_func)
    members["__init__"] = _constructor
    generated = type(cls.__name__, (parent,), members)
    generated.__iter__ = _make_iterator(generated)
    return generated
| [
"inspect.signature",
"inspect.getmembers",
"collections.deque"
] | [((620, 638), 'collections.deque', 'deque', (['self._parts'], {}), '(self._parts)\n', (625, 638), False, 'from collections import deque\n'), ((2078, 2093), 'inspect.signature', 'signature', (['func'], {}), '(func)\n', (2087, 2093), False, 'from inspect import getmembers, isfunction, signature\n'), ((2809, 2846), 'inspect.getmembers', 'getmembers', (['cls'], {'predicate': 'isfunction'}), '(cls, predicate=isfunction)\n', (2819, 2846), False, 'from inspect import getmembers, isfunction, signature\n')] |
# -*- coding: utf-8 -*-
#
# Copyright 2017-2020 - Swiss Data Science Center (SDSC)
# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
# Eidgenössische Technische Hochschule Zürich (ETHZ).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Get and set Renku repository or global options.
Set values
~~~~~~~~~~
You can set various Renku configuration options, for example the image registry
URL, with a command like:
.. code-block:: console
$ renku config set registry https://registry.gitlab.com/demo/demo
By default, configuration is stored locally in the project's directory. Use
``--global`` option to store configuration for all projects in your home
directory.
Remove values
~~~~~~~~~~~~~
To remove a specific key from configuration use:
.. code-block:: console
$ renku config remove registry
By default, only local configuration is searched for removal. Use ``--global``
option to remove a global configuration value.
Query values
~~~~~~~~~~~~
You can display all configuration values with:
.. code-block:: console
$ renku config show
Both local and global configuration files are read. Values in local
configuration take precedence over global values. Use ``--local`` or
``--global`` flag to read corresponding configuration only.
You can provide a KEY to display only its value:
.. code-block:: console
$ renku config show registry
https://registry.gitlab.com/demo/demo
Available configuration values
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The following values are available for the ``renku config`` command:
+------------------------+-------------------------------------+-----------+
| Name | Description | Default |
+========================+=====================================+===========+
| registry | The image registry to store Docker | ``None`` |
| | images in | |
+------------------------+-------------------------------------+-----------+
| zenodo.access_token | Access token for Zenodo API | ``None`` |
+------------------------+-------------------------------------+-----------+
| dataverse.access_token | Access token for Dataverse API | ``None`` |
+------------------------+-------------------------------------+-----------+
| dataverse.server_url | URL for the Dataverse API server | ``None`` |
| | to use | |
+------------------------+-------------------------------------+-----------+
| show_lfs_message | Whether to show messages about | ``True`` |
| | files being added to git LFS or not | |
+------------------------+-------------------------------------+-----------+
| lfs_threshold | Threshold file size below which | ``100kb`` |
| | files are not added to git LFS | |
+------------------------+-------------------------------------+-----------+
"""
import click
from renku.core import errors
from renku.core.commands.config import read_config, update_config
@click.group()
def config():
    """Interact with renku configuration."""
    # Group entry point only; subcommands (show / set / remove) are
    # registered on it below via @config.command().
    pass
@config.command()
@click.argument("key", required=False, default=None)
@click.option("--local", "local_only", is_flag=True, help="Read from local configuration only.")
@click.option("--global", "global_only", is_flag=True, help="Read from global configuration only.")
def show(key, local_only, global_only):
    """Show current configuration.
    KEY is of the form <group>.<entry>, e.g. 'interactive.default_url'.
    """
    # The two scope flags are mutually exclusive; with neither flag set both
    # configurations are read (per the module docstring, local values take
    # precedence over global ones).
    if local_only and global_only:
        raise errors.UsageError("Cannot use --local and --global together.")
    # KEY is optional: without it, all configuration values are shown.
    value = read_config(key, local_only, global_only)
    click.secho(value)
@config.command("set")
@click.argument("key")
@click.argument("value")
@click.option("--global", "global_only", is_flag=True, help="Store to global configuration only.")
def set_(key, value, global_only):
    """Set a configuration value.
    KEY is of the form <group>.<entry>, e.g. 'interactive.default_url'.
    """
    # The trailing underscore avoids shadowing the built-in set(); the CLI
    # name is still "set" via the explicit command name above.
    update_config(key, value=value, global_only=global_only)
    click.secho("OK", fg="green")
@config.command()
@click.argument("key")
@click.option("--global", "global_only", is_flag=True, help="Remove from global configuration only.")
def remove(key, global_only):
    """Remove a configuration value.
    KEY is of the form <group>.<entry>, e.g. 'interactive.default_url'.
    """
    # remove=True deletes the key rather than assigning a value. By default
    # only local configuration is searched; --global targets the global file.
    update_config(key, remove=True, global_only=global_only)
    click.secho("OK", fg="green")
| [
"click.argument",
"renku.core.errors.UsageError",
"click.secho",
"click.group",
"click.option",
"renku.core.commands.config.update_config",
"renku.core.commands.config.read_config"
] | [((3652, 3665), 'click.group', 'click.group', ([], {}), '()\n', (3663, 3665), False, 'import click\n'), ((3755, 3806), 'click.argument', 'click.argument', (['"""key"""'], {'required': '(False)', 'default': 'None'}), "('key', required=False, default=None)\n", (3769, 3806), False, 'import click\n'), ((3808, 3908), 'click.option', 'click.option', (['"""--local"""', '"""local_only"""'], {'is_flag': '(True)', 'help': '"""Read from local configuration only."""'}), "('--local', 'local_only', is_flag=True, help=\n 'Read from local configuration only.')\n", (3820, 3908), False, 'import click\n'), ((3905, 4008), 'click.option', 'click.option', (['"""--global"""', '"""global_only"""'], {'is_flag': '(True)', 'help': '"""Read from global configuration only."""'}), "('--global', 'global_only', is_flag=True, help=\n 'Read from global configuration only.')\n", (3917, 4008), False, 'import click\n'), ((4376, 4397), 'click.argument', 'click.argument', (['"""key"""'], {}), "('key')\n", (4390, 4397), False, 'import click\n'), ((4399, 4422), 'click.argument', 'click.argument', (['"""value"""'], {}), "('value')\n", (4413, 4422), False, 'import click\n'), ((4424, 4526), 'click.option', 'click.option', (['"""--global"""', '"""global_only"""'], {'is_flag': '(True)', 'help': '"""Store to global configuration only."""'}), "('--global', 'global_only', is_flag=True, help=\n 'Store to global configuration only.')\n", (4436, 4526), False, 'import click\n'), ((4788, 4809), 'click.argument', 'click.argument', (['"""key"""'], {}), "('key')\n", (4802, 4809), False, 'import click\n'), ((4811, 4916), 'click.option', 'click.option', (['"""--global"""', '"""global_only"""'], {'is_flag': '(True)', 'help': '"""Remove from global configuration only."""'}), "('--global', 'global_only', is_flag=True, help=\n 'Remove from global configuration only.')\n", (4823, 4916), False, 'import click\n'), ((4285, 4326), 'renku.core.commands.config.read_config', 'read_config', (['key', 'local_only', 'global_only'], 
{}), '(key, local_only, global_only)\n', (4296, 4326), False, 'from renku.core.commands.config import read_config, update_config\n'), ((4331, 4349), 'click.secho', 'click.secho', (['value'], {}), '(value)\n', (4342, 4349), False, 'import click\n'), ((4676, 4732), 'renku.core.commands.config.update_config', 'update_config', (['key'], {'value': 'value', 'global_only': 'global_only'}), '(key, value=value, global_only=global_only)\n', (4689, 4732), False, 'from renku.core.commands.config import read_config, update_config\n'), ((4737, 4766), 'click.secho', 'click.secho', (['"""OK"""'], {'fg': '"""green"""'}), "('OK', fg='green')\n", (4748, 4766), False, 'import click\n'), ((5064, 5120), 'renku.core.commands.config.update_config', 'update_config', (['key'], {'remove': '(True)', 'global_only': 'global_only'}), '(key, remove=True, global_only=global_only)\n', (5077, 5120), False, 'from renku.core.commands.config import read_config, update_config\n'), ((5125, 5154), 'click.secho', 'click.secho', (['"""OK"""'], {'fg': '"""green"""'}), "('OK', fg='green')\n", (5136, 5154), False, 'import click\n'), ((4209, 4271), 'renku.core.errors.UsageError', 'errors.UsageError', (['"""Cannot use --local and --global together."""'], {}), "('Cannot use --local and --global together.')\n", (4226, 4271), False, 'from renku.core import errors\n')] |
# -*- coding: utf-8 -*-
__author__ = 'mateusz'
__date__ = '13.11.14 / 08:59'
__git__ = 'https://github.com/mateuszdargacz'
from django.db import models
from django.utils.translation import gettext_lazy as _
class HouseType(models.Model):
    # One house model in the catalogue. Verbose names are Polish:
    # Nazwa=name, Opis=description, Krótki opis=short description,
    # Metraż=floor area, Cena=price.
    name = models.CharField(_('Nazwa'), max_length=56)
    description = models.TextField(_('Opis'))
    short_desc = models.TextField(_('Krótki opis'))
    space = models.DecimalField(_('Metraż'), max_digits=6, decimal_places=2)
    price = models.DecimalField(_('Cena'), max_digits=10, decimal_places=2)
    @property
    def get_price(self):
        # Trivial alias for the price field.
        return self.price
    def __unicode__(self):
        # Python 2-era display name (this codebase predates __str__ usage).
        return self.name
class HouseDraft(models.Model):
    # A named draft/variant belonging to a HouseType (Dom = house).
    name = models.CharField(_('Nazwa'), max_length=128)
    desc = models.TextField(_('Opis'))
    house_type = models.ForeignKey('HouseType', verbose_name=_('Dom'))
    def __unicode__(self):
        return self.name
class CImage(models.Model):
    # Photo attached to a HouseType (reachable via house_type.images);
    # corder (kolejnosc) controls display order.
    caption = models.CharField(_('podpis zdjecia'), max_length=56)
    # NOTE(review): same verbose name as caption — looks like copy/paste; confirm.
    description = models.TextField(_('podpis zdjecia'))
    image = models.ImageField(_('Plik zdjęcia'), upload_to='pics/')
    house_type = models.ForeignKey('HouseType', verbose_name=_('Dom'), related_name='images')
    corder = models.IntegerField(_('kolejnosc'), blank=True, null=True)
    def __unicode__(self):
        return self.caption
# Construction-stage choices for GalleryImage.etap: six phases from
# groundwork and fundaments through the finished estate (labels are Polish).
ETAP_CHOICES = (
    ('etap1', u'Prace ziemne i fundamenty.'),
    ('etap2', u'Wykonanie gabarytów domu'),
    ('etap3', u'Instalacje'),
    ('etap4', u'Wykończenie wnętrza'),
    ('etap5', u'Wykończenie zewnątrz'),
    ('etap6', u'Osiedle i okolice'),
)
class GalleryImage(models.Model):
    # Photo in the construction-progress gallery; `etap` assigns it to one
    # of the ETAP_CHOICES stages, `order` (kolejnosc) controls ordering.
    caption = models.CharField(_('podpis zdjecia'), max_length=56)
    # NOTE(review): same verbose name as caption — looks like copy/paste; confirm.
    description = models.TextField(_('podpis zdjecia'))
    image = models.ImageField(_('Plik zdjęcia'), upload_to='pics/')
    order = models.IntegerField(_('kolejnosc'), blank=True, null=True)
    etap = models.CharField(max_length=20, blank=True, null=True, choices=ETAP_CHOICES)
    def __unicode__(self):
        return self.caption
class Message(models.Model):
    # Contact-form message (Wiadomość) with sender email and receipt time.
    email = models.CharField(_('Email'), max_length=128)
    message = models.TextField(_('Wiadomość'))
    # NOTE(review): auto_now updates the timestamp on *every* save;
    # auto_now_add is the usual choice for a "received at" field — confirm.
    d_created = models.DateTimeField(_('Odebrano'), auto_now=True)
    def __unicode__(self):
        return self.email
"django.utils.translation.gettext_lazy",
"django.db.models.CharField"
] | [((1928, 2004), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'blank': '(True)', 'null': '(True)', 'choices': 'ETAP_CHOICES'}), '(max_length=20, blank=True, null=True, choices=ETAP_CHOICES)\n', (1944, 2004), False, 'from django.db import models\n'), ((269, 279), 'django.utils.translation.gettext_lazy', '_', (['"""Nazwa"""'], {}), "('Nazwa')\n", (270, 279), True, 'from django.utils.translation import gettext_lazy as _\n'), ((331, 340), 'django.utils.translation.gettext_lazy', '_', (['"""Opis"""'], {}), "('Opis')\n", (332, 340), True, 'from django.utils.translation import gettext_lazy as _\n'), ((376, 392), 'django.utils.translation.gettext_lazy', '_', (['"""Krótki opis"""'], {}), "('Krótki opis')\n", (377, 392), True, 'from django.utils.translation import gettext_lazy as _\n'), ((426, 437), 'django.utils.translation.gettext_lazy', '_', (['"""Metraż"""'], {}), "('Metraż')\n", (427, 437), True, 'from django.utils.translation import gettext_lazy as _\n'), ((503, 512), 'django.utils.translation.gettext_lazy', '_', (['"""Cena"""'], {}), "('Cena')\n", (504, 512), True, 'from django.utils.translation import gettext_lazy as _\n'), ((728, 738), 'django.utils.translation.gettext_lazy', '_', (['"""Nazwa"""'], {}), "('Nazwa')\n", (729, 738), True, 'from django.utils.translation import gettext_lazy as _\n'), ((784, 793), 'django.utils.translation.gettext_lazy', '_', (['"""Opis"""'], {}), "('Opis')\n", (785, 793), True, 'from django.utils.translation import gettext_lazy as _\n'), ((980, 999), 'django.utils.translation.gettext_lazy', '_', (['"""podpis zdjecia"""'], {}), "('podpis zdjecia')\n", (981, 999), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1051, 1070), 'django.utils.translation.gettext_lazy', '_', (['"""podpis zdjecia"""'], {}), "('podpis zdjecia')\n", (1052, 1070), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1102, 1119), 'django.utils.translation.gettext_lazy', '_', (['"""Plik 
zdjęcia"""'], {}), "('Plik zdjęcia')\n", (1103, 1119), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1267, 1281), 'django.utils.translation.gettext_lazy', '_', (['"""kolejnosc"""'], {}), "('kolejnosc')\n", (1268, 1281), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1686, 1705), 'django.utils.translation.gettext_lazy', '_', (['"""podpis zdjecia"""'], {}), "('podpis zdjecia')\n", (1687, 1705), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1757, 1776), 'django.utils.translation.gettext_lazy', '_', (['"""podpis zdjecia"""'], {}), "('podpis zdjecia')\n", (1758, 1776), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1808, 1825), 'django.utils.translation.gettext_lazy', '_', (['"""Plik zdjęcia"""'], {}), "('Plik zdjęcia')\n", (1809, 1825), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1878, 1892), 'django.utils.translation.gettext_lazy', '_', (['"""kolejnosc"""'], {}), "('kolejnosc')\n", (1879, 1892), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2121, 2131), 'django.utils.translation.gettext_lazy', '_', (['"""Email"""'], {}), "('Email')\n", (2122, 2131), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2180, 2194), 'django.utils.translation.gettext_lazy', '_', (['"""Wiadomość"""'], {}), "('Wiadomość')\n", (2181, 2194), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2233, 2246), 'django.utils.translation.gettext_lazy', '_', (['"""Odebrano"""'], {}), "('Odebrano')\n", (2234, 2246), True, 'from django.utils.translation import gettext_lazy as _\n'), ((856, 864), 'django.utils.translation.gettext_lazy', '_', (['"""Dom"""'], {}), "('Dom')\n", (857, 864), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1201, 1209), 'django.utils.translation.gettext_lazy', '_', (['"""Dom"""'], {}), "('Dom')\n", (1202, 1209), True, 'from django.utils.translation import gettext_lazy as _\n')] |
#!/bin/usr/python
#
# This class imports search results from
# DUDEN website and stores in a pickle for
# later use.
#
#############################################
import pprint, pickle
from bs4 import BeautifulSoup
import requests
import re
class Dictionary:
    """Look up a German word on duden.de and cache its meanings in a pickle file."""

    # Path of the pickle file used to persist/restore the meaning list.
    dict_pickle = "german_dict.pickle"

    def __init__(self, word):
        self.word = word
        self.getHTMLaddress()

    def getHTMLaddress(self):
        """Build the duden.de lookup URL for the current word."""
        self.url_word = 'http://www.duden.de/rechtschreibung/' + self.word

    def showAttributes(self):
        """Print the word and its lookup URL."""
        print('The word is ' + self.word + '.')
        print('The website is ' + self.url_word + '.')

    def getMeaningDict(self):
        """Load the meanings from the local pickle instead of the website."""
        self.meaning = self.showDictionary()

    def getMeaningDuden(self):
        """Scrape the word's meanings from duden.de into self.meaning."""
        meanings = list()
        r = requests.get(self.url_word)
        soup = BeautifulSoup(r.text, 'html.parser')
        pattern = re.compile('(.*)<section')
        i = 1
        for word_definition in soup.find_all(id='block-duden-tiles-1'):
            for definitions in word_definition.find_all(lambda tag: tag.name == 'ol' and (tag.get('class') == ['entry'] or tag.get('class') == ['lexem'])):
                for a_link in definitions.find_all(lambda tag: tag.name == 'a'):
                    meanings.append("{0} {2}: {1}.".format('Meaning', self.returnStr(a_link), i))
                    i += 1
            for definitions in word_definition.find_all(lambda tag: tag.name == 'span' and tag.get('class') == ['lexem']):
                # Bug fix: the regex match used to be assigned over the
                # meanings *list* itself, destroying it and crashing on the
                # next .append(); a separate variable is used now.
                match = pattern.search(self.returnStr(definitions))
                meanings.append("{0} {2}: {1}.".format('Meaning', match.group(1), i))
                print("{0} {2}: {1}.".format('Meaning', match.group(1), i))
                i += 1
        self.meaning = meanings

    def getExample(self):
        pass

    def printMeaning(self):
        """Print the cached meanings, one per line."""
        for item in self.meaning:
            print(item)

    def getSynonym(self):
        pass

    def getPronunciation(self):
        pass

    def printWord(self):
        pass

    @staticmethod
    def debugMe(var):
        """Print a value and its type (debug helper).

        Bug fix: this used to be an instance method whose only parameter was
        ``var`` (no ``self``), so ``self.debugMe(x)`` raised a TypeError.
        As a staticmethod it now works from both the class and instances.
        """
        print('\n')
        print(var)
        print(type(var))
        print('\n')

    def saveWord(self):
        """Persist self.meaning to the pickle file."""
        with open(self.dict_pickle, "wb") as pickle_out:
            pickle.dump(self.meaning, pickle_out)

    def showDictionary(self):
        """Load and return the meaning list from the pickle file.

        Bug fix: the read handle was previously never closed; a ``with``
        block now guarantees it.
        """
        with open(self.dict_pickle, "rb") as pickle_in:
            de_dict = pickle.load(pickle_in)
        print(type(de_dict))
        return de_dict

    def returnStr(self, bs4object):
        # NOTE(review): joins utf-8 *bytes* with a str separator — Python 2
        # era code that raises TypeError on Python 3. Kept as-is to preserve
        # behavior, but it should become str(item) on Python 3.
        return "".join([item.encode('utf-8') for item in bs4object.contents])
#german_dict = { word:meaning }
#pickleMe( german_dict )
#my_stored_dictionary = depickleMe("german_dict.pickle")
#print(my_stored_dictionary)
# print(line)
# for definition in line.find_all(lambda tag: tag.name == 'span' and tag.get('class') == ['lexem'],limit=1):
# print(definition)
# contents = "".join([str(item) for item in definition.contents])
# result = pattern.search(contents)
# print(result.group(1))
# #result = pattern.search(definition.text)
# #print(result.group(0))
# #for definition in line.find_all(lambda tag: tag.name == 'span' and tag.get('class') == ['lexem'],limit=1):
# # print(definition.text + '\n\n\n')
#for child in soup_2.section.descendants:
# print(child)
#for link in soup.find_all(lambda tag: tag.name='div' and tag.get('class') == ['entry']):
# print(link)
#for link in soup.find_all(lambda hat: hat.name='div' and hat.get('class') == ['entry']).find_all(lambda tag: tag.name == 'span' and tag.get('class') == ['lexem']):
# print(link.text)i
##########################################################################################################################
#
# Essential links used in this code:
#
#
#
| [
"pickle.dump",
"re.compile",
"pickle.load",
"requests.get",
"bs4.BeautifulSoup"
] | [((730, 757), 'requests.get', 'requests.get', (['self.url_word'], {}), '(self.url_word)\n', (742, 757), False, 'import requests\n'), ((768, 804), 'bs4.BeautifulSoup', 'BeautifulSoup', (['r.text', '"""html.parser"""'], {}), "(r.text, 'html.parser')\n", (781, 804), False, 'from bs4 import BeautifulSoup\n'), ((816, 852), 'bs4.BeautifulSoup', 'BeautifulSoup', (['r.text', '"""html.parser"""'], {}), "(r.text, 'html.parser')\n", (829, 852), False, 'from bs4 import BeautifulSoup\n'), ((868, 888), 're.compile', 're.compile', (['""">(.*)<"""'], {}), "('>(.*)<')\n", (878, 888), False, 'import re\n'), ((901, 927), 're.compile', 're.compile', (['"""(.*)<section"""'], {}), "('(.*)<section')\n", (911, 927), False, 'import re\n'), ((938, 970), 're.compile', 're.compile', (['"""span class="lexem\\""""'], {}), '(\'span class="lexem"\')\n', (948, 970), False, 'import re\n'), ((2058, 2095), 'pickle.dump', 'pickle.dump', (['self.meaning', 'pickle_out'], {}), '(self.meaning, pickle_out)\n', (2069, 2095), False, 'import pprint, pickle\n'), ((2203, 2225), 'pickle.load', 'pickle.load', (['pickle_in'], {}), '(pickle_in)\n', (2214, 2225), False, 'import pprint, pickle\n')] |
import sys
import os
import numpy as np
import torchvision
from torchvision.models.detection.faster_rcnn import FastRCNNPredictor
from torchvision.models.detection import FasterRCNN
from torchvision.models.detection.rpn import AnchorGenerator
import wandb
import json
from cs329s_waymo_object_detection.utils.gcp_utils import download_blob, upload_blob
import os
def collate_fn(batch):
    """Transpose a batch of per-sample tuples into per-field tuples.

    E.g. [(img1, tgt1), (img2, tgt2)] -> ((img1, img2), (tgt1, tgt2)).
    """
    columns = zip(*batch)
    return tuple(columns)
def get_fast_rcnn(num_classes):
    """Return a COCO-pretrained Faster R-CNN (ResNet50-FPN) with its box
    classification head resized to *num_classes* outputs."""
    detector = torchvision.models.detection.fasterrcnn_resnet50_fpn(pretrained=True)
    # Swap the classification head so it predicts our label set instead of COCO's.
    head_in_features = detector.roi_heads.box_predictor.cls_score.in_features
    detector.roi_heads.box_predictor = FastRCNNPredictor(head_in_features, num_classes)
    return detector
def get_custom_backbone_fast_rcnn(num_classes):
    """Build a Faster R-CNN detector on top of a pretrained MobileNetV2 backbone."""
    feature_extractor = torchvision.models.mobilenet_v2(pretrained=True).features
    # FasterRCNN needs to know how many channels the backbone emits.
    feature_extractor.out_channels = 1280
    rpn_anchors = AnchorGenerator(
        sizes=((32, 64, 128, 256, 512),),
        aspect_ratios=((0.5, 1.0, 2.0),),
    )
    box_pooler = torchvision.ops.MultiScaleRoIAlign(
        featmap_names=['0'],
        output_size=7,
        sampling_ratio=4,
    )
    return FasterRCNN(
        feature_extractor,
        num_classes=num_classes,
        rpn_anchor_generator=rpn_anchors,
        box_roi_pool=box_pooler,
    )
return model
def track_metrics(loss, classifier_loss, box_reg_loss, objectness_loss, rpn_loss, epoch):
    """Print the epoch's loss components and log each one to Weights & Biases."""
    metrics = [
        ('loss', loss),
        ('classifier_loss', classifier_loss),
        ('box_reg_loss', box_reg_loss),
        ('objectness_loss', objectness_loss),
        ('rpn_loss', rpn_loss),
    ]
    banner = '################################################'
    print('\n')
    print(banner)
    print('Epoch_{}'.format(epoch))
    print(banner)
    for name, value in metrics:
        print('{}: {}'.format(name, value))
    print('\n')
    # One wandb.log call per metric, in the same order as before.
    for name, value in metrics:
        wandb.log({name: value})
def classify_record(pred_label, gt_label, iou, iou_thresh):
    """Bucket one detection as 'TP'/'FP'/'FN'/'TN'.

    Keeps the original convention: matching labels give TP (IoU above
    threshold) or FP; mismatched labels give FN (IoU above threshold) or TN.
    """
    labels_match = pred_label == gt_label
    overlaps = iou >= iou_thresh
    if labels_match:
        return 'TP' if overlaps else 'FP'
    return 'FN' if overlaps else 'TN'
def calc_precision_recall(eval_df, label, iou_thresh):
    """Compute a precision/recall curve for one ground-truth class label.

    Parameters
    ----------
    eval_df : pandas.DataFrame
        Must contain 'gt_label', 'pred_label', 'confidence_score' and 'iou' columns.
    label : str
        The ground-truth class to evaluate.
    iou_thresh : float
        IoU threshold used to classify each detection.

    Returns
    -------
    (list, list)
        Precision and recall values ordered by descending confidence, or
        (None, None) if the curve cannot be computed (e.g. no rows for the
        label, or a division by zero on the first record).
    """
    try:
        tmp_df = eval_df[eval_df['gt_label'] == label]
        tmp_df = tmp_df.sort_values(by='confidence_score', ascending=False).reset_index(drop=True)
        total_positives = tmp_df.shape[0]
        # Bug fix: the IoU threshold was previously hard-coded to 0.5,
        # silently ignoring the iou_thresh parameter.
        tmp_df['classification'] = tmp_df.apply(
            lambda x: classify_record(x['pred_label'], x['gt_label'], x['iou'], iou_thresh), axis=1)
        precision = []
        recall = []
        counts = {'TP': 0, 'FP': 0, 'TN': 0, 'FN': 0}
        for classification in list(tmp_df['classification']):
            counts[classification] += 1
            precision.append(counts['TP'] / (counts['TP'] + counts['FP']))
            recall.append(counts['TP'] / total_positives)
        return precision, recall
    except Exception:
        # Bare ``except:`` replaced so KeyboardInterrupt/SystemExit propagate.
        return None, None
def bb_intersection_over_union(boxA, boxB):
    """Return the IoU of two axis-aligned boxes given as (x1, y1, x2, y2)."""
    # Corners of the intersection rectangle.
    ix1 = max(boxA[0], boxB[0])
    iy1 = max(boxA[1], boxB[1])
    ix2 = min(boxA[2], boxB[2])
    iy2 = min(boxA[3], boxB[3])
    # Intersection area; clamp negative extents (disjoint boxes) to zero.
    inter_area = abs(max(ix2 - ix1, 0) * max(iy2 - iy1, 0))
    if inter_area == 0:
        return 0
    # Areas of each box individually.
    area_a = abs((boxA[2] - boxA[0]) * (boxA[3] - boxA[1]))
    area_b = abs((boxB[2] - boxB[0]) * (boxB[3] - boxB[1]))
    # IoU = intersection / union.
    return inter_area / float(area_a + area_b - inter_area)
def concatenateJSON(paths, mount_dir, write_path, gcp_bucket="waymo-processed"):
    """Concatenate several annotation JSON files and upload the merge to GCS.

    :param paths: list of annotation file paths (relative to ``mount_dir``)
    :param mount_dir: local directory under which the bucket is mounted
    :param write_path: destination blob path for the merged JSON
    :param gcp_bucket: bucket to upload to (default "waymo-processed")

    Each top-level key of the input JSONs is merged; scalar values are
    promoted to one-element lists so files concatenate uniformly.
    """
    return_file = "tmpFile.json"
    return_dict = {}
    for gcp_annotations_path in paths:
        # ``with`` guarantees the handle is closed even if json.load raises
        # (the original opened/closed manually and leaked on error).
        with open(mount_dir + gcp_annotations_path, 'r') as f:
            data = json.load(f)
        for key in data.keys():
            values = data[key] if isinstance(data[key], list) else [data[key]]
            if key in return_dict:
                return_dict[key].extend(values)
            else:
                # Copy so later extends never alias the parsed input.
                return_dict[key] = list(values)
    with open(return_file, "w") as f:
        json.dump(return_dict, f)
    upload_blob(gcp_bucket, return_file, write_path)
os.remove(return_file) | [
"wandb.log",
"torchvision.models.detection.rpn.AnchorGenerator",
"torchvision.models.detection.faster_rcnn.FastRCNNPredictor",
"torchvision.models.detection.fasterrcnn_resnet50_fpn",
"cs329s_waymo_object_detection.utils.gcp_utils.upload_blob",
"torchvision.models.mobilenet_v2",
"json.load",
"torchvisi... | [((466, 535), 'torchvision.models.detection.fasterrcnn_resnet50_fpn', 'torchvision.models.detection.fasterrcnn_resnet50_fpn', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (518, 535), False, 'import torchvision\n'), ((642, 685), 'torchvision.models.detection.faster_rcnn.FastRCNNPredictor', 'FastRCNNPredictor', (['in_features', 'num_classes'], {}), '(in_features, num_classes)\n', (659, 685), False, 'from torchvision.models.detection.faster_rcnn import FastRCNNPredictor\n'), ((883, 970), 'torchvision.models.detection.rpn.AnchorGenerator', 'AnchorGenerator', ([], {'sizes': '((32, 64, 128, 256, 512),)', 'aspect_ratios': '((0.5, 1.0, 2.0),)'}), '(sizes=((32, 64, 128, 256, 512),), aspect_ratios=((0.5, 1.0,\n 2.0),))\n', (898, 970), False, 'from torchvision.models.detection.rpn import AnchorGenerator\n'), ((1023, 1115), 'torchvision.ops.MultiScaleRoIAlign', 'torchvision.ops.MultiScaleRoIAlign', ([], {'featmap_names': "['0']", 'output_size': '(7)', 'sampling_ratio': '(4)'}), "(featmap_names=['0'], output_size=7,\n sampling_ratio=4)\n", (1057, 1115), False, 'import torchvision\n'), ((1228, 1342), 'torchvision.models.detection.FasterRCNN', 'FasterRCNN', (['backbone'], {'num_classes': 'num_classes', 'rpn_anchor_generator': 'anchor_generator', 'box_roi_pool': 'roi_pooler'}), '(backbone, num_classes=num_classes, rpn_anchor_generator=\n anchor_generator, box_roi_pool=roi_pooler)\n', (1238, 1342), False, 'from torchvision.models.detection import FasterRCNN\n'), ((2048, 2073), 'wandb.log', 'wandb.log', (["{'loss': loss}"], {}), "({'loss': loss})\n", (2057, 2073), False, 'import wandb\n'), ((2085, 2132), 'wandb.log', 'wandb.log', (["{'classifier_loss': classifier_loss}"], {}), "({'classifier_loss': classifier_loss})\n", (2094, 2132), False, 'import wandb\n'), ((2144, 2185), 'wandb.log', 'wandb.log', (["{'box_reg_loss': box_reg_loss}"], {}), "({'box_reg_loss': box_reg_loss})\n", (2153, 2185), False, 'import wandb\n'), ((2197, 2244), 'wandb.log', 'wandb.log', 
(["{'objectness_loss': objectness_loss}"], {}), "({'objectness_loss': objectness_loss})\n", (2206, 2244), False, 'import wandb\n'), ((2256, 2289), 'wandb.log', 'wandb.log', (["{'rpn_loss': rpn_loss}"], {}), "({'rpn_loss': rpn_loss})\n", (2265, 2289), False, 'import wandb\n'), ((5708, 5756), 'cs329s_waymo_object_detection.utils.gcp_utils.upload_blob', 'upload_blob', (['gcp_bucket', 'return_file', 'write_path'], {}), '(gcp_bucket, return_file, write_path)\n', (5719, 5756), False, 'from cs329s_waymo_object_detection.utils.gcp_utils import download_blob, upload_blob\n'), ((5762, 5784), 'os.remove', 'os.remove', (['return_file'], {}), '(return_file)\n', (5771, 5784), False, 'import os\n'), ((769, 817), 'torchvision.models.mobilenet_v2', 'torchvision.models.mobilenet_v2', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (800, 817), False, 'import torchvision\n'), ((5098, 5110), 'json.load', 'json.load', (['f'], {}), '(f)\n', (5107, 5110), False, 'import json\n'), ((5677, 5702), 'json.dump', 'json.dump', (['return_dict', 'f'], {}), '(return_dict, f)\n', (5686, 5702), False, 'import json\n')] |
import re
import subprocess
import getpass
# for testing purposes only.
# Command launched (via subprocess) to open student files and scoresheets.
EDITOR="gedit"
class Project:
    """
    Used to model Project objects.

    Attributes
    -----------
    project_path : Path
        Path of the project folder.
    number : int
        Project number (parsed from the folder name, which must be an integer).
    py_paths : list<Path>
        Paths of all the visible .py files for a given project.
    all_file_paths : list<Path>
        Paths of all the files for a given project (except ``.graded``).
    is_graded : bool
        Indicates whether the project has been marked as graded.
    scoresheet_path : Path
        Path to the scoresheet file, or None if there is not exactly one
        ``*.score`` file in the project directory.
    """

    def __init__(self, path):
        """
        Default constructor for Project object.

        Parameters
        ----------
        path : Path
            Path of the project folder; ``path.name`` must parse as an int.
        """
        self.project_path = path
        self.number = int(path.name)
        self.py_paths = self.get_py_paths()
        self.all_file_paths = self.get_all_file_paths()
        self.is_graded = self.check_graded()
        self.scoresheet_path = self.get_scoresheet()

    def get_py_paths(self):
        """
        Populates py_paths with paths to all py files.

        Returns
        -------
        list<Path>
            Paths of the .py files whose names do not start with a dot.
        """
        py_paths = []
        # Anchored: first character must not be a dot (skips hidden files).
        pattern = r"^[^\.]\w+\.py"
        for path in self.project_path.iterdir():
            if re.search(pattern, path.name):
                py_paths.append(path)
        return py_paths

    def get_all_file_paths(self):
        """
        Populates all_file_paths member with paths to all the files.

        Returns
        -------
        list<Path>
            Paths of all files in the project directory except ``.graded``.
        """
        files = []
        for path in self.project_path.iterdir():
            if path.is_file() and path.name != ".graded":
                files.append(path)
        return files

    def check_graded(self):
        """
        Checks if a project is graded.

        Returns
        -------
        bool
            True if a ``.graded`` marker file exists in the project directory.
        """
        return (self.project_path / ".graded").exists()

    def open_scoresheet(self):
        """
        Opens the scoresheet using EDITOR in a subprocess.
        """
        subprocess.Popen([EDITOR, str(self.scoresheet_path)], stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT)

    def open_files(self):
        """
        Opens all the files in self.all_file_paths using EDITOR in a subprocess.
        """
        input("Press enter to open the files in {}.".format(EDITOR))
        for path in self.all_file_paths:
            print("Opening: ", str(path))
            subprocess.Popen([EDITOR, str(path)], stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT)

    def get_scoresheet(self):
        """
        Grabs the scoresheet file if it exists.

        Returns
        -------
        Path or None
            Path to the single ``*.score`` file, or None when the project
            has zero or multiple score files.
        """
        score_file_paths = list(self.project_path.glob("./*.score"))
        # There must be exactly one score file; anything else means the
        # project cannot be scored automatically.
        if len(score_file_paths) != 1:
            return None
        return score_file_paths[0]

    def get_project_total_score(self):
        """
        Uses regex to pick up all __#__ patterns and sums up all the scores
        (except the first).  The first number in the list is the total score
        (which may or may not be 0).  Any number of surrounding underscores
        is matched (at least 1 on each side).

        Returns
        -------
        int
            Total number of points summed over all but the first __#__ pattern.
        list<int>
            All the scores found in the file (first item is the given total).
        """
        with open(str(self.scoresheet_path), "r") as file_object_read:
            lines = file_object_read.read()
        pattern_list = re.findall(r'_+\d+_+', lines)
        points_list = []
        for element in pattern_list:
            points = element.split('_')
            # The digits land in the middle of the split list.
            point_position = int(len(points) / 2)
            points_list.append(int(points[point_position]))
        # Subtract the current total in case this is a regrading pass.
        total = sum(points_list) - points_list[0]
        return total, points_list

    def write_project_score(self, total, points_list):
        """
        Writes the score to the scoresheet file.

        Parameters
        ----------
        total : int
            Total points awarded to the student.
        points_list : list<int>
            Scores found in the scoresheet; the first item is the current total.
        """
        with open(str(self.scoresheet_path), 'r+') as file_object_write:
            lines = file_object_write.readlines()
            for i, line in enumerate(lines):
                if '__' in line and 'Score:' in line:
                    line = line.replace("__{:02d}__".format(points_list[0]), "__{}__".format(total))
                    lines[i] = line
            file_object_write.seek(0)
            file_object_write.writelines(lines)

    def check_scoresheet(self):
        """
        Grabs the total points and the list of all points, then asks the
        grader to reconcile mismatches before writing the score back.

        Returns
        -------
        int
            Total number of points awarded to the student.
        """
        score_total, points_list = self.get_project_total_score()
        fix_choice = "wut"
        if score_total != points_list[0] and (score_total != 0):
            # Bug fix: membership was previously tested against the string
            # "yes no", so any substring (e.g. "e", "s n") was accepted.
            while fix_choice not in ("yes", "no"):
                fix_choice = input("\nThe score and sum do not match.\nGiven Score: {}\nComputed Score: {}\nWould you like me to fix that? (Yes/No): ".format(points_list[0], score_total)).lower().strip()
            if fix_choice == "yes":
                self.write_project_score(score_total, points_list)
            else:
                return points_list[0]  # The point total at the top of the file
        else:
            self.write_project_score(score_total, points_list)
        return points_list[0]

    def mark_as_graded(self):
        """
        Mark the project as graded by writing a ``.graded`` file containing
        the grading TA's username.
        """
        ta_username = getpass.getuser()
        print("Graded by", ta_username)
        with open("{}/.graded".format(str(self.project_path.resolve())), "w") as scoresheet:
            scoresheet.write(ta_username)
        self.is_graded = True
| [
"getpass.getuser",
"re.findall",
"re.search"
] | [((7043, 7060), 'getpass.getuser', 'getpass.getuser', ([], {}), '()\n', (7058, 7060), False, 'import getpass\n'), ((1357, 1386), 're.search', 're.search', (['pattern', 'path.name'], {}), '(pattern, path.name)\n', (1366, 1386), False, 'import re\n'), ((4332, 4361), 're.findall', 're.findall', (['"""_+\\\\d+_+"""', 'lines'], {}), "('_+\\\\d+_+', lines)\n", (4342, 4361), False, 'import re\n')] |
from reaction import *
import rospy
class ReactionPublishRosOutNode(Reaction):
    """A reaction that publishes a message on rosout at a configurable loglevel."""

    def __init__(self, autonomy_level, message, loglevel):
        """
        :param autonomy_level: autonomy level forwarded to the Reaction base.
        :param message: the text to publish on rosout.
        :param loglevel: one of "info", "debug", "err", "warn", "fatal";
            anything else falls back to info (with a warning).
        """
        super(ReactionPublishRosOutNode, self).__init__(None, autonomy_level)
        #: The message to publish.
        #: :type: string
        self._message = message
        log_functions = {
            "info": rospy.loginfo,
            "debug": rospy.logdebug,
            "err": rospy.logerr,
            "warn": rospy.logwarn,
            "fatal": rospy.logfatal,
        }
        log = log_functions.get(loglevel)
        if log is None:
            # loglevel does not make sense
            rospy.logwarn(
                "A reaction wants to log on loglevel"
                + " '%s', but that loglevel does not exist." % loglevel
                + " Setting loglevel to info.")
            # Bug fix: the fallback was ``rospy.info``, which does not exist
            # and raised AttributeError; the logging function is ``loginfo``.
            log = rospy.loginfo
        #: The logging function to use.
        self.__log = log

    def execute_reaction(self):
        """Log the reaction message at the configured loglevel."""
        self.__log(self._message)
| [
"rospy.logwarn"
] | [((774, 922), 'rospy.logwarn', 'rospy.logwarn', (['(\'A reaction wants to log on loglevel\' + \n " \'%s\', but that loglevel does not exist." % loglevel +\n \' Setting loglevel to info.\')'], {}), '(\'A reaction wants to log on loglevel\' + \n " \'%s\', but that loglevel does not exist." % loglevel +\n \' Setting loglevel to info.\')\n', (787, 922), False, 'import rospy\n')] |
#!/usr/bin/env python3
"""
Lints the chart's yaml files without any cluster interaction. For this script to
function, you must install yamllint and kubeval.
- https://github.com/adrienverge/yamllint
- https://github.com/garethr/kubeval
"""
import argparse
import glob
import subprocess
def lint(config, values, kubernetes_version, output='lint-output'):
    """Calls `helm lint`, `helm template`, `yamllint` and `kubeval`.

    Parameters
    ----------
    config : str
        Path to the yamllint configuration file.
    values : str
        Path to a chart values file passed to helm.
    kubernetes_version : str
        Kubernetes version that kubeval validates the rendered templates against.
    output : str
        Directory where `helm template` writes rendered manifests.
        Previously hard-coded; exposed as a defaulted parameter so the CLI's
        ``--output`` flag can actually take effect.
    """
    print("### helm lint")
    subprocess.check_call([
        'helm', 'lint', '../jupyterhub',
        '--values', values,
    ])

    print("### helm template")
    subprocess.check_call([
        'helm', 'template', '../jupyterhub',
        '--values', values,
        '--output-dir', output
    ])

    print("### yamllint")
    subprocess.check_call([
        'yamllint', '-c', config, output
    ])

    print("### kubeval")
    for filename in glob.iglob(output + '/**/*.yaml', recursive=True):
        subprocess.check_call([
            'kubeval', filename,
            '--kubernetes-version', kubernetes_version,
            '--strict'
        ])

    print()
    print("### All good!")
if __name__ == '__main__':
    # CLI entry point: declare the flags table-style, then dispatch to lint().
    argparser = argparse.ArgumentParser()
    cli_options = [
        ('--config', 'lint-config.yaml', 'Specify the yamllint config'),
        ('--values', 'lint-chart-values.yaml', 'Specify additional chart value files'),
        ('--output', 'lint-output', 'Specify an output directory'),
        ('--kubernetes-version', '1.8.0', 'Validate against this kubernetes version'),
    ]
    for flag, default, help_text in cli_options:
        argparser.add_argument(flag, default=default, help=help_text)
    args = argparser.parse_args()

    lint(args.config, args.values, args.kubernetes_version)
| [
"argparse.ArgumentParser",
"glob.iglob",
"subprocess.check_call"
] | [((457, 533), 'subprocess.check_call', 'subprocess.check_call', (["['helm', 'lint', '../jupyterhub', '--values', values]"], {}), "(['helm', 'lint', '../jupyterhub', '--values', values])\n", (478, 533), False, 'import subprocess\n'), ((593, 701), 'subprocess.check_call', 'subprocess.check_call', (["['helm', 'template', '../jupyterhub', '--values', values, '--output-dir',\n output]"], {}), "(['helm', 'template', '../jupyterhub', '--values',\n values, '--output-dir', output])\n", (614, 701), False, 'import subprocess\n'), ((764, 821), 'subprocess.check_call', 'subprocess.check_call', (["['yamllint', '-c', config, output]"], {}), "(['yamllint', '-c', config, output])\n", (785, 821), False, 'import subprocess\n'), ((882, 931), 'glob.iglob', 'glob.iglob', (["(output + '/**/*.yaml')"], {'recursive': '(True)'}), "(output + '/**/*.yaml', recursive=True)\n", (892, 931), False, 'import glob\n'), ((1173, 1198), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1196, 1198), False, 'import argparse\n'), ((941, 1045), 'subprocess.check_call', 'subprocess.check_call', (["['kubeval', filename, '--kubernetes-version', kubernetes_version, '--strict']"], {}), "(['kubeval', filename, '--kubernetes-version',\n kubernetes_version, '--strict'])\n", (962, 1045), False, 'import subprocess\n')] |
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
from io import BytesIO
import PyPDF2
def new_pdf(details, name, width=216, height=280):
    """Create a new single-blank-page PDF in memory.

    Returns a dict with "data" (a rewound BytesIO of the PDF bytes) and
    "name" (the supplied filename).
    """
    writer = PyPDF2.PdfFileWriter()
    writer.addMetadata(details)
    writer.addBlankPage(width, height)
    buffer = BytesIO()
    writer.write(buffer)
    buffer.seek(0)
    return {"data": buffer, "name": name}
def new_email(pdfs):
    """Build a multipart email with one application/pdf attachment per entry.

    ``pdfs`` maps arbitrary keys to dicts carrying a file-like "data" stream
    and a "name" used as the attachment filename.
    """
    msg = MIMEMultipart()
    for entry in pdfs.values():
        stream = entry["data"]
        stream.seek(0)  # rewind in case the stream was already read
        attachment = MIMEApplication(stream.read(), "pdf", name=entry["name"])
        attachment.add_header("Content-Disposition", "attachment",
                              filename=entry["name"])
        msg.attach(attachment)
    return msg
class PDFWithAttachments():
    """A single blank-page PDF document that can carry embedded file attachments."""

    def __init__(self, details, name, width=216, height=280):
        # Kept for interface compatibility; nothing in this class reads it.
        self._images = []
        self.pdfobj = PyPDF2.PdfFileWriter()
        self.pdfobj.addMetadata(details)
        self.page = self.pdfobj.addBlankPage(width, height)

    def addAttachment(self, name, data):
        """Embed ``data`` in the PDF under the filename ``name``."""
        self.pdfobj.addAttachment(name, data)

    def as_file(self):
        """Serialise the document and return it as a rewound BytesIO."""
        out = BytesIO()
        self.pdfobj.write(out)
        out.seek(0)
        return out
| [
"PyPDF2.PdfFileWriter",
"email.mime.application.MIMEApplication",
"email.mime.multipart.MIMEMultipart",
"io.BytesIO"
] | [((241, 263), 'PyPDF2.PdfFileWriter', 'PyPDF2.PdfFileWriter', ([], {}), '()\n', (261, 263), False, 'import PyPDF2\n'), ((347, 356), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (354, 356), False, 'from io import BytesIO\n'), ((485, 500), 'email.mime.multipart.MIMEMultipart', 'MIMEMultipart', ([], {}), '()\n', (498, 500), False, 'from email.mime.multipart import MIMEMultipart\n'), ((627, 673), 'email.mime.application.MIMEApplication', 'MIMEApplication', (['data', '"""pdf"""'], {'name': "pdf['name']"}), "(data, 'pdf', name=pdf['name'])\n", (642, 673), False, 'from email.mime.application import MIMEApplication\n'), ((961, 983), 'PyPDF2.PdfFileWriter', 'PyPDF2.PdfFileWriter', ([], {}), '()\n', (981, 983), False, 'import PyPDF2\n'), ((1213, 1222), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (1220, 1222), False, 'from io import BytesIO\n')] |
import numpy as np
import sys
import string
# Grid dimensions: M rows, N columns.
M=4
N=5
# grid holding the clue sequences (rows occupy the first M rows of s_grille,
# columns the next N -- see creer_sequence)
s_grille=np.full((M+N,M+N),0)
# puzzle grid, cells take values -1 / 0 / 1 (see legend below)
grille=np.full((M,N), -1)
#grille[1][0]=0
sequence1=[1,1]
sequence2=[2,1]
# cell legend: uncoloured -1
#              white       0
#              black       1
def lire_fichier(s_grille):
    """Parse the puzzle file named in sys.argv[1] into ``s_grille``.

    The file is read character by character: digits 1-9 become clue values,
    '#' switches from row clues to column clues, and newlines advance to the
    next clue line.  Returns the filled ``s_grille``.

    NOTE(review): ``text[i]=="0x20"`` compares a single character against the
    five-character literal "0x20" and can never be true -- presumably ' '
    (a space) was intended; confirm against the input format.
    NOTE(review): the bare ``nextline==0`` / ``nextline==1`` lines are
    comparisons, not assignments, so they are no-ops.
    """
    #file=sys.argv[1:]
    try:
        in_file = open(sys.argv[1], "r")
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt.
        sys.exit("ERROR. Can't read supplied filename.")

    text = in_file.read()
    lg=len(text)
    i=0
    nextline=0
    line=0
    colonne=0
    bool=0
    j=0
    while(i<lg-1):
        if(text[i]=='\n'):
            nextline=1
            # ``bool`` is set when '#' was seen; it suppresses one line advance.
            if(bool==1):
                bool=0
            else:
                line=line+1
            i=i+1
            colonne=0
            continue
        else:
            if nextline==1:
                if text[i]=="0x20":
                    if text[i+1]!="0x20" and text[i+1]!="\n":
                        s_grille[line][colonne]=0
                        colonne=colonne+1
                        nextline==1
                    else:
                        nextline==1
                elif (text[i]>='1' and text[i]<='9'):
                    s_grille[line][colonne]=text[i]
                    colonne=colonne+1
                    nextline==0
                elif text[i]=='#':
                    # '#' marks the row/column clue separator.
                    j=line-1
                    bool=1
                    nextline==0
                else:
                    nextline==0
            if nextline==0:
                #print("hi")
                if (text[i]>='1' and text[i]<='9'):
                    s_grille[line][colonne]=text[i]
        i=i+1
        #print(s_grille)
    in_file.close()
    return s_grille
def compare_block_ligne(grille, i, j, sl):
    """Can a horizontal block of length ``sl`` start at (i, j)?

    Fails when the immediate horizontal neighbours of column ``j`` are black,
    or when any cell of the block (scanning rightwards, clipped to the grid)
    is already white.
    """
    right_black = (j + 1 < N) and grille[i][j + 1] == 1
    left_black = (j - 1 >= 0) and grille[i][j - 1] == 1
    if right_black or left_black:
        return False
    col = j
    remaining = sl
    while 0 <= col < N and remaining > 0:
        if grille[i][col] == 0:
            return False
        col += 1
        remaining -= 1
    return True
def compare_block_colonne(grille, i, j, sl):
    """Can a vertical block of length ``sl`` start at (i, j)?

    Fails when the immediate vertical neighbours of row ``i`` are black,
    or when any cell of the block (scanning downwards, clipped to the grid)
    is already white.
    """
    below_black = (i + 1 < M) and grille[i + 1][j] == 1
    above_black = (i - 1 >= 0) and grille[i - 1][j] == 1
    if below_black or above_black:
        return False
    row = i
    remaining = sl
    while 0 <= row < M and remaining > 0:
        if grille[row][j] == 0:
            return False
        row += 1
        remaining -= 1
    return True
def coloriage_possible_ligne(grille, sequence, i, j, l, cl):
    """Top-level feasibility test for a row: can the first ``l`` blocks of
    ``sequence`` be placed in row ``i`` up to column ``j``, with cell (i, j)
    coloured ``cl`` (1 = black, 0 = white)?  Returns True or False.
    """
    # Syntax guards:
    # case a: l exceeds the number of blocks in the sequence
    # case b: i is not between 0 and N-1
    #         (NOTE(review): for a row index the bound should probably be
    #          M-1, not N-1 -- confirm against the caller)
    # case c: j < 0
    if (len(sequence)<l) or (i<0) or (i>N-1) or(j<0):
        return False
    # case 1, l == 0: feasible only when j == 0
    if (l==0):
        if (j==0):
            return True
        print("1false")
        return False
    else:
        val=sequence[l-1]
        print("s", sequence[l-1])
        # case 2a: j < sl - 1 -- not enough room for the last block
        if (j<(sequence[l-1]-1)):
            print("2false")
            return False
        # case 2b: j == sl - 1 -- the block fits exactly:
        #   feasible only when it is the last block (l == 1) and the scan
        #   leftwards finds neither a white cell nor a forced-white request
        elif (j==(sequence[l-1]-1)):
            cpt=j
            bool=0
            while(j>=0):
                if grille[i][j]==0 or cl==0:
                    bool=1
                    break
                j=j-1
            print(l, bool)
            if l==1 and bool==0:
                print("ABC true")
                return True
            print("3false")
            return False
        else:
            # case 2c: recurse over possible placements
            return coloriage_possible_ligne_rec(grille, sequence, i, j, l, -1, cl )#, case_j ,nb_block)
def coloriage_possible_ligne_rec(grille, sequence, i, j, l, check ,cl):
    """Recursive feasibility check: place the first ``l`` blocks of
    ``sequence`` in row ``i`` so that the rightmost block ends at or before
    column ``j``.

    ``check == -1`` on the first call (the colour of (i, j) is forced by
    ``cl``: 1 = black, 0 = white); all recursive calls pass ``check == 0``.
    Returns True when a consistent placement exists, False otherwise.
    """
    # All blocks placed: success.
    if (l==0) and j>=-1 :
        print("ABC True")
        return True
    if j<0:
        print(i, j, l)
        print(grille)
        print("0false")
        return False
    # On the first iteration we do not yet know whether the cell is black or white.
    print(grille)
    if check ==-1:
        if cl==0:
            compare=compare_block_ligne(grille, i, j-sequence[l-1], sequence[l-1])
        else:
            compare=compare_block_ligne(grille, i, j-sequence[l-1]+1, sequence[l-1])
        print("i, j", i, j,"compare:", compare, "l", l)
        if grille[i][j]==-1:
            if not (compare):
                print("4false")
                return False
            else:
                if(j==0) and l==1 and sequence[0]==1:
                    return True
                print("here i j", i ,j-(sequence[l-1])-(1-cl)-1)
                if (j-(sequence[l-1])-(1-cl)-1<-1):
                    return coloriage_possible_ligne_rec(grille, sequence, i ,j-(sequence[l-1]), l-1, 0, cl)
                return coloriage_possible_ligne_rec(grille, sequence, i ,j-(sequence[l-1])-(1-cl)-1, l-1, 0, cl)
        elif grille[i][j]==1:
            if(j==0) and l==1 and sequence[0]==1:
                return True
            if cl==0:
                return False
            if compare:
                return coloriage_possible_ligne_rec(grille, sequence, i ,j-(sequence[l-1])-1, l-1 ,0, cl)
            return False
        elif grille[i][j]==0:
            if(j==0) and l==1 and sequence[0]==1:
                return False
            if cl==1:
                return False
            if compare:
                return coloriage_possible_ligne_rec(grille, sequence, i ,j-(sequence[l-1])-2, l-1 ,0, cl)
            return False
        else:
            print("Syntaxe erreur valeur different que -1 0 1")
            exit()
    else:
        # Two candidate placements for the block ending near column j.
        compare_1=compare_block_ligne(grille, i, j-sequence[l-1], sequence[l-1])
        compare_2=compare_block_ligne(grille, i, j-sequence[l-1]+1, sequence[l-1])
        print("i, j", i, j,"compare1:", compare_1, "l",l)
        print("i, j", i, j,"compare2:", compare_2, "l",l)
        if grille[i][j]==-1:
            if(j==0) and l==1 and sequence[0]==1:
                return True
            if grille[i][j-sequence[l-1]-1]==1 and compare_1:
                return coloriage_possible_ligne_rec(grille, sequence, i ,j-(sequence[l-1])-2, l-1, 0, cl)
            elif grille[i][j-sequence[l-1]]==1 and compare_2:
                return coloriage_possible_ligne_rec(grille, sequence, i ,j-(sequence[l-1])-1, l-1 ,0, cl)
            elif not (compare_1 or compare_2):
                print("6false")
                return False
            else:
                if grille[i][j-sequence[l-1]-1]==0:
                    # Bug fix: this branch previously did ``l=len(sequence[l-1])``
                    # (``len`` of an int raises TypeError), wrote into the builtin
                    # ``list`` instead of ``grille``, and clobbered ``l`` before
                    # recursing.  Colour the block of length sequence[l-1] ending
                    # at column j-1, then recurse on the remaining blocks.
                    seg = sequence[l-1]
                    for offset in range(seg):
                        grille[i][j-seg+offset] = 1
                    return coloriage_possible_ligne_rec(grille, sequence, i ,j-seg-1, l-1 ,0, cl)
                else:
                    print("or")
                    if (j==0) and sequence[l-1]==1:
                        print("ABC True")
                        return True
                    return coloriage_possible_ligne_rec(grille, sequence, i ,j-(sequence[l-1])-1, l-1 ,0, cl) or coloriage_possible_ligne_rec(grille, sequence, i ,j-(sequence[l-1])-2, l-1, 0, cl)
        elif grille[i][j]==1:
            if(j==0) and l==1 and sequence[0]==1:
                return True
            if compare_2:
                return coloriage_possible_ligne_rec(grille, sequence, i ,j-(sequence[l-1])-1, l-1 ,0, cl)
            else:
                print("7false")
                return False
        elif grille[i][j]==0:
            if(j==0) and l==1 and sequence[0]==1:
                return False
            if compare_1:
                return coloriage_possible_ligne_rec(grille, sequence, i ,j-(sequence[l-1])-2, l-1 ,0, cl)
            else:
                print("8false")
                return False
        else:
            print("Syntaxe erreur valeur different que -1 0 1")
            exit()
def coloriage_possible_colonne(grille, sequence, i, j, l ,cl):
    """Top-level feasibility test for a column: can the first ``l`` blocks of
    ``sequence`` be placed in column ``j`` up to row ``i``, with cell (i, j)
    coloured ``cl`` (1 = black, 0 = white)?  Returns True or False.
    """
    # Syntax guards:
    # case a: l exceeds the number of blocks in the sequence
    # case b: i is not between 0 and N-1
    #         (NOTE(review): a row index should be bounded by M-1 -- this
    #          looks copy-pasted from the row version; confirm)
    # case c: j < 0
    if (len(sequence)<l) or (i<0) or (i>N-1) or(j<0):
        return False
    # case 1, l == 0: feasible only when i == 0
    if (l==0):
        if (i==0):
            return True
        print("11false")
        return False
    else:
        print("i")
        val=sequence[l-1]
        # case 2a: i < sl - 1 -- not enough room for the last block
        if (i<(sequence[l-1]-1)):
            print("22false")
            return False
        # case 2b: i == sl - 1 -- the block fits exactly:
        #   feasible only when it is the last block (l == 1) and the scan
        #   upwards finds neither a white cell nor a forced-white request
        elif (i==(sequence[l-1]-1)):
            cpt=i
            bool=0
            while(i>=0):
                if grille[i][j]==0 or cl==0:
                    bool=1
                    break
                i=i-1
            if l==1 and bool==0:
                print("ABC true")
                return True
            print("33false")
            return False
        else:
            # case 2c: recurse over possible placements
            return coloriage_possible_colonne_rec(grille, sequence, i, j, l, -1 ,cl)#, case_j ,nb_block)
def coloriage_possible_colonne_rec(grille, sequence, i, j, l, check, cl):
    """Recursive feasibility check for a column: place the first ``l`` blocks
    of ``sequence`` in column ``j`` so that the bottom-most block ends at or
    before row ``i``.

    ``check == -1`` on the first call (the colour of (i, j) is forced by
    ``cl``); all recursive calls pass ``check == 0``.  Returns True/False.

    Bug fix: three recursive calls previously dispatched to the ROW variant
    ``coloriage_possible_ligne_rec`` (a copy-paste error); they now recurse
    on this function.

    NOTE(review): several guards index ``grille[i][j-sequence[l-1]...]``,
    i.e. they step along the ROW instead of the column -- this also looks
    copy-pasted from the row version, but the intended behaviour is unclear,
    so it is flagged rather than changed.
    """
    # All blocks placed: success.
    if (l==0) and (i>=-1):
        print("ABC true")
        return True
    if i<0:
        print("44false")
        return False
    # On the first iteration we do not yet know whether the cell is black or white.
    print(grille)
    if check ==-1:
        if cl==0:
            compare=compare_block_colonne(grille, i-sequence[l-1], j, sequence[l-1])
        else:
            compare=compare_block_colonne(grille, i-sequence[l-1]+1, j, sequence[l-1])
        print("i, j", i, j,"compare:", compare, "l", l)
        if grille[i][j]==-1:
            if not (compare):
                print("55false")
                return False
            else:
                if(i==0) and l==1 and sequence[0]==1:
                    return True
                print("here i j", i-(sequence[l-1])-(1-cl)-1 ,j)
                if (i-(sequence[l-1])-(1-cl)-1<-1):
                    return coloriage_possible_colonne_rec(grille, sequence, i-(sequence[l-1]) ,j, l-1, 0, cl)
                return coloriage_possible_colonne_rec(grille, sequence, i-(sequence[l-1])-(1-cl)-1 ,j, l-1, 0, cl)
        elif grille[i][j]==1:
            if(i==0) and l==1 and sequence[0]==1:
                return True
            if compare:
                return coloriage_possible_colonne_rec(grille, sequence, i-(sequence[l-1])-1 ,j, l-1 ,0, cl)
            else:
                return False
        elif grille[i][j]==0:
            return False
        else:
            print("Syntaxe erreur valeur different que -1 0 1")
            exit()
    else:
        # Two candidate placements for the block ending near row i.
        compare_1=compare_block_colonne(grille, i-sequence[l-1], j, sequence[l-1])
        compare_2=compare_block_colonne(grille, i-sequence[l-1]+1, j, sequence[l-1])
        print("i, j", i, j,"compare1:", compare_1, "l",l)
        print("i, j", i, j,"compare2:", compare_2, "l",l)
        if grille[i][j]==-1:
            if grille[i][j-sequence[l-1]-1]==1 and compare_1:
                return coloriage_possible_colonne_rec(grille, sequence, i-(sequence[l-1])-2 ,j, l-1, 0, cl)
            elif grille[i][j-sequence[l-1]]==1 and compare_2:
                if(i==0):
                    return coloriage_possible_colonne_rec(grille, sequence, i-(sequence[l-1]) ,j, l-1 ,0, cl)
                return coloriage_possible_colonne_rec(grille, sequence, i-(sequence[l-1])-1 ,j, l-1 ,0, cl)
            elif not (compare_1 or compare_2):
                print("66false")
                return False
            else:
                if grille[i][j-sequence[l-1]-1]==0:
                    return coloriage_possible_colonne_rec(grille, sequence, i-(sequence[l-1])-1 ,j, l-1 ,0, cl)
                else:
                    if (j==0) and sequence[l-1]==1:
                        print("ABC True")
                        return True
                    return coloriage_possible_colonne_rec(grille, sequence, i-(sequence[l-1])-1 ,j, l-1 ,0, cl) or coloriage_possible_colonne_rec(grille, sequence, i-(sequence[l-1])-2 ,j, l-1, 0, cl)
        elif grille[i][j]==1:
            if(i==0) and l==1 and sequence[0]==1:
                return True
            if compare_2:
                return coloriage_possible_colonne_rec(grille, sequence, i-(sequence[l-1])-1 ,j, l-1 ,0, cl)
            else:
                print("77false")
                return False
        elif grille[i][j]==0:
            if(i==0) and l==1 and sequence[0]==1:
                return False
            if compare_1:
                return coloriage_possible_colonne_rec(grille, sequence, i-(sequence[l-1])-2 ,j, l-1 ,0, cl)
            else:
                print("88false")
                return False
        else:
            print("Syntaxe erreur valeur different que -1 0 1")
            exit()
def dupliquer(grille):
    """Return an independent M x N copy of ``grille``."""
    copie = np.full((M, N), -1)
    for ligne in range(M):
        for colonne in range(N):
            copie[ligne][colonne] = grille[ligne][colonne]
    return copie
def creer_sequence(indice, direction):
    """Extract the clue sequence for row (direction == 1, read from the first
    M entries of s_grille[indice]) or column (direction == 2, read from the
    first N entries of s_grille[indice + M]).

    The very first entry is always kept, even when it is 0; subsequent zero
    entries are dropped.
    """
    sequence = []
    if direction == 1:
        source_row, limit = indice, M
    elif direction == 2:
        source_row, limit = indice + M, N
    else:
        return sequence
    first = True
    for k in range(limit):
        value = s_grille[source_row][k]
        if value != 0 or first:
            sequence.append(value)
            first = False
    return sequence
def coloreLig(grille, i):
    """Deduce and colour the forced cells of row ``i``.

    Works through the row's clue sequence from right to left, asking
    ``coloriage_possible_ligne`` whether the current cell could be white
    and/or black.  Returns (ok, grille): ok is False when neither colour is
    feasible at the rightmost cell, True once at least one block was placed.

    Fixes applied: removed the unused ``somme_sequence`` accumulator; the
    extent of an existing black run is now tracked with the column index
    ``nb`` (previously the cell VALUE ``grille[i][nb]`` -- always 1 -- was
    compared against the min/max indices); removed an unreachable trailing
    ``return resultat`` that referenced an undefined name.
    """
    sequence=creer_sequence(i,1)# 1 means row, 2 means column
    l=len(sequence)
    j=N-1
    bool=0
    print("----------------------",sequence)
    while(j>=0 and l>0):
        print("i",i, "j", j, "l", l)
        resultat_blanc=(coloriage_possible_ligne(grille, sequence, i, j, l, 0))
        print("noir")
        resultat_noir=(coloriage_possible_ligne(grille, sequence, i, j, l, 1) )
        print("resultat_blanc, resultat_noir",resultat_blanc, resultat_noir)
        k=j
        if resultat_noir==True:
            bool=1
            if resultat_blanc==False:
                # Black is forced: colour the whole block ending at column j.
                s=sequence[l-1]
                print(l-1)
                while(s>0):
                    print("in while")
                    print(sequence)
                    grille[i][k]=1
                    k=k-1
                    s=s-1
                del sequence[l-1]
            else:
                # Both colours possible: measure the span of already-black
                # cells to the left of j.
                nb=j-1
                min=j
                max=-1
                while(nb>=0):
                    if grille[i][nb]==1:
                        # Bug fix: track the column index nb, not the cell value.
                        if nb>max:
                            max=nb
                        if nb<min:
                            min=nb
                    nb=nb-1
                print("max",max)
                print("min",min)
                l=len(sequence)
                print("************l",l,"max-min+1", max-min+1)
                print((l==1 and max-min+1==sequence[l-1]))
                if not (l==1 and max-min+1==sequence[l-1]):
                    print("why?")
                    del sequence[l-1]
                print("fin")
        if resultat_noir==False and resultat_blanc==False and j==N-1:
            print(i, j)
            return (False, grille)
        j=k-1
        l=len(sequence)
        if(j<0 and l>0):
            # NOTE(review): resetting a COLUMN cursor to M-1 in a row routine
            # looks suspicious -- confirm intended restart position.
            del sequence[l-1]
            j=M-1
            l=len(sequence)
    if(bool==1):
        return (True,grille)
    print("what")
    return (False, grille)
def coloreCol(grille, j):
    """Deduce and colour the forced cells of column ``j``.

    Mirror of ``coloreLig``: walks the column's clue sequence from bottom to
    top, asking ``coloriage_possible_colonne`` whether the current cell can
    be white and/or black.  Returns (ok, grille): ok is False when neither
    colour is feasible at the bottom-most cell, True once at least one block
    was placed.

    Fixes applied: the extent of an existing black run is now tracked with
    the row index ``nb`` (previously the cell VALUE ``grille[nb][j]`` --
    always 1 -- was compared against the min/max indices); removed the
    unused ``bool_del`` flag and dead commented-out code.
    """
    sequence=creer_sequence(j,2)# 1 means row, 2 means column
    l=len(sequence)
    i=M-1
    bool=0
    print("----------------------",sequence)
    while(i>=0 and l>0):
        print("i",i, "j", j, "l", l)
        resultat_blanc=(coloriage_possible_colonne(grille, sequence, i, j, l, 0))
        print("noir")
        resultat_noir=(coloriage_possible_colonne(grille, sequence, i, j, l, 1) )
        print("resultat_blanc, resultat_noir",resultat_blanc, resultat_noir)
        print("l=",l)
        k=i
        if resultat_noir==True:
            bool=1
            if resultat_blanc==False:
                # Black is forced: colour the whole block ending at row i.
                s=sequence[l-1]
                k=i
                while(s>0):
                    print("welcome")
                    grille[k][j]=1
                    k=k-1
                    s=s-1
                del sequence[l-1]
            else:
                # Both colours possible: measure the span of already-black
                # cells above row i.
                nb=i-1
                min=i
                max=-1
                while(nb>=0):
                    if grille[nb][j]==1:
                        # Bug fix: track the row index nb, not the cell value.
                        if nb>max:
                            max=nb
                        if nb<min:
                            min=nb
                    nb=nb-1
                if not (l==1 and max-min+1==sequence[l-1]):
                    print("why?")
                    del sequence[l-1]
                print("fin")
        if resultat_noir==False and resultat_blanc==False and i==M-1:
            print(i, j)
            return (False, grille)
        i=k-1
        l=len(sequence)
    if(bool==1):
        return (True,grille)
    print("what")
    return (False, grille)
def coloration(grille):
    """Propagate row/column deductions until a fixed point.

    Works on a copy of ``grille``; every row and column starts on the
    worklist, and colouring a cell black re-queues the crossing line.

    Returns
    -------
    (status, grid)
        (1, grid)  -- fully coloured (every cell is 0 or 1)
        (0, grid)  -- propagation finished but cells remain undecided
        on contradiction, an empty grid with status -1 (row failure) or
        False (column failure).
        NOTE(review): the -1 vs False failure codes are inconsistent --
        probably both were meant to be the same sentinel; confirm callers.
    """
    grille_d=dupliquer(grille)
    LigneAVoir=set()
    ColonneAVoir=set()
    i=M-1
    j=N-1
    # Seed the worklists with every row and every column index.
    while (i>=0):
        LigneAVoir.add(i)
        i=i-1
    while(j>=0):
        ColonneAVoir.add(j)
        j=j-1
    while ((LigneAVoir!=set())or(ColonneAVoir!=set())):
        while (LigneAVoir):
            i=LigneAVoir.pop()
            (ok,grille_d)=coloreLig(grille_d, i)
            if ok==False:
                print("hi")
                print(grille_d)
                return (-1, [[]])#empty matrix!!
            # Re-queue every column that now has a black cell in this row.
            Nouveaux=set()
            for j in range(N):
                if grille_d[i][j]==1:
                    Nouveaux.add(j)
            ColonneAVoir=ColonneAVoir.union(Nouveaux)
        while(ColonneAVoir):
            j=ColonneAVoir.pop()
            (ok,grille_d)=coloreCol(grille_d, j)
            if ok==False:
                print("hello")
                return (False, [[]])#empty matrix!
            # Re-queue every row that now has a black cell in this column.
            Nouveaux=set()
            for i in range(M):
                if grille_d[i][j]==1:
                    Nouveaux.add(i)
            print("-------Nouveaux-------",Nouveaux)
            print("-------LigneAVoir-------",LigneAVoir)
            LigneAVoir=LigneAVoir.union(Nouveaux)
            print("-------LigneAVoir-------",LigneAVoir)
    for i in range(M) :
        for j in range(N) :
            if(grille_d[i][j]!=0 and grille_d[i][j]!=1):
                return (0, grille_d)
    return (1,grille_d)
#print(coloriage_possible(grille, sequence1, 1, 1, 2))
#print(coloriage_possible(grille, sequence2, 1, 3, 2))
# Script entry point: load the puzzle description into the global grid state,
# then run the constraint-propagation colouring and print the result.
# NOTE(review): `lire_fichier`, `s_grille` and `grille` are defined earlier in
# this file (outside this excerpt).
lire_fichier(s_grille)
print(s_grille)
print(coloration(grille))
| [
"numpy.full",
"sys.exit"
] | [((92, 118), 'numpy.full', 'np.full', (['(M + N, M + N)', '(0)'], {}), '((M + N, M + N), 0)\n', (99, 118), True, 'import numpy as np\n'), ((148, 167), 'numpy.full', 'np.full', (['(M, N)', '(-1)'], {}), '((M, N), -1)\n', (155, 167), True, 'import numpy as np\n'), ((13363, 13382), 'numpy.full', 'np.full', (['(M, N)', '(-1)'], {}), '((M, N), -1)\n', (13370, 13382), True, 'import numpy as np\n'), ((375, 423), 'sys.exit', 'sys.exit', (['"""ERROR. Can\'t read supplied filename."""'], {}), '("ERROR. Can\'t read supplied filename.")\n', (383, 423), False, 'import sys\n')] |
import sys
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
class Tox(TestCommand):
    """setuptools test command that delegates to tox (``python setup.py test``)."""

    user_options = [('tox-args=', 'a', "Arguments to pass to tox")]

    def initialize_options(self):
        TestCommand.initialize_options(self)
        # Raw argument string handed over to tox, set via --tox-args.
        self.tox_args = None

    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # Import here since eggs aren't loaded outside of this scope
        import tox
        import shlex
        tox_argv = shlex.split(self.tox_args) if self.tox_args else self.tox_args
        sys.exit(tox.cmdline(args=tox_argv))
# Package metadata; `python setup.py test` runs the tox suite via the Tox
# command class defined above.
setup(
    name='victor',
    version='0.1.1',
    description="A simple tool for debugging and profiling applications",
    url='https://github.com/jcomo/victor',
    author='<NAME>',
    author_email='<EMAIL>',
    # Ship only the package itself, not docs/tests/scripts.
    packages=find_packages(exclude=['docs', 'tests', 'scripts']),
    install_requires=['six>=1.10'],
    tests_require=['tox'],
    cmdclass={'test': Tox},
    classifiers=[
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
    ],
    keywords='debug profile python test',
)
| [
"shlex.split",
"setuptools.find_packages",
"setuptools.command.test.test.finalize_options",
"setuptools.command.test.test.initialize_options",
"sys.exit",
"tox.cmdline"
] | [((249, 285), 'setuptools.command.test.test.initialize_options', 'TestCommand.initialize_options', (['self'], {}), '(self)\n', (279, 285), True, 'from setuptools.command.test import test as TestCommand\n'), ((356, 390), 'setuptools.command.test.test.finalize_options', 'TestCommand.finalize_options', (['self'], {}), '(self)\n', (384, 390), True, 'from setuptools.command.test import test as TestCommand\n'), ((695, 717), 'tox.cmdline', 'tox.cmdline', ([], {'args': 'args'}), '(args=args)\n', (706, 717), False, 'import tox\n'), ((726, 741), 'sys.exit', 'sys.exit', (['errno'], {}), '(errno)\n', (734, 741), False, 'import sys\n'), ((984, 1035), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['docs', 'tests', 'scripts']"}), "(exclude=['docs', 'tests', 'scripts'])\n", (997, 1035), False, 'from setuptools import setup, find_packages\n'), ((651, 677), 'shlex.split', 'shlex.split', (['self.tox_args'], {}), '(self.tox_args)\n', (662, 677), False, 'import shlex\n')] |
#!/usr/bin/env python3
# -*- coding: future_fstrings -*-
import argparse, sys, os
from collections import defaultdict
from db_sync_tool.utility import helper
# Workaround for ModuleNotFoundError
sys.path.append(os.getcwd())
from file_sync_tool import sync
def main(args=None):
    """
    Main entry point for the command line. Parse the arguments and call to the main process.

    :param args: optional dict of pre-set arguments; when omitted (or None),
        the real command line is parsed instead.
    :return: None
    """
    # Bug fix: the previous signature used a mutable default (``args={}``),
    # which is shared between calls; use the None-sentinel idiom instead.
    if args is None:
        args = {}
    args = get_arguments(args)
    config = build_config(args)
    sync.Sync(
        config_file=args.config_file,
        verbose=args.verbose,
        mute=args.mute,
        host_file=args.host_file,
        config=config
    )
def get_arguments(args):
    """
    Parses and returns script arguments
    :param args:
    :return:
    """
    parser = argparse.ArgumentParser(prog='file_sync_tool', description='A tool for automatic file synchronization from and to host systems.')
    # (short flag, long flag, extra add_argument keywords) for every option;
    # all options are optional, so required=False is applied uniformly below.
    option_specs = [
        ('-f', '--config-file', {'help': 'Path to configuration file', 'type': str}),
        ('-v', '--verbose', {'help': 'Enable extended console output', 'action': 'store_true'}),
        ('-m', '--mute', {'help': 'Mute console output', 'action': 'store_true'}),
        ('-o', '--host-file', {'help': 'Using an additional hosts file for merging hosts information with the configuration file', 'type': str}),
        ('-th', '--target-host', {'help': 'SSH host to target system', 'type': str}),
        ('-tu', '--target-user', {'help': 'SSH user for target system', 'type': str}),
        ('-tpw', '--target-password', {'help': 'SSH password for target system', 'type': str}),
        ('-tk', '--target-key', {'help': 'File path to SSH key for target system', 'type': str}),
        ('-tpo', '--target-port', {'help': 'SSH port for target system', 'type': int}),
        ('-oh', '--origin-host', {'help': 'SSH host to origin system', 'type': str}),
        ('-ou', '--origin-user', {'help': 'SSH user for origin system', 'type': str}),
        ('-opw', '--origin-password', {'help': 'SSH password for origin system', 'type': str}),
        ('-ok', '--origin-key', {'help': 'File path to SSH key for origin system', 'type': str}),
        ('-opo', '--origin-port', {'help': 'SSH port for origin system', 'type': int}),
        ('-fo', '--files-origin', {'help': 'File path for origin source of file sync', 'type': str}),
        ('-ft', '--files-target', {'help': 'File path for target destination of file sync', 'type': str}),
        ('-fe', '--files-exclude', {'help': 'Excludes for file sync', 'type': str}),
        ('-fop', '--files-option', {'help': 'Additional rsync options', 'type': str}),
    ]
    for short_flag, long_flag, extra in option_specs:
        parser.add_argument(short_flag, long_flag, required=False, **extra)
    return parser.parse_args(helper.dict_to_args(args))
def build_config(args):
    """
    Building an optional config from parsed command line arguments.

    Only values that were actually provided are copied, so a configuration
    file can still supply anything omitted on the command line.

    :param args: argparse.Namespace holding the parsed arguments
    :return: defaultdict with optional 'target', 'origin' and 'files' sections
    """
    config = defaultdict(dict)
    config['target'] = defaultdict(dict)
    config['origin'] = defaultdict(dict)

    # (section, config key, provided value) for the flat SSH options.
    simple_options = [
        ('target', 'host', args.target_host),
        ('target', 'user', args.target_user),
        ('target', 'password', args.target_password),
        ('target', 'ssh_key', args.target_key),
        ('target', 'port', args.target_port),
        ('origin', 'host', args.origin_host),
        ('origin', 'user', args.origin_user),
        ('origin', 'password', args.origin_password),
        ('origin', 'ssh_key', args.origin_key),
        ('origin', 'port', args.origin_port),
    ]
    for section, key, value in simple_options:
        if value is not None:
            config[section][key] = value

    if args.files_origin is not None:
        _first_files_entry(config)['origin'] = args.files_origin
    if args.files_target is not None:
        _first_files_entry(config)['target'] = args.files_target
    if args.files_exclude is not None:
        # Bug fix: this previously indexed config['files']['config'][0]
        # directly and raised KeyError when neither --files-origin nor
        # --files-target was given alongside --files-exclude.
        _first_files_entry(config)['exclude'] = args.files_exclude.split(',')
    if args.files_option is not None:
        config['files']['option'] = args.files_option.split(',')
    return config


def _first_files_entry(config):
    """Return the first file-sync config entry, creating it if necessary."""
    if 'config' not in config['files']:
        config['files']['config'] = []
    if not config['files']['config']:
        config['files']['config'].append({})
    return config['files']['config'][0]
# Run the CLI entry point only when executed as a script (not on import).
if __name__ == "__main__":
    main()
| [
"argparse.ArgumentParser",
"db_sync_tool.utility.helper.dict_to_args",
"file_sync_tool.sync.Sync",
"os.getcwd",
"collections.defaultdict"
] | [((212, 223), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (221, 223), False, 'import argparse, sys, os\n'), ((484, 607), 'file_sync_tool.sync.Sync', 'sync.Sync', ([], {'config_file': 'args.config_file', 'verbose': 'args.verbose', 'mute': 'args.mute', 'host_file': 'args.host_file', 'config': 'config'}), '(config_file=args.config_file, verbose=args.verbose, mute=args.\n mute, host_file=args.host_file, config=config)\n', (493, 607), False, 'from file_sync_tool import sync\n'), ((775, 909), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""file_sync_tool"""', 'description': '"""A tool for automatic file synchronization from and to host systems."""'}), "(prog='file_sync_tool', description=\n 'A tool for automatic file synchronization from and to host systems.')\n", (798, 909), False, 'import argparse, sys, os\n'), ((4474, 4491), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (4485, 4491), False, 'from collections import defaultdict\n'), ((4515, 4532), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (4526, 4532), False, 'from collections import defaultdict\n'), ((4556, 4573), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (4567, 4573), False, 'from collections import defaultdict\n'), ((4330, 4355), 'db_sync_tool.utility.helper.dict_to_args', 'helper.dict_to_args', (['args'], {}), '(args)\n', (4349, 4355), False, 'from db_sync_tool.utility import helper\n')] |