code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
# Copyright 2016 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from nose.tools import assert_equal, raises
from ..connection.info import custom_setup, custom_teardown
from imcsdk.apis.server.storage import _list_to_string
from imcsdk.apis.server.storage import _flatten_list
from imcsdk.apis.server.storage import _flatten_to_string
from imcsdk.apis.server.storage import vd_name_derive
from imcsdk.apis.server.storage import _human_to_bytes
from imcsdk.apis.server.storage import _bytes_to_human
from imcsdk.apis.server.storage import _pd_min_size_get
from imcsdk.apis.server.storage import _pd_total_size_get
from imcsdk.apis.server.storage import _vd_span_depth_get
from imcsdk.apis.server.storage import _raid_max_size_get
from imcsdk.apis.server.storage import virtual_drive_create
from imcsdk.apis.server.storage import virtual_drive_delete
from imcsdk.apis.server.storage import virtual_drive_exists
from imcsdk.apis.server.storage import controller_encryption_enable, \
controller_encryption_disable, controller_encryption_exists, \
controller_encryption_modify_security_key, \
controller_encryption_key_id_generate, controller_encryption_key_generate
from imcsdk.apis.server.storage import \
is_physical_drive_encryption_capable, physical_drive_set_jbod_mode, \
physical_drive_encryption_enable, physical_drive_encryption_disable, \
is_physical_drive_encryption_enabled, physical_drive_get, \
physical_drive_set_unconfigured_good
from imcsdk.imccoreutils import get_server_dn
# Storage controller under test; all tests in this module target it.
CONTROLLER_TYPE="SAS"
CONTROLLER_SLOT="SLOT-HBA"
# Physical drive slot exercised by the JBOD / drive-encryption tests.
PD_DRIVE_SLOT=4
# Set by test_pd_encryption_enable(); gates the drive-encryption tests.
is_pd_capable = False
def test_list_to_string():
    """_list_to_string renders each inner list as a bracketed, comma-joined group."""
    cases = (
        ([[1]], '[1]'),
        ([[1, 2]], '[1,2]'),
        ([[1, 2], [3, 4]], '[1,2][3,4]'),
        ([[1], [4, 5, 6], [7]], '[1][4,5,6][7]'),
    )
    for groups, expected in cases:
        assert_equal(_list_to_string(groups), expected)
def test_flatten_list():
    """_flatten_list concatenates a list of lists into one flat list."""
    cases = (
        ([[1]], [1]),
        ([[1, 2]], [1, 2]),
        ([[1, 2], [3, 4]], [1, 2, 3, 4]),
    )
    for nested, expected in cases:
        assert_equal(_flatten_list(nested), expected)
@raises(Exception)
def test_flatten_list_error():
    """_flatten_list must raise when given a flat (non-nested) list."""
    _flatten_list([1])
def test_flatten_to_string():
    """_flatten_to_string joins every nested element into one bare string."""
    cases = (
        ([[1]], '1'),
        ([[1, 2]], '12'),
        ([[1, 2], [3, 4]], '1234'),
    )
    for nested, expected in cases:
        assert_equal(_flatten_to_string(nested), expected)
def test_vd_name_derive():
    """vd_name_derive builds names of the form RAID<level>_<flattened drive ids>."""
    cases = (
        (0, [[1]], 'RAID0_1'),
        (1, [[1, 2]], 'RAID1_12'),
        (10, [[1, 2], [3, 4]], 'RAID10_1234'),
    )
    for raid_level, drive_group, expected in cases:
        assert_equal(vd_name_derive(raid_level, drive_group), expected)
def test_human_to_bytes():
    """_human_to_bytes understands every binary suffix from KB up to YB."""
    cases = (
        ("1 KB", 1024),
        ("100 MB", 100 * 1024 ** 2),
        ("121 GB", 121 * 1024 ** 3),
        ("1 TB", 1024 ** 4),
        ("1 PB", 1024 ** 5),
        ("1 EB", 1024 ** 6),
        ("1 ZB", 1024 ** 7),
        ("1 YB", 1024 ** 8),
        ("3814697 MB", 3814697 * 1024 ** 2),
    )
    for text, expected in cases:
        assert_equal(_human_to_bytes(text), expected)
def test_bytes_to_human():
    """_bytes_to_human formats byte counts, optionally forcing a target unit."""
    # (byte count, forced unit or None, expected string)
    cases = (
        (100 * 1024 ** 2, None, "100 MB"),
        (100 * 1024 ** 3, None, "100 GB"),
        (100 * 1024 ** 3, "MB", "102400 MB"),
        (3814697 * 1024 ** 2, "MB", "3814697 MB"),
    )
    for size, unit, expected in cases:
        if unit is None:
            assert_equal(_bytes_to_human(size), expected)
        else:
            assert_equal(_bytes_to_human(size, unit), expected)
def test_pd_min_size_get():
    """_pd_min_size_get returns the smallest drive size in the list."""
    cases = (
        ([1024 ** 2, 1024 ** 3], 1024 ** 2),
        ([1024 ** 3, 1024], 1024),
        ([1024 ** 3, 1024, 1024 * 10], 1024),
    )
    for sizes, expected in cases:
        assert_equal(_pd_min_size_get(sizes), expected)
def test_pd_total_size_get():
    """_pd_total_size_get sums every drive size in the list."""
    cases = (
        ([1024 ** 2, 1024 ** 3], 1024 ** 2 + 1024 ** 3),
        ([1024 ** 3, 1024], 1024 ** 3 + 1024),
        ([1024 ** 3, 1024, 1024 * 10], 1024 ** 3 + 1024 + 1024 * 10),
    )
    for sizes, expected in cases:
        assert_equal(_pd_total_size_get(sizes), expected)
def test_vd_spand_depth_get():
    """_vd_span_depth_get returns the number of spans (inner lists).

    NOTE: the test name's "spand" spelling is kept as-is so the collected
    test id does not change.
    """
    cases = (
        ([[1]], 1),
        ([[1, 2], [3, 4]], 2),
        ([[1, 2, 3], [4], [5, 6]], 3),
        ([[1], [2], [3], [4], [5, 6]], 5),
    )
    for drive_group, expected in cases:
        assert_equal(_vd_span_depth_get(drive_group), expected)
def test_raid_max_size_get():
    """_raid_max_size_get yields usable capacity per RAID level.

    Cases cover RAID 0/1 (mirroring halves capacity), RAID 5/50 (one
    parity drive per span) and RAID 6/60 (two parity drives per span).
    """
    unit = 1000 * 1024 ** 3  # 1000 GB in bytes, used as the per-drive size
    # (raid level, total size, min drive size, span depth, expected usable size)
    cases = (
        (0, unit, unit, 1, unit),
        (1, unit, unit, 1, unit / 2),
        (5, 6 * unit, unit, 2, 6 * unit - 2 * 1 * unit),
        (50, 6 * unit, unit, 2, 6 * unit - 2 * 1 * unit),
        (6, 6 * unit, unit, 2, 6 * unit - 2 * 2 * unit),
        (60, 6 * unit, unit, 2, 6 * unit - 2 * 2 * unit),
    )
    for raid_level, size, min_size, span_depth, expected in cases:
        assert_equal(_raid_max_size_get(raid_level, size, min_size, span_depth),
                     expected)
# IMC connection handle shared by every test in this module; populated by
# setup_module() before the first test runs.
handle = None
def setup_module():
    """Open the IMC connection used by all tests (nose module-level fixture)."""
    global handle
    handle = custom_setup()
def teardown_module():
    """Close the shared IMC connection (nose module-level fixture)."""
    custom_teardown(handle)
def test_vd_create_delete():
    """Create then delete a virtual drive for every supported RAID level.

    Silently skips unless the server exposes a SAS controller in SLOT-MEZZ
    with physical drives 1-6 present.
    """
    server_dn = get_server_dn(handle, server_id=1)
    slot_dn = server_dn + "/board/storage-SAS-SLOT-MEZZ"
    if handle.query_dn(slot_dn) is None:
        return
    for drive in range(1, 7):
        if handle.query_dn(slot_dn + "/pd-" + str(drive)) is None:
            return
    # (raid level, drive group layout)
    layouts = (
        (0, [[1]]),
        (1, [[1, 2, 3, 4]]),
        (5, [[1, 2, 3]]),
        (6, [[1, 2, 3]]),
        (10, [[1, 2], [3, 4], [5, 6]]),
        (50, [[1, 2, 3], [4, 5, 6]]),
        (60, [[1, 2, 3], [4, 5, 6]]),
    )
    for raid_level, drive_group in layouts:
        vd = virtual_drive_create(handle=handle,
                                  drive_group=drive_group,
                                  controller_type=CONTROLLER_TYPE,
                                  controller_slot=CONTROLLER_SLOT,
                                  raid_level=raid_level,
                                  self_encrypt=True)
        virtual_drive_delete(handle=handle,
                             controller_slot=CONTROLLER_SLOT,
                             name=vd.virtual_drive_name)
def test_controller_encryption_enable():
    """Enable controller encryption, then verify it is reported as enabled."""
    controller_encryption_enable(handle,
                                 controller_type=CONTROLLER_TYPE,
                                 controller_slot=CONTROLLER_SLOT,
                                 key_id='<KEY>', security_key='<KEY>')
    enabled = controller_encryption_exists(handle,
                                           CONTROLLER_TYPE,
                                           CONTROLLER_SLOT)[0]
    assert_equal(enabled, True)
def test_controller_encryption_modify():
    """Rotate the controller security key from the existing key to a new one."""
    controller_encryption_modify_security_key(
        handle,
        controller_type=CONTROLLER_TYPE,
        controller_slot=CONTROLLER_SLOT,
        existing_security_key='<KEY>',
        security_key='<KEY>')
def test_controller_generated_keys():
    """Generate a key id and security key, then apply the generated key.

    The assertions encode the assumed IMC limits: key ids of at most 256
    characters and security keys of at most 32.
    """
    key_id = controller_encryption_key_id_generate(
        handle,
        controller_type=CONTROLLER_TYPE,
        controller_slot=CONTROLLER_SLOT)
    assert_equal(len(key_id) <= 256 , True)
    key = controller_encryption_key_generate(
        handle,
        controller_type=CONTROLLER_TYPE,
        controller_slot=CONTROLLER_SLOT)
    assert_equal(len(key) <= 32, True)
    # Rotate the controller to the freshly generated key.
    controller_encryption_modify_security_key(
        handle,
        controller_type=CONTROLLER_TYPE,
        controller_slot=CONTROLLER_SLOT,
        existing_security_key='<KEY>',
        security_key=key)
'''
def test_controller_jbod_mode_enable():
controller_jbod_mode_enable(handle,
controller_type=CONTROLLER_TYPE,
controller_slot=CONTROLLER_SLOT)
assert_equal(is_controller_jbod_mode_enabled(
handle,
controller_type=CONTROLLER_TYPE,
controller_slot=CONTROLLER_SLOT),
True)
'''
def test_pd_jbod_mode_enable():
    """Put drive PD_DRIVE_SLOT into JBOD mode and confirm its reported state."""
    physical_drive_set_jbod_mode(handle,
                                 controller_type=CONTROLLER_TYPE,
                                 controller_slot=CONTROLLER_SLOT,
                                 drive_slot=PD_DRIVE_SLOT)
    drive = physical_drive_get(handle, controller_type=CONTROLLER_TYPE,
                               controller_slot=CONTROLLER_SLOT,
                               drive_slot=PD_DRIVE_SLOT)
    assert_equal(drive.drive_state, 'JBOD')
@raises(Exception)
def test_invalid_pd_jbod_mode_enable():
    """Requesting JBOD mode on drive slot 3 (presumably not JBOD-eligible
    on the test server) must raise."""
    physical_drive_set_jbod_mode(handle,
                                 controller_type=CONTROLLER_TYPE,
                                 controller_slot=CONTROLLER_SLOT,
                                 drive_slot=3)
def test_pd_encryption_enable():
    """Enable drive-level encryption when the drive supports it.

    Records the capability in the module-level ``is_pd_capable`` flag so the
    matching disable test can skip on incapable hardware.
    """
    global is_pd_capable
    is_pd_capable = is_physical_drive_encryption_capable(
        handle,
        controller_type=CONTROLLER_TYPE,
        controller_slot=CONTROLLER_SLOT,
        drive_slot=PD_DRIVE_SLOT)
    if not is_pd_capable:
        return
    physical_drive_encryption_enable(
        handle,
        controller_type=CONTROLLER_TYPE,
        controller_slot=CONTROLLER_SLOT,
        drive_slot=PD_DRIVE_SLOT)
    assert_equal(
        is_physical_drive_encryption_enabled(
            handle,
            controller_type=CONTROLLER_TYPE,
            controller_slot=CONTROLLER_SLOT,
            drive_slot=PD_DRIVE_SLOT),
        True)
def test_pd_set_unconfigured_good():
    """Return the drive to 'Unconfigured Good' and confirm its state."""
    physical_drive_set_unconfigured_good(
        handle,
        controller_type=CONTROLLER_TYPE,
        controller_slot=CONTROLLER_SLOT,
        drive_slot=PD_DRIVE_SLOT)
    drive = physical_drive_get(handle, controller_type=CONTROLLER_TYPE,
                               controller_slot=CONTROLLER_SLOT,
                               drive_slot=PD_DRIVE_SLOT)
    assert_equal(drive.drive_state, 'Unconfigured Good')
def test_pd_encryption_disable():
    """Disable drive-level encryption and verify it reports as disabled.

    Skips when test_pd_encryption_enable() found the drive incapable.
    """
    if not is_pd_capable:
        return
    physical_drive_encryption_disable(
        handle,
        controller_type=CONTROLLER_TYPE,
        controller_slot=CONTROLLER_SLOT,
        drive_slot=PD_DRIVE_SLOT)
    assert_equal(
        is_physical_drive_encryption_enabled(
            handle,
            controller_type=CONTROLLER_TYPE,
            controller_slot=CONTROLLER_SLOT,
            drive_slot=PD_DRIVE_SLOT),
        False)
'''
def test_controller_jbod_mode_disable():
controller_jbod_mode_disable(handle,
controller_type=CONTROLLER_TYPE,
controller_slot=CONTROLLER_SLOT)
assert_equal(is_controller_jbod_mode_enabled(
handle,
controller_type=CONTROLLER_TYPE,
controller_slot=CONTROLLER_SLOT),
False)
'''
def test_vd_create_delete_with_encryption():
    """Create a self-encrypting VD, verify it exists, delete it, verify it is gone."""
    vd_name = 'test-vd'
    virtual_drive_create(
        handle,
        drive_group=[[PD_DRIVE_SLOT]],
        controller_type=CONTROLLER_TYPE,
        controller_slot=CONTROLLER_SLOT,
        raid_level=0,
        self_encrypt=True,
        virtual_drive_name=vd_name)
    exists, err = virtual_drive_exists(handle,
                                       controller_type=CONTROLLER_TYPE,
                                       controller_slot=CONTROLLER_SLOT,
                                       virtual_drive_name=vd_name)
    assert_equal(exists, True)
    time.sleep(2)  # brief settle time before deleting the freshly created VD
    virtual_drive_delete(handle,
                         controller_type=CONTROLLER_TYPE,
                         controller_slot=CONTROLLER_SLOT,
                         name=vd_name)
    exists, err = virtual_drive_exists(handle,
                                       controller_type=CONTROLLER_TYPE,
                                       controller_slot=CONTROLLER_SLOT,
                                       virtual_drive_name=vd_name)
    assert_equal(exists, False)
def test_controller_encryption_disable():
    """Disable controller encryption and verify it reports as disabled."""
    controller_encryption_disable(handle,
                                  controller_type=CONTROLLER_TYPE,
                                  controller_slot=CONTROLLER_SLOT)
    enabled = controller_encryption_exists(handle,
                                           controller_type=CONTROLLER_TYPE,
                                           controller_slot=CONTROLLER_SLOT)[0]
    assert_equal(enabled, False)
| [
"imcsdk.apis.server.storage._pd_min_size_get",
"imcsdk.apis.server.storage.physical_drive_set_jbod_mode",
"imcsdk.apis.server.storage._human_to_bytes",
"time.sleep",
"imcsdk.apis.server.storage.controller_encryption_key_generate",
"imcsdk.apis.server.storage.physical_drive_encryption_disable",
"nose.too... | [((2786, 2803), 'nose.tools.raises', 'raises', (['Exception'], {}), '(Exception)\n', (2792, 2803), False, 'from nose.tools import assert_equal, raises\n'), ((11514, 11531), 'nose.tools.raises', 'raises', (['Exception'], {}), '(Exception)\n', (11520, 11531), False, 'from nose.tools import assert_equal, raises\n'), ((2839, 2857), 'imcsdk.apis.server.storage._flatten_list', '_flatten_list', (['[1]'], {}), '([1])\n', (2852, 2857), False, 'from imcsdk.apis.server.storage import _flatten_list\n'), ((7623, 7657), 'imcsdk.imccoreutils.get_server_dn', 'get_server_dn', (['handle'], {'server_id': '(1)'}), '(handle, server_id=1)\n', (7636, 7657), False, 'from imcsdk.imccoreutils import get_server_dn\n'), ((9110, 9254), 'imcsdk.apis.server.storage.controller_encryption_enable', 'controller_encryption_enable', (['handle'], {'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT', 'key_id': '"""<KEY>"""', 'security_key': '"""<KEY>"""'}), "(handle, controller_type=CONTROLLER_TYPE,\n controller_slot=CONTROLLER_SLOT, key_id='<KEY>', security_key='<KEY>')\n", (9138, 9254), False, 'from imcsdk.apis.server.storage import controller_encryption_enable, controller_encryption_disable, controller_encryption_exists, controller_encryption_modify_security_key, controller_encryption_key_id_generate, controller_encryption_key_generate\n'), ((9604, 9782), 'imcsdk.apis.server.storage.controller_encryption_modify_security_key', 'controller_encryption_modify_security_key', (['handle'], {'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT', 'existing_security_key': '"""<KEY>"""', 'security_key': '"""<KEY>"""'}), "(handle, controller_type=\n CONTROLLER_TYPE, controller_slot=CONTROLLER_SLOT, existing_security_key\n ='<KEY>', security_key='<KEY>')\n", (9645, 9782), False, 'from imcsdk.apis.server.storage import controller_encryption_enable, controller_encryption_disable, controller_encryption_exists, controller_encryption_modify_security_key, 
controller_encryption_key_id_generate, controller_encryption_key_generate\n'), ((9932, 10048), 'imcsdk.apis.server.storage.controller_encryption_key_id_generate', 'controller_encryption_key_id_generate', (['handle'], {'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT'}), '(handle, controller_type=\n CONTROLLER_TYPE, controller_slot=CONTROLLER_SLOT)\n', (9969, 10048), False, 'from imcsdk.apis.server.storage import controller_encryption_enable, controller_encryption_disable, controller_encryption_exists, controller_encryption_modify_security_key, controller_encryption_key_id_generate, controller_encryption_key_generate\n'), ((10148, 10260), 'imcsdk.apis.server.storage.controller_encryption_key_generate', 'controller_encryption_key_generate', (['handle'], {'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT'}), '(handle, controller_type=CONTROLLER_TYPE,\n controller_slot=CONTROLLER_SLOT)\n', (10182, 10260), False, 'from imcsdk.apis.server.storage import controller_encryption_enable, controller_encryption_disable, controller_encryption_exists, controller_encryption_modify_security_key, controller_encryption_key_id_generate, controller_encryption_key_generate\n'), ((10350, 10524), 'imcsdk.apis.server.storage.controller_encryption_modify_security_key', 'controller_encryption_modify_security_key', (['handle'], {'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT', 'existing_security_key': '"""<KEY>"""', 'security_key': 'key'}), "(handle, controller_type=\n CONTROLLER_TYPE, controller_slot=CONTROLLER_SLOT, existing_security_key\n ='<KEY>', security_key=key)\n", (10391, 10524), False, 'from imcsdk.apis.server.storage import controller_encryption_enable, controller_encryption_disable, controller_encryption_exists, controller_encryption_modify_security_key, controller_encryption_key_id_generate, controller_encryption_key_generate\n'), ((11058, 11190), 'imcsdk.apis.server.storage.physical_drive_set_jbod_mode', 
'physical_drive_set_jbod_mode', (['handle'], {'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT', 'drive_slot': 'PD_DRIVE_SLOT'}), '(handle, controller_type=CONTROLLER_TYPE,\n controller_slot=CONTROLLER_SLOT, drive_slot=PD_DRIVE_SLOT)\n', (11086, 11190), False, 'from imcsdk.apis.server.storage import is_physical_drive_encryption_capable, physical_drive_set_jbod_mode, physical_drive_encryption_enable, physical_drive_encryption_disable, is_physical_drive_encryption_enabled, physical_drive_get, physical_drive_set_unconfigured_good\n'), ((11295, 11418), 'imcsdk.apis.server.storage.physical_drive_get', 'physical_drive_get', (['handle'], {'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT', 'drive_slot': 'PD_DRIVE_SLOT'}), '(handle, controller_type=CONTROLLER_TYPE, controller_slot\n =CONTROLLER_SLOT, drive_slot=PD_DRIVE_SLOT)\n', (11313, 11418), False, 'from imcsdk.apis.server.storage import is_physical_drive_encryption_capable, physical_drive_set_jbod_mode, physical_drive_encryption_enable, physical_drive_encryption_disable, is_physical_drive_encryption_enabled, physical_drive_get, physical_drive_set_unconfigured_good\n'), ((11474, 11510), 'nose.tools.assert_equal', 'assert_equal', (['mo.drive_state', '"""JBOD"""'], {}), "(mo.drive_state, 'JBOD')\n", (11486, 11510), False, 'from nose.tools import assert_equal, raises\n'), ((11576, 11696), 'imcsdk.apis.server.storage.physical_drive_set_jbod_mode', 'physical_drive_set_jbod_mode', (['handle'], {'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT', 'drive_slot': '(3)'}), '(handle, controller_type=CONTROLLER_TYPE,\n controller_slot=CONTROLLER_SLOT, drive_slot=3)\n', (11604, 11696), False, 'from imcsdk.apis.server.storage import is_physical_drive_encryption_capable, physical_drive_set_jbod_mode, physical_drive_encryption_enable, physical_drive_encryption_disable, is_physical_drive_encryption_enabled, physical_drive_get, 
physical_drive_set_unconfigured_good\n'), ((11872, 12013), 'imcsdk.apis.server.storage.is_physical_drive_encryption_capable', 'is_physical_drive_encryption_capable', (['handle'], {'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT', 'drive_slot': 'PD_DRIVE_SLOT'}), '(handle, controller_type=\n CONTROLLER_TYPE, controller_slot=CONTROLLER_SLOT, drive_slot=PD_DRIVE_SLOT)\n', (11908, 12013), False, 'from imcsdk.apis.server.storage import is_physical_drive_encryption_capable, physical_drive_set_jbod_mode, physical_drive_encryption_enable, physical_drive_encryption_disable, is_physical_drive_encryption_enabled, physical_drive_get, physical_drive_set_unconfigured_good\n'), ((12136, 12272), 'imcsdk.apis.server.storage.physical_drive_encryption_enable', 'physical_drive_encryption_enable', (['handle'], {'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT', 'drive_slot': 'PD_DRIVE_SLOT'}), '(handle, controller_type=CONTROLLER_TYPE,\n controller_slot=CONTROLLER_SLOT, drive_slot=PD_DRIVE_SLOT)\n', (12168, 12272), False, 'from imcsdk.apis.server.storage import is_physical_drive_encryption_capable, physical_drive_set_jbod_mode, physical_drive_encryption_enable, physical_drive_encryption_disable, is_physical_drive_encryption_enabled, physical_drive_get, physical_drive_set_unconfigured_good\n'), ((12317, 12458), 'imcsdk.apis.server.storage.is_physical_drive_encryption_enabled', 'is_physical_drive_encryption_enabled', (['handle'], {'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT', 'drive_slot': 'PD_DRIVE_SLOT'}), '(handle, controller_type=\n CONTROLLER_TYPE, controller_slot=CONTROLLER_SLOT, drive_slot=PD_DRIVE_SLOT)\n', (12353, 12458), False, 'from imcsdk.apis.server.storage import is_physical_drive_encryption_capable, physical_drive_set_jbod_mode, physical_drive_encryption_enable, physical_drive_encryption_disable, is_physical_drive_encryption_enabled, physical_drive_get, 
physical_drive_set_unconfigured_good\n'), ((12523, 12550), 'nose.tools.assert_equal', 'assert_equal', (['enabled', '(True)'], {}), '(enabled, True)\n', (12535, 12550), False, 'from nose.tools import assert_equal, raises\n'), ((12594, 12735), 'imcsdk.apis.server.storage.physical_drive_set_unconfigured_good', 'physical_drive_set_unconfigured_good', (['handle'], {'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT', 'drive_slot': 'PD_DRIVE_SLOT'}), '(handle, controller_type=\n CONTROLLER_TYPE, controller_slot=CONTROLLER_SLOT, drive_slot=PD_DRIVE_SLOT)\n', (12630, 12735), False, 'from imcsdk.apis.server.storage import is_physical_drive_encryption_capable, physical_drive_set_jbod_mode, physical_drive_encryption_enable, physical_drive_encryption_disable, is_physical_drive_encryption_enabled, physical_drive_get, physical_drive_set_unconfigured_good\n'), ((12773, 12896), 'imcsdk.apis.server.storage.physical_drive_get', 'physical_drive_get', (['handle'], {'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT', 'drive_slot': 'PD_DRIVE_SLOT'}), '(handle, controller_type=CONTROLLER_TYPE, controller_slot\n =CONTROLLER_SLOT, drive_slot=PD_DRIVE_SLOT)\n', (12791, 12896), False, 'from imcsdk.apis.server.storage import is_physical_drive_encryption_capable, physical_drive_set_jbod_mode, physical_drive_encryption_enable, physical_drive_encryption_disable, is_physical_drive_encryption_enabled, physical_drive_get, physical_drive_set_unconfigured_good\n'), ((12952, 13001), 'nose.tools.assert_equal', 'assert_equal', (['mo.drive_state', '"""Unconfigured Good"""'], {}), "(mo.drive_state, 'Unconfigured Good')\n", (12964, 13001), False, 'from nose.tools import assert_equal, raises\n'), ((13084, 13221), 'imcsdk.apis.server.storage.physical_drive_encryption_disable', 'physical_drive_encryption_disable', (['handle'], {'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT', 'drive_slot': 'PD_DRIVE_SLOT'}), '(handle, 
controller_type=CONTROLLER_TYPE,\n controller_slot=CONTROLLER_SLOT, drive_slot=PD_DRIVE_SLOT)\n', (13117, 13221), False, 'from imcsdk.apis.server.storage import is_physical_drive_encryption_capable, physical_drive_set_jbod_mode, physical_drive_encryption_enable, physical_drive_encryption_disable, is_physical_drive_encryption_enabled, physical_drive_get, physical_drive_set_unconfigured_good\n'), ((13266, 13407), 'imcsdk.apis.server.storage.is_physical_drive_encryption_enabled', 'is_physical_drive_encryption_enabled', (['handle'], {'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT', 'drive_slot': 'PD_DRIVE_SLOT'}), '(handle, controller_type=\n CONTROLLER_TYPE, controller_slot=CONTROLLER_SLOT, drive_slot=PD_DRIVE_SLOT)\n', (13302, 13407), False, 'from imcsdk.apis.server.storage import is_physical_drive_encryption_capable, physical_drive_set_jbod_mode, physical_drive_encryption_enable, physical_drive_encryption_disable, is_physical_drive_encryption_enabled, physical_drive_get, physical_drive_set_unconfigured_good\n'), ((13440, 13468), 'nose.tools.assert_equal', 'assert_equal', (['enabled', '(False)'], {}), '(enabled, False)\n', (13452, 13468), False, 'from nose.tools import assert_equal, raises\n'), ((13989, 14186), 'imcsdk.apis.server.storage.virtual_drive_create', 'virtual_drive_create', (['handle'], {'drive_group': '[[PD_DRIVE_SLOT]]', 'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT', 'raid_level': '(0)', 'self_encrypt': '(True)', 'virtual_drive_name': '"""test-vd"""'}), "(handle, drive_group=[[PD_DRIVE_SLOT]], controller_type\n =CONTROLLER_TYPE, controller_slot=CONTROLLER_SLOT, raid_level=0,\n self_encrypt=True, virtual_drive_name='test-vd')\n", (14009, 14186), False, 'from imcsdk.apis.server.storage import virtual_drive_create\n'), ((14253, 14381), 'imcsdk.apis.server.storage.virtual_drive_exists', 'virtual_drive_exists', (['handle'], {'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT', 
'virtual_drive_name': '"""test-vd"""'}), "(handle, controller_type=CONTROLLER_TYPE,\n controller_slot=CONTROLLER_SLOT, virtual_drive_name='test-vd')\n", (14273, 14381), False, 'from imcsdk.apis.server.storage import virtual_drive_exists\n'), ((14499, 14525), 'nose.tools.assert_equal', 'assert_equal', (['exists', '(True)'], {}), '(exists, True)\n', (14511, 14525), False, 'from nose.tools import assert_equal, raises\n'), ((14531, 14544), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (14541, 14544), False, 'import time\n'), ((14549, 14663), 'imcsdk.apis.server.storage.virtual_drive_delete', 'virtual_drive_delete', (['handle'], {'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT', 'name': '"""test-vd"""'}), "(handle, controller_type=CONTROLLER_TYPE,\n controller_slot=CONTROLLER_SLOT, name='test-vd')\n", (14569, 14663), False, 'from imcsdk.apis.server.storage import virtual_drive_delete\n'), ((14753, 14881), 'imcsdk.apis.server.storage.virtual_drive_exists', 'virtual_drive_exists', (['handle'], {'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT', 'virtual_drive_name': '"""test-vd"""'}), "(handle, controller_type=CONTROLLER_TYPE,\n controller_slot=CONTROLLER_SLOT, virtual_drive_name='test-vd')\n", (14773, 14881), False, 'from imcsdk.apis.server.storage import virtual_drive_exists\n'), ((14987, 15014), 'nose.tools.assert_equal', 'assert_equal', (['exists', '(False)'], {}), '(exists, False)\n', (14999, 15014), False, 'from nose.tools import assert_equal, raises\n'), ((15063, 15170), 'imcsdk.apis.server.storage.controller_encryption_disable', 'controller_encryption_disable', (['handle'], {'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT'}), '(handle, controller_type=CONTROLLER_TYPE,\n controller_slot=CONTROLLER_SLOT)\n', (15092, 15170), False, 'from imcsdk.apis.server.storage import controller_encryption_enable, controller_encryption_disable, controller_encryption_exists, 
controller_encryption_modify_security_key, controller_encryption_key_id_generate, controller_encryption_key_generate\n'), ((8593, 8742), 'imcsdk.apis.server.storage.virtual_drive_create', 'virtual_drive_create', ([], {'handle': 'handle', 'drive_group': "t['dg']", 'controller_type': "t['ct']", 'controller_slot': "t['cs']", 'raid_level': "t['r']", 'self_encrypt': '(True)'}), "(handle=handle, drive_group=t['dg'], controller_type=t[\n 'ct'], controller_slot=t['cs'], raid_level=t['r'], self_encrypt=True)\n", (8613, 8742), False, 'from imcsdk.apis.server.storage import virtual_drive_create\n'), ((8916, 9009), 'imcsdk.apis.server.storage.virtual_drive_delete', 'virtual_drive_delete', ([], {'handle': 'handle', 'controller_slot': "t['cs']", 'name': 'vd.virtual_drive_name'}), "(handle=handle, controller_slot=t['cs'], name=vd.\n virtual_drive_name)\n", (8936, 9009), False, 'from imcsdk.apis.server.storage import virtual_drive_delete\n'), ((2450, 2477), 'imcsdk.apis.server.storage._list_to_string', '_list_to_string', (["t['input']"], {}), "(t['input'])\n", (2465, 2477), False, 'from imcsdk.apis.server.storage import _list_to_string\n'), ((2735, 2763), 'imcsdk.apis.server.storage._flatten_list', '_flatten_list', (["test['input']"], {}), "(test['input'])\n", (2748, 2763), False, 'from imcsdk.apis.server.storage import _flatten_list\n'), ((3096, 3129), 'imcsdk.apis.server.storage._flatten_to_string', '_flatten_to_string', (["test['input']"], {}), "(test['input'])\n", (3114, 3129), False, 'from imcsdk.apis.server.storage import _flatten_to_string\n'), ((3428, 3468), 'imcsdk.apis.server.storage.vd_name_derive', 'vd_name_derive', (["test['raid']", "test['dg']"], {}), "(test['raid'], test['dg'])\n", (3442, 3468), False, 'from imcsdk.apis.server.storage import vd_name_derive\n'), ((4207, 4237), 'imcsdk.apis.server.storage._human_to_bytes', '_human_to_bytes', (["test['input']"], {}), "(test['input'])\n", (4222, 4237), False, 'from imcsdk.apis.server.storage import _human_to_bytes\n'), 
((5106, 5137), 'imcsdk.apis.server.storage._pd_min_size_get', '_pd_min_size_get', (["test['input']"], {}), "(test['input'])\n", (5122, 5137), False, 'from imcsdk.apis.server.storage import _pd_min_size_get\n'), ((5549, 5582), 'imcsdk.apis.server.storage._pd_total_size_get', '_pd_total_size_get', (["test['input']"], {}), "(test['input'])\n", (5567, 5582), False, 'from imcsdk.apis.server.storage import _pd_total_size_get\n'), ((5916, 5949), 'imcsdk.apis.server.storage._vd_span_depth_get', '_vd_span_depth_get', (["test['input']"], {}), "(test['input'])\n", (5934, 5949), False, 'from imcsdk.apis.server.storage import _vd_span_depth_get\n'), ((7228, 7280), 'imcsdk.apis.server.storage._raid_max_size_get', '_raid_max_size_get', (["t['r']", "t['s']", "t['ms']", "t['sd']"], {}), "(t['r'], t['s'], t['ms'], t['sd'])\n", (7246, 7280), False, 'from imcsdk.apis.server.storage import _raid_max_size_get\n'), ((9367, 9437), 'imcsdk.apis.server.storage.controller_encryption_exists', 'controller_encryption_exists', (['handle', 'CONTROLLER_TYPE', 'CONTROLLER_SLOT'], {}), '(handle, CONTROLLER_TYPE, CONTROLLER_SLOT)\n', (9395, 9437), False, 'from imcsdk.apis.server.storage import controller_encryption_enable, controller_encryption_disable, controller_encryption_exists, controller_encryption_modify_security_key, controller_encryption_key_id_generate, controller_encryption_key_generate\n'), ((15252, 15358), 'imcsdk.apis.server.storage.controller_encryption_exists', 'controller_encryption_exists', (['handle'], {'controller_type': 'CONTROLLER_TYPE', 'controller_slot': 'CONTROLLER_SLOT'}), '(handle, controller_type=CONTROLLER_TYPE,\n controller_slot=CONTROLLER_SLOT)\n', (15280, 15358), False, 'from imcsdk.apis.server.storage import controller_encryption_enable, controller_encryption_disable, controller_encryption_exists, controller_encryption_modify_security_key, controller_encryption_key_id_generate, controller_encryption_key_generate\n'), ((4660, 4706), 
'imcsdk.apis.server.storage._bytes_to_human', '_bytes_to_human', (["test['input']", "test['format']"], {}), "(test['input'], test['format'])\n", (4675, 4706), False, 'from imcsdk.apis.server.storage import _bytes_to_human\n'), ((4765, 4795), 'imcsdk.apis.server.storage._bytes_to_human', '_bytes_to_human', (["test['input']"], {}), "(test['input'])\n", (4780, 4795), False, 'from imcsdk.apis.server.storage import _bytes_to_human\n')] |
from parsel import Selector
from utils import (
download,
remove_big_whitespaces_selector,
find_id_in_url,
catch_errors,
get_last_part_url,
)
from data import VideoContent, GalleryContent, ImageContent, Meme, Author, Page
import re
# Mobile demotywatory site root; relative post URLs are joined onto this.
ROOT = "https://m.demotywatory.pl"
def scrap(url):
    """Download *url* and parse it into a Page of memes."""
    return parse(download(url))
def parse(html):
    """Parse a demotywatory listing page into a Page object.

    Extracts every .demotivator element as a meme (errors per element are
    trapped by catch_errors and yield None, which is filtered out), the page
    title, and the relative URL of the next listing page.
    """
    document = Selector(text=html)
    memes = []
    for element in document.css(".demotivator"):
        meme = catch_errors(parse_meme, element)
        if meme is not None:
            memes.append(meme)
    title = document.css("title::text").get()
    next_href = document.css("a.next-page::attr(href)").get()
    next_page_url = "/demotywatory/page/" + get_last_part_url(next_href)
    return Page(title, memes, next_page_url)
def parse_gallery(html):
    """Scrape every slide of a multi-image gallery post.

    *html* is the listing-page selector for the gallery teaser; the gallery
    pages themselves are fetched starting at the post URL and followed via
    the .gall_next_page link until none remains.

    Returns a (title, url, GalleryContent, description) tuple; the
    description slot is always None for galleries.
    """
    title = html.css("a::text").get()
    url = html.css("a::attr(href)").get()
    slides = []
    # The original duplicated the fetch/collect logic for the first page and
    # then again inside the pagination loop; a single loop with an empty
    # initial suffix covers both (first page is ROOT + url itself).
    page_suffix = ""
    while page_suffix is not None:
        gallery_html = download(ROOT + url + page_suffix)
        gallery_page_document = Selector(text=gallery_html)
        for slide_element in gallery_page_document.css(".rsSlideContent"):
            slides.append(slide_element.css("img::attr(src)").get())
        page_suffix = gallery_page_document.css(
            ".gall_next_page > a::attr(href)"
        ).get()
    # Drop slides whose <img> had no src attribute.
    slides = [slide for slide in slides if slide is not None]
    return (title, url, GalleryContent(slides), None)
def parse_content(html):
    """Dispatch on the demotivator's CSS class and extract its content.

    Returns a ``(title, url, content, description)`` tuple; all four are
    None when the element type is unrecognised.
    """
    clazz = html.attrib["class"]
    if "image_gallery" in clazz:
        return parse_gallery(html)
    if "image" in clazz or "image_gif" in clazz:
        picture = html.css("img.demot_pic")
        src = picture.attrib["src"].replace("//upl", "/upl")
        href = html.css("a::attr(href)").get()
        return (picture.attrib["alt"], href, ImageContent(src), None)
    if "video_mp4" in clazz:
        src = html.css("source::attr(src)").get().replace("//upl", "/upl")
        return (
            html.css(".demot_title::text").get(),
            html.css("a::attr(href)").get(),
            VideoContent(src),
            html.css(".demot_description::text").get(),
        )
    return (None, None, None, None)
def _parse_int(text):
    """Best-effort int conversion: None when *text* is missing or not numeric."""
    try:
        return int(text)
    except (TypeError, ValueError):
        # TypeError: text is None (selector matched nothing);
        # ValueError: non-numeric text. Anything else should propagate,
        # unlike the original bare `except:` which swallowed everything.
        return None


def parse_meme(m):
    """Build a Meme from a single ``.demotivator`` element.

    Returns None when the element has no URL (unparseable content).
    """
    title, url, content, description = parse_content(m)
    if url is None:
        return
    # Vote and comment counters are optional on the page.
    points = _parse_int(m.css(".up_votes::text").get())
    comment_count = _parse_int(m.css(".demot-comments a::text").get())
    # NOTE(review): ``description`` from parse_content is unused here -- the
    # two None positional args below may be where it belongs; confirm against
    # the Meme constructor before wiring it through.
    return Meme(
        title,
        ROOT + url,
        "/demotywatory/{}".format(find_id_in_url(url)),
        content,
        None,
        None,
        points,
        comment_count,
    )
| [
"utils.catch_errors",
"parsel.Selector",
"data.VideoContent",
"data.Page",
"utils.download",
"utils.find_id_in_url",
"data.ImageContent",
"data.GalleryContent"
] | [((318, 331), 'utils.download', 'download', (['url'], {}), '(url)\n', (326, 331), False, 'from utils import download, remove_big_whitespaces_selector, find_id_in_url, catch_errors, get_last_part_url\n'), ((389, 408), 'parsel.Selector', 'Selector', ([], {'text': 'html'}), '(text=html)\n', (397, 408), False, 'from parsel import Selector\n'), ((753, 786), 'data.Page', 'Page', (['title', 'memes', 'next_page_url'], {}), '(title, memes, next_page_url)\n', (757, 786), False, 'from data import VideoContent, GalleryContent, ImageContent, Meme, Author, Page\n'), ((930, 950), 'utils.download', 'download', (['(ROOT + url)'], {}), '(ROOT + url)\n', (938, 950), False, 'from utils import download, remove_big_whitespaces_selector, find_id_in_url, catch_errors, get_last_part_url\n'), ((979, 1006), 'parsel.Selector', 'Selector', ([], {'text': 'gallery_html'}), '(text=gallery_html)\n', (987, 1006), False, 'from parsel import Selector\n'), ((431, 464), 'utils.catch_errors', 'catch_errors', (['parse_meme', 'element'], {}), '(parse_meme, element)\n', (443, 464), False, 'from utils import download, remove_big_whitespaces_selector, find_id_in_url, catch_errors, get_last_part_url\n'), ((1348, 1392), 'utils.download', 'download', (['(ROOT + url + next_gallery_page_url)'], {}), '(ROOT + url + next_gallery_page_url)\n', (1356, 1392), False, 'from utils import download, remove_big_whitespaces_selector, find_id_in_url, catch_errors, get_last_part_url\n'), ((1425, 1452), 'parsel.Selector', 'Selector', ([], {'text': 'gallery_html'}), '(text=gallery_html)\n', (1433, 1452), False, 'from parsel import Selector\n'), ((1837, 1859), 'data.GalleryContent', 'GalleryContent', (['slides'], {}), '(slides)\n', (1851, 1859), False, 'from data import VideoContent, GalleryContent, ImageContent, Meme, Author, Page\n'), ((3162, 3181), 'utils.find_id_in_url', 'find_id_in_url', (['url'], {}), '(url)\n', (3176, 3181), False, 'from utils import download, remove_big_whitespaces_selector, find_id_in_url, 
catch_errors, get_last_part_url\n'), ((2259, 2276), 'data.ImageContent', 'ImageContent', (['src'], {}), '(src)\n', (2271, 2276), False, 'from data import VideoContent, GalleryContent, ImageContent, Meme, Author, Page\n'), ((2583, 2600), 'data.VideoContent', 'VideoContent', (['src'], {}), '(src)\n', (2595, 2600), False, 'from data import VideoContent, GalleryContent, ImageContent, Meme, Author, Page\n')] |
__all__ = ('FlexSlider', )
import os
import sys
# Make the repository root importable (three directory levels above this
# file) so that ``kivy_modules`` resolves when the file is run directly.
root = os.path.abspath(
    os.path.dirname(
        os.path.dirname(
            os.path.dirname(os.path.realpath(__file__)))))
sys.path.insert(0,root)
from kivy.lang import Builder
from kivy_modules.widget.widget import Widget
from kivy.properties import (NumericProperty, AliasProperty, OptionProperty,
                             ReferenceListProperty, BoundedNumericProperty,
                             StringProperty, ListProperty, BooleanProperty)
# NOTE(review): dead experiment with dynamic imports, left commented out by
# the original author -- candidate for removal.
# oldcwd = os.getcwd()
# os.chdir(path)
# module_name = "..__init__"
# class_name = "Builder"
# # klass = getattr(__import__(module_name), class_name)
# # print(klass)
# print(os.listdir())
# mod = __import__(module_name)
# print(mod)
class FlexSlider(Widget):
    """A slider widget with configurable orientation, cursor and value track.

    Largely mirrors kivy.uix.slider.Slider; drawn by the kv rule loaded at
    module bottom (rounded track plus a cyan rounded-rectangle cursor).
    """
    # Current value; re-clamped to [min, max] by on_min/on_max below.
    value = NumericProperty(0.)
    min = NumericProperty(0.)
    max = NumericProperty(100.)
    # Empty space around the track ends so the cursor never leaves the widget.
    padding = NumericProperty('16sp') # default: 16sp
    orientation = OptionProperty('horizontal', options=(
        'vertical', 'horizontal'))
    range = ReferenceListProperty(min, max)
    # Snap step for the value; 0 means continuous.
    step = BoundedNumericProperty(0, min=0)
    # Track/cursor images (defaults reuse the stock Kivy slider atlas).
    background_horizontal = StringProperty(
        'atlas://data/images/defaulttheme/sliderh_background')
    background_disabled_horizontal = StringProperty(
        'atlas://data/images/defaulttheme/sliderh_background_disabled')
    background_vertical = StringProperty(
        'atlas://data/images/defaulttheme/sliderv_background')
    background_disabled_vertical = StringProperty(
        'atlas://data/images/defaulttheme/sliderv_background_disabled')
    background_width = NumericProperty('36sp')
    cursor_image = StringProperty(
        'atlas://data/images/defaulttheme/slider_cursor')
    cursor_disabled_image = StringProperty(
        'atlas://data/images/defaulttheme/slider_cursor_disabled')
    cursor_width = NumericProperty('32sp')
    cursor_height = NumericProperty('32sp')
    cursor_size = ReferenceListProperty(cursor_width, cursor_height)
    # Border insets for the rounded track rectangle per orientation.
    border_horizontal = ListProperty([0, 18, 0, 18])
    border_vertical = ListProperty([18, 0, 18, 0])
    # When True, draw a colored line from the track start to the cursor.
    value_track = BooleanProperty(False)
    value_track_color = ListProperty([1, 1, 1, 1])
    value_track_width = NumericProperty('3dp')
    # 'all': any touch on the widget moves the cursor;
    # 'handle': only touches landing on the cursor start a drag.
    sensitivity = OptionProperty('all', options=('all', 'handle'))
    def on_min(self, *largs):
        """Re-clamp ``value`` when the lower bound changes."""
        self.value = min(self.max, max(self.min, self.value))
    def on_max(self, *largs):
        """Re-clamp ``value`` when the upper bound changes."""
        self.value = min(self.max, max(self.min, self.value))
    def get_norm_value(self):
        """Return ``value`` mapped onto [0, 1] over the [min, max] range."""
        vmin = self.min
        d = self.max - vmin
        # Degenerate range (min == max): avoid division by zero.
        if d == 0:
            return 0
        return (self.value - vmin) / float(d)
    def set_norm_value(self, value):
        """Set ``value`` from a normalized [0, 1] position, honouring ``step``."""
        vmin = self.min
        vmax = self.max
        step = self.step
        val = min(value * (vmax - vmin) + vmin, vmax)
        if step == 0:
            self.value = val
        else:
            # Snap to the nearest multiple of ``step`` above ``min``.
            self.value = min(round((val - vmin) / step) * step + vmin,
                             vmax)
    value_normalized = AliasProperty(get_norm_value, set_norm_value,
                                   bind=('value', 'min', 'max'),
                                   cache=True)
    def get_value_pos(self):
        """Return the cursor position (x, y) in window coordinates."""
        padding = self.padding
        x = self.x
        y = self.y
        nval = self.value_normalized
        if self.orientation == 'horizontal':
            return (x + padding + nval * (self.width - 2 * padding), y)
        else:
            return (x, y + padding + nval * (self.height - 2 * padding))
    def set_value_pos(self, pos):
        """Set ``value`` from a touch position, clamped to the padded track."""
        padding = self.padding
        x = min(self.right - padding, max(pos[0], self.x + padding))
        y = min(self.top - padding, max(pos[1], self.y + padding))
        if self.orientation == 'horizontal':
            # Zero-size widget would divide by zero below.
            if self.width == 0:
                self.value_normalized = 0
            else:
                self.value_normalized = (x - self.x - padding
                                         ) / float(self.width - 2 * padding)
        else:
            if self.height == 0:
                self.value_normalized = 0
            else:
                self.value_normalized = (y - self.y - padding
                                         ) / float(self.height - 2 * padding)
    value_pos = AliasProperty(get_value_pos, set_value_pos,
                              bind=('pos', 'size', 'min', 'max', 'padding',
                                    'value_normalized', 'orientation'),
                              cache=True)
    def on_touch_down(self, touch):
        """Start a drag (or step the value on mouse-wheel events)."""
        if self.disabled or not self.collide_point(*touch.pos):
            return
        if touch.is_mouse_scrolling:
            if 'down' in touch.button or 'left' in touch.button:
                if self.step:
                    self.value = min(self.max, self.value + self.step)
                else:
                    # No step configured: move 5% of the range per notch.
                    self.value = min(
                        self.max,
                        self.value + (self.max - self.min) / 20)
            if 'up' in touch.button or 'right' in touch.button:
                if self.step:
                    self.value = max(self.min, self.value - self.step)
                else:
                    self.value = max(
                        self.min,
                        self.value - (self.max - self.min) / 20)
        elif self.sensitivity == 'handle':
            # Only start dragging when the touch lands on the cursor itself.
            if self.children[0].collide_point(*touch.pos):
                touch.grab(self)
        else:
            touch.grab(self)
            self.value_pos = touch.pos
        return True
    def on_touch_move(self, touch):
        """Track a grabbed touch while dragging."""
        if touch.grab_current == self:
            self.value_pos = touch.pos
            # NOTE(review): ``loading_value_pos`` is not declared as a
            # property on this class -- looks like a leftover from another
            # widget; verify it is actually used anywhere.
            self.loading_value_pos = touch.pos[0] - 10, touch.pos[1]
        return True
    def on_touch_up(self, touch):
        """Release a grabbed touch at its final position."""
        if touch.grab_current == self:
            self.value_pos = touch.pos
            return True
Builder.load_string("""
<FlexSlider>:
canvas:
Color:
rgb: 1, 1, 1
RoundedRectangle:
radius: self.border_horizontal if self.orientation == 'horizontal' else self.border_vertical
pos: (self.x + self.padding, self.center_y - self.background_width / 2) if self.orientation == 'horizontal' else (self.center_x - self.background_width / 2, self.y + self.padding)
size: (self.width - self.padding * 2, self.background_width) if self.orientation == 'horizontal' else (self.background_width, self.height - self.padding * 2)
Color:
rgba: root.value_track_color if self.value_track and self.orientation == 'horizontal' else [1, 1, 1, 0]
Line:
width: self.value_track_width
points: self.x + self.padding, self.center_y - self.value_track_width / 2, self.value_pos[0], self.center_y - self.value_track_width / 2
Color:
rgba: root.value_track_color if self.value_track and self.orientation == 'vertical' else [1, 1, 1, 0]
Line:
width: self.value_track_width
points: self.center_x, self.y + self.padding, self.center_x, self.value_pos[1]
Color:
rgb: 1, 1, 1
Label:
canvas:
Color:
rgb: 0, 1, 1
RoundedRectangle:
pos: (root.value_pos[0] - root.cursor_width / 2, root.center_y - root.cursor_height / 2) if root.orientation == 'horizontal' else (root.center_x - root.cursor_width / 2, root.value_pos[1] - root.cursor_height / 2)
size: root.cursor_size
""")
if __name__ == '__main__':
    from kivy.app import App

    class FlexSliderApp(App):
        """Minimal demo: a FlexSlider with a visible red value track."""
        def build(self):
            demo_kwargs = {
                'padding': 25,
                'value_track': True,
                'value_track_color': [1, 0, 0, 1],
            }
            return FlexSlider(**demo_kwargs)
FlexSliderApp().run() | [
"kivy.properties.BoundedNumericProperty",
"sys.path.insert",
"kivy.properties.NumericProperty",
"kivy.lang.Builder.load_string",
"os.path.realpath",
"kivy.properties.OptionProperty",
"kivy.properties.ListProperty",
"kivy.properties.BooleanProperty",
"kivy.properties.ReferenceListProperty",
"kivy.p... | [((178, 202), 'sys.path.insert', 'sys.path.insert', (['(0)', 'root'], {}), '(0, root)\n', (193, 202), False, 'import sys\n'), ((5858, 7481), 'kivy.lang.Builder.load_string', 'Builder.load_string', (['"""\n<FlexSlider>:\n canvas:\n Color:\n rgb: 1, 1, 1\n RoundedRectangle:\n radius: self.border_horizontal if self.orientation == \'horizontal\' else self.border_vertical\n pos: (self.x + self.padding, self.center_y - self.background_width / 2) if self.orientation == \'horizontal\' else (self.center_x - self.background_width / 2, self.y + self.padding)\n size: (self.width - self.padding * 2, self.background_width) if self.orientation == \'horizontal\' else (self.background_width, self.height - self.padding * 2)\n Color:\n rgba: root.value_track_color if self.value_track and self.orientation == \'horizontal\' else [1, 1, 1, 0]\n Line:\n width: self.value_track_width\n points: self.x + self.padding, self.center_y - self.value_track_width / 2, self.value_pos[0], self.center_y - self.value_track_width / 2\n Color:\n rgba: root.value_track_color if self.value_track and self.orientation == \'vertical\' else [1, 1, 1, 0]\n Line:\n width: self.value_track_width\n points: self.center_x, self.y + self.padding, self.center_x, self.value_pos[1]\n Color:\n rgb: 1, 1, 1\n Label:\n canvas:\n Color:\n rgb: 0, 1, 1\n RoundedRectangle:\n pos: (root.value_pos[0] - root.cursor_width / 2, root.center_y - root.cursor_height / 2) if root.orientation == \'horizontal\' else (root.center_x - root.cursor_width / 2, root.value_pos[1] - root.cursor_height / 2)\n size: root.cursor_size\n"""'], {}), '(\n """\n<FlexSlider>:\n canvas:\n Color:\n rgb: 1, 1, 1\n RoundedRectangle:\n radius: self.border_horizontal if self.orientation == \'horizontal\' else self.border_vertical\n pos: (self.x + self.padding, self.center_y - self.background_width / 2) if self.orientation == \'horizontal\' else (self.center_x - self.background_width / 2, self.y + self.padding)\n size: (self.width - self.padding * 
2, self.background_width) if self.orientation == \'horizontal\' else (self.background_width, self.height - self.padding * 2)\n Color:\n rgba: root.value_track_color if self.value_track and self.orientation == \'horizontal\' else [1, 1, 1, 0]\n Line:\n width: self.value_track_width\n points: self.x + self.padding, self.center_y - self.value_track_width / 2, self.value_pos[0], self.center_y - self.value_track_width / 2\n Color:\n rgba: root.value_track_color if self.value_track and self.orientation == \'vertical\' else [1, 1, 1, 0]\n Line:\n width: self.value_track_width\n points: self.center_x, self.y + self.padding, self.center_x, self.value_pos[1]\n Color:\n rgb: 1, 1, 1\n Label:\n canvas:\n Color:\n rgb: 0, 1, 1\n RoundedRectangle:\n pos: (root.value_pos[0] - root.cursor_width / 2, root.center_y - root.cursor_height / 2) if root.orientation == \'horizontal\' else (root.center_x - root.cursor_width / 2, root.value_pos[1] - root.cursor_height / 2)\n size: root.cursor_size\n"""\n )\n', (5877, 7481), False, 'from kivy.lang import Builder\n'), ((783, 803), 'kivy.properties.NumericProperty', 'NumericProperty', (['(0.0)'], {}), '(0.0)\n', (798, 803), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((813, 833), 'kivy.properties.NumericProperty', 'NumericProperty', (['(0.0)'], {}), '(0.0)\n', (828, 833), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((843, 865), 'kivy.properties.NumericProperty', 'NumericProperty', (['(100.0)'], {}), '(100.0)\n', (858, 865), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((879, 902), 'kivy.properties.NumericProperty', 'NumericProperty', 
(['"""16sp"""'], {}), "('16sp')\n", (894, 902), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((937, 1001), 'kivy.properties.OptionProperty', 'OptionProperty', (['"""horizontal"""'], {'options': "('vertical', 'horizontal')"}), "('horizontal', options=('vertical', 'horizontal'))\n", (951, 1001), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((1023, 1054), 'kivy.properties.ReferenceListProperty', 'ReferenceListProperty', (['min', 'max'], {}), '(min, max)\n', (1044, 1054), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((1066, 1098), 'kivy.properties.BoundedNumericProperty', 'BoundedNumericProperty', (['(0)'], {'min': '(0)'}), '(0, min=0)\n', (1088, 1098), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((1127, 1196), 'kivy.properties.StringProperty', 'StringProperty', (['"""atlas://data/images/defaulttheme/sliderh_background"""'], {}), "('atlas://data/images/defaulttheme/sliderh_background')\n", (1141, 1196), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((1243, 1321), 'kivy.properties.StringProperty', 'StringProperty', (['"""atlas://data/images/defaulttheme/sliderh_background_disabled"""'], {}), "('atlas://data/images/defaulttheme/sliderh_background_disabled')\n", (1257, 1321), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, 
BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((1357, 1426), 'kivy.properties.StringProperty', 'StringProperty', (['"""atlas://data/images/defaulttheme/sliderv_background"""'], {}), "('atlas://data/images/defaulttheme/sliderv_background')\n", (1371, 1426), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((1471, 1549), 'kivy.properties.StringProperty', 'StringProperty', (['"""atlas://data/images/defaulttheme/sliderv_background_disabled"""'], {}), "('atlas://data/images/defaulttheme/sliderv_background_disabled')\n", (1485, 1549), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((1582, 1605), 'kivy.properties.NumericProperty', 'NumericProperty', (['"""36sp"""'], {}), "('36sp')\n", (1597, 1605), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((1625, 1689), 'kivy.properties.StringProperty', 'StringProperty', (['"""atlas://data/images/defaulttheme/slider_cursor"""'], {}), "('atlas://data/images/defaulttheme/slider_cursor')\n", (1639, 1689), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((1727, 1800), 'kivy.properties.StringProperty', 'StringProperty', (['"""atlas://data/images/defaulttheme/slider_cursor_disabled"""'], {}), "('atlas://data/images/defaulttheme/slider_cursor_disabled')\n", (1741, 1800), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((1829, 1852), 
'kivy.properties.NumericProperty', 'NumericProperty', (['"""32sp"""'], {}), "('32sp')\n", (1844, 1852), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((1873, 1896), 'kivy.properties.NumericProperty', 'NumericProperty', (['"""32sp"""'], {}), "('32sp')\n", (1888, 1896), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((1915, 1965), 'kivy.properties.ReferenceListProperty', 'ReferenceListProperty', (['cursor_width', 'cursor_height'], {}), '(cursor_width, cursor_height)\n', (1936, 1965), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((1990, 2018), 'kivy.properties.ListProperty', 'ListProperty', (['[0, 18, 0, 18]'], {}), '([0, 18, 0, 18])\n', (2002, 2018), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((2041, 2069), 'kivy.properties.ListProperty', 'ListProperty', (['[18, 0, 18, 0]'], {}), '([18, 0, 18, 0])\n', (2053, 2069), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((2088, 2110), 'kivy.properties.BooleanProperty', 'BooleanProperty', (['(False)'], {}), '(False)\n', (2103, 2110), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((2135, 2161), 'kivy.properties.ListProperty', 'ListProperty', (['[1, 1, 1, 1]'], {}), '([1, 1, 1, 1])\n', (2147, 2161), False, 'from 
kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((2186, 2208), 'kivy.properties.NumericProperty', 'NumericProperty', (['"""3dp"""'], {}), "('3dp')\n", (2201, 2208), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((2227, 2275), 'kivy.properties.OptionProperty', 'OptionProperty', (['"""all"""'], {'options': "('all', 'handle')"}), "('all', options=('all', 'handle'))\n", (2241, 2275), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((2991, 3082), 'kivy.properties.AliasProperty', 'AliasProperty', (['get_norm_value', 'set_norm_value'], {'bind': "('value', 'min', 'max')", 'cache': '(True)'}), "(get_norm_value, set_norm_value, bind=('value', 'min', 'max'),\n cache=True)\n", (3004, 3082), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((4235, 4376), 'kivy.properties.AliasProperty', 'AliasProperty', (['get_value_pos', 'set_value_pos'], {'bind': "('pos', 'size', 'min', 'max', 'padding', 'value_normalized', 'orientation')", 'cache': '(True)'}), "(get_value_pos, set_value_pos, bind=('pos', 'size', 'min',\n 'max', 'padding', 'value_normalized', 'orientation'), cache=True)\n", (4248, 4376), False, 'from kivy.properties import NumericProperty, AliasProperty, OptionProperty, ReferenceListProperty, BoundedNumericProperty, StringProperty, ListProperty, BooleanProperty\n'), ((147, 173), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (163, 173), False, 'import os\n')] |
import pytest
from query_flight import utils
from query_flight.models import Search, Flight, Layover, Airport
from django.utils import timezone
# @pytest.fixture
# def basic_search():
# return Search.objects.create()
@pytest.fixture
def basic_sw_inputs():
    """Baseline keyword arguments for building SW_Sel_* objects."""
    return {
        'browser': 1,
        'originationAirportCode': ['ATL', 'DAL'],
        'destinationAirportCode': 'DEN',
        'departureDate': timezone.now().date(),
    }
@pytest.mark.django_db
@pytest.mark.parametrize('input,iterable', [
    (['ATL', 'BOI', 'DEN'], True),
    ([1, 2, 3], True),
    ((1, 2, 3), True),
    ('string of garbage', False),
    (b'string of garbage', False),
    (1, False),
])
def test_check_iterable(input, iterable, basic_sw_inputs):
    """_check_iterable flags real containers, rejects strings and scalars."""
    base = utils.SW_Sel_base(**basic_sw_inputs)
    assert base._check_iterable(input) == iterable
@pytest.mark.django_db
def test_create_search1(basic_sw_inputs):
    """An explicitly supplied Search instance is stored untouched."""
    search = Search.objects.create()
    sel = utils.SW_Sel_base(search=search, **basic_sw_inputs)
    assert sel.search is search
    assert sel.search.id == search.id
@pytest.mark.django_db
def test_create_search2(basic_sw_inputs):
    """Omitting ``search`` makes the object create its own Search row."""
    sel = utils.SW_Sel_base(**basic_sw_inputs)
    assert isinstance(sel.search, Search)
    assert Search.objects.count() == 1
@pytest.mark.django_db
def test_create_search3(basic_sw_inputs):
    """Passing a non-Search object as ``search`` raises ValueError."""
    basic_sw_inputs.update({'search': 1})
    with pytest.raises(ValueError):
        # No assignment: the original bound the result to an unused local.
        utils.SW_Sel_base(**basic_sw_inputs)
@pytest.mark.django_db
def test_cases(basic_sw_inputs):
    """Multiple origination airports expand into one case per airport."""
    sel = utils.SW_Sel_Multiple(**basic_sw_inputs)
    today = timezone.now().date()
    expected = [
        {'departureDate': today,
         'destinationAirportCode': 'DEN',
         'originationAirportCode': code}
        for code in ('ATL', 'DAL')
    ]
    assert sel.cases[0] == expected[0]
    assert sel.cases[1] == expected[1]
| [
"query_flight.utils.SW_Sel_Multiple",
"query_flight.models.Search.objects.count",
"django.utils.timezone.now",
"pytest.mark.parametrize",
"query_flight.utils.SW_Sel_base",
"pytest.raises",
"query_flight.models.Search.objects.create"
] | [((454, 649), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""input,iterable"""', "[(['ATL', 'BOI', 'DEN'], True), ([1, 2, 3], True), ((1, 2, 3), True), (\n 'string of garbage', False), (b'string of garbage', False), (1, False)]"], {}), "('input,iterable', [(['ATL', 'BOI', 'DEN'], True), (\n [1, 2, 3], True), ((1, 2, 3), True), ('string of garbage', False), (\n b'string of garbage', False), (1, False)])\n", (477, 649), False, 'import pytest\n'), ((898, 921), 'query_flight.models.Search.objects.create', 'Search.objects.create', ([], {}), '()\n', (919, 921), False, 'from query_flight.models import Search, Flight, Layover, Airport\n'), ((979, 1015), 'query_flight.utils.SW_Sel_base', 'utils.SW_Sel_base', ([], {}), '(**basic_sw_inputs)\n', (996, 1015), False, 'from query_flight import utils\n'), ((1158, 1194), 'query_flight.utils.SW_Sel_base', 'utils.SW_Sel_base', ([], {}), '(**basic_sw_inputs)\n', (1175, 1194), False, 'from query_flight import utils\n'), ((1534, 1574), 'query_flight.utils.SW_Sel_Multiple', 'utils.SW_Sel_Multiple', ([], {}), '(**basic_sw_inputs)\n', (1555, 1574), False, 'from query_flight import utils\n'), ((1246, 1268), 'query_flight.models.Search.objects.count', 'Search.objects.count', ([], {}), '()\n', (1266, 1268), False, 'from query_flight.models import Search, Flight, Layover, Airport\n'), ((1392, 1417), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1405, 1417), False, 'import pytest\n'), ((1431, 1467), 'query_flight.utils.SW_Sel_base', 'utils.SW_Sel_base', ([], {}), '(**basic_sw_inputs)\n', (1448, 1467), False, 'from query_flight import utils\n'), ((405, 419), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (417, 419), False, 'from django.utils import timezone\n'), ((737, 773), 'query_flight.utils.SW_Sel_base', 'utils.SW_Sel_base', ([], {}), '(**basic_sw_inputs)\n', (754, 773), False, 'from query_flight import utils\n'), ((1619, 1633), 'django.utils.timezone.now', 'timezone.now', ([], 
{}), '()\n', (1631, 1633), False, 'from django.utils import timezone\n'), ((1804, 1818), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1816, 1818), False, 'from django.utils import timezone\n')] |
from django.contrib.auth import get_user_model
from django.db import migrations
from compte.models import UserPreferences
def add_preferences_to_users(apps, schema_editor):
    """Backfill: create a UserPreferences row for every existing user.

    Uses the historical ``UserPreferences`` model from the migration state
    (``apps.get_model``) instead of the live import, so the migration keeps
    working even if the model gains or loses fields later.
    """
    # NOTE(review): get_user_model() returns the *current* user model rather
    # than its historical version; acceptable only while the user model is
    # stable across migrations -- confirm.
    HistoricalUserPreferences = apps.get_model("compte", "UserPreferences")
    for user in get_user_model().objects.all():
        HistoricalUserPreferences.objects.create(user=user)
class Migration(migrations.Migration):
    """Data migration: give every existing user a UserPreferences row."""

    dependencies = [
        ("compte", "0002_userpreferences"),
    ]

    operations = [
        # RunPython.noop as the reverse operation makes the migration
        # reversible; rolling back simply leaves the created rows in place.
        migrations.RunPython(add_preferences_to_users,
                             migrations.RunPython.noop),
    ]
| [
"django.contrib.auth.get_user_model",
"django.db.migrations.RunPython",
"compte.models.UserPreferences.objects.create"
] | [((250, 291), 'compte.models.UserPreferences.objects.create', 'UserPreferences.objects.create', ([], {'user': 'user'}), '(user=user)\n', (280, 291), False, 'from compte.models import UserPreferences\n'), ((433, 479), 'django.db.migrations.RunPython', 'migrations.RunPython', (['add_preferences_to_users'], {}), '(add_preferences_to_users)\n', (453, 479), False, 'from django.db import migrations\n'), ((188, 204), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (202, 204), False, 'from django.contrib.auth import get_user_model\n')] |
""" es_runners for coordinate scans
"""
import automol
import elstruct
from mechroutines.es.runner import scan
from mechroutines.es.runner import qchem_params
from mechlib.amech_io import printer as ioprinter
from phydat import phycon
def hindered_rotor_scans(
        zma, spc_info, mod_thy_info, thy_save_fs,
        scn_run_fs, scn_save_fs,
        rotors, tors_model, method_dct,
        overwrite,
        saddle=False,
        increment=30.0*phycon.DEG2RAD,
        retryfail=True, chkstab=None):
    """ Perform scans over each of the torsional coordinates.

    :param zma: reference Z-matrix for the species
    :param rotors: rotor objects describing the torsions to scan
    :param tors_model: torsion model; '1dhrfa' runs rigid energy scans,
        anything else runs relaxed optimization scans
    :param increment: grid spacing of the scan (radians)
    :param saddle: species is a saddle point
    :param chkstab: check wavefunction stability; when None, defaults to
        True for non-saddle species
    """
    # '1dhrfa' uses rigid single-point energies; all other models relax the
    # remaining coordinates at each grid point.
    if tors_model == '1dhrfa':
        job, scn_typ = elstruct.Job.ENERGY, 'rigid'
    else:
        job, scn_typ = elstruct.Job.OPTIMIZATION, 'relaxed'
    script_str, kwargs = qchem_params(method_dct, job=job)

    run_tors_names = automol.rotor.names(rotors)
    run_tors_grids = automol.rotor.grids(rotors, increment=increment)

    # Set constraints
    const_names = automol.zmat.set_constraint_names(
        zma, run_tors_names, tors_model)

    # If not set by the caller, only check stability for non-saddle species
    if chkstab is None:
        chkstab = bool(not saddle)

    ioprinter.run_rotors(run_tors_names, const_names)
    for tors_names, tors_grids in zip(run_tors_names, run_tors_grids):
        ioprinter.info_message(
            'Running Rotor: {}...'.format(tors_names),
            newline=1)

        # Setting the constraints
        constraint_dct = automol.zmat.constraint_dct(
            zma, const_names, tors_names)

        scan.execute_scan(
            zma=zma,
            spc_info=spc_info,
            mod_thy_info=mod_thy_info,
            thy_save_fs=thy_save_fs,
            coord_names=tors_names,
            coord_grids=tors_grids,
            scn_run_fs=scn_run_fs,
            scn_save_fs=scn_save_fs,
            scn_typ=scn_typ,
            script_str=script_str,
            overwrite=overwrite,
            update_guess=True,
            reverse_sweep=True,
            saddle=saddle,
            constraint_dct=constraint_dct,
            retryfail=retryfail,
            # BUGFIX: previously hard-coded to False, which made the
            # ``chkstab`` value computed above dead code; pass it through.
            chkstab=chkstab,
            **kwargs,
        )
def check_hr_pot(tors_pots, tors_zmas, tors_paths, emax=-0.5, emin=-10.0):
    """ Check the hindered-rotor potentials for a lower-energy conformer.

    Scans every rotor's potential values and returns the Z-matrix of the
    lowest point found inside the (emin, emax) window, or None.
    """
    new_min_zma = None
    print('\nAssessing the HR potential...')
    for name, pot_dct in tors_pots.items():
        print('- Rotor {}'.format(name))
        entries = zip(pot_dct.values(),
                      tors_zmas[name].values(),
                      tors_paths[name].values())
        for ene, zma, path in entries:
            # emin tightens as lower energies are found, so only the running
            # minimum inside the window survives.
            if emin < ene < emax:
                new_min_zma = zma
                emin = ene
                print(' - New minimmum energy ZMA found for torsion')
                print(' - Ene = {}'.format(ene))
                print(' - Found at path: {}'.format(path))
                print(automol.zmat.string(zma))
    return new_min_zma
# Read and print the potential
# sp_fs = autofile.fs.single_point(ini_cnf_save_path)
# ref_ene = sp_fs[-1].file.energy.read(mod_ini_thy_info[1:4])
# ref_ene = ini_cnf_save_fs[-1].file.energy.read(ini_min_cnf_locs)
# tors_pots, tors_zmas = {}, {}
# for tors_names, tors_grids in zip(run_tors_names, run_tors_grids):
# constraint_dct = automol.zmat.build_constraint_dct(
# zma, const_names, tors_names)
# pot, _, _, _, zmas, _ = filesys.read.potential(
# tors_names, tors_grids,
# ini_cnf_save_path,
# mod_ini_thy_info, ref_ene,
# constraint_dct,
# read_zma=True)
# tors_pots[tors_names] = pot
# tors_zmas[tors_names] = zmas
# # Print potential
# ioprinter.hr_pot(tors_pots)
| [
"mechlib.amech_io.printer.run_rotors",
"automol.zmat.set_constraint_names",
"automol.rotor.names",
"automol.rotor.grids",
"automol.zmat.string",
"automol.zmat.constraint_dct",
"mechroutines.es.runner.qchem_params",
"mechroutines.es.runner.scan.execute_scan"
] | [((883, 910), 'automol.rotor.names', 'automol.rotor.names', (['rotors'], {}), '(rotors)\n', (902, 910), False, 'import automol\n'), ((932, 980), 'automol.rotor.grids', 'automol.rotor.grids', (['rotors'], {'increment': 'increment'}), '(rotors, increment=increment)\n', (951, 980), False, 'import automol\n'), ((1022, 1088), 'automol.zmat.set_constraint_names', 'automol.zmat.set_constraint_names', (['zma', 'run_tors_names', 'tors_model'], {}), '(zma, run_tors_names, tors_model)\n', (1055, 1088), False, 'import automol\n'), ((1256, 1305), 'mechlib.amech_io.printer.run_rotors', 'ioprinter.run_rotors', (['run_tors_names', 'const_names'], {}), '(run_tors_names, const_names)\n', (1276, 1305), True, 'from mechlib.amech_io import printer as ioprinter\n'), ((636, 691), 'mechroutines.es.runner.qchem_params', 'qchem_params', (['method_dct'], {'job': 'elstruct.Job.OPTIMIZATION'}), '(method_dct, job=elstruct.Job.OPTIMIZATION)\n', (648, 691), False, 'from mechroutines.es.runner import qchem_params\n'), ((772, 821), 'mechroutines.es.runner.qchem_params', 'qchem_params', (['method_dct'], {'job': 'elstruct.Job.ENERGY'}), '(method_dct, job=elstruct.Job.ENERGY)\n', (784, 821), False, 'from mechroutines.es.runner import qchem_params\n'), ((1612, 1669), 'automol.zmat.constraint_dct', 'automol.zmat.constraint_dct', (['zma', 'const_names', 'tors_names'], {}), '(zma, const_names, tors_names)\n', (1639, 1669), False, 'import automol\n'), ((1692, 2097), 'mechroutines.es.runner.scan.execute_scan', 'scan.execute_scan', ([], {'zma': 'zma', 'spc_info': 'spc_info', 'mod_thy_info': 'mod_thy_info', 'thy_save_fs': 'thy_save_fs', 'coord_names': 'tors_names', 'coord_grids': 'tors_grids', 'scn_run_fs': 'scn_run_fs', 'scn_save_fs': 'scn_save_fs', 'scn_typ': 'scn_typ', 'script_str': 'script_str', 'overwrite': 'overwrite', 'update_guess': '(True)', 'reverse_sweep': '(True)', 'saddle': 'saddle', 'constraint_dct': 'constraint_dct', 'retryfail': 'retryfail', 'chkstab': '(False)'}), '(zma=zma, 
spc_info=spc_info, mod_thy_info=mod_thy_info,\n thy_save_fs=thy_save_fs, coord_names=tors_names, coord_grids=tors_grids,\n scn_run_fs=scn_run_fs, scn_save_fs=scn_save_fs, scn_typ=scn_typ,\n script_str=script_str, overwrite=overwrite, update_guess=True,\n reverse_sweep=True, saddle=saddle, constraint_dct=constraint_dct,\n retryfail=retryfail, chkstab=False, **kwargs)\n', (1709, 2097), False, 'from mechroutines.es.runner import scan\n'), ((3056, 3080), 'automol.zmat.string', 'automol.zmat.string', (['zma'], {}), '(zma)\n', (3075, 3080), False, 'import automol\n')] |
# Launcher configuration for running StarCraft II bot matches through WSL.
import subprocess
# Base command: run the harvester entry point with python3.7 inside WSL.
# NOTE(review): YOUR_PATH_TO_HARVESTER is not defined anywhere visible --
# presumably a placeholder the user must edit; as written this line raises
# NameError at import time.  Confirm before running.
wsl = "wsl python3.7 /mnt/" + YOUR_PATH_TO_HARVESTER
# to = "--timeout 900 -z"
# Preset opponent argument strings (apparently unused by the loop below;
# kept for manual one-off runs).
to = "-p2 ai.terran.hard"
to2 = "-p2 ai.zerg.hard"
to3 = "-p2 ai.protoss.hard"
def ai_opponents(difficulty: str) -> str:
    """Return a comma-separated list of built-in AI opponent identifiers.

    One entry is produced per (race, build) combination at the given
    difficulty, e.g. ``ai.zerg.hard.rush``.  Note that "air" appears twice
    in the build list, so each race contributes two identical air entries
    (preserved from the original behavior).
    """
    races = ["zerg", "protoss", "terran"]
    builds = ["rush", "timing", "power", "air", "air", "macro"]
    return ",".join(
        f"ai.{race}.{difficulty}.{build}"
        for race in races
        for build in builds
    )
# Comma-separated roster of harvester bot variants to field as player 1.
harvester_test_pattern = (
    "harvesterzerg.learning,"
    "harvesterzerg.scripted,"
    "harvesterzerg.scripted.default.2,"
    "harvesterzerg.learning.default.2,"
    "harvesterzerg.scripted.default.3,"
    "harvesterzerg.learning.default.3,"
    "harvesterzerg.scripted.default.4,"
    "harvesterzerg.learning.default.4,"
    "harvesterzerg.scripted.default.5,"
    "harvesterzerg.learning.default.5,"
    "harvesterzerg.scripted.default.6,"
    "harvesterzerg.learning.default.6,"
    "harvesterzerg.scripted.default.7,"
    "harvesterzerg.learning.default.7,"
    "harvesterzerg.play.default.master,"
).strip(",")
cmd_list_ml = [
    # f"{wsl} -p1 harvesterzerg.learning -p2 harvesterzerg.learning.default.2",
    # f"{wsl} -p1 harvesterzerg.learning.default.2 -p2 harvesterzerg.learning.default.3",
    # f"{wsl} -p1 harvesterzerg.learning -p2 harvesterzerg.learning.default.3",
]
# Queue 15 rounds against the "hard" AIs and 15 against "veryhard".
for i in range(0, 15):
    cmd_list_ml.append(f'{wsl} -p1 {harvester_test_pattern} -p2 {ai_opponents("hard")}')
for i in range(0, 15):
    cmd_list_ml.append(f'{wsl} -p1 {harvester_test_pattern} -p2 {ai_opponents("veryhard")}')
index = 0
# Launch every match concurrently, each on its own port (10010, 10020, ...).
# NOTE(review): Popen handles are discarded, so nothing waits on or reaps
# these child processes -- confirm that is intentional.
for cmd in cmd_list_ml:
    index += 1
    final_cmd = cmd + " --port " + str(10000 + index * 10)
    cmds = final_cmd.split(" ")
    subprocess.Popen(cmds)
| [
"subprocess.Popen"
] | [((1702, 1724), 'subprocess.Popen', 'subprocess.Popen', (['cmds'], {}), '(cmds)\n', (1718, 1724), False, 'import subprocess\n')] |
import re
welcome_message = """
Welcome to the Mad Libs game! YOu will be prompted to enter certain types of words. These words will be used in a mad lib and printed out for you.
"""
def fill_mad_lib(file):
    """Read a mad-lib template from *file*, prompt the user for each
    ``{placeholder}`` found, and print the completed text.

    Prints an error message instead of raising if the file is missing.
    """
    new_mad_lib = ''
    try:
        # Fixes vs. the original: the hard-coded 'text.txt' path is replaced
        # with the (previously ignored) `file` parameter, open() is moved
        # inside the try so FileNotFoundError is actually caught, and mode
        # 'r' is used since the file is only read.
        with open(file, 'r') as f:
            for line in f:
                # Substitute the user's answers one placeholder at a time,
                # left to right, so duplicates get distinct answers.
                for word_type in find_all_instances(line):
                    user_answer = input('Enter a ' + word_type + ': ')
                    line = replace_word(line, word_type, user_answer)
                new_mad_lib += line
            print(new_mad_lib)
    except FileNotFoundError:
        print('The file was not found')


def replace_word(line, old_word, new_word):
    """Replace the first occurrence of *old_word* in *line* with *new_word*."""
    return line.replace(old_word, new_word, 1)


def find_all_instances(line):
    """Return every ``{placeholder}`` token found in *line*, in order."""
    regex = r"{[^{]+}"
    return re.findall(regex, line)
if __name__ == '__main__':
fill_mad_lib('text.txt') | [
"re.findall"
] | [((995, 1018), 're.findall', 're.findall', (['regex', 'line'], {}), '(regex, line)\n', (1005, 1018), False, 'import re\n')] |
import sys
import os
import argparse
import numpy as np
# Command-line interface.  -h/--help is disabled so that running with no
# arguments prints the full help text instead (see the sys.argv check below).
parser = argparse.ArgumentParser(
    description="""Command-line bin abundance estimator.
Print the median RPKM abundance for each bin in each sample to STDOUT.
Will read the RPKM file into memory - beware.""",
    formatter_class=argparse.RawDescriptionHelpFormatter,
    add_help=False)
parser.add_argument('rpkmpath', help='Path to RPKM file')
parser.add_argument('clusterspath', help='Path to clusters.tsv')
parser.add_argument('headerpath', help='Path to list of headers')
# No arguments at all: show help and quit instead of erroring out.
if len(sys.argv) == 1:
    parser.print_help()
    sys.exit()
args = parser.parse_args()
# Check files
for infile in (args.rpkmpath, args.clusterspath, args.headerpath):
    if not os.path.isfile(infile):
        raise FileNotFoundError(infile)
# Load Vamb
# NOTE(review): assumes the script is executed from a directory whose parent
# contains the vamb package at ../vamb -- confirm before relocating.
sys.path.append('../vamb')
import vamb
# Load in files
# Map each contig header to its row index in the RPKM matrix.
with open(args.headerpath) as file:
    indexof = {line.strip():i for i,line in enumerate(file)}
with open(args.clusterspath) as file:
    clusters = vamb.vambtools.read_clusters(file)
# Check that all clusters names are in headers:
for cluster in clusters.values():
    for header in cluster:
        if header not in indexof:
            raise KeyError("Header not found in headerlist: {}".format(header))
# Load RPKM and check it
rpkm = vamb.vambtools.read_npz(args.rpkmpath)
nsamples = rpkm.shape[1]
if len(indexof) != len(rpkm):
    raise ValueError("Not the same number of headers as rows in RPKM file")
# Now estimate abundances
# For each bin, gather the depth rows of its member contigs and print the
# per-sample median as tab-separated values, one bin per line.
for clustername, cluster in clusters.items():
    depths = np.empty((len(cluster), nsamples), dtype=np.float32)
    for row, header in enumerate(cluster):
        index = indexof[header]
        depths[row] = rpkm[index]
    median_depths = np.median(depths, axis=0)
    print(clustername, end='\t')
    print('\t'.join([str(i) for i in median_depths]))
| [
"numpy.median",
"argparse.ArgumentParser",
"vamb.vambtools.read_npz",
"vamb.vambtools.read_clusters",
"os.path.isfile",
"sys.exit",
"sys.path.append"
] | [((66, 343), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Command-line bin abundance estimator.\nPrint the median RPKM abundance for each bin in each sample to STDOUT.\nWill read the RPKM file into memory - beware."""', 'formatter_class': 'argparse.RawDescriptionHelpFormatter', 'add_help': '(False)'}), '(description=\n """Command-line bin abundance estimator.\nPrint the median RPKM abundance for each bin in each sample to STDOUT.\nWill read the RPKM file into memory - beware."""\n , formatter_class=argparse.RawDescriptionHelpFormatter, add_help=False)\n', (89, 343), False, 'import argparse\n'), ((798, 824), 'sys.path.append', 'sys.path.append', (['"""../vamb"""'], {}), "('../vamb')\n", (813, 824), False, 'import sys\n'), ((1297, 1335), 'vamb.vambtools.read_npz', 'vamb.vambtools.read_npz', (['args.rpkmpath'], {}), '(args.rpkmpath)\n', (1320, 1335), False, 'import vamb\n'), ((589, 599), 'sys.exit', 'sys.exit', ([], {}), '()\n', (597, 599), False, 'import sys\n'), ((1005, 1039), 'vamb.vambtools.read_clusters', 'vamb.vambtools.read_clusters', (['file'], {}), '(file)\n', (1033, 1039), False, 'import vamb\n'), ((1738, 1763), 'numpy.median', 'np.median', (['depths'], {'axis': '(0)'}), '(depths, axis=0)\n', (1747, 1763), True, 'import numpy as np\n'), ((721, 743), 'os.path.isfile', 'os.path.isfile', (['infile'], {}), '(infile)\n', (735, 743), False, 'import os\n')] |
from pupa.scrape import Scraper
from pupa.scrape import Event
import lxml.html
from datetime import datetime
import pytz
DUPLICATE_EVENT_URLS = ('http://miamidade.gov/wps/Events/EventDetail.jsp?eventID=445731',
'http://miamidade.gov/wps/Events/EventDetail.jsp?eventID=452515',
'http://miamidade.gov/wps/Events/EventDetail.jsp?eventID=452513')
class MiamidadeEventScraper(Scraper):
    """Scrapes the Miami-Dade County commission calendar into Event objects."""
    def lxmlize(self, url):
        # Fetch the page and parse it; make link hrefs absolute so they can
        # be used as event sources directly.
        html = self.get(url).text
        doc = lxml.html.fromstring(html)
        doc.make_links_absolute(url)
        return doc
    def scrape(self):
        """Yield Event objects for the current and next two months."""
        local_timezone = pytz.timezone("US/Eastern")
        base_calendar_url = "http://www.miamidade.gov/cob/county-commission-calendar.asp"
        #things get messy more than a few months out
        #so we're just pulling 3 months. If we want three
        #more, they are called "nxx", "nxy" and "nxz"
        months = ["cur","nex","nxw"]
        for m in months:
            doc = self.lxmlize(base_calendar_url + "?next={}".format(m))
            # Each calendar event is rendered as its own dotted-border table.
            events = doc.xpath("//table[contains(@style,'dotted #ccc')]")
            for event in events:
                rows = event.xpath(".//tr")
                for row in rows:
                    # Each row is a (label cell, value cell) pair.
                    heading, data = row.xpath(".//td")
                    h = heading.text_content().lower().replace(":","").strip()
                    if h == "event":
                        title = data.text_content()
                        link = data.xpath(".//a")[0].attrib["href"]
                    elif h == "event date":
                        when = datetime.strptime(data.text, '%m/%d/%y %H:%M%p')
                        when = local_timezone.localize(when)
                    elif h == "location":
                        where = data.text
                    elif h == "description":
                        description = data.text
                # NOTE(review): title/link/when/where/description are only
                # bound when the matching row is present; a table missing one
                # would raise NameError or silently reuse the previous
                # event's value -- confirm the page always renders all rows.
                if link in DUPLICATE_EVENT_URLS:
                    continue
                if title == "Mayor's FY 2016-17 Proposed Budget Public Meeting":
                    continue
                if not description:
                    description = ""
                status = "confirmed"
                if "cancelled" in title.lower():
                    status = "cancelled"
                e = Event(name=title,
                          start_time=when,
                          timezone="US/Eastern",
                          location_name=where,
                          description=description,
                          status=status)
                e.add_source(link)
                yield e
        # The recurring budget meeting is skipped above and emitted exactly
        # once here with a fixed date, to avoid duplicate entries.
        e = Event(name="Mayor's FY 2016-17 Proposed Budget Public Meeting",
                start_time=local_timezone.localize(datetime.strptime('08/08/16 06:00PM', '%m/%d/%y %H:%M%p')),
                timezone="US/Eastern",
                location_name='111 NW 1st Street',
                description='Pursuant to Section 2-1800A of the County Code, a Public Meeting has been scheduled by the Honorable <NAME>, Mayor, Miami-Dade County, to discuss the FY 2016-17 budget, tax rates, and fee changes.',
                status='confirmed')
        e.add_source('http://miamidade.gov/wps/Events/EventDetail.jsp?eventID=447192')
        yield e
| [
"datetime.datetime.strptime",
"pytz.timezone",
"pupa.scrape.Event"
] | [((642, 669), 'pytz.timezone', 'pytz.timezone', (['"""US/Eastern"""'], {}), "('US/Eastern')\n", (655, 669), False, 'import pytz\n'), ((2310, 2433), 'pupa.scrape.Event', 'Event', ([], {'name': 'title', 'start_time': 'when', 'timezone': '"""US/Eastern"""', 'location_name': 'where', 'description': 'description', 'status': 'status'}), "(name=title, start_time=when, timezone='US/Eastern', location_name=\n where, description=description, status=status)\n", (2315, 2433), False, 'from pupa.scrape import Event\n'), ((2783, 2840), 'datetime.datetime.strptime', 'datetime.strptime', (['"""08/08/16 06:00PM"""', '"""%m/%d/%y %H:%M%p"""'], {}), "('08/08/16 06:00PM', '%m/%d/%y %H:%M%p')\n", (2800, 2840), False, 'from datetime import datetime\n'), ((1610, 1658), 'datetime.datetime.strptime', 'datetime.strptime', (['data.text', '"""%m/%d/%y %H:%M%p"""'], {}), "(data.text, '%m/%d/%y %H:%M%p')\n", (1627, 1658), False, 'from datetime import datetime\n')] |
# Generated by Django 2.2.7 on 2019-11-12 16:03
from django.db import migrations
class Migration(migrations.Migration):
    """Drops the obsolete ``is_written`` field from the ResearchNote model."""

    dependencies = [
        ('research_note', '0005_auto_20191112_2255'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='researchnote',
            name='is_written',
        ),
    ]
| [
"django.db.migrations.RemoveField"
] | [((233, 301), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""researchnote"""', 'name': '"""is_written"""'}), "(model_name='researchnote', name='is_written')\n", (255, 301), False, 'from django.db import migrations\n')] |
from datetime import datetime, date
from marqeta.response_models.address_response_model import AddressResponseModel
from marqeta.response_models.identification_response_model import IdentificationResponseModel
from marqeta.response_models import datetime_object
import json
import re
class BusinessProprietorResponseModel(object):
    """Read-only wrapper over the raw JSON returned for a business proprietor.

    Each property pulls its field lazily from the response dict; absent
    fields yield ``None``.  Nested structures (address, date of birth,
    identifications) are wrapped in their own response models on access.
    """

    def __init__(self, json_response):
        self.json_response = json_response

    def __str__(self):
        return json.dumps(self.json_response, default=self.json_serial)

    @staticmethod
    def json_serial(o):
        # json.dumps cannot serialize datetime/date natively; fall back to
        # their string form.  Other unserializable types still raise.
        if isinstance(o, datetime) or isinstance(o, date):
            return o.__str__()

    @property
    def first_name(self):
        return self.json_response.get('first_name', None)

    @property
    def middle_name(self):
        return self.json_response.get('middle_name', None)

    @property
    def last_name(self):
        return self.json_response.get('last_name', None)

    @property
    def alternative_names(self):
        return self.json_response.get('alternative_names', None)

    @property
    def title(self):
        return self.json_response.get('title', None)

    @property
    def home(self):
        # Wrap only when present; returns None implicitly otherwise.
        if 'home' in self.json_response:
            return AddressResponseModel(self.json_response['home'])

    @property
    def ssn(self):
        return self.json_response.get('ssn', None)

    @property
    def dob(self):
        # Parsed into a datetime by the shared helper when present.
        if 'dob' in self.json_response:
            return datetime_object('dob', self.json_response)

    @property
    def phone(self):
        return self.json_response.get('phone', None)

    @property
    def email(self):
        return self.json_response.get('email', None)

    @property
    def identifications(self):
        # One wrapper per identification entry when the key is present.
        if 'identifications' in self.json_response:
            return [IdentificationResponseModel(val) for val in self.json_response['identifications']]

    def __repr__(self):
        return '<Marqeta.response_models.business_proprietor_response_model.BusinessProprietorResponseModel>' + self.__str__()
| [
"marqeta.response_models.identification_response_model.IdentificationResponseModel",
"marqeta.response_models.datetime_object",
"json.dumps",
"marqeta.response_models.address_response_model.AddressResponseModel"
] | [((454, 510), 'json.dumps', 'json.dumps', (['self.json_response'], {'default': 'self.json_serial'}), '(self.json_response, default=self.json_serial)\n', (464, 510), False, 'import json\n'), ((1243, 1291), 'marqeta.response_models.address_response_model.AddressResponseModel', 'AddressResponseModel', (["self.json_response['home']"], {}), "(self.json_response['home'])\n", (1263, 1291), False, 'from marqeta.response_models.address_response_model import AddressResponseModel\n'), ((1471, 1513), 'marqeta.response_models.datetime_object', 'datetime_object', (['"""dob"""', 'self.json_response'], {}), "('dob', self.json_response)\n", (1486, 1513), False, 'from marqeta.response_models import datetime_object\n'), ((1813, 1845), 'marqeta.response_models.identification_response_model.IdentificationResponseModel', 'IdentificationResponseModel', (['val'], {}), '(val)\n', (1840, 1845), False, 'from marqeta.response_models.identification_response_model import IdentificationResponseModel\n')] |
from django.contrib import admin
from django.urls import path
from django.views.generic import TemplateView
from .views import MainPage
# URL routing: the Django admin plus the main landing page.
urlpatterns = [
    path('admin/', admin.site.urls),
    # Root URL renders the main page; named 'books' for reverse() lookups.
    path('', MainPage.as_view(), name='books'),
]
| [
"django.urls.path"
] | [((157, 188), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (161, 188), False, 'from django.urls import path\n')] |
from __future__ import unicode_literals
import os.path
from django.core.files.storage import default_storage as storage
from django.core.files.uploadedfile import SimpleUploadedFile
from django.utils.six import StringIO
from django.utils.encoding import smart_text
try:
import Image
except ImportError:
try:
from PIL import Image
except ImportError:
raise ImportError('Cannot import Python Image Library')
class ImageManipulator():
    """Creates resized copies of stored images via the default Django storage.

    Parameters mirror PIL's save() options: output *format* (e.g. "JPEG"),
    output filename *extension*, and JPEG *quality*.
    """
    def __init__(self, format="JPEG", extension="jpg", quality=75):
        self.format = format
        self.extension = extension
        self.quality = quality

    # define the path for the resized image
    def resized_path(self, path, size, method):
        """Return the storage path of the resized variant of *path*.

        E.g. ``dir/pic.png`` -> ``dir/pic_scale_100x100.jpg``.
        """
        directory, name = os.path.split(path)
        image_name = name.rsplit('.', 1)[0]
        return os.path.join(directory,
                            smart_text('{}_{}_{}.{}').format(image_name,
                                                               method,
                                                               size,
                                                               self.extension))

    # take an image, create a copy and scale the copied image
    def scale(self, image_field, size):
        """Scale *image_field* to fit within *size* ("WIDTHxHEIGHT").

        Returns ``(filename, SimpleUploadedFile)`` when a new image was
        produced, or ``(filename, None)`` when the resized file already
        exists in storage.
        """
        # Bug fix: PIL must save into a *binary* buffer -- the original
        # six.StringIO raised TypeError on Python 3 when saving JPEG bytes.
        from io import BytesIO
        image_path = self.resized_path(image_field.name, size, 'scale')
        image_filename = os.path.basename(image_path)
        if not storage.exists(image_path):
            # Bug fix: open in binary mode -- image data is bytes, not text.
            f = storage.open(image_field.name, 'rb')
            image = Image.open(f)
            # JPEG cannot store alpha/palette modes; normalize to RGB first.
            if image.mode != 'RGB':
                image = image.convert('RGB')
            width, height = [int(i) for i in size.split('x')]
            # thumbnail() preserves aspect ratio and never upscales.
            image.thumbnail((width, height), Image.ANTIALIAS)
            f_scale = BytesIO()
            image.save(f_scale, self.format, quality=self.quality)
            f_scale.seek(0)
            suf = SimpleUploadedFile(image_filename.split('.')[0],
                                      f_scale.read(),
                                      content_type='image/{}'.format(
                                          self.format.lower()))
            return image_filename, suf
        return image_filename, None
| [
"PIL.Image.open",
"django.core.files.storage.default_storage.exists",
"django.utils.six.StringIO",
"django.utils.encoding.smart_text",
"django.core.files.storage.default_storage.open"
] | [((1395, 1421), 'django.core.files.storage.default_storage.exists', 'storage.exists', (['image_path'], {}), '(image_path)\n', (1409, 1421), True, 'from django.core.files.storage import default_storage as storage\n'), ((1439, 1474), 'django.core.files.storage.default_storage.open', 'storage.open', (['image_field.name', '"""r"""'], {}), "(image_field.name, 'r')\n", (1451, 1474), True, 'from django.core.files.storage import default_storage as storage\n'), ((1495, 1508), 'PIL.Image.open', 'Image.open', (['f'], {}), '(f)\n', (1505, 1508), False, 'from PIL import Image\n'), ((1739, 1749), 'django.utils.six.StringIO', 'StringIO', ([], {}), '()\n', (1747, 1749), False, 'from django.utils.six import StringIO\n'), ((881, 906), 'django.utils.encoding.smart_text', 'smart_text', (['"""{}_{}_{}.{}"""'], {}), "('{}_{}_{}.{}')\n", (891, 906), False, 'from django.utils.encoding import smart_text\n')] |
import os
import re
from pathlib import Path
from urllib.parse import urlparse
from github import Github
class CommitParser(object):
    """Collects JIRA ticket ids referenced between two git tags.

    Tickets are found either directly in commit subjects, or -- for squashed
    GitHub merge commits -- by fetching the pull request body and extracting
    a JIRA URL from it.
    """
    # Class-level annotations documenting the attributes set in __init__.
    github_access_token: str
    repository: str
    jira_project: str
    jira_server: str

    def __init__(self, repository, jira_project, jira_server, github_access_token):
        self.repository = repository
        self.jira_project = jira_project
        self.jira_server = jira_server
        self.github_access_token = github_access_token

    @property
    def jira_regex_format(self) -> str:
        # Capturing group matching e.g. "PROJ-1234" for this project key.
        return f"({self.jira_project}-[0-9]*)"

    def jira_tickets(self, start_tag, end_tag) -> [str]:
        """Return JIRA ticket ids referenced by commits in start..end."""
        commits = self._get_commits(start_tag, end_tag)
        jira_tickets, github_prs = self._process_commits(commits, self.jira_regex_format)
        jira_tickets += self._get_jira_tickets_from_github(github_prs, self.jira_regex_format)
        return jira_tickets

    def _get_commits(self, start, end) -> [str]:
        # Returns the open pipe; iterating it yields one subject line per commit.
        # NOTE(review): string-concatenated shell command -- tag names are
        # passed to the shell unquoted.  Consider subprocess with an argv list.
        return os.popen("git log --pretty=%s " + start + "..." + end)

    def _process_commits(self, commits: [str], regex_format: str) -> ([str], [str]):
        """Split commit subjects into direct JIRA ids and GitHub PR numbers."""
        jira_ticket_regex = re.compile(regex_format)
        # Github adds pull request number (#XXXX) at the end of its title.
        github_pr_regex = re.compile("(\\(#[0-9]*\\))")
        jira_tickets: [str] = []
        github_prs: [str] = []
        for commit in commits:
            jira_search = jira_ticket_regex.search(commit)
            if jira_search is not None:
                jira_tickets.append(jira_search.group())
            elif github_pr_regex.findall(commit):
                pr_number_text = github_pr_regex.findall(commit)[-1]
                # Keep only the PR number and remove (#).
                pr_number = pr_number_text.translate({ord(i): None for i in "()#"})
                github_prs.append(pr_number)
        return (jira_tickets, github_prs)

    def _get_jira_tickets_from_github(self, github_prs: [str], regex_format: str):
        """Fetch each PR body from GitHub and extract JIRA ids from JIRA URLs."""
        github = Github(self.github_access_token)
        repo = github.get_repo(self.repository)
        # Include the serve in the url.
        server_netloc = urlparse(self.jira_server).netloc
        url_regex = re.compile(f"https?:\\/\\/{server_netloc}\\b([-a-zA-Z0-9@:%_\\+.~#?&//=]*{regex_format})")
        jira_ticket_regex = re.compile(regex_format)
        jira_tickets = []
        for pr_number in github_prs:
            pr = repo.get_pull(int(pr_number))
            url_match = url_regex.search(pr.body)
            if url_match is None:
                # If no url is found the PR will be skipped.
                continue
            jira_ticket_match = jira_ticket_regex.search(url_match.group())
            url_path = Path(urlparse(url_match.group()).path)
            # In case the ticket ends with 1XXXX, the regex match will not contain the XXXX.
            # The match will be PROJECT-1, which is wrong.
            # This check is to exclude this results.
            if jira_ticket_match is not None and jira_ticket_match.group() == url_path.name:
                jira_tickets.append(jira_ticket_match.group())
        return jira_tickets
| [
"os.popen",
"urllib.parse.urlparse",
"github.Github",
"re.compile"
] | [((982, 1036), 'os.popen', 'os.popen', (["('git log --pretty=%s ' + start + '...' + end)"], {}), "('git log --pretty=%s ' + start + '...' + end)\n", (990, 1036), False, 'import os\n'), ((1151, 1175), 're.compile', 're.compile', (['regex_format'], {}), '(regex_format)\n', (1161, 1175), False, 'import re\n'), ((1277, 1306), 're.compile', 're.compile', (['"""(\\\\(#[0-9]*\\\\))"""'], {}), "('(\\\\(#[0-9]*\\\\))')\n", (1287, 1306), False, 'import re\n'), ((2008, 2040), 'github.Github', 'Github', (['self.github_access_token'], {}), '(self.github_access_token)\n', (2014, 2040), False, 'from github import Github\n'), ((2207, 2307), 're.compile', 're.compile', (['f"""https?:\\\\/\\\\/{server_netloc}\\\\b([-a-zA-Z0-9@:%_\\\\+.~#?&//=]*{regex_format})"""'], {}), "(\n f'https?:\\\\/\\\\/{server_netloc}\\\\b([-a-zA-Z0-9@:%_\\\\+.~#?&//=]*{regex_format})'\n )\n", (2217, 2307), False, 'import re\n'), ((2326, 2350), 're.compile', 're.compile', (['regex_format'], {}), '(regex_format)\n', (2336, 2350), False, 'import re\n'), ((2153, 2179), 'urllib.parse.urlparse', 'urlparse', (['self.jira_server'], {}), '(self.jira_server)\n', (2161, 2179), False, 'from urllib.parse import urlparse\n')] |
import numpy as np
from numpy import linalg as LA
import pickle
from collections import Counter
import csv
class Vocabulary(object):
    """Word/id vocabulary with an L2-row-normalized embedding matrix.

    The vocab file is a pickle of ``(size, word2id, id2word)``.  Embeddings
    start uniform in [-0.5, 0.5); rows whose word appears in *emb_file*
    (word2vec text format) are overwritten, then every row is normalized.
    """
    def __init__(self, vocab_file, emb_file='', dim_emb=0):
        with open(vocab_file, 'rb') as handle:
            self.size, self.word2id, self.id2word = pickle.load(handle)
        self.dim_emb = dim_emb
        # Uniform random initialization, shifted to be centered on zero.
        self.embedding = np.random.random_sample(
            (self.size, self.dim_emb)) - 0.5
        if emb_file:
            with open(emb_file) as handle:
                for row in handle:
                    tokens = row.split()
                    word = tokens[0]
                    if word in self.word2id:
                        vector = np.array([float(t) for t in tokens[1:]])
                        self.embedding[self.word2id[word]] = vector
        # L2-normalize each embedding row in place.
        self.embedding /= np.linalg.norm(self.embedding, axis=1, keepdims=True)
def build_vocab(data, vocab_path, vocab_metadata_path, min_occur=5):
    """Build a vocabulary from tokenized sentences and write two files.

    Words seen at least *min_occur* times get an id after the four special
    tokens.  ``(size, word2id, id2word)`` is pickled to *vocab_path*, and a
    TensorBoard embedding-visualizer metadata file (one word per line, as
    described at https://www.tensorflow.org/get_started/embedding_viz) is
    written to *vocab_metadata_path*.
    """
    word2id = {'<pad>': 0, '<go>': 1, '<eos>': 2, '<unk>': 3}
    id2word = ['<pad>', '<go>', '<eos>', '<unk>']
    counts = Counter(word for sent in data for word in sent)
    for word, count in counts.items():
        if count >= min_occur:
            word2id[word] = len(word2id)
            id2word.append(word)
    vocab_size = len(word2id)
    with open(vocab_path, 'wb') as out:
        pickle.dump((vocab_size, word2id, id2word), out, pickle.HIGHEST_PROTOCOL)
    print("Writing word embedding metadata file to %s" % (vocab_metadata_path))
    with open(vocab_metadata_path, "w") as out:
        writer = csv.DictWriter(out, delimiter="\t", fieldnames=['word'])
        for w in id2word:
            writer.writerow({"word": w})
"csv.DictWriter",
"pickle.dump",
"numpy.random.random_sample",
"pickle.load",
"collections.Counter",
"numpy.linalg.norm"
] | [((1050, 1064), 'collections.Counter', 'Counter', (['words'], {}), '(words)\n', (1057, 1064), False, 'from collections import Counter\n'), ((1271, 1342), 'pickle.dump', 'pickle.dump', (['(vocab_size, word2id, id2word)', 'f', 'pickle.HIGHEST_PROTOCOL'], {}), '((vocab_size, word2id, id2word), f, pickle.HIGHEST_PROTOCOL)\n', (1282, 1342), False, 'import pickle\n'), ((1673, 1729), 'csv.DictWriter', 'csv.DictWriter', (['f'], {'delimiter': '"""\t"""', 'fieldnames': 'fieldnames'}), "(f, delimiter='\\t', fieldnames=fieldnames)\n", (1687, 1729), False, 'import csv\n'), ((278, 292), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (289, 292), False, 'import pickle\n'), ((341, 391), 'numpy.random.random_sample', 'np.random.random_sample', (['(self.size, self.dim_emb)'], {}), '((self.size, self.dim_emb))\n', (364, 391), True, 'import numpy as np\n'), ((780, 806), 'numpy.linalg.norm', 'LA.norm', (['self.embedding[i]'], {}), '(self.embedding[i])\n', (787, 806), True, 'from numpy import linalg as LA\n')] |
from datetime import datetime
import pytz
if __name__ == '__main__':
    # tz database identifiers paired with human-readable display labels.
    # Bug fix: tz names are case-sensitive -- 'US/eastern' raises
    # pytz.UnknownTimeZoneError; the correct identifier is 'US/Eastern'.
    places_tz = ['Asia/Tokyo', 'Europe/Madrid', 'America/Argentina/Buenos_Aires', 'US/Eastern', 'US/Pacific', 'UTC']
    cities_name = ['Tokyo', 'Madrid', 'Buenos Aires', 'New York', 'California', 'UTC']
    for place_tz, city_name in zip(places_tz, cities_name):
        # datetime.now(tz) yields the current wall-clock time in that zone.
        city_time = datetime.now(pytz.timezone(place_tz))
        print(f'Fecha en {city_name} - {city_time}')
| [
"pytz.timezone"
] | [((367, 390), 'pytz.timezone', 'pytz.timezone', (['place_tz'], {}), '(place_tz)\n', (380, 390), False, 'import pytz\n')] |
import json
from unittest import TestCase
from mock import Mock, patch, call
from nose_parameterized import parameterized
from samcli.lib.logs.formatter import LogsFormatter, LambdaLogMsgFormatters, KeywordHighlighter, JSONMsgFormatter
from samcli.lib.logs.event import LogEvent
class TestLogsFormatter_pretty_print_event(TestCase):
    """Unit tests for LogsFormatter._pretty_print_event serialization."""

    def setUp(self):
        self.colored_mock = Mock()
        self.group_name = "group name"
        self.stream_name = "stream name"
        self.message = "message"
        self.event_dict = {"timestamp": 1, "message": self.message, "logStreamName": self.stream_name}

    def test_must_serialize_event(self):
        colored_timestamp = "colored timestamp"
        colored_stream_name = "colored stream name"
        self.colored_mock.yellow.return_value = colored_timestamp
        self.colored_mock.cyan.return_value = colored_stream_name

        event = LogEvent(self.group_name, self.event_dict)

        expected = " ".join([colored_stream_name, colored_timestamp, self.message])
        result = LogsFormatter._pretty_print_event(event, self.colored_mock)

        # Fixes: assertEquals is a deprecated alias removed in Python 3.12,
        # and Mock.has_calls() is NOT an assertion -- it silently creates a
        # child mock and always "passes".  Use the real assertion APIs.
        self.assertEqual(expected, result)
        self.colored_mock.yellow.assert_called_once()
        self.colored_mock.cyan.assert_called_with(self.stream_name)
def _passthru_formatter(event, colored):
return event
class TestLogsFormatter_do_format(TestCase):
    """Unit tests for LogsFormatter.do_format formatter-chain behavior."""

    def setUp(self):
        self.colored_mock = Mock()

        # Set formatter chain method to return the input unaltered.
        self.chain_method1 = Mock(wraps=_passthru_formatter)
        self.chain_method2 = Mock(wraps=_passthru_formatter)
        self.chain_method3 = Mock(wraps=_passthru_formatter)

        self.formatter_chain = [self.chain_method1, self.chain_method2, self.chain_method3]

    @patch.object(LogsFormatter, "_pretty_print_event", wraps=_passthru_formatter)
    def test_must_map_formatters_sequentially(self, pretty_print_mock):
        events_iterable = [1, 2, 3]
        expected_result = [1, 2, 3]
        expected_call_order = [
            call(1, colored=self.colored_mock),
            call(2, colored=self.colored_mock),
            call(3, colored=self.colored_mock),
        ]

        formatter = LogsFormatter(self.colored_mock, self.formatter_chain)

        result_iterable = formatter.do_format(events_iterable)
        # Fix: assertEquals is a deprecated alias removed in Python 3.12.
        self.assertEqual(list(result_iterable), expected_result)

        # Every chain method must see every event, in order.
        self.chain_method1.assert_has_calls(expected_call_order)
        self.chain_method2.assert_has_calls(expected_call_order)
        self.chain_method3.assert_has_calls(expected_call_order)
        pretty_print_mock.assert_has_calls(expected_call_order)  # Pretty Printer must always be called

    @patch.object(LogsFormatter, "_pretty_print_event", wraps=_passthru_formatter)
    def test_must_work_without_formatter_chain(self, pretty_print_mock):
        events_iterable = [1, 2, 3]
        expected_result = [1, 2, 3]
        expected_call_order = [
            call(1, colored=self.colored_mock),
            call(2, colored=self.colored_mock),
            call(3, colored=self.colored_mock),
        ]

        # No formatter chain.
        formatter = LogsFormatter(self.colored_mock)

        result_iterable = formatter.do_format(events_iterable)
        self.assertEqual(list(result_iterable), expected_result)

        # Pretty Print is always called, even if there are no other formatters in the chain.
        pretty_print_mock.assert_has_calls(expected_call_order)
        self.chain_method1.assert_not_called()
        self.chain_method2.assert_not_called()
        self.chain_method3.assert_not_called()
class TestLambdaLogMsgFormatters_colorize_crashes(TestCase):
    """Tests that Lambda crash messages are colorized and others pass through."""

    @parameterized.expand(
        [
            "Task timed out",
            "Something happened. Task timed out. Something else happend",
            "Process exited before completing request",
        ]
    )
    def test_must_color_crash_messages(self, input_msg):
        color_result = "colored messaage"
        colored = Mock()
        colored.red.return_value = color_result

        event = LogEvent("group_name", {"message": input_msg})

        result = LambdaLogMsgFormatters.colorize_errors(event, colored)
        # Fix: assertEquals is a deprecated alias removed in Python 3.12.
        self.assertEqual(result.message, color_result)
        colored.red.assert_called_with(input_msg)

    def test_must_ignore_other_messages(self):
        colored = Mock()

        event = LogEvent("group_name", {"message": "some msg"})

        result = LambdaLogMsgFormatters.colorize_errors(event, colored)
        self.assertEqual(result.message, "some msg")
        colored.red.assert_not_called()
class TestKeywordHighlight_highlight_keyword(TestCase):
    """Tests that every occurrence of the keyword is underlined."""

    def test_must_highlight_all_keywords(self):
        input_msg = "this keyword some keyword other keyword"
        keyword = "keyword"
        color_result = "colored"
        expected_msg = "this colored some colored other colored"

        colored = Mock()
        colored.underline.return_value = color_result

        event = LogEvent("group_name", {"message": input_msg})

        result = KeywordHighlighter(keyword).highlight_keywords(event, colored)
        # Fix: assertEquals is a deprecated alias removed in Python 3.12.
        self.assertEqual(result.message, expected_msg)
        colored.underline.assert_called_with(keyword)

    def test_must_ignore_if_keyword_is_absent(self):
        colored = Mock()
        input_msg = "this keyword some keyword other keyword"

        event = LogEvent("group_name", {"message": input_msg})

        # No keyword was configured, so the message must pass through untouched.
        result = KeywordHighlighter().highlight_keywords(event, colored)
        self.assertEqual(result.message, input_msg)
        colored.underline.assert_not_called()
class TestJSONMsgFormatter_format_json(TestCase):
    """Tests that valid JSON messages are pretty-printed and others are left alone."""

    def test_must_pretty_print_json(self):
        data = {"a": "b"}
        input_msg = '{"a": "b"}'
        expected_msg = json.dumps(data, indent=2)

        event = LogEvent("group_name", {"message": input_msg})

        result = JSONMsgFormatter.format_json(event, None)
        # Fix: assertEquals is a deprecated alias removed in Python 3.12.
        self.assertEqual(result.message, expected_msg)

    @parameterized.expand(["this is not json", '{"not a valid json"}'])
    def test_ignore_non_json(self, input_msg):

        event = LogEvent("group_name", {"message": input_msg})

        result = JSONMsgFormatter.format_json(event, None)
        self.assertEqual(result.message, input_msg)
| [
"samcli.lib.logs.formatter.KeywordHighlighter",
"mock.call",
"mock.Mock",
"samcli.lib.logs.formatter.LambdaLogMsgFormatters.colorize_errors",
"samcli.lib.logs.formatter.LogsFormatter",
"json.dumps",
"mock.patch.object",
"nose_parameterized.parameterized.expand",
"samcli.lib.logs.event.LogEvent",
"... | [((1777, 1854), 'mock.patch.object', 'patch.object', (['LogsFormatter', '"""_pretty_print_event"""'], {'wraps': '_passthru_formatter'}), "(LogsFormatter, '_pretty_print_event', wraps=_passthru_formatter)\n", (1789, 1854), False, 'from mock import Mock, patch, call\n'), ((2698, 2775), 'mock.patch.object', 'patch.object', (['LogsFormatter', '"""_pretty_print_event"""'], {'wraps': '_passthru_formatter'}), "(LogsFormatter, '_pretty_print_event', wraps=_passthru_formatter)\n", (2710, 2775), False, 'from mock import Mock, patch, call\n'), ((3689, 3843), 'nose_parameterized.parameterized.expand', 'parameterized.expand', (["['Task timed out',\n 'Something happened. Task timed out. Something else happend',\n 'Process exited before completing request']"], {}), "(['Task timed out',\n 'Something happened. Task timed out. Something else happend',\n 'Process exited before completing request'])\n", (3709, 3843), False, 'from nose_parameterized import parameterized\n'), ((6010, 6076), 'nose_parameterized.parameterized.expand', 'parameterized.expand', (['[\'this is not json\', \'{"not a valid json"}\']'], {}), '([\'this is not json\', \'{"not a valid json"}\'])\n', (6030, 6076), False, 'from nose_parameterized import parameterized\n'), ((386, 392), 'mock.Mock', 'Mock', ([], {}), '()\n', (390, 392), False, 'from mock import Mock, patch, call\n'), ((900, 942), 'samcli.lib.logs.event.LogEvent', 'LogEvent', (['self.group_name', 'self.event_dict'], {}), '(self.group_name, self.event_dict)\n', (908, 942), False, 'from samcli.lib.logs.event import LogEvent\n'), ((1045, 1104), 'samcli.lib.logs.formatter.LogsFormatter._pretty_print_event', 'LogsFormatter._pretty_print_event', (['event', 'self.colored_mock'], {}), '(event, self.colored_mock)\n', (1078, 1104), False, 'from samcli.lib.logs.formatter import LogsFormatter, LambdaLogMsgFormatters, KeywordHighlighter, JSONMsgFormatter\n'), ((1419, 1425), 'mock.Mock', 'Mock', ([], {}), '()\n', (1423, 1425), False, 'from mock import Mock, 
patch, call\n'), ((1524, 1555), 'mock.Mock', 'Mock', ([], {'wraps': '_passthru_formatter'}), '(wraps=_passthru_formatter)\n', (1528, 1555), False, 'from mock import Mock, patch, call\n'), ((1585, 1616), 'mock.Mock', 'Mock', ([], {'wraps': '_passthru_formatter'}), '(wraps=_passthru_formatter)\n', (1589, 1616), False, 'from mock import Mock, patch, call\n'), ((1646, 1677), 'mock.Mock', 'Mock', ([], {'wraps': '_passthru_formatter'}), '(wraps=_passthru_formatter)\n', (1650, 1677), False, 'from mock import Mock, patch, call\n'), ((2207, 2261), 'samcli.lib.logs.formatter.LogsFormatter', 'LogsFormatter', (['self.colored_mock', 'self.formatter_chain'], {}), '(self.colored_mock, self.formatter_chain)\n', (2220, 2261), False, 'from samcli.lib.logs.formatter import LogsFormatter, LambdaLogMsgFormatters, KeywordHighlighter, JSONMsgFormatter\n'), ((3159, 3191), 'samcli.lib.logs.formatter.LogsFormatter', 'LogsFormatter', (['self.colored_mock'], {}), '(self.colored_mock)\n', (3172, 3191), False, 'from samcli.lib.logs.formatter import LogsFormatter, LambdaLogMsgFormatters, KeywordHighlighter, JSONMsgFormatter\n'), ((4014, 4020), 'mock.Mock', 'Mock', ([], {}), '()\n', (4018, 4020), False, 'from mock import Mock, patch, call\n'), ((4085, 4131), 'samcli.lib.logs.event.LogEvent', 'LogEvent', (['"""group_name"""', "{'message': input_msg}"], {}), "('group_name', {'message': input_msg})\n", (4093, 4131), False, 'from samcli.lib.logs.event import LogEvent\n'), ((4150, 4204), 'samcli.lib.logs.formatter.LambdaLogMsgFormatters.colorize_errors', 'LambdaLogMsgFormatters.colorize_errors', (['event', 'colored'], {}), '(event, colored)\n', (4188, 4204), False, 'from samcli.lib.logs.formatter import LogsFormatter, LambdaLogMsgFormatters, KeywordHighlighter, JSONMsgFormatter\n'), ((4377, 4383), 'mock.Mock', 'Mock', ([], {}), '()\n', (4381, 4383), False, 'from mock import Mock, patch, call\n'), ((4400, 4447), 'samcli.lib.logs.event.LogEvent', 'LogEvent', (['"""group_name"""', "{'message': 'some 
msg'}"], {}), "('group_name', {'message': 'some msg'})\n", (4408, 4447), False, 'from samcli.lib.logs.event import LogEvent\n'), ((4466, 4520), 'samcli.lib.logs.formatter.LambdaLogMsgFormatters.colorize_errors', 'LambdaLogMsgFormatters.colorize_errors', (['event', 'colored'], {}), '(event, colored)\n', (4504, 4520), False, 'from samcli.lib.logs.formatter import LogsFormatter, LambdaLogMsgFormatters, KeywordHighlighter, JSONMsgFormatter\n'), ((4928, 4934), 'mock.Mock', 'Mock', ([], {}), '()\n', (4932, 4934), False, 'from mock import Mock, patch, call\n'), ((5005, 5051), 'samcli.lib.logs.event.LogEvent', 'LogEvent', (['"""group_name"""', "{'message': input_msg}"], {}), "('group_name', {'message': input_msg})\n", (5013, 5051), False, 'from samcli.lib.logs.event import LogEvent\n'), ((5315, 5321), 'mock.Mock', 'Mock', ([], {}), '()\n', (5319, 5321), False, 'from mock import Mock, patch, call\n'), ((5400, 5446), 'samcli.lib.logs.event.LogEvent', 'LogEvent', (['"""group_name"""', "{'message': input_msg}"], {}), "('group_name', {'message': input_msg})\n", (5408, 5446), False, 'from samcli.lib.logs.event import LogEvent\n'), ((5797, 5823), 'json.dumps', 'json.dumps', (['data'], {'indent': '(2)'}), '(data, indent=2)\n', (5807, 5823), False, 'import json\n'), ((5841, 5887), 'samcli.lib.logs.event.LogEvent', 'LogEvent', (['"""group_name"""', "{'message': input_msg}"], {}), "('group_name', {'message': input_msg})\n", (5849, 5887), False, 'from samcli.lib.logs.event import LogEvent\n'), ((5906, 5947), 'samcli.lib.logs.formatter.JSONMsgFormatter.format_json', 'JSONMsgFormatter.format_json', (['event', 'None'], {}), '(event, None)\n', (5934, 5947), False, 'from samcli.lib.logs.formatter import LogsFormatter, LambdaLogMsgFormatters, KeywordHighlighter, JSONMsgFormatter\n'), ((6141, 6187), 'samcli.lib.logs.event.LogEvent', 'LogEvent', (['"""group_name"""', "{'message': input_msg}"], {}), "('group_name', {'message': input_msg})\n", (6149, 6187), False, 'from samcli.lib.logs.event 
import LogEvent\n'), ((6206, 6247), 'samcli.lib.logs.formatter.JSONMsgFormatter.format_json', 'JSONMsgFormatter.format_json', (['event', 'None'], {}), '(event, None)\n', (6234, 6247), False, 'from samcli.lib.logs.formatter import LogsFormatter, LambdaLogMsgFormatters, KeywordHighlighter, JSONMsgFormatter\n'), ((2044, 2078), 'mock.call', 'call', (['(1)'], {'colored': 'self.colored_mock'}), '(1, colored=self.colored_mock)\n', (2048, 2078), False, 'from mock import Mock, patch, call\n'), ((2092, 2126), 'mock.call', 'call', (['(2)'], {'colored': 'self.colored_mock'}), '(2, colored=self.colored_mock)\n', (2096, 2126), False, 'from mock import Mock, patch, call\n'), ((2140, 2174), 'mock.call', 'call', (['(3)'], {'colored': 'self.colored_mock'}), '(3, colored=self.colored_mock)\n', (2144, 2174), False, 'from mock import Mock, patch, call\n'), ((2966, 3000), 'mock.call', 'call', (['(1)'], {'colored': 'self.colored_mock'}), '(1, colored=self.colored_mock)\n', (2970, 3000), False, 'from mock import Mock, patch, call\n'), ((3014, 3048), 'mock.call', 'call', (['(2)'], {'colored': 'self.colored_mock'}), '(2, colored=self.colored_mock)\n', (3018, 3048), False, 'from mock import Mock, patch, call\n'), ((3062, 3096), 'mock.call', 'call', (['(3)'], {'colored': 'self.colored_mock'}), '(3, colored=self.colored_mock)\n', (3066, 3096), False, 'from mock import Mock, patch, call\n'), ((5070, 5097), 'samcli.lib.logs.formatter.KeywordHighlighter', 'KeywordHighlighter', (['keyword'], {}), '(keyword)\n', (5088, 5097), False, 'from samcli.lib.logs.formatter import LogsFormatter, LambdaLogMsgFormatters, KeywordHighlighter, JSONMsgFormatter\n'), ((5465, 5485), 'samcli.lib.logs.formatter.KeywordHighlighter', 'KeywordHighlighter', ([], {}), '()\n', (5483, 5485), False, 'from samcli.lib.logs.formatter import LogsFormatter, LambdaLogMsgFormatters, KeywordHighlighter, JSONMsgFormatter\n')] |
#!/usr/bin/env python3
import argparse, pexpect
from getpass import getpass
from time import sleep
# Set up argument parser for the CLI front-end.
parser = argparse.ArgumentParser(prog='openconnect-cli', description='Automate logins to the OpenConnect SSL VPN client')
# Type of VPN to initiate. The group is mutually exclusive, so at most one
# protocol flag may be passed; if none is given the user is prompted later.
parser_type = parser.add_mutually_exclusive_group(required=False)
parser_type.add_argument('--anyconnect', action='store_true', default=False, help='Cisco AnyConnect SSL VPN')
parser_type.add_argument('--fortinet', action='store_true', default=False, help='Fortinet FortiClient SSL VPN')
parser_type.add_argument('--pulsesecure', action='store_true', default=False, help='Juniper Network Connect / Pulse Secure SSL VPN')
parser_type.add_argument('--paloalto', action='store_true', default=False, help='Palo Alto Networks (PAN) GlobalProtect SSL VPN')
# VPN server details. Defaults are False (not None/'') so the prompting loop
# below can use falsiness to detect which fields are still missing.
parser_dst = parser.add_argument_group('VPN Server Details', 'Any missing fields will be prompted on launch')
parser_dst.add_argument('--host', type=str, default=False, help='DNS hostname of SSL VPN server')
parser_dst.add_argument('--user', type=str, default=False, help='Username for SSL VPN account')
parser_dst.add_argument('--pw', type=str, default=False, help='Password for SSL VPN account')
# Import options as a plain dict so fields can be filled in interactively.
args = vars(parser.parse_args())
#args = vars(parser.parse_args(args=None if sys.argv[1:] else ['--help']))
def vpnTypePrompt():
    """Interactively ask the user which SSL VPN protocol to use.

    Returns the OpenConnect ``--protocol`` identifier string
    ('anyconnect', 'fortinet', 'nc' or 'gp'), or ``False`` when the
    input is not a number between 1 and 4 so the caller can re-prompt.
    """
    # Menu number -> OpenConnect protocol name.
    protocols = {1: 'anyconnect', 2: 'fortinet', 3: 'nc', 4: 'gp'}
    print('Please enter one of the following and press enter:')
    print('1 for Cisco AnyConnect')
    print('2 for Fortinet FortiClient')
    print('3 for Pulse Secure or Juniper Network Connect')
    print('4 for Palo Alto Networks GlobalProtect')
    try:
        protocol = int(input('SSL VPN Type: '))
    except (ValueError, EOFError):
        # Non-numeric input or closed stdin: signal "ask again".
        # (The original bare `except:` also swallowed KeyboardInterrupt,
        # making Ctrl-C unable to break out of the prompt loop.)
        return False
    return protocols.get(protocol, False)
# Map the chosen CLI flag to the OpenConnect protocol identifier.
if 'anyconnect' in args and args['anyconnect']:
    args['protocol'] = 'anyconnect'
elif 'fortinet' in args and args['fortinet']:
    args['protocol'] = 'fortinet'
elif 'pulsesecure' in args and args['pulsesecure']:
    args['protocol'] = 'nc'
elif 'paloalto' in args and args['paloalto']:
    args['protocol'] = 'gp'
else:
    args['protocol'] = False
# Keep prompting until vpnTypePrompt() returns a valid protocol string.
# NOTE(review): `== False` would be more idiomatic as `is False`.
while args['protocol'] == False:
    args['protocol'] = vpnTypePrompt()
# Fields to prompt for when False
prompt_for = {
    'host': 'DNS hostname of SSL VPN server: ',
    'user': 'Username for SSL VPN account: ',
    'pw': 'Password for SSL VPN account: '
}
# Iterate through fields and prompt for missing ones.
# The password is only collected for GlobalProtect ('gp'); other protocols
# prompt inside the spawned openconnect process itself.
if 'help' not in args:
    for field,prompt in prompt_for.items():
        if str(field) not in args or not args[field]:
            while args[field] == False:
                try:
                    if field == 'pw' and args['protocol'] != 'gp':
                        args[field] = 'N/A'
                    elif field == 'pw':
                        # getpass hides the typed password from the terminal.
                        args[field] = getpass(prompt)
                    else:
                        args[field] = input(prompt)
                except:
                    # NOTE(review): bare except silently retries on any error,
                    # including KeyboardInterrupt — consider narrowing.
                    pass
# Collate arguments for command
# NOTE(review): the command is assembled by string concatenation and run via
# `bash -c`; quotes or shell metacharacters in user/host would break or be
# interpreted by the shell. Consider passing an argv list instead.
command = [
    'sudo openconnect',
    '--interface=vpn0',
    '--script=/usr/share/vpnc-scripts/vpnc-script',
    '--protocol="' + args['protocol'] + '"',
    '--user="' + args['user'] + '"',
    args['host']
]
# Compile command
command = ' '.join(command)
# Start process (spawnu = unicode-mode pexpect spawn)
process = pexpect.spawnu('/bin/bash', ['-c', command])
# Automate login process for Palo Alto GlobalProtect: answer the password,
# gateway selection and certificate prompts that openconnect emits.
if args['protocol'] == 'gp':
    process.expect('Password: ')
    process.sendline(args['pw'])
    process.expect('GATEWAY: ')
    process.sendline('Primary GP')
    process.expect('anything else to view:')
    process.sendline('yes')
    process.expect('Password: ')
    process.sendline(args['pw'])
# Clear remaining private data (drop references to the plaintext password)
args = None
command = None
# Hand over input to user, wait for process to end if interactive mode ends
process.interact()
while process.isalive():
    sleep(5)
| [
"getpass.getpass",
"time.sleep",
"pexpect.spawnu",
"argparse.ArgumentParser"
] | [((135, 252), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""openconnect-cli"""', 'description': '"""Automate logins to the OpenConnect SSL VPN client"""'}), "(prog='openconnect-cli', description=\n 'Automate logins to the OpenConnect SSL VPN client')\n", (158, 252), False, 'import argparse, pexpect\n'), ((3346, 3390), 'pexpect.spawnu', 'pexpect.spawnu', (['"""/bin/bash"""', "['-c', command]"], {}), "('/bin/bash', ['-c', command])\n", (3360, 3390), False, 'import argparse, pexpect\n'), ((3912, 3920), 'time.sleep', 'sleep', (['(5)'], {}), '(5)\n', (3917, 3920), False, 'from time import sleep\n'), ((2925, 2940), 'getpass.getpass', 'getpass', (['prompt'], {}), '(prompt)\n', (2932, 2940), False, 'from getpass import getpass\n')] |
# Copyright 2020 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests of the human_players levels."""
import collections
from unittest import mock
from absl.testing import absltest
from absl.testing import parameterized
from dm_env import specs
import numpy as np
import pygame
import dmlab2d
from meltingpot.python.configs.substrates import (
allelopathic_harvest as mp_allelopathic_harvest,
)
from meltingpot.python.configs.substrates import (
arena_running_with_scissors_in_the_matrix as mp_arena_running_with_scissors_itm,
)
from meltingpot.python.configs.substrates import (
bach_or_stravinsky_in_the_matrix as mp_bach_or_stravinsky_itm,
)
from meltingpot.python.configs.substrates import capture_the_flag as mp_capture_the_flag
from meltingpot.python.configs.substrates import (
chemistry_metabolic_cycles as mp_chemistry_metabolic_cycles,
)
from meltingpot.python.configs.substrates import chicken_in_the_matrix as mp_chicken_itm
from meltingpot.python.configs.substrates import clean_up as mp_clean_up
from meltingpot.python.configs.substrates import (
collaborative_cooking_passable as mp_collaborative_cooking_passable,
)
from meltingpot.python.configs.substrates import (
commons_harvest_closed as mp_commons_harvest_closed,
)
from meltingpot.python.configs.substrates import king_of_the_hill as mp_king_of_the_hill
from meltingpot.python.configs.substrates import (
prisoners_dilemma_in_the_matrix as mp_prisoners_dilemma_itm,
)
from meltingpot.python.configs.substrates import (
pure_coordination_in_the_matrix as mp_pure_coordination_itm,
)
from meltingpot.python.configs.substrates import (
rationalizable_coordination_in_the_matrix as mp_rationalizable_coordination_itm,
)
from meltingpot.python.configs.substrates import (
running_with_scissors_in_the_matrix as mp_running_with_scissors_itm,
)
from meltingpot.python.configs.substrates import (
stag_hunt_in_the_matrix as mp_stag_hunt_itm,
)
from meltingpot.python.configs.substrates import territory_rooms as mp_territory_rooms
from meltingpot.python.human_players import level_playing_utils
from meltingpot.python.human_players import play_allelopathic_harvest
from meltingpot.python.human_players import play_any_paintball_game
from meltingpot.python.human_players import play_anything_in_the_matrix
from meltingpot.python.human_players import play_clean_up
from meltingpot.python.human_players import play_collaborative_cooking
from meltingpot.python.human_players import play_commons_harvest
from meltingpot.python.human_players import play_grid_land
from meltingpot.python.human_players import play_territory
class HumanActionReaderTest(parameterized.TestCase):
    """Tests mapping of pygame key presses to per-player action dicts.

    Each parameter tuple is: (action_map, key pressed, player index,
    expected action dict). The expected dict covers three players; only the
    active player's entry should be non-zero.
    """

    @parameterized.parameters(
        (
            { # Capture the following key events,
                "move": level_playing_utils.get_direction_pressed,
            }, # given this action name, key pressed, for this player index; and
            pygame.K_w,
            0,
            # Expecting this action list out.
            {"1.move": 1, "2.move": 0, "3.move": 0},
        ),
        (
            { # Capture the following key events,
                "move": level_playing_utils.get_direction_pressed,
            }, # given this action name, key pressed, for this player index; and
            pygame.K_s,
            2,
            # Expecting this action list out.
            {"1.move": 0, "2.move": 0, "3.move": 3},
        ),
        (
            { # Capture the following key events,
                "move": level_playing_utils.get_direction_pressed,
            }, # given this action name, key pressed, for this player index; and
            pygame.K_s,
            0,
            # Expecting this action list out.
            {"1.move": 3, "2.move": 0, "3.move": 0},
        ),
        (
            { # Capture the following key events,
                "move": level_playing_utils.get_direction_pressed,
            }, # given action name, irrelevant key pressed, for player 0; and
            pygame.K_x,
            0,
            # Expecting this action list out.
            {"1.move": 0, "2.move": 0, "3.move": 0},
        ),
        (
            { # Capture the following key events (don't need to make sense),
                "move": level_playing_utils.get_space_key_pressed,
            }, # given action name, irrelevant key pressed, for player 0; and
            pygame.K_SPACE,
            0,
            # Expecting this action list out.
            {"1.move": 1, "2.move": 0, "3.move": 0},
        ),
    )
    @mock.patch.object(pygame, "key")
    def test_human_action(
        self, action_map, key_pressed, player_index, expected_action, mock_key
    ):
        """Simulates a single key press and checks the emitted action dict."""
        # pygame.key.get_pressed() normally returns a per-key boolean array;
        # a defaultdict(bool) mimics it with only `key_pressed` held down.
        retval = collections.defaultdict(bool)
        retval[key_pressed] = True
        mock_key.get_pressed.return_value = retval
        # Scalar move action in [0, 4], shared by all three players.
        move_array = specs.BoundedArray(
            shape=tuple(), dtype=np.intc, minimum=0, maximum=4, name="move"
        )
        action_spec = {
            "1.move": move_array,
            "2.move": move_array,
            "3.move": move_array,
        }
        # Mock the environment so only ActionReader's mapping logic is tested.
        with mock.patch.object(dmlab2d, "Lab2d") as env:
            env.action_spec.return_value = action_spec
            har = level_playing_utils.ActionReader(env, action_map)
            np.testing.assert_array_equal(har.step(player_index), expected_action)
class PlayLevelTest(parameterized.TestCase):
    """Smoke-tests each (substrate config, human-play script) pairing.

    Runs a short headless episode per substrate with all pygame subsystems
    mocked out, verifying the play scripts' action maps are compatible with
    their substrate configs.
    """

    @parameterized.parameters(
        (mp_allelopathic_harvest, play_allelopathic_harvest),
        (mp_arena_running_with_scissors_itm, play_anything_in_the_matrix),
        (mp_bach_or_stravinsky_itm, play_anything_in_the_matrix),
        (mp_capture_the_flag, play_any_paintball_game),
        (mp_chemistry_metabolic_cycles, play_grid_land),
        (mp_chicken_itm, play_anything_in_the_matrix),
        (mp_clean_up, play_clean_up),
        (mp_collaborative_cooking_passable, play_collaborative_cooking),
        (mp_commons_harvest_closed, play_commons_harvest),
        (mp_king_of_the_hill, play_any_paintball_game),
        (mp_prisoners_dilemma_itm, play_anything_in_the_matrix),
        (mp_pure_coordination_itm, play_anything_in_the_matrix),
        (mp_rationalizable_coordination_itm, play_anything_in_the_matrix),
        (mp_running_with_scissors_itm, play_anything_in_the_matrix),
        (mp_stag_hunt_itm, play_anything_in_the_matrix),
        (mp_territory_rooms, play_territory),
    )
    @mock.patch.object(pygame, "key")
    @mock.patch.object(pygame, "display")
    @mock.patch.object(pygame, "event")
    @mock.patch.object(pygame, "time")
    def test_run_level(
        self, config_module, play_module, unused_k, unused_d, unused_e, unused_t
    ):
        """Runs one short episode of the substrate via the play script."""
        full_config = config_module.get_config()
        # Keep the episode very short so the smoke test stays fast.
        full_config["lab2d_settings"]["episodeLengthFrames"] = 10
        level_playing_utils.run_episode("RGB", {}, play_module._ACTION_MAP, full_config)
# Standard absl test entry point.
if __name__ == "__main__":
    absltest.main()
| [
"meltingpot.python.human_players.level_playing_utils.run_episode",
"absl.testing.parameterized.parameters",
"absl.testing.absltest.main",
"collections.defaultdict",
"unittest.mock.patch.object",
"meltingpot.python.human_players.level_playing_utils.ActionReader"
] | [((3223, 3840), 'absl.testing.parameterized.parameters', 'parameterized.parameters', (["({'move': level_playing_utils.get_direction_pressed}, pygame.K_w, 0, {\n '1.move': 1, '2.move': 0, '3.move': 0})", "({'move': level_playing_utils.get_direction_pressed}, pygame.K_s, 2, {\n '1.move': 0, '2.move': 0, '3.move': 3})", "({'move': level_playing_utils.get_direction_pressed}, pygame.K_s, 0, {\n '1.move': 3, '2.move': 0, '3.move': 0})", "({'move': level_playing_utils.get_direction_pressed}, pygame.K_x, 0, {\n '1.move': 0, '2.move': 0, '3.move': 0})", "({'move': level_playing_utils.get_space_key_pressed}, pygame.K_SPACE, 0, {\n '1.move': 1, '2.move': 0, '3.move': 0})"], {}), "(({'move': level_playing_utils.\n get_direction_pressed}, pygame.K_w, 0, {'1.move': 1, '2.move': 0,\n '3.move': 0}), ({'move': level_playing_utils.get_direction_pressed},\n pygame.K_s, 2, {'1.move': 0, '2.move': 0, '3.move': 3}), ({'move':\n level_playing_utils.get_direction_pressed}, pygame.K_s, 0, {'1.move': 3,\n '2.move': 0, '3.move': 0}), ({'move': level_playing_utils.\n get_direction_pressed}, pygame.K_x, 0, {'1.move': 0, '2.move': 0,\n '3.move': 0}), ({'move': level_playing_utils.get_space_key_pressed},\n pygame.K_SPACE, 0, {'1.move': 1, '2.move': 0, '3.move': 0}))\n", (3247, 3840), False, 'from absl.testing import parameterized\n'), ((5080, 5112), 'unittest.mock.patch.object', 'mock.patch.object', (['pygame', '"""key"""'], {}), "(pygame, 'key')\n", (5097, 5112), False, 'from unittest import mock\n'), ((5938, 6869), 'absl.testing.parameterized.parameters', 'parameterized.parameters', (['(mp_allelopathic_harvest, play_allelopathic_harvest)', '(mp_arena_running_with_scissors_itm, play_anything_in_the_matrix)', '(mp_bach_or_stravinsky_itm, play_anything_in_the_matrix)', '(mp_capture_the_flag, play_any_paintball_game)', '(mp_chemistry_metabolic_cycles, play_grid_land)', '(mp_chicken_itm, play_anything_in_the_matrix)', '(mp_clean_up, play_clean_up)', '(mp_collaborative_cooking_passable, 
play_collaborative_cooking)', '(mp_commons_harvest_closed, play_commons_harvest)', '(mp_king_of_the_hill, play_any_paintball_game)', '(mp_prisoners_dilemma_itm, play_anything_in_the_matrix)', '(mp_pure_coordination_itm, play_anything_in_the_matrix)', '(mp_rationalizable_coordination_itm, play_anything_in_the_matrix)', '(mp_running_with_scissors_itm, play_anything_in_the_matrix)', '(mp_stag_hunt_itm, play_anything_in_the_matrix)', '(mp_territory_rooms, play_territory)'], {}), '((mp_allelopathic_harvest,\n play_allelopathic_harvest), (mp_arena_running_with_scissors_itm,\n play_anything_in_the_matrix), (mp_bach_or_stravinsky_itm,\n play_anything_in_the_matrix), (mp_capture_the_flag,\n play_any_paintball_game), (mp_chemistry_metabolic_cycles,\n play_grid_land), (mp_chicken_itm, play_anything_in_the_matrix), (\n mp_clean_up, play_clean_up), (mp_collaborative_cooking_passable,\n play_collaborative_cooking), (mp_commons_harvest_closed,\n play_commons_harvest), (mp_king_of_the_hill, play_any_paintball_game),\n (mp_prisoners_dilemma_itm, play_anything_in_the_matrix), (\n mp_pure_coordination_itm, play_anything_in_the_matrix), (\n mp_rationalizable_coordination_itm, play_anything_in_the_matrix), (\n mp_running_with_scissors_itm, play_anything_in_the_matrix), (\n mp_stag_hunt_itm, play_anything_in_the_matrix), (mp_territory_rooms,\n play_territory))\n', (5962, 6869), False, 'from absl.testing import parameterized\n'), ((6949, 6981), 'unittest.mock.patch.object', 'mock.patch.object', (['pygame', '"""key"""'], {}), "(pygame, 'key')\n", (6966, 6981), False, 'from unittest import mock\n'), ((6987, 7023), 'unittest.mock.patch.object', 'mock.patch.object', (['pygame', '"""display"""'], {}), "(pygame, 'display')\n", (7004, 7023), False, 'from unittest import mock\n'), ((7029, 7063), 'unittest.mock.patch.object', 'mock.patch.object', (['pygame', '"""event"""'], {}), "(pygame, 'event')\n", (7046, 7063), False, 'from unittest import mock\n'), ((7069, 7102), 
'unittest.mock.patch.object', 'mock.patch.object', (['pygame', '"""time"""'], {}), "(pygame, 'time')\n", (7086, 7102), False, 'from unittest import mock\n'), ((7452, 7467), 'absl.testing.absltest.main', 'absltest.main', ([], {}), '()\n', (7465, 7467), False, 'from absl.testing import absltest\n'), ((5243, 5272), 'collections.defaultdict', 'collections.defaultdict', (['bool'], {}), '(bool)\n', (5266, 5272), False, 'import collections\n'), ((7338, 7423), 'meltingpot.python.human_players.level_playing_utils.run_episode', 'level_playing_utils.run_episode', (['"""RGB"""', '{}', 'play_module._ACTION_MAP', 'full_config'], {}), "('RGB', {}, play_module._ACTION_MAP, full_config\n )\n", (7369, 7423), False, 'from meltingpot.python.human_players import level_playing_utils\n'), ((5636, 5671), 'unittest.mock.patch.object', 'mock.patch.object', (['dmlab2d', '"""Lab2d"""'], {}), "(dmlab2d, 'Lab2d')\n", (5653, 5671), False, 'from unittest import mock\n'), ((5753, 5802), 'meltingpot.python.human_players.level_playing_utils.ActionReader', 'level_playing_utils.ActionReader', (['env', 'action_map'], {}), '(env, action_map)\n', (5785, 5802), False, 'from meltingpot.python.human_players import level_playing_utils\n')] |
from pyNastran.bdf.bdf import BDF
# Load the Nastran bulk data file with cross-referencing enabled so element
# objects can resolve their node IDs and property IDs.
model = BDF()
model.is_nx = True  # input deck uses NX Nastran conventions
section = 5
#filename = r'D:\SNC IAS\01 - FAST Program\05 - Modified Sections\01 - AeroComBAT Files\section_{}.dat'.format(section)
filename = r'C:\Users\benna\Desktop\Work Temp\SNC\FAST\SIMPLE_SECTIONS\CTRIA6_1_100.dat'
model.read_bdf(filename, xref=True)
#Create Export File (same path as the input with an _AeroComBAT suffix)
f = open(filename[:-4]+'_AeroComBAT.dat','w')
f.write('$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$\n')
f.write('$$$$$$$$$$$$$                AEROCOMBAT INPUT FILE                 $$$$$$$$$$$$\n')
f.write('$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$\n')
# Emit one XNODE line per grid point: XNODE,<id>,<y>,<z>
# (the x coordinate is dropped — the cross section lives in the y-z plane)
for NID, node in model.nodes.items():
    node_pos = node.get_position()
    #Write node line
    f.write('XNODE,{},{},{}\n'.format(NID,node_pos[1],node_pos[2]))
# TEMP LINE FOR MAT PROPERTY — hard-coded isotropic material definitions
# (MAT_ISO,<id>,<name>,<E>,<nu>,<rho>,<alpha>); IDs 1-10 are referenced by
# the PID-to-MID mapping in the element loop below.
f.write('MAT_ISO,1,Lower Upper Aeroshell,8781151.,0.232915,0.0004144,0.0083\n')
f.write('MAT_ISO,2,Ring Frame Flange,7627582.,0.201668,0.0004144,0.0083\n')
f.write('MAT_ISO,3,Cabin Skin,8473671.,0.259765,0.0004144,0.0083\n')
f.write('MAT_ISO,4,Hat Stiffeners,9283126.,0.206558,0.0004144,0.0083\n')
f.write('MAT_ISO,5,Lower Outer Aeroshell,6544552.,0.428299,0.0004144,0.0083\n')
f.write('MAT_ISO,6,Upper Cabin,8196235.,0.284012,0.0004144,0.0083\n')
f.write('MAT_ISO,7,Titanium,16000000.,0.31,0.0004144,0.0083\n')
f.write('MAT_ISO,8,Quasi Iso,7944519.,0.306626,0.000144,0.0083\n')
f.write('MAT_ISO,9,Outer Aeroshell,7505270,0.344368,0.000144,0.0083\n')
f.write('MAT_ISO,10,Aluminum,10300000.,0.33,0.0002615,0.0083\n')
# Map each Nastran property ID (PID) to the AeroComBAT material ID (MID)
# declared by the MAT_ISO lines written above.
PID_TO_MID = {
    7000003: 1,   # Lower Upper Aeroshell
    7000004: 2,   # Ring Frame Flange
    7000005: 3,   # Cabin Skin
    7000006: 4,   # Hat Stiffeners
    7000007: 5,   # Lower Outer Aeroshell
    7000008: 6,   # Upper Cabin
    7000000: 7,   # Titanium
    7000001: 8,   # Quasi Iso
    7000002: 9,   # Outer Aeroshell
    7000009: 10,  # Aluminum
}
EIDs = []
for EID, elem in model.elements.items():
    try:
        tmp_MID = PID_TO_MID[elem.pid]
    except KeyError:
        # Bug fix: the original passed elem.pid as a second *argument* to
        # ValueError instead of formatting it into the message.
        raise ValueError(
            'Encountered an unexpected Material Prop {}'.format(elem.pid))
    EIDs.append(EID)
    node_ids = elem.node_ids
    if elem.type == 'CQUAD8':
        # Nastran lists the four corner nodes first, then the four mid-side
        # nodes; XQUAD8 expects them interleaved around the perimeter
        # (corner, mid-side, corner, ...).
        n1, n3, n5, n7 = node_ids[0], node_ids[1], node_ids[2], node_ids[3]
        n2, n4, n6, n8 = node_ids[4], node_ids[5], node_ids[6], node_ids[7]
        f.write('XQUAD8,{},{},{},{},{},{},{},{},{},{}\n'.format(
            EID, n1, n2, n3, n4, n5, n6, n7, n8, tmp_MID))
    elif elem.type == 'CQUAD4':
        f.write('XQUAD4,{},{},{},{},{},{}\n'.format(
            EID, node_ids[0], node_ids[1], node_ids[2], node_ids[3], tmp_MID))
    elif elem.type == 'CTRIA3':
        f.write('XTRIA3,{},{},{},{},{}\n'.format(
            EID, node_ids[0], node_ids[1], node_ids[2], tmp_MID))
    elif elem.type == 'CTRIA6':
        f.write('XTRIA6,{},{},{},{},{},{},{},{}\n'.format(
            EID, node_ids[0], node_ids[1], node_ids[2],
            node_ids[3], node_ids[4], node_ids[5], tmp_MID))
# Cross-section header and the element-ID list belonging to it.
f.write('SECTIONG,{},{}\n'.format(section, section))
# str(list)[1:-1] renders "1, 2, 3" — the comma-separated ID list.
f.write('LIST,{},INT,'.format(section) + str(EIDs)[1:-1] + '\n')
f.close() | [
"pyNastran.bdf.bdf.BDF"
] | [((43, 48), 'pyNastran.bdf.bdf.BDF', 'BDF', ([], {}), '()\n', (46, 48), False, 'from pyNastran.bdf.bdf import BDF\n')] |
## @file
## @defgroup flask flask
## @brief minimized Flask-based backend
## @ingroup web
import config
from core.env import *
from core.io import Dir
from .web import Web
from core.meta import Module
from core.time import *
from gen.js import jsFile
from gen.s import S
from web.html import htmlFile
import os, re
import flask
from flask_socketio import SocketIO, emit
# Register the shared `static/` and `templates/` directories in the global env.
env['static'] = Dir('static')
env['templates'] = Dir('templates')
## web application: bundles a Flask app, its SocketIO channel and the
## per-application static/template directories registered in the global `env`
## @ingroup flask
class App(Web, Module):
    ## @param[in] V string | File in form of `web.App(__file__)``
    def __init__(self, V):
        Module.__init__(self, V)
        # register this application in the global environment graph
        env << self
        env >> self
        #
        # per-app static directory holding the generated JS file
        self['static'] = Dir(self)
        env.static // self.static
        self['js'] = jsFile(self)
        self.static // self['js']
        #
        # per-app template directory holding the generated HTML file
        self['templates'] = Dir(self)
        env.templates // self.templates
        self['html'] = htmlFile(self)
        self.templates // self['html']
        #
        self.flask = flask
        self.app = flask.Flask(self.value)
        self.app.config['SECRET_KEY'] = config.SECRET_KEY
        self.watch()
        self.router()
        #
        self.sio = SocketIO(self.app)
        self.socketio()
    ## configure SocketIO event processors: both 'connect' and 'localtime'
    ## push the server's current local time to the client
    def socketio(self):
        @self.sio.on('connect')
        def connect(): self.sio.emit('localtime', LocalTime().json())
        @self.sio.on('localtime')
        def localtime(): self.sio.emit('localtime', LocalTime().json())
    ## put application name in page/window title
    def title(self): return self.head(test=True)
    ## configure file watch list: changes to these files trigger the
    ## Flask debug reloader in addition to the default module files
    def watch(self):
        self.extra_files = [
            'config.py', f'{self.value}.py',
            'web/flask.py', 'core/object.py']
    ## lookup in global `env`
    ## @param[in] path slashed path to the needed element;
    ##            purely numeric components are used as integer indexes
    ## @returns the element found by walking `env` along `path`
    def lookup(self, path):
        # explicit check instead of `assert`: asserts are stripped under -O
        if not isinstance(path, str):
            raise TypeError('path must be str, got %r' % type(path))
        ret = env
        if not path:
            return ret
        for i in path.split('/'):
            if re.match(r'^\d+$', i):
                i = int(i)  # numeric component -> positional index
            ret = ret[i]
        return ret
    ## configure routes statically
    def router(self):
        # '/' redirects to the application's own page
        @self.app.route('/')
        def index():
            return flask.redirect(f'/{self.value}')
        # '/dump[/<path>]' renders an inspector view of any env element
        @self.app.route('/dump/<path:path>')
        @self.app.route('/dump/')
        @self.app.route('/dump')
        def dump(path=''):
            item = self.lookup(path)
            return flask.render_template('dump.html', env=env, app=self, item=item)
        # application main page (also reachable via the generic '/app')
        @self.app.route(f'/{self.value}')
        @self.app.route('/app')
        def app():
            return flask.render_template(f'{self.value}/index.html', env=env, app=self)
    ## run application as web backend (blocking; uses the SocketIO server)
    def run(self):
        print(env)
        self.sio.run(self.app,
                     host=config.HOST, port=config.PORT, debug=True,
                     extra_files=self.extra_files)
| [
"flask.render_template",
"flask.Flask",
"core.meta.Module.__init__",
"re.match",
"flask_socketio.SocketIO",
"web.html.htmlFile",
"core.io.Dir",
"flask.redirect",
"gen.js.jsFile"
] | [((392, 405), 'core.io.Dir', 'Dir', (['"""static"""'], {}), "('static')\n", (395, 405), False, 'from core.io import Dir\n'), ((425, 441), 'core.io.Dir', 'Dir', (['"""templates"""'], {}), "('templates')\n", (428, 441), False, 'from core.io import Dir\n'), ((605, 629), 'core.meta.Module.__init__', 'Module.__init__', (['self', 'V'], {}), '(self, V)\n', (620, 629), False, 'from core.meta import Module\n'), ((705, 714), 'core.io.Dir', 'Dir', (['self'], {}), '(self)\n', (708, 714), False, 'from core.io import Dir\n'), ((770, 782), 'gen.js.jsFile', 'jsFile', (['self'], {}), '(self)\n', (776, 782), False, 'from gen.js import jsFile\n'), ((855, 864), 'core.io.Dir', 'Dir', (['self'], {}), '(self)\n', (858, 864), False, 'from core.io import Dir\n'), ((928, 942), 'web.html.htmlFile', 'htmlFile', (['self'], {}), '(self)\n', (936, 942), False, 'from web.html import htmlFile\n'), ((1038, 1061), 'flask.Flask', 'flask.Flask', (['self.value'], {}), '(self.value)\n', (1049, 1061), False, 'import flask\n'), ((1192, 1210), 'flask_socketio.SocketIO', 'SocketIO', (['self.app'], {}), '(self.app)\n', (1200, 1210), False, 'from flask_socketio import SocketIO, emit\n'), ((2050, 2071), 're.match', 're.match', (['"""^\\\\d+$"""', 'i'], {}), "('^\\\\d+$', i)\n", (2058, 2071), False, 'import os, re\n'), ((2272, 2304), 'flask.redirect', 'flask.redirect', (['f"""/{self.value}"""'], {}), "(f'/{self.value}')\n", (2286, 2304), False, 'import flask\n'), ((2501, 2565), 'flask.render_template', 'flask.render_template', (['"""dump.html"""'], {'env': 'env', 'app': 'self', 'item': 'item'}), "('dump.html', env=env, app=self, item=item)\n", (2522, 2565), False, 'import flask\n'), ((2679, 2747), 'flask.render_template', 'flask.render_template', (['f"""{self.value}/index.html"""'], {'env': 'env', 'app': 'self'}), "(f'{self.value}/index.html', env=env, app=self)\n", (2700, 2747), False, 'import flask\n')] |
import logging
import sys
from datetime import datetime
import requests
from tqdm import tqdm
import log
from datamodel.advisory import AdvisoryRecord
from datamodel.commit import Commit
from filtering.filter import filter_commits
from git.git import GIT_CACHE, Git
from git.version_to_tag import get_tag_for_version
from log.util import init_local_logger
# from processing.commit.feature_extractor import extract_features
from processing.commit.preprocessor import preprocess_commit
from ranking.rank import rank
from ranking.rules import apply_rules
# from util.profile import profile
from stats.execution import Counter, ExecutionTimer, execution_statistics
_logger = init_local_logger()
# Candidate-commit time window around the advisory publication date.
SECS_PER_DAY = 86400
TIME_LIMIT_BEFORE = 3 * 365 * SECS_PER_DAY  # look back up to 3 years
TIME_LIMIT_AFTER = 180 * SECS_PER_DAY  # look forward up to 180 days
# Abort if the repository yields more candidates than this.
MAX_CANDIDATES = 1000
# Execution-statistics sub-collection for the core pipeline timers/counters.
core_statistics = execution_statistics.sub_collection("core")
# @profile
def prospector( # noqa: C901
vulnerability_id: str,
repository_url: str,
publication_date: str = "",
vuln_descr: str = "",
tag_interval: str = "",
version_interval: str = "",
modified_files: "list[str]" = [],
code_tokens: "list[str]" = [],
time_limit_before: int = TIME_LIMIT_BEFORE,
time_limit_after: int = TIME_LIMIT_AFTER,
use_nvd: bool = False,
nvd_rest_endpoint: str = "",
backend_address: str = "",
git_cache: str = GIT_CACHE,
limit_candidates: int = MAX_CANDIDATES,
active_rules: "list[str]" = ["ALL"],
model_name: str = "",
) -> "list[Commit]":
_logger.info("begin main commit and CVE processing")
# -------------------------------------------------------------------------
# advisory record extraction
# -------------------------------------------------------------------------
advisory_record = AdvisoryRecord(
vulnerability_id=vulnerability_id,
repository_url=repository_url,
description=vuln_descr,
from_nvd=use_nvd,
nvd_rest_endpoint=nvd_rest_endpoint,
)
_logger.pretty_log(advisory_record)
advisory_record.analyze(use_nvd=use_nvd)
_logger.info(f"{advisory_record.code_tokens=}")
if publication_date != "":
advisory_record.published_timestamp = int(
datetime.strptime(publication_date, r"%Y-%m-%dT%H:%M%z").timestamp()
)
if len(code_tokens) > 0:
advisory_record.code_tokens += tuple(code_tokens)
# drop duplicates
advisory_record.code_tokens = list(set(advisory_record.code_tokens))
# FIXME this should be handled better (or '' should not end up in the modified_files in
# the first place)
if modified_files != [""]:
advisory_record.paths += modified_files
_logger.info(f"{advisory_record.code_tokens=}")
# print(advisory_record.paths)
# -------------------------------------------------------------------------
# retrieval of commit candidates
# -------------------------------------------------------------------------
with ExecutionTimer(
core_statistics.sub_collection(name="retrieval of commit candidates")
):
_logger.info(
"Downloading repository {} in {}..".format(repository_url, git_cache)
)
repository = Git(repository_url, git_cache)
repository.clone()
tags = repository.get_tags()
_logger.debug(f"Found tags: {tags}")
_logger.info("Done retrieving %s" % repository_url)
prev_tag = None
following_tag = None
if tag_interval != "":
prev_tag, following_tag = tag_interval.split(":")
elif version_interval != "":
vuln_version, fixed_version = version_interval.split(":")
prev_tag = get_tag_for_version(tags, vuln_version)[0]
following_tag = get_tag_for_version(tags, fixed_version)[0]
since = None
until = None
if advisory_record.published_timestamp:
since = advisory_record.published_timestamp - time_limit_before
until = advisory_record.published_timestamp + time_limit_after
candidates = repository.get_commits(
since=since,
until=until,
ancestors_of=following_tag,
exclude_ancestors_of=prev_tag,
filter_files="*.java",
)
_logger.info("Found %d candidates" % len(candidates))
# if some code_tokens were found in the advisory text, require
# that candidate commits touch some file whose path contains those tokens
# NOTE: this works quite well for Java, not sure how general this criterion is
# -------------------------------------------------------------------------
# commit filtering
#
# Here we apply additional criteria to discard commits from the initial
# set extracted from the repository
# # -------------------------------------------------------------------------
# if advisory_record.code_tokens != []:
# _logger.info(
# "Detected tokens in advisory text, searching for files whose path contains those tokens"
# )
# _logger.info(advisory_record.code_tokens)
# if modified_files == [""]:
# modified_files = advisory_record.code_tokens
# else:
# modified_files.extend(advisory_record.code_tokens)
# candidates = filter_by_changed_files(candidates, modified_files, repository)
with ExecutionTimer(core_statistics.sub_collection(name="commit filtering")):
candidates = filter_commits(candidates)
_logger.debug(f"Collected {len(candidates)} candidates")
if len(candidates) > limit_candidates:
_logger.error(
"Number of candidates exceeds %d, aborting." % limit_candidates
)
_logger.error(
"Possible cause: the backend might be unreachable or otherwise unable to provide details about the advisory."
)
sys.exit(-1)
# -------------------------------------------------------------------------
# commit preprocessing
# -------------------------------------------------------------------------
with ExecutionTimer(
core_statistics.sub_collection(name="commit preprocessing")
) as timer:
raw_commit_data = dict()
missing = []
try:
# Exploit the preprocessed commits already stored in the backend
# and only process those that are missing. Note: the endpoint
# does not exist (yet)
r = requests.get(
backend_address
+ "/commits/"
+ repository_url
+ "?commit_id="
+ ",".join(candidates)
)
_logger.info("The backend returned status '%d'" % r.status_code)
if r.status_code != 200:
_logger.error("This is weird...Continuing anyway.")
missing = candidates
else:
raw_commit_data = r.json()
_logger.info(
"Found {} preprocessed commits".format(len(raw_commit_data))
)
except requests.exceptions.ConnectionError:
_logger.error(
"Could not reach backend, is it running? The result of commit pre-processing will not be saved.",
exc_info=log.config.level < logging.WARNING,
)
missing = candidates
preprocessed_commits: "list[Commit]" = []
for idx, commit in enumerate(raw_commit_data):
if (
commit
): # None results are not in the DB, collect them to missing list, they need local preprocessing
preprocessed_commits.append(Commit.parse_obj(commit))
else:
missing.append(candidates[idx])
_logger.info("Preprocessing commits...")
first_missing = len(preprocessed_commits)
pbar = tqdm(missing)
with Counter(
timer.collection.sub_collection(name="commit preprocessing")
) as counter:
counter.initialize("preprocessed commits", unit="commit")
for commit_id in pbar:
counter.increment("preprocessed commits")
preprocessed_commits.append(
preprocess_commit(repository.get_commit(commit_id))
)
_logger.pretty_log(advisory_record)
_logger.debug(f"preprocessed {len(preprocessed_commits)} commits")
payload = [c.__dict__ for c in preprocessed_commits[first_missing:]]
# -------------------------------------------------------------------------
# save preprocessed commits to backend
# -------------------------------------------------------------------------
with ExecutionTimer(
core_statistics.sub_collection(name="save preprocessed commits to backend")
):
_logger.info("Sending preprocessing commits to backend...")
try:
r = requests.post(backend_address + "/commits/", json=payload)
_logger.info(
"Saving to backend completed (status code: %d)" % r.status_code
)
except requests.exceptions.ConnectionError:
_logger.error(
"Could not reach backend, is it running?"
"The result of commit pre-processing will not be saved."
"Continuing anyway.....",
exc_info=log.config.level < logging.WARNING,
)
# TODO compute actual rank
# This can be done by a POST request that creates a "search" job
# whose inputs are the AdvisoryRecord, and the repository URL
# The API returns immediately indicating a job id. From this
# id, a URL can be constructed to poll the results asynchronously.
# ranked_results = [repository.get_commit(c) for c in preprocessed_commits]
# -------------------------------------------------------------------------
# analyze candidates by applying rules and ML predictor
# -------------------------------------------------------------------------
with ExecutionTimer(
core_statistics.sub_collection(name="analyze candidates")
) as timer:
_logger.info("Extracting features from commits...")
# annotated_candidates = []
# with Counter(timer.collection.sub_collection("commit analysing")) as counter:
# counter.initialize("analyzed commits", unit="commit")
# # TODO remove "proactive" invocation of feature extraction
# for commit in tqdm(preprocessed_commits):
# counter.increment("analyzed commits")
# annotated_candidates.append(extract_features(commit, advisory_record))
annotated_candidates = apply_rules(
preprocessed_commits, advisory_record, active_rules=active_rules
)
annotated_candidates = rank(annotated_candidates, model_name=model_name)
return annotated_candidates, advisory_record
# def filter_by_changed_files(
# candidates: "list[str]", modified_files: "list[str]", git_repository: Git
# ) -> list:
# """
# Takes a list of commit ids in input and returns in output the list
# of ids of the commits that modify at least one path that contains one of the strings
# in "modified_files"
# """
# modified_files = [f.lower() for f in modified_files if f != ""]
# if len(modified_files) == 0:
# return candidates
# filtered_candidates = []
# if len(modified_files) != 0:
# for commit_id in candidates:
# commit_obj = git_repository.get_commit(commit_id)
# commit_changed_files = commit_obj.get_changed_files()
# for ccf in commit_changed_files:
# for f in modified_files:
# ccf = ccf.lower()
# if f in ccf:
# # if f in [e.lower() for e in ccf]:
# # print(f, commit_obj.get_id())
# filtered_candidates.append(commit_obj.get_id())
# return list(set(filtered_candidates))
| [
"log.util.init_local_logger",
"datamodel.advisory.AdvisoryRecord",
"sys.exit",
"requests.post",
"git.version_to_tag.get_tag_for_version",
"datetime.datetime.strptime",
"filtering.filter.filter_commits",
"tqdm.tqdm",
"datamodel.commit.Commit.parse_obj",
"ranking.rules.apply_rules",
"stats.executi... | [((676, 695), 'log.util.init_local_logger', 'init_local_logger', ([], {}), '()\n', (693, 695), False, 'from log.util import init_local_logger\n'), ((841, 884), 'stats.execution.execution_statistics.sub_collection', 'execution_statistics.sub_collection', (['"""core"""'], {}), "('core')\n", (876, 884), False, 'from stats.execution import Counter, ExecutionTimer, execution_statistics\n'), ((1794, 1962), 'datamodel.advisory.AdvisoryRecord', 'AdvisoryRecord', ([], {'vulnerability_id': 'vulnerability_id', 'repository_url': 'repository_url', 'description': 'vuln_descr', 'from_nvd': 'use_nvd', 'nvd_rest_endpoint': 'nvd_rest_endpoint'}), '(vulnerability_id=vulnerability_id, repository_url=\n repository_url, description=vuln_descr, from_nvd=use_nvd,\n nvd_rest_endpoint=nvd_rest_endpoint)\n', (1808, 1962), False, 'from datamodel.advisory import AdvisoryRecord\n'), ((3231, 3261), 'git.git.Git', 'Git', (['repository_url', 'git_cache'], {}), '(repository_url, git_cache)\n', (3234, 3261), False, 'from git.git import GIT_CACHE, Git\n'), ((5475, 5501), 'filtering.filter.filter_commits', 'filter_commits', (['candidates'], {}), '(candidates)\n', (5489, 5501), False, 'from filtering.filter import filter_commits\n'), ((7917, 7930), 'tqdm.tqdm', 'tqdm', (['missing'], {}), '(missing)\n', (7921, 7930), False, 'from tqdm import tqdm\n'), ((10737, 10814), 'ranking.rules.apply_rules', 'apply_rules', (['preprocessed_commits', 'advisory_record'], {'active_rules': 'active_rules'}), '(preprocessed_commits, advisory_record, active_rules=active_rules)\n', (10748, 10814), False, 'from ranking.rules import apply_rules\n'), ((10868, 10917), 'ranking.rank.rank', 'rank', (['annotated_candidates'], {'model_name': 'model_name'}), '(annotated_candidates, model_name=model_name)\n', (10872, 10917), False, 'from ranking.rank import rank\n'), ((5916, 5928), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (5924, 5928), False, 'import sys\n'), ((8961, 9019), 'requests.post', 
'requests.post', (["(backend_address + '/commits/')"], {'json': 'payload'}), "(backend_address + '/commits/', json=payload)\n", (8974, 9019), False, 'import requests\n'), ((2235, 2290), 'datetime.datetime.strptime', 'datetime.strptime', (['publication_date', '"""%Y-%m-%dT%H:%M%z"""'], {}), "(publication_date, '%Y-%m-%dT%H:%M%z')\n", (2252, 2290), False, 'from datetime import datetime\n'), ((3710, 3749), 'git.version_to_tag.get_tag_for_version', 'get_tag_for_version', (['tags', 'vuln_version'], {}), '(tags, vuln_version)\n', (3729, 3749), False, 'from git.version_to_tag import get_tag_for_version\n'), ((3781, 3821), 'git.version_to_tag.get_tag_for_version', 'get_tag_for_version', (['tags', 'fixed_version'], {}), '(tags, fixed_version)\n', (3800, 3821), False, 'from git.version_to_tag import get_tag_for_version\n'), ((7710, 7734), 'datamodel.commit.Commit.parse_obj', 'Commit.parse_obj', (['commit'], {}), '(commit)\n', (7726, 7734), False, 'from datamodel.commit import Commit\n')] |
import os
import uuid
import StringIO
import ConfigParser
from pyethereum.utils import data_dir
from pyethereum.packeter import Packeter
from pyethereum.utils import sha3
def default_data_dir():
data_dir._set_default()
return data_dir.path
def default_config_path():
return os.path.join(default_data_dir(), 'config.txt')
def default_client_version():
return Packeter.CLIENT_VERSION # FIXME
def default_node_id():
return sha3(str(uuid.uuid1())).encode('hex')
config_template = \
"""
# NETWORK OPTIONS ###########
[network]
# Connect to remote host/port
# poc-6.ethdev.com:30300
remote_host = 172.16.31.10
remote_port = 30303
# Listen on the given host/port for incoming connections
listen_host = 0.0.0.0
listen_port = 30303
# Number of peer to connections to establish
num_peers = 10
# unique id of this node
node_id = {0}
# API OPTIONS ###########
[api]
# Serve the restful json api on the given host/port
listen_host = 0.0.0.0
listen_port = 30203
# path to server the api at
api_path = /api/v02a
# MISC OIPTIONS #########
[misc]
# Load database from path
data_dir = {1}
# percent cpu devoted to mining 0=off
mining = 30
# how verbose should the client be (1-3)
verbosity = 3
# set log level and filters (WARM, INFO, DEBUG)
# examples:
# get every log message from every module
# :DEBUG
# get every warning from every module
# :WARN
# get every message from module chainmanager and all warnings
# pyethereum.chainmanager:DEBUG,:WARN
logging = pyethereum.chainmanager:DEBUG,pyethereum.synchronizer:DEBUG,:INFO
# WALLET OPTIONS ##################
[wallet]
# Set the coinbase (mining payout) address
coinbase = <KEY>
""".format(default_node_id(), default_data_dir())
def get_default_config():
f = StringIO.StringIO()
f.write(config_template)
f.seek(0)
config = ConfigParser.ConfigParser()
config.readfp(f)
config.set('network', 'client_version', default_client_version())
return config
def read_config(cfg_path = default_config_path()):
# create default if not existent
if not os.path.exists(cfg_path):
open(cfg_path, 'w').write(config_template)
# extend on the default config
config = get_default_config()
config.read(cfg_path)
return config
def dump_config(config):
r = ['']
for section in config.sections():
for a,v in config.items(section):
r.append('[%s] %s = %r' %( section, a, v))
return '\n'.join(r)
| [
"StringIO.StringIO",
"os.path.exists",
"ConfigParser.ConfigParser",
"uuid.uuid1",
"pyethereum.utils.data_dir._set_default"
] | [((202, 225), 'pyethereum.utils.data_dir._set_default', 'data_dir._set_default', ([], {}), '()\n', (223, 225), False, 'from pyethereum.utils import data_dir\n'), ((1770, 1789), 'StringIO.StringIO', 'StringIO.StringIO', ([], {}), '()\n', (1787, 1789), False, 'import StringIO\n'), ((1846, 1873), 'ConfigParser.ConfigParser', 'ConfigParser.ConfigParser', ([], {}), '()\n', (1871, 1873), False, 'import ConfigParser\n'), ((2083, 2107), 'os.path.exists', 'os.path.exists', (['cfg_path'], {}), '(cfg_path)\n', (2097, 2107), False, 'import os\n'), ((455, 467), 'uuid.uuid1', 'uuid.uuid1', ([], {}), '()\n', (465, 467), False, 'import uuid\n')] |
from sqlmodel import SQLModel
from sb_backend.app.service.base.base_service import ServiceBase
from sb_backend.app.crud.setup.crud_noseriesline import CRUDBase, noseriesline
class ServiceBase(ServiceBase[CRUDBase, SQLModel, SQLModel]):
pass
noseriesline_s = ServiceBase(noseriesline)
| [
"sb_backend.app.service.base.base_service.ServiceBase"
] | [((264, 289), 'sb_backend.app.service.base.base_service.ServiceBase', 'ServiceBase', (['noseriesline'], {}), '(noseriesline)\n', (275, 289), False, 'from sb_backend.app.service.base.base_service import ServiceBase\n')] |
# coding=utf-8
from selenium import webdriver
import time
driver = webdriver.PhantomJS(executable_path=r'E:\Documents\Apps\phantomjs-2.1.1-windows\bin\phantomjs.exe')
driver.get("https://movie.douban.com/typerank?type_name=剧情&type=11&interval_id=100:90&action=")
# 向下滚动10000像素
js = "document.body.scrollTop=10000"
#js="var q=document.documentElement.scrollTop=10000"
time.sleep(3)
#查看页面快照
driver.save_screenshot("29_js2a.png")
# 执行JS语句
driver.execute_script(js)
time.sleep(10)
#查看页面快照
driver.save_screenshot("29_js2b.png")
driver.quit() | [
"selenium.webdriver.PhantomJS",
"time.sleep"
] | [((69, 177), 'selenium.webdriver.PhantomJS', 'webdriver.PhantomJS', ([], {'executable_path': '"""E:\\\\Documents\\\\Apps\\\\phantomjs-2.1.1-windows\\\\bin\\\\phantomjs.exe"""'}), "(executable_path=\n 'E:\\\\Documents\\\\Apps\\\\phantomjs-2.1.1-windows\\\\bin\\\\phantomjs.exe')\n", (88, 177), False, 'from selenium import webdriver\n'), ((370, 383), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (380, 383), False, 'import time\n'), ((467, 481), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (477, 481), False, 'import time\n')] |
import sys
from collections import defaultdict
sys.stdin.readline()
my_results = defaultdict(int)
def add_contact(contact):
for index, _ in enumerate(contact):
my_contact = contact[0:index]
my_results[my_contact] +=1
for line in sys.stdin.readlines():
operation, contact = line.strip().split(' ')
if operation == 'add':
map(add_contact, contact)
else:
print(my_results[contact])
| [
"sys.stdin.readline",
"collections.defaultdict",
"sys.stdin.readlines"
] | [((48, 68), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (66, 68), False, 'import sys\n'), ((82, 98), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (93, 98), False, 'from collections import defaultdict\n'), ((254, 275), 'sys.stdin.readlines', 'sys.stdin.readlines', ([], {}), '()\n', (273, 275), False, 'import sys\n')] |
import copy
from troposphere import (
Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling,
If, GetAtt, Output
)
from troposphere import elasticloadbalancing as elb
from troposphere.autoscaling import Tag as ASTag
from troposphere.route53 import RecordSetType
from stacker.blueprints.base import Blueprint
from stacker.blueprints.variables.types import TroposphereType
from stacker.blueprints.variables.types import (
CFNCommaDelimitedList,
CFNNumber,
CFNString,
EC2VPCId,
EC2KeyPairKeyName,
EC2SecurityGroupId,
EC2SubnetIdList,
)
CLUSTER_SG_NAME = "%sSG"
ELB_SG_NAME = "%sElbSG"
ELB_NAME = "%sLoadBalancer"
class AutoscalingGroup(Blueprint):
VARIABLES = {
'VpcId': {'type': EC2VPCId, 'description': 'Vpc Id'},
'DefaultSG': {'type': EC2SecurityGroupId,
'description': 'Top level security group.'},
'BaseDomain': {
'type': CFNString,
'default': '',
'description': 'Base domain for the stack.'},
'PrivateSubnets': {'type': EC2SubnetIdList,
'description': 'Subnets to deploy private '
'instances in.'},
'PublicSubnets': {'type': EC2SubnetIdList,
'description': 'Subnets to deploy public (elb) '
'instances in.'},
'AvailabilityZones': {'type': CFNCommaDelimitedList,
'description': 'Availability Zones to deploy '
'instances in.'},
'InstanceType': {'type': CFNString,
'description': 'EC2 Instance Type',
'default': 'm3.medium'},
'MinSize': {'type': CFNNumber,
'description': 'Minimum # of instances.',
'default': '1'},
'MaxSize': {'type': CFNNumber,
'description': 'Maximum # of instances.',
'default': '5'},
'SshKeyName': {'type': EC2KeyPairKeyName},
'ImageName': {
'type': CFNString,
'description': 'The image name to use from the AMIMap (usually '
'found in the config file.)'},
'ELBHostName': {
'type': CFNString,
'description': 'A hostname to give to the ELB. If not given '
'no ELB will be created.',
'default': ''},
'ELBCertName': {
'type': CFNString,
'description': 'The SSL certificate name to use on the ELB.',
'default': ''},
'ELBCertType': {
'type': CFNString,
'description': 'The SSL certificate type to use on the ELB.',
'default': ''},
}
def create_conditions(self):
self.template.add_condition(
"CreateELB",
Not(Equals(Ref("ELBHostName"), "")))
self.template.add_condition(
"SetupDNS",
Not(Equals(Ref("BaseDomain"), "")))
self.template.add_condition(
"UseSSL",
Not(Equals(Ref("ELBCertName"), "")))
self.template.add_condition(
"CreateSSLELB",
And(Condition("CreateELB"), Condition("UseSSL")))
self.template.add_condition(
"SetupELBDNS",
And(Condition("CreateELB"), Condition("SetupDNS")))
self.template.add_condition(
"UseIAMCert",
Not(Equals(Ref("ELBCertType"), "acm")))
def create_security_groups(self):
t = self.template
asg_sg = CLUSTER_SG_NAME % self.name
elb_sg = ELB_SG_NAME % self.name
t.add_resource(ec2.SecurityGroup(
asg_sg,
GroupDescription=asg_sg,
VpcId=Ref("VpcId")))
# ELB Security group, if ELB is used
t.add_resource(
ec2.SecurityGroup(
elb_sg,
GroupDescription=elb_sg,
VpcId=Ref("VpcId"),
Condition="CreateELB"))
# Add SG rules here
# Allow ELB to connect to ASG on port 80
t.add_resource(ec2.SecurityGroupIngress(
"%sElbToASGPort80" % self.name,
IpProtocol="tcp", FromPort="80", ToPort="80",
SourceSecurityGroupId=Ref(elb_sg),
GroupId=Ref(asg_sg),
Condition="CreateELB"))
# Allow Internet to connect to ELB on port 80
t.add_resource(ec2.SecurityGroupIngress(
"InternetTo%sElbPort80" % self.name,
IpProtocol="tcp", FromPort="80", ToPort="80",
CidrIp="0.0.0.0/0",
GroupId=Ref(elb_sg),
Condition="CreateELB"))
t.add_resource(ec2.SecurityGroupIngress(
"InternetTo%sElbPort443" % self.name,
IpProtocol="tcp", FromPort="443", ToPort="443",
CidrIp="0.0.0.0/0",
GroupId=Ref(elb_sg),
Condition="CreateSSLELB"))
def setup_listeners(self):
no_ssl = [elb.Listener(
LoadBalancerPort=80,
Protocol='HTTP',
InstancePort=80,
InstanceProtocol='HTTP'
)]
# Choose proper certificate source
acm_cert = Join("", [
"arn:aws:acm:", Ref("AWS::Region"), ":", Ref("AWS::AccountId"),
":certificate/", Ref("ELBCertName")])
iam_cert = Join("", [
"arn:aws:iam::", Ref("AWS::AccountId"), ":server-certificate/",
Ref("ELBCertName")])
cert_id = If("UseIAMCert", iam_cert, acm_cert)
with_ssl = copy.deepcopy(no_ssl)
with_ssl.append(elb.Listener(
LoadBalancerPort=443,
InstancePort=80,
Protocol='HTTPS',
InstanceProtocol="HTTP",
SSLCertificateId=cert_id))
listeners = If("UseSSL", with_ssl, no_ssl)
return listeners
def create_load_balancer(self):
t = self.template
elb_name = ELB_NAME % self.name
elb_sg = ELB_SG_NAME % self.name
t.add_resource(elb.LoadBalancer(
elb_name,
HealthCheck=elb.HealthCheck(
Target='HTTP:80/',
HealthyThreshold=3,
UnhealthyThreshold=3,
Interval=5,
Timeout=3),
Listeners=self.setup_listeners(),
SecurityGroups=[Ref(elb_sg), ],
Subnets=Ref("PublicSubnets"),
Condition="CreateELB"))
# Setup ELB DNS
t.add_resource(
RecordSetType(
'%sDnsRecord' % elb_name,
# Appends a '.' to the end of the domain
HostedZoneName=Join("", [Ref("BaseDomain"), "."]),
Comment='Router ELB DNS',
Name=Join('.', [Ref("ELBHostName"), Ref("BaseDomain")]),
Type='CNAME',
TTL='120',
ResourceRecords=[
GetAtt(elb_name, 'DNSName')],
Condition="SetupELBDNS"))
def get_launch_configuration_parameters(self):
return {
'ImageId': FindInMap('AmiMap', Ref("AWS::Region"),
Ref('ImageName')),
'InstanceType': Ref("InstanceType"),
'KeyName': Ref("SshKeyName"),
'SecurityGroups': self.get_launch_configuration_security_groups(),
}
def get_autoscaling_group_parameters(self, launch_config_name, elb_name):
return {
'AvailabilityZones': Ref("AvailabilityZones"),
'LaunchConfigurationName': Ref(launch_config_name),
'MinSize': Ref("MinSize"),
'MaxSize': Ref("MaxSize"),
'VPCZoneIdentifier': Ref("PrivateSubnets"),
'LoadBalancerNames': If("CreateELB", [Ref(elb_name), ], []),
'Tags': [ASTag('Name', self.name, True)],
}
def get_launch_configuration_security_groups(self):
sg_name = CLUSTER_SG_NAME % self.name
return [Ref("DefaultSG"), Ref(sg_name)]
def create_autoscaling_group(self):
name = "%sASG" % self.name
launch_config = "%sLaunchConfig" % name
elb_name = ELB_NAME % self.name
t = self.template
t.add_resource(autoscaling.LaunchConfiguration(
launch_config,
**self.get_launch_configuration_parameters()
))
t.add_resource(autoscaling.AutoScalingGroup(
name,
**self.get_autoscaling_group_parameters(launch_config, elb_name)
))
def create_template(self):
self.create_conditions()
self.create_security_groups()
self.create_load_balancer()
self.create_autoscaling_group()
class FlexibleAutoScalingGroup(Blueprint):
""" A more flexible AutoscalingGroup Blueprint.
Uses TroposphereTypes to make creating AutoscalingGroups and their
associated LaunchConfiguration more flexible. This comes at the price of
doing less for you.
"""
VARIABLES = {
"LaunchConfiguration": {
"type": TroposphereType(autoscaling.LaunchConfiguration),
"description": "The LaunchConfiguration for the autoscaling "
"group.",
},
"AutoScalingGroup": {
"type": TroposphereType(autoscaling.AutoScalingGroup),
"description": "The Autoscaling definition. Do not provide a "
"LaunchConfiguration parameter, that will be "
"automatically added from the LaunchConfiguration "
"Variable.",
},
}
def create_launch_configuration(self):
t = self.template
variables = self.get_variables()
self.launch_config = t.add_resource(variables["LaunchConfiguration"])
t.add_output(
Output("LaunchConfiguration", Value=self.launch_config.Ref())
)
def add_launch_config_variable(self, asg):
if getattr(asg, "LaunchConfigurationName", False):
raise ValueError("Do not provide a LaunchConfigurationName "
"variable for the AutoScalingGroup config.")
asg.LaunchConfigurationName = self.launch_config.Ref()
return asg
def create_autoscaling_group(self):
t = self.template
variables = self.get_variables()
asg = variables["AutoScalingGroup"]
asg = self.add_launch_config_variable(asg)
t.add_resource(asg)
t.add_output(Output("AutoScalingGroup", Value=asg.Ref()))
def create_template(self):
self.create_launch_configuration()
self.create_autoscaling_group()
| [
"troposphere.GetAtt",
"troposphere.Ref",
"stacker.blueprints.variables.types.TroposphereType",
"troposphere.elasticloadbalancing.Listener",
"troposphere.If",
"troposphere.elasticloadbalancing.HealthCheck",
"troposphere.Condition",
"copy.deepcopy",
"troposphere.autoscaling.Tag"
] | [((5543, 5579), 'troposphere.If', 'If', (['"""UseIAMCert"""', 'iam_cert', 'acm_cert'], {}), "('UseIAMCert', iam_cert, acm_cert)\n", (5545, 5579), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((5600, 5621), 'copy.deepcopy', 'copy.deepcopy', (['no_ssl'], {}), '(no_ssl)\n', (5613, 5621), False, 'import copy\n'), ((5849, 5879), 'troposphere.If', 'If', (['"""UseSSL"""', 'with_ssl', 'no_ssl'], {}), "('UseSSL', with_ssl, no_ssl)\n", (5851, 5879), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((5034, 5130), 'troposphere.elasticloadbalancing.Listener', 'elb.Listener', ([], {'LoadBalancerPort': '(80)', 'Protocol': '"""HTTP"""', 'InstancePort': '(80)', 'InstanceProtocol': '"""HTTP"""'}), "(LoadBalancerPort=80, Protocol='HTTP', InstancePort=80,\n InstanceProtocol='HTTP')\n", (5046, 5130), True, 'from troposphere import elasticloadbalancing as elb\n'), ((5646, 5770), 'troposphere.elasticloadbalancing.Listener', 'elb.Listener', ([], {'LoadBalancerPort': '(443)', 'InstancePort': '(80)', 'Protocol': '"""HTTPS"""', 'InstanceProtocol': '"""HTTP"""', 'SSLCertificateId': 'cert_id'}), "(LoadBalancerPort=443, InstancePort=80, Protocol='HTTPS',\n InstanceProtocol='HTTP', SSLCertificateId=cert_id)\n", (5658, 5770), True, 'from troposphere import elasticloadbalancing as elb\n'), ((7239, 7258), 'troposphere.Ref', 'Ref', (['"""InstanceType"""'], {}), "('InstanceType')\n", (7242, 7258), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((7283, 7300), 'troposphere.Ref', 'Ref', (['"""SshKeyName"""'], {}), "('SshKeyName')\n", (7286, 7300), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((7520, 7544), 'troposphere.Ref', 'Ref', (['"""AvailabilityZones"""'], {}), 
"('AvailabilityZones')\n", (7523, 7544), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((7585, 7608), 'troposphere.Ref', 'Ref', (['launch_config_name'], {}), '(launch_config_name)\n', (7588, 7608), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((7633, 7647), 'troposphere.Ref', 'Ref', (['"""MinSize"""'], {}), "('MinSize')\n", (7636, 7647), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((7672, 7686), 'troposphere.Ref', 'Ref', (['"""MaxSize"""'], {}), "('MaxSize')\n", (7675, 7686), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((7721, 7742), 'troposphere.Ref', 'Ref', (['"""PrivateSubnets"""'], {}), "('PrivateSubnets')\n", (7724, 7742), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((8000, 8016), 'troposphere.Ref', 'Ref', (['"""DefaultSG"""'], {}), "('DefaultSG')\n", (8003, 8016), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((8018, 8030), 'troposphere.Ref', 'Ref', (['sg_name'], {}), '(sg_name)\n', (8021, 8030), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((9060, 9108), 'stacker.blueprints.variables.types.TroposphereType', 'TroposphereType', (['autoscaling.LaunchConfiguration'], {}), '(autoscaling.LaunchConfiguration)\n', (9075, 9108), False, 'from stacker.blueprints.variables.types import TroposphereType\n'), ((9282, 9327), 'stacker.blueprints.variables.types.TroposphereType', 'TroposphereType', (['autoscaling.AutoScalingGroup'], {}), '(autoscaling.AutoScalingGroup)\n', (9297, 9327), False, 'from 
stacker.blueprints.variables.types import TroposphereType\n'), ((3253, 3275), 'troposphere.Condition', 'Condition', (['"""CreateELB"""'], {}), "('CreateELB')\n", (3262, 3275), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((3277, 3296), 'troposphere.Condition', 'Condition', (['"""UseSSL"""'], {}), "('UseSSL')\n", (3286, 3296), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((3379, 3401), 'troposphere.Condition', 'Condition', (['"""CreateELB"""'], {}), "('CreateELB')\n", (3388, 3401), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((3403, 3424), 'troposphere.Condition', 'Condition', (['"""SetupDNS"""'], {}), "('SetupDNS')\n", (3412, 3424), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((5288, 5306), 'troposphere.Ref', 'Ref', (['"""AWS::Region"""'], {}), "('AWS::Region')\n", (5291, 5306), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((5313, 5334), 'troposphere.Ref', 'Ref', (['"""AWS::AccountId"""'], {}), "('AWS::AccountId')\n", (5316, 5334), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((5365, 5383), 'troposphere.Ref', 'Ref', (['"""ELBCertName"""'], {}), "('ELBCertName')\n", (5368, 5383), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((5445, 5466), 'troposphere.Ref', 'Ref', (['"""AWS::AccountId"""'], {}), "('AWS::AccountId')\n", (5448, 5466), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((5504, 5522), 'troposphere.Ref', 'Ref', 
(['"""ELBCertName"""'], {}), "('ELBCertName')\n", (5507, 5522), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((7139, 7157), 'troposphere.Ref', 'Ref', (['"""AWS::Region"""'], {}), "('AWS::Region')\n", (7142, 7157), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((7192, 7208), 'troposphere.Ref', 'Ref', (['"""ImageName"""'], {}), "('ImageName')\n", (7195, 7208), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((7838, 7868), 'troposphere.autoscaling.Tag', 'ASTag', (['"""Name"""', 'self.name', '(True)'], {}), "('Name', self.name, True)\n", (7843, 7868), True, 'from troposphere.autoscaling import Tag as ASTag\n'), ((2929, 2947), 'troposphere.Ref', 'Ref', (['"""ELBHostName"""'], {}), "('ELBHostName')\n", (2932, 2947), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((3039, 3056), 'troposphere.Ref', 'Ref', (['"""BaseDomain"""'], {}), "('BaseDomain')\n", (3042, 3056), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((3146, 3164), 'troposphere.Ref', 'Ref', (['"""ELBCertName"""'], {}), "('ELBCertName')\n", (3149, 3164), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((3513, 3531), 'troposphere.Ref', 'Ref', (['"""ELBCertType"""'], {}), "('ELBCertType')\n", (3516, 3531), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((3810, 3822), 'troposphere.Ref', 'Ref', (['"""VpcId"""'], {}), "('VpcId')\n", (3813, 3822), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((4012, 
4024), 'troposphere.Ref', 'Ref', (['"""VpcId"""'], {}), "('VpcId')\n", (4015, 4024), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((4328, 4339), 'troposphere.Ref', 'Ref', (['elb_sg'], {}), '(elb_sg)\n', (4331, 4339), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((4361, 4372), 'troposphere.Ref', 'Ref', (['asg_sg'], {}), '(asg_sg)\n', (4364, 4372), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((4672, 4683), 'troposphere.Ref', 'Ref', (['elb_sg'], {}), '(elb_sg)\n', (4675, 4683), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((4932, 4943), 'troposphere.Ref', 'Ref', (['elb_sg'], {}), '(elb_sg)\n', (4935, 4943), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((6137, 6240), 'troposphere.elasticloadbalancing.HealthCheck', 'elb.HealthCheck', ([], {'Target': '"""HTTP:80/"""', 'HealthyThreshold': '(3)', 'UnhealthyThreshold': '(3)', 'Interval': '(5)', 'Timeout': '(3)'}), "(Target='HTTP:80/', HealthyThreshold=3, UnhealthyThreshold=3,\n Interval=5, Timeout=3)\n", (6152, 6240), True, 'from troposphere import elasticloadbalancing as elb\n'), ((6429, 6449), 'troposphere.Ref', 'Ref', (['"""PublicSubnets"""'], {}), "('PublicSubnets')\n", (6432, 6449), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((7794, 7807), 'troposphere.Ref', 'Ref', (['elb_name'], {}), '(elb_name)\n', (7797, 7807), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((6393, 6404), 'troposphere.Ref', 'Ref', (['elb_sg'], {}), '(elb_sg)\n', (6396, 6404), False, 'from 
troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((6955, 6982), 'troposphere.GetAtt', 'GetAtt', (['elb_name', '"""DNSName"""'], {}), "(elb_name, 'DNSName')\n", (6961, 6982), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((6703, 6720), 'troposphere.Ref', 'Ref', (['"""BaseDomain"""'], {}), "('BaseDomain')\n", (6706, 6720), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((6803, 6821), 'troposphere.Ref', 'Ref', (['"""ELBHostName"""'], {}), "('ELBHostName')\n", (6806, 6821), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n'), ((6823, 6840), 'troposphere.Ref', 'Ref', (['"""BaseDomain"""'], {}), "('BaseDomain')\n", (6826, 6840), False, 'from troposphere import Ref, FindInMap, Not, Equals, And, Condition, Join, ec2, autoscaling, If, GetAtt, Output\n')] |
# Generated by Django 2.1.1 on 2019-05-24 00:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('input', '0015_auto_20190524_0052'),
]
operations = [
migrations.AlterField(
model_name='post',
name='ctime',
field=models.DateTimeField(auto_now=True),
),
]
| [
"django.db.models.DateTimeField"
] | [((346, 381), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (366, 381), False, 'from django.db import migrations, models\n')] |
import os
import logging
import requests
import ambari.api as api
from utils.utils import logmethodcall
class RangerRequestError(Exception):
pass
class Ranger:
def __init__(self, request_timeout=10):
self.timeout = request_timeout
self.ranger_schema = os.environ.get('RANGER_SCHEMA', 'http')
self.ranger_host = os.environ.get('RANGER_HOST', 'sandbox.hortonworks.com')
self.ranger_port = os.environ.get('RANGER_PORT', 6080)
logging.basicConfig(level=logging.DEBUG,
format='{asctime} ({levelname}) {funcName}(): {message}',
style="{",
filename='ranger.log')
self.logger = logging.getLogger(__name__)
@logmethodcall
def get_ranger_url(self):
return '{0}://{1}:{2}/'.format(self.ranger_schema, self.ranger_host, self.ranger_port)
@logmethodcall
def is_ranger_online(self):
try:
requests.get(self.get_ranger_url(), timeout=self.timeout)
return True
except:
return False
@logmethodcall
def stop_ranger_admin(self):
ambari = api.Api(logger=self.logger)
ranger_admin_ambari_info = ambari.get_component_info('RANGER', 'RANGER_ADMIN')
rnd_ranger_host, rnd_ranger_component = ambari.get_random_host_and_component_path(ranger_admin_ambari_info)
self.logger.info("Selected random Ranger admin host for stopping: {0}, {1}"
.format(rnd_ranger_host, rnd_ranger_component))
ambari.change_host_component_state_and_wait(rnd_ranger_component, state='INSTALLED')
@logmethodcall
def check_ranger_status(self):
ranger_url = '{0}://{1}:{2}/'.format(self.ranger_schema, self.ranger_host, self.ranger_port)
self.logger.debug(ranger_url)
response = requests.get(ranger_url, timeout=self.timeout)
self.verify_ranger_response(response)
@logmethodcall
def verify_ranger_response(self, response):
if response.status_code != 200:
self.logger.error(
"RangerResponse returned with error status [{0}], response was: {1}".format(response.status_code,
response.text))
raise RangerRequestError("RangerResponse returned with error status [{0}]".format(response.status_code))
| [
"logging.basicConfig",
"logging.getLogger",
"os.environ.get",
"requests.get",
"ambari.api.Api"
] | [((282, 321), 'os.environ.get', 'os.environ.get', (['"""RANGER_SCHEMA"""', '"""http"""'], {}), "('RANGER_SCHEMA', 'http')\n", (296, 321), False, 'import os\n'), ((349, 405), 'os.environ.get', 'os.environ.get', (['"""RANGER_HOST"""', '"""sandbox.hortonworks.com"""'], {}), "('RANGER_HOST', 'sandbox.hortonworks.com')\n", (363, 405), False, 'import os\n'), ((433, 468), 'os.environ.get', 'os.environ.get', (['"""RANGER_PORT"""', '(6080)'], {}), "('RANGER_PORT', 6080)\n", (447, 468), False, 'import os\n'), ((478, 620), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'format': '"""{asctime} ({levelname}) {funcName}(): {message}"""', 'style': '"""{"""', 'filename': '"""ranger.log"""'}), "(level=logging.DEBUG, format=\n '{asctime} ({levelname}) {funcName}(): {message}', style='{', filename=\n 'ranger.log')\n", (497, 620), False, 'import logging\n'), ((718, 745), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (735, 745), False, 'import logging\n'), ((1161, 1188), 'ambari.api.Api', 'api.Api', ([], {'logger': 'self.logger'}), '(logger=self.logger)\n', (1168, 1188), True, 'import ambari.api as api\n'), ((1855, 1901), 'requests.get', 'requests.get', (['ranger_url'], {'timeout': 'self.timeout'}), '(ranger_url, timeout=self.timeout)\n', (1867, 1901), False, 'import requests\n')] |
import os
import numpy as np
import csv
import matplotlib.pyplot as plt
from moviepy.editor import *
from matplotlib.image import imsave
import matplotlib
matplotlib.use('Agg')
# import tensorflow as tf
# from stable_baselines.common.callbacks import BaseCallback, EvalCallback
# from stable_baselines.common.vec_env import DummyVecEnv
class MonitorCallback(EvalCallback):
"""
Callback for saving a model (the check is done every ``check_freq`` steps)
based on the training reward (in practice, we recommend using ``EvalCallback``).
:param check_freq: (int)
:param log_dir: (str) Path to the folder where the model will be saved.
It must contains the file created by the ``Monitor`` wrapper.
:param verbose: (int)
"""
def __init__(self, eval_env, check_freq: int, save_example_freq: int, log_dir: str,sacred=None, n_eval_episodes=5, render=False, verbose=1):
super(MonitorCallback, self).__init__(verbose=verbose,
eval_env=eval_env,
best_model_save_path=log_dir,
log_path=log_dir,
eval_freq=check_freq,
n_eval_episodes=n_eval_episodes,
deterministic=False,
render=render)
self.render = render
self.verbose = verbose
self.env = eval_env
self.check_freq = check_freq
self.save_example_freq = save_example_freq
self.log_dir = log_dir
self.save_path = os.path.join(log_dir, 'best_model')
self.best_mean_reward = -np.inf
self.sacred = sacred
self.sequence = False
if self.env.__class__.__name__ in ['DarSeqEnv','DummyVecEnv'] :
self.sequence = True
self.statistics = {
'step_reward': [],
'reward': [],
'std_reward': [],
'duration': [],
'GAP': [],
'GAP*': [],
'fit_solution': [],
'delivered': []
# 'policy_loss': [],
# 'value_loss': [],
# 'policy_entropy': []
}
def _init_callback(self) -> None:
# Create folder if needed
if self.log_dir is not None:
os.makedirs(self.log_dir, exist_ok=True)
def _on_training_start(self) -> None:
"""
This method is called before the first rollout starts.
"""
pass
def _on_training_end(self) -> None:
"""
This event is triggered before exiting the `learn()` method.
"""
pass
def plot_statistics(self, show=False):
# Print them
if self.verbose:
print('\t ->[Epoch %d]<- mean episodic reward: %.3f' % (self.num_timesteps + 1, self.statistics['reward'][-1]))
print('\t * Mean duration : %0.3f' % (self.statistics['duration'][-1]))
print('\t * Mean std_reward : %0.3f' % (self.statistics['std_reward'][-1]))
print('\t * Mean step_reward : %0.3f' % (self.statistics['step_reward'][-1]))
# print('\t ** policy_loss : %0.3f' % (self.statistics['policy_loss'][-1]))
# print('\t ** value_loss : %0.3f' % (self.statistics['value_loss'][-1]))
# print('\t ** policy_entropy : %0.3f' % (self.statistics['policy_entropy'][-1]))
# Create plot of the statiscs, saved in folder
colors = [plt.cm.tab20(0),plt.cm.tab20(1),plt.cm.tab20c(2),
plt.cm.tab20c(3), plt.cm.tab20c(4),
plt.cm.tab20c(5),plt.cm.tab20c(6),plt.cm.tab20c(7)]
fig, (axis) = plt.subplots(1, len(self.statistics), figsize=(20, 10))
fig.suptitle(' - PPO Training: ' + self.log_dir)
for i, key in enumerate(self.statistics):
# Sacred (The one thing to keep here)
if self.sacred :
self.sacred.get_logger().report_scalar(title='Train stats',
series=key, value=self.statistics[key][-1], iteration=self.num_timesteps)
# self.sacred.log_scalar(key, self.statistics[key][-1], len(self.statistics[key]))
axis[i].plot(self.statistics[key], color=colors[i])
axis[i].set_title(' Plot of ' + key)
if show :
fig.show()
fig.savefig(self.log_dir + '/result_figure.jpg')
fig.clf()
plt.close(fig)
# Save the statistics as CSV file
if not self.sacred:
try:
with open(self.log_dir + '/statistics.csv', 'w') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=self.statistics.keys())
writer.writeheader()
# for key in statistics
writer.writerow(self.statistics)
except IOError:
print("I/O error")
def save_image_batch(self, images, rewards, txt='test'):
''' Saving some examples of input -> output to see how the model behave '''
print(' - Saving some examples - ')
number_i = min(len(images), 50)
plt.figure()
fig, axis = plt.subplots(number_i, 2, figsize=(10, 50)) #2 rows for input, output
fig.tight_layout()
fig.suptitle(' - examples of network - ')
for i in range(min(self.batch_size, number_i)):
input_map = indices2image(data[0][i], self.image_size)
axis[i, 0].imshow(input_map)
im = indice_map2image(outputs[i], self.image_size).cpu().numpy()
normalized = (im - im.min() ) / (im.max() - im.min())
axis[i, 1].imshow(normalized)
img_name = self.path_name + '/example_' + str(self.num_timesteps) + '.png'
plt.savefig(img_name)
plt.close()
if self.sacred :
self.sacred.add_artifact(img_name, content_type='image')
def save_example(self, observations, rewards, number):
noms = []
dir = self.log_dir + '/example/' + str(self.num_timesteps) + '/ex_number' + str(number)
if dir is not None:
os.makedirs(dir, exist_ok=True)
for i, obs in enumerate(observations):
save_name = dir + '/' + str(i) + '_r=' + str(rewards[i]) + '.png' #[np.array(img) for i, img in enumerate(images)
if self.env.__class__.__name__ == 'DummyVecEnv':
image = self.norm_image(obs[0], scale=1)
else :
image = self.norm_image(obs, scale=1)
# print('SHae after image', np.shape(image))
imsave(save_name, image)
noms.append(save_name)
# Save the imges as video
video_name = dir + 'r=' + str(np.sum(rewards)) + '.mp4'
clips = [ImageClip(m).set_duration(0.2)
for m in noms]
concat_clip = concatenate_videoclips(clips, method="compose")
concat_clip.write_videofile(video_name, fps=24, verbose=None, logger=None)
if self.sacred :
self.sacred.get_logger().report_media('video', 'Res_' + str(number) + '_Rwd=' + str(np.sum(rewards)),
iteration=self.num_timesteps // self.check_freq,
local_path=video_name)
del concat_clip
del clips
def norm_image(self, image, type=None, scale=10):
image = np.kron(image, np.ones((scale, scale)))
if type=='rgb':
ret = np.empty((image.shape[0], image.shape[0], 3), dtype=np.uint8)
ret[:, :, 0] = image.copy()
ret[:, :, 1] = image.copy()
ret[:, :, 2] = image.copy()
image = ret.copy()
return (255 * (image - np.min(image)) / (np.max(image) - np.min(image))).astype(np.uint8)
def save_gif(self, observations, rewards):
# print(observations)
# print(rewards)
# length = min(len(observations), 10)
# observations = 255 * ((np.array(observations) + 1) / (np.max(observations) + 1)).astype(np.uint8)
save_name = self.log_dir + '/example' + str(self.num_timesteps) + '.gif'
images = [self.norm_image(observations[i]) for i in range(len(observations)) if rewards[i] >= 0] #[np.array(img) for i, img in enumerate(images)]
# imageio.mimsave(save_name, images, fps=1)
if self.sacred :
self.sacred.get_logger().report_media('GIF', 'isgif', iteration=self.num_timesteps, local_path=save_name)
def save_video(self, observations, rewards):
save_name = self.log_dir + '/example' + str(self.num_timesteps) + '.mp4'
images = [self.norm_image(observations[i], type='rgb') for i in range(len(observations)) if rewards[i] >= 0] #[np.array(img) for i, img in enumerate(images)
clips = [ImageClip(m).set_duration(2)
for m in images]
concat_clip = concatenate_videoclips(clips, method="compose").resize(100)
concat_clip.write_videofile(save_name, fps=24, verbose=False)
if self.sacred :
self.sacred.get_logger().report_media('video', 'results', iteration=self.num_timesteps, local_path=save_name)
def _on_step(self) -> bool:
"""
In addition to EvalCallback we needs
Examples of eviroonment elements -> Save them as gif for exemple
Statistics to save -> save as plot and in database
-> reward, length, loss, additional metrics (accuraccy, best move ?)
"""
# super(MonitorCallback, self)._on_step()
if self.num_timesteps % self.check_freq == 0 :
episode_rewards, episode_lengths = [], []
gap, fit_solution, delivered = [], [], []
wrapped_env = DummyVecEnv([lambda: self.env])
for i in range(self.n_eval_episodes):
obs = wrapped_env.reset()
done, state = False, None
last_time = 0
if self.sequence :
if self.env.__class__.__name__ == 'DummyVecEnv':
observations = [self.env.env_method('get_image_representation')]
else :
observations = [self.env.get_image_representation()]
else :
observations = [obs.copy()]
episode_reward = [0.0]
episode_lengths.append(0)
while not done:
# Run of simulation
action, state = self.model.predict(obs, state=state, deterministic=False)
new_obs, reward, done, info = wrapped_env.step(action)
obs = new_obs
# Save observation only if time step evolved
if self.sequence:
if self.env.__class__.__name__ == 'DummyVecEnv':
if self.env.get_attr('time_step')[0] > last_time :
last_time = self.env.get_attr('time_step')[0]
observations.append(self.env.env_method('get_image_representation'))
else :
if self.env.time_step > last_time :
last_time = self.env.time_step
observations.append(self.env.get_image_representation())
else :
observations.append(obs.copy())
episode_reward.append(reward)
episode_lengths[-1] += 1
if self.render:
self.env.render()
info = info[0]
gap.append(info['GAP'])
delivered.append(info['delivered'])
fit_solution.append(info['fit_solution'])
episode_rewards.append(np.sum(episode_reward))
# self.save_gif(observations, episode_reward)
if self.num_timesteps % self.save_example_freq == 0 :
self.save_example(observations, episode_reward,number=i)
del observations
self.statistics['GAP'].append(np.mean(gap))
self.statistics['GAP*'].append(np.min(gap))
self.statistics['fit_solution'].append(np.mean(fit_solution))
self.statistics['delivered'].append(np.mean(delivered))
self.statistics['reward'].append(np.mean(episode_rewards))
self.statistics['std_reward'].append(np.std(episode_rewards))
self.statistics['step_reward'].append(np.mean([episode_rewards[i]/episode_lengths[i] for i in range(len(episode_lengths))]))
self.statistics['duration'].append(np.mean(episode_lengths))
# self.statistics['policy_loss'].append(self.model.pg_loss.numpy())
# self.statistics['value_loss'].append(self.model.vf_loss.numpy())
# self.statistics['policy_entropy'].append(self.model.entropy.numpy())
self.plot_statistics()
# Save best model
if self.statistics['reward'][-1] == np.max(self.statistics['reward']):
save_path = self.log_dir + '/best_model'
if self.verbose > 0:
print("Saving new best model to {}".format(save_path))
self.model.save(save_path)
return True
| [
"numpy.mean",
"matplotlib.pyplot.savefig",
"numpy.ones",
"os.makedirs",
"matplotlib.use",
"matplotlib.pyplot.cm.tab20",
"numpy.std",
"os.path.join",
"matplotlib.pyplot.cm.tab20c",
"matplotlib.image.imsave",
"numpy.max",
"matplotlib.pyplot.close",
"numpy.sum",
"matplotlib.pyplot.figure",
... | [((155, 176), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (169, 176), False, 'import matplotlib\n'), ((1675, 1710), 'os.path.join', 'os.path.join', (['log_dir', '"""best_model"""'], {}), "(log_dir, 'best_model')\n", (1687, 1710), False, 'import os\n'), ((4490, 4504), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (4499, 4504), True, 'import matplotlib.pyplot as plt\n'), ((5198, 5210), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (5208, 5210), True, 'import matplotlib.pyplot as plt\n'), ((5231, 5274), 'matplotlib.pyplot.subplots', 'plt.subplots', (['number_i', '(2)'], {'figsize': '(10, 50)'}), '(number_i, 2, figsize=(10, 50))\n', (5243, 5274), True, 'import matplotlib.pyplot as plt\n'), ((5818, 5839), 'matplotlib.pyplot.savefig', 'plt.savefig', (['img_name'], {}), '(img_name)\n', (5829, 5839), True, 'import matplotlib.pyplot as plt\n'), ((5848, 5859), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (5857, 5859), True, 'import matplotlib.pyplot as plt\n'), ((2400, 2440), 'os.makedirs', 'os.makedirs', (['self.log_dir'], {'exist_ok': '(True)'}), '(self.log_dir, exist_ok=True)\n', (2411, 2440), False, 'import os\n'), ((3550, 3565), 'matplotlib.pyplot.cm.tab20', 'plt.cm.tab20', (['(0)'], {}), '(0)\n', (3562, 3565), True, 'import matplotlib.pyplot as plt\n'), ((3566, 3581), 'matplotlib.pyplot.cm.tab20', 'plt.cm.tab20', (['(1)'], {}), '(1)\n', (3578, 3581), True, 'import matplotlib.pyplot as plt\n'), ((3582, 3598), 'matplotlib.pyplot.cm.tab20c', 'plt.cm.tab20c', (['(2)'], {}), '(2)\n', (3595, 3598), True, 'import matplotlib.pyplot as plt\n'), ((3618, 3634), 'matplotlib.pyplot.cm.tab20c', 'plt.cm.tab20c', (['(3)'], {}), '(3)\n', (3631, 3634), True, 'import matplotlib.pyplot as plt\n'), ((3636, 3652), 'matplotlib.pyplot.cm.tab20c', 'plt.cm.tab20c', (['(4)'], {}), '(4)\n', (3649, 3652), True, 'import matplotlib.pyplot as plt\n'), ((3672, 3688), 'matplotlib.pyplot.cm.tab20c', 'plt.cm.tab20c', (['(5)'], 
{}), '(5)\n', (3685, 3688), True, 'import matplotlib.pyplot as plt\n'), ((3689, 3705), 'matplotlib.pyplot.cm.tab20c', 'plt.cm.tab20c', (['(6)'], {}), '(6)\n', (3702, 3705), True, 'import matplotlib.pyplot as plt\n'), ((3706, 3722), 'matplotlib.pyplot.cm.tab20c', 'plt.cm.tab20c', (['(7)'], {}), '(7)\n', (3719, 3722), True, 'import matplotlib.pyplot as plt\n'), ((6168, 6199), 'os.makedirs', 'os.makedirs', (['dir'], {'exist_ok': '(True)'}), '(dir, exist_ok=True)\n', (6179, 6199), False, 'import os\n'), ((7502, 7525), 'numpy.ones', 'np.ones', (['(scale, scale)'], {}), '((scale, scale))\n', (7509, 7525), True, 'import numpy as np\n'), ((7569, 7630), 'numpy.empty', 'np.empty', (['(image.shape[0], image.shape[0], 3)'], {'dtype': 'np.uint8'}), '((image.shape[0], image.shape[0], 3), dtype=np.uint8)\n', (7577, 7630), True, 'import numpy as np\n'), ((6667, 6691), 'matplotlib.image.imsave', 'imsave', (['save_name', 'image'], {}), '(save_name, image)\n', (6673, 6691), False, 'from matplotlib.image import imsave\n'), ((12190, 12202), 'numpy.mean', 'np.mean', (['gap'], {}), '(gap)\n', (12197, 12202), True, 'import numpy as np\n'), ((12247, 12258), 'numpy.min', 'np.min', (['gap'], {}), '(gap)\n', (12253, 12258), True, 'import numpy as np\n'), ((12311, 12332), 'numpy.mean', 'np.mean', (['fit_solution'], {}), '(fit_solution)\n', (12318, 12332), True, 'import numpy as np\n'), ((12382, 12400), 'numpy.mean', 'np.mean', (['delivered'], {}), '(delivered)\n', (12389, 12400), True, 'import numpy as np\n'), ((12448, 12472), 'numpy.mean', 'np.mean', (['episode_rewards'], {}), '(episode_rewards)\n', (12455, 12472), True, 'import numpy as np\n'), ((12523, 12546), 'numpy.std', 'np.std', (['episode_rewards'], {}), '(episode_rewards)\n', (12529, 12546), True, 'import numpy as np\n'), ((12732, 12756), 'numpy.mean', 'np.mean', (['episode_lengths'], {}), '(episode_lengths)\n', (12739, 12756), True, 'import numpy as np\n'), ((13114, 13147), 'numpy.max', 'np.max', (["self.statistics['reward']"], {}), 
"(self.statistics['reward'])\n", (13120, 13147), True, 'import numpy as np\n'), ((6804, 6819), 'numpy.sum', 'np.sum', (['rewards'], {}), '(rewards)\n', (6810, 6819), True, 'import numpy as np\n'), ((11880, 11902), 'numpy.sum', 'np.sum', (['episode_reward'], {}), '(episode_reward)\n', (11886, 11902), True, 'import numpy as np\n'), ((7183, 7198), 'numpy.sum', 'np.sum', (['rewards'], {}), '(rewards)\n', (7189, 7198), True, 'import numpy as np\n'), ((7831, 7844), 'numpy.max', 'np.max', (['image'], {}), '(image)\n', (7837, 7844), True, 'import numpy as np\n'), ((7847, 7860), 'numpy.min', 'np.min', (['image'], {}), '(image)\n', (7853, 7860), True, 'import numpy as np\n'), ((7813, 7826), 'numpy.min', 'np.min', (['image'], {}), '(image)\n', (7819, 7826), True, 'import numpy as np\n')] |
# Solution of;
# Project Euler Problem 685: Inverse Digit Sum II
# https://projecteuler.net/problem=685
#
# Writing down the numbers which have a digit sum of 10 in ascending order, we
# get:$19, 28, 37, 46,55,64,73,82,91,109, 118,\dots$Let $f(n,m)$ be the
# $m^{\text{th}}$ occurrence of the digit sum $n$. For example, $f(10,1)=19$,
# $f(10,10)=109$ and $f(10,100)=1423$. Let $\displaystyle S(k)=\sum_{n=1}^k
# f(n^3,n^4)$. For example $S(3)=7128$ and $S(10)\equiv 32287064 \mod
# 1\,000\,000\,007$. Find $S(10\,000)$ modulo $1\,000\,000\,007$.
#
# by lcsm29 http://github.com/lcsm29/project-euler
import timed
def dummy(n):
pass
if __name__ == '__main__':
n = 1000
i = 10000
prob_id = 685
timed.caller(dummy, n, i, prob_id)
| [
"timed.caller"
] | [((723, 757), 'timed.caller', 'timed.caller', (['dummy', 'n', 'i', 'prob_id'], {}), '(dummy, n, i, prob_id)\n', (735, 757), False, 'import timed\n')] |
import logging
import signal
import gevent
import msgpack
from zerorpc import Publisher, Puller, Pusher, Server
import numpy as np
import jsonpickle
from .store import store
from .data import Data
from .operations.operation import Operation
from .utils.singleton import Singleton
__all__ = ['ServerAPI']
class ServerAPI(Server, metaclass=Singleton):
"""
RPC server class.
"""
def __init__(self, publisher=None, *args, **kwargs):
super(ServerAPI, self).__init__(*args, **kwargs)
self.publisher = publisher
def undo(self):
"""
Undo an operation popping from the stack and calling its `undo` method.
"""
Operation.pop().undo()
def redo(self):
"""
Call the `redo` method on the latest operation to be added to stack.
"""
Operation.redo()
def register(self, msg):
pass
# self.publisher.testing("This is a test on client.")
def load_data(self, path, format):
"""
Load a data file given path and format.
"""
import astropy.units as u
# data = Data.read(path, format=format)
data = Data(np.random.sample(100) * u.Jy, spectral_axis=np.linspace(1100, 1200, 100) * u.AA)
self.publisher.data_loaded(data.identifier)
def create_data(self, *args, **kwargs):
data = Data(*args, **kwargs)
self.publisher.data_created(data.identifier)
return data.identifier
def query_loader_formats(self):
"""
Returns a list of available data loader formats.
"""
from specutils import Spectrum1D
from astropy.io import registry as io_registry
all_formats = io_registry.get_formats(Spectrum1D)['Format']
return all_formats
def query_data(self, identifier):
data = store[identifier]
data_dict = {
'name': data.name,
'identifier': data.identifier,
'spectral_axis': data.spectral_axis.value.tolist(),
'spectral_axis_unit': data.spectral_axis.unit.to_string(),
'flux': data.flux.value.tolist(),
'unit': data.flux.unit.to_string()
}
return data_dict
def query_data_attribute(self, identifier, name):
data = store[identifier]
data_attr = getattr(data, name)
packed_data_attr = data.encode(data_attr)
return packed_data_attr
def launch(server_address=None, publisher_address=None, block=True):
server_address = server_address or "tcp://127.0.0.1:4242"
publisher_address = publisher_address or "tcp://127.0.0.1:4243"
# Establish the publisher service. This will send events to any
# subscribed services along the designated address.
publisher = Publisher()
publisher.connect(publisher_address)
# Setup the server service. This will be the api that clients
# will send events to.
server = ServerAPI(publisher)
server.bind(server_address)
logging.info(
"Server is now listening on %s and sending on %s.",
server_address, publisher_address)
# Allow for stopping the server via ctrl-c
gevent.signal(signal.SIGINT, server.stop)
server.run() if block else gevent.spawn(server.run) | [
"zerorpc.Publisher",
"gevent.signal",
"numpy.linspace",
"numpy.random.sample",
"astropy.io.registry.get_formats",
"logging.info",
"gevent.spawn"
] | [((2766, 2777), 'zerorpc.Publisher', 'Publisher', ([], {}), '()\n', (2775, 2777), False, 'from zerorpc import Publisher, Puller, Pusher, Server\n'), ((2984, 3087), 'logging.info', 'logging.info', (['"""Server is now listening on %s and sending on %s."""', 'server_address', 'publisher_address'], {}), "('Server is now listening on %s and sending on %s.',\n server_address, publisher_address)\n", (2996, 3087), False, 'import logging\n'), ((3153, 3194), 'gevent.signal', 'gevent.signal', (['signal.SIGINT', 'server.stop'], {}), '(signal.SIGINT, server.stop)\n', (3166, 3194), False, 'import gevent\n'), ((3227, 3251), 'gevent.spawn', 'gevent.spawn', (['server.run'], {}), '(server.run)\n', (3239, 3251), False, 'import gevent\n'), ((1704, 1739), 'astropy.io.registry.get_formats', 'io_registry.get_formats', (['Spectrum1D'], {}), '(Spectrum1D)\n', (1727, 1739), True, 'from astropy.io import registry as io_registry\n'), ((1165, 1186), 'numpy.random.sample', 'np.random.sample', (['(100)'], {}), '(100)\n', (1181, 1186), True, 'import numpy as np\n'), ((1209, 1237), 'numpy.linspace', 'np.linspace', (['(1100)', '(1200)', '(100)'], {}), '(1100, 1200, 100)\n', (1220, 1237), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
# Author: t0pep0
# e-mail: <EMAIL>
# Jabber: <EMAIL>
# BTC : 1ipEA2fcVyjiUnBqUx7PVy5efktz2hucb
# donate free =)
# Forked and modified by <NAME>
# Compatible Python3
import hmac
import hashlib
import time
import urllib.request, urllib.parse, urllib.error
import json
class Api(object):
__username = ''
__api_key = ''
__api_secret = ''
__nonce_v = ''
# Init class
def __init__(self, username, api_key, api_secret):
self.__username = username
self.__api_key = api_key
self.__api_secret = api_secret
# get timestamp as nonce
def __nonce(self):
self.__nonce_v = '{:.10f}'.format(time.time() * 1000).split('.')[0]
# generate signature
def __signature(self):
byte_secret = bytes(self.__api_secret, "ascii")
string = self.__nonce_v + self.__username + self.__api_key # create string
encode_string = string.encode ('utf-8')
signature = hmac.new(byte_secret, encode_string, digestmod=hashlib.sha256).hexdigest().upper() # create signature
return signature
def __post(self, url, param): # Post Request (Low Level API call)
post_url = url
header = { 'User-agent': 'bot-cex.io-' }
params = urllib.parse.urlencode(param)
post_data = params.encode( "ascii")
req = urllib.request.Request(url = post_url, data = post_data, headers = header)
page = urllib.request.urlopen(req).read()
return page
def api_call(self, method, param={}, private=0, couple=''): # api call (Middle level)
url = 'https://cex.io/api/' + method + '/' # generate url
if couple != '':
url = url + couple + '/' # set couple if needed
if private == 1: # add auth-data if needed
self.__nonce()
param.update({
'key': self.__api_key,
'signature': self.__signature(),
'nonce': self.__nonce_v})
answer = self.__post(url, param) # Post Request
a = answer.decode("utf-8")
#return json.loads(answer) # generate dict and return
return a # generates a valid json output
def ticker(self, couple='MHC/BTC'):
return self.api_call('ticker', {}, 0, couple)
def tickers(self, couple='USD'):
return self.api_call('tickers', {}, 0, couple)
def ohlcv(self, end_date, couple='BTC/USD'):
return self.api_call('ohlcv/hd/'+str(end_date), {}, 0, couple)
def order_book(self, couple='MHC/BTC'):
return self.api_call('order_book', {}, 0, couple)
def trade_history(self, since=1, couple='MHC/BTC'):
return self.api_call('trade_history', {"since": str(since)}, 0, couple)
def balance(self):
return self.api_call('balance', {}, 1)
def current_orders(self, couple='MHC/BTC'):
return self.api_call('open_orders', {}, 1, couple)
def cancel_order(self, order_id):
return self.api_call('cancel_order', {"id": order_id}, 1)
def place_order(self, ptype='buy', amount=1, price=1, couple='MHC/BTC'):
return self.api_call('place_order', {"type": ptype, "amount": str(amount), "price": str(price)}, 1, couple)
def archived_orders(self, couple='XLM/USD'):
return self.api_call('archived_orders', {}, 1, couple)
def price_stats(self, last_hours, max_resp_arr_size, couple='MHC/BTC'):
return self.api_call(
'price_stats',
{"lastHours": last_hours, "maxRespArrSize": max_resp_arr_size},
0, couple)
| [
"hmac.new",
"time.time"
] | [((963, 1025), 'hmac.new', 'hmac.new', (['byte_secret', 'encode_string'], {'digestmod': 'hashlib.sha256'}), '(byte_secret, encode_string, digestmod=hashlib.sha256)\n', (971, 1025), False, 'import hmac\n'), ((668, 679), 'time.time', 'time.time', ([], {}), '()\n', (677, 679), False, 'import time\n')] |
import numpy as np
from GeneralUtils import list_to_sum
class Fourier:
def __init__(self,amp=[1],freq=[1],ph=[0]):
self.amp = amp
self.freq = freq
self.ph = ph
def __str__(self):
out = []
for i in range(len(self.amp)):
if self.amp[i] != 1:
a = f"{self.amp[i]}*"
else:
a = ""
if self.freq[i] != 1:
f = f"*{self.freq[i]}"
else:
f = ""
if self.ph[i] != 0:
p = f"+{self.ph[i]}"
else:
p = ""
out.append(f"{a}sin(x{f}{p})")
return list_to_sum(out)
def __add__(self,other):
a = self.amp + other.amp
f = self.freq + other.freq
p = self.ph + other.ph
return Fourier(a,f,p)
def evaluate_series(F,x):
out = np.zeros_like(x)
for i in range(len(F.amp)):
a = F.amp[i]
f = F.freq[i]
p = F.ph[i]
out += a*np.sin(x*f+p)
return out
| [
"numpy.sin",
"numpy.zeros_like",
"GeneralUtils.list_to_sum"
] | [((966, 982), 'numpy.zeros_like', 'np.zeros_like', (['x'], {}), '(x)\n', (979, 982), True, 'import numpy as np\n'), ((740, 756), 'GeneralUtils.list_to_sum', 'list_to_sum', (['out'], {}), '(out)\n', (751, 756), False, 'from GeneralUtils import list_to_sum\n'), ((1095, 1112), 'numpy.sin', 'np.sin', (['(x * f + p)'], {}), '(x * f + p)\n', (1101, 1112), True, 'import numpy as np\n')] |
from rest_framework.decorators import api_view
from rest_framework.views import APIView
from rest_framework import status
from rest_framework.response import Response
from .models import Book
from .serializers import BookSerializer
# Create your views here.
class GetAllData(APIView):
def get(self, request):
query = Book.objects.all().order_by('-create_at')
serializers = BookSerializer(query , many=True)
return Response(serializers.data, status=status.HTTP_200_OK)
@api_view(['GET'])
def allApi(request):
if request.method == 'GET':
query = Book.objects.all().order_by('-create_at')
serializer = BookSerializer(query, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@api_view(['POST'])
def SetData(request):
if request.method == 'POST':
serializer = BookSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATE)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class GetFavData(APIView):
def get(self,request):
query = Book.objects.filter(fav=True)
serializer = BookSerializer(query, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
class UpdateFavData(APIView):
def get(self, request, pk):
query = Book.objects.get(pk=pk)
serializer = BookSerializer(query)
return Response(serializer.data, status=status.HTTP_200_OK)
def put(self, request, pk):
query = Book.objects.get(pk=pk)
serializer = BookSerializer(query, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
else:
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class PostModelData(APIView):
def post(self, request):
serializer = BookSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class SearchData(APIView):
def get(self, request):
search = request.GET['name']
query = Book.objects.filter(store_name__contains=search)
serializer= BookSerializer(query, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
class DeleteData(APIView):
def delete(self, request, pk):
query = Book.objects.get(pk=pk)
query.delete()
return Response(status=status.HTTP_204_NO_CONTENT) | [
"rest_framework.response.Response",
"rest_framework.decorators.api_view"
] | [((500, 517), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (508, 517), False, 'from rest_framework.decorators import api_view\n'), ((753, 771), 'rest_framework.decorators.api_view', 'api_view', (["['POST']"], {}), "(['POST'])\n", (761, 771), False, 'from rest_framework.decorators import api_view\n'), ((444, 497), 'rest_framework.response.Response', 'Response', (['serializers.data'], {'status': 'status.HTTP_200_OK'}), '(serializers.data, status=status.HTTP_200_OK)\n', (452, 497), False, 'from rest_framework.response import Response\n'), ((698, 750), 'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': 'status.HTTP_200_OK'}), '(serializer.data, status=status.HTTP_200_OK)\n', (706, 750), False, 'from rest_framework.response import Response\n'), ((1037, 1100), 'rest_framework.response.Response', 'Response', (['serializer.errors'], {'status': 'status.HTTP_400_BAD_REQUEST'}), '(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n', (1045, 1100), False, 'from rest_framework.response import Response\n'), ((1281, 1333), 'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': 'status.HTTP_200_OK'}), '(serializer.data, status=status.HTTP_200_OK)\n', (1289, 1333), False, 'from rest_framework.response import Response\n'), ((1496, 1548), 'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': 'status.HTTP_200_OK'}), '(serializer.data, status=status.HTTP_200_OK)\n', (1504, 1548), False, 'from rest_framework.response import Response\n'), ((2197, 2260), 'rest_framework.response.Response', 'Response', (['serializer.errors'], {'status': 'status.HTTP_400_BAD_REQUEST'}), '(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n', (2205, 2260), False, 'from rest_framework.response import Response\n'), ((2488, 2540), 'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': 'status.HTTP_200_OK'}), '(serializer.data, 
status=status.HTTP_200_OK)\n', (2496, 2540), False, 'from rest_framework.response import Response\n'), ((2690, 2733), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_204_NO_CONTENT'}), '(status=status.HTTP_204_NO_CONTENT)\n', (2698, 2733), False, 'from rest_framework.response import Response\n'), ((965, 1021), 'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': 'status.HTTP_201_CREATE'}), '(serializer.data, status=status.HTTP_201_CREATE)\n', (973, 1021), False, 'from rest_framework.response import Response\n'), ((1767, 1824), 'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': 'status.HTTP_201_CREATED'}), '(serializer.data, status=status.HTTP_201_CREATED)\n', (1775, 1824), False, 'from rest_framework.response import Response\n'), ((1862, 1925), 'rest_framework.response.Response', 'Response', (['serializer.errors'], {'status': 'status.HTTP_400_BAD_REQUEST'}), '(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n', (1870, 1925), False, 'from rest_framework.response import Response\n'), ((2124, 2181), 'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': 'status.HTTP_201_CREATED'}), '(serializer.data, status=status.HTTP_201_CREATED)\n', (2132, 2181), False, 'from rest_framework.response import Response\n')] |
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
# URLconf: maps request paths to views in the `polls` app.
# NOTE(review): string view references such as 'polls.views.home' were removed
# in Django 1.10 -- this URLconf assumes a pre-1.10 Django; confirm the
# project's Django version before upgrading.
urlpatterns = [
    # Examples:
    # url(r'^$', 'mysite.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^$', 'polls.views.home', name='home'),
    # Django admin site.
    url(r'^admin/', include(admin.site.urls)),
    # user auth urls
    url(r'^accounts/login/$', 'polls.views.login'),
    url(r'^accounts/auth/$', 'polls.views.auth_view'),
    url(r'^accounts/logout/$', 'polls.views.logout'),
    url(r'^accounts/loggedin/$', 'polls.views.loggedin'),
    url(r'^accounts/invalid/$', 'polls.views.invalid_login'),
    url(r'^accounts/register/$', 'polls.views.register_user'),
    url(r'^accounts/register_success/$', 'polls.views.register_success'),
    # report CRUD routes; (?P<id>\d+) captures the report primary key
    url(r'^reports/new/$', 'polls.views.new_report'),
    url(r'^reports/list/$', 'polls.views.user_report'),
    url(r'^reports/detail/(?P<id>\d+)/$', 'polls.views.report_details'),
    url(r'^reports/delete/(?P<id>\d+)/$','polls.views.delete'),
    url(r'^reports/all/$','polls.views.report_all'),
    url(r'^reports/edit/(?P<id>\d+)/$', 'polls.views.edit_report'),
    # folder
    url(r'^folder/new/$', 'polls.views.new_folder'),
    #search
    url(r'^search-form/$', 'polls.views.search_form'),
    url(r'^search/$', 'polls.views.search'),
]
| [
"django.conf.urls.include",
"django.conf.urls.url"
] | [((308, 350), 'django.conf.urls.url', 'url', (['"""^$"""', '"""polls.views.home"""'], {'name': '"""home"""'}), "('^$', 'polls.views.home', name='home')\n", (311, 350), False, 'from django.conf.urls import include, url\n'), ((442, 487), 'django.conf.urls.url', 'url', (['"""^accounts/login/$"""', '"""polls.views.login"""'], {}), "('^accounts/login/$', 'polls.views.login')\n", (445, 487), False, 'from django.conf.urls import include, url\n'), ((495, 543), 'django.conf.urls.url', 'url', (['"""^accounts/auth/$"""', '"""polls.views.auth_view"""'], {}), "('^accounts/auth/$', 'polls.views.auth_view')\n", (498, 543), False, 'from django.conf.urls import include, url\n'), ((551, 598), 'django.conf.urls.url', 'url', (['"""^accounts/logout/$"""', '"""polls.views.logout"""'], {}), "('^accounts/logout/$', 'polls.views.logout')\n", (554, 598), False, 'from django.conf.urls import include, url\n'), ((605, 656), 'django.conf.urls.url', 'url', (['"""^accounts/loggedin/$"""', '"""polls.views.loggedin"""'], {}), "('^accounts/loggedin/$', 'polls.views.loggedin')\n", (608, 656), False, 'from django.conf.urls import include, url\n'), ((663, 718), 'django.conf.urls.url', 'url', (['"""^accounts/invalid/$"""', '"""polls.views.invalid_login"""'], {}), "('^accounts/invalid/$', 'polls.views.invalid_login')\n", (666, 718), False, 'from django.conf.urls import include, url\n'), ((725, 781), 'django.conf.urls.url', 'url', (['"""^accounts/register/$"""', '"""polls.views.register_user"""'], {}), "('^accounts/register/$', 'polls.views.register_user')\n", (728, 781), False, 'from django.conf.urls import include, url\n'), ((788, 855), 'django.conf.urls.url', 'url', (['"""^accounts/register_success/$"""', '"""polls.views.register_success"""'], {}), "('^accounts/register_success/$', 'polls.views.register_success')\n", (791, 855), False, 'from django.conf.urls import include, url\n'), ((880, 927), 'django.conf.urls.url', 'url', (['"""^reports/new/$"""', '"""polls.views.new_report"""'], {}), 
"('^reports/new/$', 'polls.views.new_report')\n", (883, 927), False, 'from django.conf.urls import include, url\n'), ((935, 984), 'django.conf.urls.url', 'url', (['"""^reports/list/$"""', '"""polls.views.user_report"""'], {}), "('^reports/list/$', 'polls.views.user_report')\n", (938, 984), False, 'from django.conf.urls import include, url\n'), ((992, 1059), 'django.conf.urls.url', 'url', (['"""^reports/detail/(?P<id>\\\\d+)/$"""', '"""polls.views.report_details"""'], {}), "('^reports/detail/(?P<id>\\\\d+)/$', 'polls.views.report_details')\n", (995, 1059), False, 'from django.conf.urls import include, url\n'), ((1065, 1124), 'django.conf.urls.url', 'url', (['"""^reports/delete/(?P<id>\\\\d+)/$"""', '"""polls.views.delete"""'], {}), "('^reports/delete/(?P<id>\\\\d+)/$', 'polls.views.delete')\n", (1068, 1124), False, 'from django.conf.urls import include, url\n'), ((1129, 1176), 'django.conf.urls.url', 'url', (['"""^reports/all/$"""', '"""polls.views.report_all"""'], {}), "('^reports/all/$', 'polls.views.report_all')\n", (1132, 1176), False, 'from django.conf.urls import include, url\n'), ((1182, 1244), 'django.conf.urls.url', 'url', (['"""^reports/edit/(?P<id>\\\\d+)/$"""', '"""polls.views.edit_report"""'], {}), "('^reports/edit/(?P<id>\\\\d+)/$', 'polls.views.edit_report')\n", (1185, 1244), False, 'from django.conf.urls import include, url\n'), ((1279, 1325), 'django.conf.urls.url', 'url', (['"""^folder/new/$"""', '"""polls.views.new_folder"""'], {}), "('^folder/new/$', 'polls.views.new_folder')\n", (1282, 1325), False, 'from django.conf.urls import include, url\n'), ((1349, 1397), 'django.conf.urls.url', 'url', (['"""^search-form/$"""', '"""polls.views.search_form"""'], {}), "('^search-form/$', 'polls.views.search_form')\n", (1352, 1397), False, 'from django.conf.urls import include, url\n'), ((1404, 1442), 'django.conf.urls.url', 'url', (['"""^search/$"""', '"""polls.views.search"""'], {}), "('^search/$', 'polls.views.search')\n", (1407, 1442), False, 'from 
django.conf.urls import include, url\n'), ((373, 397), 'django.conf.urls.include', 'include', (['admin.site.urls'], {}), '(admin.site.urls)\n', (380, 397), False, 'from django.conf.urls import include, url\n')] |
from functools import partial
from matplotlib.pyplot import xcorr
import numpy as np
import jax
import jax.numpy as jnp
import flax
from flax import linen as nn
import distrax
from .jax_utils import batch_to_jax, extend_and_repeat, next_rng
def update_target_network(main_params, target_params, tau):
    """Polyak (soft) target update: new_target = tau * main + (1 - tau) * target.

    Applied leaf-wise over arbitrary parameter pytrees. `tau` in [0, 1]
    controls how quickly the target network tracks the main network.
    """
    # jax.tree_multimap was deprecated and later removed from JAX;
    # jax.tree_util.tree_map accepts multiple pytrees and is the supported
    # equivalent with identical semantics.
    return jax.tree_util.tree_map(
        lambda x, y: tau * x + (1.0 - tau) * y,
        main_params, target_params
    )
# def multiple_action_q_function(forward):
# # Forward the q function with multiple actions on each state, to be used as a decorator
# def wrapped(self, observations, actions, **kwargs):
# multiple_actions = False
# batch_size = observations.shape[0]
# if actions.ndim == 3 and observations.ndim == 2:
# multiple_actions = True
# observations = extend_and_repeat(observations, 1, actions.shape[1]).reshape(-1, observations.shape[-1])
# actions = actions.reshape(-1, actions.shape[-1])
# q_values = forward(self, observations, actions, **kwargs)
# if multiple_actions:
# q_values = q_values.reshape(batch_size, -1)
# return q_values
# return wrapped
def multiple_action_q_function(forward):
    """Decorator: evaluate a Q-function on several candidate actions per state.

    When `actions` is (batch, n, act_dim) and `observations` is
    (batch, obs_dim), the observations are tiled so every (state, action)
    pair is scored in one forward pass, and both outputs are folded back
    to shape (batch, n). Otherwise inputs pass through unchanged.
    """
    def wrapped(self, observations, actions, **kwargs):
        num_states = observations.shape[0]
        tiled = actions.ndim == 3 and observations.ndim == 2
        if tiled:
            observations = extend_and_repeat(
                observations, 1, actions.shape[1]
            ).reshape(-1, observations.shape[-1])
            actions = actions.reshape(-1, actions.shape[-1])
        last_layer, q_values = forward(self, observations, actions, **kwargs)
        if not tiled:
            return last_layer, q_values
        return (last_layer.reshape(num_states, -1),
                q_values.reshape(num_states, -1))
    return wrapped
def multiple_action_encode_function(forward):
    """Decorator: run an encoder over several actions per state.

    A 3-D `actions` tensor (batch, n, act_dim) paired with 2-D
    `observations` is flattened to (batch*n, ...) for the forward pass;
    the returned samples and log-probs are reshaped to (batch, n, -1).
    Other shapes pass straight through.
    """
    def wrapped(self, rng, observations, actions, **kwargs):
        num_states = observations.shape[0]
        if actions.ndim == 3 and observations.ndim == 2:
            n_actions = actions.shape[1]
            observations = extend_and_repeat(
                observations, 1, n_actions
            ).reshape(-1, observations.shape[-1])
            actions = actions.reshape(-1, actions.shape[-1])
            samples, log_probs = forward(self, rng, observations, actions, **kwargs)
            return (samples.reshape(num_states, n_actions, -1),
                    log_probs.reshape(num_states, n_actions, -1))
        return forward(self, rng, observations, actions, **kwargs)
    return wrapped
def multiple_action_decode_function(forward):
    """Decorator: run a decoder over several latent actions per state.

    With 3-D `actions` (batch, n, act_dim) and 2-D `observations`, inputs
    are flattened for the forward pass and the output is reshaped to
    (batch, n, -1). Other shapes pass straight through.
    """
    def wrapped(self, observations, actions, **kwargs):
        num_states = observations.shape[0]
        if actions.ndim == 3 and observations.ndim == 2:
            n_actions = actions.shape[1]
            observations = extend_and_repeat(
                observations, 1, n_actions
            ).reshape(-1, observations.shape[-1])
            actions = actions.reshape(-1, actions.shape[-1])
            decoded = forward(self, observations, actions, **kwargs)
            return decoded.reshape(num_states, n_actions, -1)
        return forward(self, observations, actions, **kwargs)
    return wrapped
class Scalar(nn.Module):
    """A single learnable scalar parameter (used for log-std scale/offset)."""
    # Value the parameter is initialized to.
    init_value: float
    def setup(self):
        # The initializer ignores the RNG key flax passes in (`x`) and always
        # returns init_value, so the parameter starts at a fixed value.
        self.value = self.param('value', lambda x:self.init_value)
    def __call__(self):
        # Return the current (trainable) scalar.
        return self.value
class FullyConnectedNetwork(nn.Module):
    """MLP with ReLU hidden layers and a small-scale-initialized linear head.

    `arch` is a hyphen-separated list of hidden widths (e.g. '256-256').
    With orthogonal_init, hidden kernels use orthogonal(sqrt(2)) and the
    head uses orthogonal(1e-2); otherwise hidden layers use flax defaults
    and the head uses uniform variance scaling with scale 1e-2.
    """
    output_dim: int
    arch: str = '256-256'
    orthogonal_init: bool = False

    @nn.compact
    def __call__(self, input_tensor):
        activations = input_tensor
        # Hidden stack; Dense modules are created in the same order as the
        # original so flax parameter names are unchanged.
        for width in (int(token) for token in self.arch.split('-')):
            if self.orthogonal_init:
                layer = nn.Dense(
                    width,
                    kernel_init=jax.nn.initializers.orthogonal(jnp.sqrt(2.0)),
                    bias_init=jax.nn.initializers.zeros
                )
            else:
                layer = nn.Dense(width)
            activations = nn.relu(layer(activations))
        # Output head: tiny initial scale keeps early outputs near zero.
        if self.orthogonal_init:
            head_kernel_init = jax.nn.initializers.orthogonal(1e-2)
        else:
            head_kernel_init = jax.nn.initializers.variance_scaling(
                1e-2, 'fan_in', 'uniform'
            )
        return nn.Dense(
            self.output_dim,
            kernel_init=head_kernel_init,
            bias_init=jax.nn.initializers.zeros
        )(activations)
class FullyConnectedNetworkWithLastLayer(nn.Module):
    """MLP that returns both its L2-normalized last hidden layer and its output.

    The final hidden activation is normalized to (approximately) unit L2 norm
    and returned alongside the linear head's output, so callers can reuse the
    penultimate features as a representation.
    """
    output_dim: int
    arch: str = '256-256'
    orthogonal_init: bool = False
    @nn.compact
    def __call__(self, input_tensor):
        x = input_tensor
        # Unpacking two dims fixes the expected input rank to (batch, features).
        batch, _ = jnp.shape(x)
        hidden_sizes = [int(h) for h in self.arch.split('-')]
        for h in hidden_sizes:
            if self.orthogonal_init:
                x = nn.Dense(
                    h,
                    kernel_init=jax.nn.initializers.orthogonal(jnp.sqrt(2.0)),
                    bias_init=jax.nn.initializers.zeros
                )(x)
            else:
                x = nn.Dense(h)(x)
            # x = nn.LayerNorm()(x)
            x = nn.relu(x)
        # Normalize the last hidden layer to unit L2 norm; the 1e-6 guards
        # against division by zero for an all-zero activation vector.
        normalized = jnp.reshape(jnp.sqrt(jnp.sum(x**2, axis=-1) + 1e-6), (batch,1))
        x = x / normalized
        # x = x / (jnp.sqrt(jnp.sum(x**2, axis=-1) + 1e-6))
        if self.orthogonal_init:
            output = nn.Dense(
                self.output_dim,
                kernel_init=jax.nn.initializers.orthogonal(1e-2),
                bias_init=jax.nn.initializers.zeros
            )(x)
        else:
            output = nn.Dense(
                self.output_dim,
                kernel_init=jax.nn.initializers.variance_scaling(
                    1e-2, 'fan_in', 'uniform'
                ),
                bias_init=jax.nn.initializers.zeros
            )(x)
        # (normalized last hidden layer, linear head output)
        return x, output
class FullyConnectedQFunction(nn.Module):
    """Q(s, a) network that also exposes its normalized last hidden layer.

    The multiple_action_q_function decorator lets callers score several
    candidate actions per observation in a single call.
    """
    observation_dim: int
    action_dim: int
    arch: str = '256-256'
    orthogonal_init: bool = False
    @nn.compact
    @multiple_action_q_function
    def __call__(self, observations, actions):
        # State-action input: concatenate along the feature axis.
        x = jnp.concatenate([observations, actions], axis=-1)
        # x = FullyConnectedNetwork(output_dim=1, arch=self.arch, orthogonal_init=self.orthogonal_init)(x)
        last_layer_x, x = FullyConnectedNetworkWithLastLayer(output_dim=1, arch=self.arch, orthogonal_init=self.orthogonal_init)(x)
        # Drop the trailing singleton output dim so Q-values are shape (batch,).
        return last_layer_x, jnp.squeeze(x, -1)
class FullyConnectedActionQFunction(nn.Module):
    """Q-network that re-injects the raw actions at every hidden layer.

    Returns (last hidden layer, Q-values). With `normalize`, the last hidden
    layer is L2-normalized before the output head.
    """
    observation_dim: int
    action_dim: int
    output_dim: int = 1
    arch: str = '256-256'
    orthogonal_init: bool = False
    normalize: bool = False
    @nn.compact
    @multiple_action_q_function
    def __call__(self, observations, actions):
        x = jnp.concatenate([observations, actions], axis=-1)
        batch, _ = jnp.shape(x)
        hidden_sizes = [int(h) for h in self.arch.split('-')]
        for h in hidden_sizes:
            if self.orthogonal_init:
                # NOTE(review): this branch does NOT concatenate `actions`
                # into the hidden input, unlike the else branch and the output
                # head below -- confirm whether the asymmetry is intentional.
                x = nn.Dense(
                    h,
                    kernel_init=jax.nn.initializers.orthogonal(jnp.sqrt(2.0)),
                    bias_init=jax.nn.initializers.zeros
                )(x)
            else:
                # Action skip-connection: actions are appended to every layer input.
                x = nn.Dense(h)(jnp.concatenate([x, actions], axis=-1))
            x = nn.relu(x)
        if self.normalize:
            # L2-normalize the last hidden layer (1e-6 avoids divide-by-zero).
            normalized = jnp.reshape(jnp.sqrt(jnp.sum(x**2, axis=-1) + 1e-6), (batch,1))
            x = x / normalized
        if self.orthogonal_init:
            output = nn.Dense(
                self.output_dim,
                kernel_init=jax.nn.initializers.orthogonal(1e-2),
                bias_init=jax.nn.initializers.zeros
            )(jnp.concatenate([x, actions], axis=-1))
        else:
            output = nn.Dense(
                self.output_dim,
                kernel_init=jax.nn.initializers.variance_scaling(
                    1e-2, 'fan_in', 'uniform'
                ),
                bias_init=jax.nn.initializers.zeros
            )(jnp.concatenate([x, actions], axis=-1))
        return x, jnp.squeeze(output, -1)
class TanhGaussianPolicy(nn.Module):
    """Tanh-squashed diagonal-Gaussian policy.

    One MLP produces both the mean and the (pre-scale) log-std, split along
    the last axis. The log-std is passed through a learnable affine
    (Scalar multiplier/offset) and clipped to [-20, 2] for numerical safety.
    Sampled actions are multiplied by `action_scale` on the way out.
    """
    observation_dim: int
    action_dim: int
    arch: str = '256-256'
    orthogonal_init: bool = False
    log_std_multiplier: float = 1.0
    log_std_offset: float = -1.0
    action_scale: float = 1.0
    def setup(self):
        # Single trunk with a 2*action_dim head: [mean | log_std].
        self.base_network = FullyConnectedNetwork(
            output_dim=2 * self.action_dim, arch=self.arch, orthogonal_init=self.orthogonal_init
        )
        self.log_std_multiplier_module = Scalar(self.log_std_multiplier)
        self.log_std_offset_module = Scalar(self.log_std_offset)
    def log_prob(self, observations, actions):
        """Log-density of `actions` under the policy at `observations`."""
        if actions.ndim == 3:
            # Tile observations so each of the n actions per state has one.
            observations = extend_and_repeat(observations, 1, actions.shape[1])
        base_network_output = self.base_network(observations)
        mean, log_std = jnp.split(base_network_output, 2, axis=-1)
        log_std = self.log_std_multiplier_module() * log_std + self.log_std_offset_module()
        log_std = jnp.clip(log_std, -20.0, 2.0)
        action_distribution = distrax.Transformed(
            distrax.MultivariateNormalDiag(mean, jnp.exp(log_std)),
            distrax.Block(distrax.Tanh(), ndims=1)
        )
        # Undo the output scaling before evaluating the squashed density.
        return action_distribution.log_prob(actions / self.action_scale)
    def __call__(self, rng, observations, deterministic=False, repeat=None):
        """Sample actions (or take the distribution mode); returns (actions, log_prob)."""
        if repeat is not None:
            observations = extend_and_repeat(observations, 1, repeat)
        base_network_output = self.base_network(observations)
        mean, log_std = jnp.split(base_network_output, 2, axis=-1)
        log_std = self.log_std_multiplier_module() * log_std + self.log_std_offset_module()
        log_std = jnp.clip(log_std, -20.0, 2.0)
        action_distribution = distrax.Transformed(
            distrax.MultivariateNormalDiag(mean, jnp.exp(log_std)),
            distrax.Block(distrax.Tanh(), ndims=1)
        )
        if deterministic:
            # Clip before tanh so the mode's log-prob stays finite.
            mean = jnp.clip(mean, -6, 6)
            samples = jnp.tanh(mean)
            log_prob = action_distribution.log_prob(samples)
        else:
            samples, log_prob = action_distribution.sample_and_log_prob(seed=rng)
        samples = samples * self.action_scale
        return samples, log_prob
class ActionRepresentationPolicy(nn.Module):
    """Encoder from (observation, action) pairs to latent actions.

    Produces a diagonal Gaussian over the latent action space, optionally
    tanh-squashed (disabled via `no_tanh`). Mean and log-std come from one
    MLP head split in half; log-std is scaled/shifted by learnable Scalars
    and clipped to [-20, 2].
    """
    observation_dim: int
    action_dim: int
    latent_action_dim: int
    arch: str = '256-256'
    orthogonal_init: bool = False
    no_tanh: bool = False
    log_std_multiplier: float = 1.0
    log_std_offset: float = -1.0
    # batch_norm: bool = True
    def setup(self):
        self.base_network = FullyConnectedNetwork(
            output_dim=2 * self.latent_action_dim, arch=self.arch, orthogonal_init=self.orthogonal_init
        )
        self.log_std_multiplier_module = Scalar(self.log_std_multiplier)
        self.log_std_offset_module = Scalar(self.log_std_offset)
    def log_prob(self, observations, actions, latent_actions):
        """Log-density of `latent_actions` under the encoder's distribution."""
        if actions.ndim == 3:
            # Tile observations so each of the n actions per state has one.
            observations = extend_and_repeat(observations, 1, actions.shape[1])
        x = jnp.concatenate([observations, actions], axis=-1)
        base_network_output = self.base_network(x)
        mean, log_std = jnp.split(base_network_output, 2, axis=-1)
        log_std = self.log_std_multiplier_module() * log_std + self.log_std_offset_module()
        log_std = jnp.clip(log_std, -20.0, 2.0)
        if self.no_tanh:
            action_distribution = distrax.MultivariateNormalDiag(mean, jnp.exp(log_std))
        else:
            action_distribution = distrax.Transformed(
                distrax.MultivariateNormalDiag(mean, jnp.exp(log_std)),
                distrax.Block(distrax.Tanh(), ndims=1)
            )
        return action_distribution.log_prob(latent_actions)
    @nn.compact
    @multiple_action_encode_function
    def __call__(self, rng, observations, actions, deterministic=False, repeat=None):
        """Sample (or take the mode of) a latent action; returns (samples, log_prob)."""
        # NOTE(review): this module defines both setup() and an @nn.compact
        # method -- confirm the flax version in use accepts that combination.
        if repeat is not None:
            observations = extend_and_repeat(observations, 1, repeat)
        x = jnp.concatenate([observations, actions], axis=-1)
        base_network_output = self.base_network(x)
        mean, log_std = jnp.split(base_network_output, 2, axis=-1)
        log_std = self.log_std_multiplier_module() * log_std + self.log_std_offset_module()
        log_std = jnp.clip(log_std, -20.0, 2.0)
        if self.no_tanh:
            action_distribution = distrax.MultivariateNormalDiag(mean, jnp.exp(log_std))
        else:
            action_distribution = distrax.Transformed(
                distrax.MultivariateNormalDiag(mean, jnp.exp(log_std)),
                distrax.Block(distrax.Tanh(), ndims=1)
            )
        if deterministic:
            # Mode of the squashed Gaussian (tanh of the mean).
            samples = jnp.tanh(mean)
            log_prob = action_distribution.log_prob(samples)
        else:
            samples, log_prob = action_distribution.sample_and_log_prob(seed=rng)
        return samples, log_prob
    def get_statistics(self, rng, observations, actions, deterministic=False, repeat=None):
        """Like __call__ but returns (samples, mean, log_std) for diagnostics."""
        if repeat is not None:
            observations = extend_and_repeat(observations, 1, repeat)
        x = jnp.concatenate([observations, actions], axis=-1)
        base_network_output = self.base_network(x)
        mean, log_std = jnp.split(base_network_output, 2, axis=-1)
        log_std = self.log_std_multiplier_module() * log_std + self.log_std_offset_module()
        log_std = jnp.clip(log_std, -20.0, 2.0)
        if self.no_tanh:
            action_distribution = distrax.MultivariateNormalDiag(mean, jnp.exp(log_std))
        else:
            action_distribution = distrax.Transformed(
                distrax.MultivariateNormalDiag(mean, jnp.exp(log_std)),
                distrax.Block(distrax.Tanh(), ndims=1)
            )
        if deterministic:
            samples = jnp.tanh(mean)
            # log_prob is computed but not returned here.
            log_prob = action_distribution.log_prob(samples)
        else:
            samples, log_prob = action_distribution.sample_and_log_prob(seed=rng)
        return samples, mean, log_std
class ActionOnlyRepresentationPolicy(nn.Module):
    """Encoder from raw actions to latent actions (no observation input).

    Produces a diagonal Gaussian over the latent action space, optionally
    tanh-squashed (disabled via `no_tanh`). Mean and log-std come from one
    MLP head split in half; log-std is scaled/shifted by learnable Scalars
    and clipped to [-20, 2].

    BUG FIX: the original referenced an undefined name `observations` in
    `log_prob` (3-D actions branch), `__call__`, and `get_statistics`
    (repeat branch), raising NameError whenever those branches executed.
    This module only consumes actions, so the repeat branches now tile
    `actions`, and the 3-D branch in `log_prob` is unnecessary (Dense layers
    operate on the last axis) and was removed.
    """
    action_dim: int
    latent_action_dim: int
    arch: str = '256-256'
    orthogonal_init: bool = False
    no_tanh: bool = False
    log_std_multiplier: float = 1.0
    log_std_offset: float = -1.0
    def setup(self):
        self.base_network = FullyConnectedNetwork(
            output_dim=2 * self.latent_action_dim, arch=self.arch, orthogonal_init=self.orthogonal_init
        )
        self.log_std_multiplier_module = Scalar(self.log_std_multiplier)
        self.log_std_offset_module = Scalar(self.log_std_offset)
    def _action_distribution(self, actions):
        """Build the (optionally tanh-squashed) Gaussian over latent actions."""
        base_network_output = self.base_network(actions)
        mean, log_std = jnp.split(base_network_output, 2, axis=-1)
        log_std = self.log_std_multiplier_module() * log_std + self.log_std_offset_module()
        log_std = jnp.clip(log_std, -20.0, 2.0)
        if self.no_tanh:
            distribution = distrax.MultivariateNormalDiag(mean, jnp.exp(log_std))
        else:
            distribution = distrax.Transformed(
                distrax.MultivariateNormalDiag(mean, jnp.exp(log_std)),
                distrax.Block(distrax.Tanh(), ndims=1)
            )
        return distribution, mean, log_std
    def log_prob(self, actions, latent_actions):
        """Log-density of `latent_actions` under the encoder's distribution."""
        distribution, _, _ = self._action_distribution(actions)
        return distribution.log_prob(latent_actions)
    @nn.compact
    def __call__(self, rng, actions, deterministic=False, repeat=None):
        """Sample (or take the mode of) a latent action; returns (samples, log_prob)."""
        if repeat is not None:
            actions = extend_and_repeat(actions, 1, repeat)
        distribution, mean, _ = self._action_distribution(actions)
        if deterministic:
            # NOTE(review): mode uses tanh(mean) even when no_tanh=True,
            # matching the original behavior -- confirm that is intended.
            samples = jnp.tanh(mean)
            log_prob = distribution.log_prob(samples)
        else:
            samples, log_prob = distribution.sample_and_log_prob(seed=rng)
        return samples, log_prob
    def get_statistics(self, rng, actions, deterministic=False, repeat=None):
        """Like __call__ but returns (samples, mean, log_std) for diagnostics."""
        if repeat is not None:
            actions = extend_and_repeat(actions, 1, repeat)
        distribution, mean, log_std = self._action_distribution(actions)
        if deterministic:
            samples = jnp.tanh(mean)
        else:
            samples, _ = distribution.sample_and_log_prob(seed=rng)
        return samples, mean, log_std
class ActionDecoder(nn.Module):
    """Map (observation, latent action) pairs back to actions squashed in (-1, 1).

    ReLU MLP over the concatenated inputs, followed by a small-scale-initialized
    linear head and a tanh. The multiple_action_decode_function decorator
    supports decoding several latent actions per observation in one call.
    """
    observation_dim: int
    latent_action_dim: int
    action_dim: int
    arch: str = '256-256'
    orthogonal_init: bool = False
    @nn.compact
    @multiple_action_decode_function
    def __call__(self, observations, latent_actions):
        hidden = jnp.concatenate([observations, latent_actions], axis=-1)
        # Hidden stack; Dense modules are created in the same order as the
        # original so flax parameter names are unchanged.
        for width in (int(token) for token in self.arch.split('-')):
            if self.orthogonal_init:
                layer = nn.Dense(
                    width,
                    kernel_init=jax.nn.initializers.orthogonal(jnp.sqrt(2.0)),
                    bias_init=jax.nn.initializers.zeros
                )
            else:
                layer = nn.Dense(width)
            hidden = nn.relu(layer(hidden))
        if self.orthogonal_init:
            head_kernel_init = jax.nn.initializers.orthogonal(1e-2)
        else:
            head_kernel_init = jax.nn.initializers.variance_scaling(
                1e-2, 'fan_in', 'uniform'
            )
        raw = nn.Dense(
            self.action_dim,
            kernel_init=head_kernel_init,
            bias_init=jax.nn.initializers.zeros
        )(hidden)
        # Squash into the action range.
        return nn.tanh(raw)
class ActionSeperatedDecoder(nn.Module):
    """Decoder that re-injects the latent action at every layer.

    Unlike ActionDecoder, the latent action is concatenated to the input of
    every hidden layer AND the output head (a skip connection), rather than
    only once at the input. Output is tanh-squashed into (-1, 1).
    (Name kept as-is for compatibility; 'Seperated' is a historical typo.)
    """
    observation_dim: int
    latent_action_dim: int
    action_dim: int
    arch: str = '256-256'
    orthogonal_init: bool = False
    # batch_norm: bool = True
    @nn.compact
    @multiple_action_decode_function
    def __call__(self, observations, latent_actions):
        x = observations
        hidden_sizes = [int(h) for h in self.arch.split('-')]
        for h in hidden_sizes:
            if self.orthogonal_init:
                x = nn.Dense(
                    h,
                    kernel_init=jax.nn.initializers.orthogonal(jnp.sqrt(2.0)),
                    bias_init=jax.nn.initializers.zeros
                )(jnp.concatenate([x, latent_actions], axis=-1))
            else:
                x = nn.Dense(h)(jnp.concatenate([x, latent_actions], axis=-1))
            # if self.batch_norm:
            #     x = nn.BatchNorm(use_running_average=not train_mode, momentum=0.9,
            #                         epsilon=1e-5,
            #                         dtype=jnp.float32)(x)
            x = nn.relu(x)
        if self.orthogonal_init:
            x = nn.Dense(
                self.action_dim,
                kernel_init=jax.nn.initializers.orthogonal(1e-2),
                bias_init=jax.nn.initializers.zeros
            )(jnp.concatenate([x, latent_actions], axis=-1))
        else:
            x = nn.Dense(
                self.action_dim,
                kernel_init=jax.nn.initializers.variance_scaling(
                    1e-2, 'fan_in', 'uniform'
                ),
                bias_init=jax.nn.initializers.zeros
            )(jnp.concatenate([x, latent_actions], axis=-1))
        # Squash into the action range.
        output = nn.tanh(x)
        return output
class Discriminator(nn.Module):
    """Binary discriminator over (observation, latent action) pairs.

    LeakyReLU(0.2) hidden layers with optional dropout (active only when
    train=True); returns a single unnormalized logit per input.
    """
    observation_dim: int
    latent_action_dim: int
    arch: str = '512-256'
    dropout: bool = True
    @nn.compact
    def __call__(self, observations, latent_actions, train=False):
        x = jnp.concatenate([observations, latent_actions], axis=-1)
        hidden_sizes = [int(h) for h in self.arch.split('-')]
        for h in hidden_sizes:
            x = nn.Dense(h)(x)
            # dropout
            # layer norm
            x = nn.leaky_relu(x, 0.2)
            if self.dropout:
                # NOTE(review): nn.Dropout needs a 'dropout' PRNG stream when
                # train=True -- confirm callers pass one to .apply().
                x = nn.Dropout(0.1)(x, deterministic=not train)
        output = nn.Dense(1)(x)
        return output
class SamplerPolicy(object):
    """Wraps a flax policy + params into a plain callable for environment rollouts."""
    def __init__(self, policy, params):
        self.policy = policy
        self.params = params
    def update_params(self, params):
        # Swap in fresh parameters (e.g. after a training step); returns self
        # so the call can be chained.
        self.params = params
        return self
    # self and deterministic are static: retracing happens per (instance, flag).
    @partial(jax.jit, static_argnames=('self', 'deterministic'))
    def act(self, params, rng, observations, deterministic):
        return self.policy.apply(params, rng, observations, deterministic, repeat=None)
    def __call__(self, observations, deterministic=False):
        actions, _ = self.act(self.params, next_rng(), observations, deterministic=deterministic)
        # Guard against NaN/Inf actions escaping into the environment.
        assert jnp.all(jnp.isfinite(actions))
        return jax.device_get(actions)
class SamplerDecoder(object):
    """Wraps a flax decoder + params into a plain callable for rollouts."""
    def __init__(self, decoder, params):
        self.decoder = decoder
        self.params = params
    def update_params(self, params):
        # Swap in fresh parameters; returns self so the call can be chained.
        self.params = params
        return self
    # static_argnames accepts a bare string; 'self' is treated as static.
    @partial(jax.jit, static_argnames=('self'))
    def act(self, params, observations, actions_rep):
        return self.decoder.apply(params, observations, actions_rep)
    def __call__(self, observations, actions_rep):
        actions = self.act(self.params, observations, actions_rep)
        # Guard against NaN/Inf actions escaping into the environment.
        assert jnp.all(jnp.isfinite(actions))
        return jax.device_get(actions)
class SamplerEncoder(object):
def __init__(self, encoder, params):
self.encoder = encoder
self.params = params
def update_params(self, params):
self.params = params
return self
@partial(jax.jit, static_argnames=('self'))
def act(self, params, rng, observations, actions):
return self.encoder.apply(params, rng, observations, actions)[0]
def __call__(self, rng, observations, actions):
actions = self.act(self.params, rng, observations, actions)
assert jnp.all(jnp.isfinite(actions))
return jax.device_get(actions) | [
"flax.linen.Dense",
"jax.nn.initializers.variance_scaling",
"jax.numpy.shape",
"jax.numpy.tanh",
"jax.tree_multimap",
"flax.linen.tanh",
"jax.numpy.concatenate",
"jax.numpy.split",
"distrax.Tanh",
"jax.numpy.clip",
"jax.nn.initializers.orthogonal",
"jax.device_get",
"flax.linen.leaky_relu",
... | [((316, 405), 'jax.tree_multimap', 'jax.tree_multimap', (['(lambda x, y: tau * x + (1.0 - tau) * y)', 'main_params', 'target_params'], {}), '(lambda x, y: tau * x + (1.0 - tau) * y, main_params,\n target_params)\n', (333, 405), False, 'import jax\n'), ((22184, 22243), 'functools.partial', 'partial', (['jax.jit'], {'static_argnames': "('self', 'deterministic')"}), "(jax.jit, static_argnames=('self', 'deterministic'))\n", (22191, 22243), False, 'from functools import partial\n'), ((22862, 22902), 'functools.partial', 'partial', (['jax.jit'], {'static_argnames': '"""self"""'}), "(jax.jit, static_argnames='self')\n", (22869, 22902), False, 'from functools import partial\n'), ((23462, 23502), 'functools.partial', 'partial', (['jax.jit'], {'static_argnames': '"""self"""'}), "(jax.jit, static_argnames='self')\n", (23469, 23502), False, 'from functools import partial\n'), ((5486, 5498), 'jax.numpy.shape', 'jnp.shape', (['x'], {}), '(x)\n', (5495, 5498), True, 'import jax.numpy as jnp\n'), ((6919, 6968), 'jax.numpy.concatenate', 'jnp.concatenate', (['[observations, actions]'], {'axis': '(-1)'}), '([observations, actions], axis=-1)\n', (6934, 6968), True, 'import jax.numpy as jnp\n'), ((7571, 7620), 'jax.numpy.concatenate', 'jnp.concatenate', (['[observations, actions]'], {'axis': '(-1)'}), '([observations, actions], axis=-1)\n', (7586, 7620), True, 'import jax.numpy as jnp\n'), ((7640, 7652), 'jax.numpy.shape', 'jnp.shape', (['x'], {}), '(x)\n', (7649, 7652), True, 'import jax.numpy as jnp\n'), ((9698, 9740), 'jax.numpy.split', 'jnp.split', (['base_network_output', '(2)'], {'axis': '(-1)'}), '(base_network_output, 2, axis=-1)\n', (9707, 9740), True, 'import jax.numpy as jnp\n'), ((9851, 9880), 'jax.numpy.clip', 'jnp.clip', (['log_std', '(-20.0)', '(2.0)'], {}), '(log_std, -20.0, 2.0)\n', (9859, 9880), True, 'import jax.numpy as jnp\n'), ((10399, 10441), 'jax.numpy.split', 'jnp.split', (['base_network_output', '(2)'], {'axis': '(-1)'}), '(base_network_output, 2, 
axis=-1)\n', (10408, 10441), True, 'import jax.numpy as jnp\n'), ((10552, 10581), 'jax.numpy.clip', 'jnp.clip', (['log_std', '(-20.0)', '(2.0)'], {}), '(log_std, -20.0, 2.0)\n', (10560, 10581), True, 'import jax.numpy as jnp\n'), ((11918, 11967), 'jax.numpy.concatenate', 'jnp.concatenate', (['[observations, actions]'], {'axis': '(-1)'}), '([observations, actions], axis=-1)\n', (11933, 11967), True, 'import jax.numpy as jnp\n'), ((12043, 12085), 'jax.numpy.split', 'jnp.split', (['base_network_output', '(2)'], {'axis': '(-1)'}), '(base_network_output, 2, axis=-1)\n', (12052, 12085), True, 'import jax.numpy as jnp\n'), ((12196, 12225), 'jax.numpy.clip', 'jnp.clip', (['log_std', '(-20.0)', '(2.0)'], {}), '(log_std, -20.0, 2.0)\n', (12204, 12225), True, 'import jax.numpy as jnp\n'), ((12863, 12912), 'jax.numpy.concatenate', 'jnp.concatenate', (['[observations, actions]'], {'axis': '(-1)'}), '([observations, actions], axis=-1)\n', (12878, 12912), True, 'import jax.numpy as jnp\n'), ((12988, 13030), 'jax.numpy.split', 'jnp.split', (['base_network_output', '(2)'], {'axis': '(-1)'}), '(base_network_output, 2, axis=-1)\n', (12997, 13030), True, 'import jax.numpy as jnp\n'), ((13141, 13170), 'jax.numpy.clip', 'jnp.clip', (['log_std', '(-20.0)', '(2.0)'], {}), '(log_std, -20.0, 2.0)\n', (13149, 13170), True, 'import jax.numpy as jnp\n'), ((13955, 14004), 'jax.numpy.concatenate', 'jnp.concatenate', (['[observations, actions]'], {'axis': '(-1)'}), '([observations, actions], axis=-1)\n', (13970, 14004), True, 'import jax.numpy as jnp\n'), ((14080, 14122), 'jax.numpy.split', 'jnp.split', (['base_network_output', '(2)'], {'axis': '(-1)'}), '(base_network_output, 2, axis=-1)\n', (14089, 14122), True, 'import jax.numpy as jnp\n'), ((14233, 14262), 'jax.numpy.clip', 'jnp.clip', (['log_std', '(-20.0)', '(2.0)'], {}), '(log_std, -20.0, 2.0)\n', (14241, 14262), True, 'import jax.numpy as jnp\n'), ((15666, 15708), 'jax.numpy.split', 'jnp.split', (['base_network_output', '(2)'], {'axis': 
'(-1)'}), '(base_network_output, 2, axis=-1)\n', (15675, 15708), True, 'import jax.numpy as jnp\n'), ((15819, 15848), 'jax.numpy.clip', 'jnp.clip', (['log_std', '(-20.0)', '(2.0)'], {}), '(log_std, -20.0, 2.0)\n', (15827, 15848), True, 'import jax.numpy as jnp\n'), ((16504, 16546), 'jax.numpy.split', 'jnp.split', (['base_network_output', '(2)'], {'axis': '(-1)'}), '(base_network_output, 2, axis=-1)\n', (16513, 16546), True, 'import jax.numpy as jnp\n'), ((16657, 16686), 'jax.numpy.clip', 'jnp.clip', (['log_std', '(-20.0)', '(2.0)'], {}), '(log_std, -20.0, 2.0)\n', (16665, 16686), True, 'import jax.numpy as jnp\n'), ((17526, 17568), 'jax.numpy.split', 'jnp.split', (['base_network_output', '(2)'], {'axis': '(-1)'}), '(base_network_output, 2, axis=-1)\n', (17535, 17568), True, 'import jax.numpy as jnp\n'), ((17679, 17708), 'jax.numpy.clip', 'jnp.clip', (['log_std', '(-20.0)', '(2.0)'], {}), '(log_std, -20.0, 2.0)\n', (17687, 17708), True, 'import jax.numpy as jnp\n'), ((18578, 18634), 'jax.numpy.concatenate', 'jnp.concatenate', (['[observations, latent_actions]'], {'axis': '(-1)'}), '([observations, latent_actions], axis=-1)\n', (18593, 18634), True, 'import jax.numpy as jnp\n'), ((19581, 19591), 'flax.linen.tanh', 'nn.tanh', (['x'], {}), '(x)\n', (19588, 19591), True, 'from flax import linen as nn\n'), ((21282, 21292), 'flax.linen.tanh', 'nn.tanh', (['x'], {}), '(x)\n', (21289, 21292), True, 'from flax import linen as nn\n'), ((21548, 21604), 'jax.numpy.concatenate', 'jnp.concatenate', (['[observations, latent_actions]'], {'axis': '(-1)'}), '([observations, latent_actions], axis=-1)\n', (21563, 21604), True, 'import jax.numpy as jnp\n'), ((22612, 22635), 'jax.device_get', 'jax.device_get', (['actions'], {}), '(actions)\n', (22626, 22635), False, 'import jax\n'), ((23208, 23231), 'jax.device_get', 'jax.device_get', (['actions'], {}), '(actions)\n', (23222, 23231), False, 'import jax\n'), ((23815, 23838), 'jax.device_get', 'jax.device_get', (['actions'], {}), 
'(actions)\n', (23829, 23838), False, 'import jax\n'), ((4708, 4718), 'flax.linen.relu', 'nn.relu', (['x'], {}), '(x)\n', (4715, 4718), True, 'from flax import linen as nn\n'), ((5943, 5953), 'flax.linen.relu', 'nn.relu', (['x'], {}), '(x)\n', (5950, 5953), True, 'from flax import linen as nn\n'), ((7237, 7255), 'jax.numpy.squeeze', 'jnp.squeeze', (['x', '(-1)'], {}), '(x, -1)\n', (7248, 7255), True, 'import jax.numpy as jnp\n'), ((8098, 8108), 'flax.linen.relu', 'nn.relu', (['x'], {}), '(x)\n', (8105, 8108), True, 'from flax import linen as nn\n'), ((8868, 8891), 'jax.numpy.squeeze', 'jnp.squeeze', (['output', '(-1)'], {}), '(output, -1)\n', (8879, 8891), True, 'import jax.numpy as jnp\n'), ((10807, 10828), 'jax.numpy.clip', 'jnp.clip', (['mean', '(-6)', '(6)'], {}), '(mean, -6, 6)\n', (10815, 10828), True, 'import jax.numpy as jnp\n'), ((10851, 10865), 'jax.numpy.tanh', 'jnp.tanh', (['mean'], {}), '(mean)\n', (10859, 10865), True, 'import jax.numpy as jnp\n'), ((13543, 13557), 'jax.numpy.tanh', 'jnp.tanh', (['mean'], {}), '(mean)\n', (13551, 13557), True, 'import jax.numpy as jnp\n'), ((14635, 14649), 'jax.numpy.tanh', 'jnp.tanh', (['mean'], {}), '(mean)\n', (14643, 14649), True, 'import jax.numpy as jnp\n'), ((17059, 17073), 'jax.numpy.tanh', 'jnp.tanh', (['mean'], {}), '(mean)\n', (17067, 17073), True, 'import jax.numpy as jnp\n'), ((18081, 18095), 'jax.numpy.tanh', 'jnp.tanh', (['mean'], {}), '(mean)\n', (18089, 18095), True, 'import jax.numpy as jnp\n'), ((19043, 19053), 'flax.linen.relu', 'nn.relu', (['x'], {}), '(x)\n', (19050, 19053), True, 'from flax import linen as nn\n'), ((20655, 20665), 'flax.linen.relu', 'nn.relu', (['x'], {}), '(x)\n', (20662, 20665), True, 'from flax import linen as nn\n'), ((21792, 21813), 'flax.linen.leaky_relu', 'nn.leaky_relu', (['x', '(0.2)'], {}), '(x, 0.2)\n', (21805, 21813), True, 'from flax import linen as nn\n'), ((21924, 21935), 'flax.linen.Dense', 'nn.Dense', (['(1)'], {}), '(1)\n', (21932, 21935), True, 'from flax 
import linen as nn\n'), ((22574, 22595), 'jax.numpy.isfinite', 'jnp.isfinite', (['actions'], {}), '(actions)\n', (22586, 22595), True, 'import jax.numpy as jnp\n'), ((23170, 23191), 'jax.numpy.isfinite', 'jnp.isfinite', (['actions'], {}), '(actions)\n', (23182, 23191), True, 'import jax.numpy as jnp\n'), ((23777, 23798), 'jax.numpy.isfinite', 'jnp.isfinite', (['actions'], {}), '(actions)\n', (23789, 23798), True, 'import jax.numpy as jnp\n'), ((8495, 8533), 'jax.numpy.concatenate', 'jnp.concatenate', (['[x, actions]'], {'axis': '(-1)'}), '([x, actions], axis=-1)\n', (8510, 8533), True, 'import jax.numpy as jnp\n'), ((8810, 8848), 'jax.numpy.concatenate', 'jnp.concatenate', (['[x, actions]'], {'axis': '(-1)'}), '([x, actions], axis=-1)\n', (8825, 8848), True, 'import jax.numpy as jnp\n'), ((9981, 9997), 'jax.numpy.exp', 'jnp.exp', (['log_std'], {}), '(log_std)\n', (9988, 9997), True, 'import jax.numpy as jnp\n'), ((10026, 10040), 'distrax.Tanh', 'distrax.Tanh', ([], {}), '()\n', (10038, 10040), False, 'import distrax\n'), ((10682, 10698), 'jax.numpy.exp', 'jnp.exp', (['log_std'], {}), '(log_std)\n', (10689, 10698), True, 'import jax.numpy as jnp\n'), ((10727, 10741), 'distrax.Tanh', 'distrax.Tanh', ([], {}), '()\n', (10739, 10741), False, 'import distrax\n'), ((12322, 12338), 'jax.numpy.exp', 'jnp.exp', (['log_std'], {}), '(log_std)\n', (12329, 12338), True, 'import jax.numpy as jnp\n'), ((13267, 13283), 'jax.numpy.exp', 'jnp.exp', (['log_std'], {}), '(log_std)\n', (13274, 13283), True, 'import jax.numpy as jnp\n'), ((14359, 14375), 'jax.numpy.exp', 'jnp.exp', (['log_std'], {}), '(log_std)\n', (14366, 14375), True, 'import jax.numpy as jnp\n'), ((15945, 15961), 'jax.numpy.exp', 'jnp.exp', (['log_std'], {}), '(log_std)\n', (15952, 15961), True, 'import jax.numpy as jnp\n'), ((16783, 16799), 'jax.numpy.exp', 'jnp.exp', (['log_std'], {}), '(log_std)\n', (16790, 16799), True, 'import jax.numpy as jnp\n'), ((17805, 17821), 'jax.numpy.exp', 'jnp.exp', (['log_std'], {}), 
'(log_std)\n', (17812, 17821), True, 'import jax.numpy as jnp\n'), ((20892, 20937), 'jax.numpy.concatenate', 'jnp.concatenate', (['[x, latent_actions]'], {'axis': '(-1)'}), '([x, latent_actions], axis=-1)\n', (20907, 20937), True, 'import jax.numpy as jnp\n'), ((21209, 21254), 'jax.numpy.concatenate', 'jnp.concatenate', (['[x, latent_actions]'], {'axis': '(-1)'}), '([x, latent_actions], axis=-1)\n', (21224, 21254), True, 'import jax.numpy as jnp\n'), ((21714, 21725), 'flax.linen.Dense', 'nn.Dense', (['h'], {}), '(h)\n', (21722, 21725), True, 'from flax import linen as nn\n'), ((4470, 4481), 'flax.linen.Dense', 'nn.Dense', (['h'], {}), '(h)\n', (4478, 4481), True, 'from flax import linen as nn\n'), ((5876, 5887), 'flax.linen.Dense', 'nn.Dense', (['h'], {}), '(h)\n', (5884, 5887), True, 'from flax import linen as nn\n'), ((5996, 6020), 'jax.numpy.sum', 'jnp.sum', (['(x ** 2)'], {'axis': '(-1)'}), '(x ** 2, axis=-1)\n', (6003, 6020), True, 'import jax.numpy as jnp\n'), ((8030, 8041), 'flax.linen.Dense', 'nn.Dense', (['h'], {}), '(h)\n', (8038, 8041), True, 'from flax import linen as nn\n'), ((8042, 8080), 'jax.numpy.concatenate', 'jnp.concatenate', (['[x, actions]'], {'axis': '(-1)'}), '([x, actions], axis=-1)\n', (8057, 8080), True, 'import jax.numpy as jnp\n'), ((12462, 12478), 'jax.numpy.exp', 'jnp.exp', (['log_std'], {}), '(log_std)\n', (12469, 12478), True, 'import jax.numpy as jnp\n'), ((12511, 12525), 'distrax.Tanh', 'distrax.Tanh', ([], {}), '()\n', (12523, 12525), False, 'import distrax\n'), ((13407, 13423), 'jax.numpy.exp', 'jnp.exp', (['log_std'], {}), '(log_std)\n', (13414, 13423), True, 'import jax.numpy as jnp\n'), ((13456, 13470), 'distrax.Tanh', 'distrax.Tanh', ([], {}), '()\n', (13468, 13470), False, 'import distrax\n'), ((14499, 14515), 'jax.numpy.exp', 'jnp.exp', (['log_std'], {}), '(log_std)\n', (14506, 14515), True, 'import jax.numpy as jnp\n'), ((14548, 14562), 'distrax.Tanh', 'distrax.Tanh', ([], {}), '()\n', (14560, 14562), False, 'import 
distrax\n'), ((16085, 16101), 'jax.numpy.exp', 'jnp.exp', (['log_std'], {}), '(log_std)\n', (16092, 16101), True, 'import jax.numpy as jnp\n'), ((16134, 16148), 'distrax.Tanh', 'distrax.Tanh', ([], {}), '()\n', (16146, 16148), False, 'import distrax\n'), ((16923, 16939), 'jax.numpy.exp', 'jnp.exp', (['log_std'], {}), '(log_std)\n', (16930, 16939), True, 'import jax.numpy as jnp\n'), ((16972, 16986), 'distrax.Tanh', 'distrax.Tanh', ([], {}), '()\n', (16984, 16986), False, 'import distrax\n'), ((17945, 17961), 'jax.numpy.exp', 'jnp.exp', (['log_std'], {}), '(log_std)\n', (17952, 17961), True, 'import jax.numpy as jnp\n'), ((17994, 18008), 'distrax.Tanh', 'distrax.Tanh', ([], {}), '()\n', (18006, 18008), False, 'import distrax\n'), ((19012, 19023), 'flax.linen.Dense', 'nn.Dense', (['h'], {}), '(h)\n', (19020, 19023), True, 'from flax import linen as nn\n'), ((20288, 20333), 'jax.numpy.concatenate', 'jnp.concatenate', (['[x, latent_actions]'], {'axis': '(-1)'}), '([x, latent_actions], axis=-1)\n', (20303, 20333), True, 'import jax.numpy as jnp\n'), ((20373, 20384), 'flax.linen.Dense', 'nn.Dense', (['h'], {}), '(h)\n', (20381, 20384), True, 'from flax import linen as nn\n'), ((20385, 20430), 'jax.numpy.concatenate', 'jnp.concatenate', (['[x, latent_actions]'], {'axis': '(-1)'}), '([x, latent_actions], axis=-1)\n', (20400, 20430), True, 'import jax.numpy as jnp\n'), ((21863, 21878), 'flax.linen.Dropout', 'nn.Dropout', (['(0.1)'], {}), '(0.1)\n', (21873, 21878), True, 'from flax import linen as nn\n'), ((4845, 4881), 'jax.nn.initializers.orthogonal', 'jax.nn.initializers.orthogonal', (['(0.01)'], {}), '(0.01)\n', (4875, 4881), False, 'import jax\n'), ((5058, 5121), 'jax.nn.initializers.variance_scaling', 'jax.nn.initializers.variance_scaling', (['(0.01)', '"""fan_in"""', '"""uniform"""'], {}), "(0.01, 'fan_in', 'uniform')\n", (5094, 5121), False, 'import jax\n'), ((6252, 6288), 'jax.nn.initializers.orthogonal', 'jax.nn.initializers.orthogonal', (['(0.01)'], {}), 
'(0.01)\n', (6282, 6288), False, 'import jax\n'), ((6465, 6528), 'jax.nn.initializers.variance_scaling', 'jax.nn.initializers.variance_scaling', (['(0.01)', '"""fan_in"""', '"""uniform"""'], {}), "(0.01, 'fan_in', 'uniform')\n", (6501, 6528), False, 'import jax\n'), ((8191, 8215), 'jax.numpy.sum', 'jnp.sum', (['(x ** 2)'], {'axis': '(-1)'}), '(x ** 2, axis=-1)\n', (8198, 8215), True, 'import jax.numpy as jnp\n'), ((8391, 8427), 'jax.nn.initializers.orthogonal', 'jax.nn.initializers.orthogonal', (['(0.01)'], {}), '(0.01)\n', (8421, 8427), False, 'import jax\n'), ((8641, 8704), 'jax.nn.initializers.variance_scaling', 'jax.nn.initializers.variance_scaling', (['(0.01)', '"""fan_in"""', '"""uniform"""'], {}), "(0.01, 'fan_in', 'uniform')\n", (8677, 8704), False, 'import jax\n'), ((19175, 19211), 'jax.nn.initializers.orthogonal', 'jax.nn.initializers.orthogonal', (['(0.01)'], {}), '(0.01)\n', (19205, 19211), False, 'import jax\n'), ((19383, 19446), 'jax.nn.initializers.variance_scaling', 'jax.nn.initializers.variance_scaling', (['(0.01)', '"""fan_in"""', '"""uniform"""'], {}), "(0.01, 'fan_in', 'uniform')\n", (19419, 19446), False, 'import jax\n'), ((20788, 20824), 'jax.nn.initializers.orthogonal', 'jax.nn.initializers.orthogonal', (['(0.01)'], {}), '(0.01)\n', (20818, 20824), False, 'import jax\n'), ((21040, 21103), 'jax.nn.initializers.variance_scaling', 'jax.nn.initializers.variance_scaling', (['(0.01)', '"""fan_in"""', '"""uniform"""'], {}), "(0.01, 'fan_in', 'uniform')\n", (21076, 21103), False, 'import jax\n'), ((4339, 4352), 'jax.numpy.sqrt', 'jnp.sqrt', (['(2.0)'], {}), '(2.0)\n', (4347, 4352), True, 'import jax.numpy as jnp\n'), ((5745, 5758), 'jax.numpy.sqrt', 'jnp.sqrt', (['(2.0)'], {}), '(2.0)\n', (5753, 5758), True, 'import jax.numpy as jnp\n'), ((7899, 7912), 'jax.numpy.sqrt', 'jnp.sqrt', (['(2.0)'], {}), '(2.0)\n', (7907, 7912), True, 'import jax.numpy as jnp\n'), ((18881, 18894), 'jax.numpy.sqrt', 'jnp.sqrt', (['(2.0)'], {}), '(2.0)\n', (18889, 18894), 
True, 'import jax.numpy as jnp\n'), ((20198, 20211), 'jax.numpy.sqrt', 'jnp.sqrt', (['(2.0)'], {}), '(2.0)\n', (20206, 20211), True, 'import jax.numpy as jnp\n')] |
"""Place fixtures in this file for use across all test files"""
import pytest
@pytest.fixture(scope="function")
def logger(caplog):
caplog.set_level("DEBUG")
return caplog
@pytest.fixture
def log_and_exit_mock(mocker):
return mocker.patch("scripts.generate_pipeline.log_and_exit")
| [
"pytest.fixture"
] | [((81, 113), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (95, 113), False, 'import pytest\n')] |
import numpy as np
import moch
import soch
import os
import sys
import scipy.io
import thorns
def main(parseID):
parseIn = parseID + 'In.mat'
parseOut = parseID + 'Out.mat'
parse = scipy.io.loadmat(parseIn)
os.remove(parseIn)
lagSpace = 1. * parse['lagSpace'] / 1000
parsStruct = parse['pars'][0, 0]
# Parametres
est = {'duration' : 1. * parsStruct['est'][0,0]['dur'][0][0] / 1000,
'loudness' : 1. * parsStruct['est'][0,0]['loud'][0][0],
'intv' : 1. * parsStruct['est'][0,0]['interval'][0] / 1000,
'onset' : 1. * parsStruct['est'][0,0]['onset' ][0][0] / 1000,
'tail' : 1. * parsStruct['est'][0,0]['tail'][0][0] / 1000,
'maskN' : parsStruct['est'][0,0]['maskNoise'][0][0],
'filename' : parsStruct['est'][0,0]['filename'][0],
'bandpass' : parsStruct['est'][0,0]['bandpass'][0],
'save' : parsStruct['est'][0,0]['save'][0]
}
if est['filename'] == -1:
est['type'] = parsStruct['est'][0,0]['type'][0]
est['freq'] = parsStruct['est'][0,0]['f'][0][0]
est['harms'] = parsStruct['est'][0,0]['harms'][0]
est['harmFact'] = parsStruct['est'][0,0]['harmFact'][0][0]
est['shift'] = parsStruct['est'][0,0]['shift'][0][0]
est['nOfIts'] = parsStruct['est'][0,0]['nOfIts'][0][0]
est['notes'] = parsStruct['est'][0,0]['notes'][0]
est['tuning'] = parsStruct['est'][0,0]['tuning'][0]
est['noiseOff'] = 1. * parsStruct['est'][0,0]['noiseOff'][0][0] / 1000
else:
est['type'] = 'external'
par = {'periphFs' : 100000,
'cochChanns' : (125, 10000, 30),
'SACFTau' : 1. * parsStruct['tauSACF'][0,0] / 1000,
'subCortTau' : 1. * parsStruct['tauSubthal'][0,0] / 1000,
'solvOnset' : 1. * parsStruct['solvOnset'][0] / 1000,
'subCortFs' : 100000,
'subCortAff' : parsStruct['subCortAff'][0,0],
'regularise' : parsStruct['regularise'][0,0],
'mu0' : parsStruct['mu0'][0,0],
'SACFGround' : parsStruct['SACFGround'][0,0],
'cortFs' : parsStruct['cortFs'][0,0],
'subDelay' : 1. * parsStruct['subDelay'][0,0] / 1000,
'subDelayDy' : 1. * parsStruct['subDelayDy'][0,0] / 1000,
}
if ('chord' in est['type']) and (est['notes'][0] != est['notes'][1]):
est['onset'] += par['subDelayDy']
par['mu0'] = 2 * par['mu0']
else:
est['onset'] += par['subDelay']
[A, n, b] = thalamicInput(lagSpace, par, est)
duration = 1.* len(A) / par['cortFs']
dti = 1./par['cortFs']
timeSpace = np.arange(start = dti, stop = duration + dti, step = dti)
if 'off' in est.keys():
timeSpace = timeSpace - est['off']
scipy.io.savemat(parseOut, {'A':A, 'n':n, 'b':b, 'timeSpace': timeSpace})
def thalamicInput(lagSpace, par, est, raster = False):
fs = par['periphFs']
# Subcortical processing
sound = soch.createStimulus(est, par['periphFs'])
prob = moch.peripheral(sound, par)
[A, n, b] = moch.subcortical(prob, lagSpace, par)
for i in range(1, par['subCortAff']):
sound = soch.createStimulus(est, par['periphFs'])
prob = moch.peripheral(sound, par)
[A0, n0, b0] = moch.subcortical(prob, lagSpace, par)
A = A + A0
n = n + n0
b = b + b0
A = (1. / par['subCortAff']) * A
n = (1. / par['subCortAff']) * n
b = (1. / par['subCortAff']) * b
if raster:
anfTrains = moch.peripheralSpikes(sound, par, fs = -1)
thorns.plot_raster(anfTrains)
thorns.show()
return [A, n, b]
main(sys.argv[1])
| [
"moch.peripheral",
"moch.peripheralSpikes",
"thorns.show",
"soch.createStimulus",
"thorns.plot_raster",
"moch.subcortical",
"numpy.arange",
"os.remove"
] | [((234, 252), 'os.remove', 'os.remove', (['parseIn'], {}), '(parseIn)\n', (243, 252), False, 'import os\n'), ((2726, 2777), 'numpy.arange', 'np.arange', ([], {'start': 'dti', 'stop': '(duration + dti)', 'step': 'dti'}), '(start=dti, stop=duration + dti, step=dti)\n', (2735, 2777), True, 'import numpy as np\n'), ((3065, 3106), 'soch.createStimulus', 'soch.createStimulus', (['est', "par['periphFs']"], {}), "(est, par['periphFs'])\n", (3084, 3106), False, 'import soch\n'), ((3118, 3145), 'moch.peripheral', 'moch.peripheral', (['sound', 'par'], {}), '(sound, par)\n', (3133, 3145), False, 'import moch\n'), ((3163, 3200), 'moch.subcortical', 'moch.subcortical', (['prob', 'lagSpace', 'par'], {}), '(prob, lagSpace, par)\n', (3179, 3200), False, 'import moch\n'), ((3260, 3301), 'soch.createStimulus', 'soch.createStimulus', (['est', "par['periphFs']"], {}), "(est, par['periphFs'])\n", (3279, 3301), False, 'import soch\n'), ((3317, 3344), 'moch.peripheral', 'moch.peripheral', (['sound', 'par'], {}), '(sound, par)\n', (3332, 3344), False, 'import moch\n'), ((3368, 3405), 'moch.subcortical', 'moch.subcortical', (['prob', 'lagSpace', 'par'], {}), '(prob, lagSpace, par)\n', (3384, 3405), False, 'import moch\n'), ((3615, 3655), 'moch.peripheralSpikes', 'moch.peripheralSpikes', (['sound', 'par'], {'fs': '(-1)'}), '(sound, par, fs=-1)\n', (3636, 3655), False, 'import moch\n'), ((3666, 3695), 'thorns.plot_raster', 'thorns.plot_raster', (['anfTrains'], {}), '(anfTrains)\n', (3684, 3695), False, 'import thorns\n'), ((3704, 3717), 'thorns.show', 'thorns.show', ([], {}), '()\n', (3715, 3717), False, 'import thorns\n')] |
# (C) British Crown Copyright 2011 - 2018, Met Office
#
# This file is part of cartopy.
#
# cartopy is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cartopy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with cartopy. If not, see <https://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
import types
import numpy as np
from numpy.testing import assert_array_almost_equal as assert_arr_almost
import pytest
import shapely.geometry as sgeom
import cartopy.crs as ccrs
import cartopy.io.img_tiles as cimgt
#: Maps Google tile coordinates to native mercator coordinates as defined
#: by https://goo.gl/pgJi.
KNOWN_EXTENTS = {(0, 0, 0): (-20037508.342789244, 20037508.342789244,
-20037508.342789244, 20037508.342789244),
(2, 0, 2): (0., 10018754.17139462,
10018754.17139462, 20037508.342789244),
(0, 2, 2): (-20037508.342789244, -10018754.171394622,
-10018754.171394622, 0),
(2, 2, 2): (0, 10018754.17139462,
-10018754.171394622, 0),
(8, 9, 4): (0, 2504688.542848654,
-5009377.085697312, -2504688.542848654),
}
if ccrs.PROJ4_VERSION == (5, 0, 0):
KNOWN_EXTENTS = {
(0, 0, 0): (-20037508.342789244, 20037508.342789244,
-19994827.892149, 19994827.892149),
(2, 0, 2): (0, 10018754.171395,
9997413.946075, 19994827.892149),
(0, 2, 2): (-20037508.342789244, -10018754.171394622,
-9997413.946075, 0),
(2, 2, 2): (0, 10018754.171395,
-9997413.946075, 0),
(8, 9, 4): (0, 2504688.542849,
-4998706.973037, -2499353.486519),
}
def GOOGLE_IMAGE_URL_REPLACEMENT(self, tile):
url = ('https://chart.googleapis.com/chart?chst=d_text_outline&'
'chs=256x256&chf=bg,s,00000055&chld=FFFFFF|16|h|000000|b||||'
'Google:%20%20(' + str(tile[0]) + ',' + str(tile[1]) + ')'
'|Zoom%20' + str(tile[2]) + '||||||______________________'
'______')
return url
def test_google_tile_styles():
"""
Tests that setting the Google Maps tile style works as expected.
This is essentially just assures information is properly propagated through
the class structure.
"""
reference_url = ("https://mts0.google.com/vt/lyrs={style}@177000000&hl=en"
"&src=api&x=1&y=2&z=3&s=G")
tile = ["1", "2", "3"]
# Default is street.
gt = cimgt.GoogleTiles()
url = gt._image_url(tile)
assert reference_url.format(style="m") == url
# Street
gt = cimgt.GoogleTiles(style="street")
url = gt._image_url(tile)
assert reference_url.format(style="m") == url
# Satellite
gt = cimgt.GoogleTiles(style="satellite")
url = gt._image_url(tile)
assert reference_url.format(style="s") == url
# Terrain
gt = cimgt.GoogleTiles(style="terrain")
url = gt._image_url(tile)
assert reference_url.format(style="t") == url
# Streets only
gt = cimgt.GoogleTiles(style="only_streets")
url = gt._image_url(tile)
assert reference_url.format(style="h") == url
# Exception is raised if unknown style is passed.
with pytest.raises(ValueError):
cimgt.GoogleTiles(style="random_style")
def test_google_wts():
gt = cimgt.GoogleTiles()
ll_target_domain = sgeom.box(-15, 50, 0, 60)
multi_poly = gt.crs.project_geometry(ll_target_domain, ccrs.PlateCarree())
target_domain = multi_poly.geoms[0]
with pytest.raises(AssertionError):
list(gt.find_images(target_domain, -1))
assert (tuple(gt.find_images(target_domain, 0)) ==
((0, 0, 0),))
assert (tuple(gt.find_images(target_domain, 2)) ==
((1, 1, 2), (2, 1, 2)))
assert (list(gt.subtiles((0, 0, 0))) ==
[(0, 0, 1), (0, 1, 1), (1, 0, 1), (1, 1, 1)])
assert (list(gt.subtiles((1, 0, 1))) ==
[(2, 0, 2), (2, 1, 2), (3, 0, 2), (3, 1, 2)])
with pytest.raises(AssertionError):
gt.tileextent((0, 1, 0))
assert_arr_almost(gt.tileextent((0, 0, 0)), KNOWN_EXTENTS[(0, 0, 0)])
assert_arr_almost(gt.tileextent((2, 0, 2)), KNOWN_EXTENTS[(2, 0, 2)])
assert_arr_almost(gt.tileextent((0, 2, 2)), KNOWN_EXTENTS[(0, 2, 2)])
assert_arr_almost(gt.tileextent((2, 2, 2)), KNOWN_EXTENTS[(2, 2, 2)])
assert_arr_almost(gt.tileextent((8, 9, 4)), KNOWN_EXTENTS[(8, 9, 4)])
def test_tile_bbox_y0_at_south_pole():
tms = cimgt.MapQuestOpenAerial()
# Check the y0_at_north_pole keywords returns the appropriate bounds.
assert_arr_almost(tms.tile_bbox(8, 6, 4, y0_at_north_pole=False),
np.array(KNOWN_EXTENTS[(8, 9, 4)]).reshape([2, 2]))
def test_tile_find_images():
gt = cimgt.GoogleTiles()
# Test the find_images method on a GoogleTiles instance.
ll_target_domain = sgeom.box(-10, 50, 10, 60)
multi_poly = gt.crs.project_geometry(ll_target_domain, ccrs.PlateCarree())
target_domain = multi_poly.geoms[0]
assert (list(gt.find_images(target_domain, 4)) ==
[(7, 4, 4), (7, 5, 4), (8, 4, 4), (8, 5, 4)])
@pytest.mark.network
def test_image_for_domain():
gt = cimgt.GoogleTiles()
gt._image_url = types.MethodType(GOOGLE_IMAGE_URL_REPLACEMENT, gt)
ll_target_domain = sgeom.box(-10, 50, 10, 60)
multi_poly = gt.crs.project_geometry(ll_target_domain, ccrs.PlateCarree())
target_domain = multi_poly.geoms[0]
_, extent, _ = gt.image_for_domain(target_domain, 6)
ll_extent = ccrs.Geodetic().transform_points(gt.crs,
np.array(extent[:2]),
np.array(extent[2:]))
if ccrs.PROJ4_VERSION == (5, 0, 0):
assert_arr_almost(ll_extent[:, :2],
[[-11.25, 49.033955],
[11.25, 61.687101]])
else:
assert_arr_almost(ll_extent[:, :2],
[[-11.25, 48.92249926],
[11.25, 61.60639637]])
def test_quadtree_wts():
qt = cimgt.QuadtreeTiles()
ll_target_domain = sgeom.box(-15, 50, 0, 60)
multi_poly = qt.crs.project_geometry(ll_target_domain, ccrs.PlateCarree())
target_domain = multi_poly.geoms[0]
with pytest.raises(ValueError):
list(qt.find_images(target_domain, 0))
assert qt.tms_to_quadkey((1, 1, 1)) == '1'
assert qt.quadkey_to_tms('1') == (1, 1, 1)
assert qt.tms_to_quadkey((8, 9, 4)) == '1220'
assert qt.quadkey_to_tms('1220') == (8, 9, 4)
assert tuple(qt.find_images(target_domain, 1)) == ('0', '1')
assert tuple(qt.find_images(target_domain, 2)) == ('03', '12')
assert list(qt.subtiles('0')) == ['00', '01', '02', '03']
assert list(qt.subtiles('11')) == ['110', '111', '112', '113']
with pytest.raises(ValueError):
qt.tileextent('4')
assert_arr_almost(qt.tileextent(''), KNOWN_EXTENTS[(0, 0, 0)])
assert_arr_almost(qt.tileextent(qt.tms_to_quadkey((2, 0, 2), google=True)),
KNOWN_EXTENTS[(2, 0, 2)])
assert_arr_almost(qt.tileextent(qt.tms_to_quadkey((0, 2, 2), google=True)),
KNOWN_EXTENTS[(0, 2, 2)])
assert_arr_almost(qt.tileextent(qt.tms_to_quadkey((2, 0, 2), google=True)),
KNOWN_EXTENTS[(2, 0, 2)])
assert_arr_almost(qt.tileextent(qt.tms_to_quadkey((2, 2, 2), google=True)),
KNOWN_EXTENTS[(2, 2, 2)])
assert_arr_almost(qt.tileextent(qt.tms_to_quadkey((8, 9, 4), google=True)),
KNOWN_EXTENTS[(8, 9, 4)])
def test_mapbox_tiles_api_url():
token = 'foo'
map_name = 'bar'
tile = [0, 1, 2]
exp_url = ('https://api.mapbox.com/v4/mapbox.bar'
'/2/0/1.png?access_token=foo')
mapbox_sample = cimgt.MapboxTiles(token, map_name)
url_str = mapbox_sample._image_url(tile)
assert url_str == exp_url
def test_mapbox_style_tiles_api_url():
token = 'foo'
username = 'baz'
map_id = 'bar'
tile = [0, 1, 2]
exp_url = ('https://api.mapbox.com/styles/v1/'
'baz/bar/tiles/256/2/0/1'
'?access_token=foo')
mapbox_sample = cimgt.MapboxStyleTiles(token, username, map_id)
url_str = mapbox_sample._image_url(tile)
assert url_str == exp_url
| [
"cartopy.io.img_tiles.MapboxStyleTiles",
"numpy.testing.assert_array_almost_equal",
"cartopy.io.img_tiles.GoogleTiles",
"cartopy.io.img_tiles.MapQuestOpenAerial",
"shapely.geometry.box",
"cartopy.io.img_tiles.MapboxTiles",
"cartopy.crs.PlateCarree",
"cartopy.io.img_tiles.QuadtreeTiles",
"numpy.array... | [((3086, 3105), 'cartopy.io.img_tiles.GoogleTiles', 'cimgt.GoogleTiles', ([], {}), '()\n', (3103, 3105), True, 'import cartopy.io.img_tiles as cimgt\n'), ((3209, 3242), 'cartopy.io.img_tiles.GoogleTiles', 'cimgt.GoogleTiles', ([], {'style': '"""street"""'}), "(style='street')\n", (3226, 3242), True, 'import cartopy.io.img_tiles as cimgt\n'), ((3349, 3385), 'cartopy.io.img_tiles.GoogleTiles', 'cimgt.GoogleTiles', ([], {'style': '"""satellite"""'}), "(style='satellite')\n", (3366, 3385), True, 'import cartopy.io.img_tiles as cimgt\n'), ((3490, 3524), 'cartopy.io.img_tiles.GoogleTiles', 'cimgt.GoogleTiles', ([], {'style': '"""terrain"""'}), "(style='terrain')\n", (3507, 3524), True, 'import cartopy.io.img_tiles as cimgt\n'), ((3634, 3673), 'cartopy.io.img_tiles.GoogleTiles', 'cimgt.GoogleTiles', ([], {'style': '"""only_streets"""'}), "(style='only_streets')\n", (3651, 3673), True, 'import cartopy.io.img_tiles as cimgt\n'), ((3927, 3946), 'cartopy.io.img_tiles.GoogleTiles', 'cimgt.GoogleTiles', ([], {}), '()\n', (3944, 3946), True, 'import cartopy.io.img_tiles as cimgt\n'), ((3971, 3996), 'shapely.geometry.box', 'sgeom.box', (['(-15)', '(50)', '(0)', '(60)'], {}), '(-15, 50, 0, 60)\n', (3980, 3996), True, 'import shapely.geometry as sgeom\n'), ((5088, 5114), 'cartopy.io.img_tiles.MapQuestOpenAerial', 'cimgt.MapQuestOpenAerial', ([], {}), '()\n', (5112, 5114), True, 'import cartopy.io.img_tiles as cimgt\n'), ((5374, 5393), 'cartopy.io.img_tiles.GoogleTiles', 'cimgt.GoogleTiles', ([], {}), '()\n', (5391, 5393), True, 'import cartopy.io.img_tiles as cimgt\n'), ((5478, 5504), 'shapely.geometry.box', 'sgeom.box', (['(-10)', '(50)', '(10)', '(60)'], {}), '(-10, 50, 10, 60)\n', (5487, 5504), True, 'import shapely.geometry as sgeom\n'), ((5798, 5817), 'cartopy.io.img_tiles.GoogleTiles', 'cimgt.GoogleTiles', ([], {}), '()\n', (5815, 5817), True, 'import cartopy.io.img_tiles as cimgt\n'), ((5838, 5888), 'types.MethodType', 'types.MethodType', 
(['GOOGLE_IMAGE_URL_REPLACEMENT', 'gt'], {}), '(GOOGLE_IMAGE_URL_REPLACEMENT, gt)\n', (5854, 5888), False, 'import types\n'), ((5913, 5939), 'shapely.geometry.box', 'sgeom.box', (['(-10)', '(50)', '(10)', '(60)'], {}), '(-10, 50, 10, 60)\n', (5922, 5939), True, 'import shapely.geometry as sgeom\n'), ((6687, 6708), 'cartopy.io.img_tiles.QuadtreeTiles', 'cimgt.QuadtreeTiles', ([], {}), '()\n', (6706, 6708), True, 'import cartopy.io.img_tiles as cimgt\n'), ((6733, 6758), 'shapely.geometry.box', 'sgeom.box', (['(-15)', '(50)', '(0)', '(60)'], {}), '(-15, 50, 0, 60)\n', (6742, 6758), True, 'import shapely.geometry as sgeom\n'), ((8409, 8443), 'cartopy.io.img_tiles.MapboxTiles', 'cimgt.MapboxTiles', (['token', 'map_name'], {}), '(token, map_name)\n', (8426, 8443), True, 'import cartopy.io.img_tiles as cimgt\n'), ((8788, 8835), 'cartopy.io.img_tiles.MapboxStyleTiles', 'cimgt.MapboxStyleTiles', (['token', 'username', 'map_id'], {}), '(token, username, map_id)\n', (8810, 8835), True, 'import cartopy.io.img_tiles as cimgt\n'), ((3818, 3843), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3831, 3843), False, 'import pytest\n'), ((3853, 3892), 'cartopy.io.img_tiles.GoogleTiles', 'cimgt.GoogleTiles', ([], {'style': '"""random_style"""'}), "(style='random_style')\n", (3870, 3892), True, 'import cartopy.io.img_tiles as cimgt\n'), ((4056, 4074), 'cartopy.crs.PlateCarree', 'ccrs.PlateCarree', ([], {}), '()\n', (4072, 4074), True, 'import cartopy.crs as ccrs\n'), ((4126, 4155), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (4139, 4155), False, 'import pytest\n'), ((4602, 4631), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (4615, 4631), False, 'import pytest\n'), ((5564, 5582), 'cartopy.crs.PlateCarree', 'ccrs.PlateCarree', ([], {}), '()\n', (5580, 5582), True, 'import cartopy.crs as ccrs\n'), ((5999, 6017), 'cartopy.crs.PlateCarree', 'ccrs.PlateCarree', ([], {}), '()\n', (6015, 
6017), True, 'import cartopy.crs as ccrs\n'), ((6224, 6244), 'numpy.array', 'np.array', (['extent[:2]'], {}), '(extent[:2])\n', (6232, 6244), True, 'import numpy as np\n'), ((6295, 6315), 'numpy.array', 'np.array', (['extent[2:]'], {}), '(extent[2:])\n', (6303, 6315), True, 'import numpy as np\n'), ((6365, 6443), 'numpy.testing.assert_array_almost_equal', 'assert_arr_almost', (['ll_extent[:, :2]', '[[-11.25, 49.033955], [11.25, 61.687101]]'], {}), '(ll_extent[:, :2], [[-11.25, 49.033955], [11.25, 61.687101]])\n', (6382, 6443), True, 'from numpy.testing import assert_array_almost_equal as assert_arr_almost\n'), ((6515, 6602), 'numpy.testing.assert_array_almost_equal', 'assert_arr_almost', (['ll_extent[:, :2]', '[[-11.25, 48.92249926], [11.25, 61.60639637]]'], {}), '(ll_extent[:, :2], [[-11.25, 48.92249926], [11.25, \n 61.60639637]])\n', (6532, 6602), True, 'from numpy.testing import assert_array_almost_equal as assert_arr_almost\n'), ((6818, 6836), 'cartopy.crs.PlateCarree', 'ccrs.PlateCarree', ([], {}), '()\n', (6834, 6836), True, 'import cartopy.crs as ccrs\n'), ((6888, 6913), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (6901, 6913), False, 'import pytest\n'), ((7431, 7456), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (7444, 7456), False, 'import pytest\n'), ((6134, 6149), 'cartopy.crs.Geodetic', 'ccrs.Geodetic', ([], {}), '()\n', (6147, 6149), True, 'import cartopy.crs as ccrs\n'), ((5282, 5314), 'numpy.array', 'np.array', (['KNOWN_EXTENTS[8, 9, 4]'], {}), '(KNOWN_EXTENTS[8, 9, 4])\n', (5290, 5314), True, 'import numpy as np\n')] |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import doctest
import os
from subprocess import check_output
from typing import Dict, List, TextIO
THRIFT_HEADER = """
# This file is generated by `fbcode/thrift/test/testset:generator`
# {'@' + 'generated'}
namespace cpp2 apache.thrift.test.testset
"""
FIELD_COUNT = 2 # Number of fields per structs
def format_dict(
d: Dict[str, str], key_format: str, value_format: str
) -> Dict[str, str]:
"""Format key/value of dict
>>> result = format_dict({"foo_k": "foo_v", "bar_k": "bar_v"}, 'prefix_{}', "{}_suffix")
>>> result == {'prefix_foo_k': 'foo_v_suffix', 'prefix_bar_k': 'bar_v_suffix'}
True
"""
return {key_format.format(k): value_format.format(d[k]) for k in d}
PRIMITIVE_TYPES = [
"bool",
"byte",
"i16",
"i32",
"i64",
"float",
"double",
"binary",
"string",
]
def generate_union_names_to_types() -> Dict[str, str]:
""" Generate display name to thrift type mapping in union. Display name will be used in file name, rule name, etc """
ret = {t: t for t in PRIMITIVE_TYPES}
ret.update(format_dict(ret, "set_{}", "set<{}>"))
ret.update(format_dict(ret, "map_string_{}", "map<string, {}>"))
ret.update(format_dict(ret, "{}_cpp_ref", "{} (cpp.ref = 'true')"))
return ret
def generate_struct_names_to_types() -> Dict[str, str]:
""" Similar to thrift types in union. Difference is that unions cannot contain qualified fields. """
ret = generate_union_names_to_types()
ret.update(
**format_dict(ret, "optional_{}", "optional {}"),
**format_dict(ret, "required_{}", "required {}"),
)
return ret
def generate_class(class_type: str, name: str, types: List[str]) -> str:
"""Generate thrift struct from types
>>> print(generate_class("struct", "Foo", ["i64", "optional string", "set<i32> (cpp.ref = 'true')"]))
struct Foo {
1: i64 field_1;
2: optional string field_2;
3: set<i32> (cpp.ref = 'true') field_3;
}
"""
lines = [f"{class_type} {name} {{"]
for i, t in enumerate(types):
lines.append(" {0}: {1} field_{0};".format(i + 1, t))
lines.append(f'}} (any_type.name="facebook.com/thrift/test/testset/{name}")')
return "\n".join(lines)
def print_thrift_class(
file: TextIO, class_type: str, names_to_types: Dict[str, str]
) -> None:
name = "empty_" + class_type
print(generate_class(class_type, name, []), file=file)
classes = [name]
for display_name, type in names_to_types.items():
class_name = class_type + "_" + display_name
classes.append(class_name)
print(generate_class(class_type, class_name, [type] * FIELD_COUNT), file=file)
# Thrift class that contains all other generated classes with same-type
print(generate_class(class_type, class_type + "_all", classes), file=file)
def gen_struct_all(path: str) -> None:
with open(path, "w") as file:
print(THRIFT_HEADER, file=file)
print_thrift_class(file, "struct", generate_struct_names_to_types())
print_thrift_class(file, "union", generate_union_names_to_types())
def main() -> None:
doctest.testmod()
os.chdir(check_output(["buck", "root"]).strip())
parser = argparse.ArgumentParser()
parser.add_argument("--install_dir", required=True)
parser.add_argument("--filename", required=True)
args = parser.parse_args()
gen_struct_all(os.path.join(args.install_dir, args.filename))
if __name__ == "__main__":
main()
| [
"os.path.join",
"subprocess.check_output",
"doctest.testmod",
"argparse.ArgumentParser"
] | [((3736, 3753), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (3751, 3753), False, 'import doctest\n'), ((3820, 3845), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (3843, 3845), False, 'import argparse\n'), ((4005, 4050), 'os.path.join', 'os.path.join', (['args.install_dir', 'args.filename'], {}), '(args.install_dir, args.filename)\n', (4017, 4050), False, 'import os\n'), ((3767, 3797), 'subprocess.check_output', 'check_output', (["['buck', 'root']"], {}), "(['buck', 'root'])\n", (3779, 3797), False, 'from subprocess import check_output\n')] |
from tornado.testing import AsyncHTTPTestCase
import unittest
def run_tests(application):
BaseAsyncTest.application = application
BaseAsyncTest.database_name = application.settings['database_name']
BaseAsyncTest.conn = application.settings['conn']
testsuite = unittest.TestLoader().discover('test')
return unittest.TextTestRunner(verbosity=2).run(testsuite)
class BaseAsyncTest(AsyncHTTPTestCase):
application = None
conn = None
database_name = ''
def get_app(self):
return self.application
| [
"unittest.TextTestRunner",
"unittest.TestLoader"
] | [((278, 299), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (297, 299), False, 'import unittest\n'), ((328, 364), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (351, 364), False, 'import unittest\n')] |
# C O D E A Z/ Samil
from userbot import BOT_USERNAME
from userbot.events import register
# ██████ LANGUAGE CONSTANTS ██████ #
from userbot.language import get_value
LANG = get_value("__helpme")
# ████████████████████████████████ #
@register(outgoing=True, pattern="^.yard[iı]m|^.help")
async def yardim(event):
    """Handle .yardim / .help: post the inline help menu through the bot.

    Falls back to an error message when no assistant bot is configured.
    """
    bot = BOT_USERNAME
    if bot is None:
        # No inline bot configured -> report it in place of the command.
        await event.edit(LANG["NO_BOT"])
        return
    # Query the inline bot for the help menu and post its first result.
    inline_results = await event.client.inline_query(bot, "@Codeaz")
    await inline_results[0].click(
        event.chat_id, reply_to=event.reply_to_msg_id, hide_via=True
    )
    await event.delete()
| [
"userbot.events.register",
"userbot.language.get_value"
] | [((177, 198), 'userbot.language.get_value', 'get_value', (['"""__helpme"""'], {}), "('__helpme')\n", (186, 198), False, 'from userbot.language import get_value\n'), ((239, 292), 'userbot.events.register', 'register', ([], {'outgoing': '(True)', 'pattern': '"""^.yard[iı]m|^.help"""'}), "(outgoing=True, pattern='^.yard[iı]m|^.help')\n", (247, 292), False, 'from userbot.events import register\n')] |
from dataclasses import dataclass
import struct
from typing import Tuple
from plugins.mgba_bridge.script_disassembler.utils import barray_to_u16_hex, u16_to_hex
from plugins.mgba_bridge.script_disassembler.definitions import get_pointer, commands, parameters, get_script_label, used_labels
# Disassembler for tmc scripts
# Input 'macros' to generate the macros for the script commands
# Input the script bytes as hex to disassemble the script
# Build macros: echo "macros" | python script_disassembler.py > ~/git/tmc/github/asm/macros/scripts.inc
@dataclass
class Context:
    """Mutable disassembly cursor over one script blob."""
    ptr: int  # byte offset of the next command within `data`
    data: bytes  # the raw script bytes being disassembled
    script_addr: int  # absolute address the script lives at (for labels)
def build_script_command(name: str) -> str:
    """Return the asm macro name for a script-command function name.

    Strips the ``ScriptCommand_`` prefix; if the remainder starts with a
    digit it is prefixed with an underscore, because asm macro names cannot
    start with a digit.
    """
    name = name.replace("ScriptCommand_", "")
    # Guard the empty remainder so name[0] cannot raise IndexError.
    if name and name[0].isdigit():
        return f'_{name}'
    return name
def print_rest_bytes(ctx):
    """Print every remaining byte (from ctx.ptr onward) as a `.byte` line."""
    remaining = ctx.data[ctx.ptr:]
    lines = ['.byte ' + hex(value) for value in remaining]
    print('\n'.join(lines))
@dataclass
class Instruction:
    """One disassembled command: its address and rendered text."""
    addr: int  # offset of the command within the script data
    text: str  # disassembled text for this command
def disassemble_command(ctx: Context, add_all_annotations=False) -> Tuple[int, Instruction]:
    """Disassemble the single command at ctx.ptr, advancing the cursor.

    Returns (status, instruction) where status is:
      1 - a regular command (or a lone 0x0000 filler word),
      2 - SCRIPT_END (0xffff followed by 0x0000 or by the end of data),
      3 - SCRIPT_END in the middle of the data (caller should split files).
    """
    global used_labels
    # if (add_all_annotations or ctx.script_addr + ctx.ptr in used_labels) and ctx.ptr != 0:
    # print offsets to debug when manually inserting labels
    #print(f'{get_script_label(ctx.script_addr + ctx.ptr)}:')
    cmd = struct.unpack('H', ctx.data[ctx.ptr:ctx.ptr + 2])[0]
    if cmd == 0:
        # this does not need to be the end of the script
        #print('\t.2byte 0x0000')
        ctx.ptr += 2
        return (1, Instruction(ctx.ptr-2, '0000'))
    if cmd == 0xffff:
        ctx.ptr += 2
        # print('SCRIPT_END')
        if ctx.ptr >= len(ctx.data) - 1:
            # This is already the end
            return (2, Instruction(ctx.ptr-2, 'SCRIPT_END'))
        cmd = struct.unpack('H', ctx.data[ctx.ptr:ctx.ptr + 2])[0]
        if cmd == 0x0000:
            # This is actually the end of the script
            #print('\t.2byte 0x0000')
            ctx.ptr += 2
            return (2, Instruction(ctx.ptr-4, 'SCRIPT_END'))
        # There is a SCRIPT_END without 0x0000 afterwards, but still split into a new file, please
        return (3, Instruction(ctx.ptr-2, 'SCRIPT_END'))
    commandStartAddress = ctx.ptr
    # Upper 6 bits of the id word: total command size in 16-bit units
    # (including the id word itself); lower 10 bits: command table index.
    commandSize = cmd >> 0xA
    if commandSize == 0:
        raise Exception(f'Zero commandSize not allowed')
    commandId = cmd & 0x3FF
    if commandId >= len(commands):
        raise Exception(
            f'Invalid commandId {commandId} / {len(commands)} {cmd}')
    command = commands[commandId]
    param_length = commandSize - 1
    if commandSize > 1:
        if ctx.ptr + 2 * commandSize > len(ctx.data):
            raise Exception(f'Not enough data to fetch {commandSize-1} params')
    # Handle parameters
    if not 'params' in command:
        raise Exception(
            f'Parameters not defined for {command["fun"]}. Should be of length {str(param_length)}')
    params = None
    suffix = ''
    # When there are multiple variants of parameters, choose the one with the correct count for this
    if isinstance(command['params'], list):
        for i, param in enumerate(command['params']):
            if not param in parameters:
                raise Exception(f'Parameter configuration {param} not defined')
            candidate = parameters[param]
            if candidate['length'] == commandSize - 1:
                params = candidate
                if i != 0:
                    # We need to add a suffix to distinguish the correct parameter variant
                    suffix = f'_{params["length"]}'
                break
        if params is None:
            raise Exception(
                f'No suitable parameter configuration with length {commandSize-1} found for {command["fun"]}')
    else:
        if not command['params'] in parameters:
            raise Exception(
                f'Parameter configuration {command["params"]} not defined')
        params = parameters[command['params']]
    command_name = f'{command["fun"]}{suffix}'
    if params['length'] == -1: # variable parameter length
        # print(f'\t.2byte {u16_to_hex(cmd)} @ {build_script_command(command_name)} with {commandSize-1} parameters')
        # if commandSize > 1:
        # print('\n'.join(['\t.2byte ' + x for x in barray_to_u16_hex(ctx.data[ctx.ptr + 2:ctx.ptr + commandSize * 2])]))
        # print(f'@ End of parameters')
        ctx.ptr += commandSize * 2
        return (1, Instruction(commandStartAddress, 'TODO'))
    elif params['length'] == -2: # point and var
        # print(f'\t.2byte {u16_to_hex(cmd)} @ {build_script_command(command_name)} with {commandSize-3} parameters')
        # print('\t.4byte ' + get_pointer(ctx.data[ctx.ptr + 2:ctx.ptr + 6]))
        # if commandSize > 3:
        # print('\n'.join(['\t.2byte ' + x for x in barray_to_u16_hex(ctx.data[ctx.ptr + 6:ctx.ptr + commandSize * 2])]))
        # print(f'@ End of parameters')
        ctx.ptr += commandSize * 2
        return (1, Instruction(commandStartAddress, 'TODO'))
    if commandSize-1 != params['length']:
        raise Exception(
            f'Call {command_name} with {commandSize-1} length, while length of {params["length"]} defined')
    # print(f'\t{build_script_command(command_name)} {params["read"](ctx)}')
    # Execute script
    ctx.ptr += commandSize * 2
    return (1, Instruction(commandStartAddress, 'TODO'))
def disassemble_script(input_bytes, script_addr, add_all_annotations=False) -> Tuple[int, list[Instruction]]:
    """Disassemble a whole script blob into a list of Instructions.

    Returns (split_offset, instructions): split_offset is 0 when the blob
    was consumed normally, or the byte offset at which a mid-data
    SCRIPT_END was found (the caller should start a new file there).
    Raises when trailing data remains after the end of the script.
    """
    ctx = Context(0, input_bytes, script_addr)
    foundEnd = False
    instructions: list[Instruction] = []
    while True:
        # End of file (there need to be at least two bytes remaining for the next operation id)
        if ctx.ptr >= len(ctx.data) - 1:
            break
        #print('remaining', len(ctx.data)-ctx.ptr)
        (res, instruction) = disassemble_command(ctx, add_all_annotations)
        # print(instruction.addr)
        instructions.append(instruction)
        if res == 0:
            break
        elif res == 2:
            foundEnd = True
            break
        elif res == 3:
            # End in the middle of the script, please create a new file
            return (ctx.ptr, instructions)
    # Print rest (did not manage to get there)
    if ctx.ptr < len(ctx.data):
        if (len(ctx.data) - ctx.ptr) % 2 != 0:
            # Odd remainder cannot even be dumped as .2byte words.
            print_rest_bytes(ctx)
            raise Exception(
                f'There is extra data at the end {ctx.ptr} / {len(ctx.data)}')
        print(
            '\n'.join(['.2byte ' + x for x in barray_to_u16_hex(ctx.data[ctx.ptr:])]))
        raise Exception(
            f'There is extra data at the end {ctx.ptr} / {len(ctx.data)}')
    if not foundEnd:
        # Sadly, there are script files without and end?
        return (0, instructions)
        #print('\033[93mNo end found\033[0m')
    return (0, instructions)
def generate_macros():
    """Print the .inc assembler macro definitions for every script command."""
    print('@ All the macro functions for scripts')
    print('@ Generated by disassemble_script.py')
    print('.macro SCRIPT_START name')
    print('	.globl \\name')
    print('	.section .text')
    print('\\name:')
    print('.endm')
    print('.macro SCRIPT_END')
    print('	.2byte 0xffff')
    print('.endm')
    print('')
    for num, command in enumerate(commands):
        if not 'params' in command:
            raise Exception(f'Parameters not defined for {command["fun"]}')
        def emit_macro(command_name, id, params):
            # One macro per command: the encoded id word plus its parameter data.
            print(f'.macro {command_name} {params["param"]}')
            print(f'	.2byte {u16_to_hex(id)}')
            if params['expr'] != '':
                print(params['expr'])
            print('.endm')
            print('')
        if isinstance(command['params'], list):
            # emit macros for all variants
            for i, variant in enumerate(command['params']):
                if not variant in parameters:
                    raise Exception(
                        f'Parameter configuration {variant} not defined')
                params = parameters[variant]
                # id word: size (params + id word) in the top 6 bits, index below.
                id = ((params['length'] + 1) << 0xA) + num
                suffix = ''
                if i != 0:
                    suffix = f'_{params["length"]}'
                emit_macro(
                    f'{build_script_command(command["fun"])}{suffix}', id, params)
        else:
            if not command['params'] in parameters:
                raise Exception(
                    f'Parameter configuration {command["params"]} not defined')
            params = parameters[command['params']]
            id = ((params['length'] + 1) << 0xA) + num
            if params['length'] < 0: # Don't emit anything for variable parameters
                continue
            emit_macro(build_script_command(command['fun']), id, params)
        print('')
def main():
    """Read one line from stdin: 'macros' emits the macro definitions,
    anything else is treated as a hex-encoded script and disassembled."""
    input_data = input()
    if input_data.strip() == 'macros':
        generate_macros()
        return
    # BUGFIX: disassemble_script() requires a script_addr positional argument;
    # the old call passed only the bytes and raised TypeError. Use 0 as the
    # base address since stdin input carries no load address.
    disassemble_script(bytearray.fromhex(input_data), 0)
if __name__ == '__main__':
    main()
| [
"struct.unpack",
"plugins.mgba_bridge.script_disassembler.utils.barray_to_u16_hex",
"plugins.mgba_bridge.script_disassembler.utils.u16_to_hex"
] | [((1373, 1422), 'struct.unpack', 'struct.unpack', (['"""H"""', 'ctx.data[ctx.ptr:ctx.ptr + 2]'], {}), "('H', ctx.data[ctx.ptr:ctx.ptr + 2])\n", (1386, 1422), False, 'import struct\n'), ((1834, 1883), 'struct.unpack', 'struct.unpack', (['"""H"""', 'ctx.data[ctx.ptr:ctx.ptr + 2]'], {}), "('H', ctx.data[ctx.ptr:ctx.ptr + 2])\n", (1847, 1883), False, 'import struct\n'), ((6678, 6715), 'plugins.mgba_bridge.script_disassembler.utils.barray_to_u16_hex', 'barray_to_u16_hex', (['ctx.data[ctx.ptr:]'], {}), '(ctx.data[ctx.ptr:])\n', (6695, 6715), False, 'from plugins.mgba_bridge.script_disassembler.utils import barray_to_u16_hex, u16_to_hex\n'), ((7661, 7675), 'plugins.mgba_bridge.script_disassembler.utils.u16_to_hex', 'u16_to_hex', (['id'], {}), '(id)\n', (7671, 7675), False, 'from plugins.mgba_bridge.script_disassembler.utils import barray_to_u16_hex, u16_to_hex\n')] |
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2020 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import ujson as json
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from blueapps.account.decorators import login_exempt
from gcloud import err_code
from gcloud.apigw.decorators import api_verify_proj_perms
from gcloud.apigw.decorators import mark_request_whether_is_trust
from gcloud.apigw.decorators import project_inject
from gcloud.contrib.analysis.analyse_items import task_flow_instance
from gcloud.core.permissions import project_resource
from gcloud.apigw.views.utils import logger
try:
from bkoauth.decorators import apigw_required
except ImportError:
from packages.bkoauth.decorators import apigw_required
@login_exempt
@csrf_exempt
@require_POST
@apigw_required
@mark_request_whether_is_trust
@project_inject
@api_verify_proj_perms([project_resource.actions.view])
def query_task_count(request, project_id):
    """
    @summary: count a project's tasks, aggregated along one dimension
    @param request: POST body is JSON {"conditions": {...}, "group_by": str}
    @param project_id: injected by @project_inject
    @return: JsonResponse {"result": bool, "data"/"message": ..., "code": int}
    """
    try:
        params = json.loads(request.body)
    except Exception:
        return JsonResponse(
            {
                "result": False,
                "message": "invalid json format",
                "code": err_code.REQUEST_PARAM_INVALID.code,
            }
        )
    project = request.project
    conditions = params.get("conditions", {})
    group_by = params.get("group_by")
    if not isinstance(conditions, dict):
        # BUGFIX: the messages below said "query_task_list" — a copy-paste
        # from a sibling view; this one is query_task_count.
        message = (
            "[API] query_task_count params conditions[%s] are invalid dict data"
            % conditions
        )
        logger.error(message)
        return JsonResponse(
            {
                "result": False,
                "message": message,
                "code": err_code.REQUEST_PARAM_INVALID.code,
            }
        )
    if group_by not in ["category", "create_method", "flow_type", "status"]:
        message = "[API] query_task_count params group_by[%s] is invalid" % group_by
        logger.error(message)
        return JsonResponse(
            {
                "result": False,
                "message": message,
                "code": err_code.REQUEST_PARAM_INVALID.code,
            }
        )
    filters = {"project_id": project.id, "is_deleted": False}
    filters.update(conditions)
    success, content = task_flow_instance.dispatch(group_by, filters)
    if not success:
        return JsonResponse(
            {"result": False, "message": content, "code": err_code.UNKNOWN_ERROR.code}
        )
    return JsonResponse(
        {"result": True, "data": content, "code": err_code.SUCCESS.code}
    )
| [
"django.http.JsonResponse",
"gcloud.contrib.analysis.analyse_items.task_flow_instance.dispatch",
"ujson.loads",
"gcloud.apigw.decorators.api_verify_proj_perms",
"gcloud.apigw.views.utils.logger.error"
] | [((1564, 1618), 'gcloud.apigw.decorators.api_verify_proj_perms', 'api_verify_proj_perms', (['[project_resource.actions.view]'], {}), '([project_resource.actions.view])\n', (1585, 1618), False, 'from gcloud.apigw.decorators import api_verify_proj_perms\n'), ((3069, 3115), 'gcloud.contrib.analysis.analyse_items.task_flow_instance.dispatch', 'task_flow_instance.dispatch', (['group_by', 'filters'], {}), '(group_by, filters)\n', (3096, 3115), False, 'from gcloud.contrib.analysis.analyse_items import task_flow_instance\n'), ((3273, 3351), 'django.http.JsonResponse', 'JsonResponse', (["{'result': True, 'data': content, 'code': err_code.SUCCESS.code}"], {}), "({'result': True, 'data': content, 'code': err_code.SUCCESS.code})\n", (3285, 3351), False, 'from django.http import JsonResponse\n'), ((1789, 1813), 'ujson.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (1799, 1813), True, 'import ujson as json\n'), ((2345, 2366), 'gcloud.apigw.views.utils.logger.error', 'logger.error', (['message'], {}), '(message)\n', (2357, 2366), False, 'from gcloud.apigw.views.utils import logger\n'), ((2382, 2483), 'django.http.JsonResponse', 'JsonResponse', (["{'result': False, 'message': message, 'code': err_code.\n REQUEST_PARAM_INVALID.code}"], {}), "({'result': False, 'message': message, 'code': err_code.\n REQUEST_PARAM_INVALID.code})\n", (2394, 2483), False, 'from django.http import JsonResponse\n'), ((2733, 2754), 'gcloud.apigw.views.utils.logger.error', 'logger.error', (['message'], {}), '(message)\n', (2745, 2754), False, 'from gcloud.apigw.views.utils import logger\n'), ((2770, 2871), 'django.http.JsonResponse', 'JsonResponse', (["{'result': False, 'message': message, 'code': err_code.\n REQUEST_PARAM_INVALID.code}"], {}), "({'result': False, 'message': message, 'code': err_code.\n REQUEST_PARAM_INVALID.code})\n", (2782, 2871), False, 'from django.http import JsonResponse\n'), ((3151, 3244), 'django.http.JsonResponse', 'JsonResponse', (["{'result': False, 
'message': content, 'code': err_code.UNKNOWN_ERROR.code}"], {}), "({'result': False, 'message': content, 'code': err_code.\n UNKNOWN_ERROR.code})\n", (3163, 3244), False, 'from django.http import JsonResponse\n'), ((1851, 1965), 'django.http.JsonResponse', 'JsonResponse', (["{'result': False, 'message': 'invalid json format', 'code': err_code.\n REQUEST_PARAM_INVALID.code}"], {}), "({'result': False, 'message': 'invalid json format', 'code':\n err_code.REQUEST_PARAM_INVALID.code})\n", (1863, 1965), False, 'from django.http import JsonResponse\n')] |
# -*- coding: utf-8 -
#
# This file is part of gaffer. See the NOTICE for more information.
import os
import time
import pytest
import pyuv
from gaffer import __version__
from gaffer.manager import Manager
from gaffer.http_handler import HttpEndpoint, HttpHandler
from gaffer.httpclient import (Server, Process, ProcessId,
GafferNotFound, GafferConflict)
from test_manager import dummy_cmd
TEST_HOST = '127.0.0.1'
# Derive two distinct, per-process ports so parallel test runs don't clash.
TEST_PORT = (os.getpid() % 31000) + 1024
TEST_PORT2 = (os.getpid() % 31000) + 1023
def start_manager():
    """Start a Manager serving the HTTP API on TEST_HOST:TEST_PORT."""
    endpoint = HttpEndpoint(uri="%s:%s" % (TEST_HOST, TEST_PORT))
    handler = HttpHandler(endpoints=[endpoint])
    manager = Manager()
    manager.start(apps=[handler])
    # Give the HTTP handler a moment to bind before tests hit it.
    time.sleep(0.2)
    return manager
def get_server(loop):
    """Return an HTTP client for the test manager, bound to *loop*."""
    base_url = "http://%s:%s" % (TEST_HOST, TEST_PORT)
    return Server(base_url, loop=loop)
def init():
    """Start a manager and return (manager, client) attached to its loop."""
    manager = start_manager()
    return (manager, get_server(manager.loop))
def test_basic():
    """The HTTP API reports the running gaffer version."""
    m = start_manager()
    s = get_server(m.loop)
    assert s.version == __version__
    m.stop()
    m.run()
def test_multiple_handers():
    """Two HTTP endpoints on different ports serve the same manager."""
    http_endpoint = HttpEndpoint(uri="%s:%s" % (TEST_HOST, TEST_PORT))
    http_endpoint2 = HttpEndpoint(uri="%s:%s" % (TEST_HOST, TEST_PORT2))
    http_handler = HttpHandler(endpoints=[http_endpoint, http_endpoint2])
    m = Manager()
    m.start(apps=[http_handler])
    time.sleep(0.2)
    s = Server("http://%s:%s" % (TEST_HOST, TEST_PORT), loop=m.loop)
    s2 = Server("http://%s:%s" % (TEST_HOST, TEST_PORT2), loop=m.loop)
    assert TEST_PORT != TEST_PORT2
    assert s.version == __version__
    assert s2.version == __version__
    m.stop()
    m.run()
def test_processes():
    """Processes added on the manager are visible through the HTTP API."""
    m, s = init()
    assert s.processes() == []
    testfile, cmd, args, wdir = dummy_cmd()
    m.add_process("dummy", cmd, args=args, cwd=wdir, start=False)
    time.sleep(0.2)
    assert len(m.processes) == 1
    assert len(s.processes()) == 1
    assert s.processes()[0] == "dummy"
    m.stop()
    m.run()
def test_process_create():
    """A process created over HTTP appears on the manager; duplicates conflict."""
    m, s = init()
    testfile, cmd, args, wdir = dummy_cmd()
    s.add_process("dummy", cmd, args=args, cwd=wdir, start=False)
    time.sleep(0.2)
    assert len(m.processes) == 1
    assert len(s.processes()) == 1
    assert s.processes()[0] == "dummy"
    assert "dummy" in m.processes
    assert len(m.running) == 0
    # Re-adding the same name must raise a conflict on the client side.
    with pytest.raises(GafferConflict):
        s.add_process("dummy", cmd, args=args, cwd=wdir, start=False)
    p = s.get_process("dummy")
    assert isinstance(p, Process)
    m.stop()
    m.run()
def test_process_remove():
    """Removing a process over HTTP clears it from both client and manager."""
    m, s = init()
    testfile, cmd, args, wdir = dummy_cmd()
    s.add_process("dummy", cmd, args=args, cwd=wdir, start=False)
    assert s.processes()[0] == "dummy"
    s.remove_process("dummy")
    assert len(s.processes()) == 0
    assert len(m.processes) == 0
    m.stop()
    m.run()
def test_notfound():
    """Fetching an unknown process raises GafferNotFound."""
    m, s = init()
    with pytest.raises(GafferNotFound):
        s.get_process("dummy")
    m.stop()
    m.run()
def test_process_start_stop():
    """Start/stop/restart over HTTP toggles the manager's running state."""
    m, s = init()
    testfile, cmd, args, wdir = dummy_cmd()
    p = s.add_process("dummy", cmd, args=args, cwd=wdir, start=False)
    assert isinstance(p, Process)
    p.start()
    # Sleeps throughout give the async manager time to apply each change.
    time.sleep(0.2)
    assert len(m.running) == 1
    status = p.status()
    assert status['running'] == 1
    assert status['active'] == True
    assert status['max_processes'] == 1
    p.stop()
    time.sleep(0.2)
    assert len(m.running) == 0
    assert p.active == False
    s.remove_process("dummy")
    assert len(s.processes()) == 0
    p = s.add_process("dummy", cmd, args=args, cwd=wdir, start=True)
    time.sleep(0.2)
    assert len(m.running) == 1
    assert p.active == True
    p.restart()
    time.sleep(0.4)
    assert len(m.running) == 1
    assert p.active == True
    m.stop()
    m.run()
def test_process_add_sub():
    """add()/sub() scale the number of OS processes up and down."""
    m, s = init()
    testfile, cmd, args, wdir = dummy_cmd()
    p = s.add_process("dummy", cmd, args=args, cwd=wdir)
    time.sleep(0.2)
    assert isinstance(p, Process)
    assert p.active == True
    assert p.numprocesses == 1
    p.add(3)
    time.sleep(0.2)
    assert p.numprocesses == 4
    assert p.running == 4
    p.sub(3)
    time.sleep(0.2)
    assert p.numprocesses == 1
    assert p.running == 1
    m.stop()
    m.run()
def test_running():
    """The running-pid list is mirrored between manager and HTTP client."""
    m, s = init()
    testfile, cmd, args, wdir = dummy_cmd()
    s.add_process("dummy", cmd, args=args, cwd=wdir)
    time.sleep(0.2)
    assert len(m.running) == 1
    assert len(s.running()) == 1
    assert 1 in m.running
    assert s.running()[0] == 1
    m.stop()
    m.run()
def test_pids():
    """A stopped pid is respawned with the next pid number."""
    m, s = init()
    testfile, cmd, args, wdir = dummy_cmd()
    p = s.add_process("dummy", cmd, args=args, cwd=wdir)
    time.sleep(0.2)
    p = s.get_process("dummy")
    assert isinstance(p, Process) == True
    pid = s.get_process(1)
    assert isinstance(pid, ProcessId) == True
    assert pid.pid == 1
    assert pid.process.get('name') == "dummy"
    assert p.pids == [1]
    pid.stop()
    assert 1 not in m.running
    time.sleep(0.2)
    # The supervisor relaunched the process under a fresh pid.
    assert p.pids == [2]
    m.stop()
    m.run()
def test_groups():
    """Group membership, group start/stop, and start/stop events."""
    m, s = init()
    started = []
    stopped = []
    def cb(evtype, info):
        # Record process names in event order for the assertions below.
        if evtype == "start":
            started.append(info['name'])
        elif evtype == "stop":
            stopped.append(info['name'])
    m.subscribe('start', cb)
    m.subscribe('stop', cb)
    testfile, cmd, args, wdir = dummy_cmd()
    m.add_process("ga:a", cmd, args=args, cwd=wdir, start=False)
    m.add_process("ga:b", cmd, args=args, cwd=wdir, start=False)
    m.add_process("gb:a", cmd, args=args, cwd=wdir, start=False)
    groups = sorted(s.groups())
    ga1 = s.get_group('ga')
    gb1 = s.get_group('gb')
    s.start_group("ga")
    s.stop_group("ga")
    time.sleep(0.2)
    m.remove_process("ga:a")
    time.sleep(0.2)
    ga2 = s.get_group('ga')
    m.stop_group("gb")
    def stop(handle):
        # Deferred teardown so pending events are delivered before m.stop().
        m.unsubscribe("start", cb)
        m.unsubscribe("stop", cb)
        m.stop()
    t = pyuv.Timer(m.loop)
    t.start(stop, 0.4, 0.0)
    m.run()
    assert groups == ['ga', 'gb']
    assert ga1 == ['ga:a', 'ga:b']
    assert gb1 == ['gb:a']
    assert started == ['ga:a', 'ga:b']
    assert stopped == ['ga:a', 'ga:b', 'gb:a']
    assert ga2 == ['ga:b']
| [
"gaffer.http_handler.HttpHandler",
"test_manager.dummy_cmd",
"gaffer.manager.Manager",
"time.sleep",
"pyuv.Timer",
"gaffer.http_handler.HttpEndpoint",
"pytest.raises",
"os.getpid",
"gaffer.httpclient.Server"
] | [((554, 604), 'gaffer.http_handler.HttpEndpoint', 'HttpEndpoint', ([], {'uri': "('%s:%s' % (TEST_HOST, TEST_PORT))"}), "(uri='%s:%s' % (TEST_HOST, TEST_PORT))\n", (566, 604), False, 'from gaffer.http_handler import HttpEndpoint, HttpHandler\n'), ((624, 662), 'gaffer.http_handler.HttpHandler', 'HttpHandler', ([], {'endpoints': '[http_endpoint]'}), '(endpoints=[http_endpoint])\n', (635, 662), False, 'from gaffer.http_handler import HttpEndpoint, HttpHandler\n'), ((671, 680), 'gaffer.manager.Manager', 'Manager', ([], {}), '()\n', (678, 680), False, 'from gaffer.manager import Manager\n'), ((718, 733), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (728, 733), False, 'import time\n'), ((781, 839), 'gaffer.httpclient.Server', 'Server', (["('http://%s:%s' % (TEST_HOST, TEST_PORT))"], {'loop': 'loop'}), "('http://%s:%s' % (TEST_HOST, TEST_PORT), loop=loop)\n", (787, 839), False, 'from gaffer.httpclient import Server, Process, ProcessId, GafferNotFound, GafferConflict\n'), ((1104, 1154), 'gaffer.http_handler.HttpEndpoint', 'HttpEndpoint', ([], {'uri': "('%s:%s' % (TEST_HOST, TEST_PORT))"}), "(uri='%s:%s' % (TEST_HOST, TEST_PORT))\n", (1116, 1154), False, 'from gaffer.http_handler import HttpEndpoint, HttpHandler\n'), ((1176, 1227), 'gaffer.http_handler.HttpEndpoint', 'HttpEndpoint', ([], {'uri': "('%s:%s' % (TEST_HOST, TEST_PORT2))"}), "(uri='%s:%s' % (TEST_HOST, TEST_PORT2))\n", (1188, 1227), False, 'from gaffer.http_handler import HttpEndpoint, HttpHandler\n'), ((1247, 1301), 'gaffer.http_handler.HttpHandler', 'HttpHandler', ([], {'endpoints': '[http_endpoint, http_endpoint2]'}), '(endpoints=[http_endpoint, http_endpoint2])\n', (1258, 1301), False, 'from gaffer.http_handler import HttpEndpoint, HttpHandler\n'), ((1310, 1319), 'gaffer.manager.Manager', 'Manager', ([], {}), '()\n', (1317, 1319), False, 'from gaffer.manager import Manager\n'), ((1357, 1372), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (1367, 1372), False, 'import time\n'), ((1382, 
1442), 'gaffer.httpclient.Server', 'Server', (["('http://%s:%s' % (TEST_HOST, TEST_PORT))"], {'loop': 'm.loop'}), "('http://%s:%s' % (TEST_HOST, TEST_PORT), loop=m.loop)\n", (1388, 1442), False, 'from gaffer.httpclient import Server, Process, ProcessId, GafferNotFound, GafferConflict\n'), ((1452, 1513), 'gaffer.httpclient.Server', 'Server', (["('http://%s:%s' % (TEST_HOST, TEST_PORT2))"], {'loop': 'm.loop'}), "('http://%s:%s' % (TEST_HOST, TEST_PORT2), loop=m.loop)\n", (1458, 1513), False, 'from gaffer.httpclient import Server, Process, ProcessId, GafferNotFound, GafferConflict\n'), ((1754, 1765), 'test_manager.dummy_cmd', 'dummy_cmd', ([], {}), '()\n', (1763, 1765), False, 'from test_manager import dummy_cmd\n'), ((1836, 1851), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (1846, 1851), False, 'import time\n'), ((2064, 2075), 'test_manager.dummy_cmd', 'dummy_cmd', ([], {}), '()\n', (2073, 2075), False, 'from test_manager import dummy_cmd\n'), ((2146, 2161), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (2156, 2161), False, 'import time\n'), ((2617, 2628), 'test_manager.dummy_cmd', 'dummy_cmd', ([], {}), '()\n', (2626, 2628), False, 'from test_manager import dummy_cmd\n'), ((3082, 3093), 'test_manager.dummy_cmd', 'dummy_cmd', ([], {}), '()\n', (3091, 3093), False, 'from test_manager import dummy_cmd\n'), ((3217, 3232), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (3227, 3232), False, 'import time\n'), ((3417, 3432), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (3427, 3432), False, 'import time\n'), ((3633, 3648), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (3643, 3648), False, 'import time\n'), ((3729, 3744), 'time.sleep', 'time.sleep', (['(0.4)'], {}), '(0.4)\n', (3739, 3744), False, 'import time\n'), ((3910, 3921), 'test_manager.dummy_cmd', 'dummy_cmd', ([], {}), '()\n', (3919, 3921), False, 'from test_manager import dummy_cmd\n'), ((3983, 3998), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (3993, 
3998), False, 'import time\n'), ((4111, 4126), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (4121, 4126), False, 'import time\n'), ((4202, 4217), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (4212, 4217), False, 'import time\n'), ((4373, 4384), 'test_manager.dummy_cmd', 'dummy_cmd', ([], {}), '()\n', (4382, 4384), False, 'from test_manager import dummy_cmd\n'), ((4442, 4457), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (4452, 4457), False, 'import time\n'), ((4677, 4688), 'test_manager.dummy_cmd', 'dummy_cmd', ([], {}), '()\n', (4686, 4688), False, 'from test_manager import dummy_cmd\n'), ((4750, 4765), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (4760, 4765), False, 'import time\n'), ((5061, 5076), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (5071, 5076), False, 'import time\n'), ((5461, 5472), 'test_manager.dummy_cmd', 'dummy_cmd', ([], {}), '()\n', (5470, 5472), False, 'from test_manager import dummy_cmd\n'), ((5807, 5822), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (5817, 5822), False, 'import time\n'), ((5856, 5871), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (5866, 5871), False, 'import time\n'), ((6041, 6059), 'pyuv.Timer', 'pyuv.Timer', (['m.loop'], {}), '(m.loop)\n', (6051, 6059), False, 'import pyuv\n'), ((440, 451), 'os.getpid', 'os.getpid', ([], {}), '()\n', (449, 451), False, 'import os\n'), ((483, 494), 'os.getpid', 'os.getpid', ([], {}), '()\n', (492, 494), False, 'import os\n'), ((2344, 2373), 'pytest.raises', 'pytest.raises', (['GafferConflict'], {}), '(GafferConflict)\n', (2357, 2373), False, 'import pytest\n'), ((2911, 2940), 'pytest.raises', 'pytest.raises', (['GafferNotFound'], {}), '(GafferNotFound)\n', (2924, 2940), False, 'import pytest\n')] |
from app import app
from flask import render_template, redirect, session, request, send_from_directory
from app import models, db, reqs
from flask_socketio import SocketIO, emit
import json
from xhtml2pdf import pisa
import os
from datetime import datetime
# Socket.IO layer over the Flask app; event handlers below register on it.
socketio = SocketIO(app)
if __name__ == '__main__':
    socketio.run(app)
# Maps a client-supplied name to the session username that registered it.
usernames = {}
class Inside_date:
    """Date whose month is rendered as a Russian genitive month name."""

    # Genitive month names, indexed by (month number - 1).
    _MONTHS = (
        'января', 'февраля', 'марта', 'апреля', 'мая', 'июня',
        'июля', 'августа', 'сентября', 'октября', 'ноября', 'декабря',
    )

    def __init__(self, d, m, y):
        """d: day, m: 1-based month number, y: year."""
        self.d = d
        self.m = Inside_date._MONTHS[m - 1]
        self.y = y
@socketio.on('connection')
def user_connected():
    # Connection handshake event; just log it server-side.
    print("user connect")
@socketio.on('add user')
def add_user(data):
    """Register the connecting client under *data* and announce the join."""
    global usernames
    print("add user")
    print(data)
    session['username'] = data
    usernames[data] = session['username']
    emit('user joined', {'username': session['username']}, broadcast= True)
def sendTasks():
    """Emit to the caller every task visible to the current session user.

    A task is visible when its Visibility JSON list contains 'all' or the
    user's id (as a string). Tasks with malformed Visibility are skipped.
    """
    tasks = []
    Tasks = models.Tasks.query.all()
    # The session may hold either a login or an e-mail address.
    if models.User.query.filter_by(login=session['username']).first():
        user = models.User.query.filter_by(login=session['username']).first()
    else:
        user = models.User.query.filter_by(email=session['username']).first()
    for i in Tasks:
        try:
            if 'all' in json.loads(i.Visibility):
                tasks.append(json.loads(table_to_json([i]))[0])
            elif str(user.id) in json.loads(i.Visibility):
                tasks.append(json.loads(table_to_json([i]))[0])
        except Exception as er:
            # Best-effort: a task with bad Visibility JSON is logged and skipped.
            print(er)
    emit('showTasks', json.dumps(tasks))
@socketio.on('addTask')
def addTask(message):
    """Persist a task from the client payload and ask all clients to refresh."""
    task = models.Tasks()
    task.Visibility = json.dumps(message['data']['task_whom'])
    task.User_id = int(message['data']['task_who'])
    task.Type = message['data']['task_type']
    task.Date = message['data']['task_date']
    task.Time = message['data']['task_time']
    task.Comment = message['data']['task_comment']
    db.session.add(task)
    db.session.commit()
    emit('refreshTasks', broadcast=True)
@socketio.on('showTasks')
def showTasks():
    # Client requested its task list; delegate to the shared helper.
    sendTasks()
def table_to_json(query):
    """Serialize an iterable of ORM rows (or any objects) to a JSON array.

    Drops SQLAlchemy's internal ``_sa_instance_state`` and formats ``Date``
    attributes as ``DD.MM.YYYY`` strings.
    """
    result = []
    for row in query:
        # BUGFIX: work on a copy — the old code popped keys from and rewrote
        # the live instance __dict__, stripping _sa_instance_state (and
        # replacing Date with a string) on the ORM objects themselves.
        subres = dict(row.__dict__)
        subres.pop('_sa_instance_state', None)
        date = subres.get('Date')
        if date is not None:
            try:
                subres['Date'] = date.strftime("%d.%m.%Y")
            except Exception:
                # Best-effort: keep the raw value when it is not date-like.
                print(date)
        result.append(subres)
    return json.dumps(result)
def to_PDF(owner, name, delivery):
    """Render a contract or request PDF for *owner* and return it for download.

    name is "Договор" (contract) or anything else (request/"Заявка").
    Persists a Document row, fills an HTML template, converts it with
    xhtml2pdf and serves the file from the upload directory.
    """
    document = models.Document()
    if name == "Договор":
        name = "Dogovor"
        document.Type = 'Dogovor'
    else:
        name = "Zayavka"
        document.Type = 'Zayavka'
    # Output file is named after the owner's table and primary key.
    f = open(os.path.dirname(__file__) + '/upload/{}.pdf'.format(owner.__tablename__ + str(owner.id)), "w+b")
    # Company details are looked up by tax number (UHH) via an external service.
    info = reqs.getINNinfo(owner.UHH)['suggestions'][0]
    date = Inside_date(d=str(datetime.now().day), m=int(datetime.now().month), y=str(datetime.now().year))
    document.Client_name = info['value']
    document.UHH = owner.UHH
    document.Date = str(datetime.now().month) + '/' + str(datetime.now().year)
    document.Client_contact_name = info['data']['management']['name']
    document.Bik = owner.Bik
    document.KPP = info['data']['kpp']
    document.rc = owner.rc
    document.kc = owner.kc
    document.Owner_id = owner.id
    document.MonthNum = document.getMonthNum()
    document.OGRN = info['data']['ogrn']
    if delivery != '':
        Client = models.Client.query.filter_by(Name=delivery.Client).first()
        Delivery_client_info = {'Name': Client.Name, 'Address': Client.Adress}
    else:
        Delivery_client_info = ''
    db.session.add(document)
    db.session.commit()
    html = render_template('{}.html'.format(name), document=document, date=date,
                           owner=owner, path=os.path.dirname(__file__), delivery=delivery,
                           Delivery_client_info=Delivery_client_info)
    pisa.CreatePDF(html, dest=f, encoding='utf-8')
    f.close()
    dir_u = os.path.abspath(os.path.dirname(__file__) + '/upload')
    return send_from_directory(directory=dir_u, filename='{}.pdf'.format(owner.__tablename__ + str(owner.id)))
@app.route('/')
@app.route('/index')
def index():
    """Serve the main page for logged-in users, the login page otherwise."""
    try:
        print(session['username'])
    except Exception:
        print("Not logged in")
    if 'username' in session:
        return render_template('index.html')
    else:
        return render_template('login.html')
@app.route('/getAllTasks')
def getAllTasks():
    # Return every task as JSON, with no visibility filtering.
    return table_to_json(models.Tasks.query.all())
@app.route('/auth', methods=['GET'])
def auth():
    """Log a user in by login or e-mail (GET params: login, password).

    On success stores the submitted identifier in the session and redirects
    to '/'; on failure returns a JSON payload with a Russian error message.
    """
    if 'login' in request.args:
        login = request.args['login']
    else:
        return 'ERROR 400 BAD REQUEST'
    if 'password' in request.args:
        password = request.args['password']
    else:
        return 'ERROR 400 BAD REQUEST'
    # Accept either the login or the e-mail address as the identifier
    # (collapses the two duplicated branches of the original).
    user = (models.User.query.filter_by(login=login).first()
            or models.User.query.filter_by(email=login).first())
    if user is None:
        return json.dumps({'message': 'Неверный логин/email', 'success': False})
    # SECURITY NOTE(review): passwords are compared in plain text; they
    # should be hashed (e.g. werkzeug.security). Behavior kept unchanged.
    if user.password == password:
        session['username'] = login
        # join_room('all')
        return redirect('/', code=302)
    return json.dumps({'message': 'Неверный пароль', 'success': False})
@app.route('/logout', methods=['GET'])
def logout():
    """Drop the session user (if any) and redirect to the main page."""
    # pop() with a default is a no-op when nobody is logged in.
    session.pop('username', None)
    return redirect('/', code=302)
@app.route('/addAccountPaymentHistory', methods=['GET'])
def addAccountPaymentHistory():
    """Overwrite an account's payment history from the request parameters."""
    table = models.Account.query.filter_by(id=request.args['account_id']).first()
    table.Payment_history = request.args['account_payment_history']
    db.session.commit()
    return 'OK'
@app.route('/getTemplates', methods=['GET'])
def getTemplates():
    """Return all templates as JSON; anonymous users are redirected home."""
    if 'username' in session:
        return table_to_json(models.Template.query.all())
    else:
        return redirect('/', code=302)
@app.route('/downloadDoc', methods=['GET'])
def downloadDoc():
    """Generate and send a PDF document for a client/provider/carrier card.

    Query params: ``name`` (template name), ``category``, ``card_id`` and,
    for the 'Заявка' template, ``delivery_id``.
    """
    if 'username' not in session:
        return redirect('/', code=302)
    if request.args['name'] == 'Заявка':
        # Bug fix: the original passed the bare Query object to to_PDF;
        # .first() resolves it to the actual Delivery row, matching every
        # other filter_by(...) usage in this file.
        delivery = models.Delivery.query.filter_by(id=request.args['delivery_id']).first()
    else:
        delivery = ''
    category = request.args['category']
    if category == 'client':
        owner = models.Client.query.filter_by(id=request.args['card_id']).first()
    elif category == 'provider':
        owner = models.Provider.query.filter_by(id=request.args['card_id']).first()
    elif category == 'carrier':
        owner = models.Carrier.query.filter_by(id=request.args['card_id']).first()
    else:
        return 'Error 400'
    return to_PDF(owner, request.args['name'], delivery)
@app.route('/getClients', methods=['GET'])
def getClients():
    """Return all client cards; anonymous users are redirected."""
    if 'username' not in session:
        return redirect('/', code=302)
    return table_to_json(models.Client.query.all())
@app.route('/deleteMember', methods=['GET'])
def deleteMember():
    """Delete the user identified by the ``id`` query parameter.

    Robustness fix: .first() returns None for an unknown id and the
    original then crashed in db.session.delete(None); report the bad
    request instead.
    NOTE(review): no session check here, unlike most routes — confirm
    this destructive endpoint is intentionally unauthenticated.
    """
    user = models.User.query.filter_by(id=request.args['id']).first()
    if user is None:
        return 'ERROR 400 BAD REQUEST'
    db.session.delete(user)
    db.session.commit()
    return 'OK'
@app.route('/stockTransit', methods=['GET'])
def stockTransit():
    """Move product volume from a source item into another stock.

    Query params: ``id_product`` (source item), ``stock_select`` (target
    stock name), ``product_volume``.  If the target stock already holds an
    item with the same name its volume is increased, otherwise a copy of
    the source item is created in the target stock.
    """
    source = models.Item.query.filter_by(Item_id=request.args['id_product']).first()
    target = models.Stock.query.filter_by(Name=request.args['stock_select']).first()
    # Same-named item already present in the target stock: just shift volume.
    for existing in target.Items:
        if existing.Name == source.Name:
            source.Volume = str(int(source.Volume) - int(request.args['product_volume']))
            existing.Volume = str(int(existing.Volume) + int(request.args['product_volume']))
            db.session.commit()
            return 'OK'
    # Otherwise clone the source item into the target stock.
    clone = models.Item()
    for attr in ('Weight', 'Packing', 'Fraction', 'Creator', 'Name',
                 'Cost', 'NDS', 'Group_id', 'Prefix', 'Group_name'):
        setattr(clone, attr, getattr(source, attr))
    source.Volume = str(int(source.Volume) - int(request.args['product_volume']))
    clone.Volume = str(request.args['product_volume'])
    target.Items.append(clone)
    db.session.commit()
    return 'OK'
@app.route('/findContacts', methods=['GET'])
def findContacts():
    """Search deliveries, contacts and users matching the ``data`` param.

    Matches delivery contact fields, contact phone/e-mail and user e-mail;
    returns a JSON list of matching rows (user passwords stripped).
    """
    if 'username' not in session:
        return redirect('/', code=302)
    result = []
    data = request.args['data']
    # Bug fix: Delivery rows are model instances, not dicts — the original
    # subscripted them (i['Contact_End']), which raises TypeError.
    # Attribute access matches the Contacts/User loops below.
    for delivery in models.Delivery.query.all():
        if delivery.Contact_End == data or delivery.Contact_Number == data:
            result.append(json.loads(table_to_json([delivery]))[0])
    for contact in models.Contacts.query.all():
        if contact.Number == data or contact.Email == data:
            result.append(json.loads(table_to_json([contact]))[0])
    for user in models.User.query.all():
        if user.email == data:
            entry = json.loads(table_to_json([user]))[0]
            entry.pop('password', None)
            result.append(entry)
    return json.dumps(result)
@app.route('/getMessages', methods=['GET'])
def getMessages():
    """Return the notes attached to a client, provider or carrier card.

    Query params: ``category`` (client/provider/carrier) and ``id``.
    """
    if 'username' not in session:
        return redirect('/', code=302)
    category = request.args['category']
    if category == 'client':
        client = models.Client.query.filter_by(id=request.args['id']).first()
        return table_to_json(models.Notes.query.filter_by(Author=client).all())
    if category == 'provider':
        provider = models.Provider.query.filter_by(id=request.args['id']).first()
        return table_to_json(models.Notes.query.filter_by(Provider=provider).all())
    if category == 'carrier':
        # Bug fix: the original looked the carrier up in models.Provider and
        # filtered Notes.Carrier with a Provider row; query the Carrier
        # model instead, mirroring the other branches.
        carrier = models.Carrier.query.filter_by(id=request.args['id']).first()
        return table_to_json(models.Notes.query.filter_by(Carrier=carrier).all())
    return 'ERROR 400 BAD REQUEST'
@app.route('/addMessages', methods=['GET'])
def addMessages():
    """Attach a new note to a client, provider or carrier card.

    Query params: ``category``, ``id`` and ``comments`` (a JSON object
    with comment_date / comment_role / comment_content keys).
    """
    if 'username' not in session:
        return redirect('/', code=302)
    category = request.args['category']
    card_id = request.args['id']
    if category == 'client':
        owner = models.Client.query.filter_by(id=card_id).first()
    elif category == 'provider':
        owner = models.Provider.query.filter_by(id=card_id).first()
    else:
        owner = models.Carrier.query.filter_by(id=card_id).first()
    payload = json.loads(request.args['comments'])
    note = models.Notes()
    note.Date = payload['comment_date']
    note.Manager = payload['comment_role']
    note.Note = payload['comment_content']
    if category == 'client':
        note.Client_id = card_id
    elif category == 'provider':
        note.Provider_id = card_id
    elif category == 'carrier':
        note.Carrier_id = card_id
    owner.Notes.append(note)
    db.session.commit()
    return 'OK'
@app.route('/getDeliveries', methods=['GET'])
def getDeliveries():
    """Return all deliveries, each paired with its carrier (or None).

    Bug fix: the original resolved the carrier positionally as
    ``carriers[Carrier_id - 1]``, which returns the wrong row or raises
    IndexError once carrier ids are non-contiguous (e.g. after a delete);
    we look up by primary key instead.
    """
    if 'username' not in session:
        return redirect('/', code=302)
    carrier_by_id = {c.id: c for c in models.Carrier.query.all()}
    result = []
    for delivery in models.Delivery.query.all():
        carrier = carrier_by_id.get(delivery.Carrier_id) if delivery.Carrier_id else None
        if carrier is not None:
            result.append({'carrier': json.loads(table_to_json([carrier]))[0],
                           'delivery': json.loads(table_to_json([delivery]))[0]})
        else:
            result.append({'carrier': None,
                           'delivery': json.loads(table_to_json([delivery]))[0]})
    return json.dumps(result)
@app.route('/addDelivery', methods=['GET'])
def addDelivery():
    """Create a new delivery ('delivery_id' == 'new') or update an existing one.

    All other fields come from ``delivery_*`` query parameters.
    """
    if 'username' not in session:
        return redirect('/', code=302)
    data = request.args
    print(data['delivery_id'])
    is_new = data['delivery_id'] == 'new'
    if is_new:
        delivery = models.Delivery()
    else:
        delivery = models.Delivery.query.filter_by(id=data['delivery_id']).first()
    # Straight attribute <- query-parameter copies.
    for attr, key in (
        ('Name', 'delivery_name'),
        ('Date', 'delivery_date'),
        ('Price', 'delivery_price'),
        ('Contact_Number', 'delivery_contact_number'),
        ('Contact_Name', 'delivery_contact_name'),
    ):
        setattr(delivery, attr, data[key])
    # Foreign keys are only written when a non-empty value was supplied.
    if data['delivery_carrier_id']:
        delivery.Carrier_id = data['delivery_carrier_id']
    if data['delivery_account_id']:
        delivery.Account_id = data['delivery_account_id']
    for attr, key in (
        ('Comment', 'delivery_comment'),
        ('Client', 'delivery_client'),
        ('NDS', 'delivery_vat'),
        ('Contact_End', 'delivery_contact_end'),
        ('Customer', 'delivery_customer'),
        ('End_date', 'delivery_end_date'),
        ('Load_type', 'delivery_load_type'),
        ('Payment_date', 'delivery_payment_date'),
        ('Prefix', 'delivery_prefix'),
        ('Start_date', 'delivery_start_date'),
        ('Stock', 'delivery_stock'),
        ('Type', 'delivery_type'),
        ('Item_ids', 'delivery_item_ids'),
        ('Amounts', 'delivery_amounts'),
        ('Auto', 'delivery_car'),
        ('Passport_data', 'delivery_passport'),
    ):
        setattr(delivery, attr, data[key])
    delivery.Payment_list = data['payment_list'] if 'payment_list' in data else None
    if is_new:
        db.session.add(delivery)
    db.session.commit()
    return 'OK'
@app.route('/getContacts', methods=['GET'])
def getContacts():
    """Return the contacts attached to a client, provider or carrier card.

    Query params: ``category`` (client/provider/carrier) and ``id``.
    """
    if 'username' not in session:
        return redirect('/', code=302)
    category = request.args['category']
    if category == 'client':
        client = models.Client.query.filter_by(id=request.args['id']).first()
        return table_to_json(models.Contacts.query.filter_by(Owner=client).all())
    if category == 'provider':
        provider = models.Provider.query.filter_by(id=request.args['id']).first()
        return table_to_json(models.Contacts.query.filter_by(Provider=provider).all())
    if category == 'carrier':
        # Bug fix: the original looked the carrier up in models.Provider and
        # filtered Contacts.Carrier with a Provider row; query the Carrier
        # model instead, mirroring the other branches.
        carrier = models.Carrier.query.filter_by(id=request.args['id']).first()
        return table_to_json(models.Contacts.query.filter_by(Carrier=carrier).all())
    return 'ERROR 400 BAD REQUEST'
@app.route('/addStock', methods=['GET'])
def addStock():
    """Create a new stock named by the ``stock_name`` query parameter."""
    if 'username' not in session:
        return redirect('/', code=302)
    stock = models.Stock()
    stock.Name = request.args['stock_name']
    db.session.add(stock)
    db.session.commit()
    return 'OK'
@app.route('/getStockTable', methods=['GET'])
def getStockTable():
    """Return every stock with its items and address as a JSON list."""
    if 'username' not in session:
        return redirect('/', code=302)
    rows = [
        {'items': json.loads(table_to_json(stock.Items)),
         'stock_address': stock.Name}
        for stock in models.Stock.query.all()
    ]
    return json.dumps(rows)
@app.route('/getStockItems', methods=['GET'])
def getStockItems():
    """Return the items of the stock given by the ``stock_id`` query param."""
    if 'username' not in session:
        return redirect('/', code=302)
    # .first() replaces the original .all() + length check — same outcome.
    stock = models.Stock.query.filter_by(id=request.args['stock_id']).first()
    if stock is None:
        return 'Bad Stock'
    return table_to_json(stock.Items)
@app.route('/addItemGroup', methods=['GET'])
def addItemGroup():
    """Create a new item group named by the ``group_name`` query parameter."""
    if 'username' not in session:
        return redirect('/', code=302)
    group = models.Item_groups()
    group.Group = request.args['group_name']
    db.session.add(group)
    db.session.commit()
    return 'OK'
@app.route('/getItemGroup', methods=['GET'])
def getItemGroup():
    """Return all item groups; anonymous users are redirected."""
    if 'username' not in session:
        return redirect('/', code=302)
    return table_to_json(models.Item_groups.query.all())
@app.route('/getAllItems', methods=['GET'])
def getAllItems():
    """Return all items; anonymous users are redirected."""
    if 'username' not in session:
        return redirect('/', code=302)
    return table_to_json(models.Item.query.all())
@app.route('/getAccounts', methods=['GET'])
def getAccounts():
    """Return every account together with the items listed in its Item_ids.

    Account.Item_ids is a JSON list of {'id': ..., 'volume': ...} records.
    Bug fix: the original resolved each item positionally as
    ``Items[int(id) - 1]``, which breaks as soon as item ids are
    non-contiguous; we look up by primary key instead.  (The per-item
    debug print was dropped.)
    """
    if 'username' not in session:
        return redirect('/', code=302)
    item_by_id = {item.id: item for item in models.Item.query.all()}
    result = []
    for account in models.Account.query.all():
        items = []
        for ref in json.loads(account.Item_ids):
            row = json.loads(table_to_json([item_by_id[int(ref['id'])]]))[0]
            row['Transferred_volume'] = ref['volume']
            items.append(row)
        result.append({'items': items,
                       'account': json.loads(table_to_json([account]))[0]})
    return json.dumps(result)
@app.route('/addUser', methods=['GET'])
def addUser():
    """Create a new user ('id' == 'new') or update an existing one.

    All fields come from ``create_*`` query parameters.
    """
    if 'username' not in session:
        return redirect('/', code=302)
    data = request.args
    is_new = data['id'] == 'new'
    if is_new:
        user = models.User()
    else:
        user = models.User.query.filter_by(id=data['id']).first()
    user.login = data['create_login']
    user.email = data['create_email']
    user.second_name = data['create_last_name']
    user.name = data['create_first_name']
    user.third_name = data['create_patronymic']
    user.role = data['create_role']
    # Bug fix: the original read data['<PASSWORD>'] — a redaction
    # placeholder, not a real form field; every other field here follows
    # the create_* naming.
    # NOTE(review): the password is stored in plain text — consider hashing.
    user.password = data['create_password']
    if is_new:
        db.session.add(user)
    db.session.commit()
    return 'OK'
@app.route('/addAccount', methods=['GET'])
def addAccount():
    """Create a new account from query parameters."""
    if 'username' not in session:
        return redirect('/', code=302)
    data = request.args
    account = models.Account()
    for attr, key in (
        ('Name', 'name'),
        ('Status', 'status'),
        ('Date', 'date'),
        ('Hello', 'hello'),
        ('Sale', 'sale'),
        ('Shipping', 'shipping'),
        ('Sum', 'sum'),
        ('Item_ids', 'item_ids'),
    ):
        setattr(account, attr, data[key])
    db.session.add(account)
    db.session.commit()
    return 'OK'
@app.route('/addItemToStock', methods=['GET'])
def addItemToStock():
    """Create a new item from ``item_*`` query params and place it in a stock."""
    if 'username' not in session:
        return redirect('/', code=302)
    data = request.args
    stocks = models.Stock.query.filter_by(id=data['stock_id']).all()
    if not stocks:
        return 'Bad Stock'
    stock = stocks[0]
    item = models.Item()
    for attr, key in (
        ('Weight', 'item_weight'),
        ('Packing', 'item_packing'),
        ('Fraction', 'item_fraction'),
        ('Creator', 'item_creator'),
        ('Name', 'item_product'),
        ('Cost', 'item_price'),
        ('Volume', 'item_volume'),
        ('NDS', 'item_vat'),
        ('Group_id', 'group_id'),
        ('Prefix', 'item_prefix'),
    ):
        setattr(item, attr, data[key])
    # Denormalized group name, resolved from the group row.
    item.Group_name = models.Item_groups.query.filter_by(id=data['group_id']).first().Group
    stock.Items.append(item)
    db.session.commit()
    return 'OK'
@app.route('/getStocks', methods=['GET'])
def getStocks():
    """Return all stocks; anonymous users are redirected."""
    if 'username' not in session:
        return redirect('/', code=302)
    return table_to_json(models.Stock.query.all())
@app.route('/addContacts', methods=['GET'])
def addContacts():
    """Replace the contact list of a client/provider/carrier card.

    ``contacts`` is a JSON list of objects with first_name / last_name /
    phone / email / role / visible keys.
    """
    if 'username' not in session:
        return redirect('/', code=302)
    category = request.args['category']
    card_id = request.args['id']
    if category == 'client':
        owner = models.Client.query.filter_by(id=card_id).first()
    elif category == 'provider':
        owner = models.Provider.query.filter_by(id=card_id).first()
    else:
        owner = models.Carrier.query.filter_by(id=card_id).first()
    contacts = []
    for entry in json.loads(request.args['contacts']):
        contact = models.Contacts()
        contact.Name = entry['first_name']
        contact.Last_name = entry['last_name']
        contact.Number = entry['phone']
        contact.Email = entry['email']
        contact.Position = entry['role']
        contact.Visible = entry['visible']
        if category == 'client':
            contact.Client_id = card_id
        elif category == 'provider':
            contact.Provider_id = card_id
        elif category == 'carrier':
            contact.Carrier_id = card_id
        contacts.append(contact)
    owner.Contacts = contacts
    db.session.commit()
    return 'OK'
@app.route('/addManagerToCard', methods=['GET'])
def addManagerToCard():
    """Assign a manager to a client or provider card.

    Query params: ``category`` (client/provider), ``card_id``, ``manager_id``.
    """
    if 'username' not in session:
        return redirect('/', code=302)
    category = request.args['category']
    if category == 'client':
        owner = models.Client.query.filter_by(id=request.args['card_id']).first()
    elif category == 'provider':
        owner = models.Provider.query.filter_by(id=request.args['card_id']).first()
    else:
        return '400 BAD REQUEST'
    owner.Manager_active = True
    owner.Manager_id = request.args['manager_id']
    db.session.commit()
    return 'OK'
@app.route('/deleteManagerFromCard', methods=['GET'])
def deleteManagerFromCard():
    """Deactivate the manager of a client or provider card.

    Query params: ``category`` (client/provider), ``card_id``, ``date``.
    """
    if 'username' not in session:
        return redirect('/', code=302)
    category = request.args['category']
    if category == 'client':
        owner = models.Client.query.filter_by(id=request.args['card_id']).first()
    elif category == 'provider':
        owner = models.Provider.query.filter_by(id=request.args['card_id']).first()
    else:
        return '400 BAD REQUEST'
    owner.Manager_active = False
    owner.Manager_date = request.args['date']
    db.session.commit()
    return 'OK'
@app.route('/getThisUser', methods=['GET'])
def getThisUser():
    """Return the logged-in user's record (password removed) as JSON.

    The session value may hold either a login or an e-mail address.
    """
    if 'username' not in session:
        return redirect('/', code=302)
    username = session['username']
    user = models.User.query.filter_by(login=username).first()
    if user is None:
        user = models.User.query.filter_by(email=username).first()
    record = json.loads(table_to_json([user]))[0]
    record.pop('password', None)
    return json.dumps(record)
@app.route('/addItems', methods=['GET'])
def addItems():
    """Replace the junk-item list of a client or provider card.

    ``item`` is a JSON list; entries with an empty item_product are
    skipped.  Client items carry volume/creator; provider items carry
    vat/fraction/packing/weight.
    """
    if 'username' not in session:
        return redirect('/', code=302)
    category = request.args['category']
    card_id = request.args['id']
    if category == 'client':
        is_client = True
        owner = models.Client.query.filter_by(id=card_id).first()
    elif category == 'provider':
        is_client = False
        owner = models.Provider.query.filter_by(id=card_id).first()
    else:
        return '400 BAD REQUEST'
    items = []
    for entry in json.loads(request.args['item']):
        if not entry['item_product']:
            continue
        item = models.Junk_item()
        if is_client:
            item.Volume = entry['item_volume']
            item.Creator = entry['item_creator']
            item.Client_id = card_id
        else:
            item.NDS = entry['item_vat']
            item.Fraction = entry['item_fraction']
            item.Packing = entry['item_packing']
            item.Weight = entry['item_weight']
            item.Provider_id = card_id
        item.Name = entry['item_product']
        item.Cost = entry['item_price']
        items.append(item)
    owner.Junk_items = items
    db.session.commit()
    return 'OK'
@app.route('/getItems', methods=['GET'])
def getItems():
    """Return the junk items attached to a client, provider or carrier card.

    Query params: ``category`` (client/provider/carrier) and ``id``.
    """
    if 'username' not in session:
        return redirect('/', code=302)
    category = request.args['category']
    if category == 'client':
        client = models.Client.query.filter_by(id=request.args['id']).first()
        return table_to_json(models.Junk_item.query.filter_by(Client=client).all())
    if category == 'provider':
        provider = models.Provider.query.filter_by(id=request.args['id']).first()
        return table_to_json(models.Junk_item.query.filter_by(Provider=provider).all())
    if category == 'carrier':
        # Bug fix: the original looked the carrier up in models.Provider and
        # filtered Junk_item.Carrier with a Provider row; query the Carrier
        # model instead, mirroring the other branches.
        carrier = models.Carrier.query.filter_by(id=request.args['id']).first()
        return table_to_json(models.Junk_item.query.filter_by(Carrier=carrier).all())
    return 'ERROR 400 BAD REQUEST'
@app.route('/getProviders', methods=['GET'])
def getProviders():
    """Return all provider cards; anonymous users are redirected."""
    if 'username' not in session:
        return redirect('/', code=302)
    return table_to_json(models.Provider.query.all())
@app.route('/getTasks', methods=['GET'])
def getTasks(login=None):
    """Return the tasks of one user.

    Bug fix: the route declares no URL variable, so Flask called this view
    with no arguments and every request failed with a TypeError for the
    required ``login`` parameter.  The login now defaults to the ``login``
    query parameter; an explicit argument (for any internal caller) is
    still honoured, keeping the signature backward-compatible.
    """
    if 'username' not in session:
        return redirect('/', code=302)
    if login is None:
        login = request.args['login']
    user = models.User.query.filter_by(login=login).first()
    return user.get_task_by_login()
@app.route('/getUsers', methods=['GET'])
def getUsers():
    """Return all users; anonymous users are redirected."""
    if 'username' not in session:
        return redirect('/', code=302)
    return table_to_json(models.User.query.all())
@app.route('/getCarriers', methods=['GET'])
def getCarriers():
    """Return all carrier cards; anonymous users are redirected."""
    if 'username' not in session:
        return redirect('/', code=302)
    return table_to_json(models.Carrier.query.all())
@app.route('/addProvider', methods=['GET'])
def addProvider():
    """Create a provider card ('provider_data' == 'new') or update one."""
    if 'username' not in session:
        return redirect('/', code=302)
    data = request.args
    is_new = data['provider_data'] == 'new'
    if is_new:
        provider = models.Provider()
    else:
        provider = models.Provider.query.filter_by(id=data['provider_data']).first()
    # Straight attribute <- query-parameter copies.
    for attr, key in (
        ('Name', 'provider_name'),
        ('Rayon', 'provider_area'),
        ('Category', 'provider_category'),
        ('Distance', 'provider_distance'),
        ('UHH', 'provider_inn'),
        ('Price', 'provider_price'),
        ('Oblast', 'provider_region'),
        ('Train', 'provider_station'),
        ('Tag', 'provider_tag'),
        ('Adress', 'provider_address'),
        ('NDS', 'provider_vat'),
        ('Merc', 'provider_merc'),
        ('Volume', 'provider_volume'),
        ('Holding', 'provider_holding'),
    ):
        setattr(provider, attr, data[key])
    if is_new:
        db.session.add(provider)
    db.session.commit()
    return 'OK'
@app.route('/addComment', methods=['GET'])
def addComment():
    """Store the ``comment`` query parameter on a client/provider/carrier card.

    Two bug fixes: the comment was read as request['comment'] (the Flask
    Request object is not subscriptable — every other route reads
    request.args), and the success path returned None, which Flask rejects
    as a response; it now returns 'OK' like the sibling routes.
    """
    if 'username' not in session:
        return redirect('/', code=302)
    category = request.args['category']
    if category == 'client':
        owner = models.Client.query.filter_by(id=request.args['id']).first()
    elif category == 'provider':
        owner = models.Provider.query.filter_by(id=request.args['id']).first()
    elif category == 'carrier':
        owner = models.Carrier.query.filter_by(id=request.args['id']).first()
    else:
        return '400 BAD REQUEST'
    owner.Comment = request.args['comment']
    db.session.commit()
    return 'OK'
@app.route('/getComment', methods=['GET'])
def getComment():
    """Return the comment stored on a client/provider/carrier card."""
    if 'username' not in session:
        return redirect('/', code=302)
    category = request.args['category']
    if category == 'client':
        owner = models.Client.query.filter_by(id=request.args['id']).first()
    elif category == 'provider':
        owner = models.Provider.query.filter_by(id=request.args['id']).first()
    elif category == 'carrier':
        owner = models.Carrier.query.filter_by(id=request.args['id']).first()
    else:
        return '400 BAD REQUEST'
    return owner.Comment
@app.route('/addCarrier', methods=['GET'])
def addCarier():
    """Create a carrier card ('carrier_data' == 'new') or update one."""
    if 'username' not in session:
        return redirect('/', code=302)
    data = request.args
    is_new = data['carrier_data'] == 'new'
    if is_new:
        carrier = models.Carrier()
    else:
        carrier = models.Carrier.query.filter_by(id=data['carrier_data']).first()
    # Straight attribute <- query-parameter copies.
    for attr, key in (
        ('Name', 'carrier_name'),
        ('Address', 'carrier_address'),
        ('Area', 'carrier_area'),
        ('Capacity', 'carrier_capacity'),
        ('UHH', 'carrier_inn'),
        ('Region', 'carrier_region'),
        ('View', 'carrier_view'),
    ):
        setattr(carrier, attr, data[key])
    if is_new:
        db.session.add(carrier)
    db.session.commit()
    return 'OK'
@app.route('/addClient', methods=['GET'])
def addClient():
    """Create a client card ('client_data' == 'new') or update one."""
    if 'username' not in session:
        return redirect('/', code=302)
    data = request.args
    is_new = data['client_data'] == 'new'
    if is_new:
        client = models.Client()
    else:
        client = models.Client.query.filter_by(id=data['client_data']).first()
    # Straight attribute <- query-parameter copies.
    for attr, key in (
        ('Name', 'client_name'),
        ('Rayon', 'client_area'),
        ('Category', 'client_category'),
        ('Distance', 'client_distance'),
        ('Segment', 'client_industry'),
        ('UHH', 'client_inn'),
        ('Price', 'client_price'),
        ('Oblast', 'client_region'),
        ('Station', 'client_station'),
        ('Tag', 'client_tag'),
        ('Adress', 'client_address'),
        ('Holding', 'client_holding'),
        ('Site', 'client_site'),
        ('Demand_item', 'demand_product'),
        ('Demand_volume', 'demand_volume'),
        ('Livestock_all', 'livestock_general'),
        ('Livestock_milking', 'livestock_milking'),
        ('Livestock_milkyield', 'livestock_milkyield'),
    ):
        setattr(client, attr, data[key])
    if is_new:
        db.session.add(client)
    db.session.commit()
    return 'OK'
| [
"xhtml2pdf.pisa.CreatePDF",
"app.db.session.commit",
"flask.render_template",
"app.models.Item.query.all",
"app.models.Stock.query.filter_by",
"app.models.Item_groups.query.filter_by",
"app.models.Carrier.query.filter_by",
"app.models.Contacts.query.all",
"app.models.User",
"flask_socketio.SocketI... | [((269, 282), 'flask_socketio.SocketIO', 'SocketIO', (['app'], {}), '(app)\n', (277, 282), False, 'from flask_socketio import SocketIO, emit\n'), ((4374, 4388), 'app.app.route', 'app.route', (['"""/"""'], {}), "('/')\n", (4383, 4388), False, 'from app import app\n'), ((4390, 4409), 'app.app.route', 'app.route', (['"""/index"""'], {}), "('/index')\n", (4399, 4409), False, 'from app import app\n'), ((4654, 4679), 'app.app.route', 'app.route', (['"""/getAllTasks"""'], {}), "('/getAllTasks')\n", (4663, 4679), False, 'from app import app\n'), ((4753, 4788), 'app.app.route', 'app.route', (['"""/auth"""'], {'methods': "['GET']"}), "('/auth', methods=['GET'])\n", (4762, 4788), False, 'from app import app\n'), ((5805, 5842), 'app.app.route', 'app.route', (['"""/logout"""'], {'methods': "['GET']"}), "('/logout', methods=['GET'])\n", (5814, 5842), False, 'from app import app\n'), ((5963, 6018), 'app.app.route', 'app.route', (['"""/addAccountPaymentHistory"""'], {'methods': "['GET']"}), "('/addAccountPaymentHistory', methods=['GET'])\n", (5972, 6018), False, 'from app import app\n'), ((6244, 6287), 'app.app.route', 'app.route', (['"""/getTemplates"""'], {'methods': "['GET']"}), "('/getTemplates', methods=['GET'])\n", (6253, 6287), False, 'from app import app\n'), ((6448, 6490), 'app.app.route', 'app.route', (['"""/downloadDoc"""'], {'methods': "['GET']"}), "('/downloadDoc', methods=['GET'])\n", (6457, 6490), False, 'from app import app\n'), ((7270, 7311), 'app.app.route', 'app.route', (['"""/getClients"""'], {'methods': "['GET']"}), "('/getClients', methods=['GET'])\n", (7279, 7311), False, 'from app import app\n'), ((7468, 7511), 'app.app.route', 'app.route', (['"""/deleteMember"""'], {'methods': "['GET']"}), "('/deleteMember', methods=['GET'])\n", (7477, 7511), False, 'from app import app\n'), ((7674, 7717), 'app.app.route', 'app.route', (['"""/stockTransit"""'], {'methods': "['GET']"}), "('/stockTransit', methods=['GET'])\n", (7683, 7717), 
False, 'from app import app\n'), ((8725, 8768), 'app.app.route', 'app.route', (['"""/findContacts"""'], {'methods': "['GET']"}), "('/findContacts', methods=['GET'])\n", (8734, 8768), False, 'from app import app\n'), ((9611, 9653), 'app.app.route', 'app.route', (['"""/getMessages"""'], {'methods': "['GET']"}), "('/getMessages', methods=['GET'])\n", (9620, 9653), False, 'from app import app\n'), ((10567, 10609), 'app.app.route', 'app.route', (['"""/addMessages"""'], {'methods': "['GET']"}), "('/addMessages', methods=['GET'])\n", (10576, 10609), False, 'from app import app\n'), ((11681, 11725), 'app.app.route', 'app.route', (['"""/getDeliveries"""'], {'methods': "['GET']"}), "('/getDeliveries', methods=['GET'])\n", (11690, 11725), False, 'from app import app\n'), ((12437, 12479), 'app.app.route', 'app.route', (['"""/addDelivery"""'], {'methods': "['GET']"}), "('/addDelivery', methods=['GET'])\n", (12446, 12479), False, 'from app import app\n'), ((14335, 14377), 'app.app.route', 'app.route', (['"""/getContacts"""'], {'methods': "['GET']"}), "('/getContacts', methods=['GET'])\n", (14344, 14377), False, 'from app import app\n'), ((15299, 15338), 'app.app.route', 'app.route', (['"""/addStock"""'], {'methods': "['GET']"}), "('/addStock', methods=['GET'])\n", (15308, 15338), False, 'from app import app\n'), ((15615, 15659), 'app.app.route', 'app.route', (['"""/getStockTable"""'], {'methods': "['GET']"}), "('/getStockTable', methods=['GET'])\n", (15624, 15659), False, 'from app import app\n'), ((16044, 16088), 'app.app.route', 'app.route', (['"""/getStockItems"""'], {'methods': "['GET']"}), "('/getStockItems', methods=['GET'])\n", (16053, 16088), False, 'from app import app\n'), ((16435, 16478), 'app.app.route', 'app.route', (['"""/addItemGroup"""'], {'methods': "['GET']"}), "('/addItemGroup', methods=['GET'])\n", (16444, 16478), False, 'from app import app\n'), ((16745, 16788), 'app.app.route', 'app.route', (['"""/getItemGroup"""'], {'methods': "['GET']"}), 
"('/getItemGroup', methods=['GET'])\n", (16754, 16788), False, 'from app import app\n'), ((16952, 16994), 'app.app.route', 'app.route', (['"""/getAllItems"""'], {'methods': "['GET']"}), "('/getAllItems', methods=['GET'])\n", (16961, 16994), False, 'from app import app\n'), ((17150, 17192), 'app.app.route', 'app.route', (['"""/getAccounts"""'], {'methods': "['GET']"}), "('/getAccounts', methods=['GET'])\n", (17159, 17192), False, 'from app import app\n'), ((17879, 17917), 'app.app.route', 'app.route', (['"""/addUser"""'], {'methods': "['GET']"}), "('/addUser', methods=['GET'])\n", (17888, 17917), False, 'from app import app\n'), ((18625, 18666), 'app.app.route', 'app.route', (['"""/addAccount"""'], {'methods': "['GET']"}), "('/addAccount', methods=['GET'])\n", (18634, 18666), False, 'from app import app\n'), ((19200, 19245), 'app.app.route', 'app.route', (['"""/addItemToStock"""'], {'methods': "['GET']"}), "('/addItemToStock', methods=['GET'])\n", (19209, 19245), False, 'from app import app\n'), ((20176, 20216), 'app.app.route', 'app.route', (['"""/getStocks"""'], {'methods': "['GET']"}), "('/getStocks', methods=['GET'])\n", (20185, 20216), False, 'from app import app\n'), ((20371, 20413), 'app.app.route', 'app.route', (['"""/addContacts"""'], {'methods': "['GET']"}), "('/addContacts', methods=['GET'])\n", (20380, 20413), False, 'from app import app\n'), ((21722, 21769), 'app.app.route', 'app.route', (['"""/addManagerToCard"""'], {'methods': "['GET']"}), "('/addManagerToCard', methods=['GET'])\n", (21731, 21769), False, 'from app import app\n'), ((22341, 22393), 'app.app.route', 'app.route', (['"""/deleteManagerFromCard"""'], {'methods': "['GET']"}), "('/deleteManagerFromCard', methods=['GET'])\n", (22350, 22393), False, 'from app import app\n'), ((22967, 23009), 'app.app.route', 'app.route', (['"""/getThisUser"""'], {'methods': "['GET']"}), "('/getThisUser', methods=['GET'])\n", (22976, 23009), False, 'from app import app\n'), ((23489, 23528), 'app.app.route', 
'app.route', (['"""/addItems"""'], {'methods': "['GET']"}), "('/addItems', methods=['GET'])\n", (23498, 23528), False, 'from app import app\n'), ((24851, 24890), 'app.app.route', 'app.route', (['"""/getItems"""'], {'methods': "['GET']"}), "('/getItems', methods=['GET'])\n", (24860, 24890), False, 'from app import app\n'), ((25813, 25856), 'app.app.route', 'app.route', (['"""/getProviders"""'], {'methods': "['GET']"}), "('/getProviders', methods=['GET'])\n", (25822, 25856), False, 'from app import app\n'), ((26017, 26056), 'app.app.route', 'app.route', (['"""/getTasks"""'], {'methods': "['GET']"}), "('/getTasks', methods=['GET'])\n", (26026, 26056), False, 'from app import app\n'), ((26264, 26303), 'app.app.route', 'app.route', (['"""/getUsers"""'], {'methods': "['GET']"}), "('/getUsers', methods=['GET'])\n", (26273, 26303), False, 'from app import app\n'), ((26456, 26498), 'app.app.route', 'app.route', (['"""/getCarriers"""'], {'methods': "['GET']"}), "('/getCarriers', methods=['GET'])\n", (26465, 26498), False, 'from app import app\n'), ((26657, 26699), 'app.app.route', 'app.route', (['"""/addProvider"""'], {'methods': "['GET']"}), "('/addProvider', methods=['GET'])\n", (26666, 26699), False, 'from app import app\n'), ((27883, 27924), 'app.app.route', 'app.route', (['"""/addComment"""'], {'methods': "['GET']"}), "('/addComment', methods=['GET'])\n", (27892, 27924), False, 'from app import app\n'), ((28548, 28589), 'app.app.route', 'app.route', (['"""/getComment"""'], {'methods': "['GET']"}), "('/getComment', methods=['GET'])\n", (28557, 28589), False, 'from app import app\n'), ((29170, 29211), 'app.app.route', 'app.route', (['"""/addCarrier"""'], {'methods': "['GET']"}), "('/addCarrier', methods=['GET'])\n", (29179, 29211), False, 'from app import app\n'), ((30029, 30069), 'app.app.route', 'app.route', (['"""/addClient"""'], {'methods': "['GET']"}), "('/addClient', methods=['GET'])\n", (30038, 30069), False, 'from app import app\n'), ((886, 956), 
'flask_socketio.emit', 'emit', (['"""user joined"""', "{'username': session['username']}"], {'broadcast': '(True)'}), "('user joined', {'username': session['username']}, broadcast=True)\n", (890, 956), False, 'from flask_socketio import SocketIO, emit\n'), ((1004, 1028), 'app.models.Tasks.query.all', 'models.Tasks.query.all', ([], {}), '()\n', (1026, 1028), False, 'from app import models, db, reqs\n'), ((1691, 1705), 'app.models.Tasks', 'models.Tasks', ([], {}), '()\n', (1703, 1705), False, 'from app import models, db, reqs\n'), ((1728, 1768), 'json.dumps', 'json.dumps', (["message['data']['task_whom']"], {}), "(message['data']['task_whom'])\n", (1738, 1768), False, 'import json\n'), ((2012, 2032), 'app.db.session.add', 'db.session.add', (['task'], {}), '(task)\n', (2026, 2032), False, 'from app import models, db, reqs\n'), ((2037, 2056), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2054, 2056), False, 'from app import models, db, reqs\n'), ((2061, 2097), 'flask_socketio.emit', 'emit', (['"""refreshTasks"""'], {'broadcast': '(True)'}), "('refreshTasks', broadcast=True)\n", (2065, 2097), False, 'from flask_socketio import SocketIO, emit\n'), ((2625, 2643), 'json.dumps', 'json.dumps', (['result'], {}), '(result)\n', (2635, 2643), False, 'import json\n'), ((2696, 2713), 'app.models.Document', 'models.Document', ([], {}), '()\n', (2711, 2713), False, 'from app import models, db, reqs\n'), ((3834, 3858), 'app.db.session.add', 'db.session.add', (['document'], {}), '(document)\n', (3848, 3858), False, 'from app import models, db, reqs\n'), ((3863, 3882), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3880, 3882), False, 'from app import models, db, reqs\n'), ((4131, 4177), 'xhtml2pdf.pisa.CreatePDF', 'pisa.CreatePDF', (['html'], {'dest': 'f', 'encoding': '"""utf-8"""'}), "(html, dest=f, encoding='utf-8')\n", (4145, 4177), False, 'from xhtml2pdf import pisa\n'), ((5736, 5801), 'json.dumps', 'json.dumps', (["{'message': 'Неверный 
логин/email', 'success': False}"], {}), "({'message': 'Неверный логин/email', 'success': False})\n", (5746, 5801), False, 'import json\n'), ((5936, 5959), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (5944, 5959), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((6205, 6224), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (6222, 6224), False, 'from app import models, db, reqs\n'), ((7607, 7630), 'app.db.session.delete', 'db.session.delete', (['user'], {}), '(user)\n', (7624, 7630), False, 'from app import models, db, reqs\n'), ((7635, 7654), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (7652, 7654), False, 'from app import models, db, reqs\n'), ((8198, 8211), 'app.models.Item', 'models.Item', ([], {}), '()\n', (8209, 8211), False, 'from app import models, db, reqs\n'), ((8685, 8704), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (8702, 8704), False, 'from app import models, db, reqs\n'), ((1612, 1629), 'json.dumps', 'json.dumps', (['tasks'], {}), '(tasks)\n', (1622, 1629), False, 'import json\n'), ((4566, 4595), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (4581, 4595), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((4621, 4650), 'flask.render_template', 'render_template', (['"""login.html"""'], {}), "('login.html')\n", (4636, 4650), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((4724, 4748), 'app.models.Tasks.query.all', 'models.Tasks.query.all', ([], {}), '()\n', (4746, 4748), False, 'from app import models, db, reqs\n'), ((5340, 5400), 'json.dumps', 'json.dumps', (["{'message': 'Неверный пароль', 'success': False}"], {}), "({'message': 'Неверный пароль', 'success': False})\n", (5350, 5400), False, 'import json\n'), ((5895, 5924), 'flask.session.pop', 'session.pop', 
(['"""username"""', 'None'], {}), "('username', None)\n", (5906, 5924), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((6421, 6444), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (6429, 6444), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((6570, 6633), 'app.models.Delivery.query.filter_by', 'models.Delivery.query.filter_by', ([], {'id': "request.args['delivery_id']"}), "(id=request.args['delivery_id'])\n", (6601, 6633), False, 'from app import models, db, reqs\n'), ((7243, 7266), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (7251, 7266), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((7441, 7464), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (7449, 7464), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((8894, 8921), 'app.models.Contacts.query.all', 'models.Contacts.query.all', ([], {}), '()\n', (8919, 8921), False, 'from app import models, db, reqs\n'), ((8944, 8971), 'app.models.Delivery.query.all', 'models.Delivery.query.all', ([], {}), '()\n', (8969, 8971), False, 'from app import models, db, reqs\n'), ((8988, 9011), 'app.models.User.query.all', 'models.User.query.all', ([], {}), '()\n', (9009, 9011), False, 'from app import models, db, reqs\n'), ((9540, 9558), 'json.dumps', 'json.dumps', (['result'], {}), '(result)\n', (9550, 9558), False, 'import json\n'), ((9584, 9607), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (9592, 9607), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((10540, 10563), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (10548, 10563), False, 'from flask import render_template, redirect, session, request, 
send_from_directory\n'), ((11034, 11070), 'json.loads', 'json.loads', (["request.args['comments']"], {}), "(request.args['comments'])\n", (11044, 11070), False, 'import json\n'), ((11089, 11103), 'app.models.Notes', 'models.Notes', ([], {}), '()\n', (11101, 11103), False, 'from app import models, db, reqs\n'), ((11588, 11607), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (11605, 11607), False, 'from app import models, db, reqs\n'), ((11654, 11677), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (11662, 11677), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((11798, 11825), 'app.models.Delivery.query.all', 'models.Delivery.query.all', ([], {}), '()\n', (11823, 11825), False, 'from app import models, db, reqs\n'), ((11865, 11891), 'app.models.Carrier.query.all', 'models.Carrier.query.all', ([], {}), '()\n', (11889, 11891), False, 'from app import models, db, reqs\n'), ((12366, 12384), 'json.dumps', 'json.dumps', (['result'], {}), '(result)\n', (12376, 12384), False, 'import json\n'), ((12410, 12433), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (12418, 12433), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((14242, 14261), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (14259, 14261), False, 'from app import models, db, reqs\n'), ((14308, 14331), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (14316, 14331), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((15272, 15295), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (15280, 15295), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((15443, 15457), 'app.models.Stock', 'models.Stock', ([], {}), '()\n', (15455, 15457), False, 'from app import 
models, db, reqs\n'), ((15493, 15514), 'app.db.session.add', 'db.session.add', (['stock'], {}), '(stock)\n', (15507, 15514), False, 'from app import models, db, reqs\n'), ((15523, 15542), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (15540, 15542), False, 'from app import models, db, reqs\n'), ((15588, 15611), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (15596, 15611), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((15748, 15772), 'app.models.Stock.query.all', 'models.Stock.query.all', ([], {}), '()\n', (15770, 15772), False, 'from app import models, db, reqs\n'), ((15973, 15991), 'json.dumps', 'json.dumps', (['result'], {}), '(result)\n', (15983, 15991), False, 'import json\n'), ((16017, 16040), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (16025, 16040), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((16408, 16431), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (16416, 16431), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((16545, 16565), 'app.models.Item_groups', 'models.Item_groups', ([], {}), '()\n', (16563, 16565), False, 'from app import models, db, reqs\n'), ((16623, 16644), 'app.db.session.add', 'db.session.add', (['group'], {}), '(group)\n', (16637, 16644), False, 'from app import models, db, reqs\n'), ((16653, 16672), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (16670, 16672), False, 'from app import models, db, reqs\n'), ((16718, 16741), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (16726, 16741), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((16925, 16948), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (16933, 16948), 
False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((17123, 17146), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (17131, 17146), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((17278, 17301), 'app.models.Item.query.all', 'models.Item.query.all', ([], {}), '()\n', (17299, 17301), False, 'from app import models, db, reqs\n'), ((17319, 17345), 'app.models.Account.query.all', 'models.Account.query.all', ([], {}), '()\n', (17343, 17345), False, 'from app import models, db, reqs\n'), ((17808, 17826), 'json.dumps', 'json.dumps', (['result'], {}), '(result)\n', (17818, 17826), False, 'import json\n'), ((17852, 17875), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (17860, 17875), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((18532, 18551), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (18549, 18551), False, 'from app import models, db, reqs\n'), ((18598, 18621), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (18606, 18621), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((18759, 18775), 'app.models.Account', 'models.Account', ([], {}), '()\n', (18773, 18775), False, 'from app import models, db, reqs\n'), ((19077, 19098), 'app.db.session.add', 'db.session.add', (['table'], {}), '(table)\n', (19091, 19098), False, 'from app import models, db, reqs\n'), ((19107, 19126), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (19124, 19126), False, 'from app import models, db, reqs\n'), ((19173, 19196), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (19181, 19196), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((19514, 19527), 'app.models.Item', 
'models.Item', ([], {}), '()\n', (19525, 19527), False, 'from app import models, db, reqs\n'), ((20083, 20102), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (20100, 20102), False, 'from app import models, db, reqs\n'), ((20149, 20172), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (20157, 20172), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((20344, 20367), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (20352, 20367), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((20863, 20899), 'json.loads', 'json.loads', (["request.args['contacts']"], {}), "(request.args['contacts'])\n", (20873, 20899), False, 'import json\n'), ((21629, 21648), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (21646, 21648), False, 'from app import models, db, reqs\n'), ((21695, 21718), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (21703, 21718), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((22249, 22268), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (22266, 22268), False, 'from app import models, db, reqs\n'), ((22314, 22337), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (22322, 22337), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((22875, 22894), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (22892, 22894), False, 'from app import models, db, reqs\n'), ((22940, 22963), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (22948, 22963), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((23418, 23436), 'json.dumps', 'json.dumps', (['result'], {}), '(result)\n', (23428, 23436), False, 'import 
json\n'), ((23462, 23485), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (23470, 23485), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((23984, 24016), 'json.loads', 'json.loads', (["request.args['item']"], {}), "(request.args['item'])\n", (23994, 24016), False, 'import json\n'), ((24758, 24777), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (24775, 24777), False, 'from app import models, db, reqs\n'), ((24824, 24847), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (24832, 24847), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((25786, 25809), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (25794, 25809), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((25990, 26013), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (25998, 26013), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((26237, 26260), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (26245, 26260), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((26429, 26452), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (26437, 26452), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((26630, 26653), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (26638, 26653), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((27790, 27809), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (27807, 27809), False, 'from app import models, db, reqs\n'), ((27856, 27879), 'flask.redirect', 'redirect', 
(['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (27864, 27879), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((28476, 28495), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (28493, 28495), False, 'from app import models, db, reqs\n'), ((28521, 28544), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (28529, 28544), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((29143, 29166), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (29151, 29166), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((29936, 29955), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (29953, 29955), False, 'from app import models, db, reqs\n'), ((30002, 30025), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (30010, 30025), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((31341, 31360), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (31358, 31360), False, 'from app import models, db, reqs\n'), ((31407, 31430), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (31415, 31430), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((1036, 1090), 'app.models.User.query.filter_by', 'models.User.query.filter_by', ([], {'login': "session['username']"}), "(login=session['username'])\n", (1063, 1090), False, 'from app import models, db, reqs\n'), ((2882, 2907), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2897, 2907), False, 'import os\n'), ((2990, 3016), 'app.reqs.getINNinfo', 'reqs.getINNinfo', (['owner.UHH'], {}), '(owner.UHH)\n', (3005, 3016), False, 'from app import models, db, reqs\n'), ((4010, 4035), 'os.path.dirname', 'os.path.dirname', 
(['__file__'], {}), '(__file__)\n', (4025, 4035), False, 'import os\n'), ((4220, 4245), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (4235, 4245), False, 'import os\n'), ((5057, 5097), 'app.models.User.query.filter_by', 'models.User.query.filter_by', ([], {'login': 'login'}), '(login=login)\n', (5084, 5097), False, 'from app import models, db, reqs\n'), ((5300, 5323), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (5308, 5323), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((5663, 5723), 'json.dumps', 'json.dumps', (["{'message': 'Неверный пароль', 'success': False}"], {}), "({'message': 'Неверный пароль', 'success': False})\n", (5673, 5723), False, 'import json\n'), ((6063, 6124), 'app.models.Account.query.filter_by', 'models.Account.query.filter_by', ([], {'id': "request.args['account_id']"}), "(id=request.args['account_id'])\n", (6093, 6124), False, 'from app import models, db, reqs\n'), ((6367, 6394), 'app.models.Template.query.all', 'models.Template.query.all', ([], {}), '()\n', (6392, 6394), False, 'from app import models, db, reqs\n'), ((7389, 7414), 'app.models.Client.query.all', 'models.Client.query.all', ([], {}), '()\n', (7412, 7414), False, 'from app import models, db, reqs\n'), ((7543, 7593), 'app.models.User.query.filter_by', 'models.User.query.filter_by', ([], {'id': "request.args['id']"}), "(id=request.args['id'])\n", (7570, 7593), False, 'from app import models, db, reqs\n'), ((7749, 7812), 'app.models.Item.query.filter_by', 'models.Item.query.filter_by', ([], {'Item_id': "request.args['id_product']"}), "(Item_id=request.args['id_product'])\n", (7776, 7812), False, 'from app import models, db, reqs\n'), ((7833, 7896), 'app.models.Stock.query.filter_by', 'models.Stock.query.filter_by', ([], {'Name': "request.args['stock_select']"}), "(Name=request.args['stock_select'])\n", (7861, 7896), False, 'from app import models, db, reqs\n'), 
((8143, 8162), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (8160, 8162), False, 'from app import models, db, reqs\n'), ((12653, 12670), 'app.models.Delivery', 'models.Delivery', ([], {}), '()\n', (12668, 12670), False, 'from app import models, db, reqs\n'), ((14212, 14233), 'app.db.session.add', 'db.session.add', (['table'], {}), '(table)\n', (14226, 14233), False, 'from app import models, db, reqs\n'), ((16868, 16898), 'app.models.Item_groups.query.all', 'models.Item_groups.query.all', ([], {}), '()\n', (16896, 16898), False, 'from app import models, db, reqs\n'), ((17073, 17096), 'app.models.Item.query.all', 'models.Item.query.all', ([], {}), '()\n', (17094, 17096), False, 'from app import models, db, reqs\n'), ((17391, 17413), 'json.loads', 'json.loads', (['i.Item_ids'], {}), '(i.Item_ids)\n', (17401, 17413), False, 'import json\n'), ((18042, 18055), 'app.models.User', 'models.User', ([], {}), '()\n', (18053, 18055), False, 'from app import models, db, reqs\n'), ((18503, 18523), 'app.db.session.add', 'db.session.add', (['user'], {}), '(user)\n', (18517, 18523), False, 'from app import models, db, reqs\n'), ((20293, 20317), 'app.models.Stock.query.all', 'models.Stock.query.all', ([], {}), '()\n', (20315, 20317), False, 'from app import models, db, reqs\n'), ((20945, 20962), 'app.models.Contacts', 'models.Contacts', ([], {}), '()\n', (20960, 20962), False, 'from app import models, db, reqs\n'), ((25936, 25963), 'app.models.Provider.query.all', 'models.Provider.query.all', ([], {}), '()\n', (25961, 25963), False, 'from app import models, db, reqs\n'), ((26379, 26402), 'app.models.User.query.all', 'models.User.query.all', ([], {}), '()\n', (26400, 26402), False, 'from app import models, db, reqs\n'), ((26577, 26603), 'app.models.Carrier.query.all', 'models.Carrier.query.all', ([], {}), '()\n', (26601, 26603), False, 'from app import models, db, reqs\n'), ((27028, 27045), 'app.models.Provider', 'models.Provider', ([], {}), '()\n', (27043, 27045), 
False, 'from app import models, db, reqs\n'), ((27756, 27780), 'app.db.session.add', 'db.session.add', (['Provider'], {}), '(Provider)\n', (27770, 27780), False, 'from app import models, db, reqs\n'), ((29532, 29548), 'app.models.Carrier', 'models.Carrier', ([], {}), '()\n', (29546, 29548), False, 'from app import models, db, reqs\n'), ((29903, 29926), 'app.db.session.add', 'db.session.add', (['Carrier'], {}), '(Carrier)\n', (29917, 29926), False, 'from app import models, db, reqs\n'), ((30386, 30401), 'app.models.Client', 'models.Client', ([], {}), '()\n', (30399, 30401), False, 'from app import models, db, reqs\n'), ((31309, 31331), 'app.db.session.add', 'db.session.add', (['Client'], {}), '(Client)\n', (31323, 31331), False, 'from app import models, db, reqs\n'), ((1115, 1169), 'app.models.User.query.filter_by', 'models.User.query.filter_by', ([], {'login': "session['username']"}), "(login=session['username'])\n", (1142, 1169), False, 'from app import models, db, reqs\n'), ((1203, 1257), 'app.models.User.query.filter_by', 'models.User.query.filter_by', ([], {'email': "session['username']"}), "(email=session['username'])\n", (1230, 1257), False, 'from app import models, db, reqs\n'), ((1323, 1347), 'json.loads', 'json.loads', (['i.Visibility'], {}), '(i.Visibility)\n', (1333, 1347), False, 'import json\n'), ((3271, 3285), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3283, 3285), False, 'from datetime import datetime\n'), ((3646, 3697), 'app.models.Client.query.filter_by', 'models.Client.query.filter_by', ([], {'Name': 'delivery.Client'}), '(Name=delivery.Client)\n', (3675, 3697), False, 'from app import models, db, reqs\n'), ((5122, 5162), 'app.models.User.query.filter_by', 'models.User.query.filter_by', ([], {'login': 'login'}), '(login=login)\n', (5149, 5162), False, 'from app import models, db, reqs\n'), ((5411, 5451), 'app.models.User.query.filter_by', 'models.User.query.filter_by', ([], {'email': 'login'}), '(email=login)\n', (5438, 5451), 
False, 'from app import models, db, reqs\n'), ((5623, 5646), 'flask.redirect', 'redirect', (['"""/"""'], {'code': '(302)'}), "('/', code=302)\n", (5631, 5646), False, 'from flask import render_template, redirect, session, request, send_from_directory\n'), ((16185, 16234), 'app.models.Stock.query.filter_by', 'models.Stock.query.filter_by', ([], {'id': "data['stock_id']"}), "(id=data['stock_id'])\n", (16213, 16234), False, 'from app import models, db, reqs\n'), ((19343, 19392), 'app.models.Stock.query.filter_by', 'models.Stock.query.filter_by', ([], {'id': "data['stock_id']"}), "(id=data['stock_id'])\n", (19371, 19392), False, 'from app import models, db, reqs\n'), ((23070, 23124), 'app.models.User.query.filter_by', 'models.User.query.filter_by', ([], {'login': "session['username']"}), "(login=session['username'])\n", (23097, 23124), False, 'from app import models, db, reqs\n'), ((24097, 24115), 'app.models.Junk_item', 'models.Junk_item', ([], {}), '()\n', (24113, 24115), False, 'from app import models, db, reqs\n'), ((26123, 26163), 'app.models.User.query.filter_by', 'models.User.query.filter_by', ([], {'login': 'login'}), '(login=login)\n', (26150, 26163), False, 'from app import models, db, reqs\n'), ((1446, 1470), 'json.loads', 'json.loads', (['i.Visibility'], {}), '(i.Visibility)\n', (1456, 1470), False, 'import json\n'), ((3064, 3078), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3076, 3078), False, 'from datetime import datetime\n'), ((3091, 3105), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3103, 3105), False, 'from datetime import datetime\n'), ((3120, 3134), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3132, 3134), False, 'from datetime import datetime\n'), ((3237, 3251), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3249, 3251), False, 'from datetime import datetime\n'), ((5476, 5516), 'app.models.User.query.filter_by', 'models.User.query.filter_by', ([], {'email': 'login'}), '(email=login)\n', 
(5503, 5516), False, 'from app import models, db, reqs\n'), ((6765, 6822), 'app.models.Client.query.filter_by', 'models.Client.query.filter_by', ([], {'id': "request.args['card_id']"}), "(id=request.args['card_id'])\n", (6794, 6822), False, 'from app import models, db, reqs\n'), ((9813, 9853), 'app.models.Client.query.filter_by', 'models.Client.query.filter_by', ([], {'id': 'client'}), '(id=client)\n', (9842, 9853), False, 'from app import models, db, reqs\n'), ((10728, 10780), 'app.models.Client.query.filter_by', 'models.Client.query.filter_by', ([], {'id': "request.args['id']"}), "(id=request.args['id'])\n", (10757, 10780), False, 'from app import models, db, reqs\n'), ((12705, 12760), 'app.models.Delivery.query.filter_by', 'models.Delivery.query.filter_by', ([], {'id': "data['delivery_id']"}), "(id=data['delivery_id'])\n", (12736, 12760), False, 'from app import models, db, reqs\n'), ((14537, 14577), 'app.models.Client.query.filter_by', 'models.Client.query.filter_by', ([], {'id': 'client'}), '(id=client)\n', (14566, 14577), False, 'from app import models, db, reqs\n'), ((18089, 18131), 'app.models.User.query.filter_by', 'models.User.query.filter_by', ([], {'id': "data['id']"}), "(id=data['id'])\n", (18116, 18131), False, 'from app import models, db, reqs\n'), ((19971, 20026), 'app.models.Item_groups.query.filter_by', 'models.Item_groups.query.filter_by', ([], {'id': "data['group_id']"}), "(id=data['group_id'])\n", (20005, 20026), False, 'from app import models, db, reqs\n'), ((20532, 20584), 'app.models.Client.query.filter_by', 'models.Client.query.filter_by', ([], {'id': "request.args['id']"}), "(id=request.args['id'])\n", (20561, 20584), False, 'from app import models, db, reqs\n'), ((21893, 21950), 'app.models.Client.query.filter_by', 'models.Client.query.filter_by', ([], {'id': "request.args['card_id']"}), "(id=request.args['card_id'])\n", (21922, 21950), False, 'from app import models, db, reqs\n'), ((22522, 22579), 'app.models.Client.query.filter_by', 
'models.Client.query.filter_by', ([], {'id': "request.args['card_id']"}), "(id=request.args['card_id'])\n", (22551, 22579), False, 'from app import models, db, reqs\n'), ((23153, 23207), 'app.models.User.query.filter_by', 'models.User.query.filter_by', ([], {'login': "session['username']"}), "(login=session['username'])\n", (23180, 23207), False, 'from app import models, db, reqs\n'), ((23249, 23303), 'app.models.User.query.filter_by', 'models.User.query.filter_by', ([], {'email': "session['username']"}), "(email=session['username'])\n", (23276, 23303), False, 'from app import models, db, reqs\n'), ((23672, 23724), 'app.models.Client.query.filter_by', 'models.Client.query.filter_by', ([], {'id': "request.args['id']"}), "(id=request.args['id'])\n", (23701, 23724), False, 'from app import models, db, reqs\n'), ((25047, 25087), 'app.models.Client.query.filter_by', 'models.Client.query.filter_by', ([], {'id': 'client'}), '(id=client)\n', (25076, 25087), False, 'from app import models, db, reqs\n'), ((26925, 26982), 'app.models.Provider.query.filter_by', 'models.Provider.query.filter_by', ([], {'id': "data['provider_data']"}), "(id=data['provider_data'])\n", (26956, 26982), False, 'from app import models, db, reqs\n'), ((28042, 28094), 'app.models.Client.query.filter_by', 'models.Client.query.filter_by', ([], {'id': "request.args['id']"}), "(id=request.args['id'])\n", (28071, 28094), False, 'from app import models, db, reqs\n'), ((28707, 28759), 'app.models.Client.query.filter_by', 'models.Client.query.filter_by', ([], {'id': "request.args['id']"}), "(id=request.args['id'])\n", (28736, 28759), False, 'from app import models, db, reqs\n'), ((29432, 29487), 'app.models.Carrier.query.filter_by', 'models.Carrier.query.filter_by', ([], {'id': "data['carrier_data']"}), "(id=data['carrier_data'])\n", (29462, 29487), False, 'from app import models, db, reqs\n'), ((30289, 30342), 'app.models.Client.query.filter_by', 'models.Client.query.filter_by', ([], {'id': 
"data['client_data']"}), "(id=data['client_data'])\n", (30318, 30342), False, 'from app import models, db, reqs\n'), ((6904, 6963), 'app.models.Provider.query.filter_by', 'models.Provider.query.filter_by', ([], {'id': "request.args['card_id']"}), "(id=request.args['card_id'])\n", (6935, 6963), False, 'from app import models, db, reqs\n'), ((9895, 9938), 'app.models.Notes.query.filter_by', 'models.Notes.query.filter_by', ([], {'Author': 'Client'}), '(Author=Client)\n', (9923, 9938), False, 'from app import models, db, reqs\n'), ((10064, 10108), 'app.models.Provider.query.filter_by', 'models.Provider.query.filter_by', ([], {'id': 'provider'}), '(id=provider)\n', (10095, 10108), False, 'from app import models, db, reqs\n'), ((10862, 10916), 'app.models.Provider.query.filter_by', 'models.Provider.query.filter_by', ([], {'id': "request.args['id']"}), "(id=request.args['id'])\n", (10893, 10916), False, 'from app import models, db, reqs\n'), ((10959, 11012), 'app.models.Carrier.query.filter_by', 'models.Carrier.query.filter_by', ([], {'id': "request.args['id']"}), "(id=request.args['id'])\n", (10989, 11012), False, 'from app import models, db, reqs\n'), ((14619, 14664), 'app.models.Contacts.query.filter_by', 'models.Contacts.query.filter_by', ([], {'Owner': 'Client'}), '(Owner=Client)\n', (14650, 14664), False, 'from app import models, db, reqs\n'), ((14790, 14834), 'app.models.Provider.query.filter_by', 'models.Provider.query.filter_by', ([], {'id': 'provider'}), '(id=provider)\n', (14821, 14834), False, 'from app import models, db, reqs\n'), ((20666, 20720), 'app.models.Provider.query.filter_by', 'models.Provider.query.filter_by', ([], {'id': "request.args['id']"}), "(id=request.args['id'])\n", (20697, 20720), False, 'from app import models, db, reqs\n'), ((20763, 20816), 'app.models.Carrier.query.filter_by', 'models.Carrier.query.filter_by', ([], {'id': "request.args['id']"}), "(id=request.args['id'])\n", (20793, 20816), False, 'from app import models, db, reqs\n'), 
((22032, 22091), 'app.models.Provider.query.filter_by', 'models.Provider.query.filter_by', ([], {'id': "request.args['card_id']"}), "(id=request.args['card_id'])\n", (22063, 22091), False, 'from app import models, db, reqs\n'), ((22661, 22720), 'app.models.Provider.query.filter_by', 'models.Provider.query.filter_by', ([], {'id': "request.args['card_id']"}), "(id=request.args['card_id'])\n", (22692, 22720), False, 'from app import models, db, reqs\n'), ((23835, 23889), 'app.models.Provider.query.filter_by', 'models.Provider.query.filter_by', ([], {'id': "request.args['id']"}), "(id=request.args['id'])\n", (23866, 23889), False, 'from app import models, db, reqs\n'), ((25129, 25176), 'app.models.Junk_item.query.filter_by', 'models.Junk_item.query.filter_by', ([], {'Client': 'Client'}), '(Client=Client)\n', (25161, 25176), False, 'from app import models, db, reqs\n'), ((25302, 25346), 'app.models.Provider.query.filter_by', 'models.Provider.query.filter_by', ([], {'id': 'provider'}), '(id=provider)\n', (25333, 25346), False, 'from app import models, db, reqs\n'), ((28176, 28230), 'app.models.Provider.query.filter_by', 'models.Provider.query.filter_by', ([], {'id': "request.args['id']"}), "(id=request.args['id'])\n", (28207, 28230), False, 'from app import models, db, reqs\n'), ((28841, 28895), 'app.models.Provider.query.filter_by', 'models.Provider.query.filter_by', ([], {'id': "request.args['id']"}), "(id=request.args['id'])\n", (28872, 28895), False, 'from app import models, db, reqs\n'), ((7044, 7102), 'app.models.Carrier.query.filter_by', 'models.Carrier.query.filter_by', ([], {'id': "request.args['card_id']"}), "(id=request.args['card_id'])\n", (7074, 7102), False, 'from app import models, db, reqs\n'), ((10150, 10197), 'app.models.Notes.query.filter_by', 'models.Notes.query.filter_by', ([], {'Provider': 'Provider'}), '(Provider=Provider)\n', (10178, 10197), False, 'from app import models, db, reqs\n'), ((10320, 10363), 'app.models.Provider.query.filter_by', 
'models.Provider.query.filter_by', ([], {'id': 'carrier'}), '(id=carrier)\n', (10351, 10363), False, 'from app import models, db, reqs\n'), ((14876, 14926), 'app.models.Contacts.query.filter_by', 'models.Contacts.query.filter_by', ([], {'Provider': 'Provider'}), '(Provider=Provider)\n', (14907, 14926), False, 'from app import models, db, reqs\n'), ((15049, 15092), 'app.models.Provider.query.filter_by', 'models.Provider.query.filter_by', ([], {'id': 'carrier'}), '(id=carrier)\n', (15080, 15092), False, 'from app import models, db, reqs\n'), ((25388, 25439), 'app.models.Junk_item.query.filter_by', 'models.Junk_item.query.filter_by', ([], {'Provider': 'Provider'}), '(Provider=Provider)\n', (25420, 25439), False, 'from app import models, db, reqs\n'), ((25562, 25605), 'app.models.Provider.query.filter_by', 'models.Provider.query.filter_by', ([], {'id': 'carrier'}), '(id=carrier)\n', (25593, 25605), False, 'from app import models, db, reqs\n'), ((28311, 28364), 'app.models.Carrier.query.filter_by', 'models.Carrier.query.filter_by', ([], {'id': "request.args['id']"}), "(id=request.args['id'])\n", (28341, 28364), False, 'from app import models, db, reqs\n'), ((28976, 29029), 'app.models.Carrier.query.filter_by', 'models.Carrier.query.filter_by', ([], {'id': "request.args['id']"}), "(id=request.args['id'])\n", (29006, 29029), False, 'from app import models, db, reqs\n'), ((10405, 10450), 'app.models.Notes.query.filter_by', 'models.Notes.query.filter_by', ([], {'Carrier': 'Carrier'}), '(Carrier=Carrier)\n', (10433, 10450), False, 'from app import models, db, reqs\n'), ((15134, 15182), 'app.models.Contacts.query.filter_by', 'models.Contacts.query.filter_by', ([], {'Carrier': 'Carrier'}), '(Carrier=Carrier)\n', (15165, 15182), False, 'from app import models, db, reqs\n'), ((25647, 25696), 'app.models.Junk_item.query.filter_by', 'models.Junk_item.query.filter_by', ([], {'Carrier': 'Carrier'}), '(Carrier=Carrier)\n', (25679, 25696), False, 'from app import models, db, reqs\n')] 
|
import numpy as np
# Demo: manually evaluate a perceptron with inputs (0, 1),
# weights (0.5, 0.5) and bias -0.7 (the AND-gate parameters below).
x = np.array([0,1])
w = np.array([0.5,0.5])
b = -0.7  # bias
print(w*x)  # element-wise weighted inputs
print(np.sum(w*x))  # weighted sum
print(np.sum(w*x)+b)  # pre-activation: weighted sum plus bias
def AND(x1,x2):
    """Perceptron AND gate: returns 1 only when both inputs are 1."""
    inputs = np.array([x1, x2])
    weights = np.array([0.5, 0.5])
    bias = -0.7
    activation = np.sum(weights * inputs) + bias
    return 0 if activation <= 0 else 1
def NAND(x1,x2):
    """Perceptron NAND gate: AND with negated weights and bias."""
    inputs = np.array([x1, x2])
    weights = np.array([-0.5, -0.5])
    bias = 0.7
    activation = np.sum(weights * inputs) + bias
    return 0 if activation <= 0 else 1
def OR(x1,x2):
    """Perceptron OR gate: fires when at least one input is 1."""
    inputs = np.array([x1, x2])
    weights = np.array([0.5, 0.5])
    bias = -0.2
    activation = np.sum(weights * inputs) + bias
    return 0 if activation <= 0 else 1
def XOR(x1,x2):
    """XOR built as a two-layer gate network: AND(NAND(x), OR(x))."""
    return AND(NAND(x1, x2), OR(x1, x2))
# XOR truth table: expected output 0, 1, 1, 0
print(XOR(0,0))
print(XOR(1,0))
print(XOR(0,1))
print(XOR(1,1))
| [
"numpy.array",
"numpy.sum"
] | [((23, 39), 'numpy.array', 'np.array', (['[0, 1]'], {}), '([0, 1])\n', (31, 39), True, 'import numpy as np\n'), ((43, 63), 'numpy.array', 'np.array', (['[0.5, 0.5]'], {}), '([0.5, 0.5])\n', (51, 63), True, 'import numpy as np\n'), ((90, 103), 'numpy.sum', 'np.sum', (['(w * x)'], {}), '(w * x)\n', (96, 103), True, 'import numpy as np\n'), ((149, 167), 'numpy.array', 'np.array', (['[x1, x2]'], {}), '([x1, x2])\n', (157, 167), True, 'import numpy as np\n'), ((175, 195), 'numpy.array', 'np.array', (['[0.5, 0.5]'], {}), '([0.5, 0.5])\n', (183, 195), True, 'import numpy as np\n'), ((319, 337), 'numpy.array', 'np.array', (['[x1, x2]'], {}), '([x1, x2])\n', (327, 337), True, 'import numpy as np\n'), ((345, 367), 'numpy.array', 'np.array', (['[-0.5, -0.5]'], {}), '([-0.5, -0.5])\n', (353, 367), True, 'import numpy as np\n'), ((492, 510), 'numpy.array', 'np.array', (['[x1, x2]'], {}), '([x1, x2])\n', (500, 510), True, 'import numpy as np\n'), ((518, 538), 'numpy.array', 'np.array', (['[0.5, 0.5]'], {}), '([0.5, 0.5])\n', (526, 538), True, 'import numpy as np\n'), ((109, 122), 'numpy.sum', 'np.sum', (['(w * x)'], {}), '(w * x)\n', (115, 122), True, 'import numpy as np\n'), ((218, 231), 'numpy.sum', 'np.sum', (['(w * x)'], {}), '(w * x)\n', (224, 231), True, 'import numpy as np\n'), ((389, 402), 'numpy.sum', 'np.sum', (['(w * x)'], {}), '(w * x)\n', (395, 402), True, 'import numpy as np\n'), ((561, 574), 'numpy.sum', 'np.sum', (['(w * x)'], {}), '(w * x)\n', (567, 574), True, 'import numpy as np\n')] |
import torch
import torch.nn as nn
import torch.nn.functional as F
class BasicBlock(nn.Module):
def __init__(self, in_planes, planes, stride=1):
super(BasicBlock, self).__init__()
self.conv1 = nn.Conv2d(
in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3,
stride=1, padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(planes)
self.relu = nn.ReLU()
def forward(self, x):
out = self.relu(self.bn1(self.conv1(x)))
out = self.relu(self.bn2(self.conv2(out)))
return out
class ResNet(nn.Module):
    """DAWNBench-style custom ResNet ("ResNet-9") image classifier.

    Layout: prep conv stage -> three conv+pool stages, with a residual
    block (built by `block`) added after stages 1 and 3 -> 4x4 max-pool
    -> bias-free linear head.

    Args:
        block: callable ``(in_planes, planes, stride=1) -> nn.Module``
            producing the residual blocks (e.g. BasicBlock).
        num_classes: width of the classification head (default 10).
    """

    def __init__(self, block, num_classes=10):
        super(ResNet, self).__init__()
        self.prep_layer = self._conv_unit(3, 64, pooled=False)
        self.layer1 = self._conv_unit(64, 128, pooled=True)
        self.resblock1 = block(128, 128, stride=1)
        self.layer2 = self._conv_unit(128, 256, pooled=True)
        self.layer3 = self._conv_unit(256, 512, pooled=True)
        self.resblock2 = block(512, 512, stride=1)
        self.pool = nn.MaxPool2d(4, 4)
        # FIX: the head previously hard-coded 10 outputs and ignored
        # `num_classes`; it now honors the parameter (default unchanged,
        # so existing callers see identical behavior).
        self.linear = nn.Linear(512, num_classes, bias=False)

    @staticmethod
    def _conv_unit(in_ch, out_ch, pooled):
        """3x3 conv -> (optional 2x2 max-pool) -> BN -> ReLU stage."""
        layers = [nn.Conv2d(in_ch, out_ch, kernel_size=3,
                            stride=1, padding=1, bias=False)]
        if pooled:
            layers.append(nn.MaxPool2d(2, 2))
        layers.append(nn.BatchNorm2d(out_ch))
        layers.append(nn.ReLU())
        return nn.Sequential(*layers)

    def forward(self, x):
        out = self.prep_layer(x)
        out = self.layer1(out)
        out = out + self.resblock1(out)  # residual add #1
        out = self.layer2(out)
        out = self.layer3(out)
        out = out + self.resblock2(out)  # residual add #2
        out = self.pool(out)
        out = out.view(out.size(0), -1)  # flatten to (batch, 512)
        return self.linear(out)
def CustomResNet():
    """Factory: build the custom ResNet from BasicBlock units."""
    network = ResNet(BasicBlock)
    return network
| [
"torch.nn.BatchNorm2d",
"torch.nn.ReLU",
"torch.nn.Conv2d",
"torch.nn.MaxPool2d",
"torch.nn.Linear"
] | [((216, 302), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_planes', 'planes'], {'kernel_size': '(3)', 'stride': 'stride', 'padding': '(1)', 'bias': '(False)'}), '(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=\n False)\n', (225, 302), True, 'import torch.nn as nn\n'), ((330, 352), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['planes'], {}), '(planes)\n', (344, 352), True, 'import torch.nn as nn\n'), ((374, 447), 'torch.nn.Conv2d', 'nn.Conv2d', (['planes', 'planes'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)', 'bias': '(False)'}), '(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)\n', (383, 447), True, 'import torch.nn as nn\n'), ((498, 520), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['planes'], {}), '(planes)\n', (512, 520), True, 'import torch.nn as nn\n'), ((541, 550), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (548, 550), True, 'import torch.nn as nn\n'), ((1831, 1849), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(4)', '(4)'], {}), '(4, 4)\n', (1843, 1849), True, 'import torch.nn as nn\n'), ((1872, 1902), 'torch.nn.Linear', 'nn.Linear', (['(512)', '(10)'], {'bias': '(False)'}), '(512, 10, bias=False)\n', (1881, 1902), True, 'import torch.nn as nn\n'), ((881, 945), 'torch.nn.Conv2d', 'nn.Conv2d', (['(3)', '(64)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)', 'bias': '(False)'}), '(3, 64, kernel_size=3, stride=1, padding=1, bias=False)\n', (890, 945), True, 'import torch.nn as nn\n'), ((959, 977), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(64)'], {}), '(64)\n', (973, 977), True, 'import torch.nn as nn\n'), ((991, 1000), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (998, 1000), True, 'import torch.nn as nn\n'), ((1070, 1136), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(128)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)', 'bias': '(False)'}), '(64, 128, kernel_size=3, stride=1, padding=1, bias=False)\n', (1079, 1136), True, 'import torch.nn as nn\n'), ((1150, 1168), 'torch.nn.MaxPool2d', 
'nn.MaxPool2d', (['(2)', '(2)'], {}), '(2, 2)\n', (1162, 1168), True, 'import torch.nn as nn\n'), ((1181, 1200), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(128)'], {}), '(128)\n', (1195, 1200), True, 'import torch.nn as nn\n'), ((1214, 1223), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1221, 1223), True, 'import torch.nn as nn\n'), ((1351, 1418), 'torch.nn.Conv2d', 'nn.Conv2d', (['(128)', '(256)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)', 'bias': '(False)'}), '(128, 256, kernel_size=3, stride=1, padding=1, bias=False)\n', (1360, 1418), True, 'import torch.nn as nn\n'), ((1432, 1450), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)', '(2)'], {}), '(2, 2)\n', (1444, 1450), True, 'import torch.nn as nn\n'), ((1463, 1482), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(256)'], {}), '(256)\n', (1477, 1482), True, 'import torch.nn as nn\n'), ((1496, 1505), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1503, 1505), True, 'import torch.nn as nn\n'), ((1582, 1649), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(512)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)', 'bias': '(False)'}), '(256, 512, kernel_size=3, stride=1, padding=1, bias=False)\n', (1591, 1649), True, 'import torch.nn as nn\n'), ((1663, 1681), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)', '(2)'], {}), '(2, 2)\n', (1675, 1681), True, 'import torch.nn as nn\n'), ((1694, 1713), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(512)'], {}), '(512)\n', (1708, 1713), True, 'import torch.nn as nn\n'), ((1727, 1736), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1734, 1736), True, 'import torch.nn as nn\n')] |
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
"""
Detectron2 training script with a plain training loop.
This script reads a given config file and runs the training or evaluation.
It is an entry point that is able to train standard models in detectron2.
In order to let one script support training of many models,
this script contains logic that is specific to these built-in models and therefore
may not be suitable for your own project.
For example, your research project perhaps only needs a single "evaluator".
Therefore, we recommend you to use detectron2 as a library and take
this file as an example of how to use the library.
You may want to write your own script with your datasets and other customizations.
Compared to "train_net.py", this script supports fewer default features.
It also includes fewer abstraction, therefore is easier to add custom logic.
"""
import logging
import os
import time
import datetime
from collections import OrderedDict
import torch
import torch.nn as nn
from torch.nn.parallel import DistributedDataParallel
import detectron2.utils.comm as comm
from detectron2.utils.comm import get_world_size
from detectron2.utils.logger import log_every_n_seconds
from detectron2.checkpoint import DetectionCheckpointer, PeriodicCheckpointer
from detectron2.config import get_cfg
from detectron2.data import (
MetadataCatalog,
build_detection_test_loader,
build_detection_train_loader,
)
from detectron2.data.common import DatasetFromList
from detectron2.data.datasets import register_coco_instances
from detectron2.engine import default_argument_parser, default_setup, launch
from detectron2.evaluation import (
CityscapesInstanceEvaluator,
CityscapesSemSegEvaluator,
COCOEvaluator,
COCOPanopticEvaluator,
DatasetEvaluators,
LVISEvaluator,
PascalVOCDetectionEvaluator,
SemSegEvaluator,
# inference_on_dataset,
print_csv_format,
inference_context,
)
from detectron2.modeling import build_model
from detectron2.layers import route_func
from detectron2.solver import build_lr_scheduler, build_optimizer
from detectron2.utils.events import (
CommonMetricPrinter,
EventStorage,
JSONWriter,
TensorboardXWriter,
)
from network import MyNetwork
logger = logging.getLogger("detectron2")
def get_evaluator(cfg, dataset_name, output_folder=None):
    """
    Create evaluator(s) for a given dataset, keyed off the dataset's
    "evaluator_type" metadata in the MetadataCatalog.

    For your own dataset it is usually simpler to construct an evaluator
    directly in your script instead of relying on this dispatch.
    """
    if output_folder is None:
        output_folder = os.path.join(cfg.OUTPUT_DIR, "inference")
    evaluator_type = MetadataCatalog.get(dataset_name).evaluator_type

    # Dataset families served by exactly one evaluator return directly.
    if evaluator_type == "cityscapes_instance":
        assert (
            torch.cuda.device_count() >= comm.get_rank()
        ), "CityscapesEvaluator currently do not work with multiple machines."
        return CityscapesInstanceEvaluator(dataset_name)
    if evaluator_type == "cityscapes_sem_seg":
        assert (
            torch.cuda.device_count() >= comm.get_rank()
        ), "CityscapesEvaluator currently do not work with multiple machines."
        return CityscapesSemSegEvaluator(dataset_name)
    if evaluator_type == "pascal_voc":
        return PascalVOCDetectionEvaluator(dataset_name)
    if evaluator_type == "lvis":
        return LVISEvaluator(dataset_name, cfg, True, output_folder)

    # COCO-style types may combine several evaluators.
    evaluators = []
    if evaluator_type in ("sem_seg", "coco_panoptic_seg"):
        evaluators.append(
            SemSegEvaluator(
                dataset_name,
                distributed=True,
                output_dir=output_folder,
            )
        )
    if evaluator_type in ("coco", "coco_panoptic_seg"):
        evaluators.append(COCOEvaluator(dataset_name, output_dir=output_folder))
    if evaluator_type == "coco_panoptic_seg":
        evaluators.append(COCOPanopticEvaluator(dataset_name, output_folder))
    if not evaluators:
        raise NotImplementedError(
            "no Evaluator for the dataset {} with the type {}".format(dataset_name, evaluator_type)
        )
    if len(evaluators) == 1:
        return evaluators[0]
    return DatasetEvaluators(evaluators)
def inference_on_dataset(model, data_loader, evaluator):
    """
    Run model on the data_loader and evaluate the metrics with evaluator.
    Also benchmark the inference speed of `model.forward` accurately.
    The model will be used in eval mode.

    Unlike the stock detectron2 version, each input is fed together with a
    precomputed routing weight loaded from "routing_weights.pth" (one row
    per image, consumed in data-loader order).

    Args:
        model (nn.Module): a module which accepts an object from
            `data_loader` and returns some outputs. It will be temporarily set to `eval` mode.
            If you wish to evaluate a model in `training` mode instead, you can
            wrap the given model and override its behavior of `.eval()` and `.train()`.
        data_loader: an iterable object with a length.
            The elements it generates will be the inputs to the model.
        evaluator (DatasetEvaluator): the evaluator to run. Use `None` if you only want
            to benchmark, but don't want to do any evaluation.
    Returns:
        The return value of `evaluator.evaluate()`
    """
    num_devices = get_world_size()
    logger = logging.getLogger(__name__)
    logger.info("Start inference on {} images".format(len(data_loader)))
    total = len(data_loader)  # inference data loader must have a fixed length
    if evaluator is None:
        # create a no-op evaluator
        evaluator = DatasetEvaluators([])
    evaluator.reset()
    # Exclude the first few iterations from timing: they pay one-off
    # warm-up costs (CUDA context, cudnn autotune, lazy allocations).
    num_warmup = min(5, total - 1)
    start_time = time.perf_counter()
    total_compute_time = 0
    # Per-image routing weights saved earlier (see do_train).
    routing_weights = torch.load("routing_weights.pth")
    with inference_context(model), torch.no_grad():
        for idx, inputs in enumerate(data_loader):
            if idx == num_warmup:
                # Warm-up done: restart the clock.
                start_time = time.perf_counter()
                total_compute_time = 0
            start_compute_time = time.perf_counter()
            # Split the routing vector into three chunks (the first two are
            # 8-dim) — presumably one chunk per routed stage; TODO confirm.
            routing_weight = routing_weights[idx].unsqueeze(0)
            routing_weight = [routing_weight[:, :8],
                              routing_weight[:, 8:16],
                              routing_weight[:, 16:]]
            outputs = model(inputs, routing_weight)
            if torch.cuda.is_available():
                # Wait for async CUDA kernels so the timing is accurate.
                torch.cuda.synchronize()
            total_compute_time += time.perf_counter() - start_compute_time
            evaluator.process(inputs, outputs)
            iters_after_start = idx + 1 - num_warmup * int(idx >= num_warmup)
            seconds_per_img = total_compute_time / iters_after_start
            # Log progress/ETA once past warm-up, or immediately if slow.
            if idx >= num_warmup * 2 or seconds_per_img > 5:
                total_seconds_per_img = (time.perf_counter() - start_time) / iters_after_start
                eta = datetime.timedelta(seconds=int(total_seconds_per_img * (total - idx - 1)))
                log_every_n_seconds(
                    logging.INFO,
                    "Inference done {}/{}. {:.4f} s / img. ETA={}".format(
                        idx + 1, total, seconds_per_img, str(eta)
                    ),
                    n=5,
                )
    # Measure the time only for this worker (before the synchronization barrier)
    total_time = time.perf_counter() - start_time
    total_time_str = str(datetime.timedelta(seconds=total_time))
    # NOTE this format is parsed by grep
    logger.info(
        "Total inference time: {} ({:.6f} s / img per device, on {} devices)".format(
            total_time_str, total_time / (total - num_warmup), num_devices
        )
    )
    total_compute_time_str = str(datetime.timedelta(seconds=int(total_compute_time)))
    logger.info(
        "Total inference pure compute time: {} ({:.6f} s / img per device, on {} devices)".format(
            total_compute_time_str, total_compute_time / (total - num_warmup), num_devices
        )
    )
    results = evaluator.evaluate()
    # An evaluator may return None when not in main process.
    # Replace it by an empty dict instead to make it easier for downstream code to handle
    if results is None:
        results = {}
    return results
def do_test(cfg, model):
    """Run inference and evaluation on every dataset in cfg.DATASETS.TEST.

    Returns a dict mapping dataset name -> evaluation results, collapsed
    to the bare results dict when only one test dataset is configured.
    """
    results = OrderedDict()
    for dataset_name in cfg.DATASETS.TEST:
        loader = build_detection_test_loader(cfg, dataset_name)
        out_dir = os.path.join(cfg.OUTPUT_DIR, "inference", dataset_name)
        evaluator = get_evaluator(cfg, dataset_name, out_dir)
        dataset_results = inference_on_dataset(model, loader, evaluator)
        results[dataset_name] = dataset_results
        if comm.is_main_process():
            logger.info("Evaluation results for {} in csv format:".format(dataset_name))
            print_csv_format(dataset_results)
    if len(results) == 1:
        results = list(results.values())[0]
    return results
def do_train(cfg, model, resume=False):
    """Solve per-image routing weights over the training set and save them
    to "routing_weights.pth".

    NOTE(review): the optimizer is constructed but `optimizer.step()` is
    commented out, so no parameters are actually updated here — the loop
    only runs forward/backward passes and collects the routing weights
    the model returns. The `resume` flag is currently unused.
    """
    model.train()
    model_weights = torch.load(cfg.MODEL.WEIGHTS)
    if "model" in model_weights:
        model_weights = model_weights["model"]
    # strict=False tolerates missing/unexpected keys; run with strict=True
    # once to verify the checkpoint actually matches the model.
    model.load_state_dict(model_weights, strict=False)
    assert cfg.SOLVER.IMS_PER_BATCH == 1, f"should set batchsize=1"
    # Sequential sampler over a hard-coded dataset size of 1725 images —
    # TODO confirm this matches the registered training set.
    sampler = torch.utils.data.sampler.SequentialSampler(range(1725))
    data_loader = build_detection_train_loader(cfg, sampler=sampler, aspect_ratio_grouping=False)
    num_images = 1725
    # Optimize only the routing-function parameters.
    params = []
    for m in model.modules():
        if isinstance(m, route_func):
            print("found")
            params = params + list(m.parameters())
    optimizer = torch.optim.SGD(params, lr=cfg.SOLVER.BASE_LR,
                    momentum=cfg.SOLVER.MOMENTUM, weight_decay=0)
    logger.info("Starting solving optimized routing weights")
    all_routing_weights = []
    with EventStorage(start_iter=0) as storage:
        for data, iteration in zip(data_loader, range(num_images)):
            storage.iter = iteration
            print(iteration)
            # range(1): a single inner iteration per image; the bound can
            # be raised to take several solver passes per image.
            for _ in range(1):
                loss_dict, routing_weights = model(data)
                losses = sum(loss_dict.values())
                assert torch.isfinite(losses).all(), loss_dict
                # Reduce losses across workers for logging only.
                loss_dict_reduced = {k: v.item() for k, v in comm.reduce_dict(loss_dict).items()}
                losses_reduced = sum(loss for loss in loss_dict_reduced.values())
                if comm.is_main_process():
                    storage.put_scalars(total_loss=losses_reduced, **loss_dict_reduced)
                optimizer.zero_grad()
                losses.backward()
                # optimizer.step()
                print(losses.item())
            all_routing_weights.append(routing_weights)
            print(routing_weights.shape)
    # Stack every per-image routing weight and persist for inference.
    routing_weights = torch.cat(all_routing_weights).cpu()
    torch.save(routing_weights, "routing_weights.pth")
    return routing_weights
def setup(args):
    """
    Create configs and perform basic setups.

    Registers the project's COCO-format datasets, loads the local
    "configs.yaml" (the config file must not be supplied on the command
    line), applies CLI overrides, and freezes the config.
    """
    dataset_specs = [
        ("domain", "domain/annotations.json"),
        ("domain_train", "domain/train_annotations.json"),
        ("domain_test", "domain/test_annotations.json"),
        ("routine_train", "domain/train_routine_5fc766.json"),
        ("routine_test", "domain/test_routine_5fc877.json"),
    ]
    for name, json_file in dataset_specs:
        register_coco_instances(name, {}, json_file, "domain")
    cfg = get_cfg()
    assert args.config_file == "", f"This code automatically uses the config file in this directory"
    args.config_file = "configs.yaml"
    cfg.merge_from_file(args.config_file)
    cfg.merge_from_list(args.opts)
    cfg.freeze()
    default_setup(
        cfg, args
    )  # if you don't like any of the default setup, write your own setup code
    return cfg
def main(args):
    """Per-process entry point: build the model, then evaluate or train."""
    cfg = setup(args)
    model = MyNetwork(cfg)
    model.to(torch.device(cfg.MODEL.DEVICE))
    logger.info("Model:\n{}".format(model))
    if args.eval_only:
        checkpointer = DetectionCheckpointer(model, save_dir=cfg.OUTPUT_DIR)
        checkpointer.resume_or_load(cfg.MODEL.WEIGHTS, resume=args.resume)
        return do_test(cfg, model)
    # Wrap the model for multi-GPU/multi-machine runs.
    if comm.get_world_size() > 1:
        model = DistributedDataParallel(
            model, device_ids=[comm.get_local_rank()], broadcast_buffers=False
        )
    return do_train(cfg, model, resume=args.resume)
if __name__ == "__main__":
    # Parse the standard detectron2 command-line arguments.
    args = default_argument_parser().parse_args()
    print("Command Line Args:", args)
    # Launch main() once per GPU/machine (handles process-group setup).
    launch(
        main,
        args.num_gpus,
        num_machines=args.num_machines,
        machine_rank=args.machine_rank,
        dist_url=args.dist_url,
        args=(args,),
    )
| [
"logging.getLogger",
"detectron2.evaluation.print_csv_format",
"detectron2.evaluation.COCOPanopticEvaluator",
"torch.cuda.device_count",
"torch.cuda.synchronize",
"torch.cuda.is_available",
"datetime.timedelta",
"detectron2.utils.events.EventStorage",
"detectron2.evaluation.COCOEvaluator",
"detect... | [((2282, 2313), 'logging.getLogger', 'logging.getLogger', (['"""detectron2"""'], {}), "('detectron2')\n", (2299, 2313), False, 'import logging\n'), ((4348, 4381), 'detectron2.evaluation.DatasetEvaluators', 'DatasetEvaluators', (['evaluator_list'], {}), '(evaluator_list)\n', (4365, 4381), False, 'from detectron2.evaluation import CityscapesInstanceEvaluator, CityscapesSemSegEvaluator, COCOEvaluator, COCOPanopticEvaluator, DatasetEvaluators, LVISEvaluator, PascalVOCDetectionEvaluator, SemSegEvaluator, print_csv_format, inference_context\n'), ((5342, 5358), 'detectron2.utils.comm.get_world_size', 'get_world_size', ([], {}), '()\n', (5356, 5358), False, 'from detectron2.utils.comm import get_world_size\n'), ((5372, 5399), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (5389, 5399), False, 'import logging\n'), ((5731, 5750), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (5748, 5750), False, 'import time\n'), ((5800, 5833), 'torch.load', 'torch.load', (['"""routing_weights.pth"""'], {}), "('routing_weights.pth')\n", (5810, 5833), False, 'import torch\n'), ((8321, 8334), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (8332, 8334), False, 'from collections import OrderedDict\n'), ((9026, 9055), 'torch.load', 'torch.load', (['cfg.MODEL.WEIGHTS'], {}), '(cfg.MODEL.WEIGHTS)\n', (9036, 9055), False, 'import torch\n'), ((9411, 9490), 'detectron2.data.build_detection_train_loader', 'build_detection_train_loader', (['cfg'], {'sampler': 'sampler', 'aspect_ratio_grouping': '(False)'}), '(cfg, sampler=sampler, aspect_ratio_grouping=False)\n', (9439, 9490), False, 'from detectron2.data import MetadataCatalog, build_detection_test_loader, build_detection_train_loader\n'), ((9692, 9788), 'torch.optim.SGD', 'torch.optim.SGD', (['params'], {'lr': 'cfg.SOLVER.BASE_LR', 'momentum': 'cfg.SOLVER.MOMENTUM', 'weight_decay': '(0)'}), '(params, lr=cfg.SOLVER.BASE_LR, momentum=cfg.SOLVER.MOMENTUM,\n weight_decay=0)\n', (9707, 
9788), False, 'import torch\n'), ((10914, 10964), 'torch.save', 'torch.save', (['routing_weights', '"""routing_weights.pth"""'], {}), "(routing_weights, 'routing_weights.pth')\n", (10924, 10964), False, 'import torch\n'), ((11076, 11150), 'detectron2.data.datasets.register_coco_instances', 'register_coco_instances', (['"""domain"""', '{}', '"""domain/annotations.json"""', '"""domain"""'], {}), "('domain', {}, 'domain/annotations.json', 'domain')\n", (11099, 11150), False, 'from detectron2.data.datasets import register_coco_instances\n'), ((11155, 11245), 'detectron2.data.datasets.register_coco_instances', 'register_coco_instances', (['"""domain_train"""', '{}', '"""domain/train_annotations.json"""', '"""domain"""'], {}), "('domain_train', {}, 'domain/train_annotations.json',\n 'domain')\n", (11178, 11245), False, 'from detectron2.data.datasets import register_coco_instances\n'), ((11246, 11334), 'detectron2.data.datasets.register_coco_instances', 'register_coco_instances', (['"""domain_test"""', '{}', '"""domain/test_annotations.json"""', '"""domain"""'], {}), "('domain_test', {}, 'domain/test_annotations.json',\n 'domain')\n", (11269, 11334), False, 'from detectron2.data.datasets import register_coco_instances\n'), ((11335, 11429), 'detectron2.data.datasets.register_coco_instances', 'register_coco_instances', (['"""routine_train"""', '{}', '"""domain/train_routine_5fc766.json"""', '"""domain"""'], {}), "('routine_train', {},\n 'domain/train_routine_5fc766.json', 'domain')\n", (11358, 11429), False, 'from detectron2.data.datasets import register_coco_instances\n'), ((11430, 11522), 'detectron2.data.datasets.register_coco_instances', 'register_coco_instances', (['"""routine_test"""', '{}', '"""domain/test_routine_5fc877.json"""', '"""domain"""'], {}), "('routine_test', {},\n 'domain/test_routine_5fc877.json', 'domain')\n", (11453, 11522), False, 'from detectron2.data.datasets import register_coco_instances\n'), ((11529, 11538), 'detectron2.config.get_cfg', 
'get_cfg', ([], {}), '()\n', (11536, 11538), False, 'from detectron2.config import get_cfg\n'), ((11776, 11800), 'detectron2.engine.default_setup', 'default_setup', (['cfg', 'args'], {}), '(cfg, args)\n', (11789, 11800), False, 'from detectron2.engine import default_argument_parser, default_setup, launch\n'), ((11956, 11970), 'network.MyNetwork', 'MyNetwork', (['cfg'], {}), '(cfg)\n', (11965, 11970), False, 'from network import MyNetwork\n'), ((12625, 12759), 'detectron2.engine.launch', 'launch', (['main', 'args.num_gpus'], {'num_machines': 'args.num_machines', 'machine_rank': 'args.machine_rank', 'dist_url': 'args.dist_url', 'args': '(args,)'}), '(main, args.num_gpus, num_machines=args.num_machines, machine_rank=\n args.machine_rank, dist_url=args.dist_url, args=(args,))\n', (12631, 12759), False, 'from detectron2.engine import default_argument_parser, default_setup, launch\n'), ((2728, 2769), 'os.path.join', 'os.path.join', (['cfg.OUTPUT_DIR', '"""inference"""'], {}), "(cfg.OUTPUT_DIR, 'inference')\n", (2740, 2769), False, 'import os\n'), ((2815, 2848), 'detectron2.data.MetadataCatalog.get', 'MetadataCatalog.get', (['dataset_name'], {}), '(dataset_name)\n', (2834, 2848), False, 'from detectron2.data import MetadataCatalog, build_detection_test_loader, build_detection_train_loader\n'), ((3598, 3639), 'detectron2.evaluation.CityscapesInstanceEvaluator', 'CityscapesInstanceEvaluator', (['dataset_name'], {}), '(dataset_name)\n', (3625, 3639), False, 'from detectron2.evaluation import CityscapesInstanceEvaluator, CityscapesSemSegEvaluator, COCOEvaluator, COCOPanopticEvaluator, DatasetEvaluators, LVISEvaluator, PascalVOCDetectionEvaluator, SemSegEvaluator, print_csv_format, inference_context\n'), ((3855, 3894), 'detectron2.evaluation.CityscapesSemSegEvaluator', 'CityscapesSemSegEvaluator', (['dataset_name'], {}), '(dataset_name)\n', (3880, 3894), False, 'from detectron2.evaluation import CityscapesInstanceEvaluator, CityscapesSemSegEvaluator, COCOEvaluator, 
COCOPanopticEvaluator, DatasetEvaluators, LVISEvaluator, PascalVOCDetectionEvaluator, SemSegEvaluator, print_csv_format, inference_context\n'), ((3949, 3990), 'detectron2.evaluation.PascalVOCDetectionEvaluator', 'PascalVOCDetectionEvaluator', (['dataset_name'], {}), '(dataset_name)\n', (3976, 3990), False, 'from detectron2.evaluation import CityscapesInstanceEvaluator, CityscapesSemSegEvaluator, COCOEvaluator, COCOPanopticEvaluator, DatasetEvaluators, LVISEvaluator, PascalVOCDetectionEvaluator, SemSegEvaluator, print_csv_format, inference_context\n'), ((4039, 4092), 'detectron2.evaluation.LVISEvaluator', 'LVISEvaluator', (['dataset_name', 'cfg', '(True)', 'output_folder'], {}), '(dataset_name, cfg, True, output_folder)\n', (4052, 4092), False, 'from detectron2.evaluation import CityscapesInstanceEvaluator, CityscapesSemSegEvaluator, COCOEvaluator, COCOPanopticEvaluator, DatasetEvaluators, LVISEvaluator, PascalVOCDetectionEvaluator, SemSegEvaluator, print_csv_format, inference_context\n'), ((5634, 5655), 'detectron2.evaluation.DatasetEvaluators', 'DatasetEvaluators', (['[]'], {}), '([])\n', (5651, 5655), False, 'from detectron2.evaluation import CityscapesInstanceEvaluator, CityscapesSemSegEvaluator, COCOEvaluator, COCOPanopticEvaluator, DatasetEvaluators, LVISEvaluator, PascalVOCDetectionEvaluator, SemSegEvaluator, print_csv_format, inference_context\n'), ((5843, 5867), 'detectron2.evaluation.inference_context', 'inference_context', (['model'], {}), '(model)\n', (5860, 5867), False, 'from detectron2.evaluation import CityscapesInstanceEvaluator, CityscapesSemSegEvaluator, COCOEvaluator, COCOPanopticEvaluator, DatasetEvaluators, LVISEvaluator, PascalVOCDetectionEvaluator, SemSegEvaluator, print_csv_format, inference_context\n'), ((5869, 5884), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (5882, 5884), False, 'import torch\n'), ((7387, 7406), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (7404, 7406), False, 'import time\n'), ((7445, 7483), 
'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'total_time'}), '(seconds=total_time)\n', (7463, 7483), False, 'import datetime\n'), ((8400, 8446), 'detectron2.data.build_detection_test_loader', 'build_detection_test_loader', (['cfg', 'dataset_name'], {}), '(cfg, dataset_name)\n', (8427, 8446), False, 'from detectron2.data import MetadataCatalog, build_detection_test_loader, build_detection_train_loader\n'), ((8704, 8726), 'detectron2.utils.comm.is_main_process', 'comm.is_main_process', ([], {}), '()\n', (8724, 8726), True, 'import detectron2.utils.comm as comm\n'), ((9919, 9945), 'detectron2.utils.events.EventStorage', 'EventStorage', ([], {'start_iter': '(0)'}), '(start_iter=0)\n', (9931, 9945), False, 'from detectron2.utils.events import CommonMetricPrinter, EventStorage, JSONWriter, TensorboardXWriter\n'), ((11984, 12014), 'torch.device', 'torch.device', (['cfg.MODEL.DEVICE'], {}), '(cfg.MODEL.DEVICE)\n', (11996, 12014), False, 'import torch\n'), ((12275, 12296), 'detectron2.utils.comm.get_world_size', 'comm.get_world_size', ([], {}), '()\n', (12294, 12296), True, 'import detectron2.utils.comm as comm\n'), ((2966, 3039), 'detectron2.evaluation.SemSegEvaluator', 'SemSegEvaluator', (['dataset_name'], {'distributed': '(True)', 'output_dir': 'output_folder'}), '(dataset_name, distributed=True, output_dir=output_folder)\n', (2981, 3039), False, 'from detectron2.evaluation import CityscapesInstanceEvaluator, CityscapesSemSegEvaluator, COCOEvaluator, COCOPanopticEvaluator, DatasetEvaluators, LVISEvaluator, PascalVOCDetectionEvaluator, SemSegEvaluator, print_csv_format, inference_context\n'), ((3199, 3252), 'detectron2.evaluation.COCOEvaluator', 'COCOEvaluator', (['dataset_name'], {'output_dir': 'output_folder'}), '(dataset_name, output_dir=output_folder)\n', (3212, 3252), False, 'from detectron2.evaluation import CityscapesInstanceEvaluator, CityscapesSemSegEvaluator, COCOEvaluator, COCOPanopticEvaluator, DatasetEvaluators, LVISEvaluator, 
PascalVOCDetectionEvaluator, SemSegEvaluator, print_csv_format, inference_context\n'), ((3330, 3380), 'detectron2.evaluation.COCOPanopticEvaluator', 'COCOPanopticEvaluator', (['dataset_name', 'output_folder'], {}), '(dataset_name, output_folder)\n', (3351, 3380), False, 'from detectron2.evaluation import CityscapesInstanceEvaluator, CityscapesSemSegEvaluator, COCOEvaluator, COCOPanopticEvaluator, DatasetEvaluators, LVISEvaluator, PascalVOCDetectionEvaluator, SemSegEvaluator, print_csv_format, inference_context\n'), ((3459, 3484), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (3482, 3484), False, 'import torch\n'), ((3488, 3503), 'detectron2.utils.comm.get_rank', 'comm.get_rank', ([], {}), '()\n', (3501, 3503), True, 'import detectron2.utils.comm as comm\n'), ((3716, 3741), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (3739, 3741), False, 'import torch\n'), ((3745, 3760), 'detectron2.utils.comm.get_rank', 'comm.get_rank', ([], {}), '()\n', (3758, 3760), True, 'import detectron2.utils.comm as comm\n'), ((6093, 6112), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (6110, 6112), False, 'import time\n'), ((6419, 6444), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (6442, 6444), False, 'import torch\n'), ((8513, 8568), 'os.path.join', 'os.path.join', (['cfg.OUTPUT_DIR', '"""inference"""', 'dataset_name'], {}), "(cfg.OUTPUT_DIR, 'inference', dataset_name)\n", (8525, 8568), False, 'import os\n'), ((8829, 8856), 'detectron2.evaluation.print_csv_format', 'print_csv_format', (['results_i'], {}), '(results_i)\n', (8845, 8856), False, 'from detectron2.evaluation import CityscapesInstanceEvaluator, CityscapesSemSegEvaluator, COCOEvaluator, COCOPanopticEvaluator, DatasetEvaluators, LVISEvaluator, PascalVOCDetectionEvaluator, SemSegEvaluator, print_csv_format, inference_context\n'), ((10873, 10903), 'torch.cat', 'torch.cat', (['all_routing_weights'], {}), '(all_routing_weights)\n', (10882, 
10903), False, 'import torch\n'), ((12544, 12569), 'detectron2.engine.default_argument_parser', 'default_argument_parser', ([], {}), '()\n', (12567, 12569), False, 'from detectron2.engine import default_argument_parser, default_setup, launch\n'), ((6000, 6019), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (6017, 6019), False, 'import time\n'), ((6462, 6486), 'torch.cuda.synchronize', 'torch.cuda.synchronize', ([], {}), '()\n', (6484, 6486), False, 'import torch\n'), ((6521, 6540), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (6538, 6540), False, 'import time\n'), ((10492, 10514), 'detectron2.utils.comm.is_main_process', 'comm.is_main_process', ([], {}), '()\n', (10512, 10514), True, 'import detectron2.utils.comm as comm\n'), ((12091, 12144), 'detectron2.checkpoint.DetectionCheckpointer', 'DetectionCheckpointer', (['model'], {'save_dir': 'cfg.OUTPUT_DIR'}), '(model, save_dir=cfg.OUTPUT_DIR)\n', (12112, 12144), False, 'from detectron2.checkpoint import DetectionCheckpointer, PeriodicCheckpointer\n'), ((12393, 12414), 'detectron2.utils.comm.get_local_rank', 'comm.get_local_rank', ([], {}), '()\n', (12412, 12414), True, 'import detectron2.utils.comm as comm\n'), ((6859, 6878), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (6876, 6878), False, 'import time\n'), ((10252, 10274), 'torch.isfinite', 'torch.isfinite', (['losses'], {}), '(losses)\n', (10266, 10274), False, 'import torch\n'), ((10354, 10381), 'detectron2.utils.comm.reduce_dict', 'comm.reduce_dict', (['loss_dict'], {}), '(loss_dict)\n', (10370, 10381), True, 'import detectron2.utils.comm as comm\n')] |
"""
Author: <NAME>, Rice ECE (nkg2 at rice.edu)
Code for converting ocean drifter data from Schaub's format to ours.
"""
import h5py
from trajectory_analysis.synthetic_data_gen import *
dataset_folder = 'buoy'
f = h5py.File('dataBuoys.jld2', 'r')
print(f.keys())
### Load arrays from file
## Graph
# elist (edge list)
edge_list = f['elist'][:] - 1 # 1-index -> 0-index
# tlist (triangle list)
face_list = f['tlist'][:] - 1
# NodeToHex (map node id <-> hex coords) # nodes are 1-indexed in data source
node_hex_map = [tuple(f[x][()]) for x in f['NodeToHex'][:]]
hex_node_map = {tuple(hex_coords): node for node, hex_coords in enumerate(node_hex_map)}
## trajectories
# coords
hex_coords = np.array([tuple(x) for x in f['HexcentersXY'][()]])
# nodes
traj_nodes = [[f[x][()] - 1 for x in f[ref][()]] for ref in f['TrajectoriesNodes'][:]]
#### Convert to SCoNe dataset
# generate graph + faces
G = nx.Graph()
G.add_edges_from([(edge_list[0][i], edge_list[1][i]) for i in range(len(edge_list[0]))])
V, E = np.array(sorted(G.nodes)), np.array([sorted(x) for x in sorted(G.edges)])
faces = np.array(sorted([[face_list[j][i] for j in range(3)] for i in range(len(face_list[0]))]))
edge_to_idx = {tuple(e): i for i, e in enumerate(E)}
coords = hex_coords
valid_idxs = np.arange(len(coords))
# B1, B2
B1, B2 = incidence_matrices(G, V, E, faces, edge_to_idx)
# Trajectories
G_undir = G.to_undirected()
stripped_paths = strip_paths(traj_nodes)
paths = [path[-10:] for path in stripped_paths if len(path) >= 5]
# Print graph info
print(np.mean([len(G[i]) for i in V]))
print('# nodes: {}, # edges: {}, # faces: {}'.format(*B1.shape, B2.shape[1]))
print('# paths: {}, # paths with prefix length >= 3: {}'.format(len(traj_nodes), len(paths)))
rev_paths = [path[::-1] for path in paths]
# Save graph image to file
color_faces(G, V, coords, faces_from_B2(B2, E), filename='madagascar_graph_faces_paths.pdf', paths=[paths[1], paths[48], paths[125]])
# train / test masks
np.random.seed(1)
train_mask = np.asarray([1] * round(len(paths) * 0.8) + [0] * round(len(paths) * 0.2))
np.random.shuffle(train_mask)
test_mask = 1 - train_mask
max_degree = np.max([deg for n, deg in G_undir.degree()])
## Consolidate dataset
# forward
prefix_flows_1hop, targets_1hop, last_nodes_1hop, suffixes_1hop, \
prefix_flows_2hop, targets_2hop, last_nodes_2hop, suffixes_2hop = path_dataset(G_undir, E, edge_to_idx,
paths, max_degree, include_2hop=True,
truncate_paths=False)
# reversed
rev_prefix_flows_1hop, rev_targets_1hop, rev_last_nodes_1hop, rev_suffixes_1hop, \
rev_prefix_flows_2hop, rev_targets_2hop, rev_last_nodes_2hop, rev_suffixes_2hop = path_dataset(G_undir, E, edge_to_idx,
rev_paths, max_degree,
include_2hop=True,
truncate_paths=False)
dataset_1hop = [prefix_flows_1hop, B1, B2, targets_1hop, train_mask, test_mask, G_undir, last_nodes_1hop,
suffixes_1hop, rev_prefix_flows_1hop, rev_targets_1hop, rev_last_nodes_1hop, rev_suffixes_1hop]
dataset_2hop = [prefix_flows_2hop, B1, B2, targets_2hop, train_mask, test_mask, G_undir, last_nodes_2hop,
suffixes_2hop, rev_prefix_flows_2hop, rev_targets_2hop, rev_last_nodes_2hop, rev_suffixes_2hop]
print('Train samples:', sum(train_mask))
print('Test samples:', sum(test_mask))
### Save datasets
folder_1hop = '../trajectory_analysis/trajectory_data_1hop_' + dataset_folder
folder_2hop = '../trajectory_analysis/trajectory_data_2hop_' + dataset_folder
try:
os.mkdir(folder_1hop)
except:
pass
try:
os.mkdir(folder_2hop)
except:
pass
# Save files
filenames = (
'flows_in', 'B1', 'B2', 'targets', 'train_mask', 'test_mask', 'G_undir', 'last_nodes', 'target_nodes', 'rev_flows_in',
'rev_targets', 'rev_last_nodes', 'rev_target_nodes')
for arr_1hop, arr_2hop, filename in zip(dataset_1hop, dataset_2hop, filenames):
if filename == 'G_undir':
nx.readwrite.gpickle.write_gpickle(G_undir, os.path.join(folder_1hop, filename + '.pkl'))
nx.readwrite.gpickle.write_gpickle(G_undir, os.path.join(folder_2hop, filename + '.pkl'))
else:
np.save(os.path.join(folder_1hop, filename + '.npy'), arr_1hop)
np.save(os.path.join(folder_2hop, filename + '.npy'), arr_2hop)
# Save prefixes file
edge_set = set()
for path in paths:
for i in range(1, len(path)):
edge = tuple(sorted(path[i-1:i+1]))
edge_set.add(edge)
np.save(folder_1hop + '/prefixes.npy', [path[:-2] for path in paths])
| [
"h5py.File"
] | [((218, 250), 'h5py.File', 'h5py.File', (['"""dataBuoys.jld2"""', '"""r"""'], {}), "('dataBuoys.jld2', 'r')\n", (227, 250), False, 'import h5py\n')] |
import os
import csv

# Input poll data and output report locations (relative to the working dir).
pollresults = os.path.join(".", "raw_data", "election_data_1.csv")
output = os.path.join(".", "results.txt")

with open(pollresults, newline='') as polldata:
    pollreader = csv.reader(polldata, delimiter=",")
    # Consume the header row from the underlying file before iterating rows.
    firstline = polldata.readline()
    votes = 0
    poll_results = {}
    # Tally one vote per row, keyed by the candidate name in column 3.
    for row in pollreader:
        votes += 1
        candidate = row[2]
        if candidate in poll_results:
            poll_results[candidate] += 1
        else:
            poll_results[candidate] = 1

# Build (name, count, percentage rounded to 2 places) per candidate.
vote_results = []
for name, count in poll_results.items():
    vote_results.append((name, count, round(float(count / votes) * 100, 2)))

# Last candidate to reach the running maximum wins (later entry wins ties).
max_votes = 0
winner = ''
for entry in vote_results:
    if entry[1] >= max_votes:
        max_votes = entry[1]
        winner = entry[0]

with open(output, 'w') as resultsfile:
    resultsfile.write("Election Results\n")
    resultsfile.write("----------------\n")
    for result in vote_results:
        resultsfile.writelines(result[0] + ": " + str(result[2]) + "% (" + str(result[1]) + ")\n")
    resultsfile.write("----------------\n")
    resultsfile.write("Winner: " + winner)
with open(output, 'r') as readresults:
print(readresults.read()) | [
"os.path.join",
"csv.reader"
] | [((36, 88), 'os.path.join', 'os.path.join', (['"""."""', '"""raw_data"""', '"""election_data_1.csv"""'], {}), "('.', 'raw_data', 'election_data_1.csv')\n", (48, 88), False, 'import os\n'), ((96, 128), 'os.path.join', 'os.path.join', (['"""."""', '"""results.txt"""'], {}), "('.', 'results.txt')\n", (108, 128), False, 'import os\n'), ((197, 232), 'csv.reader', 'csv.reader', (['polldata'], {'delimiter': '""","""'}), "(polldata, delimiter=',')\n", (207, 232), False, 'import csv\n')] |
# !/usr/bin/env python
#
# dates.py
"""
Utilities for working with dates and times.
.. extras-require:: dates
:pyproject:
**Data:**
.. autosummary::
~domdf_python_tools.dates.months
~domdf_python_tools.dates.month_full_names
~domdf_python_tools.dates.month_short_names
"""
#
# Copyright © 2020 <NAME> <<EMAIL>>
#
# Parts of the docstrings based on the Python 3.8.2 Documentation
# Licensed under the Python Software Foundation License Version 2.
# Copyright © 2001-2020 Python Software Foundation. All rights reserved.
# Copyright © 2000 BeOpen.com. All rights reserved.
# Copyright © 1995-2000 Corporation for National Research Initiatives. All rights reserved.
# Copyright © 1991-1995 Stichting Mathematisch Centrum. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
#
# calc_easter from https://code.activestate.com/recipes/576517-calculate-easter-western-given-a-year/
# Copyright © 2008 <NAME>
# Licensed under the MIT License
#
# stdlib
import datetime
import sys
import typing
from collections import OrderedDict
from types import ModuleType
from typing import Optional, Union
__all__ = [
"current_tzinfo",
"set_timezone",
"utc_timestamp_to_datetime",
"months",
"parse_month",
"get_month_number",
"check_date",
"calc_easter",
"month_short_names",
"month_full_names",
]
def current_tzinfo() -> Optional[datetime.tzinfo]:
	"""
	Returns a tzinfo object for the current timezone.
	"""

	# Localise "now" to the platform timezone and hand back just its tzinfo.
	local_now = datetime.datetime.now().astimezone()
	return local_now.tzinfo  # pragma: no cover (hard to test)
#
# def datetime_to_utc_timestamp(datetime, current_tzinfo=None):
# """
# Convert a :class:`datetime.datetime` object to seconds since UNIX epoch, in UTC time
#
# :param datetime:
# :type datetime: :class:`datetime.datetime`
# :param current_tzinfo: A tzinfo object representing the current timezone.
# If None it will be inferred.
# :type current_tzinfo: :class:`datetime.tzinfo`
#
# :return: Timestamp in UTC timezone
# :rtype: float
# """
#
# return datetime.astimezone(current_tzinfo).timestamp()
#
def set_timezone(obj: datetime.datetime, tzinfo: datetime.tzinfo) -> datetime.datetime:
	"""
	Sets the timezone / tzinfo of the given :class:`datetime.datetime` object.

	This will not convert the time (i.e. the hours will stay the same).
	Use :meth:`datetime.datetime.astimezone` to accomplish that.

	:param obj:
	:param tzinfo:
	"""

	# replace() swaps the tzinfo attribute without touching the wall-clock time.
	rebased = obj.replace(tzinfo=tzinfo)
	return rebased
def utc_timestamp_to_datetime(
		utc_timestamp: Union[float, int],
		output_tz: Optional[datetime.tzinfo] = None,
		) -> datetime.datetime:
	"""
	Convert a UTC timestamp (seconds since the UNIX epoch) into a
	:class:`datetime.datetime`.

	With ``output_tz=None`` the result is expressed in the platform's local
	time, with the inferred local timezone attached. Otherwise the result is
	converted into ``output_tz``.

	:param utc_timestamp: The timestamp to convert to a datetime object
	:param output_tz: The timezone to output the datetime object for.
		If :py:obj:`None` it will be inferred.

	:return: The timestamp as a datetime object.

	:raises OverflowError: if the timestamp is out of the range
		of values supported by the platform C localtime() or gmtime() functions.
		It's common for this to be restricted to years in 1970 through 2038.
	"""

	converted = datetime.datetime.fromtimestamp(utc_timestamp, output_tz)
	# The extra astimezone() covers the output_tz=None case, turning the naive
	# local result into an aware one with the inferred local timezone.
	return converted.astimezone(output_tz)
# ``typing.OrderedDict[str, str]`` subscripting only exists on newer
# interpreters; fall back to the plain class on older ones.
if sys.version_info <= (3, 7, 2):  # pragma: no cover (py37+)
	MonthsType = OrderedDict
else:  # pragma: no cover (<py37)
	MonthsType = typing.OrderedDict[str, str]  # type: ignore  # noqa: TYP006

#: Mapping of 3-character shortcodes to full month names.
months: MonthsType = OrderedDict(
		Jan="January",
		Feb="February",
		Mar="March",
		Apr="April",
		May="May",
		Jun="June",
		Jul="July",
		Aug="August",
		Sep="September",
		Oct="October",
		Nov="November",
		Dec="December",
		)

month_short_names = tuple(months.keys())
"""
List of the short names for months in the Gregorian calendar.

.. versionadded:: 2.0.0
"""

month_full_names = tuple(months.values())
"""
List of the full names for months in the Gregorian calendar.

.. versionadded:: 2.0.0
"""
def parse_month(month: Union[str, int]) -> str:
	"""
	Converts an integer or shorthand month into the full month name.

	:param month: The month number or shorthand name

	:return: The full name of the month
	"""

	error_text = f"The given month ({month!r}) is not recognised."

	try:
		number = int(month)
	except ValueError:
		# Not numeric -- treat it as a (possibly abbreviated) month name.
		key = month.capitalize()[:3]  # type: ignore
		try:
			return months[key]
		except KeyError:
			raise ValueError(error_text)

	# Only get here if the int() conversion succeeded.
	if not 0 < number <= 12:
		raise ValueError(error_text)

	return list(months.values())[number - 1]
def get_month_number(month: Union[str, int]) -> int:
	"""
	Returns the number of the given month.

	If ``month`` is already a number between 1 and 12 it will be returned immediately.

	:param month: The month to convert to a number

	:return: The number of the month
	"""

	if isinstance(month, int):
		# Already numeric -- just validate the range.
		if not 0 < month <= 12:
			raise ValueError(f"The given month ({month!r}) is not recognised.")
		return month

	# Normalise to the full month name, then find its 1-based position.
	full_name = parse_month(month)
	return list(months.values()).index(full_name) + 1
def check_date(month: Union[str, int], day: int, leap_year: bool = True) -> bool:
	"""
	Returns :py:obj:`True` if the day number is valid for the given month.

	.. note::

		This function will return :py:obj:`True` for the 29th Feb.
		If you don't want this behaviour set ``leap_year`` to :py:obj:`False`.

	.. latex:vspace:: -10px

	:param month: The month to test.
	:param day: The day number to test.
	:param leap_year: Whether to return :py:obj:`True` for 29th Feb.
	"""

	day_number = int(day)
	month_number = get_month_number(month)
	# 2020 was a leap year and 2019 was not; probe whichever matches.
	probe_year = 2020 if leap_year else 2019

	try:
		datetime.date(probe_year, month_number, day_number)
	except ValueError:
		return False
	return True
def calc_easter(year: int) -> datetime.date:
	"""
	Returns the date of Easter in the given year.

	.. versionadded:: 1.4.0

	:param year:
	"""

	# Anonymous Gregorian ("Meeus/Jones/Butcher") computus.
	golden = year % 19
	century = year // 100
	remainder = year % 100
	epact = (19 * golden + century - century // 4 - ((century - (century + 8) // 25 + 1) // 3) + 15) % 30
	weekday = (32 + 2 * (century % 4) + 2 * (remainder // 4) - epact - (remainder % 4)) % 7
	offset = epact + weekday - 7 * ((golden + 11 * epact + 22 * weekday) // 451) + 114
	return datetime.date(year, offset // 31, offset % 31 + 1)
def get_utc_offset(
		tz: Union[datetime.tzinfo, str],
		date: Optional[datetime.datetime] = None,
		) -> Optional[datetime.timedelta]:
	"""
	Returns the offset between UTC and the requested timezone on the given date.

	If ``date`` is :py:obj:`None` then the current date is used.

	:param tz: ``pytz.timezone`` or a string representing the timezone
	:param date: The date to obtain the UTC offset for
	"""

	when = datetime.datetime.utcnow() if date is None else date

	timezone: Optional[datetime.tzinfo]
	if isinstance(tz, str):
		timezone = get_timezone(tz, when)
	else:
		timezone = tz  # pragma: no cover (hard to test)

	# Anchor the timestamp in UTC, shift into the target zone, read the offset.
	return when.replace(tzinfo=pytz.utc).astimezone(timezone).utcoffset()
def get_timezone(tz: str, date: Optional[datetime.datetime] = None) -> Optional[datetime.tzinfo]:
	"""
	Returns a localized ``pytz.timezone`` object for the given date.

	If ``date`` is :py:obj:`None` then the current date is used.

	.. latex:vspace:: -10px

	:param tz: A string representing a pytz timezone
	:param date: The date to obtain the timezone for
	"""

	if date is None:  # pragma: no cover (hard to test)
		date = datetime.datetime.utcnow()

	# pytz's localize() requires a naive datetime, so strip any tzinfo first.
	naive = date.replace(tzinfo=None)
	localised = pytz.timezone(tz).localize(naive)
	return localised.tzinfo
# Names that only work when pytz is installed.
_pytz_functions = ["get_utc_offset", "get_timezone"]

try:
	# 3rd party
	import pytz

	# pytz available: expose the timezone helpers as part of the public API.
	__all__.extend(_pytz_functions)

except ImportError as e:
	if __name__ == "__main__":
		# Run as a script: just warn that the pytz-backed helpers are unusable.
		# stdlib
		import warnings

		# this package
		from domdf_python_tools.words import word_join

		warnings.warn(
				f"""\
'{word_join(_pytz_functions)}' require pytz (https://pypi.org/project/pytz/), but it could not be imported.

The error was: {e}.
"""
				)
	else:
		# Imported as a module: replace this module in sys.modules with a proxy
		# that raises a helpful ImportError only when a pytz-backed name is
		# actually accessed, while delegating everything else.
		_actual_module = sys.modules[__name__]

		class SelfWrapper(ModuleType):

			def __getattr__(self, name):
				if name in _pytz_functions:
					raise ImportError(
							f"{name!r} requires pytz (https://pypi.org/project/pytz/), but it could not be imported."
							)
				else:
					return getattr(_actual_module, name)

		sys.modules[__name__] = SelfWrapper(__name__)
| [
"pytz.timezone",
"collections.OrderedDict",
"datetime.datetime.fromtimestamp",
"datetime.datetime.utcnow",
"domdf_python_tools.words.word_join",
"datetime.datetime.now",
"datetime.date"
] | [((4909, 5100), 'collections.OrderedDict', 'OrderedDict', ([], {'Jan': '"""January"""', 'Feb': '"""February"""', 'Mar': '"""March"""', 'Apr': '"""April"""', 'May': '"""May"""', 'Jun': '"""June"""', 'Jul': '"""July"""', 'Aug': '"""August"""', 'Sep': '"""September"""', 'Oct': '"""October"""', 'Nov': '"""November"""', 'Dec': '"""December"""'}), "(Jan='January', Feb='February', Mar='March', Apr='April', May=\n 'May', Jun='June', Jul='July', Aug='August', Sep='September', Oct=\n 'October', Nov='November', Dec='December')\n", (4920, 5100), False, 'from collections import OrderedDict\n'), ((4529, 4586), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['utc_timestamp', 'output_tz'], {}), '(utc_timestamp, output_tz)\n', (4560, 4586), False, 'import datetime\n'), ((7582, 7613), 'datetime.date', 'datetime.date', (['year', 'month', 'day'], {}), '(year, month, day)\n', (7595, 7613), False, 'import datetime\n'), ((7082, 7113), 'datetime.date', 'datetime.date', (['year', 'month', 'day'], {}), '(year, month, day)\n', (7095, 7113), False, 'import datetime\n'), ((8051, 8077), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (8075, 8077), False, 'import datetime\n'), ((8737, 8763), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (8761, 8763), False, 'import datetime\n'), ((2488, 2511), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2509, 2511), False, 'import datetime\n'), ((8805, 8822), 'pytz.timezone', 'pytz.timezone', (['tz'], {}), '(tz)\n', (8818, 8822), False, 'import pytz\n'), ((9153, 9179), 'domdf_python_tools.words.word_join', 'word_join', (['_pytz_functions'], {}), '(_pytz_functions)\n', (9162, 9179), False, 'from domdf_python_tools.words import word_join\n')] |
'''
Copyright 2013 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
.. codeauthor:: Radu Viorel Cosnita <<EMAIL>>
.. py:module:fantastico.mvc.models.tests.test_model_filter_compound
'''
from fantastico.exceptions import FantasticoNotSupportedError, FantasticoError
from fantastico.mvc.models.model_filter import ModelFilter
from fantastico.mvc.models.model_filter_compound import ModelFilterAnd, \
ModelFilterOr
from fantastico.tests.base_case import FantasticoUnitTestsCase
from mock import Mock
from sqlalchemy.schema import Column
from sqlalchemy.types import Integer
class ModelFilterOrTests(FantasticoUnitTestsCase):
'''This class provides test suite for compound *and* model filter.'''
    def init(self):
        """Create a fresh mock model with an integer ``id`` column for each test."""
        self._model = Mock()
        self._model.id = Column("id", Integer)
def test_modelfilteror_noargs(self):
'''This test case ensures compound **or** filter can not be built without arguments.'''
with self.assertRaises(FantasticoNotSupportedError):
ModelFilterOr()
def test_modelfilteror_notenoughargs(self):
'''This test case ensures compound **or** filter can not be built with a single argument.'''
with self.assertRaises(FantasticoNotSupportedError):
ModelFilterOr(ModelFilter(self._model.id, 1, ModelFilter.EQ))
def test_modelfilteror_wrongargtype(self):
'''This test case ensures compound **or** filter works only with ModelFilter arguments.'''
with self.assertRaises(FantasticoNotSupportedError):
ModelFilterAnd(Mock(), Mock())
    def test_modelfilteror_ok(self):
        '''This test case ensures compound **or** filter correctly transform the filter into sql alchemy and_ statement.'''

        self._or_invoked = False

        # Three identical sub-filters are enough to build a valid compound.
        model_filter = ModelFilterOr(ModelFilter(self._model.id, 1, ModelFilter.EQ),
                                      ModelFilter(self._model.id, 1, ModelFilter.EQ),
                                      ModelFilter(self._model.id, 1, ModelFilter.EQ))

        query = Mock()

        def filter_fn(expr):
            # Record that the compound expression actually reached query.filter.
            self._or_invoked = True

            return Mock()

        # Wire the mock query so build() sees a plausible SQLAlchemy query shape.
        self._model.id.table = Mock()
        query._primary_entity = query
        query.selectable = self._model.id.table

        query.filter = filter_fn

        query_new = model_filter.build(query)

        self.assertTrue(self._or_invoked)
        self.assertIsInstance(query_new, Mock)
def test_modelfiteror_unhandled_exception(self):
'''This test case ensures unhandled exceptions raised from ModelFilter are gracefully handled by ModelFilterOr build.'''
model_filter = ModelFilter(self._model.id, 1, ModelFilter.EQ)
model_filter.get_expression = Mock(side_effect=Exception("Unhandled exception"))
model_filter_or = ModelFilterOr(model_filter, model_filter, model_filter)
with self.assertRaises(FantasticoError):
model_filter_or.build(Mock()) | [
"mock.Mock",
"sqlalchemy.schema.Column",
"fantastico.mvc.models.model_filter_compound.ModelFilterOr",
"fantastico.mvc.models.model_filter.ModelFilter"
] | [((1737, 1743), 'mock.Mock', 'Mock', ([], {}), '()\n', (1741, 1743), False, 'from mock import Mock\n'), ((1769, 1790), 'sqlalchemy.schema.Column', 'Column', (['"""id"""', 'Integer'], {}), "('id', Integer)\n", (1775, 1790), False, 'from sqlalchemy.schema import Column\n'), ((3092, 3098), 'mock.Mock', 'Mock', ([], {}), '()\n', (3096, 3098), False, 'from mock import Mock\n'), ((3256, 3262), 'mock.Mock', 'Mock', ([], {}), '()\n', (3260, 3262), False, 'from mock import Mock\n'), ((3759, 3805), 'fantastico.mvc.models.model_filter.ModelFilter', 'ModelFilter', (['self._model.id', '(1)', 'ModelFilter.EQ'], {}), '(self._model.id, 1, ModelFilter.EQ)\n', (3770, 3805), False, 'from fantastico.mvc.models.model_filter import ModelFilter\n'), ((3930, 3985), 'fantastico.mvc.models.model_filter_compound.ModelFilterOr', 'ModelFilterOr', (['model_filter', 'model_filter', 'model_filter'], {}), '(model_filter, model_filter, model_filter)\n', (3943, 3985), False, 'from fantastico.mvc.models.model_filter_compound import ModelFilterAnd, ModelFilterOr\n'), ((2015, 2030), 'fantastico.mvc.models.model_filter_compound.ModelFilterOr', 'ModelFilterOr', ([], {}), '()\n', (2028, 2030), False, 'from fantastico.mvc.models.model_filter_compound import ModelFilterAnd, ModelFilterOr\n'), ((2847, 2893), 'fantastico.mvc.models.model_filter.ModelFilter', 'ModelFilter', (['self._model.id', '(1)', 'ModelFilter.EQ'], {}), '(self._model.id, 1, ModelFilter.EQ)\n', (2858, 2893), False, 'from fantastico.mvc.models.model_filter import ModelFilter\n'), ((2933, 2979), 'fantastico.mvc.models.model_filter.ModelFilter', 'ModelFilter', (['self._model.id', '(1)', 'ModelFilter.EQ'], {}), '(self._model.id, 1, ModelFilter.EQ)\n', (2944, 2979), False, 'from fantastico.mvc.models.model_filter import ModelFilter\n'), ((3019, 3065), 'fantastico.mvc.models.model_filter.ModelFilter', 'ModelFilter', (['self._model.id', '(1)', 'ModelFilter.EQ'], {}), '(self._model.id, 1, ModelFilter.EQ)\n', (3030, 3065), False, 'from 
fantastico.mvc.models.model_filter import ModelFilter\n'), ((3205, 3211), 'mock.Mock', 'Mock', ([], {}), '()\n', (3209, 3211), False, 'from mock import Mock\n'), ((2289, 2335), 'fantastico.mvc.models.model_filter.ModelFilter', 'ModelFilter', (['self._model.id', '(1)', 'ModelFilter.EQ'], {}), '(self._model.id, 1, ModelFilter.EQ)\n', (2300, 2335), False, 'from fantastico.mvc.models.model_filter import ModelFilter\n'), ((2581, 2587), 'mock.Mock', 'Mock', ([], {}), '()\n', (2585, 2587), False, 'from mock import Mock\n'), ((2589, 2595), 'mock.Mock', 'Mock', ([], {}), '()\n', (2593, 2595), False, 'from mock import Mock\n'), ((4078, 4084), 'mock.Mock', 'Mock', ([], {}), '()\n', (4082, 4084), False, 'from mock import Mock\n')] |
import json
import os
class Notifyer():
    """Minimal publish/subscribe hub: fans a trigger id out to all subscribers."""

    def __init__(self):
        # Registered objects must expose a ``notify(trigger_id)`` method.
        self.subscribers = []

    def add_subscriber(self, subscriber):
        """Register an object to receive future trigger notifications."""
        self.subscribers.append(subscriber)

    def notify(self, triggerId):
        """Forward ``triggerId`` to every registered subscriber, in order."""
        for listener in self.subscribers:
            listener.notify(triggerId)
# Shared registries, populated from the game layout in main().
global_characters = []
global_items = []
# Single event bus: item triggers are broadcast to state machines and portals.
global_notifyer = Notifyer()
class State():
    """One node in a character's dialogue graph."""

    def __init__(self, id, desc, entry_condition, reactive_items):
        self.id = id
        self.desc = desc
        # Trigger id that admits a transition into this state.
        self.entry_condition = entry_condition
        # Maps item id (stored as str) -> trigger id fired when that item is used.
        self.reactive_items = reactive_items
        self.neighbours = []

    def add_neighbour(self, neighbour):
        """Link another State reachable from this one."""
        self.neighbours.append(neighbour)
class StateMachine():
    """Drives a character's dialogue states, advancing on global trigger events."""

    def __init__(self, state_dto):
        """Build State objects from a list of dicts and subscribe to the event bus.

        Each dto needs ``id``, ``lines``, ``entry_condition``, ``reactive_items``
        and ``neighbours`` keys; the first dto becomes the initial state.
        """
        self.states = [State(s["id"], "\n".join(s["lines"]), s["entry_condition"], s["reactive_items"]) for s in state_dto]
        for dto, state in zip(state_dto, self.states):
            for n in dto["neighbours"]:
                state.add_neighbour(next(x for x in self.states if x.id == n))
        self.current_state = self.states[0]
        global_notifyer.add_subscriber(self)

    def get_desc(self):
        """Return the dialogue text of the current state."""
        return self.current_state.desc

    def trigger(self, item_id):
        """Broadcast the trigger mapped to ``item_id`` in the current state.

        Raises KeyError if the current state has no reaction for the item.
        """
        trigger_id = self.current_state.reactive_items[str(item_id)]
        global_notifyer.notify(trigger_id)

    def notify(self, trigger_id):
        """Advance to the neighbour whose entry condition matches, if any.

        Bug fix: ``next`` previously had no default, so an unmatched trigger
        raised StopIteration instead of leaving the state unchanged (the
        existing ``if new_state`` guard shows a no-op was intended; sibling
        code such as Character.use already uses the default form).
        """
        new_state = next((x for x in self.current_state.neighbours if x.entry_condition == trigger_id), None)
        if new_state:
            self.current_state = new_state
class Item():
    """A named, describable object the player can pick up and use."""

    def __init__(self, id, item_name, desc):
        self.id = id
        self.desc = desc
        self.name = item_name
class Portal():
    """A doorway between rooms that a matching trigger can unlock."""

    def __init__(self, name, room_id, trigger_id, open):
        self.name = name
        self.room_id = room_id
        self.trigger_id = trigger_id
        self.open = open
        # Listen on the shared bus so a later trigger can open this portal.
        global_notifyer.add_subscriber(self)

    def notify(self, trigger_id):
        """Open the portal when its unlock trigger fires; ignore others."""
        if self.trigger_id == trigger_id:
            self.open = True
class Character():
    """An NPC with a description and a dialogue state machine."""

    def __init__(self, id, name, desc, states):
        self.id = id
        self.name = name
        self.desc = desc
        self.state_machine = StateMachine(states)

    def look(self):
        """Return the character's static description."""
        return self.desc

    def talk(self):
        """Return the dialogue for the character's current state."""
        return self.state_machine.get_desc()

    def use(self, player, item_name):
        """Use a carried item on this character, firing its trigger if valid.

        The item must exist globally AND be in the player's inventory;
        otherwise this is a silent no-op.
        """
        item_id = next((x.id for x in global_items if x.name == item_name and x.id in player.inventory), None)
        if item_id is not None:
            self.state_machine.trigger(item_id)
class Room():
    """A location holding items, characters, and portals to neighbouring rooms."""

    def __init__(self, id, entryText, desc, neighbours, items_ids, characters_ids):
        self.id = id
        self.entryText = entryText
        self.desc = desc
        # Each neighbour dict describes one Portal to another room.
        self.neighbours = [Portal(x["name"], x["roomId"], x["triggerId"], x["open"]) for x in neighbours]
        self.items_ids = items_ids
        self.characters_ids = characters_ids

    def enter_room(self):
        """Return the text shown when the player enters this room."""
        return self.entryText

    def get_character(self, name):
        """Return the named character if present in this room; raise otherwise."""
        character = next((x for x in global_characters if x.name == name), None)
        if character and character.id in self.characters_ids:
            return character
        else:
            raise Exception("Character doesn't exists in this room")

    def take_item(self, item_name):
        """Remove the named item from the room.

        Returns True on success; prints a message and returns False otherwise.
        """
        item_id = next((x.id for x in global_items if x.name == item_name), None)
        if item_id in self.items_ids:
            self.items_ids.remove(item_id)
            return True
        else:
            print("You can't find a {} in this room".format(item_name))
            return False

    def enter(self, portal_name):
        """Return the destination room id for an open portal, else None.

        Bug fix: ``next`` previously had no default, so naming a non-existent
        portal raised StopIteration and crashed the game loop; now it returns
        None, matching the closed-portal behaviour the caller already handles.
        """
        portal = next((x for x in self.neighbours if x.name == portal_name), None)
        if portal is not None and portal.open:
            return portal.room_id
        return None
class Player():
    """Tracks the item ids currently carried by the player."""

    def __init__(self):
        self.inventory = []

    def add_to_inventory(self, item_name):
        """Look up the named item globally and add its id to the inventory."""
        item_id = next((x.id for x in global_items if x.name == item_name), None)
        if item_id is not None:
            self.inventory.append(item_id)
            print("You have aquired a {}".format(item_name))

    def remove_from_inventory(self, item_name):
        """Look up the named item globally and remove its id from the inventory.

        Bug fix: the presence check was ``if item_id:``, which silently skipped
        removal for a valid item id of 0; use an explicit None check, matching
        ``add_to_inventory``.
        """
        item_id = next((x.id for x in global_items if x.name == item_name), None)
        if item_id is not None:
            self.inventory.remove(item_id)
def main():
    """Entry point: load game.json, build the world, then run the input loop."""
    with open("game.json") as layout_file:
        layout = json.load(layout_file)
    print(layout["title"])
    print("Press any key to start")
    input()
    # Build every room from its DTO; the first room is the starting location.
    rooms = []
    for roomDto in layout["rooms"]:
        r = Room(roomDto["id"],
             roomDto["entryText"],
             roomDto["description"],
             roomDto["neighbours"],
             roomDto["items"],
             roomDto["characters"])
        rooms.append(r)
    room = rooms[0]
    characters = []
    for c in layout["characters"]:
        character = Character(c["id"], c["name"], c["description"], c["states"])
        characters.append(character)
    global_characters.extend(characters)
    # NOTE(review): this filter is computed once for the starting room and is
    # never refreshed after a "go" command -- characters in later rooms may be
    # unreachable via look/talk; confirm intended behaviour.
    active_characters = [x for x in characters if x.id in room.characters_ids]
    entering = True
    player = Player()
    global_items.extend([Item(x["id"], x["name"], x["description"]) for x in layout["items"]])
    os.system('cls')  # clear screen; Windows-only command
    while(True):
        if entering:
            entering = False
            print(room.enter_room())
        inpt = input()
        verbs = ["look", "talk", "take", "use", "go"]
        # NOTE(review): empty input leaves comps empty, so comps[0] raises
        # IndexError; likewise one-word "talk"/"take"/"use"/"go" commands.
        comps = [x.lower() for x in inpt.split()]
        if comps[0] in verbs:
            if comps[0] == "look":
                # Bare "look" describes the room; "look <name>" a character.
                if len(comps) == 1:
                    print(room.desc)
                elif len(comps) == 2:
                    char_name = comps[1]
                    char = next((x for x in active_characters if x.name == char_name), None)
                    if not char:
                        print("Character {} doesn't exist".format(char_name))
                    else:
                        print(char.look())
            elif comps[0] == "talk":
                char_name = comps[1]
                char = next((x for x in active_characters if x.name == char_name), None)
                if not char:
                    print("Character {} doesn't exist".format(char_name))
                else:
                    print(char.talk())
            elif comps[0] == "take":
                item_name = comps[1]
                if room.take_item(item_name):
                    player.add_to_inventory(item_name)
            elif comps[0] == "use":
                # "use <item> <character>"
                item_name = comps[1]
                target = comps[2]
                character = room.get_character(target)
                character.use(player, item_name)
            elif comps[0] == "go":
                portal_name = comps[1]
                room_id = room.enter(portal_name)
                if room_id != None:
                    r = next(x for x in rooms if x.id == room_id)
                    if r:
                        room = r
                        entering = True
            else:
                print("I don't understand")
        else:
            print("I don't understand")
if __name__ == "__main__":
main() | [
"json.load",
"os.system"
] | [((5241, 5257), 'os.system', 'os.system', (['"""cls"""'], {}), "('cls')\n", (5250, 5257), False, 'import os\n'), ((4385, 4407), 'json.load', 'json.load', (['layout_file'], {}), '(layout_file)\n', (4394, 4407), False, 'import json\n')] |
"""
Added DAG master table and DAG permissions table
Revision ID: f3bee20314a2
Revises: <KEY>
Create Date: 2021-12-14 14:41:16.096297
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "f3bee20314a2"
down_revision = "<KEY>"
branch_labels = None
depends_on = None
def upgrade():
    """Create the ``deployed_dags`` master table and the ``permission_dag`` join table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "deployed_dags",
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.Column("deleted_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.String(length=128), nullable=False),
        sa.Column("description", sa.TEXT(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "permission_dag",
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.Column("deleted_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("dag_id", sa.String(length=128), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        # Each permission row links one user to one deployed DAG.
        sa.ForeignKeyConstraint(
            ["dag_id"],
            ["deployed_dags.id"],
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        # At most one permission row per (DAG, user) pair.
        sa.UniqueConstraint("dag_id", "user_id"),
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the permission table first (it references ``deployed_dags``), then the master table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("permission_dag")
    op.drop_table("deployed_dags")
    # ### end Alembic commands ###
| [
"sqlalchemy.ForeignKeyConstraint",
"sqlalchemy.DateTime",
"alembic.op.drop_table",
"sqlalchemy.PrimaryKeyConstraint",
"sqlalchemy.TEXT",
"sqlalchemy.Integer",
"sqlalchemy.UniqueConstraint",
"sqlalchemy.String"
] | [((1666, 1697), 'alembic.op.drop_table', 'op.drop_table', (['"""permission_dag"""'], {}), "('permission_dag')\n", (1679, 1697), False, 'from alembic import op\n'), ((1702, 1732), 'alembic.op.drop_table', 'op.drop_table', (['"""deployed_dags"""'], {}), "('deployed_dags')\n", (1715, 1732), False, 'from alembic import op\n'), ((771, 800), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (794, 800), True, 'import sqlalchemy as sa\n'), ((1258, 1315), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['dag_id']", "['deployed_dags.id']"], {}), "(['dag_id'], ['deployed_dags.id'])\n", (1281, 1315), True, 'import sqlalchemy as sa\n'), ((1360, 1410), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['user_id']", "['users.id']"], {}), "(['user_id'], ['users.id'])\n", (1383, 1410), True, 'import sqlalchemy as sa\n'), ((1455, 1484), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1478, 1484), True, 'import sqlalchemy as sa\n'), ((1494, 1534), 'sqlalchemy.UniqueConstraint', 'sa.UniqueConstraint', (['"""dag_id"""', '"""user_id"""'], {}), "('dag_id', 'user_id')\n", (1513, 1534), True, 'import sqlalchemy as sa\n'), ((480, 493), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (491, 493), True, 'import sqlalchemy as sa\n'), ((544, 557), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (555, 557), True, 'import sqlalchemy as sa\n'), ((608, 621), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (619, 621), True, 'import sqlalchemy as sa\n'), ((663, 684), 'sqlalchemy.String', 'sa.String', ([], {'length': '(128)'}), '(length=128)\n', (672, 684), True, 'import sqlalchemy as sa\n'), ((736, 745), 'sqlalchemy.TEXT', 'sa.TEXT', ([], {}), '()\n', (743, 745), True, 'import sqlalchemy as sa\n'), ((888, 901), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (899, 901), True, 'import sqlalchemy as sa\n'), ((952, 965), 'sqlalchemy.DateTime', 
'sa.DateTime', ([], {}), '()\n', (963, 965), True, 'import sqlalchemy as sa\n'), ((1016, 1029), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1027, 1029), True, 'import sqlalchemy as sa\n'), ((1071, 1083), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1081, 1083), True, 'import sqlalchemy as sa\n'), ((1150, 1171), 'sqlalchemy.String', 'sa.String', ([], {'length': '(128)'}), '(length=128)\n', (1159, 1171), True, 'import sqlalchemy as sa\n'), ((1219, 1231), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (1229, 1231), True, 'import sqlalchemy as sa\n')] |
__author__ = "<NAME>, <NAME> and <NAME>"
__version__ = "0.0.1"
__license__ = "BSD"
from autoPyTorch.core.api import AutoNet
class AutoNetFeatureData(AutoNet):
    """AutoNet variant for featurized (tabular) data.

    Builds the default AutoPyTorch pipeline for feature data and registers
    the default preprocessors, normalization strategies, embeddings,
    networks, optimizers, learning-rate schedulers and training techniques.
    """

    @classmethod
    def get_default_pipeline(cls):
        """Build and return the default Pipeline for feature data.

        The per-fold nodes (imputation through training) are nested inside
        cross validation, which itself runs under the optimization algorithm.
        """
        from autoPyTorch.pipeline.base.pipeline import Pipeline
        from autoPyTorch.pipeline.nodes.autonet_settings import AutoNetSettings
        from autoPyTorch.pipeline.nodes.optimization_algorithm import OptimizationAlgorithm
        from autoPyTorch.pipeline.nodes.cross_validation import CrossValidation
        from autoPyTorch.pipeline.nodes.imputation import Imputation
        from autoPyTorch.pipeline.nodes.normalization_strategy_selector import NormalizationStrategySelector
        from autoPyTorch.pipeline.nodes.one_hot_encoding import OneHotEncoding
        from autoPyTorch.pipeline.nodes.preprocessor_selector import PreprocessorSelector
        from autoPyTorch.pipeline.nodes.resampling_strategy_selector import ResamplingStrategySelector
        from autoPyTorch.pipeline.nodes.embedding_selector import EmbeddingSelector
        from autoPyTorch.pipeline.nodes.network_selector import NetworkSelector
        from autoPyTorch.pipeline.nodes.optimizer_selector import OptimizerSelector
        from autoPyTorch.pipeline.nodes.lr_scheduler_selector import LearningrateSchedulerSelector
        from autoPyTorch.pipeline.nodes.log_functions_selector import LogFunctionsSelector
        from autoPyTorch.pipeline.nodes.metric_selector import MetricSelector
        from autoPyTorch.pipeline.nodes.loss_module_selector import LossModuleSelector
        from autoPyTorch.pipeline.nodes.train_node import TrainNode

        # build the pipeline
        pipeline = Pipeline([
            AutoNetSettings(),
            OptimizationAlgorithm([
                CrossValidation([
                    Imputation(),
                    NormalizationStrategySelector(),
                    OneHotEncoding(),
                    PreprocessorSelector(),
                    ResamplingStrategySelector(),
                    EmbeddingSelector(),
                    NetworkSelector(),
                    OptimizerSelector(),
                    LearningrateSchedulerSelector(),
                    LogFunctionsSelector(),
                    MetricSelector(),
                    LossModuleSelector(),
                    TrainNode()
                ])
            ])
        ])

        cls._apply_default_pipeline_settings(pipeline)
        return pipeline

    @staticmethod
    def _apply_default_pipeline_settings(pipeline):
        """Register the default components on the given pipeline's nodes."""
        from autoPyTorch.pipeline.nodes.normalization_strategy_selector import NormalizationStrategySelector
        from autoPyTorch.pipeline.nodes.preprocessor_selector import PreprocessorSelector
        from autoPyTorch.pipeline.nodes.embedding_selector import EmbeddingSelector
        from autoPyTorch.pipeline.nodes.network_selector import NetworkSelector
        from autoPyTorch.pipeline.nodes.optimizer_selector import OptimizerSelector
        from autoPyTorch.pipeline.nodes.lr_scheduler_selector import LearningrateSchedulerSelector
        from autoPyTorch.pipeline.nodes.train_node import TrainNode

        from autoPyTorch.components.networks.feature import MlpNet, ResNet, ShapedMlpNet, ShapedResNet
        from autoPyTorch.components.optimizer.optimizer import AdamOptimizer, SgdOptimizer
        # NOTE: SchedulerReduceLROnPlateau was previously imported twice here;
        # the duplicate name has been removed.
        from autoPyTorch.components.lr_scheduler.lr_schedulers import SchedulerCosineAnnealingWithRestartsLR, SchedulerNone, \
            SchedulerCyclicLR, SchedulerExponentialLR, SchedulerReduceLROnPlateau, SchedulerStepLR
        from autoPyTorch.components.networks.feature import LearnedEntityEmbedding

        from sklearn.preprocessing import MinMaxScaler, StandardScaler, MaxAbsScaler
        from autoPyTorch.components.preprocessing.feature_preprocessing import \
            TruncatedSVD, FastICA, RandomKitchenSinks, KernelPCA, Nystroem

        from autoPyTorch.training.early_stopping import EarlyStopping
        from autoPyTorch.training.mixup import Mixup

        pre_selector = pipeline[PreprocessorSelector.get_name()]
        pre_selector.add_preprocessor('truncated_svd', TruncatedSVD)
        pre_selector.add_preprocessor('fast_ica', FastICA)
        pre_selector.add_preprocessor('kitchen_sinks', RandomKitchenSinks)
        pre_selector.add_preprocessor('kernel_pca', KernelPCA)
        pre_selector.add_preprocessor('nystroem', Nystroem)

        norm_selector = pipeline[NormalizationStrategySelector.get_name()]
        norm_selector.add_normalization_strategy('minmax', MinMaxScaler)
        norm_selector.add_normalization_strategy('standardize', StandardScaler)
        norm_selector.add_normalization_strategy('maxabs', MaxAbsScaler)

        emb_selector = pipeline[EmbeddingSelector.get_name()]
        emb_selector.add_embedding_module('learned', LearnedEntityEmbedding)

        net_selector = pipeline[NetworkSelector.get_name()]
        net_selector.add_network('mlpnet', MlpNet)
        net_selector.add_network('shapedmlpnet', ShapedMlpNet)
        net_selector.add_network('resnet', ResNet)
        net_selector.add_network('shapedresnet', ShapedResNet)

        opt_selector = pipeline[OptimizerSelector.get_name()]
        opt_selector.add_optimizer('adam', AdamOptimizer)
        opt_selector.add_optimizer('sgd', SgdOptimizer)

        lr_selector = pipeline[LearningrateSchedulerSelector.get_name()]
        lr_selector.add_lr_scheduler('cosine_annealing', SchedulerCosineAnnealingWithRestartsLR)
        lr_selector.add_lr_scheduler('cyclic', SchedulerCyclicLR)
        lr_selector.add_lr_scheduler('exponential', SchedulerExponentialLR)
        lr_selector.add_lr_scheduler('step', SchedulerStepLR)
        lr_selector.add_lr_scheduler('plateau', SchedulerReduceLROnPlateau)
        lr_selector.add_lr_scheduler('none', SchedulerNone)

        train_node = pipeline[TrainNode.get_name()]
        train_node.add_training_technique("early_stopping", EarlyStopping)
        train_node.add_batch_loss_computation_technique("mixup", Mixup)
| [
"autoPyTorch.pipeline.nodes.log_functions_selector.LogFunctionsSelector",
"autoPyTorch.pipeline.nodes.network_selector.NetworkSelector",
"autoPyTorch.pipeline.nodes.metric_selector.MetricSelector",
"autoPyTorch.pipeline.nodes.preprocessor_selector.PreprocessorSelector.get_name",
"autoPyTorch.pipeline.nodes.... | [((4122, 4153), 'autoPyTorch.pipeline.nodes.preprocessor_selector.PreprocessorSelector.get_name', 'PreprocessorSelector.get_name', ([], {}), '()\n', (4151, 4153), False, 'from autoPyTorch.pipeline.nodes.preprocessor_selector import PreprocessorSelector\n'), ((4515, 4555), 'autoPyTorch.pipeline.nodes.normalization_strategy_selector.NormalizationStrategySelector.get_name', 'NormalizationStrategySelector.get_name', ([], {}), '()\n', (4553, 4555), False, 'from autoPyTorch.pipeline.nodes.normalization_strategy_selector import NormalizationStrategySelector\n'), ((4818, 4846), 'autoPyTorch.pipeline.nodes.embedding_selector.EmbeddingSelector.get_name', 'EmbeddingSelector.get_name', ([], {}), '()\n', (4844, 4846), False, 'from autoPyTorch.pipeline.nodes.embedding_selector import EmbeddingSelector\n'), ((4958, 4984), 'autoPyTorch.pipeline.nodes.network_selector.NetworkSelector.get_name', 'NetworkSelector.get_name', ([], {}), '()\n', (4982, 4984), False, 'from autoPyTorch.pipeline.nodes.network_selector import NetworkSelector\n'), ((5259, 5287), 'autoPyTorch.pipeline.nodes.optimizer_selector.OptimizerSelector.get_name', 'OptimizerSelector.get_name', ([], {}), '()\n', (5285, 5287), False, 'from autoPyTorch.pipeline.nodes.optimizer_selector import OptimizerSelector\n'), ((5436, 5476), 'autoPyTorch.pipeline.nodes.lr_scheduler_selector.LearningrateSchedulerSelector.get_name', 'LearningrateSchedulerSelector.get_name', ([], {}), '()\n', (5474, 5476), False, 'from autoPyTorch.pipeline.nodes.lr_scheduler_selector import LearningrateSchedulerSelector\n'), ((5994, 6014), 'autoPyTorch.pipeline.nodes.train_node.TrainNode.get_name', 'TrainNode.get_name', ([], {}), '()\n', (6012, 6014), False, 'from autoPyTorch.pipeline.nodes.train_node import TrainNode\n'), ((1732, 1749), 'autoPyTorch.pipeline.nodes.autonet_settings.AutoNetSettings', 'AutoNetSettings', ([], {}), '()\n', (1747, 1749), False, 'from autoPyTorch.pipeline.nodes.autonet_settings import 
AutoNetSettings\n'), ((1841, 1853), 'autoPyTorch.pipeline.nodes.imputation.Imputation', 'Imputation', ([], {}), '()\n', (1851, 1853), False, 'from autoPyTorch.pipeline.nodes.imputation import Imputation\n'), ((1875, 1906), 'autoPyTorch.pipeline.nodes.normalization_strategy_selector.NormalizationStrategySelector', 'NormalizationStrategySelector', ([], {}), '()\n', (1904, 1906), False, 'from autoPyTorch.pipeline.nodes.normalization_strategy_selector import NormalizationStrategySelector\n'), ((1928, 1944), 'autoPyTorch.pipeline.nodes.one_hot_encoding.OneHotEncoding', 'OneHotEncoding', ([], {}), '()\n', (1942, 1944), False, 'from autoPyTorch.pipeline.nodes.one_hot_encoding import OneHotEncoding\n'), ((1966, 1988), 'autoPyTorch.pipeline.nodes.preprocessor_selector.PreprocessorSelector', 'PreprocessorSelector', ([], {}), '()\n', (1986, 1988), False, 'from autoPyTorch.pipeline.nodes.preprocessor_selector import PreprocessorSelector\n'), ((2010, 2038), 'autoPyTorch.pipeline.nodes.resampling_strategy_selector.ResamplingStrategySelector', 'ResamplingStrategySelector', ([], {}), '()\n', (2036, 2038), False, 'from autoPyTorch.pipeline.nodes.resampling_strategy_selector import ResamplingStrategySelector\n'), ((2060, 2079), 'autoPyTorch.pipeline.nodes.embedding_selector.EmbeddingSelector', 'EmbeddingSelector', ([], {}), '()\n', (2077, 2079), False, 'from autoPyTorch.pipeline.nodes.embedding_selector import EmbeddingSelector\n'), ((2101, 2118), 'autoPyTorch.pipeline.nodes.network_selector.NetworkSelector', 'NetworkSelector', ([], {}), '()\n', (2116, 2118), False, 'from autoPyTorch.pipeline.nodes.network_selector import NetworkSelector\n'), ((2140, 2159), 'autoPyTorch.pipeline.nodes.optimizer_selector.OptimizerSelector', 'OptimizerSelector', ([], {}), '()\n', (2157, 2159), False, 'from autoPyTorch.pipeline.nodes.optimizer_selector import OptimizerSelector\n'), ((2181, 2212), 'autoPyTorch.pipeline.nodes.lr_scheduler_selector.LearningrateSchedulerSelector', 
'LearningrateSchedulerSelector', ([], {}), '()\n', (2210, 2212), False, 'from autoPyTorch.pipeline.nodes.lr_scheduler_selector import LearningrateSchedulerSelector\n'), ((2234, 2256), 'autoPyTorch.pipeline.nodes.log_functions_selector.LogFunctionsSelector', 'LogFunctionsSelector', ([], {}), '()\n', (2254, 2256), False, 'from autoPyTorch.pipeline.nodes.log_functions_selector import LogFunctionsSelector\n'), ((2278, 2294), 'autoPyTorch.pipeline.nodes.metric_selector.MetricSelector', 'MetricSelector', ([], {}), '()\n', (2292, 2294), False, 'from autoPyTorch.pipeline.nodes.metric_selector import MetricSelector\n'), ((2316, 2336), 'autoPyTorch.pipeline.nodes.loss_module_selector.LossModuleSelector', 'LossModuleSelector', ([], {}), '()\n', (2334, 2336), False, 'from autoPyTorch.pipeline.nodes.loss_module_selector import LossModuleSelector\n'), ((2358, 2369), 'autoPyTorch.pipeline.nodes.train_node.TrainNode', 'TrainNode', ([], {}), '()\n', (2367, 2369), False, 'from autoPyTorch.pipeline.nodes.train_node import TrainNode\n')] |
from typing import List, Optional
from fastapi.encoders import jsonable_encoder
from sqlalchemy.sql.expression import true
from .models import IncidentPriority, IncidentPriorityCreate, IncidentPriorityUpdate
def get(*, db_session, incident_priority_id: int) -> Optional[IncidentPriority]:
    """Look up a single incident priority by its primary key.

    Returns None when no row matches.
    """
    query = db_session.query(IncidentPriority).filter(
        IncidentPriority.id == incident_priority_id
    )
    return query.one_or_none()
def get_default(*, db_session):
    """Return the incident priority currently flagged as the default, or None."""
    query = db_session.query(IncidentPriority)
    return query.filter(IncidentPriority.default == true()).one_or_none()
def get_by_name(*, db_session, name: str) -> Optional[IncidentPriority]:
    """Return the incident priority with the given name, or None."""
    query = db_session.query(IncidentPriority).filter(IncidentPriority.name == name)
    return query.one_or_none()
def get_by_slug(*, db_session, slug: str) -> Optional[IncidentPriority]:
    """Return the incident priority with the given slug, or None."""
    query = db_session.query(IncidentPriority).filter(IncidentPriority.slug == slug)
    return query.one_or_none()
def get_all(*, db_session) -> List[Optional[IncidentPriority]]:
    """Returns all incident priorities.

    NOTE(review): this returns the SQLAlchemy Query object itself rather than
    a materialized list, despite the List annotation — callers may rely on
    further chaining/pagination; confirm before adding ``.all()``.
    """
    return db_session.query(IncidentPriority)
def create(*, db_session, incident_priority_in: IncidentPriorityCreate) -> IncidentPriority:
    """Persist a new incident priority built from the given schema."""
    new_priority = IncidentPriority(**incident_priority_in.dict())
    db_session.add(new_priority)
    db_session.commit()
    return new_priority
def update(
    *, db_session, incident_priority: IncidentPriority, incident_priority_in: IncidentPriorityUpdate
) -> IncidentPriority:
    """Apply the fields set on the update schema to an existing priority.

    Only attributes that exist on the current model representation are
    written; unset schema fields are skipped.
    """
    existing_fields = jsonable_encoder(incident_priority)
    changes = incident_priority_in.dict(skip_defaults=True)

    for field_name, new_value in changes.items():
        if field_name in existing_fields:
            setattr(incident_priority, field_name, new_value)

    db_session.add(incident_priority)
    db_session.commit()
    return incident_priority
def delete(*, db_session, incident_priority_id: int):
    """Delete the incident priority with the given id and commit."""
    query = db_session.query(IncidentPriority).filter(
        IncidentPriority.id == incident_priority_id
    )
    query.delete()
    db_session.commit()
| [
"sqlalchemy.sql.expression.true",
"fastapi.encoders.jsonable_encoder"
] | [((1868, 1903), 'fastapi.encoders.jsonable_encoder', 'jsonable_encoder', (['incident_priority'], {}), '(incident_priority)\n', (1884, 1903), False, 'from fastapi.encoders import jsonable_encoder\n'), ((692, 698), 'sqlalchemy.sql.expression.true', 'true', ([], {}), '()\n', (696, 698), False, 'from sqlalchemy.sql.expression import true\n')] |
#!/usr/bin/env python3
from functools import partial
def combine(qubit_count, gates):
    """Bind every gate to every qubit index, gate-major order.

    Returns a flat list of partials: for each gate in ``gates`` (in order),
    one partial per index in ``range(qubit_count)``.
    """
    combined = []
    for gate in gates:
        combined.extend(partial(gate, qubit) for qubit in range(qubit_count))
    return combined
def repeat_none(index, count):
    """Return ``count`` references to a no-op gate bound to ``index``."""
    no_op = partial(apply_none, index)
    return [no_op] * count
def apply_none(index, circuit):
    """Identity gate: leave ``circuit`` unchanged at qubit ``index``."""
    pass
def apply_not(index, circuit):
    """Apply a Pauli-X (NOT) gate to qubit ``index`` of ``circuit``."""
    register = circuit.qregs[0]
    circuit.x(register[index])
def apply_phase_flip(index, circuit):
    """Apply a Pauli-Z (phase-flip) gate to qubit ``index`` of ``circuit``."""
    register = circuit.qregs[0]
    circuit.z(register[index])
def apply_hadamard(index, circuit):
    """Apply a Hadamard gate to qubit ``index`` of ``circuit``."""
    register = circuit.qregs[0]
    circuit.h(register[index])
def apply_y_rotation(theta, index, circuit):
    """Rotate qubit ``index`` of ``circuit`` by angle ``theta`` about Y."""
    register = circuit.qregs[0]
    circuit.ry(theta, register[index])
def apply_z_rotation(phi, index, circuit):
    """Rotate qubit ``index`` of ``circuit`` by angle ``phi`` about Z."""
    register = circuit.qregs[0]
    circuit.rz(phi, register[index])
| [
"functools.partial"
] | [((102, 115), 'functools.partial', 'partial', (['g', 'i'], {}), '(g, i)\n', (109, 115), False, 'from functools import partial\n'), ((204, 230), 'functools.partial', 'partial', (['apply_none', 'index'], {}), '(apply_none, index)\n', (211, 230), False, 'from functools import partial\n')] |
import sys
from itertools import chain
from pathlib import Path
import pytest
import acconeer.exptool as et
HERE = Path(__file__).parent
path = (HERE / ".." / ".." / "utils").resolve()
sys.path.append(path.as_posix())
from convert_to_csv import record_to_csv # noqa: E402
@pytest.mark.parametrize("test_file", chain(HERE.glob("**/*.h5"), HERE.glob("**/*.npz")))
def test_csv_conversion_is_exact(test_file):
    """CSV output must match the recorded data exactly.

    Aimed to catch rounding errors and flipped columns/rows.
    """
    record = et.recording.load(test_file)

    if record.mode == et.Mode.SPARSE:
        pytest.skip("CSV-ifying of sparse data is not supported at this moment.")

    data = record.data.squeeze()
    assert data.ndim == 2

    as_rows = record_to_csv(record)
    as_cols = record_to_csv(record, sweep_as_column=True)

    assert as_rows.shape == data.shape
    assert as_cols.shape == data.T.shape

    num_rows, num_cols = data.shape
    for r in range(num_rows):
        for c in range(num_cols):
            assert complex(as_rows[r, c]) == data[r, c]
            assert complex(as_cols[c, r]) == data[r, c]
| [
"convert_to_csv.record_to_csv",
"acconeer.exptool.recording.load",
"pytest.skip",
"pathlib.Path"
] | [((119, 133), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (123, 133), False, 'from pathlib import Path\n'), ((571, 599), 'acconeer.exptool.recording.load', 'et.recording.load', (['test_file'], {}), '(test_file)\n', (588, 599), True, 'import acconeer.exptool as et\n'), ((797, 818), 'convert_to_csv.record_to_csv', 'record_to_csv', (['record'], {}), '(record)\n', (810, 818), False, 'from convert_to_csv import record_to_csv\n'), ((839, 882), 'convert_to_csv.record_to_csv', 'record_to_csv', (['record'], {'sweep_as_column': '(True)'}), '(record, sweep_as_column=True)\n', (852, 882), False, 'from convert_to_csv import record_to_csv\n'), ((646, 719), 'pytest.skip', 'pytest.skip', (['"""CSV-ifying of sparse data is not supported at this moment."""'], {}), "('CSV-ifying of sparse data is not supported at this moment.')\n", (657, 719), False, 'import pytest\n')] |
"""
Copyright (c) Django Software Foundation and individual contributors.
Copyright (c) Dependable Systems Laboratory, EPFL
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of Django nor the names of its contributors may be used
to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import os
import pkgutil
import importlib
import sys
from s2e_env.command import BaseCommand, CommandError, CommandParser
from s2e_env.utils import log
COMMANDS_DIR = os.path.join(os.path.dirname(__file__), 'commands')
def find_commands():
    """Return the names of all commands available in COMMANDS_DIR.

    Packages and names starting with an underscore are excluded. Returns an
    empty list if no commands are defined.
    """
    names = []
    for _, name, is_pkg in pkgutil.iter_modules([COMMANDS_DIR]):
        if not is_pkg and not name.startswith('_'):
            names.append(name)
    return names
def load_command_class(name):
    """Given a command name, return its Command class instance.

    All errors raised by the import process (ImportError, AttributeError)
    are allowed to propagate.
    """
    command_module = importlib.import_module(f's2e_env.commands.{name}')
    return command_module.Command()
def call_command(command_name, *args, **options):
    """
    Call the given command, with the given options and args/kwargs.
    This is the primary API you should use for calling specific commands.
    `name` may be a string or a command object. Using a string is preferred
    unless the command object is required for further processing or testing.
    """
    if isinstance(command_name, BaseCommand):
        # Command object passed in
        command = command_name
        command_name = command.__class__.__module__.split('.')[-1]
    else:
        # Load the command object by name
        command = load_command_class(command_name)

    # Simulate argument parsing to get the option defaults
    parser = command.create_parser('', command_name)
    # Use the `dest` option name from the parser option.
    # Map e.g. '--dry-run' to 'dry_run' so callers can use Python kwargs.
    # pylint: disable=protected-access
    opt_mapping = {
        min(s_opt.option_strings).lstrip('-').replace('-', '_'): s_opt.dest
        for s_opt in parser._actions if s_opt.option_strings
    }
    # Translate caller-supplied kwargs into the parser's dest names.
    arg_options = {opt_mapping.get(key, key): value for
                   key, value in options.items()}
    defaults = parser.parse_args(args=args)
    # Parsed defaults first, then explicit kwargs override them.
    # pylint: disable=protected-access
    defaults = dict(defaults._get_kwargs(), **arg_options)
    # Move positional args out of options to mimic legacy optparse
    args = defaults.pop('args', ())
    return command.execute(*args, **defaults)
class CommandManager:
    """
    Manages and executes commands.
    """

    def __init__(self, argv):
        # We must do a copy by value of the arguments, because the original sys.argv
        # may be sometimes changed arbitrarily by a call to import_module.
        self._argv = argv[:]
        self._prog_name = os.path.basename(self._argv[0])

    def main_help_text(self, commands_only=False):
        """
        Returns the main help text, as a string.

        When ``commands_only`` is True, only the sorted command names are
        listed (one per line); otherwise a full usage banner is produced.
        """
        if commands_only:
            usage = sorted(find_commands())
        else:
            usage = [
                '',
                f'Type \'{self._prog_name} help <subcommand>\' for help on a specific '
                'subcommand.',
                '',
                'Available subcommands:',
            ]
            for command in find_commands():
                usage.append(f'  {command}')

        return '\n'.join(usage)

    def fetch_command(self, subcommand):
        """
        Tries to fetch the given subcommand, printing a message with the
        appropriate command called from the command line if it can't be found.

        Exits the process with status 1 when the subcommand is unknown.
        """
        commands = find_commands()

        if subcommand not in commands:
            sys.stderr.write(f'Unknown command - {subcommand}. Type \'{self._prog_name} help\' for usage\n')
            sys.exit(1)

        return load_command_class(subcommand)

    def execute(self):
        """
        Given the command-line arguments, this figures out which subcommand is
        being run, creates a parser appropriate to that command, and runs it.
        """
        try:
            subcommand = self._argv[1]
        except IndexError:
            # Display help if no arguments were given
            subcommand = 'help'

        # Minimal pre-parser: only catches leftover positional args so that
        # `help <subcommand>` can be dispatched; real parsing happens inside
        # the command itself.
        parser = CommandParser(None,
                              usage='%(prog)s subcommand [options] [args]',
                              add_help=False)
        parser.add_argument('args', nargs='*')  # catch-all
        try:
            options, args = parser.parse_known_args(self._argv[2:])
        except CommandError:
            pass  # Ignore any option errors at this point
        # NOTE(review): if CommandError were actually raised above, `options`
        # and `args` would be unbound below — presumably this parser cannot
        # raise it for any input; confirm before relying on that.

        if subcommand == 'help':
            if '--commands' in args:
                sys.stdout.write(f'{self.main_help_text(commands_only=True)}\n')
            elif len(options.args) < 1:
                sys.stdout.write(f'{self.main_help_text()}\n')
            else:
                self.fetch_command(options.args[0]).print_help(self._prog_name, options.args[0])
        elif self._argv[1:] in (['--help'], ['-h']):
            sys.stdout.write(f'{self.main_help_text()}\n')
        else:
            self.fetch_command(subcommand).run_from_argv(self._argv)
def main():
    """
    The main function.

    Configures logging, then hands sys.argv to the command manager.
    """
    log.configure_logging()
    CommandManager(sys.argv).execute()
if __name__ == '__main__':
main()
| [
"importlib.import_module",
"s2e_env.utils.log.configure_logging",
"s2e_env.command.CommandParser",
"os.path.dirname",
"sys.stderr.write",
"os.path.basename",
"sys.exit",
"pkgutil.iter_modules"
] | [((1795, 1820), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1810, 1820), False, 'import os\n'), ((2393, 2444), 'importlib.import_module', 'importlib.import_module', (['f"""s2e_env.commands.{name}"""'], {}), "(f's2e_env.commands.{name}')\n", (2416, 2444), False, 'import importlib\n'), ((6720, 6743), 's2e_env.utils.log.configure_logging', 'log.configure_logging', ([], {}), '()\n', (6741, 6743), False, 'from s2e_env.utils import log\n'), ((4212, 4243), 'os.path.basename', 'os.path.basename', (['self._argv[0]'], {}), '(self._argv[0])\n', (4228, 4243), False, 'import os\n'), ((5675, 5761), 's2e_env.command.CommandParser', 'CommandParser', (['None'], {'usage': '"""%(prog)s subcommand [options] [args]"""', 'add_help': '(False)'}), "(None, usage='%(prog)s subcommand [options] [args]', add_help=\n False)\n", (5688, 5761), False, 'from s2e_env.command import BaseCommand, CommandError, CommandParser\n'), ((2074, 2110), 'pkgutil.iter_modules', 'pkgutil.iter_modules', (['[COMMANDS_DIR]'], {}), '([COMMANDS_DIR])\n', (2094, 2110), False, 'import pkgutil\n'), ((5119, 5223), 'sys.stderr.write', 'sys.stderr.write', (['f"""Unknown command - {subcommand}. Type \'{self._prog_name} help\' for usage\n"""'], {}), '(\n f"Unknown command - {subcommand}. Type \'{self._prog_name} help\' for usage\\n"\n )\n', (5135, 5223), False, 'import sys\n'), ((5228, 5239), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5236, 5239), False, 'import sys\n')] |
import tensorflow as tf
def cosine_similarity(x, y, eps=1e-6):
    """Batched pairwise cosine similarity.

    Args:
        x: rank-3 tensor, batches of row vectors (batch, n, d).
        y: rank-3 tensor, batches of row vectors (batch, m, d).
        eps: small constant added under the square root for numerical
            stability when a vector is (near-)zero.

    Returns:
        A (batch, n, m) tensor of cosine similarities between every row of
        ``x`` and every row of ``y`` within each batch.
    """
    # tf.matmul supports batched inputs and replaces the removed
    # tf.batch_matmul API (the function already used the 1.0+ tf.multiply,
    # so the old batch_matmul call was inconsistent and would fail).
    z = tf.matmul(x, y, transpose_b=True)
    x_norm_sq = tf.expand_dims(tf.reduce_sum(tf.multiply(x, x), 2), 2)
    y_norm_sq = tf.expand_dims(tf.reduce_sum(tf.multiply(y, y), 2), 1)
    z /= tf.sqrt(tf.multiply(x_norm_sq, y_norm_sq) + eps)
    return z
| [
"tensorflow.multiply",
"tensorflow.transpose"
] | [((88, 119), 'tensorflow.transpose', 'tf.transpose', (['y'], {'perm': '[0, 2, 1]'}), '(y, perm=[0, 2, 1])\n', (100, 119), True, 'import tensorflow as tf\n'), ((174, 191), 'tensorflow.multiply', 'tf.multiply', (['x', 'x'], {}), '(x, x)\n', (185, 191), True, 'import tensorflow as tf\n'), ((228, 245), 'tensorflow.multiply', 'tf.multiply', (['y', 'y'], {}), '(y, y)\n', (239, 245), True, 'import tensorflow as tf\n')] |
#!/usr/bin/env python3
#
# Copyright (c) 2019 LG Electronics, Inc.
#
# This software contains code licensed as described in LICENSE.
#
import os
import lgsvl
sim = lgsvl.Simulator(os.environ.get("SIMULATOR_HOST", "127.0.0.1"), 8181)
if sim.current_scene == "BorregasAve":
    sim.reset()
else:
    sim.load("BorregasAve")

# The next few lines spawns an EGO vehicle in the map
spawns = sim.get_spawn()

state = lgsvl.AgentState()
state.transform = spawns[0]

forward = lgsvl.utils.transform_to_forward(state.transform)
right = lgsvl.utils.transform_to_right(state.transform)
up = lgsvl.utils.transform_to_up(state.transform)

sim.add_agent("Lincoln2017MKZ (Apollo 5.0)", lgsvl.AgentType.EGO, state)

# This is the point from which the rays will originate from. It is raised 1m from the ground
p = spawns[0].position
p.y += 1

# useful bits in layer mask
# 0 - Default (road & ground)
# 9 - EGO vehicles
# 10 - NPC vehicles
# 11 - Pedestrian
# 12 - Obstacle

# Included layers can be hit by the rays. Otherwise the ray will go through the layer
layer_mask = 0
for bit in [0, 10, 11, 12]:  # do not put 9 here, to not hit EGO vehicle itself
    layer_mask |= 1 << bit

# raycast returns None if the ray doesn't collide with anything
# hit also has the point property which is the Unity position vector of where the ray collided with something
# Cast one ray per direction and report the distance for any hit.
directions = [
    ("right", right),
    ("left", -right),
    ("back", -forward),
    ("forward", forward),
    ("up", up),
    ("down", -up),
]
for label, direction in directions:
    hit = sim.raycast(p, direction, layer_mask)
    if hit:
        print(f"Distance {label}:", hit.distance)
| [
"lgsvl.utils.transform_to_up",
"os.environ.get",
"lgsvl.utils.transform_to_forward",
"lgsvl.utils.transform_to_right",
"lgsvl.AgentState"
] | [((409, 427), 'lgsvl.AgentState', 'lgsvl.AgentState', ([], {}), '()\n', (425, 427), False, 'import lgsvl\n'), ((466, 515), 'lgsvl.utils.transform_to_forward', 'lgsvl.utils.transform_to_forward', (['state.transform'], {}), '(state.transform)\n', (498, 515), False, 'import lgsvl\n'), ((524, 571), 'lgsvl.utils.transform_to_right', 'lgsvl.utils.transform_to_right', (['state.transform'], {}), '(state.transform)\n', (554, 571), False, 'import lgsvl\n'), ((577, 621), 'lgsvl.utils.transform_to_up', 'lgsvl.utils.transform_to_up', (['state.transform'], {}), '(state.transform)\n', (604, 621), False, 'import lgsvl\n'), ((182, 227), 'os.environ.get', 'os.environ.get', (['"""SIMULATOR_HOST"""', '"""127.0.0.1"""'], {}), "('SIMULATOR_HOST', '127.0.0.1')\n", (196, 227), False, 'import os\n')] |
import argparse, sys
__version__ = '1.0.2'
class CSArgParser(argparse.ArgumentParser):
    """Argument parser that shows help if there is an error."""

    def error(self, message, exit=False):
        """Report *message* on stderr, print usage, and exit(2) only if asked."""
        formatted = 'Error: {}\n'.format(message)
        sys.stderr.write(formatted)
        self.print_help()
        if exit:
            sys.exit(2)
| [
"sys.exit"
] | [((308, 319), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (316, 319), False, 'import argparse, sys\n')] |
"""Sensor platform for Google Home"""
from __future__ import annotations
import logging
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import DEVICE_CLASS_TIMESTAMP, STATE_UNAVAILABLE
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.entity import Entity, EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import (
ALARM_AND_TIMER_ID_LENGTH,
DATA_CLIENT,
DATA_COORDINATOR,
DOMAIN,
GOOGLE_HOME_ALARM_DEFAULT_VALUE,
ICON_ALARMS,
ICON_TIMERS,
ICON_TOKEN,
LABEL_ALARMS,
LABEL_DEVICE,
LABEL_TIMERS,
SERVICE_ATTR_ALARM_ID,
SERVICE_ATTR_TIMER_ID,
SERVICE_DELETE_ALARM,
SERVICE_DELETE_TIMER,
SERVICE_REBOOT,
)
from .entity import GoogleHomeBaseEntity
from .models import GoogleHomeAlarmStatus, GoogleHomeDevice, GoogleHomeTimerStatus
from .types import (
AlarmsAttributes,
DeviceAttributes,
GoogleHomeAlarmDict,
GoogleHomeTimerDict,
TimersAttributes,
)
_LOGGER: logging.Logger = logging.getLogger(__package__)
async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_devices: AddEntitiesCallback,
) -> bool:
    """Set up Google Home sensors for a config entry.

    Creates one device (diagnostic) sensor per coordinator device, plus
    alarms and timers sensors for devices that are reachable and have an
    auth token, and registers the delete-alarm/delete-timer/reboot
    entity services.
    """
    client = hass.data[DOMAIN][entry.entry_id][DATA_CLIENT]
    coordinator = hass.data[DOMAIN][entry.entry_id][DATA_COORDINATOR]
    sensors: list[Entity] = []
    for device in coordinator.data:
        sensors.append(
            GoogleHomeDeviceSensor(
                coordinator,
                client,
                device.device_id,
                device.name,
                device.hardware,
            )
        )
        # Alarm/timer sensors are only created for devices that are both
        # reachable and have a local-API auth token.
        if device.auth_token and device.available:
            sensors += [
                GoogleHomeAlarmsSensor(
                    coordinator,
                    client,
                    device.device_id,
                    device.name,
                    device.hardware,
                ),
                GoogleHomeTimersSensor(
                    coordinator,
                    client,
                    device.device_id,
                    device.name,
                    device.hardware,
                ),
            ]
    async_add_devices(sensors)

    platform = entity_platform.current_platform.get()

    # Services
    platform.async_register_entity_service(
        SERVICE_DELETE_ALARM,
        {vol.Required(SERVICE_ATTR_ALARM_ID): cv.string},
        "async_delete_alarm",
    )

    platform.async_register_entity_service(
        SERVICE_DELETE_TIMER,
        {vol.Required(SERVICE_ATTR_TIMER_ID): cv.string},
        "async_delete_timer",
    )

    platform.async_register_entity_service(
        SERVICE_REBOOT,
        {},
        "async_reboot_device",
    )

    return True
class GoogleHomeDeviceSensor(GoogleHomeBaseEntity):
    """Diagnostic sensor exposing a Google Home device's network details."""

    _attr_icon = ICON_TOKEN
    _attr_entity_category = EntityCategory.DIAGNOSTIC

    @property
    def label(self) -> str:
        """Label to use for name and unique id."""
        return LABEL_DEVICE

    @property
    def state(self) -> str | None:
        """IP address of the device, or None when the device is unknown."""
        device = self.get_device()
        if device is None:
            return None
        return device.ip_address

    @property
    def extra_state_attributes(self) -> DeviceAttributes:
        """Return the state attributes."""
        device = self.get_device()
        if device is not None:
            return self.get_device_attributes(device)
        # Placeholder attributes for a device that is not (yet) known.
        return {
            "device_id": None,
            "device_name": self.device_name,
            "auth_token": None,
            "ip_address": None,
            "available": False,
        }

    @staticmethod
    def get_device_attributes(device: GoogleHomeDevice) -> DeviceAttributes:
        """Device representation as dictionary."""
        return {
            "device_id": device.device_id,
            "device_name": device.name,
            "auth_token": device.auth_token,
            "ip_address": device.ip_address,
            "available": device.available,
        }

    async def async_reboot_device(self) -> None:
        """Reboot the device."""
        device = self.get_device()
        if device is None:
            _LOGGER.error("Device %s is not found.", self.device_name)
            return
        await self.client.reboot_google_device(device)
class GoogleHomeAlarmsSensor(GoogleHomeBaseEntity):
    """Google Home Alarms sensor.

    State is the local ISO timestamp of the next active alarm; the full
    alarm list, next-alarm status and alarm volume are exposed as
    attributes.
    """

    _attr_icon = ICON_ALARMS
    _attr_device_class = DEVICE_CLASS_TIMESTAMP

    @property
    def label(self) -> str:
        """Label to use for name and unique id."""
        return LABEL_ALARMS

    @property
    def state(self) -> str | None:
        """Next alarm timestamp, or unavailable for inactive/missed alarms."""
        device = self.get_device()
        if not device:
            return None
        next_alarm = device.get_next_alarm()
        # Inactive and missed alarms should not surface as a "next alarm".
        return (
            next_alarm.local_time_iso
            if next_alarm
            and next_alarm.status
            not in (GoogleHomeAlarmStatus.INACTIVE, GoogleHomeAlarmStatus.MISSED)
            else STATE_UNAVAILABLE
        )

    @property
    def extra_state_attributes(self) -> AlarmsAttributes:
        """Return the state attributes."""
        return {
            "next_alarm_status": self._get_next_alarm_status(),
            "alarm_volume": self._get_alarm_volume(),
            "alarms": self._get_alarms_data(),
        }

    def _get_next_alarm_status(self) -> str:
        """Update next alarm status from coordinator."""
        device = self.get_device()
        next_alarm = device.get_next_alarm() if device else None
        # Fall back to the NONE status when there is no device or no alarm.
        return (
            next_alarm.status.name.lower()
            if next_alarm
            else GoogleHomeAlarmStatus.NONE.name.lower()
        )

    def _get_alarm_volume(self) -> float:
        """Update alarm volume status from coordinator."""
        device = self.get_device()
        alarm_volume = device.get_alarm_volume() if device else None
        return alarm_volume if alarm_volume else GOOGLE_HOME_ALARM_DEFAULT_VALUE

    def _get_alarms_data(self) -> list[GoogleHomeAlarmDict]:
        """Update alarms data extracting it from coordinator."""
        device = self.get_device()
        return (
            [alarm.as_dict() for alarm in device.get_sorted_alarms()] if device else []
        )

    @staticmethod
    def is_valid_alarm_id(alarm_id: str) -> bool:
        """Checks if the alarm id provided is valid (``alarm/<token>`` form)."""
        return (
            alarm_id.startswith("alarm/") and len(alarm_id) == ALARM_AND_TIMER_ID_LENGTH
        )

    async def async_delete_alarm(self, alarm_id: str) -> None:
        """Service call to delete alarm on device."""
        device = self.get_device()
        if device is None:
            _LOGGER.error("Device %s is not found.", self.device_name)
            return
        if not self.is_valid_alarm_id(alarm_id):
            _LOGGER.error(
                "Incorrect ID format! Please provide a valid alarm ID. "
                "See services tab for more info."
            )
            return
        await self.client.delete_alarm_or_timer(device=device, item_to_delete=alarm_id)
class GoogleHomeTimersSensor(GoogleHomeBaseEntity):
    """Google Home Timers sensor."""

    # Bug fix: Home Assistant reads _attr_icon (singular); the previous
    # _attr_icons was silently ignored, leaving the entity without an icon.
    # The sibling GoogleHomeAlarmsSensor already uses the singular form.
    _attr_icon = ICON_TIMERS
    _attr_device_class = DEVICE_CLASS_TIMESTAMP

    @property
    def label(self) -> str:
        """Label to use for name and unique id."""
        return LABEL_TIMERS

    @property
    def state(self) -> str | None:
        """Local ISO time of the next timer, else unavailable."""
        device = self.get_device()
        if not device:
            return None
        timer = device.get_next_timer()
        return (
            timer.local_time_iso
            if timer and timer.local_time_iso
            else STATE_UNAVAILABLE
        )

    @property
    def extra_state_attributes(self) -> TimersAttributes:
        """Return the state attributes."""
        return {
            "next_timer_status": self._get_next_timer_status(),
            "timers": self._get_timers_data(),
        }

    def _get_next_timer_status(self) -> str:
        """Next timer status taken from the coordinator data."""
        device = self.get_device()
        next_timer = device.get_next_timer() if device else None
        return (
            next_timer.status.name.lower()
            if next_timer
            else GoogleHomeTimerStatus.NONE.name.lower()
        )

    def _get_timers_data(self) -> list[GoogleHomeTimerDict]:
        """Sorted timer dictionaries extracted from the coordinator."""
        device = self.get_device()
        return (
            [timer.as_dict() for timer in device.get_sorted_timers()] if device else []
        )

    @staticmethod
    def is_valid_timer_id(timer_id: str) -> bool:
        """Checks if the timer id provided is valid."""
        return (
            timer_id.startswith("timer/") and len(timer_id) == ALARM_AND_TIMER_ID_LENGTH
        )

    async def async_delete_timer(self, timer_id: str) -> None:
        """Service call to delete a timer on the device."""
        device = self.get_device()
        if device is None:
            _LOGGER.error("Device %s is not found.", self.device_name)
            return
        if not self.is_valid_timer_id(timer_id):
            _LOGGER.error(
                "Incorrect ID format! Please provide a valid timer ID. "
                "See services tab for more info."
            )
            return
        await self.client.delete_alarm_or_timer(device=device, item_to_delete=timer_id)
| [
"logging.getLogger",
"voluptuous.Required",
"homeassistant.helpers.entity_platform.current_platform.get"
] | [((1160, 1190), 'logging.getLogger', 'logging.getLogger', (['__package__'], {}), '(__package__)\n', (1177, 1190), False, 'import logging\n'), ((2382, 2420), 'homeassistant.helpers.entity_platform.current_platform.get', 'entity_platform.current_platform.get', ([], {}), '()\n', (2418, 2420), False, 'from homeassistant.helpers import config_validation as cv, entity_platform\n'), ((2520, 2555), 'voluptuous.Required', 'vol.Required', (['SERVICE_ATTR_ALARM_ID'], {}), '(SERVICE_ATTR_ALARM_ID)\n', (2532, 2555), True, 'import voluptuous as vol\n'), ((2689, 2724), 'voluptuous.Required', 'vol.Required', (['SERVICE_ATTR_TIMER_ID'], {}), '(SERVICE_ATTR_TIMER_ID)\n', (2701, 2724), True, 'import voluptuous as vol\n')] |
import redis
import json
from . import config
redis_instance = None
def set_up(host, port, db):
    """Initialise the module-level Redis client used by ModuleStorage.

    Must be called once before any ModuleStorage instance performs I/O.
    """
    global redis_instance
    redis_instance = redis.StrictRedis(host=host, port=port, db=db)
class ModuleStorage():
    """Namespaced key/value storage for a module, backed by Redis.

    Values are JSON-wrapped under a "data" key so any JSON-serialisable
    value (including null) round-trips through Redis string storage.
    """

    def __init__(self, module_id):
        # Per-module namespace, e.g. "module:<prefix>"; the prefix comes
        # from the enabled_modules section of the application config.
        self.key_prefix = "module:" + config.config.enabled_modules[module_id]["storage_prefix"]

    @property
    def redis(self):
        """The shared Redis client (initialised via set_up())."""
        return redis_instance

    def prefixed_key(self, key):
        """Return *key* qualified with this module's namespace prefix."""
        return f"{self.key_prefix}:{key}"

    def get(self, key):
        """Return the stored value for *key*, or None when absent."""
        # Consistency fix: go through the `redis` property instead of
        # touching the module-level global directly.
        data_json = self.redis.get(self.prefixed_key(key))
        if not data_json:
            return None
        data = json.loads(data_json)
        return data.get("data")

    def set(self, key, value):
        """Store *value* (any JSON-serialisable object) under *key*."""
        data_json = json.dumps({"data": value})
        return self.redis.set(self.prefixed_key(key), data_json)
| [
"json.loads",
"json.dumps",
"redis.StrictRedis"
] | [((146, 192), 'redis.StrictRedis', 'redis.StrictRedis', ([], {'host': 'host', 'port': 'port', 'db': 'db'}), '(host=host, port=port, db=db)\n', (163, 192), False, 'import redis\n'), ((645, 666), 'json.loads', 'json.loads', (['data_json'], {}), '(data_json)\n', (655, 666), False, 'import json\n'), ((751, 778), 'json.dumps', 'json.dumps', (["{'data': value}"], {}), "({'data': value})\n", (761, 778), False, 'import json\n')] |
#!/usr/bin/env python
import argparse
import html
import os
import pickle
import sys
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('filename')
    args = parser.parse_args()
    if os.path.splitext(args.filename)[1] != '.abc':
        # We can not read this file type
        sys.exit(1)
    # SECURITY: pickle.load() can execute arbitrary code embedded in the
    # file -- only run this script on files from a trusted source.
    with open(args.filename, 'rb') as f:
        obj = pickle.load(f)
    name = obj['name']
    # Escape the user-controlled value before embedding it in HTML to
    # prevent markup/script injection in the generated page.
    print("<p>Your name is {}</p>".format(html.escape(name)))
| [
"os.path.splitext",
"pickle.load",
"argparse.ArgumentParser",
"sys.exit"
] | [((115, 140), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (138, 140), False, 'import argparse\n'), ((311, 322), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (319, 322), False, 'import sys\n'), ((379, 393), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (390, 393), False, 'import pickle\n'), ((216, 247), 'os.path.splitext', 'os.path.splitext', (['args.filename'], {}), '(args.filename)\n', (232, 247), False, 'import os\n')] |
"""=========================================
Read Mapping parameter titration pipeline
=========================================
* align reads to the genome using a range of different parameters
* calculate alignment statistics
Requirements
------------
On top of the default CGAT setup, the pipeline requires the following
software to be in the path:
+--------------------+-------------------+------------------------------------------------+
|*Program* |*Version* |*Purpose* |
+--------------------+-------------------+------------------------------------------------+
|bowtie_ |>=0.12.7 |read mapping |
+--------------------+-------------------+------------------------------------------------+
Pipeline Output
===============
The results of the computation are all stored in an sqlite relational
database :file:`csvdb`.
Glossary
========
.. glossary::
bowtie
bowtie_ - a read mapper
.. _bowtie: http://bowtie-bio.sourceforge.net/index.shtml
Code
====
"""
import sys
import os
import CGAT.Experiment as E
from ruffus import *
import pysam
import CGATPipelines.PipelineMapping as PipelineMapping
import CGATPipelines.Pipeline as P
# Submit jobs to the cluster rather than running them locally.
USECLUSTER = True
###################################################
###################################################
###################################################
# Pipeline configuration
###################################################
# Load pipeline parameters from the standard ini search path:
# <this module>/pipeline.ini, ../pipeline.ini, ./pipeline.ini.
P.getParameters(["%s/pipeline.ini" %
                 os.path.splitext(__file__)[0], "../pipeline.ini", "pipeline.ini"])
PARAMS = P.PARAMS
# Bowtie parameter grid: every combination of mismatch policy (-n or -v,
# 0-3 mismatches) and multi-hit cutoff (-m 1-5), always trimming one base
# from the 3' end.  Keys look like 'n2m1'; values are the option strings.
# Insertion order matches the original literal: all -n combinations first
# (grouped by -m, then mismatch count), then all -v combinations.
bowtie_options = {
    "%s%dm%d" % (flag, mismatches, max_hits):
    "-%s %d -a --best --strata -m %d -3 1" % (flag, mismatches, max_hits)
    for flag in ("n", "v")
    for max_hits in range(1, 6)
    for mismatches in range(4)
}
###################################################################
###################################################################
###################################################################
# MAP READS
@files([(PARAMS["test_file"], "%s.bam" % x, bowtie_options.get(x)) for x in list(bowtie_options.keys())])
def buildBAM(infile, outfile, options):
    '''Map reads with bowtie, once per parameter combination.

    *options* carries the bowtie flag string for this task instance.
    '''
    job_threads = PARAMS["bowtie_threads"]
    m = PipelineMapping.Bowtie()
    reffile = PARAMS["samtools_genome"]
    # NOTE: job_threads, reffile, bowtie_options and statement look unused,
    # but P.run() collects them from the caller's local scope (CGAT
    # pipeline convention) -- do not remove or rename them.
    bowtie_options = options
    statement = m.build((infile,), outfile)
    # print(statement)
    P.run()
#########################################################################
@transform(buildBAM,
           regex(r"(\S+).bam"),
           r"\1.nsrt.bam")
def sortByName(infile, outfile):
    '''Sort the BAM file by read name (samtools sort -n).'''
    to_cluster = USECLUSTER
    # track / statement are consumed by P.run() via the caller's locals.
    track = P.snip(outfile, ".bam")
    statement = '''samtools sort -n %(infile)s %(track)s;'''
    P.run()
#########################################################################
@transform(sortByName,
           regex(r"(\S+).nsrt.bam"),
           r"\1.nh.bam")
def addNHTag(infile, outfile):
    '''Add an NH (number of hits) tag to every read in the BAM file.

    Unmapped reads get NH=0; otherwise NH is the size of the read's
    alignment set.
    '''
    to_cluster = USECLUSTER
    inf = pysam.Samfile(infile, "rb")
    outf = pysam.Samfile(outfile, "wb", template=inf)
    # NOTE(review): read_sets() is not defined or imported in this file --
    # presumably it yields groups of alignments belonging to the same
    # read; verify it is actually in scope at runtime.
    for readset in read_sets(inf, keep_unmapped=True):
        nh = len(readset)
        for read in readset:
            if (read.is_unmapped):
                nh = 0
            read.tags = read.tags + [("NH", nh)]
            outf.write(read)
    inf.close()
    outf.close()
#########################################################################
@transform(addNHTag,
           regex(r"(\S+).bam"),
           r"\1.srt.bam")
def sortByPosition(infile, outfile):
    '''Sort the BAM file by genomic position (samtools sort).'''
    to_cluster = USECLUSTER
    # track / statement are consumed by P.run() via the caller's locals.
    track = P.snip(outfile, ".bam")
    statement = '''samtools sort %(infile)s %(track)s;'''
    P.run()
#########################################################################
@transform(sortByPosition,
           regex(r"(\S+).nh.srt.bam"),
           r"\1.dedup.bam")
def dedup(infiles, outfile):
    '''Remove duplicate alignments (Picard MarkDuplicates) and index the
    result.

    NOTE(review): despite the plural name, ruffus passes a single input
    file per task instance here.
    '''
    to_cluster = USECLUSTER
    track = P.snip(outfile, ".bam")
    statement = '''MarkDuplicates INPUT=%(infiles)s ASSUME_SORTED=true OUTPUT=%(outfile)s METRICS_FILE=%(track)s.dupstats VALIDATION_STRINGENCY=SILENT; ''' % locals(
    )
    statement += '''samtools index %(outfile)s; ''' % locals()
    # print statement
    P.run()
#########################################################################
@merge(dedup, "picard_duplicate_stats.load")
def loadPicardDuplicateStats(infiles, outfile):
    '''Merge Picard duplicate stats into single table and load into SQLite.'''
    tablename = P.toTable(outfile)
    outf = open('dupstats.txt', 'w')
    first = True
    for f in infiles:
        track = P.snip(os.path.basename(f), ".dedup.bam")
        statfile = P.snip(f, ".bam") + ".dupstats"
        if not os.path.exists(statfile):
            E.warn("File %s missing" % statfile)
            continue
        # Drop comment lines and blank lines from the Picard metrics file.
        lines = [x for x in open(
            statfile, "r").readlines() if not x.startswith("#") and x.strip()]
        if first:
            # Write the header row once, prefixed with a 'track' column.
            outf.write("%s\t%s" % ("track", lines[0]))
            first = False
        outf.write("%s\t%s" % (track, lines[1]))
    outf.close()
    tmpfilename = outf.name
    statement = '''cat %(tmpfilename)s
                | cgat csv2db
                      --add-index=track
                      --table=%(tablename)s
                > %(outfile)s
               '''
    P.run()
#########################################################################
@transform(dedup,
           regex(r"(\S+).dedup.bam"),
           r"\1.readstats")
def buildBAMStats(infile, outfile):
    '''Count number of reads mapped, duplicates, etc. (cgat bam2stats).'''
    to_cluster = USECLUSTER
    # scriptsdir and statement are picked up by P.run() from local scope.
    scriptsdir = PARAMS["general_scriptsdir"]
    statement = '''cgat bam2stats --force-output
                   --output-filename-pattern=%(outfile)s.%%s < %(infile)s > %(outfile)s'''
    P.run()
#########################################################################
@merge(buildBAMStats, "bam_stats.load")
def loadBAMStats(infiles, outfile):
    '''Import bam statistics into SQLite'''
    scriptsdir = PARAMS["general_scriptsdir"]
    # One column per track, named after the input file minus ".readstats".
    header = ",".join([P.snip(os.path.basename(x), ".readstats")
                       for x in infiles])
    filenames = " ".join(["<( cut -f 1,2 < %s)" % x for x in infiles])
    tablename = P.toTable(outfile)
    E.info("loading bam stats - summary")
    statement = """cgat combine_tables
                      --header-names=%(header)s
                      --missing-value=0
                      --ignore-empty
                   %(filenames)s
                | perl -p -e "s/bin/track/"
                | perl -p -e "s/unique/unique_alignments/"
                | cgat table2table --transpose
                | cgat csv2db
                      --allow-empty-file
                      --add-index=track
                      --table=%(tablename)s
                > %(outfile)s"""
    P.run()
    # Load the per-suffix histogram files as additional tables
    # (presumably nm = mismatches, nh = number of hits -- verify).
    for suffix in ("nm", "nh"):
        E.info("loading bam stats - %s" % suffix)
        filenames = " ".join(["%s.%s" % (x, suffix) for x in infiles])
        tname = "%s_%s" % (tablename, suffix)
        statement = """cgat combine_tables
                          --header-names=%(header)s
                          --skip-titles
                          --missing-value=0
                          --ignore-empty
                       %(filenames)s
                    | perl -p -e "s/bin/%(suffix)s/"
                    | cgat csv2db
                          --table=%(tname)s
                          --allow-empty-file
                    >> %(outfile)s """
        P.run()
#########################################################################
@transform(dedup,
           regex(r"(\S+)/bam/(\S+).bam"),
           r"\1/bam/\2.alignstats")
def buildPicardAlignStats(infile, outfile):
    '''Gather BAM file alignment statistics using Picard '''
    # NOTE(review): this regex requires inputs under a .../bam/ directory,
    # unlike the other dedup consumers -- verify it matches anything.
    to_cluster = USECLUSTER
    track = P.snip(os.path.basename(infile), ".bam")
    statement = '''CollectAlignmentSummaryMetrics INPUT=%(infile)s REFERENCE_SEQUENCE=%%(samtools_genome)s ASSUME_SORTED=true OUTPUT=%(outfile)s VALIDATION_STRINGENCY=SILENT ''' % locals(
    )
    P.run()
############################################################
@merge(buildPicardAlignStats, "picard_align_stats.load")
def loadPicardAlignStats(infiles, outfile):
    '''Merge Picard alignment stats into single table and load into SQLite.'''
    tablename = P.toTable(outfile)
    outf = P.getTempFile()
    first = True
    for f in infiles:
        track = P.snip(os.path.basename(f), ".dedup.alignstats")
        if not os.path.exists(f):
            E.warn("File %s missing" % f)
            continue
        # Drop comment lines and blank lines from the Picard metrics file.
        lines = [
            x for x in open(f, "r").readlines() if not x.startswith("#") and x.strip()]
        if first:
            # Write the header row once, prefixed with a 'track' column.
            outf.write("%s\t%s" % ("track", lines[0]))
            first = False
        for i in range(1, len(lines)):
            outf.write("%s\t%s" % (track, lines[i]))
    outf.close()
    tmpfilename = outf.name
    statement = '''cat %(tmpfilename)s
                | cgat csv2db
                      --add-index=track
                      --table=%(tablename)s
                > %(outfile)s
               '''
    P.run()
    os.unlink(tmpfilename)
############################################################
############################################################
############################################################
# Pipeline organisation
# Aggregate target: depends on every mapping/statistics task above.
@follows(buildBAM, sortByName, addNHTag, sortByPosition, dedup,
         loadPicardDuplicateStats, buildBAMStats, loadBAMStats)
def mapReads():
    '''Align reads to target genome.'''
# Top-level aggregate target for running the whole pipeline.
@follows(mapReads)
def full():
    '''run the full pipeline.'''
############################################################
############################################################
############################################################
# REPORTS
@follows(mkdir("report"))
def build_report():
    '''Build the pipeline report from scratch (clean rebuild).'''
    E.info("starting documentation build process from scratch")
    P.run_report(clean=True)
@follows(mkdir("report"))
def update_report():
    '''Incrementally update the pipeline report.'''
    E.info("updating documentation")
    P.run_report(clean=False)
def main(argv=None):
    """Entry point: run the pipeline with *argv* (defaults to sys.argv)."""
    P.main(sys.argv if argv is None else argv)
if __name__ == "__main__":
    # Delegate command-line handling to the CGAT pipeline runner.
    sys.exit(P.main(sys.argv))
| [
"CGATPipelines.Pipeline.main",
"os.path.exists",
"CGATPipelines.Pipeline.run",
"CGATPipelines.PipelineMapping.Bowtie",
"os.path.splitext",
"CGATPipelines.Pipeline.getTempFile",
"CGATPipelines.Pipeline.toTable",
"CGAT.Experiment.info",
"os.unlink",
"CGATPipelines.Pipeline.run_report",
"pysam.Samf... | [((4096, 4120), 'CGATPipelines.PipelineMapping.Bowtie', 'PipelineMapping.Bowtie', ([], {}), '()\n', (4118, 4120), True, 'import CGATPipelines.PipelineMapping as PipelineMapping\n'), ((4261, 4268), 'CGATPipelines.Pipeline.run', 'P.run', ([], {}), '()\n', (4266, 4268), True, 'import CGATPipelines.Pipeline as P\n'), ((4545, 4568), 'CGATPipelines.Pipeline.snip', 'P.snip', (['outfile', '""".bam"""'], {}), "(outfile, '.bam')\n", (4551, 4568), True, 'import CGATPipelines.Pipeline as P\n'), ((4634, 4641), 'CGATPipelines.Pipeline.run', 'P.run', ([], {}), '()\n', (4639, 4641), True, 'import CGATPipelines.Pipeline as P\n'), ((4920, 4947), 'pysam.Samfile', 'pysam.Samfile', (['infile', '"""rb"""'], {}), "(infile, 'rb')\n", (4933, 4947), False, 'import pysam\n'), ((4959, 5001), 'pysam.Samfile', 'pysam.Samfile', (['outfile', '"""wb"""'], {'template': 'inf'}), "(outfile, 'wb', template=inf)\n", (4972, 5001), False, 'import pysam\n'), ((5560, 5583), 'CGATPipelines.Pipeline.snip', 'P.snip', (['outfile', '""".bam"""'], {}), "(outfile, '.bam')\n", (5566, 5583), True, 'import CGATPipelines.Pipeline as P\n'), ((5646, 5653), 'CGATPipelines.Pipeline.run', 'P.run', ([], {}), '()\n', (5651, 5653), True, 'import CGATPipelines.Pipeline as P\n'), ((5948, 5971), 'CGATPipelines.Pipeline.snip', 'P.snip', (['outfile', '""".bam"""'], {}), "(outfile, '.bam')\n", (5954, 5971), True, 'import CGATPipelines.Pipeline as P\n'), ((6234, 6241), 'CGATPipelines.Pipeline.run', 'P.run', ([], {}), '()\n', (6239, 6241), True, 'import CGATPipelines.Pipeline as P\n'), ((6508, 6526), 'CGATPipelines.Pipeline.toTable', 'P.toTable', (['outfile'], {}), '(outfile)\n', (6517, 6526), True, 'import CGATPipelines.Pipeline as P\n'), ((7336, 7343), 'CGATPipelines.Pipeline.run', 'P.run', ([], {}), '()\n', (7341, 7343), True, 'import CGATPipelines.Pipeline as P\n'), ((7818, 7825), 'CGATPipelines.Pipeline.run', 'P.run', ([], {}), '()\n', (7823, 7825), True, 'import CGATPipelines.Pipeline as P\n'), ((8264, 8282), 
'CGATPipelines.Pipeline.toTable', 'P.toTable', (['outfile'], {}), '(outfile)\n', (8273, 8282), True, 'import CGATPipelines.Pipeline as P\n'), ((8287, 8324), 'CGAT.Experiment.info', 'E.info', (['"""loading bam stats - summary"""'], {}), "('loading bam stats - summary')\n", (8293, 8324), True, 'import CGAT.Experiment as E\n'), ((8865, 8872), 'CGATPipelines.Pipeline.run', 'P.run', ([], {}), '()\n', (8870, 8872), True, 'import CGATPipelines.Pipeline as P\n'), ((10079, 10086), 'CGATPipelines.Pipeline.run', 'P.run', ([], {}), '()\n', (10084, 10086), True, 'import CGATPipelines.Pipeline as P\n'), ((10348, 10366), 'CGATPipelines.Pipeline.toTable', 'P.toTable', (['outfile'], {}), '(outfile)\n', (10357, 10366), True, 'import CGATPipelines.Pipeline as P\n'), ((10379, 10394), 'CGATPipelines.Pipeline.getTempFile', 'P.getTempFile', ([], {}), '()\n', (10392, 10394), True, 'import CGATPipelines.Pipeline as P\n'), ((11144, 11151), 'CGATPipelines.Pipeline.run', 'P.run', ([], {}), '()\n', (11149, 11151), True, 'import CGATPipelines.Pipeline as P\n'), ((11157, 11179), 'os.unlink', 'os.unlink', (['tmpfilename'], {}), '(tmpfilename)\n', (11166, 11179), False, 'import os\n'), ((11923, 11982), 'CGAT.Experiment.info', 'E.info', (['"""starting documentation build process from scratch"""'], {}), "('starting documentation build process from scratch')\n", (11929, 11982), True, 'import CGAT.Experiment as E\n'), ((11987, 12011), 'CGATPipelines.Pipeline.run_report', 'P.run_report', ([], {'clean': '(True)'}), '(clean=True)\n', (11999, 12011), True, 'import CGATPipelines.Pipeline as P\n'), ((12091, 12123), 'CGAT.Experiment.info', 'E.info', (['"""updating documentation"""'], {}), "('updating documentation')\n", (12097, 12123), True, 'import CGAT.Experiment as E\n'), ((12128, 12153), 'CGATPipelines.Pipeline.run_report', 'P.run_report', ([], {'clean': '(False)'}), '(clean=False)\n', (12140, 12153), True, 'import CGATPipelines.Pipeline as P\n'), ((12226, 12238), 'CGATPipelines.Pipeline.main', 'P.main', 
(['argv'], {}), '(argv)\n', (12232, 12238), True, 'import CGATPipelines.Pipeline as P\n'), ((8914, 8955), 'CGAT.Experiment.info', 'E.info', (["('loading bam stats - %s' % suffix)"], {}), "('loading bam stats - %s' % suffix)\n", (8920, 8955), True, 'import CGAT.Experiment as E\n'), ((9515, 9522), 'CGATPipelines.Pipeline.run', 'P.run', ([], {}), '()\n', (9520, 9522), True, 'import CGATPipelines.Pipeline as P\n'), ((9847, 9871), 'os.path.basename', 'os.path.basename', (['infile'], {}), '(infile)\n', (9863, 9871), False, 'import os\n'), ((12281, 12297), 'CGATPipelines.Pipeline.main', 'P.main', (['sys.argv'], {}), '(sys.argv)\n', (12287, 12297), True, 'import CGATPipelines.Pipeline as P\n'), ((6628, 6647), 'os.path.basename', 'os.path.basename', (['f'], {}), '(f)\n', (6644, 6647), False, 'import os\n'), ((6682, 6699), 'CGATPipelines.Pipeline.snip', 'P.snip', (['f', '""".bam"""'], {}), "(f, '.bam')\n", (6688, 6699), True, 'import CGATPipelines.Pipeline as P\n'), ((6729, 6753), 'os.path.exists', 'os.path.exists', (['statfile'], {}), '(statfile)\n', (6743, 6753), False, 'import os\n'), ((6767, 6803), 'CGAT.Experiment.warn', 'E.warn', (["('File %s missing' % statfile)"], {}), "('File %s missing' % statfile)\n", (6773, 6803), True, 'import CGAT.Experiment as E\n'), ((10458, 10477), 'os.path.basename', 'os.path.basename', (['f'], {}), '(f)\n', (10474, 10477), False, 'import os\n'), ((10515, 10532), 'os.path.exists', 'os.path.exists', (['f'], {}), '(f)\n', (10529, 10532), False, 'import os\n'), ((10546, 10575), 'CGAT.Experiment.warn', 'E.warn', (["('File %s missing' % f)"], {}), "('File %s missing' % f)\n", (10552, 10575), True, 'import CGAT.Experiment as E\n'), ((1581, 1607), 'os.path.splitext', 'os.path.splitext', (['__file__'], {}), '(__file__)\n', (1597, 1607), False, 'import os\n'), ((8100, 8119), 'os.path.basename', 'os.path.basename', (['x'], {}), '(x)\n', (8116, 8119), False, 'import os\n')] |
from src.deployment import Deployment
from src.end_point import EndPoint
from src.etcd import Etcd
from src.pod import Pod
from src.pid_controller import PIDController
from src.request import Request
from src.worker_node import WorkerNode
import threading
import random
#The APIServer handles the communication between controllers and the cluster. It houses
#the methods that can be called for cluster management
class APIServer:
    """Handles the communication between controllers and the cluster.

    Houses the methods that can be called for cluster management; all
    cluster state lives in the wrapped Etcd instance.
    """

    def __init__(self, ctrlValues=(0, 0, 0)):
        # Bug fix: the default was a mutable list shared across calls;
        # a tuple default is safe and callers may still pass a list of
        # three PID gains.
        self.etcd = Etcd()
        self.etcdLock = threading.Lock()
        self.kubeletList = []
        self.requestWaiting = threading.Event()
        # Tune your controller
        self.controller = PIDController(ctrlValues[0], ctrlValues[1], ctrlValues[2])

    def GetDeployments(self):
        """Return a copy of the deployments stored in etcd."""
        return self.etcd.deploymentList.copy()

    def GetDepByLabel(self, label):
        """Return the deployment with the given label, or None."""
        return next(filter(lambda deployment: deployment.deploymentLabel == label, self.etcd.deploymentList), None)

    def GetWorkers(self):
        """Return a copy of the WorkerNodes stored in etcd."""
        return self.etcd.nodeList.copy()

    def GetPending(self):
        """Return a copy of the pending Pods stored in etcd."""
        return self.etcd.pendingPodList.copy()

    def GetEndPoints(self):
        """Return a copy of the EndPoints stored in etcd."""
        return self.etcd.endPointList.copy()

    def CreateWorker(self, info):
        """Create a WorkerNode from a list of arguments and register it."""
        worker = WorkerNode(info)
        self.etcd.nodeList.append(worker)
        print("Worker_Node " + worker.label + " created")

    def CreateDeployment(self, info):
        """Create a Deployment from a list of arguments and register it."""
        deployment = Deployment(info)
        self.etcd.deploymentList.append(deployment)
        print("Deployment " + deployment.deploymentLabel + " created")

    def RemoveDeployment(self, info):
        """Scale the named deployment to zero so its pods are terminated."""
        for deployment in self.etcd.deploymentList:
            if deployment.deploymentLabel == info[0]:
                deployment.expectedReplicas = 0

    def CreateEndPoint(self, pod, worker):
        """Create an EndPoint linking *pod* to *worker* and register it."""
        endPoint = EndPoint(pod, pod.deploymentLabel, worker)
        self.etcd.endPointList.append(endPoint)
        print("New Endpoint for "+endPoint.deploymentLabel+"- NODE: "+ endPoint.node.label + " POD: " + endPoint.pod.podName)

    def GetEndPointsByLabel(self, deploymentLabel):
        """Return the EndPoints associated with the given deployment."""
        endPoints = []
        for endPoint in self.etcd.endPointList:
            if endPoint.deploymentLabel == deploymentLabel:
                endPoints.append(endPoint)
        return endPoints

    def RemoveEndPoint(self, endPoint):
        """Release the endpoint's CPU back to its node and drop it from etcd."""
        endPoint.node.available_cpu+=endPoint.pod.assigned_cpu
        print("Removing EndPoint for: "+endPoint.deploymentLabel)
        self.etcd.endPointList.remove(endPoint)

    def GeneratePodName(self):
        """Return a random 3-digit label not used by any existing pod.

        Bug fix: pod names are stored as "<deployment>_<number>", so the
        previous code compared an int against the full string and never
        detected a clash.  Compare against the numeric suffix instead,
        and loop (rather than recurse) until a free label is found.
        """
        while True:
            label = random.randint(111, 999)
            taken = {pod.podName.rsplit("_", 1)[-1]
                     for pod in self.etcd.runningPodList}
            taken.update(pod.podName.rsplit("_", 1)[-1]
                         for pod in self.etcd.pendingPodList)
            if str(label) not in taken:
                return label

    def CreatePod(self, deployment):
        """Create a pending Pod using the deployment's resource metrics."""
        podName = deployment.deploymentLabel + "_" + str(self.GeneratePodName())
        pod = Pod(podName, deployment.cpuCost, deployment.deploymentLabel)
        print("Pod " + pod.podName + " created")
        self.etcd.pendingPodList.append(pod)

    def GetPod(self, endPoint):
        """Return the Pod object associated with an EndPoint."""
        return endPoint.pod

    def TerminatePod(self, endPoint):
        """Gracefully shut down the Pod behind *endPoint*."""
        pod = endPoint.pod
        pod.status="TERMINATING"
        self.RemoveEndPoint(endPoint)
        print("Removing Pod "+pod.podName)

    def CrashPod(self, info):
        """Mark one pod of the given deployment as FAILED.

        The crashed pod's resource utilisation resets to the base 0 and
        its crash event is signalled.
        """
        endPoints = self.GetEndPointsByLabel(info[0])
        if len(endPoints) == 0:
            print("No Pods to crash")
        else:
            print("GETTING PODS")
            pod = self.GetPod(endPoints[0])
            pod.status = "FAILED"
            pod.crash.set()
            print ("Pod "+pod.podName+" crashed")

    def PushReq(self, info):
        """Submit *info* as a Request on the etcd request-creator executor."""
        self.etcd.reqCreator.submit(self.ReqPusher, info)

    def ReqPusher(self, info):
        """Append a new Request to the queue and wake waiting consumers."""
        self.etcd.pendingReqs.append(Request(info))
        self.requestWaiting.set()
| [
"src.etcd.Etcd",
"src.end_point.EndPoint",
"threading.Lock",
"src.pod.Pod",
"threading.Event",
"src.deployment.Deployment",
"src.worker_node.WorkerNode",
"src.pid_controller.PIDController",
"src.request.Request",
"random.randint"
] | [((491, 497), 'src.etcd.Etcd', 'Etcd', ([], {}), '()\n', (495, 497), False, 'from src.etcd import Etcd\n'), ((516, 532), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (530, 532), False, 'import threading\n'), ((582, 599), 'threading.Event', 'threading.Event', ([], {}), '()\n', (597, 599), False, 'import threading\n'), ((620, 678), 'src.pid_controller.PIDController', 'PIDController', (['ctrlValues[0]', 'ctrlValues[1]', 'ctrlValues[2]'], {}), '(ctrlValues[0], ctrlValues[1], ctrlValues[2])\n', (633, 678), False, 'from src.pid_controller import PIDController\n'), ((1523, 1539), 'src.worker_node.WorkerNode', 'WorkerNode', (['info'], {}), '(info)\n', (1533, 1539), False, 'from src.worker_node import WorkerNode\n'), ((1792, 1808), 'src.deployment.Deployment', 'Deployment', (['info'], {}), '(info)\n', (1802, 1808), False, 'from src.deployment import Deployment\n'), ((2410, 2452), 'src.end_point.EndPoint', 'EndPoint', (['pod', 'pod.deploymentLabel', 'worker'], {}), '(pod, pod.deploymentLabel, worker)\n', (2418, 2452), False, 'from src.end_point import EndPoint\n'), ((3260, 3284), 'random.randint', 'random.randint', (['(111)', '(999)'], {}), '(111, 999)\n', (3274, 3284), False, 'import random\n'), ((3732, 3792), 'src.pod.Pod', 'Pod', (['podName', 'deployment.cpuCost', 'deployment.deploymentLabel'], {}), '(podName, deployment.cpuCost, deployment.deploymentLabel)\n', (3735, 3792), False, 'from src.pod import Pod\n'), ((4838, 4851), 'src.request.Request', 'Request', (['info'], {}), '(info)\n', (4845, 4851), False, 'from src.request import Request\n')] |
import new_module as nm

# Run only when executed directly (not when imported as a module).
if __name__ == '__main__':
    nm.say_hi()
"new_module.say_hi"
] | [((56, 67), 'new_module.say_hi', 'nm.say_hi', ([], {}), '()\n', (65, 67), True, 'import new_module as nm\n')] |
import gzip
import pickle
import os
def analyze(data_path):
    """
    Run the comparison on the given data file.

    :param data_path: path to a pickle (optionally gzip-compressed when
        it ends in ".gz") containing a tuple ``(S, true_model)``, where S
        is an event-count array of shape (T, K).
    :return: None; summary statistics are printed to stdout.
    """
    # NOTE: pickle.load() can execute arbitrary code -- only open trusted
    # data files.
    if data_path.endswith(".gz"):
        with gzip.open(data_path, 'r') as f:
            S, true_model = pickle.load(f)
    else:
        # Bug fix: pickle data is binary -- text mode ('r') raises a
        # UnicodeDecodeError / TypeError on Python 3.
        with open(data_path, 'rb') as f:
            S, true_model = pickle.load(f)

    print("True model:")
    print(true_model)

    T = float(S.shape[0])
    N = S.sum(axis=0)

    print("lambda0: ", true_model.bias_model.lambda0.mean())
    print("Average event count: ", N.mean(), " +- ", N.std())
    print("Average event count: ", (N/T).mean(), " +- ", (N/T).std())
# seed = 2650533028
# Parameters identifying the synthetic dataset to analyse
# (presumably K processes, C clusters, T time bins -- verify against
# the generation script).
K = 50
C = 5
T = 100000

data_path = os.path.join("data", "synthetic", "synthetic_K%d_C%d_T%d.pkl.gz" % (K,C,T))
analyze(data_path)
| [
"pickle.load",
"os.path.join",
"gzip.open"
] | [((724, 801), 'os.path.join', 'os.path.join', (['"""data"""', '"""synthetic"""', "('synthetic_K%d_C%d_T%d.pkl.gz' % (K, C, T))"], {}), "('data', 'synthetic', 'synthetic_K%d_C%d_T%d.pkl.gz' % (K, C, T))\n", (736, 801), False, 'import os\n'), ((208, 233), 'gzip.open', 'gzip.open', (['data_path', '"""r"""'], {}), "(data_path, 'r')\n", (217, 233), False, 'import gzip\n'), ((268, 282), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (279, 282), False, 'import pickle\n'), ((361, 375), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (372, 375), False, 'import pickle\n')] |
# Run Keras on CPU
import os
# os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID" # see issue #152
# os.environ["CUDA_VISIBLE_DEVICES"] = " " # -1 if CPU
# Importations
from IPython.display import Image
# Compressed pickle
import pickle
from compress_pickle import dump as cdump
from compress_pickle import load as cload
import io
# Importations
import numpy as np
import pandas as pd
from time import time
import re
import os
import random
import time
# Deep learning
import tensorflow as tf
import keras
from keras.models import Sequential, Model, load_model
from keras.regularizers import l2
from keras.layers import Dense, Input, Flatten, Dropout, BatchNormalization, Activation
from keras.wrappers.scikit_learn import KerasClassifier
from keras.constraints import maxnorm
from keras.callbacks import ModelCheckpoint, EarlyStopping, LearningRateScheduler
from keras.utils.vis_utils import plot_model
from keras.utils import np_utils
from keras.layers.convolutional import Conv2D
from keras.layers.pooling import MaxPooling2D, AveragePooling2D
from keras.layers.recurrent import LSTM, GRU
from keras.layers.wrappers import TimeDistributed
from keras.layers.merge import concatenate
from keras.optimizers import SGD, Adam
from keras.preprocessing.image import load_img, img_to_array, ImageDataGenerator
# Image Processing
from imutils import paths, build_montages
import imutils
import cv2
# Gridsearch
from sklearn.model_selection import GridSearchCV, KFold, train_test_split, cross_val_score
from keras.wrappers.scikit_learn import KerasClassifier, KerasRegressor
from sklearn.preprocessing import Normalizer, StandardScaler, MinMaxScaler, LabelBinarizer, MultiLabelBinarizer, LabelEncoder
from sklearn.utils import shuffle
from sklearn.pipeline import Pipeline
from sklearn.metrics import mean_squared_error, roc_auc_score, auc, confusion_matrix, accuracy_score, classification_report
# Visuals
import seaborn as sns
import matplotlib.pyplot as plt
# Plotting training class
from IPython.display import clear_output
# Visuals scripts
import sys
sys.path.append('..') # Parent folder
from drawer.keras_util import convert_drawer_model
from drawer.pptx_util import save_model_to_pptx
from drawer.matplotlib_util import save_model_to_file | [
"sys.path.append"
] | [((2066, 2087), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (2081, 2087), False, 'import sys\n')] |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from openstack_dashboard.test.integration_tests.pages import loginpage
from openstack_dashboard.test.integration_tests.regions import messages
# Module-level logger for this integration-test fixture module.
LOGGER = logging.getLogger(__name__)
def login(test_case):
    """Log the demo user in, yield to the test, then log out.

    Generator fixture: everything before ``yield`` is setup (create the
    demo user, log in, switch to the home project, assert a success
    message appeared and no error did); everything after is teardown.

    :param test_case: test case providing ``driver``, ``CONFIG`` and
        credential/project attributes.
    """
    test_case.login_pg = loginpage.LoginPage(test_case.driver,
                                            test_case.CONFIG)
    test_case.login_pg.go_to_login_page()
    test_case.create_demo_user()
    test_case.home_pg = test_case.login_pg.login(test_case.TEST_USER_NAME,
                                                 test_case.TEST_PASSWORD)
    test_case.home_pg.change_project(test_case.HOME_PROJECT)
    test_case.assertTrue(
        test_case.home_pg.find_message_and_dismiss(messages.SUCCESS))
    test_case.assertFalse(
        test_case.home_pg.find_message_and_dismiss(messages.ERROR))
    yield
    if test_case.home_pg.is_logged_in:
        test_case.home_pg.log_out()
    else:
        # Fix: Logger.warn() is deprecated; warning() is the supported API.
        LOGGER.warning("{!r} isn't logged in".format(test_case.TEST_USER_NAME))
| [
"logging.getLogger",
"openstack_dashboard.test.integration_tests.pages.loginpage.LoginPage"
] | [((742, 769), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (759, 769), False, 'import logging\n'), ((819, 874), 'openstack_dashboard.test.integration_tests.pages.loginpage.LoginPage', 'loginpage.LoginPage', (['test_case.driver', 'test_case.CONFIG'], {}), '(test_case.driver, test_case.CONFIG)\n', (838, 874), False, 'from openstack_dashboard.test.integration_tests.pages import loginpage\n')] |
#!/usr/bin/env python
"""
Computer practical 6.1. Fresnel diffraction, plane wavefront.
=============================================================
This is part of the 'computer practical' set of assignments.
Demonstrates Fresnel diffraction when a plane wavefront enters
a round hole.
Measure the values of z and d for which minima and/or maxima on-axis occur
and apply the Fresnel-zone theory to find the wavelength of the light.
"""
import matplotlib
matplotlib.use("TkAgg")
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
import matplotlib.pyplot as plt
import sys
import webbrowser
if sys.version_info[0] < 3:
from Tkinter import *
import Tkinter as Tk
else:
from tkinter import *
import tkinter as Tk
from LightPipes import *
# Main Tk window and close handler.
root = Tk.Tk()
root.wm_title("Computer practical: 6.1 Fresnel plane wavefront. LP-version = " + LPversion)
root.wm_protocol("WM_DELETE_WINDOW", root.quit)
# Physical and numerical parameters of the simulation.
wavelength=530*nm;
size=5*mm;
N=200; N2=int(N/2)  # grid size and centre index
z=20*cm  # default propagation distance
R=0.5*mm  # default aperture radius
# Tk-bound slider variables: D = aperture diameter [mm], Z = distance [cm].
D=DoubleVar()
Z=DoubleVar()
D.set(2*R/mm)
Z.set(z/cm)
# Matplotlib figure embedded in the Tk window.
fig=plt.figure(figsize=(8,8))
ax1 = fig.add_subplot(111)
canvas = FigureCanvasTkAgg(fig, master=root)
canvas._tkcanvas.pack(side=Tk.LEFT, fill=Tk.BOTH, expand=1)
# Status-line text shown below the controls (updated on mouse motion).
v=StringVar()
def TheExample(event):
    """Recompute and redraw the Fresnel diffraction pattern.

    Reads the slider values Z (distance, cm) and D (aperture diameter,
    mm) from the enclosing module, propagates a plane wave through a
    circular aperture and plots the resulting intensity. Bound as the
    ``command`` callback of both sliders; ``event`` is the slider value
    passed by Tk and is unused.
    """
    global I
    F=Begin(size,wavelength,N)
    z=Z.get()*cm
    R=D.get()/2*mm
    F=CircAperture(R,0,0,F)
    # Fresnel number decides the propagation routine: Forvard (FFT-based)
    # for large FN, Fresnel (direct integration) otherwise.
    FN=R*R/z/wavelength
    if (FN >= 15.0):
        F=Forvard(z,F)
    else:
        F=Fresnel(z,F)
    I=Intensity(0,F)
    ax1.clear()
    ax1.contourf(I,50,cmap='hot'); ax1.axis('off'); ax1.axis('equal')
    # NOTE(review): 'str' shadows the built-in name; harmless locally.
    str='Intensity distribution\ncenter-irradiance = %3.3f [a.u.]' %I[N2][N2]
    ax1.set_title(str)
    canvas.draw()
def motion(event):
    """Show local coordinates and irradiance while the mouse is over the plot."""
    xpos = event.xdata
    ypos = event.ydata
    inside = xpos and ypos is not None and 0 < xpos < N and 0 < ypos < N
    if not inside:
        # Pointer left the plot area: clear readout, restore cursor.
        v.set('')
        root.configure(cursor='arrow')
        return
    v.set('x=%3.2f mm, y=%3.2f mm\n I=%3.3f [a.u.]'
          % ((-size/2 + xpos*size/N)/mm,
             (-size/2 + ypos*size/N)/mm,
             I[int(xpos)][int(ypos)]))
    root.configure(cursor='crosshair')
def openbrowser(event):
    """Open the online documentation for this example in a new browser window."""
    url = r"https://opticspy.github.io/lightpipes/FresnelDiffraction.html"
    webbrowser.open_new(url)
def _quit():
    # Stop the Tk main loop; root.destroy() at module bottom then tears down.
    root.quit()
# Slider: aperture diameter [mm]; re-runs the simulation on every change.
Scale( root,
        takefocus = 1,
        orient='horizontal',
        label = 'diameter aperture [mm]',
        length = 200, from_=0.5, to=size/2/mm,
        resolution = 0.001,
        variable = D,
        cursor="hand2",
        command = TheExample).pack()
# Slider: propagation distance z [cm]; re-runs the simulation on every change.
Scale( root,
        takefocus = 1,
        orient='horizontal',
        label = 'z [cm]',
        length = 200,
        from_=0.01, to=200.0,
        resolution = 0.01,
        variable = Z,
        cursor="hand2",
        command = TheExample).pack()
# Quit button ends the main loop via _quit().
Button( root,
        width = 24,
        text='Quit',
        cursor="hand2",
        command=_quit).pack(pady=10)
# Clickable "help" label opening the online documentation.
link = Label(root, text="help", fg="blue", cursor="hand2")
link.pack()
link.bind("<Button-1>", openbrowser)
# Status readout driven by the mouse-motion handler.
Label(root, textvariable=v).pack(pady=50)
cid = fig.canvas.mpl_connect('motion_notify_event', motion)
# Draw the initial pattern, then enter the event loop.
TheExample(0)
root.mainloop()
root.destroy()
| [
"matplotlib.use",
"webbrowser.open_new",
"matplotlib.pyplot.figure",
"tkinter.Tk",
"matplotlib.backends.backend_tkagg.FigureCanvasTkAgg"
] | [((486, 509), 'matplotlib.use', 'matplotlib.use', (['"""TkAgg"""'], {}), "('TkAgg')\n", (500, 509), False, 'import matplotlib\n'), ((805, 812), 'tkinter.Tk', 'Tk.Tk', ([], {}), '()\n', (810, 812), True, 'import tkinter as Tk\n'), ((1088, 1114), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 8)'}), '(figsize=(8, 8))\n', (1098, 1114), True, 'import matplotlib.pyplot as plt\n'), ((1151, 1186), 'matplotlib.backends.backend_tkagg.FigureCanvasTkAgg', 'FigureCanvasTkAgg', (['fig'], {'master': 'root'}), '(fig, master=root)\n', (1168, 1186), False, 'from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg\n'), ((2096, 2185), 'webbrowser.open_new', 'webbrowser.open_new', (['"""https://opticspy.github.io/lightpipes/FresnelDiffraction.html"""'], {}), "(\n 'https://opticspy.github.io/lightpipes/FresnelDiffraction.html')\n", (2115, 2185), False, 'import webbrowser\n')] |
"""Router and Logic for the Recipe of the Website"""
from datetime import timedelta
from typing import Union
import fastapi
from fastapi.responses import HTMLResponse
from sqlalchemy.orm import Session
from starlette.requests import Request
from starlette.templating import Jinja2Templates
from db.database import get_db
from schemes import scheme_cuisine
from schemes import scheme_filter
from schemes.scheme_user import User
from services import service_rec
from tools.security import get_current_user
# Template directory and the router this module attaches its endpoints to.
templates = Jinja2Templates("templates")
router = fastapi.APIRouter()
def _duration_parts(delta):
    """Split a timedelta into whole days/hours/minutes/seconds.

    :param delta: a ``datetime.timedelta``
    :return: dict with keys ``days``, ``hours``, ``minutes``, ``seconds``
    """
    total_seconds = delta.total_seconds()
    return {
        "days": int(total_seconds // 86400),
        "hours": int(total_seconds % 86400 // 3600),
        "minutes": int(total_seconds % 3600 // 60),
        "seconds": int(total_seconds % 60),
    }


@router.get("/findrecipe", response_class=HTMLResponse)
async def findrecipe(
    request: Request,
    length: int,
    keywords: Union[str, None] = None,
    db_session: Session = fastapi.Depends(get_db),
    current_user: User = fastapi.Depends(get_current_user),
):
    """Requests user settings and search for recipe.

    Args:
        request (Request): the http request
        length (int): the maximal total time in seconds (0 = unlimited)
        keywords (Union[str, None], optional): the keywords. Defaults to None.

    Returns:
        TemplateResponse: the http response rendering the found recipe
    """
    if length == 0:
        # 0 means "no limit": use an effectively unbounded upper limit.
        total_length = timedelta(days=100)
    else:
        total_length = timedelta(seconds=length)
    rec_filter = scheme_filter.FilterRecipe(
        cuisines=[scheme_cuisine.PydanticCuisine(name="Restaurant")],
        rating=1,
        keyword=keywords,
        total_time=total_length,
    )
    recipe = service_rec.search_recipe(db_session=db_session, user=current_user, recipe_filter=rec_filter)
    # Bug fix: cook-time hours were previously computed as total // 3600
    # (including whole days), unlike prep time, and cook-time minutes were
    # assigned twice with different formulas. Both durations now use the
    # same day/hour/minute/second breakdown.
    prep_time = _duration_parts(recipe.prepTime)
    cook_time = _duration_parts(recipe.cookTime)
    return templates.TemplateResponse(
        "recipe/recipe_result.html",
        {"request": request, "recipe": recipe, "prepTime": prep_time, "cookTime": cook_time},
    )
| [
"schemes.scheme_cuisine.PydanticCuisine",
"services.service_rec.search_recipe",
"fastapi.APIRouter",
"datetime.timedelta",
"starlette.templating.Jinja2Templates",
"fastapi.Depends"
] | [((520, 548), 'starlette.templating.Jinja2Templates', 'Jinja2Templates', (['"""templates"""'], {}), "('templates')\n", (535, 548), False, 'from starlette.templating import Jinja2Templates\n'), ((558, 577), 'fastapi.APIRouter', 'fastapi.APIRouter', ([], {}), '()\n', (575, 577), False, 'import fastapi\n'), ((762, 785), 'fastapi.Depends', 'fastapi.Depends', (['get_db'], {}), '(get_db)\n', (777, 785), False, 'import fastapi\n'), ((812, 845), 'fastapi.Depends', 'fastapi.Depends', (['get_current_user'], {}), '(get_current_user)\n', (827, 845), False, 'import fastapi\n'), ((1477, 1574), 'services.service_rec.search_recipe', 'service_rec.search_recipe', ([], {'db_session': 'db_session', 'user': 'current_user', 'recipe_filter': 'rec_filter'}), '(db_session=db_session, user=current_user,\n recipe_filter=rec_filter)\n', (1502, 1574), False, 'from services import service_rec\n'), ((1187, 1206), 'datetime.timedelta', 'timedelta', ([], {'days': '(100)'}), '(days=100)\n', (1196, 1206), False, 'from datetime import timedelta\n'), ((1240, 1265), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'length'}), '(seconds=length)\n', (1249, 1265), False, 'from datetime import timedelta\n'), ((1329, 1378), 'schemes.scheme_cuisine.PydanticCuisine', 'scheme_cuisine.PydanticCuisine', ([], {'name': '"""Restaurant"""'}), "(name='Restaurant')\n", (1359, 1378), False, 'from schemes import scheme_cuisine\n')] |
print("From python: Within python module")
import os,sys
# Make the working directory importable so ml_module can be found.
HERE = os.getcwd()
sys.path.insert(0,HERE)
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
# History buffer: one row per solver timestep, one column per grid point
# (including two ghost/boundary columns that are stripped in analyses_func).
data_array = np.zeros(shape=(2001,258)) # Very important that this matches the number of timesteps in the main solver
# Periodic spatial grid on [0, 2*pi) with 256 points.
x = np.arange(start=0,stop=2.0*np.pi,step=2.0*np.pi/256)
# Next free row in data_array; advanced by collection_func.
iternum = 0
def collection_func(input_array):
    """Record one solver snapshot into the global history buffer.

    Copies ``input_array`` into row ``iternum`` of ``data_array`` and
    advances the row counter. Intended to be called once per timestep
    by the external solver.
    """
    global data_array, iternum
    row = iternum
    data_array[row, :] = input_array[:]
    iternum = row + 1
    return None
def analyses_func():
    """Post-process the collected field history.

    Plots the field evolution, performs an SVD of the space-time data,
    saves the three leading spatial modes to 'eigenvectors.npy', and
    trains an LSTM (from ml_module) on their temporal coefficients.

    :return: the three leading spatial eigenvectors, shape (n_space, 3)
    """
    global data_array, x
    plt.figure()
    for i in range(0,np.shape(data_array)[0],400):
        plt.plot(x,data_array[i,1:-1],label='Timestep '+str(i))
    plt.legend()
    plt.xlabel('x')
    plt.ylabel('u')  # bug fix: was a second xlabel('u'), overwriting 'x'
    plt.title('Field evolution')
    plt.savefig('Field_evolution.png')
    plt.close()
    # Strip the two ghost/boundary columns before the SVD.
    data_array = data_array[:,1:-1]
    print('Performing SVD')
    u,s,v = np.linalg.svd(data_array,full_matrices=False)
    # Plot the three leading SVD spatial modes.
    plt.figure()
    plt.plot(x, v[0,:],label='Mode 0')
    plt.plot(x, v[1,:],label='Mode 1')
    plt.plot(x, v[2,:],label='Mode 2')
    plt.legend()
    plt.title('SVD Eigenvectors')
    plt.xlabel('x')
    plt.ylabel('u')  # bug fix: was a second xlabel('u')
    plt.savefig('SVD_Eigenvectors.png')
    plt.close()
    np.save('eigenvectors.npy',v[0:3,:].T)
    # Project the data onto the leading modes to get time coefficients,
    # then train an LSTM on the first half and test on the second.
    time_series = np.matmul(v[0:3,:],data_array.T).T
    num_timesteps = np.shape(time_series)[0]
    train_series = time_series[:num_timesteps//2]
    test_series = time_series[num_timesteps//2:]
    # import the LSTM architecture and initialize
    from ml_module import standard_lstm
    ml_model = standard_lstm(train_series)
    # Train the model
    ml_model.train_model()
    # Restore best weights and perform an inference
    print('Performing inference on testing data')
    ml_model.model_inference(test_series)
    return_data = v[0:3,:].T
    return return_data
if __name__ == '__main__':
    # No standalone behavior: the external solver drives this module by
    # calling collection_func each timestep and analyses_func at the end.
    pass
"sys.path.insert",
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"os.getcwd",
"matplotlib.pyplot.close",
"numpy.zeros",
"matplotlib.pyplot.figure",
"numpy.matmul",
"numpy.linalg.svd",
"matplotlib.pyplot.title",
"numpy.shape",
... | [((65, 76), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (74, 76), False, 'import os, sys\n'), ((77, 101), 'sys.path.insert', 'sys.path.insert', (['(0)', 'HERE'], {}), '(0, HERE)\n', (92, 101), False, 'import os, sys\n'), ((191, 218), 'numpy.zeros', 'np.zeros', ([], {'shape': '(2001, 258)'}), '(shape=(2001, 258))\n', (199, 218), True, 'import numpy as np\n'), ((300, 360), 'numpy.arange', 'np.arange', ([], {'start': '(0)', 'stop': '(2.0 * np.pi)', 'step': '(2.0 * np.pi / 256)'}), '(start=0, stop=2.0 * np.pi, step=2.0 * np.pi / 256)\n', (309, 360), True, 'import numpy as np\n'), ((562, 574), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (572, 574), True, 'import matplotlib.pyplot as plt\n'), ((694, 706), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (704, 706), True, 'import matplotlib.pyplot as plt\n'), ((711, 726), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""x"""'], {}), "('x')\n", (721, 726), True, 'import matplotlib.pyplot as plt\n'), ((731, 746), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""u"""'], {}), "('u')\n", (741, 746), True, 'import matplotlib.pyplot as plt\n'), ((751, 779), 'matplotlib.pyplot.title', 'plt.title', (['"""Field evolution"""'], {}), "('Field evolution')\n", (760, 779), True, 'import matplotlib.pyplot as plt\n'), ((784, 818), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""Field_evolution.png"""'], {}), "('Field_evolution.png')\n", (795, 818), True, 'import matplotlib.pyplot as plt\n'), ((823, 834), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (832, 834), True, 'import matplotlib.pyplot as plt\n'), ((933, 979), 'numpy.linalg.svd', 'np.linalg.svd', (['data_array'], {'full_matrices': '(False)'}), '(data_array, full_matrices=False)\n', (946, 979), True, 'import numpy as np\n'), ((1012, 1024), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1022, 1024), True, 'import matplotlib.pyplot as plt\n'), ((1029, 1065), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'v[0, :]'], {'label': 
'"""Mode 0"""'}), "(x, v[0, :], label='Mode 0')\n", (1037, 1065), True, 'import matplotlib.pyplot as plt\n'), ((1068, 1104), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'v[1, :]'], {'label': '"""Mode 1"""'}), "(x, v[1, :], label='Mode 1')\n", (1076, 1104), True, 'import matplotlib.pyplot as plt\n'), ((1107, 1143), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'v[2, :]'], {'label': '"""Mode 2"""'}), "(x, v[2, :], label='Mode 2')\n", (1115, 1143), True, 'import matplotlib.pyplot as plt\n'), ((1146, 1158), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (1156, 1158), True, 'import matplotlib.pyplot as plt\n'), ((1163, 1192), 'matplotlib.pyplot.title', 'plt.title', (['"""SVD Eigenvectors"""'], {}), "('SVD Eigenvectors')\n", (1172, 1192), True, 'import matplotlib.pyplot as plt\n'), ((1197, 1212), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""x"""'], {}), "('x')\n", (1207, 1212), True, 'import matplotlib.pyplot as plt\n'), ((1217, 1232), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""u"""'], {}), "('u')\n", (1227, 1232), True, 'import matplotlib.pyplot as plt\n'), ((1237, 1272), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""SVD_Eigenvectors.png"""'], {}), "('SVD_Eigenvectors.png')\n", (1248, 1272), True, 'import matplotlib.pyplot as plt\n'), ((1277, 1288), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (1286, 1288), True, 'import matplotlib.pyplot as plt\n'), ((1294, 1334), 'numpy.save', 'np.save', (['"""eigenvectors.npy"""', 'v[0:3, :].T'], {}), "('eigenvectors.npy', v[0:3, :].T)\n", (1301, 1334), True, 'import numpy as np\n'), ((1698, 1725), 'ml_module.standard_lstm', 'standard_lstm', (['train_series'], {}), '(train_series)\n', (1711, 1725), False, 'from ml_module import standard_lstm\n'), ((1412, 1446), 'numpy.matmul', 'np.matmul', (['v[0:3, :]', 'data_array.T'], {}), '(v[0:3, :], data_array.T)\n', (1421, 1446), True, 'import numpy as np\n'), ((1467, 1488), 'numpy.shape', 'np.shape', (['time_series'], {}), '(time_series)\n', (1475, 1488), 
True, 'import numpy as np\n'), ((596, 616), 'numpy.shape', 'np.shape', (['data_array'], {}), '(data_array)\n', (604, 616), True, 'import numpy as np\n')] |
""" This is a forms.py that helps to work on the payload of front-end """
from django import forms
from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError
from django.contrib.auth.models import User
from django.forms import ModelForm
from django.forms.models import inlineformset_factory
from django.forms.widgets import TextInput
# from extra_views import ModelFormSetView, FormSetView
from .models import EvaluationSkill, Skill, Evaluation
class SkillForm(forms.ModelForm):
    """ModelForm exposing every field of the Skill model."""
    class Meta:
        model = Skill
        fields = '__all__'
class EvaluationForm(forms.ModelForm):
    """ModelForm for Evaluation with no excluded fields."""
    class Meta:
        model = Evaluation
        exclude = ()
class EvaluationSkillForm(forms.ModelForm):
    """ModelForm for the Evaluation-to-Skill grade rows (used by the inline formset below)."""
    class Meta:
        model = EvaluationSkill
        exclude = ()
# Inline formset attaching per-skill grade rows to an Evaluation; renders
# one extra blank row and allows deleting existing rows.
EvaluationSkillFormSet = inlineformset_factory(
    Evaluation, EvaluationSkill, form=EvaluationSkillForm,
    fields=['evaluation', 'skill', 'grade'], extra=1, can_delete=True
)
"django.forms.models.inlineformset_factory"
] | [((838, 985), 'django.forms.models.inlineformset_factory', 'inlineformset_factory', (['Evaluation', 'EvaluationSkill'], {'form': 'EvaluationSkillForm', 'fields': "['evaluation', 'skill', 'grade']", 'extra': '(1)', 'can_delete': '(True)'}), "(Evaluation, EvaluationSkill, form=EvaluationSkillForm,\n fields=['evaluation', 'skill', 'grade'], extra=1, can_delete=True)\n", (859, 985), False, 'from django.forms.models import inlineformset_factory\n')] |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.math_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import googletest
# Convenience aliases used by the test bodies.
exp = np.exp
log = np.log
# changes to turn test into imperative mode test
try:
  from tensorflow.contrib import imperative
  from tensorflow.contrib.imperative.python.imperative import test_util
except:
  # Fall back to the standalone 'imperative' package when the TF contrib
  # build is unavailable (bare except preserved from the original port).
  import imperative
  from imperative import test_util
import tensorflow as tf
# Route math_ops/constant_op through the imperative environment so the
# original graph-mode test bodies run eagerly without modification.
env = imperative.Env(tf)
math_ops = env.tf
constant_op = env.tf
class ReduceTest(test_util.TensorFlowTestCase):

  def testReduceAllDims(self):
    """reduce_sum over all axes of a 2x3 int32 matrix equals its total, 21."""
    data = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.int32)
    with self.test_session():
      total = math_ops.reduce_sum(data).eval()
      self.assertEqual(total, 21)
class RoundTest(test_util.TensorFlowTestCase):
  """Checks math_ops.round against np.round."""

  def testRounding(self):
    """round() matches np.round for float32/float64, on GPU and CPU.

    Fix: the previous bare ``except`` + ``pdb.post_mortem`` debugging
    harness was removed — it swallowed test failures and blocked
    non-interactive runs; failures now propagate to the test runner.
    """
    x = [0.49, 0.7, -0.3, -0.8]
    for dtype in [np.float32, np.double]:
      x_np = np.array(x, dtype=dtype)
      for use_gpu in [True, False]:
        with self.test_session(use_gpu=use_gpu):
          x_tf = constant_op.constant(x_np, shape=x_np.shape)
          y_tf = math_ops.round(x_tf)
          y_tf_np = y_tf.eval()
          y_np = np.round(x_np)
          self.assertAllClose(y_tf_np, y_np, atol=1e-2)
class ModTest(test_util.TensorFlowTestCase):

  def testFloat(self):
    """mod matches np.fmod for scalar and element-wise float divisors."""
    values = [0.5, 0.7, 0.3]
    for dtype in [np.float32, np.double]:
      # Exercise both a scalar divisor and a same-length vector divisor.
      for divisor in [values[0], [values[0]] * 3]:
        values_np = np.array(values, dtype=dtype)
        with self.test_session():
          values_tf = constant_op.constant(values_np, shape=values_np.shape)
          result = math_ops.mod(values_tf, divisor).eval()
          self.assertAllClose(result, np.fmod(values_np, divisor), atol=1e-2)

  def testFixed(self):
    """mod matches np.mod for scalar and vector integer divisors."""
    values = [5, 10, 23]
    for dtype in [np.int32, np.int64]:
      # Exercise both a scalar divisor and a vector divisor.
      for divisor in [values[0], values]:
        values_np = np.array(values, dtype=dtype)
        with self.test_session():
          values_tf = constant_op.constant(values_np, shape=values_np.shape)
          result = math_ops.mod(values_tf, divisor).eval()
          self.assertAllClose(result, np.mod(values_np, divisor))
class SquaredDifferenceTest(test_util.TensorFlowTestCase):

  def testSquaredDifference(self):
    """squared_difference(x, y) equals (x - y)**2 elementwise (with broadcasting)."""
    lhs = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.int32)
    rhs = np.array([-3, -2, -1], dtype=np.int32)
    expected = (lhs - rhs) * (lhs - rhs)
    with self.test_session():
      actual = math_ops.squared_difference(lhs, rhs).eval()
    self.assertAllClose(expected, actual)
if __name__ == "__main__":
  # Run all test cases via TensorFlow's googletest wrapper.
  googletest.main()
| [
"imperative.Env",
"tensorflow.python.ops.math_ops.round",
"pdb.post_mortem",
"tensorflow.python.ops.math_ops.reduce_sum",
"tensorflow.python.framework.constant_op.constant",
"tensorflow.python.ops.math_ops.squared_difference",
"numpy.array",
"sys.exc_info",
"tensorflow.python.platform.googletest.mai... | [((1291, 1309), 'imperative.Env', 'imperative.Env', (['tf'], {}), '(tf)\n', (1305, 1309), False, 'import imperative\n'), ((3592, 3609), 'tensorflow.python.platform.googletest.main', 'googletest.main', ([], {}), '()\n', (3607, 3609), False, 'from tensorflow.python.platform import googletest\n'), ((1439, 1487), 'numpy.array', 'np.array', (['[[1, 2, 3], [4, 5, 6]]'], {'dtype': 'np.int32'}), '([[1, 2, 3], [4, 5, 6]], dtype=np.int32)\n', (1447, 1487), True, 'import numpy as np\n'), ((3323, 3371), 'numpy.array', 'np.array', (['[[1, 2, 3], [4, 5, 6]]'], {'dtype': 'np.int32'}), '([[1, 2, 3], [4, 5, 6]], dtype=np.int32)\n', (3331, 3371), True, 'import numpy as np\n'), ((3380, 3418), 'numpy.array', 'np.array', (['[-3, -2, -1]'], {'dtype': 'np.int32'}), '([-3, -2, -1], dtype=np.int32)\n', (3388, 3418), True, 'import numpy as np\n'), ((1771, 1795), 'numpy.array', 'np.array', (['x'], {'dtype': 'dtype'}), '(x, dtype=dtype)\n', (1779, 1795), True, 'import numpy as np\n'), ((2184, 2198), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (2196, 2198), False, 'import sys, pdb, traceback\n'), ((2205, 2226), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (2224, 2226), False, 'import sys, pdb, traceback\n'), ((2233, 2252), 'pdb.post_mortem', 'pdb.post_mortem', (['tb'], {}), '(tb)\n', (2248, 2252), False, 'import sys, pdb, traceback\n'), ((2485, 2509), 'numpy.array', 'np.array', (['x'], {'dtype': 'dtype'}), '(x, dtype=dtype)\n', (2493, 2509), True, 'import numpy as np\n'), ((2942, 2966), 'numpy.array', 'np.array', (['x'], {'dtype': 'dtype'}), '(x, dtype=dtype)\n', (2950, 2966), True, 'import numpy as np\n'), ((1531, 1553), 'tensorflow.python.ops.math_ops.reduce_sum', 'math_ops.reduce_sum', (['x'], {}), '(x)\n', (1550, 1553), False, 'from tensorflow.python.ops import math_ops\n'), ((2561, 2605), 'tensorflow.python.framework.constant_op.constant', 'constant_op.constant', (['x_np'], {'shape': 'x_np.shape'}), '(x_np, 
shape=x_np.shape)\n', (2581, 2605), False, 'from tensorflow.python.framework import constant_op\n'), ((2623, 2648), 'tensorflow.python.ops.math_ops.mod', 'math_ops.mod', (['x_tf', 'denom'], {}), '(x_tf, denom)\n', (2635, 2648), False, 'from tensorflow.python.ops import math_ops\n'), ((2698, 2718), 'numpy.fmod', 'np.fmod', (['x_np', 'denom'], {}), '(x_np, denom)\n', (2705, 2718), True, 'import numpy as np\n'), ((3018, 3062), 'tensorflow.python.framework.constant_op.constant', 'constant_op.constant', (['x_np'], {'shape': 'x_np.shape'}), '(x_np, shape=x_np.shape)\n', (3038, 3062), False, 'from tensorflow.python.framework import constant_op\n'), ((3080, 3105), 'tensorflow.python.ops.math_ops.mod', 'math_ops.mod', (['x_tf', 'denom'], {}), '(x_tf, denom)\n', (3092, 3105), False, 'from tensorflow.python.ops import math_ops\n'), ((3155, 3174), 'numpy.mod', 'np.mod', (['x_np', 'denom'], {}), '(x_np, denom)\n', (3161, 3174), True, 'import numpy as np\n'), ((3486, 3519), 'tensorflow.python.ops.math_ops.squared_difference', 'math_ops.squared_difference', (['x', 'y'], {}), '(x, y)\n', (3513, 3519), False, 'from tensorflow.python.ops import math_ops\n'), ((1904, 1948), 'tensorflow.python.framework.constant_op.constant', 'constant_op.constant', (['x_np'], {'shape': 'x_np.shape'}), '(x_np, shape=x_np.shape)\n', (1924, 1948), False, 'from tensorflow.python.framework import constant_op\n'), ((1968, 1988), 'tensorflow.python.ops.math_ops.round', 'math_ops.round', (['x_tf'], {}), '(x_tf)\n', (1982, 1988), False, 'from tensorflow.python.ops import math_ops\n'), ((2042, 2056), 'numpy.round', 'np.round', (['x_np'], {}), '(x_np)\n', (2050, 2056), True, 'import numpy as np\n')] |
import keyboard
from logparser import parselog, validate_log
import os
from psutil import process_iter
from pyautogui import click
import subprocess
from turnhandler import backupturn, clonegame, cleanturns, delete_log, delete_temp
import yaml
from time import sleep
import threading
import time
import win32gui
import win32con
# Round numbers whose hosting or log validation failed; shared (appended)
# by run_dominions and consulted/reset by the batch functions.
failed_rounds = []
def wait_screen_load(path):
    """Poll the Dominions log until the nation-selection screen has loaded.

    :param path: Dominions log directory (with trailing separator)
    :return: True once 'playturn: autohost' appears in log.txt, False if
        the marker never shows up within the polling budget
    """
    attempts = 0
    while attempts < 1000000:
        try:
            with open(path + 'log.txt') as logfile:
                contents = logfile.read()
                if contents.rfind('playturn: autohost') != -1:
                    # Battle screen finished loading.
                    return True
                attempts += 1
        except FileNotFoundError:
            # Log not written yet; keep polling.
            attempts += 1
    return False
def select_nation():
    """Click the first nation on the nation-selection screen.

    Blocks until the 'Dominions 5' window handle is found.
    :return: True once the click has been issued
    """
    handle = win32gui.FindWindow(None, 'Dominions 5')
    while handle == 0:
        # Window not up yet; keep polling for the handle.
        handle = win32gui.FindWindow(None, 'Dominions 5')
    left, top = win32gui.ClientToScreen(handle, (0, 0))
    # The first nation entry sits 400x280 px inside the client area.
    click((left + 400, top + 280))
    return True
def go_to_province(province):
    """Drive keyboard shortcuts to open a province's battle report.

    :param province: province number where the battle occurs
    :return: True once all key presses have been sent
    """
    for key in ('esc', 'g'):  # dismiss messages, open go-to dialog
        keyboard.press_and_release(key)
    keyboard.write(str(province))  # type the province number
    # confirm, view casualties, back to map, try to add PD
    for key in ('enter', 'c', 'esc', 'd'):
        keyboard.press_and_release(key)
    return True
def wait_host(path, start_time):
    """Wait for Dominions to finish hosting the battle.

    Polls the ftherlnd file's modification time; also watches for the
    Dominions error dialog and closes it when it appears.

    :param path: Dominions game directory (with trailing separator)
    :param start_time: mtime of ftherlnd before hosting started
    :return: True if ftherlnd was updated, False if the error dialog
        appeared first
    """
    while True:
        # Hosting rewrites ftherlnd; a newer mtime means we are done.
        if os.path.getmtime(path + 'ftherlnd') > start_time:
            return True
        # NOTE(review): this window title looks mojibake-encoded
        # ('Något gick fel!'); preserved byte-for-byte on purpose, since
        # FindWindow matches the exact string.
        hwnd = win32gui.FindWindow(None, 'NÃ¥got gick fel!')
        if hwnd > 0:
            win32gui.SetForegroundWindow(hwnd)
            win32gui.PostMessage(hwnd, win32con.WM_CLOSE, 0, 0)
            return False
def run_dominions(province, game='', switch='', turn=-1):
    """Run Dominions for one simulation round.

    Either auto-hosts a battle (switch 'g -T') or opens the game
    interactively to generate and validate a battle log. Failed rounds
    are appended to the global ``failed_rounds`` list.

    :param province: province number where the battle occurs
    :param game: name of the game being simulated
    :param switch: additional Dominions command-line switches
    :param turn: simulation round number (-1 when not round-specific)
    :return: True after the Dominions process has been terminated
    """
    global failed_rounds
    # Read the configured Dominions/game paths.
    # Fix: yaml.load without a Loader is unsafe, deprecated, and raises a
    # TypeError on PyYAML >= 6; the config is plain data, so use safe_load.
    with open('./battlefortune/data/config.yaml') as file:
        paths = yaml.safe_load(file)
    dpath = paths['dompath']
    gpath = paths['gamepath']
    if turn > -1:
        # Point at the per-round clone of the game directory.
        idx = gpath.rfind("/")
        gpath = gpath[:idx] + str(turn) + gpath[idx:]
        game = game + str(turn)
    start_time = os.path.getmtime(gpath + 'ftherlnd')  # ftherlnd last update
    # Run Dominions on minimal settings.
    switches = ' --simpgui --nosteam --res 960 720 -waxsco' + switch + ' '
    program = '/k cd /d' + dpath + ' & Dominions5.exe'
    cmd = 'cmd ' + program + switches + game
    process = subprocess.Popen(cmd)  # run Dominions
    if switch == 'g -T':  # auto-hosting a battle
        success = wait_host(path=gpath, start_time=start_time)
        if not success:
            failed_rounds.append(turn)
    else:
        # Generate and validate the battle log interactively.
        wait_screen_load(dpath)   # wait for the nation-selection screen
        select_nation()           # select the first nation
        go_to_province(province)  # open the battle report
        valid = validate_log(dpath)
        if not valid:
            failed_rounds.append(turn)
    # Terminate the process (and any stray Dominions instance).
    process.kill()
    if switch != 'g -T':
        if "Dominions5.exe" in (p.name() for p in process_iter()):
            os.system("TASKKILL /F /IM Dominions5.exe")
    return True
def host_battle(game, province, rounds):
    """Host simulation rounds concurrently, bounded by the configured thread count.

    Launches ``run_dominions`` in batches of at most ``maxthreads``
    threads and joins each batch before starting the next.

    :param game: game name
    :param province: province where the battle occurs
    :param rounds: number of rounds to be hosted
    """
    switch = 'g -T'
    threads = []
    # Fix: read the config with a context manager (the previous
    # yaml.load(open(...)) leaked the file handle) and with safe_load
    # (yaml.load without a Loader is unsafe/deprecated and raises on
    # PyYAML >= 6).
    with open('./battlefortune/data/config.yaml') as file:
        max_threads = yaml.safe_load(file)['maxthreads']
    start_range = 1
    end_range = start_range + max_threads
    if end_range > (rounds + 1):
        end_range = rounds + 1
    while start_range < (rounds + 1):
        # Launch one batch of hosting rounds in parallel.
        for i in range(start_range, end_range):
            t = threading.Thread(target=run_dominions, args=(province, game, switch, i))
            threads.append(t)
            t.start()
        # Wait for the whole batch before starting the next one.
        for thread in threads:
            thread.join()
        threads = []
        start_range = start_range + max_threads
        end_range = end_range + max_threads
        if end_range > (rounds + 1):
            end_range = rounds + 1
def finalize_turn(game, province, turn=1):
    """Generate and parse the battle log for one simulation round.

    :param game: name of the game being hosted
    :param province: province number where the battle occurs
    :param turn: simulation round number
    :return: parsed turn log dict (empty when the round failed)
    """
    global failed_rounds
    # Generate the battle logs for this round.
    run_dominions(province=province, game=game, switch='d', turn=turn)
    turn_log = {}
    if turn not in failed_rounds:
        backupturn(turn)           # back up the round's turn files
        turn_log = parselog(turn)  # read and parse the battle log
    # Clean up log and temp artifacts regardless of the outcome.
    delete_log()
    delete_temp()
    return turn_log
def batchrun(rounds, game, province):
    """
    Runs X numbers of Simulation Rounds.
    :param rounds: Number of rounds to be simulated
    :param game: game name that will be simulated
    :param province: province number where battle occurs
    :return: dict with keys 'nations' (id map), 'winners' (per-round
        scores) and 'battles' (flattened battle reports)
    """
    global failed_rounds
    winners = []
    battles = []
    nations = {}
    # Clone the game directory once per round so rounds can host in parallel.
    for i in range(1, rounds + 1):
        clonegame(i)
    host_battle(game, province, rounds)
    for i in range(1, rounds + 1):
        if i in failed_rounds:
            continue
        log = finalize_turn(game, province, i)  # get turn log
        # finalize_turn may mark this round as failed while parsing; re-check.
        if i in failed_rounds:
            continue
        nations = log['nations']  # get nation ids
        winners.append(log['turn_score'])  # get turn winner
        for j in range(len(log['battlelog'])):
            battles.append(log['battlelog'][j])  # get battle report
        print('Round: ' + str(i))
    cleanturns(rounds)
    # Reset the shared failure list for the next batch.
    failed_rounds = []
    output = {
        'nations': nations,
        'winners': winners,
        'battles': battles
    }
    return output
| [
"turnhandler.backupturn",
"yaml.load",
"logparser.parselog",
"win32gui.SetForegroundWindow",
"subprocess.Popen",
"pyautogui.click",
"turnhandler.clonegame",
"keyboard.press_and_release",
"os.system",
"win32gui.PostMessage",
"turnhandler.delete_log",
"os.path.getmtime",
"threading.Thread",
... | [((1370, 1407), 'win32gui.ClientToScreen', 'win32gui.ClientToScreen', (['hwnd', '(0, 0)'], {}), '(hwnd, (0, 0))\n', (1393, 1407), False, 'import win32gui\n'), ((1465, 1490), 'pyautogui.click', 'click', (['(x + 400, y + 280)'], {}), '((x + 400, y + 280))\n', (1470, 1490), False, 'from pyautogui import click\n'), ((1718, 1751), 'keyboard.press_and_release', 'keyboard.press_and_release', (['"""esc"""'], {}), "('esc')\n", (1744, 1751), False, 'import keyboard\n'), ((1773, 1804), 'keyboard.press_and_release', 'keyboard.press_and_release', (['"""g"""'], {}), "('g')\n", (1799, 1804), False, 'import keyboard\n'), ((1878, 1913), 'keyboard.press_and_release', 'keyboard.press_and_release', (['"""enter"""'], {}), "('enter')\n", (1904, 1913), False, 'import keyboard\n'), ((1929, 1960), 'keyboard.press_and_release', 'keyboard.press_and_release', (['"""c"""'], {}), "('c')\n", (1955, 1960), False, 'import keyboard\n'), ((1985, 2018), 'keyboard.press_and_release', 'keyboard.press_and_release', (['"""esc"""'], {}), "('esc')\n", (2011, 2018), False, 'import keyboard\n'), ((2038, 2069), 'keyboard.press_and_release', 'keyboard.press_and_release', (['"""d"""'], {}), "('d')\n", (2064, 2069), False, 'import keyboard\n'), ((3543, 3579), 'os.path.getmtime', 'os.path.getmtime', (["(gpath + 'ftherlnd')"], {}), "(gpath + 'ftherlnd')\n", (3559, 3579), False, 'import os\n'), ((3835, 3856), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {}), '(cmd)\n', (3851, 3856), False, 'import subprocess\n'), ((6192, 6204), 'turnhandler.delete_log', 'delete_log', ([], {}), '()\n', (6202, 6204), False, 'from turnhandler import backupturn, clonegame, cleanturns, delete_log, delete_temp\n'), ((6209, 6222), 'turnhandler.delete_temp', 'delete_temp', ([], {}), '()\n', (6220, 6222), False, 'from turnhandler import backupturn, clonegame, cleanturns, delete_log, delete_temp\n'), ((7161, 7179), 'turnhandler.cleanturns', 'cleanturns', (['rounds'], {}), '(rounds)\n', (7171, 7179), False, 'from turnhandler import 
backupturn, clonegame, cleanturns, delete_log, delete_temp\n'), ((1277, 1317), 'win32gui.FindWindow', 'win32gui.FindWindow', (['None', '"""Dominions 5"""'], {}), "(None, 'Dominions 5')\n", (1296, 1317), False, 'import win32gui\n'), ((2477, 2512), 'os.path.getmtime', 'os.path.getmtime', (["(path + 'ftherlnd')"], {}), "(path + 'ftherlnd')\n", (2493, 2512), False, 'import os\n'), ((2647, 2692), 'win32gui.FindWindow', 'win32gui.FindWindow', (['None', '"""NÃ¥got gick fel!"""'], {}), "(None, 'NÃ¥got gick fel!')\n", (2666, 2692), False, 'import win32gui\n'), ((3309, 3324), 'yaml.load', 'yaml.load', (['file'], {}), '(file)\n', (3318, 3324), False, 'import yaml\n'), ((4304, 4323), 'logparser.validate_log', 'validate_log', (['dpath'], {}), '(dpath)\n', (4316, 4323), False, 'from logparser import parselog, validate_log\n'), ((6068, 6084), 'turnhandler.backupturn', 'backupturn', (['turn'], {}), '(turn)\n', (6078, 6084), False, 'from turnhandler import backupturn, clonegame, cleanturns, delete_log, delete_temp\n'), ((6126, 6140), 'logparser.parselog', 'parselog', (['turn'], {}), '(turn)\n', (6134, 6140), False, 'from logparser import parselog, validate_log\n'), ((6634, 6646), 'turnhandler.clonegame', 'clonegame', (['i'], {}), '(i)\n', (6643, 6646), False, 'from turnhandler import backupturn, clonegame, cleanturns, delete_log, delete_temp\n'), ((2726, 2760), 'win32gui.SetForegroundWindow', 'win32gui.SetForegroundWindow', (['hwnd'], {}), '(hwnd)\n', (2754, 2760), False, 'import win32gui\n'), ((2773, 2824), 'win32gui.PostMessage', 'win32gui.PostMessage', (['hwnd', 'win32con.WM_CLOSE', '(0)', '(0)'], {}), '(hwnd, win32con.WM_CLOSE, 0, 0)\n', (2793, 2824), False, 'import win32gui\n'), ((4549, 4592), 'os.system', 'os.system', (['"""TASKKILL /F /IM Dominions5.exe"""'], {}), "('TASKKILL /F /IM Dominions5.exe')\n", (4558, 4592), False, 'import os\n'), ((5209, 5281), 'threading.Thread', 'threading.Thread', ([], {'target': 'run_dominions', 'args': '(province, game, switch, i)'}), 
'(target=run_dominions, args=(province, game, switch, i))\n', (5225, 5281), False, 'import threading\n'), ((4520, 4534), 'psutil.process_iter', 'process_iter', ([], {}), '()\n', (4532, 4534), False, 'from psutil import process_iter\n')] |
"""
this script contains the function to compute z from sparse v , pi and w
"""
import torch
from estimation.truncated_poisson import TruncatedPoisson
def compute_z(log_w: torch.tensor, pi: torch.sparse, c: torch.sparse):
    """
    This function computes the hidden-edge counts z given the cluster
    proportion vector pi and the log-scale weight matrix log_w.
    :param log_w: log-weight matrix for all nodes in clusters
        (assumed shape K x num_nodes, K = number of clusters -- TODO confirm with caller).
    :param pi: cluster proportion vector (length K -- indexed by c's values).
    :param c: a sparse matrix to indicate the cluster membership; its stored
        values are the cluster labels of the corresponding (i, j) edges.
    :return: a sparse matrix for the number of hidden edges
    """
    # Endpoint indices (rows / cols) of every stored entry in c.
    indices = c._indices()
    indices_0, indices_1 = indices[0], indices[1]
    # log_w[:, i] + log_w[:, j] for every stored edge.
    poisson_para_tmp = torch.index_select(log_w, 1, indices_0) + torch.index_select(log_w, 1, indices_1) # K(the number of clusters) X number of edges
    # For each edge, keep only the row that matches its cluster label.
    poisson_para = torch.gather(poisson_para_tmp, dim=0, index=torch.unsqueeze(c._values(), 0)) # gather usage: https://zhuanlan.zhihu.com/p/352877584
    poisson_para += torch.index_select(torch.log(pi), dim=0, index=c._values())
    # Off-diagonal entries get doubled in log-space (squares the Poisson rate)
    # -- presumably to count both edge directions; TODO confirm intent.
    poisson_para = torch.where(indices_0==indices_1, poisson_para, poisson_para *2.0)
    # Sample zero-truncated Poisson counts; 1e-10 guards against a zero rate.
    samples = TruncatedPoisson(torch.squeeze(torch.exp(poisson_para) + 1e-10)).sample()
    z = torch.sparse_coo_tensor(indices, samples, c.size())
    return z
| [
"torch.index_select",
"torch.log",
"torch.exp",
"torch.where"
] | [((999, 1068), 'torch.where', 'torch.where', (['(indices_0 == indices_1)', 'poisson_para', '(poisson_para * 2.0)'], {}), '(indices_0 == indices_1, poisson_para, poisson_para * 2.0)\n', (1010, 1068), False, 'import torch\n'), ((631, 670), 'torch.index_select', 'torch.index_select', (['log_w', '(1)', 'indices_0'], {}), '(log_w, 1, indices_0)\n', (649, 670), False, 'import torch\n'), ((673, 712), 'torch.index_select', 'torch.index_select', (['log_w', '(1)', 'indices_1'], {}), '(log_w, 1, indices_1)\n', (691, 712), False, 'import torch\n'), ((939, 952), 'torch.log', 'torch.log', (['pi'], {}), '(pi)\n', (948, 952), False, 'import torch\n'), ((1111, 1134), 'torch.exp', 'torch.exp', (['poisson_para'], {}), '(poisson_para)\n', (1120, 1134), False, 'import torch\n')] |
# Third-party modules
try:
import simplejson as json
except ImportError:
import json
import sqlalchemy
from sqlalchemy.ext import mutable
# Custom modules
from . import track
class NestedMutable(mutable.MutableDict, track.TrackedDict):
    """SQLAlchemy `mutable` extension dictionary with nested change tracking."""

    def __setitem__(self, key, value):
        """Ensure that items set are converted to change-tracking types."""
        super(NestedMutable, self).__setitem__(key, self.convert(value, self))

    @classmethod
    def coerce(cls, key, value):
        """Convert plain dictionary to NestedMutable.

        Non-dict values fall through to the parent's ``coerce``, which
        raises ``ValueError`` for values it cannot handle.
        """
        if isinstance(value, cls):
            return value
        if isinstance(value, dict):
            return cls(value)
        # BUG FIX: the original ``super(cls).coerce(key, value)`` built an
        # *unbound* super object, so the attribute lookup raised
        # AttributeError instead of delegating up the MRO. Binding the
        # super object to the class invokes the parent classmethod.
        return super(NestedMutable, cls).coerce(key, value)
class _JsonTypeDecorator(sqlalchemy.TypeDecorator):
    """Stores Python values as JSON text and decodes them again on load."""

    impl = sqlalchemy.String

    def process_bind_param(self, value, dialect):
        # Outbound: serialize the Python value into its JSON string form.
        encoded = json.dumps(value)
        return encoded

    def process_result_value(self, value, dialect):
        # Inbound: rebuild the Python value from the stored JSON string.
        decoded = json.loads(value)
        return decoded
# Two public column types sharing the JSON (de)serialization behaviour;
# they differ only in which mutation tracker is attached below.
class JsonObject(_JsonTypeDecorator):
    """JSON object type for SQLAlchemy with change tracking as base level."""
class NestedJsonObject(_JsonTypeDecorator):
    """JSON object type for SQLAlchemy with nested change tracking."""
# Register the mutation trackers so SQLAlchemy marks columns of these
# types as dirty when the (possibly nested) dict contents change.
mutable.MutableDict.associate_with(JsonObject)
NestedMutable.associate_with(NestedJsonObject)
| [
"sqlalchemy.ext.mutable.MutableDict.associate_with",
"json.dumps",
"json.loads"
] | [((1292, 1338), 'sqlalchemy.ext.mutable.MutableDict.associate_with', 'mutable.MutableDict.associate_with', (['JsonObject'], {}), '(JsonObject)\n', (1326, 1338), False, 'from sqlalchemy.ext import mutable\n'), ((961, 978), 'json.dumps', 'json.dumps', (['value'], {}), '(value)\n', (971, 978), False, 'import json\n'), ((1041, 1058), 'json.loads', 'json.loads', (['value'], {}), '(value)\n', (1051, 1058), False, 'import json\n')] |
import cupy as cp
from csrc.activation import SoftMax
from csrc.layers.layer import Layer
# Cu sinapsa comparativa GPU
from csrc.comp_syn import cp_comp
class C2FullyConnected(Layer):
    """Densely connected layer (comparative).
    Attributes
    ----------
    size : int
        Number of neurons.
    activation : Activation
        Neurons' activation's function.
    is_softmax : bool
        Whether or not the activation is softmax.
    cache : dict
        Cache.
    w : numpy.ndarray
        Weights.
    b : numpy.ndarray
        Biases.
    """
    def __init__(self, size, activation):
        super().__init__()
        self.size = size
        self.activation = activation
        self.is_softmax = isinstance(self.activation, SoftMax)
        self.cache = {}
        self.w = None
        self.b = None
    def init(self, in_dim):
        # He initialization
        self.w = (cp.random.randn(self.size, in_dim) * cp.sqrt(2 / in_dim)).astype('float32')
        # Switched to float32 so the cp_comp operator can be invoked
        self.b = cp.zeros((1, self.size)).astype('float32')
    def forward(self, a_prev, training):
        #print('Forma1: ',cp.shape(a_prev))
        #print('Forma1: ',cp.shape(self.w.T))
        z = cp_comp(a_prev, self.w.T) + self.b # comparative layer (replaces the usual dot product)
        a = self.activation.f(z)
        if training:
            # Cache for backward pass
            self.cache.update({'a_prev': a_prev, 'z': z, 'a': a})
        return a
    def backward(self, da):
        a_prev, z, a = (self.cache[key] for key in ('a_prev', 'z', 'a'))
        batch_size = a_prev.shape[0]
        # ------- propagate the error da through the activation non-linearity
        if self.is_softmax:
            # Get back y from the gradient wrt the cost of this layer's activations
            # That is get back y from - y/a = da
            y = da * (-a)
            dz = a - y
        else:
            dz = da * self.activation.df(z, cached_y=a)
        #---------- update weights and bias gradients --------
        dw = 1 / batch_size * cp.dot(dz.T, a_prev)
        '''
        # aici ar trebui inlocuit dz.T = (clase,batch) * (batch, intrari)
        m1=cp.shape(dz.T)[0]
        n1=cp.shape(a_prev)[0]
        n2=cp.shape(a_prev)[1]
        dw=cp.zeros((m1,n2))
        for k in range(m1):
            dw[k,:]=cp.sum(dz.T[k,:] * a_prev.T, axis=1)
            #dw[k,:]=0.5*cp.sum(cp.abs(dz.T[k,:]+a_prev.T)-cp.abs(dz.T[k,:]-a_prev.T),axis=1)
            #dw[k,:]=0.002*cp.sum(cp.sign(dz.T[k,:]+a_prev.T)+cp.sign(dz.T[k,:]-a_prev.T),axis=1)
        dw = 1 / batch_size * dw
        #print('Forma dz.T : ',cp.shape(dz.T))
        #print('Forma a_prev : ',cp.shape(a_prev))
        # NOTA: antrenarea cu sign() functioneaza numai cu gamma=0.002
        # optimizer=grad_descent si eta 1..10 --> rezulta max 83%
        # pe fully connected cu USPS
        # Cu un strat suplimentar merge "rau"
        # Pentru train e rcmd. sa ramana vechile formule !!
        # sign() cu tanh() devine antrenarea mai lenta
        #----------- R.D. 26 iul 2021 ----------------
        '''
        db = 1 / batch_size * dz.sum(axis=0, keepdims=True)
        #------------ back-propagate the error to the previous layer
        da_prev = cp.dot(dz, self.w)
        #print('Forma dz: ',cp.shape(dz))
        #print('Forma w: ',cp.shape(self.w))
        return da_prev, dw, db
    def update_params(self, dw, db):
        # NOTE(review): caller is expected to pre-scale dw/db by the learning rate.
        self.w -= dw
        self.b -= db
    def get_params(self):
        return self.w, self.b
    def get_output_dim(self):
        return self.size
| [
"csrc.comp_syn.cp_comp",
"cupy.sqrt",
"cupy.dot",
"cupy.random.randn",
"cupy.zeros"
] | [((3326, 3344), 'cupy.dot', 'cp.dot', (['dz', 'self.w'], {}), '(dz, self.w)\n', (3332, 3344), True, 'import cupy as cp\n'), ((1261, 1286), 'csrc.comp_syn.cp_comp', 'cp_comp', (['a_prev', 'self.w.T'], {}), '(a_prev, self.w.T)\n', (1268, 1286), False, 'from csrc.comp_syn import cp_comp\n'), ((2110, 2130), 'cupy.dot', 'cp.dot', (['dz.T', 'a_prev'], {}), '(dz.T, a_prev)\n', (2116, 2130), True, 'import cupy as cp\n'), ((1074, 1098), 'cupy.zeros', 'cp.zeros', (['(1, self.size)'], {}), '((1, self.size))\n', (1082, 1098), True, 'import cupy as cp\n'), ((904, 938), 'cupy.random.randn', 'cp.random.randn', (['self.size', 'in_dim'], {}), '(self.size, in_dim)\n', (919, 938), True, 'import cupy as cp\n'), ((941, 960), 'cupy.sqrt', 'cp.sqrt', (['(2 / in_dim)'], {}), '(2 / in_dim)\n', (948, 960), True, 'import cupy as cp\n')] |
"""LiveSimulator: This class reads in various Bro IDS logs. The class utilizes
the BroLogReader and simply loops over the static bro log
file, replaying rows and changing any time stamps
Args:
eps (int): Events Per Second that the simulator will emit events (default = 10)
max_rows (int): The maximum number of rows to generate (default = None (go forever))
"""
from __future__ import print_function
import os
import time
import datetime
import itertools
# Third party
import numpy as np
# Local Imports
from brothon import bro_log_reader
from brothon.utils import file_utils
class LiveSimulator(object):
    """Replays a static Bro log through BroLogReader, rewriting timestamps
    to 'now()' and pacing the rows at roughly the requested EPS rate.
    """

    def __init__(self, filepath, eps=10, max_rows=None):
        """Initialization for the LiveSimulator Class
        Args:
            eps (int): Events Per Second that the simulator will emit events (default = 10)
            max_rows (int): The maximum number of rows to generate (default = None (go forever))
        """
        # Precompute 1000 sleep intervals drawn from a normal distribution
        # centred on 1/eps, clamped at zero, and cycle through them forever.
        mean_delay = 1.0 / float(eps)
        jitter = .5 / float(eps)
        raw_deltas = np.random.normal(mean_delay, jitter, size=1000)
        self.eps_timer = itertools.cycle([max(0, delta) for delta in raw_deltas])

        # Non-tailing reader over the static log file
        self.log_reader = bro_log_reader.BroLogReader(filepath, tail=False)

        self.max_rows = max_rows

    def readrows(self):
        """Yield log rows forever (or until max_rows), looping over the file
        and refreshing each row's timestamp.
        """
        emitted = 0
        while True:
            for row in self.log_reader.readrows():
                yield self.replace_timestamp(row)
                # Pace the stream, then count the emitted row
                time.sleep(next(self.eps_timer))
                emitted += 1
                if self.max_rows and emitted >= self.max_rows:
                    return

    @staticmethod
    def replace_timestamp(row):
        """Replace the timestamp with now()"""
        if 'ts' in row:
            row['ts'] = datetime.datetime.utcnow()
        return row
def test():
    """Test for LiveSimulator Python Class"""
    # Locate the bundled sample data relative to this module
    data_path = file_utils.relative_dir(__file__, '../data')
    test_path = os.path.join(data_path, 'conn.log')
    print('Opening Data File: {:s}'.format(test_path))

    # Bound the replay so the test terminates
    simulator = LiveSimulator(test_path, max_rows=10)
    for row in simulator.readrows():
        print(row)
    print('Read with max_rows Test successful!')

if __name__ == '__main__':
    # Run the test for easy testing/debugging
    test()
| [
"brothon.bro_log_reader.BroLogReader",
"brothon.utils.file_utils.relative_dir",
"os.path.join",
"datetime.datetime.utcnow"
] | [((2786, 2830), 'brothon.utils.file_utils.relative_dir', 'file_utils.relative_dir', (['__file__', '"""../data"""'], {}), "(__file__, '../data')\n", (2809, 2830), False, 'from brothon.utils import file_utils\n'), ((2847, 2882), 'os.path.join', 'os.path.join', (['data_path', '"""conn.log"""'], {}), "(data_path, 'conn.log')\n", (2859, 2882), False, 'import os\n'), ((1693, 1742), 'brothon.bro_log_reader.BroLogReader', 'bro_log_reader.BroLogReader', (['filepath'], {'tail': '(False)'}), '(filepath, tail=False)\n', (1720, 1742), False, 'from brothon import bro_log_reader\n'), ((2640, 2666), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (2664, 2666), False, 'import datetime\n')] |
import jq
from dotenv import load_dotenv
from gh_projects import (
update_project_with_repo_issues,
fetch_all_issues,
push_issues_to_project_next,
)
load_dotenv()
PROJECT_ID = "PN_kwHOACdIos4AAto7"
# fetch_project_item_issue_ids("PN_kwHOACdIos4AAYbQ")
all_issues = fetch_all_issues("machow", "pins-python", ["projectNext(number: 1) { id }"])
need_project = (
jq.compile(".[] | select(.projectNext.id == null) | .id").input(all_issues).all()
)
push_issues_to_project_next(PROJECT_ID, need_project)
update_project_with_repo_issues(
"machow",
"pins-python",
PROJECT_ID,
{
".updatedAt": "MDE2OlByb2plY3ROZXh0RmllbGQyNjI0ODEw",
".createdAt": "MDE2OlByb2plY3ROZXh0RmllbGQyNjI0ODM4",
".closedAt": "MDE2OlByb2plY3ROZXh0RmllbGQyNjI0ODM5",
".author.login": "<KEY>",
".comments.totalCount": "MDE2OlByb2plY3ROZXh0RmllbGQyNjI0ODk4",
".comments.nodes[] | .createdAt": "MDE2OlByb2plY3ROZXh0RmllbGQyNjI0ODk3",
".comments.nodes[] | .author.login": "<KEY>",
".isReadByViewer": "MDE2OlByb2plY3ROZXh0RmllbGQyNjI0ODc3",
},
query_fragment="""
updatedAt
createdAt
closedAt
author { login }
isReadByViewer
comments(last: 1) {
totalCount
nodes {
createdAt
author {
login
}
}
}
""",
)
| [
"gh_projects.push_issues_to_project_next",
"gh_projects.update_project_with_repo_issues",
"dotenv.load_dotenv",
"jq.compile",
"gh_projects.fetch_all_issues"
] | [((164, 177), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (175, 177), False, 'from dotenv import load_dotenv\n'), ((283, 359), 'gh_projects.fetch_all_issues', 'fetch_all_issues', (['"""machow"""', '"""pins-python"""', "['projectNext(number: 1) { id }']"], {}), "('machow', 'pins-python', ['projectNext(number: 1) { id }'])\n", (299, 359), False, 'from gh_projects import update_project_with_repo_issues, fetch_all_issues, push_issues_to_project_next\n'), ((466, 519), 'gh_projects.push_issues_to_project_next', 'push_issues_to_project_next', (['PROJECT_ID', 'need_project'], {}), '(PROJECT_ID, need_project)\n', (493, 519), False, 'from gh_projects import update_project_with_repo_issues, fetch_all_issues, push_issues_to_project_next\n'), ((522, 1332), 'gh_projects.update_project_with_repo_issues', 'update_project_with_repo_issues', (['"""machow"""', '"""pins-python"""', 'PROJECT_ID', "{'.updatedAt': 'MDE2OlByb2plY3ROZXh0RmllbGQyNjI0ODEw', '.createdAt':\n 'MDE2OlByb2plY3ROZXh0RmllbGQyNjI0ODM4', '.closedAt':\n 'MDE2OlByb2plY3ROZXh0RmllbGQyNjI0ODM5', '.author.login': '<KEY>',\n '.comments.totalCount': 'MDE2OlByb2plY3ROZXh0RmllbGQyNjI0ODk4',\n '.comments.nodes[] | .createdAt':\n 'MDE2OlByb2plY3ROZXh0RmllbGQyNjI0ODk3',\n '.comments.nodes[] | .author.login': '<KEY>', '.isReadByViewer':\n 'MDE2OlByb2plY3ROZXh0RmllbGQyNjI0ODc3'}"], {'query_fragment': '"""\n updatedAt\n createdAt\n closedAt\n author { login }\n isReadByViewer\n comments(last: 1) {\n totalCount\n nodes {\n createdAt\n author {\n login\n }\n }\n }\n """'}), '(\'machow\', \'pins-python\', PROJECT_ID, {\n \'.updatedAt\': \'MDE2OlByb2plY3ROZXh0RmllbGQyNjI0ODEw\', \'.createdAt\':\n \'MDE2OlByb2plY3ROZXh0RmllbGQyNjI0ODM4\', \'.closedAt\':\n \'MDE2OlByb2plY3ROZXh0RmllbGQyNjI0ODM5\', \'.author.login\': \'<KEY>\',\n \'.comments.totalCount\': \'MDE2OlByb2plY3ROZXh0RmllbGQyNjI0ODk4\',\n \'.comments.nodes[] | .createdAt\':\n \'MDE2OlByb2plY3ROZXh0RmllbGQyNjI0ODk3\',\n \'.comments.nodes[] | .author.login\': 
\'<KEY>\', \'.isReadByViewer\':\n \'MDE2OlByb2plY3ROZXh0RmllbGQyNjI0ODc3\'}, query_fragment=\n """\n updatedAt\n createdAt\n closedAt\n author { login }\n isReadByViewer\n comments(last: 1) {\n totalCount\n nodes {\n createdAt\n author {\n login\n }\n }\n }\n """\n )\n', (553, 1332), False, 'from gh_projects import update_project_with_repo_issues, fetch_all_issues, push_issues_to_project_next\n'), ((381, 438), 'jq.compile', 'jq.compile', (['""".[] | select(.projectNext.id == null) | .id"""'], {}), "('.[] | select(.projectNext.id == null) | .id')\n", (391, 438), False, 'import jq\n')] |
import nltk
from nltk import word_tokenize
def create_POS_tags(sentence):
    """Tokenize *sentence* and return its (token, POS-tag) pairs."""
    return nltk.pos_tag(word_tokenize(sentence))
| [
"nltk.pos_tag",
"nltk.word_tokenize"
] | [((97, 120), 'nltk.word_tokenize', 'word_tokenize', (['sentence'], {}), '(sentence)\n', (110, 120), False, 'from nltk import word_tokenize\n'), ((133, 161), 'nltk.pos_tag', 'nltk.pos_tag', (['parsedSentence'], {}), '(parsedSentence)\n', (145, 161), False, 'import nltk\n')] |
import matplotlib.pyplot as plt
import numpy.random as rnd
from matplotlib.patches import Ellipse
NUM = 250  # number of random ellipses to draw

# Ellipses with centres in [0, 10) x [0, 10), axes in [0, 1), random rotation.
ells = [Ellipse(xy=rnd.rand(2)*10, width=rnd.rand(), height=rnd.rand(), angle=rnd.rand()*360)
        for i in range(NUM)]

fig = plt.figure(0)
ax = fig.add_subplot(111, aspect='equal')
for e in ells:
    ax.add_artist(e)
    # Clip each patch to the axes box; randomize translucency and colour.
    e.set_clip_box(ax.bbox)
    e.set_alpha(rnd.rand())
    e.set_facecolor(rnd.rand(3))

ax.set_xlim(0, 10)
ax.set_ylim(0, 10)

plt.show()
| [
"matplotlib.pyplot.figure",
"numpy.random.rand",
"matplotlib.pyplot.show"
] | [((240, 253), 'matplotlib.pyplot.figure', 'plt.figure', (['(0)'], {}), '(0)\n', (250, 253), True, 'import matplotlib.pyplot as plt\n'), ((461, 471), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (469, 471), True, 'import matplotlib.pyplot as plt\n'), ((376, 386), 'numpy.random.rand', 'rnd.rand', ([], {}), '()\n', (384, 386), True, 'import numpy.random as rnd\n'), ((408, 419), 'numpy.random.rand', 'rnd.rand', (['(3)'], {}), '(3)\n', (416, 419), True, 'import numpy.random as rnd\n'), ((151, 161), 'numpy.random.rand', 'rnd.rand', ([], {}), '()\n', (159, 161), True, 'import numpy.random as rnd\n'), ((170, 180), 'numpy.random.rand', 'rnd.rand', ([], {}), '()\n', (178, 180), True, 'import numpy.random as rnd\n'), ((129, 140), 'numpy.random.rand', 'rnd.rand', (['(2)'], {}), '(2)\n', (137, 140), True, 'import numpy.random as rnd\n'), ((188, 198), 'numpy.random.rand', 'rnd.rand', ([], {}), '()\n', (196, 198), True, 'import numpy.random as rnd\n')] |
# <NAME> 1.0 by Adam
# http://robotgame.net/viewrobot/7819
import rg
# Module-level state shared by all friendly robots within one turn:
# squares already claimed as move targets, and the turn the list belongs to.
escapeSquares = []
globTurn = 0
class Robot:
    """Robotgame bot: a prioritized cascade of rules; the first rule that
    finds a legal action returns it, so statement order is significant.
    """
    def act(self, game):
        """Return this robot's action for the turn: ['move', loc],
        ['attack', loc], ['suicide'] or ['guard'].
        """
        # reset the escape squares for this turn
        global escapeSquares
        global globTurn
        if globTurn != game.turn:
            globTurn = game.turn
            # refresh list of used escape squares
            escapeSquares = []
        # NOTE(review): (15, 4) appears twice in goodSpawnLocs -- the second
        # occurrence presumably should be (15, 14); confirm against the map.
        badSpawnLocs = [(3, 3), (3, 15), (15, 3), (15, 15)]
        goodSpawnLocs = [(3, 4), (4, 3), (3, 14), (4, 15), (14, 3), (15, 4), (14, 15), (15, 4), (2, 6), (6, 2), (2, 12), (6, 16), (12, 2), (16, 6), (12, 16), (16, 12)]
        # set the location that would take us towards the centre
        towardCentre=rg.toward(self.location, rg.CENTER_POINT)
        # build info about adjacent (distance 1) and close (distance 2) robots,
        # plus the squares each close robot could step into next turn
        adjEnemyCount = 0
        adjEnemyLocs = []
        closeEnemyCount = 0
        closeEnemyLocs = []
        closeEnemyTargets = []
        adjFriendlyCount = 0
        adjFriendlyLocs = []
        closeFriendlyCount = 0
        closeFriendlyLocs = []
        closeFriendlyTargets = []
        nearbyFriendlyCount = 0
        nearbyFriendlyLocs = []
        for loc, bot in game.robots.iteritems():
            if bot.player_id != self.player_id:
                if rg.wdist(loc, self.location) == 1:
                    adjEnemyCount += 1
                    adjEnemyLocs = adjEnemyLocs + [loc]
                if rg.wdist(loc, self.location) == 2:
                    closeEnemyCount += 1
                    closeEnemyLocs = closeEnemyLocs + [loc]
                    # squares adjacent to both us and the close enemy
                    for dest in rg.locs_around(self.location, filter_out=('invalid', 'obstacle')):
                        for poss in rg.locs_around(loc, filter_out=('invalid', 'obstacle')):
                            if poss == dest:
                                closeEnemyTargets = closeEnemyTargets + [poss]
            if bot.player_id == self.player_id:
                if rg.wdist(loc, self.location) == 1:
                    adjFriendlyCount += 1
                    adjFriendlyLocs = adjFriendlyLocs + [loc]
                if rg.wdist(loc, self.location) == 2:
                    closeFriendlyCount += 1
                    closeFriendlyLocs = closeFriendlyLocs + [loc]
                    for dest in rg.locs_around(self.location, filter_out=('invalid', 'obstacle')):
                        for poss in rg.locs_around(loc, filter_out=('invalid', 'obstacle')):
                            if poss == dest:
                                closeFriendlyTargets = closeFriendlyTargets + [poss]
                if rg.wdist(loc, self.location) <= 3:
                    if loc != self.location:
                        nearbyFriendlyCount += 1
                        nearbyFriendlyLocs = nearbyFriendlyLocs + [loc]
        # if it's nearly respawning time...
        if game.turn % 10 in [9, 0] and game.turn != 99:
            # if we're on the edge, move away from spawn locations
            if 'spawn' in rg.loc_types(self.location):
                for dest in rg.locs_around(self.location, filter_out=('invalid', 'obstacle', 'spawn')):
                    if dest not in game.robots:
                        if dest not in escapeSquares:
                            escapeSquares = escapeSquares + [dest]
                            return ['move', dest]
            # if this isn't possible and we have a spare turn, try a new spawn location
            if game.turn % 10 == 9:
                if 'spawn' in rg.loc_types(towardCentre):
                    if towardCentre not in game.robots:
                        if towardCentre not in escapeSquares:
                            escapeSquares = escapeSquares + [towardCentre]
                            return ['move', towardCentre]
            # otherwise commit suicide
            if game.turn % 10 == 0:
                return ['suicide']
        # if it's nearly respawning time...
        if game.turn % 10 in [9, 0] and game.turn != 99:
            # try to bump spawning robots
            for loc in closeEnemyLocs:
                if 'spawn' in rg.loc_types(loc):
                    if game.turn % 10 == 0 or self.hp >= 9:
                        # try to attack the square on its path to the centre
                        for dest in rg.locs_around(self.location, filter_out=('invalid', 'obstacle', 'spawn')):
                            if rg.toward(loc, rg.CENTER_POINT) == dest:
                                if dest not in game.robots:
                                    if dest not in escapeSquares:
                                        escapeSquares = escapeSquares + [dest]
                                        return ['move', dest]
                        # if not, and it's turn 10, try to attack any square it could move to
                        if game.turn % 10 == 0:
                            for dest in rg.locs_around(self.location, filter_out=('invalid', 'obstacle', 'spawn')):
                                for poss in rg.locs_around(loc, filter_out=('invalid', 'obstacle')):
                                    if poss == dest:
                                        if dest not in game.robots:
                                            if dest not in escapeSquares:
                                                escapeSquares = escapeSquares + [dest]
                                                return ['move', dest]
        # if we're next to 3+ enemy bots, and low on health, commit suicide
        if adjEnemyCount >= 3:
            if self.hp <= adjEnemyCount * 9:
                return ['suicide']
        # if we're next to one enemy bot on low health, try to kill it (as long as we're not more likely to die ourselves)
        if adjEnemyCount == 1:
            for loc, bot in game.robots.iteritems():
                if loc in adjEnemyLocs:
                    if bot.hp <= 7 or self.hp >= 10:
                        return ['attack', loc]
                    if bot.hp <= self.hp:
                        return ['attack', loc]
        # if we're next to 2 enemy bots, or next to one enemy bot and low on health, run away (but not next to an enemy robot)
        if adjEnemyCount >= 1:
            if self.hp <= 9 or adjEnemyCount >= 2:
                for dest in rg.locs_around(self.location, filter_out=('invalid', 'obstacle', 'spawn')):
                    if dest not in game.robots:
                        if dest not in closeEnemyTargets:
                            if dest not in escapeSquares:
                                escapeSquares = escapeSquares + [dest]
                                return ['move', dest]
                # allow spawn squares if absolutely necessary and we're not near respawn time
                if game.turn % 10 not in [8, 9, 0] or game.turn in [98, 99]:
                    for dest in rg.locs_around(self.location, filter_out=('invalid', 'obstacle')):
                        if dest not in game.robots:
                            if dest not in closeEnemyTargets:
                                if dest not in escapeSquares:
                                    escapeSquares = escapeSquares + [dest]
                                    return ['move', dest]
        # if we're next to an ally in a spawn square, try to free it up by moving towards the centre
        if 'spawn' not in rg.loc_types(self.location):
            for loc in adjFriendlyLocs:
                if 'spawn' in rg.loc_types(loc):
                    if towardCentre not in game.robots:
                        if towardCentre not in escapeSquares:
                            # only step in if expected attack damage won't kill us
                            surplusHP = self.hp
                            for dest in closeEnemyTargets:
                                if dest == towardCentre:
                                    surplusHP -= 9
                            if surplusHP > 0 or closeEnemyCount == 0:
                                escapeSquares = escapeSquares + [towardCentre]
                                return ['move', towardCentre]
        # if we're next to an enemy bot, attack it
        for loc in adjEnemyLocs:
            return ['attack', loc]
        # if we're in a spawn square, try to escape to a safe square
        if 'spawn' in rg.loc_types(self.location):
            for dest in rg.locs_around(self.location, filter_out=('invalid', 'obstacle', 'spawn')):
                if dest not in game.robots:
                    if dest not in closeEnemyTargets:
                        if dest not in escapeSquares:
                            escapeSquares = escapeSquares + [dest]
                            return ['move', dest]
            # if this isn't possible, try a 'good' spawn location
            for dest in goodSpawnLocs:
                if dest in rg.locs_around(self.location, filter_out=('invalid', 'obstacle')):
                    if dest not in game.robots:
                        if dest not in closeEnemyTargets:
                            if dest not in closeFriendlyTargets:
                                if dest not in escapeSquares:
                                    escapeSquares = escapeSquares + [dest]
                                    return ['move', dest]
            # if this isn't possible, try a non-bad spawn location
            for dest in rg.locs_around(self.location, filter_out=('invalid', 'obstacle')):
                if 'spawn' in rg.loc_types(dest):
                    if dest not in badSpawnLocs:
                        if dest not in game.robots:
                            if dest not in closeEnemyTargets:
                                if dest not in closeFriendlyTargets:
                                    if dest not in escapeSquares:
                                        escapeSquares = escapeSquares + [dest]
                                        return ['move', dest]
        # if we're close to another bot who's in a battle, help attack it, unless this would bring us into a big battle!
        if game.turn != 99:
            for loc in closeEnemyLocs:
                # NOTE(review): ('invalid') is a plain string, not a 1-tuple --
                # rg.locs_around may treat it differently; confirm intent.
                for ally in rg.locs_around(loc, filter_out=('invalid')):
                    if ally in nearbyFriendlyLocs:
                        for dest in rg.locs_around(self.location, filter_out=('invalid', 'obstacle')):
                            for poss in rg.locs_around(loc, filter_out=('invalid', 'obstacle')):
                                if poss == dest:
                                    if dest not in game.robots:
                                        if dest not in escapeSquares:
                                            # check for other enemies around the square we're about to move into
                                            moveIn = 1
                                            for enemy in rg.locs_around(dest, filter_out=('invalid')):
                                                if enemy in closeEnemyLocs:
                                                    if enemy != loc:
                                                        moveIn = 0
                                            if moveIn == 1:
                                                escapeSquares = escapeSquares + [dest]
                                                return ['move', dest]
        # if we're close to another bot, attack the square we think it's going to move into (provided there isn't another bot in it)
        for loc in closeEnemyLocs:
            # try to attack the square on its path to the centre
            for dest in rg.locs_around(self.location, filter_out=('invalid', 'obstacle')):
                if rg.toward(loc, rg.CENTER_POINT) == dest:
                    if dest not in game.robots:
                        return ['attack', dest]
            # if not, try to attack any square it could move to
            for dest in rg.locs_around(self.location, filter_out=('invalid', 'obstacle')):
                for poss in rg.locs_around(loc, filter_out=('invalid', 'obstacle')):
                    if poss == dest:
                        if dest not in game.robots:
                            return ['attack', dest]
        # if we're next to friends, try to move away from them
        if adjFriendlyCount >=1:
            for dest in rg.locs_around(self.location, filter_out=('invalid', 'obstacle', 'spawn')):
                if dest not in game.robots:
                    if dest not in closeEnemyTargets: # it won't be, but there's no harm in double checking
                        if dest not in closeFriendlyTargets:
                            if dest not in escapeSquares: # it won't be by the above condition, but there's no harm in double checking
                                escapeSquares = escapeSquares + [dest]
                                return ['move', dest]
        # if we're in the center, stay put
        if self.location == rg.CENTER_POINT:
            return ['guard']
        # move toward the centre if there's a bot that needs room, even if there's a friend there that might be moving
        for loc in adjFriendlyLocs:
            if rg.toward(loc, rg.CENTER_POINT) == self.location:
                for dest in rg.locs_around(self.location, filter_out=('invalid', 'obstacle', 'spawn')):
                    if rg.wdist(dest, rg.CENTER_POINT) < rg.wdist(self.location, rg.CENTER_POINT):
                        if dest not in escapeSquares:
                            escapeSquares = escapeSquares + [towardCentre]
                            return ['move', towardCentre]
                # if there's no free escape squares, just try to go towards the centre
                if towardCentre not in escapeSquares:
                    escapeSquares = escapeSquares + [towardCentre]
                    return ['move', towardCentre]
        # move toward the centre (as long as we won't then be next to a friend)
        if towardCentre not in closeFriendlyTargets:
            if towardCentre not in escapeSquares: # it won't be by the above condition
                escapeSquares = escapeSquares + [towardCentre]
                return ['move', towardCentre]
        return ['guard']
| [
"rg.loc_types",
"rg.wdist",
"rg.locs_around",
"rg.toward"
] | [((710, 751), 'rg.toward', 'rg.toward', (['self.location', 'rg.CENTER_POINT'], {}), '(self.location, rg.CENTER_POINT)\n', (719, 751), False, 'import rg\n'), ((7400, 7427), 'rg.loc_types', 'rg.loc_types', (['self.location'], {}), '(self.location)\n', (7412, 7427), False, 'import rg\n'), ((8274, 8301), 'rg.loc_types', 'rg.loc_types', (['self.location'], {}), '(self.location)\n', (8286, 8301), False, 'import rg\n'), ((8327, 8401), 'rg.locs_around', 'rg.locs_around', (['self.location'], {'filter_out': "('invalid', 'obstacle', 'spawn')"}), "(self.location, filter_out=('invalid', 'obstacle', 'spawn'))\n", (8341, 8401), False, 'import rg\n'), ((9328, 9393), 'rg.locs_around', 'rg.locs_around', (['self.location'], {'filter_out': "('invalid', 'obstacle')"}), "(self.location, filter_out=('invalid', 'obstacle'))\n", (9342, 9393), False, 'import rg\n'), ((11538, 11603), 'rg.locs_around', 'rg.locs_around', (['self.location'], {'filter_out': "('invalid', 'obstacle')"}), "(self.location, filter_out=('invalid', 'obstacle'))\n", (11552, 11603), False, 'import rg\n'), ((11849, 11914), 'rg.locs_around', 'rg.locs_around', (['self.location'], {'filter_out': "('invalid', 'obstacle')"}), "(self.location, filter_out=('invalid', 'obstacle'))\n", (11863, 11914), False, 'import rg\n'), ((12263, 12337), 'rg.locs_around', 'rg.locs_around', (['self.location'], {'filter_out': "('invalid', 'obstacle', 'spawn')"}), "(self.location, filter_out=('invalid', 'obstacle', 'spawn'))\n", (12277, 12337), False, 'import rg\n'), ((2987, 3014), 'rg.loc_types', 'rg.loc_types', (['self.location'], {}), '(self.location)\n', (2999, 3014), False, 'import rg\n'), ((3044, 3118), 'rg.locs_around', 'rg.locs_around', (['self.location'], {'filter_out': "('invalid', 'obstacle', 'spawn')"}), "(self.location, filter_out=('invalid', 'obstacle', 'spawn'))\n", (3058, 3118), False, 'import rg\n'), ((6328, 6402), 'rg.locs_around', 'rg.locs_around', (['self.location'], {'filter_out': "('invalid', 'obstacle', 'spawn')"}), 
"(self.location, filter_out=('invalid', 'obstacle', 'spawn'))\n", (6342, 6402), False, 'import rg\n'), ((10101, 10142), 'rg.locs_around', 'rg.locs_around', (['loc'], {'filter_out': '"""invalid"""'}), "(loc, filter_out='invalid')\n", (10115, 10142), False, 'import rg\n'), ((11944, 11999), 'rg.locs_around', 'rg.locs_around', (['loc'], {'filter_out': "('invalid', 'obstacle')"}), "(loc, filter_out=('invalid', 'obstacle'))\n", (11958, 11999), False, 'import rg\n'), ((13103, 13134), 'rg.toward', 'rg.toward', (['loc', 'rg.CENTER_POINT'], {}), '(loc, rg.CENTER_POINT)\n', (13112, 13134), False, 'import rg\n'), ((13181, 13255), 'rg.locs_around', 'rg.locs_around', (['self.location'], {'filter_out': "('invalid', 'obstacle', 'spawn')"}), "(self.location, filter_out=('invalid', 'obstacle', 'spawn'))\n", (13195, 13255), False, 'import rg\n'), ((1279, 1307), 'rg.wdist', 'rg.wdist', (['loc', 'self.location'], {}), '(loc, self.location)\n', (1287, 1307), False, 'import rg\n'), ((1428, 1456), 'rg.wdist', 'rg.wdist', (['loc', 'self.location'], {}), '(loc, self.location)\n', (1436, 1456), False, 'import rg\n'), ((1596, 1661), 'rg.locs_around', 'rg.locs_around', (['self.location'], {'filter_out': "('invalid', 'obstacle')"}), "(self.location, filter_out=('invalid', 'obstacle'))\n", (1610, 1661), False, 'import rg\n'), ((1947, 1975), 'rg.wdist', 'rg.wdist', (['loc', 'self.location'], {}), '(loc, self.location)\n', (1955, 1975), False, 'import rg\n'), ((2105, 2133), 'rg.wdist', 'rg.wdist', (['loc', 'self.location'], {}), '(loc, self.location)\n', (2113, 2133), False, 'import rg\n'), ((2282, 2347), 'rg.locs_around', 'rg.locs_around', (['self.location'], {'filter_out': "('invalid', 'obstacle')"}), "(self.location, filter_out=('invalid', 'obstacle'))\n", (2296, 2347), False, 'import rg\n'), ((2591, 2619), 'rg.wdist', 'rg.wdist', (['loc', 'self.location'], {}), '(loc, self.location)\n', (2599, 2619), False, 'import rg\n'), ((4135, 4152), 'rg.loc_types', 'rg.loc_types', (['loc'], {}), 
'(loc)\n', (4147, 4152), False, 'import rg\n'), ((6896, 6961), 'rg.locs_around', 'rg.locs_around', (['self.location'], {'filter_out': "('invalid', 'obstacle')"}), "(self.location, filter_out=('invalid', 'obstacle'))\n", (6910, 6961), False, 'import rg\n'), ((7499, 7516), 'rg.loc_types', 'rg.loc_types', (['loc'], {}), '(loc)\n', (7511, 7516), False, 'import rg\n'), ((8804, 8869), 'rg.locs_around', 'rg.locs_around', (['self.location'], {'filter_out': "('invalid', 'obstacle')"}), "(self.location, filter_out=('invalid', 'obstacle'))\n", (8818, 8869), False, 'import rg\n'), ((9425, 9443), 'rg.loc_types', 'rg.loc_types', (['dest'], {}), '(dest)\n', (9437, 9443), False, 'import rg\n'), ((11624, 11655), 'rg.toward', 'rg.toward', (['loc', 'rg.CENTER_POINT'], {}), '(loc, rg.CENTER_POINT)\n', (11633, 11655), False, 'import rg\n'), ((1699, 1754), 'rg.locs_around', 'rg.locs_around', (['loc'], {'filter_out': "('invalid', 'obstacle')"}), "(loc, filter_out=('invalid', 'obstacle'))\n", (1713, 1754), False, 'import rg\n'), ((2385, 2440), 'rg.locs_around', 'rg.locs_around', (['loc'], {'filter_out': "('invalid', 'obstacle')"}), "(loc, filter_out=('invalid', 'obstacle'))\n", (2399, 2440), False, 'import rg\n'), ((3505, 3531), 'rg.loc_types', 'rg.loc_types', (['towardCentre'], {}), '(towardCentre)\n', (3517, 3531), False, 'import rg\n'), ((4327, 4401), 'rg.locs_around', 'rg.locs_around', (['self.location'], {'filter_out': "('invalid', 'obstacle', 'spawn')"}), "(self.location, filter_out=('invalid', 'obstacle', 'spawn'))\n", (4341, 4401), False, 'import rg\n'), ((10233, 10298), 'rg.locs_around', 'rg.locs_around', (['self.location'], {'filter_out': "('invalid', 'obstacle')"}), "(self.location, filter_out=('invalid', 'obstacle'))\n", (10247, 10298), False, 'import rg\n'), ((13280, 13311), 'rg.wdist', 'rg.wdist', (['dest', 'rg.CENTER_POINT'], {}), '(dest, rg.CENTER_POINT)\n', (13288, 13311), False, 'import rg\n'), ((13314, 13354), 'rg.wdist', 'rg.wdist', (['self.location', 
'rg.CENTER_POINT'], {}), '(self.location, rg.CENTER_POINT)\n', (13322, 13354), False, 'import rg\n'), ((4924, 4998), 'rg.locs_around', 'rg.locs_around', (['self.location'], {'filter_out': "('invalid', 'obstacle', 'spawn')"}), "(self.location, filter_out=('invalid', 'obstacle', 'spawn'))\n", (4938, 4998), False, 'import rg\n'), ((10340, 10395), 'rg.locs_around', 'rg.locs_around', (['loc'], {'filter_out': "('invalid', 'obstacle')"}), "(loc, filter_out=('invalid', 'obstacle'))\n", (10354, 10395), False, 'import rg\n'), ((4434, 4465), 'rg.toward', 'rg.toward', (['loc', 'rg.CENTER_POINT'], {}), '(loc, rg.CENTER_POINT)\n', (4443, 4465), False, 'import rg\n'), ((5044, 5099), 'rg.locs_around', 'rg.locs_around', (['loc'], {'filter_out': "('invalid', 'obstacle')"}), "(loc, filter_out=('invalid', 'obstacle'))\n", (5058, 5099), False, 'import rg\n'), ((10805, 10847), 'rg.locs_around', 'rg.locs_around', (['dest'], {'filter_out': '"""invalid"""'}), "(dest, filter_out='invalid')\n", (10819, 10847), False, 'import rg\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the CLI tools classes."""
import argparse
import io
import sys
import unittest
from plaso.cli import tools
from plaso.lib import errors
from tests.cli import test_lib
class CLIToolTest(test_lib.CLIToolTestCase):
  """Tests for the CLI tool base class."""

  _EXPECTED_BASIC_OPTIONS = u'\n'.join([
      u'usage: tool_test.py [-h] [-V]',
      u'',
      u'Test argument parser.',
      u'',
      u'optional arguments:',
      u' -h, --help show this help message and exit.',
      u' -V, --version show the version information.',
      u''])

  _EXPECTED_DATA_OPTION = u'\n'.join([
      u'usage: tool_test.py [--data PATH]',
      u'',
      u'Test argument parser.',
      u'',
      u'optional arguments:',
      u' --data PATH the location of the data files.',
      u''])

  _EXPECTED_INFORMATIONAL_OPTIONS = u'\n'.join([
      u'usage: tool_test.py [-d] [-q]',
      u'',
      u'Test argument parser.',
      u'',
      u'optional arguments:',
      u' -d, --debug enable debug output.',
      u' -q, --quiet disable informational output.',
      u''])

  _EXPECTED_TIMEZONE_OPTION = u'\n'.join([
      u'usage: tool_test.py [-z TIMEZONE]',
      u'',
      u'Test argument parser.',
      u'',
      u'optional arguments:',
      u' -z TIMEZONE, --zone TIMEZONE, --timezone TIMEZONE',
      (u' explicitly define the timezone. Typically '
       u'the timezone'),
      (u' is determined automatically where possible. '
       u'Use "-z'),
      u' list" to see a list of available timezones.',
      u''])

  def testAddBasicOptions(self):
    """Tests the AddBasicOptions function."""
    argument_parser = argparse.ArgumentParser(
        prog=u'tool_test.py', description=u'Test argument parser.',
        add_help=False, formatter_class=argparse.RawDescriptionHelpFormatter)

    test_tool = tools.CLITool()
    test_tool.AddBasicOptions(argument_parser)

    # Compare the formatted help text against the expected output.
    output = self._RunArgparseFormatHelp(argument_parser)
    self.assertEqual(output, self._EXPECTED_BASIC_OPTIONS)

  def testAddDataLocationOption(self):
    """Tests the AddDataLocationOption function."""
    argument_parser = argparse.ArgumentParser(
        prog=u'tool_test.py', description=u'Test argument parser.',
        add_help=False, formatter_class=argparse.RawDescriptionHelpFormatter)

    test_tool = tools.CLITool()
    test_tool.AddDataLocationOption(argument_parser)

    output = self._RunArgparseFormatHelp(argument_parser)
    self.assertEqual(output, self._EXPECTED_DATA_OPTION)

  def testAddInformationalOptions(self):
    """Tests the AddInformationalOptions function."""
    argument_parser = argparse.ArgumentParser(
        prog=u'tool_test.py', description=u'Test argument parser.',
        add_help=False, formatter_class=argparse.RawDescriptionHelpFormatter)

    test_tool = tools.CLITool()
    test_tool.AddInformationalOptions(argument_parser)

    output = self._RunArgparseFormatHelp(argument_parser)
    self.assertEqual(output, self._EXPECTED_INFORMATIONAL_OPTIONS)

  def testAddTimezoneOption(self):
    """Tests the AddTimezoneOption function."""
    argument_parser = argparse.ArgumentParser(
        prog=u'tool_test.py', description=u'Test argument parser.',
        add_help=False, formatter_class=argparse.RawDescriptionHelpFormatter)

    test_tool = tools.CLITool()
    test_tool.AddTimezoneOption(argument_parser)

    output = self._RunArgparseFormatHelp(argument_parser)
    self.assertEqual(output, self._EXPECTED_TIMEZONE_OPTION)

  def testGetCommandLineArguments(self):
    """Tests the GetCommandLineArguments function."""
    cli_tool = tools.CLITool()
    cli_tool.preferred_encoding = u'UTF-8'

    command_line_arguments = cli_tool.GetCommandLineArguments()
    self.assertIsNotNone(command_line_arguments)

  def testListTimeZones(self):
    """Tests the ListTimeZones function."""
    output_writer = test_lib.TestOutputWriter()
    cli_tool = tools.CLITool(output_writer=output_writer)

    cli_tool.ListTimeZones()

    # Only the table header is checked; the full timezone list varies by
    # platform and timezone database version.
    string = output_writer.ReadOutput()
    expected_string = (
        b'\n'
        b'************************************ Zones '
        b'*************************************\n'
        b' Timezone : UTC Offset\n'
        b'----------------------------------------'
        b'----------------------------------------\n')
    self.assertTrue(string.startswith(expected_string))

  def testParseStringOption(self):
    """Tests the ParseStringOption function."""
    encoding = sys.stdin.encoding

    # Note that sys.stdin.encoding can be None.
    if not encoding:
      encoding = self.preferred_encoding

    cli_tool = tools.CLITool()
    cli_tool.preferred_encoding = u'UTF-8'

    expected_string = u'Test Unicode string'

    options = test_lib.TestOptions()
    options.test = expected_string

    string = cli_tool.ParseStringOption(options, u'test')
    self.assertEqual(string, expected_string)

    options = test_lib.TestOptions()

    string = cli_tool.ParseStringOption(options, u'test')
    self.assertIsNone(string)

    string = cli_tool.ParseStringOption(
        options, u'test', default_value=expected_string)
    self.assertEqual(string, expected_string)

    options = test_lib.TestOptions()
    options.test = expected_string.encode(encoding)

    string = cli_tool.ParseStringOption(options, u'test')
    self.assertEqual(string, expected_string)

    # Fix: the original condition was
    #   `if not sys.stdin.encoding and sys.stdin.encoding.upper() == u'UTF-8':`
    # which raised AttributeError when sys.stdin.encoding was None (the second
    # operand was still evaluated) and could never be True otherwise. The
    # intent is to exercise the bad-bytes case only when stdin is UTF-8.
    if sys.stdin.encoding and sys.stdin.encoding.upper() == u'UTF-8':
      options = test_lib.TestOptions()
      options.test = (
          b'\xad\xfd\xab\x73\x99\xc7\xb4\x78\xd0\x8c\x8a\xee\x6d\x6a\xcb\x90')

      with self.assertRaises(errors.BadConfigOption):
        cli_tool.ParseStringOption(options, u'test')

  def testPrintSeparatorLine(self):
    """Tests the PrintSeparatorLine function."""
    output_writer = test_lib.TestOutputWriter()
    cli_tool = tools.CLITool(output_writer=output_writer)

    cli_tool.PrintSeparatorLine()
    string = output_writer.ReadOutput()
    expected_string = (
        b'----------------------------------------'
        b'----------------------------------------\n')
    self.assertEqual(string, expected_string)
class StdinInputReaderTest(unittest.TestCase):
  """Unit tests for the stdin input reader."""

  _TEST_DATA = (
      b'A first string\n'
      b'A 2nd string\n'
      b'\xc3\xberi\xc3\xb0ja string\n'
      b'\xff\xfef\x00j\x00\xf3\x00r\x00\xf0\x00a\x00 \x00b\x00a\x00n\x00d\x00')

  def testReadAscii(self):
    """Tests the Read function with ASCII encoding."""
    saved_stdin = sys.stdin
    sys.stdin = io.BytesIO(self._TEST_DATA)

    reader = tools.StdinInputReader(encoding=u'ascii')

    # Plain ASCII lines decode unchanged.
    self.assertEqual(reader.Read(), u'A first string\n')
    self.assertEqual(reader.Read(), u'A 2nd string\n')

    # UTF-8 string with non-ASCII characters.
    self.assertEqual(
        reader.Read(), u'\ufffd\ufffdri\ufffd\ufffdja string\n')

    # UTF-16 string with non-ASCII characters.
    self.assertEqual(
        reader.Read(),
        (u'\ufffd\ufffdf\x00j\x00\ufffd\x00r\x00\ufffd\x00a\x00 '
         u'\x00b\x00a\x00n\x00d\x00'))

    sys.stdin = saved_stdin

  def testReadUtf8(self):
    """Tests the Read function with UTF-8 encoding."""
    saved_stdin = sys.stdin
    sys.stdin = io.BytesIO(self._TEST_DATA)

    reader = tools.StdinInputReader()

    # Plain ASCII lines decode unchanged.
    self.assertEqual(reader.Read(), u'A first string\n')
    self.assertEqual(reader.Read(), u'A 2nd string\n')

    # UTF-8 string with non-ASCII characters.
    self.assertEqual(reader.Read(), u'þriðja string\n')

    # UTF-16 string with non-ASCII characters.
    self.assertEqual(
        reader.Read(),
        (u'\ufffd\ufffdf\x00j\x00\ufffd\x00r\x00\ufffd\x00a\x00 '
         u'\x00b\x00a\x00n\x00d\x00'))

    sys.stdin = saved_stdin
class FileObjectOutputWriterTest(unittest.TestCase):
  """Unit tests for the file-like object output writer."""

  def testWriteAscii(self):
    """Tests the Write function with ASCII encoding."""
    writer = test_lib.TestOutputWriter(encoding=u'ascii')

    # Unicode string consisting of ASCII characters only.
    writer.Write(u'A first string\n')
    self.assertEqual(writer.ReadOutput(), b'A first string\n')

    # Byte string with ASCII characters.
    writer.Write(b'A 2nd string\n')
    self.assertEqual(writer.ReadOutput(), b'A 2nd string\n')

    # Unicode string with non-ASCII characters.
    writer.Write(u'þriðja string\n')
    self.assertEqual(writer.ReadOutput(), b'?ri?ja string\n')

    # Byte string with non-ASCII characters.
    with self.assertRaises(UnicodeDecodeError):
      # This fails because the byte string cannot be converted to
      # a Unicode string before the call to encode().
      writer.Write(b'\xc3\xberi\xc3\xb0ja string\n')

  def testWriteUtf8(self):
    """Tests the Write function with UTF-8 encoding."""
    writer = test_lib.TestOutputWriter()

    # Unicode string consisting of ASCII characters only.
    writer.Write(u'A first string\n')
    self.assertEqual(writer.ReadOutput(), b'A first string\n')

    # Byte string with ASCII characters.
    writer.Write(b'A 2nd string\n')
    self.assertEqual(writer.ReadOutput(), b'A 2nd string\n')

    # Unicode string with non-ASCII characters.
    writer.Write(u'þriðja string\n')
    self.assertEqual(writer.ReadOutput(), b'\xc3\xberi\xc3\xb0ja string\n')

    # Byte string with non-ASCII characters.
    with self.assertRaises(UnicodeDecodeError):
      # This fails because the byte string cannot be converted to
      # a Unicode string before the call to encode().
      writer.Write(b'\xc3\xberi\xc3\xb0ja string\n')
# Run the test suite when this module is executed as a script.
if __name__ == '__main__':
  unittest.main()
| [
"tests.cli.test_lib.TestOutputWriter",
"sys.stdin.encoding.upper",
"argparse.ArgumentParser",
"io.BytesIO",
"plaso.cli.tools.CLITool",
"tests.cli.test_lib.TestOptions",
"unittest.main",
"plaso.cli.tools.StdinInputReader"
] | [((10134, 10149), 'unittest.main', 'unittest.main', ([], {}), '()\n', (10147, 10149), False, 'import unittest\n'), ((1761, 1924), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': 'u"""tool_test.py"""', 'description': 'u"""Test argument parser."""', 'add_help': '(False)', 'formatter_class': 'argparse.RawDescriptionHelpFormatter'}), "(prog=u'tool_test.py', description=\n u'Test argument parser.', add_help=False, formatter_class=argparse.\n RawDescriptionHelpFormatter)\n", (1784, 1924), False, 'import argparse\n'), ((1949, 1964), 'plaso.cli.tools.CLITool', 'tools.CLITool', ([], {}), '()\n', (1962, 1964), False, 'from plaso.cli import tools\n'), ((2244, 2407), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': 'u"""tool_test.py"""', 'description': 'u"""Test argument parser."""', 'add_help': '(False)', 'formatter_class': 'argparse.RawDescriptionHelpFormatter'}), "(prog=u'tool_test.py', description=\n u'Test argument parser.', add_help=False, formatter_class=argparse.\n RawDescriptionHelpFormatter)\n", (2267, 2407), False, 'import argparse\n'), ((2432, 2447), 'plaso.cli.tools.CLITool', 'tools.CLITool', ([], {}), '()\n', (2445, 2447), False, 'from plaso.cli import tools\n'), ((2735, 2898), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': 'u"""tool_test.py"""', 'description': 'u"""Test argument parser."""', 'add_help': '(False)', 'formatter_class': 'argparse.RawDescriptionHelpFormatter'}), "(prog=u'tool_test.py', description=\n u'Test argument parser.', add_help=False, formatter_class=argparse.\n RawDescriptionHelpFormatter)\n", (2758, 2898), False, 'import argparse\n'), ((2923, 2938), 'plaso.cli.tools.CLITool', 'tools.CLITool', ([], {}), '()\n', (2936, 2938), False, 'from plaso.cli import tools\n'), ((3226, 3389), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': 'u"""tool_test.py"""', 'description': 'u"""Test argument parser."""', 'add_help': '(False)', 'formatter_class': 
'argparse.RawDescriptionHelpFormatter'}), "(prog=u'tool_test.py', description=\n u'Test argument parser.', add_help=False, formatter_class=argparse.\n RawDescriptionHelpFormatter)\n", (3249, 3389), False, 'import argparse\n'), ((3414, 3429), 'plaso.cli.tools.CLITool', 'tools.CLITool', ([], {}), '()\n', (3427, 3429), False, 'from plaso.cli import tools\n'), ((3710, 3725), 'plaso.cli.tools.CLITool', 'tools.CLITool', ([], {}), '()\n', (3723, 3725), False, 'from plaso.cli import tools\n'), ((3979, 4006), 'tests.cli.test_lib.TestOutputWriter', 'test_lib.TestOutputWriter', ([], {}), '()\n', (4004, 4006), False, 'from tests.cli import test_lib\n'), ((4022, 4064), 'plaso.cli.tools.CLITool', 'tools.CLITool', ([], {'output_writer': 'output_writer'}), '(output_writer=output_writer)\n', (4035, 4064), False, 'from plaso.cli import tools\n'), ((4747, 4762), 'plaso.cli.tools.CLITool', 'tools.CLITool', ([], {}), '()\n', (4760, 4762), False, 'from plaso.cli import tools\n'), ((4866, 4888), 'tests.cli.test_lib.TestOptions', 'test_lib.TestOptions', ([], {}), '()\n', (4886, 4888), False, 'from tests.cli import test_lib\n'), ((5044, 5066), 'tests.cli.test_lib.TestOptions', 'test_lib.TestOptions', ([], {}), '()\n', (5064, 5066), False, 'from tests.cli import test_lib\n'), ((5316, 5338), 'tests.cli.test_lib.TestOptions', 'test_lib.TestOptions', ([], {}), '()\n', (5336, 5338), False, 'from tests.cli import test_lib\n'), ((5926, 5953), 'tests.cli.test_lib.TestOutputWriter', 'test_lib.TestOutputWriter', ([], {}), '()\n', (5951, 5953), False, 'from tests.cli import test_lib\n'), ((5969, 6011), 'plaso.cli.tools.CLITool', 'tools.CLITool', ([], {'output_writer': 'output_writer'}), '(output_writer=output_writer)\n', (5982, 6011), False, 'from plaso.cli import tools\n'), ((6683, 6710), 'io.BytesIO', 'io.BytesIO', (['self._TEST_DATA'], {}), '(self._TEST_DATA)\n', (6693, 6710), False, 'import io\n'), ((6731, 6772), 'plaso.cli.tools.StdinInputReader', 'tools.StdinInputReader', ([], {'encoding': 
'u"""ascii"""'}), "(encoding=u'ascii')\n", (6753, 6772), False, 'from plaso.cli import tools\n'), ((7504, 7531), 'io.BytesIO', 'io.BytesIO', (['self._TEST_DATA'], {}), '(self._TEST_DATA)\n', (7514, 7531), False, 'import io\n'), ((7552, 7576), 'plaso.cli.tools.StdinInputReader', 'tools.StdinInputReader', ([], {}), '()\n', (7574, 7576), False, 'from plaso.cli import tools\n'), ((8382, 8426), 'tests.cli.test_lib.TestOutputWriter', 'test_lib.TestOutputWriter', ([], {'encoding': 'u"""ascii"""'}), "(encoding=u'ascii')\n", (8407, 8426), False, 'from tests.cli import test_lib\n'), ((9296, 9323), 'tests.cli.test_lib.TestOutputWriter', 'test_lib.TestOutputWriter', ([], {}), '()\n', (9321, 9323), False, 'from tests.cli import test_lib\n'), ((5587, 5609), 'tests.cli.test_lib.TestOptions', 'test_lib.TestOptions', ([], {}), '()\n', (5607, 5609), False, 'from tests.cli import test_lib\n'), ((5531, 5557), 'sys.stdin.encoding.upper', 'sys.stdin.encoding.upper', ([], {}), '()\n', (5555, 5557), False, 'import sys\n')] |
"""
Core implementation of :mod:`sklearndf.transformation.wrapper`
"""
import logging
from abc import ABCMeta, abstractmethod
from typing import Any, Generic, List, Optional, TypeVar, Union
import numpy as np
import pandas as pd
from sklearn.base import TransformerMixin
from sklearn.compose import ColumnTransformer
from sklearn.impute import MissingIndicator, SimpleImputer
from sklearn.kernel_approximation import AdditiveChi2Sampler
from sklearn.manifold import Isomap
from sklearn.preprocessing import KBinsDiscretizer, OneHotEncoder, PolynomialFeatures
from pytools.api import AllTracker
from ... import TransformerDF
from ...wrapper import TransformerWrapperDF
# module-level logger for this package
log = logging.getLogger(__name__)

# public API of this module; completeness is verified by AllTracker below
__all__ = [
    "BaseDimensionalityReductionWrapperDF",
    "BaseMultipleInputsPerOutputTransformerWrapperDF",
    "ColumnPreservingTransformerWrapperDF",
    "ColumnSubsetTransformerWrapperDF",
    "ComponentsDimensionalityReductionWrapperDF",
    "FeatureSelectionWrapperDF",
    "NComponentsDimensionalityReductionWrapperDF",
    "NumpyTransformerWrapperDF",
    "ColumnTransformerWrapperDF",
    "IsomapWrapperDF",
    "ImputerWrapperDF",
    "MissingIndicatorWrapperDF",
    "AdditiveChi2SamplerWrapperDF",
    "KBinsDiscretizerWrapperDF",
    "PolynomialFeaturesWrapperDF",
    "OneHotEncoderWrapperDF",
]

#
# type variables
#

# generic type variable for the wrapped native transformer
T_Transformer = TypeVar("T_Transformer", bound=TransformerMixin)

# T_Imputer is needed because sklearn's _BaseImputer only exists from v0.22 onwards.
# Once we drop support for sklearn 0.21, _BaseImputer can be used instead.
# The following TypeVar helps to annotate availability of "add_indicator" and
# "missing_values" attributes on an imputer instance for ImputerWrapperDF below
# noinspection PyProtectedMember
from sklearn.impute._iterative import IterativeImputer

T_Imputer = TypeVar("T_Imputer", SimpleImputer, IterativeImputer)

#
# Ensure all symbols introduced below are included in __all__
#

__tracker = AllTracker(globals())
#
# wrapper classes for transformers
#
class NumpyTransformerWrapperDF(
    TransformerWrapperDF[T_Transformer], Generic[T_Transformer], metaclass=ABCMeta
):
    """
    Abstract base class of DF wrappers for transformers that only accept numpy arrays.

    Converts data frames to numpy arrays before handing off to the native transformer.

    Implementations must define :meth:`_get_features_original`.
    """

    # noinspection PyPep8Naming
    def _adjust_X_type_for_delegate(
        self, X: pd.DataFrame, *, to_numpy: Optional[bool] = None
    ) -> np.ndarray:
        # conversion is mandatory here: a caller passing to_numpy=False would
        # contradict this wrapper's contract, so guard against it
        assert to_numpy is not False, "X must be converted to a numpy array"
        return super()._adjust_X_type_for_delegate(X, to_numpy=True)

    def _adjust_y_type_for_delegate(
        self,
        y: Optional[Union[pd.Series, pd.DataFrame]],
        *,
        to_numpy: Optional[bool] = None,
    ) -> Optional[np.ndarray]:
        # same contract as for X: always convert y to a numpy array
        assert to_numpy is not False, "y must be converted to a numpy array"
        return super()._adjust_y_type_for_delegate(y, to_numpy=True)
class ColumnSubsetTransformerWrapperDF(
    TransformerWrapperDF[T_Transformer], Generic[T_Transformer], metaclass=ABCMeta
):
    """
    Abstract base class of DF wrappers for transformers that keep column names
    unchanged but may drop one or more of the input columns.

    Implementations must define :meth:`_get_features_out`.
    """

    @abstractmethod
    def _get_features_out(self) -> pd.Index:
        # column labels of the arrays returned by the fitted transformer
        pass

    def _get_features_original(self) -> pd.Series:
        # columns pass through unchanged, so every output column maps back to
        # the input column of the same name
        out_columns = self._get_features_out()
        return pd.Series(out_columns.values, index=out_columns)
class ColumnPreservingTransformerWrapperDF(
    ColumnSubsetTransformerWrapperDF[T_Transformer],
    Generic[T_Transformer],
):
    """
    DF wrapper for transformers whose output columns match the input columns.

    The native transformer must not add, remove, reorder, or rename any of the input
    columns.
    """

    def _get_features_out(self) -> pd.Index:
        # all input columns are preserved, so the output columns are exactly
        # the input columns
        return self.feature_names_in_
class BaseMultipleInputsPerOutputTransformerWrapperDF(
    TransformerWrapperDF[T_Transformer], Generic[T_Transformer]
):
    """
    DF wrapper for transformers that combine several input columns into each
    individual output column.
    """

    @abstractmethod
    def _get_features_out(self) -> pd.Index:
        # make this method abstract to ensure subclasses override the default
        # behaviour, which usually relies on method ``_get_features_original``
        pass

    def _get_features_original(self) -> pd.Series:
        # a many-to-many column mapping cannot be expressed as a Series with a
        # single input column per output column, hence this is unsupported
        native_name = type(self.native_estimator).__name__
        raise NotImplementedError(
            f"{native_name} transformers map multiple "
            "inputs to individual output columns; current sklearndf implementation "
            "only supports many-to-1 mappings from output columns to input columns"
        )
class BaseDimensionalityReductionWrapperDF(
    BaseMultipleInputsPerOutputTransformerWrapperDF[T_Transformer],
    Generic[T_Transformer],
    metaclass=ABCMeta,
):
    """
    Base class of DF wrappers for dimensionality-reducing transformers.

    The native transformer is considered to map all input columns to each output
    column.
    """

    @property
    @abstractmethod
    def _n_components_(self) -> int:
        # number of output dimensions of the fitted transformer
        pass

    def _get_features_out(self) -> pd.Index:
        # synthesize output labels x_0 … x_{n-1}, one per reduced component
        labels = [f"x_{component}" for component in range(self._n_components_)]
        return pd.Index(labels)
class NComponentsDimensionalityReductionWrapperDF(
    BaseDimensionalityReductionWrapperDF[T_Transformer],
    Generic[T_Transformer],
    metaclass=ABCMeta,
):
    """
    Base class of DF wrappers for dimensionality-reducing transformers supporting the
    :attr:`n_components` attribute.

    Subclasses must implement :meth:`_get_features_original`.
    """

    # name of the native estimator attribute holding the component count
    _ATTR_N_COMPONENTS = "n_components"

    def _validate_delegate_estimator(self) -> None:
        # fail early if the native estimator lacks the n_components attribute
        self._validate_delegate_attribute(attribute_name=self._ATTR_N_COMPONENTS)

    @property
    def _n_components_(self) -> int:
        # read the component count directly from the native estimator
        return getattr(self.native_estimator, self._ATTR_N_COMPONENTS)
class ComponentsDimensionalityReductionWrapperDF(
    BaseDimensionalityReductionWrapperDF[T_Transformer],
    Generic[T_Transformer],
    metaclass=ABCMeta,
):
    """
    Base class of DF wrappers for dimensionality-reducing transformers supporting the
    ``components_`` attribute.

    The native transformer must provide a ``components_`` attribute once fitted,
    as an array of shape (n_components, n_features).
    """

    # name of the native estimator attribute holding the fitted components
    _ATTR_COMPONENTS = "components_"

    # noinspection PyPep8Naming
    def _post_fit(
        self, X: pd.DataFrame, y: Optional[pd.Series] = None, **fit_params
    ) -> None:
        # noinspection PyProtectedMember
        super()._post_fit(X, y, **fit_params)
        # components_ only exists after fitting, so it is validated here rather
        # than in _validate_delegate_estimator
        self._validate_delegate_attribute(attribute_name=self._ATTR_COMPONENTS)

    @property
    def _n_components_(self) -> int:
        # number of rows of the components_ matrix, i.e., n_components
        return len(getattr(self.native_estimator, self._ATTR_COMPONENTS))
class FeatureSelectionWrapperDF(
    ColumnSubsetTransformerWrapperDF[T_Transformer],
    Generic[T_Transformer],
    metaclass=ABCMeta,
):
    """
    DF wrapper for feature selection transformers.

    The native transformer must implement a ``get_support`` method, providing the
    indices of the selected input columns.
    """

    _ATTR_GET_SUPPORT = "get_support"

    def _validate_delegate_estimator(self) -> None:
        # fail early if the native estimator lacks the get_support method
        self._validate_delegate_attribute(attribute_name=self._ATTR_GET_SUPPORT)

    def _get_features_out(self) -> pd.Index:
        # the mask returned by get_support() selects the surviving subset of
        # the input columns
        support_fn = getattr(self.native_estimator, self._ATTR_GET_SUPPORT)
        return self.feature_names_in_[support_fn()]
class ColumnTransformerWrapperDF(
    TransformerWrapperDF[ColumnTransformer], metaclass=ABCMeta
):
    """
    DF wrapper for :class:`sklearn.compose.ColumnTransformer`.

    Requires all transformers passed as the ``transformers`` parameter to implement
    :class:`.TransformerDF`.
    """

    # special (non-estimator) transformer values accepted by ColumnTransformer
    __DROP = "drop"
    __PASSTHROUGH = "passthrough"

    __SPECIAL_TRANSFORMERS = (__DROP, __PASSTHROUGH)

    def _validate_delegate_estimator(self) -> None:
        # Validate that the remainder and all inner transformers are either
        # TransformerDF instances or one of the special string values.
        column_transformer: ColumnTransformer = self.native_estimator

        if (
            column_transformer.remainder
            not in ColumnTransformerWrapperDF.__SPECIAL_TRANSFORMERS
        ):
            raise ValueError(
                f"unsupported value for arg remainder: ({column_transformer.remainder})"
            )

        # collect class names of transformers that are neither TransformerDF
        # nor a special value, for the error message below
        non_compliant_transformers: List[str] = [
            type(transformer).__name__
            for _, transformer, _ in column_transformer.transformers
            if not (
                isinstance(transformer, TransformerDF)
                or transformer in ColumnTransformerWrapperDF.__SPECIAL_TRANSFORMERS
            )
        ]
        if non_compliant_transformers:
            # imported locally to avoid a circular import at module load time
            from .. import ColumnTransformerDF

            raise ValueError(
                f"{ColumnTransformerDF.__name__} only accepts instances of "
                f"{TransformerDF.__name__} or special values "
                f'"{" and ".join(ColumnTransformerWrapperDF.__SPECIAL_TRANSFORMERS)}" '
                "as valid transformers, but "
                f'also got: {", ".join(non_compliant_transformers)}'
            )

    def _get_features_original(self) -> pd.Series:
        """
        Return the series mapping output column names to original columns names.

        :return: the series with index the column names of the output dataframe and
            values the corresponding input column names.
        """

        def _features_original(df_transformer: TransformerDF, columns: List[Any]):
            if df_transformer == ColumnTransformerWrapperDF.__PASSTHROUGH:
                # we may get positional indices for columns selected by the
                # 'passthrough' transformer, and in that case so need to look up the
                # associated column names
                if all(isinstance(column, int) for column in columns):
                    column_names = self._get_features_in()[columns]
                else:
                    column_names = columns
                # passthrough columns map to themselves
                return pd.Series(index=column_names, data=column_names)
            else:
                return df_transformer.feature_names_original_

        # concatenate the mappings of all fitted transformers, skipping empty
        # selections and dropped columns
        return pd.concat(
            [
                _features_original(df_transformer, columns)
                for _, df_transformer, columns in self.native_estimator.transformers_
                if (
                    len(columns) > 0
                    and df_transformer != ColumnTransformerWrapperDF.__DROP
                )
            ]
        )
class ImputerWrapperDF(TransformerWrapperDF[T_Imputer], metaclass=ABCMeta):
    """
    DF wrapper for imputation transformers, e.g., :class:`sklearn.impute.SimpleImputer`.
    """

    def _get_features_original(self) -> pd.Series:
        # Map output column names to original column names: drop the columns
        # eliminated during imputation, and append missing-indicator columns
        # if the imputer was fitted with add_indicator=True.

        delegate_estimator = self.native_estimator
        nan_mask = []

        def _nan_mask_from_statistics(stats: np.array):
            # a NaN statistic marks a column the imputer dropped (no values
            # to impute from)
            if issubclass(stats.dtype.type, float):
                na_mask = np.isnan(stats)
            else:
                na_mask = [
                    x is None or (isinstance(x, float) and np.isnan(x)) for x in stats
                ]
            return na_mask

        # implementation for i.e. SimpleImputer
        if hasattr(delegate_estimator, "statistics_"):
            nan_mask = _nan_mask_from_statistics(stats=delegate_estimator.statistics_)

        # implementation for IterativeImputer
        elif hasattr(delegate_estimator, "initial_imputer_"):
            initial_imputer: SimpleImputer = delegate_estimator.initial_imputer_
            nan_mask = _nan_mask_from_statistics(stats=initial_imputer.statistics_)

        # implementation for i.e. KNNImputer
        elif hasattr(delegate_estimator, "_mask_fit_X"):
            # noinspection PyProtectedMember
            nan_mask = np.all(delegate_estimator._mask_fit_X, axis=0)

        # the imputed columns are all ingoing columns, except the ones that were dropped
        imputed_columns = self.feature_names_in_.delete(np.argwhere(nan_mask).tolist())
        features_original = pd.Series(
            index=imputed_columns, data=imputed_columns.values
        )

        # if the add_indicator flag is set, we will get additional "missing" columns
        if delegate_estimator.add_indicator:
            from .. import MissingIndicatorDF

            missing_indicator = MissingIndicatorDF.from_fitted(
                estimator=delegate_estimator.indicator_,
                features_in=self.feature_names_in_,
                n_outputs=self.n_outputs_,
            )
            # use pd.concat instead of Series.append: Series.append was
            # deprecated in pandas 1.4 and removed in pandas 2.0; pd.concat
            # with default arguments is behavior-identical here
            return pd.concat(
                [features_original, missing_indicator.feature_names_original_]
            )
        else:
            return features_original
class MissingIndicatorWrapperDF(
    TransformerWrapperDF[MissingIndicator], metaclass=ABCMeta
):
    """
    DF wrapper for :class:`sklearn.impute.MissingIndicator`.
    """

    def _get_features_original(self) -> pd.Series:
        # map every generated "<column>__missing" indicator column back to the
        # input column whose missingness it tracks
        tracked_columns: np.ndarray = self.feature_names_in_[
            self.native_estimator.features_
        ].values
        indicator_index = pd.Index(
            [f"{name}__missing" for name in tracked_columns]
        )
        return pd.Series(tracked_columns, index=indicator_index)
class IsomapWrapperDF(BaseDimensionalityReductionWrapperDF[Isomap], metaclass=ABCMeta):
    """
    DF wrapper for :class:`sklearn.manifold.Isomap`.
    """

    @property
    def _n_components_(self) -> int:
        # the number of output dimensions equals the width of the fitted
        # embedding_ matrix
        return self.native_estimator.embedding_.shape[1]
class AdditiveChi2SamplerWrapperDF(
    BaseDimensionalityReductionWrapperDF[AdditiveChi2Sampler], metaclass=ABCMeta
):
    """
    DF wrapper for :class:`sklearn.kernel_approximation.AdditiveChi2Sampler`.
    """

    @property
    def _n_components_(self) -> int:
        # every input feature is expanded to (2 * sample_steps + 1) output
        # features
        sample_steps = self.native_estimator.sample_steps
        return (2 * sample_steps + 1) * len(self._features_in)
class PolynomialFeaturesWrapperDF(
    BaseMultipleInputsPerOutputTransformerWrapperDF[PolynomialFeatures],
    metaclass=ABCMeta,
):
    """
    DF wrapper for :class:`sklearn.preprocessing.PolynomialFeatures`.
    """

    def _get_features_out(self) -> pd.Index:
        # let the native transformer generate the polynomial feature names,
        # based on the stringified input column names
        input_names = self.feature_names_in_.astype(str)
        feature_names = self.native_estimator.get_feature_names(
            input_features=input_names
        )
        return pd.Index(feature_names)
class OneHotEncoderWrapperDF(TransformerWrapperDF[OneHotEncoder], metaclass=ABCMeta):
    """
    DF wrapper for :class:`sklearn.preprocessing.OneHotEncoder`.
    """

    def _validate_delegate_estimator(self) -> None:
        # sparse output cannot be represented as a data frame
        if self.native_estimator.sparse:
            raise NotImplementedError("sparse matrices not supported; use sparse=False")

    def _get_features_original(self) -> pd.Series:
        # Return the series mapping output column names to original column names.
        #
        # Remove 1st category column if argument drop == 'first'
        # Remove 1st category column only of binary features if arg drop == 'if_binary'

        feature_names_out = pd.Index(
            self.native_estimator.get_feature_names(self.feature_names_in_)
        )

        # NOTE(review): self.drop is presumably delegated to the native
        # estimator's `drop` parameter — confirm against TransformerWrapperDF.
        if self.drop == "first":
            # each feature contributes one output column per category, minus
            # the dropped first category
            feature_names_in = [
                column_original
                for column_original, category in zip(
                    self.feature_names_in_, self.native_estimator.categories_
                )
                for _ in range(len(category) - 1)
            ]
        elif self.drop == "if_binary":
            # binary features contribute a single output column; all other
            # features contribute one column per category
            feature_names_in = [
                column_original
                for column_original, category in zip(
                    self.feature_names_in_, self.native_estimator.categories_
                )
                for _ in (range(1) if len(category) == 2 else category)
            ]
        else:
            # no categories dropped: one output column per category
            feature_names_in = [
                column_original
                for column_original, category in zip(
                    self.feature_names_in_, self.native_estimator.categories_
                )
                for _ in category
            ]

        return pd.Series(index=feature_names_out, data=feature_names_in)
class KBinsDiscretizerWrapperDF(
    TransformerWrapperDF[KBinsDiscretizer], metaclass=ABCMeta
):
    """
    DF wrapper for :class:`sklearn.preprocessing.KBinsDiscretizer`.
    """

    def _validate_delegate_estimator(self) -> None:
        # "onehot" yields a sparse matrix, which cannot be represented as a
        # data frame; "onehot-dense" and "ordinal" are supported below
        if self.native_estimator.encode == "onehot":
            raise NotImplementedError(
                'property encode="onehot" is not supported due to sparse matrices;'
                'consider using "onehot-dense" instead'
            )

    def _get_features_original(self) -> pd.Series:
        """
        Return the series mapping output column names to original columns names.

        :return: the series with index the column names of the output dataframe and
            values the corresponding input column names.
        """
        if self.native_estimator.encode == "onehot-dense":
            # one output column per bin, named "<feature>_bin_<bin index>";
            # build (input, output) name pairs and unzip them
            n_bins_per_feature = self.native_estimator.n_bins_
            features_in, features_out = zip(
                *(
                    (feature_name, f"{feature_name}_bin_{bin_index}")
                    for feature_name, n_bins in zip(
                        self.feature_names_in_, n_bins_per_feature
                    )
                    for bin_index in range(n_bins)
                )
            )

            return pd.Series(index=features_out, data=features_in)

        elif self.native_estimator.encode == "ordinal":
            # one output column per input column, named "<feature>_bin"
            return pd.Series(
                index=self.feature_names_in_.astype(str) + "_bin",
                data=self.feature_names_in_,
            )
        else:
            # encode was validated at fit time; any other value is unexpected
            raise ValueError(
                f"unexpected value for property encode={self.native_estimator.encode}"
            )
#
# validate __all__
#

# verify that every public symbol defined since the tracker was created is
# listed in __all__ (see the AllTracker instantiation above)
__tracker.validate()
| [
"logging.getLogger",
"pandas.Series",
"pandas.Index",
"numpy.argwhere",
"numpy.isnan",
"numpy.all",
"typing.TypeVar"
] | [((679, 706), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (696, 706), False, 'import logging\n'), ((1360, 1408), 'typing.TypeVar', 'TypeVar', (['"""T_Transformer"""'], {'bound': 'TransformerMixin'}), "('T_Transformer', bound=TransformerMixin)\n", (1367, 1408), False, 'from typing import Any, Generic, List, Optional, TypeVar, Union\n'), ((1830, 1883), 'typing.TypeVar', 'TypeVar', (['"""T_Imputer"""', 'SimpleImputer', 'IterativeImputer'], {}), "('T_Imputer', SimpleImputer, IterativeImputer)\n", (1837, 1883), False, 'from typing import Any, Generic, List, Optional, TypeVar, Union\n'), ((3738, 3793), 'pandas.Series', 'pd.Series', ([], {'index': 'features_out', 'data': 'features_out.values'}), '(index=features_out, data=features_out.values)\n', (3747, 3793), True, 'import pandas as pd\n'), ((12399, 12460), 'pandas.Series', 'pd.Series', ([], {'index': 'imputed_columns', 'data': 'imputed_columns.values'}), '(index=imputed_columns, data=imputed_columns.values)\n', (12408, 12460), True, 'import pandas as pd\n'), ((13406, 13466), 'pandas.Index', 'pd.Index', (["[f'{name}__missing' for name in features_original]"], {}), "([f'{name}__missing' for name in features_original])\n", (13414, 13466), True, 'import pandas as pd\n'), ((13482, 13535), 'pandas.Series', 'pd.Series', ([], {'index': 'features_out', 'data': 'features_original'}), '(index=features_out, data=features_original)\n', (13491, 13535), True, 'import pandas as pd\n'), ((16318, 16375), 'pandas.Series', 'pd.Series', ([], {'index': 'feature_names_out', 'data': 'feature_names_in'}), '(index=feature_names_out, data=feature_names_in)\n', (16327, 16375), True, 'import pandas as pd\n'), ((17655, 17702), 'pandas.Series', 'pd.Series', ([], {'index': 'features_out', 'data': 'features_in'}), '(index=features_out, data=features_in)\n', (17664, 17702), True, 'import pandas as pd\n'), ((10317, 10365), 'pandas.Series', 'pd.Series', ([], {'index': 'column_names', 'data': 'column_names'}), 
'(index=column_names, data=column_names)\n', (10326, 10365), True, 'import pandas as pd\n'), ((11316, 11331), 'numpy.isnan', 'np.isnan', (['stats'], {}), '(stats)\n', (11324, 11331), True, 'import numpy as np\n'), ((12146, 12192), 'numpy.all', 'np.all', (['delegate_estimator._mask_fit_X'], {'axis': '(0)'}), '(delegate_estimator._mask_fit_X, axis=0)\n', (12152, 12192), True, 'import numpy as np\n'), ((12339, 12360), 'numpy.argwhere', 'np.argwhere', (['nan_mask'], {}), '(nan_mask)\n', (12350, 12360), True, 'import numpy as np\n'), ((11437, 11448), 'numpy.isnan', 'np.isnan', (['x'], {}), '(x)\n', (11445, 11448), True, 'import numpy as np\n')] |
import re
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
from localshop.apps.packages.xmlrpc import handle_request
# Register ModelAdmin classes from every installed app.
admin.autodiscover()

# STATIC_URL with its leading slash stripped and regex-escaped, used by the
# static-file serving route at the bottom of urlpatterns.
static_prefix = re.escape(settings.STATIC_URL.lstrip('/'))

# URL routes for the localshop project (legacy Django patterns() API).
urlpatterns = patterns('',
    url(r'^$', 'localshop.views.index', name='index'),

    # Default path for xmlrpc calls
    url(r'^RPC2$', handle_request),

    url(r'^packages/',
        include('localshop.apps.packages.urls', namespace='packages')),
    url(r'^simple/', include('localshop.apps.packages.urls_simple',
                                 namespace='packages-simple')),
    # We add a separate route for simple without the trailing slash so that
    # POST requests to /simple/ and /simple both work
    url(r'^simple$', 'localshop.apps.packages.views.simple_index'),

    url(r'^permissions/',
        include('localshop.apps.permissions.urls', namespace='permissions')),
    # Signup is disabled: send visitors straight back to the index page.
    url(r'^accounts/signup/', RedirectView.as_view(url="/")),
    url(r'^accounts/', include('userena.urls')),

    url(r'^admin/', include(admin.site.urls)),

    # Serve static files directly ({'insecure': True} — development only).
    url(r'^%s(?P<path>.*)$' % static_prefix,
        'django.contrib.staticfiles.views.serve', {'insecure': True}),
)
| [
"django.conf.urls.url",
"django.conf.urls.include",
"django.conf.settings.STATIC_URL.lstrip",
"django.views.generic.base.RedirectView.as_view",
"django.contrib.admin.autodiscover"
] | [((239, 259), 'django.contrib.admin.autodiscover', 'admin.autodiscover', ([], {}), '()\n', (257, 259), False, 'from django.contrib import admin\n'), ((287, 318), 'django.conf.settings.STATIC_URL.lstrip', 'settings.STATIC_URL.lstrip', (['"""/"""'], {}), "('/')\n", (313, 318), False, 'from django.conf import settings\n'), ((353, 401), 'django.conf.urls.url', 'url', (['"""^$"""', '"""localshop.views.index"""'], {'name': '"""index"""'}), "('^$', 'localshop.views.index', name='index')\n", (356, 401), False, 'from django.conf.urls import patterns, include, url\n'), ((445, 474), 'django.conf.urls.url', 'url', (['"""^RPC2$"""', 'handle_request'], {}), "('^RPC2$', handle_request)\n", (448, 474), False, 'from django.conf.urls import patterns, include, url\n'), ((816, 877), 'django.conf.urls.url', 'url', (['"""^simple$"""', '"""localshop.apps.packages.views.simple_index"""'], {}), "('^simple$', 'localshop.apps.packages.views.simple_index')\n", (819, 877), False, 'from django.conf.urls import patterns, include, url\n'), ((1151, 1256), 'django.conf.urls.url', 'url', (["('^%s(?P<path>.*)$' % static_prefix)", '"""django.contrib.staticfiles.views.serve"""', "{'insecure': True}"], {}), "('^%s(?P<path>.*)$' % static_prefix,\n 'django.contrib.staticfiles.views.serve', {'insecure': True})\n", (1154, 1256), False, 'from django.conf.urls import patterns, include, url\n'), ((509, 570), 'django.conf.urls.include', 'include', (['"""localshop.apps.packages.urls"""'], {'namespace': '"""packages"""'}), "('localshop.apps.packages.urls', namespace='packages')\n", (516, 570), False, 'from django.conf.urls import patterns, include, url\n'), ((595, 670), 'django.conf.urls.include', 'include', (['"""localshop.apps.packages.urls_simple"""'], {'namespace': '"""packages-simple"""'}), "('localshop.apps.packages.urls_simple', namespace='packages-simple')\n", (602, 670), False, 'from django.conf.urls import patterns, include, url\n'), ((915, 982), 'django.conf.urls.include', 'include', 
(['"""localshop.apps.permissions.urls"""'], {'namespace': '"""permissions"""'}), "('localshop.apps.permissions.urls', namespace='permissions')\n", (922, 982), False, 'from django.conf.urls import patterns, include, url\n'), ((1016, 1045), 'django.views.generic.base.RedirectView.as_view', 'RedirectView.as_view', ([], {'url': '"""/"""'}), "(url='/')\n", (1036, 1045), False, 'from django.views.generic.base import RedirectView\n'), ((1072, 1095), 'django.conf.urls.include', 'include', (['"""userena.urls"""'], {}), "('userena.urls')\n", (1079, 1095), False, 'from django.conf.urls import patterns, include, url\n'), ((1119, 1143), 'django.conf.urls.include', 'include', (['admin.site.urls'], {}), '(admin.site.urls)\n', (1126, 1143), False, 'from django.conf.urls import patterns, include, url\n')] |
#!/usr/bin/env python
"""Tests for ais.util."""
import unittest
from ais import util
import six
class UtilTest(unittest.TestCase):

  def testMaybeToNumber(self):
    # Non-string inputs (and non-numeric strings) pass through untouched.
    for passthrough in (None, [], {}, 'a', 1, -3.12):
      self.assertEqual(util.MaybeToNumber(passthrough), passthrough)

    # Numeric strings are converted, preserving int vs float.
    self.assertEqual(util.MaybeToNumber('-1'), -1)
    self.assertIsInstance(util.MaybeToNumber('-1'), int)
    self.assertEqual(util.MaybeToNumber('42.0'), 42.0)
    self.assertIsInstance(util.MaybeToNumber('42.0'), float)

    # Arbitrarily large integers survive the round trip without precision loss.
    value = 9999999999999999999999999
    value_str = '9999999999999999999999999'
    self.assertEqual(util.MaybeToNumber(value_str), value)
    self.assertIsInstance(util.MaybeToNumber(value_str), six.integer_types)

    # Exponents beyond float range collapse to +/- infinity.
    self.assertEqual(
        util.MaybeToNumber('1e99999999999999999999999'), float('inf'))
    self.assertEqual(
        util.MaybeToNumber('-1e99999999999999999999999'), float('-inf'))
if __name__ == '__main__':
  # Run the test suite when executed directly.
  unittest.main()
| [
"unittest.main",
"ais.util.MaybeToNumber"
] | [((1134, 1149), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1147, 1149), False, 'import unittest\n'), ((187, 211), 'ais.util.MaybeToNumber', 'util.MaybeToNumber', (['None'], {}), '(None)\n', (205, 211), False, 'from ais import util\n'), ((240, 262), 'ais.util.MaybeToNumber', 'util.MaybeToNumber', (['[]'], {}), '([])\n', (258, 262), False, 'from ais import util\n'), ((289, 311), 'ais.util.MaybeToNumber', 'util.MaybeToNumber', (['{}'], {}), '({})\n', (307, 311), False, 'from ais import util\n'), ((338, 361), 'ais.util.MaybeToNumber', 'util.MaybeToNumber', (['"""a"""'], {}), "('a')\n", (356, 361), False, 'from ais import util\n'), ((389, 410), 'ais.util.MaybeToNumber', 'util.MaybeToNumber', (['(1)'], {}), '(1)\n', (407, 410), False, 'from ais import util\n'), ((436, 461), 'ais.util.MaybeToNumber', 'util.MaybeToNumber', (['(-3.12)'], {}), '(-3.12)\n', (454, 461), False, 'from ais import util\n'), ((492, 516), 'ais.util.MaybeToNumber', 'util.MaybeToNumber', (['"""-1"""'], {}), "('-1')\n", (510, 516), False, 'from ais import util\n'), ((548, 572), 'ais.util.MaybeToNumber', 'util.MaybeToNumber', (['"""-1"""'], {}), "('-1')\n", (566, 572), False, 'from ais import util\n'), ((601, 627), 'ais.util.MaybeToNumber', 'util.MaybeToNumber', (['"""42.0"""'], {}), "('42.0')\n", (619, 627), False, 'from ais import util\n'), ((661, 687), 'ais.util.MaybeToNumber', 'util.MaybeToNumber', (['"""42.0"""'], {}), "('42.0')\n", (679, 687), False, 'from ais import util\n'), ((800, 829), 'ais.util.MaybeToNumber', 'util.MaybeToNumber', (['value_str'], {}), '(value_str)\n', (818, 829), False, 'from ais import util\n'), ((864, 893), 'ais.util.MaybeToNumber', 'util.MaybeToNumber', (['value_str'], {}), '(value_str)\n', (882, 893), False, 'from ais import util\n'), ((945, 992), 'ais.util.MaybeToNumber', 'util.MaybeToNumber', (['"""1e99999999999999999999999"""'], {}), "('1e99999999999999999999999')\n", (963, 992), False, 'from ais import util\n'), ((1038, 1086), 'ais.util.MaybeToNumber', 
'util.MaybeToNumber', (['"""-1e99999999999999999999999"""'], {}), "('-1e99999999999999999999999')\n", (1056, 1086), False, 'from ais import util\n')] |
import numpy as np
import pandas as pd
from pandas import DataFrame, Series
import matplotlib.pyplot as plt
num = np.array(['3.14','-2.7','30'], dtype=np.string_) #코드 이해 쉽게 : dtype=np.string_
# num=num.astype(int)
# print(num)
# ValueError: invalid literal for int() with base 10: '3.14'
num=num.astype(float).astype(int)
print(num)
# [ 3 -2 30] : 바로 int형 변형이 안되면 float으로 바꿨다가 바꿀 수 있다.
num=num.astype(float)
print(num)
# [ 3.14 -2.7 30. ]
# Fancy-indexing demo on an 8x4 matrix holding 0..31.
arr = np.arange(32).reshape((8, 4))
print(arr)
# [[ 0  1  2  3]
#  [ 4  5  6  7]
#  [ 8  9 10 11]
#  [12 13 14 15]
#  [16 17 18 19]
#  [20 21 22 23]
#  [24 25 26 27]
#  [28 29 30 31]]

# Paired index lists pick individual elements: (row, col) pairs.
print(arr[[1,5,7,2],[0,3,1,2]])
# [ 4 23 29 10]

# Chained indexing: first select rows 1,5,7,2, then reorder all columns.
print(arr[[1,5,7,2]][:,[0,3,1,2]])
# [[ 4  7  5  6]
#  [20 23 21 22]
#  [28 31 29 30]
#  [ 8 11  9 10]]

# Same row selection, but keep only columns 3 and 1.
print(arr[[1,5,7,2]][:,[3,1]])
# [[ 7  5]
#  [23 21]
#  [31 29]
#  [11  9]]
import random

# Simulate a 1000-step random walk: each step is +1 or -1 with equal odds.
steps = 1000
position = 0
walk = []
for _ in range(steps):
    position += 1 if random.randint(0, 1) else -1
    walk.append(position)

print("position : ", position)
print("walk : ", walk)
print(min(walk))
print(max(walk))
# print(abs(walk))  # abs() does not apply to a list — would raise TypeError
# Series: a 1-D labeled array (default RangeIndex).
obj = Series([1,2,-3,4])
print(obj)
# 0    1
# 1    2
# 2   -3
# 3    4
# dtype: int64
print(obj.values) # .values: extract only the stored values (attribute)
# [ 1  2 -3  4]
print(obj.index) # .index: extract the index labels
# RangeIndex(start=0, stop=4, step=1)

# assigning a custom index
obj = Series([1,2,-3,4],index=['x','y','z','k']) # assign index labels directly
print(obj)
# printed with the custom index
# x    1
# y    2
# z   -3
# k    4
# dtype: int64
print(obj['y'])
# 2
obj['x']=10
print(obj)
# x    10
# y     2
# z    -3
# k     4
# dtype: int64
# referencing several entries at once
# print(obj['x','y'])
# # KeyError: ('x', 'y')
print(obj[['x','y','z']]) # one label uses [], two or more labels need [[]]
# x    10
# y     2
# z    -3
# dtype: int64
print('='*50)
print(obj>0) # boolean conditions are supported
# x     True
# y     True
# z    False
# k     True
# dtype: bool
print(obj[obj>0]) # a boolean mask filters the Series
# x    10
# y     2
# k     4
# dtype: int64
print(obj*2) # element-wise arithmetic works
# x    20
# y     4
# z    -6
# k     8
# dtype: int64
print(np.exp(obj)) # exponential
# x    22026.465795
# y        7.389056
# z        0.049787
# k       54.598150
# dtype: float64

# null (never initialized) vs na (missing value)
print(obj)
print('a' in obj) # in: checks whether a label exists in the index
print('x' in obj) # columns: features, rows: observations
print('='*50)
# dict key/value -> Series index/value conversion (key => index, value => value)
sdata = {'Ohio': 35000, 'Texas': 71000, "Oregon":16000, "Utah":5000}
obj3=Series(sdata) # a dictionary converts to a Series too
print(obj3)
# Ohio      35000
# Texas     71000
# Oregon    16000
# Utah       5000
# dtype: int64
print(type(obj3))
# <class 'pandas.core.series.Series'>
states = ['California','Ohio','Oregon','Texas']
obj99 = Series(states) # a list converts to a Series
# print(obj99)
# # 0    California
# # 1          Ohio
# # 2        Oregon
# # 3         Texas
# # dtype: object
obj4 = Series(sdata, index=states) # build from sdata but align on the states labels
print(obj4)
# California        NaN
# Ohio          35000.0
# Oregon        16000.0
# Texas         71000.0
# dtype: float64
print(pd.isnull(obj4))
# California     True
# Ohio          False
# Oregon        False
# Texas         False
# dtype: bool
# colloquially nan means "not a number" (e.g. text where a number belongs)
# na: a missing value; null: a value never initialized
# pandas usage: the three terms are used interchangeably
# isnull: reports whether each entry is na (null/nan) or not
print(obj4+obj3) # only labels present in both get a value
obj4.name = 'population'
obj.index.name = 'state'
print(obj4)
# California        NaN
# Ohio          35000.0
# Oregon        16000.0
# Texas         71000.0
# Name: population, dtype: float64
obj4.index=['w','x','y','z'] # replace the index labels directly
print(obj4)
# w        NaN
# x    35000.0
# y    16000.0
# z    71000.0
# Name: population, dtype: float64
# Build a DataFrame from a dict of equal-length columns.
data = {
    'state' : ['Ohio','Ohio','Ohio','Nevada','Nevada'],
    'year': [2000,2001,2002,2001,2002],
    'pop': [1.5,1.7,3.6,2.4,2.9]}
frame = DataFrame(data) # behaves like a bundle of Series sharing one index
print(frame)
#     state  year  pop
# 0    Ohio  2000  1.5
# 1    Ohio  2001  1.7
# 2    Ohio  2002  3.6
# 3  Nevada  2001  2.4
# 4  Nevada  2002  2.9
print(DataFrame(data, columns=['year','state','pop'])) # reorder columns (temporary view)
#    year   state  pop
# 0  2000    Ohio  1.5
# 1  2001    Ohio  1.7
# 2  2002    Ohio  3.6
# 3  2001  Nevada  2.4
# 4  2002  Nevada  2.9
frame = DataFrame(data, columns=['year','state','pop']) # reassign to make the order permanent
frame2= DataFrame(data, columns=['year','state','pop','debt'], index=['one','two','three','four','five'])
print(frame2)
#        year   state  pop debt
# one    2000    Ohio  1.5  NaN
# two    2001    Ohio  1.7  NaN
# three  2002    Ohio  3.6  NaN
# four   2001  Nevada  2.4  NaN
# five   2002  Nevada  2.9  NaN
print(frame2['state']) # print only the selected column
# one        Ohio
# two        Ohio
# three      Ohio
# four     Nevada
# five     Nevada
# Name: state, dtype: object
print(frame2['year'])
# one      2000
# two      2001
# three    2002
# four     2001
# five     2002
# Name: year, dtype: int64
# .ix was deprecated and removed in pandas 1.0; .loc is the label-based replacement.
print(frame2.loc['three']) # .loc: select a single row by its index label
# to extract two or more columns or rows, use a list: [[]]
# print(frame2[['year','state']])
#
# print(frame2.loc[['three','five']])
print(frame2)
frame2['debt']=16.5 # a scalar assignment broadcasts to every row
print(frame2)
#        year   state  pop  debt
# one    2000    Ohio  1.5  16.5
# two    2001    Ohio  1.7  16.5
# three  2002    Ohio  3.6  16.5
# four   2001  Nevada  2.4  16.5
# five   2002  Nevada  2.9  16.5
# frame2['debt']=np.arange(3)
# print(frame2)
# # ValueError: Length of values does not match length of index
frame2['debt']=np.arange(5) # array length must match the number of rows
print(frame2)
#        year   state  pop  debt
# one    2000    Ohio  1.5     0
# two    2001    Ohio  1.7     1
# three  2002    Ohio  3.6     2
# four   2001  Nevada  2.4     3
# five   2002  Nevada  2.9     4
print('='*50)
val = Series([-1.2,-1.5,-1.7],index=['two','three','five'])
print(val)
# two     -1.2
# three   -1.5
# five    -1.7
# dtype: float64
# to add a column of different length, wrap it in a Series first
frame2['debt']=val # values are aligned by index label (lengths need not match)
print(frame2)
# add a new column, condition-based: True for eastern Ohio, False otherwise
frame2['eastern']=frame2.state=='Ohio'
print(frame2)
#        year   state  pop  debt  eastern
# one    2000    Ohio  1.5   NaN     True
# two    2001    Ohio  1.7  -1.2     True
# three  2002    Ohio  3.6  -1.5     True
# four   2001  Nevada  2.4   NaN    False
# five   2002  Nevada  2.9  -1.7    False
# remove a column
del frame2['eastern']
print(frame2)
#        year   state  pop  debt
# one    2000    Ohio  1.5   NaN
# two    2001    Ohio  1.7  -1.2
# three  2002    Ohio  3.6  -1.5
# four   2001  Nevada  2.4   NaN
# five   2002  Nevada  2.9  -1.7
print(frame2.columns)
# Index(['year', 'state', 'pop', 'debt'], dtype='object')
print(frame2.index)
# Index(['one', 'two', 'three', 'four', 'five'], dtype='object')

# Nested dicts: outer keys become columns, inner keys become the index.
pop = {'Nevada' : {2001 : 2.4,2002:2.9},'Ohio' : {2000 : 1.5,2001:1.7,2002:3.6}}
frame3 = DataFrame(pop)
print(frame3)
#       Nevada  Ohio
# 2000     NaN   1.5
# 2001     2.4   1.7
# 2002     2.9   3.6
# transpose: swap rows and columns
print(frame3.T)
#         2000  2001  2002
# Nevada   NaN   2.4   2.9
# Ohio     1.5   1.7   3.6
# frame4 = DataFrame(pop,index=[2001,2002,2003]) # reindexing a raw dict this way fails
# print(frame4)
# # AttributeError: 'list' object has no attribute 'astype'
frame4 = DataFrame(frame3,index=[2001,2002,2003]) # reindex from an existing DataFrame instead
print(frame4)
#       Nevada  Ohio
# 2001     2.4   1.7
# 2002     2.9   3.6
# 2003     NaN   NaN
print(frame3)
#       Nevada  Ohio
# 2000     NaN   1.5
# 2001     2.4   1.7
# 2002     2.9   3.6
pdata = {'Ohio':frame3['Ohio'][:-1],'Nevada':frame3['Nevada'][:2]} # [:-1]: drop the last row, [:2]: keep rows 0 and 1
frame5=DataFrame(pdata)
print(frame5)
#       Ohio  Nevada
# 2000   1.5     NaN
# 2001   1.7     2.4
pdata = {'Ohio':frame3['Ohio'][:-1],'Nevada':frame3['Nevada']}
# 'Nevada' keeps all rows, so 2002 appears with NaN for the truncated 'Ohio'
frame5=DataFrame(pdata)
print(frame5)
#       Ohio  Nevada
# 2000   1.5     NaN
# 2001   1.7     2.4
# 2002   NaN     2.9
| [
"pandas.Series",
"pandas.isnull",
"numpy.exp",
"numpy.array",
"pandas.DataFrame",
"random.randint",
"numpy.arange"
] | [((115, 165), 'numpy.array', 'np.array', (["['3.14', '-2.7', '30']"], {'dtype': 'np.string_'}), "(['3.14', '-2.7', '30'], dtype=np.string_)\n", (123, 165), True, 'import numpy as np\n'), ((1457, 1478), 'pandas.Series', 'Series', (['[1, 2, -3, 4]'], {}), '([1, 2, -3, 4])\n', (1463, 1478), False, 'from pandas import DataFrame, Series\n'), ((1738, 1787), 'pandas.Series', 'Series', (['[1, 2, -3, 4]'], {'index': "['x', 'y', 'z', 'k']"}), "([1, 2, -3, 4], index=['x', 'y', 'z', 'k'])\n", (1744, 1787), False, 'from pandas import DataFrame, Series\n'), ((2932, 2945), 'pandas.Series', 'Series', (['sdata'], {}), '(sdata)\n', (2938, 2945), False, 'from pandas import DataFrame, Series\n'), ((3238, 3252), 'pandas.Series', 'Series', (['states'], {}), '(states)\n', (3244, 3252), False, 'from pandas import DataFrame, Series\n'), ((3421, 3448), 'pandas.Series', 'Series', (['sdata'], {'index': 'states'}), '(sdata, index=states)\n', (3427, 3448), False, 'from pandas import DataFrame, Series\n'), ((4462, 4477), 'pandas.DataFrame', 'DataFrame', (['data'], {}), '(data)\n', (4471, 4477), False, 'from pandas import DataFrame, Series\n'), ((4920, 4969), 'pandas.DataFrame', 'DataFrame', (['data'], {'columns': "['year', 'state', 'pop']"}), "(data, columns=['year', 'state', 'pop'])\n", (4929, 4969), False, 'from pandas import DataFrame, Series\n'), ((5002, 5110), 'pandas.DataFrame', 'DataFrame', (['data'], {'columns': "['year', 'state', 'pop', 'debt']", 'index': "['one', 'two', 'three', 'four', 'five']"}), "(data, columns=['year', 'state', 'pop', 'debt'], index=['one',\n 'two', 'three', 'four', 'five'])\n", (5011, 5110), False, 'from pandas import DataFrame, Series\n'), ((6169, 6181), 'numpy.arange', 'np.arange', (['(5)'], {}), '(5)\n', (6178, 6181), True, 'import numpy as np\n'), ((6416, 6474), 'pandas.Series', 'Series', (['[-1.2, -1.5, -1.7]'], {'index': "['two', 'three', 'five']"}), "([-1.2, -1.5, -1.7], index=['two', 'three', 'five'])\n", (6422, 6474), False, 'from pandas import 
DataFrame, Series\n'), ((7552, 7566), 'pandas.DataFrame', 'DataFrame', (['pop'], {}), '(pop)\n', (7561, 7566), False, 'from pandas import DataFrame, Series\n'), ((7984, 8027), 'pandas.DataFrame', 'DataFrame', (['frame3'], {'index': '[2001, 2002, 2003]'}), '(frame3, index=[2001, 2002, 2003])\n', (7993, 8027), False, 'from pandas import DataFrame, Series\n'), ((8344, 8360), 'pandas.DataFrame', 'DataFrame', (['pdata'], {}), '(pdata)\n', (8353, 8360), False, 'from pandas import DataFrame, Series\n'), ((8571, 8587), 'pandas.DataFrame', 'DataFrame', (['pdata'], {}), '(pdata)\n', (8580, 8587), False, 'from pandas import DataFrame, Series\n'), ((2485, 2496), 'numpy.exp', 'np.exp', (['obj'], {}), '(obj)\n', (2491, 2496), True, 'import numpy as np\n'), ((3642, 3657), 'pandas.isnull', 'pd.isnull', (['obj4'], {}), '(obj4)\n', (3651, 3657), True, 'import pandas as pd\n'), ((4694, 4743), 'pandas.DataFrame', 'DataFrame', (['data'], {'columns': "['year', 'state', 'pop']"}), "(data, columns=['year', 'state', 'pop'])\n", (4703, 4743), False, 'from pandas import DataFrame, Series\n'), ((458, 471), 'numpy.arange', 'np.arange', (['(32)'], {}), '(32)\n', (467, 471), True, 'import numpy as np\n'), ((1124, 1144), 'random.randint', 'random.randint', (['(0)', '(1)'], {}), '(0, 1)\n', (1138, 1144), False, 'import random\n')] |
#! -*- coding: utf-8 -*-
# SimBERT_v2预训练代码stage2,把simbert的相似度蒸馏到roformer-sim上
# 官方项目:https://github.com/ZhuiyiTechnology/roformer-sim
import json
import numpy as np
import torch
from torch import nn, optim
from torch.utils.data import DataLoader
import torch.nn.functional as F
from bert4torch.models import build_transformer_model, BaseModel
from bert4torch.snippets import sequence_padding, ListDataset, text_segmentate, AutoRegressiveDecoder, Callback, truncate_sequences
from bert4torch.tokenizers import Tokenizer
import jieba
jieba.initialize()

# basic settings
maxlen = 64
batch_size = 12

# BERT config: this should load the stage-1 weights; the official final
# roformer-sim weights are loaded here as an example
config_path = 'F:/Projects/pretrain_ckpt/simbert/[sushen_torch_base]--roformer_chinese_sim_char_base/config.json'
checkpoint_path = 'F:/Projects/pretrain_ckpt/simbert/[sushen_torch_base]--roformer_chinese_sim_char_base/pytorch_model.bin'
dict_path = 'F:/Projects/pretrain_ckpt/simbert/[sushen_torch_base]--roformer_chinese_sim_char_base/vocab.txt'
device = 'cuda' if torch.cuda.is_available() else 'cpu'

# build the tokenizer
tokenizer = Tokenizer(dict_path, do_lower_case=True)
# The corpus here is the same one used in stage 1.
class MyDataset(ListDataset):
    @staticmethod
    def load_data(filename):
        """Read the corpus: one JSON object per line.

        Example line:
        {"text": "懂英语的来!", "synonyms": ["懂英语的来!!!", "懂英语的来", "一句英语翻译 懂英语的来"]}
        """
        with open(filename, encoding='utf-8') as corpus:
            return [json.loads(line) for line in corpus]
def truncate(text):
    """Truncate a sentence: keep only its first segment of at most maxlen - 2 chars."""
    separators = u'\n。!?!?;;,, '
    strip_chars = u';;,, '
    segments = text_segmentate(text, maxlen - 2, separators, strip_chars)
    return segments[0]
def masked_encode(text):
    """Whole-word-masking (wwm) random mask.

    Returns (source, target): `source` is the possibly-masked input ids and
    `target` carries the original ids at positions to predict (0 elsewhere).
    """
    words = jieba.lcut(text)  # word-level segmentation so whole words are masked together
    rands = np.random.random(len(words))
    source, target = [tokenizer._token_start_id], [0]
    for r, w in zip(rands, words):
        ids = tokenizer.encode(w)[0][1:-1]  # drop the [CLS]/[SEP] ids around the word
        if r < 0.15 * 0.8:
            # 15% x 80%: replace the whole word with [MASK] tokens
            source.extend([tokenizer._token_mask_id] * len(ids))
            target.extend(ids)
        elif r < 0.15 * 0.9:
            # 15% x 10%: keep the word unchanged but still predict it
            source.extend(ids)
            target.extend(ids)
        elif r < 0.15:
            # remaining 15% x 10%: substitute random ids (+1 keeps id 0 reserved)
            source.extend(
                np.random.choice(tokenizer._vocab_size - 1, size=len(ids)) + 1
            )
            target.extend(ids)
        else:
            # 85%: keep the word; target 0 means "not predicted"
            source.extend(ids)
            target.extend([0] * len(ids))
    # clip to maxlen and terminate with [SEP] (target end stays 0)
    source = source[:maxlen - 1] + [tokenizer._token_end_id]
    target = target[:maxlen - 1] + [0]
    return source, target
# ========== for distillation: begin ==========
# simbert (teacher model) config
sim_config_path = 'F:/Projects/pretrain_ckpt/simbert/[sushen_torch_base]--simbert_chinese_base/config.json'
sim_checkpoint_path = 'F:/Projects/pretrain_ckpt/simbert/[sushen_torch_base]--simbert_chinese_base/pytorch_model.bin'
sim_dict_path = 'F:/Projects/pretrain_ckpt/simbert/[sushen_torch_base]--simbert_chinese_base/vocab.txt'
# build the teacher tokenizer
sim_tokenizer = Tokenizer(sim_dict_path, do_lower_case=True)  # teacher tokenizer
# build and load the teacher model
simbert = build_transformer_model(sim_config_path, sim_checkpoint_path, with_pool='linear', application='unilm').to(device)
# ========== for distillation: end ==========
def collate_fn(batch):
    """Assemble a seq2seq batch plus teacher similarity targets.

    Returns ([token_ids, segment_ids], [token_ids, segment_ids, sims]) where
    `sims` is the teacher's pairwise cosine-similarity matrix over the batch.
    """
    batch_token_ids, batch_segment_ids = [], []
    batch_sim_token_ids, batch_sim_segment_ids = [], []
    for d in batch:
        text, synonyms = d['text'], d['synonyms']
        synonyms = [text] + synonyms
        np.random.shuffle(synonyms)
        for _ in range(2):
            # NOTE(review): this reassignment overwrites the swap done at the end
            # of the previous iteration, so both passes encode the same direction —
            # the swap may have been meant to take effect; confirm against the
            # upstream simbert implementation.
            text, synonym = synonyms[:2]
            # 50% chance to apply wwm masking to the source half
            if np.random.random() < 0.5:
                text_ids = masked_encode(text)[0]
            else:
                text_ids = tokenizer.encode(text)[0]
            synonym_ids = tokenizer.encode(synonym)[0][1:]
            truncate_sequences(maxlen * 2, -2, text_ids, synonym_ids)
            token_ids = text_ids + synonym_ids
            segment_ids = [0] * len(text_ids) + [1] * len(synonym_ids)
            batch_token_ids.append(token_ids)
            batch_segment_ids.append(segment_ids)

            # ==== for distillation: begin ====
            # encode `text` with the teacher tokenizer for similarity targets
            token_ids, segment_ids = sim_tokenizer.encode(text, maxlen=maxlen)
            batch_sim_token_ids.append(token_ids)
            batch_sim_segment_ids.append(segment_ids)
            # ==== for distillation: end ====
            text, synonym = synonym, text
    batch_token_ids = torch.tensor(sequence_padding(batch_token_ids), dtype=torch.long, device=device)
    batch_segment_ids = torch.tensor(sequence_padding(batch_segment_ids), dtype=torch.long, device=device)

    # ==== for distillation: begin ====
    batch_sim_token_ids = torch.tensor(sequence_padding(batch_sim_token_ids), dtype=torch.long, device=device)
    batch_sim_segment_ids = torch.tensor(sequence_padding(batch_sim_segment_ids), dtype=torch.long, device=device)
    # L2-normalize teacher embeddings, then pairwise dot products = cosine sims
    sim_vecs = simbert.predict([batch_sim_token_ids, batch_sim_segment_ids])[1]
    sim_vecs /= (sim_vecs**2).sum(dim=-1, keepdims=True)**0.5
    sims = torch.matmul(sim_vecs, sim_vecs.T)
    # ==== for distillation: end ====
    return [batch_token_ids, batch_segment_ids], [batch_token_ids, batch_segment_ids, sims]

train_dataloader = DataLoader(MyDataset('../datasets/data_similarity.json'), batch_size=batch_size, shuffle=True, collate_fn=collate_fn)
# build and load the model
class Model(BaseModel):
    """RoFormer encoder with a unilm seq2seq head plus a pooled sentence embedding."""

    def __init__(self, pool_method='cls'):
        super().__init__()
        self.bert = build_transformer_model(config_path=config_path, checkpoint_path=checkpoint_path, model='roformer',
                                            with_pool='linear', with_mlm='linear', dropout_rate=0.2, application='unilm')
        self.pool_method = pool_method

    def get_pool_emb(self, hidden_state, pool_cls, attention_mask):
        """Pool per-token states into one sentence vector according to pool_method."""
        if self.pool_method == 'cls':
            return pool_cls
        elif self.pool_method == 'mean':
            # masked mean over the sequence dimension
            hidden_state = torch.sum(hidden_state * attention_mask[:, :, None], dim=1)
            attention_mask = torch.sum(attention_mask, dim=1)[:, None]
            return hidden_state / attention_mask
        elif self.pool_method == 'max':
            seq_state = hidden_state * attention_mask[:, :, None]
            # BUGFIX: torch.max(..., dim=1) returns a (values, indices) namedtuple;
            # the original returned the tuple itself, which breaks downstream math.
            # NOTE(review): padded positions are zeroed, so the max is floored at 0.
            return torch.max(seq_state, dim=1).values
        else:
            raise ValueError('pool_method illegal')

    def forward(self, token_ids, segment_ids):
        hidden_state, pool_cls, seq_logit = self.bert([token_ids, segment_ids])
        sen_emb = self.get_pool_emb(hidden_state, pool_cls, attention_mask=token_ids.gt(0).long())
        return seq_logit, sen_emb
model = Model(pool_method='cls').to(device)
class TotalLoss(nn.Module):
    """Combined objective: seq2seq cross-entropy plus similarity-distillation MSE."""

    def forward(self, outputs, target):
        seq_logit, sen_emb = outputs
        seq_label, seq_mask, sims = target
        s2s = self.compute_loss_of_seq2seq(seq_logit, seq_label, seq_mask)
        sim = self.compute_loss_of_similarity(sen_emb, sims)
        return {'loss': s2s + sim, 'seq2seq_loss': s2s, 'similarity_loss': sim}

    def compute_loss_of_seq2seq(self, y_pred, y_true, y_mask):
        """Token-level cross-entropy over the generated half of the sequence.

        y_pred: [btz, seq_len, vocab]; y_true: [btz, seq_len]; y_mask: [btz, seq_len]
        (mask marks the positions that must be predicted).
        """
        shifted_pred = y_pred[:, :-1, :]  # predictions, offset by one position
        labels = (y_true[:, 1:] * y_mask[:, 1:]).flatten()  # masked-out labels become 0
        flat_pred = shifted_pred.reshape(-1, shifted_pred.shape[-1])
        return F.cross_entropy(flat_pred, labels, ignore_index=0)

    def compute_loss_of_similarity(self, y_pred, y_true):
        """Scaled MSE between the batch cosine-similarity matrix and the teacher's."""
        unit = F.normalize(y_pred, p=2, dim=-1)  # row-normalize sentence vectors
        sim_matrix = torch.matmul(unit, unit.T)
        return 100 * torch.mean((sim_matrix - y_true) ** 2)
# Adam at 1e-5; report both loss components as metrics alongside the total.
model.compile(loss=TotalLoss(), optimizer=optim.Adam(model.parameters(), 1e-5), metrics=['seq2seq_loss', 'similarity_loss'])
class SynonymsGenerator(AutoRegressiveDecoder):
    """seq2seq decoder for sampling paraphrases."""
    @AutoRegressiveDecoder.wraps('logits')
    def predict(self, inputs, output_ids, states):
        # Append the ids generated so far (segment 1) and rescore the sequence.
        token_ids, segment_ids = inputs
        token_ids = torch.cat([token_ids, output_ids], 1)
        segment_ids = torch.cat([segment_ids, torch.ones_like(output_ids, device=device)], 1)
        seq_logit, _ = model.predict([token_ids, segment_ids])
        return seq_logit[:, -1, :]  # logits for the next token only

    def generate(self, text, n=1, topk=5):
        """Sample n paraphrase candidates for `text` using top-k random sampling."""
        token_ids, segment_ids = tokenizer.encode(text, maxlen=maxlen)
        output_ids = self.random_sample([token_ids, segment_ids], n, topk)  # random (top-k) sampling
        return [tokenizer.decode(ids.cpu().numpy()) for ids in output_ids]

synonyms_generator = SynonymsGenerator(start_id=None, end_id=tokenizer._token_end_id, maxlen=maxlen, device=device)
def cal_sen_emb(text_list):
    """Encode a list of texts and return their sentence embeddings."""
    encoded = [tokenizer.encode(text) for text in text_list]
    token_ids = [pair[0] for pair in encoded]
    segment_ids = [pair[1] for pair in encoded]
    token_ids = torch.tensor(sequence_padding(token_ids), dtype=torch.long, device=device)
    segment_ids = torch.tensor(sequence_padding(segment_ids), dtype=torch.long, device=device)
    _, embeddings = model.predict([token_ids, segment_ids])
    return embeddings
def gen_synonyms(text, n=100, k=20):
    """Generate n paraphrase candidates for `text` and return the k most similar.

    Candidates are sampled from the seq2seq decoder, then ranked by cosine
    similarity of their sentence embeddings against the original text.

    Example:
    >>> gen_synonyms(u'微信和支付宝哪个好?')
    [u'微信和支付宝,哪个好?', u'微信和支付宝哪个好', u'支付宝和微信哪个好', ...]
    """
    candidates = [c for c in set(synonyms_generator.generate(text, n)) if c != text]  # drop exact copies
    sentences = [text] + candidates
    emb = cal_sen_emb(sentences)
    emb /= (emb**2).sum(dim=1, keepdims=True)**0.5  # L2-normalize each row
    # argsort of negated dot products = descending cosine similarity to `text`
    order = torch.matmul(emb[1:], -emb[0]).argsort()
    return [sentences[i + 1] for i in order[:k]]
def just_show(some_samples):
    """Print paraphrases for three randomly chosen samples (best-effort demo)."""
    picks = [np.random.choice(some_samples) for _ in range(3)]
    for s in picks:
        try:
            print(u'原句子:%s' % s)
            print(u'同义句子:', gen_synonyms(s, 10, 10))
            print()
        except Exception:
            # Best-effort demo: decoding may fail on odd inputs, so skip the sample.
            # (Was a bare `except:`, which also swallowed KeyboardInterrupt/SystemExit.)
            pass
class Evaluator(Callback):
    """Track the best (lowest) loss and show sample generations after each epoch."""

    def __init__(self):
        self.lowest = 1e10  # best loss observed so far

    def on_epoch_end(self, global_step, epoch, logs=None):
        current = logs['loss']
        # remember the best checkpoint
        if current <= self.lowest:
            self.lowest = current
            # model.save_weights('./best_model.pt')
        # demo: paraphrase a fixed set of prompts to eyeball progress
        just_show(['微信和支付宝拿个好用?',
                   '微信和支付宝,哪个好?',
                   '微信和支付宝哪个好',
                   '支付宝和微信哪个好',
                   '支付宝和微信哪个好啊',
                   '微信和支付宝那个好用?',
                   '微信和支付宝哪个好用',
                   '支付宝和微信那个更好',
                   '支付宝和微信哪个好用',
                   '微信和支付宝用起来哪个好?',
                   '微信和支付宝选哪个好'
                   ])
if __name__ == '__main__':
    # Train: 50 epochs x 200 steps, showing sample generations after each epoch.
    evaluator = Evaluator()
    model.fit(train_dataloader, epochs=50, steps_per_epoch=200, callbacks=[evaluator])

else:
    # When imported as a module, load the best saved weights instead of training.
    model.load_weights('./best_model.pt')
| [
"jieba.initialize",
"torch.max",
"bert4torch.tokenizers.Tokenizer",
"torch.cuda.is_available",
"torch.sum",
"bert4torch.snippets.AutoRegressiveDecoder.wraps",
"jieba.lcut",
"torch.mean",
"numpy.random.random",
"torch.matmul",
"torch.ones_like",
"json.loads",
"numpy.random.choice",
"bert4to... | [((533, 551), 'jieba.initialize', 'jieba.initialize', ([], {}), '()\n', (549, 551), False, 'import jieba\n'), ((1057, 1097), 'bert4torch.tokenizers.Tokenizer', 'Tokenizer', (['dict_path'], {'do_lower_case': '(True)'}), '(dict_path, do_lower_case=True)\n', (1066, 1097), False, 'from bert4torch.tokenizers import Tokenizer\n'), ((2862, 2906), 'bert4torch.tokenizers.Tokenizer', 'Tokenizer', (['sim_dict_path'], {'do_lower_case': '(True)'}), '(sim_dict_path, do_lower_case=True)\n', (2871, 2906), False, 'from bert4torch.tokenizers import Tokenizer\n'), ((999, 1024), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1022, 1024), False, 'import torch\n'), ((1672, 1688), 'jieba.lcut', 'jieba.lcut', (['text'], {}), '(text)\n', (1682, 1688), False, 'import jieba\n'), ((4837, 4871), 'torch.matmul', 'torch.matmul', (['sim_vecs', 'sim_vecs.T'], {}), '(sim_vecs, sim_vecs.T)\n', (4849, 4871), False, 'import torch\n'), ((7865, 7902), 'bert4torch.snippets.AutoRegressiveDecoder.wraps', 'AutoRegressiveDecoder.wraps', (['"""logits"""'], {}), "('logits')\n", (7892, 7902), False, 'from bert4torch.snippets import sequence_padding, ListDataset, text_segmentate, AutoRegressiveDecoder, Callback, truncate_sequences\n'), ((1558, 1605), 'bert4torch.snippets.text_segmentate', 'text_segmentate', (['text', '(maxlen - 2)', 'seps', 'strips'], {}), '(text, maxlen - 2, seps, strips)\n', (1573, 1605), False, 'from bert4torch.snippets import sequence_padding, ListDataset, text_segmentate, AutoRegressiveDecoder, Callback, truncate_sequences\n'), ((2936, 3043), 'bert4torch.models.build_transformer_model', 'build_transformer_model', (['sim_config_path', 'sim_checkpoint_path'], {'with_pool': '"""linear"""', 'application': '"""unilm"""'}), "(sim_config_path, sim_checkpoint_path, with_pool=\n 'linear', application='unilm')\n", (2959, 3043), False, 'from bert4torch.models import build_transformer_model, BaseModel\n'), ((3325, 3352), 'numpy.random.shuffle', 
'np.random.shuffle', (['synonyms'], {}), '(synonyms)\n', (3342, 3352), True, 'import numpy as np\n'), ((4255, 4288), 'bert4torch.snippets.sequence_padding', 'sequence_padding', (['batch_token_ids'], {}), '(batch_token_ids)\n', (4271, 4288), False, 'from bert4torch.snippets import sequence_padding, ListDataset, text_segmentate, AutoRegressiveDecoder, Callback, truncate_sequences\n'), ((4360, 4395), 'bert4torch.snippets.sequence_padding', 'sequence_padding', (['batch_segment_ids'], {}), '(batch_segment_ids)\n', (4376, 4395), False, 'from bert4torch.snippets import sequence_padding, ListDataset, text_segmentate, AutoRegressiveDecoder, Callback, truncate_sequences\n'), ((4497, 4534), 'bert4torch.snippets.sequence_padding', 'sequence_padding', (['batch_sim_token_ids'], {}), '(batch_sim_token_ids)\n', (4513, 4534), False, 'from bert4torch.snippets import sequence_padding, ListDataset, text_segmentate, AutoRegressiveDecoder, Callback, truncate_sequences\n'), ((4610, 4649), 'bert4torch.snippets.sequence_padding', 'sequence_padding', (['batch_sim_segment_ids'], {}), '(batch_sim_segment_ids)\n', (4626, 4649), False, 'from bert4torch.snippets import sequence_padding, ListDataset, text_segmentate, AutoRegressiveDecoder, Callback, truncate_sequences\n'), ((5251, 5438), 'bert4torch.models.build_transformer_model', 'build_transformer_model', ([], {'config_path': 'config_path', 'checkpoint_path': 'checkpoint_path', 'model': '"""roformer"""', 'with_pool': '"""linear"""', 'with_mlm': '"""linear"""', 'dropout_rate': '(0.2)', 'application': '"""unilm"""'}), "(config_path=config_path, checkpoint_path=\n checkpoint_path, model='roformer', with_pool='linear', with_mlm=\n 'linear', dropout_rate=0.2, application='unilm')\n", (5274, 5438), False, 'from bert4torch.models import build_transformer_model, BaseModel\n'), ((7345, 7392), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['y_pred', 'y_true'], {'ignore_index': '(0)'}), '(y_pred, y_true, ignore_index=0)\n', (7360, 7392), True, 
'import torch.nn.functional as F\n'), ((7469, 7501), 'torch.nn.functional.normalize', 'F.normalize', (['y_pred'], {'p': '(2)', 'dim': '(-1)'}), '(y_pred, p=2, dim=-1)\n', (7480, 7501), True, 'import torch.nn.functional as F\n'), ((7535, 7565), 'torch.matmul', 'torch.matmul', (['y_pred', 'y_pred.T'], {}), '(y_pred, y_pred.T)\n', (7547, 7565), False, 'import torch\n'), ((8014, 8051), 'torch.cat', 'torch.cat', (['[token_ids, output_ids]', '(1)'], {}), '([token_ids, output_ids], 1)\n', (8023, 8051), False, 'import torch\n'), ((8853, 8872), 'bert4torch.snippets.sequence_padding', 'sequence_padding', (['X'], {}), '(X)\n', (8869, 8872), False, 'from bert4torch.snippets import sequence_padding, ListDataset, text_segmentate, AutoRegressiveDecoder, Callback, truncate_sequences\n'), ((8928, 8947), 'bert4torch.snippets.sequence_padding', 'sequence_padding', (['S'], {}), '(S)\n', (8944, 8947), False, 'from bert4torch.snippets import sequence_padding, ListDataset, text_segmentate, AutoRegressiveDecoder, Callback, truncate_sequences\n'), ((9839, 9869), 'numpy.random.choice', 'np.random.choice', (['some_samples'], {}), '(some_samples)\n', (9855, 9869), True, 'import numpy as np\n'), ((3654, 3711), 'bert4torch.snippets.truncate_sequences', 'truncate_sequences', (['(maxlen * 2)', '(-2)', 'text_ids', 'synonym_ids'], {}), '(maxlen * 2, -2, text_ids, synonym_ids)\n', (3672, 3711), False, 'from bert4torch.snippets import sequence_padding, ListDataset, text_segmentate, AutoRegressiveDecoder, Callback, truncate_sequences\n'), ((7596, 7636), 'torch.mean', 'torch.mean', (['((similarities - y_true) ** 2)'], {}), '((similarities - y_true) ** 2)\n', (7606, 7636), False, 'import torch\n'), ((9692, 9718), 'torch.matmul', 'torch.matmul', (['Z[1:]', '(-Z[0])'], {}), '(Z[1:], -Z[0])\n', (9704, 9718), False, 'import torch\n'), ((3436, 3454), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (3452, 3454), True, 'import numpy as np\n'), ((5716, 5775), 'torch.sum', 'torch.sum', 
(['(hidden_state * attention_mask[:, :, None])'], {'dim': '(1)'}), '(hidden_state * attention_mask[:, :, None], dim=1)\n', (5725, 5775), False, 'import torch\n'), ((8098, 8140), 'torch.ones_like', 'torch.ones_like', (['output_ids'], {'device': 'device'}), '(output_ids, device=device)\n', (8113, 8140), False, 'import torch\n'), ((1429, 1442), 'json.loads', 'json.loads', (['l'], {}), '(l)\n', (1439, 1442), False, 'import json\n'), ((5805, 5837), 'torch.sum', 'torch.sum', (['attention_mask'], {'dim': '(1)'}), '(attention_mask, dim=1)\n', (5814, 5837), False, 'import torch\n'), ((6021, 6048), 'torch.max', 'torch.max', (['seq_state'], {'dim': '(1)'}), '(seq_state, dim=1)\n', (6030, 6048), False, 'import torch\n')] |
from fastapi import APIRouter
router_helloworld = APIRouter()
@router_helloworld.get("/")
def get_helloworld():
return {"Hello": "World"}
| [
"fastapi.APIRouter"
] | [((51, 62), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (60, 62), False, 'from fastapi import APIRouter\n')] |
from random import sample, randint
"""
Randomizer for available lists
plus radio broadcasting randomizer
"""
# Available lists randomizer class
class Randomize_and_pop_on_call:
"""
Randomize given array and on call pop given value from array.
If array is empty - returns None
"""
# created only to ease-up code a little
def __init__(self, array):
self.array = sample(array, len(array))
def __call__(self):
return self.array.pop() if len(self.array) else None
# alias
randomize = Randomize_and_pop_on_call
# random radio
broadcast = "Евгеника" if randint(1, 4) == 1 else "Маяк"
__all__ = ['randomize', 'broadcast']
if __name__ == '__main__':
# randomizer check
ar = randomize([1,2,3,4,5])
print(ar(), ar(), ar(), ar(), ar(), ar())
| [
"random.randint"
] | [((600, 613), 'random.randint', 'randint', (['(1)', '(4)'], {}), '(1, 4)\n', (607, 613), False, 'from random import sample, randint\n')] |
import pygame
import sys
pygame.init()
screen = pygame.display.set_mode((640, 480))
clock = pygame.time.Clock()
x = 0
y = 0
# use a (r, g, b) tuple for color
yellow = (255, 255, 0)
# create the basic window/screen and a title/caption
# default is a black background
screen = pygame.display.set_mode((640, 280))
pygame.display.set_caption("Text adventures with Pygame")
# pick a font you have and set its size
myfont = pygame.font.SysFont(None, 30)
pygame.display.set_caption('Animation')
while 1:
clock.tick(30)
for event in pygame.event.get():
if event.type == pygame.QUIT:
sys.exit()
key = pygame.key.get_pressed()
if key[pygame.K_UP]:
y += 1
print(y)
elif key[pygame.K_DOWN]:
y -= 1
print(y)
elif key[pygame.K_RIGHT]:
x += 1
print(x)
elif key[pygame.K_LEFT]:
x -= 1
print(x)
pygame.display.flip()
pygame.quit() | [
"sys.exit",
"pygame.init",
"pygame.quit",
"pygame.event.get",
"pygame.display.set_mode",
"pygame.display.flip",
"pygame.time.Clock",
"pygame.key.get_pressed",
"pygame.display.set_caption",
"pygame.font.SysFont"
] | [((26, 39), 'pygame.init', 'pygame.init', ([], {}), '()\n', (37, 39), False, 'import pygame\n'), ((49, 84), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(640, 480)'], {}), '((640, 480))\n', (72, 84), False, 'import pygame\n'), ((93, 112), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (110, 112), False, 'import pygame\n'), ((277, 312), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(640, 280)'], {}), '((640, 280))\n', (300, 312), False, 'import pygame\n'), ((313, 370), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Text adventures with Pygame"""'], {}), "('Text adventures with Pygame')\n", (339, 370), False, 'import pygame\n'), ((420, 449), 'pygame.font.SysFont', 'pygame.font.SysFont', (['None', '(30)'], {}), '(None, 30)\n', (439, 449), False, 'import pygame\n'), ((451, 490), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Animation"""'], {}), "('Animation')\n", (477, 490), False, 'import pygame\n'), ((836, 849), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (847, 849), False, 'import pygame\n'), ((530, 548), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (546, 548), False, 'import pygame\n'), ((607, 631), 'pygame.key.get_pressed', 'pygame.key.get_pressed', ([], {}), '()\n', (629, 631), False, 'import pygame\n'), ((814, 835), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (833, 835), False, 'import pygame\n'), ((585, 595), 'sys.exit', 'sys.exit', ([], {}), '()\n', (593, 595), False, 'import sys\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Written by <NAME> and <NAME>.
JRC Biomass Project.
Unit D1 Bioeconomy.
This script will rename the header column of the file:
* /common/associations.csv
Before running this script the headers are simply "A", "B", "C".
After running this script, the new headers will be:
* "category"
* "name_input"
* "name_aidb"
"""
# Built-in modules #
# Third party modules #
import pandas
from tqdm import tqdm
# First party modules #
# Internal modules #
from libcbm_runner.core.continent import continent
###############################################################################
class RenameAssociations(object):
def __init__(self, country):
# Main attributes #
self.country = country
def __call__(self, verbose=False):
# Get path #
path = self.country.orig_data.paths.associations
# Load dataframe #
df = pandas.read_csv(str(path))
# Modify dataframe #
df.columns = ["category", "name_input", "name_aidb"]
# Write dataframe back to disk #
df.to_csv(str(path), index=False, float_format='%g')
###############################################################################
if __name__ == '__main__':
# Make renamer objects, one per country #
renamers = [RenameAssociations(c) for c in continent]
# Run them all #
for merger in tqdm(renamers):
merger()
| [
"tqdm.tqdm"
] | [((1391, 1405), 'tqdm.tqdm', 'tqdm', (['renamers'], {}), '(renamers)\n', (1395, 1405), False, 'from tqdm import tqdm\n')] |
# https://www.hackerrank.com/challenges/matching-anything-but-new-line/problem
import re
# Inputs
standard_input = """123.456.abc.def"""
regex_pattern = r".{3}\..{3}\..{3}\..{3}$" # Do not delete 'r'.
test_string = input()
# 123.456.abc.def
match = re.match(regex_pattern, test_string) is not None
print(str(match).lower())
# true
| [
"re.match"
] | [((259, 295), 're.match', 're.match', (['regex_pattern', 'test_string'], {}), '(regex_pattern, test_string)\n', (267, 295), False, 'import re\n')] |
import time
import numpy as np
import tensorflow as tf
from yalenet import YaleNet
from Mybase.solver import Solver
"""
def test():
mdl = YaleNet(cls_num=1000, reg=1e-4, typ=tf.float32)
sov = Solver(mdl,
opm_cfg={
'lr_base': 0.005,
'decay_rule': 'fixed',
#'decay_rule': 'exponential',
'decay_rate': 0.5,
'decay_step': 50,
'staircase': False,
#'optim_rule': 'adam',
'optim_rule': 'momentum',
'momentum': 0.9,
'use_nesterov': True
},
gpu_lst = '0',
bat_siz = 50,
tra_num = 2000,
val_num = 100,
epc_num = 200000,
min_que_tra = 10000,
min_que_val = 1000,
prt_ena = True,
itr_per_prt = 20,
tst_num = None,
tst_shw = True,
tst_sav = True,
mdl_nam = 'model.ckpt',
mdl_dir = 'Mybase/Model',
log_dir = 'Mybase/logdata',
dat_dir = 'Mybase/datasets',
mov_ave_dca = 0.99)
print('TRAINING...')
sov.train()
'''
print('TESTING...')
sov.test()
sov.display_detections()
#sov.show_loss_acc()
'''
"""
def test():
mdl = YaleNet(cls_num=21, reg=1e-4, typ=tf.float32)
sov = Solver(mdl,
opm_cfg={
'lr_base': 1e-5,
'decay_rule': 'fixed',
#'decay_rule': 'exponential',
'decay_rate': 0.5,
'decay_step': 50,
'staircase': False,
#'optim_rule': 'adam',
'optim_rule': 'momentum',
'momentum': 0.9,
'use_nesterov': True
},
gpu_lst = '0,1,2,3',
bat_siz = 4,
tra_num = 2000,
val_num = 100,
epc_num = 200000,
min_que_tra = 4000,
min_que_val = 200,
prt_ena = True,
itr_per_prt = 20,
tst_num = None,
tst_shw = True,
tst_sav = True,
mdl_nam = 'model.ckpt',
mdl_dir = 'Mybase/Model',
log_dir = 'Mybase/logdata',
dat_dir = 'Mybase/datasets',
mov_ave_dca = 0.99)
print('TRAINING...')
sov.train()
'''
print('TESTING...')
#sov.test()
sov.display_detections()
#sov.show_loss_acc()
'''
test()
| [
"yalenet.YaleNet",
"Mybase.solver.Solver"
] | [((1587, 1634), 'yalenet.YaleNet', 'YaleNet', ([], {'cls_num': '(21)', 'reg': '(0.0001)', 'typ': 'tf.float32'}), '(cls_num=21, reg=0.0001, typ=tf.float32)\n', (1594, 1634), False, 'from yalenet import YaleNet\n'), ((1643, 2149), 'Mybase.solver.Solver', 'Solver', (['mdl'], {'opm_cfg': "{'lr_base': 1e-05, 'decay_rule': 'fixed', 'decay_rate': 0.5, 'decay_step': \n 50, 'staircase': False, 'optim_rule': 'momentum', 'momentum': 0.9,\n 'use_nesterov': True}", 'gpu_lst': '"""0,1,2,3"""', 'bat_siz': '(4)', 'tra_num': '(2000)', 'val_num': '(100)', 'epc_num': '(200000)', 'min_que_tra': '(4000)', 'min_que_val': '(200)', 'prt_ena': '(True)', 'itr_per_prt': '(20)', 'tst_num': 'None', 'tst_shw': '(True)', 'tst_sav': '(True)', 'mdl_nam': '"""model.ckpt"""', 'mdl_dir': '"""Mybase/Model"""', 'log_dir': '"""Mybase/logdata"""', 'dat_dir': '"""Mybase/datasets"""', 'mov_ave_dca': '(0.99)'}), "(mdl, opm_cfg={'lr_base': 1e-05, 'decay_rule': 'fixed', 'decay_rate':\n 0.5, 'decay_step': 50, 'staircase': False, 'optim_rule': 'momentum',\n 'momentum': 0.9, 'use_nesterov': True}, gpu_lst='0,1,2,3', bat_siz=4,\n tra_num=2000, val_num=100, epc_num=200000, min_que_tra=4000,\n min_que_val=200, prt_ena=True, itr_per_prt=20, tst_num=None, tst_shw=\n True, tst_sav=True, mdl_nam='model.ckpt', mdl_dir='Mybase/Model',\n log_dir='Mybase/logdata', dat_dir='Mybase/datasets', mov_ave_dca=0.99)\n", (1649, 2149), False, 'from Mybase.solver import Solver\n')] |