import datetime
import os
import sys
from math import inf
from typing import Any
import hypothesis.extra.numpy as xps
import hypothesis.strategies as st
import numpy
import pytest
from hypothesis import assume, given
from eopf.product.utils import (
apply_xpath,
conv,
convert_to_unix_time,
is_date,
parse_xml,
reverse_conv,
translate_structure,
)
@pytest.fixture
def tree(EMBEDED_TEST_DATA_FOLDER: str):
snippet_path = os.path.join(EMBEDED_TEST_DATA_FOLDER, "snippet_xfdumanifest.xml")
with open(snippet_path) as f:
return parse_xml(f)
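# Strategy helper: draws a value from `elements` and pairs it with the Python
# type (and, optionally, the container type) expected after conversion.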
@st.composite
def value_with_type(draw, elements=st.integers(), expected_type=int, expected_container_type=None):
if isinstance(expected_type, st.SearchStrategy):
expected_type = draw(expected_type)
if expected_container_type is not None:
if isinstance(expected_container_type, st.SearchStrategy):
expected_container_type = draw(expected_container_type)
return (draw(elements), expected_type, expected_container_type)
return (draw(elements), expected_type)
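# Strategy helper: draws a numpy scalar of a dtype drawn from `dtype_st`.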
@st.composite
def numpy_value(draw, dtype_st=xps.scalar_dtypes(), allow_infinity=True, allow_nan=True):
return draw(xps.from_dtype(draw(dtype_st), allow_infinity=allow_infinity, allow_nan=allow_nan))
@pytest.mark.unit
def test_parse_xml(tree):
"""Given an input xml,
the output of the function must match the expected output"""
result = ""
display_namespaces = True
for element in tree.iter():
tag = element.tag
result += f"{tag}\n"
if display_namespaces:
display_namespaces = False
for key, value in element.nsmap.items():
result += f"{key} : {value}\n"
attributes = element.attrib
for key, value in attributes.items():
result += f"{key} : {value}\n"
textual_content = element.text
if textual_content and textual_content.strip():
result += textual_content + "\n"
file_path = os.path.join(os.path.abspath("tests/data"), "solutions.txt")
with open(file_path, "r") as f:
expected = f.read()
assert result == expected
@pytest.mark.unit
def test_translate_structure(tree):
"""Given an input xml,
the output of the function must match the expected output"""
MAP = {
"title": "concat('',metadataSection/metadataObject[@ID='generalProductInformation']/metadataWrap/xmlData/"
"sentinel3:generalProductInformation/sentinel3:productName/text())",
"Conventions": "'CF-1.9'",
}
NAMESPACES = {
"xfdu": "urn:ccsds:schema:xfdu:1",
"gml": "http://www.opengis.net/gml",
"sentinel-safe": "http://www.esa.int/safe/sentinel/1.1",
"sentinel3": "http://www.esa.int/safe/sentinel/sentinel-3/1.0",
"olci": "http://www.esa.int/safe/sentinel/sentinel-3/olci/1.0",
}
result = translate_structure(MAP, tree, NAMESPACES)
assert result == {
"title": "S3A_OL_1_EFR____20220116T092821_20220116T093121_20220117T134858_0179_081_036_2160_LN1_O_NT_002.SEN3",
"Conventions": "CF-1.9",
}
@pytest.mark.unit
def test_apply_xpath(tree):
"""Given an input xml,
the output of the function must match the expected output"""
MAP = {
"title": "concat('',metadataSection/metadataObject[@ID='generalProductInformation']/metadataWrap/xmlData/"
"sentinel3:generalProductInformation/sentinel3:productName/text())",
"Conventions": "'CF-1.9'",
}
NAMESPACES = {
"xfdu": "urn:ccsds:schema:xfdu:1",
"gml": "http://www.opengis.net/gml",
"sentinel-safe": "http://www.esa.int/safe/sentinel/1.1",
"sentinel3": "http://www.esa.int/safe/sentinel/sentinel-3/1.0",
"olci": "http://www.esa.int/safe/sentinel/sentinel-3/olci/1.0",
}
result = {attr: apply_xpath(tree, MAP[attr], NAMESPACES) for attr in MAP}
assert result == {
"title": "S3A_OL_1_EFR____20220116T092821_20220116T093121_20220117T134858_0179_081_036_2160_LN1_O_NT_002.SEN3",
"Conventions": "CF-1.9",
}
@pytest.mark.unit
def test_is_date():
string_date_1 = "2020-03-31T17:19:29.230522Z" # Zulu time
string_date_2 = "2020-03-31T17:19:29.230522GMT+3" # GMT+3 Time
string_date_3 = "some_random_string"
dt_date = datetime.datetime(2020, 3, 31, 17, 19, 29, 230522)
assert is_date(string_date_1)
assert is_date(string_date_2)
assert is_date(str(dt_date))
assert not is_date(string_date_3)
@pytest.mark.unit
def test_convert_unix_time():
import pytz
    # Define a datetime-like string and verify the conversion matches both the datetime object and the expected unix time (in microseconds)
string_date = "2020-03-31T17:19:29.230522Z"
dt_date = datetime.datetime(2020, 3, 31, 17, 19, 29, 230522, pytz.UTC)
expected_unix_time = 1585675169230522
assert convert_to_unix_time(string_date) == convert_to_unix_time(dt_date) == expected_unix_time
    # Define a datetime-like string in a different time zone (GMT-3) and verify that it doesn't match the expected unix time
string_date = "2020-03-31T17:19:29.230522GMT-3"
assert convert_to_unix_time(string_date) != convert_to_unix_time(dt_date)
assert convert_to_unix_time(string_date) != expected_unix_time
    # An invalid date string must raise a ValueError
    with pytest.raises(ValueError):
        convert_to_unix_time("a string that is not a valid date")
@pytest.mark.unit
@given(
value_and_types=st.one_of(
value_with_type(
st.lists(elements=st.floats(allow_infinity=False, allow_nan=False), unique=True, min_size=10),
float,
list,
),
value_with_type(st.lists(elements=st.integers(), unique=True, min_size=10), int, list),
value_with_type(st.lists(elements=st.booleans(), unique=True, min_size=2), int, list),
value_with_type(st.sets(elements=st.floats(allow_infinity=False, allow_nan=False), min_size=10), float, set),
value_with_type(st.sets(elements=st.integers(), min_size=10), int, set),
value_with_type(st.sets(elements=st.booleans(), min_size=2), int, set),
value_with_type(st.dictionaries(st.text(), st.integers(), min_size=10), int, dict),
value_with_type(st.dictionaries(st.text(), st.booleans(), min_size=10), int, dict),
value_with_type(
st.dictionaries(st.text(), st.floats(allow_infinity=False, allow_nan=False), min_size=10),
float,
dict,
),
value_with_type(xps.arrays(xps.floating_dtypes(), 10, unique=True), float, list),
value_with_type(xps.arrays(xps.integer_dtypes(), 10, unique=True), int, list),
value_with_type(xps.arrays(xps.boolean_dtypes(), 10, unique=True), int, list),
),
)
def test_conv_sequences(value_and_types: tuple[Any, type, type]):
values, type_, container_type = value_and_types
assume(inf not in values)
converted_list = conv(values)
assert isinstance(converted_list, container_type)
    # Check that the size of the converted value is unchanged
    assert len(converted_list) == len(values)
    # Check that each item of the converted value has the expected type
if isinstance(converted_list, dict):
iterator = converted_list.values()
original = values.values()
else:
iterator = converted_list
original = values
for converted_value, value in zip(sorted(iterator), sorted(original)):
assert isinstance(converted_value, type_)
conv_value = conv(value)
# check if converted values are the same or both are nan
assert converted_value == conv_value or (converted_value != converted_value and conv_value != conv_value)
@pytest.mark.unit
@pytest.mark.parametrize("EPSILON", [0.1])
@given(value=numpy_value(xps.floating_dtypes(), allow_infinity=False, allow_nan=False))
def test_epsilon_on_fp_conv(value, EPSILON):
converted_value = conv(value)
assert value - converted_value < EPSILON
assert converted_value - value < EPSILON
@pytest.mark.unit
@given(
value_and_type=st.one_of(
value_with_type(
elements=numpy_value(xps.floating_dtypes(), allow_infinity=False, allow_nan=False),
expected_type=float,
),
value_with_type(
elements=numpy_value(xps.integer_dtypes(), allow_infinity=False, allow_nan=False),
expected_type=int,
),
value_with_type(
elements=st.datetimes(),
expected_type=int,
),
),
)
def test_conv(value_and_type):
value, expected_type = value_and_type
converted_value = conv(value)
assert isinstance(converted_value, expected_type)
@pytest.mark.unit
@pytest.mark.parametrize(
"sysmax, maxint",
[
(numpy.int64(sys.maxsize), numpy.int64(9223372036854775807)),
],
)
def test_maxint_conv(sysmax, maxint):
    # conv must preserve sys.maxsize exactly (no precision loss)
assert conv(sysmax) == maxint
@pytest.mark.unit
@given(
value_and_types=st.one_of(
value_with_type(
st.integers(min_value=numpy.iinfo("int64").min, max_value=numpy.iinfo("int64").max),
int,
xps.integer_dtypes(endianness="=", sizes=(64,)),
),
value_with_type(
st.integers(min_value=numpy.iinfo("int32").min, max_value=numpy.iinfo("int32").max),
int,
xps.integer_dtypes(endianness="=", sizes=(32,)),
),
value_with_type(
st.integers(min_value=numpy.iinfo("int16").min, max_value=numpy.iinfo("int16").max),
int,
xps.integer_dtypes(endianness="=", sizes=(16,)),
),
value_with_type(
st.integers(min_value=numpy.iinfo("int8").min, max_value=numpy.iinfo("int8").max),
int,
xps.integer_dtypes(endianness="=", sizes=(8,)),
),
value_with_type(st.floats(width=16), float, xps.floating_dtypes(endianness="=", sizes=(16,))),
value_with_type(st.floats(width=32), float, xps.floating_dtypes(endianness="=", sizes=(32,))),
value_with_type(st.floats(width=64), float, xps.floating_dtypes(endianness="=", sizes=(64,))),
),
)
def test_reverse_conv(value_and_types):
value, current_type, data_type = value_and_types
# verify if the current data type is as expected (int or float)
assert isinstance(value, current_type)
# convert value to given data type (int64, int32, float64 etc .. )
converted_value = reverse_conv(data_type, value)
# check if conversion is performed according to given data (int -> numpy.int64, float -> numpy.float64)
assert numpy.issubdtype(type(converted_value), data_type)
    # check that the converted value's type differs from the original Python type
assert type(converted_value) != current_type
import pytest
from app.html.inline_builder import InlineBuilder, LinkBuilder, CodeBuilder, ImageBuilder
from app.markdown.inline_parser import InlineParser, LinkParser, CodeParser, ImageParser
from app.settings import setting
class TestInlineBuilder:
""" Inline要素からHTML文字列が得られるか検証 """
# HTML組み立て
@pytest.mark.parametrize(
('inline_text', 'expected'),
[
(
'plain text',
'plain text'
),
(
'[参考リンク](https://docs.python.org/3/)',
f'<a href="https://docs.python.org/3/" class="{setting["class_name"]["a"]}">参考リンク</a>'
),
(
'',
'<img src="image.png" alt="awesome image">'
),
(
'`DependencyInjection`',
f'<code class="{setting["class_name"]["code"]}">DependencyInjection</code>'
),
],
ids=['plain', 'link', 'image', 'code'])
def test_build(self, inline_text: str, expected: str):
# GIVEN
sut = InlineBuilder()
inline = InlineParser().parse(inline_text)[0]
# WHEN
actual = sut.build(inline)
# THEN
assert actual == expected
class TestLinkBuilder:
""" LinkInline要素からaタグと対応するHTML文字列が得られるか検証 """
# 対象判定
@pytest.mark.parametrize(
('inline_text', 'expected'),
[
('[this is a link](url)', True),
('plain text', False),
('[参考リンク](https://www.google.com/)', True)
],
ids=['target', 'not target', 'normal link'])
def test_target(self, inline_text: str, expected: bool):
# GIVEN
sut = LinkBuilder()
inline = InlineParser().parse(inline_text)[0]
# WHEN
actual = sut.is_target(inline)
# THEN
assert actual == expected
    # HTML assembly
@pytest.mark.parametrize(
('inline_text', 'expected'),
[
(
'[this is a link](url)',
f'<a href="url" class="{setting["class_name"]["a"]}">this is a link</a>'
),
(
'[参考リンク](https://www.google.com/)',
f'<a href="https://www.google.com/" class="{setting["class_name"]["a"]}">参考リンク</a>'
)
],
ids=['url', 'google'])
def test_build(self, inline_text: str, expected: str):
# GIVEN
sut = LinkBuilder()
inline = LinkParser().parse(inline_text)
# WHEN
actual = sut.build(inline)
# THEN
assert actual == expected
class TestCodeBuilder:
""" CodeInline要素からcodeタグと対応するHTML文字列が得られるか検証 """
# 対象判定
@pytest.mark.parametrize(
('inline_text', 'expected'), [
('`#`', True),
('[this is a link](url)', False),
],
ids=['target', 'not target'])
def test_target(self, inline_text: str, expected: bool):
# GIVEN
sut = CodeBuilder()
inline = InlineParser().parse(inline_text)[0]
# WHEN
actual = sut.is_target(inline)
# THEN
assert actual == expected
    # HTML assembly
@pytest.mark.parametrize(
('inline_text', 'expected'), [
(
'`plain text`',
f'<code class="{setting["class_name"]["code"]}">plain text</code>'
),
(
'`codeタグ`',
f'<code class="{setting["class_name"]["code"]}">codeタグ</code>'
),
],
ids=['plain', 'full width'])
def test_build(self, inline_text: str, expected: str):
# GIVEN
sut = CodeBuilder()
inline = CodeParser().parse(inline_text)
# WHEN
actual = sut.build(inline)
# THEN
assert actual == expected
class TestImageBuilder:
""" ImageInline要素からimgタグと対応するHTML文字列が得られるか検証 """
# 対象判定
@pytest.mark.parametrize(
('inline_text', 'expected'),
[
('', True),
('`code text`', False),
],
ids=['target', 'not target'])
def test_target(self, inline_text: str, expected: bool):
# GIVEN
sut = ImageBuilder()
inline = InlineParser().parse(inline_text)[0]
# WHEN
actual = sut.is_target(inline)
# THEN
assert actual == expected
    # HTML assembly
@pytest.mark.parametrize(
('inline_text', 'expected'),
[
(
'',
'<img src="images/dog.png" alt="わんこ">'
),
(
'',
'<img src="http://localhost/image.png" alt="画像">'
),
],
ids=['path_expression', 'url_expression'])
def test_build(self, inline_text, expected: str):
# GIVEN
sut = ImageBuilder()
inline = ImageParser().parse(inline_text)
# WHEN
actual = sut.build(inline)
# THEN
assert actual == expected
import common
import student_code
import array
class bcolors:
RED = "\x1b[31m"
GREEN = "\x1b[32m"
NORMAL = "\x1b[0m"
def read_data(training_data, test_data1, gold_data1, filename):
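    # Each record in the binary file is DATA_DIM features followed by a label,
    # stored as 32-bit floats: TRAINING_SIZE training rows, then TEST_SIZE test rows.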
data = array.array('f')
test = array.array('f')
with open(filename, 'rb') as fd:
data.fromfile(fd, common.constants.TRAINING_SIZE *
(common.constants.DATA_DIM+1))
test.fromfile(fd, common.constants.TEST_SIZE *
(common.constants.DATA_DIM+1))
for i in range(common.constants.TRAINING_SIZE):
for j in range(common.constants.DATA_DIM+1):
training_data[i][j] = data[i*(common.constants.DATA_DIM+1)+j]
for i in range(common.constants.TEST_SIZE):
for j in range(common.constants.DATA_DIM):
test_data1[i][j] = test[i*(common.constants.DATA_DIM+1)+j]
test_data1[i][common.constants.DATA_DIM] = -1
gold_data1[i] = test[i *
(common.constants.DATA_DIM+1)+common.constants.DATA_DIM]
f = open(filename.split('.dat')[0] + '.csv', 'w')
for i in training_data:
f.write(str(i[0]) + ',' + str(i[1]) + ',' + str(i[2]) + '\n')
for i in range(len(test_data1)):
f.write(str(test_data1[i][0]) + ',' + str(test_data1[i]
[1]) + ',' + str(gold_data1[i]) + '\n')
f.close()
def read_data_csv(training_data, test_data1, gold_data1, filename):
data = []
test = []
with open(filename, 'r') as fd:
lines = fd.readlines()
for i in range(common.constants.TRAINING_SIZE + common.constants.TEST_SIZE):
if i < common.constants.TRAINING_SIZE:
data += [float(j) for j in lines[i].strip().split(',')]
else:
test += [float(j) for j in lines[i].strip().split(',')]
for i in range(common.constants.TRAINING_SIZE):
for j in range(common.constants.DATA_DIM+1):
training_data[i][j] = data[i*(common.constants.DATA_DIM+1)+j]
for i in range(common.constants.TEST_SIZE):
for j in range(common.constants.DATA_DIM):
test_data1[i][j] = test[i*(common.constants.DATA_DIM+1)+j]
test_data1[i][common.constants.DATA_DIM] = -1
gold_data1[i] = test[i *
(common.constants.DATA_DIM+1)+common.constants.DATA_DIM]
def run_experiment1(filename):
gold_data = [0 for x in range(common.constants.TEST_SIZE)]
test_data = [[0, 0, 0] for x in range(common.constants.TEST_SIZE)]
training_data = [[0, 0, 0] for x in range(common.constants.TRAINING_SIZE)]
# generating data should be hidden from students!
read_data_csv(training_data, test_data, gold_data, filename)
# this is one of the two student functions
# print (training_data)#, test_data, gold_data)
student_code.part_one_classifier(training_data, test_data)
# part 1 grading
error = 0
for i in range(common.constants.TEST_SIZE):
        if test_data[i][common.constants.DATA_DIM] != gold_data[i]:
error += 1
print("Incorrect classification is "+str(error) +
" out of " + str(common.constants.TEST_SIZE))
success = True
if (error <= float(common.constants.TEST_SIZE)*.05):
print("(" + bcolors.GREEN + "SUCCESS" + bcolors.NORMAL + ")")
else:
success = False
print("(" + bcolors.RED + "FAIL" + bcolors.NORMAL + ") maximum " +
str(float(common.constants.TEST_SIZE)*.05))
    print()
return success
def run_experiment2(filename):
gold_data = [0 for x in range(common.constants.TEST_SIZE)]
test_data = [[0, 0, 0] for x in range(common.constants.TEST_SIZE)]
training_data = [[0, 0, 0] for x in range(common.constants.TRAINING_SIZE)]
# generating data should be hidden from students!
read_data_csv(training_data, test_data, gold_data, filename)
# this is one of the two student functions
student_code.part_two_classifier(training_data, test_data)
# part 2 grading
error = 0
for i in range(common.constants.TEST_SIZE):
        if test_data[i][common.constants.DATA_DIM] != gold_data[i]:
# print("error is data", test_data[i])
error += 1
print("Incorrect classification is "+str(error) +
" out of " + str(common.constants.TEST_SIZE))
success = True
if (error <= float(common.constants.TEST_SIZE)*.05):
print("(" + bcolors.GREEN + "SUCCESS" + bcolors.NORMAL + ")")
else:
success = False
print("(" + bcolors.RED + "FAIL" + bcolors.NORMAL + ") maximum " +
str(float(common.constants.TEST_SIZE)*.05))
    print()
return success
all_passed = True
filename1 = "../data1.csv"
print("Linear Classifier : Dataset 1")
all_passed = run_experiment1(filename1) and all_passed
filename2 = "../data2.csv"
print("Linear Classifier : Dataset 2")
all_passed = run_experiment1(filename2) and all_passed
filename3 = "../data3.csv"
print("Linear Classifier : Dataset 3")
all_passed = run_experiment1(filename3) and all_passed
filename4 = "../data4.csv"
print("Linear Classifier : Dataset 4")
all_passed = run_experiment1(filename4) and all_passed
filename5 = "../datar1.csv"
print("Accelerometer : Dataset 1")
all_passed = run_experiment2(filename5) and all_passed
filename6 = "../datar2.csv"
print("Accelerometer : Dataset 2")
all_passed = run_experiment2(filename6) and all_passed
filename7 = "../datar3.csv"
print("Accelerometer : Dataset 3")
all_passed = run_experiment2(filename7) and all_passed
if all_passed:
exit(0)
else:
exit(1)
from bottle import request, response, HTTPResponse
import os, datetime, re
import json as JSON
import jwt
class auth:
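    # JWT helpers: issue a 24h token for the configured password and verify it.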
def gettoken(mypass):
secret = str(os.getenv('API_SCRT', '!@ws4RT4ws212@#%'))
password = str(os.getenv('API_PASS', 'password'))
if mypass == password:
exp = datetime.datetime.utcnow() + datetime.timedelta(hours=24)
            ret = jwt.encode({'exp': exp, 'password': hash(password + secret)}, secret, algorithm='HS256')
            if isinstance(ret, bytes):  # PyJWT < 2.0 returns bytes
                ret = ret.decode('utf-8')
            return [True, {'exp': str(exp), "token": str(ret)}, None, {"token": str(ret)}]
return [False, "Invalid password", 403]
def verify(token):
secret = str(os.getenv('API_SCRT', '!@ws4RT4ws212@#%'))
password = str(os.getenv('API_PASS', 'password'))
try:
decoded = jwt.decode(token, secret, leeway=10, algorithms=['HS256'])
if decoded["password"] != hash(password + secret):
                raise ValueError("password mismatch")
        except jwt.ExpiredSignatureError:
            return [False, "Signature expired", 403]
        except Exception:
return [False, "Invalid token", 403]
return [True, None, None]
class ret:
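    # Uniform JSON response envelope; the anon level controls how much of the
    # incoming query info is echoed back (TEST=all, DEV=filtered, PROD=none).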
def __init__(self, route = None, params=None, header = None, cookie = None, anon = None) :
self.data = {
'queryInfos' : {
'route': route,
'params': params,
'header': header,
'cookie': cookie
},
'status' : 200,
'error' : None,
'data' : None,
'succes' : False,
'mod' : None
}
self.err = False
self.anon = anon
def get(self):
return self.data
def ret(self):
self.__anon()
self.data['mod'] = self.anon
if self.data['error'] is None :
self.data['succes'] = True
self.data['status'] = 200
return self.data
def __anon(self):
level = self.__getannon()
if level == 0 :
return
if level == 2 :
if "queryInfos" in self.data:
del self.data["queryInfos"]
return
forb = ["content-type", "connection", "x-real-ip", "x-forwarded-for",
"x-forwarded-proto", "x-forwarded-ssl", "x-forwarded-port",
"user-agent", "accept", "cache-control", "accept-encoding",
"cookie", "content-length"]
for i in self.data["queryInfos"]:
if i is None:
continue
for i2 in forb:
if self.data["queryInfos"][i] is None or i2 not in self.data["queryInfos"][i]:
continue
del self.data["queryInfos"][i][i2]
def add_error(self, error = None, code = None):
self.data['error'] = error
self.data['status'] = code
self.data['data'] = None
if code is None:
self.add_error("Bad code input", 500)
return 1
if error is None:
self.add_error("Bad error input", 500)
return 1
self.err = True
return self.ret()
def add_data(self, data = None):
self.data['data'] = data
self.set_code(200)
if data is None:
self.add_error("Bad data input", 500)
return 1
return 0
def set_code(self, code = None):
self.data['status'] = code
if code is None:
self.add_error("Bad code input", 500)
return 1
return 0
def __getannon(self):
self.anon = self.anon if self.anon is not None else str(os.getenv('API_MOD', 'PROD'))
return self.__anon_to_lvl(self.anon)
def __anon_to_lvl(self, anon = 'PROD'):
mod = {
"PROD": 2,
"DEV" : 1,
"TEST": 0
}
if anon in mod:
return mod[anon]
return 2
class check:
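    # Request-payload validation helpers used by the callnext middleware below.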
def contain(json, array, type = "body"):
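        # Nested lists mark alternatives, e.g.:
        # contain({"a": 1, "b": 2}, ["a", ["b", "c"]]) -> [True, {"a": 1, "b": 2, "c": None}, 200]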
type = type.upper()
if json is None:
return [False, "Invalid json received ", 400]
for i in array:
if isinstance(i, list):
if not check.contain_opt(json, i):
return [False, "[" + type +"] Missing on of parameters: " + JSON.dumps(i), 400]
json = check.setnoneopt(json, i)
elif i not in json:
return [False, "[" + type +"] Missing parameter : " + i, 400]
elif json[i] is None:
return [False, "[" + type +"] Null parameter : " + i, 400]
return [True, json, 200]
def contain_opt(json, arr_opt):
for i in arr_opt:
if isinstance(i, list):
                if check.contain(json, i)[0]:  # contain() returns [ok, payload, status]
return True
elif i in json:
return True
return False
def setnoneopt(json, arr_opt):
for i in arr_opt:
if i not in json:
json[i] = None
return json
def json(request):
        res = {}
        try:
            res = request.json
        except Exception:
            # malformed JSON body: fall back to an empty dict
            pass
        return res
def head_json(request, cookie = None):
res = {}
try:
for i in cookie:
res[i.lower()] = cookie[i]
for i in request.headers.keys():
res[i.lower()] = request.headers.raw(i)
except:
pass
return res
def cookies_json(request):
res = {}
try:
cookie = request.headers.raw("Cookie")
for i in cookie.split(";"):
i = i.split("=")
res[i[0].strip()] = i[1]
except:
pass
return res
def route_json(request):
res = {}
dat = request.path[1:].split('/')
i = 0
while i < len(dat) - 1:
res[str(dat[i])] = str(dat[i + 1])
i += 1
return res
class callnext:
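    # Chains request handlers: each handler receives this object and calls
    # call_next() with its [ok, payload, status(, cookies)] result.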
def __init__(self, req, resp = None, err = None, anonlvl = None):
self.pr = check.json(req)
self.ck = check.cookies_json(req)
self.hd = check.head_json(req, self.ck)
self.rt = check.route_json(req)
self.get = dict(req.query.decode())
self.private = {}
self.cookie = {}
self.toret = ret(req.path, self.pr, self.hd, self.ck, anonlvl)
self.req = req
self.resp = resp
self.err = err
def call(self, nextc):
if self.req.method == 'OPTIONS':
return {}
if len(nextc) == 0:
return self.ret()
return nextc[0](self, nextc)
def call_next(self, nextc, err = [True]):
if not err[0]:
self.resp.status = err[2]
return self.toret.add_error(err[1], err[2])
nextc.pop(0)
if len(nextc) == 0:
if len(err) >= 4 and err[3] is not None:
self.__merge_cookie(err[3])
self.toret.add_data(err[1])
return self.ret()
return nextc[0](self, nextc)
def ret(self):
if self.resp is not None:
for cookie in self.cookie:
self.resp.set_cookie(cookie, self.cookie[cookie], path='/')
self.resp.content_type = 'application/json'
self.resp.status = self.toret.data['status']
return self.toret.ret()
return self.toret.ret()
def __merge_cookie(self, cookies):
        self.cookie = {**self.cookie, **cookies}
from django.conf.urls import url
from django.conf import settings
from .views import data_sniffer_health_check
if settings.DATA_SNIFFER_ENABLED:
urlpatterns = [
url(r'^(?P<key>[-\w]+)', data_sniffer_health_check, name="data_sniffer_health_check"),
]
else:
urlpatterns = []
# Generated by Django 2.1.2 on 2018-10-19 14:46
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mediumwave', '0010_auto_20181017_1937'),
]
operations = [
migrations.AddField(
model_name='transmitter',
name='iso',
field=models.CharField(blank=True, max_length=3),
),
]
#!/usr/bin/env python
import sys
import json
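# Hadoop-streaming-style reducer: reads "user<TAB>json" lines from stdin,
# finds the user with the most ratings and that user's most-rated genre.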
mostRatingUser = None
mostRatingCount = 0
mostRatingInfo = None
for line in sys.stdin:
line = line.strip()
user, genreString = line.split("\t", 1)
genreInfo = json.loads(genreString)
if not mostRatingUser or len(genreInfo) > mostRatingCount:
mostRatingUser = user
mostRatingCount = len(genreInfo)
mostRatingInfo = genreInfo
# print(mostRatingCount)
genreDict = {}
for genreList in mostRatingInfo:
for genre in genreList:
if genre not in genreDict:
genreDict[genre] = 0
genreDict[genre] += 1
mostRatedCount = 0
mostRatedGenre = None
for genre, count in genreDict.items():
if count > mostRatedCount:
mostRatedCount = count
mostRatedGenre = genre
print("%s -- Total Rating Counts: %d -- Most Rated Genre: %s - %d" % (mostRatingUser, mostRatingCount, mostRatedGenre, mostRatedCount)) | [
"json.loads"
] | [((230, 253), 'json.loads', 'json.loads', (['genreString'], {}), '(genreString)\n', (240, 253), False, 'import json\n')] |
import socket
from datetime import datetime
# Author @inforkgodara
ip_address = input("IP Address: ")
splitted_ip_digits = ip_address.split('.')
dot = '.'
first_three_ip_digits = splitted_ip_digits[0] + dot + splitted_ip_digits[1] + dot + splitted_ip_digits[2] + dot
starting_number = int(input("Starting IP Number: "))
ending_number = int(input("Ending IP Number: "))
ending_number = ending_number + 1
start_time = datetime.now()
def scan(ip_address):
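    # Probe TCP port 135 (Windows RPC); return 1 if the host answers, else 0.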
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket.setdefaulttimeout(1)
    result = sock.connect_ex((ip_address, 135))
    sock.close()
    if result == 0:
        return 1
    else:
        return 0
def execute():
for ip in range(starting_number, ending_number):
ip_address = first_three_ip_digits + str(ip)
if (scan(ip_address)):
print(ip_address, "is live")
execute()
end_time = datetime.now()
total_time = end_time - start_time
print("Scanning completed in: ", total_time) | [
"datetime.datetime.now",
"socket.socket",
"socket.setdefaulttimeout"
] | [((419, 433), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (431, 433), False, 'from datetime import datetime\n'), ((878, 892), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (890, 892), False, 'from datetime import datetime\n'), ((468, 517), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (481, 517), False, 'import socket\n'), ((522, 549), 'socket.setdefaulttimeout', 'socket.setdefaulttimeout', (['(1)'], {}), '(1)\n', (546, 549), False, 'import socket\n')] |
# Simulate user activity for Windows
# Can trigger Brave Ads
import random
from time import sleep
import pydirectinput
import os
# clear log function
def cls():
os.system('cls' if os.name == 'nt' else 'clear')
# main simulate function
def simulate():
while True:
        # you can change the x/y ranges below to match your screen resolution
rand_x = random.randint(2567, 4460)
rand_x2 = random.randint(2567, 4460)
rand_y = random.randint(1337, 2986)
rand_y2 = random.randint(1337, 2986)
# random generator True/False
choice = random.choice([True, False])
choice2 = random.choice([True, False])
choice3 = random.choice([True, False])
# random move to xy
pydirectinput.moveTo(rand_x2, rand_y2)
if choice:
# move from current pos
pydirectinput.move(rand_x, rand_y)
if choice2:
print('shift')
# press down shift key
pydirectinput.keyDown('shift')
# random sleep
sleep(random.randint(1, 2))
# release shift key
pydirectinput.keyUp('shift')
if choice3:
print('ctrl')
pydirectinput.keyDown('ctrl')
sleep(random.randint(1, 2))
pydirectinput.keyUp('ctrl')
sleep(20)
# move to defined xy, then click (notif window)
pydirectinput.moveTo(2321, 1304)
print('Click')
# press left mouse button
pydirectinput.click()
print('Sleep')
sleep(random.randint(25, 35))
if choice and choice2 and choice3:
print('Sleep 6m')
sleep(350)
cls()
if __name__ == '__main__':
simulate()
from TOKEN import LexToken
class Lexer:
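    # Hand-written scanner: tokenizes COOL-style source into LexToken objects,
    # recording the first lexicographic error it encounters in self.result.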
def __init__(self,text):
self.my_bool = False
self.result = ''
self.names = {
"case" : "CASE",
"class" : "CLASS",
"else" : "ELSE",
"esac" : "ESAC",
"fi" : "FI",
"if" : "IF",
"in" : "IN",
"inherits" : "INHERITS",
"isvoid" : "ISVOID",
"let" : "LET",
"loop" : "LOOP",
"new" : "NEW",
"of" : "OF",
"pool" : "POOL",
"then" : "THEN",
"while" : "WHILE",
"not" : "NOT",
"true" : "TRUE",
"false" : "FALSE",
"(" : "LPAREN",
")" : "RPAREN",
"{" : "LBRACE",
"}" : "RBRACE",
":" : "TDOTS",
"," : "COMMA",
"." : "DOT",
";" : "SEMICOLON",
"@" : "AT",
"*" : "MULTIPLY",
"/" : "DIVIDE",
"+" : "PLUS",
"-" : "MINUS",
"~" : "INT_COMP",
"<" : "LT",
"=" : "EQ",
"<=" : "LTEQ",
"<-" : "ASSIGN",
"=>" : "ARROW", }
self.token_list = []
self.Simple_tokens = ['(', ')', '{', '}', ':', ',','.',';','@','*','/','+','-','~','<','=','<=','<-','=>']
self.error_tokens = ['!','$','%','^','?','[',']','#','&']
self.ABC = ['A','B','C','D','E','F','G','H','I','J','K','L','M','N','O','P','Q','R','S','T','U','V','W','X','Y','Z']
self.abc = [str.lower(item) for item in self.ABC]
self._int = ['0','1','2','3','4','5','6','7','8','9']
self.get_tokens(text)
def error(self,line,column,value):
        message = f'({line}, {column}) - LexicographicError: ERROR "{value}"'
if self.result =='':
self.result = message
self.my_bool = True
def error_String_null(self,line,column):
if self.result=='':
self.result = f'({line}, {column}) - LexicographicError: String contains null character'
self.my_bool = True
def error_Comment_EOF(self,line,column):
if self.result=='':
self.result = f"({line}, {column}) - LexicographicError: EOF in comment"
self.my_bool = True
def error_String_EOF(self,line,column):
if self.result=='':
self.result = f'({line}, {column}) - LexicographicError: EOF in string constant'
self.my_bool = True
def error_String_New_Line(self,line,column):
if self.result == '':
self.result = f'({line}, {column}) - LexicographicError: Unterminated string constant'
self.my_bool = True
def get_tokens(self,text):
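        # Single pass over `text`, tracking line (Ln) and column (Col); comments
        # and string literals are consumed in-place before token matching.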
i=-1
n = len(text)
Ln = 1
Col = 1
current1 = ''
current2 = ''
open_comments = 0
while i < n - 1:
i += 1
if text[i] in self.error_tokens:
Col+=len(current1)
self.error(Ln, Col, text[i])
break
if text[i] == '\t':
Col+=1
continue
if text[i] == ' ':
Col+=1
continue
if text[i] == '\n': #end line
Col=1
Ln+=1
continue
if text[i] == '-' and text[i + 1] == '-': #ignore comment
                while i < len(text) and not text[i] == '\n': i+=1
Col=1
Ln+=1
continue
if text[i] == '(' and text[i + 1] == '*': #ignore comment
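                # (* ... *) block comments may nest: track the nesting depth and
                # report an EOF-in-comment error if the input runs out first.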
open_comments += 1
while open_comments > 0:
i+=1
Col+=1
if i == len(text):
self.error_Comment_EOF(Ln,Col)
i=len(text) #end
break
if text[i] == '\n':
Ln+=1
Col=0
if text[i] == '(' and text[i + 1] == '*':
open_comments += 1
if text[i] == '*' and text[i + 1] == ')':
i+=1
open_comments -= 1
continue
if text[i] == '"':
i+=1
length = 1
if i==len(text):
Col+=length
self.error_String_EOF(Ln,Col)
break
while not text[i] == '"':
if text[i] == '\n':
Col+=length
self.error_String_New_Line(Ln,Col)
i=len(text)
break
if text[i]=='\0':
Col+=length
self.error_String_null(Ln,Col)
i=len(text)
break
if text[i]=='\\':
if not text[i+1]=='b' and not text[i+1]=='t' and not text[i+1]=='n' and not text[i+1]=='f':
current1+=text[i+1]
length+=2
if text[i+1]=='\n':
Ln+=1
Col=0
length=1
i+=2
continue
current1 += text[i]
length+=1
i+=1
if i==len(text):
Col+=length
self.error_String_EOF(Ln,Col)
break
self.token_list.append(LexToken('STRING',current1,Ln,Col))
Col+=length + 1
current1 = ''
continue
current1 += text[i]
if i + 1 < len(text): current2 = current1 + text[i + 1]
else: current2 = current1
_next = current2[-1] #text[i + 1]
if current1[0] == '_':
self.error(Ln,Col,current1[0])
break
if current1[0] in self._int:
i+=1
while text[i] in self._int:
current1 += text[i]
i+=1
i-=1
self.token_list.append(LexToken('INTEGER',int(current1), Ln,Col))
Col+=len(current1)
current1 = ''
continue
if current2 in self.Simple_tokens:
self.token_list.append(LexToken(self.names[current2],current2,Ln,Col))
Col+=len(current2)
i+=1
current1 = ''
continue
if current1 in self.Simple_tokens:
self.token_list.append(LexToken(self.names[current1],current1,Ln,Col))
Col+=len(current1)
current1 = ''
continue
if _next in self.Simple_tokens or _next == ' ' or _next == '\n' or _next == '\t' or i+1==len(text):
lower = str.lower(current1)
                if lower in self.names:
self.token_list.append(LexToken(self.names[lower],lower,Ln,Col))
Col+=len(current1)
current1 = ''
continue
if current1[0] in self.ABC:
self.token_list.append(LexToken('TYPE',current1,Ln,Col))
Col+=len(current1)
current1 = ''
continue
if current1[0] in self.abc:
self.token_list.append(LexToken('ID',current1,Ln,Col))
Col+=len(current1)
                    current1 = ''
                    continue
| [
"TOKEN.LexToken"
] | [((6007, 6044), 'TOKEN.LexToken', 'LexToken', (['"""STRING"""', 'current1', 'Ln', 'Col'], {}), "('STRING', current1, Ln, Col)\n", (6015, 6044), False, 'from TOKEN import LexToken\n'), ((6885, 6934), 'TOKEN.LexToken', 'LexToken', (['self.names[current2]', 'current2', 'Ln', 'Col'], {}), '(self.names[current2], current2, Ln, Col)\n', (6893, 6934), False, 'from TOKEN import LexToken\n'), ((7131, 7180), 'TOKEN.LexToken', 'LexToken', (['self.names[current1]', 'current1', 'Ln', 'Col'], {}), '(self.names[current1], current1, Ln, Col)\n', (7139, 7180), False, 'from TOKEN import LexToken\n'), ((7538, 7581), 'TOKEN.LexToken', 'LexToken', (['self.names[lower]', 'lower', 'Ln', 'Col'], {}), '(self.names[lower], lower, Ln, Col)\n', (7546, 7581), False, 'from TOKEN import LexToken\n'), ((7786, 7821), 'TOKEN.LexToken', 'LexToken', (['"""TYPE"""', 'current1', 'Ln', 'Col'], {}), "('TYPE', current1, Ln, Col)\n", (7794, 7821), False, 'from TOKEN import LexToken\n'), ((8026, 8059), 'TOKEN.LexToken', 'LexToken', (['"""ID"""', 'current1', 'Ln', 'Col'], {}), "('ID', current1, Ln, Col)\n", (8034, 8059), False, 'from TOKEN import LexToken\n')] |
import pandas as pd
import numpy as np
import math
import matplotlib.pyplot as plt
from sklearn import feature_selection as fs
from sklearn import naive_bayes
from sklearn import model_selection
from sklearn import metrics
from sklearn import linear_model
from sklearn import svm
from imblearn.under_sampling import NeighbourhoodCleaningRule
from imblearn.over_sampling import SMOTE, RandomOverSampler
COLUMN_NAMES = ['sex', 'length', 'diameter', 'height',
'whole weight', 'shucked weight', 'viscera weight',
'shell weight', 'rings']
# feature selection
def cal_features_mutual_info(data):
y = data['rings']
features = data.loc[:, data.columns != 'rings']
info = fs.mutual_info_regression(features, y)
print('========== mutual info ==============')
for idx, col in enumerate(COLUMN_NAMES):
if col == 'rings':
break
name = COLUMN_NAMES[idx]
print('{0} ==> {1}'.format(name, info[idx]))
def cal_feature_variance(data):
vt = fs.VarianceThreshold()
vt.fit_transform(data)
print('======== variance ================')
for idx, col in enumerate(COLUMN_NAMES):
print('{0} ==> {1}'.format(col, vt.variances_[idx]))
def draw_class_hist(Y):
bins = [x for x in range(1, 29, 5)]
Y.plot.hist(bins=bins)
plt.show()
# data loading / preprocessing
def preprocessing(data):
_, v = np.unique(data['sex'], return_inverse=True)
data['sex'] = v
def load_data():
data = pd.read_csv('../uci_data/abalone.data.txt', header=None, names=COLUMN_NAMES)
preprocessing(data)
print(data.describe())
return data
def oversampling(X, Y):
# some class has only one sample
# to apply SMOTE we first oversample it randomly
X_resampled, Y_resampled = RandomOverSampler().fit_sample(X, Y)
X_resampled, Y_resampled = SMOTE().fit_sample(X_resampled, Y_resampled)
return (X_resampled, Y_resampled)
def undersampling(X, Y):
rus = NeighbourhoodCleaningRule(ratio='majority')
x_new, y_new = rus.fit_sample(X, Y)
return (x_new, y_new)
# metrics
# 1. metrics for multi-class classification problem
def cal_metrics(y_test, y_pred, label):
acc = metrics.accuracy_score(y_test, y_pred)
print('{0} acc: {1}'.format(label, acc))
prec = metrics.precision_score(y_test, y_pred, average='weighted')
print('{0} precision: {1}'.format(label, prec))
recall = metrics.recall_score(y_test, y_pred, average='weighted')
print('{0} recall: {1}'.format(label, recall))
# models
def gaussian_naive_bayes(x_train, y_train, x_test, y_test):
model = naive_bayes.GaussianNB()
model.fit(x_train, y_train)
y_pred = model.predict(x_test)
cal_metrics(y_test, y_pred, 'gaussianNB')
def multinomial_naive_bayes(x_train, y_train, x_test, y_test):
model = naive_bayes.MultinomialNB()
model.fit(x_train, y_train)
y_pred = model.predict(x_test)
cal_metrics(y_test, y_pred, 'multinomialNB')
def logistics_regression(x_train, y_train, x_test, y_test):
model = linear_model.LogisticRegression(solver='sag', multi_class='multinomial')
model.fit(x_train, y_train)
y_pred = model.predict(x_test)
    cal_metrics(y_test, y_pred, 'logistic regression')
def select_features_by_stat_info(data):
cal_features_mutual_info(data)
cal_feature_variance(data)
print('==================')
# ignore features with low variance
    return ['sex', 'length', 'whole weight',
            'shucked weight', 'viscera weight',
            'shell weight']
def select_feature_by_L1(data_train, data_test):
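    # An L1-penalised linear SVM drives most coefficients to zero; SelectFromModel
    # then keeps only the features whose absolute weight exceeds the threshold.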
all_cols = ['sex', 'length', 'diameter', 'height',
'whole weight', 'shucked weight', 'viscera weight',
'shell weight']
Y = data_train['rings']
X = data_train[all_cols]
X_test = data_test[all_cols]
svc = svm.LinearSVC(penalty='l1', dual=False).fit(X, Y)
model = fs.SelectFromModel(svc, threshold=0.5, prefit=True)
return (model.transform(X), model.transform(X_test))
if __name__ == '__main__':
data = load_data()
split_point = math.floor(len(data) * 0.8)
data_train = data[: split_point]
data_test = data[split_point:]
y_train = data_train['rings']
y_test = data_test['rings']
print('======== select features by stat info ========')
selected_features = select_features_by_stat_info(data)
x_train = data_train[selected_features]
x_test = data_test[selected_features]
gaussian_naive_bayes(x_train, y_train, x_test, y_test)
logistics_regression(x_train, y_train, x_test, y_test)
multinomial_naive_bayes(x_train, y_train, x_test, y_test)
print('=========== select features by L1 =============')
x_train, x_test = select_feature_by_L1(data_train, data_test)
gaussian_naive_bayes(x_train, y_train, x_test, y_test)
logistics_regression(x_train, y_train, x_test, y_test)
multinomial_naive_bayes(x_train, y_train, x_test, y_test)
print('============ under sampling ==============')
x_res, y_res = undersampling(x_train, y_train)
gaussian_naive_bayes(x_res, y_res, x_test, y_test)
logistics_regression(x_res, y_res, x_test, y_test)
multinomial_naive_bayes(x_res, y_res, x_test, y_test)
print('============ over sampling ==============')
x_res, y_res = oversampling(x_train, y_train)
gaussian_naive_bayes(x_res, y_res, x_test, y_test)
logistics_regression(x_res, y_res, x_test, y_test)
multinomial_naive_bayes(x_res, y_res, x_test, y_test)
#draw_class_hist(data['rings']) | [
"numpy.unique",
"sklearn.feature_selection.VarianceThreshold",
"pandas.read_csv",
"imblearn.under_sampling.NeighbourhoodCleaningRule",
"sklearn.feature_selection.SelectFromModel",
"imblearn.over_sampling.SMOTE",
"sklearn.svm.LinearSVC",
"sklearn.metrics.precision_score",
"sklearn.metrics.recall_scor... | [((714, 752), 'sklearn.feature_selection.mutual_info_regression', 'fs.mutual_info_regression', (['features', 'y'], {}), '(features, y)\n', (739, 752), True, 'from sklearn import feature_selection as fs\n'), ((1027, 1049), 'sklearn.feature_selection.VarianceThreshold', 'fs.VarianceThreshold', ([], {}), '()\n', (1047, 1049), True, 'from sklearn import feature_selection as fs\n'), ((1331, 1341), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1339, 1341), True, 'import matplotlib.pyplot as plt\n'), ((1411, 1454), 'numpy.unique', 'np.unique', (["data['sex']"], {'return_inverse': '(True)'}), "(data['sex'], return_inverse=True)\n", (1420, 1454), True, 'import numpy as np\n'), ((1504, 1580), 'pandas.read_csv', 'pd.read_csv', (['"""../uci_data/abalone.data.txt"""'], {'header': 'None', 'names': 'COLUMN_NAMES'}), "('../uci_data/abalone.data.txt', header=None, names=COLUMN_NAMES)\n", (1515, 1580), True, 'import pandas as pd\n'), ((1981, 2024), 'imblearn.under_sampling.NeighbourhoodCleaningRule', 'NeighbourhoodCleaningRule', ([], {'ratio': '"""majority"""'}), "(ratio='majority')\n", (2006, 2024), False, 'from imblearn.under_sampling import NeighbourhoodCleaningRule\n'), ((2205, 2243), 'sklearn.metrics.accuracy_score', 'metrics.accuracy_score', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (2227, 2243), False, 'from sklearn import metrics\n'), ((2301, 2360), 'sklearn.metrics.precision_score', 'metrics.precision_score', (['y_test', 'y_pred'], {'average': '"""weighted"""'}), "(y_test, y_pred, average='weighted')\n", (2324, 2360), False, 'from sklearn import metrics\n'), ((2427, 2483), 'sklearn.metrics.recall_score', 'metrics.recall_score', (['y_test', 'y_pred'], {'average': '"""weighted"""'}), "(y_test, y_pred, average='weighted')\n", (2447, 2483), False, 'from sklearn import metrics\n'), ((2618, 2642), 'sklearn.naive_bayes.GaussianNB', 'naive_bayes.GaussianNB', ([], {}), '()\n', (2640, 2642), False, 'from sklearn import naive_bayes\n'), ((2832, 2859), 'sklearn.naive_bayes.MultinomialNB', 'naive_bayes.MultinomialNB', ([], {}), '()\n', (2857, 2859), False, 'from sklearn import naive_bayes\n'), ((3049, 3121), 'sklearn.linear_model.LogisticRegression', 'linear_model.LogisticRegression', ([], {'solver': '"""sag"""', 'multi_class': '"""multinomial"""'}), "(solver='sag', multi_class='multinomial')\n", (3080, 3121), False, 'from sklearn import linear_model\n'), ((3942, 3993), 'sklearn.feature_selection.SelectFromModel', 'fs.SelectFromModel', (['svc'], {'threshold': '(0.5)', 'prefit': '(True)'}), '(svc, threshold=0.5, prefit=True)\n', (3960, 3993), True, 'from sklearn import feature_selection as fs\n'), ((1794, 1813), 'imblearn.over_sampling.RandomOverSampler', 'RandomOverSampler', ([], {}), '()\n', (1811, 1813), False, 'from imblearn.over_sampling import SMOTE, RandomOverSampler\n'), ((1862, 1869), 'imblearn.over_sampling.SMOTE', 'SMOTE', ([], {}), '()\n', (1867, 1869), False, 'from imblearn.over_sampling import SMOTE, RandomOverSampler\n'), ((3880, 3919), 'sklearn.svm.LinearSVC', 'svm.LinearSVC', ([], {'penalty': '"""l1"""', 'dual': '(False)'}), "(penalty='l1', dual=False)\n", (3893, 3919), False, 'from sklearn import svm\n')] |
"""For each repo in DEPS, git config an appropriate depot-tools.upstream.
This will allow git new-branch to set the correct tracking branch.
"""
import argparse
import hashlib
import json
import os
import sys
import textwrap
import gclient_utils
import git_common
def _GclientEntriesToString(entries):
entries_str = json.dumps(entries, sort_keys=True)
return entries_str
def ConfigUpstream(repo_dir, url):
"""Determine the upstream branch for this repo, and run git config."""
if not os.path.exists(repo_dir):
sys.stderr.write('%s not found\n' % repo_dir)
return False
os.chdir(repo_dir)
unused_url, revision = gclient_utils.SplitUrlRevision(url)
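  # Only a revision that names a remote branch (contains 'remotes/origin') can
  # serve as a tracking branch; pinned sha1s and the like are skipped below.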
if revision.find('remotes/origin') != -1:
upstream = revision
else:
# Ignore e.g. a pinned sha1, or other unusual remote.
sys.stderr.write('Skipping %s with upstream %s\n' % (repo_dir, revision))
return True
# Check git's current upstream config, if any.
current_upstream = git_common.root()
if current_upstream:
current_upstream = current_upstream.strip()
if current_upstream != upstream:
sys.stdout.write(
'In %s, setting %s to %s\n' %
(repo_dir, 'depot-tools.upstream', upstream))
git_common.set_config('depot-tools.upstream', upstream)
return True
def Main(args):
"""For each repo in the gclient root, set the upstream config."""
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent(__doc__))
parser.add_argument('-f', '--force', required=False, action='store_true',
help='Force the script to run, ignoring cached results.')
options = parser.parse_args(args)
# We expect this to be run as a hook in the gclient root directory.
root_dir, gclient_entries = gclient_utils.GetGClientRootAndEntries()
# Compute a hash combined of the .gclient_entries and this script.
# We should re-run if either changes.
  md5 = hashlib.md5()
  md5.update(_GclientEntriesToString(gclient_entries).encode('utf-8'))
  with open(__file__) as f:
    md5.update(f.read().encode('utf-8'))
current_hash = md5.hexdigest()
already_processed_hash = None
entries_hash_file = os.path.join(root_dir, '.git_config_entries_hash')
if os.path.exists(entries_hash_file):
with open(entries_hash_file) as f:
already_processed_hash = f.readlines()[0]
if current_hash == already_processed_hash and not options.force:
return 0
results = []
for dirname in sorted(gclient_entries):
abs_path = os.path.normpath(os.path.join(root_dir, dirname))
results.append(ConfigUpstream(abs_path, gclient_entries[dirname]))
if all(results):
# Success. Write the new hash to the cached location.
    with open(entries_hash_file, 'w') as f:
      f.write(current_hash)
return 0
else:
return 1
if __name__ == '__main__':
sys.exit(Main(sys.argv[1:]))
| [
"git_common.root",
"os.path.exists",
"textwrap.dedent",
"hashlib.md5",
"json.dumps",
"os.path.join",
"sys.stdout.write",
"os.chdir",
"sys.stderr.write",
"git_common.set_config",
"gclient_utils.SplitUrlRevision",
"gclient_utils.GetGClientRootAndEntries"
] | [((322, 357), 'json.dumps', 'json.dumps', (['entries'], {'sort_keys': '(True)'}), '(entries, sort_keys=True)\n', (332, 357), False, 'import json\n'), ((595, 613), 'os.chdir', 'os.chdir', (['repo_dir'], {}), '(repo_dir)\n', (603, 613), False, 'import os\n'), ((639, 674), 'gclient_utils.SplitUrlRevision', 'gclient_utils.SplitUrlRevision', (['url'], {}), '(url)\n', (669, 674), False, 'import gclient_utils\n'), ((974, 991), 'git_common.root', 'git_common.root', ([], {}), '()\n', (989, 991), False, 'import git_common\n'), ((1807, 1847), 'gclient_utils.GetGClientRootAndEntries', 'gclient_utils.GetGClientRootAndEntries', ([], {}), '()\n', (1845, 1847), False, 'import gclient_utils\n'), ((1966, 1979), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (1977, 1979), False, 'import hashlib\n'), ((2176, 2226), 'os.path.join', 'os.path.join', (['root_dir', '""".git_config_entries_hash"""'], {}), "(root_dir, '.git_config_entries_hash')\n", (2188, 2226), False, 'import os\n'), ((2232, 2265), 'os.path.exists', 'os.path.exists', (['entries_hash_file'], {}), '(entries_hash_file)\n', (2246, 2265), False, 'import os\n'), ((499, 523), 'os.path.exists', 'os.path.exists', (['repo_dir'], {}), '(repo_dir)\n', (513, 523), False, 'import os\n'), ((529, 574), 'sys.stderr.write', 'sys.stderr.write', (["('%s not found\\n' % repo_dir)"], {}), "('%s not found\\n' % repo_dir)\n", (545, 574), False, 'import sys\n'), ((813, 886), 'sys.stderr.write', 'sys.stderr.write', (["('Skipping %s with upstream %s\\n' % (repo_dir, revision))"], {}), "('Skipping %s with upstream %s\\n' % (repo_dir, revision))\n", (829, 886), False, 'import sys\n'), ((1102, 1198), 'sys.stdout.write', 'sys.stdout.write', (["('In %s, setting %s to %s\\n' % (repo_dir, 'depot-tools.upstream', upstream))"], {}), "('In %s, setting %s to %s\\n' % (repo_dir,\n 'depot-tools.upstream', upstream))\n", (1118, 1198), False, 'import sys\n'), ((1216, 1271), 'git_common.set_config', 'git_common.set_config', (['"""depot-tools.upstream"""', 'upstream'], {}), "('depot-tools.upstream', upstream)\n", (1237, 1271), False, 'import git_common\n'), ((1487, 1511), 'textwrap.dedent', 'textwrap.dedent', (['__doc__'], {}), '(__doc__)\n', (1502, 1511), False, 'import textwrap\n'), ((2525, 2556), 'os.path.join', 'os.path.join', (['root_dir', 'dirname'], {}), '(root_dir, dirname)\n', (2537, 2556), False, 'import os\n')] |
import os
import socket
import codecs
import urllib.request
from urllib.parse import urlparse
def __process__(command):
try:
process = os.popen(command)
results = str(process.read())
return results
except Exception as e:
raise e
def create_dir(directory):
if not os.path.exists(directory):
os.makedirs(directory)
print("[+] Directory created")
else:
# print("[!] Directory exists")
pass
def write_file(filepath, data=''):
f = open(filepath, 'w')
f.write('' if not data else data)
f.close()
return
def get_domain_name(url):
print("[+] Resolving Domain Name")
try:
parsed = urlparse(url)
if not parsed.scheme:
print("[!] No protocol scheme not found, default to https.")
url = 'https://%s' % url
parsed = urlparse(url)
domain_name = parsed.netloc
return domain_name, url
except:
print("[!] Failed to resolve Domain Name.")
def get_whois(domain_name):
print("[+] Fetching WhoIs Data.")
result = None
command = "whois %s" % domain_name
try:
return __process__(command)
except:
print("[!] Failed to get Whois Data.")
return result
def get_ip_address(domain_name):
print("[+] Fetching IP Address of Domain")
try:
ip_address = socket.gethostbyname(domain_name)
return ip_address
except:
print("[!] Failed to resolve IP Address.")
def get_nmap(options, ip):
print("[+] Retrieving Nmap Data.")
command = "nmap %s %s" % (options, ip)
try:
return __process__(command)
except:
print("[!] Failed to retrieve Nmap Data.")
def get_robots_txt(url):
print("[+] Fetching robots.txt.")
    # Normalise the URL (drop any trailing slash) before appending /robots.txt.
    path = url[:-1] if url.endswith('/') else url
    try:
        req = urllib.request.Request('%s/robots.txt' % path, data=None)
        response = urllib.request.urlopen(req)
        page = response.read()
        page = page.decode('utf8')
return page
except:
print("[+] Failed to retrieve robots.txt.")
| [
"os.path.exists",
"socket.gethostbyname",
"urllib.parse.urlparse",
"os.makedirs",
"os.popen"
] | [((133, 150), 'os.popen', 'os.popen', (['command'], {}), '(command)\n', (141, 150), False, 'import os\n'), ((273, 298), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(directory)\n', (287, 298), False, 'import os\n'), ((302, 324), 'os.makedirs', 'os.makedirs', (['directory'], {}), '(directory)\n', (313, 324), False, 'import os\n'), ((603, 616), 'urllib.parse.urlparse', 'urlparse', (['url'], {}), '(url)\n', (611, 616), False, 'from urllib.parse import urlparse\n'), ((1195, 1228), 'socket.gethostbyname', 'socket.gethostbyname', (['domain_name'], {}), '(domain_name)\n', (1215, 1228), False, 'import socket\n'), ((745, 758), 'urllib.parse.urlparse', 'urlparse', (['url'], {}), '(url)\n', (753, 758), False, 'from urllib.parse import urlparse\n')] |
from collections import defaultdict
from typing import List
from util.helpers import solution_timer
from util.input_helper import read_entire_input
from util.console import console
from year_2019.intcode import IntCode, parse
data = read_entire_input(2019,11)
def run_robot(data:List[str], init=0):
debug = False
robot = IntCode(data, debug=debug)
is_white = defaultdict(lambda: 0)
is_white[(0,0)] = init
position = (0,0)
direction = (0,1)
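    # direction is (dx, dy) with y increasing upwards: a left turn maps
    # (dx, dy) -> (-dy, dx) and a right turn maps (dx, dy) -> (dy, -dx).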
while not robot.halted:
colour = robot.run([is_white[position]])
turn = robot.run([])
if debug:
console.print(f"ROBO pos{position}-dir{direction} -> current colour: {is_white[position]} -> {colour}. Turning {turn}")
is_white[position] = colour
if turn == 0: # Turn left
direction = -direction[1], direction[0]
elif turn == 1:
direction = direction[1], -direction[0]
position = position[0] + direction[0], position[1] + direction[1]
if debug:
console.print(f"MOVE pos{position}-dir{direction} -> current colour: {is_white[position]}")
return is_white
@solution_timer(2019,11,1)
def part_one(data: List[str]):
instructions = parse(data)
is_white = run_robot(instructions)
return len(is_white)
@solution_timer(2019,11,2)
def part_two(data: List[str]):
instructions = parse(data)
is_white = run_robot(instructions, 1)
xmin = min(i[0] for i in is_white.keys())
xmax = max(i[0] for i in is_white.keys())
ymin = min(i[1] for i in is_white.keys())
ymax = max(i[1] for i in is_white.keys())
image = '\n'.join(''.join(chr(9608) if is_white[(i,j)] else ' ' for i in range(xmin, xmax+1)) for j in range(ymax, ymin-1, -1))
return '\n'+image
if __name__ == "__main__":
data = read_entire_input(2019,11)
part_one(data)
part_two(data) | [
"util.input_helper.read_entire_input",
"util.console.console.print",
"util.helpers.solution_timer",
"collections.defaultdict",
"year_2019.intcode.parse",
"year_2019.intcode.IntCode"
] | [((246, 273), 'util.input_helper.read_entire_input', 'read_entire_input', (['(2019)', '(11)'], {}), '(2019, 11)\n', (263, 273), False, 'from util.input_helper import read_entire_input\n'), ((1151, 1178), 'util.helpers.solution_timer', 'solution_timer', (['(2019)', '(11)', '(1)'], {}), '(2019, 11, 1)\n', (1165, 1178), False, 'from util.helpers import solution_timer\n'), ((1305, 1332), 'util.helpers.solution_timer', 'solution_timer', (['(2019)', '(11)', '(2)'], {}), '(2019, 11, 2)\n', (1319, 1332), False, 'from util.helpers import solution_timer\n'), ((343, 369), 'year_2019.intcode.IntCode', 'IntCode', (['data'], {'debug': 'debug'}), '(data, debug=debug)\n', (350, 369), False, 'from year_2019.intcode import IntCode, parse\n'), ((385, 408), 'collections.defaultdict', 'defaultdict', (['(lambda : 0)'], {}), '(lambda : 0)\n', (396, 408), False, 'from collections import defaultdict\n'), ((1227, 1238), 'year_2019.intcode.parse', 'parse', (['data'], {}), '(data)\n', (1232, 1238), False, 'from year_2019.intcode import IntCode, parse\n'), ((1381, 1392), 'year_2019.intcode.parse', 'parse', (['data'], {}), '(data)\n', (1386, 1392), False, 'from year_2019.intcode import IntCode, parse\n'), ((1812, 1839), 'util.input_helper.read_entire_input', 'read_entire_input', (['(2019)', '(11)'], {}), '(2019, 11)\n', (1829, 1839), False, 'from util.input_helper import read_entire_input\n'), ((615, 744), 'util.console.console.print', 'console.print', (['f"""ROBO pos{position}-dir{direction} -> current colour: {is_white[position]} -> {colour}. Turning {turn}"""'], {}), "(\n f'ROBO pos{position}-dir{direction} -> current colour: {is_white[position]} -> {colour}. Turning {turn}'\n )\n", (628, 744), False, 'from util.console import console\n'), ((1037, 1138), 'util.console.console.print', 'console.print', (['f"""MOVE pos{position}-dir{direction} -> current colour: {is_white[position]}"""'], {}), "(\n f'MOVE pos{position}-dir{direction} -> current colour: {is_white[position]}'\n )\n", (1050, 1138), False, 'from util.console import console\n')] |
#!/usr/bin/env python
import datetime
import optparse
import os
import os.path
import struct
import sys
# sudo pip3 install piexif
import piexif
# Make this negative to subtract time, e.g.:
# -datetime.timedelta(hours=5, minutes=9)
#TIME_ADJUSTMENT = datetime.timedelta(hours=5, minutes=9)
#TIME_ADJUSTMENT = datetime.timedelta(days=1)
TIME_ADJUSTMENT = datetime.timedelta(days=788, seconds=13032)
def main():
EXIF_TIME_FORMAT = '%Y:%m:%d %H:%M:%S'
EXIF_UNSET = 'unset'
infile_name = parse_options()
# Get the mtime of the file
infile_mtime_original = os.path.getmtime(infile_name)
try:
exif_data = piexif.load(infile_name)
except struct.error:
exif_data = {'0th': {}, '1st': {}, 'Exif': {}}
exif_dt_location = None
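    # DateTime can live in the primary image IFD ('0th') or the thumbnail IFD
    # ('1st'); remember where it was found so it can be written back there.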
if piexif.ImageIFD.DateTime in exif_data['0th']:
exif_dt_location = '0th'
elif piexif.ImageIFD.DateTime in exif_data['1st']:
exif_dt_location = '1st'
# DateTime is when the image was last changed
exif_dt = EXIF_UNSET
if exif_dt_location != None:
# I've seen timestamp values that look like this: ' : : : : '
try:
exif_dt = datetime.datetime.strptime(exif_data[exif_dt_location][piexif.ImageIFD.DateTime].decode('utf8'), EXIF_TIME_FORMAT)
except ValueError as e:
sys.stderr.write('WARNING: Malformed DateTime\n')
sys.stderr.write('\tValueError: {}\n'.format(e))
else:
exif_dt_location = '0th'
# DateTimeDigitized is when the image was stored digitally (may be different from DateTimeOriginal if image was scanned)
exif_dtd = EXIF_UNSET
if piexif.ExifIFD.DateTimeDigitized in exif_data['Exif']:
try:
exif_dtd = datetime.datetime.strptime(exif_data['Exif'][piexif.ExifIFD.DateTimeDigitized].decode('utf8'), EXIF_TIME_FORMAT)
except ValueError as e:
sys.stderr.write('WARNING: Malformed DateTimeDigitized\n')
sys.stderr.write('\tValueError: {}\n'.format(e))
# DateTimeOriginal is when the image was taken
exif_dto = EXIF_UNSET
if piexif.ExifIFD.DateTimeOriginal in exif_data['Exif']:
try:
exif_dto = datetime.datetime.strptime(exif_data['Exif'][piexif.ExifIFD.DateTimeOriginal].decode('utf8'), EXIF_TIME_FORMAT)
except ValueError as e:
sys.stderr.write('WARNING: Malformed DateTimeOriginal\n')
sys.stderr.write('\tValueError: {}\n'.format(e))
# If only the Exif DateTime isn't set, set it based on DateTimeOriginal
if exif_dt == EXIF_UNSET and exif_dtd != EXIF_UNSET and exif_dto != EXIF_UNSET and exif_dtd == exif_dto:
        exif_dt = exif_dto
print('Exif DateTime is {}'.format(exif_dt))
print('Exif DateTimeDigitized is {}'.format(exif_dtd))
print('Exif DateTimeOriginal is {}'.format(exif_dto))
    new_exif_dt = exif_dt + TIME_ADJUSTMENT if exif_dt != EXIF_UNSET else EXIF_UNSET
    new_exif_dtd = exif_dtd + TIME_ADJUSTMENT if exif_dtd != EXIF_UNSET else EXIF_UNSET
    new_exif_dto = exif_dto + TIME_ADJUSTMENT if exif_dto != EXIF_UNSET else EXIF_UNSET
print('\nNew values:')
print('Exif DateTime: {}'.format(new_exif_dt))
print('Exif DateTimeDigitized: {}'.format(new_exif_dtd))
print('Exif DateTimeOriginal: {}'.format(new_exif_dto))
if parser.values.yes:
response = 'y'
else:
response = input('\nProceed? (y/n) ')
if response.lower() == 'y':
        if new_exif_dt != EXIF_UNSET:
            exif_data[exif_dt_location][piexif.ImageIFD.DateTime] = new_exif_dt.strftime(EXIF_TIME_FORMAT).encode('utf8')
        if new_exif_dtd != EXIF_UNSET:
            exif_data['Exif'][piexif.ExifIFD.DateTimeDigitized] = new_exif_dtd.strftime(EXIF_TIME_FORMAT).encode('utf8')
        if new_exif_dto != EXIF_UNSET:
            exif_data['Exif'][piexif.ExifIFD.DateTimeOriginal] = new_exif_dto.strftime(EXIF_TIME_FORMAT).encode('utf8')
# Write the changes to the file
piexif.insert(piexif.dump(exif_data), infile_name)
# Set the atime and mtime of the file back to their original values
os.utime(infile_name, (infile_mtime_original, infile_mtime_original))
def parse_options():
''' set up and parse command line arguments
'''
global parser
usage = ('Usage: %prog FILE [options]\n'
'Where FILE = full path to jpeg file to adjust EXIF tags')
parser = optparse.OptionParser(usage=usage)
# command line options to parse
parser.add_option('-y', '--yes', action='store_true', dest='yes',
default=False, help='Adjust files without asking for confirmation')
# parse the arguments
(options, args) = parser.parse_args()
if len(args) < 1:
parser.print_help()
sys.exit('Error: FILE is required')
return args[0]
if __name__ == '__main__':
main()
| [
"optparse.OptionParser",
"os.utime",
"piexif.load",
"sys.stderr.write",
"sys.exit",
"os.path.getmtime",
"datetime.timedelta",
"piexif.dump"
] | [((358, 401), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(788)', 'seconds': '(13032)'}), '(days=788, seconds=13032)\n', (376, 401), False, 'import datetime\n'), ((588, 617), 'os.path.getmtime', 'os.path.getmtime', (['infile_name'], {}), '(infile_name)\n', (604, 617), False, 'import os\n'), ((4256, 4290), 'optparse.OptionParser', 'optparse.OptionParser', ([], {'usage': 'usage'}), '(usage=usage)\n', (4277, 4290), False, 'import optparse\n'), ((648, 672), 'piexif.load', 'piexif.load', (['infile_name'], {}), '(infile_name)\n', (659, 672), False, 'import piexif\n'), ((3949, 4018), 'os.utime', 'os.utime', (['infile_name', '(infile_mtime_original, infile_mtime_original)'], {}), '(infile_name, (infile_mtime_original, infile_mtime_original))\n', (3957, 4018), False, 'import os\n'), ((4614, 4649), 'sys.exit', 'sys.exit', (['"""Error: FILE is required"""'], {}), "('Error: FILE is required')\n", (4622, 4649), False, 'import sys\n'), ((3827, 3849), 'piexif.dump', 'piexif.dump', (['exif_data'], {}), '(exif_data)\n', (3838, 3849), False, 'import piexif\n'), ((1347, 1396), 'sys.stderr.write', 'sys.stderr.write', (['"""WARNING: Malformed DateTime\n"""'], {}), "('WARNING: Malformed DateTime\\n')\n", (1363, 1396), False, 'import sys\n'), ((1912, 1970), 'sys.stderr.write', 'sys.stderr.write', (['"""WARNING: Malformed DateTimeDigitized\n"""'], {}), "('WARNING: Malformed DateTimeDigitized\\n')\n", (1928, 1970), False, 'import sys\n'), ((2367, 2424), 'sys.stderr.write', 'sys.stderr.write', (['"""WARNING: Malformed DateTimeOriginal\n"""'], {}), "('WARNING: Malformed DateTimeOriginal\\n')\n", (2383, 2424), False, 'import sys\n')] |
import os
import sys
import numpy as np
import pandas as pd
import tensorflow as tf
from losses import focal_loss,weighted_binary_crossentropy
from utils import Dataset
class DeepFM(object):
def __init__(self, params):
self.feature_size = params['feature_size']
self.field_size = params['field_size']
self.embedding_size = params['embedding_size']
self.deep_layers = params['deep_layers']
self.l2_reg_coef = params['l2_reg']
self.learning_rate = params['learning_rate']
self.pos_ratio = params['pos_ratio']
self.keep_prob_v = params['keep_prob']
self.activate = tf.nn.relu
self.weight = {}
self.saver=None
self.checkpoint_dir = params['checkpoint_dir']
self.build()
def build(self):
"""
feature_size: N
field_size: F
embedding_size: K
batch_size: None
"""
self.feat_index = tf.placeholder(tf.int32, shape=[None, None], name='feature_index')
self.feat_value = tf.placeholder(tf.float32, shape=[None, None], name='feature_value')
self.label = tf.placeholder(tf.float32, shape=[None,1], name='label')
self.keep_prob = tf.placeholder(tf.float32, shape=[], name='keep_prob') # scaler
self.is_training= tf.placeholder(tf.bool, shape=[],name='is_training')
        #1. ------------------------- define weights -----------------------------------------
        # weights of the FM first-order terms
self.weight['first_order'] = tf.Variable(tf.random_normal([self.feature_size, 1], 0.0, 0.05), # N * 1
name='first_order')
        # weights between the one-hot encoded input layer and the dense embedding layer, i.e. the DNN input embeddings
self.weight['embedding_weight'] = tf.Variable(tf.random_normal([self.feature_size, self.embedding_size], 0.0, 0.05), # N*K
name='embedding_weight')
        # weights and biases of the deep part; the initial input dimension of the deep network is input_size = F*K
num_layer = len(self.deep_layers)
input_size = self.field_size * self.embedding_size
# glorot_normal = np.sqrt(2.0 / (input_size + self.deep_layers[0])) # for sigmoid
he_normal = np.sqrt(2.0 /input_size) # for relu
self.weight['layer_0'] = tf.Variable(np.random.normal(loc=0, scale=he_normal, size=(input_size, self.deep_layers[0])), dtype=np.float32)
self.weight['bias_0'] = tf.Variable(np.random.normal(loc=0, scale=he_normal, size=(1, self.deep_layers[0])), dtype=np.float32)
        # create the weight and bias for every layer of the deep network
for i in range(1, num_layer):
he_normal = np.sqrt(2.0 / (self.deep_layers[i - 1]))
self.weight['layer_' + str(i)] = tf.Variable(np.random.normal(loc=0, scale=he_normal, size=(self.deep_layers[i - 1], self.deep_layers[i])),
dtype=np.float32)
self.weight['bias_' + str(i)] = tf.Variable(np.random.normal(loc=0, scale=he_normal, size=(1, self.deep_layers[i])),dtype=np.float32)
        # deep part output_size + first-order output_size + second-order output_size
last_layer_size = self.deep_layers[-1] + self.field_size + self.embedding_size
glorot_normal = np.sqrt(2.0 / (last_layer_size + 1))
        # weight and bias of the last layer
self.weight['last_layer'] = tf.Variable(np.random.normal(loc=0, scale=glorot_normal, size=(last_layer_size, 1)), dtype=np.float32)
self.weight['last_bias'] = tf.Variable(tf.constant(0.0), dtype=np.float32)
        #2. ---------------------- forward pass ------------------------------------
# None*F*K
self.embedding_index = tf.nn.embedding_lookup(self.weight['embedding_weight'],self.feat_index)
# [None*F*K] .*[None*F*1] = None*F*K
self.embedding_part = tf.multiply(self.embedding_index, tf.reshape(self.feat_value, [-1, self.field_size, 1]))
        # FM first-order features
# None * F*1
self.embedding_first = tf.nn.embedding_lookup(self.weight['first_order'],
self.feat_index)
#[None*F*1].*[None*F*1] = None*F*1
self.embedding_first = tf.multiply(self.embedding_first, tf.reshape(self.feat_value, [-1, self.field_size, 1]))
# None*F
self.first_order = tf.reduce_sum(self.embedding_first, 2)
        # second-order features, None*K
self.sum_second_order = tf.reduce_sum(self.embedding_part, 1)
self.sum_second_order_square = tf.square(self.sum_second_order)
self.square_second_order = tf.square(self.embedding_part)
self.square_second_order_sum = tf.reduce_sum(self.square_second_order, 1)
# 1/2*((a+b)^2 - a^2 - b^2)=ab
# None*K
self.second_order = 0.5 * tf.subtract(self.sum_second_order_square, self.square_second_order_sum)
        # output of the FM part, None*(F+K)
self.fm_part = tf.concat([self.first_order, self.second_order], axis=1)
        # DNN part
# None*(F*K)
self.deep_embedding = tf.reshape(self.embedding_part, [-1, self.field_size * self.embedding_size])
        # fully connected layers
for i in range(0, len(self.deep_layers)):
self.deep_embedding = tf.add(tf.matmul(self.deep_embedding, self.weight["layer_%d" % i]),
self.weight["bias_%d" % i])
# self.deep_embedding =tf.matmul(self.deep_embedding, self.weight["layer_%d" % i])
self.bn_out = tf.layers.batch_normalization(self.deep_embedding, training=self.is_training)
# self.bn_out = tf.layers.dropout(self.deep_embedding, rate=self.keep_prob,training=self.is_training)
self.deep_embedding = self.activate(self.bn_out)
self.deep_embedding = tf.layers.dropout(self.deep_embedding, rate =1.0-self.keep_prob, training= self.is_training)
        # concatenate the FM and DNN outputs, None*(F+K+layers[-1])
din_all = tf.concat([self.fm_part, self.deep_embedding], axis=1)
#None*1
self.out = tf.add(tf.matmul(din_all, self.weight['last_layer']), self.weight['last_bias'])
        #3. ------------------ define the loss ---------------------------------------
        # loss part: predicted probability, None*1
self.prob = tf.nn.sigmoid(self.out)
# self.entropy_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(labels= self.label, logits= self.out))
# self.entropy_loss = -tf.reduce_mean(
# self.label * tf.log(tf.clip_by_value(self.prob, 1e-10, 1.0))+ (1 - self.label)* tf.log(tf.clip_by_value(1-self.prob,1e-10,1.0)))
self.entropy_loss = focal_loss(self.prob, self.label, alpha=0.5, gamma=2)
# self.entropy_loss = weighted_binary_crossentropy(self.prob, self.label, pos_ratio=self.pos_ratio)
        # regularization: sum(w^2)/2*l2_reg_coef
self.reg_loss = tf.contrib.layers.l2_regularizer(self.l2_reg_coef)(self.weight["last_layer"])
for i in range(len(self.deep_layers)):
self.reg_loss += tf.contrib.layers.l2_regularizer(self.l2_reg_coef)(self.weight["layer_%d" % i])
# tf.add_to_collection('losses', tf.contrib.layers.l2_regularizer(self.l2_reg_coef)(self.weight['layer_1']))
# print(self.entropy_loss.shape.as_list(), self.reg_loss.shape.as_list())
self.loss = self.entropy_loss + self.reg_loss
self.global_step = tf.Variable(0, trainable=False, name='global_step')
self.learning_rate = tf.train.exponential_decay(self.learning_rate, self.global_step,3000, 0.99,staircase=False)
opt = tf.train.AdamOptimizer(self.learning_rate)
# opt = tf.train.GradientDescentOptimizer(self.learning_rate)
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
trainable_params = tf.trainable_variables()
gradients = tf.gradients(self.loss, trainable_params)
clip_gradients, _ = tf.clip_by_global_norm(gradients, 5)
with tf.control_dependencies(update_ops):
# self.train_op = opt.minimize(self.loss, global_step = self.global_step)
self.train_op = opt.apply_gradients(zip(clip_gradients, trainable_params), global_step=self.global_step)
self.saver = tf.train.Saver(max_to_keep=3)
def train(self, sess, feat_index, feat_value, label):
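        # Run a single optimizer step on one mini-batch; returns the global step.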
_, step = sess.run([self.train_op, self.global_step], feed_dict={
self.feat_index: feat_index,
self.feat_value: feat_value,
self.label: label,
self.keep_prob: self.keep_prob_v,
self.is_training:True})
return step
def predict(self, sess, feat_index, feat_value, batch_size=None):
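        # Return a flat array of positive-class probabilities; when batch_size
        # is given, the input is fed through in mini-batches.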
if batch_size is None:
prob = sess.run([self.prob], feed_dict={
self.feat_index: feat_index,
self.feat_value: feat_value,
self.keep_prob: 1,
self.is_training:False})[0]
else:
data =Dataset(feat_value, feat_index, [None]*len(feat_index), batch_size, shuffle=False)
probs =[]
for feat_index, feat_value, _ in data:
prob = sess.run([self.prob], feed_dict={
self.feat_index: feat_index,
self.feat_value: feat_value,
self.keep_prob: 1,
self.is_training:False})[0]
probs.append(prob.ravel())
prob = np.concatenate(probs)
return prob.ravel()
def evaluate(self, sess, feat_index, feat_value, label, batch_size=None):
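        # Incremental (running) mean of the total / cross-entropy / regularization
        # losses, computed over mini-batches when batch_size is given.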
tloss, entloss,regloss = 0,0,0
if batch_size is None:
tloss, entloss,regloss = sess.run([self.loss, self.entropy_loss, self.reg_loss],feed_dict={
self.feat_index: feat_index,
self.feat_value: feat_value,
self.label: label,
self.keep_prob: 1,
self.is_training:False})
else:
data = Dataset(feat_value,feat_index,label, batch_size, shuffle=False)
for i, (feat_index, feat_value, label) in enumerate(data,1):
_tloss, _entloss, _regloss = sess.run([self.loss, self.entropy_loss, self.reg_loss],feed_dict={
self.feat_index: feat_index,
self.feat_value: feat_value,
self.label: label,
self.keep_prob: 1,
self.is_training:False})
tloss = tloss+ (_tloss-tloss)/i
entloss = entloss + (_entloss-entloss)/i
regloss = regloss + (_regloss-regloss)/i
return tloss, entloss, regloss
def save(self, sess, path, global_step):
if self.saver is not None:
self.saver.save(sess, save_path=path, global_step= global_step)
def restore(self, sess, path):
model_file = tf.train.latest_checkpoint(path)
if model_file is not None:
print('restore model:', model_file)
self.saver.restore(sess, save_path=model_file)
if __name__ == '__main__':
BASE_PATH = os.path.dirname(os.path.abspath(__file__))
params ={'feature_size':None,
'field_size':None,
'embedding_size':4,
'deep_layers':[32,32,32],
'epoch':200,
'batch_size':128,
'learning_rate':0.001,
'l2_reg': 0.001,
'keep_prob':0.7,
'checkpoint_dir':os.path.join(BASE_PATH,'data/deepfm'),
'training_model':True}
with tf.Session() as sess:
model = DeepFM(params)
sess.run(tf.global_variables_initializer())
sess.run(tf.local_variables_initializer()) # global_step counter etc.
sys.stdout.flush()
if params['training_model']:
#---------------training---------------------------------
for i in range(params['epoch']):
print('epoch ={}'.format(i).center(50,'-'))
for j, (xi, xv, y) in enumerate(train_data):
                    step = model.train(sess, xi, xv, y)
if j %1000 ==0:
train_loss,train_entropy,train_reg = model.evaluate(sess, Xi,Xv, Y)
val_loss,val_entropy, val_reg = model.evaluate(sess, val_Xi, val_Xv, val_y)
print('---batch= %d--- \n train_loss=%f,\t train_entropy=%f,\t train_reg=%f \n val_loss=%f,\t val_entropy=%f,\t val_reg=%f' % (
j,train_loss,train_entropy,train_reg, val_loss,val_entropy,val_reg))
if i%10 ==0 or i == params['epoch']-1:
model.save(sess, model.checkpoint_dir, i)
prob = model.predict(sess, Xi, Xv)
                    hit_rate, top_k = top_ratio_hit_rate(np.array(Y).ravel(), np.array(prob).ravel(), top_ratio=0.001) # ravel returns a view, flatten returns a copy
print('top-k={}, train-hit-rate={}'.format(top_k ,hit_rate))
#-----------------test-----------------------------------
probs =[]
test_y=[]
for xi, xv, y in test_data:
                prob = model.predict(sess, xi, xv) # flat np.ndarray of probabilities
                probs.extend(prob.tolist())
test_y.extend(y.tolist())
hit_rate, top_k = top_ratio_hit_rate(np.array(test_y).ravel(), np.array(probs).ravel(), top_ratio=0.001)
print('top-k={}, test-hit-rate={}'.format(top_k ,hit_rate))
calc_threshold_vs_depth(np.asarray(test_y).ravel(), np.asarray(probs).ravel())
else:
model.restore(sess, os.path.split(model.checkpoint_dir)[0])
probs=[]
Y =[]
for xi, xv, y in train_data:
                prob = model.predict(sess, xi, xv) # flat np.ndarray of probabilities
                probs.extend(prob.tolist())
Y.extend(y.tolist())
hit_rate, top_k = top_ratio_hit_rate(np.array(Y).ravel(), np.array(probs).ravel(), top_ratio=0.001)
print('train-top-k={}, train-hit-rate={}'.format(top_k ,hit_rate))
probs=[]
test_y=[]
for xi, xv, y in test_data:
                prob = model.predict(sess, xi, xv) # flat np.ndarray of probabilities
                probs.extend(prob.tolist())
test_y.extend(y.tolist())
hit_rate, top_k = top_ratio_hit_rate(np.array(test_y).ravel(), np.array(probs).ravel(), top_ratio=0.001)
print('test-top-k={}, test-hit-rate={}'.format(top_k ,hit_rate))
| [
"tensorflow.local_variables_initializer",
"numpy.sqrt",
"tensorflow.contrib.layers.l2_regularizer",
"tensorflow.reduce_sum",
"tensorflow.gradients",
"numpy.array",
"tensorflow.control_dependencies",
"tensorflow.clip_by_global_norm",
"tensorflow.nn.embedding_lookup",
"tensorflow.random_normal",
"... | [((945, 1011), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32'], {'shape': '[None, None]', 'name': '"""feature_index"""'}), "(tf.int32, shape=[None, None], name='feature_index')\n", (959, 1011), True, 'import tensorflow as tf\n'), ((1038, 1106), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '[None, None]', 'name': '"""feature_value"""'}), "(tf.float32, shape=[None, None], name='feature_value')\n", (1052, 1106), True, 'import tensorflow as tf\n'), ((1128, 1185), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '[None, 1]', 'name': '"""label"""'}), "(tf.float32, shape=[None, 1], name='label')\n", (1142, 1185), True, 'import tensorflow as tf\n'), ((1210, 1264), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '[]', 'name': '"""keep_prob"""'}), "(tf.float32, shape=[], name='keep_prob')\n", (1224, 1264), True, 'import tensorflow as tf\n'), ((1300, 1353), 'tensorflow.placeholder', 'tf.placeholder', (['tf.bool'], {'shape': '[]', 'name': '"""is_training"""'}), "(tf.bool, shape=[], name='is_training')\n", (1314, 1353), True, 'import tensorflow as tf\n'), ((2200, 2225), 'numpy.sqrt', 'np.sqrt', (['(2.0 / input_size)'], {}), '(2.0 / input_size)\n', (2207, 2225), True, 'import numpy as np\n'), ((3212, 3248), 'numpy.sqrt', 'np.sqrt', (['(2.0 / (last_layer_size + 1))'], {}), '(2.0 / (last_layer_size + 1))\n', (3219, 3248), True, 'import numpy as np\n'), ((3625, 3697), 'tensorflow.nn.embedding_lookup', 'tf.nn.embedding_lookup', (["self.weight['embedding_weight']", 'self.feat_index'], {}), "(self.weight['embedding_weight'], self.feat_index)\n", (3647, 3697), True, 'import tensorflow as tf\n'), ((3943, 4010), 'tensorflow.nn.embedding_lookup', 'tf.nn.embedding_lookup', (["self.weight['first_order']", 'self.feat_index'], {}), "(self.weight['first_order'], self.feat_index)\n", (3965, 4010), True, 'import tensorflow as tf\n'), ((4272, 4310), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['self.embedding_first', '(2)'], {}), '(self.embedding_first, 2)\n', (4285, 4310), True, 'import tensorflow as tf\n'), ((4367, 4404), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['self.embedding_part', '(1)'], {}), '(self.embedding_part, 1)\n', (4380, 4404), True, 'import tensorflow as tf\n'), ((4444, 4476), 'tensorflow.square', 'tf.square', (['self.sum_second_order'], {}), '(self.sum_second_order)\n', (4453, 4476), True, 'import tensorflow as tf\n'), ((4512, 4542), 'tensorflow.square', 'tf.square', (['self.embedding_part'], {}), '(self.embedding_part)\n', (4521, 4542), True, 'import tensorflow as tf\n'), ((4582, 4624), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['self.square_second_order', '(1)'], {}), '(self.square_second_order, 1)\n', (4595, 4624), True, 'import tensorflow as tf\n'), ((4841, 4897), 'tensorflow.concat', 'tf.concat', (['[self.first_order, self.second_order]'], {'axis': '(1)'}), '([self.first_order, self.second_order], axis=1)\n', (4850, 4897), True, 'import tensorflow as tf\n'), ((4966, 5042), 'tensorflow.reshape', 'tf.reshape', (['self.embedding_part', '[-1, self.field_size * self.embedding_size]'], {}), '(self.embedding_part, [-1, self.field_size * self.embedding_size])\n', (4976, 5042), True, 'import tensorflow as tf\n'), ((5846, 5900), 'tensorflow.concat', 'tf.concat', (['[self.fm_part, self.deep_embedding]'], {'axis': '(1)'}), '([self.fm_part, self.deep_embedding], axis=1)\n', (5855, 5900), True, 'import tensorflow as tf\n'), ((6143, 6166), 'tensorflow.nn.sigmoid', 'tf.nn.sigmoid', (['self.out'], {}), '(self.out)\n', (6156, 
6166), True, 'import tensorflow as tf\n'), ((6509, 6562), 'losses.focal_loss', 'focal_loss', (['self.prob', 'self.label'], {'alpha': '(0.5)', 'gamma': '(2)'}), '(self.prob, self.label, alpha=0.5, gamma=2)\n', (6519, 6562), False, 'from losses import focal_loss, weighted_binary_crossentropy\n'), ((7268, 7319), 'tensorflow.Variable', 'tf.Variable', (['(0)'], {'trainable': '(False)', 'name': '"""global_step"""'}), "(0, trainable=False, name='global_step')\n", (7279, 7319), True, 'import tensorflow as tf\n'), ((7349, 7446), 'tensorflow.train.exponential_decay', 'tf.train.exponential_decay', (['self.learning_rate', 'self.global_step', '(3000)', '(0.99)'], {'staircase': '(False)'}), '(self.learning_rate, self.global_step, 3000, 0.99,\n staircase=False)\n', (7375, 7446), True, 'import tensorflow as tf\n'), ((7455, 7497), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', (['self.learning_rate'], {}), '(self.learning_rate)\n', (7477, 7497), True, 'import tensorflow as tf\n'), ((7589, 7631), 'tensorflow.get_collection', 'tf.get_collection', (['tf.GraphKeys.UPDATE_OPS'], {}), '(tf.GraphKeys.UPDATE_OPS)\n', (7606, 7631), True, 'import tensorflow as tf\n'), ((7660, 7684), 'tensorflow.trainable_variables', 'tf.trainable_variables', ([], {}), '()\n', (7682, 7684), True, 'import tensorflow as tf\n'), ((7705, 7746), 'tensorflow.gradients', 'tf.gradients', (['self.loss', 'trainable_params'], {}), '(self.loss, trainable_params)\n', (7717, 7746), True, 'import tensorflow as tf\n'), ((7775, 7811), 'tensorflow.clip_by_global_norm', 'tf.clip_by_global_norm', (['gradients', '(5)'], {}), '(gradients, 5)\n', (7797, 7811), True, 'import tensorflow as tf\n'), ((8086, 8115), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {'max_to_keep': '(3)'}), '(max_to_keep=3)\n', (8100, 8115), True, 'import tensorflow as tf\n'), ((10976, 11008), 'tensorflow.train.latest_checkpoint', 'tf.train.latest_checkpoint', (['path'], {}), '(path)\n', (11002, 11008), True, 'import tensorflow as tf\n'), ((11214, 11239), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (11229, 11239), False, 'import os\n'), ((11526, 11564), 'os.path.join', 'os.path.join', (['BASE_PATH', '"""data/deepfm"""'], {}), "(BASE_PATH, 'data/deepfm')\n", (11538, 11564), False, 'import os\n'), ((11607, 11619), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (11617, 11619), True, 'import tensorflow as tf\n'), ((11798, 11816), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (11814, 11816), False, 'import sys\n'), ((1517, 1568), 'tensorflow.random_normal', 'tf.random_normal', (['[self.feature_size, 1]', '(0.0)', '(0.05)'], {}), '([self.feature_size, 1], 0.0, 0.05)\n', (1533, 1568), True, 'import tensorflow as tf\n'), ((1770, 1839), 'tensorflow.random_normal', 'tf.random_normal', (['[self.feature_size, self.embedding_size]', '(0.0)', '(0.05)'], {}), '([self.feature_size, self.embedding_size], 0.0, 0.05)\n', (1786, 1839), True, 'import tensorflow as tf\n'), ((2284, 2369), 'numpy.random.normal', 'np.random.normal', ([], {'loc': '(0)', 'scale': 'he_normal', 'size': '(input_size, self.deep_layers[0])'}), '(loc=0, scale=he_normal, size=(input_size, self.deep_layers[0])\n )\n', (2300, 2369), True, 'import numpy as np\n'), ((2428, 2499), 'numpy.random.normal', 'np.random.normal', ([], {'loc': '(0)', 'scale': 'he_normal', 'size': '(1, self.deep_layers[0])'}), '(loc=0, scale=he_normal, size=(1, self.deep_layers[0]))\n', (2444, 2499), True, 'import numpy as np\n'), ((2625, 2663), 'numpy.sqrt', 'np.sqrt', (['(2.0 / self.deep_layers[i - 
1])'], {}), '(2.0 / self.deep_layers[i - 1])\n', (2632, 2663), True, 'import numpy as np\n'), ((3326, 3397), 'numpy.random.normal', 'np.random.normal', ([], {'loc': '(0)', 'scale': 'glorot_normal', 'size': '(last_layer_size, 1)'}), '(loc=0, scale=glorot_normal, size=(last_layer_size, 1))\n', (3342, 3397), True, 'import numpy as np\n'), ((3464, 3480), 'tensorflow.constant', 'tf.constant', (['(0.0)'], {}), '(0.0)\n', (3475, 3480), True, 'import tensorflow as tf\n'), ((3808, 3861), 'tensorflow.reshape', 'tf.reshape', (['self.feat_value', '[-1, self.field_size, 1]'], {}), '(self.feat_value, [-1, self.field_size, 1])\n', (3818, 3861), True, 'import tensorflow as tf\n'), ((4173, 4226), 'tensorflow.reshape', 'tf.reshape', (['self.feat_value', '[-1, self.field_size, 1]'], {}), '(self.feat_value, [-1, self.field_size, 1])\n', (4183, 4226), True, 'import tensorflow as tf\n'), ((4716, 4787), 'tensorflow.subtract', 'tf.subtract', (['self.sum_second_order_square', 'self.square_second_order_sum'], {}), '(self.sum_second_order_square, self.square_second_order_sum)\n', (4727, 4787), True, 'import tensorflow as tf\n'), ((5402, 5479), 'tensorflow.layers.batch_normalization', 'tf.layers.batch_normalization', (['self.deep_embedding'], {'training': 'self.is_training'}), '(self.deep_embedding, training=self.is_training)\n', (5431, 5479), True, 'import tensorflow as tf\n'), ((5689, 5786), 'tensorflow.layers.dropout', 'tf.layers.dropout', (['self.deep_embedding'], {'rate': '(1.0 - self.keep_prob)', 'training': 'self.is_training'}), '(self.deep_embedding, rate=1.0 - self.keep_prob, training=\n self.is_training)\n', (5706, 5786), True, 'import tensorflow as tf\n'), ((5943, 5988), 'tensorflow.matmul', 'tf.matmul', (['din_all', "self.weight['last_layer']"], {}), "(din_all, self.weight['last_layer'])\n", (5952, 5988), True, 'import tensorflow as tf\n'), ((6741, 6791), 'tensorflow.contrib.layers.l2_regularizer', 'tf.contrib.layers.l2_regularizer', (['self.l2_reg_coef'], {}), '(self.l2_reg_coef)\n', (6773, 6791), True, 'import tensorflow as tf\n'), ((7825, 7860), 'tensorflow.control_dependencies', 'tf.control_dependencies', (['update_ops'], {}), '(update_ops)\n', (7848, 7860), True, 'import tensorflow as tf\n'), ((9266, 9287), 'numpy.concatenate', 'np.concatenate', (['probs'], {}), '(probs)\n', (9280, 9287), True, 'import numpy as np\n'), ((9948, 10013), 'utils.Dataset', 'Dataset', (['feat_value', 'feat_index', 'label', 'batch_size'], {'shuffle': '(False)'}), '(feat_value, feat_index, label, batch_size, shuffle=False)\n', (9955, 10013), False, 'from utils import Dataset\n'), ((11677, 11710), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (11708, 11710), True, 'import tensorflow as tf\n'), ((11729, 11761), 'tensorflow.local_variables_initializer', 'tf.local_variables_initializer', ([], {}), '()\n', (11759, 11761), True, 'import tensorflow as tf\n'), ((2723, 2820), 'numpy.random.normal', 'np.random.normal', ([], {'loc': '(0)', 'scale': 'he_normal', 'size': '(self.deep_layers[i - 1], self.deep_layers[i])'}), '(loc=0, scale=he_normal, size=(self.deep_layers[i - 1],\n self.deep_layers[i]))\n', (2739, 2820), True, 'import numpy as np\n'), ((2947, 3018), 'numpy.random.normal', 'np.random.normal', ([], {'loc': '(0)', 'scale': 'he_normal', 'size': '(1, self.deep_layers[i])'}), '(loc=0, scale=he_normal, size=(1, self.deep_layers[i]))\n', (2963, 3018), True, 'import numpy as np\n'), ((5151, 5210), 'tensorflow.matmul', 'tf.matmul', (['self.deep_embedding', "self.weight['layer_%d' % 
i]"], {}), "(self.deep_embedding, self.weight['layer_%d' % i])\n", (5160, 5210), True, 'import tensorflow as tf\n'), ((6895, 6945), 'tensorflow.contrib.layers.l2_regularizer', 'tf.contrib.layers.l2_regularizer', (['self.l2_reg_coef'], {}), '(self.l2_reg_coef)\n', (6927, 6945), True, 'import tensorflow as tf\n'), ((13867, 13902), 'os.path.split', 'os.path.split', (['model.checkpoint_dir'], {}), '(model.checkpoint_dir)\n', (13880, 13902), False, 'import os\n'), ((14205, 14216), 'numpy.array', 'np.array', (['Y'], {}), '(Y)\n', (14213, 14216), True, 'import numpy as np\n'), ((14226, 14241), 'numpy.array', 'np.array', (['probs'], {}), '(probs)\n', (14234, 14241), True, 'import numpy as np\n'), ((14640, 14656), 'numpy.array', 'np.array', (['test_y'], {}), '(test_y)\n', (14648, 14656), True, 'import numpy as np\n'), ((14666, 14681), 'numpy.array', 'np.array', (['probs'], {}), '(probs)\n', (14674, 14681), True, 'import numpy as np\n'), ((12917, 12928), 'numpy.array', 'np.array', (['Y'], {}), '(Y)\n', (12925, 12928), True, 'import numpy as np\n'), ((12938, 12955), 'numpy.array', 'np.array', (['prob[0]'], {}), '(prob[0])\n', (12946, 12955), True, 'import numpy as np\n'), ((13561, 13577), 'numpy.array', 'np.array', (['test_y'], {}), '(test_y)\n', (13569, 13577), True, 'import numpy as np\n'), ((13587, 13602), 'numpy.array', 'np.array', (['probs'], {}), '(probs)\n', (13595, 13602), True, 'import numpy as np\n'), ((13753, 13771), 'numpy.asarray', 'np.asarray', (['test_y'], {}), '(test_y)\n', (13763, 13771), True, 'import numpy as np\n'), ((13781, 13798), 'numpy.asarray', 'np.asarray', (['probs'], {}), '(probs)\n', (13791, 13798), True, 'import numpy as np\n')] |
import numpy as np
from math import log
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import roc_auc_score, mean_squared_error, mean_absolute_error, classification_report
from math import sqrt
import json
from pprint import pprint
import argparse
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-p', '--problems', type=str, help='file path to problems.json')
parser.add_argument('-s', '--submissions', type=str, help='file path to user_submissions.json')
parser.add_argument('-k', type=int, default=1, help='k-fold cross validation')
args = parser.parse_args()
with open(args.problems, 'r') as file:
problems = json.load(file)
problem_id_2_tag_ids = {problem['id']: problem['tags'] for problem in problems}
with open(args.submissions, 'r') as file:
user_submissions = json.load(file)
max_skill = max([max(problem['tags']) for problem in problems if len(problem['tags']) > 0]) + 1
print('max_skill:', max_skill)
def read_data(training, group, expand_tags=False):
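    # One feature row per submission (or per tag when expand_tags=True), with
    # three features per skill: a seen flag plus log success and failure counts.
    # Counts start at 1 so log() is defined before the first attempt on a tag.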
x = []
y = []
for user_data in user_submissions:
user_group = user_data['group']
if training and user_group == group \
or not training and user_group != group:
continue
submissions = user_data['submissions']
user_success = {}
user_fail = {}
for sub in submissions:
tags = problem_id_2_tag_ids[sub['problem']]
if not expand_tags:
y.append(sub['verdict'])
x.append([0] * 3 * max_skill)
for tag in tags:
s = user_success.get(tag, 1)
f = user_fail.get(tag, 1)
x[-1][tag * 3 + 0] = 1
x[-1][tag * 3 + 1] = log(s)
x[-1][tag * 3 + 2] = log(f)
if sub['verdict'] == 1:
user_success[tag] = s + 1
else:
user_fail[tag] = f + 1
else:
for tag in tags:
s = user_success.get(tag, 1)
f = user_fail.get(tag, 1)
x.append([0] * 3 * max_skill)
x[-1][tag * 3 + 0] = 1
x[-1][tag * 3 + 1] = log(s)
x[-1][tag * 3 + 2] = log(f)
if sub['verdict'] == 1:
y.append(1)
user_success[tag] = s + 1
else:
y.append(0)
user_fail[tag] = f + 1
return x, y
def train(group):
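    # Hold out the users whose group equals `group` for evaluation and fit on
    # all remaining users.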
model = LogisticRegression()
x, y = read_data(training=True, group=group, expand_tags=False)
print('Fitting')
model.fit(x, y)
x, y = read_data(training=False, group=group, expand_tags=False)
print('Predicting')
pred = model.predict_proba(x)[:, 1]
auc = roc_auc_score(y, pred)
rmse = sqrt(mean_squared_error(y, pred))
mae = mean_absolute_error(y, pred)
print('ROC AUC: {}'.format(auc))
print('RMSE: {}'.format(rmse))
print('MAE: {}'.format(mae))
# res = np.zeros(pred.shape[0])
# res[pred >= 0.5] = 1
# print(classification_report(y, res))
return auc, rmse, mae
def main():
k = args.k
auc = np.zeros(k)
rmse = np.zeros(k)
mae = np.zeros(k)
for i in range(k):
print('group: %d' % i)
auc[i], rmse[i], mae[i] = train(i)
print('-' * 20)
print('ROC AUC: {} (+/- {})'.format(auc.mean(), auc.std()))
print('RMSE: {} (+/- {})'.format(rmse.mean(), rmse.std()))
print('MAE: {} (+/- {})'.format(mae.mean(), mae.std()))
if __name__ == '__main__':
main()
| [
"argparse.ArgumentParser",
"sklearn.metrics.roc_auc_score",
"sklearn.linear_model.LogisticRegression",
"sklearn.metrics.mean_squared_error",
"numpy.zeros",
"math.log",
"json.load",
"sklearn.metrics.mean_absolute_error"
] | [((295, 374), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), '(formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n', (318, 374), False, 'import argparse\n'), ((724, 739), 'json.load', 'json.load', (['file'], {}), '(file)\n', (733, 739), False, 'import json\n'), ((888, 903), 'json.load', 'json.load', (['file'], {}), '(file)\n', (897, 903), False, 'import json\n'), ((2719, 2739), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {}), '()\n', (2737, 2739), False, 'from sklearn.linear_model import LogisticRegression\n'), ((2999, 3021), 'sklearn.metrics.roc_auc_score', 'roc_auc_score', (['y', 'pred'], {}), '(y, pred)\n', (3012, 3021), False, 'from sklearn.metrics import roc_auc_score, mean_squared_error, mean_absolute_error, classification_report\n'), ((3079, 3107), 'sklearn.metrics.mean_absolute_error', 'mean_absolute_error', (['y', 'pred'], {}), '(y, pred)\n', (3098, 3107), False, 'from sklearn.metrics import roc_auc_score, mean_squared_error, mean_absolute_error, classification_report\n'), ((3396, 3407), 'numpy.zeros', 'np.zeros', (['k'], {}), '(k)\n', (3404, 3407), True, 'import numpy as np\n'), ((3420, 3431), 'numpy.zeros', 'np.zeros', (['k'], {}), '(k)\n', (3428, 3431), True, 'import numpy as np\n'), ((3443, 3454), 'numpy.zeros', 'np.zeros', (['k'], {}), '(k)\n', (3451, 3454), True, 'import numpy as np\n'), ((3039, 3066), 'sklearn.metrics.mean_squared_error', 'mean_squared_error', (['y', 'pred'], {}), '(y, pred)\n', (3057, 3066), False, 'from sklearn.metrics import roc_auc_score, mean_squared_error, mean_absolute_error, classification_report\n'), ((1851, 1857), 'math.log', 'log', (['s'], {}), '(s)\n', (1854, 1857), False, 'from math import log\n'), ((1900, 1906), 'math.log', 'log', (['f'], {}), '(f)\n', (1903, 1906), False, 'from math import log\n'), ((2365, 2371), 'math.log', 'log', (['s'], {}), '(s)\n', (2368, 2371), False, 'from math import log\n'), ((2414, 2420), 'math.log', 'log', (['f'], {}), '(f)\n', (2417, 2420), False, 'from math import log\n')] |
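The script above never states the layout of its two JSON inputs; the shape below is inferred only from how the code indexes `problems` and `user_submissions`, and every value is an invented placeholder, not data from the original record:
import json
# Minimal fixture matching the script's access patterns (all values invented):
# each problem carries an integer id and a list of integer tag ids; each user
# carries a fold index ('group') and submissions with a problem id and verdict.
problems = [{'id': 101, 'tags': [0, 3]}, {'id': 102, 'tags': [1]}]
user_submissions = [
    {'group': 0, 'submissions': [{'problem': 101, 'verdict': 1},
                                 {'problem': 102, 'verdict': 0}]},
]
with open('problems.json', 'w') as f:
    json.dump(problems, f)
with open('user_submissions.json', 'w') as f:
    json.dump(user_submissions, f)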
# -*- coding: utf-8 -*-
# @Time : 2020/12/23 2:27 PM
# @Author : Kevin
import config
from utils.sentence_process import cut_sentence_by_character
from search.sort.word_to_sequence import Word2Sequence
import pickle
def prepare_dict_model():
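    # Build a character-level vocabulary over the whole corpus and pickle it.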
    with open(config.sort_all_file_path, "r") as f:
        lines = f.readlines()
ws=Word2Sequence()
lines=[cut_sentence_by_character(line) for line in lines]
for line in lines:
ws.fit(line)
ws.build_vocab()
pickle.dump(ws,open(config.sort_ws_model_path,"wb"))
def test_dict_model():
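    # Reload the pickled vocabulary and map a sample sentence to index ids.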
sentence="如何在linux下安装storm"
ws=pickle.load(open(config.sort_ws_model_path,"rb"))
sequence=ws.transform(cut_sentence_by_character(sentence))
print(cut_sentence_by_character(sentence))
print(sequence)
def make_data_file():
with open(config.sort_label_file_path,"w+") as file:
for i in range(96339):
if i%3==0:
file.write("0"+"\n")
else:
file.write("1" + "\n")
if __name__ == '__main__':
# prepare_dict_model()
# test_dict_model()
make_data_file() | [
"search.sort.word_to_sequence.Word2Sequence",
"utils.sentence_process.cut_sentence_by_character"
] | [((313, 328), 'search.sort.word_to_sequence.Word2Sequence', 'Word2Sequence', ([], {}), '()\n', (326, 328), False, 'from search.sort.word_to_sequence import Word2Sequence\n'), ((340, 371), 'utils.sentence_process.cut_sentence_by_character', 'cut_sentence_by_character', (['line'], {}), '(line)\n', (365, 371), False, 'from utils.sentence_process import cut_sentence_by_character\n'), ((656, 691), 'utils.sentence_process.cut_sentence_by_character', 'cut_sentence_by_character', (['sentence'], {}), '(sentence)\n', (681, 691), False, 'from utils.sentence_process import cut_sentence_by_character\n'), ((703, 738), 'utils.sentence_process.cut_sentence_by_character', 'cut_sentence_by_character', (['sentence'], {}), '(sentence)\n', (728, 738), False, 'from utils.sentence_process import cut_sentence_by_character\n')] |
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: MIT
from __future__ import absolute_import, print_function, unicode_literals
import os
import shutil
try:
from unittest.mock import MagicMock
except ImportError:
from mock import MagicMock
import uuid
from ddt import ddt as DataDrivenTestCase, data as ddt_data
from pythonic_testcase import *
from schwarz.fakefs_helpers import TempFS
from schwarz.log_utils import l_
from schwarz.puzzle_plugins import connect_signals, SignalRegistry
from testfixtures import LogCapture
from schwarz.mailqueue import (create_maildir_directories, lock_file,
DebugMailer, MessageHandler)
from schwarz.mailqueue.compat import IS_WINDOWS
from schwarz.mailqueue.maildir_utils import find_messages
from schwarz.mailqueue.message_utils import parse_message_envelope
from schwarz.mailqueue.plugins import MQAction, MQSignal
from schwarz.mailqueue.queue_runner import MaildirBackedMsg, MaildirBackend
from schwarz.mailqueue.testutils import (assert_did_log_message, info_logger,
inject_example_message, message as example_message)
@DataDrivenTestCase
class MessageHandlerTest(PythonicTestCase):
def setUp(self):
self.tempfs = TempFS.set_up(test=self)
self.path_maildir = os.path.join(self.tempfs.root, 'mailqueue')
create_maildir_directories(self.path_maildir)
@ddt_data(True, False)
def test_can_send_message(self, with_msg_id):
mailer = DebugMailer()
msg_header = b'X-Header: somevalue\n'
if with_msg_id:
msg_id = '%s@host.<EMAIL>' % uuid.uuid4()
msg_header += b'Message-ID: <%s>\n' % msg_id.encode('ascii')
msg_body = b'MsgBody\n'
msg_bytes = msg_header + b'\n' + msg_body
msg = inject_example_message(self.path_maildir,
sender = b'<EMAIL>',
recipient = b'<EMAIL>',
msg_bytes = msg_bytes,
)
assert_true(os.path.exists(msg.path))
with LogCapture() as lc:
mh = MessageHandler([mailer], info_logger(lc))
was_sent = mh.send_message(msg)
assert_trueish(was_sent)
expected_log_msg = '%s => %s' % ('<EMAIL>', '<EMAIL>')
if with_msg_id:
expected_log_msg += ' <%s>' % msg_id
assert_did_log_message(lc, expected_msg=expected_log_msg)
assert_length(1, mailer.sent_mails)
sent_msg, = mailer.sent_mails
assert_equals('<EMAIL>', sent_msg.from_addr)
assert_equals(('<EMAIL>',), sent_msg.to_addrs)
assert_equals(msg_nl(msg_bytes), sent_msg.msg_fp.read())
assert_false(os.path.exists(msg.path))
# ensure there are no left-overs/tmp files
assert_length(0, self.list_all_files(self.path_maildir))
def test_can_handle_sending_failure(self):
mailer = DebugMailer(simulate_failed_sending=True)
msg = inject_example_message(self.path_maildir)
assert_true(os.path.exists(msg.path))
was_sent = MessageHandler([mailer]).send_message(msg)
assert_falseish(was_sent)
assert_true(os.path.exists(msg.path))
# no left-overs (e.g. in "tmp" folder) other than the initial message file
assert_length(1, self.list_all_files(self.path_maildir))
def test_can_handle_non_existent_file_in_send(self):
mailer = DebugMailer()
invalid_path = os.path.join(self.path_maildir, 'new', 'invalid')
msg_with_invalid_path = MaildirBackedMsg(invalid_path)
was_sent = MessageHandler([mailer]).send_message(msg_with_invalid_path)
assert_none(was_sent)
assert_length(0, mailer.sent_mails)
def test_can_handle_vanished_file_after_successful_send(self):
if IS_WINDOWS:
self.skipTest('unable to unlink open file on Windows')
msg = inject_example_message(self.path_maildir)
path_in_progress = msg.path.replace('new', 'cur')
def delete_on_send(*args):
os.unlink(path_in_progress)
return True
mailer = DebugMailer(send_callback=delete_on_send)
was_sent = MessageHandler([mailer]).send_message(msg)
assert_true(was_sent)
assert_length(1, mailer.sent_mails)
assert_length(0, self.list_all_files(self.path_maildir))
def test_can_handle_vanished_file_after_failed_send(self):
if IS_WINDOWS:
self.skipTest('unable to unlink open file on Windows')
msg = inject_example_message(self.path_maildir)
path_in_progress = msg.path.replace('new', 'cur')
def delete_on_send(*args):
os.unlink(path_in_progress)
return False
mailer = DebugMailer(send_callback=delete_on_send)
was_sent = MessageHandler([mailer]).send_message(msg)
assert_false(was_sent)
assert_length(0, mailer.sent_mails)
assert_length(0, self.list_all_files(self.path_maildir))
def test_can_handle_duplicate_file_in_cur_before_send(self):
msg = inject_example_message(self.path_maildir)
path_in_progress = msg.path.replace('new', 'cur')
# this can happen on Unix/Posix because Python does not provide an
# atomic "move without overwrite". Linux provides the "renameat2"
# system call (with RENAME_NOREPLACE flag) but Python does not expose
# that API.
shutil.copy(msg.path, path_in_progress)
mailer = DebugMailer()
was_sent = MessageHandler([mailer]).send_message(msg)
assert_none(was_sent)
assert_length(0, mailer.sent_mails)
assert_length(2, self.list_all_files(self.path_maildir))
def test_can_handle_duplicate_file_in_new_after_failed_send(self):
msg = inject_example_message(self.path_maildir)
path_in_progress = msg.path.replace('new', 'cur')
# again: can happen because Python provides not atomic "move without
# overwrite" on Linux (see also "renameat2" system call)
def duplicate_on_failed_send(*args):
shutil.copy(path_in_progress, msg.path)
return False
mailer = DebugMailer(send_callback=duplicate_on_failed_send)
was_sent = MessageHandler([mailer]).send_message(msg)
assert_false(was_sent)
assert_length(0, mailer.sent_mails)
assert_length(2, self.list_all_files(self.path_maildir))
def test_tries_to_lock_message_while_sending(self):
mailer = DebugMailer()
msg = inject_example_message(self.path_maildir)
locked_msg = lock_file(msg.path, timeout=0.1)
mh = MessageHandler([mailer])
was_sent = mh.send_message(msg)
assert_none(was_sent)
assert_length(1, self.msg_files(folder='new'))
assert_is_empty(mailer.sent_mails)
locked_msg.close()
was_sent = mh.send_message(msg)
assert_trueish(was_sent)
assert_is_empty(self.msg_files(folder='new'))
assert_length(1, mailer.sent_mails)
def test_can_enqueue_message_after_failed_sending(self):
mailer = DebugMailer(simulate_failed_sending=True)
maildir_fallback = MaildirBackend(self.path_maildir)
msg = example_message()
mh = MessageHandler([mailer, maildir_fallback])
was_sent = mh.send_message(msg, sender='<EMAIL>', recipient='<EMAIL>')
assert_trueish(was_sent)
assert_is_empty(mailer.sent_mails)
msg_path, = self.msg_files(folder='new')
with open(msg_path, 'rb') as msg_fp:
stored_msg = parse_message_envelope(msg_fp)
assert_equals('<EMAIL>', stored_msg.from_addr)
assert_equals(('<EMAIL>',), stored_msg.to_addrs)
def test_can_enqueue_message_with_multiple_recipients_after_failed_sending(self):
mailer = DebugMailer(simulate_failed_sending=True)
mh = MessageHandler([mailer, MaildirBackend(self.path_maildir)])
msg = example_message()
recipients = ('<EMAIL>', '<EMAIL>')
mh.send_message(msg, sender='<EMAIL>', recipients=recipients)
msg_path, = self.msg_files(folder='new')
with open(msg_path, 'rb') as msg_fp:
stored_msg = parse_message_envelope(msg_fp)
assert_equals(recipients, stored_msg.to_addrs)
@ddt_data(True, False)
def test_can_notify_plugin_after_delivery(self, delivery_successful):
if delivery_successful:
signal = MQSignal.delivery_successful
mailer = DebugMailer()
else:
signal = MQSignal.delivery_failed
mailer = DebugMailer(simulate_failed_sending=True)
registry = SignalRegistry()
plugin = MagicMock(return_value=None, spec={})
connect_signals({signal: plugin}, registry.namespace)
mh = MessageHandler([mailer], plugins=registry)
mh.send_message(example_message(), sender='<EMAIL>', recipient='<EMAIL>')
plugin.assert_called_once()
# would be able to simplify this with Python 3 only:
# call_kwargs = plugin.call_args.kwargs
call_kwargs = plugin.call_args[-1]
send_result = call_kwargs['send_result']
if delivery_successful:
assert_length(1, mailer.sent_mails)
assert_trueish(send_result)
else:
assert_length(0, mailer.sent_mails)
assert_falseish(send_result)
assert_false(send_result.queued)
assert_equals('debug', send_result.transport)
def test_plugin_can_discard_message_after_failed_delivery(self):
mailer = DebugMailer(simulate_failed_sending=True)
sender = '<EMAIL>'
recipient = '<EMAIL>'
def discard_message(event_sender, msg, send_result):
assert_falseish(send_result)
assert_none(send_result.discarded)
assert_equals(sender, msg.from_addr)
assert_equals({recipient}, set(msg.to_addrs))
return MQAction.DISCARD
registry = SignalRegistry()
connect_signals({MQSignal.delivery_failed: discard_message}, registry.namespace)
msg = example_message()
mh = MessageHandler([mailer], plugins=registry)
send_result = mh.send_message(msg, sender=sender, recipient=recipient)
assert_falseish(send_result)
assert_false(send_result.queued)
assert_true(send_result.discarded)
def test_plugin_can_access_number_of_failed_deliveries(self):
registry = SignalRegistry()
def discard_after_two_attempts(sender, msg, send_result):
return MQAction.DISCARD if (msg.retries > 1) else None
connect_signals({MQSignal.delivery_failed: discard_after_two_attempts}, registry.namespace)
msg = inject_example_message(self.path_maildir)
mailer = DebugMailer(simulate_failed_sending=True)
mh = MessageHandler([mailer], plugins=registry)
mh.send_message(msg)
assert_length(1, find_messages(self.path_maildir, log=l_(None)))
send_result = mh.send_message(msg)
assert_falseish(send_result)
assert_length(0, mailer.sent_mails)
assert_length(0, find_messages(self.path_maildir, log=l_(None)))
assert_true(send_result.discarded)
# --- internal helpers ----------------------------------------------------
def list_all_files(self, basedir):
files = []
for root_dir, dirnames, filenames in os.walk(basedir):
for filename in filenames:
path = os.path.join(root_dir, filename)
files.append(path)
return files
def msg_files(self, folder='new'):
path = os.path.join(self.path_maildir, folder)
files = []
for filename in os.listdir(path):
file_path = os.path.join(path, filename)
files.append(file_path)
return files
def msg_nl(msg_bytes):
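    # On Windows the queued message comes back with CRLF line endings, so
    # normalize the expected bytes before comparing.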
return msg_bytes if (not IS_WINDOWS) else msg_bytes.replace(b'\n', b'\r\n')
| [
"schwarz.mailqueue.DebugMailer",
"schwarz.mailqueue.lock_file",
"testfixtures.LogCapture",
"os.walk",
"os.path.exists",
"schwarz.log_utils.l_",
"os.listdir",
"schwarz.mailqueue.testutils.assert_did_log_message",
"os.unlink",
"schwarz.mailqueue.MessageHandler",
"schwarz.mailqueue.create_maildir_d... | [((1343, 1364), 'ddt.data', 'ddt_data', (['(True)', '(False)'], {}), '(True, False)\n', (1351, 1364), True, 'from ddt import ddt as DataDrivenTestCase, data as ddt_data\n'), ((8179, 8200), 'ddt.data', 'ddt_data', (['(True)', '(False)'], {}), '(True, False)\n', (8187, 8200), True, 'from ddt import ddt as DataDrivenTestCase, data as ddt_data\n'), ((1186, 1210), 'schwarz.fakefs_helpers.TempFS.set_up', 'TempFS.set_up', ([], {'test': 'self'}), '(test=self)\n', (1199, 1210), False, 'from schwarz.fakefs_helpers import TempFS\n'), ((1239, 1282), 'os.path.join', 'os.path.join', (['self.tempfs.root', '"""mailqueue"""'], {}), "(self.tempfs.root, 'mailqueue')\n", (1251, 1282), False, 'import os\n'), ((1291, 1336), 'schwarz.mailqueue.create_maildir_directories', 'create_maildir_directories', (['self.path_maildir'], {}), '(self.path_maildir)\n', (1317, 1336), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((1432, 1445), 'schwarz.mailqueue.DebugMailer', 'DebugMailer', ([], {}), '()\n', (1443, 1445), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((1739, 1847), 'schwarz.mailqueue.testutils.inject_example_message', 'inject_example_message', (['self.path_maildir'], {'sender': "b'<EMAIL>'", 'recipient': "b'<EMAIL>'", 'msg_bytes': 'msg_bytes'}), "(self.path_maildir, sender=b'<EMAIL>', recipient=\n b'<EMAIL>', msg_bytes=msg_bytes)\n", (1761, 1847), False, 'from schwarz.mailqueue.testutils import assert_did_log_message, info_logger, inject_example_message, message as example_message\n'), ((2261, 2318), 'schwarz.mailqueue.testutils.assert_did_log_message', 'assert_did_log_message', (['lc'], {'expected_msg': 'expected_log_msg'}), '(lc, expected_msg=expected_log_msg)\n', (2283, 2318), False, 'from schwarz.mailqueue.testutils import assert_did_log_message, info_logger, inject_example_message, message as example_message\n'), ((2803, 2844), 'schwarz.mailqueue.DebugMailer', 'DebugMailer', ([], {'simulate_failed_sending': '(True)'}), '(simulate_failed_sending=True)\n', (2814, 2844), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((2859, 2900), 'schwarz.mailqueue.testutils.inject_example_message', 'inject_example_message', (['self.path_maildir'], {}), '(self.path_maildir)\n', (2881, 2900), False, 'from schwarz.mailqueue.testutils import assert_did_log_message, info_logger, inject_example_message, message as example_message\n'), ((3313, 3326), 'schwarz.mailqueue.DebugMailer', 'DebugMailer', ([], {}), '()\n', (3324, 3326), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((3350, 3399), 'os.path.join', 'os.path.join', (['self.path_maildir', '"""new"""', '"""invalid"""'], {}), "(self.path_maildir, 'new', 'invalid')\n", (3362, 3399), False, 'import os\n'), ((3432, 3462), 'schwarz.mailqueue.queue_runner.MaildirBackedMsg', 'MaildirBackedMsg', (['invalid_path'], {}), '(invalid_path)\n', (3448, 3462), False, 'from schwarz.mailqueue.queue_runner import MaildirBackedMsg, MaildirBackend\n'), ((3790, 3831), 'schwarz.mailqueue.testutils.inject_example_message', 'inject_example_message', (['self.path_maildir'], {}), '(self.path_maildir)\n', (3812, 3831), False, 'from schwarz.mailqueue.testutils import assert_did_log_message, info_logger, inject_example_message, message as example_message\n'), ((4006, 4047), 'schwarz.mailqueue.DebugMailer', 
'DebugMailer', ([], {'send_callback': 'delete_on_send'}), '(send_callback=delete_on_send)\n', (4017, 4047), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((4418, 4459), 'schwarz.mailqueue.testutils.inject_example_message', 'inject_example_message', (['self.path_maildir'], {}), '(self.path_maildir)\n', (4440, 4459), False, 'from schwarz.mailqueue.testutils import assert_did_log_message, info_logger, inject_example_message, message as example_message\n'), ((4635, 4676), 'schwarz.mailqueue.DebugMailer', 'DebugMailer', ([], {'send_callback': 'delete_on_send'}), '(send_callback=delete_on_send)\n', (4646, 4676), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((4960, 5001), 'schwarz.mailqueue.testutils.inject_example_message', 'inject_example_message', (['self.path_maildir'], {}), '(self.path_maildir)\n', (4982, 5001), False, 'from schwarz.mailqueue.testutils import assert_did_log_message, info_logger, inject_example_message, message as example_message\n'), ((5315, 5354), 'shutil.copy', 'shutil.copy', (['msg.path', 'path_in_progress'], {}), '(msg.path, path_in_progress)\n', (5326, 5354), False, 'import shutil\n'), ((5372, 5385), 'schwarz.mailqueue.DebugMailer', 'DebugMailer', ([], {}), '()\n', (5383, 5385), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((5674, 5715), 'schwarz.mailqueue.testutils.inject_example_message', 'inject_example_message', (['self.path_maildir'], {}), '(self.path_maildir)\n', (5696, 5715), False, 'from schwarz.mailqueue.testutils import assert_did_log_message, info_logger, inject_example_message, message as example_message\n'), ((6055, 6106), 'schwarz.mailqueue.DebugMailer', 'DebugMailer', ([], {'send_callback': 'duplicate_on_failed_send'}), '(send_callback=duplicate_on_failed_send)\n', (6066, 6106), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((6384, 6397), 'schwarz.mailqueue.DebugMailer', 'DebugMailer', ([], {}), '()\n', (6395, 6397), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((6412, 6453), 'schwarz.mailqueue.testutils.inject_example_message', 'inject_example_message', (['self.path_maildir'], {}), '(self.path_maildir)\n', (6434, 6453), False, 'from schwarz.mailqueue.testutils import assert_did_log_message, info_logger, inject_example_message, message as example_message\n'), ((6475, 6507), 'schwarz.mailqueue.lock_file', 'lock_file', (['msg.path'], {'timeout': '(0.1)'}), '(msg.path, timeout=0.1)\n', (6484, 6507), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((6521, 6545), 'schwarz.mailqueue.MessageHandler', 'MessageHandler', (['[mailer]'], {}), '([mailer])\n', (6535, 6545), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((6993, 7034), 'schwarz.mailqueue.DebugMailer', 'DebugMailer', ([], {'simulate_failed_sending': '(True)'}), '(simulate_failed_sending=True)\n', (7004, 7034), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((7062, 7095), 'schwarz.mailqueue.queue_runner.MaildirBackend', 'MaildirBackend', (['self.path_maildir'], {}), '(self.path_maildir)\n', (7076, 7095), False, 'from schwarz.mailqueue.queue_runner import MaildirBackedMsg, MaildirBackend\n'), ((7110, 
7127), 'schwarz.mailqueue.testutils.message', 'example_message', ([], {}), '()\n', (7125, 7127), True, 'from schwarz.mailqueue.testutils import assert_did_log_message, info_logger, inject_example_message, message as example_message\n'), ((7142, 7184), 'schwarz.mailqueue.MessageHandler', 'MessageHandler', (['[mailer, maildir_fallback]'], {}), '([mailer, maildir_fallback])\n', (7156, 7184), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((7706, 7747), 'schwarz.mailqueue.DebugMailer', 'DebugMailer', ([], {'simulate_failed_sending': '(True)'}), '(simulate_failed_sending=True)\n', (7717, 7747), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((7835, 7852), 'schwarz.mailqueue.testutils.message', 'example_message', ([], {}), '()\n', (7850, 7852), True, 'from schwarz.mailqueue.testutils import assert_did_log_message, info_logger, inject_example_message, message as example_message\n'), ((8534, 8550), 'schwarz.puzzle_plugins.SignalRegistry', 'SignalRegistry', ([], {}), '()\n', (8548, 8550), False, 'from schwarz.puzzle_plugins import connect_signals, SignalRegistry\n'), ((8568, 8605), 'mock.MagicMock', 'MagicMock', ([], {'return_value': 'None', 'spec': '{}'}), '(return_value=None, spec={})\n', (8577, 8605), False, 'from mock import MagicMock\n'), ((8614, 8667), 'schwarz.puzzle_plugins.connect_signals', 'connect_signals', (['{signal: plugin}', 'registry.namespace'], {}), '({signal: plugin}, registry.namespace)\n', (8629, 8667), False, 'from schwarz.puzzle_plugins import connect_signals, SignalRegistry\n'), ((8682, 8724), 'schwarz.mailqueue.MessageHandler', 'MessageHandler', (['[mailer]'], {'plugins': 'registry'}), '([mailer], plugins=registry)\n', (8696, 8724), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((9450, 9491), 'schwarz.mailqueue.DebugMailer', 'DebugMailer', ([], {'simulate_failed_sending': '(True)'}), '(simulate_failed_sending=True)\n', (9461, 9491), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((9862, 9878), 'schwarz.puzzle_plugins.SignalRegistry', 'SignalRegistry', ([], {}), '()\n', (9876, 9878), False, 'from schwarz.puzzle_plugins import connect_signals, SignalRegistry\n'), ((9887, 9972), 'schwarz.puzzle_plugins.connect_signals', 'connect_signals', (['{MQSignal.delivery_failed: discard_message}', 'registry.namespace'], {}), '({MQSignal.delivery_failed: discard_message}, registry.namespace\n )\n', (9902, 9972), False, 'from schwarz.puzzle_plugins import connect_signals, SignalRegistry\n'), ((9982, 9999), 'schwarz.mailqueue.testutils.message', 'example_message', ([], {}), '()\n', (9997, 9999), True, 'from schwarz.mailqueue.testutils import assert_did_log_message, info_logger, inject_example_message, message as example_message\n'), ((10013, 10055), 'schwarz.mailqueue.MessageHandler', 'MessageHandler', (['[mailer]'], {'plugins': 'registry'}), '([mailer], plugins=registry)\n', (10027, 10055), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((10343, 10359), 'schwarz.puzzle_plugins.SignalRegistry', 'SignalRegistry', ([], {}), '()\n', (10357, 10359), False, 'from schwarz.puzzle_plugins import connect_signals, SignalRegistry\n'), ((10501, 10596), 'schwarz.puzzle_plugins.connect_signals', 'connect_signals', (['{MQSignal.delivery_failed: discard_after_two_attempts}', 
'registry.namespace'], {}), '({MQSignal.delivery_failed: discard_after_two_attempts},\n registry.namespace)\n', (10516, 10596), False, 'from schwarz.puzzle_plugins import connect_signals, SignalRegistry\n'), ((10608, 10649), 'schwarz.mailqueue.testutils.inject_example_message', 'inject_example_message', (['self.path_maildir'], {}), '(self.path_maildir)\n', (10630, 10649), False, 'from schwarz.mailqueue.testutils import assert_did_log_message, info_logger, inject_example_message, message as example_message\n'), ((10667, 10708), 'schwarz.mailqueue.DebugMailer', 'DebugMailer', ([], {'simulate_failed_sending': '(True)'}), '(simulate_failed_sending=True)\n', (10678, 10708), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((10722, 10764), 'schwarz.mailqueue.MessageHandler', 'MessageHandler', (['[mailer]'], {'plugins': 'registry'}), '([mailer], plugins=registry)\n', (10736, 10764), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((11294, 11310), 'os.walk', 'os.walk', (['basedir'], {}), '(basedir)\n', (11301, 11310), False, 'import os\n'), ((11518, 11557), 'os.path.join', 'os.path.join', (['self.path_maildir', 'folder'], {}), '(self.path_maildir, folder)\n', (11530, 11557), False, 'import os\n'), ((11601, 11617), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (11611, 11617), False, 'import os\n'), ((1921, 1945), 'os.path.exists', 'os.path.exists', (['msg.path'], {}), '(msg.path)\n', (1935, 1945), False, 'import os\n'), ((1961, 1973), 'testfixtures.LogCapture', 'LogCapture', ([], {}), '()\n', (1971, 1973), False, 'from testfixtures import LogCapture\n'), ((2596, 2620), 'os.path.exists', 'os.path.exists', (['msg.path'], {}), '(msg.path)\n', (2610, 2620), False, 'import os\n'), ((2921, 2945), 'os.path.exists', 'os.path.exists', (['msg.path'], {}), '(msg.path)\n', (2935, 2945), False, 'import os\n'), ((3064, 3088), 'os.path.exists', 'os.path.exists', (['msg.path'], {}), '(msg.path)\n', (3078, 3088), False, 'import os\n'), ((3937, 3964), 'os.unlink', 'os.unlink', (['path_in_progress'], {}), '(path_in_progress)\n', (3946, 3964), False, 'import os\n'), ((4565, 4592), 'os.unlink', 'os.unlink', (['path_in_progress'], {}), '(path_in_progress)\n', (4574, 4592), False, 'import os\n'), ((5973, 6012), 'shutil.copy', 'shutil.copy', (['path_in_progress', 'msg.path'], {}), '(path_in_progress, msg.path)\n', (5984, 6012), False, 'import shutil\n'), ((7459, 7489), 'schwarz.mailqueue.message_utils.parse_message_envelope', 'parse_message_envelope', (['msg_fp'], {}), '(msg_fp)\n', (7481, 7489), False, 'from schwarz.mailqueue.message_utils import parse_message_envelope\n'), ((8087, 8117), 'schwarz.mailqueue.message_utils.parse_message_envelope', 'parse_message_envelope', (['msg_fp'], {}), '(msg_fp)\n', (8109, 8117), False, 'from schwarz.mailqueue.message_utils import parse_message_envelope\n'), ((8378, 8391), 'schwarz.mailqueue.DebugMailer', 'DebugMailer', ([], {}), '()\n', (8389, 8391), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((8473, 8514), 'schwarz.mailqueue.DebugMailer', 'DebugMailer', ([], {'simulate_failed_sending': '(True)'}), '(simulate_failed_sending=True)\n', (8484, 8514), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((8749, 8766), 'schwarz.mailqueue.testutils.message', 'example_message', ([], {}), '()\n', (8764, 8766), True, 'from 
schwarz.mailqueue.testutils import assert_did_log_message, info_logger, inject_example_message, message as example_message\n'), ((11643, 11671), 'os.path.join', 'os.path.join', (['path', 'filename'], {}), '(path, filename)\n', (11655, 11671), False, 'import os\n'), ((1557, 1569), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1567, 1569), False, 'import uuid\n'), ((2023, 2038), 'schwarz.mailqueue.testutils.info_logger', 'info_logger', (['lc'], {}), '(lc)\n', (2034, 2038), False, 'from schwarz.mailqueue.testutils import assert_did_log_message, info_logger, inject_example_message, message as example_message\n'), ((2967, 2991), 'schwarz.mailqueue.MessageHandler', 'MessageHandler', (['[mailer]'], {}), '([mailer])\n', (2981, 2991), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((3483, 3507), 'schwarz.mailqueue.MessageHandler', 'MessageHandler', (['[mailer]'], {}), '([mailer])\n', (3497, 3507), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((4068, 4092), 'schwarz.mailqueue.MessageHandler', 'MessageHandler', (['[mailer]'], {}), '([mailer])\n', (4082, 4092), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((4697, 4721), 'schwarz.mailqueue.MessageHandler', 'MessageHandler', (['[mailer]'], {}), '([mailer])\n', (4711, 4721), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((5406, 5430), 'schwarz.mailqueue.MessageHandler', 'MessageHandler', (['[mailer]'], {}), '([mailer])\n', (5420, 5430), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((6127, 6151), 'schwarz.mailqueue.MessageHandler', 'MessageHandler', (['[mailer]'], {}), '([mailer])\n', (6141, 6151), False, 'from schwarz.mailqueue import create_maildir_directories, lock_file, DebugMailer, MessageHandler\n'), ((7785, 7818), 'schwarz.mailqueue.queue_runner.MaildirBackend', 'MaildirBackend', (['self.path_maildir'], {}), '(self.path_maildir)\n', (7799, 7818), False, 'from schwarz.mailqueue.queue_runner import MaildirBackedMsg, MaildirBackend\n'), ((11374, 11406), 'os.path.join', 'os.path.join', (['root_dir', 'filename'], {}), '(root_dir, filename)\n', (11386, 11406), False, 'import os\n'), ((10857, 10865), 'schwarz.log_utils.l_', 'l_', (['None'], {}), '(None)\n', (10859, 10865), False, 'from schwarz.log_utils import l_\n'), ((11055, 11063), 'schwarz.log_utils.l_', 'l_', (['None'], {}), '(None)\n', (11057, 11063), False, 'from schwarz.log_utils import l_\n')] |
#!/usr/bin/python3
"""Tests for reflinks script."""
#
# (C) Pywikibot team, 2014-2022
#
# Distributed under the terms of the MIT license.
#
import unittest
from scripts.reflinks import ReferencesRobot, XmlDumpPageGenerator, main
from tests import join_xml_data_path
from tests.aspects import ScriptMainTestCase, TestCase
from tests.utils import empty_sites
class TestXMLPageGenerator(TestCase):
"""Test XML Page generator."""
family = 'wikipedia'
code = 'en'
dry = True
def test_non_bare_ref_urls(self):
"""Test pages without bare references are not processed."""
gen = XmlDumpPageGenerator(
filename=join_xml_data_path('article-pear-0.10.xml'),
start='Pear',
namespaces=[0, 1],
site=self.site)
pages = list(gen)
self.assertIsEmpty(pages)
def test_simple_bare_refs(self):
"""Test simple bare references with several namespaces options."""
namespace_variants = (None, [], [0, 1], ['0', '1'])
filename = join_xml_data_path('dummy-reflinks.xml')
for namespaces in namespace_variants:
with self.subTest(namespaces=namespaces):
gen = XmlDumpPageGenerator(filename=filename,
start='Fake page',
namespaces=namespaces,
site=self.site)
pages = list(gen)
self.assertPageTitlesEqual(pages, ('Fake page',
'Talk:Fake page'),
site=self.site)
def test_namespace_names(self):
"""Test namespaces with namespace names."""
gen = XmlDumpPageGenerator(
filename=join_xml_data_path('dummy-reflinks.xml'),
start='Fake page',
namespaces=['Talk'],
site=self.site)
pages = list(gen)
self.assertPageTitlesEqual(pages, ['Talk:Fake page'], site=self.site)
def test_start_variants(self):
"""Test with several page title options."""
start_variants = (
None, # None
'Fake', # prefix
'Fake_page', # underscore
)
filename = join_xml_data_path('dummy-reflinks.xml')
for start in start_variants:
with self.subTest(start=start):
gen = XmlDumpPageGenerator(filename=filename,
start=start,
namespaces=[0, 1],
site=self.site)
pages = list(gen)
self.assertPageTitlesEqual(pages, ('Fake page',
'Talk:Fake page'),
site=self.site)
class TestReferencesBotConstructor(ScriptMainTestCase):
"""
Test reflinks with run() removed.
These tests can't verify the order of the pages in the XML
as the constructor is given a preloading generator.
See APISite.preloadpages for details.
"""
family = 'wikipedia'
code = 'en'
def setUp(self):
"""Set up the script by patching the bot class."""
super().setUp()
self._original_constructor = ReferencesRobot.__init__
self._original_run = ReferencesRobot.run
ReferencesRobot.__init__ = dummy_constructor
ReferencesRobot.run = lambda self: None
def tearDown(self):
"""Tear down the test by undoing the bot class patch."""
ReferencesRobot.__init__ = self._original_constructor
ReferencesRobot.run = self._original_run
with empty_sites():
super().tearDown()
def test_xml_simple(self):
"""Test the generator without any narrowing."""
main('-xml:' + join_xml_data_path('dummy-reflinks.xml'))
gen = self.constructor_kwargs['generator']
self.assertPageTitlesCountEqual(gen, ['Fake page', 'Talk:Fake page'],
site=self.get_site())
def test_xml_one_namespace(self):
"""Test the generator using one namespace id."""
main('-xml:' + join_xml_data_path('dummy-reflinks.xml'),
'-namespace:1')
gen = self.constructor_kwargs['generator']
pages = list(gen)
self.assertPageTitlesEqual(pages, ['Talk:Fake page'],
site=self.get_site())
def test_xml_multiple_namespace_ids(self):
"""Test the generator using multiple separate namespaces parameters."""
main('-xml:' + join_xml_data_path('dummy-reflinks.xml'),
'-namespace:0', '-namespace:1', '-xmlstart:Fake page')
gen = self.constructor_kwargs['generator']
self.assertPageTitlesCountEqual(gen, ['Fake page', 'Talk:Fake page'],
site=self.get_site())
def test_xml_multiple_namespace_ids_2(self):
"""Test the generator using multiple namespaces in one parameter."""
main('-xml:' + join_xml_data_path('dummy-reflinks.xml'),
'-namespace:0,1', '-xmlstart:Fake page')
gen = self.constructor_kwargs['generator']
self.assertPageTitlesCountEqual(gen, ['Fake page', 'Talk:Fake page'],
site=self.get_site())
def test_xml_start_variants(self):
"""Test the generator using variants of start page."""
start_variants = (
'-xmlstart:Fake page', # title
'-xmlstart:Fake_page', # underscore
'-xmlstart:Fake', # prefix
)
filename = '-xml:' + join_xml_data_path('dummy-reflinks.xml')
for start in start_variants:
with self.subTest(xmlstart=start), empty_sites():
main(filename, '-namespace:1', start)
gen = self.constructor_kwargs['generator']
pages = list(gen)
self.assertPageTitlesEqual(pages, ['Talk:Fake page'],
site=self.site)
def test_xml_namespace_name(self):
"""Test the generator using a namespace name."""
main('-xml:' + join_xml_data_path('dummy-reflinks.xml'),
'-namespace:Talk', '-xmlstart:Fake page')
gen = self.constructor_kwargs['generator']
pages = list(gen)
self.assertPageTitlesEqual(pages, ['Talk:Fake page'],
site=self.get_site())
def dummy_constructor(self, *args, **kwargs):
"""A constructor faking the actual constructor."""
TestReferencesBotConstructor.constructor_args = args
TestReferencesBotConstructor.constructor_kwargs = kwargs
if __name__ == '__main__': # pragma: no cover
unittest.main()
| [
"scripts.reflinks.XmlDumpPageGenerator",
"tests.utils.empty_sites",
"tests.join_xml_data_path",
"unittest.main",
"scripts.reflinks.main"
] | [((6787, 6802), 'unittest.main', 'unittest.main', ([], {}), '()\n', (6800, 6802), False, 'import unittest\n'), ((1039, 1079), 'tests.join_xml_data_path', 'join_xml_data_path', (['"""dummy-reflinks.xml"""'], {}), "('dummy-reflinks.xml')\n", (1057, 1079), False, 'from tests import join_xml_data_path\n'), ((2280, 2320), 'tests.join_xml_data_path', 'join_xml_data_path', (['"""dummy-reflinks.xml"""'], {}), "('dummy-reflinks.xml')\n", (2298, 2320), False, 'from tests import join_xml_data_path\n'), ((3716, 3729), 'tests.utils.empty_sites', 'empty_sites', ([], {}), '()\n', (3727, 3729), False, 'from tests.utils import empty_sites\n'), ((5684, 5724), 'tests.join_xml_data_path', 'join_xml_data_path', (['"""dummy-reflinks.xml"""'], {}), "('dummy-reflinks.xml')\n", (5702, 5724), False, 'from tests import join_xml_data_path\n'), ((656, 699), 'tests.join_xml_data_path', 'join_xml_data_path', (['"""article-pear-0.10.xml"""'], {}), "('article-pear-0.10.xml')\n", (674, 699), False, 'from tests import join_xml_data_path\n'), ((1202, 1304), 'scripts.reflinks.XmlDumpPageGenerator', 'XmlDumpPageGenerator', ([], {'filename': 'filename', 'start': '"""Fake page"""', 'namespaces': 'namespaces', 'site': 'self.site'}), "(filename=filename, start='Fake page', namespaces=\n namespaces, site=self.site)\n", (1222, 1304), False, 'from scripts.reflinks import ReferencesRobot, XmlDumpPageGenerator, main\n'), ((1802, 1842), 'tests.join_xml_data_path', 'join_xml_data_path', (['"""dummy-reflinks.xml"""'], {}), "('dummy-reflinks.xml')\n", (1820, 1842), False, 'from tests import join_xml_data_path\n'), ((2424, 2515), 'scripts.reflinks.XmlDumpPageGenerator', 'XmlDumpPageGenerator', ([], {'filename': 'filename', 'start': 'start', 'namespaces': '[0, 1]', 'site': 'self.site'}), '(filename=filename, start=start, namespaces=[0, 1],\n site=self.site)\n', (2444, 2515), False, 'from scripts.reflinks import ReferencesRobot, XmlDumpPageGenerator, main\n'), ((3873, 3913), 'tests.join_xml_data_path', 'join_xml_data_path', (['"""dummy-reflinks.xml"""'], {}), "('dummy-reflinks.xml')\n", (3891, 3913), False, 'from tests import join_xml_data_path\n'), ((4225, 4265), 'tests.join_xml_data_path', 'join_xml_data_path', (['"""dummy-reflinks.xml"""'], {}), "('dummy-reflinks.xml')\n", (4243, 4265), False, 'from tests import join_xml_data_path\n'), ((4643, 4683), 'tests.join_xml_data_path', 'join_xml_data_path', (['"""dummy-reflinks.xml"""'], {}), "('dummy-reflinks.xml')\n", (4661, 4683), False, 'from tests import join_xml_data_path\n'), ((5094, 5134), 'tests.join_xml_data_path', 'join_xml_data_path', (['"""dummy-reflinks.xml"""'], {}), "('dummy-reflinks.xml')\n", (5112, 5134), False, 'from tests import join_xml_data_path\n'), ((5809, 5822), 'tests.utils.empty_sites', 'empty_sites', ([], {}), '()\n', (5820, 5822), False, 'from tests.utils import empty_sites\n'), ((5840, 5877), 'scripts.reflinks.main', 'main', (['filename', '"""-namespace:1"""', 'start'], {}), "(filename, '-namespace:1', start)\n", (5844, 5877), False, 'from scripts.reflinks import ReferencesRobot, XmlDumpPageGenerator, main\n'), ((6220, 6260), 'tests.join_xml_data_path', 'join_xml_data_path', (['"""dummy-reflinks.xml"""'], {}), "('dummy-reflinks.xml')\n", (6238, 6260), False, 'from tests import join_xml_data_path\n')] |
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 4 17:01:28 2021
@author: fahim
"""
from keras.models import Model
from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D
from keras.initializers import glorot_uniform
def identity_block(X, f, filters, stage, block):
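    # ResNet identity block: a 1x1 -> fxf -> 1x1 bottleneck whose output is
    # added to the unmodified input (skip connection) before the final ReLU.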
conv_name_base = 'res' + str(stage) + block + '_branch'
bn_name_base = 'bn' + str(stage) + block + '_branch'
F1, F2, F3 = filters
X_shortcut = X
X = Conv2D(filters=F1, kernel_size=(1, 1), strides=(1, 1), padding='valid', name=conv_name_base + '2a', kernel_initializer=glorot_uniform(seed=0))(X)
X = BatchNormalization(axis=3, name=bn_name_base + '2a')(X)
X = Activation('relu')(X)
X = Conv2D(filters=F2, kernel_size=(f, f), strides=(1, 1), padding='same', name=conv_name_base + '2b', kernel_initializer=glorot_uniform(seed=0))(X)
X = BatchNormalization(axis=3, name=bn_name_base + '2b')(X)
X = Activation('relu')(X)
X = Conv2D(filters=F3, kernel_size=(1, 1), strides=(1, 1), padding='valid', name=conv_name_base + '2c', kernel_initializer=glorot_uniform(seed=0))(X)
X = BatchNormalization(axis=3, name=bn_name_base + '2c')(X)
X = Add()([X, X_shortcut])# SKIP Connection
X = Activation('relu')(X)
return X
def convolutional_block(X, f, filters, stage, block, s=2):
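    # Convolutional block: same bottleneck as the identity block, but the
    # first conv is strided and the shortcut goes through a strided 1x1 conv
    # so both branches agree in shape before the addition.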
conv_name_base = 'res' + str(stage) + block + '_branch'
bn_name_base = 'bn' + str(stage) + block + '_branch'
F1, F2, F3 = filters
X_shortcut = X
X = Conv2D(filters=F1, kernel_size=(1, 1), strides=(s, s), padding='valid', name=conv_name_base + '2a', kernel_initializer=glorot_uniform(seed=0))(X)
X = BatchNormalization(axis=3, name=bn_name_base + '2a')(X)
X = Activation('relu')(X)
X = Conv2D(filters=F2, kernel_size=(f, f), strides=(1, 1), padding='same', name=conv_name_base + '2b', kernel_initializer=glorot_uniform(seed=0))(X)
X = BatchNormalization(axis=3, name=bn_name_base + '2b')(X)
X = Activation('relu')(X)
X = Conv2D(filters=F3, kernel_size=(1, 1), strides=(1, 1), padding='valid', name=conv_name_base + '2c', kernel_initializer=glorot_uniform(seed=0))(X)
X = BatchNormalization(axis=3, name=bn_name_base + '2c')(X)
X_shortcut = Conv2D(filters=F3, kernel_size=(1, 1), strides=(s, s), padding='valid', name=conv_name_base + '1', kernel_initializer=glorot_uniform(seed=0))(X_shortcut)
X_shortcut = BatchNormalization(axis=3, name=bn_name_base + '1')(X_shortcut)
X = Add()([X, X_shortcut])
X = Activation('relu')(X)
return X
def ResNet50(input_shape=(224, 224, 3)):
X_input = Input(input_shape)
X = ZeroPadding2D((3, 3))(X_input)
X = Conv2D(64, (7, 7), strides=(2, 2), name='conv1', kernel_initializer=glorot_uniform(seed=0))(X)
X = BatchNormalization(axis=3, name='bn_conv1')(X)
X = Activation('relu')(X)
X = MaxPooling2D((3, 3), strides=(2, 2))(X)
X = convolutional_block(X, f=3, filters=[64, 64, 256], stage=2, block='a', s=1)
X = identity_block(X, 3, [64, 64, 256], stage=2, block='b')
X = identity_block(X, 3, [64, 64, 256], stage=2, block='c')
X = convolutional_block(X, f=3, filters=[128, 128, 512], stage=3, block='a', s=2)
X = identity_block(X, 3, [128, 128, 512], stage=3, block='b')
X = identity_block(X, 3, [128, 128, 512], stage=3, block='c')
X = identity_block(X, 3, [128, 128, 512], stage=3, block='d')
X = convolutional_block(X, f=3, filters=[256, 256, 1024], stage=4, block='a', s=2)
X = identity_block(X, 3, [256, 256, 1024], stage=4, block='b')
X = identity_block(X, 3, [256, 256, 1024], stage=4, block='c')
X = identity_block(X, 3, [256, 256, 1024], stage=4, block='d')
X = identity_block(X, 3, [256, 256, 1024], stage=4, block='e')
X = identity_block(X, 3, [256, 256, 1024], stage=4, block='f')
    X = convolutional_block(X, f=3, filters=[512, 512, 2048], stage=5, block='a', s=2)
X = identity_block(X, 3, [512, 512, 2048], stage=5, block='b')
X = identity_block(X, 3, [512, 512, 2048], stage=5, block='c')
X = AveragePooling2D(pool_size=(2, 2), padding='same')(X)
model = Model(inputs=X_input, outputs=X, name='ResNet50')
return model | [
"keras.layers.MaxPooling2D",
"keras.layers.ZeroPadding2D",
"keras.layers.AveragePooling2D",
"keras.layers.Input",
"keras.models.Model",
"keras.layers.Activation",
"keras.initializers.glorot_uniform",
"keras.layers.BatchNormalization",
"keras.layers.Add"
] | [((2641, 2659), 'keras.layers.Input', 'Input', (['input_shape'], {}), '(input_shape)\n', (2646, 2659), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((4165, 4214), 'keras.models.Model', 'Model', ([], {'inputs': 'X_input', 'outputs': 'X', 'name': '"""ResNet50"""'}), "(inputs=X_input, outputs=X, name='ResNet50')\n", (4170, 4214), False, 'from keras.models import Model\n'), ((667, 719), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(3)', 'name': "(bn_name_base + '2a')"}), "(axis=3, name=bn_name_base + '2a')\n", (685, 719), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((731, 749), 'keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (741, 749), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((915, 967), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(3)', 'name': "(bn_name_base + '2b')"}), "(axis=3, name=bn_name_base + '2b')\n", (933, 967), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((979, 997), 'keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (989, 997), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((1164, 1216), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(3)', 'name': "(bn_name_base + '2c')"}), "(axis=3, name=bn_name_base + '2c')\n", (1182, 1216), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((1229, 1234), 'keras.layers.Add', 'Add', ([], {}), '()\n', (1232, 1234), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((1277, 1295), 'keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (1287, 1295), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((1702, 1754), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(3)', 'name': "(bn_name_base + '2a')"}), "(axis=3, name=bn_name_base + '2a')\n", (1720, 1754), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((1766, 1784), 'keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (1776, 1784), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((1950, 2002), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(3)', 'name': "(bn_name_base + '2b')"}), "(axis=3, name=bn_name_base + '2b')\n", (1968, 2002), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((2014, 2032), 'keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (2024, 2032), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((2199, 2251), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(3)', 
'name': "(bn_name_base + '2c')"}), "(axis=3, name=bn_name_base + '2c')\n", (2217, 2251), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((2444, 2495), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(3)', 'name': "(bn_name_base + '1')"}), "(axis=3, name=bn_name_base + '1')\n", (2462, 2495), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((2517, 2522), 'keras.layers.Add', 'Add', ([], {}), '()\n', (2520, 2522), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((2548, 2566), 'keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (2558, 2566), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((2669, 2690), 'keras.layers.ZeroPadding2D', 'ZeroPadding2D', (['(3, 3)'], {}), '((3, 3))\n', (2682, 2690), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((2812, 2855), 'keras.layers.BatchNormalization', 'BatchNormalization', ([], {'axis': '(3)', 'name': '"""bn_conv1"""'}), "(axis=3, name='bn_conv1')\n", (2830, 2855), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((2867, 2885), 'keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (2877, 2885), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((2897, 2933), 'keras.layers.MaxPooling2D', 'MaxPooling2D', (['(3, 3)'], {'strides': '(2, 2)'}), '((3, 3), strides=(2, 2))\n', (2909, 2933), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((4094, 4144), 'keras.layers.AveragePooling2D', 'AveragePooling2D', ([], {'pool_size': '(2, 2)', 'padding': '"""same"""'}), "(pool_size=(2, 2), padding='same')\n", (4110, 4144), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((632, 654), 'keras.initializers.glorot_uniform', 'glorot_uniform', ([], {'seed': '(0)'}), '(seed=0)\n', (646, 654), False, 'from keras.initializers import glorot_uniform\n'), ((880, 902), 'keras.initializers.glorot_uniform', 'glorot_uniform', ([], {'seed': '(0)'}), '(seed=0)\n', (894, 902), False, 'from keras.initializers import glorot_uniform\n'), ((1129, 1151), 'keras.initializers.glorot_uniform', 'glorot_uniform', ([], {'seed': '(0)'}), '(seed=0)\n', (1143, 1151), False, 'from keras.initializers import glorot_uniform\n'), ((1667, 1689), 'keras.initializers.glorot_uniform', 'glorot_uniform', ([], {'seed': '(0)'}), '(seed=0)\n', (1681, 1689), False, 'from keras.initializers import glorot_uniform\n'), ((1915, 1937), 'keras.initializers.glorot_uniform', 'glorot_uniform', ([], {'seed': '(0)'}), '(seed=0)\n', (1929, 1937), False, 'from keras.initializers import glorot_uniform\n'), ((2164, 2186), 'keras.initializers.glorot_uniform', 'glorot_uniform', ([], {'seed': '(0)'}), '(seed=0)\n', (2178, 2186), False, 'from keras.initializers import glorot_uniform\n'), ((2391, 2413), 'keras.initializers.glorot_uniform', 'glorot_uniform', ([], {'seed': '(0)'}), '(seed=0)\n', 
(2405, 2413), False, 'from keras.initializers import glorot_uniform\n'), ((2777, 2799), 'keras.initializers.glorot_uniform', 'glorot_uniform', ([], {'seed': '(0)'}), '(seed=0)\n', (2791, 2799), False, 'from keras.initializers import glorot_uniform\n')] |
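The record defines the ResNet50 builder but never exercises it; a minimal smoke test, under the assumption that the code above is importable as written (input shape is just the function's default):
import numpy as np
# Build the network and run a single random image through it.
model = ResNet50(input_shape=(224, 224, 3))
print(model.count_params())
dummy = np.random.rand(1, 224, 224, 3).astype('float32')
features = model.predict(dummy)
print(features.shape)  # pooled 2048-channel feature map; no classifier head is attached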
# -*- coding: utf-8 -*-
"""Console script for bioinf."""
import sys
import click
from .sequence import Sequence
from .sequence_alignment import NeedlemanWunschSequenceAlignmentAlgorithm
from .utils import read_config, read_sequence
@click.group()
def main(args=None):
"""Console script for bioinf."""
@main.command()
@click.option("-a", type=click.Path(exists=True), required=True)
@click.option("-b", type=click.Path(exists=True), required=True)
@click.option("-c", type=click.Path(exists=True), required=True)
@click.option("-o", type=click.Path())
def align(a: str, b: str, c: str, o: str):
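    # Read both sequences and the scoring config, run Needleman-Wunsch, and
    # either write the alignment to the -o path or echo it to stdout.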
try:
left_sequence: Sequence = read_sequence(a)
right_sequence: Sequence = read_sequence(b)
config = read_config(c)
algorithm = NeedlemanWunschSequenceAlignmentAlgorithm(config)
result = algorithm.align(left_sequence, right_sequence)
if o:
with open(o, "w") as f:
f.write(str(result))
else:
click.echo(result)
except Exception as e:
click.echo(str(e))
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
| [
"click.group",
"click.echo",
"click.Path"
] | [((236, 249), 'click.group', 'click.group', ([], {}), '()\n', (247, 249), False, 'import click\n'), ((351, 374), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (361, 374), False, 'import click\n'), ((416, 439), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (426, 439), False, 'import click\n'), ((481, 504), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (491, 504), False, 'import click\n'), ((546, 558), 'click.Path', 'click.Path', ([], {}), '()\n', (556, 558), False, 'import click\n'), ((994, 1012), 'click.echo', 'click.echo', (['result'], {}), '(result)\n', (1004, 1012), False, 'import click\n')] |
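To exercise the `align` command without a real shell, click's bundled test runner can drive it; this sketch assumes the `main` group above is importable, and the input file names are placeholders created on the fly because `click.Path(exists=True)` insists they exist:
from click.testing import CliRunner
runner = CliRunner()
with runner.isolated_filesystem():
    # The option types require existing paths, so create empty stand-ins.
    for name in ('left.seq', 'right.seq', 'scoring.cfg'):
        open(name, 'w').close()
    result = runner.invoke(main, ['align', '-a', 'left.seq', '-b', 'right.seq', '-c', 'scoring.cfg'])
    print(result.exit_code, result.output)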
from __future__ import unicode_literals
import unittest
from mopidy.local import json
from mopidy.models import Ref
class BrowseCacheTest(unittest.TestCase):
def setUp(self):
self.uris = [b'local:track:foo/bar/song1',
b'local:track:foo/bar/song2',
b'local:track:foo/song3']
self.cache = json._BrowseCache(self.uris)
def test_lookup_root(self):
expected = [Ref.directory(uri='local:directory:foo', name='foo')]
self.assertEqual(expected, self.cache.lookup('local:directory'))
def test_lookup_foo(self):
expected = [Ref.directory(uri='local:directory:foo/bar', name='bar'),
Ref.track(uri=self.uris[2], name='song3')]
self.assertEqual(expected, self.cache.lookup('local:directory:foo'))
def test_lookup_foo_bar(self):
expected = [Ref.track(uri=self.uris[0], name='song1'),
Ref.track(uri=self.uris[1], name='song2')]
self.assertEqual(
expected, self.cache.lookup('local:directory:foo/bar'))
def test_lookup_foo_baz(self):
self.assertEqual([], self.cache.lookup('local:directory:foo/baz'))
| [
"mopidy.models.Ref.directory",
"mopidy.models.Ref.track",
"mopidy.local.json._BrowseCache"
] | [((353, 381), 'mopidy.local.json._BrowseCache', 'json._BrowseCache', (['self.uris'], {}), '(self.uris)\n', (370, 381), False, 'from mopidy.local import json\n'), ((435, 487), 'mopidy.models.Ref.directory', 'Ref.directory', ([], {'uri': '"""local:directory:foo"""', 'name': '"""foo"""'}), "(uri='local:directory:foo', name='foo')\n", (448, 487), False, 'from mopidy.models import Ref\n'), ((614, 670), 'mopidy.models.Ref.directory', 'Ref.directory', ([], {'uri': '"""local:directory:foo/bar"""', 'name': '"""bar"""'}), "(uri='local:directory:foo/bar', name='bar')\n", (627, 670), False, 'from mopidy.models import Ref\n'), ((692, 733), 'mopidy.models.Ref.track', 'Ref.track', ([], {'uri': 'self.uris[2]', 'name': '"""song3"""'}), "(uri=self.uris[2], name='song3')\n", (701, 733), False, 'from mopidy.models import Ref\n'), ((868, 909), 'mopidy.models.Ref.track', 'Ref.track', ([], {'uri': 'self.uris[0]', 'name': '"""song1"""'}), "(uri=self.uris[0], name='song1')\n", (877, 909), False, 'from mopidy.models import Ref\n'), ((931, 972), 'mopidy.models.Ref.track', 'Ref.track', ([], {'uri': 'self.uris[1]', 'name': '"""song2"""'}), "(uri=self.uris[1], name='song2')\n", (940, 972), False, 'from mopidy.models import Ref\n')] |
# coding=utf-8
from OTLMOW.OTLModel.Datatypes.KeuzelijstField import KeuzelijstField
from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde
# Generated with OTLEnumerationCreator. To modify: extend, do not edit
class KlVerlichtingstoestelModelnaam(KeuzelijstField):
"""De modelnaam van het verlichtingstoestel."""
naam = 'KlVerlichtingstoestelModelnaam'
label = 'Verlichtingstoestel modelnaam'
objectUri = 'https://wegenenverkeer.data.vlaanderen.be/ns/abstracten#KlVerlichtingstoestelModelnaam'
definition = 'De modelnaam van het verlichtingstoestel.'
codelist = 'https://wegenenverkeer.data.vlaanderen.be/id/conceptscheme/KlVerlichtingstoestelModelnaam'
options = {
'ARC': KeuzelijstWaarde(invulwaarde='ARC',
label='ARC',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/ARC'),
'Belgica': KeuzelijstWaarde(invulwaarde='Belgica',
label='Belgica',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Belgica'),
'Calypso': KeuzelijstWaarde(invulwaarde='Calypso',
label='Calypso',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Calypso'),
'Corus': KeuzelijstWaarde(invulwaarde='Corus',
label='Corus',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Corus'),
'DTN': KeuzelijstWaarde(invulwaarde='DTN',
label='DTN',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/DTN'),
'Evolo': KeuzelijstWaarde(invulwaarde='Evolo',
label='Evolo',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Evolo'),
'Focal': KeuzelijstWaarde(invulwaarde='Focal',
label='Focal',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Focal'),
'GSM': KeuzelijstWaarde(invulwaarde='GSM',
label='GSM',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/GSM'),
'GTMB': KeuzelijstWaarde(invulwaarde='GTMB',
label='GTMB',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/GTMB'),
'GTNB': KeuzelijstWaarde(invulwaarde='GTNB',
label='GTNB',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/GTNB'),
'GZM': KeuzelijstWaarde(invulwaarde='GZM',
label='GZM',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/GZM'),
'Gema': KeuzelijstWaarde(invulwaarde='Gema',
label='Gema',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Gema'),
'HCI-TS': KeuzelijstWaarde(invulwaarde='HCI-TS',
label='HCI-TS',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/HCI-TS'),
'Iridium': KeuzelijstWaarde(invulwaarde='Iridium',
label='Iridium',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Iridium'),
'MNF300': KeuzelijstWaarde(invulwaarde='MNF300',
label='MNF300',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/MNF300'),
'MWF230': KeuzelijstWaarde(invulwaarde='MWF230',
label='MWF230',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/MWF230'),
'MY11': KeuzelijstWaarde(invulwaarde='MY11',
label='MY11',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/MY11'),
'Neos': KeuzelijstWaarde(invulwaarde='Neos',
label='Neos',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Neos'),
'Onyx': KeuzelijstWaarde(invulwaarde='Onyx',
label='Onyx',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Onyx'),
'RT3NB': KeuzelijstWaarde(invulwaarde='RT3NB',
label='RT3NB',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/RT3NB'),
'RT3SB': KeuzelijstWaarde(invulwaarde='RT3SB',
label='RT3SB',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/RT3SB'),
'RXN': KeuzelijstWaarde(invulwaarde='RXN',
label='RXN',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/RXN'),
'RXS': KeuzelijstWaarde(invulwaarde='RXS',
label='RXS',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/RXS'),
'Radial': KeuzelijstWaarde(invulwaarde='Radial',
label='Radial',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Radial'),
'SRS201': KeuzelijstWaarde(invulwaarde='SRS201',
label='SRS201',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/SRS201'),
'Safir': KeuzelijstWaarde(invulwaarde='Safir',
label='Safir',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Safir'),
'Saturnus': KeuzelijstWaarde(invulwaarde='Saturnus',
label='Saturnus',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Saturnus'),
'Squalo': KeuzelijstWaarde(invulwaarde='Squalo',
label='Squalo',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Squalo'),
'Syntra': KeuzelijstWaarde(invulwaarde='Syntra',
label='Syntra',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Syntra'),
'VTP': KeuzelijstWaarde(invulwaarde='VTP',
label='VTP',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/VTP'),
'Z18': KeuzelijstWaarde(invulwaarde='Z18',
label='Z18',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Z18'),
'Z2': KeuzelijstWaarde(invulwaarde='Z2',
label='Z2',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Z2'),
'Z21': KeuzelijstWaarde(invulwaarde='Z21',
label='Z21',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Z21'),
'ampera': KeuzelijstWaarde(invulwaarde='ampera',
label='Ampera',
definitie='Ampera',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/ampera'),
'andere': KeuzelijstWaarde(invulwaarde='andere',
label='andere',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/andere'),
'brugleuning': KeuzelijstWaarde(invulwaarde='brugleuning',
label='brugleuning',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/brugleuning'),
'clear-field': KeuzelijstWaarde(invulwaarde='clear-field',
label='ClearField',
definitie='ClearField',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/clear-field'),
'digi-street': KeuzelijstWaarde(invulwaarde='digi-street',
label='DigiStreet',
definitie='DigiStreet',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/digi-street'),
'izylum': KeuzelijstWaarde(invulwaarde='izylum',
label='Izylum',
definitie='Izylum',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/izylum'),
'luma': KeuzelijstWaarde(invulwaarde='luma',
label='Luma',
definitie='Luma',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/luma'),
'lumi-street': KeuzelijstWaarde(invulwaarde='lumi-street',
label='LumiStreet',
definitie='LumiStreet',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/lumi-street'),
'projector': KeuzelijstWaarde(invulwaarde='projector',
label='projector',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/projector'),
'teceo': KeuzelijstWaarde(invulwaarde='teceo',
label='Teceo',
definitie='Teceo',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/teceo')
}
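# Illustrative lookup (not part of the generated file): every entry in
# `options` maps an invulwaarde key to a KeuzelijstWaarde, so e.g.
#   waarde = KlVerlichtingstoestelModelnaam.options['ampera']
#   waarde.label       # 'Ampera'
#   waarde.objectUri   # '...id/concept/KlVerlichtingstoestelModelnaam/ampera'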
| [
"OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde"
] | [((729, 888), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""ARC"""', 'label': '"""ARC"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/ARC"""'}), "(invulwaarde='ARC', label='ARC', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/ARC'\n )\n", (745, 888), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((963, 1134), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""Belgica"""', 'label': '"""Belgica"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Belgica"""'}), "(invulwaarde='Belgica', label='Belgica', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Belgica'\n )\n", (979, 1134), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((1217, 1388), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""Calypso"""', 'label': '"""Calypso"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Calypso"""'}), "(invulwaarde='Calypso', label='Calypso', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Calypso'\n )\n", (1233, 1388), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((1469, 1634), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""Corus"""', 'label': '"""Corus"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Corus"""'}), "(invulwaarde='Corus', label='Corus', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Corus'\n )\n", (1485, 1634), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((1709, 1868), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""DTN"""', 'label': '"""DTN"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/DTN"""'}), "(invulwaarde='DTN', label='DTN', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/DTN'\n )\n", (1725, 1868), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((1941, 2106), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""Evolo"""', 'label': '"""Evolo"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Evolo"""'}), "(invulwaarde='Evolo', label='Evolo', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Evolo'\n )\n", (1957, 2106), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((2183, 2348), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""Focal"""', 'label': '"""Focal"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Focal"""'}), "(invulwaarde='Focal', label='Focal', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Focal'\n )\n", 
(2199, 2348), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((2423, 2582), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""GSM"""', 'label': '"""GSM"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/GSM"""'}), "(invulwaarde='GSM', label='GSM', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/GSM'\n )\n", (2439, 2582), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((2654, 2816), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""GTMB"""', 'label': '"""GTMB"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/GTMB"""'}), "(invulwaarde='GTMB', label='GTMB', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/GTMB'\n )\n", (2670, 2816), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((2890, 3052), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""GTNB"""', 'label': '"""GTNB"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/GTNB"""'}), "(invulwaarde='GTNB', label='GTNB', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/GTNB'\n )\n", (2906, 3052), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((3125, 3284), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""GZM"""', 'label': '"""GZM"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/GZM"""'}), "(invulwaarde='GZM', label='GZM', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/GZM'\n )\n", (3141, 3284), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((3356, 3518), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""Gema"""', 'label': '"""Gema"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Gema"""'}), "(invulwaarde='Gema', label='Gema', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Gema'\n )\n", (3372, 3518), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((3594, 3762), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""HCI-TS"""', 'label': '"""HCI-TS"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/HCI-TS"""'}), "(invulwaarde='HCI-TS', label='HCI-TS', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/HCI-TS'\n )\n", (3610, 3762), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((3843, 4014), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""Iridium"""', 'label': '"""Iridium"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Iridium"""'}), "(invulwaarde='Iridium', label='Iridium', objectUri=\n 
'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Iridium'\n )\n", (3859, 4014), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((4096, 4264), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""MNF300"""', 'label': '"""MNF300"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/MNF300"""'}), "(invulwaarde='MNF300', label='MNF300', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/MNF300'\n )\n", (4112, 4264), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((4344, 4512), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""MWF230"""', 'label': '"""MWF230"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/MWF230"""'}), "(invulwaarde='MWF230', label='MWF230', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/MWF230'\n )\n", (4360, 4512), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((4590, 4752), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""MY11"""', 'label': '"""MY11"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/MY11"""'}), "(invulwaarde='MY11', label='MY11', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/MY11'\n )\n", (4606, 4752), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((4826, 4988), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""Neos"""', 'label': '"""Neos"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Neos"""'}), "(invulwaarde='Neos', label='Neos', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Neos'\n )\n", (4842, 4988), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((5062, 5224), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""Onyx"""', 'label': '"""Onyx"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Onyx"""'}), "(invulwaarde='Onyx', label='Onyx', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Onyx'\n )\n", (5078, 5224), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((5299, 5464), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""RT3NB"""', 'label': '"""RT3NB"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/RT3NB"""'}), "(invulwaarde='RT3NB', label='RT3NB', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/RT3NB'\n )\n", (5315, 5464), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((5541, 5706), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""RT3SB"""', 'label': '"""RT3SB"""', 'objectUri': 
'"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/RT3SB"""'}), "(invulwaarde='RT3SB', label='RT3SB', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/RT3SB'\n )\n", (5557, 5706), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((5781, 5940), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""RXN"""', 'label': '"""RXN"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/RXN"""'}), "(invulwaarde='RXN', label='RXN', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/RXN'\n )\n", (5797, 5940), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((6011, 6170), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""RXS"""', 'label': '"""RXS"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/RXS"""'}), "(invulwaarde='RXS', label='RXS', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/RXS'\n )\n", (6027, 6170), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((6244, 6412), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""Radial"""', 'label': '"""Radial"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Radial"""'}), "(invulwaarde='Radial', label='Radial', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Radial'\n )\n", (6260, 6412), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((6492, 6660), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""SRS201"""', 'label': '"""SRS201"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/SRS201"""'}), "(invulwaarde='SRS201', label='SRS201', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/SRS201'\n )\n", (6508, 6660), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((6739, 6904), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""Safir"""', 'label': '"""Safir"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Safir"""'}), "(invulwaarde='Safir', label='Safir', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Safir'\n )\n", (6755, 6904), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((6984, 7158), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""Saturnus"""', 'label': '"""Saturnus"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Saturnus"""'}), "(invulwaarde='Saturnus', label='Saturnus', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Saturnus'\n )\n", (7000, 7158), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((7242, 7410), 
'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""Squalo"""', 'label': '"""Squalo"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Squalo"""'}), "(invulwaarde='Squalo', label='Squalo', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Squalo'\n )\n", (7258, 7410), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((7490, 7658), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""Syntra"""', 'label': '"""Syntra"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Syntra"""'}), "(invulwaarde='Syntra', label='Syntra', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Syntra'\n )\n", (7506, 7658), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((7735, 7894), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""VTP"""', 'label': '"""VTP"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/VTP"""'}), "(invulwaarde='VTP', label='VTP', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/VTP'\n )\n", (7751, 7894), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((7965, 8124), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""Z18"""', 'label': '"""Z18"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Z18"""'}), "(invulwaarde='Z18', label='Z18', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Z18'\n )\n", (7981, 8124), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((8194, 8350), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""Z2"""', 'label': '"""Z2"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Z2"""'}), "(invulwaarde='Z2', label='Z2', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Z2'\n )\n", (8210, 8350), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((8419, 8578), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""Z21"""', 'label': '"""Z21"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Z21"""'}), "(invulwaarde='Z21', label='Z21', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/Z21'\n )\n", (8435, 8578), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((8652, 8844), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""ampera"""', 'label': '"""Ampera"""', 'definitie': '"""Ampera"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/ampera"""'}), "(invulwaarde='ampera', label='Ampera', definitie='Ampera',\n objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/ampera'\n )\n", 
(8668, 8844), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((8955, 9123), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""andere"""', 'label': '"""andere"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/andere"""'}), "(invulwaarde='andere', label='andere', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/andere'\n )\n", (8971, 9123), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((9208, 9391), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""brugleuning"""', 'label': '"""brugleuning"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/brugleuning"""'}), "(invulwaarde='brugleuning', label='brugleuning', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/brugleuning'\n )\n", (9224, 9391), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((9486, 9697), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""clear-field"""', 'label': '"""ClearField"""', 'definitie': '"""ClearField"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/clear-field"""'}), "(invulwaarde='clear-field', label='ClearField', definitie=\n 'ClearField', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/clear-field'\n )\n", (9502, 9697), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((9827, 10038), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""digi-street"""', 'label': '"""DigiStreet"""', 'definitie': '"""DigiStreet"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/digi-street"""'}), "(invulwaarde='digi-street', label='DigiStreet', definitie=\n 'DigiStreet', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/digi-street'\n )\n", (9843, 10038), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((10163, 10355), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""izylum"""', 'label': '"""Izylum"""', 'definitie': '"""Izylum"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/izylum"""'}), "(invulwaarde='izylum', label='Izylum', definitie='Izylum',\n objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/izylum'\n )\n", (10179, 10355), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((10464, 10648), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""luma"""', 'label': '"""Luma"""', 'definitie': '"""Luma"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/luma"""'}), "(invulwaarde='luma', label='Luma', definitie='Luma',\n objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/luma'\n )\n", (10480, 10648), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import 
KeuzelijstWaarde\n'), ((10758, 10969), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""lumi-street"""', 'label': '"""LumiStreet"""', 'definitie': '"""LumiStreet"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/lumi-street"""'}), "(invulwaarde='lumi-street', label='LumiStreet', definitie=\n 'LumiStreet', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/lumi-street'\n )\n", (10774, 10969), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((11097, 11274), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""projector"""', 'label': '"""projector"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/projector"""'}), "(invulwaarde='projector', label='projector', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/projector'\n )\n", (11113, 11274), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n'), ((11359, 11547), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""teceo"""', 'label': '"""Teceo"""', 'definitie': '"""Teceo"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/teceo"""'}), "(invulwaarde='teceo', label='Teceo', definitie='Teceo',\n objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/teceo'\n )\n", (11375, 11547), False, 'from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde\n')] |
import string
from unittest2 import TestCase
import os
from hypothesis import given
from hypothesis.strategies import text, lists
from mock import patch, Mock
from githooks import repo
class FakeDiffObject(object):
def __init__(self, a_path, b_path, new, deleted):
self.a_path = a_path
self.b_path = b_path
self.new_file = new
self.deleted_file = deleted
class RepoGet(TestCase):
@patch('githooks.repo.git')
def test_result_is_repo_created_from_the_parent_of_script_directory(self, git_mock):
git_mock.Repo = Mock(return_value='git repo')
repo_obj = repo.get()
self.assertEqual('git repo', repo_obj)
git_mock.Repo.assert_called_once_with(
os.getcwd(),
search_parent_directories=True,
)
class RepoRepoRoot(TestCase):
@patch('githooks.repo.get')
def test_result_is_the_parent_directory_of_the_git_directory(self, get_mock):
git_dir = os.path.dirname(__file__)
result = Mock()
result.git_dir = git_dir
get_mock.return_value = result
self.assertEqual(os.path.dirname(git_dir), repo.repo_root())
class RepoUntrackedFiles(TestCase):
@patch('githooks.repo.get')
def test_result_is_untracked_files_from_the_repo_object(self, get_mock):
git_dir = os.path.dirname(__file__)
result = Mock()
result.untracked_files = ['untracked files']
result.git_dir = git_dir
get_mock.return_value = result
files = repo.untracked_files()
self.assertListEqual([os.path.join(repo.repo_root(), 'untracked files')], files)
class RepoModifiedFiles(TestCase):
@given(
lists(text(min_size=1, max_size=10, alphabet=string.ascii_letters), max_size=10),
lists(text(min_size=1, max_size=10, alphabet=string.ascii_letters), max_size=10),
lists(text(min_size=1, max_size=10, alphabet=string.ascii_letters), max_size=10),
)
def test_result_is_the_absolute_paths_to_all_changed_but_not_new_or_deleted_files(self, mod, new, deleted):
mod_diffs = [FakeDiffObject(f, f, False, False) for f in mod]
new_diffs = [FakeDiffObject(None, f, True, False) for f in new]
deleted_diffs = [FakeDiffObject(None, f, False, True) for f in deleted]
with patch('githooks.repo.get') as get_mock:
git_dir = os.path.dirname(__file__)
result = Mock()
result.head.commit.diff = Mock(return_value=mod_diffs + new_diffs + deleted_diffs)
result.git_dir = git_dir
get_mock.return_value = result
files = repo.modified_files()
self.assertEqual([os.path.join(repo.repo_root(), f) for f in mod], files)
result.head.commit.diff.assert_called_once_with()
class RepoAddedFiles(TestCase):
@given(
lists(text(min_size=1, max_size=10, alphabet=string.ascii_letters), max_size=10),
lists(text(min_size=1, max_size=10, alphabet=string.ascii_letters), max_size=10),
lists(text(min_size=1, max_size=10, alphabet=string.ascii_letters), max_size=10),
)
def test_result_is_the_absolute_paths_to_all_new_but_not_modified_or_deleted_files(self, mod, new, deleted):
mod_diffs = [FakeDiffObject(f, f, False, False) for f in mod]
new_diffs = [FakeDiffObject(None, f, True, False) for f in new]
deleted_diffs = [FakeDiffObject(None, f, False, True) for f in deleted]
with patch('githooks.repo.get') as get_mock:
git_dir = os.path.dirname(__file__)
result = Mock()
result.head.commit.diff = Mock(return_value=mod_diffs + new_diffs + deleted_diffs)
result.git_dir = git_dir
get_mock.return_value = result
files = repo.added_files()
self.assertEqual([os.path.join(repo.repo_root(), f) for f in new], files)
result.head.commit.diff.assert_called_once_with()
class RepoDeletedFiles(TestCase):
@given(
lists(text(min_size=1, max_size=10, alphabet=string.ascii_letters), max_size=10),
lists(text(min_size=1, max_size=10, alphabet=string.ascii_letters), max_size=10),
lists(text(min_size=1, max_size=10, alphabet=string.ascii_letters), max_size=10),
)
def test_result_is_the_absolute_paths_to_all_deleted_but_not_new_or_modified_files(self, mod, new, deleted):
mod_diffs = [FakeDiffObject(f, f, False, False) for f in mod]
new_diffs = [FakeDiffObject(None, f, True, False) for f in new]
deleted_diffs = [FakeDiffObject(None, f, False, True) for f in deleted]
with patch('githooks.repo.get') as get_mock:
git_dir = os.path.dirname(__file__)
result = Mock()
result.head.commit.diff = Mock(return_value=mod_diffs + new_diffs + deleted_diffs)
result.git_dir = git_dir
get_mock.return_value = result
files = repo.deleted_files()
self.assertEqual([os.path.join(repo.repo_root(), f) for f in deleted], files)
result.head.commit.diff.assert_called_once_with()
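# The three property-based test classes above pin down the same contract; a
# minimal sketch of what githooks.repo presumably implements (hypothetical --
# the real module may differ in details):
#
#   def modified_files():
#       r = get()
#       root = repo_root()
#       return [os.path.join(root, d.b_path)
#               for d in r.head.commit.diff()
#               if not d.new_file and not d.deleted_file]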
| [
"hypothesis.strategies.text",
"mock.patch",
"githooks.repo.get",
"mock.Mock",
"githooks.repo.added_files",
"githooks.repo.deleted_files",
"os.getcwd",
"os.path.dirname",
"githooks.repo.repo_root",
"githooks.repo.untracked_files",
"githooks.repo.modified_files"
] | [((427, 453), 'mock.patch', 'patch', (['"""githooks.repo.git"""'], {}), "('githooks.repo.git')\n", (432, 453), False, 'from mock import patch, Mock\n'), ((839, 865), 'mock.patch', 'patch', (['"""githooks.repo.get"""'], {}), "('githooks.repo.get')\n", (844, 865), False, 'from mock import patch, Mock\n'), ((1202, 1228), 'mock.patch', 'patch', (['"""githooks.repo.get"""'], {}), "('githooks.repo.get')\n", (1207, 1228), False, 'from mock import patch, Mock\n'), ((567, 596), 'mock.Mock', 'Mock', ([], {'return_value': '"""git repo"""'}), "(return_value='git repo')\n", (571, 596), False, 'from mock import patch, Mock\n'), ((617, 627), 'githooks.repo.get', 'repo.get', ([], {}), '()\n', (625, 627), False, 'from githooks import repo\n'), ((966, 991), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (981, 991), False, 'import os\n'), ((1009, 1015), 'mock.Mock', 'Mock', ([], {}), '()\n', (1013, 1015), False, 'from mock import patch, Mock\n'), ((1324, 1349), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1339, 1349), False, 'import os\n'), ((1368, 1374), 'mock.Mock', 'Mock', ([], {}), '()\n', (1372, 1374), False, 'from mock import patch, Mock\n'), ((1518, 1540), 'githooks.repo.untracked_files', 'repo.untracked_files', ([], {}), '()\n', (1538, 1540), False, 'from githooks import repo\n'), ((735, 746), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (744, 746), False, 'import os\n'), ((1115, 1139), 'os.path.dirname', 'os.path.dirname', (['git_dir'], {}), '(git_dir)\n', (1130, 1139), False, 'import os\n'), ((1141, 1157), 'githooks.repo.repo_root', 'repo.repo_root', ([], {}), '()\n', (1155, 1157), False, 'from githooks import repo\n'), ((2304, 2330), 'mock.patch', 'patch', (['"""githooks.repo.get"""'], {}), "('githooks.repo.get')\n", (2309, 2330), False, 'from mock import patch, Mock\n'), ((2366, 2391), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2381, 2391), False, 'import os\n'), ((2414, 2420), 'mock.Mock', 'Mock', ([], {}), '()\n', (2418, 2420), False, 'from mock import patch, Mock\n'), ((2459, 2515), 'mock.Mock', 'Mock', ([], {'return_value': '(mod_diffs + new_diffs + deleted_diffs)'}), '(return_value=mod_diffs + new_diffs + deleted_diffs)\n', (2463, 2515), False, 'from mock import patch, Mock\n'), ((2618, 2639), 'githooks.repo.modified_files', 'repo.modified_files', ([], {}), '()\n', (2637, 2639), False, 'from githooks import repo\n'), ((1694, 1754), 'hypothesis.strategies.text', 'text', ([], {'min_size': '(1)', 'max_size': '(10)', 'alphabet': 'string.ascii_letters'}), '(min_size=1, max_size=10, alphabet=string.ascii_letters)\n', (1698, 1754), False, 'from hypothesis.strategies import text, lists\n'), ((1784, 1844), 'hypothesis.strategies.text', 'text', ([], {'min_size': '(1)', 'max_size': '(10)', 'alphabet': 'string.ascii_letters'}), '(min_size=1, max_size=10, alphabet=string.ascii_letters)\n', (1788, 1844), False, 'from hypothesis.strategies import text, lists\n'), ((1874, 1934), 'hypothesis.strategies.text', 'text', ([], {'min_size': '(1)', 'max_size': '(10)', 'alphabet': 'string.ascii_letters'}), '(min_size=1, max_size=10, alphabet=string.ascii_letters)\n', (1878, 1934), False, 'from hypothesis.strategies import text, lists\n'), ((3460, 3486), 'mock.patch', 'patch', (['"""githooks.repo.get"""'], {}), "('githooks.repo.get')\n", (3465, 3486), False, 'from mock import patch, Mock\n'), ((3522, 3547), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3537, 3547), False, 'import os\n'), ((3570, 
3576), 'mock.Mock', 'Mock', ([], {}), '()\n', (3574, 3576), False, 'from mock import patch, Mock\n'), ((3615, 3671), 'mock.Mock', 'Mock', ([], {'return_value': '(mod_diffs + new_diffs + deleted_diffs)'}), '(return_value=mod_diffs + new_diffs + deleted_diffs)\n', (3619, 3671), False, 'from mock import patch, Mock\n'), ((3774, 3792), 'githooks.repo.added_files', 'repo.added_files', ([], {}), '()\n', (3790, 3792), False, 'from githooks import repo\n'), ((2849, 2909), 'hypothesis.strategies.text', 'text', ([], {'min_size': '(1)', 'max_size': '(10)', 'alphabet': 'string.ascii_letters'}), '(min_size=1, max_size=10, alphabet=string.ascii_letters)\n', (2853, 2909), False, 'from hypothesis.strategies import text, lists\n'), ((2939, 2999), 'hypothesis.strategies.text', 'text', ([], {'min_size': '(1)', 'max_size': '(10)', 'alphabet': 'string.ascii_letters'}), '(min_size=1, max_size=10, alphabet=string.ascii_letters)\n', (2943, 2999), False, 'from hypothesis.strategies import text, lists\n'), ((3029, 3089), 'hypothesis.strategies.text', 'text', ([], {'min_size': '(1)', 'max_size': '(10)', 'alphabet': 'string.ascii_letters'}), '(min_size=1, max_size=10, alphabet=string.ascii_letters)\n', (3033, 3089), False, 'from hypothesis.strategies import text, lists\n'), ((4615, 4641), 'mock.patch', 'patch', (['"""githooks.repo.get"""'], {}), "('githooks.repo.get')\n", (4620, 4641), False, 'from mock import patch, Mock\n'), ((4677, 4702), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (4692, 4702), False, 'import os\n'), ((4725, 4731), 'mock.Mock', 'Mock', ([], {}), '()\n', (4729, 4731), False, 'from mock import patch, Mock\n'), ((4770, 4826), 'mock.Mock', 'Mock', ([], {'return_value': '(mod_diffs + new_diffs + deleted_diffs)'}), '(return_value=mod_diffs + new_diffs + deleted_diffs)\n', (4774, 4826), False, 'from mock import patch, Mock\n'), ((4929, 4949), 'githooks.repo.deleted_files', 'repo.deleted_files', ([], {}), '()\n', (4947, 4949), False, 'from githooks import repo\n'), ((4004, 4064), 'hypothesis.strategies.text', 'text', ([], {'min_size': '(1)', 'max_size': '(10)', 'alphabet': 'string.ascii_letters'}), '(min_size=1, max_size=10, alphabet=string.ascii_letters)\n', (4008, 4064), False, 'from hypothesis.strategies import text, lists\n'), ((4094, 4154), 'hypothesis.strategies.text', 'text', ([], {'min_size': '(1)', 'max_size': '(10)', 'alphabet': 'string.ascii_letters'}), '(min_size=1, max_size=10, alphabet=string.ascii_letters)\n', (4098, 4154), False, 'from hypothesis.strategies import text, lists\n'), ((4184, 4244), 'hypothesis.strategies.text', 'text', ([], {'min_size': '(1)', 'max_size': '(10)', 'alphabet': 'string.ascii_letters'}), '(min_size=1, max_size=10, alphabet=string.ascii_letters)\n', (4188, 4244), False, 'from hypothesis.strategies import text, lists\n'), ((1585, 1601), 'githooks.repo.repo_root', 'repo.repo_root', ([], {}), '()\n', (1599, 1601), False, 'from githooks import repo\n'), ((2684, 2700), 'githooks.repo.repo_root', 'repo.repo_root', ([], {}), '()\n', (2698, 2700), False, 'from githooks import repo\n'), ((3837, 3853), 'githooks.repo.repo_root', 'repo.repo_root', ([], {}), '()\n', (3851, 3853), False, 'from githooks import repo\n'), ((4994, 5010), 'githooks.repo.repo_root', 'repo.repo_root', ([], {}), '()\n', (5008, 5010), False, 'from githooks import repo\n')] |
# -*- coding: utf-8 -*-
from hmac import HMAC
from hashlib import sha256
import random
import logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
def pbkd(password,salt):
"""
    password must be an ASCII byte string; for some reason a
    unicode string provokes the following error:
    "TypeError: character mapping must return integer, None or unicode"
    TODO: should we check the string's type before it gets here?
"""
return HMAC(str(password),salt,sha256).digest()
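# Note (not in the original): the standard library exposes this kind of
# iterated HMAC stretching directly as hashlib.pbkdf2_hmac (available
# since Python 2.7.8 / 3.4), e.g.
#   hashlib.pbkdf2_hmac('sha256', password_bytes, salt, 1000)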
def randomSalt(num_bytes):
return "".join(chr(random.randrange(256)) for i in xrange(num_bytes))
def hash_password(plain_text):
    salt = randomSalt(8)
    # Feed each digest back in so the 1000 rounds actually stretch the key;
    # the original loop recomputed the identical digest on every pass.
    hashed_password = pbkd(plain_text, salt)
    for i in xrange(999):
        hashed_password = pbkd(hashed_password, salt)
    return salt.encode("base64").strip() + "," + hashed_password.encode("base64").strip()
def check_password(saved_pass, plain_pass):
    salt, hashed_p = saved_pass.split(",")
    salt = salt.decode("base64")
    hashed_p = hashed_p.decode("base64")
    # Recompute with the same iteration count used in hash_password.
    candidate = pbkd(plain_pass, salt)
    for i in xrange(999):
        candidate = pbkd(candidate, salt)
    return hashed_p == candidate | [
"logging.basicConfig",
"logging.getLogger",
"random.randrange"
] | [((102, 142), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (121, 142), False, 'import logging\n'), ((153, 180), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (170, 180), False, 'import logging\n'), ((579, 600), 'random.randrange', 'random.randrange', (['(256)'], {}), '(256)\n', (595, 600), False, 'import random\n')] |
import os
import subprocess
import pathlib
def reemplazar(string):
return string.replace('self.', 'self.w.').replace('Form"', 'self.w.centralWidget"').replace('Form.', 'self.w.centralWidget.').replace('Form)', 'self.w.centralWidget)').replace('"', "'")
url_archivo_py = None
try:
    url_archivo = input('File: ').strip().strip('"').strip("'")
nombre, extension = os.path.splitext(os.path.basename(url_archivo))
nombre_clase = nombre.title().replace('_', '')
if extension == '.ui':
        # Generate the Python code from the .ui file
url_archivo_py = pathlib.Path(url_archivo).with_suffix('.py')
subprocess.Popen(f'pyside2-uic {url_archivo} -o {url_archivo_py}', shell=True, stdout=subprocess.PIPE).wait()
        # ---------- Start reformatting the .py file ----------
lineas_resultado = []
        # Read the file's lines
with open(url_archivo_py) as file:
lineas = file.readlines()
i = 0
        # Skip the comments at the top
while 'from PySide2 import' not in lineas[i]:
i += 1
        # Start with the class
while 'class' not in lineas[i]:
lineas_resultado.append(reemplazar(lineas[i]))
i += 1
lineas_resultado.append("\n")
lineas_resultado.append(f"class {nombre_clase}:\n")
lineas_resultado.append(f" def __init__(self, window):\n")
lineas_resultado.append(f" self.w = window\n")
lineas_resultado.append("\n")
lineas_resultado.append(f" self.setup_gui()\n")
lineas_resultado.append("\n")
lineas_resultado.append(f" def setup_gui(self):\n")
lineas_resultado.append(f" self.w.centralWidget = QtWidgets.QWidget(self.w)\n")
lineas_resultado.append(f" self.w.centralWidget.setObjectName('centralWidget')\n")
        # Skip the next 3 lines (def, Form.set, Form.resize) and move to the one after
i += 4
        # Copy until a blank line
while lineas[i] != '\n':
lineas_resultado.append(reemplazar(lineas[i]))
i += 1
        # Attach the widget to the view
lineas_resultado.append(' self.w.setCentralWidget(self.w.centralWidget)\n')
        # Copy the blank line
lineas_resultado.append(reemplazar(lineas[i]))
        # Skip ahead to the setText() calls
while 'Form.' not in lineas[i]:
i += 1
        # Skip over the Form.setWindowTitle() line
i += 1
        # Transform the setText() lines
for linea in lineas[i:]:
lineas_resultado.append(reemplazar(
linea.replace('QtWidgets.QApplication.translate("Form", ', '').replace(', None, -1)', '')))
lineas_resultado.append(' def connect_signals(self, controller):\n')
lineas_resultado.append(' pass\n')
        # Overwrite the .py file
with open(url_archivo_py, 'w', encoding='utf-8') as file:
file.writelines(lineas_resultado)
except Exception as e:
    # Clean up only if the generated .py file actually exists; the original
    # handler raised a NameError whenever the failure happened before
    # url_archivo_py was assigned.
    if url_archivo_py is not None and os.path.exists(url_archivo_py):
        os.remove(url_archivo_py)
    print(f'{e.__class__.__name__}: {e}')
input('Press any key to continue...')
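# For an input like main_window.ui, the rewritten main_window.py comes out
# shaped like this (illustrative sketch of the generated output, not run here):
#
#   class MainWindow:
#       def __init__(self, window):
#           self.w = window
#           self.setup_gui()
#
#       def setup_gui(self):
#           self.w.centralWidget = QtWidgets.QWidget(self.w)
#           ...
#           self.w.setCentralWidget(self.w.centralWidget)
#
#       def connect_signals(self, controller):
#           pass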
| [
"subprocess.Popen",
"pathlib.Path",
"os.path.basename",
"os.remove"
] | [((374, 403), 'os.path.basename', 'os.path.basename', (['url_archivo'], {}), '(url_archivo)\n', (390, 403), False, 'import os\n'), ((2881, 2906), 'os.remove', 'os.remove', (['url_archivo_py'], {}), '(url_archivo_py)\n', (2890, 2906), False, 'import os\n'), ((558, 583), 'pathlib.Path', 'pathlib.Path', (['url_archivo'], {}), '(url_archivo)\n', (570, 583), False, 'import pathlib\n'), ((611, 718), 'subprocess.Popen', 'subprocess.Popen', (['f"""pyside2-uic {url_archivo} -o {url_archivo_py}"""'], {'shell': '(True)', 'stdout': 'subprocess.PIPE'}), "(f'pyside2-uic {url_archivo} -o {url_archivo_py}', shell=\n True, stdout=subprocess.PIPE)\n", (627, 718), False, 'import subprocess\n')] |
"""
This script requires developers to add the following information:
1. add file and function name to srcfiles_srcfuncs
2. add file and directory name to srcdir_srcfiles
3. add expected display name for the function to display_names
"""
import os
import itertools
from shutil import copyfile
"""
Add the function names with the src file in this dictionary
If the file is already present, just add the func name in the respective values
Create new entry if the srcfile is not present
srcfiles_srcfuncs = { srcfile : [func1, func2..]}
srcfile : file containing the function that should be renamed
[func1, func2..] : list of function names that should be changed
"""
srcfiles_srcfuncs = {
"hpat_pandas_series_functions.py": ["hpat_pandas_series_append", "hpat_pandas_series_ne", "hpat_pandas_series_iloc"]
}
"""
Add the filenames and the parent directory in this dictionary
If the dir is already present in this list, just add the filename in the respective values
Create a new entry if the dir is not present in this dictionary
srcdir_srcfiles = { parentdir : [filename1, filename2..]}
parentdir : Parent directory for the file
[filename1, filename2 ..] : List of files that have the functions to be renamed
"""
srcdir_srcfiles = {
"hpat/datatypes": ["hpat_pandas_series_functions.py"],
"hpat/hiframes": ["aggregate.py", "boxing.py"]
}
# Add the function name that will replace the original name and should be displayed in documentation
# Always add new name at the ends. Do not change the order
display_names = ['append', 'ne', 'iloc']
cur_dir = os.getcwd()
# This is the dir where all the source files will be copied
src_copy_dir = os.path.join(cur_dir, "API_Doc")
if not os.path.exists(src_copy_dir):
os.mkdir(src_copy_dir)
# Copy all required srcfiles
for dir in srcdir_srcfiles:
file_list = srcdir_srcfiles[dir]
for f in file_list:
src_file = os.path.join(cur_dir, dir, f)
dst_file = os.path.join(cur_dir, "API_Doc", f)
copyfile(src_file, dst_file)
os.chdir(src_copy_dir)
# Change the function names in copied files
i = 0
for filename in srcfiles_srcfuncs:
func_list = srcfiles_srcfuncs[filename]
with open(filename, 'r') as fn:
content = fn.read()
for func in func_list:
content = content.replace(func, display_names[i])
i += 1
with open(filename, 'w') as fn:
fn.write(content)
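# Net effect on the copies in API_Doc/ (illustrative): every occurrence of
# 'hpat_pandas_series_append' becomes 'append', 'hpat_pandas_series_ne'
# becomes 'ne', and 'hpat_pandas_series_iloc' becomes 'iloc', in that order.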
| [
"os.path.exists",
"os.path.join",
"os.getcwd",
"os.chdir",
"shutil.copyfile",
"os.mkdir"
] | [((1684, 1695), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1693, 1695), False, 'import os\n'), ((1777, 1809), 'os.path.join', 'os.path.join', (['cur_dir', '"""API_Doc"""'], {}), "(cur_dir, 'API_Doc')\n", (1789, 1809), False, 'import os\n'), ((2147, 2169), 'os.chdir', 'os.chdir', (['src_copy_dir'], {}), '(src_copy_dir)\n', (2155, 2169), False, 'import os\n'), ((1818, 1846), 'os.path.exists', 'os.path.exists', (['src_copy_dir'], {}), '(src_copy_dir)\n', (1832, 1846), False, 'import os\n'), ((1853, 1875), 'os.mkdir', 'os.mkdir', (['src_copy_dir'], {}), '(src_copy_dir)\n', (1861, 1875), False, 'import os\n'), ((2020, 2049), 'os.path.join', 'os.path.join', (['cur_dir', 'dir', 'f'], {}), '(cur_dir, dir, f)\n', (2032, 2049), False, 'import os\n'), ((2070, 2105), 'os.path.join', 'os.path.join', (['cur_dir', '"""API_Doc"""', 'f'], {}), "(cur_dir, 'API_Doc', f)\n", (2082, 2105), False, 'import os\n'), ((2115, 2143), 'shutil.copyfile', 'copyfile', (['src_file', 'dst_file'], {}), '(src_file, dst_file)\n', (2123, 2143), False, 'from shutil import copyfile\n')] |
from google.appengine.ext import ndb
from google.appengine.ext.ndb import msgprop
from entities import BaseEntity
from constants import Gender, UserStatus, Device, APIStatus
from errors import DataError
class User(BaseEntity):
name = ndb.StringProperty()
mail = ndb.StringProperty()
gender = msgprop.EnumProperty(Gender)
birthday = ndb.DateProperty()
avatar = ndb.BlobProperty(compressed=True)
status = msgprop.EnumProperty(UserStatus, required=True, default=UserStatus.INACTIVE)
device = msgprop.EnumProperty(Device, required=True)
continue_got_count = ndb.IntegerProperty(required=True, default=0) # daily bonus
last_got_datetime = ndb.DateTimeProperty() # daily bonus
push_token = ndb.StringProperty()
update_date = ndb.DateTimeProperty(required=True, auto_now=True)
@classmethod
def get(cls, uuid):
user = cls.get_by_id(uuid)
if user:
return user
else:
raise DataError(APIStatus.DATA_NOT_FOUND, 'User not found, uuid: %s' % uuid)
class Currency(BaseEntity):
gem = ndb.IntegerProperty(required=True, default=0)
coin = ndb.IntegerProperty(required=True, default=0)
total_spend = ndb.IntegerProperty(required=True, default=0)
update_date = ndb.DateTimeProperty(required=True, auto_now=True)
class StartupHistory(BaseEntity):
version = ndb.StringProperty(required=True)
ip = ndb.StringProperty(required=True)
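# Usage sketch (hypothetical uuid; assumes a configured App Engine/NDB
# context is already available):
#   user = User.get('some-uuid')    # raises DataError when the id is absent
#   Currency(id='some-uuid').put()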
| [
"google.appengine.ext.ndb.DateProperty",
"google.appengine.ext.ndb.BlobProperty",
"errors.DataError",
"google.appengine.ext.ndb.msgprop.EnumProperty",
"google.appengine.ext.ndb.IntegerProperty",
"google.appengine.ext.ndb.DateTimeProperty",
"google.appengine.ext.ndb.StringProperty"
] | [((240, 260), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {}), '()\n', (258, 260), False, 'from google.appengine.ext import ndb\n'), ((272, 292), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {}), '()\n', (290, 292), False, 'from google.appengine.ext import ndb\n'), ((306, 334), 'google.appengine.ext.ndb.msgprop.EnumProperty', 'msgprop.EnumProperty', (['Gender'], {}), '(Gender)\n', (326, 334), False, 'from google.appengine.ext.ndb import msgprop\n'), ((350, 368), 'google.appengine.ext.ndb.DateProperty', 'ndb.DateProperty', ([], {}), '()\n', (366, 368), False, 'from google.appengine.ext import ndb\n'), ((382, 415), 'google.appengine.ext.ndb.BlobProperty', 'ndb.BlobProperty', ([], {'compressed': '(True)'}), '(compressed=True)\n', (398, 415), False, 'from google.appengine.ext import ndb\n'), ((429, 505), 'google.appengine.ext.ndb.msgprop.EnumProperty', 'msgprop.EnumProperty', (['UserStatus'], {'required': '(True)', 'default': 'UserStatus.INACTIVE'}), '(UserStatus, required=True, default=UserStatus.INACTIVE)\n', (449, 505), False, 'from google.appengine.ext.ndb import msgprop\n'), ((519, 562), 'google.appengine.ext.ndb.msgprop.EnumProperty', 'msgprop.EnumProperty', (['Device'], {'required': '(True)'}), '(Device, required=True)\n', (539, 562), False, 'from google.appengine.ext.ndb import msgprop\n'), ((588, 633), 'google.appengine.ext.ndb.IntegerProperty', 'ndb.IntegerProperty', ([], {'required': '(True)', 'default': '(0)'}), '(required=True, default=0)\n', (607, 633), False, 'from google.appengine.ext import ndb\n'), ((673, 695), 'google.appengine.ext.ndb.DateTimeProperty', 'ndb.DateTimeProperty', ([], {}), '()\n', (693, 695), False, 'from google.appengine.ext import ndb\n'), ((728, 748), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {}), '()\n', (746, 748), False, 'from google.appengine.ext import ndb\n'), ((767, 817), 'google.appengine.ext.ndb.DateTimeProperty', 'ndb.DateTimeProperty', ([], {'required': '(True)', 'auto_now': '(True)'}), '(required=True, auto_now=True)\n', (787, 817), False, 'from google.appengine.ext import ndb\n'), ((1079, 1124), 'google.appengine.ext.ndb.IntegerProperty', 'ndb.IntegerProperty', ([], {'required': '(True)', 'default': '(0)'}), '(required=True, default=0)\n', (1098, 1124), False, 'from google.appengine.ext import ndb\n'), ((1136, 1181), 'google.appengine.ext.ndb.IntegerProperty', 'ndb.IntegerProperty', ([], {'required': '(True)', 'default': '(0)'}), '(required=True, default=0)\n', (1155, 1181), False, 'from google.appengine.ext import ndb\n'), ((1200, 1245), 'google.appengine.ext.ndb.IntegerProperty', 'ndb.IntegerProperty', ([], {'required': '(True)', 'default': '(0)'}), '(required=True, default=0)\n', (1219, 1245), False, 'from google.appengine.ext import ndb\n'), ((1264, 1314), 'google.appengine.ext.ndb.DateTimeProperty', 'ndb.DateTimeProperty', ([], {'required': '(True)', 'auto_now': '(True)'}), '(required=True, auto_now=True)\n', (1284, 1314), False, 'from google.appengine.ext import ndb\n'), ((1365, 1398), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {'required': '(True)'}), '(required=True)\n', (1383, 1398), False, 'from google.appengine.ext import ndb\n'), ((1408, 1441), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {'required': '(True)'}), '(required=True)\n', (1426, 1441), False, 'from google.appengine.ext import ndb\n'), ((968, 1038), 'errors.DataError', 'DataError', (['APIStatus.DATA_NOT_FOUND', "('User not found, uuid: %s' 
% uuid)"], {}), "(APIStatus.DATA_NOT_FOUND, 'User not found, uuid: %s' % uuid)\n", (977, 1038), False, 'from errors import DataError\n')] |
# core modules
from math import pi
# 3rd party modules
import matplotlib.pyplot as plt
import pandas as pd
# internal modules
import analysis
def main(path):
df = analysis.parse_file(path)
df = prepare_df(df, grouping=(df['date'].dt.hour))
print(df.reset_index().to_dict(orient='list'))
df = pd.DataFrame({'date': [209, 13, 1, 2, 1, 25, 809, 3571, 1952, 1448, 942, 1007, 1531, 1132, 981, 864, 975, 2502, 2786, 2717, 3985, 4991, 2872, 761]},
index=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23])
print(df)
create_radar_chart(df,
# cat_names=['Monday',
# 'Tuesday',
# 'Wednesday',
# 'Thursday',
# 'Friday',
# 'Saturday',
# 'Sunday']
)
def prepare_df(df, grouping):
df = df['date'].groupby(grouping).count().to_frame().reset_index(drop=True)
return df
def create_radar_chart(df, cat_names=None):
"""
Parameters
----------
df : pandas.DataFrame
Has a column 'date'
"""
values = df['date'].tolist()
df = df.T.reset_index(drop=True)
df.insert(0, 'group', 'A')
    # number of variables
categories = list(df)[1:]
if cat_names is None:
cat_names = categories
assert len(cat_names) == len(categories)
N = len(categories)
# What will be the angle of each axis in the plot?
    # (we divide the full circle by the number of variables)
angles = [n / float(N) * 2 * pi for n in range(N)]
angles += angles[:1]
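    # Worked example: with N = 24 categories (the hourly counts above), each
    # step is 2*pi/24 ~= 0.2618 rad, i.e. 15 degrees; repeating the first
    # angle closes the polygon so the outline joins up.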
# Initialise the spider plot
ax = plt.subplot(111, polar=True)
# Draw one axe per variable + add labels labels yet
plt.xticks(angles[:-1], cat_names, color='grey', size=8)
# Draw ylabels
# ax.set_rlabel_position(0)
ticks = get_ticks(values)
# plt.yticks(ticks, [str(tick) for tick in ticks], color="grey", size=7)
# plt.ylim(0, 40)
# We are going to plot the first line of the data frame.
# But we need to repeat the first value to close the circular graph:
values = df.loc[0].drop('group').values.flatten().tolist()
values += values[:1]
# Plot data
ax.plot(angles, values, linewidth=1, linestyle='solid')
# Fill area
ax.fill(angles, values, 'b', alpha=0.1)
plt.show()
def get_ticks(values):
return sorted(values)
if __name__ == '__main__':
args = analysis.get_parser().parse_args()
main(args.filename)
| [
"analysis.get_parser",
"matplotlib.pyplot.xticks",
"analysis.parse_file",
"pandas.DataFrame",
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.show"
] | [((171, 196), 'analysis.parse_file', 'analysis.parse_file', (['path'], {}), '(path)\n', (190, 196), False, 'import analysis\n'), ((312, 568), 'pandas.DataFrame', 'pd.DataFrame', (["{'date': [209, 13, 1, 2, 1, 25, 809, 3571, 1952, 1448, 942, 1007, 1531, \n 1132, 981, 864, 975, 2502, 2786, 2717, 3985, 4991, 2872, 761]}"], {'index': '[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, \n 21, 22, 23]'}), "({'date': [209, 13, 1, 2, 1, 25, 809, 3571, 1952, 1448, 942, \n 1007, 1531, 1132, 981, 864, 975, 2502, 2786, 2717, 3985, 4991, 2872, \n 761]}, index=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,\n 17, 18, 19, 20, 21, 22, 23])\n", (324, 568), True, 'import pandas as pd\n'), ((1758, 1786), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {'polar': '(True)'}), '(111, polar=True)\n', (1769, 1786), True, 'import matplotlib.pyplot as plt\n'), ((1848, 1904), 'matplotlib.pyplot.xticks', 'plt.xticks', (['angles[:-1]', 'cat_names'], {'color': '"""grey"""', 'size': '(8)'}), "(angles[:-1], cat_names, color='grey', size=8)\n", (1858, 1904), True, 'import matplotlib.pyplot as plt\n'), ((2462, 2472), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2470, 2472), True, 'import matplotlib.pyplot as plt\n'), ((2564, 2585), 'analysis.get_parser', 'analysis.get_parser', ([], {}), '()\n', (2583, 2585), False, 'import analysis\n')] |
from datetime import datetime
from threading import Lock
from Database import Database
class LoggedSensor:
"""
This is a common base class for all sensors that have data to be stored/logged.
"""
registered_type_ids = []
def __init__(self, type_id, max_measurements=200, holdoff_time=None):
        if type_id in LoggedSensor.registered_type_ids:
            raise ValueError('Type ID already exists: %d' % type_id)
        LoggedSensor.registered_type_ids.append(type_id)  # register so the check above works
        self.type_id = type_id
self.data = []
self.max_measurements = max_measurements
self.holdoff_time = holdoff_time
self.lock = Lock()
with self.lock:
self.data = Database.instance.fetch_latest_measurements(self.type_id, self.max_measurements)
def add_measurement(self, measurement):
now = datetime.now().timestamp()
with self.lock:
if self.holdoff_time is not None:
if len(self.data) > 0:
diff = now - self.data[-1]['time']
if diff < self.holdoff_time.total_seconds():
return
            if len(self.data) >= self.max_measurements:
                del self.data[0]  # cap the in-memory buffer at max_measurements
self.data.append({'time': now, 'measurement': measurement})
Database.instance.insert_measurement(now, self.type_id, measurement)
def get_chart_data(self, from_timestamp=0):
label = []
data = []
last_timestamp = 0
with self.lock:
for item in self.data:
if item['time'] > from_timestamp:
timestamp = datetime.fromtimestamp(item['time'])
label.append(timestamp.strftime('%H:%M:%S'))
data.append('%.2f' % item['measurement'])
if len(self.data) > 0:
last_timestamp = int(self.data[-1]['time'])
return label, data, last_timestamp
| [
"datetime.datetime.fromtimestamp",
"threading.Lock",
"datetime.datetime.now",
"Database.Database.instance.fetch_latest_measurements",
"Database.Database.instance.insert_measurement"
] | [((608, 614), 'threading.Lock', 'Lock', ([], {}), '()\n', (612, 614), False, 'from threading import Lock\n'), ((664, 749), 'Database.Database.instance.fetch_latest_measurements', 'Database.instance.fetch_latest_measurements', (['self.type_id', 'self.max_measurements'], {}), '(self.type_id, self.max_measurements\n )\n', (707, 749), False, 'from Database import Database\n'), ((1266, 1334), 'Database.Database.instance.insert_measurement', 'Database.instance.insert_measurement', (['now', 'self.type_id', 'measurement'], {}), '(now, self.type_id, measurement)\n', (1302, 1334), False, 'from Database import Database\n'), ((804, 818), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (816, 818), False, 'from datetime import datetime\n'), ((1590, 1626), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (["item['time']"], {}), "(item['time'])\n", (1612, 1626), False, 'from datetime import datetime\n')] |
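A sketch of a concrete sensor built on the LoggedSensor base class above. TemperatureSensor and its type id are hypothetical, and Database.instance is assumed to be initialized elsewhere:

from datetime import timedelta

class TemperatureSensor(LoggedSensor):  # hypothetical subclass
    def __init__(self):
        # type_id 1 is an arbitrary example; the holdoff drops readings < 30 s apart
        super().__init__(type_id=1, max_measurements=500,
                         holdoff_time=timedelta(seconds=30))

sensor = TemperatureSensor()
sensor.add_measurement(21.5)
labels, data, last_ts = sensor.get_chart_data()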
import boto3
from botocore.exceptions import ClientError
import gzip
import io
import os
import csv
import re
class S3Data(object):
def __init__(self, bucket_name_, prefix_, file_, df_schema_, compression_type_,
check_headers_, file_type_, access_key_=None, secret_key_=None,
region_='us-east-2', decode_='utf-8'):
"""
Writes a compressed s3 file to snowflake
:param bucket_name_: S3 bucket
:param prefix_: S3 prefix (i.e., "directory path" of bucket)
:param file_: S3 file / obj
:param df_schema_: Schema as defined by the json file for this object
:param check_headers_: True or False to check for headers
:param file_type_: S3 object file type (if applicable). Currently,
this function only supports 'csv'. and 'tsv'
:param compression_type: type of compression being used.
:param access_key_: AWS Access Key for S3 bucket (if applicable)
:param secret_key_: AWS Secret Key for S3 bucket (if applicable)
:param region_: AWS region (Default = 'us-east-2')
:param decode_: Character decoding of object
:return: List of Data Dictionaries
"""
# File connection properties
self.file_name = file_
self.df_schema = df_schema_
self.region = region_
self.decode = decode_
self.bucket_name = bucket_name_
self.prefix = prefix_
self.file_type = file_type_
self.compression_type = compression_type_
self.access_key = access_key_
self.secret_key = secret_key_
# file processing properties
self.check_headers = check_headers_
self.current_row = 1
self.lines = None
        self.field_names = []  # for snowflake and dataframe processing
        self.field_names_file = []  # for column header check
self.table = ''
self.schema = ''
self.file_row_count = 0
if self.file_type == "csv":
self.delimiter = b','
elif self.file_type == "tsv":
self.delimiter = b'\t'
else:
raise Exception(f"File type: {self.file_type} not supported.")
def __read_data(self, s3_data_):
if self.compression_type == "gz":
file_handle = io.BytesIO(s3_data_)
self.lines = gzip.open(file_handle)
        elif self.compression_type is None:
self.lines = s3_data_.decode('utf-8').split()
else:
raise Exception(f"Compression type: {self.compression_type} not supported.")
def load_data(self,skip_schema=False):
"""
Makes the connection, opens the file as bytes
calls the load schema
calls the load file row count
return: None
"""
client = boto3.client('s3', region_name=self.region,
aws_access_key_id=self.access_key,
aws_secret_access_key=self.secret_key)
obj = client.get_object(Bucket=self.bucket_name, Key=self.prefix + self.file_name)
s3_data = obj['Body'].read()
self.__read_data(s3_data)
if skip_schema is False:
self.__load_schema()
def __load_schema(self):
"""
Private Function
Loads the schema file into a dictionary
Retrieves table and schema values
return: None
"""
# retrieve some data from our json
# get our field names
self.field_names.clear()
for f in self.df_schema['fields']:
self.field_names.append(f['name'])
self.field_names_file.append(f['name_in_file'])
# retrieve what table and schema we are using from the json and pass into the dataframe
self.table = self.df_schema['data_sink']['table_name']
self.schema = self.df_schema['data_sink']['schema']
def __get_file_size(self):
"""
Private Function
gets the row count of the file
return: None sets the self.file_row_count value
"""
row_count = 1
has_column_header = False
for line in self.lines:
newline = line.rstrip().lstrip().split(self.delimiter)
if self.current_row == 1: # check once for column header
if self.check_headers:
if str(newline[0].decode(self.decode)).upper() in self.field_names_file:
has_column_header = True
if has_column_header == False:
row_count += 1
else:
has_column_header = False
self.file_row_count = row_count
def get_file_record_count(self):
if self.file_row_count > 0:
return self.file_row_count
self.__get_file_size()
self.lines.close()
self.load_data()
self.current_row = 1
return self.file_row_count
def get_records(self, row_to_start_: int, chunk_size_: int, file_id_=None):
"""
Loads a set of data into a list of dictionary objects. Keeps track of the row number pointer
:param row_to_start_: The row number to start processing. Note: header records do not count as a row
:param chunk_size_: the number of records to process
:param file_id_: Required parameter when adding the column FILE_ID to the data schema and using the ProcessLogger.
return: List of Data Dictionaries
"""
data = []
has_column_header = False
row_to_end = chunk_size_ + row_to_start_
if row_to_start_ >= self.current_row:
pass
else:
self.lines.close()
self.load_data()
self.current_row = 1
for line in self.lines:
if self.current_row >= row_to_start_:
data_dict = {}
newline = line.rstrip().lstrip().split(self.delimiter)
if self.current_row == 1: # check once for column header
if self.check_headers:
if str(newline[0].decode(self.decode)).upper() in self.field_names_file:
has_column_header = True
if not has_column_header:
column_number = 0
for fields in self.field_names:
if fields == "FILE_ID":
                            if file_id_ is not None:
data_dict[fields] = file_id_
else:
raise Exception("Missing file id field")
else:
data_dict[fields] = str(newline[column_number].decode(self.decode))
column_number += 1
if len(data_dict) > 0:
data.append(data_dict)
if not has_column_header:
self.current_row += 1
has_column_header = False
if self.current_row >= row_to_end:
break
return data
@staticmethod
def iterate_on_s3_response(response_: dict, bucket_name_: str,
prefix_: str, files_: list, give_full_path_):
"""
Iterate over an S3 List Objects result and adds object file/object
names to list.
:param response_: Response from List Objects func.
:param bucket_name_: Name of S3 bucket that was searched.
:param prefix_: Prefix used to find files.
:param files_: List append S3 URLs to.
:return: None
"""
for item in response_["Contents"]:
if prefix_ in item["Key"]:
if give_full_path_:
files_.append("s3://" + bucket_name_ + "/" + item["Key"])
else:
files_.append(os.path.basename(item["Key"]))
@staticmethod
def get_file_list_s3(bucket_name_, prefix_, access_key_=None,
secret_key_=None, region_='us-east-2',
file_prefix_: str = None, file_suffix_: str = None,
file_regex_: str = None, give_full_path_=False):
"""
Creates a list of items in an S3 bucket.
:param bucket_name_: Name of S3 bucket to search
:param prefix_: Prefix used to find files.
:param access_key_: AWS Access Key for S3 bucket (if applicable)
:param secret_key_: AWS Secret Key for S3 bucket (if applicable)
:param region_: AWS region (Default = 'us-east-2')
:param file_prefix_: If used, function will return files that start
with this (case-sensitive). Can be used in tandem with file_suffix_
:param file_suffix_: If used, function will return files that end
with this (case-sensitive). Can be used in tandem with file_prefix_
:param file_regex_: If used, will return all files that match this
regex pattern. file_prefix_ & file_suffix_ will be ignored.
:param give_full_path_: If False, only file name will be returned. If
true, full path & file name will be returned.
:return: List of S3 file/object names as strings
"""
client = boto3.client('s3', region_name=region_,
aws_access_key_id=access_key_,
aws_secret_access_key=secret_key_)
response = client.list_objects_v2(Bucket=bucket_name_, Prefix=prefix_)
all_files = []
if "Contents" in response:
S3Data.iterate_on_s3_response(response, bucket_name_,
prefix_, all_files, give_full_path_)
while response["IsTruncated"]:
print(response["NextContinuationToken"])
response = client.list_objects_v2(
Bucket=bucket_name_, Prefix=prefix_,
ContinuationToken=response["NextContinuationToken"])
S3Data.iterate_on_s3_response(response, bucket_name_,
prefix_, all_files, give_full_path_)
if file_regex_ or file_prefix_ or file_suffix_:
pattern = file_regex_ if file_regex_ else \
f"{file_prefix_ if file_prefix_ else ''}.*{file_suffix_ if file_suffix_ else ''}"
files = [x for x in all_files if re.search(pattern, x)]
else:
files = all_files
return files
@staticmethod
def s3_obj_to_dict(bucket_name_, prefix_, file_, file_type_='csv',
access_key_=None, secret_key_=None, region_='us-east-2',
decode_='utf-8'):
"""
Converts an S3 object to a list of flattened dictionary records.
:param bucket_name_: S3 bucket
:param prefix_: S3 prefix (i.e., "directory path" of bucket)
:param file_: S3 file / obj
:param file_type_: S3 object file type (if applicable). Currently,
this function only supports 'csv'. No other file types have been
tested.
:param access_key_: AWS Access Key for S3 bucket (if applicable)
:param secret_key_: AWS Secret Key for S3 bucket (if applicable)
:param region_: AWS region (Default = 'us-east-2')
:param decode_: Character decoding of object
:return: List of flattened dictionaries.
"""
if not file_type_:
raise Exception("Need to pass a file_type_.")
client = boto3.client('s3', region_name=region_,
aws_access_key_id=access_key_,
aws_secret_access_key=secret_key_)
obj = client.get_object(Bucket=bucket_name_, Key=prefix_ + file_)
lines = obj['Body'].read().decode(decode_).split()
data = []
try:
if file_type_ == 'csv':
reader = csv.DictReader(lines)
for row in reader:
data.append(dict(row))
else:
raise Exception(f"{file_type_} not currently supported for S3 obj conversion.")
return data
except Exception as e:
raise Exception(f"There was an issue converting {file_} to a "
f"list of flattened dictionaries. {e}")
def upload_file(self,object_name_=None):
"""Upload a file to an S3 bucket
:param object_name: S3 object name. If not specified then file_name is used
:return: True if file was uploaded, else False
"""
# If S3 object_name was not specified, use file_name
if object_name_ is None:
object_name_ = self.prefix+"/"+self.file_name
else:
object_name_ = self.prefix+"/"+object_name_
# Upload the file
client = boto3.client('s3', region_name=self.region,
aws_access_key_id=self.access_key,
aws_secret_access_key=self.secret_key)
try:
response = client.upload_file(Filename=self.file_name, Bucket=self.bucket_name, Key=object_name_)
except ClientError as e:
print(f"There was an error {e} while uploading {self.file_name} to S3")
return False
target_path=f"https://{self.bucket_name}.s3.{self.region}.amazonaws.com/{object_name_}"
return target_path
def save_to_s3(self, data_string_):
"""
Saves data as a file in S3 bucket
:param data_string_: data to be saved
return: target_path
"""
encoded_data=data_string_.encode(self.decode)
s3_path=self.prefix+"/"+self.file_name
resource=boto3.resource('s3', region_name=self.region,
aws_access_key_id=self.access_key,
aws_secret_access_key=self.secret_key)
try:
response=resource.Bucket(self.bucket_name).put_object(Key=s3_path, Body=encoded_data)
except ClientError as e:
print(f"There was an error {e} while uploading {self.file_name} to S3")
return False
target_path=f"https://{self.bucket_name}.s3.{self.region}.amazonaws.com/{s3_path}"
return target_path
| [
"csv.DictReader",
"boto3.client",
"gzip.open",
"io.BytesIO",
"boto3.resource",
"os.path.basename",
"re.search"
] | [((3043, 3165), 'boto3.client', 'boto3.client', (['"""s3"""'], {'region_name': 'self.region', 'aws_access_key_id': 'self.access_key', 'aws_secret_access_key': 'self.secret_key'}), "('s3', region_name=self.region, aws_access_key_id=self.\n access_key, aws_secret_access_key=self.secret_key)\n", (3055, 3165), False, 'import boto3\n'), ((9465, 9574), 'boto3.client', 'boto3.client', (['"""s3"""'], {'region_name': 'region_', 'aws_access_key_id': 'access_key_', 'aws_secret_access_key': 'secret_key_'}), "('s3', region_name=region_, aws_access_key_id=access_key_,\n aws_secret_access_key=secret_key_)\n", (9477, 9574), False, 'import boto3\n'), ((11757, 11866), 'boto3.client', 'boto3.client', (['"""s3"""'], {'region_name': 'region_', 'aws_access_key_id': 'access_key_', 'aws_secret_access_key': 'secret_key_'}), "('s3', region_name=region_, aws_access_key_id=access_key_,\n aws_secret_access_key=secret_key_)\n", (11769, 11866), False, 'import boto3\n'), ((13071, 13193), 'boto3.client', 'boto3.client', (['"""s3"""'], {'region_name': 'self.region', 'aws_access_key_id': 'self.access_key', 'aws_secret_access_key': 'self.secret_key'}), "('s3', region_name=self.region, aws_access_key_id=self.\n access_key, aws_secret_access_key=self.secret_key)\n", (13083, 13193), False, 'import boto3\n'), ((13945, 14069), 'boto3.resource', 'boto3.resource', (['"""s3"""'], {'region_name': 'self.region', 'aws_access_key_id': 'self.access_key', 'aws_secret_access_key': 'self.secret_key'}), "('s3', region_name=self.region, aws_access_key_id=self.\n access_key, aws_secret_access_key=self.secret_key)\n", (13959, 14069), False, 'import boto3\n'), ((2515, 2535), 'io.BytesIO', 'io.BytesIO', (['s3_data_'], {}), '(s3_data_)\n', (2525, 2535), False, 'import io\n'), ((2561, 2583), 'gzip.open', 'gzip.open', (['file_handle'], {}), '(file_handle)\n', (2570, 2583), False, 'import gzip\n'), ((12151, 12172), 'csv.DictReader', 'csv.DictReader', (['lines'], {}), '(lines)\n', (12165, 12172), False, 'import csv\n'), ((8080, 8109), 'os.path.basename', 'os.path.basename', (["item['Key']"], {}), "(item['Key'])\n", (8096, 8109), False, 'import os\n'), ((10623, 10644), 're.search', 're.search', (['pattern', 'x'], {}), '(pattern, x)\n', (10632, 10644), False, 'import re\n')] |
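A hedged usage sketch for S3Data; the bucket, prefix, file name, and schema dictionary are placeholders shaped to match what __load_schema and the header check read:

schema = {
    "fields": [{"name": "ID", "name_in_file": "ID"},
               {"name": "VALUE", "name_in_file": "VALUE"}],
    "data_sink": {"table_name": "MY_TABLE", "schema": "PUBLIC"},
}
s3 = S3Data("my-bucket", "incoming/", "data.csv", schema,
            compression_type_=None, check_headers_=True, file_type_="csv")
s3.load_data()
rows = s3.get_records(row_to_start_=1, chunk_size_=100)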
from datetime import datetime
import timebomb.models as models
def test_Notification():
notif = models.Notification("message")
assert notif.content == "message"
assert notif.read is False
assert str(notif) == "message"
def test_Player():
player = models.Player("name", "id")
assert player.name == "name"
assert player.id == "id"
assert player.team is None
assert player.hand is None
player = models.Player("name", "id", "team", ("A", "B"), "roomid")
assert player.name == "name"
assert player.id == "id"
assert player.team == "team"
assert player.hand == ("A", "B")
assert player.roomId == "roomid"
def test_Message():
now = datetime.now()
message = models.Message("player", "message")
assert message.player_name == "player"
assert message.content == "message"
assert message.timestamp and isinstance(message.timestamp, datetime)
assert str(message) == f"[{now:%H:%M}] player: message"
def test_Room():
player = models.Player("player", "player_id")
room = models.Room("room", "room_id", (player,))
assert room.name == "room" and room.id == "room_id"
assert len(room.players) == 1 and room.players[0] is player
assert room.cutter is None and room.winning_team is None and room.status == ""
assert isinstance(room.cards_found, dict) and isinstance(room.cards_left, dict)
assert not room.cards_found and not room.cards_left
def test_GameState():
state = models.GameState()
assert isinstance(state.messages, list) and not state.messages
assert state.room is None and state.me is None and state.notification is None
def test_GameState_new_message():
state = models.GameState()
assert isinstance(state.messages, list) and not state.messages
message = state.new_message({"player": "player", "message": "test_message"})
assert len(state.messages) == 1 and state.messages[0] is message
assert message.player_name == "player" and message.content == "test_message"
for i in range(99):
last = state.new_message(
{"player": f"player{i}", "message": f"test_message{i}"}
)
assert len(state.messages) == 100
assert state.messages[0] is message and state.messages[99] is last
assert last.player_name == "player98" and last.content == "test_message98"
last = state.new_message({"player": "player99", "message": "test_message99"})
assert len(state.messages) == 100
assert state.messages[0] is not message and state.messages[99] is last
assert (
state.messages[0].player_name == "player0"
and state.messages[0].content == "test_message0"
)
assert last.player_name == "player99" and last.content == "test_message99"
res = state.new_message({"message": "test_message100"})
assert res is None
assert state.messages[99] is last
def test_GameState_new_notification():
state = models.GameState()
assert state.notification is None
notif1 = state.new_notification({"message": "notif1"})
assert state.notification is notif1 and notif1.content == "notif1"
notif2 = state.new_notification({"message": "notif2"})
assert state.notification is notif2 and notif2.content == "notif2"
notif3 = state.new_notification({"unknown": "notif2"})
assert notif3 is None and state.notification is notif2
def test_GameState_update_room():
state = models.GameState()
assert state.room is None
players_data = [{"name": "player1", "id": "id1"}]
room_data = {"name": "roomname", "id": "roomid", "players": players_data}
room = state.update_room(room_data)
assert state.room is room and room.name == "roomname" and room.id == "roomid"
assert len(room.players) == 1
assert room.players[0].name == "player1" and room.players[0].id == "id1"
new_data = {"name": "newname", "cutter": {"name": "cutter", "id": "cutterid"}}
room = state.update_room(new_data)
assert state.room is room and room.name == "newname" and room.id == "roomid"
assert len(room.players) == 1
assert room.players[0].name == "player1" and room.players[0].id == "id1"
assert (
isinstance(room.cutter, models.Player)
and room.cutter.id == "cutterid"
and room.cutter.name == "cutter"
)
new_data = {
"players": [{"name": "player1", "id": "id1"}, {"name": "player2", "id": "id2"}]
}
room = state.update_room(new_data)
assert state.room is room and room.name == "newname" and room.id == "roomid"
assert len(room.players) == 2
def test_GameState_update_me():
state = models.GameState()
assert state.me is None
player = state.update_me({"name": "player1", "id": "id1"})
assert state.me is player and player.name == "player1" and player.id == "id1"
assert player.hand is None
player = state.update_me({"hand": ("A", "A", "B", "A")})
assert state.me is player and player.name == "player1" and player.id == "id1"
assert player.hand == ("A", "A", "B", "A")
def test_GameState_reset():
state = models.GameState()
assert isinstance(state.messages, list) and not state.messages
assert state.room is None and state.me is None and state.notification is None
state.messages = ["m1", "m2"]
state.room = "Room"
state.me = "Me"
state.notification = "Notification"
state.reset()
assert isinstance(state.messages, list) and not state.messages
assert state.room is None and state.me is None and state.notification is None
| [
"timebomb.models.Message",
"timebomb.models.Room",
"timebomb.models.GameState",
"timebomb.models.Player",
"datetime.datetime.now",
"timebomb.models.Notification"
] | [((103, 133), 'timebomb.models.Notification', 'models.Notification', (['"""message"""'], {}), "('message')\n", (122, 133), True, 'import timebomb.models as models\n'), ((273, 300), 'timebomb.models.Player', 'models.Player', (['"""name"""', '"""id"""'], {}), "('name', 'id')\n", (286, 300), True, 'import timebomb.models as models\n'), ((440, 497), 'timebomb.models.Player', 'models.Player', (['"""name"""', '"""id"""', '"""team"""', "('A', 'B')", '"""roomid"""'], {}), "('name', 'id', 'team', ('A', 'B'), 'roomid')\n", (453, 497), True, 'import timebomb.models as models\n'), ((700, 714), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (712, 714), False, 'from datetime import datetime\n'), ((729, 764), 'timebomb.models.Message', 'models.Message', (['"""player"""', '"""message"""'], {}), "('player', 'message')\n", (743, 764), True, 'import timebomb.models as models\n'), ((1014, 1050), 'timebomb.models.Player', 'models.Player', (['"""player"""', '"""player_id"""'], {}), "('player', 'player_id')\n", (1027, 1050), True, 'import timebomb.models as models\n'), ((1062, 1103), 'timebomb.models.Room', 'models.Room', (['"""room"""', '"""room_id"""', '(player,)'], {}), "('room', 'room_id', (player,))\n", (1073, 1103), True, 'import timebomb.models as models\n'), ((1486, 1504), 'timebomb.models.GameState', 'models.GameState', ([], {}), '()\n', (1502, 1504), True, 'import timebomb.models as models\n'), ((1703, 1721), 'timebomb.models.GameState', 'models.GameState', ([], {}), '()\n', (1719, 1721), True, 'import timebomb.models as models\n'), ((2928, 2946), 'timebomb.models.GameState', 'models.GameState', ([], {}), '()\n', (2944, 2946), True, 'import timebomb.models as models\n'), ((3414, 3432), 'timebomb.models.GameState', 'models.GameState', ([], {}), '()\n', (3430, 3432), True, 'import timebomb.models as models\n'), ((4607, 4625), 'timebomb.models.GameState', 'models.GameState', ([], {}), '()\n', (4623, 4625), True, 'import timebomb.models as models\n'), ((5064, 5082), 'timebomb.models.GameState', 'models.GameState', ([], {}), '()\n', (5080, 5082), True, 'import timebomb.models as models\n')] |
from __future__ import unicode_literals
import json
import numpy as np
from builtins import str
from abc import ABCMeta, abstractmethod
from pychemia import HAS_PYMONGO
from pychemia.utils.computing import deep_unicode
if HAS_PYMONGO:
from pychemia.db import PyChemiaDB
class Population:
    """
    General class for all optimization algorithms that use fixed and blocked
    generations.
    """
    __metaclass__ = ABCMeta
def __init__(self, name, tag, use_mongo=True):
name = deep_unicode(name)
self.tag = tag
self.pcdb = None
if isinstance(name, str):
self.name = name
if use_mongo:
self.pcdb = PyChemiaDB(name)
else:
self.name = name.name
if use_mongo:
self.pcdb = name
def __iter__(self):
return self.pcdb.entries.find()
def __len__(self):
return len(self.members)
def __str__(self):
ret = ' Population Name: %s\n' % self.name
ret += ' Tag: %s\n' % self.tag
ret += ' Members: %s\n' % len(self)
return ret
def disable(self, entry_id):
self.pcdb.entries.update({'_id': entry_id}, {'$set': {'status.' + self.tag: False}})
def enable(self, entry_id):
self.pcdb.entries.update({'_id': entry_id}, {'$set': {'status.' + self.tag: True}})
def get_values(self, selection):
ret = {}
for i in selection:
ret[i] = self.value(i)
return ret
def update_properties(self, entry_id, new_properties):
self.pcdb.update(entry_id, properties=new_properties)
def set_in_properties(self, entry_id, field, value):
return self.pcdb.entries.update_one({'_id': entry_id}, {'$set': {'properties.'+field: value}})
def get_population_info(self):
return self.pcdb.db.population_info.find_one({'tag': self.tag})
def insert_entry(self, entry):
        if 'structure' not in entry:
            entry['structure'] = {}
        if 'properties' not in entry:
            entry['properties'] = {}
        if 'status' not in entry:
            entry['status'] = {}
self.pcdb.entries.insert(entry)
def get_structure(self, entry_id):
return self.pcdb.get_structure(entry_id)
def set_structure(self, entry_id, structure):
return self.pcdb.update(entry_id, structure=structure)
def get_entry(self, entry_id, projection=None, with_id=True):
"""
Return an entry identified by 'entry_id'
:param with_id:
:param projection: Insert that projection into the query
:param entry_id: A database identifier
:return:
"""
if projection is None:
projection = {}
if not with_id:
            projection['_id'] = 0
entry = self.pcdb.entries.find_one({'_id': entry_id}, projection)
return entry
def ids_sorted(self, selection):
values = np.array([self.value(i) for i in selection])
sorted_indices = np.argsort(values)
return np.array(selection)[sorted_indices]
def load_json(self, filename):
        with open(filename, 'r') as filep:
            data = json.load(filep)
for entry in data:
self.pcdb.entries.insert(entry)
def random_population(self, n):
"""
Create N new random structures to the population
:param n: (int) The number of new structures
:return: (list) The identifiers for the new structures
"""
        return [self.add_random() for _ in range(n)]
def replace_failed(self):
pass
def save_info(self):
data = self.pcdb.db.population_info.find_one({'_id': self.tag})
if data is None:
data = self.to_dict
data['_id'] = self.tag
self.pcdb.db.population_info.insert(data)
else:
self.pcdb.db.population_info.update({'_id': self.tag}, self.to_dict)
def save_json(self, filename):
ret = []
for entry_id in self.members:
ret.append(self.get_entry(entry_id, with_id=False))
        with open(filename, 'w') as filep:
            json.dump(ret, filep, sort_keys=True, indent=4, separators=(',', ': '))
def unlock_all(self, name=None):
for i in self.members:
self.pcdb.unlock(i, name=name)
@abstractmethod
def add_random(self):
pass
@abstractmethod
def check_duplicates(self, ids):
pass
@abstractmethod
def cross(self, ids):
pass
@abstractmethod
def distance(self, entry_id, entry_jd):
pass
@abstractmethod
def get_duplicates(self, ids):
pass
@abstractmethod
def from_dict(self, population_dict):
pass
@abstractmethod
def is_evaluated(self, entry_id):
pass
@abstractmethod
def move_random(self, entry_id, factor=0.2, in_place=False, kind='move'):
pass
@abstractmethod
def move(self, entry_id, entry_jd, factor=0.2, in_place=False):
pass
@abstractmethod
def new_entry(self, data, active=True):
pass
@abstractmethod
def recover(self):
pass
@abstractmethod
def value(self, entry_id):
pass
@abstractmethod
def str_entry(self, entry_id):
pass
@property
def actives(self):
return [entry['_id'] for entry in self.pcdb.entries.find({'status.' + self.tag: True}, {'_id': 1})]
@property
def actives_evaluated(self):
return [x for x in self.actives if self.is_evaluated(x)]
@property
def actives_no_evaluated(self):
return [x for x in self.actives if not self.is_evaluated(x)]
@property
def evaluated(self):
return [entry for entry in self.members if self.is_evaluated(entry)]
@property
def fraction_evaluated(self):
ret = np.sum([1 for i in self.actives if self.is_evaluated(i)])
return float(ret) / len(self.actives)
@property
def members(self):
return [x['_id'] for x in self.pcdb.entries.find({}, {'_id': 1})]
@property
def to_dict(self):
return {'name': self.name, 'tag': self.tag}
@property
def best_candidate(self):
return self.ids_sorted(self.evaluated)[0]
def refine_progressive(self, entry_id):
pass
| [
"pychemia.utils.computing.deep_unicode",
"pychemia.db.PyChemiaDB",
"numpy.argsort",
"numpy.array",
"json.load",
"json.dump"
] | [((501, 519), 'pychemia.utils.computing.deep_unicode', 'deep_unicode', (['name'], {}), '(name)\n', (513, 519), False, 'from pychemia.utils.computing import deep_unicode\n'), ((3036, 3054), 'numpy.argsort', 'np.argsort', (['values'], {}), '(values)\n', (3046, 3054), True, 'import numpy as np\n'), ((3193, 3209), 'json.load', 'json.load', (['filep'], {}), '(filep)\n', (3202, 3209), False, 'import json\n'), ((4151, 4222), 'json.dump', 'json.dump', (['ret', 'filep'], {'sort_keys': '(True)', 'indent': '(4)', 'separators': "(',', ': ')"}), "(ret, filep, sort_keys=True, indent=4, separators=(',', ': '))\n", (4160, 4222), False, 'import json\n'), ((3070, 3089), 'numpy.array', 'np.array', (['selection'], {}), '(selection)\n', (3078, 3089), True, 'import numpy as np\n'), ((685, 701), 'pychemia.db.PyChemiaDB', 'PyChemiaDB', (['name'], {}), '(name)\n', (695, 701), False, 'from pychemia.db import PyChemiaDB\n')] |
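Population is abstract, so a concrete subclass must supply the hooks before use. A minimal sketch with placeholder bodies (use_mongo=False avoids needing a MongoDB instance):

class ToyPopulation(Population):  # hypothetical concrete subclass
    def add_random(self):
        ...  # create one random entry and return its id

    def value(self, entry_id):
        ...  # objective value consumed by ids_sorted()

    # The remaining abstract hooks (cross, distance, move, recover,
    # is_evaluated, new_entry, str_entry, ...) would be filled in likewise.

pop = ToyPopulation('toy_db', tag='global', use_mongo=False)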
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `pynessie` package."""
import pytest
import requests_mock
import simplejson as json
from click.testing import CliRunner
from pynessie import __version__
from pynessie import cli
from pynessie.model import ReferenceSchema
def test_command_line_interface(requests_mock: requests_mock) -> None:
"""Test the CLI."""
runner = CliRunner()
result = runner.invoke(cli.cli)
assert result.exit_code == 0
assert "Usage: cli" in result.output
help_result = runner.invoke(cli.cli, ["--help"])
assert help_result.exit_code == 0
assert "Usage: cli" in help_result.output
help_result = runner.invoke(cli.cli, ["--version"])
assert help_result.exit_code == 0
assert __version__ in help_result.output
requests_mock.get(
"http://localhost:19120/api/v1/trees",
text=json.dumps([{"name": "main", "type": "BRANCH", "hash": "1234567890abcdef"}]),
)
help_result = runner.invoke(cli.cli, ["list-references"])
assert help_result.exit_code == 0
references = ReferenceSchema().loads(help_result.output, many=True)
assert len(references) == 1
assert references[0].name == "main"
assert references[0].kind == "BRANCH"
assert references[0].hash_ == "1234567890abcdef"
@pytest.mark.e2e
def test_command_line_interface_e2e() -> None:
"""Test the CLI."""
runner = CliRunner()
result = runner.invoke(cli.cli)
assert result.exit_code == 0
assert "Usage: cli" in result.output
help_result = runner.invoke(cli.cli, ["--help"])
assert help_result.exit_code == 0
assert "Usage: cli" in help_result.output
help_result = runner.invoke(cli.cli, ["--version"])
assert help_result.exit_code == 0
assert __version__ in help_result.output
help_result = runner.invoke(cli.cli, ["list-references"])
assert help_result.exit_code == 0
branches = ReferenceSchema().loads(help_result.output, many=True)
assert len(branches) == 1
assert branches[0].name == "main"
| [
"simplejson.dumps",
"pynessie.model.ReferenceSchema",
"click.testing.CliRunner"
] | [((391, 402), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (400, 402), False, 'from click.testing import CliRunner\n'), ((1398, 1409), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (1407, 1409), False, 'from click.testing import CliRunner\n'), ((872, 948), 'simplejson.dumps', 'json.dumps', (["[{'name': 'main', 'type': 'BRANCH', 'hash': '1234567890abcdef'}]"], {}), "([{'name': 'main', 'type': 'BRANCH', 'hash': '1234567890abcdef'}])\n", (882, 948), True, 'import simplejson as json\n'), ((1073, 1090), 'pynessie.model.ReferenceSchema', 'ReferenceSchema', ([], {}), '()\n', (1088, 1090), False, 'from pynessie.model import ReferenceSchema\n'), ((1911, 1928), 'pynessie.model.ReferenceSchema', 'ReferenceSchema', ([], {}), '()\n', (1926, 1928), False, 'from pynessie.model import ReferenceSchema\n')] |
# Code generated by sqlc. DO NOT EDIT.
import dataclasses
from typing import Optional
@dataclasses.dataclass()
class Author:
id: int
name: str
bio: Optional[str]
| [
"dataclasses.dataclass"
] | [((89, 112), 'dataclasses.dataclass', 'dataclasses.dataclass', ([], {}), '()\n', (110, 112), False, 'import dataclasses\n')] |
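The generated model is a plain dataclass; constructing one is direct (values illustrative):

author = Author(id=1, name="Octavia Butler", bio=None)
print(author)  # Author(id=1, name='Octavia Butler', bio=None)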
import unittest
import datetime
from weightTrack import WeightNote
class TestWeightNote(unittest.TestCase):
### Testing getter methods ###
def test_shouldGetWeight(self):
testWeight = WeightNote(100, "Ate breakfast")
self.assertEqual(testWeight.getWeight(), 100, "Should be 100")
    # Note: comparing getTime() against the current time is unreliable; instead
    # use a library such as freezegun to freeze the clock at a known datetime
"""
def test_shouldGetTime(self):
testWeight = WeightNote(100, "Ate breakfast")
self.assertEqual(testWeight.getTime(),
datetime.datetime.now(),
"Should be same time as creation")
"""
def test_shouldGetNote(self):
testWeight = WeightNote(100, "Ate breakfast")
self.assertEqual(testWeight.getNote(),
"Ate breakfast",
"Should be 'Ate breakfast'")
### Testing setter methods ###
def test_shouldSetWeight(self):
testWeight = WeightNote(100, "Ate breakfast")
testWeight.setWeight(150)
        self.assertEqual(testWeight.getWeight(), 150, "Should be 150")
    # Note: comparing getTime() against the current time is unreliable; instead
    # use a library such as freezegun to freeze the clock at a known datetime
"""
def test_shouldSetTime(self):
testWeight = WeightNote(100, "Ate breakfast")
self.assertEqual(testWeight.getTime(),
datetime.datetime.now(),
"Should be same time as creation")
"""
def test_shouldSetNote(self):
testWeight = WeightNote(100, "Ate breakfast")
testWeight.setNote("Ate lunch")
self.assertEqual(testWeight.getNote(),
"Ate lunch",
"Should be 'Ate lunch'")
# main
if __name__ == "__main__":
unittest.main()
| [
"unittest.main",
"weightTrack.WeightNote"
] | [((1866, 1881), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1879, 1881), False, 'import unittest\n'), ((204, 236), 'weightTrack.WeightNote', 'WeightNote', (['(100)', '"""Ate breakfast"""'], {}), "(100, 'Ate breakfast')\n", (214, 236), False, 'from weightTrack import WeightNote\n'), ((759, 791), 'weightTrack.WeightNote', 'WeightNote', (['(100)', '"""Ate breakfast"""'], {}), "(100, 'Ate breakfast')\n", (769, 791), False, 'from weightTrack import WeightNote\n'), ((1029, 1061), 'weightTrack.WeightNote', 'WeightNote', (['(100)', '"""Ate breakfast"""'], {}), "(100, 'Ate breakfast')\n", (1039, 1061), False, 'from weightTrack import WeightNote\n'), ((1618, 1650), 'weightTrack.WeightNote', 'WeightNote', (['(100)', '"""Ate breakfast"""'], {}), "(100, 'Ate breakfast')\n", (1628, 1650), False, 'from weightTrack import WeightNote\n')] |
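The commented-out time tests above can be made deterministic with the third-party freezegun library; a sketch, assuming WeightNote records datetime.datetime.now() at creation as the comments imply:

import datetime
from freezegun import freeze_time
from weightTrack import WeightNote

@freeze_time("2021-06-01 12:00:00")
def test_shouldGetTime():
    testWeight = WeightNote(100, "Ate breakfast")
    # With the clock frozen, the creation time is exactly the frozen instant
    assert testWeight.getTime() == datetime.datetime(2021, 6, 1, 12, 0, 0)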
# -*- encoding: utf-8 -*-
import datetime
def formata_data(data):
data = datetime.datetime.strptime(data, '%d/%m/%Y').date()
return data.strftime("%Y%m%d")
def formata_valor(valor):
return str("%.2f" % valor).replace(".", "")
| [
"datetime.datetime.strptime"
] | [((80, 124), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['data', '"""%d/%m/%Y"""'], {}), "(data, '%d/%m/%Y')\n", (106, 124), False, 'import datetime\n')] |
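Example inputs and outputs for the two helpers (dates are day/month/year):

print(formata_data('05/03/2021'))  # '20210305'
print(formata_valor(1234.5))       # '123450'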
import argparse
import torch
import syft as sy
from syft import WebsocketServerWorker
def get_args():
parser = argparse.ArgumentParser(description="Run websocket server worker.")
parser.add_argument(
"--port",
"-p",
type=int,
default=8777,
help="port number of the websocket server worker, e.g. --port 8777",
)
parser.add_argument("--host", type=str, default="0.0.0.0", help="host for the connection")
parser.add_argument(
"--id", type=str, help="name (id) of the websocket server worker, e.g. --id alice"
)
parser.add_argument(
"--verbose",
"-v",
action="store_true",
help="if set, websocket server worker will be started in verbose mode",
)
args = parser.parse_args()
return args
if __name__ == "__main__":
hook = sy.TorchHook(torch)
args = get_args()
kwargs = {
"id": args.id,
"host": args.host,
"port": args.port,
"hook": hook,
"verbose": args.verbose,
}
server = WebsocketServerWorker(**kwargs)
server.start()
| [
"syft.TorchHook",
"argparse.ArgumentParser",
"syft.WebsocketServerWorker"
] | [((117, 184), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Run websocket server worker."""'}), "(description='Run websocket server worker.')\n", (140, 184), False, 'import argparse\n'), ((845, 864), 'syft.TorchHook', 'sy.TorchHook', (['torch'], {}), '(torch)\n', (857, 864), True, 'import syft as sy\n'), ((1054, 1085), 'syft.WebsocketServerWorker', 'WebsocketServerWorker', ([], {}), '(**kwargs)\n', (1075, 1085), False, 'from syft import WebsocketServerWorker\n')] |
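Once the server is running (e.g. python server.py --id alice --port 8777), a client can attach with syft's WebsocketClientWorker; the exact import path and signature vary across syft versions, so treat this as a sketch:

import torch
import syft as sy
from syft.workers.websocket_client import WebsocketClientWorker  # path may differ by version

hook = sy.TorchHook(torch)
alice = WebsocketClientWorker(hook=hook, id="alice", host="localhost", port=8777)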
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.7
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
if version_info >= (2, 6, 0):
def swig_import_helper():
from os.path import dirname
import imp
fp = None
try:
fp, pathname, description = imp.find_module('_SimInternalLoad_Lights_Default', [dirname(__file__)])
except ImportError:
import _SimInternalLoad_Lights_Default
return _SimInternalLoad_Lights_Default
if fp is not None:
try:
_mod = imp.load_module('_SimInternalLoad_Lights_Default', fp, pathname, description)
finally:
fp.close()
return _mod
_SimInternalLoad_Lights_Default = swig_import_helper()
del swig_import_helper
else:
import _SimInternalLoad_Lights_Default
del version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
if (name == "thisown"):
return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name, None)
if method:
return method(self, value)
if (not static):
if _newclass:
object.__setattr__(self, name, value)
else:
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr_nondynamic(self, class_type, name, static=1):
if (name == "thisown"):
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
if (not static):
return object.__getattr__(self, name)
else:
raise AttributeError(name)
def _swig_getattr(self, class_type, name):
return _swig_getattr_nondynamic(self, class_type, name, 0)
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
_object = object
_newclass = 1
except AttributeError:
class _object:
pass
_newclass = 0
try:
import weakref
weakref_proxy = weakref.proxy
except:
weakref_proxy = lambda x: x
import base
import SimInternalLoad_Equipment_Electric
class SimInternalLoad_Lights(SimInternalLoad_Equipment_Electric.SimInternalLoad):
__swig_setmethods__ = {}
for _s in [SimInternalLoad_Equipment_Electric.SimInternalLoad]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, SimInternalLoad_Lights, name, value)
__swig_getmethods__ = {}
for _s in [SimInternalLoad_Equipment_Electric.SimInternalLoad]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, SimInternalLoad_Lights, name)
__repr__ = _swig_repr
def SimInternalLoad_Name(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_Name(self, *args)
def SimInternalLoad_ZoneOrZoneListName(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_ZoneOrZoneListName(self, *args)
def SimInternalLoad_FracRadiant(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_FracRadiant(self, *args)
def SimInternalLoad_SchedName(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_SchedName(self, *args)
def SimInternalLoad_DesignLevelCalcMeth(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_DesignLevelCalcMeth(self, *args)
def SimInternalLoad_LightLevel(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_LightLevel(self, *args)
def SimInternalLoad_PowerPerZoneFloorArea(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_PowerPerZoneFloorArea(self, *args)
def SimInternalLoad_PowerPerPerson(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_PowerPerPerson(self, *args)
def SimInternalLoad_RtnAirFrac(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_RtnAirFrac(self, *args)
def SimInternalLoad_FracVisible(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_FracVisible(self, *args)
def SimInternalLoad_FracReplaceable(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_FracReplaceable(self, *args)
def SimInternalLoad_EndUseSubCat(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_EndUseSubCat(self, *args)
def SimInternalLoad_RtnAirFracCalcFromPlenTemp(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_RtnAirFracCalcFromPlenTemp(self, *args)
def SimInternalLoad_RtnAirFracFuncofPlenumTempCoef1(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_RtnAirFracFuncofPlenumTempCoef1(self, *args)
def SimInternalLoad_RtnAirFracFuncofPlenumTempCoef2(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_RtnAirFracFuncofPlenumTempCoef2(self, *args)
def __init__(self, *args):
this = _SimInternalLoad_Lights_Default.new_SimInternalLoad_Lights(*args)
try:
self.this.append(this)
except:
self.this = this
def _clone(self, f=0, c=None):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights__clone(self, f, c)
__swig_destroy__ = _SimInternalLoad_Lights_Default.delete_SimInternalLoad_Lights
__del__ = lambda self: None
SimInternalLoad_Lights_swigregister = _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_swigregister
SimInternalLoad_Lights_swigregister(SimInternalLoad_Lights)
class SimInternalLoad_Lights_Default(SimInternalLoad_Lights):
__swig_setmethods__ = {}
for _s in [SimInternalLoad_Lights]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, SimInternalLoad_Lights_Default, name, value)
__swig_getmethods__ = {}
for _s in [SimInternalLoad_Lights]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, SimInternalLoad_Lights_Default, name)
__repr__ = _swig_repr
def __init__(self, *args):
this = _SimInternalLoad_Lights_Default.new_SimInternalLoad_Lights_Default(*args)
try:
self.this.append(this)
except:
self.this = this
def _clone(self, f=0, c=None):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default__clone(self, f, c)
__swig_destroy__ = _SimInternalLoad_Lights_Default.delete_SimInternalLoad_Lights_Default
__del__ = lambda self: None
SimInternalLoad_Lights_Default_swigregister = _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_swigregister
SimInternalLoad_Lights_Default_swigregister(SimInternalLoad_Lights_Default)
class SimInternalLoad_Lights_Default_sequence(base.sequence_common):
__swig_setmethods__ = {}
for _s in [base.sequence_common]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, SimInternalLoad_Lights_Default_sequence, name, value)
__swig_getmethods__ = {}
for _s in [base.sequence_common]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, SimInternalLoad_Lights_Default_sequence, name)
__repr__ = _swig_repr
def __init__(self, *args):
this = _SimInternalLoad_Lights_Default.new_SimInternalLoad_Lights_Default_sequence(*args)
try:
self.this.append(this)
except:
self.this = this
def assign(self, n, x):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_assign(self, n, x)
def begin(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_begin(self, *args)
def end(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_end(self, *args)
def rbegin(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_rbegin(self, *args)
def rend(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_rend(self, *args)
def at(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_at(self, *args)
def front(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_front(self, *args)
def back(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_back(self, *args)
def push_back(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_push_back(self, *args)
def pop_back(self):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_pop_back(self)
def detach_back(self, pop=True):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_detach_back(self, pop)
def insert(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_insert(self, *args)
def erase(self, *args):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_erase(self, *args)
def detach(self, position, r, erase=True):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_detach(self, position, r, erase)
def swap(self, x):
return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_swap(self, x)
__swig_destroy__ = _SimInternalLoad_Lights_Default.delete_SimInternalLoad_Lights_Default_sequence
__del__ = lambda self: None
SimInternalLoad_Lights_Default_sequence_swigregister = _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_swigregister
SimInternalLoad_Lights_Default_sequence_swigregister(SimInternalLoad_Lights_Default_sequence)
# This file is compatible with both classic and new-style classes.
| [
"_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_back",
"_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_begin",
"_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_RtnAirFracFuncofPlenumTempCoef1",
"_SimInternalLoad_Lights_Default.SimInterna... | [((3458, 3551), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_Name', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_Name', (['self', '*args'], {}), '(\n self, *args)\n', (3533, 3551), False, 'import _SimInternalLoad_Lights_Default\n'), ((3620, 3727), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_ZoneOrZoneListName', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_ZoneOrZoneListName', (['self', '*args'], {}), '(\n self, *args)\n', (3709, 3727), False, 'import _SimInternalLoad_Lights_Default\n'), ((3789, 3889), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_FracRadiant', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_FracRadiant', (['self', '*args'], {}), '(\n self, *args)\n', (3871, 3889), False, 'import _SimInternalLoad_Lights_Default\n'), ((3949, 4047), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_SchedName', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_SchedName', (['self', '*args'], {}), '(\n self, *args)\n', (4029, 4047), False, 'import _SimInternalLoad_Lights_Default\n'), ((4117, 4225), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_DesignLevelCalcMeth', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_DesignLevelCalcMeth', (['self', '*args'], {}), '(\n self, *args)\n', (4207, 4225), False, 'import _SimInternalLoad_Lights_Default\n'), ((4286, 4385), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_LightLevel', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_LightLevel', (['self', '*args'], {}), '(\n self, *args)\n', (4367, 4385), False, 'import _SimInternalLoad_Lights_Default\n'), ((4457, 4567), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_PowerPerZoneFloorArea', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_PowerPerZoneFloorArea', (['self', '*args'], {}), '(\n self, *args)\n', (4549, 4567), False, 'import _SimInternalLoad_Lights_Default\n'), ((4632, 4735), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_PowerPerPerson', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_PowerPerPerson', (['self', '*args'], {}), '(\n self, *args)\n', (4717, 4735), False, 'import _SimInternalLoad_Lights_Default\n'), ((4796, 4895), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_RtnAirFrac', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_RtnAirFrac', (['self', '*args'], {}), '(\n self, *args)\n', (4877, 4895), False, 'import _SimInternalLoad_Lights_Default\n'), ((4957, 5057), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_FracVisible', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_FracVisible', (['self', '*args'], {}), '(\n self, *args)\n', (5039, 5057), False, 'import _SimInternalLoad_Lights_Default\n'), ((5123, 5227), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_FracReplaceable', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_FracReplaceable', (['self', '*args'], {}), '(\n self, *args)\n', (5209, 5227), False, 'import _SimInternalLoad_Lights_Default\n'), ((5290, 5391), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_EndUseSubCat', 
'_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_EndUseSubCat', (['self', '*args'], {}), '(\n self, *args)\n', (5373, 5391), False, 'import _SimInternalLoad_Lights_Default\n'), ((5468, 5583), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_RtnAirFracCalcFromPlenTemp', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_RtnAirFracCalcFromPlenTemp', (['self', '*args'], {}), '(\n self, *args)\n', (5565, 5583), False, 'import _SimInternalLoad_Lights_Default\n'), ((5665, 5785), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_RtnAirFracFuncofPlenumTempCoef1', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_RtnAirFracFuncofPlenumTempCoef1', (['self', '*args'], {}), '(\n self, *args)\n', (5767, 5785), False, 'import _SimInternalLoad_Lights_Default\n'), ((5867, 5987), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_RtnAirFracFuncofPlenumTempCoef2', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_RtnAirFracFuncofPlenumTempCoef2', (['self', '*args'], {}), '(\n self, *args)\n', (5969, 5987), False, 'import _SimInternalLoad_Lights_Default\n'), ((6030, 6095), '_SimInternalLoad_Lights_Default.new_SimInternalLoad_Lights', '_SimInternalLoad_Lights_Default.new_SimInternalLoad_Lights', (['*args'], {}), '(*args)\n', (6088, 6095), False, 'import _SimInternalLoad_Lights_Default\n'), ((6240, 6313), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights__clone', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights__clone', (['self', 'f', 'c'], {}), '(self, f, c)\n', (6301, 6313), False, 'import _SimInternalLoad_Lights_Default\n'), ((7225, 7298), '_SimInternalLoad_Lights_Default.new_SimInternalLoad_Lights_Default', '_SimInternalLoad_Lights_Default.new_SimInternalLoad_Lights_Default', (['*args'], {}), '(*args)\n', (7291, 7298), False, 'import _SimInternalLoad_Lights_Default\n'), ((7443, 7528), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default__clone', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default__clone', (['self', 'f', 'c'], {}), '(self,\n f, c)\n', (7512, 7528), False, 'import _SimInternalLoad_Lights_Default\n'), ((8497, 8584), '_SimInternalLoad_Lights_Default.new_SimInternalLoad_Lights_Default_sequence', '_SimInternalLoad_Lights_Default.new_SimInternalLoad_Lights_Default_sequence', (['*args'], {}), '(*\n args)\n', (8572, 8584), False, 'import _SimInternalLoad_Lights_Default\n'), ((8717, 8812), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_assign', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_assign', (['self', 'n', 'x'], {}), '(\n self, n, x)\n', (8795, 8812), False, 'import _SimInternalLoad_Lights_Default\n'), ((8852, 8947), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_begin', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_begin', (['self', '*args'], {}), '(\n self, *args)\n', (8929, 8947), False, 'import _SimInternalLoad_Lights_Default\n'), ((8985, 9078), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_end', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_end', (['self', '*args'], {}), '(\n self, *args)\n', (9060, 9078), False, 'import _SimInternalLoad_Lights_Default\n'), ((9119, 9215), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_rbegin', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_rbegin', (['self', 
'*args'], {}), '(\n self, *args)\n', (9197, 9215), False, 'import _SimInternalLoad_Lights_Default\n'), ((9254, 9348), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_rend', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_rend', (['self', '*args'], {}), '(\n self, *args)\n', (9330, 9348), False, 'import _SimInternalLoad_Lights_Default\n'), ((9385, 9477), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_at', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_at', (['self', '*args'], {}), '(self\n , *args)\n', (9459, 9477), False, 'import _SimInternalLoad_Lights_Default\n'), ((9517, 9612), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_front', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_front', (['self', '*args'], {}), '(\n self, *args)\n', (9594, 9612), False, 'import _SimInternalLoad_Lights_Default\n'), ((9651, 9745), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_back', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_back', (['self', '*args'], {}), '(\n self, *args)\n', (9727, 9745), False, 'import _SimInternalLoad_Lights_Default\n'), ((9789, 9888), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_push_back', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_push_back', (['self', '*args'], {}), '(\n self, *args)\n', (9870, 9888), False, 'import _SimInternalLoad_Lights_Default\n'), ((9924, 10015), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_pop_back', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_pop_back', (['self'], {}), '(\n self)\n', (10004, 10015), False, 'import _SimInternalLoad_Lights_Default\n'), ((10064, 10163), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_detach_back', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_detach_back', (['self', 'pop'], {}), '(\n self, pop)\n', (10147, 10163), False, 'import _SimInternalLoad_Lights_Default\n'), ((10204, 10300), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_insert', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_insert', (['self', '*args'], {}), '(\n self, *args)\n', (10282, 10300), False, 'import _SimInternalLoad_Lights_Default\n'), ((10340, 10435), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_erase', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_erase', (['self', '*args'], {}), '(\n self, *args)\n', (10417, 10435), False, 'import _SimInternalLoad_Lights_Default\n'), ((10494, 10603), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_detach', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_detach', (['self', 'position', 'r', 'erase'], {}), '(\n self, position, r, erase)\n', (10572, 10603), False, 'import _SimInternalLoad_Lights_Default\n'), ((10638, 10728), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_swap', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_swap', (['self', 'x'], {}), '(\n self, x)\n', (10714, 10728), False, 'import _SimInternalLoad_Lights_Default\n'), ((691, 768), 'imp.load_module', 'imp.load_module', (['"""_SimInternalLoad_Lights_Default"""', 'fp', 'pathname', 'description'], {}), "('_SimInternalLoad_Lights_Default', fp, pathname, description)\n", (706, 768), 
False, 'import imp\n'), ((474, 491), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (481, 491), False, 'from os.path import dirname\n')] |
import garm.indicators as gari
import ham.time_utils as hamt
import ohlcv
import luigi
import strategies as chs
from luigi.util import inherits
@inherits(chs.Strategy)
class BuyAndHold(chs.Strategy):
FN = gari.buy_and_hold_signals
def requires(self):
for m in hamt.months(self.start_date, self.end_date):
yield ohlcv.OHLCV(
self.pair, self.exchange, m, self.period,
self.destination_path)
| [
"ham.time_utils.months",
"ohlcv.OHLCV",
"luigi.util.inherits"
] | [((148, 170), 'luigi.util.inherits', 'inherits', (['chs.Strategy'], {}), '(chs.Strategy)\n', (156, 170), False, 'from luigi.util import inherits\n'), ((280, 323), 'ham.time_utils.months', 'hamt.months', (['self.start_date', 'self.end_date'], {}), '(self.start_date, self.end_date)\n', (291, 323), True, 'import ham.time_utils as hamt\n'), ((343, 419), 'ohlcv.OHLCV', 'ohlcv.OHLCV', (['self.pair', 'self.exchange', 'm', 'self.period', 'self.destination_path'], {}), '(self.pair, self.exchange, m, self.period, self.destination_path)\n', (354, 419), False, 'import ohlcv\n')] |
import pytesseract
import os
import time
import requests
import json
from PIL import Image,ImageFont,ImageDraw
# Read the configuration file
with open('config.json') as json_file:
config = json.load(json_file)
# Default directory for saving files
MAIN_PATH = './imageApi/image/'
# FONT, used to render text into an image
FONT = config['font']
def strToImg(text,mainPath):
    '''
    Convert text to an image.
    '''
if(mainPath=='' or mainPath==None):
mainPath = MAIN_PATH
W,H = (800,400)
    # Image width, height, background color
im = Image.new("RGB", (W,H), (26, 26, 26))
dr = ImageDraw.Draw(im)
    # Font and size
font = ImageFont.truetype(FONT, 44)
w,h = dr.textsize(text,font=font)
    # Position and color of the text on the background
dr.text((20, (H-h)/2), text, font=font, fill="#F3F3F3")
# im.show()
    # Path where the image is saved
path = mainPath+ str(int(time.time()*1000))+'.png'
im.save(path)
return {'success':True,'imgLoaclPath':path}
def getOCRCode(path):
    '''
    path: local image path, str
    code: returns the OCR text
    '''
    # Image.open takes the image path
image = Image.open(path)
code = pytesseract.image_to_string(image, lang='chi_sim')
print(code)
return code
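# Hedged usage sketch (added for illustration; the text is made up, and the
# output directory must already exist for strToImg to succeed):
#     img = strToImg('Hello, world', '')
#     print(getOCRCode(img['imgLoaclPath']))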
def downloadImg(imgURL,mainPath):
    '''
    Download an image.
    Input imgURL: image URL, str
    Returns imgLocalPath: path to the local image file
    '''
if(mainPath==''):
mainPath = MAIN_PATH
os.makedirs(mainPath, exist_ok=True)
headers = {
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.159 Safari/537.36'}
r = requests.get(url=imgURL, headers=headers)
imgLoaclPath = mainPath + str(int(time.time()*1000))+'.png'
with open(imgLoaclPath, 'wb') as f:
f.write(r.content)
if(r.status_code!=200):
success = False
else:
success = True
return {'success':success,'imgLoaclPath':imgLoaclPath} | [
"PIL.Image.open",
"os.makedirs",
"PIL.Image.new",
"PIL.ImageFont.truetype",
"requests.get",
"PIL.ImageDraw.Draw",
"pytesseract.image_to_string",
"json.load",
"time.time"
] | [((174, 194), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (183, 194), False, 'import json\n'), ((452, 490), 'PIL.Image.new', 'Image.new', (['"""RGB"""', '(W, H)', '(26, 26, 26)'], {}), "('RGB', (W, H), (26, 26, 26))\n", (461, 490), False, 'from PIL import Image, ImageFont, ImageDraw\n'), ((499, 517), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['im'], {}), '(im)\n', (513, 517), False, 'from PIL import Image, ImageFont, ImageDraw\n'), ((541, 569), 'PIL.ImageFont.truetype', 'ImageFont.truetype', (['FONT', '(44)'], {}), '(FONT, 44)\n', (559, 569), False, 'from PIL import Image, ImageFont, ImageDraw\n'), ((952, 968), 'PIL.Image.open', 'Image.open', (['path'], {}), '(path)\n', (962, 968), False, 'from PIL import Image, ImageFont, ImageDraw\n'), ((980, 1030), 'pytesseract.image_to_string', 'pytesseract.image_to_string', (['image'], {'lang': '"""chi_sim"""'}), "(image, lang='chi_sim')\n", (1007, 1030), False, 'import pytesseract\n'), ((1235, 1271), 'os.makedirs', 'os.makedirs', (['mainPath'], {'exist_ok': '(True)'}), '(mainPath, exist_ok=True)\n', (1246, 1271), False, 'import os\n'), ((1451, 1492), 'requests.get', 'requests.get', ([], {'url': 'imgURL', 'headers': 'headers'}), '(url=imgURL, headers=headers)\n', (1463, 1492), False, 'import requests\n'), ((753, 764), 'time.time', 'time.time', ([], {}), '()\n', (762, 764), False, 'import time\n'), ((1531, 1542), 'time.time', 'time.time', ([], {}), '()\n', (1540, 1542), False, 'import time\n')] |
# coding: utf-8
# ### Open using the Databricks platform / PySpark. This holds the code for training a RandomForest classifier on the chosen subset of important features.
# In[1]:
import os, sys
import pandas as pd
import numpy as np
from sklearn.metrics import matthews_corrcoef
import pyspark
from numpy import array
from pyspark.ml import Pipeline
from pyspark.ml.classification import RandomForestClassifier
from pyspark.ml.evaluation import MulticlassClassificationEvaluator
from pyspark.ml.feature import StringIndexer, VectorAssembler, VectorIndexer
import gc
from pyspark.sql.functions import col, count, sum, rand
REPLACE_YOUR_FILE = "/FileStore/tables/e9svdv4y1482386357547/test_numeric.csv"
df0 = sqlContext.read.format("csv").load(REPLACE_YOUR_FILE, header="true", inferSchema="true")
df = df0.na.fill(99999)
df = df.na.drop()
df.printSchema()
# In[2]:
feature=['L3_S31_F3846','L1_S24_F1578','L3_S33_F3857','L1_S24_F1406','L3_S29_F3348','L3_S33_F3863',
'L3_S29_F3427','L3_S37_F3950','L0_S9_F170', 'L3_S29_F3321','L1_S24_F1346','L3_S32_F3850',
'L3_S30_F3514','L1_S24_F1366','L2_S26_F3036']
assembler = VectorAssembler(
inputCols=feature,
outputCol='features')
data = (assembler.transform(df).select("features", df.Response.astype('double')))
(trainingData, testData) = data.randomSplit([0.8, 0.2], seed=451)
data.printSchema()
# In[3]:
cls = RandomForestClassifier(numTrees=60, seed=1111, maxDepth=15, labelCol="Response", featuresCol="features")
pipeline = Pipeline(stages=[cls])
evaluator = MulticlassClassificationEvaluator(
labelCol="Response", predictionCol="prediction", metricName="accuracy")
trainingData=trainingData.na.drop()
trainingData.printSchema()
# In[4]:
gc.collect()
model = pipeline.fit(trainingData)
# In[5]:
# making predictions
predictions = model.transform(testData)
response = predictions.select("Response").rdd.map(lambda r: r[0]).collect()
predictedValue = predictions.select("probability").rdd.map(lambda r: int(r[0][1])).collect()
mcc = matthews_corrcoef(response, predictedValue)
print (mcc)
| [
"pyspark.ml.Pipeline",
"pyspark.ml.classification.RandomForestClassifier",
"gc.collect",
"pyspark.ml.evaluation.MulticlassClassificationEvaluator",
"pyspark.ml.feature.VectorAssembler",
"sklearn.metrics.matthews_corrcoef"
] | [((1315, 1371), 'pyspark.ml.feature.VectorAssembler', 'VectorAssembler', ([], {'inputCols': 'feature', 'outputCol': '"""features"""'}), "(inputCols=feature, outputCol='features')\n", (1330, 1371), False, 'from pyspark.ml.feature import VectorAssembler\n'), ((1568, 1677), 'pyspark.ml.classification.RandomForestClassifier', 'RandomForestClassifier', ([], {'numTrees': '(60)', 'seed': '(1111)', 'maxDepth': '(15)', 'labelCol': '"""Response"""', 'featuresCol': '"""features"""'}), "(numTrees=60, seed=1111, maxDepth=15, labelCol=\n 'Response', featuresCol='features')\n", (1590, 1677), False, 'from pyspark.ml.classification import RandomForestClassifier\n'), ((1685, 1707), 'pyspark.ml.Pipeline', 'Pipeline', ([], {'stages': '[cls]'}), '(stages=[cls])\n', (1693, 1707), False, 'from pyspark.ml import Pipeline\n'), ((1720, 1830), 'pyspark.ml.evaluation.MulticlassClassificationEvaluator', 'MulticlassClassificationEvaluator', ([], {'labelCol': '"""Response"""', 'predictionCol': '"""prediction"""', 'metricName': '"""accuracy"""'}), "(labelCol='Response', predictionCol=\n 'prediction', metricName='accuracy')\n", (1753, 1830), False, 'from pyspark.ml.evaluation import MulticlassClassificationEvaluator\n'), ((1906, 1918), 'gc.collect', 'gc.collect', ([], {}), '()\n', (1916, 1918), False, 'import gc\n'), ((2201, 2244), 'sklearn.metrics.matthews_corrcoef', 'matthews_corrcoef', (['response', 'predictedValue'], {}), '(response, predictedValue)\n', (2218, 2244), False, 'from sklearn.metrics import matthews_corrcoef\n')] |
#!/usr/bin/env python3
# coding=utf-8
import os as os
import sys as sys
import io as io
import traceback as trb
import argparse as argp
import gzip as gz
import operator as op
import functools as fnt
def parse_command_line():
"""
:return:
"""
parser = argp.ArgumentParser()
parser.add_argument('--target', '-t', type=str, dest='target')
parser.add_argument('--query', '-q', type=str, dest='query')
parser.add_argument('--output', '-o', type=str, dest='output')
parser.add_argument('--switch', '-s', action='store_true', default=False, dest='switch',
help='Switch target and query in the output')
parser.add_argument('--filter', '-f', type=int, dest='filter', default=0,
help='Skip blocks smaller than this size. Default: 0')
args = parser.parse_args()
return args
def join_parts(switch, *args):
"""
:param switch:
:param args: tc, ts, te, tstr, bc, qc, qs, qe, qstr
:return:
"""
# had an annoying bug here - passed "(switch,)" instead of "switch"
# which always evaluated to True; but did not affect the one file
# where I used switch, so maybe introduced the error later...?
# anyway, just to be sure here, some manual type checking...
assert isinstance(switch, bool), 'Received wrong type for switch: {}'.format(switch)
if switch:
items = op.itemgetter(*(5, 6, 7, 3, # the new target / original query region
4, # block ID
0, 1, 2, 8)) # the new query / original target region
else:
items = op.itemgetter(*(0, 1, 2, 3, # the target region
4, # block ID
5, 6, 7, 8)) # the query region
joined = '\t'.join(items(args))
return joined
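# Illustrative example (invented values, not from the original data) of how
# `switch` reorders the joined fields: with switch=False the target block
# comes first, with switch=True the query block does.
#
#     join_parts(False, 'chr1', '0', '10', '+', '1', 'chr2', '5', '15', '-')
#     # -> 'chr1\t0\t10\t+\t1\tchr2\t5\t15\t-'
#     join_parts(True, 'chr1', '0', '10', '+', '1', 'chr2', '5', '15', '-')
#     # -> 'chr2\t5\t15\t+\t1\tchr1\t0\t10\t-'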
def main():
"""
:return:
"""
args = parse_command_line()
outbuffer = io.StringIO()
bufsize = 0
block_count = 0
block_ids = set()
build_block = fnt.partial(join_parts, *(args.switch, ))
with open(args.target, 'r') as trgfile:
with open(args.query, 'r') as qryfile:
while 1:
tb = trgfile.readline().strip()
qb = qryfile.readline().strip()
try:
tc, ts, te, tid = tb.split()
qc, qs, qe, qid = qb.split()
assert tid == qid,\
'Block mismatch for files {} and {}\nLines {} and {}'.format(os.path.basename(args.target),
os.path.basename(args.query),
tb, qb)
assert tid not in block_ids,\
'Block ID duplicate in files {} and {}\nLines {} and {}'.format(os.path.basename(args.target),
os.path.basename(args.query),
tb, qb)
tl = int(te) - int(ts)
ql = int(qe) - int(qs)
assert tl == ql,\
'Coverage mismatch for files {} and {}\nLines {} and {}'.format(os.path.basename(args.target),
os.path.basename(args.query),
tb, qb)
if tl < args.filter:
continue
block_count += 1
qstrand = qid.split('_')[-1]
#blockline = '\t'.join([tc, ts, te, '+', str(block_count),
# qc, qs, qe, qstrand])
blockline = build_block(tc, ts, te, '+',
str(block_count),
qc, qs, qe, qstrand)
bufsize += len(blockline)
outbuffer.write(blockline + '\n')
if bufsize > 100000:
with gz.open(args.output, 'at') as outfile:
_ = outfile.write(outbuffer.getvalue())
outfile.flush()
outbuffer = io.StringIO()
bufsize = 0
except ValueError:
break
with gz.open(args.output, 'at') as outfile:
_ = outfile.write(outbuffer.getvalue())
        # had a corrupted gzip once - not sure about the cause... I/O interrupted???
        outfile.flush()
return
if __name__ == '__main__':
try:
main()
except Exception as err:
trb.print_exc(file=sys.stderr)
sys.stderr.write('\nError: {}\n'.format(str(err)))
sys.exit(1)
else:
sys.exit(0)
| [
"argparse.ArgumentParser",
"gzip.open",
"functools.partial",
"os.path.basename",
"sys.exit",
"operator.itemgetter",
"io.StringIO",
"traceback.print_exc"
] | [((271, 292), 'argparse.ArgumentParser', 'argp.ArgumentParser', ([], {}), '()\n', (290, 292), True, 'import argparse as argp\n'), ((1926, 1939), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (1937, 1939), True, 'import io as io\n'), ((2016, 2056), 'functools.partial', 'fnt.partial', (['join_parts', '*(args.switch,)'], {}), '(join_parts, *(args.switch,))\n', (2027, 2056), True, 'import functools as fnt\n'), ((1390, 1433), 'operator.itemgetter', 'op.itemgetter', (['*(5, 6, 7, 3, 4, 0, 1, 2, 8)'], {}), '(*(5, 6, 7, 3, 4, 0, 1, 2, 8))\n', (1403, 1433), True, 'import operator as op\n'), ((1620, 1663), 'operator.itemgetter', 'op.itemgetter', (['*(0, 1, 2, 3, 4, 5, 6, 7, 8)'], {}), '(*(0, 1, 2, 3, 4, 5, 6, 7, 8))\n', (1633, 1663), True, 'import operator as op\n'), ((4559, 4585), 'gzip.open', 'gz.open', (['args.output', '"""at"""'], {}), "(args.output, 'at')\n", (4566, 4585), True, 'import gzip as gz\n'), ((4985, 4996), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (4993, 4996), True, 'import sys as sys\n'), ((4857, 4887), 'traceback.print_exc', 'trb.print_exc', ([], {'file': 'sys.stderr'}), '(file=sys.stderr)\n', (4870, 4887), True, 'import traceback as trb\n'), ((4955, 4966), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (4963, 4966), True, 'import sys as sys\n'), ((2510, 2539), 'os.path.basename', 'os.path.basename', (['args.target'], {}), '(args.target)\n', (2526, 2539), True, 'import os as os\n'), ((2626, 2654), 'os.path.basename', 'os.path.basename', (['args.query'], {}), '(args.query)\n', (2642, 2654), True, 'import os as os\n'), ((2887, 2916), 'os.path.basename', 'os.path.basename', (['args.target'], {}), '(args.target)\n', (2903, 2916), True, 'import os as os\n'), ((3006, 3034), 'os.path.basename', 'os.path.basename', (['args.query'], {}), '(args.query)\n', (3022, 3034), True, 'import os as os\n'), ((3344, 3373), 'os.path.basename', 'os.path.basename', (['args.target'], {}), '(args.target)\n', (3360, 3373), True, 'import os as os\n'), ((3463, 3491), 'os.path.basename', 'os.path.basename', (['args.query'], {}), '(args.query)\n', (3479, 3491), True, 'import os as os\n'), ((4439, 4452), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (4450, 4452), True, 'import io as io\n'), ((4252, 4278), 'gzip.open', 'gz.open', (['args.output', '"""at"""'], {}), "(args.output, 'at')\n", (4259, 4278), True, 'import gzip as gz\n')] |
#-*- coding: utf-8 -*-
from django.db import models
# Create your models here.
class Feature(models.Model):
day = models.SmallIntegerField()
month = models.SmallIntegerField()
year = models.SmallIntegerField()
momentum = models.FloatField(
null=True, blank=True)
day5disparity = models.FloatField(
null=True, blank=True)
day10disparity = models.FloatField(
null=True, blank=True)
stochK = models.FloatField(
null=True, blank=True)
priceVolumeTrend = models.FloatField(
null=True, blank=True)
movAverageExp = models.FloatField(
null=True, blank=True)
paraSar = models.FloatField(
null=True, blank=True)
accDistrLine = models.FloatField(
null=True, blank=True)
avTrueRange = models.FloatField(
null=True, blank=True)
indicB = models.FloatField(
null=True, blank=True)
commChanIndex = models.FloatField(
null=True, blank=True)
chaikinMF = models.FloatField(
null=True, blank=True)
detrPriceOsc = models.FloatField(
null=True, blank=True)
easeMove = models.FloatField(
null=True, blank=True)
forceIndex = models.FloatField(
null=True, blank=True)
macd = models.FloatField(
null=True, blank=True)
monneyFI = models.FloatField(
null=True, blank=True)
negVolIndex = models.FloatField(
null=True, blank=True)
percVolOsc = models.FloatField(
null=True, blank=True)
priceRelWarrent = models.FloatField(
null=True, blank=True)
priceRelAsian = models.FloatField(
null=True, blank=True)
priceRelDiana = models.FloatField(
null=True, blank=True)
priceRelTenren = models.FloatField(
null=True, blank=True)
rateChange = models.FloatField(
null=True, blank=True)
relStrengthI = models.FloatField(
null=True, blank=True)
slope = models.FloatField(
null=True, blank=True)
stdDev = models.FloatField(
null=True, blank=True)
stochOsc = models.FloatField(
null=True, blank=True)
stochRSI = models.FloatField(
null=True, blank=True)
ultimateOsc = models.FloatField(
null=True, blank=True)
williamR = models.FloatField(
null=True, blank=True)
def __unicode__(self):
return u'' + str(self.day) + '/' + str(self.month) + '/' + str(self.year)
class Meta:
abstract = True
class W(models.Model):
temperature = models.SmallIntegerField(null=True, blank=True)
humidity = models.SmallIntegerField(null=True, blank=True)
windSpeed = models.SmallIntegerField(null=True, blank=True)
pressure = models.SmallIntegerField(null=True, blank=True)
day = models.SmallIntegerField()
month = models.SmallIntegerField()
year = models.SmallIntegerField()
def __unicode__(self):
return u'' + str(self.day) + '/' + str(self.month) + '/' + str(self.year)
class Meta:
abstract = True
class Stock(models.Model):
day = models.SmallIntegerField()
month = models.SmallIntegerField()
year = models.SmallIntegerField()
open = models.DecimalField(
null=True, blank=True, max_digits=7, decimal_places=4)
close = models.DecimalField(
null=True, blank=True, max_digits=7, decimal_places=4)
low = models.DecimalField(
null=True, blank=True, max_digits=7, decimal_places=4)
high = models.DecimalField(
null=True, blank=True, max_digits=7, decimal_places=4)
adj = models.DecimalField(
null=True, blank=True, max_digits=7, decimal_places=4)
volume = models.DecimalField(
null=True, blank=True, max_digits=13, decimal_places=4)
class Meta:
abstract = True
def __unicode__(self):
return u'' + str(self.day) + '/' + str(self.month) + '/' + str(self.year)
class TyroonStock(Stock):
pass
class WarrentStock(Stock):
pass
class IndianStock(Stock):
pass
class TenRenStock(Stock):
pass
class DianaStock(Stock):
pass
class Weather(W):
pass
class dWeather(W):
pass
class ddWeather(W):
pass
class Feature35(Feature):
pass
class dFeature35(Feature):
pass
| [
"django.db.models.DecimalField",
"django.db.models.FloatField",
"django.db.models.SmallIntegerField"
] | [((121, 147), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {}), '()\n', (145, 147), False, 'from django.db import models\n'), ((160, 186), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {}), '()\n', (184, 186), False, 'from django.db import models\n'), ((198, 224), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {}), '()\n', (222, 224), False, 'from django.db import models\n'), ((240, 280), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (257, 280), False, 'from django.db import models\n'), ((310, 350), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (327, 350), False, 'from django.db import models\n'), ((381, 421), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (398, 421), False, 'from django.db import models\n'), ((444, 484), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (461, 484), False, 'from django.db import models\n'), ((517, 557), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (534, 557), False, 'from django.db import models\n'), ((587, 627), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (604, 627), False, 'from django.db import models\n'), ((651, 691), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (668, 691), False, 'from django.db import models\n'), ((720, 760), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (737, 760), False, 'from django.db import models\n'), ((788, 828), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (805, 828), False, 'from django.db import models\n'), ((851, 891), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (868, 891), False, 'from django.db import models\n'), ((921, 961), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (938, 961), False, 'from django.db import models\n'), ((987, 1027), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1004, 1027), False, 'from django.db import models\n'), ((1056, 1096), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1073, 1096), False, 'from django.db import models\n'), ((1121, 1161), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1138, 1161), False, 'from django.db import models\n'), ((1188, 1228), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1205, 1228), False, 'from django.db import models\n'), ((1249, 1289), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1266, 1289), False, 'from django.db import 
models\n'), ((1314, 1354), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1331, 1354), False, 'from django.db import models\n'), ((1382, 1422), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1399, 1422), False, 'from django.db import models\n'), ((1449, 1489), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1466, 1489), False, 'from django.db import models\n'), ((1521, 1561), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1538, 1561), False, 'from django.db import models\n'), ((1591, 1631), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1608, 1631), False, 'from django.db import models\n'), ((1661, 1701), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1678, 1701), False, 'from django.db import models\n'), ((1732, 1772), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1749, 1772), False, 'from django.db import models\n'), ((1799, 1839), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1816, 1839), False, 'from django.db import models\n'), ((1868, 1908), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1885, 1908), False, 'from django.db import models\n'), ((1930, 1970), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1947, 1970), False, 'from django.db import models\n'), ((1993, 2033), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (2010, 2033), False, 'from django.db import models\n'), ((2058, 2098), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (2075, 2098), False, 'from django.db import models\n'), ((2123, 2163), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (2140, 2163), False, 'from django.db import models\n'), ((2191, 2231), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (2208, 2231), False, 'from django.db import models\n'), ((2256, 2296), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (2273, 2296), False, 'from django.db import models\n'), ((2500, 2547), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (2524, 2547), False, 'from django.db import models\n'), ((2563, 2610), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (2587, 2610), False, 'from django.db import models\n'), ((2627, 2674), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (2651, 
2674), False, 'from django.db import models\n'), ((2690, 2737), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (2714, 2737), False, 'from django.db import models\n'), ((2748, 2774), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {}), '()\n', (2772, 2774), False, 'from django.db import models\n'), ((2787, 2813), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {}), '()\n', (2811, 2813), False, 'from django.db import models\n'), ((2825, 2851), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {}), '()\n', (2849, 2851), False, 'from django.db import models\n'), ((3042, 3068), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {}), '()\n', (3066, 3068), False, 'from django.db import models\n'), ((3081, 3107), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {}), '()\n', (3105, 3107), False, 'from django.db import models\n'), ((3119, 3145), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {}), '()\n', (3143, 3145), False, 'from django.db import models\n'), ((3157, 3231), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'null': '(True)', 'blank': '(True)', 'max_digits': '(7)', 'decimal_places': '(4)'}), '(null=True, blank=True, max_digits=7, decimal_places=4)\n', (3176, 3231), False, 'from django.db import models\n'), ((3253, 3327), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'null': '(True)', 'blank': '(True)', 'max_digits': '(7)', 'decimal_places': '(4)'}), '(null=True, blank=True, max_digits=7, decimal_places=4)\n', (3272, 3327), False, 'from django.db import models\n'), ((3347, 3421), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'null': '(True)', 'blank': '(True)', 'max_digits': '(7)', 'decimal_places': '(4)'}), '(null=True, blank=True, max_digits=7, decimal_places=4)\n', (3366, 3421), False, 'from django.db import models\n'), ((3442, 3516), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'null': '(True)', 'blank': '(True)', 'max_digits': '(7)', 'decimal_places': '(4)'}), '(null=True, blank=True, max_digits=7, decimal_places=4)\n', (3461, 3516), False, 'from django.db import models\n'), ((3536, 3610), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'null': '(True)', 'blank': '(True)', 'max_digits': '(7)', 'decimal_places': '(4)'}), '(null=True, blank=True, max_digits=7, decimal_places=4)\n', (3555, 3610), False, 'from django.db import models\n'), ((3633, 3708), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'null': '(True)', 'blank': '(True)', 'max_digits': '(13)', 'decimal_places': '(4)'}), '(null=True, blank=True, max_digits=13, decimal_places=4)\n', (3652, 3708), False, 'from django.db import models\n')] |
'''
@Description: Easter Egg
@Author: <NAME>
@Date: 2019-08-10 10:30:29
@LastEditors: <NAME>
@LastEditTime: 2019-08-10 10:36:24
'''
from flask import Blueprint
egg = Blueprint('egg', __name__)
from . import views | [
"flask.Blueprint"
] | [((167, 193), 'flask.Blueprint', 'Blueprint', (['"""egg"""', '__name__'], {}), "('egg', __name__)\n", (176, 193), False, 'from flask import Blueprint\n')] |
import pyaf.Bench.TS_datasets as tsds
import pyaf.Bench.YahooStocks as ys
import warnings
symbol_lists = tsds.get_yahoo_symbol_lists();
y_keys = sorted(symbol_lists.keys())
print(y_keys)
k = "nysecomp"
tester = ys.cYahoo_Tester(tsds.load_yahoo_stock_prices(k), "YAHOO_STOCKS_" + k);
with warnings.catch_warnings():
warnings.simplefilter("error")
tester.testSignals('VRS')
| [
"warnings.simplefilter",
"pyaf.Bench.TS_datasets.get_yahoo_symbol_lists",
"warnings.catch_warnings",
"pyaf.Bench.TS_datasets.load_yahoo_stock_prices"
] | [((106, 135), 'pyaf.Bench.TS_datasets.get_yahoo_symbol_lists', 'tsds.get_yahoo_symbol_lists', ([], {}), '()\n', (133, 135), True, 'import pyaf.Bench.TS_datasets as tsds\n'), ((231, 262), 'pyaf.Bench.TS_datasets.load_yahoo_stock_prices', 'tsds.load_yahoo_stock_prices', (['k'], {}), '(k)\n', (259, 262), True, 'import pyaf.Bench.TS_datasets as tsds\n'), ((292, 317), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (315, 317), False, 'import warnings\n'), ((323, 353), 'warnings.simplefilter', 'warnings.simplefilter', (['"""error"""'], {}), "('error')\n", (344, 353), False, 'import warnings\n')] |
import pycropml.transpiler.antlr_py.grammars
from pycropml.transpiler.antlr_py.grammars.CSharpLexer import CSharpLexer
from pycropml.transpiler.antlr_py.grammars.CSharpParser import CSharpParser
from pycropml.transpiler.antlr_py.grammars.Fortran90Lexer import Fortran90Lexer
from pycropml.transpiler.antlr_py.grammars.Fortran90Parser import Fortran90Parser
from pycropml.transpiler.antlr_py.csharp import csharp_generate_tree
from pycropml.transpiler.antlr_py.fortran import fortran_generate_tree
from antlr4 import *
import warnings
import inspect
from typing import Dict, Optional, List, Union, Type, Any, Callable
from functools import reduce
from collections import OrderedDict, namedtuple
from ast import AST, NodeTransformer
from antlr4.Token import CommonToken
from antlr4 import CommonTokenStream, ParseTreeVisitor, ParserRuleContext, RuleContext
from antlr4.tree.Tree import ErrorNode, TerminalNodeImpl, ParseTree
from antlr4.error.ErrorListener import ErrorListener, ConsoleErrorListener
from operator import methodcaller
from antlr4 import InputStream
languages = ['cs',"bioma", 'f90', 'dssat']
gen = {'cs':"csharp","bioma":"csharp", 'f90':"fortran", 'dssat':"fortran"}
NAMES = {'cs':'CSharp','sirius':'CSharp',"bioma":"CSharp", 'f90':'Fortran90', 'dssat':'Fortran90'}
def langLexerParser(ant):
generator = {
format: getattr(
getattr(
pycropml.transpiler.antlr_py.grammars,
'%s%s' % (NAMES[format], ant)),
'%s%s' % (NAMES[format], ant))
for format in languages
}
return generator
LexersGenerators = langLexerParser("Lexer")
ParsersGenerators = langLexerParser("Parser")
genTree= {
format: getattr(
getattr(
pycropml.transpiler.antlr_py,
'%s' % (gen[format])),
'%s_generate_tree' % (gen[format]))
for format in languages
}
def parsef(code, language,
start="compilation_unit",
strict = "False",
transform: Union[str, Callable] = None,
error_listener: ErrorListener = None,
):
input_stream = InputStream(code) #encoding="utf-8"
lexer = LexersGenerators[language](input_stream)
lexer.removeErrorListeners()
lexer.addErrorListener(LexerErrorListener())
stream = CommonTokenStream(lexer)
parser = ParsersGenerators[language](stream)
#tree = parser.compilation_unit()
tree = genTree[language].generate(parser)
parser.buildParseTrees = True # default
return tree
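# Hedged usage sketch (not part of the original module; the snippet is
# illustrative only): parsing a C# fragment into the generated tree.
#
#     tree = parsef("class A { }", "cs")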
"""
from antlr-ast
It allows you to use ANTLR grammars and use the parser output to generate an abstract syntax tree (AST).
https://github.com/datacamp/antlr-ast/blob/master/README.md
"""
class CaseTransformInputStream(InputStream):
"""Support case insensitive languages
https://github.com/antlr/antlr4/blob/master/doc/case-insensitive-lexing.md#custom-character-streams-approach
"""
UPPER = "upper"
LOWER = "lower"
def __init__(self, *args, transform=None, **kwargs):
if transform is None:
self.transform = lambda x: x
elif transform == self.UPPER:
self.transform = methodcaller("upper")
elif transform == self.LOWER:
self.transform = methodcaller("lower")
elif callable(transform):
self.transform = transform
else:
raise ValueError("Invalid transform")
super().__init__(*args, **kwargs)
def _loadString(self):
self._index = 0
self.data = [ord(self.transform(c)) for c in self.strdata]
self._size = len(self.data)
def __repr__(self):
return "<{} {}>".format(self.__class__.__name__, self.transform)
def dump_node(node, node_class=AST):
if isinstance(node, node_class):
fields = OrderedDict()
for name in node._fields:
attr = getattr(node, name, None)
if attr is not None:
fields[name] = dump_node(attr, node_class=node_class)
return {"type": node.__class__.__name__, "data": fields}
elif isinstance(node, list):
return [dump_node(x, node_class=node_class) for x in node]
else:
return node
FieldSpec = namedtuple("FieldSpec", ["name", "origin"])
def parse_field_spec(spec: str) -> FieldSpec:
# parse mapping for = and . # old: and indices [] -----
name, *origin = [part.strip() for part in spec.split("=")]
origin = name if not origin else origin[0]
origin = origin.split(".")
return FieldSpec(name, origin)
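# Examples (illustrative):
#     parse_field_spec("alias=a.b")  # -> FieldSpec(name='alias', origin=['a', 'b'])
#     parse_field_spec("name")       # -> FieldSpec(name='name', origin=['name'])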
class AstNodeMeta(type):
@property
def _fields(cls):
od = OrderedDict([(parse_field_spec(el).name, None) for el in cls._fields_spec])
return tuple(od)
# Speaker class ---------------------------------------------------------------
class Speaker:
def __init__(self, **cfg):
"""Initialize speaker instance, for a set of AST nodes.
Arguments:
nodes: dictionary of node names, and their human friendly names.
Each entry for a node may also be a dictionary containing
name: human friendly name, fields: a dictionary to override
the field names for that node.
fields: dictionary of human friendly field names, used as a default
for each node.
"""
self.node_names = cfg["nodes"]
self.field_names = cfg.get("fields", {})
def describe(self, node, fmt="{node_name}", field=None, **kwargs):
cls_name = node.__class__.__name__
def_field_name = (
self.field_names.get(field) or field.replace("_", " ") if field else ""
)
node_cfg = self.node_names.get(cls_name, cls_name)
node_name, field_names = self.get_info(node_cfg)
d = {
"node": node,
"field_name": field_names.get(field, def_field_name),
"node_name": node_name.format(node=node),
}
return fmt.format(**d, **kwargs)
@staticmethod
def get_info(node_cfg):
"""Return a tuple with the verbal name of a node, and a dict of field names."""
node_cfg = node_cfg if isinstance(node_cfg, dict) else {"name": node_cfg}
return node_cfg.get("name"), node_cfg.get("fields", {})
# Error Listener ------------------------------------------------------------------
# from antlr4.error.Errors import RecognitionException
class AntlrException(Exception):
def __init__(self, msg, orig):
self.msg, self.orig = msg, orig
class StrictErrorListener(ErrorListener):
# The recognizer will be the parser instance
def syntaxError(self, recognizer, badSymbol, line, col, msg, e):
msg = "line {line}:{col} {msg}".format(
badSymbol=badSymbol, line=line, col=col, msg=msg
)
raise AntlrException(msg, e)
def reportAmbiguity(
self, recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs
):
return
# raise Exception("TODO")
def reportAttemptingFullContext(
self, recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs
):
return
# raise Exception("TODO")
def reportContextSensitivity(
self, recognizer, dfa, startIndex, stopIndex, prediction, configs
):
return
# raise Exception("TODO")
class LexerErrorListener(ConsoleErrorListener):
def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
if isinstance(e.input, CaseTransformInputStream):
msg = msg + " " + repr(e.input)
super().syntaxError(recognizer, offendingSymbol, line, column, msg, e)
# Parse Tree Visitor ----------------------------------------------------------
# TODO: visitor inheritance not really needed, but indicates compatibility
# TODO: make general node (Terminal) accessible in class property (.subclasses)?
IndexReferences = Dict[str, Union[int, List[int]]]
class BaseNode(AST):
"""AST is subclassed so we can use Python ast module visiting and walking on the custom AST"""
def __init__(
self,
children: list,
field_references: IndexReferences,
label_references: IndexReferences,
ctx: Optional[ParserRuleContext] = None,
position: Optional[dict] = None,
text: Optional[str] = None,
):
self.children = children
self._field_references = field_references
self.children_by_field = materialize(self._field_references, self.children)
self._label_references = label_references
self.children_by_label = materialize(self._label_references, self.children)
self._ctx = ctx
self.position = position
self.text = text
_fields = ()
# whether to descend for selection (greater descends into lower)
_priority = 2
# getattr: return None or raise for nonexistent attr
# in Transformer conditionals:
# - getattr(obj, attr, None) works with both
# - hasattr(obj, attr) if strict
# - obj.attr if not strict
_strict = False
@classmethod
def create(
cls,
ctx: ParserRuleContext,
children: Optional[list] = None,
registry: Optional["BaseNodeRegistry"] = None,
) -> "BaseNode":
if registry is None:
registry = BaseNodeRegistry()
if children is None:
children = ctx.children
field_names = get_field_names(ctx)
children_by_field = get_field_references(ctx, field_names)
label_names = get_label_names(ctx)
children_by_label = get_field_references(ctx, label_names)
cls_name = type(ctx).__name__.split("Context")[0]
subclass = registry.get_cls(cls_name, tuple(field_names))
return subclass(children, children_by_field, children_by_label, ctx)
@classmethod
def create_cls(cls, cls_name: str, field_names: tuple) -> Type["BaseNode"]:
return type(cls_name, (cls,), {"_fields": field_names})
def __getattr__(self, name):
try:
result = self.children_by_label.get(name) or self.children_by_field[name]
except KeyError:
if self._strict:
raise AttributeError(
"{}.{} is invalid.".format(self.__class__.__name__, name)
)
else:
result = None
return result
@classmethod
def combine(cls, *fields: "BaseNode") -> List["BaseNode"]:
"""Combine fields
Creates a list field from other fields
Filters None and combines other elements in a flat list
Use in transformer methods.
"""
result = reduce(cls.extend_node_list, fields, [])
return result
@staticmethod
def extend_node_list(
acc: List["BaseNode"], new: Union[List["BaseNode"], "BaseNode"]
) -> List["BaseNode"]:
"""Extend accumulator with Node(s) from new"""
if new is None:
new = []
elif not isinstance(new, list):
new = [new]
return acc + new
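    # Illustrative behaviour of combine/extend_node_list (node names invented):
    #     BaseNode.combine(None, node_a, [node_b, node_c])
    #     # -> [node_a, node_b, node_c]  (None dropped, lists flattened)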
def get_text(self, full_text: str = None) -> Optional[str]:
# TODO implement as __str__?
# + easy to combine with str/Terminal
# + use Python instead of custom interface
# (-) very different from repr / json
text = None
if isinstance(self._ctx, (TerminalNodeImpl, RuleContext)):
if full_text is None:
text = self._ctx.getText()
elif getattr(self._ctx, "start", None) and getattr(self._ctx, "stop", None):
text = full_text[self._ctx.start.start : self._ctx.stop.stop + 1]
elif (
getattr(self._ctx, "symbol", None)
and getattr(self._ctx.symbol, "start", None)
and getattr(self._ctx.symbol, "stop", None)
):
text = full_text[self._ctx.symbol.start : self._ctx.symbol.stop + 1]
if text is None and self.text:
text = self.text
return text
def get_position(self) -> Optional[Dict[str, int]]:
position = None
ctx = self._ctx
if ctx is not None:
if isinstance(ctx, TerminalNodeImpl):
position = {
"line_start": ctx.symbol.line,
"column_start": ctx.symbol.column,
"line_end": ctx.symbol.line,
"column_end": ctx.symbol.column
+ (ctx.symbol.stop - ctx.symbol.start),
}
elif getattr(ctx, "start", None) and getattr(ctx, "stop", None):
position = {
"line_start": ctx.start.line,
"column_start": ctx.start.column,
"line_end": ctx.stop.line,
"column_end": ctx.stop.column + (ctx.stop.stop - ctx.stop.start),
}
return position or self.position
def __repr__(self):
return str({**self.children_by_field, **self.children_by_label})
# TODO:
AstNode = BaseNode
class Terminal(BaseNode):
"""This is a thin node wrapper for a string.
The node is transparent when not in debug mode.
In debug mode, it keeps the link to the corresponding ANTLR node.
"""
_fields = tuple(["value"])
DEBUG = True
DEBUG_INSTANCES = []
def __new__(cls, *args, **kwargs):
instance = super().__new__(cls, *args, **kwargs)
if cls.DEBUG:
cls.DEBUG_INSTANCES.append(instance)
return instance
else:
return args[0][0]
@classmethod
def from_text(cls, text: str, ctx: Optional[ParserRuleContext] = None):
return cls([text], {"value": 0}, {}, ctx)
def __eq__(self, other):
return self.value == other
def __str__(self):
# currently just used for better formatting in debugger
return self.value
def __repr__(self):
return "'{}'".format(self.value)
class AliasNode(BaseNode, metaclass=AstNodeMeta):
# TODO: look at AstNode methods
# defines class properties
# - as a property name to copy from ANTLR nodes
# - as a property name defined in terms of (nested) ANTLR node properties
# the field will be set to the first definition that is not undefined
_fields_spec = []
_fields = AstNodeMeta._fields
# Defines which ANTLR nodes to convert to this node. Elements can be:
# - a string: uses AstNode._from_fields as visitor
# - a tuple ('node_name', 'ast_node_class_method_name'): uses ast_node_class_method_name as visitor
# subclasses use _bind_to_visitor to create visit methods for the nodes in _rules on the ParseTreeVisitor
# using this information
_rules = []
_priority = 1
_strict = True
def __init__(self, node: BaseNode, fields: Optional[Dict[str, Any]] = None):
# TODO: keep reference to node?
# TODO: **fields? (easier notation, but hard to name future arguments
super().__init__(
node.children, node._field_references, node._label_references, node._ctx
)
fields = fields or {}
for field, value in fields.items():
if field not in self._fields:
warnings.warn("Key not in fields: {}".format(field))
setattr(self, field, value)
@classmethod
def from_spec(cls, node: BaseNode):
# TODO: no fields_spec argument as before
field_dict = {}
for field_spec in cls._fields_spec:
name, path = parse_field_spec(field_spec)
# _fields_spec can contain field multiple times
# e.g. x=a and x=b
if field_dict.get(name):
# or / elif behaviour
continue
# get node -----
field_dict[name] = cls.get_path(node, path)
return cls(node, field_dict)
@classmethod
def get_path(cls, node: BaseNode, path: List[str]):
# TODO: can be defined on FieldNode too
result = node
for i in range(len(path)):
result = getattr(result, path[i], None)
if result is None:
break
return result
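    # Illustrative (node and path are hypothetical): get_path walks nested
    # attributes and stops at the first missing link.
    #     AliasNode.get_path(node, ["body", "value"])  # ~ node.body.value, or None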
@classmethod
def bind_to_transformer(
cls,
transformer_cls: Type["BaseNodeTransformer"],
default_transform_method: str = "from_spec",
):
for rule in cls._rules:
if isinstance(rule, str):
cls_method = default_transform_method
else:
rule, cls_method = rule[:2]
transformer_method = cls.get_transformer(cls_method)
bind_to_transformer(transformer_cls, rule, transformer_method)
@classmethod
def get_transformer(cls, method_name: str):
"""Get method to bind to visitor"""
transform_function = getattr(cls, method_name)
assert callable(transform_function)
def transformer_method(self, node):
kwargs = {}
if inspect.signature(transform_function).parameters.get("helper"):
kwargs["helper"] = self.helper
return transform_function(node, **kwargs)
return transformer_method
class BaseNodeRegistry:
def __init__(self):
self.dynamic_node_classes = {}
def get_cls(self, cls_name: str, field_names: tuple) -> Type[BaseNode]:
""""""
if cls_name not in self.dynamic_node_classes:
self.dynamic_node_classes[cls_name] = BaseNode.create_cls(
cls_name, field_names
)
else:
existing_cls = self.dynamic_node_classes[cls_name]
all_fields = tuple(set(existing_cls._fields) | set(field_names))
if len(all_fields) > len(existing_cls._fields):
existing_cls._fields = all_fields
return self.dynamic_node_classes[cls_name]
def isinstance(self, instance: BaseNode, class_name: str) -> bool:
"""Check if a BaseNode is an instance of a registered dynamic class"""
if isinstance(instance, BaseNode):
klass = self.dynamic_node_classes.get(class_name, None)
if klass:
return isinstance(instance, klass)
# Not an instance of a class in the registry
return False
else:
raise TypeError("This function can only be used for BaseNode objects")
# TODO: test: if 'visit' in method, it has to be as 'visit_'
class BaseNodeTransformer(NodeTransformer):
def __init__(self, registry: BaseNodeRegistry):
self.helper = TransformerHelper(registry)
def visit(self, node: BaseNode):
# TODO: I think transform_ + node.__class__.__name__ would be better/clearer then
# as the node methods don't need to do any visiting (which is completely done by visit and generic_visit)
method = "visit_" + type(node).__name__
transformer = getattr(self, method, None)
if transformer is None:
return self.generic_visit(node)
else:
alias = transformer(node)
if isinstance(alias, AliasNode) or alias == node:
# this prevents infinite recursion and visiting
# AliasNodes with a name that is also the name of a BaseNode
if isinstance(alias, BaseNode):
self.generic_visit(alias)
else:
# visit BaseNode (e.g. result of Transformer method)
if isinstance(alias, list):
# Transformer method can return array instead of node
alias = [
self.visit(el) if isinstance(el, BaseNode) else el
for el in alias
] # TODO: test
elif isinstance(alias, BaseNode):
alias = self.visit(alias)
return alias
def visit_Terminal(self, terminal: Terminal) -> Terminal:
""" Handle Terminal the same as other non-node types"""
return terminal
@classmethod
def bind_alias_nodes(cls, alias_classes: List[Type[AliasNode]]):
for item in alias_classes:
if getattr(item, "_rules", None) is not None:
item.bind_to_transformer(cls)
def bind_to_transformer(
transformer_cls: Type[BaseNodeTransformer],
rule_name: str,
transformer_method: Callable,
):
"""Assign AST node class constructors to parse tree visitors."""
setattr(transformer_cls, get_transformer_method_name(rule_name), transformer_method)
def get_transformer_method_name(rule_name: str) -> str:
return "visit_{}".format(rule_name[0].upper() + rule_name[1:])
class TransformerHelper:
def __init__(self, registry: BaseNodeRegistry):
self.registry = registry
def isinstance(self, *args):
return self.registry.isinstance(*args)
def get_alias_nodes(items) -> List[Type[AstNode]]:
return list(
filter(
lambda item: inspect.isclass(item) and issubclass(item, AliasNode), items
)
)
def simplify_tree(tree, unpack_lists=True, in_list=False):
"""Recursively unpack single-item lists and objects where fields and labels only reference a single child
:param tree: the tree to simplify (mutating!)
:param unpack_lists: whether single-item lists should be replaced by that item
:param in_list: this is used to prevent unpacking a node in a list as AST visit can't handle nested lists
"""
# TODO: copy (or (de)serialize)? outside this function?
if isinstance(tree, BaseNode) and not isinstance(tree, Terminal):
used_fields = [field for field in tree._fields if getattr(tree, field, False)]
if len(used_fields) == 1:
result = getattr(tree, used_fields[0])
else:
result = None
if (
len(used_fields) != 1
or isinstance(tree, AliasNode)
or (in_list and isinstance(result, list))
):
result = tree
for field in tree._fields:
old_value = getattr(tree, field, None)
if old_value:
setattr(
result,
field,
simplify_tree(old_value, unpack_lists=unpack_lists),
)
return result
assert result is not None
elif isinstance(tree, list) and len(tree) == 1 and unpack_lists:
result = tree[0]
else:
if isinstance(tree, list):
result = [
simplify_tree(el, unpack_lists=unpack_lists, in_list=True)
for el in tree
]
else:
result = tree
return result
return simplify_tree(result, unpack_lists=unpack_lists)
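# Hedged sketch (hypothetical node classes) of the effect: a BaseNode whose
# only populated field is `expr` collapses into that child, and a single-item
# list collapses to its element when unpack_lists=True.
#     simplify_tree(Wrapper(expr=child))  # -> child
#     simplify_tree([child])              # -> child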
class BaseAstVisitor(ParseTreeVisitor):
"""Visitor that creates a high level tree
~ ANTLR tree serializer
+ automatic node creation using field and label detection
+ alias nodes can work on tree without (ANTLR) visitor
Used from BaseAstVisitor: visitTerminal, visitErrorNode
TODO:
- [done] support labels
- [done] make compatible with AST: _fields = () (should only every child once)
- [done] include child_index to filter unique elements + order
- [done] memoize dynamic classes, to have list + make instance checks work
- [done] tree simplification as part of AliasNode
- [done] flatten nested list (see select with dynamic clause ordering)
- combine terminals / error nodes
- serialize highlight info
- [done] make compatible with AstNode & AstModule in protowhat (+ shellwhat usage: bashlex + osh parser)
- combining fields & labels dicts needed?
- use exact ANTLR names in _rules (capitalize name without changing other casing)
- add labels to _fields if not overlapping with fields from rules
- [done] eliminate overhead of alias parsing (store ref to child index, get children on alias access)
- [necessary?] grammar must use lexer or grammar rules for elements that should be in the tree
and literals for elements that cannot
currently:
- Use AliasNode to add labels to _fields, define custom fields and omit fields
- Use Transformer to replace a node by a combination of fields
- [rejected] alternative dynamic class naming:
- pass parse start to visitor constructor, use as init for self.current_node
- set self.current_node to field.__name__ before self.visit_field
- use self.current_node to create dynamic classes
(does not use #RuleAlias names in grammar)
(other approach: transforming returned dict, needs more work for arrays + top level)
Higher order visitor (or integrated)
- [alternative] allow node aliases (~ AstNode._rules) by dynamically creating a class inheriting from the dynamic node class
(multiple inheritance if node is alias for multiple nodes, class has combined _fields for AST compatibility
- [alternative] allow field aliases using .aliases property with defaultdict(list) (~ AstNode._fields_spec)
- dynamic fields? (~ visit_path)
test code in parse:
tree = parse_ast(grammar, sql_text, start, **kwargs)
field_tree = BaseAstVisitor().visit(tree)
alias_tree = AliasVisitor(Transformer()).visit(field_tree)
import ast
nodes = [el for el in ast.walk(field_tree)]
import json
json_str = json.dumps(field_tree, default=lambda o: o.to_json())
"""
def __init__(self, registry: BaseNodeRegistry):
self.registry = registry
def visitChildren(
self, node: ParserRuleContext, predicate=None, simplify=False
) -> BaseNode:
# children is None if all parts of a grammar rule are optional and absent
children = [self.visit(child) for child in node.children or []]
instance = BaseNode.create(node, children, self.registry)
return instance
def visitTerminal(self, ctx: ParserRuleContext) -> Terminal:
"""Converts case insensitive keywords and identifiers to lowercase"""
text = ctx.getText()
return Terminal.from_text(text, ctx)
def visitErrorNode(self, node: ErrorNode):
return None
# ANTLR helpers
def get_field(ctx: ParserRuleContext, field: str):
"""Helper to get the value of a field"""
# field can be a string or a node attribute
if isinstance(field, str):
field = getattr(ctx, field, None)
# when not alias needs to be called
if callable(field):
field = field()
# when alias set on token, need to go from CommonToken -> Terminal Node
elif isinstance(field, CommonToken):
# giving a name to lexer rules sets it to a token,
# rather than the terminal node corresponding to that token
# so we need to find it in children
field = next(
filter(lambda c: getattr(c, "symbol", None) is field, ctx.children)
)
return field
def get_field_references(
ctx: ParserRuleContext, field_names: List[str], simplify=False
) -> Dict[str, Any]:
"""
Create a mapping from fields to corresponding child indices
:param ctx: ANTLR node
:param field_names: list of strings
:param simplify: if True, omits fields with empty lists or None
this makes it easy to detect nodes that only use a single field
but it requires more work to combine fields that can be empty
:return: mapping str -> int | int[]
"""
field_dict = {}
for field_name in field_names:
field = get_field(ctx, field_name)
if (
not simplify
or field is not None
and (not isinstance(field, list) or len(field) > 0)
):
if isinstance(field, list):
value = [ctx.children.index(el) for el in field]
elif field is not None:
value = ctx.children.index(field)
else:
value = None
field_dict[field_name] = value
return field_dict
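# Illustrative result (hypothetical ctx): for a context whose children are
# [name_node, arg1, arg2] with field `name` -> name_node and field `args` ->
# [arg1, arg2], this returns {"name": 0, "args": [1, 2]}.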
def materialize(reference_dict: IndexReferences, source: List[Any]) -> Dict[str, Any]:
"""
Replace indices by actual elements in a reference mapping
:param reference_dict: mapping str -> int | int[]
:param source: list of elements
:return: mapping str -> element | element[]
"""
materialized_dict = {}
for field in reference_dict:
reference = reference_dict[field]
if isinstance(reference, list):
materialized_dict[field] = [source[index] for index in reference]
elif reference is not None:
materialized_dict[field] = source[reference]
else:
materialized_dict[field] = None
return materialized_dict
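# Example (illustrative): indices are resolved against the children list.
#     materialize({"name": 0, "args": [1, 2]}, ["f", "x", "y"])
#     # -> {"name": "f", "args": ["x", "y"]}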
def get_field_names(ctx: ParserRuleContext):
"""Get fields defined in an ANTLR context for a parser rule"""
# this does not include labels and literals, only rule names and token names
# TODO: check ANTLR parser template for full exclusion list
fields = [
field
for field in type(ctx).__dict__
if not field.startswith("__")
and field not in ["accept", "enterRule", "exitRule", "getRuleIndex", "copyFrom",'OPEN_BRACE',"CLOSE_BRACE", "COMMA"]
]
return fields
def get_label_names(ctx: ParserRuleContext):
"""Get labels defined in an ANTLR context for a parser rule"""
labels = [
label
for label in ctx.__dict__
if not label.startswith("_")
and label
not in [
"children",
"exception",
"invokingState",
"parentCtx",
"parser",
"start",
"stop",
]
]
return labels
| [
"collections.OrderedDict",
"collections.namedtuple",
"functools.reduce",
"operator.methodcaller",
"inspect.signature",
"antlr4.CommonTokenStream",
"antlr4.InputStream",
"inspect.isclass"
] | [((4280, 4323), 'collections.namedtuple', 'namedtuple', (['"""FieldSpec"""', "['name', 'origin']"], {}), "('FieldSpec', ['name', 'origin'])\n", (4290, 4323), False, 'from collections import OrderedDict, namedtuple\n'), ((2199, 2216), 'antlr4.InputStream', 'InputStream', (['code'], {}), '(code)\n', (2210, 2216), False, 'from antlr4 import InputStream\n'), ((2384, 2408), 'antlr4.CommonTokenStream', 'CommonTokenStream', (['lexer'], {}), '(lexer)\n', (2401, 2408), False, 'from antlr4 import CommonTokenStream, ParseTreeVisitor, ParserRuleContext, RuleContext\n'), ((3875, 3888), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (3886, 3888), False, 'from collections import OrderedDict, namedtuple\n'), ((10735, 10775), 'functools.reduce', 'reduce', (['cls.extend_node_list', 'fields', '[]'], {}), '(cls.extend_node_list, fields, [])\n', (10741, 10775), False, 'from functools import reduce\n'), ((3238, 3259), 'operator.methodcaller', 'methodcaller', (['"""upper"""'], {}), "('upper')\n", (3250, 3259), False, 'from operator import methodcaller\n'), ((3327, 3348), 'operator.methodcaller', 'methodcaller', (['"""lower"""'], {}), "('lower')\n", (3339, 3348), False, 'from operator import methodcaller\n'), ((21000, 21021), 'inspect.isclass', 'inspect.isclass', (['item'], {}), '(item)\n', (21015, 21021), False, 'import inspect\n'), ((17031, 17068), 'inspect.signature', 'inspect.signature', (['transform_function'], {}), '(transform_function)\n', (17048, 17068), False, 'import inspect\n')] |
# coding: utf-8
"""
Gitea API.
This documentation describes the Gitea API. # noqa: E501
OpenAPI spec version: 1.16.7
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class TimelineComment(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'assignee': 'User',
'assignee_team': 'Team',
'body': 'str',
'created_at': 'datetime',
'dependent_issue': 'Issue',
'html_url': 'str',
'id': 'int',
'issue_url': 'str',
'label': 'Label',
'milestone': 'Milestone',
'new_ref': 'str',
'new_title': 'str',
'old_milestone': 'Milestone',
'old_project_id': 'int',
'old_ref': 'str',
'old_title': 'str',
'project_id': 'int',
'pull_request_url': 'str',
'ref_action': 'str',
'ref_comment': 'Comment',
'ref_commit_sha': 'str',
'ref_issue': 'Issue',
'removed_assignee': 'bool',
'resolve_doer': 'User',
'review_id': 'int',
'tracked_time': 'TrackedTime',
'type': 'str',
'updated_at': 'datetime',
'user': 'User'
}
attribute_map = {
'assignee': 'assignee',
'assignee_team': 'assignee_team',
'body': 'body',
'created_at': 'created_at',
'dependent_issue': 'dependent_issue',
'html_url': 'html_url',
'id': 'id',
'issue_url': 'issue_url',
'label': 'label',
'milestone': 'milestone',
'new_ref': 'new_ref',
'new_title': 'new_title',
'old_milestone': 'old_milestone',
'old_project_id': 'old_project_id',
'old_ref': 'old_ref',
'old_title': 'old_title',
'project_id': 'project_id',
'pull_request_url': 'pull_request_url',
'ref_action': 'ref_action',
'ref_comment': 'ref_comment',
'ref_commit_sha': 'ref_commit_sha',
'ref_issue': 'ref_issue',
'removed_assignee': 'removed_assignee',
'resolve_doer': 'resolve_doer',
'review_id': 'review_id',
'tracked_time': 'tracked_time',
'type': 'type',
'updated_at': 'updated_at',
'user': 'user'
}
def __init__(self, assignee=None, assignee_team=None, body=None, created_at=None, dependent_issue=None, html_url=None, id=None, issue_url=None, label=None, milestone=None, new_ref=None, new_title=None, old_milestone=None, old_project_id=None, old_ref=None, old_title=None, project_id=None, pull_request_url=None, ref_action=None, ref_comment=None, ref_commit_sha=None, ref_issue=None, removed_assignee=None, resolve_doer=None, review_id=None, tracked_time=None, type=None, updated_at=None, user=None): # noqa: E501
"""TimelineComment - a model defined in Swagger""" # noqa: E501
self._assignee = None
self._assignee_team = None
self._body = None
self._created_at = None
self._dependent_issue = None
self._html_url = None
self._id = None
self._issue_url = None
self._label = None
self._milestone = None
self._new_ref = None
self._new_title = None
self._old_milestone = None
self._old_project_id = None
self._old_ref = None
self._old_title = None
self._project_id = None
self._pull_request_url = None
self._ref_action = None
self._ref_comment = None
self._ref_commit_sha = None
self._ref_issue = None
self._removed_assignee = None
self._resolve_doer = None
self._review_id = None
self._tracked_time = None
self._type = None
self._updated_at = None
self._user = None
self.discriminator = None
if assignee is not None:
self.assignee = assignee
if assignee_team is not None:
self.assignee_team = assignee_team
if body is not None:
self.body = body
if created_at is not None:
self.created_at = created_at
if dependent_issue is not None:
self.dependent_issue = dependent_issue
if html_url is not None:
self.html_url = html_url
if id is not None:
self.id = id
if issue_url is not None:
self.issue_url = issue_url
if label is not None:
self.label = label
if milestone is not None:
self.milestone = milestone
if new_ref is not None:
self.new_ref = new_ref
if new_title is not None:
self.new_title = new_title
if old_milestone is not None:
self.old_milestone = old_milestone
if old_project_id is not None:
self.old_project_id = old_project_id
if old_ref is not None:
self.old_ref = old_ref
if old_title is not None:
self.old_title = old_title
if project_id is not None:
self.project_id = project_id
if pull_request_url is not None:
self.pull_request_url = pull_request_url
if ref_action is not None:
self.ref_action = ref_action
if ref_comment is not None:
self.ref_comment = ref_comment
if ref_commit_sha is not None:
self.ref_commit_sha = ref_commit_sha
if ref_issue is not None:
self.ref_issue = ref_issue
if removed_assignee is not None:
self.removed_assignee = removed_assignee
if resolve_doer is not None:
self.resolve_doer = resolve_doer
if review_id is not None:
self.review_id = review_id
if tracked_time is not None:
self.tracked_time = tracked_time
if type is not None:
self.type = type
if updated_at is not None:
self.updated_at = updated_at
if user is not None:
self.user = user
@property
def assignee(self):
"""Gets the assignee of this TimelineComment. # noqa: E501
:return: The assignee of this TimelineComment. # noqa: E501
:rtype: User
"""
return self._assignee
@assignee.setter
def assignee(self, assignee):
"""Sets the assignee of this TimelineComment.
:param assignee: The assignee of this TimelineComment. # noqa: E501
:type: User
"""
self._assignee = assignee
@property
def assignee_team(self):
"""Gets the assignee_team of this TimelineComment. # noqa: E501
:return: The assignee_team of this TimelineComment. # noqa: E501
:rtype: Team
"""
return self._assignee_team
@assignee_team.setter
def assignee_team(self, assignee_team):
"""Sets the assignee_team of this TimelineComment.
:param assignee_team: The assignee_team of this TimelineComment. # noqa: E501
:type: Team
"""
self._assignee_team = assignee_team
@property
def body(self):
"""Gets the body of this TimelineComment. # noqa: E501
:return: The body of this TimelineComment. # noqa: E501
:rtype: str
"""
return self._body
@body.setter
def body(self, body):
"""Sets the body of this TimelineComment.
:param body: The body of this TimelineComment. # noqa: E501
:type: str
"""
self._body = body
@property
def created_at(self):
"""Gets the created_at of this TimelineComment. # noqa: E501
:return: The created_at of this TimelineComment. # noqa: E501
:rtype: datetime
"""
return self._created_at
@created_at.setter
def created_at(self, created_at):
"""Sets the created_at of this TimelineComment.
:param created_at: The created_at of this TimelineComment. # noqa: E501
:type: datetime
"""
self._created_at = created_at
@property
def dependent_issue(self):
"""Gets the dependent_issue of this TimelineComment. # noqa: E501
:return: The dependent_issue of this TimelineComment. # noqa: E501
:rtype: Issue
"""
return self._dependent_issue
@dependent_issue.setter
def dependent_issue(self, dependent_issue):
"""Sets the dependent_issue of this TimelineComment.
:param dependent_issue: The dependent_issue of this TimelineComment. # noqa: E501
:type: Issue
"""
self._dependent_issue = dependent_issue
@property
def html_url(self):
"""Gets the html_url of this TimelineComment. # noqa: E501
:return: The html_url of this TimelineComment. # noqa: E501
:rtype: str
"""
return self._html_url
@html_url.setter
def html_url(self, html_url):
"""Sets the html_url of this TimelineComment.
:param html_url: The html_url of this TimelineComment. # noqa: E501
:type: str
"""
self._html_url = html_url
@property
def id(self):
"""Gets the id of this TimelineComment. # noqa: E501
:return: The id of this TimelineComment. # noqa: E501
:rtype: int
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this TimelineComment.
:param id: The id of this TimelineComment. # noqa: E501
:type: int
"""
self._id = id
@property
def issue_url(self):
"""Gets the issue_url of this TimelineComment. # noqa: E501
:return: The issue_url of this TimelineComment. # noqa: E501
:rtype: str
"""
return self._issue_url
@issue_url.setter
def issue_url(self, issue_url):
"""Sets the issue_url of this TimelineComment.
:param issue_url: The issue_url of this TimelineComment. # noqa: E501
:type: str
"""
self._issue_url = issue_url
@property
def label(self):
"""Gets the label of this TimelineComment. # noqa: E501
:return: The label of this TimelineComment. # noqa: E501
:rtype: Label
"""
return self._label
@label.setter
def label(self, label):
"""Sets the label of this TimelineComment.
:param label: The label of this TimelineComment. # noqa: E501
:type: Label
"""
self._label = label
@property
def milestone(self):
"""Gets the milestone of this TimelineComment. # noqa: E501
:return: The milestone of this TimelineComment. # noqa: E501
:rtype: Milestone
"""
return self._milestone
@milestone.setter
def milestone(self, milestone):
"""Sets the milestone of this TimelineComment.
:param milestone: The milestone of this TimelineComment. # noqa: E501
:type: Milestone
"""
self._milestone = milestone
@property
def new_ref(self):
"""Gets the new_ref of this TimelineComment. # noqa: E501
:return: The new_ref of this TimelineComment. # noqa: E501
:rtype: str
"""
return self._new_ref
@new_ref.setter
def new_ref(self, new_ref):
"""Sets the new_ref of this TimelineComment.
:param new_ref: The new_ref of this TimelineComment. # noqa: E501
:type: str
"""
self._new_ref = new_ref
@property
def new_title(self):
"""Gets the new_title of this TimelineComment. # noqa: E501
:return: The new_title of this TimelineComment. # noqa: E501
:rtype: str
"""
return self._new_title
@new_title.setter
def new_title(self, new_title):
"""Sets the new_title of this TimelineComment.
:param new_title: The new_title of this TimelineComment. # noqa: E501
:type: str
"""
self._new_title = new_title
@property
def old_milestone(self):
"""Gets the old_milestone of this TimelineComment. # noqa: E501
:return: The old_milestone of this TimelineComment. # noqa: E501
:rtype: Milestone
"""
return self._old_milestone
@old_milestone.setter
def old_milestone(self, old_milestone):
"""Sets the old_milestone of this TimelineComment.
:param old_milestone: The old_milestone of this TimelineComment. # noqa: E501
:type: Milestone
"""
self._old_milestone = old_milestone
@property
def old_project_id(self):
"""Gets the old_project_id of this TimelineComment. # noqa: E501
:return: The old_project_id of this TimelineComment. # noqa: E501
:rtype: int
"""
return self._old_project_id
@old_project_id.setter
def old_project_id(self, old_project_id):
"""Sets the old_project_id of this TimelineComment.
:param old_project_id: The old_project_id of this TimelineComment. # noqa: E501
:type: int
"""
self._old_project_id = old_project_id
@property
def old_ref(self):
"""Gets the old_ref of this TimelineComment. # noqa: E501
:return: The old_ref of this TimelineComment. # noqa: E501
:rtype: str
"""
return self._old_ref
@old_ref.setter
def old_ref(self, old_ref):
"""Sets the old_ref of this TimelineComment.
:param old_ref: The old_ref of this TimelineComment. # noqa: E501
:type: str
"""
self._old_ref = old_ref
@property
def old_title(self):
"""Gets the old_title of this TimelineComment. # noqa: E501
:return: The old_title of this TimelineComment. # noqa: E501
:rtype: str
"""
return self._old_title
@old_title.setter
def old_title(self, old_title):
"""Sets the old_title of this TimelineComment.
:param old_title: The old_title of this TimelineComment. # noqa: E501
:type: str
"""
self._old_title = old_title
@property
def project_id(self):
"""Gets the project_id of this TimelineComment. # noqa: E501
:return: The project_id of this TimelineComment. # noqa: E501
:rtype: int
"""
return self._project_id
@project_id.setter
def project_id(self, project_id):
"""Sets the project_id of this TimelineComment.
:param project_id: The project_id of this TimelineComment. # noqa: E501
:type: int
"""
self._project_id = project_id
@property
def pull_request_url(self):
"""Gets the pull_request_url of this TimelineComment. # noqa: E501
:return: The pull_request_url of this TimelineComment. # noqa: E501
:rtype: str
"""
return self._pull_request_url
@pull_request_url.setter
def pull_request_url(self, pull_request_url):
"""Sets the pull_request_url of this TimelineComment.
:param pull_request_url: The pull_request_url of this TimelineComment. # noqa: E501
:type: str
"""
self._pull_request_url = pull_request_url
@property
def ref_action(self):
"""Gets the ref_action of this TimelineComment. # noqa: E501
:return: The ref_action of this TimelineComment. # noqa: E501
:rtype: str
"""
return self._ref_action
@ref_action.setter
def ref_action(self, ref_action):
"""Sets the ref_action of this TimelineComment.
:param ref_action: The ref_action of this TimelineComment. # noqa: E501
:type: str
"""
self._ref_action = ref_action
@property
def ref_comment(self):
"""Gets the ref_comment of this TimelineComment. # noqa: E501
:return: The ref_comment of this TimelineComment. # noqa: E501
:rtype: Comment
"""
return self._ref_comment
@ref_comment.setter
def ref_comment(self, ref_comment):
"""Sets the ref_comment of this TimelineComment.
:param ref_comment: The ref_comment of this TimelineComment. # noqa: E501
:type: Comment
"""
self._ref_comment = ref_comment
@property
def ref_commit_sha(self):
"""Gets the ref_commit_sha of this TimelineComment. # noqa: E501
commit SHA where issue/PR was referenced # noqa: E501
:return: The ref_commit_sha of this TimelineComment. # noqa: E501
:rtype: str
"""
return self._ref_commit_sha
@ref_commit_sha.setter
def ref_commit_sha(self, ref_commit_sha):
"""Sets the ref_commit_sha of this TimelineComment.
commit SHA where issue/PR was referenced # noqa: E501
:param ref_commit_sha: The ref_commit_sha of this TimelineComment. # noqa: E501
:type: str
"""
self._ref_commit_sha = ref_commit_sha
@property
def ref_issue(self):
"""Gets the ref_issue of this TimelineComment. # noqa: E501
:return: The ref_issue of this TimelineComment. # noqa: E501
:rtype: Issue
"""
return self._ref_issue
@ref_issue.setter
def ref_issue(self, ref_issue):
"""Sets the ref_issue of this TimelineComment.
:param ref_issue: The ref_issue of this TimelineComment. # noqa: E501
:type: Issue
"""
self._ref_issue = ref_issue
@property
def removed_assignee(self):
"""Gets the removed_assignee of this TimelineComment. # noqa: E501
whether the assignees were removed or added # noqa: E501
:return: The removed_assignee of this TimelineComment. # noqa: E501
:rtype: bool
"""
return self._removed_assignee
@removed_assignee.setter
def removed_assignee(self, removed_assignee):
"""Sets the removed_assignee of this TimelineComment.
whether the assignees were removed or added # noqa: E501
:param removed_assignee: The removed_assignee of this TimelineComment. # noqa: E501
:type: bool
"""
self._removed_assignee = removed_assignee
@property
def resolve_doer(self):
"""Gets the resolve_doer of this TimelineComment. # noqa: E501
:return: The resolve_doer of this TimelineComment. # noqa: E501
:rtype: User
"""
return self._resolve_doer
@resolve_doer.setter
def resolve_doer(self, resolve_doer):
"""Sets the resolve_doer of this TimelineComment.
:param resolve_doer: The resolve_doer of this TimelineComment. # noqa: E501
:type: User
"""
self._resolve_doer = resolve_doer
@property
def review_id(self):
"""Gets the review_id of this TimelineComment. # noqa: E501
:return: The review_id of this TimelineComment. # noqa: E501
:rtype: int
"""
return self._review_id
@review_id.setter
def review_id(self, review_id):
"""Sets the review_id of this TimelineComment.
:param review_id: The review_id of this TimelineComment. # noqa: E501
:type: int
"""
self._review_id = review_id
@property
def tracked_time(self):
"""Gets the tracked_time of this TimelineComment. # noqa: E501
:return: The tracked_time of this TimelineComment. # noqa: E501
:rtype: TrackedTime
"""
return self._tracked_time
@tracked_time.setter
def tracked_time(self, tracked_time):
"""Sets the tracked_time of this TimelineComment.
:param tracked_time: The tracked_time of this TimelineComment. # noqa: E501
:type: TrackedTime
"""
self._tracked_time = tracked_time
@property
def type(self):
"""Gets the type of this TimelineComment. # noqa: E501
:return: The type of this TimelineComment. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this TimelineComment.
:param type: The type of this TimelineComment. # noqa: E501
:type: str
"""
self._type = type
@property
def updated_at(self):
"""Gets the updated_at of this TimelineComment. # noqa: E501
:return: The updated_at of this TimelineComment. # noqa: E501
:rtype: datetime
"""
return self._updated_at
@updated_at.setter
def updated_at(self, updated_at):
"""Sets the updated_at of this TimelineComment.
:param updated_at: The updated_at of this TimelineComment. # noqa: E501
:type: datetime
"""
self._updated_at = updated_at
@property
def user(self):
"""Gets the user of this TimelineComment. # noqa: E501
:return: The user of this TimelineComment. # noqa: E501
:rtype: User
"""
return self._user
@user.setter
def user(self, user):
"""Sets the user of this TimelineComment.
:param user: The user of this TimelineComment. # noqa: E501
:type: User
"""
self._user = user
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(TimelineComment, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, TimelineComment):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
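# Minimal usage sketch (illustrative; not part of the generated client):
# construct a comment and serialize it -- attributes left unset stay None.
def _demo_timeline_comment():
    comment = TimelineComment(id=1, type="comment", body="LGTM")
    return comment.to_dict()["body"]  # -> 'LGTM'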
| [
"six.iteritems"
] | [((21746, 21779), 'six.iteritems', 'six.iteritems', (['self.swagger_types'], {}), '(self.swagger_types)\n', (21759, 21779), False, 'import six\n')] |
import string
import warnings
import re
from . import util
import spacy
class FileParser(object):
def __init__(self,
file_parser='txt',
xml_node_path=None, fparser=None):
if file_parser not in ['txt', 'xml', 'defined']:
msg = 'file_parser should be txt, xml or defined, not "{file_parser}"'
raise ValueError(msg.format(file_parser=file_parser))
if file_parser == 'defined' and fparser is None:
            msg = 'Please define your own file_parser.'
raise ValueError(msg)
self.file_parser = file_parser
self.xml_node_path = xml_node_path
self.fparser = fparser
def xml_parser(self, file_path, xml_node_path):
for paragraph in util.search_all_specific_nodes_in_xml_known_node_path(file_path, xml_node_path):
for sent in util.tokenize_informal_paragraph_into_sentences(paragraph):
yield str.strip(sent)
def txt_parser(self, file_path):
with open(file_path, 'r', encoding='utf-8') as file:
for line in file:
yield str.strip(line)
def __call__(self, file_path):
if self.file_parser == 'txt':
for sent in self.txt_parser(file_path):
yield sent
if self.file_parser == 'xml':
for sent in self.xml_parser(file_path, self.xml_node_path):
yield sent
if self.file_parser == 'defined':
for sent in self.fparser(file_path):
yield sent
class WordPreprocessor(object):
    # defaults for these options come from the config file.
def __init__(self, remove_stop_words, remove_numbers, replace_digits_to_zeros, remove_punctuations,
stem_word, lowercase, wpreprocessor):
self.remove_stop_words = remove_stop_words
self.remove_numbers = remove_numbers
self.replace_digits_to_zeros = replace_digits_to_zeros
self.remove_punctuations = remove_punctuations
self.stem_word = stem_word
self.lowercase = lowercase
self.wpreprocessor = wpreprocessor
punctuations = set(string.punctuation)
punctuations.update({'“', '”', '—'}) # English
punctuations.update({'...', '«', '»'}) # French
self.puncs = punctuations
def apply(self, word, spacy_loader=None):
# Removing
if self.remove_numbers and word.isnumeric():
return ''
if self.replace_digits_to_zeros:
            word = re.sub(r'\d', '0', word)
if self.remove_punctuations:
if all(c in self.puncs for c in word):
return ''
# Remove combinations of punctuations and digits
if self.remove_numbers and self.remove_punctuations:
if all(j.isdigit() or j in self.puncs for j in word):
return ''
# remove stop words
if self.remove_stop_words and spacy_loader.vocab[word].is_stop:
# print(word, 'is stop words')
return ''
# Stem word
if self.stem_word:
word = util.stem_word(word)
# Make all words in lowercase
if self.lowercase:
word = word.lower()
# customized word preprocessor
if self.wpreprocessor is not None:
if not callable(self.wpreprocessor):
msg = 'wpreprocessor should be callable'
warnings.warn(msg)
else:
word = self.wpreprocessor(word)
if not isinstance(word, str):
msg = 'The output of wpreprocessor should be string'
raise ValueError(msg)
return word
def __call__(self, word):
return self.apply(word)
class Tokenizer(object):
@staticmethod
def mytok(s):
"""
        An example of a user-customized tokenizer.
:return: list of tokens
"""
# TODO NOW spacy.load here is a really stupid idea, cause each time apply has been called spacy.load need to run. TOO SLOW!!!
tk = spacy.load('en')
return [token.text for token in tk(s)]
def __init__(self, word_tokenizer='Treebank', wtokenizer=None):
self.word_tokenizer = None
if word_tokenizer not in ['Treebank', 'PunktWord', 'WordPunct', 'spacy', '']:
            msg = 'word_tokenizer "{word_tokenizer}" should be Treebank, PunktWord, WordPunct, spacy or empty'
raise ValueError(msg.format(word_tokenizer=word_tokenizer))
if word_tokenizer == 'spacy':
self.tokenizer = None
self.word_tokenizer = 'spacy'
elif word_tokenizer == 'Treebank':
from nltk.tokenize import TreebankWordTokenizer
self.tokenizer = TreebankWordTokenizer().tokenize
elif word_tokenizer == 'PunktWord':
# PunktTokenizer splits on punctuation, but keeps it with the word. => [‘this’, “‘s”, ‘a’, ‘test’]
from nltk.tokenize import PunktWordTokenizer
self.tokenizer = PunktWordTokenizer().tokenize
elif word_tokenizer == 'WordPunct':
# WordPunctTokenizer splits all punctuations into separate tokens. => [‘This’, “‘”, ‘s’, ‘a’, ‘test’]
from nltk.tokenize import WordPunctTokenizer
self.tokenizer = WordPunctTokenizer().tokenize
else:
if wtokenizer is None:
self.tokenizer = None
else:
if not callable(wtokenizer):
msg = 'wtokenizer should be callable'
warnings.warn(msg)
self.tokenizer = None
else:
self.tokenizer = wtokenizer
def apply(self, text, spacy_loader=None):
if self.word_tokenizer == 'spacy':
return [token.text for token in spacy_loader(text)]
if self.tokenizer is not None:
return self.tokenizer(text)
else:
return [text]
def __call__(self, text):
return self.apply(text)
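# Self-contained example (assumes the module's top-level spacy import and the
# relative `util` import succeed): an empty word_tokenizer plus a user-supplied
# callable routes tokenization through the custom path, so no NLTK data is needed.
def _demo_tokenizer():
    tokenizer = Tokenizer(word_tokenizer='', wtokenizer=str.split)
    return tokenizer("one two three")  # -> ['one', 'two', 'three']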
| [
"spacy.load",
"nltk.tokenize.WordPunctTokenizer",
"nltk.tokenize.PunktWordTokenizer",
"warnings.warn",
"re.sub",
"nltk.tokenize.TreebankWordTokenizer"
] | [((4024, 4040), 'spacy.load', 'spacy.load', (['"""en"""'], {}), "('en')\n", (4034, 4040), False, 'import spacy\n'), ((2476, 2500), 're.sub', 're.sub', (['"""\\\\d"""', '"""0"""', 'word'], {}), "('\\\\d', '0', word)\n", (2482, 2500), False, 'import re\n'), ((3380, 3398), 'warnings.warn', 'warnings.warn', (['msg'], {}), '(msg)\n', (3393, 3398), False, 'import warnings\n'), ((4701, 4724), 'nltk.tokenize.TreebankWordTokenizer', 'TreebankWordTokenizer', ([], {}), '()\n', (4722, 4724), False, 'from nltk.tokenize import TreebankWordTokenizer\n'), ((4975, 4995), 'nltk.tokenize.PunktWordTokenizer', 'PunktWordTokenizer', ([], {}), '()\n', (4993, 4995), False, 'from nltk.tokenize import PunktWordTokenizer\n'), ((5249, 5269), 'nltk.tokenize.WordPunctTokenizer', 'WordPunctTokenizer', ([], {}), '()\n', (5267, 5269), False, 'from nltk.tokenize import WordPunctTokenizer\n'), ((5507, 5525), 'warnings.warn', 'warnings.warn', (['msg'], {}), '(msg)\n', (5520, 5525), False, 'import warnings\n')] |
import logging
import time
from tests.common.helpers.assertions import pytest_assert
logger = logging.getLogger(__name__)
def join_master(duthost, master_vip):
"""
Joins DUT to Kubernetes master
Args:
duthost: DUT host object
master_vip: VIP of high availability Kubernetes master
If join fails, test will fail at the assertion to check_connected
"""
logger.info("Joining DUT to Kubernetes master")
dut_join_cmds = ['sudo config kube server disable on',
'sudo config kube server ip {}'.format(master_vip),
'sudo config kube server disable off']
duthost.shell_cmds(cmds=dut_join_cmds)
    pytest_assert(poll_for_status_change(duthost, True), "DUT failed to successfully join Kubernetes master")
def make_vip_unreachable(duthost, master_vip):
"""
Makes Kubernetes master VIP unreachable from SONiC DUT by configuring iptables rules. Cleans preexisting iptables rules for VIP.
Args:
duthost: DUT host object
master_vip: VIP of high availability Kubernetes master
"""
logger.info("Making Kubernetes master VIP unreachable from DUT")
clean_vip_iptables_rules(duthost, master_vip)
duthost.shell('sudo iptables -A INPUT -s {} -j DROP'.format(master_vip))
duthost.shell('sudo iptables -A OUTPUT -d {} -j DROP'.format(master_vip))
def make_vip_reachable(duthost, master_vip):
"""
Makes Kubernetes master VIP reachable from SONiC DUT by removing any iptables rules associated with the VIP.
Args:
duthost: DUT host object
master_vip: VIP of high availability Kubernetes master
"""
logger.info("Making Kubernetes master VIP reachable from DUT")
clean_vip_iptables_rules(duthost, master_vip)
def clean_vip_iptables_rules(duthost, master_vip):
"""
Removes all iptables rules associated with the VIP.
Args:
duthost: DUT host object
master_vip: VIP of high availability Kubernetes master
"""
iptables_rules = duthost.shell('sudo iptables -S | grep {} || true'.format(master_vip))["stdout_lines"]
logger.info('iptables rules: {}'.format(iptables_rules))
for line in iptables_rules:
if line:
duthost.shell('sudo iptables -D {}'.format(line[2:]))
def check_connected(duthost):
"""
Checks if the DUT already shows status 'connected' to Kubernetes master
Args:
duthost: DUT host object
Returns:
True if connected, False if not connected
"""
kube_server_status = duthost.shell('show kube server')["stdout_lines"]
logger.info("Kube server status: {}".format(kube_server_status))
for line in kube_server_status:
if line.startswith("KUBERNETES_MASTER SERVER connected"):
return line.endswith("true")
logger.info("Kubernetes server check_connected failed to check server status")
def poll_for_status_change(duthost, exp_status, poll_wait_secs=5, min_wait_time=20, max_wait_time=120):
"""
Polls to see if kube server connected status updates as expected
Args:
duthost: DUT host object
exp_status: expected server connected status once processes are synced
poll_wait_secs: seconds between each server connected status poll. Default: 5 seconds
min_wait_time: seconds before starting poll of server connected status. Default: 20 seconds
max_wait_time: maximum amount of time to spend polling for status change. Default: 120 seconds
Returns:
True if server connected status updates as expected by max_wait_time
False if server connected status fails to update as expected by max_wait_time
"""
time.sleep(min_wait_time)
timeout_wait_secs = max_wait_time - min_wait_time
while (timeout_wait_secs > 0):
if (check_connected(duthost) == exp_status):
logging.info("Time taken to update Kube server status: {} seconds".format(timeout_wait_secs))
return True
time.sleep(poll_wait_secs)
timeout_wait_secs -= poll_wait_secs
return False
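# Typical call sequence in a test case (illustrative; `duthost` comes from a
# pytest fixture and `master_vip` from test configuration):
#   make_vip_reachable(duthost, master_vip)
#   join_master(duthost, master_vip)           # asserts via poll_for_status_change
#   make_vip_unreachable(duthost, master_vip)  # simulate master outage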
| [
"logging.getLogger",
"time.sleep"
] | [((96, 123), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (113, 123), False, 'import logging\n'), ((3693, 3718), 'time.sleep', 'time.sleep', (['min_wait_time'], {}), '(min_wait_time)\n', (3703, 3718), False, 'import time\n'), ((3995, 4021), 'time.sleep', 'time.sleep', (['poll_wait_secs'], {}), '(poll_wait_secs)\n', (4005, 4021), False, 'import time\n')] |
from django.db import models
from django.contrib.auth.models import User
class Post(models.Model):
user = models.ForeignKey(User, related_name='posts', on_delete=models.CASCADE)
content = models.TextField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
return str(self.user) + ' : ' + self.content
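# Example usage (illustrative; run inside `python manage.py shell` with
# migrations applied; `post_id` is the FK field name used by Comment below):
#   user = User.objects.create_user("alice")
#   post = Post.objects.create(user=user, content="First post")
#   Comment.objects.create(post_id=post, user=user, content="Nice!")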
class Comment(models.Model):
post_id = models.ForeignKey(Post, on_delete=models.CASCADE, related_name='comments')
user = models.ForeignKey(User, on_delete=models.CASCADE, related_name='comments')
content = models.TextField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __str__(self):
        return str(self.user) + ' >> ' + self.content
| [
"django.db.models.DateTimeField",
"django.db.models.TextField",
"django.db.models.ForeignKey"
] | [((111, 182), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'related_name': '"""posts"""', 'on_delete': 'models.CASCADE'}), "(User, related_name='posts', on_delete=models.CASCADE)\n", (128, 182), False, 'from django.db import models\n'), ((197, 215), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (213, 215), False, 'from django.db import models\n'), ((233, 272), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (253, 272), False, 'from django.db import models\n'), ((290, 325), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (310, 325), False, 'from django.db import models\n'), ((453, 527), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Post'], {'on_delete': 'models.CASCADE', 'related_name': '"""comments"""'}), "(Post, on_delete=models.CASCADE, related_name='comments')\n", (470, 527), False, 'from django.db import models\n'), ((539, 613), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE', 'related_name': '"""comments"""'}), "(User, on_delete=models.CASCADE, related_name='comments')\n", (556, 613), False, 'from django.db import models\n'), ((628, 646), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (644, 646), False, 'from django.db import models\n'), ((664, 703), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (684, 703), False, 'from django.db import models\n'), ((721, 756), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (741, 756), False, 'from django.db import models\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `database` module."""
import pytest
import os
import sqlite3
from packit_app.database import Database
def test_database_connection():
db = Database()
assert type(db.cur) is sqlite3.Cursor
assert type(db.connection) is sqlite3.Connection
assert os.path.exists(Database.db_location) is True
def test_database_disconnects():
db = Database()
db.close_connection()
with pytest.raises(sqlite3.ProgrammingError):
db.cur.execute("CREATE TABLE cannot_not_be_created")
| [
"packit_app.database.Database",
"os.path.exists",
"pytest.raises"
] | [((206, 216), 'packit_app.database.Database', 'Database', ([], {}), '()\n', (214, 216), False, 'from packit_app.database import Database\n'), ((412, 422), 'packit_app.database.Database', 'Database', ([], {}), '()\n', (420, 422), False, 'from packit_app.database import Database\n'), ((323, 359), 'os.path.exists', 'os.path.exists', (['Database.db_location'], {}), '(Database.db_location)\n', (337, 359), False, 'import os\n'), ((458, 497), 'pytest.raises', 'pytest.raises', (['sqlite3.ProgrammingError'], {}), '(sqlite3.ProgrammingError)\n', (471, 497), False, 'import pytest\n')] |
import torch
import torch.nn as nn
import torch.nn.functional as F
import dgl
from dgl.nn.pytorch.glob import SumPooling, AvgPooling, MaxPooling
"""
GIN: Graph Isomorphism Networks
HOW POWERFUL ARE GRAPH NEURAL NETWORKS? (<NAME>, <NAME>, <NAME> and <NAME>, ICLR 2019)
https://arxiv.org/pdf/1810.00826.pdf
"""
from gnns.gin_layer import GINLayer, ApplyNodeFunc, MLP
class GINNet(nn.Module):
def __init__(self, net_params):
super().__init__()
in_dim = net_params[0]
hidden_dim = net_params[1]
n_classes = net_params[2]
dropout = 0.5
self.n_layers = 2
n_mlp_layers = 1 # GIN
learn_eps = True # GIN
neighbor_aggr_type = 'mean' # GIN
graph_norm = False
batch_norm = False
residual = False
self.n_classes = n_classes
# List of MLPs
self.ginlayers = torch.nn.ModuleList()
for layer in range(self.n_layers):
if layer == 0:
mlp = MLP(n_mlp_layers, in_dim, hidden_dim, hidden_dim)
else:
mlp = MLP(n_mlp_layers, hidden_dim, hidden_dim, n_classes)
self.ginlayers.append(GINLayer(ApplyNodeFunc(mlp), neighbor_aggr_type,
dropout, graph_norm, batch_norm, residual, 0, learn_eps))
# Linear function for output of each layer
# which maps the output of different layers into a prediction score
self.linears_prediction = nn.Linear(hidden_dim, n_classes, bias=False)
def forward(self, g, h, snorm_n, snorm_e):
# list of hidden representation at each layer (including input)
hidden_rep = []
for i in range(self.n_layers):
h = self.ginlayers[i](g, h, snorm_n)
hidden_rep.append(h)
# score_over_layer = (self.linears_prediction(hidden_rep[0]) + hidden_rep[1]) / 2
score_over_layer = (self.linears_prediction(hidden_rep[0]) + hidden_rep[1]) / 2
return score_over_layer
class GINNet_ss(nn.Module):
def __init__(self, net_params, num_par):
super().__init__()
in_dim = net_params[0]
hidden_dim = net_params[1]
n_classes = net_params[2]
dropout = 0.5
self.n_layers = 2
n_mlp_layers = 1 # GIN
learn_eps = True # GIN
neighbor_aggr_type = 'mean' # GIN
graph_norm = False
batch_norm = False
residual = False
self.n_classes = n_classes
# List of MLPs
self.ginlayers = torch.nn.ModuleList()
for layer in range(self.n_layers):
if layer == 0:
mlp = MLP(n_mlp_layers, in_dim, hidden_dim, hidden_dim)
else:
mlp = MLP(n_mlp_layers, hidden_dim, hidden_dim, n_classes)
self.ginlayers.append(GINLayer(ApplyNodeFunc(mlp), neighbor_aggr_type,
dropout, graph_norm, batch_norm, residual, 0, learn_eps))
# Linear function for output of each layer
# which maps the output of different layers into a prediction score
self.linears_prediction = nn.Linear(hidden_dim, n_classes, bias=False)
self.classifier_ss = nn.Linear(hidden_dim, num_par, bias=False)
def forward(self, g, h, snorm_n, snorm_e):
# list of hidden representation at each layer (including input)
hidden_rep = []
for i in range(self.n_layers):
h = self.ginlayers[i](g, h, snorm_n)
hidden_rep.append(h)
score_over_layer = (self.linears_prediction(hidden_rep[0]) + hidden_rep[1]) / 2
h_ss = self.classifier_ss(hidden_rep[0])
return score_over_layer, h_ss
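# Smoke-test sketch (assumes dgl, torch and the local gnns package are
# importable, and that GINLayer accepts a (graph, features, snorm_n) call):
def _demo_ginnet():
    g = dgl.rand_graph(10, 30)            # 10 nodes, 30 random edges
    h = torch.randn(10, 16)               # node features, in_dim = 16
    model = GINNet([16, 32, 4])           # [in_dim, hidden_dim, n_classes]
    return model(g, h, None, None).shape  # expected: torch.Size([10, 4])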
| [
"torch.nn.ModuleList",
"gnns.gin_layer.MLP",
"torch.nn.Linear",
"gnns.gin_layer.ApplyNodeFunc"
] | [((930, 951), 'torch.nn.ModuleList', 'torch.nn.ModuleList', ([], {}), '()\n', (949, 951), False, 'import torch\n'), ((1573, 1617), 'torch.nn.Linear', 'nn.Linear', (['hidden_dim', 'n_classes'], {'bias': '(False)'}), '(hidden_dim, n_classes, bias=False)\n', (1582, 1617), True, 'import torch.nn as nn\n'), ((2692, 2713), 'torch.nn.ModuleList', 'torch.nn.ModuleList', ([], {}), '()\n', (2711, 2713), False, 'import torch\n'), ((3335, 3379), 'torch.nn.Linear', 'nn.Linear', (['hidden_dim', 'n_classes'], {'bias': '(False)'}), '(hidden_dim, n_classes, bias=False)\n', (3344, 3379), True, 'import torch.nn as nn\n'), ((3409, 3451), 'torch.nn.Linear', 'nn.Linear', (['hidden_dim', 'num_par'], {'bias': '(False)'}), '(hidden_dim, num_par, bias=False)\n', (3418, 3451), True, 'import torch.nn as nn\n'), ((1053, 1102), 'gnns.gin_layer.MLP', 'MLP', (['n_mlp_layers', 'in_dim', 'hidden_dim', 'hidden_dim'], {}), '(n_mlp_layers, in_dim, hidden_dim, hidden_dim)\n', (1056, 1102), False, 'from gnns.gin_layer import GINLayer, ApplyNodeFunc, MLP\n'), ((1143, 1195), 'gnns.gin_layer.MLP', 'MLP', (['n_mlp_layers', 'hidden_dim', 'hidden_dim', 'n_classes'], {}), '(n_mlp_layers, hidden_dim, hidden_dim, n_classes)\n', (1146, 1195), False, 'from gnns.gin_layer import GINLayer, ApplyNodeFunc, MLP\n'), ((2815, 2864), 'gnns.gin_layer.MLP', 'MLP', (['n_mlp_layers', 'in_dim', 'hidden_dim', 'hidden_dim'], {}), '(n_mlp_layers, in_dim, hidden_dim, hidden_dim)\n', (2818, 2864), False, 'from gnns.gin_layer import GINLayer, ApplyNodeFunc, MLP\n'), ((2905, 2957), 'gnns.gin_layer.MLP', 'MLP', (['n_mlp_layers', 'hidden_dim', 'hidden_dim', 'n_classes'], {}), '(n_mlp_layers, hidden_dim, hidden_dim, n_classes)\n', (2908, 2957), False, 'from gnns.gin_layer import GINLayer, ApplyNodeFunc, MLP\n'), ((1269, 1287), 'gnns.gin_layer.ApplyNodeFunc', 'ApplyNodeFunc', (['mlp'], {}), '(mlp)\n', (1282, 1287), False, 'from gnns.gin_layer import GINLayer, ApplyNodeFunc, MLP\n'), ((3031, 3049), 'gnns.gin_layer.ApplyNodeFunc', 'ApplyNodeFunc', (['mlp'], {}), '(mlp)\n', (3044, 3049), False, 'from gnns.gin_layer import GINLayer, ApplyNodeFunc, MLP\n')] |
# -*- coding: ascii -*-
#
# Copyright 2007, 2008, 2009, 2010, 2011
# <NAME> or his licensors, as applicable
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
===================
Data distribution
===================
This module provides tools to simplify data distribution.
"""
__author__ = u"<NAME>"
__docformat__ = "restructuredtext en"
from distutils import filelist as _filelist
import os as _os
import posixpath as _posixpath
import sys as _sys
from _setup import commands as _commands
def splitpath(path):
""" Split a path """
drive, path = '', _os.path.normpath(path)
try:
splitunc = _os.path.splitunc
except AttributeError:
pass
else:
drive, path = splitunc(path)
if not drive:
drive, path = _os.path.splitdrive(path)
elems = []
try:
sep = _os.path.sep
except AttributeError:
sep = _os.path.join('1', '2')[1:-1]
while 1:
prefix, path = _os.path.split(path)
elems.append(path)
if prefix in ('', sep):
drive = _os.path.join(drive, prefix)
break
path = prefix
elems.reverse()
return drive, elems
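# Doctest-style illustration (POSIX separators):
#   >>> splitpath('docs/userdoc/index.html')
#   ('', ['docs', 'userdoc', 'index.html'])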
def finalizer(installer):
""" Finalize install_data """
data_files = []
for item in installer.data_files:
if not isinstance(item, Data):
data_files.append(item)
continue
data_files.extend(item.flatten(installer))
installer.data_files = data_files
class Data(object):
""" File list container """
def __init__(self, files, target=None, preserve=0, strip=0,
prefix=None):
""" Initialization """
self._files = files
self._target = target
self._preserve = preserve
self._strip = strip
self._prefix = prefix
self.fixup_commands()
def fixup_commands(self):
pass
def from_templates(cls, *templates, **kwargs):
""" Initialize from template """
files = _filelist.FileList()
for tpl in templates:
for line in tpl.split(';'):
files.process_template_line(line.strip())
files.sort()
files.remove_duplicates()
result = []
for filename in files.files:
_, elems = splitpath(filename)
if '.svn' in elems or '.git' in elems:
continue
result.append(filename)
return cls(result, **kwargs)
from_templates = classmethod(from_templates)
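    # Hypothetical template example -- collect every *.txt under docs/ and
    # install it below share/doc (template lines use distutils FileList syntax):
    #   docs = Data.from_templates('recursive-include docs *.txt',
    #                              prefix='share/doc', preserve=1)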
def flatten(self, installer):
""" Flatten the file list to (target, file) tuples """
# pylint: disable = W0613
if self._prefix:
_, prefix = splitpath(self._prefix)
telems = prefix
else:
telems = []
tmap = {}
for fname in self._files:
(_, name), target = splitpath(fname), telems
if self._preserve:
if self._strip:
name = name[max(0, min(self._strip, len(name) - 1)):]
if len(name) > 1:
target = telems + name[:-1]
tmap.setdefault(_posixpath.join(*target), []).append(fname)
return tmap.items()
class Documentation(Data):
""" Documentation container """
def fixup_commands(self):
_commands.add_option('install_data', 'without-docs',
help_text='Do not install documentation files',
inherit='install',
)
_commands.add_finalizer('install_data', 'documentation', finalizer)
def flatten(self, installer):
""" Check if docs should be installed at all """
if installer.without_docs:
return []
return Data.flatten(self, installer)
class Manpages(Documentation):
""" Manpages container """
def dispatch(cls, files):
""" Automatically dispatch manpages to their target directories """
mpmap = {}
for manpage in files:
normalized = _os.path.normpath(manpage)
_, ext = _os.path.splitext(normalized)
if ext.startswith(_os.path.extsep):
ext = ext[len(_os.path.extsep):]
mpmap.setdefault(ext, []).append(manpage)
return [cls(manpages, prefix=_posixpath.join(
'share', 'man', 'man%s' % section,
)) for section, manpages in mpmap.items()]
dispatch = classmethod(dispatch)
def flatten(self, installer):
""" Check if manpages are suitable """
if _sys.platform == 'win32':
return []
return Documentation.flatten(self, installer)
| [
"posixpath.join",
"os.path.splitdrive",
"os.path.join",
"_setup.commands.add_option",
"os.path.splitext",
"os.path.split",
"os.path.normpath",
"_setup.commands.add_finalizer",
"distutils.filelist.FileList"
] | [((1065, 1088), 'os.path.normpath', '_os.path.normpath', (['path'], {}), '(path)\n', (1082, 1088), True, 'import os as _os\n'), ((1262, 1287), 'os.path.splitdrive', '_os.path.splitdrive', (['path'], {}), '(path)\n', (1281, 1287), True, 'import os as _os\n'), ((1446, 1466), 'os.path.split', '_os.path.split', (['path'], {}), '(path)\n', (1460, 1466), True, 'import os as _os\n'), ((2478, 2498), 'distutils.filelist.FileList', '_filelist.FileList', ([], {}), '()\n', (2496, 2498), True, 'from distutils import filelist as _filelist\n'), ((3784, 3908), '_setup.commands.add_option', '_commands.add_option', (['"""install_data"""', '"""without-docs"""'], {'help_text': '"""Do not install documentation files"""', 'inherit': '"""install"""'}), "('install_data', 'without-docs', help_text=\n 'Do not install documentation files', inherit='install')\n", (3804, 3908), True, 'from _setup import commands as _commands\n'), ((3946, 4013), '_setup.commands.add_finalizer', '_commands.add_finalizer', (['"""install_data"""', '"""documentation"""', 'finalizer'], {}), "('install_data', 'documentation', finalizer)\n", (3969, 4013), True, 'from _setup import commands as _commands\n'), ((1546, 1574), 'os.path.join', '_os.path.join', (['drive', 'prefix'], {}), '(drive, prefix)\n', (1559, 1574), True, 'import os as _os\n'), ((4453, 4479), 'os.path.normpath', '_os.path.normpath', (['manpage'], {}), '(manpage)\n', (4470, 4479), True, 'import os as _os\n'), ((4501, 4530), 'os.path.splitext', '_os.path.splitext', (['normalized'], {}), '(normalized)\n', (4518, 4530), True, 'import os as _os\n'), ((1380, 1403), 'os.path.join', '_os.path.join', (['"""1"""', '"""2"""'], {}), "('1', '2')\n", (1393, 1403), True, 'import os as _os\n'), ((4719, 4769), 'posixpath.join', '_posixpath.join', (['"""share"""', '"""man"""', "('man%s' % section)"], {}), "('share', 'man', 'man%s' % section)\n", (4734, 4769), True, 'import posixpath as _posixpath\n'), ((3608, 3632), 'posixpath.join', '_posixpath.join', (['*target'], {}), '(*target)\n', (3623, 3632), True, 'import posixpath as _posixpath\n')] |
from django.urls import path
from . import views
urlpatterns = [
path('charge/', views.charge, name='charge'),
path('payment/', views.HomePageView.as_view(), name='payment'),
]
| [
"django.urls.path"
] | [((71, 115), 'django.urls.path', 'path', (['"""charge/"""', 'views.charge'], {'name': '"""charge"""'}), "('charge/', views.charge, name='charge')\n", (75, 115), False, 'from django.urls import path\n')] |
# Stat_Canada.py (flowsa)
# !/usr/bin/env python3
# coding=utf-8
'''
Pulls Statistics Canada data on water intake and discharge for 3 digit NAICS from 2005 - 2015
'''
import pandas as pd
import io
import zipfile
import pycountry
from flowsa.common import *
def sc_call(url, sc_response, args):
"""
Convert response for calling url to pandas dataframe, begin parsing df into FBA format
:param url: string, url
:param sc_response: df, response from url call
:param args: dictionary, arguments specified when running
flowbyactivity.py ('year' and 'source')
:return: pandas dataframe of original source data
"""
# Convert response to dataframe
# read all files in the stat canada zip
with zipfile.ZipFile(io.BytesIO(sc_response.content), "r") as f:
# read in file names
for name in f.namelist():
# if filename does not contain "MetaData", then create dataframe
if "MetaData" not in name:
data = f.open(name)
df = pd.read_csv(data, header=0)
return df
def sc_parse(dataframe_list, args):
"""
Functions to being parsing and formatting data into flowbyactivity format
:param dataframe_list: list of dataframes to concat and format
:param args: arguments as specified in flowbyactivity.py ('year' and 'source')
:return: dataframe parsed and partially formatted to flowbyactivity specifications
"""
# concat dataframes
df = pd.concat(dataframe_list, sort=False)
# drop columns
df = df.drop(columns=['COORDINATE', 'DECIMALS', 'DGUID', 'SYMBOL', 'TERMINATED', 'UOM_ID', 'SCALAR_ID', 'VECTOR'])
# rename columns
df = df.rename(columns={'GEO': 'Location',
'North American Industry Classification System (NAICS)': 'Description',
'REF_DATE': 'Year',
'STATUS': 'Spread',
'VALUE': "FlowAmount",
'Water use parameter': 'FlowName'})
# extract NAICS as activity column. rename activity based on flowname
    df['Activity'] = df['Description'].str.extract(r'.*\[(.*)\].*')
df.loc[df['Description'] == 'Total, all industries', 'Activity'] = '31-33' # todo: change these activity names
df.loc[df['Description'] == 'Other manufacturing industries', 'Activity'] = 'Other'
df['FlowName'] = df['FlowName'].str.strip()
df.loc[df['FlowName'] == 'Water intake', 'ActivityConsumedBy'] = df['Activity']
df.loc[df['FlowName'].isin(['Water discharge', "Water recirculation"]), 'ActivityProducedBy'] = df['Activity']
# create "unit" column
df["Unit"] = "million " + df["UOM"] + "/year"
# drop columns used to create unit and activity columns
df = df.drop(columns=['SCALAR_FACTOR', 'UOM', 'Activity'])
# Modify the assigned RSD letter values to numeric value
df.loc[df['Spread'] == 'A', 'Spread'] = 2.5 # given range: 0.01 - 4.99%
df.loc[df['Spread'] == 'B', 'Spread'] = 7.5 # given range: 5 - 9.99%
df.loc[df['Spread'] == 'C', 'Spread'] = 12.5 # given range: 10 - 14.99%
df.loc[df['Spread'] == 'D', 'Spread'] = 20 # given range: 15 - 24.99%
df.loc[df['Spread'] == 'E', 'Spread'] = 37.5 # given range:25 - 49.99%
df.loc[df['Spread'] == 'F', 'Spread'] = 75 # given range: > 49.99%
df.loc[df['Spread'] == 'x', 'Spread'] = withdrawn_keyword
# hard code data
df['Class'] = 'Water'
df['SourceName'] = 'StatCan_IWS_MI'
# temp hardcode canada iso code
df['Location'] = call_country_code('Canada')
df['Year'] = df['Year'].astype(str)
df['LocationSystem'] = "ISO"
df["MeasureofSpread"] = 'RSD'
df["DataReliability"] = '3'
df["DataCollection"] = '4'
# subset based on year
df = df[df['Year'] == args['year']]
return df
def convert_statcan_data_to_US_water_use(df, attr):
"""
    Convert 3-digit NAICS Canadian water use to US water use by first
    normalizing by Canadian GDP (Mgal/USD, via the CAD/USD exchange rate)
    and then scaling by US GDP.
    :param df: dataframe of Canadian water use by 3-digit NAICS
    :param attr: dictionary of method attributes, including
        'allocation_source_year'
    :return: dataframe of estimated US water use
"""
import flowsa
from flowsa.values_from_literature import get_Canadian_to_USD_exchange_rate
from flowsa.flowbyfunctions import assign_fips_location_system, aggregator
from flowsa.common import fba_default_grouping_fields
from flowsa.dataclean import harmonize_units
from flowsa.common import US_FIPS, load_bea_crosswalk
# load Canadian GDP data
gdp = flowsa.getFlowByActivity(datasource='StatCan_GDP', year=attr['allocation_source_year'], flowclass='Money')
gdp = harmonize_units(gdp)
# drop 31-33
gdp = gdp[gdp['ActivityProducedBy'] != '31-33']
gdp = gdp.rename(columns={"FlowAmount": "CanDollar"})
# merge df
df_m = pd.merge(df, gdp[['CanDollar', 'ActivityProducedBy']], how='left', left_on='ActivityConsumedBy',
right_on='ActivityProducedBy')
df_m['CanDollar'] = df_m['CanDollar'].fillna(0)
df_m = df_m.drop(columns=["ActivityProducedBy_y"])
df_m = df_m.rename(columns={"ActivityProducedBy_x": "ActivityProducedBy"})
df_m = df_m[df_m['CanDollar'] != 0]
exchange_rate = get_Canadian_to_USD_exchange_rate(str(attr['allocation_source_year']))
exchange_rate = float(exchange_rate)
# convert to mgal/USD
df_m.loc[:, 'FlowAmount'] = df_m['FlowAmount'] / (df_m['CanDollar'] / exchange_rate)
df_m.loc[:, 'Unit'] = 'Mgal/USD'
df_m = df_m.drop(columns=["CanDollar"])
# convert Location to US
df_m.loc[:, 'Location'] = US_FIPS
df_m = assign_fips_location_system(df_m, str(attr['allocation_source_year']))
    # load US GDP data
us_gdp_load = flowsa.getFlowByActivity(datasource='BEA_GDP_GrossOutput', year=attr['allocation_source_year'],
flowclass='Money')
us_gdp_load = harmonize_units(us_gdp_load)
# load bea crosswalk
cw_load = load_bea_crosswalk()
cw = cw_load[['BEA_2012_Detail_Code', 'NAICS_2012_Code']].drop_duplicates()
cw = cw[cw['NAICS_2012_Code'].apply(lambda x: len(str(x)) == 3)].drop_duplicates().reset_index(drop=True)
# merge
us_gdp = pd.merge(us_gdp_load, cw, how='left', left_on='ActivityProducedBy', right_on='BEA_2012_Detail_Code')
us_gdp = us_gdp.drop(columns=['ActivityProducedBy', 'BEA_2012_Detail_Code'])
# rename columns
us_gdp = us_gdp.rename(columns={'NAICS_2012_Code': 'ActivityProducedBy'})
# agg by naics
us_gdp = aggregator(us_gdp, fba_default_grouping_fields)
us_gdp = us_gdp.rename(columns={'FlowAmount': 'us_gdp'})
# determine annual us water use
df_m2 = pd.merge(df_m, us_gdp[['ActivityProducedBy', 'us_gdp']], how='left', left_on='ActivityConsumedBy',
right_on='ActivityProducedBy')
df_m2.loc[:, 'FlowAmount'] = df_m2['FlowAmount'] * (df_m2['us_gdp'])
df_m2.loc[:, 'Unit'] = 'Mgal'
df_m2 = df_m2.rename(columns={'ActivityProducedBy_x': 'ActivityProducedBy'})
df_m2 = df_m2.drop(columns=['ActivityProducedBy_y', 'us_gdp'])
return df_m2
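# Worked example of the intensity math above (illustrative numbers):
#   FlowAmount = 120 (Mgal), CanDollar = 60, exchange_rate = 0.75 USD per CAD
#   Mgal/USD   = 120 / (60 / 0.75) = 1.5
#   US water   = 1.5 * us_gdp for that 3-digit NAICS sector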
# def disaggregate_statcan_to_naics_6(df):
# """
#
# :param df:
# :return:
# """
#
# return df
| [
"flowsa.dataclean.harmonize_units",
"pandas.read_csv",
"pandas.merge",
"io.BytesIO",
"flowsa.getFlowByActivity",
"flowsa.flowbyfunctions.aggregator",
"flowsa.common.load_bea_crosswalk",
"pandas.concat"
] | [((1472, 1509), 'pandas.concat', 'pd.concat', (['dataframe_list'], {'sort': '(False)'}), '(dataframe_list, sort=False)\n', (1481, 1509), True, 'import pandas as pd\n'), ((4432, 4543), 'flowsa.getFlowByActivity', 'flowsa.getFlowByActivity', ([], {'datasource': '"""StatCan_GDP"""', 'year': "attr['allocation_source_year']", 'flowclass': '"""Money"""'}), "(datasource='StatCan_GDP', year=attr[\n 'allocation_source_year'], flowclass='Money')\n", (4456, 4543), False, 'import flowsa\n'), ((4549, 4569), 'flowsa.dataclean.harmonize_units', 'harmonize_units', (['gdp'], {}), '(gdp)\n', (4564, 4569), False, 'from flowsa.dataclean import harmonize_units\n'), ((4724, 4856), 'pandas.merge', 'pd.merge', (['df', "gdp[['CanDollar', 'ActivityProducedBy']]"], {'how': '"""left"""', 'left_on': '"""ActivityConsumedBy"""', 'right_on': '"""ActivityProducedBy"""'}), "(df, gdp[['CanDollar', 'ActivityProducedBy']], how='left', left_on=\n 'ActivityConsumedBy', right_on='ActivityProducedBy')\n", (4732, 4856), True, 'import pandas as pd\n'), ((5644, 5763), 'flowsa.getFlowByActivity', 'flowsa.getFlowByActivity', ([], {'datasource': '"""BEA_GDP_GrossOutput"""', 'year': "attr['allocation_source_year']", 'flowclass': '"""Money"""'}), "(datasource='BEA_GDP_GrossOutput', year=attr[\n 'allocation_source_year'], flowclass='Money')\n", (5668, 5763), False, 'import flowsa\n'), ((5820, 5848), 'flowsa.dataclean.harmonize_units', 'harmonize_units', (['us_gdp_load'], {}), '(us_gdp_load)\n', (5835, 5848), False, 'from flowsa.dataclean import harmonize_units\n'), ((5888, 5908), 'flowsa.common.load_bea_crosswalk', 'load_bea_crosswalk', ([], {}), '()\n', (5906, 5908), False, 'from flowsa.common import US_FIPS, load_bea_crosswalk\n'), ((6125, 6229), 'pandas.merge', 'pd.merge', (['us_gdp_load', 'cw'], {'how': '"""left"""', 'left_on': '"""ActivityProducedBy"""', 'right_on': '"""BEA_2012_Detail_Code"""'}), "(us_gdp_load, cw, how='left', left_on='ActivityProducedBy',\n right_on='BEA_2012_Detail_Code')\n", (6133, 6229), True, 'import pandas as pd\n'), ((6438, 6485), 'flowsa.flowbyfunctions.aggregator', 'aggregator', (['us_gdp', 'fba_default_grouping_fields'], {}), '(us_gdp, fba_default_grouping_fields)\n', (6448, 6485), False, 'from flowsa.flowbyfunctions import assign_fips_location_system, aggregator\n'), ((6596, 6729), 'pandas.merge', 'pd.merge', (['df_m', "us_gdp[['ActivityProducedBy', 'us_gdp']]"], {'how': '"""left"""', 'left_on': '"""ActivityConsumedBy"""', 'right_on': '"""ActivityProducedBy"""'}), "(df_m, us_gdp[['ActivityProducedBy', 'us_gdp']], how='left',\n left_on='ActivityConsumedBy', right_on='ActivityProducedBy')\n", (6604, 6729), True, 'import pandas as pd\n'), ((748, 779), 'io.BytesIO', 'io.BytesIO', (['sc_response.content'], {}), '(sc_response.content)\n', (758, 779), False, 'import io\n'), ((1028, 1055), 'pandas.read_csv', 'pd.read_csv', (['data'], {'header': '(0)'}), '(data, header=0)\n', (1039, 1055), True, 'import pandas as pd\n')] |
import bybit
import math
import pandas as pd
import time
from datetime import datetime
from dateutil.relativedelta import relativedelta
# settings
num_orders = 3
order_size = 1
order_distance = 10
sl_risk = 0.03
tp_distance = 5
api_key = "YOUR_KEY"
api_secret = "YOUR_SECRET"
client = bybit.bybit(test=False, api_key=api_key, api_secret=api_secret)
mid_price = 0
def place_order(price, side, stop_loss, take_profit):
order = client.Order.Order_new(
side=side,
symbol="BTCUSD",
order_type="Limit",
qty=order_size, price=price,
time_in_force="GoodTillCancel",
take_profit=take_profit,
stop_loss=stop_loss
)
order.result()
def get_result_from_response(response):
result = response.result()[0] or {}
return result.get('result', {})
def ensure_buy_order(price, stop_loss, take_profit):
if ((last_price - order_distance) < price):
return
existing_order = list(
filter(lambda elem: int(elem['price']) == price, buy_orders))
if any(existing_order):
existing_order = existing_order[0]
if (int(float(existing_order['take_profit'])) == take_profit):
return
else:
print("cancelling order, tp has moved")
close_order(existing_order)
print("> opening buy order at {} with sl: {} and tp: {}".format(
price, stop_loss, take_profit))
place_order(price, "Buy", stop_loss, take_profit)
def ensure_sell_order(price, stop_loss, take_profit):
if ((last_price + order_distance) > price):
return
existing_order = list(
filter(lambda elem: int(elem['price']) == price, sell_orders))
if any(existing_order):
existing_order = existing_order[0]
if (int(float(existing_order['take_profit'])) == take_profit):
return
else:
print("cancelling order, tp has moved")
close_order(existing_order)
print("> opening sell order at {} with sl: {} and tp: {}".format(
price, stop_loss, take_profit))
place_order(price, "Sell", stop_loss, take_profit)
def close_order(order):
client.Order.Order_cancel(
symbol="BTCUSD", order_id=order['order_id']).result()
def close_all_orders(order_list):
[close_order(order) for order in order_list]
def check_and_update_orders():
print("> check and update orders running")
my_position = get_result_from_response(
client.Positions.Positions_myPosition(symbol="BTCUSD"))
position_side = my_position['side']
entry_price = round(float(my_position['entry_price']))
sl_distance = mid_price * sl_risk
for n in range(0, num_orders):
order_offset = (n + 1) * order_distance
buy_price = round_to_order_distance(last_price - order_offset)
sell_price = round_to_order_distance(last_price + order_offset)
if position_side == "Buy":
buy_tp = round_to_order_distance(
entry_price + (order_distance * tp_distance))
ensure_buy_order(buy_price, mid_price - sl_distance, buy_tp)
close_all_orders(sell_orders)
elif position_side == "Sell":
sell_tp = round_to_order_distance(
entry_price - (order_distance * tp_distance))
ensure_sell_order(sell_price, mid_price + sl_distance, sell_tp)
close_all_orders(buy_orders)
else:
buy_tp = round_to_order_distance(
last_price + (order_distance * tp_distance))
ensure_buy_order(buy_price, mid_price - sl_distance, buy_tp)
sell_tp = round_to_order_distance(
last_price - (order_distance * tp_distance))
ensure_sell_order(sell_price, mid_price + sl_distance, sell_tp)
def round_to_order_distance(num):
if num is None or math.isnan(num):
return
return order_distance * round(float(num) / order_distance)
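# e.g. with order_distance = 10: round_to_order_distance(12344.9) -> 12340 and
# round_to_order_distance(12346.0) -> 12350 (Python's round() applies banker's
# rounding at exact .5 ties).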
def calculate_mid_price():
kline = pd.DataFrame([])
for n in range(1, 4):
from_date = datetime.now() + relativedelta(hours=-(n*3))
unix_from_date = time.mktime(from_date.timetuple())
candle_info = get_result_from_response(client.Kline.Kline_get(
symbol="BTCUSD", interval="1", **{'from': unix_from_date}))
kline = kline.append(candle_info)
kline = kline.drop_duplicates()
kline["time"] = pd.to_datetime(kline["open_time"], unit='s')
kline = kline.sort_values(by=["time"])
kline[["open", "high", "low", "close", "volume"]] = kline[[
"open", "high", "low", "close", "volume"]].apply(pd.to_numeric)
kline.drop(columns=["open_time", "symbol",
"interval", "turnover"], inplace=True)
kline['ma'] = kline['close'].rolling(200).mean()
kline['rounded_ma'] = kline['ma'].apply(
lambda n: round_to_order_distance(n))
mid_price = kline['rounded_ma'].iloc[-1]
print("> mid price {}".format(mid_price))
return mid_price
last_price = get_result_from_response(
client.Market.Market_tradingRecords(symbol="BTCUSD", limit=1))[0]['price']
open_orders = get_result_from_response(
client.Order.Order_query(symbol="BTCUSD", order_id=""))
buy_orders = list(filter(lambda elem: elem['side'] == "Buy", open_orders))
sell_orders = list(filter(lambda elem: elem['side'] == "Sell", open_orders))
print(buy_orders)
mid_price = calculate_mid_price()
check_and_update_orders()
| [
"dateutil.relativedelta.relativedelta",
"datetime.datetime.now",
"pandas.DataFrame",
"bybit.bybit",
"pandas.to_datetime",
"math.isnan"
] | [((288, 351), 'bybit.bybit', 'bybit.bybit', ([], {'test': '(False)', 'api_key': 'api_key', 'api_secret': 'api_secret'}), '(test=False, api_key=api_key, api_secret=api_secret)\n', (299, 351), False, 'import bybit\n'), ((3965, 3981), 'pandas.DataFrame', 'pd.DataFrame', (['[]'], {}), '([])\n', (3977, 3981), True, 'import pandas as pd\n'), ((4376, 4420), 'pandas.to_datetime', 'pd.to_datetime', (["kline['open_time']"], {'unit': '"""s"""'}), "(kline['open_time'], unit='s')\n", (4390, 4420), True, 'import pandas as pd\n'), ((3828, 3843), 'math.isnan', 'math.isnan', (['num'], {}), '(num)\n', (3838, 3843), False, 'import math\n'), ((4029, 4043), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4041, 4043), False, 'from datetime import datetime\n'), ((4046, 4075), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'hours': '(-(n * 3))'}), '(hours=-(n * 3))\n', (4059, 4075), False, 'from dateutil.relativedelta import relativedelta\n')] |
# coding: utf-8
#------------------------------------------------------------------------------------------#
# This file is part of Pyccel which is released under MIT License. See the LICENSE file or #
# go to https://github.com/pyccel/pyccel/blob/master/LICENSE for full license details. #
#------------------------------------------------------------------------------------------#
import sys
import subprocess
import os
import glob
import warnings
from pyccel.ast.bind_c import as_static_function_call
from pyccel.ast.core import SeparatorComment
from pyccel.codegen.printing.fcode import fcode
from pyccel.codegen.printing.cwrappercode import cwrappercode
from pyccel.codegen.utilities import compile_files, get_gfortran_library_dir
from .cwrapper import create_c_setup
from pyccel.errors.errors import Errors
errors = Errors()
__all__ = ['create_shared_library', 'fortran_c_flag_equivalence']
#==============================================================================
PY_VERSION = sys.version_info[0:2]
fortran_c_flag_equivalence = {'-Wconversion-extra' : '-Wconversion' }
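# maps gfortran-only warning flags to their C-compiler equivalents so the
# user's Fortran flags can be reused when compiling the generated C wrapper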
#==============================================================================
def create_shared_library(codegen,
language,
pyccel_dirpath,
compiler,
mpi_compiler,
accelerator,
dep_mods,
libs,
libdirs,
includes='',
flags = '',
sharedlib_modname=None,
verbose = False):
# Consistency checks
if not codegen.is_module:
raise TypeError('Expected Module')
# Get module name
module_name = codegen.name
# Change working directory to '__pyccel__'
base_dirpath = os.getcwd()
os.chdir(pyccel_dirpath)
# Name of shared library
if sharedlib_modname is None:
sharedlib_modname = module_name
sharedlib_folder = ''
if language in ['c', 'fortran']:
extra_libs = []
extra_libdirs = []
if language == 'fortran':
# Construct static interface for passing array shapes and write it to file bind_c_MOD.f90
funcs = [f for f in codegen.routines if not f.is_private]
sep = fcode(SeparatorComment(40), codegen.parser)
bind_c_funcs = [as_static_function_call(f, module_name, name=f.name) for f in funcs]
bind_c_code = '\n'.join([sep + fcode(f, codegen.parser) + sep for f in bind_c_funcs])
bind_c_filename = 'bind_c_{}.f90'.format(module_name)
with open(bind_c_filename, 'w') as f:
f.writelines(bind_c_code)
compile_files(bind_c_filename, compiler, flags,
binary=None,
verbose=verbose,
is_module=True,
output=pyccel_dirpath,
libs=libs,
libdirs=libdirs,
language=language)
dep_mods = (os.path.join(pyccel_dirpath,'bind_c_{}'.format(module_name)), *dep_mods)
if compiler == 'gfortran':
extra_libs.append('gfortran')
extra_libdirs.append(get_gfortran_library_dir())
elif compiler == 'ifort':
extra_libs.append('ifcore')
if sys.platform == 'win32':
extra_libs.append('quadmath')
module_old_name = codegen.expr.name
codegen.expr.set_name(sharedlib_modname)
wrapper_code = cwrappercode(codegen.expr, codegen.parser, language)
if errors.has_errors():
return
codegen.expr.set_name(module_old_name)
wrapper_filename_root = '{}_wrapper'.format(module_name)
wrapper_filename = '{}.c'.format(wrapper_filename_root)
with open(wrapper_filename, 'w') as f:
f.writelines(wrapper_code)
c_flags = [fortran_c_flag_equivalence[f] if f in fortran_c_flag_equivalence \
else f for f in flags.strip().split(' ') if f != '']
if sys.platform == "darwin" and "-fopenmp" in c_flags and "-Xpreprocessor" not in c_flags:
idx = 0
while idx < len(c_flags):
if c_flags[idx] == "-fopenmp":
c_flags.insert(idx, "-Xpreprocessor")
idx += 1
idx += 1
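        # e.g. ['-O2', '-fopenmp'] becomes ['-O2', '-Xpreprocessor', '-fopenmp'],
        # the spelling Apple clang expects for OpenMP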
setup_code = create_c_setup(sharedlib_modname, wrapper_filename,
dep_mods, compiler, includes, libs + extra_libs, libdirs + extra_libdirs, c_flags)
setup_filename = "setup_{}.py".format(module_name)
with open(setup_filename, 'w') as f:
f.writelines(setup_code)
setup_filename = os.path.join(pyccel_dirpath, setup_filename)
cmd = [sys.executable, setup_filename, "build"]
if verbose:
print(' '.join(cmd))
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
out, err = p.communicate()
if verbose:
print(out)
if p.returncode != 0:
err_msg = "Failed to build module"
if verbose:
err_msg += "\n" + err
raise RuntimeError(err_msg)
if err:
warnings.warn(UserWarning(err))
sharedlib_folder += 'build/lib*/'
# Obtain absolute path of newly created shared library
# Set file name extension of Python extension module
if os.name == 'nt': # Windows
extext = 'pyd'
else:
extext = 'so'
pattern = '{}{}*.{}'.format(sharedlib_folder, sharedlib_modname, extext)
sharedlib_filename = glob.glob(pattern)[0]
sharedlib_filepath = os.path.abspath(sharedlib_filename)
# Change working directory back to starting point
os.chdir(base_dirpath)
# Return absolute path of shared library
return sharedlib_filepath
| [
"pyccel.ast.core.SeparatorComment",
"subprocess.Popen",
"os.path.join",
"os.getcwd",
"os.chdir",
"pyccel.ast.bind_c.as_static_function_call",
"pyccel.errors.errors.Errors",
"pyccel.codegen.printing.cwrappercode.cwrappercode",
"pyccel.codegen.printing.fcode.fcode",
"os.path.abspath",
"pyccel.code... | [((900, 908), 'pyccel.errors.errors.Errors', 'Errors', ([], {}), '()\n', (906, 908), False, 'from pyccel.errors.errors import Errors\n'), ((1967, 1978), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1976, 1978), False, 'import os\n'), ((1983, 2007), 'os.chdir', 'os.chdir', (['pyccel_dirpath'], {}), '(pyccel_dirpath)\n', (1991, 2007), False, 'import os\n'), ((5826, 5861), 'os.path.abspath', 'os.path.abspath', (['sharedlib_filename'], {}), '(sharedlib_filename)\n', (5841, 5861), False, 'import os\n'), ((5921, 5943), 'os.chdir', 'os.chdir', (['base_dirpath'], {}), '(base_dirpath)\n', (5929, 5943), False, 'import os\n'), ((3665, 3717), 'pyccel.codegen.printing.cwrappercode.cwrappercode', 'cwrappercode', (['codegen.expr', 'codegen.parser', 'language'], {}), '(codegen.expr, codegen.parser, language)\n', (3677, 3717), False, 'from pyccel.codegen.printing.cwrappercode import cwrappercode\n'), ((4847, 4891), 'os.path.join', 'os.path.join', (['pyccel_dirpath', 'setup_filename'], {}), '(pyccel_dirpath, setup_filename)\n', (4859, 4891), False, 'import os\n'), ((5014, 5112), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'universal_newlines': '(True)'}), '(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,\n universal_newlines=True)\n', (5030, 5112), False, 'import subprocess\n'), ((5779, 5797), 'glob.glob', 'glob.glob', (['pattern'], {}), '(pattern)\n', (5788, 5797), False, 'import glob\n'), ((2863, 3036), 'pyccel.codegen.utilities.compile_files', 'compile_files', (['bind_c_filename', 'compiler', 'flags'], {'binary': 'None', 'verbose': 'verbose', 'is_module': '(True)', 'output': 'pyccel_dirpath', 'libs': 'libs', 'libdirs': 'libdirs', 'language': 'language'}), '(bind_c_filename, compiler, flags, binary=None, verbose=\n verbose, is_module=True, output=pyccel_dirpath, libs=libs, libdirs=\n libdirs, language=language)\n', (2876, 3036), False, 'from pyccel.codegen.utilities import compile_files, get_gfortran_library_dir\n'), ((2458, 2478), 'pyccel.ast.core.SeparatorComment', 'SeparatorComment', (['(40)'], {}), '(40)\n', (2474, 2478), False, 'from pyccel.ast.core import SeparatorComment\n'), ((2524, 2576), 'pyccel.ast.bind_c.as_static_function_call', 'as_static_function_call', (['f', 'module_name'], {'name': 'f.name'}), '(f, module_name, name=f.name)\n', (2547, 2576), False, 'from pyccel.ast.bind_c import as_static_function_call\n'), ((3359, 3385), 'pyccel.codegen.utilities.get_gfortran_library_dir', 'get_gfortran_library_dir', ([], {}), '()\n', (3383, 3385), False, 'from pyccel.codegen.utilities import compile_files, get_gfortran_library_dir\n'), ((2636, 2660), 'pyccel.codegen.printing.fcode.fcode', 'fcode', (['f', 'codegen.parser'], {}), '(f, codegen.parser)\n', (2641, 2660), False, 'from pyccel.codegen.printing.fcode import fcode\n')] |
######
# If you do not want to post results on Twitter, remove the lines marked with TWITTER
######
import time
import tweepy
import os
import classes.utility
import requests
from bs4 import BeautifulSoup, SoupStrainer
tools = classes.utility.ScavUtility()
iterator = 1
session = requests.session()
session.proxies = {}
session.proxies["http"] = "socks5h://localhost:9050"
session.proxies["https"] = "socks5h://localhost:9050"
headers = {"User-Agent": "Mozilla/5.0 (Windows NT 6.1; rv:31.0) Gecko/20100101 Firefox/31.0"}
#Twitter API credentials
consumer_key = "" # TWITTER
consumer_secret = "" # TWITTER
access_key = "" # TWITTER
access_secret = "" # TWITTER
#authorize twitter, initialize tweepy
auth = tweepy.OAuthHandler(consumer_key, consumer_secret) # TWITTER
auth.set_access_token(access_key, access_secret) # TWITTER
api = tweepy.API(auth) # TWITTER
print("[#] Using website scraping to gather pastes. (TOR cycles to avoid IP blocking)")
# loading notification targets
with open("notification_targets.txt") as f:
notificationtargets = f.readlines()
print("[#] Loaded " + str(len(notificationtargets)) + " notification targets.")
while 1:
# test if ready to archive
archivepath = "data/raw_pastes"
archiveit = tools.testifreadytoarchive(archivepath)
if archiveit == 1:
print("[*] Get all the pastes with credentials...")
tools.getthejuicythings(archivepath, "pastebincom")
print("[*] Archiving old Paste.org pastes...")
tools.archivepastes(archivepath, "pastebincom")
print(str(iterator) + ". iterator:")
iterator += 1
try:
response = session.get("https://pastebin.com/archive", headers=headers)
response = response.text
print("[#] Waiting...")
time.sleep(90)
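        # SoupStrainer('a') restricts parsing to anchor tags, so iterating the
        # soup yields the archive links directly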
for link in BeautifulSoup(response, parse_only=SoupStrainer('a'), features="lxml"):
if "HTML" not in link:
if link.has_attr('href'):
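                    # paste links look like "/XXXXXXXX" (an 8-char ID), hence the
                    # length-9 check; named pages like /messages are excluded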
if len(link["href"]) == 9 and link["href"][0] == "/" and link["href"] != "/messages" and link["href"] != "/settings" and link["href"] != "/scraping":
print("[*] Crawling " + link["href"])
                        # I implemented a little fix which currently keeps your IP from being blocked when simply scraping the website without using the API
binResponse = session.get("https://pastebin.com/raw" + link["href"], headers=headers)
binResponse = binResponse.text
try:
foundPasswords = 0
file_ = open("data/raw_pastes" + link["href"], "wb")
file_.write(binResponse.encode('utf-8').strip())
file_.close()
emailPattern = os.popen("grep -l -E -o \"\\b[a-zA-Z0-9.-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z0-9.-]+\\b\" data/raw_pastes" + link["href"]).read()
emailPattern = emailPattern.split("\n")
for file in emailPattern:
if file != "":
with open("data/raw_pastes" + link["href"]) as f:
pasteContent = f.readlines()
skip = 0
for line in pasteContent:
curLine = line.strip()
if (":" in curLine or ";" in curLine or "," in curLine) and "://" not in curLine and len(curLine) <=100 and "android:" not in curLine and "#EXTINF" not in curLine:
                                            tools.checknotificationtargets(notificationtargets, curLine, link["href"])
else:
skip = 1
if skip == 0:
foundPasswords = 1
curPasteMySQLi = os.popen("grep mysqli_connect\( data/raw_pastes" + link["href"]).read()
curPasteRSA = os.popen("grep 'BEGIN RSA PRIVATE KEY' data/raw_pastes" + link["href"]).read()
curPasteWP = os.popen("grep 'The name of the database for WordPress' data/raw_pastes" + link["href"]).read()
# search for onion links
containsOnion = 0
containsDocument = 0
with open("data/raw_pastes" + link["href"]) as f:
onionContent = f.readlines()
for line in onionContent:
if ".onion" in line and len(line) <= 150:
containsOnion = 1
if ".pdf" in line or ".doc" in line or ".docx" in line or ".xls" in line or ".xlsx" in line:
containsDocument = 1
if foundPasswords == 1:
foundPasswords = 0
print("Found credentials. Posting on Twitter...")
                            api.update_status("Found credentials: https://pastebin.com" + link["href"]) # TWITTER
tools.statisticsaddpoint()
elif curPasteRSA != "":
print("Found RSA key. Posting on Twitter...")
                            api.update_status("Found RSA private key: https://pastebin.com" + link["href"]) # TWITTER
tools.statisticsaddpoint()
os.system("cp data/raw_pastes" + link["href"] + " data/rsa_leaks/.")
elif curPasteWP != "":
print("Found Wordpress configuration file. Posting on Twitter...")
                            api.update_status("Found WordPress configuration file: https://pastebin.com" + link["href"]) # TWITTER
tools.statisticsaddpoint()
os.system("cp data/raw_pastes" + link["href"] + " data/wordpress_leaks/.")
elif curPasteMySQLi != "":
print("Found MySQL connect string. Posting on Twitter...")
                            api.update_status("Found MySQL connect string: https://pastebin.com" + link["href"]) # TWITTER
tools.statisticsaddpoint()
os.system("cp data/raw_pastes" + link["href"] + " data/mysql_leaks/.")
elif containsOnion == 1:
if containsDocument == 1:
print("Found .onion link to a document. Posting on Twitter...")
                                api.update_status("Found .onion link to a document: https://pastebin.com" + link["href"]) # TWITTER
tools.statisticsaddpoint()
os.system("cp data/raw_pastes" + link["href"] + " data/onion_docs/.")
else:
print("Found .onion link. Posting on Twitter...")
                                api.update_status("Found .onion link: https://pastebin.com" + link["href"]) # TWITTER
tools.statisticsaddpoint()
os.system("cp data/raw_pastes" + link["href"] + " data/onion/.")
time.sleep(1)
except Exception as e:
print(e)
continue
print("++++++++++")
print("")
except Exception as e:
print(e)
continue
| [
"requests.session",
"bs4.SoupStrainer",
"time.sleep",
"tweepy.API",
"os.popen",
"os.system",
"tweepy.OAuthHandler"
] | [((281, 299), 'requests.session', 'requests.session', ([], {}), '()\n', (297, 299), False, 'import requests\n'), ((712, 762), 'tweepy.OAuthHandler', 'tweepy.OAuthHandler', (['consumer_key', 'consumer_secret'], {}), '(consumer_key, consumer_secret)\n', (731, 762), False, 'import tweepy\n'), ((840, 856), 'tweepy.API', 'tweepy.API', (['auth'], {}), '(auth)\n', (850, 856), False, 'import tweepy\n'), ((1690, 1704), 'time.sleep', 'time.sleep', (['(90)'], {}), '(90)\n', (1700, 1704), False, 'import time\n'), ((1755, 1772), 'bs4.SoupStrainer', 'SoupStrainer', (['"""a"""'], {}), "('a')\n", (1767, 1772), False, 'from bs4 import BeautifulSoup, SoupStrainer\n'), ((5476, 5489), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (5486, 5489), False, 'import time\n'), ((2513, 2634), 'os.popen', 'os.popen', (['(\'grep -l -E -o "\\\\b[a-zA-Z0-9.-]+@[a-zA-Z0-9.-]+\\\\.[a-zA-Z0-9.-]+\\\\b" data/raw_pastes\'\n + link[\'href\'])'], {}), '(\n \'grep -l -E -o "\\\\b[a-zA-Z0-9.-]+@[a-zA-Z0-9.-]+\\\\.[a-zA-Z0-9.-]+\\\\b" data/raw_pastes\'\n + link[\'href\'])\n', (2521, 2634), False, 'import os\n'), ((3297, 3362), 'os.popen', 'os.popen', (["('grep mysqli_connect\\\\( data/raw_pastes' + link['href'])"], {}), "('grep mysqli_connect\\\\( data/raw_pastes' + link['href'])\n", (3305, 3362), False, 'import os\n'), ((3390, 3461), 'os.popen', 'os.popen', (['("grep \'BEGIN RSA PRIVATE KEY\' data/raw_pastes" + link[\'href\'])'], {}), '("grep \'BEGIN RSA PRIVATE KEY\' data/raw_pastes" + link[\'href\'])\n', (3398, 3461), False, 'import os\n'), ((3489, 3581), 'os.popen', 'os.popen', (['("grep \'The name of the database for WordPress\' data/raw_pastes" + link[\'href\']\n )'], {}), '("grep \'The name of the database for WordPress\' data/raw_pastes" +\n link[\'href\'])\n', (3497, 3581), False, 'import os\n'), ((4366, 4434), 'os.system', 'os.system', (["('cp data/raw_pastes' + link['href'] + ' data/rsa_leaks/.')"], {}), "('cp data/raw_pastes' + link['href'] + ' data/rsa_leaks/.')\n", (4375, 4434), False, 'import os\n'), ((4622, 4696), 'os.system', 'os.system', (["('cp data/raw_pastes' + link['href'] + ' data/wordpress_leaks/.')"], {}), "('cp data/raw_pastes' + link['href'] + ' data/wordpress_leaks/.')\n", (4631, 4696), False, 'import os\n'), ((4880, 4950), 'os.system', 'os.system', (["('cp data/raw_pastes' + link['href'] + ' data/mysql_leaks/.')"], {}), "('cp data/raw_pastes' + link['href'] + ' data/mysql_leaks/.')\n", (4889, 4950), False, 'import os\n'), ((5175, 5244), 'os.system', 'os.system', (["('cp data/raw_pastes' + link['href'] + ' data/onion_docs/.')"], {}), "('cp data/raw_pastes' + link['href'] + ' data/onion_docs/.')\n", (5184, 5244), False, 'import os\n'), ((5403, 5467), 'os.system', 'os.system', (["('cp data/raw_pastes' + link['href'] + ' data/onion/.')"], {}), "('cp data/raw_pastes' + link['href'] + ' data/onion/.')\n", (5412, 5467), False, 'import os\n')] |
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
from docutils.core import publish_parts
register = template.Library()
@register.filter(name='rst')
@stringfilter
def rst_to_html5(text):
parts = publish_parts(text, writer_name='html5', settings_overrides={'initial_header_level': 2})
return mark_safe(parts['html_title'] + parts['body'])
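# Minimal usage sketch (assuming this module is saved as templatetags/rst.py
# inside an installed app); in a template:
#     {% load rst %}
#     {{ page.body|rst }}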
| [
"docutils.core.publish_parts",
"django.utils.safestring.mark_safe",
"django.template.Library"
] | [((182, 200), 'django.template.Library', 'template.Library', ([], {}), '()\n', (198, 200), False, 'from django import template\n'), ((281, 374), 'docutils.core.publish_parts', 'publish_parts', (['text'], {'writer_name': '"""html5"""', 'settings_overrides': "{'initial_header_level': 2}"}), "(text, writer_name='html5', settings_overrides={\n 'initial_header_level': 2})\n", (294, 374), False, 'from docutils.core import publish_parts\n'), ((381, 427), 'django.utils.safestring.mark_safe', 'mark_safe', (["(parts['html_title'] + parts['body'])"], {}), "(parts['html_title'] + parts['body'])\n", (390, 427), False, 'from django.utils.safestring import mark_safe\n')] |
# -*- coding: utf-8 -*-
"""Top-level package for ballet."""
__author__ = '<NAME>'
__email__ = '<EMAIL>'
__version__ = '0.19.5'
# filter warnings
import warnings # noqa E402
warnings.filterwarnings(
action='ignore', module='scipy', message='^internal gelsd')
# silence sklearn deprecation warnings
import logging # noqa E402
logging.captureWarnings(True)
import sklearn # noqa E402
logging.captureWarnings(False)
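# any warnings emitted while importing sklearn above were routed into the
# logging system instead of being printed; normal handling resumes here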
warnings.filterwarnings(
action='ignore', module='sklearn', category=DeprecationWarning)
warnings.filterwarnings(
action='ignore', module='sklearn', category=FutureWarning)
# configure module-level logging
from ballet.util.log import logger # noqa E402
logger.addHandler(logging.NullHandler())
# re-export some names
from ballet.client import b # noqa E402
from ballet.contrib import collect_contrib_features # noqa E402
from ballet.feature import Feature # noqa E402
from ballet.project import load_config, Project # noqa E402
# for feature development, you really only need these two members
__all__ = (
'b',
'Feature',
)
| [
"logging.NullHandler",
"warnings.filterwarnings",
"logging.captureWarnings"
] | [((177, 265), 'warnings.filterwarnings', 'warnings.filterwarnings', ([], {'action': '"""ignore"""', 'module': '"""scipy"""', 'message': '"""^internal gelsd"""'}), "(action='ignore', module='scipy', message=\n '^internal gelsd')\n", (200, 265), False, 'import warnings\n'), ((334, 363), 'logging.captureWarnings', 'logging.captureWarnings', (['(True)'], {}), '(True)\n', (357, 363), False, 'import logging\n'), ((392, 422), 'logging.captureWarnings', 'logging.captureWarnings', (['(False)'], {}), '(False)\n', (415, 422), False, 'import logging\n'), ((423, 515), 'warnings.filterwarnings', 'warnings.filterwarnings', ([], {'action': '"""ignore"""', 'module': '"""sklearn"""', 'category': 'DeprecationWarning'}), "(action='ignore', module='sklearn', category=\n DeprecationWarning)\n", (446, 515), False, 'import warnings\n'), ((516, 603), 'warnings.filterwarnings', 'warnings.filterwarnings', ([], {'action': '"""ignore"""', 'module': '"""sklearn"""', 'category': 'FutureWarning'}), "(action='ignore', module='sklearn', category=\n FutureWarning)\n", (539, 603), False, 'import warnings\n'), ((704, 725), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (723, 725), False, 'import logging\n')] |
# coding: utf-8
# Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class CorsPolicy(object):
"""
Enable CORS (Cross-Origin-Resource-Sharing) request handling.
"""
def __init__(self, **kwargs):
"""
Initializes a new CorsPolicy object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param allowed_origins:
The value to assign to the allowed_origins property of this CorsPolicy.
:type allowed_origins: list[str]
:param allowed_methods:
The value to assign to the allowed_methods property of this CorsPolicy.
:type allowed_methods: list[str]
:param allowed_headers:
The value to assign to the allowed_headers property of this CorsPolicy.
:type allowed_headers: list[str]
:param exposed_headers:
The value to assign to the exposed_headers property of this CorsPolicy.
:type exposed_headers: list[str]
:param is_allow_credentials_enabled:
The value to assign to the is_allow_credentials_enabled property of this CorsPolicy.
:type is_allow_credentials_enabled: bool
:param max_age_in_seconds:
The value to assign to the max_age_in_seconds property of this CorsPolicy.
:type max_age_in_seconds: int
"""
self.swagger_types = {
'allowed_origins': 'list[str]',
'allowed_methods': 'list[str]',
'allowed_headers': 'list[str]',
'exposed_headers': 'list[str]',
'is_allow_credentials_enabled': 'bool',
'max_age_in_seconds': 'int'
}
self.attribute_map = {
'allowed_origins': 'allowedOrigins',
'allowed_methods': 'allowedMethods',
'allowed_headers': 'allowedHeaders',
'exposed_headers': 'exposedHeaders',
'is_allow_credentials_enabled': 'isAllowCredentialsEnabled',
'max_age_in_seconds': 'maxAgeInSeconds'
}
self._allowed_origins = None
self._allowed_methods = None
self._allowed_headers = None
self._exposed_headers = None
self._is_allow_credentials_enabled = None
self._max_age_in_seconds = None
@property
def allowed_origins(self):
"""
**[Required]** Gets the allowed_origins of this CorsPolicy.
The list of allowed origins that the CORS handler will use to respond to CORS requests. The gateway will
send the Access-Control-Allow-Origin header with the best origin match for the circumstances. '*' will match
any origins, and 'null' will match queries from 'file:' origins. All other origins must be qualified with the
scheme, full hostname, and port if necessary.
:return: The allowed_origins of this CorsPolicy.
:rtype: list[str]
"""
return self._allowed_origins
@allowed_origins.setter
def allowed_origins(self, allowed_origins):
"""
Sets the allowed_origins of this CorsPolicy.
The list of allowed origins that the CORS handler will use to respond to CORS requests. The gateway will
send the Access-Control-Allow-Origin header with the best origin match for the circumstances. '*' will match
any origins, and 'null' will match queries from 'file:' origins. All other origins must be qualified with the
scheme, full hostname, and port if necessary.
:param allowed_origins: The allowed_origins of this CorsPolicy.
:type: list[str]
"""
self._allowed_origins = allowed_origins
@property
def allowed_methods(self):
"""
Gets the allowed_methods of this CorsPolicy.
The list of allowed HTTP methods that will be returned for the preflight OPTIONS request in the
Access-Control-Allow-Methods header. '*' will allow all methods.
:return: The allowed_methods of this CorsPolicy.
:rtype: list[str]
"""
return self._allowed_methods
@allowed_methods.setter
def allowed_methods(self, allowed_methods):
"""
Sets the allowed_methods of this CorsPolicy.
The list of allowed HTTP methods that will be returned for the preflight OPTIONS request in the
Access-Control-Allow-Methods header. '*' will allow all methods.
:param allowed_methods: The allowed_methods of this CorsPolicy.
:type: list[str]
"""
self._allowed_methods = allowed_methods
@property
def allowed_headers(self):
"""
Gets the allowed_headers of this CorsPolicy.
The list of headers that will be allowed from the client via the Access-Control-Allow-Headers header.
'*' will allow all headers.
:return: The allowed_headers of this CorsPolicy.
:rtype: list[str]
"""
return self._allowed_headers
@allowed_headers.setter
def allowed_headers(self, allowed_headers):
"""
Sets the allowed_headers of this CorsPolicy.
The list of headers that will be allowed from the client via the Access-Control-Allow-Headers header.
'*' will allow all headers.
:param allowed_headers: The allowed_headers of this CorsPolicy.
:type: list[str]
"""
self._allowed_headers = allowed_headers
@property
def exposed_headers(self):
"""
Gets the exposed_headers of this CorsPolicy.
The list of headers that the client will be allowed to see from the response as indicated by the
Access-Control-Expose-Headers header. '*' will expose all headers.
:return: The exposed_headers of this CorsPolicy.
:rtype: list[str]
"""
return self._exposed_headers
@exposed_headers.setter
def exposed_headers(self, exposed_headers):
"""
Sets the exposed_headers of this CorsPolicy.
The list of headers that the client will be allowed to see from the response as indicated by the
Access-Control-Expose-Headers header. '*' will expose all headers.
:param exposed_headers: The exposed_headers of this CorsPolicy.
:type: list[str]
"""
self._exposed_headers = exposed_headers
@property
def is_allow_credentials_enabled(self):
"""
Gets the is_allow_credentials_enabled of this CorsPolicy.
Whether to send the Access-Control-Allow-Credentials header to allow CORS requests with cookies.
:return: The is_allow_credentials_enabled of this CorsPolicy.
:rtype: bool
"""
return self._is_allow_credentials_enabled
@is_allow_credentials_enabled.setter
def is_allow_credentials_enabled(self, is_allow_credentials_enabled):
"""
Sets the is_allow_credentials_enabled of this CorsPolicy.
Whether to send the Access-Control-Allow-Credentials header to allow CORS requests with cookies.
:param is_allow_credentials_enabled: The is_allow_credentials_enabled of this CorsPolicy.
:type: bool
"""
self._is_allow_credentials_enabled = is_allow_credentials_enabled
@property
def max_age_in_seconds(self):
"""
Gets the max_age_in_seconds of this CorsPolicy.
The time in seconds for the client to cache preflight responses. This is sent as the Access-Control-Max-Age
if greater than 0.
:return: The max_age_in_seconds of this CorsPolicy.
:rtype: int
"""
return self._max_age_in_seconds
@max_age_in_seconds.setter
def max_age_in_seconds(self, max_age_in_seconds):
"""
Sets the max_age_in_seconds of this CorsPolicy.
The time in seconds for the client to cache preflight responses. This is sent as the Access-Control-Max-Age
if greater than 0.
:param max_age_in_seconds: The max_age_in_seconds of this CorsPolicy.
:type: int
"""
self._max_age_in_seconds = max_age_in_seconds
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
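# Minimal usage sketch (field values are illustrative only):
#     policy = CorsPolicy(allowed_origins=['https://example.com'],
#                         allowed_methods=['GET', 'POST'],
#                         is_allow_credentials_enabled=False,
#                         max_age_in_seconds=600)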
| [
"oci.util.formatted_flat_dict"
] | [((8598, 8623), 'oci.util.formatted_flat_dict', 'formatted_flat_dict', (['self'], {}), '(self)\n', (8617, 8623), False, 'from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel\n')] |
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import paddle.fluid as fluid
from paddle.fluid.param_attr import ParamAttr
def conv_bn_layer(input,
num_filters,
filter_size,
name,
stride=1,
groups=1,
act=None):
conv = fluid.layers.conv2d(
input=input,
num_filters=num_filters,
filter_size=filter_size,
stride=stride,
padding=(filter_size - 1) // 2,
groups=groups,
act=None,
param_attr=ParamAttr(name=name + "_weights"),
bias_attr=False,
name=name + "_out")
bn_name = name + "_bn"
return fluid.layers.batch_norm(
input=conv,
act=act,
name=bn_name + '_output',
param_attr=ParamAttr(name=bn_name + '_scale'),
bias_attr=ParamAttr(bn_name + '_offset'),
moving_mean_name=bn_name + '_mean',
        moving_variance_name=bn_name + '_variance')
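# Minimal usage sketch (assumes an NCHW float32 image tensor):
#     data = fluid.data(name='image', shape=[None, 3, 224, 224], dtype='float32')
#     out = conv_bn_layer(data, num_filters=64, filter_size=7, name='conv1',
#                         stride=2, act='relu')
# padding=(filter_size - 1) // 2 keeps the spatial size unchanged at stride 1
# for odd filter sizes.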
| [
"paddle.fluid.param_attr.ParamAttr"
] | [((1124, 1157), 'paddle.fluid.param_attr.ParamAttr', 'ParamAttr', ([], {'name': "(name + '_weights')"}), "(name=name + '_weights')\n", (1133, 1157), False, 'from paddle.fluid.param_attr import ParamAttr\n'), ((1365, 1399), 'paddle.fluid.param_attr.ParamAttr', 'ParamAttr', ([], {'name': "(bn_name + '_scale')"}), "(name=bn_name + '_scale')\n", (1374, 1399), False, 'from paddle.fluid.param_attr import ParamAttr\n'), ((1419, 1449), 'paddle.fluid.param_attr.ParamAttr', 'ParamAttr', (["(bn_name + '_offset')"], {}), "(bn_name + '_offset')\n", (1428, 1449), False, 'from paddle.fluid.param_attr import ParamAttr\n')] |
from pyscf import gto
import radii
def from_frag(xyz, frags, chgs, spins, gjfhead='', scrfhead='', gjfname='', basis=None, wfnpath=None):
# mol = gto.Mole()
# mol.atom = xyz
# mol.basis = bas
# mol.verbose = 1
# mol.build()
#
if isinstance(frags[0], str):
frags = str2list(frags)
guess_frag(xyz, frags, chgs, spins, gjfhead.lstrip('\n'), scrfhead, gjfname, basis, wfnpath)
def spin_p2g(spin):
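    # map a PySCF-style spin (N_alpha - N_beta) to a Gaussian-style
    # multiplicity (2S + 1), keeping the sign so spin-flipped fragments
    # stay marked in the .gjf file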
if spin >= 0:
spin = spin + 1
elif spin < 0:
spin = spin - 1
return spin
def str2list(frags):
flist = []
for frag in frags:
alist = []
for s in frag.split(','):
if '-' in s:
start = int(s.split('-')[0])
end = int(s.split('-')[1])
else:
start = int(s)
end = int(s)
alist += range(start, end+1)
flist.append(alist)
return flist
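# worked example: str2list(['1-3,5', '4']) -> [[1, 2, 3, 5], [4]]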
def guess_frag(xyz, frags, chgs, spins, gjfhead, scrfhead, gjfname, basis, wfnpath):
'''
frags: e.g. [[1], [2]] for N2
chgs: e.g. [0, 0] for N2
spins: e.g. [3, -3] for N2
'''
#mol.build()
print('**** generating fragments ****')
atom = gto.format_atom(xyz, unit=1)
#print(atom)
#fraga, fragb = frags
#chga, chgb = chgs
#spina, spinb = spins
allatom = range(1,len(atom)+1)
for k in range(len(frags)):
frag = frags[k]
chg = chgs[k]
spin = spins[k]
g_spin = spin_p2g(spin)
atomk = [atom[i-1] for i in frag]
atomother = [atom[i-1] for i in allatom if i not in frag]
print('fragment %d, chg %d, spin %d' % (k, chg, spin))
#print(atomk)
with open(gjfname+'%d.gjf'%k, 'w') as f:
f.write(gjfhead)
f.write('%d %d\n' % (chg, g_spin))
for a in atomk:
f.write('%s %10.5f %10.5f %10.5f\n' % (a[0], a[1][0], a[1][1], a[1][2]))
#f.write('\n')
if basis is not None:
f.write(basis)
#f.write('\n')
f.write(scrfhead)
f.write('ExtraSph=%d\n\n' % len(atomother))
for b in atomother:
rad = radii.uff_radii[b[0]] / 2.0
f.write(' %10.5f %10.5f %10.5f %10.5f\n' % (b[1][0], b[1][1], b[1][2], rad))
f.write('\n')
if wfnpath is not None:
f.write(wfnpath + '%d.wfn'%k + '\n')
f.write('\n')
| [
"pyscf.gto.format_atom"
] | [((1183, 1211), 'pyscf.gto.format_atom', 'gto.format_atom', (['xyz'], {'unit': '(1)'}), '(xyz, unit=1)\n', (1198, 1211), False, 'from pyscf import gto\n')] |
"""
File to control the parameters of the SITE approach and to specify the postprocessing functionality.
The parameters for each equation are the ones used in the paper. All results of the paper
'Sparse Identification of Truncation Errors' by Thaler, Paehler and Adams, 2019 can be replicated simply by
setting the appropriate parameters in this control file. The exceptions are the method of manufactured solutions
in the file 'ManufacturedSolutions', the derivations of the analytic modified differential equations (MDEs) in
the respective files, and a few plots that are generated in 'Postprocessing_Util'. For an understanding of the parameters
below in this file, we assume knowledge of the preprint of our paper.
"""
import SITE
import Postprocessing_Util
if __name__ == '__main__':
# ########################### User input ##########################################################
# discretization parameters:
equation = 'Advection' # other choices: 'Burgers' ; 'KdV'
a = None # initialize, such that input exists for Burgers, KdV, will be overwritten in Advection case
x_min = 0. # calculation domain Omega = [x_min, x_max]
x_max = 1.
# define discretization parameters and library design for each equation separately
if equation == 'Advection':
a = 1.
x_nodes_list = [300] # default choice
# x_nodes_list = [200, 300, 400, 500] # calculate term orders
# to calculate resolution properties:
# x_nodes_list = [50, 100, 150, 200, 250, 300, 350, 400, 450, 500, 550, 600, 650, 700, 750, 800, 900, 1000]
t_steps = 17 # 12 steps are padded
cfl = 0.01
# library parameters:
D = 6 # highest derivative to be included in library
P = 6 # highest polynomial order to multiply u with derivative basis functions
# cumulative orders to include in the library; for definition see paper or 'findCombinations' in PDE_FIND_lib
combinations = [1, 2, 3, 4, 5, 6] # large library
# combinations = None # small library
        hotime = 0                          # no higher order time derivatives
elif equation == 'Burgers':
x_nodes_list = [10000] # default choice
# x_nodes_list = [6000, 8000, 10000, 12000] # calculate term orders
# to calculate resolution properties:
# x_nodes_list = [1000, 2000, 4000, 6000, 8000, 10000, 12000, 14000, 16000, 18000, 20000, 22000, 25000]
t_steps = 17
cfl = 0.5
# library parameters:
D = 3 # highest derivative to be included in library
P = 3 # highest polynomial order to multiply u with derivative basis functions
# cumulative orders to include in the library; for definition see paper or 'findCombinations' in PDE_FIND_lib
combinations = [1, 2, 3]
        hotime = 0                          # no higher order time derivatives
elif equation == 'KdV':
x_nodes_list = [100] # default choice
# x_nodes_list = [87, 100, 112, 125] # calculate term orders
# to calculate resolution properties:
# x_nodes_list = [50, 60, 75, 87, 100, 110, 125, 135, 150, 175]
t_steps = 19 # 14 are padded
cfl = 1.e-6
# library parameters:
D = 7 # highest derivative to be included in library
P = 5 # highest polynomial order to multiply u with derivative basis functions
# cumulative orders to include in the library; for definition see paper or 'findCombinations' in PDE_FIND_lib
combinations = [2, 3]
pairwise = 0
hotime = 3 # higher order time derivatives appended up to 3rd order
else: raise Exception('Equation not implemented! (or typo)')
# accuracy orders of finite difference stencils used in PDE-FIND to build the library Theta(u) and u_t:
acc_time = 8
acc_space = 8
# spline parameters:
# n_ctr: number of NURBS control points within Omega
n_ctr_train = 15 # needs to be larger than curve_degree + 3 to be able to enforce periodicity
n_ctr_test = 11 # needs to be larger than curve_degree + 3 to be able to enforce periodicity
curve_degree = 8 # degree of NURBS; should be high enough to ensure high order differentiability
# number of points of spline per grid node used to interpolate spline values on grid points
eval_points_per_node = 30
# spline optimization parameters:
bound_amplitude = 1. # maximum allowed y-value of each control point
particles = 50 # number of particles for particle swarm optimization
iterations = 100 # number of iterations for particle swarm optimization
# default particle swarm parameters (see documentation of pyswarms for its definitions):
c1 = 0.5
c2 = 0.3
w_pso = 0.9
# Preconditioner choices:
# 'norm_precondition': scale the system matrix and apply a puffer transformation afterwards
# 'norm': only scale the system matrix (robust default)
# 'precondition': applies puffer transform without scaling first (depreciated)
# None: use system matrix as obtained from PDE-FIND (depreciated)
preconditioner = 'norm'
# Initial condition choices:
use_spline = True # if True uses spline initialization; else the Gauss initial condition is used
optimize_spline = False # if True re-runs the particle swarm optimization of the spline; else loads saved spline
# Specify which functionality to be used; setting both true does not make a lot of sense:
# comparison of sparse regression algorithms for given preconditioner and discretization parameters
comparison_sparse_regression_algorithms = False
# study of resolution dependency for given preconditioner and sparse regression algorithm 'BIC_algo'
plot_resolution_dependency = False
BIC_algo = 'FoBa' # sparse regression algorithm for resolution dependency and BIC model selection
# whether to calculate the term orders:
# the function assumes all models from the optimal choice to have the same non-zero parameters
calculate_term_orders = False
# ############################### End user input #######################################################
# Runs SITE for given Parameters:
# list initializations for evaluation of resolution properties
best_model_list = []
BIC_list = []
for x_nodes in x_nodes_list:
BIC_model, best_model, rhs_description = SITE.site(
equation, x_nodes, t_steps, D, P, combinations, optimize_spline=optimize_spline, x_min=x_min,
            x_max=x_max, acc_space=acc_space, acc_time=acc_time, preconditioner=preconditioner, a=a, cfl=cfl,
n_ctr_train=n_ctr_train, n_ctr_test=n_ctr_test, curve_degree=curve_degree,
eval_points_per_node=eval_points_per_node, bound_amplitude=bound_amplitude, particles=particles,
iterations=iterations, c1=c1, c2=c2, w_pso=w_pso,
comparison_sparse_regression_algorithms=comparison_sparse_regression_algorithms,
use_spline=use_spline, hotime=hotime, BIC_algo=BIC_algo)
# save BIC choice and optimal choice to evaluate resolution properties
BIC_list.append(BIC_model)
best_model_list.append(best_model)
# Postprocessing
if calculate_term_orders:
Postprocessing_Util.calculate_orders(best_model_list, x_nodes_list)
if plot_resolution_dependency:
Postprocessing_Util.plot_resolution(best_model_list, BIC_list, x_nodes_list, equation, preconditioner, t_steps)
| [
"Postprocessing_Util.calculate_orders",
"Postprocessing_Util.plot_resolution",
"SITE.site"
] | [((6369, 6972), 'SITE.site', 'SITE.site', (['equation', 'x_nodes', 't_steps', 'D', 'P', 'combinations'], {'optimize_spline': 'optimize_spline', 'x_min': 'x_min', 'x_max': 'x_max', 'acc_space': 'acc_space', 'acc_time': 'acc_space', 'preconditioner': 'preconditioner', 'a': 'a', 'cfl': 'cfl', 'n_ctr_train': 'n_ctr_train', 'n_ctr_test': 'n_ctr_test', 'curve_degree': 'curve_degree', 'eval_points_per_node': 'eval_points_per_node', 'bound_amplitude': 'bound_amplitude', 'particles': 'particles', 'iterations': 'iterations', 'c1': 'c1', 'c2': 'c2', 'w_pso': 'w_pso', 'comparison_sparse_regression_algorithms': 'comparison_sparse_regression_algorithms', 'use_spline': 'use_spline', 'hotime': 'hotime', 'BIC_algo': 'BIC_algo'}), '(equation, x_nodes, t_steps, D, P, combinations, optimize_spline=\n optimize_spline, x_min=x_min, x_max=x_max, acc_space=acc_space,\n acc_time=acc_space, preconditioner=preconditioner, a=a, cfl=cfl,\n n_ctr_train=n_ctr_train, n_ctr_test=n_ctr_test, curve_degree=\n curve_degree, eval_points_per_node=eval_points_per_node,\n bound_amplitude=bound_amplitude, particles=particles, iterations=\n iterations, c1=c1, c2=c2, w_pso=w_pso,\n comparison_sparse_regression_algorithms=\n comparison_sparse_regression_algorithms, use_spline=use_spline, hotime=\n hotime, BIC_algo=BIC_algo)\n', (6378, 6972), False, 'import SITE\n'), ((7261, 7328), 'Postprocessing_Util.calculate_orders', 'Postprocessing_Util.calculate_orders', (['best_model_list', 'x_nodes_list'], {}), '(best_model_list, x_nodes_list)\n', (7297, 7328), False, 'import Postprocessing_Util\n'), ((7373, 7488), 'Postprocessing_Util.plot_resolution', 'Postprocessing_Util.plot_resolution', (['best_model_list', 'BIC_list', 'x_nodes_list', 'equation', 'preconditioner', 't_steps'], {}), '(best_model_list, BIC_list, x_nodes_list,\n equation, preconditioner, t_steps)\n', (7408, 7488), False, 'import Postprocessing_Util\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = '<NAME>'
import os
import cv2
import numpy
def find_images(path, recursive=False, ignore=True):
if os.path.isfile(path):
yield path
elif os.path.isdir(path):
        assert os.path.isdir(path), 'FileIO - find_images: Directory does not exist'
        assert isinstance(recursive, bool), 'FileIO - find_images: recursive must be a boolean variable'
ext, result = ['png', 'jpg', 'jpeg'], []
for path_a in os.listdir(path):
path_a = path + '/' + path_a
if os.path.isdir(path_a) and recursive:
                for path_b in find_images(path_a, recursive, ignore):
yield path_b
check_a = path_a.split('.')[-1] in ext
check_b = ignore or ('-' not in path_a.split('/')[-1])
if check_a and check_b:
yield path_a
else:
raise ValueError('error! path is not a valid path or directory')
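# Minimal usage sketch: print every image under 'photos/', descending into
# subdirectories:
#     for img_path in find_images('photos', recursive=True):
#         print(img_path)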
def display(title, img, max_size=200000):
assert isinstance(img, numpy.ndarray), 'img must be a numpy array'
assert isinstance(title, str), 'title must be a string'
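    # shrink (never enlarge) so the displayed image covers at most max_size pixels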
scale = numpy.sqrt(min(1.0, float(max_size) / (img.shape[0] * img.shape[1])))
shape = (int(scale * img.shape[1]), int(scale * img.shape[0]))
img = cv2.resize(img, shape)
cv2.imshow(title, img)
| [
"os.listdir",
"cv2.imshow",
"os.path.isfile",
"os.path.isdir",
"cv2.resize"
] | [((166, 186), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (180, 186), False, 'import os\n'), ((1291, 1313), 'cv2.resize', 'cv2.resize', (['img', 'shape'], {}), '(img, shape)\n', (1301, 1313), False, 'import cv2\n'), ((1318, 1340), 'cv2.imshow', 'cv2.imshow', (['title', 'img'], {}), '(title, img)\n', (1328, 1340), False, 'import cv2\n'), ((216, 235), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (229, 235), False, 'import os\n'), ((252, 271), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (265, 271), False, 'import os\n'), ((496, 512), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (506, 512), False, 'import os\n'), ((570, 591), 'os.path.isdir', 'os.path.isdir', (['path_a'], {}), '(path_a)\n', (583, 591), False, 'import os\n')] |
from aoc2019 import *
import unittest
class Day1(unittest.TestCase):
def test_mass_12(self):
chall = Rocket()
self.assertEqual(chall.calc_fuel_weight(12), 2)
def test_mass_14(self):
chall = Rocket()
self.assertEqual(chall.calc_fuel_weight(14), 2)
def test_mass_1969(self):
chall = Rocket()
self.assertEqual(chall.calc_fuel_weight(1969), 654)
def test_mass_100756(self):
chall = Rocket()
self.assertEqual(chall.calc_fuel_weight(100756), 33583)
def test_mass2_12(self):
chall = Rocket()
self.assertEqual(chall.calc_fuel_weight_recursive(12), 2)
def test_mass2_1969(self):
chall = Rocket()
self.assertEqual(chall.calc_fuel_weight_recursive(1969), 966)
def test_mass2_100756(self):
chall = Rocket()
self.assertEqual(chall.calc_fuel_weight_recursive(100756), 50346)
if __name__ == '__main__':
unittest.main() | [
"unittest.main"
] | [((940, 955), 'unittest.main', 'unittest.main', ([], {}), '()\n', (953, 955), False, 'import unittest\n')] |
"""
Contains the definition of Compound.
"""
from xdtools.artwork import Artwork
from xdtools.utils import Point
class Compound(Artwork):
"""
A compound shape.
=== Attributes ===
uid - the unique id of this Compound shape.
name - the name of this Compound shape as it appears in the Layers panel.
position - the position of this Compound shape.
path - the path of this Compound shape.
children - the children contained in this Compound shape.
operation - the operation performed on the paths of this Compound shape.
=== Operations ===
"""
def __init__(self, uid: int, path: str, operation: str, children=None,
name='Compound', x=0, y=0) -> None:
"""Instantiate a new Compound."""
super().__init__(uid, 'compound', name)
self.path = path
self.operation = operation
self.children = [] if children is None else children
self.position = Point(x, y)
def __repr__(self) -> str:
"""Return a constructor-style representation of this Compound."""
return str.format(
"Compound(uid={}, type={}, path={}, operation={}, " +
"children={}, name={}, position={}, styles={})",
repr(self.uid), repr(self.type), repr(self.path), repr(self.operation),
            repr(self.children), repr(self.name), repr(self.position), repr(self.styles))
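# Minimal usage sketch (uid and path data are illustrative only):
#     union = Compound(uid=12, path='M0 0 L10 0 L10 10 Z', operation='union',
#                      name='Union shape', x=5, y=5)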
| [
"xdtools.utils.Point"
] | [((952, 963), 'xdtools.utils.Point', 'Point', (['x', 'y'], {}), '(x, y)\n', (957, 963), False, 'from xdtools.utils import Point\n')] |
#!/usr/bin/env python
# -*- coding: latin-1 -*-
"""
Test play sounds. (January 1st, 2015)
Piece of SimpleGUICS2Pygame.
https://bitbucket.org/OPiMedia/simpleguics2pygame
GPLv3 --- Copyright (C) 2015 <NAME>
http://www.opimedia.be/
"""
import time
try:
import simplegui
SIMPLEGUICS2PYGAME = False
except ImportError:
import SimpleGUICS2Pygame.simpleguics2pygame as simplegui
SIMPLEGUICS2PYGAME = True
TEST = 'test sound'
sound_jump = simplegui.Sound('http://commondatastorage.googleapis.com/codeskulptor-assets/jump.ogg')
if SIMPLEGUICS2PYGAME:
local_sound_chirp = simplegui._LocalSound('_snd/chirp_1s.wav')
def wait(seconds):
"""
Wait during `seconds` seconds.
:param seconds: (int or float) >= 0
"""
assert isinstance(seconds, int) or isinstance(seconds, float), \
type(seconds)
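    # simple busy-wait until `seconds` have elapsed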
start = time.time()
    while time.time() - start < seconds:
pass
# Main
wait(1)
print('Play "jump.ogg"')
sound_jump.play()
wait(1)
if SIMPLEGUICS2PYGAME:
print('Play local "chirp_1s.wav"')
local_sound_chirp.play()
wait(1)
| [
"SimpleGUICS2Pygame.simpleguics2pygame.Sound",
"time.time",
"SimpleGUICS2Pygame.simpleguics2pygame._LocalSound"
] | [((457, 549), 'SimpleGUICS2Pygame.simpleguics2pygame.Sound', 'simplegui.Sound', (['"""http://commondatastorage.googleapis.com/codeskulptor-assets/jump.ogg"""'], {}), "(\n 'http://commondatastorage.googleapis.com/codeskulptor-assets/jump.ogg')\n", (472, 549), True, 'import SimpleGUICS2Pygame.simpleguics2pygame as simplegui\n'), ((593, 635), 'SimpleGUICS2Pygame.simpleguics2pygame._LocalSound', 'simplegui._LocalSound', (['"""_snd/chirp_1s.wav"""'], {}), "('_snd/chirp_1s.wav')\n", (614, 635), True, 'import SimpleGUICS2Pygame.simpleguics2pygame as simplegui\n'), ((853, 864), 'time.time', 'time.time', ([], {}), '()\n', (862, 864), False, 'import time\n'), ((875, 886), 'time.time', 'time.time', ([], {}), '()\n', (884, 886), False, 'import time\n')] |
import os
import os.path as osp
import pickle
import time
import numpy as np
from multiprocessing import Pool
from ..utils import get_bbox_dim
from .misc import read_img_info, change_cls_order, get_classes
def load_imgs(img_dir, ann_dir=None, classes=None, nproc=10,
def_bbox_type='poly'):
assert def_bbox_type in ['hbb', 'obb', 'poly', None]
assert osp.isdir(img_dir), f'The {img_dir} is not an existing dir!'
if ann_dir is not None:
        print('ann_dir is not used in the load_imgs function')
    print('Starting to load image information')
start_time = time.time()
imgpaths = [osp.join(img_dir, imgfile)
for imgfile in os.listdir(img_dir)]
if nproc > 1:
pool = Pool(nproc)
infos = pool.map(read_img_info, imgpaths)
pool.close()
else:
infos = list(map(read_img_info, imgpaths))
if def_bbox_type is not None:
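        # attach an empty (0, bbox_dim) annotation stub so downstream code can
        # rely on info['ann'] existing even for unlabeled images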
for info in infos:
if info is None:
continue
bbox_dim = get_bbox_dim(def_bbox_type)
bboxes = np.zeros((0, bbox_dim), dtype=np.float32)
labels = np.zeros((0, ), dtype=np.int64)
info['ann'] = dict(bboxes=bboxes, labels=labels)
classes = () if classes is None else classes
end_time = time.time()
    print(f'Finished loading images, got {len(infos)} images,',
          f'using {end_time-start_time:.3f}s.')
return infos, classes
def load_pkl(ann_dir, img_dir=None, classes=None, nproc=10):
assert osp.isfile(ann_dir), f'The {ann_dir} is not an existing pkl file!'
assert img_dir is None or osp.isdir(img_dir), f'The {img_dir} is not an existing dir!'
    print('Starting to load pkl information')
start_time = time.time()
data = pickle.load(open(ann_dir, 'rb'))
old_classes, contents = data['cls'], data['content']
if img_dir is not None:
imgpaths = [osp.join(img_dir, content['filename'])
for content in contents]
if nproc > 1:
pool = Pool(nproc)
infos = pool.map(read_img_info, imgpaths)
pool.close()
else:
infos = list(map(read_img_info, imgpaths))
for info, content in zip(infos, contents):
content.update(info)
if classes is None:
classes = old_classes
else:
classes = get_classes(classes)
change_cls_order(contents, old_classes, classes)
end_time = time.time()
    print(f'Finished loading pkl, got {len(contents)} images,',
          f'using {end_time-start_time:.3f}s.')
return contents, classes
def save_pkl(save_dir, contents, classes):
assert save_dir.endswith('.pkl')
filepath = osp.split(save_dir)[0]
if not osp.exists(filepath):
os.makedirs(filepath)
data = dict(cls=classes, content=contents)
pickle.dump(data, open(save_dir, 'wb'))
| [
"os.path.exists",
"os.listdir",
"os.makedirs",
"os.path.join",
"os.path.split",
"os.path.isfile",
"numpy.zeros",
"os.path.isdir",
"multiprocessing.Pool",
"time.time"
] | [((376, 394), 'os.path.isdir', 'osp.isdir', (['img_dir'], {}), '(img_dir)\n', (385, 394), True, 'import os.path as osp\n'), ((589, 600), 'time.time', 'time.time', ([], {}), '()\n', (598, 600), False, 'import time\n'), ((1281, 1292), 'time.time', 'time.time', ([], {}), '()\n', (1290, 1292), False, 'import time\n'), ((1506, 1525), 'os.path.isfile', 'osp.isfile', (['ann_dir'], {}), '(ann_dir)\n', (1516, 1525), True, 'import os.path as osp\n'), ((1728, 1739), 'time.time', 'time.time', ([], {}), '()\n', (1737, 1739), False, 'import time\n'), ((2436, 2447), 'time.time', 'time.time', ([], {}), '()\n', (2445, 2447), False, 'import time\n'), ((617, 643), 'os.path.join', 'osp.join', (['img_dir', 'imgfile'], {}), '(img_dir, imgfile)\n', (625, 643), True, 'import os.path as osp\n'), ((729, 740), 'multiprocessing.Pool', 'Pool', (['nproc'], {}), '(nproc)\n', (733, 740), False, 'from multiprocessing import Pool\n'), ((1603, 1621), 'os.path.isdir', 'osp.isdir', (['img_dir'], {}), '(img_dir)\n', (1612, 1621), True, 'import os.path as osp\n'), ((2687, 2706), 'os.path.split', 'osp.split', (['save_dir'], {}), '(save_dir)\n', (2696, 2706), True, 'import os.path as osp\n'), ((2721, 2741), 'os.path.exists', 'osp.exists', (['filepath'], {}), '(filepath)\n', (2731, 2741), True, 'import os.path as osp\n'), ((2751, 2772), 'os.makedirs', 'os.makedirs', (['filepath'], {}), '(filepath)\n', (2762, 2772), False, 'import os\n'), ((675, 694), 'os.listdir', 'os.listdir', (['img_dir'], {}), '(img_dir)\n', (685, 694), False, 'import os\n'), ((1061, 1102), 'numpy.zeros', 'np.zeros', (['(0, bbox_dim)'], {'dtype': 'np.float32'}), '((0, bbox_dim), dtype=np.float32)\n', (1069, 1102), True, 'import numpy as np\n'), ((1124, 1154), 'numpy.zeros', 'np.zeros', (['(0,)'], {'dtype': 'np.int64'}), '((0,), dtype=np.int64)\n', (1132, 1154), True, 'import numpy as np\n'), ((1890, 1928), 'os.path.join', 'osp.join', (['img_dir', "content['filename']"], {}), "(img_dir, content['filename'])\n", (1898, 1928), True, 'import os.path as osp\n'), ((2015, 2026), 'multiprocessing.Pool', 'Pool', (['nproc'], {}), '(nproc)\n', (2019, 2026), False, 'from multiprocessing import Pool\n')] |
from setuptools import setup
setup(name='emoji_map',
version='0.1',
description='Maps unicode emoji to its description',
url='http://github.com/rchurch4/emoji_map',
author='<NAME>',
author_email='<EMAIL>',
license='MIT',
packages=['emoji_map'],
include_package_data=True,
zip_safe=False)
| [
"setuptools.setup"
] | [((30, 308), 'setuptools.setup', 'setup', ([], {'name': '"""emoji_map"""', 'version': '"""0.1"""', 'description': '"""Maps unicode emoji to its description"""', 'url': '"""http://github.com/rchurch4/emoji_map"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'license': '"""MIT"""', 'packages': "['emoji_map']", 'include_package_data': '(True)', 'zip_safe': '(False)'}), "(name='emoji_map', version='0.1', description=\n 'Maps unicode emoji to its description', url=\n 'http://github.com/rchurch4/emoji_map', author='<NAME>', author_email=\n '<EMAIL>', license='MIT', packages=['emoji_map'], include_package_data=\n True, zip_safe=False)\n", (35, 308), False, 'from setuptools import setup\n')] |
import json
import os
import pytest
import tempfile
@pytest.fixture(scope='session', autouse=True)
def working_test_dir():
    # create a temporary directory to use for everything
tmp_working_dir = tempfile.TemporaryDirectory()
yield tmp_working_dir.name
# delete it at the end of the session
tmp_working_dir.cleanup()
return
@pytest.fixture(scope='session', autouse=True)
def patched_config_file(working_test_dir):
# now we have to manually modify the config file and replace it later
this_dir, this_filename = os.path.split(__file__)
config_path = os.path.join(this_dir, "../molscore/config.json")
file = open(config_path, 'r')
config_save = json.load(file)
file.close()
file = open(config_path, 'w')
file.write('{"DEFAULT_DATABASE_ROOT": "' + str(working_test_dir) +
'/data"}')
file.close()
yield None
# now we have to save the old one back
file = open(config_path, 'w')
json.dump(config_save, file)
file.close()
return
| [
"tempfile.TemporaryDirectory",
"os.path.join",
"os.path.split",
"json.load",
"pytest.fixture",
"json.dump"
] | [((55, 100), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""', 'autouse': '(True)'}), "(scope='session', autouse=True)\n", (69, 100), False, 'import pytest\n'), ((361, 406), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""', 'autouse': '(True)'}), "(scope='session', autouse=True)\n", (375, 406), False, 'import pytest\n'), ((214, 243), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (241, 243), False, 'import tempfile\n'), ((555, 578), 'os.path.split', 'os.path.split', (['__file__'], {}), '(__file__)\n', (568, 578), False, 'import os\n'), ((597, 646), 'os.path.join', 'os.path.join', (['this_dir', '"""../molscore/config.json"""'], {}), "(this_dir, '../molscore/config.json')\n", (609, 646), False, 'import os\n'), ((699, 714), 'json.load', 'json.load', (['file'], {}), '(file)\n', (708, 714), False, 'import json\n'), ((977, 1005), 'json.dump', 'json.dump', (['config_save', 'file'], {}), '(config_save, file)\n', (986, 1005), False, 'import json\n')] |
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit Tests for Google Analytics Account Feed and Data Feed.
AccountFeedTest: All unit tests for AccountFeed class.
DataFeedTest: All unit tests for DataFeed class.
"""
__author__ = '<EMAIL> (<NAME>)'
import unittest
from gdata import test_data
import gdata.analytics.data
import atom.core
import gdata.test_config as conf
class AccountFeedTest(unittest.TestCase):
"""Unit test for all custom elements in the Account Feed."""
def setUp(self):
"""Retrieves the test XML feed into a AccountFeed object."""
self.feed = atom.core.parse(test_data.ANALYTICS_ACCOUNT_FEED,
gdata.analytics.data.AccountFeed)
def testAccountEntryTableId(self):
"""Tests custom classes in Google Analytics Account Feed."""
entry = self.feed.entry[0]
self.assertEquals(entry.table_id.text, 'ga:1174')
def testAccountEntryProperty(self):
"""Tests the property classes in Google Analytics Account Feed."""
property = self.feed.entry[0].property
self.assertEquals(property[0].name, 'ga:accountId')
self.assertEquals(property[0].value, '30481')
self.assertEquals(property[1].name, 'ga:accountName')
self.assertEquals(property[1].value, 'Google Store')
self.assertEquals(property[2].name, 'ga:profileId')
self.assertEquals(property[2].value, '1174')
self.assertEquals(property[3].name, 'ga:webPropertyId')
self.assertEquals(property[3].value, 'UA-30481-1')
self.assertEquals(property[4].name, 'ga:currency')
self.assertEquals(property[4].value, 'USD')
self.assertEquals(property[5].name, 'ga:timezone')
self.assertEquals(property[5].value, 'America/Los_Angeles')
def testAccountEntryGetProperty(self):
"""Tests GetProperty inherited class in the AccountEntry class."""
entry = self.feed.entry[0]
self.assertEquals(entry.GetProperty('ga:accountId').value, '30481')
self.assertEquals(entry.GetProperty('ga:accountName').value, 'Google Store')
self.assertEquals(entry.GetProperty('ga:profileId').value, '1174')
self.assertEquals(entry.GetProperty('ga:webPropertyId').value, 'UA-30481-1')
self.assertEquals(entry.GetProperty('ga:currency').value, 'USD')
self.assertEquals(entry.GetProperty('ga:timezone').value, 'America/Los_Angeles')
class DataFeedTest(unittest.TestCase):
"""Unit test for all custom elements in the Data Feed."""
def setUp(self):
"""Retrieves the test XML feed into a DataFeed object."""
self.feed = atom.core.parse(test_data.ANALYTICS_DATA_FEED,
gdata.analytics.data.DataFeed)
def testDataFeed(self):
"""Tests custom classes in Google Analytics Data Feed."""
self.assertEquals(self.feed.start_date.text, '2008-10-01')
self.assertEquals(self.feed.end_date.text, '2008-10-31')
def testAggregates(self):
"""Tests Aggregates class in Google Analytics Data Feed."""
self.assert_(self.feed.aggregates is not None)
def testAggregatesElements(self):
"""Tests Metrics class in Aggregates class."""
metric = self.feed.aggregates.metric[0]
self.assertEquals(metric.confidence_interval, '0.0')
self.assertEquals(metric.name, 'ga:visits')
self.assertEquals(metric.type, 'integer')
self.assertEquals(metric.value, '136540')
metric = self.feed.aggregates.GetMetric('ga:visits')
self.assertEquals(metric.confidence_interval, '0.0')
self.assertEquals(metric.name, 'ga:visits')
self.assertEquals(metric.type, 'integer')
self.assertEquals(metric.value, '136540')
def testDataSource(self):
"""Tests DataSources class in Google Analytics Data Feed."""
self.assert_(self.feed.data_source[0] is not None)
def testDataSourceTableId(self):
"""Tests TableId class in the DataSource class."""
table_id = self.feed.data_source[0].table_id
self.assertEquals(table_id.text, 'ga:1174')
def testDataSourceTableName(self):
"""Tests TableName class in the DataSource class."""
table_name = self.feed.data_source[0].table_name
self.assertEquals(table_name.text, 'www.googlestore.com')
def testDataSourceProperty(self):
"""Tests Property clas in the DataSource class."""
property = self.feed.data_source[0].property
self.assertEquals(property[0].name, 'ga:profileId')
self.assertEquals(property[0].value, '1174')
self.assertEquals(property[1].name, 'ga:webPropertyId')
self.assertEquals(property[1].value, 'UA-30481-1')
self.assertEquals(property[2].name, 'ga:accountName')
self.assertEquals(property[2].value, 'Google Store')
def testDataSourceGetProperty(self):
"""Tests GetProperty utility method in the DataSource class."""
ds = self.feed.data_source[0]
self.assertEquals(ds.GetProperty('ga:profileId').value, '1174')
self.assertEquals(ds.GetProperty('ga:webPropertyId').value, 'UA-30481-1')
self.assertEquals(ds.GetProperty('ga:accountName').value, 'Google Store')
def testEntryDimension(self):
"""Tests Dimension class in Entry class."""
dim = self.feed.entry[0].dimension[0]
self.assertEquals(dim.name, 'ga:source')
self.assertEquals(dim.value, 'blogger.com')
def testEntryGetDimension(self):
"""Tests GetDimension utility method in the Entry class."""
dim = self.feed.entry[0].GetDimension('ga:source')
self.assertEquals(dim.name, 'ga:source')
self.assertEquals(dim.value, 'blogger.com')
error = self.feed.entry[0].GetDimension('foo')
self.assertEquals(error, None)
def testEntryMetric(self):
"""Tests Metric class in Entry class."""
met = self.feed.entry[0].metric[0]
self.assertEquals(met.confidence_interval, '0.0')
self.assertEquals(met.name, 'ga:visits')
self.assertEquals(met.type, 'integer')
self.assertEquals(met.value, '68140')
def testEntryGetMetric(self):
"""Tests GetMetric utility method in the Entry class."""
met = self.feed.entry[0].GetMetric('ga:visits')
self.assertEquals(met.confidence_interval, '0.0')
self.assertEquals(met.name, 'ga:visits')
self.assertEquals(met.type, 'integer')
self.assertEquals(met.value, '68140')
error = self.feed.entry[0].GetMetric('foo')
self.assertEquals(error, None)
def testEntryGetObject(self):
"""Tests GetObjectOf utility method in Entry class."""
entry = self.feed.entry[0]
dimension = entry.GetObject('ga:source')
self.assertEquals(dimension.name, 'ga:source')
self.assertEquals(dimension.value, 'blogger.com')
metric = entry.GetObject('ga:visits')
self.assertEquals(metric.name, 'ga:visits')
self.assertEquals(metric.value, '68140')
self.assertEquals(metric.type, 'integer')
self.assertEquals(metric.confidence_interval, '0.0')
error = entry.GetObject('foo')
self.assertEquals(error, None)
def suite():
"""Test Account Feed and Data Feed."""
return conf.build_suite([AccountFeedTest, DataFeedTest])
if __name__ == '__main__':
unittest.main()
| [
"unittest.main",
"gdata.test_config.build_suite"
] | [((7440, 7489), 'gdata.test_config.build_suite', 'conf.build_suite', (['[AccountFeedTest, DataFeedTest]'], {}), '([AccountFeedTest, DataFeedTest])\n', (7456, 7489), True, 'import gdata.test_config as conf\n'), ((7521, 7536), 'unittest.main', 'unittest.main', ([], {}), '()\n', (7534, 7536), False, 'import unittest\n')] |
#!/usr/bin/env python
import rospy
from math import sqrt
from geometry_msgs.msg import Twist, PoseStamped, Pose2D, PointStamped, PoseWithCovarianceStamped
from std_msgs.msg import Empty, String
import Queue
goal_point = rospy.Publisher(
"move_base_simple/goal", PoseStamped, queue_size=1)
servo = rospy.Publisher("servo", Empty, queue_size=1)
robot_state = "In"
p0 = [(0, 0), (0.53, -0.03), (1.09, -0.07)]
p1 = [(0.00, -0.46), (0.53, -0.50), (1.00, -0.55)]
p2 = [(0, -0.90), (0.53, -0.90), (1.00, -0.90)]
p3 = [(0, -1.30), (0.53, -1.30), (1.00, -1.30)]
p4 = [(0, -1.72), (0.53, -1.70), (1.00, -1.72)]
p5 = [(0, -2.10), (0.53, -2.10), (1.00, -2.10)]
graphN = {
"In": p0[0],
"Out": p0[2],
"B1": p1[0],
"B2": p2[0],
"B3": p3[0],
"B4": p4[0],
"B5": p5[0],
"A1": p1[2],
"A2": p2[2],
"A3": p3[2],
"A4": p4[2],
"A5": p5[2]
}
graph = {
p0[0]: [p0[1]],
p0[1]: [p0[0], p0[2], p1[1]],
p0[2]: [p0[1]],
p1[0]: [p1[1]],
p1[1]: [p0[1], p1[0], p1[2], p2[1]],
p1[2]: [p1[1]],
p2[0]: [p2[1]],
p2[1]: [p1[1], p2[0], p2[2], p3[1]],
p2[2]: [p2[1]],
p3[0]: [p3[1]],
p3[1]: [p2[1], p3[0], p3[2], p4[1]],
p3[2]: [p3[1]],
p4[0]: [p4[1]],
p4[1]: [p3[1], p4[0], p4[2], p5[1]],
p4[2]: [p4[1]],
p5[0]: [p5[1]],
p5[1]: [p4[1], p5[0], p5[2]],
p5[2]: [p5[1]]
}
def bfs(start, goal):
global graph
frontier = Queue.Queue()
frontier.put(start)
came_from = {start: None}
while not frontier.empty():
current = frontier.get()
if current == goal:
break
        for neighbor in graph[current]:
            if neighbor not in came_from:
                frontier.put(neighbor)
                came_from[neighbor] = current
path = [goal]
parent = came_from.get(goal)
    while parent is not None:
path.insert(0, parent)
parent = came_from.get(parent)
return path
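# Illustrative trace (derived from the graph above): bfs(graphN["In"], graphN["B2"])
# expands the corridor nodes, then backtracks through came_from and returns
# [(0, 0), (0.53, -0.03), (0.53, -0.50), (0.53, -0.90), (0, -0.90)].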
def userCB(msg):
global graphN, robot_state
msgStart, msgGoal = msg.data.split(',')
rospy.loginfo("from {} -> {}".format(msgStart, msgGoal))
path = bfs(graphN[robot_state], graphN[msgStart])
rospy.loginfo("path: {}".format(path))
goal = PoseStamped()
goal.header.frame_id = "map"
for i, pat in enumerate(path[1:]):
goal.pose.position.x = pat[0]
goal.pose.position.y = pat[1]
goal_point.publish(goal)
dx = pat[0] - path[i][0]
dy = pat[1] - path[i][1]
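        # Travel time = straight-line distance / 0.08; the constant appears to
        # be the robot's linear speed in m/s (an assumption; same value below).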
duration = sqrt(dx**2 + dy**2) / 0.08
rospy.sleep(duration)
servo.publish(Empty())
rospy.sleep(2.0)
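    # Second leg: re-plan from the pickup node and replay the same
    # path-following routine toward the drop-off node.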
path = bfs(graphN[msgStart], graphN[msgGoal])
rospy.loginfo("path: {}".format(path))
for i, pat in enumerate(path[1:]):
goal.pose.position.x = pat[0]
goal.pose.position.y = pat[1]
goal_point.publish(goal)
dx = pat[0] - path[i][0]
dy = pat[1] - path[i][1]
duration = sqrt(dx**2 + dy**2) / 0.08
rospy.sleep(duration)
servo.publish(Empty())
rospy.sleep(2.0)
robot_state = msgGoal
def main():
rospy.init_node("parkself_runner")
rospy.loginfo("Parkself Runner")
rospy.Subscriber("user", String, userCB)
goal = PoseStamped()
goal.header.frame_id = "map"
goal_point.publish(goal)
rospy.spin()
if __name__ == '__main__':
main()
| [
"rospy.Subscriber",
"rospy.init_node",
"math.sqrt",
"geometry_msgs.msg.PoseStamped",
"rospy.spin",
"rospy.sleep",
"rospy.Publisher",
"Queue.Queue",
"rospy.loginfo",
"std_msgs.msg.Empty"
] | [((222, 289), 'rospy.Publisher', 'rospy.Publisher', (['"""move_base_simple/goal"""', 'PoseStamped'], {'queue_size': '(1)'}), "('move_base_simple/goal', PoseStamped, queue_size=1)\n", (237, 289), False, 'import rospy\n'), ((304, 349), 'rospy.Publisher', 'rospy.Publisher', (['"""servo"""', 'Empty'], {'queue_size': '(1)'}), "('servo', Empty, queue_size=1)\n", (319, 349), False, 'import rospy\n'), ((1419, 1432), 'Queue.Queue', 'Queue.Queue', ([], {}), '()\n', (1430, 1432), False, 'import Queue\n'), ((2176, 2189), 'geometry_msgs.msg.PoseStamped', 'PoseStamped', ([], {}), '()\n', (2187, 2189), False, 'from geometry_msgs.msg import Twist, PoseStamped, Pose2D, PointStamped, PoseWithCovarianceStamped\n'), ((2544, 2560), 'rospy.sleep', 'rospy.sleep', (['(2.0)'], {}), '(2.0)\n', (2555, 2560), False, 'import rospy\n'), ((2976, 2992), 'rospy.sleep', 'rospy.sleep', (['(2.0)'], {}), '(2.0)\n', (2987, 2992), False, 'import rospy\n'), ((3037, 3071), 'rospy.init_node', 'rospy.init_node', (['"""parkself_runner"""'], {}), "('parkself_runner')\n", (3052, 3071), False, 'import rospy\n'), ((3076, 3108), 'rospy.loginfo', 'rospy.loginfo', (['"""Parkself Runner"""'], {}), "('Parkself Runner')\n", (3089, 3108), False, 'import rospy\n'), ((3113, 3153), 'rospy.Subscriber', 'rospy.Subscriber', (['"""user"""', 'String', 'userCB'], {}), "('user', String, userCB)\n", (3129, 3153), False, 'import rospy\n'), ((3166, 3179), 'geometry_msgs.msg.PoseStamped', 'PoseStamped', ([], {}), '()\n', (3177, 3179), False, 'from geometry_msgs.msg import Twist, PoseStamped, Pose2D, PointStamped, PoseWithCovarianceStamped\n'), ((3247, 3259), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (3257, 3259), False, 'import rospy\n'), ((2491, 2512), 'rospy.sleep', 'rospy.sleep', (['duration'], {}), '(duration)\n', (2502, 2512), False, 'import rospy\n'), ((2531, 2538), 'std_msgs.msg.Empty', 'Empty', ([], {}), '()\n', (2536, 2538), False, 'from std_msgs.msg import Empty, String\n'), ((2923, 2944), 'rospy.sleep', 'rospy.sleep', (['duration'], {}), '(duration)\n', (2934, 2944), False, 'import rospy\n'), ((2963, 2970), 'std_msgs.msg.Empty', 'Empty', ([], {}), '()\n', (2968, 2970), False, 'from std_msgs.msg import Empty, String\n'), ((2456, 2479), 'math.sqrt', 'sqrt', (['(dx ** 2 + dy ** 2)'], {}), '(dx ** 2 + dy ** 2)\n', (2460, 2479), False, 'from math import sqrt\n'), ((2888, 2911), 'math.sqrt', 'sqrt', (['(dx ** 2 + dy ** 2)'], {}), '(dx ** 2 + dy ** 2)\n', (2892, 2911), False, 'from math import sqrt\n')] |
from unittest import TestCase
from pylibsrtp import Error, Policy, Session
RTP = (
b"\x80\x08\x00\x00" # version, packet type, sequence number
b"\x00\x00\x00\x00" # timestamp
b"\x00\x00\x30\x39" # ssrc: 12345
) + (b"\xd4" * 160)
RTCP = (
b"\x80\xc8\x00\x06\xf3\xcb\x20\x01\x83\xab\x03\xa1\xeb\x02\x0b\x3a"
b"\x00\x00\x94\x20\x00\x00\x00\x9e\x00\x00\x9b\x88"
)
# Set key to predetermined value
KEY = (
b"\<KEY>"
b"\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
b"\x10\x11\x12\x13\x14\x15\x16\x17"
b"\x18\x19\x1a\x1b\x1c\x1d"
)
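# Note: the byte counts asserted below assume libsrtp's default crypto policy
# (AES_CM_128_HMAC_SHA1_80): protect() appends a 10-byte auth tag
# (172 -> 182 bytes for RTP) and protect_rtcp() additionally appends a
# 4-byte SRTCP index (28 -> 42 bytes for RTCP).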
class PolicyTest(TestCase):
def test_allow_repeat_tx(self):
policy = Policy()
self.assertEqual(policy.allow_repeat_tx, False)
policy.allow_repeat_tx = True
self.assertEqual(policy.allow_repeat_tx, True)
policy.allow_repeat_tx = False
self.assertEqual(policy.allow_repeat_tx, False)
policy.allow_repeat_tx = 1
self.assertEqual(policy.allow_repeat_tx, True)
policy.allow_repeat_tx = 0
self.assertEqual(policy.allow_repeat_tx, False)
def test_key(self):
policy = Policy()
self.assertEqual(policy.key, None)
policy.key = KEY
self.assertEqual(policy.key, KEY)
policy.key = None
self.assertEqual(policy.key, None)
with self.assertRaises(TypeError) as cm:
policy.key = 1234
self.assertEqual(policy.key, None)
self.assertEqual(str(cm.exception), "key must be bytes")
def test_ssrc_type(self):
policy = Policy()
self.assertEqual(policy.ssrc_type, Policy.SSRC_UNDEFINED)
policy.ssrc_type = Policy.SSRC_ANY_INBOUND
self.assertEqual(policy.ssrc_type, Policy.SSRC_ANY_INBOUND)
def test_ssrc_value(self):
policy = Policy()
self.assertEqual(policy.ssrc_value, 0)
policy.ssrc_value = 12345
self.assertEqual(policy.ssrc_value, 12345)
def test_window_size(self):
policy = Policy()
self.assertEqual(policy.window_size, 0)
policy.window_size = 1024
self.assertEqual(policy.window_size, 1024)
class SessionTest(TestCase):
def test_no_key(self):
policy = Policy(ssrc_type=Policy.SSRC_ANY_OUTBOUND)
with self.assertRaises(Error) as cm:
Session(policy=policy)
self.assertEqual(str(cm.exception), "unsupported parameter")
def test_add_remove_stream(self):
# protect RTP
tx_session = Session(
policy=Policy(key=KEY, ssrc_type=Policy.SSRC_SPECIFIC, ssrc_value=12345)
)
protected = tx_session.protect(RTP)
self.assertEqual(len(protected), 182)
# add stream and unprotect RTP
rx_session = Session()
rx_session.add_stream(
Policy(key=KEY, ssrc_type=Policy.SSRC_SPECIFIC, ssrc_value=12345)
)
unprotected = rx_session.unprotect(protected)
self.assertEqual(len(unprotected), 172)
self.assertEqual(unprotected, RTP)
# remove stream
rx_session.remove_stream(12345)
# try removing stream again
with self.assertRaises(Error) as cm:
rx_session.remove_stream(12345)
self.assertEqual(str(cm.exception), "no appropriate context found")
def test_rtp_any_ssrc(self):
# protect RTP
tx_session = Session(policy=Policy(key=KEY, ssrc_type=Policy.SSRC_ANY_OUTBOUND))
protected = tx_session.protect(RTP)
self.assertEqual(len(protected), 182)
# bad type
with self.assertRaises(TypeError) as cm:
tx_session.protect(4567)
self.assertEqual(str(cm.exception), "packet must be bytes")
# bad length
with self.assertRaises(ValueError) as cm:
tx_session.protect(b"0" * 1500)
self.assertEqual(str(cm.exception), "packet is too long")
# unprotect RTP
rx_session = Session(policy=Policy(key=KEY, ssrc_type=Policy.SSRC_ANY_INBOUND))
unprotected = rx_session.unprotect(protected)
self.assertEqual(len(unprotected), 172)
self.assertEqual(unprotected, RTP)
def test_rtcp_any_ssrc(self):
# protect RCTP
tx_session = Session(policy=Policy(key=KEY, ssrc_type=Policy.SSRC_ANY_OUTBOUND))
protected = tx_session.protect_rtcp(RTCP)
self.assertEqual(len(protected), 42)
# bad type
with self.assertRaises(TypeError) as cm:
tx_session.protect_rtcp(4567)
self.assertEqual(str(cm.exception), "packet must be bytes")
# bad length
with self.assertRaises(ValueError) as cm:
tx_session.protect_rtcp(b"0" * 1500)
self.assertEqual(str(cm.exception), "packet is too long")
# unprotect RTCP
rx_session = Session(policy=Policy(key=KEY, ssrc_type=Policy.SSRC_ANY_INBOUND))
unprotected = rx_session.unprotect_rtcp(protected)
self.assertEqual(len(unprotected), 28)
self.assertEqual(unprotected, RTCP)
def test_rtp_specific_ssrc(self):
# protect RTP
tx_session = Session(
policy=Policy(key=KEY, ssrc_type=Policy.SSRC_SPECIFIC, ssrc_value=12345)
)
protected = tx_session.protect(RTP)
self.assertEqual(len(protected), 182)
# unprotect RTP
rx_session = Session(
policy=Policy(key=KEY, ssrc_type=Policy.SSRC_SPECIFIC, ssrc_value=12345)
)
unprotected = rx_session.unprotect(protected)
self.assertEqual(len(unprotected), 172)
self.assertEqual(unprotected, RTP)
| [
"pylibsrtp.Policy",
"pylibsrtp.Session"
] | [((638, 646), 'pylibsrtp.Policy', 'Policy', ([], {}), '()\n', (644, 646), False, 'from pylibsrtp import Error, Policy, Session\n'), ((1118, 1126), 'pylibsrtp.Policy', 'Policy', ([], {}), '()\n', (1124, 1126), False, 'from pylibsrtp import Error, Policy, Session\n'), ((1544, 1552), 'pylibsrtp.Policy', 'Policy', ([], {}), '()\n', (1550, 1552), False, 'from pylibsrtp import Error, Policy, Session\n'), ((1788, 1796), 'pylibsrtp.Policy', 'Policy', ([], {}), '()\n', (1794, 1796), False, 'from pylibsrtp import Error, Policy, Session\n'), ((1980, 1988), 'pylibsrtp.Policy', 'Policy', ([], {}), '()\n', (1986, 1988), False, 'from pylibsrtp import Error, Policy, Session\n'), ((2198, 2240), 'pylibsrtp.Policy', 'Policy', ([], {'ssrc_type': 'Policy.SSRC_ANY_OUTBOUND'}), '(ssrc_type=Policy.SSRC_ANY_OUTBOUND)\n', (2204, 2240), False, 'from pylibsrtp import Error, Policy, Session\n'), ((2728, 2737), 'pylibsrtp.Session', 'Session', ([], {}), '()\n', (2735, 2737), False, 'from pylibsrtp import Error, Policy, Session\n'), ((2299, 2321), 'pylibsrtp.Session', 'Session', ([], {'policy': 'policy'}), '(policy=policy)\n', (2306, 2321), False, 'from pylibsrtp import Error, Policy, Session\n'), ((2781, 2846), 'pylibsrtp.Policy', 'Policy', ([], {'key': 'KEY', 'ssrc_type': 'Policy.SSRC_SPECIFIC', 'ssrc_value': '(12345)'}), '(key=KEY, ssrc_type=Policy.SSRC_SPECIFIC, ssrc_value=12345)\n', (2787, 2846), False, 'from pylibsrtp import Error, Policy, Session\n'), ((2501, 2566), 'pylibsrtp.Policy', 'Policy', ([], {'key': 'KEY', 'ssrc_type': 'Policy.SSRC_SPECIFIC', 'ssrc_value': '(12345)'}), '(key=KEY, ssrc_type=Policy.SSRC_SPECIFIC, ssrc_value=12345)\n', (2507, 2566), False, 'from pylibsrtp import Error, Policy, Session\n'), ((3361, 3412), 'pylibsrtp.Policy', 'Policy', ([], {'key': 'KEY', 'ssrc_type': 'Policy.SSRC_ANY_OUTBOUND'}), '(key=KEY, ssrc_type=Policy.SSRC_ANY_OUTBOUND)\n', (3367, 3412), False, 'from pylibsrtp import Error, Policy, Session\n'), ((3921, 3971), 'pylibsrtp.Policy', 'Policy', ([], {'key': 'KEY', 'ssrc_type': 'Policy.SSRC_ANY_INBOUND'}), '(key=KEY, ssrc_type=Policy.SSRC_ANY_INBOUND)\n', (3927, 3971), False, 'from pylibsrtp import Error, Policy, Session\n'), ((4212, 4263), 'pylibsrtp.Policy', 'Policy', ([], {'key': 'KEY', 'ssrc_type': 'Policy.SSRC_ANY_OUTBOUND'}), '(key=KEY, ssrc_type=Policy.SSRC_ANY_OUTBOUND)\n', (4218, 4263), False, 'from pylibsrtp import Error, Policy, Session\n'), ((4788, 4838), 'pylibsrtp.Policy', 'Policy', ([], {'key': 'KEY', 'ssrc_type': 'Policy.SSRC_ANY_INBOUND'}), '(key=KEY, ssrc_type=Policy.SSRC_ANY_INBOUND)\n', (4794, 4838), False, 'from pylibsrtp import Error, Policy, Session\n'), ((5100, 5165), 'pylibsrtp.Policy', 'Policy', ([], {'key': 'KEY', 'ssrc_type': 'Policy.SSRC_SPECIFIC', 'ssrc_value': '(12345)'}), '(key=KEY, ssrc_type=Policy.SSRC_SPECIFIC, ssrc_value=12345)\n', (5106, 5165), False, 'from pylibsrtp import Error, Policy, Session\n'), ((5340, 5405), 'pylibsrtp.Policy', 'Policy', ([], {'key': 'KEY', 'ssrc_type': 'Policy.SSRC_SPECIFIC', 'ssrc_value': '(12345)'}), '(key=KEY, ssrc_type=Policy.SSRC_SPECIFIC, ssrc_value=12345)\n', (5346, 5405), False, 'from pylibsrtp import Error, Policy, Session\n')] |
#!/usr/bin/python
# Copyright 2020 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
__metaclass__ = type
from ansible.module_utils import baremetal_deploy as bd
from ansible.module_utils.basic import AnsibleModule
import yaml
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: tripleo_baremetal_expand_roles
short_description: Manage baremetal nodes with metalsmith
version_added: "2.9"
author: "<NAME> (@stevebaker)"
description:
- Takes a baremetal deployment description of roles and node instances
and transforms that into an instance list and a heat environment file
for deployed-server.
options:
stack_name:
description:
- Name of the overcloud stack which will be deployed on these instances
default: overcloud
state:
description:
- Build instance list for the desired provision state, "present" to
provision, "absent" to unprovision, "all" for a combination of
"present" and "absent".
default: present
choices:
- present
- absent
- all
baremetal_deployment:
description:
- Data describing roles and baremetal node instances to provision for
those roles
type: list
elements: dict
suboptions:
name:
description:
- Mandatory role name
type: str
required: True
hostname_format:
description:
- Overrides the default hostname format for this role.
The default format uses the lower case role name.
For example, the default format for the Controller role is
%stackname%-controller-%index%. Only the Compute role does not
follow the role name rule. The Compute default format is
%stackname%-novacompute-%index%
type: str
count:
description:
- Number of instances to create for this role.
type: int
default: 1
defaults:
description:
- A dictionary of default values for instances entry properties.
An instances entry property overrides any defaults that you specify
in the defaults parameter.
type: dict
instances:
description:
- Values that you can use to specify attributes for specific nodes.
The length of this list must not be greater than the value of the
count parameter.
type: list
elements: dict
default_network:
description:
- Default nics entry when none are specified
type: list
suboptions: dict
default:
- network: ctlplane
vif: true
default_image:
description:
- Default image
type: dict
default:
href: overcloud-full
ssh_public_keys:
description:
- SSH public keys to load
type: str
user_name:
description:
- Name of the admin user to create
type: str
'''
RETURN = '''
instances:
description: Expanded list of instances to perform actions on
returned: changed
type: list
sample: [
{
"hostname": "overcloud-controller-0",
"image": {
"href": "overcloud-full"
}
},
{
"hostname": "overcloud-controller-1",
"image": {
"href": "overcloud-full"
}
},
{
"hostname": "overcloud-controller-2",
"image": {
"href": "overcloud-full"
}
},
{
"hostname": "overcloud-novacompute-0",
"image": {
"href": "overcloud-full"
}
},
{
"hostname": "overcloud-novacompute-1",
"image": {
"href": "overcloud-full"
}
},
{
"hostname": "overcloud-novacompute-2",
"image": {
"href": "overcloud-full"
}
}
]
environment:
description: Heat environment data to be used with the overcloud deploy.
                 This is only a partial environment; further changes are
                 required once instance changes have been made.
returned: changed
type: dict
sample: {
"parameter_defaults": {
"ComputeDeployedServerCount": 3,
"ComputeDeployedServerHostnameFormat": "%stackname%-novacompute-%index%",
"ControllerDeployedServerCount": 3,
"ControllerDeployedServerHostnameFormat": "%stackname%-controller-%index%",
"HostnameMap": {
"overcloud-controller-0": "overcloud-controller-0",
"overcloud-controller-1": "overcloud-controller-1",
"overcloud-controller-2": "overcloud-controller-2",
"overcloud-novacompute-0": "overcloud-novacompute-0",
"overcloud-novacompute-1": "overcloud-novacompute-1",
"overcloud-novacompute-2": "overcloud-novacompute-2"
}
}
}
''' # noqa
EXAMPLES = '''
- name: Expand roles
tripleo_baremetal_expand_roles:
baremetal_deployment:
- name: Controller
count: 3
defaults:
image:
href: overcloud-full
networks: []
- name: Compute
count: 3
defaults:
image:
href: overcloud-full
networks: []
state: present
stack_name: overcloud
register: tripleo_baremetal_instances
'''
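# Note: the argument spec below is loaded straight from the DOCUMENTATION
# string, so the YAML 'options' block above doubles as the runtime schema.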
def main():
argument_spec = yaml.safe_load(DOCUMENTATION)['options']
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=False,
)
state = module.params['state']
try:
if state in ('present', 'all'):
present, env, role_net_map, hostname_role_map = bd.expand(
roles=module.params['baremetal_deployment'],
stack_name=module.params['stack_name'],
expand_provisioned=True,
default_image=module.params['default_image'],
default_network=module.params['default_network'],
user_name=module.params['user_name'],
ssh_public_keys=module.params['ssh_public_keys'],
)
if state in ('absent', 'all'):
absent, _, _, _ = bd.expand(
roles=module.params['baremetal_deployment'],
stack_name=module.params['stack_name'],
expand_provisioned=False,
default_image=module.params['default_image'],
)
env = {}
role_net_map = {}
hostname_role_map = {}
if state == 'present':
instances = present
elif state == 'absent':
instances = absent
elif state == 'all':
instances = present + absent
module.exit_json(
changed=True,
msg='Expanded to %d instances' % len(instances),
instances=instances,
environment=env,
role_net_map=role_net_map,
hostname_role_map=hostname_role_map,
)
except Exception as e:
module.fail_json(msg=str(e))
if __name__ == '__main__':
main()
| [
"yaml.safe_load",
"ansible.module_utils.basic.AnsibleModule",
"ansible.module_utils.baremetal_deploy.expand"
] | [((6324, 6393), 'ansible.module_utils.basic.AnsibleModule', 'AnsibleModule', ([], {'argument_spec': 'argument_spec', 'supports_check_mode': '(False)'}), '(argument_spec=argument_spec, supports_check_mode=False)\n', (6337, 6393), False, 'from ansible.module_utils.basic import AnsibleModule\n'), ((6270, 6299), 'yaml.safe_load', 'yaml.safe_load', (['DOCUMENTATION'], {}), '(DOCUMENTATION)\n', (6284, 6299), False, 'import yaml\n'), ((6563, 6885), 'ansible.module_utils.baremetal_deploy.expand', 'bd.expand', ([], {'roles': "module.params['baremetal_deployment']", 'stack_name': "module.params['stack_name']", 'expand_provisioned': '(True)', 'default_image': "module.params['default_image']", 'default_network': "module.params['default_network']", 'user_name': "module.params['user_name']", 'ssh_public_keys': "module.params['ssh_public_keys']"}), "(roles=module.params['baremetal_deployment'], stack_name=module.\n params['stack_name'], expand_provisioned=True, default_image=module.\n params['default_image'], default_network=module.params[\n 'default_network'], user_name=module.params['user_name'],\n ssh_public_keys=module.params['ssh_public_keys'])\n", (6572, 6885), True, 'from ansible.module_utils import baremetal_deploy as bd\n'), ((7063, 7239), 'ansible.module_utils.baremetal_deploy.expand', 'bd.expand', ([], {'roles': "module.params['baremetal_deployment']", 'stack_name': "module.params['stack_name']", 'expand_provisioned': '(False)', 'default_image': "module.params['default_image']"}), "(roles=module.params['baremetal_deployment'], stack_name=module.\n params['stack_name'], expand_provisioned=False, default_image=module.\n params['default_image'])\n", (7072, 7239), True, 'from ansible.module_utils import baremetal_deploy as bd\n')] |
# Generated by Django 3.2.6 on 2021-09-10 11:56
import uuid
import django.db.models.deletion
import django.db.models.fields
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("user_management", "0002_remove_username"),
]
operations = [
migrations.CreateModel(
name="Feature",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("created_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
("code", models.CharField(max_length=200)),
],
options={
"abstract": False,
},
),
migrations.CreateModel(
name="FeatureFlag",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("created_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
("config", models.JSONField(blank=True, null=True)),
(
"feature",
models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
to="user_management.feature",
),
),
(
"user",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"abstract": False,
},
),
]
| [
"django.db.models.ForeignKey",
"django.db.models.JSONField",
"django.db.models.CharField",
"django.db.models.DateTimeField",
"django.db.models.UUIDField"
] | [((487, 578), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'editable': '(False)', 'primary_key': '(True)', 'serialize': '(False)'}), '(default=uuid.uuid4, editable=False, primary_key=True,\n serialize=False)\n', (503, 578), False, 'from django.db import migrations, models\n'), ((745, 784), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (765, 784), False, 'from django.db import migrations, models\n'), ((818, 853), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (838, 853), False, 'from django.db import migrations, models\n'), ((881, 913), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (897, 913), False, 'from django.db import migrations, models\n'), ((1163, 1254), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'editable': '(False)', 'primary_key': '(True)', 'serialize': '(False)'}), '(default=uuid.uuid4, editable=False, primary_key=True,\n serialize=False)\n', (1179, 1254), False, 'from django.db import migrations, models\n'), ((1421, 1460), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1441, 1460), False, 'from django.db import migrations, models\n'), ((1494, 1529), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (1514, 1529), False, 'from django.db import migrations, models\n'), ((1559, 1598), 'django.db.models.JSONField', 'models.JSONField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (1575, 1598), False, 'from django.db import migrations, models\n'), ((1670, 1767), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.fields.CharField', 'to': '"""user_management.feature"""'}), "(on_delete=django.db.models.fields.CharField, to=\n 'user_management.feature')\n", (1687, 1767), False, 'from django.db import migrations, models\n'), ((1920, 2016), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(on_delete=django.db.models.deletion.CASCADE, to=settings.\n AUTH_USER_MODEL)\n', (1937, 2016), False, 'from django.db import migrations, models\n')] |
from __future__ import absolute_import
from collections import namedtuple
from datetime import datetime, timedelta
import pytz
from casexml.apps.case.dbaccessors import get_open_case_docs_in_domain
from casexml.apps.case.mock import CaseBlock
from casexml.apps.case.xml import V2
import uuid
from xml.etree import ElementTree
from corehq.apps.app_manager.const import USERCASE_TYPE
from corehq.apps.domain.models import Domain
from corehq.apps.es.domains import DomainES
from corehq.apps.es import filters
from corehq.apps.hqcase.utils import submit_case_blocks, get_case_by_domain_hq_user_id
from corehq.feature_previews import CALLCENTER
from corehq.util.quickcache import quickcache
from corehq.util.timezones.conversions import UserTime, ServerTime
from dimagi.utils.couch import CriticalSection
class DomainLite(namedtuple('DomainLite', 'name default_timezone cc_case_type use_fixtures')):
def midnights(self, utcnow=None):
"""Returns a list containing two datetimes in UTC that corresponds to midnight
in the domains timezone on either side of the current UTC datetime.
i.e. [<previous midnight in TZ>, <next midnight in TZ>]
>>> d = DomainLite('', 'Asia/Kolkata', '', True)
>>> d.midnights(datetime(2015, 8, 27, 18, 30, 0 ))
[datetime.datetime(2015, 8, 26, 18, 30), datetime.datetime(2015, 8, 27, 18, 30)]
>>> d.midnights(datetime(2015, 8, 27, 18, 31, 0 ))
[datetime.datetime(2015, 8, 27, 18, 30), datetime.datetime(2015, 8, 28, 18, 30)]
"""
utcnow = utcnow or datetime.utcnow()
tz = pytz.timezone(self.default_timezone)
current_time_tz = ServerTime(utcnow).user_time(tz).done()
midnight_tz1 = current_time_tz.replace(hour=0, minute=0, second=0, microsecond=0)
midnight_tz_utc1 = UserTime(midnight_tz1).server_time().done()
midnight_tz_utc2 = midnight_tz_utc1 + timedelta(days=(1 if midnight_tz_utc1 < utcnow else -1))
return sorted([midnight_tz_utc1, midnight_tz_utc2])
CallCenterCase = namedtuple('CallCenterCase', 'case_id hq_user_id')
def sync_user_case(commcare_user, case_type, owner_id):
"""
Each time a CommCareUser is saved this method gets called and creates or updates
a case associated with the user with the user's details.
This is also called to create user cases when the usercase is used for the
first time.
"""
with CriticalSection(['user_case_%s_for_%s' % (case_type, commcare_user._id)]):
domain = commcare_user.project
def valid_element_name(name):
try:
ElementTree.fromstring('<{}/>'.format(name))
return True
except ElementTree.ParseError:
return False
# remove any keys that aren't valid XML element names
fields = {k: v for k, v in commcare_user.user_data.items() if valid_element_name(k)}
# language or phone_number can be null and will break
# case submission
fields.update({
'name': commcare_user.name or commcare_user.raw_username,
'username': commcare_user.raw_username,
'email': commcare_user.email,
'language': commcare_user.language or '',
'phone_number': commcare_user.phone_number or ''
})
case = get_case_by_domain_hq_user_id(domain.name, commcare_user._id, case_type)
close = commcare_user.to_be_deleted() or not commcare_user.is_active
caseblock = None
if case:
props = dict(case.dynamic_case_properties())
changed = close != case.closed
changed = changed or case.type != case_type
changed = changed or case.name != fields['name']
changed = changed or case.owner_id != owner_id
if not changed:
for field, value in fields.items():
if field != 'name' and props.get(field) != value:
changed = True
break
if changed:
caseblock = CaseBlock(
create=False,
case_id=case._id,
owner_id=owner_id,
case_type=case_type,
close=close,
update=fields
)
else:
fields['hq_user_id'] = commcare_user._id
caseblock = CaseBlock(
create=True,
case_id=uuid.uuid4().hex,
owner_id=owner_id,
user_id=owner_id,
case_type=case_type,
update=fields
)
if caseblock:
casexml = ElementTree.tostring(caseblock.as_xml())
submit_case_blocks(casexml, domain.name)
def sync_call_center_user_case(user):
domain = user.project
if domain and domain.call_center_config.enabled:
owner_id = domain.call_center_config.case_owner_id
if domain.call_center_config.use_user_location_as_owner:
owner_id = user.location_id
sync_user_case(
user,
domain.call_center_config.case_type,
owner_id
)
def sync_usercase(user):
domain = user.project
if domain and domain.usercase_enabled:
sync_user_case(
user,
USERCASE_TYPE,
user.get_id
)
def is_midnight_for_domain(midnight_form_domain, error_margin=15, current_time=None):
current_time = current_time or datetime.utcnow()
diff = current_time - midnight_form_domain
return diff.days >= 0 and diff < timedelta(minutes=error_margin)
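# Illustrative check: with the default 15-minute margin, a domain midnight of
# 00:00 UTC still counts as "midnight" for any current time up to (but not
# including) 00:15 UTC.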
def get_call_center_domains():
result = (
DomainES()
.is_active()
.is_snapshot(False)
.filter(filters.term('call_center_config.enabled', True))
.fields(['name', 'default_timezone', 'call_center_config.case_type', 'call_center_config.use_fixtures'])
.run()
)
def to_domain_lite(hit):
return DomainLite(
name=hit['name'],
default_timezone=hit['default_timezone'],
cc_case_type=hit.get('call_center_config.case_type', ''),
use_fixtures=hit.get('call_center_config.use_fixtures', True)
)
return [to_domain_lite(hit) for hit in result.hits]
def get_call_center_cases(domain_name, case_type, user=None):
all_cases = []
if user:
docs = (doc for owner_id in user.get_owner_ids()
for doc in get_open_case_docs_in_domain(domain_name, case_type,
owner_id=owner_id))
else:
docs = get_open_case_docs_in_domain(domain_name, case_type)
for case_doc in docs:
hq_user_id = case_doc.get('hq_user_id', None)
if hq_user_id:
all_cases.append(CallCenterCase(
case_id=case_doc['_id'],
hq_user_id=hq_user_id
))
return all_cases
@quickcache(['domain'])
def get_call_center_case_type_if_enabled(domain):
if CALLCENTER.enabled(domain):
return Domain.get_by_name(domain).call_center_config.case_type
| [
"pytz.timezone",
"corehq.util.timezones.conversions.UserTime",
"collections.namedtuple",
"corehq.util.quickcache.quickcache",
"dimagi.utils.couch.CriticalSection",
"datetime.datetime.utcnow",
"casexml.apps.case.mock.CaseBlock",
"corehq.feature_previews.CALLCENTER.enabled",
"uuid.uuid4",
"corehq.ap... | [((819, 894), 'collections.namedtuple', 'namedtuple', (['"""DomainLite"""', '"""name default_timezone cc_case_type use_fixtures"""'], {}), "('DomainLite', 'name default_timezone cc_case_type use_fixtures')\n", (829, 894), False, 'from collections import namedtuple\n'), ((2034, 2084), 'collections.namedtuple', 'namedtuple', (['"""CallCenterCase"""', '"""case_id hq_user_id"""'], {}), "('CallCenterCase', 'case_id hq_user_id')\n", (2044, 2084), False, 'from collections import namedtuple\n'), ((6972, 6994), 'corehq.util.quickcache.quickcache', 'quickcache', (["['domain']"], {}), "(['domain'])\n", (6982, 6994), False, 'from corehq.util.quickcache import quickcache\n'), ((7052, 7078), 'corehq.feature_previews.CALLCENTER.enabled', 'CALLCENTER.enabled', (['domain'], {}), '(domain)\n', (7070, 7078), False, 'from corehq.feature_previews import CALLCENTER\n'), ((1588, 1624), 'pytz.timezone', 'pytz.timezone', (['self.default_timezone'], {}), '(self.default_timezone)\n', (1601, 1624), False, 'import pytz\n'), ((2410, 2483), 'dimagi.utils.couch.CriticalSection', 'CriticalSection', (["['user_case_%s_for_%s' % (case_type, commcare_user._id)]"], {}), "(['user_case_%s_for_%s' % (case_type, commcare_user._id)])\n", (2425, 2483), False, 'from dimagi.utils.couch import CriticalSection\n'), ((3316, 3388), 'corehq.apps.hqcase.utils.get_case_by_domain_hq_user_id', 'get_case_by_domain_hq_user_id', (['domain.name', 'commcare_user._id', 'case_type'], {}), '(domain.name, commcare_user._id, case_type)\n', (3345, 3388), False, 'from corehq.apps.hqcase.utils import submit_case_blocks, get_case_by_domain_hq_user_id\n'), ((5496, 5513), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (5511, 5513), False, 'from datetime import datetime, timedelta\n'), ((6652, 6704), 'casexml.apps.case.dbaccessors.get_open_case_docs_in_domain', 'get_open_case_docs_in_domain', (['domain_name', 'case_type'], {}), '(domain_name, case_type)\n', (6680, 6704), False, 'from casexml.apps.case.dbaccessors import get_open_case_docs_in_domain\n'), ((1557, 1574), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1572, 1574), False, 'from datetime import datetime, timedelta\n'), ((1898, 1952), 'datetime.timedelta', 'timedelta', ([], {'days': '(1 if midnight_tz_utc1 < utcnow else -1)'}), '(days=1 if midnight_tz_utc1 < utcnow else -1)\n', (1907, 1952), False, 'from datetime import datetime, timedelta\n'), ((4727, 4767), 'corehq.apps.hqcase.utils.submit_case_blocks', 'submit_case_blocks', (['casexml', 'domain.name'], {}), '(casexml, domain.name)\n', (4745, 4767), False, 'from corehq.apps.hqcase.utils import submit_case_blocks, get_case_by_domain_hq_user_id\n'), ((5598, 5629), 'datetime.timedelta', 'timedelta', ([], {'minutes': 'error_margin'}), '(minutes=error_margin)\n', (5607, 5629), False, 'from datetime import datetime, timedelta\n'), ((4058, 4172), 'casexml.apps.case.mock.CaseBlock', 'CaseBlock', ([], {'create': '(False)', 'case_id': 'case._id', 'owner_id': 'owner_id', 'case_type': 'case_type', 'close': 'close', 'update': 'fields'}), '(create=False, case_id=case._id, owner_id=owner_id, case_type=\n case_type, close=close, update=fields)\n', (4067, 4172), False, 'from casexml.apps.case.mock import CaseBlock\n'), ((6498, 6569), 'casexml.apps.case.dbaccessors.get_open_case_docs_in_domain', 'get_open_case_docs_in_domain', (['domain_name', 'case_type'], {'owner_id': 'owner_id'}), '(domain_name, case_type, owner_id=owner_id)\n', (6526, 6569), False, 'from casexml.apps.case.dbaccessors import 
get_open_case_docs_in_domain\n'), ((7095, 7121), 'corehq.apps.domain.models.Domain.get_by_name', 'Domain.get_by_name', (['domain'], {}), '(domain)\n', (7113, 7121), False, 'from corehq.apps.domain.models import Domain\n'), ((1651, 1669), 'corehq.util.timezones.conversions.ServerTime', 'ServerTime', (['utcnow'], {}), '(utcnow)\n', (1661, 1669), False, 'from corehq.util.timezones.conversions import UserTime, ServerTime\n'), ((1808, 1830), 'corehq.util.timezones.conversions.UserTime', 'UserTime', (['midnight_tz1'], {}), '(midnight_tz1)\n', (1816, 1830), False, 'from corehq.util.timezones.conversions import UserTime, ServerTime\n'), ((4461, 4473), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (4471, 4473), False, 'import uuid\n'), ((5774, 5822), 'corehq.apps.es.filters.term', 'filters.term', (['"""call_center_config.enabled"""', '(True)'], {}), "('call_center_config.enabled', True)\n", (5786, 5822), False, 'from corehq.apps.es import filters\n'), ((5686, 5696), 'corehq.apps.es.domains.DomainES', 'DomainES', ([], {}), '()\n', (5694, 5696), False, 'from corehq.apps.es.domains import DomainES\n')] |
from snovault import (
AuditFailure,
audit_checker,
)
from .formatter import (
audit_link,
path_to_text,
)
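# Each check below is a generator: it yields AuditFailure objects so a single
# pass can report several problems, and the dispatcher at the bottom fans each
# Dataset out to every registered check via `yield from`.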
def audit_contributor_institute(value, system):
if value['status'] in ['deleted']:
return
need_inst = []
if 'corresponding_contributors' in value:
for user in value['corresponding_contributors']:
if not user.get('institute_name'):
need_inst.append(user.get('uuid'))
if need_inst:
detail = ('Dataset {} contains corresponding_contributors {} that do not have an institute_name.'.format(
audit_link(path_to_text(value['@id']), value['@id']),
', '.join(need_inst)
)
)
yield AuditFailure('no contributor institute', detail, level='ERROR')
need_inst = []
if 'contributors' in value:
for user in value['contributors']:
if not user.get('institute_name'):
need_inst.append(user.get('uuid'))
if need_inst:
detail = ('Dataset {} contains contributors {} that do not have an institute_name.'.format(
audit_link(path_to_text(value['@id']), value['@id']),
', '.join(need_inst)
)
)
yield AuditFailure('no contributor institute', detail, level='ERROR')
return
def audit_contributor_email(value, system):
if value['status'] in ['deleted']:
return
need_email = []
if 'corresponding_contributors' in value:
for user in value['corresponding_contributors']:
if not user.get('email'):
need_email.append(user.get('uuid'))
if need_email:
detail = ('Dataset {} contains corresponding_contributors {} that do not have an email.'.format(
audit_link(path_to_text(value['@id']), value['@id']),
', '.join(need_email)
)
)
yield AuditFailure('no corresponding email', detail, level='ERROR')
return
def audit_contributor_lists(value, system):
if value['status'] in ['deleted']:
return
duplicates = []
if 'contributors' in value and 'corresponding_contributors' in value:
for user in value['corresponding_contributors']:
if user in value.get('contributors'):
duplicates.append(user.get('uuid'))
if duplicates:
detail = ('Dataset {} contains duplicated contributors {}.'.format(
audit_link(path_to_text(value['@id']), value['@id']),
', '.join(duplicates)
)
)
yield AuditFailure('duplicated contributors', detail, level='ERROR')
return
def audit_dataset_no_raw_files(value, system):
if value['status'] in ['deleted']:
return
raw_data = False
if 'original_files' in value:
for f in value['original_files']:
            if f['@type'][0] == 'RawSequenceFile' and not f['no_file_available']:
                raw_data = True
    if not raw_data:
detail = ('Dataset {} does not contain any raw sequence files.'.format(
audit_link(path_to_text(value['@id']), value['@id'])
)
)
yield AuditFailure('no raw data', detail, level='ERROR')
return
def audit_dataset_dcp_required_properties(value, system):
if value['status'] in ['deleted']:
return
dcp_reqs = ['dataset_title', 'description', 'funding_organizations']
for req in dcp_reqs:
if req not in value:
detail = ('Dataset {} does not have {}, required by the DCP.'.format(
audit_link(path_to_text(value['@id']), value['@id']),
req
)
)
yield AuditFailure('missing DCP-required field', detail, level='ERROR')
dcp_optional = ['corresponding_contributors', 'contributors']
for opt in dcp_optional:
if opt not in value:
detail = ('Dataset {} does not have {}, strongly encouraged by the DCP.'.format(
audit_link(path_to_text(value['@id']), value['@id']),
opt
)
)
yield AuditFailure('missing DCP-encouraged field', detail, level='ERROR')
return
def audit_experiment_released_with_unreleased_files(value, system):
'''
A released experiment should not have unreleased files
'''
if value['status'] != 'released':
return
if 'original_files' not in value:
return
for f in value['original_files']:
if f['status'] not in ['released', 'deleted',
'revoked', 'replaced',
'archived']:
detail = ('Released dataset {} contains file {} that has not been released.'.format(
audit_link(path_to_text(value['@id']), value['@id']),
audit_link(path_to_text(f['@id']), f['@id'])
)
)
yield AuditFailure('mismatched file status', detail, level='INTERNAL_ACTION')
return
function_dispatcher_with_files = {
'audit_contributor_institute': audit_contributor_institute,
'audit_contributor_email': audit_contributor_email,
'audit_contributor_lists': audit_contributor_lists,
'audit_dataset_no_raw_files': audit_dataset_no_raw_files,
'audit_dataset_dcp_required_properties': audit_dataset_dcp_required_properties,
'audit_released_with_unreleased_files': audit_experiment_released_with_unreleased_files
}
@audit_checker('Dataset',
frame=['original_files',
'corresponding_contributors',
'contributors'])
def audit_experiment(value, system):
for function_name in function_dispatcher_with_files.keys():
yield from function_dispatcher_with_files[function_name](value, system)
return
| [
"snovault.AuditFailure",
"snovault.audit_checker"
] | [((5487, 5587), 'snovault.audit_checker', 'audit_checker', (['"""Dataset"""'], {'frame': "['original_files', 'corresponding_contributors', 'contributors']"}), "('Dataset', frame=['original_files',\n 'corresponding_contributors', 'contributors'])\n", (5500, 5587), False, 'from snovault import AuditFailure, audit_checker\n'), ((725, 788), 'snovault.AuditFailure', 'AuditFailure', (['"""no contributor institute"""', 'detail'], {'level': '"""ERROR"""'}), "('no contributor institute', detail, level='ERROR')\n", (737, 788), False, 'from snovault import AuditFailure, audit_checker\n'), ((1245, 1308), 'snovault.AuditFailure', 'AuditFailure', (['"""no contributor institute"""', 'detail'], {'level': '"""ERROR"""'}), "('no contributor institute', detail, level='ERROR')\n", (1257, 1308), False, 'from snovault import AuditFailure, audit_checker\n'), ((1904, 1965), 'snovault.AuditFailure', 'AuditFailure', (['"""no corresponding email"""', 'detail'], {'level': '"""ERROR"""'}), "('no corresponding email', detail, level='ERROR')\n", (1916, 1965), False, 'from snovault import AuditFailure, audit_checker\n'), ((2572, 2634), 'snovault.AuditFailure', 'AuditFailure', (['"""duplicated contributors"""', 'detail'], {'level': '"""ERROR"""'}), "('duplicated contributors', detail, level='ERROR')\n", (2584, 2634), False, 'from snovault import AuditFailure, audit_checker\n'), ((3178, 3228), 'snovault.AuditFailure', 'AuditFailure', (['"""no raw data"""', 'detail'], {'level': '"""ERROR"""'}), "('no raw data', detail, level='ERROR')\n", (3190, 3228), False, 'from snovault import AuditFailure, audit_checker\n'), ((3712, 3777), 'snovault.AuditFailure', 'AuditFailure', (['"""missing DCP-required field"""', 'detail'], {'level': '"""ERROR"""'}), "('missing DCP-required field', detail, level='ERROR')\n", (3724, 3777), False, 'from snovault import AuditFailure, audit_checker\n'), ((4143, 4210), 'snovault.AuditFailure', 'AuditFailure', (['"""missing DCP-encouraged field"""', 'detail'], {'level': '"""ERROR"""'}), "('missing DCP-encouraged field', detail, level='ERROR')\n", (4155, 4210), False, 'from snovault import AuditFailure, audit_checker\n'), ((4949, 5020), 'snovault.AuditFailure', 'AuditFailure', (['"""mismatched file status"""', 'detail'], {'level': '"""INTERNAL_ACTION"""'}), "('mismatched file status', detail, level='INTERNAL_ACTION')\n", (4961, 5020), False, 'from snovault import AuditFailure, audit_checker\n')] |
import numpy as np
import cv2
import math
import datetime
from datetime import timedelta as Delta
h=300
w=300
cap = cv2.VideoCapture(0)
SUN_LOC=(200,70)
SUN_RSIZE=20
ORBITAL_R=10
def Orbiral(frame,Centerloc,orbit_r,size_r,phi,color):
x_orbit=Centerloc[0]+int(orbit_r*np.cos(np.deg2rad(phi)))
y_orbit=Centerloc[1]+int(orbit_r*np.sin(np.deg2rad(phi)))
#print(f"x:{x_orbit} y:{y_orbit} phi:{int(orbitphi)}")
frame= cv2.circle(frame,(x_orbit,y_orbit),size_r, color, -1)
return frame
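# Orbiral is plain polar-to-Cartesian conversion: x = cx + r*cos(phi),
# y = cy + r*sin(phi), with phi supplied in degrees and converted by np.deg2rad.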
ORBITAL_RSIZE=3
ORBITAL_PHI=0
ORBITAL_DPHI=1 # angular step in degrees per frame
dr=(SUN_RSIZE+ORBITAL_R) #*(orbitdphi) #*np.pi/180)
orbitloc=(SUN_LOC[0],SUN_LOC[1]+SUN_RSIZE+ORBITAL_R)
satsn=0
#2021/05/06 Window priority
print(cv2.WND_PROP_FULLSCREEN)
cv2.namedWindow("Frame", cv2.WND_PROP_FULLSCREEN)
cv2.setWindowProperty("Frame",cv2.WND_PROP_FULLSCREEN,0)
Start_Time=datetime.datetime.today()
Delta_T=60
#Sat_Time_Space=Delta(minutes=1)
Sat_Time_Space=Delta(seconds=Delta_T)
Sat_dic={}
Poff=180
Roff=0
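# Poff staggers each new satellite 30 degrees behind the previous one; once a
# full ring (180 down to -180) is used up, ORBITAL_R grows by 5 px so the next
# satellites start a wider orbit.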
#mins=time.minute
while True:
_, frame = cap.read()
frame_time=datetime.datetime.today()
if frame_time >= Sat_Time_Space+Start_Time:
Start_Time=frame_time
dr=(SUN_RSIZE+ORBITAL_R)
Sat_dic[satsn]={"Time":Start_Time,"Phi_Offset":Poff,"Sat_Radius":dr}
print("New Sat added")
print(Sat_dic[satsn])
Poff-=30
satsn+=1
if Poff <=-180:
Poff=180
ORBITAL_R+=5
print(frame_time)
#frame = cv2.resize(frame,(h,w))
if(frame is None):
continue
frame = cv2.circle(frame,SUN_LOC,SUN_RSIZE, (0,0,250), -1)
#Satn to frame
# frame=cv2.putText(frame,str(satsn),(SUN_LOC[0]-15,SUN_LOC[1]+15),
# cv2.FONT_HERSHEY_PLAIN,3,(255,255,255))
if satsn:
for n,sat in Sat_dic.items():
frame=Orbiral(frame,SUN_LOC,sat["Sat_Radius"],ORBITAL_RSIZE,ORBITAL_PHI-sat["Phi_Offset"],(0,0,255))
#for offphi in range(-180,180,satsn):
#if n==satsn:
# for R_OFF, fadeSeconds in zip(np.linspace(ORBITAL_RSIZE,1,ORBITAL_RSIZE),np.linspace(0,Delta//2,int(ORBITAL_RSIZE))):
# if frame_time >= Sat_Time_Space+fadeSeconds:
# print("Fade:",R_OFF)
# frame=Orbiral(frame,SUN_LOC,sat["Sat_Radius"],ORBITAL_RSIZE-int(R_OFF),ORBITAL_PHI-sat["Phi_Offset"],(255,0,255))
# else:
#frame=Orbiral(frame,SUN_LOC,sat["Sat_Radius"],ORBITAL_RSIZE,ORBITAL_PHI-sat["Phi_Offset"],(0,0,255))
ORBITAL_PHI+=ORBITAL_DPHI
if ORBITAL_PHI>=360:
ORBITAL_PHI=0
#Line
#img = cv2.line(frame,logoloc,orbitloc,(255,0,0),5)
cv2.imshow('Frame', frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# Release the VideoCapture object
cap.release()
cv2.destroyAllWindows()
| [
"cv2.setWindowProperty",
"cv2.imshow",
"cv2.circle",
"numpy.deg2rad",
"cv2.destroyAllWindows",
"cv2.VideoCapture",
"datetime.datetime.today",
"datetime.timedelta",
"cv2.waitKey",
"cv2.namedWindow"
] | [((125, 144), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (141, 144), False, 'import cv2\n'), ((775, 824), 'cv2.namedWindow', 'cv2.namedWindow', (['"""Frame"""', 'cv2.WND_PROP_FULLSCREEN'], {}), "('Frame', cv2.WND_PROP_FULLSCREEN)\n", (790, 824), False, 'import cv2\n'), ((826, 884), 'cv2.setWindowProperty', 'cv2.setWindowProperty', (['"""Frame"""', 'cv2.WND_PROP_FULLSCREEN', '(0)'], {}), "('Frame', cv2.WND_PROP_FULLSCREEN, 0)\n", (847, 884), False, 'import cv2\n'), ((899, 924), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (922, 924), False, 'import datetime\n'), ((987, 1009), 'datetime.timedelta', 'Delta', ([], {'seconds': 'Delta_T'}), '(seconds=Delta_T)\n', (992, 1009), True, 'from datetime import timedelta as Delta\n'), ((2968, 2991), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (2989, 2991), False, 'import cv2\n'), ((457, 513), 'cv2.circle', 'cv2.circle', (['frame', '(x_orbit, y_orbit)', 'size_r', 'color', '(-1)'], {}), '(frame, (x_orbit, y_orbit), size_r, color, -1)\n', (467, 513), False, 'import cv2\n'), ((1117, 1142), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (1140, 1142), False, 'import datetime\n'), ((1654, 1708), 'cv2.circle', 'cv2.circle', (['frame', 'SUN_LOC', 'SUN_RSIZE', '(0, 0, 250)', '(-1)'], {}), '(frame, SUN_LOC, SUN_RSIZE, (0, 0, 250), -1)\n', (1664, 1708), False, 'import cv2\n'), ((2839, 2865), 'cv2.imshow', 'cv2.imshow', (['"""Frame"""', 'frame'], {}), "('Frame', frame)\n", (2849, 2865), False, 'import cv2\n'), ((2874, 2888), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (2885, 2888), False, 'import cv2\n'), ((304, 319), 'numpy.deg2rad', 'np.deg2rad', (['phi'], {}), '(phi)\n', (314, 319), True, 'import numpy as np\n'), ((367, 382), 'numpy.deg2rad', 'np.deg2rad', (['phi'], {}), '(phi)\n', (377, 382), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
#
"""Tests for editor and outline explorer interaction."""
# Test library imports
import pytest
# Local imports
from spyder.plugins.outlineexplorer.widgets import OutlineExplorerWidget
from spyder.plugins.outlineexplorer.editor import OutlineExplorerProxyEditor
from spyder.plugins.outlineexplorer.api import OutlineExplorerData
from spyder.utils.qthelpers import qapplication
from spyder.plugins.editor.widgets.codeeditor import CodeEditor
class testBlock():
def __init__(self, line_number):
self._line = line_number - 1
def blockNumber(self):
return self._line
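# testBlock stands in for the Qt text block (normally a QTextBlock) referenced
# by OutlineExplorerData; only blockNumber(), a 0-based line index, is needed.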
text = ('# test file\n'
'class a():\n'
' self.b = 1\n'
' print(self.b)\n'
' \n'
' def some_method(self):\n'
' self.b = 3')
expected_oe_list = [
OutlineExplorerData(
testBlock(2), 'class a():', 0,
OutlineExplorerData.CLASS, 'a'),
OutlineExplorerData(
testBlock(6), ' def some_method(self):', 4,
OutlineExplorerData.FUNCTION, 'some_method')
]
@pytest.fixture()
def editor_outline_explorer_bot():
"""setup editor and outline_explorer."""
app = qapplication()
editor = CodeEditor(parent=None)
editor.setup_editor(language='Python')
outlineexplorer = OutlineExplorerWidget(editor)
editor.set_text(text)
editor.oe_proxy = OutlineExplorerProxyEditor(editor, "test.py")
outlineexplorer.set_current_editor(editor.oe_proxy,
update=False,
clear=False)
outlineexplorer.setEnabled(True)
return editor, outlineexplorer, editor.oe_proxy
def test_editor_outline_explorer(editor_outline_explorer_bot):
"""Test basic interaction between outline_explorer and editor."""
editor, outline_explorer, oe_proxy = editor_outline_explorer_bot
assert outline_explorer
# Assert proxy
assert oe_proxy == outline_explorer.treewidget.current_editor
assert len(outline_explorer.treewidget.editor_items) == 1
# Assert root item
file_root = outline_explorer.treewidget.editor_items[id(editor)]
assert file_root.text(0) == oe_proxy.fname
# Assert OEData
oedata = oe_proxy.outlineexplorer_data_list()
for left, right in zip(oedata, expected_oe_list):
a = right.__dict__
b = left.__dict__
b['color'] = None
assert a['block'].blockNumber() == b['block'].blockNumber()
a['block'] = None
b['block'] = None
assert a == b
# Assert Treewidget Items
items = outline_explorer.treewidget.get_items()
oedata_texts = [oe.def_name for oe in expected_oe_list]
for item, oe_item in zip(items, oedata_texts):
assert item.text(0) == oe_item
if __name__ == "__main__":
pytest.main()
| [
"spyder.utils.qthelpers.qapplication",
"spyder.plugins.editor.widgets.codeeditor.CodeEditor",
"pytest.main",
"spyder.plugins.outlineexplorer.editor.OutlineExplorerProxyEditor",
"pytest.fixture",
"spyder.plugins.outlineexplorer.widgets.OutlineExplorerWidget"
] | [((1165, 1181), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1179, 1181), False, 'import pytest\n'), ((1272, 1286), 'spyder.utils.qthelpers.qapplication', 'qapplication', ([], {}), '()\n', (1284, 1286), False, 'from spyder.utils.qthelpers import qapplication\n'), ((1300, 1323), 'spyder.plugins.editor.widgets.codeeditor.CodeEditor', 'CodeEditor', ([], {'parent': 'None'}), '(parent=None)\n', (1310, 1323), False, 'from spyder.plugins.editor.widgets.codeeditor import CodeEditor\n'), ((1389, 1418), 'spyder.plugins.outlineexplorer.widgets.OutlineExplorerWidget', 'OutlineExplorerWidget', (['editor'], {}), '(editor)\n', (1410, 1418), False, 'from spyder.plugins.outlineexplorer.widgets import OutlineExplorerWidget\n'), ((1469, 1514), 'spyder.plugins.outlineexplorer.editor.OutlineExplorerProxyEditor', 'OutlineExplorerProxyEditor', (['editor', '"""test.py"""'], {}), "(editor, 'test.py')\n", (1495, 1514), False, 'from spyder.plugins.outlineexplorer.editor import OutlineExplorerProxyEditor\n'), ((2899, 2912), 'pytest.main', 'pytest.main', ([], {}), '()\n', (2910, 2912), False, 'import pytest\n')] |
from typing import TYPE_CHECKING
from drip.utils import json_list, json_object, raise_response
if TYPE_CHECKING:
from requests import Session
class Subscribers:
session: 'Session'
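    # Mixin-style resource class: the composing API client is expected to
    # provide `self.session`, a requests.Session (or subclass) that resolves
    # relative paths like 'subscribers' against the Drip base URL and
    # handles authentication.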
@json_object('subscribers')
def create_or_update_subscriber(self, email, marshall=True, **options):
"""
create_or_update_subscriber(email,
new_email=None, user_id=None, time_zone='Etc/UTC', lifetime_value=None, ip_address=None,
tags=None, remove_tags=None, prospect=True, base_lead_score=30, eu_consent=None, eu_consent_message=None, marshall=True)
Update a subscriber or create it if it doesn't exist.
Arguments:
email {str} -- Person's email address
Call Options:
new_email {str} -- Update the subscriber's email address, taking precedence over 'email' while creating (default: {None})
user_id {str} -- A custom unique identifier (default: {None})
time_zone {str} -- Timezone (default: {'Etc/UTC'})
lifetime_value {int} -- LifeTime Value, in cents (default: {None})
ip_address {str} -- IP Address (default: {None})
custom_fields {Mapping[str, Any]} -- Dictionary of custom fields and their values (default: {None})
tags {Iterable[str]} -- List of tags to apply (default: {None})
remove_tags {Iterable[str]} -- List of tags to remove (default: {None})
prospect {bool} -- Person is a Prospect (default: {True})
            base_lead_score {int} -- Starting leadscore (default: {30})
eu_consent {str} -- Status of consent for GDPR: granted, denied (default: {None})
eu_consent_message {str} -- Message that was consented to (default: {None})
Other Keyword Arguments:
marshall {bool} -- Unpack the Response object (default: {True})
Returns:
Response -- API Response, or the marshalled Subscriber object
"""
payload = {
'email': email,
}
payload.update(options)
return self.session.post('subscribers', json={'subscribers': [payload, ]})
@json_list('subscribers')
def subscribers(self, marshall=True, **params):
"""
subscribers(page=0, per_page=100, marshall=True)
List all subscribers. Supports pagination and filtering.
Call Parameters:
page {int} -- Page to get, or 0 for all pages (default: {0})
per_page {int} -- Number of objects to get on each page (default: {100})
tags {Iterable[str]} -- List of tags to filter by (default: {None})
subscribed_before {str} -- Include only people created before this date, Eg. "2016-01-01T00:00:00Z" (default: {None})
            subscribed_after {str} -- Include only people created after this date, Eg. "2016-01-01T00:00:00Z" (default: {None})
Other Keyword Arguments:
marshall {bool} -- Unpack the Response object (default: {True})
Returns:
Response -- API Response, or the marshalled List of Subscriber objects
"""
return self.session.get('subscribers', params=params)
@json_object('subscribers')
def subscriber(self, email, marshall=True):
"""
subscriber(email, marshall=True)
Get a subscriber.
Arguments:
email {str} -- Person's email address
Other Keyword Arguments:
marshall {bool} -- Unpack the Response object (default: {True})
Returns:
Response -- API Response, or the marshalled Subscriber object
"""
return self.session.get(f'subscribers/{email}')
@json_object('subscribers')
def unsubscribe(self, email, marshall=True, **params):
"""
unsubscribe(email, campaign_id=None, marshall=True)
Unsubscribe a subscriber from all campaigns, or optionally one specific campaign.
Arguments:
email {str} -- Person's email address
Call Parameters:
campaign_id {int} -- Campaign from which to remove the subscriber (default: {None})
Other Keyword Arguments:
marshall {bool} -- Unpack the Response object (default: {True})
Returns:
Response -- API Response, or the marshalled Subscriber object
"""
return self.session.post(f'subscribers/{email}/remove', params=params)
@json_object('subscribers')
def unsubscribe_from_all(self, email, marshall=True):
"""
        unsubscribe_from_all(email, marshall=True)
Unsubscribe a subscriber from all campaigns.
Arguments:
email {str} -- Person's email address
Other Keyword Arguments:
marshall {bool} -- Unpack the Response object (default: {True})
Returns:
Response -- API Response, or the marshalled Subscriber object
"""
return self.session.post(f'subscribers/{email}/unsubscribe_all')
@raise_response()
def delete_subscriber(self, email):
"""
        delete_subscriber(email)
Delete a subscriber.
Arguments:
email {str} -- Person's email address
Returns:
Response -- API Response
"""
return self.session.delete(f'subscribers/{email}')
# @pagination('subscribers')
# def subscribers(self,
# status: str = None, # active, all, unsubscribed, active_or_unsubscribed, undeliverable. Default: active
# tags: 'Iterable[str]' = None,
# subscribed_before: str = None, # "2017-01-01T00:00:00Z"
# subscribed_after: str = None,
# page: int = 0,
# per_page: int = None,
# marshall=True) -> 'Response':
# payload: 'MutableMapping[str, Any]' = {}
# if status:
# payload['status'] = status
# if tags:
# payload['tags'] = tags
# if subscribed_before:
# payload['subscribed_before'] = subscribed_before
# if subscribed_after:
# payload['subscribed_after'] = subscribed_after
# if page:
# payload['page'] = page
# if per_page:
# payload['per_page'] = per_page
# return self.session.get('subscribers', params=payload)
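# --- Editorial addition: a minimal usage sketch, not part of the library. ---
# It assumes `session` is a requests-style Session whose relative paths
# resolve against the Drip REST API; the base-URL layout and the ACCOUNT_ID
# placeholder below are assumptions, not verified against the real client.
if __name__ == '__main__':
    import requests

    class _ApiSession(requests.Session):
        BASE = 'https://api.getdrip.com/v2/ACCOUNT_ID/'  # assumed endpoint layout

        def request(self, method, url, **kwargs):
            return super().request(method, self.BASE + url, **kwargs)

    client = Subscribers()
    client.session = _ApiSession()
    client.create_or_update_subscriber('jane@example.com', tags=['newsletter'])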
| [
"drip.utils.json_object",
"drip.utils.raise_response",
"drip.utils.json_list"
] | [((199, 225), 'drip.utils.json_object', 'json_object', (['"""subscribers"""'], {}), "('subscribers')\n", (210, 225), False, 'from drip.utils import json_list, json_object, raise_response\n'), ((2147, 2171), 'drip.utils.json_list', 'json_list', (['"""subscribers"""'], {}), "('subscribers')\n", (2156, 2171), False, 'from drip.utils import json_list, json_object, raise_response\n'), ((3171, 3197), 'drip.utils.json_object', 'json_object', (['"""subscribers"""'], {}), "('subscribers')\n", (3182, 3197), False, 'from drip.utils import json_list, json_object, raise_response\n'), ((3672, 3698), 'drip.utils.json_object', 'json_object', (['"""subscribers"""'], {}), "('subscribers')\n", (3683, 3698), False, 'from drip.utils import json_list, json_object, raise_response\n'), ((4412, 4438), 'drip.utils.json_object', 'json_object', (['"""subscribers"""'], {}), "('subscribers')\n", (4423, 4438), False, 'from drip.utils import json_list, json_object, raise_response\n'), ((4995, 5011), 'drip.utils.raise_response', 'raise_response', ([], {}), '()\n', (5009, 5011), False, 'from drip.utils import json_list, json_object, raise_response\n')] |
from flask import Flask, request
from flask_httpauth import HTTPBasicAuth
from auth_handler import AuthHandler
from cache import Cache
from os import environ
from yaml import safe_load
import logging
from connection_provider import ConnectionProvider
# init logging
logging.basicConfig(format='[%(asctime)s] [%(levelname)s] %(message)s', level=logging.DEBUG)
# Init flask app
app = Flask(__name__)
auth = HTTPBasicAuth()
# Basic cache
CACHE_KEY_EXPIRATION_SECONDS = 60 * 60 * 8 # 8 hours
cache = Cache(CACHE_KEY_EXPIRATION_SECONDS)
# Init LDAP config
logging.info("Reading config.yaml")
with open("/config/config.yaml", 'r') as stream:
config = safe_load(stream)
# Create the AuthHandler instance
logging.info("Initializing authentication handler")
authHandler = AuthHandler(
environ['LDAP_MANAGER_BINDDN'],
environ["LDAP_MANAGER_PASSWORD"],
ConnectionProvider(config['ldapServers'])
)
@auth.verify_password
def login(username, password):
# Check if username or password is empty
if not username or not password:
return False
    # Get lookup key for config; reject the request if the header is missing
    ldap_config_key = request.headers.get('Ldap-Config-Key')
    if not ldap_config_key:
        return False
# Check if authentication was cached
if cache.validate(username, ldap_config_key, password):
logging.info("[user=%s, config=%s] authenticated from cache", username, ldap_config_key)
return True
# Lookup LDAP config
ldapParameters = config[ldap_config_key]
# Validate user
if authHandler.validate(username, password, ldap_config_key, ldapParameters):
# Add successful authentication to cache
cache.set(username, ldap_config_key, password)
return True
return False
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
@auth.login_required
def index(path):
code = 200
msg = "LDAP Authentication"
headers = []
return msg, code, headers
# health endpoint
@app.route('/healthz')
def healthz():
if cache is None or authHandler is None:
return "not healthy", 503
else:
return "healthy", 200
# Main
if __name__ == '__main__':
app.run(host='0.0.0.0', port=9000, debug=True)
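# --- Editorial addition: an illustrative /config/config.yaml layout. ---
# Only 'ldapServers' is read explicitly above; the per-application keys and
# their fields are assumptions inferred from the Ldap-Config-Key lookup and
# from what AuthHandler.validate() is handed, not a documented schema.
#
# ldapServers:
#   - ldaps://ldap1.example.com:636
#   - ldaps://ldap2.example.com:636
# my-app:                              # value of the Ldap-Config-Key header
#   userSearchBase: ou=people,dc=example,dc=com
#   userSearchFilter: (uid={username})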
| [
"logging.basicConfig",
"flask_httpauth.HTTPBasicAuth",
"connection_provider.ConnectionProvider",
"flask.Flask",
"yaml.safe_load",
"logging.info",
"cache.Cache"
] | [((268, 364), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""[%(asctime)s] [%(levelname)s] %(message)s"""', 'level': 'logging.DEBUG'}), "(format='[%(asctime)s] [%(levelname)s] %(message)s',\n level=logging.DEBUG)\n", (287, 364), False, 'import logging\n'), ((385, 400), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (390, 400), False, 'from flask import Flask, request\n'), ((408, 423), 'flask_httpauth.HTTPBasicAuth', 'HTTPBasicAuth', ([], {}), '()\n', (421, 423), False, 'from flask_httpauth import HTTPBasicAuth\n'), ((501, 536), 'cache.Cache', 'Cache', (['CACHE_KEY_EXPIRATION_SECONDS'], {}), '(CACHE_KEY_EXPIRATION_SECONDS)\n', (506, 536), False, 'from cache import Cache\n'), ((557, 592), 'logging.info', 'logging.info', (['"""Reading config.yaml"""'], {}), "('Reading config.yaml')\n", (569, 592), False, 'import logging\n'), ((708, 759), 'logging.info', 'logging.info', (['"""Initializing authentication handler"""'], {}), "('Initializing authentication handler')\n", (720, 759), False, 'import logging\n'), ((655, 672), 'yaml.safe_load', 'safe_load', (['stream'], {}), '(stream)\n', (664, 672), False, 'from yaml import safe_load\n'), ((865, 906), 'connection_provider.ConnectionProvider', 'ConnectionProvider', (["config['ldapServers']"], {}), "(config['ldapServers'])\n", (883, 906), False, 'from connection_provider import ConnectionProvider\n'), ((1266, 1358), 'logging.info', 'logging.info', (['"""[user=%s, config=%s] authenticated from cache"""', 'username', 'ldap_config_key'], {}), "('[user=%s, config=%s] authenticated from cache', username,\n ldap_config_key)\n", (1278, 1358), False, 'import logging\n')] |
import json
import logging
from typing import Tuple
import requests
from django.conf import settings
from django.core.files.uploadedfile import InMemoryUploadedFile
from django.views.generic import FormView, TemplateView
from .forms import CodeSchoolForm
logger = logging.getLogger(__name__)
class IndexView(TemplateView):
template_name = 'frontend/index.html'
class CodeschoolFormView(FormView):
form_class = CodeSchoolForm
template_name = 'frontend/codeschool-form.html'
success_url = f'https://github.com/{settings.GITHUB_REPO}/issues'
def form_valid(self, form):
form.save()
handle_submission(form.cleaned_data)
return super().form_valid(form)
def form_invalid(self, form):
return super().form_invalid(form)
class BotMessagesView(TemplateView):
template_name = 'frontend/messages.html'
def get_logo_and_users(logo: InMemoryUploadedFile) -> Tuple[str, str]:
school_logo = logo.name.replace(' ', '_')
if settings.DEBUG or settings.PRE_PROD:
users = '@wimo7083 @AllenAnthes,'
else:
users = '@wimo7083 @jhampton @kylemh'
logo_url = f'{settings.MEDIA_URL}logos/{school_logo}'
return logo_url, users
def handle_submission(form: dict):
repo_path = settings.GITHUB_REPO
url = f"https://api.github.com/repos/{repo_path}/issues"
headers = {"Authorization": f"Bearer {settings.GITHUB_JWT}"}
params = make_params(**form)
res = requests.post(url, headers=headers, data=json.dumps(params))
logger.info(f'response from github API call {res}')
def make_params(logo, name, url, address1, city, state, zipcode, country, rep_name, rep_email, recaptcha='',
address2=None, fulltime=False, hardware=False, has_online=False, only_online=False, accredited=False,
housing=False, mooc=False):
logo_url, notify_users = get_logo_and_users(logo)
return ({
'title': f'New Code School Request: {name}',
'body': (
f"Name: {name}\n"
f"Website: {url}\n"
f"Full Time: {fulltime}\n"
f"Hardware Included: {hardware}\n"
f"Has Online: {has_online}\n"
f"Only Online: {only_online}\n"
f"VA Accredited: {accredited}\n"
f"Housing Included: {housing}\n"
f"MOOC Only: {mooc}\n"
f"Address: {address1} {address2}\n"
f"City: {city}\n"
f"State: {state}\n"
f"Country: {country}\n"
f"Zip: {zipcode}\n\n"
f"Representative Name: {rep_name}\n"
f"Representative Email: {rep_email}\n"
f"logo: \n"
'This code school is ready to be added/updated:\n'
f"{notify_users}\n"
"Please close this issue once you've added/updated the code school."
)
})
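# --- Editorial addition: a hedged sketch of what make_params() produces. ---
# Illustrative only; this module reads Django settings at import time, so a
# settings module must already be configured. FakeLogo is a stand-in
# assumption: anything with a `.name` attribute satisfies get_logo_and_users().
#
#     class FakeLogo:
#         name = 'my school.png'
#
#     params = make_params(
#         FakeLogo(), 'My Code School', 'https://school.example.com',
#         '1 Main St', 'Springfield', 'IL', '62701', 'USA',
#         'Jane Doe', 'jane@example.com')
#     params['title']  # -> 'New Code School Request: My Code School'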
| [
"logging.getLogger",
"json.dumps"
] | [((267, 294), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (284, 294), False, 'import logging\n'), ((1493, 1511), 'json.dumps', 'json.dumps', (['params'], {}), '(params)\n', (1503, 1511), False, 'import json\n')] |
# Create your service here.
__author__ = "<NAME>"
__email__ = "<EMAIL>"
__copyright__ = "Copyright 2019."
from utils.commons import safe_invoke
class NotificationManager(object):
def __init__(self, *args, **kwargs):
pass
def notify(self, *args, **kwargs):
pass
@staticmethod
def notify_sync(notif_mgr, *args, **kwargs):
        safe_invoke(notif_mgr.notify, *args, **kwargs)
@classmethod
def EMAIL(cls, *args, **kwargs):
from utils.notification.email.email_manager import EmailManager
return EmailManager(*args, **kwargs)
class Parameters(object):
pass
__all__ = ["NotificationManager", "Parameters"]
| [
"utils.commons.safe_invoke",
"utils.notification.email.email_manager.EmailManager"
] | [((368, 404), 'utils.commons.safe_invoke', 'safe_invoke', (['notif_mgr.notify', '*args'], {}), '(notif_mgr.notify, *args)\n', (379, 404), False, 'from utils.commons import safe_invoke\n'), ((547, 576), 'utils.notification.email.email_manager.EmailManager', 'EmailManager', (['*args'], {}), '(*args, **kwargs)\n', (559, 576), False, 'from utils.notification.email.email_manager import EmailManager\n')] |
import cv2
import urllib
import numpy as np
import multiprocessing as mp
stream = 'http://192.168.53.114:8000/streamLow.mjpg'
stream2 = 'http://192.168.53.114:8001/streamLow.mjpg'
def procImg(url, wind, stop):
    # Read an MJPEG stream, show each frame, and stop when the event is set.
    buf = ''
    stream = urllib.urlopen(url)
    while not stop.is_set():
        try:
            buf += stream.read(4096)
            a = buf.find('\xff\xd8')  # JPEG start-of-image marker
            b = buf.find('\xff\xd9')  # JPEG end-of-image marker
            if a != -1 and b != -1:
                jpg = buf[a:b+2]
                if wind == 'Low':
                    # the low-res stream appends a text payload terminated
                    # by a custom \xff\xaa\xee marker after each frame
                    c = buf.find('\xff\xaa\xee')
                    if c != -1:
                        msg = buf[b+2:c]
                        print(msg)
                        buf = buf[c+3:]
                    else:
                        buf = buf[b+2:]
                else:
                    buf = buf[b+2:]
                frame = cv2.imdecode(np.fromstring(jpg, dtype=np.uint8), cv2.IMREAD_COLOR)
                cv2.imshow(wind, frame)
                if cv2.waitKey(1) == ord('q'):
                    stop.set()
                    break
        except Exception:
            # tolerate transient read/decode errors and keep streaming
            pass
if __name__ == '__main__':
st = mp.Event()
lowProc = mp.Process(target = procImg, args=(stream, 'Low', st))
HighProc = mp.Process(target = procImg, args=(stream2, 'High', st))
lowProc.start()
HighProc.start()
lowProc.join()
HighProc.join()
exit(0)
| [
"multiprocessing.Event",
"multiprocessing.Process",
"urllib.urlopen",
"cv2.imshow",
"numpy.fromstring",
"cv2.waitKey"
] | [((241, 260), 'urllib.urlopen', 'urllib.urlopen', (['str'], {}), '(str)\n', (255, 260), False, 'import urllib\n'), ((1203, 1213), 'multiprocessing.Event', 'mp.Event', ([], {}), '()\n', (1211, 1213), True, 'import multiprocessing as mp\n'), ((1228, 1280), 'multiprocessing.Process', 'mp.Process', ([], {'target': 'procImg', 'args': "(stream, 'Low', st)"}), "(target=procImg, args=(stream, 'Low', st))\n", (1238, 1280), True, 'import multiprocessing as mp\n'), ((1298, 1352), 'multiprocessing.Process', 'mp.Process', ([], {'target': 'procImg', 'args': "(stream2, 'High', st)"}), "(target=procImg, args=(stream2, 'High', st))\n", (1308, 1352), True, 'import multiprocessing as mp\n'), ((991, 1010), 'cv2.imshow', 'cv2.imshow', (['wind', 'i'], {}), '(wind, i)\n', (1001, 1010), False, 'import cv2\n'), ((1027, 1041), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (1038, 1041), False, 'import cv2\n'), ((1057, 1071), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (1068, 1071), False, 'import cv2\n'), ((921, 955), 'numpy.fromstring', 'np.fromstring', (['jpg'], {'dtype': 'np.uint8'}), '(jpg, dtype=np.uint8)\n', (934, 955), True, 'import numpy as np\n')] |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import cProfile
import io
import pstats
cache = {}
PROFILE_SWITCH = True
class Profiler:
def __init__(self):
self.profiler = cProfile.Profile()
def profile(self, func, *args, **kwargs):
self.profiler.enable()
result = func(*args, **kwargs)
self.profiler.disable()
with io.StringIO() as string_stream:
            profiler_stats = pstats.Stats(self.profiler, stream=string_stream).sort_stats("cumulative")
profiler_stats.print_stats()
print(string_stream.getvalue())
return result
class PassThroughProfiler:
def __init__(self):
pass
def profile(self, func, *args, **kwargs):
return func(*args, **kwargs)
def get_profiler():
if "profiler" not in cache:
if PROFILE_SWITCH:
cache["profiler"] = Profiler()
else:
cache["profiler"] = PassThroughProfiler()
return cache["profiler"]
def profile(func, *args, **kwargs):
profiler = get_profiler()
return profiler.profile(func, *args, **kwargs)
def profileable(func):
def _profile(*args, **kwargs):
return profile(func, *args, **kwargs)
return _profile | [
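# --- Editorial addition: exercising the decorator defined above. ---
# With PROFILE_SWITCH left at True, the call prints a cumulative pstats
# table to stdout and still returns the wrapped function's result.
if __name__ == "__main__":
    @profileable
    def busy_sum(n):
        return sum(i * i for i in range(n))

    print(busy_sum(100000))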
"pstats.Stats",
"cProfile.Profile",
"io.StringIO"
] | [((250, 268), 'cProfile.Profile', 'cProfile.Profile', ([], {}), '()\n', (266, 268), False, 'import cProfile\n'), ((433, 446), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (444, 446), False, 'import io\n'), ((494, 543), 'pstats.Stats', 'pstats.Stats', (['self.profiler'], {'stream': 'string_stream'}), '(self.profiler, stream=string_stream)\n', (506, 543), False, 'import pstats\n')] |
import aiohttp
import aiohttp_jinja2
import pytest
from ddtrace.contrib.aiohttp.middlewares import trace_app
from ddtrace.contrib.aiohttp_jinja2.patch import patch as patch_jinja2
from ddtrace.internal.utils import version
from ddtrace.pin import Pin
from .app.web import setup_app
if version.parse_version(aiohttp.__version__) < (3, 0, 0):
@pytest.fixture
def aiohttp_client(test_client):
return test_client
@pytest.fixture
def app_tracer(tracer, loop):
app = setup_app()
trace_app(app, tracer)
return app, tracer
@pytest.fixture
def patched_app_tracer(app_tracer):
patch_jinja2()
app, tracer = app_tracer
Pin.override(aiohttp_jinja2, tracer=tracer)
return app, tracer
# When Python 3.5 is dropped, rather do:
# yield app, tracer
# unpatch()
@pytest.fixture
def untraced_app_tracer(tracer, loop):
patch_jinja2()
app = setup_app()
Pin.override(aiohttp_jinja2, tracer=tracer)
return app, tracer
# When Python 3.5 is dropped, rather do:
# yield app, tracer
# unpatch()
else:
@pytest.fixture
async def app_tracer(tracer, loop):
app = setup_app()
trace_app(app, tracer)
return app, tracer
@pytest.fixture
async def patched_app_tracer(app_tracer):
patch_jinja2()
app, tracer = app_tracer
Pin.override(aiohttp_jinja2, tracer=tracer)
return app, tracer
# When Python 3.5 is dropped, rather do:
# yield app, tracer
# unpatch()
@pytest.fixture
async def untraced_app_tracer(tracer, loop):
patch_jinja2()
app = setup_app()
Pin.override(aiohttp_jinja2, tracer=tracer)
return app, tracer
# When Python 3.5 is dropped, rather do:
# yield app, tracer
# unpatch()
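# --- Editorial addition: a hedged sketch of a test using these fixtures. ---
# The '/' route and its status are assumptions about .app.web.setup_app;
# `aiohttp_client` comes from pytest-aiohttp (or the shim above on old aiohttp).
#
# async def test_traced_request(patched_app_tracer, aiohttp_client):
#     app, tracer = patched_app_tracer
#     client = await aiohttp_client(app)
#     resp = await client.get('/')
#     assert resp.status == 200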
| [
"ddtrace.contrib.aiohttp_jinja2.patch.patch",
"ddtrace.pin.Pin.override",
"ddtrace.internal.utils.version.parse_version",
"ddtrace.contrib.aiohttp.middlewares.trace_app"
] | [((289, 331), 'ddtrace.internal.utils.version.parse_version', 'version.parse_version', (['aiohttp.__version__'], {}), '(aiohttp.__version__)\n', (310, 331), False, 'from ddtrace.internal.utils import version\n'), ((519, 541), 'ddtrace.contrib.aiohttp.middlewares.trace_app', 'trace_app', (['app', 'tracer'], {}), '(app, tracer)\n', (528, 541), False, 'from ddtrace.contrib.aiohttp.middlewares import trace_app\n'), ((638, 652), 'ddtrace.contrib.aiohttp_jinja2.patch.patch', 'patch_jinja2', ([], {}), '()\n', (650, 652), True, 'from ddtrace.contrib.aiohttp_jinja2.patch import patch as patch_jinja2\n'), ((694, 737), 'ddtrace.pin.Pin.override', 'Pin.override', (['aiohttp_jinja2'], {'tracer': 'tracer'}), '(aiohttp_jinja2, tracer=tracer)\n', (706, 737), False, 'from ddtrace.pin import Pin\n'), ((934, 948), 'ddtrace.contrib.aiohttp_jinja2.patch.patch', 'patch_jinja2', ([], {}), '()\n', (946, 948), True, 'from ddtrace.contrib.aiohttp_jinja2.patch import patch as patch_jinja2\n'), ((983, 1026), 'ddtrace.pin.Pin.override', 'Pin.override', (['aiohttp_jinja2'], {'tracer': 'tracer'}), '(aiohttp_jinja2, tracer=tracer)\n', (995, 1026), False, 'from ddtrace.pin import Pin\n'), ((1254, 1276), 'ddtrace.contrib.aiohttp.middlewares.trace_app', 'trace_app', (['app', 'tracer'], {}), '(app, tracer)\n', (1263, 1276), False, 'from ddtrace.contrib.aiohttp.middlewares import trace_app\n'), ((1379, 1393), 'ddtrace.contrib.aiohttp_jinja2.patch.patch', 'patch_jinja2', ([], {}), '()\n', (1391, 1393), True, 'from ddtrace.contrib.aiohttp_jinja2.patch import patch as patch_jinja2\n'), ((1435, 1478), 'ddtrace.pin.Pin.override', 'Pin.override', (['aiohttp_jinja2'], {'tracer': 'tracer'}), '(aiohttp_jinja2, tracer=tracer)\n', (1447, 1478), False, 'from ddtrace.pin import Pin\n'), ((1681, 1695), 'ddtrace.contrib.aiohttp_jinja2.patch.patch', 'patch_jinja2', ([], {}), '()\n', (1693, 1695), True, 'from ddtrace.contrib.aiohttp_jinja2.patch import patch as patch_jinja2\n'), ((1730, 1773), 'ddtrace.pin.Pin.override', 'Pin.override', (['aiohttp_jinja2'], {'tracer': 'tracer'}), '(aiohttp_jinja2, tracer=tracer)\n', (1742, 1773), False, 'from ddtrace.pin import Pin\n')] |
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField, HiddenField,IntegerField
from wtforms.validators import DataRequired, NumberRange
class ConnectForm(FlaskForm):
'''
The form for connecting to the Arduino
'''
    id = HiddenField('A hidden field')
serial_port = StringField('Connect on port:', validators=[DataRequired()], description = 'Serial port')
name = StringField('Name of the Arduino:', description = 'Name', default = 'Arduino')
submit = SubmitField('Connect')
class UpdateForm(FlaskForm):
'''
    The form for updating the Arduino connection
    '''
    id = HiddenField('A hidden field')
serial_port = StringField('Update to port:', validators=[DataRequired()])
    baud_rate = IntegerField('Baudrate:', validators=[NumberRange(4800, 1000000)])
submit = SubmitField('Update connection')
class SerialWaitForm(FlaskForm):
'''
    The form for setting the wait time between measurements
    '''
    id = HiddenField('A hidden field')
serial_time = IntegerField('Time between measurements (s):', [DataRequired(), NumberRange(2,300)])
submit = SubmitField('Update waiting time.')
class DisconnectForm(FlaskForm):
'''
The form for disconnecting from the Arduino
'''
    id = HiddenField('A hidden field')
submit = SubmitField('Disconnect')
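# --- Editorial addition: a hedged sketch of validating ConnectForm. ---
# The route, template name, and open_serial() helper are hypothetical;
# Flask-WTF also needs SECRET_KEY configured for CSRF protection.
#
# @app.route('/connect', methods=['GET', 'POST'])
# def connect():
#     form = ConnectForm()
#     if form.validate_on_submit():
#         open_serial(form.serial_port.data, form.name.data)  # hypothetical helper
#         return redirect(url_for('index'))
#     return render_template('connect.html', form=form)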
| [
"wtforms.validators.NumberRange",
"wtforms.SubmitField",
"wtforms.StringField",
"wtforms.HiddenField",
"wtforms.validators.DataRequired"
] | [((260, 289), 'wtforms.HiddenField', 'HiddenField', (['"""A hidden field"""'], {}), "('A hidden field')\n", (271, 289), False, 'from wtforms import StringField, SubmitField, HiddenField, IntegerField\n'), ((410, 484), 'wtforms.StringField', 'StringField', (['"""Name of the Arduino:"""'], {'description': '"""Name"""', 'default': '"""Arduino"""'}), "('Name of the Arduino:', description='Name', default='Arduino')\n", (421, 484), False, 'from wtforms import StringField, SubmitField, HiddenField, IntegerField\n'), ((502, 524), 'wtforms.SubmitField', 'SubmitField', (['"""Connect"""'], {}), "('Connect')\n", (513, 524), False, 'from wtforms import StringField, SubmitField, HiddenField, IntegerField\n'), ((623, 652), 'wtforms.HiddenField', 'HiddenField', (['"""A hidden field"""'], {}), "('A hidden field')\n", (634, 652), False, 'from wtforms import StringField, SubmitField, HiddenField, IntegerField\n'), ((828, 860), 'wtforms.SubmitField', 'SubmitField', (['"""Update connection"""'], {}), "('Update connection')\n", (839, 860), False, 'from wtforms import StringField, SubmitField, HiddenField, IntegerField\n'), ((963, 992), 'wtforms.HiddenField', 'HiddenField', (['"""A hidden field"""'], {}), "('A hidden field')\n", (974, 992), False, 'from wtforms import StringField, SubmitField, HiddenField, IntegerField\n'), ((1110, 1145), 'wtforms.SubmitField', 'SubmitField', (['"""Update waiting time."""'], {}), "('Update waiting time.')\n", (1121, 1145), False, 'from wtforms import StringField, SubmitField, HiddenField, IntegerField\n'), ((1253, 1282), 'wtforms.HiddenField', 'HiddenField', (['"""A hidden field"""'], {}), "('A hidden field')\n", (1264, 1282), False, 'from wtforms import StringField, SubmitField, HiddenField, IntegerField\n'), ((1297, 1322), 'wtforms.SubmitField', 'SubmitField', (['"""Disconnect"""'], {}), "('Disconnect')\n", (1308, 1322), False, 'from wtforms import StringField, SubmitField, HiddenField, IntegerField\n'), ((1060, 1074), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (1072, 1074), False, 'from wtforms.validators import DataRequired, NumberRange\n'), ((1076, 1095), 'wtforms.validators.NumberRange', 'NumberRange', (['(2)', '(300)'], {}), '(2, 300)\n', (1087, 1095), False, 'from wtforms.validators import DataRequired, NumberRange\n'), ((353, 367), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (365, 367), False, 'from wtforms.validators import DataRequired, NumberRange\n'), ((715, 729), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (727, 729), False, 'from wtforms.validators import DataRequired, NumberRange\n'), ((787, 813), 'wtforms.validators.NumberRange', 'NumberRange', (['(4800)', '(1000000)'], {}), '(4800, 1000000)\n', (798, 813), False, 'from wtforms.validators import DataRequired, NumberRange\n')] |
import pandas as pd
import numpy as np
import os
import tensorflow as tf
####### STUDENTS FILL THIS OUT ######
#Question 3
def reduce_dimension_ndc(df, ndc_df):
'''
df: pandas dataframe, input dataset
ndc_df: pandas dataframe, drug code dataset used for mapping in generic names
return:
df: pandas dataframe, output dataframe with joined generic drug name
'''
df1 = pd.merge(df, ndc_df[['Proprietary Name', 'NDC_Code']], left_on='ndc_code', right_on='NDC_Code')
df1['generic_drug_name'] = df1['Proprietary Name']
df1 = df1.drop(['NDC_Code', 'Proprietary Name'], axis=1)
return df1
#Question 4
def select_first_encounter(df):
'''
df: pandas dataframe, dataframe with all encounters
return:
first_encounter_df: pandas dataframe, dataframe with only the first encounter for a given patient
'''
first_encounter_df = df.copy()
first_encounter_df = first_encounter_df.sort_values('encounter_id')
first_encounter_df = (first_encounter_df.drop_duplicates(subset=['encounter_id'], keep='first').drop_duplicates(subset=['patient_nbr'], keep='first'))
return first_encounter_df
#Question 6
def patient_dataset_splitter(df, patient_key='patient_nbr'):
'''
df: pandas dataframe, input dataset that will be split
patient_key: string, column that is the patient id
return:
- train: pandas dataframe,
- validation: pandas dataframe,
- test: pandas dataframe,
'''
df = pd.DataFrame(df)
df = df.iloc[np.random.permutation(len(df))]
unique_values = df[patient_key].unique()
total_values = len(unique_values)
    sample_size_60 = round(total_values * 0.6)
    sample_size_80 = round(total_values * 0.8)
train = df[df[patient_key].isin(unique_values[:sample_size_60])].reset_index(drop=True)
validation = df[df[patient_key].isin(unique_values[sample_size_60:sample_size_80])].reset_index(drop=True)
test = df[df[patient_key].isin(unique_values[sample_size_80:])].reset_index(drop=True)
return train, validation, test
#Question 7
def create_tf_categorical_feature_cols(categorical_col_list,
vocab_dir='./diabetes_vocab/'):
'''
categorical_col_list: list, categorical field list that will be transformed with TF feature column
vocab_dir: string, the path where the vocabulary text files are located
return:
output_tf_list: list of TF feature columns
'''
output_tf_list = []
for c in categorical_col_list:
vocab_file_path = os.path.join(vocab_dir, c + "_vocab.txt")
'''
Which TF function allows you to read from a text file and create a categorical feature
You can use a pattern like this below...
tf_categorical_feature_column = tf.feature_column.......
'''
tf_categorical_feature_column = tf.feature_column.categorical_column_with_vocabulary_file(
key=c, vocabulary_file = vocab_file_path, num_oov_buckets=1)
tf_categorical_feature_column = tf.feature_column.indicator_column(tf_categorical_feature_column)
output_tf_list.append(tf_categorical_feature_column)
return output_tf_list
#Question 8
import functools
def normalize_numeric_with_zscore(col, mean, std):
'''
This function can be used in conjunction with the tf feature column for normalization
'''
return (col - mean)/std
def create_tf_numeric_feature(col, MEAN, STD, default_value=0):
'''
col: string, input numerical column name
MEAN: the mean for the column in the training data
STD: the standard deviation for the column in the training data
default_value: the value that will be used for imputing the field
return:
tf_numeric_feature: tf feature column representation of the input field
'''
normalizer = functools.partial(normalize_numeric_with_zscore, mean=MEAN, std=STD)
tf_numeric_feature = tf.feature_column.numeric_column(key=col, default_value = default_value, normalizer_fn=normalizer, dtype=tf.float64)
return tf_numeric_feature
#Question 9
def get_mean_std_from_preds(diabetes_yhat):
'''
diabetes_yhat: TF Probability prediction object
'''
    # Hedged completion: assumes diabetes_yhat behaves like a TF Probability
    # distribution, which exposes mean() and stddev() tensors.
    m = diabetes_yhat.mean()
    s = diabetes_yhat.stddev()
return m, s
# Question 10
def get_student_binary_prediction(df, col):
'''
df: pandas dataframe prediction output dataframe
col: str, probability mean prediction field
return:
student_binary_prediction: pandas dataframe converting input to flattened numpy array and binary labels
'''
    # Hedged completion: the >= 5 day threshold is an assumption about the
    # exercise's target definition, not taken from the original source.
    student_binary_prediction = df[col].apply(lambda x: 1 if x >= 5 else 0).values
    return student_binary_prediction
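# --- Editorial addition: wiring a numeric feature column into Keras. ---
# A minimal sketch; the column name and MEAN/STD values are illustrative
# assumptions, not taken from the project's training data.
if __name__ == '__main__':
    example_col = create_tf_numeric_feature('num_lab_procedures', MEAN=43.0, STD=19.0)
    feature_layer = tf.keras.layers.DenseFeatures([example_col])
    batch = {'num_lab_procedures': tf.constant([[41.0], [62.0]], dtype=tf.float64)}
    print(feature_layer(batch))  # z-score-normalized values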
| [
"tensorflow.feature_column.categorical_column_with_vocabulary_file",
"pandas.merge",
"os.path.join",
"tensorflow.feature_column.numeric_column",
"tensorflow.feature_column.indicator_column",
"functools.partial",
"pandas.DataFrame"
] | [((399, 498), 'pandas.merge', 'pd.merge', (['df', "ndc_df[['Proprietary Name', 'NDC_Code']]"], {'left_on': '"""ndc_code"""', 'right_on': '"""NDC_Code"""'}), "(df, ndc_df[['Proprietary Name', 'NDC_Code']], left_on='ndc_code',\n right_on='NDC_Code')\n", (407, 498), True, 'import pandas as pd\n'), ((1480, 1496), 'pandas.DataFrame', 'pd.DataFrame', (['df'], {}), '(df)\n', (1492, 1496), True, 'import pandas as pd\n'), ((3844, 3912), 'functools.partial', 'functools.partial', (['normalize_numeric_with_zscore'], {'mean': 'MEAN', 'std': 'STD'}), '(normalize_numeric_with_zscore, mean=MEAN, std=STD)\n', (3861, 3912), False, 'import functools\n'), ((3938, 4056), 'tensorflow.feature_column.numeric_column', 'tf.feature_column.numeric_column', ([], {'key': 'col', 'default_value': 'default_value', 'normalizer_fn': 'normalizer', 'dtype': 'tf.float64'}), '(key=col, default_value=default_value,\n normalizer_fn=normalizer, dtype=tf.float64)\n', (3970, 4056), True, 'import tensorflow as tf\n'), ((2548, 2589), 'os.path.join', 'os.path.join', (['vocab_dir', "(c + '_vocab.txt')"], {}), "(vocab_dir, c + '_vocab.txt')\n", (2560, 2589), False, 'import os\n'), ((2865, 2985), 'tensorflow.feature_column.categorical_column_with_vocabulary_file', 'tf.feature_column.categorical_column_with_vocabulary_file', ([], {'key': 'c', 'vocabulary_file': 'vocab_file_path', 'num_oov_buckets': '(1)'}), '(key=c,\n vocabulary_file=vocab_file_path, num_oov_buckets=1)\n', (2922, 2985), True, 'import tensorflow as tf\n'), ((3046, 3111), 'tensorflow.feature_column.indicator_column', 'tf.feature_column.indicator_column', (['tf_categorical_feature_column'], {}), '(tf_categorical_feature_column)\n', (3080, 3111), True, 'import tensorflow as tf\n')] |
'''
This library is used to incorporate nucleus information into cell segmentation post-processing.
'''
import numpy as np
def cell_prob_with_nucleus(cell, nucleus):
'''
This function is used to figure out whether one region is cell or empty hole (without nucleus)
    :param cell: segmentation results with different labels
:param nucleus: nucleus RawMemb image (after resize)
:return cell: cells without cavity
    :return hole: cavities inside the embryo
'''
labels = np.unique(cell).tolist()
labels.remove(0)
hole = np.zeros_like(cell, dtype=np.uint8)
for label in labels:
one_cell_mask = (cell == label)
        # After checking all intensity values, a segmented region should be
        # regarded as empty when its intensity is lower than 100; in practice
        # most such regions sum to exactly 0, so a zero-sum check suffices.
if (nucleus[one_cell_mask].sum() == 0):
cell[one_cell_mask] = 0
hole[one_cell_mask] = 1
return cell, hole
| [
"numpy.zeros_like",
"numpy.unique"
] | [((496, 531), 'numpy.zeros_like', 'np.zeros_like', (['cell'], {'dtype': 'np.uint8'}), '(cell, dtype=np.uint8)\n', (509, 531), True, 'import numpy as np\n'), ((439, 454), 'numpy.unique', 'np.unique', (['cell'], {}), '(cell)\n', (448, 454), True, 'import numpy as np\n')] |
import calendar
import datetime
import re
import sys
from dateutil.relativedelta import relativedelta
import gam
from gam.var import *
from gam import controlflow
from gam import display
from gam import gapi
from gam import utils
from gam.gapi.directory import orgunits as gapi_directory_orgunits
def build():
return gam.buildGAPIObject('reports')
REPORT_CHOICE_MAP = {
'access': 'access_transparency',
'accesstransparency': 'access_transparency',
'calendars': 'calendar',
'customers': 'customer',
'doc': 'drive',
'docs': 'drive',
'domain': 'customer',
'enterprisegroups': 'groups_enterprise',
'google+': 'gplus',
'group': 'groups',
'groupsenterprise': 'groups_enterprise',
'hangoutsmeet': 'meet',
'logins': 'login',
'oauthtoken': 'token',
'tokens': 'token',
'usage': 'usage',
'usageparameters': 'usageparameters',
'users': 'user',
'useraccounts': 'user_accounts',
}
def showUsageParameters():
rep = build()
throw_reasons = [
gapi.errors.ErrorReason.INVALID, gapi.errors.ErrorReason.BAD_REQUEST
]
todrive = False
if len(sys.argv) == 3:
controlflow.missing_argument_exit('user or customer',
'report usageparameters')
report = sys.argv[3].lower()
titles = ['parameter']
if report == 'customer':
endpoint = rep.customerUsageReports()
kwargs = {}
elif report == 'user':
endpoint = rep.userUsageReport()
kwargs = {'userKey': gam._get_admin_email()}
else:
controlflow.expected_argument_exit('usageparameters',
['user', 'customer'], report)
customerId = GC_Values[GC_CUSTOMER_ID]
if customerId == MY_CUSTOMER:
customerId = None
tryDate = datetime.date.today().strftime(YYYYMMDD_FORMAT)
all_parameters = set()
i = 4
while i < len(sys.argv):
myarg = sys.argv[i].lower().replace('_', '')
if myarg == 'todrive':
todrive = True
i += 1
else:
controlflow.invalid_argument_exit(sys.argv[i],
'gam report usageparameters')
fullDataRequired = ['all']
while True:
try:
result = gapi.call(endpoint,
'get',
throw_reasons=throw_reasons,
date=tryDate,
customerId=customerId,
fields='warnings,usageReports(parameters(name))',
**kwargs)
warnings = result.get('warnings', [])
usage = result.get('usageReports')
has_reports = bool(usage)
fullData, tryDate = _check_full_data_available(
warnings, tryDate, fullDataRequired, has_reports)
if fullData < 0:
print('No usage parameters available.')
sys.exit(1)
if has_reports:
for parameter in usage[0]['parameters']:
name = parameter.get('name')
if name:
all_parameters.add(name)
if fullData == 1:
break
except gapi.errors.GapiInvalidError as e:
tryDate = _adjust_date(str(e))
csvRows = []
for parameter in sorted(all_parameters):
csvRows.append({'parameter': parameter})
display.write_csv_file(csvRows, titles,
f'{report.capitalize()} Report Usage Parameters',
todrive)
REPORTS_PARAMETERS_SIMPLE_TYPES = [
'intValue', 'boolValue', 'datetimeValue', 'stringValue'
]
def showUsage():
rep = build()
throw_reasons = [
gapi.errors.ErrorReason.INVALID, gapi.errors.ErrorReason.BAD_REQUEST
]
todrive = False
if len(sys.argv) == 3:
controlflow.missing_argument_exit('user or customer', 'report usage')
report = sys.argv[3].lower()
titles = ['date']
if report == 'customer':
endpoint = rep.customerUsageReports()
kwargs = [{}]
elif report == 'user':
endpoint = rep.userUsageReport()
kwargs = [{'userKey': 'all'}]
titles.append('user')
else:
controlflow.expected_argument_exit('usage', ['user', 'customer'],
report)
customerId = GC_Values[GC_CUSTOMER_ID]
if customerId == MY_CUSTOMER:
customerId = None
parameters = []
start_date = end_date = orgUnitId = None
skip_day_numbers = []
skip_dates = set()
one_day = datetime.timedelta(days=1)
i = 4
while i < len(sys.argv):
myarg = sys.argv[i].lower().replace('_', '')
if myarg == 'startdate':
start_date = utils.get_yyyymmdd(sys.argv[i + 1],
returnDateTime=True)
i += 2
elif myarg == 'enddate':
end_date = utils.get_yyyymmdd(sys.argv[i + 1], returnDateTime=True)
i += 2
elif myarg == 'todrive':
todrive = True
i += 1
elif myarg in ['fields', 'parameters']:
parameters = sys.argv[i + 1].split(',')
i += 2
elif myarg == 'skipdates':
for skip in sys.argv[i + 1].split(','):
if skip.find(':') == -1:
skip_dates.add(utils.get_yyyymmdd(skip,
returnDateTime=True))
else:
skip_start, skip_end = skip.split(':', 1)
skip_start = utils.get_yyyymmdd(skip_start,
returnDateTime=True)
skip_end = utils.get_yyyymmdd(skip_end, returnDateTime=True)
while skip_start <= skip_end:
skip_dates.add(skip_start)
skip_start += one_day
i += 2
elif myarg == 'skipdaysofweek':
skipdaynames = sys.argv[i + 1].split(',')
dow = [d.lower() for d in calendar.day_abbr]
skip_day_numbers = [dow.index(d) for d in skipdaynames if d in dow]
i += 2
elif report == 'user' and myarg in ['orgunit', 'org', 'ou']:
_, orgUnitId = gapi_directory_orgunits.getOrgUnitId(sys.argv[i + 1])
i += 2
elif report == 'user' and myarg in usergroup_types:
users = gam.getUsersToModify(myarg, sys.argv[i + 1])
kwargs = [{'userKey': user} for user in users]
i += 2
else:
controlflow.invalid_argument_exit(sys.argv[i],
f'gam report usage {report}')
if parameters:
titles.extend(parameters)
parameters = ','.join(parameters)
else:
parameters = None
if not end_date:
end_date = datetime.datetime.now()
if not start_date:
start_date = end_date + relativedelta(months=-1)
if orgUnitId:
for kw in kwargs:
kw['orgUnitID'] = orgUnitId
usage_on_date = start_date
start_date = usage_on_date.strftime(YYYYMMDD_FORMAT)
usage_end_date = end_date
end_date = end_date.strftime(YYYYMMDD_FORMAT)
start_use_date = end_use_date = None
csvRows = []
while usage_on_date <= usage_end_date:
if usage_on_date.weekday() in skip_day_numbers or \
usage_on_date in skip_dates:
usage_on_date += one_day
continue
use_date = usage_on_date.strftime(YYYYMMDD_FORMAT)
usage_on_date += one_day
try:
for kwarg in kwargs:
try:
usage = gapi.get_all_pages(endpoint,
'get',
'usageReports',
throw_reasons=throw_reasons,
customerId=customerId,
date=use_date,
parameters=parameters,
**kwarg)
except gapi.errors.GapiBadRequestError:
continue
for entity in usage:
row = {'date': use_date}
if 'userEmail' in entity['entity']:
row['user'] = entity['entity']['userEmail']
for item in entity.get('parameters', []):
if 'name' not in item:
continue
name = item['name']
if name == 'cros:device_version_distribution':
for cros_ver in item['msgValue']:
v = cros_ver['version_number']
column_name = f'cros:num_devices_chrome_{v}'
if column_name not in titles:
titles.append(column_name)
row[column_name] = cros_ver['num_devices']
else:
if not name in titles:
titles.append(name)
for ptype in REPORTS_PARAMETERS_SIMPLE_TYPES:
if ptype in item:
row[name] = item[ptype]
break
else:
row[name] = ''
if not start_use_date:
start_use_date = use_date
end_use_date = use_date
csvRows.append(row)
except gapi.errors.GapiInvalidError as e:
display.print_warning(str(e))
break
if start_use_date:
report_name = f'{report.capitalize()} Usage Report - {start_use_date}:{end_use_date}'
else:
report_name = f'{report.capitalize()} Usage Report - {start_date}:{end_date} - No Data'
display.write_csv_file(csvRows, titles, report_name, todrive)
def showReport():
rep = build()
throw_reasons = [gapi.errors.ErrorReason.INVALID]
report = sys.argv[2].lower()
report = REPORT_CHOICE_MAP.get(report.replace('_', ''), report)
if report == 'usage':
showUsage()
return
if report == 'usageparameters':
showUsageParameters()
return
valid_apps = gapi.get_enum_values_minus_unspecified(
rep._rootDesc['resources']['activities']['methods']['list']
['parameters']['applicationName']['enum']) + ['customer', 'user']
if report not in valid_apps:
controlflow.expected_argument_exit('report',
', '.join(sorted(valid_apps)),
report)
customerId = GC_Values[GC_CUSTOMER_ID]
if customerId == MY_CUSTOMER:
customerId = None
filters = parameters = actorIpAddress = groupIdFilter = startTime = endTime = eventName = orgUnitId = None
tryDate = datetime.date.today().strftime(YYYYMMDD_FORMAT)
to_drive = False
userKey = 'all'
fullDataRequired = None
i = 3
while i < len(sys.argv):
myarg = sys.argv[i].lower()
if myarg == 'date':
tryDate = utils.get_yyyymmdd(sys.argv[i + 1])
i += 2
elif myarg in ['orgunit', 'org', 'ou']:
_, orgUnitId = gapi_directory_orgunits.getOrgUnitId(sys.argv[i + 1])
i += 2
elif myarg == 'fulldatarequired':
fullDataRequired = []
fdr = sys.argv[i + 1].lower()
if fdr and fdr == 'all':
fullDataRequired = 'all'
else:
fullDataRequired = fdr.replace(',', ' ').split()
i += 2
elif myarg == 'start':
startTime = utils.get_time_or_delta_from_now(sys.argv[i + 1])
i += 2
elif myarg == 'end':
endTime = utils.get_time_or_delta_from_now(sys.argv[i + 1])
i += 2
elif myarg == 'event':
eventName = sys.argv[i + 1]
i += 2
elif myarg == 'user':
userKey = sys.argv[i + 1].lower()
if userKey != 'all':
userKey = gam.normalizeEmailAddressOrUID(sys.argv[i + 1])
i += 2
elif myarg in ['filter', 'filters']:
filters = sys.argv[i + 1]
i += 2
elif myarg in ['fields', 'parameters']:
parameters = sys.argv[i + 1]
i += 2
elif myarg == 'ip':
actorIpAddress = sys.argv[i + 1]
i += 2
elif myarg == 'groupidfilter':
groupIdFilter = sys.argv[i + 1]
i += 2
elif myarg == 'todrive':
to_drive = True
i += 1
else:
controlflow.invalid_argument_exit(sys.argv[i], 'gam report')
if report == 'user':
while True:
try:
one_page = gapi.call(rep.userUsageReport(),
'get',
throw_reasons=throw_reasons,
date=tryDate,
userKey=userKey,
customerId=customerId,
orgUnitID=orgUnitId,
fields='warnings,usageReports',
maxResults=1)
warnings = one_page.get('warnings', [])
has_reports = bool(one_page.get('usageReports'))
fullData, tryDate = _check_full_data_available(
warnings, tryDate, fullDataRequired, has_reports)
if fullData < 0:
print('No user report available.')
sys.exit(1)
if fullData == 0:
continue
page_message = gapi.got_total_items_msg('Users', '...\n')
usage = gapi.get_all_pages(rep.userUsageReport(),
'get',
'usageReports',
page_message=page_message,
throw_reasons=throw_reasons,
date=tryDate,
userKey=userKey,
customerId=customerId,
orgUnitID=orgUnitId,
filters=filters,
parameters=parameters)
break
except gapi.errors.GapiInvalidError as e:
tryDate = _adjust_date(str(e))
if not usage:
print('No user report available.')
sys.exit(1)
titles = ['email', 'date']
csvRows = []
for user_report in usage:
if 'entity' not in user_report:
continue
row = {'email': user_report['entity']['userEmail'], 'date': tryDate}
for item in user_report.get('parameters', []):
if 'name' not in item:
continue
name = item['name']
if not name in titles:
titles.append(name)
for ptype in REPORTS_PARAMETERS_SIMPLE_TYPES:
if ptype in item:
row[name] = item[ptype]
break
else:
row[name] = ''
csvRows.append(row)
display.write_csv_file(csvRows, titles, f'User Reports - {tryDate}',
to_drive)
elif report == 'customer':
while True:
try:
first_page = gapi.call(rep.customerUsageReports(),
'get',
throw_reasons=throw_reasons,
customerId=customerId,
date=tryDate,
fields='warnings,usageReports')
warnings = first_page.get('warnings', [])
has_reports = bool(first_page.get('usageReports'))
fullData, tryDate = _check_full_data_available(
warnings, tryDate, fullDataRequired, has_reports)
if fullData < 0:
print('No customer report available.')
sys.exit(1)
if fullData == 0:
continue
usage = gapi.get_all_pages(rep.customerUsageReports(),
'get',
'usageReports',
throw_reasons=throw_reasons,
customerId=customerId,
date=tryDate,
parameters=parameters)
break
except gapi.errors.GapiInvalidError as e:
tryDate = _adjust_date(str(e))
if not usage:
print('No customer report available.')
sys.exit(1)
titles = ['name', 'value', 'client_id']
csvRows = []
auth_apps = list()
for item in usage[0]['parameters']:
if 'name' not in item:
continue
name = item['name']
if 'intValue' in item:
value = item['intValue']
elif 'msgValue' in item:
if name == 'accounts:authorized_apps':
for subitem in item['msgValue']:
app = {}
for an_item in subitem:
if an_item == 'client_name':
app['name'] = 'App: ' + \
subitem[an_item].replace('\n', '\\n')
elif an_item == 'num_users':
app['value'] = f'{subitem[an_item]} users'
elif an_item == 'client_id':
app['client_id'] = subitem[an_item]
auth_apps.append(app)
continue
values = []
for subitem in item['msgValue']:
if 'count' in subitem:
mycount = myvalue = None
for key, value in list(subitem.items()):
if key == 'count':
mycount = value
else:
myvalue = value
if mycount and myvalue:
values.append(f'{myvalue}:{mycount}')
value = ' '.join(values)
elif 'version_number' in subitem \
and 'num_devices' in subitem:
values.append(f'{subitem["version_number"]}:'
f'{subitem["num_devices"]}')
else:
continue
value = ' '.join(sorted(values, reverse=True))
csvRows.append({'name': name, 'value': value})
for app in auth_apps: # put apps at bottom
csvRows.append(app)
display.write_csv_file(csvRows,
titles,
f'Customer Report - {tryDate}',
todrive=to_drive)
else:
page_message = gapi.got_total_items_msg('Activities', '...\n')
activities = gapi.get_all_pages(rep.activities(),
'list',
'items',
page_message=page_message,
applicationName=report,
userKey=userKey,
customerId=customerId,
actorIpAddress=actorIpAddress,
startTime=startTime,
endTime=endTime,
eventName=eventName,
filters=filters,
orgUnitID=orgUnitId,
groupIdFilter=groupIdFilter)
if activities:
titles = ['name']
csvRows = []
for activity in activities:
events = activity['events']
del activity['events']
activity_row = utils.flatten_json(activity)
purge_parameters = True
for event in events:
for item in event.get('parameters', []):
if set(item) == {'value', 'name'}:
event[item['name']] = item['value']
elif set(item) == {'intValue', 'name'}:
if item['name'] in ['start_time', 'end_time']:
val = item.get('intValue')
if val is not None:
val = int(val)
if val >= 62135683200:
event[item['name']] = \
datetime.datetime.fromtimestamp(
val-62135683200).isoformat()
else:
event[item['name']] = item['intValue']
elif set(item) == {'boolValue', 'name'}:
event[item['name']] = item['boolValue']
elif set(item) == {'multiValue', 'name'}:
event[item['name']] = ' '.join(item['multiValue'])
elif item['name'] == 'scope_data':
parts = {}
for message in item['multiMessageValue']:
for mess in message['parameter']:
value = mess.get(
'value',
' '.join(mess.get('multiValue', [])))
parts[mess['name']] = parts.get(
mess['name'], []) + [value]
for part, v in parts.items():
if part == 'scope_name':
part = 'scope'
event[part] = ' '.join(v)
else:
purge_parameters = False
if purge_parameters:
event.pop('parameters', None)
row = utils.flatten_json(event)
row.update(activity_row)
for item in row:
if item not in titles:
titles.append(item)
csvRows.append(row)
display.sort_csv_titles([
'name',
], titles)
display.write_csv_file(csvRows, titles,
f'{report.capitalize()} Activity Report',
to_drive)
def _adjust_date(errMsg):
match_date = re.match(
'Data for dates later than (.*) is not yet '
'available. Please check back later', errMsg)
if not match_date:
match_date = re.match('Start date can not be later than (.*)', errMsg)
if not match_date:
controlflow.system_error_exit(4, errMsg)
return str(match_date.group(1))
def _check_full_data_available(warnings, tryDate, fullDataRequired,
has_reports):
one_day = datetime.timedelta(days=1)
tryDateTime = datetime.datetime.strptime(tryDate, YYYYMMDD_FORMAT)
# move to day before if we don't have at least one usageReport
if not has_reports:
tryDateTime -= one_day
return (0, tryDateTime.strftime(YYYYMMDD_FORMAT))
for warning in warnings:
if warning['code'] == 'PARTIAL_DATA_AVAILABLE':
for app in warning['data']:
if app['key'] == 'application' and \
app['value'] != 'docs' and \
fullDataRequired is not None and \
(fullDataRequired == 'all' or app['value'] in fullDataRequired):
tryDateTime -= one_day
return (0, tryDateTime.strftime(YYYYMMDD_FORMAT))
elif warning['code'] == 'DATA_NOT_AVAILABLE':
for app in warning['data']:
if app['key'] == 'application' and \
app['value'] != 'docs' and \
(not fullDataRequired or app['value'] in fullDataRequired):
return (-1, tryDate)
return (1, tryDate)
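# --- Editorial addition: a hedged example of the back-off helper above. ---
# The error text mirrors the regex in _adjust_date(); illustrative only.
#
#     _adjust_date('Data for dates later than 2021-06-01 is not yet '
#                  'available. Please check back later')
#     # -> '2021-06-01', which the caller retries as the new tryDate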
| [
"dateutil.relativedelta.relativedelta",
"gam.gapi.got_total_items_msg",
"sys.exit",
"datetime.timedelta",
"gam.gapi.directory.orgunits.getOrgUnitId",
"gam.utils.get_yyyymmdd",
"gam.utils.get_time_or_delta_from_now",
"gam.gapi.get_enum_values_minus_unspecified",
"gam.controlflow.invalid_argument_exit... | [((325, 355), 'gam.buildGAPIObject', 'gam.buildGAPIObject', (['"""reports"""'], {}), "('reports')\n", (344, 355), False, 'import gam\n'), ((4653, 4679), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (4671, 4679), False, 'import datetime\n'), ((10144, 10205), 'gam.display.write_csv_file', 'display.write_csv_file', (['csvRows', 'titles', 'report_name', 'todrive'], {}), '(csvRows, titles, report_name, todrive)\n', (10166, 10205), False, 'from gam import display\n'), ((23709, 23815), 're.match', 're.match', (['"""Data for dates later than (.*) is not yet available. Please check back later"""', 'errMsg'], {}), "(\n 'Data for dates later than (.*) is not yet available. Please check back later'\n , errMsg)\n", (23717, 23815), False, 'import re\n'), ((24165, 24191), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (24183, 24191), False, 'import datetime\n'), ((24210, 24262), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['tryDate', 'YYYYMMDD_FORMAT'], {}), '(tryDate, YYYYMMDD_FORMAT)\n', (24236, 24262), False, 'import datetime\n'), ((1160, 1239), 'gam.controlflow.missing_argument_exit', 'controlflow.missing_argument_exit', (['"""user or customer"""', '"""report usageparameters"""'], {}), "('user or customer', 'report usageparameters')\n", (1193, 1239), False, 'from gam import controlflow\n'), ((3929, 3998), 'gam.controlflow.missing_argument_exit', 'controlflow.missing_argument_exit', (['"""user or customer"""', '"""report usage"""'], {}), "('user or customer', 'report usage')\n", (3962, 3998), False, 'from gam import controlflow\n'), ((6954, 6977), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (6975, 6977), False, 'import datetime\n'), ((10558, 10703), 'gam.gapi.get_enum_values_minus_unspecified', 'gapi.get_enum_values_minus_unspecified', (["rep._rootDesc['resources']['activities']['methods']['list']['parameters'][\n 'applicationName']['enum']"], {}), "(rep._rootDesc['resources'][\n 'activities']['methods']['list']['parameters']['applicationName']['enum'])\n", (10596, 10703), False, 'from gam import gapi\n'), ((15779, 15857), 'gam.display.write_csv_file', 'display.write_csv_file', (['csvRows', 'titles', 'f"""User Reports - {tryDate}"""', 'to_drive'], {}), "(csvRows, titles, f'User Reports - {tryDate}', to_drive)\n", (15801, 15857), False, 'from gam import display\n'), ((23870, 23927), 're.match', 're.match', (['"""Start date can not be later than (.*)"""', 'errMsg'], {}), "('Start date can not be later than (.*)', errMsg)\n", (23878, 23927), False, 'import re\n'), ((23959, 23999), 'gam.controlflow.system_error_exit', 'controlflow.system_error_exit', (['(4)', 'errMsg'], {}), '(4, errMsg)\n', (23988, 23999), False, 'from gam import controlflow\n'), ((1576, 1663), 'gam.controlflow.expected_argument_exit', 'controlflow.expected_argument_exit', (['"""usageparameters"""', "['user', 'customer']", 'report'], {}), "('usageparameters', ['user', 'customer'],\n report)\n", (1610, 1663), False, 'from gam import controlflow\n'), ((1820, 1841), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (1839, 1841), False, 'import datetime\n'), ((2090, 2166), 'gam.controlflow.invalid_argument_exit', 'controlflow.invalid_argument_exit', (['sys.argv[i]', '"""gam report usageparameters"""'], {}), "(sys.argv[i], 'gam report usageparameters')\n", (2123, 2166), False, 'from gam import controlflow\n'), ((2294, 2454), 'gam.gapi.call', 'gapi.call', (['endpoint', 
'"""get"""'], {'throw_reasons': 'throw_reasons', 'date': 'tryDate', 'customerId': 'customerId', 'fields': '"""warnings,usageReports(parameters(name))"""'}), "(endpoint, 'get', throw_reasons=throw_reasons, date=tryDate,\n customerId=customerId, fields='warnings,usageReports(parameters(name))',\n **kwargs)\n", (2303, 2454), False, 'from gam import gapi\n'), ((4305, 4378), 'gam.controlflow.expected_argument_exit', 'controlflow.expected_argument_exit', (['"""usage"""', "['user', 'customer']", 'report'], {}), "('usage', ['user', 'customer'], report)\n", (4339, 4378), False, 'from gam import controlflow\n'), ((4830, 4886), 'gam.utils.get_yyyymmdd', 'utils.get_yyyymmdd', (['sys.argv[i + 1]'], {'returnDateTime': '(True)'}), '(sys.argv[i + 1], returnDateTime=True)\n', (4848, 4886), False, 'from gam import utils\n'), ((7033, 7057), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(-1)'}), '(months=-1)\n', (7046, 7057), False, 'from dateutil.relativedelta import relativedelta\n'), ((11179, 11200), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (11198, 11200), False, 'import datetime\n'), ((11421, 11456), 'gam.utils.get_yyyymmdd', 'utils.get_yyyymmdd', (['sys.argv[i + 1]'], {}), '(sys.argv[i + 1])\n', (11439, 11456), False, 'from gam import utils\n'), ((15010, 15021), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (15018, 15021), False, 'import sys\n'), ((19592, 19685), 'gam.display.write_csv_file', 'display.write_csv_file', (['csvRows', 'titles', 'f"""Customer Report - {tryDate}"""'], {'todrive': 'to_drive'}), "(csvRows, titles, f'Customer Report - {tryDate}',\n todrive=to_drive)\n", (19614, 19685), False, 'from gam import display\n'), ((19808, 19855), 'gam.gapi.got_total_items_msg', 'gapi.got_total_items_msg', (['"""Activities"""', '"""...\n"""'], {}), "('Activities', '...\\n')\n", (19832, 19855), False, 'from gam import gapi\n'), ((1534, 1556), 'gam._get_admin_email', 'gam._get_admin_email', ([], {}), '()\n', (1554, 1556), False, 'import gam\n'), ((2995, 3006), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3003, 3006), False, 'import sys\n'), ((5006, 5062), 'gam.utils.get_yyyymmdd', 'utils.get_yyyymmdd', (['sys.argv[i + 1]'], {'returnDateTime': '(True)'}), '(sys.argv[i + 1], returnDateTime=True)\n', (5024, 5062), False, 'from gam import utils\n'), ((11551, 11604), 'gam.gapi.directory.orgunits.getOrgUnitId', 'gapi_directory_orgunits.getOrgUnitId', (['sys.argv[i + 1]'], {}), '(sys.argv[i + 1])\n', (11587, 11604), True, 'from gam.gapi.directory import orgunits as gapi_directory_orgunits\n'), ((14073, 14115), 'gam.gapi.got_total_items_msg', 'gapi.got_total_items_msg', (['"""Users"""', '"""...\n"""'], {}), "('Users', '...\\n')\n", (14097, 14115), False, 'from gam import gapi\n'), ((17419, 17430), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (17427, 17430), False, 'import sys\n'), ((23417, 23458), 'gam.display.sort_csv_titles', 'display.sort_csv_titles', (["['name']", 'titles'], {}), "(['name'], titles)\n", (23440, 23458), False, 'from gam import display\n'), ((7756, 7916), 'gam.gapi.get_all_pages', 'gapi.get_all_pages', (['endpoint', '"""get"""', '"""usageReports"""'], {'throw_reasons': 'throw_reasons', 'customerId': 'customerId', 'date': 'use_date', 'parameters': 'parameters'}), "(endpoint, 'get', 'usageReports', throw_reasons=\n throw_reasons, customerId=customerId, date=use_date, parameters=\n parameters, **kwarg)\n", (7774, 7916), False, 'from gam import gapi\n'), ((13967, 13978), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (13975, 13978), False, 
'import sys\n'), ((20931, 20959), 'gam.utils.flatten_json', 'utils.flatten_json', (['activity'], {}), '(activity)\n', (20949, 20959), False, 'from gam import utils\n'), ((11977, 12026), 'gam.utils.get_time_or_delta_from_now', 'utils.get_time_or_delta_from_now', (['sys.argv[i + 1]'], {}), '(sys.argv[i + 1])\n', (12009, 12026), False, 'from gam import utils\n'), ((16695, 16706), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (16703, 16706), False, 'import sys\n'), ((23162, 23187), 'gam.utils.flatten_json', 'utils.flatten_json', (['event'], {}), '(event)\n', (23180, 23187), False, 'from gam import utils\n'), ((12097, 12146), 'gam.utils.get_time_or_delta_from_now', 'utils.get_time_or_delta_from_now', (['sys.argv[i + 1]'], {}), '(sys.argv[i + 1])\n', (12129, 12146), False, 'from gam import utils\n'), ((5661, 5712), 'gam.utils.get_yyyymmdd', 'utils.get_yyyymmdd', (['skip_start'], {'returnDateTime': '(True)'}), '(skip_start, returnDateTime=True)\n', (5679, 5712), False, 'from gam import utils\n'), ((5796, 5845), 'gam.utils.get_yyyymmdd', 'utils.get_yyyymmdd', (['skip_end'], {'returnDateTime': '(True)'}), '(skip_end, returnDateTime=True)\n', (5814, 5845), False, 'from gam import utils\n'), ((6358, 6411), 'gam.gapi.directory.orgunits.getOrgUnitId', 'gapi_directory_orgunits.getOrgUnitId', (['sys.argv[i + 1]'], {}), '(sys.argv[i + 1])\n', (6394, 6411), True, 'from gam.gapi.directory import orgunits as gapi_directory_orgunits\n'), ((5443, 5488), 'gam.utils.get_yyyymmdd', 'utils.get_yyyymmdd', (['skip'], {'returnDateTime': '(True)'}), '(skip, returnDateTime=True)\n', (5461, 5488), False, 'from gam import utils\n'), ((6511, 6555), 'gam.getUsersToModify', 'gam.getUsersToModify', (['myarg', 'sys.argv[i + 1]'], {}), '(myarg, sys.argv[i + 1])\n', (6531, 6555), False, 'import gam\n'), ((6660, 6736), 'gam.controlflow.invalid_argument_exit', 'controlflow.invalid_argument_exit', (['sys.argv[i]', 'f"""gam report usage {report}"""'], {}), "(sys.argv[i], f'gam report usage {report}')\n", (6693, 6736), False, 'from gam import controlflow\n'), ((12391, 12438), 'gam.normalizeEmailAddressOrUID', 'gam.normalizeEmailAddressOrUID', (['sys.argv[i + 1]'], {}), '(sys.argv[i + 1])\n', (12421, 12438), False, 'import gam\n'), ((12968, 13028), 'gam.controlflow.invalid_argument_exit', 'controlflow.invalid_argument_exit', (['sys.argv[i]', '"""gam report"""'], {}), "(sys.argv[i], 'gam report')\n", (13001, 13028), False, 'from gam import controlflow\n'), ((21689, 21739), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['(val - 62135683200)'], {}), '(val - 62135683200)\n', (21720, 21739), False, 'import datetime\n')] |
#!/usr/bin/env python2.7
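# merges several two-column "<gene> <count>" files into one tab-separated
# matrix on stdout, one count column per input file; genes must appear in the
# same order in every file (checked below).
# usage sketch (filenames are illustrative): ./merge.py a.counts b.counts > matrix.tsv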
import sys
infiles = sys.argv[1:]
data = []
genes = []
for i, fname in enumerate(infiles):
sys.stderr.write(fname + '\n')
d = []
with open(fname, 'rb') as ihandle:
for j, line in enumerate(ihandle):
g, c = line.strip().split()
if i != 0 and g != genes[j]:
raise Exception('no match')
if i == 0:
genes.append(g)
d.append(c)
data.append(d)
out = sys.stdout
for i in xrange(len(genes)):
out.write(genes[i])
for d in data:
out.write('\t' + d[i])
out.write('\n')
| [
"sys.stderr.write"
] | [((121, 151), 'sys.stderr.write', 'sys.stderr.write', (["(fname + '\\n')"], {}), "(fname + '\\n')\n", (137, 151), False, 'import sys\n')] |
import numpy as np
from .image_transforms import mat_to_gray
def rgb2hcv(Blue, Green, Red):
"""transform red green blue arrays to a color space
Parameters
----------
Blue : np.array, size=(m,n)
Blue band of satellite image
Green : np.array, size=(m,n)
Green band of satellite image
Red : np.array, size=(m,n)
Red band of satellite image
Returns
-------
    H : np.array, size=(m,n)
        array with dominant frequency, i.e.: hue
    C : np.array, size=(m,n)
        array with amount of color, i.e.: chroma
    V : np.array, size=(m,n)
        luminance
See also
--------
rgb2yiq, rgb2ycbcr, rgb2hsi, rgb2xyz, rgb2lms
Notes
-----
.. [1] Smith, "Putting colors in order", Dr. Dobb’s Journal, pp 40, 1993.
.. [2] Tsai, "A comparative study on shadow compensation of color aerial
images in invariant color models", IEEE transactions in geoscience and
remote sensing, vol. 44(6) pp. 1661--1671, 2006.
"""
NanBol = Blue == 0
Blue, Green = mat_to_gray(Blue, NanBol), mat_to_gray(Green, NanBol)
    Red = mat_to_gray(Red, NanBol)
    V = 0.3*(Red + Green + Blue)
    H = np.arctan2(Red-Blue, np.sqrt(3)*(V-Green))
    IN = abs(np.cos(H)) <= 0.2
    C = np.divide(V-Green, np.cos(H))
    C2 = np.divide(Red-Blue, np.sqrt(3)*np.sin(H))
C[IN] = C2[IN]
return H, C, V
def rgb2yiq(Red, Green, Blue):
"""transform red, green, blue to luminance, inphase, quadrature values
Parameters
----------
Red : np.array, size=(m,n)
red band of satellite image
Green : np.array, size=(m,n)
green band of satellite image
Blue : np.array, size=(m,n)
blue band of satellite image
Returns
-------
Y : np.array, size=(m,n)
luminance
I : np.array, size=(m,n)
inphase
Q : np.array, size=(m,n)
quadrature
See also
--------
yiq2rgb, rgb2hcv, rgb2ycbcr, rgb2hsi, rgb2xyz, rgb2lms
Notes
-----
.. [1] <NAME> "Digital image processing", 1992.
"""
L = np.array([(+0.299, +0.587, +0.114),
(+0.596, -0.275, -0.321),
(+0.212, -0.523, +0.311)])
RGB = np.dstack((Red, Green, Blue))
YIQ = np.einsum('ij,klj->kli', L, RGB)
Y,I,Q = YIQ[:,:,0], YIQ[:,:,1], YIQ[:,:,2]
return Y, I, Q
def yiq2rgb(Y,I,Q):
"""transform luminance, inphase, quadrature values to red, green, blue
Parameters
----------
    Y : np.array, size=(m,n)
        luminance
    I : np.array, size=(m,n)
        inphase
    Q : np.array, size=(m,n)
        quadrature
    Returns
    -------
    Red : np.array, size=(m,n)
        red band of satellite image
    Green : np.array, size=(m,n)
        green band of satellite image
    Blue : np.array, size=(m,n)
        blue band of satellite image
See also
--------
rgb2yiq
Notes
-----
.. [1] <NAME> "Digital image processing", 1992.
"""
L = np.array([(+0.299, +0.587, +0.114),
(+0.596, -0.275, -0.321),
(+0.212, -0.523, +0.311)])
Linv = np.linalg.inv(L)
YIQ = np.dstack((Y, I, Q))
RGB = np.einsum('ij,klj->kli', Linv, YIQ)
R,G,B = RGB[:,:,0], RGB[:,:,1], RGB[:,:,2]
return R, G, B
def rgb2ycbcr(Red, Green, Blue):
"""transform red, green, blue arrays to luna and chroma values
Parameters
----------
Red : np.array, size=(m,n)
red band of satellite image
Green : np.array, size=(m,n)
green band of satellite image
Blue : np.array, size=(m,n)
blue band of satellite image
Returns
-------
Y : np.array, size=(m,n)
luma
Cb : np.array, size=(m,n)
chroma
Cr : np.array, size=(m,n)
chroma
See also
--------
rgb2hcv, rgb2yiq, rgb2hsi, rgb2xyz, rgb2lms
Notes
-----
.. [1] Tsai, "A comparative study on shadow compensation of color aerial
images in invariant color models", IEEE transactions in geoscience and
remote sensing, vol. 44(6) pp. 1661--1671, 2006.
"""
L = np.array([(+0.257, +0.504, +0.098),
(-0.148, -0.291, +0.439),
(+0.439, -0.368, -0.071)])
C = np.array([16, 128, 128])/2**8
RGB = np.dstack((Red, Green, Blue))
YCC = np.einsum('ij,klj->kli', L, RGB)
del RGB
Y = YCC[:,:,0] + C[0]
Cb= YCC[:,:,1] + C[1]
Cr= YCC[:,:,2] + C[2]
return Y, Cb, Cr
def rgb2hsi(Red, Green, Blue):
"""transform red, green, blue arrays to hue, saturation, intensity arrays
Parameters
----------
Red : np.array, size=(m,n)
red band of satellite image
Green : np.array, size=(m,n)
green band of satellite image
Blue : np.array, size=(m,n)
blue band of satellite image
Returns
-------
Hue : np.array, size=(m,n), range=0...1
Hue
Sat : np.array, size=(m,n), range=0...1
Saturation
Int : np.array, size=(m,n), range=0...1
Intensity
See also
--------
erdas2hsi, rgb2hcv, rgb2yiq, rgb2ycbcr, rgb2xyz, rgb2lms
Notes
-----
.. [1] Tsai, "A comparative study on shadow compensation of color aerial
images in invariant color models", IEEE transactions in geoscience and
remote sensing, vol. 44(6) pp. 1661--1671, 2006.
.. [2] Pratt, "Digital image processing" Wiley, 1991.
"""
if np.ptp(Red.flatten())>1:
Red = mat_to_gray(Red)
if np.ptp(Green.flatten())>1:
Green = mat_to_gray(Green)
if np.ptp(Blue.flatten())>1:
Blue = mat_to_gray(Blue)
Tsai = np.array([(1/3, 1/3, 1/3),
(-np.sqrt(6)/6, -np.sqrt(6)/6, -np.sqrt(6)/3),
                     (1/np.sqrt(6), -2/np.sqrt(6), 0)])
RGB = np.dstack((Red, Green, Blue))
HSI = np.einsum('ij,klj->kli', Tsai, RGB)
Int = HSI[:,:,0]
Sat = np.sqrt(HSI[:,:,1] ** 2 + HSI[:,:,2] ** 2)
Hue = np.arctan2(HSI[:,:,1], HSI[:,:,2])/np.pi
Hue = np.remainder(Hue, 1) # bring to from -.5...+.5 to 0...1 range
return Hue, Sat, Int
def hsi2rgb(Hue, Sat, Int):  # todo: verify against rgb2hsi
    Red, Green, Blue = np.zeros_like(Hue), np.zeros_like(Hue), np.zeros_like(Hue)
    Class = np.ceil(Hue*3)  # Hue from rgb2hsi lies in 0...1, so *3 selects one of the three sectors
Color = 1 + Sat * np.divide(Hue, np.cos(np.radians(60)))
# red-green space
Sel = Class==1
Blue[Sel] = np.divide(1 - Sat[Sel], 3)
Red[Sel] = np.divide(Int[Sel] + Color[Sel], 3)
Green[Sel] = 1 - (Red[Sel] + Blue[Sel])
# green-blue space
Sel = Class==2
Red[Sel] = np.divide(1 - Sat[Sel], 3)
Green[Sel] = np.divide(Int[Sel] + Color[Sel], 3)
Blue[Sel] = 1 - (Green[Sel] + Red[Sel])
# blue-red space
Sel = Class==3
Green[Sel] = np.divide(1 - Sat[Sel], 3)
Blue[Sel] = np.divide(Int[Sel] + Color[Sel], 3)
Red[Sel] = 1 - (Blue[Sel] + Green[Sel])
return Red, Green, Blue
def erdas2hsi(Blue, Green, Red):
"""transform red, green, blue arrays to hue, saturation, intensity arrays
Parameters
----------
Blue : np.array, size=(m,n)
blue band of satellite image
Green : np.array, size=(m,n)
green band of satellite image
Red : np.array, size=(m,n)
red band of satellite image
Returns
-------
Hue : np.array, size=(m,n), float
hue
Sat : np.array, size=(m,n), float
saturation
Int : np.array, size=(m,n), float
intensity
See also
--------
rgb2hsi
Notes
-----
.. [1] ERDAS, "User handbook", 2013.
"""
if np.ptp(Red.flatten())>1:
Red = mat_to_gray(Red)
if np.ptp(Green.flatten())>1:
Green = mat_to_gray(Green)
if np.ptp(Blue.flatten())>1:
Blue = mat_to_gray(Blue)
Stack = np.dstack((Blue, Green, Red))
min_Stack = np.amin(Stack, axis=2)
max_Stack = np.amax(Stack, axis=2)
Int = (max_Stack + min_Stack)/2
Sat = np.copy(Blue)
Sat[Int==0] = 0
Sat[Int<=.5] = (max_Stack[Int<=.5] -
min_Stack[Int<=.5]) / (max_Stack[Int<=.5] +
min_Stack[Int<=.5])
Sat[Int>.5] = (max_Stack[Int>.5] -
min_Stack[Int>.5]) / ( 2 - max_Stack[Int>.5] +
min_Stack[Int>.5])
Hue = np.copy(Blue)
Hue[Blue==max_Stack] = (1/6) *(6
+ Green[Blue==max_Stack]
- Red[Blue==max_Stack])
Hue[Green==max_Stack] = (1/6) *(4
+ Red[Green==max_Stack]
- Blue[Green==max_Stack])
Hue[Red==max_Stack] = (1/6) *(2
+ Blue[Red==max_Stack]
- Green[Red==max_Stack])
return Hue, Sat, Int
def rgb2xyz(Red, Green, Blue, method='reinhardt'):
"""transform red, green, blue arrays to XYZ tristimulus values
Parameters
----------
Red : np.array, size=(m,n)
red band of satellite image
Green : np.array, size=(m,n)
green band of satellite image
Blue : np.array, size=(m,n)
blue band of satellite image
method :
'reinhardt'
XYZitu601-1 axis
'ford'
D65 illuminant
Returns
-------
X : np.array, size=(m,n)
Y : np.array, size=(m,n)
Z : np.array, size=(m,n)
See also
--------
rgb2hcv, rgb2ycbcr, rgb2hsi, rgb2yiq, rgb2lms, xyz2lms
Notes
-----
    .. [1] Reinhard et al. "Color transfer between images" IEEE Computer graphics
       and applications vol.21(5) pp.34-41, 2001.
    .. [2] Ford & Roberts. "Color space conversion", pp. 1--31, 1998.
"""
if method=='ford':
M = np.array([(0.4124564, 0.3575761, 0.1804375),
(0.2126729, 0.7151522, 0.0721750),
(0.0193339, 0.1191920, 0.9503041)])
else:
M = np.array([(0.5141, 0.3239, 0.1604),
(0.2651, 0.6702, 0.0641),
(0.0241, 0.1228, 0.8444)])
RGB = np.dstack((Red, Green, Blue))
XYZ = np.einsum('ij,klj->kli', M, RGB)
X,Y,Z = XYZ[:,:,0], XYZ[:,:,1], XYZ[:,:,2]
return X, Y, Z
def xyz2lms(X, Y, Z):
"""transform XYZ tristimulus arrays to LMS values
Parameters
----------
X : np.array, size=(m,n)
modified XYZitu601-1 axis
Y : np.array, size=(m,n)
modified XYZitu601-1 axis
Z : np.array, size=(m,n)
modified XYZitu601-1 axis
Returns
-------
L : np.array, size=(m,n)
M : np.array, size=(m,n)
S : np.array, size=(m,n)
See also
--------
rgb2hcv, rgb2ycbcr, rgb2hsi, rgb2yiq, rgb2lms
Notes
-----
    .. [1] Reinhard et al. "Color transfer between images" IEEE Computer graphics
       and applications vol.21(5) pp.34-41, 2001.
"""
    N = np.array([(+0.3897, +0.6890, -0.0787),
                  (-0.2298, +1.1834, +0.0464),
                  (+0.0000, +0.0000, +1.0000)])
    XYZ = np.dstack((X, Y, Z))
    LMS = np.einsum('ij,klj->kli', N, XYZ)
L,M,S = LMS[:,:,0], LMS[:,:,1], LMS[:,:,2]
return L, M, S
def xyz2lab(X, Y, Z, th=0.008856):
"""transform XYZ tristimulus arrays to Lab values
Parameters
----------
X : np.array, size=(m,n)
Y : np.array, size=(m,n)
Z : np.array, size=(m,n)
Returns
-------
L : np.array, size=(m,n)
a : np.array, size=(m,n)
b : np.array, size=(m,n)
See also
--------
rgb2xyz, xyz2lms, lms2lch
Notes
-----
.. [1] Ford & Roberts. "Color space conversion", pp. 1--31, 1998.
.. [2] Silva et al. "Near real-time shadow detection and removal in aerial
motion imagery application" ISPRS journal of photogrammetry and remote
sensing, vol.140 pp.104--121, 2018.
"""
    Xn, Yn, Zn = 95.047, 100.00, 108.883 # D65 illuminant
    YYn = Y/Yn
    L_1 = 116* YYn**(1/3.) - 16
    L_2 = 903.3 * YYn
    L = L_1
    L[YYn<=th] = L_2[YYn<=th]
    def f(tau, th):
        # piecewise cube-root mapping from the CIE Lab definition
        fx = tau**(1/3.)
        fx[tau<=th] = 7.787*tau[tau<=th] + 16/116
        return fx
    a = 500*( f(X/Xn, th) - f(Y/Yn, th) )
    b = 200*( f(Y/Yn, th) - f(Z/Zn, th) )
return L, a, b
def lab2lch(L, a, b):
"""transform XYZ tristimulus arrays to Lab values
Parameters
----------
L : np.array, size=(m,n)
a : np.array, size=(m,n)
b : np.array, size=(m,n)
Returns
-------
C : np.array, size=(m,n)
h : np.array, size=(m,n)
See also
--------
rgb2xyz, xyz2lms, xyz2lab
Notes
-----
.. [1] Ford & Roberts. "Color space conversion", pp. 1--31, 1998.
.. [2] Silva et al. "Near real-time shadow detection and removal in aerial
motion imagery application" ISPRS journal of photogrammetry and remote
sensing, vol.140 pp.104--121, 2018.
"""
    C = np.sqrt(a**2 + b**2)
    # calculate angle, and let it range from 0...1
    h = ((np.arctan2(b, a) + 2*np.pi) % (2*np.pi)) / (2*np.pi)
return C, h
def rgb2lms(Red, Green, Blue):
"""transform red, green, blue arrays to XYZ tristimulus values
Parameters
----------
Red : np.array, size=(m,n)
red band of satellite image
Green : np.array, size=(m,n)
green band of satellite image
Blue : np.array, size=(m,n)
blue band of satellite image
Returns
-------
L : np.array, size=(m,n)
M : np.array, size=(m,n)
S : np.array, size=(m,n)
See also
--------
rgb2hcv, rgb2ycbcr, rgb2hsi, rgb2yiq, rgb2xyz, xyz2lms
Notes
-----
.. [1] Reinhard et al. "Color transfer between images", 2001.
"""
I = np.array([(0.3811, 0.5783, 0.0402),
(0.1967, 0.7244, 0.0782),
(0.0241, 0.1228, 0.8444)])
RGB = np.dstack((Red, Green, Blue))
LMS = np.einsum('ij,klj->kli', I, RGB)
L,M,S = LMS[:,:,0], LMS[:,:,1], LMS[:,:,2]
return L, M, S
def lms2lab(L, M, S):
"""transform L, M, S arrays to lab color space
Parameters
----------
L : np.array, size=(m,n)
M : np.array, size=(m,n)
S : np.array, size=(m,n)
Returns
-------
l : np.array, size=(m,n)
a : np.array, size=(m,n)
b : np.array, size=(m,n)
See also
--------
rgb2hcv, rgb2ycbcr, rgb2hsi, rgb2yiq, rgb2xyz, xyz2lms
Notes
-----
.. [1] Reinhard et al. "Color transfer between images", 2001.
"""
I = np.matmul(np.array([(1/np.sqrt(3), 0, 0),
(0, 1/np.sqrt(6), 0),
(0, 0, 1/np.sqrt(2))]),
np.array([(+1, +1, +1),
(+1, +1, -2),
(+1, -1, +0)]))
LMS = np.dstack((L, M, S))
lab = np.einsum('ij,klj->kli', I, LMS)
l,a,b = lab[:,:,0], lab[:,:,1], lab[:,:,2]
return l, a, b
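# minimal round-trip sketch: yiq2rgb should invert rgb2yiq up to float error.
# assumes numpy >= 1.17 for np.random.default_rng; run via "python -m" so the
# relative import above resolves.
if __name__ == "__main__":
    _rng = np.random.default_rng(0)
    _R, _G, _B = (_rng.random((4, 4)) for _ in range(3))
    _Y, _I, _Q = rgb2yiq(_R, _G, _B)
    assert np.allclose(np.dstack(yiq2rgb(_Y, _I, _Q)), np.dstack((_R, _G, _B)))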
| [
"numpy.radians",
"numpy.dstack",
"numpy.ceil",
"numpy.copy",
"numpy.sqrt",
"numpy.amin",
"numpy.divide",
"numpy.zeros_like",
"numpy.array",
"numpy.linalg.inv",
"numpy.einsum",
"numpy.cos",
"numpy.arctan2",
"numpy.sin",
"numpy.amax",
"numpy.remainder"
] | [((2091, 2184), 'numpy.array', 'np.array', (['[(+0.299, +0.587, +0.114), (+0.596, -0.275, -0.321), (+0.212, -0.523, +0.311)]'], {}), '([(+0.299, +0.587, +0.114), (+0.596, -0.275, -0.321), (+0.212, -\n 0.523, +0.311)])\n', (2099, 2184), True, 'import numpy as np\n'), ((2227, 2256), 'numpy.dstack', 'np.dstack', (['(Red, Green, Blue)'], {}), '((Red, Green, Blue))\n', (2236, 2256), True, 'import numpy as np\n'), ((2267, 2299), 'numpy.einsum', 'np.einsum', (['"""ij,klj->kli"""', 'L', 'RGB'], {}), "('ij,klj->kli', L, RGB)\n", (2276, 2299), True, 'import numpy as np\n'), ((2993, 3086), 'numpy.array', 'np.array', (['[(+0.299, +0.587, +0.114), (+0.596, -0.275, -0.321), (+0.212, -0.523, +0.311)]'], {}), '([(+0.299, +0.587, +0.114), (+0.596, -0.275, -0.321), (+0.212, -\n 0.523, +0.311)])\n', (3001, 3086), True, 'import numpy as np\n'), ((3129, 3145), 'numpy.linalg.inv', 'np.linalg.inv', (['L'], {}), '(L)\n', (3142, 3145), True, 'import numpy as np\n'), ((3156, 3176), 'numpy.dstack', 'np.dstack', (['(Y, I, Q)'], {}), '((Y, I, Q))\n', (3165, 3176), True, 'import numpy as np\n'), ((3187, 3222), 'numpy.einsum', 'np.einsum', (['"""ij,klj->kli"""', 'Linv', 'YIQ'], {}), "('ij,klj->kli', Linv, YIQ)\n", (3196, 3222), True, 'import numpy as np\n'), ((4109, 4202), 'numpy.array', 'np.array', (['[(+0.257, +0.504, +0.098), (-0.148, -0.291, +0.439), (+0.439, -0.368, -0.071)]'], {}), '([(+0.257, +0.504, +0.098), (-0.148, -0.291, +0.439), (+0.439, -\n 0.368, -0.071)])\n', (4117, 4202), True, 'import numpy as np\n'), ((4283, 4312), 'numpy.dstack', 'np.dstack', (['(Red, Green, Blue)'], {}), '((Red, Green, Blue))\n', (4292, 4312), True, 'import numpy as np\n'), ((4323, 4355), 'numpy.einsum', 'np.einsum', (['"""ij,klj->kli"""', 'L', 'RGB'], {}), "('ij,klj->kli', L, RGB)\n", (4332, 4355), True, 'import numpy as np\n'), ((5780, 5809), 'numpy.dstack', 'np.dstack', (['(Red, Green, Blue)'], {}), '((Red, Green, Blue))\n', (5789, 5809), True, 'import numpy as np\n'), ((5820, 5855), 'numpy.einsum', 'np.einsum', (['"""ij,klj->kli"""', 'Tsai', 'RGB'], {}), "('ij,klj->kli', Tsai, RGB)\n", (5829, 5855), True, 'import numpy as np\n'), ((5887, 5933), 'numpy.sqrt', 'np.sqrt', (['(HSI[:, :, 1] ** 2 + HSI[:, :, 2] ** 2)'], {}), '(HSI[:, :, 1] ** 2 + HSI[:, :, 2] ** 2)\n', (5894, 5933), True, 'import numpy as np\n'), ((5991, 6011), 'numpy.remainder', 'np.remainder', (['Hue', '(1)'], {}), '(Hue, 1)\n', (6003, 6011), True, 'import numpy as np\n'), ((6205, 6221), 'numpy.ceil', 'np.ceil', (['(Hue / 3)'], {}), '(Hue / 3)\n', (6212, 6221), True, 'import numpy as np\n'), ((6339, 6365), 'numpy.divide', 'np.divide', (['(1 - Sat[Sel])', '(3)'], {}), '(1 - Sat[Sel], 3)\n', (6348, 6365), True, 'import numpy as np\n'), ((6381, 6416), 'numpy.divide', 'np.divide', (['(Int[Sel] + Color[Sel])', '(3)'], {}), '(Int[Sel] + Color[Sel], 3)\n', (6390, 6416), True, 'import numpy as np\n'), ((6519, 6545), 'numpy.divide', 'np.divide', (['(1 - Sat[Sel])', '(3)'], {}), '(1 - Sat[Sel], 3)\n', (6528, 6545), True, 'import numpy as np\n'), ((6563, 6598), 'numpy.divide', 'np.divide', (['(Int[Sel] + Color[Sel])', '(3)'], {}), '(Int[Sel] + Color[Sel], 3)\n', (6572, 6598), True, 'import numpy as np\n'), ((6701, 6727), 'numpy.divide', 'np.divide', (['(1 - Sat[Sel])', '(3)'], {}), '(1 - Sat[Sel], 3)\n', (6710, 6727), True, 'import numpy as np\n'), ((6744, 6779), 'numpy.divide', 'np.divide', (['(Int[Sel] + Color[Sel])', '(3)'], {}), '(Int[Sel] + Color[Sel], 3)\n', (6753, 6779), True, 'import numpy as np\n'), ((7712, 7741), 'numpy.dstack', 'np.dstack', (['(Blue, Green, Red)'], {}), '((Blue, Green, Red))\n', (7721, 7741), True, 'import numpy as np\n'), ((7758, 7780), 'numpy.amin', 'np.amin', (['Stack'], {'axis': '(2)'}), '(Stack, axis=2)\n', (7765, 7780), True, 'import numpy as np\n'), ((7797, 7819), 'numpy.amax', 'np.amax', (['Stack'], {'axis': '(2)'}), '(Stack, axis=2)\n', (7804, 7819), True, 'import numpy as np\n'), ((7867, 7880), 'numpy.copy', 'np.copy', (['Blue'], {}), '(Blue)\n', (7874, 7880), True, 'import numpy as np\n'), ((8245, 8258), 'numpy.copy', 'np.copy', (['Blue'], {}), '(Blue)\n', (8252, 8258), True, 'import numpy as np\n'), ((9996, 10025), 'numpy.dstack', 'np.dstack', (['(Red, Green, Blue)'], {}), '((Red, Green, Blue))\n', (10005, 10025), True, 'import numpy as np\n'), ((10036, 10068), 'numpy.einsum', 'np.einsum', (['"""ij,klj->kli"""', 'M', 'RGB'], {}), "('ij,klj->kli', M, RGB)\n", (10045, 10068), True, 'import numpy as np\n'), ((10785, 10877), 'numpy.array', 'np.array', (['[(+0.3897, +0.689, -0.0787), (-0.2298, +1.1834, +0.0464), (+0.0, +0.0, +0.0)]'], {}), '([(+0.3897, +0.689, -0.0787), (-0.2298, +1.1834, +0.0464), (+0.0, +\n 0.0, +0.0)])\n', (10793, 10877), True, 'import numpy as np\n'), ((10930, 10950), 'numpy.dstack', 'np.dstack', (['(X, Y, Z)'], {}), '((X, Y, Z))\n', (10939, 10950), True, 'import numpy as np\n'), ((10961, 10993), 'numpy.einsum', 'np.einsum', (['"""ij,klj->kli"""', 'N', 'RGB'], {}), "('ij,klj->kli', N, RGB)\n", (10970, 10993), True, 'import numpy as np\n'), ((12752, 12776), 'numpy.sqrt', 'np.sqrt', (['(a ** 2 + b ** 2)'], {}), '(a ** 2 + b ** 2)\n', (12759, 12776), True, 'import numpy as np\n'), ((13538, 13631), 'numpy.array', 'np.array', (['[(0.3811, 0.5783, 0.0402), (0.1967, 0.7244, 0.0782), (0.0241, 0.1228, 0.8444)]'], {}), '([(0.3811, 0.5783, 0.0402), (0.1967, 0.7244, 0.0782), (0.0241, \n 0.1228, 0.8444)])\n', (13546, 13631), True, 'import numpy as np\n'), ((13674, 13703), 'numpy.dstack', 'np.dstack', (['(Red, Green, Blue)'], {}), '((Red, Green, Blue))\n', (13683, 13703), True, 'import numpy as np\n'), ((13714, 13746), 'numpy.einsum', 'np.einsum', (['"""ij,klj->kli"""', 'I', 'RGB'], {}), "('ij,klj->kli', I, RGB)\n", (13723, 13746), True, 'import numpy as np\n'), ((14589, 14609), 'numpy.dstack', 'np.dstack', (['(L, M, S)'], {}), '((L, M, S))\n', (14598, 14609), True, 'import numpy as np\n'), ((14620, 14652), 'numpy.einsum', 'np.einsum', (['"""ij,klj->kli"""', 'I', 'LMS'], {}), "('ij,klj->kli', I, LMS)\n", (14629, 14652), True, 'import numpy as np\n'), ((1137, 1160), 'numpy.dstack', 'np.dstack', (['(Red, Green)'], {}), '((Red, Green))\n', (1146, 1160), True, 'import numpy as np\n'), ((1305, 1314), 'numpy.cos', 'np.cos', (['H'], {}), '(H)\n', (1311, 1314), True, 'import numpy as np\n'), ((4242, 4266), 'numpy.array', 'np.array', (['[16, 128, 128]'], {}), '([16, 128, 128])\n', (4250, 4266), True, 'import numpy as np\n'), ((5940, 5978), 'numpy.arctan2', 'np.arctan2', (['HSI[:, :, 1]', 'HSI[:, :, 2]'], {}), '(HSI[:, :, 1], HSI[:, :, 2])\n', (5950, 5978), True, 'import numpy as np\n'), ((6134, 6152), 'numpy.zeros_like', 'np.zeros_like', (['Hue'], {}), '(Hue)\n', (6147, 6152), True, 'import numpy as np\n'), ((6154, 6172), 'numpy.zeros_like', 'np.zeros_like', (['Hue'], {}), '(Hue)\n', (6167, 6172), True, 'import numpy as np\n'), ((6174, 6192), 'numpy.zeros_like', 'np.zeros_like', (['Hue'], {}), '(Hue)\n', (6187, 6192), True, 'import numpy as np\n'), ((9670, 9788), 'numpy.array', 'np.array', (['[(0.4124564, 0.3575761, 0.1804375), (0.2126729, 0.7151522, 0.072175), (\n 0.0193339, 0.119192, 0.9503041)]'], {}), '([(0.4124564, 0.3575761, 0.1804375), (0.2126729, 0.7151522, \n 0.072175), (0.0193339, 0.119192, 0.9503041)])\n', (9678, 9788), True, 'import numpy as np\n'), ((9852, 9945), 'numpy.array', 'np.array', (['[(0.5141, 0.3239, 0.1604), (0.2651, 0.6702, 0.0641), (0.0241, 0.1228, 0.8444)]'], {}), '([(0.5141, 0.3239, 0.1604), (0.2651, 0.6702, 0.0641), (0.0241, \n 0.1228, 0.8444)])\n', (9860, 9945), True, 'import numpy as np\n'), ((14468, 14520), 'numpy.array', 'np.array', (['[(+1, +1, +1), (+1, +1, -2), (+1, -1, +0)]'], {}), '([(+1, +1, +1), (+1, +1, -2), (+1, -1, +0)])\n', (14476, 14520), True, 'import numpy as np\n'), ((1225, 1235), 'numpy.sqrt', 'np.sqrt', (['(3)'], {}), '(3)\n', (1232, 1235), True, 'import numpy as np\n'), ((1261, 1270), 'numpy.cos', 'np.cos', (['H'], {}), '(H)\n', (1267, 1270), True, 'import numpy as np\n'), ((1345, 1355), 'numpy.sqrt', 'np.sqrt', (['(3)'], {}), '(3)\n', (1352, 1355), True, 'import numpy as np\n'), ((1356, 1365), 'numpy.sin', 'np.sin', (['H'], {}), '(H)\n', (1362, 1365), True, 'import numpy as np\n'), ((5737, 5747), 'numpy.sqrt', 'np.sqrt', (['(6)'], {}), '(6)\n', (5744, 5747), True, 'import numpy as np\n'), ((6264, 6278), 'numpy.radians', 'np.radians', (['(60)'], {}), '(60)\n', (6274, 6278), True, 'import numpy as np\n'), ((5668, 5678), 'numpy.sqrt', 'np.sqrt', (['(6)'], {}), '(6)\n', (5675, 5678), True, 'import numpy as np\n'), ((5683, 5693), 'numpy.sqrt', 'np.sqrt', (['(6)'], {}), '(6)\n', (5690, 5693), True, 'import numpy as np\n'), ((5698, 5708), 'numpy.sqrt', 'np.sqrt', (['(6)'], {}), '(6)\n', (5705, 5708), True, 'import numpy as np\n'), ((5752, 5762), 'numpy.sqrt', 'np.sqrt', (['(6)'], {}), '(6)\n', (5759, 5762), True, 'import numpy as np\n'), ((12836, 12852), 'numpy.arctan2', 'np.arctan2', (['b', 'a'], {}), '(b, a)\n', (12846, 12852), True, 'import numpy as np\n'), ((14329, 14339), 'numpy.sqrt', 'np.sqrt', (['(3)'], {}), '(3)\n', (14336, 14339), True, 'import numpy as np\n'), ((14382, 14392), 'numpy.sqrt', 'np.sqrt', (['(6)'], {}), '(6)\n', (14389, 14392), True, 'import numpy as np\n'), ((14435, 14445), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (14442, 14445), True, 'import numpy as np\n')]
import pandas as pd
X_train = pd.read_csv("X_train.csv")
df_y = pd.read_csv("y_train.csv")
y_train = df_y["y"]
X_test = pd.read_csv("X_test.csv")
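# quick sanity check (illustrative addition): feature and target row counts must agree
assert X_train.shape[0] == y_train.shape[0]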
| [
"pandas.read_csv"
] | [((31, 57), 'pandas.read_csv', 'pd.read_csv', (['"""X_train.csv"""'], {}), "('X_train.csv')\n", (42, 57), True, 'import pandas as pd\n'), ((65, 91), 'pandas.read_csv', 'pd.read_csv', (['"""y_train.csv"""'], {}), "('y_train.csv')\n", (76, 91), True, 'import pandas as pd\n'), ((122, 147), 'pandas.read_csv', 'pd.read_csv', (['"""X_test.csv"""'], {}), "('X_test.csv')\n", (133, 147), True, 'import pandas as pd\n')] |
from flask import render_template
from flask.ext.restful import abort
from flask_login import login_required
from redash import models, settings
from redash.wsgi import app
from redash.utils import json_dumps
@app.route('/embed/query/<query_id>/visualization/<visualization_id>', methods=['GET'])
@login_required
def embed(query_id, visualization_id):
query = models.Query.get_by_id(query_id)
vis = query.visualizations.where(models.Visualization.id == visualization_id).first()
qr = {}
if vis is not None:
vis = vis.to_dict()
qr = query.latest_query_data
if qr is None:
abort(400, message="No Results for this query")
else:
qr = qr.to_dict()
else:
abort(404, message="Visualization not found.")
client_config = {}
client_config.update(settings.COMMON_CLIENT_CONFIG)
return render_template("embed.html",
name=settings.NAME,
client_config=json_dumps(client_config),
visualization=json_dumps(vis),
query_result=json_dumps(qr),
analytics=settings.ANALYTICS)
| [
"redash.utils.json_dumps",
"redash.wsgi.app.route",
"redash.models.Query.get_by_id",
"flask.ext.restful.abort"
] | [((287, 377), 'redash.wsgi.app.route', 'app.route', (['"""/embed/query/<query_id>/visualization/<visualization_id>"""'], {'methods': "['GET']"}), "('/embed/query/<query_id>/visualization/<visualization_id>',\n methods=['GET'])\n", (296, 377), False, 'from redash.wsgi import app\n'), ((442, 474), 'redash.models.Query.get_by_id', 'models.Query.get_by_id', (['query_id'], {}), '(query_id)\n', (464, 474), False, 'from redash import models, settings\n'), ((811, 857), 'flask.ext.restful.abort', 'abort', (['(404)'], {'message': '"""Visualization not found."""'}), "(404, message='Visualization not found.')\n", (816, 857), False, 'from flask.ext.restful import abort\n'), ((701, 748), 'flask.ext.restful.abort', 'abort', (['(400)'], {'message': '"""No Results for this query"""'}), "(400, message='No Results for this query')\n", (706, 748), False, 'from flask.ext.restful import abort\n'), ((1068, 1093), 'redash.utils.json_dumps', 'json_dumps', (['client_config'], {}), '(client_config)\n', (1078, 1093), False, 'from redash.utils import json_dumps\n'), ((1136, 1151), 'redash.utils.json_dumps', 'json_dumps', (['vis'], {}), '(vis)\n', (1146, 1151), False, 'from redash.utils import json_dumps\n'), ((1193, 1207), 'redash.utils.json_dumps', 'json_dumps', (['qr'], {}), '(qr)\n', (1203, 1207), False, 'from redash.utils import json_dumps\n')] |
from unittest import TestCase
import os
class TestSet_up_logger(TestCase):
def test_set_up_logger(self):
from utils import set_up_logger
from logging import Logger
logger = set_up_logger("test", "test.log")
self.assertIsInstance(logger, Logger)
os.remove("test.log")
| [
"utils.set_up_logger",
"os.remove"
] | [((205, 238), 'utils.set_up_logger', 'set_up_logger', (['"""test"""', '"""test.log"""'], {}), "('test', 'test.log')\n", (218, 238), False, 'from utils import set_up_logger\n'), ((293, 314), 'os.remove', 'os.remove', (['"""test.log"""'], {}), "('test.log')\n", (302, 314), False, 'import os\n')] |
# the comments in this file were made while learning, as reminders
# to RUN APP IN CMD PROMPT: cd to this directory, or place in default CMD directory:
# then run 'python rubicon_reminders_cli.py'
from os import listdir
from datetime import datetime
# this assigns dt variable as date + timestamp
dt = datetime.now()
# TODO numerate note items per entry
# open existing or create a new file prompt:
p1 = input("(V)iew or (N)ew [V/N]: ").upper()
if p1 == "V":
# this views file directory of existing notes if first input is (view)
for file in listdir():
if file.endswith(".txt"):
print(file)
# below opens existing file, allows multiple note lines, and dates it when finished with session.
    old_file = input("which file would you like to open: ")
hdl = open(old_file + ".txt", "r+") # using r+ by default places text at beginning, overwriting.
for line in hdl: # as long as you first READ the file, then r+ becomes APPEND TO END.
print(line.strip())
of_note = input("Add Note: ")
if of_note == "done": # FIXME add accept on any 'done' check for upper and lowercase
# specifies notes were reviewed if first note entry is "done"
hdl.write(" REVIEWED: ")
hdl.write(str(dt))
# if first entered note is not 'done', continue asking for more notes until entry == 'done'
else:
hdl.write('\n')
hdl.write(of_note)
hdl.write('\n')
while of_note != "done":
of_note = input("Add more notes: ")
while of_note != "done":
hdl.write(of_note)
hdl.write('\n')
else:
hdl.write("SESSION END: ")
hdl.write(str(dt))
hdl.write('\n')
hdl.close()
# below is the block for generating and noting in a new file, if line 15 == 'N'
elif p1 == "N":
new_file = input("new file name: ")
hdl = open(new_file, "a")
nf_note = input("Add Note: ")
if nf_note == "done":
print("finished")
else:
hdl.write(nf_note)
hdl.write('\n')
while nf_note != "done":
nf_note = input("Add more notes: ")
while nf_note != "done":
hdl.write(nf_note)
hdl.write('\n')
break
else:
hdl.write("SESSION END: ")
hdl.write(str(dt))
hdl.write('\n')
hdl.close()
else:
print("Error: please enter V or N")
| [
"datetime.datetime.now",
"os.listdir"
] | [((306, 320), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (318, 320), False, 'from datetime import datetime\n'), ((557, 566), 'os.listdir', 'listdir', ([], {}), '()\n', (564, 566), False, 'from os import listdir\n')] |
"""
${NAME}
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import time
import weakref
from PySide import QtGui
from mcedit2.widgets.layout import Column
log = logging.getLogger(__name__)
class InfoPanel(QtGui.QWidget):
def __init__(self, attrs, signals, **kwargs):
"""
Create a widget that displays a list of an object's selected attributes, named in `attrs`.
The widget updates itself whenever one of the object's signals named in `signals` is emitted.
If an attribute named in `attrs` is not found on `object`, the InfoPanel instance is checked for
an attribute of the same name and it is used instead if found.
:type attrs: list of attribute names to display
:type signals: list of signals to monitor
:param kwargs: args for QWidget
:type kwargs:
"""
QtGui.QWidget.__init__(self, **kwargs)
self.attrs = attrs
self.signals = signals
self.lastUpdate = time.time()
self.labels = [QtGui.QLabel() for _ in attrs]
self.setLayout(Column(*self.labels))
def updateLabels(self):
now = time.time()
if now < self.lastUpdate + 0.25:
return
self.lastUpdate = now
if self.object:
for attr, label in zip(self.attrs, self.labels):
try:
value = getattr(self.object, attr)
except AttributeError: # catches unrelated AttributeErrors in property getters...
try:
value = getattr(self, attr)
except AttributeError:
log.exception("Error updating info panel.")
value = getattr(self, attr, "Attribute not found")
label.setText("%s: %s" % (attr, value))
    _object = None
    @property
    def object(self):
        return self._object() if self._object is not None else None
@object.setter
def object(self, value):
self._object = weakref.ref(value)
self.updateLabels()
for signal in self.signals:
signal = getattr(self.object, signal, None)
if signal:
signal.connect(self.updateLabels)
    setObject = object.fset  # plain-call alias for the property's setter function
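# usage sketch (the names below -- `chunk`, "dirty", "dirtyChanged" -- are
# hypothetical stand-ins for a monitored object, attribute, and Qt signal):
#   panel = InfoPanel(attrs=["dirty"], signals=["dirtyChanged"])
#   panel.setObject(chunk)  # labels refresh whenever chunk.dirtyChanged fires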
| [
"logging.getLogger",
"mcedit2.widgets.layout.Column",
"time.time",
"PySide.QtGui.QLabel",
"PySide.QtGui.QWidget.__init__",
"weakref.ref"
] | [((222, 249), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (239, 249), False, 'import logging\n'), ((912, 950), 'PySide.QtGui.QWidget.__init__', 'QtGui.QWidget.__init__', (['self'], {}), '(self, **kwargs)\n', (934, 950), False, 'from PySide import QtGui\n'), ((1035, 1046), 'time.time', 'time.time', ([], {}), '()\n', (1044, 1046), False, 'import time\n'), ((1190, 1201), 'time.time', 'time.time', ([], {}), '()\n', (1199, 1201), False, 'import time\n'), ((2029, 2047), 'weakref.ref', 'weakref.ref', (['value'], {}), '(value)\n', (2040, 2047), False, 'import weakref\n'), ((1070, 1084), 'PySide.QtGui.QLabel', 'QtGui.QLabel', ([], {}), '()\n', (1082, 1084), False, 'from PySide import QtGui\n'), ((1125, 1145), 'mcedit2.widgets.layout.Column', 'Column', (['*self.labels'], {}), '(*self.labels)\n', (1131, 1145), False, 'from mcedit2.widgets.layout import Column\n')] |