code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
import sys
from PyQt5 import QtCore, QtGui, QtWidgets, uic
import controller
from datetime import date, datetime, timezone
qt_tela_inicial = "telas/gerenciar_meta.ui"
Ui_MainWindow, QtBaseClass = uic.loadUiType(qt_tela_inicial)
class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow):
switch_tela_gerenciar_meta = QtCore.pyqtSignal()
def __init__(self, id_meta):
QtWidgets.QMainWindow.__init__(self)
Ui_MainWindow.__init__(self)
self.setupUi(self)
self.meta_atual = controller.select_meta_completa(id_meta)
self.carregar_dados_meta(id_meta)
data_hoje = str(date.today())
data_hoje = QtCore.QDate.fromString(data_hoje, 'yyyy-MM-dd')
self.date_dt_inicial.setDate(data_hoje)
self.date_dt_final.setDate(data_hoje)
self.btn_cadastrar.pressed.connect(self.cadastrar)
self.btn_limpar.pressed.connect(self.limpar)
self.btn_sair.pressed.connect(self.sair)
def carregar_dados_meta(self, id_meta):
dt_inicio, verbo, quantidade, unidade, dt_final, periodo, dias_semana = controller.listar_meta_para_editar(id_meta)
self.date_dt_inicial.setDate(dt_inicio)
self.txt_verbo.setText(verbo)
self.double_quantidade.setValue(quantidade)
self.txt_unidade.setText(unidade)
self.date_dt_final.setDate(dt_final)
self.txt_periodo.setText(periodo)
if(0 in dias_semana):
self.check_segunda.setCheckState(2)
if(1 in dias_semana):
self.check_terca.setCheckState(2)
if(2 in dias_semana):
self.check_quarta.setCheckState(2)
if(3 in dias_semana):
self.check_quinta.setCheckState(2)
if(4 in dias_semana):
self.check_sexta.setCheckState(2)
if(5 in dias_semana):
self.check_sabado.setCheckState(2)
if(6 in dias_semana):
self.check_domingo.setCheckState(2)
def editar(self):
dt_inicio = self.date_dt_inicial.date()
dt_inicio = datetime.strptime('{}/{}/{}'.format(dt_inicio.day(), dt_inicio.month(), dt_inicio.year()), '%d/%m/%Y')
verbo = self.txt_verbo.text()
quantidade = self.double_quantidade.value()
unidade = self.txt_unidade.text()
dt_limite = self.date_dt_final.date()
dt_limite = datetime.strptime('{}/{}/{}'.format(dt_limite.day(), dt_limite.month(), dt_limite.year()), '%d/%m/%Y')
if(self.validar_entradas(dt_inicio, verbo, quantidade, unidade, dt_limite)):
periodo = self.combo_periodo.currentText()
if('semanal' in periodo):
periodo = 'semana'
controller.editar_meta_to_divisao(self.meta_atual, self.meta_atual['codigo'], dt_inicio, verbo, quantidade, unidade, dt_limite, periodo)
else:
periodo = 'dia'
dias_da_semana = []
if(self.check_segunda.isChecked()): dias_da_semana.append(0)
if(self.check_terca.isChecked()): dias_da_semana.append(1)
if(self.check_quarta.isChecked()): dias_da_semana.append(2)
if(self.check_quinta.isChecked()): dias_da_semana.append(3)
if(self.check_sexta.isChecked()): dias_da_semana.append(4)
if(self.check_sabado.isChecked()): dias_da_semana.append(5)
if(self.check_domingo.isChecked()): dias_da_semana.append(6)
controller.editar_meta_to_divisao(self.meta_atual, self.meta_atual['codigo'], dt_inicio, verbo, quantidade, unidade, dt_limite, periodo, dias_da_semana)
else:
#POPUP AVISANDO QUE TEM ALGO DE ERRADO
print('Todos os campos devem ser preenchidos')
pass
def validar_entradas(self, dt_inicio, verbo, quantidade, unidade, dt_limite):
#VALIDAR SE AS DATAS SAO RAZOAVEIS
dt_inicio = date.toordinal(dt_inicio)
dt_limite = date.toordinal(dt_limite)
if(dt_limite <= dt_inicio):
return False
if((not verbo) or (not unidade) or (quantidade <= 0)):
return False
#PEGAR O PERIODO
periodo = self.combo_periodo.currentText()
#SE O PERIODO FOR dia, CHECAR SE PELO MENOS UM DIA ESTA ESCOLHIDO
if('diaria' in periodo):
if(not (
self.check_segunda.isChecked() or
self.check_terca.isChecked() or
self.check_quarta.isChecked() or
self.check_quinta.isChecked() or
self.check_sexta.isChecked() or
self.check_sabado.isChecked() or
self.check_domingo.isChecked()
)):
return False
return True
def sair(self):
self.close() | [
"PyQt5.QtCore.pyqtSignal",
"PyQt5.QtCore.QDate.fromString",
"PyQt5.QtWidgets.QMainWindow.__init__",
"PyQt5.uic.loadUiType",
"datetime.date.today",
"controller.editar_meta_to_divisao",
"controller.listar_meta_para_editar",
"datetime.date.toordinal",
"controller.select_meta_completa"
] | [((197, 228), 'PyQt5.uic.loadUiType', 'uic.loadUiType', (['qt_tela_inicial'], {}), '(qt_tela_inicial)\n', (211, 228), False, 'from PyQt5 import QtCore, QtGui, QtWidgets, uic\n'), ((321, 340), 'PyQt5.QtCore.pyqtSignal', 'QtCore.pyqtSignal', ([], {}), '()\n', (338, 340), False, 'from PyQt5 import QtCore, QtGui, QtWidgets, uic\n'), ((383, 419), 'PyQt5.QtWidgets.QMainWindow.__init__', 'QtWidgets.QMainWindow.__init__', (['self'], {}), '(self)\n', (413, 419), False, 'from PyQt5 import QtCore, QtGui, QtWidgets, uic\n'), ((511, 551), 'controller.select_meta_completa', 'controller.select_meta_completa', (['id_meta'], {}), '(id_meta)\n', (542, 551), False, 'import controller\n'), ((662, 710), 'PyQt5.QtCore.QDate.fromString', 'QtCore.QDate.fromString', (['data_hoje', '"""yyyy-MM-dd"""'], {}), "(data_hoje, 'yyyy-MM-dd')\n", (685, 710), False, 'from PyQt5 import QtCore, QtGui, QtWidgets, uic\n'), ((1096, 1139), 'controller.listar_meta_para_editar', 'controller.listar_meta_para_editar', (['id_meta'], {}), '(id_meta)\n', (1130, 1139), False, 'import controller\n'), ((3909, 3934), 'datetime.date.toordinal', 'date.toordinal', (['dt_inicio'], {}), '(dt_inicio)\n', (3923, 3934), False, 'from datetime import date, datetime, timezone\n'), ((3955, 3980), 'datetime.date.toordinal', 'date.toordinal', (['dt_limite'], {}), '(dt_limite)\n', (3969, 3980), False, 'from datetime import date, datetime, timezone\n'), ((628, 640), 'datetime.date.today', 'date.today', ([], {}), '()\n', (638, 640), False, 'from datetime import date, datetime, timezone\n'), ((2697, 2838), 'controller.editar_meta_to_divisao', 'controller.editar_meta_to_divisao', (['self.meta_atual', "self.meta_atual['codigo']", 'dt_inicio', 'verbo', 'quantidade', 'unidade', 'dt_limite', 'periodo'], {}), "(self.meta_atual, self.meta_atual['codigo'\n ], dt_inicio, verbo, quantidade, unidade, dt_limite, periodo)\n", (2730, 2838), False, 'import controller\n'), ((3469, 3630), 'controller.editar_meta_to_divisao', 
'controller.editar_meta_to_divisao', (['self.meta_atual', "self.meta_atual['codigo']", 'dt_inicio', 'verbo', 'quantidade', 'unidade', 'dt_limite', 'periodo', 'dias_da_semana'], {}), "(self.meta_atual, self.meta_atual['codigo'\n ], dt_inicio, verbo, quantidade, unidade, dt_limite, periodo,\n dias_da_semana)\n", (3502, 3630), False, 'import controller\n')] |
import unittest
from unittest.mock import call, patch
from nestor_api.errors.config.aggregated_configuration_error import AggregatedConfigurationError
from nestor_api.errors.config.app_configuration_not_found_error import AppConfigurationNotFoundError
import nestor_api.lib.config as config
@patch("nestor_api.lib.config.io", autospec=True)
class TestConfigLibrary(unittest.TestCase):
def test_change_environment(self, io_mock):
config.change_environment("environment", "path/to/config")
io_mock.execute.assert_has_calls(
[
call("git stash", "path/to/config"),
call("git fetch origin", "path/to/config"),
call("git checkout environment", "path/to/config"),
call("git reset --hard origin/environment", "path/to/config"),
]
)
@patch("nestor_api.lib.config.Configuration", autospec=True)
def test_create_temporary_config_copy(self, configuration_mock, io_mock):
io_mock.create_temporary_copy.return_value = "/temporary/path"
configuration_mock.get_config_path.return_value = "tests/__fixtures__/config"
path = config.create_temporary_config_copy()
io_mock.create_temporary_copy.assert_called_once_with("tests/__fixtures__/config", "config")
self.assertEqual(path, "/temporary/path")
@patch("yaml_lib.read_yaml", autospec=True)
@patch("nestor_api.lib.config.get_project_config", autospec=True)
@patch("nestor_api.lib.config.Configuration", autospec=True)
def test_get_app_config(
self, configuration_mock, get_project_config_mock, read_yaml_mock, io_mock
):
# Mocks
configuration_mock.get_config_app_folder.return_value = "apps"
io_mock.exists.return_value = True
read_yaml_mock.return_value = {
"sub_domain": "backoffice",
"variables": {
"ope": {"VARIABLE_OPE_2": "ope_2_override", "VARIABLE_OPE_3": "ope_3"},
"app": {"VARIABLE_APP_2": "app_2_override", "VARIABLE_APP_3": "app_3"},
},
}
get_project_config_mock.return_value = {
"domain": "website.com",
"variables": {
"ope": {"VARIABLE_OPE_1": "ope_1", "VARIABLE_OPE_2": "ope_2"},
"app": {"VARIABLE_APP_1": "app_1", "VARIABLE_APP_2": "app_2"},
},
}
# Test
app_config = config.get_app_config("backoffice", "tests/__fixtures__/config")
# Assertions
io_mock.exists.assert_called_once_with("tests/__fixtures__/config/apps/backoffice.yaml")
read_yaml_mock.assert_called_once_with("tests/__fixtures__/config/apps/backoffice.yaml")
get_project_config_mock.assert_called_once()
self.assertEqual(
app_config,
{
"domain": "website.com",
"sub_domain": "backoffice",
"variables": {
"ope": {
"VARIABLE_OPE_1": "ope_1",
"VARIABLE_OPE_2": "ope_2_override",
"VARIABLE_OPE_3": "ope_3",
},
"app": {
"VARIABLE_APP_1": "app_1",
"VARIABLE_APP_2": "app_2_override",
"VARIABLE_APP_3": "app_3",
},
},
},
)
@patch("nestor_api.lib.config.Configuration", autospec=True)
def test_get_app_config_when_not_found(self, configuration_mock, io_mock):
io_mock.exists.return_value = False
configuration_mock.get_config_app_folder.return_value = "apps"
with self.assertRaises(AppConfigurationNotFoundError) as context:
config.get_app_config("some-app", "/some/path")
self.assertEqual("Configuration file not found for app: some-app", str(context.exception))
def test_get_processes(self, _io_mock):
app_config = {
"name": "my-app",
"processes": [
{"name": "web", "start_command": "./web", "is_cronjob": False,},
{"name": "cleaner", "start_command": "./clean", "is_cronjob": True,},
{"name": "worker", "start_command": "./worker", "is_cronjob": False,},
],
}
applications = config.get_processes(app_config)
self.assertEqual(
applications,
[
{"name": "web", "start_command": "./web", "is_cronjob": False,},
{"name": "worker", "start_command": "./worker", "is_cronjob": False,},
],
)
def test_get_cronjobs(self, _io_mock):
app_config = {
"name": "my-app",
"processes": [
{"name": "web", "start_command": "./web", "is_cronjob": False,},
{"name": "cleaner", "start_command": "./clean", "is_cronjob": True,},
{"name": "worker", "start_command": "./worker", "is_cronjob": False,},
],
}
cronjobs = config.get_cronjobs(app_config)
self.assertEqual(
cronjobs, [{"name": "cleaner", "start_command": "./clean", "is_cronjob": True,}]
)
def test_get_deployments(self, _io_mock):
project_config = {
"project": "my-project",
"spec": {"spec_1": "default_spec_1", "spec_2": "default_spec_2",},
"deployments": [
{"cluster": "cluster_1", "spec": {"spec_1": "spec_deployment_1"}},
{"cluster": "cluster_2", "spec": {"spec_2": "spec_deployment_2"}},
],
}
deployments = config.get_deployments(project_config)
self.assertEqual(
deployments,
[
{
"cluster": "cluster_1",
"project": "my-project",
"spec": {"spec_1": "spec_deployment_1", "spec_2": "default_spec_2"},
},
{
"cluster": "cluster_2",
"project": "my-project",
"spec": {"spec_1": "default_spec_1", "spec_2": "spec_deployment_2"},
},
],
)
@patch("yaml_lib.read_yaml", autospec=True)
@patch("nestor_api.lib.config.Configuration", autospec=True)
def test_get_project_config(self, configuration_mock, read_yaml_mock, io_mock):
# Mocks
configuration_mock.get_config_project_filename.return_value = "project.yaml"
io_mock.exists.return_value = True
read_yaml_mock.return_value = {
"domain": "website.com",
"variables": {
"ope": {"VARIABLE_OPE_1": "ope_1", "VARIABLE_OPE_2": "ope_2"},
"app": {"VARIABLE_APP_1": "app_1", "VARIABLE_APP_2": "app_2"},
},
}
# Test
environment_config = config.get_project_config("tests/__fixtures__/config")
# Assertions
io_mock.exists.assert_called_once_with("tests/__fixtures__/config/project.yaml")
read_yaml_mock.assert_called_once_with("tests/__fixtures__/config/project.yaml")
self.assertEqual(
environment_config,
{
"domain": "website.com",
"variables": {
"ope": {"VARIABLE_OPE_1": "ope_1", "VARIABLE_OPE_2": "ope_2"},
"app": {"VARIABLE_APP_1": "app_1", "VARIABLE_APP_2": "app_2"},
},
},
)
@patch("nestor_api.lib.config.Configuration", autospec=True)
def test_get_project_config_when_not_found(self, configuration_mock, io_mock):
io_mock.exists.return_value = False
configuration_mock.get_config_project_filename.return_value = "project.yaml"
with self.assertRaises(FileNotFoundError) as context:
config.get_project_config("/some/path")
self.assertEqual(
"[Errno 2] No such file or directory: '/some/path/project.yaml'", str(context.exception)
)
@patch.dict("nestor_api.lib.config.os.environ", {"VALUE_IN_ENV": "value_in_env"})
def test_resolve_variables_deep(self, _io_mock):
result = config._resolve_variables_deep(
{
"A": "value_a",
"B": "value_b",
"C": {
"C1": "__{{A}}__",
"C2": "__{{key_not_present}}__",
"C3": "__{{A}}__{{B}}__{{key_not_present}}__",
"C4": "A",
"C5": "{{C1}}__{{key-with.special_characters}}",
},
"D": [
"value_d1",
"__{{A}}__",
"__{{key_not_present}}__",
"__{{A}}__{{B}}__{{key_not_present}}__",
],
"E": [{"E1": {"E11": "deep__{{A}}__"}}],
"F": 42,
"key-with.special_characters": "amazing-value",
"env_var": "$VALUE_IN_ENV",
"env_var_missing": "$VALUE_NOT_IN_ENV",
}
)
self.assertEqual(
result,
{
"A": "value_a",
"B": "value_b",
"C": {
"C1": "__value_a__",
"C2": "__{{key_not_present}}__",
"C3": "__value_a__value_b__{{key_not_present}}__",
"C4": "A",
"C5": "{{C1}}__amazing-value",
},
"D": [
"value_d1",
"__value_a__",
"__{{key_not_present}}__",
"__value_a__value_b__{{key_not_present}}__",
],
"E": [{"E1": {"E11": "deep__value_a__"}}],
"F": 42,
"key-with.special_characters": "amazing-value",
"env_var": "value_in_env",
"env_var_missing": "$VALUE_NOT_IN_ENV",
},
)
def test_resolve_variables_deep_with_invalid_reference(self, _io_mock):
with self.assertRaises(AggregatedConfigurationError) as context:
config._resolve_variables_deep(
{
"error": {},
"simple_key": "__{{error}}__",
"array": ["0", "1", "2__{{error}}__",],
"dict": {"a": "val_a", "b": "{{error}}",},
"deep_dict": {
"sub_dict": {"a": "val_a", "b": "{{error}}"},
"sub_array": [
{"a": "{{error}}", "b": "val_b"},
{"a": "val_a", "b": "{{error}}"},
],
},
}
)
err = context.exception
self.assertEqual(
err,
{
"value": "Invalid configuration",
"errors": [
{
"path": "CONFIG.simple_key",
"message": "Referenced variable should resolved to a string",
},
{
"path": "CONFIG.array[2]",
"message": "Referenced variable should resolved to a string",
},
{
"path": "CONFIG.dict.b",
"message": "Referenced variable should resolved to a string",
},
{
"path": "CONFIG.deep_dict.sub_dict.b",
"message": "Referenced variable should resolved to a string",
},
{
"path": "CONFIG.deep_dict.sub_array[0].a",
"message": "Referenced variable should resolved to a string",
},
{
"path": "CONFIG.deep_dict.sub_array[1].b",
"message": "Referenced variable should resolved to a string",
},
],
},
)
@patch("nestor_api.lib.config.os.path.isdir", autospec=True)
@patch("nestor_api.lib.config.os.listdir", autospec=True)
@patch("nestor_api.lib.config.get_app_config", autospec=True)
def test_list_apps_config(self, get_app_config_mock, listdir_mock, isdir_mock, _io_mock):
"""Should return a dictionary of apps config."""
isdir_mock.return_value = True
listdir_mock.return_value = [
"path/to/app-1.yml",
"path/to/app-2.yaml",
"path/to/app-3.ext",
"path/to/dir/",
]
def yaml_side_effect(arg):
# pylint: disable=no-else-return
if arg == "app-1":
return {"name": "app-1", "config_key": "value for app-1"}
elif arg == "app-2":
return {"name": "app-2", "config_key": "value for app-2"}
return None
get_app_config_mock.side_effect = yaml_side_effect
result = config.list_apps_config("test")
self.assertEqual(
result,
{
"app-1": {"name": "app-1", "config_key": "value for app-1"},
"app-2": {"name": "app-2", "config_key": "value for app-2"},
},
)
@patch("nestor_api.lib.config.os.path.isdir", autospec=True)
def test_list_apps_config_with_incorrect_apps_path(self, is_dir_mock, _io_mock):
"""Should return a dictionary of apps config."""
is_dir_mock.return_value = False
with self.assertRaisesRegex(ValueError, "test/apps"):
config.list_apps_config("test")
| [
"nestor_api.lib.config.get_cronjobs",
"unittest.mock.patch.dict",
"nestor_api.lib.config.get_project_config",
"nestor_api.lib.config.get_app_config",
"nestor_api.lib.config.change_environment",
"nestor_api.lib.config.list_apps_config",
"unittest.mock.call",
"nestor_api.lib.config._resolve_variables_de... | [((295, 343), 'unittest.mock.patch', 'patch', (['"""nestor_api.lib.config.io"""'], {'autospec': '(True)'}), "('nestor_api.lib.config.io', autospec=True)\n", (300, 343), False, 'from unittest.mock import call, patch\n'), ((850, 909), 'unittest.mock.patch', 'patch', (['"""nestor_api.lib.config.Configuration"""'], {'autospec': '(True)'}), "('nestor_api.lib.config.Configuration', autospec=True)\n", (855, 909), False, 'from unittest.mock import call, patch\n'), ((1357, 1399), 'unittest.mock.patch', 'patch', (['"""yaml_lib.read_yaml"""'], {'autospec': '(True)'}), "('yaml_lib.read_yaml', autospec=True)\n", (1362, 1399), False, 'from unittest.mock import call, patch\n'), ((1405, 1469), 'unittest.mock.patch', 'patch', (['"""nestor_api.lib.config.get_project_config"""'], {'autospec': '(True)'}), "('nestor_api.lib.config.get_project_config', autospec=True)\n", (1410, 1469), False, 'from unittest.mock import call, patch\n'), ((1475, 1534), 'unittest.mock.patch', 'patch', (['"""nestor_api.lib.config.Configuration"""'], {'autospec': '(True)'}), "('nestor_api.lib.config.Configuration', autospec=True)\n", (1480, 1534), False, 'from unittest.mock import call, patch\n'), ((3417, 3476), 'unittest.mock.patch', 'patch', (['"""nestor_api.lib.config.Configuration"""'], {'autospec': '(True)'}), "('nestor_api.lib.config.Configuration', autospec=True)\n", (3422, 3476), False, 'from unittest.mock import call, patch\n'), ((6211, 6253), 'unittest.mock.patch', 'patch', (['"""yaml_lib.read_yaml"""'], {'autospec': '(True)'}), "('yaml_lib.read_yaml', autospec=True)\n", (6216, 6253), False, 'from unittest.mock import call, patch\n'), ((6259, 6318), 'unittest.mock.patch', 'patch', (['"""nestor_api.lib.config.Configuration"""'], {'autospec': '(True)'}), "('nestor_api.lib.config.Configuration', autospec=True)\n", (6264, 6318), False, 'from unittest.mock import call, patch\n'), ((7494, 7553), 'unittest.mock.patch', 'patch', 
(['"""nestor_api.lib.config.Configuration"""'], {'autospec': '(True)'}), "('nestor_api.lib.config.Configuration', autospec=True)\n", (7499, 7553), False, 'from unittest.mock import call, patch\n'), ((8025, 8110), 'unittest.mock.patch.dict', 'patch.dict', (['"""nestor_api.lib.config.os.environ"""', "{'VALUE_IN_ENV': 'value_in_env'}"], {}), "('nestor_api.lib.config.os.environ', {'VALUE_IN_ENV': 'value_in_env'}\n )\n", (8035, 8110), False, 'from unittest.mock import call, patch\n'), ((12234, 12293), 'unittest.mock.patch', 'patch', (['"""nestor_api.lib.config.os.path.isdir"""'], {'autospec': '(True)'}), "('nestor_api.lib.config.os.path.isdir', autospec=True)\n", (12239, 12293), False, 'from unittest.mock import call, patch\n'), ((12299, 12355), 'unittest.mock.patch', 'patch', (['"""nestor_api.lib.config.os.listdir"""'], {'autospec': '(True)'}), "('nestor_api.lib.config.os.listdir', autospec=True)\n", (12304, 12355), False, 'from unittest.mock import call, patch\n'), ((12361, 12421), 'unittest.mock.patch', 'patch', (['"""nestor_api.lib.config.get_app_config"""'], {'autospec': '(True)'}), "('nestor_api.lib.config.get_app_config', autospec=True)\n", (12366, 12421), False, 'from unittest.mock import call, patch\n'), ((13460, 13519), 'unittest.mock.patch', 'patch', (['"""nestor_api.lib.config.os.path.isdir"""'], {'autospec': '(True)'}), "('nestor_api.lib.config.os.path.isdir', autospec=True)\n", (13465, 13519), False, 'from unittest.mock import call, patch\n'), ((444, 502), 'nestor_api.lib.config.change_environment', 'config.change_environment', (['"""environment"""', '"""path/to/config"""'], {}), "('environment', 'path/to/config')\n", (469, 502), True, 'import nestor_api.lib.config as config\n'), ((1161, 1198), 'nestor_api.lib.config.create_temporary_config_copy', 'config.create_temporary_config_copy', ([], {}), '()\n', (1196, 1198), True, 'import nestor_api.lib.config as config\n'), ((2425, 2489), 'nestor_api.lib.config.get_app_config', 'config.get_app_config', 
(['"""backoffice"""', '"""tests/__fixtures__/config"""'], {}), "('backoffice', 'tests/__fixtures__/config')\n", (2446, 2489), True, 'import nestor_api.lib.config as config\n'), ((4334, 4366), 'nestor_api.lib.config.get_processes', 'config.get_processes', (['app_config'], {}), '(app_config)\n', (4354, 4366), True, 'import nestor_api.lib.config as config\n'), ((5050, 5081), 'nestor_api.lib.config.get_cronjobs', 'config.get_cronjobs', (['app_config'], {}), '(app_config)\n', (5069, 5081), True, 'import nestor_api.lib.config as config\n'), ((5645, 5683), 'nestor_api.lib.config.get_deployments', 'config.get_deployments', (['project_config'], {}), '(project_config)\n', (5667, 5683), True, 'import nestor_api.lib.config as config\n'), ((6879, 6933), 'nestor_api.lib.config.get_project_config', 'config.get_project_config', (['"""tests/__fixtures__/config"""'], {}), "('tests/__fixtures__/config')\n", (6904, 6933), True, 'import nestor_api.lib.config as config\n'), ((8176, 8704), 'nestor_api.lib.config._resolve_variables_deep', 'config._resolve_variables_deep', (["{'A': 'value_a', 'B': 'value_b', 'C': {'C1': '__{{A}}__', 'C2':\n '__{{key_not_present}}__', 'C3':\n '__{{A}}__{{B}}__{{key_not_present}}__', 'C4': 'A', 'C5':\n '{{C1}}__{{key-with.special_characters}}'}, 'D': ['value_d1',\n '__{{A}}__', '__{{key_not_present}}__',\n '__{{A}}__{{B}}__{{key_not_present}}__'], 'E': [{'E1': {'E11':\n 'deep__{{A}}__'}}], 'F': 42, 'key-with.special_characters':\n 'amazing-value', 'env_var': '$VALUE_IN_ENV', 'env_var_missing':\n '$VALUE_NOT_IN_ENV'}"], {}), "({'A': 'value_a', 'B': 'value_b', 'C': {'C1':\n '__{{A}}__', 'C2': '__{{key_not_present}}__', 'C3':\n '__{{A}}__{{B}}__{{key_not_present}}__', 'C4': 'A', 'C5':\n '{{C1}}__{{key-with.special_characters}}'}, 'D': ['value_d1',\n '__{{A}}__', '__{{key_not_present}}__',\n '__{{A}}__{{B}}__{{key_not_present}}__'], 'E': [{'E1': {'E11':\n 'deep__{{A}}__'}}], 'F': 42, 'key-with.special_characters':\n 'amazing-value', 'env_var': '$VALUE_IN_ENV', 
'env_var_missing':\n '$VALUE_NOT_IN_ENV'})\n", (8206, 8704), True, 'import nestor_api.lib.config as config\n'), ((13183, 13214), 'nestor_api.lib.config.list_apps_config', 'config.list_apps_config', (['"""test"""'], {}), "('test')\n", (13206, 13214), True, 'import nestor_api.lib.config as config\n'), ((3758, 3805), 'nestor_api.lib.config.get_app_config', 'config.get_app_config', (['"""some-app"""', '"""/some/path"""'], {}), "('some-app', '/some/path')\n", (3779, 3805), True, 'import nestor_api.lib.config as config\n'), ((7841, 7880), 'nestor_api.lib.config.get_project_config', 'config.get_project_config', (['"""/some/path"""'], {}), "('/some/path')\n", (7866, 7880), True, 'import nestor_api.lib.config as config\n'), ((10141, 10459), 'nestor_api.lib.config._resolve_variables_deep', 'config._resolve_variables_deep', (["{'error': {}, 'simple_key': '__{{error}}__', 'array': ['0', '1',\n '2__{{error}}__'], 'dict': {'a': 'val_a', 'b': '{{error}}'},\n 'deep_dict': {'sub_dict': {'a': 'val_a', 'b': '{{error}}'}, 'sub_array':\n [{'a': '{{error}}', 'b': 'val_b'}, {'a': 'val_a', 'b': '{{error}}'}]}}"], {}), "({'error': {}, 'simple_key': '__{{error}}__',\n 'array': ['0', '1', '2__{{error}}__'], 'dict': {'a': 'val_a', 'b':\n '{{error}}'}, 'deep_dict': {'sub_dict': {'a': 'val_a', 'b': '{{error}}'\n }, 'sub_array': [{'a': '{{error}}', 'b': 'val_b'}, {'a': 'val_a', 'b':\n '{{error}}'}]}})\n", (10171, 10459), True, 'import nestor_api.lib.config as config\n'), ((13778, 13809), 'nestor_api.lib.config.list_apps_config', 'config.list_apps_config', (['"""test"""'], {}), "('test')\n", (13801, 13809), True, 'import nestor_api.lib.config as config\n'), ((576, 611), 'unittest.mock.call', 'call', (['"""git stash"""', '"""path/to/config"""'], {}), "('git stash', 'path/to/config')\n", (580, 611), False, 'from unittest.mock import call, patch\n'), ((629, 671), 'unittest.mock.call', 'call', (['"""git fetch origin"""', '"""path/to/config"""'], {}), "('git fetch origin', 'path/to/config')\n", (633, 
671), False, 'from unittest.mock import call, patch\n'), ((689, 739), 'unittest.mock.call', 'call', (['"""git checkout environment"""', '"""path/to/config"""'], {}), "('git checkout environment', 'path/to/config')\n", (693, 739), False, 'from unittest.mock import call, patch\n'), ((757, 818), 'unittest.mock.call', 'call', (['"""git reset --hard origin/environment"""', '"""path/to/config"""'], {}), "('git reset --hard origin/environment', 'path/to/config')\n", (761, 818), False, 'from unittest.mock import call, patch\n')] |
"""
Epsagon's init.
"""
from __future__ import absolute_import
import os
from .utils import init
from .patcher import patch_all
from .constants import __version__
from .trace import tracer
def dummy_wrapper(func):
"""
A dummy wrapper for when Epsagon is disabled
:param func: The function to wrap
:return: The same function, unchanged
"""
return func
if os.environ.get('DISABLE_EPSAGON') == 'TRUE':
os.environ['DISABLE_EPSAGON_PATCH'] = 'TRUE'
lambda_wrapper = dummy_wrapper # pylint: disable=C0103
step_lambda_wrapper = dummy_wrapper # pylint: disable=C0103
azure_wrapper = dummy_wrapper # pylint: disable=C0103
python_wrapper = dummy_wrapper # pylint: disable=C0103
EpsagonFlask = dummy_wrapper # pylint: disable=C0103
else:
# Environments.
from .wrappers import (
lambda_wrapper,
step_lambda_wrapper,
azure_wrapper,
python_wrapper
)
# Frameworks.
try:
from .wrappers.flask import FlaskWrapper as flask_wrapper
except ImportError:
flask_wrapper = dummy_wrapper
# pylint: disable=C0103
log = tracer.add_log
# pylint: disable=C0103
error = tracer.add_error
__all__ = ['lambda_wrapper', 'azure_wrapper', 'python_wrapper', 'init',
'step_lambda_wrapper', 'flask_wrapper', 'log', 'error']
# The modules are patched only if DISABLE_EPSAGON_PATCH variable is NOT 'TRUE'
if os.environ.get('DISABLE_EPSAGON_PATCH') != 'TRUE':
patch_all()
| [
"os.environ.get"
] | [((383, 416), 'os.environ.get', 'os.environ.get', (['"""DISABLE_EPSAGON"""'], {}), "('DISABLE_EPSAGON')\n", (397, 416), False, 'import os\n'), ((1414, 1453), 'os.environ.get', 'os.environ.get', (['"""DISABLE_EPSAGON_PATCH"""'], {}), "('DISABLE_EPSAGON_PATCH')\n", (1428, 1453), False, 'import os\n')] |
import unittest
import numpy as np
from mlscratch.models import losses
class TestBinaryCrossEntropy(unittest.TestCase):
def setUp(self):
self.y_true = np.array([0, 1, 0.5])
self.y_pred = np.array([0, 1, 0.5])
def test_return(self):
bce = losses.binary_cross_entropy(self.y_true, self.y_pred)
self.assertIsInstance(bce, np.float64)
class TestMeanSquaredError(unittest.TestCase):
def setUp(self):
self.y_true = np.array([0, 1])
self.y_pred = np.array([0.3, 0.4])
def test_return(self):
mse = losses.mean_squared_error(self.y_true, self.y_pred)
self.assertIsInstance(mse, np.float64)
| [
"mlscratch.models.losses.binary_cross_entropy",
"numpy.array",
"mlscratch.models.losses.mean_squared_error"
] | [((168, 189), 'numpy.array', 'np.array', (['[0, 1, 0.5]'], {}), '([0, 1, 0.5])\n', (176, 189), True, 'import numpy as np\n'), ((212, 233), 'numpy.array', 'np.array', (['[0, 1, 0.5]'], {}), '([0, 1, 0.5])\n', (220, 233), True, 'import numpy as np\n'), ((276, 329), 'mlscratch.models.losses.binary_cross_entropy', 'losses.binary_cross_entropy', (['self.y_true', 'self.y_pred'], {}), '(self.y_true, self.y_pred)\n', (303, 329), False, 'from mlscratch.models import losses\n'), ((470, 486), 'numpy.array', 'np.array', (['[0, 1]'], {}), '([0, 1])\n', (478, 486), True, 'import numpy as np\n'), ((509, 529), 'numpy.array', 'np.array', (['[0.3, 0.4]'], {}), '([0.3, 0.4])\n', (517, 529), True, 'import numpy as np\n'), ((572, 623), 'mlscratch.models.losses.mean_squared_error', 'losses.mean_squared_error', (['self.y_true', 'self.y_pred'], {}), '(self.y_true, self.y_pred)\n', (597, 623), False, 'from mlscratch.models import losses\n')] |
import argparse
import base64
import datetime
import hashlib
import os
import random
import sqlite3
import ssl
import subprocess
import sys
import tempfile
import threading
try:
import secrets
except ImportError:
import string
class secrets:
def token_urlsafe(size):
return (''.join(random.SystemRandom().choice(string.printable.strip().translate({ord(c): None for c in '$:#?@&/\|`<>~.!;{}[]()\'"'})) for i in range(size)))
from http.server import HTTPServer, BaseHTTPRequestHandler
from socketserver import ThreadingMixIn
OPENSSL_PATH = '/usr/bin/openssl'
SSL_PROTOCOL = ssl.PROTOCOL_TLSv1_2
SSL_CIPHERS = 'EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH' # https://cipherli.st/
STATE = 'ethereal'
USER_CODE_LENGTH = 6 # Code length 5: Code length 6: Code length 7:
# users probabilty (single guess) / users probabilty (single guess) / users probabilty (single guess)
# 1 1:100,000 \ 1 1:1,000,000 \ 1 1:10,000,000
# 15 1:6,667 / 15 1:66,667 / 15 1:666,667
# 25 1:4000 \ 25 1:40,000 \ 25 1:400,000
# 100 1:1000 ! / 100 1:10,000 / 100 1:100,000
# 250 1:400 !! \ 250 1:4,000 \ 250 1:40,000
def callback_function(conn, cert, errno, depth, result):
print(cert)
return True
# Okay, let's get started...
def main(args):
outer_realm = 'default'
database_name = 'ostiary.db'
if args.realm:
outer_realm = args.realm
database_name = 'ostiary-{:s}.db'.format(outer_realm.lower())
database_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), database_name)
database_exists = os.path.exists(database_file)
database_connection = sqlite3.connect(database_file, check_same_thread=False, detect_types=sqlite3.PARSE_DECLTYPES)
database_connection.row_factory = sqlite3.Row # This lets us refer to database results by column name
def die(error):
cleanup(database_connection)
print("ERROR: {0}".format(error))
sys.exit(1)
if not database_exists:
if not args.quiet: print("Creating database at {0}".format(database_file))
setup(database_connection, outer_realm)
if args.no_ssl:
if not args.quiet: print("WARNING! Operating with SSL disabled.\nAccess codes and admin credentials will be sent in the clear.\nUSE FOR TESTING PURPOSES ONLY")
port = 8080
use_ssl = False
else:
port = 4433
use_ssl = True
if args.port: port = args.port
if use_ssl:
ssl_context = ssl.SSLContext(protocol=SSL_PROTOCOL)
ssl_context.check_hostname=False
ssl_context.verify_mode = ssl.CERT_OPTIONAL
ssl_context.set_ciphers(SSL_CIPHERS)
# ssl_context.load_verify_locations("keypad_fw/public.cert")
if args.cert_file:
try:
ssl_context.load_cert_chain(args.cert_file, keyfile=args.key_file)
except ssl.SSLError as e:
additional_info = "\nIf the key is not included in the certificate file be sure to specify --key-file" if not args.key_file else ""
die("{0}: Could not validate ssl certificate{1}".format(args.cert_file, additional_info))
except Exception as e:
die("{0}: {1}".format(args.cert_file, str(e)))
if not args.quiet: print("Adding {:s} to local certificate store.".format(args.cert_file))
if get_certificate(database_connection, args.host) is not None:
delete_certificate(database_connection, args.host)
with open(args.cert_file, 'r') as cert_data:
save_certificate(database_connection, args.host, cert_data.read())
else:
compound_cert = get_certificate(database_connection, args.host, cert_only=False)
if get_cert_expiration(compound_cert) < datetime.datetime.utcnow():
if not args.quiet: print("Certificate is expired!")
delete_certificate(database_connection, args.host)
if compound_cert is None or get_cert_expiration(compound_cert) < datetime.datetime.utcnow():
if not args.quiet: print("Generating self-signed certificate...")
certificate, key = generate_ssl_cert(state_name=STATE, locality_name=outer_realm, common_name=args.host, hush = not args.verbose)
compound_cert = certificate + key # TODO: Implement hardware-based key storage
save_certificate(database_connection, args.host, compound_cert)
temporary_certificate_path = temporary_path(compound_cert) # We have to do this because SSLContext wont accept file like objects, just an actual path
ssl_context.load_cert_chain(temporary_certificate_path)
os.remove(temporary_certificate_path)
print(get_cert_expiration(compound_cert))
if args.verbose: print(get_certificate(database_connection, args.host))
class HTTPServerThreads(ThreadingMixIn, HTTPServer):
database = database_connection # This allows us to access the database from within RequestHandler using self.server.database
database_lock = threading.RLock() # And since we're only using the one database connection with a multithreaded server we will have to lock it on writes
default_realm = outer_realm.lower()
http_server = HTTPServerThreads((args.host, port), RequestHandler)
if use_ssl: http_server.socket = ssl_context.wrap_socket(http_server.socket, server_side=True)
if not args.verbose: http_server.RequestHandlerClass.log_message = lambda *args: None
if not args.quiet: print("Starting server on {0}:{1}".format(args.host, port))
if not args.quiet: print("(<Ctrl>-C to exit)")
try:
http_server.serve_forever()
except KeyboardInterrupt:
print('', end='\r')
if not args.quiet: print("Goodbye")
cleanup(database_connection)
# Authorizes (or not) a user to access resources of a realm
# Logs said access if successful
# usage:
# GET /<realm>/auth/<n digit user code>
# where n is USER_CODE_LENGTH
def auth (db, lock, method, realm, args, **kw):
    """Authorize a user code against a realm and log the access.

    GET /<realm>/auth/<n digit user code>, where n is USER_CODE_LENGTH.
    Returns an (http_status, message) tuple.
    """
    # Fixed: value comparisons use ==/!= instead of 'is'/'is not' -- identity
    # on string/int literals only works by CPython interning accident.
    if method != 'GET': return(400, "Invalid request")
    if len(args) != 1: return(400, "Invalid arguments")
    user_code = args[0]
    if user_code.isnumeric() and len(user_code) == USER_CODE_LENGTH:
        name = check_user_auth(db, user_code, realm)
        if name:
            log_access(db, lock, realm, name)
            return(200, "Auth") # TODO: Implement rentdue response
        else:
            return(401, "No auth")
    else:
        return(400, "Invalid user code")
# Lets admins list and get information about system users
# Also lets admins set or remove fields in the user database and add or delete users
# usage:
# GET /<default realm>/users - List all users of the system and their extended information
# GET /<default realm>/users/<username>/<field name> - Show the value of a specific user field (comment, created, expires, uses, rentdue, frozen)
# GET /<realm>/users/ - List authorized users of the realm and their last auth time
# GET /<realm>/users/<username> - List information for a specific user
# PUT /<realm>/users/<new username> - Create a new user and add them to the given realm, returns the new user's access code
# PUT /<default realm>/users/<username>/<field name>/<value> - Set the value of an editable field (comment, exipires, uses, rentdue, frozen) on a given user
# DELETE /<realm>/users/<username> - Delete user from realm
def users (db, lock, method, realm, args, **kw):
    """List, inspect, create, modify and delete users (admin API).

    See the endpoint grammar in the comment block above. Returns an
    (http_status, message) tuple.
    """
    # Fixed: value comparisons use == instead of 'is' (identity on literals
    # is a CPython interning accident, and a SyntaxWarning on 3.8+).
    if len(args) > 0: user_name = args[0].replace('%20', ' ')
    if method == 'GET':
        if len(args) == 0: return(200, "\n".join(normalize_nested_list(admin_list_users(db, realm)))) # List users
        user_info = get_user_info(db, realm, user_name)
        if not user_info: return(404, "User not found")
        if len(args) == 1: return(200, ", ".join([str(col) for col in user_info])) # List info of specific user
        try:
            info = user_info[args[1]]
        except IndexError:
            return(400, "Invalid arguments")
        if len(args) == 2: return(200, str(info)) # Show value of specific field for specific user
        return(400, "Invalid request")
    if method == 'PUT':
        if len(args) < 1: return(400, "Invalid arguments")
        user_info = get_user_info(db, None, user_name)
        if len(args) == 1: # Add user
            if user_info:
                if realm:
                    if get_user_info(db, realm, user_name): return(409, "User already exists")
                    if add_user_to_realm(db, lock, user_name, realm): return(200, "OK")
                return(409, "User already exists")
            return(200, str(create_user(db, lock, user_name, realm=realm)).zfill(USER_CODE_LENGTH))
        if len(args) == 3: # Set value of field on user
            if realm: return(400, "Invalid arguments")
            if not user_info: return(404, "User not found")
            try:
                user_info[args[1]]
            except IndexError:
                return(400, "Invalid arguments")
            if args[1] in ['code', 'created']: return(451, "Cannot edit static field")
            if not update_user(db, lock, user_name, args[1], args[2]): return(406, "Not acceptable")
            return(200, "OK")
        return(400, "Invalid arguments")
    if method == 'DELETE':
        if len(args) < 1: return(400, "Invalid arguments")
        user_info = get_user_info(db, realm, user_name)
        if len(args) == 1: # Delete user
            if not user_info: return(404, "User not found")
            if delete_user(db, lock, user_name, realm=realm): return(200, "OK")
        return(400, "Invalid arguments")
    return(400, "Invalid request")
# Mostly lets 'admin' add and remove other admins
# But also lets admins change their secret
# usage:
# GET /<default realm>/admins/ - List all system administrators
# GET /<realm>/admins/ - List all administrators of <realm>
# PUT /<default realm>/admins/<new adminname>/<secret> - Creates a new adminstrator with password <secret>
# PUT /<realm>/admins/<adminname> - Adds <adminname> as an administrator of <realm>
# DELETE /<default realm>/admins/<adminname> - Deletes <adminname> from the system
# DELETE /<realm>/admins/<adminname> - Removes <adminname> as an administrator of <realm>
# PUT /<default realm>/admins/secret/<new secret> - Changes your secret to <new secret>
def admins (db, lock, method, realm, args, **kw):
    """List realm/system administrators; add/remove is not implemented yet.

    Returns an (http_status, message) tuple.
    """
    # Fixed: == instead of 'is' for string comparison.
    if method == 'GET':
        if len(args) == 0: return(200, "\n".join(list_admins(db, realm)))
        return(400, "Invalid arguments")
    if method == 'PUT':
        pass # TODO: Implement me
    if method == 'DELETE':
        pass # TODO: Implement me
    return(400, "Invalid request")
def certificates (db, lock, method, realm, args, **kw):
    """Certificate management endpoint -- placeholder, always answers 200."""
    return (200, "Certs")  # TODO: Implement me
# Allows an endpoint device (keypad) to register for a given realm
# Only one device allowed per realm
# usage:
# GET /<realm>/register/ - Registers calling device as accessor of <realm>, returns user code length
def register (db, lock, method, realm, args, **kw):
    """Register the calling device (keypad) as the accessor of <realm>.

    GET /<realm>/register/ returns the user code length the keypad must
    collect. Only one device per realm is intended.
    """
    # Fixed: == instead of 'is' for string comparison.
    if method == 'GET':
        print(kw['peercert'])
        return(200, str(USER_CODE_LENGTH))
    return(400, "Invalid request")
# And here's the machinery...
class RequestHandler(BaseHTTPRequestHandler):
    """Routes HTTP requests to the API functions above and enforces admin ACLs."""
    functions = {'auth':auth,'users':users,'admins':admins,'certificates':certificates,'register':register}
    # Per-request permission flags; authenticate_admin() sets these as a side-effect.
    read_realm = False
    write_realm = False
    admin = False
    # Here we hook the internal parse_request() method to set up some ground state
    # While we're here lets apply some hax so our server doesn't balk at spaces
    def parse_request(self, *args, **kwargs):
        raw_request = str(self.raw_requestline, 'iso-8859-1') # .-----.
        request_path = raw_request[raw_request.index(' ')+1:raw_request.rindex(' ')] # < Hax |
        self.raw_requestline = raw_request.replace(request_path, request_path.replace(' ', '%20')).encode('iso-8859-1') # '-----'
        return_to_caller = super().parse_request(*args, **kwargs)
        try:
            self.realm, self.function, self.args = parse_request_path(self.path)
        except ValueError:
            self.send_error(400, "Invalid function request")
            return False
        return(return_to_caller)
    def process_request(self, method):
        """Dispatch the parsed request to the matching API function and send its reply."""
        in_outer_realm = False
        realm, function, args = self.realm, self.function, self.args
        if not realm_exists(self.server.database, realm):
            if realm != self.server.default_realm:
                self.send_error(404, "No such realm")
                return
            else:
                in_outer_realm = True
        try:
            run_func = self.functions[function.lower()]
        except KeyError:
            self.send_error(400, "Invalid request")
            return
        code, message = run_func(self.server.database,
                                 self.server.database_lock,
                                 method,
                                 None if in_outer_realm else realm.lower(),
                                 [arg for arg in args],
                                 peercert=self.connection.getpeercert())
        # Fixed: == for the 2xx status-class check -- 'is' on small ints only
        # works by CPython integer caching.
        if int(str(code)[:1]) == 2:
            self.send_response(code)
            self.end_headers()
            try:
                self.wfile.write(bytes(message, 'utf-8'))
            except ssl.SSLEOFError:
                print("sslsocket: Premature EOF") #TODO: Fix me
            self.wfile.write(bytes('\n', 'utf-8'))
        else:
            self.send_error(code, message)
    def do_GET(self):
        if self.check_auth('read_realm'): self.process_request('GET')
    def do_PUT(self):
        if self.check_auth('write_realm'): self.process_request('PUT')
    def do_DELETE(self):
        if self.check_auth('write_realm'): self.process_request('DELETE')
    def check_auth(self, permission):
        """Decide whether this request may proceed; sends the 403 itself when not."""
        realm, function, args = self.realm, self.function, self.args
        admin_credentials = authorization_credentials(self.headers)
        if not admin_credentials: # Most of the functionality can only be used by admins, so whitelist the things a non-admin can do
            if self.command != 'GET':
                self.send_error(403, "Access not allowed") # non-admins can read-only
                return False
            # Fixed: == instead of 'is' for the length comparisons below.
            if function == 'auth' and len(args) == 1: return True # non-admins can see if a user code is authorized to access the realm
            if function == 'certificates' and len(args) == 0: return True #
            if function == 'register' and len(args) == 0: return True # non-admins (keypads) can register as gatekeepers of a realm
            self.send_error(403, "Access not allowed")
            return False
        if not self.authenticate_admin(realm, admin_credentials):
            self.send_error(403, "Access not allowed")
            return False
        check_permission = self.__dict__[permission] # This check must be performed after authenticate_admin() has been called,
        if not check_permission:                     # since that function sets read_realm and write_realm as a side-effect
            self.send_error(403, "Access not allowed")
            return False
        return True
    def authenticate_admin(self, realm, admin_credentials):
        """Verify admin credentials and load this admin's read/write rights for *realm*."""
        if not admin_credentials: return False
        admin_username, admin_password = admin_credentials
        query = "select * from admins where name = (?)"
        result = self.server.database.execute(query, [admin_username])
        row = result.fetchone()
        if not row: return False # Potential timing attack for admin username discovery
        if row['secret'] == hash_password(admin_password, row['salt'])[0]:
            self.admin = True
            if admin_username == 'admin': # One admin to rule them all and in the darkness bind them
                self.read_realm = self.write_realm = True
                return True
            query = "select * from admin_acl where name = (?) and realm = (?)"
            result = self.server.database.execute(query, [admin_username, realm])
            row = result.fetchone()
            if row and (row['read'] or row['write']):
                self.read_realm, self.write_realm = row['read'], row['write']
                return True
        return False
    def version_string(self):
        """Hide the real server banner; admins get a friendlier one."""
        if self.admin: return("Cookie crisp!")
        return("YOU SHALL NOT PASS!")
# Some convenience functions...
def authorization_credentials (headers):
    """Extract [username, password] from a Basic Authorization header, or False.

    Bug fix: split on the first ':' only -- RFC 7617 forbids colons in the
    user-id but allows them in the password, so splitting on every ':' used
    to shatter such passwords into extra list elements.
    """
    if not headers['Authorization']: return False
    auth_type, encoded_auth_string = headers['Authorization'].split(' ')
    return base64.b64decode(encoded_auth_string).decode('utf-8').split(':', 1)
def daemonize(fun, *args, **kwargs):
    """Run *fun(*args, **kwargs)* on a fire-and-forget daemon thread."""
    worker = threading.Thread(target=fun, args=args, kwargs=kwargs, daemon=True)
    worker.start()
def generate_ssl_cert(country_name=None, state_name=None, locality_name=None, org_name=None, unit_name=None, common_name=None, email_address=None, hush=True):
    """Create a self-signed certificate by shelling out to the openssl binary.

    Only the subject fields that are provided are included in the -subj
    string. Returns a (certificate_pem, private_key_pem) tuple of strings.
    *hush* discards openssl's stderr chatter.
    Assumes OPENSSL_PATH points at a working openssl executable; a failing
    subprocess raises CalledProcessError.
    """
    def openssl(*args):
        # Invoke the configured openssl executable, optionally silenced.
        command = [OPENSSL_PATH] + list(args)
        subprocess.check_call(command, stderr=subprocess.DEVNULL) if hush else subprocess.check_call(command)
    # Build "/FIELD=value" fragments, skipping empty fields entirely.
    field = lambda s, f: "/{0}={1}".format(s, f) if f else ""
    subject = field('C', country_name)
    subject += field('ST', state_name)
    subject += field('L', locality_name)
    subject += field('O', org_name)
    subject += field('OU', unit_name)
    subject += field('CN', common_name)
    subject += field('emailAddress', email_address)
    # openssl insists on real file paths, so round-trip through temp files.
    cert_path = temporary_path()
    key_path = temporary_path()
    openssl('req', '-new', '-x509', '-nodes', '-out', cert_path, '-keyout', key_path, '-subj', subject)
    with open(cert_path, 'r') as cert_file: cert=cert_file.read()
    os.remove(cert_path)
    with open(key_path, 'r') as key_file: key=key_file.read()
    os.remove(key_path)
    return cert, key
def get_cert_expiration(cert_data):
    """Return a PEM certificate's notAfter expiry as a naive UTC datetime."""
    # Solution borrowed from: https://kyle.io/2016/01/checking-a-ssl-certificates-expiry-date-with-python/
    from OpenSSL import crypto
    cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert_data)
    # get_notAfter() yields ASN.1 time bytes, e.g. b'20301231235959Z'.
    return(datetime.datetime.strptime(cert.get_notAfter().decode('utf-8'), "%Y%m%d%H%M%SZ"))
def hash_password(password, salt=None):
    """PBKDF2-HMAC-SHA256 hash of *password*; returns (b64_hash_bytes, salt).

    Bug fix: the salt used to be a default argument, so it was generated once
    at import time and silently shared by every call that omitted it. A fresh
    salt is now drawn per call when none is supplied.
    """
    if salt is None:
        salt = secrets.token_urlsafe(32)
    password_hash = base64.b64encode(hashlib.pbkdf2_hmac('sha256', bytes(password, 'utf-8'), bytes(salt, 'utf-8'), 200000))
    return (password_hash, salt)
def locked_query(db, lock, query, *args):
    """Execute and commit *query* while holding *lock*.

    Bug fix: the lock is now released in a finally block, so a failing
    execute/commit no longer leaves the lock held forever (which would
    deadlock every subsequent write in the multithreaded server).
    """
    lock.acquire()
    try:
        result = db.execute(query, args)
        db.commit()
    finally:
        lock.release()
    return result
def make_boolean(bool_string):
    """Map common affirmative strings (case-insensitive) to True, anything else to False."""
    return bool_string.lower() in ('yes', 'y', 'true', 't', '1')
def normalize_nested_list(nested_list):
    """Render each inner row as a single comma-separated string."""
    return [", ".join(str(col) for col in row) for row in nested_list]
# Requests must be in the form of /<realm>/<function>/[optional/arguments]
def parse_request_path (path):
    """Split '/realm/function[/args...]' into (realm_lower, function, arguments).

    Raises ValueError when fewer than two path components are present.
    """
    # Fixed: filter on truthiness instead of "s is not ''" -- identity
    # comparison with a string literal is an interning accident and a
    # SyntaxWarning on Python 3.8+.
    realm, function, *arguments = [s for s in path.split("/") if s]
    return realm.lower(), function, arguments
def temporary_path(contents=None):
    """Create a temp file (optionally pre-filled with *contents*) and return its path.

    The caller owns the file and is responsible for deleting it.
    """
    handle, path = tempfile.mkstemp()
    if contents:
        os.write(handle, bytes(contents, 'utf-8'))
    os.close(handle)
    return path
def valid_date(date_string):
    """True when *date_string* parses as an ISO YYYY-MM-DD calendar date."""
    try:
        datetime.datetime.strptime(date_string, '%Y-%m-%d')
        return True
    except ValueError:
        return False
# Database manipulation...
def save_certificate(db, hostname, certificate, lock=None):
    """Store *certificate* for *hostname*; serialized via locked_query when a lock is given."""
    query = "insert into ssl_certs (hostname, certificate) values ((?), (?))"
    if lock is None:
        db.execute(query, [hostname, certificate])
    else:
        locked_query(db, lock, query, hostname, certificate)
def delete_certificate(db, hostname, lock=None):
    """Remove the stored certificate row for *hostname* (locked when a lock is supplied)."""
    query = "delete from ssl_certs where hostname = (?)"
    if lock is None:
        db.execute(query, [hostname])
    else:
        locked_query(db, lock, query, hostname)
def get_certificate(db, hostname, cert_only=True):
    """Fetch the stored certificate blob for *hostname*.

    Returns None when absent. With cert_only=True only the PEM certificate
    section is returned; otherwise the full stored blob (cert + key).
    """
    result = db.execute("select certificate from ssl_certs where hostname = (?)", [hostname])
    row = result.fetchone()
    if row is None:
        return None
    blob = row['certificate']
    if not cert_only:
        return blob
    start = blob.find("-----BEGIN CERTIFICATE-----")
    end = blob.rfind("-----END CERTIFICATE-----") + 25  # 25 == len("-----END CERTIFICATE-----")
    return blob[start:end]
def realm_exists(db, realm):
    """True when a '<realm>_users' membership table exists in the database."""
    query = "select 1 from sqlite_master where type = 'table' and name = ?"
    row = db.execute(query, ["{0}_users".format(realm)]).fetchone()
    return row is not None
def generate_user_code(db):
    """Draw a random unused user code in [0, 10**USER_CODE_LENGTH).

    Retries recursively on collision; assumes the code space is far from
    exhausted, otherwise this could recurse deeply.
    """
    # NOTE(review): uses random, not secrets -- codes are predictable to an
    # attacker who knows the PRNG state; consider secrets.randbelow().
    potential_code = random.randint(0,(10**USER_CODE_LENGTH)-1)
    query = "select name from users where code = (?)"
    if db.execute(query, [potential_code]).fetchone(): return(generate_user_code(db))
    return(potential_code)
def get_user_code(db, user_name):
    """Return the (code,) row for *user_name*, or None when the user is unknown."""
    return db.execute("select code from users where name = (?)", [user_name]).fetchone()
def check_user_auth(db, code, realm):
    """Resolve *code* to a user name if that user belongs to *realm*; False otherwise."""
    if realm is None:
        return False
    row = db.execute("select name from users where code = (?)", [code]).fetchone()
    if not row:
        return False
    # NOTE: realm is interpolated into the table name; callers must pass trusted realms.
    membership = "select * from {0}_users where name = ?".format(realm)
    if db.execute(membership, [row['name']]).fetchone() is not None:
        return row['name']
    return False
def log_access(db, lock, realm, name):
    """Record a successful auth in the access log and stamp the user's lastused.

    Both writes run on daemon threads via daemonize(), so this returns
    immediately and does not guarantee the writes have completed.
    """
    now = datetime.datetime.utcnow()
    query = "insert into accesslog values ((?), (?), (?))"
    daemonize(locked_query, db, lock, query, now, realm, name)
    # realm is interpolated into the table name -- must be a trusted value.
    query = "update {0}_users set lastused = (?) where name = (?)".format(realm)
    daemonize(locked_query, db, lock, query, now, name)
def list_users(db, realm):
    """Names of all system users, or of *realm*'s members when a realm is given."""
    query = "select name from {0}_users".format(realm) if realm else "select name from users"
    return [row['name'] for row in db.execute(query)]
def admin_list_users(db, realm):
    """Rows of user details as lists: (name, lastused) per realm, or the full system view."""
    if realm:
        query = "select name, lastused from {0}_users".format(realm)
    else:
        query = "select name, comment, created, expires, uses, rentdue, frozen from users"
    return [list(row) for row in db.execute(query)]
def list_admins(db, realm):
    """Admin names for *realm*, or every system admin when realm is falsy."""
    if realm:
        result = db.execute("select name from admin_acl where realm == (?)", [realm])
    else:
        result = db.execute("select name from admins")
    return [row['name'] for row in result]
def get_user_info(db, realm, user_name):
    """One row of user info: extended system fields when realm is None, else realm lastused.

    Returns None when the user does not exist in the queried table.
    """
    if realm is None:
        query = "select comment, created, expires, uses, rentdue, frozen from users where name = (?)"
    else:
        query = "select lastused from {0}_users where name = (?)".format(realm)
    return db.execute(query, [user_name]).fetchone()
def create_user(db, lock, name, realm=None):
    """Create a user with a fresh random code; optionally join *realm*. Returns the code."""
    code = generate_user_code(db)
    query = "insert into users (code, name, created) values (?, ?, ?)"
    locked_query(db, lock, query, code, name, datetime.date.today())
    if realm: add_user_to_realm(db, lock, name, realm)
    return code
def delete_user(db, lock, name, realm=None):
    """Delete *name* from *realm*, or from the whole system when realm is None.

    A system-wide delete also scrubs the user from every per-realm table on a
    background daemon thread, so completion is not guaranteed on return.
    Always returns True.
    """
    def cleanup_realms(realm_tables):
        # Remove the user from each per-realm membership table.
        for realm_table in realm_tables:
            query = "delete from {0} where name = (?)".format(realm_table)
            locked_query(db, lock, query, name)
    query = "delete from {0} where name = (?)".format("{0}_users".format(realm) if realm else "users")
    locked_query(db, lock, query, name)
    if not realm:
        # NOTE(review): with escape '_', the sequence '_u' in the pattern is a
        # literal 'u', so this effectively matches '%users' -- confirm intent.
        query = "select tbl_name from sqlite_master where type = 'table' and tbl_name like '%_users' escape '_'"
        result = db.execute(query)
        daemonize(cleanup_realms, [row['tbl_name'] for row in result])
    return True
def add_user_to_realm(db, lock, name, realm):
    """Insert *name* into the realm membership table; False when no realm given."""
    if not realm: return False
    # realm is interpolated into the table name -- must be a trusted value.
    query = "insert into {0}_users (name) values (?)".format(realm)
    locked_query(db, lock, query, name)
    return True
def update_user(db, lock, name, field, value):
    """Set a single editable user column, coercing/validating by field name.

    Returns False when an 'expires' value is not a valid YYYY-MM-DD date,
    True otherwise.
    """
    if field == 'expires' and not valid_date(value): return False
    if field == 'comment': value = value.replace('%20', ' ')
    if field == 'rentdue' or field == 'frozen': value = make_boolean(value)
    # NOTE(review): field is interpolated into the SQL -- callers must
    # whitelist it (users() rejects only 'code' and 'created').
    query = "update users set {0} = ? where name = (?)".format(field)
    locked_query(db, lock, query, value, name)
    return True
def setup(db, realm):
    """Initialize a fresh database: schema, test data, and the root 'admin' account.

    Prints the generated admin secret to the console (bold yellow ANSI).
    """
    db.executescript(DATABASE_SCHEMA)
    db.executescript(TESTDATA) # REMOVE ONLY FOR TESTING
    admin_secret = secrets.token_urlsafe(16)
    print("\033[1m\033[93mAdmin secret:\033[0m\033[1m {0}\033[0m".format(admin_secret))
    query = "insert into admins (name, secret, salt) values ('admin', (?), (?));"
    db.execute(query, hash_password(admin_secret))
    query = "insert into admin_acl (name, realm, read, write) values ('admin', (?), 1, 1);"
    db.execute(query, [realm])
    db.commit()
def cleanup(db):
    """Close the database connection before shutdown."""
    db.close()
# Round-trip columns declared as 'bool' through sqlite3.
# Fixes: register_adapter() keys on a *type*, so it must receive bool itself
# (the old call passed the string 'bool', which never matched any value), and
# the converter used "is 1", which only worked via CPython small-int caching.
sqlite3.register_converter('bool', lambda x: int(x) == 1)
sqlite3.register_adapter(bool, lambda x: 1 if x else 0)
# SQL schema for the server's bookkeeping tables: admins and their per-realm
# ACLs, system users, the access log, and stored SSL certificates.
# Per-realm membership tables ('<realm>_users') are created separately.
DATABASE_SCHEMA = """
create table admins (
name text primary key,
secret text not null,
salt text not null
);
create table admin_acl (
name text not null,
realm text not null collate nocase,
read bool not null default 1,
write bool not null default 0
);
create table users (
code integer primary key not null,
name text not null collate nocase,
comment text,
created date not null,
expires date,
uses integer not null default -1,
rentdue bool not null default 0,
frozen bool not null default 0
);
create table accesslog (
timestamp timestamp not null,
realm text not null collate nocase,
name text not null default 'Someone' collate nocase
);
create table ssl_certs (
hostname text not null primary key collate nocase,
certificate text not null
);
"""
# Fixture data for development: two realms (gate, pfaff) and three users.
# setup() loads this unconditionally -- see its "REMOVE ONLY FOR TESTING" note.
TESTDATA = """
create table gate_users (
name text not null primary key collate nocase,
lastused datetime
);
create table pfaff_users (
name text not null primary key collate nocase,
lastused datetime
);
insert into users values(123456, 'tom', 'wha', '2012-12-25', '2012-12-25', -1, 0, 0);
insert into users values(111111, 'sue', 'wha', '2012-12-25', '2012-12-25', -1, 0, 0);
insert into users values(654321, 'tim', 'wha', '2012-12-25', '2012-12-25', -1, 0, 0);
insert into gate_users values ('tom', '2012-12-25 23:59:59');
insert into gate_users values ('tim', '2012-12-25 23:59:59');
insert into pfaff_users values ('sue', '2012-12-25 23:59:59');
"""
if __name__ == '__main__':
    # Command line interface. add_help=False frees up -h for --host.
    argument_parser = argparse.ArgumentParser(add_help=False)
    # --quiet and --verbose are mutually exclusive, as are supplying a
    # certificate and disabling SSL entirely.
    noisy_options = argument_parser.add_mutually_exclusive_group()
    noisy_options.add_argument('-q', '--quiet', action='store_true')
    noisy_options.add_argument('-v', '--verbose', action='store_true')
    ssl_options = argument_parser.add_mutually_exclusive_group()
    ssl_options.add_argument('-c', '--cert-file')
    ssl_options.add_argument('-n', '--no-ssl', action='store_true')
    argument_parser.add_argument('-k', '--key-file')
    argument_parser.add_argument('-r', '--realm')
    argument_parser.add_argument('-h', '--host', default='localhost')
    argument_parser.add_argument('-p', '--port')
    args = argument_parser.parse_args()
    # A key file is only meaningful alongside a certificate file.
    if args.key_file and not args.cert_file: argument_parser.error("You must specify a certificate file (use --cert-file <path/to/certificate.pem>)")
    main(args)
| [
"sys.exit",
"OpenSSL.crypto.load_certificate",
"os.remove",
"os.path.exists",
"argparse.ArgumentParser",
"secrets.token_urlsafe",
"threading.RLock",
"random.randint",
"string.printable.strip",
"subprocess.check_call",
"os.close",
"ssl.SSLContext",
"datetime.date.today",
"tempfile.mkstemp",... | [((26638, 26703), 'sqlite3.register_adapter', 'sqlite3.register_adapter', (['"""bool"""', '(lambda x: 1 if x is True else 0)'], {}), "('bool', lambda x: 1 if x is True else 0)\n", (26662, 26703), False, 'import sqlite3\n'), ((2136, 2165), 'os.path.exists', 'os.path.exists', (['database_file'], {}), '(database_file)\n', (2150, 2165), False, 'import os\n'), ((2192, 2290), 'sqlite3.connect', 'sqlite3.connect', (['database_file'], {'check_same_thread': '(False)', 'detect_types': 'sqlite3.PARSE_DECLTYPES'}), '(database_file, check_same_thread=False, detect_types=\n sqlite3.PARSE_DECLTYPES)\n', (2207, 2290), False, 'import sqlite3\n'), ((18483, 18550), 'threading.Thread', 'threading.Thread', ([], {'target': 'fun', 'args': 'args', 'kwargs': 'kwargs', 'daemon': '(True)'}), '(target=fun, args=args, kwargs=kwargs, daemon=True)\n', (18499, 18550), False, 'import threading\n'), ((19496, 19516), 'os.remove', 'os.remove', (['cert_path'], {}), '(cert_path)\n', (19505, 19516), False, 'import os\n'), ((19583, 19602), 'os.remove', 'os.remove', (['key_path'], {}), '(key_path)\n', (19592, 19602), False, 'import os\n'), ((19810, 19865), 'OpenSSL.crypto.load_certificate', 'crypto.load_certificate', (['crypto.FILETYPE_PEM', 'cert_data'], {}), '(crypto.FILETYPE_PEM, cert_data)\n', (19833, 19865), False, 'from OpenSSL import crypto\n'), ((19993, 20018), 'secrets.token_urlsafe', 'secrets.token_urlsafe', (['(32)'], {}), '(32)\n', (20014, 20018), False, 'import secrets\n'), ((20922, 20940), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {}), '()\n', (20938, 20940), False, 'import tempfile\n'), ((21005, 21021), 'os.close', 'os.close', (['handle'], {}), '(handle)\n', (21013, 21021), False, 'import os\n'), ((22364, 22409), 'random.randint', 'random.randint', (['(0)', '(10 ** USER_CODE_LENGTH - 1)'], {}), '(0, 10 ** USER_CODE_LENGTH - 1)\n', (22378, 22409), False, 'import random\n'), ((23169, 23195), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', 
(23193, 23195), False, 'import datetime\n'), ((26141, 26166), 'secrets.token_urlsafe', 'secrets.token_urlsafe', (['(16)'], {}), '(16)\n', (26162, 26166), False, 'import secrets\n'), ((28392, 28431), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'add_help': '(False)'}), '(add_help=False)\n', (28415, 28431), False, 'import argparse\n'), ((2506, 2517), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2514, 2517), False, 'import sys\n'), ((3039, 3076), 'ssl.SSLContext', 'ssl.SSLContext', ([], {'protocol': 'SSL_PROTOCOL'}), '(protocol=SSL_PROTOCOL)\n', (3053, 3076), False, 'import ssl\n'), ((5683, 5700), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (5698, 5700), False, 'import threading\n'), ((21085, 21136), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['date_string', '"""%Y-%m-%d"""'], {}), "(date_string, '%Y-%m-%d')\n", (21111, 21136), False, 'import datetime\n'), ((24670, 24691), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (24689, 24691), False, 'import datetime\n'), ((2070, 2096), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (2086, 2096), False, 'import os\n'), ((5289, 5326), 'os.remove', 'os.remove', (['temporary_certificate_path'], {}), '(temporary_certificate_path)\n', (5298, 5326), False, 'import os\n'), ((18808, 18865), 'subprocess.check_call', 'subprocess.check_call', (['command'], {'stderr': 'subprocess.DEVNULL'}), '(command, stderr=subprocess.DEVNULL)\n', (18829, 18865), False, 'import subprocess\n'), ((18879, 18909), 'subprocess.check_call', 'subprocess.check_call', (['command'], {}), '(command)\n', (18900, 18909), False, 'import subprocess\n'), ((4345, 4371), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (4369, 4371), False, 'import datetime\n'), ((4585, 4611), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (4609, 4611), False, 'import datetime\n'), ((18367, 18404), 'base64.b64decode', 'base64.b64decode', 
(['encoded_auth_string'], {}), '(encoded_auth_string)\n', (18383, 18404), False, 'import base64\n'), ((316, 337), 'random.SystemRandom', 'random.SystemRandom', ([], {}), '()\n', (335, 337), False, 'import random\n'), ((345, 369), 'string.printable.strip', 'string.printable.strip', ([], {}), '()\n', (367, 369), False, 'import string\n')] |
import typing
from fiepipedesktoplib.locallymanagedtypes.shells.AbstractLocalManagedTypeCommand import LocalManagedTypeCommand
from fiepipedesktoplib.shells.AbstractShell import AbstractShell
from fiepipehoudini.data.installs import HoudiniInstall
from fiepipehoudini.routines.installs import HoudiniInstallsInteractiveRoutines
from fiepipedesktoplib.shells.ui.abspath_input_ui import AbspathInputDefaultUI
from fiepipedesktoplib.shells.ui.subpath_input_ui import SubpathInputDefaultUI
class HoudiniInstallsCommand(LocalManagedTypeCommand[HoudiniInstall]):
    """Interactive shell command for locally managed HoudiniInstall entries."""
    def get_routines(self) -> HoudiniInstallsInteractiveRoutines:
        # Wire the routines up with this shell's feedback UI and path-input UIs.
        return HoudiniInstallsInteractiveRoutines(self.get_feedback_ui(), AbspathInputDefaultUI(self), SubpathInputDefaultUI(self))
    def get_shell(self, item) -> AbstractShell:
        # NOTE(review): 'item' is accepted but not forwarded to super().get_shell();
        # this looks like it should be passed through -- confirm against the base class.
        return super(HoudiniInstallsCommand, self).get_shell()
    def get_plugin_names_v1(self) -> typing.List[str]:
        # Extend the inherited plugin list with this command's own plugin name.
        ret = super(HoudiniInstallsCommand, self).get_plugin_names_v1()
        ret.append("houdini_installs_command")
        return ret
    def get_prompt_text(self) -> str:
        # Prompt reads: fiepipe<separator>houdini_installs
        return self.prompt_separator.join(['fiepipe','houdini_installs'])
| [
"fiepipedesktoplib.shells.ui.subpath_input_ui.SubpathInputDefaultUI",
"fiepipedesktoplib.shells.ui.abspath_input_ui.AbspathInputDefaultUI"
] | [((700, 727), 'fiepipedesktoplib.shells.ui.abspath_input_ui.AbspathInputDefaultUI', 'AbspathInputDefaultUI', (['self'], {}), '(self)\n', (721, 727), False, 'from fiepipedesktoplib.shells.ui.abspath_input_ui import AbspathInputDefaultUI\n'), ((729, 756), 'fiepipedesktoplib.shells.ui.subpath_input_ui.SubpathInputDefaultUI', 'SubpathInputDefaultUI', (['self'], {}), '(self)\n', (750, 756), False, 'from fiepipedesktoplib.shells.ui.subpath_input_ui import SubpathInputDefaultUI\n')] |
#!/usr/bin/env python
# coding=utf-8
# vim:ts=4:sts=4:sw=4:et
#
# Author: <NAME>
# Date: 2017-11-26 18:55:00 +0100 (Sun, 26 Nov 2017)
#
# https://github.com/harisekhon/nagios-plugins
#
# License: see accompanying Hari Sekhon LICENSE file
#
# If you're using my code you're welcome to connect with me on LinkedIn
# and optionally send me feedback to help steer this or other code I publish
#
# https://www.linkedin.com/in/harisekhon
#
"""
Nagios Plugin to check Logstash hot threads via the Logstash Rest API
Optional thresholds apply to the % CPU time for the busiest hot thread by default, or if using the
--top-3 switch then the total sum of % CPU time for top 3 hot threads combined
The top hot thread CPU % and state is output regardless, and perfdata for the top hot thread CPU % and
the top 3 hot threads total CPU % is output for graphing
API is only available in Logstash 5.x onwards, will get connection refused on older versions
Ensure Logstash options:
--http.host should be set to 0.0.0.0 if querying remotely
--http.port should be set to the same port that you are querying via this plugin's --port switch
Tested on Logstash 5.0, 5.1, 5.2, 5.3, 5.4, 5.5, 5.6, 6.0, 6.1
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import sys
import traceback
srcdir = os.path.abspath(os.path.dirname(__file__))
libdir = os.path.join(srcdir, 'pylib')
sys.path.append(libdir)
try:
# pylint: disable=wrong-import-position
#from harisekhon.utils import log
from harisekhon.utils import UnknownError, support_msg_api
from harisekhon.utils import isDict, isList
from harisekhon import RestNagiosPlugin
except ImportError as _:
print(traceback.format_exc(), end='')
sys.exit(4)
__author__ = '<NAME>'
__version__ = '0.2'
class CheckLogstashHotThreads(RestNagiosPlugin):
    """Nagios check for Logstash hot threads via the node hot_threads REST API.

    Thresholds apply to the busiest thread's CPU %, or to the combined CPU %
    of the top 3 threads when --top-3 is given. Perfdata for both figures is
    always emitted.
    """
    def __init__(self):
        # Python 2.x
        super(CheckLogstashHotThreads, self).__init__()
        # Python 3.x
        # super().__init__()
        self.name = 'Logstash'
        self.default_port = 9600
        # could add pipeline name to end of this endpoint but error would be less good 404 Not Found
        # Logstash 5.x /_node/pipeline <= use -5 switch for older Logstash
        # Logstash 6.x /_node/pipelines
        self.path = '/_node/hot_threads'
        self.auth = False
        self.json = True
        self.msg = 'Logstash hot threads msg not defined yet'
        self.plugins = None
    def add_options(self):
        # --top-3 switches the thresholded metric from the single busiest
        # thread to the summed top 3 threads.
        super(CheckLogstashHotThreads, self).add_options()
        self.add_opt('--top-3', action='store_true',
                     help='Test the total sum cpu percentage of the top 3 hot threads' + \
                          ' instead of the top thread')
        self.add_thresholds(default_warning=50)
    def process_options(self):
        super(CheckLogstashHotThreads, self).process_options()
        self.validate_thresholds(percent=True, optional=True)
    def parse_json(self, json_data):
        """Extract hot thread stats, apply thresholds and build msg + perfdata."""
        if not isDict(json_data):
            raise UnknownError('non-dict returned for hot threads. {}'.format(support_msg_api()))
        hot_threads = json_data['hot_threads']['threads']
        top_3 = self.get_opt('top_3')
        sum_percent = 0
        last_percent = None
        for thread in hot_threads:
            thread_percent = thread['percent_of_cpu_time']
            # Threads should arrive sorted busiest-first; verify that ordering.
            # Bug fix: last_percent is now updated every iteration so each entry
            # is compared against its predecessor (previously it was only ever
            # compared against the first entry, missing later inversions).
            if last_percent is not None and thread_percent > last_percent:
                raise UnknownError('assertion failure - subsequent thread percent is unexpectedly higher' + \
                                   ', out of expected order. {}'.format(support_msg_api()))
            last_percent = thread_percent
            sum_percent += thread_percent
        self.msg = 'Logstash '
        if top_3:
            self.msg += 'top 3 hot threads cpu percentage = {}%'.format(sum_percent)
            self.check_thresholds(sum_percent)
            self.msg += ', '
        # they come sorted with highest at top
        top_thread = hot_threads[0]
        name = top_thread['name']
        percent = top_thread['percent_of_cpu_time']
        state = top_thread['state']
        # not available in 5.0, only later versions such as 6.0
        #thread_id = top_thread['thread_id']
        self.msg += 'top hot thread \'{}\' cpu percentage = {}%'.format(name, percent)
        if not top_3:
            self.check_thresholds(percent)
        self.msg += ', state = \'{}\''.format(state)
        #self.msg += ', id = {}'.format(state, thread_id)
        if self.verbose:
            if not isList(top_thread['traces']):
                raise UnknownError('hot thread\'s trace field is not a list. {}'.format(support_msg_api()))
            traces = '\\n'.join(top_thread['traces'])
            self.msg += ', traces: {}'.format(traces)
        if not top_3:
            self.msg += ', top 3 hot threads cpu percentage = {}%'.format(sum_percent)
        self.msg += ' | top_hot_thread_cpu_percentage={}%'.format(percent)
        if not top_3:
            self.msg += '{}'.format(self.get_perf_thresholds())
        self.msg += ' top_three_hot_thread_cpu_percentage={}%'.format(sum_percent)
        if top_3:
            self.msg += '{}'.format(self.get_perf_thresholds())
if __name__ == '__main__':
    # Delegate to the plugin framework's main(): runs the check and exits
    # with the appropriate Nagios status code.
    CheckLogstashHotThreads().main()
| [
"harisekhon.utils.support_msg_api",
"traceback.format_exc",
"os.path.join",
"os.path.dirname",
"harisekhon.utils.isDict",
"harisekhon.utils.isList",
"sys.exit",
"sys.path.append"
] | [((1457, 1486), 'os.path.join', 'os.path.join', (['srcdir', '"""pylib"""'], {}), "(srcdir, 'pylib')\n", (1469, 1486), False, 'import os\n'), ((1487, 1510), 'sys.path.append', 'sys.path.append', (['libdir'], {}), '(libdir)\n', (1502, 1510), False, 'import sys\n'), ((1421, 1446), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1436, 1446), False, 'import os\n'), ((1824, 1835), 'sys.exit', 'sys.exit', (['(4)'], {}), '(4)\n', (1832, 1835), False, 'import sys\n'), ((1788, 1810), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1808, 1810), False, 'import traceback\n'), ((3089, 3106), 'harisekhon.utils.isDict', 'isDict', (['json_data'], {}), '(json_data)\n', (3095, 3106), False, 'from harisekhon.utils import isDict, isList\n'), ((4652, 4680), 'harisekhon.utils.isList', 'isList', (["top_thread['traces']"], {}), "(top_thread['traces'])\n", (4658, 4680), False, 'from harisekhon.utils import isDict, isList\n'), ((3186, 3203), 'harisekhon.utils.support_msg_api', 'support_msg_api', ([], {}), '()\n', (3201, 3203), False, 'from harisekhon.utils import UnknownError, support_msg_api\n'), ((4770, 4787), 'harisekhon.utils.support_msg_api', 'support_msg_api', ([], {}), '()\n', (4785, 4787), False, 'from harisekhon.utils import UnknownError, support_msg_api\n'), ((3759, 3776), 'harisekhon.utils.support_msg_api', 'support_msg_api', ([], {}), '()\n', (3774, 3776), False, 'from harisekhon.utils import UnknownError, support_msg_api\n')] |
import os
import sys
from dataclasses import asdict, dataclass
from pprint import pprint
from typing import Iterator, Optional, Tuple, List
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
# import torch
from fastai.vision.all import (
CategoryBlock,
ClassificationInterpretation,
ColReader,
ColSplitter,
DataBlock,
EarlyStoppingCallback,
ImageBlock,
Resize,
SaveModelCallback,
accuracy,
cnn_learner,
error_rate,
resnet50,
resnet101,
resnet152,
)
from utils.common import load_dz_data
from utils.cross_validation import CrossValidator
from utils.fastai_utils import MLFlowExperiment, MLFlowTracking, set_seeds
from utils.metrics import Metrics
from utils.test_split import TestSplitter
def get_train_val_indices(output_path: str) -> np.array:
    """Read the combined train/validation row indices from disk.

    Parameters
    ----------
    output_path : str
        Directory containing ``train_val_indices.csv`` (one integer per line).

    Returns
    -------
    np.array
        1D integer array of indices.
    """
    filename = os.path.join(output_path, "train_val_indices.csv")
    with open(filename, "rt") as handle:
        lines = handle.readlines()
    return np.array([int(line) for line in lines])
def get_test_indices(output_path: str) -> np.array:
    """Read the holdout test row indices from disk.

    Parameters
    ----------
    output_path : str
        Directory containing ``test_indices.csv`` (one integer per line).

    Returns
    -------
    np.array
        1D integer array of indices.
    """
    filename = os.path.join(output_path, "test_indices.csv")
    with open(filename, "rt") as handle:
        lines = handle.readlines()
    return np.array([int(line) for line in lines])
def fold_gen(output_path: str, nfolds: int) -> Iterator[Tuple[np.array, np.array]]:
    """Yield one ``(train_indices, val_indices)`` pair per cross-validation fold.

    Expects ``train_indices_<fold>.csv`` and ``val_indices_<fold>.csv`` files
    (one integer per line) in ``output_path``.
    """
    def _read_indices(path):
        # Each index file is a plain list of integers, one per line.
        with open(path, "rt") as handle:
            return np.array([int(line) for line in handle.readlines()])

    for fold in range(nfolds):
        train_indices = _read_indices(
            os.path.join(output_path, f"train_indices_{fold}.csv")
        )
        val_indices = _read_indices(
            os.path.join(output_path, f"val_indices_{fold}.csv")
        )
        yield train_indices, val_indices
def up_sample(df: pd.DataFrame, target_col: str) -> pd.DataFrame:
    """Randomly oversample minority classes up to the majority-class size.

    Groups smaller than the largest class are sampled with replacement so
    every value of ``target_col`` ends up with the same number of rows;
    the majority group itself is passed through untouched.
    """
    target_size = int(df[target_col].value_counts().max())
    resampled = []
    for _, group in df.groupby(target_col):
        if len(group) < target_size:
            resampled.append(group.sample(target_size, replace=True))
        else:
            resampled.append(group)
    return pd.concat(resampled)
def create_datasets(data_params: dict) -> Tuple[pd.DataFrame, pd.DataFrame]:
    """Build the train/validation and test dataframes for one experiment.

    Materialises the leakage-free train/test split and per-fold index files
    on disk, then loads the image metadata and slices it into the two sets.

    Returns
    -------
    Tuple[pd.DataFrame, pd.DataFrame]
        Train/validation dataframe and test dataframe, each restricted to
        the ``path`` column plus the stratification target column.
    """
    target_col = data_params["STRATIFY_COL"]
    output_path = data_params["OUTPUT_PATH"]
    # Write the train/test index files (no patient leakage between splits).
    TestSplitter(data_params).get_no_leakage_trainval_test_splits()
    train_val_indices = get_train_val_indices(output_path)
    # Write the per-fold index files consumed later by fold_gen().
    CrossValidator(data_params).get_no_leakage_crossval_splits(train_val_indices)
    df = load_dz_data(data_params["BASE_DATA_DIR"], target_col=target_col)
    test_df = df[df.index.isin(get_test_indices(output_path))]
    # Fastai expects train and validation rows in the same dataframe.
    train_val_df = df[df.index.isin(train_val_indices)].reset_index(drop=True)
    return train_val_df[["path", target_col]], test_df[["path", target_col]]
@dataclass
class ModelParams:
    """Default and allowed parameters for Models.

    Plain defaults throughout, so ``ModelParams()`` is valid and individual
    fields can be overridden afterwards (as ``_sample_experiment`` does).
    """
    target: str = "age"          # metadata column predicted by the classifier
    library: str = "fastai"      # training framework identifier
    epochs: int = 20             # maximum fine-tuning epochs
    batch_size: int = 10
    # FIX: Tuple[str] means "a tuple of exactly one str"; a variable-length
    # tuple of strings is Tuple[str, ...].  batch_transforms defaults to None
    # and is therefore Optional.
    transforms: Tuple[str, ...] = ("Resize",)
    batch_transforms: Optional[Tuple[str, ...]] = None
    patience: int = 5            # early-stopping patience, in epochs
    pretrained: str = "resnet50"  # name of the pretrained backbone
    num_folds: int = 5           # number of cross-validation folds
    seed: int = 100              # RNG seed for reproducibility
class FastaiModel:
    """Cross-validated fastai image classifier with MLFlow experiment logging.

    Trains one CNN per cross-validation fold on spectrogram images, exports
    the per-fold models, and aggregates validation/test predictions across
    folds into overall metrics.
    """
    def __init__(
        self,
        # NOTE(review): ModelParams() as a default is a shared mutable default
        # argument -- all instances created without an explicit model_params
        # share the same ModelParams object.  Confirm this is intentional.
        model_params: ModelParams = ModelParams(),
        metadata_path: str = ".",
        model_path: str = ".",
    ) -> None:
        self.model_params = model_params
        # Directory holding the fold index CSVs consumed by fold_gen().
        self.metadata_path = metadata_path
        self.model_path = model_path
    def train(
        self,
        df_train: pd.DataFrame,
        df_test: pd.DataFrame,
        experiment_name: str = "4-class-age",
        upsample: bool = False,
    ) -> None:
        """ Main training loop
        Trains models according to options specified in model_params.
        MLFlow logging performed under the provided experiment name
        """
        # Start mlflow experiment
        experiment = MLFlowExperiment(experiment_name)
        # log parameters to mlflow
        experiment.register_params(asdict(self.model_params))
        set_seeds(self.model_params.seed)
        # Accumulators for out-of-fold predictions/targets across all folds.
        val_preds, val_targs = [], []
        test_preds, test_targs = [], []
        # Force order of labels so folds are comparable
        labels = list(sorted(set(df_train[self.model_params.target])))
        for fold, (tr_idx, vl_idx) in enumerate(
            fold_gen(self.metadata_path, self.model_params.num_folds)
        ):
            # logging.INFO(f"FOLD={fold}================")
            # Create column to specify if validation set
            df_train["is_valid"] = df_train.index.isin(vl_idx)
            if upsample:
                # NOTE(review): up_sample resamples training *and* validation
                # rows of df_train; "is_valid" rows are duplicated too --
                # confirm that is the intended behaviour.
                df_train = up_sample(df_train, self.model_params.target)
            # Define a fastai datablock to ingest data
            spectrogramBlock = DataBlock(
                blocks=(ImageBlock, CategoryBlock),
                splitter=ColSplitter(),  # Uses "is_valid" column to determine validation set
                get_x=ColReader(0),  # path column
                get_y=ColReader(1),  # label column
                item_tfms=Resize(224),  # resize to expected shape of pretrained model
            )
            dls = spectrogramBlock.dataloaders(
                df_train, bs=self.model_params.batch_size, classes=labels
            )
            learn = cnn_learner(
                dls,
                # load pre-trained model, to be replaced with non-eval way
                # (eval() maps the backbone name string, e.g. "resnet50",
                # to the fastai architecture imported at module level)
                eval(self.model_params.pretrained),
                # Monitor accuracy and error rate, error rate is what we really care about
                metrics=[accuracy, error_rate],
                # Callbacks used to register within model metrics to MLFlow
                cbs=[
                    MLFlowTracking(
                        metric_names=[
                            "valid_loss",
                            "train_loss",
                            "error_rate",
                            "accuracy",
                        ],
                        client=experiment.mlfclient,
                        run_id=experiment.mlrun.info.run_uuid,
                    )
                ],
            ).to_fp16()  # Training with half precision
            # Train model for n epochs, saving the "best" model
            # "best" is defined as the model with lowest validation loss.
            # Model training is stopped when model does not improve for
            # [patience] epochs.
            learn.fine_tune(
                self.model_params.epochs,
                cbs=[
                    EarlyStoppingCallback(
                        monitor="valid_loss", patience=self.model_params.patience
                    ),
                    SaveModelCallback(every_epoch=True, monitor="valid_loss"),
                ],
            )
            # Save best model per fold
            model_name = f"{experiment.experiment_name}-{fold}.pkl"
            learn.export(model_name)
            experiment.register_model(model_name)
            # Within fold metrics for validation set
            val_interp = ClassificationInterpretation.from_learner(learn)
            # Convert torch category tensor to numpy and find softmax
            val_output = np.argmax(val_interp.preds.detach().cpu().numpy(), axis=1)
            val_preds.extend(val_output)
            val_targs.extend(val_interp.targs.detach().cpu().numpy())
            # Using best fold model on holdout test set and store results
            test_dls = dls.test_dl(df_test, with_labels=True)
            test_interp = ClassificationInterpretation.from_learner(learn, dl=test_dls)
            test_output = np.argmax(test_interp.preds.detach().cpu().numpy(), axis=1)
            test_preds.extend(test_output)
            test_targs.extend(test_interp.targs.detach().cpu().numpy())
        # Calculating aggregated metrics accross folds
        val_metrics, val_conf_png = FastaiModel.aggregated_metrics(
            val_targs, val_preds, labels=labels, prefix="valid"
        )
        # log metrics with Mlflow
        experiment.register_metrics(val_metrics)
        experiment.register_artifact(val_conf_png)
        # Repeat for test set
        test_metrics, test_conf_png = FastaiModel.aggregated_metrics(
            test_targs, test_preds, labels=labels, prefix="test"
        )
        experiment.register_metrics(test_metrics)
        experiment.register_artifact(test_conf_png)
    @staticmethod
    def aggregated_metrics(
        targs: List[int], preds: List[int], labels: Optional[List[str]] = None, prefix: str = ""
    ) -> Tuple[dict, str]:
        """ Boilerplate metrics logging

        Computes the metrics dict for the given targets/predictions, prints
        it, saves the confusion-matrix plot to ``<prefix>_confusion.png``,
        and returns both the dict and that artifact filename.
        """
        m = Metrics(targs, preds, labels=labels)
        m_dict = m.get_metrics_dict(prefix=prefix)
        pprint(m_dict)
        m.plot_confusion_matrix()
        artifact_name = f"{prefix}_confusion.png"
        plt.savefig(artifact_name)
        return m_dict, artifact_name
    def predict(self) -> None:
        # Not implemented yet.
        pass
def _sample_experiment(target_col="age", experiment_name="4-class-age"):
    """Example pipeline: train over a small grid of seeds/backbones/folds.

    For every combination, the datasets are (re)created on disk and a
    :class:`FastaiModel` is trained with upsampling enabled, logging under
    ``experiment_name``.
    """
    seeds = [100, 200, 300]
    backbones = ["resnet50"]  # , "resnet101", "resnet152"]
    fold_options = [5]
    for seed in seeds:
        for backbone in backbones:
            for n_folds in fold_options:
                # Data params to override for this run.
                data_params = {
                    "STRATIFY_COL": target_col,
                    "NUM_K_FOLDS": n_folds,
                    "BASE_DATA_DIR": "./data/metadata",
                    "OUTPUT_PATH": "./data/metadata",
                    "SPECTROGRAM_DIR": "./data/metadata/spectrograms",
                    "SEED": seed,
                }
                # Model parameters inherit from data_params where they overlap.
                params = ModelParams()
                params.target = data_params["STRATIFY_COL"]
                params.num_folds = data_params["NUM_K_FOLDS"]
                params.seed = data_params["SEED"]
                params.transforms = ("Resize", "Upsample")
                # Model-specific parameters.
                params.pretrained = backbone
                train_df, test_df = create_datasets(data_params)
                model = FastaiModel(
                    model_params=params, metadata_path=data_params["OUTPUT_PATH"]
                )
                model.train(
                    train_df, test_df, experiment_name=experiment_name, upsample=True
                )
if __name__ == "__main__":
    # Run the binary age-category experiment by default; the alternative
    # experiment invocations are kept commented out for manual toggling.
    # _sample_experiment(target_col="age", experiment_name="4-class-age")
    _sample_experiment(target_col="agecat", experiment_name="2-class-age")
    # _sample_experiment(target_col="sex", experiment_name="2-class-sex")
| [
"utils.test_split.TestSplitter",
"fastai.vision.all.SaveModelCallback",
"fastai.vision.all.ClassificationInterpretation.from_learner",
"matplotlib.pyplot.savefig",
"dataclasses.asdict",
"utils.fastai_utils.MLFlowTracking",
"utils.metrics.Metrics",
"utils.common.load_dz_data",
"os.path.join",
"util... | [((914, 964), 'os.path.join', 'os.path.join', (['output_path', '"""train_val_indices.csv"""'], {}), "(output_path, 'train_val_indices.csv')\n", (926, 964), False, 'import os\n'), ((1255, 1300), 'os.path.join', 'os.path.join', (['output_path', '"""test_indices.csv"""'], {}), "(output_path, 'test_indices.csv')\n", (1267, 1300), False, 'import os\n'), ((2932, 2997), 'utils.common.load_dz_data', 'load_dz_data', (["data_params['BASE_DATA_DIR']"], {'target_col': 'target_col'}), "(data_params['BASE_DATA_DIR'], target_col=target_col)\n", (2944, 2997), False, 'from utils.common import load_dz_data\n'), ((4439, 4472), 'utils.fastai_utils.MLFlowExperiment', 'MLFlowExperiment', (['experiment_name'], {}), '(experiment_name)\n', (4455, 4472), False, 'from utils.fastai_utils import MLFlowExperiment, MLFlowTracking, set_seeds\n'), ((4579, 4612), 'utils.fastai_utils.set_seeds', 'set_seeds', (['self.model_params.seed'], {}), '(self.model_params.seed)\n', (4588, 4612), False, 'from utils.fastai_utils import MLFlowExperiment, MLFlowTracking, set_seeds\n'), ((9144, 9180), 'utils.metrics.Metrics', 'Metrics', (['targs', 'preds'], {'labels': 'labels'}), '(targs, preds, labels=labels)\n', (9151, 9180), False, 'from utils.metrics import Metrics\n'), ((9240, 9254), 'pprint.pprint', 'pprint', (['m_dict'], {}), '(m_dict)\n', (9246, 9254), False, 'from pprint import pprint\n'), ((9347, 9373), 'matplotlib.pyplot.savefig', 'plt.savefig', (['artifact_name'], {}), '(artifact_name)\n', (9358, 9373), True, 'import matplotlib.pyplot as plt\n'), ((2683, 2708), 'utils.test_split.TestSplitter', 'TestSplitter', (['data_params'], {}), '(data_params)\n', (2695, 2708), False, 'from utils.test_split import TestSplitter\n'), ((2845, 2872), 'utils.cross_validation.CrossValidator', 'CrossValidator', (['data_params'], {}), '(data_params)\n', (2859, 2872), False, 'from utils.cross_validation import CrossValidator\n'), ((4543, 4568), 'dataclasses.asdict', 'asdict', (['self.model_params'], {}), 
'(self.model_params)\n', (4549, 4568), False, 'from dataclasses import asdict, dataclass\n'), ((7587, 7635), 'fastai.vision.all.ClassificationInterpretation.from_learner', 'ClassificationInterpretation.from_learner', (['learn'], {}), '(learn)\n', (7628, 7635), False, 'from fastai.vision.all import CategoryBlock, ClassificationInterpretation, ColReader, ColSplitter, DataBlock, EarlyStoppingCallback, ImageBlock, Resize, SaveModelCallback, accuracy, cnn_learner, error_rate, resnet50, resnet101, resnet152\n'), ((8065, 8126), 'fastai.vision.all.ClassificationInterpretation.from_learner', 'ClassificationInterpretation.from_learner', (['learn'], {'dl': 'test_dls'}), '(learn, dl=test_dls)\n', (8106, 8126), False, 'from fastai.vision.all import CategoryBlock, ClassificationInterpretation, ColReader, ColSplitter, DataBlock, EarlyStoppingCallback, ImageBlock, Resize, SaveModelCallback, accuracy, cnn_learner, error_rate, resnet50, resnet101, resnet152\n'), ((1639, 1693), 'os.path.join', 'os.path.join', (['output_path', 'f"""train_indices_{fold}.csv"""'], {}), "(output_path, f'train_indices_{fold}.csv')\n", (1651, 1693), False, 'import os\n'), ((1803, 1855), 'os.path.join', 'os.path.join', (['output_path', 'f"""val_indices_{fold}.csv"""'], {}), "(output_path, f'val_indices_{fold}.csv')\n", (1815, 1855), False, 'import os\n'), ((5406, 5419), 'fastai.vision.all.ColSplitter', 'ColSplitter', ([], {}), '()\n', (5417, 5419), False, 'from fastai.vision.all import CategoryBlock, ClassificationInterpretation, ColReader, ColSplitter, DataBlock, EarlyStoppingCallback, ImageBlock, Resize, SaveModelCallback, accuracy, cnn_learner, error_rate, resnet50, resnet101, resnet152\n'), ((5497, 5509), 'fastai.vision.all.ColReader', 'ColReader', (['(0)'], {}), '(0)\n', (5506, 5509), False, 'from fastai.vision.all import CategoryBlock, ClassificationInterpretation, ColReader, ColSplitter, DataBlock, EarlyStoppingCallback, ImageBlock, Resize, SaveModelCallback, accuracy, cnn_learner, error_rate, 
resnet50, resnet101, resnet152\n'), ((5548, 5560), 'fastai.vision.all.ColReader', 'ColReader', (['(1)'], {}), '(1)\n', (5557, 5560), False, 'from fastai.vision.all import CategoryBlock, ClassificationInterpretation, ColReader, ColSplitter, DataBlock, EarlyStoppingCallback, ImageBlock, Resize, SaveModelCallback, accuracy, cnn_learner, error_rate, resnet50, resnet101, resnet152\n'), ((5604, 5615), 'fastai.vision.all.Resize', 'Resize', (['(224)'], {}), '(224)\n', (5610, 5615), False, 'from fastai.vision.all import CategoryBlock, ClassificationInterpretation, ColReader, ColSplitter, DataBlock, EarlyStoppingCallback, ImageBlock, Resize, SaveModelCallback, accuracy, cnn_learner, error_rate, resnet50, resnet101, resnet152\n'), ((7073, 7158), 'fastai.vision.all.EarlyStoppingCallback', 'EarlyStoppingCallback', ([], {'monitor': '"""valid_loss"""', 'patience': 'self.model_params.patience'}), "(monitor='valid_loss', patience=self.model_params.patience\n )\n", (7094, 7158), False, 'from fastai.vision.all import CategoryBlock, ClassificationInterpretation, ColReader, ColSplitter, DataBlock, EarlyStoppingCallback, ImageBlock, Resize, SaveModelCallback, accuracy, cnn_learner, error_rate, resnet50, resnet101, resnet152\n'), ((7221, 7278), 'fastai.vision.all.SaveModelCallback', 'SaveModelCallback', ([], {'every_epoch': '(True)', 'monitor': '"""valid_loss"""'}), "(every_epoch=True, monitor='valid_loss')\n", (7238, 7278), False, 'from fastai.vision.all import CategoryBlock, ClassificationInterpretation, ColReader, ColSplitter, DataBlock, EarlyStoppingCallback, ImageBlock, Resize, SaveModelCallback, accuracy, cnn_learner, error_rate, resnet50, resnet101, resnet152\n'), ((6255, 6415), 'utils.fastai_utils.MLFlowTracking', 'MLFlowTracking', ([], {'metric_names': "['valid_loss', 'train_loss', 'error_rate', 'accuracy']", 'client': 'experiment.mlfclient', 'run_id': 'experiment.mlrun.info.run_uuid'}), "(metric_names=['valid_loss', 'train_loss', 'error_rate',\n 'accuracy'], 
client=experiment.mlfclient, run_id=experiment.mlrun.info.\n run_uuid)\n", (6269, 6415), False, 'from utils.fastai_utils import MLFlowExperiment, MLFlowTracking, set_seeds\n')] |
import torch
import numpy as np
class ToTensorGjz(object):
    """Convert an HWC numpy image to a float CHW torch tensor (no value scaling)."""

    def __call__(self, pic):
        # Reorder axes from (H, W, C) to (C, H, W) before wrapping in a tensor.
        chw = pic.transpose((2, 0, 1))
        tensor = torch.from_numpy(chw)
        return tensor.float()

    def __repr__(self):
        return self.__class__.__name__ + '()'
class NormalizeGjz(object):
    """In-place normalization: ``tensor <- (tensor - mean) / std``."""

    def __init__(self, mean, std):
        self.mean = mean
        self.std = std

    def __call__(self, tensor):
        # Mutates the input tensor in place and returns it for chaining.
        tensor.sub_(self.mean)
        tensor.div_(self.std)
        return tensor
def crop_img(img, roi_box):
    """Crop ``img`` to ``roi_box`` = (sx, sy, ex, ey), zero-padding the parts
    of the box that fall outside the image bounds.

    Returns a uint8 array of shape (ey-sy, ex-sx) (plus a 3-channel axis when
    the input has one); coordinates are rounded to the nearest integer.
    """
    h, w = img.shape[:2]
    sx, sy, ex, ey = (int(round(v)) for v in roi_box)
    dh, dw = ey - sy, ex - sx
    shape = (dh, dw, 3) if len(img.shape) == 3 else (dh, dw)
    res = np.zeros(shape, dtype=np.uint8)
    # Destination offsets inside the zero canvas (shift right/down when the
    # box starts before the image, shrink when it runs past the far edge).
    dsx = -sx if sx < 0 else 0
    dsy = -sy if sy < 0 else 0
    dex = dw - (ex - w) if ex > w else dw
    dey = dh - (ey - h) if ey > h else dh
    # Source region clamped to the image bounds.
    sx, sy = max(sx, 0), max(sy, 0)
    ex, ey = min(ex, w), min(ey, h)
    res[dsy:dey, dsx:dex] = img[sy:ey, sx:ex]
    return res
| [
"numpy.zeros"
] | [((638, 675), 'numpy.zeros', 'np.zeros', (['(dh, dw, 3)'], {'dtype': 'np.uint8'}), '((dh, dw, 3), dtype=np.uint8)\n', (646, 675), True, 'import numpy as np\n'), ((700, 734), 'numpy.zeros', 'np.zeros', (['(dh, dw)'], {'dtype': 'np.uint8'}), '((dh, dw), dtype=np.uint8)\n', (708, 734), True, 'import numpy as np\n')] |
import unittest
from flexmock import flexmock
import swiftclient
import swiftclient.utils
from ansible.modules.network.fabric import swift_fileutil
from test_fabric_base import TestFabricModule
from test_fabric_base import set_module_args
from ansible.module_utils import fabric_utils
class TestSwiftFileUtilModule(TestFabricModule):
    """Unit tests for the ``swift_fileutil`` Ansible module.

    All swiftclient network interaction is replaced with flexmock fakes in
    ``setUp`` so the module logic can be exercised without a real Swift
    endpoint; each test then tweaks one mock to drive a specific code path.
    """
    module = swift_fileutil
    def setUp(self):
        super(TestSwiftFileUtilModule, self).setUp()
        # Mocking the swift connection object
        self.mockobj = flexmock().should_receive('get_account').and_return(['storageurl']).mock()
        flexmock(swiftclient.client).should_receive('Connection').and_return(self.mockobj)
        flexmock(self.mockobj).should_receive('post_account').and_return(None)
        flexmock(self.mockobj).url = "storage_url"
        flexmock(self.mockobj).should_receive("close").and_return(None)
        # Silence the fabric logger so tests produce no log output.
        fake_logger = flexmock()
        flexmock(fake_logger).should_receive('error')
        flexmock(fake_logger).should_receive('debug')
        flexmock(fabric_utils).should_receive('fabric_ansible_logger').and_return(fake_logger)
        # Baseline module arguments; individual tests mutate this dict.
        self.args_dict = dict(authtoken="<PASSWORD>", authurl="auth_url", user="admin", key="contrail", tenant_name="project",
                              auth_version="3.0", temp_url_key="temp_url_key1",
                              temp_url_key_2="temp_url_key2", chosen_temp_url_key="temp_url_key",
                              container_name="container", filename="sample.txt", expirytime=3600)
    # Testing the swift utility module
    def test_fileutility01(self):
        # Happy path: generate_temp_url succeeds and its URL is returned.
        fake_image_url = "/v1/sample.txt"
        flexmock(swiftclient.utils).should_receive('generate_temp_url').and_return(fake_image_url)
        set_module_args(self.args_dict)
        result = self.execute_module()
        self.assertTrue(result["url"])
        self.assertEqual(result["url"], fake_image_url)
    # Testing when generate_temp_url returns None
    def test_fileutility02(self):
        flexmock(swiftclient.utils).should_receive('generate_temp_url').and_return(None)
        set_module_args(self.args_dict)
        self.assertRaises(Exception, self.execute_module())
    # Testing when generate_temp_url raises exception
    def test_fileutility_03(self):
        flexmock(swiftclient.utils).should_receive('generate_temp_url').and_raise(Exception)
        set_module_args(self.args_dict)
        self.assertRaises(Exception, self.execute_module())
    # #Testing the swift connection after retry
    def test_fileutility04(self):
        # Connection factory yields None, so the module retries then fails.
        flexmock(swiftclient.client).should_receive('Connection').and_return(None)
        self.args_dict['connection_retry_count'] = 1
        set_module_args(self.args_dict)
        self.assertRaises(Exception, self.execute_module())
    # Testing the update account error
    def test_fileutility05(self):
        flexmock(self.mockobj).should_receive('post_account').and_raise(Exception)
        set_module_args(self.args_dict)
        self.assertRaises(Exception, self.execute_module())
    # Testing the case where optional args are not passed and it should take default value
    def test_fileutility06(self):
        for e in ["tenant_name","auth_version","chosen_temp_url_key","connection_retry_count"]:
            self.args_dict.pop(e, None)
        set_module_args(self.args_dict)
        fake_image_url = "/v1/sample.txt"
        flexmock(swiftclient.utils).should_receive('generate_temp_url').and_return(fake_image_url)
        result = self.execute_module()
        self.assertTrue(result["url"])
        self.assertEqual(result["url"], fake_image_url)
| [
"test_fabric_base.set_module_args",
"flexmock.flexmock"
] | [((898, 908), 'flexmock.flexmock', 'flexmock', ([], {}), '()\n', (906, 908), False, 'from flexmock import flexmock\n'), ((1739, 1770), 'test_fabric_base.set_module_args', 'set_module_args', (['self.args_dict'], {}), '(self.args_dict)\n', (1754, 1770), False, 'from test_fabric_base import set_module_args\n'), ((2087, 2118), 'test_fabric_base.set_module_args', 'set_module_args', (['self.args_dict'], {}), '(self.args_dict)\n', (2102, 2118), False, 'from test_fabric_base import set_module_args\n'), ((2370, 2401), 'test_fabric_base.set_module_args', 'set_module_args', (['self.args_dict'], {}), '(self.args_dict)\n', (2385, 2401), False, 'from test_fabric_base import set_module_args\n'), ((2689, 2720), 'test_fabric_base.set_module_args', 'set_module_args', (['self.args_dict'], {}), '(self.args_dict)\n', (2704, 2720), False, 'from test_fabric_base import set_module_args\n'), ((2946, 2977), 'test_fabric_base.set_module_args', 'set_module_args', (['self.args_dict'], {}), '(self.args_dict)\n', (2961, 2977), False, 'from test_fabric_base import set_module_args\n'), ((3308, 3339), 'test_fabric_base.set_module_args', 'set_module_args', (['self.args_dict'], {}), '(self.args_dict)\n', (3323, 3339), False, 'from test_fabric_base import set_module_args\n'), ((761, 783), 'flexmock.flexmock', 'flexmock', (['self.mockobj'], {}), '(self.mockobj)\n', (769, 783), False, 'from flexmock import flexmock\n'), ((917, 938), 'flexmock.flexmock', 'flexmock', (['fake_logger'], {}), '(fake_logger)\n', (925, 938), False, 'from flexmock import flexmock\n'), ((971, 992), 'flexmock.flexmock', 'flexmock', (['fake_logger'], {}), '(fake_logger)\n', (979, 992), False, 'from flexmock import flexmock\n'), ((591, 619), 'flexmock.flexmock', 'flexmock', (['swiftclient.client'], {}), '(swiftclient.client)\n', (599, 619), False, 'from flexmock import flexmock\n'), ((682, 704), 'flexmock.flexmock', 'flexmock', (['self.mockobj'], {}), '(self.mockobj)\n', (690, 704), False, 'from flexmock import flexmock\n'), 
((812, 834), 'flexmock.flexmock', 'flexmock', (['self.mockobj'], {}), '(self.mockobj)\n', (820, 834), False, 'from flexmock import flexmock\n'), ((1025, 1047), 'flexmock.flexmock', 'flexmock', (['fabric_utils'], {}), '(fabric_utils)\n', (1033, 1047), False, 'from flexmock import flexmock\n'), ((1640, 1667), 'flexmock.flexmock', 'flexmock', (['swiftclient.utils'], {}), '(swiftclient.utils)\n', (1648, 1667), False, 'from flexmock import flexmock\n'), ((1998, 2025), 'flexmock.flexmock', 'flexmock', (['swiftclient.utils'], {}), '(swiftclient.utils)\n', (2006, 2025), False, 'from flexmock import flexmock\n'), ((2277, 2304), 'flexmock.flexmock', 'flexmock', (['swiftclient.utils'], {}), '(swiftclient.utils)\n', (2285, 2304), False, 'from flexmock import flexmock\n'), ((2553, 2581), 'flexmock.flexmock', 'flexmock', (['swiftclient.client'], {}), '(swiftclient.client)\n', (2561, 2581), False, 'from flexmock import flexmock\n'), ((2863, 2885), 'flexmock.flexmock', 'flexmock', (['self.mockobj'], {}), '(self.mockobj)\n', (2871, 2885), False, 'from flexmock import flexmock\n'), ((3390, 3417), 'flexmock.flexmock', 'flexmock', (['swiftclient.utils'], {}), '(swiftclient.utils)\n', (3398, 3417), False, 'from flexmock import flexmock\n'), ((508, 518), 'flexmock.flexmock', 'flexmock', ([], {}), '()\n', (516, 518), False, 'from flexmock import flexmock\n')] |
from glob import glob
import cv2
import os
# we'll run it manually like this:
# $ python
# Python 3.8.0 (tags/v3.8.0:fa919fd, Oct 14 2019, 19:21:23) [MSC v.1916 32 bit (Intel)] on win32
# Type "help", "copyright", "credits" or "license" for more information.
# >>> from jpgutils import convert_png_to_jpg
# >>> convert_png_to_jpg()
# >>> exit()
def convert_png_to_jpg(mydir="datasetposff"):
    """Convert every ``.png`` in ``mydir`` to ``.jpg`` and delete the source.

    Parameters
    ----------
    mydir : str, optional
        Directory scanned (non-recursively) for ``*.png`` files.  Defaults to
        ``"datasetposff"`` so existing callers keep their behaviour.

    Notes
    -----
    The original implementation ignored ``cv2.imread`` failures (writing a
    junk jpg) and then deleted *every* ``.png`` in the directory, including
    files that were never converted.  Unreadable files are now skipped and
    kept, and only successfully converted sources are removed.
    """
    pngs = glob(os.path.join(mydir, '*.png'))
    for png_path in pngs:
        img = cv2.imread(png_path)
        if img is None:
            # Unreadable/corrupt file: keep it for inspection.
            continue
        cv2.imwrite(png_path[:-3] + 'jpg', img)
        os.remove(png_path)
"cv2.imwrite",
"os.listdir",
"os.path.join",
"cv2.imread",
"glob.glob"
] | [((470, 492), 'glob.glob', 'glob', (["(mydir + '/*.png')"], {}), "(mydir + '/*.png')\n", (474, 492), False, 'from glob import glob\n'), ((524, 537), 'cv2.imread', 'cv2.imread', (['j'], {}), '(j)\n', (534, 537), False, 'import cv2\n'), ((546, 578), 'cv2.imwrite', 'cv2.imwrite', (["(j[:-3] + 'jpg')", 'img'], {}), "(j[:-3] + 'jpg', img)\n", (557, 578), False, 'import cv2\n'), ((633, 650), 'os.listdir', 'os.listdir', (['mydir'], {}), '(mydir)\n', (643, 650), False, 'import os\n'), ((716, 738), 'os.path.join', 'os.path.join', (['mydir', 'f'], {}), '(mydir, f)\n', (728, 738), False, 'import os\n')] |
"""Random Forest classification and computation of assessment metrics."""
import numpy as np
from imblearn.over_sampling import RandomOverSampler
from imblearn.under_sampling import RandomUnderSampler
from sklearn import metrics
from sklearn.ensemble import RandomForestClassifier
from raster import is_raster
def transform_input(scene):
    """Transform input variables (here Landsat NDSV).

    Flattens each NDSV band of the scene into one feature column.

    Parameters
    ----------
    scene : landsat.Scene
        Input Landsat scene; must expose ``ndsv_`` (band list), ``ndsv``
        (array of shape (n_features, height, width)) and ``profile``
        (mapping with 'width' and 'height').

    Returns
    -------
    X : array
        Transformed input data as an array of shape (n_samples, n_features).
    """
    n_features = len(scene.ndsv_)
    n_samples = scene.profile['width'] * scene.profile['height']
    # FIX: np.float is a deprecated alias removed in NumPy 1.24; the builtin
    # float (i.e. float64) is the exact equivalent.
    X = np.zeros(shape=(n_samples, n_features), dtype=float)
    ndsv = scene.ndsv
    for i in range(n_features):
        X[:, i] = ndsv[i, :, :].ravel()
    return X
def transform_test(true, pred):
    """Flatten labelled pixels of the test and prediction rasters.

    Only pixels with a positive label in ``true`` are kept; both rasters
    are flattened with the same mask so the outputs stay aligned.

    Parameters
    ----------
    true : array-like
        Testing data set raster as a 2D NumPy array.
    pred : array-like
        Predicted values as a 2D NumPy array.

    Returns
    -------
    y_true : array
        1D array of true labels of shape (n_samples).
    y_pred : array
        1D array of predicted labels of shape (n_samples).
    """
    mask = true > 0
    y_true = true[mask].ravel()
    y_pred = pred[mask].ravel()
    return y_true, y_pred
def transform_training(scene, training):
    """Transform training data set.

    Keeps only the pixels labelled (>0) in the training raster, one feature
    column per NDSV band.

    Parameters
    ----------
    scene : landsat.Scene
        Input Landsat scene; must expose ``ndsv_`` (band list) and ``ndsv``
        (array of shape (n_features, height, width)).
    training : 2D numpy array
        Training data raster as a 2D numpy array (0 = unlabelled).

    Returns
    -------
    X : array
        Training samples as an array of shape (n_samples, n_features).
    y : array
        Training labels as an array of shape (n_samples).
    """
    n_features = len(scene.ndsv_)
    n_samples = np.count_nonzero(training)
    # FIX: np.float is a deprecated alias removed in NumPy 1.24; the builtin
    # float (i.e. float64) is the exact equivalent.
    X = np.zeros(shape=(n_samples, n_features), dtype=float)
    ndsv = scene.ndsv
    for i in range(n_features):
        X[:, i] = ndsv[i, :, :][training > 0].ravel()
    y = training[training > 0].ravel()
    return X, y
def classify(
        scene,
        training,
        oversampling=False,
        undersampling=False,
        water=None,
        **kwargs):
    """Classify Landsat scene using Random Forest.
    Parameters
    ----------
    scene : landsat.Scene
        Input Landsat scene.
    training : 2D numpy array
        Input training data set as a 2D numpy array.
    oversampling : bool, optional
        If set to `True`, random oversampling will be performed on the
        minority class.
    undersampling : bool, optional
        If set to `True`, random undersampling will be performed on the
        majority class.
    water : 2D numpy array, optional
        If provided, water pixels will be ignored and classified as
        non-built.
    kwargs : **kwargs
        Additionnal arguments to the Random Forest classifier.
    Returns
    -------
    classes : 2D numpy array
        Binary output as a 2D numpy array.
    probabilities : 2D numpy array
        Probabilistic output as a 2D numpy array.
    """
    X = transform_input(scene)
    x_train, y_train = transform_training(scene, training)
    # Pop random_state so the resamplers share the same seed; the remaining
    # kwargs are forwarded unchanged to RandomForestClassifier.
    random_state = kwargs.pop('random_state', None)
    if oversampling:
        ros = RandomOverSampler(random_state=random_state)
        x_train, y_train = ros.fit_sample(x_train, y_train)
    if undersampling:
        ros = RandomUnderSampler(random_state=random_state)
        x_train, y_train = ros.fit_sample(x_train, y_train)
    rf = RandomForestClassifier(**kwargs)
    rf.fit(x_train, y_train)
    probabilities = rf.predict_proba(X)
    # predict_proba columns follow the sorted class labels, so column 0 is
    # the lowest label value -- presumably the built-up class (1); reshape
    # back to the scene's raster dimensions.  TODO(review): confirm the
    # label encoding against the training raster.
    probabilities = probabilities[:, 0].reshape(scene.red.shape)
    if is_raster(water):
        # Water pixels are forced to zero built-up probability.
        probabilities[water] = 0
    return probabilities
def assess(probabilities, testing_dataset, threshold=0.75):
    """Compute validation metrics.

    Parameters
    ----------
    probabilities : 2D numpy array
        Predicted probabilities of belonging to
        the built-up class as a 2D NumPy array.
    testing_dataset : 2D numpy array
        Testing data set as a 2D NumPy array (0 = unlabelled, 1 = built-up,
        >= 2 = other land covers).
    threshold : float
        Threshold applied to the probabilistic output
        to obtain a binary product (0-1).

    Returns
    -------
    summary : dict
        Assessment metrics in a dictionnary.
    """
    summary = {}
    # Binary product obtained by thresholding the probabilities
    # (1 = built-up, 2 = non-built).
    classes = np.zeros(shape=probabilities.shape, dtype=np.uint8)
    classes[probabilities >= threshold] = 1
    classes[probabilities < threshold] = 2
    # 1. Binary classification metrics:
    # Assign value 2 to all non-built land covers
    true, pred = testing_dataset.copy(), classes.copy()
    true[true >= 2] = 2
    pred[pred >= 2] = 2
    # Transform and binarize input data (True = built-up)
    y_true, y_pred = transform_test(true, pred)
    y_true, y_pred = y_true == 1, y_pred == 1
    summary['accuracy'] = metrics.accuracy_score(
        y_true, y_pred
    )
    # FIX: balanced accuracy was previously computed with recall_score,
    # which is only the positive-class recall (true-positive rate);
    # balanced_accuracy_score averages recall over both classes.
    summary['balanced_accuracy'] = metrics.balanced_accuracy_score(
        y_true, y_pred
    )
    summary['precision'] = metrics.precision_score(
        y_true, y_pred
    )
    summary['recall'] = metrics.recall_score(
        y_true, y_pred
    )
    summary['f1_score'] = metrics.f1_score(
        y_true, y_pred
    )
    summary['confusion_matrix'] = metrics.confusion_matrix(
        y_true, y_pred
    )
    # 2. Continuous metrics based on probabilities:
    # Assign value 2 to all non-built land covers
    true = testing_dataset.copy()
    true[true >= 2] = 2
    # Transform and binarize input data
    y_true, y_pred = transform_test(true, probabilities)
    y_true = y_true == 1
    summary['pr_curve'] = metrics.precision_recall_curve(
        y_true, y_pred
    )
    summary['avg_precision'] = metrics.average_precision_score(
        y_true, y_pred, average='weighted'
    )
    # 3. Per land cover accuracies: fraction of each land cover's pixels
    # that the thresholded product classifies correctly.
    land_covers = {
        'builtup': 1,
        'baresoil': 2,
        'lowveg': 3,
        'highveg': 4
    }
    for label, value in land_covers.items():
        mask = testing_dataset == value
        true = testing_dataset[mask]
        pred = classes[mask]
        total = np.count_nonzero(mask)
        if label == 'builtup':
            accuracy = np.count_nonzero(pred == 1) / total
        else:
            accuracy = np.count_nonzero(pred >= 2) / total
        summary['{}_accuracy'.format(label)] = accuracy
    return summary
| [
"sklearn.metrics.f1_score",
"sklearn.metrics.average_precision_score",
"sklearn.metrics.precision_recall_curve",
"sklearn.ensemble.RandomForestClassifier",
"sklearn.metrics.precision_score",
"numpy.count_nonzero",
"sklearn.metrics.recall_score",
"numpy.zeros",
"imblearn.over_sampling.RandomOverSampl... | [((713, 768), 'numpy.zeros', 'np.zeros', ([], {'shape': '(n_samples, n_features)', 'dtype': 'np.float'}), '(shape=(n_samples, n_features), dtype=np.float)\n', (721, 768), True, 'import numpy as np\n'), ((1924, 1950), 'numpy.count_nonzero', 'np.count_nonzero', (['training'], {}), '(training)\n', (1940, 1950), True, 'import numpy as np\n'), ((1959, 2014), 'numpy.zeros', 'np.zeros', ([], {'shape': '(n_samples, n_features)', 'dtype': 'np.float'}), '(shape=(n_samples, n_features), dtype=np.float)\n', (1967, 2014), True, 'import numpy as np\n'), ((3639, 3671), 'sklearn.ensemble.RandomForestClassifier', 'RandomForestClassifier', ([], {}), '(**kwargs)\n', (3661, 3671), False, 'from sklearn.ensemble import RandomForestClassifier\n'), ((3815, 3831), 'raster.is_raster', 'is_raster', (['water'], {}), '(water)\n', (3824, 3831), False, 'from raster import is_raster\n'), ((4546, 4597), 'numpy.zeros', 'np.zeros', ([], {'shape': 'probabilities.shape', 'dtype': 'np.uint8'}), '(shape=probabilities.shape, dtype=np.uint8)\n', (4554, 4597), True, 'import numpy as np\n'), ((5043, 5081), 'sklearn.metrics.accuracy_score', 'metrics.accuracy_score', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (5065, 5081), False, 'from sklearn import metrics\n'), ((5132, 5168), 'sklearn.metrics.recall_score', 'metrics.recall_score', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (5152, 5168), False, 'from sklearn import metrics\n'), ((5211, 5250), 'sklearn.metrics.precision_score', 'metrics.precision_score', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (5234, 5250), False, 'from sklearn import metrics\n'), ((5290, 5326), 'sklearn.metrics.recall_score', 'metrics.recall_score', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (5310, 5326), False, 'from sklearn import metrics\n'), ((5368, 5400), 'sklearn.metrics.f1_score', 'metrics.f1_score', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (5384, 5400), False, 'from sklearn import metrics\n'), 
((5450, 5490), 'sklearn.metrics.confusion_matrix', 'metrics.confusion_matrix', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (5474, 5490), False, 'from sklearn import metrics\n'), ((5817, 5863), 'sklearn.metrics.precision_recall_curve', 'metrics.precision_recall_curve', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (5847, 5863), False, 'from sklearn import metrics\n'), ((5910, 5977), 'sklearn.metrics.average_precision_score', 'metrics.average_precision_score', (['y_true', 'y_pred'], {'average': '"""weighted"""'}), "(y_true, y_pred, average='weighted')\n", (5941, 5977), False, 'from sklearn import metrics\n'), ((3381, 3425), 'imblearn.over_sampling.RandomOverSampler', 'RandomOverSampler', ([], {'random_state': 'random_state'}), '(random_state=random_state)\n', (3398, 3425), False, 'from imblearn.over_sampling import RandomOverSampler\n'), ((3523, 3568), 'imblearn.under_sampling.RandomUnderSampler', 'RandomUnderSampler', ([], {'random_state': 'random_state'}), '(random_state=random_state)\n', (3541, 3568), False, 'from imblearn.under_sampling import RandomUnderSampler\n'), ((6311, 6333), 'numpy.count_nonzero', 'np.count_nonzero', (['mask'], {}), '(mask)\n', (6327, 6333), True, 'import numpy as np\n'), ((6389, 6416), 'numpy.count_nonzero', 'np.count_nonzero', (['(pred == 1)'], {}), '(pred == 1)\n', (6405, 6416), True, 'import numpy as np\n'), ((6462, 6489), 'numpy.count_nonzero', 'np.count_nonzero', (['(pred >= 2)'], {}), '(pred >= 2)\n', (6478, 6489), True, 'import numpy as np\n')] |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ecl.identity import identity_service
from ecl import resource
class Project(resource.Resource):
resource_key = 'project'
resources_key = 'projects'
base_path = '/projects'
service = identity_service.IdentityService()
# capabilities
allow_create = True
allow_retrieve = True
allow_update = True
allow_delete = True
allow_list = True
patch_update = True
# Properties
#: The description of the project. *Type: string*
description = resource.prop('description')
#: References the domain ID which owns the project; if a domain ID is not
#: specified by the client, the Identity service implementation will
#: default it to the domain ID to which the client's token is scoped.
#: *Type: string*
domain_id = resource.prop('domain_id')
#: Setting this attribute to ``False`` prevents users from authorizing
#: against this project. Additionally, all pre-existing tokens authorized
#: for the project are immediately invalidated. Re-enabling a project
#: does not re-enable pre-existing tokens. *Type: bool*
is_enabled = resource.prop('enabled', type=bool)
#: Unique project name, within the owning domain. *Type: string*
name = resource.prop('name')
| [
"ecl.identity.identity_service.IdentityService",
"ecl.resource.prop"
] | [((751, 785), 'ecl.identity.identity_service.IdentityService', 'identity_service.IdentityService', ([], {}), '()\n', (783, 785), False, 'from ecl.identity import identity_service\n'), ((1040, 1068), 'ecl.resource.prop', 'resource.prop', (['"""description"""'], {}), "('description')\n", (1053, 1068), False, 'from ecl import resource\n'), ((1332, 1358), 'ecl.resource.prop', 'resource.prop', (['"""domain_id"""'], {}), "('domain_id')\n", (1345, 1358), False, 'from ecl import resource\n'), ((1663, 1698), 'ecl.resource.prop', 'resource.prop', (['"""enabled"""'], {'type': 'bool'}), "('enabled', type=bool)\n", (1676, 1698), False, 'from ecl import resource\n'), ((1779, 1800), 'ecl.resource.prop', 'resource.prop', (['"""name"""'], {}), "('name')\n", (1792, 1800), False, 'from ecl import resource\n')] |
from collections import deque
import pickle
import cv2
import numpy as np
import time
import ast
from utils import *
import tensorflow_hub as hub
import concurrent.futures
from tensorflow.keras import layers
import tensorflow as tf
# Load Yolo
net = cv2.dnn.readNet("./data/yolov4-tiny.weights", "./data/yolov4-tiny.cfg")
net.setPreferableBackend(cv2.dnn.DNN_BACKEND_OPENCV)
net.setPreferableTarget(cv2.dnn.DNN_TARGET_CPU)
classes = []
with open("coco.names", "r") as f:
classes = [line.strip() for line in f.readlines()]
print(classes)
layer_names = net.getLayerNames()
output_layers = [layer_names[i[0] - 1] for i in net.getUnconnectedOutLayers()]
colors = np.random.uniform(0, 255, size=(len(classes), 3))
# Loading image
cap = cv2.VideoCapture('vid_short.mp4')
mouse_pts = []
model = tf.keras.models.load_model('./model/resnet191020.h5')
model.summary()
#lb = pickle.loads(open(args["label"], "rb").read())
#lb = ["football","tennis","weight_lifting"]
lb = ['Fire', 'Normal Car', 'Normal', 'Road Accident', 'Shooting', 'Violence']
#model.summary()
# initialize the image mean for mean subtraction along with the
# predictions queue
mean = np.array([123.68, 116.779, 103.939][::1], dtype="float32")
Q = deque(maxlen=128)
train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale = 1./255.,
)
my_file = open("./test.txt","a+")
def get_mouse_points(event, x, y, flags, param):
# Used to mark 4 points on the frame zero of the video that will be warped
# Used to mark 2 points on the frame zero of the video that are 6 feet away
global mouseX, mouseY, mouse_pts
if event == cv2.EVENT_LBUTTONDOWN:
mouseX, mouseY = x, y
file1=open("./test.txt","a")
cv2.circle(image, (x, y), 10, (0, 255, 255), 10)
if "mouse_pts" not in globals():
mouse_pts = []
if(len(mouse_pts)==6):
file1.write(str(mouse_pts))
file1.close()
mouse_pts.append((x, y))
print("Point detected")
print(mouse_pts)
def Check(a, b):
dist = ((a[0] - b[0]) ** 2 + 550 / ((a[1] + b[1]) / 2) * (a[1] - b[1]) ** 2) ** 0.5
calibration = (a[1] + b[1]) / 2
if 0 < dist < 0.25 * calibration:
return True
else:
return False
scale_w = 1.2 / 2
scale_h = 4 / 2
SOLID_BACK_COLOR = (41, 41, 41)
frame_num = 0
total_pedestrians_detected = 0
total_six_feet_violations = 0
total_pairs = 0
abs_six_feet_violations = 0
pedestrian_per_sec = 0
sh_index = 1
sc_index = 1
cv2.namedWindow("image")
cv2.setMouseCallback("image", get_mouse_points)
num_mouse_points = 0
first_frame_display = True
font = cv2.FONT_HERSHEY_PLAIN
starting_time = time.time()
frame_id = 0
while True:
_, frame = cap.read()
frame_id += 1
height, width, channels = frame.shape
if frame_id == 1:
# Ask user to mark parallel points and two points 6 feet apart. Order bl, br, tr, tl, p1, p2
while True:
image = frame
file = open('./test.txt','r')
s = file.read()
if s:
x = ast.literal_eval(s)
cv2.imshow("image", image)
cv2.waitKey(1)
if s:
if len(mouse_pts) == 7 or len(x) == 6:
cv2.destroyWindow("image")
mouse_pts = x
break
first_frame_display = False
four_points = mouse_pts
M = perspective(frame, four_points[0:4])
pts = src = np.float32(np.array([four_points[4:]]))
warped_pt = cv2.perspectiveTransform(pts, M)[0]
d_thresh = np.sqrt(
(warped_pt[0][0] - warped_pt[1][0]) ** 2
+ (warped_pt[0][1] - warped_pt[1][1]) ** 2
)
bird_image = np.zeros(
(int(height * scale_h), int(width * scale_w), 3), np.uint8
)
bird_image[:] = SOLID_BACK_COLOR
pedestrian_detect = frame
# Detecting objects
blob = cv2.dnn.blobFromImage(frame, 0.00392, (416, 416), (0, 0, 0), True, crop=False)
net.setInput(blob)
outs = net.forward(output_layers)
class_ids = []
confidences = []
boxes = []
for out in outs:
for detection in out:
scores = detection[5:]
class_id = np.argmax(scores)
confidence = scores[class_id]
if confidence > 0.5 and class_id == 0:
center_x = int(detection[0] * width)
center_y = int(detection[1] * height)
h = int(detection[3] * height)
w = int(detection[2] * width)
x = int(center_x - w / 2)
y = int(center_y - h / 2)
boxes.append([x, y, w, h])
confidences.append(float(confidence))
class_ids.append(class_id)
indexes = cv2.dnn.NMSBoxes(boxes, confidences, 0.5, 0.4)
if len(indexes) > 0:
flat_box = indexes.flatten()
pairs = []
center = []
status = []
for i in flat_box:
(x, y) = (boxes[i][0], boxes[i][1])
(w, h) = (boxes[i][2], boxes[i][3])
center.append([int(x + w / 2), int(y + h / 2)])
status.append(False)
for i in range(len(center)):
for j in range(len(center)):
close = Check(center[i], center[j])
if close:
pairs.append([center[i], center[j]])
status[i] = True
status[j] = True
index = 0
for i in flat_box:
(x, y) = (boxes[i][0], boxes[i][1])
(w, h) = (boxes[i][2], boxes[i][3])
if status[index] == True:
cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 0, 150), 2)
elif status[index] == False:
cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
index += 1
for h in pairs:
cv2.line(frame, tuple(h[0]), tuple(h[1]), (0, 0, 255), 2)
processedImg = frame.copy()
pedestrian_boxes, num_pedestrians = indexes, len(indexes)
# if len(indexes) > 0:
# pedestrian_detect = bird_eye_view_plot(frames, boxes, M, scale_w, scale_h)
canvas = np.zeros((200,200,3))
canvas[:] = (0,0,0)
text = "people:{}".format(len(pedestrian_boxes))
cv2.putText(canvas, text, (35,50), cv2.FONT_HERSHEY_SIMPLEX,
1.0, (0,255,0), 5)
cv2.imshow('info',canvas)
# make predictions on the frame and then update the predictions
# queue
canvas = np.zeros((250, 300, 3), dtype="uint8")
output = frame.copy()
frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
frame = cv2.resize(frame, (224, 224)).astype("float32")
frame = train_datagen.standardize(frame)
preds = model.predict(np.expand_dims(frame, axis=0),workers=6,use_multiprocessing=True)[0]
Q.append(preds)
for (i,(lab, prob)) in enumerate(zip(lb, preds)):
text= "{}:{:.2f}%".format(lab, prob*100)
w = int(prob*300)
cv2.rectangle(canvas, (7, (i*35) +5),
(w, (i*35)+35), (0,0,255), -1)
cv2.putText(canvas, text, (10,(i*35)+23), cv2.FONT_HERSHEY_SIMPLEX,0.45, (255,255,255),2)
results = np.array(Q).mean(axis=0)
i = np.argmax(results)
label = lb[i]
print(label)
# draw the activity on the output frame
text = "{}".format(label)
cv2.putText(output, text, (105, 50), cv2.FONT_HERSHEY_SIMPLEX,
1.0, (0, 255, 0), 5)
cv2.imshow("probs", canvas)
elapsed_time = time.time() - starting_time
fps = frame_id / elapsed_time
cv2.putText(output, "FPS: " + str(round(fps, 2)), (10, 50), font, 4, (0, 0, 0), 3)
cv2.imshow("Image", output)
for i in range(len(boxes)):
if i in indexes:
x, y, w, h = boxes[i]
label = str(classes[class_ids[i]])
confidence = confidences[i]
color = colors[class_ids[i]]
cv2.rectangle(frame, (x, y), (x + w, y + h), color, 2)
cv2.putText(frame, label + " " + str(round(confidence, 2)), (x, y + 30), font, 2, color, 1)
if len(pedestrian_boxes) > 0:
warped_pts, bird_image = display_points(
frame, boxes
)
key = cv2.waitKey(1)
if key == 27:
break
cap.release()
cv2.destroyAllWindows()
| [
"cv2.rectangle",
"numpy.sqrt",
"tensorflow.keras.preprocessing.image.ImageDataGenerator",
"cv2.imshow",
"numpy.array",
"tensorflow.keras.models.load_model",
"cv2.destroyAllWindows",
"cv2.dnn.NMSBoxes",
"cv2.setMouseCallback",
"collections.deque",
"cv2.perspectiveTransform",
"cv2.waitKey",
"c... | [((254, 325), 'cv2.dnn.readNet', 'cv2.dnn.readNet', (['"""./data/yolov4-tiny.weights"""', '"""./data/yolov4-tiny.cfg"""'], {}), "('./data/yolov4-tiny.weights', './data/yolov4-tiny.cfg')\n", (269, 325), False, 'import cv2\n'), ((741, 774), 'cv2.VideoCapture', 'cv2.VideoCapture', (['"""vid_short.mp4"""'], {}), "('vid_short.mp4')\n", (757, 774), False, 'import cv2\n'), ((801, 854), 'tensorflow.keras.models.load_model', 'tf.keras.models.load_model', (['"""./model/resnet191020.h5"""'], {}), "('./model/resnet191020.h5')\n", (827, 854), True, 'import tensorflow as tf\n'), ((1158, 1216), 'numpy.array', 'np.array', (['[123.68, 116.779, 103.939][::1]'], {'dtype': '"""float32"""'}), "([123.68, 116.779, 103.939][::1], dtype='float32')\n", (1166, 1216), True, 'import numpy as np\n'), ((1221, 1238), 'collections.deque', 'deque', ([], {'maxlen': '(128)'}), '(maxlen=128)\n', (1226, 1238), False, 'from collections import deque\n'), ((1256, 1324), 'tensorflow.keras.preprocessing.image.ImageDataGenerator', 'tf.keras.preprocessing.image.ImageDataGenerator', ([], {'rescale': '(1.0 / 255.0)'}), '(rescale=1.0 / 255.0)\n', (1303, 1324), True, 'import tensorflow as tf\n'), ((2530, 2554), 'cv2.namedWindow', 'cv2.namedWindow', (['"""image"""'], {}), "('image')\n", (2545, 2554), False, 'import cv2\n'), ((2555, 2602), 'cv2.setMouseCallback', 'cv2.setMouseCallback', (['"""image"""', 'get_mouse_points'], {}), "('image', get_mouse_points)\n", (2575, 2602), False, 'import cv2\n'), ((2698, 2709), 'time.time', 'time.time', ([], {}), '()\n', (2707, 2709), False, 'import time\n'), ((8319, 8342), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (8340, 8342), False, 'import cv2\n'), ((3969, 4047), 'cv2.dnn.blobFromImage', 'cv2.dnn.blobFromImage', (['frame', '(0.00392)', '(416, 416)', '(0, 0, 0)', '(True)'], {'crop': '(False)'}), '(frame, 0.00392, (416, 416), (0, 0, 0), True, crop=False)\n', (3990, 4047), False, 'import cv2\n'), ((4828, 4874), 'cv2.dnn.NMSBoxes', 
'cv2.dnn.NMSBoxes', (['boxes', 'confidences', '(0.5)', '(0.4)'], {}), '(boxes, confidences, 0.5, 0.4)\n', (4844, 4874), False, 'import cv2\n'), ((6231, 6254), 'numpy.zeros', 'np.zeros', (['(200, 200, 3)'], {}), '((200, 200, 3))\n', (6239, 6254), True, 'import numpy as np\n'), ((6334, 6420), 'cv2.putText', 'cv2.putText', (['canvas', 'text', '(35, 50)', 'cv2.FONT_HERSHEY_SIMPLEX', '(1.0)', '(0, 255, 0)', '(5)'], {}), '(canvas, text, (35, 50), cv2.FONT_HERSHEY_SIMPLEX, 1.0, (0, 255,\n 0), 5)\n', (6345, 6420), False, 'import cv2\n'), ((6429, 6455), 'cv2.imshow', 'cv2.imshow', (['"""info"""', 'canvas'], {}), "('info', canvas)\n", (6439, 6455), False, 'import cv2\n'), ((6555, 6593), 'numpy.zeros', 'np.zeros', (['(250, 300, 3)'], {'dtype': '"""uint8"""'}), "((250, 300, 3), dtype='uint8')\n", (6563, 6593), True, 'import numpy as np\n'), ((6632, 6670), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2RGB'], {}), '(frame, cv2.COLOR_BGR2RGB)\n', (6644, 6670), False, 'import cv2\n'), ((7272, 7290), 'numpy.argmax', 'np.argmax', (['results'], {}), '(results)\n', (7281, 7290), True, 'import numpy as np\n'), ((7404, 7491), 'cv2.putText', 'cv2.putText', (['output', 'text', '(105, 50)', 'cv2.FONT_HERSHEY_SIMPLEX', '(1.0)', '(0, 255, 0)', '(5)'], {}), '(output, text, (105, 50), cv2.FONT_HERSHEY_SIMPLEX, 1.0, (0, 255,\n 0), 5)\n', (7415, 7491), False, 'import cv2\n'), ((7505, 7532), 'cv2.imshow', 'cv2.imshow', (['"""probs"""', 'canvas'], {}), "('probs', canvas)\n", (7515, 7532), False, 'import cv2\n'), ((7706, 7733), 'cv2.imshow', 'cv2.imshow', (['"""Image"""', 'output'], {}), "('Image', output)\n", (7716, 7733), False, 'import cv2\n'), ((8257, 8271), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (8268, 8271), False, 'import cv2\n'), ((1758, 1806), 'cv2.circle', 'cv2.circle', (['image', '(x, y)', '(10)', '(0, 255, 255)', '(10)'], {}), '(image, (x, y), 10, (0, 255, 255), 10)\n', (1768, 1806), False, 'import cv2\n'), ((3619, 3715), 'numpy.sqrt', 'np.sqrt', 
(['((warped_pt[0][0] - warped_pt[1][0]) ** 2 + (warped_pt[0][1] - warped_pt[1]\n [1]) ** 2)'], {}), '((warped_pt[0][0] - warped_pt[1][0]) ** 2 + (warped_pt[0][1] -\n warped_pt[1][1]) ** 2)\n', (3626, 3715), True, 'import numpy as np\n'), ((7044, 7117), 'cv2.rectangle', 'cv2.rectangle', (['canvas', '(7, i * 35 + 5)', '(w, i * 35 + 35)', '(0, 0, 255)', '(-1)'], {}), '(canvas, (7, i * 35 + 5), (w, i * 35 + 35), (0, 0, 255), -1)\n', (7057, 7117), False, 'import cv2\n'), ((7134, 7234), 'cv2.putText', 'cv2.putText', (['canvas', 'text', '(10, i * 35 + 23)', 'cv2.FONT_HERSHEY_SIMPLEX', '(0.45)', '(255, 255, 255)', '(2)'], {}), '(canvas, text, (10, i * 35 + 23), cv2.FONT_HERSHEY_SIMPLEX, 0.45,\n (255, 255, 255), 2)\n', (7145, 7234), False, 'import cv2\n'), ((7553, 7564), 'time.time', 'time.time', ([], {}), '()\n', (7562, 7564), False, 'import time\n'), ((3128, 3154), 'cv2.imshow', 'cv2.imshow', (['"""image"""', 'image'], {}), "('image', image)\n", (3138, 3154), False, 'import cv2\n'), ((3167, 3181), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (3178, 3181), False, 'import cv2\n'), ((3515, 3542), 'numpy.array', 'np.array', (['[four_points[4:]]'], {}), '([four_points[4:]])\n', (3523, 3542), True, 'import numpy as np\n'), ((3564, 3596), 'cv2.perspectiveTransform', 'cv2.perspectiveTransform', (['pts', 'M'], {}), '(pts, M)\n', (3588, 3596), False, 'import cv2\n'), ((4275, 4292), 'numpy.argmax', 'np.argmax', (['scores'], {}), '(scores)\n', (4284, 4292), True, 'import numpy as np\n'), ((6683, 6712), 'cv2.resize', 'cv2.resize', (['frame', '(224, 224)'], {}), '(frame, (224, 224))\n', (6693, 6712), False, 'import cv2\n'), ((6811, 6840), 'numpy.expand_dims', 'np.expand_dims', (['frame'], {'axis': '(0)'}), '(frame, axis=0)\n', (6825, 6840), True, 'import numpy as np\n'), ((7239, 7250), 'numpy.array', 'np.array', (['Q'], {}), '(Q)\n', (7247, 7250), True, 'import numpy as np\n'), ((7967, 8021), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(x, y)', '(x + w, y + h)', 'color', 
'(2)'], {}), '(frame, (x, y), (x + w, y + h), color, 2)\n', (7980, 8021), False, 'import cv2\n'), ((3096, 3115), 'ast.literal_eval', 'ast.literal_eval', (['s'], {}), '(s)\n', (3112, 3115), False, 'import ast\n'), ((5717, 5777), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(x, y)', '(x + w, y + h)', '(0, 0, 150)', '(2)'], {}), '(frame, (x, y), (x + w, y + h), (0, 0, 150), 2)\n', (5730, 5777), False, 'import cv2\n'), ((3275, 3301), 'cv2.destroyWindow', 'cv2.destroyWindow', (['"""image"""'], {}), "('image')\n", (3292, 3301), False, 'import cv2\n'), ((5835, 5895), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(x, y)', '(x + w, y + h)', '(0, 255, 0)', '(2)'], {}), '(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)\n', (5848, 5895), False, 'import cv2\n')] |
from bling.helpscout.client import HelpScoutClient
from bling.mc.client import MobileCommonsClient
from twilio.rest import Client as TwilioClient
from bling.incoming import IncomingHandler
from bling.outgoing import OutgoingHandler
from bling.config import config
from bling.transport import (
Transport,
TwilioTransport,
MobileCommonsTransport,
TWILIO_TRANSPORT_TYPE,
MOBILECOMMONS_TRANSPORT_TYPE,
)
def helpscout_client() -> HelpScoutClient:
return HelpScoutClient(
client_id=config.helpscout_api_client_id,
secret=config.helpscout_api_client_secret,
)
def twilio_client() -> TwilioClient:
return TwilioClient(config.twilio_account_sid, config.twilio_auth_token)
def twilio_transport() -> TwilioTransport:
return TwilioTransport(twilio_client())
def mobilecommons_client() -> MobileCommonsClient:
return MobileCommonsClient(
config.mobilecommons_username, config.mobilecommons_password
)
def mobilecommons_transport() -> MobileCommonsTransport:
return MobileCommonsTransport(mobilecommons_client())
def incoming_message_handler(transport: Transport) -> IncomingHandler:
return IncomingHandler(helpscout_client(), transport)
def transport_for_type(transport_type: str) -> Transport:
if transport_type == TWILIO_TRANSPORT_TYPE:
return twilio_transport()
elif transport_type == MOBILECOMMONS_TRANSPORT_TYPE:
return mobilecommons_transport()
else:
raise Exception(f"'{transport_type}' is not a valid transport type'")
def outgoing_reply_handler(transport: Transport) -> OutgoingHandler:
return OutgoingHandler(helpscout_client(), transport) | [
"bling.helpscout.client.HelpScoutClient",
"twilio.rest.Client",
"bling.mc.client.MobileCommonsClient"
] | [((480, 585), 'bling.helpscout.client.HelpScoutClient', 'HelpScoutClient', ([], {'client_id': 'config.helpscout_api_client_id', 'secret': 'config.helpscout_api_client_secret'}), '(client_id=config.helpscout_api_client_id, secret=config.\n helpscout_api_client_secret)\n', (495, 585), False, 'from bling.helpscout.client import HelpScoutClient\n'), ((654, 719), 'twilio.rest.Client', 'TwilioClient', (['config.twilio_account_sid', 'config.twilio_auth_token'], {}), '(config.twilio_account_sid, config.twilio_auth_token)\n', (666, 719), True, 'from twilio.rest import Client as TwilioClient\n'), ((873, 959), 'bling.mc.client.MobileCommonsClient', 'MobileCommonsClient', (['config.mobilecommons_username', 'config.mobilecommons_password'], {}), '(config.mobilecommons_username, config.\n mobilecommons_password)\n', (892, 959), False, 'from bling.mc.client import MobileCommonsClient\n')] |
# -*- coding: utf-8 -*-
import MySQLdb
class Connection(object):
def __init__(self):
self.db = MySQLdb.connect(host="localhost", # your host, usually localhost
user="root", # your username
passwd="<PASSWORD>", # your password
db="gigapy") # name of the data base
self.cursor = self.db.cursor()
def close_connection(self):
self.cursor.close()
self.db.close()
conn = Connection()
| [
"MySQLdb.connect"
] | [((112, 197), 'MySQLdb.connect', 'MySQLdb.connect', ([], {'host': '"""localhost"""', 'user': '"""root"""', 'passwd': '"""<PASSWORD>"""', 'db': '"""gigapy"""'}), "(host='localhost', user='root', passwd='<PASSWORD>', db='gigapy'\n )\n", (127, 197), False, 'import MySQLdb\n')] |
import requests
from anytree import Node, RenderTree
import argparse
import json
ROR_API_ENDPOINT = "https://api.ror.org/organizations/"
# construct organizational tree recursively starting at given ROR
def construct(ror, parent=None):
organization = get_data(ror)
current_node = Node(organization["name"], parent=parent)
for rel in organization['relationships']:
if rel["type"]=="Child":
construct(rel["id"], current_node)
return current_node
# HTTP request to get data from ROR API
def get_data(ror):
response = requests.get(ROR_API_ENDPOINT + ror)
response_text = response.text.encode('ascii', 'ignore')
return json.loads(response_text)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--ror', type=str, default='https://ror.org/03vek6s52')
args = parser.parse_args()
ror = args.ror
tree = construct(ror)
print(RenderTree(tree))
if __name__ == '__main__':
main()
| [
"json.loads",
"argparse.ArgumentParser",
"requests.get",
"anytree.RenderTree",
"anytree.Node"
] | [((291, 332), 'anytree.Node', 'Node', (["organization['name']"], {'parent': 'parent'}), "(organization['name'], parent=parent)\n", (295, 332), False, 'from anytree import Node, RenderTree\n'), ((560, 596), 'requests.get', 'requests.get', (['(ROR_API_ENDPOINT + ror)'], {}), '(ROR_API_ENDPOINT + ror)\n', (572, 596), False, 'import requests\n'), ((668, 693), 'json.loads', 'json.loads', (['response_text'], {}), '(response_text)\n', (678, 693), False, 'import json\n'), ((720, 745), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (743, 745), False, 'import argparse\n'), ((918, 934), 'anytree.RenderTree', 'RenderTree', (['tree'], {}), '(tree)\n', (928, 934), False, 'from anytree import Node, RenderTree\n')] |
import RPi.GPIO as GPIO
import time
channel = 21
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(channel, GPIO.OUT)
GPIO.output(channel, GPIO.HIGH)
#time.sleep(5)
#GPIO.output(channel, GPIO.LOW)
| [
"RPi.GPIO.setup",
"RPi.GPIO.setwarnings",
"RPi.GPIO.output",
"RPi.GPIO.setmode"
] | [((51, 73), 'RPi.GPIO.setmode', 'GPIO.setmode', (['GPIO.BCM'], {}), '(GPIO.BCM)\n', (63, 73), True, 'import RPi.GPIO as GPIO\n'), ((74, 97), 'RPi.GPIO.setwarnings', 'GPIO.setwarnings', (['(False)'], {}), '(False)\n', (90, 97), True, 'import RPi.GPIO as GPIO\n'), ((98, 127), 'RPi.GPIO.setup', 'GPIO.setup', (['channel', 'GPIO.OUT'], {}), '(channel, GPIO.OUT)\n', (108, 127), True, 'import RPi.GPIO as GPIO\n'), ((130, 161), 'RPi.GPIO.output', 'GPIO.output', (['channel', 'GPIO.HIGH'], {}), '(channel, GPIO.HIGH)\n', (141, 161), True, 'import RPi.GPIO as GPIO\n')] |
# -*- coding: utf-8 -*-
# @COPYRIGHT_begin
#
# Copyright [2010-2014] Institute of Nuclear Physics PAN, Krakow, Poland
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @COPYRIGHT_end
"""@package src.wi.urls.user.user
@author <NAME>
@date 1.10.2010
"""
from django.conf.urls import patterns, url, include
from wi.utils.decorators import user_permission
from wi.utils.views import direct_to_template, simple_generic, get_list_generic, simple_generic_id
account_patterns = patterns('wi.views.user.user',
url(r'^$', user_permission(direct_to_template), {'template_name': 'account/base.html'}, name='acc_account'),
url(r'^account_data/$', user_permission(direct_to_template),
{'template_name': 'account/account_data.html'}, name='acc_account_data'),
url(r'^account_data/ajax/get_user_data/$', 'acc_ajax_get_user_data', name='acc_ajax_get_user_data'),
url(r'^account_data/ajax/edit/$', 'acc_ajax_account_data_edit', name='acc_ajax_account_data_edit'),
url(r'^account_quotas/$', user_permission(direct_to_template),
{'template_name': 'account/account_quotas.html'}, name='acc_account_quotas'),
url(r'^account_data/ajax/get_user_quotas/$', 'acc_ajax_get_user_quotas', name='acc_ajax_get_user_quotas'),
url(r'^password_change/$', 'acc_password_change', name='acc_password_change'),
url(r'^ajax/charts/$', user_permission(simple_generic),
{'template_name': 'account/ajax/charts.html'}, name='acc_ajax_account_charts'),
url(r'^ajax/charts_points/$', user_permission(get_list_generic),
{'request_url': 'user/user/points_history/'}, name='acc_ajax_charts_points'),
)
help_patterns = patterns('wi.views.user.user',
url(r'^form/$', 'hlp_form', name='hlp_form'),
url(r'^issue_error/$', direct_to_template, {'template_name': 'help/issue_error.html'}, name='hlp_issue_error'),
url(r'^issue_sent/$', direct_to_template, {'template_name': 'help/issue_sent.html'}, name='hlp_issue_sent'),
)
main_patterns = patterns('wi.views.user.user',
url(r'^remove_message/(?P<id1>\d+)/$', user_permission(simple_generic_id),
{'success_msg': (lambda desc: _('Message removed.') % {'desc': desc}),
'request_url': 'user/message/delete/',
'id_key': 'message_id'},
name='remove_message'),
url(r'^get_messages/$', 'get_messages', name='get_messages'),
)
urlpatterns = patterns('',
url(r'^account/', include(account_patterns)),
url(r'^help/', include(help_patterns)),
url(r'^main/', include(main_patterns)),
url(r'^change_cm/(?P<cm_id>\d+)/$', 'change_cm', name='change_cm'),
)
| [
"django.conf.urls.include",
"django.conf.urls.url",
"wi.utils.decorators.user_permission"
] | [((1284, 1387), 'django.conf.urls.url', 'url', (['"""^account_data/ajax/get_user_data/$"""', '"""acc_ajax_get_user_data"""'], {'name': '"""acc_ajax_get_user_data"""'}), "('^account_data/ajax/get_user_data/$', 'acc_ajax_get_user_data', name=\n 'acc_ajax_get_user_data')\n", (1287, 1387), False, 'from django.conf.urls import patterns, url, include\n'), ((1390, 1492), 'django.conf.urls.url', 'url', (['"""^account_data/ajax/edit/$"""', '"""acc_ajax_account_data_edit"""'], {'name': '"""acc_ajax_account_data_edit"""'}), "('^account_data/ajax/edit/$', 'acc_ajax_account_data_edit', name=\n 'acc_ajax_account_data_edit')\n", (1393, 1492), False, 'from django.conf.urls import patterns, url, include\n'), ((1648, 1756), 'django.conf.urls.url', 'url', (['"""^account_data/ajax/get_user_quotas/$"""', '"""acc_ajax_get_user_quotas"""'], {'name': '"""acc_ajax_get_user_quotas"""'}), "('^account_data/ajax/get_user_quotas/$', 'acc_ajax_get_user_quotas',\n name='acc_ajax_get_user_quotas')\n", (1651, 1756), False, 'from django.conf.urls import patterns, url, include\n'), ((1760, 1836), 'django.conf.urls.url', 'url', (['"""^password_change/$"""', '"""acc_password_change"""'], {'name': '"""acc_password_change"""'}), "('^password_change/$', 'acc_password_change', name='acc_password_change')\n", (1763, 1836), False, 'from django.conf.urls import patterns, url, include\n'), ((2197, 2240), 'django.conf.urls.url', 'url', (['"""^form/$"""', '"""hlp_form"""'], {'name': '"""hlp_form"""'}), "('^form/$', 'hlp_form', name='hlp_form')\n", (2200, 2240), False, 'from django.conf.urls import patterns, url, include\n'), ((2247, 2360), 'django.conf.urls.url', 'url', (['"""^issue_error/$"""', 'direct_to_template', "{'template_name': 'help/issue_error.html'}"], {'name': '"""hlp_issue_error"""'}), "('^issue_error/$', direct_to_template, {'template_name':\n 'help/issue_error.html'}, name='hlp_issue_error')\n", (2250, 2360), False, 'from django.conf.urls import patterns, url, include\n'), ((2363, 2473), 
'django.conf.urls.url', 'url', (['"""^issue_sent/$"""', 'direct_to_template', "{'template_name': 'help/issue_sent.html'}"], {'name': '"""hlp_issue_sent"""'}), "('^issue_sent/$', direct_to_template, {'template_name':\n 'help/issue_sent.html'}, name='hlp_issue_sent')\n", (2366, 2473), False, 'from django.conf.urls import patterns, url, include\n'), ((2815, 2874), 'django.conf.urls.url', 'url', (['"""^get_messages/$"""', '"""get_messages"""'], {'name': '"""get_messages"""'}), "('^get_messages/$', 'get_messages', name='get_messages')\n", (2818, 2874), False, 'from django.conf.urls import patterns, url, include\n'), ((3049, 3115), 'django.conf.urls.url', 'url', (['"""^change_cm/(?P<cm_id>\\\\d+)/$"""', '"""change_cm"""'], {'name': '"""change_cm"""'}), "('^change_cm/(?P<cm_id>\\\\d+)/$', 'change_cm', name='change_cm')\n", (3052, 3115), False, 'from django.conf.urls import patterns, url, include\n'), ((1034, 1069), 'wi.utils.decorators.user_permission', 'user_permission', (['direct_to_template'], {}), '(direct_to_template)\n', (1049, 1069), False, 'from wi.utils.decorators import user_permission\n'), ((1160, 1195), 'wi.utils.decorators.user_permission', 'user_permission', (['direct_to_template'], {}), '(direct_to_template)\n', (1175, 1195), False, 'from wi.utils.decorators import user_permission\n'), ((1521, 1556), 'wi.utils.decorators.user_permission', 'user_permission', (['direct_to_template'], {}), '(direct_to_template)\n', (1536, 1556), False, 'from wi.utils.decorators import user_permission\n'), ((1867, 1898), 'wi.utils.decorators.user_permission', 'user_permission', (['simple_generic'], {}), '(simple_generic)\n', (1882, 1898), False, 'from wi.utils.decorators import user_permission\n'), ((2022, 2055), 'wi.utils.decorators.user_permission', 'user_permission', (['get_list_generic'], {}), '(get_list_generic)\n', (2037, 2055), False, 'from wi.utils.decorators import user_permission\n'), ((2565, 2599), 'wi.utils.decorators.user_permission', 'user_permission', 
(['simple_generic_id'], {}), '(simple_generic_id)\n', (2580, 2599), False, 'from wi.utils.decorators import user_permission\n'), ((2929, 2954), 'django.conf.urls.include', 'include', (['account_patterns'], {}), '(account_patterns)\n', (2936, 2954), False, 'from django.conf.urls import patterns, url, include\n'), ((2976, 2998), 'django.conf.urls.include', 'include', (['help_patterns'], {}), '(help_patterns)\n', (2983, 2998), False, 'from django.conf.urls import patterns, url, include\n'), ((3020, 3042), 'django.conf.urls.include', 'include', (['main_patterns'], {}), '(main_patterns)\n', (3027, 3042), False, 'from django.conf.urls import patterns, url, include\n')] |
"""
Train Fashion MNIST CNN
Code borrowed from http://danialk.github.io/blog/2017/09/29/range-of-
convolutional-neural-networks-on-fashion-mnist-dataset/
"""
# Specify visible cuda device
import os
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = "7"
import numpy as np
from keras.preprocessing.image import ImageDataGenerator
from lib.keras_utils import build_vgg_fmnist
from lib.utils import load_dataset_fmnist
batch_size = 512
# Load f-mnist, find mean and std
x_train, y_train, x_test, y_test = load_dataset_fmnist()
mean = x_train.mean().astype(np.float32)
std = x_train.std().astype(np.float32)
# Build Keras model
cnn = build_vgg_fmnist(mean, std)
# Data augmentation
gen = ImageDataGenerator(rotation_range=8, width_shift_range=0.08,
shear_range=0.3, height_shift_range=0.08,
zoom_range=0.08)
batches = gen.flow(x_train, y_train, batch_size=batch_size)
val_batches = gen.flow(x_test, y_test, batch_size=batch_size)
cnn.fit_generator(batches,
steps_per_epoch=60000//batch_size,
epochs=50,
validation_data=val_batches,
validation_steps=10000//batch_size,
use_multiprocessing=True)
score = cnn.evaluate(x_train, y_train, verbose=0)
print('Train loss:', score[0])
print('Train accuracy:', score[1])
score = cnn.evaluate(x_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
# Save trained weight
cnn.save_weights('./tmp/weights/fmnist_vgg_smxe.h5')
| [
"lib.utils.load_dataset_fmnist",
"keras.preprocessing.image.ImageDataGenerator",
"lib.keras_utils.build_vgg_fmnist"
] | [((538, 559), 'lib.utils.load_dataset_fmnist', 'load_dataset_fmnist', ([], {}), '()\n', (557, 559), False, 'from lib.utils import load_dataset_fmnist\n'), ((667, 694), 'lib.keras_utils.build_vgg_fmnist', 'build_vgg_fmnist', (['mean', 'std'], {}), '(mean, std)\n', (683, 694), False, 'from lib.keras_utils import build_vgg_fmnist\n'), ((722, 846), 'keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rotation_range': '(8)', 'width_shift_range': '(0.08)', 'shear_range': '(0.3)', 'height_shift_range': '(0.08)', 'zoom_range': '(0.08)'}), '(rotation_range=8, width_shift_range=0.08, shear_range=\n 0.3, height_shift_range=0.08, zoom_range=0.08)\n', (740, 846), False, 'from keras.preprocessing.image import ImageDataGenerator\n')] |
import hmac
import hashlib
from django.conf import settings
from django.contrib.auth.models import User
from .models import TurnusersLt
def sync_new_user_to_coturn(sender, instance, **kwargs):
##
# NOTE: Do not use this for REST API. You do not need to sync users if you are using the REST API.
#
#
if sender == User:
username = instance.get_username()
else:
if not hasattr(settings, "COTURN_USERNAME_FIELD"):
raise ImportError("Coturn was sent a signal from a non-django User model, but COTURN_USERNAME_FIELD is not set")
if not hasattr(instance, settings.COTURN_USERNAME_FIELD):
raise ValueError("Coturn - username field {} does not exist on model we were sent in sync signal".format(settings.COTURN_USERNAME_FIELD))
if "." in settings.COTURN_USERNAME_FIELD:
# walk down the set of sub-fields until you reach the username
fields = settings.COTURN_USERNAME_FIELD.split(".")
working_instance = instance
for field in fields:
if hasattr(working_instance, field):
working_instance = getattr(working_instance, field)
else:
raise ValueError("missing sub-field in username search: {}".format(field))
username = working_instance()
else:
username = getattr(instance, settings.COTURN_USERNAME_FIELD)
username = username()
if not hasattr(settings, "COTURN_REALM"):
raise ValueError("Coturn - missing COTURN_REALM entry in settings.py")
realm = settings.COTURN_REALM
# NOTE: since we assume the system will be running coturn in REST API mode, this password will never be used.
# so we set it to something random.
password = User.objects.make_random_password()
hash_val = hmac.new(settings.SECRET_KEY.encode("utf-8"), password.encode("utf-8"), hashlib.sha1)
hash_val.update(realm)
new_user = TurnusersLt(name=username, realm=realm, password=hash_val.hexdigest())
new_user.save(using="coturn")
| [
"django.contrib.auth.models.User.objects.make_random_password",
"django.conf.settings.SECRET_KEY.encode",
"django.conf.settings.COTURN_USERNAME_FIELD.split"
] | [((1787, 1822), 'django.contrib.auth.models.User.objects.make_random_password', 'User.objects.make_random_password', ([], {}), '()\n', (1820, 1822), False, 'from django.contrib.auth.models import User\n'), ((1847, 1882), 'django.conf.settings.SECRET_KEY.encode', 'settings.SECRET_KEY.encode', (['"""utf-8"""'], {}), "('utf-8')\n", (1873, 1882), False, 'from django.conf import settings\n'), ((939, 980), 'django.conf.settings.COTURN_USERNAME_FIELD.split', 'settings.COTURN_USERNAME_FIELD.split', (['"""."""'], {}), "('.')\n", (975, 980), False, 'from django.conf import settings\n')] |
import secrets
from typing import FrozenSet
class AuthorizationTokens:
def __init__(self, filename: str):
self._filename = filename
def get_tokens(self) -> FrozenSet[str]:
return self._read_tokens()
def generate_token(self) -> str:
token = secrets.token_urlsafe(32)
self._write_token(token)
return token
def _write_token(self, token: str):
with open(self._filename, "at") as file:
file.write(token)
file.write("\n")
def _read_tokens(self) -> FrozenSet[str]:
with open(self._filename, "r") as file:
return frozenset(self._token_gen(file))
def _token_gen(self, file):
lines = file.read()
for line in lines.split("\n"):
if (line):
yield line
| [
"secrets.token_urlsafe"
] | [((281, 306), 'secrets.token_urlsafe', 'secrets.token_urlsafe', (['(32)'], {}), '(32)\n', (302, 306), False, 'import secrets\n')] |
# Code made for <NAME>
# 12 Abril 2021
# License MIT
# Transport Phenomena: Python Program-Assessment 4.3
import matplotlib.pyplot as plt
import seaborn as sns
import numpy as np
from scipy.optimize import minimize
sns.set()
# Solve for Temperature of Steam at given Pressure
class enviroment_convective:
def temp_steam_sat_reg(self, Sat_pressure_1, Sat_pressure_2, Temp_from_pressure_1, Temp_from_pressure_2,
Sat_pressure_system):
p1 = Sat_pressure_1 # [kPa]
p2 = Sat_pressure_2 # [kPa]
T1 = Temp_from_pressure_1 + 273.15 # [K]
T2 = Temp_from_pressure_2 + 273.15 # [K]
P_x = Sat_pressure_system # [kPa]
m = (T2 - T1) / (p2 - p1)
T = m * P_x - (m * p1) + T1
return T
# Optimice for the maximum difference allow
class Optimice:
def objective_T(self, x, *args):
T_supp, r = args[0], args[1]
thk = 0.015
x1 = x[0] # C1
x2 = x[1] # C2
return T_supp - ((x1 * np.log(r + thk)) - x2)
def constraint_BC1_BC2(self, x):
r, T_in = (0.025, 484.8362745098039)
K, thk, h_in, T_out, h_out, e = (15.6, 0.015, 30, 25 + 273.15, 5, 0.3)
x1 = x[0] # C1
x2 = x[1] # C2
R_conv_1 = (1 / (2 * np.pi * (r)) * h_in)
h_comb = (2 * np.pi * (r + thk)) * (h_out + e * 5.670e-8 * (x1 * np.log(r + thk) + x2 - T_out)
* ((x1 * np.log(r + thk) + x2) ** 2 + T_out ** 2))
R_cond = np.log(thk) / (2 * np.pi * K)
return ((T_in - T_out) / (R_conv_1 + R_cond + (1 / h_comb))) + ((K * x1) / r)
def objective_T_II(self, x, *args):
T_supp, r = args[0], args[1]
x1 = x[0] # C1
x2 = x[1] # C2
return T_supp - ((x1 * np.log(r)) - x2)
def constraint_BC1_BC2_II(self, x):
r, T_in = (0.025, 484.8362745098039)
K, thk_1, h_in, T_out, h_out = (15.6, 0.015, 30, 25 + 273.15, 5)
K_2, thk_2, e = (0.25, 0.012, 0.8)
x1 = x[0] # C1
x2 = x[1] # C2
R_conv_1 = (1 / (2 * np.pi * r) * h_in)
R_cond = np.log(thk_1) / (2 * np.pi * K)
R_cond_2 = np.log(thk_2) / (2 * np.pi * K_2)
h_comb = (2 * np.pi * (r + thk_1 + thk_2)) * (
h_out + e * 5.670e-8 * (x1 * np.log(r + thk_1 + thk_2) + x2 - T_out)
* ((x1 * np.log(r + thk_1 + thk_2) + x2) ** 2 + T_out ** 2))
return ((T_in - T_out) / (R_conv_1 + R_cond + R_cond_2 + (1 / h_comb))) + ((K * x1) / r)
# Determine the Q flux with cover and without cover
class Q_determine:
def Q_uncover(self, r, T_in, K, thk, h_in, T_out, h_out, e, Delta_T):
T_surf = (T_int - Delta_T) + 273.15
R_conv_1 = (1 / (2 * np.pi * r) * h_in)
h_comb = (2 * np.pi * (r + thk)) * (h_out + e * 5.670e-8 * (T_surf - T_out)
* (T_surf ** 2 + T_out ** 2))
R_cond = np.log(thk) / (2 * np.pi * K)
Q = ((T_in - T_out) / (R_conv_1 + R_cond + (1 / h_comb)))
return Q
def Q_cover(self, r, T_in, K, K_2, thk_1, thk_2, h_in, T_out, h_out, e, Delta_T):
T_surf = (T_int - Delta_T) + 273.15
R_conv_1 = (1 / (2 * np.pi * r) * h_in)
R_cond = np.log(thk_1) / (2 * np.pi * K)
R_cond_2 = np.log(thk_2) / (2 * np.pi * K_2)
h_comb = (2 * np.pi * (r + thk_1 + thk_2)) * (
h_out + e * 5.670e-8 * (T_surf - T_out)
* (T_surf ** 2 + T_out ** 2))
Q = ((T_in - T_out) / (R_conv_1 + R_cond + R_cond_2 + (1 / h_comb)))
return Q
# Temperature of T in of the cylinder iron
class T_profile_iron:
def T_in_II(self, Q_tot, r, K, thk, T_surf_out):
R_cond = np.log(r - thk) / (2 * np.pi * K)
T_surf_in = (-Q_tot * R_cond) + T_surf_out
return T_surf_in
env_conv = enviroment_convective()
Opt = Optimice()
Q_s = Q_determine()
T_iron = T_profile_iron()
T_int = env_conv.temp_steam_sat_reg(1553, 2318, 200, 220, 2000)
constraint_equal1 = {'type': 'eq', 'fun': Opt.constraint_BC1_BC2}
constraint = [constraint_equal1]
# T_suppose, Radius_max, T_in
arguments = (T_int, 0.025)
x0 = [0, 0] # This initial values are extracted from a first solution given by the method
sol = minimize(Opt.objective_T, x0, method='SLSQP', args=arguments, constraints=constraint, options={'maxiter': 5})
# BIG NOTE: modify the iteration to reach values according to reality--> You need more restrictions
# In the result you find the maximum difference that the system reach between the suppose and the reality
Q_1 = Q_s.Q_uncover(0.025, T_int, 15.6, 0.015, 30, 25 + 273.15, 5, 0.3, sol.fun)
T_in_iron = T_iron.T_in_II(Q_1, 0.025, 30, 0.015, (T_int - sol.fun) + 273.15)
########################################### Case 2 #####################################################################
constraint_equal1_II = {'type': 'eq', 'fun': Opt.constraint_BC1_BC2_II}
constraint_II = [constraint_equal1_II]
# T_suppose, Radius_max
arguments_II = (T_int, 0.025 + 0.015 + 0.012)
x0 = [0, 0] # This initial values are extracted from a first solution given by the method
sol_II = minimize(Opt.objective_T, x0, method='SLSQP', args=arguments_II, constraints=constraint_II,
options={'maxiter': 5})
# BIG NOTE: modify the iteration to reach values according to reality--> You need more restrictions
# In the result you find the maximum difference that the system reach between the suppose and the reality
Q_2 = Q_s.Q_cover(0.025, T_int, 15.6, 0.25, 0.015, 0.012, 30, 25 + 273.15, 5, 0.3, sol_II.fun)
print("========================= WITH UNCOVER ==============================================\n")
print("Temperature in the convective enviro. 1: {} [K]".format(T_int))
print("Temperature at the start of the cylinder: {} [K]".format(T_in_iron))
print("Temperature at the end of the cylinder: {} [K]".format((T_int - sol.fun) + 273.15))
print("Q for meter of cylinder: {} [W/m]\n".format(Q_1))
print("================================================================================")
print("========================= WITH COVER ==============================================\n")
print("Temperature in the convective enviro. 1: {} [K]".format(T_int))
print("Temperature at the end of the cylinder: {} [K]".format((T_int - sol_II.fun) + 273.15))
print("Q for meter of cylinder: {} [W/m]\n".format(Q_2))
print("================================================================================\n") | [
"numpy.log",
"seaborn.set",
"scipy.optimize.minimize"
] | [((217, 226), 'seaborn.set', 'sns.set', ([], {}), '()\n', (224, 226), True, 'import seaborn as sns\n'), ((4244, 4358), 'scipy.optimize.minimize', 'minimize', (['Opt.objective_T', 'x0'], {'method': '"""SLSQP"""', 'args': 'arguments', 'constraints': 'constraint', 'options': "{'maxiter': 5}"}), "(Opt.objective_T, x0, method='SLSQP', args=arguments, constraints=\n constraint, options={'maxiter': 5})\n", (4252, 4358), False, 'from scipy.optimize import minimize\n'), ((5137, 5256), 'scipy.optimize.minimize', 'minimize', (['Opt.objective_T', 'x0'], {'method': '"""SLSQP"""', 'args': 'arguments_II', 'constraints': 'constraint_II', 'options': "{'maxiter': 5}"}), "(Opt.objective_T, x0, method='SLSQP', args=arguments_II,\n constraints=constraint_II, options={'maxiter': 5})\n", (5145, 5256), False, 'from scipy.optimize import minimize\n'), ((1503, 1514), 'numpy.log', 'np.log', (['thk'], {}), '(thk)\n', (1509, 1514), True, 'import numpy as np\n'), ((2108, 2121), 'numpy.log', 'np.log', (['thk_1'], {}), '(thk_1)\n', (2114, 2121), True, 'import numpy as np\n'), ((2159, 2172), 'numpy.log', 'np.log', (['thk_2'], {}), '(thk_2)\n', (2165, 2172), True, 'import numpy as np\n'), ((2921, 2932), 'numpy.log', 'np.log', (['thk'], {}), '(thk)\n', (2927, 2932), True, 'import numpy as np\n'), ((3230, 3243), 'numpy.log', 'np.log', (['thk_1'], {}), '(thk_1)\n', (3236, 3243), True, 'import numpy as np\n'), ((3281, 3294), 'numpy.log', 'np.log', (['thk_2'], {}), '(thk_2)\n', (3287, 3294), True, 'import numpy as np\n'), ((3703, 3718), 'numpy.log', 'np.log', (['(r - thk)'], {}), '(r - thk)\n', (3709, 3718), True, 'import numpy as np\n'), ((1005, 1020), 'numpy.log', 'np.log', (['(r + thk)'], {}), '(r + thk)\n', (1011, 1020), True, 'import numpy as np\n'), ((1776, 1785), 'numpy.log', 'np.log', (['r'], {}), '(r)\n', (1782, 1785), True, 'import numpy as np\n'), ((1361, 1376), 'numpy.log', 'np.log', (['(r + thk)'], {}), '(r + thk)\n', (1367, 1376), True, 'import numpy as np\n'), ((1444, 1459), 
'numpy.log', 'np.log', (['(r + thk)'], {}), '(r + thk)\n', (1450, 1459), True, 'import numpy as np\n'), ((2293, 2318), 'numpy.log', 'np.log', (['(r + thk_1 + thk_2)'], {}), '(r + thk_1 + thk_2)\n', (2299, 2318), True, 'import numpy as np\n'), ((2358, 2383), 'numpy.log', 'np.log', (['(r + thk_1 + thk_2)'], {}), '(r + thk_1 + thk_2)\n', (2364, 2383), True, 'import numpy as np\n')] |
from __future__ import (
absolute_import, division, print_function, unicode_literals)
import collections
import copy
import functools
import itertools
import json
import jsonschema
import logging
import os
import random
from ruamel.yaml import YAML
import six
from six import iteritems
from six.moves import range
from ..job_steps import step_runners
from ..utils import download_utils, fs_utils, job_utils
def experiment_paths(local_dirs, job_name, exp_name, urls_file):
output_dir = os.path.join(local_dirs['output'], job_name, exp_name)
return {
'client_dir': os.path.dirname(os.path.realpath(__file__)),
'archives_dir': local_dirs['archives'],
'metadata_dir': local_dirs['metadata'],
'output_dir': output_dir,
'fasta_output_dir': os.path.join(output_dir, 'fasta'),
'metadata_output_file': os.path.join(output_dir, 'metadata.json'),
'log_file': os.path.join(output_dir, 'log.txt'),
'experiment_rerun_file': os.path.join(output_dir,
'rerun_experiment.yml'),
'urls_file': urls_file
}
def preprocess_experiments(experiments, select_copy_for_options):
def inflate_expand_option(option_vals):
if isinstance(option_vals, six.string_types):
[start, end] = option_vals.split('..')
return range(int(start), int(end)+1)
else:
return option_vals
def exp_name_with_options(exp_name, option_values):
return exp_name + '-' + '-'.join('{}={}'.format(option_key, option_val)
for (option_key, option_val)
in iteritems(dict(option_values)))
# handle expand_options
final_experiments = collections.OrderedDict()
for exp_name, exp_options in iteritems(experiments):
if 'expand_options' in exp_options:
nonexpanded_options = exp_options.copy()
nonexpanded_options.pop('expand_options')
expand_options = exp_options['expand_options']
expand_values = list(itertools.product(*(
[(option_key, option_val)
for option_val in inflate_expand_option(option_vals)]
for (option_key, option_vals) in iteritems(expand_options)
)))
for expanded_options in expand_values:
new_exp_name = exp_name_with_options(exp_name,
expanded_options)
new_exp_options = dict(expanded_options, **nonexpanded_options)
# handle selection copy_for_options
if select_copy_for_options:
sliced_options = [o for o in expanded_options if
o[0] not in select_copy_for_options]
for opts in expand_values:
if opts == expanded_options:
break
elif all(o in opts for o in sliced_options):
new_exp_options['selection_copy_from'] = \
exp_name_with_options(exp_name, opts)
break
final_experiments[new_exp_name] = new_exp_options
else:
final_experiments[exp_name] = exp_options
return final_experiments
def preprocess_steps(steps, paths, exp_options, disable_avx):
def make_output_paths(options, keys):
for key in keys:
if key in options and not os.path.isabs(options[key]):
options[key] = os.path.join(paths['output_dir'], options[key])
steps = copy.deepcopy(steps)
for step_options in steps:
if step_options['type'] == 'select':
step_options.update(paths)
elif step_options['type'] == 'kmers':
step_options['fasta_output_dir'] = paths['fasta_output_dir']
step_options['disable_avx'] = disable_avx
if step_options['k'] == 'from_options':
step_options['k'] = exp_options['k']
make_output_paths(step_options, ['output_file'])
elif step_options['type'] == 'distances':
step_options['disable_avx'] = disable_avx
make_output_paths(step_options, ['input_file', 'output_prefix'])
elif step_options['type'] == 'mds':
make_output_paths(step_options, ['dists_file', 'output_file'])
elif step_options['type'] == 'classify':
step_options['metadata_file'] = paths['metadata_output_file']
make_output_paths(step_options, ['features_file', 'output_file'])
generation_opts = next(
(step for step in steps if step['type'] == 'kmers' and
step['output_file'] == step_options['features_file']),
None
)
if generation_opts:
step_options['generation_options'] = {
k: generation_opts[k] for k in
{'mode', 'k', 'bits_per_element'}
}
return steps
def validate_schema(data, schema_name):
with open(os.path.normpath(os.path.join(
os.path.dirname(__file__), '..', 'schemas',
schema_name + '.json'
))) as schema:
try:
jsonschema.validate(data, json.load(schema))
except Exception as e:
e.message = ('error while validating {}: {}'
.format(schema_name, e.message))
raise e
def validate_job_options(options):
validate_schema(options, 'job_options')
# check lambdas under experiments
if isinstance(options['experiments'], six.string_types):
job_utils.parse_multiline_lambda_str(options['experiments'])
else:
for exp_opts in options['experiments'].values():
if isinstance(exp_opts['groups'], six.string_types):
job_utils.parse_multiline_lambda_str(exp_opts['groups'])
# check select step
select_steps = [s for s in options['steps'] if s['type'] == 'select']
if len(select_steps) > 1:
raise Exception('at most one step of type select is allowed in a job')
elif select_steps:
select_step = select_steps[0]
job_utils.parse_multiline_lambda_str(select_step['pick_group'])
if 'postprocess' in select_step:
job_utils.parse_multiline_lambda_str(select_step['postprocess'])
def load_metadata(metadata_dir, urls_file, name):
file_path = os.path.join(metadata_dir, name + '.json')
if not os.path.exists(file_path):
download_utils.download_file(
download_utils.url_for_file(file_path, urls_file, 'metadata'),
file_path
)
with open(file_path, 'r') as f:
metadata = json.load(f)
return metadata
def run_experiment_steps(steps, exp_options):
for i, step_options in enumerate(steps):
step_desc = "step '{}' ({}/{})".format(step_options['type'], i+1,
len(steps))
with job_utils.log_step(step_desc, start_stars=True):
step_runners[step_options['type']](step_options, exp_options)
def run(args):
job_options = YAML(typ='safe').load(
download_utils.read_file_or_url(args.job_file)
)
validate_job_options(job_options)
settings = YAML(typ='safe').load(
download_utils.read_file_or_url(args.settings_file)
)
validate_schema(settings, 'settings')
if args.validate_only:
if args.urls_file:
validate_schema(YAML(typ='safe').load(
download_utils.read_file_or_url(args.urls_file)
), 'file_urls')
print('INFO options files validated successfully')
return
local_dirs = settings['local_dirs']
job_name = job_options['name']
# create archives/metadata dirs if they don't exist
fs_utils.mkdir_p(local_dirs['archives'])
fs_utils.mkdir_p(local_dirs['metadata'])
experiments = job_options['experiments']
if isinstance(experiments, six.string_types):
paths = experiment_paths(local_dirs, job_name, '')
experiments = job_utils.parse_multiline_lambda_str(
experiments, load_metadata=functools.partial(
load_metadata, paths['metadata_dir'], args.urls_file
)
)()
first_select = next((step for step in job_options['steps'] if
step['type'] == 'select'), {})
experiments = preprocess_experiments(experiments,
first_select.get('copy_for_options'))
log, formatter = job_utils.setup_logging(job_name, settings)
for i, (exp_name, exp_options) in enumerate(iteritems(experiments)):
with job_utils.log_step("experiment '{}' ({}/{})"
.format(exp_name, i+1, len(experiments)),
start_stars=True):
exp_options = exp_options.copy()
exp_options['experiment_name'] = exp_name
# get ready
paths = experiment_paths(local_dirs, job_name, exp_name,
args.urls_file)
steps = preprocess_steps(job_options['steps'], paths, exp_options,
args.disable_avx)
if isinstance(exp_options['groups'], six.string_types):
metadata = None
if 'dataset' in exp_options and ('metadata' in
exp_options['dataset']):
metadata_name = exp_options['dataset']['metadata']
metadata = load_metadata(paths['metadata_dir'],
args.urls_file, metadata_name)
exp_options['groups'] = job_utils.parse_multiline_lambda_str(
exp_options['groups'],
load_metadata=functools.partial(
load_metadata, paths['metadata_dir'], args.urls_file
)
)(dict(exp_options, **paths), metadata)
fs_utils.mkdir_p(paths['output_dir'])
# start file log
file_logger = logging.FileHandler(paths['log_file'], mode='w')
file_logger.setFormatter(formatter)
log.addHandler(file_logger)
# seed the RNG
if 'random_seed' in job_options:
random_seed = job_options['random_seed']
else:
random_seed = random.getrandbits(32)
log.info('using random seed value %d', random_seed)
random.seed(random_seed)
# create a re-run file
with open(paths['experiment_rerun_file'], 'w') as rerun_file:
YAML().dump({
'name': job_name,
'random_seed': random_seed,
'experiments': {
exp_name: exp_options
},
'steps': job_options['steps']
}, rerun_file)
# run steps
run_experiment_steps(steps, exp_options)
# finish file log
file_logger.close()
log.removeHandler(file_logger)
| [
"os.path.exists",
"collections.OrderedDict",
"os.path.isabs",
"os.path.join",
"ruamel.yaml.YAML",
"random.seed",
"os.path.realpath",
"os.path.dirname",
"logging.FileHandler",
"random.getrandbits",
"functools.partial",
"copy.deepcopy",
"json.load",
"six.iteritems"
] | [((497, 551), 'os.path.join', 'os.path.join', (["local_dirs['output']", 'job_name', 'exp_name'], {}), "(local_dirs['output'], job_name, exp_name)\n", (509, 551), False, 'import os\n'), ((1771, 1796), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (1794, 1796), False, 'import collections\n'), ((1830, 1852), 'six.iteritems', 'iteritems', (['experiments'], {}), '(experiments)\n', (1839, 1852), False, 'from six import iteritems\n'), ((3663, 3683), 'copy.deepcopy', 'copy.deepcopy', (['steps'], {}), '(steps)\n', (3676, 3683), False, 'import copy\n'), ((6517, 6559), 'os.path.join', 'os.path.join', (['metadata_dir', "(name + '.json')"], {}), "(metadata_dir, name + '.json')\n", (6529, 6559), False, 'import os\n'), ((791, 824), 'os.path.join', 'os.path.join', (['output_dir', '"""fasta"""'], {}), "(output_dir, 'fasta')\n", (803, 824), False, 'import os\n'), ((858, 899), 'os.path.join', 'os.path.join', (['output_dir', '"""metadata.json"""'], {}), "(output_dir, 'metadata.json')\n", (870, 899), False, 'import os\n'), ((921, 956), 'os.path.join', 'os.path.join', (['output_dir', '"""log.txt"""'], {}), "(output_dir, 'log.txt')\n", (933, 956), False, 'import os\n'), ((991, 1039), 'os.path.join', 'os.path.join', (['output_dir', '"""rerun_experiment.yml"""'], {}), "(output_dir, 'rerun_experiment.yml')\n", (1003, 1039), False, 'import os\n'), ((6571, 6596), 'os.path.exists', 'os.path.exists', (['file_path'], {}), '(file_path)\n', (6585, 6596), False, 'import os\n'), ((6798, 6810), 'json.load', 'json.load', (['f'], {}), '(f)\n', (6807, 6810), False, 'import json\n'), ((8733, 8755), 'six.iteritems', 'iteritems', (['experiments'], {}), '(experiments)\n', (8742, 8755), False, 'from six import iteritems\n'), ((604, 630), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (620, 630), False, 'import os\n'), ((7228, 7244), 'ruamel.yaml.YAML', 'YAML', ([], {'typ': '"""safe"""'}), "(typ='safe')\n", (7232, 7244), False, 'from ruamel.yaml 
import YAML\n'), ((7366, 7382), 'ruamel.yaml.YAML', 'YAML', ([], {'typ': '"""safe"""'}), "(typ='safe')\n", (7370, 7382), False, 'from ruamel.yaml import YAML\n'), ((10208, 10256), 'logging.FileHandler', 'logging.FileHandler', (["paths['log_file']"], {'mode': '"""w"""'}), "(paths['log_file'], mode='w')\n", (10227, 10256), False, 'import logging\n'), ((10622, 10646), 'random.seed', 'random.seed', (['random_seed'], {}), '(random_seed)\n', (10633, 10646), False, 'import random\n'), ((3602, 3649), 'os.path.join', 'os.path.join', (["paths['output_dir']", 'options[key]'], {}), "(paths['output_dir'], options[key])\n", (3614, 3649), False, 'import os\n'), ((5350, 5367), 'json.load', 'json.load', (['schema'], {}), '(schema)\n', (5359, 5367), False, 'import json\n'), ((10523, 10545), 'random.getrandbits', 'random.getrandbits', (['(32)'], {}), '(32)\n', (10541, 10545), False, 'import random\n'), ((3542, 3569), 'os.path.isabs', 'os.path.isabs', (['options[key]'], {}), '(options[key])\n', (3555, 3569), False, 'import os\n'), ((5186, 5211), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (5201, 5211), False, 'import os\n'), ((8250, 8321), 'functools.partial', 'functools.partial', (['load_metadata', "paths['metadata_dir']", 'args.urls_file'], {}), "(load_metadata, paths['metadata_dir'], args.urls_file)\n", (8267, 8321), False, 'import functools\n'), ((7580, 7596), 'ruamel.yaml.YAML', 'YAML', ([], {'typ': '"""safe"""'}), "(typ='safe')\n", (7584, 7596), False, 'from ruamel.yaml import YAML\n'), ((10773, 10779), 'ruamel.yaml.YAML', 'YAML', ([], {}), '()\n', (10777, 10779), False, 'from ruamel.yaml import YAML\n'), ((9928, 9999), 'functools.partial', 'functools.partial', (['load_metadata', "paths['metadata_dir']", 'args.urls_file'], {}), "(load_metadata, paths['metadata_dir'], args.urls_file)\n", (9945, 9999), False, 'import functools\n'), ((2281, 2306), 'six.iteritems', 'iteritems', (['expand_options'], {}), '(expand_options)\n', (2290, 2306), False, 'from 
six import iteritems\n')] |
from mmcv.runner import checkpoint
from mmdet.apis.inference import init_detector,LoadImage, inference_detector
import easymd
config = 'config.py'
#checkpoints = './checkpoints/pseg_r101_r50_latest.pth'
checkpoints = "path/to/pth"
img = '000000322864.jpg'
results = {
'img': './datasets/coco/val2017/'+img
}
model = init_detector(config,checkpoint=checkpoints)
results = inference_detector(model,'./datasets/coco/val2017/'+img)
| [
"mmdet.apis.inference.init_detector",
"mmdet.apis.inference.inference_detector"
] | [((333, 378), 'mmdet.apis.inference.init_detector', 'init_detector', (['config'], {'checkpoint': 'checkpoints'}), '(config, checkpoint=checkpoints)\n', (346, 378), False, 'from mmdet.apis.inference import init_detector, LoadImage, inference_detector\n'), ((389, 448), 'mmdet.apis.inference.inference_detector', 'inference_detector', (['model', "('./datasets/coco/val2017/' + img)"], {}), "(model, './datasets/coco/val2017/' + img)\n", (407, 448), False, 'from mmdet.apis.inference import init_detector, LoadImage, inference_detector\n')] |
from anydeck import AnyDeck
# Create a default deck
# A default deck consists of a standard deck of poker cards with four suits with cards numbered 2 through 10, as well
# as a Jack, Queen, King, and Ace for each suit. The standard deck gives a value to the card equal to their
# face value further Jack, Queen, and King have a value of 10 and Ace has a value of 11.
cards = AnyDeck()
# Draw a card from the top (the default for a draw)
card = cards.draw()
# Print the card
print(f'{card.face} of {card.suit}')
# Draw a card from the bottom
card = cards.draw(position='bottom')
# Print the card
print(f'{card.face} of {card.suit}')
# Recreate a new standard deck; this time shuffled
cards = AnyDeck(shuffled=True)
# Draw a card from the top
card = cards.draw()
# Print the card
print(f'{card.face} of {card.suit}')
# Draw another card from the top
card = cards.draw()
# Print the card
print(f'{card.face} of {card.suit}')
| [
"anydeck.AnyDeck"
] | [((376, 385), 'anydeck.AnyDeck', 'AnyDeck', ([], {}), '()\n', (383, 385), False, 'from anydeck import AnyDeck\n'), ((698, 720), 'anydeck.AnyDeck', 'AnyDeck', ([], {'shuffled': '(True)'}), '(shuffled=True)\n', (705, 720), False, 'from anydeck import AnyDeck\n')] |
from django.contrib import admin
from django.conf import settings
# Register your models here.
from .models import Treatment
admin.site.register(Treatment)
| [
"django.contrib.admin.site.register"
] | [((126, 156), 'django.contrib.admin.site.register', 'admin.site.register', (['Treatment'], {}), '(Treatment)\n', (145, 156), False, 'from django.contrib import admin\n')] |
Environment(loader=templateLoader, autoescape=fake_func())
from flask import Flask, request, make_response, escape
from jinja2 import Environment, select_autoescape, FileSystemLoader, Template
app = Flask(__name__)
loader = FileSystemLoader( searchpath="templates/" )
unsafe_env = Environment(loader=loader)
safe1_env = Environment(loader=loader, autoescape=True)
safe2_env = Environment(loader=loader, autoescape=select_autoescape())
def render_response_from_env(env):
name = request.args.get('name', '')
template = env.get_template('template.html')
return make_response(template.render(name=name))
@app.route('/unsafe')
def unsafe():
return render_response_from_env(unsafe_env)
@app.route('/safe1')
def safe1():
return render_response_from_env(safe1_env)
@app.route('/safe2')
def safe2():
return render_response_from_env(safe2_env)
# Explicit autoescape
e = Environment(
loader=loader,
autoescape=select_autoescape(['html', 'htm', 'xml'])
) # GOOD
# Additional checks with flow.
auto = select_autoescape
e = Environment(autoescape=auto) # GOOD
z = 0
e = Environment(autoescape=z) # BAD
E = Environment
E() # BAD
E(autoescape=z) # BAD
E(autoescape=auto) # GOOD
E(autoescape=0+1) # GOOD
def checked(cond=False):
if cond:
e = Environment(autoescape=cond) # GOOD
unsafe_tmpl = Template('Hello {{ name }}!')
safe1_tmpl = Template('Hello {{ name }}!', autoescape=True)
safe2_tmpl = Template('Hello {{ name }}!', autoescape=select_autoescape())
| [
"flask.request.args.get",
"jinja2.Environment",
"flask.Flask",
"jinja2.Template",
"jinja2.select_autoescape",
"jinja2.FileSystemLoader"
] | [((201, 216), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (206, 216), False, 'from flask import Flask, request, make_response, escape\n'), ((226, 267), 'jinja2.FileSystemLoader', 'FileSystemLoader', ([], {'searchpath': '"""templates/"""'}), "(searchpath='templates/')\n", (242, 267), False, 'from jinja2 import Environment, select_autoescape, FileSystemLoader, Template\n'), ((284, 310), 'jinja2.Environment', 'Environment', ([], {'loader': 'loader'}), '(loader=loader)\n', (295, 310), False, 'from jinja2 import Environment, select_autoescape, FileSystemLoader, Template\n'), ((323, 366), 'jinja2.Environment', 'Environment', ([], {'loader': 'loader', 'autoescape': '(True)'}), '(loader=loader, autoescape=True)\n', (334, 366), False, 'from jinja2 import Environment, select_autoescape, FileSystemLoader, Template\n'), ((1052, 1080), 'jinja2.Environment', 'Environment', ([], {'autoescape': 'auto'}), '(autoescape=auto)\n', (1063, 1080), False, 'from jinja2 import Environment, select_autoescape, FileSystemLoader, Template\n'), ((1098, 1123), 'jinja2.Environment', 'Environment', ([], {'autoescape': 'z'}), '(autoescape=z)\n', (1109, 1123), False, 'from jinja2 import Environment, select_autoescape, FileSystemLoader, Template\n'), ((1332, 1361), 'jinja2.Template', 'Template', (['"""Hello {{ name }}!"""'], {}), "('Hello {{ name }}!')\n", (1340, 1361), False, 'from jinja2 import Environment, select_autoescape, FileSystemLoader, Template\n'), ((1375, 1421), 'jinja2.Template', 'Template', (['"""Hello {{ name }}!"""'], {'autoescape': '(True)'}), "('Hello {{ name }}!', autoescape=True)\n", (1383, 1421), False, 'from jinja2 import Environment, select_autoescape, FileSystemLoader, Template\n'), ((485, 513), 'flask.request.args.get', 'request.args.get', (['"""name"""', '""""""'], {}), "('name', '')\n", (501, 513), False, 'from flask import Flask, request, make_response, escape\n'), ((417, 436), 'jinja2.select_autoescape', 'select_autoescape', ([], {}), '()\n', (434, 
436), False, 'from jinja2 import Environment, select_autoescape, FileSystemLoader, Template\n'), ((940, 981), 'jinja2.select_autoescape', 'select_autoescape', (["['html', 'htm', 'xml']"], {}), "(['html', 'htm', 'xml'])\n", (957, 981), False, 'from jinja2 import Environment, select_autoescape, FileSystemLoader, Template\n'), ((1280, 1308), 'jinja2.Environment', 'Environment', ([], {'autoescape': 'cond'}), '(autoescape=cond)\n', (1291, 1308), False, 'from jinja2 import Environment, select_autoescape, FileSystemLoader, Template\n'), ((1476, 1495), 'jinja2.select_autoescape', 'select_autoescape', ([], {}), '()\n', (1493, 1495), False, 'from jinja2 import Environment, select_autoescape, FileSystemLoader, Template\n')] |
import pandas as pd
import pytest
from cascade.executor.model_results_main import _get_model_results
def test_get_model_results_inputs_ok(ihme):
"""at_mvid=265844 has 5850 rows, ode_mvid=102680 has 238836 rows
"""
results_columns = ['model_version_id', 'year_id', 'location_id', 'sex_id',
'age_group_id', 'measure_id', 'mean', 'upper', 'lower']
ode_model_version_id = 102680
db = "epi"
table = "fit"
ode_results = _get_model_results(ode_model_version_id, db, table)
assert set(ode_results.columns) == set(results_columns)
at_model_version_id = 265844
db = "dismod-at-dev"
table = "fit"
at_results = _get_model_results(at_model_version_id, db, table)
assert set(at_results.columns) == set(results_columns)
at_row_index_8 = pd.Series([265844, 1990, 90, 1, 2, 16, 0.161961, 0.161961, 0.161961])
at_row_index_8.index = at_results.columns
pd.testing.assert_series_equal(at_results.iloc[8], at_row_index_8,
check_exact=False, check_names=False)
def test_get_model_results_bad_model_version_id_for_db_and_table(ihme):
    """An mvid missing from an explicitly named db/table raises ValueError."""
    with pytest.raises(ValueError):
        _get_model_results(1, "dismod-at-dev", "fit")
def test_get_model_results_bad_model_version_id_all_locations(ihme):
    """An mvid found nowhere raises ValueError even when every location is searched."""
    with pytest.raises(ValueError):
        _get_model_results(1, None, None)
def test_get_model_results__multiple_finds(ihme):
    """With no db/table given, an mvid present in several locations raises ValueError."""
    with pytest.raises(ValueError):
        _get_model_results(265844, None, None)
| [
"pandas.Series",
"cascade.executor.model_results_main._get_model_results",
"pytest.raises",
"pandas.testing.assert_series_equal"
] | [((469, 520), 'cascade.executor.model_results_main._get_model_results', '_get_model_results', (['ode_model_version_id', 'db', 'table'], {}), '(ode_model_version_id, db, table)\n', (487, 520), False, 'from cascade.executor.model_results_main import _get_model_results\n'), ((676, 726), 'cascade.executor.model_results_main._get_model_results', '_get_model_results', (['at_model_version_id', 'db', 'table'], {}), '(at_model_version_id, db, table)\n', (694, 726), False, 'from cascade.executor.model_results_main import _get_model_results\n'), ((809, 878), 'pandas.Series', 'pd.Series', (['[265844, 1990, 90, 1, 2, 16, 0.161961, 0.161961, 0.161961]'], {}), '([265844, 1990, 90, 1, 2, 16, 0.161961, 0.161961, 0.161961])\n', (818, 878), True, 'import pandas as pd\n'), ((930, 1038), 'pandas.testing.assert_series_equal', 'pd.testing.assert_series_equal', (['at_results.iloc[8]', 'at_row_index_8'], {'check_exact': '(False)', 'check_names': '(False)'}), '(at_results.iloc[8], at_row_index_8,\n check_exact=False, check_names=False)\n', (960, 1038), True, 'import pandas as pd\n'), ((1239, 1264), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1252, 1264), False, 'import pytest\n'), ((1354, 1401), 'cascade.executor.model_results_main._get_model_results', '_get_model_results', (['model_version_id', 'db', 'table'], {}), '(model_version_id, db, table)\n', (1372, 1401), False, 'from cascade.executor.model_results_main import _get_model_results\n'), ((1562, 1587), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1575, 1587), False, 'import pytest\n'), ((1665, 1712), 'cascade.executor.model_results_main._get_model_results', '_get_model_results', (['model_version_id', 'db', 'table'], {}), '(model_version_id, db, table)\n', (1683, 1712), False, 'from cascade.executor.model_results_main import _get_model_results\n'), ((1878, 1903), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1891, 1903), False, 'import 
pytest\n'), ((1986, 2033), 'cascade.executor.model_results_main._get_model_results', '_get_model_results', (['model_version_id', 'db', 'table'], {}), '(model_version_id, db, table)\n', (2004, 2033), False, 'from cascade.executor.model_results_main import _get_model_results\n')] |
# -*- coding: utf-8 -*-
"""Morphology_embed.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1uZUe-O4GgKuMK1FfbAV7xztgtD4lWEEC
"""
import stanza
import argparse
parser = argparse.ArgumentParser(description='A tutorial of argparse!')
parser.add_argument("--language")
parser.add_argument("--inputfile")
parser.add_argument("--outputfile")
args = parser.parse_args()

lang = args.language
inp_f = args.inputfile
opt_f = args.outputfile

# Open read-only ('r' instead of the old 'r+': the file is never written)
# and use a context manager -- the previous handle was never closed.
with open(inp_f, 'r') as fr:
    entxt = fr.read()

nlp = stanza.Pipeline(lang=lang, processors='tokenize,pos', use_gpu=True, pos_batch_size=2000)
doc = nlp(entxt)
print(len(doc.sentences))

# Morphological features emitted for every token, in this fixed order;
# a missing feature is written as '-'.
featlst = ['Case', 'AdpType', 'Gender', 'Mood', 'Person', 'PronType', 'Tense', 'VerbForm', 'Voice', 'Number']

# 'a+' keeps the original append semantics: repeated runs extend the file.
with open(opt_f, 'a+') as fd:
    for i, sent in enumerate(doc.sentences):
        print(i)
        wrdlst = []
        for word in sent.words:
            # word.feats looks like "Case=Nom|Number=Sing"; absent -> no features.
            if word.feats:
                morph_feat_dict = dict(x.split("=") for x in word.feats.split("|"))
            else:
                morph_feat_dict = {}
            feat_form = ''.join('+' + morph_feat_dict.get(feat, '-') for feat in featlst)
            wrdlst.append(f'{word.text}+{word.upos}' + feat_form)
        fd.write(' '.join(wrdlst))
        fd.write("\n")

print("file formatting has been done for language:", lang)
| [
"stanza.Pipeline",
"argparse.ArgumentParser"
] | [((243, 305), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""A tutorial of argparse!"""'}), "(description='A tutorial of argparse!')\n", (266, 305), False, 'import argparse\n'), ((544, 636), 'stanza.Pipeline', 'stanza.Pipeline', ([], {'lang': 'lang', 'processors': '"""tokenize,pos"""', 'use_gpu': '(True)', 'pos_batch_size': '(2000)'}), "(lang=lang, processors='tokenize,pos', use_gpu=True,\n pos_batch_size=2000)\n", (559, 636), False, 'import stanza\n')] |
from .FHIR_Generator import Generator
from .genomics_test_generator.fhir_genomics_test_gene import *
import os
# Directory holding the FHIR resource spec CSVs, resolved against the current
# working directory (assumes the process is launched from the project root --
# TODO confirm against the deployment entry point).
spec_basepath = os.path.join(os.getcwd(), 'task_runner/resources/spec/')
def get_resource_correct_cases(version, resource_type):
    '''
    Return the correct (valid) resource test objects for *resource_type*.
    '''
    generator = Generator()
    generator.load_definition(version, resource_type)
    raw_cases = generator.correct_cases('nothing')
    if not raw_cases:
        return []
    return [raw_case['testcase'] for raw_case in raw_cases]
def get_resource_basic_cases(version, resource_type):
    '''
    Return correct resource test objects generated without references.
    '''
    generator = Generator()
    generator.load_definition(version, resource_type)
    raw_cases = generator.correct_cases('noreference')
    if not raw_cases:
        return []
    return [raw_case['testcase'] for raw_case in raw_cases]
def get_resource_wrong_cases(version, resource_type):
    '''
    Return the wrong (invalid) resource test objects for *resource_type*.
    '''
    generator = Generator()
    generator.load_definition(version, resource_type)
    raw_cases = generator.wrong_cases()
    if not raw_cases:
        return []
    # Wrong cases come double-wrapped: {'testcase': {'testcase': ...}}.
    return [raw_case['testcase']['testcase'] for raw_case in raw_cases]
#temp version
# def get_right_cases(resource_type):
# basepath = os.path.join(os.getcwd(), 'task_runner/resources/resource_file')
# filepath_list = []
# for parentDir, dirnames, filenames in os.walk(basepath):
# for filename in filenames:
# if filename.endswith('json') and resource_type.lower() in filename.lower():
# resource_name = filename[:filename.find('_')] if '_' in filename else filename[:filename.find('.')]
# fullFilename = (parentDir if parentDir.endswith('/') else parentDir + '/') + filename
# if resource_name.lower() == resource_type.lower(): filepath_list.append(fullFilename)
# #get json objs
# cases = []
# for fullFilename in filepath_list:
# f = open(fullFilename, 'r')
# cases.append(json.loads(f.read()))
# f.close()
# return cases
# def create_all_test_case4type(resource_spec_filename,resource_type):
# #load spec
# csv_reader = csv.reader(open(resource_spec_filename, 'r'))
# detail_dict = trans_csv_to_dict(csv_reader)
# del csv_reader
# #generate all cases
# test_cases = create_element_test_cases(detail_dict)
# right_cases, wrong_cases = create_orthogonal_test_cases(test_cases)
# #wrap test cases
# all_cases = {}
# all_cases['right'] = get_right_cases(resource_type)
# all_cases['wrong'] = []
# # for case in right_cases:
# # case['resourceType'] = resource_type
# # all_cases['right'].append(case)
# # get right cases from files instead
# for case in wrong_cases:
# case['case']['resourceType'] = resource_type
# all_cases['wrong'].append(case['case'])
# #return all cases
# return all_cases
# def get_resource_correct_cases(resource_type):
# all_cases = create_all_test_case4type('%s%s.csv' % (spec_basepath, resource_type), resource_type)
# return all_cases['right']
# def get_resource_wrong_cases(resource_type):
# all_cases = create_all_test_case4type('%s%s.csv' % (spec_basepath, resource_type), resource_type)
# return all_cases['wrong'] | [
"os.getcwd"
] | [((141, 152), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (150, 152), False, 'import os\n')] |
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
class BasicRenderModelBackend(object):
    """Renders a model instance via per-model layout templates.

    Candidate templates are searched along the model class's MRO as
    ``layout/<app_label>/<model_name>/<name>.html``, most specific first.
    """

    def get_layout_template_name(self, model, name):
        """Return candidate template paths for *model*, walking its MRO."""
        return [
            "layout/%s/%s/%s.html" % (klass._meta.app_label,
                                      klass._meta.object_name.lower(), name)
            for klass in model.__class__.mro()
            if hasattr(klass, "_meta")
        ]

    def render(self, object, name, dictionary=None,
               context_instance=None):
        """Render *object* and return the markup as a safe string.

        NOTE: the parameter name ``object`` shadows the builtin but is kept
        for caller compatibility.  A caller-supplied *dictionary* is mutated
        in place (``object`` key is added), matching the original contract.
        """
        context = dictionary or {}
        context["object"] = object
        template_name = self.get_layout_template_name(object, name)
        rendered = render_to_string(template_name, dictionary=context,
                                    context_instance=context_instance)
        return mark_safe(rendered)

    def __call__(self, *args, **kwargs):
        return self.render(*args, **kwargs)
| [
"django.template.loader.render_to_string"
] | [((710, 804), 'django.template.loader.render_to_string', 'render_to_string', (['template_name'], {'dictionary': 'dictionary', 'context_instance': 'context_instance'}), '(template_name, dictionary=dictionary, context_instance=\n context_instance)\n', (726, 804), False, 'from django.template.loader import render_to_string\n')] |
import os, gzip
# Each JSON file under result/ is gzipped into the package's _gz directory
# under the same name plus a '.gz' suffix.
INPUT_FILES = ['kr_codepoints.json', 'ja_codepoints.json', 'zh_codepoints.json', 'vn_codepoints.json', 'yue_codepoints.json', 'unicodepoints.json']
for f in INPUT_FILES:
    source = os.path.join('result', f)
    # Context managers guarantee the handles are closed even on error;
    # the old code also shadowed the builtin name 'input'.
    with open(source, 'rb') as src:
        payload = src.read()
    dest = os.path.join('../src/Unihandecode/_gz', f + '.gz')
    with gzip.GzipFile(dest, 'wb') as dst:
        dst.write(payload)
print("done") | [
"gzip.GzipFile",
"os.path.join"
] | [((198, 223), 'os.path.join', 'os.path.join', (['"""result"""', 'f'], {}), "('result', f)\n", (210, 223), False, 'import os, gzip\n'), ((321, 371), 'os.path.join', 'os.path.join', (['"""../src/Unihandecode/_gz"""', "(f + '.gz')"], {}), "('../src/Unihandecode/_gz', f + '.gz')\n", (333, 371), False, 'import os, gzip\n'), ((387, 412), 'gzip.GzipFile', 'gzip.GzipFile', (['dest', '"""wb"""'], {}), "(dest, 'wb')\n", (400, 412), False, 'import os, gzip\n')] |
import cv2
import numpy as np
from shapes import Myinit
class Triangle(Myinit):
    """A filled magenta triangle drawn onto the image inherited from Myinit."""

    def __init__(self):
        super(Triangle, self).__init__()
        self.color = (255, 0, 255)
        corners = np.array([[100, 50], [150, 150], [50, 150]], np.int32)
        # OpenCV polygon APIs expect vertices shaped (n_points, 1, 2).
        self.vertices = corners.reshape((-1, 1, 2))

    def form_shape(self):
        """Draw the outline, then flood the interior with the same colour."""
        self.img = cv2.polylines(self.img, [self.vertices], True, self.color)
        cv2.fillPoly(self.img, [self.vertices], self.color)

    def welcome(self):
        """Announce which shape is about to be drawn."""
        print('Printing Triangle...!')

    def sides(self):
        """Report the number of sides of this shape."""
        print("Triangle has 3 sides.")

    def draw_shape(self):
        """Render the triangle in a window and block until a key is pressed."""
        self.welcome()
        self.form_shape()
        self.sides()
        cv2.imshow("Triangle", self.img)
        cv2.waitKey(0)
        cv2.destroyAllWindows()
| [
"cv2.fillPoly",
"cv2.polylines",
"cv2.imshow",
"numpy.array",
"cv2.destroyAllWindows",
"cv2.waitKey"
] | [((177, 231), 'numpy.array', 'np.array', (['[[100, 50], [150, 150], [50, 150]]', 'np.int32'], {}), '([[100, 50], [150, 150], [50, 150]], np.int32)\n', (185, 231), True, 'import numpy as np\n'), ((368, 426), 'cv2.polylines', 'cv2.polylines', (['self.img', '[self.vertices]', '(True)', 'self.color'], {}), '(self.img, [self.vertices], True, self.color)\n', (381, 426), False, 'import cv2\n'), ((436, 487), 'cv2.fillPoly', 'cv2.fillPoly', (['self.img', '[self.vertices]', 'self.color'], {}), '(self.img, [self.vertices], self.color)\n', (448, 487), False, 'import cv2\n'), ((729, 761), 'cv2.imshow', 'cv2.imshow', (['"""Triangle"""', 'self.img'], {}), "('Triangle', self.img)\n", (739, 761), False, 'import cv2\n'), ((771, 785), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (782, 785), False, 'import cv2\n'), ((795, 818), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (816, 818), False, 'import cv2\n')] |
import sys
import os
import timeit
from global_vars import *
def common_process(file, fnc, isFile):
    """Run *fnc* with timing, optionally once per name in ``file_name_list``.

    If *file* names an existing path, the user must confirm overwriting
    before anything runs.  When *isFile* is True, *fnc* is called once per
    entry of the global ``file_name_list`` (star-imported from global_vars),
    each call individually timed and error-isolated; otherwise *fnc* is
    called once with no arguments.  All exceptions are reported to stdout
    instead of propagating.  Returns None.
    """
    try:
        # Ask before clobbering an existing output file.
        # (Fixed: use identity comparison instead of `file != None`.)
        if file is not None and os.path.exists(file):
            select = input("FILE is already exist. Press 'Y' if you want to rewrite. ")
            if select != 'Y':
                print("Execution aborted..")
                return
        tot_time = timeit.default_timer()
        # `is True` kept on purpose: only an exact bool True selects the
        # per-file branch, matching the original call contract.
        if isFile is True:
            for file_name in file_name_list:
                try:
                    start = timeit.default_timer()
                    print("\n\n\t*** " + file_name + " ***")
                    fnc(file_name)
                    print("\n\t\tEXECUTION TIME= " + str(round(timeit.default_timer() - start, 3)) + " (sec)\n")
                except Exception as ex:
                    # Report and continue with the remaining files.
                    _, _, tb = sys.exc_info()
                    print("[common_process:" + file_name + ":" + str(tb.tb_lineno) + "] " + str(ex) + "\n\n")
        else:
            fnc()
        print("\t\tTOTAL EXECUTION TIME= " + str(round(timeit.default_timer() - tot_time, 3)) + " (sec)\n")
    except Exception as ex:
        _, _, tb = sys.exc_info()
        print("[common_process:" + str(tb.tb_lineno) + "] " + str(ex) + "\n\n")
| [
"timeit.default_timer",
"sys.exc_info",
"os.path.exists"
] | [((347, 369), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (367, 369), False, 'import timeit\n'), ((145, 165), 'os.path.exists', 'os.path.exists', (['file'], {}), '(file)\n', (159, 165), False, 'import os\n'), ((1021, 1035), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (1033, 1035), False, 'import sys\n'), ((468, 490), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (488, 490), False, 'import timeit\n'), ((729, 743), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (741, 743), False, 'import sys\n'), ((923, 945), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (943, 945), False, 'import timeit\n'), ((623, 645), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (643, 645), False, 'import timeit\n')] |
import numpy as np
class vents:
    """Rasterises axis-aligned vent segments and counts multiply-covered cells."""

    def __init__(self, segments):
        # Each segment is an (x1, y1, x2, y2) endpoint tuple.
        self.segments = segments
        self.maxx, self.maxy = self._getmaxxy()
        self.board = np.zeros((self.maxx + 1, self.maxy + 1), dtype=int)

    def _getmaxxy(self):
        """Return the largest x and y over all segment endpoints, as ints."""
        allxs = [seg[0] for seg in self.segments] + [seg[2] for seg in self.segments]
        allys = [seg[1] for seg in self.segments] + [seg[3] for seg in self.segments]
        print(f"segments: {self.segments}")
        print([seg[0] for seg in self.segments])
        print([seg[2] for seg in self.segments])
        print(f"allxs: {allxs}")
        maxx, maxy = max(allxs), max(allys)
        print(f"(maxx, maxy): ({maxx}, {maxy})")
        return int(maxx), int(maxy)

    def _draw_vertical(self, s):
        """Stamp a vertical segment (constant x) onto the board."""
        print(f"draw vertical: {s}")
        x = s[0]
        lo, hi = min(s[1], s[3]), max(s[1], s[3])
        for y in range(lo, hi + 1):
            self.board[x, y] += 1
        print(np.transpose(self.board))

    def _draw_horizontal(self, s):
        """Stamp a horizontal segment (constant y) onto the board."""
        print(f"draw horizontal: {s}")
        y = s[1]
        lo, hi = min(s[0], s[2]), max(s[0], s[2])
        for x in range(lo, hi + 1):
            self.board[x, y] += 1
        print(np.transpose(self.board))

    def _build_board(self):
        """Rasterise every axis-aligned segment; diagonals are skipped."""
        for s in self.segments:
            if s[0] == s[2]:
                self._draw_vertical(s)
            if s[1] == s[3]:
                self._draw_horizontal(s)

    def count_overlaps(self):
        """Return the number of grid cells covered by at least two segments."""
        print(self.board)
        self._build_board()
        return np.count_nonzero(self.board > 1)
| [
"numpy.count_nonzero",
"numpy.zeros",
"numpy.transpose"
] | [((172, 223), 'numpy.zeros', 'np.zeros', (['(self.maxx + 1, self.maxy + 1)'], {'dtype': 'int'}), '((self.maxx + 1, self.maxy + 1), dtype=int)\n', (180, 223), True, 'import numpy as np\n'), ((1697, 1729), 'numpy.count_nonzero', 'np.count_nonzero', (['(self.board > 1)'], {}), '(self.board > 1)\n', (1713, 1729), True, 'import numpy as np\n'), ((1041, 1065), 'numpy.transpose', 'np.transpose', (['self.board'], {}), '(self.board)\n', (1053, 1065), True, 'import numpy as np\n'), ((1372, 1396), 'numpy.transpose', 'np.transpose', (['self.board'], {}), '(self.board)\n', (1384, 1396), True, 'import numpy as np\n')] |
from tkinter import *
from PIL import Image, ImageTk
import sql
import mysql.connector as msq
# Ouverture de la connexion
# --------------------------------------------------------
config = {
"user": "root",
"password": "<PASSWORD>",
"host": "127.0.0.1",
"port": "8081",
"database": "trombinoscope",
}
db = msq.connect(**config)
# Création de l'interface
# --------------------------------------------------------
fenetre = Tk()
fenetre.title("Trombinoscope")
# Création des fonctions
# --------------------------------------------------------
def click_button():
prenom, nom = find_person()
path = get_path_image(prenom, nom)
path = "./photos/" + path
identite = get_identite(prenom, nom)
change_content(path, identite)
def find_person():
indice = user_list.curselection() # retourne un indice
print(indice)
full_name = user_list.get(
indice
) # retourne un string avec le prenom, nom de l'indice
print(full_name)
full_name = full_name.split(" ")
prenom, nom = full_name
return (prenom, nom)
def get_path_image(prenom, nom):
"""
Retourne le chemin de l'image depuis la BDD
"""
# Cursor sert à récupérer les données que l'on demande
cursor = db.cursor()
# Requêtes :
query = (
"SELECT photo FROM personnes WHERE nom = '"
+ nom
+ "' AND prenom = '"
+ prenom
+ "'"
)
cursor.execute(query)
path = cursor.fetchone() # retourne une ligne de la requête sous forme de tuple
# Fermeture
cursor.close()
return path[0]
def get_identite(prenom, nom):
"""
Retourne le nom, le prénom, le genre et le statut de la personne
"""
# Cursor sert à récupérer les données que l'on demande
cursor = db.cursor()
# Requêtes :
query = (
"SELECT prenom, nom, qualification, libelle_genre FROM personnes NATURAL JOIN genres NATURAL JOIN statuts WHERE nom = '"
+ nom
+ "' AND prenom = '"
+ prenom
+ "'"
)
cursor.execute(query)
libelle = cursor.fetchone() # retourne une ligne de la requête sous forme de tuple
# Fermeture
cursor.close()
return libelle
def change_content(path, libelle):
# Changement de l'image dans le canvas
img = Image.open(path)
photo_resize = ImageTk.PhotoImage(img.resize((263, 350)))
# photo.config(file=photo_resize)
canvas.itemconfig(image_canvas, image=photo_resize)
# Changement du texte dans le label
name_label.config(text=libelle)
canvas.mainloop()
def get_users_list(db):
"""
Retourne tous les utilisateurs pour la ListBox
"""
# Cursor sert à récuperer les données que l'on demande
cursor = db.cursor()
# Requêtes :
query = "SELECT prenom, nom FROM personnes ORDER BY id_personne"
cursor.execute(query)
users = []
for user in cursor:
print(user)
name = " ".join(user)
users.append(name)
# Fermeture
cursor.close()
return users
# Interface
# --------------------------------------------------------
# Element principal
main_layout = Frame(fenetre)
header = Label(
main_layout,
text="Ecole ISEN x SIMPLON \n Formation IA 2021/2022",
fg="#161853",
font="Noto 15",
background="#FAEDF0",
padx=15,
pady=15,
)
central_widget = PanedWindow(main_layout, orient=HORIZONTAL)
# Placement du label header et central widget
header.pack(fill="x")
central_widget.pack(fill=BOTH)
# Element listes_users
sidebar = Frame(central_widget)
user_list = Listbox(sidebar, relief=FLAT)
button = Button(
sidebar, text="Valider", fg="white", bg="#292C6D", relief=FLAT, command=click_button
)
user_list.pack(side=TOP, expand=10, fill=BOTH)
button.pack(fill="x")
# Insertion des personnes dans la ListBox en appelant la fonction get_users_list
user_list.insert(END, *get_users_list(db))
# Elément Content : photo & user
content = Frame(central_widget)
canvas = Canvas(content, width=350, height=350, bg="#FAEDF0")
photo = PhotoImage(file="avatar_h.png")
image_canvas = canvas.create_image(50, 0, anchor=NW, image=photo)
name_label = Label(
content,
text="Prénom Nom \n Statut Genre",
fg="#161853",
font="Noto 12",
padx=10,
pady=10,
bg="#FAEDF0",
)
# Assemblage de la photo et du label
canvas.pack()
name_label.pack(fill="x")
content.pack(fill=BOTH)
# Assemblage
central_widget.add(sidebar)
central_widget.add(content)
# central_widget.pack()
main_layout.pack(fill=BOTH)
fenetre.mainloop()
# ----------------------------------------------------------
# Fermeture de la DB
db.close()
| [
"mysql.connector.connect",
"PIL.Image.open"
] | [((329, 350), 'mysql.connector.connect', 'msq.connect', ([], {}), '(**config)\n', (340, 350), True, 'import mysql.connector as msq\n'), ((2300, 2316), 'PIL.Image.open', 'Image.open', (['path'], {}), '(path)\n', (2310, 2316), False, 'from PIL import Image, ImageTk\n')] |
# This is used for testing Fine Tune Hyper-Parameters
from datetime import datetime
import itertools
import json
import matplotlib.pyplot as plt
import numpy as np
from keras.callbacks import ModelCheckpoint
from keras.wrappers.scikit_learn import KerasClassifier
from keras_preprocessing.sequence import pad_sequences
import joblib
from sklearn.model_selection import RandomizedSearchCV
from sklearn.utils import shuffle
from keras.models import Sequential
from keras.layers import Dense, Activation, Dropout, Embedding, Conv1D, GlobalMaxPooling1D, Flatten, LSTM
from keras.preprocessing import text, sequence
from keras import utils
import pandas as pd
testData = pd.read_csv("../data/test.csv")
dictData = pd.read_csv("../data/kata_dasar_kbbi.csv")
categories_file = open("../data/categories.json", "r")
categories = json.load(categories_file)
inverted_categories_mobile = {v: k.lower() for k, v in categories['Mobile'].items()}
inverted_categories_fashion = {v: k.lower() for k, v in categories['Fashion'].items()}
inverted_categories_beauty = {v: k.lower() for k, v in categories['Beauty'].items()}
all_subcategories = {k.lower(): v for k, v in categories['Mobile'].items()}
all_subcategories.update({k.lower(): v for k, v in categories['Fashion'].items()})
all_subcategories.update({k.lower(): v for k, v in categories['Beauty'].items()})
# Main settings
plot_history_check = True
gen_test = True
max_length = 35 # 32 is max word in train, think need to test this later of the actual words need to be used...
num_classes = len(all_subcategories)
# Training for more epochs will likely lead to overfitting on this dataset
# You can try tweaking these hyperparamaters when using this model with your own data
batch_size = 256
epochs = 10
max_words = 1000
print(all_subcategories)
print("no of categories: " + str(num_classes))
category_mapping = {
'fashion_image': 'Fashion',
'beauty_image': 'Beauty',
'mobile_image': 'Mobile',
}
directory_mapping = {
'Fashion': 'fashion_image',
'Beauty': 'beauty_image',
'Mobile': 'mobile_image',
}
trainData = pd.read_csv("../data/train.csv")
# Shuffle train data
trainData = shuffle(trainData)
max_data_size = int(len(trainData) * 1)
train_data_size = int(max_data_size * .95)
train_data_step = 1
validate_data_step = 1
print(train_data_size, max_data_size)
train_texts = trainData['title'][::train_data_step]
train_tags = trainData['Category'][::train_data_step]
test_texts = testData['title']
print(len(train_texts), len(train_tags))
y = train_tags.values
tokenize = text.Tokenizer(num_words=max_words, char_level=False)
tokenize.fit_on_texts(train_texts) # only fit on train
x_train = tokenize.texts_to_sequences(train_texts)
x_test = tokenize.texts_to_sequences(test_texts)
# Pad sequences with zeros
x_train = pad_sequences(x_train, padding='post', maxlen=max_length)
x_test = pad_sequences(x_test, padding='post', maxlen=max_length)
y_train = train_tags.values
y_train = utils.to_categorical(y_train)
vocab_size = len(tokenize.word_index) + 1
print(vocab_size)
def create_model(num_filters, kernel_size, max_words, embedding_dim, max_length):
model = Sequential()
model.add(Embedding(max_words,
embedding_dim,
input_length=max_length,
trainable=True))
model.add(Conv1D(num_filters, kernel_size, activation='relu'))
model.add(GlobalMaxPooling1D())
model.add(Dense(embedding_dim, activation='relu'))
model.add(Dense(num_classes, activation='softmax'))
model.compile(optimizer='adam',
loss='categorical_crossentropy',
metrics=['accuracy'])
return model
def gen_filename_h5():
return 'epoch_'+str(epochs) + '_' + datetime.now().strftime("%m_%d_%Y_%H_%M_%S")
def gen_filename_csv():
return 'epoch_'+str(epochs) + '_' + datetime.now().strftime("%m_%d_%Y_%H_%M_%S")
param_grid = dict(num_filters=[32, 64, 128],
kernel_size=[3, 5, 7],
max_words=[max_words],
embedding_dim=[64, 128],
max_length=[max_length])
filepath = "../checkpoints/"+gen_filename_h5()+"v2.hdf5"
checkpointer = ModelCheckpoint(filepath, monitor='val_acc', verbose=1, save_best_only=True, mode='max')
model = KerasClassifier(build_fn=create_model,
epochs=epochs,
batch_size=batch_size,
verbose=True,
)
grid = RandomizedSearchCV(estimator=model, param_distributions=param_grid,
cv=4, verbose=1, n_iter=10)
print(grid)
grid_result = grid.fit(x_train,
y_train,
validation_split=0.1,
callbacks=[checkpointer])
with open("../checkpoints/"+gen_filename_h5()+".pickle","w+") as f:
joblib.dump(grid_result, f)
def plot_history(history):
plt.style.use('ggplot')
acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']
x = range(1, len(acc) + 1)
plt.figure(figsize=(12, 5))
plt.subplot(1, 2, 1)
plt.plot(x, acc, 'b', label='Training acc')
plt.plot(x, val_acc, 'r', label='Validation acc')
plt.title('Training and validation accuracy')
plt.legend()
plt.subplot(1, 2, 2)
plt.plot(x, loss, 'b', label='Training loss')
plt.plot(x, val_loss, 'r', label='Validation loss')
plt.title('Training and validation loss')
plt.legend()
plt.show()
if plot_history_check:
plot_history(grid)
def perform_test():
prediction = grid.predict(x_test, batch_size=batch_size, verbose=1)
predicted_label = [np.argmax(prediction[i]) for i in range(len(x_test))]
# print(predicted_label)
df = pd.DataFrame({'itemid': testData['itemid'].astype(int), 'Category': predicted_label})
df.to_csv(path_or_buf='res_' + gen_filename_csv() + '.csv', index=False)
if gen_test:
perform_test()
# This utility function is from the sklearn docs:
# http://scikit-learn.org/stable/auto_examples/model_selection/plot_confusion_matrix.html
def plot_confusion_matrix(cm, classes,
title='Confusion matrix',
cmap=plt.cm.Blues):
"""
This function prints and plots the confusion matrix.
Normalization can be applied by setting `normalize=True`.
"""
cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
plt.imshow(cm, interpolation='nearest', cmap=cmap)
plt.title(title, fontsize=30)
plt.colorbar()
tick_marks = np.arange(len(classes))
plt.xticks(tick_marks, classes, rotation=45, fontsize=22)
plt.yticks(tick_marks, classes, fontsize=22)
fmt = '.2f'
thresh = cm.max() / 2.
for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
plt.text(j, i, format(cm[i, j], fmt),
horizontalalignment="center",
color="white" if cm[i, j] > thresh else "black")
plt.ylabel('True label', fontsize=25)
plt.xlabel('Predicted label', fontsize=25)
plt.show()
| [
"pandas.read_csv",
"matplotlib.pyplot.ylabel",
"keras.layers.GlobalMaxPooling1D",
"keras.utils.to_categorical",
"keras.layers.Dense",
"matplotlib.pyplot.imshow",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.style.use",
"matplotlib.pyplot.yticks",
"joblib.dump",
"mat... | [((670, 701), 'pandas.read_csv', 'pd.read_csv', (['"""../data/test.csv"""'], {}), "('../data/test.csv')\n", (681, 701), True, 'import pandas as pd\n'), ((713, 755), 'pandas.read_csv', 'pd.read_csv', (['"""../data/kata_dasar_kbbi.csv"""'], {}), "('../data/kata_dasar_kbbi.csv')\n", (724, 755), True, 'import pandas as pd\n'), ((824, 850), 'json.load', 'json.load', (['categories_file'], {}), '(categories_file)\n', (833, 850), False, 'import json\n'), ((2084, 2116), 'pandas.read_csv', 'pd.read_csv', (['"""../data/train.csv"""'], {}), "('../data/train.csv')\n", (2095, 2116), True, 'import pandas as pd\n'), ((2151, 2169), 'sklearn.utils.shuffle', 'shuffle', (['trainData'], {}), '(trainData)\n', (2158, 2169), False, 'from sklearn.utils import shuffle\n'), ((2549, 2602), 'keras.preprocessing.text.Tokenizer', 'text.Tokenizer', ([], {'num_words': 'max_words', 'char_level': '(False)'}), '(num_words=max_words, char_level=False)\n', (2563, 2602), False, 'from keras.preprocessing import text, sequence\n'), ((2797, 2854), 'keras_preprocessing.sequence.pad_sequences', 'pad_sequences', (['x_train'], {'padding': '"""post"""', 'maxlen': 'max_length'}), "(x_train, padding='post', maxlen=max_length)\n", (2810, 2854), False, 'from keras_preprocessing.sequence import pad_sequences\n'), ((2864, 2920), 'keras_preprocessing.sequence.pad_sequences', 'pad_sequences', (['x_test'], {'padding': '"""post"""', 'maxlen': 'max_length'}), "(x_test, padding='post', maxlen=max_length)\n", (2877, 2920), False, 'from keras_preprocessing.sequence import pad_sequences\n'), ((2960, 2989), 'keras.utils.to_categorical', 'utils.to_categorical', (['y_train'], {}), '(y_train)\n', (2980, 2989), False, 'from keras import utils\n'), ((4190, 4282), 'keras.callbacks.ModelCheckpoint', 'ModelCheckpoint', (['filepath'], {'monitor': '"""val_acc"""', 'verbose': '(1)', 'save_best_only': '(True)', 'mode': '"""max"""'}), "(filepath, monitor='val_acc', verbose=1, save_best_only=True,\n mode='max')\n", (4205, 4282), 
False, 'from keras.callbacks import ModelCheckpoint\n'), ((4287, 4381), 'keras.wrappers.scikit_learn.KerasClassifier', 'KerasClassifier', ([], {'build_fn': 'create_model', 'epochs': 'epochs', 'batch_size': 'batch_size', 'verbose': '(True)'}), '(build_fn=create_model, epochs=epochs, batch_size=batch_size,\n verbose=True)\n', (4302, 4381), False, 'from keras.wrappers.scikit_learn import KerasClassifier\n'), ((4484, 4583), 'sklearn.model_selection.RandomizedSearchCV', 'RandomizedSearchCV', ([], {'estimator': 'model', 'param_distributions': 'param_grid', 'cv': '(4)', 'verbose': '(1)', 'n_iter': '(10)'}), '(estimator=model, param_distributions=param_grid, cv=4,\n verbose=1, n_iter=10)\n', (4502, 4583), False, 'from sklearn.model_selection import RandomizedSearchCV\n'), ((3146, 3158), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (3156, 3158), False, 'from keras.models import Sequential\n'), ((4856, 4883), 'joblib.dump', 'joblib.dump', (['grid_result', 'f'], {}), '(grid_result, f)\n', (4867, 4883), False, 'import joblib\n'), ((4917, 4940), 'matplotlib.pyplot.style.use', 'plt.style.use', (['"""ggplot"""'], {}), "('ggplot')\n", (4930, 4940), True, 'import matplotlib.pyplot as plt\n'), ((5129, 5156), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 5)'}), '(figsize=(12, 5))\n', (5139, 5156), True, 'import matplotlib.pyplot as plt\n'), ((5161, 5181), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (5172, 5181), True, 'import matplotlib.pyplot as plt\n'), ((5186, 5229), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'acc', '"""b"""'], {'label': '"""Training acc"""'}), "(x, acc, 'b', label='Training acc')\n", (5194, 5229), True, 'import matplotlib.pyplot as plt\n'), ((5234, 5283), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'val_acc', '"""r"""'], {'label': '"""Validation acc"""'}), "(x, val_acc, 'r', label='Validation acc')\n", (5242, 5283), True, 'import matplotlib.pyplot as plt\n'), ((5288, 5333), 
'matplotlib.pyplot.title', 'plt.title', (['"""Training and validation accuracy"""'], {}), "('Training and validation accuracy')\n", (5297, 5333), True, 'import matplotlib.pyplot as plt\n'), ((5338, 5350), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (5348, 5350), True, 'import matplotlib.pyplot as plt\n'), ((5355, 5375), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (5366, 5375), True, 'import matplotlib.pyplot as plt\n'), ((5380, 5425), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'loss', '"""b"""'], {'label': '"""Training loss"""'}), "(x, loss, 'b', label='Training loss')\n", (5388, 5425), True, 'import matplotlib.pyplot as plt\n'), ((5430, 5481), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'val_loss', '"""r"""'], {'label': '"""Validation loss"""'}), "(x, val_loss, 'r', label='Validation loss')\n", (5438, 5481), True, 'import matplotlib.pyplot as plt\n'), ((5486, 5527), 'matplotlib.pyplot.title', 'plt.title', (['"""Training and validation loss"""'], {}), "('Training and validation loss')\n", (5495, 5527), True, 'import matplotlib.pyplot as plt\n'), ((5532, 5544), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (5542, 5544), True, 'import matplotlib.pyplot as plt\n'), ((5549, 5559), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5557, 5559), True, 'import matplotlib.pyplot as plt\n'), ((6494, 6544), 'matplotlib.pyplot.imshow', 'plt.imshow', (['cm'], {'interpolation': '"""nearest"""', 'cmap': 'cmap'}), "(cm, interpolation='nearest', cmap=cmap)\n", (6504, 6544), True, 'import matplotlib.pyplot as plt\n'), ((6549, 6578), 'matplotlib.pyplot.title', 'plt.title', (['title'], {'fontsize': '(30)'}), '(title, fontsize=30)\n', (6558, 6578), True, 'import matplotlib.pyplot as plt\n'), ((6583, 6597), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (6595, 6597), True, 'import matplotlib.pyplot as plt\n'), ((6643, 6700), 'matplotlib.pyplot.xticks', 'plt.xticks', (['tick_marks', 
'classes'], {'rotation': '(45)', 'fontsize': '(22)'}), '(tick_marks, classes, rotation=45, fontsize=22)\n', (6653, 6700), True, 'import matplotlib.pyplot as plt\n'), ((6705, 6749), 'matplotlib.pyplot.yticks', 'plt.yticks', (['tick_marks', 'classes'], {'fontsize': '(22)'}), '(tick_marks, classes, fontsize=22)\n', (6715, 6749), True, 'import matplotlib.pyplot as plt\n'), ((7033, 7070), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""True label"""'], {'fontsize': '(25)'}), "('True label', fontsize=25)\n", (7043, 7070), True, 'import matplotlib.pyplot as plt\n'), ((7075, 7117), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Predicted label"""'], {'fontsize': '(25)'}), "('Predicted label', fontsize=25)\n", (7085, 7117), True, 'import matplotlib.pyplot as plt\n'), ((7122, 7132), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7130, 7132), True, 'import matplotlib.pyplot as plt\n'), ((3173, 3249), 'keras.layers.Embedding', 'Embedding', (['max_words', 'embedding_dim'], {'input_length': 'max_length', 'trainable': '(True)'}), '(max_words, embedding_dim, input_length=max_length, trainable=True)\n', (3182, 3249), False, 'from keras.layers import Dense, Activation, Dropout, Embedding, Conv1D, GlobalMaxPooling1D, Flatten, LSTM\n'), ((3337, 3388), 'keras.layers.Conv1D', 'Conv1D', (['num_filters', 'kernel_size'], {'activation': '"""relu"""'}), "(num_filters, kernel_size, activation='relu')\n", (3343, 3388), False, 'from keras.layers import Dense, Activation, Dropout, Embedding, Conv1D, GlobalMaxPooling1D, Flatten, LSTM\n'), ((3404, 3424), 'keras.layers.GlobalMaxPooling1D', 'GlobalMaxPooling1D', ([], {}), '()\n', (3422, 3424), False, 'from keras.layers import Dense, Activation, Dropout, Embedding, Conv1D, GlobalMaxPooling1D, Flatten, LSTM\n'), ((3440, 3479), 'keras.layers.Dense', 'Dense', (['embedding_dim'], {'activation': '"""relu"""'}), "(embedding_dim, activation='relu')\n", (3445, 3479), False, 'from keras.layers import Dense, Activation, Dropout, Embedding, Conv1D, 
GlobalMaxPooling1D, Flatten, LSTM\n'), ((3495, 3535), 'keras.layers.Dense', 'Dense', (['num_classes'], {'activation': '"""softmax"""'}), "(num_classes, activation='softmax')\n", (3500, 3535), False, 'from keras.layers import Dense, Activation, Dropout, Embedding, Conv1D, GlobalMaxPooling1D, Flatten, LSTM\n'), ((5725, 5749), 'numpy.argmax', 'np.argmax', (['prediction[i]'], {}), '(prediction[i])\n', (5734, 5749), True, 'import numpy as np\n'), ((3746, 3760), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3758, 3760), False, 'from datetime import datetime\n'), ((3857, 3871), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3869, 3871), False, 'from datetime import datetime\n')] |
import os
from torchaudio.datasets import SPEECHCOMMANDS
import torch
class SubsetSC(SPEECHCOMMANDS):
    """SPEECHCOMMANDS dataset restricted to one official split.

    ``subset`` selects ``"training"``, ``"validation"`` or ``"testing"``;
    ``data_path`` is the download/extraction root passed to torchaudio.
    """

    def __init__(self, subset, data_path):
        super().__init__(root=data_path, download=True)

        def load_list(filename):
            # Split files list audio paths relative to the dataset root;
            # resolve them to absolute paths under self._path.
            listing = os.path.join(self._path, filename)
            with open(listing) as handle:
                return [os.path.join(self._path, entry.strip()) for entry in handle]

        if subset == "validation":
            self._walker = load_list("validation_list.txt")
        elif subset == "testing":
            self._walker = load_list("testing_list.txt")
        elif subset == "training":
            # Training split = every file NOT reserved for validation/testing.
            held_out = set(load_list("validation_list.txt") + load_list("testing_list.txt"))
            self._walker = [path for path in self._walker if path not in held_out]
def set_LABELS(labels):
    # Store the label vocabulary in a module-level global so that
    # label_to_index / index_to_label can translate in both directions.
    global LABELS
    LABELS = labels
def label_to_index(word):
    """Map a label string to its integer position in LABELS, as a 0-d tensor."""
    position = LABELS.index(word)
    return torch.tensor(position)
def index_to_label(index):
    """Inverse of label_to_index: map an integer position back to its label."""
    return LABELS[index]
def pad_sequence(batch):
    """Zero-pad a batch of (channel, time) tensors to a common length."""
    # rnn.pad_sequence pads along dim 0, so transpose each item to
    # (time, channel) first, pad, then restore (batch, channel, time).
    transposed = [waveform.t() for waveform in batch]
    padded = torch.nn.utils.rnn.pad_sequence(transposed, batch_first=True, padding_value=0.)
    return padded.permute(0, 2, 1)
def collate_fn(batch):
    """Collate dataset tuples into a (waveforms, targets) tensor pair.

    Each data tuple has the form
    (waveform, sample_rate, label, speaker_id, utterance_number);
    only the waveform and label are used.
    """
    waveforms, labels = [], []
    for waveform, _, label, *_ in batch:
        waveforms.append(waveform)
        labels.append(label_to_index(label))
    # Pad waveforms to a common length and stack the label indices.
    return pad_sequence(waveforms), torch.stack(labels)
def number_of_correct(pred, target):
    """Count how many predictions match their targets."""
    matches = pred.squeeze().eq(target)
    return matches.sum().item()
def get_likely_index(tensor):
    """Return the index of the highest score along the last dimension."""
    return tensor.argmax(dim=-1)
def count_parameters(model):
    # Total count of trainable (requires_grad) parameters in *model*.
    return sum(p.numel() for p in model.parameters() if p.requires_grad) | [
"torch.stack",
"os.path.join",
"torch.nn.utils.rnn.pad_sequence"
] | [((1334, 1409), 'torch.nn.utils.rnn.pad_sequence', 'torch.nn.utils.rnn.pad_sequence', (['batch'], {'batch_first': '(True)', 'padding_value': '(0.0)'}), '(batch, batch_first=True, padding_value=0.0)\n', (1365, 1409), False, 'import torch\n'), ((1867, 1887), 'torch.stack', 'torch.stack', (['targets'], {}), '(targets)\n', (1878, 1887), False, 'import torch\n'), ((262, 296), 'os.path.join', 'os.path.join', (['self._path', 'filename'], {}), '(self._path, filename)\n', (274, 296), False, 'import os\n')] |
from setuptools import setup

# Read the long description once, with an explicit encoding so the build does
# not depend on the platform's default locale (see PEP 597). read() already
# returns str, so no extra str() conversion is needed.
with open('README.rst', encoding='utf-8') as readme:
    r = readme.read()

setup(
    name='clpb',
    version='1.0.0',
    url='https://github.com/dmitriiweb/clpb',
    license='MIT',
    author='<NAME>',
    author_email='<EMAIL>',
    description=' Command line progress bar for Python 3',
    long_description=r,
    classifiers=(
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ),
)
| [
"setuptools.setup"
] | [((93, 677), 'setuptools.setup', 'setup', ([], {'name': '"""clpb"""', 'version': '"""1.0.0"""', 'url': '"""https://github.com/dmitriiweb/clpb"""', 'license': '"""MIT"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'description': '""" Command line progress bar for Python 3"""', 'long_description': 'r', 'classifiers': "('Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python', 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6')"}), "(name='clpb', version='1.0.0', url=\n 'https://github.com/dmitriiweb/clpb', license='MIT', author='<NAME>',\n author_email='<EMAIL>', description=\n ' Command line progress bar for Python 3', long_description=r,\n classifiers=('Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python', 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6'))\n", (98, 677), False, 'from setuptools import setup\n')] |
def wandb_log(  # noqa: C901
    func=None,
    # /, # py38 only
    log_component_file=True,
):
    """Wrap a standard python function and log to W&B.

    Usable both bare (``@wandb_log``) and with arguments
    (``@wandb_log(log_component_file=False)``). The wrapped function's
    scalar inputs become run config, file inputs/outputs become W&B
    artifacts, and the return value is logged as metrics. A
    ``mlpipeline_ui_metadata_path`` output is injected into the signature
    so Kubeflow can render the W&B run page in its UI.
    """
    import json
    import os
    from functools import wraps
    from inspect import Parameter, signature
    from kfp import components
    from kfp.components import (
        InputArtifact,
        InputBinaryFile,
        InputPath,
        InputTextFile,
        OutputArtifact,
        OutputBinaryFile,
        OutputPath,
        OutputTextFile,
    )
    import wandb
    from wandb.sdk.lib import telemetry as wb_telemetry
    # kfp annotation classes used below to classify each parameter as a
    # file-like input/output artifact vs a plain scalar.
    output_types = (OutputArtifact, OutputBinaryFile, OutputPath, OutputTextFile)
    input_types = (InputArtifact, InputBinaryFile, InputPath, InputTextFile)
    def isinstance_namedtuple(x):
        # True when x is a namedtuple/NamedTuple instance: its type has the
        # single base `tuple` plus a `_fields` tuple of strings.
        t = type(x)
        b = t.__bases__
        if len(b) != 1 or b[0] != tuple:
            return False
        f = getattr(t, "_fields", None)
        if not isinstance(f, tuple):
            return False
        return all(type(n) == str for n in f)
    def get_iframe_html(run):
        # Inline HTML that embeds the W&B run page inside the Kubeflow UI.
        return f'<iframe src="{run.url}?kfp=true" style="border:none;width:100%;height:100%;min-width:900px;min-height:600px;"></iframe>'
    def get_link_back_to_kubeflow():
        # URL template pointing back to this pipeline run in the Kubeflow UI;
        # {{workflow.uid}} is left for the pipeline engine to substitute.
        wandb_kubeflow_url = os.getenv("WANDB_KUBEFLOW_URL")
        return f"{wandb_kubeflow_url}/#/runs/details/{{workflow.uid}}"
    def log_input_scalar(name, data, run=None):
        # Scalar inputs are recorded as run config entries.
        run.config[name] = data
        wandb.termlog(f"Setting config: {name} to {data}")
    def log_input_artifact(name, data, type, run=None):
        # File inputs are tracked as *consumed* W&B artifacts.
        artifact = wandb.Artifact(name, type=type)
        artifact.add_file(data)
        run.use_artifact(artifact)
        wandb.termlog(f"Using artifact: {name}")
    def log_output_scalar(name, data, run=None):
        # NamedTuple returns are fanned out into one metric per field,
        # namespaced by the wrapped function's name.
        if isinstance_namedtuple(data):
            for k, v in zip(data._fields, data):
                run.log({f"{func.__name__}.{k}": v})
        else:
            run.log({name: data})
    def log_output_artifact(name, data, type, run=None):
        # File outputs are logged as *produced* W&B artifacts.
        artifact = wandb.Artifact(name, type=type)
        artifact.add_file(data)
        run.log_artifact(artifact)
        wandb.termlog(f"Logging artifact: {name}")
    def _log_component_file(func, run=None):
        # Serialize the component definition to YAML via kfp and log it as an
        # artifact so the exact component is versioned alongside the run.
        name = func.__name__
        output_component_file = f"{name}.yml"
        components._python_op.func_to_component_file(func, output_component_file)
        artifact = wandb.Artifact(name, type="kubeflow_component_file")
        artifact.add_file(output_component_file)
        run.log_artifact(artifact)
        wandb.termlog(f"Logging component file: {output_component_file}")
    # Add `mlpipeline_ui_metadata_path` to signature to show W&B run in
    # "ML Visualizations tab". The injected parameter must sit after all
    # required parameters but before any defaulted ones so existing
    # positional call sites keep working.
    sig = signature(func)
    no_default = []
    has_default = []
    for param in sig.parameters.values():
        if param.default is param.empty:
            no_default.append(param)
        else:
            has_default.append(param)
    new_params = tuple(
        (
            *no_default,
            Parameter(
                "mlpipeline_ui_metadata_path",
                annotation=OutputPath(),
                kind=Parameter.POSITIONAL_OR_KEYWORD,
            ),
            *has_default,
        )
    )
    new_sig = sig.replace(parameters=new_params)
    new_anns = {param.name: param.annotation for param in new_params}
    if "return" in func.__annotations__:
        new_anns["return"] = func.__annotations__["return"]
    def decorator(func):
        # Bucket the wrapped function's annotations so the wrapper knows how
        # to log each argument and the return value.
        input_scalars = {}
        input_artifacts = {}
        output_scalars = {}
        output_artifacts = {}
        for name, ann in func.__annotations__.items():
            if name == "return":
                output_scalars[name] = ann
            elif isinstance(ann, output_types):
                output_artifacts[name] = ann
            elif isinstance(ann, input_types):
                input_artifacts[name] = ann
            else:
                input_scalars[name] = ann
        @wraps(func)
        def wrapper(*args, **kwargs):
            bound = new_sig.bind(*args, **kwargs)
            bound.apply_defaults()
            # Pop the injected metadata path — the wrapped func never sees it.
            mlpipeline_ui_metadata_path = bound.arguments["mlpipeline_ui_metadata_path"]
            del bound.arguments["mlpipeline_ui_metadata_path"]
            with wandb.init(
                job_type=func.__name__,
                group="{{workflow.annotations.pipelines.kubeflow.org/run_name}}",
            ) as run:
                # Link back to the kfp UI
                kubeflow_url = get_link_back_to_kubeflow()
                run.notes = kubeflow_url
                run.config["LINK_TO_KUBEFLOW_RUN"] = kubeflow_url
                iframe_html = get_iframe_html(run)
                # Kubeflow renders this inline-markdown metadata file in its
                # "ML Visualizations" tab as an embedded W&B run page.
                metadata = {
                    "outputs": [
                        {
                            "type": "markdown",
                            "storage": "inline",
                            "source": iframe_html,
                        }
                    ]
                }
                with open(mlpipeline_ui_metadata_path, "w") as metadata_file:
                    json.dump(metadata, metadata_file)
                if log_component_file:
                    _log_component_file(func, run=run)
                for name, _ in input_scalars.items():
                    log_input_scalar(name, kwargs[name], run)
                for name, ann in input_artifacts.items():
                    log_input_artifact(name, kwargs[name], ann.type, run)
                # Record in W&B telemetry that this run used the kfp decorator.
                with wb_telemetry.context(run=run) as tel:
                    tel.feature.kfp_wandb_log = True
                result = func(*bound.args, **bound.kwargs)
                for name, _ in output_scalars.items():
                    log_output_scalar(name, result, run)
                for name, ann in output_artifacts.items():
                    log_output_artifact(name, kwargs[name], ann.type, run)
            return result
        wrapper.__signature__ = new_sig
        wrapper.__annotations__ = new_anns
        return wrapper
    if func is None:
        # Called with arguments: @wandb_log(...) — return the real decorator.
        return decorator
    else:
        # Called bare: @wandb_log — decorate immediately.
        return decorator(func)
| [
"wandb.Artifact",
"os.getenv",
"json.dump",
"kfp.components._python_op.func_to_component_file",
"inspect.signature",
"functools.wraps",
"wandb.init",
"wandb.termlog",
"wandb.sdk.lib.telemetry.context",
"kfp.components.OutputPath"
] | [((2754, 2769), 'inspect.signature', 'signature', (['func'], {}), '(func)\n', (2763, 2769), False, 'from inspect import Parameter, signature\n'), ((1280, 1311), 'os.getenv', 'os.getenv', (['"""WANDB_KUBEFLOW_URL"""'], {}), "('WANDB_KUBEFLOW_URL')\n", (1289, 1311), False, 'import os\n'), ((1472, 1522), 'wandb.termlog', 'wandb.termlog', (['f"""Setting config: {name} to {data}"""'], {}), "(f'Setting config: {name} to {data}')\n", (1485, 1522), False, 'import wandb\n'), ((1599, 1630), 'wandb.Artifact', 'wandb.Artifact', (['name'], {'type': 'type'}), '(name, type=type)\n', (1613, 1630), False, 'import wandb\n'), ((1706, 1746), 'wandb.termlog', 'wandb.termlog', (['f"""Using artifact: {name}"""'], {}), "(f'Using artifact: {name}')\n", (1719, 1746), False, 'import wandb\n'), ((2064, 2095), 'wandb.Artifact', 'wandb.Artifact', (['name'], {'type': 'type'}), '(name, type=type)\n', (2078, 2095), False, 'import wandb\n'), ((2171, 2213), 'wandb.termlog', 'wandb.termlog', (['f"""Logging artifact: {name}"""'], {}), "(f'Logging artifact: {name}')\n", (2184, 2213), False, 'import wandb\n'), ((2343, 2416), 'kfp.components._python_op.func_to_component_file', 'components._python_op.func_to_component_file', (['func', 'output_component_file'], {}), '(func, output_component_file)\n', (2387, 2416), False, 'from kfp import components\n'), ((2436, 2488), 'wandb.Artifact', 'wandb.Artifact', (['name'], {'type': '"""kubeflow_component_file"""'}), "(name, type='kubeflow_component_file')\n", (2450, 2488), False, 'import wandb\n'), ((2581, 2646), 'wandb.termlog', 'wandb.termlog', (['f"""Logging component file: {output_component_file}"""'], {}), "(f'Logging component file: {output_component_file}')\n", (2594, 2646), False, 'import wandb\n'), ((4012, 4023), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (4017, 4023), False, 'from functools import wraps\n'), ((4318, 4423), 'wandb.init', 'wandb.init', ([], {'job_type': 'func.__name__', 'group': 
'"""{{workflow.annotations.pipelines.kubeflow.org/run_name}}"""'}), "(job_type=func.__name__, group=\n '{{workflow.annotations.pipelines.kubeflow.org/run_name}}')\n", (4328, 4423), False, 'import wandb\n'), ((3141, 3153), 'kfp.components.OutputPath', 'OutputPath', ([], {}), '()\n', (3151, 3153), False, 'from kfp.components import InputArtifact, InputBinaryFile, InputPath, InputTextFile, OutputArtifact, OutputBinaryFile, OutputPath, OutputTextFile\n'), ((5136, 5170), 'json.dump', 'json.dump', (['metadata', 'metadata_file'], {}), '(metadata, metadata_file)\n', (5145, 5170), False, 'import json\n'), ((5538, 5567), 'wandb.sdk.lib.telemetry.context', 'wb_telemetry.context', ([], {'run': 'run'}), '(run=run)\n', (5558, 5567), True, 'from wandb.sdk.lib import telemetry as wb_telemetry\n')] |
# -*- coding: utf-8 -*-
# Copyright 2017 Interstellar Technologies Inc. All Rights Reserved.
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
from OpenGoddard.optimize import Problem, Guess, Condition, Dynamics
class Rocket:
    """Nondimensionalized Goddard-rocket parameter set."""

    g0 = 1.0  # Gravity at surface [-]

    def __init__(self):
        # Initial state
        self.H0 = 1.0  # Initial height
        self.V0 = 0.0  # Initial velocity
        self.M0 = 1.0  # Initial mass
        # Model constants
        self.Tc = 3.5  # Use for thrust
        self.Hc = 500  # Use for drag
        self.Vc = 620  # Use for drag
        self.Mc = 0.6  # Fraction of initial mass left at end
        # Derived quantities
        self.c = 0.5 * np.sqrt(self.g0 * self.H0)  # Thrust-to-fuel mass
        self.Mf = self.Mc * self.M0  # Final mass
        self.Dc = 0.5 * self.Vc * self.M0 / self.g0  # Drag scaling
        self.T_max = self.Tc * self.g0 * self.M0  # Maximum thrust
def dynamics(prob, obj, section):
    """Goddard-rocket equations of motion for one trajectory section."""
    altitude = prob.states(0, section)
    velocity = prob.states(1, section)
    mass = prob.states(2, section)
    thrust = prob.controls(0, section)
    # Exponential-atmosphere drag and inverse-square gravity.
    drag = obj.Dc * velocity ** 2 * np.exp(-obj.Hc * (altitude - obj.H0) / obj.H0)
    gravity = obj.g0 * (obj.H0 / altitude) ** 2
    dx = Dynamics(prob, section)
    dx[0] = velocity                               # h' = v
    dx[1] = (thrust - drag) / mass - gravity       # v' = (T - D)/m - g
    dx[2] = -thrust / obj.c                        # m' = -T/c
    return dx()
def equality(prob, obj):
    """Boundary (event) conditions: launch state and burnout state."""
    h = prob.states_all_section(0)
    v = prob.states_all_section(1)
    m = prob.states_all_section(2)
    T = prob.controls_all_section(0)
    tf = prob.time_final(-1)
    cond = Condition()
    # Initial state pinned to the Rocket parameters; final velocity zero
    # and final mass equal to the dry-mass fraction.
    pairs = (
        (h[0], obj.H0),
        (v[0], obj.V0),
        (m[0], obj.M0),
        (v[-1], 0.0),
        (m[-1], obj.Mf),
    )
    for actual, expected in pairs:
        cond.equal(actual, expected)
    return cond()
def inequality(prob, obj):
    """Path constraints: state and control bounds over the trajectory."""
    h = prob.states_all_section(0)
    v = prob.states_all_section(1)
    m = prob.states_all_section(2)
    T = prob.controls_all_section(0)
    tf = prob.time_final(-1)
    bounds = Condition()
    # Lower bounds: stay above launch altitude, non-negative velocity and
    # thrust, mass no less than dry mass, positive final time.
    for value, lo in ((h, obj.H0), (v, 0.0), (m, obj.Mf), (T, 0.0), (tf, 0.1)):
        bounds.lower_bound(value, lo)
    # Upper bounds: mass never exceeds launch mass, thrust capped at T_max.
    for value, hi in ((m, obj.M0), (T, obj.T_max)):
        bounds.upper_bound(value, hi)
    return bounds()
def cost(prob, obj):
    """Maximize final altitude by minimizing its negative."""
    altitude = prob.states_all_section(0)
    return -altitude[-1]
# ========================
plt.close("all")
# Program Starting Point
# Discretization: one section of 50 nodes, 3 states (altitude, velocity,
# mass) and 1 control (thrust); SQP capped at 30 iterations.
time_init = [0.0, 0.3]
n = [50]
num_states = [3]
num_controls = [1]
max_iteration = 30
flag_savefig = True
savefig_file = "04_Goddard/04_0knot_"
# ------------------------
# set OpenGoddard class for algorithm determination
prob = Problem(time_init, n, num_states, num_controls, max_iteration)
# ------------------------
# create instance of operating object
# Nondimensionalization of parameters
obj = Rocket()
# ========================
# Initial parameter guess
# altitude profile
H_init = Guess.cubic(prob.time_all_section, 1.0, 0.0, 1.010, 0.0)
# Guess.plot(prob.time_all_section, H_init, "Altitude", "time", "Altitude")
# if(flag_savefig):plt.savefig(savefig_file + "guess_alt" + ".png")
# velocity (starts and ends at rest)
V_init = Guess.linear(prob.time_all_section, 0.0, 0.0)
# Guess.plot(prob.time_all_section, V_init, "Velocity", "time", "Velocity")
# mass profile (burns down toward the dry-mass fraction)
M_init = Guess.cubic(prob.time_all_section, 1.0, -0.6, 0.6, 0.0)
# Guess.plot(prob.time_all_section, M_init, "Mass", "time", "Mass")
# if(flag_savefig):plt.savefig(savefig_file + "guess_mass" + ".png")
# thrust profile
T_init = Guess.cubic(prob.time_all_section, 3.5, 0.0, 0.0, 0.0)
# Guess.plot(prob.time_all_section, T_init, "Thrust Guess", "time", "Thrust")
# if(flag_savefig):plt.savefig(savefig_file + "guess_thrust" + ".png")
plt.show()
# ========================
# Substitution initial value to parameter vector to be optimized
prob.set_states_all_section(0, H_init)
prob.set_states_all_section(1, V_init)
prob.set_states_all_section(2, M_init)
prob.set_controls_all_section(0, T_init)
# ========================
# Main Process
# Assign problem to SQP solver: dynamics/cost/constraint callbacks defined
# above are handed to the Problem instance before solving.
prob.dynamics = [dynamics]
prob.knot_states_smooth = []
prob.cost = cost
prob.cost_derivative = None
prob.equality = equality
prob.inequality = inequality
def display_func():
    # Solver progress callback: reports the current peak (final) altitude.
    # Closes over the module-level `prob` defined above.
    h = prob.states_all_section(0)
    print("max altitude: {0:.5f}".format(h[-1]))
prob.solve(obj, display_func, ftol=1e-10)
# ========================
# Post Process
# ------------------------
# Convert parameter vector to variable
h = prob.states_all_section(0)
v = prob.states_all_section(1)
m = prob.states_all_section(2)
T = prob.controls_all_section(0)
time = prob.time_update()
# ------------------------
# Calculate necessary variables — reuse the Rocket constants instead of
# re-hardcoding 620/500/1.0, so the plots stay consistent with dynamics().
drag = obj.Dc * v ** 2 * np.exp(-obj.Hc * (h - obj.H0) / obj.H0)
g = obj.g0 * (obj.H0 / h) ** 2
# ------------------------
# Visualization
def _plot_profile(title, fname, ylabel, series, legend=False):
    """Draw one time-series figure with knot markers; save PNG if enabled.

    `series` is a list of (label, data) pairs plotted against `time`.
    """
    plt.figure()
    plt.title(title)
    for label, data in series:
        plt.plot(time, data, marker="o", label=label)
    for knot in prob.time_knots():
        plt.axvline(knot, color="k", alpha=0.5)
    plt.grid()
    plt.xlabel("time [s]")
    plt.ylabel(ylabel)
    if legend:
        plt.legend(loc="best")
    if flag_savefig:
        plt.savefig(savefig_file + fname + ".png")

_plot_profile("Altitude profile", "altitude", "Altitude [-]", [("Altitude", h)])
_plot_profile("Velocity", "velocity", "Velocity [-]", [("Velocity", v)])
_plot_profile("Mass", "mass", "Mass [-]", [("Mass", m)])
_plot_profile("Thrust profile", "force", "Thrust [-]",
              [("Thrust", T), ("Drag", drag), ("Gravity", g)], legend=True)
plt.show()
| [
"OpenGoddard.optimize.Guess.cubic",
"matplotlib.pyplot.grid",
"matplotlib.pyplot.savefig",
"OpenGoddard.optimize.Problem",
"numpy.sqrt",
"OpenGoddard.optimize.Guess.linear",
"matplotlib.pyplot.ylabel",
"OpenGoddard.optimize.Dynamics",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplo... | [((2361, 2377), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (2370, 2377), True, 'import matplotlib.pyplot as plt\n'), ((2636, 2698), 'OpenGoddard.optimize.Problem', 'Problem', (['time_init', 'n', 'num_states', 'num_controls', 'max_iteration'], {}), '(time_init, n, num_states, num_controls, max_iteration)\n', (2643, 2698), False, 'from OpenGoddard.optimize import Problem, Guess, Condition, Dynamics\n'), ((2901, 2956), 'OpenGoddard.optimize.Guess.cubic', 'Guess.cubic', (['prob.time_all_section', '(1.0)', '(0.0)', '(1.01)', '(0.0)'], {}), '(prob.time_all_section, 1.0, 0.0, 1.01, 0.0)\n', (2912, 2956), False, 'from OpenGoddard.optimize import Problem, Guess, Condition, Dynamics\n'), ((3123, 3168), 'OpenGoddard.optimize.Guess.linear', 'Guess.linear', (['prob.time_all_section', '(0.0)', '(0.0)'], {}), '(prob.time_all_section, 0.0, 0.0)\n', (3135, 3168), False, 'from OpenGoddard.optimize import Problem, Guess, Condition, Dynamics\n'), ((3270, 3325), 'OpenGoddard.optimize.Guess.cubic', 'Guess.cubic', (['prob.time_all_section', '(1.0)', '(-0.6)', '(0.6)', '(0.0)'], {}), '(prob.time_all_section, 1.0, -0.6, 0.6, 0.0)\n', (3281, 3325), False, 'from OpenGoddard.optimize import Problem, Guess, Condition, Dynamics\n'), ((3490, 3544), 'OpenGoddard.optimize.Guess.cubic', 'Guess.cubic', (['prob.time_all_section', '(3.5)', '(0.0)', '(0.0)', '(0.0)'], {}), '(prob.time_all_section, 3.5, 0.0, 0.0, 0.0)\n', (3501, 3544), False, 'from OpenGoddard.optimize import Problem, Guess, Condition, Dynamics\n'), ((3695, 3705), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3703, 3705), True, 'import matplotlib.pyplot as plt\n'), ((4806, 4818), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4816, 4818), True, 'import matplotlib.pyplot as plt\n'), ((4819, 4848), 'matplotlib.pyplot.title', 'plt.title', (['"""Altitude profile"""'], {}), "('Altitude profile')\n", (4828, 4848), True, 'import matplotlib.pyplot as plt\n'), ((4849, 4896), 
'matplotlib.pyplot.plot', 'plt.plot', (['time', 'h'], {'marker': '"""o"""', 'label': '"""Altitude"""'}), "(time, h, marker='o', label='Altitude')\n", (4857, 4896), True, 'import matplotlib.pyplot as plt\n'), ((4972, 4982), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (4980, 4982), True, 'import matplotlib.pyplot as plt\n'), ((4983, 5005), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""time [s]"""'], {}), "('time [s]')\n", (4993, 5005), True, 'import matplotlib.pyplot as plt\n'), ((5006, 5032), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Altitude [-]"""'], {}), "('Altitude [-]')\n", (5016, 5032), True, 'import matplotlib.pyplot as plt\n'), ((5100, 5112), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (5110, 5112), True, 'import matplotlib.pyplot as plt\n'), ((5113, 5134), 'matplotlib.pyplot.title', 'plt.title', (['"""Velocity"""'], {}), "('Velocity')\n", (5122, 5134), True, 'import matplotlib.pyplot as plt\n'), ((5135, 5182), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'v'], {'marker': '"""o"""', 'label': '"""Velocity"""'}), "(time, v, marker='o', label='Velocity')\n", (5143, 5182), True, 'import matplotlib.pyplot as plt\n'), ((5258, 5268), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (5266, 5268), True, 'import matplotlib.pyplot as plt\n'), ((5269, 5291), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""time [s]"""'], {}), "('time [s]')\n", (5279, 5291), True, 'import matplotlib.pyplot as plt\n'), ((5292, 5318), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Velocity [-]"""'], {}), "('Velocity [-]')\n", (5302, 5318), True, 'import matplotlib.pyplot as plt\n'), ((5386, 5398), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (5396, 5398), True, 'import matplotlib.pyplot as plt\n'), ((5399, 5416), 'matplotlib.pyplot.title', 'plt.title', (['"""Mass"""'], {}), "('Mass')\n", (5408, 5416), True, 'import matplotlib.pyplot as plt\n'), ((5417, 5460), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'm'], {'marker': 
'"""o"""', 'label': '"""Mass"""'}), "(time, m, marker='o', label='Mass')\n", (5425, 5460), True, 'import matplotlib.pyplot as plt\n'), ((5536, 5546), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (5544, 5546), True, 'import matplotlib.pyplot as plt\n'), ((5547, 5569), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""time [s]"""'], {}), "('time [s]')\n", (5557, 5569), True, 'import matplotlib.pyplot as plt\n'), ((5570, 5592), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Mass [-]"""'], {}), "('Mass [-]')\n", (5580, 5592), True, 'import matplotlib.pyplot as plt\n'), ((5656, 5668), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (5666, 5668), True, 'import matplotlib.pyplot as plt\n'), ((5669, 5696), 'matplotlib.pyplot.title', 'plt.title', (['"""Thrust profile"""'], {}), "('Thrust profile')\n", (5678, 5696), True, 'import matplotlib.pyplot as plt\n'), ((5697, 5742), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'T'], {'marker': '"""o"""', 'label': '"""Thrust"""'}), "(time, T, marker='o', label='Thrust')\n", (5705, 5742), True, 'import matplotlib.pyplot as plt\n'), ((5743, 5789), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'drag'], {'marker': '"""o"""', 'label': '"""Drag"""'}), "(time, drag, marker='o', label='Drag')\n", (5751, 5789), True, 'import matplotlib.pyplot as plt\n'), ((5790, 5836), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'g'], {'marker': '"""o"""', 'label': '"""Gravity"""'}), "(time, g, marker='o', label='Gravity')\n", (5798, 5836), True, 'import matplotlib.pyplot as plt\n'), ((5912, 5922), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (5920, 5922), True, 'import matplotlib.pyplot as plt\n'), ((5923, 5945), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""time [s]"""'], {}), "('time [s]')\n", (5933, 5945), True, 'import matplotlib.pyplot as plt\n'), ((5946, 5970), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Thrust [-]"""'], {}), "('Thrust [-]')\n", (5956, 5970), True, 'import matplotlib.pyplot as plt\n'), 
((5971, 5993), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""best"""'}), "(loc='best')\n", (5981, 5993), True, 'import matplotlib.pyplot as plt\n'), ((6058, 6068), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6066, 6068), True, 'import matplotlib.pyplot as plt\n'), ((1211, 1234), 'OpenGoddard.optimize.Dynamics', 'Dynamics', (['prob', 'section'], {}), '(prob, section)\n', (1219, 1234), False, 'from OpenGoddard.optimize import Problem, Guess, Condition, Dynamics\n'), ((1528, 1539), 'OpenGoddard.optimize.Condition', 'Condition', ([], {}), '()\n', (1537, 1539), False, 'from OpenGoddard.optimize import Problem, Guess, Condition, Dynamics\n'), ((1952, 1963), 'OpenGoddard.optimize.Condition', 'Condition', ([], {}), '()\n', (1961, 1963), False, 'from OpenGoddard.optimize import Problem, Guess, Condition, Dynamics\n'), ((4708, 4738), 'numpy.exp', 'np.exp', (['(-500 * (h - 1.0) / 1.0)'], {}), '(-500 * (h - 1.0) / 1.0)\n', (4714, 4738), True, 'import numpy as np\n'), ((4932, 4971), 'matplotlib.pyplot.axvline', 'plt.axvline', (['line'], {'color': '"""k"""', 'alpha': '(0.5)'}), "(line, color='k', alpha=0.5)\n", (4943, 4971), True, 'import matplotlib.pyplot as plt\n'), ((5051, 5098), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(savefig_file + 'altitude' + '.png')"], {}), "(savefig_file + 'altitude' + '.png')\n", (5062, 5098), True, 'import matplotlib.pyplot as plt\n'), ((5218, 5257), 'matplotlib.pyplot.axvline', 'plt.axvline', (['line'], {'color': '"""k"""', 'alpha': '(0.5)'}), "(line, color='k', alpha=0.5)\n", (5229, 5257), True, 'import matplotlib.pyplot as plt\n'), ((5337, 5384), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(savefig_file + 'velocity' + '.png')"], {}), "(savefig_file + 'velocity' + '.png')\n", (5348, 5384), True, 'import matplotlib.pyplot as plt\n'), ((5496, 5535), 'matplotlib.pyplot.axvline', 'plt.axvline', (['line'], {'color': '"""k"""', 'alpha': '(0.5)'}), "(line, color='k', alpha=0.5)\n", (5507, 5535), True, 'import 
matplotlib.pyplot as plt\n'), ((5611, 5654), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(savefig_file + 'mass' + '.png')"], {}), "(savefig_file + 'mass' + '.png')\n", (5622, 5654), True, 'import matplotlib.pyplot as plt\n'), ((5872, 5911), 'matplotlib.pyplot.axvline', 'plt.axvline', (['line'], {'color': '"""k"""', 'alpha': '(0.5)'}), "(line, color='k', alpha=0.5)\n", (5883, 5911), True, 'import matplotlib.pyplot as plt\n'), ((6012, 6056), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(savefig_file + 'force' + '.png')"], {}), "(savefig_file + 'force' + '.png')\n", (6023, 6056), True, 'import matplotlib.pyplot as plt\n'), ((1128, 1167), 'numpy.exp', 'np.exp', (['(-obj.Hc * (h - obj.H0) / obj.H0)'], {}), '(-obj.Hc * (h - obj.H0) / obj.H0)\n', (1134, 1167), True, 'import numpy as np\n'), ((653, 679), 'numpy.sqrt', 'np.sqrt', (['(self.g0 * self.H0)'], {}), '(self.g0 * self.H0)\n', (660, 679), True, 'import numpy as np\n')] |
import argparse
from pathlib import Path
import tarfile
from zipfile import ZipFile
from scanf import scanf
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('tarball_output')
    parser.add_argument('tex_files', nargs='*')
    args = parser.parse_args()

    # Dependency-line formats emitted by \RequirePackage{snapshot};
    # the variants cover optional whitespace and graphic entries.
    DEP_PATTERNS = (
        '*{file} {%s}{0000/00/00 v0.0}',
        '*{file} {%s} {0000/00/00 v0.0}',
        '*{file} {%s}{Graphic v0.0}',
        '*{file} {%s} {Graphic v0.0}',
    )

    files = []

    def _add(path):
        """Record a path once, preserving first-seen order."""
        s = str(path)
        if s not in files:
            files.append(s)

    for tex_file in args.tex_files:
        tex_file_path = Path(tex_file)
        if tex_file_path.exists():
            _add(tex_file_path)
        pdf_path = tex_file_path.with_suffix('.pdf')
        if pdf_path.exists():
            _add(pdf_path)
        # parse dep_file generated with \RequirePackage{snapshot}
        dep_file = tex_file_path.with_suffix('.dep')
        if dep_file.exists():
            with open(str(dep_file), 'r') as f:
                for line in f:
                    if '*{file}' not in line:
                        continue
                    # Try each known format until one parses.
                    match = None
                    for pattern in DEP_PATTERNS:
                        match = scanf(pattern, line, collapseWhitespace=True)
                        if match is not None:
                            break
                    if match is None:
                        continue
                    filename, = match
                    path = Path(filename)
                    if path.suffix in ('.png', '.pdf', '.tex', '.bbl', '.cls') and path.exists():
                        _add(path)

    print("FILES IN TARBALL:\n")
    for myfile in files:
        print(myfile)

    # make tarball from files
    output_path = Path(args.tarball_output)
    if output_path.suffix == '.gz':
        with tarfile.open(args.tarball_output, 'w:gz', dereference=True) as tar:
            for this_file in files:
                tar.add(this_file)
    elif output_path.suffix == '.zip':
        with ZipFile(args.tarball_output, 'w') as myzip:
            for this_file in files:
                myzip.write(this_file)
    else:
        # Original code constructed an Exception without raising it,
        # silently producing no archive; fail loudly instead.
        raise ValueError('unrecognized output suffix')
| [
"tarfile.open",
"zipfile.ZipFile",
"argparse.ArgumentParser",
"pathlib.Path",
"scanf.scanf"
] | [((150, 175), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (173, 175), False, 'import argparse\n'), ((2203, 2228), 'pathlib.Path', 'Path', (['args.tarball_output'], {}), '(args.tarball_output)\n', (2207, 2228), False, 'from pathlib import Path\n'), ((376, 390), 'pathlib.Path', 'Path', (['tex_file'], {}), '(tex_file)\n', (380, 390), False, 'from pathlib import Path\n'), ((2278, 2337), 'tarfile.open', 'tarfile.open', (['args.tarball_output', '"""w:gz"""'], {'dereference': '(True)'}), "(args.tarball_output, 'w:gz', dereference=True)\n", (2290, 2337), False, 'import tarfile\n'), ((2469, 2502), 'zipfile.ZipFile', 'ZipFile', (['args.tarball_output', '"""w"""'], {}), "(args.tarball_output, 'w')\n", (2476, 2502), False, 'from zipfile import ZipFile\n'), ((962, 1031), 'scanf.scanf', 'scanf', (['"""*{file} {%s}{0000/00/00 v0.0}"""', 'line'], {'collapseWhitespace': '(True)'}), "('*{file} {%s}{0000/00/00 v0.0}', line, collapseWhitespace=True)\n", (967, 1031), False, 'from scanf import scanf\n'), ((1861, 1875), 'pathlib.Path', 'Path', (['filename'], {}), '(filename)\n', (1865, 1875), False, 'from pathlib import Path\n'), ((1100, 1170), 'scanf.scanf', 'scanf', (['"""*{file} {%s} {0000/00/00 v0.0}"""', 'line'], {'collapseWhitespace': '(True)'}), "('*{file} {%s} {0000/00/00 v0.0}', line, collapseWhitespace=True)\n", (1105, 1170), False, 'from scanf import scanf\n'), ((1246, 1312), 'scanf.scanf', 'scanf', (['"""*{file} {%s}{Graphic v0.0}"""', 'line'], {'collapseWhitespace': '(True)'}), "('*{file} {%s}{Graphic v0.0}', line, collapseWhitespace=True)\n", (1251, 1312), False, 'from scanf import scanf\n'), ((1397, 1464), 'scanf.scanf', 'scanf', (['"""*{file} {%s} {Graphic v0.0}"""', 'line'], {'collapseWhitespace': '(True)'}), "('*{file} {%s} {Graphic v0.0}', line, collapseWhitespace=True)\n", (1402, 1464), False, 'from scanf import scanf\n')] |
# ┌────────────────────────────────────────────────────────────────────────┐
# │ InstaBot - Python Selenium Bot │
# ├────────────────────────────────────────────────────────────────────────┤
# │ Copyright © 2019 <NAME> |
# | (https://github.com/Estayparadox/InstaBot) │
# ├────────────────────────────────────────────────────────────────────────┤
# │ Licensed under the MIT |
# | (https://github.com/Estayparadox/InstaBot/blob/master/LICENSE) license.│
# └────────────────────────────────────────────────────────────────────────┘
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from time import sleep, strftime
from random import randint
import pandas as pd
chromedriver_path = 'chromedriver.exe' # Change this to your own chromedriver path!

# Bind the browser instance to its own name instead of rebinding the
# imported `webdriver` module (the original shadowed the module, making
# `webdriver.Chrome` unreachable afterwards).
driver = webdriver.Chrome(executable_path=chromedriver_path)
sleep(2)

# Log in to Instagram.
driver.get('https://www.instagram.com/accounts/login/?source=auth_switcher')
sleep(3)
username = driver.find_element_by_name('username')
username.send_keys('<EMAIL>') # Change this to your own Instagram username
password = driver.find_element_by_name('password')
password.send_keys('<PASSWORD>') # Change this to your own Instagram password
submit = driver.find_element_by_tag_name('form')
submit.submit()
sleep(3)
notnow = driver.find_element_by_css_selector('body > div.RnEpo.Yx5HN > div > div > div.mt3GC > button.aOOlW.HoLwm')
notnow.click() # Comment these last 2 lines out, if you don't get a pop up asking about notifications

hashtag_list = ['trip', 'dronephotography', 'traveler', 'python', 'php', 'css'] # Change this to your own tags

prev_user_list = [] # If it's the first time you run it, use this line and comment the two below
# prev_user_list = pd.read_csv('20190604-224633_users_followed_list.csv', delimiter=',').iloc[:,1:2] # useful to build a user log
# prev_user_list = list(prev_user_list['0'])

# Comment text per comm_prob value. The original code contained an
# unreachable `comm_prob < 7` branch inside `if comm_prob > 7:` (8, 9 and
# 10 are the only possible values there); the mapping below reproduces
# exactly the comments that were actually reachable.
COMMENT_BY_PROB = {
    8: 'Nice work :)',
    9: 'Nice gallery!!',
    10: 'So cool! :)',
}

new_followed = []
followed = 0
likes = 0
comments = 0

for tag, hashtag in enumerate(hashtag_list):
    driver.get('https://www.instagram.com/explore/tags/' + hashtag_list[tag] + '/')
    sleep(5)
    first_thumbnail = driver.find_element_by_xpath('//*[@id="react-root"]/section/main/article/div[1]/div/div/div[1]/div[1]/a/div')
    first_thumbnail.click()
    sleep(randint(1, 2))
    try:
        for x in range(1, 200):
            username = driver.find_element_by_xpath('/html/body/div[3]/div[2]/div/article/header/div[2]/div[1]/div[1]/h2/a').text
            if username not in prev_user_list:
                # If we already follow, do not unfollow
                if driver.find_element_by_xpath('/html/body/div[3]/div[2]/div/article/header/div[2]/div[1]/div[2]/button').text == 'Follow':
                    driver.find_element_by_xpath('/html/body/div[3]/div[2]/div/article/header/div[2]/div[1]/div[2]/button').click()
                    new_followed.append(username)
                    followed += 1
                # Liking the picture
                button_like = driver.find_element_by_xpath('/html/body/div[3]/div[2]/div/article/div[2]/section[1]/span[1]/button')
                button_like.click()
                likes += 1
                sleep(randint(18, 25))
                # Comments and tracker
                comm_prob = randint(1, 10)
                print('{}_{}: {}'.format(hashtag, x, comm_prob))
                if comm_prob > 7:
                    comments += 1
                    driver.find_element_by_xpath('/html/body/div[3]/div[2]/div/article/div[2]/section[1]/span[2]/button/span').click()
                    comment_box = driver.find_element_by_xpath('/html/body/div[3]/div[2]/div/article/div[2]/section[3]/div/form/textarea')
                    comment_box.send_keys(COMMENT_BY_PROB[comm_prob])
                    sleep(1)
                    # Enter to post comment
                    comment_box.send_keys(Keys.ENTER)
                    sleep(randint(22, 28))
                # Next picture
                driver.find_element_by_link_text('Next').click()
                sleep(randint(25, 29))
            else:
                driver.find_element_by_link_text('Next').click()
                sleep(randint(20, 26))
    # Some hashtag stops refreshing photos (it may happen sometimes), it continues to the next.
    # NOTE: narrowed from a bare `except:` so Ctrl-C / SystemExit still work.
    except Exception:
        continue

for n in range(0, len(new_followed)):
    prev_user_list.append(new_followed[n])

updated_user_df = pd.DataFrame(prev_user_list)
updated_user_df.to_csv('{}_users_followed_list.csv'.format(strftime("%Y%m%d-%H%M%S")))
print('Liked {} photos.'.format(likes))
print('Commented {} photos.'.format(comments))
print('Followed {} new people.'.format(followed))
| [
"selenium.webdriver.find_element_by_css_selector",
"selenium.webdriver.Chrome",
"time.strftime",
"time.sleep",
"selenium.webdriver.find_element_by_name",
"selenium.webdriver.find_element_by_xpath",
"selenium.webdriver.get",
"pandas.DataFrame",
"selenium.webdriver.find_element_by_link_text",
"selen... | [((939, 990), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'executable_path': 'chromedriver_path'}), '(executable_path=chromedriver_path)\n', (955, 990), False, 'from selenium import webdriver\n'), ((991, 999), 'time.sleep', 'sleep', (['(2)'], {}), '(2)\n', (996, 999), False, 'from time import sleep, strftime\n'), ((1000, 1079), 'selenium.webdriver.get', 'webdriver.get', (['"""https://www.instagram.com/accounts/login/?source=auth_switcher"""'], {}), "('https://www.instagram.com/accounts/login/?source=auth_switcher')\n", (1013, 1079), False, 'from selenium import webdriver\n'), ((1080, 1088), 'time.sleep', 'sleep', (['(3)'], {}), '(3)\n', (1085, 1088), False, 'from time import sleep, strftime\n'), ((1101, 1143), 'selenium.webdriver.find_element_by_name', 'webdriver.find_element_by_name', (['"""username"""'], {}), "('username')\n", (1131, 1143), False, 'from selenium import webdriver\n'), ((1230, 1272), 'selenium.webdriver.find_element_by_name', 'webdriver.find_element_by_name', (['"""password"""'], {}), "('password')\n", (1260, 1272), False, 'from selenium import webdriver\n'), ((1361, 1403), 'selenium.webdriver.find_element_by_tag_name', 'webdriver.find_element_by_tag_name', (['"""form"""'], {}), "('form')\n", (1395, 1403), False, 'from selenium import webdriver\n'), ((1420, 1428), 'time.sleep', 'sleep', (['(3)'], {}), '(3)\n', (1425, 1428), False, 'from time import sleep, strftime\n'), ((1439, 1553), 'selenium.webdriver.find_element_by_css_selector', 'webdriver.find_element_by_css_selector', (['"""body > div.RnEpo.Yx5HN > div > div > div.mt3GC > button.aOOlW.HoLwm"""'], {}), "(\n 'body > div.RnEpo.Yx5HN > div > div > div.mt3GC > button.aOOlW.HoLwm')\n", (1477, 1553), False, 'from selenium import webdriver\n'), ((5141, 5169), 'pandas.DataFrame', 'pd.DataFrame', (['prev_user_list'], {}), '(prev_user_list)\n', (5153, 5169), True, 'import pandas as pd\n'), ((2147, 2233), 'selenium.webdriver.get', 'webdriver.get', 
(["('https://www.instagram.com/explore/tags/' + hashtag_list[tag] + '/')"], {}), "('https://www.instagram.com/explore/tags/' + hashtag_list[tag] +\n '/')\n", (2160, 2233), False, 'from selenium import webdriver\n'), ((2233, 2241), 'time.sleep', 'sleep', (['(5)'], {}), '(5)\n', (2238, 2241), False, 'from time import sleep, strftime\n'), ((2264, 2386), 'selenium.webdriver.find_element_by_xpath', 'webdriver.find_element_by_xpath', (['"""//*[@id="react-root"]/section/main/article/div[1]/div/div/div[1]/div[1]/a/div"""'], {}), '(\n \'//*[@id="react-root"]/section/main/article/div[1]/div/div/div[1]/div[1]/a/div\'\n )\n', (2295, 2386), False, 'from selenium import webdriver\n'), ((2416, 2429), 'random.randint', 'randint', (['(1)', '(2)'], {}), '(1, 2)\n', (2423, 2429), False, 'from random import randint\n'), ((5229, 5254), 'time.strftime', 'strftime', (['"""%Y%m%d-%H%M%S"""'], {}), "('%Y%m%d-%H%M%S')\n", (5237, 5254), False, 'from time import sleep, strftime\n'), ((2494, 2603), 'selenium.webdriver.find_element_by_xpath', 'webdriver.find_element_by_xpath', (['"""/html/body/div[3]/div[2]/div/article/header/div[2]/div[1]/div[1]/h2/a"""'], {}), "(\n '/html/body/div[3]/div[2]/div/article/header/div[2]/div[1]/div[1]/h2/a')\n", (2525, 2603), False, 'from selenium import webdriver\n'), ((3149, 3258), 'selenium.webdriver.find_element_by_xpath', 'webdriver.find_element_by_xpath', (['"""/html/body/div[3]/div[2]/div/article/div[2]/section[1]/span[1]/button"""'], {}), "(\n '/html/body/div[3]/div[2]/div/article/div[2]/section[1]/span[1]/button')\n", (3180, 3258), False, 'from selenium import webdriver\n'), ((3443, 3457), 'random.randint', 'randint', (['(1)', '(10)'], {}), '(1, 10)\n', (3450, 3457), False, 'from random import randint\n'), ((4777, 4792), 'random.randint', 'randint', (['(25)', '(29)'], {}), '(25, 29)\n', (4784, 4792), False, 'from random import randint\n'), ((4901, 4916), 'random.randint', 'randint', (['(20)', '(26)'], {}), '(20, 26)\n', (4908, 4916), False, 'from random 
import randint\n'), ((2727, 2838), 'selenium.webdriver.find_element_by_xpath', 'webdriver.find_element_by_xpath', (['"""/html/body/div[3]/div[2]/div/article/header/div[2]/div[1]/div[2]/button"""'], {}), "(\n '/html/body/div[3]/div[2]/div/article/header/div[2]/div[1]/div[2]/button')\n", (2758, 2838), False, 'from selenium import webdriver\n'), ((3351, 3366), 'random.randint', 'randint', (['(18)', '(25)'], {}), '(18, 25)\n', (3358, 3366), False, 'from random import randint\n'), ((3781, 3893), 'selenium.webdriver.find_element_by_xpath', 'webdriver.find_element_by_xpath', (['"""/html/body/div[3]/div[2]/div/article/div[2]/section[3]/div/form/textarea"""'], {}), "(\n '/html/body/div[3]/div[2]/div/article/div[2]/section[3]/div/form/textarea')\n", (3812, 3893), False, 'from selenium import webdriver\n'), ((4703, 4746), 'selenium.webdriver.find_element_by_link_text', 'webdriver.find_element_by_link_text', (['"""Next"""'], {}), "('Next')\n", (4738, 4746), False, 'from selenium import webdriver\n'), ((4827, 4870), 'selenium.webdriver.find_element_by_link_text', 'webdriver.find_element_by_link_text', (['"""Next"""'], {}), "('Next')\n", (4862, 4870), False, 'from selenium import webdriver\n'), ((2874, 2985), 'selenium.webdriver.find_element_by_xpath', 'webdriver.find_element_by_xpath', (['"""/html/body/div[3]/div[2]/div/article/header/div[2]/div[1]/div[2]/button"""'], {}), "(\n '/html/body/div[3]/div[2]/div/article/header/div[2]/div[1]/div[2]/button')\n", (2905, 2985), False, 'from selenium import webdriver\n'), ((4027, 4035), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (4032, 4035), False, 'from time import sleep, strftime\n'), ((4639, 4654), 'random.randint', 'randint', (['(22)', '(28)'], {}), '(22, 28)\n', (4646, 4654), False, 'from random import randint\n'), ((3625, 3744), 'selenium.webdriver.find_element_by_xpath', 'webdriver.find_element_by_xpath', (['"""/html/body/div[3]/div[2]/div/article/div[2]/section[1]/span[2]/button/span"""'], {}), "(\n 
'/html/body/div[3]/div[2]/div/article/div[2]/section[1]/span[2]/button/span'\n )\n", (3656, 3744), False, 'from selenium import webdriver\n'), ((4196, 4204), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (4201, 4204), False, 'from time import sleep, strftime\n'), ((4346, 4354), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (4351, 4354), False, 'from time import sleep, strftime\n'), ((4494, 4502), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (4499, 4502), False, 'from time import sleep, strftime\n')] |
# -*- coding: utf-8 -*-
"""
MagicTelecomAPILib.Models.Account
This file was automatically generated by APIMATIC v2.0 on 06/22/2016
"""
from MagicTelecomAPILib.APIHelper import APIHelper
class Account(object):

    """Implementation of the 'Account' model.

    TODO: type model description here.

    Attributes:
        number (string): TODO: type description here.
        roles (list of string): TODO: type description here.
        email (string): TODO: type description here.
        contact_number (string): TODO: type description here.
        firstname (string): TODO: type description here.
        lastname (string): TODO: type description here.

    """

    # Attribute names of this model; the API property names are identical,
    # so the same tuple drives both the constructor and resolve_names().
    _ATTRIBUTE_NAMES = (
        "number",
        "roles",
        "email",
        "contact_number",
        "firstname",
        "lastname",
    )

    def __init__(self,
                 **kwargs):
        """Constructor for the Account class

        Args:
            **kwargs: Keyword Arguments in order to initialise the
                object. Any of the attributes in this object are able to
                be set through the **kwargs of the constructor. The values
                that can be supplied and their types are as follows::

                    number -- string -- Sets the attribute number
                    roles -- list of string -- Sets the attribute roles
                    email -- string -- Sets the attribute email
                    contact_number -- string -- Sets the attribute contact_number
                    firstname -- string -- Sets the attribute firstname
                    lastname -- string -- Sets the attribute lastname

        """
        # Every known attribute starts out unset.
        for attribute in Account._ATTRIBUTE_NAMES:
            setattr(self, attribute, None)

        # Apply only the keyword arguments that belong to this model;
        # anything unrecognised is silently ignored.
        if kwargs is not None:
            for name, value in kwargs.items():
                if name in Account._ATTRIBUTE_NAMES:
                    setattr(self, name, value)

    def resolve_names(self):
        """Creates a dictionary representation of this object.

        This method converts an object to a dictionary that represents the
        format that the model should be in when passed into an API Request.
        Because of this, the generated dictionary may have different
        property names to that of the model itself.

        Returns:
            dict: The dictionary representing the object.

        """
        # Model property name -> API property name (identical for this model).
        replace_names = {name: name for name in Account._ATTRIBUTE_NAMES}
        return APIHelper.resolve_names(self, replace_names, dict())
"MagicTelecomAPILib.APIHelper.APIHelper.resolve_names"
] | [((3317, 3369), 'MagicTelecomAPILib.APIHelper.APIHelper.resolve_names', 'APIHelper.resolve_names', (['self', 'replace_names', 'retval'], {}), '(self, replace_names, retval)\n', (3340, 3369), False, 'from MagicTelecomAPILib.APIHelper import APIHelper\n')] |
import os
import re
import json
from os.path import expanduser
import zipfile
import datetime
import tensorflow as tf
import numpy as np
from utils import mkdir_p
from inoutdoor_dataset_download import InoutdoorDatasetDownload
from inoutdoor_versions import *
from tf_features import *
from PIL import Image
class InoutdoorDatasetWriter(object):
    """Converts the InOutDoor People RGB-D dataset into TFRecord files.

    The dataset is expected below ``~/dataset/inoutdoorpeoplergbd`` with the
    sub-directories ``Images``, ``Depth``, ``Annotations`` and ``ImageSets``.
    """

    # Template listing every feature key a TFRecord example may carry.
    # The ``None`` values are placeholders -- always take a *copy* before
    # filling it in (the previous implementation mutated this shared dict
    # in place, which corrupted it across calls).
    feature_dict = {
        'image/height': None,
        'image/width': None,
        'image/object/bbox/id': None,
        'image/object/bbox/xmin': None,
        'image/object/bbox/xmax': None,
        'image/object/bbox/ymin': None,
        'image/object/bbox/ymax': None,
        'image/object/bbox/truncated': None,
        'image/object/bbox/occluded': None,
        'image/object/class/label/name': None,
        'image/object/class/label/id': None,
        'image/object/class/label': None,
        'image/format': None,
        'image/id': None,
        'image/source_id': None,
        'image/filename': None,
        # new
        'image/object/class/text': None,
        'image/rgb/encoded': None,
        'image/depth/encoded': None,
        'image/encoded': None,
        'image/depth': None,
        'boxes/length': None,
    }

    def get_image_sets(self):
        """Parse the ``ImageSets`` directory.

        Returns:
            dict mapping each image-set filename (e.g. ``seq0.txt``) to the
            list of image ids it contains (trailing newlines stripped).
        """
        imagesets = dict()
        for f in os.listdir(self.image_set_definition_path):
            # Skip anything that is not a regular file (e.g. sub-directories).
            if not os.path.isfile(os.path.join(
                    self.image_set_definition_path, f)):
                continue
            imagesets[f] = []
            with open(os.path.join(
                    self.image_set_definition_path, f), 'r') as setfile:
                for line in setfile.readlines():
                    imagesets[f].append(
                        line if not line.endswith('\n') else line[:-1]
                    )
        return imagesets

    def __init__(self):
        """Locate the dataset on disk and read the image-set definitions.

        Raises:
            AssertionError: if ``~/dataset/inoutdoorpeoplergbd`` is missing.
            ValueError: if one of the expected sub-directories is missing.
        """
        self.input_path = os.path.join(expanduser('~'), 'dataset', 'inoutdoorpeoplergbd')
        assert (os.path.exists(self.input_path))
        expected_paths = ['Images', 'Depth', 'Annotations', 'ImageSets']
        for path in expected_paths:
            if not os.path.exists(os.path.join(self.input_path, path)):
                raise ValueError('Expected subdirectory {0} does not exist. {1}'.format(
                    path, os.path.join(self.input_path, path))
                )
        self.tracking_path = os.path.join(self.input_path, 'Annotations')
        self.rgb_path = os.path.join(self.input_path, 'Images')
        self.depth_path = os.path.join(self.input_path, 'DepthJet')
        self.image_set_definition_path = os.path.join(self.input_path, 'ImageSets')
        self.dataset_path = self.input_path
        self.image_sets = self.get_image_sets()

    @staticmethod
    def feature_dict_description(type='feature_dict'):
        """
        Get the feature dict. In the default case it is filled with all the keys and the items set to None. If the
        type=reading_shape the shape description required for reading elements from a tfrecord is returned)
        :param type: (anything = returns the feature_dict with empty elements, reading_shape = element description for
        reading the tfrecord files is returned)
        :return:
        """
        # BUG FIX: work on a copy -- the previous version filled the shared
        # class-level dict in place, so a later 'feature_dict' request would
        # also have contained the reading shapes.
        obj = dict(InoutdoorDatasetWriter.feature_dict)
        if type == 'reading_shape':
            obj['image/height'] = tf.FixedLenFeature((), tf.int64, 1)
            obj['image/width'] = tf.FixedLenFeature((), tf.int64, 1)
            obj['image/object/bbox/id'] = tf.VarLenFeature(tf.int64)
            obj['image/object/bbox/xmin'] = tf.VarLenFeature(tf.float32)
            obj['image/object/bbox/xmax'] = tf.VarLenFeature(tf.float32)
            obj['image/object/bbox/ymin'] = tf.VarLenFeature(tf.float32)
            obj['image/object/bbox/ymax'] = tf.VarLenFeature(tf.float32)
            obj['image/object/bbox/truncated'] = tf.VarLenFeature(tf.string)
            obj['image/object/bbox/occluded'] = tf.VarLenFeature(tf.string)
            obj['image/encoded'] = tf.FixedLenFeature((), tf.string, default_value='')
            obj['image/format'] = tf.FixedLenFeature((), tf.string, default_value='')
            obj['image/filename'] = tf.FixedLenFeature((), tf.string, default_value='')
            obj['image/id'] = tf.FixedLenFeature((), tf.string, default_value='')
            obj['image/source_id'] = tf.FixedLenFeature((), tf.string, default_value='')
            obj['image/object/class/label/id'] = tf.VarLenFeature(tf.int64)
            obj['image/object/class/label'] = tf.VarLenFeature(tf.int64)
            obj['image/object/class/label/name'] = tf.VarLenFeature(tf.string)
            #
            obj['image/object/class/label'] = tf.VarLenFeature(tf.int64)
            obj['image/object/class/text'] = tf.FixedLenFeature((), tf.string, default_value='')
            obj['image/rgb/encoded'] = tf.FixedLenFeature((), tf.string, default_value='')
            obj['image/depth/encoded'] = tf.FixedLenFeature((), tf.string, default_value='')
            obj['image/encoded'] = tf.FixedLenFeature((), tf.string, default_value='')
            obj['image/depth'] = tf.FixedLenFeature((), tf.int64, 1)
            obj['boxes/length'] = tf.FixedLenFeature((), tf.int64, 1)
        return obj

    def unzip_file_to_folder(self, filename, folder, remove_file_after_creating=True):
        """Extract the zip archive ``filename`` into ``folder``.

        Args:
            filename: path to an existing zip file.
            folder: existing directory to extract into.
            remove_file_after_creating: delete the archive afterwards.
        """
        assert(os.path.exists(filename) and os.path.isfile(filename))
        assert(os.path.exists(folder) and os.path.isdir(folder))
        with zipfile.ZipFile(filename, 'r') as zf:
            zf.extractall(folder)
        if remove_file_after_creating:
            print('\nRemoving file: {0}'.format(filename))
            # BUG FIX: remove the archive, not the destination folder
            # (os.remove on a directory would raise anyway).
            os.remove(filename)

    def get_image_label_folder(self, fold_type=None, version=None):
        """
        Returns the folder containing all images and the folder containing all label information
        :param fold_type:
        :param version:
        :return: Raises BaseExceptions if expectations are not fulfilled
        """
        download_folder = os.path.join(self.input_path, 'download')
        expansion_images_folder = os.path.join(self.input_path, 'Images')
        expansion_depthjet_folder = os.path.join(self.input_path, 'DepthJet')
        expansion_labels_folder = os.path.join(self.input_path, 'Annotations')

        # Make sure the expansion directories exist.
        if not os.path.exists(expansion_images_folder):
            mkdir_p(expansion_images_folder)
        if not os.path.exists(expansion_depthjet_folder):
            mkdir_p(expansion_depthjet_folder)
        if not os.path.exists(expansion_labels_folder):
            mkdir_p(expansion_labels_folder)
        full_images_path = expansion_images_folder
        full_depthjet_path = expansion_depthjet_folder
        full_labels_path = expansion_labels_folder
        extract_files = True
        # When labels and images are already complete, skip the download check.
        if len(InoutdoorDatasetDownload.filter_files(full_labels_path)) == \
                len(InoutdoorDatasetDownload.filter_files(full_images_path)):
            print('Do not check the download folder. Pictures seem to exist.')
            extract_files = False
        elif os.path.exists(download_folder):
            raise BaseException('not yet implemented')
            # files_in_directory = InoutdoorDatasetDownload.filter_files(
            #     download_folder, False, re.compile('\.zip$'))
            # if len(files_in_directory) < 2:
            #     raise BaseException('Not enough files found in {0}. All files present: {1}'.format(
            #         download_folder, files_in_directory
            #     ))
        else:
            mkdir_p(download_folder)
            raise BaseException('Download folder: {0} did not exist. It had been created. '
                                'Please put images, labels there.'.format(download_folder))
        # unzip the elements
        if extract_files:
            print('Starting to unzip the files')
            raise BaseException('Starting to unzip the files')
        if fold_type == 'test':
            return full_images_path, full_depthjet_path, None
        return full_images_path, full_depthjet_path, full_labels_path

    def _get_boundingboxes(self, annotations_for_picture_id):
        """Extract every bounding box from one picture's annotation dict.

        Returns:
            Nine parallel lists: box id, xmin, xmax, ymin, ymax, label id,
            label name, truncated flag, occluded flag.  All lists are empty
            when ``annotations_for_picture_id`` is None.
        """
        boxid, xmin, xmax, ymin, ymax, label_id, label, truncated, occluded = \
            [], [], [], [], [], [], [], [], []
        if annotations_for_picture_id is None:
            return boxid, xmin, xmax, ymin, ymax, label_id, label, truncated, occluded
        for i, annotated_object in enumerate(annotations_for_picture_id.get('object', [])):
            # Entries without a bounding box are skipped.
            if 'bndbox' not in annotated_object:
                continue
            boxid.append(i)
            xmin.append(float(annotated_object['bndbox']['xmin']))
            xmax.append(float(annotated_object['bndbox']['xmax']))
            ymin.append(float(annotated_object['bndbox']['ymin']))
            ymax.append(float(annotated_object['bndbox']['ymax']))
            label.append(annotated_object['name'])
            # Label ids are 1-based (0 is conventionally the background class).
            label_id.append(INOUTDOOR_LABELS.index(annotated_object['name']) + 1)
            truncated.append(False)
            occluded.append(False)
        return boxid, xmin, xmax, ymin, ymax, label_id, label, truncated, occluded

    def _get_tf_feature_dict(self, image_id, image_path, image_format, annotations):
        """Build the TF feature dict for one example.

        Args:
            image_id: picture id (filename without extension).
            image_path: dict with optional 'rgb' and 'depth' file paths.
            image_format: image file extension, e.g. 'png'.
            annotations: parsed annotation dict or None.
        """
        assert(isinstance(image_path, dict))
        boxid, xmin, xmax, ymin, ymax, label_id, label, truncated, occluded = \
            self._get_boundingboxes(annotations)
        truncated = np.asarray(truncated)
        occluded = np.asarray(occluded)
        # convert things to bytes
        label_bytes = [tf.compat.as_bytes(l) for l in label]

        default_image_path = image_path['rgb'] \
            if image_path.get('rgb', None) is not None \
            else image_path['depth']
        im = Image.open(default_image_path)
        image_width, image_height = im.size
        image_filename = os.path.basename(default_image_path)

        # Normalise box coordinates to [0, 1].
        xmin = [x / float(image_width) for x in xmin]
        xmax = [x / float(image_width) for x in xmax]
        ymin = [y / float(image_height) for y in ymin]
        ymax = [y / float(image_height) for y in ymax]

        image_fileid = re.search(r'^(.*)(\.png)$', image_filename).group(1)
        assert(image_fileid == image_id)

        # BUG FIX: operate on a copy -- the previous version mutated (and
        # deleted keys from) the shared class-level ``feature_dict``.
        tmp_feat_dict = dict(InoutdoorDatasetWriter.feature_dict)
        tmp_feat_dict['image/id'] = bytes_feature(image_fileid)
        tmp_feat_dict['image/source_id'] = bytes_feature(image_fileid)
        tmp_feat_dict['image/height'] = int64_feature(image_height)
        tmp_feat_dict['image/width'] = int64_feature(image_width)
        tmp_feat_dict['image/depth'] = int64_feature([3])
        for key, item in image_path.items():
            if item is None:
                continue
            with open(item, 'rb') as f:
                tmp_feat_dict['image/{0}/encoded'.format(key)] = bytes_feature(f.read())
        tmp_feat_dict['image/format'] = bytes_feature(image_format)
        tmp_feat_dict['image/filename'] = bytes_feature(image_filename)
        tmp_feat_dict['image/object/bbox/id'] = int64_feature(boxid)
        tmp_feat_dict['image/object/bbox/xmin'] = float_feature(xmin)
        tmp_feat_dict['image/object/bbox/xmax'] = float_feature(xmax)
        tmp_feat_dict['image/object/bbox/ymin'] = float_feature(ymin)
        tmp_feat_dict['image/object/bbox/ymax'] = float_feature(ymax)
        tmp_feat_dict['image/object/bbox/truncated'] = bytes_feature(
            truncated.tobytes())
        tmp_feat_dict['image/object/bbox/occluded'] = bytes_feature(
            occluded.tobytes())
        tmp_feat_dict['image/object/class/label/id'] = int64_feature(label_id)
        tmp_feat_dict['image/object/class/label'] = int64_feature(label_id)
        tmp_feat_dict['image/object/class/label/name'] = bytes_feature(
            label_bytes)
        # Drop every feature that was not set for this example.
        items_to_remove = [
            key for key, item in tmp_feat_dict.items() if item is None
        ]
        for it in items_to_remove:
            del tmp_feat_dict[it]
        return tmp_feat_dict

    def _get_tf_feature(self, image_id, image_path, image_format, annotations):
        """Wrap one example's feature dict into a ``tf.train.Features``."""
        feature_dict = self._get_tf_feature_dict(
            image_id, image_path, image_format, annotations)
        return tf.train.Features(feature=feature_dict)

    def write_tfrecord(self, fold_type=None, version=None,
                       max_elements_per_file=1000, maximum_files_to_write=None,
                       write_masks=False):
        """Write one image-set sequence to a series of TFRecord shards.

        Args:
            fold_type: image-set filename, must match ``seq<digit>.txt``.
            version: modality to store: 'rgb' (default when None), 'depth'
                or 'both'.
            max_elements_per_file: examples per TFRecord shard.
            maximum_files_to_write: stop after this many examples
                (None = write everything).
            write_masks: unused; kept for interface compatibility.
        """
        assert(version is None or version in ['rgb', 'depth', 'both'])
        assert(fold_type in self.image_sets.keys())
        assert(fold_type is not None and
               re.match(r'^(seq\d)\.txt$', fold_type))
        if version is None:
            version = 'rgb'
        sequence_type = re.match(r'^(seq\d)\.txt$', fold_type).group(1)
        output_path = os.path.join(self.input_path, 'tfrecord')
        if not os.path.exists(output_path):
            mkdir_p(output_path)
        full_images_path, full_depthjet_path, full_labels_path = \
            self.get_image_label_folder(fold_type, version)

        def get_annotation(picture_id):
            # Parsed YAML annotation for the picture, or None for folds
            # without labels (e.g. the test fold).
            if full_labels_path is None:
                return None
            with open(os.path.join(
                    full_labels_path, picture_id + '.yml'), 'r') as f:
                import yaml
                # safe_load instead of the deprecated/unsafe yaml.load.
                obj = yaml.safe_load(f.read())
                obj_annotation = obj['annotation']
                return obj_annotation

        image_filename_regex = re.compile(r'^(.*)\.(png)$')
        tfrecord_file_id, writer = 0, None
        tfrecord_filename_template = os.path.join(
            output_path,
            'output_modality_{modality}_'
            'sequence_{version}_'
            'split_{{iteration:06d}}.tfrecord'.format(
                modality=version,
                version=sequence_type
            ))
        with tf.Session() as sess:
            sess.run(tf.global_variables_initializer())
            files_written = 0
            for f in self.image_sets[fold_type]:
                f = '{0}.png'.format(f)
                # Roll over to a new shard every max_elements_per_file examples.
                if files_written % max_elements_per_file == 0:
                    if writer is not None:
                        writer.close()
                    tfrecord_file_id += 1
                    tmp_filename_tfrecord = tfrecord_filename_template.format(
                        iteration=tfrecord_file_id)
                    print('{0}: Create TFRecord filename: {1} after '
                          'processing {2}/{3} files'.format(
                        str(datetime.datetime.now()), tmp_filename_tfrecord,
                        files_written, len(self.image_sets[fold_type])
                    ))
                    writer = tf.python_io.TFRecordWriter(
                        tmp_filename_tfrecord
                    )
                if files_written % 250 == 0:
                    print('\t{0}: Processed file: {1}/{2}'.format(
                        str(datetime.datetime.now()),
                        files_written, len(self.image_sets[fold_type])))
                # match the filename with the regex
                m = image_filename_regex.search(f)
                if m is None:
                    print('Filename did not match regex: {0}'.format(f))
                    continue
                picture_id = m.group(1)
                picture_id_annotations = get_annotation(picture_id)
                filenames = {'rgb': None, 'depth': None}
                if version == 'rgb' or version is None:
                    filenames['rgb'] = os.path.join(full_images_path, f)
                elif version == 'depth':
                    filenames['depth'] = os.path.join(full_depthjet_path, f)
                else:
                    filenames = {
                        'rgb': os.path.join(full_images_path, f),
                        'depth': os.path.join(full_depthjet_path, f)
                    }
                feature = self._get_tf_feature(
                    picture_id, filenames, m.group(2), picture_id_annotations)
                example = tf.train.Example(features=feature)
                writer.write(example.SerializeToString())
                files_written += 1
                # BUG FIX: the original condition was inverted
                # (`files_written < maximum_files_to_write: break`), which
                # aborted after the very first example.
                if maximum_files_to_write is not None \
                        and files_written >= maximum_files_to_write:
                    break
        # Close the last file
        if writer is not None:
            writer.close()
| [
"zipfile.ZipFile",
"re.compile",
"utils.mkdir_p",
"tensorflow.compat.as_bytes",
"os.remove",
"re.search",
"os.path.exists",
"os.listdir",
"tensorflow.train.Example",
"tensorflow.Session",
"numpy.asarray",
"os.path.isdir",
"tensorflow.python_io.TFRecordWriter",
"os.path.expanduser",
"tens... | [((1270, 1312), 'os.listdir', 'os.listdir', (['self.image_set_definition_path'], {}), '(self.image_set_definition_path)\n', (1280, 1312), False, 'import os\n'), ((1958, 1989), 'os.path.exists', 'os.path.exists', (['self.input_path'], {}), '(self.input_path)\n', (1972, 1989), False, 'import os\n'), ((2372, 2416), 'os.path.join', 'os.path.join', (['self.input_path', '"""Annotations"""'], {}), "(self.input_path, 'Annotations')\n", (2384, 2416), False, 'import os\n'), ((2441, 2480), 'os.path.join', 'os.path.join', (['self.input_path', '"""Images"""'], {}), "(self.input_path, 'Images')\n", (2453, 2480), False, 'import os\n'), ((2507, 2548), 'os.path.join', 'os.path.join', (['self.input_path', '"""DepthJet"""'], {}), "(self.input_path, 'DepthJet')\n", (2519, 2548), False, 'import os\n'), ((2590, 2632), 'os.path.join', 'os.path.join', (['self.input_path', '"""ImageSets"""'], {}), "(self.input_path, 'ImageSets')\n", (2602, 2632), False, 'import os\n'), ((6020, 6061), 'os.path.join', 'os.path.join', (['self.input_path', '"""download"""'], {}), "(self.input_path, 'download')\n", (6032, 6061), False, 'import os\n'), ((6096, 6135), 'os.path.join', 'os.path.join', (['self.input_path', '"""Images"""'], {}), "(self.input_path, 'Images')\n", (6108, 6135), False, 'import os\n'), ((6172, 6213), 'os.path.join', 'os.path.join', (['self.input_path', '"""DepthJet"""'], {}), "(self.input_path, 'DepthJet')\n", (6184, 6213), False, 'import os\n'), ((6248, 6292), 'os.path.join', 'os.path.join', (['self.input_path', '"""Annotations"""'], {}), "(self.input_path, 'Annotations')\n", (6260, 6292), False, 'import os\n'), ((9376, 9397), 'numpy.asarray', 'np.asarray', (['truncated'], {}), '(truncated)\n', (9386, 9397), True, 'import numpy as np\n'), ((9417, 9437), 'numpy.asarray', 'np.asarray', (['occluded'], {}), '(occluded)\n', (9427, 9437), True, 'import numpy as np\n'), ((9692, 9722), 'PIL.Image.open', 'Image.open', (['default_image_path'], {}), '(default_image_path)\n', (9702, 
9722), False, 'from PIL import Image\n'), ((9792, 9828), 'os.path.basename', 'os.path.basename', (['default_image_path'], {}), '(default_image_path)\n', (9808, 9828), False, 'import os\n'), ((12144, 12183), 'tensorflow.train.Features', 'tf.train.Features', ([], {'feature': 'feature_dict'}), '(feature=feature_dict)\n', (12161, 12183), True, 'import tensorflow as tf\n'), ((12734, 12775), 'os.path.join', 'os.path.join', (['self.input_path', '"""tfrecord"""'], {}), "(self.input_path, 'tfrecord')\n", (12746, 12775), False, 'import os\n'), ((13390, 13418), 're.compile', 're.compile', (['"""^(.*)\\\\.(png)$"""'], {}), "('^(.*)\\\\.(png)$')\n", (13400, 13418), False, 'import re\n'), ((1891, 1906), 'os.path.expanduser', 'expanduser', (['"""~"""'], {}), "('~')\n", (1901, 1906), False, 'from os.path import expanduser\n'), ((3350, 3385), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['()', 'tf.int64', '(1)'], {}), '((), tf.int64, 1)\n', (3368, 3385), True, 'import tensorflow as tf\n'), ((3419, 3454), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['()', 'tf.int64', '(1)'], {}), '((), tf.int64, 1)\n', (3437, 3454), True, 'import tensorflow as tf\n'), ((3497, 3523), 'tensorflow.VarLenFeature', 'tf.VarLenFeature', (['tf.int64'], {}), '(tf.int64)\n', (3513, 3523), True, 'import tensorflow as tf\n'), ((3568, 3596), 'tensorflow.VarLenFeature', 'tf.VarLenFeature', (['tf.float32'], {}), '(tf.float32)\n', (3584, 3596), True, 'import tensorflow as tf\n'), ((3641, 3669), 'tensorflow.VarLenFeature', 'tf.VarLenFeature', (['tf.float32'], {}), '(tf.float32)\n', (3657, 3669), True, 'import tensorflow as tf\n'), ((3714, 3742), 'tensorflow.VarLenFeature', 'tf.VarLenFeature', (['tf.float32'], {}), '(tf.float32)\n', (3730, 3742), True, 'import tensorflow as tf\n'), ((3787, 3815), 'tensorflow.VarLenFeature', 'tf.VarLenFeature', (['tf.float32'], {}), '(tf.float32)\n', (3803, 3815), True, 'import tensorflow as tf\n'), ((3865, 3892), 'tensorflow.VarLenFeature', 'tf.VarLenFeature', 
(['tf.string'], {}), '(tf.string)\n', (3881, 3892), True, 'import tensorflow as tf\n'), ((3941, 3968), 'tensorflow.VarLenFeature', 'tf.VarLenFeature', (['tf.string'], {}), '(tf.string)\n', (3957, 3968), True, 'import tensorflow as tf\n'), ((4004, 4055), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['()', 'tf.string'], {'default_value': '""""""'}), "((), tf.string, default_value='')\n", (4022, 4055), True, 'import tensorflow as tf\n'), ((4090, 4141), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['()', 'tf.string'], {'default_value': '""""""'}), "((), tf.string, default_value='')\n", (4108, 4141), True, 'import tensorflow as tf\n'), ((4178, 4229), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['()', 'tf.string'], {'default_value': '""""""'}), "((), tf.string, default_value='')\n", (4196, 4229), True, 'import tensorflow as tf\n'), ((4260, 4311), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['()', 'tf.string'], {'default_value': '""""""'}), "((), tf.string, default_value='')\n", (4278, 4311), True, 'import tensorflow as tf\n'), ((4349, 4400), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['()', 'tf.string'], {'default_value': '""""""'}), "((), tf.string, default_value='')\n", (4367, 4400), True, 'import tensorflow as tf\n'), ((4450, 4476), 'tensorflow.VarLenFeature', 'tf.VarLenFeature', (['tf.int64'], {}), '(tf.int64)\n', (4466, 4476), True, 'import tensorflow as tf\n'), ((4523, 4549), 'tensorflow.VarLenFeature', 'tf.VarLenFeature', (['tf.int64'], {}), '(tf.int64)\n', (4539, 4549), True, 'import tensorflow as tf\n'), ((4601, 4628), 'tensorflow.VarLenFeature', 'tf.VarLenFeature', (['tf.string'], {}), '(tf.string)\n', (4617, 4628), True, 'import tensorflow as tf\n'), ((4689, 4715), 'tensorflow.VarLenFeature', 'tf.VarLenFeature', (['tf.int64'], {}), '(tf.int64)\n', (4705, 4715), True, 'import tensorflow as tf\n'), ((4761, 4812), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['()', 'tf.string'], {'default_value': '""""""'}), 
"((), tf.string, default_value='')\n", (4779, 4812), True, 'import tensorflow as tf\n'), ((4852, 4903), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['()', 'tf.string'], {'default_value': '""""""'}), "((), tf.string, default_value='')\n", (4870, 4903), True, 'import tensorflow as tf\n'), ((4945, 4996), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['()', 'tf.string'], {'default_value': '""""""'}), "((), tf.string, default_value='')\n", (4963, 4996), True, 'import tensorflow as tf\n'), ((5032, 5083), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['()', 'tf.string'], {'default_value': '""""""'}), "((), tf.string, default_value='')\n", (5050, 5083), True, 'import tensorflow as tf\n'), ((5118, 5153), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['()', 'tf.int64', '(1)'], {}), '((), tf.int64, 1)\n', (5136, 5153), True, 'import tensorflow as tf\n'), ((5188, 5223), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['()', 'tf.int64', '(1)'], {}), '((), tf.int64, 1)\n', (5206, 5223), True, 'import tensorflow as tf\n'), ((5347, 5371), 'os.path.exists', 'os.path.exists', (['filename'], {}), '(filename)\n', (5361, 5371), False, 'import os\n'), ((5376, 5400), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (5390, 5400), False, 'import os\n'), ((5417, 5439), 'os.path.exists', 'os.path.exists', (['folder'], {}), '(folder)\n', (5431, 5439), False, 'import os\n'), ((5444, 5465), 'os.path.isdir', 'os.path.isdir', (['folder'], {}), '(folder)\n', (5457, 5465), False, 'import os\n'), ((5480, 5510), 'zipfile.ZipFile', 'zipfile.ZipFile', (['filename', '"""r"""'], {}), "(filename, 'r')\n", (5495, 5510), False, 'import zipfile\n'), ((5662, 5679), 'os.remove', 'os.remove', (['folder'], {}), '(folder)\n', (5671, 5679), False, 'import os\n'), ((6318, 6357), 'os.path.exists', 'os.path.exists', (['expansion_images_folder'], {}), '(expansion_images_folder)\n', (6332, 6357), False, 'import os\n'), ((6371, 6403), 'utils.mkdir_p', 'mkdir_p', 
(['expansion_images_folder'], {}), '(expansion_images_folder)\n', (6378, 6403), False, 'from utils import mkdir_p\n'), ((6419, 6460), 'os.path.exists', 'os.path.exists', (['expansion_depthjet_folder'], {}), '(expansion_depthjet_folder)\n', (6433, 6460), False, 'import os\n'), ((6474, 6508), 'utils.mkdir_p', 'mkdir_p', (['expansion_depthjet_folder'], {}), '(expansion_depthjet_folder)\n', (6481, 6508), False, 'from utils import mkdir_p\n'), ((6524, 6563), 'os.path.exists', 'os.path.exists', (['expansion_labels_folder'], {}), '(expansion_labels_folder)\n', (6538, 6563), False, 'import os\n'), ((6577, 6609), 'utils.mkdir_p', 'mkdir_p', (['expansion_labels_folder'], {}), '(expansion_labels_folder)\n', (6584, 6609), False, 'from utils import mkdir_p\n'), ((7079, 7110), 'os.path.exists', 'os.path.exists', (['download_folder'], {}), '(download_folder)\n', (7093, 7110), False, 'import os\n'), ((9496, 9517), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', (['l'], {}), '(l)\n', (9514, 9517), True, 'import tensorflow as tf\n'), ((12546, 12585), 're.match', 're.match', (['"""^(seq\\\\d)\\\\.txt$"""', 'fold_type'], {}), "('^(seq\\\\d)\\\\.txt$', fold_type)\n", (12554, 12585), False, 'import re\n'), ((12792, 12819), 'os.path.exists', 'os.path.exists', (['output_path'], {}), '(output_path)\n', (12806, 12819), False, 'import os\n'), ((12833, 12853), 'utils.mkdir_p', 'mkdir_p', (['output_path'], {}), '(output_path)\n', (12840, 12853), False, 'from utils import mkdir_p\n'), ((13768, 13780), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (13778, 13780), True, 'import tensorflow as tf\n'), ((6813, 6868), 'inoutdoor_dataset_download.InoutdoorDatasetDownload.filter_files', 'InoutdoorDatasetDownload.filter_files', (['full_labels_path'], {}), '(full_labels_path)\n', (6850, 6868), False, 'from inoutdoor_dataset_download import InoutdoorDatasetDownload\n'), ((6895, 6950), 'inoutdoor_dataset_download.InoutdoorDatasetDownload.filter_files', 'InoutdoorDatasetDownload.filter_files', 
(['full_images_path'], {}), '(full_images_path)\n', (6932, 6950), False, 'from inoutdoor_dataset_download import InoutdoorDatasetDownload\n'), ((7558, 7582), 'utils.mkdir_p', 'mkdir_p', (['download_folder'], {}), '(download_folder)\n', (7565, 7582), False, 'from utils import mkdir_p\n'), ((10072, 10115), 're.search', 're.search', (['"""^(.*)(\\\\.png)$"""', 'image_filename'], {}), "('^(.*)(\\\\.png)$', image_filename)\n", (10081, 10115), False, 'import re\n'), ((12665, 12704), 're.match', 're.match', (['"""^(seq\\\\d)\\\\.txt$"""', 'fold_type'], {}), "('^(seq\\\\d)\\\\.txt$', fold_type)\n", (12673, 12704), False, 'import re\n'), ((13811, 13844), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (13842, 13844), True, 'import tensorflow as tf\n'), ((15984, 16018), 'tensorflow.train.Example', 'tf.train.Example', ([], {'features': 'feature'}), '(features=feature)\n', (16000, 16018), True, 'import tensorflow as tf\n'), ((1384, 1431), 'os.path.join', 'os.path.join', (['self.image_set_definition_path', 'f'], {}), '(self.image_set_definition_path, f)\n', (1396, 1431), False, 'import os\n'), ((1532, 1579), 'os.path.join', 'os.path.join', (['self.image_set_definition_path', 'f'], {}), '(self.image_set_definition_path, f)\n', (1544, 1579), False, 'import os\n'), ((2135, 2170), 'os.path.join', 'os.path.join', (['self.input_path', 'path'], {}), '(self.input_path, path)\n', (2147, 2170), False, 'import os\n'), ((13114, 13165), 'os.path.join', 'os.path.join', (['full_labels_path', "(picture_id + '.yml')"], {}), "(full_labels_path, picture_id + '.yml')\n", (13126, 13165), False, 'import os\n'), ((14632, 14682), 'tensorflow.python_io.TFRecordWriter', 'tf.python_io.TFRecordWriter', (['tmp_filename_tfrecord'], {}), '(tmp_filename_tfrecord)\n', (14659, 14682), True, 'import tensorflow as tf\n'), ((15465, 15498), 'os.path.join', 'os.path.join', (['full_images_path', 'f'], {}), '(full_images_path, f)\n', (15477, 15498), False, 'import 
os\n'), ((2288, 2323), 'os.path.join', 'os.path.join', (['self.input_path', 'path'], {}), '(self.input_path, path)\n', (2300, 2323), False, 'import os\n'), ((15581, 15616), 'os.path.join', 'os.path.join', (['full_depthjet_path', 'f'], {}), '(full_depthjet_path, f)\n', (15593, 15616), False, 'import os\n'), ((15704, 15737), 'os.path.join', 'os.path.join', (['full_images_path', 'f'], {}), '(full_images_path, f)\n', (15716, 15737), False, 'import os\n'), ((15772, 15807), 'os.path.join', 'os.path.join', (['full_depthjet_path', 'f'], {}), '(full_depthjet_path, f)\n', (15784, 15807), False, 'import os\n'), ((14460, 14483), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (14481, 14483), False, 'import datetime\n'), ((14869, 14892), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (14890, 14892), False, 'import datetime\n')] |
from __future__ import print_function, division, absolute_import
import logging
import io
from warnings import warn
from dask import bytes as dbytes
from s3fs import S3FileSystem
from .executor import default_executor, ensure_default_get
logger = logging.getLogger(__name__)
def read_text(fn, keyname=None, encoding='utf-8', errors='strict',
lineterminator='\n', executor=None, fs=None, lazy=True,
collection=True, blocksize=2**27, compression=None, anon=None, **kwargs):
warn("distributed.s3.read_text(...) Moved to "
"dask.bag.read_text('s3://...')")
if keyname is not None:
if not keyname.startswith('/'):
keyname = '/' + keyname
fn = fn + keyname
import dask.bag as db
result = db.read_text('s3://' + fn, encoding=encoding, errors=errors,
linedelimiter=lineterminator, collection=collection,
blocksize=blocksize, compression=compression,
storage_options={'s3': fs, 'anon': anon}, **kwargs)
executor = default_executor(executor)
ensure_default_get(executor)
if not lazy:
if collection:
result = executor.persist(result)
else:
result = executor.compute(result)
return result
def read_csv(path, executor=None, fs=None, lazy=True, collection=True,
lineterminator='\n', blocksize=2**27, storage_options=None, **kwargs):
warn("distributed.s3.read_csv(...) Moved to "
"dask.dataframe.read_csv('s3://...')")
import dask.dataframe as dd
result = dd.read_csv('s3://' + path, collection=collection,
lineterminator=lineterminator, blocksize=blocksize,
storage_options=storage_options, **kwargs)
executor = default_executor(executor)
ensure_default_get(executor)
if not lazy:
if collection:
result = executor.persist(result)
else:
result = executor.compute(result)
return result
| [
"logging.getLogger",
"warnings.warn",
"dask.dataframe.read_csv",
"dask.bag.read_text"
] | [((252, 279), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (269, 279), False, 'import logging\n'), ((499, 576), 'warnings.warn', 'warn', (['"""distributed.s3.read_text(...) Moved to dask.bag.read_text(\'s3://...\')"""'], {}), '("distributed.s3.read_text(...) Moved to dask.bag.read_text(\'s3://...\')")\n', (503, 576), False, 'from warnings import warn\n'), ((758, 979), 'dask.bag.read_text', 'db.read_text', (["('s3://' + fn)"], {'encoding': 'encoding', 'errors': 'errors', 'linedelimiter': 'lineterminator', 'collection': 'collection', 'blocksize': 'blocksize', 'compression': 'compression', 'storage_options': "{'s3': fs, 'anon': anon}"}), "('s3://' + fn, encoding=encoding, errors=errors, linedelimiter=\n lineterminator, collection=collection, blocksize=blocksize, compression\n =compression, storage_options={'s3': fs, 'anon': anon}, **kwargs)\n", (770, 979), True, 'import dask.bag as db\n'), ((1401, 1492), 'warnings.warn', 'warn', (['"""distributed.s3.read_csv(...) Moved to dask.dataframe.read_csv(\'s3://...\')"""'], {}), '(\n "distributed.s3.read_csv(...) Moved to dask.dataframe.read_csv(\'s3://...\')"\n )\n', (1405, 1492), False, 'from warnings import warn\n'), ((1540, 1694), 'dask.dataframe.read_csv', 'dd.read_csv', (["('s3://' + path)"], {'collection': 'collection', 'lineterminator': 'lineterminator', 'blocksize': 'blocksize', 'storage_options': 'storage_options'}), "('s3://' + path, collection=collection, lineterminator=\n lineterminator, blocksize=blocksize, storage_options=storage_options,\n **kwargs)\n", (1551, 1694), True, 'import dask.dataframe as dd\n')] |
import numpy as np
from arbol import aprint
from dexp.processing.utils.scatter_gather_i2v import scatter_gather_i2v
from dexp.utils.backends import Backend
from dexp.utils.testing.testing import execute_both_backends
from dexp.utils.timeit import timeit
@execute_both_backends
def test_scatter_gather_i2v(ndim=3, length_xy=128, splits=4):
xp = Backend.get_xp_module()
rng = np.random.default_rng()
image1 = rng.uniform(0, 1, size=(length_xy,) * ndim)
image2 = rng.uniform(0, 1, size=(length_xy,) * ndim)
def f(x, y):
return xp.stack([x.min(), x.max()]), xp.stack([y.max(), y.mean(), y.min()])
with timeit("scatter_gather(f)"):
chunks = (length_xy // splits,) * ndim
result1, result2 = scatter_gather_i2v(f, (image1, image2), tiles=chunks, margins=8)
assert result1.ndim == ndim + 1
assert result2.ndim == ndim + 1
assert result1.shape[:-1] == result2.shape[:-1]
assert result1.shape[-1] == 2
assert result2.shape[-1] == 3
result1 -= (0, 1) # expected stats from uniform distribution
result1 = Backend.to_numpy(result1)
error = np.linalg.norm(result1.ravel(), ord=1) / result1.size
aprint(f"Error = {error}")
assert error < 0.001
result2 -= (1, 0.5, 0) # expected stats from uniform distribution
result2 = Backend.to_numpy(result2)
error = np.linalg.norm(result2.ravel(), ord=1) / result2.size
aprint(f"Error = {error}")
assert error < 0.001
| [
"dexp.utils.backends.Backend.get_xp_module",
"numpy.random.default_rng",
"dexp.utils.backends.Backend.to_numpy",
"arbol.aprint",
"dexp.processing.utils.scatter_gather_i2v.scatter_gather_i2v",
"dexp.utils.timeit.timeit"
] | [((351, 374), 'dexp.utils.backends.Backend.get_xp_module', 'Backend.get_xp_module', ([], {}), '()\n', (372, 374), False, 'from dexp.utils.backends import Backend\n'), ((385, 408), 'numpy.random.default_rng', 'np.random.default_rng', ([], {}), '()\n', (406, 408), True, 'import numpy as np\n'), ((1079, 1104), 'dexp.utils.backends.Backend.to_numpy', 'Backend.to_numpy', (['result1'], {}), '(result1)\n', (1095, 1104), False, 'from dexp.utils.backends import Backend\n'), ((1175, 1201), 'arbol.aprint', 'aprint', (['f"""Error = {error}"""'], {}), "(f'Error = {error}')\n", (1181, 1201), False, 'from arbol import aprint\n'), ((1313, 1338), 'dexp.utils.backends.Backend.to_numpy', 'Backend.to_numpy', (['result2'], {}), '(result2)\n', (1329, 1338), False, 'from dexp.utils.backends import Backend\n'), ((1409, 1435), 'arbol.aprint', 'aprint', (['f"""Error = {error}"""'], {}), "(f'Error = {error}')\n", (1415, 1435), False, 'from arbol import aprint\n'), ((636, 663), 'dexp.utils.timeit.timeit', 'timeit', (['"""scatter_gather(f)"""'], {}), "('scatter_gather(f)')\n", (642, 663), False, 'from dexp.utils.timeit import timeit\n'), ((739, 803), 'dexp.processing.utils.scatter_gather_i2v.scatter_gather_i2v', 'scatter_gather_i2v', (['f', '(image1, image2)'], {'tiles': 'chunks', 'margins': '(8)'}), '(f, (image1, image2), tiles=chunks, margins=8)\n', (757, 803), False, 'from dexp.processing.utils.scatter_gather_i2v import scatter_gather_i2v\n')] |
import multiprocessing
import traceback
from multiprocessing.pool import Pool
class Color():
_RED = "\033[1;31m"
_BLUE = "\033[1;34m"
_YELLOW = "\033[1;93m"
_CYAN = "\033[1;36xm"
_GREEN = "\033[0;32m"
_RESET = "\033[0;0m"
_BOLD = "\033[;1m"
_REVERSE = "\033[;7m"
@staticmethod
def red(str):
return Color.build(str, Color._RED)
@staticmethod
def green(str):
return Color.build(str, Color._GREEN)
@staticmethod
def yellow(str):
return Color.build(str, Color._YELLOW)
@staticmethod
def build(str, COLOR):
return "%s%s%s" % (COLOR, str, Color._RESET)
# Shortcut to multiprocessing's logger
def error(msg, *args):
return multiprocessing.get_logger().error(msg, *args)
class LogExceptions(object):
def __init__(self, callable):
self.__callable = callable
def __call__(self, *args, **kwargs):
try:
result = self.__callable(*args, **kwargs)
except Exception as e:
# Here we add some debugging help. If multiprocessing's
# debugging is on, it will arrange to log the traceback
error(traceback.format_exc())
# Re-raise the original exception so the Pool worker can
# clean up
raise
# It was fine, give a normal answer
return result
class LoggingPool(Pool):
def apply_async(self, func, args=(), kwds={}, callback=None):
return Pool.apply_async(self, LogExceptions(func), args, kwds, callback)
| [
"traceback.format_exc",
"multiprocessing.get_logger"
] | [((723, 751), 'multiprocessing.get_logger', 'multiprocessing.get_logger', ([], {}), '()\n', (749, 751), False, 'import multiprocessing\n'), ((1163, 1185), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1183, 1185), False, 'import traceback\n')] |
from my_mathematics.simple_math import MyMath
import math
import pytest
@pytest.mark.parametrize('x', [0, 1, 2, 3, 4, 5, 0.01, 3e-7, 232, 213123, 392, 921])
def test_check_sine(x):
assert math.sin(x) == MyMath.sin(x)
def test_check_pi():
assert round(math.pi, 2) == MyMath.pi
@pytest.mark.parametrize('x', [0, 1, 2, 3, 4, 5, 0.01, 3e-7, 232, 213123, 392, 921])
def test_check_sqrt(x):
assert math.sqrt(x) == MyMath.sqrt(x)
@pytest.mark.xfail
@pytest.mark.parametrize('x', [-1, -4, -5, -2])
def test_neg_sqrt(x):
assert math.sqrt(x) == MyMath.sqrt(x)
| [
"math.sqrt",
"my_mathematics.simple_math.MyMath.sin",
"pytest.mark.parametrize",
"my_mathematics.simple_math.MyMath.sqrt",
"math.sin"
] | [((75, 164), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""x"""', '[0, 1, 2, 3, 4, 5, 0.01, 3e-07, 232, 213123, 392, 921]'], {}), "('x', [0, 1, 2, 3, 4, 5, 0.01, 3e-07, 232, 213123, \n 392, 921])\n", (98, 164), False, 'import pytest\n'), ((291, 380), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""x"""', '[0, 1, 2, 3, 4, 5, 0.01, 3e-07, 232, 213123, 392, 921]'], {}), "('x', [0, 1, 2, 3, 4, 5, 0.01, 3e-07, 232, 213123, \n 392, 921])\n", (314, 380), False, 'import pytest\n'), ((463, 509), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""x"""', '[-1, -4, -5, -2]'], {}), "('x', [-1, -4, -5, -2])\n", (486, 509), False, 'import pytest\n'), ((194, 205), 'math.sin', 'math.sin', (['x'], {}), '(x)\n', (202, 205), False, 'import math\n'), ((209, 222), 'my_mathematics.simple_math.MyMath.sin', 'MyMath.sin', (['x'], {}), '(x)\n', (219, 222), False, 'from my_mathematics.simple_math import MyMath\n'), ((410, 422), 'math.sqrt', 'math.sqrt', (['x'], {}), '(x)\n', (419, 422), False, 'import math\n'), ((426, 440), 'my_mathematics.simple_math.MyMath.sqrt', 'MyMath.sqrt', (['x'], {}), '(x)\n', (437, 440), False, 'from my_mathematics.simple_math import MyMath\n'), ((543, 555), 'math.sqrt', 'math.sqrt', (['x'], {}), '(x)\n', (552, 555), False, 'import math\n'), ((559, 573), 'my_mathematics.simple_math.MyMath.sqrt', 'MyMath.sqrt', (['x'], {}), '(x)\n', (570, 573), False, 'from my_mathematics.simple_math import MyMath\n')] |
#
# This file is part of CasADi.
#
# CasADi -- A symbolic framework for dynamic optimization.
# Copyright (C) 2010-2014 <NAME>, <NAME>, <NAME>,
# <NAME>. All rights reserved.
# Copyright (C) 2011-2014 <NAME>
#
# CasADi is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# CasADi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with CasADi; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
#
from casadi import *
import casadi as c
import numpy
import unittest
from types import *
from helpers import *
import pickle
from operator import itemgetter
import sys
scipy_available = True
try:
import scipy.special
from scipy.linalg import expm
except:
scipy_available = False
class Misctests(casadiTestCase):
def test_issue179B(self):
self.message('Regression test #179 (B)')
def calc_sparsity():
x = casadi.SX.sym("x")
f = casadi.Function('f', [x], [x ** 2])
return f.sparsity_jac(0, 0)
def print_sparsity():
sparsity = calc_sparsity()
str(sparsity) # Segfault
print_sparsity()
def test_sanity(self):
DM(Sparsity(4,3,[0,2,2,3],[1,2,1]),[0.738,0.39,0.99])
self.assertRaises(RuntimeError,lambda : DM(Sparsity(4,4,[0,2,2,3],[1,2,1]),[0.738,0.39,0.99]))
self.assertRaises(RuntimeError,lambda : DM(Sparsity(4,3,[0,2,2,12],[1,2,1]),[0.738,0.39,0.99]))
self.assertRaises(RuntimeError,lambda : DM(Sparsity(4,3,[-10,2,2,3],[1,2,1]),[0.738,0.39,0.99]))
self.assertRaises(RuntimeError,lambda : DM(Sparsity(4,3,[0,2,2,3],[8,2,1]),[0.738,0.39,0.99]))
self.assertRaises(RuntimeError,lambda : DM(Sparsity(4,3,[0,2,2,3],[-3,2,1]),[0.738,0.39,0.99]))
self.assertRaises(RuntimeError,lambda : DM(Sparsity(4,3,[0,2,2,3],[1,2,1,2]),[0.738,0.39,0.99]))
self.assertRaises(RuntimeError,lambda : DM(Sparsity(4,3,[0,2,0,3],[1,2,1]),[0.738,0.39,0.99]))
def test_copyconstr_norefcount(self):
self.message("Copy constructor for non-refcounted classes")
x = DM.ones(2,3)
y = DM(x)
x[0,0] = 5
self.assertFalse(id(x)==id(y))
self.assertEqual(x[0,0],5)
self.assertEqual(y[0,0],1)
def test_copyconstr_refcount(self):
self.message("Copy constructor for refcounted classes")
x = Sparsity.diag(4)
y = Sparsity(x)
x.resize(2,8)
self.assertFalse(id(x)==id(y))
self.assertTrue(x.numel(),y.numel())
self.checkarray(x.shape,(2,8),"shape")
self.checkarray(y.shape,(4,4),"shape")
def test_copy_norefcount(self):
self.message("Shallow copy for non-refcounted classes")
import copy
x = DM.ones(2,3)
y = copy.copy(x)
x[0,0] = 5
self.assertFalse(id(x)==id(y))
self.assertEqual(x[0,0],5)
self.assertEqual(y[0,0],1)
def test_copy_refcount(self):
self.message("Shallow copy for refcounted classes")
import copy
x = Sparsity.diag(4)
y = copy.copy(x)
x.resize(2,8)
self.assertFalse(id(x)==id(y))
self.assertTrue(x.numel(),y.numel())
self.checkarray(x.shape,(2,8),"shape")
self.checkarray(y.shape,(4,4),"shape")
def test_deepcopy_norefcount(self):
self.message("Deep copy for non-refcounted classes")
import copy
x = DM.ones(2,3)
y = copy.deepcopy(x)
x[0,0] = 5
self.assertFalse(id(x)==id(y))
self.assertEqual(x[0,0],5)
self.assertEqual(y[0,0],1)
def test_deepcopy_refcount(self):
self.message("Deep copy for refcounted classes")
import copy
x = Sparsity.diag(4)
y = copy.deepcopy(x)
x.resize(2,8)
self.assertFalse(id(x)==id(y))
self.assertTrue(x.numel(),y.numel())
self.checkarray(x.shape,(2,8),"shape")
self.checkarray(y.shape,(4,4),"shape")
@requiresPlugin(nlpsol,"ipopt")
def test_options_introspection(self):
self.message("options introspection")
x=SX.sym("x")
nlp = {'x':x, 'f':x**2}
i = nlpsol('i', "ipopt", nlp)
opts = i.optionNames()
self.assertTrue(isinstance(opts,list))
n = opts[0]
self.assertTrue(type(n)==type(""))
n = "monitor"
d = i.optionDescription(n)
self.assertTrue(type(d)==type(""))
self.assertTrue(not("d"=="N/A"))
d = i.optionTypeName(n)
self.assertEqual(d,"OT_STRINGVECTOR")
#d = i.optionAllowed(n)
@unittest.skipIf(sys.version_info>=(3,0),"too lazy to fix now")
def test_pickling(self):
a = Sparsity.lower(4)
s = pickle.dumps(a)
b = pickle.loads(s)
self.assertTrue(a==b)
a = Sparsity()
s = pickle.dumps(a)
b = pickle.loads(s)
self.assertTrue(a.is_null())
a = IM(Sparsity.lower(4),list(range(10)))
s = pickle.dumps(a)
b = pickle.loads(s)
self.checkarray(a,b)
a = DM(Sparsity.lower(4),list(range(10)))
s = pickle.dumps(a)
b = pickle.loads(s)
self.checkarray(a,b)
@known_bug()
def test_exceptions(self):
try:
nlpsol(123)
self.assertTrue(False)
except NotImplementedError as e:
print(e.message)
assert "nlpsol(str,str,Function,Dict)" in e.message
assert "You have: nlpsol(int)" in e.message
assert "::" not in e.message
assert "std" not in e.message
try:
vertcat(*123)
self.assertTrue(False)
except NotImplementedError as e:
print(e.message)
assert "vertcat(*[SX]" in e.message
assert "vertcat(*[DM" in e.message
assert "You have: vertcat(*int)" in e.message
assert "::" not in e.message
assert "std" not in e.message
try:
substitute(123)
self.assertTrue(False)
except NotImplementedError as e:
print(e.message)
assert "substitute(SX,SX,SX)" in e.message
assert "substitute([SX] ,[SX] ,[SX] )" in e.message
assert "You have: substitute(int)" in e.message
assert "::" not in e.message
assert "std" not in e.message
try:
load_nlpsol(132)
self.assertTrue(False)
except TypeError as e:
print(e.message)
assert "Failed to convert input to str" in e.message
assert "::" not in e.message
assert "std" not in e.message
x=SX.sym("x")
try:
[x]+ x
self.assertTrue(False)
except TypeError as e:
print(e.message)
try:
x + [x]
self.assertTrue(False)
except TypeError as e:
print(e.message)
try:
x.reshape(2)
self.assertTrue(False)
except NotImplementedError as e:
print(e.message)
assert "reshape(SX,(int,int) )" in e.message
try:
x.reshape(("a",2))
self.assertTrue(False)
except NotImplementedError as e:
print(e.message)
assert "You have: reshape((str,int))" in e.message
try:
diagsplit("s")
self.assertTrue(False)
except NotImplementedError as e:
print(e.message)
assert "diagsplit(SX,int)" in e.message
assert "diagsplit(DM ,int)" in e.message
try:
DM("df")
self.assertTrue(False)
except NotImplementedError as e:
print(e.message)
assert " DM (" in e.message
try:
vertcat(*[1,SX.sym('x'),MX.sym('x')])
self.assertTrue(False)
except NotImplementedError as e:
print(e.message)
assert " vertcat(*" in e.message
def test_getscheme(self):
x = SX.sym("x")
p = SX.sym("p")
F = Function('F', [x, p], [x+p, x**2], ['x', 'p'], ['f', 'g'])
fc = F(x=3,p=4)
f = fc['f']
self.checkarray(f,DM([7]))
g = fc['g']
self.checkarray(g,DM([9]))
[f,g] = itemgetter('f','g')(fc)
self.checkarray(f,DM([7]))
self.checkarray(g,DM([9]))
[g,f] = itemgetter('g','f')(fc)
self.checkarray(f,DM([7]))
self.checkarray(g,DM([9]))
def test_assertions(self):
x = MX.sym("x")
z = x**2
z = z.attachAssert(z>3,"x must be larger than 3")
v = sin(z)
f = Function('f', [x],[v])
print(f)
f_out = f(-6)
try :
f_out = f(1)
except Exception as e:
print(str(e))
self.assertTrue("x must be larger than 3" in str(e))
@requires_nlpsol("ipopt")
def test_output(self):
with capture_stdout() as result:
DM([1,2]).print_dense()
assert "2" in result[0]
x=SX.sym("x")
f = {'x':x, 'f':x**2}
solver = nlpsol("solver", "ipopt",f)
with capture_stdout() as result:
solver_out = solver(x0=0)
assert "Number of nonzeros in equality constraint" in result[0]
assert "iter objective inf_pr" in result[0]
with capture_stdout() as result:
try:
solver = nlpsol("solver","foo",f)
except:
pass
assert "casadi_nlpsol_foo" in result[1]
if __name__ == '__main__':
unittest.main()
| [
"pickle.dumps",
"pickle.loads",
"unittest.skipIf",
"copy.deepcopy",
"unittest.main",
"operator.itemgetter",
"copy.copy"
] | [((4795, 4861), 'unittest.skipIf', 'unittest.skipIf', (['(sys.version_info >= (3, 0))', '"""too lazy to fix now"""'], {}), "(sys.version_info >= (3, 0), 'too lazy to fix now')\n", (4810, 4861), False, 'import unittest\n'), ((9116, 9131), 'unittest.main', 'unittest.main', ([], {}), '()\n', (9129, 9131), False, 'import unittest\n'), ((3168, 3180), 'copy.copy', 'copy.copy', (['x'], {}), '(x)\n', (3177, 3180), False, 'import copy\n'), ((3433, 3445), 'copy.copy', 'copy.copy', (['x'], {}), '(x)\n', (3442, 3445), False, 'import copy\n'), ((3771, 3787), 'copy.deepcopy', 'copy.deepcopy', (['x'], {}), '(x)\n', (3784, 3787), False, 'import copy\n'), ((4041, 4057), 'copy.deepcopy', 'copy.deepcopy', (['x'], {}), '(x)\n', (4054, 4057), False, 'import copy\n'), ((4920, 4935), 'pickle.dumps', 'pickle.dumps', (['a'], {}), '(a)\n', (4932, 4935), False, 'import pickle\n'), ((4944, 4959), 'pickle.loads', 'pickle.loads', (['s'], {}), '(s)\n', (4956, 4959), False, 'import pickle\n'), ((5014, 5029), 'pickle.dumps', 'pickle.dumps', (['a'], {}), '(a)\n', (5026, 5029), False, 'import pickle\n'), ((5038, 5053), 'pickle.loads', 'pickle.loads', (['s'], {}), '(s)\n', (5050, 5053), False, 'import pickle\n'), ((5142, 5157), 'pickle.dumps', 'pickle.dumps', (['a'], {}), '(a)\n', (5154, 5157), False, 'import pickle\n'), ((5166, 5181), 'pickle.loads', 'pickle.loads', (['s'], {}), '(s)\n', (5178, 5181), False, 'import pickle\n'), ((5263, 5278), 'pickle.dumps', 'pickle.dumps', (['a'], {}), '(a)\n', (5275, 5278), False, 'import pickle\n'), ((5287, 5302), 'pickle.loads', 'pickle.loads', (['s'], {}), '(s)\n', (5299, 5302), False, 'import pickle\n'), ((7971, 7991), 'operator.itemgetter', 'itemgetter', (['"""f"""', '"""g"""'], {}), "('f', 'g')\n", (7981, 7991), False, 'from operator import itemgetter\n'), ((8069, 8089), 'operator.itemgetter', 'itemgetter', (['"""g"""', '"""f"""'], {}), "('g', 'f')\n", (8079, 8089), False, 'from operator import itemgetter\n')] |
import argparse
import copy
import datetime
import gym
import numpy as np
import itertools
import torch
import csv
import os
import json
from plane_env import Plane
from sac import SAC
from verify import verify_models, generate_agent_simulator
from torch.utils.tensorboard import SummaryWriter
from replay_memory import ReplayMemory
parser = argparse.ArgumentParser(description='PyTorch Soft Actor-Critic Args')
parser.add_argument('--env-name', default="HalfCheetah-v2",
help='Mujoco Gym environment (default: HalfCheetah-v2)')
parser.add_argument('--policy', default="Gaussian",
help='Policy Type: Gaussian | Deterministic (default: Gaussian)')
parser.add_argument('--eval', type=int, default=True,
help='Evaluates a policy a policy every 10 episode (default: True)')
parser.add_argument('--gamma', type=float, default=0.99, metavar='G',
help='discount factor for reward (default: 0.99)')
parser.add_argument('--tau', type=float, default=0.005, metavar='G',
help='target smoothing coefficient(τ) (default: 0.005)')
parser.add_argument('--lr', type=float, default=0.0003, metavar='G',
help='learning rate (default: 0.0003)')
parser.add_argument('--alpha', type=float, default=0.2, metavar='G',
help='Temperature parameter α determines the relative importance of the entropy\
term against the reward (default: 0.2)')
parser.add_argument('--automatic_entropy_tuning', type=bool, default=False, metavar='G',
help='Automaically adjust α (default: False)')
parser.add_argument('--seed', type=int, default=123456, metavar='N',
help='random seed (default: 123456)')
parser.add_argument('--batch_size', type=int, default=256, metavar='N',
help='batch size (default: 256)')
parser.add_argument('--num_steps', type=int, default=1000001, metavar='N',
help='maximum number of steps (default: 1000000)')
parser.add_argument('--hidden_size', type=int, default=256, metavar='N',
help='hidden size (default: 256)')
parser.add_argument('--updates_per_step', type=float, default=1, metavar='N',
help='model updates per simulator step (default: 1)')
parser.add_argument('--start_steps', type=int, default=10000, metavar='N',
help='Steps sampling random actions (default: 10000)')
parser.add_argument('--target_update_interval', type=int, default=1, metavar='N',
help='Value target update per no. of updates per step (default: 1)')
parser.add_argument('--replay_size', type=int, default=100000, metavar='N',
help='size of replay buffer (default: 10000000)')
parser.add_argument('--num_planes', type=int, default=1, metavar='N',
help='number of planes to use in verification (default: 1)')
parser.add_argument('--horizon', type=int, default=10, metavar='N',
help='number of actions to plan ahead before moving on to the next plane')
parser.add_argument('--cuda', action="store_true",
help='run on CUDA (default: False)')
args = parser.parse_args()
# Environment
env = Plane()
torch.manual_seed(args.seed)
np.random.seed(args.seed)
# Agent
# expert_agent = SAC(env.obs_state_len, env.action_space, args)
# expert_agent.load_checkpoint('winning_config_c3/c3_model')
agent = SAC(env.obs_state_len, env.action_space, args, map_input=(env.bspace.img.shape[2], env.bspace.img.shape[0], env.bspace.img.shape[1]))
run_dir = 'runs/{}_SAC_{}_{}_{}'.format(datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S"), args.env_name,
args.policy, "autotune" if args.automatic_entropy_tuning else "")
os.mkdir(run_dir)
reward_file = csv.writer(open(f"{run_dir}/rewards.csv", 'w'), delimiter=',', quoting=csv.QUOTE_MINIMAL, quotechar="|")
reward_file.writerow(['avg_reward', 'crash_rate'])
loss_file = csv.writer(open(f"{run_dir}/training_loss.csv", 'w'), delimiter=',', quoting=csv.QUOTE_MINIMAL, quotechar="|")
loss_file.writerow(['critic1_loss', 'critic2_loss', 'policy_loss', 'ent_loss', 'alpha'])
with open(f'{run_dir}/run_args.cfg', 'w') as conf:
conf.write(json.dumps(vars(args), indent=4, sort_keys=True))
# Memory
memory = ReplayMemory(args.replay_size, args.seed)
# Training Loop
total_numsteps = 0
updates = 0
if args.updates_per_step < 1:
steps_per_update = int(1/args.updates_per_step)
else: steps_per_update = None
for i_episode in itertools.count(1):
episode_reward = 0
episode_steps = 0
done = False
state = env.reset()
while not done:
if args.start_steps > total_numsteps:
action = env.action_space.sample() # Sample random action
else:
action = agent.select_action(state) # Sample action from policy
if len(memory) > args.batch_size:
# Number of updates per step in environment
if steps_per_update:
if episode_steps % steps_per_update == 0:
# Update parameters of all the networks
critic_1_loss, critic_2_loss, policy_loss, ent_loss, alpha = agent.update_parameters(memory, args.batch_size, updates)
loss_file.writerow([critic_1_loss, critic_2_loss, policy_loss, ent_loss, alpha])
updates += 1
else:
for i in range(int(args.updates_per_step)):
# Update parameters of all the networks
critic_1_loss, critic_2_loss, policy_loss, ent_loss, alpha = agent.update_parameters(memory, args.batch_size, updates)
loss_file.writerow([critic_1_loss, critic_2_loss, policy_loss, ent_loss, alpha])
updates += 1
next_state, reward, done, _ = env.step(action) # Step
episode_steps += 1
total_numsteps += 1
episode_reward += reward
# Ignore the "done" signal if it comes from hitting the time horizon.
# (https://github.com/openai/spinningup/blob/master/spinup/algos/sac/sac.py)
mask = 1 if episode_steps == env._max_episode_steps else float(not done)
memory.push(state, action, reward, next_state, mask) # Append transition to memory
state = next_state
if total_numsteps > args.num_steps:
break
print("Episode: {}, total numsteps: {}, episode steps: {}, reward: {}".format(i_episode, total_numsteps, episode_steps, round(episode_reward, 2)))
if i_episode % args.eval == 0 and args.eval != 0:
episodes = 21
simulator = generate_agent_simulator(agent, args.horizon)
avg_reward, _, crashed = verify_models(args.num_planes, episodes, simulator, save_path=f"{run_dir}/{i_episode}_", display=False)
reward_file.writerow([avg_reward, crashed])
print("----------------------------------------")
print("Test Episodes: {}, Total updates {}, Avg. Reward: {}, Crash Rate: {}".format(episodes, updates, round(avg_reward, 5), crashed))
print("----------------------------------------")
agent.save_checkpoint(args.env_name, ckpt_path=f"{run_dir}/{i_episode}_model")
env.close()
| [
"torch.manual_seed",
"verify.generate_agent_simulator",
"argparse.ArgumentParser",
"verify.verify_models",
"plane_env.Plane",
"datetime.datetime.now",
"itertools.count",
"numpy.random.seed",
"os.mkdir",
"sac.SAC",
"replay_memory.ReplayMemory"
] | [((343, 412), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""PyTorch Soft Actor-Critic Args"""'}), "(description='PyTorch Soft Actor-Critic Args')\n", (366, 412), False, 'import argparse\n'), ((3258, 3265), 'plane_env.Plane', 'Plane', ([], {}), '()\n', (3263, 3265), False, 'from plane_env import Plane\n'), ((3267, 3295), 'torch.manual_seed', 'torch.manual_seed', (['args.seed'], {}), '(args.seed)\n', (3284, 3295), False, 'import torch\n'), ((3296, 3321), 'numpy.random.seed', 'np.random.seed', (['args.seed'], {}), '(args.seed)\n', (3310, 3321), True, 'import numpy as np\n'), ((3465, 3603), 'sac.SAC', 'SAC', (['env.obs_state_len', 'env.action_space', 'args'], {'map_input': '(env.bspace.img.shape[2], env.bspace.img.shape[0], env.bspace.img.shape[1])'}), '(env.obs_state_len, env.action_space, args, map_input=(env.bspace.img.\n shape[2], env.bspace.img.shape[0], env.bspace.img.shape[1]))\n', (3468, 3603), False, 'from sac import SAC\n'), ((3816, 3833), 'os.mkdir', 'os.mkdir', (['run_dir'], {}), '(run_dir)\n', (3824, 3833), False, 'import os\n'), ((4352, 4393), 'replay_memory.ReplayMemory', 'ReplayMemory', (['args.replay_size', 'args.seed'], {}), '(args.replay_size, args.seed)\n', (4364, 4393), False, 'from replay_memory import ReplayMemory\n'), ((4572, 4590), 'itertools.count', 'itertools.count', (['(1)'], {}), '(1)\n', (4587, 4590), False, 'import itertools\n'), ((6684, 6729), 'verify.generate_agent_simulator', 'generate_agent_simulator', (['agent', 'args.horizon'], {}), '(agent, args.horizon)\n', (6708, 6729), False, 'from verify import verify_models, generate_agent_simulator\n'), ((6763, 6871), 'verify.verify_models', 'verify_models', (['args.num_planes', 'episodes', 'simulator'], {'save_path': 'f"""{run_dir}/{i_episode}_"""', 'display': '(False)'}), "(args.num_planes, episodes, simulator, save_path=\n f'{run_dir}/{i_episode}_', display=False)\n", (6776, 6871), False, 'from verify import verify_models, generate_agent_simulator\n'), 
((3640, 3663), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3661, 3663), False, 'import datetime\n')] |
from pystac.validation.stac_validator import STACValidator, STACValidationError
from pystac.validation.schema_uri_map import DefaultSchemaUriMap
from pystac import STAC_IO
import json
# jsonschema is an optional dependency: when it is missing the module still
# imports, but CMIP5SchemaSTACValidator refuses to instantiate.
try:
    import jsonschema
except ImportError:
    jsonschema = None
class CMIP5SchemaSTACValidator(STACValidator):
"""Validate STAC based on JSON Schemas.
This validator uses JSON schemas, read from URIs provided by a
:class:`~pystac.__validation.SchemaUriMap`, to validate STAC core
objects and extensions.
Args:
schema_uri_map (SchemaUriMap): The SchemaUriMap that defines where
the validator will retrieve the JSON schemas for __validation.
Defaults to an instance of
:class:`~pystac.__validation.schema_uri_map.DefaultSchemaUriMap`
Note:
This class requires the ``jsonschema`` library to be installed.
"""
def __init__(self, schema_uri_map=None):
if jsonschema is None:
raise Exception('Cannot instantiate, requires jsonschema package')
if schema_uri_map is not None:
self.schema_uri_map = schema_uri_map
else:
self.schema_uri_map = DefaultSchemaUriMap()
self.schema_cache = {}
def get_schema_from_uri(self, schema_uri):
if schema_uri not in self.schema_cache:
s = json.loads(STAC_IO.read_text(schema_uri))
self.schema_cache[schema_uri] = s
schema = self.schema_cache[schema_uri]
resolver = jsonschema.validators.RefResolver(base_uri=schema_uri,
referrer=schema,
store=self.schema_cache)
return (schema, resolver)
def _validate_from_uri(self, stac_dict, schema_uri):
schema, resolver = self.get_schema_from_uri(schema_uri)
jsonschema.validate(instance=stac_dict, schema=schema, resolver=resolver)
for uri in resolver.store:
if uri not in self.schema_cache:
self.schema_cache[uri] = resolver.store[uri]
def _get_error_message(self, schema_uri, stac_object_type, extension_id, href, stac_id):
s = 'Validation failed for {} '.format(stac_object_type)
if href is not None:
s += 'at {} '.format(href)
if stac_id is not None:
s += 'with ID {} '.format(stac_id)
s += 'against schema at {}'.format(schema_uri)
if extension_id is not None:
s += " for STAC extension '{}'".format(extension_id)
return s
def validate_core(self, stac_dict, stac_object_type, stac_version, href=None):
"""Validate a core stac object.
Return value can be None or specific to the implementation.
Args:
stac_dict (dict): Dictionary that is the STAC json of the object.
stac_object_type (str): The stac object type of the object encoded in stac_dict.
One of :class:`~pystac.STACObjectType`.
stac_version (str): The version of STAC to validate the object against.
href (str): Optional HREF of the STAC object being validated.
Returns:
str: URI for the JSON schema that was validated against, or None if
no __validation occurred.
"""
schema_uri = self.schema_uri_map.get_core_schema_uri(stac_object_type, stac_version)
if schema_uri is None:
return None
try:
self._validate_from_uri(stac_dict, schema_uri)
return schema_uri
except jsonschema.exceptions.ValidationError as e:
msg = self._get_error_message(schema_uri, stac_object_type, None, href,
stac_dict.get('id'))
raise STACValidationError(msg, source=e) from e
def validate_extension(self,
stac_dict,
stac_object_type,
stac_version,
extension_id,
href=None):
"""Validate an extension stac object.
Return value can be None or specific to the implementation.
Args:
stac_dict (dict): Dictionary that is the STAC json of the object.
stac_object_type (str): The stac object type of the object encoded in stac_dict.
One of :class:`~pystac.STACObjectType`.
stac_version (str): The version of STAC to validate the object against.
extension_id (str): The extension ID to validate against.
href (str): Optional HREF of the STAC object being validated.
Returns:
str: URI for the JSON schema that was validated against, or None if
no __validation occurred.
"""
schema_uri = self.schema_uri_map.get_extension_schema_uri(extension_id, stac_object_type,
stac_version)
if schema_uri is None:
return None
try:
self._validate_from_uri(stac_dict, schema_uri)
return schema_uri
except jsonschema.exceptions.ValidationError as e:
msg = self._get_error_message(schema_uri, stac_object_type, extension_id, href,
stac_dict.get('id'))
raise STACValidationError(msg, source=e) from e | [
"pystac.validation.schema_uri_map.DefaultSchemaUriMap",
"jsonschema.validators.RefResolver",
"pystac.STAC_IO.read_text",
"jsonschema.validate",
"pystac.validation.stac_validator.STACValidationError"
] | [((1489, 1589), 'jsonschema.validators.RefResolver', 'jsonschema.validators.RefResolver', ([], {'base_uri': 'schema_uri', 'referrer': 'schema', 'store': 'self.schema_cache'}), '(base_uri=schema_uri, referrer=schema,\n store=self.schema_cache)\n', (1522, 1589), False, 'import jsonschema\n'), ((1857, 1930), 'jsonschema.validate', 'jsonschema.validate', ([], {'instance': 'stac_dict', 'schema': 'schema', 'resolver': 'resolver'}), '(instance=stac_dict, schema=schema, resolver=resolver)\n', (1876, 1930), False, 'import jsonschema\n'), ((1167, 1188), 'pystac.validation.schema_uri_map.DefaultSchemaUriMap', 'DefaultSchemaUriMap', ([], {}), '()\n', (1186, 1188), False, 'from pystac.validation.schema_uri_map import DefaultSchemaUriMap\n'), ((1344, 1373), 'pystac.STAC_IO.read_text', 'STAC_IO.read_text', (['schema_uri'], {}), '(schema_uri)\n', (1361, 1373), False, 'from pystac import STAC_IO\n'), ((3769, 3803), 'pystac.validation.stac_validator.STACValidationError', 'STACValidationError', (['msg'], {'source': 'e'}), '(msg, source=e)\n', (3788, 3803), False, 'from pystac.validation.stac_validator import STACValidator, STACValidationError\n'), ((5350, 5384), 'pystac.validation.stac_validator.STACValidationError', 'STACValidationError', (['msg'], {'source': 'e'}), '(msg, source=e)\n', (5369, 5384), False, 'from pystac.validation.stac_validator import STACValidator, STACValidationError\n')] |
import matplotlib.pyplot as plt
import networkx as nx
import random
class Graph:
    """Undirected weighted graph with a Kruskal minimum-spanning-tree solver."""

    def __init__(self, vertex):
        self.v = vertex    # number of vertices, labelled 0 .. vertex-1
        self.graph = []    # edge list: [source, destination, weight]

    def add_edge(self, source, destination, weight):
        """Record an edge between two integer vertex indices."""
        self.graph.append([source, destination, weight])

    def algo(self):
        """Return a minimum spanning tree as a list of [source, dest, weight].

        Kruskal's algorithm: scan edges in ascending weight order and keep
        each edge that joins two different union-find components, until
        v-1 edges have been accepted.  Assumes the graph is connected;
        a disconnected graph exhausts the edge list and raises IndexError.
        """
        parent = list(range(self.v))   # each vertex starts as its own root
        rank = [0] * self.v
        result = []
        i = e = 0
        # Stable sort by weight; ties keep insertion order.
        self.graph = sorted(self.graph, key=lambda item: item[2])
        while e < self.v - 1:
            s, d, w = self.graph[i]
            i += 1
            x = self.find(parent, s)
            y = self.find(parent, d)
            if x != y:
                # Edge connects two components: accept it.
                e += 1
                result.append([s, d, w])
                self.union(parent, rank, x, y)
        return result

    def find(self, p, i):
        """Return the root of i's component.

        Iterative with path compression: every node visited on the walk is
        re-pointed directly at the root, so later finds are near O(1).
        (The previous recursive version was O(chain length) per call and
        could hit Python's recursion limit on long parent chains.)
        """
        root = i
        while p[root] != root:
            root = p[root]
        while p[i] != root:
            p[i], i = root, p[i]
        return root

    def union(self, p, r, x, y):
        """Merge the components containing x and y, by rank."""
        s = self.find(p, x)
        d = self.find(p, y)
        if r[s] < r[d]:
            p[s] = d
        elif r[s] > r[d]:
            p[d] = s
        else:
            # Equal ranks: arbitrarily root at s and bump its rank.
            p[d] = s
            r[s] += 1
def plot(G):
    # Draw the given networkx graph and block until the window is closed.
    # seed=7 makes the spring layout deterministic across calls, so the
    # full graph and its MST are drawn with matching vertex positions.
    pos = nx.spring_layout(G, seed=7)
    nx.draw_networkx_nodes(G, pos, node_size=700)
    # edges: drawn twice on purpose — a solid pass, then a translucent
    # dashed blue overlay on top of it.
    nx.draw_networkx_edges(G, pos, width=6)
    nx.draw_networkx_edges(
        G, pos, width=6, alpha=0.5, edge_color="b", style="dashed"
    )
    # vertex labels
    nx.draw_networkx_labels(G, pos, font_size=20, font_family="sans-serif")
    ax = plt.gca()
    ax.margins(0.08)
    plt.axis("off")
    plt.tight_layout()
    plt.show()
if __name__ == "__main__":
    # Demo: build a random graph (10 vertices, 19-50 random edges), show it,
    # then show the minimum spanning tree Kruskal's algorithm found.
    # NOTE(review): endpoints are drawn independently, so self-loops and
    # duplicate edges are possible and the graph may be disconnected — in
    # that case Graph.algo() runs off the end of its edge list. Confirm
    # whether that is acceptable for this demo.
    graph = Graph(10)
    for i in range(0 ,random.randint(19,50)):
        graph.add_edge(random.randint(0,9),random.randint(0,9), random.randint(0,9))
    result = graph.algo();
    # Full random graph first ...
    G = nx.Graph()
    for u, v, w in graph.graph:
        G.add_edge(u, v, weight=w)
    plot(G)
    # ... then the MST, printing each accepted edge.
    G = nx.Graph()
    for u, v, w in result:
        G.add_edge(u, v, weight=w)
        print("%d - %d: %d" % (u, v, w))
    plot(G)
| [
"matplotlib.pyplot.gca",
"networkx.spring_layout",
"networkx.Graph",
"networkx.draw_networkx_nodes",
"networkx.draw_networkx_labels",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.axis",
"networkx.draw_networkx_edges",
"random.randint",
"matplotlib.pyplot.show"
] | [((1256, 1283), 'networkx.spring_layout', 'nx.spring_layout', (['G'], {'seed': '(7)'}), '(G, seed=7)\n', (1272, 1283), True, 'import networkx as nx\n'), ((1289, 1334), 'networkx.draw_networkx_nodes', 'nx.draw_networkx_nodes', (['G', 'pos'], {'node_size': '(700)'}), '(G, pos, node_size=700)\n', (1311, 1334), True, 'import networkx as nx\n'), ((1357, 1396), 'networkx.draw_networkx_edges', 'nx.draw_networkx_edges', (['G', 'pos'], {'width': '(6)'}), '(G, pos, width=6)\n', (1379, 1396), True, 'import networkx as nx\n'), ((1402, 1489), 'networkx.draw_networkx_edges', 'nx.draw_networkx_edges', (['G', 'pos'], {'width': '(6)', 'alpha': '(0.5)', 'edge_color': '"""b"""', 'style': '"""dashed"""'}), "(G, pos, width=6, alpha=0.5, edge_color='b', style=\n 'dashed')\n", (1424, 1489), True, 'import networkx as nx\n'), ((1522, 1593), 'networkx.draw_networkx_labels', 'nx.draw_networkx_labels', (['G', 'pos'], {'font_size': '(20)', 'font_family': '"""sans-serif"""'}), "(G, pos, font_size=20, font_family='sans-serif')\n", (1545, 1593), True, 'import networkx as nx\n'), ((1606, 1615), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (1613, 1615), True, 'import matplotlib.pyplot as plt\n'), ((1643, 1658), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (1651, 1658), True, 'import matplotlib.pyplot as plt\n'), ((1664, 1682), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (1680, 1682), True, 'import matplotlib.pyplot as plt\n'), ((1688, 1698), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1696, 1698), True, 'import matplotlib.pyplot as plt\n'), ((1934, 1944), 'networkx.Graph', 'nx.Graph', ([], {}), '()\n', (1942, 1944), True, 'import networkx as nx\n'), ((2040, 2050), 'networkx.Graph', 'nx.Graph', ([], {}), '()\n', (2048, 2050), True, 'import networkx as nx\n'), ((1785, 1807), 'random.randint', 'random.randint', (['(19)', '(50)'], {}), '(19, 50)\n', (1799, 1807), False, 'import random\n'), ((1833, 1853), 'random.randint', 
'random.randint', (['(0)', '(9)'], {}), '(0, 9)\n', (1847, 1853), False, 'import random\n'), ((1853, 1873), 'random.randint', 'random.randint', (['(0)', '(9)'], {}), '(0, 9)\n', (1867, 1873), False, 'import random\n'), ((1874, 1894), 'random.randint', 'random.randint', (['(0)', '(9)'], {}), '(0, 9)\n', (1888, 1894), False, 'import random\n')] |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import time
from cinderclient import exceptions as cinder_exception
from novaclient import exceptions as nova_exception
from oslo_concurrency import lockutils
from oslo_concurrency import processutils
from oslo_log import log as logging
from fuxi.common import blockdevice
from fuxi.common import config
from fuxi.common import constants as consts
from fuxi.common import state_monitor
from fuxi.connector import connector
from fuxi import exceptions
from fuxi.i18n import _
from fuxi import utils
# Module-wide oslo.config handle and logger.
CONF = config.CONF
LOG = logging.getLogger(__name__)
class CinderConnector(connector.Connector):
    """Connector that attaches/detaches Cinder volumes via Nova and exposes
    them to Docker through symlinks under consts.VOLUME_LINK_DIR."""

    def __init__(self):
        super(CinderConnector, self).__init__()
        self.cinderclient = utils.get_cinderclient()
        self.novaclient = utils.get_novaclient()

    # Serialize attaches on this host: device detection works by diffing
    # /sys/block before and after, so concurrent attaches would be ambiguous.
    @lockutils.synchronized('openstack-attach-volume')
    def connect_volume(self, volume, **connect_opts):
        """Attach *volume* to this (or the given) server and symlink its
        block device.

        connect_opts may carry 'server_id'; otherwise the local instance
        UUID is used.  Returns {'path': <symlink path>} on success.
        Raises on Nova errors, on device-detection timeout, or NotFound
        when no new device matches the volume's size.
        """
        bdm = blockdevice.BlockerDeviceManager()
        # Snapshot of block devices before the attach, for diffing later.
        ori_devices = bdm.device_scan()
        # Do volume-attach
        try:
            server_id = connect_opts.get('server_id', None)
            if not server_id:
                server_id = utils.get_instance_uuid()
            LOG.info("Start to connect to volume %s", volume)
            nova_volume = self.novaclient.volumes.create_server_volume(
                server_id=server_id,
                volume_id=volume.id,
                device=None)
            # Wait until Cinder reports the volume 'in-use'
            # (transient states: 'available', 'attaching').
            volume_monitor = state_monitor.StateMonitor(
                self.cinderclient,
                nova_volume,
                'in-use',
                ('available', 'attaching',))
            attached_volume = volume_monitor.monitor_cinder_volume()
        except nova_exception.ClientException as ex:
            LOG.error("Attaching volume %(vol)s to server %(s)s "
                      "failed. Error: %(err)s",
                      {'vol': volume.id, 's': server_id, 'err': ex})
            raise
        # Get all devices on host after do volume-attach,
        # and then find attached device.
        LOG.info("After connected to volume, scan the added "
                 "block device on host")
        curr_devices = bdm.device_scan()
        start_time = time.time()
        delta_devices = list(set(curr_devices) - set(ori_devices))
        # Poll until at least one new device shows up or the timeout hits.
        while not delta_devices:
            time.sleep(consts.DEVICE_SCAN_TIME_DELAY)
            curr_devices = bdm.device_scan()
            delta_devices = list(set(curr_devices) - set(ori_devices))
            if time.time() - start_time > consts.DEVICE_SCAN_TIMEOUT:
                msg = _("Could not detect added device with "
                      "limited time")
                raise exceptions.FuxiException(msg)
        LOG.info("Get extra added block device %s", delta_devices)
        for device in delta_devices:
            # Match the new device to the volume by size only.
            # NOTE(review): size collisions between concurrently appearing
            # devices would be ambiguous; the lock above mitigates this.
            if bdm.get_device_size(device) == volume.size:
                # device_scan() returns /sys/block paths; map to /dev.
                device = device.replace('/sys/block', '/dev')
                LOG.info("Find attached device %(dev)s"
                         " for volume %(at)s %(vol)s",
                         {'dev': device, 'at': attached_volume.name,
                          'vol': volume})
                link_path = os.path.join(consts.VOLUME_LINK_DIR, volume.id)
                try:
                    utils.execute('ln', '-s', device,
                                  link_path,
                                  run_as_root=True)
                except processutils.ProcessExecutionError as e:
                    LOG.error("Error happened when create link file for"
                              " block device attached by Nova."
                              " Error: %s", e)
                    raise
                return {'path': link_path}
        LOG.warning("Could not find matched device")
        raise exceptions.NotFound("Not Found Matched Device")

    def disconnect_volume(self, volume, **disconnect_opts):
        """Detach *volume*: remove its symlink (best-effort), ask Nova to
        detach, then wait until Cinder reports it 'available'."""
        try:
            volume = self.cinderclient.volumes.get(volume.id)
        except cinder_exception.ClientException as e:
            LOG.error("Get Volume %s from Cinder failed", volume.id)
            raise
        try:
            link_path = self.get_device_path(volume)
            utils.execute('rm', '-f', link_path, run_as_root=True)
        except processutils.ProcessExecutionError as e:
            # Best-effort cleanup: a missing link should not block detach.
            LOG.warning("Error happened when remove docker volume"
                        " mountpoint directory. Error: %s", e)
        try:
            self.novaclient.volumes.delete_server_volume(
                utils.get_instance_uuid(),
                volume.id)
        except nova_exception.ClientException as e:
            LOG.error("Detaching volume %(vol)s failed. Err: %(err)s",
                      {'vol': volume.id, 'err': e})
            raise
        # Wait for the volume to leave the transient 'in-use'/'detaching'
        # states before reporting success.
        volume_monitor = state_monitor.StateMonitor(self.cinderclient,
                                                    volume,
                                                    'available',
                                                    ('in-use', 'detaching',))
        return volume_monitor.monitor_cinder_volume()

    def get_device_path(self, volume):
        # Symlink path used to expose the volume's device to Docker.
        return os.path.join(consts.VOLUME_LINK_DIR, volume.id)
| [
"fuxi.i18n._",
"fuxi.utils.get_novaclient",
"oslo_concurrency.lockutils.synchronized",
"fuxi.utils.get_cinderclient",
"fuxi.exceptions.NotFound",
"fuxi.common.state_monitor.StateMonitor",
"os.path.join",
"time.sleep",
"fuxi.exceptions.FuxiException",
"fuxi.common.blockdevice.BlockerDeviceManager",... | [((1083, 1110), 'oslo_log.log.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1100, 1110), True, 'from oslo_log import log as logging\n'), ((1337, 1386), 'oslo_concurrency.lockutils.synchronized', 'lockutils.synchronized', (['"""openstack-attach-volume"""'], {}), "('openstack-attach-volume')\n", (1359, 1386), False, 'from oslo_concurrency import lockutils\n'), ((1257, 1281), 'fuxi.utils.get_cinderclient', 'utils.get_cinderclient', ([], {}), '()\n', (1279, 1281), False, 'from fuxi import utils\n'), ((1308, 1330), 'fuxi.utils.get_novaclient', 'utils.get_novaclient', ([], {}), '()\n', (1328, 1330), False, 'from fuxi import utils\n'), ((1455, 1489), 'fuxi.common.blockdevice.BlockerDeviceManager', 'blockdevice.BlockerDeviceManager', ([], {}), '()\n', (1487, 1489), False, 'from fuxi.common import blockdevice\n'), ((2734, 2745), 'time.time', 'time.time', ([], {}), '()\n', (2743, 2745), False, 'import time\n'), ((4322, 4369), 'fuxi.exceptions.NotFound', 'exceptions.NotFound', (['"""Not Found Matched Device"""'], {}), "('Not Found Matched Device')\n", (4341, 4369), False, 'from fuxi import exceptions\n'), ((5328, 5424), 'fuxi.common.state_monitor.StateMonitor', 'state_monitor.StateMonitor', (['self.cinderclient', 'volume', '"""available"""', "('in-use', 'detaching')"], {}), "(self.cinderclient, volume, 'available', (\n 'in-use', 'detaching'))\n", (5354, 5424), False, 'from fuxi.common import state_monitor\n'), ((5686, 5733), 'os.path.join', 'os.path.join', (['consts.VOLUME_LINK_DIR', 'volume.id'], {}), '(consts.VOLUME_LINK_DIR, volume.id)\n', (5698, 5733), False, 'import os\n'), ((1983, 2084), 'fuxi.common.state_monitor.StateMonitor', 'state_monitor.StateMonitor', (['self.cinderclient', 'nova_volume', '"""in-use"""', "('available', 'attaching')"], {}), "(self.cinderclient, nova_volume, 'in-use', (\n 'available', 'attaching'))\n", (2009, 2084), False, 'from fuxi.common import state_monitor\n'), 
((2858, 2899), 'time.sleep', 'time.sleep', (['consts.DEVICE_SCAN_TIME_DELAY'], {}), '(consts.DEVICE_SCAN_TIME_DELAY)\n', (2868, 2899), False, 'import time\n'), ((4726, 4780), 'fuxi.utils.execute', 'utils.execute', (['"""rm"""', '"""-f"""', 'link_path'], {'run_as_root': '(True)'}), "('rm', '-f', link_path, run_as_root=True)\n", (4739, 4780), False, 'from fuxi import utils\n'), ((1689, 1714), 'fuxi.utils.get_instance_uuid', 'utils.get_instance_uuid', ([], {}), '()\n', (1712, 1714), False, 'from fuxi import utils\n'), ((3108, 3160), 'fuxi.i18n._', '_', (['"""Could not detect added device with limited time"""'], {}), "('Could not detect added device with limited time')\n", (3109, 3160), False, 'from fuxi.i18n import _\n'), ((3210, 3239), 'fuxi.exceptions.FuxiException', 'exceptions.FuxiException', (['msg'], {}), '(msg)\n', (3234, 3239), False, 'from fuxi import exceptions\n'), ((3717, 3764), 'os.path.join', 'os.path.join', (['consts.VOLUME_LINK_DIR', 'volume.id'], {}), '(consts.VOLUME_LINK_DIR, volume.id)\n', (3729, 3764), False, 'import os\n'), ((5055, 5080), 'fuxi.utils.get_instance_uuid', 'utils.get_instance_uuid', ([], {}), '()\n', (5078, 5080), False, 'from fuxi import utils\n'), ((3031, 3042), 'time.time', 'time.time', ([], {}), '()\n', (3040, 3042), False, 'import time\n'), ((3806, 3868), 'fuxi.utils.execute', 'utils.execute', (['"""ln"""', '"""-s"""', 'device', 'link_path'], {'run_as_root': '(True)'}), "('ln', '-s', device, link_path, run_as_root=True)\n", (3819, 3868), False, 'from fuxi import utils\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
#The MIT License (MIT)
#
#Copyright (c) 2015 <NAME>
# 2021 <NAME> <EMAIL>
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import logging
import pytz
import time
import json
import re
from datetime import datetime
from ansible import constants as C
from ansible.plugins.callback import CallbackBase
# these are used to provide backwards compat with old plugins that subclass from default
# but still don't use the new config system and/or fail to document the options
# TODO: Change the default of check_mode_markers to True in a future release (2.13)
# (option name, default) pairs mirroring the stdout-callback config options,
# kept for plugins that predate the new config system.
COMPAT_OPTIONS = (('display_skipped_hosts', C.DISPLAY_SKIPPED_HOSTS),
                  ('display_ok_hosts', True),
                  ('show_custom_stats', C.SHOW_CUSTOM_STATS),
                  ('display_failed_stderr', False),
                  ('check_mode_markers', False),)
class CallbackModule(CallbackBase):
    """Ansible (v2) callback plugin that collects task results and bulk-ships
    them to Elasticsearch at the end of the play.

    Environment variables (all optional):
        ELASTICSEARCH_SERVER:   host, defaults to localhost
        ELASTICSEARCH_PORT:     port, defaults to 9200
        ELASTICSEARCH_TIMEOUT:  client timeout in seconds, defaults to 3
        ELASTICSEARCH_INDEX:    index-name prefix, defaults to ansible_logs
                                (a daily index "<prefix>-YYYY.MM.DD" is used)
        ELASTICSEARCH_DOC_ARGS: additional JSON key/value pairs (e.g.
                                {"bar":"abc","foo":"def"}) merged into each
                                document
    """
    CALLBACK_VERSION = 2.0
    CALLBACK_TYPE = 'notification'
    CALLBACK_NAME = 'elasticsearch'
    CALLBACK_NEEDS_WHITELIST = False
    def __init__(self):
        self._last_task_banner = None
        self._task_type_cache = {}
        self._last_task_name = None
        super(CallbackModule, self).__init__()
        # elasticsearch is optional: record whether it imported so later
        # calls can degrade gracefully instead of crashing the play.
        try:
            self.elasticsearch = __import__('elasticsearch')
            self.helpers = __import__('elasticsearch.helpers')
            self.db_import = True
        except ImportError:
            self.db_import = False
            logging.error("Failed to import elasticsearch module. Maybe you can use pip to install!")
        # Optional environment variables.
        # NOTE(review): os.getenv returns a str when the variable is set but
        # the defaults below are ints — Elasticsearch may expect an int
        # timeout/port; confirm and cast if needed.
        self.elasticsearch_host = os.getenv('ELASTICSEARCH_SERVER','localhost')
        self.elasticsearch_port = os.getenv('ELASTICSEARCH_PORT', 9200)
        self.args = os.getenv('ELASTICSEARCH_DOC_ARGS')
        if self.args is not None:
            self.args = json.loads(self.args)
        self.timeout = os.getenv('ELASTICSEARCH_TIMEOUT', 3)
        self.index_name = os.getenv('ELASTICSEARCH_INDEX', "ansible_logs") + "-"+ time.strftime('%Y.%m.%d') # one ES index per day
        # Elasticsearch connection.
        self.es_address = self.elasticsearch_host + ":" + str(self.elasticsearch_port)
        self.es_status = self._connect()
        # Per-run accumulators; run_output is flushed in v2_playbook_on_stats.
        self.run_output = []
        self.taskname = ""
        self.playname = ""
        self.diff = ""
        self.stats = ""
        self.checkmsg = ""
        self.logger = logging.getLogger('ansible logger')
        self.logger.setLevel(logging.ERROR)
    def _connect(self):
        # Open the client and ping it; return True only when the server
        # answers. Any failure is logged and reported as False.
        if self.db_import:
            try:
                self.es = self.elasticsearch.Elasticsearch(self.es_address, timeout=self.timeout)
            except Exception as e:
                logging.error("Failed to connect elasticsearch server '%s'. Exception = %s " % (self.es_address, e))
                return False
            try:
                return self.es.ping()
            except Exception as e:
                logging.error("Failed to get ping from elasticsearch server '%s'. Exception = %s " % (self.es_address, e))
                return False
    def _getTime(self):
        # Timezone-aware UTC timestamp for the document.
        return datetime.utcnow().replace(tzinfo=pytz.utc)
    def _insert(self):
        # Bulk-send everything accumulated in run_output.
        # __import__('elasticsearch.helpers') returns the top-level package,
        # hence the helpers.helpers.bulk double attribute access.
        if self.es_status:
            try:
                result = self.helpers.helpers.bulk(self.es, self.run_output,index=self.index_name)
                if result:
                    return True
            except Exception as e:
                logging.error("Inserting data into elasticsearch 'failed' because %s" % e)
                print("Inserting data into elasticsearch 'failed' because %s" % e)
        return False
    def _print_task_banner(self, task):
        # args can be specified as no_log in several places: in the task or in
        # the argument spec. We can check whether the task is no_log here, but
        # the argument spec is only evaluated on the target machine, which has
        # not run yet at this point.
        #
        # So a config option controls whether args are displayed at all, for
        # users whose stdout is insecure (shoulder surfing, logging stdout
        # straight to a file, etc).
        args = ''
        if not task.no_log and C.DISPLAY_ARGS_TO_STDOUT:
            args = u', '.join(u'%s=%s' % a for a in task.args.items())
            args = u' %s' % args
        prefix = self._task_type_cache.get(task._uuid, 'TASK')
        # Use cached task name
        task_name = self._last_task_name
        if task_name is None:
            task_name = task.get_name().strip()
        checkmsg = ""
        self._display.banner(u"%s [%s%s]%s" % (prefix, task_name, args, checkmsg))
        if self._display.verbosity >= 2:
            path = task.get_path()
            if path:
                self._display.display(u"task path: %s" % path, color=C.COLOR_DEBUG)
        self._last_task_banner = task._uuid
    def process_data(self, status, hostname,other=None, doc_type="ansible-runs"):
        # Build one document for the current task/host and queue it.
        # NOTE(review): the 'other' parameter is accepted but never used here.
        results = {}
        results['hostname'] = hostname
        results['play'] = self.playname
        results['task'] = self.taskname
        results['status'] = status
        results['timestamp'] = self._getTime()
        results['_type'] = doc_type
        results['diff'] = self.diff
        results['check_mode'] = self.checkmsg
        if self.args is not None:
            # Merge the operator-supplied ELASTICSEARCH_DOC_ARGS pairs.
            results.update(self.args)
        self.run_output.append(results)
    def v2_runner_on_ok(self, result):
        # NOTE(review): delegated_vars is computed but never used below.
        status = None
        delegated_vars = result._result.get('_ansible_delegated_vars', None)
        if result._task.action == 'include':
            return
        elif result._result.get('changed', False):
            status = "Changed"
        else:
            status = "Ok"
        if result._task.loop and 'results' in result._result:
            self._process_items(result)
        self.process_data(status, result._host.get_name())
    def v2_runner_on_failed(self, result, ignore_errors=False):
        # Record the failure, keeping only the last line of any traceback.
        results = {}
        results['exception'] = result._host.get_name()
        if result._task.ignore_errors:
            results['ignore_errors'] = "yes"
        if 'exception' in result._result:
            error = result._result['exception'].strip().split('\n')[-1]
            results['error'] = error
        self.process_data("Failed", result._host.get_name(),results,"ansible-failures")
    def v2_runner_on_unreachable(self, result):
        self.process_data("Unreachable", result._host.get_name())
    def v2_playbook_on_task_start(self, task, is_conditional):
        # Remember the task name so later documents can reference it.
        self.taskname = task.get_name().strip()
    def v2_playbook_on_play_start(self, play):
        self.playname = play.get_name().strip()
        if play.check_mode:
            self.checkmsg = "CHECK_MODE"
    def v2_runner_on_skipped(self, result):
        if C.DISPLAY_SKIPPED_HOSTS:
            if result._task.loop and 'results' in result._result:
                self._process_items(result)
            else:
                self.process_data("Skipped", result._host.get_name())
    def v2_playbook_on_stats(self, stats):
        # End of run: flush everything collected so far to Elasticsearch.
        # NOTE(review): the 'stats' argument itself is not shipped.
        self._insert()
    def v2_on_file_diff(self, result):
        # Store the diff with ANSI escape sequences stripped.
        self.diff = re.sub(r"(\x9B|\x1B\[)[0-?]*[ -/]*[@-~]", '', self._get_diff(result._result['diff']))
| [
"logging.getLogger",
"json.loads",
"os.getenv",
"datetime.datetime.utcnow",
"time.strftime",
"logging.error"
] | [((3427, 3473), 'os.getenv', 'os.getenv', (['"""ELASTICSEARCH_SERVER"""', '"""localhost"""'], {}), "('ELASTICSEARCH_SERVER', 'localhost')\n", (3436, 3473), False, 'import os\n'), ((3507, 3544), 'os.getenv', 'os.getenv', (['"""ELASTICSEARCH_PORT"""', '(9200)'], {}), "('ELASTICSEARCH_PORT', 9200)\n", (3516, 3544), False, 'import os\n'), ((3565, 3600), 'os.getenv', 'os.getenv', (['"""ELASTICSEARCH_DOC_ARGS"""'], {}), "('ELASTICSEARCH_DOC_ARGS')\n", (3574, 3600), False, 'import os\n'), ((3704, 3741), 'os.getenv', 'os.getenv', (['"""ELASTICSEARCH_TIMEOUT"""', '(3)'], {}), "('ELASTICSEARCH_TIMEOUT', 3)\n", (3713, 3741), False, 'import os\n'), ((4248, 4283), 'logging.getLogger', 'logging.getLogger', (['"""ansible logger"""'], {}), "('ansible logger')\n", (4265, 4283), False, 'import logging\n'), ((3659, 3680), 'json.loads', 'json.loads', (['self.args'], {}), '(self.args)\n', (3669, 3680), False, 'import json\n'), ((3824, 3849), 'time.strftime', 'time.strftime', (['"""%Y.%m.%d"""'], {}), "('%Y.%m.%d')\n", (3837, 3849), False, 'import time\n'), ((3254, 3348), 'logging.error', 'logging.error', (['"""Failed to import elasticsearch module. Maybe you can use pip to install!"""'], {}), "(\n 'Failed to import elasticsearch module. Maybe you can use pip to install!')\n", (3267, 3348), False, 'import logging\n'), ((3768, 3816), 'os.getenv', 'os.getenv', (['"""ELASTICSEARCH_INDEX"""', '"""ansible_logs"""'], {}), "('ELASTICSEARCH_INDEX', 'ansible_logs')\n", (3777, 3816), False, 'import os\n'), ((4959, 4976), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (4974, 4976), False, 'from datetime import datetime\n'), ((4547, 4657), 'logging.error', 'logging.error', (['("Failed to connect elasticsearch server \'%s\'. Exception = %s " % (self.\n es_address, e))'], {}), '(\n "Failed to connect elasticsearch server \'%s\'. 
Exception = %s " % (self.\n es_address, e))\n', (4560, 4657), False, 'import logging\n'), ((4783, 4898), 'logging.error', 'logging.error', (['("Failed to get ping from elasticsearch server \'%s\'. Exception = %s " % (\n self.es_address, e))'], {}), '(\n "Failed to get ping from elasticsearch server \'%s\'. Exception = %s " %\n (self.es_address, e))\n', (4796, 4898), False, 'import logging\n'), ((5317, 5391), 'logging.error', 'logging.error', (['("Inserting data into elasticsearch \'failed\' because %s" % e)'], {}), '("Inserting data into elasticsearch \'failed\' because %s" % e)\n', (5330, 5391), False, 'import logging\n')] |
from flask import (
Blueprint, flash, g, redirect, render_template, request, url_for
)
from werkzeug.exceptions import abort
from flaskr.blueprints.auth import login_required
from flaskr.schema import DB
from flaskr.schema.post import Post
from flaskr.schema.user import User
# No url_prefix is set, so the 'index' view is registered at '/',
# 'create' at '/create', and so on.
bp = Blueprint('blog', __name__)
@bp.route('/')
def index():
    """Home page: list every post, newest first."""
    query = DB.session.query(Post).order_by(Post.created.desc())
    return render_template('blog/index.html', posts=query.all())
# @login_required wraps the view and checks that the user is authenticated
# before letting the request through.
@bp.route('/create', methods=('GET', 'POST'))
@login_required
def create():
    """Create a new post: show the form on GET, insert on a valid POST."""
    if request.method == 'POST':
        fields = validate_post_form(request)
        if g.error is None:
            # Re-fetch the logged-in user so the post is attached to a
            # session-bound instance.
            author = DB.session.query(User).get(g.user.id)
            DB.session.add(Post(title=fields['title'],
                                body=fields['body'],
                                author=author))
            DB.session.commit()
            return redirect(url_for('blog.index'))
    # GET request, or validation failed (the error was already flashed).
    return render_template('blog/create.html', post=None)
@bp.route('/<int:post_id>', methods=('GET',))
def read(post_id):
    """Display a single post; anyone may view it (no authorship check)."""
    return render_template('blog/read.html',
                           post=get_post(post_id, check_author=False))
@bp.route('/<int:post_id>/update', methods=('GET', 'POST'))
@login_required
def update(post_id):
    """Edit an existing post; only its author may update it."""
    post = get_post(post_id)
    if request.method == 'POST':
        fields = validate_post_form(request)
        if g.error is None:
            # Apply the submitted values and persist them.
            post.title, post.body = fields['title'], fields['body']
            DB.session.commit()
            return redirect(url_for('blog.index'))
    return render_template('blog/update.html', post=post)
@bp.route('/<int:post_id>/delete', methods=('POST',))
@login_required
def delete(post_id):
    """Delete a post (author only) and return to the index."""
    DB.session.delete(get_post(post_id))
    DB.session.commit()
    return redirect(url_for('blog.index'))
# Shared lookup used by update/delete (and read) to avoid repeating the
# query-plus-error-handling in every view.
def get_post(post_id, check_author=True):
    """Fetch a post by id, aborting with an HTTP error when appropriate.

    check_author=True (update/delete) additionally requires the current
    user to be the post's author; read-only views pass False.

    abort() raises a special exception that yields an HTTP status code;
    the optional message replaces the default error text.
    See: https://flask.palletsprojects.com/en/1.1.x/api/#flask.abort
    """
    found = DB.session.query(Post).get(post_id)
    if found is None:
        abort(404, "Post id {0} doesn't exist.".format(post_id))
    if check_author and found.author != g.user:
        abort(403)
    return found
def validate_post_form(request):
    """Extract and validate the title/body fields of the submitted form.

    On a missing required field, g.error is set and flashed and None is
    returned; otherwise g.error is cleared and the field dict is returned.
    """
    title = request.form['title']
    body = request.form['body']
    # Only the first missing field is reported (elif chain).
    if not title:
        g.error = 'Title is required.'
    elif not body:
        g.error = 'Body is required.'
    else:
        g.error = None
    if g.error is None:
        return {'title': title, 'body': body}
    flash(g.error)
    return None
| [
"flask.render_template",
"flask.flash",
"flaskr.schema.DB.session.query",
"flaskr.schema.post.Post.created.desc",
"flaskr.schema.post.Post",
"flask.url_for",
"flaskr.schema.DB.session.add",
"werkzeug.exceptions.abort",
"flaskr.schema.DB.session.delete",
"flask.Blueprint",
"flaskr.schema.DB.sessi... | [((409, 436), 'flask.Blueprint', 'Blueprint', (['"""blog"""', '__name__'], {}), "('blog', __name__)\n", (418, 436), False, 'from flask import Blueprint, flash, g, redirect, render_template, request, url_for\n'), ((617, 664), 'flask.render_template', 'render_template', (['"""blog/index.html"""'], {'posts': 'posts'}), "('blog/index.html', posts=posts)\n", (632, 664), False, 'from flask import Blueprint, flash, g, redirect, render_template, request, url_for\n'), ((1247, 1293), 'flask.render_template', 'render_template', (['"""blog/create.html"""'], {'post': 'None'}), "('blog/create.html', post=None)\n", (1262, 1293), False, 'from flask import Blueprint, flash, g, redirect, render_template, request, url_for\n'), ((1449, 1493), 'flask.render_template', 'render_template', (['"""blog/read.html"""'], {'post': 'post'}), "('blog/read.html', post=post)\n", (1464, 1493), False, 'from flask import Blueprint, flash, g, redirect, render_template, request, url_for\n'), ((1923, 1969), 'flask.render_template', 'render_template', (['"""blog/update.html"""'], {'post': 'post'}), "('blog/update.html', post=post)\n", (1938, 1969), False, 'from flask import Blueprint, flash, g, redirect, render_template, request, url_for\n'), ((2126, 2149), 'flaskr.schema.DB.session.delete', 'DB.session.delete', (['post'], {}), '(post)\n', (2143, 2149), False, 'from flaskr.schema import DB\n'), ((2154, 2173), 'flaskr.schema.DB.session.commit', 'DB.session.commit', ([], {}), '()\n', (2171, 2173), False, 'from flaskr.schema import DB\n'), ((2194, 2215), 'flask.url_for', 'url_for', (['"""blog.index"""'], {}), "('blog.index')\n", (2201, 2215), False, 'from flask import Blueprint, flash, g, redirect, render_template, request, url_for\n'), ((2863, 2873), 'werkzeug.exceptions.abort', 'abort', (['(403)'], {}), '(403)\n', (2868, 2873), False, 'from werkzeug.exceptions import abort\n'), ((3417, 3431), 'flask.flash', 'flash', (['g.error'], {}), '(g.error)\n', (3422, 3431), False, 'from 
flask import Blueprint, flash, g, redirect, render_template, request, url_for\n'), ((1057, 1114), 'flaskr.schema.post.Post', 'Post', ([], {'title': "req['title']", 'body': "req['body']", 'author': 'author'}), "(title=req['title'], body=req['body'], author=author)\n", (1061, 1114), False, 'from flaskr.schema.post import Post\n'), ((1127, 1151), 'flaskr.schema.DB.session.add', 'DB.session.add', (['new_post'], {}), '(new_post)\n', (1141, 1151), False, 'from flaskr.schema import DB\n'), ((1164, 1183), 'flaskr.schema.DB.session.commit', 'DB.session.commit', ([], {}), '()\n', (1181, 1183), False, 'from flaskr.schema import DB\n'), ((1840, 1859), 'flaskr.schema.DB.session.commit', 'DB.session.commit', ([], {}), '()\n', (1857, 1859), False, 'from flaskr.schema import DB\n'), ((2553, 2575), 'flaskr.schema.DB.session.query', 'DB.session.query', (['Post'], {}), '(Post)\n', (2569, 2575), False, 'from flaskr.schema import DB\n'), ((579, 598), 'flaskr.schema.post.Post.created.desc', 'Post.created.desc', ([], {}), '()\n', (596, 598), False, 'from flaskr.schema.post import Post\n'), ((1212, 1233), 'flask.url_for', 'url_for', (['"""blog.index"""'], {}), "('blog.index')\n", (1219, 1233), False, 'from flask import Blueprint, flash, g, redirect, render_template, request, url_for\n'), ((1888, 1909), 'flask.url_for', 'url_for', (['"""blog.index"""'], {}), "('blog.index')\n", (1895, 1909), False, 'from flask import Blueprint, flash, g, redirect, render_template, request, url_for\n'), ((547, 569), 'flaskr.schema.DB.session.query', 'DB.session.query', (['Post'], {}), '(Post)\n', (563, 569), False, 'from flaskr.schema import DB\n'), ((996, 1018), 'flaskr.schema.DB.session.query', 'DB.session.query', (['User'], {}), '(User)\n', (1012, 1018), False, 'from flaskr.schema import DB\n')] |
#!/opt/homebrew/bin/python3
import sys, argparse, pickle, os
sys.dont_write_bytecode = True
from http import client
from box import do_Box_OAuth
from boxsdk import Client
from boxsdk.exception import BoxAPIException
folder_ids = []
RANSOMWARE_KEY= '.deadbolt'
def get_files():
"""get all the files in the specified folders"""
global folder_ids
for folder_id in folder_ids:
items = client.folder(folder_id=folder_id).get_items()
for item in items:
if item.type == 'folder':
folder_ids.append(item.id)
else:
# work only on the ransomware infected files
if (item.name.endswith(RANSOMWARE_KEY)):
file_versions = client.file(item.id).get_previous_versions()
version_count = 0
for i in file_versions:
# incrementing counter
version_count = version_count + 1
print(f'{item.type} {item.id} is named "{item.name} with versions {version_count}"')
def cleanup():
# we are done remove the intermediate files
return
def main(argv):
global oauth, client, folder_ids, RANSOMWARE_KEY
parser = argparse.ArgumentParser(description='Get details on files in folders hit with ransomware.')
parser.add_argument('-t', '--test', action='store_true', help="tests the oauth connection to Box servers")
parser.add_argument("-d", "--folder_id", action='extend', nargs='+', help="folder ID(s) to work on")
parser.add_argument("-r", "--ransomware_ext", action='store', help="ransomware file extension, default is deadbolt")
args = parser.parse_args()
if (args.test):
oauth = do_Box_OAuth()
client = Client(oauth)
user = client.user().get()
print(f'User ID is {user.id}')
sys.exit()
if (args.folder_id):
folder_ids = args.folder_id
if (args.ransomware_ext):
RANSOMWARE_KEY = "." + args.ransomware_ext
oauth = do_Box_OAuth()
client = Client(oauth)
get_files()
print(f'{folder_ids}')
cleanup()
if __name__ == "__main__":
main(sys.argv[1:])
| [
"http.client.folder",
"argparse.ArgumentParser",
"boxsdk.Client",
"http.client.user",
"box.do_Box_OAuth",
"http.client.file",
"sys.exit"
] | [((1225, 1321), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Get details on files in folders hit with ransomware."""'}), "(description=\n 'Get details on files in folders hit with ransomware.')\n", (1248, 1321), False, 'import sys, argparse, pickle, os\n'), ((2014, 2028), 'box.do_Box_OAuth', 'do_Box_OAuth', ([], {}), '()\n', (2026, 2028), False, 'from box import do_Box_OAuth\n'), ((2042, 2055), 'boxsdk.Client', 'Client', (['oauth'], {}), '(oauth)\n', (2048, 2055), False, 'from boxsdk import Client\n'), ((1721, 1735), 'box.do_Box_OAuth', 'do_Box_OAuth', ([], {}), '()\n', (1733, 1735), False, 'from box import do_Box_OAuth\n'), ((1753, 1766), 'boxsdk.Client', 'Client', (['oauth'], {}), '(oauth)\n', (1759, 1766), False, 'from boxsdk import Client\n'), ((1849, 1859), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1857, 1859), False, 'import sys, argparse, pickle, os\n'), ((403, 437), 'http.client.folder', 'client.folder', ([], {'folder_id': 'folder_id'}), '(folder_id=folder_id)\n', (416, 437), False, 'from http import client\n'), ((1782, 1795), 'http.client.user', 'client.user', ([], {}), '()\n', (1793, 1795), False, 'from http import client\n'), ((730, 750), 'http.client.file', 'client.file', (['item.id'], {}), '(item.id)\n', (741, 750), False, 'from http import client\n')] |
from subscenery.scrapper import SubSceneScrapper
from tkinter import Tk, filedialog, Label, Listbox, ACTIVE
from tkinter import ttk
import os
win = Tk()
class Application(Tk):
def __init__(self):
ttk.Button(win, text='Choose a media file', command=self.choose_location_btn).grid(column=0, row=0)
self.label = Label(win, text='')
self.listbox = Listbox(win)
self.listbox.grid(column=0, row=2)
self.add_subtitle_btn = ttk.Button(win, text='Add Subtitle', command=self.add_subtitle)
self.add_subtitle_btn.grid(column=0, row=3)
def choose_location_btn(self):
self.path, name = os.path.split(filedialog.askopenfile(title="Choose a media file").name)
self.scrapper = SubSceneScrapper(name, is_filename=True)
self.subtitles = self.scrapper.get_subtitles()
for language in self.subtitles.keys():
self.listbox.insert(1, language)
def add_subtitle(self):
language = self.listbox.get(ACTIVE)
best_match = self.scrapper.get_best_match_subtitle(language)
self.scrapper.download_subtitle_to_path(best_match, self.path + os.sep)
app = Application()
win.title('SubDown')
win.resizable(0, 0)
win.lift()
win.attributes('-topmost', True)
win.after_idle(win.attributes, '-topmost', False)
win.mainloop()
| [
"tkinter.ttk.Button",
"tkinter.Tk",
"tkinter.Label",
"tkinter.filedialog.askopenfile",
"tkinter.Listbox",
"subscenery.scrapper.SubSceneScrapper"
] | [((149, 153), 'tkinter.Tk', 'Tk', ([], {}), '()\n', (151, 153), False, 'from tkinter import Tk, filedialog, Label, Listbox, ACTIVE\n'), ((332, 351), 'tkinter.Label', 'Label', (['win'], {'text': '""""""'}), "(win, text='')\n", (337, 351), False, 'from tkinter import Tk, filedialog, Label, Listbox, ACTIVE\n'), ((375, 387), 'tkinter.Listbox', 'Listbox', (['win'], {}), '(win)\n', (382, 387), False, 'from tkinter import Tk, filedialog, Label, Listbox, ACTIVE\n'), ((463, 526), 'tkinter.ttk.Button', 'ttk.Button', (['win'], {'text': '"""Add Subtitle"""', 'command': 'self.add_subtitle'}), "(win, text='Add Subtitle', command=self.add_subtitle)\n", (473, 526), False, 'from tkinter import ttk\n'), ((737, 777), 'subscenery.scrapper.SubSceneScrapper', 'SubSceneScrapper', (['name'], {'is_filename': '(True)'}), '(name, is_filename=True)\n', (753, 777), False, 'from subscenery.scrapper import SubSceneScrapper\n'), ((211, 288), 'tkinter.ttk.Button', 'ttk.Button', (['win'], {'text': '"""Choose a media file"""', 'command': 'self.choose_location_btn'}), "(win, text='Choose a media file', command=self.choose_location_btn)\n", (221, 288), False, 'from tkinter import ttk\n'), ((655, 706), 'tkinter.filedialog.askopenfile', 'filedialog.askopenfile', ([], {'title': '"""Choose a media file"""'}), "(title='Choose a media file')\n", (677, 706), False, 'from tkinter import Tk, filedialog, Label, Listbox, ACTIVE\n')] |
"""
Fixed policies to test our sim integration with. These are intended to take
Brain states and return Brain actions.
"""
import random
from typing import Dict
def random_policy(state: Dict = None):
"""
Ignore the state, move randomly.
"""
action = {"hvacON": random.randint(0, 1)}
return action
def coast(state):
"""
Ignore the state, go straight.
"""
action = {"hvacON": 1}
return action
def P_controller(state):
"""
Use only the temperature desired - actual to generate hvac ON or OFF requests
"""
Kp = 0.2
output = Kp * (state["Tset"] - state["Tin"])
if output < 0:
control = 1
else:
control = 0
action = {"hvacON": control}
return action
POLICIES = {"random": random_policy, "coast": coast, "P_controller": P_controller}
| [
"random.randint"
] | [((280, 300), 'random.randint', 'random.randint', (['(0)', '(1)'], {}), '(0, 1)\n', (294, 300), False, 'import random\n')] |
from __future__ import print_function
import sys, codecs, optparse, os
import heapq as heapq
import numpy as np
from heap import *
# pdb.set_trace()
optparser = optparse.OptionParser()
optparser.add_option("-c", "--unigramcounts", dest='counts1w', default=os.path.join('data', 'count_1w.txt'), help="unigram counts")
optparser.add_option("-b", "--bigramcounts", dest='counts2w', default=os.path.join('data', 'count_2w.txt'), help="bigram counts")
optparser.add_option("-i", "--inputfile", dest="input", default=os.path.join('data', 'input'), help="input file to segment")
(opts, _) = optparser.parse_args()
Pw1 = Pdist(opts.counts1w)
Pw2 = Pdist(opts.counts2w)
def bigram(input_filename='data/input', sort_acc_to='log_prob'):
out_file = open('outfile', 'wb')
with open(input_filename) as f:
# iterate over all the lines in the input file
for line in f:
# initialize the dynamic programming table chart and heap
chart = dict()
heap = Heap()
utf8line = unicode(line.strip(), 'utf-8')
"""
Step 1:
Initializing step
Finding each word that matches input at position 0
"""
num_observ = 0
for key in Pw2:
# for all keys in the probability distribution
# check if the sentence starts with this word
if utf8line.startswith(key):
entry = chartEntry("".join(key).encode('utf-8'),
start_pos=0,
end_pos=len(key)-1,
log_prob=np.log2(Pw2("".join(key))),
back_ptr=None,
sort_acc_to=sort_acc_to)
heap.push(entry)
num_observ += 1
"""
Check whether the pattern exists in our learnt data or not.
If it doesn't exist we move forward one character and push the unseen character to the heap
with a smoothed probability 1/N (where N = number of elements in the distribution)
"""
if num_observ == 0:
heap.push(chartEntry("".join(utf8line[0]).encode('utf-8'),
start_pos=0,
end_pos=0,
log_prob=np.log2(Pw1("".join(utf8line[0]))),
back_ptr=None,
sort_acc_to=sort_acc_to))
"""
Start filling the `chart` table iteratively.
"""
while heap:
head = heap.pop() # pop the item with highest log-probability
utf8word = head.get_item('word').decode('utf-8')
startindex = head.get_item('start_pos')
endindex = head.get_item('start_pos') + len(utf8word)-1
if endindex in chart:
# get the previous entry
preventry = chart[endindex]
if head.get_item('log_prob') > preventry.get_item('log_prob'):
chart[endindex] = head
else:
continue
else: # there was no previous entry
chart[endindex] = head
num_observ = 0
# move to the next element in the line
sub_utf8line = utf8line[startindex:]
# print(sub_utf8line)
for key in Pw2:
(u,w) = key.split(' ')
# print("WORD: " + head.get_item('word'))
search_word = key.replace(" ","")
if sub_utf8line.startswith(search_word):
num_observ += 1
# Computing new probability
# Computing p(u)p(w|u)
newp = np.log2(Pw1("".join(u)) * ( (Pw2("".join(key))/Pw1("".join(u))) ))
newentry = chartEntry("".join(w).encode('utf-8'),
start_pos=endindex + 1,
end_pos=startindex + len(key) -1,
log_prob=newp,
back_ptr=head,
sort_acc_to=sort_acc_to)
heap.push(newentry)
"""
Check wether the pattern exist in our learn data or no
If it doesn't exist we move for one character and push that character to the heap
"""
if num_observ == 0 and len(sub_utf8line) > 1:
newentry = chartEntry("".join(utf8line[endindex+1]).encode('utf-8'),
start_pos=endindex+1,
end_pos=endindex+1,
# log_prob=np.log2(smoothed_prob) + head.get_item('log_prob'),
log_prob=np.log2(Pw1("".join(w))) + head.get_item('log_prob'),
back_ptr=head,
sort_acc_to=sort_acc_to)
heap.push(newentry)
finalindex = len(utf8line)-1
if finalindex in chart:
finalentry = chart[finalindex]
"""
Step 3:
Backtracking and printing the output
"""
ptr = finalentry.get_item('back_ptr')
result = finalentry.get_item('word')
while ptr:
# out_file.write(ptr.get_item('word') + ' ')
result = ptr.get_item('word') + ' ' + result
ptr = ptr.get_item('back_ptr')
# out_file.write('\n'.encode('utf-8'))
out_file.write(result+'\n')
print(result)
else:
print(chart)
print('Not Found!')
"""
Running the algorithem
1) First fill the PW using count_1w file
2) Run the baseline algorithem
"""
bigram(input_filename=opts.input)
| [
"os.path.join",
"optparse.OptionParser"
] | [((162, 185), 'optparse.OptionParser', 'optparse.OptionParser', ([], {}), '()\n', (183, 185), False, 'import sys, codecs, optparse, os\n'), ((257, 293), 'os.path.join', 'os.path.join', (['"""data"""', '"""count_1w.txt"""'], {}), "('data', 'count_1w.txt')\n", (269, 293), False, 'import sys, codecs, optparse, os\n'), ((388, 424), 'os.path.join', 'os.path.join', (['"""data"""', '"""count_2w.txt"""'], {}), "('data', 'count_2w.txt')\n", (400, 424), False, 'import sys, codecs, optparse, os\n'), ((512, 541), 'os.path.join', 'os.path.join', (['"""data"""', '"""input"""'], {}), "('data', 'input')\n", (524, 541), False, 'import sys, codecs, optparse, os\n')] |
from google.cloud import automl
client = automl.AutoMlClient()
def get_operation_details(operation_id: str):
response = client._transport.operations_client.get_operation(operation_id)
if response.done:
if response.error.code != 0:
operation_status = "Failed"
error_message = response.error.message
else:
operation_status = "Success"
error_message = ""
else:
operation_status = "In-Progress"
error_message = ""
return {
"operation_id": operation_id,
"operation_completed": response.done,
"status_metadata": operation_status,
"error_message": error_message,
}
| [
"google.cloud.automl.AutoMlClient"
] | [((42, 63), 'google.cloud.automl.AutoMlClient', 'automl.AutoMlClient', ([], {}), '()\n', (61, 63), False, 'from google.cloud import automl\n')] |
import pygame
import time
import sys
from pygame.locals import *
pygame.init()
screenWidth,screenHeight = 1600, 900
screen = pygame.display.set_mode((screenWidth, screenHeight))
pygame.display.set_caption("Turing Machine")
tape = [""]
if len(sys.argv) > 2:
tape = list(sys.argv[2])
for i in range(len(tape)):
if tape[i] == " ":
tape[i] = ""
boxSize = 100
transitionTime = 0.1
program = sys.argv[1]
font = pygame.font.SysFont("Courier", 30)
offset = 0
currentIndex = 0
if len(sys.argv) > 3:
currentIndex = int(sys.argv[3])
startedTransitioningTime = 0
transitioning = 0
tapeLabels = list(font.render(x, 1, (0, 0, 0) ) for x in tape)
tapeLabelDict = {"": font.render("", 1, (0, 0, 0))}
for i, cell in enumerate(tape):
if cell not in tapeLabelDict:
tapeLabelDict[cell] = tapeLabels[i]
state = "start"
rules = {}
with open(program, encoding="utf-8") as transitionTable:
data = [x.strip() for x in transitionTable.read().strip().split("\n")]
for index, i in enumerate(data):
try:
parts = i.split(" = ")
oldState, read = parts[0][2:-1].split(",")
write, arrow, newState = parts[1][1:-1].split(",")
direction = "<" if arrow == "←" else ">"
if oldState not in rules:
rules[oldState] = {}
rules[oldState][read] = [write, direction, newState]
except:
print("Mangled rule: '%s' on line %i" % (i, index+1))
pygame.quit()
sys.exit()
currentRule = "Program terminated"
if state in rules:
if tape[currentIndex] in rules[state]:
write, direction, newState = rules[state][tape[currentIndex]]
arrow = "←" if direction == "<" else "→"
currentRule = "Δ(%s,%s) = (%s,%s,%s)" % (state, tape[currentIndex], write, arrow, newState)
manual = True
moveCount = 0
manualToggleLabel = font.render("Press SPACE to toggle manual mode", 1, (255, 0, 0))
manualLabel = font.render("MANUAL MODE - Press RETURN to advance", 1, (255, 0, 0))
def advance():
global state
global moveCount
global startedTransitioningTime
global transitioning
global tapeLabels
global tapeLabelDict
global tape
global font
global currentRule
if state in rules:
if tape[currentIndex] in rules[state]:
moveCount += 1
write, direction, newState = rules[state][tape[currentIndex]]
transitioning = -1 if direction == "<" else 1
arrow = "←" if direction == "<" else "→"
state = newState
tape[currentIndex] = write
if write not in tapeLabelDict:
tapeLabelDict[write] = font.render(write, 1, (0, 0, 0))
tapeLabels[currentIndex] = tapeLabelDict[write]
startedTransitioningTime = time.time()
ruleLabelDict = {}
stateLabelDict = {}
speedLabelDict = {}
lastTime = time.time()
programStartTime = time.time()
view = 0
viewLabel = font.render("Press T to switch views", 1, (255, 0, 0))
rowCount = 10
miniBoxSize = 50
rowSpacing = 10
while True:
boxCount = screenWidth//boxSize
miniBoxCount = screenWidth//miniBoxSize
boxCount += 4 if boxCount % 2 else 3
deltaTime = time.time()-lastTime
lastTime = time.time()
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
if event.type == KEYDOWN:
if event.key == K_ESCAPE:
pygame.quit()
sys.exit()
elif event.key == K_SPACE:
manual = not manual
if not manual:
advance()
elif event.key == K_t:
view = 1-view
elif event.key == K_LEFT:
transitionTime += 0.05
elif event.key == K_RIGHT:
transitionTime -= 0.05
if transitionTime <= 0:
transitionTime = 0.000000001
if transitioning == 0:
if event.key == K_RETURN and manual:
advance()
screen.fill((255, 255, 255))
screen.blit(manualToggleLabel, (screenWidth-manualToggleLabel.get_width()-10, 10))
if manual:
screen.blit(manualLabel, (screenWidth-manualLabel.get_width()-10, 20+manualToggleLabel.get_height()))
if transitionTime not in speedLabelDict:
speedLabelDict[transitionTime] = font.render("Speed: "+str(round(1/transitionTime, 3)), 1, (255, 0, 0))
speedLabel = speedLabelDict[transitionTime]
screen.blit(speedLabel, (screenWidth-speedLabel.get_width()-10,
30+manualToggleLabel.get_height()+manualLabel.get_height()))
screen.blit(viewLabel, (screenWidth-viewLabel.get_width()-10,
40+manualToggleLabel.get_height()+manualLabel.get_height()+speedLabel.get_height()))
if state not in stateLabelDict:
stateLabelDict[state] = font.render("State: %s" % state, 1, (255, 0, 0))
stateLabel = stateLabelDict[state]
screen.blit(stateLabel, (10, 10))
if currentRule not in ruleLabelDict:
ruleLabelDict[currentRule] = font.render(currentRule, 1, (255, 0, 0))
ruleLabel = ruleLabelDict[currentRule]
screen.blit(ruleLabel, (10, screenHeight-ruleLabel.get_height()-10))
if view == 0:
y = (screenHeight-boxSize)//2
for i in range(-boxCount//2, boxCount//2+1):
x = (screenWidth-boxSize)//2 + i*boxSize + offset
pygame.draw.rect(screen, (0, 0, 0), (x, y, boxSize, boxSize), 1)
tapeIndex = i + currentIndex
if 0 <= tapeIndex < len(tape):
label = tapeLabels[tapeIndex]
screen.blit(label, (x+(boxSize-label.get_width())//2, y+(boxSize-label.get_height())//2))
pygame.draw.rect(screen, (0, 0, 255), ((screenWidth-boxSize)//2, y, boxSize, boxSize), 3)
else:
startY = 70+manualToggleLabel.get_height()+manualLabel.get_height()+speedLabel.get_height()+viewLabel.get_height()
startX = (screenWidth-miniBoxCount*miniBoxSize)/2
i = 0
miniOffset = offset*(miniBoxSize/boxSize)
for row in range(rowCount):
for column in range(miniBoxCount):
y = startY + row * (miniBoxSize + rowSpacing)
x = startX + column * miniBoxSize
if currentIndex == 0 and transitioning == -1:
pygame.draw.rect(screen, (0, 0, 0), (x+miniOffset, y, miniBoxSize, miniBoxSize), 1)
if column == miniBoxCount-1:
pygame.draw.rect(screen, (0, 0, 0), (startX-miniBoxSize+miniOffset,
y,
miniBoxSize,
miniBoxSize), 1)
else:
pygame.draw.rect(screen, (0, 0, 0), (x, y, miniBoxSize, miniBoxSize), 1)
if i < len(tape):
label = tapeLabels[i]
if currentIndex == 0 and transitioning == -1:
screen.blit(label, (x+(miniBoxSize-label.get_width())/2+miniOffset,
y+(miniBoxSize-label.get_height())/2))
if column == miniBoxCount-1 and row < rowCount-1:
screen.blit(label, (startX-miniBoxSize+(miniBoxSize-label.get_width())/2+miniOffset,
y+miniBoxSize+rowSpacing+(miniBoxSize-label.get_height())/2))
else:
screen.blit(label, (x+(miniBoxSize-label.get_width())/2,
y+(miniBoxSize-label.get_height())/2))
i += 1
if currentIndex == 0 and transitioning == -1:
pygame.draw.rect(screen, (0, 0, 255), (startX, startY, miniBoxSize, miniBoxSize), 3)
elif currentIndex < miniBoxCount * rowCount:
pygame.draw.rect(screen, (0, 0, 255), (startX+(currentIndex%miniBoxCount)*miniBoxSize-miniOffset,
startY+(currentIndex//miniBoxCount)*(miniBoxSize+rowSpacing),
miniBoxSize,
miniBoxSize), 3)
if currentIndex%miniBoxCount == miniBoxCount-1 and currentIndex//miniBoxCount < rowCount-1:
pygame.draw.rect(screen, (0, 0, 255), (startX-miniBoxSize-miniOffset,
startY+(currentIndex//miniBoxCount+1)*(miniBoxSize+rowSpacing),
miniBoxSize,
miniBoxSize), 3)
if time.time()-programStartTime > transitionTime and moveCount == 0 and not manual:
advance()
transitionedFor = time.time()-startedTransitioningTime
if transitionedFor >= transitionTime and transitioning != 0:
offset = 0
if currentIndex == 0 and transitioning == -1:
tape = [""] + tape
tapeLabels = [tapeLabelDict[""]] + tapeLabels
else:
currentIndex += transitioning
if currentIndex == len(tape) and transitioning == 1:
tape.append("")
tapeLabels.append(tapeLabelDict[""])
transitioning = 0
currentRule = "Program terminated"
if state in rules:
if tape[currentIndex] in rules[state]:
write, direction, newState = rules[state][tape[currentIndex]]
arrow = "←" if direction == "<" else "→"
currentRule = "Δ(%s,%s) = (%s,%s,%s)" % (state, tape[currentIndex], write, arrow, newState)
if not manual:
advance()
else:
offset = - transitioning * boxSize * transitionedFor //transitionTime
pygame.display.update()
| [
"sys.exit",
"pygame.init",
"pygame.quit",
"pygame.event.get",
"pygame.display.set_mode",
"pygame.draw.rect",
"pygame.display.set_caption",
"pygame.display.update",
"time.time",
"pygame.font.SysFont"
] | [((67, 80), 'pygame.init', 'pygame.init', ([], {}), '()\n', (78, 80), False, 'import pygame\n'), ((129, 181), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(screenWidth, screenHeight)'], {}), '((screenWidth, screenHeight))\n', (152, 181), False, 'import pygame\n'), ((182, 226), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Turing Machine"""'], {}), "('Turing Machine')\n", (208, 226), False, 'import pygame\n'), ((441, 475), 'pygame.font.SysFont', 'pygame.font.SysFont', (['"""Courier"""', '(30)'], {}), "('Courier', 30)\n", (460, 475), False, 'import pygame\n'), ((2855, 2866), 'time.time', 'time.time', ([], {}), '()\n', (2864, 2866), False, 'import time\n'), ((2886, 2897), 'time.time', 'time.time', ([], {}), '()\n', (2895, 2897), False, 'import time\n'), ((3209, 3220), 'time.time', 'time.time', ([], {}), '()\n', (3218, 3220), False, 'import time\n'), ((3238, 3256), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (3254, 3256), False, 'import pygame\n'), ((9915, 9938), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (9936, 9938), False, 'import pygame\n'), ((3173, 3184), 'time.time', 'time.time', ([], {}), '()\n', (3182, 3184), False, 'import time\n'), ((8912, 8923), 'time.time', 'time.time', ([], {}), '()\n', (8921, 8923), False, 'import time\n'), ((1439, 1452), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (1450, 1452), False, 'import pygame\n'), ((1461, 1471), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1469, 1471), False, 'import sys\n'), ((2770, 2781), 'time.time', 'time.time', ([], {}), '()\n', (2779, 2781), False, 'import time\n'), ((3301, 3314), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (3312, 3314), False, 'import pygame\n'), ((3327, 3337), 'sys.exit', 'sys.exit', ([], {}), '()\n', (3335, 3337), False, 'import sys\n'), ((5440, 5504), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', '(0, 0, 0)', '(x, y, boxSize, boxSize)', '(1)'], {}), '(screen, (0, 0, 0), (x, y, boxSize, boxSize), 1)\n', 
(5456, 5504), False, 'import pygame\n'), ((5753, 5850), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', '(0, 0, 255)', '((screenWidth - boxSize) // 2, y, boxSize, boxSize)', '(3)'], {}), '(screen, (0, 0, 255), ((screenWidth - boxSize) // 2, y,\n boxSize, boxSize), 3)\n', (5769, 5850), False, 'import pygame\n'), ((7824, 7912), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', '(0, 0, 255)', '(startX, startY, miniBoxSize, miniBoxSize)', '(3)'], {}), '(screen, (0, 0, 255), (startX, startY, miniBoxSize,\n miniBoxSize), 3)\n', (7840, 7912), False, 'import pygame\n'), ((3426, 3439), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (3437, 3439), False, 'import pygame\n'), ((3456, 3466), 'sys.exit', 'sys.exit', ([], {}), '()\n', (3464, 3466), False, 'import sys\n'), ((7974, 8184), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', '(0, 0, 255)', '(startX + currentIndex % miniBoxCount * miniBoxSize - miniOffset, startY + \n currentIndex // miniBoxCount * (miniBoxSize + rowSpacing), miniBoxSize,\n miniBoxSize)', '(3)'], {}), '(screen, (0, 0, 255), (startX + currentIndex % miniBoxCount *\n miniBoxSize - miniOffset, startY + currentIndex // miniBoxCount * (\n miniBoxSize + rowSpacing), miniBoxSize, miniBoxSize), 3)\n', (7990, 8184), False, 'import pygame\n'), ((8790, 8801), 'time.time', 'time.time', ([], {}), '()\n', (8799, 8801), False, 'import time\n'), ((6375, 6464), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', '(0, 0, 0)', '(x + miniOffset, y, miniBoxSize, miniBoxSize)', '(1)'], {}), '(screen, (0, 0, 0), (x + miniOffset, y, miniBoxSize,\n miniBoxSize), 1)\n', (6391, 6464), False, 'import pygame\n'), ((6858, 6930), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', '(0, 0, 0)', '(x, y, miniBoxSize, miniBoxSize)', '(1)'], {}), '(screen, (0, 0, 0), (x, y, miniBoxSize, miniBoxSize), 1)\n', (6874, 6930), False, 'import pygame\n'), ((8437, 8624), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', '(0, 0, 255)', '(startX - miniBoxSize - miniOffset, startY + 
(currentIndex // miniBoxCount +\n 1) * (miniBoxSize + rowSpacing), miniBoxSize, miniBoxSize)', '(3)'], {}), '(screen, (0, 0, 255), (startX - miniBoxSize - miniOffset, \n startY + (currentIndex // miniBoxCount + 1) * (miniBoxSize + rowSpacing\n ), miniBoxSize, miniBoxSize), 3)\n', (8453, 8624), False, 'import pygame\n'), ((6532, 6640), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', '(0, 0, 0)', '(startX - miniBoxSize + miniOffset, y, miniBoxSize, miniBoxSize)', '(1)'], {}), '(screen, (0, 0, 0), (startX - miniBoxSize + miniOffset, y,\n miniBoxSize, miniBoxSize), 1)\n', (6548, 6640), False, 'import pygame\n')] |
from functools import wraps
from contextlib import contextmanager
from threading import local
thread_local = local()
from surround.django.logging import setupModuleLogger
setupModuleLogger(globals())
class LocalCacheBackend(object):
def __init__(self):
self.backend = {}
def get(self, key):
return self.backend.get(key, None)
def set(self, key, value):
self.backend[key] = value
def cached(func):
name = func.__module__ + '.' + func.__name__
def _get_key(args, kwargs):
return name + ':a:' + ','.join(map(str, args)) + ':kw:' + ','.join(['%s=%s' % (k, v) for k, v in kwargs.items()])
@wraps(func)
def wrapped(*args, **kwargs):
current = get_active()
if current is None:
return func(*args, **kwargs)
key = _get_key(args, kwargs)
cached_value = current.get(key)
if cached_value is not None:
return cached_value
result = func(*args, **kwargs)
current.set(key, result)
return result
def _force(value, args=[], kwargs={}):
key = _get_key(args, kwargs)
current = get_active()
if current is None:
raise Exception('forcing context cache value outside context')
current.set(key, value)
wrapped._force = _force
wrapped._get_key = _get_key
return wrapped
@contextmanager
def make_active(current):
old = getattr(thread_local, 'backend', None)
thread_local.backend = current
try:
yield current
finally:
if old is not None:
thread_local.backend = old
else:
del thread_local.backend
def wrap_with_current(func):
current = get_active()
@wraps(func)
def wrapped(*args, **kwargs):
with make_active(current):
return func(*args, **kwargs)
return wrapped
def wrap_with_activate(func):
@wraps(func)
def wrapped(*args, **kwargs):
with activate():
return func(*args, **kwargs)
return wrapped
def wrap_with_assure_active(func):
@wraps(func)
def wrapped(*args, **kwargs):
with assure_active():
return func(*args, **kwargs)
return wrapped
def activate():
return make_active(LocalCacheBackend())
@contextmanager
def assure_active():
current = get_active()
if current is not None:
yield
else:
with activate():
yield
def deactivate():
return make_active(None)
def get_active():
return getattr(thread_local, 'backend', None)
| [
"threading.local",
"functools.wraps"
] | [((110, 117), 'threading.local', 'local', ([], {}), '()\n', (115, 117), False, 'from threading import local\n'), ((650, 661), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (655, 661), False, 'from functools import wraps\n'), ((1723, 1734), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (1728, 1734), False, 'from functools import wraps\n'), ((1903, 1914), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (1908, 1914), False, 'from functools import wraps\n'), ((2078, 2089), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (2083, 2089), False, 'from functools import wraps\n')] |
# encoding: utf-8
from django.forms.models import model_to_dict
from django.db.models import Model
from django.core.files import File
from django.db.models.fields.files import FieldFile
from rest_framework import serializers, relations, fields as rest_fields
from datetime import datetime
from enum import Enum
from corekit import utils
from decimal import Decimal
import json
import yaml
from collections import OrderedDict
class BaseModelSerializer(serializers.ModelSerializer):
def to_representation(self, instance):
"""
Object instance -> Dict of primitive datatypes.
"""
ret = OrderedDict()
fields = [field for field in self.fields.values()
if not field.write_only]
for field in fields:
try:
attribute = field.get_attribute(instance)
# except SkipField:
except:
continue
if attribute is not None:
represenation = field.to_representation(attribute)
if represenation is None:
# Do not seralize empty objects
continue
if isinstance(represenation, list) and not represenation:
# Do not serialize empty lists
continue
ret[field.field_name] = represenation
return ret
def dump(self):
return BaseModelSerializer.to_json(self.data)
class ExportModelSerializer(serializers.ModelSerializer):
def __init__(self, *args, **kwargs):
self.verbose_field = kwargs.pop('verbose_field', True)
super(ExportModelSerializer, self).__init__(*args, **kwargs)
def to_representation(self, instance):
'''(override)'''
ret = OrderedDict()
fields = self._readable_fields
for field in fields:
# translated field names
if self.verbose_field:
name = u"{}".format(self.Meta.model._meta.get_field(
field.field_name).verbose_name)
else:
name = field.field_name
try:
attribute = field.get_attribute(instance)
except rest_fields.SkipField:
continue
check_for_none = \
attribute.pk if isinstance(attribute, relations.PKOnlyObject) \
else attribute # NOQA
if check_for_none is None:
ret[name] = ''
else:
val = field.to_representation(attribute)
ret[name] = '' if val is None else val
return ret
class BaseObjectSerializer(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, Model):
return model_to_dict(obj)
if isinstance(obj, FieldFile):
return {'url': obj.url, 'name': obj.name}
if isinstance(obj, Enum):
return obj.value
if isinstance(obj, datetime):
return obj.isoformat()
if isinstance(obj, Decimal):
return str(obj)
if isinstance(obj, object):
ex = obj._excludes if hasattr(obj, '_excludes') else {}
vals = obj._customes.copy() if hasattr(obj, '_customs') else {}
vals.update(getattr(obj, '__dict__', {}))
return dict([(k, v) for k, v in vals.items()
if k not in ex and not k.startswith('_') and v])
return super(BaseObjectSerializer, self).default(obj)
@classmethod
def to_json(cls, obj, *args, **kwargs):
return json.dumps(obj, cls=cls, *args, **kwargs)
@classmethod
def to_json_file(cls, obj, name=None, *args, **kwargs):
name = name or u"{}.json".format(cls.__name__)
return File(
utils.contents(cls.to_json(obj, *args, **kwargs)), name=name)
@classmethod
def load_json(cls, jsonstr, *args, **kwargs):
return json.loads(jsonstr, *args, **kwargs)
@classmethod
def to_yaml(cls, obj, *args, **kwargs):
return yaml.safe_dump(obj, *args, **kwargs)
@classmethod
def to_dict(cls, obj, *args, **kwargs):
return json.loads(cls.to_json(obj, *args, **kwargs))
| [
"collections.OrderedDict",
"json.loads",
"yaml.safe_dump",
"json.dumps",
"django.forms.models.model_to_dict"
] | [((623, 636), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (634, 636), False, 'from collections import OrderedDict\n'), ((1769, 1782), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (1780, 1782), False, 'from collections import OrderedDict\n'), ((3570, 3611), 'json.dumps', 'json.dumps', (['obj', '*args'], {'cls': 'cls'}), '(obj, *args, cls=cls, **kwargs)\n', (3580, 3611), False, 'import json\n'), ((3924, 3960), 'json.loads', 'json.loads', (['jsonstr', '*args'], {}), '(jsonstr, *args, **kwargs)\n', (3934, 3960), False, 'import json\n'), ((4038, 4074), 'yaml.safe_dump', 'yaml.safe_dump', (['obj', '*args'], {}), '(obj, *args, **kwargs)\n', (4052, 4074), False, 'import yaml\n'), ((2753, 2771), 'django.forms.models.model_to_dict', 'model_to_dict', (['obj'], {}), '(obj)\n', (2766, 2771), False, 'from django.forms.models import model_to_dict\n')] |
import classad
import collections
import concurrent
import datetime
import htcondor
import logging
import os
import sys
import time
from configparser import NoSectionError, NoOptionError
from . import Executor
logger = logging.getLogger(__name__)
# context in strategy pattern
class HTCondor(Executor):
    """Executor strategy that evaluates particles as HTCondor batch jobs."""

    def __init__(self, config):
        """Read the submit configuration; fall back to a 60 s polling interval.

        Raises nothing: a missing refresh_rate is logged and defaulted.
        """
        super().__init__(config)
        self.ids = []  # cluster ids of the jobs submitted by the current run
        try:
            self.refresh_rate = int(self.submitf['darwin']['refresh_rate'])
        except (KeyError, NoSectionError, NoOptionError):
            self.refresh_rate = 60
            logging.warning('refresh_rate not find, fallback to default: 60s')

    def _coreExecution(self, handler, particles):
        """Submit one job per particle, then block until every job leaves the queue."""
        schedd = htcondor.Schedd()
        conf = self.submitf
        executable = conf['darwin']['executable']
        executable_path = os.path.join(handler.optdir, executable)
        conf['htcondor']['executable'] = executable_path
        if not os.path.exists(executable_path):
            logger.error('executable "{}" not found'.format(executable_path))
            sys.exit(1)
        # secure the job ids from condor
        self.ids = []
        for p in particles:
            arguments = p.coordinate.format()
            formatted_args = ['-{} {}'.format(k, v) for k, v in arguments.items()]
            conf['htcondor']['arguments'] = ' '.join(formatted_args)
            conf['htcondor']['initialdir'] = handler.particlepath(p.name)
            # redirect the htcondor section of the submit file into a dict
            sub = htcondor.Submit(dict(conf.items('htcondor')))
            with schedd.transaction() as txn:
                ads = []
                clusterid = sub.queue(txn, ad_results=ads)
                self.ids.append(clusterid)
                if 'should_transfer_files' in conf['htcondor'] and \
                   conf['htcondor']['should_transfer_files'] in ('YES',):
                    schedd.spool(ads)
        # poll the queue until none of our clusters are listed any more
        req = ' || '.join('(ClusterId == {})'.format(id) for id in self.ids)
        proj = ['ClusterId', 'JobStatus']
        finished = False
        while not finished:
            count = 0
            for data in schedd.xquery(requirements=req, projection=proj):
                count += 1
            if count == 0:
                finished = True
            else:
                time.sleep(self.refresh_rate)
        if 'should_transfer_files' in conf['htcondor'] and \
           conf['htcondor']['should_transfer_files'] in ('YES',):
            for clusterid in self.ids:
                # BUG FIX: was `self._schedd` (attribute never defined — the
                # Schedd handle is the local `schedd`) and
                # `"ClusterId == %d".format(...)`, which never interpolates
                # the id because .format() finds no {} placeholder.
                schedd.retrieve("ClusterId == {}".format(clusterid))

    def _interruptHandler(self):
        """On interrupt, remove every job this executor submitted."""
        self._cleanUp()

    def _cleanUp(self):
        """Remove all clusters submitted by this executor from the queue."""
        schedd = htcondor.Schedd()
        req = ' || '.join('(ClusterId == {})'.format(id) for id in self.ids)
        schedd.act(htcondor.JobAction.Remove, req)
| [
"logging.getLogger",
"os.path.exists",
"os.path.join",
"logging.warning",
"time.sleep",
"htcondor.Schedd",
"sys.exit"
] | [((223, 250), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (240, 250), False, 'import logging\n'), ((729, 746), 'htcondor.Schedd', 'htcondor.Schedd', ([], {}), '()\n', (744, 746), False, 'import htcondor\n'), ((852, 892), 'os.path.join', 'os.path.join', (['handler.optdir', 'executable'], {}), '(handler.optdir, executable)\n', (864, 892), False, 'import os\n'), ((2715, 2732), 'htcondor.Schedd', 'htcondor.Schedd', ([], {}), '()\n', (2730, 2732), False, 'import htcondor\n'), ((965, 996), 'os.path.exists', 'os.path.exists', (['executable_path'], {}), '(executable_path)\n', (979, 996), False, 'import os\n'), ((1088, 1099), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1096, 1099), False, 'import sys\n'), ((594, 660), 'logging.warning', 'logging.warning', (['"""refresh_rate not find, fallback to default: 60s"""'], {}), "('refresh_rate not find, fallback to default: 60s')\n", (609, 660), False, 'import logging\n'), ((2338, 2367), 'time.sleep', 'time.sleep', (['self.refresh_rate'], {}), '(self.refresh_rate)\n', (2348, 2367), False, 'import time\n')] |
#!/usr/bin/env python3
import os
import sys
import argparse
import logging
from typing import List, Optional, Union, Dict, Tuple
from datetime import datetime, timedelta
from sqlalchemy import create_engine
import rx
import rx.operators as ops
from rx.subject import AsyncSubject, Subject, BehaviorSubject, ReplaySubject
from rx.core.observable import Observable
from typing import List, Optional, NoReturn
from collections import defaultdict
from dateutil.parser import parse
import numpy as np
import pandas as pd
from ibapi import wrapper
from ibapi.common import TickerId, BarData
from ibapi.client import EClient
from ibapi.contract import Contract
from ibapi.utils import iswrapper
ContractList = List[Contract]
BarDataList = List[BarData]
OptionalDate = Optional[datetime]
def make_download_path(args: argparse.Namespace, contract: Contract) -> str:
    """Make the path used for saving csv files.

    Files are stored under base_directory/<security_type>/<size>/<symbol>/.
    """
    components = [
        args.base_directory,
        args.security_type,
        args.size.replace(" ", "_"),
        contract.symbol,
    ]
    return os.path.sep.join(components)
class DownloadApp(EClient, wrapper.EWrapper):
    """IB API client that exposes historical-data requests as Rx streams.

    Each request gets its own Subject that emits (contract, bar) tuples and
    completes when the API signals historicalDataEnd; connection state is
    published on the `connected` BehaviorSubject.
    """

    def __init__(self):
        EClient.__init__(self, wrapper=self)
        wrapper.EWrapper.__init__(self)
        self.request_id = 0
        self.started = False
        self.next_valid_order_id = None
        # request id -> contract the request was made for
        self.requests: Dict[int, Contract] = {}
        # request id -> subject emitting (contract, bar) tuples for that request
        self._subjects: Dict[int, Subject] = {}
        # emits True on connect, False (then completes) on disconnect
        self.connected: BehaviorSubject = BehaviorSubject(False)

    def next_request_id(self, contract: Contract) -> int:
        """Allocate a fresh request id and remember which contract it belongs to."""
        self.request_id += 1
        self.requests[self.request_id] = contract
        return self.request_id

    def historicalDataRequest(self, contract: Contract, endDateTime:str,
            durationStr:str, barSizeSetting:str, whatToShow:str = "TRADES",
            useRTH:int = 0, formatDate:int = 1, keepUpToDate:bool = False) -> Observable:
        """Issue a reqHistoricalData call and return the stream of its bars."""
        cid = self.next_request_id(contract)
        subject = Subject()
        self._subjects[cid] = subject
        self.reqHistoricalData(
            cid,  # tickerId, used to identify incoming data
            contract,
            endDateTime,  # always go to midnight
            durationStr,  # amount of time to go back
            barSizeSetting,  # bar size
            whatToShow,  # historical data type
            useRTH,  # useRTH (regular trading hours)
            formatDate,  # format the date in yyyyMMdd HH:mm:ss
            keepUpToDate,  # keep up to date after snapshot
            [],  # chart options
        )
        return self._subjects[cid]

    @iswrapper
    def historicalData(self, reqId: int, bar) -> None:
        """Forward one incoming bar to the subject registered for *reqId*."""
        logging.info('historicalData %s, %s' % (reqId, bar))
        print('historicalData %s, %s' % (reqId, bar))
        contract = self.requests[reqId]
        subject = self._subjects[reqId]
        if contract and subject:
            subject.on_next((contract, bar))

    @iswrapper
    def historicalDataEnd(self, reqId: int, start: str, end: str) -> None:
        """Complete the subject for *reqId* once the snapshot is finished."""
        super().historicalDataEnd(reqId, start, end)
        logging.info('historicalDataEnd %s, %s, %s' % (reqId, start, end))
        print('historicalDataEnd %s, %s, %s' % (reqId, start, end))
        subject = self._subjects[reqId]
        subject.on_completed()

    @iswrapper
    def connectAck(self):
        """Publish the successful connection on the `connected` stream."""
        logging.info("Connected")
        self.connected.on_next(True)

    @iswrapper
    def connectionClosed(self):
        """Publish the disconnect and complete the `connected` stream."""
        logging.info("Disconnected")
        self.connected.on_next(False)
        self.connected.on_completed()

    @iswrapper
    def nextValidId(self, order_id: int):
        """Record the next usable order id handed out by the gateway."""
        super().nextValidId(order_id)
        self.next_valid_order_id = order_id
        logging.info(f"nextValidId: {order_id}")

    @iswrapper
    def error(self, req_id: TickerId, error_code: int, error: str):
        """Route API errors to the connection stream (req_id < 0) or the request's subject."""
        super().error(req_id, error_code, error)
        # BUG FIX: the message must be %-formatted with a tuple; previously the
        # lone %-operand raised TypeError and the extra values were passed as
        # separate Exception arguments.
        err = Exception("Error. Id: %s Code %s Msg: %s" % (req_id, error_code, error))
        if req_id < 0:
            logging.debug("Error. Id: %s Code %s Msg: %s", req_id, error_code, error)
            self.connected.on_error(err)
        else:
            logging.error("Error. Id: %s Code %s Msg: %s", req_id, error_code, error)
            # we will always exit on error since data will need to be validated
            # .get avoids a KeyError for errors on ids we never registered
            subject = self._subjects.get(req_id)
            if subject is not None:
                subject.on_error(err)

    def do_connect(self, host: str = "127.0.0.1", port: int = 4001, clientId: int = 0) -> rx.Observable:
        """Connect to the gateway and return the connection-state stream."""
        self.connect(host, port, clientId)
        return self.connected

    def say_bye(self):
        """Log a goodbye and drop the gateway connection."""
        print('bye!')
        self.disconnect()
def make_contract(symbol: str, sec_type: str, currency: str, exchange: str,
        primaryExchange: str, localsymbol: str) -> Contract:
    """Build an IB Contract from the given identifiers; localsymbol is optional."""
    built = Contract()
    built.symbol = symbol
    built.secType = sec_type
    built.currency = currency
    built.exchange = exchange
    built.primaryExchange = primaryExchange
    if localsymbol:
        built.localSymbol = localsymbol
    return built
def read_file(observer: rx.core.Observer, scheduler=None) -> None:
    """Emit every line of symbols.txt to *observer*, then signal completion."""
    with open('symbols.txt', 'r') as fh:
        for symbol_line in fh.readlines():
            observer.on_next(symbol_line)
    observer.on_completed()
def main():
    """Connect to a locally running IB gateway and enter the message loop."""
    app = DownloadApp()
    # BUG FIX: EClient.connect(host, port, clientId) requires the port
    # positionally; it was omitted. 4001 matches DownloadApp.do_connect's
    # default gateway port.
    app.connect("127.0.0.1", 4001, clientId=1)
    app.run()

if __name__ == "__main__":
    main()
# download_bars.py --size "5 min" --start-date 20110804 --end-date 20110904 AAPL
# download_bars.py --size "1 day" --duration "1 Y" --end-date 20210808 ABNB
# stated @ 2021-08-04 23:35:45.267262
# end @ 2021-08-04 23:35:46.107792 | [
"rx.subject.BehaviorSubject",
"logging.debug",
"ibapi.wrapper.EWrapper.__init__",
"rx.subject.Subject",
"ibapi.client.EClient.__init__",
"ibapi.contract.Contract",
"logging.info",
"logging.error"
] | [((5431, 5441), 'ibapi.contract.Contract', 'Contract', ([], {}), '()\n', (5439, 5441), False, 'from ibapi.contract import Contract\n'), ((1238, 1274), 'ibapi.client.EClient.__init__', 'EClient.__init__', (['self'], {'wrapper': 'self'}), '(self, wrapper=self)\n', (1254, 1274), False, 'from ibapi.client import EClient\n'), ((1283, 1314), 'ibapi.wrapper.EWrapper.__init__', 'wrapper.EWrapper.__init__', (['self'], {}), '(self)\n', (1308, 1314), False, 'from ibapi import wrapper\n'), ((1868, 1890), 'rx.subject.BehaviorSubject', 'BehaviorSubject', (['(False)'], {}), '(False)\n', (1883, 1890), False, 'from rx.subject import AsyncSubject, Subject, BehaviorSubject, ReplaySubject\n'), ((2489, 2498), 'rx.subject.Subject', 'Subject', ([], {}), '()\n', (2496, 2498), False, 'from rx.subject import AsyncSubject, Subject, BehaviorSubject, ReplaySubject\n'), ((3180, 3232), 'logging.info', 'logging.info', (["('historicalData %s, %s' % (reqId, bar))"], {}), "('historicalData %s, %s' % (reqId, bar))\n", (3192, 3232), False, 'import logging\n'), ((3597, 3663), 'logging.info', 'logging.info', (["('historicalDataEnd %s, %s, %s' % (reqId, start, end))"], {}), "('historicalDataEnd %s, %s, %s' % (reqId, start, end))\n", (3609, 3663), False, 'import logging\n'), ((3853, 3878), 'logging.info', 'logging.info', (['"""Connected"""'], {}), "('Connected')\n", (3865, 3878), False, 'import logging\n'), ((3972, 4000), 'logging.info', 'logging.info', (['"""Disconnected"""'], {}), "('Disconnected')\n", (3984, 4000), False, 'import logging\n'), ((4226, 4266), 'logging.info', 'logging.info', (['f"""nextValidId: {order_id}"""'], {}), "(f'nextValidId: {order_id}')\n", (4238, 4266), False, 'import logging\n'), ((4570, 4643), 'logging.debug', 'logging.debug', (['"""Error. Id: %s Code %s Msg: %s"""', 'req_id', 'error_code', 'error'], {}), "('Error. Id: %s Code %s Msg: %s', req_id, error_code, error)\n", (4583, 4643), False, 'import logging\n'), ((4711, 4784), 'logging.error', 'logging.error', (['"""Error. 
Id: %s Code %s Msg: %s"""', 'req_id', 'error_code', 'error'], {}), "('Error. Id: %s Code %s Msg: %s', req_id, error_code, error)\n", (4724, 4784), False, 'import logging\n')] |
import squareseqdigit
def test_squareseqdigit_1():
    result = squareseqdigit.square_sequence_digit(1)
    assert result == 1, " square_sequence_digit(1) == 1 "

def test_squareseqdigit_2():
    result = squareseqdigit.square_sequence_digit(2)
    assert result == 4, " square_sequence_digit(2) == 4 "

def test_squareseqdigit_3():
    result = squareseqdigit.square_sequence_digit(7)
    assert result == 5, " square_sequence_digit(7) == 5 "

def test_squareseqdigit_4():
    result = squareseqdigit.square_sequence_digit(12)
    assert result == 6, " square_sequence_digit(12) == 6 "

def test_squareseqdigit_5():
    result = squareseqdigit.square_sequence_digit(17)
    assert result == 0, " square_sequence_digit(17) == 0 "

def test_squareseqdigit_6():
    result = squareseqdigit.square_sequence_digit(27)
    assert result == 9, " square_sequence_digit(27) == 9 "
| [
"squareseqdigit.square_sequence_digit"
] | [((64, 103), 'squareseqdigit.square_sequence_digit', 'squareseqdigit.square_sequence_digit', (['(1)'], {}), '(1)\n', (100, 103), False, 'import squareseqdigit\n'), ((186, 225), 'squareseqdigit.square_sequence_digit', 'squareseqdigit.square_sequence_digit', (['(2)'], {}), '(2)\n', (222, 225), False, 'import squareseqdigit\n'), ((308, 347), 'squareseqdigit.square_sequence_digit', 'squareseqdigit.square_sequence_digit', (['(7)'], {}), '(7)\n', (344, 347), False, 'import squareseqdigit\n'), ((430, 470), 'squareseqdigit.square_sequence_digit', 'squareseqdigit.square_sequence_digit', (['(12)'], {}), '(12)\n', (466, 470), False, 'import squareseqdigit\n'), ((554, 594), 'squareseqdigit.square_sequence_digit', 'squareseqdigit.square_sequence_digit', (['(17)'], {}), '(17)\n', (590, 594), False, 'import squareseqdigit\n'), ((678, 718), 'squareseqdigit.square_sequence_digit', 'squareseqdigit.square_sequence_digit', (['(27)'], {}), '(27)\n', (714, 718), False, 'import squareseqdigit\n')] |
"""
This node is the communication layer betweeen the USR Ros subsystem and the stepper motor controllers.
"""
#TODO: add recieving info from the stepper controller
import rclpy
from rclpy.node import Node
import yaml
import serial, time
from enum import Enum
from motion_controller_msgs.msg import Mobility
class Command(Enum):
    """Opcodes for the stepper motor controller protocol.

    The enum value is sent as the first byte of each serial command
    (see StepperController._encode* helpers); values presumably must match
    the firmware's command table — confirm against the Teensy sketch.
    """
    init_all = 1
    align_all = 2
    align_one = 3
    stop_all = 4
    stop_one = 5
    blink_led = 6
class SteeringSubscriber(Node):
    """ROS node that drives the stepper motor controllers from Mobility messages.

    Subscribes to the 'steering' topic and relays the four wheel angles to the
    stepper controller over serial.
    """

    def __init__(self):
        super().__init__('minimal_subscriber')
        self.subscription = self.create_subscription(
            Mobility,
            'steering',
            self.listener_callback,
            10)
        self.subscription  # prevent unused variable warning
        # Create the controller instance from the config file. Using a context
        # manager guarantees the file is closed even if parsing or controller
        # construction raises (the original leaked the handle in that case).
        with open('/home/usr/usr_ws_2020/src/motor_controllers/stepper_motor/config/stepper_config.yaml') as tmp_file:
            stepper_config = yaml.load(tmp_file, Loader=yaml.FullLoader)
        self.stepper_controller = StepperController(serial_nm=stepper_config['serial'],
                                                    steps=stepper_config['steps'])
        # initialize motors and blink for confirmation
        #self.stepper_controller.initMotors()
        time.sleep(.5)
        #self.stepper_controller.blink(3)

    def listener_callback(self, msg):
        """Relay the requested wheel angles from a Mobility message to the motors."""
        # TODO: incorporate the state machine variables to decide whether the
        # motors should be running or not before commanding an alignment.
        self.stepper_controller.alignMotors(msg.front_left,
                                            msg.front_right,
                                            msg.rear_left,
                                            msg.rear_right)
class StepperController():
    """
    Serial link to the stepper motor controller on the teensy device.

    Encodes high-level motor commands into the byte protocol understood by
    the firmware: the first byte of every frame is a Command opcode.
    """

    def __init__(self, serial_nm, steps) -> None:
        """
        Open the serial connection to the controller.
        Inputs:
            serial_nm -> the serial device name/path for the controller
            steps -> number of stepper steps per full revolution
        """
        print('initializing stepper Ros communication')
        self.serial = serial_nm  # the serial name, kept for reference
        self._mc = serial.Serial(serial_nm, 115200, timeout=.1)  # the micro controller serial instance
        self.steps = steps
        time.sleep(1)  # give the connection a second to settle
        print('Done initializing stepper Ros communication')

    def alignMotors(self, fl, fr, bl, br):
        """
        Send a command to align the front left (fl), front right (fr),
        back left (bl), and back right (br) motors.
        Inputs:
            fl, fr, bl, br -> the degrees to align each motor
        Return:
            None
        """
        # TODO: go back to converting degrees to steps (_deg2steps) when
        # testing is finished.
        # Clamp each offset angle into a single unsigned byte [0, 255];
        # BUG FIX: without the lower clamp, any angle below -180 produced a
        # negative value and bytearray() raised ValueError.
        self._mc.write(self._encodeAlignCommand(min(max(fl + 180, 0), 255),
                                                min(max(fr + 180, 0), 255),
                                                min(max(bl + 180, 0), 255),
                                                min(max(br + 180, 0), 255)))

    def initMotors(self):
        """Ask the controller to initialize all motors."""
        self._mc.write(self._encodeInit())

    def blink(self, num_blinks):
        """Blink the controller LED num_blinks times (connection check)."""
        self._mc.write(self._encodeBlink(num_blinks=num_blinks))

    def _encodeAlignCommand(self, fl, fr, bl, br):
        """Encode an align-all frame: opcode followed by the four byte values."""
        return bytearray([Command.align_all.value, int(fl), int(fr), int(bl), int(br)])

    def _encodeBlink(self, num_blinks):
        """Encode a blink-LED frame."""
        return bytearray([Command.blink_led.value, num_blinks])

    def _encodeInit(self):
        """Encode an init-all-motors frame."""
        return bytearray([Command.init_all.value])

    def _deg2steps(self, deg):
        """
        Convert degrees to the stepper motor steps.
        Inputs:
            deg -> the requested degrees
        Return:
            steps -> the resulting (rounded) step count
        """
        return round((deg / 360) * self.steps)
def main(args=None):
    """Entry point: spin the steering subscriber node until shutdown."""
    rclpy.init(args=args)
    # initialize the main driving node
    steering_node = SteeringSubscriber()
    rclpy.spin(steering_node)
    # Destroy the node explicitly before tearing down rclpy.
    steering_node.destroy_node()
    rclpy.shutdown()


if __name__ == '__main__':
    main()
| [
"rclpy.spin",
"yaml.load",
"time.sleep",
"serial.Serial",
"rclpy.init",
"rclpy.shutdown"
] | [((4590, 4611), 'rclpy.init', 'rclpy.init', ([], {'args': 'args'}), '(args=args)\n', (4600, 4611), False, 'import rclpy\n'), ((4693, 4713), 'rclpy.spin', 'rclpy.spin', (['sub_node'], {}), '(sub_node)\n', (4703, 4713), False, 'import rclpy\n'), ((4781, 4797), 'rclpy.shutdown', 'rclpy.shutdown', ([], {}), '()\n', (4795, 4797), False, 'import rclpy\n'), ((1148, 1191), 'yaml.load', 'yaml.load', (['tmp_file'], {'Loader': 'yaml.FullLoader'}), '(tmp_file, Loader=yaml.FullLoader)\n', (1157, 1191), False, 'import yaml\n'), ((1474, 1489), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (1484, 1489), False, 'import serial, time\n'), ((2486, 2531), 'serial.Serial', 'serial.Serial', (['serial_nm', '(115200)'], {'timeout': '(0.1)'}), '(serial_nm, 115200, timeout=0.1)\n', (2499, 2531), False, 'import serial, time\n'), ((2605, 2618), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (2615, 2618), False, 'import serial, time\n')] |
#!/usr/bin/env python2.7
# Run as:
# python setup.py install --user
# -- Standard boilerplate header - begin
import unittest as ut
import sys, os
from os.path import abspath, dirname
from os.path import join as osjoin
# Make the sibling 'mhut' source directory importable when running the tests
# directly from the test directory.
cdir = dirname(abspath(__file__))  # this test file's directory
pdir = dirname(cdir)  # project root
srcdir = osjoin(pdir, 'mhut')
sys.path.insert(0, srcdir)
# De-duplicate sys.path entries while preserving their order.
pathlist = []
for p in sys.path:
    if not p in pathlist: pathlist.append(p)
sys.path = pathlist
from testutils import run_tests, twrap
# -- Standard boilerplate header - end
import pandas as pd
import numpy as np
from datautils import *
# -- golden dataset --
G_dict = {
    'Name': ['A Cat', 'A Dog', 'Neither'],
    'my r2': [1, 0, 0],
    'my d2': [1, 0, 0],
    'other piper': [0, 4, 0],
    'solomon': [0, 0, 2],
}
# Build the golden frame: rows are the counter names, columns the 'Name' row.
G_DF = pd.DataFrame.from_dict(G_dict, orient='index')
G_DF.drop('Name', inplace=True)
G_DF.index.name = 'Name'
G_DF.columns = G_dict['Name']
G_DF = G_DF.reindex(['my r2', 'my d2', 'other piper', 'solomon'])
for c in G_DF.columns:
    G_DF[c] = pd.to_numeric(G_DF[c])

# columns I..V
G1_dict = {
    'a': [2.071527, 1.998107, 2.029159, 1.192129, 1.459613],
    'b': [1.465882, 1.242207, 2.122667, 1.587954, 1.842492],
    'c': [1.505012, 1.674715, 1.436381, 1.626080, 1.435298],
    'd': [2.121946, 2.005520, 2.115850, 1.795292, 2.076429],
}
G1_DF = pd.DataFrame.from_dict(G1_dict, orient='index')
G1_DF.columns = 'I II III IV V'.split()

# -- convenience datasets for checking results --
G1_dict_R0 = {
    'a': [2.0, 2.0, 2.0, 1.0, 1.0], 'b': [1.0, 1.0, 2.0, 2.0, 2.0],
    'c': [2.0, 2.0, 1.0, 2.0, 1.0], 'd': [2.0, 2.0, 2.0, 2.0, 2.0],
}
G1_DF_R0 = pd.DataFrame.from_dict(G1_dict_R0, orient='index')
G1_DF_R0.columns = G1_DF.columns

G1_dict_R2 = {
    'a': [2.07, 2.00, 2.03, 1.19, 1.46],
    'b': [1.47, 1.24, 2.12, 1.59, 1.84],
    'c': [1.51, 1.67, 1.44, 1.63, 1.44],
    'd': [2.12, 2.01, 2.12, 1.80, 2.08],
}
G1_DF_R2 = pd.DataFrame.from_dict(G1_dict_R2, orient='index')
G1_DF_R2.columns = G1_DF.columns
class TestDatautils(ut.TestCase):
    """Unit tests for the helpers exported by mhut.datautils."""

    @classmethod
    def setUpClass(cls):
        print("class setUp - Nothing to do")

    @classmethod
    def tearDownClass(cls):
        print("class tearDown - Nothing to do")

    def setUp(self):
        # applied before each test method in the class
        pass

    def tearDown(self):
        # applied after each test method in the class
        pass

    ## --------------------------------------------------------------------- ##
    @twrap
    def test_filter_column(self):
        df = pd.DataFrame(100 * np.random.rand(12).reshape(4, 3), columns=list('ABC'))
        df1 = df.copy()
        df1.index = [10, 10.25, 10.5, 10.75]
        # integer-indexed frame
        self.assertTrue(df.iloc[1].equals(filter_column(df, '1')))
        self.assertTrue(df.iloc[1:4].equals(filter_column(df, '1:3')))
        self.assertTrue(df.iloc[:3].equals(filter_column(df, ':2')))
        self.assertTrue(df.iloc[2:].equals(filter_column(df, '2:')))
        self.assertTrue(df[df.index < 2].equals(filter_column(df, '<2')))
        self.assertTrue(df[df.index <= 1].equals(filter_column(df, '<=1')))
        self.assertTrue(df[df.index != 1].equals(filter_column(df, '!=1')))
        self.assertTrue(df[(df.index > 1) & (df.index <= 2)].equals(
            filter_column(df, '>1 & <=2')))
        # float-indexed frame
        self.assertTrue(df1.loc[10.5].equals(filter_column(df1, '10.5')))
        self.assertTrue(df1.loc[10.25:10.5].equals(filter_column(df1, '10.25:10.5')))
        self.assertTrue(df1.loc[:10.5].equals(filter_column(df1, ':10.5')))
        self.assertTrue(df1.loc[10.5:].equals(filter_column(df1, '10.5:')))
        self.assertTrue(df1[df1.index >= 10.5].equals(filter_column(df1, '>=10.5')))
        self.assertTrue(df1[df1.index == 10.25].equals(filter_column(df1, '==10.25')))
        self.assertTrue(df1[(df1.index < 10.25) | (df1.index >= 10.75)].equals(
            filter_column(df1, '<10.25 | >=10.75')))
        # expect errors in some version of pandas
        self.assertTrue(df.iloc[1].equals(filter_column(df, '1.0')))
        # self.assertTrue(filter_column(df, '1.0') - df.iloc[1]) # 1.0 => 1 (returns non-empty table)

    ## --------------------------------------------------------------------- ##
    @twrap
    def test_columnize(self):
        tbl = [['Strike', 'Bid', 'Ask'],
               [73.0, 2.65, 2.70],
               [73.5, 2.47, 2.52],
               [74.0, 2.30, 2.36]]
        xpct = [[73.0, 73.5, 74.0],
                [2.65, 2.47, 2.30],
                [2.70, 2.52, 2.36]]
        result = columnize(tbl, True)  # strip_header
        result = [list(r) for r in result]  # convert back from np.array to list
        self.assertEqual(xpct, result)
        # TODO. N/A's are not handled gracefully
        # tbl[3][1] = 'N/A'  # 2.3 -> 'N/A'
        # result = vectorize(tbl)
        # self.assertEqual(xpct, result)

    ## --------------------------------------------------------------------- ##
    @twrap
    def test_R2(self):
        self.assertEqual(4.00, R2(4))
        self.assertEqual(4.50, R2(4.5))
        self.assertEqual(4.58, R2(4.58))
        self.assertEqual(4.59, R2(4.586))
        self.assertEqual(4.58, R2(4.584))

    ## --------------------------------------------------------------------- ##
    @twrap
    def test_R3(self):
        self.assertEqual(4.000, R3(4))
        self.assertEqual(4.500, R3(4.5))
        self.assertEqual(4.585, R3(4.585))
        self.assertEqual(4.586, R3(4.5863))
        self.assertEqual(4.587, R3(4.5867))

    ## --------------------------------------------------------------------- ##
    @twrap
    def test_RN(self):
        self.assertEqual(4.00, RN(4))
        self.assertEqual(4.57, RN(4.5736))
        self.assertEqual(4.527, RN(4.5268, 3))
        self.assertEqual(4.57360, RN(4.5736, 5))
        self.assertEqual('abc', RN('abc', 4))

    ## --------------------------------------------------------------------- ##
    @twrap
    def test_roundoff_list(self):
        alist = [2.5768, 'bee', 256]
        roundoff_list(alist, 3)
        self.assertEqual([2.577, 'bee', 256.000], alist)
        alist = [2.5768, 'bee2', 256]
        roundoff_list(alist)
        self.assertEqual([2.58, 'bee2', 256.00], alist)

    ## --------------------------------------------------------------------- ##
    @twrap
    def test_roundoff_dict(self):
        adict = {'A': 2.576, 'B': 'bee', 'C': 256, 'D': [32.1475, 32, 'fee']}
        roundoff_dict(adict, 3)
        axpct = {'A': 2.576, 'B': 'bee', 'C': 256.000, 'D': [32.148, 32.000, 'fee']}
        self.assertEqual(axpct, adict)
        adict = {'A': 2.576, 'B': 'bee', 'C': 256, 'D': [32.1475, 32, 'fee']}
        roundoff_dict(adict, 2)
        axpct = {'A': 2.58, 'B': 'bee', 'C': 256.00, 'D': [32.15, 32.00, 'fee']}
        self.assertEqual(axpct, adict)

    ## --------------------------------------------------------------------- ##
    @twrap
    def test_isnumeric(self):
        self.assertTrue(isnumeric(23))
        self.assertTrue(isnumeric(23.57))
        self.assertTrue(isnumeric('.57'))
        self.assertTrue(isnumeric('257'))
        self.assertFalse(isnumeric('257.a'))
        self.assertFalse(isnumeric('a.bc'))
        self.assertFalse(isnumeric('a.25bc'))
        self.assertFalse(isnumeric('1.25.37'))

    ## --------------------------------------------------------------------- ##
    @twrap
    def test_reorder_list(self):
        orig_list = ['apple', 'banana', 'cantaloupe', 'guava', 'mango']
        des_order = ['banana', 'guava']

        new_list = reorder_list(orig_list, des_order)  # , 'any'
        b_ix = new_list.index('banana')
        g_ix = new_list.index('guava')
        self.assertEqual(1, g_ix - b_ix)
        self.assertEqual(set(orig_list), set(new_list))
        self.assertNotEqual(orig_list, new_list)

        new_list = reorder_list(orig_list, des_order, 'begin')
        self.assertEqual(new_list, 'banana guava apple cantaloupe mango'.split())
        new_list = reorder_list(orig_list, des_order, 'end')
        self.assertEqual(new_list, 'apple cantaloupe mango banana guava'.split())
        new_list = reorder_list(orig_list, des_order, 'before')
        self.assertEqual(new_list, 'apple cantaloupe banana guava mango'.split())
        new_list = reorder_list(orig_list, des_order, 'after')
        self.assertEqual(new_list, 'apple banana guava cantaloupe mango'.split())

        new_list = reorder_list(orig_list, 'mango cranberry cantaloupe'.split())
        m_ix = new_list.index('mango')
        c_ix = new_list.index('cantaloupe')
        self.assertEqual(1, c_ix - m_ix)
        self.assertEqual(set(orig_list), set(new_list))

        des_order = 'banana apple cantaloupe something_else mango guava'.split()
        new_list = reorder_list(orig_list, des_order)
        self.assertEqual(new_list, ['banana', 'apple', 'cantaloupe', 'mango', 'guava'])

    ## --------------------------------------------------------------------- ##
    @twrap
    def test_df_reorder_columns(self):
        A, B, C, D, E = 0, 1, 2, 3, 4
        m = np.random.rand(30).reshape(6, 5)
        df = pd.DataFrame(m, columns=list('ABCDE'))

        df1 = df_reorder_columns(df, orderlist=list('CDAEB'))
        m_xpdf = np.array(list(zip(m[:, C], m[:, D], m[:, A], m[:, E], m[:, B])))
        xpdf1 = pd.DataFrame(m_xpdf, columns=list('CDAEB'))

        df2 = df_reorder_columns(df, list('BD'), 'begin')
        m_xpdf2 = np.array(list(zip(m[:, B], m[:, D], m[:, A], m[:, C], m[:, E])))
        xpdf2 = pd.DataFrame(m_xpdf2, columns=list('BDACE'))

        df3 = df_reorder_columns(df, list('CFA'), 'end')
        m_xpdf3 = np.array(list(zip(m[:, B], m[:, D], m[:, E], m[:, C], m[:, A])))
        xpdf3 = pd.DataFrame(m_xpdf3, columns=list('BDECA'))

        self.assertTrue(df1.equals(xpdf1))
        self.assertTrue(df2.equals(xpdf2))
        self.assertTrue(df3.equals(xpdf3))

    ## --------------------------------------------------------------------- ##
    @twrap
    def test_txt2df(self):
        alltxt = '''
              |       |        |
Name          A Cat   A Dog    Neither
my r2         1       0        0
my d2         1       0        0
other piper   0       4        0
solomon       0       0        2
'''
        df = txt2df(alltxt)
        for c in df.columns:
            df[c] = pd.to_numeric(df[c])
        self.assertTrue(G_DF.equals(df))

    ## --------------------------------------------------------------------- ##
    @twrap
    def test_parse2df(self):
        df = parse2df(osjoin(cdir, 'test_parse2df.txt'))
        for c in df.columns:
            df[c] = pd.to_numeric(df[c])
        self.assertTrue(G_DF.equals(df))

    ## --------------------------------------------------------------------- ##
    @twrap
    def test_broadcast(self):
        alist = [1, 2, 3, 4]
        aa = np.array(alist)
        ma = np.matrix(alist)
        sa = pd.Series(alist)
        # -- check lists --
        x = broadcast(alist, 3, 0)
        y = broadcast(alist, 3)
        xpct_x = [alist, alist, alist]
        xpct_y = [[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4]]
        self.assertEqual(xpct_x, x)
        self.assertEqual(xpct_y, y)
        # -- check arrays --
        x = broadcast(aa, 3, 0)
        y = broadcast(aa, 3)
        xpct_x = np.array([alist, alist, alist])
        xpct_y = np.array([[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4]])
        self.assertEqual((xpct_x - x).sum(), 0)
        self.assertEqual((xpct_y - y).sum(), 0)
        # -- check matrices --
        x = broadcast(ma, 3, 0)
        y = broadcast(ma, 3)
        xpct_x = np.matrix([alist, alist, alist])
        xpct_y = np.matrix([[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4]])
        self.assertEqual((xpct_x - x).sum(), 0)
        self.assertEqual((xpct_y - y).sum(), 0)
        # -- check series --
        x = broadcast(sa, 3, 0)
        y = broadcast(sa, 3)
        xpct_x = pd.DataFrame([alist, alist, alist], dtype=float)
        xpct_y = pd.DataFrame([[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4]], dtype=float)
        self.assertTrue(xpct_x.equals(x))
        self.assertTrue(xpct_y.equals(y))

    ## --------------------------------------------------------------------- ##
    @twrap
    def test_roundoff_df(self):
        df = roundoff_df(G1_DF)
        self.assertTrue(df.equals(G1_DF_R0))
        df = roundoff_df(G1_DF, 2)
        self.assertTrue(df.equals(G1_DF_R2))

        df = roundoff_df(G1_DF, 2, columns=['III', 'V'])
        G1_rounded = G1_DF.copy()
        G1_rounded.reindex(list('abcd'))  # becomes acbd for some reason
        G1_rounded['III'] = pd.Series(dict(list(zip(list('abcd'), [2.03, 2.12, 1.44, 2.12]))))
        G1_rounded['V'] = pd.Series(dict(list(zip(list('abcd'), [1.46, 1.84, 1.44, 2.08]))))
        self.assertTrue(df.equals(G1_rounded))

        df = roundoff_df(G1_DF, 2, indices=['b', 'd'])
        G1_rounded = G1_DF.copy()
        G1_rounded.loc['b'] = [1.47, 1.24, 2.12, 1.59, 1.84]
        G1_rounded.loc['d'] = [2.12, 2.01, 2.12, 1.80, 2.08]
        self.assertTrue(df.equals(G1_rounded))

        df = roundoff_df(G1_DF, 2, columns=['III', 'V'], indices=['b', 'd'])
        G1_rounded = G1_DF.copy()
        G1_rounded.loc['b', ['III', 'V']] = [2.12, 1.84]
        G1_rounded.loc['d', ['III', 'V']] = [2.12, 2.08]
        self.assertTrue(df.equals(G1_rounded))
if __name__ == '__main__':
    # ut.main()  # alternative: run via the stock unittest runner
    run_tests(TestDatautils)
| [
"pandas.Series",
"sys.path.insert",
"numpy.random.rand",
"pandas.DataFrame",
"os.path.join",
"pandas.DataFrame.from_dict",
"os.path.dirname",
"numpy.array",
"pandas.to_numeric",
"testutils.run_tests",
"os.path.abspath",
"numpy.matrix"
] | [((291, 304), 'os.path.dirname', 'dirname', (['cdir'], {}), '(cdir)\n', (298, 304), False, 'from os.path import abspath, dirname\n'), ((331, 351), 'os.path.join', 'osjoin', (['pdir', '"""mhut"""'], {}), "(pdir, 'mhut')\n", (337, 351), True, 'from os.path import join as osjoin\n'), ((352, 378), 'sys.path.insert', 'sys.path.insert', (['(0)', 'srcdir'], {}), '(0, srcdir)\n', (367, 378), False, 'import sys, os\n'), ((890, 936), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['G_dict'], {'orient': '"""index"""'}), "(G_dict, orient='index')\n", (912, 936), True, 'import pandas as pd\n'), ((1458, 1505), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['G1_dict'], {'orient': '"""index"""'}), "(G1_dict, orient='index')\n", (1480, 1505), True, 'import pandas as pd\n'), ((1760, 1810), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['G1_dict_R0'], {'orient': '"""index"""'}), "(G1_dict_R0, orient='index')\n", (1782, 1810), True, 'import pandas as pd\n'), ((2036, 2086), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['G1_dict_R2'], {'orient': '"""index"""'}), "(G1_dict_R2, orient='index')\n", (2058, 2086), True, 'import pandas as pd\n'), ((235, 252), 'os.path.abspath', 'abspath', (['__file__'], {}), '(__file__)\n', (242, 252), False, 'from os.path import abspath, dirname\n'), ((1124, 1146), 'pandas.to_numeric', 'pd.to_numeric', (['G_DF[c]'], {}), '(G_DF[c])\n', (1137, 1146), True, 'import pandas as pd\n'), ((13632, 13656), 'testutils.run_tests', 'run_tests', (['TestDatautils'], {}), '(TestDatautils)\n', (13641, 13656), False, 'from testutils import run_tests, twrap\n'), ((11069, 11084), 'numpy.array', 'np.array', (['alist'], {}), '(alist)\n', (11077, 11084), True, 'import numpy as np\n'), ((11098, 11114), 'numpy.matrix', 'np.matrix', (['alist'], {}), '(alist)\n', (11107, 11114), True, 'import numpy as np\n'), ((11128, 11144), 'pandas.Series', 'pd.Series', (['alist'], {}), '(alist)\n', (11137, 11144), True, 'import pandas as pd\n'), 
((11519, 11550), 'numpy.array', 'np.array', (['[alist, alist, alist]'], {}), '([alist, alist, alist])\n', (11527, 11550), True, 'import numpy as np\n'), ((11570, 11624), 'numpy.array', 'np.array', (['[[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4]]'], {}), '([[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4]])\n', (11578, 11624), True, 'import numpy as np\n'), ((11825, 11857), 'numpy.matrix', 'np.matrix', (['[alist, alist, alist]'], {}), '([alist, alist, alist])\n', (11834, 11857), True, 'import numpy as np\n'), ((11877, 11932), 'numpy.matrix', 'np.matrix', (['[[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4]]'], {}), '([[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4]])\n', (11886, 11932), True, 'import numpy as np\n'), ((12131, 12179), 'pandas.DataFrame', 'pd.DataFrame', (['[alist, alist, alist]'], {'dtype': 'float'}), '([alist, alist, alist], dtype=float)\n', (12143, 12179), True, 'import pandas as pd\n'), ((12199, 12270), 'pandas.DataFrame', 'pd.DataFrame', (['[[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4]]'], {'dtype': 'float'}), '([[1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4]], dtype=float)\n', (12211, 12270), True, 'import pandas as pd\n'), ((10567, 10587), 'pandas.to_numeric', 'pd.to_numeric', (['df[c]'], {}), '(df[c])\n', (10580, 10587), True, 'import pandas as pd\n'), ((10773, 10806), 'os.path.join', 'osjoin', (['cdir', '"""test_parse2df.txt"""'], {}), "(cdir, 'test_parse2df.txt')\n", (10779, 10806), True, 'from os.path import join as osjoin\n'), ((10845, 10865), 'pandas.to_numeric', 'pd.to_numeric', (['df[c]'], {}), '(df[c])\n', (10858, 10865), True, 'import pandas as pd\n'), ((9288, 9306), 'numpy.random.rand', 'np.random.rand', (['(30)'], {}), '(30)\n', (9302, 9306), True, 'import numpy as np\n'), ((2643, 2661), 'numpy.random.rand', 'np.random.rand', (['(12)'], {}), '(12)\n', (2657, 2661), True, 'import numpy as np\n')] |
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import sqrt
from policies.base import Net
class Actor(Net):
    """Abstract base for policy networks that produce actions.

    Subclasses must implement ``forward`` and ``get_action``.
    """

    def __init__(self):
        super().__init__()

    def forward(self):
        raise NotImplementedError

    def get_action(self):
        raise NotImplementedError
class Linear_Actor(Actor):
    """Purely linear two-layer policy with every parameter initialized to zero."""

    def __init__(self, state_dim, action_dim, hidden_size=32):
        super().__init__()

        self.l1 = nn.Linear(state_dim, hidden_size)
        self.l2 = nn.Linear(hidden_size, action_dim)

        self.action_dim = action_dim

        # Zero-initialize every parameter of the policy.
        for param in self.parameters():
            param.data = torch.zeros_like(param.data)

    def forward(self, state):
        """Compute the action for ``state`` and cache it."""
        hidden = self.l1(state)
        self.action = self.l2(hidden)
        return self.action

    def get_action(self):
        """Return the action computed by the last ``forward`` call."""
        return self.action
class FF_Actor(Actor):
    """Feed-forward deterministic actor with a tanh-squashed output head."""

    def __init__(self, state_dim, action_dim, layers=(256, 256), env_name=None, nonlinearity=F.relu, normc_init=False, max_action=1):
        super().__init__()

        # Build hidden layers by pairing consecutive sizes:
        # state_dim -> layers[0] -> layers[1] -> ...
        dims = (state_dim,) + tuple(layers)
        self.actor_layers = nn.ModuleList(
            nn.Linear(in_dim, out_dim) for in_dim, out_dim in zip(dims[:-1], dims[1:])
        )
        self.network_out = nn.Linear(layers[-1], action_dim)

        self.action = None
        self.action_dim = action_dim
        self.env_name = env_name
        self.nonlinearity = nonlinearity
        self.max_action = max_action  # stored for callers; not applied inside forward()

        if normc_init:
            self.initialize_parameters()

    def forward(self, state):
        """Run the network and cache the tanh-bounded action."""
        hidden = state
        for layer in self.actor_layers:
            hidden = self.nonlinearity(layer(hidden))

        self.action = torch.tanh(self.network_out(hidden))
        return self.action

    def get_action(self):
        """Return the action computed by the last ``forward`` call."""
        return self.action
class FF_Stochastic_Actor(Actor):
    """Feed-forward stochastic actor parameterizing a Normal action distribution.

    The mean is always state-dependent; the standard deviation is either
    learned (a second linear head on the trunk) or fixed via ``fixed_std``.
    With ``bounded=True`` actions are squashed through tanh and log-probs are
    corrected accordingly.
    """
    def __init__(self, state_dim, action_dim, layers=(256, 256), env_name=None, nonlinearity=F.relu, normc_init=True, bounded=False, fixed_std=None):
        super(FF_Stochastic_Actor, self).__init__()
        # Hidden trunk: state_dim -> layers[0] -> ... -> layers[-1]
        self.actor_layers = nn.ModuleList()
        self.actor_layers += [nn.Linear(state_dim, layers[0])]
        for i in range(len(layers)-1):
            self.actor_layers += [nn.Linear(layers[i], layers[i+1])]
        self.means = nn.Linear(layers[-1], action_dim)
        if fixed_std is None:
            # Learn a state-dependent log standard deviation.
            self.log_stds = nn.Linear(layers[-1], action_dim)
            self.learn_std = True
        else:
            self.fixed_std = fixed_std
            self.learn_std = False
        self.action = None
        self.action_dim = action_dim
        self.env_name = env_name
        self.nonlinearity = nonlinearity
        self.bounded = bounded
        if normc_init:
            self.initialize_parameters()
    def _get_dist_params(self, state):
        """Run the trunk and return (mean, std) of the action distribution."""
        x = state
        for idx, layer in enumerate(self.actor_layers):
            x = self.nonlinearity(layer(x))
        mu = self.means(x)
        if self.learn_std:
            # Clamp log-std to [-2, 1] before exponentiating for stability.
            sd = torch.clamp(self.log_stds(x), -2, 1).exp()
        else:
            sd = self.fixed_std
        return mu, sd
    def forward(self, state, deterministic=True, return_log_probs=False):
        """Return the mean action (deterministic) or a reparameterized sample.

        When ``return_log_probs`` is True, also returns the summed log
        probability of the sample (with tanh change-of-variable correction
        when ``bounded``).
        """
        mu, sd = self._get_dist_params(state)
        if not deterministic or return_log_probs:
            dist = torch.distributions.Normal(mu, sd)
            sample = dist.rsample()
        if self.bounded:
            self.action = torch.tanh(mu) if deterministic else torch.tanh(sample)
        else:
            self.action = mu if deterministic else sample
        if return_log_probs:
            log_prob = dist.log_prob(sample)
            if self.bounded:
                # Correct the log-prob for the tanh squashing; 1e-6 guards log(0).
                log_prob -= torch.log((1 - torch.tanh(sample).pow(2)) + 1e-6)
            return self.action, log_prob.sum(1, keepdim=True)
        else:
            return self.action
    def pdf(self, state):
        """Return the Normal action distribution at ``state``."""
        mu, sd = self._get_dist_params(state)
        return torch.distributions.Normal(mu, sd)
    def get_action(self):
        """Return the action computed by the last ``forward`` call."""
        return self.action
class LSTM_Actor(Actor):
    """Recurrent (LSTM) deterministic actor.

    Maintains per-layer hidden/cell state across calls, so it can process a
    single timestep (1-D input), a batch of timesteps (2-D), or a batch of
    whole trajectories (3-D input of shape [time, batch, features]).
    """

    def __init__(self, input_dim, action_dim, layers=(128, 128), env_name=None, nonlinearity=torch.tanh, normc_init=False, bounded=False):
        super(LSTM_Actor, self).__init__()

        self.actor_layers = nn.ModuleList()
        self.actor_layers += [nn.LSTMCell(input_dim, layers[0])]
        for i in range(len(layers)-1):
            self.actor_layers += [nn.LSTMCell(layers[i], layers[i+1])]
        # BUGFIX: the output head must consume the size of the *last* hidden
        # layer. The original used layers[i-1], which is only correct by
        # accident for two equal-sized layers and raises NameError when a
        # single hidden layer is configured.
        self.network_out = nn.Linear(layers[-1], action_dim)

        self.action = None
        self.action_dim = action_dim
        self.init_hidden_state()
        self.env_name = env_name
        self.nonlinearity = nonlinearity
        self.bounded = bounded

        self.is_recurrent = True

        if normc_init:
            self.initialize_parameters()

    def get_hidden_state(self):
        """Return the (hidden, cells) state lists of all LSTM layers."""
        return self.hidden, self.cells

    def set_hidden_state(self, data):
        """Restore a (hidden, cells) pair previously returned by get_hidden_state()."""
        if len(data) != 2:
            print("Got invalid hidden state data.")
            exit(1)

        self.hidden, self.cells = data

    def init_hidden_state(self, batch_size=1):
        """Zero the hidden and cell state of every LSTM layer."""
        self.hidden = [torch.zeros(batch_size, l.hidden_size) for l in self.actor_layers]
        self.cells = [torch.zeros(batch_size, l.hidden_size) for l in self.actor_layers]

    def forward(self, x):
        """Run the recurrent trunk and cache nonlinearity(network_out(hidden))."""
        dims = len(x.size())

        if dims == 3:  # if we get a batch of trajectories
            self.init_hidden_state(batch_size=x.size(1))
            y = []
            for t, x_t in enumerate(x):
                for idx, layer in enumerate(self.actor_layers):
                    c, h = self.cells[idx], self.hidden[idx]
                    self.hidden[idx], self.cells[idx] = layer(x_t, (h, c))
                    x_t = self.hidden[idx]
                y.append(x_t)
            x = torch.stack([x_t for x_t in y])
        else:
            if dims == 1:  # if we get a single timestep (if not, assume we got a batch of single timesteps)
                x = x.view(1, -1)

            for idx, layer in enumerate(self.actor_layers):
                h, c = self.hidden[idx], self.cells[idx]
                self.hidden[idx], self.cells[idx] = layer(x, (h, c))
                x = self.hidden[idx]

        # BUGFIX: the original applied self.network_out twice (once inside the
        # nonlinearity and once again when assigning self.action), which fails
        # with a shape mismatch whenever action_dim != layers[-1]. Apply the
        # output head exactly once.
        x = self.nonlinearity(self.network_out(x))
        if dims == 1:
            x = x.view(-1)

        self.action = x
        return self.action

    def get_action(self):
        """Return the action computed by the last ``forward`` call."""
        return self.action
class LSTM_Stochastic_Actor(Actor):
    """Recurrent (LSTM) stochastic actor emitting a Normal action distribution.

    Handles a single timestep (1-D input), a batch of timesteps (2-D), or a
    batch of whole trajectories (3-D input of shape [time, batch, features]).
    """

    def __init__(self, state_dim, action_dim, layers=(128, 128), env_name=None, normc_init=False, bounded=False, fixed_std=None):
        super(LSTM_Stochastic_Actor, self).__init__()

        self.actor_layers = nn.ModuleList()
        self.actor_layers += [nn.LSTMCell(state_dim, layers[0])]
        for i in range(len(layers)-1):
            self.actor_layers += [nn.LSTMCell(layers[i], layers[i+1])]
        # BUGFIX: the output head must consume the size of the *last* hidden
        # layer. The original used layers[i-1], which is only correct by
        # accident for two equal-sized layers and raises NameError when a
        # single hidden layer is configured. layers[-1] also matches the
        # input size already used for self.log_stds below.
        self.network_out = nn.Linear(layers[-1], action_dim)

        self.action = None
        self.action_dim = action_dim
        self.init_hidden_state()
        self.env_name = env_name
        self.bounded = bounded

        self.is_recurrent = True

        if fixed_std is None:
            # Learn a state-dependent log standard deviation.
            self.log_stds = nn.Linear(layers[-1], action_dim)
            self.learn_std = True
        else:
            self.fixed_std = fixed_std
            self.learn_std = False

        if normc_init:
            self.initialize_parameters()

    def _get_dist_params(self, state):
        """Run the recurrent trunk and return (mean, std) of the action distribution."""
        dims = len(state.size())

        x = state
        if dims == 3:  # if we get a batch of trajectories
            self.init_hidden_state(batch_size=x.size(1))
            y = []
            for t, x_t in enumerate(x):
                for idx, layer in enumerate(self.actor_layers):
                    c, h = self.cells[idx], self.hidden[idx]
                    self.hidden[idx], self.cells[idx] = layer(x_t, (h, c))
                    x_t = self.hidden[idx]
                y.append(x_t)
            x = torch.stack([x_t for x_t in y])
        else:
            if dims == 1:  # if we get a single timestep (if not, assume we got a batch of single timesteps)
                x = x.view(1, -1)

            for idx, layer in enumerate(self.actor_layers):
                h, c = self.hidden[idx], self.cells[idx]
                self.hidden[idx], self.cells[idx] = layer(x, (h, c))
                x = self.hidden[idx]

            if dims == 1:
                x = x.view(-1)

        mu = self.network_out(x)

        if self.learn_std:
            # Clamp log-std to [-2, 2] before exponentiating for stability.
            sd = torch.clamp(self.log_stds(x), -2, 2).exp()
        else:
            sd = self.fixed_std

        return mu, sd

    def init_hidden_state(self, batch_size=1):
        """Zero the hidden and cell state of every LSTM layer."""
        self.hidden = [torch.zeros(batch_size, l.hidden_size) for l in self.actor_layers]
        self.cells = [torch.zeros(batch_size, l.hidden_size) for l in self.actor_layers]

    def forward(self, state, deterministic=True, return_log_probs=False):
        """Return the mean action (deterministic) or a reparameterized sample.

        When ``return_log_probs`` is True, also returns the per-dimension log
        probability of the sample.
        """
        mu, sd = self._get_dist_params(state)

        if not deterministic or return_log_probs:
            dist = torch.distributions.Normal(mu, sd)
            sample = dist.rsample()

        if hasattr(self, 'bounded') and self.bounded:
            self.action = torch.tanh(mu) if deterministic else torch.tanh(sample)
        else:
            self.action = mu if deterministic else sample

        if return_log_probs:
            return self.action, dist.log_prob(sample)
        else:
            return self.action

    def pdf(self, state):
        """Return the Normal action distribution at ``state``."""
        mu, sd = self._get_dist_params(state)
        return torch.distributions.Normal(mu, sd)

    def get_action(self):
        """Return the action computed by the last ``forward`` call."""
        return self.action
| [
"torch.tanh",
"torch.distributions.Normal",
"torch.nn.ModuleList",
"torch.nn.LSTMCell",
"torch.stack",
"torch.nn.Linear",
"torch.zeros"
] | [((449, 482), 'torch.nn.Linear', 'nn.Linear', (['state_dim', 'hidden_size'], {}), '(state_dim, hidden_size)\n', (458, 482), True, 'import torch.nn as nn\n'), ((497, 531), 'torch.nn.Linear', 'nn.Linear', (['hidden_size', 'action_dim'], {}), '(hidden_size, action_dim)\n', (506, 531), True, 'import torch.nn as nn\n'), ((1005, 1020), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (1018, 1020), True, 'import torch.nn as nn\n'), ((1203, 1236), 'torch.nn.Linear', 'nn.Linear', (['layers[-1]', 'action_dim'], {}), '(layers[-1], action_dim)\n', (1212, 1236), True, 'import torch.nn as nn\n'), ((1959, 1974), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (1972, 1974), True, 'import torch.nn as nn\n'), ((2151, 2184), 'torch.nn.Linear', 'nn.Linear', (['layers[-1]', 'action_dim'], {}), '(layers[-1], action_dim)\n', (2160, 2184), True, 'import torch.nn as nn\n'), ((3600, 3634), 'torch.distributions.Normal', 'torch.distributions.Normal', (['mu', 'sd'], {}), '(mu, sd)\n', (3626, 3634), False, 'import torch\n'), ((3910, 3925), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (3923, 3925), True, 'import torch.nn as nn\n'), ((4112, 4148), 'torch.nn.Linear', 'nn.Linear', (['layers[i - 1]', 'action_dim'], {}), '(layers[i - 1], action_dim)\n', (4121, 4148), True, 'import torch.nn as nn\n'), ((6109, 6124), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (6122, 6124), True, 'import torch.nn as nn\n'), ((6311, 6347), 'torch.nn.Linear', 'nn.Linear', (['layers[i - 1]', 'action_dim'], {}), '(layers[i - 1], action_dim)\n', (6320, 6347), True, 'import torch.nn as nn\n'), ((8649, 8683), 'torch.distributions.Normal', 'torch.distributions.Normal', (['mu', 'sd'], {}), '(mu, sd)\n', (8675, 8683), False, 'import torch\n'), ((614, 634), 'torch.zeros', 'torch.zeros', (['p.shape'], {}), '(p.shape)\n', (625, 634), False, 'import torch\n'), ((1047, 1078), 'torch.nn.Linear', 'nn.Linear', (['state_dim', 'layers[0]'], {}), '(state_dim, layers[0])\n', (1056, 1078), 
True, 'import torch.nn as nn\n'), ((2001, 2032), 'torch.nn.Linear', 'nn.Linear', (['state_dim', 'layers[0]'], {}), '(state_dim, layers[0])\n', (2010, 2032), True, 'import torch.nn as nn\n'), ((2234, 2267), 'torch.nn.Linear', 'nn.Linear', (['layers[-1]', 'action_dim'], {}), '(layers[-1], action_dim)\n', (2243, 2267), True, 'import torch.nn as nn\n'), ((3047, 3081), 'torch.distributions.Normal', 'torch.distributions.Normal', (['mu', 'sd'], {}), '(mu, sd)\n', (3073, 3081), False, 'import torch\n'), ((3952, 3985), 'torch.nn.LSTMCell', 'nn.LSTMCell', (['input_dim', 'layers[0]'], {}), '(input_dim, layers[0])\n', (3963, 3985), True, 'import torch.nn as nn\n'), ((4706, 4744), 'torch.zeros', 'torch.zeros', (['batch_size', 'l.hidden_size'], {}), '(batch_size, l.hidden_size)\n', (4717, 4744), False, 'import torch\n'), ((4792, 4830), 'torch.zeros', 'torch.zeros', (['batch_size', 'l.hidden_size'], {}), '(batch_size, l.hidden_size)\n', (4803, 4830), False, 'import torch\n'), ((5299, 5330), 'torch.stack', 'torch.stack', (['[x_t for x_t in y]'], {}), '([x_t for x_t in y])\n', (5310, 5330), False, 'import torch\n'), ((6151, 6184), 'torch.nn.LSTMCell', 'nn.LSTMCell', (['state_dim', 'layers[0]'], {}), '(state_dim, layers[0])\n', (6162, 6184), True, 'import torch.nn as nn\n'), ((6566, 6599), 'torch.nn.Linear', 'nn.Linear', (['layers[-1]', 'action_dim'], {}), '(layers[-1], action_dim)\n', (6575, 6599), True, 'import torch.nn as nn\n'), ((7244, 7275), 'torch.stack', 'torch.stack', (['[x_t for x_t in y]'], {}), '([x_t for x_t in y])\n', (7255, 7275), False, 'import torch\n'), ((7880, 7918), 'torch.zeros', 'torch.zeros', (['batch_size', 'l.hidden_size'], {}), '(batch_size, l.hidden_size)\n', (7891, 7918), False, 'import torch\n'), ((7966, 8004), 'torch.zeros', 'torch.zeros', (['batch_size', 'l.hidden_size'], {}), '(batch_size, l.hidden_size)\n', (7977, 8004), False, 'import torch\n'), ((8208, 8242), 'torch.distributions.Normal', 'torch.distributions.Normal', (['mu', 'sd'], {}), '(mu, 
sd)\n', (8234, 8242), False, 'import torch\n'), ((1145, 1180), 'torch.nn.Linear', 'nn.Linear', (['layers[i]', 'layers[i + 1]'], {}), '(layers[i], layers[i + 1])\n', (1154, 1180), True, 'import torch.nn as nn\n'), ((2099, 2134), 'torch.nn.Linear', 'nn.Linear', (['layers[i]', 'layers[i + 1]'], {}), '(layers[i], layers[i + 1])\n', (2108, 2134), True, 'import torch.nn as nn\n'), ((3154, 3168), 'torch.tanh', 'torch.tanh', (['mu'], {}), '(mu)\n', (3164, 3168), False, 'import torch\n'), ((3191, 3209), 'torch.tanh', 'torch.tanh', (['sample'], {}), '(sample)\n', (3201, 3209), False, 'import torch\n'), ((4052, 4089), 'torch.nn.LSTMCell', 'nn.LSTMCell', (['layers[i]', 'layers[i + 1]'], {}), '(layers[i], layers[i + 1])\n', (4063, 4089), True, 'import torch.nn as nn\n'), ((6251, 6288), 'torch.nn.LSTMCell', 'nn.LSTMCell', (['layers[i]', 'layers[i + 1]'], {}), '(layers[i], layers[i + 1])\n', (6262, 6288), True, 'import torch.nn as nn\n'), ((8344, 8358), 'torch.tanh', 'torch.tanh', (['mu'], {}), '(mu)\n', (8354, 8358), False, 'import torch\n'), ((8381, 8399), 'torch.tanh', 'torch.tanh', (['sample'], {}), '(sample)\n', (8391, 8399), False, 'import torch\n'), ((3395, 3413), 'torch.tanh', 'torch.tanh', (['sample'], {}), '(sample)\n', (3405, 3413), False, 'import torch\n')] |
import datetime
import urllib.request, urllib.parse, urllib.error
import logging
import threading
import subprocess
# Common packages
from typing import Dict
from modules.common.TqdmUpTo import TqdmUpTo
# Decorator for the threading parameter.
def threaded(fn):
    """Decorator that runs the wrapped callable in a background thread.

    The decorated call returns immediately with the started
    ``threading.Thread`` so the caller may ``join()`` it if needed.
    """
    from functools import wraps  # local import to keep module imports untouched

    @wraps(fn)  # preserve the wrapped function's name/docstring for debugging
    def wrapper(*args, **kwargs):
        thread = threading.Thread(target=fn, args=args, kwargs=kwargs)
        thread.start()
        return thread
    return wrapper
# Module-level logger for this downloader module.
logger = logging.getLogger(__name__)
# Generic class to download a specific URI
class DownloadResource(object):
    """Downloads a resource URI into an output directory.

    Output filenames may contain the placeholder ``{suffix}``, which is
    replaced with a date suffix (today's date unless overridden via
    ``replace_suffix``).
    """

    def __init__(self, output_dir):
        # Default suffix is today's date, e.g. '2023-01-31'.
        self.suffix = datetime.datetime.today().strftime('%Y-%m-%d')
        self.output_dir = output_dir

    def replace_suffix(self, args):
        """Override the suffix from a parsed-arguments object, if one is given."""
        if args.suffix:
            self.suffix = args.suffix

    def set_filename(self, param_filename):
        """Build the destination path, expanding the '{suffix}' placeholder."""
        return self.output_dir + '/' + param_filename.replace('{suffix}', self.suffix)

    def execute_download(self, resource_info, retry_count=1) -> str:
        """Download ``resource_info.uri`` over HTTP(S) with a progress bar.

        Retries up to ``retry_count`` times on HTTP 5xx errors. Returns the
        destination filename, or None on failure.
        """
        logger.debug("Start to download\n\t{uri} ".format(uri=resource_info.uri))
        try:
            opener = urllib.request.build_opener()
            opener.addheaders = [('User-agent', 'Mozilla/5.0')]
            if resource_info.accept:
                opener.addheaders = [('User-agent', 'Mozilla/5.0'), ('Accept', resource_info.accept)]
            urllib.request.install_opener(opener)
            destination_filename = self.set_filename(resource_info.output_filename)
            with TqdmUpTo(unit='B', unit_scale=True, miniters=1,
                          desc=resource_info.uri.split('/')[-1]) as t:  # all optional kwargs
                urllib.request.urlretrieve(resource_info.uri, destination_filename,
                                           reporthook=t.update_to, data=None)
            return destination_filename
        except urllib.error.URLError as e:
            # BUGFIX: logging takes lazy %-style arguments, not print-style
            # positional strings; the original logger.error('Download error:',
            # e.reason) caused an internal logging formatting error.
            logger.error('Download error: %s', e.reason)
            if retry_count > 0:
                # Retry only on server-side (5xx) errors.
                if hasattr(e, 'code') and 500 <= e.code < 600:
                    return self.execute_download(resource_info, retry_count - 1)
            return None
        except IOError as io_error:
            logger.error("IOError: {io_error}".format(io_error=io_error))
            return None
        except Exception as e:
            logger.error("Error: {msg}".format(msg=e))
            return None

    @threaded
    def execute_download_threaded(self, resource_info):
        """Run execute_download in a background thread (returns the Thread)."""
        self.execute_download(resource_info)

    def ftp_download(self, resource_info: Dict) -> str:
        """Download via FTP, falling back to spawning curl on failure.

        NOTE(review): the curl subprocess is not waited on, so the returned
        file may not be complete (or exist) when this method returns.
        """
        print("Start to download\n\t{uri} ".format(uri=resource_info.uri))
        try:
            filename = self.set_filename(resource_info.output_filename)
            urllib.request.urlretrieve(resource_info.uri, filename)
            urllib.request.urlcleanup()
        except Exception:
            logger.error("Warning: FTP! {file}".format(file=resource_info.uri))
            # EBI FTP started to reply ConnectionResetError: [Errno 104] Connection reset by peer.
            # I had an exchange of email with sysinfo, they suggested us to use wget.
            # NOTE(review): the command actually run is curl, not wget.
            cmd = 'curl ' + resource_info.uri + ' --output ' + filename
            logger.info("wget attempt {cmd}".format(cmd=cmd))
            subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
        return filename
| [
"logging.getLogger",
"threading.Thread",
"datetime.datetime.today",
"subprocess.Popen"
] | [((447, 474), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (464, 474), False, 'import logging\n'), ((317, 370), 'threading.Thread', 'threading.Thread', ([], {'target': 'fn', 'args': 'args', 'kwargs': 'kwargs'}), '(target=fn, args=args, kwargs=kwargs)\n', (333, 370), False, 'import threading\n'), ((611, 636), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (634, 636), False, 'import datetime\n'), ((3235, 3292), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(True)', 'stdout': 'subprocess.PIPE'}), '(cmd, shell=True, stdout=subprocess.PIPE)\n', (3251, 3292), False, 'import subprocess\n')] |
import argparse
import os
import ScanInPlexCommon as Common
from ScanInPlexConfiguration import Configure
from ScanInPlexUninstaller import Uninstall
from ScanInPlexScanner import Scanner
class ScanInPlexRouter:
    """Routes command-line invocations to the configure/scan/uninstall handlers."""

    def __init__(self):
        """Mark the router invalid (and bail out) when not running on Windows."""
        self.valid = True
        if os.name.lower() != 'nt':
            self.valid = False
            print_error(f'os "{os.name}" detected, Windows required.')
            os.system('pause')
            return

    def run(self):
        """Parse the command line and dispatch to exactly one top-level command."""
        if not self.valid:
            return

        parser = argparse.ArgumentParser(usage='ScanInPlex.py [-h] [-c [-p HOST] [-t TOKEN] [-w] [-v | -q]] | [-s -d DIR] | -u [-q]')
        parser.add_argument('-c', '--configure', action="store_true", help="Configure ScanInPlex")
        parser.add_argument('-p', '--host', help='Plex host (e.g. http://localhost:32400)')
        parser.add_argument('-t', '--token', help='Plex token')
        parser.add_argument('-w', '--web', action='store_true', help='Scan via web requests instead of Plex Media Scanner.exe. NOTE: this will store your Plex authentication token in plain text')
        parser.add_argument('-v', '--verbose', action='store_true', help='Show verbose output')
        parser.add_argument('-q', '--quiet', action='store_true', help='Only show error messages')
        parser.add_argument('-s', '--scan', help='Scan a folder in Plex', action="store_true")
        parser.add_argument('-d', '--directory', help='Folder to scan')
        parser.add_argument('-u', '--uninstall', action="store_true", help='Uninstall Scan in Plex (delete regkeys)')
        cmd_args = parser.parse_args()

        # Exactly one of configure/scan/uninstall may be requested at a time.
        requested = [cmd_args.configure, cmd_args.scan, cmd_args.uninstall]
        if sum(bool(flag) for flag in requested) > 1:
            print_error('Cannot specify multiple top-level commands (configure, scan, uninstall)')
            return

        if cmd_args.configure:
            Configure(cmd_args).configure()
        elif cmd_args.scan:
            Scanner(cmd_args).scan()
        elif cmd_args.uninstall:
            Uninstall(cmd_args).uninstall()
def print_error(msg):
    """Print an error message followed by an exit notice."""
    print(f'ERROR: {msg}')
    # FIX: was f'Exiting...' — an f-string with no placeholders (lint F541).
    print('Exiting...')
if __name__ == '__main__':
ScanInPlexRouter().run() | [
"os.name.lower",
"argparse.ArgumentParser",
"ScanInPlexScanner.Scanner",
"ScanInPlexConfiguration.Configure",
"os.system",
"ScanInPlexUninstaller.Uninstall"
] | [((534, 660), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'usage': '"""ScanInPlex.py [-h] [-c [-p HOST] [-t TOKEN] [-w] [-v | -q]] | [-s -d DIR] | -u [-q]"""'}), "(usage=\n 'ScanInPlex.py [-h] [-c [-p HOST] [-t TOKEN] [-w] [-v | -q]] | [-s -d DIR] | -u [-q]'\n )\n", (557, 660), False, 'import argparse\n'), ((274, 289), 'os.name.lower', 'os.name.lower', ([], {}), '()\n', (287, 289), False, 'import os\n'), ((413, 431), 'os.system', 'os.system', (['"""pause"""'], {}), "('pause')\n", (422, 431), False, 'import os\n'), ((1913, 1932), 'ScanInPlexConfiguration.Configure', 'Configure', (['cmd_args'], {}), '(cmd_args)\n', (1922, 1932), False, 'from ScanInPlexConfiguration import Configure\n'), ((1985, 2002), 'ScanInPlexScanner.Scanner', 'Scanner', (['cmd_args'], {}), '(cmd_args)\n', (1992, 2002), False, 'from ScanInPlexScanner import Scanner\n'), ((2055, 2074), 'ScanInPlexUninstaller.Uninstall', 'Uninstall', (['cmd_args'], {}), '(cmd_args)\n', (2064, 2074), False, 'from ScanInPlexUninstaller import Uninstall\n')] |
import json
import logging
from rabbitmq_client import RMQProducer, QueueParams
from util import get_arg
from defs import CLI_HUME_UUID, HINTCommand
# Module-level logger.
LOGGER = logging.getLogger(__name__)
# Name of HINT's master command queue.
HINT_MASTER_COMMAND_QUEUE = "hint_master"
# Module-level producer, assigned by init() before any publish() call.
_producer: RMQProducer
# Durable queue parameters for the HINT master command queue.
_hint_queue_params = QueueParams(HINT_MASTER_COMMAND_QUEUE, durable=True)
def init(producer_instance):
    """
    Store the module-level producer used by publish(). Must be called before
    any message is published from this module.

    :type producer_instance: rabbitmq_client.RMQProducer
    """
    global _producer
    _producer = producer_instance
def encode_hint_command(command: dict):
    """Formats a HINT command.

    NOTE: mutates the passed dict in place by stamping it with this HUME's
    UUID before serializing it to JSON.
    """
    command["uuid"] = get_arg(CLI_HUME_UUID)

    return json.dumps(command)
def publish(command: dict):
    """Encode a command and send it to the HINT master command queue."""
    payload = encode_hint_command(command)
    _producer.publish(payload, queue_params=_hint_queue_params)  # noqa
def devices_discovered(devices):
    """
    Forward a device-discovery result to HINT. The DC message format matches
    what HINT expects, so entries are passed through unchanged.

    :type devices: [Device]
    """
    LOGGER.info("sending discover devices result to HINT")

    content = []
    for device in devices:
        content.append({"name": device.name, "identifier": device.uuid})

    publish({"type": HINTCommand.DISCOVER_DEVICES, "content": content})
def attach_failure(device):
    """
    Report to HINT that attaching the given device failed.

    :param device: Device
    """
    LOGGER.info("sending attach failure to HINT")

    content = {"identifier": device.uuid, "success": False}
    publish({"type": HINTCommand.ATTACH_DEVICE, "content": content})
def action_response(device, action_type, info: dict):
    """
    Report the outcome of an action request back to HINT.

    :param device: device the action was executed on
    :param action_type: type of the executed action
    :param info: information about the action
    """
    LOGGER.info("sending action response to HINT")

    publish({
        "type": action_type,
        "device_uuid": device.uuid,
        "content": info,
    })
| [
"logging.getLogger",
"util.get_arg",
"json.dumps",
"rabbitmq_client.QueueParams"
] | [((161, 188), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (178, 188), False, 'import logging\n'), ((276, 328), 'rabbitmq_client.QueueParams', 'QueueParams', (['HINT_MASTER_COMMAND_QUEUE'], {'durable': '(True)'}), '(HINT_MASTER_COMMAND_QUEUE, durable=True)\n', (287, 328), False, 'from rabbitmq_client import RMQProducer, QueueParams\n'), ((586, 608), 'util.get_arg', 'get_arg', (['CLI_HUME_UUID'], {}), '(CLI_HUME_UUID)\n', (593, 608), False, 'from util import get_arg\n'), ((620, 639), 'json.dumps', 'json.dumps', (['command'], {}), '(command)\n', (630, 639), False, 'import json\n')] |
import pygame
from pygame.locals import *
import cv2
import numpy as np
import sys
import os
from time import sleep
import random
import tensorflow as tf
from utils import visualization_utils as viz_utils
class RockPaperScissors():
    """Pygame rock-paper-scissors game against the computer, where the player's
    hand sign is recognized from the webcam by a TensorFlow object-detection
    model. Constructing the class immediately runs the game loop.
    """
    def __init__(self):
        """Load the detector, set up pygame/camera/assets, then start the game loop."""
        pygame.init()
        # TENSORFLOW MODEL
        self.detect_fn = tf.saved_model.load('../tensorflow_object_detection_api/inference_graph/saved_model')
        # Label map for the detector's class ids (note: id 4 maps to 'rock' as well).
        self.category_index = {
            1: {'id': 1, 'name': 'rock'},
            2: {'id': 2, 'name': 'paper'},
            3: {'id': 3, 'name': 'scissors'},
            4: {'id': 4, 'name': 'rock'},
            5: {'id': 5, 'name': 'quit'}
        }
        # PYGAME
        self.camera = cv2.VideoCapture(0)
        pygame.display.set_caption("Rock-Paper-Scissors")
        self.screen = pygame.display.set_mode([1000,480])
        # IMAGES
        self.computer_img = pygame.image.load("icons/computer.png")
        self.rock_img = pygame.image.load("icons/rock2.png")
        self.rock_img = pygame.transform.scale(self.rock_img, (80, 80))
        self.paper_img = pygame.image.load("icons/paper3.png")
        self.paper_img = pygame.transform.scale(self.paper_img, (80, 80))
        self.scissors_img = pygame.image.load("icons/scissors2.png")
        self.scissors_img = pygame.transform.scale(self.scissors_img, (80, 80))
        self.results_img = pygame.image.load("icons/results.png")
        self.results_img = pygame.transform.scale(self.results_img, (1000-640, 50))
        # FONTS
        self.font = pygame.font.Font('freesansbold.ttf', 32)
        self.countdown_font = pygame.font.Font('freesansbold.ttf', 50)
        # COLORS
        self.white = (255, 255, 255)
        self.gray = (220, 220, 220)
        self.red = (255, 0, 0)
        self.green = (0, 255, 0)
        # GAME VARIABLES
        self.SIGNS = ["rock", "paper", "scissors", "quit", "other"]
        self.GAME_ON = True
        self.START_GAME = False
        self.USER_POINTS, self.COMPUTER_POINTS = 0, 0
        # w/h hold the camera frame size; updated each loop iteration in main().
        self.w, self.h = 100, 100
        self.comp_center_coords = (170, self.h//2 - 80)
        self.computer_choice, self.user_choice = "other", "paper"
        self.countdown_started = False
        # START GAME
        self.main()
    ### DESTRUCTOR ###
    def __del__(self):
        # NOTE(review): calling sys.exit() from __del__ is unusual and forces
        # interpreter shutdown when the object is garbage collected.
        pygame.quit()
        self.camera.release()
        cv2.destroyAllWindows()
        sys.exit(0)
    ### COUNTDOWN TO COMPUTER CHOICE AND SIGNS COMPARISON BETWEEN USER AND COMPUTER ###
    def start_countdown(self, start_ticks):
        """Render elapsed seconds since start_ticks; return (still_counting, seconds)."""
        seconds=(pygame.time.get_ticks()-start_ticks)/1000
        count = self.countdown_font.render(str(int(seconds)), False, self.white)
        self.screen.blit(count, (170, self.h//2 - 80))
        if seconds >= 3.99:
            return False, seconds
        else: return True, seconds
    ### CHOOSE COMPUTER SIGN AND RETURN ITS ICON ###
    def show_computer_choice(self):
        """Pick a random sign for the computer; return (sign_name, sign_icon)."""
        choice = random.choice(self.SIGNS[:-2])
        if choice == "paper":
            choice_img = self.paper_img
        elif choice == "rock":
            choice_img = self.rock_img
        elif choice == "scissors":
            choice_img = self.scissors_img
        return choice, choice_img
    ### SHOW COMPUTER AND USER SCORE ON THE BOTTOM ###
    def show_points(self):
        """Draw the score bar with computer and user points."""
        self.screen.blit(self.results_img, (0, self.h-50))
        count = self.font.render(f"{self.COMPUTER_POINTS}         {self.USER_POINTS}", False, self.white)
        self.screen.blit(count, (80, self.h-40))
    ### COMPARE COMPUTER'S AND USER'S SIGNS AND JUDGE WHO WINS THE ROUND ###
    def compare_signs(self, user_sign, comp_sign, GAME_ON, user_points, comp_points):
        """Judge one round; return (GAME_ON, user_points, comp_points, verdict_surface).

        An unrecognized user sign ("other") counts as a point for the computer;
        "quit" ends the game.
        """
        if user_sign == "quit":
            verdict = "YOU QUITED"
            GAME_ON = False
        elif user_sign == "other":
            comp_points += 1
            verdict = "POINT FOR PC!"
        elif user_sign == comp_sign:
            verdict = "  IT'S A DRAW!"
        else:
            if user_sign == "scissors":
                if comp_sign == "rock":
                    verdict = "POINT FOR PC!"
                    comp_points += 1
                else:
                    verdict = "POINT FOR YOU!"
                    user_points += 1
            elif user_sign == "rock":
                if comp_sign == "paper":
                    verdict = "POINT FOR PC!"
                    comp_points += 1
                else:
                    verdict = "POINT FOR YOU!"
                    user_points += 1
            elif user_sign == "paper":
                if comp_sign == "scissors":
                    verdict = "POINT FOR PC!"
                    comp_points += 1
                else:
                    verdict = "POINT FOR YOU!"
                    user_points += 1
        # choose verdict's colour
        if "DRAW" in verdict or "QUIT" in verdict:
            color = self.gray
        elif "YOU" in verdict:
            color = self.green
        else:
            color = self.red
        return GAME_ON, user_points, comp_points, self.font.render(verdict, False, color)
    ### CONVERT FRAME TO NUMPY ARRAY AND RESHAPE IT ###
    def load_image_into_numpy_array(self, image):
        """Return the frame as a (height, width, 3) uint8 numpy array."""
        (im_height, im_width) = image.shape[:2]
        return np.array(image).reshape(
            (im_height, im_width, 3)).astype(np.uint8)
    ### DRAW RECTANGLE ON HAND AND RETURN CHOSEN SIGN ###
    def detect_hand(self, frame, game_start):
        """Run the detector on the frame; return (annotated frame, detected sign name)."""
        # if game hasn't started yet, exit the function
        if not game_start:
            return frame, self.user_choice
        frame_np = self.load_image_into_numpy_array(frame)
        # Model expects a batch dimension: (1, height, width, 3).
        input_tensor = np.expand_dims(frame_np, 0)
        detections = self.detect_fn(input_tensor)
        viz_utils.visualize_boxes_and_labels_on_image_array(
            frame_np,
            detections['detection_boxes'][0].numpy(),
            detections['detection_classes'][0].numpy().astype(np.int32),
            detections['detection_scores'][0].numpy(),
            self.category_index,
            use_normalized_coordinates=True,
            max_boxes_to_draw=1,
            min_score_thresh=.4,
            skip_scores=True,
            skip_labels=True,
            agnostic_mode=False
        )
        # choose the second detection from the array
        user_choice = self.category_index[detections['detection_classes'][0].numpy().astype(np.int32)[1]]
        return frame_np, user_choice["name"]
    ### MAIN FUNCTION ###
    def main(self):
        """Main game loop: capture frames, detect the sign, run rounds, draw UI."""
        while self.GAME_ON:
            ret, frame = self.camera.read()
            # start detecting hand when user starts the game
            frame, self.user_choice = self.detect_hand(frame, self.START_GAME)
            # expand the game window on the left by filling it with colour
            # and displaying computer icon
            self.screen.fill([4, 47, 102])
            frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
            self.h, self.w = frame.shape[:2]
            # Rotate the frame so it displays upright as a pygame surface.
            frame = np.rot90(frame)
            frame = pygame.surfarray.make_surface(frame)
            self.screen.blit(frame, (1000 - self.w,0))
            self.screen.blit(self.computer_img, ( (750 - self.w) // 2,100))
            # if game is not started, wait for any key to be pressed
            if not self.START_GAME:
                start_game1 = self.font.render('Press any key', False, self.white)
                smile = self.countdown_font.render(":)", False, self.white)
                start_game2 = self.font.render('to START', False, self.white)
                self.screen.blit(start_game1, (70, 50))
                self.screen.blit(smile, (170, self.h//2 - 80))
                self.screen.blit(start_game2, (100, self.h-100))
            else:
                # if the game is on, show the user and computer score
                self.show_points()
                user_choice_text = self.font.render(self.user_choice, False, self.white)
                self.screen.blit(user_choice_text, (400, 30))
                # if the countdown hasn't started yet, begin it
                if not self.countdown_started:
                    start_ticks=pygame.time.get_ticks()
                self.countdown_started, secs = self.start_countdown(start_ticks)
                # if nearly 4 seconds have passed, compare user's and computer's signs
                # show the verdict and update score
                if secs >= 3.99:
                    start_ticks = pygame.time.get_ticks()
                    self.computer_choice, computer_choice_img = self.show_computer_choice()
                    self.GAME_ON, self.USER_POINTS, self.COMPUTER_POINTS, VERDICT = self.compare_signs(self.user_choice,
                                                                                                      self.computer_choice,
                                                                                                      self.GAME_ON,
                                                                                                      self.USER_POINTS,
                                                                                                      self.COMPUTER_POINTS)
                    # Busy-wait while showing the round's verdict on screen.
                    secs2 = 0
                    while secs2 < 4:
                        self.screen.blit(computer_choice_img, (145, 140))
                        self.screen.blit(VERDICT, (60, 50))
                        pygame.display.update()
                        secs2 += .001
            pygame.display.update()
            # exit the game pressing "Q" key
            for event in pygame.event.get():
                if event.type == KEYDOWN:
                    if event.key == pygame.K_q:
                        self.GAME_ON = False
                    else:
                        self.START_GAME = True
if __name__ == "__main__":
rps_game = RockPaperScissors() | [
"pygame.init",
"pygame.quit",
"numpy.array",
"cv2.destroyAllWindows",
"numpy.rot90",
"sys.exit",
"pygame.font.Font",
"pygame.surfarray.make_surface",
"pygame.transform.scale",
"tensorflow.saved_model.load",
"pygame.time.get_ticks",
"pygame.display.set_mode",
"pygame.image.load",
"pygame.di... | [((257, 270), 'pygame.init', 'pygame.init', ([], {}), '()\n', (268, 270), False, 'import pygame\n'), ((314, 404), 'tensorflow.saved_model.load', 'tf.saved_model.load', (['"""../tensorflow_object_detection_api/inference_graph/saved_model"""'], {}), "(\n '../tensorflow_object_detection_api/inference_graph/saved_model')\n", (333, 404), True, 'import tensorflow as tf\n'), ((644, 663), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (660, 663), False, 'import cv2\n'), ((666, 715), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Rock-Paper-Scissors"""'], {}), "('Rock-Paper-Scissors')\n", (692, 715), False, 'import pygame\n'), ((732, 768), 'pygame.display.set_mode', 'pygame.display.set_mode', (['[1000, 480]'], {}), '([1000, 480])\n', (755, 768), False, 'import pygame\n'), ((803, 842), 'pygame.image.load', 'pygame.image.load', (['"""icons/computer.png"""'], {}), "('icons/computer.png')\n", (820, 842), False, 'import pygame\n'), ((862, 898), 'pygame.image.load', 'pygame.image.load', (['"""icons/rock2.png"""'], {}), "('icons/rock2.png')\n", (879, 898), False, 'import pygame\n'), ((918, 965), 'pygame.transform.scale', 'pygame.transform.scale', (['self.rock_img', '(80, 80)'], {}), '(self.rock_img, (80, 80))\n', (940, 965), False, 'import pygame\n'), ((986, 1023), 'pygame.image.load', 'pygame.image.load', (['"""icons/paper3.png"""'], {}), "('icons/paper3.png')\n", (1003, 1023), False, 'import pygame\n'), ((1044, 1092), 'pygame.transform.scale', 'pygame.transform.scale', (['self.paper_img', '(80, 80)'], {}), '(self.paper_img, (80, 80))\n', (1066, 1092), False, 'import pygame\n'), ((1116, 1156), 'pygame.image.load', 'pygame.image.load', (['"""icons/scissors2.png"""'], {}), "('icons/scissors2.png')\n", (1133, 1156), False, 'import pygame\n'), ((1180, 1231), 'pygame.transform.scale', 'pygame.transform.scale', (['self.scissors_img', '(80, 80)'], {}), '(self.scissors_img, (80, 80))\n', (1202, 1231), False, 'import pygame\n'), ((1254, 
1292), 'pygame.image.load', 'pygame.image.load', (['"""icons/results.png"""'], {}), "('icons/results.png')\n", (1271, 1292), False, 'import pygame\n'), ((1315, 1373), 'pygame.transform.scale', 'pygame.transform.scale', (['self.results_img', '(1000 - 640, 50)'], {}), '(self.results_img, (1000 - 640, 50))\n', (1337, 1373), False, 'import pygame\n'), ((1398, 1438), 'pygame.font.Font', 'pygame.font.Font', (['"""freesansbold.ttf"""', '(32)'], {}), "('freesansbold.ttf', 32)\n", (1414, 1438), False, 'import pygame\n'), ((1463, 1503), 'pygame.font.Font', 'pygame.font.Font', (['"""freesansbold.ttf"""', '(50)'], {}), "('freesansbold.ttf', 50)\n", (1479, 1503), False, 'import pygame\n'), ((2061, 2074), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (2072, 2074), False, 'import pygame\n'), ((2101, 2124), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (2122, 2124), False, 'import cv2\n'), ((2127, 2138), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2135, 2138), False, 'import sys\n'), ((2621, 2651), 'random.choice', 'random.choice', (['self.SIGNS[:-2]'], {}), '(self.SIGNS[:-2])\n', (2634, 2651), False, 'import random\n'), ((5092, 5119), 'numpy.expand_dims', 'np.expand_dims', (['frame_np', '(0)'], {}), '(frame_np, 0)\n', (5106, 5119), True, 'import numpy as np\n'), ((6156, 6194), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2RGB'], {}), '(frame, cv2.COLOR_BGR2RGB)\n', (6168, 6194), False, 'import cv2\n'), ((6243, 6258), 'numpy.rot90', 'np.rot90', (['frame'], {}), '(frame)\n', (6251, 6258), True, 'import numpy as np\n'), ((6270, 6306), 'pygame.surfarray.make_surface', 'pygame.surfarray.make_surface', (['frame'], {}), '(frame)\n', (6299, 6306), False, 'import pygame\n'), ((8006, 8029), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (8027, 8029), False, 'import pygame\n'), ((8083, 8101), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (8099, 8101), False, 'import pygame\n'), ((2279, 2302), 'pygame.time.get_ticks', 
'pygame.time.get_ticks', ([], {}), '()\n', (2300, 2302), False, 'import pygame\n'), ((7179, 7202), 'pygame.time.get_ticks', 'pygame.time.get_ticks', ([], {}), '()\n', (7200, 7202), False, 'import pygame\n'), ((7428, 7451), 'pygame.time.get_ticks', 'pygame.time.get_ticks', ([], {}), '()\n', (7449, 7451), False, 'import pygame\n'), ((4741, 4756), 'numpy.array', 'np.array', (['image'], {}), '(image)\n', (4749, 4756), True, 'import numpy as np\n'), ((7956, 7979), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (7977, 7979), False, 'import pygame\n')] |
# -*- coding: utf-8 -*-
###########################################################################
## Python code generated with wxFormBuilder (version 3.10.0-35-gd79d7781)
## http://www.wxformbuilder.org/
##
## PLEASE DO *NOT* EDIT THIS FILE!
###########################################################################
from bitmap_panel import BitmapPanel
from part_select_panel import PartSelectPanel
import wx
import wx.xrc
import wx.stc
###########################################################################
## Class BLR_LMGR_FRAME
###########################################################################
class BLR_LMGR_FRAME ( wx.Frame ):
def __init__( self, parent ):
wx.Frame.__init__ ( self, parent, id = wx.ID_ANY, title = u"BLRevive Loadout Manager", pos = wx.DefaultPosition, size = wx.Size( 1280,720 ), style = wx.DEFAULT_FRAME_STYLE|wx.TAB_TRAVERSAL )
self.SetSizeHints( wx.DefaultSize, wx.DefaultSize )
self.SetBackgroundColour( wx.Colour( 0, 0, 64 ) )
bSizer_blrlm_main = wx.BoxSizer( wx.HORIZONTAL )
bSizer3 = wx.BoxSizer( wx.VERTICAL )
self.m_panel_blrlm_preview = BitmapPanel( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_STATIC|wx.TAB_TRAVERSAL )
self.m_panel_blrlm_preview.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_SCROLLBAR ) )
bSizer_blrlm_preview = wx.BoxSizer( wx.VERTICAL )
bSizer_blrlm_preview.SetMinSize( wx.Size( 420,-1 ) )
self.m_bitmap_blrlm_preview = wx.StaticBitmap( self.m_panel_blrlm_preview, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.Size( 260,132 ), wx.BORDER_SIMPLE )
bSizer_blrlm_preview.Add( self.m_bitmap_blrlm_preview, 0, wx.ALIGN_CENTER|wx.ALL|wx.FIXED_MINSIZE, 4 )
self.m_panel_blrlm_preview.SetSizer( bSizer_blrlm_preview )
self.m_panel_blrlm_preview.Layout()
bSizer_blrlm_preview.Fit( self.m_panel_blrlm_preview )
bSizer3.Add( self.m_panel_blrlm_preview, 0, wx.FIXED_MINSIZE|wx.LEFT|wx.RIGHT|wx.TOP, 8 )
self.m_panel_partselect = BitmapPanel( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_STATIC|wx.TAB_TRAVERSAL )
self.m_panel_partselect.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_SCROLLBAR ) )
bSizer19 = wx.BoxSizer( wx.VERTICAL )
bSizerPartSelect = wx.BoxSizer( wx.VERTICAL )
self.m_panel_partselect_re1 = wx.Panel( self.m_panel_partselect, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_SIMPLE )
self.m_panel_partselect_re1.SetBackgroundColour( wx.Colour( 0, 64, 128 ) )
bSizerPSRE1 = wx.BoxSizer( wx.HORIZONTAL )
self.m_bmToggleBtn_blrlm_receiver = wx.BitmapToggleButton( self.m_panel_partselect_re1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.Size( 32,32 ), wx.BORDER_NONE )
self.m_bmToggleBtn_blrlm_receiver.SetValue( True )
self.m_bmToggleBtn_blrlm_receiver.SetBitmap( wx.NullBitmap )
self.m_bmToggleBtn_blrlm_receiver.SetBackgroundColour( wx.Colour( 48, 48, 48 ) )
bSizerPSRE1.Add( self.m_bmToggleBtn_blrlm_receiver, 0, wx.ALL, 0 )
self.m_bitmap_blrlm_receiver = wx.StaticBitmap( self.m_panel_partselect_re1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_bitmap_blrlm_receiver.SetMinSize( wx.Size( 64,32 ) )
self.m_bitmap_blrlm_receiver.SetMaxSize( wx.Size( 64,32 ) )
bSizerPSRE1.Add( self.m_bitmap_blrlm_receiver, 0, wx.LEFT|wx.RIGHT, 8 )
self.m_staticText_blrlm_receiver = wx.StaticText( self.m_panel_partselect_re1, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText_blrlm_receiver.Wrap( -1 )
self.m_staticText_blrlm_receiver.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_HIGHLIGHTTEXT ) )
bSizerPSRE1.Add( self.m_staticText_blrlm_receiver, 1, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 0 )
self.m_bpButton_blrlm_receiver_reset = wx.BitmapButton( self.m_panel_partselect_re1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.Size( 32,32 ), wx.BU_AUTODRAW|0|wx.BORDER_NONE )
self.m_bpButton_blrlm_receiver_reset.SetBitmapPosition( wx.BOTTOM )
self.m_bpButton_blrlm_receiver_reset.SetBackgroundColour( wx.Colour( 0, 64, 128 ) )
bSizerPSRE1.Add( self.m_bpButton_blrlm_receiver_reset, 0, wx.ALL, 0 )
self.m_panel_partselect_re1.SetSizer( bSizerPSRE1 )
self.m_panel_partselect_re1.Layout()
bSizerPSRE1.Fit( self.m_panel_partselect_re1 )
bSizerPartSelect.Add( self.m_panel_partselect_re1, 0, wx.EXPAND |wx.ALL, 4 )
self.m_panel_partselect_mz1 = wx.Panel( self.m_panel_partselect, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_SIMPLE )
self.m_panel_partselect_mz1.SetBackgroundColour( wx.Colour( 0, 64, 128 ) )
bSizerPSMZ1 = wx.BoxSizer( wx.HORIZONTAL )
self.m_bmToggleBtn_blrlm_muzzle = wx.BitmapToggleButton( self.m_panel_partselect_mz1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.Size( 32,32 ), wx.BORDER_NONE )
self.m_bmToggleBtn_blrlm_muzzle.SetBitmap( wx.NullBitmap )
self.m_bmToggleBtn_blrlm_muzzle.SetBackgroundColour( wx.Colour( 48, 48, 48 ) )
bSizerPSMZ1.Add( self.m_bmToggleBtn_blrlm_muzzle, 0, wx.ALL, 0 )
self.m_bitmap_blrlm_muzzle = wx.StaticBitmap( self.m_panel_partselect_mz1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_bitmap_blrlm_muzzle.SetMinSize( wx.Size( 64,32 ) )
self.m_bitmap_blrlm_muzzle.SetMaxSize( wx.Size( 64,32 ) )
bSizerPSMZ1.Add( self.m_bitmap_blrlm_muzzle, 0, wx.LEFT|wx.RIGHT, 8 )
self.m_staticText_blrlm_muzzle = wx.StaticText( self.m_panel_partselect_mz1, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText_blrlm_muzzle.Wrap( -1 )
self.m_staticText_blrlm_muzzle.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_HIGHLIGHTTEXT ) )
bSizerPSMZ1.Add( self.m_staticText_blrlm_muzzle, 1, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 0 )
self.m_bpButton_blrlm_muzzle_reset = wx.BitmapButton( self.m_panel_partselect_mz1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.Size( 32,32 ), wx.BU_AUTODRAW|0|wx.BORDER_NONE )
self.m_bpButton_blrlm_muzzle_reset.SetBitmapPosition( wx.BOTTOM )
self.m_bpButton_blrlm_muzzle_reset.SetBackgroundColour( wx.Colour( 0, 64, 128 ) )
bSizerPSMZ1.Add( self.m_bpButton_blrlm_muzzle_reset, 0, wx.ALL, 0 )
self.m_panel_partselect_mz1.SetSizer( bSizerPSMZ1 )
self.m_panel_partselect_mz1.Layout()
bSizerPSMZ1.Fit( self.m_panel_partselect_mz1 )
bSizerPartSelect.Add( self.m_panel_partselect_mz1, 0, wx.EXPAND |wx.ALL, 4 )
self.m_panel_partselect_gp1 = wx.Panel( self.m_panel_partselect, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_SIMPLE )
self.m_panel_partselect_gp1.SetBackgroundColour( wx.Colour( 0, 64, 128 ) )
bSizerPSGP1 = wx.BoxSizer( wx.HORIZONTAL )
self.m_bmToggleBtn_blrlm_grip = wx.BitmapToggleButton( self.m_panel_partselect_gp1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.Size( 32,32 ), wx.BORDER_NONE )
self.m_bmToggleBtn_blrlm_grip.SetBitmap( wx.NullBitmap )
self.m_bmToggleBtn_blrlm_grip.SetBackgroundColour( wx.Colour( 48, 48, 48 ) )
bSizerPSGP1.Add( self.m_bmToggleBtn_blrlm_grip, 0, wx.ALL, 0 )
self.m_bitmap_blrlm_grip = wx.StaticBitmap( self.m_panel_partselect_gp1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_bitmap_blrlm_grip.SetMinSize( wx.Size( 64,32 ) )
self.m_bitmap_blrlm_grip.SetMaxSize( wx.Size( 64,32 ) )
bSizerPSGP1.Add( self.m_bitmap_blrlm_grip, 0, wx.LEFT|wx.RIGHT, 8 )
self.m_staticText_blrlm_grip = wx.StaticText( self.m_panel_partselect_gp1, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText_blrlm_grip.Wrap( -1 )
self.m_staticText_blrlm_grip.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_HIGHLIGHTTEXT ) )
bSizerPSGP1.Add( self.m_staticText_blrlm_grip, 1, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 0 )
self.m_bpButton_blrlm_grip_reset = wx.BitmapButton( self.m_panel_partselect_gp1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.Size( 32,32 ), wx.BU_AUTODRAW|0|wx.BORDER_NONE )
self.m_bpButton_blrlm_grip_reset.SetBackgroundColour( wx.Colour( 0, 64, 128 ) )
bSizerPSGP1.Add( self.m_bpButton_blrlm_grip_reset, 0, wx.ALL, 0 )
self.m_panel_partselect_gp1.SetSizer( bSizerPSGP1 )
self.m_panel_partselect_gp1.Layout()
bSizerPSGP1.Fit( self.m_panel_partselect_gp1 )
bSizerPartSelect.Add( self.m_panel_partselect_gp1, 0, wx.EXPAND |wx.ALL, 4 )
self.m_panel_partselect_ba1 = wx.Panel( self.m_panel_partselect, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_SIMPLE )
self.m_panel_partselect_ba1.SetBackgroundColour( wx.Colour( 0, 64, 128 ) )
bSizerPSBA1 = wx.BoxSizer( wx.HORIZONTAL )
self.m_bmToggleBtn_blrlm_barrel = wx.BitmapToggleButton( self.m_panel_partselect_ba1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.Size( 32,32 ), wx.BORDER_NONE )
self.m_bmToggleBtn_blrlm_barrel.SetBitmap( wx.NullBitmap )
self.m_bmToggleBtn_blrlm_barrel.SetBackgroundColour( wx.Colour( 48, 48, 48 ) )
bSizerPSBA1.Add( self.m_bmToggleBtn_blrlm_barrel, 0, wx.ALL, 0 )
self.m_bitmap_blrlm_barrel = wx.StaticBitmap( self.m_panel_partselect_ba1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_bitmap_blrlm_barrel.SetMinSize( wx.Size( 64,32 ) )
self.m_bitmap_blrlm_barrel.SetMaxSize( wx.Size( 64,32 ) )
bSizerPSBA1.Add( self.m_bitmap_blrlm_barrel, 0, wx.LEFT|wx.RIGHT, 8 )
self.m_staticText_blrlm_barrel = wx.StaticText( self.m_panel_partselect_ba1, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText_blrlm_barrel.Wrap( -1 )
self.m_staticText_blrlm_barrel.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_HIGHLIGHTTEXT ) )
bSizerPSBA1.Add( self.m_staticText_blrlm_barrel, 1, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 0 )
self.m_bpButton_blrlm_barrel_reset = wx.BitmapButton( self.m_panel_partselect_ba1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.Size( 32,32 ), wx.BU_AUTODRAW|0|wx.BORDER_NONE )
self.m_bpButton_blrlm_barrel_reset.SetBackgroundColour( wx.Colour( 0, 64, 128 ) )
bSizerPSBA1.Add( self.m_bpButton_blrlm_barrel_reset, 0, wx.ALL, 0 )
self.m_panel_partselect_ba1.SetSizer( bSizerPSBA1 )
self.m_panel_partselect_ba1.Layout()
bSizerPSBA1.Fit( self.m_panel_partselect_ba1 )
bSizerPartSelect.Add( self.m_panel_partselect_ba1, 0, wx.EXPAND |wx.ALL, 4 )
self.m_panel_partselect_mg1 = wx.Panel( self.m_panel_partselect, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_SIMPLE )
self.m_panel_partselect_mg1.SetBackgroundColour( wx.Colour( 0, 64, 128 ) )
bSizerPSMG1 = wx.BoxSizer( wx.HORIZONTAL )
self.m_bmToggleBtn_blrlm_magazine = wx.BitmapToggleButton( self.m_panel_partselect_mg1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.Size( 32,32 ), wx.BORDER_NONE )
self.m_bmToggleBtn_blrlm_magazine.SetBitmap( wx.NullBitmap )
self.m_bmToggleBtn_blrlm_magazine.SetBackgroundColour( wx.Colour( 48, 48, 48 ) )
bSizerPSMG1.Add( self.m_bmToggleBtn_blrlm_magazine, 0, wx.ALL, 0 )
self.m_bitmap_blrlm_magazine = wx.StaticBitmap( self.m_panel_partselect_mg1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_bitmap_blrlm_magazine.SetMinSize( wx.Size( 64,32 ) )
self.m_bitmap_blrlm_magazine.SetMaxSize( wx.Size( 64,32 ) )
bSizerPSMG1.Add( self.m_bitmap_blrlm_magazine, 0, wx.LEFT|wx.RIGHT, 8 )
self.m_staticText_blrlm_magazine = wx.StaticText( self.m_panel_partselect_mg1, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText_blrlm_magazine.Wrap( -1 )
self.m_staticText_blrlm_magazine.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_HIGHLIGHTTEXT ) )
bSizerPSMG1.Add( self.m_staticText_blrlm_magazine, 1, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 0 )
self.m_bpButton_blrlm_magazine_reset = wx.BitmapButton( self.m_panel_partselect_mg1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.Size( 32,32 ), wx.BU_AUTODRAW|0|wx.BORDER_NONE )
self.m_bpButton_blrlm_magazine_reset.SetBackgroundColour( wx.Colour( 0, 64, 128 ) )
bSizerPSMG1.Add( self.m_bpButton_blrlm_magazine_reset, 0, wx.ALL, 0 )
self.m_panel_partselect_mg1.SetSizer( bSizerPSMG1 )
self.m_panel_partselect_mg1.Layout()
bSizerPSMG1.Fit( self.m_panel_partselect_mg1 )
bSizerPartSelect.Add( self.m_panel_partselect_mg1, 0, wx.EXPAND |wx.ALL, 4 )
self.m_panel_partselect_sc1 = wx.Panel( self.m_panel_partselect, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_SIMPLE )
self.m_panel_partselect_sc1.SetBackgroundColour( wx.Colour( 0, 64, 128 ) )
bSizerPSSC1 = wx.BoxSizer( wx.HORIZONTAL )
self.m_bmToggleBtn_blrlm_scope = wx.BitmapToggleButton( self.m_panel_partselect_sc1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.Size( 32,32 ), wx.BORDER_NONE )
self.m_bmToggleBtn_blrlm_scope.SetBitmap( wx.NullBitmap )
self.m_bmToggleBtn_blrlm_scope.SetBackgroundColour( wx.Colour( 48, 48, 48 ) )
bSizerPSSC1.Add( self.m_bmToggleBtn_blrlm_scope, 0, wx.ALL, 0 )
self.m_bitmap_blrlm_scope = wx.StaticBitmap( self.m_panel_partselect_sc1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_bitmap_blrlm_scope.SetMinSize( wx.Size( 64,32 ) )
self.m_bitmap_blrlm_scope.SetMaxSize( wx.Size( 64,32 ) )
bSizerPSSC1.Add( self.m_bitmap_blrlm_scope, 0, wx.LEFT|wx.RIGHT, 8 )
self.m_staticText_blrlm_scope = wx.StaticText( self.m_panel_partselect_sc1, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText_blrlm_scope.Wrap( -1 )
self.m_staticText_blrlm_scope.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_HIGHLIGHTTEXT ) )
bSizerPSSC1.Add( self.m_staticText_blrlm_scope, 1, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 0 )
self.m_bpButton_blrlm_scope_reset = wx.BitmapButton( self.m_panel_partselect_sc1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.Size( 32,32 ), wx.BU_AUTODRAW|0|wx.BORDER_NONE )
self.m_bpButton_blrlm_scope_reset.SetBackgroundColour( wx.Colour( 0, 64, 128 ) )
bSizerPSSC1.Add( self.m_bpButton_blrlm_scope_reset, 0, wx.ALL, 0 )
self.m_panel_partselect_sc1.SetSizer( bSizerPSSC1 )
self.m_panel_partselect_sc1.Layout()
bSizerPSSC1.Fit( self.m_panel_partselect_sc1 )
bSizerPartSelect.Add( self.m_panel_partselect_sc1, 0, wx.EXPAND |wx.ALL, 4 )
self.m_panel_partselect_st1 = wx.Panel( self.m_panel_partselect, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_SIMPLE )
self.m_panel_partselect_st1.SetBackgroundColour( wx.Colour( 0, 64, 128 ) )
bSizerPSST1 = wx.BoxSizer( wx.HORIZONTAL )
self.m_bmToggleBtn_blrlm_stock = wx.BitmapToggleButton( self.m_panel_partselect_st1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.Size( 32,32 ), wx.BORDER_NONE )
self.m_bmToggleBtn_blrlm_stock.SetBitmap( wx.NullBitmap )
self.m_bmToggleBtn_blrlm_stock.SetBackgroundColour( wx.Colour( 48, 48, 48 ) )
bSizerPSST1.Add( self.m_bmToggleBtn_blrlm_stock, 0, wx.ALL, 0 )
self.m_bitmap_blrlm_stock = wx.StaticBitmap( self.m_panel_partselect_st1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_bitmap_blrlm_stock.SetMinSize( wx.Size( 64,32 ) )
self.m_bitmap_blrlm_stock.SetMaxSize( wx.Size( 64,32 ) )
bSizerPSST1.Add( self.m_bitmap_blrlm_stock, 0, wx.LEFT|wx.RIGHT, 8 )
self.m_staticText_blrlm_stock = wx.StaticText( self.m_panel_partselect_st1, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText_blrlm_stock.Wrap( -1 )
self.m_staticText_blrlm_stock.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_HIGHLIGHTTEXT ) )
bSizerPSST1.Add( self.m_staticText_blrlm_stock, 1, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 0 )
self.m_bpButton_blrlm_stock_reset = wx.BitmapButton( self.m_panel_partselect_st1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.Size( 32,32 ), wx.BU_AUTODRAW|0|wx.BORDER_NONE )
self.m_bpButton_blrlm_stock_reset.SetBackgroundColour( wx.Colour( 0, 64, 128 ) )
bSizerPSST1.Add( self.m_bpButton_blrlm_stock_reset, 0, wx.ALL, 0 )
self.m_panel_partselect_st1.SetSizer( bSizerPSST1 )
self.m_panel_partselect_st1.Layout()
bSizerPSST1.Fit( self.m_panel_partselect_st1 )
bSizerPartSelect.Add( self.m_panel_partselect_st1, 0, wx.EXPAND |wx.ALL, 4 )
self.m_panel_partselect_tg1 = wx.Panel( self.m_panel_partselect, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_SIMPLE )
self.m_panel_partselect_tg1.SetBackgroundColour( wx.Colour( 0, 64, 128 ) )
bSizerPSTG1 = wx.BoxSizer( wx.HORIZONTAL )
self.m_bmToggleBtn_blrlm_tag = wx.BitmapToggleButton( self.m_panel_partselect_tg1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.Size( 32,32 ), wx.BORDER_NONE )
self.m_bmToggleBtn_blrlm_tag.SetBitmap( wx.NullBitmap )
self.m_bmToggleBtn_blrlm_tag.SetBackgroundColour( wx.Colour( 48, 48, 48 ) )
bSizerPSTG1.Add( self.m_bmToggleBtn_blrlm_tag, 0, wx.ALL, 0 )
self.m_bitmap_blrlm_tag = wx.StaticBitmap( self.m_panel_partselect_tg1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_bitmap_blrlm_tag.SetMinSize( wx.Size( 64,32 ) )
self.m_bitmap_blrlm_tag.SetMaxSize( wx.Size( 64,32 ) )
bSizerPSTG1.Add( self.m_bitmap_blrlm_tag, 0, wx.LEFT|wx.RIGHT, 8 )
self.m_staticText_blrlm_tag = wx.StaticText( self.m_panel_partselect_tg1, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText_blrlm_tag.Wrap( -1 )
self.m_staticText_blrlm_tag.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_HIGHLIGHTTEXT ) )
bSizerPSTG1.Add( self.m_staticText_blrlm_tag, 1, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 0 )
self.m_bpButton_blrlm_tag_reset = wx.BitmapButton( self.m_panel_partselect_tg1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.Size( 32,32 ), wx.BU_AUTODRAW|0|wx.BORDER_NONE )
self.m_bpButton_blrlm_tag_reset.SetBackgroundColour( wx.Colour( 0, 64, 128 ) )
bSizerPSTG1.Add( self.m_bpButton_blrlm_tag_reset, 0, wx.ALL, 0 )
self.m_panel_partselect_tg1.SetSizer( bSizerPSTG1 )
self.m_panel_partselect_tg1.Layout()
bSizerPSTG1.Fit( self.m_panel_partselect_tg1 )
bSizerPartSelect.Add( self.m_panel_partselect_tg1, 0, wx.EXPAND |wx.ALL, 4 )
self.m_panel_partselect_cm1 = wx.Panel( self.m_panel_partselect, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_SIMPLE )
self.m_panel_partselect_cm1.SetBackgroundColour( wx.Colour( 0, 64, 128 ) )
bSizerPSCM1 = wx.BoxSizer( wx.HORIZONTAL )
self.m_bmToggleBtn_blrlm_camo = wx.BitmapToggleButton( self.m_panel_partselect_cm1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.Size( 32,32 ), wx.BORDER_NONE )
self.m_bmToggleBtn_blrlm_camo.SetBitmap( wx.NullBitmap )
self.m_bmToggleBtn_blrlm_camo.SetBackgroundColour( wx.Colour( 48, 48, 48 ) )
bSizerPSCM1.Add( self.m_bmToggleBtn_blrlm_camo, 0, wx.ALL, 0 )
self.m_bitmap_blrlm_camo = wx.StaticBitmap( self.m_panel_partselect_cm1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_bitmap_blrlm_camo.SetMinSize( wx.Size( 64,32 ) )
self.m_bitmap_blrlm_camo.SetMaxSize( wx.Size( 64,32 ) )
bSizerPSCM1.Add( self.m_bitmap_blrlm_camo, 0, wx.LEFT|wx.RIGHT, 8 )
self.m_staticText_blrlm_camo = wx.StaticText( self.m_panel_partselect_cm1, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText_blrlm_camo.Wrap( -1 )
self.m_staticText_blrlm_camo.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_HIGHLIGHTTEXT ) )
bSizerPSCM1.Add( self.m_staticText_blrlm_camo, 1, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 0 )
self.m_bpButton_blrlm_camo_reset = wx.BitmapButton( self.m_panel_partselect_cm1, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.Size( 32,32 ), wx.BU_AUTODRAW|0|wx.BORDER_NONE )
self.m_bpButton_blrlm_camo_reset.SetBackgroundColour( wx.Colour( 0, 64, 128 ) )
bSizerPSCM1.Add( self.m_bpButton_blrlm_camo_reset, 0, wx.ALL, 0 )
self.m_panel_partselect_cm1.SetSizer( bSizerPSCM1 )
self.m_panel_partselect_cm1.Layout()
bSizerPSCM1.Fit( self.m_panel_partselect_cm1 )
bSizerPartSelect.Add( self.m_panel_partselect_cm1, 0, wx.EXPAND |wx.ALL, 4 )
self.m_panel14 = PartSelectPanel( self.m_panel_partselect, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_SIMPLE )
bSizerPartSelect.Add( self.m_panel14, 0, wx.EXPAND |wx.ALL, 4 )
bSizer19.Add( bSizerPartSelect, 0, wx.EXPAND, 0 )
bSizer22 = wx.BoxSizer( wx.VERTICAL )
bSizer24 = wx.BoxSizer( wx.HORIZONTAL )
self.m_bmToggleBtnLoadout1 = wx.BitmapToggleButton( self.m_panel_partselect, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_NONE )
self.m_bmToggleBtnLoadout1.SetValue( True )
self.m_bmToggleBtnLoadout1.SetBackgroundColour( wx.Colour( 48, 48, 48 ) )
self.m_bmToggleBtnLoadout1.Enable( False )
self.m_bmToggleBtnLoadout1.SetMinSize( wx.Size( 130,24 ) )
bSizer24.Add( self.m_bmToggleBtnLoadout1, 0, wx.LEFT|wx.RIGHT, 2 )
bSizer24.Add( ( 0, 0), 1, wx.EXPAND, 5 )
self.m_bmToggleBtnLoadout2 = wx.BitmapToggleButton( self.m_panel_partselect, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_NONE )
self.m_bmToggleBtnLoadout2.SetBackgroundColour( wx.Colour( 48, 48, 48 ) )
self.m_bmToggleBtnLoadout2.Enable( False )
self.m_bmToggleBtnLoadout2.SetMinSize( wx.Size( 130,24 ) )
bSizer24.Add( self.m_bmToggleBtnLoadout2, 0, wx.LEFT|wx.RIGHT, 2 )
bSizer24.Add( ( 0, 0), 1, wx.EXPAND, 5 )
self.m_bmToggleBtnLoadout3 = wx.BitmapToggleButton( self.m_panel_partselect, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_NONE )
self.m_bmToggleBtnLoadout3.SetBackgroundColour( wx.Colour( 48, 48, 48 ) )
self.m_bmToggleBtnLoadout3.Enable( False )
self.m_bmToggleBtnLoadout3.SetMinSize( wx.Size( 130,24 ) )
bSizer24.Add( self.m_bmToggleBtnLoadout3, 0, wx.LEFT|wx.RIGHT, 2 )
bSizer22.Add( bSizer24, 0, wx.EXPAND, 0 )
bSizer21 = wx.BoxSizer( wx.HORIZONTAL )
bSizer221 = wx.BoxSizer( wx.HORIZONTAL )
self.m_bmToggleBtnPrimary1 = wx.BitmapToggleButton( self.m_panel_partselect, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_NONE )
self.m_bmToggleBtnPrimary1.SetBackgroundColour( wx.Colour( 48, 48, 48 ) )
self.m_bmToggleBtnPrimary1.SetMinSize( wx.Size( 64,64 ) )
bSizer221.Add( self.m_bmToggleBtnPrimary1, 0, wx.LEFT|wx.RIGHT|wx.TOP, 2 )
self.m_bmToggleBtnSecondary1 = wx.BitmapToggleButton( self.m_panel_partselect, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_NONE )
self.m_bmToggleBtnSecondary1.SetBackgroundColour( wx.Colour( 48, 48, 48 ) )
self.m_bmToggleBtnSecondary1.SetMinSize( wx.Size( 64,64 ) )
bSizer221.Add( self.m_bmToggleBtnSecondary1, 0, wx.BOTTOM|wx.RIGHT|wx.TOP, 2 )
bSizer21.Add( bSizer221, 0, wx.EXPAND, 5 )
bSizer21.Add( ( 0, 0), 1, wx.EXPAND, 5 )
bSizer211 = wx.BoxSizer( wx.HORIZONTAL )
self.m_bmToggleBtnPrimary2 = wx.BitmapToggleButton( self.m_panel_partselect, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_NONE )
self.m_bmToggleBtnPrimary2.SetBackgroundColour( wx.Colour( 48, 48, 48 ) )
self.m_bmToggleBtnPrimary2.SetMinSize( wx.Size( 64,64 ) )
bSizer211.Add( self.m_bmToggleBtnPrimary2, 0, wx.LEFT|wx.RIGHT|wx.TOP, 2 )
self.m_bmToggleBtnSecondary2 = wx.BitmapToggleButton( self.m_panel_partselect, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_NONE )
self.m_bmToggleBtnSecondary2.SetBackgroundColour( wx.Colour( 48, 48, 48 ) )
self.m_bmToggleBtnSecondary2.SetMinSize( wx.Size( 64,64 ) )
bSizer211.Add( self.m_bmToggleBtnSecondary2, 0, wx.BOTTOM|wx.RIGHT|wx.TOP, 2 )
bSizer21.Add( bSizer211, 0, wx.EXPAND, 5 )
bSizer21.Add( ( 0, 0), 1, wx.EXPAND, 5 )
bSizer2111 = wx.BoxSizer( wx.HORIZONTAL )
self.m_bmToggleBtnPrimary3 = wx.BitmapToggleButton( self.m_panel_partselect, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_NONE )
self.m_bmToggleBtnPrimary3.SetBackgroundColour( wx.Colour( 48, 48, 48 ) )
self.m_bmToggleBtnPrimary3.SetMinSize( wx.Size( 64,64 ) )
bSizer2111.Add( self.m_bmToggleBtnPrimary3, 0, wx.LEFT|wx.RIGHT|wx.TOP, 2 )
self.m_bmToggleBtnSecondary3 = wx.BitmapToggleButton( self.m_panel_partselect, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_NONE )
self.m_bmToggleBtnSecondary3.SetBackgroundColour( wx.Colour( 48, 48, 48 ) )
self.m_bmToggleBtnSecondary3.SetMinSize( wx.Size( 64,64 ) )
bSizer2111.Add( self.m_bmToggleBtnSecondary3, 0, wx.BOTTOM|wx.RIGHT|wx.TOP, 2 )
bSizer21.Add( bSizer2111, 0, wx.EXPAND, 5 )
bSizer22.Add( bSizer21, 1, wx.EXPAND, 5 )
bSizer19.Add( bSizer22, 0, wx.ALL|wx.EXPAND, 4 )
self.m_panel_partselect.SetSizer( bSizer19 )
self.m_panel_partselect.Layout()
bSizer19.Fit( self.m_panel_partselect )
bSizer3.Add( self.m_panel_partselect, 1, wx.EXPAND |wx.ALL, 8 )
bSizer_blrlm_main.Add( bSizer3, 0, wx.EXPAND, 0 )
bSizer10 = wx.BoxSizer( wx.VERTICAL )
self.m_listCtrl_blrlm_selector = wx.ListCtrl( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LC_HRULES|wx.LC_REPORT|wx.LC_SINGLE_SEL|wx.BORDER_SIMPLE )
self.m_listCtrl_blrlm_selector.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_WINDOWTEXT ) )
self.m_listCtrl_blrlm_selector.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_WINDOW ) )
self.m_listCtrl_blrlm_selector.SetMinSize( wx.Size( 720,480 ) )
bSizer10.Add( self.m_listCtrl_blrlm_selector, 1, wx.BOTTOM|wx.EXPAND|wx.RIGHT|wx.TOP, 8 )
self.m_panel11 = BitmapPanel( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.BORDER_STATIC )
bSizer11 = wx.BoxSizer( wx.HORIZONTAL )
bSizer14 = wx.BoxSizer( wx.VERTICAL )
bSizer12 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText6 = wx.StaticText( self.m_panel11, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText6.Wrap( -1 )
self.m_staticText6.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_HIGHLIGHTTEXT ) )
bSizer12.Add( self.m_staticText6, 0, wx.LEFT, 4 )
bSizer14.Add( bSizer12, 1, wx.EXPAND, 5 )
bSizer11.Add( bSizer14, 1, wx.EXPAND, 5 )
bSizer15 = wx.BoxSizer( wx.HORIZONTAL )
bSizer15.SetMinSize( wx.Size( 512,-1 ) )
self.m_panel121 = wx.Panel( self.m_panel11, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL )
self.m_panel121.Enable( False )
self.m_panel121.Hide()
bSizer121 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText61 = wx.StaticText( self.m_panel121, wx.ID_ANY, u"Export Path:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText61.Wrap( -1 )
self.m_staticText61.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_HIGHLIGHTTEXT ) )
self.m_staticText61.Enable( False )
self.m_staticText61.Hide()
bSizer121.Add( self.m_staticText61, 0, wx.ALIGN_CENTER_VERTICAL|wx.LEFT, 4 )
self.m_dirPicker1 = wx.DirPickerCtrl( self.m_panel121, wx.ID_ANY, wx.EmptyString, u"Select a folder", wx.DefaultPosition, wx.DefaultSize, wx.DIRP_DIR_MUST_EXIST|wx.DIRP_USE_TEXTCTRL )
self.m_dirPicker1.Enable( False )
self.m_dirPicker1.Hide()
bSizer121.Add( self.m_dirPicker1, 0, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 4 )
self.m_button_export_loadout = wx.Button( self.m_panel121, wx.ID_ANY, u"Generate Loadout", wx.DefaultPosition, wx.DefaultSize, 0|wx.BORDER_THEME )
bSizer121.Add( self.m_button_export_loadout, 0, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 4 )
self.m_panel121.SetSizer( bSizer121 )
self.m_panel121.Layout()
bSizer121.Fit( self.m_panel121 )
bSizer15.Add( self.m_panel121, 0, wx.ALL, 4 )
self.m_scintilla1 = wx.stc.StyledTextCtrl( self.m_panel11, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, 0)
self.m_scintilla1.SetUseTabs ( False )
self.m_scintilla1.SetTabWidth ( 4 )
self.m_scintilla1.SetIndent ( 4 )
self.m_scintilla1.SetTabIndents( True )
self.m_scintilla1.SetBackSpaceUnIndents( True )
self.m_scintilla1.SetViewEOL( False )
self.m_scintilla1.SetViewWhiteSpace( False )
self.m_scintilla1.SetMarginWidth( 2, 0 )
self.m_scintilla1.SetIndentationGuides( False )
self.m_scintilla1.SetReadOnly( False );
self.m_scintilla1.SetMarginWidth( 1, 0 )
self.m_scintilla1.SetMarginWidth ( 0, 0 )
self.m_scintilla1.MarkerDefine( wx.stc.STC_MARKNUM_FOLDER, wx.stc.STC_MARK_BOXPLUS )
self.m_scintilla1.MarkerSetBackground( wx.stc.STC_MARKNUM_FOLDER, wx.BLACK)
self.m_scintilla1.MarkerSetForeground( wx.stc.STC_MARKNUM_FOLDER, wx.WHITE)
self.m_scintilla1.MarkerDefine( wx.stc.STC_MARKNUM_FOLDEROPEN, wx.stc.STC_MARK_BOXMINUS )
self.m_scintilla1.MarkerSetBackground( wx.stc.STC_MARKNUM_FOLDEROPEN, wx.BLACK )
self.m_scintilla1.MarkerSetForeground( wx.stc.STC_MARKNUM_FOLDEROPEN, wx.WHITE )
self.m_scintilla1.MarkerDefine( wx.stc.STC_MARKNUM_FOLDERSUB, wx.stc.STC_MARK_EMPTY )
self.m_scintilla1.MarkerDefine( wx.stc.STC_MARKNUM_FOLDEREND, wx.stc.STC_MARK_BOXPLUS )
self.m_scintilla1.MarkerSetBackground( wx.stc.STC_MARKNUM_FOLDEREND, wx.BLACK )
self.m_scintilla1.MarkerSetForeground( wx.stc.STC_MARKNUM_FOLDEREND, wx.WHITE )
self.m_scintilla1.MarkerDefine( wx.stc.STC_MARKNUM_FOLDEROPENMID, wx.stc.STC_MARK_BOXMINUS )
self.m_scintilla1.MarkerSetBackground( wx.stc.STC_MARKNUM_FOLDEROPENMID, wx.BLACK)
self.m_scintilla1.MarkerSetForeground( wx.stc.STC_MARKNUM_FOLDEROPENMID, wx.WHITE)
self.m_scintilla1.MarkerDefine( wx.stc.STC_MARKNUM_FOLDERMIDTAIL, wx.stc.STC_MARK_EMPTY )
self.m_scintilla1.MarkerDefine( wx.stc.STC_MARKNUM_FOLDERTAIL, wx.stc.STC_MARK_EMPTY )
self.m_scintilla1.SetSelBackground( True, wx.SystemSettings.GetColour(wx.SYS_COLOUR_HIGHLIGHT ) )
self.m_scintilla1.SetSelForeground( True, wx.SystemSettings.GetColour(wx.SYS_COLOUR_HIGHLIGHTTEXT ) )
bSizer15.Add( self.m_scintilla1, 1, wx.ALL|wx.EXPAND, 4 )
bSizer11.Add( bSizer15, 0, wx.EXPAND, 5 )
self.m_panel11.SetSizer( bSizer11 )
self.m_panel11.Layout()
bSizer11.Fit( self.m_panel11 )
bSizer10.Add( self.m_panel11, 1, wx.BOTTOM|wx.EXPAND|wx.RIGHT, 8 )
bSizer_blrlm_main.Add( bSizer10, 1, wx.EXPAND, 0 )
self.SetSizer( bSizer_blrlm_main )
self.Layout()
self.m_menubar1 = wx.MenuBar( 0|wx.BORDER_THEME|wx.CLIP_CHILDREN )
self.file = wx.Menu()
self.m_menuItem_file_playername = wx.MenuItem( self.file, wx.ID_ANY, u"Change Player Name", wx.EmptyString, wx.ITEM_NORMAL )
self.file.Append( self.m_menuItem_file_playername )
self.m_menuItem_file_playername.Enable( False )
self.m_menuItem_file_clearloadouts = wx.MenuItem( self.file, wx.ID_ANY, u"Clear All Loadouts", wx.EmptyString, wx.ITEM_NORMAL )
self.file.Append( self.m_menuItem_file_clearloadouts )
self.m_menuItem_file_clearloadouts.Enable( False )
self.m_menuItem_file_savesession = wx.MenuItem( self.file, wx.ID_ANY, u"Save Session", wx.EmptyString, wx.ITEM_NORMAL )
self.file.Append( self.m_menuItem_file_savesession )
self.m_menuItem_file_loadsession = wx.MenuItem( self.file, wx.ID_ANY, u"Load Session", wx.EmptyString, wx.ITEM_NORMAL )
self.file.Append( self.m_menuItem_file_loadsession )
self.m_menuItem_file_autosave = wx.MenuItem( self.file, wx.ID_ANY, u"Save Session on Exit", wx.EmptyString, wx.ITEM_CHECK )
self.file.Append( self.m_menuItem_file_autosave )
self.m_menuItem_file_autosave.Check( True )
self.m_menubar1.Append( self.file, u"File" )
self.edit = wx.Menu()
self.m_menuItem_edit_swapweapon = wx.MenuItem( self.edit, wx.ID_ANY, u"Swap Weapon", wx.EmptyString, wx.ITEM_NORMAL )
self.edit.Append( self.m_menuItem_edit_swapweapon )
self.m_menuItem_edit_swapweapon.Enable( False )
self.m_menubar1.Append( self.edit, u"Edit" )
self.view = wx.Menu()
self.m_menuItem_view_0 = wx.MenuItem( self.view, wx.ID_ANY, u"Some checkbox thing", wx.EmptyString, wx.ITEM_CHECK )
self.view.Append( self.m_menuItem_view_0 )
self.m_menuItem_view_0.Enable( False )
self.m_menubar1.Append( self.view, u"View" )
self.tools = wx.Menu()
self.m_menubar1.Append( self.tools, u"Tools" )
self.help = wx.Menu()
self.m_menuItem_about = wx.MenuItem( self.help, wx.ID_ANY, u"About", wx.EmptyString, wx.ITEM_NORMAL )
self.help.Append( self.m_menuItem_about )
self.m_menuItem_about.Enable( False )
self.m_menubar1.Append( self.help, u"Help" )
self.SetMenuBar( self.m_menubar1 )
self.Centre( wx.BOTH )
# Connect Events
self.Bind( wx.EVT_CLOSE, self.BLR_LMGR_FRAMEOnClose )
self.m_panel_partselect_re1.Bind( wx.EVT_LEFT_UP, self.m_panel_partselect_re1OnLeftUp )
self.m_bmToggleBtn_blrlm_receiver.Bind( wx.EVT_TOGGLEBUTTON, self.m_bmToggleBtn_blrlm_receiverOnToggleButton )
self.m_bitmap_blrlm_receiver.Bind( wx.EVT_LEFT_UP, self.m_bitmap_blrlm_receiverOnLeftUp )
self.m_staticText_blrlm_receiver.Bind( wx.EVT_LEFT_UP, self.m_staticText_blrlm_receiverOnLeftUp )
self.m_bpButton_blrlm_receiver_reset.Bind( wx.EVT_BUTTON, self.m_bpButton_blrlm_receiver_resetOnButtonClick )
self.m_panel_partselect_mz1.Bind( wx.EVT_LEFT_UP, self.m_panel_partselect_mz1OnLeftUp )
self.m_bmToggleBtn_blrlm_muzzle.Bind( wx.EVT_TOGGLEBUTTON, self.m_bmToggleBtn_blrlm_muzzleOnToggleButton )
self.m_bitmap_blrlm_muzzle.Bind( wx.EVT_LEFT_UP, self.m_bitmap_blrlm_muzzleOnLeftUp )
self.m_staticText_blrlm_muzzle.Bind( wx.EVT_LEFT_UP, self.m_staticText_blrlm_muzzleOnLeftUp )
self.m_bpButton_blrlm_muzzle_reset.Bind( wx.EVT_BUTTON, self.m_bpButton_blrlm_muzzle_resetOnButtonClick )
self.m_panel_partselect_gp1.Bind( wx.EVT_LEFT_UP, self.m_panel_partselect_gp1OnLeftUp )
self.m_bmToggleBtn_blrlm_grip.Bind( wx.EVT_TOGGLEBUTTON, self.m_bmToggleBtn_blrlm_gripOnToggleButton )
self.m_bitmap_blrlm_grip.Bind( wx.EVT_LEFT_UP, self.m_bitmap_blrlm_gripOnLeftUp )
self.m_staticText_blrlm_grip.Bind( wx.EVT_LEFT_UP, self.m_staticText_blrlm_gripOnLeftUp )
self.m_bpButton_blrlm_grip_reset.Bind( wx.EVT_BUTTON, self.m_bpButton_blrlm_grip_resetOnButtonClick )
self.m_panel_partselect_ba1.Bind( wx.EVT_LEFT_UP, self.m_panel_partselect_ba1OnLeftUp )
self.m_bmToggleBtn_blrlm_barrel.Bind( wx.EVT_TOGGLEBUTTON, self.m_bmToggleBtn_blrlm_barrelOnToggleButton )
self.m_bitmap_blrlm_barrel.Bind( wx.EVT_LEFT_UP, self.m_bitmap_blrlm_barrelOnLeftUp )
self.m_staticText_blrlm_barrel.Bind( wx.EVT_LEFT_UP, self.m_staticText_blrlm_barrelOnLeftUp )
self.m_bpButton_blrlm_barrel_reset.Bind( wx.EVT_BUTTON, self.m_bpButton_blrlm_barrel_resetOnButtonClick )
self.m_panel_partselect_mg1.Bind( wx.EVT_LEFT_UP, self.m_panel_partselect_mg1OnLeftUp )
self.m_bmToggleBtn_blrlm_magazine.Bind( wx.EVT_TOGGLEBUTTON, self.m_bmToggleBtn_blrlm_magazineOnToggleButton )
self.m_bitmap_blrlm_magazine.Bind( wx.EVT_LEFT_UP, self.m_bitmap_blrlm_magazineOnLeftUp )
self.m_staticText_blrlm_magazine.Bind( wx.EVT_LEFT_UP, self.m_staticText_blrlm_magazineOnLeftUp )
self.m_bpButton_blrlm_magazine_reset.Bind( wx.EVT_BUTTON, self.m_bpButton_blrlm_magazine_resetOnButtonClick )
self.m_panel_partselect_sc1.Bind( wx.EVT_LEFT_UP, self.m_panel_partselect_sc1OnLeftUp )
self.m_bmToggleBtn_blrlm_scope.Bind( wx.EVT_TOGGLEBUTTON, self.m_bmToggleBtn_blrlm_scopeOnToggleButton )
self.m_bitmap_blrlm_scope.Bind( wx.EVT_LEFT_UP, self.m_bitmap_blrlm_scopeOnLeftUp )
self.m_staticText_blrlm_scope.Bind( wx.EVT_LEFT_UP, self.m_staticText_blrlm_scopeOnLeftUp )
self.m_bpButton_blrlm_scope_reset.Bind( wx.EVT_BUTTON, self.m_bpButton_blrlm_scope_resetOnButtonClick )
self.m_panel_partselect_st1.Bind( wx.EVT_LEFT_UP, self.m_panel_partselect_st1OnLeftUp )
self.m_bmToggleBtn_blrlm_stock.Bind( wx.EVT_TOGGLEBUTTON, self.m_bmToggleBtn_blrlm_stockOnToggleButton )
self.m_bitmap_blrlm_stock.Bind( wx.EVT_LEFT_UP, self.m_bitmap_blrlm_stockOnLeftUp )
self.m_staticText_blrlm_stock.Bind( wx.EVT_LEFT_UP, self.m_staticText_blrlm_stockOnLeftUp )
self.m_bpButton_blrlm_stock_reset.Bind( wx.EVT_BUTTON, self.m_bpButton_blrlm_stock_resetOnButtonClick )
self.m_panel_partselect_tg1.Bind( wx.EVT_LEFT_UP, self.m_panel_partselect_tg1OnLeftUp )
self.m_bmToggleBtn_blrlm_tag.Bind( wx.EVT_TOGGLEBUTTON, self.m_bmToggleBtn_blrlm_tagOnToggleButton )
self.m_bitmap_blrlm_tag.Bind( wx.EVT_LEFT_UP, self.m_bitmap_blrlm_tagOnLeftUp )
self.m_staticText_blrlm_tag.Bind( wx.EVT_LEFT_UP, self.m_staticText_blrlm_tagOnLeftUp )
self.m_bpButton_blrlm_tag_reset.Bind( wx.EVT_BUTTON, self.m_bpButton_blrlm_tag_resetOnButtonClick )
self.m_panel_partselect_cm1.Bind( wx.EVT_LEFT_UP, self.m_panel_partselect_cm1OnLeftUp )
self.m_bmToggleBtn_blrlm_camo.Bind( wx.EVT_TOGGLEBUTTON, self.m_bmToggleBtn_blrlm_camoOnToggleButton )
self.m_bitmap_blrlm_camo.Bind( wx.EVT_LEFT_UP, self.m_bitmap_blrlm_camoOnLeftUp )
self.m_staticText_blrlm_camo.Bind( wx.EVT_LEFT_UP, self.m_staticText_blrlm_camoOnLeftUp )
self.m_bpButton_blrlm_camo_reset.Bind( wx.EVT_BUTTON, self.m_bpButton_blrlm_camo_resetOnButtonClick )
self.m_bmToggleBtnLoadout1.Bind( wx.EVT_TOGGLEBUTTON, self.m_bmToggleBtnLoadout1OnToggleButton )
self.m_bmToggleBtnLoadout2.Bind( wx.EVT_TOGGLEBUTTON, self.m_bmToggleBtnLoadout2OnToggleButton )
self.m_bmToggleBtnLoadout3.Bind( wx.EVT_TOGGLEBUTTON, self.m_bmToggleBtnLoadout3OnToggleButton )
self.m_bmToggleBtnPrimary1.Bind( wx.EVT_TOGGLEBUTTON, self.m_bmToggleBtnPrimary1OnToggleButton )
self.m_bmToggleBtnSecondary1.Bind( wx.EVT_TOGGLEBUTTON, self.m_bmToggleBtnSecondary1OnToggleButton )
self.m_bmToggleBtnPrimary2.Bind( wx.EVT_TOGGLEBUTTON, self.m_bmToggleBtnPrimary2OnToggleButton )
self.m_bmToggleBtnSecondary2.Bind( wx.EVT_TOGGLEBUTTON, self.m_bmToggleBtnSecondary2OnToggleButton )
self.m_bmToggleBtnPrimary3.Bind( wx.EVT_TOGGLEBUTTON, self.m_bmToggleBtnPrimary3OnToggleButton )
self.m_bmToggleBtnSecondary3.Bind( wx.EVT_TOGGLEBUTTON, self.m_bmToggleBtnSecondary3OnToggleButton )
self.m_listCtrl_blrlm_selector.Bind( wx.EVT_LIST_ITEM_ACTIVATED, self.m_listCtrl_blrlm_selectorOnListItemActivated )
self.m_listCtrl_blrlm_selector.Bind( wx.EVT_LIST_ITEM_FOCUSED, self.m_listCtrl_blrlm_selectorOnListItemFocused )
self.m_button_export_loadout.Bind( wx.EVT_BUTTON, self.m_button_export_loadoutOnButtonClick )
self.m_scintilla1.Bind( wx.EVT_LEFT_DCLICK, self.m_scintilla1OnLeftDClick )
self.Bind( wx.EVT_MENU, self.m_menuItem_file_playernameOnMenuSelection, id = self.m_menuItem_file_playername.GetId() )
self.Bind( wx.EVT_MENU, self.m_menuItem_file_clearloadoutsOnMenuSelection, id = self.m_menuItem_file_clearloadouts.GetId() )
self.Bind( wx.EVT_MENU, self.m_menuItem_file_savesessionOnMenuSelection, id = self.m_menuItem_file_savesession.GetId() )
self.Bind( wx.EVT_MENU, self.m_menuItem_file_loadsessionOnMenuSelection, id = self.m_menuItem_file_loadsession.GetId() )
self.Bind( wx.EVT_MENU, self.m_menuItem_file_autosaveOnMenuSelection, id = self.m_menuItem_file_autosave.GetId() )
self.Bind( wx.EVT_MENU, self.m_menuItem_aboutOnMenuSelection, id = self.m_menuItem_about.GetId() )
def __del__( self ):
    # wxFormBuilder-generated destructor stub: nothing to release here; it
    # exists so subclasses have a stable hook for teardown if they need one.
    pass
# Virtual event handlers: override them in your derived class
# wxFormBuilder-generated virtual event handlers for the frame and the
# per-part selector widgets. Each stub calls event.Skip() so wx continues
# normal event propagation; override the handlers you need in a derived
# class rather than editing this generated file.
def BLR_LMGR_FRAMEOnClose( self, event ):
    # Frame close request; Skip() lets the default close processing run.
    event.Skip()

# -- receiver part widgets (panel / toggle / bitmap / label / reset) --
def m_panel_partselect_re1OnLeftUp( self, event ):
    event.Skip()

def m_bmToggleBtn_blrlm_receiverOnToggleButton( self, event ):
    event.Skip()

def m_bitmap_blrlm_receiverOnLeftUp( self, event ):
    event.Skip()

def m_staticText_blrlm_receiverOnLeftUp( self, event ):
    event.Skip()

def m_bpButton_blrlm_receiver_resetOnButtonClick( self, event ):
    event.Skip()

# -- muzzle part widgets --
def m_panel_partselect_mz1OnLeftUp( self, event ):
    event.Skip()

def m_bmToggleBtn_blrlm_muzzleOnToggleButton( self, event ):
    event.Skip()

def m_bitmap_blrlm_muzzleOnLeftUp( self, event ):
    event.Skip()

def m_staticText_blrlm_muzzleOnLeftUp( self, event ):
    event.Skip()

def m_bpButton_blrlm_muzzle_resetOnButtonClick( self, event ):
    event.Skip()

# -- grip part widgets --
def m_panel_partselect_gp1OnLeftUp( self, event ):
    event.Skip()

def m_bmToggleBtn_blrlm_gripOnToggleButton( self, event ):
    event.Skip()

def m_bitmap_blrlm_gripOnLeftUp( self, event ):
    event.Skip()

def m_staticText_blrlm_gripOnLeftUp( self, event ):
    event.Skip()

def m_bpButton_blrlm_grip_resetOnButtonClick( self, event ):
    event.Skip()

# -- barrel part widgets --
def m_panel_partselect_ba1OnLeftUp( self, event ):
    event.Skip()

def m_bmToggleBtn_blrlm_barrelOnToggleButton( self, event ):
    event.Skip()

def m_bitmap_blrlm_barrelOnLeftUp( self, event ):
    event.Skip()

def m_staticText_blrlm_barrelOnLeftUp( self, event ):
    event.Skip()

def m_bpButton_blrlm_barrel_resetOnButtonClick( self, event ):
    event.Skip()

# -- magazine part widgets --
def m_panel_partselect_mg1OnLeftUp( self, event ):
    event.Skip()

def m_bmToggleBtn_blrlm_magazineOnToggleButton( self, event ):
    event.Skip()

def m_bitmap_blrlm_magazineOnLeftUp( self, event ):
    event.Skip()

def m_staticText_blrlm_magazineOnLeftUp( self, event ):
    event.Skip()

def m_bpButton_blrlm_magazine_resetOnButtonClick( self, event ):
    event.Skip()

# -- scope part widgets --
def m_panel_partselect_sc1OnLeftUp( self, event ):
    event.Skip()

def m_bmToggleBtn_blrlm_scopeOnToggleButton( self, event ):
    event.Skip()

def m_bitmap_blrlm_scopeOnLeftUp( self, event ):
    event.Skip()

def m_staticText_blrlm_scopeOnLeftUp( self, event ):
    event.Skip()

def m_bpButton_blrlm_scope_resetOnButtonClick( self, event ):
    event.Skip()

# -- stock part widgets --
def m_panel_partselect_st1OnLeftUp( self, event ):
    event.Skip()

def m_bmToggleBtn_blrlm_stockOnToggleButton( self, event ):
    event.Skip()

def m_bitmap_blrlm_stockOnLeftUp( self, event ):
    event.Skip()

def m_staticText_blrlm_stockOnLeftUp( self, event ):
    event.Skip()

def m_bpButton_blrlm_stock_resetOnButtonClick( self, event ):
    event.Skip()

# -- tag part widgets --
def m_panel_partselect_tg1OnLeftUp( self, event ):
    event.Skip()

def m_bmToggleBtn_blrlm_tagOnToggleButton( self, event ):
    event.Skip()

def m_bitmap_blrlm_tagOnLeftUp( self, event ):
    event.Skip()

def m_staticText_blrlm_tagOnLeftUp( self, event ):
    event.Skip()

def m_bpButton_blrlm_tag_resetOnButtonClick( self, event ):
    event.Skip()

# -- camo part widgets --
def m_panel_partselect_cm1OnLeftUp( self, event ):
    event.Skip()

def m_bmToggleBtn_blrlm_camoOnToggleButton( self, event ):
    event.Skip()

def m_bitmap_blrlm_camoOnLeftUp( self, event ):
    event.Skip()

def m_staticText_blrlm_camoOnLeftUp( self, event ):
    event.Skip()

def m_bpButton_blrlm_camo_resetOnButtonClick( self, event ):
    event.Skip()
# Generated stubs for the loadout (1-3) and primary/secondary weapon-slot
# toggle buttons. Skip() keeps the default toggle processing; override in
# a derived class to react to the selection change.
def m_bmToggleBtnLoadout1OnToggleButton( self, event ):
    event.Skip()

def m_bmToggleBtnLoadout2OnToggleButton( self, event ):
    event.Skip()

def m_bmToggleBtnLoadout3OnToggleButton( self, event ):
    event.Skip()

def m_bmToggleBtnPrimary1OnToggleButton( self, event ):
    event.Skip()

def m_bmToggleBtnSecondary1OnToggleButton( self, event ):
    event.Skip()

def m_bmToggleBtnPrimary2OnToggleButton( self, event ):
    event.Skip()

def m_bmToggleBtnSecondary2OnToggleButton( self, event ):
    event.Skip()

def m_bmToggleBtnPrimary3OnToggleButton( self, event ):
    event.Skip()

def m_bmToggleBtnSecondary3OnToggleButton( self, event ):
    event.Skip()
# Generated stubs for the selector list control, the export-loadout
# button, and the Scintilla text control. Override in a derived class.
def m_listCtrl_blrlm_selectorOnListItemActivated( self, event ):
    event.Skip()

def m_listCtrl_blrlm_selectorOnListItemFocused( self, event ):
    event.Skip()

def m_button_export_loadoutOnButtonClick( self, event ):
    event.Skip()

def m_scintilla1OnLeftDClick( self, event ):
    event.Skip()
# Generated menu-selection stubs (File menu entries and Help > About).
# Skip() allows default menu processing; override in a derived class.
def m_menuItem_file_playernameOnMenuSelection( self, event ):
    event.Skip()

def m_menuItem_file_clearloadoutsOnMenuSelection( self, event ):
    event.Skip()

def m_menuItem_file_savesessionOnMenuSelection( self, event ):
    event.Skip()

def m_menuItem_file_loadsessionOnMenuSelection( self, event ):
    event.Skip()

def m_menuItem_file_autosaveOnMenuSelection( self, event ):
    event.Skip()

def m_menuItem_aboutOnMenuSelection( self, event ):
    event.Skip()
| [
"part_select_panel.PartSelectPanel",
"wx.Button",
"wx.BitmapToggleButton",
"wx.Colour",
"wx.MenuBar",
"wx.BoxSizer",
"wx.Size",
"wx.StaticText",
"bitmap_panel.BitmapPanel",
"wx.MenuItem",
"wx.SystemSettings.GetColour",
"wx.Menu",
"wx.stc.StyledTextCtrl",
"wx.Panel",
"wx.ListCtrl",
"wx.... | [((1039, 1065), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (1050, 1065), False, 'import wx\n'), ((1087, 1111), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.VERTICAL'], {}), '(wx.VERTICAL)\n', (1098, 1111), False, 'import wx\n'), ((1152, 1258), 'bitmap_panel.BitmapPanel', 'BitmapPanel', (['self', 'wx.ID_ANY', 'wx.DefaultPosition', 'wx.DefaultSize', '(wx.BORDER_STATIC | wx.TAB_TRAVERSAL)'], {}), '(self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.\n BORDER_STATIC | wx.TAB_TRAVERSAL)\n', (1163, 1258), False, 'from bitmap_panel import BitmapPanel\n'), ((1399, 1423), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.VERTICAL'], {}), '(wx.VERTICAL)\n', (1410, 1423), False, 'import wx\n'), ((2077, 2183), 'bitmap_panel.BitmapPanel', 'BitmapPanel', (['self', 'wx.ID_ANY', 'wx.DefaultPosition', 'wx.DefaultSize', '(wx.BORDER_STATIC | wx.TAB_TRAVERSAL)'], {}), '(self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.\n BORDER_STATIC | wx.TAB_TRAVERSAL)\n', (2088, 2183), False, 'from bitmap_panel import BitmapPanel\n'), ((2309, 2333), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.VERTICAL'], {}), '(wx.VERTICAL)\n', (2320, 2333), False, 'import wx\n'), ((2364, 2388), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.VERTICAL'], {}), '(wx.VERTICAL)\n', (2375, 2388), False, 'import wx\n'), ((2430, 2533), 'wx.Panel', 'wx.Panel', (['self.m_panel_partselect', 'wx.ID_ANY', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.BORDER_SIMPLE'], {}), '(self.m_panel_partselect, wx.ID_ANY, wx.DefaultPosition, wx.\n DefaultSize, wx.BORDER_SIMPLE)\n', (2438, 2533), False, 'import wx\n'), ((2637, 2663), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (2648, 2663), False, 'import wx\n'), ((3178, 3292), 'wx.StaticBitmap', 'wx.StaticBitmap', (['self.m_panel_partselect_re1', 'wx.ID_ANY', 'wx.NullBitmap', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], {}), '(self.m_panel_partselect_re1, wx.ID_ANY, wx.NullBitmap, wx.\n DefaultPosition, wx.DefaultSize, 0)\n', (3193, 3292), 
False, 'import wx\n'), ((3551, 3664), 'wx.StaticText', 'wx.StaticText', (['self.m_panel_partselect_re1', 'wx.ID_ANY', 'wx.EmptyString', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], {}), '(self.m_panel_partselect_re1, wx.ID_ANY, wx.EmptyString, wx.\n DefaultPosition, wx.DefaultSize, 0)\n', (3564, 3664), False, 'import wx\n'), ((4664, 4767), 'wx.Panel', 'wx.Panel', (['self.m_panel_partselect', 'wx.ID_ANY', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.BORDER_SIMPLE'], {}), '(self.m_panel_partselect, wx.ID_ANY, wx.DefaultPosition, wx.\n DefaultSize, wx.BORDER_SIMPLE)\n', (4672, 4767), False, 'import wx\n'), ((4871, 4897), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (4882, 4897), False, 'import wx\n'), ((5343, 5457), 'wx.StaticBitmap', 'wx.StaticBitmap', (['self.m_panel_partselect_mz1', 'wx.ID_ANY', 'wx.NullBitmap', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], {}), '(self.m_panel_partselect_mz1, wx.ID_ANY, wx.NullBitmap, wx.\n DefaultPosition, wx.DefaultSize, 0)\n', (5358, 5457), False, 'import wx\n'), ((5708, 5821), 'wx.StaticText', 'wx.StaticText', (['self.m_panel_partselect_mz1', 'wx.ID_ANY', 'wx.EmptyString', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], {}), '(self.m_panel_partselect_mz1, wx.ID_ANY, wx.EmptyString, wx.\n DefaultPosition, wx.DefaultSize, 0)\n', (5721, 5821), False, 'import wx\n'), ((6807, 6910), 'wx.Panel', 'wx.Panel', (['self.m_panel_partselect', 'wx.ID_ANY', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.BORDER_SIMPLE'], {}), '(self.m_panel_partselect, wx.ID_ANY, wx.DefaultPosition, wx.\n DefaultSize, wx.BORDER_SIMPLE)\n', (6815, 6910), False, 'import wx\n'), ((7014, 7040), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (7025, 7040), False, 'import wx\n'), ((7476, 7590), 'wx.StaticBitmap', 'wx.StaticBitmap', (['self.m_panel_partselect_gp1', 'wx.ID_ANY', 'wx.NullBitmap', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], {}), '(self.m_panel_partselect_gp1, wx.ID_ANY, wx.NullBitmap, wx.\n 
DefaultPosition, wx.DefaultSize, 0)\n', (7491, 7590), False, 'import wx\n'), ((7833, 7946), 'wx.StaticText', 'wx.StaticText', (['self.m_panel_partselect_gp1', 'wx.ID_ANY', 'wx.EmptyString', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], {}), '(self.m_panel_partselect_gp1, wx.ID_ANY, wx.EmptyString, wx.\n DefaultPosition, wx.DefaultSize, 0)\n', (7846, 7946), False, 'import wx\n'), ((8845, 8948), 'wx.Panel', 'wx.Panel', (['self.m_panel_partselect', 'wx.ID_ANY', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.BORDER_SIMPLE'], {}), '(self.m_panel_partselect, wx.ID_ANY, wx.DefaultPosition, wx.\n DefaultSize, wx.BORDER_SIMPLE)\n', (8853, 8948), False, 'import wx\n'), ((9052, 9078), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (9063, 9078), False, 'import wx\n'), ((9524, 9638), 'wx.StaticBitmap', 'wx.StaticBitmap', (['self.m_panel_partselect_ba1', 'wx.ID_ANY', 'wx.NullBitmap', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], {}), '(self.m_panel_partselect_ba1, wx.ID_ANY, wx.NullBitmap, wx.\n DefaultPosition, wx.DefaultSize, 0)\n', (9539, 9638), False, 'import wx\n'), ((9889, 10002), 'wx.StaticText', 'wx.StaticText', (['self.m_panel_partselect_ba1', 'wx.ID_ANY', 'wx.EmptyString', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], {}), '(self.m_panel_partselect_ba1, wx.ID_ANY, wx.EmptyString, wx.\n DefaultPosition, wx.DefaultSize, 0)\n', (9902, 10002), False, 'import wx\n'), ((10913, 11016), 'wx.Panel', 'wx.Panel', (['self.m_panel_partselect', 'wx.ID_ANY', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.BORDER_SIMPLE'], {}), '(self.m_panel_partselect, wx.ID_ANY, wx.DefaultPosition, wx.\n DefaultSize, wx.BORDER_SIMPLE)\n', (10921, 11016), False, 'import wx\n'), ((11120, 11146), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (11131, 11146), False, 'import wx\n'), ((11602, 11716), 'wx.StaticBitmap', 'wx.StaticBitmap', (['self.m_panel_partselect_mg1', 'wx.ID_ANY', 'wx.NullBitmap', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], 
{}), '(self.m_panel_partselect_mg1, wx.ID_ANY, wx.NullBitmap, wx.\n DefaultPosition, wx.DefaultSize, 0)\n', (11617, 11716), False, 'import wx\n'), ((11975, 12088), 'wx.StaticText', 'wx.StaticText', (['self.m_panel_partselect_mg1', 'wx.ID_ANY', 'wx.EmptyString', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], {}), '(self.m_panel_partselect_mg1, wx.ID_ANY, wx.EmptyString, wx.\n DefaultPosition, wx.DefaultSize, 0)\n', (11988, 12088), False, 'import wx\n'), ((13011, 13114), 'wx.Panel', 'wx.Panel', (['self.m_panel_partselect', 'wx.ID_ANY', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.BORDER_SIMPLE'], {}), '(self.m_panel_partselect, wx.ID_ANY, wx.DefaultPosition, wx.\n DefaultSize, wx.BORDER_SIMPLE)\n', (13019, 13114), False, 'import wx\n'), ((13218, 13244), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (13229, 13244), False, 'import wx\n'), ((13685, 13799), 'wx.StaticBitmap', 'wx.StaticBitmap', (['self.m_panel_partselect_sc1', 'wx.ID_ANY', 'wx.NullBitmap', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], {}), '(self.m_panel_partselect_sc1, wx.ID_ANY, wx.NullBitmap, wx.\n DefaultPosition, wx.DefaultSize, 0)\n', (13700, 13799), False, 'import wx\n'), ((14046, 14159), 'wx.StaticText', 'wx.StaticText', (['self.m_panel_partselect_sc1', 'wx.ID_ANY', 'wx.EmptyString', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], {}), '(self.m_panel_partselect_sc1, wx.ID_ANY, wx.EmptyString, wx.\n DefaultPosition, wx.DefaultSize, 0)\n', (14059, 14159), False, 'import wx\n'), ((15064, 15167), 'wx.Panel', 'wx.Panel', (['self.m_panel_partselect', 'wx.ID_ANY', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.BORDER_SIMPLE'], {}), '(self.m_panel_partselect, wx.ID_ANY, wx.DefaultPosition, wx.\n DefaultSize, wx.BORDER_SIMPLE)\n', (15072, 15167), False, 'import wx\n'), ((15271, 15297), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (15282, 15297), False, 'import wx\n'), ((15738, 15852), 'wx.StaticBitmap', 'wx.StaticBitmap', 
(['self.m_panel_partselect_st1', 'wx.ID_ANY', 'wx.NullBitmap', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], {}), '(self.m_panel_partselect_st1, wx.ID_ANY, wx.NullBitmap, wx.\n DefaultPosition, wx.DefaultSize, 0)\n', (15753, 15852), False, 'import wx\n'), ((16099, 16212), 'wx.StaticText', 'wx.StaticText', (['self.m_panel_partselect_st1', 'wx.ID_ANY', 'wx.EmptyString', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], {}), '(self.m_panel_partselect_st1, wx.ID_ANY, wx.EmptyString, wx.\n DefaultPosition, wx.DefaultSize, 0)\n', (16112, 16212), False, 'import wx\n'), ((17117, 17220), 'wx.Panel', 'wx.Panel', (['self.m_panel_partselect', 'wx.ID_ANY', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.BORDER_SIMPLE'], {}), '(self.m_panel_partselect, wx.ID_ANY, wx.DefaultPosition, wx.\n DefaultSize, wx.BORDER_SIMPLE)\n', (17125, 17220), False, 'import wx\n'), ((17324, 17350), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (17335, 17350), False, 'import wx\n'), ((17781, 17895), 'wx.StaticBitmap', 'wx.StaticBitmap', (['self.m_panel_partselect_tg1', 'wx.ID_ANY', 'wx.NullBitmap', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], {}), '(self.m_panel_partselect_tg1, wx.ID_ANY, wx.NullBitmap, wx.\n DefaultPosition, wx.DefaultSize, 0)\n', (17796, 17895), False, 'import wx\n'), ((18134, 18247), 'wx.StaticText', 'wx.StaticText', (['self.m_panel_partselect_tg1', 'wx.ID_ANY', 'wx.EmptyString', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], {}), '(self.m_panel_partselect_tg1, wx.ID_ANY, wx.EmptyString, wx.\n DefaultPosition, wx.DefaultSize, 0)\n', (18147, 18247), False, 'import wx\n'), ((19140, 19243), 'wx.Panel', 'wx.Panel', (['self.m_panel_partselect', 'wx.ID_ANY', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.BORDER_SIMPLE'], {}), '(self.m_panel_partselect, wx.ID_ANY, wx.DefaultPosition, wx.\n DefaultSize, wx.BORDER_SIMPLE)\n', (19148, 19243), False, 'import wx\n'), ((19347, 19373), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (19358, 
19373), False, 'import wx\n'), ((19809, 19923), 'wx.StaticBitmap', 'wx.StaticBitmap', (['self.m_panel_partselect_cm1', 'wx.ID_ANY', 'wx.NullBitmap', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], {}), '(self.m_panel_partselect_cm1, wx.ID_ANY, wx.NullBitmap, wx.\n DefaultPosition, wx.DefaultSize, 0)\n', (19824, 19923), False, 'import wx\n'), ((20166, 20279), 'wx.StaticText', 'wx.StaticText', (['self.m_panel_partselect_cm1', 'wx.ID_ANY', 'wx.EmptyString', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], {}), '(self.m_panel_partselect_cm1, wx.ID_ANY, wx.EmptyString, wx.\n DefaultPosition, wx.DefaultSize, 0)\n', (20179, 20279), False, 'import wx\n'), ((21165, 21275), 'part_select_panel.PartSelectPanel', 'PartSelectPanel', (['self.m_panel_partselect', 'wx.ID_ANY', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.BORDER_SIMPLE'], {}), '(self.m_panel_partselect, wx.ID_ANY, wx.DefaultPosition, wx.\n DefaultSize, wx.BORDER_SIMPLE)\n', (21180, 21275), False, 'from part_select_panel import PartSelectPanel\n'), ((21425, 21449), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.VERTICAL'], {}), '(wx.VERTICAL)\n', (21436, 21449), False, 'import wx\n'), ((21472, 21498), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (21483, 21498), False, 'import wx\n'), ((21539, 21668), 'wx.BitmapToggleButton', 'wx.BitmapToggleButton', (['self.m_panel_partselect', 'wx.ID_ANY', 'wx.NullBitmap', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.BORDER_NONE'], {}), '(self.m_panel_partselect, wx.ID_ANY, wx.NullBitmap, wx\n .DefaultPosition, wx.DefaultSize, wx.BORDER_NONE)\n', (21560, 21668), False, 'import wx\n'), ((22083, 22212), 'wx.BitmapToggleButton', 'wx.BitmapToggleButton', (['self.m_panel_partselect', 'wx.ID_ANY', 'wx.NullBitmap', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.BORDER_NONE'], {}), '(self.m_panel_partselect, wx.ID_ANY, wx.NullBitmap, wx\n .DefaultPosition, wx.DefaultSize, wx.BORDER_NONE)\n', (22104, 22212), False, 'import wx\n'), ((22575, 22704), 'wx.BitmapToggleButton', 
'wx.BitmapToggleButton', (['self.m_panel_partselect', 'wx.ID_ANY', 'wx.NullBitmap', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.BORDER_NONE'], {}), '(self.m_panel_partselect, wx.ID_ANY, wx.NullBitmap, wx\n .DefaultPosition, wx.DefaultSize, wx.BORDER_NONE)\n', (22596, 22704), False, 'import wx\n'), ((23050, 23076), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (23061, 23076), False, 'import wx\n'), ((23100, 23126), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (23111, 23126), False, 'import wx\n'), ((23167, 23296), 'wx.BitmapToggleButton', 'wx.BitmapToggleButton', (['self.m_panel_partselect', 'wx.ID_ANY', 'wx.NullBitmap', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.BORDER_NONE'], {}), '(self.m_panel_partselect, wx.ID_ANY, wx.NullBitmap, wx\n .DefaultPosition, wx.DefaultSize, wx.BORDER_NONE)\n', (23188, 23296), False, 'import wx\n'), ((23566, 23695), 'wx.BitmapToggleButton', 'wx.BitmapToggleButton', (['self.m_panel_partselect', 'wx.ID_ANY', 'wx.NullBitmap', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.BORDER_NONE'], {}), '(self.m_panel_partselect, wx.ID_ANY, wx.NullBitmap, wx\n .DefaultPosition, wx.DefaultSize, wx.BORDER_NONE)\n', (23587, 23695), False, 'import wx\n'), ((24058, 24084), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (24069, 24084), False, 'import wx\n'), ((24125, 24254), 'wx.BitmapToggleButton', 'wx.BitmapToggleButton', (['self.m_panel_partselect', 'wx.ID_ANY', 'wx.NullBitmap', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.BORDER_NONE'], {}), '(self.m_panel_partselect, wx.ID_ANY, wx.NullBitmap, wx\n .DefaultPosition, wx.DefaultSize, wx.BORDER_NONE)\n', (24146, 24254), False, 'import wx\n'), ((24524, 24653), 'wx.BitmapToggleButton', 'wx.BitmapToggleButton', (['self.m_panel_partselect', 'wx.ID_ANY', 'wx.NullBitmap', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.BORDER_NONE'], {}), '(self.m_panel_partselect, wx.ID_ANY, wx.NullBitmap, wx\n .DefaultPosition, wx.DefaultSize, 
wx.BORDER_NONE)\n', (24545, 24653), False, 'import wx\n'), ((25017, 25043), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (25028, 25043), False, 'import wx\n'), ((25084, 25213), 'wx.BitmapToggleButton', 'wx.BitmapToggleButton', (['self.m_panel_partselect', 'wx.ID_ANY', 'wx.NullBitmap', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.BORDER_NONE'], {}), '(self.m_panel_partselect, wx.ID_ANY, wx.NullBitmap, wx\n .DefaultPosition, wx.DefaultSize, wx.BORDER_NONE)\n', (25105, 25213), False, 'import wx\n'), ((25484, 25613), 'wx.BitmapToggleButton', 'wx.BitmapToggleButton', (['self.m_panel_partselect', 'wx.ID_ANY', 'wx.NullBitmap', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.BORDER_NONE'], {}), '(self.m_panel_partselect, wx.ID_ANY, wx.NullBitmap, wx\n .DefaultPosition, wx.DefaultSize, wx.BORDER_NONE)\n', (25505, 25613), False, 'import wx\n'), ((26313, 26337), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.VERTICAL'], {}), '(wx.VERTICAL)\n', (26324, 26337), False, 'import wx\n'), ((26382, 26518), 'wx.ListCtrl', 'wx.ListCtrl', (['self', 'wx.ID_ANY', 'wx.DefaultPosition', 'wx.DefaultSize', '(wx.LC_HRULES | wx.LC_REPORT | wx.LC_SINGLE_SEL | wx.BORDER_SIMPLE)'], {}), '(self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.\n LC_HRULES | wx.LC_REPORT | wx.LC_SINGLE_SEL | wx.BORDER_SIMPLE)\n', (26393, 26518), False, 'import wx\n'), ((26939, 27026), 'bitmap_panel.BitmapPanel', 'BitmapPanel', (['self', 'wx.ID_ANY', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.BORDER_STATIC'], {}), '(self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.\n BORDER_STATIC)\n', (26950, 27026), False, 'from bitmap_panel import BitmapPanel\n'), ((27043, 27069), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (27054, 27069), False, 'import wx\n'), ((27092, 27116), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.VERTICAL'], {}), '(wx.VERTICAL)\n', (27103, 27116), False, 'import wx\n'), ((27139, 27165), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), 
'(wx.HORIZONTAL)\n', (27150, 27165), False, 'import wx\n'), ((27198, 27297), 'wx.StaticText', 'wx.StaticText', (['self.m_panel11', 'wx.ID_ANY', 'wx.EmptyString', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], {}), '(self.m_panel11, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition,\n wx.DefaultSize, 0)\n', (27211, 27297), False, 'import wx\n'), ((27627, 27653), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (27638, 27653), False, 'import wx\n'), ((27732, 27826), 'wx.Panel', 'wx.Panel', (['self.m_panel11', 'wx.ID_ANY', 'wx.DefaultPosition', 'wx.DefaultSize', 'wx.TAB_TRAVERSAL'], {}), '(self.m_panel11, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.\n TAB_TRAVERSAL)\n', (27740, 27826), False, 'import wx\n'), ((27916, 27942), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.HORIZONTAL'], {}), '(wx.HORIZONTAL)\n', (27927, 27942), False, 'import wx\n'), ((27976, 28078), 'wx.StaticText', 'wx.StaticText', (['self.m_panel121', 'wx.ID_ANY', 'u"""Export Path:"""', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], {}), "(self.m_panel121, wx.ID_ANY, u'Export Path:', wx.\n DefaultPosition, wx.DefaultSize, 0)\n", (27989, 28078), False, 'import wx\n'), ((28420, 28592), 'wx.DirPickerCtrl', 'wx.DirPickerCtrl', (['self.m_panel121', 'wx.ID_ANY', 'wx.EmptyString', 'u"""Select a folder"""', 'wx.DefaultPosition', 'wx.DefaultSize', '(wx.DIRP_DIR_MUST_EXIST | wx.DIRP_USE_TEXTCTRL)'], {}), "(self.m_panel121, wx.ID_ANY, wx.EmptyString,\n u'Select a folder', wx.DefaultPosition, wx.DefaultSize, wx.\n DIRP_DIR_MUST_EXIST | wx.DIRP_USE_TEXTCTRL)\n", (28436, 28592), False, 'import wx\n'), ((28782, 28902), 'wx.Button', 'wx.Button', (['self.m_panel121', 'wx.ID_ANY', 'u"""Generate Loadout"""', 'wx.DefaultPosition', 'wx.DefaultSize', '(0 | wx.BORDER_THEME)'], {}), "(self.m_panel121, wx.ID_ANY, u'Generate Loadout', wx.\n DefaultPosition, wx.DefaultSize, 0 | wx.BORDER_THEME)\n", (28791, 28902), False, 'import wx\n'), ((29196, 29288), 'wx.stc.StyledTextCtrl', 'wx.stc.StyledTextCtrl', 
(['self.m_panel11', 'wx.ID_ANY', 'wx.DefaultPosition', 'wx.DefaultSize', '(0)'], {}), '(self.m_panel11, wx.ID_ANY, wx.DefaultPosition, wx.\n DefaultSize, 0)\n', (29217, 29288), False, 'import wx\n'), ((31932, 31982), 'wx.MenuBar', 'wx.MenuBar', (['(0 | wx.BORDER_THEME | wx.CLIP_CHILDREN)'], {}), '(0 | wx.BORDER_THEME | wx.CLIP_CHILDREN)\n', (31942, 31982), False, 'import wx\n'), ((32001, 32010), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (32008, 32010), False, 'import wx\n'), ((32053, 32146), 'wx.MenuItem', 'wx.MenuItem', (['self.file', 'wx.ID_ANY', 'u"""Change Player Name"""', 'wx.EmptyString', 'wx.ITEM_NORMAL'], {}), "(self.file, wx.ID_ANY, u'Change Player Name', wx.EmptyString, wx\n .ITEM_NORMAL)\n", (32064, 32146), False, 'import wx\n'), ((32306, 32399), 'wx.MenuItem', 'wx.MenuItem', (['self.file', 'wx.ID_ANY', 'u"""Clear All Loadouts"""', 'wx.EmptyString', 'wx.ITEM_NORMAL'], {}), "(self.file, wx.ID_ANY, u'Clear All Loadouts', wx.EmptyString, wx\n .ITEM_NORMAL)\n", (32317, 32399), False, 'import wx\n'), ((32563, 32650), 'wx.MenuItem', 'wx.MenuItem', (['self.file', 'wx.ID_ANY', 'u"""Save Session"""', 'wx.EmptyString', 'wx.ITEM_NORMAL'], {}), "(self.file, wx.ID_ANY, u'Save Session', wx.EmptyString, wx.\n ITEM_NORMAL)\n", (32574, 32650), False, 'import wx\n'), ((32753, 32840), 'wx.MenuItem', 'wx.MenuItem', (['self.file', 'wx.ID_ANY', 'u"""Load Session"""', 'wx.EmptyString', 'wx.ITEM_NORMAL'], {}), "(self.file, wx.ID_ANY, u'Load Session', wx.EmptyString, wx.\n ITEM_NORMAL)\n", (32764, 32840), False, 'import wx\n'), ((32940, 33033), 'wx.MenuItem', 'wx.MenuItem', (['self.file', 'wx.ID_ANY', 'u"""Save Session on Exit"""', 'wx.EmptyString', 'wx.ITEM_CHECK'], {}), "(self.file, wx.ID_ANY, u'Save Session on Exit', wx.EmptyString,\n wx.ITEM_CHECK)\n", (32951, 33033), False, 'import wx\n'), ((33217, 33226), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (33224, 33226), False, 'import wx\n'), ((33269, 33355), 'wx.MenuItem', 'wx.MenuItem', (['self.edit', 'wx.ID_ANY', 'u"""Swap 
Weapon"""', 'wx.EmptyString', 'wx.ITEM_NORMAL'], {}), "(self.edit, wx.ID_ANY, u'Swap Weapon', wx.EmptyString, wx.\n ITEM_NORMAL)\n", (33280, 33355), False, 'import wx\n'), ((33544, 33553), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (33551, 33553), False, 'import wx\n'), ((33587, 33679), 'wx.MenuItem', 'wx.MenuItem', (['self.view', 'wx.ID_ANY', 'u"""Some checkbox thing"""', 'wx.EmptyString', 'wx.ITEM_CHECK'], {}), "(self.view, wx.ID_ANY, u'Some checkbox thing', wx.EmptyString,\n wx.ITEM_CHECK)\n", (33598, 33679), False, 'import wx\n'), ((33852, 33861), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (33859, 33861), False, 'import wx\n'), ((33938, 33947), 'wx.Menu', 'wx.Menu', ([], {}), '()\n', (33945, 33947), False, 'import wx\n'), ((33980, 34055), 'wx.MenuItem', 'wx.MenuItem', (['self.help', 'wx.ID_ANY', 'u"""About"""', 'wx.EmptyString', 'wx.ITEM_NORMAL'], {}), "(self.help, wx.ID_ANY, u'About', wx.EmptyString, wx.ITEM_NORMAL)\n", (33991, 34055), False, 'import wx\n'), ((986, 1005), 'wx.Colour', 'wx.Colour', (['(0)', '(0)', '(64)'], {}), '(0, 0, 64)\n', (995, 1005), False, 'import wx\n'), ((1310, 1362), 'wx.SystemSettings.GetColour', 'wx.SystemSettings.GetColour', (['wx.SYS_COLOUR_SCROLLBAR'], {}), '(wx.SYS_COLOUR_SCROLLBAR)\n', (1337, 1362), False, 'import wx\n'), ((1468, 1484), 'wx.Size', 'wx.Size', (['(420)', '(-1)'], {}), '(420, -1)\n', (1475, 1484), False, 'import wx\n'), ((1617, 1634), 'wx.Size', 'wx.Size', (['(260)', '(132)'], {}), '(260, 132)\n', (1624, 1634), False, 'import wx\n'), ((2232, 2284), 'wx.SystemSettings.GetColour', 'wx.SystemSettings.GetColour', (['wx.SYS_COLOUR_SCROLLBAR'], {}), '(wx.SYS_COLOUR_SCROLLBAR)\n', (2259, 2284), False, 'import wx\n'), ((2588, 2609), 'wx.Colour', 'wx.Colour', (['(0)', '(64)', '(128)'], {}), '(0, 64, 128)\n', (2597, 2609), False, 'import wx\n'), ((2809, 2824), 'wx.Size', 'wx.Size', (['(32)', '(32)'], {}), '(32, 32)\n', (2816, 2824), False, 'import wx\n'), ((3036, 3057), 'wx.Colour', 'wx.Colour', (['(48)', '(48)', '(48)'], {}), 
'(48, 48, 48)\n', (3045, 3057), False, 'import wx\n'), ((3339, 3354), 'wx.Size', 'wx.Size', (['(64)', '(32)'], {}), '(64, 32)\n', (3346, 3354), False, 'import wx\n'), ((3407, 3422), 'wx.Size', 'wx.Size', (['(64)', '(32)'], {}), '(64, 32)\n', (3414, 3422), False, 'import wx\n'), ((3777, 3833), 'wx.SystemSettings.GetColour', 'wx.SystemSettings.GetColour', (['wx.SYS_COLOUR_HIGHLIGHTTEXT'], {}), '(wx.SYS_COLOUR_HIGHLIGHTTEXT)\n', (3804, 3833), False, 'import wx\n'), ((4078, 4093), 'wx.Size', 'wx.Size', (['(32)', '(32)'], {}), '(32, 32)\n', (4085, 4093), False, 'import wx\n'), ((4273, 4294), 'wx.Colour', 'wx.Colour', (['(0)', '(64)', '(128)'], {}), '(0, 64, 128)\n', (4282, 4294), False, 'import wx\n'), ((4822, 4843), 'wx.Colour', 'wx.Colour', (['(0)', '(64)', '(128)'], {}), '(0, 64, 128)\n', (4831, 4843), False, 'import wx\n'), ((5041, 5056), 'wx.Size', 'wx.Size', (['(32)', '(32)'], {}), '(32, 32)\n', (5048, 5056), False, 'import wx\n'), ((5205, 5226), 'wx.Colour', 'wx.Colour', (['(48)', '(48)', '(48)'], {}), '(48, 48, 48)\n', (5214, 5226), False, 'import wx\n'), ((5502, 5517), 'wx.Size', 'wx.Size', (['(64)', '(32)'], {}), '(64, 32)\n', (5509, 5517), False, 'import wx\n'), ((5568, 5583), 'wx.Size', 'wx.Size', (['(64)', '(32)'], {}), '(64, 32)\n', (5575, 5583), False, 'import wx\n'), ((5930, 5986), 'wx.SystemSettings.GetColour', 'wx.SystemSettings.GetColour', (['wx.SYS_COLOUR_HIGHLIGHTTEXT'], {}), '(wx.SYS_COLOUR_HIGHLIGHTTEXT)\n', (5957, 5986), False, 'import wx\n'), ((6227, 6242), 'wx.Size', 'wx.Size', (['(32)', '(32)'], {}), '(32, 32)\n', (6234, 6242), False, 'import wx\n'), ((6418, 6439), 'wx.Colour', 'wx.Colour', (['(0)', '(64)', '(128)'], {}), '(0, 64, 128)\n', (6427, 6439), False, 'import wx\n'), ((6965, 6986), 'wx.Colour', 'wx.Colour', (['(0)', '(64)', '(128)'], {}), '(0, 64, 128)\n', (6974, 6986), False, 'import wx\n'), ((7182, 7197), 'wx.Size', 'wx.Size', (['(32)', '(32)'], {}), '(32, 32)\n', (7189, 7197), False, 'import wx\n'), ((7342, 7363), 'wx.Colour', 
'wx.Colour', (['(48)', '(48)', '(48)'], {}), '(48, 48, 48)\n', (7351, 7363), False, 'import wx\n'), ((7633, 7648), 'wx.Size', 'wx.Size', (['(64)', '(32)'], {}), '(64, 32)\n', (7640, 7648), False, 'import wx\n'), ((7697, 7712), 'wx.Size', 'wx.Size', (['(64)', '(32)'], {}), '(64, 32)\n', (7704, 7712), False, 'import wx\n'), ((8051, 8107), 'wx.SystemSettings.GetColour', 'wx.SystemSettings.GetColour', (['wx.SYS_COLOUR_HIGHLIGHTTEXT'], {}), '(wx.SYS_COLOUR_HIGHLIGHTTEXT)\n', (8078, 8107), False, 'import wx\n'), ((8344, 8359), 'wx.Size', 'wx.Size', (['(32)', '(32)'], {}), '(32, 32)\n', (8351, 8359), False, 'import wx\n'), ((8458, 8479), 'wx.Colour', 'wx.Colour', (['(0)', '(64)', '(128)'], {}), '(0, 64, 128)\n', (8467, 8479), False, 'import wx\n'), ((9003, 9024), 'wx.Colour', 'wx.Colour', (['(0)', '(64)', '(128)'], {}), '(0, 64, 128)\n', (9012, 9024), False, 'import wx\n'), ((9222, 9237), 'wx.Size', 'wx.Size', (['(32)', '(32)'], {}), '(32, 32)\n', (9229, 9237), False, 'import wx\n'), ((9386, 9407), 'wx.Colour', 'wx.Colour', (['(48)', '(48)', '(48)'], {}), '(48, 48, 48)\n', (9395, 9407), False, 'import wx\n'), ((9683, 9698), 'wx.Size', 'wx.Size', (['(64)', '(32)'], {}), '(64, 32)\n', (9690, 9698), False, 'import wx\n'), ((9749, 9764), 'wx.Size', 'wx.Size', (['(64)', '(32)'], {}), '(64, 32)\n', (9756, 9764), False, 'import wx\n'), ((10111, 10167), 'wx.SystemSettings.GetColour', 'wx.SystemSettings.GetColour', (['wx.SYS_COLOUR_HIGHLIGHTTEXT'], {}), '(wx.SYS_COLOUR_HIGHLIGHTTEXT)\n', (10138, 10167), False, 'import wx\n'), ((10408, 10423), 'wx.Size', 'wx.Size', (['(32)', '(32)'], {}), '(32, 32)\n', (10415, 10423), False, 'import wx\n'), ((10524, 10545), 'wx.Colour', 'wx.Colour', (['(0)', '(64)', '(128)'], {}), '(0, 64, 128)\n', (10533, 10545), False, 'import wx\n'), ((11071, 11092), 'wx.Colour', 'wx.Colour', (['(0)', '(64)', '(128)'], {}), '(0, 64, 128)\n', (11080, 11092), False, 'import wx\n'), ((11292, 11307), 'wx.Size', 'wx.Size', (['(32)', '(32)'], {}), '(32, 32)\n', 
(11299, 11307), False, 'import wx\n'), ((11460, 11481), 'wx.Colour', 'wx.Colour', (['(48)', '(48)', '(48)'], {}), '(48, 48, 48)\n', (11469, 11481), False, 'import wx\n'), ((11763, 11778), 'wx.Size', 'wx.Size', (['(64)', '(32)'], {}), '(64, 32)\n', (11770, 11778), False, 'import wx\n'), ((11831, 11846), 'wx.Size', 'wx.Size', (['(64)', '(32)'], {}), '(64, 32)\n', (11838, 11846), False, 'import wx\n'), ((12201, 12257), 'wx.SystemSettings.GetColour', 'wx.SystemSettings.GetColour', (['wx.SYS_COLOUR_HIGHLIGHTTEXT'], {}), '(wx.SYS_COLOUR_HIGHLIGHTTEXT)\n', (12228, 12257), False, 'import wx\n'), ((12502, 12517), 'wx.Size', 'wx.Size', (['(32)', '(32)'], {}), '(32, 32)\n', (12509, 12517), False, 'import wx\n'), ((12620, 12641), 'wx.Colour', 'wx.Colour', (['(0)', '(64)', '(128)'], {}), '(0, 64, 128)\n', (12629, 12641), False, 'import wx\n'), ((13169, 13190), 'wx.Colour', 'wx.Colour', (['(0)', '(64)', '(128)'], {}), '(0, 64, 128)\n', (13178, 13190), False, 'import wx\n'), ((13387, 13402), 'wx.Size', 'wx.Size', (['(32)', '(32)'], {}), '(32, 32)\n', (13394, 13402), False, 'import wx\n'), ((13549, 13570), 'wx.Colour', 'wx.Colour', (['(48)', '(48)', '(48)'], {}), '(48, 48, 48)\n', (13558, 13570), False, 'import wx\n'), ((13843, 13858), 'wx.Size', 'wx.Size', (['(64)', '(32)'], {}), '(64, 32)\n', (13850, 13858), False, 'import wx\n'), ((13908, 13923), 'wx.Size', 'wx.Size', (['(64)', '(32)'], {}), '(64, 32)\n', (13915, 13923), False, 'import wx\n'), ((14266, 14322), 'wx.SystemSettings.GetColour', 'wx.SystemSettings.GetColour', (['wx.SYS_COLOUR_HIGHLIGHTTEXT'], {}), '(wx.SYS_COLOUR_HIGHLIGHTTEXT)\n', (14293, 14322), False, 'import wx\n'), ((14561, 14576), 'wx.Size', 'wx.Size', (['(32)', '(32)'], {}), '(32, 32)\n', (14568, 14576), False, 'import wx\n'), ((14676, 14697), 'wx.Colour', 'wx.Colour', (['(0)', '(64)', '(128)'], {}), '(0, 64, 128)\n', (14685, 14697), False, 'import wx\n'), ((15222, 15243), 'wx.Colour', 'wx.Colour', (['(0)', '(64)', '(128)'], {}), '(0, 64, 128)\n', (15231, 
15243), False, 'import wx\n'), ((15440, 15455), 'wx.Size', 'wx.Size', (['(32)', '(32)'], {}), '(32, 32)\n', (15447, 15455), False, 'import wx\n'), ((15602, 15623), 'wx.Colour', 'wx.Colour', (['(48)', '(48)', '(48)'], {}), '(48, 48, 48)\n', (15611, 15623), False, 'import wx\n'), ((15896, 15911), 'wx.Size', 'wx.Size', (['(64)', '(32)'], {}), '(64, 32)\n', (15903, 15911), False, 'import wx\n'), ((15961, 15976), 'wx.Size', 'wx.Size', (['(64)', '(32)'], {}), '(64, 32)\n', (15968, 15976), False, 'import wx\n'), ((16319, 16375), 'wx.SystemSettings.GetColour', 'wx.SystemSettings.GetColour', (['wx.SYS_COLOUR_HIGHLIGHTTEXT'], {}), '(wx.SYS_COLOUR_HIGHLIGHTTEXT)\n', (16346, 16375), False, 'import wx\n'), ((16614, 16629), 'wx.Size', 'wx.Size', (['(32)', '(32)'], {}), '(32, 32)\n', (16621, 16629), False, 'import wx\n'), ((16729, 16750), 'wx.Colour', 'wx.Colour', (['(0)', '(64)', '(128)'], {}), '(0, 64, 128)\n', (16738, 16750), False, 'import wx\n'), ((17275, 17296), 'wx.Colour', 'wx.Colour', (['(0)', '(64)', '(128)'], {}), '(0, 64, 128)\n', (17284, 17296), False, 'import wx\n'), ((17491, 17506), 'wx.Size', 'wx.Size', (['(32)', '(32)'], {}), '(32, 32)\n', (17498, 17506), False, 'import wx\n'), ((17649, 17670), 'wx.Colour', 'wx.Colour', (['(48)', '(48)', '(48)'], {}), '(48, 48, 48)\n', (17658, 17670), False, 'import wx\n'), ((17937, 17952), 'wx.Size', 'wx.Size', (['(64)', '(32)'], {}), '(64, 32)\n', (17944, 17952), False, 'import wx\n'), ((18000, 18015), 'wx.Size', 'wx.Size', (['(64)', '(32)'], {}), '(64, 32)\n', (18007, 18015), False, 'import wx\n'), ((18350, 18406), 'wx.SystemSettings.GetColour', 'wx.SystemSettings.GetColour', (['wx.SYS_COLOUR_HIGHLIGHTTEXT'], {}), '(wx.SYS_COLOUR_HIGHLIGHTTEXT)\n', (18377, 18406), False, 'import wx\n'), ((18641, 18656), 'wx.Size', 'wx.Size', (['(32)', '(32)'], {}), '(32, 32)\n', (18648, 18656), False, 'import wx\n'), ((18754, 18775), 'wx.Colour', 'wx.Colour', (['(0)', '(64)', '(128)'], {}), '(0, 64, 128)\n', (18763, 18775), False, 'import 
wx\n'), ((19298, 19319), 'wx.Colour', 'wx.Colour', (['(0)', '(64)', '(128)'], {}), '(0, 64, 128)\n', (19307, 19319), False, 'import wx\n'), ((19515, 19530), 'wx.Size', 'wx.Size', (['(32)', '(32)'], {}), '(32, 32)\n', (19522, 19530), False, 'import wx\n'), ((19675, 19696), 'wx.Colour', 'wx.Colour', (['(48)', '(48)', '(48)'], {}), '(48, 48, 48)\n', (19684, 19696), False, 'import wx\n'), ((19966, 19981), 'wx.Size', 'wx.Size', (['(64)', '(32)'], {}), '(64, 32)\n', (19973, 19981), False, 'import wx\n'), ((20030, 20045), 'wx.Size', 'wx.Size', (['(64)', '(32)'], {}), '(64, 32)\n', (20037, 20045), False, 'import wx\n'), ((20384, 20440), 'wx.SystemSettings.GetColour', 'wx.SystemSettings.GetColour', (['wx.SYS_COLOUR_HIGHLIGHTTEXT'], {}), '(wx.SYS_COLOUR_HIGHLIGHTTEXT)\n', (20411, 20440), False, 'import wx\n'), ((20677, 20692), 'wx.Size', 'wx.Size', (['(32)', '(32)'], {}), '(32, 32)\n', (20684, 20692), False, 'import wx\n'), ((20791, 20812), 'wx.Colour', 'wx.Colour', (['(0)', '(64)', '(128)'], {}), '(0, 64, 128)\n', (20800, 20812), False, 'import wx\n'), ((21774, 21795), 'wx.Colour', 'wx.Colour', (['(48)', '(48)', '(48)'], {}), '(48, 48, 48)\n', (21783, 21795), False, 'import wx\n'), ((21898, 21914), 'wx.Size', 'wx.Size', (['(130)', '(24)'], {}), '(130, 24)\n', (21905, 21914), False, 'import wx\n'), ((22266, 22287), 'wx.Colour', 'wx.Colour', (['(48)', '(48)', '(48)'], {}), '(48, 48, 48)\n', (22275, 22287), False, 'import wx\n'), ((22390, 22406), 'wx.Size', 'wx.Size', (['(130)', '(24)'], {}), '(130, 24)\n', (22397, 22406), False, 'import wx\n'), ((22758, 22779), 'wx.Colour', 'wx.Colour', (['(48)', '(48)', '(48)'], {}), '(48, 48, 48)\n', (22767, 22779), False, 'import wx\n'), ((22882, 22898), 'wx.Size', 'wx.Size', (['(130)', '(24)'], {}), '(130, 24)\n', (22889, 22898), False, 'import wx\n'), ((23350, 23371), 'wx.Colour', 'wx.Colour', (['(48)', '(48)', '(48)'], {}), '(48, 48, 48)\n', (23359, 23371), False, 'import wx\n'), ((23423, 23438), 'wx.Size', 'wx.Size', (['(64)', '(64)'], 
{}), '(64, 64)\n', (23430, 23438), False, 'import wx\n'), ((23751, 23772), 'wx.Colour', 'wx.Colour', (['(48)', '(48)', '(48)'], {}), '(48, 48, 48)\n', (23760, 23772), False, 'import wx\n'), ((23826, 23841), 'wx.Size', 'wx.Size', (['(64)', '(64)'], {}), '(64, 64)\n', (23833, 23841), False, 'import wx\n'), ((24308, 24329), 'wx.Colour', 'wx.Colour', (['(48)', '(48)', '(48)'], {}), '(48, 48, 48)\n', (24317, 24329), False, 'import wx\n'), ((24381, 24396), 'wx.Size', 'wx.Size', (['(64)', '(64)'], {}), '(64, 64)\n', (24388, 24396), False, 'import wx\n'), ((24709, 24730), 'wx.Colour', 'wx.Colour', (['(48)', '(48)', '(48)'], {}), '(48, 48, 48)\n', (24718, 24730), False, 'import wx\n'), ((24784, 24799), 'wx.Size', 'wx.Size', (['(64)', '(64)'], {}), '(64, 64)\n', (24791, 24799), False, 'import wx\n'), ((25267, 25288), 'wx.Colour', 'wx.Colour', (['(48)', '(48)', '(48)'], {}), '(48, 48, 48)\n', (25276, 25288), False, 'import wx\n'), ((25340, 25355), 'wx.Size', 'wx.Size', (['(64)', '(64)'], {}), '(64, 64)\n', (25347, 25355), False, 'import wx\n'), ((25669, 25690), 'wx.Colour', 'wx.Colour', (['(48)', '(48)', '(48)'], {}), '(48, 48, 48)\n', (25678, 25690), False, 'import wx\n'), ((25744, 25759), 'wx.Size', 'wx.Size', (['(64)', '(64)'], {}), '(64, 64)\n', (25751, 25759), False, 'import wx\n'), ((26570, 26623), 'wx.SystemSettings.GetColour', 'wx.SystemSettings.GetColour', (['wx.SYS_COLOUR_WINDOWTEXT'], {}), '(wx.SYS_COLOUR_WINDOWTEXT)\n', (26597, 26623), False, 'import wx\n'), ((26688, 26737), 'wx.SystemSettings.GetColour', 'wx.SystemSettings.GetColour', (['wx.SYS_COLOUR_WINDOW'], {}), '(wx.SYS_COLOUR_WINDOW)\n', (26715, 26737), False, 'import wx\n'), ((26793, 26810), 'wx.Size', 'wx.Size', (['(720)', '(480)'], {}), '(720, 480)\n', (26800, 26810), False, 'import wx\n'), ((27383, 27439), 'wx.SystemSettings.GetColour', 'wx.SystemSettings.GetColour', (['wx.SYS_COLOUR_HIGHLIGHTTEXT'], {}), '(wx.SYS_COLOUR_HIGHLIGHTTEXT)\n', (27410, 27439), False, 'import wx\n'), ((27686, 27702), 
'wx.Size', 'wx.Size', (['(512)', '(-1)'], {}), '(512, -1)\n', (27693, 27702), False, 'import wx\n'), ((28165, 28221), 'wx.SystemSettings.GetColour', 'wx.SystemSettings.GetColour', (['wx.SYS_COLOUR_HIGHLIGHTTEXT'], {}), '(wx.SYS_COLOUR_HIGHLIGHTTEXT)\n', (28192, 28221), False, 'import wx\n'), ((31302, 31354), 'wx.SystemSettings.GetColour', 'wx.SystemSettings.GetColour', (['wx.SYS_COLOUR_HIGHLIGHT'], {}), '(wx.SYS_COLOUR_HIGHLIGHT)\n', (31329, 31354), False, 'import wx\n'), ((31408, 31464), 'wx.SystemSettings.GetColour', 'wx.SystemSettings.GetColour', (['wx.SYS_COLOUR_HIGHLIGHTTEXT'], {}), '(wx.SYS_COLOUR_HIGHLIGHTTEXT)\n', (31435, 31464), False, 'import wx\n'), ((820, 838), 'wx.Size', 'wx.Size', (['(1280)', '(720)'], {}), '(1280, 720)\n', (827, 838), False, 'import wx\n')] |
""" This module contains the World class. """
from copy import deepcopy
import numpy as np
from models.object import Window
class World:
"""
Contains all objects that are supposed to be drawn in the viewport.
In this class comments, the actual slice of the world that is being
shown, is refered to as "window". The widget that shows the window is
called "viewport", it is an immutable object. On the other hand, the
window can be moved or scaled like any other object.
"""
def __init__(self, window_size):
self._objects = dict()
self.add_object(Window(*window_size))
def __getitem__(self, name):
return self._objects[name]
def viewport_transform(self, viewport_width, viewport_height):
"""
Returns a list of lists of coordinates, ready to be drawn in the
viewport. Basically this returns all world objects normalized to
the viewport coordinates.
"""
virtual_world = deepcopy(self._objects)
# rotate all objects to appear that the window rotated
for obj in virtual_world.values():
obj._transform(
self["window"].inv_rotation_matrix, self["window"].center,
np.negative(self["window"].center).tolist())
# clip objects
for obj in virtual_world.values():
obj.project()
obj.clip(virtual_world["window"])
(x_min, y_min), (x_max, y_max) = \
virtual_world["window"].expanded_boundaries
def transform_point(point):
newx = ((point[0] - x_min)/(x_max - x_min)) * viewport_width
newy = (1 - (point[1] - y_min)/(y_max - y_min)) * viewport_height
return (newx, newy)
# build a list of transformed points for each object
output = []
for obj in virtual_world.values():
new_obj = []
for face in obj.points:
new_obj.append(list(map(transform_point, face)))
output.append((new_obj, obj.color))
return output
@property
def objects(self):
""" Returns the set of objects. """
return self._objects.values()
def add_object(self, obj):
""" Adds a new object. """
self._objects[obj.name] = obj
| [
"numpy.negative",
"models.object.Window",
"copy.deepcopy"
] | [((1017, 1040), 'copy.deepcopy', 'deepcopy', (['self._objects'], {}), '(self._objects)\n', (1025, 1040), False, 'from copy import deepcopy\n'), ((618, 638), 'models.object.Window', 'Window', (['*window_size'], {}), '(*window_size)\n', (624, 638), False, 'from models.object import Window\n'), ((1267, 1301), 'numpy.negative', 'np.negative', (["self['window'].center"], {}), "(self['window'].center)\n", (1278, 1301), True, 'import numpy as np\n')] |
import os
from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D
from keras.models import Model
from keras import backend as K
def model():
input_img = Input(shape=(6, 20, 20))
x = Conv2D(filters = 32, kernel_size = (3, 3), strides = (1,1), padding = 'same', activation='relu')(input_img)
x = MaxPooling2D((2, 2), padding='same')(x)
x = Conv2D(16, (3, 3), activation='relu', padding='same')(x)
x = MaxPooling2D((2, 2), padding='same')(x)
x = Conv2D(8, (3, 3), activation='relu', padding='same')(x)
encoded = MaxPooling2D((2, 2), padding='same')(x)
# at this point the representation is (4, 4, 8) i.e. 128-dimensional
x = Conv2D(8, (3, 3), activation='relu', padding='same')(encoded)
x = UpSampling2D((2, 2))(x)
x = Conv2D(16, (3, 3), activation='relu', padding='same')(x)
x = UpSampling2D((2, 2))(x)
x = Conv2D(32, (3, 3), activation='relu')(x)
x = UpSampling2D((2, 2))(x)
decoded = Conv2D(6, (3, 3), activation='sigmoid', padding='same')(x)
autoencoder = Model(input_img, decoded)
autoencoder.compile(optimizer='Adam', loss='mean_squared_error')
return autoencoder, encoded, decoded
| [
"keras.layers.Conv2D",
"keras.layers.UpSampling2D",
"keras.layers.MaxPooling2D",
"keras.layers.Input",
"keras.models.Model"
] | [((182, 206), 'keras.layers.Input', 'Input', ([], {'shape': '(6, 20, 20)'}), '(shape=(6, 20, 20))\n', (187, 206), False, 'from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D\n'), ((1067, 1092), 'keras.models.Model', 'Model', (['input_img', 'decoded'], {}), '(input_img, decoded)\n', (1072, 1092), False, 'from keras.models import Model\n'), ((216, 309), 'keras.layers.Conv2D', 'Conv2D', ([], {'filters': '(32)', 'kernel_size': '(3, 3)', 'strides': '(1, 1)', 'padding': '"""same"""', 'activation': '"""relu"""'}), "(filters=32, kernel_size=(3, 3), strides=(1, 1), padding='same',\n activation='relu')\n", (222, 309), False, 'from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D\n'), ((333, 369), 'keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2, 2)'], {'padding': '"""same"""'}), "((2, 2), padding='same')\n", (345, 369), False, 'from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D\n'), ((382, 435), 'keras.layers.Conv2D', 'Conv2D', (['(16)', '(3, 3)'], {'activation': '"""relu"""', 'padding': '"""same"""'}), "(16, (3, 3), activation='relu', padding='same')\n", (388, 435), False, 'from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D\n'), ((448, 484), 'keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2, 2)'], {'padding': '"""same"""'}), "((2, 2), padding='same')\n", (460, 484), False, 'from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D\n'), ((497, 549), 'keras.layers.Conv2D', 'Conv2D', (['(8)', '(3, 3)'], {'activation': '"""relu"""', 'padding': '"""same"""'}), "(8, (3, 3), activation='relu', padding='same')\n", (503, 549), False, 'from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D\n'), ((568, 604), 'keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2, 2)'], {'padding': '"""same"""'}), "((2, 2), padding='same')\n", (580, 604), False, 'from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D\n'), ((695, 747), 'keras.layers.Conv2D', 
'Conv2D', (['(8)', '(3, 3)'], {'activation': '"""relu"""', 'padding': '"""same"""'}), "(8, (3, 3), activation='relu', padding='same')\n", (701, 747), False, 'from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D\n'), ((766, 786), 'keras.layers.UpSampling2D', 'UpSampling2D', (['(2, 2)'], {}), '((2, 2))\n', (778, 786), False, 'from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D\n'), ((799, 852), 'keras.layers.Conv2D', 'Conv2D', (['(16)', '(3, 3)'], {'activation': '"""relu"""', 'padding': '"""same"""'}), "(16, (3, 3), activation='relu', padding='same')\n", (805, 852), False, 'from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D\n'), ((865, 885), 'keras.layers.UpSampling2D', 'UpSampling2D', (['(2, 2)'], {}), '((2, 2))\n', (877, 885), False, 'from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D\n'), ((898, 935), 'keras.layers.Conv2D', 'Conv2D', (['(32)', '(3, 3)'], {'activation': '"""relu"""'}), "(32, (3, 3), activation='relu')\n", (904, 935), False, 'from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D\n'), ((948, 968), 'keras.layers.UpSampling2D', 'UpSampling2D', (['(2, 2)'], {}), '((2, 2))\n', (960, 968), False, 'from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D\n'), ((987, 1042), 'keras.layers.Conv2D', 'Conv2D', (['(6)', '(3, 3)'], {'activation': '"""sigmoid"""', 'padding': '"""same"""'}), "(6, (3, 3), activation='sigmoid', padding='same')\n", (993, 1042), False, 'from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D\n')] |
import math
l, a, p, r = map(int, input().split())
dia = math.sqrt((l*l)+(a*a)+(p*p))
if dia <= 2*r:
print("S")
else:
print("N") | [
"math.sqrt"
] | [((59, 91), 'math.sqrt', 'math.sqrt', (['(l * l + a * a + p * p)'], {}), '(l * l + a * a + p * p)\n', (68, 91), False, 'import math\n')] |
import praw
import prawcore
import requests
import pprint
import json
import time
from crawler_lib import send_message, keyword_match
import ConfigParser
config = ConfigParser.ConfigParser()
config.read('/etc/reddit-crawler/config.ini')
reddit = praw.Reddit(user_agent=config.get('reddit','user_agent'),
client_id=config.get('reddit','client_id'), client_secret=config.get('reddit','client_secret'))
subreddit = reddit.subreddit(config.get('reddit','subreddit'))
while True:
try:
for comment in subreddit.stream.comments():
if time.time() - comment.created_utc > 300:
print("Comment " + comment.id + " was posted more than 5 minutes ago, skipping")
continue
flair = comment.author_flair_text
if flair is None:
flair = ""
if keyword_match(comment.body):
print("Matched comment " + comment.id)
send_message("New reply to " + comment.link_title, "https://www.reddit.com" + comment.permalink, comment.body, keyword_match(comment.body), "", comment.author.name, flair)
else:
print("No keyword match on comment " + comment.id)
except prawcore.exceptions.ResponseException as e:
print("Got a bad response from reddit API, waiting 5 seconds before continuing")
time.sleep(5)
except prawcore.exceptions.RequestException:
print("Unable to connect to reddit.com, likely a Reddit API outage")
time.sleep(5)
| [
"crawler_lib.keyword_match",
"time.time",
"time.sleep",
"ConfigParser.ConfigParser"
] | [((164, 191), 'ConfigParser.ConfigParser', 'ConfigParser.ConfigParser', ([], {}), '()\n', (189, 191), False, 'import ConfigParser\n'), ((857, 884), 'crawler_lib.keyword_match', 'keyword_match', (['comment.body'], {}), '(comment.body)\n', (870, 884), False, 'from crawler_lib import send_message, keyword_match\n'), ((1366, 1379), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1376, 1379), False, 'import time\n'), ((1514, 1527), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1524, 1527), False, 'import time\n'), ((576, 587), 'time.time', 'time.time', ([], {}), '()\n', (585, 587), False, 'import time\n'), ((1068, 1095), 'crawler_lib.keyword_match', 'keyword_match', (['comment.body'], {}), '(comment.body)\n', (1081, 1095), False, 'from crawler_lib import send_message, keyword_match\n')] |
import time
Q1 = input("Who do you like: ")
Q2 = input("Who do you hate: ")
Answer = f"I love {Q2} but hate {Q1}"
print(Answer)
time.sleep(3) | [
"time.sleep"
] | [((130, 143), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (140, 143), False, 'import time\n')] |
# -*- coding: utf-8 -*-
#
# Unless explicitly stated otherwise all files in this repository are licensed
# under the Apache 2 License.
#
# This product includes software developed at Datadog
# (https://www.datadoghq.com/).
#
# Copyright 2018 Datadog, Inc.
#
"""EnvironmentVariableService"""
from os import environ
from .. import db
from ..models import ConfigValue
from ..services import ConfigValueService
class EnvironmentVariableService(object):
"""Service helpers related to non-sensitive environment variables."""
def __init__(self):
"""Instantiates a new `EnvironmentVariableService`."""
self.config_value_service = ConfigValueService()
def update_persisted_variables(self):
"""Updates environment variables.
Updates persisted non-sensitive environment variables with current
environment variable values.
Returns:
None
"""
for key, value in self._environment_variables().items():
if key == "JIRA_SERVER_ADDRESS":
if value[-1] == '/':
value = value[:-1]
self.config_value_service.create(key=key, value=value)
db.session.commit()
def export_persisted_variables(self):
"""Exports persisted non-sensitive environment variables.
Returns:
None
"""
environment_variables = ConfigValue.query.all()
for variable in environment_variables:
environ[variable.key] = variable.value
def _environment_variables(self):
"""KVPs of current non-sensitive environment variables.
Returns:
dict(str:str)
"""
return {
e: environ.get(e) for e in self._environment_variable_names()
if environ.get(e) is not None
}
def _environment_variable_names(self):
"""Names for non-sensitive configuration environment variables.
Returns:
list(str)
"""
return ['TRELLO_ORG_NAME', 'GITHUB_ORG_LOGIN', 'GITHUB_ORG_WEBHOOK_ID', 'JIRA_SERVER_ADDRESS']
| [
"os.environ.get"
] | [((1701, 1715), 'os.environ.get', 'environ.get', (['e'], {}), '(e)\n', (1712, 1715), False, 'from os import environ\n'), ((1775, 1789), 'os.environ.get', 'environ.get', (['e'], {}), '(e)\n', (1786, 1789), False, 'from os import environ\n')] |
"""Initialise and run the bot."""
import logging
from discord import Activity
from discord import ActivityType
from discord import AllowedMentions
from discord import Intents
from discord_slash import SlashCommand
from obsidion import _update_event_loop_policy
from obsidion.core import get_settings
from obsidion.core.bot import Obsidion
_update_event_loop_policy()
log = logging.getLogger("obsidion")
def main() -> None:
"""Main initialisation script."""
# So no one can abuse the bot to mass mention
allowed_mentions = AllowedMentions(everyone=False, roles=False, users=True)
# As the bot functions off just slash commands
# it only needs guild info to do some channel updating
intents = Intents.none()
intents.guilds = True
# Allow messages for testing bot
if get_settings().DEV:
intents.messages = True
intents.reactions = True
activity = Activity(
name=get_settings().ACTIVITY,
type=ActivityType.watching,
)
args = {
"description": "",
"self_bot": False,
"owner_ids": [],
"activity": activity,
"intents": intents,
"allowed_mentions": allowed_mentions,
"command_prefix": "$",
}
obsidion = Obsidion(**args)
log.info("Ready to go, building everything")
SlashCommand(obsidion, sync_commands=True, sync_on_cog_reload=True)
log.info("Initialised slash commands")
obsidion.run(get_settings().DISCORD_TOKEN)
log.info("Obsidion shutting down")
if __name__ == "__main__":
"""Run the bot."""
main()
| [
"logging.getLogger",
"discord.AllowedMentions",
"obsidion.core.get_settings",
"obsidion.core.bot.Obsidion",
"discord.Intents.none",
"obsidion._update_event_loop_policy",
"discord_slash.SlashCommand"
] | [((341, 368), 'obsidion._update_event_loop_policy', '_update_event_loop_policy', ([], {}), '()\n', (366, 368), False, 'from obsidion import _update_event_loop_policy\n'), ((376, 405), 'logging.getLogger', 'logging.getLogger', (['"""obsidion"""'], {}), "('obsidion')\n", (393, 405), False, 'import logging\n'), ((539, 595), 'discord.AllowedMentions', 'AllowedMentions', ([], {'everyone': '(False)', 'roles': '(False)', 'users': '(True)'}), '(everyone=False, roles=False, users=True)\n', (554, 595), False, 'from discord import AllowedMentions\n'), ((721, 735), 'discord.Intents.none', 'Intents.none', ([], {}), '()\n', (733, 735), False, 'from discord import Intents\n'), ((1248, 1264), 'obsidion.core.bot.Obsidion', 'Obsidion', ([], {}), '(**args)\n', (1256, 1264), False, 'from obsidion.core.bot import Obsidion\n'), ((1319, 1386), 'discord_slash.SlashCommand', 'SlashCommand', (['obsidion'], {'sync_commands': '(True)', 'sync_on_cog_reload': '(True)'}), '(obsidion, sync_commands=True, sync_on_cog_reload=True)\n', (1331, 1386), False, 'from discord_slash import SlashCommand\n'), ((807, 821), 'obsidion.core.get_settings', 'get_settings', ([], {}), '()\n', (819, 821), False, 'from obsidion.core import get_settings\n'), ((1447, 1461), 'obsidion.core.get_settings', 'get_settings', ([], {}), '()\n', (1459, 1461), False, 'from obsidion.core import get_settings\n'), ((931, 945), 'obsidion.core.get_settings', 'get_settings', ([], {}), '()\n', (943, 945), False, 'from obsidion.core import get_settings\n')] |
from datetime import datetime, timezone, timedelta
from io import StringIO
from pytest import mark, raises
from preacher.core.value import ValueContext, RelativeDatetime
from preacher.compilation.yaml import YamlError, load
def test_given_datetime_that_is_offset_naive():
stream = StringIO('2020-04-01 01:23:45')
actual = load(stream)
assert isinstance(actual, datetime)
assert actual == datetime(2020, 4, 1, 1, 23, 45)
assert actual.tzinfo is None
def test_given_datetime_that_is_offset_aware():
stream = StringIO('2020-04-01 01:23:45 +09:00')
actual = load(stream)
assert isinstance(actual, datetime)
assert (
actual - datetime(2020, 3, 31, 16, 23, 45, tzinfo=timezone.utc)
).total_seconds() == 0.0
assert actual.tzinfo
@mark.parametrize(('content', 'expected_message'), [
('!relative_datetime []', '", line 1, column 1'),
('\n- !relative_datetime invalid', '", line 2, column 3'),
('!relative_datetime {delta: invalid}', '", line 1, column 28'),
('!relative_datetime {format: {}}', '", line 1, column 29')
])
def test_given_invalid_relative_datetime(content, expected_message):
stream = StringIO(content)
with raises(YamlError) as error_info:
load(stream)
assert expected_message in str(error_info.value)
def test_given_an_empty_relative_datetime():
actual = load(StringIO('!relative_datetime'))
assert isinstance(actual, RelativeDatetime)
now = datetime.now()
resolved = actual.resolve(ValueContext(origin_datetime=now))
assert resolved.value == now
def test_given_a_valid_string_relative_datetime():
actual = load(StringIO('!relative_datetime -1 hour'))
assert isinstance(actual, RelativeDatetime)
now = datetime.now()
resolved = actual.resolve(ValueContext(origin_datetime=now))
assert resolved.value == now - timedelta(hours=1)
def test_given_an_empty_mapping_relative_datetime():
actual = load(StringIO('!relative_datetime {}'))
assert isinstance(actual, RelativeDatetime)
now = datetime.now()
resolved = actual.resolve(ValueContext(origin_datetime=now))
assert resolved.value == now
def test_given_a_filled_mapping_relative_datetime():
content = '\n'.join([
'!relative_datetime',
' delta: -1 minute',
' format: "%H:%M:%S"',
' foo: bar', # Invalid one will be ignored.
])
actual = load(StringIO(content))
assert isinstance(actual, RelativeDatetime)
now = datetime(2020, 1, 23, 12, 34, 56)
resolved = actual.resolve(ValueContext(origin_datetime=now))
assert resolved.formatted == '12:33:56'
| [
"datetime.datetime",
"preacher.compilation.yaml.load",
"datetime.timedelta",
"pytest.mark.parametrize",
"datetime.datetime.now",
"pytest.raises",
"io.StringIO",
"preacher.core.value.ValueContext"
] | [((783, 1088), 'pytest.mark.parametrize', 'mark.parametrize', (["('content', 'expected_message')", '[(\'!relative_datetime []\', \'", line 1, column 1\'), (\n """\n- !relative_datetime invalid""", \'", line 2, column 3\'), (\n \'!relative_datetime {delta: invalid}\', \'", line 1, column 28\'), (\n \'!relative_datetime {format: {}}\', \'", line 1, column 29\')]'], {}), '((\'content\', \'expected_message\'), [(\'!relative_datetime []\',\n \'", line 1, column 1\'), ("""\n- !relative_datetime invalid""",\n \'", line 2, column 3\'), (\'!relative_datetime {delta: invalid}\',\n \'", line 1, column 28\'), (\'!relative_datetime {format: {}}\',\n \'", line 1, column 29\')])\n', (799, 1088), False, 'from pytest import mark, raises\n'), ((289, 320), 'io.StringIO', 'StringIO', (['"""2020-04-01 01:23:45"""'], {}), "('2020-04-01 01:23:45')\n", (297, 320), False, 'from io import StringIO\n'), ((334, 346), 'preacher.compilation.yaml.load', 'load', (['stream'], {}), '(stream)\n', (338, 346), False, 'from preacher.compilation.yaml import YamlError, load\n'), ((536, 574), 'io.StringIO', 'StringIO', (['"""2020-04-01 01:23:45 +09:00"""'], {}), "('2020-04-01 01:23:45 +09:00')\n", (544, 574), False, 'from io import StringIO\n'), ((588, 600), 'preacher.compilation.yaml.load', 'load', (['stream'], {}), '(stream)\n', (592, 600), False, 'from preacher.compilation.yaml import YamlError, load\n'), ((1170, 1187), 'io.StringIO', 'StringIO', (['content'], {}), '(content)\n', (1178, 1187), False, 'from io import StringIO\n'), ((1460, 1474), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1472, 1474), False, 'from datetime import datetime, timezone, timedelta\n'), ((1743, 1757), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1755, 1757), False, 'from datetime import datetime, timezone, timedelta\n'), ((2044, 2058), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2056, 2058), False, 'from datetime import datetime, timezone, timedelta\n'), ((2487, 2520), 
'datetime.datetime', 'datetime', (['(2020)', '(1)', '(23)', '(12)', '(34)', '(56)'], {}), '(2020, 1, 23, 12, 34, 56)\n', (2495, 2520), False, 'from datetime import datetime, timezone, timedelta\n'), ((408, 439), 'datetime.datetime', 'datetime', (['(2020)', '(4)', '(1)', '(1)', '(23)', '(45)'], {}), '(2020, 4, 1, 1, 23, 45)\n', (416, 439), False, 'from datetime import datetime, timezone, timedelta\n'), ((1197, 1214), 'pytest.raises', 'raises', (['YamlError'], {}), '(YamlError)\n', (1203, 1214), False, 'from pytest import mark, raises\n'), ((1238, 1250), 'preacher.compilation.yaml.load', 'load', (['stream'], {}), '(stream)\n', (1242, 1250), False, 'from preacher.compilation.yaml import YamlError, load\n'), ((1369, 1399), 'io.StringIO', 'StringIO', (['"""!relative_datetime"""'], {}), "('!relative_datetime')\n", (1377, 1399), False, 'from io import StringIO\n'), ((1505, 1538), 'preacher.core.value.ValueContext', 'ValueContext', ([], {'origin_datetime': 'now'}), '(origin_datetime=now)\n', (1517, 1538), False, 'from preacher.core.value import ValueContext, RelativeDatetime\n'), ((1644, 1682), 'io.StringIO', 'StringIO', (['"""!relative_datetime -1 hour"""'], {}), "('!relative_datetime -1 hour')\n", (1652, 1682), False, 'from io import StringIO\n'), ((1788, 1821), 'preacher.core.value.ValueContext', 'ValueContext', ([], {'origin_datetime': 'now'}), '(origin_datetime=now)\n', (1800, 1821), False, 'from preacher.core.value import ValueContext, RelativeDatetime\n'), ((1950, 1983), 'io.StringIO', 'StringIO', (['"""!relative_datetime {}"""'], {}), "('!relative_datetime {}')\n", (1958, 1983), False, 'from io import StringIO\n'), ((2089, 2122), 'preacher.core.value.ValueContext', 'ValueContext', ([], {'origin_datetime': 'now'}), '(origin_datetime=now)\n', (2101, 2122), False, 'from preacher.core.value import ValueContext, RelativeDatetime\n'), ((2409, 2426), 'io.StringIO', 'StringIO', (['content'], {}), '(content)\n', (2417, 2426), False, 'from io import StringIO\n'), ((2551, 
2584), 'preacher.core.value.ValueContext', 'ValueContext', ([], {'origin_datetime': 'now'}), '(origin_datetime=now)\n', (2563, 2584), False, 'from preacher.core.value import ValueContext, RelativeDatetime\n'), ((1858, 1876), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (1867, 1876), False, 'from datetime import datetime, timezone, timedelta\n'), ((671, 725), 'datetime.datetime', 'datetime', (['(2020)', '(3)', '(31)', '(16)', '(23)', '(45)'], {'tzinfo': 'timezone.utc'}), '(2020, 3, 31, 16, 23, 45, tzinfo=timezone.utc)\n', (679, 725), False, 'from datetime import datetime, timezone, timedelta\n')] |
import os
from datetime import datetime
from .proxmoxbalancer import ProxmoxBalancer
def balance():
print("Started at %s" % datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
if "https_proxy" in os.environ:
del os.environ["https_proxy"]
balancer = ProxmoxBalancer()
balancer.balance()
print("Finished at %s" % datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
| [
"datetime.datetime.now"
] | [((130, 144), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (142, 144), False, 'from datetime import datetime\n'), ((338, 352), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (350, 352), False, 'from datetime import datetime\n')] |
# -*- coding: UTF-8 -*-
import openpyxl
import shutil
import os
import const_define
# active sheet name
# print(workbook.active)
# load excel file
workbook = openpyxl.load_workbook(const_define.DETAILED_LEDGER_FULL_PATH, data_only=True)
# get all sheet name
# worksheets = workbook.get_sheet_names()
worksheets = tuple(workbook.sheetnames)
print(worksheets)
for i in worksheets:
print(i)
# get sheet content
sheet = workbook[worksheets[0]]
# print(sheet)
# print(sheet.title)
# print(sheet.cell(row=2, column=2).value)
# print(sheet['J2'].value)
for rowOfCell in sheet['B2':'M2']:
for cell in rowOfCell:
# print(cell.coordinate, cell.value)
print(cell.value)
# shutil.copyfile(os.path.join('T:'), os.path.join('ttt.txt'))
# filename_netdriver = os.path.join(r"t:", 'vv')
# filename_netdriver = os.path.join(filename_netdriver, 'Roy')
# filename_netdriver = os.path.join(filename_netdriver, 'command.txt')
# print(filename_netdriver)
# shutil.copy(filename_netdriver, DATA_FOLDER_FULL_PATH)
# filename_netdriver = os.path.join(r"t:", r'vv\Roy\command.txt')
# print(filename_netdriver)
# shutil.copy(filename_netdriver, DATA_FOLDER_FULL_PATH)
| [
"openpyxl.load_workbook"
] | [((161, 239), 'openpyxl.load_workbook', 'openpyxl.load_workbook', (['const_define.DETAILED_LEDGER_FULL_PATH'], {'data_only': '(True)'}), '(const_define.DETAILED_LEDGER_FULL_PATH, data_only=True)\n', (183, 239), False, 'import openpyxl\n')] |
import numpy as np
'''
!! 기본적인 개념
1차원 = 벡터
2차원 = 행렬
3차원 = 텐서
4차원 부터는 우리는 3차원의 세상에서 살고 있기 때문에 4차원 이상부터는 머리로 생각하기 어렵다.
2차원 텐서
2차원 텐서를 행렬이라고 말한다.
|t| = (batch size, dim)
batch size = "행" / dim = "열"
3차원 텐서
3차원 텐서는 그냥 텐서라고 부른다.
|t| = (batch size, width, height)
batch size = "세로" / width = "가로" / height = "높이" (입체적인 부분)
'''
# 1차원 벡터 만들기
t = np.array([0., 1., 2., 3., 4., 5., 6.])
# 벡터의 차원과 크기를 출력
print("Rank of t:", t.ndim)
print("Shape of t:", t.shape)
'''
ndim은 몇 차원인지를 출력한다.
shape은 크기를 출력한다. (1 x 7)의 크기를 말한다.
'''
# numpy에서 각 벡터의 원소에 접근하는 방법 (일반적인 파이썬 리스트를 다루는 것과 매우 유사)
print(t[0], t[1], t[-1])
print(t[:2], t[3:])
# 2차원 행렬 만들기
t = np.array([[1., 2., 3.,], [4., 5., 6.,], [7., 8., 9]])
''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
import torch
# 1차원 벡터 만들기
t = torch.FloatTensor([0., 1., 2., 3., 4., 5., 6.])
print(t)
# 텐서의 차원 확인하기
print(t.dim()) # Rank (차원)
print(t.shape) # t.size()도 가능
# 텐서 접근하기 (일반적인 파이썬 리스트 접근 및 numpy접근과 동일하다.)
print(t[0], t[1], t[-1])
print(t[2:5], t[4:-1])
print(t[:2], t[3:])
# PyTorch로 2차원 행렬 만들기
t = torch.FloatTensor([[1., 2., 3.],
[4., 5., 6.],
[7., 8., 9.],
[10., 11., 12.]])
print(t)
print(t.dim()) # Rank (차원)
print(t.shape) # t.size()도 가능
# 2차원 텐서 슬라이싱
print(t[1:3, 1]) # 첫 번째 차원에서의 슬라이싱, 두 번째 차원에서의 인덱스의 값들만 가져온다.
print(t[1:3, 1].size())
# 브로드캐스팅 이용하기
'''
두 행렬 A, B에서 덧셈은 두 행렬의 크기가 같아야하고, 곱은 A의 마지막 차원과 B의 첫 번째 차원이 일치해야한다.
그치만 딥 러닝을 수행하다보면 불가피하게 크기가 다른 행렬(텐서)에 대해서 사칙 연산을 수행할 필요가 생긴다.
이를 위해 파이토치에서는 자동으로 크기를 맞춰서 연산을 수행하게 만드는 "브로드캐스팅" 기능을 제공한다.
'''
# 일반적인 행렬 덧셈
m1 = torch.FloatTensor([[3, 3]])
m2 = torch.FloatTensor([[2, 2]])
print(m1 + m2)
# 브로드 캐스팅 적용
m2 = torch.FloatTensor([[3], [4]])
print(m1 + m2)
# 행렬 곱셈(matmul)과 원소 별 곱셈(mul)의 차이
m1 = torch.FloatTensor([[1, 2], [3, 4]])
m2 = torch.FloatTensor([[1], [2]])
print(m1.matmul(m2))
print(m1 * m2)
print(m1.mul(m2))
# 평균 구하기
t = torch.FloatTensor([1, 2])
print(t.mean())
# 2차원 행렬 평균 구하기
t = torch.FloatTensor([[1, 2],
[3, 4]])
print(t.mean()) # 전체 원소를 대상으로 평균을 구한다.
print(t.mean(dim=0)) # 첫 번째 차원을 제거하고 평균을 구한다. [1, 3]의 평균과 [2, 4]의 평균
print(t.mean(dim=1)) # 두 번째 차원을 제거하고 평균을 구한다. [1, 2]의 평균과 [3, 4]의 평균
# 행렬 덧셈
t = torch.FloatTensor([[1, 2],
[3, 4]])
print(t.sum()) # 전체 원소를 대상으로 합을 구한다.
print(t.sum(dim=0)) # 첫 번째 차원을 제거하고 합을 구한다. [1, 3]의 합과 [2, 4]의 합
print(t.sum(dim=1)) # 두 번째 차원을 제거하고 합을 구한다. [1, 2]의 합과 [3, 4]의 합
# 최대(Max)와 아그맥스(ArgMax)구하기
t = torch.FloatTensor([[1, 2],
[3, 4]])
print(t.max()) # 전체 원소를 대상으로 max를 구한다.
print(t.max(dim=0)) # 첫 번째 차원을 제거하고 max를 구한다. [1, 3]중 최대 값과 [2, 4]중 최대 값
print(t.max(dim=1)) # 두 번째 차원을 제거하고 max를 구한다. [1, 2]중 최대 값과 [3, 4]중 최대 값
'''
max() 함수는 반환 값이 두 개이다. 값과 idx를 반환해준다.
'''
# 뷰(View) - 원소의 수를 유지하면서 텐서의 크기 변경 (중요함)
'''
pytorch의 뷰는 numpy에서의 reshape와 같은 역할을 한다. 즉, 텐서의 크기를 변경해주는 역할을 한다.
'''
t = np.array([[[0, 1, 2],
[3, 4, 5]],
[[6, 7, 8],
[9, 10, 11]]])
ft = torch.FloatTensor(t)
print(ft.shape)
# ft 텐서를 view를 통하여 2차원 텐서로 변경하기
print(ft.view([-1, 3])) # ft텐서를 (?, 3)의 크기로 변경
'''
view는 기본적으로 변경 전과 후의 텐서 안의 원소의 개수가 유지되어야 한다.
파이토치의 view는 사이즈가 -1로 설정되면, 다른 차원으로부터 해당 값을 유추한다.
'''
# 3차원 텐서의 크기 변경
'''
3차원 텐서에서 3차원 텐소로 차원은 유지하되, 크기(shape)을 바꿔보자.
'''
print(ft.view([-1, 1, 3]))
# 스퀴즈(squeeze) - 1인 차원을 제거
ft = torch.FloatTensor([[0], [1], [2]])
print(ft.size())
print(ft.squeeze())
print(ft.squeeze().size())
# 언스퀴즈(unsqueeze) - 특정 위치에 1인 차원을 추가
ft = torch.FloatTensor([1, 2, 3])
print(ft.size())
print(ft.unsqueeze(0))
print(ft.unsqueeze(0).size())
# 두 텐서를 연결하기 (concatenate)
x = torch.FloatTensor([[1, 2], [3, 4]])
y = torch.FloatTensor([[5, 6], [7, 8]])
print(torch.cat([x, y], dim=0)) # dim=0은 차원을 늘리라는 의미를 가진다.
print(torch.cat([x, y], dim=1))
# 스택킹(stacking)
x = torch.FloatTensor([1, 4])
y = torch.FloatTensor([2, 5])
z = torch.FloatTensor([3, 6])
print(torch.stack([x, y, z]))
print(torch.cat([x.unsqueeze(0), y.unsqueeze(0), z.unsqueeze(0)], dim=0))
'''
순차적으로 쌓여 (3 x 2) 텐서가 된다.
그리고 두 번째 프린트 문과 같이, cat을 이용하여 연결한 것 보다 훨씬 간결해졌다.
'''
# 0과 1로 채워진 텐서
x = torch.FloatTensor([[0, 1, 2], [2, 1, 0]])
print(torch.ones_like(x)) # x 텐서와 같은 크기이지만 값이 1로만 채워진 텐서를 생성
print(torch.zeros_like(x)) # x 텐서와 같은 크기이지만 값이 0로만 채워진 텐서를 생성
# In-place operation (덮어쓰기 연산)
x = torch.FloatTensor([[1, 2], [3, 4]])
print(x.mul(2.))
print(x)
''' 변동 x '''
print(x.mul_(2.))
print(x) | [
"torch.ones_like",
"torch.stack",
"numpy.array",
"torch.zeros_like",
"torch.FloatTensor",
"torch.cat"
] | [((342, 387), 'numpy.array', 'np.array', (['[0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0]'], {}), '([0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0])\n', (350, 387), True, 'import numpy as np\n'), ((641, 700), 'numpy.array', 'np.array', (['[[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9]]'], {}), '([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9]])\n', (649, 700), True, 'import numpy as np\n'), ((820, 874), 'torch.FloatTensor', 'torch.FloatTensor', (['[0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0]'], {}), '([0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0])\n', (837, 874), False, 'import torch\n'), ((1090, 1184), 'torch.FloatTensor', 'torch.FloatTensor', (['[[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0], [10.0, 11.0, 12.0]]'], {}), '([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0], [10.0,\n 11.0, 12.0]])\n', (1107, 1184), False, 'import torch\n'), ((1634, 1661), 'torch.FloatTensor', 'torch.FloatTensor', (['[[3, 3]]'], {}), '([[3, 3]])\n', (1651, 1661), False, 'import torch\n'), ((1667, 1694), 'torch.FloatTensor', 'torch.FloatTensor', (['[[2, 2]]'], {}), '([[2, 2]])\n', (1684, 1694), False, 'import torch\n'), ((1728, 1757), 'torch.FloatTensor', 'torch.FloatTensor', (['[[3], [4]]'], {}), '([[3], [4]])\n', (1745, 1757), False, 'import torch\n'), ((1813, 1848), 'torch.FloatTensor', 'torch.FloatTensor', (['[[1, 2], [3, 4]]'], {}), '([[1, 2], [3, 4]])\n', (1830, 1848), False, 'import torch\n'), ((1854, 1883), 'torch.FloatTensor', 'torch.FloatTensor', (['[[1], [2]]'], {}), '([[1], [2]])\n', (1871, 1883), False, 'import torch\n'), ((1952, 1977), 'torch.FloatTensor', 'torch.FloatTensor', (['[1, 2]'], {}), '([1, 2])\n', (1969, 1977), False, 'import torch\n'), ((2015, 2050), 'torch.FloatTensor', 'torch.FloatTensor', (['[[1, 2], [3, 4]]'], {}), '([[1, 2], [3, 4]])\n', (2032, 2050), False, 'import torch\n'), ((2264, 2299), 'torch.FloatTensor', 'torch.FloatTensor', (['[[1, 2], [3, 4]]'], {}), '([[1, 2], [3, 4]])\n', (2281, 2299), False, 'import torch\n'), ((2522, 2557), 'torch.FloatTensor', 'torch.FloatTensor', 
(['[[1, 2], [3, 4]]'], {}), '([[1, 2], [3, 4]])\n', (2539, 2557), False, 'import torch\n'), ((2932, 2992), 'numpy.array', 'np.array', (['[[[0, 1, 2], [3, 4, 5]], [[6, 7, 8], [9, 10, 11]]]'], {}), '([[[0, 1, 2], [3, 4, 5]], [[6, 7, 8], [9, 10, 11]]])\n', (2940, 2992), True, 'import numpy as np\n'), ((3042, 3062), 'torch.FloatTensor', 'torch.FloatTensor', (['t'], {}), '(t)\n', (3059, 3062), False, 'import torch\n'), ((3390, 3424), 'torch.FloatTensor', 'torch.FloatTensor', (['[[0], [1], [2]]'], {}), '([[0], [1], [2]])\n', (3407, 3424), False, 'import torch\n'), ((3532, 3560), 'torch.FloatTensor', 'torch.FloatTensor', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (3549, 3560), False, 'import torch\n'), ((3663, 3698), 'torch.FloatTensor', 'torch.FloatTensor', (['[[1, 2], [3, 4]]'], {}), '([[1, 2], [3, 4]])\n', (3680, 3698), False, 'import torch\n'), ((3703, 3738), 'torch.FloatTensor', 'torch.FloatTensor', (['[[5, 6], [7, 8]]'], {}), '([[5, 6], [7, 8]])\n', (3720, 3738), False, 'import torch\n'), ((3851, 3876), 'torch.FloatTensor', 'torch.FloatTensor', (['[1, 4]'], {}), '([1, 4])\n', (3868, 3876), False, 'import torch\n'), ((3881, 3906), 'torch.FloatTensor', 'torch.FloatTensor', (['[2, 5]'], {}), '([2, 5])\n', (3898, 3906), False, 'import torch\n'), ((3911, 3936), 'torch.FloatTensor', 'torch.FloatTensor', (['[3, 6]'], {}), '([3, 6])\n', (3928, 3936), False, 'import torch\n'), ((4144, 4185), 'torch.FloatTensor', 'torch.FloatTensor', (['[[0, 1, 2], [2, 1, 0]]'], {}), '([[0, 1, 2], [2, 1, 0]])\n', (4161, 4185), False, 'import torch\n'), ((4345, 4380), 'torch.FloatTensor', 'torch.FloatTensor', (['[[1, 2], [3, 4]]'], {}), '([[1, 2], [3, 4]])\n', (4362, 4380), False, 'import torch\n'), ((3745, 3769), 'torch.cat', 'torch.cat', (['[x, y]'], {'dim': '(0)'}), '([x, y], dim=0)\n', (3754, 3769), False, 'import torch\n'), ((3804, 3828), 'torch.cat', 'torch.cat', (['[x, y]'], {'dim': '(1)'}), '([x, y], dim=1)\n', (3813, 3828), False, 'import torch\n'), ((3943, 3965), 'torch.stack', 
'torch.stack', (['[x, y, z]'], {}), '([x, y, z])\n', (3954, 3965), False, 'import torch\n'), ((4192, 4210), 'torch.ones_like', 'torch.ones_like', (['x'], {}), '(x)\n', (4207, 4210), False, 'import torch\n'), ((4253, 4272), 'torch.zeros_like', 'torch.zeros_like', (['x'], {}), '(x)\n', (4269, 4272), False, 'import torch\n')] |
# -*- coding: utf-8 -*-
from django.conf.urls import url
from progressbarupload.views import upload_progress
urlpatterns = [
url(r'^upload_progress$', upload_progress, name="upload_progress"),
]
| [
"django.conf.urls.url"
] | [((130, 195), 'django.conf.urls.url', 'url', (['"""^upload_progress$"""', 'upload_progress'], {'name': '"""upload_progress"""'}), "('^upload_progress$', upload_progress, name='upload_progress')\n", (133, 195), False, 'from django.conf.urls import url\n')] |
import time
from celery import chain
from celery_app import app
@app.task
def add(x, y):
return x + y
'''
ref. http://docs.celeryq.org/en/latest/userguide/tasks.html#avoid-launching-synchronous-subtasks
'''
def chain_demo(x, y):
# add_demo -> mul_demo -> insert_db_demo
chain(add_demo.s(x, y), mul_demo.s(10), insert_db_demo.s())()
@app.task
def add_demo(x, y):
time.sleep(3)
return x + y
@app.task
def mul_demo(x, y):
time.sleep(3)
return x * y
@app.task(ignore_result=True)
def insert_db_demo(result):
print('insert db , result {}'.format(result))
| [
"celery_app.app.task",
"time.sleep"
] | [((490, 518), 'celery_app.app.task', 'app.task', ([], {'ignore_result': '(True)'}), '(ignore_result=True)\n', (498, 518), False, 'from celery_app import app\n'), ((389, 402), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (399, 402), False, 'import time\n'), ((456, 469), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (466, 469), False, 'import time\n')] |
# Time: O(n)
# Space: O(n)
# Given a string, find the first non-repeating character in it and
# return it's index. If it doesn't exist, return -1.
#
# Examples:
#
# s = "leetcode"
# return 0.
#
# s = "loveleetcode",
# return 2.
# Note: You may assume the string contain only lowercase letters.
from collections import defaultdict
class Solution(object):
def firstUniqChar(self, s):
"""
:type s: str
:rtype: int
"""
lookup = defaultdict(int)
candidtates = set()
for i, c in enumerate(s):
if lookup[c]:
candidtates.discard(lookup[c])
else:
lookup[c] = i+1
candidtates.add(i+1)
return min(candidtates)-1 if candidtates else -1
| [
"collections.defaultdict"
] | [((473, 489), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (484, 489), False, 'from collections import defaultdict\n')] |
import listexpscanner
import listexpparser
import sys
def main():
if (len(sys.argv)) != 2:
print("usage: listexp filename")
print(" listexp will interpret/compile the expression in the file named")
print(" filename and print its result to standard output")
return
strm = open(sys.argv[1],"r")
theScanner = listexpscanner.listexpScanner(strm)
theParser = listexpparser.listexpParser()
ast = theParser.parse(theScanner)
print(ast)
if __name__ == "__main__":
main()
| [
"listexpparser.listexpParser",
"listexpscanner.listexpScanner"
] | [((366, 401), 'listexpscanner.listexpScanner', 'listexpscanner.listexpScanner', (['strm'], {}), '(strm)\n', (395, 401), False, 'import listexpscanner\n'), ((418, 447), 'listexpparser.listexpParser', 'listexpparser.listexpParser', ([], {}), '()\n', (445, 447), False, 'import listexpparser\n')] |
import numpy as np
X = 2 * np.random.randn(100, 5)
y = 2.5382 * np.cos(X[:, 3]) + X[:, 0] ** 2 - 0.5
from pysr import PySRRegressor
model = PySRRegressor(
niterations=40,
binary_operators=["+", "*"],
unary_operators=[
"cos",
"exp",
"sin",
"inv(x) = 1/x", # Custom operator (julia syntax)
],
model_selection="best",
loss="loss(x, y) = (x - y)^2", # Custom loss function (julia syntax)
)
model.fit(X, y)
print(model)
| [
"pysr.PySRRegressor",
"numpy.random.randn",
"numpy.cos"
] | [((143, 321), 'pysr.PySRRegressor', 'PySRRegressor', ([], {'niterations': '(40)', 'binary_operators': "['+', '*']", 'unary_operators': "['cos', 'exp', 'sin', 'inv(x) = 1/x']", 'model_selection': '"""best"""', 'loss': '"""loss(x, y) = (x - y)^2"""'}), "(niterations=40, binary_operators=['+', '*'], unary_operators=\n ['cos', 'exp', 'sin', 'inv(x) = 1/x'], model_selection='best', loss=\n 'loss(x, y) = (x - y)^2')\n", (156, 321), False, 'from pysr import PySRRegressor\n'), ((28, 51), 'numpy.random.randn', 'np.random.randn', (['(100)', '(5)'], {}), '(100, 5)\n', (43, 51), True, 'import numpy as np\n'), ((65, 80), 'numpy.cos', 'np.cos', (['X[:, 3]'], {}), '(X[:, 3])\n', (71, 80), True, 'import numpy as np\n')] |
#!/usr/bin/env python3
""" Perform some operations on Android method CFGs to output a more
comprehensive global app graph. """
import argparse
import os.path
import logging
from acfg_tools.builder.cfg_analyser import CfgAnalyser
DESCRIPTION = "Create a global app graph from CFGs"
log = logging.getLogger("branchexp")
def main():
argparser = argparse.ArgumentParser(description=DESCRIPTION)
argparser.add_argument("dots", type=str, help="DOT files directory")
argparser.add_argument("manifest", type=str, help="app manifest file")
argparser.add_argument("heuristics_db", type=str, help="JSON db")
argparser.add_argument("impfile", type=file, help="implicit edges file")
args = argparser.parse_args()
if not os.path.isdir(args.dots) or not os.path.isfile(args.manifest):
print("Unavailable file or directory given.")
return
# TODO: update argument list
generate_acfg(args.dots, args.manifest, args.heuristics_db, args.impfile,
(args.impfile is not None))
def generate_acfg(dots, manifest, heuristics, doImplicit, exhaustive_paths,
output_dir, suspKeys, paths_out, output_filepath = None):
""" Generate an ACFG for CFGs in the dots dir, with the app manifest and
the heuristic db provided. """
cfg_analyser = CfgAnalyser(dots, manifest, doImplicit, exhaustive_paths,
output_dir, heuristics, suspKeys, paths_out)
cfg_analyser.create_app_graph()
return cfg_analyser.app_graph.graph
if __name__ == "__main__":
main()
| [
"logging.getLogger",
"acfg_tools.builder.cfg_analyser.CfgAnalyser",
"argparse.ArgumentParser"
] | [((291, 321), 'logging.getLogger', 'logging.getLogger', (['"""branchexp"""'], {}), "('branchexp')\n", (308, 321), False, 'import logging\n'), ((352, 400), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'DESCRIPTION'}), '(description=DESCRIPTION)\n', (375, 400), False, 'import argparse\n'), ((1316, 1422), 'acfg_tools.builder.cfg_analyser.CfgAnalyser', 'CfgAnalyser', (['dots', 'manifest', 'doImplicit', 'exhaustive_paths', 'output_dir', 'heuristics', 'suspKeys', 'paths_out'], {}), '(dots, manifest, doImplicit, exhaustive_paths, output_dir,\n heuristics, suspKeys, paths_out)\n', (1327, 1422), False, 'from acfg_tools.builder.cfg_analyser import CfgAnalyser\n')] |
import json
import os
import cv2
import numpy as np
from dgp.datasets.synchronized_dataset import SynchronizedScene
from dgp.utils.visualization_engine import visualize_dataset_3d, visualize_dataset_2d, visualize_dataset_sample_3d, visualize_dataset_sample_2d
from tests import TEST_DATA_DIR
def dummy_caption(dataset, idx):
return "SAMPLE"
def test_visualize_dataset_3d():
'''
Uses parametrized testing to run multiple cases for SynchronizedSceneDataset
'''
scene_json = os.path.join(
TEST_DATA_DIR, "dgp", "test_scene/scene_01/scene_a8dc5ed1da0923563f85ea129f0e0a83e7fe1867.json"
)
filepath = "./test_3d_vis.avi"
dataset = SynchronizedScene(
scene_json,
datum_names=['LIDAR', 'CAMERA_01', 'CAMERA_05', 'CAMERA_06'],
forward_context=1,
backward_context=1,
requested_annotations=("bounding_box_2d", "bounding_box_3d")
)
visualize_dataset_3d(
dataset=dataset,
camera_datum_names=['CAMERA_01'],
lidar_datum_names=['LIDAR'],
radar_datum_names=[],
output_video_file=filepath
)
assert os.path.exists(filepath)
os.remove(filepath)
def test_visualize_dataset_2d():
'''
Uses parametrized testing to run multiple cases for SynchronizedSceneDataset
'''
scene_json = os.path.join(
TEST_DATA_DIR, "dgp", "test_scene/scene_01/scene_a8dc5ed1da0923563f85ea129f0e0a83e7fe1867.json"
)
filepath = "./test_2d_vis.avi"
dataset = SynchronizedScene(
scene_json,
datum_names=['LIDAR', 'CAMERA_01', 'CAMERA_05', 'CAMERA_06'],
forward_context=1,
backward_context=1,
requested_annotations=("bounding_box_2d", "bounding_box_3d")
)
visualize_dataset_2d(
dataset=dataset, camera_datum_names=['CAMERA_01'], caption_fn=dummy_caption, output_video_file=filepath
)
assert os.path.exists(filepath)
os.remove(filepath)
def test_visualize_dataset_sample_3d():
'''
Uses parametrized testing to run multiple cases for SynchronizedSceneDataset
'''
scene_json = os.path.join(
TEST_DATA_DIR, "dgp", "test_scene/scene_01/scene_a8dc5ed1da0923563f85ea129f0e0a83e7fe1867.json"
)
dataset = SynchronizedScene(
scene_json,
datum_names=['LIDAR', 'CAMERA_01', 'CAMERA_05', 'CAMERA_06'],
forward_context=1,
backward_context=1,
requested_annotations=("bounding_box_2d", "bounding_box_3d")
)
result = visualize_dataset_sample_3d(dataset=dataset, scene_idx=0, sample_idx=0, camera_datum_names=['camera_05'])
data = cv2.imread('tests/data/dgp/vis_output.png', cv2.IMREAD_COLOR)
assert np.allclose(result["camera_05"], data)
| [
"os.path.exists",
"numpy.allclose",
"dgp.utils.visualization_engine.visualize_dataset_sample_3d",
"dgp.utils.visualization_engine.visualize_dataset_2d",
"os.path.join",
"dgp.utils.visualization_engine.visualize_dataset_3d",
"dgp.datasets.synchronized_dataset.SynchronizedScene",
"cv2.imread",
"os.rem... | [((498, 611), 'os.path.join', 'os.path.join', (['TEST_DATA_DIR', '"""dgp"""', '"""test_scene/scene_01/scene_a8dc5ed1da0923563f85ea129f0e0a83e7fe1867.json"""'], {}), "(TEST_DATA_DIR, 'dgp',\n 'test_scene/scene_01/scene_a8dc5ed1da0923563f85ea129f0e0a83e7fe1867.json')\n", (510, 611), False, 'import os\n'), ((671, 871), 'dgp.datasets.synchronized_dataset.SynchronizedScene', 'SynchronizedScene', (['scene_json'], {'datum_names': "['LIDAR', 'CAMERA_01', 'CAMERA_05', 'CAMERA_06']", 'forward_context': '(1)', 'backward_context': '(1)', 'requested_annotations': "('bounding_box_2d', 'bounding_box_3d')"}), "(scene_json, datum_names=['LIDAR', 'CAMERA_01',\n 'CAMERA_05', 'CAMERA_06'], forward_context=1, backward_context=1,\n requested_annotations=('bounding_box_2d', 'bounding_box_3d'))\n", (688, 871), False, 'from dgp.datasets.synchronized_dataset import SynchronizedScene\n'), ((914, 1073), 'dgp.utils.visualization_engine.visualize_dataset_3d', 'visualize_dataset_3d', ([], {'dataset': 'dataset', 'camera_datum_names': "['CAMERA_01']", 'lidar_datum_names': "['LIDAR']", 'radar_datum_names': '[]', 'output_video_file': 'filepath'}), "(dataset=dataset, camera_datum_names=['CAMERA_01'],\n lidar_datum_names=['LIDAR'], radar_datum_names=[], output_video_file=\n filepath)\n", (934, 1073), False, 'from dgp.utils.visualization_engine import visualize_dataset_3d, visualize_dataset_2d, visualize_dataset_sample_3d, visualize_dataset_sample_2d\n'), ((1122, 1146), 'os.path.exists', 'os.path.exists', (['filepath'], {}), '(filepath)\n', (1136, 1146), False, 'import os\n'), ((1151, 1170), 'os.remove', 'os.remove', (['filepath'], {}), '(filepath)\n', (1160, 1170), False, 'import os\n'), ((1320, 1433), 'os.path.join', 'os.path.join', (['TEST_DATA_DIR', '"""dgp"""', '"""test_scene/scene_01/scene_a8dc5ed1da0923563f85ea129f0e0a83e7fe1867.json"""'], {}), "(TEST_DATA_DIR, 'dgp',\n 'test_scene/scene_01/scene_a8dc5ed1da0923563f85ea129f0e0a83e7fe1867.json')\n", (1332, 1433), False, 'import 
os\n'), ((1493, 1693), 'dgp.datasets.synchronized_dataset.SynchronizedScene', 'SynchronizedScene', (['scene_json'], {'datum_names': "['LIDAR', 'CAMERA_01', 'CAMERA_05', 'CAMERA_06']", 'forward_context': '(1)', 'backward_context': '(1)', 'requested_annotations': "('bounding_box_2d', 'bounding_box_3d')"}), "(scene_json, datum_names=['LIDAR', 'CAMERA_01',\n 'CAMERA_05', 'CAMERA_06'], forward_context=1, backward_context=1,\n requested_annotations=('bounding_box_2d', 'bounding_box_3d'))\n", (1510, 1693), False, 'from dgp.datasets.synchronized_dataset import SynchronizedScene\n'), ((1737, 1866), 'dgp.utils.visualization_engine.visualize_dataset_2d', 'visualize_dataset_2d', ([], {'dataset': 'dataset', 'camera_datum_names': "['CAMERA_01']", 'caption_fn': 'dummy_caption', 'output_video_file': 'filepath'}), "(dataset=dataset, camera_datum_names=['CAMERA_01'],\n caption_fn=dummy_caption, output_video_file=filepath)\n", (1757, 1866), False, 'from dgp.utils.visualization_engine import visualize_dataset_3d, visualize_dataset_2d, visualize_dataset_sample_3d, visualize_dataset_sample_2d\n'), ((1888, 1912), 'os.path.exists', 'os.path.exists', (['filepath'], {}), '(filepath)\n', (1902, 1912), False, 'import os\n'), ((1917, 1936), 'os.remove', 'os.remove', (['filepath'], {}), '(filepath)\n', (1926, 1936), False, 'import os\n'), ((2093, 2206), 'os.path.join', 'os.path.join', (['TEST_DATA_DIR', '"""dgp"""', '"""test_scene/scene_01/scene_a8dc5ed1da0923563f85ea129f0e0a83e7fe1867.json"""'], {}), "(TEST_DATA_DIR, 'dgp',\n 'test_scene/scene_01/scene_a8dc5ed1da0923563f85ea129f0e0a83e7fe1867.json')\n", (2105, 2206), False, 'import os\n'), ((2231, 2431), 'dgp.datasets.synchronized_dataset.SynchronizedScene', 'SynchronizedScene', (['scene_json'], {'datum_names': "['LIDAR', 'CAMERA_01', 'CAMERA_05', 'CAMERA_06']", 'forward_context': '(1)', 'backward_context': '(1)', 'requested_annotations': "('bounding_box_2d', 'bounding_box_3d')"}), "(scene_json, datum_names=['LIDAR', 'CAMERA_01',\n 
'CAMERA_05', 'CAMERA_06'], forward_context=1, backward_context=1,\n requested_annotations=('bounding_box_2d', 'bounding_box_3d'))\n", (2248, 2431), False, 'from dgp.datasets.synchronized_dataset import SynchronizedScene\n'), ((2484, 2593), 'dgp.utils.visualization_engine.visualize_dataset_sample_3d', 'visualize_dataset_sample_3d', ([], {'dataset': 'dataset', 'scene_idx': '(0)', 'sample_idx': '(0)', 'camera_datum_names': "['camera_05']"}), "(dataset=dataset, scene_idx=0, sample_idx=0,\n camera_datum_names=['camera_05'])\n", (2511, 2593), False, 'from dgp.utils.visualization_engine import visualize_dataset_3d, visualize_dataset_2d, visualize_dataset_sample_3d, visualize_dataset_sample_2d\n'), ((2601, 2662), 'cv2.imread', 'cv2.imread', (['"""tests/data/dgp/vis_output.png"""', 'cv2.IMREAD_COLOR'], {}), "('tests/data/dgp/vis_output.png', cv2.IMREAD_COLOR)\n", (2611, 2662), False, 'import cv2\n'), ((2674, 2712), 'numpy.allclose', 'np.allclose', (["result['camera_05']", 'data'], {}), "(result['camera_05'], data)\n", (2685, 2712), True, 'import numpy as np\n')] |
#T# the following code shows how to draw a point of concurrency
#T# to draw a point of concurrency, the pyplot module of the matplotlib package is used
import matplotlib.pyplot as plt
#T# create the figure and axes
fig1, ax1 = plt.subplots(1, 1)
#T# set the aspect of the axes
ax1.set_aspect('equal', adjustable = 'box')
#T# hide the spines and ticks
for it1 in ['top', 'bottom', 'left', 'right']:
ax1.spines[it1].set_visible(False)
ax1.xaxis.set_visible(False)
ax1.yaxis.set_visible(False)
#T# create the variables that define the plot
p1 = (0, 0)
m1 = 3
m2 = .3
m3 = -9
#T# plot the figure
ax1.axline((p1[0], p1[1]), slope = m1, color = 'k')
ax1.axline((p1[0], p1[1]), slope = m2, color = 'k')
ax1.axline((p1[0], p1[1]), slope = m3, color = 'k')
ax1.scatter(p1[0], p1[1], s = 18, color = 'k')
#T# set the math text font to the Latex default, Computer Modern
import matplotlib
matplotlib.rcParams['mathtext.fontset'] = 'cm'
#T# create the labels
label_A = ax1.annotate(r'$A$', p1, ha = 'left', va = 'top', size = 16)
#T# drag the labels if needed
label_A.draggable()
#T# show the results
plt.show() | [
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((229, 247), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (241, 247), True, 'import matplotlib.pyplot as plt\n'), ((1103, 1113), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1111, 1113), True, 'import matplotlib.pyplot as plt\n')] |
import numpy as np
import serial
import struct
import threading
import time
from array import array
from datetime import datetime
class ImuData:
def __init__(self, t=0.0, freq=0, ypr=np.zeros(3), a=np.zeros(3), \
W=np.zeros(3)):
self.t = t
self.freq = freq
self.ypr = ypr
self.a = a
self.W = W
class Vectornav(threading.Thread):
def __init__(self, thread_id, port, baud, t0):
'''Instantiate the IMU thread.
Args:
thread_id: (int) - Thread ID
port: (string) - Port name of the IMU
baud: (int) - Baud rate of the IMU
t0: (datetime object) - Epoch
'''
threading.Thread.__init__(self)
self.thread_id = thread_id
self._lock = threading.Lock()
self._on = True
self._t0 = t0
self._port = port
self._baud = baud
self._t = (datetime.now() - t0).total_seconds()
self._ypr = np.zeros(3)
self._a = np.zeros(3)
self._W = np.zeros(3)
# This is specific message has 41 bytes. You should update this to match
# your configuration.
self._len_payload = 41
print('IMU: initialized')
def run(self):
'''Start the thread.
'''
print('IMU: reading from {} at {}'.format(self._port, self._baud))
# In case the port is not properly closed,
try:
temp = serial.Serial(self._port, self._baud)
temp.close()
except:
print('\033[91m' + 'Unable to open IMU port at ' + self._port
+ ':' + str(self._baud) + '\033[0m')
return
# Open the serial port and start reading.
with serial.Serial(self._port, self._baud, timeout=1) as s:
# Clear the buffer first.
print('IMU: clearing buffer')
num_bytes = s.in_waiting
s.read(num_bytes)
print('IMU: starting main loop')
while self._on:
imu_sync_detected = False
# Check if there are bytes waiting in the buffer.
num_bytes = s.in_waiting
if num_bytes == 0:
# Reduce/delete this sleep time if you are reading data at
# a faster rate.
time.sleep(0.01)
continue
# IMU sends 0xFA (int 250) as the first byte. This marks the
# begining of the message.
imu_sync_detected = self.check_sync_byte(s)
if not imu_sync_detected:
continue
# If the sync byte us detected, read the rest of the message.
success = self.read_imu_data(s)
if not success:
continue
print('IMU: thread closed')
def check_sync_byte(self, s):
'''Check if the sync byte is detected.
IMU sends 0xFA (int 250) as the first byte. This marks the begining of
the message.
Args:
s: (serial object) - Already open serial port of the IMU.
Return:
bool - True if the sync byte is detected in the current buffer.
'''
# Iterate over all the bytes in the current buffer.
for _ in range(s.in_waiting):
byte_in = s.read(1)
# Check if the sync byte 0xFA (int 250) is detected.
int_in = int.from_bytes(byte_in, 'little')
if int_in == 250:
return True
return False
def read_imu_data(self, s):
'''Read and parse the payload of the IMU message.
Args:
s: (serial object) - Already open serial port of the IMU.
Return:
bool - True if the operation is succesfull
'''
# Read data.
N = self._len_payload
data = s.read(N)
# Check if there are unexpected errors in the message.
# Last two bytes of the payload is the checksum bytes.
checksum_array = array('B', [data[N-1], data[N-2]])
checksum = struct.unpack('H', checksum_array)[0]
# Compare the received checksum value against the calculated checksum.
crc = self.calculate_imu_crc(data[:N-2])
if not crc == checksum:
print('IMU CRC error')
return False
# If the checksum is valid, parse the data.
return self.parse_data(data)
def parse_data(self, data):
'''Parse the bytes of the sensor measurements
Args:
data: (byte array) - data read from the serial port
Return:
bool - True if the operation is succesfull
'''
try:
with self._lock:
self._ypr[0] = struct.unpack('f', data[3:7])[0]
self._ypr[1] = struct.unpack('f', data[7:11])[0]
self._ypr[2] = struct.unpack('f', data[11:15])[0]
self._a[0] = struct.unpack('f', data[15:19])[0]
self._a[1] = struct.unpack('f', data[19:23])[0]
self._a[2] = struct.unpack('f', data[23:27])[0]
self._W[0] = struct.unpack('f', data[27:31])[0]
self._W[1] = struct.unpack('f', data[31:35])[0]
self._W[2] = struct.unpack('f', data[35:39])[0]
except:
print('IMU: error parsing data')
return False
return True
def calculate_imu_crc(self, data):
'''Calculate the 16-bit CRC for the given message.
Args:
data: (byte array) - data read from the serial port
Return:
unsigned short - CRC checksum value
'''
data = bytearray(data)
crc = np.array([0], dtype=np.ushort)
for i in range(len(data)):
crc[0] = (crc[0] >> 8) | (crc[0] << 8)
crc[0] ^= data[i]
crc[0] ^= (crc[0] & 0xff) >> 4
crc[0] ^= crc[0] << 12
crc[0] ^= (crc[0] & 0x00ff) << 5
return crc[0]
def output_data(self):
'''Output the current measurements.
Return:
ImuData - current IMU data
'''
with self._lock:
data = ImuData(
self._t,
self._ypr,
self._a,
self._W
)
return data
def end_thread(self):
'''Call to end the IMU thread.'''
self._on = False
print('IMU: thread close signal received')
| [
"threading.Thread.__init__",
"array.array",
"threading.Lock",
"time.sleep",
"numpy.array",
"numpy.zeros",
"struct.unpack",
"serial.Serial",
"datetime.datetime.now"
] | [((191, 202), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (199, 202), True, 'import numpy as np\n'), ((206, 217), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (214, 217), True, 'import numpy as np\n'), ((235, 246), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (243, 246), True, 'import numpy as np\n'), ((682, 713), 'threading.Thread.__init__', 'threading.Thread.__init__', (['self'], {}), '(self)\n', (707, 713), False, 'import threading\n'), ((771, 787), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (785, 787), False, 'import threading\n'), ((965, 976), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (973, 976), True, 'import numpy as np\n'), ((995, 1006), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (1003, 1006), True, 'import numpy as np\n'), ((1025, 1036), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (1033, 1036), True, 'import numpy as np\n'), ((4112, 4150), 'array.array', 'array', (['"""B"""', '[data[N - 1], data[N - 2]]'], {}), "('B', [data[N - 1], data[N - 2]])\n", (4117, 4150), False, 'from array import array\n'), ((5804, 5834), 'numpy.array', 'np.array', (['[0]'], {'dtype': 'np.ushort'}), '([0], dtype=np.ushort)\n', (5812, 5834), True, 'import numpy as np\n'), ((1437, 1474), 'serial.Serial', 'serial.Serial', (['self._port', 'self._baud'], {}), '(self._port, self._baud)\n', (1450, 1474), False, 'import serial\n'), ((1728, 1776), 'serial.Serial', 'serial.Serial', (['self._port', 'self._baud'], {'timeout': '(1)'}), '(self._port, self._baud, timeout=1)\n', (1741, 1776), False, 'import serial\n'), ((4166, 4200), 'struct.unpack', 'struct.unpack', (['"""H"""', 'checksum_array'], {}), "('H', checksum_array)\n", (4179, 4200), False, 'import struct\n'), ((907, 921), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (919, 921), False, 'from datetime import datetime\n'), ((2338, 2354), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (2348, 2354), False, 'import time\n'), ((4840, 4869), 'struct.unpack', 
'struct.unpack', (['"""f"""', 'data[3:7]'], {}), "('f', data[3:7])\n", (4853, 4869), False, 'import struct\n'), ((4904, 4934), 'struct.unpack', 'struct.unpack', (['"""f"""', 'data[7:11]'], {}), "('f', data[7:11])\n", (4917, 4934), False, 'import struct\n'), ((4969, 5000), 'struct.unpack', 'struct.unpack', (['"""f"""', 'data[11:15]'], {}), "('f', data[11:15])\n", (4982, 5000), False, 'import struct\n'), ((5034, 5065), 'struct.unpack', 'struct.unpack', (['"""f"""', 'data[15:19]'], {}), "('f', data[15:19])\n", (5047, 5065), False, 'import struct\n'), ((5098, 5129), 'struct.unpack', 'struct.unpack', (['"""f"""', 'data[19:23]'], {}), "('f', data[19:23])\n", (5111, 5129), False, 'import struct\n'), ((5162, 5193), 'struct.unpack', 'struct.unpack', (['"""f"""', 'data[23:27]'], {}), "('f', data[23:27])\n", (5175, 5193), False, 'import struct\n'), ((5227, 5258), 'struct.unpack', 'struct.unpack', (['"""f"""', 'data[27:31]'], {}), "('f', data[27:31])\n", (5240, 5258), False, 'import struct\n'), ((5291, 5322), 'struct.unpack', 'struct.unpack', (['"""f"""', 'data[31:35]'], {}), "('f', data[31:35])\n", (5304, 5322), False, 'import struct\n'), ((5355, 5386), 'struct.unpack', 'struct.unpack', (['"""f"""', 'data[35:39]'], {}), "('f', data[35:39])\n", (5368, 5386), False, 'import struct\n')] |
import unittest
import os
import warnings
from ml4ir.base.data import ranklib_helper
import pandas as pd
warnings.filterwarnings("ignore")
INPUT_FILE = "ml4ir/applications/ranking/tests/data/ranklib/train/sample.txt"
OUTPUT_FILE = "ml4ir/applications/ranking/tests/data/ranklib/train/sample_ml4ir.csv"
QUERY_ID_NAME = 'qid'
RELEVANCE_NAME = 'relevance'
KEEP_ADDITIONAL_INFO = 1
GL_2_CLICKS = 1
NON_ZERO_FEATURES_ONLY = 0
class TestRanklibConversion(unittest.TestCase):
    """Integration test: convert a ranklib-format dataset into an ml4ir CSV."""

    def setUp(self):
        pass

    def test_conversion(self):
        """Convert ranklib dataset to a csv and validate the resulting frame."""
        ranklib_helper.ranklib_to_csv(INPUT_FILE, OUTPUT_FILE, KEEP_ADDITIONAL_INFO,
                                      GL_2_CLICKS, NON_ZERO_FEATURES_ONLY, QUERY_ID_NAME, RELEVANCE_NAME)
        df = pd.read_csv(OUTPUT_FILE)
        # Query-id and relevance columns must survive the conversion.
        assert QUERY_ID_NAME in df.columns and RELEVANCE_NAME in df.columns
        # The sample file contains 49 distinct queries.
        assert df[QUERY_ID_NAME].nunique() == 49
        if KEEP_ADDITIONAL_INFO == 1:
            # Extra per-document info adds columns beyond the 138 base ones.
            assert len(df.columns) >= 138
        else:
            assert len(df.columns) == 138
        if GL_2_CLICKS == 1:
            # Graded labels were collapsed to binary clicks.
            assert sorted(list(df[RELEVANCE_NAME].unique())) == [0, 1]

    def tearDown(self):
        # Delete the generated output file. Tolerate its absence so that a
        # failure inside test_conversion (before the file exists) does not
        # raise a second FileNotFoundError here that masks the real error.
        if os.path.exists(OUTPUT_FILE):
            os.remove(OUTPUT_FILE)
if __name__ == "__main__":
    # Run the tests when this module is executed directly.
    unittest.main()
| [
"pandas.read_csv",
"ml4ir.base.data.ranklib_helper.ranklib_to_csv",
"unittest.main",
"warnings.filterwarnings",
"os.remove"
] | [((107, 140), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (130, 140), False, 'import warnings\n'), ((1297, 1312), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1310, 1312), False, 'import unittest\n'), ((596, 744), 'ml4ir.base.data.ranklib_helper.ranklib_to_csv', 'ranklib_helper.ranklib_to_csv', (['INPUT_FILE', 'OUTPUT_FILE', 'KEEP_ADDITIONAL_INFO', 'GL_2_CLICKS', 'NON_ZERO_FEATURES_ONLY', 'QUERY_ID_NAME', 'RELEVANCE_NAME'], {}), '(INPUT_FILE, OUTPUT_FILE, KEEP_ADDITIONAL_INFO,\n GL_2_CLICKS, NON_ZERO_FEATURES_ONLY, QUERY_ID_NAME, RELEVANCE_NAME)\n', (625, 744), False, 'from ml4ir.base.data import ranklib_helper\n'), ((792, 816), 'pandas.read_csv', 'pd.read_csv', (['OUTPUT_FILE'], {}), '(OUTPUT_FILE)\n', (803, 816), True, 'import pandas as pd\n'), ((1241, 1263), 'os.remove', 'os.remove', (['OUTPUT_FILE'], {}), '(OUTPUT_FILE)\n', (1250, 1263), False, 'import os\n')] |
from tkinter import filedialog
from tkinter import *
from PIL import Image, ImageTk
from tkinter.messagebox import showinfo
class InfoWindow(object):
    """Modal helper window that asks the user for a file-resolution string.

    After the dialog is dismissed via the Ok button, the entered text is
    available as ``self.value``.
    """

    def __init__(self, master):
        top = self.top = Toplevel(master)
        # Prompt text: fixed typo ("resoltuion" -> "resolution").
        self.infoLabel = Label(top, text="Please Enter file resolution with ; to separate values")
        self.infoLabel.pack()
        self.infoEntry = Entry(top)
        self.infoEntry.pack()
        self.infoButton = Button(top, text='Ok', command=self.cleanup)
        self.infoButton.pack()

    def cleanup(self):
        """Capture the entry's text into ``self.value`` and close the window."""
        self.value = self.infoEntry.get()
        self.top.destroy()
class GenerateTiffStackWindow(object):
    """Modal dialog offering four checkbox options for tiff-stack generation.

    After dismissal, ``self.value`` holds the four checkbox states joined
    with ';' in the order: raw, processed, 3D-model, segmented.
    """

    def __init__(self, master):
        self.top = top = Toplevel(master)
        self.generateRawTiffs = IntVar()
        self.generateProcessedTiffs = IntVar()
        self.generate3DModels = IntVar()
        self.generateSegmentedTiffs = IntVar()
        self.infoLabel = Label(top, text="Please choose the options for tiff stack generation")
        self.infoLabel.pack()
        # Build the four option checkboxes in display order, keeping the
        # per-checkbox attribute names used elsewhere.
        option_rows = (
            ("generateRawTiffsCheck", "Generate Raw Tiffs", self.generateRawTiffs),
            ("generateProcessedTiffsCheck", "Generate Processed Tiffs", self.generateProcessedTiffs),
            ("generate3DModelCheck", "Generate 3D Models", self.generate3DModels),
            ("generateSegmentedTiffsCheck", "Generate Segmented Tiffs", self.generateSegmentedTiffs),
        )
        for attr, caption, var in option_rows:
            box = Checkbutton(top, text=caption, variable=var)
            setattr(self, attr, box)
            box.pack()
        self.infoButton = Button(top, text='Ok', command=self.cleanup)
        self.infoButton.pack()

    def cleanup(self):
        """Join the four checkbox states with ';' into ``self.value`` and close."""
        states = (self.generateRawTiffs, self.generateProcessedTiffs,
                  self.generate3DModels, self.generateSegmentedTiffs)
        self.value = ";".join(str(v.get()) for v in states)
        self.top.destroy()
class GetTrayCSVs(object):
    """Modal dialog that collects one CSV file path per tray.

    After dismissal, ``self.value`` holds the chosen paths joined with '*',
    with a single space standing in for trays the user skipped.
    """
    def __init__(self,master,trayCount):
        # NOTE(review): getCSVFiles takes len() of trayCount, so callers must
        # pass a sized collection (e.g. a list of trays), not a bare integer
        # count -- confirm against call sites.
        top=self.top=Toplevel(master)
        self.infoLabel=Label(top,text="Select directories for each trays csv, or click Ok to leave blank")
        self.infoLabel.pack()
        self.paths = ""
        self.trayCount = trayCount
        self.fileButton=Button(top,text='get files',command=self.getCSVFiles)
        self.fileButton.pack()
        self.infoButton=Button(top,text='Ok',command=self.cleanup)
        self.infoButton.pack()
    def getCSVFiles(self):
        """Prompt for one file per tray, then close the dialog via cleanup()."""
        for i in range(len(self.trayCount)):
            temp = filedialog.askopenfilename()
            # Empty string means the file dialog was cancelled; record a
            # one-space placeholder so the '*'-separated slots stay aligned.
            if temp == '':
                temp = ' '
            self.paths = self.paths+temp+"*"
        self.cleanup()
    def cleanup(self):
        """Publish the accumulated paths as ``self.value`` and close."""
        self.value=self.paths
        self.top.destroy()
"tkinter.filedialog.askopenfilename"
] | [((2702, 2730), 'tkinter.filedialog.askopenfilename', 'filedialog.askopenfilename', ([], {}), '()\n', (2728, 2730), False, 'from tkinter import filedialog\n')] |
#!/usr/bin/env python
import sys
import os
import subprocess
import functools
import collections
# Parameters
# Repository root resolved through git; all other paths derive from it.
lbann_dir = subprocess.check_output(["git", "rev-parse", "--show-toplevel"]).strip()
# NOTE(review): on Python 3 check_output returns bytes, so the str
# concatenations below would raise TypeError -- this script appears to
# target Python 2; confirm before porting.
lbann_proto_dir = lbann_dir + "/src/proto/"
work_dir = lbann_dir + "/model_zoo/models/vram"
template_proto = lbann_dir + "/model_zoo/models/vram/dram_template.prototext"
output_proto = lbann_dir + "/model_zoo/models/vram/dram.prototext"
# Convert a list into a space-separated string
def str_list(l):
    """Return *l* unchanged if it is already a string; otherwise join its
    stringified elements with single spaces."""
    if isinstance(l, str):
        return l
    parts = [str(item) for item in l]
    return " ".join(parts)
# Construct a new layer and add it to the model
def new_layer(model, name, parents, layer_type, device="", weights=[]):
    """Append a new layer message to *model* and return it.

    parents and weights may each be a single object or an iterable of
    objects; a scalar is wrapped in a one-element list before use.
    layer_type names the layer-type sub-message to activate (e.g. "relu").
    device is an optional device-allocation string (e.g. "cpu").
    """
    # hasattr(x, "__iter__") replaces isinstance(x, collections.Iterable):
    # the collections.Iterable alias was removed in Python 3.10, and the
    # __iter__ check is what the ABC tested anyway for the list/map/message
    # arguments used in this file.
    if not hasattr(parents, "__iter__"):
        return new_layer(model, name, [parents], layer_type, device, weights)
    if not hasattr(weights, "__iter__"):
        return new_layer(model, name, parents, layer_type, device, [weights])
    l = model.layer.add()
    l.name = name
    l.parents = str_list(map(lambda p: p.name, parents))
    # Activate the requested layer-type sub-message via attribute lookup
    # rather than exec-ing a dynamically built source string.
    getattr(l, layer_type).SetInParent()
    l.weights = str_list(map(lambda w: w.name, weights))
    l.device_allocation = device
    return l
# Construct a new set of weights and add it to the model
def new_weights(model, name, initializer=""):
    """Append a new weights message to *model* and return it.

    initializer, when non-empty, names the initializer sub-message to
    activate (e.g. "he_normal_initializer").
    """
    w = model.weights.add()
    w.name = name
    if initializer:
        # Attribute lookup replaces the original exec() of a built-up
        # source string; behavior is identical but safer and clearer.
        getattr(w, initializer).SetInParent()
    return w
class FullyConnectedCell:
    """Weight-shared fully-connected layer factory.

    Each call appends a fully-connected layer (plus optional activation) to
    the model, reusing the same linearity/bias weights so that every unrolled
    time step shares parameters.
    """
    # Class-level defaults act as attribute declarations; __init__ rebinds
    # them per instance.
    name = ""
    size = 0
    model = None
    has_bias = False
    activation = None
    weights = []
    step = -1
    def __init__(self, name, size, model,
                 activation = None, initializer = "constant_initializer", has_bias = True):
        self.name = name
        self.size = size
        self.model = model
        self.has_bias = has_bias
        self.activation = activation
        # Initialize weights
        # Shared weights: one linearity matrix and one bias vector, created
        # once and referenced by every layer this cell emits.
        self.weights = [new_weights(model, name + "_linearity", initializer),
                        new_weights(model, name + "_bias", "constant_initializer")]
    def __call__(self, parent):
        """Append a fully-connected layer fed by *parent*; return the last layer."""
        # Step counter distinguishes generated layer names per unroll step.
        self.step += 1
        fc = new_layer(self.model, "%s_fc_step%d" % (self.name, self.step),
                       parent, "fully_connected", "" ,self.weights)
        fc.fully_connected.num_neurons = self.size
        fc.fully_connected.has_bias = self.has_bias
        if self.activation:
            act = new_layer(self.model,
                            "%s_step%d" % (self.name, self.step),
                            fc, self.activation)
            return act
        else:
            # No activation: the FC layer itself takes the step name.
            fc.name = "%s_step%d" % (self.name, self.step)
            return fc
class ConvolutionCell:
    """Weight-shared convolution layer factory.

    Each call appends a convolution layer (plus optional activation) to the
    model, reusing the same kernel/bias weights so that every unrolled time
    step shares parameters.
    """
    # Class-level defaults act as attribute declarations; __init__ rebinds
    # them per instance.
    name = ""
    num_output_channels = 0
    num_dims = 0
    conv_dim = 0
    conv_stride = 0
    conv_pad = 0
    model = None
    has_bias = False
    activation = None
    weights = []
    step = -1
    def __init__(self, name, num_output_channels,
                 num_dims, conv_dim, conv_stride, conv_pad,
                 model,
                 activation = None,
                 initializer = "constant_initializer",
                 has_bias = True):
        self.name = name
        self.num_output_channels = num_output_channels
        self.num_dims = num_dims
        self.conv_dim = conv_dim
        self.conv_stride = conv_stride
        self.conv_pad = conv_pad
        self.model = model
        self.has_bias = has_bias
        self.activation = activation
        # Initialize weights
        # Shared weights: one kernel and one bias, created once and
        # referenced by every layer this cell emits.
        self.weights = [new_weights(model, name + "_kernel", initializer),
                        new_weights(model, name + "_bias", "constant_initializer")]
    def __call__(self, parent):
        """Append a convolution layer fed by *parent*; return the last layer."""
        # Step counter distinguishes generated layer names per unroll step.
        self.step += 1
        conv = new_layer(self.model, "%s_conv_step%d" % (self.name, self.step),
                         parent, "convolution", "", self.weights)
        conv.convolution.num_output_channels = self.num_output_channels
        conv.convolution.num_dims = self.num_dims
        conv.convolution.conv_dims_i = self.conv_dim
        conv.convolution.conv_strides_i = self.conv_stride
        conv.convolution.conv_pads_i = self.conv_pad
        conv.convolution.has_bias = self.has_bias
        if self.activation:
            act = new_layer(self.model,
                            "%s_step%d" % (self.name, self.step),
                            conv, self.activation)
            return act
        else:
            # No activation: the conv layer itself takes the step name.
            conv.name = "%s_step%d" % (self.name, self.step)
            return conv
# Uses reLU activations
class LstmCell:
    """Weight-shared LSTM cell, unrolled one step per call.

    Each call appends the gate, cell-state, and output layers for one time
    step to the model and returns the new hidden-state layer. The cell
    activation is ReLU (see the "relu" layer below) rather than tanh.
    """
    # Class-level defaults act as attribute declarations; __init__ rebinds
    # them per instance.
    name = ""
    size = 0
    model = None
    step = -1
    outputs = []
    cells = []
    # Fully-connected layers
    # NOTE(review): these four placeholders are never assigned; __init__
    # stores the gates as forget_gate/input_gate/output_gate/cell_update,
    # so the *_fc names below appear to be dead declarations -- confirm.
    forget_fc = None
    input_fc = None
    output_fc = None
    cell_fc = None
    def __init__(self, name, size, model):
        self.name = name
        self.size = size
        self.model = model
        # Fully-connected layers
        # One weight-shared FC cell per LSTM gate; sigmoid for the gates,
        # ReLU for the candidate cell update.
        self.forget_gate = FullyConnectedCell(name + "_forget_gate_fc", size, model,
                                              "sigmoid", "glorot_normal_initializer", True)
        self.input_gate = FullyConnectedCell(name + "_input_gate_fc", size, model,
                                             "sigmoid", "glorot_normal_initializer", True)
        self.output_gate = FullyConnectedCell(name + "_output_gate_fc", size, model,
                                              "sigmoid", "glorot_normal_initializer", True)
        self.cell_update = FullyConnectedCell(name + "_cell_update_fc", size, model,
                                              "relu", "he_normal_initializer", True)
        # Initial state
        # outputs[-1] / cells[-1] always hold the previous step's state;
        # step 0 reads these zero-filled constant layers.
        self.outputs = [new_layer(model, name + "_output_init", [], "constant")]
        self.outputs[0].constant.num_neurons = str(size)
        self.cells = [new_layer(model, name + "_cell_init", [], "constant")]
        self.cells[0].constant.num_neurons = str(size)
    def __call__(self, parent):
        """Unroll one LSTM step fed by *parent*; return the new output layer."""
        self.step += 1
        # LSTM input state is from parent layer and previous output
        input_state = new_layer(self.model,
                                "%s_input_state_step%d" % (self.name, self.step),
                                [parent, self.outputs[-1]],
                                "concatenation")
        # Gating units
        f = self.forget_gate(input_state)
        i = self.input_gate(input_state)
        o = self.output_gate(input_state)
        # Cell state
        c = self.cell_update(input_state)
        # c_t = f * c_{t-1} + i * c~ (elementwise/hadamard products)
        cell_forget = new_layer(self.model,
                                "%s_cell_forget_step%d" % (self.name, self.step),
                                [f, self.cells[-1]], "hadamard")
        cell_input = new_layer(self.model,
                               "%s_cell_input_step%d" % (self.name, self.step),
                               [i, c], "hadamard")
        self.cells.append(new_layer(self.model,
                                    "%s_cell_step%d" % (self.name, self.step),
                                    [cell_forget, cell_input],
                                    "sum"))
        # Output
        # h_t = o * relu(c_t)
        act = new_layer(self.model,
                        "%s_cell_activation_step%d" % (self.name, self.step),
                        self.cells[-1], "relu")
        self.outputs.append(new_layer(self.model,
                                      "%s_step%d" % (self.name, self.step),
                                      [o, act], "hadamard"))
        return self.outputs[-1]
# Configure a prototext model (e.g. add layers)
def configure_model(model):
# Model parameters
unroll_depth = 4
image_dims = [3, 227, 227]
label_dims = [1000]
hidden_size = 128 # RNN state size
num_locs = 32
# Initialize input
data = new_layer(model, "data", [], "input", "cpu")
image = new_layer(model, "image", data, "split")
label = new_layer(model, "label", data, "split")
data.children = str_list([image.name, label.name])
# Initialize useful constants
zero1 = new_layer(model, "zero1", [], "constant", "cpu")
zero1.constant.value = 0.0
zero1.constant.num_neurons = str_list([1])
zero3 = new_layer(model, "zero3", [], "constant", "cpu")
zero3.constant.value = 0.0
zero3.constant.num_neurons = str_list([3])
one3 = new_layer(model, "one3", [], "constant", "cpu")
one3.constant.value = 1.0
one3.constant.num_neurons = str_list([3])
# Glimpse network components
glimpse1_conv1 = ConvolutionCell("glimpse1_conv1", 32, 2, 3, 1, 1,
model, "relu", "he_normal_initializer")
glimpse1_conv2 = ConvolutionCell("glimpse1_conv2", 64, 2, 3, 1, 1,
model, "relu", "he_normal_initializer")
glimpse1_conv3 = ConvolutionCell("glimpse1_conv3", 128, 2, 3, 1, 1,
model, "relu", "he_normal_initializer")
glimpse2_conv1 = ConvolutionCell("glimpse2_conv1", 32, 2, 3, 1, 1,
model, "relu", "he_normal_initializer")
glimpse2_conv2 = ConvolutionCell("glimpse2_conv2", 64, 2, 3, 1, 1,
model, "relu", "he_normal_initializer")
glimpse2_conv3 = ConvolutionCell("glimpse2_conv3", 128, 2, 3, 1, 1,
model, "relu", "he_normal_initializer")
glimpse3_conv1 = ConvolutionCell("glimpse3_conv1", 32, 2, 3, 1, 1,
model, "relu", "he_normal_initializer")
glimpse3_conv2 = ConvolutionCell("glimpse3_conv2", 64, 2, 3, 1, 1,
model, "relu", "he_normal_initializer")
glimpse3_conv3 = ConvolutionCell("glimpse3_conv3", 128, 2, 3, 1, 1,
model, "relu", "he_normal_initializer")
# Recurrent network components
lstm1 = LstmCell("lstm1", hidden_size, model)
lstm2 = LstmCell("lstm2", hidden_size, model)
# Location network components
loc_list = map(lambda i: 2.0 * i / num_locs - 1.0, range(num_locs))
loc = zero3
locx_network = FullyConnectedCell("locx_prob", num_locs, model,
"softmax", "glorot_normal_initializer", False)
locy_network = FullyConnectedCell("locy_prob", num_locs, model,
"softmax", "glorot_normal_initializer", False)
# Classification network components
class_network = FullyConnectedCell("class_prob", label_dims[0], model,
"softmax", "glorot_normal_initializer", False)
# Construct unrolled model
for step in range(unroll_depth):
# Extract crops and resize
scaled_loc = new_layer(model, "loc_scaled_step%d" % step,
[loc, one3], "weighted_sum", "cpu")
scaled_loc.weighted_sum.scaling_factors = str_list([0.5, 0.5])
crop1 = new_layer(model, "crop1_step%d" % step,
[image, scaled_loc], "crop", "cpu")
crop1.crop.dims = str_list([3, 32, 32])
crop2 = new_layer(model, "crop2_step%d" % step,
[image, scaled_loc], "crop", "cpu")
crop2.crop.dims = str_list([3, 64, 64])
crop2 = new_layer(model, "crop2_resized_step%d" % step, crop2, "pooling")
crop2.pooling.num_dims = 2
crop2.pooling.pool_dims_i = 2
crop2.pooling.pool_strides_i = crop2.pooling.pool_dims_i
crop2.pooling.pool_mode = "average"
crop3 = new_layer(model, "crop3_step%d" % step,
[image, scaled_loc], "crop", "cpu")
crop3.crop.dims = str_list([3, 128, 128])
crop3 = new_layer(model, "crop3_resized_step%d" % step, crop3, "pooling")
crop3.pooling.num_dims = 2
crop3.pooling.pool_dims_i = 4
crop3.pooling.pool_strides_i = crop3.pooling.pool_dims_i
crop3.pooling.pool_mode = "average"
# Glimpse networks
glimpse1 = glimpse1_conv1(crop1)
glimpse1 = glimpse1_conv2(glimpse1)
glimpse1 = glimpse1_conv3(glimpse1)
glimpse1 = new_layer(model, "glimpse1_step%d" % step, glimpse1, "pooling")
glimpse1.pooling.num_dims = 2
glimpse1.pooling.pool_dims_i = 32
glimpse1.pooling.pool_strides_i = glimpse1.pooling.pool_dims_i
glimpse1.pooling.pool_mode = "average"
glimpse2 = glimpse2_conv1(crop2)
glimpse2 = glimpse2_conv2(glimpse2)
glimpse2 = glimpse2_conv3(glimpse2)
glimpse2 = new_layer(model, "glimpse2_step%d" % step, glimpse2, "pooling")
glimpse2.pooling.num_dims = 2
glimpse2.pooling.pool_dims_i = 32
glimpse2.pooling.pool_strides_i = glimpse2.pooling.pool_dims_i
glimpse2.pooling.pool_mode = "average"
glimpse3 = glimpse3_conv1(crop3)
glimpse3 = glimpse3_conv2(glimpse3)
glimpse3 = glimpse3_conv3(glimpse3)
glimpse3 = new_layer(model, "glimpse3_step%d" % step, glimpse3, "pooling")
glimpse3.pooling.num_dims = 2
glimpse3.pooling.pool_dims_i = 32
glimpse3.pooling.pool_strides_i = glimpse3.pooling.pool_dims_i
glimpse3.pooling.pool_mode = "average"
glimpse = new_layer(model, "glimpse_step%d" % step,
[glimpse1, glimpse2, glimpse3], "concatenation")
glimpse = new_layer(model, "glimpse_flat_step%d" % step,
glimpse, "reshape")
glimpse.reshape.num_dims = 1
glimpse.reshape.dims = str_list([128 * 3])
# Recurrent network
h1 = lstm1(glimpse)
h2 = lstm2(h1)
# Location network
locx_prob = locx_network(h2)
locx_onehot = new_layer(model, "locx_onehot_step%d" % step,
locx_prob, "categorical_random", "cpu")
locx = new_layer(model, "locx_step%d" % step,
locx_onehot, "discrete_random", "cpu")
locx.discrete_random.values = str_list(loc_list)
locx.discrete_random.dims = str_list([1])
locy_prob = locy_network(h2)
locy_onehot = new_layer(model, "locy_onehot_step%d" % step,
locy_prob, "categorical_random", "cpu")
locy = new_layer(model, "locy_step%d" % step,
locy_onehot, "discrete_random", "cpu")
locy.discrete_random.values = str_list(loc_list)
locy.discrete_random.dims = str_list([1])
loc = new_layer(model, "loc_step%d" % (step+1),
[zero1, locy, locx], "concatenation", "cpu")
# Classification network
class_prob = class_network(h1)
# Categorical accuracy
acc1 = new_layer(model, "top1_accuracy_step%d" % step,
[class_prob, label], "categorical_accuracy")
acc5 = new_layer(model, "top5_accuracy_step%d" % step,
[class_prob, label], "top_k_categorical_accuracy")
acc5.top_k_categorical_accuracy.k = 5
met = model.metric.add()
met.layer_metric.name = "categorical accuracy (step %d)" % step
met.layer_metric.layer = acc1.name
met.layer_metric.unit = "%"
met = model.metric.add()
met.layer_metric.name = "top-5 categorical accuracy (step %d)" % step
met.layer_metric.layer = acc5.name
met.layer_metric.unit = "%"
# Objective function
class_obj = new_layer(model, "classification_cross_entropy_step%d" % step,
[class_prob, label], "cross_entropy")
locx_obj = new_layer(model, "locx_cross_entropy_step%d" % step,
[locx_prob, locx_onehot], "cross_entropy")
locy_obj = new_layer(model, "locy_cross_entropy_step%d" % step,
[locy_prob, locy_onehot], "cross_entropy")
obj = model.objective_function.layer_term.add()
obj.scale_factor = 1.0
obj.layer = class_obj.name
obj = model.objective_function.layer_term.add()
obj.scale_factor = 1.0
obj.layer = locx_obj.name
obj = model.objective_function.layer_term.add()
obj.scale_factor = 1.0
obj.layer = locy_obj.name
if __name__ == "__main__":
    # Make sure protobuf Python implementation is built
    # strip("\n1234567890") drops the trailing newline plus any trailing
    # digits from the hostname -- presumably collapsing cluster node names
    # (e.g. "quartz123" -> "quartz") to match the build directory; confirm.
    host = subprocess.check_output("hostname").strip("\n1234567890")
    protoc = lbann_dir + "/build/gnu.Release." + host + ".llnl.gov/install/bin/protoc"
    proto_python_dir = lbann_dir + "/build/gnu.Release." + host + ".llnl.gov/protobuf/src/python"
    os.putenv("PROTOC", protoc)
    # NOTE: shell=True with a concatenated command string; the paths come
    # from the local build tree, not untrusted input.
    subprocess.call("cd " + proto_python_dir + "; "
                    + sys.executable + " "
                    + proto_python_dir + "/setup.py build",
                    shell=True)
    sys.path.append(proto_python_dir)
    import google.protobuf.text_format as txtf
    # Compile LBANN protobuf
    subprocess.call([protoc,
                     "-I=" + lbann_proto_dir,
                     "--python_out=" + work_dir,
                     lbann_proto_dir + "/lbann.proto"])
    sys.path.append(work_dir)
    # `global` at module scope is a no-op; kept as-is.
    global lbann_pb2
    import lbann_pb2
    # Load template prototext
    with open(template_proto, "r") as f:
        pb = txtf.Merge(f.read(), lbann_pb2.LbannPB())
    # Configure prototext model
    configure_model(pb.model)
    # Export prototext
    with open(output_proto, "w") as f:
        f.write(txtf.MessageToString(pb))
| [
"subprocess.check_output",
"lbann_pb2.LbannPB",
"os.putenv",
"subprocess.call",
"google.protobuf.text_format.MessageToString",
"sys.path.append"
] | [((16506, 16533), 'os.putenv', 'os.putenv', (['"""PROTOC"""', 'protoc'], {}), "('PROTOC', protoc)\n", (16515, 16533), False, 'import os\n'), ((16538, 16664), 'subprocess.call', 'subprocess.call', (["('cd ' + proto_python_dir + '; ' + sys.executable + ' ' + proto_python_dir +\n '/setup.py build')"], {'shell': '(True)'}), "('cd ' + proto_python_dir + '; ' + sys.executable + ' ' +\n proto_python_dir + '/setup.py build', shell=True)\n", (16553, 16664), False, 'import subprocess\n'), ((16725, 16758), 'sys.path.append', 'sys.path.append', (['proto_python_dir'], {}), '(proto_python_dir)\n', (16740, 16758), False, 'import sys\n'), ((16840, 16956), 'subprocess.call', 'subprocess.call', (["[protoc, '-I=' + lbann_proto_dir, '--python_out=' + work_dir, \n lbann_proto_dir + '/lbann.proto']"], {}), "([protoc, '-I=' + lbann_proto_dir, '--python_out=' +\n work_dir, lbann_proto_dir + '/lbann.proto'])\n", (16855, 16956), False, 'import subprocess\n'), ((17020, 17045), 'sys.path.append', 'sys.path.append', (['work_dir'], {}), '(work_dir)\n', (17035, 17045), False, 'import sys\n'), ((129, 193), 'subprocess.check_output', 'subprocess.check_output', (["['git', 'rev-parse', '--show-toplevel']"], {}), "(['git', 'rev-parse', '--show-toplevel'])\n", (152, 193), False, 'import subprocess\n'), ((16259, 16294), 'subprocess.check_output', 'subprocess.check_output', (['"""hostname"""'], {}), "('hostname')\n", (16282, 16294), False, 'import subprocess\n'), ((17194, 17213), 'lbann_pb2.LbannPB', 'lbann_pb2.LbannPB', ([], {}), '()\n', (17211, 17213), False, 'import lbann_pb2\n'), ((17357, 17381), 'google.protobuf.text_format.MessageToString', 'txtf.MessageToString', (['pb'], {}), '(pb)\n', (17377, 17381), True, 'import google.protobuf.text_format as txtf\n')] |
"""
WSGI config for lncRNAediting project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from os.path import join,dirname,abspath
PROJECT_DIR = dirname(dirname(abspath(__file__)))
import sys
sys.path.insert(0,PROJECT_DIR)
from django.core.wsgi import get_wsgi_application
# os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lncRNAediting.settings")
os.environ["DJANGO_SETTINGS_MODULE"] = "lncRNAediting.settings"
application = get_wsgi_application()
| [
"os.path.abspath",
"django.core.wsgi.get_wsgi_application",
"sys.path.insert"
] | [((342, 373), 'sys.path.insert', 'sys.path.insert', (['(0)', 'PROJECT_DIR'], {}), '(0, PROJECT_DIR)\n', (357, 373), False, 'import sys\n'), ((582, 604), 'django.core.wsgi.get_wsgi_application', 'get_wsgi_application', ([], {}), '()\n', (602, 604), False, 'from django.core.wsgi import get_wsgi_application\n'), ((310, 327), 'os.path.abspath', 'abspath', (['__file__'], {}), '(__file__)\n', (317, 327), False, 'from os.path import join, dirname, abspath\n')] |