code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
import unittest
import os
import numpy as np
from constants import (
del_test_dir,
gen_test_dir,
get_output_mode,
solution_domain_setup,
CHEM_DICT_REACT,
SOL_PRIM_IN_REACT,
TEST_DIR,
)
from perform.constants import REAL_TYPE
from perform.system_solver import SystemSolver
from perform.input_funcs import read_restart_file
from perform.gas_model.calorically_perfect_gas import CaloricallyPerfectGas
from perform.time_integrator.implicit_integrator import BDF
from perform.solution.solution_interior import SolutionInterior
class SolutionIntInitTestCase(unittest.TestCase):
    """Tests construction of a SolutionInterior from the reacting-flow fixtures."""

    def setUp(self):
        """Assemble gas model, BDF integrator, working directory, and solver."""
        self.output_mode, self.output_dir = get_output_mode()

        # chemistry model
        self.chem_dict = CHEM_DICT_REACT
        self.gas = CaloricallyPerfectGas(self.chem_dict)

        # second-order implicit (BDF) time integrator
        self.param_dict = {"dt": 1e-7, "time_scheme": "bdf", "time_order": 2}
        self.time_int = BDF(self.param_dict)

        # temporary working directory with generated input text files
        gen_test_dir()
        solution_domain_setup()

        # system solver plus discretization sizes used by the tests
        self.solver = SystemSolver(TEST_DIR)
        self.num_cells = 2
        self.num_reactions = 1

    def tearDown(self):
        """Remove the temporary working directory."""
        del_test_dir()

    def test_solution_int_init(self):
        """Primitive state is stored as given; conservative state matches baseline."""
        sol = SolutionInterior(
            self.gas, SOL_PRIM_IN_REACT, self.solver, self.num_cells, self.num_reactions, self.time_int
        )
        if self.output_mode:
            # baseline-generation mode: save outputs instead of asserting
            np.save(os.path.join(self.output_dir, "sol_int_init_sol_cons.npy"), sol.sol_cons)
        else:
            self.assertTrue(np.array_equal(sol.sol_prim, SOL_PRIM_IN_REACT))
            self.assertTrue(
                np.allclose(sol.sol_cons, np.load(os.path.join(self.output_dir, "sol_int_init_sol_cons.npy")))
            )
        # TODO: a LOT of checking of other variables
class SolutionIntMethodsTestCase(unittest.TestCase):
    """Tests for SolutionInterior methods: Jacobians, snapshots, restart I/O, and norms."""

    def setUp(self):
        """Build gas model, BDF integrator, working dir, solver, and interior solution."""
        self.output_mode, self.output_dir = get_output_mode()
        # set chemistry
        self.chem_dict = CHEM_DICT_REACT
        self.gas = CaloricallyPerfectGas(self.chem_dict)
        # set time integrator
        self.param_dict = {}
        self.param_dict["dt"] = 1e-7
        self.param_dict["time_scheme"] = "bdf"
        self.param_dict["time_order"] = 2
        self.time_int = BDF(self.param_dict)
        # generate working directory
        gen_test_dir()
        # generate input text files
        solution_domain_setup()
        # set SystemSolver
        self.solver = SystemSolver(TEST_DIR)
        self.num_cells = 2
        self.num_reactions = 1
        self.sol = SolutionInterior(
            self.gas, SOL_PRIM_IN_REACT, self.solver, self.num_cells, self.num_reactions, self.time_int
        )

    def tearDown(self):
        """Remove the temporary working directory."""
        del_test_dir()

    def test_calc_sol_jacob(self):
        """Check forward and inverse solution Jacobians against stored baselines."""
        sol_jacob = self.sol.calc_sol_jacob(inverse=False)
        sol_jacob_inv = self.sol.calc_sol_jacob(inverse=True)
        if self.output_mode:
            # baseline-generation mode: save outputs instead of asserting
            np.save(os.path.join(self.output_dir, "sol_int_sol_jacob.npy"), sol_jacob)
            np.save(os.path.join(self.output_dir, "sol_int_sol_jacob_inv.npy"), sol_jacob_inv)
        else:
            self.assertTrue(np.allclose(sol_jacob, np.load(os.path.join(self.output_dir, "sol_int_sol_jacob.npy"))))
            self.assertTrue(
                np.allclose(sol_jacob_inv, np.load(os.path.join(self.output_dir, "sol_int_sol_jacob_inv.npy")))
            )

    def test_update_snapshots(self):
        """Snapshot arrays accumulate one column per output interval."""
        # update the snapshot matrix
        for self.solver.iter in range(1, self.solver.num_steps + 1):
            if (self.solver.iter % self.solver.out_interval) == 0:
                self.sol.update_snapshots(self.solver)
        # The solution state never changes here, so every snapshot column repeats
        # the initial state. NOTE(review): the expected column counts (6 for
        # prim/cons, 5 for source/heat/rhs) follow from num_steps and
        # out_interval in the generated solver config — confirm against fixture.
        self.assertTrue(np.array_equal(self.sol.prim_snap, np.repeat(self.sol.sol_prim[:, :, None], 6, axis=2)))
        self.assertTrue(np.array_equal(self.sol.cons_snap, np.repeat(self.sol.sol_cons[:, :, None], 6, axis=2)))
        self.assertTrue(
            np.array_equal(self.sol.reaction_source_snap, np.repeat(self.sol.reaction_source[:, :, None], 5, axis=2))
        )
        self.assertTrue(
            np.array_equal(self.sol.heat_release_snap, np.repeat(self.sol.heat_release[:, None], 5, axis=1))
        )
        self.assertTrue(np.array_equal(self.sol.rhs_snap, np.repeat(self.sol.rhs[:, :, None], 5, axis=2)))

    def test_snapshot_output(self):
        """Exercise intermediate, failed, and final snapshot writing and deletion."""
        for self.solver.iter in range(1, self.solver.num_steps + 1):
            # update the snapshot matrix
            if (self.solver.iter % self.solver.out_interval) == 0:
                self.sol.update_snapshots(self.solver)
            # write and check intermediate results
            if ((self.solver.iter % self.solver.out_itmdt_interval) == 0) and (
                self.solver.iter != self.solver.num_steps
            ):
                self.sol.write_snapshots(self.solver, intermediate=True, failed=False)
                sol_prim_itmdt = np.load(
                    os.path.join(self.solver.unsteady_output_dir, "sol_prim_" + self.solver.sim_type + "_ITMDT.npy")
                )
                sol_cons_itmdt = np.load(
                    os.path.join(self.solver.unsteady_output_dir, "sol_cons_" + self.solver.sim_type + "_ITMDT.npy")
                )
                source_itmdt = np.load(
                    os.path.join(self.solver.unsteady_output_dir, "source_" + self.solver.sim_type + "_ITMDT.npy")
                )
                heat_release_itmdt = np.load(
                    os.path.join(self.solver.unsteady_output_dir, "heat_release_" + self.solver.sim_type + "_ITMDT.npy")
                )
                rhs_itmdt = np.load(
                    os.path.join(self.solver.unsteady_output_dir, "rhs_" + self.solver.sim_type + "_ITMDT.npy")
                )
                self.assertTrue(np.array_equal(sol_prim_itmdt, np.repeat(self.sol.sol_prim[:, :, None], 3, axis=2)))
                self.assertTrue(np.array_equal(sol_cons_itmdt, np.repeat(self.sol.sol_cons[:, :, None], 3, axis=2)))
                self.assertTrue(
                    np.array_equal(source_itmdt, np.repeat(self.sol.reaction_source[:, :, None], 2, axis=2))
                )
                self.assertTrue(
                    np.array_equal(heat_release_itmdt, np.repeat(self.sol.heat_release[:, None], 2, axis=1))
                )
                self.assertTrue(np.array_equal(rhs_itmdt, np.repeat(self.sol.rhs[:, :, None], 2, axis=2)))
            # write and check "failed" snapshots
            if self.solver.iter == 7:
                self.sol.write_snapshots(self.solver, intermediate=False, failed=True)
                sol_prim_failed = np.load(
                    os.path.join(self.solver.unsteady_output_dir, "sol_prim_" + self.solver.sim_type + "_FAILED.npy")
                )
                sol_cons_failed = np.load(
                    os.path.join(self.solver.unsteady_output_dir, "sol_cons_" + self.solver.sim_type + "_FAILED.npy")
                )
                source_failed = np.load(
                    os.path.join(self.solver.unsteady_output_dir, "source_" + self.solver.sim_type + "_FAILED.npy")
                )
                heat_release_failed = np.load(
                    os.path.join(
                        self.solver.unsteady_output_dir, "heat_release_" + self.solver.sim_type + "_FAILED.npy"
                    )
                )
                rhs_failed = np.load(
                    os.path.join(self.solver.unsteady_output_dir, "rhs_" + self.solver.sim_type + "_FAILED.npy")
                )
                self.assertTrue(np.array_equal(sol_prim_failed, np.repeat(self.sol.sol_prim[:, :, None], 4, axis=2)))
                self.assertTrue(np.array_equal(sol_cons_failed, np.repeat(self.sol.sol_cons[:, :, None], 4, axis=2)))
                self.assertTrue(
                    np.array_equal(source_failed, np.repeat(self.sol.reaction_source[:, :, None], 3, axis=2))
                )
                self.assertTrue(
                    np.array_equal(heat_release_failed, np.repeat(self.sol.heat_release[:, None], 3, axis=1))
                )
                self.assertTrue(np.array_equal(rhs_failed, np.repeat(self.sol.rhs[:, :, None], 3, axis=2)))
        # delete intermediate results and check that they deleted properly
        self.sol.delete_itmdt_snapshots(self.solver)
        self.assertFalse(
            os.path.isfile(
                os.path.join(self.solver.unsteady_output_dir, "sol_prim_" + self.solver.sim_type + "_ITMDT.npy")
            )
        )
        self.assertFalse(
            os.path.isfile(
                os.path.join(self.solver.unsteady_output_dir, "sol_cons_" + self.solver.sim_type + "_ITMDT.npy")
            )
        )
        self.assertFalse(
            os.path.isfile(
                os.path.join(self.solver.unsteady_output_dir, "source_" + self.solver.sim_type + "_ITMDT.npy")
            )
        )
        self.assertFalse(
            os.path.isfile(
                os.path.join(self.solver.unsteady_output_dir, "heat_release_" + self.solver.sim_type + "_ITMDT.npy")
            )
        )
        self.assertFalse(
            os.path.isfile(os.path.join(self.solver.unsteady_output_dir, "rhs_" + self.solver.sim_type + "_ITMDT.npy"))
        )
        # write final snapshots
        self.sol.write_snapshots(self.solver, intermediate=False, failed=False)
        sol_prim_final = np.load(
            os.path.join(self.solver.unsteady_output_dir, "sol_prim_" + self.solver.sim_type + ".npy")
        )
        sol_cons_final = np.load(
            os.path.join(self.solver.unsteady_output_dir, "sol_cons_" + self.solver.sim_type + ".npy")
        )
        source_final = np.load(os.path.join(self.solver.unsteady_output_dir, "source_" + self.solver.sim_type + ".npy"))
        heat_release_final = np.load(
            os.path.join(self.solver.unsteady_output_dir, "heat_release_" + self.solver.sim_type + ".npy")
        )
        rhs_final = np.load(os.path.join(self.solver.unsteady_output_dir, "rhs_" + self.solver.sim_type + ".npy"))
        self.assertTrue(np.array_equal(sol_prim_final, np.repeat(self.sol.sol_prim[:, :, None], 6, axis=2)))
        self.assertTrue(np.array_equal(sol_cons_final, np.repeat(self.sol.sol_cons[:, :, None], 6, axis=2)))
        self.assertTrue(np.array_equal(source_final, np.repeat(self.sol.reaction_source[:, :, None], 5, axis=2)))
        self.assertTrue(np.array_equal(heat_release_final, np.repeat(self.sol.heat_release[:, None], 5, axis=1)))
        self.assertTrue(np.array_equal(rhs_final, np.repeat(self.sol.rhs[:, :, None], 5, axis=2)))

    def test_write_restart_file(self):
        """Restart file contents and the iteration counter files are written correctly."""
        sol_cons = self.sol.sol_cons
        self.solver.sol_time = 1e-4
        self.solver.iter = 2
        self.solver.restart_iter = 4
        self.sol.write_restart_file(self.solver)
        # counter is incremented after writing so the next write gets a fresh file
        self.assertEqual(self.solver.restart_iter, 5)
        # check restart files
        restart_data = np.load(os.path.join(self.solver.restart_output_dir, "restart_file_4.npz"))
        self.assertTrue(
            np.array_equal(
                restart_data["sol_prim"],
                np.repeat(SOL_PRIM_IN_REACT[:, :, None], 2, axis=-1),
            )
        )
        self.assertTrue(
            np.array_equal(
                restart_data["sol_cons"],
                np.repeat(sol_cons[:, :, None], 2, axis=-1),
            )
        )
        self.assertEqual(float(restart_data["sol_time"]), 1e-4)
        # check iteration files
        restart_iter = int(np.loadtxt(os.path.join(self.solver.restart_output_dir, "restart_iter.dat")))
        self.assertEqual(restart_iter, 4)

    def test_read_restart_file(self):
        """Round-trip a restart file through write_restart_file / read_restart_file."""
        self.solver.sol_time = 1e-4
        self.solver.iter = 2
        self.solver.restart_iter = 4
        self.sol.write_restart_file(self.solver)
        sol_time, sol_prim, restart_iter = read_restart_file(self.solver)
        self.assertEqual(sol_time, 1e-4)
        self.assertEqual(restart_iter, 5)  # 1 is added to avoid overwriting
        self.assertTrue(
            np.array_equal(
                sol_prim,
                np.repeat(SOL_PRIM_IN_REACT[:, :, None], 2, axis=-1),
            )
        )

    def test_calc_d_sol_norms(self):
        """Solution-change norms for an implicit step match the expected values."""
        self.solver.iter = 3
        self.sol.d_sol_norm_hist = np.zeros((self.solver.num_steps, 2), dtype=REAL_TYPE)
        # doubling the history entry produces a known, nonzero solution change
        self.sol.sol_hist_prim[0] = self.sol.sol_prim * 2.0
        self.sol.calc_d_sol_norms(self.solver, "implicit")
        self.assertAlmostEqual(self.sol.d_sol_norm_hist[2, 0], 3.46573790883)
        self.assertAlmostEqual(self.sol.d_sol_norm_hist[2, 1], 3.45416666667)

    def test_calc_res_norms(self):
        """Residual norms computed from a copy of the primitive state."""
        self.solver.iter = 3
        self.sol.res = self.sol.sol_prim.copy()
        self.sol.calc_res_norms(self.solver, 0)
        self.assertAlmostEqual(self.sol.res_norm_hist[2, 0], 3.46573790883)
        self.assertAlmostEqual(self.sol.res_norm_hist[2, 1], 3.45416666667)
| [
"numpy.repeat",
"perform.input_funcs.read_restart_file",
"constants.del_test_dir",
"perform.solution.solution_interior.SolutionInterior",
"os.path.join",
"constants.gen_test_dir",
"constants.get_output_mode",
"perform.time_integrator.implicit_integrator.BDF",
"constants.solution_domain_setup",
"pe... | [((670, 687), 'constants.get_output_mode', 'get_output_mode', ([], {}), '()\n', (685, 687), False, 'from constants import del_test_dir, gen_test_dir, get_output_mode, solution_domain_setup, CHEM_DICT_REACT, SOL_PRIM_IN_REACT, TEST_DIR\n'), ((773, 810), 'perform.gas_model.calorically_perfect_gas.CaloricallyPerfectGas', 'CaloricallyPerfectGas', (['self.chem_dict'], {}), '(self.chem_dict)\n', (794, 810), False, 'from perform.gas_model.calorically_perfect_gas import CaloricallyPerfectGas\n'), ((1021, 1041), 'perform.time_integrator.implicit_integrator.BDF', 'BDF', (['self.param_dict'], {}), '(self.param_dict)\n', (1024, 1041), False, 'from perform.time_integrator.implicit_integrator import BDF\n'), ((1088, 1102), 'constants.gen_test_dir', 'gen_test_dir', ([], {}), '()\n', (1100, 1102), False, 'from constants import del_test_dir, gen_test_dir, get_output_mode, solution_domain_setup, CHEM_DICT_REACT, SOL_PRIM_IN_REACT, TEST_DIR\n'), ((1148, 1171), 'constants.solution_domain_setup', 'solution_domain_setup', ([], {}), '()\n', (1169, 1171), False, 'from constants import del_test_dir, gen_test_dir, get_output_mode, solution_domain_setup, CHEM_DICT_REACT, SOL_PRIM_IN_REACT, TEST_DIR\n'), ((1222, 1244), 'perform.system_solver.SystemSolver', 'SystemSolver', (['TEST_DIR'], {}), '(TEST_DIR)\n', (1234, 1244), False, 'from perform.system_solver import SystemSolver\n'), ((1338, 1352), 'constants.del_test_dir', 'del_test_dir', ([], {}), '()\n', (1350, 1352), False, 'from constants import del_test_dir, gen_test_dir, get_output_mode, solution_domain_setup, CHEM_DICT_REACT, SOL_PRIM_IN_REACT, TEST_DIR\n'), ((1407, 1520), 'perform.solution.solution_interior.SolutionInterior', 'SolutionInterior', (['self.gas', 'SOL_PRIM_IN_REACT', 'self.solver', 'self.num_cells', 'self.num_reactions', 'self.time_int'], {}), '(self.gas, SOL_PRIM_IN_REACT, self.solver, self.num_cells,\n self.num_reactions, self.time_int)\n', (1423, 1520), False, 'from perform.solution.solution_interior import 
SolutionInterior\n'), ((2090, 2107), 'constants.get_output_mode', 'get_output_mode', ([], {}), '()\n', (2105, 2107), False, 'from constants import del_test_dir, gen_test_dir, get_output_mode, solution_domain_setup, CHEM_DICT_REACT, SOL_PRIM_IN_REACT, TEST_DIR\n'), ((2193, 2230), 'perform.gas_model.calorically_perfect_gas.CaloricallyPerfectGas', 'CaloricallyPerfectGas', (['self.chem_dict'], {}), '(self.chem_dict)\n', (2214, 2230), False, 'from perform.gas_model.calorically_perfect_gas import CaloricallyPerfectGas\n'), ((2441, 2461), 'perform.time_integrator.implicit_integrator.BDF', 'BDF', (['self.param_dict'], {}), '(self.param_dict)\n', (2444, 2461), False, 'from perform.time_integrator.implicit_integrator import BDF\n'), ((2508, 2522), 'constants.gen_test_dir', 'gen_test_dir', ([], {}), '()\n', (2520, 2522), False, 'from constants import del_test_dir, gen_test_dir, get_output_mode, solution_domain_setup, CHEM_DICT_REACT, SOL_PRIM_IN_REACT, TEST_DIR\n'), ((2568, 2591), 'constants.solution_domain_setup', 'solution_domain_setup', ([], {}), '()\n', (2589, 2591), False, 'from constants import del_test_dir, gen_test_dir, get_output_mode, solution_domain_setup, CHEM_DICT_REACT, SOL_PRIM_IN_REACT, TEST_DIR\n'), ((2642, 2664), 'perform.system_solver.SystemSolver', 'SystemSolver', (['TEST_DIR'], {}), '(TEST_DIR)\n', (2654, 2664), False, 'from perform.system_solver import SystemSolver\n'), ((2744, 2857), 'perform.solution.solution_interior.SolutionInterior', 'SolutionInterior', (['self.gas', 'SOL_PRIM_IN_REACT', 'self.solver', 'self.num_cells', 'self.num_reactions', 'self.time_int'], {}), '(self.gas, SOL_PRIM_IN_REACT, self.solver, self.num_cells,\n self.num_reactions, self.time_int)\n', (2760, 2857), False, 'from perform.solution.solution_interior import SolutionInterior\n'), ((2910, 2924), 'constants.del_test_dir', 'del_test_dir', ([], {}), '()\n', (2922, 2924), False, 'from constants import del_test_dir, gen_test_dir, get_output_mode, solution_domain_setup, 
CHEM_DICT_REACT, SOL_PRIM_IN_REACT, TEST_DIR\n'), ((12021, 12051), 'perform.input_funcs.read_restart_file', 'read_restart_file', (['self.solver'], {}), '(self.solver)\n', (12038, 12051), False, 'from perform.input_funcs import read_restart_file\n'), ((12446, 12499), 'numpy.zeros', 'np.zeros', (['(self.solver.num_steps, 2)'], {'dtype': 'REAL_TYPE'}), '((self.solver.num_steps, 2), dtype=REAL_TYPE)\n', (12454, 12499), True, 'import numpy as np\n'), ((9573, 9668), 'os.path.join', 'os.path.join', (['self.solver.unsteady_output_dir', "('sol_prim_' + self.solver.sim_type + '.npy')"], {}), "(self.solver.unsteady_output_dir, 'sol_prim_' + self.solver.\n sim_type + '.npy')\n", (9585, 9668), False, 'import os\n'), ((9720, 9815), 'os.path.join', 'os.path.join', (['self.solver.unsteady_output_dir', "('sol_cons_' + self.solver.sim_type + '.npy')"], {}), "(self.solver.unsteady_output_dir, 'sol_cons_' + self.solver.\n sim_type + '.npy')\n", (9732, 9815), False, 'import os\n'), ((9852, 9945), 'os.path.join', 'os.path.join', (['self.solver.unsteady_output_dir', "('source_' + self.solver.sim_type + '.npy')"], {}), "(self.solver.unsteady_output_dir, 'source_' + self.solver.\n sim_type + '.npy')\n", (9864, 9945), False, 'import os\n'), ((9992, 10091), 'os.path.join', 'os.path.join', (['self.solver.unsteady_output_dir', "('heat_release_' + self.solver.sim_type + '.npy')"], {}), "(self.solver.unsteady_output_dir, 'heat_release_' + self.solver\n .sim_type + '.npy')\n", (10004, 10091), False, 'import os\n'), ((10125, 10214), 'os.path.join', 'os.path.join', (['self.solver.unsteady_output_dir', "('rhs_' + self.solver.sim_type + '.npy')"], {}), "(self.solver.unsteady_output_dir, 'rhs_' + self.solver.sim_type +\n '.npy')\n", (10137, 10214), False, 'import os\n'), ((11104, 11170), 'os.path.join', 'os.path.join', (['self.solver.restart_output_dir', '"""restart_file_4.npz"""'], {}), "(self.solver.restart_output_dir, 'restart_file_4.npz')\n", (11116, 11170), False, 'import os\n'), ((1590, 1648), 
'os.path.join', 'os.path.join', (['self.output_dir', '"""sol_int_init_sol_cons.npy"""'], {}), "(self.output_dir, 'sol_int_init_sol_cons.npy')\n", (1602, 1648), False, 'import os\n'), ((1708, 1755), 'numpy.array_equal', 'np.array_equal', (['sol.sol_prim', 'SOL_PRIM_IN_REACT'], {}), '(sol.sol_prim, SOL_PRIM_IN_REACT)\n', (1722, 1755), True, 'import numpy as np\n'), ((3134, 3188), 'os.path.join', 'os.path.join', (['self.output_dir', '"""sol_int_sol_jacob.npy"""'], {}), "(self.output_dir, 'sol_int_sol_jacob.npy')\n", (3146, 3188), False, 'import os\n'), ((3221, 3279), 'os.path.join', 'os.path.join', (['self.output_dir', '"""sol_int_sol_jacob_inv.npy"""'], {}), "(self.output_dir, 'sol_int_sol_jacob_inv.npy')\n", (3233, 3279), False, 'import os\n'), ((3911, 3962), 'numpy.repeat', 'np.repeat', (['self.sol.sol_prim[:, :, None]', '(6)'], {'axis': '(2)'}), '(self.sol.sol_prim[:, :, None], 6, axis=2)\n', (3920, 3962), True, 'import numpy as np\n'), ((4024, 4075), 'numpy.repeat', 'np.repeat', (['self.sol.sol_cons[:, :, None]', '(6)'], {'axis': '(2)'}), '(self.sol.sol_cons[:, :, None], 6, axis=2)\n', (4033, 4075), True, 'import numpy as np\n'), ((4161, 4219), 'numpy.repeat', 'np.repeat', (['self.sol.reaction_source[:, :, None]', '(5)'], {'axis': '(2)'}), '(self.sol.reaction_source[:, :, None], 5, axis=2)\n', (4170, 4219), True, 'import numpy as np\n'), ((4311, 4363), 'numpy.repeat', 'np.repeat', (['self.sol.heat_release[:, None]', '(5)'], {'axis': '(1)'}), '(self.sol.heat_release[:, None], 5, axis=1)\n', (4320, 4363), True, 'import numpy as np\n'), ((4433, 4479), 'numpy.repeat', 'np.repeat', (['self.sol.rhs[:, :, None]', '(5)'], {'axis': '(2)'}), '(self.sol.rhs[:, :, None], 5, axis=2)\n', (4442, 4479), True, 'import numpy as np\n'), ((8562, 8663), 'os.path.join', 'os.path.join', (['self.solver.unsteady_output_dir', "('sol_prim_' + self.solver.sim_type + '_ITMDT.npy')"], {}), "(self.solver.unsteady_output_dir, 'sol_prim_' + self.solver.\n sim_type + '_ITMDT.npy')\n", (8574, 
8663), False, 'import os\n'), ((8753, 8854), 'os.path.join', 'os.path.join', (['self.solver.unsteady_output_dir', "('sol_cons_' + self.solver.sim_type + '_ITMDT.npy')"], {}), "(self.solver.unsteady_output_dir, 'sol_cons_' + self.solver.\n sim_type + '_ITMDT.npy')\n", (8765, 8854), False, 'import os\n'), ((8944, 9043), 'os.path.join', 'os.path.join', (['self.solver.unsteady_output_dir', "('source_' + self.solver.sim_type + '_ITMDT.npy')"], {}), "(self.solver.unsteady_output_dir, 'source_' + self.solver.\n sim_type + '_ITMDT.npy')\n", (8956, 9043), False, 'import os\n'), ((9133, 9238), 'os.path.join', 'os.path.join', (['self.solver.unsteady_output_dir', "('heat_release_' + self.solver.sim_type + '_ITMDT.npy')"], {}), "(self.solver.unsteady_output_dir, 'heat_release_' + self.solver\n .sim_type + '_ITMDT.npy')\n", (9145, 9238), False, 'import os\n'), ((9311, 9406), 'os.path.join', 'os.path.join', (['self.solver.unsteady_output_dir', "('rhs_' + self.solver.sim_type + '_ITMDT.npy')"], {}), "(self.solver.unsteady_output_dir, 'rhs_' + self.solver.sim_type +\n '_ITMDT.npy')\n", (9323, 9406), False, 'import os\n'), ((10267, 10318), 'numpy.repeat', 'np.repeat', (['self.sol.sol_prim[:, :, None]', '(6)'], {'axis': '(2)'}), '(self.sol.sol_prim[:, :, None], 6, axis=2)\n', (10276, 10318), True, 'import numpy as np\n'), ((10376, 10427), 'numpy.repeat', 'np.repeat', (['self.sol.sol_cons[:, :, None]', '(6)'], {'axis': '(2)'}), '(self.sol.sol_cons[:, :, None], 6, axis=2)\n', (10385, 10427), True, 'import numpy as np\n'), ((10483, 10541), 'numpy.repeat', 'np.repeat', (['self.sol.reaction_source[:, :, None]', '(5)'], {'axis': '(2)'}), '(self.sol.reaction_source[:, :, None], 5, axis=2)\n', (10492, 10541), True, 'import numpy as np\n'), ((10603, 10655), 'numpy.repeat', 'np.repeat', (['self.sol.heat_release[:, None]', '(5)'], {'axis': '(1)'}), '(self.sol.heat_release[:, None], 5, axis=1)\n', (10612, 10655), True, 'import numpy as np\n'), ((10708, 10754), 'numpy.repeat', 'np.repeat', 
(['self.sol.rhs[:, :, None]', '(5)'], {'axis': '(2)'}), '(self.sol.rhs[:, :, None], 5, axis=2)\n', (10717, 10754), True, 'import numpy as np\n'), ((11284, 11336), 'numpy.repeat', 'np.repeat', (['SOL_PRIM_IN_REACT[:, :, None]', '(2)'], {'axis': '(-1)'}), '(SOL_PRIM_IN_REACT[:, :, None], 2, axis=-1)\n', (11293, 11336), True, 'import numpy as np\n'), ((11473, 11516), 'numpy.repeat', 'np.repeat', (['sol_cons[:, :, None]', '(2)'], {'axis': '(-1)'}), '(sol_cons[:, :, None], 2, axis=-1)\n', (11482, 11516), True, 'import numpy as np\n'), ((11677, 11741), 'os.path.join', 'os.path.join', (['self.solver.restart_output_dir', '"""restart_iter.dat"""'], {}), "(self.solver.restart_output_dir, 'restart_iter.dat')\n", (11689, 11741), False, 'import os\n'), ((12265, 12317), 'numpy.repeat', 'np.repeat', (['SOL_PRIM_IN_REACT[:, :, None]', '(2)'], {'axis': '(-1)'}), '(SOL_PRIM_IN_REACT[:, :, None], 2, axis=-1)\n', (12274, 12317), True, 'import numpy as np\n'), ((5108, 5209), 'os.path.join', 'os.path.join', (['self.solver.unsteady_output_dir', "('sol_prim_' + self.solver.sim_type + '_ITMDT.npy')"], {}), "(self.solver.unsteady_output_dir, 'sol_prim_' + self.solver.\n sim_type + '_ITMDT.npy')\n", (5120, 5209), False, 'import os\n'), ((5285, 5386), 'os.path.join', 'os.path.join', (['self.solver.unsteady_output_dir', "('sol_cons_' + self.solver.sim_type + '_ITMDT.npy')"], {}), "(self.solver.unsteady_output_dir, 'sol_cons_' + self.solver.\n sim_type + '_ITMDT.npy')\n", (5297, 5386), False, 'import os\n'), ((5460, 5559), 'os.path.join', 'os.path.join', (['self.solver.unsteady_output_dir', "('source_' + self.solver.sim_type + '_ITMDT.npy')"], {}), "(self.solver.unsteady_output_dir, 'source_' + self.solver.\n sim_type + '_ITMDT.npy')\n", (5472, 5559), False, 'import os\n'), ((5639, 5744), 'os.path.join', 'os.path.join', (['self.solver.unsteady_output_dir', "('heat_release_' + self.solver.sim_type + '_ITMDT.npy')"], {}), "(self.solver.unsteady_output_dir, 'heat_release_' + self.solver\n 
.sim_type + '_ITMDT.npy')\n", (5651, 5744), False, 'import os\n'), ((5815, 5910), 'os.path.join', 'os.path.join', (['self.solver.unsteady_output_dir', "('rhs_' + self.solver.sim_type + '_ITMDT.npy')"], {}), "(self.solver.unsteady_output_dir, 'rhs_' + self.solver.sim_type +\n '_ITMDT.npy')\n", (5827, 5910), False, 'import os\n'), ((6825, 6927), 'os.path.join', 'os.path.join', (['self.solver.unsteady_output_dir', "('sol_prim_' + self.solver.sim_type + '_FAILED.npy')"], {}), "(self.solver.unsteady_output_dir, 'sol_prim_' + self.solver.\n sim_type + '_FAILED.npy')\n", (6837, 6927), False, 'import os\n'), ((7004, 7106), 'os.path.join', 'os.path.join', (['self.solver.unsteady_output_dir', "('sol_cons_' + self.solver.sim_type + '_FAILED.npy')"], {}), "(self.solver.unsteady_output_dir, 'sol_cons_' + self.solver.\n sim_type + '_FAILED.npy')\n", (7016, 7106), False, 'import os\n'), ((7181, 7281), 'os.path.join', 'os.path.join', (['self.solver.unsteady_output_dir', "('source_' + self.solver.sim_type + '_FAILED.npy')"], {}), "(self.solver.unsteady_output_dir, 'source_' + self.solver.\n sim_type + '_FAILED.npy')\n", (7193, 7281), False, 'import os\n'), ((7362, 7468), 'os.path.join', 'os.path.join', (['self.solver.unsteady_output_dir', "('heat_release_' + self.solver.sim_type + '_FAILED.npy')"], {}), "(self.solver.unsteady_output_dir, 'heat_release_' + self.solver\n .sim_type + '_FAILED.npy')\n", (7374, 7468), False, 'import os\n'), ((7586, 7682), 'os.path.join', 'os.path.join', (['self.solver.unsteady_output_dir', "('rhs_' + self.solver.sim_type + '_FAILED.npy')"], {}), "(self.solver.unsteady_output_dir, 'rhs_' + self.solver.sim_type +\n '_FAILED.npy')\n", (7598, 7682), False, 'import os\n'), ((1836, 1894), 'os.path.join', 'os.path.join', (['self.output_dir', '"""sol_int_init_sol_cons.npy"""'], {}), "(self.output_dir, 'sol_int_init_sol_cons.npy')\n", (1848, 1894), False, 'import os\n'), ((3371, 3425), 'os.path.join', 'os.path.join', (['self.output_dir', 
'"""sol_int_sol_jacob.npy"""'], {}), "(self.output_dir, 'sol_int_sol_jacob.npy')\n", (3383, 3425), False, 'import os\n'), ((3509, 3567), 'os.path.join', 'os.path.join', (['self.output_dir', '"""sol_int_sol_jacob_inv.npy"""'], {}), "(self.output_dir, 'sol_int_sol_jacob_inv.npy')\n", (3521, 3567), False, 'import os\n'), ((5988, 6039), 'numpy.repeat', 'np.repeat', (['self.sol.sol_prim[:, :, None]', '(3)'], {'axis': '(2)'}), '(self.sol.sol_prim[:, :, None], 3, axis=2)\n', (5997, 6039), True, 'import numpy as np\n'), ((6105, 6156), 'numpy.repeat', 'np.repeat', (['self.sol.sol_cons[:, :, None]', '(3)'], {'axis': '(2)'}), '(self.sol.sol_cons[:, :, None], 3, axis=2)\n', (6114, 6156), True, 'import numpy as np\n'), ((6241, 6299), 'numpy.repeat', 'np.repeat', (['self.sol.reaction_source[:, :, None]', '(2)'], {'axis': '(2)'}), '(self.sol.reaction_source[:, :, None], 2, axis=2)\n', (6250, 6299), True, 'import numpy as np\n'), ((6407, 6459), 'numpy.repeat', 'np.repeat', (['self.sol.heat_release[:, None]', '(2)'], {'axis': '(1)'}), '(self.sol.heat_release[:, None], 2, axis=1)\n', (6416, 6459), True, 'import numpy as np\n'), ((6537, 6583), 'numpy.repeat', 'np.repeat', (['self.sol.rhs[:, :, None]', '(2)'], {'axis': '(2)'}), '(self.sol.rhs[:, :, None], 2, axis=2)\n', (6546, 6583), True, 'import numpy as np\n'), ((7761, 7812), 'numpy.repeat', 'np.repeat', (['self.sol.sol_prim[:, :, None]', '(4)'], {'axis': '(2)'}), '(self.sol.sol_prim[:, :, None], 4, axis=2)\n', (7770, 7812), True, 'import numpy as np\n'), ((7879, 7930), 'numpy.repeat', 'np.repeat', (['self.sol.sol_cons[:, :, None]', '(4)'], {'axis': '(2)'}), '(self.sol.sol_cons[:, :, None], 4, axis=2)\n', (7888, 7930), True, 'import numpy as np\n'), ((8016, 8074), 'numpy.repeat', 'np.repeat', (['self.sol.reaction_source[:, :, None]', '(3)'], {'axis': '(2)'}), '(self.sol.reaction_source[:, :, None], 3, axis=2)\n', (8025, 8074), True, 'import numpy as np\n'), ((8183, 8235), 'numpy.repeat', 'np.repeat', (['self.sol.heat_release[:, 
None]', '(3)'], {'axis': '(1)'}), '(self.sol.heat_release[:, None], 3, axis=1)\n', (8192, 8235), True, 'import numpy as np\n'), ((8314, 8360), 'numpy.repeat', 'np.repeat', (['self.sol.rhs[:, :, None]', '(3)'], {'axis': '(2)'}), '(self.sol.rhs[:, :, None], 3, axis=2)\n', (8323, 8360), True, 'import numpy as np\n')] |
#!/usr/bin/env python
'''Assert HDF5 input is non-zero.
Print to stderr if not.
For example,
find . -iname '*.hdf5' -exec h5zero.py {} +
'''
from __future__ import print_function
import argparse
import sys
import h5py
from dautil.IO.h5 import h5assert_nonzero
__version__ = '0.1'
def main(args):
    """Open each input HDF5 file and report zero-content files on stderr."""
    for path in args.input:
        with h5py.File(path, "r") as h5file:
            try:
                h5assert_nonzero(h5file, verbose=args.verbose)
            except AssertionError:
                # non-zero assertion failed: name the offending file on stderr
                print(path, file=sys.stderr)
def cli():
    """Build the argument parser, parse CLI arguments, and dispatch to main."""
    parser = argparse.ArgumentParser(description='Assert HDF5 input is non-zero.')
    parser.set_defaults(func=main)
    # argument definitions
    parser.add_argument(
        '-v', '--version', action='version', version='%(prog)s {}'.format(__version__)
    )
    parser.add_argument('input', nargs='+', help='Input HDF5 files. Can be more than 1.')
    parser.add_argument(
        '-V', '--verbose', action='store_true', help='verbose to stdout.'
    )
    # parse and hand off to the configured entry point
    arguments = parser.parse_args()
    arguments.func(arguments)
# Script entry point: delegate to the CLI wrapper.
if __name__ == "__main__":
    cli()
| [
"dautil.IO.h5.h5assert_nonzero",
"argparse.ArgumentParser",
"h5py.File"
] | [((566, 635), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Assert HDF5 input is non-zero."""'}), "(description='Assert HDF5 input is non-zero.')\n", (589, 635), False, 'import argparse\n'), ((350, 374), 'h5py.File', 'h5py.File', (['filename', '"""r"""'], {}), "(filename, 'r')\n", (359, 374), False, 'import h5py\n'), ((414, 455), 'dautil.IO.h5.h5assert_nonzero', 'h5assert_nonzero', (['f'], {'verbose': 'args.verbose'}), '(f, verbose=args.verbose)\n', (430, 455), False, 'from dautil.IO.h5 import h5assert_nonzero\n')] |
#User function Template for python3
class Solution:
    def calculateSpan(self, a, n):
        """Return the stock span for each of the first n prices in `a`.

        The span of day i is the number of consecutive days ending at day i
        (inclusive) whose price is less than or equal to a[i]. Computed with
        a monotonic stack of indices in O(n) time overall.

        Args:
            a: list of daily prices.
            n: number of prices to process (len(a) is expected to be >= n).

        Returns:
            List of n span values.
        """
        # Guard: no prices means no spans (the original crashed with
        # IndexError on `Span[0] = 1` when n == 0).
        if n <= 0:
            return []
        span = [0] * n
        # span value of first day is always 1
        span[0] = 1
        stack = [0]  # indices whose prices form a strictly decreasing run
        for i in range(1, n):
            # pop indices whose price is <= current price; they can never
            # bound the span of any later day
            while stack and a[stack[-1]] <= a[i]:
                stack.pop()
            # empty stack: a[i] exceeds every earlier price, so the span
            # covers all i + 1 days; otherwise it reaches back to the last
            # strictly greater price at stack[-1]
            span[i] = i + 1 if not stack else i - stack[-1]
            stack.append(i)
        return span
#{ 
# Driver Code Starts
#Initial Template for Python 3
import atexit
import io
import sys

# Read all of stdin up front and serve it line by line: `input` is rebound
# to the iterator's __next__, so each call yields the next buffered line.
_INPUT_LINES = sys.stdin.read().splitlines()
input = iter(_INPUT_LINES).__next__
# Redirect stdout into an in-memory buffer; it is flushed at interpreter exit.
_OUTPUT_BUFFER = io.StringIO()
sys.stdout = _OUTPUT_BUFFER

@atexit.register
def write():
    # Flush the buffered output to the real stdout when the program exits.
    sys.__stdout__.write(_OUTPUT_BUFFER.getvalue())

if __name__ == '__main__':
    test_cases = int(input())
    for cases in range(test_cases) :
        n = int(input())  # number of days
        a = list(map(int,input().strip().split()))  # daily prices
        obj = Solution()
        ans = obj.calculateSpan(a, n);
        print(*ans) # print space separated elements of span array
# } Driver Code Ends
| [
"io.StringIO",
"sys.stdin.read"
] | [((1076, 1089), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (1087, 1089), False, 'import io\n'), ((993, 1009), 'sys.stdin.read', 'sys.stdin.read', ([], {}), '()\n', (1007, 1009), False, 'import sys\n')] |
#!python
# -*- coding: utf-8 -*-
"""Unit testing for scriptorium"""
import os
import tempfile
import shutil
import textwrap
import unittest
import scriptorium
class TestScriptorium(unittest.TestCase):
    """Integration tests for scriptorium: template install, paper creation, config I/O."""

    @classmethod
    def setUpClass(cls):
        """Set up unit tests for scriptorium"""
        # Two scratch areas: one for installed templates, one for created papers.
        TestScriptorium.template_dir = tempfile.mkdtemp()
        TestScriptorium.paper_dir = tempfile.mkdtemp()
        scriptorium.CONFIG['TEMPLATE_DIR'] = TestScriptorium.template_dir
        # NOTE(review): requires network access to clone the template repository.
        scriptorium.install_template("https://github.com/jasedit/simple_templates.git")

    @classmethod
    def tearDownClass(cls):
        """Tear down unit test structure."""
        shutil.rmtree(TestScriptorium.template_dir, ignore_errors=True)
        shutil.rmtree(TestScriptorium.paper_dir, ignore_errors=True)

    def testTemplates(self):
        """Test that template has been installed"""
        self.assertEqual(TestScriptorium.template_dir, scriptorium.CONFIG['TEMPLATE_DIR'])
        self.assertTrue(os.path.exists(os.path.join(TestScriptorium.template_dir, 'simple_templates')))
        ex_tdir = os.path.join(scriptorium.CONFIG['TEMPLATE_DIR'], 'simple_templates', 'report')
        self.assertEqual(scriptorium.find_template('report'), ex_tdir)

    def testCreation(self):
        """Test simple paper creation."""
        example_config = {
            'author': '<NAME>',
            'title': 'Example Report'
        }
        # Work inside the scratch paper directory; restore cwd at the end.
        old_dir = os.getcwd()
        os.chdir(TestScriptorium.paper_dir)
        # create() returns the set of unresolved template variables; empty means success.
        self.assertEqual(scriptorium.create('ex_report', 'report', config=example_config), set())
        os.chdir('ex_report')
        self.assertEqual(scriptorium.paper_root('.'), 'paper.mmd')
        self.assertEqual(scriptorium.get_template('paper.mmd'), 'report')
        example_text = textwrap.dedent("""\n
        # Introduction

        This is an example paper.

        # Conclusion

        This paper is awesome.
        """)
        with open('paper.mmd', 'a') as fp:
            fp.write(example_text)
        # Build the paper and check the PDF was produced.
        pdf_path = scriptorium.to_pdf('.')
        self.assertTrue(os.path.exists(pdf_path))
        os.chdir(old_dir)

    def testConfigLoading(self):
        """Test saving and loading configuration."""
        config = scriptorium.CONFIG.copy()
        scriptorium.save_config()
        scriptorium.read_config()
        # A save/load round trip must leave the configuration unchanged.
        self.assertEqual(config, scriptorium.CONFIG)

    def testConfiguration(self):
        """Test configuration option issues"""
        test_template_dir = "~/.scriptorium"
        scriptorium.CONFIG['TEMPLATE_DIR'] = test_template_dir
        scriptorium.save_config()
        scriptorium.read_config()
        # read_config is expected to expand '~' to the user's home directory.
        self.assertEqual(scriptorium.CONFIG['TEMPLATE_DIR'], os.path.expanduser(test_template_dir))
        # Restore the class-level template dir so other tests are unaffected.
        scriptorium.CONFIG['TEMPLATE_DIR'] = self.template_dir
if __name__ == '__main__':
unittest.main()
| [
"os.path.expanduser",
"textwrap.dedent",
"os.path.exists",
"scriptorium.find_template",
"scriptorium.install_template",
"os.path.join",
"os.getcwd",
"os.chdir",
"scriptorium.CONFIG.copy",
"scriptorium.to_pdf",
"scriptorium.read_config",
"tempfile.mkdtemp",
"scriptorium.paper_root",
"shutil... | [((2750, 2765), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2763, 2765), False, 'import unittest\n'), ((329, 347), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (345, 347), False, 'import tempfile\n'), ((382, 400), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (398, 400), False, 'import tempfile\n'), ((479, 558), 'scriptorium.install_template', 'scriptorium.install_template', (['"""https://github.com/jasedit/simple_templates.git"""'], {}), "('https://github.com/jasedit/simple_templates.git')\n", (507, 558), False, 'import scriptorium\n'), ((654, 717), 'shutil.rmtree', 'shutil.rmtree', (['TestScriptorium.template_dir'], {'ignore_errors': '(True)'}), '(TestScriptorium.template_dir, ignore_errors=True)\n', (667, 717), False, 'import shutil\n'), ((724, 784), 'shutil.rmtree', 'shutil.rmtree', (['TestScriptorium.paper_dir'], {'ignore_errors': '(True)'}), '(TestScriptorium.paper_dir, ignore_errors=True)\n', (737, 784), False, 'import shutil\n'), ((1072, 1150), 'os.path.join', 'os.path.join', (["scriptorium.CONFIG['TEMPLATE_DIR']", '"""simple_templates"""', '"""report"""'], {}), "(scriptorium.CONFIG['TEMPLATE_DIR'], 'simple_templates', 'report')\n", (1084, 1150), False, 'import os\n'), ((1397, 1408), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1406, 1408), False, 'import os\n'), ((1415, 1450), 'os.chdir', 'os.chdir', (['TestScriptorium.paper_dir'], {}), '(TestScriptorium.paper_dir)\n', (1423, 1450), False, 'import os\n'), ((1553, 1574), 'os.chdir', 'os.chdir', (['"""ex_report"""'], {}), "('ex_report')\n", (1561, 1574), False, 'import os\n'), ((1734, 1889), 'textwrap.dedent', 'textwrap.dedent', (['"""\n\n # Introduction\n\n This is an example paper.\n\n # Conclusion\n\n This paper is awesome.\n """'], {}), '(\n """\n\n # Introduction\n\n This is an example paper.\n\n # Conclusion\n\n This paper is awesome.\n """\n )\n', (1749, 1889), False, 'import textwrap\n'), ((1972, 1995), 'scriptorium.to_pdf', 'scriptorium.to_pdf', 
(['"""."""'], {}), "('.')\n", (1990, 1995), False, 'import scriptorium\n'), ((2052, 2069), 'os.chdir', 'os.chdir', (['old_dir'], {}), '(old_dir)\n', (2060, 2069), False, 'import os\n'), ((2170, 2195), 'scriptorium.CONFIG.copy', 'scriptorium.CONFIG.copy', ([], {}), '()\n', (2193, 2195), False, 'import scriptorium\n'), ((2202, 2227), 'scriptorium.save_config', 'scriptorium.save_config', ([], {}), '()\n', (2225, 2227), False, 'import scriptorium\n'), ((2234, 2259), 'scriptorium.read_config', 'scriptorium.read_config', ([], {}), '()\n', (2257, 2259), False, 'import scriptorium\n'), ((2500, 2525), 'scriptorium.save_config', 'scriptorium.save_config', ([], {}), '()\n', (2523, 2525), False, 'import scriptorium\n'), ((2532, 2557), 'scriptorium.read_config', 'scriptorium.read_config', ([], {}), '()\n', (2555, 2557), False, 'import scriptorium\n'), ((1174, 1209), 'scriptorium.find_template', 'scriptorium.find_template', (['"""report"""'], {}), "('report')\n", (1199, 1209), False, 'import scriptorium\n'), ((1474, 1538), 'scriptorium.create', 'scriptorium.create', (['"""ex_report"""', '"""report"""'], {'config': 'example_config'}), "('ex_report', 'report', config=example_config)\n", (1492, 1538), False, 'import scriptorium\n'), ((1598, 1625), 'scriptorium.paper_root', 'scriptorium.paper_root', (['"""."""'], {}), "('.')\n", (1620, 1625), False, 'import scriptorium\n'), ((1663, 1700), 'scriptorium.get_template', 'scriptorium.get_template', (['"""paper.mmd"""'], {}), "('paper.mmd')\n", (1687, 1700), False, 'import scriptorium\n'), ((2019, 2043), 'os.path.exists', 'os.path.exists', (['pdf_path'], {}), '(pdf_path)\n', (2033, 2043), False, 'import os\n'), ((2618, 2655), 'os.path.expanduser', 'os.path.expanduser', (['test_template_dir'], {}), '(test_template_dir)\n', (2636, 2655), False, 'import os\n'), ((991, 1053), 'os.path.join', 'os.path.join', (['TestScriptorium.template_dir', '"""simple_templates"""'], {}), "(TestScriptorium.template_dir, 'simple_templates')\n", (1003, 1053), 
False, 'import os\n')] |
"""Test the module SMOTE ENN."""
# Authors: <NAME> <<EMAIL>>
# <NAME>
# License: MIT
import pytest
import numpy as np
from sklearn.utils.testing import assert_allclose, assert_array_equal
from imblearn.combine import SMOTEENN
from imblearn.under_sampling import EditedNearestNeighbours
from imblearn.over_sampling import SMOTE
RND_SEED = 0
X = np.array([[0.11622591, -0.0317206], [0.77481731, 0.60935141], [
1.25192108, -0.22367336
], [0.53366841, -0.30312976], [1.52091956,
-0.49283504], [-0.28162401, -2.10400981],
[0.83680821,
1.72827342], [0.3084254, 0.33299982], [0.70472253, -0.73309052],
[0.28893132, -0.38761769], [1.15514042, 0.0129463], [
0.88407872, 0.35454207
], [1.31301027, -0.92648734], [-1.11515198, -0.93689695], [
-0.18410027, -0.45194484
], [0.9281014, 0.53085498], [-0.14374509, 0.27370049], [
-0.41635887, -0.38299653
], [0.08711622, 0.93259929], [1.70580611, -0.11219234]])
Y = np.array([0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0])
R_TOL = 1e-4
def test_sample_regular():
smote = SMOTEENN(random_state=RND_SEED)
X_resampled, y_resampled = smote.fit_resample(X, Y)
X_gt = np.array([[1.52091956, -0.49283504], [0.84976473, -0.15570176], [
0.61319159, -0.11571667
], [0.66052536, -0.28246518], [-0.28162401, -2.10400981],
[0.83680821, 1.72827342], [0.08711622, 0.93259929]])
y_gt = np.array([0, 0, 0, 0, 1, 1, 1])
assert_allclose(X_resampled, X_gt, rtol=R_TOL)
assert_array_equal(y_resampled, y_gt)
def test_sample_regular_pass_smote_enn():
smote = SMOTEENN(
smote=SMOTE(sampling_strategy='auto', random_state=RND_SEED),
enn=EditedNearestNeighbours(sampling_strategy='all'),
random_state=RND_SEED)
X_resampled, y_resampled = smote.fit_resample(X, Y)
X_gt = np.array([[1.52091956, -0.49283504], [0.84976473, -0.15570176], [
0.61319159, -0.11571667
], [0.66052536, -0.28246518], [-0.28162401, -2.10400981],
[0.83680821, 1.72827342], [0.08711622, 0.93259929]])
y_gt = np.array([0, 0, 0, 0, 1, 1, 1])
assert_allclose(X_resampled, X_gt, rtol=R_TOL)
assert_array_equal(y_resampled, y_gt)
def test_sample_regular_half():
sampling_strategy = {0: 10, 1: 12}
smote = SMOTEENN(
sampling_strategy=sampling_strategy, random_state=RND_SEED)
X_resampled, y_resampled = smote.fit_resample(X, Y)
X_gt = np.array([[1.52091956, -0.49283504], [-0.28162401, -2.10400981],
[0.83680821, 1.72827342], [0.08711622, 0.93259929]])
y_gt = np.array([0, 1, 1, 1])
assert_allclose(X_resampled, X_gt)
assert_array_equal(y_resampled, y_gt)
def test_validate_estimator_init():
smote = SMOTE(random_state=RND_SEED)
enn = EditedNearestNeighbours(sampling_strategy='all')
smt = SMOTEENN(smote=smote, enn=enn, random_state=RND_SEED)
X_resampled, y_resampled = smt.fit_resample(X, Y)
X_gt = np.array([[1.52091956, -0.49283504], [0.84976473, -0.15570176], [
0.61319159, -0.11571667
], [0.66052536, -0.28246518], [-0.28162401, -2.10400981],
[0.83680821, 1.72827342], [0.08711622, 0.93259929]])
y_gt = np.array([0, 0, 0, 0, 1, 1, 1])
assert_allclose(X_resampled, X_gt, rtol=R_TOL)
assert_array_equal(y_resampled, y_gt)
def test_validate_estimator_default():
smt = SMOTEENN(random_state=RND_SEED)
X_resampled, y_resampled = smt.fit_resample(X, Y)
X_gt = np.array([[1.52091956, -0.49283504], [0.84976473, -0.15570176], [
0.61319159, -0.11571667
], [0.66052536, -0.28246518], [-0.28162401, -2.10400981],
[0.83680821, 1.72827342], [0.08711622, 0.93259929]])
y_gt = np.array([0, 0, 0, 0, 1, 1, 1])
assert_allclose(X_resampled, X_gt, rtol=R_TOL)
assert_array_equal(y_resampled, y_gt)
def test_parallelisation():
# Check if default job count is 1
smt = SMOTEENN(random_state=RND_SEED)
smt._validate_estimator()
assert smt.n_jobs == 1
assert smt.smote_.n_jobs == 1
assert smt.enn_.n_jobs == 1
# Check if job count is set
smt = SMOTEENN(random_state=RND_SEED, n_jobs=8)
smt._validate_estimator()
assert smt.n_jobs == 8
assert smt.smote_.n_jobs == 8
assert smt.enn_.n_jobs == 8
@pytest.mark.parametrize(
"smote_params, err_msg",
[({'smote': 'rnd'}, "smote needs to be a SMOTE"),
({'enn': 'rnd'}, "enn needs to be an ")]
)
def test_error_wrong_object(smote_params, err_msg):
smt = SMOTEENN(**smote_params)
with pytest.raises(ValueError, match=err_msg):
smt.fit_resample(X, Y)
| [
"sklearn.utils.testing.assert_array_equal",
"imblearn.under_sampling.EditedNearestNeighbours",
"imblearn.over_sampling.SMOTE",
"imblearn.combine.SMOTEENN",
"numpy.array",
"pytest.mark.parametrize",
"pytest.raises",
"sklearn.utils.testing.assert_allclose"
] | [((357, 935), 'numpy.array', 'np.array', (['[[0.11622591, -0.0317206], [0.77481731, 0.60935141], [1.25192108, -\n 0.22367336], [0.53366841, -0.30312976], [1.52091956, -0.49283504], [-\n 0.28162401, -2.10400981], [0.83680821, 1.72827342], [0.3084254, \n 0.33299982], [0.70472253, -0.73309052], [0.28893132, -0.38761769], [\n 1.15514042, 0.0129463], [0.88407872, 0.35454207], [1.31301027, -\n 0.92648734], [-1.11515198, -0.93689695], [-0.18410027, -0.45194484], [\n 0.9281014, 0.53085498], [-0.14374509, 0.27370049], [-0.41635887, -\n 0.38299653], [0.08711622, 0.93259929], [1.70580611, -0.11219234]]'], {}), '([[0.11622591, -0.0317206], [0.77481731, 0.60935141], [1.25192108, \n -0.22367336], [0.53366841, -0.30312976], [1.52091956, -0.49283504], [-\n 0.28162401, -2.10400981], [0.83680821, 1.72827342], [0.3084254, \n 0.33299982], [0.70472253, -0.73309052], [0.28893132, -0.38761769], [\n 1.15514042, 0.0129463], [0.88407872, 0.35454207], [1.31301027, -\n 0.92648734], [-1.11515198, -0.93689695], [-0.18410027, -0.45194484], [\n 0.9281014, 0.53085498], [-0.14374509, 0.27370049], [-0.41635887, -\n 0.38299653], [0.08711622, 0.93259929], [1.70580611, -0.11219234]])\n', (365, 935), True, 'import numpy as np\n'), ((1087, 1157), 'numpy.array', 'np.array', (['[0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0]'], {}), '([0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0])\n', (1095, 1157), True, 'import numpy as np\n'), ((4433, 4577), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""smote_params, err_msg"""', "[({'smote': 'rnd'}, 'smote needs to be a SMOTE'), ({'enn': 'rnd'},\n 'enn needs to be an ')]"], {}), "('smote_params, err_msg', [({'smote': 'rnd'},\n 'smote needs to be a SMOTE'), ({'enn': 'rnd'}, 'enn needs to be an ')])\n", (4456, 4577), False, 'import pytest\n'), ((1212, 1243), 'imblearn.combine.SMOTEENN', 'SMOTEENN', ([], {'random_state': 'RND_SEED'}), '(random_state=RND_SEED)\n', (1220, 1243), False, 'from imblearn.combine import SMOTEENN\n'), 
((1312, 1519), 'numpy.array', 'np.array', (['[[1.52091956, -0.49283504], [0.84976473, -0.15570176], [0.61319159, -\n 0.11571667], [0.66052536, -0.28246518], [-0.28162401, -2.10400981], [\n 0.83680821, 1.72827342], [0.08711622, 0.93259929]]'], {}), '([[1.52091956, -0.49283504], [0.84976473, -0.15570176], [0.61319159,\n -0.11571667], [0.66052536, -0.28246518], [-0.28162401, -2.10400981], [\n 0.83680821, 1.72827342], [0.08711622, 0.93259929]])\n', (1320, 1519), True, 'import numpy as np\n'), ((1557, 1588), 'numpy.array', 'np.array', (['[0, 0, 0, 0, 1, 1, 1]'], {}), '([0, 0, 0, 0, 1, 1, 1])\n', (1565, 1588), True, 'import numpy as np\n'), ((1593, 1639), 'sklearn.utils.testing.assert_allclose', 'assert_allclose', (['X_resampled', 'X_gt'], {'rtol': 'R_TOL'}), '(X_resampled, X_gt, rtol=R_TOL)\n', (1608, 1639), False, 'from sklearn.utils.testing import assert_allclose, assert_array_equal\n'), ((1644, 1681), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['y_resampled', 'y_gt'], {}), '(y_resampled, y_gt)\n', (1662, 1681), False, 'from sklearn.utils.testing import assert_allclose, assert_array_equal\n'), ((1979, 2186), 'numpy.array', 'np.array', (['[[1.52091956, -0.49283504], [0.84976473, -0.15570176], [0.61319159, -\n 0.11571667], [0.66052536, -0.28246518], [-0.28162401, -2.10400981], [\n 0.83680821, 1.72827342], [0.08711622, 0.93259929]]'], {}), '([[1.52091956, -0.49283504], [0.84976473, -0.15570176], [0.61319159,\n -0.11571667], [0.66052536, -0.28246518], [-0.28162401, -2.10400981], [\n 0.83680821, 1.72827342], [0.08711622, 0.93259929]])\n', (1987, 2186), True, 'import numpy as np\n'), ((2224, 2255), 'numpy.array', 'np.array', (['[0, 0, 0, 0, 1, 1, 1]'], {}), '([0, 0, 0, 0, 1, 1, 1])\n', (2232, 2255), True, 'import numpy as np\n'), ((2260, 2306), 'sklearn.utils.testing.assert_allclose', 'assert_allclose', (['X_resampled', 'X_gt'], {'rtol': 'R_TOL'}), '(X_resampled, X_gt, rtol=R_TOL)\n', (2275, 2306), False, 'from sklearn.utils.testing import 
assert_allclose, assert_array_equal\n'), ((2311, 2348), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['y_resampled', 'y_gt'], {}), '(y_resampled, y_gt)\n', (2329, 2348), False, 'from sklearn.utils.testing import assert_allclose, assert_array_equal\n'), ((2434, 2502), 'imblearn.combine.SMOTEENN', 'SMOTEENN', ([], {'sampling_strategy': 'sampling_strategy', 'random_state': 'RND_SEED'}), '(sampling_strategy=sampling_strategy, random_state=RND_SEED)\n', (2442, 2502), False, 'from imblearn.combine import SMOTEENN\n'), ((2580, 2702), 'numpy.array', 'np.array', (['[[1.52091956, -0.49283504], [-0.28162401, -2.10400981], [0.83680821, \n 1.72827342], [0.08711622, 0.93259929]]'], {}), '([[1.52091956, -0.49283504], [-0.28162401, -2.10400981], [\n 0.83680821, 1.72827342], [0.08711622, 0.93259929]])\n', (2588, 2702), True, 'import numpy as np\n'), ((2730, 2752), 'numpy.array', 'np.array', (['[0, 1, 1, 1]'], {}), '([0, 1, 1, 1])\n', (2738, 2752), True, 'import numpy as np\n'), ((2757, 2791), 'sklearn.utils.testing.assert_allclose', 'assert_allclose', (['X_resampled', 'X_gt'], {}), '(X_resampled, X_gt)\n', (2772, 2791), False, 'from sklearn.utils.testing import assert_allclose, assert_array_equal\n'), ((2796, 2833), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['y_resampled', 'y_gt'], {}), '(y_resampled, y_gt)\n', (2814, 2833), False, 'from sklearn.utils.testing import assert_allclose, assert_array_equal\n'), ((2884, 2912), 'imblearn.over_sampling.SMOTE', 'SMOTE', ([], {'random_state': 'RND_SEED'}), '(random_state=RND_SEED)\n', (2889, 2912), False, 'from imblearn.over_sampling import SMOTE\n'), ((2923, 2971), 'imblearn.under_sampling.EditedNearestNeighbours', 'EditedNearestNeighbours', ([], {'sampling_strategy': '"""all"""'}), "(sampling_strategy='all')\n", (2946, 2971), False, 'from imblearn.under_sampling import EditedNearestNeighbours\n'), ((2982, 3035), 'imblearn.combine.SMOTEENN', 'SMOTEENN', ([], {'smote': 'smote', 'enn': 'enn', 
'random_state': 'RND_SEED'}), '(smote=smote, enn=enn, random_state=RND_SEED)\n', (2990, 3035), False, 'from imblearn.combine import SMOTEENN\n'), ((3101, 3308), 'numpy.array', 'np.array', (['[[1.52091956, -0.49283504], [0.84976473, -0.15570176], [0.61319159, -\n 0.11571667], [0.66052536, -0.28246518], [-0.28162401, -2.10400981], [\n 0.83680821, 1.72827342], [0.08711622, 0.93259929]]'], {}), '([[1.52091956, -0.49283504], [0.84976473, -0.15570176], [0.61319159,\n -0.11571667], [0.66052536, -0.28246518], [-0.28162401, -2.10400981], [\n 0.83680821, 1.72827342], [0.08711622, 0.93259929]])\n', (3109, 3308), True, 'import numpy as np\n'), ((3346, 3377), 'numpy.array', 'np.array', (['[0, 0, 0, 0, 1, 1, 1]'], {}), '([0, 0, 0, 0, 1, 1, 1])\n', (3354, 3377), True, 'import numpy as np\n'), ((3382, 3428), 'sklearn.utils.testing.assert_allclose', 'assert_allclose', (['X_resampled', 'X_gt'], {'rtol': 'R_TOL'}), '(X_resampled, X_gt, rtol=R_TOL)\n', (3397, 3428), False, 'from sklearn.utils.testing import assert_allclose, assert_array_equal\n'), ((3433, 3470), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['y_resampled', 'y_gt'], {}), '(y_resampled, y_gt)\n', (3451, 3470), False, 'from sklearn.utils.testing import assert_allclose, assert_array_equal\n'), ((3522, 3553), 'imblearn.combine.SMOTEENN', 'SMOTEENN', ([], {'random_state': 'RND_SEED'}), '(random_state=RND_SEED)\n', (3530, 3553), False, 'from imblearn.combine import SMOTEENN\n'), ((3619, 3826), 'numpy.array', 'np.array', (['[[1.52091956, -0.49283504], [0.84976473, -0.15570176], [0.61319159, -\n 0.11571667], [0.66052536, -0.28246518], [-0.28162401, -2.10400981], [\n 0.83680821, 1.72827342], [0.08711622, 0.93259929]]'], {}), '([[1.52091956, -0.49283504], [0.84976473, -0.15570176], [0.61319159,\n -0.11571667], [0.66052536, -0.28246518], [-0.28162401, -2.10400981], [\n 0.83680821, 1.72827342], [0.08711622, 0.93259929]])\n', (3627, 3826), True, 'import numpy as np\n'), ((3864, 3895), 'numpy.array', 'np.array', 
(['[0, 0, 0, 0, 1, 1, 1]'], {}), '([0, 0, 0, 0, 1, 1, 1])\n', (3872, 3895), True, 'import numpy as np\n'), ((3900, 3946), 'sklearn.utils.testing.assert_allclose', 'assert_allclose', (['X_resampled', 'X_gt'], {'rtol': 'R_TOL'}), '(X_resampled, X_gt, rtol=R_TOL)\n', (3915, 3946), False, 'from sklearn.utils.testing import assert_allclose, assert_array_equal\n'), ((3951, 3988), 'sklearn.utils.testing.assert_array_equal', 'assert_array_equal', (['y_resampled', 'y_gt'], {}), '(y_resampled, y_gt)\n', (3969, 3988), False, 'from sklearn.utils.testing import assert_allclose, assert_array_equal\n'), ((4067, 4098), 'imblearn.combine.SMOTEENN', 'SMOTEENN', ([], {'random_state': 'RND_SEED'}), '(random_state=RND_SEED)\n', (4075, 4098), False, 'from imblearn.combine import SMOTEENN\n'), ((4265, 4306), 'imblearn.combine.SMOTEENN', 'SMOTEENN', ([], {'random_state': 'RND_SEED', 'n_jobs': '(8)'}), '(random_state=RND_SEED, n_jobs=8)\n', (4273, 4306), False, 'from imblearn.combine import SMOTEENN\n'), ((4651, 4675), 'imblearn.combine.SMOTEENN', 'SMOTEENN', ([], {}), '(**smote_params)\n', (4659, 4675), False, 'from imblearn.combine import SMOTEENN\n'), ((4685, 4725), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': 'err_msg'}), '(ValueError, match=err_msg)\n', (4698, 4725), False, 'import pytest\n'), ((1762, 1816), 'imblearn.over_sampling.SMOTE', 'SMOTE', ([], {'sampling_strategy': '"""auto"""', 'random_state': 'RND_SEED'}), "(sampling_strategy='auto', random_state=RND_SEED)\n", (1767, 1816), False, 'from imblearn.over_sampling import SMOTE\n'), ((1830, 1878), 'imblearn.under_sampling.EditedNearestNeighbours', 'EditedNearestNeighbours', ([], {'sampling_strategy': '"""all"""'}), "(sampling_strategy='all')\n", (1853, 1878), False, 'from imblearn.under_sampling import EditedNearestNeighbours\n')] |
# Generated by Django 3.0.7 on 2020-09-22 05:14
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('listings', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='post',
name='author',
),
]
| [
"django.db.migrations.RemoveField"
] | [((217, 273), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""post"""', 'name': '"""author"""'}), "(model_name='post', name='author')\n", (239, 273), False, 'from django.db import migrations\n')] |
import logging
import os
import traceback
from typing import List
from workflower.adapters.sqlalchemy.setup import Session
from workflower.adapters.sqlalchemy.unit_of_work import SqlAlchemyUnitOfWork
from workflower.application.event.commands import CreateEventCommand
from workflower.application.workflow.commands import (
ActivateWorkflowCommand,
LoadWorkflowFromYamlFileCommand,
SetWorkflowTriggerCommand,
)
from workflower.domain.entities.workflow import Workflow
logger = logging.getLogger("workflower.loader")
class WorkflowLoaderService:
def __init__(self) -> None:
self._workflows = None
@property
def workflows(self) -> List[Workflow]:
return self._workflows
def load_one_workflow_file(self, path: str, trigger: str = "on_schedule"):
"""
Load one workflow from file.
Args:
- path (str): workflow file path
- trigger (str): expects "on_schedule" or "on_demand".
"""
session = Session()
uow = SqlAlchemyUnitOfWork(session)
# TODO
# Add strategy pattern
command = LoadWorkflowFromYamlFileCommand(uow, path)
workflow = None
try:
workflow = command.execute()
except Exception:
logger.error(f"Error loading {path}:" f" {traceback.format_exc()}")
create_event_command = CreateEventCommand(
uow,
model="workflow",
model_id=None,
name="workflow_load_error",
exception=traceback.format_exc(),
)
create_event_command.execute()
if workflow:
set_trigger_command = SetWorkflowTriggerCommand(
uow, workflow.id, trigger
)
set_trigger_command.execute()
activate_Workflow_command = ActivateWorkflowCommand(
uow, workflow.id
)
activate_Workflow_command.execute()
return workflow
def load_all_from_dir(
self, path: str, trigger: str = "on_schedule"
) -> List[Workflow]:
"""
Load all workflow files from a given directory
Args:
- path (str): workflows file path
- trigger (str): expects "on_schedule" or "on_demand".
"""
self._workflows = []
logger.info(f"Loading Workflows from directory: {path}")
counter = 0
for root, dirs, files in os.walk(path):
for file in files:
if file.endswith(".yml") or file.endswith(".yaml"):
workflow_path = os.path.join(root, file)
workflow = self.load_one_workflow_file(
workflow_path, trigger=trigger
)
if workflow:
self._workflows.append(workflow)
counter += 1
logger.info(f"Workflows Loaded {counter}")
return self._workflows
| [
"logging.getLogger",
"traceback.format_exc",
"workflower.application.workflow.commands.ActivateWorkflowCommand",
"workflower.adapters.sqlalchemy.unit_of_work.SqlAlchemyUnitOfWork",
"workflower.application.workflow.commands.LoadWorkflowFromYamlFileCommand",
"os.path.join",
"workflower.adapters.sqlalchemy... | [((491, 529), 'logging.getLogger', 'logging.getLogger', (['"""workflower.loader"""'], {}), "('workflower.loader')\n", (508, 529), False, 'import logging\n'), ((999, 1008), 'workflower.adapters.sqlalchemy.setup.Session', 'Session', ([], {}), '()\n', (1006, 1008), False, 'from workflower.adapters.sqlalchemy.setup import Session\n'), ((1023, 1052), 'workflower.adapters.sqlalchemy.unit_of_work.SqlAlchemyUnitOfWork', 'SqlAlchemyUnitOfWork', (['session'], {}), '(session)\n', (1043, 1052), False, 'from workflower.adapters.sqlalchemy.unit_of_work import SqlAlchemyUnitOfWork\n'), ((1118, 1160), 'workflower.application.workflow.commands.LoadWorkflowFromYamlFileCommand', 'LoadWorkflowFromYamlFileCommand', (['uow', 'path'], {}), '(uow, path)\n', (1149, 1160), False, 'from workflower.application.workflow.commands import ActivateWorkflowCommand, LoadWorkflowFromYamlFileCommand, SetWorkflowTriggerCommand\n'), ((2469, 2482), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (2476, 2482), False, 'import os\n'), ((1693, 1745), 'workflower.application.workflow.commands.SetWorkflowTriggerCommand', 'SetWorkflowTriggerCommand', (['uow', 'workflow.id', 'trigger'], {}), '(uow, workflow.id, trigger)\n', (1718, 1745), False, 'from workflower.application.workflow.commands import ActivateWorkflowCommand, LoadWorkflowFromYamlFileCommand, SetWorkflowTriggerCommand\n'), ((1858, 1899), 'workflower.application.workflow.commands.ActivateWorkflowCommand', 'ActivateWorkflowCommand', (['uow', 'workflow.id'], {}), '(uow, workflow.id)\n', (1881, 1899), False, 'from workflower.application.workflow.commands import ActivateWorkflowCommand, LoadWorkflowFromYamlFileCommand, SetWorkflowTriggerCommand\n'), ((2619, 2643), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (2631, 2643), False, 'import os\n'), ((1556, 1578), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1576, 1578), False, 'import traceback\n'), ((1319, 1341), 
'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1339, 1341), False, 'import traceback\n')] |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import io
import os
import re
from setuptools import find_packages, setup
here = os.path.abspath(os.path.dirname(__file__))
PROJECT_MODULE = 'dragonite'
PROJECT = 'dragonite'
AUTHOR = '<NAME>'
EMAIL = '<EMAIL>'
DESC = 'Dragon Con command line utility'
LONG_DESC = ''
KEYWORDS = ('dragonite', 'dragoncon', 'dragon', 'con')
URL = "https://github.com/neuroticnerd/dragoncon-bot"
REQUIRES = []
EXTRAS = {
'dev': (
'flake8 >= 2.5.0',
'twine >= 1.8.1',
'pytest >= 2.8.4',
'coverage >= 4.0.3',
),
# 'caching': (
# 'redis>=2.10.3',
# 'hiredis>=0.2.0',
# ),
}
SCRIPTS = {
"console_scripts": [
'dragonite = dragonite.cli:dragonite',
]}
LICENSE = 'Apache License, Version 2.0'
VERSION = ''
CLASSIFIERS = [
'Environment :: Console',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
]
version_file = os.path.join(here, '{0}/__init__.py'.format(PROJECT_MODULE))
ver_find = r'^\s*__version__\s*=\s*[\"\'](.*)[\"\']$'
with io.open(version_file, 'r', encoding='utf-8') as ver_file:
VERSION = re.search(ver_find, ver_file.read(), re.MULTILINE).group(1)
readme_file = os.path.join(here, 'README.rst')
with io.open(readme_file, 'r', encoding='utf-8') as f:
LONG_DESC = f.read()
requirements_file = os.path.join(here, 'requirements.txt')
with io.open(requirements_file, 'r') as reqs_file:
for rawline in reqs_file:
line = rawline.strip()
if line.startswith('http'):
continue
REQUIRES.append(' >= '.join(line.split('==')))
if __name__ == '__main__':
setup(
name=PROJECT,
version=VERSION,
packages=find_packages(include=[PROJECT_MODULE + '*']),
author=AUTHOR,
author_email=EMAIL,
url=URL,
description=DESC,
long_description=LONG_DESC,
classifiers=CLASSIFIERS,
platforms=('any',),
license=LICENSE,
keywords=KEYWORDS,
install_requires=REQUIRES,
extras_require=EXTRAS,
entry_points=SCRIPTS,
)
| [
"os.path.dirname",
"setuptools.find_packages",
"os.path.join",
"io.open"
] | [((1575, 1607), 'os.path.join', 'os.path.join', (['here', '"""README.rst"""'], {}), "(here, 'README.rst')\n", (1587, 1607), False, 'import os\n'), ((1709, 1747), 'os.path.join', 'os.path.join', (['here', '"""requirements.txt"""'], {}), "(here, 'requirements.txt')\n", (1721, 1747), False, 'import os\n'), ((205, 230), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (220, 230), False, 'import os\n'), ((1428, 1472), 'io.open', 'io.open', (['version_file', '"""r"""'], {'encoding': '"""utf-8"""'}), "(version_file, 'r', encoding='utf-8')\n", (1435, 1472), False, 'import io\n'), ((1613, 1656), 'io.open', 'io.open', (['readme_file', '"""r"""'], {'encoding': '"""utf-8"""'}), "(readme_file, 'r', encoding='utf-8')\n", (1620, 1656), False, 'import io\n'), ((1753, 1784), 'io.open', 'io.open', (['requirements_file', '"""r"""'], {}), "(requirements_file, 'r')\n", (1760, 1784), False, 'import io\n'), ((2075, 2120), 'setuptools.find_packages', 'find_packages', ([], {'include': "[PROJECT_MODULE + '*']"}), "(include=[PROJECT_MODULE + '*'])\n", (2088, 2120), False, 'from setuptools import find_packages, setup\n')] |
# django imports
from django import forms
from django.conf import settings
from django.core.cache import cache
from django.template.loader import render_to_string
# portlets imports
from portlets.models import Portlet
# lfs imports
from lfs.page.models import Page
class PagesPortlet(Portlet):
"""Portlet to display pages.
"""
class Meta:
app_label = 'portlet'
def __unicode__(self):
return u"%s" % self.id
def render(self, context):
"""Renders the portlet as html.
"""
request = context.get("request")
cache_key = "%s-pages" % settings.CACHE_MIDDLEWARE_KEY_PREFIX
pages = cache.get(cache_key)
if pages is None:
pages = Page.objects.filter(active=True, exclude_from_navigation=False)
cache.set(cache_key, pages)
return render_to_string("lfs/portlets/pages.html", request=request, context={
"title": self.title,
"pages": pages,
})
def form(self, **kwargs):
return PagesForm(instance=self, **kwargs)
class PagesForm(forms.ModelForm):
"""Form for the PagesPortlet.
"""
class Meta:
model = PagesPortlet
exclude = ()
| [
"django.core.cache.cache.set",
"lfs.page.models.Page.objects.filter",
"django.template.loader.render_to_string",
"django.core.cache.cache.get"
] | [((656, 676), 'django.core.cache.cache.get', 'cache.get', (['cache_key'], {}), '(cache_key)\n', (665, 676), False, 'from django.core.cache import cache\n'), ((843, 955), 'django.template.loader.render_to_string', 'render_to_string', (['"""lfs/portlets/pages.html"""'], {'request': 'request', 'context': "{'title': self.title, 'pages': pages}"}), "('lfs/portlets/pages.html', request=request, context={\n 'title': self.title, 'pages': pages})\n", (859, 955), False, 'from django.template.loader import render_to_string\n'), ((723, 786), 'lfs.page.models.Page.objects.filter', 'Page.objects.filter', ([], {'active': '(True)', 'exclude_from_navigation': '(False)'}), '(active=True, exclude_from_navigation=False)\n', (742, 786), False, 'from lfs.page.models import Page\n'), ((799, 826), 'django.core.cache.cache.set', 'cache.set', (['cache_key', 'pages'], {}), '(cache_key, pages)\n', (808, 826), False, 'from django.core.cache import cache\n')] |
from django.contrib import admin
from django.db import models
from django.utils.translation import gettext as _
from .models import (MilitaryRank, Platoon, ServiseID, Unit, OfficialPosition, Company,
Education, Creed, Nationality, Command)
from osoba.widgets import CustomDatePickerInput
class ServiseIDAdmin(admin.ModelAdmin):
fieldsets = (
# (None, {
# 'fields': ('field1', 'field2', 'field3')
# }),
(_('Main data'), {
'fields': ('name', 'sename', 'third_name', 'birth_date')
}),
(_('Names in accs'), { #давальний відмінок
'fields': ('name_accs', 'sename_accs', 'third_name_accs')
}),
(_('Company'), {
'fields': ('military_ranks', )
}),
(_('Info for Service begin'), {
'fields': ('military_office', 'date_of_conscription', 'order_date', 'order_number')
}),
(_('General information'), {
'fields': ('orphan',
'married', 'halforphan', 'work', 'mobilization', 'driveid', 'creed',
'nationality', 'education', 'blood_type', 'rh')
}),
(_('militaryID'), {
'fields': ('militaryID_seria', 'militaryID_number', 'who_militaryID',
'militaryID_date', 'weapon', 'military_rank_id', 'military_rank_date')
}),
(_('ID'), {
'fields': ('ID_seria', 'ID_number', 'who_ID',
'ID_date', 'ipn')
}),
(_('Address'), {
'fields': ('addres_pr', 'addres_fact')
}),
(_('Images'), {
'fields': ('image_face3x4',)
})
)
change_form_template = 'admin/ocoba_change_form.html'
# formfield_overrides = {
# models.DateField: {'widget': MonthPickerInput}
# }
admin.site.register(Company)
admin.site.register(MilitaryRank)
admin.site.register(Platoon)
admin.site.register(ServiseID, ServiseIDAdmin)
admin.site.register(Unit)
admin.site.register(OfficialPosition)
admin.site.register(Creed)
admin.site.register(Nationality)
admin.site.register(Education)
admin.site.register(Command) | [
"django.contrib.admin.site.register",
"django.utils.translation.gettext"
] | [((1846, 1874), 'django.contrib.admin.site.register', 'admin.site.register', (['Company'], {}), '(Company)\n', (1865, 1874), False, 'from django.contrib import admin\n'), ((1875, 1908), 'django.contrib.admin.site.register', 'admin.site.register', (['MilitaryRank'], {}), '(MilitaryRank)\n', (1894, 1908), False, 'from django.contrib import admin\n'), ((1909, 1937), 'django.contrib.admin.site.register', 'admin.site.register', (['Platoon'], {}), '(Platoon)\n', (1928, 1937), False, 'from django.contrib import admin\n'), ((1938, 1984), 'django.contrib.admin.site.register', 'admin.site.register', (['ServiseID', 'ServiseIDAdmin'], {}), '(ServiseID, ServiseIDAdmin)\n', (1957, 1984), False, 'from django.contrib import admin\n'), ((1985, 2010), 'django.contrib.admin.site.register', 'admin.site.register', (['Unit'], {}), '(Unit)\n', (2004, 2010), False, 'from django.contrib import admin\n'), ((2011, 2048), 'django.contrib.admin.site.register', 'admin.site.register', (['OfficialPosition'], {}), '(OfficialPosition)\n', (2030, 2048), False, 'from django.contrib import admin\n'), ((2049, 2075), 'django.contrib.admin.site.register', 'admin.site.register', (['Creed'], {}), '(Creed)\n', (2068, 2075), False, 'from django.contrib import admin\n'), ((2076, 2108), 'django.contrib.admin.site.register', 'admin.site.register', (['Nationality'], {}), '(Nationality)\n', (2095, 2108), False, 'from django.contrib import admin\n'), ((2109, 2139), 'django.contrib.admin.site.register', 'admin.site.register', (['Education'], {}), '(Education)\n', (2128, 2139), False, 'from django.contrib import admin\n'), ((2140, 2168), 'django.contrib.admin.site.register', 'admin.site.register', (['Command'], {}), '(Command)\n', (2159, 2168), False, 'from django.contrib import admin\n'), ((465, 479), 'django.utils.translation.gettext', '_', (['"""Main data"""'], {}), "('Main data')\n", (466, 479), True, 'from django.utils.translation import gettext as _\n'), ((573, 591), 'django.utils.translation.gettext', 
'_', (['"""Names in accs"""'], {}), "('Names in accs')\n", (574, 591), True, 'from django.utils.translation import gettext as _\n'), ((706, 718), 'django.utils.translation.gettext', '_', (['"""Company"""'], {}), "('Company')\n", (707, 718), True, 'from django.utils.translation import gettext as _\n'), ((787, 814), 'django.utils.translation.gettext', '_', (['"""Info for Service begin"""'], {}), "('Info for Service begin')\n", (788, 814), True, 'from django.utils.translation import gettext as _\n'), ((936, 960), 'django.utils.translation.gettext', '_', (['"""General information"""'], {}), "('General information')\n", (937, 960), True, 'from django.utils.translation import gettext as _\n'), ((1161, 1176), 'django.utils.translation.gettext', '_', (['"""militaryID"""'], {}), "('militaryID')\n", (1162, 1176), True, 'from django.utils.translation import gettext as _\n'), ((1367, 1374), 'django.utils.translation.gettext', '_', (['"""ID"""'], {}), "('ID')\n", (1368, 1374), True, 'from django.utils.translation import gettext as _\n'), ((1496, 1508), 'django.utils.translation.gettext', '_', (['"""Address"""'], {}), "('Address')\n", (1497, 1508), True, 'from django.utils.translation import gettext as _\n'), ((1597, 1608), 'django.utils.translation.gettext', '_', (['"""Images"""'], {}), "('Images')\n", (1598, 1608), True, 'from django.utils.translation import gettext as _\n')] |
import sys, os, glob
from skimage import io
from skimage import viewer
import registration as reg
from skimage import data
def display():
    """Placeholder display helper; intentionally a no-op for now."""
    pass
if __name__ == "__main__":
    # ------------------Create input ndarray------------------------
    inputDir = '../data/test/'
    imageFiles = glob.glob(os.path.join(inputDir, '*.jpg'))
    imageVolume = io.ImageCollection(imageFiles, as_grey=True).concatenate()
    stack = imageVolume
    # ------------------Check that single image registration works----
    src = stack[0]
    dst = stack[1]
    reg_dst = reg.reg(src, dst)
    # ------------- Check that stack registration works -----------
    reg_stack = reg.registration(stack)
    # Overlay every frame on the first frame, before and after registration.
    merged = [reg.overlay_pics(stack[0], img) for img in stack]
    merged_reg = [reg.overlay_pics(reg_stack[0], img) for img in reg_stack]
    image = data.coins()
    # Use a distinct name for the viewer instance: the original rebinding
    # (`viewer = viewer.CollectionViewer(...)`) shadowed the imported
    # `skimage.viewer` module, making any further module access impossible.
    collection_viewer = viewer.CollectionViewer(merged_reg)
    collection_viewer.show()
| [
"registration.overlay_pics",
"skimage.io.ImageCollection",
"os.path.join",
"registration.reg",
"skimage.viewer.show",
"registration.registration",
"skimage.data.coins",
"skimage.viewer.CollectionViewer"
] | [((564, 581), 'registration.reg', 'reg.reg', (['src', 'dst'], {}), '(src, dst)\n', (571, 581), True, 'import registration as reg\n'), ((672, 695), 'registration.registration', 'reg.registration', (['stack'], {}), '(stack)\n', (688, 695), True, 'import registration as reg\n'), ((849, 861), 'skimage.data.coins', 'data.coins', ([], {}), '()\n', (859, 861), False, 'from skimage import data\n'), ((876, 911), 'skimage.viewer.CollectionViewer', 'viewer.CollectionViewer', (['merged_reg'], {}), '(merged_reg)\n', (899, 911), False, 'from skimage import viewer\n'), ((916, 929), 'skimage.viewer.show', 'viewer.show', ([], {}), '()\n', (927, 929), False, 'from skimage import viewer\n'), ((304, 335), 'os.path.join', 'os.path.join', (['inputDir', '"""*.jpg"""'], {}), "(inputDir, '*.jpg')\n", (316, 335), False, 'import sys, os, glob\n'), ((711, 742), 'registration.overlay_pics', 'reg.overlay_pics', (['stack[0]', 'img'], {}), '(stack[0], img)\n', (727, 742), True, 'import registration as reg\n'), ((779, 814), 'registration.overlay_pics', 'reg.overlay_pics', (['reg_stack[0]', 'img'], {}), '(reg_stack[0], img)\n', (795, 814), True, 'import registration as reg\n'), ((355, 399), 'skimage.io.ImageCollection', 'io.ImageCollection', (['imageFiles'], {'as_grey': '(True)'}), '(imageFiles, as_grey=True)\n', (373, 399), False, 'from skimage import io\n')] |
"""
Numpy's `split` can split a multidimensional array into non-overlapping
sub-arrays. However, this is not a memory-efficient way of dealing with
non-overlapping partitions of an array because it effectively doubles
memory usage.
This module provides an iterable generator that produces tuples of slices,
each of which can be used to index into a Numpy array and obtain a small
view into it. It is very memory-efficient since no copy of the array is
ever created.
This all works because Numpy ndarrays can be indexed using a tuple of
slices: that is, `arr[a:b, c:d, e:f]` is equivalent to
`arr[(slice(a, b), slice(c, d), slice(e, f))]`.
This module doesn't import Numpy at all since it generates Python slices.
"""
from itertools import product
from typing import List, Iterable, Tuple
def array_range(start: List[int], stop: List[int], step: List[int]) -> Iterable[Tuple]:
    """
    Makes an iterable of non-overlapping slices, e.g., to partition an array

    Returns an iterable of tuples of slices, each of which can be used to
    index into a multidimensional array such as Numpy's ndarray.

    >> [arr[tup] for tup in array_range([0, 0], arr.shape, [5, 7])]

    where `arr` can be indexed with a tuple of slices (e.g., Numpy), will
    evaluate to a list of sub-arrays.

    Same arguments as `range` except all three arguments are required and
    expected to be list-like of same length. `start` indicates the indexes
    to start each dimension. `stop` indicates the stop index for each
    dimension. `step` is the size of the chunk in each dimension.
    """
    assert len(start) == len(stop)
    assert len(stop) == len(step)
    assert all(s > 0 for s in step)

    # One range of chunk-start indexes per dimension.
    start_ranges = (range(lo, hi, inc) for lo, hi, inc in zip(start, stop, step))

    def starts_to_slices(multi_start: Tuple[int, ...]) -> Tuple:
        # Clamp each chunk's end at the dimension's stop index so the final
        # (possibly partial) chunk never reaches past the array boundary.
        # Distinct loop names avoid shadowing the outer `stop`/`step` lists,
        # and plain `def`s replace the original PEP 8-discouraged lambda
        # assignments (E731).
        return tuple(
            slice(i, min(i + inc, hi)) for i, hi, inc in zip(multi_start, stop, step))

    return map(starts_to_slices, product(*start_ranges))
| [
"itertools.product"
] | [((1903, 1927), 'itertools.product', 'product', (['*startRangesGen'], {}), '(*startRangesGen)\n', (1910, 1927), False, 'from itertools import product\n')] |
# ColorPaint.py
import pygame # setup
import random
pygame.init()
screen = pygame.display.set_mode([800, 600])
pygame.display.set_caption('Click and drag to draw, using up to 3 mouse buttons')
keepGoing = True
# RGB color triplets for the 3 mouse-button colors.
ORANGE = (255, 165, 0)  # fixed: the original (255, 255, 0) is yellow, not orange
GREEN = (0, 255, 0)
PURPLE = (128, 0, 128)
radius = 15
mousedown = False
while keepGoing:  # game loop
    for event in pygame.event.get():  # handling events
        if event.type == pygame.QUIT:
            keepGoing = False
        if event.type == pygame.MOUSEBUTTONDOWN:
            mousedown = True
        if event.type == pygame.MOUSEBUTTONUP:
            mousedown = False
    if mousedown:  # draw a dot at the cursor while a button is held
        spot = pygame.mouse.get_pos()
        if pygame.mouse.get_pressed()[0]:  # boolean for button1
            button_color = ORANGE
        elif pygame.mouse.get_pressed()[1]:  # boolean for button2
            button_color = GREEN
        else:  # must be button3
            button_color = PURPLE
        pygame.draw.circle(screen, button_color, spot, radius)
    pygame.display.update()  # update display every frame
pygame.quit()  # exit
| [
"pygame.draw.circle",
"pygame.mouse.get_pressed",
"pygame.init",
"pygame.quit",
"pygame.event.get",
"pygame.display.set_mode",
"pygame.mouse.get_pos",
"pygame.display.set_caption",
"pygame.display.update"
] | [((78, 91), 'pygame.init', 'pygame.init', ([], {}), '()\n', (89, 91), False, 'import pygame\n'), ((101, 136), 'pygame.display.set_mode', 'pygame.display.set_mode', (['[800, 600]'], {}), '([800, 600])\n', (124, 136), False, 'import pygame\n'), ((137, 223), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Click and drag to draw, using up to 3 mouse buttons"""'], {}), "(\n 'Click and drag to draw, using up to 3 mouse buttons')\n", (163, 223), False, 'import pygame\n'), ((1207, 1220), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (1218, 1220), False, 'import pygame\n'), ((446, 464), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (462, 464), False, 'import pygame\n'), ((1145, 1168), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (1166, 1168), False, 'import pygame\n'), ((789, 811), 'pygame.mouse.get_pos', 'pygame.mouse.get_pos', ([], {}), '()\n', (809, 811), False, 'import pygame\n'), ((1086, 1140), 'pygame.draw.circle', 'pygame.draw.circle', (['screen', 'button_color', 'spot', 'radius'], {}), '(screen, button_color, spot, radius)\n', (1104, 1140), False, 'import pygame\n'), ((823, 849), 'pygame.mouse.get_pressed', 'pygame.mouse.get_pressed', ([], {}), '()\n', (847, 849), False, 'import pygame\n'), ((924, 950), 'pygame.mouse.get_pressed', 'pygame.mouse.get_pressed', ([], {}), '()\n', (948, 950), False, 'import pygame\n')] |
import datetime
import unittest
from accounts.models import AccountDetails
from categories.models import Category
from django.contrib.auth.models import User
from django.test import Client
from django.test import TestCase
from django.urls import reverse
from events.models import Comment
from events.models import Event
from events.models import Invite
from tasks.models import Task
class EventsTestCase(TestCase):
    """List-view tests for events, plus placeholders for CRUD scenarios."""

    def setUp(self):
        self.total_number_of_events = 25
        self.client = Client()
        # The original setUp called login() without ever creating the
        # account, so Client.login() silently returned False and the session
        # stayed anonymous.  Create the user first so authentication works.
        self.user = User.objects.create_user(
            'john',
            '<EMAIL>',
            '<PASSWORD>'
        )
        self.client.login(username='john', password='<PASSWORD>')
        category = Category.objects.create(
            name='test event category',
            description='cool description',
            slug='test',
        )
        for event_id in range(self.total_number_of_events):
            eventstring = 'test' + str(event_id)
            # objects.create() already persists the event, so no extra
            # save() is needed before adding the m2m category link.
            self.event = Event.objects.create(
                title=eventstring,
                description=eventstring,
            )
            self.event.category.add(category)
            self.event.save()

    def test_list_events_lists_event(self):
        """The events list view renders and contains an event title."""
        url = reverse('events.list')
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn(b'test1', resp.content)

    def test_list_events_lists_categories(self):
        """The events list view renders and contains the category name."""
        url = reverse('events.list')
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn(b'test event category', resp.content)

    # TODO: the scenarios below are stubs and still need real coverage.
    def test_create_event(self):
        pass

    def test_update_event(self):
        pass

    def test_delete_event(self):
        pass

    def test_view_event(self):
        pass

    def test_join_event(self):
        pass

    def test_unjoin_event(self):
        pass

    def test_add_teammate(self):
        pass
class EventsFeedsTestCase(TestCase):
    """Tests for the event RSS/Atom feed views."""

    def setUp(self):
        self.total_number_of_events = 25
        self.client = Client()
        # The original setUp called login() without ever creating the
        # account, so Client.login() silently returned False and the session
        # stayed anonymous.  Create the user first so authentication works.
        self.user = User.objects.create_user(
            'john',
            '<EMAIL>',
            '<PASSWORD>'
        )
        self.client.login(username='john', password='<PASSWORD>')
        category = Category.objects.create(
            name='test event',
            description='cool description',
            slug='test',
        )
        for event_id in range(self.total_number_of_events):
            eventstring = 'test' + str(event_id)
            # objects.create() already persists the event, so no extra
            # save() is needed before adding the m2m category link.
            self.event = Event.objects.create(
                title=eventstring,
                description=eventstring,
            )
            self.event.category.add(category)
            self.event.save()

    def test_all_events_feed(self):
        """The full feed includes both the newest and an early event."""
        response = self.client.get(reverse('event_feed'))
        latest_event = 'test' + str(self.total_number_of_events - 1)
        self.assertContains(response, latest_event)
        self.assertContains(response, 'test' + str(1))

    def test_latest_events_feed(self):
        """The latest-events feed excludes out-of-range titles."""
        response = self.client.get(reverse('latest_event_feed'))
        # NOTE(review): events are named test0..test24, so 'test25' never
        # exists and this assertion is trivially true; it probably meant to
        # check that the *oldest* event is excluded -- confirm intent before
        # tightening.
        first_event_title = 'test' + str(self.total_number_of_events)
        self.assertNotContains(response, first_event_title)
        latest_event_title = 'test' + str(1)
        self.assertContains(response, latest_event_title)
class EventsUrlsTestClass(TestCase):
    """Smoke tests that each events URL resolves and returns the expected
    HTTP status for a logged-in user with one event and account details."""
    # NOTE(review): this class attribute is immediately shadowed by the
    # instance attribute set in setUp(), so it is effectively dead.
    client = Client()
    def setUp(self):
        self.client = Client()
        self.user = User.objects.create_user(
            'john',
            '<EMAIL>',
            'johnpassword'
        )
        self.user.details = AccountDetails.objects.create(
            user=self.user,
            description='cool description',
            slug='userslug'
        )
        # NOTE(review): the password here is a redaction artifact; if it does
        # not match the created user's password, login() returns False.
        self.client.login(username='john', password='<PASSWORD>')
        category = Category.objects.create(
            name='test event',
            description='cool description',
            slug='test',
        )
        self.event = Event.objects.create(
            title='testy',
            description='cool description',
            slug='event',
            added_by=self.user,
        )
        self.event.save()
        self.event.category.add(category)
        self.event.team_members.add(self.user)
        self.event.save()
    def url_returns_200(self, url, status_code=200):
        """Helper: GET `url` and assert the response status (default 200)."""
        response = self.client.get(url)
        self.assertEqual(response.status_code, status_code)
    def test_list_events_url(self):
        self.url_returns_200(reverse('events.list'))
    def test_create_event_url(self):
        self.url_returns_200(reverse('events.create_event'))
    def test_delete_event_url(self):
        # Deleting an event you own should succeed.
        user = User.objects.create_user(
            'johnaaaa',
            '<EMAIL>',
            'johnpasswordaaa'
        )
        self.client.login(username='johnaaaa', password='<PASSWORD>')
        category = Category.objects.create(
            name='unisdjsd',
            description='cool description',
            slug='tesddssst',
        )
        event = Event.objects.create(
            title='delete',
            description='cool description',
            slug='delete',
            added_by=user,
        )
        event.save()
        event.category.add(category)
        self.url_returns_200(reverse('events.del', kwargs={'slug': 'delete'}))
    def test_delete_event_url_unsuccessful(self):
        # Deleting an event owned by someone else must be forbidden (403).
        user = User.objects.create_user(
            'johnaaaa',
            '<EMAIL>',
            'johnpasswordaaa'
        )
        user2 = User.objects.create_user(
            'johnaaaa2',
            '<EMAIL>',
            '<PASSWORD>'
        )
        self.client.login(username='johnaaaa', password='<PASSWORD>')
        category = Category.objects.create(
            name='unisdjsd',
            description='cool description',
            slug='tesddssst',
        )
        event = Event.objects.create(
            title='delete',
            description='cool description',
            slug='delete',
            added_by=user2,
        )
        event.save()
        event.category.add(category)
        response = self.client.get(
            reverse(
                'events.del', kwargs={
                    'slug': 'delete'}))
        self.assertEquals(response.status_code, 403)
    def test_view_event_url(self):
        user2 = User.objects.create_user(
            username='testuser2',
            password='<PASSWORD>'
        )
        user2.details = AccountDetails.objects.create(
            user=user2,
            description='cool description',
            slug='userslug2'
        )
        self.user.details.friends.add(user2)
        self.url_returns_200(reverse('event', kwargs={'slug': 'event'}))
    def test_all_events_feed_url(self):
        self.url_returns_200(reverse('event_feed'))
    def test_latest_events_feed_url(self):
        self.url_returns_200(reverse('latest_event_feed'))
    def test_join_event(self):
        self.url_returns_200(reverse('events.join', kwargs={'slug': 'event'}))
    def test_unjoin_event(self):
        self.url_returns_200(
            reverse(
                'events.rm_join',
                kwargs={
                    'slug': 'event'}))
    def test_event_settings_url(self):
        self.url_returns_200(
            reverse(
                'events.settings',
                kwargs={
                    'slug': 'event'}))
    def test_event_invites_url(self):
        self.url_returns_200(reverse('events.invites'))
    def test_event_invite_url(self):
        self.url_returns_200(
            reverse(
                'events.invite',
                kwargs={
                    'slug': 'userslug',
                    'event': 'event'}))
    def test_event_url(self):
        self.url_returns_200(reverse('events.event', kwargs={'slug': 'event'}))
    # NOTE(review): this method is shadowed by the second
    # `test_add_teammate` defined further down; Python keeps only the last
    # definition, so this test never runs.  Rename one of them.
    def test_add_teammate(self):
        user = User.objects.create_user(
            'johnaaaa',
            '<EMAIL>',
            'johnpasswordaaa'
        )
        event = Event.objects.create(
            title='testy',
            description='cool description',
            slug='eventааааа',
            added_by=user,
        )
        self.client.login(username='johnaaaa', password='<PASSWORD>')
        # NOTE(review): this passes a raw path (not reverse()d and without a
        # leading slash) to the helper -- verify the URL is actually valid.
        self.url_returns_200('events/userslug/eventааааа/add_teammate')
    def test_get_tasks_no_tasks(self):
        # With no tasks created, the board headers should not render.
        response = self.client.get(reverse('events.tasks'))
        self.assertNotContains(response, 'TO DO:')
        self.assertNotContains(response, 'DOING:')
        self.assertEqual(response.status_code, 200)
    # def test_get_tasks(self):
    #     task_title = 'Very cooollll'
    #     task = Task.objects.create(
    #         title=task_title,
    #         event=self.event,
    #         slug='event',
    #         assignee=self.user,
    #         status='TODO'
    #     )
    #     self.client.login(username='john', password='<PASSWORD>')
    #     response = self.client.get(reverse('events.tasks'))
    #     self.assertContains(response, task_title)
    #     self.assertEqual(response.status_code, 200)
    def test_confirm_invite(self):
        user2 = User.objects.create_user(
            'johnaaaa',
            '<EMAIL>',
            'johnpasswordaaa'
        )
        Invite.objects.create(
            invited_user=self.user,
            invited_by=user2,
            event=self.event)
        self.url_returns_200(
            reverse(
                'events.confirm_invite',
                kwargs={
                    'slug': self.event.slug}))
    def test_decline_invite(self):
        user2 = User.objects.create_user(
            'johnaaaa',
            '<EMAIL>',
            'johnpasswordaaa'
        )
        Invite.objects.create(
            invited_user=self.user,
            invited_by=user2,
            event=self.event)
        self.url_returns_200(
            reverse(
                'invites.decline_invite',
                kwargs={
                    'slug': self.event.slug}))
    def test_add_teammate_no_friends(self):
        # Without any friends, the add-teammate page renders but offers no
        # 'Find' search control.
        self.url_returns_200(
            reverse(
                'events.add_teammate',
                kwargs={
                    'slug': self.event.slug}))
        response = self.client.get(
            reverse(
                'events.add_teammate',
                kwargs={
                    'slug': self.event.slug}))
        self.assertEqual(response.status_code, 200)
        self.assertNotContains(response, 'Find')
    def test_add_teammate(self):
        # With a friend present, the add-teammate page shows the 'Find'
        # search control.
        user2 = User.objects.create_user(
            'friendddddddd',
            '<EMAIL>',
            'johnpassword'
        )
        user2.details = AccountDetails.objects.create(
            user=user2,
            description='cool description',
            slug='useddddrslug'
        )
        self.user.details.friends.add(user2)
        self.user.save()
        self.user.details.save()
        self.url_returns_200(
            reverse(
                'events.add_teammate',
                kwargs={
                    'slug': self.event.slug
                }
            )
        )
        response = self.client.get(
            reverse(
                'events.add_teammate',
                kwargs={
                    'slug': self.event.slug}))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Find')
    def test_event_team_add(self):
        user2 = User.objects.create_user(
            'johnaaaa',
            '<EMAIL>',
            'johnpasswordaaa'
        )
        response = self.client.get(
            reverse(
                'events.event_team_add',
                kwargs={
                    'slug': self.event.slug,
                    'user': user2
                }))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Success')
        self.assertContains(response, user2.username)
    def test_delete_comment_by_slug(self):
        Comment.objects.create(
            event=self.event,
            author=self.user,
            title='opaaa',
            content='sdasdsa')
        comment = Comment.objects.first()
        self.url_returns_200(
            reverse(
                'events.comment.del',
                kwargs={
                    'slug': self.event.slug,
                    'comment': comment.pk}))
    def test_edit_comment_by_slug(self):
        Comment.objects.create(
            event=self.event,
            author=self.user,
            title='opaaa',
            content='sdasdsa')
        comment = Comment.objects.first()
        response = self.client.get(
            reverse(
                'events.comment.edit',
                kwargs={
                    'slug': self.event.slug,
                    'comment': comment.pk}))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'opaaa')
    def test_event_board(self):
        self.url_returns_200(
            reverse(
                'events.board', kwargs={
                    'slug': self.event.slug}))
    def test_my_events(self):
        event = Event.objects.create(
            title='testy',
            description='cool description',
            slug='eventааааа',
            added_by=self.user,
        )
        event.attendees.add(self.user)
        response = self.client.get(reverse('events.my_events'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'testy')
    def test_events_I_host(self):
        event = Event.objects.create(
            title='testy',
            description='cool description',
            slug='eventааааа',
            added_by=self.user,
        )
        event.attendees.add(self.user)
        response = self.client.get(reverse('events.events_I_host'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'testy')
    def test_show_random_event(self):
        # A redirect (302) to some random event is expected.
        event = Event.objects.create(
            title='testy',
            description='cool description',
            slug='eventааааа',
            added_by=self.user,
        )
        response = self.client.get(reverse('events.show_random_event'))
        self.assertEqual(response.status_code, 302)
    def test_search_json(self):
        response = self.client.get(reverse('events.search_json', kwargs={
            'category_id': 1,
            'slug': 'test'}))
        self.assertEqual(response.status_code, 200)
| [
"accounts.models.AccountDetails.objects.create",
"events.models.Invite.objects.create",
"categories.models.Category.objects.create",
"events.models.Comment.objects.create",
"events.models.Event.objects.create",
"events.models.Comment.objects.first",
"django.urls.reverse",
"django.contrib.auth.models.U... | [((3234, 3242), 'django.test.Client', 'Client', ([], {}), '()\n', (3240, 3242), False, 'from django.test import Client\n'), ((507, 515), 'django.test.Client', 'Client', ([], {}), '()\n', (513, 515), False, 'from django.test import Client\n'), ((602, 703), 'categories.models.Category.objects.create', 'Category.objects.create', ([], {'name': '"""test event category"""', 'description': '"""cool description"""', 'slug': '"""test"""'}), "(name='test event category', description=\n 'cool description', slug='test')\n", (625, 703), False, 'from categories.models import Category\n'), ((1158, 1180), 'django.urls.reverse', 'reverse', (['"""events.list"""'], {}), "('events.list')\n", (1165, 1180), False, 'from django.urls import reverse\n'), ((1375, 1397), 'django.urls.reverse', 'reverse', (['"""events.list"""'], {}), "('events.list')\n", (1382, 1397), False, 'from django.urls import reverse\n'), ((1990, 1998), 'django.test.Client', 'Client', ([], {}), '()\n', (1996, 1998), False, 'from django.test import Client\n'), ((2085, 2176), 'categories.models.Category.objects.create', 'Category.objects.create', ([], {'name': '"""test event"""', 'description': '"""cool description"""', 'slug': '"""test"""'}), "(name='test event', description='cool description',\n slug='test')\n", (2108, 2176), False, 'from categories.models import Category\n'), ((3287, 3295), 'django.test.Client', 'Client', ([], {}), '()\n', (3293, 3295), False, 'from django.test import Client\n'), ((3316, 3375), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (['"""john"""', '"""<EMAIL>"""', '"""johnpassword"""'], {}), "('john', '<EMAIL>', 'johnpassword')\n", (3340, 3375), False, 'from django.contrib.auth.models import User\n'), ((3451, 3550), 'accounts.models.AccountDetails.objects.create', 'AccountDetails.objects.create', ([], {'user': 'self.user', 'description': '"""cool description"""', 'slug': '"""userslug"""'}), "(user=self.user, description=\n 
'cool description', slug='userslug')\n", (3480, 3550), False, 'from accounts.models import AccountDetails\n'), ((3678, 3769), 'categories.models.Category.objects.create', 'Category.objects.create', ([], {'name': '"""test event"""', 'description': '"""cool description"""', 'slug': '"""test"""'}), "(name='test event', description='cool description',\n slug='test')\n", (3701, 3769), False, 'from categories.models import Category\n'), ((3835, 3941), 'events.models.Event.objects.create', 'Event.objects.create', ([], {'title': '"""testy"""', 'description': '"""cool description"""', 'slug': '"""event"""', 'added_by': 'self.user'}), "(title='testy', description='cool description', slug=\n 'event', added_by=self.user)\n", (3855, 3941), False, 'from events.models import Event\n'), ((4533, 4599), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (['"""johnaaaa"""', '"""<EMAIL>"""', '"""johnpasswordaaa"""'], {}), "('johnaaaa', '<EMAIL>', 'johnpasswordaaa')\n", (4557, 4599), False, 'from django.contrib.auth.models import User\n'), ((4737, 4831), 'categories.models.Category.objects.create', 'Category.objects.create', ([], {'name': '"""unisdjsd"""', 'description': '"""cool description"""', 'slug': '"""tesddssst"""'}), "(name='unisdjsd', description='cool description',\n slug='tesddssst')\n", (4760, 4831), False, 'from categories.models import Category\n'), ((4891, 4994), 'events.models.Event.objects.create', 'Event.objects.create', ([], {'title': '"""delete"""', 'description': '"""cool description"""', 'slug': '"""delete"""', 'added_by': 'user'}), "(title='delete', description='cool description', slug=\n 'delete', added_by=user)\n", (4911, 4994), False, 'from events.models import Event\n'), ((5252, 5318), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (['"""johnaaaa"""', '"""<EMAIL>"""', '"""johnpasswordaaa"""'], {}), "('johnaaaa', '<EMAIL>', 'johnpasswordaaa')\n", (5276, 5318), False, 'from 
django.contrib.auth.models import User\n'), ((5382, 5444), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (['"""johnaaaa2"""', '"""<EMAIL>"""', '"""<PASSWORD>"""'], {}), "('johnaaaa2', '<EMAIL>', '<PASSWORD>')\n", (5406, 5444), False, 'from django.contrib.auth.models import User\n'), ((5581, 5675), 'categories.models.Category.objects.create', 'Category.objects.create', ([], {'name': '"""unisdjsd"""', 'description': '"""cool description"""', 'slug': '"""tesddssst"""'}), "(name='unisdjsd', description='cool description',\n slug='tesddssst')\n", (5604, 5675), False, 'from categories.models import Category\n'), ((5735, 5839), 'events.models.Event.objects.create', 'Event.objects.create', ([], {'title': '"""delete"""', 'description': '"""cool description"""', 'slug': '"""delete"""', 'added_by': 'user2'}), "(title='delete', description='cool description', slug=\n 'delete', added_by=user2)\n", (5755, 5839), False, 'from events.models import Event\n'), ((6193, 6262), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': '"""testuser2"""', 'password': '"""<PASSWORD>"""'}), "(username='testuser2', password='<PASSWORD>')\n", (6217, 6262), False, 'from django.contrib.auth.models import User\n'), ((6322, 6417), 'accounts.models.AccountDetails.objects.create', 'AccountDetails.objects.create', ([], {'user': 'user2', 'description': '"""cool description"""', 'slug': '"""userslug2"""'}), "(user=user2, description='cool description',\n slug='userslug2')\n", (6351, 6417), False, 'from accounts.models import AccountDetails\n'), ((7743, 7809), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (['"""johnaaaa"""', '"""<EMAIL>"""', '"""johnpasswordaaa"""'], {}), "('johnaaaa', '<EMAIL>', 'johnpasswordaaa')\n", (7767, 7809), False, 'from django.contrib.auth.models import User\n'), ((7872, 7978), 'events.models.Event.objects.create', 'Event.objects.create', ([], {'title': 
'"""testy"""', 'description': '"""cool description"""', 'slug': '"""eventааааа"""', 'added_by': 'user'}), "(title='testy', description='cool description', slug=\n 'eventааааа', added_by=user)\n", (7892, 7978), False, 'from events.models import Event\n'), ((8994, 9060), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (['"""johnaaaa"""', '"""<EMAIL>"""', '"""johnpasswordaaa"""'], {}), "('johnaaaa', '<EMAIL>', 'johnpasswordaaa')\n", (9018, 9060), False, 'from django.contrib.auth.models import User\n'), ((9115, 9201), 'events.models.Invite.objects.create', 'Invite.objects.create', ([], {'invited_user': 'self.user', 'invited_by': 'user2', 'event': 'self.event'}), '(invited_user=self.user, invited_by=user2, event=self.\n event)\n', (9136, 9201), False, 'from events.models import Invite\n'), ((9450, 9516), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (['"""johnaaaa"""', '"""<EMAIL>"""', '"""johnpasswordaaa"""'], {}), "('johnaaaa', '<EMAIL>', 'johnpasswordaaa')\n", (9474, 9516), False, 'from django.contrib.auth.models import User\n'), ((9571, 9657), 'events.models.Invite.objects.create', 'Invite.objects.create', ([], {'invited_user': 'self.user', 'invited_by': 'user2', 'event': 'self.event'}), '(invited_user=self.user, invited_by=user2, event=self.\n event)\n', (9592, 9657), False, 'from events.models import Invite\n'), ((10382, 10450), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (['"""friendddddddd"""', '"""<EMAIL>"""', '"""johnpassword"""'], {}), "('friendddddddd', '<EMAIL>', 'johnpassword')\n", (10406, 10450), False, 'from django.contrib.auth.models import User\n'), ((10521, 10619), 'accounts.models.AccountDetails.objects.create', 'AccountDetails.objects.create', ([], {'user': 'user2', 'description': '"""cool description"""', 'slug': '"""useddddrslug"""'}), "(user=user2, description='cool description',\n slug='useddddrslug')\n", (10550, 10619), False, 'from 
accounts.models import AccountDetails\n'), ((11286, 11352), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (['"""johnaaaa"""', '"""<EMAIL>"""', '"""johnpasswordaaa"""'], {}), "('johnaaaa', '<EMAIL>', 'johnpasswordaaa')\n", (11310, 11352), False, 'from django.contrib.auth.models import User\n'), ((11829, 11925), 'events.models.Comment.objects.create', 'Comment.objects.create', ([], {'event': 'self.event', 'author': 'self.user', 'title': '"""opaaa"""', 'content': '"""sdasdsa"""'}), "(event=self.event, author=self.user, title='opaaa',\n content='sdasdsa')\n", (11851, 11925), False, 'from events.models import Comment\n'), ((11989, 12012), 'events.models.Comment.objects.first', 'Comment.objects.first', ([], {}), '()\n', (12010, 12012), False, 'from events.models import Comment\n'), ((12267, 12363), 'events.models.Comment.objects.create', 'Comment.objects.create', ([], {'event': 'self.event', 'author': 'self.user', 'title': '"""opaaa"""', 'content': '"""sdasdsa"""'}), "(event=self.event, author=self.user, title='opaaa',\n content='sdasdsa')\n", (12289, 12363), False, 'from events.models import Comment\n'), ((12427, 12450), 'events.models.Comment.objects.first', 'Comment.objects.first', ([], {}), '()\n', (12448, 12450), False, 'from events.models import Comment\n'), ((12981, 13092), 'events.models.Event.objects.create', 'Event.objects.create', ([], {'title': '"""testy"""', 'description': '"""cool description"""', 'slug': '"""eventааааа"""', 'added_by': 'self.user'}), "(title='testy', description='cool description', slug=\n 'eventааааа', added_by=self.user)\n", (13001, 13092), False, 'from events.models import Event\n'), ((13400, 13511), 'events.models.Event.objects.create', 'Event.objects.create', ([], {'title': '"""testy"""', 'description': '"""cool description"""', 'slug': '"""eventааааа"""', 'added_by': 'self.user'}), "(title='testy', description='cool description', slug=\n 'eventааааа', added_by=self.user)\n", (13420, 13511), False, 
'from events.models import Event\n'), ((13827, 13938), 'events.models.Event.objects.create', 'Event.objects.create', ([], {'title': '"""testy"""', 'description': '"""cool description"""', 'slug': '"""eventааааа"""', 'added_by': 'self.user'}), "(title='testy', description='cool description', slug=\n 'eventааааа', added_by=self.user)\n", (13847, 13938), False, 'from events.models import Event\n'), ((881, 945), 'events.models.Event.objects.create', 'Event.objects.create', ([], {'title': 'eventstring', 'description': 'eventstring'}), '(title=eventstring, description=eventstring)\n', (901, 945), False, 'from events.models import Event\n'), ((2355, 2419), 'events.models.Event.objects.create', 'Event.objects.create', ([], {'title': 'eventstring', 'description': 'eventstring'}), '(title=eventstring, description=eventstring)\n', (2375, 2419), False, 'from events.models import Event\n'), ((2645, 2666), 'django.urls.reverse', 'reverse', (['"""event_feed"""'], {}), "('event_feed')\n", (2652, 2666), False, 'from django.urls import reverse\n'), ((2919, 2947), 'django.urls.reverse', 'reverse', (['"""latest_event_feed"""'], {}), "('latest_event_feed')\n", (2926, 2947), False, 'from django.urls import reverse\n'), ((4357, 4379), 'django.urls.reverse', 'reverse', (['"""events.list"""'], {}), "('events.list')\n", (4364, 4379), False, 'from django.urls import reverse\n'), ((4448, 4478), 'django.urls.reverse', 'reverse', (['"""events.create_event"""'], {}), "('events.create_event')\n", (4455, 4478), False, 'from django.urls import reverse\n'), ((5136, 5184), 'django.urls.reverse', 'reverse', (['"""events.del"""'], {'kwargs': "{'slug': 'delete'}"}), "('events.del', kwargs={'slug': 'delete'})\n", (5143, 5184), False, 'from django.urls import reverse\n'), ((6000, 6048), 'django.urls.reverse', 'reverse', (['"""events.del"""'], {'kwargs': "{'slug': 'delete'}"}), "('events.del', kwargs={'slug': 'delete'})\n", (6007, 6048), False, 'from django.urls import reverse\n'), ((6534, 6576), 
'django.urls.reverse', 'reverse', (['"""event"""'], {'kwargs': "{'slug': 'event'}"}), "('event', kwargs={'slug': 'event'})\n", (6541, 6576), False, 'from django.urls import reverse\n'), ((6648, 6669), 'django.urls.reverse', 'reverse', (['"""event_feed"""'], {}), "('event_feed')\n", (6655, 6669), False, 'from django.urls import reverse\n'), ((6744, 6772), 'django.urls.reverse', 'reverse', (['"""latest_event_feed"""'], {}), "('latest_event_feed')\n", (6751, 6772), False, 'from django.urls import reverse\n'), ((6835, 6883), 'django.urls.reverse', 'reverse', (['"""events.join"""'], {'kwargs': "{'slug': 'event'}"}), "('events.join', kwargs={'slug': 'event'})\n", (6842, 6883), False, 'from django.urls import reverse\n'), ((6961, 7012), 'django.urls.reverse', 'reverse', (['"""events.rm_join"""'], {'kwargs': "{'slug': 'event'}"}), "('events.rm_join', kwargs={'slug': 'event'})\n", (6968, 7012), False, 'from django.urls import reverse\n'), ((7150, 7202), 'django.urls.reverse', 'reverse', (['"""events.settings"""'], {'kwargs': "{'slug': 'event'}"}), "('events.settings', kwargs={'slug': 'event'})\n", (7157, 7202), False, 'from django.urls import reverse\n'), ((7326, 7351), 'django.urls.reverse', 'reverse', (['"""events.invites"""'], {}), "('events.invites')\n", (7333, 7351), False, 'from django.urls import reverse\n'), ((7433, 7504), 'django.urls.reverse', 'reverse', (['"""events.invite"""'], {'kwargs': "{'slug': 'userslug', 'event': 'event'}"}), "('events.invite', kwargs={'slug': 'userslug', 'event': 'event'})\n", (7440, 7504), False, 'from django.urls import reverse\n'), ((7640, 7689), 'django.urls.reverse', 'reverse', (['"""events.event"""'], {'kwargs': "{'slug': 'event'}"}), "('events.event', kwargs={'slug': 'event'})\n", (7647, 7689), False, 'from django.urls import reverse\n'), ((8250, 8273), 'django.urls.reverse', 'reverse', (['"""events.tasks"""'], {}), "('events.tasks')\n", (8257, 8273), False, 'from django.urls import reverse\n'), ((9276, 9342), 
'django.urls.reverse', 'reverse', (['"""events.confirm_invite"""'], {'kwargs': "{'slug': self.event.slug}"}), "('events.confirm_invite', kwargs={'slug': self.event.slug})\n", (9283, 9342), False, 'from django.urls import reverse\n'), ((9732, 9799), 'django.urls.reverse', 'reverse', (['"""invites.decline_invite"""'], {'kwargs': "{'slug': self.event.slug}"}), "('invites.decline_invite', kwargs={'slug': self.event.slug})\n", (9739, 9799), False, 'from django.urls import reverse\n'), ((9942, 10006), 'django.urls.reverse', 'reverse', (['"""events.add_teammate"""'], {'kwargs': "{'slug': self.event.slug}"}), "('events.add_teammate', kwargs={'slug': self.event.slug})\n", (9949, 10006), False, 'from django.urls import reverse\n'), ((10111, 10175), 'django.urls.reverse', 'reverse', (['"""events.add_teammate"""'], {'kwargs': "{'slug': self.event.slug}"}), "('events.add_teammate', kwargs={'slug': self.event.slug})\n", (10118, 10175), False, 'from django.urls import reverse\n'), ((10808, 10872), 'django.urls.reverse', 'reverse', (['"""events.add_teammate"""'], {'kwargs': "{'slug': self.event.slug}"}), "('events.add_teammate', kwargs={'slug': self.event.slug})\n", (10815, 10872), False, 'from django.urls import reverse\n'), ((11016, 11080), 'django.urls.reverse', 'reverse', (['"""events.add_teammate"""'], {'kwargs': "{'slug': self.event.slug}"}), "('events.add_teammate', kwargs={'slug': self.event.slug})\n", (11023, 11080), False, 'from django.urls import reverse\n'), ((11448, 11533), 'django.urls.reverse', 'reverse', (['"""events.event_team_add"""'], {'kwargs': "{'slug': self.event.slug, 'user': user2}"}), "('events.event_team_add', kwargs={'slug': self.event.slug, 'user':\n user2})\n", (11455, 11533), False, 'from django.urls import reverse\n'), ((12055, 12145), 'django.urls.reverse', 'reverse', (['"""events.comment.del"""'], {'kwargs': "{'slug': self.event.slug, 'comment': comment.pk}"}), "('events.comment.del', kwargs={'slug': self.event.slug, 'comment':\n comment.pk})\n", 
(12062, 12145), False, 'from django.urls import reverse\n'), ((12500, 12591), 'django.urls.reverse', 'reverse', (['"""events.comment.edit"""'], {'kwargs': "{'slug': self.event.slug, 'comment': comment.pk}"}), "('events.comment.edit', kwargs={'slug': self.event.slug, 'comment':\n comment.pk})\n", (12507, 12591), False, 'from django.urls import reverse\n'), ((12837, 12894), 'django.urls.reverse', 'reverse', (['"""events.board"""'], {'kwargs': "{'slug': self.event.slug}"}), "('events.board', kwargs={'slug': self.event.slug})\n", (12844, 12894), False, 'from django.urls import reverse\n'), ((13221, 13248), 'django.urls.reverse', 'reverse', (['"""events.my_events"""'], {}), "('events.my_events')\n", (13228, 13248), False, 'from django.urls import reverse\n'), ((13640, 13671), 'django.urls.reverse', 'reverse', (['"""events.events_I_host"""'], {}), "('events.events_I_host')\n", (13647, 13671), False, 'from django.urls import reverse\n'), ((14028, 14063), 'django.urls.reverse', 'reverse', (['"""events.show_random_event"""'], {}), "('events.show_random_event')\n", (14035, 14063), False, 'from django.urls import reverse\n'), ((14185, 14257), 'django.urls.reverse', 'reverse', (['"""events.search_json"""'], {'kwargs': "{'category_id': 1, 'slug': 'test'}"}), "('events.search_json', kwargs={'category_id': 1, 'slug': 'test'})\n", (14192, 14257), False, 'from django.urls import reverse\n')] |
# -*- coding: utf-8 -*-
"""
@inproceedings{DBLP:conf/cvpr/SunLCS19,
author = {<NAME> and
<NAME> and
Tat{-}<NAME> and
<NAME>},
title = {Meta-Transfer Learning for Few-Shot Learning},
booktitle = {{IEEE} Conference on Computer Vision and Pattern Recognition, {CVPR}
2019, Long Beach, CA, USA, June 16-20, 2019},
pages = {403--412},
year = {2019},
url = {http://openaccess.thecvf.com/content_CVPR_2019/html/Sun_Meta-Transfer_Learning_for_Few
-Shot_Learning_CVPR_2019_paper.html},
doi = {10.1109/CVPR.2019.00049}
}
https://arxiv.org/abs/1812.02391
Adapted from https://github.com/yaoyao-liu/meta-transfer-learning.
"""
import torch
from torch import digamma, nn
import torch.nn.functional as F
import copy
from core.utils import accuracy
from .meta_model import MetaModel
from ..backbone.utils import convert_mtl_module
class MTLBaseLearner(nn.Module):
    """Single-linear-layer classifier adapted in the MTL inner loop.

    Holds one fully connected layer (weight + bias) whose parameters can be
    substituted per call through the ``the_vars`` argument of :meth:`forward`.
    This is how inner-loop gradient steps are applied without mutating the
    stored parameters of the module.
    """

    def __init__(self, ways, z_dim):
        super().__init__()
        self.ways = ways
        self.z_dim = z_dim
        # Parameters live in an explicit list so the inner loop can address
        # them positionally: index 0 is the weight, index 1 is the bias.
        self.vars = nn.ParameterList()
        self.fc1_w = nn.Parameter(torch.ones([self.ways, self.z_dim]))
        torch.nn.init.kaiming_normal_(self.fc1_w)
        self.fc1_b = nn.Parameter(torch.zeros(self.ways))
        self.vars.append(self.fc1_w)
        self.vars.append(self.fc1_b)

    def forward(self, input_x, the_vars=None):
        """Apply the linear layer, optionally with substituted parameters.

        :param input_x: input features of shape (..., z_dim).
        :param the_vars: optional [weight, bias] list overriding the stored
            parameters (used for the inner-loop "fast weights").
        """
        params = self.vars if the_vars is None else the_vars
        weight, bias = params[0], params[1]
        return F.linear(input_x, weight, bias)

    def parameters(self):
        """Expose the positional parameter list used by the inner loop."""
        return self.vars
class MTL(MetaModel):
    """Meta-Transfer Learning (MTL) few-shot classifier.

    Embeds each episode with the backbone (``self.emb_func`` from the
    MetaModel base class) and classifies with a small ``MTLBaseLearner``
    head that is adapted per episode by a few gradient steps on the
    support set ("fast weights").
    """
    def __init__(self, feat_dim, num_classes, inner_param, use_MTL, **kwargs):
        """
        :param feat_dim: dimensionality of the embedding produced by the backbone.
        :param num_classes: number of classes (stored; not used directly here).
        :param inner_param: dict of inner-loop settings; "iter" is read in
            set_forward_adaptation.
        :param use_MTL: forwarded to convert_mtl_module to toggle the
            meta-transfer (scale/shift) convolution variant.
        """
        super(MTL, self).__init__(**kwargs)
        self.feat_dim = feat_dim
        self.num_classes = num_classes
        # way_num and device are provided by the MetaModel base class.
        self.base_learner = MTLBaseLearner(self.way_num, z_dim=self.feat_dim).to(self.device)
        self.inner_param = inner_param
        self.loss_func = nn.CrossEntropyLoss()
        convert_mtl_module(self, use_MTL)
    def set_forward(self, batch):
        """
        Meta-validation / meta-test forward pass.

        Adapts the base learner on the support set, then classifies the
        query set.  Returns (query logits, accuracy).
        """
        image, global_target = batch
        image = image.to(self.device)
        global_target = global_target.to(self.device)
        feat = self.emb_func(image)
        # mode=4: episode split convention defined by MetaModel.split_by_episode.
        support_feat, query_feat, support_target, query_target = self.split_by_episode(feat, mode=4)
        classifier, base_learner_weight = self.set_forward_adaptation(support_feat, support_target)
        output = classifier(query_feat, base_learner_weight)
        acc = accuracy(output, query_target)
        return output, acc
    def set_forward_loss(self, batch):
        """
        Meta-training forward pass.

        Same as set_forward but also returns the query-set cross-entropy
        loss: (query logits, accuracy, loss).
        """
        image, global_target = batch
        image = image.to(self.device)
        global_target = global_target.to(self.device)
        feat = self.emb_func(image)
        support_feat, query_feat, support_target, query_target = self.split_by_episode(feat, mode=4)
        classifier, base_learner_weight = self.set_forward_adaptation(support_feat, support_target)
        output = classifier(query_feat, base_learner_weight)
        loss = self.loss_func(output, query_target)
        acc = accuracy(output, query_target)
        return output, acc, loss
    def set_forward_adaptation(self, support_feat, support_target):
        """Adapt the base learner to an episode's support set.

        Runs ``inner_param["iter"]`` gradient-descent steps (fixed step size
        0.01) on the support loss, keeping the updated weights in a separate
        ``fast_parameters`` list so the stored parameters of
        ``self.base_learner`` are never mutated.

        :return: (classifier module, adapted parameter list).
        """
        classifier = self.base_learner
        # First step: gradient w.r.t. the learner's stored parameters.
        logit = self.base_learner(support_feat)
        loss = self.loss_func(logit, support_target)
        grad = torch.autograd.grad(loss, self.base_learner.parameters())
        fast_parameters = list(
            map(
                lambda p: p[1] - 0.01 * p[0],
                zip(grad, self.base_learner.parameters()),
            )
        )
        # Remaining steps: differentiate w.r.t. the fast weights themselves.
        for _ in range(1, self.inner_param["iter"]):
            logit = self.base_learner(support_feat, fast_parameters)
            loss = F.cross_entropy(logit, support_target)
            grad = torch.autograd.grad(loss, fast_parameters)
            fast_parameters = list(map(lambda p: p[1] - 0.01 * p[0], zip(grad, fast_parameters)))
        return classifier, fast_parameters
| [
"torch.nn.functional.linear",
"core.utils.accuracy",
"torch.nn.CrossEntropyLoss",
"torch.nn.init.kaiming_normal_",
"torch.autograd.grad",
"torch.nn.functional.cross_entropy",
"torch.nn.ParameterList",
"torch.zeros",
"torch.ones"
] | [((1130, 1148), 'torch.nn.ParameterList', 'nn.ParameterList', ([], {}), '()\n', (1146, 1148), False, 'from torch import digamma, nn\n'), ((1228, 1269), 'torch.nn.init.kaiming_normal_', 'torch.nn.init.kaiming_normal_', (['self.fc1_w'], {}), '(self.fc1_w)\n', (1257, 1269), False, 'import torch\n'), ((1582, 1613), 'torch.nn.functional.linear', 'F.linear', (['input_x', 'fc1_w', 'fc1_b'], {}), '(input_x, fc1_w, fc1_b)\n', (1590, 1613), True, 'import torch.nn.functional as F\n'), ((2064, 2085), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (2083, 2085), False, 'from torch import digamma, nn\n'), ((2658, 2688), 'core.utils.accuracy', 'accuracy', (['output', 'query_target'], {}), '(output, query_target)\n', (2666, 2688), False, 'from core.utils import accuracy\n'), ((3297, 3327), 'core.utils.accuracy', 'accuracy', (['output', 'query_target'], {}), '(output, query_target)\n', (3305, 3327), False, 'from core.utils import accuracy\n'), ((1183, 1218), 'torch.ones', 'torch.ones', (['[self.ways, self.z_dim]'], {}), '([self.ways, self.z_dim])\n', (1193, 1218), False, 'import torch\n'), ((1341, 1363), 'torch.zeros', 'torch.zeros', (['self.ways'], {}), '(self.ways)\n', (1352, 1363), False, 'import torch\n'), ((3964, 4002), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['logit', 'support_target'], {}), '(logit, support_target)\n', (3979, 4002), True, 'import torch.nn.functional as F\n'), ((4022, 4064), 'torch.autograd.grad', 'torch.autograd.grad', (['loss', 'fast_parameters'], {}), '(loss, fast_parameters)\n', (4041, 4064), False, 'import torch\n')] |
# coding=utf-8
from OTLMOW.OTLModel.BaseClasses.OTLAttribuut import OTLAttribuut
from OTLMOW.OTLModel.Classes.DwarseMarkeringToegang import DwarseMarkeringToegang
from OTLMOW.OTLModel.Datatypes.KlDwarseMarkeringVerschuindCode import KlDwarseMarkeringVerschuindCode
from OTLMOW.OTLModel.Datatypes.KlDwarseMarkeringVerschuindSoort import KlDwarseMarkeringVerschuindSoort
from OTLMOW.OTLModel.Datatypes.KwantWrdInDecimaleGraden import KwantWrdInDecimaleGraden
from OTLMOW.OTLModel.Datatypes.KwantWrdInVierkanteMeter import KwantWrdInVierkanteMeter
# Generated with OTLClassCreator. To modify: extend, do not edit
class DwarseMarkeringVerschuind(DwarseMarkeringToegang):
    """A slanted marking applied transversely across the road to warn, inform or regulate traffic."""
    typeURI = 'https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#DwarseMarkeringVerschuind'
    """The URI of the object according to https://www.w3.org/2001/XMLSchema#anyURI."""
    def __init__(self):
        # Generated attribute definitions: each OTLAttribuut mirrors one
        # property of the OTL specification (the Dutch `definition` strings
        # come from the specification and must stay as-is).
        super().__init__()
        self._basisoppervlakte = OTLAttribuut(field=KwantWrdInVierkanteMeter,
                                              naam='basisoppervlakte',
                                              label='oppervlakte',
                                              objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#DwarseMarkeringVerschuind.basisoppervlakte',
                                              definition='De basisoppervlakte van de dwarse markering in vierkante meter.',
                                              owner=self)
        self._code = OTLAttribuut(field=KlDwarseMarkeringVerschuindCode,
                                  naam='code',
                                  label='code',
                                  objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#DwarseMarkeringVerschuind.code',
                                  definition='De (COPRO/BENOR) code van dwarse markering.',
                                  owner=self)
        self._hoek = OTLAttribuut(field=KwantWrdInDecimaleGraden,
                                  naam='hoek',
                                  label='hoek',
                                  objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#DwarseMarkeringVerschuind.hoek',
                                  definition='De hoek van de verschuinde dwarsmarkering in decimale graden.',
                                  owner=self)
        self._oppervlakte = OTLAttribuut(field=KwantWrdInVierkanteMeter,
                                         naam='oppervlakte',
                                         label='oppervlakte',
                                         objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#DwarseMarkeringVerschuind.oppervlakte',
                                         definition='De oppervlakte van een dwarsmarkering na verschuining.',
                                         owner=self)
        self._soortOmschrijving = OTLAttribuut(field=KlDwarseMarkeringVerschuindSoort,
                                               naam='soortOmschrijving',
                                               label='soort omschrijving',
                                               objectUri='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#DwarseMarkeringVerschuind.soortOmschrijving',
                                               definition='De soort en tevens de omschrijving van dwarse markering.',
                                               owner=self)
    @property
    def basisoppervlakte(self):
        """The base surface area of the transverse marking in square metres."""
        return self._basisoppervlakte.get_waarde()
    @basisoppervlakte.setter
    def basisoppervlakte(self, value):
        self._basisoppervlakte.set_waarde(value, owner=self)
    @property
    def code(self):
        """The (COPRO/BENOR) code of the transverse marking."""
        return self._code.get_waarde()
    @code.setter
    def code(self, value):
        self._code.set_waarde(value, owner=self)
    @property
    def hoek(self):
        """The angle of the slanted transverse marking in decimal degrees."""
        return self._hoek.get_waarde()
    @hoek.setter
    def hoek(self, value):
        self._hoek.set_waarde(value, owner=self)
    @property
    def oppervlakte(self):
        """The surface area of a transverse marking after slanting."""
        return self._oppervlakte.get_waarde()
    @oppervlakte.setter
    def oppervlakte(self, value):
        self._oppervlakte.set_waarde(value, owner=self)
    @property
    def soortOmschrijving(self):
        """The kind and, at the same time, the description of the transverse marking."""
        return self._soortOmschrijving.get_waarde()
    @soortOmschrijving.setter
    def soortOmschrijving(self, value):
        self._soortOmschrijving.set_waarde(value, owner=self)
| [
"OTLMOW.OTLModel.BaseClasses.OTLAttribuut.OTLAttribuut"
] | [((1049, 1364), 'OTLMOW.OTLModel.BaseClasses.OTLAttribuut.OTLAttribuut', 'OTLAttribuut', ([], {'field': 'KwantWrdInVierkanteMeter', 'naam': '"""basisoppervlakte"""', 'label': '"""oppervlakte"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#DwarseMarkeringVerschuind.basisoppervlakte"""', 'definition': '"""De basisoppervlakte van de dwarse markering in vierkante meter."""', 'owner': 'self'}), "(field=KwantWrdInVierkanteMeter, naam='basisoppervlakte', label\n ='oppervlakte', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#DwarseMarkeringVerschuind.basisoppervlakte'\n , definition=\n 'De basisoppervlakte van de dwarse markering in vierkante meter.',\n owner=self)\n", (1061, 1364), False, 'from OTLMOW.OTLModel.BaseClasses.OTLAttribuut import OTLAttribuut\n'), ((1593, 1856), 'OTLMOW.OTLModel.BaseClasses.OTLAttribuut.OTLAttribuut', 'OTLAttribuut', ([], {'field': 'KlDwarseMarkeringVerschuindCode', 'naam': '"""code"""', 'label': '"""code"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#DwarseMarkeringVerschuind.code"""', 'definition': '"""De (COPRO/BENOR) code van dwarse markering."""', 'owner': 'self'}), "(field=KlDwarseMarkeringVerschuindCode, naam='code', label=\n 'code', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#DwarseMarkeringVerschuind.code'\n , definition='De (COPRO/BENOR) code van dwarse markering.', owner=self)\n", (1605, 1856), False, 'from OTLMOW.OTLModel.BaseClasses.OTLAttribuut import OTLAttribuut\n'), ((2034, 2316), 'OTLMOW.OTLModel.BaseClasses.OTLAttribuut.OTLAttribuut', 'OTLAttribuut', ([], {'field': 'KwantWrdInDecimaleGraden', 'naam': '"""hoek"""', 'label': '"""hoek"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#DwarseMarkeringVerschuind.hoek"""', 'definition': '"""De hoek van de verschuinde dwarsmarkering in decimale graden."""', 'owner': 'self'}), "(field=KwantWrdInDecimaleGraden, naam='hoek', label='hoek',\n 
objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#DwarseMarkeringVerschuind.hoek'\n , definition=\n 'De hoek van de verschuinde dwarsmarkering in decimale graden.', owner=self\n )\n", (2046, 2316), False, 'from OTLMOW.OTLModel.BaseClasses.OTLAttribuut import OTLAttribuut\n'), ((2492, 2783), 'OTLMOW.OTLModel.BaseClasses.OTLAttribuut.OTLAttribuut', 'OTLAttribuut', ([], {'field': 'KwantWrdInVierkanteMeter', 'naam': '"""oppervlakte"""', 'label': '"""oppervlakte"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#DwarseMarkeringVerschuind.oppervlakte"""', 'definition': '"""De oppervlakte van een dwarsmarkering na verschuining."""', 'owner': 'self'}), "(field=KwantWrdInVierkanteMeter, naam='oppervlakte', label=\n 'oppervlakte', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#DwarseMarkeringVerschuind.oppervlakte'\n , definition='De oppervlakte van een dwarsmarkering na verschuining.',\n owner=self)\n", (2504, 2783), False, 'from OTLMOW.OTLModel.BaseClasses.OTLAttribuut import OTLAttribuut\n'), ((3005, 3325), 'OTLMOW.OTLModel.BaseClasses.OTLAttribuut.OTLAttribuut', 'OTLAttribuut', ([], {'field': 'KlDwarseMarkeringVerschuindSoort', 'naam': '"""soortOmschrijving"""', 'label': '"""soort omschrijving"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#DwarseMarkeringVerschuind.soortOmschrijving"""', 'definition': '"""De soort en tevens de omschrijving van dwarse markering."""', 'owner': 'self'}), "(field=KlDwarseMarkeringVerschuindSoort, naam=\n 'soortOmschrijving', label='soort omschrijving', objectUri=\n 'https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#DwarseMarkeringVerschuind.soortOmschrijving'\n , definition='De soort en tevens de omschrijving van dwarse markering.',\n owner=self)\n", (3017, 3325), False, 'from OTLMOW.OTLModel.BaseClasses.OTLAttribuut import OTLAttribuut\n')] |
#!/usr/bin/env python3
"""
Copyright 2018 Couchbase, Inc
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This short script uses curl requests to search the last 100 builds of
a jenkins job to find recurring errors, written in Python3.
It results in printing a list of links to builds that match the search
As the requests package is not included within kv, you will need to either
download this package yourself or reference the one included inside
couchbase-cli.
This is currently limited to searching for log patterns contained within
one line of the logs, as the search checks line-by-line.
Usage: python jenkins_console_log_search.py -j <job-name> -s <RegEx Search term>
"""
import argparse
import re
import requests
import sys
import time
class ASCIIFormat:
    # ANSI escape sequences used to highlight the matched search term in
    # terminal output: BOLD starts bold text, END resets all formatting.
    BOLD = '\033[1m'
    END = '\033[0m'
# Search for searchPattern in logText, handling either a plain string or a
# compiled RegEx depending on the isRegex flag, and assuming that logText is
# line-separated by '\n's.
def search(logText, searchPattern, isRegex):
    """Return the list of matches of ``searchPattern`` within ``logText``.

    For a RegEx search the returned items are the captured groups of each
    match (or the whole match when the pattern has no groups).  For a plain
    string search the returned items are the full matching lines, with the
    search term wrapped in ANSI bold markers for terminal display.
    """
    output = []
    if isRegex:
        # Check regex against whole text.
        # Bug fix: the original iterated re.finditer(pattern, ...), silently
        # relying on the module-level global `pattern` instead of the
        # `searchPattern` argument passed by the caller.
        for find in re.finditer(searchPattern, logText):
            if find.groups():
                output.extend(find.groups())
            else:
                output.append(find.group(0))
    else:  # Not a RegEx: plain substring search, line by line
        for line in logText.split('\n'):
            if line.find(searchPattern) != -1:
                # Wrap the search term in ASCII formatting to make it bold
                output.append(line.replace(searchPattern,
                                           ASCIIFormat.BOLD + searchPattern
                                           + ASCIIFormat.END))
    return output
# --- Start Main Script ---
# Create argparser so the user can specify which job to search
argParser = argparse.ArgumentParser()
argParser.add_argument('--job', '-j', type=str,
                       help='The cv job to query. '
                            "Common jobs are: 'kv_engine-ASan-UBSan-master', "
                            "'kv_engine-clang_analyzer-master', "
                            "'kv_engine-linux-master', "
                            "'kv_engine-threadsanitizer-master', "
                            "'kv_engine-windows-master', "
                            "'kv_engine-clang_format', "
                            "'kv-engine-cv-perf'", required=True)
argParser.add_argument('--search', '-s', type=str, required=True,
                       help='The string to search the logs for in a RegEx format')
argParser.add_argument('--build-no', '-b', type=int,
                       help='The build number of cv job to check backwards from. '
                            '0 (default) fetches latest build number', default=0)
argParser.add_argument('--no-of-builds', '-n', type=int,
                       help='The number of builds to check back', default=100)
argParser.add_argument('--format', '-f', default="plain", type=str,
                       help="Select the format to print results. "
                            "Available formats are: "
                            "plain (default), log-line, jira")
argParser.add_argument('--url-prefix', '-u', type=str, default='cv',
                       help='Determine the endpoint of logs to check, '
                            'http://<url-prefix>.jenkins.couchbase.com')
args = argParser.parse_args()
# Build the base URL fragments for the Jenkins job from the CLI arguments.
job = 'job/' + args.job + '/'
serverURL = 'http://' + str(args.url_prefix) + '.jenkins.couchbase.com/'
# Control the eventual output format of the findings
availableFormats = ["plain", "log-line", "jira"]
outputFormat = args.format.lower()
assert outputFormat in availableFormats, "%r format is not supported" % outputFormat
consoleText = '/consoleText/'
resultURLs = {}
failedBuildNums = []
if args.build_no == 0:
    # Need to fetch the latest build number
    r = requests.get(serverURL + job + 'lastBuild/api/json')
    j = r.json()
    args.build_no = j['number']
# Determine whether the inputted search parameter is a regex: if it fails to
# compile, fall back to a plain substring search.
isRegex = True
try:
    pattern = re.compile(args.search)
    searchingFor = 'RegEx "' + args.search + '"'
except re.error:
    isRegex = False
    pattern = args.search
    searchingFor = '"' + args.search + '"'
print("Searching for", searchingFor, "in console logs of job:",
      args.job, "between build", args.build_no - (args.no_of_builds - 1),
      "and", args.build_no, file=sys.stderr)
# Trigger timing check start
start_time = time.time()
for i in range(0, args.no_of_builds):
    # Progress/status output goes to stderr so stdout stays clean for results.
    print('\r >>> Current progress: {} '.format(str(i)), end='',
          flush=True, file=sys.stderr)
    # Get the console log text from the jenkins job
    r = requests.get(serverURL + job + str(args.build_no-i) + consoleText)
    if r.status_code != 200:
        failedBuildNums.append(args.build_no-i)
    # NOTE(review): even when the request failed, r.text is still searched
    # below -- presumably intended as best-effort; confirm.
    # Perform Search
    output = []
    output.extend(search(r.text, pattern, isRegex))
    if output:
        resultURLs[serverURL + job + str(args.build_no-i) + '/console/'] = output
# Finish timing
print('\r Completed search in', (time.time() - start_time), 's', file=sys.stderr)
if failedBuildNums:
    print("Failed log request on build(s) no:", failedBuildNums, file=sys.stderr)
# Ensure above prints actually print before results (and not mangled inside results)
sys.stderr.flush()
# Result output
if not resultURLs:
    # Empty results, did not find any matches
    print("No matches found")
elif outputFormat == 'jira':
    # Print in a JIRA format (panel + noformat blocks); the ANSI bold markers
    # are stripped because JIRA cannot render them.
    print("{panel:title=Search for", searchingFor,
          "in console logs of job", args.job, "between build no",
          args.build_no - (args.no_of_builds - 1), "and", args.build_no, '}')
    for url in resultURLs:
        print('[', url, ']', sep="")
        print('{noformat}')
        for line in resultURLs[url]:
            print(line.replace(ASCIIFormat.BOLD, '').replace(ASCIIFormat.END, ''))
        print('{noformat}')
    print("{panel}")
elif outputFormat == "log-line":
    # Print findings with log line attached
    for url in resultURLs:
        print(url, ':')
        for line in resultURLs[url]:
            print('\t', line)
else: # outputFormat == "plain"
    # Print findings normally
    for url in resultURLs:
        print(url)
| [
"argparse.ArgumentParser",
"re.compile",
"sys.stderr.flush",
"requests.get",
"re.finditer",
"time.time"
] | [((2443, 2468), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2466, 2468), False, 'import argparse\n'), ((5095, 5106), 'time.time', 'time.time', ([], {}), '()\n', (5104, 5106), False, 'import time\n'), ((5926, 5944), 'sys.stderr.flush', 'sys.stderr.flush', ([], {}), '()\n', (5942, 5944), False, 'import sys\n'), ((4491, 4543), 'requests.get', 'requests.get', (["(serverURL + job + 'lastBuild/api/json')"], {}), "(serverURL + job + 'lastBuild/api/json')\n", (4503, 4543), False, 'import requests\n'), ((4689, 4712), 're.compile', 're.compile', (['args.search'], {}), '(args.search)\n', (4699, 4712), False, 'import re\n'), ((1625, 1654), 're.finditer', 're.finditer', (['pattern', 'logText'], {}), '(pattern, logText)\n', (1636, 1654), False, 'import re\n'), ((5689, 5700), 'time.time', 'time.time', ([], {}), '()\n', (5698, 5700), False, 'import time\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Alters Project.everyone_contributes to a CharField restricted to the
    # three values 'true', 'auth' and 'false' (bytestrings for Python 2
    # compatibility), defaulting to 'auth'.
    dependencies = [
        ('projects', '0003_auto_20150123_1148'),
    ]
    operations = [
        migrations.AlterField(
            model_name='project',
            name='everyone_contributes',
            field=models.CharField(default=b'auth', max_length=20, choices=[(b'true', b'true'), (b'auth', b'auth'), (b'false', b'false')]),
            preserve_default=True,
        ),
    ]
| [
"django.db.models.CharField"
] | [((368, 493), 'django.db.models.CharField', 'models.CharField', ([], {'default': "b'auth'", 'max_length': '(20)', 'choices': "[(b'true', b'true'), (b'auth', b'auth'), (b'false', b'false')]"}), "(default=b'auth', max_length=20, choices=[(b'true', b'true'\n ), (b'auth', b'auth'), (b'false', b'false')])\n", (384, 493), False, 'from django.db import models, migrations\n')] |
"""
nginterface.py: NovaGenesis Interface
"""
__author__ = "<NAME>"
__copyright__ = "Copyright 2016, CogRIoT Project"
__credits__ = "<NAME>"
__license__ = "MIT"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
import sys
import zmq
import threading
from bzrlib.plugins.launchpad.lp_api_lite import json
sys.path.append("../../")
from utils.logmsgs import logger
class NGInterface():
    '''
    Provide interface communication between CellController and NovaGenesis
    to exchange configuration parameters with NovaGenesis.
    '''
    def __init__(self, config, ng_local_address, ng_remote_address):
        '''
        Constructor.

        :param config: object exposing the currently loaded configuration
            (cellcontroller_id, cellcontroller_location, sensing_* fields).
        :param ng_local_address: ZMQ address to bind the local PUSH socket on.
        :param ng_remote_address: ZMQ address of the remote PULL endpoint.
        '''
        #TODO: implement the threads (timers) to listen and to publish messages to/from novagenesis
        #TODO: Parse and return configuration changes to CellController
        #TODO: CellController must apply config changes
        self.logger = logger.Logger()
        self.logger.log('NovaGenesis interface - Starting')
        self.config = config
        contextPull = zmq.Context()
        # receive socket (PULL): commands coming from NovaGenesis
        self.consumer_receiver = contextPull.socket(zmq.PULL)
        self.consumer_receiver.connect(ng_remote_address)
        contextPush = zmq.Context()
        # transmit socket (PUSH): answers sent back to NovaGenesis
        self.consumer_sender = contextPush.socket(zmq.PUSH)
        self.consumer_sender.bind(ng_local_address)
        # Background daemon thread that blocks on the PULL socket.
        workThread = threading.Thread(name='NGZMQPullerWork', target=self.ZMQPuller)
        workThread.daemon = True
        workThread.start()
        self.logger.log("NovaGenesis interface ZMQ puller thread started.")
        self.logger.log('NovaGenesis interface - Started')
    def setConfiguration(self, Configuration):
        '''
        Apply a configuration received from NovaGenesis.

        Placeholder: applying the configuration is not implemented yet, but
        callers always receive an explicit 'ack'.
        '''
        self.logger.log('[NG-CommandParser] You should implement this feature some time')
        return 'ack'
    def getInformation(self):
        '''
        Build the capability/configuration report pushed back to NovaGenesis
        in answer to a 'get_info' command.  Sensing capacities are currently
        hard-coded; cell identity and the current sensing configuration are
        read from self.config.
        '''
        strout = {'capacities':
                  {
                      'sensing_freq_min': '100000000',
                      'sensing_freq_max': '1800000000',
                      'sensing_bw_min': '1024000',
                      'sensing_bw_max': '2048000',
                      'sensing_sectors': '1',
                      'sensing_direction': '0',
                  },
                  'cell_info':
                  {
                      'cellcontroller_id': self.config.cellcontroller_id,
                      'cellcontroller_location': self.config.cellcontroller_location
                  },
                  'current_config':
                  {
                      'sensing_freq_start': [str(self.config.sensing_start_freq), "-1"],
                      'sensing_freq_stop': [str(self.config.sensing_stop_freq), "-1"],
                      'sensing_bw': str(self.config.sensing_band_width),
                  }
                  }
        return strout
    def CommandParser(self, ReceivedCommand):
        '''
        Dispatch a raw command string received from NovaGenesis and push
        the corresponding answer ('ack'/'nak' or the information report).
        '''
        self.logger.log('[NG-CommandParser] Received command, will be analysed')
        if 'set_config' in ReceivedCommand:
            self.logger.log('[NG-CommandParser] Received command set_config')
            return_message = self.setConfiguration(ReceivedCommand)
            # Bug fix: the original compared with "is 'ack'", which tests
            # object identity and only worked by accident of CPython string
            # interning; value equality is the correct check.
            if return_message == 'ack':
                answer = {'ans': 'ack'}
            else:
                answer = {'ans': 'nak'}
            self.ZMQPusher(answer)
        elif 'get_info' in ReceivedCommand:
            self.logger.log('[NG-CommandParser] Received command get_info')
            print(ReceivedCommand)
            answer = self.getInformation()
            self.ZMQPusher(answer)
        else:
            self.logger.log('[NG-CommandParser] Received unrecognized command')
            answer = {'ans': 'nak'}
            self.ZMQPusher(answer)
    def ZMQPuller(self):
        '''
        Blocking receive loop (runs on the daemon thread): pull a JSON
        message from NovaGenesis, serialise it back to a string and hand it
        to CommandParser, which matches on substrings of that string.
        '''
        while True:
            JsonMessage = self.consumer_receiver.recv_json()
            DictMessage = json.dumps(JsonMessage)
            self.CommandParser(DictMessage)
    def ZMQPusher(self, answer):
        '''Send an answer dictionary to NovaGenesis as JSON.'''
        self.consumer_sender.send_json(answer)
| [
"bzrlib.plugins.launchpad.lp_api_lite.json.dumps",
"sys.path.append",
"threading.Thread",
"utils.logmsgs.logger.Logger",
"zmq.Context"
] | [((306, 331), 'sys.path.append', 'sys.path.append', (['"""../../"""'], {}), "('../../')\n", (321, 331), False, 'import sys\n'), ((1050, 1065), 'utils.logmsgs.logger.Logger', 'logger.Logger', ([], {}), '()\n', (1063, 1065), False, 'from utils.logmsgs import logger\n'), ((1180, 1193), 'zmq.Context', 'zmq.Context', ([], {}), '()\n', (1191, 1193), False, 'import zmq\n'), ((1362, 1375), 'zmq.Context', 'zmq.Context', ([], {}), '()\n', (1373, 1375), False, 'import zmq\n'), ((1582, 1645), 'threading.Thread', 'threading.Thread', ([], {'name': '"""NGZMQPullerWork"""', 'target': 'self.ZMQPuller'}), "(name='NGZMQPullerWork', target=self.ZMQPuller)\n", (1598, 1645), False, 'import threading\n'), ((4125, 4148), 'bzrlib.plugins.launchpad.lp_api_lite.json.dumps', 'json.dumps', (['JsonMessage'], {}), '(JsonMessage)\n', (4135, 4148), False, 'from bzrlib.plugins.launchpad.lp_api_lite import json\n')] |
#
# This source file is part of the EdgeDB open source project.
#
# Copyright 2016-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import socket
import edgedb
from edgedb import _testbase as tb
class TestConnect(tb.AsyncQueryTestCase):
    """Connection-failure tests: connecting to a known-free port must raise
    ClientConnectionError with a hint that the server is not running."""
    @classmethod
    def setUpClass(cls):
        # Reserve a port number that no server is listening on.
        super().setUpClass()
        cls.port = cls._get_free_port()
    @classmethod
    def _get_free_port(cls):
        # Ask the OS for an ephemeral port by binding to port 0, then close
        # the socket immediately; returns None if no port could be obtained.
        # There is a small window where the port could be reused, which is
        # acceptable here because the tests *want* an unused port.
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            sock.bind(('127.0.0.1', 0))
            return sock.getsockname()[1]
        except Exception:
            return None
        finally:
            sock.close()
    async def test_connect_async_01(self):
        # Async API: connecting to the unused port must fail both for an
        # explicit 127.0.0.1 host and for the test server's own host.
        orig_conn_args = self.get_connect_args()
        conn_args = orig_conn_args.copy()
        conn_args['port'] = self.port
        conn_args['wait_until_available'] = 0
        with self.assertRaisesRegex(
                edgedb.ClientConnectionError,
                f'(?s).*Is the server running.*port {self.port}.*'):
            conn_args['host'] = '127.0.0.1'
            await edgedb.async_connect(**conn_args)
        with self.assertRaisesRegex(
                edgedb.ClientConnectionError,
                f'(?s).*Is the server running.*port {self.port}.*'):
            conn_args['host'] = orig_conn_args['host']
            await edgedb.async_connect(**conn_args)
    def test_connect_sync_01(self):
        # Same scenario as test_connect_async_01 but through the blocking API.
        orig_conn_args = self.get_connect_args()
        conn_args = orig_conn_args.copy()
        conn_args['port'] = self.port
        conn_args['wait_until_available'] = 0
        with self.assertRaisesRegex(
                edgedb.ClientConnectionError,
                f'(?s).*Is the server running.*port {self.port}.*'):
            conn_args['host'] = '127.0.0.1'
            edgedb.connect(**conn_args)
        with self.assertRaisesRegex(
                edgedb.ClientConnectionError,
                f'(?s).*Is the server running.*port {self.port}.*'):
            conn_args['host'] = orig_conn_args['host']
            edgedb.connect(**conn_args)
| [
"edgedb.connect",
"edgedb.async_connect",
"socket.socket"
] | [((964, 1013), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (977, 1013), False, 'import socket\n'), ((2349, 2376), 'edgedb.connect', 'edgedb.connect', ([], {}), '(**conn_args)\n', (2363, 2376), False, 'import edgedb\n'), ((2597, 2624), 'edgedb.connect', 'edgedb.connect', ([], {}), '(**conn_args)\n', (2611, 2624), False, 'import edgedb\n'), ((1634, 1667), 'edgedb.async_connect', 'edgedb.async_connect', ([], {}), '(**conn_args)\n', (1654, 1667), False, 'import edgedb\n'), ((1894, 1927), 'edgedb.async_connect', 'edgedb.async_connect', ([], {}), '(**conn_args)\n', (1914, 1927), False, 'import edgedb\n')] |
#!/usr/bin/env python3
"""pdoc's CLI interface and helper functions."""
import argparse
import ast
import importlib
import inspect
import os
import os.path as path
import json
import re
import sys
import warnings
from contextlib import contextmanager
from functools import lru_cache
from http.server import BaseHTTPRequestHandler, HTTPServer
from typing import Dict, List, Sequence
from warnings import warn
import pdoc
# Command-line interface definition.  `aa` adds a regular argument;
# `mode_aa` adds to a mutually exclusive group so that at most one output
# mode (--html / --pdf) can be selected.
parser = argparse.ArgumentParser(
    description="Automatically generate API docs for Python modules.",
    epilog="Further documentation is available at <https://pdoc3.github.io/pdoc/doc>.",
)
aa = parser.add_argument
mode_aa = parser.add_mutually_exclusive_group().add_argument
aa(
    '--version', action='version', version='%(prog)s ' + pdoc.__version__)
aa(
    "modules",
    type=str,
    metavar='MODULE',
    nargs="+",
    help="The Python module name. This may be an import path resolvable in "
         "the current environment, or a file path to a Python module or "
         "package.",
)
aa(
    "-c", "--config",
    type=str,
    metavar='OPTION=VALUE',
    action='append',
    default=[],
    help="Override template options. This is an alternative to using "
         "a custom config.mako file in --template-dir. This option "
         "can be specified multiple times.",
)
aa(
    "--filter",
    type=str,
    metavar='STRING',
    default=None,
    help="Comma-separated list of filters. When specified, "
         "only identifiers containing the specified string "
         "will be shown in the output. Search is case sensitive. "
         "Has no effect when --http is set.",
)
aa(
    "-f", "--force",
    action="store_true",
    help="Overwrite any existing generated (--output-dir) files.",
)
mode_aa(
    "--html",
    action="store_true",
    help="When set, the output will be HTML formatted.",
)
mode_aa(
    "--pdf",
    action="store_true",
    help="When set, the specified modules will be printed to standard output, "
         "formatted in Markdown-Extra, compatible with most "
         "Markdown-(to-HTML-)to-PDF converters.",
)
# Arguments defined with help=argparse.SUPPRESS are hidden from --help;
# presumably retained for backwards compatibility -- confirm before removal.
aa(
    "--html-dir",
    type=str,
    help=argparse.SUPPRESS,
)
aa(
    "-o", "--output-dir",
    type=str,
    metavar='DIR',
    help="The directory to output generated HTML/markdown files to "
         "(default: ./html for --html).",
)
aa(
    "--html-no-source",
    action="store_true",
    help=argparse.SUPPRESS,
)
aa(
    "--overwrite",
    action="store_true",
    help=argparse.SUPPRESS,
)
aa(
    "--external-links",
    action="store_true",
    help=argparse.SUPPRESS,
)
aa(
    "--template-dir",
    type=str,
    metavar='DIR',
    default=None,
    help="Specify a directory containing Mako templates "
         "(html.mako, text.mako, config.mako and/or any templates they include). "
         "Alternatively, put your templates in $XDG_CONFIG_HOME/pdoc and "
         "pdoc will automatically find them.",
)
aa(
    "--link-prefix",
    type=str,
    help=argparse.SUPPRESS,
)
aa(
    "--close-stdin",
    action="store_true",
    help="When set, stdin will be closed before importing, to account for "
         "ill-behaved modules that block on stdin."
)
# Defaults used by the --http option's help text and server startup.
DEFAULT_HOST, DEFAULT_PORT = 'localhost', 8080
def _check_host_port(s):
if s and ':' not in s:
raise argparse.ArgumentTypeError(
"'{}' doesn't match '[HOST]:[PORT]'. "
"Specify `--http :` to use default hostname and port.".format(s))
return s
aa(
    "--http",
    default='',
    type=_check_host_port,
    metavar='HOST:PORT',
    help="When set, pdoc will run as an HTTP server providing documentation "
         "for specified modules. If you just want to use the default hostname "
         "and port ({}:{}), set the parameter to :.".format(DEFAULT_HOST, DEFAULT_PORT),
)
aa(
    "--skip-errors",
    action="store_true",
    help="Upon unimportable modules, warn instead of raising."
)
# Module-level holder for the parsed CLI options; created empty here and
# presumably populated when the CLI entry point parses argv -- confirm.
args = argparse.Namespace()
class _WebDoc(BaseHTTPRequestHandler):
args = None # Set before server instantiated
template_config = None
    def do_HEAD(self):
        # Answer a HEAD request with the same status logic used for content:
        # "/" (the module index) always answers 200; for any other path the
        # status comes from check_modified() (404 / 304 / 205).
        status = 200
        if self.path != "/":
            status = self.check_modified()
        self.send_response(status)
        self.send_header("Content-type", "text/html; charset=utf-8")
        self.end_headers()
    def check_modified(self):
        # Poor-man's ETag based on the module file's mtime:
        #   404 -- the requested module can no longer be imported,
        #   304 -- unchanged since the client's If-None-Match value,
        #   205 -- the module file was modified (client should reload).
        try:
            module = pdoc.import_module(self.import_path_from_req_url)
            new_etag = str(os.stat(module.__file__).st_mtime)
        except ImportError:
            return 404
        # Missing If-None-Match defaults to the new etag, i.e. "unchanged".
        old_etag = self.headers.get('If-None-Match', new_etag)
        if old_etag == new_etag:
            # Don't log repeating checks
            self.log_request = lambda *args, **kwargs: None
            return 304
        return 205
def do_GET(self):
# Deny favicon shortcut early.
if self.path == "/favicon.ico":
return None
importlib.invalidate_caches()
code = 200
if self.path == "/":
modules = [pdoc.import_module(module, reload=True)
for module in self.args.modules]
modules = sorted((module.__name__, inspect.getdoc(module))
for module in modules)
out = pdoc._render_template('/html.mako',
modules=modules,
**self.template_config)
elif self.path.endswith(".ext"):
# External links are a bit weird. You should view them as a giant
# hack. Basically, the idea is to "guess" where something lives
# when documenting another module and hope that guess can actually
# track something down in a more global context.
#
# The idea here is to start specific by looking for HTML that
# exists that matches the full external path given. Then trim off
# one component at the end and try again.
#
# If no HTML is found, then we ask `pdoc` to do its thang on the
# parent module in the external path. If all goes well, that
# module will then be able to find the external identifier.
import_path = self.path[:-4].lstrip("/")
resolved = self.resolve_ext(import_path)
if resolved is None: # Try to generate the HTML...
print("Generating HTML for %s on the fly..." % import_path, file=sys.stderr)
try:
out = pdoc.html(import_path.split(".")[0], **self.template_config)
except Exception as e:
print('Error generating docs: {}'.format(e), file=sys.stderr)
# All hope is lost.
code = 404
out = "External identifier <code>%s</code> not found." % import_path
else:
return self.redirect(resolved)
# Redirect '/pdoc' to '/pdoc/' so that relative links work
# (results in '/pdoc/cli.html' instead of 'cli.html')
elif not self.path.endswith(('/', '.html')):
return self.redirect(self.path + '/')
# Redirect '/pdoc/index.html' to '/pdoc/' so it's more pretty
elif self.path.endswith(pdoc._URL_PACKAGE_SUFFIX):
return self.redirect(self.path[:-len(pdoc._URL_PACKAGE_SUFFIX)] + '/')
else:
try:
out = self.html()
except Exception:
import traceback
from html import escape
code = 404
out = "Error importing module <code>{}</code>:\n\n<pre>{}</pre>".format(
self.import_path_from_req_url, escape(traceback.format_exc()))
out = out.replace('\n', '<br>')
self.send_response(code)
self.send_header("Content-type", "text/html; charset=utf-8")
self.end_headers()
self.echo(out)
def redirect(self, location):
self.send_response(302)
self.send_header("Location", location)
self.end_headers()
def echo(self, s):
self.wfile.write(s.encode("utf-8"))
def html(self):
"""
Retrieves and sends the HTML belonging to the path given in
URL. This method is smart and will look for HTML files already
generated and account for whether they are stale compared to
the source code.
"""
return pdoc.html(self.import_path_from_req_url,
reload=True, http_server=True, external_links=True,
skip_errors=args.skip_errors,
**self.template_config)
def resolve_ext(self, import_path):
def exists(p):
p = path.join(args.output_dir, p)
pkg = path.join(p, pdoc._URL_PACKAGE_SUFFIX.lstrip('/'))
mod = p + pdoc._URL_MODULE_SUFFIX
if path.isfile(pkg):
return pkg[len(args.output_dir):]
elif path.isfile(mod):
return mod[len(args.output_dir):]
return None
parts = import_path.split(".")
for i in range(len(parts), 0, -1):
p = path.join(*parts[0:i])
realp = exists(p)
if realp is not None:
return "/%s#%s" % (realp.lstrip("/"), import_path)
return None
@property
def import_path_from_req_url(self):
pth = self.path.split('#')[0].lstrip('/')
for suffix in ('/',
pdoc._URL_PACKAGE_SUFFIX,
pdoc._URL_INDEX_MODULE_SUFFIX,
pdoc._URL_MODULE_SUFFIX):
if pth.endswith(suffix):
pth = pth[:-len(suffix)]
break
return pth.replace('/', '.')
def module_path(m: pdoc.Module, ext: str):
    """Return the output filesystem path for module *m* with extension *ext*.

    Derived from the module's URL with its '.html' suffix swapped for *ext*,
    rooted at ``args.output_dir``.
    """
    relative_url = re.sub(r'\.html$', ext, m.url())
    return path.join(args.output_dir, *relative_url.split('/'))
def _quit_if_exists(m: pdoc.Module, ext: str):
    """Exit the process (status 1) if output for *m* already exists.

    Skipped entirely when ``args.force`` is set.  For packages the output
    directory is checked as well as the file itself.
    """
    if args.force:
        return
    candidates = [module_path(m, ext)]
    if m.is_package:  # If package, make sure the dir doesn't exist either
        candidates.append(path.dirname(candidates[0]))
    for candidate in candidates:
        if not path.lexists(candidate):
            continue
        print("File '%s' already exists. Delete it, or run with --force" % candidate,
              file=sys.stderr)
        sys.exit(1)
@contextmanager
def _open_write_file(filename):
try:
with open(filename, 'w', encoding='utf-8') as f:
yield f
print(filename) # print created file path to stdout
except Exception:
try:
os.unlink(filename)
except Exception:
pass
raise
def recursive_write_files(m: pdoc.Module, ext: str, **kwargs):
    """Write rendered documentation for *m* and, recursively, all of its
    submodules.

    Parameters
    ----------
    m : pdoc.Module
        Module whose documentation is written.
    ext : str
        Output format, either '.html' or '.md'.
    **kwargs
        Forwarded to ``m.html()`` / ``m.text()`` (template config).
    """
    assert ext in ('.html', '.md')
    filepath = module_path(m, ext=ext)
    dirpath = path.dirname(filepath)
    # BUG FIX: the previous check was ``os.access(dirpath, os.R_OK)``, which
    # tests *readability* rather than existence and is racy (LBYL).
    # makedirs(..., exist_ok=True) is atomic with respect to concurrent
    # creation and a no-op when the directory already exists.
    if dirpath:
        os.makedirs(dirpath, exist_ok=True)
    with _open_write_file(filepath) as f:
        if ext == '.html':
            f.write(m.html(**kwargs))
        elif ext == '.md':
            f.write(m.text(**kwargs))
    for submodule in m.submodules():
        recursive_write_files(submodule, ext=ext, **kwargs)
def _flatten_submodules(modules: Sequence[pdoc.Module]):
for module in modules:
yield module
for submodule in module.submodules():
yield from _flatten_submodules((submodule,))
def _print_pdf(modules, **kwargs):
    """Render the '/pdf.mako' template over *modules* (with all submodules
    flattened in) and print the result to stdout."""
    flat_modules = list(_flatten_submodules(modules))
    rendered = pdoc._render_template('/pdf.mako', modules=flat_modules, **kwargs)
    print(rendered)
def _warn_deprecated(option, alternative='', use_config_mako=False):
msg = 'Program option `{}` is deprecated.'.format(option)
if alternative:
msg += ' Use `' + alternative + '`'
if use_config_mako:
msg += ' or override config.mako template'
msg += '.'
warn(msg, DeprecationWarning, stacklevel=2)
def _generate_lunr_search(modules: List[pdoc.Module],
                          index_docstrings: bool,
                          template_config: dict):
    """Generate index.js (plus doc-search.html) for client-side search over
    the documented objects in *modules*."""
    def trim_docstring(docstring):
        # Collapse markdown/HTML noise so the search index stores compact text.
        return re.sub(r'''
            \s+| # whitespace sequences
            \s+[-=~]{3,}\s+| # title underlines
            ^[ \t]*[`~]{3,}\w*$| # code blocks
            \s*[`#*]+\s*| # common markdown chars
            \s*([^\w\d_>])\1\s*| # sequences of punct of the same kind
            \s*</?\w*[^>]*>\s* # simple HTML tags
            ''', ' ', docstring, flags=re.VERBOSE | re.MULTILINE)
    def recursive_add_to_index(dobj):
        # One index entry per documented object; members are added recursively.
        info = {
            'ref': dobj.refname,
            'url': to_url_id(dobj.module),
        }
        if index_docstrings:
            info['doc'] = trim_docstring(dobj.docstring)
        if isinstance(dobj, pdoc.Function):
            info['func'] = 1
        index.append(info)
        for member_dobj in getattr(dobj, 'doc', {}).values():
            recursive_add_to_index(member_dobj)
    @lru_cache()
    def to_url_id(module):
        # Intern each module URL to a small integer id referenced by entries.
        url = module.url()
        if url not in url_cache:
            url_cache[url] = len(url_cache)
        return url_cache[url]
    index = []  # type: List[Dict]
    url_cache = {}  # type: Dict[str, int]
    for top_module in modules:
        recursive_add_to_index(top_module)
    # Sort URLs by their assigned id so list positions match the ids above.
    urls = sorted(url_cache.keys(), key=url_cache.__getitem__)
    main_path = args.output_dir
    with _open_write_file(path.join(main_path, 'index.js')) as f:
        f.write("URLS=")
        json.dump(urls, f, indent=0, separators=(',', ':'))
        f.write(";\nINDEX=")
        json.dump(index, f, indent=0, separators=(',', ':'))
    # Generate search.html
    with _open_write_file(path.join(main_path, 'doc-search.html')) as f:
        rendered_template = pdoc._render_template('/search.mako', **template_config)
        f.write(rendered_template)
def main(_args=None):
    """Command-line entry point.

    Parses arguments (unless *_args* is supplied), then either serves docs
    over HTTP (--http), prints PDF-ready markdown (--pdf), writes HTML or
    markdown files (--html / --output-dir), or writes plain text to stdout.
    """
    global args
    args = _args or parser.parse_args()
    warnings.simplefilter("once", DeprecationWarning)
    if args.close_stdin:
        sys.stdin.close()
    # --html / --http imply an output directory; default it to ./html.
    if (args.html or args.http) and not args.output_dir:
        args.output_dir = 'html'
    # Map deprecated options onto their modern equivalents.
    if args.html_dir:
        _warn_deprecated('--html-dir', '--output-dir')
        args.output_dir = args.html_dir
    if args.overwrite:
        _warn_deprecated('--overwrite', '--force')
        args.force = args.overwrite
    # Parse `key=value` --config overrides; values are Python literals
    # (ast.literal_eval), so string values must be quoted.
    template_config = {}
    for config_str in args.config:
        try:
            key, value = config_str.split('=', 1)
            value = ast.literal_eval(value)
            template_config[key] = value
        except Exception:
            raise ValueError(
                'Error evaluating --config statement "{}". '
                'Make sure string values are quoted?'
                .format(config_str)
            )
    if args.html_no_source:
        _warn_deprecated('--html-no-source', '-c show_source_code=False', True)
        template_config['show_source_code'] = False
    if args.link_prefix:
        _warn_deprecated('--link-prefix', '-c link_prefix="foo"', True)
        template_config['link_prefix'] = args.link_prefix
    if args.external_links:
        _warn_deprecated('--external-links')
        template_config['external_links'] = True
    if args.template_dir is not None:
        if not path.isdir(args.template_dir):
            print('Error: Template dir {!r} is not a directory'.format(args.template_dir),
                  file=sys.stderr)
            sys.exit(1)
        pdoc.tpl_lookup.directories.insert(0, args.template_dir)
    # Support loading modules specified as python paths relative to cwd
    sys.path.append(os.getcwd())
    # Virtual environment handling for pdoc script run from system site
    try:
        venv_dir = os.environ['VIRTUAL_ENV']
    except KeyError:
        pass  # pdoc was not invoked while in a virtual environment
    else:
        from glob import glob
        from distutils.sysconfig import get_python_lib
        libdir = get_python_lib(prefix=venv_dir)
        sys.path.append(libdir)
        # Resolve egg-links from `setup.py develop` or `pip install -e`
        # XXX: Welcome a more canonical approach
        for pth in glob(path.join(libdir, '*.egg-link')):
            try:
                with open(pth) as f:
                    sys.path.append(path.join(libdir, f.readline().rstrip()))
            except IOError:
                warn('Invalid egg-link in venv: {!r}'.format(pth))
    if args.http:
        template_config['link_prefix'] = "/"
        # Run the HTTP server.
        _WebDoc.args = args  # Pass params to HTTPServer xP
        _WebDoc.template_config = template_config
        host, _, port = args.http.partition(':')
        host = host or DEFAULT_HOST
        port = int(port or DEFAULT_PORT)
        print('Starting pdoc server on {}:{}'.format(host, port), file=sys.stderr)
        httpd = HTTPServer((host, port), _WebDoc)
        print("pdoc server ready at http://%s:%d" % (host, port), file=sys.stderr)
        # Allow tests to perform `pdoc.cli._httpd.shutdown()`
        global _httpd
        _httpd = httpd
        try:
            httpd.serve_forever()
        finally:
            httpd.server_close()
            sys.exit(0)
    docfilter = None
    if args.filter and args.filter.strip():
        # Keep only objects whose refname (or, for classes, member names)
        # contains one of the comma-separated --filter terms.
        def docfilter(obj, _filters=args.filter.strip().split(',')):
            return any(f in obj.refname or
                       isinstance(obj, pdoc.Class) and f in obj.doc
                       for f in _filters)
    modules = [pdoc.Module(module, docfilter=docfilter,
                           skip_errors=args.skip_errors)
               for module in args.modules]
    pdoc.link_inheritance()
    if args.pdf:
        _print_pdf(modules, **template_config)
        import textwrap
        print("""
PDF-ready markdown written to standard output.
                               ^^^^^^^^^^^^^^^
Convert this file to PDF using e.g. Pandoc:
{PANDOC_CMD}
or using Python-Markdown and Chrome/Chromium/WkHtmlToPDF:
    markdown_py --extension=meta \\
    --extension=abbr \\
    --extension=attr_list \\
    --extension=def_list \\
    --extension=fenced_code \\
    --extension=footnotes \\
    --extension=tables \\
    --extension=admonition \\
    --extension=smarty \\
    --extension=toc \\
    pdf.md > pdf.html
    chromium --headless --disable-gpu --print-to-pdf=pdf.pdf pdf.html
    wkhtmltopdf --encoding utf8 -s A4 --print-media-type pdf.html pdf.pdf
or similar, at your own discretion.""".format(PANDOC_CMD=textwrap.indent(_PANDOC_COMMAND, '    ')),
              file=sys.stderr)
        sys.exit(0)
    for module in modules:
        if args.html:
            _quit_if_exists(module, ext='.html')
            recursive_write_files(module, ext='.html', **template_config)
        elif args.output_dir:  # Generate text files
            _quit_if_exists(module, ext='.md')
            recursive_write_files(module, ext='.md', **template_config)
        else:
            sys.stdout.write(module.text(**template_config))
            # Two blank lines between two modules' texts
            sys.stdout.write(os.linesep * (1 + 2 * int(module != modules[-1])))
    # Optionally emit lunr search index files (index.js, doc-search.html).
    lunr_config = pdoc._get_config(**template_config).get('lunr_search')
    if lunr_config is not None:
        _generate_lunr_search(
            modules, lunr_config.get("index_docstrings", True), template_config)
# Shell command suggested to the user (interpolated into the --pdf help text
# printed by main()) for converting the generated markdown to PDF via Pandoc.
_PANDOC_COMMAND = '''\
pandoc --metadata=title:"MyProject Documentation" \\
--from=markdown+abbreviations+tex_math_single_backslash \\
--pdf-engine=xelatex --variable=mainfont:"DejaVu Sans" \\
--toc --toc-depth=4 --output=pdf.pdf pdf.md\
'''
if __name__ == "__main__":
    main(parser.parse_args())
| [
"pdoc.tpl_lookup.directories.insert",
"http.server.HTTPServer",
"argparse.Namespace",
"sys.exit",
"sys.path.append",
"pdoc.html",
"pdoc._URL_PACKAGE_SUFFIX.lstrip",
"importlib.invalidate_caches",
"argparse.ArgumentParser",
"textwrap.indent",
"os.path.lexists",
"os.path.isdir",
"os.unlink",
... | [((432, 621), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Automatically generate API docs for Python modules."""', 'epilog': '"""Further documentation is available at <https://pdoc3.github.io/pdoc/doc>."""'}), "(description=\n 'Automatically generate API docs for Python modules.', epilog=\n 'Further documentation is available at <https://pdoc3.github.io/pdoc/doc>.'\n )\n", (455, 621), False, 'import argparse\n'), ((3911, 3931), 'argparse.Namespace', 'argparse.Namespace', ([], {}), '()\n', (3929, 3931), False, 'import argparse\n'), ((10798, 10820), 'os.path.dirname', 'path.dirname', (['filepath'], {}), '(filepath)\n', (10810, 10820), True, 'import os.path as path\n'), ((11833, 11876), 'warnings.warn', 'warn', (['msg', 'DeprecationWarning'], {'stacklevel': '(2)'}), '(msg, DeprecationWarning, stacklevel=2)\n', (11837, 11876), False, 'from warnings import warn\n'), ((12988, 12999), 'functools.lru_cache', 'lru_cache', ([], {}), '()\n', (12997, 12999), False, 'from functools import lru_cache\n'), ((13994, 14043), 'warnings.simplefilter', 'warnings.simplefilter', (['"""once"""', 'DeprecationWarning'], {}), "('once', DeprecationWarning)\n", (14015, 14043), False, 'import warnings\n'), ((17716, 17739), 'pdoc.link_inheritance', 'pdoc.link_inheritance', ([], {}), '()\n', (17737, 17739), False, 'import pdoc\n'), ((4903, 4932), 'importlib.invalidate_caches', 'importlib.invalidate_caches', ([], {}), '()\n', (4930, 4932), False, 'import importlib\n'), ((8421, 8571), 'pdoc.html', 'pdoc.html', (['self.import_path_from_req_url'], {'reload': '(True)', 'http_server': '(True)', 'external_links': '(True)', 'skip_errors': 'args.skip_errors'}), '(self.import_path_from_req_url, reload=True, http_server=True,\n external_links=True, skip_errors=args.skip_errors, **self.template_config)\n', (8430, 8571), False, 'import pdoc\n'), ((10157, 10174), 'os.path.lexists', 'path.lexists', (['pth'], {}), '(pth)\n', (10169, 10174), True, 'import os.path as 
path\n'), ((10832, 10859), 'os.access', 'os.access', (['dirpath', 'os.R_OK'], {}), '(dirpath, os.R_OK)\n', (10841, 10859), False, 'import os\n'), ((10869, 10889), 'os.makedirs', 'os.makedirs', (['dirpath'], {}), '(dirpath)\n', (10880, 10889), False, 'import os\n'), ((11467, 11528), 'pdoc._render_template', 'pdoc._render_template', (['"""/pdf.mako"""'], {'modules': 'modules'}), "('/pdf.mako', modules=modules, **kwargs)\n", (11488, 11528), False, 'import pdoc\n'), ((12123, 12567), 're.sub', 're.sub', (['"""\n \\\\s+| # whitespace sequences\n \\\\s+[-=~]{3,}\\\\s+| # title underlines\n ^[ \\\\t]*[`~]{3,}\\\\w*$| # code blocks\n \\\\s*[`#*]+\\\\s*| # common markdown chars\n \\\\s*([^\\\\w\\\\d_>])\\\\1\\\\s*| # sequences of punct of the same kind\n \\\\s*</?\\\\w*[^>]*>\\\\s* # simple HTML tags\n """', '""" """', 'docstring'], {'flags': '(re.VERBOSE | re.MULTILINE)'}), '(\n """\n \\\\s+| # whitespace sequences\n \\\\s+[-=~]{3,}\\\\s+| # title underlines\n ^[ \\\\t]*[`~]{3,}\\\\w*$| # code blocks\n \\\\s*[`#*]+\\\\s*| # common markdown chars\n \\\\s*([^\\\\w\\\\d_>])\\\\1\\\\s*| # sequences of punct of the same kind\n \\\\s*</?\\\\w*[^>]*>\\\\s* # simple HTML tags\n """\n , \' \', docstring, flags=re.VERBOSE | re.MULTILINE)\n', (12129, 12567), False, 'import re\n'), ((13509, 13560), 'json.dump', 'json.dump', (['urls', 'f'], {'indent': '(0)', 'separators': "(',', ':')"}), "(urls, f, indent=0, separators=(',', ':'))\n", (13518, 13560), False, 'import json\n'), ((13598, 13650), 'json.dump', 'json.dump', (['index', 'f'], {'indent': '(0)', 'separators': "(',', ':')"}), "(index, f, indent=0, separators=(',', ':'))\n", (13607, 13650), False, 'import json\n'), ((13780, 13836), 'pdoc._render_template', 'pdoc._render_template', (['"""/search.mako"""'], {}), "('/search.mako', **template_config)\n", (13801, 13836), False, 'import pdoc\n'), ((14078, 14095), 'sys.stdin.close', 'sys.stdin.close', ([], {}), '()\n', (14093, 14095), False, 'import sys\n'), ((15526, 15582), 
'pdoc.tpl_lookup.directories.insert', 'pdoc.tpl_lookup.directories.insert', (['(0)', 'args.template_dir'], {}), '(0, args.template_dir)\n', (15560, 15582), False, 'import pdoc\n'), ((15676, 15687), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (15685, 15687), False, 'import os\n'), ((16017, 16048), 'distutils.sysconfig.get_python_lib', 'get_python_lib', ([], {'prefix': 'venv_dir'}), '(prefix=venv_dir)\n', (16031, 16048), False, 'from distutils.sysconfig import get_python_lib\n'), ((16057, 16080), 'sys.path.append', 'sys.path.append', (['libdir'], {}), '(libdir)\n', (16072, 16080), False, 'import sys\n'), ((16920, 16953), 'http.server.HTTPServer', 'HTTPServer', (['(host, port)', '_WebDoc'], {}), '((host, port), _WebDoc)\n', (16930, 16953), False, 'from http.server import BaseHTTPRequestHandler, HTTPServer\n'), ((17571, 17641), 'pdoc.Module', 'pdoc.Module', (['module'], {'docfilter': 'docfilter', 'skip_errors': 'args.skip_errors'}), '(module, docfilter=docfilter, skip_errors=args.skip_errors)\n', (17582, 17641), False, 'import pdoc\n'), ((18818, 18829), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (18826, 18829), False, 'import sys\n'), ((4364, 4413), 'pdoc.import_module', 'pdoc.import_module', (['self.import_path_from_req_url'], {}), '(self.import_path_from_req_url)\n', (4382, 4413), False, 'import pdoc\n'), ((5241, 5317), 'pdoc._render_template', 'pdoc._render_template', (['"""/html.mako"""'], {'modules': 'modules'}), "('/html.mako', modules=modules, **self.template_config)\n", (5262, 5317), False, 'import pdoc\n'), ((8723, 8752), 'os.path.join', 'path.join', (['args.output_dir', 'p'], {}), '(args.output_dir, p)\n', (8732, 8752), True, 'import os.path as path\n'), ((8884, 8900), 'os.path.isfile', 'path.isfile', (['pkg'], {}), '(pkg)\n', (8895, 8900), True, 'import os.path as path\n'), ((9160, 9182), 'os.path.join', 'path.join', (['*parts[0:i]'], {}), '(*parts[0:i])\n', (9169, 9182), True, 'import os.path as path\n'), ((10099, 10121), 'os.path.dirname', 
'path.dirname', (['paths[0]'], {}), '(paths[0])\n', (10111, 10121), True, 'import os.path as path\n'), ((10307, 10318), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (10315, 10318), False, 'import sys\n'), ((13436, 13468), 'os.path.join', 'path.join', (['main_path', '"""index.js"""'], {}), "(main_path, 'index.js')\n", (13445, 13468), True, 'import os.path as path\n'), ((13705, 13744), 'os.path.join', 'path.join', (['main_path', '"""doc-search.html"""'], {}), "(main_path, 'doc-search.html')\n", (13714, 13744), True, 'import os.path as path\n'), ((14559, 14582), 'ast.literal_eval', 'ast.literal_eval', (['value'], {}), '(value)\n', (14575, 14582), False, 'import ast\n'), ((15337, 15366), 'os.path.isdir', 'path.isdir', (['args.template_dir'], {}), '(args.template_dir)\n', (15347, 15366), True, 'import os.path as path\n'), ((15506, 15517), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (15514, 15517), False, 'import sys\n'), ((16226, 16257), 'os.path.join', 'path.join', (['libdir', '"""*.egg-link"""'], {}), "(libdir, '*.egg-link')\n", (16235, 16257), True, 'import os.path as path\n'), ((17255, 17266), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (17263, 17266), False, 'import sys\n'), ((19406, 19441), 'pdoc._get_config', 'pdoc._get_config', ([], {}), '(**template_config)\n', (19422, 19441), False, 'import pdoc\n'), ((5004, 5043), 'pdoc.import_module', 'pdoc.import_module', (['module'], {'reload': '(True)'}), '(module, reload=True)\n', (5022, 5043), False, 'import pdoc\n'), ((8784, 8820), 'pdoc._URL_PACKAGE_SUFFIX.lstrip', 'pdoc._URL_PACKAGE_SUFFIX.lstrip', (['"""/"""'], {}), "('/')\n", (8815, 8820), False, 'import pdoc\n'), ((8969, 8985), 'os.path.isfile', 'path.isfile', (['mod'], {}), '(mod)\n', (8980, 8985), True, 'import os.path as path\n'), ((10567, 10586), 'os.unlink', 'os.unlink', (['filename'], {}), '(filename)\n', (10576, 10586), False, 'import os\n'), ((4441, 4465), 'os.stat', 'os.stat', (['module.__file__'], {}), '(module.__file__)\n', (4448, 4465), 
False, 'import os\n'), ((18736, 18776), 'textwrap.indent', 'textwrap.indent', (['_PANDOC_COMMAND', '""" """'], {}), "(_PANDOC_COMMAND, ' ')\n", (18751, 18776), False, 'import textwrap\n'), ((5147, 5169), 'inspect.getdoc', 'inspect.getdoc', (['module'], {}), '(module)\n', (5161, 5169), False, 'import inspect\n'), ((7693, 7715), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (7713, 7715), False, 'import traceback\n')] |
# =========================================================================== #
# DATA EXPLORER #
# =========================================================================== #
# =========================================================================== #
# Project: ML Studio #
# Version: 0.1.14 #
# File: \data_explorer.py #
# Python Version: 3.7.3 #
# --------------- #
# Author: <NAME> #
# Company: Decision Scients #
# Email: <EMAIL> #
# --------------- #
# Create Date: Friday December 6th 2019, 9:12:28 pm #
# Last Modified: Friday December 6th 2019, 9:12:35 pm #
# Modified By: <NAME> (<EMAIL>) #
# --------------- #
# License: Modified BSD #
# Copyright (c) 2019 Decision Scients #
# =========================================================================== #
"""Data Explorer - A dash powered web app for analyzing and preparing data.
This module provides a dashboard application that supports:
- Data Audit : Missing values and outliers
- Data Analysis : Exploration of data vis-a-vis statistical
assumptions of independence, linearity, normality,
and homoscedasticity
- Data Preparation : Missing values, and outliers
- Feature Selection : Identifying the features that most
influence the dependent variable
- Features Engineering : Feature transformation, Binning
One-Hot Encoding, Features Split and Scaling
- Dimensionality Reduction : PCA,
t-Distributed Stochastic Neighbor Embedding (t-SNE)
see https://www.analyticsvidhya.com/blog/2018/08/dimensionality-reduction-techniques-python/
Note: This module was highly inspired by the plotly dash-svm
at https://github.com/plotly/dash-svm.
"""
#%%
import os
import sys
sys.path.append('ml_studio')
sys.path.append('ml_studio/utils/visual')
import time
from textwrap import dedent
import warnings
import dash
import dash_core_components as dcc
import dash_daq as daq
import dash_html_components as html
from dash.dependencies import Input, Output, State
import numpy as np
import pandas as pd
from sklearn.datasets import fetch_california_housing, make_regression
from sklearn.datasets import make_classification
from sklearn.model_selection import GridSearchCV, RandomizedSearchCV
from sklearn.svm import SVC
from ml_studio.visualate.classification.figures import serve_prediction_plot, serve_roc_curve, \
serve_pie_confusion_matrix
import ml_studio
from ml_studio.utils.model import get_model_name
from ml_studio.utils.data_manager import sampler, data_split, StandardScaler
from ml_studio.utils.misc import proper
import ml_studio.utils.visual as drc
# --------------------------------------------------------------------------- #
# External stylesheets loaded from CDNs (CSS reset + fonts + icon set).
external_scripts = [
    # Normalize the CSS
    "https://cdnjs.cloudflare.com/ajax/libs/normalize/8.0.1/normalize.min.css",
    # Fonts
    "https://fonts.googleapis.com/css?family=Open+Sans|Roboto",
    "https://maxcdn.bootstrapcdn.com/font-awesome/4.7.0/css/font-awesome.min.css"
]
# Dash application instance; `server` exposes the underlying Flask app so a
# WSGI host can serve it directly.
app = dash.Dash(__name__,
                external_scripts=external_scripts)
app.scripts.config.serve_locally = False
server = app.server
# --------------------------------------------------------------------------- #
#                                Generate Data                                #
# --------------------------------------------------------------------------- #
def generate_data(dataset, n_samples=None, n_features=None, noise=100,
                  seed=None):
    """Load or synthesize an (X, y) dataset for the explorer.

    Parameters
    ----------
    dataset : str
        One of 'california', 'msd', 'online_news', 'speed_dating',
        'regression', or 'binary'.
    n_samples : int, optional
        Sample count for the synthetic 'regression' dataset
        (defaults to 10,000 with a warning).
    n_features : int, optional
        Feature count for the synthetic 'regression' dataset
        (defaults to 100 with a warning).
    noise : float, default 100
        Gaussian noise std-dev for the synthetic 'regression' dataset.
    seed : int, optional
        Random state for the synthetic 'regression' dataset.

    Returns
    -------
    tuple
        (X, y) features and target.

    Raises
    ------
    ValueError
        If `dataset` does not name a known dataset.
    """
    if dataset == 'california':
        return fetch_california_housing(return_X_y=True)
    elif dataset == 'msd':
        data = pd.read_csv("ml_studio/data_gathering/msd/year_prediction.csv")
        y = data[['label']]
        X = data.drop(columns=['label'], inplace=False)
        return (X, y)
    elif dataset == 'online_news':
        data = pd.read_csv("ml_studio/data_gathering/online_news_popularity/OnlineNewsPopularity.csv")
        # Column names in this file carry stray whitespace; strip it.
        data.columns = data.columns.str.replace(r'\s+', '')
        y = data[['shares']]
        X = data.drop(columns=['shares'], inplace=False)
        return (X, y)
    elif dataset == 'speed_dating':
        data = pd.read_csv("ml_studio/data_gathering/speed_dating/Speed Dating Data.csv",
                           encoding='unicode_escape')
        y = data[['match']]
        X = data.drop(columns=['match'], inplace=False)
        return (X, y)
    elif dataset == 'regression':
        if n_samples is None:
            warnings.warn("n_samples is None, defaulting to 10,000")
            n_samples = 10000
        if n_features is None:
            warnings.warn("n_features is None, defaulting to 100")
            n_features = 100
        # BUG FIX: the `noise` argument was previously ignored in favor of a
        # hard-coded literal 100; pass the caller's value through.
        X, y = make_regression(n_samples, n_features,
                               n_informative=100,
                               bias=400,
                               effective_rank=50,
                               noise=noise,
                               random_state=seed)
        return (X, y)
    elif dataset == 'binary':
        # NOTE(review): n_samples/n_features/noise/seed are not applied here;
        # the binary dataset is a fixed toy problem, matching prior behavior.
        X, y = make_classification(
            n_samples=100,
            n_features=2,
            n_redundant=0,
            n_informative=2,
            random_state=2,
            n_clusters_per_class=1
        )
        return (X, y)
    else:
        raise ValueError(
            'Data type incorrectly specified. Please choose an existing '
            'dataset.')
# --------------------------------------------------------------------------- #
#                                Define Tabs                                  #
# --------------------------------------------------------------------------- #
# Inline-style dictionaries shared by the dcc.Tab components in build_tabs().
tabs_styles = {
    'height': '44px'
}
# Style for every tab in its unselected state (dark theme).
tab_style = {
    'border': '1px solid #282b38',
    'borderBottom': '1px solid #282b38',
    'backgroundColor': '#282b38',
    'padding': '6px',
    'fontWeight': 'bold'
}
# Style for the currently selected tab (highlighted bottom border, white text).
tab_selected_style = {
    'border': '1px solid #282b38',
    'borderBottom': '1px solid #31459E',
    'backgroundColor': '#282b38',
    'color': 'white',
    'padding': '6px'
}
def build_tabs():
    """Build the application's tab bar: an html.Div wrapping a dcc.Tabs
    with one dcc.Tab per workflow stage, all sharing the same styling."""
    # (id, label, value) for each tab, in display order.
    tab_specs = [
        ("Analysis-tab", "Data Analysis", "tab3"),
        ("Cleaning-tab", "Data Cleaning", "tab4"),
        ("Feature-selection-tab", "Feature Selection", "tab5"),
        ("Features-engineering-tab", "Feature Engineering", "tab6"),
        ("Dimension-reduction-tab", "Dimension Reduction", "tab7"),
    ]
    tab_children = [
        dcc.Tab(
            id=tab_id,
            label=label,
            value=value,
            style=tab_style,
            selected_style=tab_selected_style,
            className="custom-tab",
            selected_className="custom-tab--selected",
        )
        for tab_id, label, value in tab_specs
    ]
    return html.Div(
        id="tabs",
        className="tabs",
        children=[
            dcc.Tabs(
                id="app-tabs",
                value="tab1",
                className="custom-tabs",
                children=tab_children,
            )
        ],
    )
def build_analysis_tab():
    """Placeholder for the 'Data Analysis' tab content; not yet implemented."""
    pass
app.layout = html.Div(children=[
# .container class is fixed, .container.scalable is scalable
html.Div(className="banner", children=[
# Change App Name here
html.Div(className='container scalable', children=[
# Change App Name here
html.H2(html.A(
'ML Studio Data Explorer',
href='https://github.com/decisionscients/ml-studio',
style={
'text-decoration': 'none',
'color': 'inherit'
}
)),
html.A(
# TODO: Create logo
html.Img(src="https://s3-us-west-1.amazonaws.com/plotly-tutorials/logo/new-branding/dash-logo-by-plotly-stripe-inverted.png"),
href='https://plot.ly/products/dash/'
)
]),
]),
html.Div(id='body', className='container scalable', children=[
html.Div(
id="app-container",
children=[
build_tabs()
],
),
html.Div(className='row', children=[
html.Div(
id='div-graphs',
children=dcc.Graph(
id='graph-sklearn-svm',
style={'display': 'none'}
)
),
html.Div(
className='three columns',
style={
'min-width': '24.5%',
'max-height': 'calc(100vh - 85px)',
'overflow-y': 'auto',
'overflow-x': 'hidden',
},
children=[
drc.Card([
drc.NamedDropdown(
name='Select Data Type',
id='dropdown-select-datatype',
options=[
{'label': 'Regression', 'value': 'regression'},
{'label': 'Binary Classification','value': 'binary'},
{'label': 'Multiclass Classification','value': 'multiclass'}
],
clearable=False,
searchable=False,
value='regression'
),
drc.NamedDropdown(
name='Select Dataset',
id='dropdown-select-dataset',
options=[
{'label': 'California Housing', 'value': 'california'},
{'label': 'Million Song Dataset','value': 'msd'},
{'label': 'Online News Popularity','value': 'online_news'},
{'label': 'Speed Dating', 'value': 'speed_dating'},
{'label': 'Regression', 'value': 'regression'},
{'label': 'Binary', 'value': 'binary'}
],
clearable=False,
searchable=False,
value='california'
),
]),
html.Div(
dcc.Markdown(dedent("""
[Click here](https://github.com/decisionscients/ml-studio) to visit the project repo, and learn about how to use the app.
""")),
style={'margin': '20px 0px', 'text-align': 'center'}
),
]
),
]),
])
])
# @app.callback(Output('div-graphs', 'children'),
# Input('dropdown-select-dataset', 'value'),
# Input('slider-threshold', 'value')
# def update_svm_graph(kernel,
# degree,
# C_coef,
# C_power,
# gamma_coef,
# gamma_power,
# dataset,
# noise,
# shrinking,
# threshold,
# sample_size):
# t_start = time.time()
# h = .3 # step size in the mesh
# # Data Pre-processing
# X, y = generate_data(dataset=dataset)
# StandardScaler().fit(X)
# X = StandardScaler().transform(X)
# X_train, X_test, y_train, y_test = \
# data_split(X, y, test_size=.4, seed=42)
# x_min = X[:, 0].min() - .5
# x_max = X[:, 0].max() + .5
# y_min = X[:, 1].min() - .5
# y_max = X[:, 1].max() + .5
# xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
# np.arange(y_min, y_max, h))
# C = C_coef * 10 ** C_power
# gamma = gamma_coef * 10 ** gamma_power
# # Train SVM
# clf = SVC(
# C=C,
# kernel=kernel,
# degree=degree,
# gamma=gamma,
# shrinking=shrinking
# )
# clf.fit(X_train, y_train)
# # Plot the decision boundary. For that, we will assign a color to each
# # point in the mesh [x_min, x_max]x[y_min, y_max].
# if hasattr(clf, "decision_function"):
# Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
# else:
# Z = clf.predict_proba(np.c_[xx.ravel(), yy.ravel()])[:, 1]
# prediction_figure = serve_prediction_plot(
# model=clf,
# X_train=X_train,
# X_test=X_test,
# y_train=y_train,
# y_test=y_test,
# Z=Z,
# xx=xx,
# yy=yy,
# mesh_step=h,
# threshold=threshold
# )
# roc_figure = serve_roc_curve(
# model=clf,
# X_test=X_test,
# y_test=y_test
# )
# confusion_figure = serve_pie_confusion_matrix(
# model=clf,
# X_test=X_test,
# y_test=y_test,
# Z=Z,
# threshold=threshold
# )
# print(
# f"Total Time Taken: {time.time() - t_start:.3f} sec")
# return [
# html.Div(
# className='three columns',
# style={
# 'min-width': '24.5%',
# 'height': 'calc(100vh - 90px)',
# 'margin-top': '5px',
# # Remove possibility to select the text for better UX
# 'user-select': 'none',
# '-moz-user-select': 'none',
# '-webkit-user-select': 'none',
# '-ms-user-select': 'none'
# },
# children=[
# dcc.Graph(
# id='graph-line-roc-curve',
# style={'height': '40%'},
# figure=roc_figure
# ),
# dcc.Graph(
# id='graph-pie-confusion-matrix',
# figure=confusion_figure,
# style={'height': '60%'}
# )
# ]),
# html.Div(
# className='six columns',
# style={'margin-top': '5px'},
# children=[
# dcc.Graph(
# id='graph-sklearn-svm',
# figure=prediction_figure,
# style={'height': 'calc(100vh - 90px)'}
# )
# ])
# ]
# Running the server
if __name__ == '__main__':
    # Debug mode enables hot reloading and the in-browser error console;
    # disable for production deployments.
    app.run_server(debug=True)
# %%
| [
"textwrap.dedent",
"dash_core_components.Graph",
"sklearn.datasets.make_regression",
"pandas.read_csv",
"dash_html_components.A",
"dash_core_components.Tab",
"ml_studio.utils.visual.NamedDropdown",
"sklearn.datasets.fetch_california_housing",
"dash_html_components.Img",
"warnings.warn",
"sys.pat... | [((2556, 2584), 'sys.path.append', 'sys.path.append', (['"""ml_studio"""'], {}), "('ml_studio')\n", (2571, 2584), False, 'import sys\n'), ((2585, 2626), 'sys.path.append', 'sys.path.append', (['"""ml_studio/utils/visual"""'], {}), "('ml_studio/utils/visual')\n", (2600, 2626), False, 'import sys\n'), ((3826, 3880), 'dash.Dash', 'dash.Dash', (['__name__'], {'external_scripts': 'external_scripts'}), '(__name__, external_scripts=external_scripts)\n', (3835, 3880), False, 'import dash\n'), ((4341, 4382), 'sklearn.datasets.fetch_california_housing', 'fetch_california_housing', ([], {'return_X_y': '(True)'}), '(return_X_y=True)\n', (4365, 4382), False, 'from sklearn.datasets import fetch_california_housing, make_regression\n'), ((4426, 4489), 'pandas.read_csv', 'pd.read_csv', (['"""ml_studio/data_gathering/msd/year_prediction.csv"""'], {}), "('ml_studio/data_gathering/msd/year_prediction.csv')\n", (4437, 4489), True, 'import pandas as pd\n'), ((4672, 4764), 'pandas.read_csv', 'pd.read_csv', (['"""ml_studio/data_gathering/online_news_popularity/OnlineNewsPopularity.csv"""'], {}), "(\n 'ml_studio/data_gathering/online_news_popularity/OnlineNewsPopularity.csv')\n", (4683, 4764), True, 'import pandas as pd\n'), ((5021, 5126), 'pandas.read_csv', 'pd.read_csv', (['"""ml_studio/data_gathering/speed_dating/Speed Dating Data.csv"""'], {'encoding': '"""unicode_escape"""'}), "('ml_studio/data_gathering/speed_dating/Speed Dating Data.csv',\n encoding='unicode_escape')\n", (5032, 5126), True, 'import pandas as pd\n'), ((5616, 5736), 'sklearn.datasets.make_regression', 'make_regression', (['n_samples', 'n_features'], {'n_informative': '(100)', 'bias': '(400)', 'effective_rank': '(50)', 'noise': '(100)', 'random_state': 'seed'}), '(n_samples, n_features, n_informative=100, bias=400,\n effective_rank=50, noise=100, random_state=seed)\n', (5631, 5736), False, 'from sklearn.datasets import fetch_california_housing, make_regression\n'), ((5386, 5442), 'warnings.warn', 
'warnings.warn', (['"""n_samples is None, defaulting to 10,000"""'], {}), "('n_samples is None, defaulting to 10,000')\n", (5399, 5442), False, 'import warnings\n'), ((5516, 5570), 'warnings.warn', 'warnings.warn', (['"""n_features is None, defaulting to 100"""'], {}), "('n_features is None, defaulting to 100')\n", (5529, 5570), False, 'import warnings\n'), ((5989, 6113), 'sklearn.datasets.make_classification', 'make_classification', ([], {'n_samples': '(100)', 'n_features': '(2)', 'n_redundant': '(0)', 'n_informative': '(2)', 'random_state': '(2)', 'n_clusters_per_class': '(1)'}), '(n_samples=100, n_features=2, n_redundant=0,\n n_informative=2, random_state=2, n_clusters_per_class=1)\n', (6008, 6113), False, 'from sklearn.datasets import make_classification\n'), ((7332, 7523), 'dash_core_components.Tab', 'dcc.Tab', ([], {'id': '"""Analysis-tab"""', 'label': '"""Data Analysis"""', 'value': '"""tab3"""', 'style': 'tab_style', 'selected_style': 'tab_selected_style', 'className': '"""custom-tab"""', 'selected_className': '"""custom-tab--selected"""'}), "(id='Analysis-tab', label='Data Analysis', value='tab3', style=\n tab_style, selected_style=tab_selected_style, className='custom-tab',\n selected_className='custom-tab--selected')\n", (7339, 7523), True, 'import dash_core_components as dcc\n'), ((7751, 7942), 'dash_core_components.Tab', 'dcc.Tab', ([], {'id': '"""Cleaning-tab"""', 'label': '"""Data Cleaning"""', 'value': '"""tab4"""', 'style': 'tab_style', 'selected_style': 'tab_selected_style', 'className': '"""custom-tab"""', 'selected_className': '"""custom-tab--selected"""'}), "(id='Cleaning-tab', label='Data Cleaning', value='tab4', style=\n tab_style, selected_style=tab_selected_style, className='custom-tab',\n selected_className='custom-tab--selected')\n", (7758, 7942), True, 'import dash_core_components as dcc\n'), ((8170, 8374), 'dash_core_components.Tab', 'dcc.Tab', ([], {'id': '"""Feature-selection-tab"""', 'label': '"""Feature Selection"""', 'value': 
'"""tab5"""', 'style': 'tab_style', 'selected_style': 'tab_selected_style', 'className': '"""custom-tab"""', 'selected_className': '"""custom-tab--selected"""'}), "(id='Feature-selection-tab', label='Feature Selection', value='tab5',\n style=tab_style, selected_style=tab_selected_style, className=\n 'custom-tab', selected_className='custom-tab--selected')\n", (8177, 8374), True, 'import dash_core_components as dcc\n'), ((8602, 8812), 'dash_core_components.Tab', 'dcc.Tab', ([], {'id': '"""Features-engineering-tab"""', 'label': '"""Feature Engineering"""', 'value': '"""tab6"""', 'style': 'tab_style', 'selected_style': 'tab_selected_style', 'className': '"""custom-tab"""', 'selected_className': '"""custom-tab--selected"""'}), "(id='Features-engineering-tab', label='Feature Engineering', value=\n 'tab6', style=tab_style, selected_style=tab_selected_style, className=\n 'custom-tab', selected_className='custom-tab--selected')\n", (8609, 8812), True, 'import dash_core_components as dcc\n'), ((9039, 9248), 'dash_core_components.Tab', 'dcc.Tab', ([], {'id': '"""Dimension-reduction-tab"""', 'label': '"""Dimension Reduction"""', 'value': '"""tab7"""', 'style': 'tab_style', 'selected_style': 'tab_selected_style', 'className': '"""custom-tab"""', 'selected_className': '"""custom-tab--selected"""'}), "(id='Dimension-reduction-tab', label='Dimension Reduction', value=\n 'tab7', style=tab_style, selected_style=tab_selected_style, className=\n 'custom-tab', selected_className='custom-tab--selected')\n", (9046, 9248), True, 'import dash_core_components as dcc\n'), ((9910, 10061), 'dash_html_components.A', 'html.A', (['"""ML Studio Data Explorer"""'], {'href': '"""https://github.com/decisionscients/ml-studio"""', 'style': "{'text-decoration': 'none', 'color': 'inherit'}"}), "('ML Studio Data Explorer', href=\n 'https://github.com/decisionscients/ml-studio', style={\n 'text-decoration': 'none', 'color': 'inherit'})\n", (9916, 10061), True, 'import dash_html_components as html\n'), 
((10247, 10382), 'dash_html_components.Img', 'html.Img', ([], {'src': '"""https://s3-us-west-1.amazonaws.com/plotly-tutorials/logo/new-branding/dash-logo-by-plotly-stripe-inverted.png"""'}), "(src=\n 'https://s3-us-west-1.amazonaws.com/plotly-tutorials/logo/new-branding/dash-logo-by-plotly-stripe-inverted.png'\n )\n", (10255, 10382), True, 'import dash_html_components as html\n'), ((10791, 10851), 'dash_core_components.Graph', 'dcc.Graph', ([], {'id': '"""graph-sklearn-svm"""', 'style': "{'display': 'none'}"}), "(id='graph-sklearn-svm', style={'display': 'none'})\n", (10800, 10851), True, 'import dash_core_components as dcc\n'), ((11299, 11619), 'ml_studio.utils.visual.NamedDropdown', 'drc.NamedDropdown', ([], {'name': '"""Select Data Type"""', 'id': '"""dropdown-select-datatype"""', 'options': "[{'label': 'Regression', 'value': 'regression'}, {'label':\n 'Binary Classification', 'value': 'binary'}, {'label':\n 'Multiclass Classification', 'value': 'multiclass'}]", 'clearable': '(False)', 'searchable': '(False)', 'value': '"""regression"""'}), "(name='Select Data Type', id='dropdown-select-datatype',\n options=[{'label': 'Regression', 'value': 'regression'}, {'label':\n 'Binary Classification', 'value': 'binary'}, {'label':\n 'Multiclass Classification', 'value': 'multiclass'}], clearable=False,\n searchable=False, value='regression')\n", (11316, 11619), True, 'import ml_studio.utils.visual as drc\n'), ((11971, 12439), 'ml_studio.utils.visual.NamedDropdown', 'drc.NamedDropdown', ([], {'name': '"""Select Dataset"""', 'id': '"""dropdown-select-dataset"""', 'options': "[{'label': 'California Housing', 'value': 'california'}, {'label':\n 'Million Song Dataset', 'value': 'msd'}, {'label':\n 'Online News Popularity', 'value': 'online_news'}, {'label':\n 'Speed Dating', 'value': 'speed_dating'}, {'label': 'Regression',\n 'value': 'regression'}, {'label': 'Binary', 'value': 'binary'}]", 'clearable': '(False)', 'searchable': '(False)', 'value': '"""california"""'}), 
"(name='Select Dataset', id='dropdown-select-dataset',\n options=[{'label': 'California Housing', 'value': 'california'}, {\n 'label': 'Million Song Dataset', 'value': 'msd'}, {'label':\n 'Online News Popularity', 'value': 'online_news'}, {'label':\n 'Speed Dating', 'value': 'speed_dating'}, {'label': 'Regression',\n 'value': 'regression'}, {'label': 'Binary', 'value': 'binary'}],\n clearable=False, searchable=False, value='california')\n", (11988, 12439), True, 'import ml_studio.utils.visual as drc\n'), ((12950, 13145), 'textwrap.dedent', 'dedent', (['"""\n [Click here](https://github.com/decisionscients/ml-studio) to visit the project repo, and learn about how to use the app.\n """'], {}), '(\n """\n [Click here](https://github.com/decisionscients/ml-studio) to visit the project repo, and learn about how to use the app.\n """\n )\n', (12956, 13145), False, 'from textwrap import dedent\n')] |
#
# Copyright (c) 2013 <NAME>, Inc. All rights reserved.
#
from setuptools import setup, find_packages
# Package metadata: registers the ProcessConnectivity alarm handler for every
# UVE object type it monitors, under the 'contrail.analytics.alarms' group.
setup(
    name='alarm_process_connectivity',
    version='0.1dev',
    packages=find_packages(),
    zip_safe=False,
    long_description="ProcessConnectivity alarm",
    entry_points={
        'contrail.analytics.alarms': [
            'ObjectCollectorInfo = alarm_process_connectivity.main:ProcessConnectivity',
            'ObjectVRouter = alarm_process_connectivity.main:ProcessConnectivity',
            'ObjectConfigNode = alarm_process_connectivity.main:ProcessConnectivity',
            'ObjectBgpRouter = alarm_process_connectivity.main:ProcessConnectivity',
            'ObjectDatabaseInfo = alarm_process_connectivity.main:ProcessConnectivity',
        ],
    },
)
| [
"setuptools.find_packages"
] | [((186, 201), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (199, 201), False, 'from setuptools import setup, find_packages\n')] |
"""
Base class for fenpei job; this should be considered abstract.
Your custom job(s) should inherit from this job and extend the relevant methods, such as::
* is_prepared
* is_complete
* prepare
* start
* result
* summary
"""
from re import match
from sys import stdout
from bardeen.system import mkdirp
from time import time
from os import remove
from os.path import join, isdir
from shutil import rmtree
from .utils import CALC_DIR
class Job(object):
    """
    Base class for fenpei jobs; this should be considered abstract.

    Custom jobs should inherit from this class and extend the relevant
    methods, such as is_prepared, is_complete, prepare, start, result
    and summary.
    """
    # Job life-cycle states, ordered by progress through the pipeline.
    CRASHED, NONE, PREPARED, RUNNING, COMPLETED = -1, 0, 1, 2, 3
    status_names = {-1: 'crashed', 0: 'nothing', 1: 'prepared', 2: 'running', 3: 'completed'}

    queue = None   # the queue this job has been submitted to, if any
    node = None    # node the job was started on; set by _start_post() or load()
    pid = None     # process id on that node; set by _start_post() or load()
    status = None  # one of the life-cycle states above
    # Set a group_cls to report results together with another class (that has the same output format).
    group_cls = None

    def __init__(self, name, weight=1, batch_name=None, force_node=None):
        """
        Create a Job object.

        :param name: unique name consisting of letters, numbers, dot (.) and underscore (_) **YOU need to make sure that name is unique (bijectively maps to job)**
        :param weight: the relative resource use of this job (higher relative weights means fewer jobs will be scheduled together)
        :param batch_name: optionally, a name of the same format as ``name``, which specifies the batch (will be grouped)
        :param force_node: demand a specific node; it's up to the queue whether this is honoured
        """
        # Raw strings so the \w escapes stay valid on future Python versions.
        assert match(r'^\w[/\w\.\+_-]*$', name), 'This is not a valid name: "{0:}"'.format(name)
        assert weight > 0
        self.name = name
        self.weight = weight
        self.cluster = None
        self.batch_name = batch_name
        self.force_node = force_node
        if self.batch_name:
            assert match(r'^\w[\w\._-]*$', batch_name)
            self.directory = join(CALC_DIR, batch_name, name)
        elif batch_name is None:
            raise AssertionError('no batch name for {0:}; provide batch_name argument when creating jobs or set it to False'.format(self))
        elif batch_name is False:
            self.directory = join(CALC_DIR, name)
        self.status = self.NONE

    def __repr__(self):
        if hasattr(self, 'name'):
            return self.name
        return '{0:s} id{1:}'.format(self.__class__.__name__, id(self))

    def _log(self, txt, *args, **kwargs):
        """
        Logging function.

        .queue is not always set, so fall back to stdout when there is no queue.
        """
        if self.queue is None:
            if len(txt.strip()):
                stdout.write('(no queue) ' + txt + '\n')
            else:
                stdout.write('(empty)\n')
        else:
            self.queue._log(txt, *args, **kwargs)

    def save(self):
        """
        Save information about a running job to locate the process.
        """
        assert self.node is not None
        assert self.pid is not None
        with open('%s/node_pid.job' % self.directory, 'w+') as fh:
            fh.write('%s\n%s\n%s\n%s' % (self.name, self.node, self.pid, str(time())))
        self._log('job %s saved' % self, level=3)

    def unsave(self):
        """
        Remove the stored process details (best effort).
        """
        try:
            remove('%s/node_pid.job' % self.directory)
        # os.remove raises OSError (IOError is only an alias of it on Python 3)
        except OSError:
            pass
        self._log('job %s save file removed' % self.name, level=3)

    def load(self):
        """
        Load process details (node and pid) from the cache file written by save().

        :return: True if the file existed and was loaded, False otherwise
        """
        try:
            with open('%s/node_pid.job' % self.directory, 'r') as fh:
                lines = fh.read().splitlines()
                self.node = lines[1]
                self.pid = int(lines[2])
            self._log('job %s loaded' % self.name, level=3)
            return True
        except IOError:
            self._log('job %s save file not found' % self, level=3)
            return False

    def is_prepared(self):
        """
        Whether the job has been prepared; to be overridden by child classes.
        """
        pass

    def is_started(self):
        """
        A job counts as started when it is prepared and its save file loads.
        """
        if not self.is_prepared():
            return False
        return self.load()

    def is_running(self):
        """
        Whether the saved pid is alive on the saved node.

        Only called if at least prepared; requires the job to be in a queue.
        """
        if self.pid is None:
            if not self.load():
                return False
        if not self.queue:
            raise Exception('cannot check if %s is running because it is not in a queue' % self)
        proc_list = self.queue.processes(self.node)
        try:
            return self.pid in [proc['pid'] for proc in proc_list if proc is not None]
        except KeyError:
            raise Exception('node %s for job %s no longer found?' % (self.node, self))

    def is_complete(self):
        """
        Check if job completed successfully.

        Needs to be extended by child class.
        Only called for jobs that are at least prepared.
        """
        return True

    def find_status(self):
        """
        Find status using the is_* methods; the result is cached for ~0.7s
        to avoid repeatedly hitting the queue and the filesystem.
        """
        def check_status_indicators(self):
            if self.is_prepared():
                if self.is_complete():
                    return self.COMPLETED
                elif self.is_started():
                    if self.is_running():
                        return self.RUNNING
                    return self.CRASHED
                return self.PREPARED
            return self.NONE
        if time() - getattr(self, '_last_status_time', time() - 100) > 0.7:
            self.status = check_status_indicators(self)
            setattr(self, '_last_status_time', time())
        return self.status

    def status_str(self):
        """
        Human-readable name of the current status.
        """
        return self.status_names[self.find_status()]

    def prepare(self, silent=False, *args, **kwargs):
        """
        Prepares the job for execution by creating its directory.

        More steps are likely necessary for child classes.
        """
        self.status = self.PREPARED
        if not self.is_prepared():
            if self.batch_name:
                mkdirp(join(CALC_DIR, self.batch_name))
            mkdirp(self.directory)
        if not silent:
            # '{0:}' instead of '{0:s}': the 's' spec raises TypeError on
            # Python 3 for objects that are not str
            self._log('preparing {0:}'.format(self), level=2)
        # child methods add more steps here

    def _start_pre(self, *args, **kwargs):
        """
        Some checks at the beginning of .start().
        """
        if self.is_running() or self.is_complete():
            if not self.queue is None:
                if self.queue.force:
                    if self.is_running():
                        self.kill()
                else:
                    raise AssertionError(('you are trying to restart a job that is running '
                        'or completed ({0:} run={1:} complete={2:}); use restart (-e) to '
                        'skip such jobs or -f to overrule this warning').format(
                        self, self.is_running(), self.is_complete()))
        if not self.is_prepared():
            self.prepare(silent=True)

    def _start_post(self, node, pid, *args, **kwargs):
        """
        Some bookkeeping at the end of .start().
        """
        self.node = node
        self.pid = pid
        self.save()
        if self.is_running():
            # was `self.STATUS = self.RUNNING`, which silently created a new
            # attribute and never actually updated the job status
            self.status = self.RUNNING
        self._log('starting %s on %s with pid %s' % (self, self.node, self.pid), level=2)

    def start(self, node, *args, **kwargs):
        """
        Start the job and store node/pid.
        """
        self._start_pre(*args, **kwargs)
        """
        Your starting code here.
        """
        self._start_post(node, 'pid_here', *args, **kwargs)
        return True

    def fix(self, *args, **kwargs):
        """
        Some code that can be ran to fix jobs, e.g. after bugfixes or updates.

        Needs to be implemented by children for the specific fix applicable (if just restarting is not viable).
        """
        return False

    def kill(self, *args, **kwargs):
        """
        Kills the current job if running using queue methods.

        Any overriding should probably happen in :ref: queue.processes and :ref: queue.stop_job.
        """
        if self.is_running():
            assert self.node is not None
            assert self.pid is not None
            self._log('killing %s: %s on %s' % (self, self.pid, self.node), level=2)
            self.queue.stop_job(node=self.node, pid=self.pid)
            return True
        self._log('job %s not running' % self, level=3)
        return False

    def cleanup(self, skip_conflicts=False, *args, **kwargs):
        """
        Remove the job directory, refusing for running/completed jobs unless forced.

        :param skip_conflicts: silently return False instead of raising for conflicting jobs
        :return: True if the directory was removed
        """
        if self.is_running() or self.is_complete():
            if self.queue is not None and not self.queue.force:
                if skip_conflicts:
                    return False
                raise AssertionError(('you are trying to clean up a job ({0:s}; run={1:} complete={2:}) '
                    'that is running or completed; use -f to force this, or -e to skip these jobs (it '
                    'could also mean that two jobs are use the same name and batchname).').format(
                    self.name, self.is_running(), self.is_complete()))
        if self.batch_name is not False and isdir(self.directory):
            rmtree(self.directory, ignore_errors=True)
            # '{0:}' instead of '{0:s}' — see prepare()
            self._log('cleaned up {0:}'.format(self), level=2)
            return True
        return False

    def result(self, *args, **kwargs):
        """
        Collects the result of the completed job.

        :return: result of the job; only requirement is that the result be compatible with :ref: summary (and other jobs), but a dict is suggested.
        """
        if not self.is_complete():
            return None
        return None

    def crash_reason(self, verbosity=0, *args, **kwargs):
        """
        Find the reason the job has crashed. Should only be called for crashed jobs (by _crash_reason_if_crashed).
        """
        if verbosity <= 0:
            return '??'
        else:
            return '?? reason for crash not known'

    def _crash_reason_if_crashed(self, verbosity=0, *args, **kwargs):
        """
        Return crash_reason() for crashed jobs, None otherwise.
        """
        if not self.find_status() == self.CRASHED:
            return None
        return self.crash_reason(verbosity=verbosity, *args, **kwargs)
| [
"os.path.join",
"re.match",
"sys.stdout.write",
"os.remove",
"os.path.isdir",
"shutil.rmtree",
"time.time",
"bardeen.system.mkdirp"
] | [((1400, 1435), 're.match', 'match', (['"""^\\\\w[/\\\\w\\\\.\\\\+_-]*$"""', 'name'], {}), "('^\\\\w[/\\\\w\\\\.\\\\+_-]*$', name)\n", (1405, 1435), False, 'from re import match\n'), ((1660, 1697), 're.match', 'match', (['"""^\\\\w[\\\\w\\\\._-]*$"""', 'batch_name'], {}), "('^\\\\w[\\\\w\\\\._-]*$', batch_name)\n", (1665, 1697), False, 'from re import match\n'), ((1715, 1747), 'os.path.join', 'join', (['CALC_DIR', 'batch_name', 'name'], {}), '(CALC_DIR, batch_name, name)\n', (1719, 1747), False, 'from os.path import join, isdir\n'), ((2863, 2905), 'os.remove', 'remove', (["('%s/node_pid.job' % self.directory)"], {}), "('%s/node_pid.job' % self.directory)\n", (2869, 2905), False, 'from os import remove\n'), ((5033, 5055), 'bardeen.system.mkdirp', 'mkdirp', (['self.directory'], {}), '(self.directory)\n', (5039, 5055), False, 'from bardeen.system import mkdirp\n'), ((7574, 7595), 'os.path.isdir', 'isdir', (['self.directory'], {}), '(self.directory)\n', (7579, 7595), False, 'from os.path import join, isdir\n'), ((7600, 7642), 'shutil.rmtree', 'rmtree', (['self.directory'], {'ignore_errors': '(True)'}), '(self.directory, ignore_errors=True)\n', (7606, 7642), False, 'from shutil import rmtree\n'), ((2319, 2359), 'sys.stdout.write', 'stdout.write', (["('(no queue) ' + txt + '\\n')"], {}), "('(no queue) ' + txt + '\\n')\n", (2331, 2359), False, 'from sys import stdout\n'), ((2373, 2398), 'sys.stdout.write', 'stdout.write', (['"""(empty)\n"""'], {}), "('(empty)\\n')\n", (2385, 2398), False, 'from sys import stdout\n'), ((4502, 4508), 'time.time', 'time', ([], {}), '()\n', (4506, 4508), False, 'from time import time\n'), ((4652, 4658), 'time.time', 'time', ([], {}), '()\n', (4656, 4658), False, 'from time import time\n'), ((1953, 1973), 'os.path.join', 'join', (['CALC_DIR', 'name'], {}), '(CALC_DIR, name)\n', (1957, 1973), False, 'from os.path import join, isdir\n'), ((4997, 5028), 'os.path.join', 'join', (['CALC_DIR', 'self.batch_name'], {}), '(CALC_DIR, 
self.batch_name)\n', (5001, 5028), False, 'from os.path import join, isdir\n'), ((4546, 4552), 'time.time', 'time', ([], {}), '()\n', (4550, 4552), False, 'from time import time\n'), ((2730, 2736), 'time.time', 'time', ([], {}), '()\n', (2734, 2736), False, 'from time import time\n')] |
"""
This file is part of nucypher.
nucypher is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
nucypher is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with nucypher. If not, see <https://www.gnu.org/licenses/>.
"""
from functools import partial
import maya
import pytest
import pytest_twisted
from twisted.internet.threads import deferToThread
from nucypher.network.middleware import RestMiddleware
from nucypher.utilities.sandbox.ursula import make_federated_ursulas
def test_proper_seed_node_instantiation(ursula_federated_test_config):
    """A node that is given a seed node learns about it after one immediate learning round."""
    make_lonely_ursula = partial(make_federated_ursulas,
                                ursula_config=ursula_federated_test_config,
                                quantity=1,
                                know_each_other=False)

    seed_ursula = make_lonely_ursula().pop()
    seed_metadata = seed_ursula.seed_node_metadata()
    learner = make_lonely_ursula(seed_nodes=[seed_metadata]).pop()

    # The learner starts out knowing nobody...
    assert not learner.known_nodes
    # ...but a single immediate learning round is enough to find the seed node.
    learner.start_learning_loop(now=True)
    assert seed_ursula in learner.known_nodes.values()
@pytest_twisted.inlineCallbacks
def test_get_cert_from_running_seed_node(ursula_federated_test_config):
    """A learner that contacts a live seed node over TLS ends up holding the
    exact certificate the seed node's deployer was started with."""
    lonely_ursula_maker = partial(make_federated_ursulas,
                                 ursula_config=ursula_federated_test_config,
                                 quantity=1,
                                 know_each_other=False)
    firstula = lonely_ursula_maker().pop()
    node_deployer = firstula.get_deployer()

    node_deployer.addServices()
    node_deployer.catalogServers(node_deployer.hendrix)
    node_deployer.start()

    certificate_as_deployed = node_deployer.cert.to_cryptography()

    firstula_as_seed_node = firstula.seed_node_metadata()
    any_other_ursula = lonely_ursula_maker(seed_nodes=[firstula_as_seed_node],
                                           network_middleware=RestMiddleware()).pop()
    assert not any_other_ursula.known_nodes

    def start_lonely_learning_loop():
        any_other_ursula.start_learning_loop()
        start = maya.now()
        # Poll until the seed node shows up, failing after ~2 seconds.
        # ('x not in y' instead of 'not x in y' — same semantics, idiomatic form.)
        while firstula not in any_other_ursula.known_nodes.values():
            passed = maya.now() - start
            if passed.seconds > 2:
                pytest.fail("Didn't find the seed node.")

    yield deferToThread(start_lonely_learning_loop)

    assert firstula in any_other_ursula.known_nodes.values()
    certificate_as_learned = list(any_other_ursula.known_nodes.values())[0].certificate
    assert certificate_as_learned == certificate_as_deployed
    any_other_ursula.stop_learning_loop()
| [
"twisted.internet.threads.deferToThread",
"pytest.fail",
"functools.partial",
"nucypher.network.middleware.RestMiddleware",
"maya.now"
] | [((995, 1109), 'functools.partial', 'partial', (['make_federated_ursulas'], {'ursula_config': 'ursula_federated_test_config', 'quantity': '(1)', 'know_each_other': '(False)'}), '(make_federated_ursulas, ursula_config=ursula_federated_test_config,\n quantity=1, know_each_other=False)\n', (1002, 1109), False, 'from functools import partial\n'), ((1684, 1798), 'functools.partial', 'partial', (['make_federated_ursulas'], {'ursula_config': 'ursula_federated_test_config', 'quantity': '(1)', 'know_each_other': '(False)'}), '(make_federated_ursulas, ursula_config=ursula_federated_test_config,\n quantity=1, know_each_other=False)\n', (1691, 1798), False, 'from functools import partial\n'), ((2537, 2547), 'maya.now', 'maya.now', ([], {}), '()\n', (2545, 2547), False, 'import maya\n'), ((2761, 2802), 'twisted.internet.threads.deferToThread', 'deferToThread', (['start_lonely_learning_loop'], {}), '(start_lonely_learning_loop)\n', (2774, 2802), False, 'from twisted.internet.threads import deferToThread\n'), ((2638, 2648), 'maya.now', 'maya.now', ([], {}), '()\n', (2646, 2648), False, 'import maya\n'), ((2708, 2749), 'pytest.fail', 'pytest.fail', (['"""Didn\'t find the seed node."""'], {}), '("Didn\'t find the seed node.")\n', (2719, 2749), False, 'import pytest\n'), ((2367, 2383), 'nucypher.network.middleware.RestMiddleware', 'RestMiddleware', ([], {}), '()\n', (2381, 2383), False, 'from nucypher.network.middleware import RestMiddleware\n')] |
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.utils.rnn import pad_packed_sequence
import pdb
import math
# Fix the global RNG seed so parameter initialisation is reproducible across runs.
torch.manual_seed(1)
class GRUAudio(nn.Module):
    """GRU classifier over audio feature sequences.

    The final hidden states of every layer/direction are concatenated and
    pushed through one linear layer to produce class logits.
    """

    def __init__(self, num_features, hidden_dim, num_layers, dropout_rate, num_labels, batch_size, bidirectional=False):
        super(GRUAudio, self).__init__()
        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
        self.num_features = num_features
        self.hidden_dim = hidden_dim
        self.num_layers = num_layers
        self.dropout_rate = dropout_rate
        self.num_labels = num_labels
        self.batch_size = batch_size
        self.bidirectional = bidirectional
        # one direction normally, two when the GRU is bidirectional
        self.num_directions = 2 if bidirectional else 1
        recurrent = nn.GRU(num_features, hidden_dim, num_layers, batch_first=True,
                        dropout=dropout_rate, bidirectional=bidirectional)
        self.gru = recurrent.to(self.device)
        # the classifier consumes the concatenation of all final hidden states
        head = nn.Linear(hidden_dim * num_layers * self.num_directions, num_labels)
        self.classification = head.to(self.device)

    def forward(self, input, target, train=True, seq_length=False):
        """Return (logits, cross-entropy loss) for a batch-first sequence batch."""
        input = input.to(self.device)
        target = target.to(self.device)
        # randomly initialised hidden state: one slot per layer/direction
        h0 = torch.randn(self.num_layers * self.num_directions, self.batch_size, self.hidden_dim).to(self.device)
        _, final_hidden = self.gru(input, h0)
        # (layers*dirs, batch, hidden) -> (batch, layers*dirs*hidden)
        final_hidden = final_hidden.permute([1, 0, 2])
        flat = final_hidden.reshape(final_hidden.shape[0], -1)
        logits = self.classification(flat)
        # targets arrive one-hot; cross_entropy wants class indices
        loss = F.cross_entropy(logits, torch.max(target, 1)[1])
        return logits, loss
class AttGRU(nn.Module):
    """GRU classifier with learned attention pooling over time steps.

    A learnable context vector ``u`` scores each (padded) GRU output;
    padded positions are masked out before the softmax so they receive
    (essentially) zero attention weight.
    """

    def __init__(self, num_features, hidden_dim, num_layers, dropout_rate, num_labels, batch_size, bidirectional=False):
        super(AttGRU, self).__init__()
        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
        self.num_features = num_features
        self.hidden_dim = hidden_dim
        self.num_layers = num_layers
        self.dropout_rate = dropout_rate
        self.num_labels = num_labels
        self.batch_size = batch_size
        self.bidirectional = bidirectional
        self.num_directions = 1 + self.bidirectional
        # attention context vector, one weight per GRU output feature
        self.u = nn.Parameter(torch.zeros((self.num_directions * self.hidden_dim)), requires_grad=True)
        self.gru = nn.GRU(self.num_features, self.hidden_dim, self.num_layers, batch_first=True,
                          dropout=self.dropout_rate, bidirectional=self.bidirectional).to(self.device)
        self.classification = nn.Linear(self.hidden_dim * self.num_directions, self.num_labels).to(self.device)

    def forward(self, input, target, train=True, seq_length=False):
        """Return (logits, cross-entropy loss).

        :param input: packed, batch-first sequence batch
        :param target: one-hot target matrix (batch, num_labels)
        :param seq_length: per-example true lengths, used to mask padding
        """
        input = input.to(self.device)
        target = target.to(self.device)
        hidden = torch.zeros(self.num_layers * self.num_directions, self.batch_size, self.hidden_dim)
        hidden = hidden.to(self.device)
        out, hn = self.gru(input, hidden)
        out, _ = pad_packed_sequence(out, batch_first=True)
        # build a (batch, max_len) mask that is True on padded positions
        mask = []
        for i in range(len(seq_length)):
            mask.append([0] * int(seq_length[i].item()) + [1] * int(out.shape[1] - seq_length[i].item()))
        # bool mask: masked_fill_ no longer accepts byte (uint8) masks in modern torch
        mask = torch.tensor(mask, dtype=torch.bool)
        mask = mask.to(self.device)
        x = torch.matmul(out, self.u)
        # push padded scores to a huge negative value so softmax gives them ~0 weight
        x = x.masked_fill_(mask, -1e18)
        alpha = F.softmax(x, dim=1)
        input_linear = torch.sum(torch.matmul(alpha, out), dim=1)
        out = self.classification(input_linear)
        loss = F.cross_entropy(out, torch.max(target, 1)[1])
        return out, loss
class MeanPool(nn.Module):
    """GRU classifier that averages the unpacked outputs over time."""

    def __init__(self, num_features, hidden_dim, num_layers, dropout_rate, num_labels, batch_size, bidirectional=False):
        super(MeanPool, self).__init__()
        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
        self.num_features = num_features
        self.hidden_dim = hidden_dim
        self.num_layers = num_layers
        self.dropout_rate = dropout_rate
        self.num_labels = num_labels
        self.batch_size = batch_size
        self.bidirectional = bidirectional
        self.num_directions = 2 if bidirectional else 1
        recurrent = nn.GRU(num_features, hidden_dim, num_layers, batch_first=True,
                        dropout=dropout_rate, bidirectional=bidirectional)
        self.gru = recurrent.to(self.device)
        self.classification = nn.Linear(hidden_dim * self.num_directions, num_labels).to(self.device)

    def forward(self, input, target, train=True, seq_length=False):
        """Return (logits, cross-entropy loss); `input` is a packed, batch-first batch."""
        input = input.to(self.device)
        target = target.to(self.device)
        h0 = torch.zeros(self.num_layers * self.num_directions, self.batch_size, self.hidden_dim).to(self.device)
        packed_out, _ = self.gru(input, h0)
        # unpack, then average over the (padded) time dimension
        # NOTE: the mean divides by the padded max length, so shorter
        # sequences are implicitly down-weighted
        padded, _ = pad_packed_sequence(packed_out, batch_first=True)
        pooled = torch.mean(padded, dim=1)
        logits = self.classification(pooled)
        loss = F.cross_entropy(logits, torch.max(target, 1)[1])
        return logits, loss
class LSTM_Audio(nn.Module):
    """LSTM classifier that mean-pools the unpacked outputs over time."""

    def __init__(self, num_features, hidden_dim, num_layers, dropout_rate, num_labels, batch_size, bidirectional=False):
        super(LSTM_Audio, self).__init__()
        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
        self.num_features = num_features
        self.hidden_dim = hidden_dim
        self.num_layers = num_layers
        self.dropout_rate = dropout_rate
        self.num_labels = num_labels
        self.batch_size = batch_size
        self.bidirectional = bidirectional
        self.num_directions = 2 if bidirectional else 1
        recurrent = nn.LSTM(num_features, hidden_dim, num_layers, batch_first=True,
                        dropout=dropout_rate, bidirectional=bidirectional)
        self.lstm = recurrent.to(self.device)
        self.classification = nn.Linear(hidden_dim * self.num_directions, num_labels).to(self.device)

    def forward(self, input, target, seq_length, train=True):
        """Return (logits, cross-entropy loss); `input` is a packed, batch-first batch."""
        input = input.to(self.device)
        target = target.to(self.device)
        # hidden state defaults to zeros inside nn.LSTM
        packed_out, _ = self.lstm(input)
        # unpack, then average over the (padded) time dimension
        padded, _ = pad_packed_sequence(packed_out, batch_first=True)
        pooled = torch.mean(padded, dim=1)
        logits = self.classification(pooled)
        loss = F.cross_entropy(logits, torch.max(target, 1)[1])
        return logits, loss
class ATT(nn.Module):
    """LSTM classifier with learned attention pooling and a batch-normalised head.

    Attention scores come from tanh(attn(out)) . u, with padded positions
    masked out before the softmax. The pooled vector then passes through
    two linear layers, each followed by batch normalisation.
    """

    def __init__(self, num_features, hidden_dim, num_layers, dropout_rate, num_labels, batch_size, bidirectional=False):
        super(ATT, self).__init__()
        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
        self.num_features = num_features
        self.hidden_dim = hidden_dim
        self.num_layers = num_layers
        self.dropout_rate = dropout_rate
        self.num_labels = num_labels
        self.batch_size = batch_size
        self.bidirectional = bidirectional
        self.num_directions = 1 + self.bidirectional
        self.attn = nn.Linear(self.hidden_dim * self.num_directions, hidden_dim)
        # attention context vector, re-initialised to N(0, 1/sqrt(dim))
        self.u = nn.Parameter(torch.randn(self.hidden_dim))
        stdv = 1. / math.sqrt(self.u.shape[0])
        self.u.data.normal_(mean=0, std=stdv)
        self.lstm = nn.LSTM(self.num_features, self.hidden_dim, self.num_layers, batch_first=True, dropout=self.dropout_rate, bidirectional=self.bidirectional).to(self.device)
        self.fc1 = nn.Linear(self.hidden_dim * self.num_directions, self.hidden_dim).to(self.device)
        self.batch1 = nn.BatchNorm1d(self.hidden_dim)
        self.fc2 = nn.Linear(self.hidden_dim, self.num_labels).to(self.device)
        self.batch2 = nn.BatchNorm1d(self.num_labels)
        self.batchatt = nn.BatchNorm1d(self.hidden_dim * self.num_directions)

    def forward(self, input, target, seq_length, train=True):
        """Return (raw fc2 logits, loss computed on their batch-normalised version).

        :param input: packed, batch-first sequence batch
        :param target: one-hot target matrix (batch, num_labels)
        :param seq_length: per-example true lengths, used to mask padding
        """
        input = input.to(self.device)
        target = target.to(self.device)
        out, hn = self.lstm(input)
        out, _ = pad_packed_sequence(out, batch_first=True)
        # build a (batch, max_len) mask that is True on padded positions
        mask = []
        for i in range(len(seq_length)):
            mask.append([0] * int(seq_length[i].item()) + [1] * int(out.shape[1] - seq_length[i].item()))
        # bool mask: masked_fill_ no longer accepts byte (uint8) masks in modern torch
        mask = torch.tensor(mask, dtype=torch.bool)
        mask = mask.to(self.device)
        out_att = torch.tanh(self.attn(out))
        x = torch.matmul(out_att, self.u)
        # push padded scores to a huge negative value so softmax gives them ~0 weight
        x = x.masked_fill_(mask, -1e18)
        alpha = F.softmax(x, dim=1)
        input_linear = torch.sum(torch.matmul(alpha, out), dim=1)
        input_linear_normalized = self.batchatt(input_linear)
        out_1 = self.fc1(input_linear_normalized)
        out_1_normalized = self.batch1(out_1)
        out_2 = self.fc2(out_1_normalized)
        out_2_normalized = self.batch2(out_2)
        # NOTE: the loss uses the batch-normalised logits while the raw fc2
        # output is what gets returned, as in the original design
        loss = F.cross_entropy(out_2_normalized, torch.max(target, 1)[1])
        return out_2, loss
class Mean_Pool_2(nn.Module):
    """LSTM classifier: mean-pool over time, then a batch-normalised two-layer head."""

    def __init__(self, num_features, hidden_dim, num_layers, dropout_rate, num_labels, batch_size, bidirectional=False):
        super(Mean_Pool_2, self).__init__()
        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
        self.num_features = num_features
        self.hidden_dim = hidden_dim
        self.num_layers = num_layers
        self.dropout_rate = dropout_rate
        self.num_labels = num_labels
        self.batch_size = batch_size
        self.bidirectional = bidirectional
        self.num_directions = 2 if bidirectional else 1
        self.lstm = nn.LSTM(num_features, hidden_dim, num_layers, batch_first=True,
                        dropout=dropout_rate, bidirectional=bidirectional).to(self.device)
        feat_dim = hidden_dim * self.num_directions
        self.fc1 = nn.Linear(feat_dim, hidden_dim).to(self.device)
        self.batch1 = nn.BatchNorm1d(hidden_dim)
        self.fc2 = nn.Linear(hidden_dim, num_labels).to(self.device)
        self.batch2 = nn.BatchNorm1d(num_labels)
        self.batchatt = nn.BatchNorm1d(feat_dim)

    def forward(self, input, target, seq_length, train=True):
        """Return (raw fc2 logits, loss on their batch-normalised version).

        `input` is a packed, batch-first sequence batch.
        """
        input = input.to(self.device)
        target = target.to(self.device)
        packed_out, _ = self.lstm(input)
        padded, _ = pad_packed_sequence(packed_out, batch_first=True)
        pooled = torch.mean(padded, dim=1)
        normalized = self.batchatt(pooled)
        hidden_repr = self.batch1(self.fc1(normalized))
        logits = self.fc2(hidden_repr)
        logits_normalized = self.batch2(logits)
        # NOTE: the loss uses the batch-normalised logits while the raw fc2
        # output is what gets returned
        loss = F.cross_entropy(logits_normalized, torch.max(target, 1)[1])
        return logits, loss
class ConvLSTMCell(nn.Module):
    """Single 1-D convolutional LSTM cell with peephole terms.

    Gates are realised with Conv1d layers; the pooled-and-normalized hidden
    state is returned alongside the raw hidden and cell states.
    """

    def __init__(self, input_channels, hidden_channels, kernel_size, kernel_size_pool=8, stride_pool=4):
        """Create the gate convolutions plus max-pool and batch-norm layers.

        Parameters
        ----------
        input_channels : int     -- channels of the input feature map
        hidden_channels : int    -- channels of the hidden/cell state (even)
        kernel_size : int        -- conv kernel size (odd, for 'same' padding)
        kernel_size_pool : int   -- max-pool window applied to the output
        stride_pool : int        -- max-pool stride
        """
        super(ConvLSTMCell, self).__init__()
        assert hidden_channels % 2 == 0
        self.input_channels = input_channels
        self.hidden_channels = hidden_channels
        self.kernel_size = kernel_size
        self.stride = 1
        # 'same' padding for odd kernel sizes: output length == input length.
        self.padding = int((kernel_size - 1) / 2)
        self.kernel_size_pool = kernel_size_pool
        self.stride_pool = stride_pool
        # Input-to-hidden convolutions carry the bias; hidden-to-hidden do not.
        self.Wxi = nn.Conv1d(self.input_channels, self.hidden_channels, self.kernel_size, self.stride, self.padding, bias=True)
        self.Whi = nn.Conv1d(self.hidden_channels, self.hidden_channels, self.kernel_size, self.stride, self.padding, bias=False)
        self.Wxf = nn.Conv1d(self.input_channels, self.hidden_channels, self.kernel_size, self.stride, self.padding, bias=True)
        self.Whf = nn.Conv1d(self.hidden_channels, self.hidden_channels, self.kernel_size, self.stride, self.padding, bias=False)
        self.Wxc = nn.Conv1d(self.input_channels, self.hidden_channels, self.kernel_size, self.stride, self.padding, bias=True)
        self.Whc = nn.Conv1d(self.hidden_channels, self.hidden_channels, self.kernel_size, self.stride, self.padding, bias=False)
        self.Wxo = nn.Conv1d(self.input_channels, self.hidden_channels, self.kernel_size, self.stride, self.padding, bias=True)
        self.Who = nn.Conv1d(self.hidden_channels, self.hidden_channels, self.kernel_size, self.stride, self.padding, bias=False)
        self.max_pool = nn.MaxPool1d(self.kernel_size_pool, stride=self.stride_pool)
        self.batch = nn.BatchNorm1d(self.hidden_channels)
        # Peephole weights, created lazily in init_hidden once shapes are known.
        self.Wci = None
        self.Wcf = None
        self.Wco = None

    def forward(self, x, h, c):
        """Advance the cell one timestep.

        x : (batch, input_channels, length) input feature map
        h, c : (batch, hidden_channels, length) previous hidden/cell states

        Returns (pooled+normalized hidden, hidden, cell) states.
        """
        ci = torch.sigmoid(self.Wxi(x) + self.Whi(h) + c * self.Wci)
        cf = torch.sigmoid(self.Wxf(x) + self.Whf(h) + c * self.Wcf)
        cc = cf * c + ci * torch.tanh(self.Wxc(x) + self.Whc(h))
        co = torch.sigmoid(self.Wxo(x) + self.Who(h) + cc * self.Wco)
        ch = co * torch.tanh(cc)
        # Pool along the length dimension, then batch-normalize.
        ch_pool = self.batch(self.max_pool(ch))
        return ch_pool, ch, cc

    def init_hidden(self, batch_size, hidden, shape):
        """Create zero-initialized peephole weights and initial (h, c) states.

        BUG FIX: these tensors were previously created with a hard-coded
        .cuda() call, which crashed on CPU-only machines; follow the cell's
        own parameter device instead.  Note the original code wrapped them in
        nn.Parameter but .cuda() returned plain (unregistered) tensors, so
        they were never learnable -- that non-learnable behavior is preserved.
        """
        device = self.Wxi.weight.device
        if self.Wci is None:
            self.Wci = torch.zeros(1, hidden, shape, device=device)
            self.Wcf = torch.zeros(1, hidden, shape, device=device)
            self.Wco = torch.zeros(1, hidden, shape, device=device)
        return (torch.zeros(batch_size, hidden, shape, device=device),
                torch.zeros(batch_size, hidden, shape, device=device))
class ConvLSTM(nn.Module):
    """Stacked 1-D convolutional LSTM classifier.

    Unrolls a stack of ConvLSTMCell layers over `step` timesteps, mean-pools
    the per-step outputs, and classifies the flattened result with a linear
    layer plus cross-entropy loss.
    """

    # input_channels corresponds to the first input feature map
    # hidden state is a list of succeeding lstm layers.
    # kernel size is also a list, same length as hidden_channels
    def __init__(self, input_channels, hidden_channels, kernel_size, step, num_labels=4, linear_dim=16 * 18):
        """Build the cell stack and the final classification layer.

        Parameters
        ----------
        input_channels : int     -- channels of the first input feature map
        hidden_channels : list   -- hidden channels per stacked cell
        kernel_size : list       -- conv kernel size per cell (same length)
        step : int               -- number of timesteps to unroll in forward
        num_labels : int         -- number of output classes
                                    (GENERALIZED: was hard-coded to 4)
        linear_dim : int         -- flattened feature size fed to the
                                    classifier (GENERALIZED: was hard-coded
                                    to 16*18); must match the last cell's
                                    channels * pooled length
        """
        super(ConvLSTM, self).__init__()
        assert len(hidden_channels) == len(kernel_size), "size mismatch"
        self.input_channels = [input_channels] + hidden_channels
        self.hidden_channels = hidden_channels
        self.kernel_size = kernel_size
        self.num_layers = len(hidden_channels)
        self.step = step
        self._all_layers = []
        self.num_labels = num_labels
        self.linear_dim = linear_dim
        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
        self.classification = nn.Linear(self.linear_dim, self.num_labels)
        for i in range(self.num_layers):
            name = 'cell{}'.format(i)
            cell = ConvLSTMCell(self.input_channels[i], self.hidden_channels[i], self.kernel_size[i])
            setattr(self, name, cell)
            self._all_layers.append(cell)

    def forward(self, input, target):
        """Unroll the cell stack over self.step timesteps and classify.

        input  : indexable sequence of per-step tensors (batch, channels, length)
        target : one-hot labels; argmax is taken for cross-entropy.

        Returns (logits, loss), each with an extra leading dim of size 1.
        """
        # input should be a list of inputs, like a time stamp, maybe 1280 for 100 times.
        internal_state = []
        outputs = []
        for step in range(self.step):
            x = input[step]
            for i in range(self.num_layers):
                name = 'cell{}'.format(i)
                if step == 0:
                    # Lazily size each layer's hidden state from its first input.
                    bsize, _, shape = x.size()
                    (h, c) = getattr(self, name).init_hidden(batch_size=bsize, hidden=self.hidden_channels[i],
                                                             shape=shape)
                    internal_state.append((h, c))
                # do forward
                (h, c) = internal_state[i]
                x, new_h, new_c = getattr(self, name)(x, h, c)
                internal_state[i] = (new_h, new_c)
            outputs.append(x)
        ## mean pooling and loss function
        out = [torch.unsqueeze(o, dim=3) for o in outputs]
        out = torch.flatten(torch.mean(torch.cat(out, dim=3), dim=3), start_dim=1)
        out = self.classification(out)
        loss = F.cross_entropy(out, torch.max(target, 1)[1].to(self.device))
        return torch.unsqueeze(out, dim=0), torch.unsqueeze(loss, dim=0)
"torch.max",
"math.sqrt",
"torch.nn.BatchNorm1d",
"torch.cuda.is_available",
"torch.ByteTensor",
"torch.nn.functional.softmax",
"torch.nn.GRU",
"torch.nn.MaxPool1d",
"torch.tanh",
"torch.mean",
"torch.unsqueeze",
"torch.nn.LSTM",
"torch.matmul",
"torch.nn.utils.rnn.pad_packed_sequence",
... | [((141, 161), 'torch.manual_seed', 'torch.manual_seed', (['(1)'], {}), '(1)\n', (158, 161), False, 'import torch\n'), ((1322, 1411), 'torch.randn', 'torch.randn', (['(self.num_layers * self.num_directions)', 'self.batch_size', 'self.hidden_dim'], {}), '(self.num_layers * self.num_directions, self.batch_size, self.\n hidden_dim)\n', (1333, 1411), False, 'import torch\n'), ((3097, 3186), 'torch.zeros', 'torch.zeros', (['(self.num_layers * self.num_directions)', 'self.batch_size', 'self.hidden_dim'], {}), '(self.num_layers * self.num_directions, self.batch_size, self.\n hidden_dim)\n', (3108, 3186), False, 'import torch\n'), ((3282, 3324), 'torch.nn.utils.rnn.pad_packed_sequence', 'pad_packed_sequence', (['out'], {'batch_first': '(True)'}), '(out, batch_first=True)\n', (3301, 3324), False, 'from torch.nn.utils.rnn import pad_packed_sequence\n'), ((3539, 3561), 'torch.ByteTensor', 'torch.ByteTensor', (['mask'], {}), '(mask)\n', (3555, 3561), False, 'import torch\n'), ((3611, 3636), 'torch.matmul', 'torch.matmul', (['out', 'self.u'], {}), '(out, self.u)\n', (3623, 3636), False, 'import torch\n'), ((3693, 3712), 'torch.nn.functional.softmax', 'F.softmax', (['x'], {'dim': '(1)'}), '(x, dim=1)\n', (3702, 3712), True, 'import torch.nn.functional as F\n'), ((5124, 5213), 'torch.zeros', 'torch.zeros', (['(self.num_layers * self.num_directions)', 'self.batch_size', 'self.hidden_dim'], {}), '(self.num_layers * self.num_directions, self.batch_size, self.\n hidden_dim)\n', (5135, 5213), False, 'import torch\n'), ((5309, 5351), 'torch.nn.utils.rnn.pad_packed_sequence', 'pad_packed_sequence', (['out'], {'batch_first': '(True)'}), '(out, batch_first=True)\n', (5328, 5351), False, 'from torch.nn.utils.rnn import pad_packed_sequence\n'), ((5367, 5389), 'torch.mean', 'torch.mean', (['out'], {'dim': '(1)'}), '(out, dim=1)\n', (5377, 5389), False, 'import torch\n'), ((6931, 6973), 'torch.nn.utils.rnn.pad_packed_sequence', 'pad_packed_sequence', (['out'], {'batch_first': '(True)'}), 
'(out, batch_first=True)\n', (6950, 6973), False, 'from torch.nn.utils.rnn import pad_packed_sequence\n'), ((6989, 7011), 'torch.mean', 'torch.mean', (['out'], {'dim': '(1)'}), '(out, dim=1)\n', (6999, 7011), False, 'import torch\n'), ((7792, 7852), 'torch.nn.Linear', 'nn.Linear', (['(self.hidden_dim * self.num_directions)', 'hidden_dim'], {}), '(self.hidden_dim * self.num_directions, hidden_dim)\n', (7801, 7852), True, 'import torch.nn as nn\n'), ((8301, 8332), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['self.hidden_dim'], {}), '(self.hidden_dim)\n', (8315, 8332), True, 'import torch.nn as nn\n'), ((8430, 8461), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['self.num_labels'], {}), '(self.num_labels)\n', (8444, 8461), True, 'import torch.nn as nn\n'), ((8484, 8537), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(self.hidden_dim * self.num_directions)'], {}), '(self.hidden_dim * self.num_directions)\n', (8498, 8537), True, 'import torch.nn as nn\n'), ((8733, 8775), 'torch.nn.utils.rnn.pad_packed_sequence', 'pad_packed_sequence', (['out'], {'batch_first': '(True)'}), '(out, batch_first=True)\n', (8752, 8775), False, 'from torch.nn.utils.rnn import pad_packed_sequence\n'), ((8969, 8991), 'torch.ByteTensor', 'torch.ByteTensor', (['mask'], {}), '(mask)\n', (8985, 8991), False, 'import torch\n'), ((9081, 9110), 'torch.matmul', 'torch.matmul', (['out_att', 'self.u'], {}), '(out_att, self.u)\n', (9093, 9110), False, 'import torch\n'), ((9163, 9182), 'torch.nn.functional.softmax', 'F.softmax', (['x'], {'dim': '(1)'}), '(x, dim=1)\n', (9172, 9182), True, 'import torch.nn.functional as F\n'), ((10763, 10794), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['self.hidden_dim'], {}), '(self.hidden_dim)\n', (10777, 10794), True, 'import torch.nn as nn\n'), ((10892, 10923), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['self.num_labels'], {}), '(self.num_labels)\n', (10906, 10923), True, 'import torch.nn as nn\n'), ((10946, 10999), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', 
(['(self.hidden_dim * self.num_directions)'], {}), '(self.hidden_dim * self.num_directions)\n', (10960, 10999), True, 'import torch.nn as nn\n'), ((11195, 11237), 'torch.nn.utils.rnn.pad_packed_sequence', 'pad_packed_sequence', (['out'], {'batch_first': '(True)'}), '(out, batch_first=True)\n', (11214, 11237), False, 'from torch.nn.utils.rnn import pad_packed_sequence\n'), ((11248, 11270), 'torch.mean', 'torch.mean', (['out'], {'dim': '(1)'}), '(out, dim=1)\n', (11258, 11270), False, 'import torch\n'), ((12165, 12278), 'torch.nn.Conv1d', 'nn.Conv1d', (['self.input_channels', 'self.hidden_channels', 'self.kernel_size', 'self.stride', 'self.padding'], {'bias': '(True)'}), '(self.input_channels, self.hidden_channels, self.kernel_size, self\n .stride, self.padding, bias=True)\n', (12174, 12278), True, 'import torch.nn as nn\n'), ((12293, 12407), 'torch.nn.Conv1d', 'nn.Conv1d', (['self.hidden_channels', 'self.hidden_channels', 'self.kernel_size', 'self.stride', 'self.padding'], {'bias': '(False)'}), '(self.hidden_channels, self.hidden_channels, self.kernel_size,\n self.stride, self.padding, bias=False)\n', (12302, 12407), True, 'import torch.nn as nn\n'), ((12424, 12537), 'torch.nn.Conv1d', 'nn.Conv1d', (['self.input_channels', 'self.hidden_channels', 'self.kernel_size', 'self.stride', 'self.padding'], {'bias': '(True)'}), '(self.input_channels, self.hidden_channels, self.kernel_size, self\n .stride, self.padding, bias=True)\n', (12433, 12537), True, 'import torch.nn as nn\n'), ((12552, 12666), 'torch.nn.Conv1d', 'nn.Conv1d', (['self.hidden_channels', 'self.hidden_channels', 'self.kernel_size', 'self.stride', 'self.padding'], {'bias': '(False)'}), '(self.hidden_channels, self.hidden_channels, self.kernel_size,\n self.stride, self.padding, bias=False)\n', (12561, 12666), True, 'import torch.nn as nn\n'), ((12682, 12795), 'torch.nn.Conv1d', 'nn.Conv1d', (['self.input_channels', 'self.hidden_channels', 'self.kernel_size', 'self.stride', 'self.padding'], {'bias': '(True)'}), 
'(self.input_channels, self.hidden_channels, self.kernel_size, self\n .stride, self.padding, bias=True)\n', (12691, 12795), True, 'import torch.nn as nn\n'), ((12810, 12924), 'torch.nn.Conv1d', 'nn.Conv1d', (['self.hidden_channels', 'self.hidden_channels', 'self.kernel_size', 'self.stride', 'self.padding'], {'bias': '(False)'}), '(self.hidden_channels, self.hidden_channels, self.kernel_size,\n self.stride, self.padding, bias=False)\n', (12819, 12924), True, 'import torch.nn as nn\n'), ((12941, 13054), 'torch.nn.Conv1d', 'nn.Conv1d', (['self.input_channels', 'self.hidden_channels', 'self.kernel_size', 'self.stride', 'self.padding'], {'bias': '(True)'}), '(self.input_channels, self.hidden_channels, self.kernel_size, self\n .stride, self.padding, bias=True)\n', (12950, 13054), True, 'import torch.nn as nn\n'), ((13069, 13183), 'torch.nn.Conv1d', 'nn.Conv1d', (['self.hidden_channels', 'self.hidden_channels', 'self.kernel_size', 'self.stride', 'self.padding'], {'bias': '(False)'}), '(self.hidden_channels, self.hidden_channels, self.kernel_size,\n self.stride, self.padding, bias=False)\n', (13078, 13183), True, 'import torch.nn as nn\n'), ((13213, 13273), 'torch.nn.MaxPool1d', 'nn.MaxPool1d', (['self.kernel_size_pool'], {'stride': 'self.stride_pool'}), '(self.kernel_size_pool, stride=self.stride_pool)\n', (13225, 13273), True, 'import torch.nn as nn\n'), ((13295, 13331), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['self.hidden_channels'], {}), '(self.hidden_channels)\n', (13309, 13331), True, 'import torch.nn as nn\n'), ((15107, 15150), 'torch.nn.Linear', 'nn.Linear', (['self.linear_dim', 'self.num_labels'], {}), '(self.linear_dim, self.num_labels)\n', (15116, 15150), True, 'import torch.nn as nn\n'), ((2546, 2596), 'torch.zeros', 'torch.zeros', (['(self.num_directions * self.hidden_dim)'], {}), '(self.num_directions * self.hidden_dim)\n', (2557, 2596), False, 'import torch\n'), ((3747, 3771), 'torch.matmul', 'torch.matmul', (['alpha', 'out'], {}), '(alpha, out)\n', 
(3759, 3771), False, 'import torch\n'), ((7881, 7909), 'torch.randn', 'torch.randn', (['self.hidden_dim'], {}), '(self.hidden_dim)\n', (7892, 7909), False, 'import torch\n'), ((7931, 7957), 'math.sqrt', 'math.sqrt', (['self.u.shape[0]'], {}), '(self.u.shape[0])\n', (7940, 7957), False, 'import math\n'), ((9216, 9240), 'torch.matmul', 'torch.matmul', (['alpha', 'out'], {}), '(alpha, out)\n', (9228, 9240), False, 'import torch\n'), ((13730, 13744), 'torch.tanh', 'torch.tanh', (['cc'], {}), '(cc)\n', (13740, 13744), False, 'import torch\n'), ((16328, 16353), 'torch.unsqueeze', 'torch.unsqueeze', (['o'], {'dim': '(3)'}), '(o, dim=3)\n', (16343, 16353), False, 'import torch\n'), ((16586, 16613), 'torch.unsqueeze', 'torch.unsqueeze', (['out'], {'dim': '(0)'}), '(out, dim=0)\n', (16601, 16613), False, 'import torch\n'), ((16614, 16642), 'torch.unsqueeze', 'torch.unsqueeze', (['loss'], {'dim': '(0)'}), '(loss, dim=0)\n', (16629, 16642), False, 'import torch\n'), ((399, 424), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (422, 424), False, 'import torch\n'), ((783, 926), 'torch.nn.GRU', 'nn.GRU', (['self.num_features', 'self.hidden_dim', 'self.num_layers'], {'batch_first': '(True)', 'dropout': 'self.dropout_rate', 'bidirectional': 'self.bidirectional'}), '(self.num_features, self.hidden_dim, self.num_layers, batch_first=\n True, dropout=self.dropout_rate, bidirectional=self.bidirectional)\n', (789, 926), True, 'import torch.nn as nn\n'), ((1003, 1091), 'torch.nn.Linear', 'nn.Linear', (['(self.hidden_dim * self.num_layers * self.num_directions)', 'self.num_labels'], {}), '(self.hidden_dim * self.num_layers * self.num_directions, self.\n num_labels)\n', (1012, 1091), True, 'import torch.nn as nn\n'), ((1868, 1888), 'torch.max', 'torch.max', (['target', '(1)'], {}), '(target, 1)\n', (1877, 1888), False, 'import torch\n'), ((2151, 2176), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (2174, 2176), False, 'import torch\n'), 
((2640, 2783), 'torch.nn.GRU', 'nn.GRU', (['self.num_features', 'self.hidden_dim', 'self.num_layers'], {'batch_first': '(True)', 'dropout': 'self.dropout_rate', 'bidirectional': 'self.bidirectional'}), '(self.num_features, self.hidden_dim, self.num_layers, batch_first=\n True, dropout=self.dropout_rate, bidirectional=self.bidirectional)\n', (2646, 2783), True, 'import torch.nn as nn\n'), ((2851, 2916), 'torch.nn.Linear', 'nn.Linear', (['(self.hidden_dim * self.num_directions)', 'self.num_labels'], {}), '(self.hidden_dim * self.num_directions, self.num_labels)\n', (2860, 2916), True, 'import torch.nn as nn\n'), ((3865, 3885), 'torch.max', 'torch.max', (['target', '(1)'], {}), '(target, 1)\n', (3874, 3885), False, 'import torch\n'), ((4186, 4211), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (4209, 4211), False, 'import torch\n'), ((4667, 4810), 'torch.nn.GRU', 'nn.GRU', (['self.num_features', 'self.hidden_dim', 'self.num_layers'], {'batch_first': '(True)', 'dropout': 'self.dropout_rate', 'bidirectional': 'self.bidirectional'}), '(self.num_features, self.hidden_dim, self.num_layers, batch_first=\n True, dropout=self.dropout_rate, bidirectional=self.bidirectional)\n', (4673, 4810), True, 'import torch.nn as nn\n'), ((4878, 4943), 'torch.nn.Linear', 'nn.Linear', (['(self.hidden_dim * self.num_directions)', 'self.num_labels'], {}), '(self.hidden_dim * self.num_directions, self.num_labels)\n', (4887, 4943), True, 'import torch.nn as nn\n'), ((5501, 5521), 'torch.max', 'torch.max', (['target', '(1)'], {}), '(target, 1)\n', (5510, 5521), False, 'import torch\n'), ((5791, 5816), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (5814, 5816), False, 'import torch\n'), ((6273, 6417), 'torch.nn.LSTM', 'nn.LSTM', (['self.num_features', 'self.hidden_dim', 'self.num_layers'], {'batch_first': '(True)', 'dropout': 'self.dropout_rate', 'bidirectional': 'self.bidirectional'}), '(self.num_features, self.hidden_dim, self.num_layers, 
batch_first=\n True, dropout=self.dropout_rate, bidirectional=self.bidirectional)\n', (6280, 6417), True, 'import torch.nn as nn\n'), ((6486, 6551), 'torch.nn.Linear', 'nn.Linear', (['(self.hidden_dim * self.num_directions)', 'self.num_labels'], {}), '(self.hidden_dim * self.num_directions, self.num_labels)\n', (6495, 6551), True, 'import torch.nn as nn\n'), ((7123, 7143), 'torch.max', 'torch.max', (['target', '(1)'], {}), '(target, 1)\n', (7132, 7143), False, 'import torch\n'), ((7408, 7433), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (7431, 7433), False, 'import torch\n'), ((8024, 8168), 'torch.nn.LSTM', 'nn.LSTM', (['self.num_features', 'self.hidden_dim', 'self.num_layers'], {'batch_first': '(True)', 'dropout': 'self.dropout_rate', 'bidirectional': 'self.bidirectional'}), '(self.num_features, self.hidden_dim, self.num_layers, batch_first=\n True, dropout=self.dropout_rate, bidirectional=self.bidirectional)\n', (8031, 8168), True, 'import torch.nn as nn\n'), ((8199, 8264), 'torch.nn.Linear', 'nn.Linear', (['(self.hidden_dim * self.num_directions)', 'self.hidden_dim'], {}), '(self.hidden_dim * self.num_directions, self.hidden_dim)\n', (8208, 8264), True, 'import torch.nn as nn\n'), ((8350, 8393), 'torch.nn.Linear', 'nn.Linear', (['self.hidden_dim', 'self.num_labels'], {}), '(self.hidden_dim, self.num_labels)\n', (8359, 8393), True, 'import torch.nn as nn\n'), ((9545, 9565), 'torch.max', 'torch.max', (['target', '(1)'], {}), '(target, 1)\n', (9554, 9565), False, 'import torch\n'), ((9866, 9891), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (9889, 9891), False, 'import torch\n'), ((10486, 10630), 'torch.nn.LSTM', 'nn.LSTM', (['self.num_features', 'self.hidden_dim', 'self.num_layers'], {'batch_first': '(True)', 'dropout': 'self.dropout_rate', 'bidirectional': 'self.bidirectional'}), '(self.num_features, self.hidden_dim, self.num_layers, batch_first=\n True, dropout=self.dropout_rate, 
bidirectional=self.bidirectional)\n', (10493, 10630), True, 'import torch.nn as nn\n'), ((10661, 10726), 'torch.nn.Linear', 'nn.Linear', (['(self.hidden_dim * self.num_directions)', 'self.hidden_dim'], {}), '(self.hidden_dim * self.num_directions, self.hidden_dim)\n', (10670, 10726), True, 'import torch.nn as nn\n'), ((10812, 10855), 'torch.nn.Linear', 'nn.Linear', (['self.hidden_dim', 'self.num_labels'], {}), '(self.hidden_dim, self.num_labels)\n', (10821, 10855), True, 'import torch.nn as nn\n'), ((11558, 11578), 'torch.max', 'torch.max', (['target', '(1)'], {}), '(target, 1)\n', (11567, 11578), False, 'import torch\n'), ((15038, 15063), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (15061, 15063), False, 'import torch\n'), ((16409, 16430), 'torch.cat', 'torch.cat', (['out'], {'dim': '(3)'}), '(out, dim=3)\n', (16418, 16430), False, 'import torch\n'), ((13942, 13971), 'torch.zeros', 'torch.zeros', (['(1)', 'hidden', 'shape'], {}), '(1, hidden, shape)\n', (13953, 13971), False, 'import torch\n'), ((14016, 14045), 'torch.zeros', 'torch.zeros', (['(1)', 'hidden', 'shape'], {}), '(1, hidden, shape)\n', (14027, 14045), False, 'import torch\n'), ((14090, 14119), 'torch.zeros', 'torch.zeros', (['(1)', 'hidden', 'shape'], {}), '(1, hidden, shape)\n', (14101, 14119), False, 'import torch\n'), ((14158, 14196), 'torch.zeros', 'torch.zeros', (['batch_size', 'hidden', 'shape'], {}), '(batch_size, hidden, shape)\n', (14169, 14196), False, 'import torch\n'), ((14235, 14273), 'torch.zeros', 'torch.zeros', (['batch_size', 'hidden', 'shape'], {}), '(batch_size, hidden, shape)\n', (14246, 14273), False, 'import torch\n'), ((16527, 16547), 'torch.max', 'torch.max', (['target', '(1)'], {}), '(target, 1)\n', (16536, 16547), False, 'import torch\n')] |
# Copyright 2021 TileDB Inc.
# Licensed under the MIT License.
import numpy as np
import pytest
from tiledb.cf.netcdf_engine._utils import get_netcdf_metadata, get_unpacked_dtype
netCDF4 = pytest.importorskip("netCDF4")
@pytest.mark.parametrize(
    "input_dtype,scale_factor,add_offset,output_dtype",
    (
        (np.int16, None, None, np.int16),
        (np.int16, np.float32(1), None, np.float32),
        (np.int16, None, np.float32(1), np.float32),
        (np.int16, np.float64(1), np.float32(1), np.float64),
    ),
)
def test_unpacked_dtype(input_dtype, scale_factor, add_offset, output_dtype):
    """Checks the unpacked data type computed for a packed NetCDF variable."""
    with netCDF4.Dataset("tmp.nc", diskless=True, mode="w") as ds:
        ds.createDimension("t", None)
        var = ds.createVariable("x", dimensions=("t",), datatype=input_dtype)
        # Only attach the packing attributes that were actually provided.
        for attr, attr_value in (("scale_factor", scale_factor), ("add_offset", add_offset)):
            if attr_value is not None:
                var.setncattr(attr, attr_value)
        result = get_unpacked_dtype(var)
    assert result == output_dtype
def test_unpacked_dtype_unsupported_dtype_error():
    """Checks that unpacking a variable whose dtype cannot be packed/unpacked
    raises a ValueError."""
    with netCDF4.Dataset("tmp.nc", diskless=True, mode="w") as ds:
        char_var = ds.createVariable("x", dimensions=tuple(), datatype="S1")
        with pytest.raises(ValueError):
            get_unpacked_dtype(char_var)
@pytest.mark.parametrize(
    "value, expected_result",
    (
        (np.float64(1), np.float64(1)),
        (np.array((1), dtype=np.float64), np.float64(1)),
        (np.array([1], dtype=np.int32), np.int32(1)),
    ),
)
def test_get_netcdf_metadata_number(value, expected_result):
    """Checks that a numeric NetCDF attribute is fetched back as a number."""
    attr_key = "name"
    with netCDF4.Dataset("tmp.nc", diskless=True, mode="w") as ds:
        ds.setncattr(attr_key, value)
        fetched = get_netcdf_metadata(ds, attr_key, is_number=True)
    assert fetched == expected_result
@pytest.mark.parametrize("value", (("",), (1, 2)))
def test_get_netcdf_metadata_number_with_warning(value):
"""Tests computing the unpacked data type for a NetCDF variable."""
key = "name"
with netCDF4.Dataset("tmp.nc", diskless=True, mode="w") as dataset:
dataset.setncattr(key, value)
with pytest.warns(Warning):
result = get_netcdf_metadata(dataset, key, is_number=True)
assert result is None
| [
"tiledb.cf.netcdf_engine._utils.get_unpacked_dtype",
"numpy.float64",
"numpy.int32",
"pytest.mark.parametrize",
"numpy.array",
"pytest.importorskip",
"tiledb.cf.netcdf_engine._utils.get_netcdf_metadata",
"pytest.raises",
"numpy.float32",
"pytest.warns"
] | [((191, 221), 'pytest.importorskip', 'pytest.importorskip', (['"""netCDF4"""'], {}), "('netCDF4')\n", (210, 221), False, 'import pytest\n'), ((2153, 2202), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""value"""', "(('',), (1, 2))"], {}), "('value', (('',), (1, 2)))\n", (2176, 2202), False, 'import pytest\n'), ((1090, 1118), 'tiledb.cf.netcdf_engine._utils.get_unpacked_dtype', 'get_unpacked_dtype', (['variable'], {}), '(variable)\n', (1108, 1118), False, 'from tiledb.cf.netcdf_engine._utils import get_netcdf_metadata, get_unpacked_dtype\n'), ((2059, 2108), 'tiledb.cf.netcdf_engine._utils.get_netcdf_metadata', 'get_netcdf_metadata', (['dataset', 'key'], {'is_number': '(True)'}), '(dataset, key, is_number=True)\n', (2078, 2108), False, 'from tiledb.cf.netcdf_engine._utils import get_netcdf_metadata, get_unpacked_dtype\n'), ((373, 386), 'numpy.float32', 'np.float32', (['(1)'], {}), '(1)\n', (383, 386), True, 'import numpy as np\n'), ((432, 445), 'numpy.float32', 'np.float32', (['(1)'], {}), '(1)\n', (442, 445), True, 'import numpy as np\n'), ((479, 492), 'numpy.float64', 'np.float64', (['(1)'], {}), '(1)\n', (489, 492), True, 'import numpy as np\n'), ((494, 507), 'numpy.float32', 'np.float32', (['(1)'], {}), '(1)\n', (504, 507), True, 'import numpy as np\n'), ((1489, 1514), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1502, 1514), False, 'import pytest\n'), ((1528, 1556), 'tiledb.cf.netcdf_engine._utils.get_unpacked_dtype', 'get_unpacked_dtype', (['variable'], {}), '(variable)\n', (1546, 1556), False, 'from tiledb.cf.netcdf_engine._utils import get_netcdf_metadata, get_unpacked_dtype\n'), ((1630, 1643), 'numpy.float64', 'np.float64', (['(1)'], {}), '(1)\n', (1640, 1643), True, 'import numpy as np\n'), ((1645, 1658), 'numpy.float64', 'np.float64', (['(1)'], {}), '(1)\n', (1655, 1658), True, 'import numpy as np\n'), ((1670, 1699), 'numpy.array', 'np.array', (['(1)'], {'dtype': 'np.float64'}), '(1, dtype=np.float64)\n', 
(1678, 1699), True, 'import numpy as np\n'), ((1703, 1716), 'numpy.float64', 'np.float64', (['(1)'], {}), '(1)\n', (1713, 1716), True, 'import numpy as np\n'), ((1728, 1757), 'numpy.array', 'np.array', (['[1]'], {'dtype': 'np.int32'}), '([1], dtype=np.int32)\n', (1736, 1757), True, 'import numpy as np\n'), ((1759, 1770), 'numpy.int32', 'np.int32', (['(1)'], {}), '(1)\n', (1767, 1770), True, 'import numpy as np\n'), ((2472, 2493), 'pytest.warns', 'pytest.warns', (['Warning'], {}), '(Warning)\n', (2484, 2493), False, 'import pytest\n'), ((2516, 2565), 'tiledb.cf.netcdf_engine._utils.get_netcdf_metadata', 'get_netcdf_metadata', (['dataset', 'key'], {'is_number': '(True)'}), '(dataset, key, is_number=True)\n', (2535, 2565), False, 'from tiledb.cf.netcdf_engine._utils import get_netcdf_metadata, get_unpacked_dtype\n')] |
#!/usr/bin/env python
import os
import numpy as np
import html
from bs4 import BeautifulSoup
import bibtexparser
from bibtexparser.bparser import BibTexParser
from bibtexparser.customization import convert_to_unicode
import eyed3
from tokenizer import MyTokenizer
# Acronyms that text-to-speech should read letter by letter.
acronyms = [
    'LES',
    'ALM',
    'MYNN',
]
# Hyphen-separated letter forms, e.g. 'LES' -> 'L-E-S'.
spelled_out_form = ['-'.join(acronym) for acronym in acronyms]
class BibtexLibrary(object):
    """Class that processes a bibtex file into spoken article summaries.

    Parses a ``.bib`` file, normalizes per-article metadata (authors, title,
    date, publication, keywords, abstract), builds a narrated description for
    each entry, and can render the descriptions to tagged MP3 files.
    """
    def __init__(self,
                 bibfile,
                 # BUG FIX: os.environ['HOME'] raises KeyError on Windows or
                 # when HOME is unset; expanduser('~') is equivalent otherwise.
                 mp3dir=os.path.join(os.path.expanduser('~'),
                                      'Music','Article Abstracts')
                ):
        """Parse *bibfile* and prepare the MP3 output directory.

        Parameters
        ----------
        bibfile : str
            Path to the bibtex file to load.
        mp3dir : str, optional
            Directory where generated MP3 files are written (created if
            missing); defaults to ~/Music/Article Abstracts.
        """
        parser = BibTexParser(common_strings=True)
        parser.customization = convert_to_unicode
        self.bibname = os.path.split(bibfile)[1]
        with open(bibfile) as bib:
            bibdata = bibtexparser.load(bib, parser=parser)
        self.lib = bibdata.entries
        self.mp3dir = mp3dir
        os.makedirs(mp3dir,exist_ok=True)
        self._process_bib_data()

    def _process_bib_data(self):
        """Extract and normalize all metadata fields from the parsed entries."""
        self.keys = [article['ID'] for article in self.lib]
        assert len(self.keys) == len(set(self.keys)),\
            'article keys are not unique!'
        self._process_bib_authors()
        self._process_bib_titles()
        self._process_bib_dates()
        self._process_bib_pubnames()
        self._process_bib_keywords()
        self._process_bib_abstracts()

    def _clean_text(self,s):
        """Strip TeX/HTML markup and spell out acronyms for text-to-speech."""
        s = s.replace('{','').replace('}','')
        s = s.replace('~','')
        s = s.replace('$\\','').replace('$','')
        # get rid of HTML tags
        s = BeautifulSoup(html.unescape(s),'html.parser').text
        # spell out common acronyms
        for acronym,spelledout in zip(acronyms,spelled_out_form):
            s = s.replace(acronym,spelledout)
        return s

    def _process_bib_authors(self):
        """Build a spoken author string per article.

        Produces "A", "A and B", "A, B, and C", or "A et al" with each name
        rearranged into first-name-first order.
        """
        self.author = {}
        for key,article in zip(self.keys,self.lib):
            authorstr = self._clean_text(article['author'])
            authorlist = [
                author.strip().replace('.','')
                for author in authorstr.split(' and ')
            ]
            authorlist_firstlast = []
            for author in authorlist:
                # if "lastname, first", split by comma and reverse
                firstlast = [s.strip() for s in author.split(',')]
                assert (len(firstlast) <= 2) # should be 2 or 1
                firstlast = ' '.join(firstlast[::-1])
                authorlist_firstlast.append(firstlast)
            if len(authorlist_firstlast) == 1:
                authorstr = authorlist_firstlast[0]
            elif len(authorlist_firstlast) == 2:
                authorstr = '{:s} and {:s}'.format(*authorlist_firstlast)
            elif len(authorlist_firstlast) == 3:
                authorstr = '{:s}, {:s}, and {:s}'.format(*authorlist_firstlast)
            else:
                authorstr = '{:s} et al'.format(authorlist_firstlast[0])
            self.author[key] = authorstr

    def _process_bib_titles(self):
        """Store the cleaned title for each article."""
        self.title = {}
        for key,article in zip(self.keys,self.lib):
            self.title[key] = self._clean_text(article['title'])

    def _process_bib_dates(self):
        """Store a spoken date ("month year" or "year") per article, or None."""
        self.year = {}
        self.date = {}
        for key,article in zip(self.keys,self.lib):
            year = article.get('year',None)
            if year is None:
                self.date[key] = None
            else:
                self.year[key] = year
                self.date[key] = year
                month = article.get('month',None)
                if month is not None:
                    self.date[key] = '{:s} {:s}'.format(month,year)
        num_missing_dates = np.count_nonzero(
            [(d is None) for _,d in self.date.items()]
        )
        if num_missing_dates > 0:
            print('Note:',
                  num_missing_dates,'/',len(self.lib),
                  'articles are missing dates')

    def _process_bib_pubnames(self):
        """Store the journal/booktitle name per article, or None if absent."""
        self.publication = {}
        for key,article in zip(self.keys,self.lib):
            if article['ENTRYTYPE'] == 'article':
                name = article['journal']
            else:
                name = article.get('booktitle',None)
            if name is not None:
                name = self._clean_text(name)
            self.publication[key] = name
        num_missing_pubnames = np.count_nonzero(
            [(n is None) for _,n in self.publication.items()]
        )
        if num_missing_pubnames > 0:
            print('Note:',
                  num_missing_pubnames,'/',len(self.lib),
                  'articles are missing publication names')

    def _process_bib_keywords(self):
        """Store the cleaned keyword string per article, or None if absent."""
        self.keywords = {}
        for key,article in zip(self.keys,self.lib):
            kw = article.get('keywords',None)
            if kw is not None:
                kw = self._clean_text(kw)
            self.keywords[key] = kw
        num_missing_keywords = np.count_nonzero(
            [(kw is None) for _,kw in self.keywords.items()]
        )
        if num_missing_keywords > 0:
            print('Note:',
                  num_missing_keywords,'/',len(self.lib),
                  'articles are missing keywords')

    def _process_bib_abstracts(self):
        """Store the cleaned abstract per article, or None if absent."""
        self.abstract = {}
        for key,article in zip(self.keys,self.lib):
            ab = article.get('abstract',None)
            if ab is not None:
                ab = self._clean_text(ab)
            self.abstract[key] = ab
        num_missing_abstracts = np.count_nonzero(
            [(ab is None) for _,ab in self.abstract.items()]
        )
        if num_missing_abstracts > 0:
            print('Note:',
                  num_missing_abstracts,'/',len(self.lib),
                  'articles are missing abstracts')

    def generate_descriptions(self):
        """Compose the narrated description for every article.

        Populates self.description from the metadata dicts built by
        _process_bib_data.  Must be called before to_mp3.
        """
        self.description = {}
        # minimal information: author, title
        for key in self.keys:
            if self.date[key]:
                desc = 'In {:s}, '.format(self.date[key])
            else:
                desc = ''
            desc += '{:s} published a paper entitled: {:s}.'.format(
                    self.author[key], self.title[key])
            if self.publication[key]:
                desc += ' This was published in {:s}.'.format(self.publication[key])
            if self.keywords[key]:
                desc += ' Publication keywords include: '
                kwlist = [kw.strip() for kw in self.keywords[key].split(',')]
                # BUG FIX: previously compared the list itself to 1/2
                # (`kwlist == 1`), which is always False, so a single keyword
                # was narrated as "and keyword" via the generic branch.
                if len(kwlist) == 1:
                    kwstr = kwlist[0]
                elif len(kwlist) == 2:
                    kwstr = '{:s} and {:s}'.format(*kwlist)
                else:
                    kwlist[-1] = 'and '+kwlist[-1]
                    kwstr = ', '.join(kwlist)
                desc += kwstr + '.'
            if self.abstract[key]:
                desc += ' The abstract reads: ' + self.abstract[key]
            else:
                desc += ' There is no abstract available.'
            desc += ' This concludes the summary of the work' \
                    + ' by {:s}.'.format(self.author[key])
            self.description[key] = desc

    def to_mp3(self,key=None,overwrite=False,language='en-GB',debug=False):
        """Render article description(s) to tagged MP3 files via Google TTS.

        Parameters
        ----------
        key : str, list, or None
            Article key(s) to render; all articles when None.
        overwrite : bool
            Regenerate MP3 files that already exist.
        language : str
            gTTS language code.
        debug : bool
            Print descriptions and pass the debug flag to the tokenizer.
        """
        from gtts import gTTS
        if key is None:
            keylist = self.keys
        elif isinstance(key,str):
            keylist = [key]
        else:
            assert isinstance(key,list)
            keylist = key
        tokefunc = lambda text: MyTokenizer(text,debug=debug)
        for key in keylist:
            mp3file = os.path.join(self.mp3dir,'{:s}.mp3'.format(key))
            overwriting = False
            if os.path.isfile(mp3file):
                if overwrite:
                    overwriting = True
                else:
                    print('File exists, skipping',key)
                    continue
            assert hasattr(self,'description'), \
                'Need to run generate_descriptions'
            tts = gTTS(text=self.description[key], lang=language, slow=False,
                       tokenizer_func=tokefunc)
            if overwriting:
                print('Overwriting',mp3file)
            else:
                print('Writing',mp3file)
            tts.save(mp3file)
            # add metadata
            mp3 = eyed3.load(mp3file)
            mp3.initTag()
            mp3.tag.artist = self.author[key]
            mp3.tag.title = self.title[key]
            mp3.tag.album = self.bibname
            mp3.tag.album_artist = 'bib2mp3.py'
            mp3.tag.save()
            if debug: print(key,':',self.description[key])
#==============================================================================
if __name__ == '__main__':
    import sys
    # A single command-line argument -- the bibtex file -- is required.
    if len(sys.argv) <= 1:
        sys.exit('Specify bib file')
    library = BibtexLibrary(sys.argv[1])
    library.generate_descriptions()
    library.to_mp3()
| [
"os.makedirs",
"tokenizer.MyTokenizer",
"os.path.join",
"html.unescape",
"os.path.split",
"bibtexparser.bparser.BibTexParser",
"os.path.isfile",
"gtts.gTTS",
"eyed3.load",
"sys.exit",
"bibtexparser.load"
] | [((558, 620), 'os.path.join', 'os.path.join', (["os.environ['HOME']", '"""Music"""', '"""Article Abstracts"""'], {}), "(os.environ['HOME'], 'Music', 'Article Abstracts')\n", (570, 620), False, 'import os\n'), ((693, 726), 'bibtexparser.bparser.BibTexParser', 'BibTexParser', ([], {'common_strings': '(True)'}), '(common_strings=True)\n', (705, 726), False, 'from bibtexparser.bparser import BibTexParser\n'), ((993, 1027), 'os.makedirs', 'os.makedirs', (['mp3dir'], {'exist_ok': '(True)'}), '(mp3dir, exist_ok=True)\n', (1004, 1027), False, 'import os\n'), ((8976, 9004), 'sys.exit', 'sys.exit', (['"""Specify bib file"""'], {}), "('Specify bib file')\n", (8984, 9004), False, 'import sys\n'), ((800, 822), 'os.path.split', 'os.path.split', (['bibfile'], {}), '(bibfile)\n', (813, 822), False, 'import os\n'), ((883, 920), 'bibtexparser.load', 'bibtexparser.load', (['bib'], {'parser': 'parser'}), '(bib, parser=parser)\n', (900, 920), False, 'import bibtexparser\n'), ((7691, 7721), 'tokenizer.MyTokenizer', 'MyTokenizer', (['text'], {'debug': 'debug'}), '(text, debug=debug)\n', (7702, 7721), False, 'from tokenizer import MyTokenizer\n'), ((7867, 7890), 'os.path.isfile', 'os.path.isfile', (['mp3file'], {}), '(mp3file)\n', (7881, 7890), False, 'import os\n'), ((8191, 8280), 'gtts.gTTS', 'gTTS', ([], {'text': 'self.description[key]', 'lang': 'language', 'slow': '(False)', 'tokenizer_func': 'tokefunc'}), '(text=self.description[key], lang=language, slow=False, tokenizer_func=\n tokefunc)\n', (8195, 8280), False, 'from gtts import gTTS\n'), ((8506, 8525), 'eyed3.load', 'eyed3.load', (['mp3file'], {}), '(mp3file)\n', (8516, 8525), False, 'import eyed3\n'), ((1684, 1700), 'html.unescape', 'html.unescape', (['s'], {}), '(s)\n', (1697, 1700), False, 'import html\n')] |
"""This module contains the views exposed to the user."""
from django.http import HttpResponse
from rest_framework.decorators import api_view
from rest_framework.request import Request
from api.view_handlers import (
handle_get_trained_city_model,
handle_persist_sight_image,
handle_add_new_city,
handle_get_supported_cities,
HTTP_200_MESSAGE,
handle_get_latest_city_model_version,
)
@api_view(["GET"])
def get_trained_city_model(request: Request, city: str) -> HttpResponse:
    """Serve the trained model for ``city`` as a .pt file.

    Parameters
    ----------
    request: Request
        The incoming request.
    city: str
        City name; spaces are normalised to underscores before lookup.

    Returns
    -------
    response: HttpResponse
        The trained model payload plus the handler-supplied status code.
    """
    city_key = city.replace(' ', '_')
    result = handle_get_trained_city_model(city_key)
    body, status = result[0], result[1]
    return HttpResponse(body, status=status)
@api_view(["GET"])
def get_latest_city_model_version(request: Request, city: str) -> HttpResponse:
    """Report the latest persisted model version for ``city``.

    Parameters
    ----------
    request: Request
        The incoming request.
    city: str
        City name; spaces are normalised to underscores before lookup.

    Returns
    -------
    response: HttpResponse
        The latest model version plus the handler-supplied status code.
    """
    city_key = city.replace(' ', '_')
    result = handle_get_latest_city_model_version(city_key)
    body, status = result[0], result[1]
    return HttpResponse(body, status=status)
@api_view(["POST"])
def persist_sight_image(request: Request, city: str) -> HttpResponse:
    """Store an uploaded sight image for a supported city.

    Parameters
    ----------
    request: Request
        The incoming request; the upload is read from the "image" key.
    city: str
        City name; spaces are normalised to underscores before lookup.

    Returns
    -------
    response: HttpResponse
        A status message from the handler.
    """
    image = None
    if "image" in request.FILES:
        image = request.FILES["image"]
    city_key = city.replace(' ', '_')
    result = handle_persist_sight_image(city_key, image)
    body, status = result[0], result[1]
    return HttpResponse(body, status=status)
@api_view(["POST"])
def add_new_city(request: Request, city: str) -> HttpResponse:
    """Register a new city in the internally managed supported-city list.

    Parameters
    ----------
    request: Request
        The incoming request.
    city: str
        Name of the city to add; spaces are normalised to underscores.

    Returns
    -------
    response: HttpResponse
        A default 200 HTTP message from the handler.
    """
    city_key = city.replace(' ', '_')
    result = handle_add_new_city(city_key)
    body, status = result[0], result[1]
    return HttpResponse(body, status=status)
@api_view(["GET"])
def get_supported_cities(request: Request) -> HttpResponse:
    """List the currently supported cities.

    Parameters
    ----------
    request: Request
        The incoming request.

    Returns
    -------
    response: HttpResponse
        The list of supported cities plus the handler-supplied status code.
    """
    result = handle_get_supported_cities()
    body, status = result[0], result[1]
    return HttpResponse(body, status=status)
@api_view(["GET"])
def get_index(request):
    """Root endpoint returning a default 200 response.

    Parameters
    ----------
    request: Request
        The incoming request.

    Returns
    -------
    response: HttpResponse
        A default 200 status response with no payload beyond the message.

    Notes
    -----
    Provided only as a best practice; it carries no application data.
    """
    response = HttpResponse(HTTP_200_MESSAGE, 200)
    return response
| [
"django.http.HttpResponse",
"rest_framework.decorators.api_view",
"api.view_handlers.handle_get_supported_cities"
] | [((411, 428), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (419, 428), False, 'from rest_framework.decorators import api_view\n'), ((926, 943), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (934, 943), False, 'from rest_framework.decorators import api_view\n'), ((1460, 1478), 'rest_framework.decorators.api_view', 'api_view', (["['POST']"], {}), "(['POST'])\n", (1468, 1478), False, 'from rest_framework.decorators import api_view\n'), ((2058, 2076), 'rest_framework.decorators.api_view', 'api_view', (["['POST']"], {}), "(['POST'])\n", (2066, 2076), False, 'from rest_framework.decorators import api_view\n'), ((2580, 2597), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (2588, 2597), False, 'from rest_framework.decorators import api_view\n'), ((3053, 3070), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (3061, 3070), False, 'from rest_framework.decorators import api_view\n'), ((877, 922), 'django.http.HttpResponse', 'HttpResponse', (['response[0]'], {'status': 'response[1]'}), '(response[0], status=response[1])\n', (889, 922), False, 'from django.http import HttpResponse\n'), ((1411, 1456), 'django.http.HttpResponse', 'HttpResponse', (['response[0]'], {'status': 'response[1]'}), '(response[0], status=response[1])\n', (1423, 1456), False, 'from django.http import HttpResponse\n'), ((2009, 2054), 'django.http.HttpResponse', 'HttpResponse', (['response[0]'], {'status': 'response[1]'}), '(response[0], status=response[1])\n', (2021, 2054), False, 'from django.http import HttpResponse\n'), ((2531, 2576), 'django.http.HttpResponse', 'HttpResponse', (['response[0]'], {'status': 'response[1]'}), '(response[0], status=response[1])\n', (2543, 2576), False, 'from django.http import HttpResponse\n'), ((2947, 2976), 'api.view_handlers.handle_get_supported_cities', 'handle_get_supported_cities', ([], {}), '()\n', (2974, 2976), 
False, 'from api.view_handlers import handle_get_trained_city_model, handle_persist_sight_image, handle_add_new_city, handle_get_supported_cities, HTTP_200_MESSAGE, handle_get_latest_city_model_version\n'), ((2988, 3049), 'django.http.HttpResponse', 'HttpResponse', (['response_content[0]'], {'status': 'response_content[1]'}), '(response_content[0], status=response_content[1])\n', (3000, 3049), False, 'from django.http import HttpResponse\n'), ((3420, 3455), 'django.http.HttpResponse', 'HttpResponse', (['HTTP_200_MESSAGE', '(200)'], {}), '(HTTP_200_MESSAGE, 200)\n', (3432, 3455), False, 'from django.http import HttpResponse\n')] |
import os
import pytest
from dsplot.errors import InputException
from dsplot.tree import BinaryTree
def test_binary_tree():
    """Exercise BinaryTree construction, traversals, plotting and validation."""
    level_order = [5, 4, 8, 11, None, 13, 4, 7, 2, None, None, 5, 1]
    tree = BinaryTree(nodes=level_order)
    # structure spot-checks against the level-order input
    root = tree.root
    assert root.val == 5
    assert root.right.left.val == 13
    assert root.right.right.left.val == 5
    # traversal orders
    expected = {
        'preorder': [5, 4, 11, 7, 2, 8, 13, 4, 5, 1],
        'inorder': [7, 11, 2, 4, 5, 13, 8, 5, 4, 1],
        'postorder': [7, 2, 11, 4, 13, 5, 1, 4, 8, 5],
    }
    assert tree.preorder() == expected['preorder']
    assert tree.inorder() == expected['inorder']
    assert tree.postorder() == expected['postorder']
    # plotting writes an image into the test-data directory
    tree.plot('tests/test_data/tree.png')
    assert 'tree.png' in os.listdir('tests/test_data')
    # an empty node list must be rejected
    with pytest.raises(InputException) as excinfo:
        BinaryTree(nodes=[])
    assert str(excinfo.value) == 'Input list must have at least 1 element.'
| [
"os.listdir",
"dsplot.tree.BinaryTree",
"pytest.raises"
] | [((139, 207), 'dsplot.tree.BinaryTree', 'BinaryTree', ([], {'nodes': '[5, 4, 8, 11, None, 13, 4, 7, 2, None, None, 5, 1]'}), '(nodes=[5, 4, 8, 11, None, 13, 4, 7, 2, None, None, 5, 1])\n', (149, 207), False, 'from dsplot.tree import BinaryTree\n'), ((586, 615), 'os.listdir', 'os.listdir', (['"""tests/test_data"""'], {}), "('tests/test_data')\n", (596, 615), False, 'import os\n'), ((626, 655), 'pytest.raises', 'pytest.raises', (['InputException'], {}), '(InputException)\n', (639, 655), False, 'import pytest\n'), ((670, 690), 'dsplot.tree.BinaryTree', 'BinaryTree', ([], {'nodes': '[]'}), '(nodes=[])\n', (680, 690), False, 'from dsplot.tree import BinaryTree\n')] |
import sys
sys.path.insert(1,"../../")
import h2o
from tests import pyunit_utils
from h2o.estimators.gbm import H2OGradientBoostingEstimator
from h2o.utils.model_utils import reset_model_threshold
def test_reset_threshold():
    """
    Test the model threshold can be reset.
    Performance metric should be recalculated and also predictions should be changed based on the new threshold.
    """
    # import data
    airlines = h2o.import_file(path=pyunit_utils.locate("smalldata/airlines/modified_airlines.csv"))
    # convert columns to factors so they are treated as categoricals
    airlines["Year"] = airlines["Year"].asfactor()
    airlines["Month"] = airlines["Month"].asfactor()
    airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
    airlines["Cancelled"] = airlines["Cancelled"].asfactor()
    airlines['FlightNum'] = airlines['FlightNum'].asfactor()
    # set the predictor names and the response column name
    predictors = ["Origin", "Dest", "Year", "UniqueCarrier", "DayOfWeek", "Month", "Distance", "FlightNum"]
    response = "IsDepDelayed"
    # split into train and validation sets
    # NOTE(review): `valid` is never used below — the split only shrinks the
    # training frame; confirm whether a validation_frame was intended.
    train, valid = airlines.split_frame(ratios = [.8], seed = 1234)
    # initialize the estimator (small tree count keeps the test fast)
    model = H2OGradientBoostingEstimator(seed = 1234, ntrees=5)
    # train the model
    model.train(x=predictors, y=response, training_frame=train)
    old_threshold = model._model_json['output']['default_threshold']
    # predict with the original threshold
    preds = model.predict(airlines)
    # reset the threshold; the helper returns the threshold in effect before
    new_threshold = 0.6917189903082518
    old_returned = reset_model_threshold(model, new_threshold)
    reset_model = h2o.get_model(model.model_id)
    reset_threshold = reset_model._model_json['output']['default_threshold']
    # predict with reset model
    preds_reset = reset_model.predict(airlines)
    # the helper must return the old threshold, and the stored one must change
    assert old_threshold == old_returned
    assert new_threshold == reset_threshold
    assert reset_threshold != old_threshold
    # compare predictions row by row
    preds_local = preds.as_data_frame()
    preds_reset_local = preds_reset.as_data_frame()
    print("old threshold:", old_threshold, "new_threshold:", new_threshold)
    # rows whose p1 (column 2) lies in [old, new) flip label when the
    # threshold is raised; all other rows keep the same predicted label
    for i in range(airlines.nrow):
        if old_threshold <= preds_local.iloc[i, 2] < new_threshold:
            assert preds_local.iloc[i, 0] != preds_reset_local.iloc[i, 0]
        else:
            assert preds_local.iloc[i, 0] == preds_reset_local.iloc[i, 0]
if __name__ == "__main__":
    # run under the h2o pyunit harness when executed as a script
    pyunit_utils.standalone_test(test_reset_threshold)
else:
    test_reset_threshold()
| [
"h2o.get_model",
"sys.path.insert",
"tests.pyunit_utils.locate",
"h2o.utils.model_utils.reset_model_threshold",
"h2o.estimators.gbm.H2OGradientBoostingEstimator",
"tests.pyunit_utils.standalone_test"
] | [((11, 39), 'sys.path.insert', 'sys.path.insert', (['(1)', '"""../../"""'], {}), "(1, '../../')\n", (26, 39), False, 'import sys\n'), ((1197, 1246), 'h2o.estimators.gbm.H2OGradientBoostingEstimator', 'H2OGradientBoostingEstimator', ([], {'seed': '(1234)', 'ntrees': '(5)'}), '(seed=1234, ntrees=5)\n', (1225, 1246), False, 'from h2o.estimators.gbm import H2OGradientBoostingEstimator\n'), ((1565, 1608), 'h2o.utils.model_utils.reset_model_threshold', 'reset_model_threshold', (['model', 'new_threshold'], {}), '(model, new_threshold)\n', (1586, 1608), False, 'from h2o.utils.model_utils import reset_model_threshold\n'), ((1627, 1656), 'h2o.get_model', 'h2o.get_model', (['model.model_id'], {}), '(model.model_id)\n', (1640, 1656), False, 'import h2o\n'), ((2462, 2512), 'tests.pyunit_utils.standalone_test', 'pyunit_utils.standalone_test', (['test_reset_threshold'], {}), '(test_reset_threshold)\n', (2490, 2512), False, 'from tests import pyunit_utils\n'), ((457, 520), 'tests.pyunit_utils.locate', 'pyunit_utils.locate', (['"""smalldata/airlines/modified_airlines.csv"""'], {}), "('smalldata/airlines/modified_airlines.csv')\n", (476, 520), False, 'from tests import pyunit_utils\n')] |
#!/usr/bin/python
import argparse
import csv
import sys
'''
This script takes a CSV file with a mandatory header and a sql tablename and converts the data in the csv file into
an SQL INSERT statement.
'''
def parse_arguments():
    """Parse command-line options for the CSV-to-SQL converter.

    Expects a readable CSV file as the positional argument, a required
    -t/--table destination table name, and an optional -d/--delimiter
    (default ',').
    """
    arg_parser = argparse.ArgumentParser(description='Takes a csv file and a tablename and creates an SQL insert statement')
    arg_parser.add_argument('csvFile', type=argparse.FileType('r'),
                            help='The CSV file to be read')
    arg_parser.add_argument('-t', '--table', dest='tablename', required=True,
                            help='The name of the destination SQL table')
    arg_parser.add_argument('-d', '--delimiter', dest='delimiter', default=',',
                            help='The delimiter used in the CSV')
    return arg_parser.parse_args()
def main():
    """Read the CSV named on the command line and print SQL INSERTs to stdout."""
    # parse arguments
    args = parse_arguments()
    # Open CSV and start output; the `with` closes the file handle for us
    with args.csvFile as f:
        reader = csv.reader(f, delimiter=args.delimiter, quoting=csv.QUOTE_ALL)
        # Build the INSERT header from the CSV's first (header) row, since
        # it must be repeated at the start of every batch
        header_row = 'INSERT INTO `' + args.tablename + '` ('
        first = True
        for item in next(reader):
            if first:
                first = False
            else:
                header_row+=', '
            header_row+= item
        header_row+=') VALUES '
        # Row counter used to split output into batches. NOTE: the batch
        # size here is 10 rows per INSERT (counter % 10), not the 1000-row
        # limit some databases impose.
        counter = 0
        # Loop through the rows...
        for row in reader:
            if counter % 10 == 0:
                # close the previous statement (if any) and emit a new header
                if counter != 0:
                    sys.stdout.write(';\n')
                #print(header_row)
                sys.stdout.write(header_row)
            else:
                # separate value tuples within the same INSERT
                sys.stdout.write(',')
            sys.stdout.write('(')
            first = True
            # Loop through the items in each row
            for item in row:
                if first:
                    first = False
                else:
                    sys.stdout.write(', ')
                # escape single quotes SQL-style, map empty quoted fields to
                # the literal string NULL, and HTML-escape ampersands
                # (NOTE(review): '&amp;' escaping presumably serves a
                # downstream HTML consumer — confirm)
                sys.stdout.write('\'' + item.replace('\'', '\'\'').replace('""', 'NULL').replace('&', '&amp;') + '\'')
                #sys.stdout.write(item)
            sys.stdout.write(')')
            # Increase counter
            counter += 1
        sys.stdout.write(';')
if __name__ == "__main__":
    # script entry point
    main()
| [
"argparse.FileType",
"csv.reader",
"argparse.ArgumentParser",
"sys.stdout.write"
] | [((290, 402), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Takes a csv file and a tablename and creates an SQL insert statement"""'}), "(description=\n 'Takes a csv file and a tablename and creates an SQL insert statement')\n", (313, 402), False, 'import argparse\n'), ((940, 1002), 'csv.reader', 'csv.reader', (['f'], {'delimiter': 'args.delimiter', 'quoting': 'csv.QUOTE_ALL'}), '(f, delimiter=args.delimiter, quoting=csv.QUOTE_ALL)\n', (950, 1002), False, 'import csv\n'), ((2263, 2284), 'sys.stdout.write', 'sys.stdout.write', (['""";"""'], {}), "(';')\n", (2279, 2284), False, 'import sys\n'), ((438, 460), 'argparse.FileType', 'argparse.FileType', (['"""r"""'], {}), "('r')\n", (455, 460), False, 'import argparse\n'), ((1772, 1793), 'sys.stdout.write', 'sys.stdout.write', (['"""("""'], {}), "('(')\n", (1788, 1793), False, 'import sys\n'), ((2176, 2197), 'sys.stdout.write', 'sys.stdout.write', (['""")"""'], {}), "(')')\n", (2192, 2197), False, 'import sys\n'), ((1675, 1703), 'sys.stdout.write', 'sys.stdout.write', (['header_row'], {}), '(header_row)\n', (1691, 1703), False, 'import sys\n'), ((1738, 1759), 'sys.stdout.write', 'sys.stdout.write', (['""","""'], {}), "(',')\n", (1754, 1759), False, 'import sys\n'), ((1600, 1623), 'sys.stdout.write', 'sys.stdout.write', (['""";\n"""'], {}), "(';\\n')\n", (1616, 1623), False, 'import sys\n'), ((2000, 2022), 'sys.stdout.write', 'sys.stdout.write', (['""", """'], {}), "(', ')\n", (2016, 2022), False, 'import sys\n')] |
from flask import Flask
from flask import render_template
app = Flask(__name__)
@app.route('/')
def hello_world():
    """Render the landing page at the site root."""
    return render_template('index.html')
@app.route('/index')
def index():
    """Render the landing page (same template as the root route)."""
    return render_template('index.html')
@app.route('/contact')
def contact():
    """Render the contact page."""
    return render_template('contact.html')
@app.route('/cv')
def cv():
    """Render the CV page."""
    return render_template('cv.html')
@app.route('/hire-me')
def hireMe():
    """Render the hire-me page."""
    return render_template('hire-me.html')
@app.route('/project-page')
def projectPage():
    """Render the single-project detail page."""
    return render_template('project-page.html')
@app.route('/projects-compact-grid')
def projects1():
    """Render the compact-grid projects listing."""
    return render_template('projects-compact-grid.html')
@app.route('/projects-no-images')
def projects2():
    """Render the image-free projects listing."""
    return render_template('projects-no-images.html')
@app.route('/projects-with-sidebar')
def projects3():
    """Render the projects listing with a sidebar."""
    return render_template('projects-with-sidebar.html')
@app.route('/projects-grid-cards')
def projects4():
    """Render the grid-cards projects listing.

    NOTE(review): this renders 'projects-with-sidebar.html', the same
    template as the '/projects-with-sidebar' route — likely a copy-paste
    slip. Confirm whether 'projects-grid-cards.html' exists and was meant.
    """
    return render_template('projects-with-sidebar.html')
if __name__ == '__main__':
    # run Flask's built-in development server (not for production use)
    app.run()
| [
"flask.render_template",
"flask.Flask"
] | [((65, 80), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (70, 80), False, 'from flask import Flask\n'), ((129, 158), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (144, 158), False, 'from flask import render_template\n'), ((205, 234), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (220, 234), False, 'from flask import render_template\n'), ((285, 316), 'flask.render_template', 'render_template', (['"""contact.html"""'], {}), "('contact.html')\n", (300, 316), False, 'from flask import render_template\n'), ((357, 383), 'flask.render_template', 'render_template', (['"""cv.html"""'], {}), "('cv.html')\n", (372, 383), False, 'from flask import render_template\n'), ((433, 464), 'flask.render_template', 'render_template', (['"""hire-me.html"""'], {}), "('hire-me.html')\n", (448, 464), False, 'from flask import render_template\n'), ((524, 560), 'flask.render_template', 'render_template', (['"""project-page.html"""'], {}), "('project-page.html')\n", (539, 560), False, 'from flask import render_template\n'), ((627, 672), 'flask.render_template', 'render_template', (['"""projects-compact-grid.html"""'], {}), "('projects-compact-grid.html')\n", (642, 672), False, 'from flask import render_template\n'), ((736, 778), 'flask.render_template', 'render_template', (['"""projects-no-images.html"""'], {}), "('projects-no-images.html')\n", (751, 778), False, 'from flask import render_template\n'), ((845, 890), 'flask.render_template', 'render_template', (['"""projects-with-sidebar.html"""'], {}), "('projects-with-sidebar.html')\n", (860, 890), False, 'from flask import render_template\n'), ((955, 1000), 'flask.render_template', 'render_template', (['"""projects-with-sidebar.html"""'], {}), "('projects-with-sidebar.html')\n", (970, 1000), False, 'from flask import render_template\n')] |
from django.db import models
from django_countries.fields import CountryField
from django.db.models.deletion import CASCADE
class Author(models.Model):
    """A book author with a display name and country of nationality."""
    name = models.CharField(max_length=60)
    # country choice field provided by django-countries
    nationality = CountryField()
class Book(models.Model):
    """A book written by a single Author."""
    name = models.CharField(max_length=60)
    description = models.TextField()
    # deleting an Author cascades to that author's books
    author = models.ForeignKey(Author, on_delete=CASCADE)
    # auto_now updates the date on EVERY save; NOTE(review): for a fixed
    # publication date, auto_now_add (set once on create) may have been
    # intended — confirm.
    published_at = models.DateField(auto_now=True)
| [
"django.db.models.DateField",
"django.db.models.TextField",
"django_countries.fields.CountryField",
"django.db.models.ForeignKey",
"django.db.models.CharField"
] | [((165, 196), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(60)'}), '(max_length=60)\n', (181, 196), False, 'from django.db import models\n'), ((215, 229), 'django_countries.fields.CountryField', 'CountryField', ([], {}), '()\n', (227, 229), False, 'from django_countries.fields import CountryField\n'), ((269, 300), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(60)'}), '(max_length=60)\n', (285, 300), False, 'from django.db import models\n'), ((319, 337), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (335, 337), False, 'from django.db import models\n'), ((351, 395), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Author'], {'on_delete': 'CASCADE'}), '(Author, on_delete=CASCADE)\n', (368, 395), False, 'from django.db import models\n'), ((415, 446), 'django.db.models.DateField', 'models.DateField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (431, 446), False, 'from django.db import models\n')] |
from ..utils import pad_left, try_parse_int
from datetime import timedelta
from typing import Dict, List
# Ordered duration components: days, hours, minutes, seconds.
TIME_PARTS: List[str] = [ "D", "H", "M", "S" ]

# Named shorthand intervals resolved before any numeric parsing.
FIXED_INTERVALS: Dict[str, timedelta] = {
    "WEEK": timedelta(weeks=1),
    "DAY": timedelta(days=1),
    "HOUR": timedelta(hours=1)
}
def parse_interval(value: str) -> timedelta:
    """Convert an interval string into a timedelta.

    Accepts a fixed keyword ("week", "day", "hour"), a colon-delimited
    form such as "1:02:03", or a letter-denoted form such as "1D2H30M".
    Matching is case-insensitive.
    """
    parts: Dict[str, int] = dict.fromkeys(TIME_PARTS, 0)
    value = value.upper()
    fixed = FIXED_INTERVALS.get(value)
    if fixed is not None:
        return fixed
    if ":" in value:
        __parse_delimited_into(value, parts)
    else:
        __parse_denoted_into(value, parts)
    return timedelta(days=parts["D"], hours=parts["H"], minutes=parts["M"], seconds=parts["S"])
def __parse_delimited_into(value: str, args: Dict[str, int]) -> None:
    """Fill ``args`` from a colon-delimited interval such as "1:02:03".

    Values are right-aligned onto D:H:M:S; a two-field input is treated
    as hours:minutes rather than minutes:seconds.
    """
    pieces = value.split(":")
    padded = pad_left(pieces, "0", len(TIME_PARTS))
    for part, raw in zip(TIME_PARTS, padded):
        parsed = try_parse_int(raw)
        args[part] = 0 if parsed is None else parsed
    if len(pieces) == 2:
        # two fields mean H:M, so shift the right-aligned M:S up one slot
        args["H"], args["M"], args["S"] = args["M"], args["S"], 0
def __parse_denoted_into(value: str, args: Dict[str, int]) -> None:
    """Fill ``args`` from a letter-denoted interval such as "1D2H30M".

    Scans for each part letter (D, H, M, S) in order; the digits before
    the first occurrence of a letter become that part's value.
    """
    remaining = value
    for part in args:
        before, sep, after = remaining.partition(part)
        if sep:
            number = try_parse_int(before)
            args[part] = number if number is not None else 0
            remaining = after
        else:
            remaining = before
| [
"datetime.timedelta"
] | [((207, 225), 'datetime.timedelta', 'timedelta', ([], {'weeks': '(1)'}), '(weeks=1)\n', (216, 225), False, 'from datetime import timedelta\n'), ((238, 255), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (247, 255), False, 'from datetime import timedelta\n'), ((269, 287), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (278, 287), False, 'from datetime import timedelta\n'), ((657, 742), 'datetime.timedelta', 'timedelta', ([], {'days': "args['D']", 'hours': "args['H']", 'minutes': "args['M']", 'seconds': "args['S']"}), "(days=args['D'], hours=args['H'], minutes=args['M'], seconds=args['S']\n )\n", (666, 742), False, 'from datetime import timedelta\n')] |
#*
#* Copyright (C) 2017-2019 Alibaba Group Holding Limited
#*
#* Licensed under the Apache License, Version 2.0 (the "License");
#* you may not use this file except in compliance with the License.
#* You may obtain a copy of the License at
#*
#* http://www.apache.org/licenses/LICENSE-2.0
#*
#* Unless required by applicable law or agreed to in writing, software
#* distributed under the License is distributed on an "AS IS" BASIS,
#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#* See the License for the specific language governing permissions and
#* limitations under the License.
import sys
import os
import glob
import argparse
# Parse the profile level controlling which test set is generated.
parser = argparse.ArgumentParser(description='Generate conformanc tests')
parser.add_argument("-profile_level", help="Specify the profile level: 0=smoke tests; 1=full tests", type=int)
# BUG FIX: the original called parser.parse_args() twice, discarding the
# first result; a single call is sufficient.
args = parser.parse_args()

# Choose the tblgen generator flag; default (and level 0) is smoke tests.
# NOTE: when profile_level is truthy it is necessarily non-zero, so the
# ternary below always selects the full-test flag in that branch.
option = "-gen-onnx-smoke-tests"
if args.profile_level:
    option = "-gen-onnx-smoke-tests" if args.profile_level==0 else "-gen-onnx-tests"
print("======Generating tests with option " + option + "========")

# Ensure the output directory exists.
if not os.path.exists("tests"):
    os.makedirs("tests")

# Copy the .td definitions and algorithm files next to this script.
os.system("cp ../include/onnx_*.td -r . | cp ../include/*.algorithm -r .")
dir_path = os.path.dirname(os.path.realpath(__file__))
td_files = glob.glob(os.path.join(dir_path, '*.td'))
# Run llvm-tblgen on each .td file, emitting one Python test per file.
for td_file in td_files:
    out_file_name = os.path.splitext(os.path.basename(td_file))[0]
    os.system("../llvm/build/bin/llvm-tblgen " + option + " " + td_file + " -I ./ -o ./tests/" + out_file_name + ".py")
    print(out_file_name + ".py generated.")
# Clean up the copied inputs.
os.system("rm onnx_*.td | rm *.algorithm")
| [
"os.path.exists",
"os.makedirs",
"argparse.ArgumentParser",
"os.path.join",
"os.path.splitext",
"os.path.realpath",
"os.path.basename",
"os.system"
] | [((679, 743), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Generate conformanc tests"""'}), "(description='Generate conformanc tests')\n", (702, 743), False, 'import argparse\n'), ((1170, 1244), 'os.system', 'os.system', (['"""cp ../include/onnx_*.td -r . | cp ../include/*.algorithm -r ."""'], {}), "('cp ../include/onnx_*.td -r . | cp ../include/*.algorithm -r .')\n", (1179, 1244), False, 'import os\n'), ((1654, 1696), 'os.system', 'os.system', (['"""rm onnx_*.td | rm *.algorithm"""'], {}), "('rm onnx_*.td | rm *.algorithm')\n", (1663, 1696), False, 'import os\n'), ((1119, 1142), 'os.path.exists', 'os.path.exists', (['"""tests"""'], {}), "('tests')\n", (1133, 1142), False, 'import os\n'), ((1148, 1168), 'os.makedirs', 'os.makedirs', (['"""tests"""'], {}), "('tests')\n", (1159, 1168), False, 'import os\n'), ((1272, 1298), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1288, 1298), False, 'import os\n'), ((1321, 1351), 'os.path.join', 'os.path.join', (['dir_path', '"""*.td"""'], {}), "(dir_path, '*.td')\n", (1333, 1351), False, 'import os\n'), ((1408, 1437), 'os.path.basename', 'os.path.basename', (['td_files[k]'], {}), '(td_files[k])\n', (1424, 1437), False, 'import os\n'), ((1488, 1611), 'os.system', 'os.system', (["('../llvm/build/bin/llvm-tblgen ' + option + ' ' + td_files[k] +\n ' -I ./ -o ./tests/' + out_file_name + '.py')"], {}), "('../llvm/build/bin/llvm-tblgen ' + option + ' ' + td_files[k] +\n ' -I ./ -o ./tests/' + out_file_name + '.py')\n", (1497, 1611), False, 'import os\n'), ((1458, 1480), 'os.path.splitext', 'os.path.splitext', (['base'], {}), '(base)\n', (1474, 1480), False, 'import os\n')] |
import pytest
import sqlalchemy as sa
class ThreeLevelDeepOneToOne(object):
    """Fixture mixin mapping Catalog -> Category -> SubCategory -> Product
    as a chain of one-to-one relationships."""
    @pytest.fixture
    def Catalog(self, Base, Category):
        class Catalog(Base):
            __tablename__ = 'catalog'
            id = sa.Column('_id', sa.Integer, primary_key=True)
            # one-to-one: uselist=False yields a scalar attribute
            category = sa.orm.relationship(
                Category,
                uselist=False,
                backref='catalog'
            )
        return Catalog
    @pytest.fixture
    def Category(self, Base, SubCategory):
        class Category(Base):
            __tablename__ = 'category'
            id = sa.Column('_id', sa.Integer, primary_key=True)
            catalog_id = sa.Column(
                '_catalog_id',
                sa.Integer,
                sa.ForeignKey('catalog._id')
            )
            sub_category = sa.orm.relationship(
                SubCategory,
                uselist=False,
                backref='category'
            )
        return Category
    @pytest.fixture
    def SubCategory(self, Base, Product):
        class SubCategory(Base):
            __tablename__ = 'sub_category'
            id = sa.Column('_id', sa.Integer, primary_key=True)
            category_id = sa.Column(
                '_category_id',
                sa.Integer,
                sa.ForeignKey('category._id')
            )
            product = sa.orm.relationship(
                Product,
                uselist=False,
                backref='sub_category'
            )
        return SubCategory
    @pytest.fixture
    def Product(self, Base):
        class Product(Base):
            __tablename__ = 'product'
            id = sa.Column('_id', sa.Integer, primary_key=True)
            # NOTE(review): price is Integer here but Numeric in the
            # sibling fixture classes — confirm whether that is deliberate.
            price = sa.Column(sa.Integer)
            sub_category_id = sa.Column(
                '_sub_category_id',
                sa.Integer,
                sa.ForeignKey('sub_category._id')
            )
        return Product
    @pytest.fixture
    def init_models(self, Catalog, Category, SubCategory, Product):
        # forces all model fixtures to be instantiated before a test runs
        pass
class ThreeLevelDeepOneToMany(object):
    """Fixture mixin mapping Catalog -> Category -> SubCategory -> Product
    as a chain of one-to-many relationships."""
    @pytest.fixture
    def Catalog(self, Base, Category):
        class Catalog(Base):
            __tablename__ = 'catalog'
            id = sa.Column('_id', sa.Integer, primary_key=True)
            categories = sa.orm.relationship(Category, backref='catalog')
        return Catalog
    @pytest.fixture
    def Category(self, Base, SubCategory):
        class Category(Base):
            __tablename__ = 'category'
            id = sa.Column('_id', sa.Integer, primary_key=True)
            catalog_id = sa.Column(
                '_catalog_id',
                sa.Integer,
                sa.ForeignKey('catalog._id')
            )
            sub_categories = sa.orm.relationship(
                SubCategory, backref='category'
            )
        return Category
    @pytest.fixture
    def SubCategory(self, Base, Product):
        class SubCategory(Base):
            __tablename__ = 'sub_category'
            id = sa.Column('_id', sa.Integer, primary_key=True)
            category_id = sa.Column(
                '_category_id',
                sa.Integer,
                sa.ForeignKey('category._id')
            )
            products = sa.orm.relationship(
                Product,
                backref='sub_category'
            )
        return SubCategory
    @pytest.fixture
    def Product(self, Base):
        class Product(Base):
            __tablename__ = 'product'
            id = sa.Column('_id', sa.Integer, primary_key=True)
            price = sa.Column(sa.Numeric)
            sub_category_id = sa.Column(
                '_sub_category_id',
                sa.Integer,
                sa.ForeignKey('sub_category._id')
            )
            def __repr__(self):
                return '<Product id=%r>' % self.id
        return Product
    @pytest.fixture
    def init_models(self, Catalog, Category, SubCategory, Product):
        # forces all model fixtures to be instantiated before a test runs
        pass
class ThreeLevelDeepManyToMany(object):
    """Fixture mixin mapping Catalog <-> Category <-> SubCategory <-> Product
    as many-to-many relationships via association tables."""
    @pytest.fixture
    def Catalog(self, Base, Category):
        # association table linking catalogs and categories
        catalog_category = sa.Table(
            'catalog_category',
            Base.metadata,
            sa.Column('catalog_id', sa.Integer, sa.ForeignKey('catalog._id')),
            sa.Column('category_id', sa.Integer, sa.ForeignKey('category._id'))
        )
        class Catalog(Base):
            __tablename__ = 'catalog'
            id = sa.Column('_id', sa.Integer, primary_key=True)
            categories = sa.orm.relationship(
                Category,
                backref='catalogs',
                secondary=catalog_category
            )
        return Catalog
    @pytest.fixture
    def Category(self, Base, SubCategory):
        # association table linking categories and sub-categories
        category_subcategory = sa.Table(
            'category_subcategory',
            Base.metadata,
            sa.Column(
                'category_id',
                sa.Integer,
                sa.ForeignKey('category._id')
            ),
            sa.Column(
                'subcategory_id',
                sa.Integer,
                sa.ForeignKey('sub_category._id')
            )
        )
        class Category(Base):
            __tablename__ = 'category'
            id = sa.Column('_id', sa.Integer, primary_key=True)
            sub_categories = sa.orm.relationship(
                SubCategory,
                backref='categories',
                secondary=category_subcategory
            )
        return Category
    @pytest.fixture
    def SubCategory(self, Base, Product):
        # association table linking sub-categories and products
        subcategory_product = sa.Table(
            'subcategory_product',
            Base.metadata,
            sa.Column(
                'subcategory_id',
                sa.Integer,
                sa.ForeignKey('sub_category._id')
            ),
            sa.Column(
                'product_id',
                sa.Integer,
                sa.ForeignKey('product._id')
            )
        )
        class SubCategory(Base):
            __tablename__ = 'sub_category'
            id = sa.Column('_id', sa.Integer, primary_key=True)
            products = sa.orm.relationship(
                Product,
                backref='sub_categories',
                secondary=subcategory_product
            )
        return SubCategory
    @pytest.fixture
    def Product(self, Base):
        class Product(Base):
            __tablename__ = 'product'
            id = sa.Column('_id', sa.Integer, primary_key=True)
            price = sa.Column(sa.Numeric)
        return Product
    @pytest.fixture
    def init_models(self, Catalog, Category, SubCategory, Product):
        # forces all model fixtures to be instantiated before a test runs
        pass
| [
"sqlalchemy.orm.relationship",
"sqlalchemy.ForeignKey",
"sqlalchemy.Column"
] | [((222, 268), 'sqlalchemy.Column', 'sa.Column', (['"""_id"""', 'sa.Integer'], {'primary_key': '(True)'}), "('_id', sa.Integer, primary_key=True)\n", (231, 268), True, 'import sqlalchemy as sa\n'), ((292, 355), 'sqlalchemy.orm.relationship', 'sa.orm.relationship', (['Category'], {'uselist': '(False)', 'backref': '"""catalog"""'}), "(Category, uselist=False, backref='catalog')\n", (311, 355), True, 'import sqlalchemy as sa\n'), ((591, 637), 'sqlalchemy.Column', 'sa.Column', (['"""_id"""', 'sa.Integer'], {'primary_key': '(True)'}), "('_id', sa.Integer, primary_key=True)\n", (600, 637), True, 'import sqlalchemy as sa\n'), ((820, 887), 'sqlalchemy.orm.relationship', 'sa.orm.relationship', (['SubCategory'], {'uselist': '(False)', 'backref': '"""category"""'}), "(SubCategory, uselist=False, backref='category')\n", (839, 887), True, 'import sqlalchemy as sa\n'), ((1130, 1176), 'sqlalchemy.Column', 'sa.Column', (['"""_id"""', 'sa.Integer'], {'primary_key': '(True)'}), "('_id', sa.Integer, primary_key=True)\n", (1139, 1176), True, 'import sqlalchemy as sa\n'), ((1356, 1423), 'sqlalchemy.orm.relationship', 'sa.orm.relationship', (['Product'], {'uselist': '(False)', 'backref': '"""sub_category"""'}), "(Product, uselist=False, backref='sub_category')\n", (1375, 1423), True, 'import sqlalchemy as sa\n'), ((1647, 1693), 'sqlalchemy.Column', 'sa.Column', (['"""_id"""', 'sa.Integer'], {'primary_key': '(True)'}), "('_id', sa.Integer, primary_key=True)\n", (1656, 1693), True, 'import sqlalchemy as sa\n'), ((1714, 1735), 'sqlalchemy.Column', 'sa.Column', (['sa.Integer'], {}), '(sa.Integer)\n', (1723, 1735), True, 'import sqlalchemy as sa\n'), ((2216, 2262), 'sqlalchemy.Column', 'sa.Column', (['"""_id"""', 'sa.Integer'], {'primary_key': '(True)'}), "('_id', sa.Integer, primary_key=True)\n", (2225, 2262), True, 'import sqlalchemy as sa\n'), ((2289, 2337), 'sqlalchemy.orm.relationship', 'sa.orm.relationship', (['Category'], {'backref': '"""catalog"""'}), "(Category, 
backref='catalog')\n", (2308, 2337), True, 'import sqlalchemy as sa\n'), ((2511, 2557), 'sqlalchemy.Column', 'sa.Column', (['"""_id"""', 'sa.Integer'], {'primary_key': '(True)'}), "('_id', sa.Integer, primary_key=True)\n", (2520, 2557), True, 'import sqlalchemy as sa\n'), ((2742, 2794), 'sqlalchemy.orm.relationship', 'sa.orm.relationship', (['SubCategory'], {'backref': '"""category"""'}), "(SubCategory, backref='category')\n", (2761, 2794), True, 'import sqlalchemy as sa\n'), ((3005, 3051), 'sqlalchemy.Column', 'sa.Column', (['"""_id"""', 'sa.Integer'], {'primary_key': '(True)'}), "('_id', sa.Integer, primary_key=True)\n", (3014, 3051), True, 'import sqlalchemy as sa\n'), ((3232, 3284), 'sqlalchemy.orm.relationship', 'sa.orm.relationship', (['Product'], {'backref': '"""sub_category"""'}), "(Product, backref='sub_category')\n", (3251, 3284), True, 'import sqlalchemy as sa\n'), ((3492, 3538), 'sqlalchemy.Column', 'sa.Column', (['"""_id"""', 'sa.Integer'], {'primary_key': '(True)'}), "('_id', sa.Integer, primary_key=True)\n", (3501, 3538), True, 'import sqlalchemy as sa\n'), ((3559, 3580), 'sqlalchemy.Column', 'sa.Column', (['sa.Numeric'], {}), '(sa.Numeric)\n', (3568, 3580), True, 'import sqlalchemy as sa\n'), ((4413, 4459), 'sqlalchemy.Column', 'sa.Column', (['"""_id"""', 'sa.Integer'], {'primary_key': '(True)'}), "('_id', sa.Integer, primary_key=True)\n", (4422, 4459), True, 'import sqlalchemy as sa\n'), ((4486, 4563), 'sqlalchemy.orm.relationship', 'sa.orm.relationship', (['Category'], {'backref': '"""catalogs"""', 'secondary': 'catalog_category'}), "(Category, backref='catalogs', secondary=catalog_category)\n", (4505, 4563), True, 'import sqlalchemy as sa\n'), ((5207, 5253), 'sqlalchemy.Column', 'sa.Column', (['"""_id"""', 'sa.Integer'], {'primary_key': '(True)'}), "('_id', sa.Integer, primary_key=True)\n", (5216, 5253), True, 'import sqlalchemy as sa\n'), ((5284, 5375), 'sqlalchemy.orm.relationship', 'sa.orm.relationship', (['SubCategory'], {'backref': 
'"""categories"""', 'secondary': 'category_subcategory'}), "(SubCategory, backref='categories', secondary=\n category_subcategory)\n", (5303, 5375), True, 'import sqlalchemy as sa\n'), ((6017, 6063), 'sqlalchemy.Column', 'sa.Column', (['"""_id"""', 'sa.Integer'], {'primary_key': '(True)'}), "('_id', sa.Integer, primary_key=True)\n", (6026, 6063), True, 'import sqlalchemy as sa\n'), ((6087, 6177), 'sqlalchemy.orm.relationship', 'sa.orm.relationship', (['Product'], {'backref': '"""sub_categories"""', 'secondary': 'subcategory_product'}), "(Product, backref='sub_categories', secondary=\n subcategory_product)\n", (6106, 6177), True, 'import sqlalchemy as sa\n'), ((6396, 6442), 'sqlalchemy.Column', 'sa.Column', (['"""_id"""', 'sa.Integer'], {'primary_key': '(True)'}), "('_id', sa.Integer, primary_key=True)\n", (6405, 6442), True, 'import sqlalchemy as sa\n'), ((6463, 6484), 'sqlalchemy.Column', 'sa.Column', (['sa.Numeric'], {}), '(sa.Numeric)\n', (6472, 6484), True, 'import sqlalchemy as sa\n'), ((749, 777), 'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""catalog._id"""'], {}), "('catalog._id')\n", (762, 777), True, 'import sqlalchemy as sa\n'), ((1290, 1319), 'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""category._id"""'], {}), "('category._id')\n", (1303, 1319), True, 'import sqlalchemy as sa\n'), ((1858, 1891), 'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""sub_category._id"""'], {}), "('sub_category._id')\n", (1871, 1891), True, 'import sqlalchemy as sa\n'), ((2669, 2697), 'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""catalog._id"""'], {}), "('catalog._id')\n", (2682, 2697), True, 'import sqlalchemy as sa\n'), ((3165, 3194), 'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""category._id"""'], {}), "('category._id')\n", (3178, 3194), True, 'import sqlalchemy as sa\n'), ((3703, 3736), 'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""sub_category._id"""'], {}), "('sub_category._id')\n", (3716, 3736), True, 'import sqlalchemy as sa\n'), ((4207, 4235), 
'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""catalog._id"""'], {}), "('catalog._id')\n", (4220, 4235), True, 'import sqlalchemy as sa\n'), ((4287, 4316), 'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""category._id"""'], {}), "('category._id')\n", (4300, 4316), True, 'import sqlalchemy as sa\n'), ((4916, 4945), 'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""category._id"""'], {}), "('category._id')\n", (4929, 4945), True, 'import sqlalchemy as sa\n'), ((5062, 5095), 'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""sub_category._id"""'], {}), "('sub_category._id')\n", (5075, 5095), True, 'import sqlalchemy as sa\n'), ((5724, 5757), 'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""sub_category._id"""'], {}), "('sub_category._id')\n", (5737, 5757), True, 'import sqlalchemy as sa\n'), ((5870, 5898), 'sqlalchemy.ForeignKey', 'sa.ForeignKey', (['"""product._id"""'], {}), "('product._id')\n", (5883, 5898), True, 'import sqlalchemy as sa\n')] |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import exceptions
import httplib
import httplib2
import zlib
import six.moves.urllib.parse as urlparse
import socket
from oslo_serialization import jsonutils
from gbpservice.nfp.core import log as nfp_logging
LOG = nfp_logging.getLogger(__name__)
class RestClientException(exceptions.Exception):
    """Raised for any transport or HTTP-level failure in the unix REST client."""
class UnixHTTPConnection(httplib.HTTPConnection):
    """HTTPConnection variant that speaks HTTP over a UNIX domain socket."""

    def __init__(self, host, port=None, strict=None, timeout=None,
                 proxy_info=None):
        """Initialize like HTTPConnection, pinning the fixed UDS path."""
        httplib.HTTPConnection.__init__(self, host, port, strict)
        self.timeout = timeout
        self.socket_path = '/var/run/uds_socket'

    def connect(self):
        """Open a stream socket to the fixed UNIX socket path."""
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        self.sock = sock
        if self.timeout:
            sock.settimeout(self.timeout)
        try:
            sock.connect(self.socket_path)
        except socket.error as err:
            raise RestClientException(
                "Caught exception socket.error : %s" % err)
class UnixRestClient(object):
    """Minimal REST client that tunnels HTTP over a UNIX domain socket."""

    # HTTP statuses treated as success by send_request().
    SUCCESS_CODES = (200, 201, 202, 204)

    # Failure statuses mapped to the exception-message prefix they raise with.
    _FAILURE_PREFIX = {
        400: 'HTTPBadRequest',
        401: 'HTTPUnauthorized',
        403: 'HTTPForbidden',
        404: 'HttpNotFound',
        405: 'HTTPMethodNotAllowed',
        406: 'HTTPNotAcceptable',
        408: 'HTTPRequestTimeout',
        409: 'HTTPConflict',
        415: 'HTTPUnsupportedMediaType',
        417: 'HTTPExpectationFailed',
        500: 'HTTPServerError',
    }

    def _http_request(self, url, method_type, headers=None, body=None):
        """Perform the raw httplib2 request over UnixHTTPConnection.

        Returns (response, content); wraps transport errors in
        RestClientException.
        """
        try:
            h = httplib2.Http()
            resp, content = h.request(
                url,
                method=method_type,
                headers=headers,
                body=body,
                connection_type=UnixHTTPConnection)
            return resp, content
        except httplib2.ServerNotFoundError:
            raise RestClientException("Server Not Found")
        except exceptions.Exception as e:
            raise RestClientException("httplib response error %s" % (e))

    def send_request(self, path, method_type, request_method='http',
                     server_addr='127.0.0.1',
                     headers=None, body=None):
        """Implementation for common interface for all unix crud requests.

        Non-GET bodies are JSON-serialized; any non-None body is then
        zlib-compressed. Response content is zlib-decompressed.
        Return: Http Response
        Raises RestClientException for transport/HTTP failures and a plain
        Exception for unrecognized status codes.
        """
        # prepares path, body, url for sending unix request.
        if method_type.upper() != 'GET':
            body = jsonutils.dumps(body)
        # Bug fix: GET requests carry no body; the original unconditionally
        # called zlib.compress(None), which raises TypeError.
        if body is not None:
            body = zlib.compress(body)
        path = '/v1/nfp/' + path
        url = urlparse.urlunsplit((
            request_method,
            server_addr,
            path,
            None,
            ''))
        try:
            resp, content = self._http_request(url, method_type,
                                               headers=headers, body=body)
            if content != '':
                content = zlib.decompress(content)
            message = "%s:%s" % (resp, content)
            LOG.info(message)
        except RestClientException as rce:
            message = "ERROR : %s" % (rce)
            LOG.error(message)
            raise rce
        # Evaluate responses into success and failures. Raise exception for
        # failure cases which need to be handled by the caller.
        if resp.status in self.SUCCESS_CODES:
            return resp, content
        prefix = self._FAILURE_PREFIX.get(resp.status)
        if prefix is not None:
            raise RestClientException("%s: %s" % (prefix, resp.reason))
        raise Exception('Unhandled Exception code: %s %s' % (resp.status,
                                                            resp.reason))
def get(path):
    """Implements get method for unix restclient.

    Return: Http Response
    """
    client = UnixRestClient()
    return client.send_request(path, 'GET')
def put(path, body):
    """Implements put method for unix restclient.

    Return: Http Response
    """
    client = UnixRestClient()
    return client.send_request(
        path, 'PUT',
        headers={'content-type': 'application/octet-stream'},
        body=body)
def post(path, body, delete=False):
    """Implements post method for unix restclient.

    Return: Http Response

    A 'method-type' header distinguishes DELETE from CREATE, since a
    delete also has to ship data to the rest-unix-server and therefore
    travels as a POST.
    """
    headers = {
        'content-type': 'application/octet-stream',
        'method-type': 'DELETE' if delete else 'CREATE',
    }
    client = UnixRestClient()
    return client.send_request(path, 'POST', headers=headers, body=body)
| [
"gbpservice.nfp.core.log.getLogger",
"httplib.HTTPConnection.__init__",
"socket.socket",
"zlib.compress",
"oslo_serialization.jsonutils.dumps",
"httplib2.Http",
"six.moves.urllib.parse.urlunsplit",
"zlib.decompress"
] | [((792, 823), 'gbpservice.nfp.core.log.getLogger', 'nfp_logging.getLogger', (['__name__'], {}), '(__name__)\n', (813, 823), True, 'from gbpservice.nfp.core import log as nfp_logging\n'), ((1134, 1191), 'httplib.HTTPConnection.__init__', 'httplib.HTTPConnection.__init__', (['self', 'host', 'port', 'strict'], {}), '(self, host, port, strict)\n', (1165, 1191), False, 'import httplib\n'), ((1368, 1417), 'socket.socket', 'socket.socket', (['socket.AF_UNIX', 'socket.SOCK_STREAM'], {}), '(socket.AF_UNIX, socket.SOCK_STREAM)\n', (1381, 1417), False, 'import socket\n'), ((2806, 2872), 'six.moves.urllib.parse.urlunsplit', 'urlparse.urlunsplit', (["(request_method, server_addr, path, None, '')"], {}), "((request_method, server_addr, path, None, ''))\n", (2825, 2872), True, 'import six.moves.urllib.parse as urlparse\n'), ((1820, 1835), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (1833, 1835), False, 'import httplib2\n'), ((2697, 2718), 'oslo_serialization.jsonutils.dumps', 'jsonutils.dumps', (['body'], {}), '(body)\n', (2712, 2718), False, 'from oslo_serialization import jsonutils\n'), ((2738, 2757), 'zlib.compress', 'zlib.compress', (['body'], {}), '(body)\n', (2751, 2757), False, 'import zlib\n'), ((3144, 3168), 'zlib.decompress', 'zlib.decompress', (['content'], {}), '(content)\n', (3159, 3168), False, 'import zlib\n')] |
'''Robot sim with a nicer display.'''
from sim_framework import *
from math import radians
import tkinter
BACKGROUND_COLOR = 'grey60'
ENTITY_COLOR = 'RoyalBlue1'
OBSTACLE_COLOR = 'black'
ENTITY_TAG = 'entity'
class TKWorld(World):
    '''A world that will display via tkinter instead of ascii.'''

    def __init__(self, root, x_min, x_max, y_min, y_max, resolution=2, max_dist=10000, collision_delta_theta=1):
        '''Creates the canvas-backed world and walls off its rectangular bounds.

        Raises ValueError when either coordinate pair is not strictly ordered.
        '''
        super().__init__(resolution=resolution, max_dist=max_dist, collision_delta_theta=collision_delta_theta)
        if x_min >= x_max:
            raise ValueError('Improperly ordered x boundaries')
        self.x_min = x_min
        self.x_max = x_max
        if y_min >= y_max:
            raise ValueError('Improperly ordered y boundaries')
        self.y_min = y_min
        self.y_max = y_max
        self.root = root
        # Bug fix: the canvas height mixed axes (y_max - x_min); it must
        # span the y extent.
        self.room_canvas = tkinter.Canvas(self.root, bg=BACKGROUND_COLOR,
                                        height=self.y_max - self.y_min,
                                        width=self.x_max - self.x_min)
        # Bound the world on all four sides.
        self.add_obs(Wall(self.x_min, '-x'))
        self.add_obs(Wall(self.x_max, '+x'))
        self.add_obs(Wall(self.y_min, '-y'))
        self.add_obs(Wall(self.y_max, '+y'))

    def add_obs(self, obstacle):
        '''Adds the obstacle to tracking and also the TK canvas.'''
        super().add_obs(obstacle)
        if isinstance(obstacle, Wall):
            # In the TK world, walls are only used for the outside of the box;
            # nothing is drawn for them.
            pass
        elif isinstance(obstacle, Box):
            box_x1, box_y1 = self.get_canvas_coords(obstacle.x_min, obstacle.y_max)
            box_x2, box_y2 = self.get_canvas_coords(obstacle.x_max, obstacle.y_min)
            self.room_canvas.create_rectangle(box_x1, box_y1,
                                              box_x2, box_y2,
                                              fill=OBSTACLE_COLOR, outline=OBSTACLE_COLOR)
        else:
            print('Error: Unknown obstacle type added to sim:', type(obstacle).__name__)

    def get_canvas_coords(self, x, y):
        '''Converts simulation coordinates to canvas coordinates.'''
        # Canvas origin is top-left with y growing downward, so flip y.
        disp_x = x - self.x_min
        disp_y = (self.y_max - self.y_min) - (y - self.y_min) - 1
        return (disp_x, disp_y)

    def display(self):
        '''Redraws every entity on the canvas.'''
        try:
            self.room_canvas.delete(ENTITY_TAG)
        # Bug fix: the original caught _tkinter.TclError, but only `tkinter`
        # is imported, so the except clause itself raised NameError.
        except tkinter.TclError:
            return
        for ent in self.entities:
            if isinstance(ent, CircleBot):
                center_x, center_y = self.get_canvas_coords(ent.x, ent.y)
                ent_x1 = center_x - ent.radius
                ent_y1 = center_y - ent.radius
                ent_x2 = center_x + ent.radius
                ent_y2 = center_y + ent.radius
                self.room_canvas.create_oval(ent_x1, ent_y1,
                                             ent_x2, ent_y2,
                                             fill=ENTITY_COLOR, outline=ENTITY_COLOR,
                                             tags=(ENTITY_TAG,))
            else:
                print('Error: Unknown entity type found in sim:', type(ent).__name__)
        self.room_canvas.pack()
if __name__ == '__main__':
    root = tkinter.Tk()
    # World spanning [-500, 500] on both axes.
    W = TKWorld(root, -500, 500, -500, 500)
    # Four nested box obstacles; assumes Box takes (x_min, x_max, y_min, y_max)
    # -- TODO confirm against sim_framework.
    W.add_obs(Box(-500, -250, 250, 500))
    W.add_obs(Box(-450, -200, 200, 450))
    W.add_obs(Box(-400, -150, 150, 400))
    W.add_obs(Box(-350, -100, 100, 350))
    bot = CircleBot(100, 0, 0, 0)
    W.add_ent(bot)
    theta = radians(0)
    def update():
        # Re-schedule first so the ~60 FPS cadence is kept regardless of
        # how long the body below takes.
        root.after(int(1000 / 60), update)
        global theta
        W.display()
        # Slowly rotate the heading each frame.
        theta -= radians(0.2)
        # When move_ent reports a collision, kick the heading by a
        # golden-ratio multiple of a full turn so the bot does not retry
        # the same blocked direction.
        if W.move_ent(bot, 5, theta):
            theta -= radians(360 * 1.618)
        theta = theta % radians(360)
    root.after(int(1000 / 60), update)
    root.mainloop()
| [
"tkinter.Canvas",
"tkinter.Tk",
"math.radians"
] | [((3303, 3315), 'tkinter.Tk', 'tkinter.Tk', ([], {}), '()\n', (3313, 3315), False, 'import tkinter\n'), ((3591, 3601), 'math.radians', 'radians', (['(0)'], {}), '(0)\n', (3598, 3601), False, 'from math import radians\n'), ((868, 982), 'tkinter.Canvas', 'tkinter.Canvas', (['self.root'], {'bg': 'BACKGROUND_COLOR', 'height': '(self.y_max - self.x_min)', 'width': '(self.x_max - self.x_min)'}), '(self.root, bg=BACKGROUND_COLOR, height=self.y_max - self.\n x_min, width=self.x_max - self.x_min)\n', (882, 982), False, 'import tkinter\n'), ((3722, 3734), 'math.radians', 'radians', (['(0.2)'], {}), '(0.2)\n', (3729, 3734), False, 'from math import radians\n'), ((3794, 3814), 'math.radians', 'radians', (['(360 * 1.618)'], {}), '(360 * 1.618)\n', (3801, 3814), False, 'from math import radians\n'), ((3843, 3855), 'math.radians', 'radians', (['(360)'], {}), '(360)\n', (3850, 3855), False, 'from math import radians\n')] |
""" Here all the blog's urls routes will be mapped """
from django.urls import path
from django.conf.urls import include, url
from . import views
# Namespace used when reversing these routes, e.g. 'core:api:...'.
app_name = 'core'
urlpatterns = [
    # path('', views.home, name='home-page'),
    # Delegate everything under api/ to the core API's own URLconf.
    url(r'^api/', include('apps.core.api.urls', namespace='api')),
]
| [
"django.conf.urls.include"
] | [((246, 292), 'django.conf.urls.include', 'include', (['"""apps.core.api.urls"""'], {'namespace': '"""api"""'}), "('apps.core.api.urls', namespace='api')\n", (253, 292), False, 'from django.conf.urls import include, url\n')] |
from env import MsnDiscrete, MaplessNaviEnv
from robot_utils import *
from robot_utils.log import msn_debug
from robot_utils.scene import *
from env import *
from collections import Counter
MAX_FORCE = 10.
TARGET_VELOCITY = 5.
MULTIPLY = 2.0
def keyboard_control():
    """Open a pybullet GUI session with a keyboard-driven miniBox robot.

    Sets up the ground plane and robot, releases the default wheel motors,
    enables gravity and real-time simulation, then polls the keyboard
    forever. Never returns.
    """
    p.connect(p.GUI)
    # Hide rendering/GUI while the scene is being built.
    p.configureDebugVisualizer(p.COV_ENABLE_RENDERING, 0)
    p.configureDebugVisualizer(p.COV_ENABLE_GUI, 0)
    p.setAdditionalSearchPath(pybullet_data.getDataPath())
    # Load the ground plane and the robot.
    p.loadURDF("plane.urdf")
    urdf_path = os.path.join(os.path.dirname(__file__), "robot_utils/urdf/miniBox.urdf")
    robot_id = p.loadURDF(urdf_path, basePosition=[0., 0., 0.2], baseOrientation=p.getQuaternionFromEuler([0, 0, np.pi / 2.]))
    # Zero the wheel motor forces so keyboard control owns the joints.
    p.setJointMotorControlArray(
        bodyUniqueId=robot_id,
        jointIndices=[0, 1],
        controlMode=p.VELOCITY_CONTROL,
        forces=[0., 0.]
    )
    p.setGravity(0, 0, -9.8)
    p.setRealTimeSimulation(1)
    p.configureDebugVisualizer(p.COV_ENABLE_RENDERING, 1)
    # Cleanup: dropped the no-op `global` declarations (the constants were
    # never referenced here) and the unused per-iteration pose queries.
    while True:
        keyboard_control_miniBox(robot_id)
keyboard_control_miniBox(robot_id)
def u_MsnDiscrete():
    """Run one random-policy episode in the discrete mapless-navigation env."""
    environment = MsnDiscrete(render=True, laser_num=18)
    obs = environment.reset()
    done = False
    while not done:
        # Sample a random action and advance the environment by one step.
        obs, reward, done, info = environment.step(environment.sample())
        environment.render()
# keyboard_control()
# Running this module launches the random-policy demo episode.
u_MsnDiscrete()
"env.MsnDiscrete"
] | [((1315, 1353), 'env.MsnDiscrete', 'MsnDiscrete', ([], {'render': '(True)', 'laser_num': '(18)'}), '(render=True, laser_num=18)\n', (1326, 1353), False, 'from env import MsnDiscrete, MaplessNaviEnv\n')] |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""=================================================================
@Project : Algorithm_YuweiYin/LeetCode-All-Solution/Python3
@File : LC-1728-Cat-and-Mouse-II.py
@Author : [YuweiYin](https://github.com/YuweiYin)
@Date : 2022-05-10
=================================================================="""
import sys
import time
from typing import List, Tuple
import collections
"""
LeetCode - 1728 - (Hard) - Cat and Mouse II
https://leetcode.com/problems/cat-and-mouse-ii/
Description:
A game is played by a cat and a mouse named Cat and Mouse.
The environment is represented by a grid of size rows x cols,
where each element is a wall, floor, player (Cat, Mouse), or food.
Players are represented by the characters 'C'(Cat),'M'(Mouse).
Floors are represented by the character '.' and can be walked on.
Walls are represented by the character '#' and cannot be walked on.
Food is represented by the character 'F' and can be walked on.
There is only one of each character 'C', 'M', and 'F' in grid.
Mouse and Cat play according to the following rules:
Mouse moves first, then they take turns to move.
During each turn, Cat and Mouse can jump in one of the four directions (left, right, up, down).
They cannot jump over the wall nor outside of the grid.
catJump, mouseJump are the maximum lengths Cat and Mouse can jump at a time, respectively.
Cat and Mouse can jump less than the maximum length.
Staying in the same position is allowed.
Mouse can jump over Cat.
The game can end in 4 ways:
If Cat occupies the same position as Mouse, Cat wins.
If Cat reaches the food first, Cat wins.
If Mouse reaches the food first, Mouse wins.
If Mouse cannot get to the food within 1000 turns, Cat wins.
Given a rows x cols matrix grid and two integers catJump and mouseJump,
return true if Mouse can win the game if both Cat and Mouse play optimally, otherwise return false.
Example 1:
Input: grid = ["####F","#C...","M...."], catJump = 1, mouseJump = 2
Output: true
Explanation: Cat cannot catch Mouse on its turn nor can it get the food before Mouse.
Example 2:
Input: grid = ["M.C...F"], catJump = 1, mouseJump = 4
Output: true
Example 3:
Input: grid = ["M.C...F"], catJump = 1, mouseJump = 3
Output: false
Constraints:
rows == grid.length
cols = grid[i].length
1 <= rows, cols <= 8
grid[i][j] consist only of characters 'C', 'M', 'F', '.', and '#'.
There is only one of each character 'C', 'M', and 'F' in grid.
1 <= catJump, mouseJump <= 8
"""
class Solution:
    """Solver for "Cat and Mouse II" via retrograde (bottom-up) game analysis.

    Every game state is a triple (mouse position, cat position, whose turn).
    States with a forced outcome (cat on mouse, cat on food, mouse on food)
    are seeded into a queue, and their results are propagated backwards to
    predecessor states by topological sorting, so the initial state's label
    is decided without depth-limited search.
    """

    def __init__(self):
        # Turn markers: the mouse always moves first.
        self.MOUSE_TURN = 0
        self.CAT_TURN = 1
        # Outcome labels for a state.
        self.UNKNOWN = 0
        self.MOUSE_WIN = 1
        self.CAT_WIN = 2
        # The cat wins if the mouse cannot reach the food within this many moves.
        self.MAX_MOVE = 1000
        # Unit steps: up, down, left, right.
        self.DIRECTION = ((-1, 0), (1, 0), (0, -1), (0, 1))

    def canMouseWin(self, grid: List[str], catJump: int, mouseJump: int) -> bool:
        """Return True iff the mouse wins when both players play optimally."""
        # exception case
        assert isinstance(grid, list) and 1 <= len(grid) and 1 <= len(grid[0])
        assert isinstance(catJump, int) and 1 <= catJump
        assert isinstance(mouseJump, int) and 1 <= mouseJump
        # main method: (Game Theory & Topological Sorting)
        return self._canMouseWin(grid, catJump, mouseJump)

    def _canMouseWin(self, grid: List[str], catJump: int, mouseJump: int) -> bool:
        """Label every state by retrograde analysis; read off the start state."""
        assert isinstance(grid, list)
        max_row = len(grid)
        assert max_row >= 1
        max_col = len(grid[0])
        assert max_col >= 1
        total_block = max_row * max_col

        def __get_pos(_row: int, _col: int) -> int:
            # Flatten (row, col) into a single cell index.
            return int(_row * max_col + _col)

        # get the initial positions of the mouse, cat, and food
        mouse_start_pos = cat_start_pos = food_pos = 0
        for row_idx in range(max_row):
            for col_idx in range(max_col):
                cur_block = grid[row_idx][col_idx]
                if cur_block == 'M':
                    mouse_start_pos = __get_pos(row_idx, col_idx)
                elif cur_block == 'C':
                    cat_start_pos = __get_pos(row_idx, col_idx)
                elif cur_block == 'F':
                    food_pos = __get_pos(row_idx, col_idx)

        # degrees[mouse][cat][turn] = number of moves available from that
        # state; "stay in place" counts as one move for either player.
        degrees = [[[0, 0] for _ in range(total_block)] for _ in range(total_block)]
        for mouse in range(total_block):
            row_mouse, col_mouse = divmod(mouse, max_col)
            if grid[row_mouse][col_mouse] == '#':
                continue
            for cat in range(total_block):
                row_cat, col_cat = divmod(cat, max_col)
                if grid[row_cat][col_cat] == '#':
                    continue
                # The "stay" move is always legal.
                degrees[mouse][cat][self.MOUSE_TURN] += 1
                degrees[mouse][cat][self.CAT_TURN] += 1
                for d_row, d_col in self.DIRECTION:
                    # Every straight-line jump of the mouse, up to mouseJump cells.
                    row, col, jump = row_mouse + d_row, col_mouse + d_col, 1
                    while 0 <= row < max_row and 0 <= col < max_col and grid[row][col] != '#' and jump <= mouseJump:
                        next_mouse = __get_pos(row, col)
                        next_cat = __get_pos(row_cat, col_cat)
                        degrees[next_mouse][next_cat][self.MOUSE_TURN] += 1
                        row += d_row
                        col += d_col
                        jump += 1
                    # Every straight-line jump of the cat, up to catJump cells.
                    row, col, jump = row_cat + d_row, col_cat + d_col, 1
                    while 0 <= row < max_row and 0 <= col < max_col and grid[row][col] != '#' and jump <= catJump:
                        next_mouse = __get_pos(row_mouse, col_mouse)
                        next_cat = __get_pos(row, col)
                        degrees[next_mouse][next_cat][self.CAT_TURN] += 1
                        row += d_row
                        col += d_col
                        jump += 1

        # res[mouse][cat][turn] = [outcome, number of moves to that outcome].
        res = [[[[0, 0], [0, 0]] for _ in range(total_block)] for _ in range(total_block)]
        queue = collections.deque()

        # if the cat and mouse are in the same block, then the cat wins
        for pos in range(total_block):
            row, col = divmod(pos, max_col)
            if grid[row][col] == '#':
                continue
            res[pos][pos][self.MOUSE_TURN][0] = self.CAT_WIN
            res[pos][pos][self.MOUSE_TURN][1] = 0
            res[pos][pos][self.CAT_TURN][0] = self.CAT_WIN
            res[pos][pos][self.CAT_TURN][1] = 0
            queue.append((pos, pos, self.MOUSE_TURN))
            queue.append((pos, pos, self.CAT_TURN))

        # if the cat and food are in the same block, then the cat wins
        for mouse in range(total_block):
            row_mouse, col_mouse = divmod(mouse, max_col)
            if grid[row_mouse][col_mouse] == '#' or mouse == food_pos:
                continue
            res[mouse][food_pos][self.MOUSE_TURN][0] = self.CAT_WIN
            res[mouse][food_pos][self.MOUSE_TURN][1] = 0
            res[mouse][food_pos][self.CAT_TURN][0] = self.CAT_WIN
            res[mouse][food_pos][self.CAT_TURN][1] = 0
            queue.append((mouse, food_pos, self.MOUSE_TURN))
            queue.append((mouse, food_pos, self.CAT_TURN))

        # if the mouse and food are in the same block (cat elsewhere), the mouse wins
        for cat in range(total_block):
            row_cat, col_cat = divmod(cat, max_col)
            if grid[row_cat][col_cat] == '#' or cat == food_pos:
                continue
            res[food_pos][cat][self.MOUSE_TURN][0] = self.MOUSE_WIN
            res[food_pos][cat][self.MOUSE_TURN][1] = 0
            res[food_pos][cat][self.CAT_TURN][0] = self.MOUSE_WIN
            res[food_pos][cat][self.CAT_TURN][1] = 0
            queue.append((food_pos, cat, self.MOUSE_TURN))
            queue.append((food_pos, cat, self.CAT_TURN))

        def __get_prev_state(_mouse: int, _cat: int, _turn: int) -> List[Tuple[int, int, int]]:
            """List all states that can move into (_mouse, _cat, _turn) in one turn."""
            r_mouse, c_mouse = divmod(_mouse, max_col)
            r_cat, c_cat = divmod(_cat, max_col)
            prev_turn = self.CAT_TURN if _turn == self.MOUSE_TURN else self.MOUSE_TURN
            max_jump = mouseJump if prev_turn == self.MOUSE_TURN else catJump
            r_start = r_mouse if prev_turn == self.MOUSE_TURN else r_cat
            c_start = c_mouse if prev_turn == self.MOUSE_TURN else c_cat
            # The previous player may have stayed in place.
            prev_state = [(_mouse, _cat, prev_turn)]
            for d_r, d_c in self.DIRECTION:
                _r, _c, _jump = r_start + d_r, c_start + d_c, 1
                # Bug fix: the loop bound must use the local counter _jump;
                # the original tested the stale outer variable `jump`, so
                # predecessor enumeration was wrong.
                while 0 <= _r < max_row and 0 <= _c < max_col and grid[_r][_c] != '#' and _jump <= max_jump:
                    prev_r_mouse = _r if prev_turn == self.MOUSE_TURN else r_mouse
                    prev_c_mouse = _c if prev_turn == self.MOUSE_TURN else c_mouse
                    prev_mouse_pos = __get_pos(prev_r_mouse, prev_c_mouse)
                    prev_r_cat = r_cat if prev_turn == self.MOUSE_TURN else _r
                    prev_c_cat = c_cat if prev_turn == self.MOUSE_TURN else _c
                    prev_cat_pos = __get_pos(prev_r_cat, prev_c_cat)
                    prev_state.append((prev_mouse_pos, prev_cat_pos, prev_turn))
                    _r += d_r
                    _c += d_c
                    _jump += 1
            return prev_state

        # Topological Sorting: propagate decided outcomes backwards.
        while queue:
            mouse, cat, turn = queue.popleft()
            result = res[mouse][cat][turn][0]
            moves = res[mouse][cat][turn][1]
            for previous_mouse, previous_cat, previous_turn in __get_prev_state(mouse, cat, turn):
                if res[previous_mouse][previous_cat][previous_turn][0] == self.UNKNOWN:
                    if (result == self.MOUSE_WIN and previous_turn == self.MOUSE_TURN) or \
                            (result == self.CAT_WIN and previous_turn == self.CAT_TURN):
                        # The previous player can move into a state it wins.
                        res[previous_mouse][previous_cat][previous_turn][0] = result
                        res[previous_mouse][previous_cat][previous_turn][1] = moves + 1
                        queue.append((previous_mouse, previous_cat, previous_turn))
                    else:
                        # Otherwise the previous state loses only once every
                        # one of its moves is known to lose.
                        degrees[previous_mouse][previous_cat][previous_turn] -= 1
                        if degrees[previous_mouse][previous_cat][previous_turn] == 0:
                            loseResult = self.CAT_WIN if previous_turn == self.MOUSE_TURN else self.MOUSE_WIN
                            res[previous_mouse][previous_cat][previous_turn][0] = loseResult
                            res[previous_mouse][previous_cat][previous_turn][1] = moves + 1
                            queue.append((previous_mouse, previous_cat, previous_turn))

        # The mouse wins iff the start state is MOUSE_WIN within the move limit.
        if res[mouse_start_pos][cat_start_pos][self.MOUSE_TURN][0] == self.MOUSE_WIN and \
                res[mouse_start_pos][cat_start_pos][self.MOUSE_TURN][1] <= self.MAX_MOVE:
            return True
        else:
            return False
def main():
    """Drive Solution.canMouseWin on a sample input and report timing."""
    # Example 1 ("####F","#C...","M....", 1, 2) -> true
    # Example 2 ("M.C...F", 1, 4) -> true
    # Example 3 (below) -> false
    grid = ["M.C...F"]
    catJump = 1
    mouseJump = 3

    solver = Solution()

    # Time the solve with CPU process time.
    start = time.process_time()
    ans = solver.canMouseWin(grid, catJump, mouseJump)
    end = time.process_time()

    print('\nAnswer:')
    print(ans)
    elapsed_ms = (end - start) * 1000
    print('Running Time: %.5f ms' % elapsed_ms)
if __name__ == "__main__":
sys.exit(main())
| [
"time.process_time",
"collections.deque"
] | [((11395, 11414), 'time.process_time', 'time.process_time', ([], {}), '()\n', (11412, 11414), False, 'import time\n'), ((11482, 11501), 'time.process_time', 'time.process_time', ([], {}), '()\n', (11499, 11501), False, 'import time\n'), ((6084, 6103), 'collections.deque', 'collections.deque', ([], {}), '()\n', (6101, 6103), False, 'import collections\n')] |
from django.urls import path
from . import views
urlpatterns = [
    # Function-based checkout view.
    path('checkout/', views.checkout),
    # Class-based list view for orders.
    path('orders/', views.OrdersList.as_view()),
]
"django.urls.path"
] | [((71, 104), 'django.urls.path', 'path', (['"""checkout/"""', 'views.checkout'], {}), "('checkout/', views.checkout)\n", (75, 104), False, 'from django.urls import path\n')] |
#
# Copyright (c) 2019 <NAME>
#
# Licensed under MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this
# software and associated documentation files (the "Software"), to deal in the Software
# without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
# to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
# LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH
# THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from PyQt5.QtWidgets import QScrollArea, QWidget, QGridLayout
from QClickableImage import *
from PyQt5.QtCore import QRect
#=======================================================================================================================
class QScrollAreaImages(QScrollArea):
    """Scroll area that lays out clickable image thumbnails in a grid.

    The grid re-flows on resize so that as many displayed_image_size-wide
    columns as fit are used.
    """
    # Edge length (pixels) assumed for every displayed thumbnail.
    displayed_image_size = 100
    #-------------------------------------------------------------------------------------------------------------------
    def __init__(self, width=0, height=0, pixmap=None):
        # NOTE(review): width/height/pixmap are currently unused; kept for
        # backward compatibility with existing callers.
        QScrollArea.__init__(self)
        # make the scroll area resizeable
        self.setWidgetResizable(True)
        # widget holding the contents of the scroll area
        self.scrollAreaWidgetContents = QWidget()
        # grid layout that holds the thumbnails
        self.gridLayout = QGridLayout(self.scrollAreaWidgetContents)
        # put the contents widget in the scroll area
        self.setWidget(self.scrollAreaWidgetContents)
    #-------------------------------------------------------------------------------------------------------------------
    def get_nr_of_image_columns(self):
        """Return how many thumbnail columns fit in the current width (min 1)."""
        scroll_area_images_width = self.width()
        if scroll_area_images_width > self.displayed_image_size:
            nr_of_columns = scroll_area_images_width // self.displayed_image_size
        else:
            nr_of_columns = 1
        return nr_of_columns
    #-------------------------------------------------------------------------------------------------------------------
    def on_resize(self, event):
        """Re-flow all thumbnails into the number of columns that now fits."""
        nr_of_columns = self.get_nr_of_image_columns()
        nr_of_widgets = self.gridLayout.count()
        # Collect the layout items first; removing while iterating the
        # layout would skip entries.
        widgets = []
        for i in range(nr_of_widgets):
            widgets.append(self.gridLayout.itemAt(i))
        column_nr = 0
        row_nr = 0
        for widget in widgets:
            self.gridLayout.removeItem(widget)
            self.gridLayout.addWidget(widget.widget(), row_nr, column_nr)
            if column_nr == nr_of_columns - 1:
                column_nr = 0
                row_nr += 1
            else:
                column_nr += 1
    #-------------------------------------------------------------------------------------------------------------------
    def setDisplayedImageSize(self, image_size):
        """Set the thumbnail edge length used for layout and new images."""
        self.displayed_image_size = image_size
    #-------------------------------------------------------------------------------------------------------------------
    def addImage(self, pixmap, image_id):
        """Append a clickable thumbnail at the next free grid cell."""
        nr_of_columns = self.get_nr_of_image_columns()
        nr_of_widgets = self.gridLayout.count()
        row_nr = nr_of_widgets // nr_of_columns
        column_nr = nr_of_widgets % nr_of_columns
        clickable_image = QClickableImage(self.displayed_image_size, self.displayed_image_size, pixmap, image_id)
        clickable_image.clicked.connect(self.on_left_clicked)
        clickable_image.rightClicked.connect(self.on_right_clicked)
        # Bug fix: QGridLayout.addWidget takes (widget, row, column); the
        # original passed (column, row), transposing placement relative to
        # on_resize() until the first resize re-flowed the grid.
        self.gridLayout.addWidget(clickable_image, row_nr, column_nr)
    #-------------------------------------------------------------------------------------------------------------------
    def on_left_clicked(self, image_id):
        """Default left-click handler; logs the clicked image id."""
        print('left clicked - image id = ' + image_id)
    #-------------------------------------------------------------------------------------------------------------------
    def on_right_clicked(self, image_id):
        """Default right-click handler; logs the clicked image id."""
        print('right clicked - image id = ' + image_id)
    #-------------------------------------------------------------------------------------------------------------------
    def resizeEvent(self, event):
        # Delegate to on_resize so the grid re-flows with the widget.
        self.on_resize(event)
| [
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtWidgets.QGridLayout",
"PyQt5.QtWidgets.QScrollArea.__init__"
] | [((1653, 1679), 'PyQt5.QtWidgets.QScrollArea.__init__', 'QScrollArea.__init__', (['self'], {}), '(self)\n', (1673, 1679), False, 'from PyQt5.QtWidgets import QScrollArea, QWidget, QGridLayout\n'), ((1866, 1875), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (1873, 1875), False, 'from PyQt5.QtWidgets import QScrollArea, QWidget, QGridLayout\n'), ((2022, 2064), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', (['self.scrollAreaWidgetContents'], {}), '(self.scrollAreaWidgetContents)\n', (2033, 2064), False, 'from PyQt5.QtWidgets import QScrollArea, QWidget, QGridLayout\n')] |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import json
import os
import random
import time
import uuid
import pyrax
from pyrax.autoscale import AutoScaleClient
from pyrax.autoscale import AutoScalePolicy
from pyrax.autoscale import AutoScaleWebhook
from pyrax.autoscale import ScalingGroup
from pyrax.autoscale import ScalingGroupManager
from pyrax.client import BaseClient
from pyrax.clouddatabases import CloudDatabaseClient
from pyrax.clouddatabases import CloudDatabaseDatabaseManager
from pyrax.clouddatabases import CloudDatabaseInstance
from pyrax.clouddatabases import CloudDatabaseManager
from pyrax.clouddatabases import CloudDatabaseUser
from pyrax.clouddatabases import CloudDatabaseUserManager
from pyrax.clouddatabases import CloudDatabaseVolume
from pyrax.cloudblockstorage import CloudBlockStorageClient
from pyrax.cloudblockstorage import CloudBlockStorageManager
from pyrax.cloudblockstorage import CloudBlockStorageSnapshot
from pyrax.cloudblockstorage import CloudBlockStorageSnapshotManager
from pyrax.cloudblockstorage import CloudBlockStorageVolume
from pyrax.cloudloadbalancers import CloudLoadBalancer
from pyrax.cloudloadbalancers import CloudLoadBalancerManager
from pyrax.cloudloadbalancers import CloudLoadBalancerClient
from pyrax.cloudloadbalancers import Node
from pyrax.cloudloadbalancers import VirtualIP
from pyrax.clouddns import CloudDNSClient
from pyrax.clouddns import CloudDNSDomain
from pyrax.clouddns import CloudDNSManager
from pyrax.clouddns import CloudDNSRecord
from pyrax.clouddns import CloudDNSPTRRecord
from pyrax.cloudnetworks import CloudNetwork
from pyrax.cloudnetworks import CloudNetworkClient
from pyrax.cloudmonitoring import CloudMonitorClient
from pyrax.cloudmonitoring import CloudMonitorEntity
from pyrax.cloudmonitoring import CloudMonitorCheck
from pyrax.cloudmonitoring import CloudMonitorNotification
from pyrax.image import Image
from pyrax.image import ImageClient
from pyrax.image import ImageManager
from pyrax.image import ImageMemberManager
from pyrax.image import ImageTagManager
from pyrax.object_storage import BulkDeleter
from pyrax.object_storage import Container
from pyrax.object_storage import ContainerManager
from pyrax.object_storage import FolderUploader
from pyrax.object_storage import StorageClient
from pyrax.object_storage import StorageObject
from pyrax.object_storage import StorageObjectManager
from pyrax.queueing import Queue
from pyrax.queueing import QueueClaim
from pyrax.queueing import QueueMessage
from pyrax.queueing import QueueClient
from pyrax.queueing import QueueManager
import pyrax.exceptions as exc
from pyrax.base_identity import BaseIdentity
from pyrax.base_identity import Endpoint
from pyrax.base_identity import Service
from pyrax.identity.rax_identity import RaxIdentity
from pyrax.identity.keystone_identity import KeystoneIdentity
import pyrax.utils as utils
example_uri = "http://example.com"
class FakeResponse(object):
    """Minimal stand-in for a requests-style HTTP response object."""
    headers = {}
    body = ""
    status_code = 200
    reason = "Oops"
    content = "Oops"
    @property
    def status(self):
        # TEMPORARY - until the cf_wrapper code is removed.
        return self.status_code
    @status.setter
    def status(self, val):
        # TEMPORARY - until the cf_wrapper code is removed.
        self.status_code = val
    def getheaders(self):
        """Return the complete header mapping."""
        return self.headers
    def read(self):
        """Return a canned two-line body."""
        return "Line1\nLine2"
    def get(self, arg):
        """Look up a single header value, or None when absent."""
        return self.headers.get(arg)
    def json(self):
        """Return the canned content as the decoded JSON payload."""
        return self.content
class FakeIterator(utils.ResultsIterator):
    # Iterator double: skips the method wiring normally done by
    # ResultsIterator so it can be driven directly in tests.
    def _init_methods(self):
        pass
class FakeClient(object):
    # Generic client double; carries only a user agent and a fake identity.
    user_agent = "Fake"
    USER_AGENT = "Fake"
    def __init__(self, *args, **kwargs):
        self.identity = FakeIdentity()
class FakeStorageClient(StorageClient):
    # Object-storage client double; defaults to a FakeIdentity when none given.
    def __init__(self, identity=None, *args, **kwargs):
        if identity is None:
            identity = FakeIdentity()
        super(FakeStorageClient, self).__init__(identity, *args, **kwargs)
    def create(self, name):
        # Containers are fabricated locally instead of hitting the API.
        return FakeContainer(self._manager, {"name": name})
class FakeContainerManager(ContainerManager):
    # Container manager double backed by a FakeStorageClient.
    def __init__(self, api=None, *args, **kwargs):
        if api is None:
            api = FakeStorageClient()
        super(FakeContainerManager, self).__init__(api, *args, **kwargs)
class FakeContainer(Container):
    # Container double wired to a fake object manager that points back at it.
    def __init__(self, *args, **kwargs):
        super(FakeContainer, self).__init__(*args, **kwargs)
        self.object_manager = FakeStorageObjectManager(self.manager.api,
                uri_base=self.name)
        self.object_manager._container = self
class FakeStorageObjectManager(StorageObjectManager):
    # Object manager double; invents a random uri_base when not supplied.
    def __init__(self, api=None, *args, **kwargs):
        if api is None:
            api = FakeStorageClient()
        if "uri_base" not in kwargs:
            kwargs["uri_base"] = utils.random_ascii()
        super(FakeStorageObjectManager, self).__init__(api, *args, **kwargs)
class FakeStorageObject(StorageObject):
    # Storage object double; mirrors the real attribute layout.
    def __init__(self, manager, name=None, total_bytes=None, content_type=None,
            last_modified=None, etag=None, attdict=None):
        """
        The object can either be initialized with individual params, or by
        passing the dict that is returned by swiftclient.
        """
        self.manager = manager
        self.name = name
        self.bytes = total_bytes or 0
        self.content_type = content_type
        self.last_modified = last_modified
        self.hash = etag
        if attdict:
            # attdict takes precedence: swiftclient-style keys overwrite
            # the individually-passed values.
            self._read_attdict(attdict)
# Sample swiftclient-style attribute dict for exercising FakeStorageObject.
fake_attdict = {"name": "fake",
        "content-length": 42,
        "content-type": "text/html",
        "etag": "ABC",
        "last-modified": "Tue, 01 Jan 2013 01:02:03 GMT",
        }
class FakeServer(object):
    # Server double; the tests only ever need an ID.
    id = utils.random_unicode()
class FakeService(object):
    # Generic service double exposing the handful of calls tests exercise.
    user_agent = "FakeService"
    USER_AGENT = "FakeService"
    def __init__(self, *args, **kwargs):
        self.client = FakeClient()
        self.Node = FakeNode
        self.VirtualIP = FakeVirtualIP
        self.loadbalancers = FakeLoadBalancer()
        self.id = utils.random_unicode()
    def authenticate(self):
        pass
    def get_protocols(self):
        return ["HTTP"]
    def get_algorithms(self):
        return ["RANDOM"]
    def get_usage(self):
        pass
class FakeCSClient(FakeService):
    # Cloud Servers client double: servers/images/flavors each receive a
    # dynamically attached no-op list() via utils.add_method.
    def __init__(self, *args, **kwargs):
        ident = FakeIdentity()
        super(FakeCSClient, self).__init__(ident, *args, **kwargs)
        def dummy(self):
            pass
        self.servers = FakeService()
        utils.add_method(self.servers, dummy, "list")
        self.images = FakeService()
        utils.add_method(self.images, dummy, "list")
        self.flavors = FakeService()
        utils.add_method(self.flavors, dummy, "list")
class FakeFolderUploader(FolderUploader):
    # Uploader double; swaps run() for a no-op while keeping the original.
    def __init__(self, *args, **kwargs):
        super(FakeFolderUploader, self).__init__(*args, **kwargs)
        # Useful for when we mock out the run() method.
        self.actual_run = self.run
        self.run = self.fake_run
    def fake_run(self):
        pass
class FakeBulkDeleter(BulkDeleter):
    # Bulk-delete double; swaps run() for an instant, always-successful stub.
    def __init__(self, *args, **kwargs):
        super(FakeBulkDeleter, self).__init__(*args, **kwargs)
        # Useful for when we mock out the run() method.
        self.actual_run = self.run
        self.run = self.fake_run
    def fake_run(self):
        # Simulate a near-instant bulk delete that completes cleanly.
        time.sleep(0.0001)
        self.results = {}
        self.completed = True
class FakeManager(object):
    """No-op manager double exposing the common pyrax manager interface.

    Every CRUD method is a stub that accepts the standard arguments and
    returns None, so tests can substitute it for any concrete manager.
    """
    def __init__(self, *args, **kwargs):
        super(FakeManager, self).__init__(*args, **kwargs)
        self.api = FakeClient()
    def list(self):
        pass
    def get(self, item):
        pass
    def delete(self, item):
        pass
    def create(self, *args, **kwargs):
        pass
    def find(self, *args, **kwargs):
        pass
    def action(self, item, action_type, body=None):
        # FIX: the default was the mutable literal `{}`, which Python shares
        # across all calls (classic pitfall).  None is a safe equivalent here
        # because this stub never reads `body`.
        pass
class FakeException(BaseException):
    # NOTE(review): derives from BaseException rather than Exception, so a
    # plain `except Exception` will NOT catch it — presumably intentional
    # for the tests; confirm before changing.
    pass
class FakeKeyring(object):
    """In-memory keyring double that records whether a password was stored."""
    # Flipped to True by set_password(); nothing is actually persisted.
    password_set = False
    def get_password(self, *args, **kwargs):
        """Return a canned "token|url" credential string."""
        return "FAKE_TOKEN|FAKE_URL"
    def set_password(self, *args, **kwargs):
        """Record that a password write was attempted."""
        self.password_set = True
class FakeEntity(object):
    # Minimal entity double: a random ID plus no-op get()/list().
    def __init__(self, *args, **kwargs):
        self.id = utils.random_unicode()
    def get(self, *args, **kwargs):
        pass
    def list(self, *args, **kwargs):
        pass
class FakeDatabaseUser(CloudDatabaseUser):
    # Inherits all real user-resource behavior unchanged.
    pass
class FakeDatabaseVolume(CloudDatabaseVolume):
    # Volume double with fixed size/usage figures.
    def __init__(self, instance, *args, **kwargs):
        self.instance = instance
        self.size = 1
        self.used = 0.2
class FakeDatabaseInstance(CloudDatabaseInstance):
    # Instance double wiring together fake managers and a fake volume.
    def __init__(self, *args, **kwargs):
        self.id = utils.random_unicode()
        self.manager = FakeDatabaseManager()
        self.manager.api = FakeDatabaseClient()
        self._database_manager = CloudDatabaseDatabaseManager(
                FakeDatabaseClient())
        self._user_manager = CloudDatabaseUserManager(FakeDatabaseClient())
        self.volume = FakeDatabaseVolume(self)
class FakeDatabaseManager(CloudDatabaseManager):
    # Database manager double backed by a fake client.
    def __init__(self, api=None, *args, **kwargs):
        if api is None:
            api = FakeDatabaseClient()
        super(FakeDatabaseManager, self).__init__(api, *args, **kwargs)
        self.uri_base = "instances"
class FakeDatabaseClient(CloudDatabaseClient):
    # Database client double authenticating with the canned fake credentials.
    def __init__(self, *args, **kwargs):
        self._manager = FakeDatabaseManager(self)
        self._flavor_manager = FakeManager()
        ident = FakeIdentity()
        super(FakeDatabaseClient, self).__init__(ident, "fakeuser",
                "fakepassword", *args, **kwargs)
class FakeNovaVolumeClient(BaseClient):
    # Nova volume client double; deliberately skips all BaseClient setup.
    def __init__(self, *args, **kwargs):
        pass
class FakeBlockStorageManager(CloudBlockStorageManager):
    # Block-storage manager double backed by a fake client.
    def __init__(self, api=None, *args, **kwargs):
        ident = FakeIdentity()
        if api is None:
            api = FakeBlockStorageClient(ident)
        super(FakeBlockStorageManager, self).__init__(api, *args, **kwargs)
class FakeBlockStorageVolume(CloudBlockStorageVolume):
    # Volume double with a random ID and stubbed manager/nova sub-clients.
    def __init__(self, *args, **kwargs):
        # FIX: dropped the dead local `volname = utils.random_unicode(8)`;
        # it was assigned but never read.
        self.id = utils.random_unicode()
        self.manager = FakeBlockStorageManager()
        self._nova_volumes = FakeNovaVolumeClient()
class FakeBlockStorageSnapshot(CloudBlockStorageSnapshot):
    # Snapshot double that is immediately "available".
    def __init__(self, *args, **kwargs):
        self.id = utils.random_unicode()
        self.manager = FakeManager()
        self.status = "available"
class FakeBlockStorageClient(CloudBlockStorageClient):
    # Block-storage client double with stubbed type/snapshot managers.
    def __init__(self, *args, **kwargs):
        self._types_manager = FakeManager()
        self._snapshot_manager = FakeManager()
        ident = FakeIdentity()
        super(FakeBlockStorageClient, self).__init__(ident, "fakeuser",
                "fakepassword", *args, **kwargs)
class FakeSnapshotManager(CloudBlockStorageSnapshotManager):
    # Snapshot manager double backed by a fake block-storage client.
    def __init__(self, api=None, *args, **kwargs):
        ident = FakeIdentity()
        if api is None:
            api = FakeBlockStorageClient(ident)
        super(FakeSnapshotManager, self).__init__(api, *args, **kwargs)
class FakeLoadBalancerClient(CloudLoadBalancerClient):
    # Load-balancer client double with canned fake credentials.
    def __init__(self, *args, **kwargs):
        ident = FakeIdentity()
        super(FakeLoadBalancerClient, self).__init__(ident, "fakeuser",
                "fakepassword", *args, **kwargs)
class FakeLoadBalancerManager(CloudLoadBalancerManager):
    # Load-balancer manager double backed by a fake client.
    def __init__(self, api=None, *args, **kwargs):
        if api is None:
            api = FakeLoadBalancerClient()
        super(FakeLoadBalancerManager, self).__init__(api, *args, **kwargs)
class FakeLoadBalancer(CloudLoadBalancer):
    # Load-balancer double with a random ID and port in [1, 256].
    def __init__(self, name=None, info=None, *args, **kwargs):
        name = name or utils.random_ascii()
        info = info or {"fake": "fake"}
        super(FakeLoadBalancer, self).__init__(name, info, *args, **kwargs)
        self.id = utils.random_ascii()
        self.port = random.randint(1, 256)
        self.manager = FakeLoadBalancerManager()
class FakeNode(Node):
    # Node double supplying defaults for address ("0.0.0.0"), port (80)
    # and a random ID when none are given.
    def __init__(self, address=None, port=None, condition=None, weight=None,
            status=None, parent=None, type=None, id=None):
        if address is None:
            address = "0.0.0.0"
        if port is None:
            port = 80
        if id is None:
            id = utils.random_unicode()
        super(FakeNode, self).__init__(address=address, port=port,
                condition=condition, weight=weight, status=status,
                parent=parent, type=type, id=id)
class FakeVirtualIP(VirtualIP):
    # Inherits all real virtual-IP behavior unchanged.
    pass
class FakeStatusChanger(object):
    """Double whose status reads 'changing' twice, then 'ready' forever."""
    check_count = 0
    id = utils.random_unicode()
    @property
    def status(self):
        # First two polls report an in-progress state; afterwards done.
        if self.check_count >= 2:
            return "ready"
        self.check_count += 1
        return "changing"
class FakeDNSClient(CloudDNSClient):
    # DNS client double with canned fake credentials.
    def __init__(self, *args, **kwargs):
        ident = FakeIdentity()
        super(FakeDNSClient, self).__init__(ident, "fakeuser",
                "fakepassword", *args, **kwargs)
class FakeDNSManager(CloudDNSManager):
    # DNS manager double; produces FakeDNSDomain resources.
    def __init__(self, api=None, *args, **kwargs):
        if api is None:
            api = FakeDNSClient()
        super(FakeDNSManager, self).__init__(api, *args, **kwargs)
        self.resource_class = FakeDNSDomain
        self.response_key = "domain"
        self.plural_response_key = "domains"
        self.uri_base = "domains"
class FakeDNSDomain(CloudDNSDomain):
    # Domain double with random ID/name and a fake manager.
    def __init__(self, *args, **kwargs):
        self.id = utils.random_ascii()
        self.name = utils.random_unicode()
        self.manager = FakeDNSManager()
class FakeDNSRecord(CloudDNSRecord):
    # Record double; passes straight through to the real constructor.
    def __init__(self, mgr, info, *args, **kwargs):
        super(FakeDNSRecord, self).__init__(mgr, info, *args, **kwargs)
class FakeDNSPTRRecord(CloudDNSPTRRecord):
    # Inherits all real PTR-record behavior unchanged.
    pass
class FakeDNSDevice(FakeLoadBalancer):
    # DNS "device" double: deliberately skips FakeLoadBalancer.__init__
    # and only supplies an ID.
    def __init__(self, *args, **kwargs):
        self.id = utils.random_unicode()
class FakeCloudNetworkClient(CloudNetworkClient):
    # Network client double with canned fake credentials.
    def __init__(self, *args, **kwargs):
        ident = FakeIdentity()
        super(FakeCloudNetworkClient, self).__init__(ident, "fakeuser",
                "fakepassword", *args, **kwargs)
class FakeCloudNetwork(CloudNetwork):
    # Network double; accepts "label" or "name" kwargs, otherwise random.
    def __init__(self, *args, **kwargs):
        info = kwargs.pop("info", {"fake": "fake"})
        label = kwargs.pop("label", kwargs.pop("name", utils.random_unicode()))
        info["label"] = label
        super(FakeCloudNetwork, self).__init__(manager=None, info=info, *args,
                **kwargs)
        self.id = uuid.uuid4().hex
class FakeAutoScaleClient(AutoScaleClient):
    # Autoscale client double with a stubbed manager.
    def __init__(self, *args, **kwargs):
        ident = FakeIdentity()
        self._manager = FakeManager()
        super(FakeAutoScaleClient, self).__init__(ident, *args, **kwargs)
class FakeAutoScalePolicy(AutoScalePolicy):
    # Policy double with a random ID.
    def __init__(self, *args, **kwargs):
        super(FakeAutoScalePolicy, self).__init__(*args, **kwargs)
        self.id = utils.random_ascii()
class FakeAutoScaleWebhook(AutoScaleWebhook):
    # Webhook double with a random ID.
    def __init__(self, *args, **kwargs):
        super(FakeAutoScaleWebhook, self).__init__(*args, **kwargs)
        self.id = utils.random_ascii()
class FakeScalingGroupManager(ScalingGroupManager):
    # Scaling-group manager double backed by a fake client.
    def __init__(self, api=None, *args, **kwargs):
        if api is None:
            api = FakeAutoScaleClient()
        super(FakeScalingGroupManager, self).__init__(api, *args, **kwargs)
        self.id = utils.random_ascii()
class FakeScalingGroup(ScalingGroup):
    # Scaling-group double with an empty group configuration.
    def __init__(self, name=None, info=None, *args, **kwargs):
        name = name or utils.random_ascii()
        info = info or {"fake": "fake", "scalingPolicies": []}
        self.groupConfiguration = {}
        super(FakeScalingGroup, self).__init__(name, info, *args, **kwargs)
        self.id = utils.random_ascii()
        self.name = name
        self.manager = FakeScalingGroupManager()
class FakeCloudMonitorClient(CloudMonitorClient):
    # Monitoring client double with canned fake credentials.
    def __init__(self, *args, **kwargs):
        ident = FakeIdentity()
        super(FakeCloudMonitorClient, self).__init__(ident, "fakeuser",
                "fakepassword", *args, **kwargs)
class FakeCloudMonitorEntity(CloudMonitorEntity):
    # Entity double; random ID, manager backed by a fake monitor client.
    def __init__(self, *args, **kwargs):
        info = kwargs.pop("info", {"fake": "fake"})
        info["id"] = utils.random_ascii()
        super(FakeCloudMonitorEntity, self).__init__(FakeManager(), info=info,
                *args, **kwargs)
        self.manager.api = FakeCloudMonitorClient()
class FakeCloudMonitorCheck(CloudMonitorCheck):
    # Check double; accepts an optional "entity" kwarg to attach to.
    def __init__(self, *args, **kwargs):
        info = kwargs.pop("info", {"fake": "fake"})
        entity = kwargs.pop("entity", None)
        info["id"] = utils.random_ascii()
        super(FakeCloudMonitorCheck, self).__init__(FakeManager(), info, *args,
                **kwargs)
        self.set_entity(entity)
        self.id = uuid.uuid4()
class FakeCloudMonitorNotification(CloudMonitorNotification):
    # Notification double with a random UUID and no manager.
    def __init__(self, *args, **kwargs):
        info = kwargs.pop("info", {"fake": "fake"})
        super(FakeCloudMonitorNotification, self).__init__(manager=None,
                info=info, *args, **kwargs)
        self.id = uuid.uuid4()
class FakeQueue(Queue):
    # Queue double with a random name and a fake manager by default.
    def __init__(self, *args, **kwargs):
        info = kwargs.pop("info", {"fake": "fake"})
        info["name"] = utils.random_unicode()
        mgr = kwargs.pop("manager", FakeQueueManager())
        super(FakeQueue, self).__init__(manager=mgr, info=info, *args, **kwargs)
class FakeQueueClaim(QueueClaim):
    # Claim double with a random name and a fake manager by default.
    def __init__(self, *args, **kwargs):
        info = kwargs.pop("info", {"fake": "fake"})
        info["name"] = utils.random_unicode()
        mgr = kwargs.pop("manager", FakeQueueManager())
        super(FakeQueueClaim, self).__init__(manager=mgr, info=info, *args,
                **kwargs)
class FakeQueueClient(QueueClient):
    # Queue client double authenticating with the canned fake identity.
    def __init__(self, *args, **kwargs):
        ident = FakeIdentity()
        # FIX: the password literal had been mangled to "<PASSWORD>password"
        # (a data-sanitization artifact).  Every other fake client in this
        # module passes "fakepassword", which is the credential
        # FakeIdentity.authenticate accepts.
        super(FakeQueueClient, self).__init__(ident, "fakeuser",
                "fakepassword", *args, **kwargs)
class FakeQueueManager(QueueManager):
    # Queue manager double backed by a fake client, with a random ID.
    def __init__(self, api=None, *args, **kwargs):
        if api is None:
            api = FakeQueueClient()
        super(FakeQueueManager, self).__init__(api, *args, **kwargs)
        self.id = utils.random_ascii()
class FakeImage(Image):
    # Image double; random name/ID and fake member/tag manager classes.
    def __init__(self, *args, **kwargs):
        info = kwargs.pop("info", {"fake": "fake"})
        info["name"] = utils.random_unicode()
        info["id"] = utils.random_unicode()
        mgr = kwargs.pop("manager", FakeImageManager())
        kwargs["member_manager_class"] = FakeImageMemberManager
        kwargs["tag_manager_class"] = FakeImageTagManager
        super(FakeImage, self).__init__(mgr, info, *args, **kwargs)
class FakeImageClient(ImageClient):
    # Image client double with canned fake credentials.
    def __init__(self, identity=None, *args, **kwargs):
        if identity is None:
            identity = FakeIdentity()
        super(FakeImageClient, self).__init__(identity, "fakeuser",
                "fakepassword", *args, **kwargs)
class FakeImageMemberManager(ImageMemberManager):
    # Member manager double backed by a fake image client.
    def __init__(self, api=None, *args, **kwargs):
        if api is None:
            api = FakeImageClient()
        super(FakeImageMemberManager, self).__init__(api, *args, **kwargs)
        self.id = utils.random_ascii()
class FakeImageTagManager(ImageTagManager):
    # Tag manager double backed by a fake image client.
    def __init__(self, api=None, *args, **kwargs):
        if api is None:
            api = FakeImageClient()
        super(FakeImageTagManager, self).__init__(api, *args, **kwargs)
        self.id = utils.random_ascii()
class FakeImageManager(ImageManager):
    # Image manager double producing FakeImage resources.
    def __init__(self, api=None, *args, **kwargs):
        if api is None:
            api = FakeImageClient()
        super(FakeImageManager, self).__init__(api, *args, **kwargs)
        self.plural_response_key = "images"
        self.resource_class = FakeImage
        self.id = utils.random_ascii()
class FakeIdentityService(Service):
    # Identity service double with empty clients/endpoints registries.
    def __init__(self, identity=None, *args, **kwargs):
        self.identity = identity or FakeIdentity()
        self.name = "fake"
        self.prefix = ""
        self.service_type = "fake"
        self.clients = {}
        self.endpoints = utils.DotDict()
class FakeEndpoint(Endpoint):
    # Endpoint double; fabricates every missing constructor argument.
    def __init__(self, ep_dict=None, service=None, region=None, identity=None):
        if ep_dict is None:
            ep_dict = {}
        if identity is None:
            identity = FakeIdentity()
        if service is None:
            service = FakeIdentityService(identity)
        if region is None:
            region = "fake_region"
        super(FakeEndpoint, self).__init__(ep_dict, service, region, identity)
class FakeRaxIdentity(RaxIdentity):
    # Rackspace identity double; inherits all real behavior unchanged.
    pass
class FakeIdentity(BaseIdentity):
    """Class that returns canned authentication responses."""
    def __init__(self, *args, **kwargs):
        super(FakeIdentity, self).__init__(*args, **kwargs)
        self._good_username = "fakeuser"
        # FIX: this literal had been mangled to "<PASSWORD>" (a
        # data-sanitization artifact).  The fake clients in this module all
        # authenticate with "fakeuser"/"fakepassword", so that pair is the
        # accepted credential.
        self._good_password = "fakepassword"
        self._default_region = random.choice(("DFW", "ORD"))
        self.services = {"fake": FakeIdentityService(self)}
    def authenticate(self, connect=False):
        """Succeed only for the canned username/password pair."""
        if ((self.username == self._good_username) and
                (self.password == self._good_password)):
            self._parse_response(self.fake_response())
            self.authenticated = True
        else:
            self.authenticated = False
            raise exc.AuthenticationFailed("No match for '%s'/'%s' "
                    "username/password" % (self.username, self.password))
    def auth_with_token(self, token, tenant_id=None, tenant_name=None):
        """Accept any token unconditionally."""
        self.token = token
        self.tenant_id = tenant_id
        self.tenant_name = tenant_name
        self.authenticated = True
    def get_token(self, force=False):
        return self.token
    def fake_response(self):
        # Canned service-catalog payload defined at module level.
        return fake_identity_response
# Canned pyrax configuration-file contents used by config-parsing tests.
fake_config_file = """[settings]
identity_type = rackspace
keyring_username =
region = FAKE
custom_user_agent = FAKE
http_debug =
"""
# This will handle both singular and plural responses.
fake_identity_user_response = {
        "users": [{"name": "fake", "id": "fake"},
        {"name": "faker", "id": "faker"}],
        "user": {"name": "fake", "id": "fake"},
        "roles": [{u'description': 'User Admin Role.',
        'id': '3',
        'name': 'identity:user-admin'}],
        }
# Canned single-tenant body.
fake_identity_tenant_response = {"name": "fake", "id": "fake",
        "description": "fake", "enabled": True}
# Canned tenant-list body.
fake_identity_tenants_response = {
        "tenants": [
        {"name": "fake", "id": "fake", "description": "fake",
        "enabled": True},
        {"name": "faker", "id": "faker", "description": "faker",
        "enabled": True},
        ]}
# Canned keystone token body: metadata, service catalog, token and user.
fake_identity_tokens_response = {"access":
        {'metadata': {u'is_admin': 0,
        'roles': [u'asdfgh',
        'sdfghj',
        'dfghjk']},
        'serviceCatalog': [{u'endpoints': [
        {u'adminURL': 'http://10.0.0.0:8774/v2/qweqweqwe',
        'id': 'dddddddddd',
        'publicURL': 'http://10.0.0.0:8774/v2/qweqweqwe',
        'internalURL': 'http://10.0.0.0:8774/v2/qweqweqwe',
        'region': 'some_region'}],
        'endpoints_links': [],
        'name': 'nova',
        'type': 'compute'},
        {u'endpoints': [{u'adminURL': 'http://10.0.0.0:35357/v2.0',
        'id': 'qweqweqwe',
        'internalURL': 'http://10.0.0.0:5000/v2.0',
        'publicURL': 'http://10.0.0.0:5000/v2.0',
        'region': 'some_region'}],
        'endpoints_links': [],
        'name': 'keystone',
        'type': 'identity'}],
        'token': {u'expires': '1999-05-04T16:45:05Z',
        'id': 'qweqweqwe',
        'tenant': {u'description': 'admin Tenant',
        'enabled': True,
        'id': 'qweqweqwe',
        'name': 'admin'}},
        'user': {u'id': 'qweqweqwe',
        'name': 'admin',
        'roles': [{u'id': 'qweqweqwe', 'name': 'admin'},
        {u'id': 'qweqweqwe', 'name': 'KeystoneAdmin'},
        {u'id': 'qweqweqwe',
        'name': 'KeystoneServiceAdmin'}],
        'roles_links': [],
        'username': 'admin'}}}
# Canned endpoints body.
fake_identity_endpoints_response = {"access": {
        "endpoints": ["fake", "faker", "fakest"]}}
# Canned Rackspace auth response: the full service catalog plus token and
# user sections.  Returned by FakeIdentity.fake_response().
fake_identity_response = {u'access':
        {u'serviceCatalog': [
        {u'endpoints': [{u'publicURL':
        'https://ord.loadbalancers.api.rackspacecloud.com/v1.0/000000',
        'region': 'ORD',
        'tenantId': '000000'},
        {u'publicURL':
        'https://dfw.loadbalancers.api.rackspacecloud.com/v1.0/000000',
        'region': 'DFW',
        'tenantId': '000000'},
        {u'publicURL':
        'https://syd.loadbalancers.api.rackspacecloud.com/v1.0/000000',
        'region': 'SYD',
        'tenantId': '000000'}],
        'name': 'cloudLoadBalancers',
        'type': 'rax:load-balancer'},
        {u'endpoints': [{u'internalURL':
        'https://snet-aa.fake1.clouddrive.com/v1/MossoCloudFS_abc',
        'publicURL': 'https://aa.fake1.clouddrive.com/v1/MossoCloudFS_abc',
        'region': 'FAKE',
        'tenantId': 'MossoCloudFS_abc'},
        {u'internalURL':
        'https://snet-aa.dfw1.clouddrive.com/v1/MossoCloudFS_abc',
        'publicURL': 'https://aa.dfw1.clouddrive.com/v1/MossoCloudFS_abc',
        'region': 'DFW',
        'tenantId': 'MossoCloudFS_abc'},
        {u'internalURL':
        'https://snet-aa.ord1.clouddrive.com/v1/MossoCloudFS_abc',
        'publicURL': 'https://aa.ord1.clouddrive.com/v1/MossoCloudFS_abc',
        'region': 'ORD',
        'tenantId': 'MossoCloudFS_abc'},
        {u'internalURL':
        'https://snet-aa.syd1.clouddrive.com/v1/MossoCloudFS_abc',
        'publicURL': 'https://aa.ord1.clouddrive.com/v1/MossoCloudFS_abc',
        'region': 'SYD',
        'tenantId': 'MossoCloudFS_abc'}],
        'name': 'cloudFiles',
        'type': 'object-store'},
        {u'endpoints': [{u'publicURL':
        'https://dfw.servers.api.rackspacecloud.com/v2/000000',
        'region': 'DFW',
        'tenantId': '000000',
        'versionId': '2',
        'versionInfo': 'https://dfw.servers.api.rackspacecloud.com/v2',
        'versionList': 'https://dfw.servers.api.rackspacecloud.com/'},
        {u'publicURL':
        'https://ord.servers.api.rackspacecloud.com/v2/000000',
        'region': 'ORD',
        'tenantId': '000000',
        'versionId': '2',
        'versionInfo': 'https://ord.servers.api.rackspacecloud.com/v2',
        'versionList': 'https://ord.servers.api.rackspacecloud.com/'},
        {u'publicURL':
        'https://syd.servers.api.rackspacecloud.com/v2/000000',
        'region': 'SYD',
        'tenantId': '000000',
        'versionId': '2',
        'versionInfo': 'https://syd.servers.api.rackspacecloud.com/v2',
        'versionList': 'https://syd.servers.api.rackspacecloud.com/'}],
        'name': 'cloudServersOpenStack',
        'type': 'compute'},
        {u'endpoints': [{u'publicURL':
        'https://dns.api.rackspacecloud.com/v1.0/000000',
        'tenantId': '000000'}],
        'name': 'cloudDNS',
        'type': 'rax:dns'},
        {u'endpoints': [{u'publicURL':
        'https://dfw.databases.api.rackspacecloud.com/v1.0/000000',
        'region': 'DFW',
        'tenantId': '000000'},
        {u'publicURL':
        'https://syd.databases.api.rackspacecloud.com/v1.0/000000',
        'region': 'SYD',
        'tenantId': '000000'},
        {u'publicURL':
        'https://ord.databases.api.rackspacecloud.com/v1.0/000000',
        'region': 'ORD',
        'tenantId': '000000'}],
        'name': 'cloudDatabases',
        'type': 'rax:database'},
        {u'endpoints': [{u'publicURL':
        'https://servers.api.rackspacecloud.com/v1.0/000000',
        'tenantId': '000000',
        'versionId': '1.0',
        'versionInfo': 'https://servers.api.rackspacecloud.com/v1.0',
        'versionList': 'https://servers.api.rackspacecloud.com/'}],
        'name': 'cloudServers',
        'type': 'compute'},
        {u'endpoints': [{u'publicURL':
        'https://cdn1.clouddrive.com/v1/MossoCloudFS_abc',
        'region': 'DFW',
        'tenantId': 'MossoCloudFS_abc'},
        {u'publicURL': 'https://cdn1.clouddrive.com/v1/MossoCloudFS_abc',
        'region': 'FAKE',
        'tenantId': 'MossoCloudFS_abc'},
        {u'publicURL': 'https://cdn1.clouddrive.com/v1/MossoCloudFS_abc',
        'region': 'SYD',
        'tenantId': 'MossoCloudFS_abc'},
        {u'publicURL': 'https://cdn2.clouddrive.com/v1/MossoCloudFS_abc',
        'region': 'ORD',
        'tenantId': 'MossoCloudFS_abc'}],
        'name': 'cloudFilesCDN',
        'type': 'rax:object-cdn'},
        {u'endpoints': [{u'publicURL':
        'https://monitoring.api.rackspacecloud.com/v1.0/000000',
        'tenantId': '000000'}],
        'name': 'cloudMonitoring',
        'type': 'rax:monitor'}],
        u'token': {u'expires': '2222-02-22T22:22:22.000-02:00',
        'id': 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx',
        'tenant': {u'id': '000000', 'name': '000000'}},
        u'user': {u'id': '123456',
        'name': 'fakeuser',
        'RAX-AUTH:defaultRegion': 'DFW',
        'roles': [{u'description': 'User Admin Role.',
        'id': '3',
        'name': 'identity:user-admin'}],
        }}}
class FakeIdentityResponse(FakeResponse):
    # Response double whose payload is selected by `response_type`, keyed
    # into the canned module-level identity bodies above.
    status_code = 200
    response_type = "auth"
    responses = {"auth": fake_identity_response,
            "users": fake_identity_user_response,
            "tenant": fake_identity_tenant_response,
            "tenants": fake_identity_tenants_response,
            "tokens": fake_identity_tokens_response,
            "endpoints": fake_identity_endpoints_response,
            }
    @property
    def content(self):
        # Pick the canned payload matching the configured response type.
        return self.responses.get(self.response_type)
    def json(self):
        return self.content
    def read(self):
        return json.dumps(self.content)
| [
"pyrax.exceptions.AuthenticationFailed",
"pyrax.utils.random_unicode",
"random.choice",
"pyrax.utils.add_method",
"json.dumps",
"time.sleep",
"uuid.uuid4",
"pyrax.utils.DotDict",
"pyrax.utils.random_ascii",
"random.randint"
] | [((5825, 5847), 'pyrax.utils.random_unicode', 'utils.random_unicode', ([], {}), '()\n', (5845, 5847), True, 'import pyrax.utils as utils\n'), ((12783, 12805), 'pyrax.utils.random_unicode', 'utils.random_unicode', ([], {}), '()\n', (12803, 12805), True, 'import pyrax.utils as utils\n'), ((6150, 6172), 'pyrax.utils.random_unicode', 'utils.random_unicode', ([], {}), '()\n', (6170, 6172), True, 'import pyrax.utils as utils\n'), ((6628, 6673), 'pyrax.utils.add_method', 'utils.add_method', (['self.servers', 'dummy', '"""list"""'], {}), "(self.servers, dummy, 'list')\n", (6644, 6673), True, 'import pyrax.utils as utils\n'), ((6718, 6762), 'pyrax.utils.add_method', 'utils.add_method', (['self.images', 'dummy', '"""list"""'], {}), "(self.images, dummy, 'list')\n", (6734, 6762), True, 'import pyrax.utils as utils\n'), ((6808, 6853), 'pyrax.utils.add_method', 'utils.add_method', (['self.flavors', 'dummy', '"""list"""'], {}), "(self.flavors, dummy, 'list')\n", (6824, 6853), True, 'import pyrax.utils as utils\n'), ((7466, 7484), 'time.sleep', 'time.sleep', (['(0.0001)'], {}), '(0.0001)\n', (7476, 7484), False, 'import time\n'), ((8335, 8357), 'pyrax.utils.random_unicode', 'utils.random_unicode', ([], {}), '()\n', (8355, 8357), True, 'import pyrax.utils as utils\n'), ((8804, 8826), 'pyrax.utils.random_unicode', 'utils.random_unicode', ([], {}), '()\n', (8824, 8826), True, 'import pyrax.utils as utils\n'), ((10251, 10274), 'pyrax.utils.random_unicode', 'utils.random_unicode', (['(8)'], {}), '(8)\n', (10271, 10274), True, 'import pyrax.utils as utils\n'), ((10293, 10315), 'pyrax.utils.random_unicode', 'utils.random_unicode', ([], {}), '()\n', (10313, 10315), True, 'import pyrax.utils as utils\n'), ((10537, 10559), 'pyrax.utils.random_unicode', 'utils.random_unicode', ([], {}), '()\n', (10557, 10559), True, 'import pyrax.utils as utils\n'), ((12050, 12070), 'pyrax.utils.random_ascii', 'utils.random_ascii', ([], {}), '()\n', (12068, 12070), True, 'import pyrax.utils as 
utils\n'), ((12091, 12113), 'random.randint', 'random.randint', (['(1)', '(256)'], {}), '(1, 256)\n', (12105, 12113), False, 'import random\n'), ((13661, 13681), 'pyrax.utils.random_ascii', 'utils.random_ascii', ([], {}), '()\n', (13679, 13681), True, 'import pyrax.utils as utils\n'), ((13702, 13724), 'pyrax.utils.random_unicode', 'utils.random_unicode', ([], {}), '()\n', (13722, 13724), True, 'import pyrax.utils as utils\n'), ((14082, 14104), 'pyrax.utils.random_unicode', 'utils.random_unicode', ([], {}), '()\n', (14102, 14104), True, 'import pyrax.utils as utils\n'), ((15135, 15155), 'pyrax.utils.random_ascii', 'utils.random_ascii', ([], {}), '()\n', (15153, 15155), True, 'import pyrax.utils as utils\n'), ((15331, 15351), 'pyrax.utils.random_ascii', 'utils.random_ascii', ([], {}), '()\n', (15349, 15351), True, 'import pyrax.utils as utils\n'), ((15615, 15635), 'pyrax.utils.random_ascii', 'utils.random_ascii', ([], {}), '()\n', (15633, 15635), True, 'import pyrax.utils as utils\n'), ((15977, 15997), 'pyrax.utils.random_ascii', 'utils.random_ascii', ([], {}), '()\n', (15995, 15997), True, 'import pyrax.utils as utils\n'), ((16483, 16503), 'pyrax.utils.random_ascii', 'utils.random_ascii', ([], {}), '()\n', (16501, 16503), True, 'import pyrax.utils as utils\n'), ((16876, 16896), 'pyrax.utils.random_ascii', 'utils.random_ascii', ([], {}), '()\n', (16894, 16896), True, 'import pyrax.utils as utils\n'), ((17053, 17065), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (17063, 17065), False, 'import uuid\n'), ((17358, 17370), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (17368, 17370), False, 'import uuid\n'), ((17513, 17535), 'pyrax.utils.random_unicode', 'utils.random_unicode', ([], {}), '()\n', (17533, 17535), True, 'import pyrax.utils as utils\n'), ((17825, 17847), 'pyrax.utils.random_unicode', 'utils.random_unicode', ([], {}), '()\n', (17845, 17847), True, 'import pyrax.utils as utils\n'), ((18474, 18494), 'pyrax.utils.random_ascii', 'utils.random_ascii', ([], {}), 
'()\n', (18492, 18494), True, 'import pyrax.utils as utils\n'), ((18637, 18659), 'pyrax.utils.random_unicode', 'utils.random_unicode', ([], {}), '()\n', (18657, 18659), True, 'import pyrax.utils as utils\n'), ((18681, 18703), 'pyrax.utils.random_unicode', 'utils.random_unicode', ([], {}), '()\n', (18701, 18703), True, 'import pyrax.utils as utils\n'), ((19484, 19504), 'pyrax.utils.random_ascii', 'utils.random_ascii', ([], {}), '()\n', (19502, 19504), True, 'import pyrax.utils as utils\n'), ((19752, 19772), 'pyrax.utils.random_ascii', 'utils.random_ascii', ([], {}), '()\n', (19770, 19772), True, 'import pyrax.utils as utils\n'), ((20095, 20115), 'pyrax.utils.random_ascii', 'utils.random_ascii', ([], {}), '()\n', (20113, 20115), True, 'import pyrax.utils as utils\n'), ((20399, 20414), 'pyrax.utils.DotDict', 'utils.DotDict', ([], {}), '()\n', (20412, 20414), True, 'import pyrax.utils as utils\n'), ((21229, 21258), 'random.choice', 'random.choice', (["('DFW', 'ORD')"], {}), "(('DFW', 'ORD'))\n", (21242, 21258), False, 'import random\n'), ((30415, 30439), 'json.dumps', 'json.dumps', (['self.content'], {}), '(self.content)\n', (30425, 30439), False, 'import json\n'), ((4898, 4918), 'pyrax.utils.random_ascii', 'utils.random_ascii', ([], {}), '()\n', (4916, 4918), True, 'import pyrax.utils as utils\n'), ((11895, 11915), 'pyrax.utils.random_ascii', 'utils.random_ascii', ([], {}), '()\n', (11913, 11915), True, 'import pyrax.utils as utils\n'), ((12470, 12492), 'pyrax.utils.random_unicode', 'utils.random_unicode', ([], {}), '()\n', (12490, 12492), True, 'import pyrax.utils as utils\n'), ((14716, 14728), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (14726, 14728), False, 'import uuid\n'), ((15762, 15782), 'pyrax.utils.random_ascii', 'utils.random_ascii', ([], {}), '()\n', (15780, 15782), True, 'import pyrax.utils as utils\n'), ((21639, 21745), 'pyrax.exceptions.AuthenticationFailed', 'exc.AuthenticationFailed', (['("No match for \'%s\'/\'%s\' username/password" % 
(self.username, self.password))'], {}), '("No match for \'%s\'/\'%s\' username/password" % (self\n .username, self.password))\n', (21663, 21745), True, 'import pyrax.exceptions as exc\n'), ((14538, 14560), 'pyrax.utils.random_unicode', 'utils.random_unicode', ([], {}), '()\n', (14558, 14560), True, 'import pyrax.utils as utils\n')] |
#! /usr/bin/env python3
## Copyright 2018 <NAME> <<EMAIL>>
## Copyright 2016 <NAME> <<EMAIL>>
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
import os
import sys
def buildLuaFile(targetDirectory, name, containers):
    """Render the DagSim Lua template for one job.

    Reads ``template.lua`` from this script's directory and
    ``dependencies.lua`` from *targetDirectory*, substitutes every
    ``@@...@@`` placeholder (stages, containers, plus three DAGSIM_*
    environment variables) and writes ``<name>.lua.template`` into
    *targetDirectory*.
    """
    script_dir = os.path.dirname(os.path.realpath(__file__))
    with open(os.path.join(script_dir, 'template.lua'), 'r') as infile:
        content = infile.read()
    with open(os.path.join(targetDirectory, "dependencies.lua"), "r") as infile:
        stages = infile.read()
    # Placeholder -> replacement pairs, applied in order.
    substitutions = (
        ('@@STAGES@@', stages),
        ('@@CONTAINERS@@', containers),
        ('@@USERS@@', os.environ['DAGSIM_USERS']),
        ('@@TYPE@@', os.environ['DAGSIM_UTHINKTIMEDISTR_TYPE']),
        ('@@PARAMS@@', os.environ['DAGSIM_UTHINKTIMEDISTR_PARAMS']),
    )
    for placeholder, value in substitutions:
        content = content.replace(placeholder, value)
    outfilename = os.path.join(targetDirectory,
                               '{}.lua.template'.format(name))
    with open(outfilename, 'w') as outfile:
        outfile.write(content)
def main():
    """CLI entry point.

    Expects exactly three positional arguments: the target directory, the
    output name, and the containers string. Exits with status 2 on a wrong
    argument count, and 1 when the target directory does not exist.
    """
    args = sys.argv
    if len(args) != 4:
        # Bug fix: the usage message previously listed only two arguments
        # even though three are required (argv[0] is the program name).
        print("Required args: [TARGET_DIRECTORY] [NAME] [CONTAINERS]",
              file=sys.stderr)
        sys.exit(2)
    else:
        if os.path.exists(str(args[1])):
            buildLuaFile(str(args[1]), str(args[2]), str(args[3]))
        else:
            print("error: the inserted directory does not exist",
                  file=sys.stderr)
            sys.exit(1)


if __name__ == '__main__':
    main()
| [
"os.path.realpath",
"os.path.join",
"sys.exit"
] | [((763, 789), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (779, 789), False, 'import os\n'), ((1726, 1737), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (1734, 1737), False, 'import sys\n'), ((806, 845), 'os.path.join', 'os.path.join', (['scriptdir', '"""template.lua"""'], {}), "(scriptdir, 'template.lua')\n", (818, 845), False, 'import os\n'), ((911, 960), 'os.path.join', 'os.path.join', (['targetDirectory', '"""dependencies.lua"""'], {}), "(targetDirectory, 'dependencies.lua')\n", (923, 960), False, 'import os\n'), ((1985, 1996), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1993, 1996), False, 'import sys\n')] |
import unittest
from geometry.point import Point
class TestPoint(unittest.TestCase):
    """Unit tests for Point.get_arc()."""

    def get_points(self):
        """Sample points: the origin plus one point per axis/quadrant."""
        coordinates = [(0, 0), (1, 1), (0, 1), (-1, 1),
                       (-1, 0), (-1, -1), (1, -1)]
        return [Point(x, y) for x, y in coordinates]

    def test_get_arc(self):
        """get_arc() returns the expected angle in degrees per sample."""
        expected_angles = [0, 45, 90, 135, 180, 225, 315]
        for point, angle in zip(self.get_points(), expected_angles):
            self.assertEqual(point.get_arc(), angle)


if __name__ == '__main__':
    unittest.main()
| [
"unittest.main",
"geometry.point.Point"
] | [((772, 787), 'unittest.main', 'unittest.main', ([], {}), '()\n', (785, 787), False, 'import unittest\n'), ((144, 155), 'geometry.point.Point', 'Point', (['(0)', '(0)'], {}), '(0, 0)\n', (149, 155), False, 'from geometry.point import Point\n'), ((169, 180), 'geometry.point.Point', 'Point', (['(1)', '(1)'], {}), '(1, 1)\n', (174, 180), False, 'from geometry.point import Point\n'), ((194, 205), 'geometry.point.Point', 'Point', (['(0)', '(1)'], {}), '(0, 1)\n', (199, 205), False, 'from geometry.point import Point\n'), ((219, 231), 'geometry.point.Point', 'Point', (['(-1)', '(1)'], {}), '(-1, 1)\n', (224, 231), False, 'from geometry.point import Point\n'), ((245, 257), 'geometry.point.Point', 'Point', (['(-1)', '(0)'], {}), '(-1, 0)\n', (250, 257), False, 'from geometry.point import Point\n'), ((271, 284), 'geometry.point.Point', 'Point', (['(-1)', '(-1)'], {}), '(-1, -1)\n', (276, 284), False, 'from geometry.point import Point\n'), ((298, 310), 'geometry.point.Point', 'Point', (['(1)', '(-1)'], {}), '(1, -1)\n', (303, 310), False, 'from geometry.point import Point\n')] |
import yaml
import os, errno
import json
def load_config(path):
    """Parse the YAML configuration file at *path* and return it.

    Uses ``yaml.safe_load``: plain ``yaml.load`` without an explicit Loader
    is deprecated since PyYAML 5.1 and can execute arbitrary Python via
    YAML tags when the config file is untrusted.
    """
    with open(path, 'r') as config_file:
        config = yaml.safe_load(config_file)
    return config
def createSensorConfigs(topicSensors):
    """Build one MQTT client configuration dict per requested sensor.

    *topicSensors* must provide ``nb`` (number of sensors), ``broker`` and
    ``topic``; an optional ``remoteLoggingBroker`` entry (host/port/topic)
    enables remote logging on every generated sensor config.
    """
    sensors = []
    for index in range(topicSensors['nb']):
        sensor = {
            'server': topicSensors['broker'],
            'username': 'xxx',
            'password': '<PASSWORD>',
            'port': 1883,
            'clientId': 'sensor_{}_{}'.format(topicSensors['topic'], index),
            'topic': topicSensors['topic'],
        }
        if 'remoteLoggingBroker' in topicSensors:
            remote = topicSensors['remoteLoggingBroker']
            sensor['remoteLoggingBroker'] = {
                'broker': 'tcp://{}:{}'.format(remote['host'],
                                               remote['port']),
                'topic': remote['topic'],
            }
            sensor['remoteLogging'] = True
        sensors.append(sensor)
    return sensors
def write_config_files(sensors):
    """Serialize each sensor config to ``sensors/<clientId>.json``.

    Creates the ``sensors`` directory (relative to the CWD) if it does
    not already exist.
    """
    try:
        os.makedirs('sensors')
    except OSError as err:
        # An already-existing directory is fine; re-raise anything else.
        if err.errno != errno.EEXIST:
            raise
    for sensor in sensors:
        target = 'sensors/' + sensor['clientId'] + '.json'
        with open(target, 'w') as outfile:
            json.dump(sensor, outfile)
def write_compose(sensors):
    """Return a docker-compose ``services`` mapping, one entry per sensor.

    Each service mounts the sensor's JSON config and CSV data file into
    the container and runs the ``rdsea/sensor`` image.
    """
    services = {}
    for sensor in sensors:
        client_id = sensor['clientId']
        services[client_id] = {
            'volumes': [
                './sensors/' + client_id + '.json' + ":/sensor/config.json:",
                './sensors/' + client_id + '.csv' + ":/sensor/data.csv:",
            ],
            'image': 'rdsea/sensor',
        }
    return services
def provision(config):
    """Generate all sensor config files and return the compose services map.

    Expands every entry of ``config['sensors']`` into individual sensor
    configurations, writes them to disk, and returns the docker-compose
    ``services`` mapping for them.
    """
    try:
        os.makedirs('sensors')
    except OSError as err:
        # The directory may already exist; anything else is a real error.
        if err.errno != errno.EEXIST:
            raise
    sensors = []
    for topic_sensors in config['sensors']:
        sensors += createSensorConfigs(topic_sensors)
    write_config_files(sensors)
    return write_compose(sensors)
| [
"json.dump",
"yaml.load",
"os.makedirs"
] | [((141, 163), 'yaml.load', 'yaml.load', (['config_file'], {}), '(config_file)\n', (150, 163), False, 'import yaml\n'), ((1154, 1176), 'os.makedirs', 'os.makedirs', (['"""sensors"""'], {}), "('sensors')\n", (1165, 1176), False, 'import os, errno\n'), ((1902, 1924), 'os.makedirs', 'os.makedirs', (['"""sensors"""'], {}), "('sensors')\n", (1913, 1924), False, 'import os, errno\n'), ((1398, 1424), 'json.dump', 'json.dump', (['sensor', 'outfile'], {}), '(sensor, outfile)\n', (1407, 1424), False, 'import json\n')] |
from .backend import Backend
from .circuitbyqiskit import CircuitByQiskit
from .circuitbyprojectq import CircuitByProjectq
from .circuitbycirq import CircuitByCirq
from .circuitbyqulacs import CircuitByQulacs
# from .circuitbytket import CircuitByTket
from .circuitbytensor import CircuitByTensor
from .circuitbyqton import CircuitByQton
import warnings
# NOTE(review): this silences *all* warnings process-wide as soon as the
# package is imported — consider narrowing to the specific categories the
# optional circuit backends emit.
warnings.filterwarnings("ignore")

# Public API of the package.
__all__ = [
    'Backend',
    'CircuitByCirq',
    'CircuitByQiskit',
    'CircuitByProjectq',
    'CircuitByTensor',
    'CircuitByQulacs',
    'CircuitByQton'
]
| [
"warnings.filterwarnings"
] | [((363, 396), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (386, 396), False, 'import warnings\n')] |
from statistics import mean
from collections import defaultdict
from cloudpredictionframework.anomaly_detection.algorithms.base_algorithm import BaseAlgorithm
class HybridAlgorithm(BaseAlgorithm):
    """Ensemble anomaly detector.

    A sample is flagged as an over-utilisation anomaly only when at least
    ``min_confidence`` of the wrapped filter algorithms agree, and the
    anomaly does not fall on a weekday / day-of-month where anomalies
    recur regularly (recurring spikes are treated as normal periodic load).
    """

    def __init__(self, filters: [BaseAlgorithm], min_confidence=0.8):
        super().__init__()
        self._filters = filters
        self._min_confidence = min_confidence
        # Running counts of confirmed anomalies per weekday and per day of
        # the month; consumed by _is_recurrent() to whitelist periodic spikes.
        self._recurrency_data = {'day_of_week': defaultdict(lambda: 0),
                                 'day_of_month': defaultdict(lambda: 0)}

    def get_confidence(self):
        # NOTE(review): unimplemented — returns None. Confirm whether
        # BaseAlgorithm callers require a real value here.
        pass

    def update(self, timestamp, value):
        """Feed one (timestamp, value) sample through every wrapped filter
        and derive the combined ensemble state."""
        # NOTE(review): _samples looks like a pandas DataFrame inherited from
        # BaseAlgorithm; DataFrame.append was removed in pandas 2.0 — confirm
        # the pinned pandas version.
        self._samples = self._samples.append({'timestamp': timestamp, 'value': value}, ignore_index=True)
        combined_states = []
        for alg in self._filters:
            alg.update(timestamp, value)
            combined_states.append(alg.get_current_state())
        # While any filter is still warming up, the ensemble is learning too.
        if self.states.learning in combined_states:
            self._current_state = self.states.learning
            return
        # Fraction of filters voting "over-utilisation anomaly".
        state_confidence = mean([1 if i == self.states.overutil_anomaly else 0 for i in combined_states])
        # NOTE(review): strict '>' here vs '>=' in the state decision below —
        # confirm the asymmetry is intended.
        self._update_recurrent(timestamp, state_confidence > self._min_confidence)
        if state_confidence >= self._min_confidence:
            if self._is_recurrent(timestamp):
                # Periodically recurring spike: treat as normal load.
                self._current_state = self.states.normal
            else:
                self._current_state = self.states.overutil_anomaly
                self._anomalies_overutil = self._anomalies_overutil.append({'timestamp': timestamp, 'value': value},
                                                                            ignore_index=True)
        else:
            self._current_state = self.states.normal
        # Record the thresholds in effect at this sample for later inspection.
        self._anomalies_treshold_history = self._anomalies_treshold_history.append(
            {'timestamp': timestamp,
             'upper_treshold': self._upper_treshold,
             'lower_treshold': self._lower_treshold},
            ignore_index=True)

    def _update_recurrent(self, timestamp, is_anomaly: bool):
        # Increment the recurrence counters on an anomaly; decay them
        # (floored at zero) on a normal sample.
        if is_anomaly:
            self._recurrency_data['day_of_week'][timestamp.dayofweek] += 1
            self._recurrency_data['day_of_month'][timestamp.day] += 1
        else:
            dow = self._recurrency_data['day_of_week'][timestamp.dayofweek]
            self._recurrency_data['day_of_week'][timestamp.dayofweek] = dow - 1 if dow > 0 else 0
            dom = self._recurrency_data['day_of_month'][timestamp.day]
            self._recurrency_data['day_of_month'][timestamp.day] = dom - 1 if dom > 0 else 0

    def _is_recurrent(self, timestamp):
        # Recurrent once the same weekday OR day of month has accumulated
        # more than two confirmed anomalies.
        return self._recurrency_data['day_of_week'][timestamp.dayofweek] > 2 or \
               self._recurrency_data['day_of_month'][timestamp.day] > 2

    def __str__(self):
        return "HybridAlgorithm"
| [
"statistics.mean",
"collections.defaultdict"
] | [((1033, 1118), 'statistics.mean', 'mean', (['[(1 if i == self.states.overutil_anomaly else 0) for i in combined_states]'], {}), '([(1 if i == self.states.overutil_anomaly else 0) for i in combined_states]\n )\n', (1037, 1118), False, 'from statistics import mean\n'), ((425, 448), 'collections.defaultdict', 'defaultdict', (['(lambda : 0)'], {}), '(lambda : 0)\n', (436, 448), False, 'from collections import defaultdict\n'), ((498, 521), 'collections.defaultdict', 'defaultdict', (['(lambda : 0)'], {}), '(lambda : 0)\n', (509, 521), False, 'from collections import defaultdict\n')] |
# vim:ts=4 sw=4 expandtab softtabstop=4
from jsonmerge.exceptions import HeadInstanceError, \
BaseInstanceError, \
SchemaError
import jsonschema
import re
class Strategy(object):
    """Base class for merge strategies."""

    def merge(self, walk, base, head, schema, meta, **kwargs):
        """Merge head instance into base.

        walk -- WalkInstance object for the current context.
        base -- Value being merged into.
        head -- Value being merged.
        schema -- Schema used for merging.
        meta -- Meta data, as passed to the Merger.merge() method.
        kwargs -- Dict with any extra options given in the 'mergeOptions'
        keyword.

        Specific merge strategies should override this method to implement
        their behavior. The function should return the object resulting
        from the merge. Recursion into the next level, if necessary, is
        achieved by calling walk.descend().
        """
        # Bug fix: ``raise NotImplemented`` raised a TypeError in Python 3
        # because NotImplemented is a constant, not an exception class.
        raise NotImplementedError

    def get_schema(self, walk, schema, meta, **kwargs):
        """Return the schema for the merged document.

        walk -- WalkSchema object for the current context.
        schema -- Original document schema.
        meta -- Schema for the meta data, as passed to Merger.get_schema().
        kwargs -- Dict with any extra options given in the 'mergeOptions'
        keyword.

        Specific merge strategies should override this method to modify the
        document schema depending on the behavior of merge(). The function
        should return the schema for the merged object. Implementations
        should resolve all external schema references, e.g. by calling
        walk.resolve_refs().
        """
        raise NotImplementedError
class Overwrite(Strategy):
    """Strategy that always takes the head value.

    A ``None`` head leaves the base value untouched; otherwise head
    overwrites base.
    """

    def merge(self, walk, base, head, schema, meta, **kwargs):
        # ``is None`` instead of ``== None``: identity is the correct
        # (PEP 8) None test and cannot be fooled by a custom __eq__.
        if head is None:
            return base
        else:
            return head

    def get_schema(self, walk, schema, meta, **kwargs):
        return walk.resolve_refs(schema)
class OCDSOmit(Strategy):
    """Strategy that drops the field entirely from the merged result."""

    def merge(self, walk, base, head, schema, meta, **kwargs):
        # Whatever base/head hold, the merged document omits this value.
        return None

    def get_schema(self, walk, schema, meta, **kwargs):
        return walk.resolve_refs(schema)
class Version(Strategy):
    """Keep a version history: each merge appends the head value (wrapped
    with meta data) to a growing list instead of overwriting."""

    def merge(self, walk, base, head, schema, meta, limit=None, unique=None, ignoreDups=True, **kwargs):
        # Backwards compatibility: the old 'unique' flag maps to ignoreDups.
        if unique is False:
            ignoreDups = False
        history = [] if base is None else list(base)
        # Skip the append only when deduplication is enabled and the most
        # recent stored value equals the incoming head.
        is_duplicate = bool(history) and history[-1]['value'] == head
        if not (ignoreDups and is_duplicate):
            history.append(walk.add_meta(head, meta))
        if limit is not None:
            history = history[-limit:]
        return history

    def get_schema(self, walk, schema, meta, limit=None, **kwargs):
        item = dict(meta) if meta is not None else {}
        item.setdefault('properties', {})
        item['properties']['value'] = walk.resolve_refs(schema)
        result = {"type": "array",
                  "items": item}
        if limit is not None:
            result['maxItems'] = limit
        return result
class OCDSVersion(Strategy):
    """Versioning strategy for OCDS releases.

    Records every change of the value together with the id/date/tag of
    the release it came from.
    """

    def merge(self, walk, base, head, schema, meta, **kwargs):
        if base is None:
            base = []
        else:
            base = list(base)
        # Provenance of the incoming value, taken from the release
        # currently being merged.
        meta = {
            "releaseID": walk.merger.head_root.get('id'),
            "releaseDate": walk.merger.head_root.get('date'),
            "releaseTag": walk.merger.head_root.get('tag')
        }
        # Append only when the value actually changed and is present.
        # (``is not None`` replaces the non-idiomatic ``!= None`` test.)
        if (not base or base[-1]['value'] != head) and head is not None:
            base.append(walk.add_meta(head, meta))
        return base

    def get_schema(self, walk, schema, meta, **kwargs):
        """Wrap the original schema into an array of versioned entries."""
        if meta is not None:
            item = dict(meta)
        else:
            item = {}
        if 'properties' not in item:
            item['properties'] = {}
        item['properties']['value'] = walk.resolve_refs(schema)
        item['properties'].update({
            "releaseDate": {
                "type": "string",
                "format": "date-time"
            },
            "releaseID": {
                "type": "string"
            },
            "releaseTag": {
                "type": "string"
            }
        })
        rv = {"type": "array",
              "items": item}
        return rv
class Append(Strategy):
    """Merge arrays by concatenating head onto the end of base."""

    def merge(self, walk, base, head, schema, meta, **kwargs):
        if not walk.is_type(head, "array"):
            raise HeadInstanceError("Head for an 'append' merge strategy is not an array")
        if base is None:
            result = []
        else:
            if not walk.is_type(base, "array"):
                raise BaseInstanceError("Base for an 'append' merge strategy is not an array")
            # Shallow copy so the caller's base list is not mutated.
            result = list(base)
        result.extend(head)
        return result

    def get_schema(self, walk, schema, meta, **kwargs):
        # Concatenation can exceed the original length and repeat items,
        # so these constraints no longer hold for the merged document.
        for constraint in ('maxItems', 'uniqueItems'):
            schema.pop(constraint, None)
        return walk.resolve_refs(schema)
class ArrayMergeById(Strategy):
    """Merge arrays of objects by matching items on an identifier field.

    Items whose identifier (resolved via the JSON pointer *idRef*) matches
    an existing base item are merged into it; unmatched head items are
    appended. Items whose identifier equals *ignoreId*, or which lack the
    identifier entirely, are skipped.
    """

    def merge(self, walk, base, head, schema, meta, idRef="id", ignoreId=None, **kwargs):
        if not walk.is_type(head, "array"):
            raise HeadInstanceError("Head for an 'arrayMergeById' merge strategy is not an array")
        if base is None:
            base = []
        else:
            if not walk.is_type(base, "array"):
                raise BaseInstanceError("Base for an 'arrayMergeById' merge strategy is not an array")
            # Shallow copy so the caller's base list is not mutated.
            base = list(base)
        subschema = None
        if schema:
            subschema = schema.get('items')
        # Tuple-form 'items' would give a different schema per position,
        # which id-based matching cannot honour.
        if walk.is_type(subschema, "array"):
            raise SchemaError("'arrayMergeById' not supported when 'items' is an array")
        for head_item in head:
            try:
                # Resolve the identifier via JSON pointer (e.g. "id").
                head_key = walk.resolver.resolve_fragment(head_item, idRef)
            except jsonschema.RefResolutionError:
                # Skip items that do not carry the idRef field at all.
                continue
            if head_key == ignoreId:
                continue
            key_count = 0
            for i, base_item in enumerate(base):
                base_key = walk.resolver.resolve_fragment(base_item, idRef)
                if base_key == head_key:
                    key_count += 1
                    # Match found: replace the base item with the merge of
                    # both items under the per-item subschema.
                    base[i] = walk.descend(subschema, base_item, head_item, meta)
            if key_count == 0:
                # No match: append the head item (merged against nothing).
                base.append(walk.descend(subschema, None, head_item, meta))
            if key_count > 1:
                # More than one base item shared the id — ambiguous merge.
                raise BaseInstanceError("Id was not unique")
        return base

    def get_schema(self, walk, schema, meta, **kwargs):
        subschema = None
        if schema:
            subschema = schema.get('items')
        # Note we're discarding the walk.descend() result here. This is because
        # it would de-reference the $ref if the subschema is a reference - i.e.
        # in the result it would replace the reference with the copy of the
        # target.
        #
        # But we want to keep the $ref and do the walk.descend() only on the
        # target of the reference.
        #
        # This seems to work, but is an ugly workaround. walk.descend() should
        # be fixed instead to not dereference $refs when not necessary.
        walk.descend(subschema, meta)
        return schema
class ObjectMerge(Strategy):
    """Default strategy for objects: merge head's properties into base
    key by key, descending with the matching per-property subschema."""

    def merge(self, walk, base, head, schema, meta, **kwargs):
        if not walk.is_type(head, "object"):
            raise HeadInstanceError("Head for an 'object' merge strategy is not an object")
        if base is None:
            base = {}
        else:
            if not walk.is_type(base, "object"):
                raise BaseInstanceError("Base for an 'object' merge strategy is not an object")
            # Shallow copy so the caller's base dict is not mutated.
            base = dict(base)
        for k, v in head.items():
            subschema = None
            # Subschema lookup order: 'properties' wins, then the first
            # matching 'patternProperties' pattern, then
            # 'additionalProperties'.
            if schema is not None:
                p = schema.get('properties')
                if p is not None:
                    subschema = p.get(k)
                if subschema is None:
                    p = schema.get('patternProperties')
                    if p is not None:
                        for pattern, s in p.items():
                            if re.search(pattern, k):
                                subschema = s
                if subschema is None:
                    # NOTE(review): 'additionalProperties' is normally a
                    # schema, not a map of property schemas, so p.get(k)
                    # looks suspect — confirm against the walker's contract.
                    p = schema.get('additionalProperties')
                    if p is not None:
                        subschema = p.get(k)
            base[k] = walk.descend(subschema, base.get(k), v, meta)
        return base

    def get_schema(self, walk, schema, meta, **kwargs):
        # A merged schema cannot be derived when the object's type is
        # ambiguous across combinator keywords.
        for forbidden in ("oneOf", "allOf", "anyOf"):
            if forbidden in schema:
                raise SchemaError("Type ambiguous schema")
        schema2 = dict(schema)

        def descend_keyword(keyword):
            # Recurse into every subschema nested under the given keyword.
            p = schema.get(keyword)
            if p is not None:
                for k, v in p.items():
                    schema2[keyword][k] = walk.descend(v, meta)

        descend_keyword("properties")
        descend_keyword("patternProperties")
        descend_keyword("additionalProperties")
        return schema2
| [
"jsonmerge.exceptions.BaseInstanceError",
"jsonmerge.exceptions.SchemaError",
"jsonmerge.exceptions.HeadInstanceError",
"re.search"
] | [((4894, 4966), 'jsonmerge.exceptions.HeadInstanceError', 'HeadInstanceError', (['"""Head for an \'append\' merge strategy is not an array"""'], {}), '("Head for an \'append\' merge strategy is not an array")\n', (4911, 4966), False, 'from jsonmerge.exceptions import HeadInstanceError, BaseInstanceError, SchemaError\n'), ((5607, 5692), 'jsonmerge.exceptions.HeadInstanceError', 'HeadInstanceError', (['"""Head for an \'arrayMergeById\' merge strategy is not an array"""'], {}), '("Head for an \'arrayMergeById\' merge strategy is not an array"\n )\n', (5624, 5692), False, 'from jsonmerge.exceptions import HeadInstanceError, BaseInstanceError, SchemaError\n'), ((6105, 6175), 'jsonmerge.exceptions.SchemaError', 'SchemaError', (['"""\'arrayMergeById\' not supported when \'items\' is an array"""'], {}), '("\'arrayMergeById\' not supported when \'items\' is an array")\n', (6116, 6175), False, 'from jsonmerge.exceptions import HeadInstanceError, BaseInstanceError, SchemaError\n'), ((8060, 8133), 'jsonmerge.exceptions.HeadInstanceError', 'HeadInstanceError', (['"""Head for an \'object\' merge strategy is not an object"""'], {}), '("Head for an \'object\' merge strategy is not an object")\n', (8077, 8133), False, 'from jsonmerge.exceptions import HeadInstanceError, BaseInstanceError, SchemaError\n'), ((5099, 5171), 'jsonmerge.exceptions.BaseInstanceError', 'BaseInstanceError', (['"""Base for an \'append\' merge strategy is not an array"""'], {}), '("Base for an \'append\' merge strategy is not an array")\n', (5116, 5171), False, 'from jsonmerge.exceptions import HeadInstanceError, BaseInstanceError, SchemaError\n'), ((5830, 5915), 'jsonmerge.exceptions.BaseInstanceError', 'BaseInstanceError', (['"""Base for an \'arrayMergeById\' merge strategy is not an array"""'], {}), '("Base for an \'arrayMergeById\' merge strategy is not an array"\n )\n', (5847, 5915), False, 'from jsonmerge.exceptions import HeadInstanceError, BaseInstanceError, SchemaError\n'), ((7110, 7148), 
'jsonmerge.exceptions.BaseInstanceError', 'BaseInstanceError', (['"""Id was not unique"""'], {}), "('Id was not unique')\n", (7127, 7148), False, 'from jsonmerge.exceptions import HeadInstanceError, BaseInstanceError, SchemaError\n'), ((8267, 8340), 'jsonmerge.exceptions.BaseInstanceError', 'BaseInstanceError', (['"""Base for an \'object\' merge strategy is not an object"""'], {}), '("Base for an \'object\' merge strategy is not an object")\n', (8284, 8340), False, 'from jsonmerge.exceptions import HeadInstanceError, BaseInstanceError, SchemaError\n'), ((9365, 9401), 'jsonmerge.exceptions.SchemaError', 'SchemaError', (['"""Type ambiguous schema"""'], {}), "('Type ambiguous schema')\n", (9376, 9401), False, 'from jsonmerge.exceptions import HeadInstanceError, BaseInstanceError, SchemaError\n'), ((8855, 8876), 're.search', 're.search', (['pattern', 'k'], {}), '(pattern, k)\n', (8864, 8876), False, 'import re\n')] |
# -*- coding: utf-8 -*-
__version__ = '$Id: 024580a7ff506aa3cbda6d46122b84b1603a6c05 $'
from pywikibot import family
# Omegawiki, the Ultimate online dictionary
class Family(family.Family):
    """Family file for OmegaWiki, the collaborative online dictionary."""

    def __init__(self):
        family.Family.__init__(self)
        self.name = 'omegawiki'
        self.langs['omegawiki'] = 'www.omegawiki.org'
        # OmegaWiki does not force an initial capital in page titles,
        # so mark every language code as non-capitalizing.
        self.nocapitalize = self.langs.keys()

    def hostname(self, code):
        """Single host regardless of the language code."""
        return 'www.omegawiki.org'

    def version(self, code):
        """MediaWiki version run by the site."""
        return "1.16alpha"

    def scriptpath(self, code):
        """Scripts live at the web root."""
        return ''

    def path(self, code):
        return '/index.php'

    def apipath(self, code):
        return '/api.php'
"pywikibot.family.Family.__init__"
] | [((228, 256), 'pywikibot.family.Family.__init__', 'family.Family.__init__', (['self'], {}), '(self)\n', (250, 256), False, 'from pywikibot import family\n')] |
import os
import sys
import shutil
import glob
import time
import multiprocessing as mp
# Three positional arguments required: wav dir, feature dir, sample rate.
if len(sys.argv)!=4:
    print("Usage: ")
    print("python extract_features_WORLD.py <path_to_wav_dir> <path_to_feat_dir> <sampling rate>")
    sys.exit(1)

# directory the script is launched from (used to locate the tool binaries)
current_dir = os.getcwd()
# input audio directory
wav_dir = sys.argv[1]
# output features directory
out_dir = sys.argv[2]
# sampling rate in Hz
fs = int(sys.argv[3])
# vocoder tool directories: WORLD analysis and the SPTK toolkit
world = os.path.join(current_dir, "tools/bin/WORLD")
sptk = os.path.join(current_dir, "tools/bin/SPTK-3.11")
if not os.path.exists(out_dir):
    os.mkdir(out_dir)

# FFT size and the all-pass constant alpha depend on the sampling rate;
# alpha approximates the mel scale for mel-cepstrum extraction.
if fs == 16000:
    nFFTHalf = 1024
    alpha = 0.58
elif fs == 22050:
    nFFTHalf = 1024
    alpha = 0.65
elif fs == 44100:
    nFFTHalf = 2048
    alpha = 0.76
elif fs == 48000:
    nFFTHalf = 2048
    alpha = 0.77
else:
    print("As of now, we don't support %d Hz sampling rate." %(fs))
    print("Please consider either downsampling to 16000 Hz or upsampling to 48000 Hz")
    sys.exit(1)

# mel-cepstral analysis order (60 coefficients: 0..59)
mcsize=59
def get_wav_filelist(wav_dir):
    """Recursively collect every ``.wav`` file under *wav_dir*.

    Returns the full paths sorted lexicographically.
    """
    collected = []
    for entry in os.listdir(wav_dir):
        full_path = os.path.join(wav_dir, entry)
        if os.path.isfile(full_path) and str(full_path).endswith(".wav"):
            collected.append(full_path)
        elif os.path.isdir(full_path):
            # Recurse into subdirectories.
            collected += get_wav_filelist(full_path)
    collected.sort()
    return collected
def process(filename):
    '''
    Decompose one wav file into vocoder features via WORLD + SPTK.

    Runs the external WORLD ``analysis`` binary and SPTK conversion tools
    through ``os.system`` and leaves .lf0 (log-F0), .mgc (mel-cepstrum)
    and .bap (band aperiodicity) files in ``out_dir``.

    :param filename: path to wav file
    :return: None (output is written to files named after the wav's stem)
    '''
    file_id = os.path.basename(filename).split(".")[0]
    print('\n' + file_id)

    ### WORLD ANALYSIS -- extract vocoder parameters ###
    ### extract f0, sp, ap ###
    world_analysis_cmd = "%s %s %s %s %s" % (os.path.join(world, 'analysis'), \
                                             filename,
                                             os.path.join(out_dir, file_id + '.f0'), \
                                             os.path.join(out_dir, file_id + '.sp'), \
                                             os.path.join(out_dir, file_id + '.bapd'))
    os.system(world_analysis_cmd)

    ### convert f0 to lf0: first dump f0 as ASCII floats ... ###
    sptk_x2x_da_cmd = "%s +da %s > %s" % (os.path.join(sptk, 'x2x'), \
                                          os.path.join(out_dir, file_id + '.f0'), \
                                          os.path.join(out_dir, file_id + '.f0a'))
    os.system(sptk_x2x_da_cmd)
    ### ... then take the natural log, mapping unvoiced (0.0) frames to -1e10 ###
    sptk_x2x_af_cmd = "%s +af %s | %s > %s " % (os.path.join(sptk, 'x2x'), \
                                                os.path.join(out_dir, file_id + '.f0a'), \
                                                os.path.join(sptk, 'sopr') + ' -magic 0.0 -LN -MAGIC -1.0E+10', \
                                                os.path.join(out_dir, file_id + '.lf0'))
    os.system(sptk_x2x_af_cmd)

    ### convert the spectrum to mel-cepstral coefficients (order mcsize, warp alpha) ###
    sptk_x2x_df_cmd1 = "%s +df %s | %s | %s >%s" % (os.path.join(sptk, 'x2x'), \
                                                    os.path.join(out_dir, file_id + '.sp'), \
                                                    os.path.join(sptk, 'sopr') + ' -R -m 32768.0', \
                                                    os.path.join(sptk, 'mcep') + ' -a ' + str(alpha) + ' -m ' + str(
        mcsize) + ' -l ' + str(
        nFFTHalf) + ' -e 1.0E-8 -j 0 -f 0.0 -q 3 ', \
                                                    os.path.join(out_dir, file_id + '.mgc'))
    os.system(sptk_x2x_df_cmd1)

    ### convert the ASCII band aperiodicity dump to binary floats ###
    sptk_x2x_df_cmd2 = "%s +df %s > %s " % (os.path.join(sptk, "x2x"), \
                                            os.path.join(out_dir, file_id + ".bapd"), \
                                            os.path.join(out_dir, file_id + '.bap'))
    os.system(sptk_x2x_df_cmd2)
print("--- Feature extraction started ---")
start_time = time.time()

# collect all wav files to be processed
wav_files = get_wav_filelist(wav_dir)

# extract features in parallel, one worker process per CPU core
pool = mp.Pool(mp.cpu_count())
pool.map(process, wav_files)

# Remove intermediate dumps (.bapd/.f0a/.f0/.sp), keeping .lf0/.mgc/.bap.
# NOTE(review): this shells out to `rm`, so cleanup is POSIX-only.
clean_temp_files_cmd = "rm -rf %s/*.bapd %s/*.f0a %s/*.f0 %s/*.sp" % (out_dir, out_dir, out_dir, out_dir)
os.system(clean_temp_files_cmd)

print("You should have your features ready in: "+out_dir)
(m, s) = divmod(int(time.time() - start_time), 60)
print(("--- Feature extraction completion time: %d min. %d sec ---" % (m, s)))
| [
"os.path.exists",
"os.listdir",
"os.path.join",
"multiprocessing.cpu_count",
"os.getcwd",
"os.path.isfile",
"os.path.isdir",
"os.mkdir",
"os.path.basename",
"sys.exit",
"os.system",
"time.time"
] | [((287, 298), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (296, 298), False, 'import os\n'), ((465, 509), 'os.path.join', 'os.path.join', (['current_dir', '"""tools/bin/WORLD"""'], {}), "(current_dir, 'tools/bin/WORLD')\n", (477, 509), False, 'import os\n'), ((518, 566), 'os.path.join', 'os.path.join', (['current_dir', '"""tools/bin/SPTK-3.11"""'], {}), "(current_dir, 'tools/bin/SPTK-3.11')\n", (530, 566), False, 'import os\n'), ((4096, 4107), 'time.time', 'time.time', ([], {}), '()\n', (4105, 4107), False, 'import time\n'), ((4554, 4585), 'os.system', 'os.system', (['clean_temp_files_cmd'], {}), '(clean_temp_files_cmd)\n', (4563, 4585), False, 'import os\n'), ((234, 245), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (242, 245), False, 'import sys\n'), ((575, 598), 'os.path.exists', 'os.path.exists', (['out_dir'], {}), '(out_dir)\n', (589, 598), False, 'import os\n'), ((604, 621), 'os.mkdir', 'os.mkdir', (['out_dir'], {}), '(out_dir)\n', (612, 621), False, 'import os\n'), ((1137, 1156), 'os.listdir', 'os.listdir', (['wav_dir'], {}), '(wav_dir)\n', (1147, 1156), False, 'import os\n'), ((2263, 2292), 'os.system', 'os.system', (['world_analysis_cmd'], {}), '(world_analysis_cmd)\n', (2272, 2292), False, 'import os\n'), ((2566, 2592), 'os.system', 'os.system', (['sptk_x2x_da_cmd'], {}), '(sptk_x2x_da_cmd)\n', (2575, 2592), False, 'import os\n'), ((2969, 2995), 'os.system', 'os.system', (['sptk_x2x_af_cmd'], {}), '(sptk_x2x_af_cmd)\n', (2978, 2995), False, 'import os\n'), ((3699, 3726), 'os.system', 'os.system', (['sptk_x2x_df_cmd1'], {}), '(sptk_x2x_df_cmd1)\n', (3708, 3726), False, 'import os\n'), ((4010, 4037), 'os.system', 'os.system', (['sptk_x2x_df_cmd2'], {}), '(sptk_x2x_df_cmd2)\n', (4019, 4037), False, 'import os\n'), ((4206, 4220), 'multiprocessing.cpu_count', 'mp.cpu_count', ([], {}), '()\n', (4218, 4220), True, 'import multiprocessing as mp\n'), ((1183, 1210), 'os.path.join', 'os.path.join', (['wav_dir', 'file'], {}), '(wav_dir, file)\n', (1195, 1210), 
False, 'import os\n'), ((1221, 1251), 'os.path.isfile', 'os.path.isfile', (['whole_filepath'], {}), '(whole_filepath)\n', (1235, 1251), False, 'import os\n'), ((1352, 1381), 'os.path.isdir', 'os.path.isdir', (['whole_filepath'], {}), '(whole_filepath)\n', (1365, 1381), False, 'import os\n'), ((1908, 1939), 'os.path.join', 'os.path.join', (['world', '"""analysis"""'], {}), "(world, 'analysis')\n", (1920, 1939), False, 'import os\n'), ((2043, 2081), 'os.path.join', 'os.path.join', (['out_dir', "(file_id + '.f0')"], {}), "(out_dir, file_id + '.f0')\n", (2055, 2081), False, 'import os\n'), ((2130, 2168), 'os.path.join', 'os.path.join', (['out_dir', "(file_id + '.sp')"], {}), "(out_dir, file_id + '.sp')\n", (2142, 2168), False, 'import os\n'), ((2217, 2257), 'os.path.join', 'os.path.join', (['out_dir', "(file_id + '.bapd')"], {}), "(out_dir, file_id + '.bapd')\n", (2229, 2257), False, 'import os\n'), ((2366, 2391), 'os.path.join', 'os.path.join', (['sptk', '"""x2x"""'], {}), "(sptk, 'x2x')\n", (2378, 2391), False, 'import os\n'), ((2437, 2475), 'os.path.join', 'os.path.join', (['out_dir', "(file_id + '.f0')"], {}), "(out_dir, file_id + '.f0')\n", (2449, 2475), False, 'import os\n'), ((2521, 2560), 'os.path.join', 'os.path.join', (['out_dir', "(file_id + '.f0a')"], {}), "(out_dir, file_id + '.f0a')\n", (2533, 2560), False, 'import os\n'), ((2642, 2667), 'os.path.join', 'os.path.join', (['sptk', '"""x2x"""'], {}), "(sptk, 'x2x')\n", (2654, 2667), False, 'import os\n'), ((2719, 2758), 'os.path.join', 'os.path.join', (['out_dir', "(file_id + '.f0a')"], {}), "(out_dir, file_id + '.f0a')\n", (2731, 2758), False, 'import os\n'), ((2924, 2963), 'os.path.join', 'os.path.join', (['out_dir', "(file_id + '.lf0')"], {}), "(out_dir, file_id + '.lf0')\n", (2936, 2963), False, 'import os\n'), ((3079, 3104), 'os.path.join', 'os.path.join', (['sptk', '"""x2x"""'], {}), "(sptk, 'x2x')\n", (3091, 3104), False, 'import os\n'), ((3160, 3198), 'os.path.join', 'os.path.join', (['out_dir', 
"(file_id + '.sp')"], {}), "(out_dir, file_id + '.sp')\n", (3172, 3198), False, 'import os\n'), ((3654, 3693), 'os.path.join', 'os.path.join', (['out_dir', "(file_id + '.mgc')"], {}), "(out_dir, file_id + '.mgc')\n", (3666, 3693), False, 'import os\n'), ((3804, 3829), 'os.path.join', 'os.path.join', (['sptk', '"""x2x"""'], {}), "(sptk, 'x2x')\n", (3816, 3829), False, 'import os\n'), ((3877, 3917), 'os.path.join', 'os.path.join', (['out_dir', "(file_id + '.bapd')"], {}), "(out_dir, file_id + '.bapd')\n", (3889, 3917), False, 'import os\n'), ((3965, 4004), 'os.path.join', 'os.path.join', (['out_dir', "(file_id + '.bap')"], {}), "(out_dir, file_id + '.bap')\n", (3977, 4004), False, 'import os\n'), ((4670, 4681), 'time.time', 'time.time', ([], {}), '()\n', (4679, 4681), False, 'import time\n'), ((1010, 1021), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1018, 1021), False, 'import sys\n'), ((1708, 1734), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (1724, 1734), False, 'import os\n'), ((2810, 2836), 'os.path.join', 'os.path.join', (['sptk', '"""sopr"""'], {}), "(sptk, 'sopr')\n", (2822, 2836), False, 'import os\n'), ((3254, 3280), 'os.path.join', 'os.path.join', (['sptk', '"""sopr"""'], {}), "(sptk, 'sopr')\n", (3266, 3280), False, 'import os\n'), ((3355, 3381), 'os.path.join', 'os.path.join', (['sptk', '"""mcep"""'], {}), "(sptk, 'mcep')\n", (3367, 3381), False, 'import os\n')] |
from session.abstract_class import PysparkPro
class DslAdaptor(object):
pysparkpro = PysparkPro()
select = 'SELECT'
insert = 'INSERT'
delete = 'DELETE'
update = 'UPDATE'
alert = 'ALERT'
create_table = 'CREATETABLE'
drop_table = 'DROPTABLE'
create_index = 'CREATEINDEX'
drop_index = 'DROPTABLE'
create_user = 'CREATEUSER'
exit = 'EXIT'
print_table = 'PRINT'
show_tables = 'SHOW'
value = 'VALUE'
condition = 'CONDITION'
relation_attr = 'RELATTR'
grant_user = 'GRANTUSER'
revoke_user = 'REVOKEUSER'
attr_type = "ATTRTYPE"
class ConnectNode():
def __init__(self, select_list, from_list, where_list):
self.type = DslAdaptor.select
self.select_list = select_list
self.from_list = from_list
self.where_list = where_list
class CreateNode():
def __init__(self, select_list, from_list, where_list):
self.type = DslAdaptor.select
self.select_list = select_list
self.from_list = from_list
self.where_list = where_list
class InsertNode():
def __init__(self, select_list, from_list, where_list):
self.type = DslAdaptor.select
self.select_list = select_list
self.from_list = from_list
self.where_list = where_list
class LoadNode():
def __init__(self, select_list, from_list, where_list):
self.type = DslAdaptor.select
self.select_list = select_list
self.from_list = from_list
self.where_list = where_list
class RefreshNode():
def __init__(self, select_list, from_list, where_list):
self.type = DslAdaptor.select
self.select_list = select_list
self.from_list = from_list
self.where_list = where_list
class RegisterNode():
def __init__(self, select_list, from_list, where_list):
self.type = DslAdaptor.select
self.select_list = select_list
self.from_list = from_list
self.where_list = where_list
class SaveNode():
def __init__(self, select_list, from_list, where_list):
self.type = DslAdaptor.select
self.select_list = select_list
self.from_list = from_list
self.where_list = where_list
class ScriptNode():
def __init__(self, select_list, from_list, where_list):
self.type = DslAdaptor.select
self.select_list = select_list
self.from_list = from_list
self.where_list = where_list
class SelectNode():
def __init__(self, select_list, from_list, where_list):
self.type = DslAdaptor.select
self.select_list = select_list
self.from_list = from_list
self.where_list = where_list
class SetNode():
def __init__(self, select_list, from_list, where_list):
self.type = DslAdaptor.select
self.select_list = select_list
self.from_list = from_list
self.where_list = where_list
class TrainNode():
def __init__(self, select_list, from_list, where_list):
self.type = DslAdaptor.select
self.select_list = select_list
self.from_list = from_list
self.where_list = where_list
class Exit:
def __init__(self):
self.type = DslAdaptor.exit
class PrintTable:
def __init__(self, table_name):
self.type = DslAdaptor.print_table
self.table_name = table_name
class ShowTables:
def __init__(self):
self.type = DslAdaptor.show_tables
class Value:
def __init__(self, value_type, value):
self.type = DslAdaptor.value
self.value_type = value_type
self.value = value
def __str__(self):
return str(self.value) + '[' + self.value_type + ']'
class RelAttr:
def __init__(self, attr_name, table_name=None):
self.type = DslAdaptor.relation_attr
self.table_name = table_name
self.attr_name = attr_name
def __str__(self):
if self.table_name:
return self.table_name + '.' + self.attr_name
else:
return self.attr_name
class Cond:
def __init__(self, left, op, right):
self.type = DslAdaptor.condition
self.op = op.upper()
self.left = left
self.right = right
def __str__(self):
return '(' + str(self.left) + ', ' + str(self.right) + ', ' + self.op + ')'
class AttrType:
def __init__(self, attr_name, attr_type, type_len = 1):
self.type = DslAdaptor.attr_type
self.attr_type = attr_type
self.type_len = type_len
self.attr_name = attr_name
def __str__(self):
return self.attr_name + " " + self.attr_type + " " + str(self.type_len)
if __name__ == '__main__':
spark = DslAdaptor()
print(spark)
| [
"session.abstract_class.PysparkPro"
] | [((91, 103), 'session.abstract_class.PysparkPro', 'PysparkPro', ([], {}), '()\n', (101, 103), False, 'from session.abstract_class import PysparkPro\n')] |
import socket
def connect(server, port):
# open a connection to vulnserver
s = socket.socket (socket.AF_INET, socket.SOCK_STREAM)
s.connect ((server, port))
return s
def read_until(s, delim=b':'):
buf = b''
while not buf.endswith(delim):
buf += s.recv(1)
return buf
def overflow_input(num_chars=128):
for i in range(1, num_chars):
try:
s = connect(SERVER, PORT)
read_until(s)
data = 'A' * i + '\n'
data = bytes(data, encoding='utf-8')
s.send(data)
except:
print(f"Server crashed with input size {i}")
finally:
s.close()
if __name__ == "__main__":
PORT = 12345
SERVER = '<THE HOSTNAME OR IP>'
s = connect(SERVER, PORT)
print(read_until(s))
| [
"socket.socket"
] | [((88, 137), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (101, 137), False, 'import socket\n')] |
#!/usr/bin/env python
"""html in standard nbconvert format
"""
from ipypublish.html.create_tpl import create_tpl
from ipypublish.html.standard import content
from ipypublish.html.standard import content_tagging
from ipypublish.html.standard import document
from ipypublish.html.standard import inout_prompt
from ipypublish.html.standard import mathjax
from ipypublish.html.standard import widgets
oformat = 'HTML'
config = {}
template = create_tpl([
document.tpl_dict,
content.tpl_dict, content_tagging.tpl_dict,
mathjax.tpl_dict, widgets.tpl_dict,
inout_prompt.tpl_dict
])
| [
"ipypublish.html.create_tpl.create_tpl"
] | [((439, 577), 'ipypublish.html.create_tpl.create_tpl', 'create_tpl', (['[document.tpl_dict, content.tpl_dict, content_tagging.tpl_dict, mathjax.\n tpl_dict, widgets.tpl_dict, inout_prompt.tpl_dict]'], {}), '([document.tpl_dict, content.tpl_dict, content_tagging.tpl_dict,\n mathjax.tpl_dict, widgets.tpl_dict, inout_prompt.tpl_dict])\n', (449, 577), False, 'from ipypublish.html.create_tpl import create_tpl\n')] |
from logging import raiseExceptions
from typing import List
from fastapi import APIRouter,Depends,HTTPException, Response,status
from sqlalchemy.orm.session import Session
from .. database import get_db
from .. import models,schemas ,oauth2
router=APIRouter(
prefix='/posts',
tags=['Post']
)
@router.get('/',response_model=List[schemas.PostOut])
def get_lists( db:Session=Depends(get_db),current_user: int =Depends(oauth2.get_current_user)):
ps=db.query(models.Post).all()
return ps
@router.post("/")
def post_list(post:schemas.PostCreate,db:Session=Depends(get_db),current_user: int =Depends(oauth2.get_current_user)):
new_post=models.Post(user_id=current_user.id,** post.dict())
db.add(new_post)
db.commit()
db.refresh(new_post)
return new_post
@router.get("/{id}")
def get_post_by_id(id:int ,db:Session=Depends(get_db), current_user: int =Depends(oauth2.get_current_user)):
post = db.query(models.Post).filter(models.Post.id == id).first()
if post is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND ,
detail=f"post with id {id} not found")
return post
@router.put("/{id}",status_code=status.HTTP_200_OK)
def update_list(id:int,updated_list:schemas.PostCreate ,db:Session=Depends(get_db), current_user: int =Depends(oauth2.get_current_user)):
post_query=db.query(models.Post).filter(models.Post.id==id)
post=post_query.first()
if post is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND ,detail=f"post with id {id} not found")
post_query.update(updated_list.dict(),synchronize_session=False)
db.commit()
return post_query.first()
@router.delete("/{id}" ,status_code=status.HTTP_204_NO_CONTENT)
def delete_list(id:int ,db:Session=Depends(get_db), current_user: int =Depends(oauth2.get_current_user)):
post_query=db.query(models.Post).filter(models.Post.id == id)
post=post_query.first()
if post is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND ,detail=f"post with id {id} not found")
post_query.delete(synchronize_session=False)
db.commit()
return Response(status_code=status.HTTP_204_NO_CONTENT) | [
"fastapi.HTTPException",
"fastapi.APIRouter",
"fastapi.Response",
"fastapi.Depends"
] | [((251, 292), 'fastapi.APIRouter', 'APIRouter', ([], {'prefix': '"""/posts"""', 'tags': "['Post']"}), "(prefix='/posts', tags=['Post'])\n", (260, 292), False, 'from fastapi import APIRouter, Depends, HTTPException, Response, status\n'), ((385, 400), 'fastapi.Depends', 'Depends', (['get_db'], {}), '(get_db)\n', (392, 400), False, 'from fastapi import APIRouter, Depends, HTTPException, Response, status\n'), ((420, 452), 'fastapi.Depends', 'Depends', (['oauth2.get_current_user'], {}), '(oauth2.get_current_user)\n', (427, 452), False, 'from fastapi import APIRouter, Depends, HTTPException, Response, status\n'), ((577, 592), 'fastapi.Depends', 'Depends', (['get_db'], {}), '(get_db)\n', (584, 592), False, 'from fastapi import APIRouter, Depends, HTTPException, Response, status\n'), ((612, 644), 'fastapi.Depends', 'Depends', (['oauth2.get_current_user'], {}), '(oauth2.get_current_user)\n', (619, 644), False, 'from fastapi import APIRouter, Depends, HTTPException, Response, status\n'), ((854, 869), 'fastapi.Depends', 'Depends', (['get_db'], {}), '(get_db)\n', (861, 869), False, 'from fastapi import APIRouter, Depends, HTTPException, Response, status\n'), ((890, 922), 'fastapi.Depends', 'Depends', (['oauth2.get_current_user'], {}), '(oauth2.get_current_user)\n', (897, 922), False, 'from fastapi import APIRouter, Depends, HTTPException, Response, status\n'), ((1267, 1282), 'fastapi.Depends', 'Depends', (['get_db'], {}), '(get_db)\n', (1274, 1282), False, 'from fastapi import APIRouter, Depends, HTTPException, Response, status\n'), ((1303, 1335), 'fastapi.Depends', 'Depends', (['oauth2.get_current_user'], {}), '(oauth2.get_current_user)\n', (1310, 1335), False, 'from fastapi import APIRouter, Depends, HTTPException, Response, status\n'), ((1772, 1787), 'fastapi.Depends', 'Depends', (['get_db'], {}), '(get_db)\n', (1779, 1787), False, 'from fastapi import APIRouter, Depends, HTTPException, Response, status\n'), ((1808, 1840), 'fastapi.Depends', 'Depends', 
(['oauth2.get_current_user'], {}), '(oauth2.get_current_user)\n', (1815, 1840), False, 'from fastapi import APIRouter, Depends, HTTPException, Response, status\n'), ((2140, 2188), 'fastapi.Response', 'Response', ([], {'status_code': 'status.HTTP_204_NO_CONTENT'}), '(status_code=status.HTTP_204_NO_CONTENT)\n', (2148, 2188), False, 'from fastapi import APIRouter, Depends, HTTPException, Response, status\n'), ((1030, 1126), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': 'status.HTTP_404_NOT_FOUND', 'detail': 'f"""post with id {id} not found"""'}), "(status_code=status.HTTP_404_NOT_FOUND, detail=\n f'post with id {id} not found')\n", (1043, 1126), False, 'from fastapi import APIRouter, Depends, HTTPException, Response, status\n'), ((1465, 1561), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': 'status.HTTP_404_NOT_FOUND', 'detail': 'f"""post with id {id} not found"""'}), "(status_code=status.HTTP_404_NOT_FOUND, detail=\n f'post with id {id} not found')\n", (1478, 1561), False, 'from fastapi import APIRouter, Depends, HTTPException, Response, status\n'), ((1972, 2068), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': 'status.HTTP_404_NOT_FOUND', 'detail': 'f"""post with id {id} not found"""'}), "(status_code=status.HTTP_404_NOT_FOUND, detail=\n f'post with id {id} not found')\n", (1985, 2068), False, 'from fastapi import APIRouter, Depends, HTTPException, Response, status\n')] |
import nose.tools
import unittest
import os
import json
import pandas as pd
import numpy as np
import mia
from mia.io_tools import *
from ..test_utils import get_file_path
class IOTests(unittest.TestCase):
@classmethod
def setupClass(cls):
cls._output_files = []
@classmethod
def teardownClass(cls):
for f in cls._output_files:
if os.path.isfile(f):
os.remove(f)
def test_iterate_directory(self):
img_directory = get_file_path("texture_patches")
expected_files = ['texture1.png', 'texture2.png', 'texture3.png',
'texture4.png', 'texture5.png']
expected_files = [os.path.join(img_directory, p) for p in expected_files]
dirs = list(iterate_directory(img_directory))
nose.tools.assert_equal(len(dirs), len(expected_files))
for img_path, expected in zip(dirs, expected_files):
nose.tools.assert_equal(img_path, expected)
def test_iterate_directories(self):
img_directory = get_file_path("texture_patches")
expected_files = ['texture1.png', 'texture2.png', 'texture3.png',
'texture4.png', 'texture5.png']
expected_files = [os.path.join(img_directory, p) for p in expected_files]
dirs = list(iterate_directories(img_directory, img_directory))
nose.tools.assert_equal(len(dirs), len(expected_files))
for (img_path, msk_path), expected in zip(dirs, expected_files):
nose.tools.assert_equal(img_path, expected)
nose.tools.assert_equal(msk_path, expected)
def test_check_is_file(self):
img_path = get_file_path("texture_patches/texture1.png")
nose.tools.assert_true(check_is_file(img_path, ".png"))
def test_check_is_file_multiple_images(self):
img_path = get_file_path("synthetic_patch.dcm")
nose.tools.assert_true(check_is_file(img_path, ".png", ".dcm"))
def test_check_is_file_wrong_extension(self):
img_path = get_file_path("blob_detection.csv")
nose.tools.assert_false(check_is_file(img_path, ".png", ".dcm"))
def test_check_is_image_raises_on_not_a_file(self):
img_path = get_file_path("texture_patches")
nose.tools.assert_false(check_is_file(img_path, ".png", ".dcm"))
def test_check_is_directory(self):
directory = get_file_path("texture_patches")
try:
check_is_directory(directory)
except:
self.fail("check_is_directory raised when it shouldn't have.")
def test_check_is_directory_raises(self):
img_path = get_file_path("texture_patches/not_a_directory")
nose.tools.assert_raises(ValueError, check_is_directory, img_path)
def test_dump_mapping_to_json(self):
output_file = 'test_data.json'
mapping = pd.DataFrame(np.ones((10, 2)), columns=['x', 'y'])
dump_mapping_to_json(mapping, ['x', 'y'], np.zeros(10), output_file)
nose.tools.assert_true(os.path.isfile(output_file))
with open(output_file, 'rb') as f:
data = json.load(f)
nose.tools.assert_equal(len(data), 1)
nose.tools.assert_equal(data[0]['name'], 'Class: 0')
nose.tools.assert_equal(len(data[0]['data']), 10)
self._output_files.append(output_file)
| [
"numpy.ones",
"os.path.join",
"os.path.isfile",
"numpy.zeros",
"json.load",
"os.remove"
] | [((380, 397), 'os.path.isfile', 'os.path.isfile', (['f'], {}), '(f)\n', (394, 397), False, 'import os\n'), ((683, 713), 'os.path.join', 'os.path.join', (['img_directory', 'p'], {}), '(img_directory, p)\n', (695, 713), False, 'import os\n'), ((1233, 1263), 'os.path.join', 'os.path.join', (['img_directory', 'p'], {}), '(img_directory, p)\n', (1245, 1263), False, 'import os\n'), ((2856, 2872), 'numpy.ones', 'np.ones', (['(10, 2)'], {}), '((10, 2))\n', (2863, 2872), True, 'import numpy as np\n'), ((2944, 2956), 'numpy.zeros', 'np.zeros', (['(10)'], {}), '(10)\n', (2952, 2956), True, 'import numpy as np\n'), ((3003, 3030), 'os.path.isfile', 'os.path.isfile', (['output_file'], {}), '(output_file)\n', (3017, 3030), False, 'import os\n'), ((3095, 3107), 'json.load', 'json.load', (['f'], {}), '(f)\n', (3104, 3107), False, 'import json\n'), ((415, 427), 'os.remove', 'os.remove', (['f'], {}), '(f)\n', (424, 427), False, 'import os\n')] |
# Vax-Man, a re-implementation of Pacman, in Python, with PyGame.
# Forked from: https://github.com/hbokmann/Pacman
# Edited by <NAME> (2021)
# Video link: https://youtu.be/ZrqZEC6DvMc
import time
import pygame
# Ghosts multiply themselves every thirty seconds.
GHOST_MULTIPLICATION_TIME_GAP = 30
# Thirty-two times for each ghost type.
MAXIMUM_GHOSTS = 32 * 4;
indigo = ( 85, 48, 141 )
yellow = ( 255, 255, 0 )
darkRed = ( 201, 33, 30 )
darkGrey = ( 28, 28, 28 )
lightGrey = ( 238, 238, 238 )
Vaxman_icon=pygame.image.load('images/Vaxman_Big.png')
pygame.display.set_icon(Vaxman_icon)
# Add music
# Spook4 by PeriTune | http://peritune.com
# Attribution 4.0 International (CC BY 4.0)
# https://creativecommons.org/licenses/by/4.0/
# Music promoted by https://www.chosic.com/free-music/all/
pygame.mixer.init()
pygame.mixer.music.load('peritune-spook4.mp3')
pygame.mixer.music.play(-1, 0.0)
# This class represents the bar at the bottom that the player controls
class Wall(pygame.sprite.Sprite):
# Constructor function
def __init__(self,x,y,width,height, color):
# Call the parent's constructor
pygame.sprite.Sprite.__init__(self)
# Make an indigo wall, of the size specified in the parameters
self.image = pygame.Surface([width, height])
self.image.fill(color)
# Make our top-left corner the passed-in location.
self.rect = self.image.get_rect()
self.rect.top = y
self.rect.left = x
# This creates all the walls in room 1
def setupRoomOne(all_sprites_list):
# Make the walls. (x_pos, y_pos, width, height)
wall_list=pygame.sprite.RenderPlain()
# This is a list of walls. Each is in the form [x, y, width, height]
walls = [ [0,0,6,600],
[0,0,600,6],
[0,600,606,6],
[600,0,6,606],
[300,0,6,66],
[60,60,186,6],
[360,60,186,6],
[60,120,66,6],
[60,120,6,126],
[180,120,246,6],
[300,120,6,66],
[480,120,66,6],
[540,120,6,126],
[120,180,126,6],
[120,180,6,126],
[360,180,126,6],
[480,180,6,126],
[180,240,6,126],
[180,360,246,6],
[420,240,6,126],
[240,240,42,6],
[324,240,42,6],
[240,240,6,66],
[240,300,126,6],
[360,240,6,66],
[0,300,66,6],
[540,300,66,6],
[60,360,66,6],
[60,360,6,186],
[480,360,66,6],
[540,360,6,186],
[120,420,366,6],
[120,420,6,66],
[480,420,6,66],
[180,480,246,6],
[300,480,6,66],
[120,540,126,6],
[360,540,126,6]
]
# Loop through the list. Create the wall, add it to the list.
for item in walls:
wall = Wall(item[0], item[1], item[2], item[3], indigo)
wall_list.add(wall)
all_sprites_list.add(wall)
# Return our new list.
return wall_list
def setupGate(all_sprites_list):
gate = pygame.sprite.RenderPlain()
gate.add(Wall(282, 242, 42, 2, lightGrey))
all_sprites_list.add(gate)
return gate
# This class represents the ball
# It derives from the "Sprite" class in Pygame
class Block(pygame.sprite.Sprite):
# Constructor. Pass in the color of the block,
# and its x and y position
def __init__(self, color, width, height):
# Call the parent class (Sprite) constructor
pygame.sprite.Sprite.__init__(self)
# Create an image of the block, and fill it with a color.
# This could also be an image loaded from the disk.
self.image = pygame.Surface([width, height])
self.image.fill(lightGrey)
self.image.set_colorkey(lightGrey)
pygame.draw.ellipse(self.image,color,[0,0,width,height])
# Fetch the rectangle object that has the dimensions of the image
# image.
# Update the position of this object by setting the values
# of rect.x and rect.y
self.rect = self.image.get_rect()
# This class represents the bar at the bottom that the player controls
class Player(pygame.sprite.Sprite):
# Set speed vector
change_x=0
change_y=0
# Constructor function
def __init__(self, x, y, filename):
# Call the parent's constructor
pygame.sprite.Sprite.__init__(self)
# Set height, width
self.image = pygame.image.load(filename).convert_alpha()
# Make our top-left corner the passed-in location.
self.rect = self.image.get_rect()
self.rect.top = y
self.rect.left = x
self.prev_x = x
self.prev_y = y
# Clear the speed of the player
def prevdirection(self):
self.prev_x = self.change_x
self.prev_y = self.change_y
# Change the speed of the player
def changespeed(self,x,y):
self.change_x+=x
self.change_y+=y
# Find a new position for the player
def update(self,walls,gate):
# Get the old position, in case we need to go back to it
old_x=self.rect.left
new_x=old_x+self.change_x
prev_x=old_x+self.prev_x
self.rect.left = new_x
old_y=self.rect.top
new_y=old_y+self.change_y
prev_y=old_y+self.prev_y
# Did this update cause us to hit a wall?
x_collide = pygame.sprite.spritecollide(self, walls, False)
if x_collide:
# Whoops, hit a wall. Go back to the old position
self.rect.left=old_x
# self.rect.top=prev_y
# y_collide = pygame.sprite.spritecollide(self, walls, False)
# if y_collide:
# # Whoops, hit a wall. Go back to the old position
# self.rect.top=old_y
# print('a')
else:
self.rect.top = new_y
# Did this update cause us to hit a wall?
y_collide = pygame.sprite.spritecollide(self, walls, False)
if y_collide:
# Whoops, hit a wall. Go back to the old position
self.rect.top=old_y
# self.rect.left=prev_x
# x_collide = pygame.sprite.spritecollide(self, walls, False)
# if x_collide:
# # Whoops, hit a wall. Go back to the old position
# self.rect.left=old_x
# print('b')
if gate != False:
gate_hit = pygame.sprite.spritecollide(self, gate, False)
if gate_hit:
self.rect.left=old_x
self.rect.top=old_y
#Inheritime Player klassist
class Ghost(Player):
# Change the speed of the ghost
def changespeed(self,list,ghost,turn,steps,l):
try:
z=list[turn][2]
if steps < z:
self.change_x=list[turn][0]
self.change_y=list[turn][1]
steps+=1
else:
if turn < l:
turn+=1
elif ghost == "clyde":
turn = 2
else:
turn = 0
self.change_x=list[turn][0]
self.change_y=list[turn][1]
steps = 0
return [turn,steps]
except IndexError:
return [0,0]
Pinky_directions = [
[0,-30,4],
[15,0,9],
[0,15,11],
[-15,0,23],
[0,15,7],
[15,0,3],
[0,-15,3],
[15,0,19],
[0,15,3],
[15,0,3],
[0,15,3],
[15,0,3],
[0,-15,15],
[-15,0,7],
[0,15,3],
[-15,0,19],
[0,-15,11],
[15,0,9]
]
Blinky_directions = [
[0,-15,4],
[15,0,9],
[0,15,11],
[15,0,3],
[0,15,7],
[-15,0,11],
[0,15,3],
[15,0,15],
[0,-15,15],
[15,0,3],
[0,-15,11],
[-15,0,3],
[0,-15,11],
[-15,0,3],
[0,-15,3],
[-15,0,7],
[0,-15,3],
[15,0,15],
[0,15,15],
[-15,0,3],
[0,15,3],
[-15,0,3],
[0,-15,7],
[-15,0,3],
[0,15,7],
[-15,0,11],
[0,-15,7],
[15,0,5]
]
Inky_directions = [
[30,0,2],
[0,-15,4],
[15,0,10],
[0,15,7],
[15,0,3],
[0,-15,3],
[15,0,3],
[0,-15,15],
[-15,0,15],
[0,15,3],
[15,0,15],
[0,15,11],
[-15,0,3],
[0,-15,7],
[-15,0,11],
[0,15,3],
[-15,0,11],
[0,15,7],
[-15,0,3],
[0,-15,3],
[-15,0,3],
[0,-15,15],
[15,0,15],
[0,15,3],
[-15,0,15],
[0,15,11],
[15,0,3],
[0,-15,11],
[15,0,11],
[0,15,3],
[15,0,1],
]
Clyde_directions = [
[-30,0,2],
[0,-15,4],
[15,0,5],
[0,15,7],
[-15,0,11],
[0,-15,7],
[-15,0,3],
[0,15,7],
[-15,0,7],
[0,15,15],
[15,0,15],
[0,-15,3],
[-15,0,11],
[0,-15,7],
[15,0,3],
[0,-15,11],
[15,0,9],
]
pl = len(Pinky_directions) - 1
bl = len(Blinky_directions) - 1
il = len(Inky_directions) - 1
cl = len(Clyde_directions) - 1
# Call this function so the Pygame library can initialize itself
pygame.init()
# Create an 606x606 sized screen
screen = pygame.display.set_mode([606, 606])
# This is a list of 'sprites.' Each block in the program is
# added to this list. The list is managed by a class called 'RenderPlain.'
# Set the title of the window
pygame.display.set_caption('Melly the Vax-Man')
# Create a surface we can draw on
background = pygame.Surface(screen.get_size())
# Used for converting color maps and such
background = background.convert()
# Fill the screen with a dark grey background
background.fill(darkGrey)
clock = pygame.time.Clock()
pygame.font.init()
font = pygame.font.Font("freesansbold.ttf", 24)
#default locations for Vax-Man and ghosts
w = 303 - 16 # Width
p_h = 19 + (7 * 60) # Vax-Man height
m_h = 19 + (4 * 60) # Monster height
b_h = 19 + (3 * 60) # Binky height
i_w = 303 - 16 - 32 # Inky width
c_w = 303 + (32 - 16) # Clyde width
def startGame():
all_sprites_list = pygame.sprite.RenderPlain()
block_list = pygame.sprite.RenderPlain()
ghosts_list = pygame.sprite.RenderPlain()
vaxman_collide = pygame.sprite.RenderPlain()
wall_list = setupRoomOne(all_sprites_list)
gate = setupGate(all_sprites_list)
# Create the player paddle object
Vaxman = Player(w, p_h, "images/Vaxman_Small.png")
all_sprites_list.add(Vaxman)
vaxman_collide.add(Vaxman)
Blinkies = []
Pinkies = []
Inkies = []
Clydes = []
# Draw the grid
for row in range(19):
for column in range(19):
if (row == 7 or row == 8) and (column == 8 or column == 9 or column == 10):
continue
else:
block = Block(yellow, 4, 4)
# Set a random location for the block
block.rect.x = (30*column+6)+26
block.rect.y = (30*row+6)+26
b_collide = pygame.sprite.spritecollide(block, wall_list, False)
p_collide = pygame.sprite.spritecollide(block, vaxman_collide, False)
if b_collide:
continue
elif p_collide:
continue
else:
# Add the block to the list of objects
block_list.add(block)
all_sprites_list.add(block)
bll = len(block_list)
score = 0
done = False
i = 0
previousTime = 0;
while done == False:
# ALL EVENT PROCESSING SHOULD GO BELOW THIS COMMENT
currentTime = time.time();
deltaTime = currentTime - previousTime;
if previousTime == 0 or deltaTime > GHOST_MULTIPLICATION_TIME_GAP:
if previousTime == 0 or Blinkies:
Blinkies.append( { "entity" : Ghost(w, b_h, "images/Blinky.png"), "turn" : 0, "steps" : 0 } )
ghosts_list.add(Blinkies[-1]["entity"])
all_sprites_list.add(Blinkies[-1]["entity"])
if previousTime == 0 or Pinkies:
Pinkies.append( { "entity" : Ghost(w, m_h, "images/Pinky.png"), "turn" : 0, "steps" : 0 } )
ghosts_list.add(Pinkies[-1]["entity"])
all_sprites_list.add(Pinkies[-1]["entity"])
if previousTime == 0 or Inkies:
Inkies.append( { "entity" : Ghost(i_w, m_h, "images/Inky.png"), "turn" : 0, "steps" : 0 } )
ghosts_list.add(Inkies[-1]["entity"])
all_sprites_list.add(Inkies[-1]["entity"])
if previousTime == 0 or Clydes:
Clydes.append( { "entity" : Ghost(c_w, m_h, "images/Clyde.png"), "turn" : 0, "steps" : 0 } )
ghosts_list.add(Clydes[-1]["entity"])
all_sprites_list.add(Clydes[-1]["entity"])
previousTime = currentTime
for event in pygame.event.get():
if event.type == pygame.QUIT:
done=True
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_LEFT:
Vaxman.changespeed(-30,0)
if event.key == pygame.K_RIGHT:
Vaxman.changespeed(30,0)
if event.key == pygame.K_UP:
Vaxman.changespeed(0,-30)
if event.key == pygame.K_DOWN:
Vaxman.changespeed(0,30)
if event.type == pygame.KEYUP:
if event.key == pygame.K_LEFT:
Vaxman.changespeed(30,0)
if event.key == pygame.K_RIGHT:
Vaxman.changespeed(-30,0)
if event.key == pygame.K_UP:
Vaxman.changespeed(0,30)
if event.key == pygame.K_DOWN:
Vaxman.changespeed(0,-30)
# ALL EVENT PROCESSING SHOULD GO ABOVE THIS COMMENT
# ALL GAME LOGIC SHOULD GO BELOW THIS COMMENT
Vaxman.update(wall_list, gate)
for Pinky in Pinkies:
returned = Pinky["entity"].changespeed(Pinky_directions, False, Pinky["turn"], Pinky["steps"], pl)
Pinky["turn"] = returned[0]
Pinky["steps"] = returned[1]
Pinky["entity"].changespeed(Pinky_directions, False, Pinky["turn"], Pinky["steps"], pl)
Pinky["entity"].update(wall_list, False)
for Blinky in Blinkies:
returned = Blinky["entity"].changespeed(Blinky_directions, False, Blinky["turn"], Blinky["steps"], bl)
Blinky["turn"] = returned[0]
Blinky["steps"] = returned[1]
Blinky["entity"].changespeed(Blinky_directions, False, Blinky["turn"], Blinky["steps"], bl)
Blinky["entity"].update(wall_list, False)
for Inky in Inkies:
returned = Inky["entity"].changespeed(Inky_directions, False, Inky["turn"], Inky["steps"], il)
Inky["turn"] = returned[0]
Inky["steps"] = returned[1]
Inky["entity"].changespeed(Inky_directions, False, Inky["turn"], Inky["steps"], il)
Inky["entity"].update(wall_list, False)
for Clyde in Clydes:
returned = Clyde["entity"].changespeed(Clyde_directions, "clyde", Clyde["turn"], Clyde["steps"], cl)
Clyde["turn"] = returned[0]
Clyde["steps"] = returned[1]
Clyde["entity"].changespeed(Clyde_directions, "clyde", Clyde["turn"], Clyde["steps"], cl)
Clyde["entity"].update(wall_list, False)
# See if the Vax-Man block has collided with anything.
blocks_hit_list = pygame.sprite.spritecollide(Vaxman, block_list, True)
# Check the list of collisions.
if len(blocks_hit_list) > 0:
score +=len(blocks_hit_list)
# ALL GAME LOGIC SHOULD GO ABOVE THIS COMMENT
# ALL CODE TO DRAW SHOULD GO BELOW THIS COMMENT
screen.fill(darkGrey)
wall_list.draw(screen)
gate.draw(screen)
text=font.render("Score: "+str(score)+"/"+str(bll), True, darkRed)
screen.blit(text, [10, 10])
if score == bll:
userWantsToExit = doNext("Congratulations, you won!", 145, all_sprites_list, block_list, ghosts_list, vaxman_collide, wall_list, gate)
if userWantsToExit:
break
ghosts_hit_list = pygame.sprite.spritecollide(Vaxman, ghosts_list, True)
if ghosts_hit_list:
for refBlinky in Blinkies:
if refBlinky["entity"] in ghosts_hit_list:
Blinkies = [Blinky for Blinky in Blinkies if Blinky != refBlinky]
for refPinky in Pinkies:
if refPinky["entity"] in ghosts_hit_list:
Pinkies = [Pinky for Pinky in Pinkies if Pinky != refPinky]
for refInky in Inkies:
if refInky["entity"] in ghosts_hit_list:
Inkies = [Inky for Inky in Inkies if Inky != refInky]
for refClyde in Clydes:
if refClyde["entity"] in ghosts_hit_list:
Clydes = [Clyde for Clyde in Clydes if Clyde != refClyde]
all_sprites_list.draw(screen)
ghosts_list.draw(screen)
if len(ghosts_list) >= MAXIMUM_GHOSTS:
userWantsToExit = doNext("Game Over", 235, all_sprites_list, block_list, ghosts_list, vaxman_collide, wall_list, gate)
if userWantsToExit:
break
# ALL CODE TO DRAW SHOULD GO ABOVE THIS COMMENT
pygame.display.flip()
clock.tick(10)
def doNext(message, left, all_sprites_list, block_list, ghosts_list, vaxman_collide, wall_list, gate):
while True:
# ALL EVENT PROCESSING SHOULD GO BELOW THIS COMMENT
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
return True
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
pygame.quit()
return True
if event.key == pygame.K_RETURN:
del all_sprites_list
del block_list
del ghosts_list
del vaxman_collide
del wall_list
del gate
startGame()
return False
#Grey background
w = pygame.Surface((400,200)) # the size of your rect
w.set_alpha(10) # alpha level
w.fill((128,128,128)) # this fills the entire surface
screen.blit(w, (100,200)) # (0,0) are the top-left coordinates
#Won or lost
text1=font.render(message, True, lightGrey)
screen.blit(text1, [left, 233])
text2=font.render("To play again, press ENTER.", True, lightGrey)
screen.blit(text2, [135, 303])
text3=font.render("To quit, press ESCAPE.", True, lightGrey)
screen.blit(text3, [165, 333])
pygame.display.flip()
clock.tick(10)
startGame()
pygame.quit()
| [
"pygame.init",
"pygame.quit",
"pygame.font.Font",
"pygame.display.set_mode",
"pygame.display.flip",
"pygame.display.set_icon",
"pygame.font.init",
"pygame.image.load",
"pygame.mixer.music.load",
"pygame.sprite.spritecollide",
"pygame.Surface",
"pygame.draw.ellipse",
"pygame.time.Clock",
"p... | [((531, 573), 'pygame.image.load', 'pygame.image.load', (['"""images/Vaxman_Big.png"""'], {}), "('images/Vaxman_Big.png')\n", (548, 573), False, 'import pygame\n'), ((574, 610), 'pygame.display.set_icon', 'pygame.display.set_icon', (['Vaxman_icon'], {}), '(Vaxman_icon)\n', (597, 610), False, 'import pygame\n'), ((817, 836), 'pygame.mixer.init', 'pygame.mixer.init', ([], {}), '()\n', (834, 836), False, 'import pygame\n'), ((837, 883), 'pygame.mixer.music.load', 'pygame.mixer.music.load', (['"""peritune-spook4.mp3"""'], {}), "('peritune-spook4.mp3')\n", (860, 883), False, 'import pygame\n'), ((884, 916), 'pygame.mixer.music.play', 'pygame.mixer.music.play', (['(-1)', '(0.0)'], {}), '(-1, 0.0)\n', (907, 916), False, 'import pygame\n'), ((8747, 8760), 'pygame.init', 'pygame.init', ([], {}), '()\n', (8758, 8760), False, 'import pygame\n'), ((8806, 8841), 'pygame.display.set_mode', 'pygame.display.set_mode', (['[606, 606]'], {}), '([606, 606])\n', (8829, 8841), False, 'import pygame\n'), ((9010, 9057), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Melly the Vax-Man"""'], {}), "('Melly the Vax-Man')\n", (9036, 9057), False, 'import pygame\n'), ((9303, 9322), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (9320, 9322), False, 'import pygame\n'), ((9324, 9342), 'pygame.font.init', 'pygame.font.init', ([], {}), '()\n', (9340, 9342), False, 'import pygame\n'), ((9350, 9390), 'pygame.font.Font', 'pygame.font.Font', (['"""freesansbold.ttf"""', '(24)'], {}), "('freesansbold.ttf', 24)\n", (9366, 9390), False, 'import pygame\n'), ((18229, 18242), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (18240, 18242), False, 'import pygame\n'), ((1639, 1666), 'pygame.sprite.RenderPlain', 'pygame.sprite.RenderPlain', ([], {}), '()\n', (1664, 1666), False, 'import pygame\n'), ((3226, 3253), 'pygame.sprite.RenderPlain', 'pygame.sprite.RenderPlain', ([], {}), '()\n', (3251, 3253), False, 'import pygame\n'), ((9702, 9729), 'pygame.sprite.RenderPlain', 
'pygame.sprite.RenderPlain', ([], {}), '()\n', (9727, 9729), False, 'import pygame\n'), ((9746, 9773), 'pygame.sprite.RenderPlain', 'pygame.sprite.RenderPlain', ([], {}), '()\n', (9771, 9773), False, 'import pygame\n'), ((9791, 9818), 'pygame.sprite.RenderPlain', 'pygame.sprite.RenderPlain', ([], {}), '()\n', (9816, 9818), False, 'import pygame\n'), ((9839, 9866), 'pygame.sprite.RenderPlain', 'pygame.sprite.RenderPlain', ([], {}), '()\n', (9864, 9866), False, 'import pygame\n'), ((1146, 1181), 'pygame.sprite.Sprite.__init__', 'pygame.sprite.Sprite.__init__', (['self'], {}), '(self)\n', (1175, 1181), False, 'import pygame\n'), ((1277, 1308), 'pygame.Surface', 'pygame.Surface', (['[width, height]'], {}), '([width, height])\n', (1291, 1308), False, 'import pygame\n'), ((3674, 3709), 'pygame.sprite.Sprite.__init__', 'pygame.sprite.Sprite.__init__', (['self'], {}), '(self)\n', (3703, 3709), False, 'import pygame\n'), ((3860, 3891), 'pygame.Surface', 'pygame.Surface', (['[width, height]'], {}), '([width, height])\n', (3874, 3891), False, 'import pygame\n'), ((3978, 4039), 'pygame.draw.ellipse', 'pygame.draw.ellipse', (['self.image', 'color', '[0, 0, width, height]'], {}), '(self.image, color, [0, 0, width, height])\n', (3997, 4039), False, 'import pygame\n'), ((4552, 4587), 'pygame.sprite.Sprite.__init__', 'pygame.sprite.Sprite.__init__', (['self'], {}), '(self)\n', (4581, 4587), False, 'import pygame\n'), ((5608, 5655), 'pygame.sprite.spritecollide', 'pygame.sprite.spritecollide', (['self', 'walls', '(False)'], {}), '(self, walls, False)\n', (5635, 5655), False, 'import pygame\n'), ((11137, 11148), 'time.time', 'time.time', ([], {}), '()\n', (11146, 11148), False, 'import time\n'), ((12421, 12439), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (12437, 12439), False, 'import pygame\n'), ((15008, 15061), 'pygame.sprite.spritecollide', 'pygame.sprite.spritecollide', (['Vaxman', 'block_list', '(True)'], {}), '(Vaxman, block_list, True)\n', (15035, 15061), False, 
'import pygame\n'), ((15737, 15791), 'pygame.sprite.spritecollide', 'pygame.sprite.spritecollide', (['Vaxman', 'ghosts_list', '(True)'], {}), '(Vaxman, ghosts_list, True)\n', (15764, 15791), False, 'import pygame\n'), ((16825, 16846), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (16844, 16846), False, 'import pygame\n'), ((17068, 17086), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (17084, 17086), False, 'import pygame\n'), ((17598, 17624), 'pygame.Surface', 'pygame.Surface', (['(400, 200)'], {}), '((400, 200))\n', (17612, 17624), False, 'import pygame\n'), ((18171, 18192), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (18190, 18192), False, 'import pygame\n'), ((6173, 6220), 'pygame.sprite.spritecollide', 'pygame.sprite.spritecollide', (['self', 'walls', '(False)'], {}), '(self, walls, False)\n', (6200, 6220), False, 'import pygame\n'), ((6695, 6741), 'pygame.sprite.spritecollide', 'pygame.sprite.spritecollide', (['self', 'gate', '(False)'], {}), '(self, gate, False)\n', (6722, 6741), False, 'import pygame\n'), ((4641, 4668), 'pygame.image.load', 'pygame.image.load', (['filename'], {}), '(filename)\n', (4658, 4668), False, 'import pygame\n'), ((10566, 10618), 'pygame.sprite.spritecollide', 'pygame.sprite.spritecollide', (['block', 'wall_list', '(False)'], {}), '(block, wall_list, False)\n', (10593, 10618), False, 'import pygame\n'), ((10643, 10700), 'pygame.sprite.spritecollide', 'pygame.sprite.spritecollide', (['block', 'vaxman_collide', '(False)'], {}), '(block, vaxman_collide, False)\n', (10670, 10700), False, 'import pygame\n'), ((17136, 17149), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (17147, 17149), False, 'import pygame\n'), ((17268, 17281), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (17279, 17281), False, 'import pygame\n')] |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import numpy as np
from maro.rl import AbstractStateShaper
class ECRStateShaper(AbstractStateShaper):
def __init__(self, *, look_back, max_ports_downstream, port_attributes, vessel_attributes):
super().__init__()
self._look_back = look_back
self._max_ports_downstream = max_ports_downstream
self._port_attributes = port_attributes
self._vessel_attributes = vessel_attributes
self._dim = (look_back + 1) * (max_ports_downstream + 1) * len(port_attributes) + len(vessel_attributes)
def __call__(self, decision_event, snapshot_list):
tick, port_idx, vessel_idx = decision_event.tick, decision_event.port_idx, decision_event.vessel_idx
ticks = [tick - rt for rt in range(self._look_back-1)]
future_port_idx_list = snapshot_list["vessels"][tick: vessel_idx: 'future_stop_list'].astype('int')
port_features = snapshot_list["ports"][ticks: [port_idx] + list(future_port_idx_list): self._port_attributes]
vessel_features = snapshot_list["vessels"][tick: vessel_idx: self._vessel_attributes]
state = np.concatenate((port_features, vessel_features))
return str(port_idx), state
@property
def dim(self):
return self._dim
| [
"numpy.concatenate"
] | [((1172, 1220), 'numpy.concatenate', 'np.concatenate', (['(port_features, vessel_features)'], {}), '((port_features, vessel_features))\n', (1186, 1220), True, 'import numpy as np\n')] |
from os import listdir
from os.path import join, isfile
import json
from random import randint
#########################################
## START of part that students may change
from code_completion_baseline import Code_Completion_Baseline
training_dir = "./../../programs_800/"
query_dir = "./../../programs_200/"
model_file = "./../../trained_model"
use_stored_model = False
max_hole_size = 2
simplify_tokens = True
## END of part that students may change
#########################################
def simplify_token(token):
if token["type"] == "Identifier":
token["value"] = "ID"
elif token["type"] == "String":
token["value"] = "\"STR\""
elif token["type"] == "RegularExpression":
token["value"] = "/REGEXP/"
elif token["type"] == "Numeric":
token["value"] = "5"
# load sequences of tokens from files
def load_tokens(token_dir):
token_files = [join(token_dir, f) for f in listdir(token_dir) if isfile(join(token_dir, f)) and f.endswith("_tokens.json")]
token_lists = [json.load(open(f)) for f in token_files]
if simplify_tokens:
for token_list in token_lists:
for token in token_list:
simplify_token(token)
return token_lists
# removes up to max_hole_size tokens
def create_hole(tokens):
hole_size = randint(1, max_hole_size)
hole_start_idx = randint(1, len(tokens) - hole_size)
prefix = tokens[0:hole_start_idx]
expected = tokens[hole_start_idx:hole_start_idx + hole_size]
suffix = tokens[hole_start_idx + hole_size:]
return(prefix, expected, suffix)
# checks if two sequences of tokens are identical
def same_tokens(tokens1, tokens2):
if len(tokens1) != len(tokens2):
return False
for idx, t1 in enumerate(tokens1):
t2 = tokens2[idx]
if t1["type"] != t2["type"] or t1["value"] != t2["value"]:
return False
return True
#########################################
## START of part that students may change
code_completion = Code_Completion_Baseline()
## END of part that students may change
#########################################
# train the network
training_token_lists = load_tokens(training_dir)
if use_stored_model:
code_completion.load(training_token_lists, model_file)
else:
code_completion.train(training_token_lists, model_file)
# query the network and measure its accuracy
query_token_lists = load_tokens(query_dir)
correct = incorrect = 0
for tokens in query_token_lists:
(prefix, expected, suffix) = create_hole(tokens)
completion = code_completion.query(prefix, suffix)
if same_tokens(completion, expected):
correct += 1
else:
incorrect += 1
accuracy = correct / (correct + incorrect)
print("Accuracy: " + str(correct) + " correct vs. " + str(incorrect) + " incorrect = " + str(accuracy))
| [
"os.path.join",
"os.listdir",
"code_completion_baseline.Code_Completion_Baseline",
"random.randint"
] | [((2010, 2036), 'code_completion_baseline.Code_Completion_Baseline', 'Code_Completion_Baseline', ([], {}), '()\n', (2034, 2036), False, 'from code_completion_baseline import Code_Completion_Baseline\n'), ((1316, 1341), 'random.randint', 'randint', (['(1)', 'max_hole_size'], {}), '(1, max_hole_size)\n', (1323, 1341), False, 'from random import randint\n'), ((907, 925), 'os.path.join', 'join', (['token_dir', 'f'], {}), '(token_dir, f)\n', (911, 925), False, 'from os.path import join, isfile\n'), ((935, 953), 'os.listdir', 'listdir', (['token_dir'], {}), '(token_dir)\n', (942, 953), False, 'from os import listdir\n'), ((964, 982), 'os.path.join', 'join', (['token_dir', 'f'], {}), '(token_dir, f)\n', (968, 982), False, 'from os.path import join, isfile\n')] |
from flask import Flask, render_template
from config import configs
from .extensions import login_manager, db
from .account import account
from .frontend import frontend
from webapp.session import RedisSessionInterface
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(configs[config_name])
register_session_storage(app, configs[config_name])
register_blueprints(app)
init_extensions(app)
add_error_pages(app)
return app
def register_session_storage(app, conf):
if hasattr(conf, 'REDIS'):
from redis import Redis
host = conf.REDIS['host']
port = conf.REDIS['port']
db_num = conf.REDIS['db']
app.session_interface = RedisSessionInterface(Redis(host, port, db_num))
def register_blueprints(app):
app.register_blueprint(frontend)
app.register_blueprint(account)
def init_extensions(app):
login_manager.init_app(app)
db.init_app(app)
def add_error_pages(app):
@app.errorhandler(401)
def unauthorized(e):
return render_template('errors/401.html'), 401
@app.errorhandler(403)
def forbidden(e):
return render_template('errors/403.html'), 403
@app.errorhandler(404)
def not_found(e):
return render_template('errors/404.html'), 404
@app.errorhandler(500)
def internal_server_error(e):
return render_template('errors/500.html'), 500
| [
"flask.render_template",
"redis.Redis",
"flask.Flask"
] | [((261, 276), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (266, 276), False, 'from flask import Flask, render_template\n'), ((740, 765), 'redis.Redis', 'Redis', (['host', 'port', 'db_num'], {}), '(host, port, db_num)\n', (745, 765), False, 'from redis import Redis\n'), ((1049, 1083), 'flask.render_template', 'render_template', (['"""errors/401.html"""'], {}), "('errors/401.html')\n", (1064, 1083), False, 'from flask import Flask, render_template\n'), ((1154, 1188), 'flask.render_template', 'render_template', (['"""errors/403.html"""'], {}), "('errors/403.html')\n", (1169, 1188), False, 'from flask import Flask, render_template\n'), ((1259, 1293), 'flask.render_template', 'render_template', (['"""errors/404.html"""'], {}), "('errors/404.html')\n", (1274, 1293), False, 'from flask import Flask, render_template\n'), ((1376, 1410), 'flask.render_template', 'render_template', (['"""errors/500.html"""'], {}), "('errors/500.html')\n", (1391, 1410), False, 'from flask import Flask, render_template\n')] |
import pytest
from pydent.models import Plan
def test_plan_constructor(fake_session):
g = fake_session.Plan.new()
assert g.name is not None
print(g.plan_associations)
assert g.operations is None
assert g.wires == []
g = Plan(name="MyPlan", status="running")
assert g.name == "MyPlan"
assert g.status == "running"
def test_add_operation(fake_session):
op = fake_session.Operation.load({"id": 4})
p = fake_session.Plan.new()
# add first operation
assert p.operations is None
p.add_operation(op)
assert p.operations == [op]
# add second operation
op2 = fake_session.Operation.load({"id": 5})
p.add_operation(op2)
assert p.operations == [op, op2]
def test_add_operations(fake_session):
op = fake_session.Operation.load({"id": 4})
op2 = fake_session.Operation.load({"id": 5})
ops = [op, op2]
p = fake_session.Plan.new()
p.add_operations(ops)
assert p.operations == [op, op2]
@pytest.fixture(scope="function")
def fake_plan(fake_session):
p = fake_session.Plan.new()
op1 = fake_session.Operation.load({})
op2 = fake_session.Operation.load({})
src = fake_session.FieldValue.load(
{
"name": "myinput",
"parent_class": "Operation",
"operation": op1,
"role": "output",
}
)
dest = fake_session.FieldValue.load(
{
"name": "myoutput",
"parent_class": "Operation",
"operation": op2,
"role": "input",
}
)
op1.field_values = [src]
op2.field_values = [dest]
return p, src, dest
def test_wire(fake_plan):
p, src, dest = fake_plan
p.add_operations([src.operation, dest.operation])
p.wire(src, dest)
assert len(p.wires) == 1
assert p.wires[0].source.name == "myinput"
assert p.wires[0].destination.name == "myoutput"
print(p.wires)
def test_plan_copy(example_plan):
"""Copying plans should anonymize operations and wires."""
copied_plan = example_plan.copy()
assert copied_plan.operations
for op in copied_plan.operations:
assert op.id is None
assert op.operation_type_id is not None
assert op.field_values is not None
for fv in op.field_values:
assert fv.id is None
assert fv.parent_id is None
assert fv.field_type_id is not None
# TODO: make this adeterministic test
"""def test_new_plan(session):
p = fake_session.Plan.new()
p.connect_to_session(session)
assert p.operations is None
assert p.plan_associations is None
p.id = 1000000
assert p.operations == []
assert p.plan_associations == []"""
# def test_submit(session):
# primer = session.SampleType.find(1).samples[-1]
#
# # get Order Primer operation type
# ot = session.OperationType.find(328)
#
# # create an operation
# order_primer = ot.instance()
#
# # set io
# order_primer.set_output("Primer", sample=primer)
# order_primer.set_input("Urgent?", value="no")
#
# # create a new plan and add operations
# p = session.Plan(name="MyPlan")
# p.add_operation(order_primer)
#
# # save the plan
# p.create()
#
# # estimate the cost
# p.estimate_cost()
#
# # show the plan
# p.show()
#
# # submit the plan
# p.submit(session.current_user, session.current_user.budgets[0])
# def test_submit_pcr(session):
# def get_op(name):
# return session.OperationType.where(
# {'name': name, 'deployed': True})[-1].instance()
#
# make_pcr_fragment = get_op('Make PCR Fragment')
# pour_gel = get_op('Pour Gel')
# run_gel = get_op('Run Gel')
# extract_gel_slice = get_op('Extract Gel Slice')
# purify_gel = get_op('Purify Gel Slice')
#
# # setup pcr
# make_pcr_fragment.set_input('Forward Primer',
# item=session.Item.find(81867))
# make_pcr_fragment.set_input('Reverse Primer',
# item=session.Item.find(57949))
# make_pcr_fragment.set_input('Template', item=session.Item.find(61832))
# make_pcr_fragment.set_output('Fragment',
# sample=session.Sample.find(16976))
#
# # setup outputs
# # run_gel.set_output(sample=session.Sample.find(16976))
# # extract_gel_slice.set_output(sample=session.Sample.find(16976))
# # purify_gel.set_output(sample=session.Sample.find(16976))
# # purify_gel.pour_gel(sample=session.Sample.find(16976))
#
# # new plan
# p = session.fake_session.Plan.new()
# p.add_operations([make_pcr_fragment, pour_gel, run_gel,
# extract_gel_slice, purify_gel])
#
# p.add_wires([
# (make_pcr_fragment.output("Fragment"), run_gel.input("Fragment")),
# (pour_gel.output("Lane"), run_gel.input("Gel")),
# (run_gel.output("Fragment"), extract_gel_slice.input("Fragment")),
# (extract_gel_slice.output("Fragment"), purify_gel.input("Gel"))
# ])
#
# make_pcr_fragment.set_output("Fragment",
# sample=session.Sample.find(16976))
#
#
# pdata = p.to_save_json()
#
# # wire up the operations
# # p.wire(make_pcr_fragment.outputs[0], run_gel.input('Fragment'))
# # p.wire(pour_gel.outputs[0], run_gel.input('Gel'))
# # p.wire(run_gel.outputs[0], extract_gel_slice.input('Fragment'))
# # p.wire(extract_gel_slice.outputs[0], purify_gel.input('Gel'))
#
# # save the plan
# p.create()
#
# # estimate the cost
# p.estimate_cost()
#
# p.validate()
#
# # show the plan
# p.show()
#
# # submit the plan
# p.submit(session.current_user, session.current_user.budgets[0])
# # TODO: having difficulty patching plans/operations here...
# def test_replan(session):
#
# p = session.Plan.find(79797)
# newplan = p.replan()
# newplan.print()
#
# for op in newplan.operations:
# if op.operation_type.name == "Make PCR Fragment":
# op.set_input('Template', item=session.Item.find(57124))
# newplan.patch(newplan.to_save_json())
| [
"pytest.fixture",
"pydent.models.Plan"
] | [((979, 1011), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (993, 1011), False, 'import pytest\n'), ((248, 285), 'pydent.models.Plan', 'Plan', ([], {'name': '"""MyPlan"""', 'status': '"""running"""'}), "(name='MyPlan', status='running')\n", (252, 285), False, 'from pydent.models import Plan\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-06-08 22:10
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('gymkhana', '0006_form_available'),
]
operations = [
migrations.RemoveField(
model_name='form_available',
name='id',
),
migrations.AddField(
model_name='form_available',
name='roll',
field=models.CharField(default=2016001, max_length=7, primary_key=True, serialize=False),
),
]
| [
"django.db.migrations.RemoveField",
"django.db.models.CharField"
] | [((298, 360), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""form_available"""', 'name': '"""id"""'}), "(model_name='form_available', name='id')\n", (320, 360), False, 'from django.db import migrations, models\n'), ((510, 597), 'django.db.models.CharField', 'models.CharField', ([], {'default': '(2016001)', 'max_length': '(7)', 'primary_key': '(True)', 'serialize': '(False)'}), '(default=2016001, max_length=7, primary_key=True, serialize\n =False)\n', (526, 597), False, 'from django.db import migrations, models\n')] |
import os
import pygame
import random
trigger = False
x = 0
y = 0
height = 720
width = 1280
linelength = 50
lineAmt = 20
displace = 10
xpos = [random.randrange(-200,1280) for i in range(0, lineAmt + 2)]
xpos1 = [(xpos[i]+displace) for i in range(0, lineAmt + 2)]
xr = 360
yr = 240
def setup(screen, etc):
global trigger, x, y, height, width, xpos, lineAmt, xpos1, linelength, displace, xr, yr
xr = etc.xres
yr = etc.yres
height = yr
width = xr
linelength = ((50*xr)/1280)
lineAmt = ((20*xr)/1280)
displace = ((10*xr)/1280)
xpos = [random.randrange(int((-200*xr)/1280),xr) for i in range(0, lineAmt + 2)]
xpos1 = [(xpos[i]+displace) for i in range(0, lineAmt + 2)]
pass
def draw(screen, etc):
global trigger, x, y, height, width, xpos, lineAmt, xpos1, linelength, displace, xr, yr
etc.color_picker_bg(etc.knob5)
displace = ((10*xr)/1280)
linewidth = (height / lineAmt)
linelength = int(etc.knob2*((300*xr)/1280)+1)
color = etc.color_picker(etc.knob4)
minus = (etc.knob3*0.5)+0.5
shadowColor = (etc.bg_color[0]*minus, etc.bg_color[1]*minus, etc.bg_color[2]*minus)
if etc.audio_trig or etc.midi_note_new :
trigger = True
if trigger == True :
lineAmt = int(etc.knob1*((100*yr)/720) + 2)
xpos = [random.randrange(int((-200*xr)/1280),xr) for i in range(0, lineAmt + 2)]
xpos1 = [(xpos[i]+displace) for i in range(0, lineAmt + 2)]
for k in range(0, lineAmt + 2) :
x = xpos1[k] + linelength
y = (k * linewidth) + int(linewidth/2)- 1
pygame.draw.line(screen, shadowColor, (xpos1[k], y+displace), (x, y+displace), linewidth)
for j in range(0, lineAmt + 2) :
x = xpos[j] + linelength
y = (j * linewidth) + int(linewidth/2)- 1
pygame.draw.line(screen, color, (xpos[j], y), (x, y), linewidth)
trigger = False
| [
"pygame.draw.line",
"random.randrange"
] | [((144, 172), 'random.randrange', 'random.randrange', (['(-200)', '(1280)'], {}), '(-200, 1280)\n', (160, 172), False, 'import random\n'), ((1580, 1677), 'pygame.draw.line', 'pygame.draw.line', (['screen', 'shadowColor', '(xpos1[k], y + displace)', '(x, y + displace)', 'linewidth'], {}), '(screen, shadowColor, (xpos1[k], y + displace), (x, y +\n displace), linewidth)\n', (1596, 1677), False, 'import pygame\n'), ((1806, 1870), 'pygame.draw.line', 'pygame.draw.line', (['screen', 'color', '(xpos[j], y)', '(x, y)', 'linewidth'], {}), '(screen, color, (xpos[j], y), (x, y), linewidth)\n', (1822, 1870), False, 'import pygame\n')] |
# (C) Copyright 2010-2020 Enthought, Inc., Austin, TX
# All rights reserved.
import unittest
import sys
import os
from unittest import mock
from click.testing import CliRunner
import force_wfmanager.gui.run
from force_wfmanager.tests.dummy_classes.dummy_wfmanager import \
DummyWfManager
from force_wfmanager.version import __version__
def mock_run_constructor(*args, **kwargs):
mock_wf_run = mock.Mock(spec=force_wfmanager.gui.run)
mock_wf_run.main = lambda: None
class TestClickRun(unittest.TestCase):
def test_click_cli_version(self):
clirunner = CliRunner()
clirunner.invoke(force_wfmanager.gui.run.force_wfmanager,
args="--version")
def test_click_cli_main(self):
with mock.patch('force_wfmanager.gui.run') as mock_run:
mock_run.side_effect = mock_run_constructor
force_wfmanager.gui.run.force_wfmanager()
self.assertTrue(mock_run.force_wfmanager.called)
def test_run_with_debug(self):
with mock.patch('force_wfmanager.gui.run.WfManager') as mock_wf:
mock_wf.return_value = DummyWfManager()
force_wfmanager.gui.run.main(
window_size=(1650, 1080),
debug=True,
profile=False,
workflow_file=None
)
self.log = force_wfmanager.gui.run.logging.getLogger(__name__)
self.assertEqual(self.log.getEffectiveLevel(), 10)
def test_run_with_profile(self):
with mock.patch('force_wfmanager.gui.run.WfManager') as mock_wf:
mock_wf.return_value = DummyWfManager()
force_wfmanager.gui.run.main(
window_size=(1650, 1080), debug=False,
profile=True, workflow_file=None
)
root = ('force_wfmanager-{}-{}.{}.{}'
.format(__version__,
sys.version_info.major,
sys.version_info.minor,
sys.version_info.micro))
exts = ['.pstats', '.prof']
files_exist = [False] * len(exts)
for ind, ext in enumerate(exts):
files_exist[ind] = os.path.isfile(root + ext)
os.remove(root + ext)
self.assertTrue(all(files_exist))
| [
"unittest.mock.Mock",
"click.testing.CliRunner",
"os.path.isfile",
"force_wfmanager.tests.dummy_classes.dummy_wfmanager.DummyWfManager",
"unittest.mock.patch",
"os.remove"
] | [((408, 447), 'unittest.mock.Mock', 'mock.Mock', ([], {'spec': 'force_wfmanager.gui.run'}), '(spec=force_wfmanager.gui.run)\n', (417, 447), False, 'from unittest import mock\n'), ((584, 595), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (593, 595), False, 'from click.testing import CliRunner\n'), ((755, 792), 'unittest.mock.patch', 'mock.patch', (['"""force_wfmanager.gui.run"""'], {}), "('force_wfmanager.gui.run')\n", (765, 792), False, 'from unittest import mock\n'), ((1028, 1075), 'unittest.mock.patch', 'mock.patch', (['"""force_wfmanager.gui.run.WfManager"""'], {}), "('force_wfmanager.gui.run.WfManager')\n", (1038, 1075), False, 'from unittest import mock\n'), ((1123, 1139), 'force_wfmanager.tests.dummy_classes.dummy_wfmanager.DummyWfManager', 'DummyWfManager', ([], {}), '()\n', (1137, 1139), False, 'from force_wfmanager.tests.dummy_classes.dummy_wfmanager import DummyWfManager\n'), ((1521, 1568), 'unittest.mock.patch', 'mock.patch', (['"""force_wfmanager.gui.run.WfManager"""'], {}), "('force_wfmanager.gui.run.WfManager')\n", (1531, 1568), False, 'from unittest import mock\n'), ((1616, 1632), 'force_wfmanager.tests.dummy_classes.dummy_wfmanager.DummyWfManager', 'DummyWfManager', ([], {}), '()\n', (1630, 1632), False, 'from force_wfmanager.tests.dummy_classes.dummy_wfmanager import DummyWfManager\n'), ((2207, 2233), 'os.path.isfile', 'os.path.isfile', (['(root + ext)'], {}), '(root + ext)\n', (2221, 2233), False, 'import os\n'), ((2250, 2271), 'os.remove', 'os.remove', (['(root + ext)'], {}), '(root + ext)\n', (2259, 2271), False, 'import os\n')] |
from cadastrarJogador import cadastra_jogador
from cadastrarMonstros import cadastra_monstro
from atualizaJogador import atualiza
from combate import combate_iniciado
while True:
print('Bem vindo ao RPG selecione a opção desenjada')
print('[0] - Cadastrar Novo Jogador\n[1] - Atualizar Jogador\n[2] - Cadastrar Novo Monstro\n[3] Iniciar Combate\n[4]-Sair do sistema')
o = int(input('Entre com o numero da opção desejada: '))
if o == 0:
cadastra_jogador()
elif o == 1:
cadastra_monstro()
elif o == 2:
atualiza()
elif o == 3:
combate_iniciado()
elif o == 4:
break
else:
print('Opção invalida') | [
"combate.combate_iniciado",
"cadastrarJogador.cadastra_jogador",
"atualizaJogador.atualiza",
"cadastrarMonstros.cadastra_monstro"
] | [((460, 478), 'cadastrarJogador.cadastra_jogador', 'cadastra_jogador', ([], {}), '()\n', (476, 478), False, 'from cadastrarJogador import cadastra_jogador\n'), ((504, 522), 'cadastrarMonstros.cadastra_monstro', 'cadastra_monstro', ([], {}), '()\n', (520, 522), False, 'from cadastrarMonstros import cadastra_monstro\n'), ((548, 558), 'atualizaJogador.atualiza', 'atualiza', ([], {}), '()\n', (556, 558), False, 'from atualizaJogador import atualiza\n'), ((584, 602), 'combate.combate_iniciado', 'combate_iniciado', ([], {}), '()\n', (600, 602), False, 'from combate import combate_iniciado\n')] |
"""
Custom user model for deployments.
"""
import urllib
import hashlib
import base64
import random
from authtools.models import AbstractEmailUser
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.db.models.signals import post_save
from .managers import DeployUserManager
from plugin_manager.hosts.models import Host
from plugin_manager.accounts.model_managers import DeployUserActiveManager
from plugin_manager.core.mixins.models import TrackingFields
class DeployUser(AbstractEmailUser, TrackingFields):
"""
Custom user class for deployments. Email as username using
django-custom-user.
"""
AMELIA = 'amelia.min.css'
CERULEAN = 'cerulean.min.css'
COSMO = 'cosmo.min.css'
CYBORG = 'cyborg.min.css'
DARKLY = 'darkly.min.css'
FLATLY = 'flatly.min.css'
JOURNAL = 'journal.min.css'
LUMEN = 'lumen.min.css'
READABLE = 'readable.min.css'
SIMPLEX = 'simplex.min.css'
SLATE = 'slate.min.css'
SPACELAB = 'spacelab.min.css'
SUPERHERO = 'superhero.min.css'
UNITED = 'united.min.css'
YETI = 'yeti.min.css'
TEMPLATES = (
(AMELIA, 'Amelia'),
(CERULEAN, 'Cerulean'),
(COSMO, 'Cosmo'),
(CYBORG, 'Cyborg'),
(DARKLY, 'Darkly'),
(FLATLY, 'Flatly'),
(JOURNAL, 'Journal'),
(LUMEN, 'Lumen'),
(READABLE, 'Readable'),
(SIMPLEX, 'Simplex'),
(SLATE, 'Slate'),
(SPACELAB, 'Spacelab'),
(SUPERHERO, 'Superhero'),
(UNITED, 'United'),
(YETI, 'Yeti'),
)
active_records = DeployUserActiveManager()
first_name = models.CharField(_('first name'), max_length=30, blank=False)
last_name = models.CharField(_('last name'), max_length=30,
blank=False)
template = models.CharField(max_length=255, blank=True,
choices=TEMPLATES, default=YETI)
objects = DeployUserManager()
def __unicode__(self):
return u'{} {}'.format(self.first_name, self.last_name)
@property
def role(self):
"""
Assumes the user is only assigned to one role and return it
"""
return self.group_strigify()
def _get_groups(self):
if not hasattr(self, '_cached_groups'):
self._cached_groups = list(self.groups.values_list("name",
flat=True))
return self._cached_groups
def user_is_admin(self):
if not self.pk:
return False
return "Admin" in self._get_groups()
def user_is_deployer(self):
if not self.pk:
return False
return "Deployer" in self._get_groups()
def user_is_historian(self):
if not self.pk:
return False
return "Historian" in self._get_groups()
def group_strigify(self):
"""
Converts this user's group(s) to a string and returns it.
"""
return "/".join(self._get_groups())
def gravatar(self, size=20):
"""
Construct a gravatar image address for the user
"""
default = "mm"
gravatar_url = "http://www.gravatar.com/avatar/" + hashlib.md5(
self.email.lower()).hexdigest() + "?"
gravatar_url += urllib.urlencode({'d': default, 's': str(size)})
return gravatar_url
class APIKey(models.Model):
apikey = models.CharField(max_length=255, primary_key=True)
deployuser = models.ForeignKey(DeployUser)
class Meta:
unique_together = (("apikey", "deployuser"),)
class PermissionHost(models.Model):
user = models.ForeignKey(DeployUser)
host = models.ForeignKey(Host)
def __unicode__(self):
return u'User: {} Host: {}'.format(self.user, self.host)
def generate_APIKey(sender, instance, created, **kwargs):
if created:
apikey = APIKey()
apikey.apikey = base64.b64encode(hashlib.sha256(
str(random.getrandbits(256))).digest(),
random.choice(
['rA', 'aZ', 'gQ', 'hH', 'hG',
'aR', 'DD'])).rstrip('==')
apikey.deployuser = instance
apikey.save()
post_save.connect(generate_APIKey, sender=DeployUser)
| [
"plugin_manager.accounts.model_managers.DeployUserActiveManager",
"django.utils.translation.ugettext_lazy",
"random.choice",
"django.db.models.ForeignKey",
"django.db.models.signals.post_save.connect",
"random.getrandbits",
"django.db.models.CharField"
] | [((4300, 4353), 'django.db.models.signals.post_save.connect', 'post_save.connect', (['generate_APIKey'], {'sender': 'DeployUser'}), '(generate_APIKey, sender=DeployUser)\n', (4317, 4353), False, 'from django.db.models.signals import post_save\n'), ((1603, 1628), 'plugin_manager.accounts.model_managers.DeployUserActiveManager', 'DeployUserActiveManager', ([], {}), '()\n', (1626, 1628), False, 'from plugin_manager.accounts.model_managers import DeployUserActiveManager\n'), ((1834, 1911), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'blank': '(True)', 'choices': 'TEMPLATES', 'default': 'YETI'}), '(max_length=255, blank=True, choices=TEMPLATES, default=YETI)\n', (1850, 1911), False, 'from django.db import models\n'), ((3448, 3498), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'primary_key': '(True)'}), '(max_length=255, primary_key=True)\n', (3464, 3498), False, 'from django.db import models\n'), ((3516, 3545), 'django.db.models.ForeignKey', 'models.ForeignKey', (['DeployUser'], {}), '(DeployUser)\n', (3533, 3545), False, 'from django.db import models\n'), ((3665, 3694), 'django.db.models.ForeignKey', 'models.ForeignKey', (['DeployUser'], {}), '(DeployUser)\n', (3682, 3694), False, 'from django.db import models\n'), ((3706, 3729), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Host'], {}), '(Host)\n', (3723, 3729), False, 'from django.db import models\n'), ((1664, 1679), 'django.utils.translation.ugettext_lazy', '_', (['"""first name"""'], {}), "('first name')\n", (1665, 1679), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1742, 1756), 'django.utils.translation.ugettext_lazy', '_', (['"""last name"""'], {}), "('last name')\n", (1743, 1756), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4075, 4132), 'random.choice', 'random.choice', (["['rA', 'aZ', 'gQ', 'hH', 'hG', 'aR', 'DD']"], {}), "(['rA', 'aZ', 'gQ', 'hH', 'hG', 'aR', 'DD'])\n", (4088, 
4132), False, 'import random\n'), ((3998, 4021), 'random.getrandbits', 'random.getrandbits', (['(256)'], {}), '(256)\n', (4016, 4021), False, 'import random\n')] |
import tensorflow as tf
from tensorflow.contrib import slim
def head(endpoints, embedding_dim, is_training, weights_regularizer=None):
predict_var = 0
input = endpoints['model_output']
endpoints['head_output'] = slim.fully_connected(
input, 1024, normalizer_fn=slim.batch_norm,
normalizer_params={
'decay': 0.9,
'epsilon': 1e-5,
'scale': True,
'is_training': is_training,
'updates_collections': tf.GraphKeys.UPDATE_OPS,
},
weights_regularizer=weights_regularizer
)
input_1 = endpoints['head_output']
endpoints['emb_raw'] = slim.fully_connected(
input_1, embedding_dim + predict_var, activation_fn=None,weights_regularizer=weights_regularizer,
weights_initializer=tf.orthogonal_initializer(), scope='emb')
endpoints['emb'] = tf.nn.l2_normalize(endpoints['emb_raw'], -1)
# endpoints['data_sigma'] = None
print('Normalize batch embedding')
return endpoints
| [
"tensorflow.contrib.slim.fully_connected",
"tensorflow.nn.l2_normalize",
"tensorflow.orthogonal_initializer"
] | [((225, 486), 'tensorflow.contrib.slim.fully_connected', 'slim.fully_connected', (['input', '(1024)'], {'normalizer_fn': 'slim.batch_norm', 'normalizer_params': "{'decay': 0.9, 'epsilon': 1e-05, 'scale': True, 'is_training': is_training,\n 'updates_collections': tf.GraphKeys.UPDATE_OPS}", 'weights_regularizer': 'weights_regularizer'}), "(input, 1024, normalizer_fn=slim.batch_norm,\n normalizer_params={'decay': 0.9, 'epsilon': 1e-05, 'scale': True,\n 'is_training': is_training, 'updates_collections': tf.GraphKeys.\n UPDATE_OPS}, weights_regularizer=weights_regularizer)\n", (245, 486), False, 'from tensorflow.contrib import slim\n'), ((865, 909), 'tensorflow.nn.l2_normalize', 'tf.nn.l2_normalize', (["endpoints['emb_raw']", '(-1)'], {}), "(endpoints['emb_raw'], -1)\n", (883, 909), True, 'import tensorflow as tf\n'), ((798, 825), 'tensorflow.orthogonal_initializer', 'tf.orthogonal_initializer', ([], {}), '()\n', (823, 825), True, 'import tensorflow as tf\n')] |
from topbeat import BaseTest
import os
import shutil
import time
"""
Contains tests for base config
"""
class Test(BaseTest):
def test_invalid_config(self):
"""
Checks stop when input and topbeat defined
"""
shutil.copy("./config/topbeat-input-invalid.yml",
os.path.join(self.working_dir, "invalid.yml"))
exit_code = self.run_beat(config="invalid.yml", extra_args=["-N"])
assert exit_code == 1
assert self.log_contains(
"'topbeat' and 'input' are both set in config.") is True
def test_old_config(self):
"""
Test that old config still works with deprecation warning
"""
shutil.copy("./config/topbeat-old.yml",
os.path.join(self.working_dir, "topbeat-old.yml"))
topbeat = self.start_beat(config="topbeat-old.yml", extra_args=["-N"])
time.sleep(1)
topbeat.check_kill_and_wait()
assert self.log_contains(
"Using 'input' in configuration is deprecated and is scheduled to "
"be removed in Topbeat 6.0. Use 'topbeat' instead.") is True
| [
"os.path.join",
"time.sleep"
] | [((904, 917), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (914, 917), False, 'import time\n'), ((318, 363), 'os.path.join', 'os.path.join', (['self.working_dir', '"""invalid.yml"""'], {}), "(self.working_dir, 'invalid.yml')\n", (330, 363), False, 'import os\n'), ((765, 814), 'os.path.join', 'os.path.join', (['self.working_dir', '"""topbeat-old.yml"""'], {}), "(self.working_dir, 'topbeat-old.yml')\n", (777, 814), False, 'import os\n')] |
# -*- coding: utf-8 -*-
import asyncio
import enum
import json
import logging
import random
import time
import uuid
from typing import *
import aiohttp
import tornado.websocket
import api.base
import blivedm.blivedm as blivedm
import config
import models.avatar
import models.translate
import models.log
logger = logging.getLogger(__name__)
class Command(enum.IntEnum):
HEARTBEAT = 0
JOIN_ROOM = 1
ADD_TEXT = 2
ADD_GIFT = 3
ADD_MEMBER = 4
ADD_SUPER_CHAT = 5
DEL_SUPER_CHAT = 6
UPDATE_TRANSLATION = 7
_http_session = aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=10))
room_manager: Optional['RoomManager'] = None
def init():
global room_manager
room_manager = RoomManager()
class Room(blivedm.BLiveClient):
HEARTBEAT_INTERVAL = 10
# 重新定义parse_XXX是为了减少对字段名的依赖,防止B站改字段名
def __parse_danmaku(self, command):
info = command['info']
if info[3]:
room_id = info[3][3]
medal_level = info[3][0]
else:
room_id = medal_level = 0
return self._on_receive_danmaku(blivedm.DanmakuMessage(
None, None, None, info[0][4], None, None, info[0][9], None,
info[1],
info[2][0], info[2][1], info[2][2], None, None, info[2][5], info[2][6], None,
medal_level, None, None, room_id, None, None,
info[4][0], None, None,
None, None,
info[7]
))
def __parse_gift(self, command):
data = command['data']
return self._on_receive_gift(blivedm.GiftMessage(
data['giftName'], data['num'], data['uname'], data['face'], None,
data['uid'], data['timestamp'], None, None,
None, None, None, data['coin_type'], data['total_coin']
))
def __parse_buy_guard(self, command):
data = command['data']
return self._on_buy_guard(blivedm.GuardBuyMessage(
data['uid'], data['username'], data['guard_level'], None, None,
None, None, data['start_time'], None
))
def __parse_super_chat(self, command):
data = command['data']
return self._on_super_chat(blivedm.SuperChatMessage(
data['price'], data['message'], None, data['start_time'],
None, None, data['id'], None,
None, data['uid'], data['user_info']['uname'],
data['user_info']['face'], None,
None, None,
None, None, None,
None
))
_COMMAND_HANDLERS = {
**blivedm.BLiveClient._COMMAND_HANDLERS,
'DANMU_MSG': __parse_danmaku,
'SEND_GIFT': __parse_gift,
'GUARD_BUY': __parse_buy_guard,
'SUPER_CHAT_MESSAGE': __parse_super_chat
}
def __init__(self, room_id):
super().__init__(room_id, session=_http_session, heartbeat_interval=self.HEARTBEAT_INTERVAL)
self.clients: List['ChatHandler'] = []
self.auto_translate_count = 0
async def init_room(self):
await super().init_room()
return True
def stop_and_close(self):
if self.is_running:
future = self.stop()
future.add_done_callback(lambda _future: asyncio.ensure_future(self.close()))
else:
asyncio.ensure_future(self.close())
def send_message(self, cmd, data):
body = json.dumps({'cmd': cmd, 'data': data})
models.log.add_danmaku(self.room_id, body)
for client in self.clients:
try:
client.write_message(body)
except tornado.websocket.WebSocketClosedError:
room_manager.del_client(self.room_id, client)
def send_message_if(self, can_send_func: Callable[['ChatHandler'], bool], cmd, data):
body = json.dumps({'cmd': cmd, 'data': data})
for client in filter(can_send_func, self.clients):
try:
client.write_message(body)
except tornado.websocket.WebSocketClosedError:
room_manager.del_client(self.room_id, client)
async def _on_receive_danmaku(self, danmaku: blivedm.DanmakuMessage):
asyncio.ensure_future(self.__on_receive_danmaku(danmaku))
async def __on_receive_danmaku(self, danmaku: blivedm.DanmakuMessage):
if danmaku.uid == self.room_owner_uid:
author_type = 3 # 主播
elif danmaku.admin:
author_type = 2 # 房管
elif danmaku.privilege_type != 0: # 1总督,2提督,3舰长
author_type = 1 # 舰队
else:
author_type = 0
need_translate = self._need_translate(danmaku.msg)
if need_translate:
translation = models.translate.get_translation_from_cache(danmaku.msg)
if translation is None:
# 没有缓存,需要后面异步翻译后通知
translation = ''
else:
need_translate = False
else:
translation = ''
id_ = uuid.uuid4().hex
# 为了节省带宽用list而不是dict
self.send_message(Command.ADD_TEXT, make_text_message(
await models.avatar.get_avatar_url(danmaku.uid),
int(danmaku.timestamp / 1000),
danmaku.uname,
author_type,
danmaku.msg,
danmaku.privilege_type,
danmaku.msg_type,
danmaku.user_level,
danmaku.urank < 10000,
danmaku.mobile_verify,
0 if danmaku.room_id != self.room_id else danmaku.medal_level,
id_,
translation
))
if need_translate:
await self._translate_and_response(danmaku.msg, id_)
async def _on_receive_gift(self, gift: blivedm.GiftMessage):
avatar_url = models.avatar.process_avatar_url(gift.face)
models.avatar.update_avatar_cache(gift.uid, avatar_url)
if gift.coin_type != 'gold': # 丢人
return
id_ = uuid.uuid4().hex
self.send_message(Command.ADD_GIFT, {
'id': id_,
'avatarUrl': avatar_url,
'timestamp': gift.timestamp,
'authorName': gift.uname,
'totalCoin': gift.total_coin,
'giftName': gift.gift_name,
'num': gift.num
})
async def _on_buy_guard(self, message: blivedm.GuardBuyMessage):
asyncio.ensure_future(self.__on_buy_guard(message))
async def __on_buy_guard(self, message: blivedm.GuardBuyMessage):
id_ = uuid.uuid4().hex
self.send_message(Command.ADD_MEMBER, {
'id': id_,
'avatarUrl': await models.avatar.get_avatar_url(message.uid),
'timestamp': message.start_time,
'authorName': message.username,
'privilegeType': message.guard_level
})
async def _on_super_chat(self, message: blivedm.SuperChatMessage):
avatar_url = models.avatar.process_avatar_url(message.face)
models.avatar.update_avatar_cache(message.uid, avatar_url)
need_translate = self._need_translate(message.message)
if need_translate:
translation = models.translate.get_translation_from_cache(message.message)
if translation is None:
# 没有缓存,需要后面异步翻译后通知
translation = ''
else:
need_translate = False
else:
translation = ''
id_ = str(message.id)
self.send_message(Command.ADD_SUPER_CHAT, {
'id': id_,
'avatarUrl': avatar_url,
'timestamp': message.start_time,
'authorName': message.uname,
'price': message.price,
'content': message.message,
'translation': translation
})
if need_translate:
asyncio.ensure_future(self._translate_and_response(message.message, id_))
async def _on_super_chat_delete(self, message: blivedm.SuperChatDeleteMessage):
self.send_message(Command.ADD_SUPER_CHAT, {
'ids': list(map(str, message.ids))
})
def _need_translate(self, text):
cfg = config.get_config()
return (
cfg.enable_translate
and (not cfg.allow_translate_rooms or self.room_id in cfg.allow_translate_rooms)
and self.auto_translate_count > 0
and models.translate.need_translate(text)
)
async def _translate_and_response(self, text, msg_id):
translation = await models.translate.translate(text)
if translation is None:
return
self.send_message_if(
lambda client: client.auto_translate,
Command.UPDATE_TRANSLATION, make_translation_message(
msg_id,
translation
)
)
def make_text_message(avatar_url, timestamp, author_name, author_type, content, privilege_type,
is_gift_danmaku, author_level, is_newbie, is_mobile_verified, medal_level,
id_, translation):
return [
# 0: avatarUrl
avatar_url,
# 1: timestamp
timestamp,
# 2: authorName
author_name,
# 3: authorType
author_type,
# 4: content
content,
# 5: privilegeType
privilege_type,
# 6: isGiftDanmaku
1 if is_gift_danmaku else 0,
# 7: authorLevel
author_level,
# 8: isNewbie
1 if is_newbie else 0,
# 9: isMobileVerified
1 if is_mobile_verified else 0,
# 10: medalLevel
medal_level,
# 11: id
id_,
# 12: translation
translation
]
def make_translation_message(msg_id, translation):
return [
# 0: id
msg_id,
# 1: translation
translation
]
class RoomManager:
def __init__(self):
self._rooms: Dict[int, Room] = {}
async def get_room(self, room_id):
if room_id not in self._rooms:
if not await self._add_room(room_id):
return
room = self._rooms.get(room_id, None)
return room
async def add_client(self, room_id, client: 'ChatHandler'):
if room_id not in self._rooms:
if not await self._add_room(room_id):
client.close()
return
room = self._rooms.get(room_id, None)
if room is None:
return
room.clients.append(client)
logger.info('%d clients in room %s', len(room.clients), room_id)
if client.auto_translate:
room.auto_translate_count += 1
await client.on_join_room()
def del_client(self, room_id, client: 'ChatHandler'):
room = self._rooms.get(room_id, None)
if room is None:
return
try:
room.clients.remove(client)
except ValueError:
# _add_room未完成,没有执行到room.clients.append
pass
else:
logger.info('%d clients in room %s', len(room.clients), room_id)
if client.auto_translate:
room.auto_translate_count = max(0, room.auto_translate_count - 1)
if not room.clients:
self._del_room(room_id)
async def _add_room(self, room_id):
if room_id in self._rooms:
return True
logger.info('Creating room %d', room_id)
self._rooms[room_id] = room = Room(room_id)
if await room.init_room():
# start new log file
room.start()
logger.info('%d rooms', len(self._rooms))
return True
else:
self._del_room(room_id)
return False
def _del_room(self, room_id):
room = self._rooms.get(room_id, None)
if room is None:
return
logger.info('Removing room %d', room_id)
for client in room.clients:
client.close()
room.stop_and_close()
self._rooms.pop(room_id, None)
logger.info('%d rooms', len(self._rooms))
# noinspection PyAbstractClass
class ChatHandler(tornado.websocket.WebSocketHandler):
HEARTBEAT_INTERVAL = 10
RECEIVE_TIMEOUT = HEARTBEAT_INTERVAL + 5
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._heartbeat_timer_handle = None
self._receive_timeout_timer_handle = None
self.room_id = None
self.auto_translate = False
def open(self):
logger.info('Websocket connected %s', self.request.remote_ip)
self._heartbeat_timer_handle = asyncio.get_event_loop().call_later(
self.HEARTBEAT_INTERVAL, self._on_send_heartbeat
)
self._refresh_receive_timeout_timer()
def _on_send_heartbeat(self):
self.send_message(Command.HEARTBEAT, {})
self._heartbeat_timer_handle = asyncio.get_event_loop().call_later(
self.HEARTBEAT_INTERVAL, self._on_send_heartbeat
)
def _refresh_receive_timeout_timer(self):
if self._receive_timeout_timer_handle is not None:
self._receive_timeout_timer_handle.cancel()
self._receive_timeout_timer_handle = asyncio.get_event_loop().call_later(
self.RECEIVE_TIMEOUT, self._on_receive_timeout
)
def _on_receive_timeout(self):
logger.warning('Client %s timed out', self.request.remote_ip)
self._receive_timeout_timer_handle = None
self.close()
def on_close(self):
logger.info('Websocket disconnected %s room: %s', self.request.remote_ip, str(self.room_id))
if self.has_joined_room:
room_manager.del_client(self.room_id, self)
if self._heartbeat_timer_handle is not None:
self._heartbeat_timer_handle.cancel()
self._heartbeat_timer_handle = None
if self._receive_timeout_timer_handle is not None:
self._receive_timeout_timer_handle.cancel()
self._receive_timeout_timer_handle = None
def on_message(self, message):
try:
# 超时没有加入房间也断开
if self.has_joined_room:
self._refresh_receive_timeout_timer()
body = json.loads(message)
cmd = body['cmd']
if cmd == Command.HEARTBEAT:
pass
elif cmd == Command.JOIN_ROOM:
if self.has_joined_room:
return
self._refresh_receive_timeout_timer()
self.room_id = int(body['data']['roomId'])
logger.info('Client %s is joining room %d', self.request.remote_ip, self.room_id)
try:
cfg = body['data']['config']
self.auto_translate = cfg['autoTranslate']
except KeyError:
pass
asyncio.ensure_future(room_manager.add_client(self.room_id, self))
else:
logger.warning('Unknown cmd, client: %s, cmd: %d, body: %s', self.request.remote_ip, cmd, body)
except Exception:
logger.exception('on_message error, client: %s, message: %s', self.request.remote_ip, message)
# 跨域测试用
def check_origin(self, origin):
if self.application.settings['debug']:
return True
return super().check_origin(origin)
@property
def has_joined_room(self):
return self.room_id is not None
def send_message(self, cmd, data):
body = json.dumps({'cmd': cmd, 'data': data})
try:
self.write_message(body)
except tornado.websocket.WebSocketClosedError:
self.close()
async def on_join_room(self):
if self.application.settings['debug']:
await self.send_test_message()
# 不允许自动翻译的提示
if self.auto_translate:
cfg = config.get_config()
if cfg.allow_translate_rooms and self.room_id not in cfg.allow_translate_rooms:
self.send_message(Command.ADD_TEXT, make_text_message(
models.avatar.DEFAULT_AVATAR_URL,
int(time.time()),
'blivechat',
2,
'Translation is not allowed in this room. Please download to use translation',
0,
False,
60,
False,
True,
0,
uuid.uuid4().hex,
''
))
# 测试用
async def send_test_message(self):
base_data = {
'avatarUrl': await models.avatar.get_avatar_url(300474),
'timestamp': int(time.time()),
'authorName': 'xfgryujk',
}
text_data = make_text_message(
base_data['avatarUrl'],
base_data['timestamp'],
base_data['authorName'],
0,
'我能吞下玻璃而不伤身体',
0,
False,
20,
False,
True,
0,
uuid.uuid4().hex,
''
)
member_data = {
**base_data,
'id': uuid.uuid4().hex,
'privilegeType': 3
}
gift_data = {
**base_data,
'id': uuid.uuid4().hex,
'totalCoin': 450000,
'giftName': '摩天大楼',
'num': 1
}
sc_data = {
**base_data,
'id': str(random.randint(1, 65535)),
'price': 30,
'content': 'The quick brown fox jumps over the lazy dog',
'translation': ''
}
self.send_message(Command.ADD_TEXT, text_data)
text_data[2] = '主播'
text_data[3] = 3
text_data[4] = "I can eat glass, it doesn't hurt me."
text_data[11] = uuid.uuid4().hex
self.send_message(Command.ADD_TEXT, text_data)
self.send_message(Command.ADD_MEMBER, member_data)
self.send_message(Command.ADD_SUPER_CHAT, sc_data)
sc_data['id'] = str(random.randint(1, 65535))
sc_data['price'] = 100
sc_data['content'] = '敏捷的棕色狐狸跳过了懒狗'
self.send_message(Command.ADD_SUPER_CHAT, sc_data)
# self.send_message(Command.DEL_SUPER_CHAT, {'ids': [sc_data['id']]})
self.send_message(Command.ADD_GIFT, gift_data)
gift_data['id'] = uuid.uuid4().hex
gift_data['totalCoin'] = 1245000
gift_data['giftName'] = '小电视飞船'
self.send_message(Command.ADD_GIFT, gift_data)
# noinspection PyAbstractClass
class RoomInfoHandler(api.base.ApiHandler):
_host_server_list_cache = blivedm.DEFAULT_DANMAKU_SERVER_LIST
async def get(self):
room_id = int(self.get_query_argument('roomId'))
logger.info('Client %s is getting room info %d', self.request.remote_ip, room_id)
room_id, owner_uid = await self._get_room_info(room_id)
host_server_list = await self._get_server_host_list(room_id)
if owner_uid == 0:
# 缓存3分钟
self.set_header('Cache-Control', 'private, max-age=180')
else:
# 缓存1天
self.set_header('Cache-Control', 'private, max-age=86400')
self.write({
'roomId': room_id,
'ownerUid': owner_uid,
'hostServerList': host_server_list
})
@staticmethod
async def _get_room_info(room_id):
try:
async with _http_session.get(blivedm.ROOM_INIT_URL, params={'room_id': room_id}
) as res:
if res.status != 200:
logger.warning('room %d _get_room_info failed: %d %s', room_id,
res.status, res.reason)
return room_id, 0
data = await res.json()
except (aiohttp.ClientConnectionError, asyncio.TimeoutError):
logger.exception('room %d _get_room_info failed', room_id)
return room_id, 0
if data['code'] != 0:
logger.warning('room %d _get_room_info failed: %s', room_id, data['message'])
return room_id, 0
room_info = data['data']['room_info']
return room_info['room_id'], room_info['uid']
@classmethod
async def _get_server_host_list(cls, _room_id):
return cls._host_server_list_cache
# 连接其他host必须要key
# try:
# async with _http_session.get(blivedm.DANMAKU_SERVER_CONF_URL, params={'id': room_id, 'type': 0}
# ) as res:
# if res.status != 200:
# logger.warning('room %d _get_server_host_list failed: %d %s', room_id,
# res.status, res.reason)
# return cls._host_server_list_cache
# data = await res.json()
# except (aiohttp.ClientConnectionError, asyncio.TimeoutError):
# logger.exception('room %d _get_server_host_list failed', room_id)
# return cls._host_server_list_cache
#
# if data['code'] != 0:
# logger.warning('room %d _get_server_host_list failed: %s', room_id, data['message'])
# return cls._host_server_list_cache
#
# host_server_list = data['data']['host_list']
# if not host_server_list:
# logger.warning('room %d _get_server_host_list failed: host_server_list is empty')
# return cls._host_server_list_cache
#
# cls._host_server_list_cache = host_server_list
# return host_server_list
# noinspection PyAbstractClass
class AvatarHandler(api.base.ApiHandler):
async def get(self):
uid = int(self.get_query_argument('uid'))
avatar_url = await models.avatar.get_avatar_url_or_none(uid)
if avatar_url is None:
avatar_url = models.avatar.DEFAULT_AVATAR_URL
# 缓存3分钟
self.set_header('Cache-Control', 'private, max-age=180')
else:
# 缓存1天
self.set_header('Cache-Control', 'private, max-age=86400')
self.write({
'avatarUrl': avatar_url
})
# noinspection PyAbstractClass
# handle reply message
class ReplyHandler(api.base.ApiHandler):
def get(self):
self.write('pong')
async def post(self):
uid = None if self.json_args['uid'] == -1 else self.json_args['uid']
avatar_url = await models.avatar.get_avatar_url(uid)
text_message = make_text_message(
avatar_url=avatar_url,
timestamp=int(time.time()),
author_name=self.json_args['name'],
author_type=3,
content=self.json_args['content'],
author_level=0,
id_=uuid.uuid4().hex,
privilege_type=0,
is_newbie=0,
is_gift_danmaku=0,
is_mobile_verified=True,
medal_level=0,
translation=0
)
# get room
room: Room = await room_manager.get_room(room_id=self.json_args['room_id'])
room.send_message(Command.ADD_TEXT, text_message)
| [
"logging.getLogger",
"blivedm.blivedm.GuardBuyMessage",
"json.loads",
"blivedm.blivedm.GiftMessage",
"json.dumps",
"uuid.uuid4",
"aiohttp.ClientTimeout",
"config.get_config",
"blivedm.blivedm.DanmakuMessage",
"blivedm.blivedm.SuperChatMessage",
"asyncio.get_event_loop",
"time.time",
"random.... | [((316, 343), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (333, 343), False, 'import logging\n'), ((585, 616), 'aiohttp.ClientTimeout', 'aiohttp.ClientTimeout', ([], {'total': '(10)'}), '(total=10)\n', (606, 616), False, 'import aiohttp\n'), ((3341, 3379), 'json.dumps', 'json.dumps', (["{'cmd': cmd, 'data': data}"], {}), "({'cmd': cmd, 'data': data})\n", (3351, 3379), False, 'import json\n'), ((3754, 3792), 'json.dumps', 'json.dumps', (["{'cmd': cmd, 'data': data}"], {}), "({'cmd': cmd, 'data': data})\n", (3764, 3792), False, 'import json\n'), ((8018, 8037), 'config.get_config', 'config.get_config', ([], {}), '()\n', (8035, 8037), False, 'import config\n'), ((15323, 15361), 'json.dumps', 'json.dumps', (["{'cmd': cmd, 'data': data}"], {}), "({'cmd': cmd, 'data': data})\n", (15333, 15361), False, 'import json\n'), ((1093, 1366), 'blivedm.blivedm.DanmakuMessage', 'blivedm.DanmakuMessage', (['None', 'None', 'None', 'info[0][4]', 'None', 'None', 'info[0][9]', 'None', 'info[1]', 'info[2][0]', 'info[2][1]', 'info[2][2]', 'None', 'None', 'info[2][5]', 'info[2][6]', 'None', 'medal_level', 'None', 'None', 'room_id', 'None', 'None', 'info[4][0]', 'None', 'None', 'None', 'None', 'info[7]'], {}), '(None, None, None, info[0][4], None, None, info[0][9],\n None, info[1], info[2][0], info[2][1], info[2][2], None, None, info[2][\n 5], info[2][6], None, medal_level, None, None, room_id, None, None,\n info[4][0], None, None, None, None, info[7])\n', (1115, 1366), True, 'import blivedm.blivedm as blivedm\n'), ((1555, 1750), 'blivedm.blivedm.GiftMessage', 'blivedm.GiftMessage', (["data['giftName']", "data['num']", "data['uname']", "data['face']", 'None', "data['uid']", "data['timestamp']", 'None', 'None', 'None', 'None', 'None', "data['coin_type']", "data['total_coin']"], {}), "(data['giftName'], data['num'], data['uname'], data[\n 'face'], None, data['uid'], data['timestamp'], None, None, None, None,\n None, data['coin_type'], 
data['total_coin'])\n", (1574, 1750), True, 'import blivedm.blivedm as blivedm\n'), ((1897, 2026), 'blivedm.blivedm.GuardBuyMessage', 'blivedm.GuardBuyMessage', (["data['uid']", "data['username']", "data['guard_level']", 'None', 'None', 'None', 'None', "data['start_time']", 'None'], {}), "(data['uid'], data['username'], data['guard_level'],\n None, None, None, None, data['start_time'], None)\n", (1920, 2026), True, 'import blivedm.blivedm as blivedm\n'), ((2168, 2410), 'blivedm.blivedm.SuperChatMessage', 'blivedm.SuperChatMessage', (["data['price']", "data['message']", 'None', "data['start_time']", 'None', 'None', "data['id']", 'None', 'None', "data['uid']", "data['user_info']['uname']", "data['user_info']['face']", 'None', 'None', 'None', 'None', 'None', 'None', 'None'], {}), "(data['price'], data['message'], None, data[\n 'start_time'], None, None, data['id'], None, None, data['uid'], data[\n 'user_info']['uname'], data['user_info']['face'], None, None, None,\n None, None, None, None)\n", (2192, 2410), True, 'import blivedm.blivedm as blivedm\n'), ((4915, 4927), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (4925, 4927), False, 'import uuid\n'), ((5864, 5876), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (5874, 5876), False, 'import uuid\n'), ((6402, 6414), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (6412, 6414), False, 'import uuid\n'), ((14045, 14064), 'json.loads', 'json.loads', (['message'], {}), '(message)\n', (14055, 14064), False, 'import json\n'), ((15689, 15708), 'config.get_config', 'config.get_config', ([], {}), '()\n', (15706, 15708), False, 'import config\n'), ((17656, 17668), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (17666, 17668), False, 'import uuid\n'), ((17874, 17898), 'random.randint', 'random.randint', (['(1)', '(65535)'], {}), '(1, 65535)\n', (17888, 17898), False, 'import random\n'), ((18193, 18205), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (18203, 18205), False, 'import uuid\n'), ((12449, 12473), 'asyncio.get_event_loop', 
'asyncio.get_event_loop', ([], {}), '()\n', (12471, 12473), False, 'import asyncio\n'), ((12726, 12750), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (12748, 12750), False, 'import asyncio\n'), ((13041, 13065), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (13063, 13065), False, 'import asyncio\n'), ((16519, 16530), 'time.time', 'time.time', ([], {}), '()\n', (16528, 16530), False, 'import time\n'), ((16885, 16897), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (16895, 16897), False, 'import uuid\n'), ((16995, 17007), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (17005, 17007), False, 'import uuid\n'), ((17119, 17131), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (17129, 17131), False, 'import uuid\n'), ((17300, 17324), 'random.randint', 'random.randint', (['(1)', '(65535)'], {}), '(1, 65535)\n', (17314, 17324), False, 'import random\n'), ((22383, 22394), 'time.time', 'time.time', ([], {}), '()\n', (22392, 22394), False, 'import time\n'), ((22563, 22575), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (22573, 22575), False, 'import uuid\n'), ((15950, 15961), 'time.time', 'time.time', ([], {}), '()\n', (15959, 15961), False, 'import time\n'), ((16289, 16301), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (16299, 16301), False, 'import uuid\n')] |
import argparse
import os
import sys
import time
import torch
import torch.nn.functional as F
import torchvision
import models, lib
cfg = lib.config.BaseConfig()
cfg.parse()
print('Preparing model')
gen_model = cfg.gen_function(
upsample=cfg.upsample,
map_size=cfg.map_size,
out_dim=cfg.out_dim)
disc_model = cfg.disc_function(
downsample=cfg.downsample,
in_dim=cfg.out_dim)
if cfg.num_gpu > 1:
gen_model = torch.nn.DataParallel(gen_model)
disc_model = torch.nn.DataParallel(disc_model)
gen_model.cuda()
disc_model.cuda()
print(gen_model)
print(disc_model)
print("=> Generator")
print(gen_model)
print("=> Discriminator")
print(disc_model)
if cfg.args.delayed_batch_size > -1:
trainer = lib.train.DelayLBSTrainer(gen_model=gen_model, disc_model=disc_model, dataloader=cfg.dl, cfg=cfg)
else:
trainer = lib.train.BaseGANTrainer(gen_model=gen_model, disc_model=disc_model, dataloader=cfg.dl, cfg=cfg)
trainer.train()
| [
"lib.train.BaseGANTrainer",
"lib.config.BaseConfig",
"torch.nn.DataParallel",
"lib.train.DelayLBSTrainer"
] | [((139, 162), 'lib.config.BaseConfig', 'lib.config.BaseConfig', ([], {}), '()\n', (160, 162), False, 'import models, lib\n'), ((433, 465), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['gen_model'], {}), '(gen_model)\n', (454, 465), False, 'import torch\n'), ((483, 516), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['disc_model'], {}), '(disc_model)\n', (504, 516), False, 'import torch\n'), ((722, 823), 'lib.train.DelayLBSTrainer', 'lib.train.DelayLBSTrainer', ([], {'gen_model': 'gen_model', 'disc_model': 'disc_model', 'dataloader': 'cfg.dl', 'cfg': 'cfg'}), '(gen_model=gen_model, disc_model=disc_model,\n dataloader=cfg.dl, cfg=cfg)\n', (747, 823), False, 'import models, lib\n'), ((840, 940), 'lib.train.BaseGANTrainer', 'lib.train.BaseGANTrainer', ([], {'gen_model': 'gen_model', 'disc_model': 'disc_model', 'dataloader': 'cfg.dl', 'cfg': 'cfg'}), '(gen_model=gen_model, disc_model=disc_model,\n dataloader=cfg.dl, cfg=cfg)\n', (864, 940), False, 'import models, lib\n')] |
from flask_restful import Resource
from server.platform_properties import PLATFORM_PROPERTIES
from server.resources.models.platform_properties import PlatformPropertiesSchema
from server.resources.decorators import marshal_response
class Platform(Resource):
@marshal_response(PlatformPropertiesSchema())
def get(self):
return PlatformPropertiesSchema().load(PLATFORM_PROPERTIES).data
| [
"server.resources.models.platform_properties.PlatformPropertiesSchema"
] | [((282, 308), 'server.resources.models.platform_properties.PlatformPropertiesSchema', 'PlatformPropertiesSchema', ([], {}), '()\n', (306, 308), False, 'from server.resources.models.platform_properties import PlatformPropertiesSchema\n'), ((344, 370), 'server.resources.models.platform_properties.PlatformPropertiesSchema', 'PlatformPropertiesSchema', ([], {}), '()\n', (368, 370), False, 'from server.resources.models.platform_properties import PlatformPropertiesSchema\n')] |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 <NAME>.
#
# Invenio-Flow is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Useful decorators."""
from celery import shared_task
from .api import Task
def task(*args, **kwargs):
"""Wrapper around shared task to set default base class."""
kwargs.setdefault('base', Task)
return shared_task(*args, **kwargs)
| [
"celery.shared_task"
] | [((426, 454), 'celery.shared_task', 'shared_task', (['*args'], {}), '(*args, **kwargs)\n', (437, 454), False, 'from celery import shared_task\n')] |
import sys
import os
import matplotlib.pylab as plt
import numpy as np
import mpl_toolkits.mplot3d.axes3d as p3
import seaborn as sea
import torch
from TorchProteinLibrary import FullAtomModel
if __name__=='__main__':
# p2c = FullAtomModel.PDB2Coords.PDB2CoordsBiopython()
p2c = FullAtomModel.PDB2CoordsUnordered()
coords, res, anames, num_atoms = p2c(["f4TQ1_B.pdb"])
print (coords.size())
print (res.size())
print (anames.size())
print (num_atoms)
coords = coords.numpy()
coords = coords.reshape(int(coords.shape[1]/3), 3)
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
x = coords[:,0]
y = coords[:,1]
z = coords[:,2]
ax.scatter(x,y,z)
plt.show()
| [
"TorchProteinLibrary.FullAtomModel.PDB2CoordsUnordered",
"matplotlib.pylab.show",
"matplotlib.pylab.figure"
] | [((290, 325), 'TorchProteinLibrary.FullAtomModel.PDB2CoordsUnordered', 'FullAtomModel.PDB2CoordsUnordered', ([], {}), '()\n', (323, 325), False, 'from TorchProteinLibrary import FullAtomModel\n'), ((575, 587), 'matplotlib.pylab.figure', 'plt.figure', ([], {}), '()\n', (585, 587), True, 'import matplotlib.pylab as plt\n'), ((721, 731), 'matplotlib.pylab.show', 'plt.show', ([], {}), '()\n', (729, 731), True, 'import matplotlib.pylab as plt\n')] |
"""
Copyright (c) Facebook, Inc. and its affiliates.
"""
import logging
import queue
from multiprocessing import Queue, Process
import sys
import os
from mc_memory_nodes import InstSegNode, PropSegNode
from heuristic_perception import all_nearby_objects
from shapes import get_bounds
VISION_DIR = os.path.dirname(os.path.realpath(__file__))
CRAFTASSIST_DIR = os.path.join(VISION_DIR, "../")
SEMSEG_DIR = os.path.join(VISION_DIR, "semantic_segmentation/")
sys.path.append(CRAFTASSIST_DIR)
sys.path.append(SEMSEG_DIR)
import build_utils as bu
from semseg_models import SemSegWrapper
# TODO all "subcomponent" operations are replaced with InstSeg
class SubcomponentClassifierWrapper:
def __init__(self, agent, model_path, vocab_path, perceive_freq=0):
self.agent = agent
self.memory = self.agent.memory
self.perceive_freq = perceive_freq
self.true_temp = 1
if model_path is not None:
self.subcomponent_classifier = SubComponentClassifier(
voxel_model_path=model_path, vocab_path=vocab_path,
)
self.subcomponent_classifier.start()
else:
self.subcomponent_classifier = None
def perceive(self, force=False):
if self.perceive_freq == 0 and not force:
return
if self.perceive_freq > 0 and self.agent.count % self.perceive_freq != 0 and not force:
return
if self.subcomponent_classifier is None:
return
# TODO don't all_nearby_objects again, search in memory instead
to_label = []
# add all blocks in marked areas
for pos, radius in self.agent.areas_to_perceive:
for obj in all_nearby_objects(self.agent.get_blocks, pos, radius):
to_label.append(obj)
# add all blocks near the agent
for obj in all_nearby_objects(self.agent.get_blocks, self.agent.pos):
to_label.append(obj)
for obj in to_label: # (6, 69, 11) in [b[0] for b in obj]
self.subcomponent_classifier.block_objs_q.put(obj)
# everytime we try to retrieve as many recognition results as possible
while not self.subcomponent_classifier.loc2labels_q.empty():
loc2labels, obj = self.subcomponent_classifier.loc2labels_q.get() # (6, 69, 11) in [b[0] for b in obj]
loc2ids = dict(obj)
label2blocks = {}
def contaminated(blocks):
"""
Check if blocks are still consistent with the current world
"""
mx, Mx, my, My, mz, Mz = get_bounds(blocks)
yzxb = self.agent.get_blocks(mx, Mx, my, My, mz, Mz)
for b, _ in blocks:
x, y, z = b
if loc2ids[b][0] != yzxb[y - my, z - mz, x - mx, 0]:
return True
return False
for loc, labels in loc2labels.items():
b = (loc, loc2ids[loc])
for l in labels:
if l in label2blocks:
label2blocks[l].append(b)
else:
label2blocks[l] = [b]
labels_str = " ".join(list(label2blocks.keys()))
if len(labels_str) == 1:
self.agent.send_chat(
"I found this in the scene: " + labels_str
)
elif len(labels_str) > 1:
self.agent.send_chat(
"I found these in the scene: " + labels_str
)
for l, blocks in label2blocks.items():
## if the blocks are contaminated we just ignore
if not contaminated(blocks):
#locs = [loc for loc, idm in blocks]
InstSegNode.create(
self.memory, blocks, [l, 'semseg'])
def update(self, label, blocks, house):
pass
#self.subcomponent_classifier.to_update_q.put((label, blocks, house))
class SubComponentClassifier(Process):
"""
A classifier class that calls a voxel model to output object tags.
"""
def __init__(self, voxel_model_path=None, vocab_path=None, true_temp=1):
super().__init__()
if voxel_model_path is not None:
logging.info(
"SubComponentClassifier using voxel_model_path={}".format(voxel_model_path)
)
self.model = SemSegWrapper(voxel_model_path, vocab_path)
else:
raise Exception("specify a segmentation model")
self.block_objs_q = Queue() # store block objects to be recognized
self.loc2labels_q = Queue() # store loc2labels dicts to be retrieved by the agent
#self.to_update_q = Queue()
self.daemon = True
def run(self):
"""
The main recognition loop of the classifier
"""
while True: # run forever
#for _ in range(100):
# print("If I print here, it solves the bug ¯\_(ツ)_/¯, priority thing?")
tb = self.block_objs_q.get(block=True, timeout=None)
loc2labels = self._watch_single_object(tb)
for k in loc2labels.keys():
loc2labels[k].append("house")
self.loc2labels_q.put((loc2labels, tb))
#try:
# label, blocks, house = self.to_update_q.get_nowait()
# self.update(label, blocks, house)
#except queue.Empty:
# pass
def _watch_single_object(self, tuple_blocks, t=1):
"""
Input: a list of tuples, where each tuple is ((x, y, z), [bid, mid]). This list
represents a block object.
Output: a dict of (loc, [tag1, tag2, ..]) pairs for all non-air blocks.
"""
def get_tags(p):
"""
convert a list of tag indices to a list of tags
"""
return [self.model.tags[i][0] for i in p]
def apply_offsets(cube_loc, offsets):
"""
Convert the cube location back to world location
"""
return (cube_loc[0] + offsets[0], cube_loc[1] + offsets[1], cube_loc[2] + offsets[2])
np_blocks, offsets = bu.blocks_list_to_npy(blocks=tuple_blocks, xyz=True)
pred = self.model.segment_object(np_blocks, T=t)
# convert prediction results to string tags
return dict([(apply_offsets(loc, offsets), get_tags([p])) for loc, p in pred.items()])
def recognize(self, list_of_tuple_blocks):
"""
Multiple calls to _watch_single_object
"""
tags = dict()
for tb in list_of_tuple_blocks:
tags.update(self._watch_single_object(tb))
return tags
def update(self, label, blocks, house):
# changes can come in from adds or removals, if add, update house
logging.info("Updated label {}".format(label))
if blocks[0][0][0] > 0:
house += blocks
blocks = [(xyz, (1, 0)) for xyz, _ in blocks]
np_house, offsets = bu.blocks_list_to_npy(blocks=house, xyz=True)
np_blocks, _ = bu.blocks_list_to_npy(
blocks=blocks, xyz=False, offsets=offsets, shape=np_house.shape) # shape is still xyz bc of shape arg
self.model.update(label, np_blocks, np_house)
| [
"build_utils.blocks_list_to_npy",
"heuristic_perception.all_nearby_objects",
"mc_memory_nodes.InstSegNode.create",
"os.path.join",
"shapes.get_bounds",
"os.path.realpath",
"semseg_models.SemSegWrapper",
"multiprocessing.Queue",
"sys.path.append"
] | [((361, 392), 'os.path.join', 'os.path.join', (['VISION_DIR', '"""../"""'], {}), "(VISION_DIR, '../')\n", (373, 392), False, 'import os\n'), ((406, 456), 'os.path.join', 'os.path.join', (['VISION_DIR', '"""semantic_segmentation/"""'], {}), "(VISION_DIR, 'semantic_segmentation/')\n", (418, 456), False, 'import os\n'), ((457, 489), 'sys.path.append', 'sys.path.append', (['CRAFTASSIST_DIR'], {}), '(CRAFTASSIST_DIR)\n', (472, 489), False, 'import sys\n'), ((490, 517), 'sys.path.append', 'sys.path.append', (['SEMSEG_DIR'], {}), '(SEMSEG_DIR)\n', (505, 517), False, 'import sys\n'), ((315, 341), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (331, 341), False, 'import os\n'), ((1847, 1904), 'heuristic_perception.all_nearby_objects', 'all_nearby_objects', (['self.agent.get_blocks', 'self.agent.pos'], {}), '(self.agent.get_blocks, self.agent.pos)\n', (1865, 1904), False, 'from heuristic_perception import all_nearby_objects\n'), ((4649, 4656), 'multiprocessing.Queue', 'Queue', ([], {}), '()\n', (4654, 4656), False, 'from multiprocessing import Queue, Process\n'), ((4725, 4732), 'multiprocessing.Queue', 'Queue', ([], {}), '()\n', (4730, 4732), False, 'from multiprocessing import Queue, Process\n'), ((6287, 6339), 'build_utils.blocks_list_to_npy', 'bu.blocks_list_to_npy', ([], {'blocks': 'tuple_blocks', 'xyz': '(True)'}), '(blocks=tuple_blocks, xyz=True)\n', (6308, 6339), True, 'import build_utils as bu\n'), ((7119, 7164), 'build_utils.blocks_list_to_npy', 'bu.blocks_list_to_npy', ([], {'blocks': 'house', 'xyz': '(True)'}), '(blocks=house, xyz=True)\n', (7140, 7164), True, 'import build_utils as bu\n'), ((7189, 7280), 'build_utils.blocks_list_to_npy', 'bu.blocks_list_to_npy', ([], {'blocks': 'blocks', 'xyz': '(False)', 'offsets': 'offsets', 'shape': 'np_house.shape'}), '(blocks=blocks, xyz=False, offsets=offsets, shape=\n np_house.shape)\n', (7210, 7280), True, 'import build_utils as bu\n'), ((1695, 1749), 
'heuristic_perception.all_nearby_objects', 'all_nearby_objects', (['self.agent.get_blocks', 'pos', 'radius'], {}), '(self.agent.get_blocks, pos, radius)\n', (1713, 1749), False, 'from heuristic_perception import all_nearby_objects\n'), ((4502, 4545), 'semseg_models.SemSegWrapper', 'SemSegWrapper', (['voxel_model_path', 'vocab_path'], {}), '(voxel_model_path, vocab_path)\n', (4515, 4545), False, 'from semseg_models import SemSegWrapper\n'), ((2645, 2663), 'shapes.get_bounds', 'get_bounds', (['blocks'], {}), '(blocks)\n', (2655, 2663), False, 'from shapes import get_bounds\n'), ((3854, 3908), 'mc_memory_nodes.InstSegNode.create', 'InstSegNode.create', (['self.memory', 'blocks', "[l, 'semseg']"], {}), "(self.memory, blocks, [l, 'semseg'])\n", (3872, 3908), False, 'from mc_memory_nodes import InstSegNode, PropSegNode\n')] |
"""
Pure-Python implementation of a Python 2-like str object for Python 3.
"""
from numbers import Integral
from past.utils import PY2, with_metaclass
if PY2:
from collections import Iterable
else:
from collections.abc import Iterable
_builtin_bytes = bytes
class BaseOldStr(type):
def __instancecheck__(cls, instance):
return isinstance(instance, _builtin_bytes)
def unescape(s):
r"""
Interprets strings with escape sequences
Example:
>>> s = unescape(r'abc\\def') # i.e. 'abc\\\\def'
>>> print(s)
'abc\def'
>>> s2 = unescape('abc\\ndef')
>>> len(s2)
8
>>> print(s2)
abc
def
"""
return s.encode().decode('unicode_escape')
class oldstr(with_metaclass(BaseOldStr, _builtin_bytes)):
"""
A forward port of the Python 2 8-bit string object to Py3
"""
# Python 2 strings have no __iter__ method:
@property
def __iter__(self):
raise AttributeError
def __dir__(self):
return [thing for thing in dir(_builtin_bytes) if thing != '__iter__']
# def __new__(cls, *args, **kwargs):
# """
# From the Py3 bytes docstring:
# bytes(iterable_of_ints) -> bytes
# bytes(string, encoding[, errors]) -> bytes
# bytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer
# bytes(int) -> bytes object of size given by the parameter initialized with null bytes
# bytes() -> empty bytes object
#
# Construct an immutable array of bytes from:
# - an iterable yielding integers in range(256)
# - a text string encoded using the specified encoding
# - any object implementing the buffer API.
# - an integer
# """
#
# if len(args) == 0:
# return super(newbytes, cls).__new__(cls)
# # Was: elif isinstance(args[0], newbytes):
# # We use type() instead of the above because we're redefining
# # this to be True for all unicode string subclasses. Warning:
# # This may render newstr un-subclassable.
# elif type(args[0]) == newbytes:
# return args[0]
# elif isinstance(args[0], _builtin_bytes):
# value = args[0]
# elif isinstance(args[0], unicode):
# if 'encoding' not in kwargs:
# raise TypeError('unicode string argument without an encoding')
# ###
# # Was: value = args[0].encode(**kwargs)
# # Python 2.6 string encode() method doesn't take kwargs:
# # Use this instead:
# newargs = [kwargs['encoding']]
# if 'errors' in kwargs:
# newargs.append(kwargs['errors'])
# value = args[0].encode(*newargs)
# ###
# elif isinstance(args[0], Iterable):
# if len(args[0]) == 0:
# # What is this?
# raise ValueError('unknown argument type')
# elif len(args[0]) > 0 and isinstance(args[0][0], Integral):
# # It's a list of integers
# value = b''.join([chr(x) for x in args[0]])
# else:
# raise ValueError('item cannot be interpreted as an integer')
# elif isinstance(args[0], Integral):
# if args[0] < 0:
# raise ValueError('negative count')
# value = b'\x00' * args[0]
# else:
# value = args[0]
# return super(newbytes, cls).__new__(cls, value)
def __repr__(self):
s = super(oldstr, self).__repr__() # e.g. b'abc' on Py3, b'abc' on Py3
return s[1:]
def __str__(self):
s = super(oldstr, self).__str__() # e.g. "b'abc'" or "b'abc\\ndef'
# TODO: fix this:
assert s[:2] == "b'" and s[-1] == "'"
return unescape(s[2:-1]) # e.g. 'abc' or 'abc\ndef'
def __getitem__(self, y):
if isinstance(y, Integral):
return super(oldstr, self).__getitem__(slice(y, y+1))
else:
return super(oldstr, self).__getitem__(y)
def __getslice__(self, *args):
return self.__getitem__(slice(*args))
def __contains__(self, key):
if isinstance(key, int):
return False
def __native__(self):
return bytes(self)
__all__ = ['oldstr']
| [
"past.utils.with_metaclass"
] | [((725, 767), 'past.utils.with_metaclass', 'with_metaclass', (['BaseOldStr', '_builtin_bytes'], {}), '(BaseOldStr, _builtin_bytes)\n', (739, 767), False, 'from past.utils import PY2, with_metaclass\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from datetime import datetime
from sqlalchemy.event import listen
from app.factory import db
class BaseModel(db.Model):
"""
Base model with `created_at` and `updated_at` fields
"""
__abstract__ = True
fields_to_serialize = []
created_at = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)
updated_at = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)
def save(self):
db.session.add(self)
db.session.commit()
def delete(self):
db.session.delete(self)
db.session.commit()
@classmethod
def all(cls):
return cls.query.all()
def serialize(self, fields=None):
serialized = {}
for field in fields or self.fields_to_serialize:
field_value = getattr(self, field)
if isinstance(field_value, datetime):
field_value = field_value.isoformat()
serialized[field] = field_value
return serialized
def set_updated_at(target, value, oldvalue):
"""
Set updated_at value
"""
value.updated_at = datetime.now()
listen(BaseModel, "before_update", set_updated_at)
| [
"sqlalchemy.event.listen",
"app.factory.db.Column",
"app.factory.db.session.delete",
"datetime.datetime.now",
"app.factory.db.session.add",
"app.factory.db.session.commit"
] | [((1157, 1207), 'sqlalchemy.event.listen', 'listen', (['BaseModel', '"""before_update"""', 'set_updated_at'], {}), "(BaseModel, 'before_update', set_updated_at)\n", (1163, 1207), False, 'from sqlalchemy.event import listen\n'), ((314, 377), 'app.factory.db.Column', 'db.Column', (['db.DateTime'], {'nullable': '(False)', 'default': 'datetime.utcnow'}), '(db.DateTime, nullable=False, default=datetime.utcnow)\n', (323, 377), False, 'from app.factory import db\n'), ((395, 458), 'app.factory.db.Column', 'db.Column', (['db.DateTime'], {'nullable': '(False)', 'default': 'datetime.utcnow'}), '(db.DateTime, nullable=False, default=datetime.utcnow)\n', (404, 458), False, 'from app.factory import db\n'), ((1140, 1154), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1152, 1154), False, 'from datetime import datetime\n'), ((488, 508), 'app.factory.db.session.add', 'db.session.add', (['self'], {}), '(self)\n', (502, 508), False, 'from app.factory import db\n'), ((517, 536), 'app.factory.db.session.commit', 'db.session.commit', ([], {}), '()\n', (534, 536), False, 'from app.factory import db\n'), ((568, 591), 'app.factory.db.session.delete', 'db.session.delete', (['self'], {}), '(self)\n', (585, 591), False, 'from app.factory import db\n'), ((600, 619), 'app.factory.db.session.commit', 'db.session.commit', ([], {}), '()\n', (617, 619), False, 'from app.factory import db\n')] |
"""
Raw HTML widget.
Adapted/copied from https://github.com/makukha/cmsplugin-raw-html
"""
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.template import Template
from django.utils.safestring import mark_safe
from .models import RawHtmlModel, CMSMember
from django.utils.translation import ugettext as _
class RawHtmlPlugin(CMSPluginBase):
model = RawHtmlModel
name = 'HTML'
render_template = 'cms/raw_html_widget.html'
text_enabled = True
def render(self, context, instance, placeholder):
context.update({
'body': mark_safe(Template(instance.body).render(context)),
'object': instance,
'placeholder': placeholder
})
return context
plugin_pool.register_plugin(RawHtmlPlugin)
class MemberPlugin(CMSPluginBase):
"""
This needs to be defined in `tweaks` because it has to be after `cms`, whereas
`AUTH_USER_MODEL` needs to be loaded before `cms`.
"""
model = CMSMember # model where plugin data are saved
module = _('Member')
name = _('Member info') # name of the plugin in the interface
render_template = 'members_widget.html'
def render(self, context, instance, placeholder):
context.update(dict(
inst=instance,
title=instance.title,
description=instance.description,
users=instance.members.all(),
))
return context
plugin_pool.register_plugin(MemberPlugin) # register the plugin
| [
"django.utils.translation.ugettext",
"django.template.Template",
"cms.plugin_pool.plugin_pool.register_plugin"
] | [((707, 749), 'cms.plugin_pool.plugin_pool.register_plugin', 'plugin_pool.register_plugin', (['RawHtmlPlugin'], {}), '(RawHtmlPlugin)\n', (734, 749), False, 'from cms.plugin_pool import plugin_pool\n'), ((1324, 1365), 'cms.plugin_pool.plugin_pool.register_plugin', 'plugin_pool.register_plugin', (['MemberPlugin'], {}), '(MemberPlugin)\n', (1351, 1365), False, 'from cms.plugin_pool import plugin_pool\n'), ((995, 1006), 'django.utils.translation.ugettext', '_', (['"""Member"""'], {}), "('Member')\n", (996, 1006), True, 'from django.utils.translation import ugettext as _\n'), ((1015, 1031), 'django.utils.translation.ugettext', '_', (['"""Member info"""'], {}), "('Member info')\n", (1016, 1031), True, 'from django.utils.translation import ugettext as _\n'), ((587, 610), 'django.template.Template', 'Template', (['instance.body'], {}), '(instance.body)\n', (595, 610), False, 'from django.template import Template\n')] |
from unittest import TestCase, mock
import pytest
from requests import Response
import aftership
class TrackingTestCase(TestCase):
def setUp(self):
self.slug = "4px"
self.tracking_number = "HH19260817"
self.tracking_id = "k5lh7dy7vvqeck71p5loe011"
@pytest.mark.vcr()
def test_create_tracking(self):
response = aftership.tracking.create_tracking(
tracking={"slug": self.slug, "tracking_number": self.tracking_number}
)
@pytest.mark.vcr()
def test_get_tracking(self):
response = aftership.tracking.get_tracking(slug=self.slug, tracking_number=self.tracking_number)
# @pytest.mark.vcr()
# def test_delete_tracking(self):
# response = aftership.tracking.delete_tracking(slug='china-ems',tracking_number='1234567890')
@pytest.mark.vcr()
def test_list_trackings(self):
response = aftership.tracking.list_trackings(slug=self.slug, limit=1)
@pytest.mark.vcr()
def test_update_tracking(self):
response = aftership.tracking.update_tracking(tracking_id=self.tracking_id, tracking={"title": "new title"})
@pytest.mark.vcr()
def test_retrack(self):
response = aftership.tracking.retrack(tracking_id=self.tracking_id)
@pytest.mark.vcr()
def test_get_last_checkpoint(self):
response = aftership.tracking.get_last_checkpoint(tracking_id=self.tracking_id)
class TrackingWithAdditionalFieldsTestCase(TestCase):
def setUp(self):
self.tracking_id = "wuuxyb7ohjx55kmpt5r7y017"
self.slug = "postnl-3s"
self.tracking_number = "3SKAAG5995399"
self.destination_country = "ESP"
self.postal_code = "46970"
@pytest.mark.vcr()
def test_create_tracking(self):
response = aftership.tracking.create_tracking(
tracking={
"slug": self.slug,
"tracking_number": self.tracking_number,
"tracking_destination_country": self.destination_country,
"tracking_postal_code": self.postal_code,
}
)
@pytest.mark.vcr()
def test_get_tracking(self):
response = aftership.tracking.get_tracking(
slug=self.slug,
tracking_number=self.tracking_number,
tracking_destination_country=self.destination_country,
tracking_postal_code=self.postal_code,
)
@pytest.mark.vcr()
def test_get_tracking_by_id(self):
response = aftership.tracking.get_tracking(tracking_id=self.tracking_id)
@pytest.mark.vcr()
def test_update_tracking(self):
response = aftership.tracking.update_tracking(tracking_id=self.tracking_id, tracking={"title": "new title"})
@pytest.mark.vcr()
def test_get_last_checkpoint(self):
response = aftership.tracking.get_last_checkpoint(tracking_id=self.tracking_id)
@pytest.mark.vcr()
def test_get_tracking_with_internal_error(self):
with self.assertRaises(aftership.exception.InternalError):
response = aftership.tracking.get_tracking(
slug=self.slug,
tracking_number=self.tracking_number,
tracking_destination_country=self.destination_country,
tracking_postal_code=self.postal_code,
)
| [
"aftership.tracking.get_tracking",
"aftership.tracking.retrack",
"aftership.tracking.update_tracking",
"aftership.tracking.create_tracking",
"pytest.mark.vcr",
"aftership.tracking.get_last_checkpoint",
"aftership.tracking.list_trackings"
] | [((286, 303), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (301, 303), False, 'import pytest\n'), ((493, 510), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (508, 510), False, 'import pytest\n'), ((822, 839), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (837, 839), False, 'import pytest\n'), ((959, 976), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (974, 976), False, 'import pytest\n'), ((1136, 1153), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (1151, 1153), False, 'import pytest\n'), ((1264, 1281), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (1279, 1281), False, 'import pytest\n'), ((1702, 1719), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (1717, 1719), False, 'import pytest\n'), ((2088, 2105), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (2103, 2105), False, 'import pytest\n'), ((2403, 2420), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (2418, 2420), False, 'import pytest\n'), ((2547, 2564), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (2562, 2564), False, 'import pytest\n'), ((2724, 2741), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (2739, 2741), False, 'import pytest\n'), ((2876, 2893), 'pytest.mark.vcr', 'pytest.mark.vcr', ([], {}), '()\n', (2891, 2893), False, 'import pytest\n'), ((359, 468), 'aftership.tracking.create_tracking', 'aftership.tracking.create_tracking', ([], {'tracking': "{'slug': self.slug, 'tracking_number': self.tracking_number}"}), "(tracking={'slug': self.slug,\n 'tracking_number': self.tracking_number})\n", (393, 468), False, 'import aftership\n'), ((563, 653), 'aftership.tracking.get_tracking', 'aftership.tracking.get_tracking', ([], {'slug': 'self.slug', 'tracking_number': 'self.tracking_number'}), '(slug=self.slug, tracking_number=self.\n tracking_number)\n', (594, 653), False, 'import aftership\n'), ((894, 952), 'aftership.tracking.list_trackings', 'aftership.tracking.list_trackings', ([], {'slug': 
'self.slug', 'limit': '(1)'}), '(slug=self.slug, limit=1)\n', (927, 952), False, 'import aftership\n'), ((1032, 1134), 'aftership.tracking.update_tracking', 'aftership.tracking.update_tracking', ([], {'tracking_id': 'self.tracking_id', 'tracking': "{'title': 'new title'}"}), "(tracking_id=self.tracking_id, tracking={\n 'title': 'new title'})\n", (1066, 1134), False, 'import aftership\n'), ((1201, 1257), 'aftership.tracking.retrack', 'aftership.tracking.retrack', ([], {'tracking_id': 'self.tracking_id'}), '(tracking_id=self.tracking_id)\n', (1227, 1257), False, 'import aftership\n'), ((1341, 1409), 'aftership.tracking.get_last_checkpoint', 'aftership.tracking.get_last_checkpoint', ([], {'tracking_id': 'self.tracking_id'}), '(tracking_id=self.tracking_id)\n', (1379, 1409), False, 'import aftership\n'), ((1775, 1988), 'aftership.tracking.create_tracking', 'aftership.tracking.create_tracking', ([], {'tracking': "{'slug': self.slug, 'tracking_number': self.tracking_number,\n 'tracking_destination_country': self.destination_country,\n 'tracking_postal_code': self.postal_code}"}), "(tracking={'slug': self.slug,\n 'tracking_number': self.tracking_number, 'tracking_destination_country':\n self.destination_country, 'tracking_postal_code': self.postal_code})\n", (1809, 1988), False, 'import aftership\n'), ((2158, 2346), 'aftership.tracking.get_tracking', 'aftership.tracking.get_tracking', ([], {'slug': 'self.slug', 'tracking_number': 'self.tracking_number', 'tracking_destination_country': 'self.destination_country', 'tracking_postal_code': 'self.postal_code'}), '(slug=self.slug, tracking_number=self.\n tracking_number, tracking_destination_country=self.destination_country,\n tracking_postal_code=self.postal_code)\n', (2189, 2346), False, 'import aftership\n'), ((2479, 2540), 'aftership.tracking.get_tracking', 'aftership.tracking.get_tracking', ([], {'tracking_id': 'self.tracking_id'}), '(tracking_id=self.tracking_id)\n', (2510, 2540), False, 'import aftership\n'), ((2620, 
2722), 'aftership.tracking.update_tracking', 'aftership.tracking.update_tracking', ([], {'tracking_id': 'self.tracking_id', 'tracking': "{'title': 'new title'}"}), "(tracking_id=self.tracking_id, tracking={\n 'title': 'new title'})\n", (2654, 2722), False, 'import aftership\n'), ((2801, 2869), 'aftership.tracking.get_last_checkpoint', 'aftership.tracking.get_last_checkpoint', ([], {'tracking_id': 'self.tracking_id'}), '(tracking_id=self.tracking_id)\n', (2839, 2869), False, 'import aftership\n'), ((3037, 3225), 'aftership.tracking.get_tracking', 'aftership.tracking.get_tracking', ([], {'slug': 'self.slug', 'tracking_number': 'self.tracking_number', 'tracking_destination_country': 'self.destination_country', 'tracking_postal_code': 'self.postal_code'}), '(slug=self.slug, tracking_number=self.\n tracking_number, tracking_destination_country=self.destination_country,\n tracking_postal_code=self.postal_code)\n', (3068, 3225), False, 'import aftership\n')] |
from __future__ import division
import numpy as np
__all__ = ['subtract_CAR',
'subtract_common_median_reference']
def subtract_CAR(X, b_size=16):
"""
Compute and subtract common average reference in 16 channel blocks.
"""
channels, time_points = X.shape
s = channels // b_size
r = channels % b_size
X_1 = X[:channels-r].copy()
X_1 = X_1.reshape((s, b_size, time_points))
X_1 -= np.nanmean(X_1, axis=1, keepdims=True)
if r > 0:
X_2 = X[channels-r:].copy()
X_2 -= np.nanmean(X_2, axis=0, keepdims=True)
X = np.vstack([X_1.reshape((s*b_size, time_points)), X_2])
return X
else:
return X_1.reshape((s*b_size, time_points))
def subtract_common_median_reference(X, channel_axis=-2):
"""
Compute and subtract common median reference
for the entire grid.
Parameters
----------
X : ndarray (..., n_channels, n_time)
Data to common median reference.
Returns
-------
Xp : ndarray (..., n_channels, n_time)
Common median referenced data.
"""
median = np.nanmedian(X, axis=channel_axis, keepdims=True)
Xp = X - median
return Xp
| [
"numpy.nanmean",
"numpy.nanmedian"
] | [((431, 469), 'numpy.nanmean', 'np.nanmean', (['X_1'], {'axis': '(1)', 'keepdims': '(True)'}), '(X_1, axis=1, keepdims=True)\n', (441, 469), True, 'import numpy as np\n'), ((1105, 1154), 'numpy.nanmedian', 'np.nanmedian', (['X'], {'axis': 'channel_axis', 'keepdims': '(True)'}), '(X, axis=channel_axis, keepdims=True)\n', (1117, 1154), True, 'import numpy as np\n'), ((535, 573), 'numpy.nanmean', 'np.nanmean', (['X_2'], {'axis': '(0)', 'keepdims': '(True)'}), '(X_2, axis=0, keepdims=True)\n', (545, 573), True, 'import numpy as np\n')] |
################################################################################################################
# Author: <NAME>
# <EMAIL>
################################################################################################################
import numpy as np
import os
from utils import *
from tqdm import tqdm
import scipy.io
import torch
import torch.autograd as autograd
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
#################################################################################################################
def GetTopK_UsingCosineSim(outfn, queries, documents, TopK, queryBatchSize=10, docBatchSize=100):
n_docs = documents.shape[0]
n_queries = queries.shape[0]
query_row = 0
with open(outfn, 'w') as out_fn:
for q_idx in tqdm(range(0, n_queries, queryBatchSize), desc='Query', ncols=0):
query_batch_s_idx = q_idx
query_batch_e_idx = min(query_batch_s_idx + queryBatchSize, n_queries)
queryMats = torch.cuda.FloatTensor(queries[query_batch_s_idx:query_batch_e_idx].toarray())
queryNorm2 = torch.norm(queryMats, 2, dim=1)
queryNorm2.unsqueeze_(1)
queryMats.unsqueeze_(2)
scoreList = []
indicesList = []
#print('{}: perform cosine sim ...'.format(q_idx))
for idx in tqdm(range(0, n_docs, docBatchSize), desc='Doc', leave=False, ncols=0):
batch_s_idx = idx
batch_e_idx = min(batch_s_idx + docBatchSize, n_docs)
n_doc_in_batch = batch_e_idx - batch_s_idx
#if batch_s_idx > 1000:
# break
candidateMats = torch.cuda.FloatTensor(documents[batch_s_idx:batch_e_idx].toarray())
candidateNorm2 = torch.norm(candidateMats, 2, dim=1)
candidateNorm2.unsqueeze_(0)
candidateMats.unsqueeze_(2)
candidateMats = candidateMats.permute(2, 1, 0)
# compute cosine similarity
queryMatsExpand = queryMats.expand(queryMats.size(0), queryMats.size(1), candidateMats.size(2))
candidateMats = candidateMats.expand_as(queryMatsExpand)
cos_sim_scores = torch.sum(queryMatsExpand * candidateMats, dim=1) / (queryNorm2 * candidateNorm2)
K = min(TopK, n_doc_in_batch)
scores, indices = torch.topk(cos_sim_scores, K, dim=1, largest=True)
del cos_sim_scores
del queryMatsExpand
del candidateMats
del candidateNorm2
scoreList.append(scores)
indicesList.append(indices + batch_s_idx)
all_scores = torch.cat(scoreList, dim=1)
all_indices = torch.cat(indicesList, dim=1)
_, indices = torch.topk(all_scores, TopK, dim=1, largest=True)
topK_indices = torch.gather(all_indices, 1, indices)
#all_topK_indices.append(topK_indices)
#all_topK_scores.append(scores)
del queryMats
del queryNorm2
del scoreList
del indicesList
topK_indices = topK_indices.cpu().numpy()
for row in topK_indices:
out_fn.write("{}:".format(query_row))
outtext = ','.join([str(col) for col in row])
out_fn.write(outtext)
out_fn.write('\n')
query_row += 1
torch.cuda.empty_cache()
#################################################################################################################
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--gpunum")
parser.add_argument("--dataset")
parser.add_argument("--usetrain", action='store_true')
args = parser.parse_args()
if args.gpunum:
print("Use GPU #:{}".format(args.gpunum))
gpunum = args.gpunum
else:
print("Use GPU #0 as a default gpu")
gpunum = "0"
os.environ["CUDA_VISIBLE_DEVICES"]=gpunum
if args.dataset:
print("load {} dataset".format(args.dataset))
dataset = args.dataset
else:
parser.error("Need to provide the dataset.")
data = Load_Dataset("data/ng20.mat")
print("num train:{} num tests:{}".format(data.n_trains, data.n_tests))
if args.usetrain:
print("use train as a query corpus")
query_corpus = data.train
out_fn = "bm25/{}_train_top101.txt".format(dataset)
else:
print("use test as a query corpus")
query_corpus = data.test
out_fn = "bm25/{}_test_top101.txt".format(dataset)
print("save the result to {}".format(out_fn))
GetTopK_UsingCosineSim(out_fn, query_corpus, data.train, TopK=101, queryBatchSize=500, docBatchSize=100) | [
"argparse.ArgumentParser",
"torch.topk",
"torch.norm",
"torch.sum",
"torch.cuda.empty_cache",
"torch.gather",
"torch.cat"
] | [((3698, 3723), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (3721, 3723), False, 'import argparse\n'), ((1157, 1188), 'torch.norm', 'torch.norm', (['queryMats', '(2)'], {'dim': '(1)'}), '(queryMats, 2, dim=1)\n', (1167, 1188), False, 'import torch\n'), ((2779, 2806), 'torch.cat', 'torch.cat', (['scoreList'], {'dim': '(1)'}), '(scoreList, dim=1)\n', (2788, 2806), False, 'import torch\n'), ((2833, 2862), 'torch.cat', 'torch.cat', (['indicesList'], {'dim': '(1)'}), '(indicesList, dim=1)\n', (2842, 2862), False, 'import torch\n'), ((2888, 2937), 'torch.topk', 'torch.topk', (['all_scores', 'TopK'], {'dim': '(1)', 'largest': '(True)'}), '(all_scores, TopK, dim=1, largest=True)\n', (2898, 2937), False, 'import torch\n'), ((2966, 3003), 'torch.gather', 'torch.gather', (['all_indices', '(1)', 'indices'], {}), '(all_indices, 1, indices)\n', (2978, 3003), False, 'import torch\n'), ((3532, 3556), 'torch.cuda.empty_cache', 'torch.cuda.empty_cache', ([], {}), '()\n', (3554, 3556), False, 'import torch\n'), ((1845, 1880), 'torch.norm', 'torch.norm', (['candidateMats', '(2)'], {'dim': '(1)'}), '(candidateMats, 2, dim=1)\n', (1855, 1880), False, 'import torch\n'), ((2461, 2511), 'torch.topk', 'torch.topk', (['cos_sim_scores', 'K'], {'dim': '(1)', 'largest': '(True)'}), '(cos_sim_scores, K, dim=1, largest=True)\n', (2471, 2511), False, 'import torch\n'), ((2298, 2347), 'torch.sum', 'torch.sum', (['(queryMatsExpand * candidateMats)'], {'dim': '(1)'}), '(queryMatsExpand * candidateMats, dim=1)\n', (2307, 2347), False, 'import torch\n')] |
from matekasse import create_app, db
from matekasse.models import User, Transaction
import sqlite3
import argparse
parser = argparse.ArgumentParser(allow_abbrev=False)
parser.add_argument("-p", "--path", action='store', type=str, required=True, help="Path to fnordcredit database")
inp = parser.parse_args()
app = create_app()
ctx = app.app_context()
ctx.push()
try:
conn = sqlite3.connect(inp.path)
cursor = conn.cursor()
cursor.execute('SELECT * FROM user')
rows = cursor.fetchall()
for r in rows:
user = r[5]
credit = r[1] * 100
newuser = User(username=user, credit=credit)
db.session.add(newuser)
'''cursor.execute('SELECT * FROM transaction')
rows = cursor.fetchall()
for r in rows:
user = r[5]
trans = r[2] * 100
newtrans = Transaction(userid=user, credit=trans)
db.session.add(newtrans)'''
db.session.commit()
except sqlite3.Error as error:
print(error)
finally:
if conn:
conn.close()
print('Migration complete')
ctx.pop()
exit()
| [
"matekasse.create_app",
"sqlite3.connect",
"argparse.ArgumentParser",
"matekasse.models.User",
"matekasse.db.session.add",
"matekasse.db.session.commit"
] | [((125, 168), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'allow_abbrev': '(False)'}), '(allow_abbrev=False)\n', (148, 168), False, 'import argparse\n'), ((317, 329), 'matekasse.create_app', 'create_app', ([], {}), '()\n', (327, 329), False, 'from matekasse import create_app, db\n'), ((382, 407), 'sqlite3.connect', 'sqlite3.connect', (['inp.path'], {}), '(inp.path)\n', (397, 407), False, 'import sqlite3\n'), ((901, 920), 'matekasse.db.session.commit', 'db.session.commit', ([], {}), '()\n', (918, 920), False, 'from matekasse import create_app, db\n'), ((590, 624), 'matekasse.models.User', 'User', ([], {'username': 'user', 'credit': 'credit'}), '(username=user, credit=credit)\n', (594, 624), False, 'from matekasse.models import User, Transaction\n'), ((633, 656), 'matekasse.db.session.add', 'db.session.add', (['newuser'], {}), '(newuser)\n', (647, 656), False, 'from matekasse import create_app, db\n')] |
from oauthlib.oauth2 import InvalidClientError, MissingTokenError
import pytest
from test import configure_mendeley, cassette
def test_should_get_authenticated_session():
mendeley = configure_mendeley()
auth = mendeley.start_client_credentials_flow()
with cassette('fixtures/auth/client_credentials/get_authenticated_session.yaml'):
session = auth.authenticate()
assert session.token['access_token']
assert session.host == 'https://api.mendeley.com'
def test_should_throw_exception_on_incorrect_credentials():
mendeley = configure_mendeley()
mendeley.client_secret += '-invalid'
auth = mendeley.start_client_credentials_flow()
# We should never get an access token back
# and the OAuth library should be unhappy about that
with cassette('fixtures/auth/client_credentials/incorrect_credentials.yaml'), pytest.raises(MissingTokenError):
auth.authenticate()
| [
"test.configure_mendeley",
"pytest.raises",
"test.cassette"
] | [((189, 209), 'test.configure_mendeley', 'configure_mendeley', ([], {}), '()\n', (207, 209), False, 'from test import configure_mendeley, cassette\n'), ((568, 588), 'test.configure_mendeley', 'configure_mendeley', ([], {}), '()\n', (586, 588), False, 'from test import configure_mendeley, cassette\n'), ((272, 347), 'test.cassette', 'cassette', (['"""fixtures/auth/client_credentials/get_authenticated_session.yaml"""'], {}), "('fixtures/auth/client_credentials/get_authenticated_session.yaml')\n", (280, 347), False, 'from test import configure_mendeley, cassette\n'), ((800, 871), 'test.cassette', 'cassette', (['"""fixtures/auth/client_credentials/incorrect_credentials.yaml"""'], {}), "('fixtures/auth/client_credentials/incorrect_credentials.yaml')\n", (808, 871), False, 'from test import configure_mendeley, cassette\n'), ((873, 905), 'pytest.raises', 'pytest.raises', (['MissingTokenError'], {}), '(MissingTokenError)\n', (886, 905), False, 'import pytest\n')] |
__all__ = [
'generate_client_id',
'get_album_art',
'get_transcoder',
'transcode_to_mp3',
]
import os
import shutil
import subprocess
from base64 import b64encode
from binascii import unhexlify
from hashlib import md5
import audio_metadata
# The id is found by: getting md5sum of audio, base64 encode md5sum, removing trailing '='.
def generate_client_id(song):
if not isinstance(song, audio_metadata.Format):
song = audio_metadata.load(song)
md5sum = None
if isinstance(song, audio_metadata.FLAC):
md5sum = unhexlify(song.streaminfo.md5)
else:
m = md5()
audio_size = song.streaminfo._size
with open(song.filepath, 'rb') as f:
f.seek(song.streaminfo._start)
# Speed up by reading in chunks
read = 0
while True:
read_size = min(audio_size - read, 65536)
if not read_size:
break
read += read_size
data = f.read(read_size)
m.update(data)
md5sum = m.digest()
client_id = b64encode(md5sum).rstrip(b'=').decode('ascii')
return client_id
def get_album_art(song):
if not isinstance(song, audio_metadata.Format):
song = audio_metadata.load(song)
album_art = next(
(
picture.data
for picture in song.pictures
if picture.type == 3
),
None
)
return album_art
def get_transcoder():
"""Return the path to a transcoder (ffmpeg or avconv) with MP3 support."""
transcoders = ['ffmpeg', 'avconv']
transcoder_details = {}
for transcoder in transcoders:
command_path = shutil.which(transcoder)
if command_path is None:
transcoder_details[transcoder] = 'Not installed.'
continue
stdout = subprocess.run(
[command_path, '-codecs'],
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL,
universal_newlines=True,
).stdout
mp3_encoding_support = (
'libmp3lame' in stdout
and 'disable-libmp3lame' not in stdout
)
if mp3_encoding_support:
transcoder_details[transcoder] = "MP3 encoding support."
break
else:
transcoder_details[transcoder] = "No MP3 encoding support."
else:
raise ValueError(
f"ffmpeg or avconv must be in the path and support mp3 encoding."
"\nDetails: {transcoder_details}"
)
return command_path
def _transcode(command, input_=None):
try:
transcode = subprocess.run(
command,
input=input_,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
transcode.check_returncode()
except (OSError, subprocess.CalledProcessError) as e:
error_msg = f"Transcode command '{' '.join(command)}' failed: {e}. "
if 'No such file or directory' in str(e):
error_msg += '\nffmpeg or avconv must be installed PATH.'
if transcode.stderr is not None:
error_msg += f"\nstderr: '{transcode.stderr}'"
e.message = error_msg
raise
else:
return transcode.stdout
def transcode_to_mp3(song, *, slice_start=None, slice_duration=None, quality='320k'):
command_path = get_transcoder()
input_ = None
if isinstance(song, audio_metadata.Format):
if hasattr(song.filepath, 'read'):
raise ValueError("Audio metadata must be from a file.")
# command = [command_path, '-i', '-']
# input_ = song.filepath.read()
else:
command = [command_path, '-i', song.filepath]
elif isinstance(song, bytes):
command = [command_path, '-i', '-']
input_ = song
elif isinstance(song, str):
command = [command_path, '-i', song]
elif isinstance(song, os.PathLike):
command = [command_path, '-i', song.__fspath__()]
else:
raise ValueError(
"'song' must be os.PathLike, filepath string, a file/bytes-like object, or binary data."
)
if slice_duration is not None:
command.extend(['-t', str(slice_duration)])
if slice_start is not None:
command.extend(['-ss', str(slice_start)])
if isinstance(quality, int):
command.extend(['-q:a', str(quality)])
elif isinstance(quality, str):
command.extend(['-b:a', str(quality)])
# Use 's16le' to not output id3 headers.
command.extend(['-f', 's16le', '-c', 'libmp3lame', '-'])
return _transcode(command, input_=input_)
| [
"hashlib.md5",
"base64.b64encode",
"subprocess.run",
"shutil.which",
"audio_metadata.load",
"binascii.unhexlify"
] | [((427, 452), 'audio_metadata.load', 'audio_metadata.load', (['song'], {}), '(song)\n', (446, 452), False, 'import audio_metadata\n'), ((523, 553), 'binascii.unhexlify', 'unhexlify', (['song.streaminfo.md5'], {}), '(song.streaminfo.md5)\n', (532, 553), False, 'from binascii import unhexlify\n'), ((567, 572), 'hashlib.md5', 'md5', ([], {}), '()\n', (570, 572), False, 'from hashlib import md5\n'), ((1085, 1110), 'audio_metadata.load', 'audio_metadata.load', (['song'], {}), '(song)\n', (1104, 1110), False, 'import audio_metadata\n'), ((1453, 1477), 'shutil.which', 'shutil.which', (['transcoder'], {}), '(transcoder)\n', (1465, 1477), False, 'import shutil\n'), ((2209, 2299), 'subprocess.run', 'subprocess.run', (['command'], {'input': 'input_', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(command, input=input_, stdout=subprocess.PIPE, stderr=\n subprocess.PIPE)\n', (2223, 2299), False, 'import subprocess\n'), ((1582, 1704), 'subprocess.run', 'subprocess.run', (["[command_path, '-codecs']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.DEVNULL', 'universal_newlines': '(True)'}), "([command_path, '-codecs'], stdout=subprocess.PIPE, stderr=\n subprocess.DEVNULL, universal_newlines=True)\n", (1596, 1704), False, 'import subprocess\n'), ((934, 951), 'base64.b64encode', 'b64encode', (['md5sum'], {}), '(md5sum)\n', (943, 951), False, 'from base64 import b64encode\n')] |
#!/usr/local/bin/python3
import sys,os,string,glob,subprocess
from setuptools import setup,Extension
from setuptools.command.build_ext import build_ext
from setuptools.command.install import install
import numpy
long_description = """\
This module uses the RRG method to measure the shapes of galaxies
in Hubble Space Telescope data
"""
#sudo python3 setup.py sdist upload -r pypi
version='0.1.2'
INCDIRS=['.']
packages = ['pyRRG', 'RRGtools','asciidata']
package_dir = {'RRGtools':'./lib/RRGtools',
'pyRRG':'./src',
'asciidata':'./lib/asciidata'}
package_data = {'pyRRG': ['psf_lib/*/*','sex_files/*','*.pkl']}
setup ( name = "pyRRG",
version = version,
author = "<NAME>",
author_email = "<EMAIL>",
description = "pyRRG module",
license = 'MIT',
packages = packages,
package_dir = package_dir,
package_data = package_data,
scripts = ['scripts/pyRRG'],
url = 'https://github.com/davidharvey1986/pyRRG', # use the URL to the github repo
download_url = 'https://github.com/davidharvey1986/pyRRG/archive/'+version+'.tar.gz',
install_requires=['scikit-learn',\
'numpy', \
'ipdb', 'pyraf',\
'scipy'],
)
| [
"setuptools.setup"
] | [((727, 1177), 'setuptools.setup', 'setup', ([], {'name': '"""pyRRG"""', 'version': 'version', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'description': '"""pyRRG module"""', 'license': '"""MIT"""', 'packages': 'packages', 'package_dir': 'package_dir', 'package_data': 'package_data', 'scripts': "['scripts/pyRRG']", 'url': '"""https://github.com/davidharvey1986/pyRRG"""', 'download_url': "('https://github.com/davidharvey1986/pyRRG/archive/' + version + '.tar.gz')", 'install_requires': "['scikit-learn', 'numpy', 'ipdb', 'pyraf', 'scipy']"}), "(name='pyRRG', version=version, author='<NAME>', author_email=\n '<EMAIL>', description='pyRRG module', license='MIT', packages=packages,\n package_dir=package_dir, package_data=package_data, scripts=[\n 'scripts/pyRRG'], url='https://github.com/davidharvey1986/pyRRG',\n download_url='https://github.com/davidharvey1986/pyRRG/archive/' +\n version + '.tar.gz', install_requires=['scikit-learn', 'numpy', 'ipdb',\n 'pyraf', 'scipy'])\n", (732, 1177), False, 'from setuptools import setup, Extension\n')] |
from concurrent.futures import ThreadPoolExecutor
import time
from PySide2.QtCore import QCoreApplication
thread_pool=None
def init_thread_pool():
global thread_pool
thread_pool=ThreadPoolExecutor()
def deinit_thread_pool():
global thread_pool
thread_pool.shutdown()
def submit_task(function,*args,**kwargs):
global thread_pool
return thread_pool.submit(function,*args,**kwargs)
#TODO: find a less hacky way to keep events processed
def async_run_await_result(function,*args,**kwargs):
future_obj=thread_pool.submit(function,*args,**kwargs)
while not future_obj.done():
QCoreApplication.processEvents()
time.sleep(0.1)
future_result=future_obj.result()
return future_result
| [
"PySide2.QtCore.QCoreApplication.processEvents",
"concurrent.futures.ThreadPoolExecutor",
"time.sleep"
] | [((189, 209), 'concurrent.futures.ThreadPoolExecutor', 'ThreadPoolExecutor', ([], {}), '()\n', (207, 209), False, 'from concurrent.futures import ThreadPoolExecutor\n'), ((616, 648), 'PySide2.QtCore.QCoreApplication.processEvents', 'QCoreApplication.processEvents', ([], {}), '()\n', (646, 648), False, 'from PySide2.QtCore import QCoreApplication\n'), ((657, 672), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (667, 672), False, 'import time\n')] |
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import json
import numpy as np
import scipy.sparse as sparse
import defenses
import upper_bounds
class NumpyEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.ndarray):
assert len(np.shape(obj)) == 1 # Can only handle 1D ndarrays
return obj.tolist()
elif isinstance(obj, np.floating):
return float(obj)
elif isinstance(obj, np.int16):
return str(obj)
else:
return super(NumpyEncoder, self).default(obj)
def get_class_map():
return {-1: 0, 1: 1}
def get_centroids(X, Y, class_map):
num_classes = len(set(Y))
num_features = X.shape[1]
centroids = np.zeros((num_classes, num_features))
for y in set(Y):
centroids[class_map[y], :] = np.mean(X[Y == y, :], axis=0)
return centroids
def get_centroid_vec(centroids):
assert centroids.shape[0] == 2
centroid_vec = centroids[0, :] - centroids[1, :]
centroid_vec /= np.linalg.norm(centroid_vec)
centroid_vec = np.reshape(centroid_vec, (1, -1))
return centroid_vec
# Can speed this up if necessary
def get_sqrt_inv_cov(X, Y, class_map):
num_classes = len(set(Y))
num_features = X.shape[1]
sqrt_inv_covs = np.zeros((num_classes, num_features, num_features))
for y in set(Y):
cov = np.cov(X[Y == y, :], rowvar=False)
U_cov, S_cov, _ = np.linalg.svd(cov + 1e-6 * np.eye(num_features))
print(' min eigenvalue of cov after 1e-6 reg is %s' % np.min(S_cov))
sqrt_inv_covs[class_map[y], ...] = U_cov.dot(np.diag(1 / np.sqrt(S_cov)).dot(U_cov.T))
return sqrt_inv_covs
# Can speed this up if necessary
def get_data_params(X, Y, percentile):
num_classes = len(set(Y))
num_features = X.shape[1]
centroids = np.zeros((num_classes, num_features))
class_map = get_class_map()
centroids = get_centroids(X, Y, class_map)
# Get radii for sphere
sphere_radii = np.zeros(2)
dists = defenses.compute_dists_under_Q(
X, Y,
Q=None,
centroids=centroids,
class_map=class_map,
norm=2)
for y in set(Y):
sphere_radii[class_map[y]] = np.percentile(dists[Y == y], percentile)
# Get vector between centroids
centroid_vec = get_centroid_vec(centroids)
# Get radii for slab
slab_radii = np.zeros(2)
for y in set(Y):
dists = np.abs(
(X[Y == y, :].dot(centroid_vec.T) - centroids[class_map[y], :].dot(centroid_vec.T)))
slab_radii[class_map[y]] = np.percentile(dists, percentile)
return class_map, centroids, centroid_vec, sphere_radii, slab_radii
def vstack(A, B):
if (sparse.issparse(A) or sparse.issparse(B)):
return sparse.vstack((A, B), format='csr')
else:
return np.concatenate((A, B), axis=0)
def add_points(x, y, X, Y, num_copies=1):
if num_copies == 0:
return X, Y
x = np.array(x).reshape(-1)
if sparse.issparse(X):
X_modified = sparse.vstack((
X,
sparse.csr_matrix(
np.tile(x, num_copies).reshape(-1, len(x)))))
else:
X_modified = np.append(
X,
np.tile(x, num_copies).reshape(-1, len(x)),
axis=0)
Y_modified = np.append(Y, np.tile(y, num_copies))
return X_modified, Y_modified
def copy_random_points(X, Y, mask_to_choose_from=None, target_class=1, num_copies=1,
random_seed=18, replace=False):
# Only copy from points where mask_to_choose_from == True
np.random.seed(random_seed)
combined_mask = (np.array(Y, dtype=int) == target_class)
if mask_to_choose_from is not None:
combined_mask = combined_mask & mask_to_choose_from
idx_to_copy = np.random.choice(
np.where(combined_mask)[0],
size=num_copies,
replace=replace)
if sparse.issparse(X):
X_modified = sparse.vstack((X, X[idx_to_copy, :]))
else:
X_modified = np.append(X, X[idx_to_copy, :], axis=0)
Y_modified = np.append(Y, Y[idx_to_copy])
return X_modified, Y_modified
def threshold(X):
return np.clip(X, 0, np.max(X))
def rround(X, random_seed=3, return_sparse=True):
if sparse.issparse(X):
X = X.toarray()
X_frac, X_int = np.modf(X)
X_round = X_int + (np.random.random_sample(X.shape) < X_frac)
if return_sparse:
return sparse.csr_matrix(X_round)
else:
return X_round
def rround_with_repeats(X, Y, repeat_points, random_seed=3, return_sparse=True):
X_round = rround(X, random_seed=random_seed, return_sparse=return_sparse)
assert Y.shape[0] == X.shape[0]
if repeat_points > 1:
pos_idx = 0
neg_idx = 0
for i in range(X_round.shape[0]):
if Y[i] == 1:
if pos_idx % repeat_points == 0:
last_pos_x = X_round[i, :]
else:
X_round[i, :] = last_pos_x
pos_idx += 1
else:
if neg_idx % repeat_points == 0:
last_neg_x = X_round[i, :]
else:
X_round[i, :] = last_neg_x
neg_idx += 1
return X_round
def project_onto_sphere(X, Y, radii, centroids, class_map):
for y in set(Y):
idx = class_map[y]
radius = radii[idx]
centroid = centroids[idx, :]
shifts_from_center = X[Y == y, :] - centroid
dists_from_center = np.linalg.norm(shifts_from_center, axis=1)
shifts_from_center[dists_from_center > radius, :] *= radius / np.reshape(dists_from_center[dists_from_center > radius], (-1, 1))
X[Y == y, :] = shifts_from_center + centroid
print("Number of (%s) points projected onto sphere: %s" % (y, np.sum(dists_from_center > radius)))
return X
def project_onto_slab(X, Y, v, radii, centroids, class_map):
"""
v^T x needs to be within radius of v^T centroid.
v is 1 x d and normalized.
"""
v = np.reshape(v / np.linalg.norm(v), (1, -1))
for y in set(Y):
idx = class_map[y]
radius = radii[idx]
centroid = centroids[idx, :]
# If v^T x is too large, then dists_along_v is positive
# If it's too small, then dists_along_v is negative
dists_along_v = (X[Y == y, :] - centroid).dot(v.T)
shifts_along_v = np.reshape(
dists_along_v - np.clip(dists_along_v, -radius, radius),
(1, -1))
X[Y == y, :] -= shifts_along_v.T.dot(v)
print("Number of (%s) points projected onto slab: %s" % (y, np.sum(np.abs(dists_along_v) > radius)))
return X
def get_projection_fn(
X_clean,
Y_clean,
sphere=True,
slab=True,
non_negative=False,
less_than_one=False,
use_lp_rounding=False,
percentile=90):
print(X_clean)
goal = 'find_nearest_point'
class_map, centroids, centroid_vec, sphere_radii, slab_radii = get_data_params(X_clean, Y_clean, percentile)
if use_lp_rounding or non_negative or less_than_one or (sphere and slab):
if use_lp_rounding:
projector = upper_bounds.Minimizer(
d=X_clean.shape[1],
use_sphere=sphere,
use_slab=slab,
non_negative=non_negative,
less_than_one=less_than_one,
constrain_max_loss=False,
goal=goal,
X=X_clean
)
projector = upper_bounds.Minimizer(
d=X_clean.shape[1],
use_sphere=sphere,
use_slab=slab,
non_negative=non_negative,
less_than_one=less_than_one,
constrain_max_loss=False,
goal=goal,
X=X_clean
)
else:
projector = upper_bounds.Minimizer(
d=X_clean.shape[1],
use_sphere=sphere,
use_slab=slab,
non_negative=non_negative,
less_than_one=less_than_one,
constrain_max_loss=False,
goal=goal
)
# Add back low-rank projection if we move back to just sphere+slab
def project_onto_feasible_set(
X, Y,
theta=None,
bias=None,
):
num_examples = X.shape[0]
proj_X = np.zeros_like(X)
for idx in range(num_examples):
x = X[idx, :]
y = Y[idx]
class_idx = class_map[y]
centroid = centroids[class_idx, :]
sphere_radius = sphere_radii[class_idx]
slab_radius = slab_radii[class_idx]
proj_X[idx, :] = projector.minimize_over_feasible_set(
None,
x,
centroid,
centroid_vec,
sphere_radius,
slab_radius)
num_projected = np.sum(np.max(X - proj_X, axis=1) > 1e-6)
print('Projected %s examples.' % num_projected)
return proj_X
else:
def project_onto_feasible_set(X, Y, theta=None, bias=None):
if sphere:
X = project_onto_sphere(X, Y, sphere_radii, centroids, class_map)
elif slab:
X = project_onto_slab(X, Y, centroid_vec, slab_radii, centroids, class_map)
return X
return project_onto_feasible_set
def filter_points_outside_feasible_set(X, Y,
centroids, centroid_vec,
sphere_radii, slab_radii,
class_map):
sphere_dists = defenses.compute_dists_under_Q(
X,
Y,
Q=None,
centroids=centroids,
class_map=class_map)
slab_dists = defenses.compute_dists_under_Q(
X,
Y,
Q=centroid_vec,
centroids=centroids,
class_map=class_map)
idx_to_keep = np.array([True] * X.shape[0])
for y in set(Y):
idx_to_keep[np.where(Y == y)[0][sphere_dists[Y == y] > sphere_radii[class_map[y]]]] = False
idx_to_keep[np.where(Y == y)[0][slab_dists[Y == y] > slab_radii[class_map[y]]]] = False
print(np.sum(idx_to_keep))
return X[idx_to_keep, :], Y[idx_to_keep]
| [
"numpy.clip",
"numpy.sqrt",
"numpy.array",
"numpy.linalg.norm",
"numpy.cov",
"defenses.compute_dists_under_Q",
"numpy.mean",
"numpy.reshape",
"numpy.where",
"numpy.max",
"numpy.random.seed",
"numpy.concatenate",
"numpy.min",
"scipy.sparse.csr_matrix",
"numpy.tile",
"numpy.eye",
"nump... | [((833, 870), 'numpy.zeros', 'np.zeros', (['(num_classes, num_features)'], {}), '((num_classes, num_features))\n', (841, 870), True, 'import numpy as np\n'), ((1122, 1150), 'numpy.linalg.norm', 'np.linalg.norm', (['centroid_vec'], {}), '(centroid_vec)\n', (1136, 1150), True, 'import numpy as np\n'), ((1170, 1203), 'numpy.reshape', 'np.reshape', (['centroid_vec', '(1, -1)'], {}), '(centroid_vec, (1, -1))\n', (1180, 1203), True, 'import numpy as np\n'), ((1381, 1432), 'numpy.zeros', 'np.zeros', (['(num_classes, num_features, num_features)'], {}), '((num_classes, num_features, num_features))\n', (1389, 1432), True, 'import numpy as np\n'), ((1929, 1966), 'numpy.zeros', 'np.zeros', (['(num_classes, num_features)'], {}), '((num_classes, num_features))\n', (1937, 1966), True, 'import numpy as np\n'), ((2093, 2104), 'numpy.zeros', 'np.zeros', (['(2)'], {}), '(2)\n', (2101, 2104), True, 'import numpy as np\n'), ((2117, 2216), 'defenses.compute_dists_under_Q', 'defenses.compute_dists_under_Q', (['X', 'Y'], {'Q': 'None', 'centroids': 'centroids', 'class_map': 'class_map', 'norm': '(2)'}), '(X, Y, Q=None, centroids=centroids, class_map\n =class_map, norm=2)\n', (2147, 2216), False, 'import defenses\n'), ((2478, 2489), 'numpy.zeros', 'np.zeros', (['(2)'], {}), '(2)\n', (2486, 2489), True, 'import numpy as np\n'), ((3079, 3097), 'scipy.sparse.issparse', 'sparse.issparse', (['X'], {}), '(X)\n', (3094, 3097), True, 'import scipy.sparse as sparse\n'), ((3674, 3701), 'numpy.random.seed', 'np.random.seed', (['random_seed'], {}), '(random_seed)\n', (3688, 3701), True, 'import numpy as np\n'), ((3994, 4012), 'scipy.sparse.issparse', 'sparse.issparse', (['X'], {}), '(X)\n', (4009, 4012), True, 'import scipy.sparse as sparse\n'), ((4161, 4189), 'numpy.append', 'np.append', (['Y', 'Y[idx_to_copy]'], {}), '(Y, Y[idx_to_copy])\n', (4170, 4189), True, 'import numpy as np\n'), ((4339, 4357), 'scipy.sparse.issparse', 'sparse.issparse', (['X'], {}), '(X)\n', (4354, 4357), True, 
'import scipy.sparse as sparse\n'), ((4404, 4414), 'numpy.modf', 'np.modf', (['X'], {}), '(X)\n', (4411, 4414), True, 'import numpy as np\n'), ((9839, 9930), 'defenses.compute_dists_under_Q', 'defenses.compute_dists_under_Q', (['X', 'Y'], {'Q': 'None', 'centroids': 'centroids', 'class_map': 'class_map'}), '(X, Y, Q=None, centroids=centroids, class_map\n =class_map)\n', (9869, 9930), False, 'import defenses\n'), ((9984, 10082), 'defenses.compute_dists_under_Q', 'defenses.compute_dists_under_Q', (['X', 'Y'], {'Q': 'centroid_vec', 'centroids': 'centroids', 'class_map': 'class_map'}), '(X, Y, Q=centroid_vec, centroids=centroids,\n class_map=class_map)\n', (10014, 10082), False, 'import defenses\n'), ((10139, 10168), 'numpy.array', 'np.array', (['([True] * X.shape[0])'], {}), '([True] * X.shape[0])\n', (10147, 10168), True, 'import numpy as np\n'), ((929, 958), 'numpy.mean', 'np.mean', (['X[Y == y, :]'], {'axis': '(0)'}), '(X[Y == y, :], axis=0)\n', (936, 958), True, 'import numpy as np\n'), ((1469, 1503), 'numpy.cov', 'np.cov', (['X[Y == y, :]'], {'rowvar': '(False)'}), '(X[Y == y, :], rowvar=False)\n', (1475, 1503), True, 'import numpy as np\n'), ((2311, 2351), 'numpy.percentile', 'np.percentile', (['dists[Y == y]', 'percentile'], {}), '(dists[Y == y], percentile)\n', (2324, 2351), True, 'import numpy as np\n'), ((2667, 2699), 'numpy.percentile', 'np.percentile', (['dists', 'percentile'], {}), '(dists, percentile)\n', (2680, 2699), True, 'import numpy as np\n'), ((2801, 2819), 'scipy.sparse.issparse', 'sparse.issparse', (['A'], {}), '(A)\n', (2816, 2819), True, 'import scipy.sparse as sparse\n'), ((2823, 2841), 'scipy.sparse.issparse', 'sparse.issparse', (['B'], {}), '(B)\n', (2838, 2841), True, 'import scipy.sparse as sparse\n'), ((2859, 2894), 'scipy.sparse.vstack', 'sparse.vstack', (['(A, B)'], {'format': '"""csr"""'}), "((A, B), format='csr')\n", (2872, 2894), True, 'import scipy.sparse as sparse\n'), ((2920, 2950), 'numpy.concatenate', 'np.concatenate', (['(A, 
B)'], {'axis': '(0)'}), '((A, B), axis=0)\n', (2934, 2950), True, 'import numpy as np\n'), ((3407, 3429), 'numpy.tile', 'np.tile', (['y', 'num_copies'], {}), '(y, num_copies)\n', (3414, 3429), True, 'import numpy as np\n'), ((3723, 3745), 'numpy.array', 'np.array', (['Y'], {'dtype': 'int'}), '(Y, dtype=int)\n', (3731, 3745), True, 'import numpy as np\n'), ((4035, 4072), 'scipy.sparse.vstack', 'sparse.vstack', (['(X, X[idx_to_copy, :])'], {}), '((X, X[idx_to_copy, :]))\n', (4048, 4072), True, 'import scipy.sparse as sparse\n'), ((4104, 4143), 'numpy.append', 'np.append', (['X', 'X[idx_to_copy, :]'], {'axis': '(0)'}), '(X, X[idx_to_copy, :], axis=0)\n', (4113, 4143), True, 'import numpy as np\n'), ((4269, 4278), 'numpy.max', 'np.max', (['X'], {}), '(X)\n', (4275, 4278), True, 'import numpy as np\n'), ((4518, 4544), 'scipy.sparse.csr_matrix', 'sparse.csr_matrix', (['X_round'], {}), '(X_round)\n', (4535, 4544), True, 'import scipy.sparse as sparse\n'), ((5596, 5638), 'numpy.linalg.norm', 'np.linalg.norm', (['shifts_from_center'], {'axis': '(1)'}), '(shifts_from_center, axis=1)\n', (5610, 5638), True, 'import numpy as np\n'), ((10397, 10416), 'numpy.sum', 'np.sum', (['idx_to_keep'], {}), '(idx_to_keep)\n', (10403, 10416), True, 'import numpy as np\n'), ((3047, 3058), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (3055, 3058), True, 'import numpy as np\n'), ((3908, 3931), 'numpy.where', 'np.where', (['combined_mask'], {}), '(combined_mask)\n', (3916, 3931), True, 'import numpy as np\n'), ((4438, 4470), 'numpy.random.random_sample', 'np.random.random_sample', (['X.shape'], {}), '(X.shape)\n', (4461, 4470), True, 'import numpy as np\n'), ((5710, 5776), 'numpy.reshape', 'np.reshape', (['dists_from_center[dists_from_center > radius]', '(-1, 1)'], {}), '(dists_from_center[dists_from_center > radius], (-1, 1))\n', (5720, 5776), True, 'import numpy as np\n'), ((6138, 6155), 'numpy.linalg.norm', 'np.linalg.norm', (['v'], {}), '(v)\n', (6152, 6155), True, 'import numpy as 
np\n'), ((7235, 7423), 'upper_bounds.Minimizer', 'upper_bounds.Minimizer', ([], {'d': 'X_clean.shape[1]', 'use_sphere': 'sphere', 'use_slab': 'slab', 'non_negative': 'non_negative', 'less_than_one': 'less_than_one', 'constrain_max_loss': '(False)', 'goal': 'goal', 'X': 'X_clean'}), '(d=X_clean.shape[1], use_sphere=sphere, use_slab=slab,\n non_negative=non_negative, less_than_one=less_than_one,\n constrain_max_loss=False, goal=goal, X=X_clean)\n', (7257, 7423), False, 'import upper_bounds\n'), ((7586, 7774), 'upper_bounds.Minimizer', 'upper_bounds.Minimizer', ([], {'d': 'X_clean.shape[1]', 'use_sphere': 'sphere', 'use_slab': 'slab', 'non_negative': 'non_negative', 'less_than_one': 'less_than_one', 'constrain_max_loss': '(False)', 'goal': 'goal', 'X': 'X_clean'}), '(d=X_clean.shape[1], use_sphere=sphere, use_slab=slab,\n non_negative=non_negative, less_than_one=less_than_one,\n constrain_max_loss=False, goal=goal, X=X_clean)\n', (7608, 7774), False, 'import upper_bounds\n'), ((7951, 8128), 'upper_bounds.Minimizer', 'upper_bounds.Minimizer', ([], {'d': 'X_clean.shape[1]', 'use_sphere': 'sphere', 'use_slab': 'slab', 'non_negative': 'non_negative', 'less_than_one': 'less_than_one', 'constrain_max_loss': '(False)', 'goal': 'goal'}), '(d=X_clean.shape[1], use_sphere=sphere, use_slab=slab,\n non_negative=non_negative, less_than_one=less_than_one,\n constrain_max_loss=False, goal=goal)\n', (7973, 8128), False, 'import upper_bounds\n'), ((8505, 8521), 'numpy.zeros_like', 'np.zeros_like', (['X'], {}), '(X)\n', (8518, 8521), True, 'import numpy as np\n'), ((1644, 1657), 'numpy.min', 'np.min', (['S_cov'], {}), '(S_cov)\n', (1650, 1657), True, 'import numpy as np\n'), ((6529, 6568), 'numpy.clip', 'np.clip', (['dists_along_v', '(-radius)', 'radius'], {}), '(dists_along_v, -radius, radius)\n', (6536, 6568), True, 'import numpy as np\n'), ((378, 391), 'numpy.shape', 'np.shape', (['obj'], {}), '(obj)\n', (386, 391), True, 'import numpy as np\n'), ((1557, 1577), 'numpy.eye', 
'np.eye', (['num_features'], {}), '(num_features)\n', (1563, 1577), True, 'import numpy as np\n'), ((3313, 3335), 'numpy.tile', 'np.tile', (['x', 'num_copies'], {}), '(x, num_copies)\n', (3320, 3335), True, 'import numpy as np\n'), ((5901, 5935), 'numpy.sum', 'np.sum', (['(dists_from_center > radius)'], {}), '(dists_from_center > radius)\n', (5907, 5935), True, 'import numpy as np\n'), ((9113, 9139), 'numpy.max', 'np.max', (['(X - proj_X)'], {'axis': '(1)'}), '(X - proj_X, axis=1)\n', (9119, 9139), True, 'import numpy as np\n'), ((10210, 10226), 'numpy.where', 'np.where', (['(Y == y)'], {}), '(Y == y)\n', (10218, 10226), True, 'import numpy as np\n'), ((10310, 10326), 'numpy.where', 'np.where', (['(Y == y)'], {}), '(Y == y)\n', (10318, 10326), True, 'import numpy as np\n'), ((1724, 1738), 'numpy.sqrt', 'np.sqrt', (['S_cov'], {}), '(S_cov)\n', (1731, 1738), True, 'import numpy as np\n'), ((3198, 3220), 'numpy.tile', 'np.tile', (['x', 'num_copies'], {}), '(x, num_copies)\n', (3205, 3220), True, 'import numpy as np\n'), ((6715, 6736), 'numpy.abs', 'np.abs', (['dists_along_v'], {}), '(dists_along_v)\n', (6721, 6736), True, 'import numpy as np\n')] |
""" Server module
Quandl API limits:
Authenticated users have a limit of 300 calls per 10 seconds,
2,000 calls per 10 minutes and a limit of 50,000 calls per day.
"""
import urllib
import logging
from twisted.internet import reactor
from twisted.web.client import Agent, readBody
from . import settings
from . import resources
logger = logging.getLogger(settings.LOG_NAME + ".server")
def main():
from .data import data_list
try:
resource_name = data_list[0]["resource"]
resource = resources.__dict__[resource_name]
key = data_list[0]["api_key"]
key = settings.__dict__[key]
resource = resource(key)
except KeyError as e:
logger.warning("KeyError while trying to instantiate Resource class with : " + str(e))
else:
# TODO: go to next item
pass
url = resource.get_url(data_list[0]["url"])
agent = Agent(reactor)
d = agent.request(
str.encode(data_list[0]["method"], "utf-8"),
str.encode(url, "ascii")
)
def cbResponse(response):
if response.code == 200:
def cbBody(body):
resource.save(body)
pass
d = readBody(response)
d.addCallback(cbBody)
return d
d.addCallback(cbResponse)
def cbShutdown(ignored):
if not ignored:
logger.warning("request failed.")
reactor.stop()
d.addBoth(cbShutdown)
reactor.run()
if __name__ == "__main__":
main() | [
"logging.getLogger",
"twisted.internet.reactor.stop",
"twisted.web.client.Agent",
"twisted.internet.reactor.run",
"twisted.web.client.readBody"
] | [((341, 389), 'logging.getLogger', 'logging.getLogger', (["(settings.LOG_NAME + '.server')"], {}), "(settings.LOG_NAME + '.server')\n", (358, 389), False, 'import logging\n'), ((903, 917), 'twisted.web.client.Agent', 'Agent', (['reactor'], {}), '(reactor)\n', (908, 917), False, 'from twisted.web.client import Agent, readBody\n'), ((1466, 1479), 'twisted.internet.reactor.run', 'reactor.run', ([], {}), '()\n', (1477, 1479), False, 'from twisted.internet import reactor\n'), ((1420, 1434), 'twisted.internet.reactor.stop', 'reactor.stop', ([], {}), '()\n', (1432, 1434), False, 'from twisted.internet import reactor\n'), ((1204, 1222), 'twisted.web.client.readBody', 'readBody', (['response'], {}), '(response)\n', (1212, 1222), False, 'from twisted.web.client import Agent, readBody\n')] |
import pytest
import logging
import ipaddress
import json
import re
import time
from tests.common.dualtor.dual_tor_mock import *
from tests.common.helpers.assertions import pytest_assert as pt_assert
from tests.common.dualtor.dual_tor_utils import rand_selected_interface, verify_upstream_traffic, get_crm_nexthop_counter
from tests.common.utilities import compare_crm_facts
from tests.common.config_reload import config_reload
from tests.common.dualtor.mux_simulator_control import toggle_all_simulator_ports
from tests.common.fixtures.ptfhost_utils import change_mac_addresses, run_garp_service, run_icmp_responder
logger = logging.getLogger(__file__)
pytestmark = [
pytest.mark.topology('t0'),
pytest.mark.usefixtures('apply_mock_dual_tor_tables', 'apply_mock_dual_tor_kernel_configs', 'run_garp_service', 'run_icmp_responder')
]
PAUSE_TIME = 10
def get_l2_rx_drop(host, itfs):
"""
Return L2 rx packet drop counter for given interface
"""
res = {}
stdout = host.shell("portstat -j")['stdout']
match = re.search("Last cached time was.*\n", stdout)
if match:
stdout = re.sub("Last cached time was.*\n", "", stdout)
data = json.loads(stdout)
return int(data[itfs]['RX_DRP'])
def clear_portstat(dut):
dut.shell("portstat -c")
@pytest.fixture(scope='module', autouse=True)
def test_cleanup(rand_selected_dut):
"""
Issue a config reload at the end of module
"""
yield
config_reload(rand_selected_dut)
def test_standby_tor_upstream_mux_toggle(
rand_selected_dut, tbinfo, ptfadapter, rand_selected_interface,
require_mocked_dualtor, toggle_all_simulator_ports, set_crm_polling_interval):
itfs, ip = rand_selected_interface
PKT_NUM = 100
# Step 1. Set mux state to standby and verify traffic is dropped by ACL rule and drop counters incremented
set_mux_state(rand_selected_dut, tbinfo, 'standby', [itfs], toggle_all_simulator_ports)
# Wait sometime for mux toggle
time.sleep(PAUSE_TIME)
crm_facts0 = rand_selected_dut.get_crm_facts()
# Verify packets are not go up
verify_upstream_traffic(host=rand_selected_dut,
ptfadapter=ptfadapter,
tbinfo=tbinfo,
itfs=itfs,
server_ip=ip['server_ipv4'].split('/')[0],
pkt_num=PKT_NUM,
drop=True)
time.sleep(5)
# Verify dropcounter is increased
drop_counter = get_l2_rx_drop(rand_selected_dut, itfs)
pt_assert(drop_counter >= PKT_NUM,
"RX_DRP for {} is expected to increase by {} actually {}".format(itfs, PKT_NUM, drop_counter))
# Step 2. Toggle mux state to active, and verify traffic is not dropped by ACL and fwd-ed to uplinks; verify CRM show and no nexthop objects are stale
set_mux_state(rand_selected_dut, tbinfo, 'active', [itfs], toggle_all_simulator_ports)
# Wait sometime for mux toggle
time.sleep(PAUSE_TIME)
# Verify packets are not go up
verify_upstream_traffic(host=rand_selected_dut,
ptfadapter=ptfadapter,
tbinfo=tbinfo,
itfs=itfs,
server_ip=ip['server_ipv4'].split('/')[0],
pkt_num=PKT_NUM,
drop=False)
# Step 3. Toggle mux state to standby, and verify traffic is dropped by ACL; verify CRM show and no nexthop objects are stale
set_mux_state(rand_selected_dut, tbinfo, 'standby', [itfs], toggle_all_simulator_ports)
# Wait sometime for mux toggle
time.sleep(PAUSE_TIME)
# Verify packets are not go up again
verify_upstream_traffic(host=rand_selected_dut,
ptfadapter=ptfadapter,
tbinfo=tbinfo,
itfs=itfs,
server_ip=ip['server_ipv4'].split('/')[0],
pkt_num=PKT_NUM,
drop=True)
# Verify dropcounter is increased
drop_counter = get_l2_rx_drop(rand_selected_dut, itfs)
pt_assert(drop_counter >= PKT_NUM,
"RX_DRP for {} is expected to increase by {} actually {}".format(itfs, PKT_NUM, drop_counter))
crm_facts1 = rand_selected_dut.get_crm_facts()
unmatched_crm_facts = compare_crm_facts(crm_facts0, crm_facts1)
pt_assert(len(unmatched_crm_facts)==0, 'Unmatched CRM facts: {}'.format(json.dumps(unmatched_crm_facts, indent=4)))
| [
"logging.getLogger",
"json.loads",
"tests.common.utilities.compare_crm_facts",
"pytest.mark.topology",
"json.dumps",
"time.sleep",
"re.sub",
"pytest.mark.usefixtures",
"pytest.fixture",
"tests.common.config_reload.config_reload",
"re.search"
] | [((627, 654), 'logging.getLogger', 'logging.getLogger', (['__file__'], {}), '(__file__)\n', (644, 654), False, 'import logging\n'), ((1290, 1334), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""', 'autouse': '(True)'}), "(scope='module', autouse=True)\n", (1304, 1334), False, 'import pytest\n'), ((675, 701), 'pytest.mark.topology', 'pytest.mark.topology', (['"""t0"""'], {}), "('t0')\n", (695, 701), False, 'import pytest\n'), ((707, 848), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""apply_mock_dual_tor_tables"""', '"""apply_mock_dual_tor_kernel_configs"""', '"""run_garp_service"""', '"""run_icmp_responder"""'], {}), "('apply_mock_dual_tor_tables',\n 'apply_mock_dual_tor_kernel_configs', 'run_garp_service',\n 'run_icmp_responder')\n", (730, 848), False, 'import pytest\n'), ((1040, 1085), 're.search', 're.search', (['"""Last cached time was.*\n"""', 'stdout'], {}), "('Last cached time was.*\\n', stdout)\n", (1049, 1085), False, 'import re\n'), ((1175, 1193), 'json.loads', 'json.loads', (['stdout'], {}), '(stdout)\n', (1185, 1193), False, 'import json\n'), ((1449, 1481), 'tests.common.config_reload.config_reload', 'config_reload', (['rand_selected_dut'], {}), '(rand_selected_dut)\n', (1462, 1481), False, 'from tests.common.config_reload import config_reload\n'), ((1976, 1998), 'time.sleep', 'time.sleep', (['PAUSE_TIME'], {}), '(PAUSE_TIME)\n', (1986, 1998), False, 'import time\n'), ((2430, 2443), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (2440, 2443), False, 'import time\n'), ((2977, 2999), 'time.sleep', 'time.sleep', (['PAUSE_TIME'], {}), '(PAUSE_TIME)\n', (2987, 2999), False, 'import time\n'), ((3639, 3661), 'time.sleep', 'time.sleep', (['PAUSE_TIME'], {}), '(PAUSE_TIME)\n', (3649, 3661), False, 'import time\n'), ((4367, 4408), 'tests.common.utilities.compare_crm_facts', 'compare_crm_facts', (['crm_facts0', 'crm_facts1'], {}), '(crm_facts0, crm_facts1)\n', (4384, 4408), False, 'from tests.common.utilities import 
compare_crm_facts\n'), ((1117, 1163), 're.sub', 're.sub', (['"""Last cached time was.*\n"""', '""""""', 'stdout'], {}), "('Last cached time was.*\\n', '', stdout)\n", (1123, 1163), False, 'import re\n'), ((4485, 4526), 'json.dumps', 'json.dumps', (['unmatched_crm_facts'], {'indent': '(4)'}), '(unmatched_crm_facts, indent=4)\n', (4495, 4526), False, 'import json\n')] |
'''
Classes from the 'SplitKit' framework.
'''
try:
from rubicon.objc import ObjCClass
except ValueError:
def ObjCClass(name):
return None
def _Class(name):
try:
return ObjCClass(name)
except NameError:
return None
PodsDummy_SplitKit = _Class('PodsDummy_SplitKit')
InstantPanGestureRecognizer = _Class('SplitKit.InstantPanGestureRecognizer')
HandleView = _Class('SplitKit.HandleView')
SPKSplitViewController = _Class('SPKSplitViewController')
| [
"rubicon.objc.ObjCClass"
] | [((200, 215), 'rubicon.objc.ObjCClass', 'ObjCClass', (['name'], {}), '(name)\n', (209, 215), False, 'from rubicon.objc import ObjCClass\n')] |
"""Module controling search and replace tab."""
import logging
from wiki_music.constants import GUI_HEADERS
from wiki_music.gui_lib import BaseGui, CheckableListModel
from wiki_music.gui_lib.qt_importer import QMessageBox, QPushButton, QIcon, QStyle
__all__ = ["Replacer"]
log = logging.getLogger(__name__)
log.debug("finished gui search & replace imports")
class Replacer(BaseGui):
"""Controls the search and replace tab in GUI.
Warnings
--------
This class is not ment to be instantiated, only inherited.
"""
def __init__(self) -> None:
super().__init__()
self.replace_tag_selector_model = CheckableListModel()
self._fill_tags_list()
def _fill_tags_list(self):
"""Create a checkable list with table column name headers."""
for tag in GUI_HEADERS:
self.replace_tag_selector_model.add(tag)
self.replace_tag_selector_view.setModel(
self.replace_tag_selector_model)
def _setup_search_replace(self):
"""Connect to signals essential for search and replace tab."""
# re-run search when search columns are reselected
self.replace_tag_selector_model.itemChanged.connect(
self._search_replace_run)
# re-run search when options are checked
self.search_support_re.stateChanged.connect(
self._search_replace_run)
self.search_support_wildcard.stateChanged.connect(
self._search_replace_run)
self.search_case_sensitive.stateChanged.connect(
self._search_replace_run)
# connect to control buttons
self.search_next.clicked.connect(self.tableView.search_next)
self.search_previous.clicked.connect(self.tableView.search_previous)
self.replace_one.clicked.connect(
lambda: self.tableView.replace_one(
self.search_string_input.text(),
self.replace_string_input.text()))
self.replace_all.clicked.connect(
lambda: self.tableView.replace_all(
self.search_string_input.text(),
self.replace_string_input.text()
))
# search is run interacively as user is typing
self.search_string_input.textChanged.connect(
self._search_replace_run)
# on tab change change selection and search highlight mode
self.tool_tab.currentChanged.connect(
self.tableView.set_search_visibility)
# seems that filtering is done by rows
# self.search_string_input.textChanged.connect(
# self.proxy.setFilterFixedString)
def _search_replace_run(self, string: str):
"""Process search parameters and call table search method.
Parameters
----------
string: str
string to search for
"""
if (self.search_support_re.isChecked() and
self.search_support_wildcard.isChecked()):
msg = QMessageBox(QMessageBox.Warning, "Warning",
"Attempting to use regex and wildcards at once "
"may return unexpected results. "
"Do you want to proceed?",
QMessageBox.Yes | QMessageBox.No)
if msg.exec_() == QMessageBox.No:
return
else:
log.warning("Wildcard and regex used at once in search")
self.tableView.search_string(
self.search_string_input.text(),
self.search_case_sensitive.isChecked(),
self.search_support_re.isChecked(),
self.search_support_wildcard.isChecked(),
self.replace_tag_selector_model.get_checked_indices()
)
| [
"logging.getLogger",
"wiki_music.gui_lib.qt_importer.QMessageBox",
"wiki_music.gui_lib.CheckableListModel"
] | [((282, 309), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (299, 309), False, 'import logging\n'), ((641, 661), 'wiki_music.gui_lib.CheckableListModel', 'CheckableListModel', ([], {}), '()\n', (659, 661), False, 'from wiki_music.gui_lib import BaseGui, CheckableListModel\n'), ((2960, 3150), 'wiki_music.gui_lib.qt_importer.QMessageBox', 'QMessageBox', (['QMessageBox.Warning', '"""Warning"""', '"""Attempting to use regex and wildcards at once may return unexpected results. Do you want to proceed?"""', '(QMessageBox.Yes | QMessageBox.No)'], {}), "(QMessageBox.Warning, 'Warning',\n 'Attempting to use regex and wildcards at once may return unexpected results. Do you want to proceed?'\n , QMessageBox.Yes | QMessageBox.No)\n", (2971, 3150), False, 'from wiki_music.gui_lib.qt_importer import QMessageBox, QPushButton, QIcon, QStyle\n')] |
#%% [markdown]
#
# We will load EEG data from the lab and attemp to build a classifier that distinguishes between learners and non-learners
#%%
import mne
import numpy as np
import os.path
import glob
import re
import pandas as pd
# try to enable cuda support to speed up filtering, make sure the MNE_USE_CUDA environment variable is set to true
mne.cuda.init_cuda()
DATA_DIR = "../../EEGdata/Fish_5Block"
event_dict = {
"cat":{
"1": 20,
"2": 21
}
}
data_path = os.path.join(DATA_DIR, "Tail/Learner/126670_EXP_FISH.bdf")
test_data = mne.io.read_raw_edf(data_path, preload=True)
# find the related behavioral data
participant_number = re.search(r"^(\d+)_EXP_FISH", os.path.basename(data_path))[1]
behav_path = [filename for filename in glob.glob(os.path.join(DATA_DIR, "EXP_fish2_Tomy/Cat_data/*.csv")) if participant_number in filename][0]
behav_df = pd.read_csv(behav_path)
learning_curve = behav_df["Resultat"].rolling(20).mean() # our in house definition of current learning performance
learning_time = (learning_curve >= 0.8).idxmax() # using a 80% correct categorization threshold
#%% [markdown]
# We now need to find the event times and give the same code to all stimulus presentation events since we don't want to differentiate among category 1 or 2
#%%
events = mne.find_events(test_data)
events = np.array(events)
events[events[:,2]==event_dict["cat"]["2"],2] = 20
events = events.tolist()
#%% [markdown]
# visualize data
#%%
#test_data.plot()
#%%
test_data.set_eeg_reference("average", projection=False)
test_data.filter(0.1, 50.0, n_jobs="cuda")
stim_epochs = mne.Epochs(test_data, events=events, event_id={"stimulus presentation":20}, tmin=-0.2, tmax=0.8, reject={"eeg":200-6})
# do basic cleaning by bandpass filtering, we will need to load the data
stim_epochs.load_data()
stim_epochs.resample(256)
#%% building the pytorch model
pass
| [
"mne.cuda.init_cuda",
"pandas.read_csv",
"mne.find_events",
"mne.Epochs",
"numpy.array",
"mne.io.read_raw_edf"
] | [((361, 381), 'mne.cuda.init_cuda', 'mne.cuda.init_cuda', ([], {}), '()\n', (379, 381), False, 'import mne\n'), ((591, 635), 'mne.io.read_raw_edf', 'mne.io.read_raw_edf', (['data_path'], {'preload': '(True)'}), '(data_path, preload=True)\n', (610, 635), False, 'import mne\n'), ((913, 936), 'pandas.read_csv', 'pd.read_csv', (['behav_path'], {}), '(behav_path)\n', (924, 936), True, 'import pandas as pd\n'), ((1357, 1383), 'mne.find_events', 'mne.find_events', (['test_data'], {}), '(test_data)\n', (1372, 1383), False, 'import mne\n'), ((1394, 1410), 'numpy.array', 'np.array', (['events'], {}), '(events)\n', (1402, 1410), True, 'import numpy as np\n'), ((1675, 1801), 'mne.Epochs', 'mne.Epochs', (['test_data'], {'events': 'events', 'event_id': "{'stimulus presentation': 20}", 'tmin': '(-0.2)', 'tmax': '(0.8)', 'reject': "{'eeg': 200 - 6}"}), "(test_data, events=events, event_id={'stimulus presentation': 20},\n tmin=-0.2, tmax=0.8, reject={'eeg': 200 - 6})\n", (1685, 1801), False, 'import mne\n')] |
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy import integrate, optimize
from scipy.signal import savgol_filter
from dane import population as popu
dias_restar = 4 # Los últimos días de información que no se tienen en cuenta
dias_pred = 31 # Días sobre los cuáles se hará la predicción a corto plazo
media_movil = 4 # Días que se promediaran en las series para mitigar errores en los datos
Ciudades_dicc = {'Bog': 'Bogotá D.C.', 'Mde': 'Medellín', 'Cal': 'Cali', 'Brr': 'Barranquilla',
'Ctg': 'Cartagena de Indias'}
Ciudades = ['Bog','Mde','Cal', 'Brr', 'Ctg']
Covid_Col = pd.read_csv("https://www.datos.gov.co/api/views/gt2j-8ykr/rows.csv?accessType=DOWNLOAD", sep=',',
encoding='utf-8', low_memory=False)
def limpieza_datos():
# Covid_Col=pd.read_csv("C:\Users\danie\DS\vagrant4docker-master\laboratorios\covid-19-guaya-kilera\Casos_positivos_de_COVID-19_en_Colombia.csv", sep=',', encoding='utf-8', low_memory=False)
Covid_Col.drop(['ID de caso', 'Código DIVIPOLA', 'Departamento o Distrito ', 'País de procedencia', 'Tipo',
'Codigo departamento',
'Codigo pais', 'Tipo recuperación', 'Pertenencia etnica', 'Nombre grupo etnico', 'atención'],
axis=1, inplace=True)
Covid_Col['FIS'] = Covid_Col['FIS'].replace('Asintomático', np.nan)
Covid_Col['FIS'] = pd.to_datetime(Covid_Col['FIS'].str[:10])
Covid_Col['fecha reporte web'] = pd.to_datetime(Covid_Col['fecha reporte web'].str[:10])
Covid_Col['Fecha de notificación'] = pd.to_datetime(Covid_Col['Fecha de notificación'].str[:10])
Covid_Col['Fecha de muerte'] = pd.to_datetime(Covid_Col['Fecha de muerte'].str[:10])
Covid_Col['Fecha diagnostico'] = pd.to_datetime(Covid_Col['Fecha diagnostico'].str[:10])
Covid_Col['Fecha recuperado'] = pd.to_datetime(Covid_Col['Fecha recuperado'].str[:10])
# Covid_Col[(Covid_Col['Fecha diagnostico']<Covid_Col['Fecha de notificación']) & Covid_Col['FIS'].isnull()]
Covid_Col['Fecha contagio'] = Covid_Col['FIS']
Covid_Col.loc[Covid_Col['Fecha contagio'].isnull(), 'Fecha contagio'] = Covid_Col['Fecha de notificación']
Covid_Col.drop(['Fecha de notificación', 'FIS', 'Fecha diagnostico', 'fecha reporte web'], axis=1, inplace=True)
Covid_Col['Cantidad de personas'] = 1
Fecha_Inicio = Covid_Col['Fecha contagio'][0]
Fecha_Fin = max(Covid_Col['Fecha contagio']) - pd.to_timedelta(dias_restar, unit='d')
Fecha_Fin_pred = Fecha_Fin + pd.to_timedelta(dias_pred - 1, unit='d')
globals()['Fechas_pred_i'] = pd.date_range(start=Fecha_Inicio, end=Fecha_Fin_pred)
Fechas_evaluar_i = pd.date_range(start=Fecha_Inicio, end=Fecha_Fin)
Fechas_evaluar = pd.DataFrame(index=Fechas_evaluar_i)
for ciudad in Ciudades:
globals()["Covid_" + str(ciudad)] = Covid_Col[Covid_Col['Ciudad de ubicación'] == Ciudades_dicc[ciudad]]
globals()["nuevos_" + str(ciudad)] = globals()["Covid_" + str(ciudad)].groupby('Fecha contagio').sum()
globals()["nuevos_" + str(ciudad)].drop(['Edad'], axis=1, inplace=True)
globals()["nuevos_" + str(ciudad)] = pd.merge(Fechas_evaluar, globals()["nuevos_" + str(ciudad)], \
how='left', left_index=True, right_index=True)
globals()["nuevos_" + str(ciudad)] = globals()["nuevos_" + str(ciudad)].replace(np.nan, 0)
globals()["confirmados_" + str(ciudad)] = globals()["nuevos_" + str(ciudad)].cumsum()
globals()["nuevos_" + str(ciudad)].rename(columns={'Cantidad de personas': "Casos_nuevos_"}, inplace=True)
globals()["confirmados_" + str(ciudad)].rename(columns={'Cantidad de personas': "Casos_confirmados_"},
inplace=True)
globals()["recuperados_" + str(ciudad)] = globals()["Covid_" + str(ciudad)].groupby('Fecha recuperado').sum()
globals()["recuperados_" + str(ciudad)].drop(['Edad'], axis=1, inplace=True)
globals()["recuperados_" + str(ciudad)] = pd.merge(Fechas_evaluar, globals()["recuperados_" + str(ciudad)], \
how='left', left_index=True, right_index=True)
globals()["recuperados_" + str(ciudad)] = globals()["recuperados_" + str(ciudad)].replace(np.nan, 0)
# globals()["recuperados_" + str(ciudad)]=globals()["recuperados_" + str(ciudad)].cumsum()
globals()["recuperados_" + str(ciudad)].rename(columns={'Cantidad de personas': "Casos_recuperados_"},
inplace=True)
globals()["muertes_" + str(ciudad)] = globals()["Covid_" + str(ciudad)].groupby('Fecha de muerte').sum()
globals()["muertes_" + str(ciudad)].drop(['Edad'], axis=1, inplace=True)
globals()["muertes_" + str(ciudad)] = pd.merge(Fechas_evaluar, globals()["muertes_" + str(ciudad)], how='left', \
left_index=True, right_index=True)
globals()["muertes_" + str(ciudad)] = globals()["muertes_" + str(ciudad)].replace(np.nan, 0)
# globals()["muertes_" + str(ciudad)]=globals()["muertes_" + str(ciudad)].cumsum()
globals()["muertes_" + str(ciudad)].rename(columns={'Cantidad de personas': "muertes_"}, inplace=True)
globals()["activos_" + str(ciudad)] = pd.concat([globals()["confirmados_" + str(ciudad)], \
globals()["recuperados_" + str(ciudad)],
globals()["muertes_" + str(ciudad)],
globals()["nuevos_" + str(ciudad)]], axis=1)
globals()["activos_" + str(ciudad)]['Casos_activos_'] = globals()["activos_" + str(ciudad)][
"Casos_confirmados_"] - \
globals()["activos_" + str(ciudad)][
"Casos_recuperados_"].cumsum() - \
globals()["activos_" + str(ciudad)]["muertes_"].cumsum()
globals()["Casos_" + str(ciudad)] = globals()["activos_" + str(ciudad)].copy()
globals()["activos_" + str(ciudad)].drop(
["Casos_confirmados_", "Casos_recuperados_", "muertes_", "Casos_nuevos_"], axis=1, inplace=True)
globals()["Casos_" + str(ciudad)]["Total_recuperados_"] = globals()["Casos_" + str(ciudad)][
"Casos_recuperados_"].cumsum()
globals()["Casos_" + str(ciudad)]["Total_muertes_"] = globals()["Casos_" + str(ciudad)]["muertes_"].cumsum()
#%%
limpieza_datos()
#%%
def casos():
for ciudad in Ciudades:
globals()['N'+str(ciudad)] = popu(ciudad)
globals()['real_'+str(ciudad)] = [i for i in globals()["confirmados_" + str(ciudad)]['Casos_confirmados_']]
globals()['poly_pred_'+str(ciudad)] = savgol_filter(globals()['real_'+str(ciudad)], 51,3) # window size 51, polynomial order 1
globals()['df_pred_'+str(ciudad)] = pd.DataFrame(globals()['poly_pred_'+str(ciudad)])
globals()['df_real_'+str(ciudad)] = pd.DataFrame(globals()['real_'+str(ciudad)]) #Casos confirmados por día desde el caso 0
# return N,df_poly,df_vec_real,poly,vec_real_140,ciudad
# plt.figure(figsize=(12,6))
# plt.plot(globals()['poly_pred_'+str(ciudad)])
# plt.plot(globals()['real_'+str(ciudad)])
# plt.legend(["Predicción","Real"], loc='upper left')
# plt.title("Infecciones por COVID-19 desde el primer caso"+" "+ str(Ciudades_dicc.get(ciudad)), size=15)
# plt.xlabel("Days", size=13)
# plt.ylabel("Infecciones", size=13)
# plt.ylim(0, max(globals()['real_'+str(ciudad)])+1000)
# plt.show()
N = globals()['N'+str(ciudad)]
depart_df = pd.DataFrame()
depart_df['ConfirmedCases'] = globals()['real_'+str(ciudad)]
depart_df = depart_df[10:]
depart_df['day_count'] = list(range(1,len(depart_df)+1))
ydata = [i for i in depart_df.ConfirmedCases]
xdata = depart_df.day_count
ydata = np.array(ydata, dtype=float)
xdata = np.array(xdata, dtype=float)
inf0 = ydata[0]
sus0 = N - inf0
rec0 = 0.0
def sir_model(y, x, beta, gamma):
sus = -beta * y[0] * y[1] / N
rec = gamma * y[1]
inf = -(sus + rec)
return sus, inf, rec
def fit_odeint(x, beta, gamma):
return integrate.odeint(sir_model, (sus0, inf0, rec0), x, args=(beta, gamma))[:,1]
if ciudad == 'Bog':
popt = np.array([0.2783922953043075, 0.2165019796859231])
else:
popt, pcov = optimize.curve_fit(fit_odeint, xdata, ydata, maxfev=5000)
fitted = fit_odeint(xdata, *popt)
plt.plot(xdata, ydata, 'o')
plt.plot(xdata, fitted)
plt.title("Modelo SIR"+" "+ str(Ciudades_dicc.get(ciudad)), size=15)
plt.ylabel("Population infected")
plt.xlabel("Days")
plt.show()
print("Optimal parameters: beta =", popt[0], " and gamma = ", popt[1])
#%%
casos()
#%%
# t = np.linspace(0,400,400)
# import plotly.offline as py
#
# for ciudad in Ciudades:
# py.iplot([{
# 'x': t,
# 'y': globals()['real_' + str(ciudad)]
# }], filename='cufflinks/multiple-lines-on-same-chart')
#
# max(globals()['real_' + str(ciudad)])
#%%
valores = [(popt[0],popt[1])]
def modelo(beta,gamma):
# Initial values
I0, R0 = ydata[0], 0
###
S0 = N - I0 - R0
def deriv(y,t,N,beta, gamma):
S,I,R = y
dSdt = -beta * S * I /N
dIdt = beta * S * I /N - gamma * I
dRdt = gamma * I
return dSdt, dIdt, dRdt
#Vector de condiciones iniciales
y0 = S0, I0, R0
#Solucion Equation System
ret = integrate.odeint(deriv, y0, t, args=(N, beta, gamma))
S, I, R =ret.T
return I
import cufflinks as cf
import plotly.offline as py
py.iplot([{
'x':t,
'y': modelo(*valor),
'name': str(valor),
} for valor in valores], filename = 'cufflinks/multiple-lines-on-same-chart')
# plt.figure(figsize=(12, 8))
# #plt.plot(modelo(0.42715777117416, 0.36645292847392247)[0])
# plt.plot(modelo(0.42715777117416, 0.36645292847392247)[1])
# # plt.plot(modelo(0.42715777117416, 0.36645292847392247)[2])
# plt.ylabel('Población')
# plt.legend(['Susceptible', 'Infectados', 'Recuperados'])
# plt.xlabel('Días')
# plt.show()
| [
"scipy.optimize.curve_fit",
"pandas.to_timedelta",
"pandas.read_csv",
"matplotlib.pyplot.ylabel",
"scipy.integrate.odeint",
"dane.population",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"numpy.array",
"pandas.date_range",
"pandas.DataFrame",
"pandas.to_datetime",
"matplotlib.pyplo... | [((631, 774), 'pandas.read_csv', 'pd.read_csv', (['"""https://www.datos.gov.co/api/views/gt2j-8ykr/rows.csv?accessType=DOWNLOAD"""'], {'sep': '""","""', 'encoding': '"""utf-8"""', 'low_memory': '(False)'}), "(\n 'https://www.datos.gov.co/api/views/gt2j-8ykr/rows.csv?accessType=DOWNLOAD'\n , sep=',', encoding='utf-8', low_memory=False)\n", (642, 774), True, 'import pandas as pd\n'), ((1418, 1459), 'pandas.to_datetime', 'pd.to_datetime', (["Covid_Col['FIS'].str[:10]"], {}), "(Covid_Col['FIS'].str[:10])\n", (1432, 1459), True, 'import pandas as pd\n'), ((1497, 1552), 'pandas.to_datetime', 'pd.to_datetime', (["Covid_Col['fecha reporte web'].str[:10]"], {}), "(Covid_Col['fecha reporte web'].str[:10])\n", (1511, 1552), True, 'import pandas as pd\n'), ((1595, 1654), 'pandas.to_datetime', 'pd.to_datetime', (["Covid_Col['Fecha de notificación'].str[:10]"], {}), "(Covid_Col['Fecha de notificación'].str[:10])\n", (1609, 1654), True, 'import pandas as pd\n'), ((1689, 1742), 'pandas.to_datetime', 'pd.to_datetime', (["Covid_Col['Fecha de muerte'].str[:10]"], {}), "(Covid_Col['Fecha de muerte'].str[:10])\n", (1703, 1742), True, 'import pandas as pd\n'), ((1780, 1835), 'pandas.to_datetime', 'pd.to_datetime', (["Covid_Col['Fecha diagnostico'].str[:10]"], {}), "(Covid_Col['Fecha diagnostico'].str[:10])\n", (1794, 1835), True, 'import pandas as pd\n'), ((1872, 1926), 'pandas.to_datetime', 'pd.to_datetime', (["Covid_Col['Fecha recuperado'].str[:10]"], {}), "(Covid_Col['Fecha recuperado'].str[:10])\n", (1886, 1926), True, 'import pandas as pd\n'), ((2613, 2666), 'pandas.date_range', 'pd.date_range', ([], {'start': 'Fecha_Inicio', 'end': 'Fecha_Fin_pred'}), '(start=Fecha_Inicio, end=Fecha_Fin_pred)\n', (2626, 2666), True, 'import pandas as pd\n'), ((2691, 2739), 'pandas.date_range', 'pd.date_range', ([], {'start': 'Fecha_Inicio', 'end': 'Fecha_Fin'}), '(start=Fecha_Inicio, end=Fecha_Fin)\n', (2704, 2739), True, 'import pandas as pd\n'), ((2761, 2797), 
'pandas.DataFrame', 'pd.DataFrame', ([], {'index': 'Fechas_evaluar_i'}), '(index=Fechas_evaluar_i)\n', (2773, 2797), True, 'import pandas as pd\n'), ((9772, 9825), 'scipy.integrate.odeint', 'integrate.odeint', (['deriv', 'y0', 't'], {'args': '(N, beta, gamma)'}), '(deriv, y0, t, args=(N, beta, gamma))\n', (9788, 9825), False, 'from scipy import integrate, optimize\n'), ((2466, 2504), 'pandas.to_timedelta', 'pd.to_timedelta', (['dias_restar'], {'unit': '"""d"""'}), "(dias_restar, unit='d')\n", (2481, 2504), True, 'import pandas as pd\n'), ((2538, 2578), 'pandas.to_timedelta', 'pd.to_timedelta', (['(dias_pred - 1)'], {'unit': '"""d"""'}), "(dias_pred - 1, unit='d')\n", (2553, 2578), True, 'import pandas as pd\n'), ((6881, 6893), 'dane.population', 'popu', (['ciudad'], {}), '(ciudad)\n', (6885, 6893), True, 'from dane import population as popu\n'), ((7915, 7929), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (7927, 7929), True, 'import pandas as pd\n'), ((8181, 8209), 'numpy.array', 'np.array', (['ydata'], {'dtype': 'float'}), '(ydata, dtype=float)\n', (8189, 8209), True, 'import numpy as np\n'), ((8222, 8250), 'numpy.array', 'np.array', (['xdata'], {'dtype': 'float'}), '(xdata, dtype=float)\n', (8230, 8250), True, 'import numpy as np\n'), ((8818, 8845), 'matplotlib.pyplot.plot', 'plt.plot', (['xdata', 'ydata', '"""o"""'], {}), "(xdata, ydata, 'o')\n", (8826, 8845), True, 'import matplotlib.pyplot as plt\n'), ((8850, 8873), 'matplotlib.pyplot.plot', 'plt.plot', (['xdata', 'fitted'], {}), '(xdata, fitted)\n', (8858, 8873), True, 'import matplotlib.pyplot as plt\n'), ((8951, 8984), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Population infected"""'], {}), "('Population infected')\n", (8961, 8984), True, 'import matplotlib.pyplot as plt\n'), ((8989, 9007), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Days"""'], {}), "('Days')\n", (8999, 9007), True, 'import matplotlib.pyplot as plt\n'), ((9012, 9022), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', 
(9020, 9022), True, 'import matplotlib.pyplot as plt\n'), ((8635, 8685), 'numpy.array', 'np.array', (['[0.2783922953043075, 0.2165019796859231]'], {}), '([0.2783922953043075, 0.2165019796859231])\n', (8643, 8685), True, 'import numpy as np\n'), ((8717, 8774), 'scipy.optimize.curve_fit', 'optimize.curve_fit', (['fit_odeint', 'xdata', 'ydata'], {'maxfev': '(5000)'}), '(fit_odeint, xdata, ydata, maxfev=5000)\n', (8735, 8774), False, 'from scipy import integrate, optimize\n'), ((8520, 8590), 'scipy.integrate.odeint', 'integrate.odeint', (['sir_model', '(sus0, inf0, rec0)', 'x'], {'args': '(beta, gamma)'}), '(sir_model, (sus0, inf0, rec0), x, args=(beta, gamma))\n', (8536, 8590), False, 'from scipy import integrate, optimize\n')] |
#!/usr/bin/env python
# coding: utf-8
# In[7]:
import os
write_to_csv_file = 'million_song_subset.csv'
csv_file_read = open(write_to_csv_file,'r')
csv_file_write = open(write_to_csv_file,'a')
while True:
next_line = csv_file_read.readline()
if not next_line:
break
csv_file_size = os.path.getsize(write_to_csv_file)
print("file size: {}".format(str(csv_file_size/1048576)))
# if the csv file larger than or euqal to 5GB exist for loop
if csv_file_size >= 5368709120:
break
if next_line.startswith("song_id"):
continue
csv_file_write.write(next_line)
print("appended: {}".format(next_line))
csv_file_read.close()
csv_file_write.close()
# In[ ]:
| [
"os.path.getsize"
] | [((314, 348), 'os.path.getsize', 'os.path.getsize', (['write_to_csv_file'], {}), '(write_to_csv_file)\n', (329, 348), False, 'import os\n')] |
# Copyright (c) 2013, Regents of the University of California
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer. Redistributions in binary
# form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with
# the distribution. Neither the name of the University of California, Berkeley
# nor the names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission. THIS
# SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import errno
import os
from matplotlib import use, rc
use('Agg')
import matplotlib.pyplot as plt
def mkdir_p(path):
path = path.replace(" ", "_")
dir_path = os.path.dirname(path)
try:
os.makedirs(dir_path)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(dir_path):
pass
else:
raise
return path
# plot saving utility function
def writeout(filename_base, formats=['pdf']):
mkdir_p(os.path.dirname(filename_base))
for fmt in formats:
plt.savefig("%s.%s" % (filename_base, fmt), format=fmt, bbox_inches='tight')
# plt.savefig("%s.%s" % (filename_base, fmt), format=fmt)
def set_leg_fontsize(size):
rc('legend', fontsize=size)
def set_paper_rcs():
rc('font', **{'family': 'sans-serif', 'sans-serif': ['Helvetica'],
'serif': ['Helvetica'], 'size': 22})
# rc('text', usetex=True)
# rc('legend', fontsize=7)
# rc('figure', figsize=(3.33, 2.22))
# # rc('figure.subplot', left=0.10, top=0.90, bottom=0.12, right=0.95)
# rc('axes', linewidth=0.5)
rc('lines', linewidth=4)
# rc('figure', figsize=[20, 6])
def set_rcs():
rc('font', **{'family': 'sans-serif', 'sans-serif': ['Helvetica'],
'serif': ['Times'], 'size': 12})
rc('text', usetex=True)
rc('legend', fontsize=7)
rc('figure', figsize=(6, 4))
rc('figure.subplot', left=0.10, top=0.90, bottom=0.12, right=0.95)
rc('axes', linewidth=0.5)
rc('lines', linewidth=0.5, color='y')
def append_or_create(d, i, e):
if i not in d:
d[i] = [e]
else:
d[i].append(e)
# Append e to the array at position (i,k).
# d - a dictionary of dictionaries of arrays, essentially a 2d dictionary.
# i, k - essentially a 2 element tuple to use as the key into this 2d dict.
# e - the value to add to the array indexed by key (i,k).
def append_or_create_2d(d, i, k, e):
if i not in d:
d[i] = {k: [e]}
elif k not in d[i]:
d[i][k] = [e]
else:
d[i][k].append(e)
# Append e to the array at position (i,k).
# d - a dictionary of dictionaries of arrays, essentially a 2d dictionary.
# i, k - essentially a 2 element tuple to use as the key into this 2d dict.
# e - the value to add to the array indexed by key (i,k).
def append_or_create_3d(d, i, k, e, v):
if i not in d:
d[i] = {k: {e: [v]}}
elif k not in d[i]:
d[i][k] = {e: [v]}
elif e not in d[i][k]:
d[i][k][e] = [v]
else:
d[i][k][e].append(v)
def cell_to_anon(cell):
# if cell == 'A':
# return 'A'
# elif cell == 'B':
# return 'B'
# elif cell == 'C':
# return 'C'
# elif cell == 'Eurecom':
# return 'Eurecom'
# elif cell == 'example':
# return 'example'
# else:
# return 'SYNTH'
return cell
| [
"matplotlib.pyplot.savefig",
"os.makedirs",
"matplotlib.use",
"os.path.dirname",
"os.path.isdir",
"matplotlib.rc"
] | [((1618, 1628), 'matplotlib.use', 'use', (['"""Agg"""'], {}), "('Agg')\n", (1621, 1628), False, 'from matplotlib import use, rc\n'), ((1731, 1752), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (1746, 1752), False, 'import os\n'), ((2290, 2317), 'matplotlib.rc', 'rc', (['"""legend"""'], {'fontsize': 'size'}), "('legend', fontsize=size)\n", (2292, 2317), False, 'from matplotlib import use, rc\n'), ((2345, 2452), 'matplotlib.rc', 'rc', (['"""font"""'], {}), "('font', **{'family': 'sans-serif', 'sans-serif': ['Helvetica'], 'serif':\n ['Helvetica'], 'size': 22})\n", (2347, 2452), False, 'from matplotlib import use, rc\n'), ((2681, 2705), 'matplotlib.rc', 'rc', (['"""lines"""'], {'linewidth': '(4)'}), "('lines', linewidth=4)\n", (2683, 2705), False, 'from matplotlib import use, rc\n'), ((2763, 2866), 'matplotlib.rc', 'rc', (['"""font"""'], {}), "('font', **{'family': 'sans-serif', 'sans-serif': ['Helvetica'], 'serif':\n ['Times'], 'size': 12})\n", (2765, 2866), False, 'from matplotlib import use, rc\n'), ((2885, 2908), 'matplotlib.rc', 'rc', (['"""text"""'], {'usetex': '(True)'}), "('text', usetex=True)\n", (2887, 2908), False, 'from matplotlib import use, rc\n'), ((2913, 2937), 'matplotlib.rc', 'rc', (['"""legend"""'], {'fontsize': '(7)'}), "('legend', fontsize=7)\n", (2915, 2937), False, 'from matplotlib import use, rc\n'), ((2942, 2970), 'matplotlib.rc', 'rc', (['"""figure"""'], {'figsize': '(6, 4)'}), "('figure', figsize=(6, 4))\n", (2944, 2970), False, 'from matplotlib import use, rc\n'), ((2975, 3039), 'matplotlib.rc', 'rc', (['"""figure.subplot"""'], {'left': '(0.1)', 'top': '(0.9)', 'bottom': '(0.12)', 'right': '(0.95)'}), "('figure.subplot', left=0.1, top=0.9, bottom=0.12, right=0.95)\n", (2977, 3039), False, 'from matplotlib import use, rc\n'), ((3046, 3071), 'matplotlib.rc', 'rc', (['"""axes"""'], {'linewidth': '(0.5)'}), "('axes', linewidth=0.5)\n", (3048, 3071), False, 'from matplotlib import use, rc\n'), ((3076, 3113), 
'matplotlib.rc', 'rc', (['"""lines"""'], {'linewidth': '(0.5)', 'color': '"""y"""'}), "('lines', linewidth=0.5, color='y')\n", (3078, 3113), False, 'from matplotlib import use, rc\n'), ((1770, 1791), 'os.makedirs', 'os.makedirs', (['dir_path'], {}), '(dir_path)\n', (1781, 1791), False, 'import os\n'), ((2056, 2086), 'os.path.dirname', 'os.path.dirname', (['filename_base'], {}), '(filename_base)\n', (2071, 2086), False, 'import os\n'), ((2120, 2196), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('%s.%s' % (filename_base, fmt))"], {'format': 'fmt', 'bbox_inches': '"""tight"""'}), "('%s.%s' % (filename_base, fmt), format=fmt, bbox_inches='tight')\n", (2131, 2196), True, 'import matplotlib.pyplot as plt\n'), ((1875, 1898), 'os.path.isdir', 'os.path.isdir', (['dir_path'], {}), '(dir_path)\n', (1888, 1898), False, 'import os\n')] |
from flask_wtf import FlaskForm
from wtforms import validators
from wtforms.fields import *
class login_form(FlaskForm):
email = EmailField('Email Address', [
validators.DataRequired(),
])
password = PasswordField('Password', [
validators.DataRequired(),
validators.length(min=6, max=35)
])
submit = SubmitField()
class register_form(FlaskForm):
email = EmailField('Email Address', [
validators.DataRequired(),
], description="You need to signup with an email")
password = PasswordField('Create Password', [
validators.DataRequired(),
validators.EqualTo('confirm', message='Passwords must match'),
], description="Create a password ")
confirm = PasswordField('Repeat Password', description="Please retype your password to confirm it is correct")
submit = SubmitField()
class create_user_form(FlaskForm):
email = EmailField('Email Address', [
validators.DataRequired(),
], description="You need to signup with an email")
password = PasswordField('Create Password', [
validators.DataRequired(),
validators.EqualTo('confirm', message='Passwords must match'),
], description="Create a password ")
confirm = PasswordField('Repeat Password', description="Please retype your password to confirm it is correct")
is_admin = BooleanField('Admin', render_kw={'value':'1'})
submit = SubmitField()
class profile_form(FlaskForm):
about = TextAreaField('About', [validators.length(min=6, max=300)],
description="Please add information about yourself")
submit = SubmitField()
class user_edit_form(FlaskForm):
about = TextAreaField('About', [validators.length(min=6, max=300)],
description="Please add information about yourself")
is_admin = BooleanField('Admin', render_kw={'value':'1'})
submit = SubmitField()
class security_form(FlaskForm):
email = EmailField('Email Address', [
validators.DataRequired(),
], description="You can change your email address")
password = PasswordField('Create A New Password', [
validators.DataRequired(),
validators.EqualTo('confirm', message='Passwords must match'),
], description="Create a password ")
confirm = PasswordField('Re-<PASSWORD> New Password', description="Please retype your password to confirm it is correct")
submit = SubmitField()
class csv_upload(FlaskForm):
file = FileField()
submit = SubmitField() | [
"wtforms.validators.length",
"wtforms.validators.DataRequired",
"wtforms.validators.EqualTo"
] | [((173, 198), 'wtforms.validators.DataRequired', 'validators.DataRequired', ([], {}), '()\n', (196, 198), False, 'from wtforms import validators\n'), ((259, 284), 'wtforms.validators.DataRequired', 'validators.DataRequired', ([], {}), '()\n', (282, 284), False, 'from wtforms import validators\n'), ((294, 326), 'wtforms.validators.length', 'validators.length', ([], {'min': '(6)', 'max': '(35)'}), '(min=6, max=35)\n', (311, 326), False, 'from wtforms import validators\n'), ((445, 470), 'wtforms.validators.DataRequired', 'validators.DataRequired', ([], {}), '()\n', (468, 470), False, 'from wtforms import validators\n'), ((587, 612), 'wtforms.validators.DataRequired', 'validators.DataRequired', ([], {}), '()\n', (610, 612), False, 'from wtforms import validators\n'), ((622, 683), 'wtforms.validators.EqualTo', 'validators.EqualTo', (['"""confirm"""'], {'message': '"""Passwords must match"""'}), "('confirm', message='Passwords must match')\n", (640, 683), False, 'from wtforms import validators\n'), ((955, 980), 'wtforms.validators.DataRequired', 'validators.DataRequired', ([], {}), '()\n', (978, 980), False, 'from wtforms import validators\n'), ((1097, 1122), 'wtforms.validators.DataRequired', 'validators.DataRequired', ([], {}), '()\n', (1120, 1122), False, 'from wtforms import validators\n'), ((1132, 1193), 'wtforms.validators.EqualTo', 'validators.EqualTo', (['"""confirm"""'], {'message': '"""Passwords must match"""'}), "('confirm', message='Passwords must match')\n", (1150, 1193), False, 'from wtforms import validators\n'), ((1510, 1543), 'wtforms.validators.length', 'validators.length', ([], {'min': '(6)', 'max': '(300)'}), '(min=6, max=300)\n', (1527, 1543), False, 'from wtforms import validators\n'), ((1723, 1756), 'wtforms.validators.length', 'validators.length', ([], {'min': '(6)', 'max': '(300)'}), '(min=6, max=300)\n', (1740, 1756), False, 'from wtforms import validators\n'), ((2011, 2036), 'wtforms.validators.DataRequired', 'validators.DataRequired', ([], 
{}), '()\n', (2034, 2036), False, 'from wtforms import validators\n'), ((2160, 2185), 'wtforms.validators.DataRequired', 'validators.DataRequired', ([], {}), '()\n', (2183, 2185), False, 'from wtforms import validators\n'), ((2195, 2256), 'wtforms.validators.EqualTo', 'validators.EqualTo', (['"""confirm"""'], {'message': '"""Passwords must match"""'}), "('confirm', message='Passwords must match')\n", (2213, 2256), False, 'from wtforms import validators\n')] |
import os
import firebase_admin
from firebase_admin import credentials, messaging
from django.conf import settings
from utils import common, constants
logger = common.get_system_logger()
cred = credentials.Certificate(os.path.join(
settings.BASE_DIR,
'data',
'sales-yang-firebase-adminsdk-2ga7e-17745491f0.json'
))
firebase_admin.initialize_app(credential=cred)
# def subscribe_to_topic(registration_tokens, topic):
# """トピックにデバイスを登録する。
#
# :param registration_tokens: Instance IDリスト
# :param topic: トピック名称
# :return:
# """
# res = messaging.subscribe_to_topic(registration_tokens, topic)
# return res.success_count, res.failure_count, res.errors
#
#
# def unsubscribe_from_topic(registration_tokens, topic):
# """トピックにデバイスの登録を解除する。
#
# :param registration_tokens: Instance IDリスト
# :param topic: トピック名称
# :return:
# """
# res = messaging.unsubscribe_from_topic(registration_tokens, topic)
# return res.success_count, res.failure_count, res.errors
def send_message_to_topic(topic, title, body, forward=None):
"""ユーザーにメッセージを通知する
メッセージを先にDB登録してから通知します、
そうしないと画面の通知一覧にメッセージが表示できない場合があります。
:param topic: マスターに登録済のトピック(Firebaseに登録済のトピックではありません)
:param title: タイトル
:param body: メッセージ内容
:param forward: メッセージを押下後の遷移先
:return:
"""
from account.models import Notification
from master.models import FirebaseDevice
Notification.add_by_topic(topic.name, title, body, forward=forward)
devices = FirebaseDevice.objects.filter(user__in=topic.users.all())
if devices.count() == 0:
# トピックに登録したデバイスがない場合
logger.info(constants.INFO_FIREBASE_NO_DEVICE.format(topic=topic.name))
return
# ユーザーに通知する
message = messaging.MulticastMessage(data={
'title': title,
'body': body
}, tokens=[item.token for item in devices])
res = messaging.send_multicast(message)
logger.info(constants.INFO_FIREBASE_SEND_MESSAGE.format(topic=topic.name))
| [
"utils.constants.INFO_FIREBASE_SEND_MESSAGE.format",
"firebase_admin.initialize_app",
"utils.common.get_system_logger",
"os.path.join",
"account.models.Notification.add_by_topic",
"firebase_admin.messaging.send_multicast",
"firebase_admin.messaging.MulticastMessage",
"utils.constants.INFO_FIREBASE_NO_... | [((163, 189), 'utils.common.get_system_logger', 'common.get_system_logger', ([], {}), '()\n', (187, 189), False, 'from utils import common, constants\n'), ((332, 378), 'firebase_admin.initialize_app', 'firebase_admin.initialize_app', ([], {'credential': 'cred'}), '(credential=cred)\n', (361, 378), False, 'import firebase_admin\n'), ((223, 320), 'os.path.join', 'os.path.join', (['settings.BASE_DIR', '"""data"""', '"""sales-yang-firebase-adminsdk-2ga7e-17745491f0.json"""'], {}), "(settings.BASE_DIR, 'data',\n 'sales-yang-firebase-adminsdk-2ga7e-17745491f0.json')\n", (235, 320), False, 'import os\n'), ((1429, 1496), 'account.models.Notification.add_by_topic', 'Notification.add_by_topic', (['topic.name', 'title', 'body'], {'forward': 'forward'}), '(topic.name, title, body, forward=forward)\n', (1454, 1496), False, 'from account.models import Notification\n'), ((1752, 1861), 'firebase_admin.messaging.MulticastMessage', 'messaging.MulticastMessage', ([], {'data': "{'title': title, 'body': body}", 'tokens': '[item.token for item in devices]'}), "(data={'title': title, 'body': body}, tokens=[\n item.token for item in devices])\n", (1778, 1861), False, 'from firebase_admin import credentials, messaging\n'), ((1890, 1923), 'firebase_admin.messaging.send_multicast', 'messaging.send_multicast', (['message'], {}), '(message)\n', (1914, 1923), False, 'from firebase_admin import credentials, messaging\n'), ((1940, 2001), 'utils.constants.INFO_FIREBASE_SEND_MESSAGE.format', 'constants.INFO_FIREBASE_SEND_MESSAGE.format', ([], {'topic': 'topic.name'}), '(topic=topic.name)\n', (1983, 2001), False, 'from utils import common, constants\n'), ((1647, 1705), 'utils.constants.INFO_FIREBASE_NO_DEVICE.format', 'constants.INFO_FIREBASE_NO_DEVICE.format', ([], {'topic': 'topic.name'}), '(topic=topic.name)\n', (1687, 1705), False, 'from utils import common, constants\n')] |
# Copyright 2019 The Keras Tuner Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Tuner class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import numpy as np
import six
import tensorflow as tf
from tensorflow import keras
from ..abstractions import display
class TunerStats(object):
"""Track tuner statistics."""
def __init__(self):
self.num_generated_models = 0 # overall number of instances generated
self.num_invalid_models = 0 # how many models didn't work
self.num_oversized_models = 0 # num models with params> max_params
def summary(self, extended=False):
display.subsection('Tuning stats')
display.display_settings(self.get_config())
def get_config(self):
return {
'num_generated_models': self.num_generated_models,
'num_invalid_models': self.num_invalid_models,
'num_oversized_models': self.num_oversized_models
}
@classmethod
def from_config(cls, config):
stats = cls()
stats.num_generated_models = config['num_generated_models']
stats.num_invalid_models = config['num_invalid_models']
stats.num_oversized_models = config['num_oversized_models']
return stats
def get_max_epochs_and_steps(fit_args, fit_kwargs):
if fit_args:
x = tf.nest.flatten(fit_args)[0]
else:
x = tf.nest.flatten(fit_kwargs.get('x'))[0]
batch_size = fit_kwargs.get('batch_size', 32)
if hasattr(x, '__len__'):
max_steps = math.ceil(float(len(x)) / batch_size)
else:
max_steps = fit_kwargs.get('steps')
max_epochs = fit_kwargs.get('epochs', 1)
return max_epochs, max_steps
class TunerCallback(keras.callbacks.Callback):
def __init__(self, tuner, trial):
super(TunerCallback, self).__init__()
self.tuner = tuner
self.trial = trial
def on_epoch_begin(self, epoch, logs=None):
self.tuner.on_epoch_begin(
self.trial, self.model, epoch, logs=logs)
def on_batch_begin(self, batch, logs=None):
self.tuner.on_batch_begin(self.trial, self.model, batch, logs)
def on_batch_end(self, batch, logs=None):
self.tuner.on_batch_end(self.trial, self.model, batch, logs)
def on_epoch_end(self, epoch, logs=None):
self.tuner.on_epoch_end(
self.trial, self.model, epoch, logs=logs)
# TODO: Add more extensive display.
class Display(object):
def on_trial_begin(self, trial):
display.section('New model')
trial.summary()
def on_trial_end(self, trial):
display.section('Trial complete')
trial.summary()
def average_histories(histories):
"""Averages the per-epoch metrics from multiple executions."""
averaged = {}
metrics = histories[0].keys()
for metric in metrics:
values = []
for epoch_values in six.moves.zip_longest(
*[h[metric] for h in histories],
fillvalue=np.nan):
values.append(np.nanmean(epoch_values))
averaged[metric] = values
# Convert {str: [float]} to [{str: float}]
averaged = [dict(zip(metrics, vals)) for vals in zip(*averaged.values())]
return averaged
| [
"six.moves.zip_longest",
"numpy.nanmean",
"tensorflow.nest.flatten"
] | [((3473, 3545), 'six.moves.zip_longest', 'six.moves.zip_longest', (['*[h[metric] for h in histories]'], {'fillvalue': 'np.nan'}), '(*[h[metric] for h in histories], fillvalue=np.nan)\n', (3494, 3545), False, 'import six\n'), ((1924, 1949), 'tensorflow.nest.flatten', 'tf.nest.flatten', (['fit_args'], {}), '(fit_args)\n', (1939, 1949), True, 'import tensorflow as tf\n'), ((3606, 3630), 'numpy.nanmean', 'np.nanmean', (['epoch_values'], {}), '(epoch_values)\n', (3616, 3630), True, 'import numpy as np\n')] |
from flask import Flask, render_template, flash, abort, redirect, url_for, request
import os
import common
import json
import numbers
import urllib.parse
import pandas as pd
from datetime import datetime
from math import log10, floor
base_dir = '/home/nick/Data/_ensembles'
app = Flask(__name__)
app.config['ENV'] = 'development'
app.config['DEBUG'] = True
app.config['TESTING'] = True
app.config.from_mapping(
SECRET_KEY='dev'
)
# predictions_home_dir = os.path.join(base_dir, 'outlier-predictions-2019_11_13-15_38_28')
predictions_home_dir = os.path.join(base_dir, 'outlier-predictions-2020_01_03-11_15_41')
file_config = common.load_file_config(predictions_home_dir)
labels_dir = os.path.join(predictions_home_dir, 'labels')
# priors_parent_dir = os.path.join(base_dir, 'priors-2019_11_12-19_33_13')
priors_parent_dir = os.path.join(base_dir, 'priors-2019_12_30-18_30_22')
predictions_dir = os.path.join(predictions_home_dir, 'predictions')
priors_dir = os.path.join(priors_parent_dir, 'priors')
prediction_summary = pd.read_csv(os.path.join(predictions_home_dir, 'summary.csv'))
prediction_summary = prediction_summary.sort_values('prediction', ascending=False)
prediction_summary = prediction_summary.reset_index()
def get_flow(flow):
file = os.path.join(predictions_dir, flow + '.json')
if not os.path.isfile(file):
flash(f'{flow} was not found.')
abort(404)
with open(file) as f:
flow = json.load(f)
return flow
def make_label(flow, username, threat_level, classifier, description):
if not os.path.isdir(labels_dir): # make label directory if it doesn't exist.
os.mkdir(labels_dir)
flow_data = get_flow(flow)
prediction_values = list()
for obj in flow_data['objects']:
prediction_values.append((obj['id'], obj['value'], obj['prediction']))
label_file = os.path.join(labels_dir, flow + '.json') # get filename based on flow name
if os.path.isfile(label_file):
# jsn = []
with open(label_file, 'r') as f:
jsn = json.load(f) # if file already exists, get json.
else:
jsn = []
dict = {'userName': username,
'threatLevel': threat_level,
'classifier': classifier,
'description': description,
'timestamp': str(datetime.now()),
'version': common.__version__,
'data': prediction_values}
jsn.append(dict)
with open(label_file, 'w') as f:
json.dump(jsn, f)
def remove_label(flow, index):
label_file = os.path.join(labels_dir, flow + '.json') # get filename based on flow name
with open(label_file, 'r') as f:
jsn = json.load(f) # if file already exists, get json.
del jsn[index]
with open(label_file, 'w') as f:
json.dump(jsn, f)
def get_labels(flow):
label_file = os.path.join(labels_dir, flow + '.json') # get filename based on flow name
if os.path.isfile(label_file):
with open(label_file, 'r') as f:
jsn = json.load(f) # if file already exists, get json.
else:
jsn = []
return jsn
def round_structure(x, sig=2):
if isinstance(x, numbers.Number):
if x == 0 or x != x: # alo check for NaN
return 0
return round(x, sig - int(floor(log10(abs(x)))) - 1)
elif isinstance(x, dict):
dct = dict()
for k, v in x.items():
dct[k] = round_structure(v, sig)
return dct
elif isinstance(x, list):
lst = list()
for itm in x:
lst.append(round_structure(itm, sig))
return lst
elif type(x) in (str, bool):
return x
else:
raise TypeError
class PredictionTrace(object):
levels = ['Flow', 'Object', 'Subject']
def __init__(self, flow, obj=None, subject=None):
if flow is None:
raise ValueError(f'Flow parameter cannot be None')
field_predictions = None
flow = urllib.parse.unquote(flow)
jsn = get_flow(flow)
jsn = round_structure(jsn)
raw_data = jsn.get('raw_data')
self.flow = flow
self.biflow_object = obj
self.subject = subject
self.raw_data = raw_data
level = self.levels[0]
prediction_trace = [(level, 'Outlier Score', '', jsn['prediction'])]
prediction_list = 'objects'
prediction_field = 'id'
if obj is not None:
obj = urllib.parse.unquote(obj)
jsn = self.get_level_json(jsn, obj, prediction_list, prediction_field)
level = self.levels[1]
prediction_trace.append((level, obj, jsn['value'], jsn['prediction']))
prediction_list = 'subjects'
prediction_field = 'id'
if subject is not None:
subject = urllib.parse.unquote(subject)
jsn = self.get_level_json(jsn, subject, prediction_list, prediction_field)
level = self.levels[2]
prediction_trace.append((level, subject, jsn['value'], jsn['prediction']))
prediction_list = None
prediction_field = None
field_predictions = jsn
predictions = []
if prediction_field is not None:
for identifier in jsn[prediction_list]:
predictions.append({'id': identifier[prediction_field],
'pred': identifier['prediction'],
'val': identifier.get('value')})
self.level = level
self.prediction_trace = prediction_trace
self.predictions = sorted(predictions, key=lambda i: i['pred'], reverse=True)
self.field_predictions = field_predictions
@property
def my_direction(self):
return file_config.my_direction(self.subject)
@property
def their_direction(self):
return file_config.their_direction(self.subject)
@property
def field_value(self):
if self.biflow_object == file_config.uniflow_indicator: # special case
return str(self.my_direction == file_config.biflow_src_prfx).lower()
else:
return self.raw_data[self.biflow_object]
@property
def field_prior(self):
if self.biflow_object is None:
raise ValueError(f'Can only pull prior based on a field.')
if self.subject.endswith(file_config.hierarchy[0]): # subnet
path = os.path.join(priors_dir,
self.raw_data[self.my_direction + file_config.hierarchy[0]])
elif self.subject.endswith(file_config.hierarchy[1]): # ip
path = os.path.join(priors_dir,
self.raw_data[self.my_direction + file_config.hierarchy[0]],
self.raw_data[self.my_direction + file_config.hierarchy[1]])
else:
raise ValueError(f'Did not recognize level "{self.subject}"')
file = os.path.join(path, '.json')
if not os.path.isfile(file):
raise ValueError(f'Priors file {file} was not found.')
with open(file) as f:
prior = json.load(f)
field_prior = prior[self.uniflow_object]
return field_prior
@property
def uniflow_object(self):
if self.subject is None:
raise ValueError(f'Cannot call uniflow_object without both a _subject_ (ex. dst.ip) and an _object_ (ex. '
f'src.bytes).')
if self.biflow_object.startswith(self.my_direction):
return self.biflow_object.replace(self.my_direction, file_config.uniflow_this_prfx)
elif self.biflow_object.startswith(self.their_direction):
return self.biflow_object.replace(self.their_direction, file_config.uniflow_that_prfx)
else:
return self.biflow_object
@property
def child_level(self):
this = self.level
print(this)
print(self.levels[2])
if this == self.levels[2]:
raise ValueError(f'"Subject" level has no child.')
return self.levels[self.levels.index(this) + 1]
def build_url(self, lvl):
if lvl not in self.levels:
raise ValueError(f'build_url requires one of the 4 defined levels')
segments = ['/prediction', urllib.parse.quote(self.flow)]
if lvl != self.levels[0]:
segments.append(urllib.parse.quote(self.biflow_object))
if lvl != self.levels[1]:
segments.append(urllib.parse.quote(self.subject))
return '/'.join(segments)
@staticmethod
def get_level_json(jsn, value, prediction_list, prediction_field):
level_json = [p for p in jsn.get(prediction_list) if p[prediction_field] == value]
if len(level_json) == 0:
flash(f'{level_json} was not found.')
abort(404)
return level_json.pop()
@property
def chart_data(self):
primary_color = '#007bff'
secondary_color = '#6c757d'
max_columns = 15
cdf = self.field_prior['cdf']
if self.uniflow_object in common.numeric_vars():
typ = 'scatter'
data = [{'x': float(k), 'y': v} for k, v in cdf.items()]
full_data = {'datasets': [{'label': self.uniflow_object,
'backgroundColor': secondary_color,
'data': data},
{'label': self.field_value,
'backgroundColor': primary_color,
'showLine': 'true',
'borderColor': primary_color,
'data': [{'x': 0, 'y': self.field_value},
{'x': 1, 'y': self.field_value}]},
]}
elif self.uniflow_object in common.binary_vars() or self.uniflow_object in common.categorical_vars():
typ = 'bar'
ln = len(cdf)
ix = None
if self.field_value in cdf.keys():
ix = list(cdf.keys()).index(self.field_value)
if ln < max_columns:
indexes = list(range(0, ln))
else:
if ix is None or ix < 10 or ix > ln - 4:
indexes = list(range(0, 10)) + [f'MANY\n({ln - 14})'] + list(range(ln - 4, ln))
else:
indexes = list(range(0, 10)) + [f'MANY\n({ix - 10})'] + [ix] + [f'MANY\n({ln - ix - 3})'] + list(range(ln - 3, ln))
labels = [list(cdf.keys())[idx] if type(idx) == int else idx for idx in indexes]
data = [list(cdf.values())[idx] if type(idx) == int else 0 for idx in indexes]
colors = [primary_color if itm == self.field_value else secondary_color for itm in labels]
full_data = {'labels': labels,
'datasets': [{'label': self.uniflow_object,
'backgroundColor': colors,
'data': data}]}
else:
raise ValueError(f'Field does not seem to be valid, has value {self.uniflow_object}')
chart_data = {'type': typ,
'data': full_data,
'options': {
'legend': {'display': 'false'},
'scales': {'yAxes': [{'ticks': {'min': 0}}]}}}
return chart_data
@app.route('/')
@app.route('/summary/')
@app.route('/prediction/')
def index():
return redirect(url_for('summary', page_num=1))
@app.route('/summary/<int:page_num>')
def summary(page_num=1):
results_per_page = 10
i = (page_num - 1) * results_per_page
if i > len(prediction_summary):
abort(404)
predictions = []
n = 0
while n < results_per_page and i < len(prediction_summary):
p = prediction_summary.loc[i]
id = p['filename'].replace('.json','')
data = id.split('_')
ts = datetime.fromtimestamp(int(data[0])/1000)
pred = round_structure(p['prediction'])
labels = get_labels(id)
if len(labels):
classification = labels[0]['threatLevel']
else:
classification = ''
predictions.append({'id': id, 'timestamp': ts, 'src_ip': data[1], 'src_port': p['src.port'], 'dst_ip': data[2],
'dst_port': p['dst.port'], 'classification': classification, 'pred': pred, 'index': i})
i += 1
n += 1
last_page = floor(len(prediction_summary) / results_per_page) + 1
nav_display = dict()
nav_display.update({1: '«', last_page: '»'})
if page_num not in (1, last_page):
nav_display.update({n: str(n) for n in list(range(page_num - 1, page_num + 2))})
if page_num <= 3:
nav_display.update({n: str(n) for n in list(range(1,4))})
if page_num >= last_page - 3:
nav_display.update({n: str(n) for n in list(range(last_page-2, last_page+1))})
nav_display = dict(sorted(nav_display.items()))
return render_template('summary.html', predictions=predictions, page_num=page_num, nav_display=nav_display)
def resolve_user_label(flow, request):
if request.method == "POST":
if request.form.get('threatLevel') is not None: # if user added new label
make_label(flow, username=request.form.get('userName'), threat_level=request.form.get('threatLevel'),
classifier=request.form.get('classifier'), description=request.form.get('description'))
else: # if user trying to delete label
i = 1
while i <= len(get_labels(flow)):
if request.form.get(str(i)) is not None:
print(i)
remove_label(flow, i-1)
i += 1
@app.route('/prediction/<flow>', methods=['GET', 'POST'])
@app.route('/prediction/<flow>/<object>', methods=['GET', 'POST'])
def flow_prediction(flow, object=None):
resolve_user_label(flow, request)
trace = PredictionTrace(flow, object)
return render_template('level_explorer.html', trace=trace, labels=get_labels(flow))
@app.route('/prediction/<flow>/<object>/<subject>', methods=['GET', 'POST'])
def field_prediction(flow, object, subject):
resolve_user_label(flow, request)
trace = PredictionTrace(flow, object, subject)
return render_template('field_explorer.html', trace=trace, labels=get_labels(flow))
@app.route('/refs')
def refs():
return render_template('references.html')
@app.route('/admin/')
def admin():
return redirect(url_for('admin_data'))
@app.route('/admin/data', methods=['GET', 'POST'])
def admin_data():
def get_metadata(dir, pattern):
metadata = list()
for subdir in os.listdir(dir):
path = os.path.join(dir, subdir)
if subdir.startswith(pattern) and os.path.isdir(path):
filepath = os.path.join(path, 'metadata.json')
if os.path.isfile(filepath):
with open(filepath) as f:
jsn = json.load(f)
md = {'directory': os.path.basename(dir),
'md5': jsn.get('md5'),
'filename': jsn.get('filename'),
'size (GB)': jsn.get('size (GB)'),
'number of rows': jsn.get('number of rows'),
'start date': jsn.get('start date'),
'end date': jsn.get('end date'),
'package version': jsn.get('package version'),
}
metadata.append(md)
return metadata
prior_metadata = get_metadata(base_dir, 'priors')
pred_metadata = get_metadata(base_dir, 'outlier-predictions')
raw_metadata = get_metadata(base_dir, 'raw-data')
return render_template('admin_data.html', pred_metadata=pred_metadata, prior_metadata=prior_metadata, raw_metadata=raw_metadata)
@app.route('/admin/labels', methods=['GET', 'POST'])
def admin_labels():
return render_template('admin_labels.html')
@app.route('/admin/file-config', methods=['GET', 'POST'])
def admin_data_config():
return render_template('admin_file_config.html')
@app.errorhandler(404)
def page_not_found(e):
    """Flash a friendly message and render the base template on 404.

    Returns the 404 status explicitly: without it Flask would send the
    error page with a misleading 200 OK. (Also dropped a pointless
    f-string prefix on a literal with no placeholders.)
    """
    flash('404: Page not found.')
    return render_template('base.html'), 404
| [
"flask.render_template",
"flask.Flask",
"os.listdir",
"flask.flash",
"flask.request.form.get",
"os.path.isdir",
"os.mkdir",
"common.categorical_vars",
"flask.abort",
"common.load_file_config",
"os.path.isfile",
"common.binary_vars",
"common.numeric_vars",
"os.path.join",
"flask.url_for",... | [((281, 296), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (286, 296), False, 'from flask import Flask, render_template, flash, abort, redirect, url_for, request\n'), ((551, 616), 'os.path.join', 'os.path.join', (['base_dir', '"""outlier-predictions-2020_01_03-11_15_41"""'], {}), "(base_dir, 'outlier-predictions-2020_01_03-11_15_41')\n", (563, 616), False, 'import os\n'), ((631, 676), 'common.load_file_config', 'common.load_file_config', (['predictions_home_dir'], {}), '(predictions_home_dir)\n', (654, 676), False, 'import common\n'), ((690, 734), 'os.path.join', 'os.path.join', (['predictions_home_dir', '"""labels"""'], {}), "(predictions_home_dir, 'labels')\n", (702, 734), False, 'import os\n'), ((830, 882), 'os.path.join', 'os.path.join', (['base_dir', '"""priors-2019_12_30-18_30_22"""'], {}), "(base_dir, 'priors-2019_12_30-18_30_22')\n", (842, 882), False, 'import os\n'), ((901, 950), 'os.path.join', 'os.path.join', (['predictions_home_dir', '"""predictions"""'], {}), "(predictions_home_dir, 'predictions')\n", (913, 950), False, 'import os\n'), ((964, 1005), 'os.path.join', 'os.path.join', (['priors_parent_dir', '"""priors"""'], {}), "(priors_parent_dir, 'priors')\n", (976, 1005), False, 'import os\n'), ((1039, 1088), 'os.path.join', 'os.path.join', (['predictions_home_dir', '"""summary.csv"""'], {}), "(predictions_home_dir, 'summary.csv')\n", (1051, 1088), False, 'import os\n'), ((1260, 1305), 'os.path.join', 'os.path.join', (['predictions_dir', "(flow + '.json')"], {}), "(predictions_dir, flow + '.json')\n", (1272, 1305), False, 'import os\n'), ((1851, 1891), 'os.path.join', 'os.path.join', (['labels_dir', "(flow + '.json')"], {}), "(labels_dir, flow + '.json')\n", (1863, 1891), False, 'import os\n'), ((1935, 1961), 'os.path.isfile', 'os.path.isfile', (['label_file'], {}), '(label_file)\n', (1949, 1961), False, 'import os\n'), ((2536, 2576), 'os.path.join', 'os.path.join', (['labels_dir', "(flow + '.json')"], {}), 
"(labels_dir, flow + '.json')\n", (2548, 2576), False, 'import os\n'), ((2838, 2878), 'os.path.join', 'os.path.join', (['labels_dir', "(flow + '.json')"], {}), "(labels_dir, flow + '.json')\n", (2850, 2878), False, 'import os\n'), ((2921, 2947), 'os.path.isfile', 'os.path.isfile', (['label_file'], {}), '(label_file)\n', (2935, 2947), False, 'import os\n'), ((13085, 13189), 'flask.render_template', 'render_template', (['"""summary.html"""'], {'predictions': 'predictions', 'page_num': 'page_num', 'nav_display': 'nav_display'}), "('summary.html', predictions=predictions, page_num=page_num,\n nav_display=nav_display)\n", (13100, 13189), False, 'from flask import Flask, render_template, flash, abort, redirect, url_for, request\n'), ((14514, 14548), 'flask.render_template', 'render_template', (['"""references.html"""'], {}), "('references.html')\n", (14529, 14548), False, 'from flask import Flask, render_template, flash, abort, redirect, url_for, request\n'), ((15886, 16011), 'flask.render_template', 'render_template', (['"""admin_data.html"""'], {'pred_metadata': 'pred_metadata', 'prior_metadata': 'prior_metadata', 'raw_metadata': 'raw_metadata'}), "('admin_data.html', pred_metadata=pred_metadata,\n prior_metadata=prior_metadata, raw_metadata=raw_metadata)\n", (15901, 16011), False, 'from flask import Flask, render_template, flash, abort, redirect, url_for, request\n'), ((16094, 16130), 'flask.render_template', 'render_template', (['"""admin_labels.html"""'], {}), "('admin_labels.html')\n", (16109, 16130), False, 'from flask import Flask, render_template, flash, abort, redirect, url_for, request\n'), ((16227, 16268), 'flask.render_template', 'render_template', (['"""admin_file_config.html"""'], {}), "('admin_file_config.html')\n", (16242, 16268), False, 'from flask import Flask, render_template, flash, abort, redirect, url_for, request\n'), ((16321, 16351), 'flask.flash', 'flash', (['f"""404: Page not found."""'], {}), "(f'404: Page not found.')\n", (16326, 16351), 
False, 'from flask import Flask, render_template, flash, abort, redirect, url_for, request\n'), ((16363, 16391), 'flask.render_template', 'render_template', (['"""base.html"""'], {}), "('base.html')\n", (16378, 16391), False, 'from flask import Flask, render_template, flash, abort, redirect, url_for, request\n'), ((1317, 1337), 'os.path.isfile', 'os.path.isfile', (['file'], {}), '(file)\n', (1331, 1337), False, 'import os\n'), ((1347, 1378), 'flask.flash', 'flash', (['f"""{flow} was not found."""'], {}), "(f'{flow} was not found.')\n", (1352, 1378), False, 'from flask import Flask, render_template, flash, abort, redirect, url_for, request\n'), ((1387, 1397), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (1392, 1397), False, 'from flask import Flask, render_template, flash, abort, redirect, url_for, request\n'), ((1439, 1451), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1448, 1451), False, 'import json\n'), ((1553, 1578), 'os.path.isdir', 'os.path.isdir', (['labels_dir'], {}), '(labels_dir)\n', (1566, 1578), False, 'import os\n'), ((1633, 1653), 'os.mkdir', 'os.mkdir', (['labels_dir'], {}), '(labels_dir)\n', (1641, 1653), False, 'import os\n'), ((2468, 2485), 'json.dump', 'json.dump', (['jsn', 'f'], {}), '(jsn, f)\n', (2477, 2485), False, 'import json\n'), ((2663, 2675), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2672, 2675), False, 'import json\n'), ((2779, 2796), 'json.dump', 'json.dump', (['jsn', 'f'], {}), '(jsn, f)\n', (2788, 2796), False, 'import json\n'), ((6934, 6961), 'os.path.join', 'os.path.join', (['path', '""".json"""'], {}), "(path, '.json')\n", (6946, 6961), False, 'import os\n'), ((11572, 11602), 'flask.url_for', 'url_for', (['"""summary"""'], {'page_num': '(1)'}), "('summary', page_num=1)\n", (11579, 11602), False, 'from flask import Flask, render_template, flash, abort, redirect, url_for, request\n'), ((11781, 11791), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (11786, 11791), False, 'from flask import Flask, 
render_template, flash, abort, redirect, url_for, request\n'), ((14606, 14627), 'flask.url_for', 'url_for', (['"""admin_data"""'], {}), "('admin_data')\n", (14613, 14627), False, 'from flask import Flask, render_template, flash, abort, redirect, url_for, request\n'), ((14784, 14799), 'os.listdir', 'os.listdir', (['dir'], {}), '(dir)\n', (14794, 14799), False, 'import os\n'), ((2041, 2053), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2050, 2053), False, 'import json\n'), ((2301, 2315), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2313, 2315), False, 'from datetime import datetime\n'), ((3008, 3020), 'json.load', 'json.load', (['f'], {}), '(f)\n', (3017, 3020), False, 'import json\n'), ((6413, 6503), 'os.path.join', 'os.path.join', (['priors_dir', 'self.raw_data[self.my_direction + file_config.hierarchy[0]]'], {}), '(priors_dir, self.raw_data[self.my_direction + file_config.\n hierarchy[0]])\n', (6425, 6503), False, 'import os\n'), ((6977, 6997), 'os.path.isfile', 'os.path.isfile', (['file'], {}), '(file)\n', (6991, 6997), False, 'import os\n'), ((7116, 7128), 'json.load', 'json.load', (['f'], {}), '(f)\n', (7125, 7128), False, 'import json\n'), ((8775, 8812), 'flask.flash', 'flash', (['f"""{level_json} was not found."""'], {}), "(f'{level_json} was not found.')\n", (8780, 8812), False, 'from flask import Flask, render_template, flash, abort, redirect, url_for, request\n'), ((8825, 8835), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (8830, 8835), False, 'from flask import Flask, render_template, flash, abort, redirect, url_for, request\n'), ((9076, 9097), 'common.numeric_vars', 'common.numeric_vars', ([], {}), '()\n', (9095, 9097), False, 'import common\n'), ((13271, 13302), 'flask.request.form.get', 'request.form.get', (['"""threatLevel"""'], {}), "('threatLevel')\n", (13287, 13302), False, 'from flask import Flask, render_template, flash, abort, redirect, url_for, request\n'), ((14820, 14845), 'os.path.join', 'os.path.join', (['dir', 
'subdir'], {}), '(dir, subdir)\n', (14832, 14845), False, 'import os\n'), ((6619, 6770), 'os.path.join', 'os.path.join', (['priors_dir', 'self.raw_data[self.my_direction + file_config.hierarchy[0]]', 'self.raw_data[self.my_direction + file_config.hierarchy[1]]'], {}), '(priors_dir, self.raw_data[self.my_direction + file_config.\n hierarchy[0]], self.raw_data[self.my_direction + file_config.hierarchy[1]])\n', (6631, 6770), False, 'import os\n'), ((14892, 14911), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (14905, 14911), False, 'import os\n'), ((14940, 14975), 'os.path.join', 'os.path.join', (['path', '"""metadata.json"""'], {}), "(path, 'metadata.json')\n", (14952, 14975), False, 'import os\n'), ((14995, 15019), 'os.path.isfile', 'os.path.isfile', (['filepath'], {}), '(filepath)\n', (15009, 15019), False, 'import os\n'), ((9903, 9923), 'common.binary_vars', 'common.binary_vars', ([], {}), '()\n', (9921, 9923), False, 'import common\n'), ((9950, 9975), 'common.categorical_vars', 'common.categorical_vars', ([], {}), '()\n', (9973, 9975), False, 'import common\n'), ((13381, 13409), 'flask.request.form.get', 'request.form.get', (['"""userName"""'], {}), "('userName')\n", (13397, 13409), False, 'from flask import Flask, render_template, flash, abort, redirect, url_for, request\n'), ((13424, 13455), 'flask.request.form.get', 'request.form.get', (['"""threatLevel"""'], {}), "('threatLevel')\n", (13440, 13455), False, 'from flask import Flask, render_template, flash, abort, redirect, url_for, request\n'), ((13491, 13521), 'flask.request.form.get', 'request.form.get', (['"""classifier"""'], {}), "('classifier')\n", (13507, 13521), False, 'from flask import Flask, render_template, flash, abort, redirect, url_for, request\n'), ((13535, 13566), 'flask.request.form.get', 'request.form.get', (['"""description"""'], {}), "('description')\n", (13551, 13566), False, 'from flask import Flask, render_template, flash, abort, redirect, url_for, request\n'), ((15097, 
15109), 'json.load', 'json.load', (['f'], {}), '(f)\n', (15106, 15109), False, 'import json\n'), ((15149, 15170), 'os.path.basename', 'os.path.basename', (['dir'], {}), '(dir)\n', (15165, 15170), False, 'import os\n')] |