code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
import logging
from dotenv import load_dotenv
import sqlalchemy
import urllib
import pyodbc
import os
# Resolve paths relative to this module file (not the process CWD) so the
# .env file is found no matter where the interpreter was started.
# BUG FIX: this previously used __name__ (the module-name string), which
# resolves relative to the current working directory; __file__ is the
# module's actual location on disk.
ROOT = os.path.dirname(os.path.abspath(__file__))
load_dotenv(os.path.join(ROOT, '.env'))

# Luigi routes task logging through this named logger.
LOG = logging.getLogger('luigi-interface')

# Connection settings come from the environment (populated from .env above);
# a missing variable raises KeyError at import time, failing fast on
# misconfiguration.
RPT_SERVER = os.environ['SERVER_A']
SCG_SERVER = os.environ['SERVER_B']
RPT_DB = os.environ['DB_A']
SCG_DB = os.environ['DB_B']
SCG_USR = os.environ['SERVER_B_USR']
SCG_PWD = os.environ['SERVER_B_PWD']
class Utils:
    """Helper utilities for building SQL Server database connections."""

    @staticmethod
    def _build_conn_str(server, database):
        """Build a trusted-connection ODBC connection string.

        Args:
            server: SQL Server host name (without braces).
            database: Database name (without braces).

        Returns:
            str: ODBC connection string using the '{SQL Server}' driver and
            Windows integrated authentication (Trusted_Connection=yes).
        """
        return ('DRIVER={{SQL Server}};SERVER={{{0}}};DATABASE={{{1}}}'
                ';Trusted_Connection=yes').format(server, database)

    # BUG FIX: was a plain method without `self`, so calling it on an
    # instance raised TypeError; @staticmethod keeps class-level calls
    # working and fixes instance-level calls.
    @staticmethod
    def get_db_connection(server, database):
        """Create a SQLAlchemy engine for a trusted SQL Server connection.

        Args:
            server: SQL Server host name.
            database: Database name.

        Returns:
            sqlalchemy.engine.Engine: engine backed by the mssql+pyodbc
            dialect, using the ODBC connection string verbatim.
        """
        conn_str = Utils._build_conn_str(server, database)
        # quote_plus so the ODBC string survives embedding in the engine URL.
        quoted = urllib.parse.quote_plus(conn_str)
        engine = sqlalchemy.create_engine(
            'mssql+pyodbc:///?odbc_connect={}'.format(quoted))
        return engine
"logging.getLogger",
"os.path.abspath",
"urllib.parse.quote_plus",
"os.path.join"
] | [((203, 239), 'logging.getLogger', 'logging.getLogger', (['"""luigi-interface"""'], {}), "('luigi-interface')\n", (220, 239), False, 'import logging\n'), ((129, 154), 'os.path.abspath', 'os.path.abspath', (['__name__'], {}), '(__name__)\n', (144, 154), False, 'import os\n'), ((168, 194), 'os.path.join', 'os.path.join', (['ROOT', '""".env"""'], {}), "(ROOT, '.env')\n", (180, 194), False, 'import os\n'), ((842, 875), 'urllib.parse.quote_plus', 'urllib.parse.quote_plus', (['conn_str'], {}), '(conn_str)\n', (865, 875), False, 'import urllib\n')] |
import os
import torch
import time
import numpy as np
from tqdm import tqdm
import seaborn as sns
import matplotlib
import matplotlib.pyplot as plt
from generator import generator
from utils import get_model
# Global plotting configuration: seaborn grid style, a small serif font,
# and a scientific-notation tick formatter (used for colorbars below).
sns.set_style("whitegrid")
font = {'family': 'serif',
        'style': 'normal',
        'size': 10}
matplotlib.rc('font', **font)
sfmt = matplotlib.ticker.ScalarFormatter(useMathText=True)
# Always use scientific notation (power limits (0, 0)).
sfmt.set_powerlimits((0, 0))
# Non-interactive backend: figures are only written to disk, never shown.
matplotlib.use("Agg")
class DeepPrior(object):
    """Deep-prior denoising: fit a CNN generator to a single noisy model.

    A randomly initialised generator G with a fixed latent input z is
    trained so that G(z) matches the noisy observation y = x + sigma*noise;
    the network architecture itself acts as the regulariser.
    """

    def __init__(self, args):
        # Prefer GPU when available and requested; also switch the default
        # tensor type so freshly created tensors live on the chosen device.
        if torch.cuda.is_available() and args.cuda:
            self.device = torch.device('cuda')
            torch.set_default_tensor_type('torch.cuda.FloatTensor')
        else:
            self.device = torch.device('cpu')
            torch.set_default_tensor_type('torch.FloatTensor')

    def train(self, args):
        """Fit the generator to the noisy observation for args.max_itr steps.

        Writes diagnostic figures every 100 iterations via self.test().
        """
        x, spacing, shape = get_model()
        self.x = x.to(self.device)
        # Physical plot extent in km (spacing presumably in meters —
        # TODO confirm against get_model()).
        self.extent = np.array([0., self.x.shape[2]*spacing[0],
                                self.x.shape[3]*spacing[1], 0.])/1.0e3
        G = generator(self.x.shape).to(self.device)
        # Fixed random latent input; only the network weights are optimised.
        z = torch.randn([1, 3] + G.crop.d_dim).to(self.device)
        # Noisy observation y = x + sigma * n, with n ~ N(0, I).
        self.y = self.x + args.sigma*torch.randn(self.x.shape).to(self.device)
        optim = torch.optim.Adam(G.parameters(), lr=args.lr,
                                 weight_decay=args.weight_decay)
        # Multiply the learning rate by 0.55 at every scheduler step
        # (i.e. every 100 iterations, when test() is also called).
        lmbda = lambda epoch: 0.55
        scheduler = torch.optim.lr_scheduler.MultiplicativeLR(optim,
                                                  lr_lambda=lmbda)
        self.obj_log = []
        self.err_log = []
        with tqdm(range(args.max_itr), unit=" itr's", colour='#B5F2A9') as pb:
            for itr in pb:
                self.xhat = G(z)
                # Data-misfit objective ||G(z) - y||^2.
                obj = torch.norm(self.xhat - self.y)**2
                obj.backward()
                optim.step()
                optim.zero_grad()
                if itr%100 == 0 or itr == args.max_itr - 1:
                    self.test(args, itr)
                    scheduler.step()
                self.obj_log.append(obj.item())
                # Model error w.r.t. the known clean model — monitoring only,
                # never used in the optimisation objective.
                self.err_log.append((torch.norm(self.x - self.xhat)**2).item())
                pb.set_postfix(obj="{:.2e}".format(self.obj_log[-1]),
                               error="{:.2e}".format(self.err_log[-1]))

    def _save_seismic_plot(self, data, title, path):
        """Save one 2-D seismic-style image (transposed, km extent) to *path*."""
        fig = plt.figure(title, figsize=(7, 2.5))
        plt.imshow(data.T,
                   vmin=-.04, vmax=.04, aspect=1,
                   extent=self.extent, cmap='seismic', alpha=1.0,
                   resample=True, interpolation="lanczos", filterrad=1)
        plt.colorbar(fraction=0.085, pad=0.01, format=sfmt)
        plt.xlabel("Horizontal distance (km)")
        plt.ylabel("Depth (km)")
        plt.grid(False)
        plt.title(title)
        plt.savefig(path, format="png", bbox_inches="tight", dpi=200,
                    pad_inches=.05)
        plt.close(fig)

    def test(self, args, itr):
        """Write diagnostic figures: loss curves, the current prediction,
        and (on the first call only) the true model and observed data."""
        fig = plt.figure("Objective", figsize=(7, 2.5))
        plt.semilogy(self.obj_log, label="objective")
        plt.semilogy(self.err_log, label="error")
        plt.legend()
        # BUG FIX: title previously read "objecjtive".
        plt.title("Training objective and prediction error")
        plt.xlabel("Iterations")
        plt.ylabel("Loss vs model error")
        plt.grid(True)
        plt.savefig(os.path.join(args.save_path, "training_obj.png"),
                    format="png", bbox_inches="tight", dpi=200,
                    pad_inches=.05)
        plt.close(fig)
        self._save_seismic_plot(
            self.xhat.cpu().detach().squeeze().numpy(),
            "Prediction after " + str(itr) + " updates",
            os.path.join(args.save_path, "xhat_" + str(itr) + ".png"))
        if itr == 0:
            self._save_seismic_plot(
                self.x.cpu().detach().squeeze().numpy(),
                "True model",
                os.path.join(args.save_path, "x.png"))
            self._save_seismic_plot(
                self.y.cpu().detach().squeeze().numpy(),
                "Observed data",
                os.path.join(args.save_path, "y.png"))
| [
"matplotlib.pyplot.grid",
"matplotlib.pyplot.ylabel",
"seaborn.set_style",
"numpy.array",
"matplotlib.ticker.ScalarFormatter",
"torch.cuda.is_available",
"matplotlib.rc",
"matplotlib.pyplot.semilogy",
"generator.generator",
"matplotlib.pyplot.xlabel",
"torch.set_default_tensor_type",
"matplotl... | [((208, 234), 'seaborn.set_style', 'sns.set_style', (['"""whitegrid"""'], {}), "('whitegrid')\n", (221, 234), True, 'import seaborn as sns\n'), ((309, 338), 'matplotlib.rc', 'matplotlib.rc', (['"""font"""'], {}), "('font', **font)\n", (322, 338), False, 'import matplotlib\n'), ((346, 397), 'matplotlib.ticker.ScalarFormatter', 'matplotlib.ticker.ScalarFormatter', ([], {'useMathText': '(True)'}), '(useMathText=True)\n', (379, 397), False, 'import matplotlib\n'), ((427, 448), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (441, 448), False, 'import matplotlib\n'), ((854, 865), 'utils.get_model', 'get_model', ([], {}), '()\n', (863, 865), False, 'from utils import get_model\n'), ((1414, 1479), 'torch.optim.lr_scheduler.MultiplicativeLR', 'torch.optim.lr_scheduler.MultiplicativeLR', (['optim'], {'lr_lambda': 'lmbda'}), '(optim, lr_lambda=lmbda)\n', (1455, 1479), False, 'import torch\n'), ((2346, 2387), 'matplotlib.pyplot.figure', 'plt.figure', (['"""Objective"""'], {'figsize': '(7, 2.5)'}), "('Objective', figsize=(7, 2.5))\n", (2356, 2387), True, 'import matplotlib.pyplot as plt\n'), ((2396, 2441), 'matplotlib.pyplot.semilogy', 'plt.semilogy', (['self.obj_log'], {'label': '"""objective"""'}), "(self.obj_log, label='objective')\n", (2408, 2441), True, 'import matplotlib.pyplot as plt\n'), ((2450, 2491), 'matplotlib.pyplot.semilogy', 'plt.semilogy', (['self.err_log'], {'label': '"""error"""'}), "(self.err_log, label='error')\n", (2462, 2491), True, 'import matplotlib.pyplot as plt\n'), ((2500, 2512), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (2510, 2512), True, 'import matplotlib.pyplot as plt\n'), ((2521, 2574), 'matplotlib.pyplot.title', 'plt.title', (['"""Training objecjtive and prediction error"""'], {}), "('Training objecjtive and prediction error')\n", (2530, 2574), True, 'import matplotlib.pyplot as plt\n'), ((2583, 2607), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Iterations"""'], {}), 
"('Iterations')\n", (2593, 2607), True, 'import matplotlib.pyplot as plt\n'), ((2616, 2649), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Loss vs model error"""'], {}), "('Loss vs model error')\n", (2626, 2649), True, 'import matplotlib.pyplot as plt\n'), ((2658, 2672), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (2666, 2672), True, 'import matplotlib.pyplot as plt\n'), ((2851, 2865), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (2860, 2865), True, 'import matplotlib.pyplot as plt\n'), ((2881, 2924), 'matplotlib.pyplot.figure', 'plt.figure', (['"""predeiction"""'], {'figsize': '(7, 2.5)'}), "('predeiction', figsize=(7, 2.5))\n", (2891, 2924), True, 'import matplotlib.pyplot as plt\n'), ((3186, 3237), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {'fraction': '(0.085)', 'pad': '(0.01)', 'format': 'sfmt'}), '(fraction=0.085, pad=0.01, format=sfmt)\n', (3198, 3237), True, 'import matplotlib.pyplot as plt\n'), ((3246, 3284), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Horizontal distance (km)"""'], {}), "('Horizontal distance (km)')\n", (3256, 3284), True, 'import matplotlib.pyplot as plt\n'), ((3293, 3317), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Depth (km)"""'], {}), "('Depth (km)')\n", (3303, 3317), True, 'import matplotlib.pyplot as plt\n'), ((3326, 3341), 'matplotlib.pyplot.grid', 'plt.grid', (['(False)'], {}), '(False)\n', (3334, 3341), True, 'import matplotlib.pyplot as plt\n'), ((3613, 3627), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (3622, 3627), True, 'import matplotlib.pyplot as plt\n'), ((518, 543), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (541, 543), False, 'import torch\n'), ((585, 605), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (597, 605), False, 'import torch\n'), ((618, 673), 'torch.set_default_tensor_type', 'torch.set_default_tensor_type', (['"""torch.cuda.FloatTensor"""'], {}), 
"('torch.cuda.FloatTensor')\n", (647, 673), False, 'import torch\n'), ((714, 733), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (726, 733), False, 'import torch\n'), ((746, 796), 'torch.set_default_tensor_type', 'torch.set_default_tensor_type', (['"""torch.FloatTensor"""'], {}), "('torch.FloatTensor')\n", (775, 796), False, 'import torch\n'), ((923, 1008), 'numpy.array', 'np.array', (['[0.0, self.x.shape[2] * spacing[0], self.x.shape[3] * spacing[1], 0.0]'], {}), '([0.0, self.x.shape[2] * spacing[0], self.x.shape[3] * spacing[1], 0.0]\n )\n', (931, 1008), True, 'import numpy as np\n'), ((2693, 2741), 'os.path.join', 'os.path.join', (['args.save_path', '"""training_obj.png"""'], {}), "(args.save_path, 'training_obj.png')\n", (2705, 2741), False, 'import os\n'), ((3668, 3710), 'matplotlib.pyplot.figure', 'plt.figure', (['"""true model"""'], {'figsize': '(7, 2.5)'}), "('true model', figsize=(7, 2.5))\n", (3678, 3710), True, 'import matplotlib.pyplot as plt\n'), ((3989, 4040), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {'fraction': '(0.085)', 'pad': '(0.01)', 'format': 'sfmt'}), '(fraction=0.085, pad=0.01, format=sfmt)\n', (4001, 4040), True, 'import matplotlib.pyplot as plt\n'), ((4053, 4091), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Horizontal distance (km)"""'], {}), "('Horizontal distance (km)')\n", (4063, 4091), True, 'import matplotlib.pyplot as plt\n'), ((4104, 4128), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Depth (km)"""'], {}), "('Depth (km)')\n", (4114, 4128), True, 'import matplotlib.pyplot as plt\n'), ((4141, 4164), 'matplotlib.pyplot.title', 'plt.title', (['"""True model"""'], {}), "('True model')\n", (4150, 4164), True, 'import matplotlib.pyplot as plt\n'), ((4177, 4192), 'matplotlib.pyplot.grid', 'plt.grid', (['(False)'], {}), '(False)\n', (4185, 4192), True, 'import matplotlib.pyplot as plt\n'), ((4375, 4389), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (4384, 4389), True, 'import 
matplotlib.pyplot as plt\n'), ((4409, 4454), 'matplotlib.pyplot.figure', 'plt.figure', (['"""observed data"""'], {'figsize': '(7, 2.5)'}), "('observed data', figsize=(7, 2.5))\n", (4419, 4454), True, 'import matplotlib.pyplot as plt\n'), ((4733, 4784), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {'fraction': '(0.085)', 'pad': '(0.01)', 'format': 'sfmt'}), '(fraction=0.085, pad=0.01, format=sfmt)\n', (4745, 4784), True, 'import matplotlib.pyplot as plt\n'), ((4797, 4835), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Horizontal distance (km)"""'], {}), "('Horizontal distance (km)')\n", (4807, 4835), True, 'import matplotlib.pyplot as plt\n'), ((4848, 4872), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Depth (km)"""'], {}), "('Depth (km)')\n", (4858, 4872), True, 'import matplotlib.pyplot as plt\n'), ((4885, 4911), 'matplotlib.pyplot.title', 'plt.title', (['"""Observed data"""'], {}), "('Observed data')\n", (4894, 4911), True, 'import matplotlib.pyplot as plt\n'), ((4924, 4939), 'matplotlib.pyplot.grid', 'plt.grid', (['(False)'], {}), '(False)\n', (4932, 4939), True, 'import matplotlib.pyplot as plt\n'), ((5122, 5136), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (5131, 5136), True, 'import matplotlib.pyplot as plt\n'), ((1049, 1072), 'generator.generator', 'generator', (['self.x.shape'], {}), '(self.x.shape)\n', (1058, 1072), False, 'from generator import generator\n'), ((1102, 1136), 'torch.randn', 'torch.randn', (['([1, 3] + G.crop.d_dim)'], {}), '([1, 3] + G.crop.d_dim)\n', (1113, 1136), False, 'import torch\n'), ((4217, 4254), 'os.path.join', 'os.path.join', (['args.save_path', '"""x.png"""'], {}), "(args.save_path, 'x.png')\n", (4229, 4254), False, 'import os\n'), ((4964, 5001), 'os.path.join', 'os.path.join', (['args.save_path', '"""y.png"""'], {}), "(args.save_path, 'y.png')\n", (4976, 5001), False, 'import os\n'), ((1759, 1789), 'torch.norm', 'torch.norm', (['(self.xhat - self.y)'], {}), '(self.xhat - self.y)\n', (1769, 
1789), False, 'import torch\n'), ((1190, 1215), 'torch.randn', 'torch.randn', (['self.x.shape'], {}), '(self.x.shape)\n', (1201, 1215), False, 'import torch\n'), ((2113, 2143), 'torch.norm', 'torch.norm', (['(self.x - self.xhat)'], {}), '(self.x - self.xhat)\n', (2123, 2143), False, 'import torch\n')] |
# coding=utf-8
# Copyright 2017 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Train on TPU."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import os
import sys
# Dependency imports
from tensor2tensor import models # pylint: disable=unused-import
from tensor2tensor import problems as problems_lib # pylint: disable=unused-import
from tensor2tensor.tpu import tpu_trainer_lib
from tensor2tensor.utils import decoding
from tensor2tensor.utils import flags as t2t_flags # pylint: disable=unused-import
from tensor2tensor.utils import registry
from tensor2tensor.utils import usr_dir
import tensorflow as tf
flags = tf.flags
FLAGS = flags.FLAGS
# See flags.py for additional command-line flags.
flags.DEFINE_string("t2t_usr_dir", "",
                    "Path to a Python module that will be imported. The "
                    "__init__.py file should include the necessary imports. "
                    "The imported files should contain registrations, "
                    "e.g. @registry.register_model calls, that will then be "
                    "available to the t2t-trainer.")
flags.DEFINE_integer("random_seed", 1234, "Random seed.")
# TPU execution knobs.
flags.DEFINE_integer("tpu_num_shards", 8, "Number of tpu shards.")
flags.DEFINE_integer("iterations_per_loop", 1000,
                     "Number of iterations in a TPU training loop.")
flags.DEFINE_bool("use_tpu", False, "Whether to use TPU.")
# Data-generation and profiling options.
flags.DEFINE_bool("generate_data", False, "Generate data before training?")
flags.DEFINE_string("tmp_dir", "/tmp/t2t_datagen",
                    "Temporary storage directory, used if --generate_data.")
flags.DEFINE_bool("profile", False, "Profile performance?")
# To maintain compatibility with some internal libs, we guard against these flag
# definitions possibly erroring. Apologies for the ugliness.
try:
  flags.DEFINE_string("master", "", "Address of TensorFlow master.")
  flags.DEFINE_string("output_dir", "", "Base output directory for run.")
  flags.DEFINE_string("schedule", "continuous_train_and_eval",
                      "Method of Experiment to run.")
  flags.DEFINE_integer("eval_steps", 10000,
                       "Number of steps in evaluation. By default, eval will "
                       "stop after eval_steps or when it runs through the eval "
                       "dataset once in full, whichever comes first, so this "
                       "can be a very large number.")
except:  # pylint: disable=bare-except
  pass
def get_problem_name():
  """Return the problem name from --problems (exactly one is expected)."""
  names = FLAGS.problems.split("-")
  assert len(names) == 1
  return names[0]
def create_hparams():
  """Build hparams from --hparams_set, applying --hparams overrides."""
  hparams_set = FLAGS.hparams_set
  overrides = FLAGS.hparams
  return tpu_trainer_lib.create_hparams(hparams_set, overrides)
def create_experiment_fn():
  """Build an experiment_fn, wiring every setting from command-line FLAGS."""
  return tpu_trainer_lib.create_experiment_fn(
      model_name=FLAGS.model,
      problem_name=get_problem_name(),
      data_dir=os.path.expanduser(FLAGS.data_dir),
      train_steps=FLAGS.train_steps,
      eval_steps=FLAGS.eval_steps,
      min_eval_frequency=FLAGS.local_eval_frequency,
      schedule=FLAGS.schedule,
      export=FLAGS.export_saved_model,
      decode_hparams=decoding.decode_hparams(FLAGS.decode_hparams),
      use_tfdbg=FLAGS.tfdbg,
      use_dbgprofile=FLAGS.dbgprofile,
      eval_early_stopping_steps=FLAGS.eval_early_stopping_steps,
      eval_early_stopping_metric=FLAGS.eval_early_stopping_metric,
      eval_early_stopping_metric_delta=FLAGS.eval_early_stopping_metric_delta,
      eval_early_stopping_metric_minimize=FLAGS.
      eval_early_stopping_metric_minimize,
      use_tpu=FLAGS.use_tpu)
def create_run_config(hp):
  """Build a RunConfig from FLAGS plus hparams-derived parallelism settings.

  Args:
    hp: the hparams object; only its no_data_parallelism and
      daisy_chain_variables attributes are read here.
  """
  return tpu_trainer_lib.create_run_config(
      model_dir=os.path.expanduser(FLAGS.output_dir),
      master=FLAGS.master,
      iterations_per_loop=FLAGS.iterations_per_loop,
      num_shards=FLAGS.tpu_num_shards,
      log_device_placement=FLAGS.log_device_placement,
      # Checkpoint at least once per TPU loop / local eval cycle.
      save_checkpoints_steps=max(FLAGS.iterations_per_loop,
                                 FLAGS.local_eval_frequency),
      keep_checkpoint_max=FLAGS.keep_checkpoint_max,
      keep_checkpoint_every_n_hours=FLAGS.keep_checkpoint_every_n_hours,
      num_gpus=FLAGS.worker_gpu,
      gpu_order=FLAGS.gpu_order,
      shard_to_cpu=FLAGS.locally_shard_to_cpu,
      num_async_replicas=FLAGS.worker_replicas,
      gpu_mem_fraction=FLAGS.worker_gpu_memory_fraction,
      enable_graph_rewriter=FLAGS.experimental_optimize_placement,
      use_tpu=FLAGS.use_tpu,
      schedule=FLAGS.schedule,
      no_data_parallelism=hp.no_data_parallelism,
      daisy_chain_variables=hp.daisy_chain_variables,
      ps_replicas=FLAGS.ps_replicas,
      ps_job=FLAGS.ps_job,
      ps_gpu=FLAGS.ps_gpu,
      sync=FLAGS.sync,
      worker_id=FLAGS.worker_id,
      worker_job=FLAGS.worker_job)
def generate_data():
  """Generate the dataset for the configured problem into --data_dir."""
  data_dir = os.path.expanduser(FLAGS.data_dir)
  tmp_dir = os.path.expanduser(FLAGS.tmp_dir)
  # Both directories must exist before generation starts.
  for directory in (data_dir, tmp_dir):
    tf.gfile.MakeDirs(directory)
  problem_name = get_problem_name()
  tf.logging.info("Generating data for %s" % problem_name)
  problem = registry.problem(problem_name)
  problem.generate_data(data_dir, tmp_dir)
@contextlib.contextmanager
def profile_context():
  """Context manager that profiles the first 100 steps when --profile is set.

  A no-op passthrough when profiling is disabled.
  """
  if not FLAGS.profile:
    yield
    return
  with tf.contrib.tfprof.ProfileContext("t2tprof",
                                    trace_steps=range(100),
                                    dump_steps=range(100)) as pctx:
    opts = tf.profiler.ProfileOptionBuilder.time_and_memory()
    pctx.add_auto_profiling("op", opts, range(100))
    yield
def log_registry():
  """Print the registry help text and exit when --registry_help is set."""
  if not FLAGS.registry_help:
    return
  tf.logging.info(registry.help_string())
  sys.exit(0)
def execute_schedule(exp):
  """Run the Experiment method named by --schedule, under profile_context."""
  if not hasattr(exp, FLAGS.schedule):
    raise ValueError(
        "Experiment has no method %s, from --schedule" % FLAGS.schedule)
  with profile_context():
    getattr(exp, FLAGS.schedule)()
def main(_):
  """Entry point: seed, import user code, then build and run the experiment."""
  tf.logging.set_verbosity(tf.logging.INFO)
  tpu_trainer_lib.set_random_seed(FLAGS.random_seed)
  # Pull in user-registered models/problems before touching the registry.
  usr_dir.import_usr_dir(FLAGS.t2t_usr_dir)
  log_registry()
  if FLAGS.generate_data:
    generate_data()
  hparams = create_hparams()
  run_config = create_run_config(hparams)
  exp_fn = create_experiment_fn()
  exp = exp_fn(run_config, hparams)
  execute_schedule(exp)
if __name__ == "__main__":
  tf.app.run()
| [
"sys.exit",
"tensorflow.profiler.ProfileOptionBuilder.time_and_memory",
"tensorflow.logging.info",
"tensorflow.logging.set_verbosity",
"tensor2tensor.utils.registry.help_string",
"tensor2tensor.tpu.tpu_trainer_lib.set_random_seed",
"tensor2tensor.utils.registry.problem",
"tensor2tensor.utils.usr_dir.i... | [((3213, 3277), 'tensor2tensor.tpu.tpu_trainer_lib.create_hparams', 'tpu_trainer_lib.create_hparams', (['FLAGS.hparams_set', 'FLAGS.hparams'], {}), '(FLAGS.hparams_set, FLAGS.hparams)\n', (3243, 3277), False, 'from tensor2tensor.tpu import tpu_trainer_lib\n'), ((5386, 5420), 'os.path.expanduser', 'os.path.expanduser', (['FLAGS.data_dir'], {}), '(FLAGS.data_dir)\n', (5404, 5420), False, 'import os\n'), ((5433, 5466), 'os.path.expanduser', 'os.path.expanduser', (['FLAGS.tmp_dir'], {}), '(FLAGS.tmp_dir)\n', (5451, 5466), False, 'import os\n'), ((5469, 5496), 'tensorflow.gfile.MakeDirs', 'tf.gfile.MakeDirs', (['data_dir'], {}), '(data_dir)\n', (5486, 5496), True, 'import tensorflow as tf\n'), ((5499, 5525), 'tensorflow.gfile.MakeDirs', 'tf.gfile.MakeDirs', (['tmp_dir'], {}), '(tmp_dir)\n', (5516, 5525), True, 'import tensorflow as tf\n'), ((5565, 5621), 'tensorflow.logging.info', 'tf.logging.info', (["('Generating data for %s' % problem_name)"], {}), "('Generating data for %s' % problem_name)\n", (5580, 5621), True, 'import tensorflow as tf\n'), ((6450, 6491), 'tensorflow.logging.set_verbosity', 'tf.logging.set_verbosity', (['tf.logging.INFO'], {}), '(tf.logging.INFO)\n', (6474, 6491), True, 'import tensorflow as tf\n'), ((6494, 6544), 'tensor2tensor.tpu.tpu_trainer_lib.set_random_seed', 'tpu_trainer_lib.set_random_seed', (['FLAGS.random_seed'], {}), '(FLAGS.random_seed)\n', (6525, 6544), False, 'from tensor2tensor.tpu import tpu_trainer_lib\n'), ((6547, 6588), 'tensor2tensor.utils.usr_dir.import_usr_dir', 'usr_dir.import_usr_dir', (['FLAGS.t2t_usr_dir'], {}), '(FLAGS.t2t_usr_dir)\n', (6569, 6588), False, 'from tensor2tensor.utils import usr_dir\n'), ((6851, 6863), 'tensorflow.app.run', 'tf.app.run', ([], {}), '()\n', (6861, 6863), True, 'import tensorflow as tf\n'), ((6197, 6208), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (6205, 6208), False, 'import sys\n'), ((3439, 3473), 'os.path.expanduser', 'os.path.expanduser', 
(['FLAGS.data_dir'], {}), '(FLAGS.data_dir)\n', (3457, 3473), False, 'import os\n'), ((3691, 3736), 'tensor2tensor.utils.decoding.decode_hparams', 'decoding.decode_hparams', (['FLAGS.decode_hparams'], {}), '(FLAGS.decode_hparams)\n', (3714, 3736), False, 'from tensor2tensor.utils import decoding\n'), ((4227, 4263), 'os.path.expanduser', 'os.path.expanduser', (['FLAGS.output_dir'], {}), '(FLAGS.output_dir)\n', (4245, 4263), False, 'import os\n'), ((5624, 5654), 'tensor2tensor.utils.registry.problem', 'registry.problem', (['problem_name'], {}), '(problem_name)\n', (5640, 5654), False, 'from tensor2tensor.utils import registry\n'), ((5966, 6016), 'tensorflow.profiler.ProfileOptionBuilder.time_and_memory', 'tf.profiler.ProfileOptionBuilder.time_and_memory', ([], {}), '()\n', (6014, 6016), True, 'import tensorflow as tf\n'), ((6169, 6191), 'tensor2tensor.utils.registry.help_string', 'registry.help_string', ([], {}), '()\n', (6189, 6191), False, 'from tensor2tensor.utils import registry\n')] |
import numpy as np
from gym.envs.mujoco import mujoco_env
from gym import utils
import os
from scipy.spatial.distance import euclidean
from meta_mb.meta_envs.base import RandomEnv
#from mujoco-py.mujoco_py.pxd.mujoco import local
import mujoco_py
class PegFullBlueEnv(RandomEnv, utils.EzPickle):
    """Blue-arm peg insertion task with a randomly placed goal slot.

    The goal is described by three points (top/center/bottom of the slot);
    the distance reward sums the Euclidean distances of the corresponding
    peg points to those goals, plus a small control-cost penalty.
    """

    def __init__(self, goal_dist=3e-2):
        utils.EzPickle.__init__(**locals())
        xml_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'assets', 'blue_full_peg_v1.xml')
        x = 0.005
        y = -0.5
        z = -0.35
        # Initial goal points: a vertical segment of total length 0.3.
        self.top_goal = np.array([x, y, z+0.15])
        self.center_goal = np.array([x, y, z])
        self.bottom_goal = np.array([x, y, z-0.15])
        self.peg_loc = self.center_goal
        self.goal_dist = goal_dist  # permissible distance from goal
        RandomEnv.__init__(self, 2, xml_file, 2)

    def _get_obs(self):
        """Observation: joint positions, velocities (minus the last three
        entries), and the peg-center offset from the center goal."""
        return np.concatenate([
            self.sim.data.qpos.flat,
            self.sim.data.qvel.flat[:-3],
            self.peg_location() - self.center_goal
        ])

    def step(self, action):
        """Advance the simulation one control step.

        Returns the standard (observation, reward, done, info) tuple;
        reward = -peg_dist + 1.25e-4 * (-||action||^2), never done.
        """
        self.do_simulation(action, self.frame_skip)
        self.peg_loc = self.peg_location()
        reward_dist = -self.peg_dist()
        reward_ctrl = -np.square(action).sum()
        reward = reward_dist + 1.25e-4 * reward_ctrl
        # Result unused; call kept for parity with the original behavior.
        self.peg_orient()
        observation = self._get_obs()
        done = False
        info = dict(reward_dist=reward_dist, reward_ctrl=reward_ctrl)
        return observation, reward, done, info

    def reset_model(self):
        """Randomize the initial state and the peg-table goal position."""
        qpos = self.init_qpos + self.np_random.uniform(low=-0.01, high=0.01, size=self.model.nq)
        qvel = self.init_qvel + self.np_random.uniform(low=-0.01, high=0.01, size=self.model.nv)
        # Randomly place the peg table, then refresh the goal points from it.
        peg_table_position = np.random.uniform(low=[-0.2, -1, 0.3], high=[0.75, -0.6, 0.3])
        self.sim.model.body_pos[-8] = peg_table_position
        self.top_goal = self.get_body_com("g1")
        self.center_goal = self.get_body_com("g2")
        self.bottom_goal = self.get_body_com("g3")
        qpos[-6:-3] = np.zeros((3, ))
        qpos[-3:] = self.center_goal
        qvel[-6:] = 0
        self.set_state(qpos, qvel)
        observation = self._get_obs()
        return observation

    def reward(self, obs, act, obs_next):
        """Reward for a batch (2-D inputs) or a single transition (1-D).

        NOTE(review): the distance term comes from the *current* simulator
        state via peg_dist(), not from obs/obs_next — confirm this is
        intended before using it in model-based rollouts.
        """
        assert obs.ndim == act.ndim == obs_next.ndim
        if obs.ndim == 2:
            assert obs.shape == obs_next.shape and act.shape[0] == obs.shape[0]
            reward_ctrl = -np.sum(np.square(act), axis=1)
            reward_dist = -self.peg_dist()
            reward = reward_dist + 1.25e-4 * reward_ctrl
            return np.clip(reward, -1e2, 1e2)
        elif obs.ndim == 1:
            assert obs.shape == obs_next.shape
            reward_ctrl = -np.sum(np.square(act))
            reward_dist = -self.peg_dist()
            # BUG FIX: this branch previously referenced the undefined
            # name `reward_run`, which raised NameError when reached.
            reward = reward_dist + 1.25e-4 * reward_ctrl
            return np.clip(reward, -1e2, 1e2)
        else:
            raise NotImplementedError

    def peg_orient(self):
        """Return the peg-center body orientation quaternion."""
        return self.data.get_body_xquat("peg-center")

    def peg_dist(self):
        """Return the summed distance of the three peg points to their goals."""
        top = self.get_body_com("peg-top")
        center = self.get_body_com("peg-center")
        bottom = self.get_body_com("peg-bottom")
        distance = (euclidean(top, self.top_goal)
                    + euclidean(center, self.center_goal)
                    + euclidean(bottom, self.bottom_goal))
        return distance

    def peg_location(self):
        """Return the peg-center body position."""
        return self.get_body_com("peg-center")

    def top(self, center):
        """Top goal point derived from a slot center position."""
        x = center[0]
        y = center[1] + 0.3
        z = center[2] - 0.4
        return np.array([x, y, z])

    def center(self, center):
        """Center goal point derived from a slot center position."""
        x = center[0]
        y = center[1] + 0.3
        z = center[2] - 0.55
        return np.array([x, y, z])

    def bottom(self, center):
        """Bottom goal point derived from a slot center position."""
        x = center[0]
        y = center[1] + 0.3
        z = center[2] - 0.7
        return np.array([x, y, z])

    def viewer_setup(self):
        """Camera placement for rendering."""
        self.viewer.cam.distance = self.model.stat.extent * 2
        self.viewer.cam.elevation = -20
        self.viewer.cam.type = 0
        self.viewer.cam.azimuth = 180
if __name__ == "__main__":
    # Smoke test: roll out random actions indefinitely, rendering each step.
    env = PegFullBlueEnv()
    while True:
        env.reset()
        for _ in range(500):
            env.step(env.action_space.sample())
            env.render()
"numpy.clip",
"numpy.square",
"numpy.array",
"numpy.zeros",
"scipy.spatial.distance.euclidean",
"os.path.dirname",
"numpy.random.uniform",
"meta_mb.meta_envs.base.RandomEnv.__init__"
] | [((571, 597), 'numpy.array', 'np.array', (['[x, y, z + 0.15]'], {}), '([x, y, z + 0.15])\n', (579, 597), True, 'import numpy as np\n'), ((623, 642), 'numpy.array', 'np.array', (['[x, y, z]'], {}), '([x, y, z])\n', (631, 642), True, 'import numpy as np\n'), ((669, 695), 'numpy.array', 'np.array', (['[x, y, z - 0.15]'], {}), '([x, y, z - 0.15])\n', (677, 695), True, 'import numpy as np\n'), ((813, 853), 'meta_mb.meta_envs.base.RandomEnv.__init__', 'RandomEnv.__init__', (['self', '(2)', 'xml_file', '(2)'], {}), '(self, 2, xml_file, 2)\n', (831, 853), False, 'from meta_mb.meta_envs.base import RandomEnv\n'), ((1848, 1910), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '[-0.2, -1, 0.3]', 'high': '[0.75, -0.6, 0.3]'}), '(low=[-0.2, -1, 0.3], high=[0.75, -0.6, 0.3])\n', (1865, 1910), True, 'import numpy as np\n'), ((2142, 2156), 'numpy.zeros', 'np.zeros', (['(3,)'], {}), '((3,))\n', (2150, 2156), True, 'import numpy as np\n'), ((3682, 3701), 'numpy.array', 'np.array', (['[x, y, z]'], {}), '([x, y, z])\n', (3690, 3701), True, 'import numpy as np\n'), ((3827, 3846), 'numpy.array', 'np.array', (['[x, y, z]'], {}), '([x, y, z])\n', (3835, 3846), True, 'import numpy as np\n'), ((3971, 3990), 'numpy.array', 'np.array', (['[x, y, z]'], {}), '([x, y, z])\n', (3979, 3990), True, 'import numpy as np\n'), ((2697, 2727), 'numpy.clip', 'np.clip', (['reward', '(-100.0)', '(100.0)'], {}), '(reward, -100.0, 100.0)\n', (2704, 2727), True, 'import numpy as np\n'), ((3424, 3459), 'scipy.spatial.distance.euclidean', 'euclidean', (['bottom', 'self.bottom_goal'], {}), '(bottom, self.bottom_goal)\n', (3433, 3459), False, 'from scipy.spatial.distance import euclidean\n'), ((430, 455), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (445, 455), False, 'import os\n'), ((2967, 2997), 'numpy.clip', 'np.clip', (['reward', '(-100.0)', '(100.0)'], {}), '(reward, -100.0, 100.0)\n', (2974, 2997), True, 'import numpy as np\n'), ((3314, 3343), 
'scipy.spatial.distance.euclidean', 'euclidean', (['top', 'self.top_goal'], {}), '(top, self.top_goal)\n', (3323, 3343), False, 'from scipy.spatial.distance import euclidean\n'), ((3366, 3401), 'scipy.spatial.distance.euclidean', 'euclidean', (['center', 'self.center_goal'], {}), '(center, self.center_goal)\n', (3375, 3401), False, 'from scipy.spatial.distance import euclidean\n'), ((1294, 1311), 'numpy.square', 'np.square', (['action'], {}), '(action)\n', (1303, 1311), True, 'import numpy as np\n'), ((2554, 2568), 'numpy.square', 'np.square', (['act'], {}), '(act)\n', (2563, 2568), True, 'import numpy as np\n'), ((2833, 2847), 'numpy.square', 'np.square', (['act'], {}), '(act)\n', (2842, 2847), True, 'import numpy as np\n')] |
from django.conf.urls import url
from . import views
# URL routes for per-site "special pages"; every route is scoped by the
# parent site settings primary key (site_settings_pk).
urlpatterns = [
    # Create a special page under the given site settings.
    url(r'^(?P<site_settings_pk>\d+)/special-page/add/$',
        views.special_page_add, name='special-page-add'),
    # Edit an existing special page (pk) under the given site settings.
    url(r'^(?P<site_settings_pk>\d+)/special-page/'
        r'(?P<pk>\d+)/update/$',
        views.special_page_edit, name='special-page-edit'),
    # Delete an existing special page (pk) under the given site settings.
    url(r'^(?P<site_settings_pk>\d+)/special-page/'
        r'(?P<pk>\d+)/delete/$',
        views.special_page_delete, name='special-page-delete')
]
| [
"django.conf.urls.url"
] | [((75, 182), 'django.conf.urls.url', 'url', (['"""^(?P<site_settings_pk>\\\\d+)/special-page/add/$"""', 'views.special_page_add'], {'name': '"""special-page-add"""'}), "('^(?P<site_settings_pk>\\\\d+)/special-page/add/$', views.\n special_page_add, name='special-page-add')\n", (78, 182), False, 'from django.conf.urls import url\n'), ((191, 316), 'django.conf.urls.url', 'url', (['"""^(?P<site_settings_pk>\\\\d+)/special-page/(?P<pk>\\\\d+)/update/$"""', 'views.special_page_edit'], {'name': '"""special-page-edit"""'}), "('^(?P<site_settings_pk>\\\\d+)/special-page/(?P<pk>\\\\d+)/update/$', views\n .special_page_edit, name='special-page-edit')\n", (194, 316), False, 'from django.conf.urls import url\n'), ((336, 465), 'django.conf.urls.url', 'url', (['"""^(?P<site_settings_pk>\\\\d+)/special-page/(?P<pk>\\\\d+)/delete/$"""', 'views.special_page_delete'], {'name': '"""special-page-delete"""'}), "('^(?P<site_settings_pk>\\\\d+)/special-page/(?P<pk>\\\\d+)/delete/$', views\n .special_page_delete, name='special-page-delete')\n", (339, 465), False, 'from django.conf.urls import url\n')] |
import pickle
from functools import lru_cache
from pathlib import Path
import dask
from sample_pipeline.pipeline import get_full_pipeline
# Pickled reference output lives next to this module.
NON_REGRESSION_DATA_FILE = Path(__file__).parent / "non_regression_data.pickle"
# Fixed ticker universe and date window so the reference data is reproducible.
NON_REGRESSION_TICKERS = {"AAPL", "MSFT", "AMZN", "GOOGL"}
NON_REGRESSION_START_DATE = "2021-01-04"
NON_REGRESSION_END_DATE = "2021-01-29"
def get_non_regression_pipeline():
    """Return the full (lazy/delayed) pipeline over the fixed reference window."""
    return get_full_pipeline(
        tickers=NON_REGRESSION_TICKERS,
        start_date=NON_REGRESSION_START_DATE,
        end_date=NON_REGRESSION_END_DATE,
    )
@lru_cache()
def load_non_regression_data():
    """Load (and memoise) the pickled reference results from disk."""
    return pickle.loads(NON_REGRESSION_DATA_FILE.read_bytes())
def generate_non_regression_data(non_reg_file=NON_REGRESSION_DATA_FILE):
    """Evaluate the non-regression pipeline and pickle the result to *non_reg_file*."""
    # The pipeline is a dict of delayed dask operations.
    delayed_pipeline = get_non_regression_pipeline()
    # dask.compute returns a 1-tuple; unpack the single evaluated pipeline.
    (evaluated_pipeline,) = dask.compute(delayed_pipeline)
    # Persist the evaluated values on disk.
    with open(non_reg_file, "wb") as fp:
        pickle.dump(evaluated_pipeline, fp)
| [
"pickle.dump",
"dask.compute",
"pathlib.Path",
"pickle.load",
"sample_pipeline.pipeline.get_full_pipeline",
"functools.lru_cache"
] | [((564, 575), 'functools.lru_cache', 'lru_cache', ([], {}), '()\n', (573, 575), False, 'from functools import lru_cache\n'), ((408, 534), 'sample_pipeline.pipeline.get_full_pipeline', 'get_full_pipeline', ([], {'tickers': 'NON_REGRESSION_TICKERS', 'start_date': 'NON_REGRESSION_START_DATE', 'end_date': 'NON_REGRESSION_END_DATE'}), '(tickers=NON_REGRESSION_TICKERS, start_date=\n NON_REGRESSION_START_DATE, end_date=NON_REGRESSION_END_DATE)\n', (425, 534), False, 'from sample_pipeline.pipeline import get_full_pipeline\n'), ((886, 913), 'dask.compute', 'dask.compute', (['full_pipeline'], {}), '(full_pipeline)\n', (898, 913), False, 'import dask\n'), ((168, 182), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (172, 182), False, 'from pathlib import Path\n'), ((676, 691), 'pickle.load', 'pickle.load', (['fp'], {}), '(fp)\n', (687, 691), False, 'import pickle\n'), ((1100, 1136), 'pickle.dump', 'pickle.dump', (['_evaluated_pipeline', 'fp'], {}), '(_evaluated_pipeline, fp)\n', (1111, 1136), False, 'import pickle\n')] |
#!/usr/bin/env python
import os
from ehive.runnable.IGFBaseProcess import IGFBaseProcess
from igf_data.utils.fileutils import get_temp_dir
from igf_data.process.data_qc.check_sequence_index_barcodes import CheckSequenceIndexBarcodes,IndexBarcodeValidationError
class CheckIndexStats(IGFBaseProcess):
  '''
  A ehive process class for checking barcode stats and report to slack and asana
  '''
  def param_defaults(self):
    '''
    Extend the base-class defaults with the relative stats file path and the
    strictness / ephemeral-tmp-space flags consumed by run()
    '''
    params_dict = super(CheckIndexStats,self).param_defaults()
    params_dict.update({
        'stats_filename':'Stats/Stats.json',
        'strict_check':True,
        'use_ephemeral_space':0,
      })
    return params_dict
  def run(self):
    '''
    Validate the demultiplexing barcode stats for one fastq directory.

    On success seeds dataflow_params with barcode_qc_stats=PASS.  On an
    IndexBarcodeValidationError seeds FAIL and reports via Slack, Asana and
    MS Teams (posting plot files instead of a plain message when the error
    carries plots).  Any other exception is reported and re-raised.
    '''
    try:
      samplesheet_file = self.param_required('original_samplesheet')
      seqrun_igf_id = self.param_required('seqrun_igf_id')
      fastq_dir = self.param_required('fastq_dir')
      model_name = self.param_required('model_name')
      project_name = self.param_required('project_name')
      stats_filename = self.param('stats_filename')
      strict_check = self.param('strict_check')
      use_ephemeral_space = self.param('use_ephemeral_space')
      work_dir = \
        get_temp_dir(use_ephemeral_space=use_ephemeral_space) # get work directory name
      stats_json_file = \
        os.path.join(
          fastq_dir,
          stats_filename) # get stats file path
      barcode_stat = \
        CheckSequenceIndexBarcodes(
          stats_json_file=stats_json_file,
          samplesheet_file=samplesheet_file,
          platform_name=model_name) # create check instance
      barcode_stat.\
        validate_barcode_stats(
          work_dir=work_dir, \
          strict_check=strict_check) # validate seqrun stats
      self.param('dataflow_params',
                 {'barcode_qc_stats':'PASS'}) # seed dataflow parame for the qc passed lanes
    except IndexBarcodeValidationError as e:
      self.param('dataflow_params',
                 {'barcode_qc_stats':'FAIL'}) # seed dataflow for failed lanes
      message = \
        'project: {0}, message:{1}'.\
        format(
          project_name,
          e.message)
      if len(e.plots)==0:
        self.post_message_to_slack(\
          message=e.message,
          reaction='fail') # only post msg to slack if no plots
        self.comment_asana_task(\
          task_name=seqrun_igf_id,
          comment=e.message) # log to asana task
        self.post_message_to_ms_team(
          message=message,
          reaction='fail')
      else:
        for plot_file in e.plots:
          self.post_file_to_slack(message=message,filepath=plot_file) # posting plot files to slack
          self.upload_file_to_asana_task(\
            task_name=seqrun_igf_id,
            filepath=plot_file, \
            comment=message) # upload plots to asana
    except Exception as e:
      # NOTE(review): if param_required('seqrun_igf_id') itself raised,
      # seqrun_igf_id is unbound here and the format below would NameError
      # -- confirm whether that path can occur in practice.
      message = \
        'seqrun: {2}, Error in {0}: {1}'.\
        format(
          self.__class__.__name__,
          e,
          seqrun_igf_id)
      self.warning(message)
      self.post_message_to_slack(message,reaction='fail') # post msg to slack for failed jobs
      self.post_message_to_ms_team(
        message=message,
        reaction='fail')
      raise
"igf_data.utils.fileutils.get_temp_dir",
"os.path.join",
"igf_data.process.data_qc.check_sequence_index_barcodes.CheckSequenceIndexBarcodes"
] | [((1149, 1202), 'igf_data.utils.fileutils.get_temp_dir', 'get_temp_dir', ([], {'use_ephemeral_space': 'use_ephemeral_space'}), '(use_ephemeral_space=use_ephemeral_space)\n', (1161, 1202), False, 'from igf_data.utils.fileutils import get_temp_dir\n'), ((1281, 1320), 'os.path.join', 'os.path.join', (['fastq_dir', 'stats_filename'], {}), '(fastq_dir, stats_filename)\n', (1293, 1320), False, 'import os\n'), ((1449, 1573), 'igf_data.process.data_qc.check_sequence_index_barcodes.CheckSequenceIndexBarcodes', 'CheckSequenceIndexBarcodes', ([], {'stats_json_file': 'stats_json_file', 'samplesheet_file': 'samplesheet_file', 'platform_name': 'model_name'}), '(stats_json_file=stats_json_file,\n samplesheet_file=samplesheet_file, platform_name=model_name)\n', (1475, 1573), False, 'from igf_data.process.data_qc.check_sequence_index_barcodes import CheckSequenceIndexBarcodes, IndexBarcodeValidationError\n')] |
import os
import numpy as np
import tensorflow as tf
import math
from PIL import Image
#import pdb
# Shorthand handle to the TensorFlow command-line FLAGS object.
F = tf.app.flags.FLAGS
"""
Save tensorflow model
Parameters:
* checkpoint_dir - name of the directory where model is to be saved
* sess - current tensorflow session
* saver - tensorflow saver
"""
def save_model(checkpoint_dir, sess, saver):
    """Persist *sess* to <checkpoint_dir>/model.ckpt via *saver*, creating
    the directory first if it does not exist."""
    if not os.path.exists(checkpoint_dir):
        os.makedirs(checkpoint_dir)
    saver.save(sess, os.path.join(checkpoint_dir, "model.ckpt"))
"""
Load tensorflow model
Parameters:
* checkpoint_dir - name of the directory where model is to be loaded from
* sess - current tensorflow session
* saver - tensorflow saver
Returns: True if the model loaded successfully, else False
"""
def load_model(checkpoint_dir, sess, saver):
    """Restore the latest checkpoint from *checkpoint_dir* into *sess*.

    Returns True when a checkpoint was found and restored, False otherwise.
    """
    print(" [*] Reading checkpoints...")
    state = tf.train.get_checkpoint_state(checkpoint_dir)
    if not (state and state.model_checkpoint_path):
        return False
    ckpt_name = os.path.basename(state.model_checkpoint_path)
    saver.restore(sess, os.path.join(checkpoint_dir, ckpt_name))
    return True
"""
To recompose an array of 3D images from patches
"""
def recompose3D_overlap(preds, img_h, img_w, img_d, stride_h, stride_w, stride_d):
    """Recompose full 3D volumes from overlapping patch predictions.

    Parameters:
        preds    - 4D array (n_patches_total, patch_h, patch_w, patch_d),
                   ordered image-major, then h/w/d patch position
        img_h/w/d   - dimensions of each full volume to rebuild
        stride_h/w/d - strides used when the patches were extracted

    Overlapping voxels are averaged over the number of covering patches and
    the average is rounded with np.around.  Returns an array of shape
    (n_full_imgs, img_h, img_w, img_d).

    Changes vs. original: removed a dead uint16 allocation of the result
    (it was immediately overwritten) and reuse the precomputed patch counts
    instead of recomputing them in every loop header.
    """
    patch_h = preds.shape[1]
    patch_w = preds.shape[2]
    patch_d = preds.shape[3]
    N_patches_h = (img_h-patch_h)//stride_h+1
    N_patches_w = (img_w-patch_w)//stride_w+1
    N_patches_d = (img_d-patch_d)//stride_d+1
    N_patches_img = N_patches_h * N_patches_w * N_patches_d
    print("N_patches_h: " ,N_patches_h)
    print("N_patches_w: " ,N_patches_w)
    print("N_patches_d: " ,N_patches_d)
    print("N_patches_img: ",N_patches_img)
    assert(preds.shape[0]%N_patches_img==0)
    N_full_imgs = preds.shape[0]//N_patches_img
    print("According to the dimension inserted, there are " \
        +str(N_full_imgs) +" full images (of " +str(img_h)+"x" +str(img_w)+"x" +str(img_d) +" each)")
    # Accumulate the sum of predictions and the per-voxel patch count so
    # overlapping regions can be averaged afterwards.
    raw_pred_martrix = np.zeros((N_full_imgs,img_h,img_w,img_d))
    raw_sum = np.zeros((N_full_imgs,img_h,img_w,img_d))
    k = 0
    # iterator over all the patches
    for i in range(N_full_imgs):
        for h in range(N_patches_h):
            for w in range(N_patches_w):
                for d in range(N_patches_d):
                    raw_pred_martrix[i,h*stride_h:(h*stride_h)+patch_h,\
                                     w*stride_w:(w*stride_w)+patch_w,\
                                     d*stride_d:(d*stride_d)+patch_d]+=preds[k]
                    raw_sum[i,h*stride_h:(h*stride_h)+patch_h,\
                            w*stride_w:(w*stride_w)+patch_w,\
                            d*stride_d:(d*stride_d)+patch_d]+=1.0
                    k+=1
    assert(k==preds.shape[0])
    # Every voxel must be covered by at least one patch before dividing.
    assert(np.min(raw_sum)>=1.0)
    return np.around(raw_pred_martrix/raw_sum)
#functions below are added by liuhuaqing 2019-07-15
def make_grid(tensor, nrow=8, padding=2,
              normalize=False, scale_each=False):
    """Tile a batch of 2-D maps (tensor[k] of shape H x W) into one uint8 grid.

    Layout mirrors torchvision's make_grid: at most *nrow* maps per row with
    *padding* pixels between cells.  *normalize* and *scale_each* are kept
    for interface compatibility but are not applied here.
    """
    n_maps = tensor.shape[0]
    cols = min(nrow, n_maps)
    rows = int(math.ceil(float(n_maps) / cols))
    cell_h = int(tensor.shape[1] + padding)
    cell_w = int(tensor.shape[2] + padding)
    canvas = np.zeros([cell_h * rows + 1 + padding // 2,
                      cell_w * cols + 1 + padding // 2], dtype=np.uint8)
    idx = 0
    for r in range(rows):
        for c in range(cols):
            if idx >= n_maps:
                break
            top = r * cell_h + 1 + padding // 2
            left = c * cell_w + 1 + padding // 2
            canvas[top:top + cell_h - padding,
                   left:left + cell_w - padding] = tensor[idx]
            idx += 1
    return canvas
def save_image(tensor, filename, nrow=8, padding=2,
               normalize=False, scale_each=False):
    """Render *tensor* as a tiled grid (see make_grid) and write it to *filename*."""
    grid = make_grid(tensor, nrow=nrow, padding=padding,
                     normalize=normalize, scale_each=scale_each)
    Image.fromarray(grid).save(filename)
# Definitions and computation of semantic-segmentation accuracy metrics;
# reference: https://blog.csdn.net/majinlei121/article/details/78965435
def fast_hist(a, b, n):
    """Return the n x n confusion matrix between label arrays *a* (truth)
    and *b* (prediction); ground-truth labels outside [0, n) are ignored."""
    valid = (a >= 0) & (a < n)  # normally all True
    flat = n * a[valid].astype(int) + b[valid]
    # np.bincount counts occurrences of each combined (truth, pred) index.
    return np.bincount(flat, minlength=n ** 2).reshape(n, n)
def Hist(a,b,n):
    """Thin wrapper around fast_hist, kept for backward compatibility."""
    return fast_hist(a, b, n)
def pixelAccuracy(trueMask,predMask,n_cls):
    """Overall pixel accuracy: correctly classified pixels / all counted pixels."""
    cm = Hist(trueMask, predMask, n_cls)
    return np.diag(cm).sum() / cm.sum()
def MeanPixelAccuracy(trueMask,predMask,n_cls):
    """Per-class pixel accuracy: confusion-matrix diagonal over row sums.

    Returns an array of length n_cls.  NOTE(review): a class absent from
    trueMask yields a zero row sum and therefore a divide-by-zero (nan)
    entry -- confirm callers tolerate this.
    """
    hist = Hist(trueMask,predMask,n_cls)
    PAs = np.diag(hist) / hist.sum(1)
    return PAs
def IntersectionoverUnion(trueMask,predMask,n_cls):
    """Per-class IoU: intersection (diagonal) over union (row + col - diagonal).

    NOTE(review): a class with an empty union divides by zero (nan) --
    confirm callers tolerate this.
    """
    hist = Hist(trueMask,predMask,n_cls)
    IoUs = np.diag(hist) / (hist.sum(1) + hist.sum(0) - np.diag(hist))
    return IoUs
def DiceScore(trueMask,predMask,n_cls):
    """Per-class Dice coefficient: 2*TP / (predicted + actual pixel counts)."""
    cm = Hist(trueMask, predMask, n_cls)
    true_positives = np.diag(cm)   # correctly predicted pixels per class
    predicted = np.sum(cm, 0)      # pixels predicted as each class
    actual = np.sum(cm, 1)         # ground-truth pixels per class
    return 2 * true_positives / (predicted + actual)
"os.path.exists",
"PIL.Image.fromarray",
"os.makedirs",
"os.path.join",
"numpy.diag",
"tensorflow.train.get_checkpoint_state",
"numpy.sum",
"numpy.zeros",
"numpy.around",
"os.path.basename",
"numpy.min"
] | [((838, 883), 'tensorflow.train.get_checkpoint_state', 'tf.train.get_checkpoint_state', (['checkpoint_dir'], {}), '(checkpoint_dir)\n', (867, 883), True, 'import tensorflow as tf\n'), ((1990, 2034), 'numpy.zeros', 'np.zeros', (['(N_full_imgs, img_h, img_w, img_d)'], {}), '((N_full_imgs, img_h, img_w, img_d))\n', (1998, 2034), True, 'import numpy as np\n'), ((2045, 2089), 'numpy.zeros', 'np.zeros', (['(N_full_imgs, img_h, img_w, img_d)'], {}), '((N_full_imgs, img_h, img_w, img_d))\n', (2053, 2089), True, 'import numpy as np\n'), ((2104, 2164), 'numpy.zeros', 'np.zeros', (['(N_full_imgs, img_h, img_w, img_d)'], {'dtype': '"""uint16"""'}), "((N_full_imgs, img_h, img_w, img_d), dtype='uint16')\n", (2112, 2164), True, 'import numpy as np\n'), ((2902, 2939), 'numpy.around', 'np.around', (['(raw_pred_martrix / raw_sum)'], {}), '(raw_pred_martrix / raw_sum)\n', (2911, 2939), True, 'import numpy as np\n'), ((3396, 3495), 'numpy.zeros', 'np.zeros', (['[height * ymaps + 1 + padding // 2, width * xmaps + 1 + padding // 2]'], {'dtype': 'np.uint8'}), '([height * ymaps + 1 + padding // 2, width * xmaps + 1 + padding //\n 2], dtype=np.uint8)\n', (3404, 3495), True, 'import numpy as np\n'), ((4090, 4112), 'PIL.Image.fromarray', 'Image.fromarray', (['ndarr'], {}), '(ndarr)\n', (4105, 4112), False, 'from PIL import Image\n'), ((5079, 5092), 'numpy.diag', 'np.diag', (['hist'], {}), '(hist)\n', (5086, 5092), True, 'import numpy as np\n'), ((5127, 5142), 'numpy.sum', 'np.sum', (['hist', '(0)'], {}), '(hist, 0)\n', (5133, 5142), True, 'import numpy as np\n'), ((5176, 5191), 'numpy.sum', 'np.sum', (['hist', '(1)'], {}), '(hist, 1)\n', (5182, 5191), True, 'import numpy as np\n'), ((380, 410), 'os.path.exists', 'os.path.exists', (['checkpoint_dir'], {}), '(checkpoint_dir)\n', (394, 410), False, 'import os\n'), ((416, 443), 'os.makedirs', 'os.makedirs', (['checkpoint_dir'], {}), '(checkpoint_dir)\n', (427, 443), False, 'import os\n'), ((463, 503), 'os.path.join', 'os.path.join', 
(['checkpoint_dir', 'model_name'], {}), '(checkpoint_dir, model_name)\n', (475, 503), False, 'import os\n'), ((942, 986), 'os.path.basename', 'os.path.basename', (['ckpt.model_checkpoint_path'], {}), '(ckpt.model_checkpoint_path)\n', (958, 986), False, 'import os\n'), ((2863, 2878), 'numpy.min', 'np.min', (['raw_sum'], {}), '(raw_sum)\n', (2869, 2878), True, 'import numpy as np\n'), ((4720, 4733), 'numpy.diag', 'np.diag', (['hist'], {}), '(hist)\n', (4727, 4733), True, 'import numpy as np\n'), ((4885, 4898), 'numpy.diag', 'np.diag', (['hist'], {}), '(hist)\n', (4892, 4898), True, 'import numpy as np\n'), ((1011, 1050), 'os.path.join', 'os.path.join', (['checkpoint_dir', 'ckpt_name'], {}), '(checkpoint_dir, ckpt_name)\n', (1023, 1050), False, 'import os\n'), ((4930, 4943), 'numpy.diag', 'np.diag', (['hist'], {}), '(hist)\n', (4937, 4943), True, 'import numpy as np\n'), ((4556, 4569), 'numpy.diag', 'np.diag', (['hist'], {}), '(hist)\n', (4563, 4569), True, 'import numpy as np\n')] |
from os.path import join, dirname
from pystacia import lena
# Output directory for the generated documentation images.
dest = join(dirname(__file__), '../_static/generated')
# sketch() with a single argument.
image = lena(256)
image.sketch(3)
image.write(join(dest, 'lena_sketch3.jpg'))
image.close()
# sketch() with two arguments -- the 0 is presumably a sigma/strength
# parameter; confirm against the pystacia docs.
image = lena(256)
image.sketch(6, 0)
image.write(join(dest, 'lena_sketch6,0.jpg'))
image.close()
| [
"os.path.dirname",
"os.path.join",
"pystacia.lena"
] | [((126, 135), 'pystacia.lena', 'lena', (['(256)'], {}), '(256)\n', (130, 135), False, 'from pystacia import lena\n'), ((219, 228), 'pystacia.lena', 'lena', (['(256)'], {}), '(256)\n', (223, 228), False, 'from pystacia import lena\n'), ((74, 91), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (81, 91), False, 'from os.path import join, dirname\n'), ((164, 194), 'os.path.join', 'join', (['dest', '"""lena_sketch3.jpg"""'], {}), "(dest, 'lena_sketch3.jpg')\n", (168, 194), False, 'from os.path import join, dirname\n'), ((260, 292), 'os.path.join', 'join', (['dest', '"""lena_sketch6,0.jpg"""'], {}), "(dest, 'lena_sketch6,0.jpg')\n", (264, 292), False, 'from os.path import join, dirname\n')] |
#Author: <NAME>
#Contact: <EMAIL>
#Date: Aug 02, 2020
import numpy as np
def cal_emp_cdf(insamples):
    '''
    Calculate the empirical CDF of Dirichlet-distributed facies proportion
    samples, based on the first facies column.

    Variables:
        insamples - input samples of facies proportions,
                    3D array-like, [n_seis_features, n_posterior_samples, n_facies]

    Returns a list with one array per seismic feature: the original samples
    with an extra last column where entry j is the fraction of samples whose
    first-column value is strictly below sample j's.

    Changes vs. original: iterate insamples directly instead of indexing by
    range(len(...)), and replace the per-sample count_nonzero loop with a
    single vectorised pairwise comparison (same values, fewer Python loops).
    '''
    smpls_cdf_libs = []
    for samples in insamples:
        col = samples[:, 0]
        # cdfs[j] = (# samples with col value < col[j]) / n_posterior_samples
        cdfs = (col[:, None] > col[None, :]).sum(axis=1) / col.size
        smpls_cdf_libs.append(np.c_[samples, cdfs])
    return smpls_cdf_libs
return smpls_cdf_libs | [
"numpy.count_nonzero",
"numpy.asarray"
] | [((598, 614), 'numpy.asarray', 'np.asarray', (['cdfs'], {}), '(cdfs)\n', (608, 614), True, 'import numpy as np\n'), ((469, 516), 'numpy.count_nonzero', 'np.count_nonzero', (['(samples[j, 0] > samples[:, 0])'], {}), '(samples[j, 0] > samples[:, 0])\n', (485, 516), True, 'import numpy as np\n')] |
import cv2
import numpy as np
import matplotlib.pyplot as plt
from skimage.filters import gabor
import mahotas as mt
import pandas as pd
from glob import glob
from skimage.feature import local_binary_pattern
def fun1(img_mask,Label):
    """Extract shape, colour and texture features from every image matched by
    *img_mask* (a glob pattern) and append one row per image, tagged with
    *Label*, to 'Labled_DATAUpdate1.csv'.

    Features per image: Gabor energy/entropy, bounding-box width/height and
    area, contour area/perimeter, extent, solidity, hull area, aspect ratio,
    rectangularity, circularity, equivalent diameter, RGB channel means and
    standard deviations (saturated 255 pixels zeroed first), and Haralick
    texture statistics (contrast, correlation, inverse difference moments,
    entropy).

    Bug fixed: the whole-channel means were previously stored in locals
    named g, h and i -- ``h`` clobbered the bounding-box height before it
    was appended, so the 'Length' column actually held the red-channel mean.
    The means now use non-conflicting names.  Also removed unused locals
    (count, Label1, a duplicate equi_diameter1 init).
    """
    gaborenergy1 = []
    gaborentropy1 = []
    w1 = []
    h1 = []
    area1 = []
    perimeter1 = []
    rectArea1 = []
    aspectratio1 = []
    rectangularity1 = []
    circularity1 = []
    equi_diameter1 = []
    red_mean1 = []
    green_mean1 = []
    blue_mean1 = []
    red_var1 = []
    blue_var1 = []
    green_var1 = []
    contrast1 = []
    correlation1 = []
    inversedifferencemoments1 = []
    entropy1 = []
    LBP = []
    extent1 = []
    solidity1 = []
    hull_area1 = []
    radius = 3
    no_points = 8 * radius  # LBP sampling points
    img_names = glob(img_mask)
    for idx, fn in enumerate(img_names):
        print(idx, end="\t")
        img = cv2.imread(fn)
        ####### Converting image to grayscale #########
        gs = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
        # GABOR filter response, plus energy and entropy of its histogram.
        gaborFilt_real, gaborFilt_imag = gabor(gs, frequency=0.6)
        gaborFilt = (gaborFilt_real ** 2 + gaborFilt_imag ** 2) // 2
        gabor_hist, _ = np.histogram(gaborFilt, 8)
        gabor_hist = np.array(gabor_hist, dtype=float)
        gabor_prob = np.divide(gabor_hist, np.sum(gabor_hist))
        gabor_energy = np.sum(gabor_prob ** 2)
        gabor_entropy = -np.sum(np.multiply(gabor_prob, np.log2(gabor_prob)))
        ######################### local_binary_pattern #########################
        lbp = local_binary_pattern(gs, no_points, radius, method='uniform')
        ###### Smoothing image using Gaussian filter
        blur = cv2.GaussianBlur(gs, (25,25), 0)
        #### Adaptive image thresholding using Otsu's thresholding method
        ret_otsu, im_bw_otsu = cv2.threshold(blur, 0, 255, cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)
        #### Boundary extraction using sobel filters
        sobelx64f = cv2.Sobel(im_bw_otsu, cv2.CV_64F, 1, 0, ksize=5)
        abs_sobel64f = np.absolute(sobelx64f)
        sobel_8u = np.uint8(abs_sobel64f)
        ret_sobel, im_bw_sobel = cv2.threshold(sobel_8u, 1, 255, cv2.THRESH_BINARY)
        kernel_edge = np.ones((15,15), np.uint8)
        closing_edge = cv2.morphologyEx(im_bw_sobel, cv2.MORPH_CLOSE, kernel_edge)
        ##### Boundary extraction using contours
        ret, thresh = cv2.threshold(gs, 127, 255, 0)
        contours, hierarchy = cv2.findContours(im_bw_otsu, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
        cnt = contours[0]
        plottedContour = cv2.drawContours(gs, contours, -1, (0,255,0), 10)
        ##### Shape based features
        M = cv2.moments(cnt)
        area = cv2.contourArea(cnt)
        perimeter = cv2.arcLength(cnt, True)
        rect = cv2.minAreaRect(cnt)
        box = cv2.boxPoints(rect)
        box = np.int0(box)
        contours_im = cv2.drawContours(im_bw_otsu, [box], 0, (255,255,255), 2)
        x, y, w, h = cv2.boundingRect(cnt)
        aspect_ratio = float(w)/h
        ######### Extent #############
        rect_area = w * h
        extent = float(area) / rect_area
        ######### solidity #############
        hull = cv2.convexHull(cnt)
        hull_area = cv2.contourArea(hull)
        # Guard against degenerate (zero-area) hulls / contours.
        if hull_area != 0:
            solidity = float(area) / hull_area
        else:
            solidity = 0
        if area != 0:
            rectangularity = w*h/area
            circularity = ((perimeter) ** 2) / area
        else:
            rectangularity = 0
            circularity = 0
        equi_diameter = np.sqrt(4*area/np.pi)
        ### Colour based features - mean, std-dev of the RGB channels
        red_channel = img[:,:,0]
        green_channel = img[:,:,1]
        blue_channel = img[:,:,2]
        # Whole-channel means (formerly g/h/i; renamed so they no longer
        # clobber the bounding-box height 'h').  Unused downstream.
        blue_mean_all = np.mean(blue_channel)
        red_mean_all = np.mean(red_channel)
        green_mean_all = np.mean(green_channel)
        # Zero saturated pixels before computing the per-channel stats.
        blue_channel[blue_channel == 255] = 0
        green_channel[green_channel == 255] = 0
        red_channel[red_channel == 255] = 0
        red_mean = np.mean(red_channel)
        green_mean = np.mean(green_channel)
        blue_mean = np.mean(blue_channel)
        red_var = np.std(red_channel)
        blue_var = np.std(blue_channel)
        green_var = np.std(green_channel)
        ######### Texture Features ##########
        textures = mt.features.haralick(gs)
        ht_mean = textures.mean(axis=0)
        gaborenergy1.append(gabor_energy)
        gaborentropy1.append(gabor_entropy)
        w1.append(w)
        h1.append(h)
        area1.append(area)
        rectArea1.append(rect_area)
        perimeter1.append(perimeter)
        aspectratio1.append(aspect_ratio)
        rectangularity1.append(rectangularity)
        circularity1.append(circularity)
        equi_diameter1.append(equi_diameter)
        red_mean1.append(red_mean)
        green_mean1.append(green_mean)
        blue_mean1.append(blue_mean)
        red_var1.append(red_var)
        blue_var1.append(blue_var)
        green_var1.append(green_var)
        contrast1.append(ht_mean[1])                   # contrast
        correlation1.append(ht_mean[2])                # correlation
        inversedifferencemoments1.append(ht_mean[4])   # inverse difference moments
        entropy1.append(ht_mean[8])                    # entropy
        LBP.append(lbp)
        extent1.append(extent)
        solidity1.append(solidity)
        hull_area1.append(hull_area)
    # dictionary of lists -> one row per processed image
    dict1 = {'Label':Label,'gaborenergy': gaborenergy1, 'gaborentropy': gaborentropy1,'width':w1,'Length':h1, 'area': area1,'Rect_Area':rectArea1, 'perimeter': perimeter1,'Extent': extent1,
             'Solidity':solidity1,'Hull_Area':hull_area1,'AspectRatio': aspectratio1, 'Rectangularity': rectangularity1, 'Circularity': circularity1,
             'EquiDimeter': equi_diameter1, 'RedMean': red_mean1, 'GreenMean': green_mean1, 'BlueMean': blue_mean1,
             'RedVar': red_var1,'BlueVar': blue_var1,'GreenVar': green_var1, 'contrast': contrast1, 'correlation': correlation1,
             'inverse difference moments': inversedifferencemoments1, 'entropy': entropy1 }
    df = pd.DataFrame(dict1)
    # Append to the running CSV (no header so repeated calls stack rows).
    df.to_csv("Labled_DATAUpdate1.csv", mode='a', header=False)
| [
"numpy.uint8",
"numpy.sqrt",
"numpy.array",
"mahotas.features.haralick",
"numpy.mean",
"numpy.histogram",
"cv2.threshold",
"cv2.arcLength",
"cv2.contourArea",
"cv2.minAreaRect",
"pandas.DataFrame",
"glob.glob",
"cv2.drawContours",
"numpy.ones",
"cv2.boxPoints",
"cv2.boundingRect",
"n... | [((879, 893), 'glob.glob', 'glob', (['img_mask'], {}), '(img_mask)\n', (883, 893), False, 'from glob import glob\n'), ((8435, 8454), 'pandas.DataFrame', 'pd.DataFrame', (['dict1'], {}), '(dict1)\n', (8447, 8454), True, 'import pandas as pd\n'), ((1041, 1055), 'cv2.imread', 'cv2.imread', (['fn'], {}), '(fn)\n', (1051, 1055), False, 'import cv2\n'), ((1160, 1197), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_RGB2GRAY'], {}), '(img, cv2.COLOR_RGB2GRAY)\n', (1172, 1197), False, 'import cv2\n'), ((1334, 1358), 'skimage.filters.gabor', 'gabor', (['gs'], {'frequency': '(0.6)'}), '(gs, frequency=0.6)\n', (1339, 1358), False, 'from skimage.filters import gabor\n'), ((1754, 1780), 'numpy.histogram', 'np.histogram', (['gaborFilt', '(8)'], {}), '(gaborFilt, 8)\n', (1766, 1780), True, 'import numpy as np\n'), ((1803, 1836), 'numpy.array', 'np.array', (['gabor_hist'], {'dtype': 'float'}), '(gabor_hist, dtype=float)\n', (1811, 1836), True, 'import numpy as np\n'), ((1925, 1948), 'numpy.sum', 'np.sum', (['(gabor_prob ** 2)'], {}), '(gabor_prob ** 2)\n', (1931, 1948), True, 'import numpy as np\n'), ((2281, 2342), 'skimage.feature.local_binary_pattern', 'local_binary_pattern', (['gs', 'no_points', 'radius'], {'method': '"""uniform"""'}), "(gs, no_points, radius, method='uniform')\n", (2301, 2342), False, 'from skimage.feature import local_binary_pattern\n'), ((2413, 2446), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['gs', '(25, 25)', '(0)'], {}), '(gs, (25, 25), 0)\n', (2429, 2446), False, 'import cv2\n'), ((2576, 2644), 'cv2.threshold', 'cv2.threshold', (['blur', '(0)', '(255)', '(cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)'], {}), '(blur, 0, 255, cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)\n', (2589, 2644), False, 'import cv2\n'), ((2762, 2810), 'cv2.Sobel', 'cv2.Sobel', (['im_bw_otsu', 'cv2.CV_64F', '(1)', '(0)'], {'ksize': '(5)'}), '(im_bw_otsu, cv2.CV_64F, 1, 0, ksize=5)\n', (2771, 2810), False, 'import cv2\n'), ((2831, 2853), 'numpy.absolute', 'np.absolute', 
(['sobelx64f'], {}), '(sobelx64f)\n', (2842, 2853), True, 'import numpy as np\n'), ((2874, 2896), 'numpy.uint8', 'np.uint8', (['abs_sobel64f'], {}), '(abs_sobel64f)\n', (2882, 2896), True, 'import numpy as np\n'), ((2987, 3037), 'cv2.threshold', 'cv2.threshold', (['sobel_8u', '(1)', '(255)', 'cv2.THRESH_BINARY'], {}), '(sobel_8u, 1, 255, cv2.THRESH_BINARY)\n', (3000, 3037), False, 'import cv2\n'), ((3101, 3128), 'numpy.ones', 'np.ones', (['(15, 15)', 'np.uint8'], {}), '((15, 15), np.uint8)\n', (3108, 3128), True, 'import numpy as np\n'), ((3151, 3210), 'cv2.morphologyEx', 'cv2.morphologyEx', (['im_bw_sobel', 'cv2.MORPH_CLOSE', 'kernel_edge'], {}), '(im_bw_sobel, cv2.MORPH_CLOSE, kernel_edge)\n', (3167, 3210), False, 'import cv2\n'), ((3373, 3403), 'cv2.threshold', 'cv2.threshold', (['gs', '(127)', '(255)', '(0)'], {}), '(gs, 127, 255, 0)\n', (3386, 3403), False, 'import cv2\n'), ((3435, 3503), 'cv2.findContours', 'cv2.findContours', (['im_bw_otsu', 'cv2.RETR_TREE', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(im_bw_otsu, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n', (3451, 3503), False, 'import cv2\n'), ((3596, 3647), 'cv2.drawContours', 'cv2.drawContours', (['gs', 'contours', '(-1)', '(0, 255, 0)', '(10)'], {}), '(gs, contours, -1, (0, 255, 0), 10)\n', (3612, 3647), False, 'import cv2\n'), ((3744, 3760), 'cv2.moments', 'cv2.moments', (['cnt'], {}), '(cnt)\n', (3755, 3760), False, 'import cv2\n'), ((3810, 3830), 'cv2.contourArea', 'cv2.contourArea', (['cnt'], {}), '(cnt)\n', (3825, 3830), False, 'import cv2\n'), ((3887, 3911), 'cv2.arcLength', 'cv2.arcLength', (['cnt', '(True)'], {}), '(cnt, True)\n', (3900, 3911), False, 'import cv2\n'), ((3968, 3988), 'cv2.minAreaRect', 'cv2.minAreaRect', (['cnt'], {}), '(cnt)\n', (3983, 3988), False, 'import cv2\n'), ((4004, 4023), 'cv2.boxPoints', 'cv2.boxPoints', (['rect'], {}), '(rect)\n', (4017, 4023), False, 'import cv2\n'), ((4039, 4051), 'numpy.int0', 'np.int0', (['box'], {}), '(box)\n', (4046, 4051), True, 'import numpy as np\n'), 
((4075, 4133), 'cv2.drawContours', 'cv2.drawContours', (['im_bw_otsu', '[box]', '(0)', '(255, 255, 255)', '(2)'], {}), '(im_bw_otsu, [box], 0, (255, 255, 255), 2)\n', (4091, 4133), False, 'import cv2\n'), ((4315, 4336), 'cv2.boundingRect', 'cv2.boundingRect', (['cnt'], {}), '(cnt)\n', (4331, 4336), False, 'import cv2\n'), ((4591, 4610), 'cv2.convexHull', 'cv2.convexHull', (['cnt'], {}), '(cnt)\n', (4605, 4610), False, 'import cv2\n'), ((4632, 4653), 'cv2.contourArea', 'cv2.contourArea', (['hull'], {}), '(hull)\n', (4647, 4653), False, 'import cv2\n'), ((5180, 5205), 'numpy.sqrt', 'np.sqrt', (['(4 * area / np.pi)'], {}), '(4 * area / np.pi)\n', (5187, 5205), True, 'import numpy as np\n'), ((5693, 5714), 'numpy.mean', 'np.mean', (['blue_channel'], {}), '(blue_channel)\n', (5700, 5714), True, 'import numpy as np\n'), ((5728, 5748), 'numpy.mean', 'np.mean', (['red_channel'], {}), '(red_channel)\n', (5735, 5748), True, 'import numpy as np\n'), ((5762, 5784), 'numpy.mean', 'np.mean', (['green_channel'], {}), '(green_channel)\n', (5769, 5784), True, 'import numpy as np\n'), ((6041, 6061), 'numpy.mean', 'np.mean', (['red_channel'], {}), '(red_channel)\n', (6048, 6061), True, 'import numpy as np\n'), ((6125, 6147), 'numpy.mean', 'np.mean', (['green_channel'], {}), '(green_channel)\n', (6132, 6147), True, 'import numpy as np\n'), ((6212, 6233), 'numpy.mean', 'np.mean', (['blue_channel'], {}), '(blue_channel)\n', (6219, 6233), True, 'import numpy as np\n'), ((6296, 6315), 'numpy.std', 'np.std', (['red_channel'], {}), '(red_channel)\n', (6302, 6315), True, 'import numpy as np\n'), ((6375, 6395), 'numpy.std', 'np.std', (['blue_channel'], {}), '(blue_channel)\n', (6381, 6395), True, 'import numpy as np\n'), ((6417, 6438), 'numpy.std', 'np.std', (['green_channel'], {}), '(green_channel)\n', (6423, 6438), True, 'import numpy as np\n'), ((6506, 6530), 'mahotas.features.haralick', 'mt.features.haralick', (['gs'], {}), '(gs)\n', (6526, 6530), True, 'import mahotas as mt\n'), ((1881, 
1899), 'numpy.sum', 'np.sum', (['gabor_hist'], {}), '(gabor_hist)\n', (1887, 1899), True, 'import numpy as np\n'), ((2006, 2025), 'numpy.log2', 'np.log2', (['gabor_prob'], {}), '(gabor_prob)\n', (2013, 2025), True, 'import numpy as np\n')] |
from __future__ import annotations
from typing import Any
import numpy as np
# Smoke/typing exercise for np.lib.Arrayterator (a buffered array iterator);
# the statements below only touch the public API and discard the results.
AR_i8: np.ndarray[Any, np.dtype[np.int_]] = np.arange(10)
ar_iter = np.lib.Arrayterator(AR_i8)
# Public attributes of the Arrayterator.
ar_iter.var
ar_iter.buf_size
ar_iter.start
ar_iter.stop
ar_iter.step
ar_iter.shape
ar_iter.flat
# Conversion back to a plain ndarray.
ar_iter.__array__()
# Iteration protocol.
for i in ar_iter:
    pass
# Indexing / slicing forms supported by Arrayterator.
ar_iter[0]
ar_iter[...]
ar_iter[:]
ar_iter[0, 0, 0]
ar_iter[..., 0, :]
| [
"numpy.lib.Arrayterator",
"numpy.arange"
] | [((124, 137), 'numpy.arange', 'np.arange', (['(10)'], {}), '(10)\n', (133, 137), True, 'import numpy as np\n'), ((148, 174), 'numpy.lib.Arrayterator', 'np.lib.Arrayterator', (['AR_i8'], {}), '(AR_i8)\n', (167, 174), True, 'import numpy as np\n')] |
import discord
from discord.ext import commands
class kick(commands.Cog):
    """Cog providing a moderator-only ``kick`` command."""

    def __init__(self, client):
        self.client = client

    # kick command: requires the invoker to hold kick_members permission.
    @commands.command()
    @commands.has_permissions(kick_members=True)
    async def kick(self, ctx, member: discord.Member, *, reason=None):
        """Kick *member* from the guild, optionally recording *reason*."""
        await member.kick(reason=reason)
        # Fix: original confirmation read "has kicked" (broken grammar).
        await ctx.send(f'User {member} has been kicked.')
def setup(client):
    # discord.py extension entry point: instantiate and register the cog.
    client.add_cog(kick(client))
| [
"discord.ext.commands.has_permissions",
"discord.ext.commands.command"
] | [((161, 179), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (177, 179), False, 'from discord.ext import commands\n'), ((185, 228), 'discord.ext.commands.has_permissions', 'commands.has_permissions', ([], {'kick_members': '(True)'}), '(kick_members=True)\n', (209, 228), False, 'from discord.ext import commands\n')] |
#!/usr/bin/python
# Copyright (c) 2016-2017, Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import sys
import random
#from ansible.module_utils.basic import *
# sysfs path templates for SR-IOV device identifier files: 'common' is the
# base pattern (%s -> interface name, {} -> leaf file); the other keys name
# the leaf files read by get_interface_id().
id_dict = {
  'common':'/sys/class/net/%s/device/{}',
  'vendor_id':'vendor',
  'device_id':'device',
  'virt_id':'virtfn0/device',}
def enable_vf(sriov_intf, num_vfs):
    """Enable *num_vfs* virtual functions on *sriov_intf* via its sysfs knob."""
    sysfs_path = id_dict['common'].format('sriov_numvfs') % sriov_intf
    with open(sysfs_path, 'w') as fp:
        fp.write(str(num_vfs))
def get_interface_id(sriov_intf, get_id):
    """Find and return: Vendor id, device id, virtual function id

    :param sriov_intf: name of the physical network interface.
    :param get_id: key into id_dict selecting the sysfs file to read
        ('vendor_id', 'device_id' or 'virt_id'); the file holds a hex
        id such as '0x8086\\n'.
    :return: the id as a hex string without the '0x' prefix.
    """
    with open(id_dict['common'].format(id_dict[get_id]) %sriov_intf) as fp:
        raw_id = fp.read().strip()
    # BUGFIX: the previous lstrip('0x') stripped *any* run of leading '0'
    # and 'x' characters, so ids with leading zero nibbles were corrupted
    # (e.g. '0x0a54' became 'a54'). Remove only the literal '0x' prefix.
    if raw_id.startswith('0x'):
        raw_id = raw_id[2:]
    return raw_id
def set_vf_address(module, sriov_intf, num_vfs):
    """Set a freshly generated random MAC address for each VF.

    :param module: AnsibleModule instance used to run the ip commands.
    :param sriov_intf: name of the physical interface owning the VFs.
    :param num_vfs: number of virtual functions to configure.
    """
    # range() instead of the Python-2-only xrange(): identical behaviour
    # for these small counts and keeps the module usable under Python 3.
    for vf_num in range(num_vfs):
        set_command = "ip link set {} vf {} mac {}".format(sriov_intf, vf_num, spawn_mac())
        module.run_command(set_command)
def spawn_mac():
"""Generate mac address"""
mac = [ 0x52, 0x54, 0x00,
random.randint(0x00, 0x7f),
random.randint(0x00, 0xff),
random.randint(0x00, 0xff) ]
return ':'.join(map(lambda x: "%02x" % x, mac))
def main():
    """Enable SR-IOV on interface, create VFs and set mac address for each. Return vendor/device id """
    # AnsibleModule is injected by Ansible through the module-common include
    # marker at the bottom of this file; it is not imported here.
    module = AnsibleModule(
        argument_spec={
            'sriov_intf': {'required': True, 'type': 'str'},
            'num_vfs': {'required': True, 'type': 'int'}
        }
    )
    #Get parameters from ansible.
    params = module.params
    sriov_intf = params['sriov_intf']
    num_vfs = int(params['num_vfs'])
    enable_vf(sriov_intf, num_vfs)
    set_vf_address(module, sriov_intf, num_vfs)
    # Read the PCI identity of the interface and of its first VF from sysfs.
    sriov_vendor_id = get_interface_id(sriov_intf, 'vendor_id')
    sriov_device_id = get_interface_id(sriov_intf, 'device_id')
    sriov_virt_id = get_interface_id(sriov_intf, 'virt_id')
    ansible_facts = {
        "sriov_vendor_id": sriov_vendor_id,
        "sriov_device_id": sriov_device_id,
        "sriov_virt_id": sriov_virt_id,
    }
    module.exit_json(Changed=True, ansible_facts=ansible_facts)
#<<INCLUDE_ANSIBLE_MODULE_COMMON>>
# The marker above is replaced by Ansible with the module-common boilerplate
# (which defines AnsibleModule) before execution on the target host.
if __name__ == '__main__':
    main()
| [
"random.randint"
] | [((2605, 2627), 'random.randint', 'random.randint', (['(0)', '(127)'], {}), '(0, 127)\n', (2619, 2627), False, 'import random\n'), ((2641, 2663), 'random.randint', 'random.randint', (['(0)', '(255)'], {}), '(0, 255)\n', (2655, 2663), False, 'import random\n'), ((2677, 2699), 'random.randint', 'random.randint', (['(0)', '(255)'], {}), '(0, 255)\n', (2691, 2699), False, 'import random\n')] |
#coding: utf-8
#python 2 only!
from __future__ import unicode_literals, absolute_import
from django.utils.encoding import python_2_unicode_compatible
####################################################################################
from django.utils.translation import ugettext_lazy as _ #pootle do prowadzenia tlumaczen
from django.core.urlresolvers import reverse_lazy # zamienia nazwe widoku podając ewentuale parametry na konkretna sciezke.
from django.db import models
# Create your models here.
@python_2_unicode_compatible
class Author(models.Model):
    """A book author identified by first and last name."""
    first_name = models.CharField(_("first_name"),max_length=20)
    last_name = models.CharField(_("last_name"), max_length=50)
    def __str__(self):
        return _("{first_name} {last_name}").format(first_name=self.first_name, last_name=self.last_name,)
    class Meta:
        ordering = ('last_name', 'first_name') # default ordering of querysets in the database
        verbose_name = _('author') # prefer the genitive over the nominative form
        verbose_name_plural = _('authors')
@python_2_unicode_compatible
class Publisher(models.Model):
    """A publishing house that releases book editions."""
    name = models.CharField(max_length=70)
    def __str__(self):
        return self.name
@python_2_unicode_compatible
class BookCategory(models.Model):
    """A subject category a book may belong to."""
    name = models.CharField(max_length=50)
    def __str__(self):
        return self.name
@python_2_unicode_compatible
class Book(models.Model):
    '''
    Something like a manuscript: the work itself, as opposed to a
    concrete published edition (see BookEdition).
    '''
    title = models.CharField(max_length=100)
    authors = models.ManyToManyField(Author)
    categories = models.ManyToManyField(BookCategory)
    #author = models.ForeignKey(Author)
    def __str__(self):
        return self.title
    def get_absolute_url(self):
        # Resolve the canonical detail URL for this book by view name.
        return reverse_lazy('shelf:book-detail', kwargs={'pk': self.id})
@python_2_unicode_compatible
class BookEdition(models.Model):
    '''
    A published edition of a specific book.
    '''
    book = models.ForeignKey(Book, related_name='editions')
    publisher = models.ForeignKey(Publisher)
    date = models.DateField()
    isbn = models.CharField(max_length=17, blank=True)
    def __str__(self):
        return "{book.title}, {publisher.name}".format(book=self.book, publisher=self.publisher)
# Choices for BookItem.cover_type.
COVER_TYPES = (
    ('soft', 'Soft'),
    ('hard', 'Hard')
    # (value stored in the database, human-readable label)
)
@python_2_unicode_compatible
class BookItem(models.Model):
    '''
    A concrete physical copy of an edition.
    '''
    edition = models.ForeignKey(BookEdition)
    catalogue_number = models.CharField(max_length=30)
    cover_type = models.CharField(max_length=4, choices=COVER_TYPES)
    def __str__(self):
        # get_cover_type_display() maps the stored choice value to its label.
        return "{edition}, {cover}".format(edition=self.edition,
                                           cover=self.get_cover_type_display())
| [
"django.utils.translation.ugettext_lazy",
"django.db.models.DateField",
"django.db.models.ForeignKey",
"django.db.models.ManyToManyField",
"django.core.urlresolvers.reverse_lazy",
"django.db.models.CharField"
] | [((1141, 1172), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(70)'}), '(max_length=70)\n', (1157, 1172), False, 'from django.db import models\n'), ((1296, 1327), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (1312, 1327), False, 'from django.db import models\n'), ((1489, 1521), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1505, 1521), False, 'from django.db import models\n'), ((1536, 1566), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['Author'], {}), '(Author)\n', (1558, 1566), False, 'from django.db import models\n'), ((1584, 1620), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['BookCategory'], {}), '(BookCategory)\n', (1606, 1620), False, 'from django.db import models\n'), ((1938, 1986), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Book'], {'related_name': '"""editions"""'}), "(Book, related_name='editions')\n", (1955, 1986), False, 'from django.db import models\n'), ((2003, 2031), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Publisher'], {}), '(Publisher)\n', (2020, 2031), False, 'from django.db import models\n'), ((2043, 2061), 'django.db.models.DateField', 'models.DateField', ([], {}), '()\n', (2059, 2061), False, 'from django.db import models\n'), ((2073, 2116), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(17)', 'blank': '(True)'}), '(max_length=17, blank=True)\n', (2089, 2116), False, 'from django.db import models\n'), ((2456, 2486), 'django.db.models.ForeignKey', 'models.ForeignKey', (['BookEdition'], {}), '(BookEdition)\n', (2473, 2486), False, 'from django.db import models\n'), ((2510, 2541), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (2526, 2541), False, 'from django.db import models\n'), ((2559, 2610), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': 
'(4)', 'choices': 'COVER_TYPES'}), '(max_length=4, choices=COVER_TYPES)\n', (2575, 2610), False, 'from django.db import models\n'), ((597, 612), 'django.utils.translation.ugettext_lazy', '_', (['"""first_name"""'], {}), "('first_name')\n", (598, 612), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((661, 675), 'django.utils.translation.ugettext_lazy', '_', (['"""last_name"""'], {}), "('last_name')\n", (662, 675), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((968, 979), 'django.utils.translation.ugettext_lazy', '_', (['"""author"""'], {}), "('author')\n", (969, 979), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1056, 1068), 'django.utils.translation.ugettext_lazy', '_', (['"""authors"""'], {}), "('authors')\n", (1057, 1068), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1759, 1816), 'django.core.urlresolvers.reverse_lazy', 'reverse_lazy', (['"""shelf:book-detail"""'], {'kwargs': "{'pk': self.id}"}), "('shelf:book-detail', kwargs={'pk': self.id})\n", (1771, 1816), False, 'from django.core.urlresolvers import reverse_lazy\n'), ((731, 760), 'django.utils.translation.ugettext_lazy', '_', (['"""{first_name} {last_name}"""'], {}), "('{first_name} {last_name}')\n", (732, 760), True, 'from django.utils.translation import ugettext_lazy as _\n')] |
import cv2
# The three inputs were processed with three identical copy-pasted code
# blocks; factor the shared load/threshold/contour extraction into a helper.
def load_first_contour(path, window_name):
    """Read an image, display it, binarize it and return its first contour."""
    image = cv2.imread(path)
    cv2.imshow(window_name, image)
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    ret, binary = cv2.threshold(gray, 127, 255, cv2.THRESH_BINARY)
    contours, hierarchy = cv2.findContours(binary, cv2.RETR_LIST,
                                          cv2.CHAIN_APPROX_SIMPLE)
    return contours[0]
# ---------------------contours of the three test images-------------------
cnt1 = load_first_contour("cs1.bmp", "original1")
cnt2 = load_first_contour("cs3.bmp", "original2")
cnt3 = load_first_contour("hand.bmp", "original3")
# ---------------------build the shape-context distance extractor----------
sd = cv2.createShapeContextDistanceExtractor()
print(sd)
print("===========")
print(cnt1)
print("===========")
print(cnt2)
print("===========")
print(cnt3)
# ---------------------compute pairwise shape distances--------------------
d1 = sd.computeDistance(cnt1, cnt1)
print("与自身的距离d1=", d1)
d2 = sd.computeDistance(cnt2, cnt2)
print("与旋转缩放后的自身图像的距离d2=", d2)
d3 = sd.computeDistance(cnt1, cnt3)
print("与不相似对象的距离d3=", d3)
# ----------------display windows until a key is pressed-------------------
cv2.waitKey()
cv2.destroyAllWindows()
| [
"cv2.createShapeContextDistanceExtractor",
"cv2.threshold",
"cv2.imshow",
"cv2.waitKey",
"cv2.destroyAllWindows",
"cv2.cvtColor",
"cv2.findContours",
"cv2.imread"
] | [((75, 96), 'cv2.imread', 'cv2.imread', (['"""cs1.bmp"""'], {}), "('cs1.bmp')\n", (85, 96), False, 'import cv2\n'), ((97, 124), 'cv2.imshow', 'cv2.imshow', (['"""original1"""', 'o1'], {}), "('original1', o1)\n", (107, 124), False, 'import cv2\n'), ((133, 169), 'cv2.cvtColor', 'cv2.cvtColor', (['o1', 'cv2.COLOR_BGR2GRAY'], {}), '(o1, cv2.COLOR_BGR2GRAY)\n', (145, 169), False, 'import cv2\n'), ((185, 234), 'cv2.threshold', 'cv2.threshold', (['gray1', '(127)', '(255)', 'cv2.THRESH_BINARY'], {}), '(gray1, 127, 255, cv2.THRESH_BINARY)\n', (198, 234), False, 'import cv2\n'), ((258, 323), 'cv2.findContours', 'cv2.findContours', (['binary1', 'cv2.RETR_LIST', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(binary1, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)\n', (274, 323), False, 'import cv2\n'), ((409, 430), 'cv2.imread', 'cv2.imread', (['"""cs3.bmp"""'], {}), "('cs3.bmp')\n", (419, 430), False, 'import cv2\n'), ((431, 458), 'cv2.imshow', 'cv2.imshow', (['"""original2"""', 'o2'], {}), "('original2', o2)\n", (441, 458), False, 'import cv2\n'), ((467, 503), 'cv2.cvtColor', 'cv2.cvtColor', (['o2', 'cv2.COLOR_BGR2GRAY'], {}), '(o2, cv2.COLOR_BGR2GRAY)\n', (479, 503), False, 'import cv2\n'), ((519, 568), 'cv2.threshold', 'cv2.threshold', (['gray2', '(127)', '(255)', 'cv2.THRESH_BINARY'], {}), '(gray2, 127, 255, cv2.THRESH_BINARY)\n', (532, 568), False, 'import cv2\n'), ((592, 657), 'cv2.findContours', 'cv2.findContours', (['binary2', 'cv2.RETR_LIST', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(binary2, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)\n', (608, 657), False, 'import cv2\n'), ((743, 765), 'cv2.imread', 'cv2.imread', (['"""hand.bmp"""'], {}), "('hand.bmp')\n", (753, 765), False, 'import cv2\n'), ((766, 793), 'cv2.imshow', 'cv2.imshow', (['"""original3"""', 'o3'], {}), "('original3', o3)\n", (776, 793), False, 'import cv2\n'), ((802, 838), 'cv2.cvtColor', 'cv2.cvtColor', (['o3', 'cv2.COLOR_BGR2GRAY'], {}), '(o3, cv2.COLOR_BGR2GRAY)\n', (814, 838), False, 'import cv2\n'), ((854, 903), 
'cv2.threshold', 'cv2.threshold', (['gray3', '(127)', '(255)', 'cv2.THRESH_BINARY'], {}), '(gray3, 127, 255, cv2.THRESH_BINARY)\n', (867, 903), False, 'import cv2\n'), ((927, 992), 'cv2.findContours', 'cv2.findContours', (['binary3', 'cv2.RETR_LIST', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(binary3, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)\n', (943, 992), False, 'import cv2\n'), ((1077, 1118), 'cv2.createShapeContextDistanceExtractor', 'cv2.createShapeContextDistanceExtractor', ([], {}), '()\n', (1116, 1118), False, 'import cv2\n'), ((1511, 1524), 'cv2.waitKey', 'cv2.waitKey', ([], {}), '()\n', (1522, 1524), False, 'import cv2\n'), ((1525, 1548), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1546, 1548), False, 'import cv2\n')] |
#-----------------------------------------------------------
# create-palette.py
#-----------------------------------------------------------
import StringIO
# CRLF line terminator, written in octal escapes (\015 = CR, \012 = LF).
NL = '\015\012'
# Name of the generated palette data file.
FILE_NAME = 'palette.dat'
#-----------------------------------------------------------
def Color8to16(color8):
    """Convert an 8-bit RGB332 colour (3 bits red, 3 green, 2 blue) to RGB565.

    Uses floor division (//) so the function also works under Python 3,
    where '/' would produce floats and make the final bitwise OR raise a
    TypeError; behaviour on Python 2 integer input is unchanged.
    """
    r = (color8//0x04)//0x08   # top 3 bits: red
    g = (color8//0x04)%0x08    # middle 3 bits: green
    b = color8%0x04            # low 2 bits: blue
    # Rescale each channel to its RGB565 width (5/6/5 bits).
    r = (r*0x20)//0x08
    g = (g*0x40)//0x08
    b = (b*0x20)//0x04
    return b | (g << 5) | (r << 11)
#-----------------------------------------------------------
def CreatePalette():
    """Generate a 256-entry RGB565 palette table and write it to FILE_NAME.

    NOTE(review): Python 2 only — relies on xrange() and the StringIO
    module; a Python 3 port would need range() and io.StringIO.
    """
    output = StringIO.StringIO()
    for i in xrange(0, 256):
        color = Color8to16(i)
        # Indent the first entry of every row of ten values.
        if i%10 == 0:
            output.write('  ')
        output.write(' 0x%.4X' % color)
        # Comma after every value except the very last one.
        if i != 255:
            output.write(',')
        # Line break after every tenth value.
        if i%10 == 9:
            output.write(NL)
    output.write(NL)
    output_file_name = FILE_NAME
    fh = open(output_file_name, 'wb')
    fh.write(output.getvalue())
    fh.close()
    output.close()
CreatePalette()
| [
"StringIO.StringIO"
] | [((561, 580), 'StringIO.StringIO', 'StringIO.StringIO', ([], {}), '()\n', (578, 580), False, 'import StringIO\n')] |
"""Tests."""
import logging
import sys
import pytest
from _pytest.capture import CaptureFixture
from _pytest.monkeypatch import MonkeyPatch
from boilerplatepython.logging import LogFormatter
from .utils import __file__ as utils_filename, generate_log_statements
def _init_logger(name: str, formatter: logging.Formatter) -> logging.Logger:
"""Create a logger for tests."""
logger = logging.getLogger(name)
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(formatter)
handler.setLevel(logging.DEBUG)
logger.addHandler(handler)
return logger
@pytest.mark.parametrize(
    "force_wide,terminal_width",
    [
        (False, 160),
        (True, 80),
        (True, 160),
    ],
)
@pytest.mark.usefixtures("freeze_time")
def test_wide(capsys: CaptureFixture, monkeypatch: MonkeyPatch, logger_name: str, force_wide: bool, terminal_width: int):
    """Test.
    :param capsys: pytest fixture.
    :param monkeypatch: pytest fixture.
    :param logger_name: conftest fixture.
    :param force_wide: Don't automatically use narrow formatting.
    :param terminal_width: Mock terminal width.
    """
    # Stub get_terminal_size so the formatter sees a deterministic width.
    monkeypatch.setattr("boilerplatepython.logging.get_terminal_size", lambda: type("", (), {"columns": terminal_width}))
    log = _init_logger(logger_name, LogFormatter(force_wide=force_wide, traceback=False))
    generate_log_statements(log)
    # Time is frozen by the fixture, so the exact wide-format lines are known.
    output = capsys.readouterr()[0].splitlines()
    expected = [
        "2019-12-19T21:18:05.415 [DEBUG   ] generate_log_statements:15: Some debug statements: var",
        "2019-12-19T21:18:05.415 [INFO    ] generate_log_statements:16: An info statement.",
        "2019-12-19T21:18:05.415 [WARNING ] do_log:9: This is a warning statement: 123",
        "2019-12-19T21:18:05.415 [ERROR   ] do_log:10: An error has occurred.",
        "2019-12-19T21:18:05.415 [CRITICAL] generate_log_statements:18: Critical failure: ERR",
        "2019-12-19T21:18:05.415 [ERROR   ] generate_log_statements:23: Here be an exception.",
    ]
    assert output == expected
@pytest.mark.parametrize("traceback", [True, False])
@pytest.mark.usefixtures("freeze_time")
def test_traceback(capsys: CaptureFixture, logger_name: str, traceback: bool):
    """Test.
    :param capsys: pytest fixture.
    :param logger_name: conftest fixture.
    :param traceback: Enable printing traceback.
    """
    log = _init_logger(logger_name, LogFormatter(traceback=traceback))
    generate_log_statements(log, emit_warnings=False)
    # Narrow format: short timestamp and four-letter level names.
    output = capsys.readouterr()[0].splitlines()
    expected = [
        "19T21:18:05.415 DBUG: Some debug statements: var",
        "19T21:18:05.415 INFO: An info statement.",
        "19T21:18:05.415 WARN: This is a warning statement: 123",
        "19T21:18:05.415 ERRO: An error has occurred.",
        "19T21:18:05.415 CRIT: Critical failure: ERR",
        "19T21:18:05.415 ERRO: Here be an exception.",
    ]
    if not traceback:
        assert output == expected
        return
    # With traceback enabled, the exception's stack trace follows the lines.
    expected += [
        "Traceback (most recent call last):",
        f'  File "{utils_filename}", line 21, in generate_log_statements',
        '    raise RuntimeError("An exception has occurred.")',
        "RuntimeError: An exception has occurred.",
    ]
    assert output == expected
@pytest.mark.usefixtures("freeze_time")
def test_colors(capsys: CaptureFixture, logger_name: str):
    """Test.
    :param capsys: pytest fixture.
    :param logger_name: conftest fixture.
    """
    log = _init_logger(logger_name, LogFormatter(colors=True, traceback=True))
    generate_log_statements(log, emit_warnings=False)
    output = capsys.readouterr()[0].splitlines()
    # \033[91m / \033[1;36m are ANSI colour escapes; \033[0m resets.
    assert "19T21:18:05.415 \033[91mERRO\033[0m: An error has occurred." in output
    assert "Traceback \033[1;36m(most recent call last)\033[0m:" in output
| [
"logging.getLogger",
"logging.StreamHandler",
"pytest.mark.parametrize",
"pytest.mark.usefixtures",
"boilerplatepython.logging.LogFormatter"
] | [((624, 722), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""force_wide,terminal_width"""', '[(False, 160), (True, 80), (True, 160)]'], {}), "('force_wide,terminal_width', [(False, 160), (True, \n 80), (True, 160)])\n", (647, 722), False, 'import pytest\n'), ((761, 799), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""freeze_time"""'], {}), "('freeze_time')\n", (784, 799), False, 'import pytest\n'), ((2081, 2132), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""traceback"""', '[True, False]'], {}), "('traceback', [True, False])\n", (2104, 2132), False, 'import pytest\n'), ((2134, 2172), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""freeze_time"""'], {}), "('freeze_time')\n", (2157, 2172), False, 'import pytest\n'), ((3308, 3346), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""freeze_time"""'], {}), "('freeze_time')\n", (3331, 3346), False, 'import pytest\n'), ((393, 416), 'logging.getLogger', 'logging.getLogger', (['name'], {}), '(name)\n', (410, 416), False, 'import logging\n'), ((466, 499), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (487, 499), False, 'import logging\n'), ((1333, 1385), 'boilerplatepython.logging.LogFormatter', 'LogFormatter', ([], {'force_wide': 'force_wide', 'traceback': '(False)'}), '(force_wide=force_wide, traceback=False)\n', (1345, 1385), False, 'from boilerplatepython.logging import LogFormatter\n'), ((2436, 2469), 'boilerplatepython.logging.LogFormatter', 'LogFormatter', ([], {'traceback': 'traceback'}), '(traceback=traceback)\n', (2448, 2469), False, 'from boilerplatepython.logging import LogFormatter\n'), ((3541, 3582), 'boilerplatepython.logging.LogFormatter', 'LogFormatter', ([], {'colors': '(True)', 'traceback': '(True)'}), '(colors=True, traceback=True)\n', (3553, 3582), False, 'from boilerplatepython.logging import LogFormatter\n')] |
#!/usr/bin/env python
"""
# Get distance matrices from Google Maps API for development
"""
import sys
import os
import asyncio
from typing import List
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
from core.distancematrix.google_distance_matrix import (
get_distancematrix_from_google,
parse_distancematrix_response,
)
from core.helpers.read_write import save_dict_to_json
async def process_distancematrix_response(
    origins: List[str], dir_for_data: str
) -> bool:
    """Fetch a Google distance matrix for *origins*, save the raw response,
    then parse it and save the parsed form under *dir_for_data*.

    Returns the conjunction of the two save results — presumably truthy
    only when both files were written (confirm against save_dict_to_json).
    """
    dm_response: dict = await get_distancematrix_from_google(origins)
    dm_json: str = os.path.join(dir_for_data, "distance_matrix.json")
    raw_response = await save_dict_to_json(dm_response, dm_json)
    parsed_dm: dict = await parse_distancematrix_response(dm_response)
    json_file: str = os.path.join(dir_for_data, "parsed_distance_matrix.json")
    parsed_response = await save_dict_to_json(parsed_dm, json_file)
    return raw_response and parsed_response
async def create_distancematrix_for_dev(origins: List[str]) -> bool:
    """Build the development distance-matrix files under <this dir>/data."""
    script_dir = os.path.dirname(os.path.abspath(__file__))
    data_dir = os.path.join(script_dir, "data")
    return await process_distancematrix_response(origins, data_dir)
if __name__ == "__main__":
    # Development origin set: two airports plus park visitor centers in the
    # US Southwest.
    origins_for_dev = [
        "Las Vegas McCarran International Airport, NV",
        "Los Angeles International Airport, CA",
        "Death Valley Furnace Creek Visitor Center, Furnace Creek, CA",
        "Mojave Kelso Depot Visitor Center, CA",
        "Joshua Tree National Park Visitor Center, Park Boulevard, Joshua Tree, CA",
        "Sequoia National Park - Visitor Center, Generals Highway, Three Rivers, CA",
        "Zion National Park Visitor Center, Zion – Mount Carmel Highway, Hurricane, UT",
        "Bryce Canyon National Park Visitor Center, Utah 63, Bryce Canyon City, UT",
        "Grand Canyon North Rim Visitor Center, AZ-67, North Rim, AZ 86023",
        "Grand Canyon Visitor Center, South Entrance Road, Grand Canyon Village, AZ",
    ]
    asyncio.run(create_distancematrix_for_dev(origins_for_dev))
| [
"core.distancematrix.google_distance_matrix.parse_distancematrix_response",
"os.path.join",
"os.path.dirname",
"os.path.abspath",
"core.helpers.read_write.save_dict_to_json",
"core.distancematrix.google_distance_matrix.get_distancematrix_from_google"
] | [((610, 660), 'os.path.join', 'os.path.join', (['dir_for_data', '"""distance_matrix.json"""'], {}), "(dir_for_data, 'distance_matrix.json')\n", (622, 660), False, 'import os\n'), ((820, 877), 'os.path.join', 'os.path.join', (['dir_for_data', '"""parsed_distance_matrix.json"""'], {}), "(dir_for_data, 'parsed_distance_matrix.json')\n", (832, 877), False, 'import os\n'), ((1152, 1185), 'os.path.join', 'os.path.join', (['current_dir', '"""data"""'], {}), "(current_dir, 'data')\n", (1164, 1185), False, 'import os\n'), ((550, 589), 'core.distancematrix.google_distance_matrix.get_distancematrix_from_google', 'get_distancematrix_from_google', (['origins'], {}), '(origins)\n', (580, 589), False, 'from core.distancematrix.google_distance_matrix import get_distancematrix_from_google, parse_distancematrix_response\n'), ((686, 725), 'core.helpers.read_write.save_dict_to_json', 'save_dict_to_json', (['dm_response', 'dm_json'], {}), '(dm_response, dm_json)\n', (703, 725), False, 'from core.helpers.read_write import save_dict_to_json\n'), ((755, 797), 'core.distancematrix.google_distance_matrix.parse_distancematrix_response', 'parse_distancematrix_response', (['dm_response'], {}), '(dm_response)\n', (784, 797), False, 'from core.distancematrix.google_distance_matrix import get_distancematrix_from_google, parse_distancematrix_response\n'), ((906, 945), 'core.helpers.read_write.save_dict_to_json', 'save_dict_to_json', (['parsed_dm', 'json_file'], {}), '(parsed_dm, json_file)\n', (923, 945), False, 'from core.helpers.read_write import save_dict_to_json\n'), ((1101, 1126), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1116, 1126), False, 'import os\n'), ((202, 227), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (217, 227), False, 'import os\n')] |
"""
精简代码
爬虫退出登录
create by judy 2019/01/24
"""
import threading
import traceback
from datacontract import Task, ECommandStatus
from datacontract.apps.appbase import AppConfig
from idownclient.spider.spiderbase import SpiderBase
from idownclient.spidermanagent.spidermanagebase import SpiderManagebase
class SpiderLogout(SpiderManagebase):
    """Dispatches account-logout tasks: restores the login cookie recorded
    for the parent task, then runs the matching spider plugin's logout()
    on a daemon worker thread."""
    def __init__(self):
        SpiderManagebase.__init__(self)
    def _get_logout_data_info(self, tsk: Task) -> bool:
        """Copy the parent task's stored login cookie onto *tsk*.

        Returns True when a cookie was found (account previously logged in
        on this machine), False otherwise.
        """
        # Query the stored record of the (valid) parent task for a cookie.
        res = False
        sql = '''
        select * from task
        where taskid=? and batchid=?
        '''
        pars = (
            tsk.parenttaskid,
            tsk.parentbatchid,
        )
        res_info = self._sqlfunc.query_task_by_sql(sql, pars)
        if len(res_info) == 0:
            return res
        res_one = res_info[0]
        # If the fetched row carries a cookie, assign it to the current task.
        if res_one.get('cookie') is not None:
            self._logger.info(f"Apptype:{tsk.apptype} will logout out")
            res = True
            tsk.cookie = res_one.get('cookie')
        return res
    # Log out
    def logout(self, tsk: Task):
        """Validate the task, locate the matching spider plugin and run
        its logout on a background thread."""
        # Restore the cookie of the account that is to be logged out.
        syn_res = self._get_logout_data_info(tsk)
        if not syn_res:
            self._write_tgback(tsk, ECommandStatus.Failed, "当前账号没有在机器上登陆")
            return
        try:
            with self._spider_threads_locker:
                # Skip if a logout for this task is already being processed.
                if self._spider_dealing_dict.__contains__(tsk):
                    self._logger.info("{} is processing logout task {}".format(tsk.batchid, self._spider_dealing_dict[tsk].name))
                    return
            appcfg: AppConfig = self._spideradapter.adapter(tsk)[0]
            if not isinstance(appcfg, AppConfig):
                self._logger.info("No spider match:\nbatchid:{}\ntasktpe:{}\napptype:{}"
                                  .format(tsk.batchid, tsk.tasktype.name, tsk.apptype))
                return
            spider: SpiderBase = appcfg._appclass(tsk, self._get_appcfg(appcfg), self._clientid)
            t = threading.Thread(target=self._execute_logout, daemon=True, args=(spider,))
            t.start()
            with self._spider_threads_locker:
                # Record the running plugin keyed by its task. (The original
                # note said "store the plugin and thread objects as a tuple",
                # but only the spider is stored — confirm intent.)
                self._spider_dealing_dict[tsk] = spider
        except Exception:
            # NOTE(review): _logger.log() is called without an explicit level
            # argument — confirm this logger's API accepts that.
            self._logger.log("Task logout error: {}".format(traceback.format_exc()))
            self._write_tgback(tsk, ECommandStatus.Failed, "执行爬虫插件出错,请检查client环境重试")
            return
    def _execute_logout(self, spider: SpiderBase):
        """Thread target: run the spider's logout() and always clean up the
        dispatch bookkeeping and fire the task's completion callback."""
        try:
            res = spider.logout()
            if res:
                self._logger.info("Logout task complete, this mission is over")
        except Exception:
            self._logger.error("Execute task error:\nbatchid:{}\nerror:{}"
                               .format(spider.task.batchid, traceback.format_exc()))
            self._write_tgback(spider.task, ECommandStatus.Failed, "执行任务出现不可知错误")
        finally:
            # Always drop the task from the in-flight map, under the lock.
            with self._spider_threads_locker:
                if self._spider_dealing_dict.__contains__(spider.task):
                    self._spider_dealing_dict.pop(spider.task, None)
            if spider.task is not None:
                if callable(spider.task.on_complete):
                    spider.task.on_complete(spider.task)
| [
"traceback.format_exc",
"threading.Thread",
"idownclient.spidermanagent.spidermanagebase.SpiderManagebase.__init__"
] | [((374, 405), 'idownclient.spidermanagent.spidermanagebase.SpiderManagebase.__init__', 'SpiderManagebase.__init__', (['self'], {}), '(self)\n', (399, 405), False, 'from idownclient.spidermanagent.spidermanagebase import SpiderManagebase\n'), ((2032, 2106), 'threading.Thread', 'threading.Thread', ([], {'target': 'self._execute_logout', 'daemon': '(True)', 'args': '(spider,)'}), '(target=self._execute_logout, daemon=True, args=(spider,))\n', (2048, 2106), False, 'import threading\n'), ((2354, 2376), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (2374, 2376), False, 'import traceback\n'), ((2839, 2861), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (2859, 2861), False, 'import traceback\n')] |
import discord
from discord.ext import commands
from discord.utils import get
class c211(commands.Cog, name="c211"):
    """Cog that posts the custom card 'Scorn Operative - Turncoat' as a
    Discord embed via a single command."""
    def __init__(self, bot: commands.Bot):
        self.bot = bot
    @commands.command(name='Scorn_Operative_Turncoat', aliases=['c211','Scorn_Operative_17'])
    async def example_embed(self, ctx):
        """Build and send the card's embed (thumbnail, stats, effect text)."""
        embed = discord.Embed(title='Scorn Operative - Turncoat',
                              color=0x00008B)
        embed.set_thumbnail(url='https://www.duelingbook.com/images/custom-pics/2300000/2348936.jpg')
        embed.add_field(name='Status (Archetype)', value='Casual:3/Tournament:3 (Scorn Operative)', inline=True)
        embed.add_field(name='Type (Attribute)', value='Cyberse/Link/Effect (DARK)', inline=False)
        embed.add_field(name='Link Rating (ATK/Link Arrows)', value='1 (1500/⬇️)', inline=False)
        embed.add_field(name='Monster Effect', value='1 "Scorn Operative" monster with exactly 2000 ATK\nQuick Effect): You can return 1 "Scorn Operative" non-Effect Monster you control to the hand or Extra Deck; reduce the ATK of 1 monster on the field by 1500, and if you do, increase this card\'s ATK by 1000 until the End Phase.', inline=False)
        embed.set_footer(text='Set Code: GMMP')
        await ctx.send(embed=embed)
def setup(bot: commands.Bot):
    # Entry point used by discord.py's extension loader.
    bot.add_cog(c211(bot))
"discord.Embed",
"discord.ext.commands.command"
] | [((190, 283), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""Scorn_Operative_Turncoat"""', 'aliases': "['c211', 'Scorn_Operative_17']"}), "(name='Scorn_Operative_Turncoat', aliases=['c211',\n 'Scorn_Operative_17'])\n", (206, 283), False, 'from discord.ext import commands\n'), ((335, 395), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Scorn Operative - Turncoat"""', 'color': '(139)'}), "(title='Scorn Operative - Turncoat', color=139)\n", (348, 395), False, 'import discord\n')] |
#!/usr/bin/env python
from os.path import join
from itertools import permutations
from random import shuffle
import pickle
import argparse
import uuid
def create_examples(multitext_ex, n_pairs):
    """Yield *n_pairs* direction-pair examples from one multilingual row.

    The row's language keys are shuffled, then the first n_pairs ordered
    language pairs are emitted as dicts with uid, sourceLanguage,
    targetLanguage and sourceText.
    """
    languages = [key for key in multitext_ex if key != "uid"]
    max_pairs = len(languages) * (len(languages) - 1)
    assert n_pairs <= max_pairs
    shuffle(languages)
    pair_iter = permutations(languages, 2)
    for _ in range(n_pairs):
        source_lang, target_lang = next(pair_iter)
        yield {
            "uid": str(uuid.uuid4()) + "-" + multitext_ex["uid"],
            "sourceLanguage": source_lang,
            "targetLanguage": target_lang,
            "sourceText": multitext_ex[source_lang],
        }
def main():
    """Read parallel FLORES dev sets for six languages and pickle pair examples.

    For each of the first --n_examples lines of every language file, emits
    --pairs_per translation-pair dicts (random directions) to --out_path.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("flores_path")
    parser.add_argument("out_path")
    parser.add_argument("--n_examples", type=int, default=100)
    parser.add_argument("--pairs_per", type=int, default=2)
    opt = parser.parse_args()
    # Language codes; one "<code>.dev" file per language is expected.
    languages = ["eng", "ind", "jav", "msa", "tam", "tgl"]
    # One dict per example row: a string uid plus one entry per language.
    multitext = [{"uid": str(i)} for i in range(opt.n_examples)]
    for language in languages:
        with open(join(opt.flores_path, language + ".dev")) as f:
            # Line i of every file is the same sentence in that language.
            for i in range(opt.n_examples):
                lang_ex = f.readline().strip()
                multitext[i][language] = lang_ex
    output = []
    for example in multitext:
        for ex in create_examples(example, opt.pairs_per):
            output.append(ex)
    with open(opt.out_path, "wb") as f:
        pickle.dump(output, f)
if __name__ == "__main__":
    main()
| [
"pickle.dump",
"random.shuffle",
"argparse.ArgumentParser",
"os.path.join",
"uuid.uuid4",
"itertools.permutations"
] | [((389, 407), 'random.shuffle', 'shuffle', (['languages'], {}), '(languages)\n', (396, 407), False, 'from random import shuffle\n'), ((424, 450), 'itertools.permutations', 'permutations', (['languages', '(2)'], {}), '(languages, 2)\n', (436, 450), False, 'from itertools import permutations\n'), ((747, 772), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (770, 772), False, 'import argparse\n'), ((1548, 1570), 'pickle.dump', 'pickle.dump', (['output', 'f'], {}), '(output, f)\n', (1559, 1570), False, 'import pickle\n'), ((1176, 1216), 'os.path.join', 'join', (['opt.flores_path', "(language + '.dev')"], {}), "(opt.flores_path, language + '.dev')\n", (1180, 1216), False, 'from os.path import join\n'), ((541, 553), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (551, 553), False, 'import uuid\n')] |
import os
import sys
import socket
import asyncio
import time
import types
import threading
from functools import partial
from collections import OrderedDict
import signal as signal
from typing import Any, Callable, Union
try:
import uvloop
uvloop.install()
except ImportError:
uvloop = None
import tornado.web
from tornado.ioloop import IOLoop
import tornado.options
from tornado.log import enable_pretty_logging
from .defines import CommandLine
from .router import app, Router
from tweb.utils import daemon
from tweb.utils.attr_util import AttrDict
from tweb.utils.signal import SignalHandler
from tweb.utils import strings
from tweb.utils.environment import env
from tweb.utils.settings import default_settings, TronadoStdout,\
DEF_COOKIE_SECRET
class Application(tornado.web.Application):
    """tornado.web.Application subclass with a helper for running start-up
    coroutines on an event loop before serving."""
    def __init__(self,
                 handlers=None,
                 default_host=None,
                 transforms=None,
                 **settings):
        super().__init__(handlers=handlers,
                         default_host=default_host,
                         transforms=transforms,
                         **settings)
    def init_with_loop(self,
                       loop: asyncio.BaseEventLoop,
                       tasks: list = None) -> None:
        """Run *tasks* (a list of awaitables) to completion on *loop*.

        No-op when tasks is falsy or not a list.
        NOTE(review): passing bare coroutines to asyncio.wait is deprecated
        since Python 3.8 — confirm the targeted Python versions.
        """
        if tasks and isinstance(tasks, list):
            loop.run_until_complete(asyncio.wait(tasks))
class HttpServer:
    '''
    Tornado web server wrapper.

    Parses command-line options, loads the config file, optionally
    daemonizes the process, installs restart/stop signal handling and
    finally drives the tornado ``IOLoop``.
    '''
    def __init__(self, router: Router = None,
                 ssl_options: Any = None, addresss: str = "",
                 options: AttrDict = None) -> None:
        # NOTE(review): the parameter name "addresss" (three s) is part of
        # the public signature and therefore kept as-is.
        env.setenv('CREATE_CONFIG', True)
        self.router = router
        self.ssl_options = ssl_options
        self.address = addresss
        self.application: Application = None
        self._conf_handlers = {}
        self._port = None
        self._conf_locale = False
        self.logger = None
        self.options = None
        self.conf = None
        # e.g: val = [conn.closed,(True,),{...}]
        self._atexit_callbacks = OrderedDict()
        self._init_options(options)
        self._init_config()

    def _init_options(self, options: AttrDict = None):
        # Fall back to parsing sys.argv when no options object is supplied.
        if not options:
            self.options = CommandLine().parse_args()
        else:
            self.options = options
            # Forward any recognised logging/stdout options to tornado.
            for k, v in options.items():
                if TronadoStdout.has_opt(k):
                    TronadoStdout.set(k, v)
        enable_pretty_logging(AttrDict(TronadoStdout.getall()))

    def _init_config(self):
        # Imported here (after CREATE_CONFIG is set in __init__) rather
        # than at module load time.
        from .config import conf
        self.conf = conf

    def _check_daemon(self):
        # True when the --daemon command-line flag was given.
        return self.options.daemon

    def _get_pid_path(self) -> str:
        # Use the user-supplied pid path only when its directory is
        # writable; otherwise fall back to <project root>/server.pid.
        if self.options.pid and not os.path.exists(self.options.pid):
            base_dir = os.path.dirname(self.options.pid)
            if os.access(base_dir, os.W_OK):
                return self.options.pid
        return os.path.join(strings.get_root_path(), 'server.pid')

    @staticmethod
    def _log_func(logger, handler: tornado.web.RequestHandler) -> None:
        # Per-request access log: severity follows the response status.
        if handler.get_status() < 400:
            log_method = logger.info
        elif handler.get_status() < 500:
            log_method = logger.warning
        else:
            log_method = logger.error
        req_time = 1000.0 * handler.request.request_time()
        log_method('%d %s %.2fms', handler.get_status(),
                   handler._request_summary(), req_time)

    @staticmethod
    def check_port(port: int, addr: str = '0.0.0.0', timeout: int = 1) -> bool:
        '''
        check port status
        :param port: `<int>`
        :param addr: `<str>` default '0.0.0.0'
        :return: True -> used, False -> not used
        '''
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(timeout)
        code = sock.connect_ex((addr, port))
        sock.close()
        return True if code == 0 else False

    def configure_port(self) -> None:
        # Command line wins; fall back to the config file, then 8888.
        if not self.options.port or self.options.port <= 0:
            self._port = self.conf.get_int_option('setting', 'port', 8888)
        elif self.options.port > 65535:
            self._port = 8888
        else:
            self._port = self.options.port
        # Refuse to start when the port is already taken -- unless we are
        # only delivering a restart/stop signal to a running instance.
        if self.check_port(self._port) and self.options.signal is None:
            self.logger.error(
                f'Server is running in http://localhost:{self._port}')
            sys.exit(1)

    def get_settings(self, settings_: dict = None) -> dict:
        '''Build the tornado application settings dict.

        Defaults come from the config file; *settings_* overrides them.
        A request-logging function is installed unless the caller
        supplied one.
        '''
        debug = self.conf.get_bool_option('setting', '_debug', False)
        cookie_secret = self.conf.get_option('setting', 'cookie_secret',
                                             DEF_COOKIE_SECRET)
        data = default_settings(debug, cookie_secret=cookie_secret)
        if settings_:
            data.update(settings_)
        if 'log_function' not in data:
            data.update({'log_function': partial(self._log_func, self.logger)})
        return data

    def is_debug(self):
        # Debug is on when either the command line or the config says so.
        if self.options.debug is True:
            return True
        if self.conf.get_bool_option('setting', '_debug', False):
            return True
        return False

    def atexit_register(self, callback: Callable,
                        *args: Any, **kwargs: Any) -> None:
        '''
        Register callback function, when system exit before execute.
        e.g:
            async def unregister(name):
                ...
            def conn_close(db,ignore=False):
                ...

            http = HttpServer()
            http.atexit_register(unregister, 'name')
            http.atexit_register(conn_close, db, ignore=True)
            http.start()
        :param callback: `<callable>`
        :return:
        '''
        assert isinstance(callback, Callable), 'callback must be callable'
        # Keyed by registration timestamp to preserve call order.
        self._atexit_callbacks[time.time()] = [callback, args, kwargs]

    def _atexit_call(self):
        # Invoke all registered shutdown callbacks; coroutine results are
        # executed on a dedicated event loop running in a daemon thread.
        if not self._atexit_callbacks:
            return
        loop = asyncio.new_event_loop()
        loop_thread = threading.Thread(
            target=self._start_thread_loop, args=(loop,))
        loop_thread.setDaemon(True)
        loop_thread.start()
        for _, callable_ in self._atexit_callbacks.items():
            callback, args, kwargs = callable_
            func_ = callback(*args, **kwargs)
            if isinstance(func_, types.CoroutineType):
                future = asyncio.run_coroutine_threadsafe(func_, loop)
                # Block until the coroutine has finished.
                future.result()

    @staticmethod
    def _start_thread_loop(loop):
        # Thread target: run the auxiliary event loop forever.
        asyncio.set_event_loop(loop)
        loop.run_forever()

    def _atexit_signal(self, signalnum, frame):
        # received signal, stop server
        if signalnum != signal.SIGHUP:
            self.logger.error(
                f'Received system input signal: {signalnum}, closed server.')
            try:
                self._atexit_call()
            except (Exception, RuntimeError):
                # Best-effort shutdown: callback failures must not block exit.
                pass
            IOLoop.current().stop()
            sys.exit(1)
        else:
            # SIGHUP is treated as "restart" rather than "shut down".
            SignalHandler.restart()

    def _signal_handler(self) -> bool:
        # Returns True when this invocation only delivered a restart/stop
        # signal to an already-running server (the caller should then exit).
        if self.options.signal:
            if self.options.signal not in SignalHandler.signals:
                self.logger.error(
                    'Error: signal options not in [restart, stop]')
                sys.exit(1)
            assert self._port, 'Please configure server port'
            if not self.check_port(self._port):
                # Nothing is listening, so there is no process to signal.
                return False
            pid_file = self._get_pid_path()
            return SignalHandler.signal_handler(
                pid_file, SignalHandler.signals[self.options.signal])
        return False

    def configure_daemon(self):
        # setting daemon, command line parameter first
        _pfile = self._get_pid_path()
        if self._check_daemon() is False:
            # Foreground mode: just record our pid.
            self.logger.info(f'Server pid [{os.getpid()}].')
            daemon.write_pid(_pfile, os.getpid())
            return
        # Daemon mode: fork and write the new pid.
        daemon.fork(_pfile)
        self.logger.info(f'Server pid [{os.getpid()}].')

    def configure_locale(self,
                         locale_path: str = None,
                         locale_name: str = 'messages') -> None:
        '''Config internationalization.

        The default locale comes from the config file ("language",
        default en_US).

        :param locale_path: `<str>` locale path base dir
        :param locale_name: `<str>` locale file name, default messages(.mo)
        '''
        if locale_path and os.path.exists(locale_path):
            tornado.locale.set_default_locale(
                self.conf.get_option('setting', 'language', 'en_US'))
            tornado.locale.load_gettext_translations(locale_path, locale_name)
            self._conf_locale = True

    def configure_default_handler(self, handler: tornado.web.RequestHandler):
        # Handler used by tornado for unmatched routes (e.g. 404 pages).
        self._conf_handlers['default_handler_class'] = handler

    def configure_static_handler(self, handler: tornado.web.RequestHandler):
        # Handler used by tornado to serve static files.
        self._conf_handlers['static_handler_class'] = handler

    def configure_logger(self) -> None:
        # Imported lazily so logging is only set up when the server starts.
        from tweb.utils.log import logger
        self.logger = logger

    def configure_settings(self,
                           settings_: dict = None,
                           module: str = None) -> tuple:
        '''Return (url handler modules, tornado settings).'''
        settings = self.get_settings(settings_)
        # setting default error handler or static file handler such as: 404
        if self._conf_handlers:
            settings.update(self._conf_handlers)
        # NOTE(review): this initial value is always overwritten below.
        modules = []
        if self.options.debug is True:
            settings['autoreload'] = True
            settings['debug'] = True
        else:
            settings['autoreload'] = settings['debug']
        modules = app.loading_handlers(name=module)
        return modules, settings

    def configure_http_server(self) -> None:
        # Bind the HTTP server; must be called after create_application().
        if not self.application:
            self.logger.error('Please create application.')
            sys.exit(1)
        server = tornado.httpserver.HTTPServer(self.application,
                                               xheaders=True,
                                               ssl_options=self.ssl_options)
        if self.options.proc is None:
            proc = self.conf.get_int_option('setting', 'processes', default=0)
        else:
            proc = self.options.proc
        if self.application.settings['debug'] is True or proc == 1:
            # Debug/autoreload cannot run with forked worker processes.
            server.listen(self._port, address=self.address)
        else:
            # Pre-fork model: bind once, then fork `proc` workers
            # (0 means one per CPU in tornado).
            sockets = tornado.netutil.bind_sockets(self._port,
                                                      address=self.address)
            tornado.process.fork_processes(proc)
            server.add_sockets(sockets)
        self.logger.info(f'Running on: http://localhost:{self._port}')

    def create_application(self, settings: dict, modules: list) -> Application:
        '''Create the tornado Application and expose server metadata
        (port, host, daemon/debug flags, config path, locale) through
        its settings.'''
        settings['server_port'] = self._port
        settings['server_host'] = socket.gethostname()
        settings['server_daemon'] = self._check_daemon()
        settings['server_debug'] = settings['debug']
        settings['server_config'] = self.options.conf
        settings['server_locale'] = self.conf.get_option(
            'setting', 'language', 'en_US')
        settings['server_conf_locale'] = self._conf_locale
        self.logger.info(f"Daemon mode: {settings['server_daemon']}")
        self.logger.info(f"Debug mode: {settings['debug']}")
        self.logger.info(f'Archive log: {self.logger.is_archive}')
        self.application = Application(modules, **settings)
        return self.application

    def initialize_tasks(self, tasks: Union[list] = None) -> None:
        '''Call each task entry's "func" (with optional "args"/"kwargs")
        and run any coroutines it returns on the IOLoop before serving.'''
        if not tasks or not self.application:
            return
        _tasks = []
        for obj in tasks:
            argcount = obj['func'].__code__.co_argcount
            if argcount > 0:
                # A bound method with only `self` takes no extra arguments.
                if isinstance(obj['func'], types.MethodType) and argcount == 1:
                    _func = obj['func']()
                else:
                    _func = obj['func'](*obj.get('args'), **obj.get('kwargs'))
            else:
                _func = obj['func']()
            if isinstance(_func, types.CoroutineType):
                _tasks.append(_func)
        self.application.init_with_loop(IOLoop.current().asyncio_loop, _tasks)
        self.logger.info('Initialize tasks done.')

    def start(self,
              settings: dict = None,
              tasks: Union[list] = None,
              module: str = None) -> None:
        '''Configure everything and run the server (blocking).'''
        self.configure_logger()
        self.configure_port()
        if self._signal_handler():
            # Only delivered a restart/stop signal; nothing to serve.
            return
        SignalHandler.listen(self._atexit_signal)
        self.configure_daemon()
        # self.configure_locale()
        modules, settings_ = self.configure_settings(settings, module)
        self.create_application(settings_, modules)
        self.configure_http_server()
        self.initialize_tasks(tasks)
        IOLoop.current().start()
| [
"uvloop.install",
"sys.exit",
"tweb.utils.environment.env.setenv",
"tweb.utils.daemon.fork",
"tweb.utils.settings.TronadoStdout.has_opt",
"tweb.utils.settings.default_settings",
"os.path.exists",
"tweb.utils.signal.SignalHandler.restart",
"tweb.utils.signal.SignalHandler.listen",
"asyncio.new_even... | [((249, 265), 'uvloop.install', 'uvloop.install', ([], {}), '()\n', (263, 265), False, 'import uvloop\n'), ((1622, 1655), 'tweb.utils.environment.env.setenv', 'env.setenv', (['"""CREATE_CONFIG"""', '(True)'], {}), "('CREATE_CONFIG', True)\n", (1632, 1655), False, 'from tweb.utils.environment import env\n'), ((2056, 2069), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (2067, 2069), False, 'from collections import OrderedDict\n'), ((3744, 3793), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (3757, 3793), False, 'import socket\n'), ((4750, 4802), 'tweb.utils.settings.default_settings', 'default_settings', (['debug'], {'cookie_secret': 'cookie_secret'}), '(debug, cookie_secret=cookie_secret)\n', (4766, 4802), False, 'from tweb.utils.settings import default_settings, TronadoStdout, DEF_COOKIE_SECRET\n'), ((6020, 6044), 'asyncio.new_event_loop', 'asyncio.new_event_loop', ([], {}), '()\n', (6042, 6044), False, 'import asyncio\n'), ((6067, 6129), 'threading.Thread', 'threading.Thread', ([], {'target': 'self._start_thread_loop', 'args': '(loop,)'}), '(target=self._start_thread_loop, args=(loop,))\n', (6083, 6129), False, 'import threading\n'), ((6579, 6607), 'asyncio.set_event_loop', 'asyncio.set_event_loop', (['loop'], {}), '(loop)\n', (6601, 6607), False, 'import asyncio\n'), ((7998, 8017), 'tweb.utils.daemon.fork', 'daemon.fork', (['_pfile'], {}), '(_pfile)\n', (8009, 8017), False, 'from tweb.utils import daemon\n'), ((10926, 10946), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (10944, 10946), False, 'import socket\n'), ((12593, 12634), 'tweb.utils.signal.SignalHandler.listen', 'SignalHandler.listen', (['self._atexit_signal'], {}), '(self._atexit_signal)\n', (12613, 12634), False, 'from tweb.utils.signal import SignalHandler\n'), ((2793, 2826), 'os.path.dirname', 'os.path.dirname', (['self.options.pid'], {}), '(self.options.pid)\n', (2808, 
2826), False, 'import os\n'), ((2842, 2870), 'os.access', 'os.access', (['base_dir', 'os.W_OK'], {}), '(base_dir, os.W_OK)\n', (2851, 2870), False, 'import os\n'), ((2940, 2963), 'tweb.utils.strings.get_root_path', 'strings.get_root_path', ([], {}), '()\n', (2961, 2963), False, 'from tweb.utils import strings\n'), ((4424, 4435), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (4432, 4435), False, 'import sys\n'), ((5878, 5889), 'time.time', 'time.time', ([], {}), '()\n', (5887, 5889), False, 'import time\n'), ((7039, 7050), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (7047, 7050), False, 'import sys\n'), ((7077, 7100), 'tweb.utils.signal.SignalHandler.restart', 'SignalHandler.restart', ([], {}), '()\n', (7098, 7100), False, 'from tweb.utils.signal import SignalHandler\n'), ((7571, 7658), 'tweb.utils.signal.SignalHandler.signal_handler', 'SignalHandler.signal_handler', (['pid_file', 'SignalHandler.signals[self.options.signal]'], {}), '(pid_file, SignalHandler.signals[self.options.\n signal])\n', (7599, 7658), False, 'from tweb.utils.signal import SignalHandler\n'), ((8497, 8524), 'os.path.exists', 'os.path.exists', (['locale_path'], {}), '(locale_path)\n', (8511, 8524), False, 'import os\n'), ((9944, 9955), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (9952, 9955), False, 'import sys\n'), ((1365, 1384), 'asyncio.wait', 'asyncio.wait', (['tasks'], {}), '(tasks)\n', (1377, 1384), False, 'import asyncio\n'), ((2377, 2401), 'tweb.utils.settings.TronadoStdout.has_opt', 'TronadoStdout.has_opt', (['k'], {}), '(k)\n', (2398, 2401), False, 'from tweb.utils.settings import default_settings, TronadoStdout, DEF_COOKIE_SECRET\n'), ((2486, 2508), 'tweb.utils.settings.TronadoStdout.getall', 'TronadoStdout.getall', ([], {}), '()\n', (2506, 2508), False, 'from tweb.utils.settings import default_settings, TronadoStdout, DEF_COOKIE_SECRET\n'), ((2736, 2768), 'os.path.exists', 'os.path.exists', (['self.options.pid'], {}), '(self.options.pid)\n', (2750, 2768), False, 'import 
os\n'), ((6440, 6485), 'asyncio.run_coroutine_threadsafe', 'asyncio.run_coroutine_threadsafe', (['func_', 'loop'], {}), '(func_, loop)\n', (6472, 6485), False, 'import asyncio\n'), ((7357, 7368), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (7365, 7368), False, 'import sys\n'), ((7958, 7969), 'os.getpid', 'os.getpid', ([], {}), '()\n', (7967, 7969), False, 'import os\n'), ((12237, 12253), 'tornado.ioloop.IOLoop.current', 'IOLoop.current', ([], {}), '()\n', (12251, 12253), False, 'from tornado.ioloop import IOLoop\n'), ((12906, 12922), 'tornado.ioloop.IOLoop.current', 'IOLoop.current', ([], {}), '()\n', (12920, 12922), False, 'from tornado.ioloop import IOLoop\n'), ((2423, 2446), 'tweb.utils.settings.TronadoStdout.set', 'TronadoStdout.set', (['k', 'v'], {}), '(k, v)\n', (2440, 2446), False, 'from tweb.utils.settings import default_settings, TronadoStdout, DEF_COOKIE_SECRET\n'), ((4940, 4976), 'functools.partial', 'partial', (['self._log_func', 'self.logger'], {}), '(self._log_func, self.logger)\n', (4947, 4976), False, 'from functools import partial\n'), ((7003, 7019), 'tornado.ioloop.IOLoop.current', 'IOLoop.current', ([], {}), '()\n', (7017, 7019), False, 'from tornado.ioloop import IOLoop\n'), ((8058, 8069), 'os.getpid', 'os.getpid', ([], {}), '()\n', (8067, 8069), False, 'import os\n'), ((7904, 7915), 'os.getpid', 'os.getpid', ([], {}), '()\n', (7913, 7915), False, 'import os\n')] |
from typing import Any, Callable
from unittest import TestCase
from puma.runnable import Runnable
from puma.runnable.runner import ThreadRunner
def call_runnable_method_on_running_instance(test_case: TestCase, runnable_factory: Callable[[], Runnable], test_callback: Callable[[Runnable], Any]) -> None:
    """Build a runnable, run it on a thread, and assert that invoking
    *test_callback* on the running instance raises ``ValueError``."""
    instance = runnable_factory()
    with ThreadRunner(instance) as active_runner:
        active_runner.start_blocking()
        with test_case.assertRaises(ValueError):
            test_callback(instance)
| [
"puma.runnable.runner.ThreadRunner"
] | [((349, 371), 'puma.runnable.runner.ThreadRunner', 'ThreadRunner', (['runnable'], {}), '(runnable)\n', (361, 371), False, 'from puma.runnable.runner import ThreadRunner\n')] |
import argparse
import os

# Calibrated (ptmin, ptmax) window for each supported jet-pt bin.
PT_RANGES = {
    100: (0.815, 1.159),
    200: (0.819, 1.123),
    500: (0.821, 1.093),
    1000: (0.8235, 1.076),
}

parser = argparse.ArgumentParser()
parser.add_argument("--save", type=str, default="1", help='save-name template (may contain {} for the pt bin)')
parser.add_argument("--get", type=str, default="ptcut", help='quantity forwarded to getauc.py')
parser.add_argument("--gpu", type=str, default="0,1,2,3", help='comma separated GPU ids')
parser.add_argument("--pt", type=str, default="100,200,500,1000", help='jet pt bin; one of 100, 200, 500, 1000')
parser.add_argument("--isz", type=int, default=0, help='isz flag forwarded to partonrun/partonpred')
parser.add_argument("--etabin", type=float, default=2.4, help='eta bin forwarded to partonrun')
parser.add_argument("--unscale", type=int, default=1, help='unscale flag (not used by this driver)')
parser.add_argument("--parton", type=int, default=0, help='parton flag forwarded to getauc.py')
parser.add_argument("--run", type=int, default=0, help='first stage to run: 0=train, 1=predict, 2=auc')
args = parser.parse_args()

save = args.save
gpu = args.gpu
# Fix: the original used eval() on the raw option (unsafe) and then fell
# through a chain of if(pt==...) tests, leaving ptmin/ptmax undefined
# (NameError) for any unsupported value -- including the default string.
try:
    pt = int(args.pt)
except ValueError:
    parser.error("--pt must be a single bin, one of: %s" % sorted(PT_RANGES))
if pt not in PT_RANGES:
    parser.error("unsupported --pt %d; choose one of: %s" % (pt, sorted(PT_RANGES)))
ptmin, ptmax = PT_RANGES[pt]

# Stage pipeline: train -> predict -> AUC; --run skips earlier stages.
if args.run < 1:
    os.system("python partonrun.py --pt {pt} --save {save} --end 100000 --epochs 50 --gpu {gpu} --ptmin {ptmin} --ptmax {ptmax} --batch_size 1000000 --isz {isz} --etabin {etabin}".format(save=save.format(pt), pt=pt, gpu=gpu, isz=args.isz, etabin=args.etabin, ptmin=ptmin, ptmax=ptmax))
if args.run < 2:
    os.system("python partonpred.py --save {save} --pt {pt} --isz {isz} --gpu {gpu} ".format(save=save.format(pt), pt=pt, gpu=gpu, isz=args.isz))
if args.run < 3:
    os.system("python getauc.py --save {save} --pt {pt} --get {get} --parton {parton} ".format(save=save.format(pt), pt=pt, get=args.get, parton=args.parton))
| [
"argparse.ArgumentParser"
] | [((34, 59), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (57, 59), False, 'import argparse\n')] |
"""Fix types works
Revision ID: 340c59e2160c
Revises: 874db9c5a19d
Create Date: 2021-12-05 13:47:04.772395
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '340c59e2160c'
down_revision = '874db9c5a19d'
branch_labels = None
depends_on = None
def upgrade():
    """Store the ``works`` text columns as plain strings; only the
    description may be left empty."""
    # ### commands auto generated by Alembic - please adjust! ###
    for column_name, allow_null in (('headline', False),
                                    ('description', True)):
        op.alter_column('works', column_name,
                        type_=sa.String(),
                        nullable=allow_null)
    # ### end Alembic commands ###
# ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Intentionally a no-op: the upgrade discards the original DateTime
    # typing, so this migration cannot be reversed safely.
    pass
    # WARNING! Not downgradable
    # op.alter_column('works', 'headline',
    #                type_=sa.DateTime(),
    #                server_default='now()',
    #                nullable=False)
    # op.alter_column('works', 'description',
    #                type_=sa.DateTime(),
    #                server_default='now()',
    #                nullable=True)
    # ### end Alembic commands ###
| [
"sqlalchemy.String"
] | [((449, 460), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (458, 460), True, 'import sqlalchemy as sa\n'), ((568, 579), 'sqlalchemy.String', 'sa.String', ([], {}), '()\n', (577, 579), True, 'import sqlalchemy as sa\n')] |
import sys
import lzma
import argparse
import json
# Command line: one or more Packages.xz indices and one or more package
# names to locate within them.
parser = argparse.ArgumentParser()
parser.add_argument('-i', dest="indices", metavar="Packages.xz", nargs='+', help='Index file to look in (Packages.xz)', required=True)
parser.add_argument('-p', dest="packages", metavar="libpackage-dev", nargs='+', help='Package name to search for', required=True)
args = parser.parse_args()
# Set for O(1) membership tests while scanning the indices.
packages = set(args.packages)
def iter_entries(index):
    """Yield ``(package_name, control_lines)`` pairs from an
    xz-compressed Debian ``Packages`` index.

    Stanzas are separated by blank lines; each yielded ``control_lines``
    is the list of raw text lines of one stanza (newlines preserved).

    :param index: path to a ``Packages.xz`` file
    """
    with lzma.open(index, 'r') as f:
        buffer = []
        package_name = None
        for line in f:
            line = str(line, 'utf8')
            if line[:9] == 'Package: ':
                package_name = line[9:-1]
            if line.isspace():
                # Blank line terminates the current stanza.
                yield package_name, buffer
                buffer = []
            else:
                buffer.append(line)
        # Fix: a final stanza without a trailing blank line used to be
        # silently dropped; emit it as well.
        if buffer:
            yield package_name, buffer
def parse_control(entries):
    """Parse RFC822-style control lines into a ``{field: value}`` dict.

    Lines beginning with whitespace continue the previous field; their
    content (minus the leading space) is appended after a newline.
    """
    parsed = {}
    last_key = None
    for line in entries:
        if line[0].isspace():
            # Continuation of the previous field's value.
            parsed[last_key] += '\n' + line[1:]
        else:
            key, value = line.split(': ', 1)
            last_key = key
            parsed[key] = value[:-1]
    return parsed
# Map of package name -> parsed control stanza for every requested
# package found in any of the given indices.
FOUND_PACKAGES = dict()
for index in args.indices:
    for package_name, entry in iter_entries(index):
        if package_name not in packages:
            continue
        parsed = parse_control(entry)
        if package_name in FOUND_PACKAGES:
            raise RuntimeError("Duplicate package: " + package_name)
        FOUND_PACKAGES[package_name] = parsed

# Every requested package must have been located.
for wanted in packages:
    if wanted not in FOUND_PACKAGES:
        raise RuntimeError("Package not found: " + wanted)

# Emit name / archive path / checksum for each match as JSON.
result = [
    {
        "name": info['Package'],
        "filename": info['Filename'],
        "sha256": info['SHA256'],
    }
    for info in FOUND_PACKAGES.values()
]
print(json.dumps(result, sort_keys=True, indent=4))
| [
"lzma.open",
"json.dumps",
"argparse.ArgumentParser"
] | [((62, 87), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (85, 87), False, 'import argparse\n'), ((1799, 1843), 'json.dumps', 'json.dumps', (['result'], {'sort_keys': '(True)', 'indent': '(4)'}), '(result, sort_keys=True, indent=4)\n', (1809, 1843), False, 'import json\n'), ((447, 468), 'lzma.open', 'lzma.open', (['index', '"""r"""'], {}), "(index, 'r')\n", (456, 468), False, 'import lzma\n')] |
from decimal import Decimal
import csv
from datetime import datetime
from imap_tools import MailBox, AND
from django.conf import settings
from django.http import HttpResponse
from django.shortcuts import render, get_object_or_404
from django.views.generic import CreateView, UpdateView, FormView, TemplateView
from django.views.generic.dates import ( ArchiveIndexView, YearArchiveView,
MonthArchiveView, )
from django.contrib.auth.mixins import PermissionRequiredMixin
from django.contrib.auth.decorators import permission_required
from django.core.files.uploadedfile import SimpleUploadedFile
from django.utils.translation import gettext as _
from django.urls import reverse
from .models import Invoice, CSVInvoice
from .forms import InvoiceCreateForm, InvoiceDeleteForm, CSVInvoiceCreateForm
from .management.commands.fetch_invoice_emails import do_command
from .choices import CAT
class InvoiceArchiveIndexView(PermissionRequiredMixin, ArchiveIndexView):
    """Paginated archive of all invoices, ordered by ``date``."""
    model = Invoice
    permission_required = 'accounting.view_invoice'
    date_field = 'date'
    allow_future = True
    context_object_name = 'all_invoices'
    paginate_by = 50
    allow_empty = True

    def setup(self, request, *args, **kwargs):
        super(InvoiceArchiveIndexView, self).setup(request, *args, **kwargs)
        # Run the fetch_invoice_emails management command on every
        # request so freshly mailed CSVs are imported before listing.
        do_command()

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # Surface one-shot feedback flags passed via the query string by
        # the create/update/delete/CSV-import views' redirects.
        if 'created' in self.request.GET:
            context['created'] = self.request.GET['created']
        elif 'modified' in self.request.GET:
            context['modified'] = self.request.GET['modified']
        elif 'deleted' in self.request.GET:
            context['deleted'] = self.request.GET['deleted']
        elif 'csv_created' in self.request.GET:
            context['csv_created'] = self.request.GET['csv_created']
            context['csv_modified'] = self.request.GET['csv_modified']
            context['csv_failed'] = self.request.GET['csv_failed']
        return context
class ChartMixin:
    """Adds chart aggregates to invoice archive views.

    Expects ``context['all_invoices']`` (provided by the archive view)
    and adds:

    * ``active_sum`` / ``passive_sum`` -- rounded grand totals;
    * ``active_cat`` / ``passive_cat`` -- per-category rounded totals
      for the 'A'/'P' categories in ``CAT``, with everything not
      accounted for under the "Other" label.

    Fix: the original shadowed the builtin ``sum`` and repeated the
    same aggregation loop four times; the logic now lives in two
    private helpers.
    """

    @staticmethod
    def _total(invoices):
        # Unrounded Decimal sum of Invoice.get_total() over a queryset.
        total = Decimal('0.00')
        for invoice in invoices:
            total += invoice.get_total()
        return total

    def _category_breakdown(self, invoices, prefix, overall):
        """Return ``{label: rounded total}`` for the categories whose code
        starts with *prefix* ('A' or 'P'); the remainder of *overall*
        is reported under the translated "Other" label."""
        breakdown = {}
        accounted = Decimal('0.00')
        for code, label in CAT:
            if code.startswith(prefix):
                subtotal = self._total(invoices.filter(category=code))
                # Labels are stored with an 'A-'/'P-' prefix; strip it.
                breakdown[label.replace(prefix + '-', '')] = round(subtotal, 0)
                accounted += subtotal
        breakdown[_('Other')] = round(overall - accounted, 0)
        return breakdown

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        active = context['all_invoices'].filter(active=True)
        passive = context['all_invoices'].filter(active=False)
        context['active_sum'] = round(self._total(active), 0)
        context['passive_sum'] = round(self._total(passive), 0)
        context['active_cat'] = self._category_breakdown(
            active, 'A', context['active_sum'])
        context['passive_cat'] = self._category_breakdown(
            passive, 'P', context['passive_sum'])
        return context
class InvoiceYearArchiveView(PermissionRequiredMixin, ChartMixin, YearArchiveView):
    """Yearly invoice archive with the ChartMixin aggregate totals."""
    model = Invoice
    permission_required = 'accounting.view_invoice'
    make_object_list = True
    date_field = 'date'
    allow_future = True
    context_object_name = 'all_invoices'
    year_format = '%Y'
    allow_empty = True
class InvoiceMonthArchiveView(PermissionRequiredMixin, ChartMixin, MonthArchiveView):
    """Monthly invoice archive with the ChartMixin aggregate totals."""
    model = Invoice
    permission_required = 'accounting.view_invoice'
    date_field = 'date'
    allow_future = True
    context_object_name = 'all_invoices'
    year_format = '%Y'
    month_format = '%m'
    allow_empty = True
class AddAnotherMixin:
    """Copies the one-shot feedback flags from the query string into the
    template context (set by the create/update/import redirects)."""

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        params = self.request.GET
        if 'created' in params:
            context['created'] = params['created']
        elif 'modified' in params:
            context['modified'] = params['modified']
        elif 'csv_created' in params:
            for key in ('csv_created', 'csv_modified', 'csv_failed'):
                context[key] = params[key]
        return context
class InvoiceCreateView(PermissionRequiredMixin, AddAnotherMixin, CreateView):
    """Create a new invoice; "add another" keeps the user on the form."""
    model = Invoice
    permission_required = 'accounting.add_invoice'
    form_class = InvoiceCreateForm

    def get_success_url(self):
        target = 'invoices:add' if 'add_another' in self.request.POST else 'invoices:index'
        return reverse(target) + f'?created={self.object.number}'
class InvoiceUpdateView(PermissionRequiredMixin, AddAnotherMixin, UpdateView):
    """Edit an existing invoice; "add another" returns to the blank form."""
    model = Invoice
    permission_required = 'accounting.change_invoice'
    form_class = InvoiceCreateForm
    template_name = 'accounting/invoice_update_form.html'

    def get_success_url(self):
        target = 'invoices:add' if 'add_another' in self.request.POST else 'invoices:index'
        return reverse(target) + f'?modified={self.object.number}'
class InvoiceDeleteView(PermissionRequiredMixin, FormView):
    """Confirm-and-delete view for a single invoice."""
    model = Invoice
    permission_required = 'accounting.delete_invoice'
    form_class = InvoiceDeleteForm
    template_name = 'accounting/invoice_delete_form.html'

    def form_valid(self, form):
        invoice = get_object_or_404(Invoice, id=self.kwargs['pk'])
        # Remember the number before deletion so the redirect can show it.
        self.number = invoice.number
        invoice.delete()
        return super().form_valid(form)

    def get_success_url(self):
        return reverse('invoices:index') + f'?deleted={self.number}'
class CSVInvoiceCreateView(PermissionRequiredMixin, AddAnotherMixin, FormView):
    """Upload one or more CSV files and import the invoices they contain.

    Tallies created/modified/failed rows across all uploaded files and
    reports them through the redirect query string.
    """
    model = CSVInvoice
    template_name = 'accounting/csvinvoice_form.html'
    permission_required = 'accounting.add_csvinvoice'
    form_class = CSVInvoiceCreateForm

    def form_valid(self, form):
        self.created = 0
        self.modified = 0
        self.failed = 0
        for uploaded in self.request.FILES.getlist('csv'):
            record = CSVInvoice(csv=uploaded)
            record.save()
            self.created += record.created
            self.modified += record.modified
            self.failed += record.failed
        return super().form_valid(form)

    def get_success_url(self):
        query = (f'?csv_created={self.created}'
                 f'&csv_modified={self.modified}&csv_failed={self.failed}')
        target = 'invoices:csv' if 'add_another' in self.request.POST else 'invoices:index'
        return reverse(target) + query
def csv_writer(writer, qs):
    """Write a localized header row plus one row per invoice in *qs*
    to the given ``csv.writer`` and return it."""
    header = [_('Number'), _('Client'), _('Active?'), _('dd/mm/yy'),
              _('Description'), _('Taxable'), _('Social security'), _('VAT'),
              _('Category'), _('Paid?')]
    writer.writerow(header)
    for invoice in qs:
        writer.writerow([
            invoice.number,
            invoice.client,
            'yes' if invoice.active else '',
            datetime.strftime(invoice.date, '%d/%m/%y'),
            invoice.descr,
            invoice.amount,
            invoice.security,
            invoice.vat,
            invoice.category,
            'yes' if invoice.paid else '',
        ])
    return writer
@permission_required('accounting.view_invoice')
def year_download(request, year):
    """Stream every invoice of *year* as a CSV attachment."""
    response = HttpResponse(content_type='text/csv')
    filename = '%(invoices)s-%(year)d.csv' % {'invoices': _('Invoices'),
                                              'year': year}
    response['Content-Disposition'] = 'attachment; filename="%s"' % filename
    invoices = Invoice.objects.filter(date__year=year)
    csv_writer(csv.writer(response), invoices)
    return response
@permission_required('accounting.view_invoice')
def month_download(request, year, month):
    """Stream every invoice of *year*/*month* as a CSV attachment."""
    response = HttpResponse(content_type='text/csv')
    filename = '%(invoices)s-%(year)d-%(month)d.csv' % {
        'invoices': _('Invoices'), 'year': year, 'month': month}
    response['Content-Disposition'] = 'attachment; filename="%s"' % filename
    invoices = Invoice.objects.filter(date__year=year).filter(date__month=month)
    csv_writer(csv.writer(response), invoices)
    return response
#class CSVInvoiceMailTemplateView(PermissionRequiredMixin, TemplateView):
#permission_required = 'accounting.view_invoice'
#template_name = 'accounting/email.html'
#def get_context_data(self, **kwargs):
#context = super().get_context_data(**kwargs)
#context['csv_created'] = 0
#context['csv_modified'] = 0
#context['csv_failed'] = 0
#HOST = settings.IMAP_HOST
#USER = settings.IMAP_USER
#PASSWORD = settings.IMAP_PWD
#PORT = settings.IMAP_PORT
#FROM = settings.IMAP_FROM
#with MailBox(HOST).login(USER, PASSWORD, 'INBOX') as mailbox:
#for message in mailbox.fetch(AND(seen=False, subject=_('invoices'),
#from_=FROM), mark_seen=True):
#for att in message.attachments: # list: [Attachment objects]
#file = SimpleUploadedFile(att.filename, att.payload,
#att.content_type)
#instance = CSVInvoice(csv=file)
#instance.save()
#context['csv_created'] += instance.created
#context['csv_modified'] += instance.modified
#context['csv_failed'] += instance.failed
#return context
| [
"django.utils.translation.gettext",
"django.http.HttpResponse",
"csv.writer",
"django.shortcuts.get_object_or_404",
"django.contrib.auth.decorators.permission_required",
"django.urls.reverse",
"datetime.datetime.strftime",
"decimal.Decimal"
] | [((8191, 8237), 'django.contrib.auth.decorators.permission_required', 'permission_required', (['"""accounting.view_invoice"""'], {}), "('accounting.view_invoice')\n", (8210, 8237), False, 'from django.contrib.auth.decorators import permission_required\n'), ((8682, 8728), 'django.contrib.auth.decorators.permission_required', 'permission_required', (['"""accounting.view_invoice"""'], {}), "('accounting.view_invoice')\n", (8701, 8728), False, 'from django.contrib.auth.decorators import permission_required\n'), ((8357, 8394), 'django.http.HttpResponse', 'HttpResponse', ([], {'content_type': '"""text/csv"""'}), "(content_type='text/csv')\n", (8369, 8394), False, 'from django.http import HttpResponse\n'), ((8602, 8622), 'csv.writer', 'csv.writer', (['response'], {}), '(response)\n', (8612, 8622), False, 'import csv\n'), ((8856, 8893), 'django.http.HttpResponse', 'HttpResponse', ([], {'content_type': '"""text/csv"""'}), "(content_type='text/csv')\n", (8868, 8893), False, 'from django.http import HttpResponse\n'), ((9153, 9173), 'csv.writer', 'csv.writer', (['response'], {}), '(response)\n', (9163, 9173), False, 'import csv\n'), ((2316, 2331), 'decimal.Decimal', 'Decimal', (['"""0.00"""'], {}), "('0.00')\n", (2323, 2331), False, 'from decimal import Decimal\n'), ((2486, 2501), 'decimal.Decimal', 'Decimal', (['"""0.00"""'], {}), "('0.00')\n", (2493, 2501), False, 'from decimal import Decimal\n'), ((2701, 2716), 'decimal.Decimal', 'Decimal', (['"""0.00"""'], {}), "('0.00')\n", (2708, 2716), False, 'from decimal import Decimal\n'), ((3301, 3316), 'decimal.Decimal', 'Decimal', (['"""0.00"""'], {}), "('0.00')\n", (3308, 3316), False, 'from decimal import Decimal\n'), ((6308, 6356), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Invoice'], {'id': "self.kwargs['pk']"}), "(Invoice, id=self.kwargs['pk'])\n", (6325, 6356), False, 'from django.shortcuts import render, get_object_or_404\n'), ((8005, 8042), 'datetime.datetime.strftime', 'datetime.strftime', 
(['i.date', '"""%d/%m/%y"""'], {}), "(i.date, '%d/%m/%y')\n", (8022, 8042), False, 'from datetime import datetime\n'), ((6531, 6556), 'django.urls.reverse', 'reverse', (['"""invoices:index"""'], {}), "('invoices:index')\n", (6538, 6556), False, 'from django.urls import reverse\n'), ((7728, 7739), 'django.utils.translation.gettext', '_', (['"""Number"""'], {}), "('Number')\n", (7729, 7739), True, 'from django.utils.translation import gettext as _\n'), ((7741, 7752), 'django.utils.translation.gettext', '_', (['"""Client"""'], {}), "('Client')\n", (7742, 7752), True, 'from django.utils.translation import gettext as _\n'), ((7754, 7766), 'django.utils.translation.gettext', '_', (['"""Active?"""'], {}), "('Active?')\n", (7755, 7766), True, 'from django.utils.translation import gettext as _\n'), ((7768, 7781), 'django.utils.translation.gettext', '_', (['"""dd/mm/yy"""'], {}), "('dd/mm/yy')\n", (7769, 7781), True, 'from django.utils.translation import gettext as _\n'), ((7791, 7807), 'django.utils.translation.gettext', '_', (['"""Description"""'], {}), "('Description')\n", (7792, 7807), True, 'from django.utils.translation import gettext as _\n'), ((7809, 7821), 'django.utils.translation.gettext', '_', (['"""Taxable"""'], {}), "('Taxable')\n", (7810, 7821), True, 'from django.utils.translation import gettext as _\n'), ((7823, 7843), 'django.utils.translation.gettext', '_', (['"""Social security"""'], {}), "('Social security')\n", (7824, 7843), True, 'from django.utils.translation import gettext as _\n'), ((7845, 7853), 'django.utils.translation.gettext', '_', (['"""VAT"""'], {}), "('VAT')\n", (7846, 7853), True, 'from django.utils.translation import gettext as _\n'), ((7863, 7876), 'django.utils.translation.gettext', '_', (['"""Category"""'], {}), "('Category')\n", (7864, 7876), True, 'from django.utils.translation import gettext as _\n'), ((7878, 7888), 'django.utils.translation.gettext', '_', (['"""Paid?"""'], {}), "('Paid?')\n", (7879, 7888), True, 'from 
django.utils.translation import gettext as _\n'), ((8508, 8521), 'django.utils.translation.gettext', '_', (['"""Invoices"""'], {}), "('Invoices')\n", (8509, 8521), True, 'from django.utils.translation import gettext as _\n'), ((9017, 9030), 'django.utils.translation.gettext', '_', (['"""Invoices"""'], {}), "('Invoices')\n", (9018, 9030), True, 'from django.utils.translation import gettext as _\n'), ((2862, 2877), 'decimal.Decimal', 'Decimal', (['"""0.00"""'], {}), "('0.00')\n", (2869, 2877), False, 'from decimal import Decimal\n'), ((3138, 3148), 'django.utils.translation.gettext', '_', (['"""Other"""'], {}), "('Other')\n", (3139, 3148), True, 'from django.utils.translation import gettext as _\n'), ((3464, 3479), 'decimal.Decimal', 'Decimal', (['"""0.00"""'], {}), "('0.00')\n", (3471, 3479), False, 'from decimal import Decimal\n'), ((3748, 3758), 'django.utils.translation.gettext', '_', (['"""Other"""'], {}), "('Other')\n", (3749, 3758), True, 'from django.utils.translation import gettext as _\n'), ((5376, 5399), 'django.urls.reverse', 'reverse', (['"""invoices:add"""'], {}), "('invoices:add')\n", (5383, 5399), False, 'from django.urls import reverse\n'), ((5468, 5493), 'django.urls.reverse', 'reverse', (['"""invoices:index"""'], {}), "('invoices:index')\n", (5475, 5493), False, 'from django.urls import reverse\n'), ((5874, 5897), 'django.urls.reverse', 'reverse', (['"""invoices:add"""'], {}), "('invoices:add')\n", (5881, 5897), False, 'from django.urls import reverse\n'), ((5967, 5992), 'django.urls.reverse', 'reverse', (['"""invoices:index"""'], {}), "('invoices:index')\n", (5974, 5992), False, 'from django.urls import reverse\n'), ((7386, 7409), 'django.urls.reverse', 'reverse', (['"""invoices:csv"""'], {}), "('invoices:csv')\n", (7393, 7409), False, 'from django.urls import reverse\n'), ((7548, 7573), 'django.urls.reverse', 'reverse', (['"""invoices:index"""'], {}), "('invoices:index')\n", (7555, 7573), False, 'from django.urls import reverse\n')] |
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.db import models
"""
TAB_USER_INFO
字段Id 字段名称 类型/长度 必填 功能描述
userID 用户ID CHAR(8) Y PK
userName 用户名 VARCHAR(60) Y
passWord 密码 VARCHAR(120) Y 加密存储
phoneNum 联系电话 CHAR(20) Y
crtTime 创建时间 datetime Y 默认值当前时间戳
updTime 更新时间 datetime Y 默认值当前时间戳
"""
class UserModel(AbstractUser):
    """Custom user model backing the TAB_USER_INFO table.

    Extends Django's AbstractUser (username/password/etc.), adding an
    auto-increment primary key, a contact phone number and create/update
    audit timestamps, per the schema comment at the top of this module.
    """
    # Auto-increment surrogate key; overrides AbstractUser's default `id` PK.
    user_id = models.AutoField(verbose_name='用户ID', primary_key=True)
    # Contact phone number (schema calls for CHAR(20)).
    phone_num = models.CharField(max_length=20, verbose_name='联系电话')
    # Set once when the row is first inserted (auto_now_add).
    crt_time = models.DateTimeField(verbose_name='创建时间', auto_now_add=True)
    # Refreshed automatically on every save() (auto_now).
    upd_time = models.DateTimeField(verbose_name='更新时间', auto_now=True)

    class Meta:
        db_table = 'TAB_USER_INFO'
        verbose_name = '用户表'
        verbose_name_plural = verbose_name

    def __str__(self):
        # Human-readable representation: the inherited username field.
        return self.username
| [
"django.db.models.DateTimeField",
"django.db.models.AutoField",
"django.db.models.CharField"
] | [((383, 438), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""用户ID"""', 'primary_key': '(True)'}), "(verbose_name='用户ID', primary_key=True)\n", (399, 438), False, 'from django.db import models\n'), ((455, 507), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'verbose_name': '"""联系电话"""'}), "(max_length=20, verbose_name='联系电话')\n", (471, 507), False, 'from django.db import models\n'), ((523, 583), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'verbose_name': '"""创建时间"""', 'auto_now_add': '(True)'}), "(verbose_name='创建时间', auto_now_add=True)\n", (543, 583), False, 'from django.db import models\n'), ((599, 655), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'verbose_name': '"""更新时间"""', 'auto_now': '(True)'}), "(verbose_name='更新时间', auto_now=True)\n", (619, 655), False, 'from django.db import models\n')] |
import numpy as np
import eqsig
from liquepy.element.models import ShearTest
from liquepy.element import assess
def test_with_one_cycle_no_dissipation():
    """Loading out and back along the same line encloses no area -> zero dissipated energy."""
    strs = np.array([0, -1, -2, -3, -4, -3, -2, -1, 0, 1, 2, 3, 4, 3, 2, 1, 0])
    tau = np.array([0, -2, -4, -6, -8, -6, -4, -2, 0, 2, 4, 6, 8, 6, 4, 2, 0])
    expected_energy = 0
    assert np.isclose(expected_energy, assess.calc_diss_energy_fd(tau, strs)[-1])
def test_with_one_cycle_no_dissipation_with_offset():
    """Shifting the strain history by a constant must not change the dissipated energy."""
    strs = np.array([0, -1, -2, -3, -4, -3, -2, -1, 0, 1, 2, 3, 4, 3, 2, 1, 0]) + 4
    tau = np.array([0, -2, -4, -6, -8, -6, -4, -2, 0, 2, 4, 6, 8, 6, 4, 2, 0])
    expected_energy = 0
    assert np.isclose(expected_energy, assess.calc_diss_energy_fd(tau, strs)[-1])
def test_with_one_cycle_circle():
    """A circular stress-strain loop of radius 4 encloses an area of pi * 4 ** 2."""
    angle = np.linspace(0, 2 * np.pi, 3600)
    strs = 4 * np.sin(angle)
    tau = 4 * np.cos(angle)
    expected_energy = 4 ** 2 * np.pi
    assert np.isclose(expected_energy, assess.calc_diss_energy_fd(tau, strs)[-1])
def test_with_one_cycle_circle_with_offset():
    """Translating the circular loop away from the origin leaves its enclosed area unchanged."""
    angle = np.linspace(0, 2 * np.pi, 3600)
    strs = 4 * np.sin(angle) + 4
    tau = 4 * np.cos(angle) + 10
    expected_energy = 4 ** 2 * np.pi
    assert np.isclose(expected_energy, assess.calc_diss_energy_fd(tau, strs)[-1])
def test_with_one_cycle_triangles():
    """Triangular load/unload legs (unloading at constant stress) enclose an area of 8 * 4."""
    strs = np.array([0, -1, -2, -3, -4, -4, -3, -2, -1, 0, 1, 2, 3, 4, 4, 3, 2, 1, 0])
    tau = np.array([0, -2, -4, -6, -8, 0, 0, 0, 0, 0, 2, 4, 6, 8, 0, 0, 0, 0, 0])
    expected_energy = 8 * 4.
    assert np.isclose(expected_energy, assess.calc_diss_energy_fd(tau, strs)[-1])
def test_average_of_absolute_simple():
    """Trapezoid-rule average of |values| across a single sign change (4 -> -3)."""
    sample = np.array([4, -3])
    target = 12.5 / 7
    result = assess.average_of_absolute_via_trapz(sample)
    assert np.isclose(result, target), (result, target)
def test_average_of_absolute_matching_neg():
    """For the symmetric series [3, -3, 3] each interval should average to 1.5."""
    sample = np.array([3, -3, 3])
    target = 1.5
    result = assess.average_of_absolute_via_trapz(sample)
    # Both intervals are mirror images, so both averages match.
    assert np.isclose(result[0], target), (result[0], target)
    assert np.isclose(result[1], target), (result[1], target)
def test_determine_cum_stored_energy_series_simple():
    """Cumulative stored energy of a symmetric load/unload history equals the summed triangle areas."""
    gamma = np.array([0, 4, 0, -3, 0])
    tau = np.array([0, 4, 0, -3, 0])
    two_times_triangle_1 = 2 * (4 * 4 / 2)
    two_times_triangle_2 = 2 * (3 * 3 / 2)
    expected_energy = two_times_triangle_1 + two_times_triangle_2
    et = ShearTest(tau, gamma, 1)
    energy = assess.calc_case_et(et)
    assert energy[-1] == expected_energy, (energy[-1], expected_energy)
def test_small_cycle_behaviour_increases_case():
    """Inserting a small intermediate cycle must only add to the cumulative energy."""
    gamma_1 = np.array([0, 4, -2, 0])
    tau_1 = np.array([0, 4, -4, 0])
    et_1 = ShearTest(tau_1, gamma_1, 1)
    energy_1 = assess.calc_case_et(et_1)
    # Same outer path, but with an extra 4 -> 3 -> 4 strain sub-cycle.
    gamma_2 = np.array([0, 4, 3, 4, -2, 0])
    tau_2 = np.array([0, 4, 1, 1, -4, 0])
    et_2 = ShearTest(tau_2, gamma_2, 1)
    energy_2 = assess.calc_case_et(et_2)
    assert energy_2[-1] > energy_1[-1]
def skip_test_strain_bulge_behaviour_increases_case():
    """Deliberately skipped (the skip_ prefix prevents pytest collection): strain-overshoot case."""
    gamma_1 = np.array([0, 4, -2, 0])
    tau_1 = np.array([0, 4, -4, 0])
    et_1 = ShearTest(tau_1, gamma_1, 1)
    energy_1 = assess.calc_case_et(et_1)
    # Strain bulges slightly past the previous peak (4 -> 4.1).
    gamma_2 = np.array([0, 4, 4.1, -2, 0])
    tau_2 = np.array([0, 4, 1, -4, 0])
    et_2 = ShearTest(tau_2, gamma_2, 1)
    energy_2 = assess.calc_case_et(et_2)
    assert energy_2[-1] > energy_1[-1]
def test_determine_cum_stored_energy_series_simple_up_down():
    """Load up to (1, 1), then partially unload to 0.5 strain (a /\\-shaped strain path)."""
    gamma = np.array([0., 1., 0.5])
    tau = np.array([0., 1., 0])
    expected_delta_e = 0.75  # two triangles (1x1x0.5 + 1x0.5x0.5)
    et = ShearTest(tau, gamma)
    energy = assess.calc_case_et(et)
    assert energy[-1] == expected_delta_e, energy
def test_determine_cum_stored_energy_series_simple_up_down_neg():
    """Load to (1, 1), then reverse through the origin to (-1, -1)."""
    gamma = np.array([0., 1., -1])
    tau = np.array([0., 1., -1])
    expected_delta_e = 1.5
    et = ShearTest(tau, gamma)
    energy = assess.calc_case_et(et)
    assert energy[-1] == expected_delta_e, energy
def test_determine_cum_stored_energy_series_simple_close_loop():
    """Full reversal from (1, 1) to (-1, -1) and back, closing the loop."""
    gamma = np.array([1., -1, 1])
    tau = np.array([1., -1, 1])
    expected_delta_e = 2
    et = ShearTest(tau, gamma)
    energy = assess.calc_case_et(et)
    assert energy[-1] == expected_delta_e, energy
def test_determine_cum_stored_energy_series_simple_4points():
    """Hand-computed expectation for a four-point history containing a reversal."""
    gamma = np.array([0, 1, -1, 2])
    tau = np.array([0, 1, -1, 1])
    # Trapezoid/triangle areas for the individual legs of the path.
    step_1 = (0 + 1) / 2 * (1 - 0)
    step_2 = (0 + 2) / 2 * (1 - 0)
    expected_delta_e = step_1 * 4 + step_2
    et = ShearTest(tau, gamma)
    energy = assess.calc_case_et(et)
    assert energy[-1] == expected_delta_e, (energy, expected_delta_e)
def test_determine_cum_stored_energy_series_simple_trapz_zero():
    """Trapezoidal-rule expectation for a path that starts at the origin."""
    gamma = np.array([0, 2, 1])
    tau = np.array([0, 2, 1])
    # Per-leg trapezoid areas: (mean stress) * |strain increment|.
    step_1 = (0 + 2) / 2 * (2 - 0)
    step_2 = (2 + 1) / 2 * abs(2 - 1)
    expected_delta_e = step_1 + step_2
    et = ShearTest(tau, gamma)
    energy = assess.calc_case_et(et)
    assert energy[-1] == expected_delta_e, (energy, expected_delta_e)
def test_determine_cum_stored_energy_series_simple_trapz():
    """Trapezoidal-rule expectation for a path that does not start at the origin."""
    gamma = np.array([1, 3, 2])
    tau = np.array([1, 2, 0])
    step_1 = (0 + 1) / 2 * (2 - 0)
    step_2 = (2 + 1) / 2 * abs(2 - 0)
    expected_delta_e = step_1 + step_2
    et = ShearTest(tau, gamma)
    energy = assess.calc_case_et(et)
    assert energy[-1] == expected_delta_e, (energy, expected_delta_e)
def test_determine_cum_stored_energy_series_simple_5points():
    """Five-point history: sum of the four trapezoidal leg contributions."""
    gamma = np.array([0, 2, 1, 3, 2])
    tau = np.array([0, 2, 1, 2, 0])
    step_1 = (0 + 2) / 2 * (2 - 0)
    step_2 = (2 + 1) / 2 * abs(2 - 1)
    step_3 = (0 + 1) / 2 * (2 - 0)
    step_4 = (2 + 1) / 2 * abs(2 - 0)
    expected_delta_e = step_1 + step_2 + step_3 + step_4
    et = ShearTest(tau, gamma)
    energy = assess.calc_case_et(et)
    assert energy[-1] == expected_delta_e, (energy, expected_delta_e)
def test_case_et_simple_6points():
    """Regression value (4.375) for a six-point mixed loading history."""
    gamma = np.array([0, 1, 0.5, 1.5, -1, 2])
    tau = np.array([0, 1, 0.5, 1, -1, 1])
    expected_delta_e = 4.375
    et = ShearTest(tau, gamma)
    energy = assess.calc_case_et(et)
    assert energy[-1] == expected_delta_e, (energy, expected_delta_e)
def test_get_energy_peaks_for_cyclic_loading():
    """Indices of the energy peaks; inputs are negated to exercise the negative branch."""
    fs = np.array([0, 1., 2., 3., 4., 5., 5.5, 5.5, 4., 3., 2.5, 2.0, 1., 0., -1, -2, -5, 1, 3, 3.5,
                   2.5, 3.5, 2.5, -1, -3])
    ds = np.array([0, 0.5, 1., 1.5, 2.5, 3., 4.25, 5.5, 5.5, 5.25, 5.5, 5.25, 4., 3., 1.5, 0.5, -3, -2, -1, -0.5,
                   -0.75, 1.5, 1., -1.5, -5])
    inds = assess.get_energy_peaks_for_cyclic_loading(-fs, -ds)
    expected = np.array([0, 7, 16, 21, 24])
    # All returned indices must match the expected peak positions exactly.
    assert np.sum(abs(inds - expected)) == 0
| [
"liquepy.element.assess.calc_diss_energy_fd",
"numpy.isclose",
"liquepy.element.assess.get_energy_peaks_for_cyclic_loading",
"liquepy.element.models.ShearTest",
"numpy.array",
"numpy.linspace",
"liquepy.element.assess.average_of_absolute_via_trapz",
"numpy.cos",
"numpy.sin",
"liquepy.element.asses... | [((169, 237), 'numpy.array', 'np.array', (['[0, -1, -2, -3, -4, -3, -2, -1, 0, 1, 2, 3, 4, 3, 2, 1, 0]'], {}), '([0, -1, -2, -3, -4, -3, -2, -1, 0, 1, 2, 3, 4, 3, 2, 1, 0])\n', (177, 237), True, 'import numpy as np\n'), ((248, 316), 'numpy.array', 'np.array', (['[0, -2, -4, -6, -8, -6, -4, -2, 0, 2, 4, 6, 8, 6, 4, 2, 0]'], {}), '([0, -2, -4, -6, -8, -6, -4, -2, 0, 2, 4, 6, 8, 6, 4, 2, 0])\n', (256, 316), True, 'import numpy as np\n'), ((573, 641), 'numpy.array', 'np.array', (['[0, -2, -4, -6, -8, -6, -4, -2, 0, 2, 4, 6, 8, 6, 4, 2, 0]'], {}), '([0, -2, -4, -6, -8, -6, -4, -2, 0, 2, 4, 6, 8, 6, 4, 2, 0])\n', (581, 641), True, 'import numpy as np\n'), ((796, 827), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', '(3600)'], {}), '(0, 2 * np.pi, 3600)\n', (807, 827), True, 'import numpy as np\n'), ((1064, 1095), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', '(3600)'], {}), '(0, 2 * np.pi, 3600)\n', (1075, 1095), True, 'import numpy as np\n'), ((1331, 1406), 'numpy.array', 'np.array', (['[0, -1, -2, -3, -4, -4, -3, -2, -1, 0, 1, 2, 3, 4, 4, 3, 2, 1, 0]'], {}), '([0, -1, -2, -3, -4, -4, -3, -2, -1, 0, 1, 2, 3, 4, 4, 3, 2, 1, 0])\n', (1339, 1406), True, 'import numpy as np\n'), ((1417, 1488), 'numpy.array', 'np.array', (['[0, -2, -4, -6, -8, 0, 0, 0, 0, 0, 2, 4, 6, 8, 0, 0, 0, 0, 0]'], {}), '([0, -2, -4, -6, -8, 0, 0, 0, 0, 0, 2, 4, 6, 8, 0, 0, 0, 0, 0])\n', (1425, 1488), True, 'import numpy as np\n'), ((1654, 1671), 'numpy.array', 'np.array', (['[4, -3]'], {}), '([4, -3])\n', (1662, 1671), True, 'import numpy as np\n'), ((1709, 1753), 'liquepy.element.assess.average_of_absolute_via_trapz', 'assess.average_of_absolute_via_trapz', (['values'], {}), '(values)\n', (1745, 1753), False, 'from liquepy.element import assess\n'), ((1765, 1793), 'numpy.isclose', 'np.isclose', (['av_abs', 'expected'], {}), '(av_abs, expected)\n', (1775, 1793), True, 'import numpy as np\n'), ((1874, 1894), 'numpy.array', 'np.array', (['[3, -3, 3]'], 
{}), '([3, -3, 3])\n', (1882, 1894), True, 'import numpy as np\n'), ((1927, 1971), 'liquepy.element.assess.average_of_absolute_via_trapz', 'assess.average_of_absolute_via_trapz', (['values'], {}), '(values)\n', (1963, 1971), False, 'from liquepy.element import assess\n'), ((1983, 2014), 'numpy.isclose', 'np.isclose', (['av_abs[0]', 'expected'], {}), '(av_abs[0], expected)\n', (1993, 2014), True, 'import numpy as np\n'), ((2049, 2080), 'numpy.isclose', 'np.isclose', (['av_abs[1]', 'expected'], {}), '(av_abs[1], expected)\n', (2059, 2080), True, 'import numpy as np\n'), ((2173, 2199), 'numpy.array', 'np.array', (['[0, 4, 0, -3, 0]'], {}), '([0, 4, 0, -3, 0])\n', (2181, 2199), True, 'import numpy as np\n'), ((2210, 2236), 'numpy.array', 'np.array', (['[0, 4, 0, -3, 0]'], {}), '([0, 4, 0, -3, 0])\n', (2218, 2236), True, 'import numpy as np\n'), ((2398, 2422), 'liquepy.element.models.ShearTest', 'ShearTest', (['tau', 'gamma', '(1)'], {}), '(tau, gamma, 1)\n', (2407, 2422), False, 'from liquepy.element.models import ShearTest\n'), ((2436, 2459), 'liquepy.element.assess.calc_case_et', 'assess.calc_case_et', (['et'], {}), '(et)\n', (2455, 2459), False, 'from liquepy.element import assess\n'), ((2598, 2621), 'numpy.array', 'np.array', (['[0, 4, -2, 0]'], {}), '([0, 4, -2, 0])\n', (2606, 2621), True, 'import numpy as np\n'), ((2634, 2657), 'numpy.array', 'np.array', (['[0, 4, -4, 0]'], {}), '([0, 4, -4, 0])\n', (2642, 2657), True, 'import numpy as np\n'), ((2669, 2697), 'liquepy.element.models.ShearTest', 'ShearTest', (['tau_1', 'gamma_1', '(1)'], {}), '(tau_1, gamma_1, 1)\n', (2678, 2697), False, 'from liquepy.element.models import ShearTest\n'), ((2713, 2738), 'liquepy.element.assess.calc_case_et', 'assess.calc_case_et', (['et_1'], {}), '(et_1)\n', (2732, 2738), False, 'from liquepy.element import assess\n'), ((2753, 2782), 'numpy.array', 'np.array', (['[0, 4, 3, 4, -2, 0]'], {}), '([0, 4, 3, 4, -2, 0])\n', (2761, 2782), True, 'import numpy as np\n'), ((2795, 2824), 
'numpy.array', 'np.array', (['[0, 4, 1, 1, -4, 0]'], {}), '([0, 4, 1, 1, -4, 0])\n', (2803, 2824), True, 'import numpy as np\n'), ((2836, 2864), 'liquepy.element.models.ShearTest', 'ShearTest', (['tau_2', 'gamma_2', '(1)'], {}), '(tau_2, gamma_2, 1)\n', (2845, 2864), False, 'from liquepy.element.models import ShearTest\n'), ((2880, 2905), 'liquepy.element.assess.calc_case_et', 'assess.calc_case_et', (['et_2'], {}), '(et_2)\n', (2899, 2905), False, 'from liquepy.element import assess\n'), ((3017, 3040), 'numpy.array', 'np.array', (['[0, 4, -2, 0]'], {}), '([0, 4, -2, 0])\n', (3025, 3040), True, 'import numpy as np\n'), ((3053, 3076), 'numpy.array', 'np.array', (['[0, 4, -4, 0]'], {}), '([0, 4, -4, 0])\n', (3061, 3076), True, 'import numpy as np\n'), ((3088, 3116), 'liquepy.element.models.ShearTest', 'ShearTest', (['tau_1', 'gamma_1', '(1)'], {}), '(tau_1, gamma_1, 1)\n', (3097, 3116), False, 'from liquepy.element.models import ShearTest\n'), ((3132, 3157), 'liquepy.element.assess.calc_case_et', 'assess.calc_case_et', (['et_1'], {}), '(et_1)\n', (3151, 3157), False, 'from liquepy.element import assess\n'), ((3172, 3200), 'numpy.array', 'np.array', (['[0, 4, 4.1, -2, 0]'], {}), '([0, 4, 4.1, -2, 0])\n', (3180, 3200), True, 'import numpy as np\n'), ((3213, 3239), 'numpy.array', 'np.array', (['[0, 4, 1, -4, 0]'], {}), '([0, 4, 1, -4, 0])\n', (3221, 3239), True, 'import numpy as np\n'), ((3251, 3279), 'liquepy.element.models.ShearTest', 'ShearTest', (['tau_2', 'gamma_2', '(1)'], {}), '(tau_2, gamma_2, 1)\n', (3260, 3279), False, 'from liquepy.element.models import ShearTest\n'), ((3295, 3320), 'liquepy.element.assess.calc_case_et', 'assess.calc_case_et', (['et_2'], {}), '(et_2)\n', (3314, 3320), False, 'from liquepy.element import assess\n'), ((3472, 3497), 'numpy.array', 'np.array', (['[0.0, 1.0, 0.5]'], {}), '([0.0, 1.0, 0.5])\n', (3480, 3497), True, 'import numpy as np\n'), ((3506, 3529), 'numpy.array', 'np.array', (['[0.0, 1.0, 0]'], {}), '([0.0, 1.0, 0])\n', (3514, 
3529), True, 'import numpy as np\n'), ((3604, 3625), 'liquepy.element.models.ShearTest', 'ShearTest', (['tau', 'gamma'], {}), '(tau, gamma)\n', (3613, 3625), False, 'from liquepy.element.models import ShearTest\n'), ((3639, 3662), 'liquepy.element.assess.calc_case_et', 'assess.calc_case_et', (['et'], {}), '(et)\n', (3658, 3662), False, 'from liquepy.element import assess\n'), ((3793, 3817), 'numpy.array', 'np.array', (['[0.0, 1.0, -1]'], {}), '([0.0, 1.0, -1])\n', (3801, 3817), True, 'import numpy as np\n'), ((3826, 3850), 'numpy.array', 'np.array', (['[0.0, 1.0, -1]'], {}), '([0.0, 1.0, -1])\n', (3834, 3850), True, 'import numpy as np\n'), ((3885, 3906), 'liquepy.element.models.ShearTest', 'ShearTest', (['tau', 'gamma'], {}), '(tau, gamma)\n', (3894, 3906), False, 'from liquepy.element.models import ShearTest\n'), ((3920, 3943), 'liquepy.element.assess.calc_case_et', 'assess.calc_case_et', (['et'], {}), '(et)\n', (3939, 3943), False, 'from liquepy.element import assess\n'), ((4073, 4095), 'numpy.array', 'np.array', (['[1.0, -1, 1]'], {}), '([1.0, -1, 1])\n', (4081, 4095), True, 'import numpy as np\n'), ((4105, 4127), 'numpy.array', 'np.array', (['[1.0, -1, 1]'], {}), '([1.0, -1, 1])\n', (4113, 4127), True, 'import numpy as np\n'), ((4162, 4183), 'liquepy.element.models.ShearTest', 'ShearTest', (['tau', 'gamma'], {}), '(tau, gamma)\n', (4171, 4183), False, 'from liquepy.element.models import ShearTest\n'), ((4197, 4220), 'liquepy.element.assess.calc_case_et', 'assess.calc_case_et', (['et'], {}), '(et)\n', (4216, 4220), False, 'from liquepy.element import assess\n'), ((4348, 4371), 'numpy.array', 'np.array', (['[0, 1, -1, 2]'], {}), '([0, 1, -1, 2])\n', (4356, 4371), True, 'import numpy as np\n'), ((4382, 4405), 'numpy.array', 'np.array', (['[0, 1, -1, 1]'], {}), '([0, 1, -1, 1])\n', (4390, 4405), True, 'import numpy as np\n'), ((4528, 4549), 'liquepy.element.models.ShearTest', 'ShearTest', (['tau', 'gamma'], {}), '(tau, gamma)\n', (4537, 4549), False, 'from 
liquepy.element.models import ShearTest\n'), ((4563, 4586), 'liquepy.element.assess.calc_case_et', 'assess.calc_case_et', (['et'], {}), '(et)\n', (4582, 4586), False, 'from liquepy.element import assess\n'), ((4737, 4756), 'numpy.array', 'np.array', (['[0, 2, 1]'], {}), '([0, 2, 1])\n', (4745, 4756), True, 'import numpy as np\n'), ((4767, 4786), 'numpy.array', 'np.array', (['[0, 2, 1]'], {}), '([0, 2, 1])\n', (4775, 4786), True, 'import numpy as np\n'), ((4908, 4929), 'liquepy.element.models.ShearTest', 'ShearTest', (['tau', 'gamma'], {}), '(tau, gamma)\n', (4917, 4929), False, 'from liquepy.element.models import ShearTest\n'), ((4943, 4966), 'liquepy.element.assess.calc_case_et', 'assess.calc_case_et', (['et'], {}), '(et)\n', (4962, 4966), False, 'from liquepy.element import assess\n'), ((5112, 5131), 'numpy.array', 'np.array', (['[1, 3, 2]'], {}), '([1, 3, 2])\n', (5120, 5131), True, 'import numpy as np\n'), ((5142, 5161), 'numpy.array', 'np.array', (['[1, 2, 0]'], {}), '([1, 2, 0])\n', (5150, 5161), True, 'import numpy as np\n'), ((5283, 5304), 'liquepy.element.models.ShearTest', 'ShearTest', (['tau', 'gamma'], {}), '(tau, gamma)\n', (5292, 5304), False, 'from liquepy.element.models import ShearTest\n'), ((5318, 5341), 'liquepy.element.assess.calc_case_et', 'assess.calc_case_et', (['et'], {}), '(et)\n', (5337, 5341), False, 'from liquepy.element import assess\n'), ((5489, 5514), 'numpy.array', 'np.array', (['[0, 2, 1, 3, 2]'], {}), '([0, 2, 1, 3, 2])\n', (5497, 5514), True, 'import numpy as np\n'), ((5525, 5550), 'numpy.array', 'np.array', (['[0, 2, 1, 2, 0]'], {}), '([0, 2, 1, 2, 0])\n', (5533, 5550), True, 'import numpy as np\n'), ((5763, 5784), 'liquepy.element.models.ShearTest', 'ShearTest', (['tau', 'gamma'], {}), '(tau, gamma)\n', (5772, 5784), False, 'from liquepy.element.models import ShearTest\n'), ((5798, 5821), 'liquepy.element.assess.calc_case_et', 'assess.calc_case_et', (['et'], {}), '(et)\n', (5817, 5821), False, 'from liquepy.element import 
assess\n'), ((5942, 5975), 'numpy.array', 'np.array', (['[0, 1, 0.5, 1.5, -1, 2]'], {}), '([0, 1, 0.5, 1.5, -1, 2])\n', (5950, 5975), True, 'import numpy as np\n'), ((5986, 6017), 'numpy.array', 'np.array', (['[0, 1, 0.5, 1, -1, 1]'], {}), '([0, 1, 0.5, 1, -1, 1])\n', (5994, 6017), True, 'import numpy as np\n'), ((6056, 6077), 'liquepy.element.models.ShearTest', 'ShearTest', (['tau', 'gamma'], {}), '(tau, gamma)\n', (6065, 6077), False, 'from liquepy.element.models import ShearTest\n'), ((6091, 6114), 'liquepy.element.assess.calc_case_et', 'assess.calc_case_et', (['et'], {}), '(et)\n', (6110, 6114), False, 'from liquepy.element import assess\n'), ((6244, 6373), 'numpy.array', 'np.array', (['[0, 1.0, 2.0, 3.0, 4.0, 5.0, 5.5, 5.5, 4.0, 3.0, 2.5, 2.0, 1.0, 0.0, -1, -2,\n -5, 1, 3, 3.5, 2.5, 3.5, 2.5, -1, -3]'], {}), '([0, 1.0, 2.0, 3.0, 4.0, 5.0, 5.5, 5.5, 4.0, 3.0, 2.5, 2.0, 1.0, \n 0.0, -1, -2, -5, 1, 3, 3.5, 2.5, 3.5, 2.5, -1, -3])\n', (6252, 6373), True, 'import numpy as np\n'), ((6388, 6528), 'numpy.array', 'np.array', (['[0, 0.5, 1.0, 1.5, 2.5, 3.0, 4.25, 5.5, 5.5, 5.25, 5.5, 5.25, 4.0, 3.0, 1.5,\n 0.5, -3, -2, -1, -0.5, -0.75, 1.5, 1.0, -1.5, -5]'], {}), '([0, 0.5, 1.0, 1.5, 2.5, 3.0, 4.25, 5.5, 5.5, 5.25, 5.5, 5.25, 4.0,\n 3.0, 1.5, 0.5, -3, -2, -1, -0.5, -0.75, 1.5, 1.0, -1.5, -5])\n', (6396, 6528), True, 'import numpy as np\n'), ((6550, 6602), 'liquepy.element.assess.get_energy_peaks_for_cyclic_loading', 'assess.get_energy_peaks_for_cyclic_loading', (['(-fs)', '(-ds)'], {}), '(-fs, -ds)\n', (6592, 6602), False, 'from liquepy.element import assess\n'), ((6618, 6646), 'numpy.array', 'np.array', (['[0, 7, 16, 21, 24]'], {}), '([0, 7, 16, 21, 24])\n', (6626, 6646), True, 'import numpy as np\n'), ((490, 558), 'numpy.array', 'np.array', (['[0, -1, -2, -3, -4, -3, -2, -1, 0, 1, 2, 3, 4, 3, 2, 1, 0]'], {}), '([0, -1, -2, -3, -4, -3, -2, -1, 0, 1, 2, 3, 4, 3, 2, 1, 0])\n', (498, 558), True, 'import numpy as np\n'), ((843, 856), 'numpy.sin', 'np.sin', (['angle'], {}), 
'(angle)\n', (849, 856), True, 'import numpy as np\n'), ((871, 884), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (877, 884), True, 'import numpy as np\n'), ((380, 417), 'liquepy.element.assess.calc_diss_energy_fd', 'assess.calc_diss_energy_fd', (['tau', 'strs'], {}), '(tau, strs)\n', (406, 417), False, 'from liquepy.element import assess\n'), ((705, 742), 'liquepy.element.assess.calc_diss_energy_fd', 'assess.calc_diss_energy_fd', (['tau', 'strs'], {}), '(tau, strs)\n', (731, 742), False, 'from liquepy.element import assess\n'), ((961, 998), 'liquepy.element.assess.calc_diss_energy_fd', 'assess.calc_diss_energy_fd', (['tau', 'strs'], {}), '(tau, strs)\n', (987, 998), False, 'from liquepy.element import assess\n'), ((1111, 1124), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (1117, 1124), True, 'import numpy as np\n'), ((1143, 1156), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (1149, 1156), True, 'import numpy as np\n'), ((1238, 1275), 'liquepy.element.assess.calc_diss_energy_fd', 'assess.calc_diss_energy_fd', (['tau', 'strs'], {}), '(tau, strs)\n', (1264, 1275), False, 'from liquepy.element import assess\n'), ((1557, 1594), 'liquepy.element.assess.calc_diss_energy_fd', 'assess.calc_diss_energy_fd', (['tau', 'strs'], {}), '(tau, strs)\n', (1583, 1594), False, 'from liquepy.element import assess\n')] |
#! /usr/bin/env python
import geometry_msgs.msg
import rospy
import tf2_ros
from arc_utilities import transformation_helper
from geometry_msgs.msg import Pose
class TF2Wrapper:
    """Convenience wrapper around the tf2_ros buffer/listener/broadcaster API."""

    def __init__(self):
        # The listener subscribes to /tf and /tf_static and fills the buffer.
        self.tf_buffer = tf2_ros.Buffer()
        self.tf_listener = tf2_ros.TransformListener(self.tf_buffer)
        self.tf_broadcaster = tf2_ros.TransformBroadcaster()
        # Static broadcasters are stored here so they stay referenced;
        # presumably a dropped StaticTransformBroadcaster would stop
        # latching its transform — TODO confirm against tf2_ros docs.
        self.tf_static_broadcasters = []

    # NOTE(review): the Duration/Time default arguments below are evaluated
    # once at class-definition time (standard Python semantics). That is fine
    # as long as they are never mutated, which this class does not do.
    def get_transform(self,
                      parent,
                      child,
                      verbose=True,
                      spin_delay=rospy.Duration(secs=0, nsecs=500 * 1000 * 1000),
                      time=rospy.Time()):
        """
        Waits for a transform to become available. Blocks until a transform is available or an exception is raised.

        :param parent: frame name for the parent (see below)
        :param child: frame name for the child (see below)
        :param verbose: If verbose is True, then output messages are sent on rosinfo as the function waits for
        a transform, otherwise on rosdebug
        :param spin_delay: How long to wait between output messages
        :param time: The timepoint to request a transform at. Defaults to "latest available".
        :return: A matrix representation of the transform (numpy). Returns None if a tf2 exception is raised.

        The notation here follows the following convention:
        p_measured_in_parent = returned_transform * p_measured_in_child
        p_measured_in_target = returned_transform * p_measured_in_source
        """
        try:
            transform = self.get_transform_msg(parent=parent, child=child, verbose=verbose, spin_delay=spin_delay,
                                               time=time)
        except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException):
            # Lookup failed (unknown frame, disconnected tree, or extrapolation).
            rospy.logerr("No transform available: %s to %s", parent, child)
            return None

        # Convert the geometry_msgs transform into a 4x4 homogeneous matrix.
        return transformation_helper.BuildMatrixRos(transform.transform.translation, transform.transform.rotation)

    def get_transform_msg(self,
                          parent,
                          child,
                          verbose=True,
                          spin_delay=rospy.Duration(secs=0, nsecs=500 * 1000 * 1000),
                          time=rospy.Time()):
        """Block until the parent->child transform exists, then return the raw TransformStamped.

        Raises KeyboardInterrupt if ROS shuts down while waiting; may raise
        tf2_ros lookup exceptions from lookup_transform.
        """
        while not self.tf_buffer.can_transform(target_frame=parent, source_frame=child,
                                               time=time, timeout=spin_delay):
            if rospy.is_shutdown():
                raise KeyboardInterrupt("ROS has shutdown")
            if verbose:
                rospy.loginfo("Waiting for TF frames %s and %s", parent, child)
            else:
                rospy.logdebug("Waiting for TF frames %s and %s", parent, child)
        transform = self.tf_buffer.lookup_transform(target_frame=parent, source_frame=child, time=time)
        return transform

    def send_transform_matrix(self, transform, parent, child, is_static=False, time=None):
        """
        :param parent: frame name for the parent (see below)
        :param child: frame name for the child (see below)
        :param transform: A matrix representation of the transform (presumably numpy)
        :param time: The timestamp for the transform, defaults to now()

        The notation here follows the following convention:
        p_measured_in_parent = transform * p_measured_in_child
        p_measured_in_target = transform * p_measured_in_source
        """
        # Decompose the 4x4 matrix into translation + quaternion, then delegate.
        [translation, quaternion] = transformation_helper.ExtractFromMatrix(transform)
        self.send_transform(translation, quaternion, parent, child, is_static, time)

    def send_transform_from_pose_msg(self, pose: Pose, parent, child, is_static=False, time=None):
        """Broadcast a geometry_msgs Pose as the parent->child transform."""
        if time is None:
            time = rospy.Time.now()
        t = geometry_msgs.msg.TransformStamped()
        t.header.stamp = time
        t.header.frame_id = parent
        t.child_frame_id = child
        t.transform.translation.x = pose.position.x
        t.transform.translation.y = pose.position.y
        t.transform.translation.z = pose.position.z
        t.transform.rotation = pose.orientation
        if is_static:
            # Keep the broadcaster referenced (see __init__) before sending.
            self.tf_static_broadcasters.append(tf2_ros.StaticTransformBroadcaster())
            self.tf_static_broadcasters[-1].sendTransform(t)
        else:
            self.tf_broadcaster.sendTransform(t)

    def send_transform(self, translation, quaternion, parent, child, is_static=False, time=None):
        """
        :param parent: frame name for the parent (see below)
        :param child: frame name for the child (see below)
        :param translation: [x, y, z]
        :param quaternion: [x, y, z, w]
        :param time: The timestamp for the transform, defaults to now()

        The notation here follows the following convention:
        p_measured_in_parent = transform * p_measured_in_child
        p_measured_in_target = transform * p_measured_in_source
        """
        if time is None:
            time = rospy.Time.now()
        t = geometry_msgs.msg.TransformStamped()
        t.header.stamp = time
        t.header.frame_id = parent
        t.child_frame_id = child
        t.transform.translation.x = translation[0]
        t.transform.translation.y = translation[1]
        t.transform.translation.z = translation[2]
        t.transform.rotation.x = quaternion[0]
        t.transform.rotation.y = quaternion[1]
        t.transform.rotation.z = quaternion[2]
        t.transform.rotation.w = quaternion[3]
        if is_static:
            # Keep the broadcaster referenced (see __init__) before sending.
            self.tf_static_broadcasters.append(tf2_ros.StaticTransformBroadcaster())
            self.tf_static_broadcasters[-1].sendTransform(t)
        else:
            self.tf_broadcaster.sendTransform(t)

    def transform_to_frame(self, object_stamped, target_frame, timeout=rospy.Duration(0), new_type=None):
        """
        Transforms many "stamped" data types between frames. The specific package for the type of stamped object needs
        to be imported prior to use. Examples are tf2_geometry_msgs and tf2_py.
        If new_type is not None, the type specified must have a valid conversion from the input type, else the function
        will raise an exception.

        Example usage:
            from arc_utilities import ros_helpers
            import tf2_geometry_msgs
            ...
            self.tf2 = ros_helpers.TF2Wrapper()
            ...
            p_in_native_frame = PointStamped()
            p_in_native_frame.header.stamp = rospy.Time.now() # This will likely cause an extrapolation warning/exception without a timeout set
            p_in_native_frame.header.frame_id = frame_point_is_measured_in
            p_in_native_frame.point = ...
            p_in_world = self.tf2.transform_to_frame(object_stamped=p_in_native_frame, target_frame=world_frame_name)

        :param object_stamped: The timestamped object the transform.
        :param target_frame: Name of the frame to transform the input into.
        :param timeout: (Optional) Time to wait for the target frame to become available.
        :param new_type: (Optional) Type to convert the object to.
        :return: The transformed, timestamped output, possibly converted to a new type.
        """
        return self.tf_buffer.transform(object_stamped, target_frame, timeout, new_type)
| [
"rospy.logerr",
"rospy.is_shutdown",
"tf2_ros.TransformListener",
"tf2_ros.TransformBroadcaster",
"arc_utilities.transformation_helper.BuildMatrixRos",
"tf2_ros.Buffer",
"rospy.Time.now",
"rospy.loginfo",
"rospy.Time",
"tf2_ros.StaticTransformBroadcaster",
"rospy.Duration",
"rospy.logdebug",
... | [((229, 245), 'tf2_ros.Buffer', 'tf2_ros.Buffer', ([], {}), '()\n', (243, 245), False, 'import tf2_ros\n'), ((273, 314), 'tf2_ros.TransformListener', 'tf2_ros.TransformListener', (['self.tf_buffer'], {}), '(self.tf_buffer)\n', (298, 314), False, 'import tf2_ros\n'), ((345, 375), 'tf2_ros.TransformBroadcaster', 'tf2_ros.TransformBroadcaster', ([], {}), '()\n', (373, 375), False, 'import tf2_ros\n'), ((574, 621), 'rospy.Duration', 'rospy.Duration', ([], {'secs': '(0)', 'nsecs': '(500 * 1000 * 1000)'}), '(secs=0, nsecs=500 * 1000 * 1000)\n', (588, 621), False, 'import rospy\n'), ((650, 662), 'rospy.Time', 'rospy.Time', ([], {}), '()\n', (660, 662), False, 'import rospy\n'), ((1984, 2087), 'arc_utilities.transformation_helper.BuildMatrixRos', 'transformation_helper.BuildMatrixRos', (['transform.transform.translation', 'transform.transform.rotation'], {}), '(transform.transform.translation,\n transform.transform.rotation)\n', (2020, 2087), False, 'from arc_utilities import transformation_helper\n'), ((2261, 2308), 'rospy.Duration', 'rospy.Duration', ([], {'secs': '(0)', 'nsecs': '(500 * 1000 * 1000)'}), '(secs=0, nsecs=500 * 1000 * 1000)\n', (2275, 2308), False, 'import rospy\n'), ((2341, 2353), 'rospy.Time', 'rospy.Time', ([], {}), '()\n', (2351, 2353), False, 'import rospy\n'), ((3570, 3620), 'arc_utilities.transformation_helper.ExtractFromMatrix', 'transformation_helper.ExtractFromMatrix', (['transform'], {}), '(transform)\n', (3609, 3620), False, 'from arc_utilities import transformation_helper\n'), ((5887, 5904), 'rospy.Duration', 'rospy.Duration', (['(0)'], {}), '(0)\n', (5901, 5904), False, 'import rospy\n'), ((2538, 2557), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (2555, 2557), False, 'import rospy\n'), ((3850, 3866), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (3864, 3866), False, 'import rospy\n'), ((5077, 5093), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (5091, 5093), False, 'import rospy\n'), ((1880, 1943), 
'rospy.logerr', 'rospy.logerr', (['"""No transform available: %s to %s"""', 'parent', 'child'], {}), "('No transform available: %s to %s', parent, child)\n", (1892, 1943), False, 'import rospy\n'), ((2659, 2722), 'rospy.loginfo', 'rospy.loginfo', (['"""Waiting for TF frames %s and %s"""', 'parent', 'child'], {}), "('Waiting for TF frames %s and %s', parent, child)\n", (2672, 2722), False, 'import rospy\n'), ((2757, 2821), 'rospy.logdebug', 'rospy.logdebug', (['"""Waiting for TF frames %s and %s"""', 'parent', 'child'], {}), "('Waiting for TF frames %s and %s', parent, child)\n", (2771, 2821), False, 'import rospy\n'), ((4289, 4325), 'tf2_ros.StaticTransformBroadcaster', 'tf2_ros.StaticTransformBroadcaster', ([], {}), '()\n', (4323, 4325), False, 'import tf2_ros\n'), ((5653, 5689), 'tf2_ros.StaticTransformBroadcaster', 'tf2_ros.StaticTransformBroadcaster', ([], {}), '()\n', (5687, 5689), False, 'import tf2_ros\n')] |
# Used to retrieve the most current lottery data daily
import re
from time import strptime
from datetime import datetime
from .populate_database import (cash5, powerball,
mega_millions, GAMES)
from .models import (Pick3, Pick4, Cash5,
PowerBall, MegaMillions,
LuckyForLife)
import feedparser
def trim_spaces(data):
    """Return *data* with every space character removed."""
    return data.replace(' ', '')


def format_data(summary, title):
    """Normalise the RSS number and date data for one game.

    Example summary: 21-22-29-38-40
    Example title: Powerball Winning Numbers on Wednesday, January 18, 2017

    :param summary: Dash-separated winning numbers string from the feed.
    :param title: Feed title (or 'published' field for Pick 3 evening)
                  carrying the drawing date.
    :return: (date, numbers) where date is 'YYYY-M-DD'-style and numbers is
             a comma-separated string.
    """
    numbers = summary.replace('-', ',')
    # Strip the Powerball/Power Play/Mega Ball/Megaplier markers, then squeeze
    # spaces and collapse any doubled commas left behind.
    # BUG FIX: the original guard `if 'PB' or 'MB' in numbers:` was always true
    # ('PB' is a truthy literal), so this cleanup always ran; keep it
    # unconditional rather than reintroduce a guard that would change behaviour.
    numbers = re.sub('PB|PP|MB|MP', '', numbers)
    numbers = trim_spaces(numbers).replace(',,', ',')
    if ',' in title:
        # e.g. 'Powerball Winning Numbers on Wednesday, January 18, 2017'
        raw_date = title.split(',')
        date_year = raw_date[2]
        # Convert the abbreviated month name (' Jan', note leading space) to its number.
        date_month = strptime(raw_date[1][0:4].lstrip(), '%b').tm_mon
        date_day = re.findall(r' \d.*$', raw_date[1])
        date_day = trim_spaces(date_day[0])
        if len(date_day) == 1:
            # Zero-pad single-digit days.
            date_day = '0' + date_day[0]
        date_day = trim_spaces(date_day)
        date = date_year + '-' + str(date_month) + '-' + date_day
        date = trim_spaces(date)
    else:
        # Lucky for Life is formatted differently than the rest:
        # title: 'Lucky For Life Winning Numbers on 10/31/2016'
        raw_date = re.findall(r'\d*./\d*./\d*.$', title)
        raw_date = raw_date[0]
        raw_date = datetime.strptime(raw_date, '%m/%d/%Y')
        date = raw_date.strftime('%Y-%m-%d')
    return date, numbers
def write_to_database(date, numbers, model, **kwargs):
    """
    Persist one drawing as a new row of *model*.

    :param date: drawing date string
    :param numbers: comma-separated winning numbers string
    :param model: model class to instantiate (Pick3, Pick4, Cash5, ...)

    Optional keyword arguments: ``time`` (drawing time, Pick3/Pick4),
    ``jackpot`` (Cash5), ``powerball`` (PowerBall), ``megaball`` and
    ``multiplier`` (MegaMillions).
    """
    time = kwargs.get('time', None)
    jackpot = kwargs.get('jackpot', None)
    _powerball = kwargs.get('powerball', None)
    megaball = kwargs.get('megaball', None)
    multiplier = kwargs.get('multiplier', None)
    row_data = model()
    # BUG FIX: the original condition was
    # ``time and model == Pick3 or model == Pick4`` which, by precedence,
    # set drawing_time (possibly to None) for every Pick4 row even when no
    # time was supplied.
    if time and (model == Pick3 or model == Pick4):
        row_data.drawing_time = time
    if jackpot and model == Cash5:
        row_data.jackpot = jackpot
    # BUG FIX: the original tested ``if powerball and ...`` which referenced
    # the always-truthy ``powerball`` function imported from
    # populate_database, not the ``_powerball`` keyword argument.
    if _powerball and model == PowerBall:
        row_data.powerball = _powerball
    if megaball and multiplier and model == MegaMillions:
        row_data.megaball = megaball
        row_data.multiplier = multiplier
    row_data.drawing_date = date
    row_data.drawing_numbers = numbers
    row_data.save()
def scrape_rss():
    """
    Pull the latest results from the NC lottery RSS feed and store the
    Pick3, Pick4 and Lucky For Life drawings.

    Each entry carries the numbers in ``summary`` and the game name in
    ``title``.  The date lives in ``title`` for every game except the
    Carolina Pick 3 evening drawing, e.g.:

        'title': 'Carolina Pick 3 Evening Winning Numbers',
        'summary': '4-4-8',
    """
    feed_url = 'http://www.nc-educationlottery.org/rss_winning_numbers.aspx'
    feed = feedparser.parse(feed_url)
    for entry in feed['entries']:
        numbers_raw = entry['summary']  # number data
        game_title = entry['title']     # game name and date
        if 'Carolina Pick 3 Daytime' in game_title:
            date, numbers = format_data(numbers_raw, game_title)
            write_to_database(date, numbers, Pick3, time="D")
        elif 'Carolina Pick 3 Evening' in game_title:
            # Date is in 'published' instead of the title.
            # 'published': u'Monday, October 31, 2016'
            date, numbers = format_data(numbers_raw, entry['published'])
            write_to_database(date, numbers, Pick3, time="E")
        elif 'Carolina Pick 4 Daytime' in game_title:
            date, numbers = format_data(numbers_raw, game_title)
            write_to_database(date, numbers, Pick4, time="D")
        elif 'Carolina Pick 4 Evening' in game_title:
            date, numbers = format_data(numbers_raw, game_title)
            write_to_database(date, numbers, Pick4, time="E")
        elif 'Lucky For Life Winning Numbers' in game_title:
            date, numbers = format_data(numbers_raw, game_title)
            write_to_database(date, numbers, LuckyForLife)
# Module-level driver: refresh today's results from the RSS feed, then run
# the bulk populate routines for the multi-state games.
# NOTE(review): these execute on import — presumably intentional for a
# scheduled task; confirm before importing this module from elsewhere.
scrape_rss()
cash5(GAMES['cash5'])
powerball()
mega_millions()
| [
"feedparser.parse",
"re.sub",
"re.findall",
"datetime.datetime.strptime"
] | [((2842, 2863), 'feedparser.parse', 'feedparser.parse', (['url'], {}), '(url)\n', (2858, 2863), False, 'import feedparser\n'), ((740, 774), 're.sub', 're.sub', (['"""PB|PP|MB|MP"""', '""""""', 'numbers'], {}), "('PB|PP|MB|MP', '', numbers)\n", (746, 774), False, 'import re\n'), ((1077, 1111), 're.findall', 're.findall', (['""" \\\\d.*$"""', 'raw_date[1]'], {}), "(' \\\\d.*$', raw_date[1])\n", (1087, 1111), False, 'import re\n'), ((1530, 1569), 're.findall', 're.findall', (['"""\\\\d*./\\\\d*./\\\\d*.$"""', 'title'], {}), "('\\\\d*./\\\\d*./\\\\d*.$', title)\n", (1540, 1569), False, 'import re\n'), ((1617, 1656), 'datetime.datetime.strptime', 'datetime.strptime', (['raw_date', '"""%m/%d/%Y"""'], {}), "(raw_date, '%m/%d/%Y')\n", (1634, 1656), False, 'from datetime import datetime\n')] |
import git
import os
import time
class GitExtension:
    """Get git info on the site for display on the about page.

    Gathers repository metadata (HEAD commit, per-commit stats, tags, and a
    derived release name) onto the per-request *context* object.
    """
    # Extension plumbing consumed by the host framework.
    first = True
    needs = {'request'}
    provides = {'git'}
    def __init__(self):
        pass
    def start(self, context):
        """Populate *context* with git metadata for the enclosing repository."""
        format_string = "%Y-%m-%d %H:%M:%S %z"
        # Repository root is assumed to be two directories above this file.
        context.repo = git.Repo(os.path.join(os.path.dirname(__file__), '..', '..'))
        context.git_hexsha = context.repo.head.commit.hexsha
        committed_date = context.repo.head.commit.committed_date
        context.git_date = time.strftime(format_string, time.gmtime(committed_date))
        context.git_message = context.repo.head.commit.message
        context.git_name = context.repo.head.commit.committer.name
        context.git_total_commits = len(list(context.repo.iter_commits()))
        context.git_release = None
        context.git_tags = {}
        # Running totals across every commit on master.
        context.git_totals = {'files': 0,
                              'lines': 0,
                              'insertions': 0,
                              'deletions': 0,}
        context.git_commits = []
        for i, c in enumerate(context.repo.iter_commits('master')):
            # Keep (commit, stats) pairs; stats is computed once per commit.
            context.git_commits += [[c, c.stats]]
            for s in ('files', 'lines', 'insertions', 'deletions'):
                context.git_totals[s] += c.stats.total[s]
        for t in context.repo.tags:
            context.git_tags[t.commit.hexsha] = t.name
            # NOTE(review): t.tag is None for lightweight (non-annotated)
            # tags, which would raise AttributeError here — presumably all
            # tags in this repo are annotated; confirm.
            tagged_date = t.tag.tagged_date
            # The release is the newest tag at or before HEAD; a HEAD that
            # is ahead of the tag gets a ' +<short-sha>' suffix.
            if context.git_hexsha == t.commit.hexsha or tagged_date < committed_date:
                context.git_release = t.name
                if tagged_date != committed_date and context.git_hexsha != t.commit.hexsha:
                    context.git_release += ' +' + context.git_hexsha[:7]
                context.git_release_date = time.strftime(format_string, time.gmtime(tagged_date))
| [
"os.path.dirname",
"time.gmtime"
] | [((569, 596), 'time.gmtime', 'time.gmtime', (['committed_date'], {}), '(committed_date)\n', (580, 596), False, 'import time\n'), ((347, 372), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (362, 372), False, 'import os\n'), ((1807, 1831), 'time.gmtime', 'time.gmtime', (['tagged_date'], {}), '(tagged_date)\n', (1818, 1831), False, 'import time\n')] |
"""
Abstraction for NE root shell
"""
import os
import time
from pss1830ssh.pss1830 import PSS1830
from pss1830ssh.pss1830 import PSSException
def get_ec_ip(shelf, ec):
    """Return the internal management IP for equipment controller *ec* on *shelf*."""
    octets = ('100', '0', str(shelf), str(ec))
    return '.'.join(octets)
class PSS1830Root(PSS1830):
    """Wrapper for PSS root mode.

    Drives root-shell sessions on the NE: telnet hops between shelves,
    slots and the standby EC, plus SFTP file retrieval.  The low-level
    helpers (_send/_recv/_expect/_match/_get_prompt/cancel, prompt,
    logger, client, username, password) come from the PSS1830 base class.
    """
    # Regex matching the root prompt on either EC flavour, ACT or STDBY.
    PROMPT_RE = '(root@EC1830-\d+-\d+-ACT:/root[\r\n]*# $)|'\
                '(root@32EC2-\d+-\d+-ACT:[~\r\n]*# $)|'\
                '(root@EC1830-\d+-\d+-STDBY:/root[\r\n]*# $)|'\
                '(root@32EC2-\d+-\d+-STDBY:[~\r\n]*# $)'
    # Looser prompt used after telnetting into a card/slot.
    telnet_prompt_re = '.*# $'
    # Whether the session currently sits on the master shelf (81).
    on_master = True
    # Template for a card's internal management IP.
    slot_ip = '100.0.{shelf}.{slot}'
    def open(self):
        """Open the underlying session and sync up to the first prompt."""
        super(PSS1830Root, self).open()
        self._recv()
        self._get_prompt()
    def close(self):
        """Leave the root shell, then close the underlying session."""
        self._send('exit')
        super(PSS1830Root, self).close()
    def is_on_master(self):
        # True when no slave-shelf telnet hop is active.
        return self.on_master
    def is_on_active(self):
        """Check if it is currently on active EC."""
        return 'ACT' in self.prompt
    def _exit(self):
        # Pop one telnet hop and re-sync the prompt.
        self._send('exit')
        self._get_prompt()
    def _telnet(self, ip):
        """Telnet to *ip*, answering login/password if asked; True on success."""
        self.logger.debug('telnet %s', ip)
        self._send('telnet %s' % ip)
        if self._expect('login:'):
            self._send(self.username)
            time.sleep(1)
            data = self._recv()
            if self._match('Password:', data):
                self._send(self.password)
                self.logger.debug('telnet %s succeeded', ip)
                return True
            elif self._match(self.telnet_prompt_re, data):
                # Some cards drop straight to a shell without a password.
                self.logger.debug('telnet %s succeeded', ip)
                return True
        self.cancel()
        self.logger.debug('telnet %s failed', ip)
        return False
    def login_to_slot(self, shelf, slot):
        """Telnet to a card/slot; raises PSSException on failure."""
        self.logger.debug('Logging in slot: %s/%s', shelf, slot)
        if self._telnet(self.slot_ip.format(shelf=shelf, slot=slot)):
            if self._get_prompt(self.telnet_prompt_re):
                self.logger.debug('Logged in slot: %s/%s', shelf, slot)
                return True
        raise PSSException('Failed to login to slot: %s/%s' % (shelf, slot))
    def logout_from_slot(self):
        """Leave a slot shell (no-op if already back on an active EC)."""
        if not self.is_on_active():
            self.logger.debug('Logging out slot')
            self._exit()
    def login_to_shelf(self, shelf, ec=None, act=True):
        """Telnet to a slave shelf.

        Tries the given EC (or both 1 and 18) until one matches the
        requested active/standby state; raises PSSException otherwise.
        """
        self.logger.debug('Logging in shelf: %s (ec=%s, act=%s)' % (shelf, ec, act))
        ec_cards = [ec] if ec else [1, 18]
        login_ok = False
        for e in ec_cards:
            if self._telnet(get_ec_ip(shelf, e)):
                if self._get_prompt() and self.is_on_active() == act:
                    # Shelf 81 is the master shelf by convention here.
                    self.on_master = shelf == 81
                    login_ok = True
                    break
                elif self.prompt:
                    # Wrong ACT/STDBY state: back out of this EC and try next.
                    self._exit()
            else:
                self.cancel()
        if not login_ok:
            raise PSSException('Failed to login to shelf (shelf=%s, ec=%s)' % (shelf, e))
    def logout_from_shelf(self):
        """Logout from a slave shelf."""
        self.logger.debug('Logging out shelf')
        if not self.is_on_master():
            self._exit()
            if not self.prompt:
                raise PSSException('Logout from an EC failed. Failed to get the prompt')
    def login_to_stdby(self):
        """Login to Standby EC."""
        if self.is_on_active() and self.is_on_master():
            self.logger.debug('Logging in standby EC')
            # Prompt looks like 'root@32EC2-<shelf>-<ec>-ACT...'; field 2 is the EC slot.
            act_ec = int(self.prompt.split('-')[2])
            stdby_ec = 1 if act_ec == 18 else 18
            self.login_to_shelf(shelf=81, ec=stdby_ec, act=False)
    def logout_from_stdby(self):
        """Return from the standby EC to the active one."""
        if not self.is_on_active():
            self.logger.debug('Logging out standby EC')
            self._exit()
    def get_file(self, remotepath, localpath, callback=None, recursive=True):
        """Get files from the NE to the local machine via SFTP.

        With recursive=True, *remotepath* is treated as a directory and every
        entry in it is copied into the *localpath* directory (one level only);
        otherwise a single file is copied.  *callback* is passed through to
        paramiko's SFTP get().
        """
        self.logger.debug('Openning SFTP')
        scp = self.client.open_sftp()
        if recursive:
            scp.chdir(remotepath)
            for fname in scp.listdir():
                remote = os.path.join(remotepath, fname)
                local = os.path.join(localpath, fname)
                self.logger.debug('Transferring: %s to %s' % (remote, local))
                scp.get(remote, local, callback)
        else:
            self.logger.debug('Transferring: %s to %s' % (remotepath, localpath))
            scp.get(remotepath, localpath, callback)
        scp.close()
| [
"os.path.join",
"time.sleep",
"pss1830ssh.pss1830.PSSException"
] | [((2206, 2268), 'pss1830ssh.pss1830.PSSException', 'PSSException', (["('Failed to login to slot: %s/%s' % (shelf, slot))"], {}), "('Failed to login to slot: %s/%s' % (shelf, slot))\n", (2218, 2268), False, 'from pss1830ssh.pss1830 import PSSException\n'), ((1332, 1345), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1342, 1345), False, 'import time\n'), ((3109, 3180), 'pss1830ssh.pss1830.PSSException', 'PSSException', (["('Failed to login to shelf (shelf=%s, ec=%s)' % (shelf, e))"], {}), "('Failed to login to shelf (shelf=%s, ec=%s)' % (shelf, e))\n", (3121, 3180), False, 'from pss1830ssh.pss1830 import PSSException\n'), ((3426, 3492), 'pss1830ssh.pss1830.PSSException', 'PSSException', (['"""Logout from an EC failed. Failed to get the prompt"""'], {}), "('Logout from an EC failed. Failed to get the prompt')\n", (3438, 3492), False, 'from pss1830ssh.pss1830 import PSSException\n'), ((4362, 4393), 'os.path.join', 'os.path.join', (['remotepath', 'fname'], {}), '(remotepath, fname)\n', (4374, 4393), False, 'import os\n'), ((4419, 4449), 'os.path.join', 'os.path.join', (['localpath', 'fname'], {}), '(localpath, fname)\n', (4431, 4449), False, 'import os\n')] |
#!/usr/bin/env python
"""
mbed
Copyright (c) 2017-2017 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import sys
import os
import argparse
from os.path import join, abspath, dirname
from flash_algo import PackFlashAlgo
from fuzzywuzzy import process
from itertools import takewhile
# Be sure that the tools directory is in the search path
ROOT = abspath(join(dirname(__file__), "..", ".."))
sys.path.insert(0, ROOT)
from tools.targets import TARGETS
from tools.arm_pack_manager import Cache
TEMPLATE_PATH = join(dirname(__file__),"c_blob.tmpl")
# TODO
# FIXED LENGTH - remove and these (shrink offset to 4 for bkpt only)
BLOB_HEADER = '0xE00ABE00, 0x062D780D, 0x24084068, 0xD3000040, 0x1E644058, 0x1C49D1FA, 0x2A001E52, 0x4770D1F2,'
HEADER_SIZE = 0x20
def str_to_num(val):
    """Convert a numeric string to int, honouring 0x/0o/0b prefixes."""
    AUTO_BASE = 0  # base 0 lets int() infer the radix from the prefix
    return int(val, AUTO_BASE)
def find_possible(match, choices):
    """Return fuzzywuzzy's best (choice, score) match for *match* in *choices*.

    Score is in the range 0-100; callers treat >= 90 as a confident match.
    """
    return process.extractOne(match, choices)
def main():
    """Generate flash algorithm C blobs for mbed targets from CMSIS packs.

    Modes (mutually exclusive, checked in order): rebuild the whole pack
    cache, rebuild only the descriptors, or generate algos for one target
    (--target) or for every mbed target with a device_name.
    """
    parser = argparse.ArgumentParser(description='Flash generator')
    parser.add_argument("--rebuild_all", action="store_true",
                        help="Rebuild entire cache")
    parser.add_argument("--rebuild_descriptors", action="store_true",
                        help="Rebuild descriptors")
    parser.add_argument("--target", default=None,
                        help="Name of target to generate algo for")
    parser.add_argument("--daplink", default=None,
                        help="Root location of daplink")
    parser.add_argument("--all", action="store_true",
                        help="Build all flash algos for devcies")
    parser.add_argument("--blob_start", default=0x20000000, type=str_to_num, help="Starting "
                        "address of the flash blob. Used only for DAPLink.")
    args = parser.parse_args()
    cache = Cache(True, True)
    if args.rebuild_all:
        cache.cache_everything()
        print("Cache rebuilt")
        return
    if args.rebuild_descriptors:
        cache.cache_descriptors()
        print("Descriptors rebuilt")
        return
    # Pairs of (CMSIS device name, sanitized output file stem).
    if args.target is None:
        device_and_filenames = [(target.device_name, target.name.lower()) for target
                                in TARGETS if hasattr(target, "device_name")]
    else:
        device_and_filenames = [(args.target, args.target.replace("/", "-"))]
    try:
        os.mkdir("output")
    except OSError:
        # Directory already exists
        pass
    target_to_file = get_daplink_files(args.daplink)
    # Stack grows down from 2 KiB above the blob's load address.
    SP = args.blob_start + 2048
    data_dict = {
        'prog_header': BLOB_HEADER,
        'header_size': HEADER_SIZE,
        'entry': args.blob_start,
        'stack_pointer': SP,
    }
    print(len(target_to_file.keys()))
    print(len(device_and_filenames))
    added = []
    for device, mbed_target in device_and_filenames:
        dev = cache.index[device]
        # Fall back to fuzzy matching (threshold 90) when the mbed target
        # name does not map directly onto a DAPLink target directory.
        if(mbed_target not in target_to_file):
            fuzz1 = find_possible(mbed_target, target_to_file.keys())
            fuzz2 = find_possible(device, target_to_file.keys())
            if fuzz1[1] >= 90:
                mbed_target = fuzz1[0]
            elif fuzz2[1] >= 90:
                mbed_target = fuzz2[0]
            else:
                continue
        added.append(mbed_target)
        binaries = cache.get_flash_algorthim_binary(device, all=True)
        algos = [PackFlashAlgo(binary.read()) for binary in binaries]
        filtered_algos = algos if args.all else filter_algos(dev, algos)
        for idx, algo in enumerate(filtered_algos):
            # Render the C blob over the DAPLink flash_blob.c for this target.
            algo.process_template(TEMPLATE_PATH, target_to_file[mbed_target], data_dict)
        print("%s: %s \r" % (device, target_to_file[mbed_target]))
    # Targets that got no algo still need a stub so the C sources compile.
    write_missing_symbols([dev for dev in target_to_file.keys() if dev not in added], target_to_file)
def write_missing_symbols(missing_devices, target_to_file):
    """Append an empty sectors_info array to the flash file of every device
    that received no generated algorithm, keeping the C sources compilable."""
    placeholder = 'static const uint32_t sectors_info[] = {};'
    for device_name in missing_devices:
        with open(target_to_file[device_name], 'a') as flash_src:
            flash_src.write(placeholder)
def get_daplink_files(daplink_root):
    """Walk the DAPLink source tree and map each target directory name to
    the path of its flash_blob.c file."""
    targets_dir = join(daplink_root, 'source', 'target')
    blob_by_target = {}
    print(os.getcwd())
    for current_dir, _subdirs, filenames in os.walk(targets_dir):
        if 'flash_blob.c' not in filenames:
            continue
        target_name = os.path.basename(os.path.normpath(current_dir))
        blob_by_target[target_name] = join(current_dir, 'flash_blob.c')
    return blob_by_target
def filter_algos(dev, algos):
    """Narrow *algos* down to the single algorithm matching the device's
    IROM1 region; return *algos* unchanged whenever that is ambiguous.

    The narrowing only applies to devices with exactly one flash region
    (IROM1 + PROGRAM_FLASH present, no IROM2) and parseable bounds.
    """
    if "memory" not in dev:
        return algos
    memory = dev["memory"]
    if "IROM1" not in memory or "PROGRAM_FLASH" not in memory:
        return algos
    if "IROM2" in memory:
        return algos
    region = memory["IROM1"]
    try:
        flash_start = int(region["start"], 0)
        flash_size = int(region["size"], 0)
    except ValueError:
        return algos
    candidates = [algo for algo in algos
                  if algo.flash_start == flash_start and algo.flash_size == flash_size]
    return candidates if len(candidates) == 1 else algos
if __name__ == '__main__':
    # Command-line entry point.
    main()
| [
"sys.path.insert",
"argparse.ArgumentParser",
"os.path.join",
"os.getcwd",
"os.path.normpath",
"os.path.dirname",
"fuzzywuzzy.process.extractOne",
"os.mkdir",
"tools.arm_pack_manager.Cache",
"os.walk"
] | [((936, 960), 'sys.path.insert', 'sys.path.insert', (['(0)', 'ROOT'], {}), '(0, ROOT)\n', (951, 960), False, 'import sys\n'), ((1059, 1076), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (1066, 1076), False, 'from os.path import join, abspath, dirname\n'), ((1457, 1491), 'fuzzywuzzy.process.extractOne', 'process.extractOne', (['match', 'choices'], {}), '(match, choices)\n', (1475, 1491), False, 'from fuzzywuzzy import process\n'), ((1555, 1609), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Flash generator"""'}), "(description='Flash generator')\n", (1578, 1609), False, 'import argparse\n'), ((2408, 2425), 'tools.arm_pack_manager.Cache', 'Cache', (['(True)', '(True)'], {}), '(True, True)\n', (2413, 2425), False, 'from tools.arm_pack_manager import Cache\n'), ((4729, 4767), 'os.path.join', 'join', (['daplink_root', '"""source"""', '"""target"""'], {}), "(daplink_root, 'source', 'target')\n", (4733, 4767), False, 'from os.path import join, abspath, dirname\n'), ((4842, 4866), 'os.walk', 'os.walk', (['daplink_targets'], {}), '(daplink_targets)\n', (4849, 4866), False, 'import os\n'), ((904, 921), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (911, 921), False, 'from os.path import join, abspath, dirname\n'), ((2948, 2966), 'os.mkdir', 'os.mkdir', (['"""output"""'], {}), "('output')\n", (2956, 2966), False, 'import os\n'), ((4802, 4813), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (4811, 4813), False, 'import os\n'), ((4991, 5017), 'os.path.join', 'join', (['root', '"""flash_blob.c"""'], {}), "(root, 'flash_blob.c')\n", (4995, 5017), False, 'from os.path import join, abspath, dirname\n'), ((4942, 4964), 'os.path.normpath', 'os.path.normpath', (['root'], {}), '(root)\n', (4958, 4964), False, 'import os\n')] |
from glob import glob
from reportlab.lib.pagesizes import letter
from reportlab.lib import colors
from reportlab.platypus import SimpleDocTemplate
from reportlab.platypus import Paragraph
from reportlab.platypus import Spacer
from reportlab.platypus import Image
from reportlab.platypus import PageBreak
from reportlab.platypus import Table
from reportlab.platypus import TableStyle
from reportlab.lib.units import cm
from mdloader import LoadMDfile
from stylesconfig import styles
class PDFBuilder:
    """Build a single PDF for one bootcamp day directory.

    Reads <dir>/<dir>.md as the cover/main page and every <dir>/ex*/*.md
    as exercise sections, rendering each parsed markdown element through
    reportlab platypus flowables.
    """
    def __init__(self, directory):
        # The directory name doubles as the output PDF stem.
        self.name = directory
        self.doc = SimpleDocTemplate(
            self.name + ".pdf",
            pagesize=letter,
            rightMargin=72,
            leftMargin=72,
            topMargin=72,
            bottomMargin=42,
        )
        self.styles = styles
    @staticmethod
    def BackGroundSetup(canvas, _):
        """Paint the 42-ai background image across the full page."""
        background = "tools/background_42_ai.png"
        canvas.saveState()
        width, height = letter
        canvas.drawImage(background, 0, 0, width, height)
        canvas.restoreState()
    def FirstPage(self, Story):
        """Append the cover page (titles + logo) followed by a page break."""
        Story.append(Spacer(1, 2 * cm))
        Story.append(Paragraph("Bootcamp", self.styles["main_title"]))
        Story.append(Spacer(1, 12))
        Story.append(Paragraph("Python", self.styles["main_title"]))
        Story.append(Spacer(1, 4 * cm))
        im = Image("tools/logo_v4_noir.png", 6 * cm, 6 * cm)
        Story.append(im)
        Story.append(PageBreak())
        return Story
    def MainPage(self, Story, filename):
        """Render the day's main markdown file after the cover page.

        The first parsed element (data[0]) is skipped — presumably the
        document title; confirm against LoadMDfile.  Unlike BuildExercice,
        'image' elements are not handled here and fall through to addOther.
        """
        data = LoadMDfile().readfile(filename)
        Story = self.FirstPage(Story)
        for tType, Content in data[1:]:
            if tType == "space":
                Story.append(Spacer(1, 12))
                continue
            if tType == "table":
                Story = self.addTable(Story, Content)
                continue
            if tType == "code":
                Story = self.addCode(Story, Content)
                continue
            if tType == "h1":
                Story = self.addH1(Story, Content)
                continue
            if tType == "h2":
                Story = self.addH2(Story, Content)
                continue
            if tType == "h3":
                Story = self.addH3(Story, Content)
                continue
            if tType == "list":
                Story = self.addList(Story, Content)
                continue
            Story = self.addOther(Story, Content)
        Story.append(PageBreak())
        return Story
    def TableFormater(self, content):
        """Wrap every cell of a 2-D table in a Paragraph for platypus."""
        table = []
        for line in content:
            table.append([Paragraph(elem, self.styles["ai_other"]) for elem in line])
        return table
    def addH1(self, Story, Content):
        """Append a level-1 heading plus trailing space."""
        Story.append(Paragraph(Content, self.styles["ai_h1"]))
        Story.append(Spacer(1, 1 * cm))
        return Story
    def addH2(self, Story, Content):
        """Append a level-2 heading."""
        Story.append(Paragraph(Content, self.styles["ai_h2"]))
        return Story
    def addH3(self, Story, Content):
        """Append a level-3 heading."""
        Story.append(Paragraph(Content, self.styles["ai_h3"]))
        return Story
    def addImage(self, Story, Content):
        """Append a 12x8 cm image, centered and boxed via a 1-cell table."""
        table = Table(
            data=[[Image(Content, 12*cm, 8*cm)]],
            colWidths=12*cm,
            rowHeights=8*cm,
            style=[
                ('ALIGN', (0, 0), (0, 0), 'CENTER'),
                ('BOX', (0, 0), (0, 0), 1, colors.black),
                ('VALIGN', (0, 0), (0, 0), 'MIDDLE'),
            ]
        )
        Story.append(table)
        return Story
    def addTable(self, Story, Content):
        """Append a two-column bordered table plus trailing space."""
        table = Table(
            self.TableFormater(Content), colWidths=[5 * cm, 10 * cm], hAlign="LEFT"
        )
        table.setStyle(
            TableStyle(
                [
                    ("INNERGRID", (0, 0), (-1, -1), 0.25, colors.black),
                    ("BOX", (0, 0), (-1, -1), 0.25, colors.black),
                ]
            )
        )
        Story.append(table)
        Story.append(Spacer(1, 1 * cm))
        return Story
    def addCode(self, Story, Content):
        """Append a code paragraph with small spacers around it."""
        Story.append(Spacer(1, 5))
        Story.append(Paragraph(Content, self.styles["ai_code"]))
        Story.append(Spacer(1, 5))
        return Story
    def addList(self, Story, Content):
        """Append a list-item paragraph."""
        Story.append(Paragraph(Content, self.styles["ai_list"]))
        return Story
    def addOther(self, Story, Content):
        """Append a plain paragraph (fallback for unrecognised elements)."""
        Story.append(Paragraph(Content, self.styles["ai_other"]))
        return Story
    def BuildExercice(self, Story, filename):
        """Render one exercise markdown file, ending with a page break."""
        data = LoadMDfile().readfile(filename)
        for tType, Content in data:
            if tType == "space":
                Story.append(Spacer(1, 12))
                continue
            if tType == "table":
                Story = self.addTable(Story, Content)
                continue
            if tType == "code":
                Story = self.addCode(Story, Content)
                continue
            if tType == "h1":
                Story = self.addH1(Story, Content)
                continue
            if tType == "h2":
                Story = self.addH2(Story, Content)
                continue
            if tType == "h3":
                Story = self.addH3(Story, Content)
                continue
            if tType == "list":
                Story = self.addList(Story, Content)
                continue
            if tType == "image":
                Story = self.addImage(Story, Content)
                continue
            Story = self.addOther(Story, Content)
        Story.append(PageBreak())
        return Story
    def SavePDF(self, Story):
        """Lay out the story and write the PDF, painting the background on every page."""
        self.doc.build(
            Story,
            onFirstPage=PDFBuilder.BackGroundSetup,
            onLaterPages=PDFBuilder.BackGroundSetup,
        )
    def MetaData(self):
        """Set the PDF document metadata (title, author, keywords, ...)."""
        self.doc.title = self.name + ".pdf"
        self.doc.author = "42-ai"
        self.doc.creator = "42-ai"
        self.doc.subject = "Python training exercices"
        self.doc.keywords = ["python", "42", "coding", "training"]
    def Build(self):
        """Assemble the full document: metadata, main page, every exercise, then save."""
        self.MetaData()
        Story = []
        directory = self.name
        main_file = "{directory}/{name}.md".format(directory=directory, name=self.name)
        exercices = glob(directory + "/ex*/*.md")
        exercices.sort()
        Story = self.MainPage(Story, main_file)
        for file in exercices:
            Story = self.BuildExercice(Story, file)
        self.SavePDF(Story)
if __name__ == "__main__":
for day in glob("day*"):
if "." in day:
continue
pdf = PDFBuilder(day)
pdf.Build()
| [
"reportlab.platypus.TableStyle",
"reportlab.platypus.Paragraph",
"reportlab.platypus.Spacer",
"reportlab.platypus.SimpleDocTemplate",
"mdloader.LoadMDfile",
"reportlab.platypus.PageBreak",
"glob.glob",
"reportlab.platypus.Image"
] | [((6507, 6519), 'glob.glob', 'glob', (['"""day*"""'], {}), "('day*')\n", (6511, 6519), False, 'from glob import glob\n'), ((588, 708), 'reportlab.platypus.SimpleDocTemplate', 'SimpleDocTemplate', (["(self.name + '.pdf')"], {'pagesize': 'letter', 'rightMargin': '(72)', 'leftMargin': '(72)', 'topMargin': '(72)', 'bottomMargin': '(42)'}), "(self.name + '.pdf', pagesize=letter, rightMargin=72,\n leftMargin=72, topMargin=72, bottomMargin=42)\n", (605, 708), False, 'from reportlab.platypus import SimpleDocTemplate\n'), ((1370, 1417), 'reportlab.platypus.Image', 'Image', (['"""tools/logo_v4_noir.png"""', '(6 * cm)', '(6 * cm)'], {}), "('tools/logo_v4_noir.png', 6 * cm, 6 * cm)\n", (1375, 1417), False, 'from reportlab.platypus import Image\n'), ((6249, 6278), 'glob.glob', 'glob', (["(directory + '/ex*/*.md')"], {}), "(directory + '/ex*/*.md')\n", (6253, 6278), False, 'from glob import glob\n'), ((1122, 1139), 'reportlab.platypus.Spacer', 'Spacer', (['(1)', '(2 * cm)'], {}), '(1, 2 * cm)\n', (1128, 1139), False, 'from reportlab.platypus import Spacer\n'), ((1162, 1210), 'reportlab.platypus.Paragraph', 'Paragraph', (['"""Bootcamp"""', "self.styles['main_title']"], {}), "('Bootcamp', self.styles['main_title'])\n", (1171, 1210), False, 'from reportlab.platypus import Paragraph\n'), ((1233, 1246), 'reportlab.platypus.Spacer', 'Spacer', (['(1)', '(12)'], {}), '(1, 12)\n', (1239, 1246), False, 'from reportlab.platypus import Spacer\n'), ((1269, 1315), 'reportlab.platypus.Paragraph', 'Paragraph', (['"""Python"""', "self.styles['main_title']"], {}), "('Python', self.styles['main_title'])\n", (1278, 1315), False, 'from reportlab.platypus import Paragraph\n'), ((1338, 1355), 'reportlab.platypus.Spacer', 'Spacer', (['(1)', '(4 * cm)'], {}), '(1, 4 * cm)\n', (1344, 1355), False, 'from reportlab.platypus import Spacer\n'), ((1464, 1475), 'reportlab.platypus.PageBreak', 'PageBreak', ([], {}), '()\n', (1473, 1475), False, 'from reportlab.platypus import PageBreak\n'), ((2488, 2499), 
'reportlab.platypus.PageBreak', 'PageBreak', ([], {}), '()\n', (2497, 2499), False, 'from reportlab.platypus import PageBreak\n'), ((2775, 2815), 'reportlab.platypus.Paragraph', 'Paragraph', (['Content', "self.styles['ai_h1']"], {}), "(Content, self.styles['ai_h1'])\n", (2784, 2815), False, 'from reportlab.platypus import Paragraph\n'), ((2838, 2855), 'reportlab.platypus.Spacer', 'Spacer', (['(1)', '(1 * cm)'], {}), '(1, 1 * cm)\n', (2844, 2855), False, 'from reportlab.platypus import Spacer\n'), ((2937, 2977), 'reportlab.platypus.Paragraph', 'Paragraph', (['Content', "self.styles['ai_h2']"], {}), "(Content, self.styles['ai_h2'])\n", (2946, 2977), False, 'from reportlab.platypus import Paragraph\n'), ((3059, 3099), 'reportlab.platypus.Paragraph', 'Paragraph', (['Content', "self.styles['ai_h3']"], {}), "(Content, self.styles['ai_h3'])\n", (3068, 3099), False, 'from reportlab.platypus import Paragraph\n'), ((3746, 3862), 'reportlab.platypus.TableStyle', 'TableStyle', (["[('INNERGRID', (0, 0), (-1, -1), 0.25, colors.black), ('BOX', (0, 0), (-1, \n -1), 0.25, colors.black)]"], {}), "([('INNERGRID', (0, 0), (-1, -1), 0.25, colors.black), ('BOX', (0,\n 0), (-1, -1), 0.25, colors.black)])\n", (3756, 3862), False, 'from reportlab.platypus import TableStyle\n'), ((4007, 4024), 'reportlab.platypus.Spacer', 'Spacer', (['(1)', '(1 * cm)'], {}), '(1, 1 * cm)\n', (4013, 4024), False, 'from reportlab.platypus import Spacer\n'), ((4108, 4120), 'reportlab.platypus.Spacer', 'Spacer', (['(1)', '(5)'], {}), '(1, 5)\n', (4114, 4120), False, 'from reportlab.platypus import Spacer\n'), ((4143, 4185), 'reportlab.platypus.Paragraph', 'Paragraph', (['Content', "self.styles['ai_code']"], {}), "(Content, self.styles['ai_code'])\n", (4152, 4185), False, 'from reportlab.platypus import Paragraph\n'), ((4208, 4220), 'reportlab.platypus.Spacer', 'Spacer', (['(1)', '(5)'], {}), '(1, 5)\n', (4214, 4220), False, 'from reportlab.platypus import Spacer\n'), ((4304, 4346), 
'reportlab.platypus.Paragraph', 'Paragraph', (['Content', "self.styles['ai_list']"], {}), "(Content, self.styles['ai_list'])\n", (4313, 4346), False, 'from reportlab.platypus import Paragraph\n'), ((4431, 4474), 'reportlab.platypus.Paragraph', 'Paragraph', (['Content', "self.styles['ai_other']"], {}), "(Content, self.styles['ai_other'])\n", (4440, 4474), False, 'from reportlab.platypus import Paragraph\n'), ((5562, 5573), 'reportlab.platypus.PageBreak', 'PageBreak', ([], {}), '()\n', (5571, 5573), False, 'from reportlab.platypus import PageBreak\n'), ((1555, 1567), 'mdloader.LoadMDfile', 'LoadMDfile', ([], {}), '()\n', (1565, 1567), False, 'from mdloader import LoadMDfile\n'), ((4559, 4571), 'mdloader.LoadMDfile', 'LoadMDfile', ([], {}), '()\n', (4569, 4571), False, 'from mdloader import LoadMDfile\n'), ((1727, 1740), 'reportlab.platypus.Spacer', 'Spacer', (['(1)', '(12)'], {}), '(1, 12)\n', (1733, 1740), False, 'from reportlab.platypus import Spacer\n'), ((2635, 2675), 'reportlab.platypus.Paragraph', 'Paragraph', (['elem', "self.styles['ai_other']"], {}), "(elem, self.styles['ai_other'])\n", (2644, 2675), False, 'from reportlab.platypus import Paragraph\n'), ((4689, 4702), 'reportlab.platypus.Spacer', 'Spacer', (['(1)', '(12)'], {}), '(1, 12)\n', (4695, 4702), False, 'from reportlab.platypus import Spacer\n'), ((3205, 3236), 'reportlab.platypus.Image', 'Image', (['Content', '(12 * cm)', '(8 * cm)'], {}), '(Content, 12 * cm, 8 * cm)\n', (3210, 3236), False, 'from reportlab.platypus import Image\n')] |
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Software License Agreement (BSD)
#
# file @data.py
# authors <NAME> <<EMAIL>>
# NovAtel <novatel.com/support>
# copyright Copyright (c) 2012, Clearpath Robotics, Inc., All rights reserved.
# Copyright (c) 2014, NovAtel Inc., All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that
# the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the
# following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
# * Neither the name of Clearpath Robotics nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WAR-
# RANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, IN-
# DIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import rospy
import novatel_msgs.msg
from novatel_span_driver.port import Port
from novatel_span_driver.mapping import msgs
from novatel_span_driver.handlers import MessageHandler
import novatel_span_driver.translator
from io import BytesIO
from threading import Lock
class DataPort(Port):
    """Reader loop for the NovAtel data port.

    Receives raw packets, dispatches each to the translator registered for
    its message id, and keeps per-id packet counters and last-seen GPS
    timestamps.
    """

    def run(self):
        # Set up handlers for translating different novatel messages as they arrive.
        handlers = {}
        pkt_counters = {}
        pkt_times = {}
        for msg_id in msgs.keys():
            handlers[msg_id] = MessageHandler(*msgs[msg_id])
            pkt_counters[msg_id] = 0
            pkt_times[msg_id] = 0

        # Message ids whose translation has already failed once; each bad id
        # is warned about only once.
        bad_pkts = set()
        while not self.finish.is_set():
            try:
                header, pkt_str = self.recv()
                if header is not None:
                    handlers[header.id].handle(BytesIO(pkt_str), header)
                    if header.id not in pkt_counters:
                        pkt_counters[header.id] = 0
                    else:
                        pkt_counters[header.id] += 1
                    pkt_times[header.id] = header.gps_week_seconds  # only track times of msgs that are part of novatel msgs
            except ValueError as e:
                # Some problem in the recv() routine.
                rospy.logwarn(str(e))
                continue
            except KeyError as e:
                if header.id not in handlers and header.id not in pkt_counters:
                    rospy.logwarn("No handler for message id %d" % header.id)
            except novatel_span_driver.translator.TranslatorError:
                if header.id not in bad_pkts:
                    # BUG FIX: the original logged with
                    # "Error parsing %s.%d" % header.id (two placeholders, one
                    # value -> TypeError at log time) and then called
                    # bad_pkts.add(pkt) where ``pkt`` was never defined
                    # (NameError).  Log the offending id and remember it.
                    rospy.logwarn("Error parsing message id %d" % header.id)
                    bad_pkts.add(header.id)
| [
"novatel_span_driver.mapping.msgs.keys",
"novatel_span_driver.handlers.MessageHandler",
"io.BytesIO",
"rospy.logwarn"
] | [((2291, 2302), 'novatel_span_driver.mapping.msgs.keys', 'msgs.keys', ([], {}), '()\n', (2300, 2302), False, 'from novatel_span_driver.mapping import msgs\n'), ((2335, 2364), 'novatel_span_driver.handlers.MessageHandler', 'MessageHandler', (['*msgs[msg_id]'], {}), '(*msgs[msg_id])\n', (2349, 2364), False, 'from novatel_span_driver.handlers import MessageHandler\n'), ((2674, 2690), 'io.BytesIO', 'BytesIO', (['pkt_str'], {}), '(pkt_str)\n', (2681, 2690), False, 'from io import BytesIO\n'), ((3303, 3360), 'rospy.logwarn', 'rospy.logwarn', (["('No handler for message id %d' % header.id)"], {}), "('No handler for message id %d' % header.id)\n", (3316, 3360), False, 'import rospy\n'), ((3495, 3543), 'rospy.logwarn', 'rospy.logwarn', (["('Error parsing %s.%d' % header.id)"], {}), "('Error parsing %s.%d' % header.id)\n", (3508, 3543), False, 'import rospy\n')] |
import re
_VALID_IP_RE = '(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])'
_VALID_HOSTNAME_RE = '(?:(?:[a-zA-Z]|[a-zA-Z][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*(?:[A-Za-z]|[A-Za-z][A-Za-z0-9\-]*[A-Za-z0-9])'
_VALID_NODEID_RE = re.compile('(?:%s|%s):(?P<port>[0-9]+)$' % (_VALID_HOSTNAME_RE, _VALID_IP_RE))
def _validate_nodeid(nodeid):
if '|' in nodeid:
nodeid = nodeid.split('|', 1)[0]
# call from app code
m = _VALID_NODEID_RE.match(nodeid)
if not m: # pragma: no cover
raise ValueError("Node IDs should be in the format '<ip-or-hostname>:<port>': %s" % (nodeid,))
port = int(m.group(1))
if not (0 <= port <= 65535): # pragma: no cover
raise ValueError("Ports should be in the range 0-65535: %d" % (port,))
| [
"re.compile"
] | [((281, 359), 're.compile', 're.compile', (["('(?:%s|%s):(?P<port>[0-9]+)$' % (_VALID_HOSTNAME_RE, _VALID_IP_RE))"], {}), "('(?:%s|%s):(?P<port>[0-9]+)$' % (_VALID_HOSTNAME_RE, _VALID_IP_RE))\n", (291, 359), False, 'import re\n')] |
"""Library of Region objects I use in my research"""
from aospy.region import Region
china_west = Region(
name='china_west',
description='Western China',
lat_bounds=(35, 45),
lon_bounds=(75, 100),
do_land_mask=False
)
china_east = Region(
name='china_east',
description='Eastern China',
lat_bounds=(22, 32),
lon_bounds=(105, 120),
do_land_mask=True
)
globe = Region(
name='globe',
description='Entire globe',
lat_bounds=(-90, 90),
lon_bounds=(0, 360),
do_land_mask=False
)
land = Region(
name='land',
description='Land',
lat_bounds=(-90, 90),
lon_bounds=(0, 360),
do_land_mask=True
)
ocean = Region(
name='ocean',
description='Ocean',
lat_bounds=(-90, 90),
lon_bounds=(0, 360),
do_land_mask='ocean'
)
nh = Region(
name='nh',
description='Northern hemisphere',
lat_bounds=(0, 90),
lon_bounds=(0, 360),
do_land_mask=False
)
sh = Region(
name='sh',
description='Southern hemisphere',
lat_bounds=(-90, 0),
lon_bounds=(0, 360),
do_land_mask=False
)
eh = Region(
name='eh',
description='Eastern hemisphere',
lat_bounds=(-90, 90),
lon_bounds=(0, 180),
do_land_mask=False
)
wh = Region(
name='wh',
description='Western hemisphere',
lat_bounds=(-90, 90),
lon_bounds=(180, 360),
do_land_mask=False
)
tropics = Region(
name='tropics',
description='Tropics (30S-30N)',
lat_bounds=(-30, 30),
lon_bounds=(0, 360),
do_land_mask=False
)
trop_land = Region(
name='tropics_land',
description='All land 30S-30N',
lat_bounds=(-30, 30),
lon_bounds=(0, 360),
do_land_mask=True
)
trop_ocean = Region(
description='All ocean 30S-30N',
name='tropics_ocean',
lat_bounds=(-30, 30),
lon_bounds=(0, 360),
do_land_mask='ocean'
)
deep_tropics = Region(
name='deep_tropics',
description='Deep tropics (10S-10N)',
lat_bounds=(-10, 10),
lon_bounds=(0, 360),
do_land_mask=False
)
atlantic = Region(
name='atlantic',
description='Atlantic Ocean',
do_land_mask='ocean',
# atlantic.mask_bounds=[((-90, 90), (0, 25)), ((-90, 90), (290, 360)),
# # Atlantic 1
# ((xlat(j) ge -90. and (xlon(i) gt 290. or xlon(i) lt 25.)) or $
# (xlat(j) gt 0. and xlat(j) lt 20. and ((xlon(i)+xlat(j)) gt 290.)) or $
# (xlat(j) le 65. and xlat(j) gt 15 and (xlon(i) gt 260. or xlon(i) lt 50.)) or $
# (xlat(j) gt 65.))
# # Atlantic 2
# ((xlat(j) ge -90. and (xlon(i) gt 290. or xlon(i) lt 25.)) or $
# (xlat(j) gt 0. and xlat(j) lt 20. and ((xlon(i)+xlat(j)) gt 290.)) or $
# (xlat(j) le 65. and xlat(j) gt 15 and (xlon(i) gt 260. or xlon(i) lt 50.)) or $
# (xlat(j) gt 65.))
# # Indian
# (xlon(i) le 100.5 or (xlon(i) gt 100.5 and xlon(i) lt 128.5 $
# and (28.*(xlat(j)+14.5)+14.*(xlon(i)-100.5)) le 14.*28.) $
# or (xlon(i) lt 145.5 and xlat(j) lt -29.5))
)
sahel = Region(
name='sahel',
description='African Sahel',
mask_bounds=[((10, 20), (0, 40)), ((10, 20), (342, 360))],
do_land_mask=True
)
sahel2 = Region(
name='sahel2',
description='African Sahel w/ longitude bounds 15W-30E',
mask_bounds=[((10, 20), (0, 30)), ((10, 20), (345, 360))],
do_land_mask=True
)
sahel3 = Region(
name='sahel3',
description=('Western part of African Sahel. Used by some to '
'specify the whole Sahel.'),
mask_bounds=[((10, 20), (0, 10)), ((10, 20), (340, 360))],
do_land_mask=False
)
sahel_north = Region(
name='sahel_north',
description='Northern half of African Sahel',
mask_bounds=[((15, 20), (0, 40)), ((15, 20), (342, 360))],
do_land_mask=True
)
sahel_south = Region(
name='sahel_south',
description='Southern half of African Sahel',
mask_bounds=[((10, 15), (0, 40)), ((10, 15), (342, 360))],
do_land_mask=True
)
sahel_west = Region(
name='sahel_west',
description='Western half of African Sahel',
mask_bounds=[((10, 20), (0, 11)), ((10, 20), (342, 360))],
do_land_mask=True
)
sahel_east = Region(
name='sahel_east',
description='Eastern half of African Sahel',
lat_bounds=(10, 20),
lon_bounds=(11, 40),
do_land_mask=True
)
sahara = Region(
name='sahara',
description='African Sahara, as defined by Biasutti et al 2009',
mask_bounds=[((20, 30), (0, 35)), ((20, 30), (350, 360))],
do_land_mask=True
)
ind_monsoon = Region(
name='ind_monsoon',
description='Indian monsoon',
lat_bounds=(10, 30),
lon_bounds=(60, 100),
do_land_mask=False
)
warm_pool = Region(
name='warm_pool',
description='Indo-Pacific warm pool. Ocean mask',
lat_bounds=(-20, 20),
lon_bounds=(60, 180),
do_land_mask='ocean'
)
wpwp = Region(
name='wpwp',
description='West Pacific Warm Pool',
lat_bounds=(-5, 5),
lon_bounds=(80, 160),
do_land_mask=False
)
epac = Region(
name='epac',
description='East Pacific cold tongue',
lat_bounds=(-5, 5),
lon_bounds=(200, 280),
do_land_mask=False
)
epac_watl = Region(
name='epac_watl',
description='East Pacific and West Atlantic, including C. and S. America',
lat_bounds=(0, 15),
lon_bounds=(240, 300),
do_land_mask=False
)
epac_itcz = Region(
name='epac_itcz',
description='East Pacific ITCZ for NH summer',
lat_bounds=(0, 20),
lon_bounds=(180, 250),
do_land_mask=False
)
atl_itcz = Region(
name='atl_itcz',
description='Atlantic ITCZ for NH summer',
lat_bounds=(0, 20),
lon_bounds=(300, 345),
do_land_mask=False
)
burls_wpac = Region(
name='burls_wpac',
description='Equatorial W. Pacific region used by Burls and Fedorov 2014',
lat_bounds=(-8, 8),
lon_bounds=(130, 205),
do_land_mask=False
)
burls_epac = Region(
name='burls_epac',
description='Equatorial E. Pacific region used by Burls and Fedorov 2014',
lat_bounds=(-8, 8),
lon_bounds=(205, 280),
do_land_mask=False
)
burls_pac = Region(
name='burls_pac',
description='Pacific region used by Burls and Fedorov 2014',
mask_bounds=[(( 15, 65), (100, 260)),
(( 10, 15), (100, 275)),
(( -5, 10), (100, 290)),
((-65, -5), (130, 290))],
do_land_mask='strict_ocean'
)
burls_trop_pac = Region(
name='burls_trop_pac',
description='Tropical Pacific region used by Burls and Fedorov 2014',
mask_bounds=[(( -5, 8), (100, 290)),
(( -8, -5), (130, 290))],
do_land_mask='strict_ocean'
)
burls_ext_pac = Region(
name='burls_ext_pac',
description='Extratropical Pacific region used by Burls and Fedorov 2014',
mask_bounds=[(( 15, 65), (100, 260)),
(( 10, 15), (100, 275)),
(( 8, 10), (100, 290)),
((-65, -8), (130, 290))],
do_land_mask='strict_ocean'
)
nino1_2 = Region(
name='nino1_2',
description='Standard Nino 1+2 regions of equatorial E. Pacific',
lat_bounds=(-10, 0),
lon_bounds=(270, 280),
do_land_mask=False
)
nino3 = Region(
name='nino3',
description='Standard Nino 3 region of equatorial E. Pacific',
lat_bounds=(-5, 5),
lon_bounds=(210, 270),
do_land_mask=False
)
nino3_4 = Region(
name='nino3.4',
description='Standard Nino 3.4 region of equatorial E. Pacific',
lat_bounds=(-5, 5),
lon_bounds=(190, 240),
do_land_mask=False
)
nino4 = Region(
name='nino4',
description='Standard Nino 4 region of equatorial E. Pacific',
lat_bounds=(-5, 5),
lon_bounds=(160, 210),
do_land_mask=False
)
cld_seed_np = Region(
name='cld_seed_np',
description='North Pacific region of Hill & Ming 2012 GRL cloud brightening geoengineering study',
lat_bounds=(10, 30),
lon_bounds=(204, 244),
do_land_mask='ocean'
)
cld_seed_sp = Region(
name='cld_seed_sp',
description='South Pacific region of Hill & Ming 2012 GRL cloud brightening geoengineering study',
lat_bounds=(-30, -5),
lon_bounds=(240, 285),
do_land_mask='ocean'
)
cld_seed_sa = Region(
name='cld_seed_sa',
description='South Atlantic region of Hill & Ming 2012 GRL cloud brightening geoengineering study',
mask_bounds=[((-30, 5), (0, 12)),
((-30, 5), (342, 360))],
do_land_mask='ocean'
)
cld_seed_all = Region(
name='cld_seed_all',
description='All 3 regions from Hill & Ming 2012 GRL',
mask_bounds=[((-30, 5), (0, 12)),
((-30, 5), (342, 360)),
((-30, -5), (240, 285)),
((10, 30), (204, 244))],
do_land_mask='ocean'
)
east_asia_monsoon = Region(
name='east_asia_monsoon',
description='East Asian Monsoon land region',
lat_bounds=(22.5, 40),
lon_bounds=(100, 122.5),
do_land_mask=False
)
extrop = Region(
name='extratropics',
description='Extratropics (poleward of 30S/N)',
mask_bounds=[((-90, -30), (0, 360)),
((30, 90), (0, 360))],
do_land_mask=False
)
nh_tropics = Region(
name='nh_tropics',
description='Northern hemisphere tropics: 0-30N',
lat_bounds=(0, 30),
lon_bounds=(0, 360),
do_land_mask=False
)
sh_tropics = Region(
name='sh_tropics',
description='Southern hemisphere tropics: 30S-0',
lat_bounds=(-30, 0),
lon_bounds=(0, 360),
do_land_mask=False
)
nh_land = Region(
name='nh_land',
description='Northern hemisphere land',
lat_bounds=(0, 90),
lon_bounds=(0, 360),
do_land_mask=True
)
nh_ocean = Region(
name='nh_ocean',
description='Northern hemisphere ocean',
lat_bounds=(0, 90),
lon_bounds=(0, 360),
do_land_mask='ocean'
)
sh_land = Region(
name='sh_land',
description='Southern hemisphere land',
lat_bounds=(-90, 0),
lon_bounds=(0, 360),
do_land_mask=True
)
sh_ocean = Region(
name='sh_ocean',
description='Southern hemisphere ocean',
lat_bounds=(-90, 0),
lon_bounds=(0, 360),
do_land_mask='ocean'
)
extratrop_land = Region(
name='extratrop_land',
description='Extratropical (poleward of 30S/N) land',
mask_bounds=[((-90, -30), (0, 360)),
((30, 90), (0, 360))],
do_land_mask=True
)
extratrop_ocean = Region(
name='extratrop_ocean',
description='Extratropical (poleward of 30S/N) ocean',
mask_bounds=[((-90, -30), (0, 360)),
((30, 90), (0, 360))],
do_land_mask='ocean'
)
nh_extratrop = Region(
name='nh_extratrop',
description='Northern hemisphere extratropics (30-90N)',
lat_bounds=(30, 90),
lon_bounds=(0, 360),
do_land_mask=False
)
sh_extratrop = Region(
name='sh_extratrop',
description='Southern hemisphere extratropics (90S-30S)',
lat_bounds=(-90, -30),
lon_bounds=(0, 360),
do_land_mask=False
)
| [
"aospy.region.Region"
] | [((100, 221), 'aospy.region.Region', 'Region', ([], {'name': '"""china_west"""', 'description': '"""Western China"""', 'lat_bounds': '(35, 45)', 'lon_bounds': '(75, 100)', 'do_land_mask': '(False)'}), "(name='china_west', description='Western China', lat_bounds=(35, 45),\n lon_bounds=(75, 100), do_land_mask=False)\n", (106, 221), False, 'from aospy.region import Region\n'), ((253, 374), 'aospy.region.Region', 'Region', ([], {'name': '"""china_east"""', 'description': '"""Eastern China"""', 'lat_bounds': '(22, 32)', 'lon_bounds': '(105, 120)', 'do_land_mask': '(True)'}), "(name='china_east', description='Eastern China', lat_bounds=(22, 32),\n lon_bounds=(105, 120), do_land_mask=True)\n", (259, 374), False, 'from aospy.region import Region\n'), ((401, 516), 'aospy.region.Region', 'Region', ([], {'name': '"""globe"""', 'description': '"""Entire globe"""', 'lat_bounds': '(-90, 90)', 'lon_bounds': '(0, 360)', 'do_land_mask': '(False)'}), "(name='globe', description='Entire globe', lat_bounds=(-90, 90),\n lon_bounds=(0, 360), do_land_mask=False)\n", (407, 516), False, 'from aospy.region import Region\n'), ((542, 647), 'aospy.region.Region', 'Region', ([], {'name': '"""land"""', 'description': '"""Land"""', 'lat_bounds': '(-90, 90)', 'lon_bounds': '(0, 360)', 'do_land_mask': '(True)'}), "(name='land', description='Land', lat_bounds=(-90, 90), lon_bounds=(0,\n 360), do_land_mask=True)\n", (548, 647), False, 'from aospy.region import Region\n'), ((674, 785), 'aospy.region.Region', 'Region', ([], {'name': '"""ocean"""', 'description': '"""Ocean"""', 'lat_bounds': '(-90, 90)', 'lon_bounds': '(0, 360)', 'do_land_mask': '"""ocean"""'}), "(name='ocean', description='Ocean', lat_bounds=(-90, 90), lon_bounds=\n (0, 360), do_land_mask='ocean')\n", (680, 785), False, 'from aospy.region import Region\n'), ((808, 925), 'aospy.region.Region', 'Region', ([], {'name': '"""nh"""', 'description': '"""Northern hemisphere"""', 'lat_bounds': '(0, 90)', 'lon_bounds': '(0, 360)', 
'do_land_mask': '(False)'}), "(name='nh', description='Northern hemisphere', lat_bounds=(0, 90),\n lon_bounds=(0, 360), do_land_mask=False)\n", (814, 925), False, 'from aospy.region import Region\n'), ((949, 1067), 'aospy.region.Region', 'Region', ([], {'name': '"""sh"""', 'description': '"""Southern hemisphere"""', 'lat_bounds': '(-90, 0)', 'lon_bounds': '(0, 360)', 'do_land_mask': '(False)'}), "(name='sh', description='Southern hemisphere', lat_bounds=(-90, 0),\n lon_bounds=(0, 360), do_land_mask=False)\n", (955, 1067), False, 'from aospy.region import Region\n'), ((1091, 1209), 'aospy.region.Region', 'Region', ([], {'name': '"""eh"""', 'description': '"""Eastern hemisphere"""', 'lat_bounds': '(-90, 90)', 'lon_bounds': '(0, 180)', 'do_land_mask': '(False)'}), "(name='eh', description='Eastern hemisphere', lat_bounds=(-90, 90),\n lon_bounds=(0, 180), do_land_mask=False)\n", (1097, 1209), False, 'from aospy.region import Region\n'), ((1233, 1353), 'aospy.region.Region', 'Region', ([], {'name': '"""wh"""', 'description': '"""Western hemisphere"""', 'lat_bounds': '(-90, 90)', 'lon_bounds': '(180, 360)', 'do_land_mask': '(False)'}), "(name='wh', description='Western hemisphere', lat_bounds=(-90, 90),\n lon_bounds=(180, 360), do_land_mask=False)\n", (1239, 1353), False, 'from aospy.region import Region\n'), ((1382, 1505), 'aospy.region.Region', 'Region', ([], {'name': '"""tropics"""', 'description': '"""Tropics (30S-30N)"""', 'lat_bounds': '(-30, 30)', 'lon_bounds': '(0, 360)', 'do_land_mask': '(False)'}), "(name='tropics', description='Tropics (30S-30N)', lat_bounds=(-30, 30\n ), lon_bounds=(0, 360), do_land_mask=False)\n", (1388, 1505), False, 'from aospy.region import Region\n'), ((1535, 1660), 'aospy.region.Region', 'Region', ([], {'name': '"""tropics_land"""', 'description': '"""All land 30S-30N"""', 'lat_bounds': '(-30, 30)', 'lon_bounds': '(0, 360)', 'do_land_mask': '(True)'}), "(name='tropics_land', description='All land 30S-30N', lat_bounds=(-30,\n 30), 
lon_bounds=(0, 360), do_land_mask=True)\n", (1541, 1660), False, 'from aospy.region import Region\n'), ((1692, 1823), 'aospy.region.Region', 'Region', ([], {'description': '"""All ocean 30S-30N"""', 'name': '"""tropics_ocean"""', 'lat_bounds': '(-30, 30)', 'lon_bounds': '(0, 360)', 'do_land_mask': '"""ocean"""'}), "(description='All ocean 30S-30N', name='tropics_ocean', lat_bounds=(-\n 30, 30), lon_bounds=(0, 360), do_land_mask='ocean')\n", (1698, 1823), False, 'from aospy.region import Region\n'), ((1856, 1988), 'aospy.region.Region', 'Region', ([], {'name': '"""deep_tropics"""', 'description': '"""Deep tropics (10S-10N)"""', 'lat_bounds': '(-10, 10)', 'lon_bounds': '(0, 360)', 'do_land_mask': '(False)'}), "(name='deep_tropics', description='Deep tropics (10S-10N)',\n lat_bounds=(-10, 10), lon_bounds=(0, 360), do_land_mask=False)\n", (1862, 1988), False, 'from aospy.region import Region\n'), ((2018, 2093), 'aospy.region.Region', 'Region', ([], {'name': '"""atlantic"""', 'description': '"""Atlantic Ocean"""', 'do_land_mask': '"""ocean"""'}), "(name='atlantic', description='Atlantic Ocean', do_land_mask='ocean')\n", (2024, 2093), False, 'from aospy.region import Region\n'), ((2890, 3022), 'aospy.region.Region', 'Region', ([], {'name': '"""sahel"""', 'description': '"""African Sahel"""', 'mask_bounds': '[((10, 20), (0, 40)), ((10, 20), (342, 360))]', 'do_land_mask': '(True)'}), "(name='sahel', description='African Sahel', mask_bounds=[((10, 20), (\n 0, 40)), ((10, 20), (342, 360))], do_land_mask=True)\n", (2896, 3022), False, 'from aospy.region import Region\n'), ((3045, 3210), 'aospy.region.Region', 'Region', ([], {'name': '"""sahel2"""', 'description': '"""African Sahel w/ longitude bounds 15W-30E"""', 'mask_bounds': '[((10, 20), (0, 30)), ((10, 20), (345, 360))]', 'do_land_mask': '(True)'}), "(name='sahel2', description=\n 'African Sahel w/ longitude bounds 15W-30E', mask_bounds=[((10, 20), (0,\n 30)), ((10, 20), (345, 360))], do_land_mask=True)\n", (3051, 3210), 
False, 'from aospy.region import Region\n'), ((3229, 3431), 'aospy.region.Region', 'Region', ([], {'name': '"""sahel3"""', 'description': '"""Western part of African Sahel. Used by some to specify the whole Sahel."""', 'mask_bounds': '[((10, 20), (0, 10)), ((10, 20), (340, 360))]', 'do_land_mask': '(False)'}), "(name='sahel3', description=\n 'Western part of African Sahel. Used by some to specify the whole Sahel.',\n mask_bounds=[((10, 20), (0, 10)), ((10, 20), (340, 360))], do_land_mask\n =False)\n", (3235, 3431), False, 'from aospy.region import Region\n'), ((3472, 3631), 'aospy.region.Region', 'Region', ([], {'name': '"""sahel_north"""', 'description': '"""Northern half of African Sahel"""', 'mask_bounds': '[((15, 20), (0, 40)), ((15, 20), (342, 360))]', 'do_land_mask': '(True)'}), "(name='sahel_north', description='Northern half of African Sahel',\n mask_bounds=[((15, 20), (0, 40)), ((15, 20), (342, 360))], do_land_mask\n =True)\n", (3478, 3631), False, 'from aospy.region import Region\n'), ((3655, 3814), 'aospy.region.Region', 'Region', ([], {'name': '"""sahel_south"""', 'description': '"""Southern half of African Sahel"""', 'mask_bounds': '[((10, 15), (0, 40)), ((10, 15), (342, 360))]', 'do_land_mask': '(True)'}), "(name='sahel_south', description='Southern half of African Sahel',\n mask_bounds=[((10, 15), (0, 40)), ((10, 15), (342, 360))], do_land_mask\n =True)\n", (3661, 3814), False, 'from aospy.region import Region\n'), ((3837, 3994), 'aospy.region.Region', 'Region', ([], {'name': '"""sahel_west"""', 'description': '"""Western half of African Sahel"""', 'mask_bounds': '[((10, 20), (0, 11)), ((10, 20), (342, 360))]', 'do_land_mask': '(True)'}), "(name='sahel_west', description='Western half of African Sahel',\n mask_bounds=[((10, 20), (0, 11)), ((10, 20), (342, 360))], do_land_mask\n =True)\n", (3843, 3994), False, 'from aospy.region import Region\n'), ((4017, 4152), 'aospy.region.Region', 'Region', ([], {'name': '"""sahel_east"""', 'description': 
'"""Eastern half of African Sahel"""', 'lat_bounds': '(10, 20)', 'lon_bounds': '(11, 40)', 'do_land_mask': '(True)'}), "(name='sahel_east', description='Eastern half of African Sahel',\n lat_bounds=(10, 20), lon_bounds=(11, 40), do_land_mask=True)\n", (4023, 4152), False, 'from aospy.region import Region\n'), ((4180, 4353), 'aospy.region.Region', 'Region', ([], {'name': '"""sahara"""', 'description': '"""African Sahara, as defined by Biasutti et al 2009"""', 'mask_bounds': '[((20, 30), (0, 35)), ((20, 30), (350, 360))]', 'do_land_mask': '(True)'}), "(name='sahara', description=\n 'African Sahara, as defined by Biasutti et al 2009', mask_bounds=[((20,\n 30), (0, 35)), ((20, 30), (350, 360))], do_land_mask=True)\n", (4186, 4353), False, 'from aospy.region import Region\n'), ((4377, 4501), 'aospy.region.Region', 'Region', ([], {'name': '"""ind_monsoon"""', 'description': '"""Indian monsoon"""', 'lat_bounds': '(10, 30)', 'lon_bounds': '(60, 100)', 'do_land_mask': '(False)'}), "(name='ind_monsoon', description='Indian monsoon', lat_bounds=(10, 30\n ), lon_bounds=(60, 100), do_land_mask=False)\n", (4383, 4501), False, 'from aospy.region import Region\n'), ((4531, 4675), 'aospy.region.Region', 'Region', ([], {'name': '"""warm_pool"""', 'description': '"""Indo-Pacific warm pool. Ocean mask"""', 'lat_bounds': '(-20, 20)', 'lon_bounds': '(60, 180)', 'do_land_mask': '"""ocean"""'}), "(name='warm_pool', description='Indo-Pacific warm pool. 
Ocean mask',\n lat_bounds=(-20, 20), lon_bounds=(60, 180), do_land_mask='ocean')\n", (4537, 4675), False, 'from aospy.region import Region\n'), ((4701, 4825), 'aospy.region.Region', 'Region', ([], {'name': '"""wpwp"""', 'description': '"""West Pacific Warm Pool"""', 'lat_bounds': '(-5, 5)', 'lon_bounds': '(80, 160)', 'do_land_mask': '(False)'}), "(name='wpwp', description='West Pacific Warm Pool', lat_bounds=(-5, 5\n ), lon_bounds=(80, 160), do_land_mask=False)\n", (4707, 4825), False, 'from aospy.region import Region\n'), ((4850, 4976), 'aospy.region.Region', 'Region', ([], {'name': '"""epac"""', 'description': '"""East Pacific cold tongue"""', 'lat_bounds': '(-5, 5)', 'lon_bounds': '(200, 280)', 'do_land_mask': '(False)'}), "(name='epac', description='East Pacific cold tongue', lat_bounds=(-5,\n 5), lon_bounds=(200, 280), do_land_mask=False)\n", (4856, 4976), False, 'from aospy.region import Region\n'), ((5007, 5178), 'aospy.region.Region', 'Region', ([], {'name': '"""epac_watl"""', 'description': '"""East Pacific and West Atlantic, including C. and S. America"""', 'lat_bounds': '(0, 15)', 'lon_bounds': '(240, 300)', 'do_land_mask': '(False)'}), "(name='epac_watl', description=\n 'East Pacific and West Atlantic, including C. and S. 
America',\n lat_bounds=(0, 15), lon_bounds=(240, 300), do_land_mask=False)\n", (5013, 5178), False, 'from aospy.region import Region\n'), ((5204, 5342), 'aospy.region.Region', 'Region', ([], {'name': '"""epac_itcz"""', 'description': '"""East Pacific ITCZ for NH summer"""', 'lat_bounds': '(0, 20)', 'lon_bounds': '(180, 250)', 'do_land_mask': '(False)'}), "(name='epac_itcz', description='East Pacific ITCZ for NH summer',\n lat_bounds=(0, 20), lon_bounds=(180, 250), do_land_mask=False)\n", (5210, 5342), False, 'from aospy.region import Region\n'), ((5372, 5505), 'aospy.region.Region', 'Region', ([], {'name': '"""atl_itcz"""', 'description': '"""Atlantic ITCZ for NH summer"""', 'lat_bounds': '(0, 20)', 'lon_bounds': '(300, 345)', 'do_land_mask': '(False)'}), "(name='atl_itcz', description='Atlantic ITCZ for NH summer',\n lat_bounds=(0, 20), lon_bounds=(300, 345), do_land_mask=False)\n", (5378, 5505), False, 'from aospy.region import Region\n'), ((5537, 5709), 'aospy.region.Region', 'Region', ([], {'name': '"""burls_wpac"""', 'description': '"""Equatorial W. Pacific region used by Burls and Fedorov 2014"""', 'lat_bounds': '(-8, 8)', 'lon_bounds': '(130, 205)', 'do_land_mask': '(False)'}), "(name='burls_wpac', description=\n 'Equatorial W. Pacific region used by Burls and Fedorov 2014',\n lat_bounds=(-8, 8), lon_bounds=(130, 205), do_land_mask=False)\n", (5543, 5709), False, 'from aospy.region import Region\n'), ((5736, 5908), 'aospy.region.Region', 'Region', ([], {'name': '"""burls_epac"""', 'description': '"""Equatorial E. Pacific region used by Burls and Fedorov 2014"""', 'lat_bounds': '(-8, 8)', 'lon_bounds': '(205, 280)', 'do_land_mask': '(False)'}), "(name='burls_epac', description=\n 'Equatorial E. 
Pacific region used by Burls and Fedorov 2014',\n lat_bounds=(-8, 8), lon_bounds=(205, 280), do_land_mask=False)\n", (5742, 5908), False, 'from aospy.region import Region\n'), ((5934, 6172), 'aospy.region.Region', 'Region', ([], {'name': '"""burls_pac"""', 'description': '"""Pacific region used by Burls and Fedorov 2014"""', 'mask_bounds': '[((15, 65), (100, 260)), ((10, 15), (100, 275)), ((-5, 10), (100, 290)), ((\n -65, -5), (130, 290))]', 'do_land_mask': '"""strict_ocean"""'}), "(name='burls_pac', description=\n 'Pacific region used by Burls and Fedorov 2014', mask_bounds=[((15, 65),\n (100, 260)), ((10, 15), (100, 275)), ((-5, 10), (100, 290)), ((-65, -5),\n (130, 290))], do_land_mask='strict_ocean')\n", (5940, 6172), False, 'from aospy.region import Region\n'), ((6249, 6453), 'aospy.region.Region', 'Region', ([], {'name': '"""burls_trop_pac"""', 'description': '"""Tropical Pacific region used by Burls and Fedorov 2014"""', 'mask_bounds': '[((-5, 8), (100, 290)), ((-8, -5), (130, 290))]', 'do_land_mask': '"""strict_ocean"""'}), "(name='burls_trop_pac', description=\n 'Tropical Pacific region used by Burls and Fedorov 2014', mask_bounds=[\n ((-5, 8), (100, 290)), ((-8, -5), (130, 290))], do_land_mask='strict_ocean'\n )\n", (6255, 6453), False, 'from aospy.region import Region\n'), ((6493, 6748), 'aospy.region.Region', 'Region', ([], {'name': '"""burls_ext_pac"""', 'description': '"""Extratropical Pacific region used by Burls and Fedorov 2014"""', 'mask_bounds': '[((15, 65), (100, 260)), ((10, 15), (100, 275)), ((8, 10), (100, 290)), ((-\n 65, -8), (130, 290))]', 'do_land_mask': '"""strict_ocean"""'}), "(name='burls_ext_pac', description=\n 'Extratropical Pacific region used by Burls and Fedorov 2014',\n mask_bounds=[((15, 65), (100, 260)), ((10, 15), (100, 275)), ((8, 10),\n (100, 290)), ((-65, -8), (130, 290))], do_land_mask='strict_ocean')\n", (6499, 6748), False, 'from aospy.region import Region\n'), ((6818, 6980), 'aospy.region.Region', 'Region', ([], 
{'name': '"""nino1_2"""', 'description': '"""Standard Nino 1+2 regions of equatorial E. Pacific"""', 'lat_bounds': '(-10, 0)', 'lon_bounds': '(270, 280)', 'do_land_mask': '(False)'}), "(name='nino1_2', description=\n 'Standard Nino 1+2 regions of equatorial E. Pacific', lat_bounds=(-10, \n 0), lon_bounds=(270, 280), do_land_mask=False)\n", (6824, 6980), False, 'from aospy.region import Region\n'), ((7001, 7156), 'aospy.region.Region', 'Region', ([], {'name': '"""nino3"""', 'description': '"""Standard Nino 3 region of equatorial E. Pacific"""', 'lat_bounds': '(-5, 5)', 'lon_bounds': '(210, 270)', 'do_land_mask': '(False)'}), "(name='nino3', description=\n 'Standard Nino 3 region of equatorial E. Pacific', lat_bounds=(-5, 5),\n lon_bounds=(210, 270), do_land_mask=False)\n", (7007, 7156), False, 'from aospy.region import Region\n'), ((7180, 7339), 'aospy.region.Region', 'Region', ([], {'name': '"""nino3.4"""', 'description': '"""Standard Nino 3.4 region of equatorial E. Pacific"""', 'lat_bounds': '(-5, 5)', 'lon_bounds': '(190, 240)', 'do_land_mask': '(False)'}), "(name='nino3.4', description=\n 'Standard Nino 3.4 region of equatorial E. Pacific', lat_bounds=(-5, 5),\n lon_bounds=(190, 240), do_land_mask=False)\n", (7186, 7339), False, 'from aospy.region import Region\n'), ((7361, 7516), 'aospy.region.Region', 'Region', ([], {'name': '"""nino4"""', 'description': '"""Standard Nino 4 region of equatorial E. Pacific"""', 'lat_bounds': '(-5, 5)', 'lon_bounds': '(160, 210)', 'do_land_mask': '(False)'}), "(name='nino4', description=\n 'Standard Nino 4 region of equatorial E. 
Pacific', lat_bounds=(-5, 5),\n lon_bounds=(160, 210), do_land_mask=False)\n", (7367, 7516), False, 'from aospy.region import Region\n'), ((7544, 7745), 'aospy.region.Region', 'Region', ([], {'name': '"""cld_seed_np"""', 'description': '"""North Pacific region of Hill & Ming 2012 GRL cloud brightening geoengineering study"""', 'lat_bounds': '(10, 30)', 'lon_bounds': '(204, 244)', 'do_land_mask': '"""ocean"""'}), "(name='cld_seed_np', description=\n 'North Pacific region of Hill & Ming 2012 GRL cloud brightening geoengineering study'\n , lat_bounds=(10, 30), lon_bounds=(204, 244), do_land_mask='ocean')\n", (7550, 7745), False, 'from aospy.region import Region\n'), ((7772, 7974), 'aospy.region.Region', 'Region', ([], {'name': '"""cld_seed_sp"""', 'description': '"""South Pacific region of Hill & Ming 2012 GRL cloud brightening geoengineering study"""', 'lat_bounds': '(-30, -5)', 'lon_bounds': '(240, 285)', 'do_land_mask': '"""ocean"""'}), "(name='cld_seed_sp', description=\n 'South Pacific region of Hill & Ming 2012 GRL cloud brightening geoengineering study'\n , lat_bounds=(-30, -5), lon_bounds=(240, 285), do_land_mask='ocean')\n", (7778, 7974), False, 'from aospy.region import Region\n'), ((8001, 8222), 'aospy.region.Region', 'Region', ([], {'name': '"""cld_seed_sa"""', 'description': '"""South Atlantic region of Hill & Ming 2012 GRL cloud brightening geoengineering study"""', 'mask_bounds': '[((-30, 5), (0, 12)), ((-30, 5), (342, 360))]', 'do_land_mask': '"""ocean"""'}), "(name='cld_seed_sa', description=\n 'South Atlantic region of Hill & Ming 2012 GRL cloud brightening geoengineering study'\n , mask_bounds=[((-30, 5), (0, 12)), ((-30, 5), (342, 360))],\n do_land_mask='ocean')\n", (8007, 8222), False, 'from aospy.region import Region\n'), ((8259, 8485), 'aospy.region.Region', 'Region', ([], {'name': '"""cld_seed_all"""', 'description': '"""All 3 regions from Hill & Ming 2012 GRL"""', 'mask_bounds': '[((-30, 5), (0, 12)), ((-30, 5), (342, 360)), ((-30, -5), (240, 
285)), ((10,\n 30), (204, 244))]', 'do_land_mask': '"""ocean"""'}), "(name='cld_seed_all', description=\n 'All 3 regions from Hill & Ming 2012 GRL', mask_bounds=[((-30, 5), (0, \n 12)), ((-30, 5), (342, 360)), ((-30, -5), (240, 285)), ((10, 30), (204,\n 244))], do_land_mask='ocean')\n", (8265, 8485), False, 'from aospy.region import Region\n'), ((8561, 8717), 'aospy.region.Region', 'Region', ([], {'name': '"""east_asia_monsoon"""', 'description': '"""East Asian Monsoon land region"""', 'lat_bounds': '(22.5, 40)', 'lon_bounds': '(100, 122.5)', 'do_land_mask': '(False)'}), "(name='east_asia_monsoon', description=\n 'East Asian Monsoon land region', lat_bounds=(22.5, 40), lon_bounds=(\n 100, 122.5), do_land_mask=False)\n", (8567, 8717), False, 'from aospy.region import Region\n'), ((8739, 8902), 'aospy.region.Region', 'Region', ([], {'name': '"""extratropics"""', 'description': '"""Extratropics (poleward of 30S/N)"""', 'mask_bounds': '[((-90, -30), (0, 360)), ((30, 90), (0, 360))]', 'do_land_mask': '(False)'}), "(name='extratropics', description='Extratropics (poleward of 30S/N)',\n mask_bounds=[((-90, -30), (0, 360)), ((30, 90), (0, 360))],\n do_land_mask=False)\n", (8745, 8902), False, 'from aospy.region import Region\n'), ((8943, 9083), 'aospy.region.Region', 'Region', ([], {'name': '"""nh_tropics"""', 'description': '"""Northern hemisphere tropics: 0-30N"""', 'lat_bounds': '(0, 30)', 'lon_bounds': '(0, 360)', 'do_land_mask': '(False)'}), "(name='nh_tropics', description='Northern hemisphere tropics: 0-30N',\n lat_bounds=(0, 30), lon_bounds=(0, 360), do_land_mask=False)\n", (8949, 9083), False, 'from aospy.region import Region\n'), ((9115, 9256), 'aospy.region.Region', 'Region', ([], {'name': '"""sh_tropics"""', 'description': '"""Southern hemisphere tropics: 30S-0"""', 'lat_bounds': '(-30, 0)', 'lon_bounds': '(0, 360)', 'do_land_mask': '(False)'}), "(name='sh_tropics', description='Southern hemisphere tropics: 30S-0',\n lat_bounds=(-30, 0), lon_bounds=(0, 360), 
do_land_mask=False)\n", (9121, 9256), False, 'from aospy.region import Region\n'), ((9285, 9412), 'aospy.region.Region', 'Region', ([], {'name': '"""nh_land"""', 'description': '"""Northern hemisphere land"""', 'lat_bounds': '(0, 90)', 'lon_bounds': '(0, 360)', 'do_land_mask': '(True)'}), "(name='nh_land', description='Northern hemisphere land', lat_bounds=(\n 0, 90), lon_bounds=(0, 360), do_land_mask=True)\n", (9291, 9412), False, 'from aospy.region import Region\n'), ((9441, 9573), 'aospy.region.Region', 'Region', ([], {'name': '"""nh_ocean"""', 'description': '"""Northern hemisphere ocean"""', 'lat_bounds': '(0, 90)', 'lon_bounds': '(0, 360)', 'do_land_mask': '"""ocean"""'}), "(name='nh_ocean', description='Northern hemisphere ocean', lat_bounds\n =(0, 90), lon_bounds=(0, 360), do_land_mask='ocean')\n", (9447, 9573), False, 'from aospy.region import Region\n'), ((9601, 9729), 'aospy.region.Region', 'Region', ([], {'name': '"""sh_land"""', 'description': '"""Southern hemisphere land"""', 'lat_bounds': '(-90, 0)', 'lon_bounds': '(0, 360)', 'do_land_mask': '(True)'}), "(name='sh_land', description='Southern hemisphere land', lat_bounds=(\n -90, 0), lon_bounds=(0, 360), do_land_mask=True)\n", (9607, 9729), False, 'from aospy.region import Region\n'), ((9758, 9891), 'aospy.region.Region', 'Region', ([], {'name': '"""sh_ocean"""', 'description': '"""Southern hemisphere ocean"""', 'lat_bounds': '(-90, 0)', 'lon_bounds': '(0, 360)', 'do_land_mask': '"""ocean"""'}), "(name='sh_ocean', description='Southern hemisphere ocean', lat_bounds\n =(-90, 0), lon_bounds=(0, 360), do_land_mask='ocean')\n", (9764, 9891), False, 'from aospy.region import Region\n'), ((9926, 10097), 'aospy.region.Region', 'Region', ([], {'name': '"""extratrop_land"""', 'description': '"""Extratropical (poleward of 30S/N) land"""', 'mask_bounds': '[((-90, -30), (0, 360)), ((30, 90), (0, 360))]', 'do_land_mask': '(True)'}), "(name='extratrop_land', description=\n 'Extratropical (poleward of 30S/N) land', 
mask_bounds=[((-90, -30), (0,\n 360)), ((30, 90), (0, 360))], do_land_mask=True)\n", (9932, 10097), False, 'from aospy.region import Region\n'), ((10142, 10318), 'aospy.region.Region', 'Region', ([], {'name': '"""extratrop_ocean"""', 'description': '"""Extratropical (poleward of 30S/N) ocean"""', 'mask_bounds': '[((-90, -30), (0, 360)), ((30, 90), (0, 360))]', 'do_land_mask': '"""ocean"""'}), "(name='extratrop_ocean', description=\n 'Extratropical (poleward of 30S/N) ocean', mask_bounds=[((-90, -30), (0,\n 360)), ((30, 90), (0, 360))], do_land_mask='ocean')\n", (10148, 10318), False, 'from aospy.region import Region\n'), ((10360, 10515), 'aospy.region.Region', 'Region', ([], {'name': '"""nh_extratrop"""', 'description': '"""Northern hemisphere extratropics (30-90N)"""', 'lat_bounds': '(30, 90)', 'lon_bounds': '(0, 360)', 'do_land_mask': '(False)'}), "(name='nh_extratrop', description=\n 'Northern hemisphere extratropics (30-90N)', lat_bounds=(30, 90),\n lon_bounds=(0, 360), do_land_mask=False)\n", (10366, 10515), False, 'from aospy.region import Region\n'), ((10544, 10702), 'aospy.region.Region', 'Region', ([], {'name': '"""sh_extratrop"""', 'description': '"""Southern hemisphere extratropics (90S-30S)"""', 'lat_bounds': '(-90, -30)', 'lon_bounds': '(0, 360)', 'do_land_mask': '(False)'}), "(name='sh_extratrop', description=\n 'Southern hemisphere extratropics (90S-30S)', lat_bounds=(-90, -30),\n lon_bounds=(0, 360), do_land_mask=False)\n", (10550, 10702), False, 'from aospy.region import Region\n')] |
#!/usr/bin/env python
import serial
import time
BAUD = 57600
with serial.Serial('/dev/cu.usbserial-DN05KLWU', BAUD) as port:
time.sleep(5.0)
n = 0
while True:
port.write(bytearray([n & 0xFF]))
n += 1
| [
"serial.Serial",
"time.sleep"
] | [((69, 118), 'serial.Serial', 'serial.Serial', (['"""/dev/cu.usbserial-DN05KLWU"""', 'BAUD'], {}), "('/dev/cu.usbserial-DN05KLWU', BAUD)\n", (82, 118), False, 'import serial\n'), ((130, 145), 'time.sleep', 'time.sleep', (['(5.0)'], {}), '(5.0)\n', (140, 145), False, 'import time\n')] |
# restful.py
# import objects from the flask model
from flask import Flask, jsonify, request
# define an app using flask
app = Flask(__name__)
languages = [{'name':'Python'}, {'name':'Ruby'}, {'name':'JavaScript'}]
@app.route('/', methods=['GET'])
def test():
return jsonify({'message':'It works!'})
@app.route('/lang', methods=['GET'])
def returnAll():
return jsonify({'languages': languages})
@app.route('/lang/<string:name>', methods=['GET'])
def returnOne(name):
#use lamba to search through the list
langs = [language for language in languages if language['name'] == name]
return jsonify({'language': langs[0]})
# Perform POST REQUEST NOW.
@app.route('/lang', methods=['POST'])
def addOne():
# create a dic key will have json object and whatever value needs to be passed
language = {'name': request.json['name']}
print(language)
languages.append(language)
return jsonify({'languages': languages})
# perform PUT REQUEST
@app.route('/lang/<string:name>', methods=['PUT'])
def editOne(name):
langs = [language for language in languages if language['name'] == name]
langs[0]['name'] = request.json['name']
return jsonify({'languages': langs[0]})
# perform DELETE REQUEST
@app.route('/lang/<string:name>', methods=['DELETE'])
def deleteOne(name):
lang = [ language for language in languages if language['name'] == name]
languages.remove(lang[0])
return jsonify({'languages': languages})
if __name__ == '__main__':
#app.run(debug=True)
app.run(host='0.0.0.0', port=8080, debug=True) # c9 | [
"flask.jsonify",
"flask.Flask"
] | [((130, 145), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (135, 145), False, 'from flask import Flask, jsonify, request\n'), ((276, 309), 'flask.jsonify', 'jsonify', (["{'message': 'It works!'}"], {}), "({'message': 'It works!'})\n", (283, 309), False, 'from flask import Flask, jsonify, request\n'), ((375, 408), 'flask.jsonify', 'jsonify', (["{'languages': languages}"], {}), "({'languages': languages})\n", (382, 408), False, 'from flask import Flask, jsonify, request\n'), ((612, 643), 'flask.jsonify', 'jsonify', (["{'language': langs[0]}"], {}), "({'language': langs[0]})\n", (619, 643), False, 'from flask import Flask, jsonify, request\n'), ((917, 950), 'flask.jsonify', 'jsonify', (["{'languages': languages}"], {}), "({'languages': languages})\n", (924, 950), False, 'from flask import Flask, jsonify, request\n'), ((1177, 1209), 'flask.jsonify', 'jsonify', (["{'languages': langs[0]}"], {}), "({'languages': langs[0]})\n", (1184, 1209), False, 'from flask import Flask, jsonify, request\n'), ((1429, 1462), 'flask.jsonify', 'jsonify', (["{'languages': languages}"], {}), "({'languages': languages})\n", (1436, 1462), False, 'from flask import Flask, jsonify, request\n')] |
import numpy as np
import cv2
def nms(bboxs, thresh):
# get all parameters
x1, y1, x2, y2, scores = [bboxs[:, i] for i in range(len(bboxs[0]))]
# calculate all areas of boxed
areas = (x2 - x1 + 1) * (y2 - y1 + 1)
# sort boxes according to their class score
sorted_index = scores.argsort()[::-1]
# result list
result = []
while sorted_index.size > 0:
# get the box with largest score
max_box = bboxs[sorted_index[0]]
# add it to our result
result.append(max_box)
# calculate intersection coordinates
xx1 = np.maximum(max_box[0], x1[sorted_index[1:]])
yy1 = np.maximum(max_box[1], y1[sorted_index[1:]])
xx2 = np.minimum(max_box[2], x2[sorted_index[1:]])
yy2 = np.minimum(max_box[3], y2[sorted_index[1:]])
# calculate intersection area
w = np.maximum(0.0, xx2 - xx1 + 1)
h = np.maximum(0.0, yy2 - yy1 + 1)
intersection = w * h
# calculate ious
ious = intersection / (areas[sorted_index[0]] + areas[sorted_index[1:]] - intersection)
# retain all the boxes whose ious are less than the threshold
sorted_index = sorted_index[1:][ious <= thresh]
return result
def draw_bbox(bboxs, pic_name):
pic = np.zeros((850, 850), np.uint8)
for bbox in bboxs:
x1, y1, x2, y2 = map(int, bbox[:-1])
pic = cv2.rectangle(pic, (x1, y1), (x2, y2), (255, 0, 0), 2)
cv2.imshow(pic_name,pic)
cv2.waitKey(0)
if __name__ == "__main__":
bboxs = np.array([
[720, 690, 820, 800, 0.5],
[204, 102, 358, 250, 0.5],
[257, 118, 380, 250, 0.8],
[700, 700, 800, 800, 0.4],
[280, 135, 400, 250, 0.7],
[255, 118, 360, 235, 0.7]])
thresh = 0.3
draw_bbox(bboxs, "Before_NMS")
result = nms(bboxs, thresh)
draw_bbox(result, "After_NMS") | [
"cv2.rectangle",
"numpy.minimum",
"cv2.imshow",
"numpy.array",
"numpy.zeros",
"numpy.maximum",
"cv2.waitKey"
] | [((1286, 1316), 'numpy.zeros', 'np.zeros', (['(850, 850)', 'np.uint8'], {}), '((850, 850), np.uint8)\n', (1294, 1316), True, 'import numpy as np\n'), ((1458, 1483), 'cv2.imshow', 'cv2.imshow', (['pic_name', 'pic'], {}), '(pic_name, pic)\n', (1468, 1483), False, 'import cv2\n'), ((1487, 1501), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (1498, 1501), False, 'import cv2\n'), ((1543, 1725), 'numpy.array', 'np.array', (['[[720, 690, 820, 800, 0.5], [204, 102, 358, 250, 0.5], [257, 118, 380, 250,\n 0.8], [700, 700, 800, 800, 0.4], [280, 135, 400, 250, 0.7], [255, 118, \n 360, 235, 0.7]]'], {}), '([[720, 690, 820, 800, 0.5], [204, 102, 358, 250, 0.5], [257, 118, \n 380, 250, 0.8], [700, 700, 800, 800, 0.4], [280, 135, 400, 250, 0.7], [\n 255, 118, 360, 235, 0.7]])\n', (1551, 1725), True, 'import numpy as np\n'), ((598, 642), 'numpy.maximum', 'np.maximum', (['max_box[0]', 'x1[sorted_index[1:]]'], {}), '(max_box[0], x1[sorted_index[1:]])\n', (608, 642), True, 'import numpy as np\n'), ((657, 701), 'numpy.maximum', 'np.maximum', (['max_box[1]', 'y1[sorted_index[1:]]'], {}), '(max_box[1], y1[sorted_index[1:]])\n', (667, 701), True, 'import numpy as np\n'), ((716, 760), 'numpy.minimum', 'np.minimum', (['max_box[2]', 'x2[sorted_index[1:]]'], {}), '(max_box[2], x2[sorted_index[1:]])\n', (726, 760), True, 'import numpy as np\n'), ((775, 819), 'numpy.minimum', 'np.minimum', (['max_box[3]', 'y2[sorted_index[1:]]'], {}), '(max_box[3], y2[sorted_index[1:]])\n', (785, 819), True, 'import numpy as np\n'), ((871, 901), 'numpy.maximum', 'np.maximum', (['(0.0)', '(xx2 - xx1 + 1)'], {}), '(0.0, xx2 - xx1 + 1)\n', (881, 901), True, 'import numpy as np\n'), ((914, 944), 'numpy.maximum', 'np.maximum', (['(0.0)', '(yy2 - yy1 + 1)'], {}), '(0.0, yy2 - yy1 + 1)\n', (924, 944), True, 'import numpy as np\n'), ((1399, 1453), 'cv2.rectangle', 'cv2.rectangle', (['pic', '(x1, y1)', '(x2, y2)', '(255, 0, 0)', '(2)'], {}), '(pic, (x1, y1), (x2, y2), (255, 0, 0), 2)\n', (1412, 1453), False, 
'import cv2\n')] |
import pipe.gui.select_from_list as sfl
from pymel.core import *
from pipe.gui import quick_dialogs as qd
from pipe.tools.mayatools.utils.utils import *
class Tagger:
def __init__(self):
self.selected_string = self.get_selected_string()
def tag(self):
if self.selected:
response = qd.binary_option("Add Alembic tag to:\n" + str(self.selected_string), "Yes", "No", title='Add Alembic Tag')
else:
qd.warning("Nothing is selected")
return
if response:
for node in self.selected:
tag_node_with_flag(node, "DCC_Alembic_Export_Flag")
qd.info("tag successful!")
def untag(self):
response = qd.binary_option("Remove Alembic tag from:\n" + str(self.selected_string), "Yes", "No", title='Remove Alembic Tag')
if response:
for node in self.selected:
untag_node_with_flag(node, "DCC_Alembic_Export_Flag")
qd.info("untag successful!")
def untag_multiple(self):
tagged_items={}
for node in self.all:
if(node_is_tagged_with_flag(node,"DCC_Alembic_Export_Flag")):
tagged_items.update({str(node) : node})
self.item_gui = sfl.SelectFromList(l=tagged_items, parent=maya_main_window(), title="Untag Multiple", multiple_selection=True)
self.item_gui.submitted.connect(self.mass_untag)
def get_selected_string(self):
self.selected = ls(sl=True, tr=True)
self.all = ls(tr=True)
selected_string = ""
for node in self.selected:
selected_string += node
return selected_string
def mass_untag(self, value):
for name in value:
for object in self.all:
if str(object) == str(name):
node = object
break
untag_node_with_flag(node, "DCC_Alembic_Export_Flag")
| [
"pipe.gui.quick_dialogs.info",
"pipe.gui.quick_dialogs.warning"
] | [((455, 488), 'pipe.gui.quick_dialogs.warning', 'qd.warning', (['"""Nothing is selected"""'], {}), "('Nothing is selected')\n", (465, 488), True, 'from pipe.gui import quick_dialogs as qd\n'), ((650, 676), 'pipe.gui.quick_dialogs.info', 'qd.info', (['"""tag successful!"""'], {}), "('tag successful!')\n", (657, 676), True, 'from pipe.gui import quick_dialogs as qd\n'), ((978, 1006), 'pipe.gui.quick_dialogs.info', 'qd.info', (['"""untag successful!"""'], {}), "('untag successful!')\n", (985, 1006), True, 'from pipe.gui import quick_dialogs as qd\n')] |
# Copyright (c) Facebook, Inc. and its affiliates.
import os
'''
This forces the environment to use only 1 cpu when running.
This could be helpful when launching multiple environment simulatenously.
'''
os.environ['OPENBLAS_NUM_THREADS'] = '1'
os.environ['MKL_NUM_THREADS'] = '1'
# os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
import numpy as np
import copy
import pybullet as pb
import pybullet_data
from bullet import bullet_client
from bullet import bullet_utils as bu
from fairmotion.ops import conversions
from fairmotion.ops import math
from fairmotion.utils import constants
import sim_agent
import sim_obstacle
import importlib.util
class Env(object):
'''
This environment defines a base environment where the simulated
characters exist and they are controlled by tracking controllers
'''
def __init__(self,
fps_sim,
fps_act,
char_info_module,
sim_char_file,
ref_motion_scale,
actuation,
self_collision=None,
contactable_body=None,
verbose=False,
):
self._num_agent = len(sim_char_file)
assert self._num_agent > 0
assert self._num_agent == len(char_info_module)
assert self._num_agent == len(ref_motion_scale)
self._char_info = []
for i in range(self._num_agent):
''' Load Character Info Moudle '''
spec = importlib.util.spec_from_file_location(
"char_info%d"%(i), char_info_module[i])
char_info = importlib.util.module_from_spec(spec)
spec.loader.exec_module(char_info)
self._char_info.append(char_info)
''' Modfiy Contactable Body Parts '''
if contactable_body:
contact_allow_all = True if 'all' in contactable_body else False
for joint in list(char_info.contact_allow_map.keys()):
char_info.contact_allow_map[joint] = \
contact_allow_all or char_info.joint_name[joint] in contactable_body
self._v_up = self._char_info[0].v_up_env
''' Define PyBullet Client '''
self._pb_client = bullet_client.BulletClient(
connection_mode=pb.DIRECT, options=' --opengl2')
self._pb_client.setAdditionalSearchPath(pybullet_data.getDataPath())
''' timestep for physics simulation '''
self._dt_sim = 1.0/fps_sim
''' timestep for control of dynamic controller '''
self._dt_act = 1.0/fps_act
if fps_sim%fps_act != 0:
raise Exception('FPS_SIM should be a multiples of FPS_ACT')
self._num_substep = fps_sim//fps_act
self._verbose = verbose
self.setup_physics_scene(sim_char_file,
self._char_info,
ref_motion_scale,
self_collision,
actuation)
''' Elapsed time after the environment starts '''
self._elapsed_time = 0.0
''' For tracking the length of current episode '''
self._episode_len = 0.0
''' Create a Manager for Handling Obstacles '''
self._obs_manager = sim_obstacle.ObstacleManager(
self._pb_client, self._dt_act, self._char_info[0].v_up_env)
''' Save the initial pybullet state to clear all thing before calling reset '''
self._init_state = None
self.reset()
self._init_state = self._pb_client.saveState()
def setup_physics_scene(self, sim_char_file, char_info, ref_motion_scale, self_collision, actuation):
self._pb_client.resetSimulation()
self.create_ground()
self._agent = []
for i in range(self._num_agent):
self._agent.append(sim_agent.SimAgent(name='sim_agent_%d'%(i),
pybullet_client=self._pb_client,
model_file=sim_char_file[i],
char_info=char_info[i],
ref_scale=ref_motion_scale[i],
self_collision=self_collision[i],
actuation=actuation[i],
kinematic_only=False,
verbose=self._verbose))
def create_ground(self):
''' Create Plane '''
if np.allclose(np.array([0.0, 0.0, 1.0]), self._v_up):
R_plane = constants.eye_R()
else:
R_plane = math.R_from_vectors(np.array([0.0, 0.0, 1.0]), self._v_up)
self._plane_id = \
self._pb_client.loadURDF(
"plane_implicit.urdf",
[0, 0, 0],
conversions.R2Q(R_plane),
useMaximalCoordinates=True)
self._pb_client.changeDynamics(self._plane_id, linkIndex=-1, lateralFriction=0.9)
''' Dynamics parameters '''
assert np.allclose(np.linalg.norm(self._v_up), 1.0)
gravity = -9.8 * self._v_up
self._pb_client.setGravity(gravity[0], gravity[1], gravity[2])
self._pb_client.setTimeStep(self._dt_sim)
self._pb_client.setPhysicsEngineParameter(numSubSteps=2)
self._pb_client.setPhysicsEngineParameter(numSolverIterations=10)
# self._pb_client.setPhysicsEngineParameter(solverResidualThreshold=1e-10)
def check_collision(self, body_id1, body_id2, link_id1=None, link_id2=None):
''' collision between two bodies '''
pts = self._pb_client.getContactPoints(
bodyA=body_id1, bodyB=body_id2, linkIndexA=link_id1, linkIndexB=link_id2)
return len(p) > 0
# def check_falldown(self, agent, plane_id=None):
# ''' check if any non-allowed body part hits the ground '''
# if plane_id is None: plane_id = self._plane_id
# pts = self._pb_client.getContactPoints()
# for p in pts:
# part = None
# #ignore self-collision
# if p[1] == p[2]: continue
# if p[1] == agent._body_id and p[2] == plane_id: part = p[3]
# if p[2] == agent._body_id and p[1] == plane_id: part = p[4]
# #ignore collision of other agents
# if part == None: continue
# if not agent._char_info.contact_allow_map[part]: return True
# return False
def check_falldown(self, agent, plane_id=None):
''' check if any non-allowed body part hits the ground '''
if plane_id is None: plane_id = self._plane_id
pts = self._pb_client.getContactPoints(
bodyA=agent._body_id, bodyB=plane_id)
for p in pts:
part = p[3] if p[1] == agent._body_id else p[4]
if agent._char_info.contact_allow_map[part]:
continue
else:
return True
return False
def is_sim_div(self, agent):
''' TODO: check divergence of simulation '''
return False
def step(self, target_poses=[]):
'''
One Step-forward Simulation
'''
''' Increase elapsed time '''
self._elapsed_time += self._dt_act
self._episode_len += self._dt_act
''' Update simulation '''
for _ in range(self._num_substep):
for i, target_pose in enumerate(target_poses):
self._agent[i].actuate(pose=target_pose,
vel=None)
self._pb_client.stepSimulation()
self._obs_manager.update()
def reset(self, time=0.0, poses=None, vels=None, pb_state_id=None):
''' remove obstacles in the scene '''
self._obs_manager.clear()
'''
Restore internal pybullet state
by uisng the saved info when Env was initially created
'''
if pb_state_id is not None:
self._pb_client.restoreState(pb_state_id)
self._elapsed_time = time
if poses is None:
if self._init_state is not None:
self._pb_client.restoreState(self._init_state)
else:
for i in range(self._num_agent):
pose = poses[i]
vel = None if vels is None else vels[i]
self._agent[i].set_pose(pose, vel)
self._episode_len = 0.0
def add_noise_to_pose_vel(self, agent, pose, vel=None, return_as_copied=True):
'''
Add a little bit of noise to the given pose and velocity
'''
ref_pose = copy.deepcopy(pose) if return_as_copied else pose
if vel:
ref_vel = copy.deepcopy(vel) if return_as_copied else vel
dof_cnt = 0
for j in agent._joint_indices:
joint_type = agent.get_joint_type(j)
''' Ignore fixed joints '''
if joint_type == self._pb_client.JOINT_FIXED:
continue
''' Ignore if there is no corresponding joint '''
if agent._char_info.bvh_map[j] == None:
continue
T = ref_pose.get_transform(agent._char_info.bvh_map[j], local=True)
R, p = conversions.T2Rp(T)
if joint_type == self._pb_client.JOINT_SPHERICAL:
dR = math.random_rotation(
mu_theta=agent._char_info.noise_pose[j][0],
sigma_theta=agent._char_info.noise_pose[j][1],
lower_theta=agent._char_info.noise_pose[j][2],
upper_theta=agent._char_info.noise_pose[j][3])
dof_cnt += 3
elif joint_type == self._pb_client.JOINT_REVOLUTE:
theta = math.truncnorm(
mu=agent._char_info.noise_pose[j][0],
sigma=agent._char_info.noise_pose[j][1],
lower=agent._char_info.noise_pose[j][2],
upper=agent._char_info.noise_pose[j][3])
joint_axis = agent.get_joint_axis(j)
dR = conversions.A2R(joint_axis*theta)
dof_cnt += 1
else:
raise NotImplementedError
T_new = conversions.Rp2T(np.dot(R, dR), p)
ref_pose.set_transform(agent._char_info.bvh_map[j], T_new, do_ortho_norm=False, local=True)
if vel is not None:
dw = math.truncnorm(
mu=np.full(3, agent._char_info.noise_vel[j][0]),
sigma=np.full(3, agent._char_info.noise_vel[j][1]),
lower=np.full(3, agent._char_info.noise_vel[j][2]),
upper=np.full(3, agent._char_info.noise_vel[j][3]))
ref_vel.data_local[j][:3] += dw
return ref_pose, ref_vel
def render(self, rm, ground_height=0.0):
colors = rm.COLORS_FOR_AGENTS
rm.gl.glEnable(rm.gl.GL_LIGHTING)
rm.gl.glEnable(rm.gl.GL_BLEND)
rm.gl.glBlendFunc(rm.gl.GL_SRC_ALPHA, rm.gl.GL_ONE_MINUS_SRC_ALPHA)
for i in range(self._num_agent):
sim_agent = self._agent[i]
char_info = self._char_info[i]
if rm.flag['sim_model']:
rm.gl.glEnable(rm.gl.GL_DEPTH_TEST)
if rm.flag['shadow']:
rm.gl.glPushMatrix()
d = np.array([1, 1, 1])
d = d - math.projectionOnVector(d, char_info.v_up_env)
offset = (0.001 + ground_height) * char_info.v_up_env
rm.gl.glTranslatef(offset[0], offset[1], offset[2])
rm.gl.glScalef(d[0], d[1], d[2])
rm.bullet_render.render_model(self._pb_client,
sim_agent._body_id,
draw_link=True,
draw_link_info=False,
draw_joint=False,
draw_joint_geom=False,
ee_indices=None,
color=[0.5,0.5,0.5,1.0],
lighting=False)
rm.gl.glPopMatrix()
rm.bullet_render.render_model(self._pb_client,
sim_agent._body_id,
draw_link=True,
draw_link_info=True,
draw_joint=rm.flag['joint'],
draw_joint_geom=True,
ee_indices=char_info.end_effector_indices,
color=colors[i])
if rm.flag['collision'] and self._elapsed_time > 0.0:
rm.gl.glPushAttrib(rm.gl.GL_LIGHTING|rm.gl.GL_DEPTH_TEST|rm.gl.GL_BLEND)
rm.gl.glEnable(rm.gl.GL_BLEND)
rm.bullet_render.render_contacts(self._pb_client, sim_agent._body_id)
rm.gl.glPopAttrib()
if rm.flag['com_vel']:
p, Q, v, w = sim_agent.get_root_state()
p, v = sim_agent.get_com_and_com_vel()
rm.gl_render.render_arrow(p, p+v, D=0.01, color=[0, 0, 0, 1])
if rm.flag['facing_frame']:
rm.gl.glPushAttrib(rm.gl.GL_LIGHTING|rm.gl.GL_DEPTH_TEST|rm.gl.GL_BLEND)
rm.gl.glEnable(rm.gl.GL_BLEND)
rm.gl_render.render_transform(
sim_agent.get_facing_transform(ground_height),
scale=0.5,
use_arrow=True)
rm.gl.glPopAttrib()
if rm.flag['obstacle']:
self._obs_manager.render()
if __name__ == '__main__':
import env_renderer as er
import render_module as rm
from fairmotion.viz.utils import TimeChecker
rm.initialize()
class EnvRenderer(er.EnvRenderer):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.time_checker_auto_play = TimeChecker()
self.reset()
def reset(self):
self.env.reset()
def one_step(self):
# a = np.zeros(100)
self.env.step()
def extra_render_callback(self):
self.env.render(self.rm)
def extra_idle_callback(self):
time_elapsed = self.time_checker_auto_play.get_time(restart=False)
if self.rm.flag['auto_play'] and time_elapsed >= self.env._dt_act:
self.time_checker_auto_play.begin()
self.one_step()
def extra_keyboard_callback(self, key):
if key == b'r':
self.reset()
elif key == b'O':
size = np.random.uniform(0.1, 0.3, 3)
p, Q, v, w = self.env._agent[0].get_root_state()
self.env._obs_manager.throw(p, size=size)
print('=====Motion Tracking Controller=====')
env = Env(fps_sim=480,
fps_act=30,
verbose=False,
char_info_module=['amass_char_info.py'],
sim_char_file=['data/character/amass.urdf'],
ref_motion_scale=[1.0],
self_collision=[True],
actuation=["spd"])
cam = rm.camera.Camera(pos=np.array([12.0, 0.0, 12.0]),
origin=np.array([0.0, 0.0, 0.0]),
vup=np.array([0.0, 0.0, 1.0]),
fov=30.0)
renderer = EnvRenderer(env=env, cam=cam)
renderer.run()
| [
"pybullet_data.getDataPath",
"fairmotion.ops.math.projectionOnVector",
"numpy.array",
"copy.deepcopy",
"numpy.linalg.norm",
"sim_agent.get_root_state",
"render_module.gl.glPushAttrib",
"render_module.bullet_render.render_contacts",
"render_module.initialize",
"render_module.gl.glBlendFunc",
"fai... | [((14150, 14165), 'render_module.initialize', 'rm.initialize', ([], {}), '()\n', (14163, 14165), True, 'import render_module as rm\n'), ((2242, 2317), 'bullet.bullet_client.BulletClient', 'bullet_client.BulletClient', ([], {'connection_mode': 'pb.DIRECT', 'options': '""" --opengl2"""'}), "(connection_mode=pb.DIRECT, options=' --opengl2')\n", (2268, 2317), False, 'from bullet import bullet_client\n'), ((3292, 3385), 'sim_obstacle.ObstacleManager', 'sim_obstacle.ObstacleManager', (['self._pb_client', 'self._dt_act', 'self._char_info[0].v_up_env'], {}), '(self._pb_client, self._dt_act, self._char_info\n [0].v_up_env)\n', (3320, 3385), False, 'import sim_obstacle\n'), ((10957, 10990), 'render_module.gl.glEnable', 'rm.gl.glEnable', (['rm.gl.GL_LIGHTING'], {}), '(rm.gl.GL_LIGHTING)\n', (10971, 10990), True, 'import render_module as rm\n'), ((10999, 11029), 'render_module.gl.glEnable', 'rm.gl.glEnable', (['rm.gl.GL_BLEND'], {}), '(rm.gl.GL_BLEND)\n', (11013, 11029), True, 'import render_module as rm\n'), ((11038, 11105), 'render_module.gl.glBlendFunc', 'rm.gl.glBlendFunc', (['rm.gl.GL_SRC_ALPHA', 'rm.gl.GL_ONE_MINUS_SRC_ALPHA'], {}), '(rm.gl.GL_SRC_ALPHA, rm.gl.GL_ONE_MINUS_SRC_ALPHA)\n', (11055, 11105), True, 'import render_module as rm\n'), ((2379, 2406), 'pybullet_data.getDataPath', 'pybullet_data.getDataPath', ([], {}), '()\n', (2404, 2406), False, 'import pybullet_data\n'), ((4634, 4659), 'numpy.array', 'np.array', (['[0.0, 0.0, 1.0]'], {}), '([0.0, 0.0, 1.0])\n', (4642, 4659), True, 'import numpy as np\n'), ((4696, 4713), 'fairmotion.utils.constants.eye_R', 'constants.eye_R', ([], {}), '()\n', (4711, 4713), False, 'from fairmotion.utils import constants\n'), ((4958, 4982), 'fairmotion.ops.conversions.R2Q', 'conversions.R2Q', (['R_plane'], {}), '(R_plane)\n', (4973, 4982), False, 'from fairmotion.ops import conversions\n'), ((5183, 5209), 'numpy.linalg.norm', 'np.linalg.norm', (['self._v_up'], {}), '(self._v_up)\n', (5197, 5209), True, 'import numpy as 
np\n'), ((8705, 8724), 'copy.deepcopy', 'copy.deepcopy', (['pose'], {}), '(pose)\n', (8718, 8724), False, 'import copy\n'), ((9310, 9329), 'fairmotion.ops.conversions.T2Rp', 'conversions.T2Rp', (['T'], {}), '(T)\n', (9326, 9329), False, 'from fairmotion.ops import conversions\n'), ((14325, 14338), 'fairmotion.viz.utils.TimeChecker', 'TimeChecker', ([], {}), '()\n', (14336, 14338), False, 'from fairmotion.viz.utils import TimeChecker\n'), ((15570, 15597), 'numpy.array', 'np.array', (['[12.0, 0.0, 12.0]'], {}), '([12.0, 0.0, 12.0])\n', (15578, 15597), True, 'import numpy as np\n'), ((15633, 15658), 'numpy.array', 'np.array', (['[0.0, 0.0, 0.0]'], {}), '([0.0, 0.0, 0.0])\n', (15641, 15658), True, 'import numpy as np\n'), ((15692, 15717), 'numpy.array', 'np.array', (['[0.0, 0.0, 1.0]'], {}), '([0.0, 0.0, 1.0])\n', (15700, 15717), True, 'import numpy as np\n'), ((3884, 4161), 'sim_agent.SimAgent', 'sim_agent.SimAgent', ([], {'name': "('sim_agent_%d' % i)", 'pybullet_client': 'self._pb_client', 'model_file': 'sim_char_file[i]', 'char_info': 'char_info[i]', 'ref_scale': 'ref_motion_scale[i]', 'self_collision': 'self_collision[i]', 'actuation': 'actuation[i]', 'kinematic_only': '(False)', 'verbose': 'self._verbose'}), "(name='sim_agent_%d' % i, pybullet_client=self._pb_client,\n model_file=sim_char_file[i], char_info=char_info[i], ref_scale=\n ref_motion_scale[i], self_collision=self_collision[i], actuation=\n actuation[i], kinematic_only=False, verbose=self._verbose)\n", (3902, 4161), False, 'import sim_agent\n'), ((4770, 4795), 'numpy.array', 'np.array', (['[0.0, 0.0, 1.0]'], {}), '([0.0, 0.0, 1.0])\n', (4778, 4795), True, 'import numpy as np\n'), ((8793, 8811), 'copy.deepcopy', 'copy.deepcopy', (['vel'], {}), '(vel)\n', (8806, 8811), False, 'import copy\n'), ((9413, 9627), 'fairmotion.ops.math.random_rotation', 'math.random_rotation', ([], {'mu_theta': 'agent._char_info.noise_pose[j][0]', 'sigma_theta': 'agent._char_info.noise_pose[j][1]', 'lower_theta': 
'agent._char_info.noise_pose[j][2]', 'upper_theta': 'agent._char_info.noise_pose[j][3]'}), '(mu_theta=agent._char_info.noise_pose[j][0],\n sigma_theta=agent._char_info.noise_pose[j][1], lower_theta=agent.\n _char_info.noise_pose[j][2], upper_theta=agent._char_info.noise_pose[j][3])\n', (9433, 9627), False, 'from fairmotion.ops import math\n'), ((10307, 10320), 'numpy.dot', 'np.dot', (['R', 'dR'], {}), '(R, dR)\n', (10313, 10320), True, 'import numpy as np\n'), ((11283, 11318), 'render_module.gl.glEnable', 'rm.gl.glEnable', (['rm.gl.GL_DEPTH_TEST'], {}), '(rm.gl.GL_DEPTH_TEST)\n', (11297, 11318), True, 'import render_module as rm\n'), ((12380, 12607), 'render_module.bullet_render.render_model', 'rm.bullet_render.render_model', (['self._pb_client', 'sim_agent._body_id'], {'draw_link': '(True)', 'draw_link_info': '(True)', 'draw_joint': "rm.flag['joint']", 'draw_joint_geom': '(True)', 'ee_indices': 'char_info.end_effector_indices', 'color': 'colors[i]'}), "(self._pb_client, sim_agent._body_id,\n draw_link=True, draw_link_info=True, draw_joint=rm.flag['joint'],\n draw_joint_geom=True, ee_indices=char_info.end_effector_indices, color=\n colors[i])\n", (12409, 12607), True, 'import render_module as rm\n'), ((9816, 10001), 'fairmotion.ops.math.truncnorm', 'math.truncnorm', ([], {'mu': 'agent._char_info.noise_pose[j][0]', 'sigma': 'agent._char_info.noise_pose[j][1]', 'lower': 'agent._char_info.noise_pose[j][2]', 'upper': 'agent._char_info.noise_pose[j][3]'}), '(mu=agent._char_info.noise_pose[j][0], sigma=agent._char_info\n .noise_pose[j][1], lower=agent._char_info.noise_pose[j][2], upper=agent\n ._char_info.noise_pose[j][3])\n', (9830, 10001), False, 'from fairmotion.ops import math\n'), ((10147, 10182), 'fairmotion.ops.conversions.A2R', 'conversions.A2R', (['(joint_axis * theta)'], {}), '(joint_axis * theta)\n', (10162, 10182), False, 'from fairmotion.ops import conversions\n'), ((11377, 11397), 'render_module.gl.glPushMatrix', 'rm.gl.glPushMatrix', ([], {}), '()\n', 
(11395, 11397), True, 'import render_module as rm\n'), ((11422, 11441), 'numpy.array', 'np.array', (['[1, 1, 1]'], {}), '([1, 1, 1])\n', (11430, 11441), True, 'import numpy as np\n'), ((11611, 11662), 'render_module.gl.glTranslatef', 'rm.gl.glTranslatef', (['offset[0]', 'offset[1]', 'offset[2]'], {}), '(offset[0], offset[1], offset[2])\n', (11629, 11662), True, 'import render_module as rm\n'), ((11683, 11715), 'render_module.gl.glScalef', 'rm.gl.glScalef', (['d[0]', 'd[1]', 'd[2]'], {}), '(d[0], d[1], d[2])\n', (11697, 11715), True, 'import render_module as rm\n'), ((11736, 11951), 'render_module.bullet_render.render_model', 'rm.bullet_render.render_model', (['self._pb_client', 'sim_agent._body_id'], {'draw_link': '(True)', 'draw_link_info': '(False)', 'draw_joint': '(False)', 'draw_joint_geom': '(False)', 'ee_indices': 'None', 'color': '[0.5, 0.5, 0.5, 1.0]', 'lighting': '(False)'}), '(self._pb_client, sim_agent._body_id,\n draw_link=True, draw_link_info=False, draw_joint=False, draw_joint_geom\n =False, ee_indices=None, color=[0.5, 0.5, 0.5, 1.0], lighting=False)\n', (11765, 11951), True, 'import render_module as rm\n'), ((12343, 12362), 'render_module.gl.glPopMatrix', 'rm.gl.glPopMatrix', ([], {}), '()\n', (12360, 12362), True, 'import render_module as rm\n'), ((13013, 13089), 'render_module.gl.glPushAttrib', 'rm.gl.glPushAttrib', (['(rm.gl.GL_LIGHTING | rm.gl.GL_DEPTH_TEST | rm.gl.GL_BLEND)'], {}), '(rm.gl.GL_LIGHTING | rm.gl.GL_DEPTH_TEST | rm.gl.GL_BLEND)\n', (13031, 13089), True, 'import render_module as rm\n'), ((13106, 13136), 'render_module.gl.glEnable', 'rm.gl.glEnable', (['rm.gl.GL_BLEND'], {}), '(rm.gl.GL_BLEND)\n', (13120, 13136), True, 'import render_module as rm\n'), ((13157, 13226), 'render_module.bullet_render.render_contacts', 'rm.bullet_render.render_contacts', (['self._pb_client', 'sim_agent._body_id'], {}), '(self._pb_client, sim_agent._body_id)\n', (13189, 13226), True, 'import render_module as rm\n'), ((13247, 13266), 
'render_module.gl.glPopAttrib', 'rm.gl.glPopAttrib', ([], {}), '()\n', (13264, 13266), True, 'import render_module as rm\n'), ((13339, 13365), 'sim_agent.get_root_state', 'sim_agent.get_root_state', ([], {}), '()\n', (13363, 13365), False, 'import sim_agent\n'), ((13393, 13424), 'sim_agent.get_com_and_com_vel', 'sim_agent.get_com_and_com_vel', ([], {}), '()\n', (13422, 13424), False, 'import sim_agent\n'), ((13445, 13508), 'render_module.gl_render.render_arrow', 'rm.gl_render.render_arrow', (['p', '(p + v)'], {'D': '(0.01)', 'color': '[0, 0, 0, 1]'}), '(p, p + v, D=0.01, color=[0, 0, 0, 1])\n', (13470, 13508), True, 'import render_module as rm\n'), ((13571, 13647), 'render_module.gl.glPushAttrib', 'rm.gl.glPushAttrib', (['(rm.gl.GL_LIGHTING | rm.gl.GL_DEPTH_TEST | rm.gl.GL_BLEND)'], {}), '(rm.gl.GL_LIGHTING | rm.gl.GL_DEPTH_TEST | rm.gl.GL_BLEND)\n', (13589, 13647), True, 'import render_module as rm\n'), ((13664, 13694), 'render_module.gl.glEnable', 'rm.gl.glEnable', (['rm.gl.GL_BLEND'], {}), '(rm.gl.GL_BLEND)\n', (13678, 13694), True, 'import render_module as rm\n'), ((13914, 13933), 'render_module.gl.glPopAttrib', 'rm.gl.glPopAttrib', ([], {}), '()\n', (13931, 13933), True, 'import render_module as rm\n'), ((15023, 15053), 'numpy.random.uniform', 'np.random.uniform', (['(0.1)', '(0.3)', '(3)'], {}), '(0.1, 0.3, 3)\n', (15040, 15053), True, 'import numpy as np\n'), ((10521, 10565), 'numpy.full', 'np.full', (['(3)', 'agent._char_info.noise_vel[j][0]'], {}), '(3, agent._char_info.noise_vel[j][0])\n', (10528, 10565), True, 'import numpy as np\n'), ((10593, 10637), 'numpy.full', 'np.full', (['(3)', 'agent._char_info.noise_vel[j][1]'], {}), '(3, agent._char_info.noise_vel[j][1])\n', (10600, 10637), True, 'import numpy as np\n'), ((10665, 10709), 'numpy.full', 'np.full', (['(3)', 'agent._char_info.noise_vel[j][2]'], {}), '(3, agent._char_info.noise_vel[j][2])\n', (10672, 10709), True, 'import numpy as np\n'), ((10737, 10781), 'numpy.full', 'np.full', (['(3)', 
'agent._char_info.noise_vel[j][3]'], {}), '(3, agent._char_info.noise_vel[j][3])\n', (10744, 10781), True, 'import numpy as np\n'), ((11470, 11516), 'fairmotion.ops.math.projectionOnVector', 'math.projectionOnVector', (['d', 'char_info.v_up_env'], {}), '(d, char_info.v_up_env)\n', (11493, 11516), False, 'from fairmotion.ops import math\n'), ((13770, 13815), 'sim_agent.get_facing_transform', 'sim_agent.get_facing_transform', (['ground_height'], {}), '(ground_height)\n', (13800, 13815), False, 'import sim_agent\n')] |
import logging
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.getcwd(), "../../../")))
sys.path.insert(0, os.path.abspath(os.path.join(os.getcwd(), "../../../../FedML")))
try:
from fedml_core.distributed.client.client_manager import ClientManager
from fedml_core.distributed.communication.message import Message
except ImportError:
from FedML.fedml_core.distributed.client.client_manager import ClientManager
from FedML.fedml_core.distributed.communication.message import Message
from .message_define import MyMessage
from .utils import random_matrix,transform_list_to_tensor, post_complete_message_to_sweep_process
from .GoWrappers import *
import numpy as np
import time
class FedAVGClientManager(ClientManager):
    """Client-side message manager for FedAvg with encrypted model uploads.

    Handles the client's role in a multi-party protocol driven by MPI
    messages: Shamir-share exchange between clients, collective public-key
    setup with the server, local training, optional random-projection
    compression with error feedback, encryption of the model update
    (via the Go wrappers imported with ``from .GoWrappers import *``),
    and participation in collective decryption (PCKS shares).
    """
    def __init__(self,trainer,worker_num,robust,log_degree, log_scale, resiliency,params_count,args, comm, rank, size, backend="MPI"):
        """Set up protocol state, compression parameters and per-worker flags.

        trainer: local model trainer (provides train/update_model/update_dataset).
        worker_num: number of participating clients.
        robust: whether the robust (Shamir-share based) key setup is used.
        log_degree, log_scale, resiliency: crypto parameters passed to the Go wrappers.
        params_count: flattened model parameter count.
        """
        super().__init__(args, comm, rank, size, backend)
        self.worker_num = worker_num
        self.num_rounds = args.comm_round
        self.robust = robust
        self.status = 1
        # NOTE(review): this branch assigns the same value as above, so
        # `status` is always 1 regardless of `robust` — confirm intent.
        if not self.robust:
            self.status = 1
        self.log_degree = log_degree
        self.log_scale = log_scale
        self.resiliency = resiliency
        self.trainer = trainer
        self.params_count = params_count
        #print("params_count",params_count)
        # Shamir shares received from (or generated for) this client.
        self.shamirshare_list = []
        self.SSstr = None
        self.collective_shamirshare = dict()
        # Per-worker flag: has worker idx delivered its Shamir share yet?
        self.flag_shamirshare_uploaded_dict = dict()
        for idx in range(self.worker_num):
            self.flag_shamirshare_uploaded_dict[idx] = False
        self.compression = args.compression
        self.rate = args.compression_rate
        if self.compression == 0:
            # no compression: effective rate is 1 (samples == params_count)
            self.rate = 1.0
        self.samples = int(self.params_count / self.rate)
        # Error-feedback accumulator for the compression scheme.
        self.error = np.zeros((self.params_count,1))
        self.alpha = args.compression_alpha
        self.beta = 1 / self.alpha / (self.rate + 1 + 1 / self.alpha)
    def register_message_receive_handlers(self):
        """Wire each incoming message type to its handler method."""
        self.register_message_receive_handler(MyMessage.MSG_TYPE_S2C_PUBLIC_KEY_TO_CLIENT,self.handle_message_public_key_from_server)
        self.register_message_receive_handler(MyMessage.MSG_TYPE_S2C_INIT_CONFIG,self.handle_message_init)
        self.register_message_receive_handler(MyMessage.MSG_TYPE_S2C_SEND_AGGR_ENCRYPTED_MODEL,self.handle_message_enc_aggregated_model_from_server)
        self.register_message_receive_handler(MyMessage.MSG_TYPE_S2C_SEND_DECRYPTION_INFO,self.handle_message_decryption_info_from_server)
        self.register_message_receive_handler(MyMessage.MSG_TYPE_S2C_SYNC_MODEL_TO_CLIENT,self.handle_message_receive_model_from_server)
        self.register_message_receive_handler(MyMessage.MSG_TYPE_C2C_SEND_PROCESSED_SS,self.handle_message_shamirshares)
    def run(self):
        """Start the underlying ClientManager message loop."""
        super().run()
    def handle_message_shamirshares(self,msg_params):
        """Collect a Shamir share from another client; once all workers have
        reported, build the collective share string and send this client's
        CPK to the server."""
        sender_id = msg_params.get(MyMessage.MSG_ARG_KEY_SENDER)
        #logging.info("handle_message_client %d receive_ss_from_client %d."% (self.get_sender_id(),sender_id))
        shamirshares = msg_params.get(MyMessage.MSG_ARG_KEY_SS)
        self.flag_shamirshare_uploaded_dict[sender_id-1] = True
        self.collective_shamirshare[sender_id-1] = shamirshares
        all_received = self.check_whether_all_receive()
        self.shamirshare_list.append(shamirshares)
        if all_received:
            # Concatenate all shares, ':'-separated, newline-terminated,
            # as expected by the Go wrapper below.
            collecitve_shamirshare = ':'.join(self.shamirshare_list)
            collecitve_shamirshare += "\n"
            #print("gen css of client", self.get_sender_id())
            self.SSstr = genShamirShareString_robust(collecitve_shamirshare, self.worker_num, self.log_degree,self.log_scale)
            #print("gen shamirshare string")
            self.send_message_CPK_to_server(0,self.CPK)
    #def handle_message_receive_model_from_server(self):
    def handle_message_receive_model_from_server(self, msg_params):
        """Apply the synced global model, advance the round counter, train,
        and finish when the final round is reached."""
        #logging.info("handle_message_receive_model_from_server.")
        model_params = msg_params.get(MyMessage.MSG_ARG_KEY_MODEL_PARAMS)
        client_index = msg_params.get(MyMessage.MSG_ARG_KEY_CLIENT_INDEX)
        if self.args.is_mobile == 1:
            # mobile clients ship params as plain lists; convert to tensors
            model_params = transform_list_to_tensor(model_params)
        self.trainer.update_model(model_params)
        self.trainer.update_dataset(int(client_index))
        self.round_idx += 1
        self.__train(self.round_idx)
        if self.round_idx == self.num_rounds - 1:
            # post_complete_message_to_sweep_process(self.args)
            self.finish()
    def handle_message_init(self, msg_params):
        """Receive the initial global model and start round 0 of training."""
        global_model_params = msg_params.get(MyMessage.MSG_ARG_KEY_MODEL_PARAMS)
        client_index = msg_params.get(MyMessage.MSG_ARG_KEY_CLIENT_INDEX)
        if self.args.is_mobile == 1:
            global_model_params = transform_list_to_tensor(global_model_params)
        self.trainer.update_model(global_model_params)
        self.trainer.update_dataset(int(client_index))
        self.round_idx = 0
        self.__train(self.round_idx)
    def check_whether_all_receive(self):
        """Return True iff every worker's share flag is set; on success the
        flags are reset for the next collection cycle."""
        for idx in range(self.worker_num):
            if not self.flag_shamirshare_uploaded_dict[idx]:
                return False
        for idx in range(self.worker_num):
            self.flag_shamirshare_uploaded_dict[idx] = False
        return True
    def __train(self,round_idx):
        """Run one local training round, optionally compress the update with
        a random projection (with error feedback), encrypt it and upload."""
        logging.info("#######training########### round_id = %d" % self.round_idx)
        weights, local_sample_num = self.trainer.train(self.round_idx)
        #print(weights[0:10])
        weights = weights.reshape(-1,1)
        # Add the residual from previous compression rounds (error feedback).
        error_compensated = weights + self.error
        if self.compression==1:
            # Seeding with round_idx keeps the projection matrix identical
            # across clients for the same round.
            phi = random_matrix(self.alpha/2/self.samples, self.samples,self.params_count,seed = round_idx)
            compressed = self.beta * phi.dot(error_compensated)
            recov = phi.transpose().dot(compressed)
            self.error = error_compensated - recov
        else:
            compressed = weights
        enc_weights = self.encrypt(compressed)
        self.send_model_to_server(0, enc_weights, local_sample_num)
    def handle_message_decryption_info_from_server(self,msg_params):
        """If selected for decryption, generate and upload a PCKS share."""
        decryptionParticipation = msg_params.get(MyMessage.MSG_ARG_KEY_DECRYPTION_PARTICIPATION)
        decryptionCoefficients = msg_params.get(MyMessage.MSG_ARG_KEY_DECRYPTION_COEFFI)
        if decryptionParticipation == 1:
            tpk = msg_params.get(MyMessage.MSG_ARG_KEY_TPK)
            PCKSShare = genPCKSShare(self.enc_aggregated_model,tpk,self.SSstr, decryptionCoefficients, self.samples, self.robust, self.log_degree, self.log_scale)
            self.send_PCKS_share_to_server(PCKSShare)
    def handle_message_public_key_from_server(self,msg_params):
        """Store the collective public key and report setup-phase completion."""
        # self.init is set when key setup started (send_SS / send_pk_to_server).
        print("Setup Phase time", time.time() - self.init)
        self.pk = msg_params.get(MyMessage.MSG_ARG_KEY_PUBLIC_KEY)
        self.send_message_phase1_done_to_server()
    def send_message_phase1_done_to_server(self):
        """Tell the server this client finished phase 1 of key setup."""
        message = Message(MyMessage.MSG_TYPE_C2S_PHASE1_DONE, self.get_sender_id(), 0)
        message.add_params(MyMessage.MSG_ARG_KEY_PHASE1_FLAG, "1")
        self.send_message(message)
    def send_PCKS_share_to_server(self,PCKS_shair):
        """Upload this client's PCKS (collective decryption) share."""
        message = Message(MyMessage.MSG_TYPE_C2S_PCKS_SHARE, self.get_sender_id(), 0)
        message.add_params(MyMessage.MSG_ARG_KEY_PCKS_SHARE, PCKS_shair)
        self.send_message(message)
    def handle_message_enc_aggregated_model_from_server(self,msg_params):
        """Cache the encrypted aggregated model and announce liveness."""
        #client_index = msg_params.get(MyMessage.MSG_ARG_KEY_CLIENT_INDEX)
        self.enc_aggregated_model = msg_params.get(MyMessage.MSG_ARG_KEY_ENCRYPTED_MODEL_PARAMS)
        self.announce_liveness_status()
    def announce_liveness_status(self):
        """Report this client's liveness status (always 1 here) to the server."""
        message = Message(MyMessage.MSG_TYPE_C2S_SEND_LIVENESS_STATUS, self.get_sender_id(), 0)
        message.add_params(MyMessage.MSG_ARG_KEY_LIVENESS_STATUS,self.status)
        self.send_message(message)
    def send_SS(self):
        """Robust setup: generate Shamir shares, keep this client's own share
        and distribute the others to their respective clients."""
        self.init = time.time()
        ShamirShares, self.CPK = genShamirShares(self.worker_num,self.log_degree,self.log_scale, self.resiliency)
        ShamirShares = ShamirShares.decode()
        # Format produced by the wrapper: "<party>/<share>:<party>/<share>:...:"
        sharesArr = ShamirShares.split(':')
        assert len(sharesArr)-1==self.worker_num
        for partyCntr in range(self.worker_num):
            sharedParts = sharesArr[partyCntr].split('/')
            assert len(sharedParts)==2
            if int(sharedParts[0])+1 == self.get_sender_id():
                # This share belongs to us: record it locally.
                self.flag_shamirshare_uploaded_dict[int(sharedParts[0])] = True
                self.collective_shamirshare[int(sharedParts[0])] = sharedParts[1]
                self.shamirshare_list.append(sharedParts[1])
            else:
                self.send_message_ShamirShares(int(sharedParts[0])+1,sharedParts[1])
    def send_pk_to_server(self):
        """Non-robust setup: generate a collective key share and upload it."""
        self.init = time.time()
        CPK, self.SSstr= genCollectiveKeyShare_not_robust(self.worker_num,self.log_degree,self.log_scale, self.resiliency)
        self.send_message_CPK_to_server(0,CPK)
    def send_message_ShamirShares(self, receive_id, ShamirShares):
        """Forward a Shamir share to another client (client-to-client)."""
        message = Message(MyMessage.MSG_TYPE_C2C_SEND_PROCESSED_SS, self.get_sender_id(), receive_id)
        message.add_params(MyMessage.MSG_ARG_KEY_SS, ShamirShares)
        self.send_message(message)
    def send_message_CPK_to_server(self, receive_id, CPK):
        """Upload this client's collective public key share to the server."""
        #logging.info("send_message_CPK_to_server. receive_id = %d" % receive_id)
        message = Message(MyMessage.MSG_TYPE_C2S_SEND_CPK_TO_SERVER, self.get_sender_id(), receive_id)
        message.add_params(MyMessage.MSG_ARG_KEY_CPK, CPK)
        self.send_message(message)
    def send_model_to_server(self, receive_id, weights, local_sample_num):
        """Upload the encrypted model update together with the sample count."""
        message = Message(MyMessage.MSG_TYPE_C2S_SEND_ENC_MODEL_TO_SERVER, self.get_sender_id(), receive_id)
        message.add_params(MyMessage.MSG_ARG_KEY_ENCRYPTED_MODEL_PARAMS, weights)
        message.add_params(MyMessage.MSG_ARG_KEY_NUM_SAMPLES, local_sample_num)
        self.send_message(message)
    def encrypt(self,weights):
        """Encrypt a flattened weight vector with the module-level `encrypt`
        wrapper (star-imported from GoWrappers) and return the ciphertext."""
        ct = encrypt(weights.reshape(-1), self.pk, self.SSstr, self.robust,self.log_degree, self.log_scale, self.resiliency)
        return ct
| [
"numpy.zeros",
"os.getcwd",
"logging.info",
"time.time"
] | [((1855, 1887), 'numpy.zeros', 'np.zeros', (['(self.params_count, 1)'], {}), '((self.params_count, 1))\n', (1863, 1887), True, 'import numpy as np\n'), ((5430, 5503), 'logging.info', 'logging.info', (["('#######training########### round_id = %d' % self.round_idx)"], {}), "('#######training########### round_id = %d' % self.round_idx)\n", (5442, 5503), False, 'import logging\n'), ((8057, 8068), 'time.time', 'time.time', ([], {}), '()\n', (8066, 8068), False, 'import time\n'), ((8910, 8921), 'time.time', 'time.time', ([], {}), '()\n', (8919, 8921), False, 'import time\n'), ((85, 96), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (94, 96), False, 'import os\n'), ((161, 172), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (170, 172), False, 'import os\n'), ((6839, 6850), 'time.time', 'time.time', ([], {}), '()\n', (6848, 6850), False, 'import time\n')] |
import os
from pathlib import Path
import aiofiles
import tempfile
import sys
from aionetworking.compatibility import py37
# Application name; also used to derive TEMPDIR / APP_HOME in __getattr__ below.
APP_NAME = 'AIONetworking'
# Async file opener used throughout the package.
FILE_OPENER = aiofiles.open
# Mutable application-wide configuration store, empty by default.
APP_CONFIG = {}
def __getattr__(name):
    """Lazily compute module attributes (PEP 562 module ``__getattr__``).

    Supported names:
        TEMPDIR:  ``<system temp dir>/<APP_NAME without spaces>``
        APP_HOME: ``%appdata%/<APP_NAME>`` (falls back to the user's home
                  directory when the ``appdata`` environment variable is
                  unset).

    The resolved directory is created (with parents) before it is returned.

    Raises:
        AttributeError: for any other name, as the module ``__getattr__``
            protocol requires.  (The previous version silently returned
            ``None``, which broke ``hasattr`` and hid typos.)
    """
    if name == 'TEMPDIR':
        path = Path(tempfile.gettempdir()) / sys.modules[__name__].APP_NAME.replace(" ", "")
    elif name == 'APP_HOME':
        path = Path(os.environ.get('appdata', Path.home()), sys.modules[__name__].APP_NAME)
    else:
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
    path.mkdir(parents=True, exist_ok=True)
    return path
# On Python < 3.7 the module-level __getattr__ above is not honoured
# natively; Pep562 backports that behaviour for this module.
if not py37:
    from pep562 import Pep562
    Pep562(__name__)
| [
"tempfile.gettempdir",
"pathlib.Path.home",
"pep562.Pep562"
] | [((607, 623), 'pep562.Pep562', 'Pep562', (['__name__'], {}), '(__name__)\n', (613, 623), False, 'from pep562 import Pep562\n'), ((283, 304), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (302, 304), False, 'import tempfile\n'), ((431, 442), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (440, 442), False, 'from pathlib import Path\n')] |
from django.contrib import admin
from .models import Profile
# register the user
class ProfileAdmin(admin.ModelAdmin):
    """Admin configuration for Profile: id and user_status are shown read-only."""
    readonly_fields = ('id','user_status')
# Expose Profile in the Django admin with the configuration above.
admin.site.register(Profile, ProfileAdmin)
"django.contrib.admin.site.register"
] | [((163, 205), 'django.contrib.admin.site.register', 'admin.site.register', (['Profile', 'ProfileAdmin'], {}), '(Profile, ProfileAdmin)\n', (182, 205), False, 'from django.contrib import admin\n')] |
# -*- coding: utf-8 -*-
"""
Created on Wed May 13 12:25:22 2020
@author: MCARAYA
"""
__version__ = '0.5.20-06-05'
import os
from shutil import copyfile
from datafiletoolbox import extension
from gpx_reader.calculate import MD5
def rename(fullPath, newName):
    """Rename the file or folder at *fullPath* to *newName*, inside the
    same original directory when *newName* is a bare filename.

    Parameters
    ----------
    fullPath : str
        full path to the file or folder to be renamed.
    newName : str
        either a full path, or simply 'filename.extension' (resolved in
        fullPath's directory).

    Returns
    -------
    bool
        True if successfully renamed, False if the OS rename call failed.

    Raises
    ------
    FileNotFoundError
        if fullPath does not exist.
    TypeError
        if newName is neither an absolute path nor a bare filename.
    """
    # Normalise the source path once instead of calling extension() twice.
    src = extension(fullPath)[3]
    if not os.path.exists(src):
        raise FileNotFoundError(fullPath)
    dst = extension(newName)[3]
    if not os.path.isabs(dst):
        dst_parts = extension(dst)
        if dst_parts[2] == '':
            # Bare filename: keep it in the source's directory
            # (directory + name + extension, per extension()'s layout).
            dst = extension(src)[2] + dst_parts[1] + dst_parts[0]
        else:
            raise TypeError(' newName must be a full path or simply a filename.extension')
    try:
        os.rename(src, dst)
        return True
    except OSError:
        # Narrowed from a bare `except:`; OS-level failures still return
        # False, but SystemExit/KeyboardInterrupt are no longer swallowed.
        return False
def copy(source,destination,MD5_check=True) :
    """
    Copy *source* to *destination*, optionally verifying the copy with an
    MD5 checksum, retrying up to three times.

    Parameters
    ----------
    source : a string
        the fullpath of the source file or directory to be copied.
    destination : a string
        the fullpath of the destination,
        if destination is a directory the same filename will be used.
    MD5_check : True or False
        True will check the MD5 of the copied file matches the MD5 of the source file.
        False will not check MD5.

    Returns
    -------
    the MD5 string of the copied file if MD5_check=True and the copy succeeded,
    True if MD5_check=False and the copy succeeded,
    False if the copy failed.
    """
    # Make sure the destination directory exists before copying.
    prepareFolder( extension(destination)[2] )
    ### attempt to copy the file and calculate checksum
    if MD5_check :
        MD5_flag = False
        attempt = 0
        # maximum three attemps
        while attempt < 3 and MD5_flag == False :
            # `Failure` tracks which stage failed so the outer except can
            # decide whether to count the attempt.
            Failure = 'COPY'
            try : # try to copy and calculate checksum
                # try to copy, message will contain the destination path
                if Failure == 'COPY' :
                    message = copyfile(source, destination)
                # calculate MD5 for the original file
                try :
                    MD5_str = MD5(source)
                    # calculate MD5 for the copied file and compare to the original's one
                    if MD5_str == MD5(destination) : # if OK, write to log and exit loop
                        MD5_flag = True
                        Failure = ''
                    else : # if MD5s does not match, count +1 and try again
                        attempt += 1
                        MD5_flag = False
                        MD5_str = ''
                        Failure = 'COPY'
                except :
                    # NOTE(review): when MD5() raises, only `Failure` is set and
                    # `attempt` is NOT incremented here (the outer except never
                    # fires because this inner except handled the error), so a
                    # persistently failing MD5 can loop indefinitely — confirm.
                    Failure = 'MD5'
            except : # if try fails (mostly I/O issues during the copy or reading for the MD5)
                if Failure == 'COPY' :
                    attempt += 1
                elif Failure == 'MD5' :
                    attempt += 1
                else :
                    pass
        if MD5_flag :
            return MD5_str
        else :
            return False
    else : # not MD5_check
        copy_flag = False
        attempt = 0
        # maximum three attemps
        while attempt < 3 and copy_flag == False :
            Failure = 'COPY'
            try : # try to copy
                # try to copy, message will contain the destination path
                if Failure == 'COPY' :
                    message = copyfile(source, destination)
                    copy_flag = True
                    Failure = ''
            except : # if try fails (mostly I/O issues during the copy or reading for the MD5)
                if Failure == 'COPY' :
                    attempt += 1
                else :
                    pass
        return copy_flag
def delete(fullpath):
    """Remove the file at *fullpath*.

    Parameters
    ----------
    fullpath : str
        full path of the file to remove.

    Returns
    -------
    bool
        True if the file was removed, False if the OS call failed
        (e.g. the file does not exist or permission is denied).
    """
    try:
        os.remove(fullpath)
        return True
    except OSError:
        # Narrowed from a bare `except:` so programming errors (TypeError,
        # KeyboardInterrupt, ...) are no longer silently reported as False.
        return False
def move(source, destination, MD5_check=True):
    """Move a file: copy it to *destination* and, on success, delete *source*.

    Parameters
    ----------
    source : str
        the fullpath of the source file or directory to be copied.
    destination : str
        the fullpath of the destination,
        if destination is a directory the same filename will be used.
    MD5_check : bool
        when True the copy is verified with an MD5 checksum.

    Returns
    -------
    tuple
        (copy_result, delete_result) where:
        copy_result is the MD5 string (MD5_check=True) or True if the copy
        succeeded, False if it failed;
        delete_result is True/False for the delete of *source*, or None
        when the copy failed and no delete was attempted.
    """
    cp = copy(source, destination, MD5_check)
    if type(cp) is str or cp is True:
        dl = delete(source)
        print('MOVE', cp, dl)
        return (cp, dl)
    else:
        # Copy failed: nothing was deleted.  (Bug fix: the old code printed
        # `dl` here, which was never assigned in this branch and raised
        # NameError instead of returning the documented (cp, None).)
        print('MOVE', cp, None)
        return (cp, None)
def prepareFolder(folderPath) :
    """Create *folderPath* (level by level) if it does not already exist.

    NOTE(review): the loop hard-codes a start index of 5, i.e. it assumes
    the first five '/'-separated components of the path already exist
    (the '//danas/...' share mentioned below).  Also, because range() stops
    before len(parts), the final component is only created when folderPath
    ends with a trailing '/' — confirm callers always pass one.
    """
    ### Create target Directory if don't exist
    # check if that folder already exists
    if not os.path.exists(folderPath): # does not exist
        # level by level check that folders exists
        for i in range(5, len(folderPath.split('/'))) : # starts from the 5th position on the full path because the first five items must exist: '//danas/Dig_Trans_Proj/DATALAKE_EP_PRE_INGESTION/'
            checkPath = '/'.join( folderPath.split('/')[:i] )
            if not os.path.exists( checkPath ) :
                os.mkdir(checkPath)
        print("Directory " , folderPath , " Created ")
        # NOTE(review): dirMsg is assigned but never returned or used.
        dirMsg = "Directory '" + folderPath + "' Created "
    else: # already exists
        print("Directory " , folderPath , " already exists")
        dirMsg = "Directory '" + folderPath + " already exists"
| [
"os.path.exists",
"gpx_reader.calculate.MD5",
"os.path.isabs",
"os.rename",
"datafiletoolbox.extension",
"shutil.copyfile",
"os.mkdir",
"os.remove"
] | [((787, 805), 'datafiletoolbox.extension', 'extension', (['newName'], {}), '(newName)\n', (796, 805), False, 'from datafiletoolbox import extension\n'), ((820, 842), 'os.path.isabs', 'os.path.isabs', (['newName'], {}), '(newName)\n', (833, 842), False, 'import os\n'), ((1108, 1136), 'os.rename', 'os.rename', (['fullPath', 'newName'], {}), '(fullPath, newName)\n', (1117, 1136), False, 'import os\n'), ((4261, 4280), 'os.remove', 'os.remove', (['fullpath'], {}), '(fullpath)\n', (4270, 4280), False, 'import os\n'), ((5363, 5389), 'os.path.exists', 'os.path.exists', (['folderPath'], {}), '(folderPath)\n', (5377, 5389), False, 'import os\n'), ((652, 671), 'datafiletoolbox.extension', 'extension', (['fullPath'], {}), '(fullPath)\n', (661, 671), False, 'from datafiletoolbox import extension\n'), ((697, 716), 'datafiletoolbox.extension', 'extension', (['fullPath'], {}), '(fullPath)\n', (706, 716), False, 'from datafiletoolbox import extension\n'), ((1840, 1862), 'datafiletoolbox.extension', 'extension', (['destination'], {}), '(destination)\n', (1849, 1862), False, 'from datafiletoolbox import extension\n'), ((856, 874), 'datafiletoolbox.extension', 'extension', (['newName'], {}), '(newName)\n', (865, 874), False, 'from datafiletoolbox import extension\n'), ((5737, 5762), 'os.path.exists', 'os.path.exists', (['checkPath'], {}), '(checkPath)\n', (5751, 5762), False, 'import os\n'), ((5783, 5802), 'os.mkdir', 'os.mkdir', (['checkPath'], {}), '(checkPath)\n', (5791, 5802), False, 'import os\n'), ((957, 975), 'datafiletoolbox.extension', 'extension', (['newName'], {}), '(newName)\n', (966, 975), False, 'from datafiletoolbox import extension\n'), ((2312, 2341), 'shutil.copyfile', 'copyfile', (['source', 'destination'], {}), '(source, destination)\n', (2320, 2341), False, 'from shutil import copyfile\n'), ((2448, 2459), 'gpx_reader.calculate.MD5', 'MD5', (['source'], {}), '(source)\n', (2451, 2459), False, 'from gpx_reader.calculate import MD5\n'), ((3772, 3801), 
'shutil.copyfile', 'copyfile', (['source', 'destination'], {}), '(source, destination)\n', (3780, 3801), False, 'from shutil import copyfile\n'), ((908, 927), 'datafiletoolbox.extension', 'extension', (['fullPath'], {}), '(fullPath)\n', (917, 927), False, 'from datafiletoolbox import extension\n'), ((933, 951), 'datafiletoolbox.extension', 'extension', (['newName'], {}), '(newName)\n', (942, 951), False, 'from datafiletoolbox import extension\n'), ((2584, 2600), 'gpx_reader.calculate.MD5', 'MD5', (['destination'], {}), '(destination)\n', (2587, 2600), False, 'from gpx_reader.calculate import MD5\n')] |
# -*- coding: utf-8 -*-
import pandas as pd
import re
import pickle
from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer
import numpy as np
from dl_architecture import make_charvec, build_model
from keras.callbacks import ModelCheckpoint
from keras import backend as K
from sklearn.preprocessing import Normalizer
from sklearn import pipeline
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn import preprocessing
from sklearn.metrics import f1_score, accuracy_score, confusion_matrix
from collections import defaultdict
from bm25 import BM25Transformer
import gc
def remove_email(text, replace_token):
    """Replace every e-mail-like token in *text* with *replace_token*."""
    email_pattern = re.compile(r'[\w\.-]+@[\w\.-]+')
    return email_pattern.sub(replace_token, text)
def remove_url(text, replace_token):
    """Replace every http/https URL in *text* with *replace_token*."""
    url_pattern = re.compile(
        'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+')
    return url_pattern.sub(replace_token, text)
def preprocess(df_data):
    """Add a 'text_clean' column to *df_data*: the 'text' column with URLs
    replaced by HTTPURL and e-mail addresses replaced by EMAIL.  Mutates
    and returns the same DataFrame."""
    cleaned = df_data['text'].map(lambda raw: remove_url(raw, "HTTPURL"))
    cleaned = cleaned.map(lambda raw: remove_email(raw, 'EMAIL'))
    df_data['text_clean'] = cleaned
    return df_data
def preprocess_data(df_data, target, drop, tags_to_idx=None):
    """Clean, shuffle and label-encode a corpus.

    Parameters
    ----------
    df_data : pandas.DataFrame
        corpus with a 'text' column and a *target* label column.
    target : str
        name of the label column.
    drop : list
        additional columns to drop (may be empty).
    tags_to_idx : list, optional
        fixed label ordering used for encoding; when omitted or empty it is
        derived from the labels present in *target*.  (Default changed from
        a mutable ``[]`` to ``None`` — behaviour is unchanged, the shared
        list was a latent bug.)

    Returns
    -------
    tuple
        (df_data, y, tags_to_idx): feature frame without the label (and
        *drop*) columns, integer-encoded label array, and the label list
        used for encoding.
    """
    df_data = preprocess(df_data)
    # shuffle the corpus deterministically; keep only a chunk here if the
    # whole corpus is too slow to process
    df_data = df_data.sample(frac=1, random_state=1)
    tags = df_data[target].tolist()
    if not tags_to_idx:
        # NOTE: set iteration order is not stable across interpreter runs,
        # so a derived tags_to_idx should be persisted for later reuse.
        tags_to_idx = list(set(tags))
    df_data = df_data.drop([target], axis=1)
    if len(drop) > 0:
        df_data = df_data.drop(drop, axis=1)
    y = np.array([tags_to_idx.index(tmp_y) for tmp_y in tags])
    return df_data, y, tags_to_idx
class text_col(BaseEstimator, TransformerMixin):
    """Scikit-learn transformer that selects a single column by key,
    for use inside a Pipeline/FeatureUnion."""
    def __init__(self, key):
        self.key = key
    def fit(self, x, y=None):
        # Stateless: nothing to learn.
        return self
    def transform(self, data_dict):
        # Return the configured column/field of the input.
        return data_dict[self.key]
# fit and transform numeric features, used in scikit FeatureUnion
class digit_col(BaseEstimator, TransformerMixin):
    """Scikit-learn transformer that drops the text columns and min-max
    scales the remaining (numeric) columns."""
    def fit(self, x, y=None):
        # Stateless at fit time; scaling is (re)fitted per transform call.
        return self
    def transform(self, hd_searches):
        # Text-bearing columns that must not reach the numeric scaler.
        d_col_drops=['text', 'no_punctuation', 'no_stopwords', 'text_clean', 'affixes', 'affix_punct']
        hd_searches = hd_searches.drop(d_col_drops,axis=1).values
        # NOTE(review): the scaler is fitted on the data being transformed,
        # so train/test are scaled independently — confirm this is intended.
        scaler = preprocessing.MinMaxScaler().fit(hd_searches)
        return scaler.transform(hd_searches)
def train(xtrain, ytrain, xval, yval, lang, tags_to_idx, weighting):
    """Train the character-ngram + charvec Keras model for one language
    (or the 'all' language-group model).

    Fits a tfidf/bm25-weighted character n-gram pipeline on xtrain, builds
    character vectors, persists the preprocessing artefacts to
    models/model_<lang>[_bm25]_data.pk, trains the network with the best
    val_acc checkpoint saved to models/model_<lang>[_bm25]_weights.hdf5,
    and returns the trained model.

    weighting must be 'tfidf' or 'bm25'; any other value raises NameError
    when `path` is first used.
    """
    if weighting =='tfidf':
        path = "./models/model_" + lang + "_weights.hdf5"
    elif weighting == 'bm25':
        path = "./models/model_" + lang + "_bm25_weights.hdf5"
    # Save only the weights with the best validation accuracy.
    checkpointer = ModelCheckpoint(filepath=path,
                                   verbose=1,
                                   monitor="val_acc",
                                   save_best_only=True,
                                   mode="max")
    #print("Train and dev shape: ", xtrain.shape, xval.shape)
    # NOTE(review): class counts are computed but never used afterwards.
    counts = defaultdict(int)
    for c in ytrain.tolist():
        counts[c] += 1
    # Per-language models use plain char n-grams (3-6); the combined 'all'
    # model uses word-boundary-aware char n-grams (3-5).
    if lang!='all':
        character_vectorizer = CountVectorizer(analyzer='char', ngram_range=(3,6), lowercase=False, min_df=5, max_df=0.3)
    else:
        character_vectorizer = CountVectorizer(analyzer='char_wb', ngram_range=(3,5), lowercase=False, min_df=5, max_df=0.3)
    if weighting == 'tfidf':
        transformer = TfidfTransformer(sublinear_tf=True)
    elif weighting == 'bm25':
        transformer = BM25Transformer()
    # text_clean -> char n-gram counts -> tfidf/bm25 weights -> L2 normalise.
    tfidf_matrix = pipeline.Pipeline([
        ('character', pipeline.Pipeline(
            [('s5', text_col(key='text_clean')), ('character_vectorizer', character_vectorizer),
             ('tfidf_character', transformer)])),
        ('scale', Normalizer())])
    tfidf_matrix = tfidf_matrix.fit(xtrain)
    tfidf_matrix_test = tfidf_matrix.transform(xtrain)
    print('tfidf matrix size: ', tfidf_matrix_test.shape)
    ngrams_matrix_shape = tfidf_matrix_test.shape[1]
    tfidf_matrix_val = tfidf_matrix.transform(xval)
    # Character-index sequences for the char-embedding branch of the model.
    charvec, char_vocab, max_train_len_char = make_charvec(xtrain.text_clean.tolist())
    char_vocab_size = len(char_vocab) + 2
    charvec_shape = charvec.shape[1]
    charvec_val, _, _ = make_charvec(xval.text_clean.tolist(), train=False, char_vocab=char_vocab, max_text_len=max_train_len_char)
    num_classes = len(set(yval.tolist()))
    # Everything needed to rebuild the preprocessing at test time.
    textmodel_data = ngrams_matrix_shape, num_classes, charvec_shape, char_vocab_size, tfidf_matrix, char_vocab, max_train_len_char, tags_to_idx
    if weighting == 'tfidf':
        data_path = 'models/model_' + lang + '_data.pk'
    elif weighting == 'bm25':
        data_path = 'models/model_' + lang + '_bm25_data.pk'
    with open(data_path, 'wb') as f:
        pickle.dump(textmodel_data, f, protocol=2)
    # Epoch budget per setting: small per-language models train longer;
    # 'sg'/'ar' get extra epochs; the big 'all' model gets fewer.
    if lang != 'all':
        if lang not in ['sg', 'ar']:
            num_epoch = 20
        else:
            num_epoch = 80
    else:
        num_epoch = 10
    model = build_model(ngrams_matrix_shape, num_classes, charvec_shape, char_vocab_size)
    model.fit([tfidf_matrix_test, charvec], ytrain, validation_data=([tfidf_matrix_val, charvec_val], yval), batch_size=16, epochs=num_epoch, verbose=0, callbacks=[checkpointer])
    # Free Keras/TensorFlow graph memory between runs.
    K.clear_session()
    gc.collect()
    return model
def test_trained_model(data_test, target, drop, lang, weighting):
    """Evaluate a previously trained per-language model on *data_test*.

    Loads the persisted preprocessing artefacts and best checkpoint weights
    from models/, reproduces the tfidf/bm25 + charvec features, and prints
    macro/micro/weighted F1, accuracy and the confusion matrix.
    """
    if weighting == 'tfidf':
        data_path = 'models/model_' + lang + '_data.pk'
    elif weighting == 'bm25':
        data_path = 'models/model_' + lang + '_bm25_data.pk'
    # NOTE(review): the file handle from open() is never closed here.
    textmodel_data = pickle.load(open(data_path, 'rb'))
    unigrams_shape, num_classes, charvec_shape, char_vocab_size,tfidf_matrix, char_vocab, max_train_len_char, tags_to_idx = textmodel_data
    # Encode test labels with the SAME label ordering used at train time.
    xtest, ytest, _ = preprocess_data(data_test, target, drop, tags_to_idx=tags_to_idx)
    tfidf_matrix_test = tfidf_matrix.transform(xtest)
    charvec_test, _, _ = make_charvec(xtest.text_clean.tolist(), train=False, char_vocab=char_vocab, max_text_len=max_train_len_char)
    model = build_model(unigrams_shape, num_classes, charvec_shape, char_vocab_size)
    if weighting =='tfidf':
        path = "./models/model_" + lang + "_weights.hdf5"
    elif weighting == 'bm25':
        path = "./models/model_" + lang + "_bm25_weights.hdf5"
    model.load_weights(path)
    predictions = model.predict([tfidf_matrix_test, charvec_test]).argmax(axis=-1)
    macro = str(f1_score(ytest, predictions, average='macro'))
    micro = str(f1_score(ytest, predictions, average='micro'))
    weighted = str(f1_score(ytest, predictions, average='weighted'))
    accuracy = str(accuracy_score(ytest, predictions))
    print('Test F1 macro:', macro)
    print('Test F1 micro:', micro)
    print('Test F1 weighted:', weighted)
    print('Test accuracy:', accuracy)
    print('Test confusion matrix:', confusion_matrix(ytest, predictions))
def test_all(data_test, target, drop, langs=['es','fa','fr','idmy','pt','slavic'], weighting='tfidf'):
    """Two-stage evaluation: first predict the language group with the
    'all' model, then route each example to its per-language model and
    report aggregated scores.

    Prints F1/accuracy/confusion-matrix for the group stage and for the
    combined per-language predictions.  NOTE(review): `langs` is a mutable
    default argument; it is never mutated here, but persisting that habit
    is risky.
    """
    if weighting == 'tfidf':
        data_path = 'models/model_all_data.pk'
    elif weighting == 'bm25':
        data_path = 'models/model_all_bm25_data.pk'
    textmodel_data_all = pickle.load(open(data_path, 'rb'))
    unigrams_shape, num_classes, charvec_shape, char_vocab_size, tfidf_matrix, char_vocab, max_train_len_char, group_tags_to_idx = textmodel_data_all
    xtest, ytest, _ = preprocess_data(data_test, target, drop, tags_to_idx=group_tags_to_idx)
    tfidf_matrix_test = tfidf_matrix.transform(xtest)
    charvec_test, _, _ = make_charvec(xtest.text_clean.tolist(), train=False, char_vocab=char_vocab, max_text_len=max_train_len_char)
    model = build_model(unigrams_shape, num_classes, charvec_shape, char_vocab_size)
    if weighting =='tfidf':
        path = "./models/model_all_weights.hdf5"
    elif weighting == 'bm25':
        path = "./models/model_all_bm25_weights.hdf5"
    model.load_weights(path)
    # Stage 1: language-group prediction with the combined model.
    predictions = model.predict([tfidf_matrix_test, charvec_test]).argmax(axis=-1)
    print('Test F1 macro lang group:', f1_score(ytest, predictions, average='macro'))
    print('Test F1 micro lang group:', f1_score(ytest, predictions, average='micro'))
    print('Test F1 weighted lang group:', f1_score(ytest, predictions, average='weighted'))
    print('Test accuracy lang group:', accuracy_score(ytest, predictions))
    print('Test confusion matrix lang group:', confusion_matrix(ytest, predictions))
    # Attach group predictions/labels back onto the feature frame so each
    # example can be routed to the model of its PREDICTED group.
    df_predictions = pd.DataFrame({'lang_group_pred': predictions})
    xtest.reset_index(drop=True, inplace=True)
    df_true = pd.DataFrame({'lang_group': ytest})
    df_data = pd.concat([xtest, df_true, df_predictions], axis=1)
    K.clear_session()
    gc.collect()
    # Stage 2: per-language variety prediction.
    all_predictions = []
    for lang in langs:
        lang_idx = group_tags_to_idx.index(lang)
        filtered_data = df_data.loc[df_data['lang_group_pred'] == lang_idx]
        if weighting == 'tfidf':
            data_path = 'models/model_' + lang + '_data.pk'
        elif weighting == 'bm25':
            data_path = 'models/model_' + lang + '_bm25_data.pk'
        textmodel_data = pickle.load(open(data_path, 'rb'))
        unigrams_shape, num_classes, charvec_shape, char_vocab_size, tfidf_matrix, char_vocab, max_train_len_char, tags_to_idx = textmodel_data
        tfidf_matrix_test = tfidf_matrix.transform(filtered_data).toarray()
        charvec_test, _, _ = make_charvec(filtered_data.text_clean.tolist(), train=False, char_vocab=char_vocab, max_text_len=max_train_len_char)
        model = build_model(unigrams_shape, num_classes, charvec_shape, char_vocab_size)
        if weighting == 'tfidf':
            path = "./models/model_" + lang + "_weights.hdf5"
        elif weighting == 'bm25':
            path = "./models/model_" + lang + "_bm25_weights.hdf5"
        model.load_weights(path)
        predictions = model.predict([tfidf_matrix_test, charvec_test]).argmax(axis=-1)
        # Map class indices back to label names for cross-language scoring.
        predictions = np.array([tags_to_idx[prediction] for prediction in predictions])
        df_predictions = pd.DataFrame({'predictions': predictions})
        df_predictions.reset_index(drop=True, inplace=True)
        # Ground-truth variety labels for the routed subset.
        ytest = filtered_data.variety
        df_ytest = pd.DataFrame({'y': ytest})
        df_ytest.reset_index(drop=True, inplace=True)
        results = pd.concat([df_ytest, df_predictions], axis=1)
        all_predictions.append(results)
    all_data = pd.concat(all_predictions, axis=0)
    all_y = all_data.y
    all_preds = all_data.predictions
    print('Test all macro F1 score:', f1_score(all_y, all_preds, average='macro'))
    print('Test all micro F1 score:', f1_score(all_y, all_preds, average='micro'))
    print('Test all weighted F1 score:', f1_score(all_y, all_preds, average='weighted'))
    print('Test all accuracy score:', accuracy_score(all_y, all_preds))
    print('Test all confusion matrix score:', confusion_matrix(all_y, all_preds))
| [
"bm25.BM25Transformer",
"sklearn.feature_extraction.text.TfidfTransformer",
"sklearn.metrics.f1_score",
"pickle.dump",
"keras.callbacks.ModelCheckpoint",
"sklearn.feature_extraction.text.CountVectorizer",
"numpy.array",
"collections.defaultdict",
"keras.backend.clear_session",
"dl_architecture.bui... | [((663, 715), 're.sub', 're.sub', (['"""[\\\\w\\\\.-]+@[\\\\w\\\\.-]+"""', 'replace_token', 'text'], {}), "('[\\\\w\\\\.-]+@[\\\\w\\\\.-]+', replace_token, text)\n", (669, 715), False, 'import re\n'), ((856, 890), 're.sub', 're.sub', (['regex', 'replace_token', 'text'], {}), '(regex, replace_token, text)\n', (862, 890), False, 'import re\n'), ((2710, 2808), 'keras.callbacks.ModelCheckpoint', 'ModelCheckpoint', ([], {'filepath': 'path', 'verbose': '(1)', 'monitor': '"""val_acc"""', 'save_best_only': '(True)', 'mode': '"""max"""'}), "(filepath=path, verbose=1, monitor='val_acc', save_best_only\n =True, mode='max')\n", (2725, 2808), False, 'from keras.callbacks import ModelCheckpoint\n'), ((3020, 3036), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (3031, 3036), False, 'from collections import defaultdict\n'), ((4981, 5058), 'dl_architecture.build_model', 'build_model', (['ngrams_matrix_shape', 'num_classes', 'charvec_shape', 'char_vocab_size'], {}), '(ngrams_matrix_shape, num_classes, charvec_shape, char_vocab_size)\n', (4992, 5058), False, 'from dl_architecture import make_charvec, build_model\n'), ((5243, 5260), 'keras.backend.clear_session', 'K.clear_session', ([], {}), '()\n', (5258, 5260), True, 'from keras import backend as K\n'), ((5265, 5277), 'gc.collect', 'gc.collect', ([], {}), '()\n', (5275, 5277), False, 'import gc\n'), ((6026, 6098), 'dl_architecture.build_model', 'build_model', (['unigrams_shape', 'num_classes', 'charvec_shape', 'char_vocab_size'], {}), '(unigrams_shape, num_classes, charvec_shape, char_vocab_size)\n', (6037, 6098), False, 'from dl_architecture import make_charvec, build_model\n'), ((7634, 7706), 'dl_architecture.build_model', 'build_model', (['unigrams_shape', 'num_classes', 'charvec_shape', 'char_vocab_size'], {}), '(unigrams_shape, num_classes, charvec_shape, char_vocab_size)\n', (7645, 7706), False, 'from dl_architecture import make_charvec, build_model\n'), ((8427, 8473), 
'pandas.DataFrame', 'pd.DataFrame', (["{'lang_group_pred': predictions}"], {}), "({'lang_group_pred': predictions})\n", (8439, 8473), True, 'import pandas as pd\n'), ((8535, 8570), 'pandas.DataFrame', 'pd.DataFrame', (["{'lang_group': ytest}"], {}), "({'lang_group': ytest})\n", (8547, 8570), True, 'import pandas as pd\n'), ((8585, 8636), 'pandas.concat', 'pd.concat', (['[xtest, df_true, df_predictions]'], {'axis': '(1)'}), '([xtest, df_true, df_predictions], axis=1)\n', (8594, 8636), True, 'import pandas as pd\n'), ((8642, 8659), 'keras.backend.clear_session', 'K.clear_session', ([], {}), '()\n', (8657, 8659), True, 'from keras import backend as K\n'), ((8664, 8676), 'gc.collect', 'gc.collect', ([], {}), '()\n', (8674, 8676), False, 'import gc\n'), ((10350, 10384), 'pandas.concat', 'pd.concat', (['all_predictions'], {'axis': '(0)'}), '(all_predictions, axis=0)\n', (10359, 10384), True, 'import pandas as pd\n'), ((3143, 3238), 'sklearn.feature_extraction.text.CountVectorizer', 'CountVectorizer', ([], {'analyzer': '"""char"""', 'ngram_range': '(3, 6)', 'lowercase': '(False)', 'min_df': '(5)', 'max_df': '(0.3)'}), "(analyzer='char', ngram_range=(3, 6), lowercase=False,\n min_df=5, max_df=0.3)\n", (3158, 3238), False, 'from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer\n'), ((3275, 3373), 'sklearn.feature_extraction.text.CountVectorizer', 'CountVectorizer', ([], {'analyzer': '"""char_wb"""', 'ngram_range': '(3, 5)', 'lowercase': '(False)', 'min_df': '(5)', 'max_df': '(0.3)'}), "(analyzer='char_wb', ngram_range=(3, 5), lowercase=False,\n min_df=5, max_df=0.3)\n", (3290, 3373), False, 'from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer\n'), ((3421, 3456), 'sklearn.feature_extraction.text.TfidfTransformer', 'TfidfTransformer', ([], {'sublinear_tf': '(True)'}), '(sublinear_tf=True)\n', (3437, 3456), False, 'from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer\n'), ((4764, 4806), 'pickle.dump', 
'pickle.dump', (['textmodel_data', 'f'], {'protocol': '(2)'}), '(textmodel_data, f, protocol=2)\n', (4775, 4806), False, 'import pickle\n'), ((6407, 6452), 'sklearn.metrics.f1_score', 'f1_score', (['ytest', 'predictions'], {'average': '"""macro"""'}), "(ytest, predictions, average='macro')\n", (6415, 6452), False, 'from sklearn.metrics import f1_score, accuracy_score, confusion_matrix\n'), ((6470, 6515), 'sklearn.metrics.f1_score', 'f1_score', (['ytest', 'predictions'], {'average': '"""micro"""'}), "(ytest, predictions, average='micro')\n", (6478, 6515), False, 'from sklearn.metrics import f1_score, accuracy_score, confusion_matrix\n'), ((6536, 6584), 'sklearn.metrics.f1_score', 'f1_score', (['ytest', 'predictions'], {'average': '"""weighted"""'}), "(ytest, predictions, average='weighted')\n", (6544, 6584), False, 'from sklearn.metrics import f1_score, accuracy_score, confusion_matrix\n'), ((6605, 6639), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['ytest', 'predictions'], {}), '(ytest, predictions)\n', (6619, 6639), False, 'from sklearn.metrics import f1_score, accuracy_score, confusion_matrix\n'), ((6826, 6862), 'sklearn.metrics.confusion_matrix', 'confusion_matrix', (['ytest', 'predictions'], {}), '(ytest, predictions)\n', (6842, 6862), False, 'from sklearn.metrics import f1_score, accuracy_score, confusion_matrix\n'), ((8020, 8065), 'sklearn.metrics.f1_score', 'f1_score', (['ytest', 'predictions'], {'average': '"""macro"""'}), "(ytest, predictions, average='macro')\n", (8028, 8065), False, 'from sklearn.metrics import f1_score, accuracy_score, confusion_matrix\n'), ((8106, 8151), 'sklearn.metrics.f1_score', 'f1_score', (['ytest', 'predictions'], {'average': '"""micro"""'}), "(ytest, predictions, average='micro')\n", (8114, 8151), False, 'from sklearn.metrics import f1_score, accuracy_score, confusion_matrix\n'), ((8195, 8243), 'sklearn.metrics.f1_score', 'f1_score', (['ytest', 'predictions'], {'average': '"""weighted"""'}), "(ytest, predictions, 
average='weighted')\n", (8203, 8243), False, 'from sklearn.metrics import f1_score, accuracy_score, confusion_matrix\n'), ((8284, 8318), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['ytest', 'predictions'], {}), '(ytest, predictions)\n', (8298, 8318), False, 'from sklearn.metrics import f1_score, accuracy_score, confusion_matrix\n'), ((8367, 8403), 'sklearn.metrics.confusion_matrix', 'confusion_matrix', (['ytest', 'predictions'], {}), '(ytest, predictions)\n', (8383, 8403), False, 'from sklearn.metrics import f1_score, accuracy_score, confusion_matrix\n'), ((9486, 9558), 'dl_architecture.build_model', 'build_model', (['unigrams_shape', 'num_classes', 'charvec_shape', 'char_vocab_size'], {}), '(unigrams_shape, num_classes, charvec_shape, char_vocab_size)\n', (9497, 9558), False, 'from dl_architecture import make_charvec, build_model\n'), ((9899, 9964), 'numpy.array', 'np.array', (['[tags_to_idx[prediction] for prediction in predictions]'], {}), '([tags_to_idx[prediction] for prediction in predictions])\n', (9907, 9964), True, 'import numpy as np\n'), ((9990, 10032), 'pandas.DataFrame', 'pd.DataFrame', (["{'predictions': predictions}"], {}), "({'predictions': predictions})\n", (10002, 10032), True, 'import pandas as pd\n'), ((10150, 10176), 'pandas.DataFrame', 'pd.DataFrame', (["{'y': ytest}"], {}), "({'y': ytest})\n", (10162, 10176), True, 'import pandas as pd\n'), ((10249, 10294), 'pandas.concat', 'pd.concat', (['[df_ytest, df_predictions]'], {'axis': '(1)'}), '([df_ytest, df_predictions], axis=1)\n', (10258, 10294), True, 'import pandas as pd\n'), ((10483, 10526), 'sklearn.metrics.f1_score', 'f1_score', (['all_y', 'all_preds'], {'average': '"""macro"""'}), "(all_y, all_preds, average='macro')\n", (10491, 10526), False, 'from sklearn.metrics import f1_score, accuracy_score, confusion_matrix\n'), ((10566, 10609), 'sklearn.metrics.f1_score', 'f1_score', (['all_y', 'all_preds'], {'average': '"""micro"""'}), "(all_y, all_preds, average='micro')\n", (10574, 
10609), False, 'from sklearn.metrics import f1_score, accuracy_score, confusion_matrix\n'), ((10652, 10698), 'sklearn.metrics.f1_score', 'f1_score', (['all_y', 'all_preds'], {'average': '"""weighted"""'}), "(all_y, all_preds, average='weighted')\n", (10660, 10698), False, 'from sklearn.metrics import f1_score, accuracy_score, confusion_matrix\n'), ((10739, 10771), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['all_y', 'all_preds'], {}), '(all_y, all_preds)\n', (10753, 10771), False, 'from sklearn.metrics import f1_score, accuracy_score, confusion_matrix\n'), ((10819, 10853), 'sklearn.metrics.confusion_matrix', 'confusion_matrix', (['all_y', 'all_preds'], {}), '(all_y, all_preds)\n', (10835, 10853), False, 'from sklearn.metrics import f1_score, accuracy_score, confusion_matrix\n'), ((3509, 3526), 'bm25.BM25Transformer', 'BM25Transformer', ([], {}), '()\n', (3524, 3526), False, 'from bm25 import BM25Transformer\n'), ((2350, 2378), 'sklearn.preprocessing.MinMaxScaler', 'preprocessing.MinMaxScaler', ([], {}), '()\n', (2376, 2378), False, 'from sklearn import preprocessing\n'), ((3775, 3787), 'sklearn.preprocessing.Normalizer', 'Normalizer', ([], {}), '()\n', (3785, 3787), False, 'from sklearn.preprocessing import Normalizer\n')] |
import unittest
from app.email import send_email_async
from app.models import User
from tests.test_selenium.base import TestBase
class TestSendMail(TestBase):
def test_send_mail(self):
with self.app.app_context():
new_user = User(first_name="John", last_name="Doe", email="<EMAIL>", role_id=1)
TestSendMail.db.session.add(new_user)
TestSendMail.db.session.commit()
send_email_async(
self.app,
recipient="<EMAIL>",
subject="hello world",
template="account/email/reset_password",
user=User.query.first(),
reset_link="test",
config=self.app.config
)
if __name__ == '__main__':
unittest.main()
| [
"unittest.main",
"app.models.User.query.first",
"app.models.User"
] | [((777, 792), 'unittest.main', 'unittest.main', ([], {}), '()\n', (790, 792), False, 'import unittest\n'), ((254, 322), 'app.models.User', 'User', ([], {'first_name': '"""John"""', 'last_name': '"""Doe"""', 'email': '"""<EMAIL>"""', 'role_id': '(1)'}), "(first_name='John', last_name='Doe', email='<EMAIL>', role_id=1)\n", (258, 322), False, 'from app.models import User\n'), ((629, 647), 'app.models.User.query.first', 'User.query.first', ([], {}), '()\n', (645, 647), False, 'from app.models import User\n')] |
# -*- coding: utf-8 -*-
"""
Setup for qat-qscore
"""
import importlib
from setuptools import setup, find_packages
def detect_if_qlm():
"""
Detects if this setup is run in a complete QLM environement.
If not, we will need to add myQLM to the dependencies of the package.
"""
try:
importlib.import_module("qat.linalg")
print("=> Detected a QLM installation. <=")
return True
except ModuleNotFoundError:
print("=> No QLM installation detected, adding myQLM to the dependencies. <=")
return False
setup(
name="qat-qscore",
description="QScore implementation based on Atos' qat framework.",
version="0.0.1",
packages=find_packages(include=["qat.*"]),
install_requires=[] if detect_if_qlm() else ["myqlm"],
)
| [
"setuptools.find_packages",
"importlib.import_module"
] | [((309, 346), 'importlib.import_module', 'importlib.import_module', (['"""qat.linalg"""'], {}), "('qat.linalg')\n", (332, 346), False, 'import importlib\n'), ((692, 724), 'setuptools.find_packages', 'find_packages', ([], {'include': "['qat.*']"}), "(include=['qat.*'])\n", (705, 724), False, 'from setuptools import setup, find_packages\n')] |
from RNNs import QIFExpAddNoiseSyns
import numpy as np
import pickle
import matplotlib.pyplot as plt
from scipy.ndimage import gaussian_filter1d
# STEP 0: Define simulation condition
#####################################
# parse worker indices from script arguments
idx_cond = 570
# STEP 1: Load pre-generated RNN parameters
###########################################
path = "/home/rgast/PycharmProjects/BrainNetworks/RC/results"
config = pickle.load(open(f"{path}/qif_micro_config.pkl", 'rb'))
# connectivity matrix
C = config['C']
# input
inp = config['inp']
# input weights
W_in = config['W_in']
# simulation config
T = config['T']
dt = config['dt']
dts = config['dts']
cutoff = config['cutoff']
# target values
targets = config['targets']
# adaptation strength
alpha = 0.5 # config['alphas'][idx_cond]
# eta
eta = -3.8 # config['etas'][idx_cond]
# STEP 2: define remaining network parameters
#############################################
# general parameters
N = C.shape[0]
m = W_in.shape[0]
n_folds = 5
ridge_alpha = 1e-3
# qif parameters
Delta = 2.0
J = 15.0*np.sqrt(Delta)
D = 0.0
tau_a = 10.0
tau_s = 0.8
# STEP 3: Evaluate classification performance of RNN
####################################################
# setup QIF RNN
qif_rnn = QIFExpAddNoiseSyns(C, eta, J, Delta=Delta, alpha=alpha, D=D, tau_s=tau_s, tau_a=tau_a)
# perform simulation
W_in[:, :] = 0.0
X = qif_rnn.run(T, dt, dts, inp=inp, W_in=W_in, state_record_key='t1', cutoff=cutoff)
r_qif = np.mean(X, axis=1)
# prepare training data
buffer_val = 0
for i in range(X.shape[1]):
X[:, i] = gaussian_filter1d(X[:, i], 0.05 / dts, mode='constant', cval=buffer_val)
y = targets
r_qif2 = np.mean(X, axis=1)
# split into test and training data
split = int(np.round(X.shape[0]*0.75, decimals=0))
X_train = X[:split, :]
y_train = y[:split]
X_test = X[split:, :]
y_test = y[split:]
# train RNN
key, scores, coefs = qif_rnn.ridge_fit(X=X_train, y=y_train, alpha=ridge_alpha, k=n_folds, fit_intercept=False, copy_X=True,
solver='lsqr')
score, _ = qif_rnn.test(X=X_test, y=y_test, readout_key=key)
y_predict = qif_rnn.predict(X=X, readout_key=key)
print(f"Classification performance on test data: {score}")
# plotting
fig, axes = plt.subplots(nrows=4)
ax1 = axes[0]
ax1.plot(np.mean(X, axis=1))
ax2 = axes[1]
im = ax2.imshow(X.T, aspect='auto', cmap="plasma", vmin=0, vmax=0.005)
#plt.colorbar(im, ax=ax2, shrink=0.5)
ax3 = axes[2]
ax3.plot(y)
ax3.plot(y_predict)
plt.legend(['target', 'output'])
ax4 = axes[3]
start = int(cutoff/dt)
ax4.plot(inp[0, start:])
ax4.plot(inp[1, start:])
plt.legend(['lorenz', 'stula'])
plt.tight_layout()
# plot connectivity
fig2, ax = plt.subplots()
im1 = ax.imshow(C, aspect='auto', cmap="plasma", vmin=0, vmax=np.max(C[:]))
plt.colorbar(im1, ax=ax, shrink=0.5)
plt.title('C')
plt.tight_layout()
print(f'Synaptic sparseness: {np.sum(C[:] == 0)/N**2}')
plt.show()
| [
"numpy.mean",
"RNNs.QIFExpAddNoiseSyns",
"scipy.ndimage.gaussian_filter1d",
"numpy.sqrt",
"numpy.round",
"matplotlib.pyplot.colorbar",
"numpy.max",
"numpy.sum",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.title",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.legend",
"matplotlib.... | [((1264, 1354), 'RNNs.QIFExpAddNoiseSyns', 'QIFExpAddNoiseSyns', (['C', 'eta', 'J'], {'Delta': 'Delta', 'alpha': 'alpha', 'D': 'D', 'tau_s': 'tau_s', 'tau_a': 'tau_a'}), '(C, eta, J, Delta=Delta, alpha=alpha, D=D, tau_s=tau_s,\n tau_a=tau_a)\n', (1282, 1354), False, 'from RNNs import QIFExpAddNoiseSyns\n'), ((1484, 1502), 'numpy.mean', 'np.mean', (['X'], {'axis': '(1)'}), '(X, axis=1)\n', (1491, 1502), True, 'import numpy as np\n'), ((1679, 1697), 'numpy.mean', 'np.mean', (['X'], {'axis': '(1)'}), '(X, axis=1)\n', (1686, 1697), True, 'import numpy as np\n'), ((2256, 2277), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(4)'}), '(nrows=4)\n', (2268, 2277), True, 'import matplotlib.pyplot as plt\n'), ((2493, 2525), 'matplotlib.pyplot.legend', 'plt.legend', (["['target', 'output']"], {}), "(['target', 'output'])\n", (2503, 2525), True, 'import matplotlib.pyplot as plt\n'), ((2614, 2645), 'matplotlib.pyplot.legend', 'plt.legend', (["['lorenz', 'stula']"], {}), "(['lorenz', 'stula'])\n", (2624, 2645), True, 'import matplotlib.pyplot as plt\n'), ((2647, 2665), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (2663, 2665), True, 'import matplotlib.pyplot as plt\n'), ((2698, 2712), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (2710, 2712), True, 'import matplotlib.pyplot as plt\n'), ((2790, 2826), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['im1'], {'ax': 'ax', 'shrink': '(0.5)'}), '(im1, ax=ax, shrink=0.5)\n', (2802, 2826), True, 'import matplotlib.pyplot as plt\n'), ((2827, 2841), 'matplotlib.pyplot.title', 'plt.title', (['"""C"""'], {}), "('C')\n", (2836, 2841), True, 'import matplotlib.pyplot as plt\n'), ((2843, 2861), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (2859, 2861), True, 'import matplotlib.pyplot as plt\n'), ((2918, 2928), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2926, 2928), True, 'import matplotlib.pyplot as plt\n'), ((1082, 1096), 
'numpy.sqrt', 'np.sqrt', (['Delta'], {}), '(Delta)\n', (1089, 1096), True, 'import numpy as np\n'), ((1585, 1657), 'scipy.ndimage.gaussian_filter1d', 'gaussian_filter1d', (['X[:, i]', '(0.05 / dts)'], {'mode': '"""constant"""', 'cval': 'buffer_val'}), "(X[:, i], 0.05 / dts, mode='constant', cval=buffer_val)\n", (1602, 1657), False, 'from scipy.ndimage import gaussian_filter1d\n'), ((1747, 1786), 'numpy.round', 'np.round', (['(X.shape[0] * 0.75)'], {'decimals': '(0)'}), '(X.shape[0] * 0.75, decimals=0)\n', (1755, 1786), True, 'import numpy as np\n'), ((2302, 2320), 'numpy.mean', 'np.mean', (['X'], {'axis': '(1)'}), '(X, axis=1)\n', (2309, 2320), True, 'import numpy as np\n'), ((2776, 2788), 'numpy.max', 'np.max', (['C[:]'], {}), '(C[:])\n', (2782, 2788), True, 'import numpy as np\n'), ((2892, 2909), 'numpy.sum', 'np.sum', (['(C[:] == 0)'], {}), '(C[:] == 0)\n', (2898, 2909), True, 'import numpy as np\n')] |
#!/usr/bin/env python
# coding=utf-8
from functools import partial
from mock import Mock
from pytest import fixture
@fixture(autouse=True)
def a_function_setup(tmpdir, monkeypatch):
"""
:type tmpdir: py.path.local
:type monkeypatch: _pytest.monkeypatch.monkeypatch
"""
from redislite import StrictRedis
default_connection = partial(StrictRedis, dbfilename=tmpdir.join('test_redis.db').strpath)
monkeypatch.setattr('cache_requests.memoize.default_connection', default_connection)
monkeypatch.setattr('cache_requests.utils.default_connection', default_connection)
monkeypatch.setattr('cache_requests.sessions.default_connection', default_connection)
@fixture(autouse=True)
def function_tear_down(request):
""":type request: _pytest.python.FixtureRequest"""
from cache_requests.utils import default_connection
def cleanup():
redis = default_connection()
redis.flushall()
request.addfinalizer(cleanup)
@fixture
def redis_mock():
cache = {}
def set(name=None, value=None, **_):
cache[name] = value
def get(name):
return cache.get(name)
def delete(key):
cache.pop(key)
_MockRedis = Mock(spec='redislite.StrictRedis')
_MockRedis.cache = cache
_MockRedis.get = Mock(side_effect=get)
_MockRedis.set = Mock(side_effect=set)
_MockRedis.delete = Mock(side_effect=delete)
_MockRedis.flushall = Mock()
return _MockRedis
| [
"pytest.fixture",
"mock.Mock",
"cache_requests.utils.default_connection"
] | [((120, 141), 'pytest.fixture', 'fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (127, 141), False, 'from pytest import fixture\n'), ((693, 714), 'pytest.fixture', 'fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (700, 714), False, 'from pytest import fixture\n'), ((1204, 1238), 'mock.Mock', 'Mock', ([], {'spec': '"""redislite.StrictRedis"""'}), "(spec='redislite.StrictRedis')\n", (1208, 1238), False, 'from mock import Mock\n'), ((1289, 1310), 'mock.Mock', 'Mock', ([], {'side_effect': 'get'}), '(side_effect=get)\n', (1293, 1310), False, 'from mock import Mock\n'), ((1332, 1353), 'mock.Mock', 'Mock', ([], {'side_effect': 'set'}), '(side_effect=set)\n', (1336, 1353), False, 'from mock import Mock\n'), ((1378, 1402), 'mock.Mock', 'Mock', ([], {'side_effect': 'delete'}), '(side_effect=delete)\n', (1382, 1402), False, 'from mock import Mock\n'), ((1429, 1435), 'mock.Mock', 'Mock', ([], {}), '()\n', (1433, 1435), False, 'from mock import Mock\n'), ((895, 915), 'cache_requests.utils.default_connection', 'default_connection', ([], {}), '()\n', (913, 915), False, 'from cache_requests.utils import default_connection\n')] |
import xml.etree.ElementTree as ET
import socket
#for get_access_token
import requests
#for starting mysql and java server
import subprocess
STRINGS_FILE_LOCATION = "C:\Data\Coding\\flash-cards-android\\app\\src\main\\res\\values\strings.xml"
IP_KEY = "flash_cards_api_url"
ACCESS_TOKEN_KEY = "flash_cards_access_token"
#############################################
############Function Definitions#############
#############################################
def get_ip_address():
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
my_ip_address = s.getsockname()[0]
s.close()
return my_ip_address
def change_ip(STRINGS_FILE_LOCATION, IP_KEY, new_ip_address):
tree = ET.parse(STRINGS_FILE_LOCATION)
root = tree.getroot()
ip_string_not_found = True
for string_value in root.findall('string'):
if string_value.attrib["name"]==IP_KEY:
start = 7
end = string_value.text.find(':8080')
string_value.text = string_value.text.replace(string_value.text[start:end], new_ip_address)
tree.write(STRINGS_FILE_LOCATION)
ip_string_not_found = False
if ip_string_not_found:
print("There is no String resource represent ip address, or the key :"+IP_KEY+" has been changed ! Please contact someone or even better be proactive and find a fix, asd yalla fe eh!!!")
def get_access_token():
url = 'http://localhost:8080/oauth/token'
payload = {'grant_type':'password','username':'<EMAIL>','password':'<PASSWORD>'}
headers = {'Authorization': 'Basic d2ViOjEyMzQ1Ng=='}
r = requests.post(url, data=payload, headers=headers)
response_json = r.json()
access_token = response_json['access_token']
return access_token
def change_access_token(STRINGS_FILE_LOCATION, ACCESS_TOKEN_KEY, new_access_token):
tree = ET.parse(STRINGS_FILE_LOCATION)
root = tree.getroot()
access_token_string_not_found = True
for string_value in root.findall('string'):
if string_value.attrib["name"]==ACCESS_TOKEN_KEY:
string_value.text = access_token
tree.write(STRINGS_FILE_LOCATION)
access_token_string_not_found = False
if access_token_string_not_found:
print("There is no String resource represent access_token, or the key :"+ACCESS_TOKEN_KEY+" has been changed ! Please contact someone or even better be proactive and find a fix, asd yalla fe eh!!!")
###########################################
###########################################
###########################################
#############changing ip adress############
my_ip_address = get_ip_address()
change_ip(STRINGS_FILE_LOCATION, IP_KEY, my_ip_address)
#############starting mysql###############
subprocess.Popen(["C:\\xampp\\mysql\\bin\\mysqld.exe"],
creationflags=subprocess.CREATE_NEW_PROCESS_GROUP)
#############stopping java server############
subprocess.Popen(["mvn", "spring-boot:stop"], shell=True,
creationflags=subprocess.CREATE_NEW_PROCESS_GROUP)
#############starting java server############
subprocess.Popen(["mvn", "clean", "spring-boot:run"], shell=True,
creationflags=subprocess.CREATE_NEW_PROCESS_GROUP)
##############adding access token#############
access_token = get_access_token()
change_access_token(STRINGS_FILE_LOCATION, ACCESS_TOKEN_KEY, access_token)
| [
"subprocess.Popen",
"requests.post",
"xml.etree.ElementTree.parse",
"socket.socket"
] | [((2829, 2940), 'subprocess.Popen', 'subprocess.Popen', (["['C:\\\\xampp\\\\mysql\\\\bin\\\\mysqld.exe']"], {'creationflags': 'subprocess.CREATE_NEW_PROCESS_GROUP'}), "(['C:\\\\xampp\\\\mysql\\\\bin\\\\mysqld.exe'], creationflags=\n subprocess.CREATE_NEW_PROCESS_GROUP)\n", (2845, 2940), False, 'import subprocess\n'), ((3003, 3116), 'subprocess.Popen', 'subprocess.Popen', (["['mvn', 'spring-boot:stop']"], {'shell': '(True)', 'creationflags': 'subprocess.CREATE_NEW_PROCESS_GROUP'}), "(['mvn', 'spring-boot:stop'], shell=True, creationflags=\n subprocess.CREATE_NEW_PROCESS_GROUP)\n", (3019, 3116), False, 'import subprocess\n'), ((3178, 3298), 'subprocess.Popen', 'subprocess.Popen', (["['mvn', 'clean', 'spring-boot:run']"], {'shell': '(True)', 'creationflags': 'subprocess.CREATE_NEW_PROCESS_GROUP'}), "(['mvn', 'clean', 'spring-boot:run'], shell=True,\n creationflags=subprocess.CREATE_NEW_PROCESS_GROUP)\n", (3194, 3298), False, 'import subprocess\n'), ((505, 553), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (518, 553), False, 'import socket\n'), ((744, 775), 'xml.etree.ElementTree.parse', 'ET.parse', (['STRINGS_FILE_LOCATION'], {}), '(STRINGS_FILE_LOCATION)\n', (752, 775), True, 'import xml.etree.ElementTree as ET\n'), ((1655, 1704), 'requests.post', 'requests.post', (['url'], {'data': 'payload', 'headers': 'headers'}), '(url, data=payload, headers=headers)\n', (1668, 1704), False, 'import requests\n'), ((1909, 1940), 'xml.etree.ElementTree.parse', 'ET.parse', (['STRINGS_FILE_LOCATION'], {}), '(STRINGS_FILE_LOCATION)\n', (1917, 1940), True, 'import xml.etree.ElementTree as ET\n')] |
import pandas as pd
import numpy as np
from sklearn.feature_selection import RFE
from sklearn.linear_model import LogisticRegression
import os
import json
def load_data(data_path):
with open(data_path, "r") as fp:
data = json.load(fp)
#convert list -> np.array()
inputs = np.array(data["features"])
targets = np.array(data["mms"])
print(inputs.shape, targets.shape)
return inputs, targets
def cramers_v(x, y):
confusion_matrix = pd.crosstab(x,y)
chi2,p,dof,ex = ss.chi2_contingency(confusion_matrix)
n = confusion_matrix.sum().sum()
phi2 = chi2/n
r,k = confusion_matrix.shape
phi2corr = max(0, phi2-((k-1)*(r-1))/(n-1))
rcorr = r-((r-1)**2)/(n-1)
kcorr = k-((k-1)**2)/(n-1)
return np.sqrt(phi2/min((k-1),(r-1)))
if __name__ == "__main__":
data_path = os.path.abspath("json/data.json")
inputs, targets = load_data(data_path=data_path)
for val in range(0, len(inputs[0])):
X = (inputs[1], inputs[2])
Y = (targets[1], targets[2])
model = LogisticRegression(random_state=0, solver='lbfgs',multi_class='multinomial').fit(X, Y)
rfe = RFE(model, 5)
fit = rfe.fit(X, Y)
print( fit.n_features_)
print(f'Observing frame # {val}')
print("Selected Features: %s"% fit.support_)
print("Feature Ranking: %s"% fit.ranking_)
#cramers_v(inputs, targets)
| [
"pandas.crosstab",
"sklearn.linear_model.LogisticRegression",
"json.load",
"numpy.array",
"sklearn.feature_selection.RFE",
"os.path.abspath"
] | [((294, 320), 'numpy.array', 'np.array', (["data['features']"], {}), "(data['features'])\n", (302, 320), True, 'import numpy as np\n'), ((335, 356), 'numpy.array', 'np.array', (["data['mms']"], {}), "(data['mms'])\n", (343, 356), True, 'import numpy as np\n'), ((470, 487), 'pandas.crosstab', 'pd.crosstab', (['x', 'y'], {}), '(x, y)\n', (481, 487), True, 'import pandas as pd\n'), ((830, 863), 'os.path.abspath', 'os.path.abspath', (['"""json/data.json"""'], {}), "('json/data.json')\n", (845, 863), False, 'import os\n'), ((234, 247), 'json.load', 'json.load', (['fp'], {}), '(fp)\n', (243, 247), False, 'import json\n'), ((1148, 1161), 'sklearn.feature_selection.RFE', 'RFE', (['model', '(5)'], {}), '(model, 5)\n', (1151, 1161), False, 'from sklearn.feature_selection import RFE\n'), ((1047, 1124), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {'random_state': '(0)', 'solver': '"""lbfgs"""', 'multi_class': '"""multinomial"""'}), "(random_state=0, solver='lbfgs', multi_class='multinomial')\n", (1065, 1124), False, 'from sklearn.linear_model import LogisticRegression\n')] |
from . import TorchModel, NUM_GESTURES
import torch
from torch import nn
import numpy as np
class ConvNet(TorchModel):
def define_model(self, dim_in):
self.conv = nn.Conv1d(dim_in[0], self.conv_filters, kernel_size=self.conv_kernel_size,
stride=self.conv_stride, padding=self.conv_padding)
self.conv_shape = [self.conv_filters, int(np.floor(
(dim_in[1] - self.conv_kernel_size + 2 * self.conv_padding) / self.conv_stride) + 1)]
model = torch.nn.Sequential(
torch.nn.BatchNorm1d(dim_in),
torch.nn.Linear(dim_in, dim_in * 32),
torch.nn.LeakyReLU(),
torch.nn.Linear(dim_in * 32, dim_in * 64),
torch.nn.BatchNorm1d(dim_in * 64),
torch.nn.LeakyReLU(),
torch.nn.Linear(dim_in * 64, NUM_GESTURES),
torch.nn.Softmax(dim=1)
)
return model
def forward_pass(self, sample):
targets = torch.LongTensor(sample[1].type(torch.LongTensor)).to(self.device)
predictions = self.model(sample[0].to(self.device))
return torch.nn.functional.cross_entropy(predictions, targets), [predictions, None]
from . import TorchModel, NUM_GESTURES
import torch
class Structure(torch.nn.Module):
"""
The classifier with the best known performance on the NinaPro dataset thus far (using a variation of
PaddedMultiRMS).
"""
def __init__(self, input_size, classes, convnet_filters, convnet_kernel_size, convnet_stride, convnet_padding, convnet_maxpooling, convnet_fc_num):
"""
In the constructor we instantiate two nn.Linear modules and assign them as
member variables.
"""
super(Structure, self).__init__()
# Layer 0: Batch Norm
self.batch_norm1 = torch.nn.BatchNorm1d(np.product(input_size))
# Layer 1: Conv Layer
self.conv = nn.Conv1d(input_size[0], convnet_filters, kernel_size=convnet_kernel_size,
stride=convnet_stride, padding=convnet_padding)
self.conv_shape = [convnet_filters, int(np.floor(
(input_size[1] - convnet_kernel_size + 2 * convnet_padding) / convnet_stride) + 1)]
# Layer 1.0: Maxpooling Layer
self.maxpool = nn.MaxPool1d(convnet_maxpooling)
self.maxpool_shape = [self.conv_shape[0], self.conv_shape[1] // convnet_maxpooling]
# Layer 2: FC Layer
self.fcn = nn.Sequential(
nn.Linear(np.product(self.maxpool_shape), convnet_fc_num),
nn.ReLU(inplace=True),
nn.Linear(convnet_fc_num, classes),
nn.Sigmoid()
)
self.relu = nn.ReLU()
def forward(self, x):
if x.shape[0] > 1:
x = self.batch_norm1(x.flatten(1)).view(*x.shape)
x = self.relu(self.conv(x))
x = self.maxpool(x)
y = self.fcn(x.flatten(1))
return y
#
# Yet another variation of FullyConnectedNNV2, leveraging the CustomNet module
#
class ConvNet(TorchModel):
def define_model(self, dim_in):
model = Structure(dim_in, NUM_GESTURES, self.convnet_filters, self.convnet_kernel_size, self.convnet_stride, self.convnet_padding, self.convnet_maxpooling, self.convnet_fc_num)
return model
def forward_pass(self, sample):
targets = torch.LongTensor(sample[1].type(torch.LongTensor)).to(self.device)
predictions = self.model(sample[0].to(self.device))
return torch.nn.functional.cross_entropy(predictions, targets), [predictions, None]
| [
"torch.nn.MaxPool1d",
"numpy.product",
"torch.nn.ReLU",
"torch.nn.Sigmoid",
"torch.nn.LeakyReLU",
"torch.nn.Softmax",
"numpy.floor",
"torch.nn.BatchNorm1d",
"torch.nn.Linear",
"torch.nn.functional.cross_entropy",
"torch.nn.Conv1d"
] | [((178, 308), 'torch.nn.Conv1d', 'nn.Conv1d', (['dim_in[0]', 'self.conv_filters'], {'kernel_size': 'self.conv_kernel_size', 'stride': 'self.conv_stride', 'padding': 'self.conv_padding'}), '(dim_in[0], self.conv_filters, kernel_size=self.conv_kernel_size,\n stride=self.conv_stride, padding=self.conv_padding)\n', (187, 308), False, 'from torch import nn\n'), ((1917, 2043), 'torch.nn.Conv1d', 'nn.Conv1d', (['input_size[0]', 'convnet_filters'], {'kernel_size': 'convnet_kernel_size', 'stride': 'convnet_stride', 'padding': 'convnet_padding'}), '(input_size[0], convnet_filters, kernel_size=convnet_kernel_size,\n stride=convnet_stride, padding=convnet_padding)\n', (1926, 2043), False, 'from torch import nn\n'), ((2286, 2318), 'torch.nn.MaxPool1d', 'nn.MaxPool1d', (['convnet_maxpooling'], {}), '(convnet_maxpooling)\n', (2298, 2318), False, 'from torch import nn\n'), ((2684, 2693), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (2691, 2693), False, 'from torch import nn\n'), ((543, 571), 'torch.nn.BatchNorm1d', 'torch.nn.BatchNorm1d', (['dim_in'], {}), '(dim_in)\n', (563, 571), False, 'import torch\n'), ((585, 621), 'torch.nn.Linear', 'torch.nn.Linear', (['dim_in', '(dim_in * 32)'], {}), '(dim_in, dim_in * 32)\n', (600, 621), False, 'import torch\n'), ((635, 655), 'torch.nn.LeakyReLU', 'torch.nn.LeakyReLU', ([], {}), '()\n', (653, 655), False, 'import torch\n'), ((669, 710), 'torch.nn.Linear', 'torch.nn.Linear', (['(dim_in * 32)', '(dim_in * 64)'], {}), '(dim_in * 32, dim_in * 64)\n', (684, 710), False, 'import torch\n'), ((724, 757), 'torch.nn.BatchNorm1d', 'torch.nn.BatchNorm1d', (['(dim_in * 64)'], {}), '(dim_in * 64)\n', (744, 757), False, 'import torch\n'), ((771, 791), 'torch.nn.LeakyReLU', 'torch.nn.LeakyReLU', ([], {}), '()\n', (789, 791), False, 'import torch\n'), ((805, 847), 'torch.nn.Linear', 'torch.nn.Linear', (['(dim_in * 64)', 'NUM_GESTURES'], {}), '(dim_in * 64, NUM_GESTURES)\n', (820, 847), False, 'import torch\n'), ((861, 884), 'torch.nn.Softmax', 
'torch.nn.Softmax', ([], {'dim': '(1)'}), '(dim=1)\n', (877, 884), False, 'import torch\n'), ((1117, 1172), 'torch.nn.functional.cross_entropy', 'torch.nn.functional.cross_entropy', (['predictions', 'targets'], {}), '(predictions, targets)\n', (1150, 1172), False, 'import torch\n'), ((1842, 1864), 'numpy.product', 'np.product', (['input_size'], {}), '(input_size)\n', (1852, 1864), True, 'import numpy as np\n'), ((2557, 2578), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2564, 2578), False, 'from torch import nn\n'), ((2592, 2626), 'torch.nn.Linear', 'nn.Linear', (['convnet_fc_num', 'classes'], {}), '(convnet_fc_num, classes)\n', (2601, 2626), False, 'from torch import nn\n'), ((2640, 2652), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (2650, 2652), False, 'from torch import nn\n'), ((3487, 3542), 'torch.nn.functional.cross_entropy', 'torch.nn.functional.cross_entropy', (['predictions', 'targets'], {}), '(predictions, targets)\n', (3520, 3542), False, 'import torch\n'), ((2496, 2526), 'numpy.product', 'np.product', (['self.maxpool_shape'], {}), '(self.maxpool_shape)\n', (2506, 2526), True, 'import numpy as np\n'), ((385, 478), 'numpy.floor', 'np.floor', (['((dim_in[1] - self.conv_kernel_size + 2 * self.conv_padding) / self.conv_stride\n )'], {}), '((dim_in[1] - self.conv_kernel_size + 2 * self.conv_padding) / self\n .conv_stride)\n', (393, 478), True, 'import numpy as np\n'), ((2118, 2208), 'numpy.floor', 'np.floor', (['((input_size[1] - convnet_kernel_size + 2 * convnet_padding) / convnet_stride)'], {}), '((input_size[1] - convnet_kernel_size + 2 * convnet_padding) /\n convnet_stride)\n', (2126, 2208), True, 'import numpy as np\n')] |
import code
def test_inc():
assert code.inc(3) == 4
| [
"code.inc"
] | [((40, 51), 'code.inc', 'code.inc', (['(3)'], {}), '(3)\n', (48, 51), False, 'import code\n')] |
"""-----------------------------------------------------------------------------
Name: positional_accuracy.py
Purpose: Statistically summarizes positional accuracy values that are stored
in a field within a feature class.
Description: This tool statistically summarizes the positional accuracy values
that are help in a user defined attribute field in the selected
feature class.
Requirements: Python 2.7.x/Python3.x, ArcGIS 10.5+/Pro 1.2+
Author(s): <NAME>, Contractor for National Geospatial-Intelligence
Agency (NGA) | <NAME>, Contractor for NGA
Program Manager: <NAME>, NGA (<EMAIL>)
Created: August 19, 2015
Modified: August 24, 2016 | April, 2017
Copyright: Esri
License: TBD
-----------------------------------------------------------------------------"""
from __future__ import division
from __future__ import print_function
import os
import sys
import numpy as np
import pandas as pd
from collections import Counter
#Using the ArcGIS API for Python
import arcgis
from arcgis.gis import GIS
from arcgis.features import FeatureLayer
from arcgis.geometry import filters
from arcgis.geometry import Geometry
#Import logic to create layer selection
import sotd_config as config
FIELDS = ('MEAN', 'MEDIAN',
'MODE', 'MIN_',
'MAX_', 'NO_DATE_CNT',
'NO_DATE_PCT', 'FEATURE_CNT',
'PA_SCORE', "TIER")
#--------------------------------------------------------------------------
class FunctionError(Exception):
""" raised when a function fails to run """
pass
#--------------------------------------------------------------------------
def trace():
"""
trace finds the line, the filename
and error message and returns it
to the user
"""
import traceback
tb = sys.exc_info()[2]
tbinfo = traceback.format_tb(tb)[0]
# script name + line number
line = tbinfo.split(", ")[1]
# Get Python syntax error
#
synerror = traceback.format_exc().splitlines()[-1]
return line, __file__, synerror
#--------------------------------------------------------------------------
def get_score(mean):
value = 0
if mean > 0:
if mean >= 0 and mean < 15:
value = 5
elif mean >= 15 and mean <= 25:
value = 4
elif mean > 25 and mean <= 50:
value = 3
elif mean > 50 and mean <= 100:
value = 2
else:
value = 1
elif mean == -1:
# no samples
value = 0
return value
#--------------------------------------------------------------------------
def get_tier(score):
"""
"""
cat = 'Tin'
if score == 5: # ranges
cat = "Platinum"
elif score == 4:
cat = "Gold"
elif score == 3:
cat = 'Silver'
elif score == 2:
cat = "Bronze"
elif score == 1:
cat = "Tin"
else:
cat = "No Ranking"
return cat
#--------------------------------------------------------------------------
## ('_ID', np.int),
## ('MEAN', np.float64),
## ('MEDIAN', np.float64),
## ('MODE', np.float64),
## ('MIN', np.float64),
## ('MAX', np.float64),
## ('NO_DATE_CNT', np.int32),
## ('NO_DATE_PCT', np.float64),
## ('FEATURE_CNT', np.int32),
## ('PA_SCORE', np.int32),
## ("TIER", '|S1024')
#--------------------------------------------------------------------------
def positional_accuracy(gis, df_current, output_features, grid_filter, geom, value_field):
    """Score the positional accuracy of features inside one grid cell.

    Summarizes the numeric `value_field` of the features in `df_current`
    that intersect `geom`, writes MEAN/MEDIAN/MODE/MIN/MAX, the null
    counts, a 0-5 PA_SCORE and its TIER label into row 0 of the output
    layer's dataframe, and returns (out_sdf, out_fl).

    NOTE(review): assumes the query against `output_features` returns a
    single-row dataframe (every write targets index 0) -- confirm.
    NOTE(review): `pd` (pandas) is not imported in this module's visible
    import block -- presumably imported elsewhere; verify.
    """
    try:
        # pandas minor version drives the numeric-coercion path below:
        # releases <= 0.16 lack pd.to_numeric.
        PDVERSION = [int(v) for v in pd.__version__.split('.')]
        out_fl = FeatureLayer(gis=gis, url=output_features)
        out_sdf = out_fl.query(geometry_filter=grid_filter,return_geometry=True,
                        return_all_records=True).df
        # Keep only the features whose geometry touches this grid cell.
        sq = df_current['SHAPE'].disjoint(geom) == False
        df_current = df_current[sq].copy()
        if len(df_current) > 0:
            df_notnull = df_current.loc[df_current[value_field].notnull() == True]
            if PDVERSION[1] <= 16:
                df_notnull = df_notnull.drop(value_field, axis=1).join(df_notnull[value_field].astype(float,raise_on_error=False)).copy()
            elif PDVERSION[1] > 16:
                df_notnull = df_notnull.drop(value_field, axis=1).join(df_notnull[value_field].apply(pd.to_numeric, errors='coerce')).copy() # CHANGES NON NUMERIC ROWS to NaN
                df_notnull = df_notnull.loc[df_notnull[value_field].notnull() == True].copy() # Drops NaN values
            not_null_count = len(df_notnull)
            null_count = len(df_current) - not_null_count
            # Build the float series used for the summary statistics,
            # excluding the literal 'No Information' placeholder.
            if PDVERSION[1] == 16:
                try:
                    s = df_notnull.loc[df_notnull[value_field] != 'No Information', value_field].copy().astype(np.float64)
                except:
                    s = df_notnull.loc[df_notnull[value_field].astype(str) != 'No Information', value_field].copy().astype(np.float64)
            elif PDVERSION[1] > 16:
                s = df_notnull.drop(value_field, axis=1).join(df_notnull[value_field].apply(pd.to_numeric, errors='coerce'))[value_field].copy() # Drops Text Fields
                s = s[s.notnull() == True].copy() # Drops NaN values
            mean = s.mean()
            median = s.median()
            mode = s.mode()
            # Series.mode() returns a (possibly empty) Series; keep the
            # first modal value, or 0 when there is none.
            if len(mode) > 0:
                mode = mode[0]
            else:
                mode = 0
            mmax = s.max()
            mmin = s.min()
            score = get_score(mean)
            null_percent = float(null_count) * 100.0 / float(len(df_current))
            # Write each statistic into row 0, substituting -1 for NaN.
            if not pd.isnull(mean):
                out_sdf[FIELDS[0]][0]=round(mean,1)
            else:
                out_sdf[FIELDS[0]][0]=-1
            if not pd.isnull(median):
                out_sdf[FIELDS[1]][0]=median
            else:
                out_sdf[FIELDS[1]][0]=-1
            if not pd.isnull(mode):
                out_sdf[FIELDS[2]][0]=mode
            else:
                out_sdf[FIELDS[2]][0]=-1
            if not pd.isnull(mmin):
                out_sdf[FIELDS[3]][0]=mmin
            else:
                out_sdf[FIELDS[3]][0]=-1
            if not pd.isnull(mmax):
                out_sdf[FIELDS[4]][0]=mmax
            else:
                out_sdf[FIELDS[4]][0]=-1
            out_sdf[FIELDS[5]][0]=null_count
            out_sdf[FIELDS[6]][0]=round(null_percent,1)
            out_sdf[FIELDS[7]][0]=len(df_current)#not_null_count
            out_sdf[FIELDS[8]][0]=score
            out_sdf[FIELDS[9]][0]=get_tier(score)
            del df_notnull
            del mean
            del median
            del mode
            del mmax
            del mmin
            del score
            del null_percent
        else:
            # No features intersect this cell: write sentinel values.
            out_sdf[FIELDS[0]][0]=-1
            out_sdf[FIELDS[1]][0]=-1
            out_sdf[FIELDS[2]][0]=-1
            out_sdf[FIELDS[3]][0]=-1
            out_sdf[FIELDS[4]][0]=-1
            out_sdf[FIELDS[5]][0]=0
            out_sdf[FIELDS[6]][0]=0
            out_sdf[FIELDS[7]][0]=0
            out_sdf[FIELDS[8]][0]=0
            out_sdf[FIELDS[9]][0]="No Ranking"
        #r = tuple([oid] + [-1]*5 + [0] * 4 + ["No Ranking"])
        return out_sdf, out_fl
    except FunctionError as f_e:
        # NOTE(review): the error is swallowed here (arcpy reporting is
        # commented out), so the caller receives None on failure.
        messages = f_e.args[0]
        #arcpy.AddError("error in function: %s" % messages["function"])
        #arcpy.AddError("error on line: %s" % messages["line"])
        #arcpy.AddError("error in file name: %s" % messages["filename"])
        #arcpy.AddError("with error message: %s" % messages["synerror"])
        #arcpy.AddError("ArcPy Error Message: %s" % messages["arc"])
    except:
        line, filename, synerror = trace()
        #arcpy.AddError("error on line: %s" % line)
        #arcpy.AddError("error in file name: %s" % filename)
        #arcpy.AddError("with error message: %s" % synerror)
| [
"pandas.__version__.split",
"traceback.format_exc",
"pandas.isnull",
"traceback.format_tb",
"sys.exc_info",
"arcgis.features.FeatureLayer"
] | [((1796, 1810), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (1808, 1810), False, 'import sys\n'), ((1827, 1850), 'traceback.format_tb', 'traceback.format_tb', (['tb'], {}), '(tb)\n', (1846, 1850), False, 'import traceback\n'), ((3756, 3798), 'arcgis.features.FeatureLayer', 'FeatureLayer', ([], {'gis': 'gis', 'url': 'output_features'}), '(gis=gis, url=output_features)\n', (3768, 3798), False, 'from arcgis.features import FeatureLayer\n'), ((1970, 1992), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1990, 1992), False, 'import traceback\n'), ((3711, 3736), 'pandas.__version__.split', 'pd.__version__.split', (['"""."""'], {}), "('.')\n", (3731, 3736), True, 'import pandas as pd\n'), ((5717, 5732), 'pandas.isnull', 'pd.isnull', (['mean'], {}), '(mean)\n', (5726, 5732), True, 'import pandas as pd\n'), ((5864, 5881), 'pandas.isnull', 'pd.isnull', (['median'], {}), '(median)\n', (5873, 5881), True, 'import pandas as pd\n'), ((6007, 6022), 'pandas.isnull', 'pd.isnull', (['mode'], {}), '(mode)\n', (6016, 6022), True, 'import pandas as pd\n'), ((6146, 6161), 'pandas.isnull', 'pd.isnull', (['mmin'], {}), '(mmin)\n', (6155, 6161), True, 'import pandas as pd\n'), ((6285, 6300), 'pandas.isnull', 'pd.isnull', (['mmax'], {}), '(mmax)\n', (6294, 6300), True, 'import pandas as pd\n')] |
import typing
from threading import Thread
from . import host, watcher
from .tray import Tray
def init_tray(trays: typing.List[Tray]):
    """Start the tray host and the watcher, each on its own daemon thread."""
    workers = (
        Thread(target=host.init, args=[0, trays]),
        Thread(target=watcher.init),
    )
    for worker in workers:
        worker.daemon = True
        worker.start()
def deinit_tray():
    """Shut down the tray host first, then the watcher."""
    for shutdown in (host.deinit, watcher.deinit):
        shutdown()
| [
"threading.Thread"
] | [((156, 197), 'threading.Thread', 'Thread', ([], {'target': 'host.init', 'args': '[0, trays]'}), '(target=host.init, args=[0, trays])\n', (162, 197), False, 'from threading import Thread\n'), ((274, 301), 'threading.Thread', 'Thread', ([], {'target': 'watcher.init'}), '(target=watcher.init)\n', (280, 301), False, 'from threading import Thread\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created: March 2020
Python without class!
@author: <NAME> (RRCC)
"""
import numpy as np
import matplotlib.pyplot as plt
import argparse
def readFile(fName):
    """Parse an SPH1D output file into per-record particle arrays.

    The file is read in two passes inside a single ``with`` block: pass 1
    counts output records (lines containing "DONE") and reads the particle
    count from the "NP" line; pass 2 rewinds and loads the particle tables.

    Parameters
    ----------
    fName : str
        Path to the SPH1D output file.

    Returns
    -------
    nDumps : int
        Number of output records found.
    nPars : int
        Number of particles per record (assumed constant across records).
    times : list of str
        Simulation time of each record.
    x, vx, mass, rho, p, ie : ndarray, shape (nDumps, nPars)
        Position, velocity, mass, density, pressure and internal energy.
    xm, dx : ndarray, shape (nDumps, nPars-1)
        Midpoints and widths of the gaps between adjacent particles.
    """
    print('Opening File: ', fName)
    times = []
    nDumps = 0
    # "with" guarantees the handle is closed even if parsing raises
    # (the original open()/close() pair leaked it on error).
    with open(fName, 'r') as f:
        # Pass 1: count records and pick up the particle count.
        for lin in f:
            if lin.find("DONE") != -1: nDumps += 1
            if lin.find("NP") != -1: iNP = lin.split()[1]
        print(nDumps, " SPH1D output records found.")
        print(iNP, " particles used in simulation.")  # iNP assumed constant!
        nPars = int(iNP)
        x = np.zeros((nDumps, nPars))
        xm = np.zeros((nDumps, nPars - 1))
        dx = np.zeros((nDumps, nPars - 1))
        vx = np.zeros((nDumps, nPars))
        mass = np.zeros((nDumps, nPars))
        rho = np.zeros((nDumps, nPars))
        p = np.zeros((nDumps, nPars))
        ie = np.zeros((nDumps, nPars))
        # Pass 2: rewind and read each record's particle table.
        f.seek(0, 0)
        for i in np.arange(nDumps):
            lin1 = f.readline().split()
            t = lin1[1]
            times.append(t)
            print("Processing record from time = ", t, " s")
            # Skip the two header lines that precede the particle table.
            f.readline()
            f.readline()
            for j in range(nPars):
                nums = f.readline().split()
                x[i][j] = nums[0]
                vx[i][j] = nums[1]
                mass[i][j] = nums[2]
                rho[i][j] = nums[3]
                p[i][j] = nums[4]
                ie[i][j] = nums[5]
            # Consume the trailing record terminator line.
            f.readline()
            dx[i] = x[i][1:] - x[i][0:-1]
            xm[i] = (x[i][1:] + x[i][0:-1]) / 2.0
    return nDumps, nPars, times, x, vx, mass, rho, p, ie, xm, dx
def plotAll(t, x, vx, mass, rho, p, ie, xm, dx):
    """Show every diagnostic plot for one output record, in order:
    delta-X, velocity, mass, density, pressure, internal energy.

    Delegates to the single-quantity helpers (plotDX, plotVEL, plotDEN,
    plotP, plotIE) so each plot is defined in exactly one place; the
    mass plot has no dedicated helper and is drawn inline.

    Parameters
    ----------
    t : str
        Simulation time label for the plot titles.
    x, vx, mass, rho, p, ie : array-like
        Per-particle position, velocity, mass, density, pressure and
        internal energy for this record.
    xm, dx : array-like
        Interval midpoints and widths between adjacent particles.
    """
    plotDX(t, xm, dx)
    plotVEL(t, x, vx)
    # Mass vs. position (no helper exists for this quantity).
    plt.plot(x, mass, label='Mass')
    plt.title('Simulation Time = ' + t + ' s')
    plt.xlabel('Position')
    plt.ylabel('Mass')
    plt.xlim(-.4, .4)
    plt.ylim(-.1, 3)
    plt.legend()
    plt.show()
    plotDEN(t, x, rho)
    plotP(t, x, p)
    plotIE(t, x, ie)
def plotIE(t, x, ie):
    """Plot the internal-energy profile of the particles at time *t*."""
    quantity = 'Internal Energy'
    plt.plot(x, ie, label=quantity)
    for setter, text in ((plt.title, 'Simulation Time = ' + t + ' s'),
                         (plt.xlabel, 'Position'),
                         (plt.ylabel, quantity)):
        setter(text)
    plt.xlim(-0.4, 0.4)
    plt.ylim(1, 3)
    plt.legend()
    plt.show()
def plotVEL(t, x, vx):
    """Plot each particle's x-velocity against its position at time *t*."""
    plt.plot(x, vx, label='X-Velocity')
    for setter, text in ((plt.title, 'Simulation Time = ' + t + ' s'),
                         (plt.xlabel, 'Position'),
                         (plt.ylabel, 'Velocity')):
        setter(text)
    plt.xlim(-0.4, 0.4)
    plt.ylim(-0.5, 1)
    plt.legend()
    plt.show()
def plotP(t, x, p):
    """Plot the pressure profile of the particles at time *t*."""
    quantity = 'Pressure'
    plt.plot(x, p, label=quantity)
    for setter, text in ((plt.title, 'Simulation Time = ' + t + ' s'),
                         (plt.xlabel, 'Position'),
                         (plt.ylabel, quantity)):
        setter(text)
    plt.xlim(-0.4, 0.4)
    plt.ylim(-0.1, 1.2)
    plt.legend()
    plt.show()
def plotDEN(t, x, rho):
    """Plot the density profile of the particles at time *t*."""
    quantity = 'Density'
    plt.plot(x, rho, label=quantity)
    for setter, text in ((plt.title, 'Simulation Time = ' + t + ' s'),
                         (plt.xlabel, 'Position'),
                         (plt.ylabel, quantity)):
        setter(text)
    plt.xlim(-0.4, 0.4)
    plt.ylim(-0.1, 3)
    plt.legend()
    plt.show()
def plotDX(t, xm, dx):
    """Plot inter-particle spacing against interval midpoints at time *t*."""
    quantity = 'delta-X'
    plt.plot(xm, dx, label=quantity)
    for setter, text in ((plt.title, 'Simulation Time = ' + t + ' s'),
                         (plt.xlabel, 'Position'),
                         (plt.ylabel, quantity)):
        setter(text)
    plt.xlim(-0.4, 0.4)
    plt.ylim(-0.1, 3)
    plt.legend()
    plt.show()
def getUserOptions():
    """Parse command-line options.

    Returns
    -------
    argparse.Namespace
        Parsed options; ``.i`` holds the required input-file path.
    """
    cli = argparse.ArgumentParser(
        description='Welcome to sphPlot help...',
        epilog='Example:\n>python sphPlot.py -i <input_file>')
    # -i is the only option and it is mandatory.
    cli.add_argument(
        '-i',
        required=True,
        help="'file/location/SPH1D_Output.txt' is required")
    return cli.parse_args()
def main(args):
    """Read the SPH output file named by ``args.i`` and plot the
    velocity profile of every record it contains.
    """
    nDumps, nPars, t, x, vx, mass, rho, p, ie, xm, dx = readFile(args.i)
    for rec in range(nDumps):
        print("Plotting Time: ", t[rec], " s")
        plotVEL(t[rec], x[rec], vx[rec])
# Script entry point: parse the -i option, then plot every record.
if __name__ == '__main__':
    args = getUserOptions()
    main(args)
"argparse.ArgumentParser",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"numpy.zeros",
"matplotlib.pyplot.title",
"matplotlib.pyplot.xlim",
"matplotlib.pyplot.ylim",
"numpy.arange",
"matplotlib.pyplot.show"
] | [((1063, 1088), 'numpy.zeros', 'np.zeros', (['(nDumps, nPars)'], {}), '((nDumps, nPars))\n', (1071, 1088), True, 'import numpy as np\n'), ((1098, 1127), 'numpy.zeros', 'np.zeros', (['(nDumps, nPars - 1)'], {}), '((nDumps, nPars - 1))\n', (1106, 1127), True, 'import numpy as np\n'), ((1135, 1164), 'numpy.zeros', 'np.zeros', (['(nDumps, nPars - 1)'], {}), '((nDumps, nPars - 1))\n', (1143, 1164), True, 'import numpy as np\n'), ((1172, 1197), 'numpy.zeros', 'np.zeros', (['(nDumps, nPars)'], {}), '((nDumps, nPars))\n', (1180, 1197), True, 'import numpy as np\n'), ((1209, 1234), 'numpy.zeros', 'np.zeros', (['(nDumps, nPars)'], {}), '((nDumps, nPars))\n', (1217, 1234), True, 'import numpy as np\n'), ((1245, 1270), 'numpy.zeros', 'np.zeros', (['(nDumps, nPars)'], {}), '((nDumps, nPars))\n', (1253, 1270), True, 'import numpy as np\n'), ((1279, 1304), 'numpy.zeros', 'np.zeros', (['(nDumps, nPars)'], {}), '((nDumps, nPars))\n', (1287, 1304), True, 'import numpy as np\n'), ((1314, 1339), 'numpy.zeros', 'np.zeros', (['(nDumps, nPars)'], {}), '((nDumps, nPars))\n', (1322, 1339), True, 'import numpy as np\n'), ((1370, 1387), 'numpy.arange', 'np.arange', (['nDumps'], {}), '(nDumps)\n', (1379, 1387), True, 'import numpy as np\n'), ((2544, 2577), 'matplotlib.pyplot.plot', 'plt.plot', (['xm', 'dx'], {'label': '"""delta-X"""'}), "(xm, dx, label='delta-X')\n", (2552, 2577), True, 'import matplotlib.pyplot as plt\n'), ((2581, 2623), 'matplotlib.pyplot.title', 'plt.title', (["('Simulation Time = ' + t + ' s')"], {}), "('Simulation Time = ' + t + ' s')\n", (2590, 2623), True, 'import matplotlib.pyplot as plt\n'), ((2625, 2647), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Position"""'], {}), "('Position')\n", (2635, 2647), True, 'import matplotlib.pyplot as plt\n'), ((2653, 2674), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""delta-X"""'], {}), "('delta-X')\n", (2663, 2674), True, 'import matplotlib.pyplot as plt\n'), ((2680, 2699), 'matplotlib.pyplot.xlim', 'plt.xlim', 
(['(-0.4)', '(0.4)'], {}), '(-0.4, 0.4)\n', (2688, 2699), True, 'import matplotlib.pyplot as plt\n'), ((2702, 2719), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-0.1)', '(3)'], {}), '(-0.1, 3)\n', (2710, 2719), True, 'import matplotlib.pyplot as plt\n'), ((2723, 2735), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (2733, 2735), True, 'import matplotlib.pyplot as plt\n'), ((2741, 2751), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2749, 2751), True, 'import matplotlib.pyplot as plt\n'), ((2757, 2792), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'vx'], {'label': '"""X-Velocity"""'}), "(x, vx, label='X-Velocity')\n", (2765, 2792), True, 'import matplotlib.pyplot as plt\n'), ((2796, 2838), 'matplotlib.pyplot.title', 'plt.title', (["('Simulation Time = ' + t + ' s')"], {}), "('Simulation Time = ' + t + ' s')\n", (2805, 2838), True, 'import matplotlib.pyplot as plt\n'), ((2840, 2862), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Position"""'], {}), "('Position')\n", (2850, 2862), True, 'import matplotlib.pyplot as plt\n'), ((2868, 2890), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Velocity"""'], {}), "('Velocity')\n", (2878, 2890), True, 'import matplotlib.pyplot as plt\n'), ((2896, 2915), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-0.4)', '(0.4)'], {}), '(-0.4, 0.4)\n', (2904, 2915), True, 'import matplotlib.pyplot as plt\n'), ((2918, 2935), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-0.5)', '(1)'], {}), '(-0.5, 1)\n', (2926, 2935), True, 'import matplotlib.pyplot as plt\n'), ((2939, 2951), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (2949, 2951), True, 'import matplotlib.pyplot as plt\n'), ((2957, 2967), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2965, 2967), True, 'import matplotlib.pyplot as plt\n'), ((2973, 3004), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'mass'], {'label': '"""Mass"""'}), "(x, mass, label='Mass')\n", (2981, 3004), True, 'import matplotlib.pyplot as plt\n'), ((3008, 3050), 
'matplotlib.pyplot.title', 'plt.title', (["('Simulation Time = ' + t + ' s')"], {}), "('Simulation Time = ' + t + ' s')\n", (3017, 3050), True, 'import matplotlib.pyplot as plt\n'), ((3052, 3074), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Position"""'], {}), "('Position')\n", (3062, 3074), True, 'import matplotlib.pyplot as plt\n'), ((3080, 3098), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Mass"""'], {}), "('Mass')\n", (3090, 3098), True, 'import matplotlib.pyplot as plt\n'), ((3104, 3123), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-0.4)', '(0.4)'], {}), '(-0.4, 0.4)\n', (3112, 3123), True, 'import matplotlib.pyplot as plt\n'), ((3126, 3143), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-0.1)', '(3)'], {}), '(-0.1, 3)\n', (3134, 3143), True, 'import matplotlib.pyplot as plt\n'), ((3147, 3159), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (3157, 3159), True, 'import matplotlib.pyplot as plt\n'), ((3165, 3175), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3173, 3175), True, 'import matplotlib.pyplot as plt\n'), ((3181, 3214), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'rho'], {'label': '"""Density"""'}), "(x, rho, label='Density')\n", (3189, 3214), True, 'import matplotlib.pyplot as plt\n'), ((3218, 3260), 'matplotlib.pyplot.title', 'plt.title', (["('Simulation Time = ' + t + ' s')"], {}), "('Simulation Time = ' + t + ' s')\n", (3227, 3260), True, 'import matplotlib.pyplot as plt\n'), ((3262, 3284), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Position"""'], {}), "('Position')\n", (3272, 3284), True, 'import matplotlib.pyplot as plt\n'), ((3290, 3311), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Density"""'], {}), "('Density')\n", (3300, 3311), True, 'import matplotlib.pyplot as plt\n'), ((3317, 3336), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-0.4)', '(0.4)'], {}), '(-0.4, 0.4)\n', (3325, 3336), True, 'import matplotlib.pyplot as plt\n'), ((3339, 3356), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-0.1)', '(3)'], {}), '(-0.1, 
3)\n', (3347, 3356), True, 'import matplotlib.pyplot as plt\n'), ((3360, 3372), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (3370, 3372), True, 'import matplotlib.pyplot as plt\n'), ((3378, 3388), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3386, 3388), True, 'import matplotlib.pyplot as plt\n'), ((3394, 3426), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'p'], {'label': '"""Pressure"""'}), "(x, p, label='Pressure')\n", (3402, 3426), True, 'import matplotlib.pyplot as plt\n'), ((3430, 3472), 'matplotlib.pyplot.title', 'plt.title', (["('Simulation Time = ' + t + ' s')"], {}), "('Simulation Time = ' + t + ' s')\n", (3439, 3472), True, 'import matplotlib.pyplot as plt\n'), ((3474, 3496), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Position"""'], {}), "('Position')\n", (3484, 3496), True, 'import matplotlib.pyplot as plt\n'), ((3502, 3524), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Pressure"""'], {}), "('Pressure')\n", (3512, 3524), True, 'import matplotlib.pyplot as plt\n'), ((3530, 3549), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-0.4)', '(0.4)'], {}), '(-0.4, 0.4)\n', (3538, 3549), True, 'import matplotlib.pyplot as plt\n'), ((3552, 3571), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-0.1)', '(1.2)'], {}), '(-0.1, 1.2)\n', (3560, 3571), True, 'import matplotlib.pyplot as plt\n'), ((3575, 3587), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (3585, 3587), True, 'import matplotlib.pyplot as plt\n'), ((3593, 3603), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3601, 3603), True, 'import matplotlib.pyplot as plt\n'), ((3609, 3649), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'ie'], {'label': '"""Internal Energy"""'}), "(x, ie, label='Internal Energy')\n", (3617, 3649), True, 'import matplotlib.pyplot as plt\n'), ((3653, 3695), 'matplotlib.pyplot.title', 'plt.title', (["('Simulation Time = ' + t + ' s')"], {}), "('Simulation Time = ' + t + ' s')\n", (3662, 3695), True, 'import matplotlib.pyplot as plt\n'), 
((3697, 3719), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Position"""'], {}), "('Position')\n", (3707, 3719), True, 'import matplotlib.pyplot as plt\n'), ((3725, 3754), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Internal Energy"""'], {}), "('Internal Energy')\n", (3735, 3754), True, 'import matplotlib.pyplot as plt\n'), ((3760, 3779), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-0.4)', '(0.4)'], {}), '(-0.4, 0.4)\n', (3768, 3779), True, 'import matplotlib.pyplot as plt\n'), ((3782, 3796), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(1)', '(3)'], {}), '(1, 3)\n', (3790, 3796), True, 'import matplotlib.pyplot as plt\n'), ((3801, 3813), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (3811, 3813), True, 'import matplotlib.pyplot as plt\n'), ((3819, 3829), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3827, 3829), True, 'import matplotlib.pyplot as plt\n'), ((4064, 4104), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'ie'], {'label': '"""Internal Energy"""'}), "(x, ie, label='Internal Energy')\n", (4072, 4104), True, 'import matplotlib.pyplot as plt\n'), ((4108, 4150), 'matplotlib.pyplot.title', 'plt.title', (["('Simulation Time = ' + t + ' s')"], {}), "('Simulation Time = ' + t + ' s')\n", (4117, 4150), True, 'import matplotlib.pyplot as plt\n'), ((4152, 4174), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Position"""'], {}), "('Position')\n", (4162, 4174), True, 'import matplotlib.pyplot as plt\n'), ((4180, 4209), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Internal Energy"""'], {}), "('Internal Energy')\n", (4190, 4209), True, 'import matplotlib.pyplot as plt\n'), ((4215, 4234), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-0.4)', '(0.4)'], {}), '(-0.4, 0.4)\n', (4223, 4234), True, 'import matplotlib.pyplot as plt\n'), ((4237, 4251), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(1)', '(3)'], {}), '(1, 3)\n', (4245, 4251), True, 'import matplotlib.pyplot as plt\n'), ((4256, 4268), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', 
(4266, 4268), True, 'import matplotlib.pyplot as plt\n'), ((4274, 4284), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4282, 4284), True, 'import matplotlib.pyplot as plt\n'), ((4520, 4555), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'vx'], {'label': '"""X-Velocity"""'}), "(x, vx, label='X-Velocity')\n", (4528, 4555), True, 'import matplotlib.pyplot as plt\n'), ((4559, 4601), 'matplotlib.pyplot.title', 'plt.title', (["('Simulation Time = ' + t + ' s')"], {}), "('Simulation Time = ' + t + ' s')\n", (4568, 4601), True, 'import matplotlib.pyplot as plt\n'), ((4603, 4625), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Position"""'], {}), "('Position')\n", (4613, 4625), True, 'import matplotlib.pyplot as plt\n'), ((4631, 4653), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Velocity"""'], {}), "('Velocity')\n", (4641, 4653), True, 'import matplotlib.pyplot as plt\n'), ((4659, 4678), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-0.4)', '(0.4)'], {}), '(-0.4, 0.4)\n', (4667, 4678), True, 'import matplotlib.pyplot as plt\n'), ((4681, 4698), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-0.5)', '(1)'], {}), '(-0.5, 1)\n', (4689, 4698), True, 'import matplotlib.pyplot as plt\n'), ((4702, 4714), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (4712, 4714), True, 'import matplotlib.pyplot as plt\n'), ((4720, 4730), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4728, 4730), True, 'import matplotlib.pyplot as plt\n'), ((4962, 4994), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'p'], {'label': '"""Pressure"""'}), "(x, p, label='Pressure')\n", (4970, 4994), True, 'import matplotlib.pyplot as plt\n'), ((4998, 5040), 'matplotlib.pyplot.title', 'plt.title', (["('Simulation Time = ' + t + ' s')"], {}), "('Simulation Time = ' + t + ' s')\n", (5007, 5040), True, 'import matplotlib.pyplot as plt\n'), ((5042, 5064), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Position"""'], {}), "('Position')\n", (5052, 5064), True, 'import matplotlib.pyplot as plt\n'), 
((5070, 5092), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Pressure"""'], {}), "('Pressure')\n", (5080, 5092), True, 'import matplotlib.pyplot as plt\n'), ((5098, 5117), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-0.4)', '(0.4)'], {}), '(-0.4, 0.4)\n', (5106, 5117), True, 'import matplotlib.pyplot as plt\n'), ((5120, 5139), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-0.1)', '(1.2)'], {}), '(-0.1, 1.2)\n', (5128, 5139), True, 'import matplotlib.pyplot as plt\n'), ((5143, 5155), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (5153, 5155), True, 'import matplotlib.pyplot as plt\n'), ((5161, 5171), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5169, 5171), True, 'import matplotlib.pyplot as plt\n'), ((5409, 5442), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'rho'], {'label': '"""Density"""'}), "(x, rho, label='Density')\n", (5417, 5442), True, 'import matplotlib.pyplot as plt\n'), ((5446, 5488), 'matplotlib.pyplot.title', 'plt.title', (["('Simulation Time = ' + t + ' s')"], {}), "('Simulation Time = ' + t + ' s')\n", (5455, 5488), True, 'import matplotlib.pyplot as plt\n'), ((5490, 5512), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Position"""'], {}), "('Position')\n", (5500, 5512), True, 'import matplotlib.pyplot as plt\n'), ((5518, 5539), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Density"""'], {}), "('Density')\n", (5528, 5539), True, 'import matplotlib.pyplot as plt\n'), ((5545, 5564), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-0.4)', '(0.4)'], {}), '(-0.4, 0.4)\n', (5553, 5564), True, 'import matplotlib.pyplot as plt\n'), ((5567, 5584), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-0.1)', '(3)'], {}), '(-0.1, 3)\n', (5575, 5584), True, 'import matplotlib.pyplot as plt\n'), ((5588, 5600), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (5598, 5600), True, 'import matplotlib.pyplot as plt\n'), ((5606, 5616), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5614, 5616), True, 'import matplotlib.pyplot as plt\n'), 
((5853, 5886), 'matplotlib.pyplot.plot', 'plt.plot', (['xm', 'dx'], {'label': '"""delta-X"""'}), "(xm, dx, label='delta-X')\n", (5861, 5886), True, 'import matplotlib.pyplot as plt\n'), ((5890, 5932), 'matplotlib.pyplot.title', 'plt.title', (["('Simulation Time = ' + t + ' s')"], {}), "('Simulation Time = ' + t + ' s')\n", (5899, 5932), True, 'import matplotlib.pyplot as plt\n'), ((5934, 5956), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Position"""'], {}), "('Position')\n", (5944, 5956), True, 'import matplotlib.pyplot as plt\n'), ((5962, 5983), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""delta-X"""'], {}), "('delta-X')\n", (5972, 5983), True, 'import matplotlib.pyplot as plt\n'), ((5989, 6008), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-0.4)', '(0.4)'], {}), '(-0.4, 0.4)\n', (5997, 6008), True, 'import matplotlib.pyplot as plt\n'), ((6011, 6028), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-0.1)', '(3)'], {}), '(-0.1, 3)\n', (6019, 6028), True, 'import matplotlib.pyplot as plt\n'), ((6032, 6044), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (6042, 6044), True, 'import matplotlib.pyplot as plt\n'), ((6050, 6060), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6058, 6060), True, 'import matplotlib.pyplot as plt\n'), ((6184, 6312), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Welcome to sphPlot help..."""', 'epilog': '"""Example:\n>python sphPlot.py -i <input_file>"""'}), '(description=\'Welcome to sphPlot help...\', epilog=\n """Example:\n>python sphPlot.py -i <input_file>""")\n', (6207, 6312), False, 'import argparse\n'), ((6741, 6758), 'numpy.arange', 'np.arange', (['nDumps'], {}), '(nDumps)\n', (6750, 6758), True, 'import numpy as np\n')] |
from pytube import YouTube
import tkinter as tk
import tkinter.messagebox
def download():
    """Download the YouTube audio/video for the URL in the entry box.

    Reads the URL from the module-level ``url`` StringVar and the
    Audio/Video choice from ``var`` (1 = audio only, 2 = full video),
    then shows a dialog reporting the result.
    """
    URL = url.get()
    aud = var.get()
    # Neither radio button selected yet: tell the user instead of
    # falling through to a NameError masked as a generic error dialog.
    if aud not in (1, 2):
        tk.messagebox.showwarning("uTHUB", "Please choose Audio only: Yes or No first")
        return
    try:
        if aud == 1:
            ls = YouTube(URL).streams.filter(adaptive=True, only_audio=True)
            ans = 'Audio'
            # last stream in the filtered list is kept (highest index)
            path = ls[len(ls)-1].download()
        elif aud == 2:
            ls = YouTube(URL).streams.filter(adaptive=True)
            ans = 'Video'
            path = ls[0].download()
        msg = "Badhai xa !! "+ ans + " downloaded sucessfully \n location: "+ path
        tk.messagebox.showinfo("YO done", msg)
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
        # are no longer swallowed; any pytube/network failure still
        # surfaces as an error dialog.
        tk.messagebox.showerror("ERROR!!", "Sorry, We got Error!! Please Try with different URL")
# Build the main window: fixed-size 240x180 frame titled "uTHUB".
root = tk.Tk()
root.geometry("240x180")
root.title("uTHUB")
# URL entry bound to a StringVar read by download().
url = tk.StringVar()
url_label = tk.Label(root, text = 'URL: ', font=('Comic Sans MS',12, 'italic'))
url_entry = tk.Entry(root, textvariable = url, font=('calibre',10,'normal'), bd=4, width=25, fg='green')
# Audio-only choice: 1 = Yes (audio), 2 = No (video); read by download().
var = tk.IntVar()
Audio_only = tk.Label(root, text='Audio Only', font=('Comic Sans MS',12,'italic'))
R1 = tk.Radiobutton(root, text="Yes",value=1, variable=var, font=('Comic Sans MS',10,'bold'))
R2 = tk.Radiobutton(root, text="No", value=2, variable=var, font=('Comic Sans MS',10,'bold'))
# Lay the widgets out on a simple grid.
url_label.grid(row = 1, column = 1)
url_entry.grid(row = 1, column = 2)
Audio_only.grid(row=2, column = 2)
R1.grid(row = 3, column = 2)
R2.grid(row = 4, column = 2)
B = tk.Button(text = "Download", command = download)
B.grid(row = 6, column = 2)
root.resizable(False, False)
# Blocks until the window is closed.
root.mainloop()
"tkinter.IntVar",
"tkinter.messagebox.showerror",
"tkinter.Entry",
"pytube.YouTube",
"tkinter.Button",
"tkinter.Radiobutton",
"tkinter.StringVar",
"tkinter.Tk",
"tkinter.Label",
"tkinter.messagebox.showinfo"
] | [((700, 707), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (705, 707), True, 'import tkinter as tk\n'), ((760, 774), 'tkinter.StringVar', 'tk.StringVar', ([], {}), '()\n', (772, 774), True, 'import tkinter as tk\n'), ((787, 853), 'tkinter.Label', 'tk.Label', (['root'], {'text': '"""URL: """', 'font': "('Comic Sans MS', 12, 'italic')"}), "(root, text='URL: ', font=('Comic Sans MS', 12, 'italic'))\n", (795, 853), True, 'import tkinter as tk\n'), ((867, 963), 'tkinter.Entry', 'tk.Entry', (['root'], {'textvariable': 'url', 'font': "('calibre', 10, 'normal')", 'bd': '(4)', 'width': '(25)', 'fg': '"""green"""'}), "(root, textvariable=url, font=('calibre', 10, 'normal'), bd=4,\n width=25, fg='green')\n", (875, 963), True, 'import tkinter as tk\n'), ((967, 978), 'tkinter.IntVar', 'tk.IntVar', ([], {}), '()\n', (976, 978), True, 'import tkinter as tk\n'), ((992, 1063), 'tkinter.Label', 'tk.Label', (['root'], {'text': '"""Audio Only"""', 'font': "('Comic Sans MS', 12, 'italic')"}), "(root, text='Audio Only', font=('Comic Sans MS', 12, 'italic'))\n", (1000, 1063), True, 'import tkinter as tk\n'), ((1067, 1163), 'tkinter.Radiobutton', 'tk.Radiobutton', (['root'], {'text': '"""Yes"""', 'value': '(1)', 'variable': 'var', 'font': "('Comic Sans MS', 10, 'bold')"}), "(root, text='Yes', value=1, variable=var, font=(\n 'Comic Sans MS', 10, 'bold'))\n", (1081, 1163), True, 'import tkinter as tk\n'), ((1161, 1256), 'tkinter.Radiobutton', 'tk.Radiobutton', (['root'], {'text': '"""No"""', 'value': '(2)', 'variable': 'var', 'font': "('Comic Sans MS', 10, 'bold')"}), "(root, text='No', value=2, variable=var, font=(\n 'Comic Sans MS', 10, 'bold'))\n", (1175, 1256), True, 'import tkinter as tk\n'), ((1421, 1465), 'tkinter.Button', 'tk.Button', ([], {'text': '"""Download"""', 'command': 'download'}), "(text='Download', command=download)\n", (1430, 1465), True, 'import tkinter as tk\n'), ((543, 581), 'tkinter.messagebox.showinfo', 'tk.messagebox.showinfo', (['"""YO done"""', 'msg'], {}), "('YO 
done', msg)\n", (565, 581), True, 'import tkinter as tk\n'), ((602, 695), 'tkinter.messagebox.showerror', 'tk.messagebox.showerror', (['"""ERROR!!"""', '"""Sorry, We got Error!! Please Try with different URL"""'], {}), "('ERROR!!',\n 'Sorry, We got Error!! Please Try with different URL')\n", (625, 695), True, 'import tkinter as tk\n'), ((177, 189), 'pytube.YouTube', 'YouTube', (['URL'], {}), '(URL)\n', (184, 189), False, 'from pytube import YouTube\n'), ((347, 359), 'pytube.YouTube', 'YouTube', (['URL'], {}), '(URL)\n', (354, 359), False, 'from pytube import YouTube\n')] |
from __future__ import print_function
import json
import logging
import sys
import time
from utils.chronograph import Chronograph
import grpc
import numpy as np
from grpc._channel import _Rendezvous
import taranis_pb2
import taranis_pb2_grpc
# Target database/index on the Taranis server and the IVFPQ index shape
# (vector dimension and number of inverted lists).
DB_NAME = 'db3'
INDEX_NAME = 'basic_index'
DIMENSION = 128 # dimension
N_LISTS = 4096
# Ingestion sizing: n_batch batches of n_training_vectors vectors each.
n_batch = 10000
n_training_vectors = 1000
# DB_NAME = 'db2'
# INDEX_NAME = 'basic_index'
# DIMENSION = 128 # dimension
# N_LISTS = 4
# n_batch = 100
# n_training_vectors = 1000
# set up logger
# Root logger streaming DEBUG and above to stdout.
logger = logging.getLogger()
logger.setLevel(logging.DEBUG) # anything debug and above passes through to the handler level
fh = logging.StreamHandler(stream=sys.stdout)
fh.setLevel(logging.DEBUG)
logger.addHandler(fh)
def run():
with grpc.insecure_channel('localhost:50051') as channel:
stub = taranis_pb2_grpc.TaranisStub(channel)
try:
# # Delete the database if it already exists, and recreate it
# try:
# my_database = stub.getDatabase(taranis_pb2.DatabaseNameModel(name=DB_NAME))
# logging.info("Found database {}".format(my_database.name))
# stub.deleteDatabase(taranis_pb2.DatabaseNameModel(name=DB_NAME))
# logging.info("Deleted database {}".format(DB_NAME))
# except _Rendezvous as e:
# logging.info("{} : {}".format(e.code(), e.details()))
#
# response = stub.createDatabase(taranis_pb2.NewDatabaseModel(name=DB_NAME))
# logging.info("Created database {} at {}".format(response.name, response.created_at))
#
# # Check if database exists
# my_database = stub.getDatabase(taranis_pb2.DatabaseNameModel(name=DB_NAME))
# logging.info("Found database {}".format(my_database.name))
#
# # Delete the index if it already exists and recreate it
# try:
# my_index = stub.getIndex(taranis_pb2.IndexQueryModel(db_name=DB_NAME, index_name=INDEX_NAME))
# logging.info("Found Index {}".format(my_index.index_name))
# stub.deleteIndex(taranis_pb2.IndexQueryModel(db_name=DB_NAME, index_name=INDEX_NAME))
# logging.info("Deleted Index {}".format(my_index.index_name))
# except _Rendezvous as e:
# logging.info("{} : {}".format(e.code(), e.details()))
#
# response = stub.createIndex(taranis_pb2.NewIndexModel(db_name=DB_NAME, index_name=INDEX_NAME,
# config=json.dumps(dict(index_type="IVFPQ",
# dimension=DIMENSION,
# n_list=N_LISTS,
# metric="METRIC_L2",
# n_probes=4))))
# logging.info("Created index {} at {}".format(response.index_name, response.created_at))
#
# my_index = stub.getIndex(taranis_pb2.IndexQueryModel(db_name=DB_NAME, index_name=INDEX_NAME))
# logging.info("Found Index {}".format(my_index.index_name))
#
# vid = 0
# for b in range(0, n_batch):
# logging.info("Batch {} on {}".format(b, n_batch))
# payload = taranis_pb2.NewVectorsModel()
# payload.db_name = DB_NAME
# for i in range(b * n_training_vectors, (b + 1) * n_training_vectors):
# v = payload.vectors.add()
# v.id = vid
# v.data = np.random.Generator().random((DIMENSION,), dtype=np.float32).tobytes()
# # v.data = np.random.random_sample((DIMENSION,)).tobytes()
# v.metadata = json.dumps(dict(aaa="aaa", bbb="bbb"))
# vid += 1
# response = stub.addVectors(payload)
# logging.info("Added {} vectors".format(n_training_vectors))
#
# # Train the index
# response = stub.trainIndex(taranis_pb2.IndexQueryModel(db_name=DB_NAME, index_name=INDEX_NAME))
# logging.info("Trained index {} for db {}".format(INDEX_NAME, DB_NAME))
#
# # reencode all vectors in database
# response = stub.reindex(taranis_pb2.IndexQueryModel(db_name=DB_NAME, index_name=INDEX_NAME))
cg = Chronograph(name="Testing Chronograph", verbosity=1, logger=logger, log_lvl="INFO", start_timing=False)
for b in range(0, 100):
query = taranis_pb2.VectorsQueryModel(db_name=DB_NAME)
for i in np.random.randint(0, n_batch * n_training_vectors, 100, np.int64).tolist():
query.ids.append(i)
random_vectors: taranis_pb2.VectorsReplyModel = stub.getVectors(query)
search_request = taranis_pb2.SearchRequestModel(db_name=DB_NAME, index_name=INDEX_NAME, k=100, n_probe=5)
for v in random_vectors.vectors:
search_request.vectors.append(v.data)
cg.start("searchVectors")
result_list: taranis_pb2.SearchResultListModel = stub.searchVectors(search_request)
cg.stop()
for sr, qid in zip(result_list.results, query.ids):
print("{} : {}".format(qid, sr.knn[0]))
cg.report(printout=True)
except _Rendezvous as e:
logging.error("{} : {}".format(e.code(), e.details()))
if __name__ == '__main__':
logging.basicConfig(level="INFO")
run()
| [
"logging.getLogger",
"logging.basicConfig",
"taranis_pb2_grpc.TaranisStub",
"logging.StreamHandler",
"utils.chronograph.Chronograph",
"grpc.insecure_channel",
"numpy.random.randint",
"taranis_pb2.SearchRequestModel",
"taranis_pb2.VectorsQueryModel"
] | [((541, 560), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (558, 560), False, 'import logging\n'), ((661, 701), 'logging.StreamHandler', 'logging.StreamHandler', ([], {'stream': 'sys.stdout'}), '(stream=sys.stdout)\n', (682, 701), False, 'import logging\n'), ((5803, 5836), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': '"""INFO"""'}), "(level='INFO')\n", (5822, 5836), False, 'import logging\n'), ((773, 813), 'grpc.insecure_channel', 'grpc.insecure_channel', (['"""localhost:50051"""'], {}), "('localhost:50051')\n", (794, 813), False, 'import grpc\n'), ((841, 878), 'taranis_pb2_grpc.TaranisStub', 'taranis_pb2_grpc.TaranisStub', (['channel'], {}), '(channel)\n', (869, 878), False, 'import taranis_pb2_grpc\n'), ((4662, 4770), 'utils.chronograph.Chronograph', 'Chronograph', ([], {'name': '"""Testing Chronograph"""', 'verbosity': '(1)', 'logger': 'logger', 'log_lvl': '"""INFO"""', 'start_timing': '(False)'}), "(name='Testing Chronograph', verbosity=1, logger=logger, log_lvl\n ='INFO', start_timing=False)\n", (4673, 4770), False, 'from utils.chronograph import Chronograph\n'), ((4827, 4873), 'taranis_pb2.VectorsQueryModel', 'taranis_pb2.VectorsQueryModel', ([], {'db_name': 'DB_NAME'}), '(db_name=DB_NAME)\n', (4856, 4873), False, 'import taranis_pb2\n'), ((5137, 5230), 'taranis_pb2.SearchRequestModel', 'taranis_pb2.SearchRequestModel', ([], {'db_name': 'DB_NAME', 'index_name': 'INDEX_NAME', 'k': '(100)', 'n_probe': '(5)'}), '(db_name=DB_NAME, index_name=INDEX_NAME, k=\n 100, n_probe=5)\n', (5167, 5230), False, 'import taranis_pb2\n'), ((4899, 4964), 'numpy.random.randint', 'np.random.randint', (['(0)', '(n_batch * n_training_vectors)', '(100)', 'np.int64'], {}), '(0, n_batch * n_training_vectors, 100, np.int64)\n', (4916, 4964), True, 'import numpy as np\n')] |
import datetime
from pathlib import Path
from setuptools import setup, find_namespace_packages
with open(Path(__file__).parent / "requirements.txt") as f:
required = f.read().splitlines()
setup(
name="emmet-builders",
use_scm_version={"relative_to": Path(__file__).parent},
setup_requires=["setuptools_scm"],
description="Builders for the Emmet Library",
author="The <NAME>",
author_email="<EMAIL>",
url="https://github.com/materialsproject/emmet",
packages=find_namespace_packages(include=["emmet.*"]),
install_requires=required,
license="modified BSD",
zip_safe=False,
)
| [
"setuptools.find_namespace_packages",
"pathlib.Path"
] | [((496, 540), 'setuptools.find_namespace_packages', 'find_namespace_packages', ([], {'include': "['emmet.*']"}), "(include=['emmet.*'])\n", (519, 540), False, 'from setuptools import setup, find_namespace_packages\n'), ((106, 120), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (110, 120), False, 'from pathlib import Path\n'), ((264, 278), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (268, 278), False, 'from pathlib import Path\n')] |
'''
a='1.1'
b='1'
l1=a.partition('.')
l2=b.partition('.')
print(l1)
print(l2)
c='new'
print(c+l1[1]+l1[2])
print(c+l2[1]+l2[2])
'''
'''
while '.DS_Store' in l:
l.remove('.DS_Store')
len(l)
'''
'''
import csv
f=open('sum.csv','rt')
f_csv=csv.reader(f)
print(type(f_csv))
h=next(f_csv)
print(h)
for r in f_csv:
print(r)
f.close()
t='a'
f=open('sum.csv',t,newline='')
w_csv=csv.writer(f)
#l=[[col*row for col in range(4)] for row in range(5)]
#w_csv.writerows(l)
w_csv.writerow([1,2,3,4])
f.close()
'''
'''
with open('A.csv','rb') as csvfile:
reader = csv.reader(csvfile)
rows= [row for row in reader]
with open('A.csv','rb') as csvfile:
reader = csv.reader(csvfile)
column = [row[2] for row in reader]
with open('A.csv','rb') as csvfile:
reader = csv.DictReader(csvfile)
column = [row for row in reader]
'''
'''
import csv
filnename='/Users/luxi/Desktop/Tencent-intern/med_image/test1/summary.csv'
with open(filnename) as f:
f_reader=csv.reader(f)
h=next(f_reader)
rows=[row for row in f_reader]
print(h)
print(rows)
'''
'''
import os
root='/Users/luxi/Desktop/Tencent-intern/med_image/test1'
print(os.path.basename(root))
'''
'''
l=[['a','b','1','3'],['c','b','1','2'],['z']]
l.sort()
print(l)
'''
'''
import pickle
f=open('todo.pkl','rb')
print(pickle.load(f))
f.close()
'''
'''
a='1234'
b='1'
print(a.zfill(3))
print(b.zfill(3))
'''
'''
import imghdr
if imghdr.what('/Users/luxi/Desktop/Tencent-intern/med_image/test/IMG-0001-00001.dcm'):
print('img')
else:
print(None)
'''
import pickle
f=open('todo.pkl','rb')
a=pickle.load(f)
print(a,len(a))
f.close()
b=set(a)
print(len(b)) | [
"pickle.load"
] | [((1582, 1596), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (1593, 1596), False, 'import pickle\n')] |
from django.contrib.auth import get_user_model, authenticate
from rest_framework import serializers
from rest_framework.serializers import ModelSerializer, Serializer
from django.utils.translation import ugettext_lazy as _
class UserSerializer(ModelSerializer):
"""Serialiser for user"""
class Meta:
model = get_user_model()
fields = ('email', 'password', 'name')
extra_kwargs = {
'password': {'write_only': True, 'min_length': 8}
}
def create(self, validated_data):
"""Creating new user and return it"""
return get_user_model().objects.create_user(**validated_data)
def update(self, instance, validated_data):
"""updating user password and name"""
password = validated_data.pop('password', None)
user = super().update(instance, validated_data)
if password:
user.set_password(password)
user.save()
return user
class AuthTokenSerializer(Serializer):
"""Serializer for user authentication objects"""
email = serializers.CharField()
password = serializers.CharField(
style={'input_type': 'password'},
trim_whitespace=True)
def validate(self, attrs):
email = attrs.get('email')
password = attrs.get('password')
user = authenticate(
request=self.context.get('request'),
username=email,
password=password)
if not user:
msg = _('Unable to authenticate user with provided credentials')
raise serializers.ValidationError(msg, code='authentication')
attrs['user'] = user
return attrs
| [
"django.contrib.auth.get_user_model",
"django.utils.translation.ugettext_lazy",
"rest_framework.serializers.CharField",
"rest_framework.serializers.ValidationError"
] | [((1064, 1087), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (1085, 1087), False, 'from rest_framework import serializers\n'), ((1103, 1180), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'style': "{'input_type': 'password'}", 'trim_whitespace': '(True)'}), "(style={'input_type': 'password'}, trim_whitespace=True)\n", (1124, 1180), False, 'from rest_framework import serializers\n'), ((327, 343), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (341, 343), False, 'from django.contrib.auth import get_user_model, authenticate\n'), ((1485, 1543), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to authenticate user with provided credentials"""'], {}), "('Unable to authenticate user with provided credentials')\n", (1486, 1543), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1562, 1617), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['msg'], {'code': '"""authentication"""'}), "(msg, code='authentication')\n", (1589, 1617), False, 'from rest_framework import serializers\n'), ((589, 605), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (603, 605), False, 'from django.contrib.auth import get_user_model, authenticate\n')] |
"""Creates a new table of characters in PostGreSQL from a characters in
a table in sqlite3"""
import sqlite3
import psycopg2
DBNAME = 'ytwptqxp'
USER = 'ytwptqxp'
PASSWORD = '***'
HOST = 'suleiman.db.elephantsql.com'
#Queries
create_character_table = """
CREATE TABLE charactercreator_character (
character_id SERIAL PRIMARY KEY,
name VARCHAR(30),
level INT,
exp INT,
hp INT,
strength INT,
intelligence INT,
dexterity INT,
wisdom INT
);
"""
get_characters = """
SELECT * FROM charactercreator_character;
"""
# Connects to PostGreSQL database
def connect_pg_db(DBNAME=DBNAME, USER=USER, PASSWORD=PASSWORD, HOST=HOST):
pg_conn = psycopg2.connect(dbname = DBNAME, user = USER,
password=PASSWORD, host=HOST)
pg_curs = pg_conn.cursor()
return pg_conn, pg_curs
# Connects to sqlite3 database
def connect_sl_db(dbname='rpg_db.sqlite3'):
sl_conn = sqlite3.connect(dbname)
sl_curs = sl_conn.cursor()
return sl_conn, sl_curs
# creates a character table in PostGRESQL
def generate_characters_table(pg_conn, pg_curs, sl_curs):
pg_curs.execute(create_character_table)
characters = sl_curs.execute(get_characters)
for character in characters:
insert_character = """
INSERT INTO charactercreator_character
(name, level, exp, hp, strength, intelligence, dexterity, wisdom)
VALUES {};
""".format(character[1:])
print (insert_character)
pg_curs.execute(insert_character)
pg_conn.commit()
if __name__ == '__main__':
pg_conn, pg_curs = connect_pg_db()
sl_conn, sl_curs = connect_sl_db()
generate_characters_table(pg_conn, pg_curs, sl_curs)
| [
"psycopg2.connect",
"sqlite3.connect"
] | [((726, 798), 'psycopg2.connect', 'psycopg2.connect', ([], {'dbname': 'DBNAME', 'user': 'USER', 'password': 'PASSWORD', 'host': 'HOST'}), '(dbname=DBNAME, user=USER, password=PASSWORD, host=HOST)\n', (742, 798), False, 'import psycopg2\n'), ((984, 1007), 'sqlite3.connect', 'sqlite3.connect', (['dbname'], {}), '(dbname)\n', (999, 1007), False, 'import sqlite3\n')] |
from .image import Image
from .base import StylizedElement
class HTML(StylizedElement):
def __init__(self, html, **kwargs):
super().__init__(**kwargs)
self.html = html
def build(self, style):
import imgkit
res = imgkit.from_string(self.html, False)
image = Image(res).do_build(style)
self.width = image.width
self.height = image.height
self.element = image.element
def __repr__(self):
return f'HTML({self.html!r})'
| [
"imgkit.from_string"
] | [((255, 291), 'imgkit.from_string', 'imgkit.from_string', (['self.html', '(False)'], {}), '(self.html, False)\n', (273, 291), False, 'import imgkit\n')] |
import datetime
import json
from django.contrib.auth import logout as auth_logout
from django.contrib.auth.decorators import login_required
from django.http import JsonResponse, HttpResponse
from django.shortcuts import get_object_or_404, redirect, render
from django.views.decorators.cache import never_cache
from django.db.models import Q
from requests.exceptions import MissingSchema
from rules.contrib.views import objectgetter, permission_required
from tellme.models import Feedback
import bugsnag
from .forms import ProfileForm, ProfilePictureForm, ProjectForm
from .models import Profile, Project, User, SuccessStory, FeedbackProject
from ConnectID.choices import *
@never_cache
def index(request):
'''
This is the view when browsing to the root of the website.
If the user is logged in, redirect to the landing page with login information
Otherwise, (user not logged in), redirect him to basic landing
'''
if request.user.is_authenticated:
if request.user.profile.profilePicture :
context = {
'loggedin': True,
'user': request.user,
'currentYear': datetime.datetime.now().year,
'successtories': SuccessStory.objects.all(),
}
return render(request, 'home/landing.html', context)
else:
return redirect('profile/edit/picture')
else:
context = {
'currentYear': datetime.datetime.now().year,
'loggedin': False,
'successtories': SuccessStory.objects.all(),
}
return render(request, 'home/landing.html', context)
def loginSuccess(request):
'''
This view gets called after logging in, to redirect to the home page (ommitting the landing page)
If his profile picture is not set, it should redirect to the edit profile picture page
'''
bugsnag.notify(Exception("Testing bugsnag :)"))
if request.user.is_authenticated:
if request.user.profile.profilePicture:
return redirect('/projects')
else:
return redirect('profile/edit/picture')
else:
return redirect('/')
def login(request):
return redirect('social:begin', backend='linkedin-oauth2')
@login_required(login_url='/login')
def logout(request):
# LOGOUT should be post request. https://stackoverflow.com/questions/3521290/logout-get-or-post
if request.method == 'POST':
auth_logout(request)
return redirect('/')
else:
return render(request, 'home/logout_confirmation.html')
@never_cache
@login_required(login_url='/login')
def projects(request):
projects = Project.objects.all()
favorites = request.user.profile.favorites.values_list('id', flat=True)
'''
#code for primitive search function, not used anymore because too slow
query = request.GET.get("q")
if query:
projects = projects.filter( #Q encapsulates a single LIKE query
Q(title__icontains=query) |
Q(abstract__icontains=query) |
Q(keywords__icontains=query) |
Q(description__icontains=query)).distinct()
'''
context = {
'projects': projects,
'favorites': favorites,
}
return render(request, 'home/home.html', context) # TODO customscript aanpassen
@never_cache
@login_required(login_url='/login')
def profilepage(request, userID, slugName):
user = get_object_or_404(User, pk=userID)
profile = user.profile
projects = Project.objects.filter(owner=user, visible=True, anonymity=False)
context = {
'profile': profile,
'projects': projects,
'userID': userID,
}
return render(request, 'home/profile.html', context)
@login_required(login_url='/login')
def ownprofilepage(request):
return redirect('home:profile', userID=request.user.id, slugName=request.user.get_full_name().replace(" ","_"))
@login_required(
login_url="/login") # no further permissions needed, form is filled out with the logged-in users's data
def editprofilepage(request):
profile = get_object_or_404(Profile, user_id=request.user.id)
if request.method == 'POST':
form = ProfileForm(request.POST, instance=profile)
if form.is_valid():
profile.save()
return redirect('home:ownProfile')
else:
form = ProfileForm(instance=profile)
return render(request, 'home/edit_profile.html', {'form': form, 'profile': profile})
@login_required(login_url="/login")
def editprofilepicturepage(request):
profile = get_object_or_404(Profile, user_id=request.user.id)
errorOnUploadedPicture = False
if request.method == 'POST':
form = ProfilePictureForm(request.POST, request.FILES, instance=profile)
if form.is_valid():
form.save()
return redirect('home:ownProfile')
else:
for e in form.errors:
print(e)
# TODO log error (but also that is is non fatal!)
errorOnUploadedPicture = True
linkedinNotOk = True
# check if image from linkedin is still accessable
# TODO if linkedin profile pictures are fixed, try to get it from the 'Python Social Auth' - 'User Social Auth' model's extra data
# See the issue on Github and the comment in edit_profile_picture.html arround line 35
try:
# It used to be that the link broke after awhile.
# Make sure to check if the link exists (is not none) and it is still reachable
'''
# It used to be this:
result = requests.get(profile.linkedInProfilePictureURL)
if result.status_code is 200:
'''
if True: # Temporary, as long as the linkedin picture is not working
linkedinNotOk = True
except MissingSchema:
# TODO log
linkedinNotOk = True
form = ProfilePictureForm(instance=profile)
context = {'form': form,
'profile': profile,
'linkedinNotOk': linkedinNotOk,
'error': errorOnUploadedPicture
}
return render(request, 'home/edit_profile_picture.html', context )
@login_required(login_url="/login")
def users(request):
context = {
'profiles': Profile.objects.order_by('-id'),
}
return render(request, 'home/users.html', context)
@never_cache
@login_required(login_url='/login')
@permission_required('projects.can_view', fn=objectgetter(Project, 'projectID'))
def projectpage(request, projectID, slugTitle):
bugsnag.notify(Exception("Testing bugsnag :)"))
project = get_object_or_404(Project, id=projectID)
context = {
'project': project,
}
if request.user.profile.favorites.filter(id=projectID).exists():
context.update({'favorite': 'true'})
return render(request, 'home/project.html', context)
@login_required(login_url='/login')
def newprojectpage(request, projectID="newproject"):
# if this is a POST request we need to process the form data
if request.method == 'POST':
if 'submit_form' in request.POST:
# create a form instance and populate it with data from the request:
if projectID == "newproject":
form = ProjectForm(request.POST)
else:
project = get_object_or_404(Project, id=projectID)
form = ProjectForm(request.POST, instance=project)
# check whether it's valid:
if form.is_valid():
# process the data in form.cleaned_data as required
project = form.save(commit=False)
project.owner = request.user
# project.projectType = request.POST.get('type')
# get all values from the 5 buttons, if text is empty ignore.
project.save()
return redirect('home:projects')
elif 'add_keyword' in request.POST:
formset = ProjectForm(request.POST)
if formset.is_valid():
for form in formset:
# only save if name is present
if form.cleaned_data.get('name'):
form.save()
return redirect('home:newproject')
# if a GET (or any other method) we'll create a blank form
else:
if projectID == "newproject":
form = ProjectForm()
else:
project = get_object_or_404(Project, id=projectID)
form = ProjectForm(instance=project)
return render(request, 'home/new_project.html', {'form': form})
@login_required(login_url='/login')
@permission_required('projects.edit_project', fn=objectgetter(Project, 'projectID'))
def editprojectpage(request, projectID):
project = get_object_or_404(Project, id=projectID)
if request.method == 'POST':
form = ProjectForm(request.POST, instance=project)
if form.is_valid():
project = form.save()
return redirect('home:projects')
# if a GET (or any other method) we'll create a blank form
else:
form = ProjectForm(instance=project)
return render(request, 'home/new_project.html', {'form': form})
@login_required(login_url='/login')
def togglefavorite(request):
data = json.loads(request.body)
projectID = data["projectID"]
project = Project.objects.get(id=projectID)
profile = request.user.profile
if profile.favorites.filter(id=projectID).exists():
profile.favorites.remove(project)
else:
profile.favorites.add(project)
return JsonResponse({
'projectID': projectID,
})
@permission_required('common.is_staff')
@login_required(login_url='/login')
def feedback(request):
context = {
'feedbacks': Feedback.objects.all(),
}
return render(request, 'home/feedback.html', context)
@permission_required('projects.is_owner', fn=objectgetter(Project, 'projectID'))
@login_required(login_url='/login')
def deleteproject(request, projectID):
print(request.body)
data = json.loads(request.body)
projectNameGuess = data['projectNameGuess']
feedback = data['feedback']
actualProject = get_object_or_404(Project, pk=projectID)
if projectNameGuess == actualProject.title:
Project.objects.filter(pk=projectID).delete()
response = {'removed': True}
feedbackmodel = FeedbackProject()
feedbackmodel.feedback = feedback
feedbackmodel.save()
else:
response = {'removed': False}
return JsonResponse(response)
def bad_request(request):
return render(request, 'home/error_pages/400.html')
def permission_denied(request):
return render(request, 'home/error_pages/403.html')
def not_found(request):
return render(request, 'home/error_pages/404.html')
def server_error(request):
return render(request, 'home/error_pages/500.html') | [
"django.shortcuts.render",
"tellme.models.Feedback.objects.all",
"json.loads",
"rules.contrib.views.objectgetter",
"django.http.JsonResponse",
"django.shortcuts.get_object_or_404",
"rules.contrib.views.permission_required",
"datetime.datetime.now",
"django.shortcuts.redirect",
"django.contrib.auth... | [((2267, 2301), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/login"""'}), "(login_url='/login')\n", (2281, 2301), False, 'from django.contrib.auth.decorators import login_required\n'), ((2603, 2637), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/login"""'}), "(login_url='/login')\n", (2617, 2637), False, 'from django.contrib.auth.decorators import login_required\n'), ((3371, 3405), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/login"""'}), "(login_url='/login')\n", (3385, 3405), False, 'from django.contrib.auth.decorators import login_required\n'), ((3770, 3804), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/login"""'}), "(login_url='/login')\n", (3784, 3804), False, 'from django.contrib.auth.decorators import login_required\n'), ((3953, 3987), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/login"""'}), "(login_url='/login')\n", (3967, 3987), False, 'from django.contrib.auth.decorators import login_required\n'), ((4516, 4550), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/login"""'}), "(login_url='/login')\n", (4530, 4550), False, 'from django.contrib.auth.decorators import login_required\n'), ((6187, 6221), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/login"""'}), "(login_url='/login')\n", (6201, 6221), False, 'from django.contrib.auth.decorators import login_required\n'), ((6387, 6421), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/login"""'}), "(login_url='/login')\n", (6401, 6421), False, 'from django.contrib.auth.decorators import login_required\n'), ((6885, 6919), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/login"""'}), 
"(login_url='/login')\n", (6899, 6919), False, 'from django.contrib.auth.decorators import login_required\n'), ((8592, 8626), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/login"""'}), "(login_url='/login')\n", (8606, 8626), False, 'from django.contrib.auth.decorators import login_required\n'), ((9200, 9234), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/login"""'}), "(login_url='/login')\n", (9214, 9234), False, 'from django.contrib.auth.decorators import login_required\n'), ((9634, 9672), 'rules.contrib.views.permission_required', 'permission_required', (['"""common.is_staff"""'], {}), "('common.is_staff')\n", (9653, 9672), False, 'from rules.contrib.views import objectgetter, permission_required\n'), ((9674, 9708), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/login"""'}), "(login_url='/login')\n", (9688, 9708), False, 'from django.contrib.auth.decorators import login_required\n'), ((9940, 9974), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/login"""'}), "(login_url='/login')\n", (9954, 9974), False, 'from django.contrib.auth.decorators import login_required\n'), ((2212, 2263), 'django.shortcuts.redirect', 'redirect', (['"""social:begin"""'], {'backend': '"""linkedin-oauth2"""'}), "('social:begin', backend='linkedin-oauth2')\n", (2220, 2263), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((3282, 3324), 'django.shortcuts.render', 'render', (['request', '"""home/home.html"""', 'context'], {}), "(request, 'home/home.html', context)\n", (3288, 3324), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((3461, 3495), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['User'], {'pk': 'userID'}), '(User, pk=userID)\n', (3478, 3495), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((3722, 3767), 
'django.shortcuts.render', 'render', (['request', '"""home/profile.html"""', 'context'], {}), "(request, 'home/profile.html', context)\n", (3728, 3767), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((4122, 4173), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Profile'], {'user_id': 'request.user.id'}), '(Profile, user_id=request.user.id)\n', (4139, 4173), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((4436, 4513), 'django.shortcuts.render', 'render', (['request', '"""home/edit_profile.html"""', "{'form': form, 'profile': profile}"], {}), "(request, 'home/edit_profile.html', {'form': form, 'profile': profile})\n", (4442, 4513), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((4602, 4653), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Profile'], {'user_id': 'request.user.id'}), '(Profile, user_id=request.user.id)\n', (4619, 4653), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((6124, 6182), 'django.shortcuts.render', 'render', (['request', '"""home/edit_profile_picture.html"""', 'context'], {}), "(request, 'home/edit_profile_picture.html', context)\n", (6130, 6182), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((6328, 6371), 'django.shortcuts.render', 'render', (['request', '"""home/users.html"""', 'context'], {}), "(request, 'home/users.html', context)\n", (6334, 6371), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((6617, 6657), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Project'], {'id': 'projectID'}), '(Project, id=projectID)\n', (6634, 6657), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((6836, 6881), 'django.shortcuts.render', 'render', (['request', '"""home/project.html"""', 'context'], {}), "(request, 'home/project.html', context)\n", (6842, 6881), False, 'from django.shortcuts import 
get_object_or_404, redirect, render\n'), ((8532, 8588), 'django.shortcuts.render', 'render', (['request', '"""home/new_project.html"""', "{'form': form}"], {}), "(request, 'home/new_project.html', {'form': form})\n", (8538, 8588), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((8767, 8807), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Project'], {'id': 'projectID'}), '(Project, id=projectID)\n', (8784, 8807), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((9140, 9196), 'django.shortcuts.render', 'render', (['request', '"""home/new_project.html"""', "{'form': form}"], {}), "(request, 'home/new_project.html', {'form': form})\n", (9146, 9196), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((9275, 9299), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (9285, 9299), False, 'import json\n'), ((9577, 9615), 'django.http.JsonResponse', 'JsonResponse', (["{'projectID': projectID}"], {}), "({'projectID': projectID})\n", (9589, 9615), False, 'from django.http import JsonResponse, HttpResponse\n'), ((9810, 9856), 'django.shortcuts.render', 'render', (['request', '"""home/feedback.html"""', 'context'], {}), "(request, 'home/feedback.html', context)\n", (9816, 9856), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((10049, 10073), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (10059, 10073), False, 'import json\n'), ((10175, 10215), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Project'], {'pk': 'projectID'}), '(Project, pk=projectID)\n', (10192, 10215), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((10530, 10552), 'django.http.JsonResponse', 'JsonResponse', (['response'], {}), '(response)\n', (10542, 10552), False, 'from django.http import JsonResponse, HttpResponse\n'), ((10592, 10636), 'django.shortcuts.render', 'render', (['request', 
'"""home/error_pages/400.html"""'], {}), "(request, 'home/error_pages/400.html')\n", (10598, 10636), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((10681, 10725), 'django.shortcuts.render', 'render', (['request', '"""home/error_pages/403.html"""'], {}), "(request, 'home/error_pages/403.html')\n", (10687, 10725), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((10762, 10806), 'django.shortcuts.render', 'render', (['request', '"""home/error_pages/404.html"""'], {}), "(request, 'home/error_pages/404.html')\n", (10768, 10806), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((10846, 10890), 'django.shortcuts.render', 'render', (['request', '"""home/error_pages/500.html"""'], {}), "(request, 'home/error_pages/500.html')\n", (10852, 10890), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((1603, 1648), 'django.shortcuts.render', 'render', (['request', '"""home/landing.html"""', 'context'], {}), "(request, 'home/landing.html', context)\n", (1609, 1648), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((2165, 2178), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (2173, 2178), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((2464, 2484), 'django.contrib.auth.logout', 'auth_logout', (['request'], {}), '(request)\n', (2475, 2484), True, 'from django.contrib.auth import logout as auth_logout\n'), ((2500, 2513), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (2508, 2513), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((2539, 2587), 'django.shortcuts.render', 'render', (['request', '"""home/logout_confirmation.html"""'], {}), "(request, 'home/logout_confirmation.html')\n", (2545, 2587), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((6467, 6501), 'rules.contrib.views.objectgetter', 
'objectgetter', (['Project', '"""projectID"""'], {}), "(Project, 'projectID')\n", (6479, 6501), False, 'from rules.contrib.views import objectgetter, permission_required\n'), ((8676, 8710), 'rules.contrib.views.objectgetter', 'objectgetter', (['Project', '"""projectID"""'], {}), "(Project, 'projectID')\n", (8688, 8710), False, 'from rules.contrib.views import objectgetter, permission_required\n'), ((9769, 9791), 'tellme.models.Feedback.objects.all', 'Feedback.objects.all', ([], {}), '()\n', (9789, 9791), False, 'from tellme.models import Feedback\n'), ((9903, 9937), 'rules.contrib.views.objectgetter', 'objectgetter', (['Project', '"""projectID"""'], {}), "(Project, 'projectID')\n", (9915, 9937), False, 'from rules.contrib.views import objectgetter, permission_required\n'), ((1291, 1336), 'django.shortcuts.render', 'render', (['request', '"""home/landing.html"""', 'context'], {}), "(request, 'home/landing.html', context)\n", (1297, 1336), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((1370, 1402), 'django.shortcuts.redirect', 'redirect', (['"""profile/edit/picture"""'], {}), "('profile/edit/picture')\n", (1378, 1402), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((2052, 2073), 'django.shortcuts.redirect', 'redirect', (['"""/projects"""'], {}), "('/projects')\n", (2060, 2073), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((2107, 2139), 'django.shortcuts.redirect', 'redirect', (['"""profile/edit/picture"""'], {}), "('profile/edit/picture')\n", (2115, 2139), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((4341, 4368), 'django.shortcuts.redirect', 'redirect', (['"""home:ownProfile"""'], {}), "('home:ownProfile')\n", (4349, 4368), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((4874, 4901), 'django.shortcuts.redirect', 'redirect', (['"""home:ownProfile"""'], {}), "('home:ownProfile')\n", (4882, 4901), False, 
'from django.shortcuts import get_object_or_404, redirect, render\n'), ((8430, 8470), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Project'], {'id': 'projectID'}), '(Project, id=projectID)\n', (8447, 8470), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((8983, 9008), 'django.shortcuts.redirect', 'redirect', (['"""home:projects"""'], {}), "('home:projects')\n", (8991, 9008), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((1460, 1483), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1481, 1483), False, 'import datetime\n'), ((7329, 7369), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Project'], {'id': 'projectID'}), '(Project, id=projectID)\n', (7346, 7369), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((7871, 7896), 'django.shortcuts.redirect', 'redirect', (['"""home:projects"""'], {}), "('home:projects')\n", (7879, 7896), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((8221, 8248), 'django.shortcuts.redirect', 'redirect', (['"""home:newproject"""'], {}), "('home:newproject')\n", (8229, 8248), False, 'from django.shortcuts import get_object_or_404, redirect, render\n'), ((1167, 1190), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1188, 1190), False, 'import datetime\n')] |
from nose.tools import raises
from rightscale.rightscale import Resource
def test_empty_linky_thing():
    """A freshly constructed Resource exposes no links."""
    resource = Resource()
    assert resource.links == {}


def test_resource_repr():
    """repr() output must round-trip through eval()."""
    resource = Resource()
    assert resource == eval(repr(resource))
    resource = {
        'a': '/path/to/a',
        'mojo': '/somefingelse/blah/blah',
        'thang': '/thang.thang',
    }
    assert resource == eval(repr(resource))


def test_loaded_linky_thing():
    """Links supplied as rel/href pairs are exposed as a mapping."""
    expected = {
        'a': '/path/to/a',
        'mojo': '/somefingelse/blah/blah',
        'thang': '/thang.thang',
    }
    raw_links = [{'rel': rel, 'href': href} for rel, href in expected.items()]
    linky = Resource({'links': raw_links})
    assert expected == linky.links


def test_allow_customize_links():
    """
    Allow custom links independent of original initialization data.
    """
    original = {
        'a': '/path/to/a',
        'mojo': '/somefingelse/blah/blah',
        'thang': '/thang.thang',
    }
    newmojo = 'another thing yeah'
    raw_links = [{'rel': rel, 'href': href} for rel, href in original.items()]
    linky = Resource({'links': raw_links})
    linky.links['mojo'] = newmojo
    assert dict(original, mojo=newmojo) == linky.links


def test_str_resource():
    """
    str(resource) should be based on the soul.
    """
    soul = {'foo': '/foo', 'bar': '/path/to/bar'}
    assert str(soul) == str(Resource(soul=soul))


def test_dir_resource():
    """
    dir(resource) should only be based on the embedded links.
    """
    links = {'foo': '/foo', 'bar': '/path/to/bar'}
    res = Resource()
    res._links = links
    assert set(dir(res)) == set(links.keys())


def test_real_resource_attr():
    """
    Resource objects should allow access to attrs named after links.
    """
    res = Resource()
    res._links = {'foo': '/foo', 'bar': '/path/to/bar'}
    res.foo
    res.bar


@raises(AttributeError)
def test_bogus_resource_attr():
    """
    Resource objects should complain when trying to access unknown attrs.
    """
    res = Resource()
    res.fubar
| [
"rightscale.rightscale.Resource",
"nose.tools.raises"
] | [((1945, 1967), 'nose.tools.raises', 'raises', (['AttributeError'], {}), '(AttributeError)\n', (1951, 1967), False, 'from nose.tools import raises\n'), ((117, 127), 'rightscale.rightscale.Resource', 'Resource', ([], {}), '()\n', (125, 127), False, 'from rightscale.rightscale import Resource\n'), ((193, 203), 'rightscale.rightscale.Resource', 'Resource', ([], {}), '()\n', (201, 203), False, 'from rightscale.rightscale import Resource\n'), ((676, 701), 'rightscale.rightscale.Resource', 'Resource', (["{'links': data}"], {}), "({'links': data})\n", (684, 701), False, 'from rightscale.rightscale import Resource\n'), ((1163, 1188), 'rightscale.rightscale.Resource', 'Resource', (["{'links': data}"], {}), "({'links': data})\n", (1171, 1188), False, 'from rightscale.rightscale import Resource\n'), ((1404, 1424), 'rightscale.rightscale.Resource', 'Resource', ([], {'soul': 'fakey'}), '(soul=fakey)\n', (1412, 1424), False, 'from rightscale.rightscale import Resource\n'), ((1625, 1635), 'rightscale.rightscale.Resource', 'Resource', ([], {}), '()\n', (1633, 1635), False, 'from rightscale.rightscale import Resource\n'), ((1884, 1894), 'rightscale.rightscale.Resource', 'Resource', ([], {}), '()\n', (1892, 1894), False, 'from rightscale.rightscale import Resource\n'), ((2151, 2161), 'rightscale.rightscale.Resource', 'Resource', ([], {}), '()\n', (2159, 2161), False, 'from rightscale.rightscale import Resource\n')] |
##
# Copyright (C) 2014 <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from googler.recaptcha import mailhide
import unittest
class TestEncryption(unittest.TestCase):
    """
    Verify that mailhide encryption followed by decryption is a lossless
    round trip for a set of sample addresses.
    """

    def setUp(self):
        self.private_key = 'deadbeefdeadbeefdeadbeefdeadbeef'
        self.email_address_list = (
            '<EMAIL>',
            '<EMAIL>',
            '<EMAIL>',
        )

    def test_mailhide_encryption(self):
        for address in self.email_address_list:
            encrypted = mailhide._encrypt_email_address(address, self.private_key)
            decrypted = mailhide._decrypt_email_address(encrypted, self.private_key)
            self.assertEqual(address, decrypted)
if __name__ == '__main__':
unittest.main() | [
"unittest.main",
"googler.recaptcha.mailhide._decrypt_email_address",
"googler.recaptcha.mailhide._encrypt_email_address"
] | [((1310, 1325), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1323, 1325), False, 'import unittest\n'), ((1106, 1162), 'googler.recaptcha.mailhide._encrypt_email_address', 'mailhide._encrypt_email_address', (['email', 'self.private_key'], {}), '(email, self.private_key)\n', (1137, 1162), False, 'from googler.recaptcha import mailhide\n'), ((1181, 1235), 'googler.recaptcha.mailhide._decrypt_email_address', 'mailhide._decrypt_email_address', (['enc', 'self.private_key'], {}), '(enc, self.private_key)\n', (1212, 1235), False, 'from googler.recaptcha import mailhide\n')] |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2019 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Module tests."""
from __future__ import absolute_import, print_function
import pytest
from invenio_accounts.testutils import create_test_user
from invenio_jsonschemas.errors import JSONSchemaNotFound
from invenio_pidstore.models import PersistentIdentifier
from jsonschema.exceptions import ValidationError
from invenio_sipstore.errors import SIPUserDoesNotExist
from invenio_sipstore.models import SIP, RecordSIP, SIPFile, SIPMetadata, \
SIPMetadataType
def test_sip_model(db):
    """Test the SIP model."""
    user = create_test_user('<EMAIL>')
    # Well-formed agent payload.
    good_agent = {'email': '<EMAIL>', 'ip_address': '1.1.1.1'}
    # Agent payload with schema-violating value types.
    bad_agent = {
        'email': ['should', 'not', 'be', 'a', 'list'],
        'ip_address': {'definitely': 'not', 'a': 'dict'},
    }
    # Agent payload pointing at a non-existent JSON schema.
    missing_schema_agent = {
        'email': '<EMAIL>',
        'ip_address': '1.1.1.1',
        '$schema': 'http://incorrect/agent/schema.json',
    }
    first_sip = SIP.create(user_id=user.id, agent=good_agent)
    assert first_sip.user == user
    SIP.create()
    SIP.create(user_id=user.id, agent=good_agent)
    assert SIP.query.count() == 3
    with pytest.raises(ValidationError):
        SIP.create(agent=bad_agent)
    with pytest.raises(SIPUserDoesNotExist):
        SIP.create(user_id=5)
    with pytest.raises(JSONSchemaNotFound):
        SIP.create(agent=missing_schema_agent)
    db.session.commit()


def test_sip_file_model(app, db, sips):
    """Test the SIPFile model."""
    sip = sips[0]
    app.config['SIPSTORE_FILEPATH_MAX_LEN'] = 15
    with pytest.raises(ValueError) as excinfo:
        SIPFile(
            sip_id=sip.id,
            filepath="way too long file name.zip",
            file_id=sip.files[0].file_id,
        )
    assert 'Filepath too long' in str(excinfo.value)


def test_sip_file_storage_location(db, sips):
    """Test the storage_location SIPFile member."""
    first_file = sips[0].files[0]
    assert first_file.filepath == 'foobar.txt'
    with open(first_file.storage_location, "rb") as fp:
        assert fp.read() == b'test'


def test_sip_metadata_model(db):
    """Test the SIPMetadata model."""
    sip = SIP.create()
    mtype = SIPMetadataType(title='JSON Test', name='json-test',
                            format='json', schema='url')
    db.session.add(mtype)
    content = '{"title": "great book"}'
    db.session.add(SIPMetadata(sip_id=sip.id, content=content, type=mtype))
    db.session.commit()
    assert SIP.query.count() == 1
    assert SIPMetadataType.query.count() == 1
    assert SIPMetadata.query.count() == 1
    stored = SIPMetadata.query.one()
    assert stored.content == content
    assert stored.type.format == 'json'
    assert stored.sip.id == sip.id


def test_sip_metadatatype_model(db):
    """Test the SIPMetadata model."""
    mtype = SIPMetadataType(title='JSON Test', name='json-test',
                            format='json', schema='url')
    db.session.add(mtype)
    db.session.commit()
    assert SIPMetadataType.query.count() == 1
    stored = SIPMetadataType.get(mtype.id)
    assert stored.title == 'JSON Test'
    assert stored.format == 'json'
    assert stored.name == 'json-test'
    assert stored.schema == 'url'


def test_record_sip_model(db):
    """Test the RecordSIP model."""
    sip = SIP.create()
    db.session.commit()
    pid = PersistentIdentifier.create('recid', '12345')
    db.session.add(RecordSIP(sip_id=sip.id, pid_id=pid.id))
    db.session.commit()
    assert RecordSIP.query.count() == 1
| [
"invenio_sipstore.models.SIPMetadata.query.count",
"invenio_sipstore.models.SIPMetadata.query.one",
"invenio_accounts.testutils.create_test_user",
"invenio_sipstore.models.SIP.query.count",
"invenio_sipstore.models.SIPFile",
"invenio_sipstore.models.SIP.create",
"invenio_sipstore.models.RecordSIP",
"i... | [((769, 796), 'invenio_accounts.testutils.create_test_user', 'create_test_user', (['"""<EMAIL>"""'], {}), "('<EMAIL>')\n", (785, 796), False, 'from invenio_accounts.testutils import create_test_user\n'), ((1225, 1267), 'invenio_sipstore.models.SIP.create', 'SIP.create', ([], {'user_id': 'user1.id', 'agent': 'agent1'}), '(user_id=user1.id, agent=agent1)\n', (1235, 1267), False, 'from invenio_sipstore.models import SIP, RecordSIP, SIPFile, SIPMetadata, SIPMetadataType\n'), ((1303, 1315), 'invenio_sipstore.models.SIP.create', 'SIP.create', ([], {}), '()\n', (1313, 1315), False, 'from invenio_sipstore.models import SIP, RecordSIP, SIPFile, SIPMetadata, SIPMetadataType\n'), ((1320, 1362), 'invenio_sipstore.models.SIP.create', 'SIP.create', ([], {'user_id': 'user1.id', 'agent': 'agent1'}), '(user_id=user1.id, agent=agent1)\n', (1330, 1362), False, 'from invenio_sipstore.models import SIP, RecordSIP, SIPFile, SIPMetadata, SIPMetadataType\n'), ((1402, 1458), 'pytest.raises', 'pytest.raises', (['ValidationError', 'SIP.create'], {'agent': 'agent2'}), '(ValidationError, SIP.create, agent=agent2)\n', (1415, 1458), False, 'import pytest\n'), ((1463, 1520), 'pytest.raises', 'pytest.raises', (['SIPUserDoesNotExist', 'SIP.create'], {'user_id': '(5)'}), '(SIPUserDoesNotExist, SIP.create, user_id=5)\n', (1476, 1520), False, 'import pytest\n'), ((1525, 1584), 'pytest.raises', 'pytest.raises', (['JSONSchemaNotFound', 'SIP.create'], {'agent': 'agent3'}), '(JSONSchemaNotFound, SIP.create, agent=agent3)\n', (1538, 1584), False, 'import pytest\n'), ((2317, 2329), 'invenio_sipstore.models.SIP.create', 'SIP.create', ([], {}), '()\n', (2327, 2329), False, 'from invenio_sipstore.models import SIP, RecordSIP, SIPFile, SIPMetadata, SIPMetadataType\n'), ((2342, 2428), 'invenio_sipstore.models.SIPMetadataType', 'SIPMetadataType', ([], {'title': '"""JSON Test"""', 'name': '"""json-test"""', 'format': '"""json"""', 'schema': '"""url"""'}), "(title='JSON Test', name='json-test', 
format='json', schema=\n 'url')\n", (2357, 2428), False, 'from invenio_sipstore.models import SIP, RecordSIP, SIPFile, SIPMetadata, SIPMetadataType\n'), ((2538, 2596), 'invenio_sipstore.models.SIPMetadata', 'SIPMetadata', ([], {'sip_id': 'sip1.id', 'content': 'metadata1', 'type': 'mtype'}), '(sip_id=sip1.id, content=metadata1, type=mtype)\n', (2549, 2596), False, 'from invenio_sipstore.models import SIP, RecordSIP, SIPFile, SIPMetadata, SIPMetadataType\n'), ((2823, 2846), 'invenio_sipstore.models.SIPMetadata.query.one', 'SIPMetadata.query.one', ([], {}), '()\n', (2844, 2846), False, 'from invenio_sipstore.models import SIP, RecordSIP, SIPFile, SIPMetadata, SIPMetadataType\n'), ((3066, 3152), 'invenio_sipstore.models.SIPMetadataType', 'SIPMetadataType', ([], {'title': '"""JSON Test"""', 'name': '"""json-test"""', 'format': '"""json"""', 'schema': '"""url"""'}), "(title='JSON Test', name='json-test', format='json', schema=\n 'url')\n", (3081, 3152), False, 'from invenio_sipstore.models import SIP, RecordSIP, SIPFile, SIPMetadata, SIPMetadataType\n'), ((3294, 3323), 'invenio_sipstore.models.SIPMetadataType.get', 'SIPMetadataType.get', (['mtype.id'], {}), '(mtype.id)\n', (3313, 3323), False, 'from invenio_sipstore.models import SIP, RecordSIP, SIPFile, SIPMetadata, SIPMetadataType\n'), ((3586, 3598), 'invenio_sipstore.models.SIP.create', 'SIP.create', ([], {}), '()\n', (3596, 3598), False, 'from invenio_sipstore.models import SIP, RecordSIP, SIPFile, SIPMetadata, SIPMetadataType\n'), ((3634, 3679), 'invenio_pidstore.models.PersistentIdentifier.create', 'PersistentIdentifier.create', (['"""recid"""', '"""12345"""'], {}), "('recid', '12345')\n", (3661, 3679), False, 'from invenio_pidstore.models import PersistentIdentifier\n'), ((3693, 3734), 'invenio_sipstore.models.RecordSIP', 'RecordSIP', ([], {'sip_id': 'sip1.id', 'pid_id': 'pid1.id'}), '(sip_id=sip1.id, pid_id=pid1.id)\n', (3702, 3734), False, 'from invenio_sipstore.models import SIP, RecordSIP, SIPFile, 
SIPMetadata, SIPMetadataType\n'), ((1374, 1391), 'invenio_sipstore.models.SIP.query.count', 'SIP.query.count', ([], {}), '()\n', (1389, 1391), False, 'from invenio_sipstore.models import SIP, RecordSIP, SIPFile, SIPMetadata, SIPMetadataType\n'), ((1761, 1786), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1774, 1786), False, 'import pytest\n'), ((1807, 1903), 'invenio_sipstore.models.SIPFile', 'SIPFile', ([], {'sip_id': 'sip.id', 'filepath': '"""way too long file name.zip"""', 'file_id': 'sip.files[0].file_id'}), "(sip_id=sip.id, filepath='way too long file name.zip', file_id=sip.\n files[0].file_id)\n", (1814, 1903), False, 'from invenio_sipstore.models import SIP, RecordSIP, SIPFile, SIPMetadata, SIPMetadataType\n'), ((2694, 2711), 'invenio_sipstore.models.SIP.query.count', 'SIP.query.count', ([], {}), '()\n', (2709, 2711), False, 'from invenio_sipstore.models import SIP, RecordSIP, SIPFile, SIPMetadata, SIPMetadataType\n'), ((2728, 2757), 'invenio_sipstore.models.SIPMetadataType.query.count', 'SIPMetadataType.query.count', ([], {}), '()\n', (2755, 2757), False, 'from invenio_sipstore.models import SIP, RecordSIP, SIPFile, SIPMetadata, SIPMetadataType\n'), ((2774, 2799), 'invenio_sipstore.models.SIPMetadata.query.count', 'SIPMetadata.query.count', ([], {}), '()\n', (2797, 2799), False, 'from invenio_sipstore.models import SIP, RecordSIP, SIPFile, SIPMetadata, SIPMetadataType\n'), ((3237, 3266), 'invenio_sipstore.models.SIPMetadataType.query.count', 'SIPMetadataType.query.count', ([], {}), '()\n', (3264, 3266), False, 'from invenio_sipstore.models import SIP, RecordSIP, SIPFile, SIPMetadata, SIPMetadataType\n'), ((3796, 3819), 'invenio_sipstore.models.RecordSIP.query.count', 'RecordSIP.query.count', ([], {}), '()\n', (3817, 3819), False, 'from invenio_sipstore.models import SIP, RecordSIP, SIPFile, SIPMetadata, SIPMetadataType\n')] |
from django.db import models
class Performer(models.Model):
    """A musical act; referenced by the songs it performs."""
    # Human-readable name, also used as the string representation.
    name = models.CharField(max_length=255)
    def __str__(self):
        return self.name
class Song(models.Model):
    """A single song, credited to an artist and played by a Performer."""

    title = models.CharField(max_length=255)
    # Credited artist; may differ from the performer that plays it.
    artist = models.CharField(max_length=255)
    # BUG FIX: ``on_delete`` is required from Django 2.0 onward; deleting a
    # performer cascades to their songs.
    performer = models.ForeignKey(Performer, on_delete=models.CASCADE)
    # Song length; presumably seconds -- confirm with callers.
    length = models.IntegerField()

    class Meta:
        # BUG FIX: this was ``class META`` with ``ordering = ['order']``.
        # Django only honours an inner class literally named ``Meta``, and no
        # ``order`` field exists on this model, so the old declaration was
        # silently ignored (and would have crashed if renamed as-is).
        # Order songs alphabetically by title instead.
        ordering = ['title']

    def __str__(self):
        return "{} by {}".format(self.title, self.artist)
| [
"django.db.models.IntegerField",
"django.db.models.CharField",
"django.db.models.ForeignKey"
] | [((73, 105), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (89, 105), False, 'from django.db import models\n'), ((195, 227), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (211, 227), False, 'from django.db import models\n'), ((241, 273), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (257, 273), False, 'from django.db import models\n'), ((290, 318), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Performer'], {}), '(Performer)\n', (307, 318), False, 'from django.db import models\n'), ((332, 353), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (351, 353), False, 'from django.db import models\n')] |
import os
import subprocess
def compile_schemas():
    """Run ``protoc`` over every ``.proto`` file in the current directory,
    emitting Python modules alongside them."""
    proto_files = [name for name in os.listdir(".") if name.endswith(".proto")]
    for proto in proto_files:
        print(proto)
        subprocess.run(["protoc", "--python_out=.", proto])
| [
"subprocess.run",
"os.listdir"
] | [((69, 84), 'os.listdir', 'os.listdir', (['"""."""'], {}), "('.')\n", (79, 84), False, 'import os\n'), ((158, 208), 'subprocess.run', 'subprocess.run', (["['protoc', '--python_out=.', file]"], {}), "(['protoc', '--python_out=.', file])\n", (172, 208), False, 'import subprocess\n')] |
# read delta G values from equilibrator_results.tsv
# NOTE(review): the results file is opened at import time and never closed;
# presumably this handle is meant to be passed to ``read_dg`` below -- confirm.
infile = open('equilibrator_results.tsv', 'r')
import numpy as np
def read_dg(infile):
    """Parse delta-G values from an equilibrator results stream.

    Only lines starting with a single quote are treated as data rows
    (anything else, e.g. the header, is skipped).  The third tab-separated
    column is expected to look like ``(12.3 ...`` and the leading number
    inside the parenthesis is extracted.

    Returns a numpy array of the parsed floats.
    """
    values = []
    for raw_line in infile:
        if raw_line.startswith("'"):  # data rows start with a quoted name
            columns = raw_line.strip("\n").split("\t")
            magnitude = columns[2].split(" ")[0].strip("(")
            values.append(float(magnitude))
    return np.array(values)
| [
"numpy.array"
] | [((408, 425), 'numpy.array', 'np.array', (['dg_list'], {}), '(dg_list)\n', (416, 425), True, 'import numpy as np\n')] |
import os
def get_icons():
    """Map each author directory under ``src/icons`` to its icon file names."""
    walker = os.walk("src/icons")
    next(walker)  # skip the top-level entry; only author sub-dirs matter
    return {
        os.path.split(dirpath)[-1]: filenames
        for dirpath, _, filenames in walker
    }
def generate_js(icons):
    """Render the ES-module source importing every icon and exporting lists.

    ``icons`` maps an author name to the ``.svg`` file names in that author's
    directory.  Each icon gets the identifier ``<author>__<stem>`` with
    dashes turned into underscores.
    """
    import_lines = []
    identifiers = []
    for author, file_names in icons.items():
        for file_name in file_names:
            identifier = f"{author}__{file_name}".removesuffix(".svg").replace("-", "_")
            identifiers.append(identifier)
            import_lines.append(f"import {identifier} from './icons/{author}/{file_name}'")
    export_lines = [
        "\n",
        f"export const icon_names = {identifiers}",
        # Bare (unquoted) identifiers so the exported list references the imports.
        f"export const icons = {identifiers}".replace("'", ""),
        "\n",
    ]
    return "\n".join(import_lines + export_lines)
def main():
    """Regenerate ``src/icons.js`` from the icon directory tree."""
    rendered = generate_js(get_icons())
    with open("src/icons.js", "w") as output:
        output.write(rendered)


if __name__ == "__main__":
    main()
| [
"os.walk",
"os.path.split"
] | [((61, 81), 'os.walk', 'os.walk', (['"""src/icons"""'], {}), "('src/icons')\n", (68, 81), False, 'import os\n'), ((174, 193), 'os.path.split', 'os.path.split', (['root'], {}), '(root)\n', (187, 193), False, 'import os\n')] |
import time
# BUG FIX: the easydict class is ``EasyDict`` -- ``EasyDoct`` raised an
# ImportError as soon as this module was imported.
from easydict import EasyDict as edict

## Init
__C = edict()
cfg = __C

## seed value
__C.SEED = 2021

## dataset name (e.g. X, Y); filled in by the experiment entry point
__C.DATASET = None

## network name (e.g. X, Y); filled in by the experiment entry point
__C.NET = None

## gpu ids
__C.GPU_ID = [0]

## learning rate
__C.LR = 0

# LR scheduler

## training settings
__C.MAX_EPOCH = 0

## experiment name: timestamp + dataset + network
now = time.strftime('%m-%d_%H-%M', time.localtime())
# BUG FIX: DATASET/NET default to None, so ``now + '_' + __C.DATASET``
# raised a TypeError at import time.  Format through str() so the module is
# importable before they are assigned.
__C.EXP_NAME = '{}_{}_{}'.format(now, __C.DATASET, __C.NET)

## logging
__C.EXP_PATH = '/experiments'
| [
"easydict.EasyDoct",
"time.localtime"
] | [((67, 74), 'easydict.EasyDoct', 'edict', ([], {}), '()\n', (72, 74), True, 'from easydict import EasyDoct as edict\n'), ((350, 366), 'time.localtime', 'time.localtime', ([], {}), '()\n', (364, 366), False, 'import time\n')] |
from lib.data.model.shared.abstract_board import AbstractBoard
from lib.data.model.shared.cell import Cell
from lib.data.model.shared.player import Player
class TicTacToeBoard(AbstractBoard):
    """Square tic-tac-toe board backed by a 2-D grid of ``Cell`` objects."""
    def __init__(self, size):
        # The board is always square: height == width == size.
        self.height = self.width = size
        # Grid of Cells indexed board[j][i]; Cell(j, i) argument order follows
        # Cell's constructor -- confirm its row/column semantics.
        self.board = [[Cell(j, i) for i in range(size)] for j in range(size)] | [
"lib.data.model.shared.cell.Cell"
] | [((287, 297), 'lib.data.model.shared.cell.Cell', 'Cell', (['j', 'i'], {}), '(j, i)\n', (291, 297), False, 'from lib.data.model.shared.cell import Cell\n')] |
import torch
import numpy as np
import torch.nn as nn
import torch.distributed as dist
import torch.nn.functional as F
from torch import Tensor
from typing import Any
from typing import Dict
from typing import Tuple
from typing import Optional
from cftool.misc import update_dict
from cftool.misc import shallow_copy_dict
from torch.nn.parallel import DistributedDataParallel as DDP
from ..encoder import Encoder1DBase
from ....data import CVLoader
from ....types import tensor_dict_type
from ....protocol import StepOutputs
from ....protocol import TrainerState
from ....protocol import MetricsOutputs
from ....protocol import ModelWithCustomSteps
from ....constants import LOSS_KEY
from ....constants import INPUT_KEY
from ....constants import LATENT_KEY
from ....misc.toolkit import to_device
from ....misc.toolkit import l2_normalize
from ....misc.toolkit import get_world_size
from ....misc.toolkit import has_batch_norms
def _get_dino_defaults(name: str) -> Dict[str, Any]:
if name == "vit":
return {"patch_size": 16, "drop_path_rate": 0.1}
return {}
class Scheduler:
    """Per-step value lookup that clamps indices past the end of the table."""

    def __init__(self, values: np.ndarray):
        self.values = values
        self.max_idx = len(values) - 1

    def __getitem__(self, index: int) -> Any:
        clamped = index if index < self.max_idx else self.max_idx
        return self.values[clamped]


def cosine_scheduler(
    base_value: float,
    final_value: float,
    epochs: int,
    num_step_per_epoch: int,
    warmup_epochs: int = 0,
    start_warmup_value: int = 0,
) -> Scheduler:
    """Build a per-step cosine-decay schedule with an optional linear warmup.

    The schedule covers ``epochs * num_step_per_epoch`` steps: a linear ramp
    from ``start_warmup_value`` to ``base_value`` over the warmup steps,
    followed by a cosine decay from ``base_value`` down to ``final_value``.
    """
    num_warmup_steps = warmup_epochs * num_step_per_epoch
    if warmup_epochs > 0:
        warmup = np.linspace(start_warmup_value, base_value, num_warmup_steps)
    else:
        warmup = np.array([])
    steps = np.arange(epochs * num_step_per_epoch - num_warmup_steps)
    cosine = final_value + 0.5 * (base_value - final_value) * (
        1.0 + np.cos(np.pi * steps / len(steps))
    )
    schedule = np.concatenate((warmup, cosine))
    assert len(schedule) == epochs * num_step_per_epoch
    return Scheduler(schedule)
class MultiCropWrapper(nn.Module):
    """Runs a backbone over a list of image crops of mixed resolutions.

    Crops that share the same spatial size are batched into a single backbone
    call; the per-crop features are then concatenated and pushed through the
    projection ``head`` in one shot.
    """

    def __init__(self, backbone: nn.Module, head: nn.Module):
        super().__init__()
        # Strip the backbone's own classification head(s); only features are used.
        backbone.fc, backbone.head = nn.Identity(), nn.Identity()
        self.backbone = backbone
        self.head = head

    def forward(
        self,
        batch_idx: int,
        batch: tensor_dict_type,
        state: Optional[TrainerState] = None,
        *,
        img_end_idx: Optional[int] = None,
        **kwargs: Any,
    ) -> Tensor:
        img_crops = batch[INPUT_KEY]
        # Accept a single tensor as well as a list of crops.
        if not isinstance(img_crops, list):
            img_crops = batch[INPUT_KEY] = [img_crops]
        # Callers may restrict to a prefix of the crops (e.g. the teacher is
        # invoked with img_end_idx=2 to see only the first two crops).
        if img_end_idx is not None:
            img_crops = img_crops[:img_end_idx]
        # Cumulative boundaries between runs of crops that share the same
        # spatial size; assumes same-resolution crops are adjacent in the list.
        idx_crops = torch.cumsum(
            torch.unique_consecutive(
                torch.tensor([img_crop.shape[-1] for img_crop in img_crops]),
                return_counts=True,
            )[1],
            0,
        )
        outputs = []
        start_idx = 0
        for end_idx in idx_crops:
            # Batch all same-sized crops into one backbone pass.
            local_batch = shallow_copy_dict(batch)
            local_batch[INPUT_KEY] = torch.cat(img_crops[start_idx:end_idx])
            idx_rs = self.backbone(batch_idx, local_batch, state, **kwargs)
            idx_out = idx_rs[LATENT_KEY]
            # Some backbones return a tuple; keep only the first element.
            if isinstance(idx_out, tuple):
                idx_out = idx_out[0]
            outputs.append(idx_out)
            start_idx = end_idx
        # One head pass over the concatenated per-crop features.
        return self.head(torch.cat(outputs))
class DINOHead(nn.Module):
    """DINO projection head.

    An MLP maps backbone features to a bottleneck, the bottleneck is
    l2-normalized, and a weight-normalized linear layer produces the final
    logits.  The last layer's weight magnitude (``weight_g``) is pinned to 1
    and, when ``norm_last_layer`` is True, frozen as well.
    """

    def __init__(
        self,
        in_dim: int,
        out_dim: int,
        batch_norm: bool = False,
        norm_last_layer: bool = True,
        *,
        num_layers: int = 3,
        latent_dim: int = 2048,
        bottleneck_dim: int = 256,
    ):
        super().__init__()
        num_layers = max(num_layers, 1)
        if num_layers == 1:
            self.mapping = nn.Linear(in_dim, bottleneck_dim)
        else:
            # num_layers - 1 hidden Linear(+BN)+GELU stages, then the bottleneck.
            layers = []
            in_features = in_dim
            for _ in range(num_layers - 1):
                layers.append(nn.Linear(in_features, latent_dim))
                if batch_norm:
                    layers.append(nn.BatchNorm1d(latent_dim))
                layers.append(nn.GELU())
                in_features = latent_dim
            layers.append(nn.Linear(latent_dim, bottleneck_dim))
            self.mapping = nn.Sequential(*layers)
        self.apply(self._init_weights)
        self.last_layer = nn.utils.weight_norm(
            nn.Linear(bottleneck_dim, out_dim, bias=False)
        )
        self.last_layer.weight_g.data.fill_(1)
        if norm_last_layer:
            self.last_layer.weight_g.requires_grad = False

    def _init_weights(self, m: nn.Module) -> None:
        """Truncated-normal init for linear weights, zeros for biases."""
        if isinstance(m, nn.Linear):
            nn.init.trunc_normal_(m.weight, std=0.02)
            if m.bias is not None:
                nn.init.constant_(m.bias, 0)

    def forward(self, net: Tensor) -> Tensor:
        projected = self.mapping(net)
        projected = nn.functional.normalize(projected, dim=-1, p=2)
        return self.last_layer(projected)
class DINOLoss(nn.Module):
    """Cross-entropy between student and teacher crop distributions.

    The teacher output is re-centered with an EMA estimate and sharpened with
    a scheduled temperature; pairs where the teacher and student chunk index
    coincide are skipped.
    """

    # EMA estimate of the teacher output mean, used for centering.
    center: torch.Tensor

    def __init__(
        self,
        out_dim: int,
        teacher_temp: float,
        warmup_teacher_temp: float,
        warmup_teacher_temp_epochs: int,
        teacher_temp_epochs: int,
        *,
        student_temp: float = 0.1,
        center_momentum: float = 0.9,
    ):
        super().__init__()
        self.student_temp = student_temp
        self.center_momentum = center_momentum
        # Registered as a buffer so it moves with the module and is saved.
        self.register_buffer("center", torch.zeros(1, out_dim))
        teacher_temp_constant_epochs = teacher_temp_epochs - warmup_teacher_temp_epochs
        # Linear warmup of the teacher temperature, then constant afterwards.
        self.teacher_temp_schedule = Scheduler(
            np.concatenate(
                (
                    np.linspace(
                        warmup_teacher_temp,
                        teacher_temp,
                        warmup_teacher_temp_epochs,
                    ),
                    np.ones(teacher_temp_constant_epochs) * teacher_temp,
                )
            )
        )
        self.num_epochs = teacher_temp_epochs

    def forward(
        self,
        epoch: int,
        num_crops: int,
        student_output: Tensor,
        teacher_output: Tensor,
    ) -> Tensor:
        """Average cross-entropy over all (teacher chunk, student chunk) pairs
        with distinct indices; also EMA-updates the center as a side effect."""
        student_logits = student_output / self.student_temp
        student_logits_list = student_logits.chunk(num_crops)
        temp = self.teacher_temp_schedule[epoch]
        # Center + sharpen the teacher distribution; detach() stops gradients.
        teacher_logits = F.softmax((teacher_output - self.center) / temp, dim=-1)
        # The teacher output is split into 2 chunks (it only sees 2 crops).
        teacher_logits_list = teacher_logits.detach().chunk(2)
        total_loss = 0.0
        num_loss_terms = 0
        for it, t_logit in enumerate(teacher_logits_list):
            for iv, v_logit in enumerate(student_logits_list):
                if iv == it:
                    # Skip the pair where both networks saw the same crop.
                    continue
                loss = torch.sum(-t_logit * F.log_softmax(v_logit, dim=-1), dim=-1)
                total_loss += loss.mean()
                num_loss_terms += 1
        total_loss /= num_loss_terms
        self.update_center(teacher_output)
        return total_loss

    @torch.no_grad()
    def update_center(self, teacher_output: Tensor) -> None:
        """EMA-update ``center`` with the global-batch mean of the teacher output."""
        batch_center = torch.sum(teacher_output, dim=0, keepdim=True)
        if dist.is_initialized():
            # Sum across workers so the mean below is over the global batch.
            dist.all_reduce(batch_center)
        batch_center = batch_center / (len(teacher_output) * get_world_size())
        m = self.center_momentum
        self.center = self.center * m + batch_center * (1.0 - m)
class DINOEvaluateLoss:
    """Evaluation-time mirror of ``DINOLoss``.

    Reuses the training loss' temperatures and center, but neither updates
    the center nor chunks/skips crop pairs; returns a plain float.
    """

    def __init__(self, train_loss: DINOLoss):
        self.train_loss = train_loss

    def __call__(
        self,
        epoch: int,
        student_output: Tensor,
        teacher_output: Tensor,
    ) -> float:
        student_logits = student_output / self.train_loss.student_temp
        teacher_temp = self.train_loss.teacher_temp_schedule[epoch]
        teacher_probs = F.softmax(
            (teacher_output - self.train_loss.center) / teacher_temp, dim=-1
        )
        cross_entropy = -teacher_probs * F.log_softmax(student_logits, dim=-1)
        return torch.sum(cross_entropy, dim=-1).mean().item()
@ModelWithCustomSteps.register("dino")
class DINO(ModelWithCustomSteps):
    """Self-DIstillation with NO labels (DINO).

    A student and a teacher share the same encoder architecture; the student
    is trained to match the teacher's centered, temperature-sharpened output
    distribution over multiple image crops.
    """

    # This model manages its own parameter groups and DDP wrapping.
    custom_params_groups = True
    custom_ddp_initialization = True
    # Per-step schedules, built lazily once the step count is known.
    lr_schedule: Optional[Scheduler]
    wd_schedule: Optional[Scheduler]
    momentum_schedule: Optional[Scheduler]

    def __init__(
        self,
        encoder1d: str = "vit",
        encoder1d_config: Optional[Dict[str, Any]] = None,
        student_specific: Optional[Dict[str, Any]] = None,
        teacher_specific: Optional[Dict[str, Any]] = None,
        *,
        out_dim: int = 65536,
        use_bn_in_head: bool = False,
        norm_last_layer: bool = True,
        teacher_temp: float = 0.07,
        momentum_teacher: float = 0.996,
        warmup_teacher_temp: float = 0.04,
        warmup_teacher_temp_epochs: int = 30,
        teacher_temp_epochs: int,
        freeze_last_layer: int = 1,
        weight_decay: float = 0.04,
        weight_decay_end: float = 0.4,
        warmup_epochs: int = 10,
    ):
        super().__init__()
        # Shared encoder config, then specialized per network.
        base = update_dict(encoder1d_config or {}, _get_dino_defaults(encoder1d))
        student_cfg = update_dict(student_specific or {}, shallow_copy_dict(base))
        teacher_cfg = update_dict(teacher_specific or {}, shallow_copy_dict(base))
        student = Encoder1DBase.make(encoder1d, student_cfg)
        teacher = Encoder1DBase.make(encoder1d, teacher_cfg)
        # DDP wrappers are attached later (see *_for_training properties).
        self.ddp_student = self.ddp_teacher = None
        self.student = MultiCropWrapper(
            student,
            DINOHead(
                student.latent_dim,
                out_dim,
                use_bn_in_head,
                norm_last_layer,
            ),
        )
        # The teacher head uses the default norm_last_layer=True; presumably
        # its weights are updated from the student by EMA (momentum_teacher).
        self.teacher = MultiCropWrapper(
            teacher,
            DINOHead(teacher.latent_dim, out_dim, use_bn_in_head),
        )
        self.freeze_last_layer = freeze_last_layer
        # Start teacher and student from identical weights.
        self.teacher.load_state_dict(self.student.state_dict())
        self.loss = DINOLoss(
            out_dim,
            teacher_temp,
            warmup_teacher_temp,
            warmup_teacher_temp_epochs,
            teacher_temp_epochs,
        )
        self.evaluate_loss = DINOEvaluateLoss(self.loss)
        self.momentum_teacher = momentum_teacher
        self.teacher_temp_epochs = teacher_temp_epochs
        self.weight_decay = weight_decay
        self.weight_decay_end = weight_decay_end
        self.warmup_epochs = warmup_epochs
        # Built lazily in train_step.
        self.lr_schedule = None
        self.wd_schedule = None
        self.momentum_schedule = None
@property
def student_for_training(self) -> MultiCropWrapper:
return self.ddp_student or self.student
@property
def teacher_for_training(self) -> MultiCropWrapper:
return self.ddp_teacher or self.teacher
def forward(
self,
batch_idx: int,
batch: tensor_dict_type,
state: Optional[TrainerState] = None,
**kwargs: Any,
) -> tensor_dict_type:
net = self.student.backbone(batch_idx, batch, state, **kwargs)[LATENT_KEY]
net = l2_normalize(net)
return {LATENT_KEY: net}
def onnx_forward(self, batch: tensor_dict_type) -> Any:
inp = batch[INPUT_KEY]
net = self.get_latent(inp, determinate=True)
return net.view(inp.shape[0], self.student.backbone.latent_dim)
def get_latent(self, net: Tensor, **kwargs: Any) -> Tensor:
return self.forward(0, {INPUT_KEY: net}, **kwargs)[LATENT_KEY]
def get_logits(self, net: Tensor) -> Tensor:
return self.student(0, {INPUT_KEY: net})
def state_dict(
self,
destination: Any = None,
prefix: str = "",
keep_vars: bool = False,
) -> Any:
states = super().state_dict(destination, prefix, keep_vars)
for k in list(states.keys()):
if k.startswith("ddp"):
states.pop(k)
return states
def summary_forward(self, batch_idx: int, batch: tensor_dict_type) -> None:
self.student(batch_idx, to_device(batch, self.device))
def _get_outputs(
self,
batch_idx: int,
batch: tensor_dict_type,
trainer: Any,
forward_kwargs: Dict[str, Any],
) -> tensor_dict_type:
teacher_output = self.teacher_for_training(
batch_idx,
batch,
trainer.state,
img_end_idx=2,
**forward_kwargs,
)
student_output = self.student_for_training(
batch_idx,
batch,
trainer.state,
**forward_kwargs,
)
return {"student": student_output, "teacher": teacher_output}
def _get_loss(
self,
batch_idx: int,
batch: tensor_dict_type,
trainer: Any,
forward_kwargs: Dict[str, Any],
) -> Tuple[tensor_dict_type, Tensor]:
with torch.cuda.amp.autocast(enabled=trainer.use_amp):
outputs = self._get_outputs(batch_idx, batch, trainer, forward_kwargs)
epoch = trainer.state.epoch
num_crops = len(batch[INPUT_KEY])
student_output = outputs["student"]
teacher_output = outputs["teacher"]
loss = self.loss(epoch, num_crops, student_output, teacher_output)
return outputs, loss
    def train_step(
        self,
        batch_idx: int,
        batch: tensor_dict_type,
        trainer: Any,
        forward_kwargs: Dict[str, Any],
        loss_kwargs: Dict[str, Any],
    ) -> StepOutputs:
        """Run one DINO optimization step.

        In order: lazily build the cosine lr / weight-decay schedules, set the
        per-step lr and weight decay manually, run the AMP forward/backward
        pass, optionally clip gradients, keep the head's last layer frozen for
        the first `freeze_last_layer` epochs, apply the optimizer step, and
        finally update the teacher from the student via EMA.

        `loss_kwargs` is accepted for interface compatibility but unused here.
        """
        state = trainer.state
        # Schedules are built lazily because they need `num_step_per_epoch`,
        # which is only known once training has started.
        if self.lr_schedule is None:
            self.lr_schedule = cosine_scheduler(
                self.lr * (len(batch[INPUT_KEY][0]) * get_world_size()) / 256.0,  # type: ignore
                self.min_lr,
                self.teacher_temp_epochs,
                state.num_step_per_epoch,
                warmup_epochs=self.warmup_epochs,
            )
        if self.wd_schedule is None:
            self.wd_schedule = cosine_scheduler(
                self.weight_decay,
                self.weight_decay_end,
                self.teacher_temp_epochs,
                state.num_step_per_epoch,
            )
        # manual scheduling: lr for every group, weight decay only for the
        # first group (the regularized parameters; see `params_groups`)
        optimizer = trainer.optimizers["all"]
        for i, param_group in enumerate(optimizer.param_groups):
            param_group["lr"] = self.lr_schedule[state.step]
            if i == 0:
                param_group["weight_decay"] = self.wd_schedule[state.step]
        # forward pass
        rs, loss = self._get_loss(batch_idx, batch, trainer, forward_kwargs)
        # backward pass (scaled for AMP)
        optimizer.zero_grad()
        trainer.grad_scaler.scale(loss).backward()
        # clip norm: gradients must be unscaled before clipping
        if trainer.clip_norm > 0.0:
            trainer.grad_scaler.unscale_(optimizer)
            nn.utils.clip_grad_norm_(
                self.student_for_training.parameters(),
                max_norm=trainer.clip_norm,
            )
        # freeze last layer of the head during the first epochs by zeroing
        # its gradients (setting `.grad = None` skips the update)
        if state.epoch <= self.freeze_last_layer:
            for n, p in self.student.named_parameters():
                if "last_layer" in n:
                    p.grad = None
        # update parameters
        trainer.grad_scaler.step(optimizer)
        trainer.grad_scaler.update()
        # update momentum teacher (EMA of the student's parameters)
        if self.momentum_schedule is None:
            self.momentum_schedule = cosine_scheduler(
                self.momentum_teacher,
                1.0,
                self.teacher_temp_epochs,
                state.num_step_per_epoch,
            )
        with torch.no_grad():
            m = self.momentum_schedule[state.step]
            for param_q, param_k in zip(
                self.student.parameters(),
                self.teacher.parameters(),
            ):
                param_k.data.mul_(m).add_((1.0 - m) * param_q.detach().data)
        # return
        return StepOutputs(rs, {LOSS_KEY: loss.item()})
def evaluate_step( # type: ignore
self,
loader: CVLoader,
portion: float,
trainer: Any,
) -> MetricsOutputs:
losses = []
for i, batch in enumerate(loader):
if i / len(loader) >= portion:
break
batch = to_device(batch, self.device)
outputs = self._get_outputs(i, batch, trainer, {})
losses.append(
self.evaluate_loss(
trainer.state.epoch,
outputs["student"],
outputs["teacher"],
)
)
# gather
mean_loss = sum(losses) / len(losses)
return MetricsOutputs(
-mean_loss,
{
"loss": mean_loss,
"lr": self.lr_schedule[trainer.state.step], # type: ignore
"wd": self.wd_schedule[trainer.state.step], # type: ignore
},
)
@staticmethod
def params_groups(m: nn.Module) -> Any:
regularized = []
bias_and_norm = []
for name, param in m.named_parameters():
if not param.requires_grad:
continue
if name.endswith(".bias") or len(param.shape) == 1:
bias_and_norm.append(param)
else:
regularized.append(param)
return [{"params": regularized}, {"params": bias_and_norm, "weight_decay": 0.0}]
def _init_with_trainer(self, trainer: Any) -> None:
self.teacher_for_training.requires_grad_(False)
def init_ddp(self, trainer: Any) -> None:
if has_batch_norms(self.student):
self.student = nn.SyncBatchNorm.convert_sync_batchnorm(self.student)
self.teacher = nn.SyncBatchNorm.convert_sync_batchnorm(self.teacher)
self.ddp_student = DDP(self.student, device_ids=[trainer.rank])
self.ddp_teacher = DDP(self.teacher, device_ids=[trainer.rank])
self.ddp_teacher.requires_grad_(False) # type: ignore
def permute_trainer_config(self, trainer_config: Dict[str, Any]) -> None:
# TODO : make `permute_trainer_config` more general
if trainer_config["clip_norm"] == 0.0:
trainer_config["clip_norm"] = 3.0
if trainer_config["lr"] is None:
trainer_config["lr"] = 0.0005
self.lr = trainer_config["lr"]
self.min_lr = trainer_config.pop("min_lr", 1.0e-6)
if trainer_config["optimizer_name"] is None:
trainer_config["optimizer_name"] = "adamw"
trainer_config["scheduler_name"] = "none"
# Public API of this module.
__all__ = [
    "DINO",
]
| [
"torch.nn.GELU",
"torch.nn.init.constant_",
"torch.nn.Sequential",
"numpy.array",
"torch.nn.BatchNorm1d",
"torch.sum",
"torch.nn.init.trunc_normal_",
"torch.nn.functional.softmax",
"numpy.arange",
"numpy.linspace",
"torch.cuda.amp.autocast",
"numpy.concatenate",
"torch.nn.Identity",
"torch... | [((1525, 1537), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (1533, 1537), True, 'import numpy as np\n'), ((1714, 1767), 'numpy.arange', 'np.arange', (['(epochs * num_step_per_epoch - warmup_iters)'], {}), '(epochs * num_step_per_epoch - warmup_iters)\n', (1723, 1767), True, 'import numpy as np\n'), ((1904, 1947), 'numpy.concatenate', 'np.concatenate', (['(warmup_schedule, schedule)'], {}), '((warmup_schedule, schedule))\n', (1918, 1947), True, 'import numpy as np\n'), ((7235, 7250), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (7248, 7250), False, 'import torch\n'), ((1644, 1701), 'numpy.linspace', 'np.linspace', (['start_warmup_value', 'base_value', 'warmup_iters'], {}), '(start_warmup_value, base_value, warmup_iters)\n', (1655, 1701), True, 'import numpy as np\n'), ((4525, 4571), 'torch.nn.Linear', 'nn.Linear', (['bottleneck_dim', 'out_dim'], {'bias': '(False)'}), '(bottleneck_dim, out_dim, bias=False)\n', (4534, 4571), True, 'import torch.nn as nn\n'), ((4598, 4624), 'torch.nn.utils.weight_norm', 'nn.utils.weight_norm', (['last'], {}), '(last)\n', (4618, 4624), True, 'import torch.nn as nn\n'), ((5104, 5145), 'torch.nn.functional.normalize', 'nn.functional.normalize', (['net'], {'dim': '(-1)', 'p': '(2)'}), '(net, dim=-1, p=2)\n', (5127, 5145), True, 'import torch.nn as nn\n'), ((6608, 6664), 'torch.nn.functional.softmax', 'F.softmax', (['((teacher_output - self.center) / temp)'], {'dim': '(-1)'}), '((teacher_output - self.center) / temp, dim=-1)\n', (6617, 6664), True, 'import torch.nn.functional as F\n'), ((7335, 7381), 'torch.sum', 'torch.sum', (['teacher_output'], {'dim': '(0)', 'keepdim': '(True)'}), '(teacher_output, dim=0, keepdim=True)\n', (7344, 7381), False, 'import torch\n'), ((7393, 7414), 'torch.distributed.is_initialized', 'dist.is_initialized', ([], {}), '()\n', (7412, 7414), True, 'import torch.distributed as dist\n'), ((8080, 8114), 'torch.nn.functional.softmax', 'F.softmax', (['(centered / temp)'], {'dim': '(-1)'}), 
'(centered / temp, dim=-1)\n', (8089, 8114), True, 'import torch.nn.functional as F\n'), ((17892, 17936), 'torch.nn.parallel.DistributedDataParallel', 'DDP', (['self.student'], {'device_ids': '[trainer.rank]'}), '(self.student, device_ids=[trainer.rank])\n', (17895, 17936), True, 'from torch.nn.parallel import DistributedDataParallel as DDP\n'), ((17964, 18008), 'torch.nn.parallel.DistributedDataParallel', 'DDP', (['self.teacher'], {'device_ids': '[trainer.rank]'}), '(self.teacher, device_ids=[trainer.rank])\n', (17967, 18008), True, 'from torch.nn.parallel import DistributedDataParallel as DDP\n'), ((2198, 2211), 'torch.nn.Identity', 'nn.Identity', ([], {}), '()\n', (2209, 2211), True, 'import torch.nn as nn\n'), ((2213, 2226), 'torch.nn.Identity', 'nn.Identity', ([], {}), '()\n', (2224, 2226), True, 'import torch.nn as nn\n'), ((3066, 3090), 'cftool.misc.shallow_copy_dict', 'shallow_copy_dict', (['batch'], {}), '(batch)\n', (3083, 3090), False, 'from cftool.misc import shallow_copy_dict\n'), ((3128, 3167), 'torch.cat', 'torch.cat', (['img_crops[start_idx:end_idx]'], {}), '(img_crops[start_idx:end_idx])\n', (3137, 3167), False, 'import torch\n'), ((3458, 3476), 'torch.cat', 'torch.cat', (['outputs'], {}), '(outputs)\n', (3467, 3476), False, 'import torch\n'), ((3890, 3923), 'torch.nn.Linear', 'nn.Linear', (['in_dim', 'bottleneck_dim'], {}), '(in_dim, bottleneck_dim)\n', (3899, 3923), True, 'import torch.nn as nn\n'), ((4448, 4470), 'torch.nn.Sequential', 'nn.Sequential', (['*blocks'], {}), '(*blocks)\n', (4461, 4470), True, 'import torch.nn as nn\n'), ((4860, 4901), 'torch.nn.init.trunc_normal_', 'nn.init.trunc_normal_', (['m.weight'], {'std': '(0.02)'}), '(m.weight, std=0.02)\n', (4881, 4901), True, 'import torch.nn as nn\n'), ((5694, 5717), 'torch.zeros', 'torch.zeros', (['(1)', 'out_dim'], {}), '(1, out_dim)\n', (5705, 5717), False, 'import torch\n'), ((7428, 7457), 'torch.distributed.all_reduce', 'dist.all_reduce', (['batch_center'], {}), '(batch_center)\n', 
(7443, 7457), True, 'import torch.distributed as dist\n'), ((9366, 9389), 'cftool.misc.shallow_copy_dict', 'shallow_copy_dict', (['base'], {}), '(base)\n', (9383, 9389), False, 'from cftool.misc import shallow_copy_dict\n'), ((9449, 9472), 'cftool.misc.shallow_copy_dict', 'shallow_copy_dict', (['base'], {}), '(base)\n', (9466, 9472), False, 'from cftool.misc import shallow_copy_dict\n'), ((13037, 13085), 'torch.cuda.amp.autocast', 'torch.cuda.amp.autocast', ([], {'enabled': 'trainer.use_amp'}), '(enabled=trainer.use_amp)\n', (13060, 13085), False, 'import torch\n'), ((15703, 15718), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (15716, 15718), False, 'import torch\n'), ((17730, 17783), 'torch.nn.SyncBatchNorm.convert_sync_batchnorm', 'nn.SyncBatchNorm.convert_sync_batchnorm', (['self.student'], {}), '(self.student)\n', (17769, 17783), True, 'import torch.nn as nn\n'), ((17811, 17864), 'torch.nn.SyncBatchNorm.convert_sync_batchnorm', 'nn.SyncBatchNorm.convert_sync_batchnorm', (['self.teacher'], {}), '(self.teacher)\n', (17850, 17864), True, 'import torch.nn as nn\n'), ((3960, 3989), 'torch.nn.Linear', 'nn.Linear', (['in_dim', 'latent_dim'], {}), '(in_dim, latent_dim)\n', (3969, 3989), True, 'import torch.nn as nn\n'), ((4102, 4111), 'torch.nn.GELU', 'nn.GELU', ([], {}), '()\n', (4109, 4111), True, 'import torch.nn as nn\n'), ((4382, 4419), 'torch.nn.Linear', 'nn.Linear', (['latent_dim', 'bottleneck_dim'], {}), '(latent_dim, bottleneck_dim)\n', (4391, 4419), True, 'import torch.nn as nn\n'), ((4982, 5010), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (4999, 5010), True, 'import torch.nn as nn\n'), ((2822, 2882), 'torch.tensor', 'torch.tensor', (['[img_crop.shape[-1] for img_crop in img_crops]'], {}), '([img_crop.shape[-1] for img_crop in img_crops])\n', (2834, 2882), False, 'import torch\n'), ((4048, 4074), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['latent_dim'], {}), '(latent_dim)\n', (4062, 4074), True, 'import 
torch.nn as nn\n'), ((4187, 4220), 'torch.nn.Linear', 'nn.Linear', (['latent_dim', 'latent_dim'], {}), '(latent_dim, latent_dim)\n', (4196, 4220), True, 'import torch.nn as nn\n'), ((4345, 4354), 'torch.nn.GELU', 'nn.GELU', ([], {}), '()\n', (4352, 4354), True, 'import torch.nn as nn\n'), ((5921, 5995), 'numpy.linspace', 'np.linspace', (['warmup_teacher_temp', 'teacher_temp', 'warmup_teacher_temp_epochs'], {}), '(warmup_teacher_temp, teacher_temp, warmup_teacher_temp_epochs)\n', (5932, 5995), True, 'import numpy as np\n'), ((4287, 4313), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['latent_dim'], {}), '(latent_dim)\n', (4301, 4313), True, 'import torch.nn as nn\n'), ((6112, 6149), 'numpy.ones', 'np.ones', (['teacher_temp_constant_epochs'], {}), '(teacher_temp_constant_epochs)\n', (6119, 6149), True, 'import numpy as np\n'), ((7005, 7035), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['v_logit'], {'dim': '(-1)'}), '(v_logit, dim=-1)\n', (7018, 7035), True, 'import torch.nn.functional as F\n'), ((8152, 8183), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['s_logits'], {'dim': '(-1)'}), '(s_logits, dim=-1)\n', (8165, 8183), True, 'import torch.nn.functional as F\n')] |
from __future__ import absolute_import
# external modules
from past.builtins import basestring
import numpy as num
# ANUGA modules
import anuga.utilities.log as log
from anuga.config import netcdf_mode_r, netcdf_mode_w, netcdf_mode_a, \
netcdf_float
from .asc2dem import asc2dem
def dem2array(filename, variable_name='elevation',
              easting_min=None, easting_max=None,
              northing_min=None, northing_max=None,
              use_cache=False, verbose=False,):
    """Read a Digital Elevation Model from the NetCDF ``.dem`` format.

    Example of the underlying ASCII grid header::

        ncols         3121
        nrows         1800
        xllcorner     722000
        yllcorner     5893000
        cellsize      25
        NODATA_value  -9999

    Parameters
    ----------
    filename : str
        Path to the ``.dem`` (NetCDF) file to read.
    variable_name : str
        Name of the NetCDF variable holding the elevation grid.
    easting_min, easting_max, northing_min, northing_max : float or None
        Optional bounds; when given, the returned arrays are clipped to the
        nodes lying inside them.  (These arguments were previously accepted
        but silently ignored.)
    use_cache : bool
        Accepted for interface compatibility; caching is not implemented.
    verbose : bool
        If True, log progress.

    Returns
    -------
    x, y, Z
        Node eastings, node northings, and elevations, with ``Z[i, j]``
        corresponding to ``(x[i], y[j])`` (consistent with ``grd2array``);
        NODATA cells are replaced by ``nan``.
    """
    import os
    from anuga.file.netcdf import NetCDFFile

    msg = 'Filename must be a text string'
    assert isinstance(filename, basestring), msg
    msg = 'Extension should be .dem'
    assert os.path.splitext(filename)[1] in ['.dem'], msg
    msg = 'Variable name must be a text string'
    assert isinstance(variable_name, basestring), msg

    # Get NetCDF; close the handle even if reading fails
    # (the original implementation leaked the file handle).
    infile = NetCDFFile(filename, netcdf_mode_r)
    try:
        if verbose: log.critical('Reading DEM from %s' % (filename))
        ncols = int(infile.ncols)
        nrows = int(infile.nrows)
        xllcorner = float(infile.xllcorner)  # Easting of lower left corner
        yllcorner = float(infile.yllcorner)  # Northing of lower left corner
        cellsize = float(infile.cellsize)
        NODATA_value = float(infile.NODATA_value)
        # NOTE(review): zone / false_easting / false_northing / projection /
        # datum / units were read and discarded by the original code, so they
        # are intentionally not read here.
        Z = infile.variables[variable_name][:]
    finally:
        infile.close()

    Z = Z.reshape(nrows, ncols)
    Z = num.where(Z == NODATA_value, num.nan, Z)
    # Reorient so that Z[i, j] matches (x[i], y[j]), consistent with the
    # grd2array result.
    Z = num.fliplr(Z.T)

    x = num.linspace(xllcorner, xllcorner + (ncols - 1) * cellsize, ncols)
    y = num.linspace(yllcorner, yllcorner + (nrows - 1) * cellsize, nrows)

    # Clip to the requested easting/northing window, if any bound was given.
    x_keep = num.ones(ncols, dtype=bool)
    y_keep = num.ones(nrows, dtype=bool)
    if easting_min is not None:
        x_keep &= x >= easting_min
    if easting_max is not None:
        x_keep &= x <= easting_max
    if northing_min is not None:
        y_keep &= y >= northing_min
    if northing_max is not None:
        y_keep &= y <= northing_max
    if not (x_keep.all() and y_keep.all()):
        x = x[x_keep]
        y = y[y_keep]
        Z = Z[num.ix_(x_keep, y_keep)]

    return x, y, Z
| [
"numpy.where",
"numpy.fliplr",
"anuga.file.netcdf.NetCDFFile",
"os.path.splitext",
"numpy.linspace",
"anuga.utilities.log.critical"
] | [((1287, 1322), 'anuga.file.netcdf.NetCDFFile', 'NetCDFFile', (['filename', 'netcdf_mode_r'], {}), '(filename, netcdf_mode_r)\n', (1297, 1322), False, 'from anuga.file.netcdf import NetCDFFile\n'), ((2004, 2044), 'numpy.where', 'num.where', (['(Z == NODATA_value)', 'num.nan', 'Z'], {}), '(Z == NODATA_value, num.nan, Z)\n', (2013, 2044), True, 'import numpy as num\n'), ((2138, 2153), 'numpy.fliplr', 'num.fliplr', (['Z.T'], {}), '(Z.T)\n', (2148, 2153), True, 'import numpy as num\n'), ((2239, 2305), 'numpy.linspace', 'num.linspace', (['xllcorner', '(xllcorner + (ncols - 1) * cellsize)', 'ncols'], {}), '(xllcorner, xllcorner + (ncols - 1) * cellsize, ncols)\n', (2251, 2305), True, 'import numpy as num\n'), ((2308, 2374), 'numpy.linspace', 'num.linspace', (['yllcorner', '(yllcorner + (nrows - 1) * cellsize)', 'nrows'], {}), '(yllcorner, yllcorner + (nrows - 1) * cellsize, nrows)\n', (2320, 2374), True, 'import numpy as num\n'), ((1341, 1387), 'anuga.utilities.log.critical', 'log.critical', (["('Reading DEM from %s' % filename)"], {}), "('Reading DEM from %s' % filename)\n", (1353, 1387), True, 'import anuga.utilities.log as log\n'), ((1095, 1121), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (1111, 1121), False, 'import os\n')] |
import os
from utils import *
# Shared WhiteboxTools instance; presumably configured (working dir etc.)
# by utils.wbt_setup — verify against utils module.
wbt = wbt_setup()
__all__ = ['steep_areas',
           'geomorphological_fluvial_flood_hazard_areas',
           ]
def geomorphological_fluvial_flood_hazard_areas(dem, output_prefix, buffer_distance, facc_threshold = 1000, remove_temp_outputs = True):
    """
    Derive geomorphological fluvial flood hazard areas from a DEM and export
    them as polygons.
    Inputs:
        dem: str <-- path to raster(.tif) file
        output_prefix: str <-- site specific name appended to each output file name
        buffer_distance: int or float <-- distance used to buffer the thresholded flow accumulation raster
        facc_threshold: int or float <-- flow accumulation threshold (default 1000)
        remove_temp_outputs: bool <-- if True, delete intermediate rasters afterwards
    Outputs:
        <output_prefix>_flood_hazard_areas.shp <-- polygons representing flood hazard areas
    Returns:
        None
    """
    # intermediate / final file names
    fill_raster = f"{output_prefix}_fill.tif"
    fdir_raster = f"{output_prefix}_fdir.tif"
    facc_raster = f"{output_prefix}_facc.tif"
    facc_setnull_raster = f"{output_prefix}_facc_setnull_{facc_threshold}.tif"
    facc_buffer_raster = f"{output_prefix}_facc_setnull_buffer_{buffer_distance}.tif"
    hazard_polygon = f"{output_prefix}_flood_hazard_areas.shp"
    # pipeline: fill depressions -> flow direction -> flow accumulation
    #           -> threshold -> buffer -> polygons
    wbt.fill_depressions_planchon_and_darboux(dem, fill_raster)
    wbt.d8_pointer(fill_raster, fdir_raster, esri_pntr=True)
    wbt.d8_flow_accumulation(fdir_raster, facc_raster, pntr=True, esri_pntr=True)
    wbt.conditional_evaluation(i=facc_raster, output=facc_setnull_raster,
                               statement=f"value >= {facc_threshold}", true=1, false='null')
    wbt.buffer_raster(facc_setnull_raster, facc_buffer_raster, size=buffer_distance)
    wbt.raster_to_vector_polygons(facc_buffer_raster, hazard_polygon)
    if remove_temp_outputs:
        for temp_output in (fill_raster, fdir_raster, facc_raster,
                            facc_setnull_raster, facc_buffer_raster):
            os.remove(os.path.join(wbt.work_dir, temp_output))
def steep_areas(dem, threshold, output_prefix, delete_temp_outputs=True):
    """Create a mask of areas with slope above a threshold and export it as
    raster and vector files.
    Inputs:
        dem : str <-- path to raster(.tif) file
        threshold : int or float <-- slope value (exclusive) that defines the mask
        output_prefix: str <-- site specific name appended to each output file name
        delete_temp_outputs [optional] : boolean <-- if True, delete the temporary slope raster
    Outputs:
        <output_prefix>_slope_setnull.shp <-- output polygon file
    Returns:
        None
    """
    slope_raster = f"{output_prefix}_slope.tif"
    mask_raster = f"{output_prefix}_slope_setnull.tif"
    mask_polygon = f"{output_prefix}_slope_setnull.shp"
    # slope gradient of the input DEM
    wbt.slope(dem, slope_raster)
    # keep only cells whose slope exceeds the threshold (others -> null)
    wbt.conditional_evaluation(
        i=slope_raster,
        output=mask_raster,
        statement=f'value > {threshold}',
        true=1,
        false='null',
    )
    # vectorize the mask
    wbt.raster_to_vector_polygons(i=mask_raster, output=mask_polygon)
    if delete_temp_outputs:
        os.remove(os.path.join(wbt.work_dir, slope_raster))
| [
"os.path.join"
] | [((2138, 2174), 'os.path.join', 'os.path.join', (['wbt.work_dir', 'out_fill'], {}), '(wbt.work_dir, out_fill)\n', (2150, 2174), False, 'import os\n'), ((2194, 2230), 'os.path.join', 'os.path.join', (['wbt.work_dir', 'out_fdir'], {}), '(wbt.work_dir, out_fdir)\n', (2206, 2230), False, 'import os\n'), ((2250, 2286), 'os.path.join', 'os.path.join', (['wbt.work_dir', 'out_facc'], {}), '(wbt.work_dir, out_facc)\n', (2262, 2286), False, 'import os\n'), ((2306, 2350), 'os.path.join', 'os.path.join', (['wbt.work_dir', 'out_facc_setnull'], {}), '(wbt.work_dir, out_facc_setnull)\n', (2318, 2350), False, 'import os\n'), ((2370, 2421), 'os.path.join', 'os.path.join', (['wbt.work_dir', 'out_facc_setnull_buffer'], {}), '(wbt.work_dir, out_facc_setnull_buffer)\n', (2382, 2421), False, 'import os\n'), ((4112, 4152), 'os.path.join', 'os.path.join', (['wbt.work_dir', 'slope_output'], {}), '(wbt.work_dir, slope_output)\n', (4124, 4152), False, 'import os\n')] |
# Generated by Django 2.2.13 on 2021-07-05 06:50
import datetime
from django.db import migrations, models
import django.db.models.deletion
def _timeslot_fields():
    """Build the field list for the ``Timeslots`` model.

    Reproduces the auto-generated literal list exactly: an auto PK, then 48
    quarter-hour slots from 12:00 through 23:45 — each contributing an
    ``IntegerField`` id, a ``BooleanField`` reservation flag and a
    ``CharField`` holding the slot's time label — followed by the FK to the
    restaurant table.  Generating the triples removes ~150 lines of
    copy-pasted field definitions while producing an identical schema.
    """
    fields = [
        ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
    ]
    for slot in range(1, 49):
        offset = (slot - 1) * 15  # minutes past 12:00
        label = '%d:%02d' % (12 + offset // 60, offset % 60)
        fields.append(('timeslot_id_%d' % slot, models.IntegerField(default=slot)))
        fields.append(('timeslot_is_reserved_%d' % slot, models.BooleanField(default=True)))
        fields.append(('timeslot_value_%s' % label, models.CharField(default=label, max_length=10)))
    fields.append(
        ('table', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.RestaurantTables'))
    )
    return fields


class Migration(migrations.Migration):
    """Alter the reservation check-in/out defaults and create ``Timeslots``."""

    dependencies = [
        ('core', '0007_auto_20210705_0041'),
    ]

    operations = [
        # NOTE: these datetime defaults were frozen at makemigrations time.
        migrations.AlterField(
            model_name='restaurantreservations',
            name='check_in',
            field=models.DateTimeField(default=datetime.datetime(2021, 7, 5, 9, 49, 59, 711519)),
        ),
        migrations.AlterField(
            model_name='restaurantreservations',
            name='check_out',
            field=models.DateTimeField(default=datetime.datetime(2021, 7, 5, 9, 49, 59, 711519)),
        ),
        migrations.CreateModel(
            name='Timeslots',
            fields=_timeslot_fields(),
        ),
    ]
| [
"datetime.datetime",
"django.db.models.IntegerField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.CharField"
] | [((430, 478), 'datetime.datetime', 'datetime.datetime', (['(2021)', '(7)', '(5)', '(9)', '(49)', '(59)', '(711519)'], {}), '(2021, 7, 5, 9, 49, 59, 711519)\n', (447, 478), False, 'import datetime\n'), ((649, 697), 'datetime.datetime', 'datetime.datetime', (['(2021)', '(7)', '(5)', '(9)', '(49)', '(59)', '(711519)'], {}), '(2021, 7, 5, 9, 49, 59, 711519)\n', (666, 697), False, 'import datetime\n'), ((817, 910), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (833, 910), False, 'from django.db import migrations, models\n'), ((943, 973), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (962, 973), False, 'from django.db import migrations, models\n'), ((1019, 1052), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (1038, 1052), False, 'from django.db import migrations, models\n'), ((1096, 1144), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""12:00"""', 'max_length': '(10)'}), "(default='12:00', max_length=10)\n", (1112, 1144), False, 'from django.db import migrations, models\n'), ((1181, 1211), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(2)'}), '(default=2)\n', (1200, 1211), False, 'from django.db import migrations, models\n'), ((1257, 1290), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (1276, 1290), False, 'from django.db import migrations, models\n'), ((1334, 1382), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""12:15"""', 'max_length': '(10)'}), "(default='12:15', max_length=10)\n", (1350, 1382), False, 'from django.db import migrations, models\n'), ((1419, 1449), 'django.db.models.IntegerField', 'models.IntegerField', ([], 
{'default': '(3)'}), '(default=3)\n', (1438, 1449), False, 'from django.db import migrations, models\n'), ((1495, 1528), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (1514, 1528), False, 'from django.db import migrations, models\n'), ((1572, 1620), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""12:30"""', 'max_length': '(10)'}), "(default='12:30', max_length=10)\n", (1588, 1620), False, 'from django.db import migrations, models\n'), ((1657, 1687), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(4)'}), '(default=4)\n', (1676, 1687), False, 'from django.db import migrations, models\n'), ((1733, 1766), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (1752, 1766), False, 'from django.db import migrations, models\n'), ((1810, 1858), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""12:45"""', 'max_length': '(10)'}), "(default='12:45', max_length=10)\n", (1826, 1858), False, 'from django.db import migrations, models\n'), ((1895, 1925), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(5)'}), '(default=5)\n', (1914, 1925), False, 'from django.db import migrations, models\n'), ((1971, 2004), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (1990, 2004), False, 'from django.db import migrations, models\n'), ((2048, 2096), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""13:00"""', 'max_length': '(10)'}), "(default='13:00', max_length=10)\n", (2064, 2096), False, 'from django.db import migrations, models\n'), ((2133, 2163), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(6)'}), '(default=6)\n', (2152, 2163), False, 'from django.db import migrations, models\n'), ((2209, 2242), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), 
'(default=True)\n', (2228, 2242), False, 'from django.db import migrations, models\n'), ((2286, 2334), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""13:15"""', 'max_length': '(10)'}), "(default='13:15', max_length=10)\n", (2302, 2334), False, 'from django.db import migrations, models\n'), ((2371, 2401), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(7)'}), '(default=7)\n', (2390, 2401), False, 'from django.db import migrations, models\n'), ((2447, 2480), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (2466, 2480), False, 'from django.db import migrations, models\n'), ((2524, 2572), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""13:30"""', 'max_length': '(10)'}), "(default='13:30', max_length=10)\n", (2540, 2572), False, 'from django.db import migrations, models\n'), ((2609, 2639), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(8)'}), '(default=8)\n', (2628, 2639), False, 'from django.db import migrations, models\n'), ((2685, 2718), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (2704, 2718), False, 'from django.db import migrations, models\n'), ((2762, 2810), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""13:45"""', 'max_length': '(10)'}), "(default='13:45', max_length=10)\n", (2778, 2810), False, 'from django.db import migrations, models\n'), ((2847, 2877), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(9)'}), '(default=9)\n', (2866, 2877), False, 'from django.db import migrations, models\n'), ((2923, 2956), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (2942, 2956), False, 'from django.db import migrations, models\n'), ((3000, 3048), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""14:00"""', 'max_length': '(10)'}), 
"(default='14:00', max_length=10)\n", (3016, 3048), False, 'from django.db import migrations, models\n'), ((3086, 3117), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(10)'}), '(default=10)\n', (3105, 3117), False, 'from django.db import migrations, models\n'), ((3164, 3197), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (3183, 3197), False, 'from django.db import migrations, models\n'), ((3241, 3289), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""14:15"""', 'max_length': '(10)'}), "(default='14:15', max_length=10)\n", (3257, 3289), False, 'from django.db import migrations, models\n'), ((3327, 3358), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(11)'}), '(default=11)\n', (3346, 3358), False, 'from django.db import migrations, models\n'), ((3405, 3438), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (3424, 3438), False, 'from django.db import migrations, models\n'), ((3482, 3530), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""14:30"""', 'max_length': '(10)'}), "(default='14:30', max_length=10)\n", (3498, 3530), False, 'from django.db import migrations, models\n'), ((3568, 3599), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(12)'}), '(default=12)\n', (3587, 3599), False, 'from django.db import migrations, models\n'), ((3646, 3679), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (3665, 3679), False, 'from django.db import migrations, models\n'), ((3723, 3771), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""14:45"""', 'max_length': '(10)'}), "(default='14:45', max_length=10)\n", (3739, 3771), False, 'from django.db import migrations, models\n'), ((3809, 3840), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': 
'(13)'}), '(default=13)\n', (3828, 3840), False, 'from django.db import migrations, models\n'), ((3887, 3920), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (3906, 3920), False, 'from django.db import migrations, models\n'), ((3964, 4012), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""15:00"""', 'max_length': '(10)'}), "(default='15:00', max_length=10)\n", (3980, 4012), False, 'from django.db import migrations, models\n'), ((4050, 4081), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(14)'}), '(default=14)\n', (4069, 4081), False, 'from django.db import migrations, models\n'), ((4128, 4161), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (4147, 4161), False, 'from django.db import migrations, models\n'), ((4205, 4253), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""15:15"""', 'max_length': '(10)'}), "(default='15:15', max_length=10)\n", (4221, 4253), False, 'from django.db import migrations, models\n'), ((4291, 4322), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(15)'}), '(default=15)\n', (4310, 4322), False, 'from django.db import migrations, models\n'), ((4369, 4402), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (4388, 4402), False, 'from django.db import migrations, models\n'), ((4446, 4494), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""15:30"""', 'max_length': '(10)'}), "(default='15:30', max_length=10)\n", (4462, 4494), False, 'from django.db import migrations, models\n'), ((4532, 4563), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(16)'}), '(default=16)\n', (4551, 4563), False, 'from django.db import migrations, models\n'), ((4610, 4643), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), 
'(default=True)\n', (4629, 4643), False, 'from django.db import migrations, models\n'), ((4687, 4735), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""15:45"""', 'max_length': '(10)'}), "(default='15:45', max_length=10)\n", (4703, 4735), False, 'from django.db import migrations, models\n'), ((4773, 4804), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(17)'}), '(default=17)\n', (4792, 4804), False, 'from django.db import migrations, models\n'), ((4851, 4884), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (4870, 4884), False, 'from django.db import migrations, models\n'), ((4928, 4976), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""16:00"""', 'max_length': '(10)'}), "(default='16:00', max_length=10)\n", (4944, 4976), False, 'from django.db import migrations, models\n'), ((5014, 5045), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(18)'}), '(default=18)\n', (5033, 5045), False, 'from django.db import migrations, models\n'), ((5092, 5125), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (5111, 5125), False, 'from django.db import migrations, models\n'), ((5169, 5217), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""16:15"""', 'max_length': '(10)'}), "(default='16:15', max_length=10)\n", (5185, 5217), False, 'from django.db import migrations, models\n'), ((5255, 5286), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(19)'}), '(default=19)\n', (5274, 5286), False, 'from django.db import migrations, models\n'), ((5333, 5366), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (5352, 5366), False, 'from django.db import migrations, models\n'), ((5410, 5458), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""16:30"""', 'max_length': 
'(10)'}), "(default='16:30', max_length=10)\n", (5426, 5458), False, 'from django.db import migrations, models\n'), ((5496, 5527), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(20)'}), '(default=20)\n', (5515, 5527), False, 'from django.db import migrations, models\n'), ((5574, 5607), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (5593, 5607), False, 'from django.db import migrations, models\n'), ((5651, 5699), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""16:45"""', 'max_length': '(10)'}), "(default='16:45', max_length=10)\n", (5667, 5699), False, 'from django.db import migrations, models\n'), ((5737, 5768), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(21)'}), '(default=21)\n', (5756, 5768), False, 'from django.db import migrations, models\n'), ((5815, 5848), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (5834, 5848), False, 'from django.db import migrations, models\n'), ((5892, 5940), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""17:00"""', 'max_length': '(10)'}), "(default='17:00', max_length=10)\n", (5908, 5940), False, 'from django.db import migrations, models\n'), ((5978, 6009), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(22)'}), '(default=22)\n', (5997, 6009), False, 'from django.db import migrations, models\n'), ((6056, 6089), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (6075, 6089), False, 'from django.db import migrations, models\n'), ((6133, 6181), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""17:15"""', 'max_length': '(10)'}), "(default='17:15', max_length=10)\n", (6149, 6181), False, 'from django.db import migrations, models\n'), ((6219, 6250), 'django.db.models.IntegerField', 'models.IntegerField', ([], 
{'default': '(23)'}), '(default=23)\n', (6238, 6250), False, 'from django.db import migrations, models\n'), ((6297, 6330), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (6316, 6330), False, 'from django.db import migrations, models\n'), ((6374, 6422), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""17:30"""', 'max_length': '(10)'}), "(default='17:30', max_length=10)\n", (6390, 6422), False, 'from django.db import migrations, models\n'), ((6460, 6491), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(24)'}), '(default=24)\n', (6479, 6491), False, 'from django.db import migrations, models\n'), ((6538, 6571), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (6557, 6571), False, 'from django.db import migrations, models\n'), ((6615, 6663), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""17:45"""', 'max_length': '(10)'}), "(default='17:45', max_length=10)\n", (6631, 6663), False, 'from django.db import migrations, models\n'), ((6701, 6732), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(25)'}), '(default=25)\n', (6720, 6732), False, 'from django.db import migrations, models\n'), ((6779, 6812), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (6798, 6812), False, 'from django.db import migrations, models\n'), ((6856, 6904), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""18:00"""', 'max_length': '(10)'}), "(default='18:00', max_length=10)\n", (6872, 6904), False, 'from django.db import migrations, models\n'), ((6942, 6973), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(26)'}), '(default=26)\n', (6961, 6973), False, 'from django.db import migrations, models\n'), ((7020, 7053), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': 
'(True)'}), '(default=True)\n', (7039, 7053), False, 'from django.db import migrations, models\n'), ((7097, 7145), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""18:15"""', 'max_length': '(10)'}), "(default='18:15', max_length=10)\n", (7113, 7145), False, 'from django.db import migrations, models\n'), ((7183, 7214), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(27)'}), '(default=27)\n', (7202, 7214), False, 'from django.db import migrations, models\n'), ((7261, 7294), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (7280, 7294), False, 'from django.db import migrations, models\n'), ((7338, 7386), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""18:30"""', 'max_length': '(10)'}), "(default='18:30', max_length=10)\n", (7354, 7386), False, 'from django.db import migrations, models\n'), ((7424, 7455), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(28)'}), '(default=28)\n', (7443, 7455), False, 'from django.db import migrations, models\n'), ((7502, 7535), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (7521, 7535), False, 'from django.db import migrations, models\n'), ((7579, 7627), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""18:45"""', 'max_length': '(10)'}), "(default='18:45', max_length=10)\n", (7595, 7627), False, 'from django.db import migrations, models\n'), ((7665, 7696), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(29)'}), '(default=29)\n', (7684, 7696), False, 'from django.db import migrations, models\n'), ((7743, 7776), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (7762, 7776), False, 'from django.db import migrations, models\n'), ((7820, 7868), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""19:00"""', 
'max_length': '(10)'}), "(default='19:00', max_length=10)\n", (7836, 7868), False, 'from django.db import migrations, models\n'), ((7906, 7937), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(30)'}), '(default=30)\n', (7925, 7937), False, 'from django.db import migrations, models\n'), ((7984, 8017), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (8003, 8017), False, 'from django.db import migrations, models\n'), ((8061, 8109), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""19:15"""', 'max_length': '(10)'}), "(default='19:15', max_length=10)\n", (8077, 8109), False, 'from django.db import migrations, models\n'), ((8147, 8178), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(31)'}), '(default=31)\n', (8166, 8178), False, 'from django.db import migrations, models\n'), ((8225, 8258), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (8244, 8258), False, 'from django.db import migrations, models\n'), ((8302, 8350), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""19:30"""', 'max_length': '(10)'}), "(default='19:30', max_length=10)\n", (8318, 8350), False, 'from django.db import migrations, models\n'), ((8388, 8419), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(32)'}), '(default=32)\n', (8407, 8419), False, 'from django.db import migrations, models\n'), ((8466, 8499), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (8485, 8499), False, 'from django.db import migrations, models\n'), ((8543, 8591), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""19:45"""', 'max_length': '(10)'}), "(default='19:45', max_length=10)\n", (8559, 8591), False, 'from django.db import migrations, models\n'), ((8629, 8660), 'django.db.models.IntegerField', 'models.IntegerField', 
([], {'default': '(33)'}), '(default=33)\n', (8648, 8660), False, 'from django.db import migrations, models\n'), ((8707, 8740), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (8726, 8740), False, 'from django.db import migrations, models\n'), ((8784, 8832), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""20:00"""', 'max_length': '(10)'}), "(default='20:00', max_length=10)\n", (8800, 8832), False, 'from django.db import migrations, models\n'), ((8870, 8901), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(34)'}), '(default=34)\n', (8889, 8901), False, 'from django.db import migrations, models\n'), ((8948, 8981), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (8967, 8981), False, 'from django.db import migrations, models\n'), ((9025, 9073), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""20:15"""', 'max_length': '(10)'}), "(default='20:15', max_length=10)\n", (9041, 9073), False, 'from django.db import migrations, models\n'), ((9111, 9142), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(35)'}), '(default=35)\n', (9130, 9142), False, 'from django.db import migrations, models\n'), ((9189, 9222), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (9208, 9222), False, 'from django.db import migrations, models\n'), ((9266, 9314), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""20:30"""', 'max_length': '(10)'}), "(default='20:30', max_length=10)\n", (9282, 9314), False, 'from django.db import migrations, models\n'), ((9352, 9383), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(36)'}), '(default=36)\n', (9371, 9383), False, 'from django.db import migrations, models\n'), ((9430, 9463), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': 
'(True)'}), '(default=True)\n', (9449, 9463), False, 'from django.db import migrations, models\n'), ((9507, 9555), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""20:45"""', 'max_length': '(10)'}), "(default='20:45', max_length=10)\n", (9523, 9555), False, 'from django.db import migrations, models\n'), ((9593, 9624), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(37)'}), '(default=37)\n', (9612, 9624), False, 'from django.db import migrations, models\n'), ((9671, 9704), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (9690, 9704), False, 'from django.db import migrations, models\n'), ((9748, 9796), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""21:00"""', 'max_length': '(10)'}), "(default='21:00', max_length=10)\n", (9764, 9796), False, 'from django.db import migrations, models\n'), ((9834, 9865), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(38)'}), '(default=38)\n', (9853, 9865), False, 'from django.db import migrations, models\n'), ((9912, 9945), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (9931, 9945), False, 'from django.db import migrations, models\n'), ((9989, 10037), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""21:15"""', 'max_length': '(10)'}), "(default='21:15', max_length=10)\n", (10005, 10037), False, 'from django.db import migrations, models\n'), ((10075, 10106), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(39)'}), '(default=39)\n', (10094, 10106), False, 'from django.db import migrations, models\n'), ((10153, 10186), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (10172, 10186), False, 'from django.db import migrations, models\n'), ((10230, 10278), 'django.db.models.CharField', 'models.CharField', ([], {'default': 
'"""21:30"""', 'max_length': '(10)'}), "(default='21:30', max_length=10)\n", (10246, 10278), False, 'from django.db import migrations, models\n'), ((10316, 10347), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(40)'}), '(default=40)\n', (10335, 10347), False, 'from django.db import migrations, models\n'), ((10394, 10427), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (10413, 10427), False, 'from django.db import migrations, models\n'), ((10471, 10519), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""21:45"""', 'max_length': '(10)'}), "(default='21:45', max_length=10)\n", (10487, 10519), False, 'from django.db import migrations, models\n'), ((10557, 10588), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(41)'}), '(default=41)\n', (10576, 10588), False, 'from django.db import migrations, models\n'), ((10635, 10668), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (10654, 10668), False, 'from django.db import migrations, models\n'), ((10712, 10760), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""22:00"""', 'max_length': '(10)'}), "(default='22:00', max_length=10)\n", (10728, 10760), False, 'from django.db import migrations, models\n'), ((10798, 10829), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(42)'}), '(default=42)\n', (10817, 10829), False, 'from django.db import migrations, models\n'), ((10876, 10909), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (10895, 10909), False, 'from django.db import migrations, models\n'), ((10953, 11001), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""22:15"""', 'max_length': '(10)'}), "(default='22:15', max_length=10)\n", (10969, 11001), False, 'from django.db import migrations, models\n'), ((11039, 11070), 
'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(43)'}), '(default=43)\n', (11058, 11070), False, 'from django.db import migrations, models\n'), ((11117, 11150), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (11136, 11150), False, 'from django.db import migrations, models\n'), ((11194, 11242), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""22:30"""', 'max_length': '(10)'}), "(default='22:30', max_length=10)\n", (11210, 11242), False, 'from django.db import migrations, models\n'), ((11280, 11311), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(44)'}), '(default=44)\n', (11299, 11311), False, 'from django.db import migrations, models\n'), ((11358, 11391), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (11377, 11391), False, 'from django.db import migrations, models\n'), ((11435, 11483), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""22:45"""', 'max_length': '(10)'}), "(default='22:45', max_length=10)\n", (11451, 11483), False, 'from django.db import migrations, models\n'), ((11521, 11552), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(45)'}), '(default=45)\n', (11540, 11552), False, 'from django.db import migrations, models\n'), ((11599, 11632), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (11618, 11632), False, 'from django.db import migrations, models\n'), ((11676, 11724), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""23:00"""', 'max_length': '(10)'}), "(default='23:00', max_length=10)\n", (11692, 11724), False, 'from django.db import migrations, models\n'), ((11762, 11793), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(46)'}), '(default=46)\n', (11781, 11793), False, 'from django.db import migrations, 
models\n'), ((11840, 11873), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (11859, 11873), False, 'from django.db import migrations, models\n'), ((11917, 11965), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""23:15"""', 'max_length': '(10)'}), "(default='23:15', max_length=10)\n", (11933, 11965), False, 'from django.db import migrations, models\n'), ((12003, 12034), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(47)'}), '(default=47)\n', (12022, 12034), False, 'from django.db import migrations, models\n'), ((12081, 12114), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (12100, 12114), False, 'from django.db import migrations, models\n'), ((12158, 12206), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""23:30"""', 'max_length': '(10)'}), "(default='23:30', max_length=10)\n", (12174, 12206), False, 'from django.db import migrations, models\n'), ((12244, 12275), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(48)'}), '(default=48)\n', (12263, 12275), False, 'from django.db import migrations, models\n'), ((12322, 12355), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (12341, 12355), False, 'from django.db import migrations, models\n'), ((12399, 12447), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""23:45"""', 'max_length': '(10)'}), "(default='23:45', max_length=10)\n", (12415, 12447), False, 'from django.db import migrations, models\n'), ((12476, 12571), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""core.RestaurantTables"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'core.RestaurantTables')\n", (12493, 12571), False, 'from django.db import migrations, models\n')] |
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from datetime import datetime
from mojitobooks import db, ma, app
class User(db.Model):
    """Account record.

    One user owns many ``Card`` rows (via the ``cards`` relationship) and
    can request password resets through short-lived signed tokens.
    """
    id = db.Column(db.Integer, primary_key=True)
    public_id = db.Column(db.String(50), unique=True)  # opaque external identifier
    username = db.Column(db.String(20), unique=True, nullable=False)
    email = db.Column(db.String(120), unique=True, nullable=False)
    name = db.Column(db.String(90), nullable=False)
    # NOTE(review): presumably stores a hash (60 chars fits bcrypt) -- confirm
    # that hashing happens in the registration/login code, not here.
    password = db.Column(db.String(60), nullable=False)
    profile_image = db.Column(db.String(200), nullable=False,
                              default='default-avatar.png')
    date_joined = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)
    bio = db.Column(db.Text)
    cards = db.relationship('Card', backref='author', lazy=True)

    def get_reset_token(self, expires_sec=1800):
        """Return a signed token embedding this user's id.

        The token expires ``expires_sec`` seconds after creation
        (default 30 minutes).
        """
        s = Serializer(app.config['SECRET_KEY'], expires_sec)
        return s.dumps({'user_id': self.id}).decode('utf-8')

    @staticmethod
    def verify_reset_token(token):
        """Return the ``User`` a reset token belongs to.

        Returns ``None`` when the token is invalid, expired, or malformed.
        """
        s = Serializer(app.config['SECRET_KEY'])
        try:
            user_id = s.loads(token)['user_id']
        except Exception:  # bad signature, expired token, or missing key
            return None
        return User.query.get(user_id)

    def __repr__(self):
        return f"User('{self.name}', '{self.username}', '{self.profile_image}')"
class Card(db.Model):
    """A book card posted by a user (reachable as ``User.cards`` /
    ``card.author`` via the backref declared on ``User``)."""
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(100), nullable=False)
    description = db.Column(db.Text, nullable=False)
    likes = db.Column(db.Integer, nullable=False, default=0)  # like counter, starts at zero
    date_posted = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)  # set at insert time
    emoji = db.Column(db.String(200), nullable=False)
    # Filename of the cover image served from static assets.
    picture = db.Column(db.String(20), nullable=False,
                        default='card_default.png')
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)  # owning user

    def __repr__(self):
        return f"Card('{self.title}', '{self.date_posted}')"
class UserSchema(ma.ModelSchema):
    """Marshmallow serializer for ``User``.

    Only public fields are exposed; ``password``, ``id`` and ``public_id``
    are deliberately left out of the API payload.
    """
    class Meta:
        model = User
        fields = ('username','email','name','profile_image','bio', 'date_joined')
class CardSchema(ma.ModelSchema):
    """Marshmallow serializer for ``Card``; includes the ``author`` backref
    so API consumers can resolve the posting user."""
    class Meta:
        model = Card
        fields = ('id', 'title', 'description', 'likes', 'date_posted', 'picture', 'author', 'emoji')
| [
"mojitobooks.db.relationship",
"mojitobooks.db.ForeignKey",
"itsdangerous.TimedJSONWebSignatureSerializer",
"mojitobooks.db.Column",
"mojitobooks.db.String"
] | [((169, 208), 'mojitobooks.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (178, 208), False, 'from mojitobooks import db, ma, app\n'), ((647, 710), 'mojitobooks.db.Column', 'db.Column', (['db.DateTime'], {'nullable': '(False)', 'default': 'datetime.utcnow'}), '(db.DateTime, nullable=False, default=datetime.utcnow)\n', (656, 710), False, 'from mojitobooks import db, ma, app\n'), ((721, 739), 'mojitobooks.db.Column', 'db.Column', (['db.Text'], {}), '(db.Text)\n', (730, 739), False, 'from mojitobooks import db, ma, app\n'), ((752, 804), 'mojitobooks.db.relationship', 'db.relationship', (['"""Card"""'], {'backref': '"""author"""', 'lazy': '(True)'}), "('Card', backref='author', lazy=True)\n", (767, 804), False, 'from mojitobooks import db, ma, app\n'), ((1367, 1406), 'mojitobooks.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (1376, 1406), False, 'from mojitobooks import db, ma, app\n'), ((1479, 1513), 'mojitobooks.db.Column', 'db.Column', (['db.Text'], {'nullable': '(False)'}), '(db.Text, nullable=False)\n', (1488, 1513), False, 'from mojitobooks import db, ma, app\n'), ((1526, 1574), 'mojitobooks.db.Column', 'db.Column', (['db.Integer'], {'nullable': '(False)', 'default': '(0)'}), '(db.Integer, nullable=False, default=0)\n', (1535, 1574), False, 'from mojitobooks import db, ma, app\n'), ((1593, 1656), 'mojitobooks.db.Column', 'db.Column', (['db.DateTime'], {'nullable': '(False)', 'default': 'datetime.utcnow'}), '(db.DateTime, nullable=False, default=datetime.utcnow)\n', (1602, 1656), False, 'from mojitobooks import db, ma, app\n'), ((235, 248), 'mojitobooks.db.String', 'db.String', (['(50)'], {}), '(50)\n', (244, 248), False, 'from mojitobooks import db, ma, app\n'), ((288, 301), 'mojitobooks.db.String', 'db.String', (['(20)'], {}), '(20)\n', (297, 301), False, 'from mojitobooks import db, ma, app\n'), ((354, 368), 'mojitobooks.db.String', 
'db.String', (['(120)'], {}), '(120)\n', (363, 368), False, 'from mojitobooks import db, ma, app\n'), ((420, 433), 'mojitobooks.db.String', 'db.String', (['(90)'], {}), '(90)\n', (429, 433), False, 'from mojitobooks import db, ma, app\n'), ((476, 489), 'mojitobooks.db.String', 'db.String', (['(60)'], {}), '(60)\n', (485, 489), False, 'from mojitobooks import db, ma, app\n'), ((537, 551), 'mojitobooks.db.String', 'db.String', (['(200)'], {}), '(200)\n', (546, 551), False, 'from mojitobooks import db, ma, app\n'), ((867, 916), 'itsdangerous.TimedJSONWebSignatureSerializer', 'Serializer', (["app.config['SECRET_KEY']", 'expires_sec'], {}), "(app.config['SECRET_KEY'], expires_sec)\n", (877, 916), True, 'from itsdangerous import TimedJSONWebSignatureSerializer as Serializer\n'), ((1044, 1080), 'itsdangerous.TimedJSONWebSignatureSerializer', 'Serializer', (["app.config['SECRET_KEY']"], {}), "(app.config['SECRET_KEY'])\n", (1054, 1080), True, 'from itsdangerous import TimedJSONWebSignatureSerializer as Serializer\n'), ((1429, 1443), 'mojitobooks.db.String', 'db.String', (['(100)'], {}), '(100)\n', (1438, 1443), False, 'from mojitobooks import db, ma, app\n'), ((1679, 1693), 'mojitobooks.db.String', 'db.String', (['(200)'], {}), '(200)\n', (1688, 1693), False, 'from mojitobooks import db, ma, app\n'), ((1735, 1748), 'mojitobooks.db.String', 'db.String', (['(20)'], {}), '(20)\n', (1744, 1748), False, 'from mojitobooks import db, ma, app\n'), ((1854, 1878), 'mojitobooks.db.ForeignKey', 'db.ForeignKey', (['"""user.id"""'], {}), "('user.id')\n", (1867, 1878), False, 'from mojitobooks import db, ma, app\n')] |
from htm_rl.modules.htm.pattern_memory import PatternMemory
from htm.bindings.sdr import SDR
import numpy as np
from tqdm import tqdm
EPS = 1e-12
def get_labels(pm: PatternMemory, data, input_size):
    """Return {index: label} mapping each pattern in *data* to the label
    PatternMemory assigns it, without learning (learn flag is False)."""
    sdr = SDR(input_size)
    result = {}
    for idx, pattern in enumerate(data):
        sdr.sparse = pattern
        result[idx] = pm.compute(sdr, False)
    return result
def train(pm: PatternMemory, data, epochs, input_size, noise=0.0):
    """Train *pm* on *data* for *epochs* shuffled passes.

    When noise > 0, each presented sample has a fraction ``noise`` of its
    active bits removed and the same number of random bits added before
    being fed to the memory.  Returns the final {index: label} mapping
    produced by get_labels().
    """
    input_pattern = SDR(input_size)
    indices = np.arange(len(data))
    for epoch in tqdm(range(epochs)):
        # present the patterns in a fresh random order every epoch
        np.random.shuffle(indices)
        for i in indices:
            if noise > 0:
                # corrupt the sample: drop n_bits active bits, add n_bits random ones
                n_bits = int(noise * len(data[i]))
                bits_to_remove = np.random.choice(data[i], n_bits, replace=False)
                # NOTE(review): bits_to_add may overlap bits already present,
                # so the effective noise can be slightly below `noise`
                bits_to_add = np.random.choice(np.arange(input_size), n_bits, replace=False)
                noisy_sample = np.setdiff1d(data[i], bits_to_remove)
                noisy_sample = np.union1d(noisy_sample, bits_to_add)
            else:
                noisy_sample = data[i]

            input_pattern.sparse = noisy_sample
            pm.compute(input_pattern, True)
        # print(f'epoch {epoch}: {get_labels(pm, data, input_size)}')
    labels = get_labels(pm, data, input_size)
    return labels
def test_retrieval(pm: PatternMemory, data, labels):
    """Return the mean IoU between each stored pattern and its original item.

    Items whose label is None (never stored) contribute a score of 0.
    """
    scores = []
    for idx, item in enumerate(data):
        label = labels[idx]
        if label is None:
            scores.append(0)
            continue
        stored = pm.get_pattern(label)
        overlap = np.intersect1d(stored, item).size
        union = np.union1d(stored, item).size
        scores.append(overlap / (union + EPS))
    return sum(scores) / len(scores)
def generate_data(input_size, n_patterns, sparsity):
data = [np.random.choice(np.arange(0, input_size), max(int(input_size * sparsity), 1), replace=False) for _ in range(n_patterns)]
return data
def main():
    """Train a PatternMemory on random sparse patterns and print mean retrieval IoU."""
    input_size = 1000
    epochs = 20
    seed = 5436
    n_patterns = 1000
    sparsity = 0.05

    config = {
        'input_size': input_size,
        'max_segments': 1000,
        'min_distance': 0.1,
        'permanence_increment': 0.1,
        'permanence_decrement': 0.01,
        'segment_decrement': 0.1,
        'permanence_connected_threshold': 0.5,
        'seed': seed,
    }

    patterns = generate_data(input_size, n_patterns, sparsity)
    memory = PatternMemory(**config)
    labels = train(memory, patterns, epochs, input_size, noise=0.09)
    print(test_retrieval(memory, patterns, labels))
if __name__ == '__main__':
main()
| [
"numpy.intersect1d",
"numpy.union1d",
"numpy.random.choice",
"htm.bindings.sdr.SDR",
"htm_rl.modules.htm.pattern_memory.PatternMemory",
"numpy.setdiff1d",
"numpy.arange",
"numpy.random.shuffle"
] | [((243, 258), 'htm.bindings.sdr.SDR', 'SDR', (['input_size'], {}), '(input_size)\n', (246, 258), False, 'from htm.bindings.sdr import SDR\n'), ((491, 506), 'htm.bindings.sdr.SDR', 'SDR', (['input_size'], {}), '(input_size)\n', (494, 506), False, 'from htm.bindings.sdr import SDR\n'), ((2326, 2349), 'htm_rl.modules.htm.pattern_memory.PatternMemory', 'PatternMemory', ([], {}), '(**config)\n', (2339, 2349), False, 'from htm_rl.modules.htm.pattern_memory import PatternMemory\n'), ((588, 614), 'numpy.random.shuffle', 'np.random.shuffle', (['indices'], {}), '(indices)\n', (605, 614), True, 'import numpy as np\n'), ((1756, 1780), 'numpy.arange', 'np.arange', (['(0)', 'input_size'], {}), '(0, input_size)\n', (1765, 1780), True, 'import numpy as np\n'), ((751, 799), 'numpy.random.choice', 'np.random.choice', (['data[i]', 'n_bits'], {'replace': '(False)'}), '(data[i], n_bits, replace=False)\n', (767, 799), True, 'import numpy as np\n'), ((924, 961), 'numpy.setdiff1d', 'np.setdiff1d', (['data[i]', 'bits_to_remove'], {}), '(data[i], bits_to_remove)\n', (936, 961), True, 'import numpy as np\n'), ((993, 1030), 'numpy.union1d', 'np.union1d', (['noisy_sample', 'bits_to_add'], {}), '(noisy_sample, bits_to_add)\n', (1003, 1030), True, 'import numpy as np\n'), ((847, 868), 'numpy.arange', 'np.arange', (['input_size'], {}), '(input_size)\n', (856, 868), True, 'import numpy as np\n'), ((1528, 1557), 'numpy.intersect1d', 'np.intersect1d', (['pattern', 'item'], {}), '(pattern, item)\n', (1542, 1557), True, 'import numpy as np\n'), ((1564, 1589), 'numpy.union1d', 'np.union1d', (['pattern', 'item'], {}), '(pattern, item)\n', (1574, 1589), True, 'import numpy as np\n')] |
from typing import List
import pytest
from pathlib import Path
from graphtik.sphinxext import DocFilesPurgatory, _image_formats
@pytest.fixture
def img_docs() -> List[str]:
    """Three fake document names: d0, d1, d2."""
    docs = []
    for i in range(3):
        docs.append(f"d{i}")
    return docs
@pytest.fixture
def img_files(tmpdir) -> List[Path]:
    """Create three empty files f0..f2 under *tmpdir* and return them as Paths."""
    created = []
    for i in range(3):
        f = tmpdir.join(f"f{i}")
        f.ensure()
        created.append(f)
    return [Path(f) for f in created]
@pytest.fixture
def img_reg(img_docs, img_files) -> DocFilesPurgatory:
    """A purgatory registering: doc0 -> {f0, f1}, doc1 -> {f0}, doc2 -> {f2}."""
    registry = DocFilesPurgatory()
    for doc, fpath in (
        (img_docs[0], img_files[0]),
        (img_docs[0], img_files[1]),
        (img_docs[1], img_files[0]),
        (img_docs[2], img_files[2]),
    ):
        registry.register_doc_fpath(doc, fpath)
    return registry
def test_image_purgatory(img_docs, img_files, img_reg):
    """Files are deleted only once no remaining doc references them;
    purging is idempotent (hence the repeated range(2) loops)."""
    # doc2 owns only f2 -> f2 deleted, f0/f1 survive (still referenced by doc0/doc1)
    for _ in range(2):
        img_reg.purge_doc(img_docs[2])
        assert list(img_reg.doc_fpaths) == img_docs[:2]
        assert img_files[0].exists()
        assert img_files[1].exists()
        assert not img_files[2].exists()

    # doc1 references f0, but doc0 still does too -> nothing deleted yet
    for _ in range(2):
        img_reg.purge_doc(img_docs[1])
        assert list(img_reg.doc_fpaths) == img_docs[:1]
        assert img_files[0].exists()
        assert img_files[1].exists()
        assert not img_files[2].exists()

    # purging the last doc drops the remaining files
    img_reg.purge_doc(img_docs[0])
    assert not img_reg.doc_fpaths
    assert not img_files[0].exists()
    assert not img_files[1].exists()
    assert not img_files[2].exists()

    # purging already-purged docs must be a harmless no-op
    img_reg.purge_doc(img_docs[0])
    img_reg.purge_doc(img_docs[1])
    img_reg.purge_doc(img_docs[2])
| [
"graphtik.sphinxext.DocFilesPurgatory",
"pathlib.Path"
] | [((486, 505), 'graphtik.sphinxext.DocFilesPurgatory', 'DocFilesPurgatory', ([], {}), '()\n', (503, 505), False, 'from graphtik.sphinxext import DocFilesPurgatory, _image_formats\n'), ((375, 382), 'pathlib.Path', 'Path', (['i'], {}), '(i)\n', (379, 382), False, 'from pathlib import Path\n')] |
import os
import jwt
from datetime import datetime,timedelta
from django.urls import reverse
from rest_framework import status
from authors.apps.authentication.models import User
from authors.apps.authentication.tests.base import BaseTest
from authors.settings import SECRET_KEY
from authors.apps.authentication.tests.test_data import EMAIL_ACTIVATION_DATA,PASSWORD_RESET_DATA,VALID_USER_DATA
class TestPasswordTest(BaseTest):
    """
    Tests for the password-reset flow: requesting a reset token by email
    and resetting the password with that token.
    """
    def test_send_email_activation_link(self):
        """
        A registered user requesting a reset receives a reset link by email.
        """
        response = self.client.post(
            self.register_url,
            data=VALID_USER_DATA,
            format='json'
        )
        response = self.client.post(
            self.passoword_reset_url,
            data=EMAIL_ACTIVATION_DATA,
            format='json'
        )
        self.assertEquals(
            response.status_code,
            status.HTTP_200_OK
        )
        self.assertEquals(
            response.data['message'],
            'password reset link has been sent to your email'
        )
    def test_email_does_not_exist_during_send_token(self):
        """
        Requesting a reset for an unregistered email is rejected.
        """
        self.response = self.client.post(
            self.passoword_reset_url,
            data=EMAIL_ACTIVATION_DATA,
            format='json'
        )
        self.assertEquals(
            self.response.status_code,
            status.HTTP_400_BAD_REQUEST
        )
        self.assertEquals(
            self.response.data['error'],
            'the email does not match any account'
        )
    def test_password_reset(self):
        """
        A user holding a valid token can change their password.
        """
        # create_user returns "Bearer <token>"; keep the token part only
        # NOTE(review): `list` shadows the builtin — consider renaming
        token_bearer =self.create_user(VALID_USER_DATA)
        list = token_bearer.split()
        token =list[1]
        self.response = self.client.put(
            reverse('password-reset', kwargs={'token': token}),
            data={
                "user": {
                    "password": "<PASSWORD>",
                    "confirmpassword": "<PASSWORD>"
                }
            },
            format='json'
        )
        self.assertEquals(self.response.status_code, status.HTTP_200_OK)
        self.assertEquals(
            self.response.data['message'],
            'your password has been reset successfully'
        )
    def test_password_not_match_at_reset(self):
        """
        Mismatching password / confirm-password pair is rejected.
        """
        token_bearer =self.create_user(VALID_USER_DATA)
        list = token_bearer.split()
        token =list[1]
        self.response = self.client.put(
            reverse('password-reset', kwargs={'token': token}
                    ),
            data={
                "user": {
                    "password": "<PASSWORD>",
                    "confirmpassword": "<PASSWORD>"
                }
            },
            format='json'
        )
        self.assertEquals(
            self.response.status_code,
            status.HTTP_400_BAD_REQUEST
        )
        self.assertEquals(
            self.response.data['error'],
            'password and confirm password fields do not match'
        )
    def test_short_password_length_at_reset(self):
        """
        A password that is too short is rejected at reset time.
        """
        token_bearer =self.create_user(VALID_USER_DATA)
        list = token_bearer.split()
        token =list[1]
        self.response = self.client.put(
            reverse('password-reset', kwargs={'token': token}),
            data={
                "user": {
                    "password": "<PASSWORD>",
                    "confirmpassword": "<PASSWORD>"
                }
            },
            format='json'
        )
        self.assertEquals(
            self.response.status_code,
            status.HTTP_400_BAD_REQUEST
        )
| [
"django.urls.reverse"
] | [((2098, 2148), 'django.urls.reverse', 'reverse', (['"""password-reset"""'], {'kwargs': "{'token': token}"}), "('password-reset', kwargs={'token': token})\n", (2105, 2148), False, 'from django.urls import reverse\n'), ((2895, 2945), 'django.urls.reverse', 'reverse', (['"""password-reset"""'], {'kwargs': "{'token': token}"}), "('password-reset', kwargs={'token': token})\n", (2902, 2945), False, 'from django.urls import reverse\n'), ((3659, 3709), 'django.urls.reverse', 'reverse', (['"""password-reset"""'], {'kwargs': "{'token': token}"}), "('password-reset', kwargs={'token': token})\n", (3666, 3709), False, 'from django.urls import reverse\n')] |
"""
xml parser
This is the xml parser for use with the cli_parse module and action plugin
https://github.com/martinblech/xmltodict
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils._text import to_native
from ansible.module_utils.basic import missing_required_lib
from ansible_collections.ansible.netcommon.plugins.module_utils.cli_parser.cli_parserbase import (
CliParserBase,
)
try:
import xmltodict
HAS_XMLTODICT = True
except ImportError:
HAS_XMLTODICT = False
class CliParser(CliParserBase):
    """ The xml parser class
    Convert an xml string to structured data using xmltodict
    """

    DEFAULT_TEMPLATE_EXTENSION = None
    PROVIDE_TEMPLATE_CONTENTS = False

    @staticmethod
    def _check_reqs():
        """ Check the prerequisites for the xml parser

        :return: a (possibly empty) list of missing-library error messages
        """
        errors = []
        if not HAS_XMLTODICT:
            errors.append(missing_required_lib("xmltodict"))
        return errors

    def parse(self, *_args, **_kwargs):
        """ Std entry point for a cli_parse parse execution
        :return: Errors or parsed text as structured data
        :rtype: dict
        :example:
        The parse function of a parser should return a dict:
        {"errors": [a list of errors]}
        or
        {"parsed": obj}
        """
        errors = self._check_reqs()
        if errors:
            return {"errors": errors}

        cli_output = self._task_args.get("text")
        # parser-level "os" wins; fall back to the play's ansible_network_os
        network_os = self._task_args.get("parser").get(
            "os"
        ) or self._task_vars.get("ansible_network_os")

        # the nxos | xml includes a odd garbage line at the end, so remove it
        if "nxos" in network_os:
            splitted = cli_output.splitlines()
            if splitted[-1] == "]]>]]>":
                cli_output = "\n".join(splitted[:-1])

        try:
            parsed = xmltodict.parse(cli_output)
            return {"parsed": parsed}
        except Exception as exc:
            # any xmltodict failure is reported as a parse error, not raised
            msg = "XML parser returned an error while parsing. Error: {err}"
            return {"errors": [msg.format(err=to_native(exc))]}
| [
"ansible.module_utils.basic.missing_required_lib",
"xmltodict.parse",
"ansible.module_utils._text.to_native"
] | [((1913, 1940), 'xmltodict.parse', 'xmltodict.parse', (['cli_output'], {}), '(cli_output)\n', (1928, 1940), False, 'import xmltodict\n'), ((947, 980), 'ansible.module_utils.basic.missing_required_lib', 'missing_required_lib', (['"""xmltodict"""'], {}), "('xmltodict')\n", (967, 980), False, 'from ansible.module_utils.basic import missing_required_lib\n'), ((2135, 2149), 'ansible.module_utils._text.to_native', 'to_native', (['exc'], {}), '(exc)\n', (2144, 2149), False, 'from ansible.module_utils._text import to_native\n')] |
from collections import namedtuple
from itertools import chain
from typing import List, Dict, Tuple, Any
from valacefgen import utils
from valacefgen.vala import VALA_TYPES, VALA_ALIASES, GLIB_TYPES
TypeInfo = utils.TypeInfo
EnumValue = namedtuple("EnumValue", 'c_name vala_name comment')
class Type:
    """Abstract base for every C type tracked by the repository."""

    def __init__(self, c_name: str, vala_name: str, c_header: str, comment: str = None):
        self.c_name = c_name
        self.vala_name = vala_name
        self.c_header = c_header
        self.comment = utils.reformat_comment(comment)

    def is_simple_type(self, repo: "Repository") -> bool:
        """Whether the type qualifies as a Vala [SimpleType]."""
        raise NotImplementedError

    def gen_vala_code(self, repo: "Repository") -> List[str]:
        """Produce the Vala binding lines for this type."""
        raise NotImplementedError
class SimpleType(Type):
    """A primitive type that needs no generated Vala code.

    The original class declared an __init__ that merely forwarded every
    argument unchanged to Type.__init__; it has been removed as redundant —
    the inherited constructor is identical.
    """

    def gen_vala_code(self, repo: "Repository") -> List[str]:
        # primitives are built into Vala, so nothing is emitted
        return []

    def is_simple_type(self, repo: "Repository") -> bool:
        return True
class Enum(Type):
    """A C enum rendered as a Vala enum with per-value CCode cnames."""

    def __init__(self, c_name: str, vala_name: str, c_header: str, values: List[EnumValue], comment: str = None):
        super().__init__(c_name, vala_name, c_header, comment)
        self.values = values

    def is_simple_type(self, repo: "Repository") -> bool:
        return True

    def __repr__(self):
        return "enum %s" % self.vala_name

    def gen_vala_code(self, repo: "Repository") -> List[str]:
        """Emit the Vala `public enum` declaration, one line per list entry."""
        buf = []
        if self.comment:
            buf.extend(utils.vala_comment(self.comment, valadoc=True))
        buf.extend([
            '[CCode (cname="%s", cheader_filename="%s", has_type_id=false)]' % (self.c_name, self.c_header),
            'public enum %s {' % self.vala_name,
        ])
        n_values = len(self.values)
        for i, value in enumerate(self.values):
            if value.comment:
                buf.extend('    ' + line for line in utils.vala_comment(value.comment, valadoc=True))
            buf.append('    [CCode (cname="%s")]' % value.c_name)
            # last value ends with ';' instead of ','
            buf.append('    %s%s' % (value.vala_name, "," if i < n_values - 1 else ";"))
        buf.append('}')
        return buf
class Function(Type):
    """A free C function; can emit both a Vala binding and a C prototype/body."""

    def __init__(self, c_name: str, vala_name: str, c_header: str, ret_type: str = None,
                 params: List[Tuple[str, str]] = None, body: List[str] = None, comment: str = None,
                 vala_generics: List[str] = None, vala_simple_generics: bool = False):
        super().__init__(c_name, vala_name, c_header, comment)
        self.vala_simple_generics = vala_simple_generics
        self.vala_generics = vala_generics
        self.params = params
        # normalize 'void' to None so "has a return type" is a truthiness test
        self.ret_type = ret_type if ret_type != 'void' else None
        self.body = body
        # set externally when the function acts as a Vala constructor
        self.construct = False

    def gen_vala_code(self, repo: "Repository") -> List[str]:
        """Emit the Vala binding; with a body it becomes an inline Vala method."""
        params = repo.vala_param_list(self.params, self.c_name, generics=self.vala_generics)
        ret_type = repo.vala_ret_type(self.ret_type, generics=self.vala_generics)
        buf = []
        if self.comment:
            buf.extend(utils.vala_comment(self.comment, valadoc=True))
        buf.extend([
            '[CCode (cname="%s", cheader_filename="%s"%s)]' % (
                self.c_name, self.c_header,
                ', simple_generics=true' if self.vala_simple_generics else ''),
            'public %s %s%s(%s)%s' % (
                ret_type if not self.construct else '',
                self.vala_name,
                '<%s>' % (','.join(self.vala_generics),) if self.vala_generics else '',
                ', '.join(params),
                ';' if self.body is None else ' {'
            )
        ])
        if self.body is not None:
            body: List[str] = self.body
            buf.extend('    ' + line for line in body)
            buf.append("}")
        return buf

    def gen_c_header(self, repo: "Repository") -> List[str]:
        # prototype only (no body)
        return self._gen_c_code(repo, False)

    def gen_c_code(self, repo: "Repository") -> List[str]:
        # full definition including the body, when one was provided
        return self._gen_c_code(repo, True)

    def _gen_c_code(self, repo: "Repository", gen_body: bool) -> List[str]:
        """Shared C emitter: includes, signature, and optionally the body."""
        params = repo.c_param_list(self.params)
        ret_type = repo.c_ret_type(self.ret_type)
        buf = []
        if self.c_header:
            buf.extend('#include "%s"' % h for h in self.c_header.split(';'))
        buf.extend([
            '%s %s(%s)%s' % (
                ret_type,
                self.c_name,
                ', '.join(params),
                ';' if not gen_body or self.body is None else ' {'
            )
        ])
        if gen_body and self.body is not None:
            body: List[str] = self.body
            buf.extend('    ' + line for line in body)
            buf.append("}")
        return buf

    def is_simple_type(self, repo: "Repository") -> bool:
        return False
class OpaqueClass(Type):
    """An opaque C type bound as a Vala [Compact] class.

    The create/free/copy functions follow the `basename` prefix convention;
    `create_func` is detected when a parameterless function returning this
    type is registered via add_method().
    """

    def __init__(self, basename: str, c_type: str, c_name: str, vala_name: str, c_header: str, comment: str = None):
        super().__init__(c_name, vala_name, c_header, comment)
        self.basename = basename
        self.c_type = c_type
        self.create_func = None
        self.free_func = basename + "free"
        self.copy_func = basename + "copy"
        self.methods = []

    def is_simple_type(self, repo: "Repository") -> bool:
        return False

    def add_method(self, func: "Function"):
        """Register *func* as the constructor or as an instance method.

        Fix: guard against func.params being None or empty — previously that
        case crashed with TypeError/IndexError on func.params[0] instead of
        raising the intended NotImplementedError.
        """
        if self.c_name == func.ret_type and not func.params:
            # parameterless factory returning this type -> constructor
            self.create_func = func.c_name
        elif func.params and func.params[0][0] == self.c_name:
            # first parameter is the instance -> instance method
            self.methods.append(func)
        else:
            raise NotImplementedError(func)

    def gen_vala_code(self, repo: "Repository") -> List[str]:
        """Emit the [Compact] class binding with constructor and methods."""
        buf = []
        if self.comment:
            buf.extend(utils.vala_comment(self.comment, valadoc=True))
        ccode = {
            'cname': '"%s"' % self.c_type,
            'cheader_filename': '"%s"' % self.c_header,
            'has_type_id': 'false',
        }
        if self.free_func:
            ccode['free_function'] = '"%s"' % self.free_func
        if self.copy_func:
            ccode['copy_function'] = '"%s"' % self.copy_func
        buf.append('[CCode (%s)]' % ', '.join('%s=%s' % e for e in ccode.items()))
        buf.append('[Compact]')
        buf.append('public class %s {' % self.vala_name)
        if self.create_func:
            buf.append('    [CCode (cname="%s")]' % self.create_func)
            buf.append('    public %s();' % self.vala_name)
        if self.methods:
            for method in self.methods:
                # drop the self parameter and the C prefix before emitting
                del method.params[0]
                method.vala_name = method.vala_name[len(self.basename)-4:]
                for line in method.gen_vala_code(repo):
                    buf.append("    " + line)
        buf.append('}')
        return buf
class Struct(Type):
    """A C struct bound as a Vala struct, or as a compact ref-counted class."""

    def __init__(self, c_name: str, vala_name: str, c_header: str, members: List["StructMember"], comment: str = None,
                 virtual_funcs: List["StructVirtualFunc"] = None):
        super().__init__(c_name, vala_name, c_header, comment)
        self.virtual_funcs = virtual_funcs
        self.members = members
        self.parent: Struct = None
        self.methods: List[Function] = []
        self.is_class: bool = False
        self.ref_func: str = None
        self.unref_func: str = None

    def set_parent(self, parent: "Struct"):
        self.parent = parent

    def set_is_class(self, is_class: bool):
        self.is_class = is_class

    def set_ref_counting(self, ref_func: str, unref_func: str):
        self.ref_func = ref_func
        self.unref_func = unref_func

    def add_method(self, method: Function):
        self.methods.append(method)

    def is_simple_type(self, repo: "Repository") -> bool:
        return False

    def gen_vala_code(self, repo: "Repository") -> List[str]:
        """Emit the Vala declaration: CCode attrs, members, methods, vfuncs."""
        buf = []
        if self.comment:
            buf.extend(utils.vala_comment(self.comment, valadoc=True))
        ccode = {
            'cname': '"%s"' % self.c_name,
            'cheader_filename': '"%s,valacef.h"' % self.c_header,
            'has_type_id': 'false',
        }
        if self.is_class:
            # ref-counted compact class
            buf.append('[Compact]')
            struct_type = 'class'
            if self.ref_func:
                ccode['ref_function'] = '"%s"' % self.ref_func
            if self.unref_func:
                ccode['unref_function'] = '"%s"' % self.unref_func
        else:
            struct_type = 'struct'
            ccode['destroy_function'] = '""'
        buf.append('[CCode (%s)]' % ', '.join('%s=%s' % e for e in ccode.items()))
        if self.parent:
            buf.append('public %s %s: %s {' % (struct_type, self.vala_name, self.parent.vala_name))
        else:
            buf.append('public %s %s {' % (struct_type, self.vala_name))
        for member in self.members:
            type_info = utils.parse_c_type(member.c_type)
            vala_type = repo.resolve_c_type(type_info.c_type)
            # NOTE(review): leftover debug output below — consider removing
            if 'char' in member.c_type:
                print("!!!", member.c_type)
            # char* maps to a nullable Vala string; char** is kept raw
            if member.c_type == 'char*':
                m_type = 'string?'
            elif member.c_type == 'char**':
                m_type = 'char**'
            else:
                m_type = vala_type.vala_name
                if type_info.pointer:
                    m_type += '?'
            if member.comment:
                buf.extend('    ' + line for line in utils.vala_comment(member.comment, valadoc=True))
            if member.c_name != member.vala_name:
                buf.append('    [CCode (cname="%s")]' % member.c_name)
            buf.append('    public %s %s;' % (m_type, member.vala_name))
        # emit a protected default constructor unless a construct method exists
        for method in self.methods:
            if method.construct:
                break
        else:
            buf.append('    protected %s(){}' % self.vala_name)
        for method in self.methods:
            buf.extend('    ' + line for line in method.gen_vala_code(repo))
        for vfunc in self.virtual_funcs or []:
            # first parameter is the self pointer; not exposed in Vala
            params = repo.vala_param_list(vfunc.params[1:], vfunc_of_class=self.c_name)
            ret_type = repo.vala_ret_type(vfunc.ret_type)
            if ret_type == "StringUserfree":
                ret_type = "string?"
            if vfunc.comment:
                buf.extend('    ' + line for line in utils.vala_comment(vfunc.comment, valadoc=True))
            buf.extend([
                '    [CCode (cname="%s", cheader_filename="valacef_api.h")]' % vfunc.c_name,
                '    public %s %s(%s);' % (ret_type, vfunc.vala_name, ', '.join(params)),
            ])
        buf.append('}')
        return buf

    def gen_c_header(self, repo: "Repository") -> List[str]:
        return self._gen_c_code(repo, 'gen_c_header')

    def gen_c_code(self, repo: "Repository") -> List[str]:
        return self._gen_c_code(repo, 'gen_c_code')

    def _gen_c_code(self, repo: "Repository", generator: str) -> List[str]:
        """Emit the C typedef (parent member first) plus each method via *generator*."""
        buf = [
            '#include "%s"' % self.parent.c_header,
        ]
        if self.c_header:
            buf.extend('#include "%s"' % h for h in self.c_header.split(';'))
        buf.extend([
            'typedef struct {',
            '    %s parent;' % self.parent.c_name,
        ])
        for member in self.members:
            type_info = utils.parse_c_type(member.c_type)
            vala_type = repo.resolve_c_type(type_info.c_type)
            buf.append('    %s%s %s;' % ('volatile ' if type_info.volatile else '', vala_type.c_name, member.c_name))
        buf.append('} %s;' % self.c_name)
        for method in self.methods:
            buf.extend('    ' + line for line in getattr(method, generator)(repo))
        return buf
class StructMember:
    """A single data member of a generated struct."""

    def __init__(self, c_type: str, c_name: str, vala_name: str, comment: str = None):
        self.c_type = c_type
        self.c_name = c_name
        self.vala_name = vala_name
        self.comment = utils.reformat_comment(comment, strip_chars=5)
class StructVirtualFunc:
    """A virtual-function slot (vtable entry) of a struct."""

    def __init__(self, c_name: str, vala_name: str, ret_type: str = None, params: List[Tuple[str, str]] = None,
                 comment: str = None):
        self.c_name = c_name
        self.vala_name = vala_name
        self.params = params
        self.comment = utils.reformat_comment(comment, strip_chars=5)
        # normalize 'void' to None so callers can use a truthiness test
        self.ret_type = None if ret_type == 'void' else ret_type
class Typedef(Type):
    """A C typedef bound as an empty derived Vala struct."""

    def __init__(self, c_name: str, vala_name: str, c_type: str, c_header: str):
        super().__init__(c_name, vala_name, c_header)
        # the aliased (underlying) C type
        self.c_type = c_type

    def is_simple_type(self, repo: "Repository") -> bool:
        """A typedef is simple when its underlying type is."""
        c_type = self.c_type
        if c_type in VALA_TYPES or c_type in VALA_ALIASES:
            return True
        return repo.c_types[c_type].is_simple_type(repo)

    def gen_vala_code(self, repo: "Repository") -> List[str]:
        """Emit an empty struct deriving from the aliased type.

        `void*` typedefs get no base type (they stand alone).
        """
        buf = []
        c_type = self.c_type
        if c_type != 'void*':
            simple_type = self.is_simple_type(repo)
            if c_type in VALA_TYPES:
                base_type = c_type
            elif c_type in VALA_ALIASES:
                base_type = VALA_ALIASES[c_type]
            else:
                c_type_obj = repo.c_types[c_type]
                base_type = c_type_obj.vala_name
            if simple_type:
                buf.append('[SimpleType]')
            buf.append('[CCode (cname="%s", has_type_id=false)]' % self.c_name)
            buf.append('public struct %s : %s {' % (self.vala_name, base_type))
            buf.append('}')
        else:
            buf.append('[CCode (cname="%s", has_type_id=false)]' % self.c_name)
            buf.append('public struct %s{' % self.vala_name)
            buf.append('}')
        return buf
class Delegate(Type):
    """A C function-pointer type bound as a Vala delegate.

    When `vfunc_name` is set, the C emitters additionally generate a trampoline
    `<class>_<vfunc>()` that calls the corresponding struct slot.
    """

    def __init__(self, c_name: str, vala_name: str, c_header: str, ret_type: str = None,
                 params: List[Tuple[str, str]] = None, vfunc_of_class=None, vfunc_name=None):
        super().__init__(c_name, vala_name, c_header)
        self.vfunc_name = vfunc_name
        # normalize 'void' to None so "has a return type" is a truthiness test
        self.ret_type = ret_type if ret_type != 'void' else None
        self.params = params
        self.vfunc_of_class = vfunc_of_class

    def gen_vala_code(self, repo: "Repository") -> List[str]:
        params = repo.vala_param_list(self.params, vfunc_of_class=self.vfunc_of_class)
        ret_type = repo.vala_ret_type(self.ret_type)
        buf = [
            '[CCode (cname="%s", cheader_filename="%s", has_target = false)]' % (
                self.c_name, self.c_header),
            'public delegate %s %s(%s);' % (ret_type, self.vala_name, ', '.join(params)),
        ]
        return buf

    def _gen_c_code(self, repo: "Repository", body: bool) -> List[str]:
        """Emit the C typedef and, for vfuncs, the trampoline wrapper.

        cef_string_userfree_t returns are converted from UTF-16 to a plain
        UTF-8 char* inside the wrapper, freeing the intermediate string.
        """
        params = repo.c_param_list(self.params)
        ret_type = repo.c_ret_type(self.ret_type)
        buf = []
        if self.c_header:
            buf.extend('#include "%s"' % h for h in self.c_header.split(';'))
        if self.ret_type:
            header = repo.resolve_c_type(utils.parse_c_type(ret_type).c_type).c_header
            if header:
                buf.append('#include "%s"' % header)
        if self.params:
            headers = (repo.resolve_c_type(utils.parse_c_type(h[0]).c_type).c_header for h in self.params)
            buf.extend('#include "%s"' % h for h in headers if h)
        buf.extend([
            'typedef %s (*%s)(%s);' % (
                ret_type,
                self.c_name,
                ', '.join(params)
            )
        ])
        if self.vfunc_name:
            buf.extend([
                '%s %s_%s(%s)%s' % (
                    ret_type if ret_type != 'cef_string_userfree_t' else 'char*',
                    self.vfunc_of_class,
                    self.vfunc_name,
                    ', '.join(params),
                    ' {' if body else ';'
                )
            ])
            if body:
                call = 'self->%s(%s);' % (self.vfunc_name, ', '.join(p[1] for p in self.params))
                if ret_type == 'void':
                    buf.append('    ' + call)
                elif ret_type == 'cef_string_userfree_t':
                    buf.extend([
                        '    %s __utf16_str__ = %s' % (ret_type, call),
                        '    if (__utf16_str__ == NULL) return NULL;',
                        '    cef_string_utf8_t __utf8_str__ = {};',
                        '    cef_string_utf16_to_utf8(__utf16_str__->str, __utf16_str__->length, &__utf8_str__);',
                        '    cef_string_userfree_free(__utf16_str__);',
                        '    return __utf8_str__.str;'
                    ])
                else:
                    buf.append('    return ' + call)
                buf.append('}')
        return buf

    def gen_c_code(self, repo: "Repository") -> List[str]:
        return self._gen_c_code(repo, True)

    def gen_c_header(self, repo: "Repository") -> List[str]:
        return self._gen_c_code(repo, False)

    def is_simple_type(self, repo: "Repository") -> bool:
        return True
class Repository:
    """Registry of all known C types plus the C <-> Vala translation helpers."""
    enums: Dict[str, Enum]
    structs: Dict[str, Struct]
    typedefs: Dict[str, Typedef]
    c_types: Dict[str, Type]

    def __init__(self, vala_namespace: str, overrides: Any = None):
        self.overrides = overrides
        self.vala_namespace = vala_namespace
        self.enums: Dict[str, Enum] = {}
        self.structs: Dict[str, Struct] = {}
        self.opaque_classes: Dict[str, OpaqueClass] = {}
        self.typedefs: Dict[str, Typedef] = {}
        self.delegates: Dict[str, Delegate] = {}
        self.functions: Dict[str, Function] = {}
        # union lookup over every registered type, keyed by C name
        self.c_types: Dict[str, Type] = {}
        self.basenames = {}

    def add_enum(self, enum: Enum):
        self.enums[enum.c_name] = enum
        self.c_types[enum.c_name] = enum

    def add_struct(self, *structs: Struct):
        for struct in structs:
            self.structs[struct.c_name] = struct
            self.c_types[struct.c_name] = struct

    def add_opaque_class(self, *classes: OpaqueClass):
        for klass in classes:
            self.opaque_classes[klass.c_name] = klass
            self.c_types[klass.c_name] = klass
            self.basenames[klass.basename] = klass

    def add_typedef(self, typedef: Typedef):
        self.typedefs[typedef.c_name] = typedef
        self.c_types[typedef.c_name] = typedef

    def add_delegate(self, delegate: Delegate):
        self.delegates[delegate.c_name or delegate.vala_name] = delegate
        self.c_types[delegate.c_name or delegate.vala_name] = delegate

    def add_function(self, *functions: Function):
        for func in functions:
            self.functions[func.c_name] = func
            self.c_types[func.c_name] = func

    def resolve_c_type(self, c_type: str) -> Type:
        """Return the Type for *c_type*; raises NotImplementedError when unknown."""
        c_type = utils.bare_c_type(c_type)
        if c_type in VALA_TYPES:
            return SimpleType(c_type, c_type, "")
        if c_type in VALA_ALIASES:
            return self.resolve_c_type(VALA_ALIASES[c_type])
        if c_type in GLIB_TYPES:
            return SimpleType(c_type + "*", GLIB_TYPES[c_type], "")
        try:
            return self.c_types[c_type]
        except KeyError:
            raise NotImplementedError(c_type)

    def __repr__(self):
        buf = []
        for enum in self.enums.values():
            buf.append(repr(enum))
        return '\n'.join(buf)

    def gen_vala_code(self):
        """Render every registered type inside a single `namespace {}` block."""
        buf = ['namespace %s {\n' % self.vala_namespace]
        entries = self.enums, self.delegates, self.functions, self.typedefs, self.opaque_classes, self.structs
        for entry in chain.from_iterable(e.values() for e in entries):
            for line in entry.gen_vala_code(self):
                buf.extend(('    ', line, '\n'))
        buf.append('} // namespace %s\n' % self.vala_namespace)
        return ''.join(buf)

    def c_ret_type(self, c_type: str = None) -> str:
        return c_type if c_type else 'void'

    def vala_ret_type(self, c_type: str = None, generics: List[str] = None) -> str:
        """Translate a C return type to its Vala spelling."""
        if generics and c_type in generics:
            return "unowned " + c_type
        if c_type == 'char*':
            return 'string?'
        if c_type is None:
            return "void"
        type_info = utils.parse_c_type(c_type)
        ret_type = self.resolve_c_type(type_info.c_type).vala_name
        if type_info.pointer:
            ret_type += "?"
        return ret_type

    def vala_param_list(self, params: List[Tuple[str, str]] = None, name: str = None, vfunc_of_class: str = None,
                        generics: List[str] = None) -> List[str]:
        """Translate (c_type, name) pairs into Vala parameter declarations.

        A `size_t *count` parameter is folded into the preceding pointer
        parameter as a Vala array-length annotation.
        """
        vala_params = []
        if params is not None:
            array_size = None
            skipped_params = 0
            for i, (p_type, p_name) in enumerate(params):
                # keep positions consistent after folding away count params
                i -= skipped_params
                if p_type == "size_t" and p_name.lower().endswith("count"):
                    array_size = (i + 1 - 0.1, p_type, p_name)
                    skipped_params += 1
                    continue
                if generics and p_type in generics:
                    param = "owned " + p_type
                    assert not array_size
                else:
                    type_info = utils.parse_c_type(p_type)
                    if name:
                        self.override_param(name, p_name, type_info)
                    param = ""
                    if array_size:
                        # the previous param was the array's length
                        if type_info.out:
                            type_info.out = False
                        elif type_info.pointer:
                            type_info.pointer = False
                        type_info.array = True
                        param = '[CCode(array_length_pos=%s, array_length_type="%s")] ' % array_size[0:2]
                        assert p_name == array_size[2][:-5], (p_name, array_size[2])
                        array_size = None
                    elif type_info.ref:
                        param += 'ref '
                    elif type_info.out:
                        param += 'out '
                    else:
                        try:
                            # CEF reference counting: When passing a struct to delegate/function,
                            # increase ref unless it is a self-param of vfunc of that struct.
                            if self.structs[type_info.c_type].is_class and type_info.c_type != vfunc_of_class:
                                param += "owned "
                        except KeyError:
                            pass
                    vala_type = self.resolve_c_type(type_info.c_type).vala_name
                    if vala_type == 'String' and type_info.pointer:
                        param += '' + vala_type + '*'
                    elif vala_type == 'char' and type_info.pointer:
                        param += 'string?'
                    else:
                        param += vala_type
                        if type_info.pointer:
                            param += "?"
                    if type_info.array:
                        param += "[]"
                param += ' ' + p_name
                vala_params.append(param)
        return vala_params

    def c_param_list(self, params: List[Tuple[str, str]] = None) -> List[str]:
        c_params = []
        if params is not None:
            for p_type, p_name in params:
                c_params.append('%s %s' % (p_type, p_name))
        return c_params

    def override_param(self, name: str, p_name: str, type_info: TypeInfo) -> TypeInfo:
        """Apply an optional per-function, per-parameter override hook, if defined."""
        try:
            return getattr(self.overrides, 'param__%s__%s' % (name, p_name))(type_info)
        except AttributeError as e:
            # no override registered for this parameter
            return type_info
| [
"collections.namedtuple",
"valacefgen.utils.reformat_comment",
"valacefgen.utils.vala_comment",
"valacefgen.utils.parse_c_type",
"valacefgen.utils.bare_c_type"
] | [((239, 290), 'collections.namedtuple', 'namedtuple', (['"""EnumValue"""', '"""c_name vala_name comment"""'], {}), "('EnumValue', 'c_name vala_name comment')\n", (249, 290), False, 'from collections import namedtuple\n'), ((417, 448), 'valacefgen.utils.reformat_comment', 'utils.reformat_comment', (['comment'], {}), '(comment)\n', (439, 448), False, 'from valacefgen import utils\n'), ((11867, 11913), 'valacefgen.utils.reformat_comment', 'utils.reformat_comment', (['comment'], {'strip_chars': '(5)'}), '(comment, strip_chars=5)\n', (11889, 11913), False, 'from valacefgen import utils\n'), ((12208, 12254), 'valacefgen.utils.reformat_comment', 'utils.reformat_comment', (['comment'], {'strip_chars': '(5)'}), '(comment, strip_chars=5)\n', (12230, 12254), False, 'from valacefgen import utils\n'), ((18818, 18843), 'valacefgen.utils.bare_c_type', 'utils.bare_c_type', (['c_type'], {}), '(c_type)\n', (18835, 18843), False, 'from valacefgen import utils\n'), ((20255, 20281), 'valacefgen.utils.parse_c_type', 'utils.parse_c_type', (['c_type'], {}), '(c_type)\n', (20273, 20281), False, 'from valacefgen import utils\n'), ((8925, 8958), 'valacefgen.utils.parse_c_type', 'utils.parse_c_type', (['member.c_type'], {}), '(member.c_type)\n', (8943, 8958), False, 'from valacefgen import utils\n'), ((11341, 11374), 'valacefgen.utils.parse_c_type', 'utils.parse_c_type', (['member.c_type'], {}), '(member.c_type)\n', (11359, 11374), False, 'from valacefgen import utils\n'), ((1574, 1620), 'valacefgen.utils.vala_comment', 'utils.vala_comment', (['self.comment'], {'valadoc': '(True)'}), '(self.comment, valadoc=True)\n', (1592, 1620), False, 'from valacefgen import utils\n'), ((3142, 3188), 'valacefgen.utils.vala_comment', 'utils.vala_comment', (['self.comment'], {'valadoc': '(True)'}), '(self.comment, valadoc=True)\n', (3160, 3188), False, 'from valacefgen import utils\n'), ((5824, 5870), 'valacefgen.utils.vala_comment', 'utils.vala_comment', (['self.comment'], {'valadoc': '(True)'}), 
'(self.comment, valadoc=True)\n', (5842, 5870), False, 'from valacefgen import utils\n'), ((7968, 8014), 'valacefgen.utils.vala_comment', 'utils.vala_comment', (['self.comment'], {'valadoc': '(True)'}), '(self.comment, valadoc=True)\n', (7986, 8014), False, 'from valacefgen import utils\n'), ((21227, 21253), 'valacefgen.utils.parse_c_type', 'utils.parse_c_type', (['p_type'], {}), '(p_type)\n', (21245, 21253), False, 'from valacefgen import utils\n'), ((15020, 15048), 'valacefgen.utils.parse_c_type', 'utils.parse_c_type', (['ret_type'], {}), '(ret_type)\n', (15038, 15048), False, 'from valacefgen import utils\n'), ((1979, 2026), 'valacefgen.utils.vala_comment', 'utils.vala_comment', (['value.comment'], {'valadoc': '(True)'}), '(value.comment, valadoc=True)\n', (1997, 2026), False, 'from valacefgen import utils\n'), ((9478, 9526), 'valacefgen.utils.vala_comment', 'utils.vala_comment', (['member.comment'], {'valadoc': '(True)'}), '(member.comment, valadoc=True)\n', (9496, 9526), False, 'from valacefgen import utils\n'), ((10363, 10410), 'valacefgen.utils.vala_comment', 'utils.vala_comment', (['vfunc.comment'], {'valadoc': '(True)'}), '(vfunc.comment, valadoc=True)\n', (10381, 10410), False, 'from valacefgen import utils\n'), ((15209, 15233), 'valacefgen.utils.parse_c_type', 'utils.parse_c_type', (['h[0]'], {}), '(h[0])\n', (15227, 15233), False, 'from valacefgen import utils\n')] |
#import boto3
#import botocore
import sys
import os
import shutil
import json
import pickle
import lmdb
import subprocess
import argparse
import time
import datetime
import S3
import concurrent.futures
import requests
import subprocess
import pymongo
# map size reserved for each lmdb environment: 100 GB (virtual allocation)
map_size = 100 * 1024 * 1024 * 1024
# default endpoint
# REST paths exposed by the GROBID software-mention service
endpoint_pdf = '/annotateSoftwarePDF'
endpoint_txt = '/annotateSoftwareText'
class software_mention_client(object):
    """
    Python client for using the GROBID software mention service.

    Two local lmdb key/value stores keep track of already-processed
    entries and of PDF annotation failures.
    """

    def __init__(self, config_path='./config.json'):
        self.config = None
        # standard lmdb environment for storing processed biblio entry uuid
        self.env = None
        # lmdb environment for keeping track of PDF annotation failures
        self.env_fail = None
        self._load_config(config_path)
        self._init_lmdb()
        if self.config['bucket_name'] is not None and len(self.config['bucket_name']) > 0:
            self.s3 = S3.S3(self.config)
        self.mongo_client = None

    def _load_config(self, path='./config.json'):
        """
        Load the json configuration from *path* into self.config.
        """
        # use a context manager so the config file handle is released
        # (the original open(path).read() leaked the handle)
        with open(path) as config_file:
            self.config = json.loads(config_file.read())

    def _init_lmdb(self):
        # open both environments in write mode
        envFilePath = os.path.join(self.config["data_path"], 'entries')
        self.env = lmdb.open(envFilePath, map_size=map_size)

        envFilePath = os.path.join(self.config["data_path"], 'fail')
        self.env_fail = lmdb.open(envFilePath, map_size=map_size)

    def annotate_directory(self, directory):
        """
        Recursively walk *directory* and annotate every PDF document found,
        dispatching them in batches of config["batch_size"].
        """
        pdf_files = []
        for root, directories, filenames in os.walk(directory):
            for filename in filenames:
                if filename.endswith((".pdf", ".PDF")):
                    print(os.path.join(root, filename))
                    pdf_files.append(os.path.join(root, filename))
                    if len(pdf_files) == self.config["batch_size"]:
                        self.annotate_batch(pdf_files, None, None)
                        pdf_files = []
        # last, possibly incomplete, batch
        if len(pdf_files) > 0:
            self.annotate_batch(pdf_files, None, None, None)

    def annotate_batch(self, pdf_files, out_files=None, dois=None, pmcs=None):
        """
        Annotate a batch of PDF files concurrently.

        Args:
            pdf_files: list of PDF file paths
            out_files: optional list of output JSON paths, aligned with pdf_files
            dois: optional list of DOI, aligned with pdf_files
            pmcs: optional list of PMC identifiers, aligned with pdf_files
        """
        print("annotate_batch", len(pdf_files))
        with concurrent.futures.ProcessPoolExecutor(max_workers=self.config["concurrency"]) as executor:
            for i, pdf_file in enumerate(pdf_files):
                out_file = None if out_files is None else out_files[i]
                doi = None if dois is None else dois[i]
                pmc = None if pmcs is None else pmcs[i]
                # fix: the output path was previously submitted as the
                # undefined name 'file_out', raising a NameError here
                executor.submit(annotate, pdf_file, self.config, out_file, doi, pmc)

    def annotate_collection(self):
        # init lmdb transactions
        txn = self.env.begin(write=True)
        txn_fail = self.env_fail.begin(write=True)
        # TODO: the db will provide additional metadata for each file

    def reprocess_failed(self):
        """
        Re-run the annotation on the entries recorded in the failure lmdb.

        Fix: this method previously existed only inside a stray string
        literal, so the --reprocess command-line option raised AttributeError.
        """
        # TODO: iterate over self.env_fail entries and call annotate() again
        pass

    def reset(self):
        """
        Remove the local lmdb keeping track of the state of advancement of the annotation and
        of the failed entries
        """
        # close environments
        self.env.close()
        self.env_fail.close()

        envFilePath = os.path.join(self.config["data_path"], 'entries')
        shutil.rmtree(envFilePath)

        envFilePath = os.path.join(self.config["data_path"], 'fail')
        shutil.rmtree(envFilePath)

        # re-init the environments
        self._init_lmdb()
def annotate(file_in, config, file_out=None, doi=None, pmc=None):
    """
    Annotate a single PDF with the GROBID software mention service.

    Args:
        file_in: path to the PDF file to process
        config: configuration dict (service host/port, mongo settings)
        file_out: optional path where the JSON result is written; when None
            the result is stored in MongoDB instead
        doi: optional DOI to inject in the resulting JSON
        pmc: optional PMC identifier to inject in the resulting JSON
    """
    url = "http://" + config["software_mention_host"]
    if config["software_mention_port"] is not None:
        url += ":" + str(config["software_mention_port"])
    url += endpoint_pdf
    print("calling... ", url)

    # context manager so the PDF file handle is always released
    # (the original left the handle open)
    with open(file_in, 'rb') as pdf_handle:
        response = requests.post(url, files={'input': pdf_handle})

    jsonObject = None
    if response.status_code >= 500:
        print('[{0}] Server Error'.format(response.status_code))
    elif response.status_code == 404:
        # fix: the original formatted the undefined name 'api_url' here
        print('[{0}] URL not found: [{1}]'.format(response.status_code, url))
    elif response.status_code == 401:
        print('[{0}] Authentication Failed'.format(response.status_code))
    elif response.status_code >= 400:
        print('[{0}] Bad Request'.format(response.status_code))
        # fix: the original printed the undefined name 'ssh_key'
        print(response.content)
    elif response.status_code >= 300:
        print('[{0}] Unexpected redirect.'.format(response.status_code))
    elif response.status_code == 200:
        jsonObject = response.json()
    else:
        print('Unexpected Error: [HTTP {0}]: Content: {1}'.format(response.status_code, response.content))

    if jsonObject is not None:
        print(jsonObject)
        # add file, DOI, date and version info in the JSON, if available
        if doi is not None:
            jsonObject['DOI'] = doi
        if pmc is not None:
            jsonObject['PMC'] = pmc
        jsonObject['file_name'] = os.path.basename(file_in)
        jsonObject['file_path'] = file_in
        jsonObject['date'] = datetime.datetime.now().isoformat()
        # TODO: get the version via the server
        jsonObject['version'] = "0.5.6-SNAPSHOT"

        if file_out is not None:
            # we write the json result into a file
            with open(file_out, "w", encoding="utf-8") as json_file:
                json_file.write(json.dumps(jsonObject))
        else:
            # we store the result in mongo db (this is the common case);
            # fix: the original referenced 'self' inside this module-level
            # function, which raised a NameError
            mongo_client = pymongo.MongoClient(config["mongo_host"], int(config["mongo_port"]))
            mongo_db = mongo_client[config["mongo_db"]]
            inserted_id = mongo_db.annotations.insert_one(jsonObject).inserted_id
if __name__ == "__main__":
parser = argparse.ArgumentParser(description = "GROBID Software Mention recognition client")
parser.add_argument("--data-path", default=None, help="path to the JSON dump file created by biblio-glutton-harvester")
parser.add_argument("--config", default="./config.json", help="path to the config file, default is ./config.json")
parser.add_argument("--reprocess", action="store_true", help="Reprocessed failed PDF")
parser.add_argument("--reset", action="store_true", help="Ignore previous processing states, and re-init the annotation process from the beginning")
parser.add_argument("--file-in", default=None, help="A PDF input file to be processed by the GROBID software mention recognizer")
parser.add_argument("--file-out", default=None, help="Path to output the software mentions in JSON format, extracted from the PDF file-in")
parser.add_argument("--repo-in", default=None, help="Path to directory of PDf files to be processed by the GROBID software mention recognizer")
args = parser.parse_args()
data_path = args.data_path
config_path = args.config
reprocess = args.reprocess
reset = args.reset
file_in = args.file_in
file_out = args.file_out
repo_in = args.repo_in
client = software_mention_client(config_path=config_path)
if reset:
client.reset()
if reprocess:
client.reprocess_failed()
elif repo_in is not None:
client.annotate_directory(repo_in)
elif file_in is not None:
annotate(file_in, client.config, file_out)
elif data_path is not None:
client.annotate_collection()
| [
"json.loads",
"requests.post",
"argparse.ArgumentParser",
"json.dumps",
"os.path.join",
"datetime.datetime.now",
"lmdb.open",
"os.path.basename",
"shutil.rmtree",
"S3.S3",
"os.walk"
] | [((4294, 4328), 'requests.post', 'requests.post', (['url'], {'files': 'the_file'}), '(url, files=the_file)\n', (4307, 4328), False, 'import requests\n'), ((5417, 5442), 'os.path.basename', 'os.path.basename', (['file_in'], {}), '(file_in)\n', (5433, 5442), False, 'import os\n'), ((6236, 6322), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""GROBID Software Mention recognition client"""'}), "(description=\n 'GROBID Software Mention recognition client')\n", (6259, 6322), False, 'import argparse\n'), ((1207, 1230), 'json.loads', 'json.loads', (['config_json'], {}), '(config_json)\n', (1217, 1230), False, 'import json\n'), ((1309, 1358), 'os.path.join', 'os.path.join', (["self.config['data_path']", '"""entries"""'], {}), "(self.config['data_path'], 'entries')\n", (1321, 1358), False, 'import os\n'), ((1378, 1419), 'lmdb.open', 'lmdb.open', (['envFilePath'], {'map_size': 'map_size'}), '(envFilePath, map_size=map_size)\n', (1387, 1419), False, 'import lmdb\n'), ((1443, 1489), 'os.path.join', 'os.path.join', (["self.config['data_path']", '"""fail"""'], {}), "(self.config['data_path'], 'fail')\n", (1455, 1489), False, 'import os\n'), ((1514, 1555), 'lmdb.open', 'lmdb.open', (['envFilePath'], {'map_size': 'map_size'}), '(envFilePath, map_size=map_size)\n', (1523, 1555), False, 'import lmdb\n'), ((1727, 1745), 'os.walk', 'os.walk', (['directory'], {}), '(directory)\n', (1734, 1745), False, 'import os\n'), ((3689, 3738), 'os.path.join', 'os.path.join', (["self.config['data_path']", '"""entries"""'], {}), "(self.config['data_path'], 'entries')\n", (3701, 3738), False, 'import os\n'), ((3747, 3773), 'shutil.rmtree', 'shutil.rmtree', (['envFilePath'], {}), '(envFilePath)\n', (3760, 3773), False, 'import shutil\n'), ((3797, 3843), 'os.path.join', 'os.path.join', (["self.config['data_path']", '"""fail"""'], {}), "(self.config['data_path'], 'fail')\n", (3809, 3843), False, 'import os\n'), ((3852, 3878), 'shutil.rmtree', 'shutil.rmtree', 
(['envFilePath'], {}), '(envFilePath)\n', (3865, 3878), False, 'import shutil\n'), ((980, 998), 'S3.S3', 'S3.S3', (['self.config'], {}), '(self.config)\n', (985, 998), False, 'import S3\n'), ((5506, 5529), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (5527, 5529), False, 'import datetime\n'), ((5803, 5825), 'json.dumps', 'json.dumps', (['jsonObject'], {}), '(jsonObject)\n', (5813, 5825), False, 'import json\n'), ((1888, 1916), 'os.path.join', 'os.path.join', (['root', 'filename'], {}), '(root, filename)\n', (1900, 1916), False, 'import os\n'), ((2045, 2073), 'os.path.join', 'os.path.join', (['root', 'filename'], {}), '(root, filename)\n', (2057, 2073), False, 'import os\n')] |
import gsyIO

# Ask the user for an image save path, then derive a companion
# "<name>_time<ext>" file name by splitting at the last dot.
saved_path = gsyIO.save_image()
print(saved_path)

dot_pos = saved_path.rfind('.')
print(dot_pos)

timed_path = saved_path[:dot_pos] + '_time' + saved_path[dot_pos:]
print(timed_path)
# from tkinter import filedialog
# from tkinter import *
# root = Tk()
# root.filename = filedialog.asksaveasfilename(initialdir = "/",title = "Select file",defaultextension='.jpg',filetypes = (("jpeg files","*.jpg"),('csv', '*.csv'),("all files","*.*")))
# print (root.filename)
# root.destroy()
# =============================================================================
# import tkinter as tk
# import tkinter.messagebox as msgbox
# import os
# import glob
# import csv
#
# from tkinter import filedialog
#
#
# locRoot = tk.Tk()
#
# locRoot.withdraw()
#
# str_file_path = filedialog.asksaveasfilename(initialdir=os.getcwd(),
# title="Save as txt",
# filetypes = [("Text files","*.txt"),
# ('CSV', '*.csv'),
# ("all files","*.*")])
#
# print(str_file_path)
#
# locRoot.destroy()
# =============================================================================
#print(str_file_path)
# =============================================================================
# import sys
# from PyQt5.QtWidgets import QApplication, QWidget, QInputDialog, QLineEdit, QFileDialog
# from PyQt5.QtGui import QIcon
#
# class file_save_qt5(QWidget):
#
# def __init__(self):
# super().__init__()
# self.title = 'PyQt5 file dialogs - pythonspot.com'
# self.left = 10
# self.top = 10
# self.width = 640
# self.height = 480
# self.initUI()
#
# def initUI(self):
# self.setWindowTitle(self.title)
# self.setGeometry(self.left, self.top, self.width, self.height)
#
# # self.openFileNameDialog()
# # self.openFileNamesDialog()
# self.saveFileDialog()
#
# # self.show()
# =============================================================================
# =============================================================================
# def openFileNameDialog(self):
# options = QFileDialog.Options()
# options |= QFileDialog.DontUseNativeDialog
# fileName, _ = QFileDialog.getOpenFileName(self,"QFileDialog.getOpenFileName()", "","All Files (*);;Python Files (*.py)", options=options)
# if fileName:
# print(fileName)
#
# def openFileNamesDialog(self):
# options = QFileDialog.Options()
# options |= QFileDialog.DontUseNativeDialog
# files, _ = QFileDialog.getOpenFileNames(self,"QFileDialog.getOpenFileNames()", "","All Files (*);;Python Files (*.py)", options=options)
# if files:
# print(files)
# =============================================================================
# =============================================================================
# def saveFileDialog(self):
# options = QFileDialog.Options()
# options |= QFileDialog.DontUseNativeDialog
# fileName, _ = QFileDialog.getSaveFileName(self,"QFileDialog.getSaveFileName()","","All Files (*);;Text Files (*.txt)", options=options)
# if fileName:
# print(fileName)
# =============================================================================
# =============================================================================
# def run():
# app = QApplication(sys.argv)
# ex = file_save_qt5()
# sys.exit(app.exec_())
#
# run()
# =============================================================================
# =============================================================================
#
# if __name__ == '__main__':
# app = QApplication(sys.argv)
# ex = file_save_qt5()
# sys.exit(app.exec_())
#
# =============================================================================
# =============================================================================
# from PyQt5 import QtCore, QtGui, QtWidgets
#
# filename, _filter = QtWidgets.QFileDialog.getSaveFileName(None, "Save some Data File", '.', "(*.csv)")
#
# print(filename)
# =============================================================================
# import csv
# import numpy as np
# time = np.arange(0, 1, 0.1)
# data = np.arange(21, 20, -0.1)
# output = open(r'E:\[TEMP]\test.csv', 'w', newline='')
# fieldnames = ['time', 'data']
# writer = csv.DictWriter(output, fieldnames=fieldnames)
# writer.writeheader()
# for item in range(len(time)):
# writer.writerow({'time': time[item], 'data': data[item]})
# output.close()
# import gsyTransforms as trf
# import numpy as np
# import matplotlib.pyplot as plt
# from numpy import sqrt, sin, cos
# CONST_WITH = 1280
# CONST_HEIGHT = 800
# CONST_DPI = 100
# dbl_base_freq = 50
# dbl_base_period = 1 / dbl_base_freq
# time_end = 0.04
# n = 1
# phi_a = 0
# phi_b = -2/3 * np.pi
# phi_c = 2/3 * np.pi
# mag_a = 1.2
# mag_b = 0
# mag_c = 1
# # time vector
# time = np.linspace( 0, time_end, (10 ** 5) )
# # angular freq
# omega = 2 * np.pi * dbl_base_freq
# # base phases of the 3-phase inputs, note, base phases only
# phase_a = omega * time + phi_a
# phase_b = omega * time + phi_b
# phase_c = omega * time + phi_c
# # 3-phase inputs
# # input_a = trf.to_complex(mag_a, (n * phase_a))
# # input_b = trf.to_complex(mag_b, (n * phase_b))
# # input_c = trf.to_complex(mag_c, (n * phase_c))
# input_a = mag_a * (cos(n * phase_a) + 1j * sin(n * phase_a))
# input_b = mag_b * (cos(n * phase_b) + 1j * sin(n * phase_b))
# input_c = mag_c * (cos(n * phase_c) + 1j * sin(n * phase_c))
# # Fortescue
# a_pos, b_pos, c_pos, a_neg, b_neg, c_neg, zero = trf.cal_symm(input_a, input_b, input_c)
# # amplitude invariant Clarke Transform
# alpha, beta, zero = trf.cal_clarke(input_a, input_b, input_c)
# # DSOGI Clarke
# alpha_pos_dsogi, beta_pos_dsogi, alpha_neg_dsogi, beta_neg_dsogi, _ = trf.cal_clarke_dsogi(input_a, input_b, input_c)
# # normal Park Transform
# d, q, _ = trf.cal_park(omega*time, alpha, beta,)
# # print(d)
# # postive Park (DSOGI)
# d_pos_dsogi, q_pos_dsogi, _ = trf.cal_park(omega*time, alpha_pos_dsogi, beta_pos_dsogi)
# # print(d_pos_dsogi)
# # negative Park (DSOGI)
# d_neg_dsogi, q_neg_dsogi, _ = trf.cal_park(omega*time, alpha_neg_dsogi, beta_neg_dsogi)
# ylim_max = 1.1
# ylim_min = -1 * ylim_max
# fig_main = plt.figure(figsize=(CONST_WITH/CONST_DPI, CONST_HEIGHT/CONST_DPI), dpi=CONST_DPI)
# # 3-phase inputs and symmetrical components
# ax1 = plt.subplot(4, 3, 1)
# ax1_a, = ax1.plot(time, input_a, label=r'Phase-A Input', color='r', lw=2)
# ax1_b, = ax1.plot(time, input_b, label=r'Phase-B Input', color='g', lw=2)
# ax1_c, = ax1.plot(time, input_c, label=r'Phase-C Input', color='b', lw=2)
# ax1_legend = plt.legend(handles=[ax1_a, ax1_b, ax1_c], title=r'3-phase inputs', loc='upper right')
# plt.ylim([ylim_min, ylim_max])
# ax2 = plt.subplot(4, 3, 4)
# ax2_a, = ax2.plot(time, a_pos, label=r'Phase-A +', color='r', lw=2)
# ax2_b, = ax2.plot(time, b_pos, label=r'Phase-B +', color='g', lw=2)
# ax2_c, = ax2.plot(time, c_pos, label=r'Phase-C +', color='b', lw=2)
# ax2_legend = plt.legend(handles=[ax2_a, ax2_b, ax2_c], title=r'Positive Seq.', loc='upper right')
# plt.ylim([ylim_min, ylim_max])
# ax3 = plt.subplot(4, 3, 7)
# ax3_a, = ax3.plot(time, a_neg, label=r'Phase-A -', color='r', lw=2)
# ax3_b, = ax3.plot(time, b_neg, label=r'Phase-B -', color='g', lw=2)
# ax3_c, = ax3.plot(time, c_neg, label=r'Phase-C -', color='b', lw=2)
# ax3_legend = plt.legend(handles=[ax3_a, ax3_b, ax3_c], title=r'Negative Seq.', loc='upper right')
# plt.ylim([ylim_min, ylim_max])
# ax4 = plt.subplot(4, 3, 10)
# ax4_zero, = ax4.plot(time, zero, label=r'Zero Seq', color='slateblue', lw=2)
# ax4_legend = plt.legend(handles=[ax4_zero], loc='upper right')
# plt.ylim([ylim_min, ylim_max])
# # Clarke Transfroms
# ax5 = plt.subplot(4, 3, 2)
# ax5_alpha, = ax5.plot(time, alpha, label=r'$\alpha$', color='r', lw=2)
# ax5_beta, = ax5.plot(time, beta, label=r'$\beta$', color='g', lw=2)
# ax5_legend = plt.legend(handles=[ax5_alpha, ax5_beta], title=r'Clarke Transform', loc='upper right')
# plt.ylim([ylim_min, ylim_max])
# ax6 = plt.subplot(4, 3, 5)
# ax6_alpha_pos, = ax6.plot(time, alpha_pos_dsogi, label=r'$\alpha_+$', color='r', lw=2)
# ax6_beta_pos, = ax6.plot(time, beta_pos_dsogi, label=r'$\beta_+$', color='g', lw=2)
# ax6_legend = plt.legend(handles=[ax6_alpha_pos, ax6_beta_pos], title=r'Clarke DSOGI +', loc='upper right')
# plt.ylim([ylim_min, ylim_max])
# ax7 = plt.subplot(4, 3, 8)
# ax7_alpha_neg, = ax7.plot(time, alpha_neg_dsogi, label=r'$\alpha_-$', color='r', lw=2)
# ax7_beta_neg, = ax7.plot(time, beta_neg_dsogi, label=r'$\beta_-$', color='g', lw=2)
# ax7_legend = plt.legend(handles=[ax7_alpha_neg, ax7_beta_neg], title=r'Clarke DSOGI -', loc='upper right')
# plt.ylim([ylim_min, ylim_max])
# ax8 = plt.subplot(4, 3, 11)
# ax8_zero, = ax8.plot(time, zero, label=r'Zero Seq', color='slateblue', lw=2)
# ax8_legend = plt.legend(handles=[ax8_zero], loc='upper right')
# plt.ylim([ylim_min, ylim_max])
# # Parke Transforms
# ax9 = plt.subplot(4, 3, 3)
# ax9_d, = ax9.plot(time, d, label=r'$d$', color='r', lw=2)
# ax9_q, = ax9.plot(time, q, label=r'$q$', color='g', lw=2)
# ax9_legend = plt.legend(handles=[ax9_d, ax9_q], title=r'Park Transform', loc='upper right')
# plt.ylim([ylim_min, ylim_max])
# ax10 = plt.subplot(4, 3, 6)
# ax10_d_pos, = ax10.plot(time, d_pos_dsogi, label=r'$d_+$', color='r', lw=2)
# ax10_q_pos, = ax10.plot(time, q_pos_dsogi, label=r'$q_+$', color='g', lw=2)
# ax10_legend = plt.legend(handles=[ax10_d_pos, ax10_q_pos], title=r'Park DSOGI +', loc='upper right')
# plt.ylim([ylim_min, ylim_max])
# ax11 = plt.subplot(4, 3, 9)
# ax11_d_neg, = ax11.plot(time, d_neg_dsogi, label=r'$d_-$', color='r', lw=2)
# ax11_q_neg, = ax11.plot(time, q_neg_dsogi, label=r'$q_-$', color='g', lw=2)
# ax11_legend = plt.legend(handles=[ax11_d_neg, ax11_q_neg], title=r'Park DSOGI -', loc='upper right')
# plt.ylim([ylim_min, ylim_max])
# ax12 = plt.subplot(4, 3, 12)
# ax12_zero, = ax12.plot(time, zero, label=r'Zero Seq', color='slateblue', lw=2)
# ax12_legend = plt.legend(handles=[ax12_zero], loc='upper right')
# plt.ylim([ylim_min, ylim_max])
# plt.tight_layout()
# plt.show()
# fig_polar = plt.figure(figsize=(CONST_WITH/CONST_DPI, CONST_HEIGHT/CONST_DPI), dpi=CONST_DPI)
# ax_polar1 = plt.subplot(231, projection='polar')
# polar1_input_a = ax_polar1.plot(np.angle(input_a), abs(input_a), label=r'Phase-A Input', color='r', lw=5)
# polar1_input_b = ax_polar1.plot(np.angle(input_b), abs(input_b), label=r'Phase-B Input', color='g', lw=3)
# polar1_input_c = ax_polar1.plot(np.angle(input_c), abs(input_c), label=r'Phase-C Input', color='b', lw=1)
# polar1_legned = ax_polar1.legend(loc='upper left', bbox_to_anchor=(-0.55, 1))
# ax_polar1.set_rmax(1.2)
# ax_polar2 = plt.subplot(234, projection='polar')
# polar2_input_a_pos = ax_polar2.plot(np.angle(a_pos), abs(a_pos), label=r'Phase-A +', color='r', lw=5)
# polar2_input_b_pos = ax_polar2.plot(np.angle(b_pos), abs(b_pos), label=r'Phase-B +', color='g', lw=3)
# polar2_input_c_pos = ax_polar2.plot(np.angle(c_pos), abs(c_pos), label=r'Phase-C +', color='b', lw=1)
# polar2_input_a_neg = ax_polar2.plot(np.angle(a_neg), abs(a_neg), label=r'Phase-A -', color='pink', lw=5)
# polar2_input_b_neg = ax_polar2.plot(np.angle(b_neg), abs(b_neg), label=r'Phase-B -', color='limegreen', lw=3)
# polar2_input_c_neg = ax_polar2.plot(np.angle(c_neg), abs(c_neg), label=r'Phase-C -', color='skyblue', lw=1)
# polar2_zero = ax_polar2.plot(np.angle(zero), abs(zero), label=r'Zero', color='k', lw=1)
# polar2_legned = ax_polar2.legend(loc='lower left', bbox_to_anchor=(-0.55, -0.2))
# ax_polar2.set_rmax(1.2)
# ax_polar3 = plt.subplot(232, projection='polar')
# polar3_alpha = ax_polar3.plot(np.angle(alpha), abs(alpha), label=r'$\alpha$', color='r', lw=2)
# polar3_beta = ax_polar3.plot(np.angle(beta), abs(beta), label=r'$\beta$', color='g', lw=2)
# polar3_legned = ax_polar3.legend(loc='upper right', bbox_to_anchor=(1.3, 1))
# ax_polar3.set_rmax(1.2)
# ax_polar4 = plt.subplot(235, projection='polar')
# polar4_alpha_pos = ax_polar4.plot(np.angle(alpha_pos_dsogi), abs(alpha_pos_dsogi), label=r'$\alpha_+$', color='r', lw=2)
# polar4_beta_pos = ax_polar4.plot(np.angle(beta_pos_dsogi), abs(beta_pos_dsogi), label=r'$\beta_+$', color='g', lw=2)
# polar4_alpha_neg = ax_polar4.plot(np.angle(alpha_neg_dsogi), abs(alpha_neg_dsogi), label=r'$\alpha_-$', color='pink', lw=2)
# polar4_beta_neg = ax_polar4.plot(np.angle(beta_neg_dsogi), abs(beta_neg_dsogi), label=r'$\beta_-$', color='limegreen', lw=2)
# polar4_zero = ax_polar4.plot(np.angle(zero), abs(zero), label=r'Zero', color='k', lw=1)
# polar4_legned = ax_polar4.legend(loc='lower right', bbox_to_anchor=(1.3, -0.2))
# ax_polar4.set_rmax(1.2)
# ax_polar5 = plt.subplot(233, projection='polar')
# polar5_d = ax_polar5.plot(np.angle(d), abs(d), label=r'$d$', color='r', lw=2)
# polar5_q = ax_polar5.plot(np.angle(q), abs(q), label=r'$q$', color='g', lw=2)
# polar5_legned = ax_polar5.legend(loc='upper right', bbox_to_anchor=(1.4, 1))
# ax_polar5.set_rmax(1.2)
# ax_polar6 = plt.subplot(236, projection='polar')
# polar6_d_pos = ax_polar6.plot(np.angle(d_pos_dsogi), abs(d_pos_dsogi), label=r'$d_+$', color='r', lw=2, marker='.')
# polar6_q_pos = ax_polar6.plot(np.angle(q_pos_dsogi), abs(q_pos_dsogi), label=r'$q_+$', color='g', lw=2, marker='.')
# # print(abs(d_pos_dsogi))
# # print(np.angle(d_pos_dsogi))
# polar6_d_neg = ax_polar6.plot(np.angle(d_neg_dsogi), abs(d_neg_dsogi), label=r'$d_-$', color='pink', lw=2, marker='.')
# polar6_q_neg = ax_polar6.plot(np.angle(q_neg_dsogi), abs(q_neg_dsogi), label=r'$q_-$', color='limegreen', lw=2, marker='.')
# polar6_zero = ax_polar6.plot(np.angle(zero), abs(zero), label=r'Zero', color='k', lw=2)
# polar6_legned = ax_polar6.legend(loc='lower right', bbox_to_anchor=(1.4, -0.2))
# ax_polar6.set_rmax(1.2)
# #plt.tight_layout()
# plt.show()
# =============================================================================
# plt.subplot(3, 1, 1)
# plt.plot(time, input_a, time, input_b, time, input_c)
#
# plt.subplot(3, 1, 2)
# plt.plot(time, alpha, time, beta, time, zero)
#
# plt.subplot(3, 1, 3)
# plt.plot(time, d_ddsrf_pos, time, q_ddsrf_pos, time, d_ddsrf_neg, time, q_ddsrf_neg)
# =============================================================================
# =============================================================================
# def draw_clarke(dbl_har_order, dbl_base_freq=50, dbl_cycle=4,
# mag_a=1, mag_b=1, mag_c=1,
# bool_savefig=False, int_dpi=600, str_fig_path=''):
#
# # import modules
# import matplotlib as mpl
# import matplotlib.pyplot as plt
# import numpy as np
# import os
#
# from numpy import pi, cos
# from gsyDqLib import date_time_now
#
# # matplotlib setup
# mpl.rcParams['font.family'] = 'serif'
# mpl.rcParams['text.usetex'] = True
# mpl.rcParams['text.latex.preview'] = True
# mpl.rcParams['text.latex.preamble'] = [r'\boldmath']
# mpl.rcParams['font.weight'] = 'bold'
#
#
# # =============================================================================
# # <Process input arguments>
# # =============================================================================
# # process input harmonic order
# try:
#
# dbl_har_order = float(dbl_har_order)
#
# if dbl_har_order != 0:
#
# dbl_har_order = abs(dbl_har_order)
#
# else:
#
# dbl_har_order = 1
#
#             print(date_time_now() + ' Invalid harmonic order. Harmonic order set to 1')
#
# except:
#
# dbl_har_order = 1
#
#         print(date_time_now() + ' Invalid harmonic order. Harmonic order set to 1')
#
# pass
#
# # process base frequency
# try:
#
# dbl_base_freq = float(dbl_base_freq)
#
# if dbl_base_freq != 0:
#
# dbl_base_freq = abs(dbl_base_freq)
#
# else:
#
# dbl_base_freq = 50.0
#
# print(date_time_now() + ' Invalid base frequency. Base frequency set to 50')
#
# except:
#
# dbl_base_freq = 50.0
#
# print(date_time_now() + ' Invalid base frequency. Base frequency set to 50')
#
# pass
#
# # process how many cycles to display
# try:
#
# dbl_cycle = float(dbl_cycle)
#
# if dbl_cycle != 0:
#
# dbl_cycle = abs(dbl_cycle)
#
# else:
#
# dbl_cycle = 4
#
# print(date_time_now() + ' Invalid display cycles. Set display cycles to 4')
#
# except:
#
# dbl_cycle = 4
#
# print(date_time_now() + ' Invalid display cycles. Set display cycles to 4')
#
# pass
#
# # process whether to save the figure
# try:
#
# bool_savefig = bool(bool_savefig)
#
# except:
#
# bool_savefig = False
#
# pass
#
# # process dpi
# try:
#
# int_dpi = int(int_dpi)
#
# if int_dpi != 0:
#
# int_dpi = int_dpi
#
# else:
#
# int_dpi = 600
#
# print(date_time_now() + ' Invalid dpi. Set dpi to 600')
#
# except:
#
# int_dpi = 600
#
# print(date_time_now() + ' Invalid dpi. Set dpi to 600')
#
# pass
#
# # process figure path
# try:
#
# str_fig_path = str(str_fig_path)
#
# if bool_savefig == True:
#
# if len(str_fig_path) == 0:
#
# str_fig_path = 'Figure_' + str( int(np.random.rand() * 1e6) ) + '.png'
#
# str_fig_path = os.path.join(os.getcwd(), str_fig_path)
#
# else:
#
# pass
#
# else:
#
# pass
#
# except:
#
# str_fig_path = 'Figure_' + str( int(np.random.rand() * 1e6) ) + '.png'
#
# str_fig_path = os.path.join(os.getcwd(), str_fig_path)
#
# print(date_time_now() + ' Invalid figure path. Set figure path to "' + str_fig_path + '"')
#
# pass
# # =============================================================================
# # </Process input arguments>
# # =============================================================================
#
#
# # =============================================================================
# # <Make data, titles, legends>
# # =============================================================================
# int_remainder = np.mod(dbl_har_order, 3)
#
# dbl_base_period = 1 / dbl_base_freq
#
# time_end = dbl_base_period / dbl_har_order * dbl_cycle
#
# # time vector
# time = np.linspace( 0, time_end, (10 ** 5) )
#
# # angular freq
# omega = 2 * np.pi * dbl_base_freq
#
# # base phases of the 3-phase inputs, note, base phases only
# phase_a = omega * time
# phase_b = omega * time - (2 / 3 * pi)
# phase_c = omega * time + (2 / 3 * pi)
#
# # 3-phase inputs
# input_a = mag_a * cos(dbl_har_order * phase_a)
# input_b = mag_b * cos(dbl_har_order * phase_b)
# input_c = mag_c * cos(dbl_har_order * phase_c)
#
# # amplitude invariant Clarke transform
# alpha, beta, zero = cal_clarke(input_a, input_b, input_c)
#
# # legend labels for the 3-phase inputs
# str_a_lbl = r'$a = cos(' + str(dbl_har_order) + r'\cdot \omega t)$'
# str_b_lbl = r'$b = cos[' + str(dbl_har_order) + r'\cdot (\omega t - \frac{2}{3}\pi)]$'
# str_c_lbl = r'$c = cos[' + str(dbl_har_order) + r'\cdot (\omega t + \frac{2}{3}\pi)]$'
#
# # legend labels for the Clarke transform
# str_alpha_lbl = r'$\alpha = \frac{2}{3} (a - \frac{1}{2} b - \frac{1}{2} c)$'
#
# str_beta_lbl = r'$\beta = \frac{2}{3} ( 0 + \frac{\sqrt{3}}{2} b - \frac{\sqrt{3}}{2} c)$'
#
# str_zero_lbl = r'$Zero = \frac{2}{3} (\frac{1}{2} a + \frac{1}{2} b + \frac{1}{2} c)$'
#
# # condition, coordinate 1 title
# if all( x == mag_a for x in (mag_a, mag_b, mag_c) ):
#
# if int_remainder == 1:
#
# str_ax1_title = ( r'$\textbf{Three-Phase Inputs, } \omega =2 \pi \times'
# + str(dbl_har_order) + r'\times' + str(dbl_base_freq)
# + r'\textbf{ (positive sequence)}$' )
#
# elif int_remainder == 2:
#
# str_ax1_title = ( r'$\textbf{Three-Phase Inputs, } \omega =2 \pi \times'
# + str(dbl_har_order) + r'\times' + str(dbl_base_freq)
# + r'\textbf{ (negative sequence)}$' )
#
# elif int_remainder == 0:
#
# str_ax1_title = ( r'$\textbf{Three-Phase Inputs, } \omega =2 \pi \times'
# + str(dbl_har_order) + r'\times' + str(dbl_base_freq)
# + r'\textbf{ (zero sequence)}$' )
#
# else:
#
# str_ax1_title = ( r'$\textbf{Three-Phase Inputs, } \omega =2 \pi \times'
# + str(dbl_har_order) + r'\times' + str(dbl_base_freq)
# + r'$' )
#
# else:
#
# str_ax1_title = ( r'$\textbf{Three-Phase Inputs, } \omega =2 \pi \times'
# + str(dbl_har_order) + r'\times' + str(dbl_base_freq)
# + r'$' )
#
# # coordinate 2 title
# str_ax2_title = r'$\textbf{Outputs of the General Amplitude Invariant Clarke Transform}$'
# # =============================================================================
# # </Make data, titles, legends>
# # =============================================================================
#
#
# # =============================================================================
# # <Main figure setup>
# # =============================================================================
# # make main figure
# fig = plt.figure(figsize=(900.0/100.0, 600.0/100.0), dpi = 100.0)
#
# # adjust spacing between subplots
# fig.subplots_adjust(hspace=0.5)
#
# # <coordnate 1> =============================================================================
# # make coordinate 1
# ax1 = plt.subplot(2, 1, 1)
#
# # set coordinate 1 title
# ax1.set_title(str_ax1_title, fontsize=14, fontweight='bold', y=1.2)
#
# # set coordinate 1 horizontal line
# ax1.axhline(y=0, color='k', lw=3)
#
# # plot 3-phase inputs
# ax1_input_a, = plt.plot(time, input_a, color='r', lw=2, label=str_a_lbl)
# ax1_input_b, = plt.plot(time, input_b, color='g', lw=2, label=str_b_lbl)
# ax1_input_c, = plt.plot(time, input_c, color='b', lw=2, label=str_c_lbl)
#
# # get automatic y limits
# y_min, y_max = ax1.get_ylim()
#
# # set limits and grid lines
# plt.xlim([0, time_end])
# plt.ylim([y_min, y_max])
# plt.grid(True)
#
# # range arguments for ploting period helping lines
# rng_start = dbl_base_period / dbl_har_order
# rng_stop = time_end + rng_start
# rng_step = rng_start
#
# # plot period helping lines
# for item in np.arange(rng_start, rng_stop, rng_step):
#
# plt.plot([item, item], [y_min, y_max],
# linestyle='--', linewidth=2, color='k')
#
# # set legend
# ax1_lgd = plt.legend(handles=[ax1_input_a, ax1_input_b, ax1_input_c],
# loc='upper center', fontsize=11, bbox_to_anchor=(0.5, 1.25),
# shadow=False, fancybox=True, ncol=3)
#
# # set legend transparence
# ax1_lgd.get_frame().set_alpha(1.0)
#
# # set y label
# ax1.set_ylabel(r'\textbf{Amplitude}', fontweight='bold', fontsize=12)
# # </coordnate 1> =============================================================================
#
# # <coordnate 2> =============================================================================
# # make coordinate 2
# ax2 = plt.subplot(2, 1, 2)
#
# # set coordinate 2 title
# ax2.set_title(str_ax2_title, fontsize=14, fontweight='bold', y=1.23)
#
# # plot coordinate 2 horizontal line
# ax2.axhline(y=0, color='k', lw=3)
#
# # plot Clarke transform components
# ax2_alpha, = plt.plot(time, alpha, color='r', lw=2, label=str_alpha_lbl)
# ax2_beta, = plt.plot(time, beta, color='g', lw=2, label=str_beta_lbl)
# ax2_zero, = plt.plot(time, zero, color='b', lw=2, label=str_zero_lbl)
#
# # get automatic y limits
# # y_min, y_max = ax2.get_ylim()
#
# # set coordinate 2 limits
# plt.xlim([0, time_end])
# plt.ylim([y_min, y_max])
# plt.grid(True)
#
# # plot period helping lines
# for item in np.arange(rng_start, rng_stop, rng_step):
#
# plt.plot([item, item], [y_min, y_max], linestyle='--', linewidth=2, color='k')
#
# # set coordinate 2 legend
# ax2_lgd = plt.legend(handles=[ax2_alpha, ax2_beta, ax2_zero], loc='upper center',
# fontsize=11, bbox_to_anchor=(0.5, 1.27),
# shadow=False, fancybox=True, ncol=3)
#
# # set legend transparence
# ax2_lgd.get_frame().set_alpha(1.0)
#
# # set labels
# ax2.set_xlabel(r'\textbf{Time (s)}', fontweight='bold', fontsize=12)
# ax2.set_ylabel(r'\textbf{Amplitude}', fontweight='bold', fontsize=12)
# # </coordnate 2> =============================================================================
#
# plt.show()
# # =============================================================================
# # </Main figure setup>
# # =============================================================================
#
#
# # =============================================================================
# # <Condition, save figure>
# # =============================================================================
# if bool_savefig == True:
#
# plt.tight_layout()
#
# fig.savefig(str_fig_path, dpi=int_dpi)
#
# plt.tight_layout()
#
# print( date_time_now() + 'Figure saved as:"' + str_fig_path + '"' )
#
# plt.close()
#
#
# else:
#
# pass
# # =============================================================================
# # </Condition, save figure>
# # =============================================================================
#
# draw_clarke(1, mag_a=0.7, mag_b=1.2, mag_c=0.6)
# =============================================================================
# =============================================================================
#
# import numpy as np
# import os
#
# for item in (1.7, 2.4, 5.6):
#
# path = r'J:\_Clarke & Park\Figures\Clarke_unbalanced'
#
# path = os.path.join(path, str(item) + '.png')
#
# draw_clarke(item, bool_savefig=True, str_fig_path=path)
# =============================================================================
| [
"gsyIO.save_image"
] | [((29, 47), 'gsyIO.save_image', 'gsyIO.save_image', ([], {}), '()\n', (45, 47), False, 'import gsyIO\n')] |
# -*- coding: utf-8 -*-
__version__ = "0.1.0"
# numeric version tuple derived from the dotted string above, e.g. (0, 1, 0)
version_info = tuple([int(num) for num in __version__.split('.')])
import os
import gevent
import zmq.green as zmq
from geventwebsocket.handler import WebSocketHandler
import paste.urlparser
import msgpack
import json
def main():
    '''Set up zmq context and greenlets for all the servers, then launch the web
    browser and run the data producer'''
    context = zmq.Context()
    # zeromq: tcp to inproc gateway
    job_zmq = gevent.spawn(zmq_server, context)
    # websocket server: copies inproc zmq messages to websocket
    ws_server = gevent.pywsgi.WSGIServer(
        ('', 8004), WebSocketApp(context),
        handler_class=WebSocketHandler)
    # static file server: serves the web UI from this module's directory
    http_server = gevent.pywsgi.WSGIServer(
        ('', 8003),
        paste.urlparser.StaticURLParser(os.path.dirname(__file__)))
    ws_server.start()
    http_server.start()
    try:
        # Block on the gateway greenlet; both servers run in the gevent hub.
        job_zmq.join()
    except KeyboardInterrupt:
        # NOTE(review): http_server is never killed on Ctrl-C -- confirm intended.
        job_zmq.kill()
        ws_server.kill()
def zmq_server(context):
    '''Funnel messages coming from the external tcp socket to an inproc socket'''
    sock_incoming = context.socket(zmq.SUB)
    # Empty subscription prefix = subscribe to every topic.
    # NOTE(review): a str prefix suggests Python 2; py3 pyzmq wants bytes b"".
    sock_incoming.setsockopt(zmq.SUBSCRIBE, "")
    sock_incoming.connect('tcp://localhost:8002')
    sock_outgoing = context.socket(zmq.PUB)
    sock_outgoing.bind('inproc://queue')
    while True:
        # Re-encode each msgpack payload as JSON for browser consumption.
        msg = sock_incoming.recv()
        sock_outgoing.send(json.dumps(msgpack.loads(msg)))
        gevent.sleep(0.05)  # throttle, and yield to the other greenlets
class WebSocketApp(object):
    '''Funnel messages coming from an inproc zmq socket to the websocket'''
    def __init__(self, context):
        # Shared zmq context; each connection gets its own SUB socket.
        self.context = context
    def __call__(self, environ, start_response):
        # WSGI entry point (one call per websocket client connection).
        ws = environ['wsgi.websocket']
        sock = self.context.socket(zmq.SUB)
        sock.setsockopt(zmq.SUBSCRIBE, "")  # receive all topics
        sock.connect('inproc://queue')
        while True:
            # Forward every inproc message verbatim to the browser.
            msg = sock.recv()
            ws.send(msg)
# Script entry point: start the gateway, websocket and static-file servers.
if __name__ == '__main__':
    main()
| [
"gevent.sleep",
"os.path.dirname",
"zmq.green.Context",
"msgpack.loads",
"gevent.spawn"
] | [((416, 429), 'zmq.green.Context', 'zmq.Context', ([], {}), '()\n', (427, 429), True, 'import zmq.green as zmq\n'), ((481, 514), 'gevent.spawn', 'gevent.spawn', (['zmq_server', 'context'], {}), '(zmq_server, context)\n', (493, 514), False, 'import gevent\n'), ((1450, 1468), 'gevent.sleep', 'gevent.sleep', (['(0.05)'], {}), '(0.05)\n', (1462, 1468), False, 'import gevent\n'), ((808, 833), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (823, 833), False, 'import os\n'), ((1421, 1439), 'msgpack.loads', 'msgpack.loads', (['msg'], {}), '(msg)\n', (1434, 1439), False, 'import msgpack\n')] |
import pytest
import tempfile
import os
import io
import logging
from cellpy import log
from cellpy import prms
from cellpy import prmreader
from . import fdv
log.setup_logging(default_level="DEBUG")
config_file_txt = """---
Batch:
color_style_label: seaborn-deep
dpi: 300
fig_extension: png
figure_type: unlimited
markersize: 4
symbol_label: simple
DataSet:
nom_cap: 3579
Db:
db_data_start_row: 2
db_header_row: 0
db_search_end_row: -1
db_search_start_row: 2
db_table_name: db_table
db_type: simple_excel_reader
db_unit_row: 1
DbCols:
active_material: !!python/tuple
- mass_active_material
- float
batch: !!python/tuple
- batch
- str
cell_name: !!python/tuple
- cell
- str
cell_type: !!python/tuple
- cell_type
- cat
cellpy_file_name: !!python/tuple
- cellpy_file_name
- str
comment_cell: !!python/tuple
- comment_cell
- str
comment_general: !!python/tuple
- comment_general
- str
comment_slurry: !!python/tuple
- comment_slurry
- str
exists: !!python/tuple
- exists
- bol
experiment_type: !!python/tuple
- experiment_type
- cat
file_name_indicator: !!python/tuple
- file_name_indicator
- str
freeze: !!python/tuple
- freeze
- bol
group: !!python/tuple
- group
- int
id: !!python/tuple
- id
- int
label: !!python/tuple
- label
- str
loading: !!python/tuple
- loading_active_material
- float
project: !!python/tuple
- project
- str
raw_file_names: !!python/tuple
- raw_file_names
- list
selected: !!python/tuple
- selected
- bol
sub_batch_01: !!python/tuple
- b01
- str
sub_batch_02: !!python/tuple
- b02
- str
sub_batch_03: !!python/tuple
- b03
- str
sub_batch_04: !!python/tuple
- b04
- str
sub_batch_05: !!python/tuple
- b05
- str
sub_batch_06: !!python/tuple
- b06
- str
sub_batch_07: !!python/tuple
- b07
- str
total_material: !!python/tuple
- mass_total
- float
FileNames: {}
Instruments:
custom_instrument_definitions_file: null
tester: arbin
Arbin:
chunk_size: null
detect_subprocess_need: false
max_chunks: null
max_res_filesize: 150000000
office_version: 64bit
sub_process_path: None
use_subprocess: false
Paths:
cellpydatadir: cellpy_data/h5
db_filename: cellpy_db.xlsx
db_path: cellpy_data/db
filelogdir: cellpy_data/log
outdatadir: cellpy_data/out
rawdatadir: cellpy_data/raw
Reader:
auto_dirs: true
cellpy_datadir: null
chunk_size: null
cycle_mode: anode
daniel_number: 5
ensure_step_table: false
filestatuschecker: size
force_all: false
force_step_table_creation: true
last_chunk: null
limit_loaded_cycles: null
load_only_summary: false
max_chunks: null
max_res_filesize: 150000000
raw_datadir: null
select_minimal: false
sep: ;
sorted_data: true
use_cellpy_stat_file: false
...
"""
config_file = io.StringIO(config_file_txt)
@pytest.fixture(scope="module")
def cellpy_data_instance():
    """Module-scoped fixture: one fresh CellpyData instance shared by all tests."""
    from cellpy import cellreader

    return cellreader.CellpyData()
@pytest.fixture()
def clean_dir():
    """Fixture: path to a fresh temporary directory.

    NOTE(review): the directory is never removed after the test -- consider
    pytest's tmp_path fixture or an explicit cleanup.
    """
    new_path = tempfile.mkdtemp()
    return new_path
def test_set_prm_inside_cellpy(cellpy_data_instance):
    # Placeholder: intended to verify setting prms on a live CellpyData
    # instance, but no assertions have been written yet.
    pass
def test_save_prm_file(clean_dir):
    """Round-trip a prm file: write, read, mutate, write, re-read, verify."""
    tmp_config_file_name = os.path.join(clean_dir, "cellpy_test_config_1.yml")
    with open(tmp_config_file_name, "w") as f:
        f.write(config_file_txt)
    prmreader._read_prm_file(tmp_config_file_name)
    # NOTE(review): set via item access, asserted via attribute access below --
    # relies on prms.Instruments supporting both; confirm.
    prms.Instruments["tester"] = "biologics"
    prms.Reader.cycle_mode = "cathode"
    prmreader._write_prm_file(tmp_config_file_name)
    prmreader._read_prm_file(tmp_config_file_name)
    assert prms.Instruments.tester == "biologics"
    # with open(tmp_config_file_name, "r") as f:
    #     lines = f.readlines()
    # for line in lines:
    #     print(line, end="")
| [
"cellpy.prmreader._read_prm_file",
"io.StringIO",
"os.path.join",
"tempfile.mkdtemp",
"cellpy.prmreader._write_prm_file",
"cellpy.cellreader.CellpyData",
"pytest.fixture",
"cellpy.log.setup_logging"
] | [((160, 200), 'cellpy.log.setup_logging', 'log.setup_logging', ([], {'default_level': '"""DEBUG"""'}), "(default_level='DEBUG')\n", (177, 200), False, 'from cellpy import log\n'), ((2902, 2930), 'io.StringIO', 'io.StringIO', (['config_file_txt'], {}), '(config_file_txt)\n', (2913, 2930), False, 'import io\n'), ((2934, 2964), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (2948, 2964), False, 'import pytest\n'), ((3066, 3082), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (3080, 3082), False, 'import pytest\n'), ((3039, 3062), 'cellpy.cellreader.CellpyData', 'cellreader.CellpyData', ([], {}), '()\n', (3060, 3062), False, 'from cellpy import cellreader\n'), ((3115, 3133), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (3131, 3133), False, 'import tempfile\n'), ((3283, 3334), 'os.path.join', 'os.path.join', (['clean_dir', '"""cellpy_test_config_1.yml"""'], {}), "(clean_dir, 'cellpy_test_config_1.yml')\n", (3295, 3334), False, 'import os\n'), ((3420, 3466), 'cellpy.prmreader._read_prm_file', 'prmreader._read_prm_file', (['tmp_config_file_name'], {}), '(tmp_config_file_name)\n', (3444, 3466), False, 'from cellpy import prmreader\n'), ((3555, 3602), 'cellpy.prmreader._write_prm_file', 'prmreader._write_prm_file', (['tmp_config_file_name'], {}), '(tmp_config_file_name)\n', (3580, 3602), False, 'from cellpy import prmreader\n'), ((3607, 3653), 'cellpy.prmreader._read_prm_file', 'prmreader._read_prm_file', (['tmp_config_file_name'], {}), '(tmp_config_file_name)\n', (3631, 3653), False, 'from cellpy import prmreader\n')] |
import nltk
from rouge import Rouge
def padding_sequence(sentence, length=200, padding="<padding>"):
    """Truncate the token list to *length* items and right-pad with *padding*."""
    truncated = sentence[:length]
    pad_count = length - len(truncated)
    return truncated + [padding] * pad_count
_rouge = Rouge()  # shared module-level scorer instance
def rouge_score(s1, s2):
    """Rouge score between two token sequences; 0 if either sequence is empty."""
    return 0 if not s1 or not s2 else _rouge.calc_score([s1], [s2])
def get_fake_answer_json(text, ref_answer):
    """
    Tokenize the passage and reference answer and locate the best span.

    :param text: str, the original passage text
    :param ref_answer: str, the original marked answer
    :returns: tuple of (tokenized passage, [start, end] answer spans,
        fake-answer tokens). The end index in the spans is inclusive,
        so the slice below goes to spans[1] + 1.
    """
    tokened_text, tokened_answer = [nltk.word_tokenize(t) for t in [text, ref_answer]]
    spans = _get_answer_spans(tokened_text, tokened_answer)
    # Fix: the previous implementation had an unreachable dict literal after
    # this return statement (dead code); it has been removed. Callers receive
    # the same tuple as before.
    return (tokened_text, spans, tokened_text[spans[0]: spans[1]+1])
def recall_score(text, truth):
    """Token-set recall of *text* against *truth*: |unique common| / |unique truth|."""
    truth_set = set(truth)
    return len(truth_set.intersection(text)) / len(truth_set)
def _get_answer_spans(text, ref_answer):
"""
Based on Rouge-L Score to get the best answer spans.
:param text: list of tokens in text
:param ref_answer: the human's answer, also tokenized
:returns max_spans: list of two numbers, marks the start and end position with the max score
"""
max_score = -1.
max_spans = [0, len(text)-1]
for start, _token in enumerate(text):
if _token not in ref_answer: continue
for end in range(len(text)-1, start-1, -1):
scorer = recall_score # rouge_score, rouge score is too slow
_score = scorer(text[start: end+1], ref_answer)
if _score > max_score:
max_score = _score
max_spans = [start, end]
if max_score > 0.9:
return max_spans
# Warning: the end pointed character is inclueded in fake answer
return max_spans
| [
"rouge.Rouge",
"nltk.word_tokenize"
] | [((202, 209), 'rouge.Rouge', 'Rouge', ([], {}), '()\n', (207, 209), False, 'from rouge import Rouge\n'), ((606, 627), 'nltk.word_tokenize', 'nltk.word_tokenize', (['t'], {}), '(t)\n', (624, 627), False, 'import nltk\n')] |
# included from libs/combination.py
"""
Combination
Not fastest but PyPy compatible version
"""
MOD = 998_244_353
K = 301
def makeInverseTable(K=K, MOD=MOD):
    """Return a table inv with inv[i] == i^-1 mod MOD for i in [1, K].

    MOD must be prime. Uses the standard recurrence
    inv[i] = -(MOD // i) * inv[MOD % i] mod MOD.

    >>> invs = makeInverseTable(10)
    >>> [i * invs[i] % MOD for i in range(1, 10)]
    [1, 1, 1, 1, 1, 1, 1, 1, 1]
    """
    inv = [1] * (K + 1)
    for i in range(2, K + 1):
        quot, rem = divmod(MOD, i)
        inv[i] = -inv[rem] * quot % MOD
    return inv
def makeFactorialTable(K=K, MOD=MOD):
    """Return a table f with f[i] == i! mod MOD for i in [0, K].

    >>> fs = makeFactorialTable(10, 23)
    >>> fs
    [1, 1, 2, 6, 1, 5, 7, 3, 1, 9, 21]
    >>> import math
    >>> fs == [math.factorial(i) % 23 for i in range(11)]
    True
    """
    table = [1] * (K + 1)
    acc = 1
    for i in range(2, K + 1):
        acc = acc * i % MOD
        table[i] = acc
    return table
def makeInvFactoTable(inv, K=K, MOD=MOD):
    """Return a table g with g[i] == (i!)^-1 mod MOD for i in [0, K].

    `inv` is an elementwise inverse table (e.g. from makeInverseTable).
    You cannot simply do inv[facto[i]], because facto[i] may exceed K, so
    the inverse factorials are accumulated as running products of the
    elementwise inverses instead.
    """
    table = [1] * (K + 1)
    acc = 1
    for i in range(2, K + 1):
        acc = acc * inv[i] % MOD
        table[i] = acc
    return table
def combination(n, k, facto, invf, MOD=MOD):
    """Return C(n, k) mod MOD given factorial and inverse-factorial tables."""
    return facto[n] * invf[k] % MOD * invf[n - k] % MOD
def comb_rep(n, k, facto, invf, MOD=MOD):
    """Combinations with repetition: C(n + k - 1, k) mod MOD."""
    return facto[n + k - 1] * invf[k] % MOD * invf[n - 1] % MOD
class Comb:
    """Convenience wrapper bundling the three precomputed tables for C(n, k)."""
    def __init__(self, maxValue, modulo):
        # Tables are valid for arguments up to maxValue, modulo a prime.
        self.maxValue = maxValue
        self.modulo = modulo
        self.facto = makeFactorialTable(maxValue, modulo)
        self.inv = makeInverseTable(maxValue, modulo)
        self.invf = makeInvFactoTable(self.inv, maxValue, modulo)

    def comb(self, n, k):
        """Return C(n, k) modulo self.modulo."""
        return combination(n, k, self.facto, self.invf, self.modulo)
# end of libs/combination.py
# included from snippets/main.py
def debug(*x, msg=""):
    """Emit *msg* followed by the values on stderr (keeps stdout clean)."""
    from sys import stderr
    print(msg, *x, file=stderr)
def solve(N, K, AS):
    # For each x in 1..K, print sum over pairs i<j of (A_i + A_j)^x mod MOD.
    MOD = 998_244_353
    div2 = pow(2, MOD - 2, MOD)  # modular inverse of 2 (Fermat; MOD is prime)
    # sumTable[p] = S_p = sum_i A_i^p mod MOD; S_0 = N, hence the fill value.
    sumTable = [N] * (K + 2)
    # for x in range(1, K + 1):
    #     s = 0
    #     for a in AS:
    #         s += pow(a, x, MOD)
    #         s %= MOD
    #     sumTable[x] = s
    # Incremental powers: ps[i] holds A_i^x for the current x (avoids pow()).
    ps = AS[:]
    for x in range(1, K + 1):
        s = 0
        for i in range(N):
            s += ps[i]
            s %= MOD
            ps[i] *= AS[i]
            ps[i] %= MOD
        sumTable[x] = s
    c = Comb(K + 1, MOD)
    for x in range(1, K + 1):
        ret = 0
        # Binomial expansion over all ordered pairs (i, j):
        #   sum_{i,j} (A_i + A_j)^x = sum_{i=0..x} C(x, i) * S_{x-i} * S_i
        for i in range(x + 1):
            ret += c.comb(x, i) * sumTable[x - i] * sumTable[i]
            ret %= MOD
        # Subtract the diagonal terms (i == j), i.e. sum_i (2 A_i)^x ...
        p = pow(2, x, MOD)
        ret -= sumTable[x] * p
        ret %= MOD
        # ... then halve to keep each unordered pair i<j exactly once.
        ret *= div2
        ret %= MOD
        print(ret)
def main():
    """Read N, K and the N values from stdin, then solve."""
    # parse input
    N, K = map(int, input().split())
    AS = list(map(int, input().split()))
    solve(N, K, AS)
# tests
T1 = """
3 3
1 2 3
"""
TEST_T1 = """
>>> as_input(T1)
>>> main()
12
50
216
"""
T2 = """
10 10
1 1 1 1 1 1 1 1 1 1
"""
TEST_T2 = """
>>> as_input(T2)
>>> main()
90
180
360
720
1440
2880
5760
11520
23040
46080
"""
T3 = """
2 5
1234 5678
"""
TEST_T3 = """
>>> as_input(T3)
>>> main()
6912
47775744
805306038
64822328
838460992
"""
def _test():
    """Run module doctests plus the TEST_* docstring examples defined above."""
    import doctest
    doctest.testmod()
    g = globals()
    for k in sorted(g):
        if k.startswith("TEST_"):
            # Each TEST_* global is a doctest script exercising main().
            doctest.run_docstring_examples(g[k], g, name=k)
def as_input(s):
    """Test helper: route subsequent input()/read() calls to the given string.

    Replaces the module-level `input` and `read` bindings with lambdas that
    serve lines/bytes from *s* (stripped), mimicking sys.stdin.buffer.
    """
    import io
    buf = io.StringIO(s.strip())
    module_ns = globals()
    module_ns["input"] = lambda: bytes(buf.readline(), "ascii")
    module_ns["read"] = lambda: bytes(buf.read(), "ascii")
if __name__ == "__main__":
    import sys
    # Read raw bytes for speed; solve()/main() split on whitespace anyway.
    input = sys.stdin.buffer.readline
    read = sys.stdin.buffer.read
    if sys.argv[-1] == "-t":
        # "-t" flag: run the doctest suite instead of solving.
        print("testing")
        _test()
        sys.exit()
    main()
| [
"doctest.testmod",
"doctest.run_docstring_examples",
"sys.exit"
] | [((3846, 3863), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (3861, 3863), False, 'import doctest\n'), ((4428, 4438), 'sys.exit', 'sys.exit', ([], {}), '()\n', (4436, 4438), False, 'import sys\n'), ((3952, 3999), 'doctest.run_docstring_examples', 'doctest.run_docstring_examples', (['g[k]', 'g'], {'name': 'k'}), '(g[k], g, name=k)\n', (3982, 3999), False, 'import doctest\n')] |
# -*- coding: utf-8 -*-
import io
import json
import requests
import time
# Add your feedly API credentials
user_id = ''
access_token = ''
def get_saved_items(user_id, access_token, continuation = None):
    """Fetch all saved items from the Feedly cloud API, following pagination.

    Recurses with the API's `continuation` token until no more pages remain
    and returns the accumulated list of item dicts. Exits the process with
    status 1 on any non-200 response.
    """
    headers = {'Authorization' : 'OAuth ' + access_token}
    url = 'https://cloud.feedly.com/v3/streams/contents?streamId=user/' + user_id + '/tag/global.saved&count=10000'
    if continuation:
        url += '&continuation=' + continuation
    print('Requesting saved items')
    r = requests.get(url, headers = headers)
    if r.status_code == 200:
        r.encoding = 'UTF-8'
        items = r.json()['items']
        print(len(items))
        # NOTE(review): r.json() is re-parsed several times below; caching the
        # parsed body in a local would avoid redundant work.
        if (r.json().get('continuation')):
            print(r.json()['continuation'])
            return items + get_saved_items(user_id, access_token, r.json()['continuation'])
        else:
            return items
    else:
        print('Error: Saved items couldn’t be fetched')
        print('Status code: ' + str(r.status_code))
        print(r.json())
        exit(1)
# Timestamped output name, so repeated runs never clobber an earlier export.
filename = 'feedly-saved-' + time.strftime("%Y%m%d-%H%M%S") + '.json'
items = get_saved_items(user_id, access_token)
# 'a' (append) mode: the filename above is always fresh, so this is a plain write.
with io.open(filename, 'a', encoding='UTF-8') as output_file:
    try:
        print(len(items), 'total items')
        json.dump(items, output_file, separators=(',',':'), indent=2)
        print('Success: Created ' + filename)
    except ValueError as error:
        print(error)
print(error) | [
"json.dump",
"time.strftime",
"requests.get",
"io.open"
] | [((494, 528), 'requests.get', 'requests.get', (['url'], {'headers': 'headers'}), '(url, headers=headers)\n', (506, 528), False, 'import requests\n'), ((1128, 1168), 'io.open', 'io.open', (['filename', '"""a"""'], {'encoding': '"""UTF-8"""'}), "(filename, 'a', encoding='UTF-8')\n", (1135, 1168), False, 'import io\n'), ((1035, 1065), 'time.strftime', 'time.strftime', (['"""%Y%m%d-%H%M%S"""'], {}), "('%Y%m%d-%H%M%S')\n", (1048, 1065), False, 'import time\n'), ((1233, 1295), 'json.dump', 'json.dump', (['items', 'output_file'], {'separators': "(',', ':')", 'indent': '(2)'}), "(items, output_file, separators=(',', ':'), indent=2)\n", (1242, 1295), False, 'import json\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# ade:
# Asynchronous Differential Evolution.
#
# Copyright (C) 2018-19 by <NAME>,
# http://edsuom.com/ade
#
# See edsuom.com for API documentation as well as information about
# Ed's background and other projects, software and otherwise.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS
# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language
# governing permissions and limitations under the License.
"""
Example script I{voc.py}: Identifying coefficients for the
open-circuit voltage of an AGM lead-acid battery over time.
This example reads a two-item-per-line CSV file. Each line
contains: (B{1}) the time in seconds from some arbitrary starting
time, (B{2}) the battery voltage with no charge or discharge current.
Then it uses asynchronous differential evolution to efficiently find a
nonlinear best-fit curve.
"""
import time
import numpy as np
from scipy import signal
from twisted.internet import reactor, defer
from asynqueue.process import ProcessQueue
from yampex.plot import Plotter
from ade.population import Population
from ade.de import DifferentialEvolution
from ade.image import ImageViewer
from ade.util import *
from data import TimeData
class BatteryData(TimeData):
    """
    Run L{setup} on my instance to decompress and load the
    voc.csv.bz2 CSV file.

    The CSV file isn't included in the I{ade} package and will
    automatically be downloaded from U{edsuom.com}. Here's the privacy
    policy for my site (it's short, as all good privacy policies
    should be)::

        Privacy policy: I don’t sniff out, track, or share anything
        identifying individual visitors to this site. There are no
        cookies or anything in place to let me see where you go on the
        Internetthat’s creepy. All I get (like anyone else with a web
        server), is plain vanilla server logs with “referral” info
        about which web page sent you to this one.

    @see: The L{Data} base class.
    """
    # CSV base name; presumably TimeData expands this to "voc.csv.bz2" when
    # downloading/loading -- confirm in the Data base class.
    basename = "voc"
class Reporter(object):
    """
    An instance of me is called each time a combination of parameters
    is found that's better than any of the others thus far.

    Prints the sum-of-squared error and parameter values to the
    console and updates a plot image (PNG) at I{plotFilePath}.

    @cvar plotFilePath: The file name in the current directory of a
        PNG file to write an update with a Matplotlib plot image of
        the actual vs. modeled temperature versus thermistor
        resistance curves.
    """
    # Output PNG, number of points on the modeled curve, and how far past the
    # last observation (as a multiple of t.max()) the model is extrapolated.
    plotFilePath = "voc.png"
    N_curve_plot = 200
    extrapolationMultiple = 3

    def __init__(self, evaluator, population):
        """
        C{Reporter(evaluator, population)}
        """
        self.ev = evaluator
        self.prettyValues = population.pm.prettyValues
        self.pt = Plotter(
            2, filePath=self.plotFilePath, width=15, height=10)
        self.pt.use_grid()
        self.pt.use_timex()
        # Launches an external image viewer watching the PNG file.
        ImageViewer(self.plotFilePath)

    def __call__(self, values, counter, SSE):
        """
        Prints out a new best parameter combination and its curve vs
        observations, with lots of extrapolation to the right.
        """
        def titlePart(*args):
            titleParts.append(sub(*args))

        SSE_info = sub("SSE={:g}", SSE)
        titleParts = []
        titlePart("Voltage vs Time (sec)")
        titlePart(SSE_info)
        titlePart("k={:d}", counter)
        msg(0, self.prettyValues(values, SSE_info+", with"), 0)
        with self.pt as sp:
            sp.set_title(", ".join(titleParts))
            t = self.ev.t
            V = self.ev.X[:,0]
            # Model versus observations
            sp.add_line('-', 1)
            sp.set_ylabel("V")
            # values[-1] is 'voc', the last name in sorted parameter order.
            sp.set_zeroLine(values[-1])
            sp.add_annotation(0, "Battery disconnect")
            sp.add_annotation(-1, "Last observation")
            sp.add_textBox("NE", "Estimated VOC: {:.2f} V", values[-1])
            ax = sp(t, V)
            tm = np.linspace(
                t.min(), self.extrapolationMultiple*t.max(), self.N_curve_plot)
            V_curve = self.ev.curve(tm, *values)
            ax.plot(tm, V_curve, color='red', marker='o', markersize=2)
            # Residuals
            res = self.ev.curve(t, *values) - V
            sp.set_ylabel("dV")
            sp.set_zeroLine()
            # Index of the largest-magnitude residual, skipping the first two.
            k = np.argmax(np.abs(res[2:])) + 2
            # NOTE(review): resPercentage is computed but never used.
            resPercentage = 100 * res[k]/V[k]
            sp(t, res)
        self.pt.show()
class Evaluator(Picklable):
    """
    I evaluate battery VOC model fitness.

    Construct an instance of me, run the L{setup} method, and wait (in
    non-blocking Twisted-friendly fashion) for the C{Deferred} it
    returns to fire. Then call the instance a bunch of times with
    parameter values for a L{curve} to get a (deferred)
    sum-of-squared-error fitness of the curve to the battery voltage
    data. (NOTE(review): the original docstring said "thermistor data",
    apparently copied from another example.)
    """
    # Multiplier applied to the raw SSE in __call__.
    scale_SSE = 100
    bounds = {
        # Initial rapid drop with up to 10 minute time constant
        'a1': (0, 40),
        'b1': (1, 10*60),
        'c1': (1, 200),
        # Middle drop with 20 min to 2 hour time constant
        'a2': (0, 600),
        'b2': (20*60, 2*3600),
        'c2': (50, 1000),
        # Slow settling with 1-12 hour time constant
        'a3': (0, 800),
        'b3': (3600, 12*3600),
        'c3': (100, 4000),
        # A bit beyond the extremes for VOC of an AGM lead acid battery
        'voc': (45, 54),
    }

    def setup(self):
        """
        Returns a C{Deferred} that fires with two equal-length sequences,
        the names and bounds of all parameters to be determined.

        (NOTE(review): the original docstring also claimed an I{indices}
        dict is created; no such dict appears here.)
        """
        def done(null):
            # Copy the loaded time vector and data matrix onto myself.
            for name in ('t', 'X'):
                setattr(self, name, getattr(data, name))
            return names, bounds

        bounds = []
        # Parameter order is the sorted key order; curve() and the Reporter's
        # values[-1] == 'voc' both depend on this ordering.
        names = sorted(self.bounds.keys())
        for name in names:
            bounds.append(self.bounds[name])
        # The data
        data = BatteryData()
        return data.setup().addCallbacks(done, oops)

    def curve(self, t, *args):
        """
        Given a 1-D time vector followed by arguments defining curve
        parameters, returns a 1-D vector of battery voltage over that
        time with with no charge or discharge current, with one
        particular but unknown SOC.

        The model implements this equation:

        M{V = a1*exp(-t/b1+c1) + ... ak*exp(-t/bk+ck) + voc}
        """
        # args order (sorted names): a1,a2,a3,b1,... isn't relevant here
        # because triples are sliced positionally: (a_k, b_k, c_k) per term.
        V = args[-1]
        for k in range(3):
            a, b, c = args[3*k:3*k+3]
            V += a*np.exp(-(t+c)/b)
        return V

    def __call__(self, values):
        """
        Evaluation function for the parameter I{values}.
        """
        V = self.X[:,0]
        V_curve = self.curve(self.t, *values)
        # Scaled sum of squared residuals; lower is fitter.
        return self.scale_SSE * np.sum(np.square(V_curve - V))
class Runner(object):
    """
    I run everything to fit a curve to battery VOC data using
    asynchronous differential evolution.

    Construct an instance of me with an instance of L{Args} that has
    parsed command-line options, then have the Twisted reactor call
    the instance when it starts. Then start the reactor and watch the
    fun.
    """
    def __init__(self, args):
        """
        C{Runner(args)}
        """
        self.args = args
        self.ev = Evaluator()
        # Default worker count: one fewer than the CPU core count.
        N = args.N if args.N else ProcessQueue.cores()-1
        self.q = ProcessQueue(N, returnFailure=True)
        # msg(True) logs to STDOUT; msg(file handle) logs to the file.
        self.fh = open("voc.log", 'w') if args.l else True
        msg(self.fh)

    @defer.inlineCallbacks
    def shutdown(self):
        """
        Call this to shut me down when I'm done. Shuts down my
        C{ProcessQueue}, which can take a moment.

        Repeated calls have no effect.
        """
        if self.q is not None:
            msg("Shutting down...")
            yield self.q.shutdown()
            msg("Task Queue is shut down")
            self.q = None
            msg("Goodbye")

    def evaluate(self, values):
        """
        The function that gets called with each combination of parameters
        to be evaluated for fitness.
        """
        # A None sentinel means the DE run is over: begin shutdown instead.
        if values is None:
            return self.shutdown()
        values = list(values)
        if self.q: return self.q.call(self.ev, values)

    @defer.inlineCallbacks
    def __call__(self):
        # Main entry: set up the evaluator, population and DE, run to
        # completion, then stop the reactor.
        t0 = time.time()
        args = self.args
        names_bounds = yield self.ev.setup().addErrback(oops)
        self.p = Population(
            self.evaluate,
            names_bounds[0], names_bounds[1], popsize=args.p)
        yield self.p.setup().addErrback(oops)
        reporter = Reporter(self.ev, self.p)
        self.p.addCallback(reporter)
        # F option is "lo,hi" -- parse into a two-float range.
        F = [float(x) for x in args.F.split(',')]
        de = DifferentialEvolution(
            self.p,
            CR=args.C, F=F, maxiter=args.m,
            randomBase=not args.b, uniform=args.u,
            adaptive=not args.n, bitterEnd=args.e, logHandle=self.fh)
        yield de()
        yield self.shutdown()
        msg(0, "Final population:\n{}", self.p)
        msg(0, "Elapsed time: {:.2f} seconds", time.time()-t0, 0)
        msg(None)
        reactor.stop()

    def run(self):
        # Wrapper for reactor.callWhenRunning; surfaces errors via oops.
        return self().addErrback(oops)
def main():
    """
    Called when this module is run as a script.
    """
    # -h/--help was already handled by Args; nothing to run in that case.
    if args.h:
        return
    r = Runner(args)
    reactor.callWhenRunning(r.run)
    reactor.run()
args = Args(
"""
Parameter finder for AGM lead-acid battery open-circuit voltage
model using Differential Evolution.
Downloads a compressed CSV file of real VOC data points from
edsuom.com to the current directory (if it's not already
present). The data points and the current best-fit curves are
plotted in the PNG file (also in the current directory)
pfinder.png. You can see the plots, automatically updated, with
the Linux command "qiv -Te thermistor.png". (Possibly that other
OS may have something that works, too.)
Press the Enter key to quit early.
"""
)
args('-m', '--maxiter', 800, "Maximum number of DE generations to run")
args('-e', '--bitter-end', "Keep working to the end even with little progress")
args('-p', '--popsize', 20, "Population: # individuals per unknown parameter")
args('-C', '--CR', 0.8, "DE Crossover rate CR")
args('-F', '--F', "0.5,1.0", "DE mutation scaling F: two values for range")
args('-b', '--best', "Use DE/best/1 instead of DE/rand/1")
args('-n', '--not-adaptive', "Don't use automatic F adaptation")
args('-u', '--uniform', "Initialize population uniformly instead of with LHS")
args('-N', '--N-cores', 0, "Limit the number of CPU cores")
args('-l', '--logfile', "Write results to logfile 'voc.log' instead of STDOUT")
args(main)
| [
"numpy.abs",
"asynqueue.process.ProcessQueue",
"twisted.internet.reactor.stop",
"numpy.square",
"numpy.exp",
"yampex.plot.Plotter",
"ade.de.DifferentialEvolution",
"asynqueue.process.ProcessQueue.cores",
"twisted.internet.reactor.run",
"ade.population.Population",
"twisted.internet.reactor.callW... | [((9904, 9934), 'twisted.internet.reactor.callWhenRunning', 'reactor.callWhenRunning', (['r.run'], {}), '(r.run)\n', (9927, 9934), False, 'from twisted.internet import reactor, defer\n'), ((9939, 9952), 'twisted.internet.reactor.run', 'reactor.run', ([], {}), '()\n', (9950, 9952), False, 'from twisted.internet import reactor, defer\n'), ((3223, 3282), 'yampex.plot.Plotter', 'Plotter', (['(2)'], {'filePath': 'self.plotFilePath', 'width': '(15)', 'height': '(10)'}), '(2, filePath=self.plotFilePath, width=15, height=10)\n', (3230, 3282), False, 'from yampex.plot import Plotter\n'), ((3359, 3389), 'ade.image.ImageViewer', 'ImageViewer', (['self.plotFilePath'], {}), '(self.plotFilePath)\n', (3370, 3389), False, 'from ade.image import ImageViewer\n'), ((7956, 7991), 'asynqueue.process.ProcessQueue', 'ProcessQueue', (['N'], {'returnFailure': '(True)'}), '(N, returnFailure=True)\n', (7968, 7991), False, 'from asynqueue.process import ProcessQueue\n'), ((8892, 8903), 'time.time', 'time.time', ([], {}), '()\n', (8901, 8903), False, 'import time\n'), ((9008, 9083), 'ade.population.Population', 'Population', (['self.evaluate', 'names_bounds[0]', 'names_bounds[1]'], {'popsize': 'args.p'}), '(self.evaluate, names_bounds[0], names_bounds[1], popsize=args.p)\n', (9018, 9083), False, 'from ade.population import Population\n'), ((9300, 9467), 'ade.de.DifferentialEvolution', 'DifferentialEvolution', (['self.p'], {'CR': 'args.C', 'F': 'F', 'maxiter': 'args.m', 'randomBase': '(not args.b)', 'uniform': 'args.u', 'adaptive': '(not args.n)', 'bitterEnd': 'args.e', 'logHandle': 'self.fh'}), '(self.p, CR=args.C, F=F, maxiter=args.m, randomBase=\n not args.b, uniform=args.u, adaptive=not args.n, bitterEnd=args.e,\n logHandle=self.fh)\n', (9321, 9467), False, 'from ade.de import DifferentialEvolution\n'), ((9697, 9711), 'twisted.internet.reactor.stop', 'reactor.stop', ([], {}), '()\n', (9709, 9711), False, 'from twisted.internet import reactor, defer\n'), 
((7097, 7117), 'numpy.exp', 'np.exp', (['(-(t + c) / b)'], {}), '(-(t + c) / b)\n', (7103, 7117), True, 'import numpy as np\n'), ((7358, 7380), 'numpy.square', 'np.square', (['(V_curve - V)'], {}), '(V_curve - V)\n', (7367, 7380), True, 'import numpy as np\n'), ((7916, 7936), 'asynqueue.process.ProcessQueue.cores', 'ProcessQueue.cores', ([], {}), '()\n', (7934, 7936), False, 'from asynqueue.process import ProcessQueue\n'), ((9652, 9663), 'time.time', 'time.time', ([], {}), '()\n', (9661, 9663), False, 'import time\n'), ((4780, 4795), 'numpy.abs', 'np.abs', (['res[2:]'], {}), '(res[2:])\n', (4786, 4795), True, 'import numpy as np\n')] |
"""Calculations involving a pair of Cu atoms
"""
from typing import Union, Callable
import numpy as np
from ase import Atoms
from ase.units import Ang
try:
from Morse import MorsePotential
from util import map_func
except ModuleNotFoundError:
from .Morse import MorsePotential
from .util import map_func
def build_pair(d0: Union[float, int] = 1) -> Callable:
    """Closure to store the atoms object

    Args:
        d0 (Union[float, int], optional): default unit cell length

    Returns:
        Callable: function to apply strain

    Note:
        The returned function mutates and returns the *same* Atoms object
        on every call; callers should not hold references across calls.
    """
    calc = MorsePotential()
    a = Atoms('2Cu', positions=[(0., 0., 0.), (0., 0., d0 * Ang)])
    a.set_calculator(calc)
    def change_distance(d: Union[float, int]) -> Atoms:
        """Function that returns the deformed unit cell under a given hydrostatic strain

        Args:
            d (Union[float, int]): distance (Å)

        Returns:
            Atoms: deformed atom pair
        """
        # Only the z coordinate of the second atom moves.
        a.positions[1, 2] = d * Ang
        return a
    return change_distance
get_pair = build_pair() # set up the closure (shared Atoms pair, module-wide)
def get_pairwise_pe(d: Union[float, int]) -> float:
    """Calculate the potential energy of two atoms separated by the given distance

    Args:
        d (Union[float, int]): distance (Å)

    Returns:
        float: potential energy (eV)
    """
    return get_pair(d).get_potential_energy()
def get_pairwise_pes(arr: np.ndarray) -> np.ndarray:
    """Apply pairwise potential energy calculation to an array of distances

    Args:
        arr (np.ndarray): array of distances (Å)

    Returns:
        np.ndarray: array of potential energies (eV)
    """
    # Element-wise evaluation; distances are processed sequentially because
    # the underlying closure reuses one shared Atoms object.
    return map_func(get_pairwise_pe, arr)
def get_pairwise_force(d: Union[float, int]) -> float:
    """Calculate the force between two atoms separated by the given distance

    Args:
        d (Union[float, int]): distance (Å)

    Returns:
        float: force (eV/Å)
    """
    # z-component of the force on the second (displaced) atom.
    return get_pair(d).get_forces()[1, 2]
def get_pairwise_forces(arr: np.ndarray) -> np.ndarray:
    """Apply pairwise force calculation to an array of distances

    Args:
        arr (np.ndarray): array of distances (Å)

    Returns:
        np.ndarray: array of forces (eV/Å)
    """
    return map_func(get_pairwise_force, arr)
# Quick smoke test when run as a script.
if __name__ == "__main__":
    print(get_pairwise_pe(2.5))
    print(get_pairwise_pes(np.linspace(0, 5, 10)))
| [
"ase.Atoms",
"Morse.MorsePotential",
"numpy.linspace",
"util.map_func"
] | [((574, 590), 'Morse.MorsePotential', 'MorsePotential', ([], {}), '()\n', (588, 590), False, 'from Morse import MorsePotential\n'), ((599, 662), 'ase.Atoms', 'Atoms', (['"""2Cu"""'], {'positions': '[(0.0, 0.0, 0.0), (0.0, 0.0, d0 * Ang)]'}), "('2Cu', positions=[(0.0, 0.0, 0.0), (0.0, 0.0, d0 * Ang)])\n", (604, 662), False, 'from ase import Atoms\n'), ((1665, 1695), 'util.map_func', 'map_func', (['get_pairwise_pe', 'arr'], {}), '(get_pairwise_pe, arr)\n', (1673, 1695), False, 'from util import map_func\n'), ((2236, 2269), 'util.map_func', 'map_func', (['get_pairwise_force', 'arr'], {}), '(get_pairwise_force, arr)\n', (2244, 2269), False, 'from util import map_func\n'), ((2358, 2379), 'numpy.linspace', 'np.linspace', (['(0)', '(5)', '(10)'], {}), '(0, 5, 10)\n', (2369, 2379), True, 'import numpy as np\n')] |
import bplights
import time
import random
import math
# Hardware demo: animate a travelling sine wave across a 300-pixel LED strip,
# with colour and speed controlled live from a nanoKONTROL MIDI board.
l = bplights.BPLights(300)
l.off()
m = bplights.BPNanoKontrol()
m.sliders[0] = 1
m.knobs[0] = 1
dq = math.pi * 6.0 / l.npixels  # phase step per pixel (3 full waves on strip)
dt = 0.1  # phase advance per frame; overwritten from knob 0 in the loop
t = 0
for i in range(10000):
    t = t + dt
    if (t > 2 * math.pi):
        t = t - 2 * math.pi  # keep the running phase bounded
    dt = 0.005 + 0.3 * m.knobs[0]  # knob 0 sets animation speed
    for q in range(l.npixels):
        vt = q * dq + t
        s = (math.sin(vt) + 1) * 0.5  # sine mapped into [0, 1]
        # Sliders 0-2 scale the red/green/blue channel brightness.
        r = m.sliders[0] * 255 * s
        g = m.sliders[1] * 255 * s
        b = m.sliders[2] * 255 * s
        l.setval(q, r, g, b)
    l.show()
    m.poll()  # refresh MIDI controller state
    time.sleep(0.02)
| [
"bplights.BPNanoKontrol",
"bplights.BPLights",
"time.sleep",
"math.sin"
] | [((60, 82), 'bplights.BPLights', 'bplights.BPLights', (['(300)'], {}), '(300)\n', (77, 82), False, 'import bplights\n'), ((96, 120), 'bplights.BPNanoKontrol', 'bplights.BPNanoKontrol', ([], {}), '()\n', (118, 120), False, 'import bplights\n'), ((596, 612), 'time.sleep', 'time.sleep', (['(0.02)'], {}), '(0.02)\n', (606, 612), False, 'import time\n'), ((406, 418), 'math.sin', 'math.sin', (['vt'], {}), '(vt)\n', (414, 418), False, 'import math\n')] |
import asyncio
class Barrier(object):
def __init__(self, parties, action=lambda: None):
self._parties = parties
self._action = action
self._cond = asyncio.Condition()
self._count = 0
async def wait(self):
self._count += 1
with (await self._cond):
if self._maybe_release():
return
await self._cond.wait()
async def deregister(self):
self._parties -= 1
with (await self._cond):
self._maybe_release()
@property
def empty(self):
return self._parties == 0
@property
def n_waiting(self):
return self._count
@property
def parties(self):
return self._parties
def _maybe_release(self):
if self._count == self._parties:
# Release everyone
self._cond.notify_all()
self._count = 0
self._action()
return True
return False
| [
"asyncio.Condition"
] | [((177, 196), 'asyncio.Condition', 'asyncio.Condition', ([], {}), '()\n', (194, 196), False, 'import asyncio\n')] |
# -*- coding: utf-8 -*-
"""
Created on Fri May 5 16:20:14 2017
@author: <NAME>
Program for WOS Cited References Analysis

Reads a pickled Web of Science export ('concatenated.pkl'), parses the
semicolon-separated cited-reference strings (column CR) against the
publication year (column PY), then writes summary CSV files and draws
histogram/trend figures of citation counts, cited-article ages, and
most-cited journals.
"""
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
from collections import Counter
df = pd.read_pickle('concatenated.pkl')
df = df.dropna(subset = ['PY','CR']) # Get rid of badly imported data
cited_ref = df.CR
orig_art_yr = df.PY
a = cited_ref.size
refs_per = np.zeros(a) # Citations per article
name = [] # Citation author
year = [] # Cited article year of pub
age = [] # Cited article age wrt published art.
journal = [] # Journal name of cited article
# NOTE(review): `row` is unused; the loop re-reads cited_ref.values[i].
for i, row in enumerate(cited_ref.values):
    auths = cited_ref.values[i] # Read the cell with all the citations for one article
    parts = auths.split(';') # Split the citations based on semi-colon
    refs_per[i] = 0; # Count the number of citations
    # Split the citation into parts based on comma to get the year and journal name
    # NOTE(review): a citation that splits into <3 or >6 comma-separated
    # parts leaves n/y/jou holding values from the previous citation; the
    # y.isdigit() guard below usually filters these, but jou may be stale.
    for j in parts:
        if len(j.split(',')) == 3:
            n,y,jou = j.split(',')
        elif len(j.split(',')) == 4:
            n,y,jou,ver = j.split(',')
        elif len(j.split(',')) == 5:
            n,y,jou,ver,page = j.split(',')
        elif len(j.split(',')) == 6:
            n,y,jou,ver,page,doi = j.split(',')
        y = y.strip()
        if y.isdigit(): # Some citations don't have a year, throw them away
            name.append(n)
            year.append(y)
            # NOTE(review): this re-converts the *whole* year list to int on
            # every accepted citation (O(n^2) overall); the string y appended
            # just above is converted along with the rest, so the list stays
            # homogeneous ints.
            year = [int(i) for i in year]
            temp = orig_art_yr.values[i] - float(y)
            age.append(temp)
            journal.append(jou)
            refs_per[i] += 1
        else:
            pass
## Write the Top Most Cited Journals to csv file
journal = [x.upper() for x in journal] # Convert all names to uppercase
cc = Counter(journal)
p = cc.most_common()
cols = ['name','count']
pp = pd.DataFrame(p,columns = cols)
pp['name'] = pp['name'].str.upper() # Convert all names to uppercase
pp = pp.set_index('name')
# Journal names still differ by leading/trailing whitespace from the comma
# split, so equal names can remain separate groups here.
pp = pp.groupby(pp.index).sum() # Find duplicate names and add the counts
pp = pp.sort_values(['count'], ascending = [False]) # Sort list by counts
pp.to_csv('MaxCitedJournals.csv') # Write to csv file
#############################################################################
# Let's make some figures
# Number of articles published per year
orig_art_yr = np.array(orig_art_yr,int)
plt.figure()
bins=np.arange(min(orig_art_yr), max(orig_art_yr)+2)
plt.hist(orig_art_yr, bins)
plt.xticks(orig_art_yr+0.5, orig_art_yr, rotation = 90)
plt.ylabel('Number of articles published per year')
# Year of publication of cited articles
year = np.array(year)
plt.figure()
plt.hist(year, bins = np.arange(min(year), max(year) + 2, 1))
plt.xlabel('Year of publication of cited articles')
# Age of cited references wrt. published article
age = np.array(age)
plt.figure()
#plt.hist(age, bins=np.arange(min(age), max(age) + 2, 1))
plt.hist(age, bins = np.arange(0, 100, 1))
plt.xlabel('Age of cited articles (years)')
plt.ylabel('Count')
# Total number of cited references per year, and
# Average number of cited references per article per year
ref_peryear = []
avgref_peryear = []
xx = np.unique(orig_art_yr)
# NOTE(review): this assignment is dead; the for loop below rebinds i.
i = min(orig_art_yr)
for i in xx:
    ii = orig_art_yr == i
    p = refs_per[ii].sum()
    pp = refs_per[ii].mean()
    ref_peryear.append(p)
    avgref_peryear.append(pp)
ref_peryear = np.array(ref_peryear)
avgref_peryear = np.array(avgref_peryear)
plt.figure()
plt.plot(xx,ref_peryear,'o-')
plt.xticks(xx, xx, rotation=90)
plt.ylabel('Number of citations per year')
plt.figure()
plt.plot(xx,avgref_peryear,'o-')
plt.xticks(xx, xx, rotation=90)
plt.ylabel('Avg. number of citations per article, per year')
## Write to file
temp1 = pd.DataFrame({'Original article year': orig_art_yr,'references per article': refs_per})
temp1.to_csv('OriginalArticle_Year_RefCount.csv')
del temp1
temp1 = pd.DataFrame({'Cited journal year': year,'Cited journal age': age})
temp1.to_csv('CitedJournalAge.csv')
del temp1
temp1 = pd.DataFrame({'Year': xx, 'Total refs per year': ref_peryear,'Ave refs per article per year': avgref_peryear})
temp1.to_csv('ReferenceStats.csv')
del temp1 | [
"pandas.read_pickle",
"matplotlib.pyplot.hist",
"numpy.unique",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"collections.Counter",
"numpy.array",
"numpy.zeros",
"matplotlib.pyplot.figure",
"pandas.DataFrame",
"numpy.arange"
] | [((238, 272), 'pandas.read_pickle', 'pd.read_pickle', (['"""concatenated.pkl"""'], {}), "('concatenated.pkl')\n", (252, 272), True, 'import pandas as pd\n'), ((413, 424), 'numpy.zeros', 'np.zeros', (['a'], {}), '(a)\n', (421, 424), True, 'import numpy as np\n'), ((1901, 1917), 'collections.Counter', 'Counter', (['journal'], {}), '(journal)\n', (1908, 1917), False, 'from collections import Counter\n'), ((1968, 1997), 'pandas.DataFrame', 'pd.DataFrame', (['p'], {'columns': 'cols'}), '(p, columns=cols)\n', (1980, 1997), True, 'import pandas as pd\n'), ((2484, 2510), 'numpy.array', 'np.array', (['orig_art_yr', 'int'], {}), '(orig_art_yr, int)\n', (2492, 2510), True, 'import numpy as np\n'), ((2510, 2522), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2520, 2522), True, 'import matplotlib.pyplot as plt\n'), ((2576, 2603), 'matplotlib.pyplot.hist', 'plt.hist', (['orig_art_yr', 'bins'], {}), '(orig_art_yr, bins)\n', (2584, 2603), True, 'import matplotlib.pyplot as plt\n'), ((2604, 2659), 'matplotlib.pyplot.xticks', 'plt.xticks', (['(orig_art_yr + 0.5)', 'orig_art_yr'], {'rotation': '(90)'}), '(orig_art_yr + 0.5, orig_art_yr, rotation=90)\n', (2614, 2659), True, 'import matplotlib.pyplot as plt\n'), ((2674, 2725), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Number of articles published per year"""'], {}), "('Number of articles published per year')\n", (2684, 2725), True, 'import matplotlib.pyplot as plt\n'), ((2777, 2791), 'numpy.array', 'np.array', (['year'], {}), '(year)\n', (2785, 2791), True, 'import numpy as np\n'), ((2792, 2804), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2802, 2804), True, 'import matplotlib.pyplot as plt\n'), ((2867, 2918), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Year of publication of cited articles"""'], {}), "('Year of publication of cited articles')\n", (2877, 2918), True, 'import matplotlib.pyplot as plt\n'), ((2978, 2991), 'numpy.array', 'np.array', (['age'], {}), '(age)\n', (2986, 2991), True, 
'import numpy as np\n'), ((2992, 3004), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (3002, 3004), True, 'import matplotlib.pyplot as plt\n'), ((3106, 3149), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Age of cited articles (years)"""'], {}), "('Age of cited articles (years)')\n", (3116, 3149), True, 'import matplotlib.pyplot as plt\n'), ((3152, 3171), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Count"""'], {}), "('Count')\n", (3162, 3171), True, 'import matplotlib.pyplot as plt\n'), ((3325, 3347), 'numpy.unique', 'np.unique', (['orig_art_yr'], {}), '(orig_art_yr)\n', (3334, 3347), True, 'import numpy as np\n'), ((3539, 3560), 'numpy.array', 'np.array', (['ref_peryear'], {}), '(ref_peryear)\n', (3547, 3560), True, 'import numpy as np\n'), ((3578, 3602), 'numpy.array', 'np.array', (['avgref_peryear'], {}), '(avgref_peryear)\n', (3586, 3602), True, 'import numpy as np\n'), ((3603, 3615), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (3613, 3615), True, 'import matplotlib.pyplot as plt\n'), ((3616, 3647), 'matplotlib.pyplot.plot', 'plt.plot', (['xx', 'ref_peryear', '"""o-"""'], {}), "(xx, ref_peryear, 'o-')\n", (3624, 3647), True, 'import matplotlib.pyplot as plt\n'), ((3646, 3677), 'matplotlib.pyplot.xticks', 'plt.xticks', (['xx', 'xx'], {'rotation': '(90)'}), '(xx, xx, rotation=90)\n', (3656, 3677), True, 'import matplotlib.pyplot as plt\n'), ((3692, 3734), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Number of citations per year"""'], {}), "('Number of citations per year')\n", (3702, 3734), True, 'import matplotlib.pyplot as plt\n'), ((3747, 3759), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (3757, 3759), True, 'import matplotlib.pyplot as plt\n'), ((3760, 3794), 'matplotlib.pyplot.plot', 'plt.plot', (['xx', 'avgref_peryear', '"""o-"""'], {}), "(xx, avgref_peryear, 'o-')\n", (3768, 3794), True, 'import matplotlib.pyplot as plt\n'), ((3793, 3824), 'matplotlib.pyplot.xticks', 'plt.xticks', (['xx', 'xx'], 
{'rotation': '(90)'}), '(xx, xx, rotation=90)\n', (3803, 3824), True, 'import matplotlib.pyplot as plt\n'), ((3839, 3899), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Avg. number of citations per article, per year"""'], {}), "('Avg. number of citations per article, per year')\n", (3849, 3899), True, 'import matplotlib.pyplot as plt\n'), ((3929, 4021), 'pandas.DataFrame', 'pd.DataFrame', (["{'Original article year': orig_art_yr, 'references per article': refs_per}"], {}), "({'Original article year': orig_art_yr,\n 'references per article': refs_per})\n", (3941, 4021), True, 'import pandas as pd\n'), ((4086, 4154), 'pandas.DataFrame', 'pd.DataFrame', (["{'Cited journal year': year, 'Cited journal age': age}"], {}), "({'Cited journal year': year, 'Cited journal age': age})\n", (4098, 4154), True, 'import pandas as pd\n'), ((4209, 4324), 'pandas.DataFrame', 'pd.DataFrame', (["{'Year': xx, 'Total refs per year': ref_peryear,\n 'Ave refs per article per year': avgref_peryear}"], {}), "({'Year': xx, 'Total refs per year': ref_peryear,\n 'Ave refs per article per year': avgref_peryear})\n", (4221, 4324), True, 'import pandas as pd\n'), ((3084, 3104), 'numpy.arange', 'np.arange', (['(0)', '(100)', '(1)'], {}), '(0, 100, 1)\n', (3093, 3104), True, 'import numpy as np\n')] |
import logging
import time
import selenium
from selenium.webdriver.common.by import By
import testutils
def test_project_file_browser(driver: selenium.webdriver, *args, **kwargs):
    """Verify drag-and-drop upload into a project's Code and Input Data tabs.

    Creates a minimal Python 3 project, drags ``sample-upload.txt`` into
    the Code and Input Data file browsers, and checks that it appears as
    the first listed file in each.

    Args:
        driver
    """
    project_info = testutils.prep_py3_minimal_base(driver)
    username = project_info.username
    project_title = project_info.project_name

    logging.info(f"Navigating to Code for project: {project_title}")
    browser = testutils.FileBrowserElements(driver)
    browser.code_tab.wait().click()
    time.sleep(2)
    logging.info(f"Dragging and dropping file into code for project: {project_title}")
    browser.drag_drop_file_in_drop_zone()
    first_code_file = browser.file_information.find().text
    assert first_code_file == 'sample-upload.txt', \
        "Expected sample-upload.txt to be the first file in Code"

    logging.info(f"Navigating to Input Data for project: {project_title}")
    browser.input_data_tab.wait().click()
    time.sleep(2)
    logging.info(f"Dragging and dropping file into Input Data for project: {project_title}")
    browser.drag_drop_file_in_drop_zone()
    first_input_file = browser.file_information.find().text
    assert first_input_file == 'sample-upload.txt', \
        "Expected sample-upload.txt to be the first file in Input Data"

    # TODO - Upload file to Output Data, need to deal with untracked directory
def test_dataset_file_browser(driver: selenium.webdriver, *args, **kwargs):
    """Verify drag-and-drop upload into a dataset's Data tab.

    Creates a new dataset, drags ``sample-upload.txt`` into the Data file
    browser, and checks that it appears as the first listed file.

    Args:
        driver
    """
    testutils.log_in(driver)
    testutils.GuideElements(driver).remove_guide()
    datasets = testutils.DatasetElements(driver)
    dataset_title = datasets.create_dataset(testutils.unique_dataset_name())

    logging.info(f"Navigating to Data for dataset: {dataset_title}")
    browser = testutils.FileBrowserElements(driver)
    browser.data_tab.wait().click()
    logging.info(f"Dragging and dropping file into Data for dataset: {dataset_title}")
    time.sleep(3)
    browser.drag_drop_file_in_drop_zone()
    time.sleep(3)
    first_data_file = browser.file_information.find().text
    assert first_data_file == 'sample-upload.txt', \
        "Expected sample-upload.txt to be the first file in Data"
| [
"testutils.unique_dataset_name",
"testutils.FileBrowserElements",
"testutils.prep_py3_minimal_base",
"time.sleep",
"testutils.DatasetElements",
"testutils.GuideElements",
"testutils.log_in",
"logging.info"
] | [((339, 378), 'testutils.prep_py3_minimal_base', 'testutils.prep_py3_minimal_base', (['driver'], {}), '(driver)\n', (370, 378), False, 'import testutils\n'), ((440, 504), 'logging.info', 'logging.info', (['f"""Navigating to Code for project: {project_title}"""'], {}), "(f'Navigating to Code for project: {project_title}')\n", (452, 504), False, 'import logging\n'), ((529, 566), 'testutils.FileBrowserElements', 'testutils.FileBrowserElements', (['driver'], {}), '(driver)\n', (558, 566), False, 'import testutils\n'), ((617, 630), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (627, 630), False, 'import time\n'), ((635, 722), 'logging.info', 'logging.info', (['f"""Dragging and dropping file into code for project: {project_title}"""'], {}), "(\n f'Dragging and dropping file into code for project: {project_title}')\n", (647, 722), False, 'import logging\n'), ((926, 996), 'logging.info', 'logging.info', (['f"""Navigating to Input Data for project: {project_title}"""'], {}), "(f'Navigating to Input Data for project: {project_title}')\n", (938, 996), False, 'import logging\n'), ((1053, 1066), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (1063, 1066), False, 'import time\n'), ((1071, 1164), 'logging.info', 'logging.info', (['f"""Dragging and dropping file into Input Data for project: {project_title}"""'], {}), "(\n f'Dragging and dropping file into Input Data for project: {project_title}')\n", (1083, 1164), False, 'import logging\n'), ((1645, 1669), 'testutils.log_in', 'testutils.log_in', (['driver'], {}), '(driver)\n', (1661, 1669), False, 'import testutils\n'), ((1740, 1773), 'testutils.DatasetElements', 'testutils.DatasetElements', (['driver'], {}), '(driver)\n', (1765, 1773), False, 'import testutils\n'), ((1859, 1923), 'logging.info', 'logging.info', (['f"""Navigating to Data for dataset: {dataset_title}"""'], {}), "(f'Navigating to Data for dataset: {dataset_title}')\n", (1871, 1923), False, 'import logging\n'), ((1948, 1985), 
'testutils.FileBrowserElements', 'testutils.FileBrowserElements', (['driver'], {}), '(driver)\n', (1977, 1985), False, 'import testutils\n'), ((2036, 2123), 'logging.info', 'logging.info', (['f"""Dragging and dropping file into Data for dataset: {dataset_title}"""'], {}), "(\n f'Dragging and dropping file into Data for dataset: {dataset_title}')\n", (2048, 2123), False, 'import logging\n'), ((2123, 2136), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (2133, 2136), False, 'import time\n'), ((2193, 2206), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (2203, 2206), False, 'import time\n'), ((1822, 1853), 'testutils.unique_dataset_name', 'testutils.unique_dataset_name', ([], {}), '()\n', (1851, 1853), False, 'import testutils\n'), ((1674, 1705), 'testutils.GuideElements', 'testutils.GuideElements', (['driver'], {}), '(driver)\n', (1697, 1705), False, 'import testutils\n')] |