code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
"""
Copyright (C) 2021 <NAME>
This file is part of QuantLib, a free-software/open-source library
for financial quantitative analysts and developers - http://quantlib.org/
QuantLib is free software: you can redistribute it and/or modify it
under the terms of the QuantLib license. You should have received a
copy of the license along with this program; if not, please email
<<EMAIL>>. The license is also available online at
<http://quantlib.org/license.shtml>.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the license for more details.
"""
import unittest
import QuantLib as ql
class CurrencyTest(unittest.TestCase):
    """Unit tests for QuantLib currency construction."""

    def test_default_currency_constructor(self):
        """Testing default currency constructor"""
        default_ccy = ql.Currency()
        self.assertTrue(default_ccy.empty(),
                        "Failed to create default currency.")

    def test_eur_constructor(self):
        """Testing EUR constructor"""
        eur = ql.EURCurrency()
        self.assertFalse(eur.empty(), "Failed to create EUR currency.")

    def test_bespoke_currency_constructor(self):
        """Testing bespoke currency constructor"""
        custom_ccy = ql.Currency(
            "CCY", "CCY", 100, "#", "", 100, ql.Rounding(), "")
        self.assertFalse(custom_ccy.empty(),
                         "Failed to create bespoke currency.")
if __name__ == '__main__':
    print('testing QuantLib ' + ql.__version__)
    suite = unittest.TestSuite()
    # unittest.makeSuite was deprecated in Python 3.11 and removed in 3.13;
    # TestLoader.loadTestsFromTestCase is the supported equivalent.
    suite.addTest(unittest.TestLoader().loadTestsFromTestCase(CurrencyTest))
    unittest.TextTestRunner(verbosity=2).run(suite)
|
[
"QuantLib.Rounding",
"unittest.TextTestRunner",
"unittest.TestSuite",
"unittest.makeSuite",
"QuantLib.EURCurrency",
"QuantLib.Currency"
] |
[((1628, 1648), 'unittest.TestSuite', 'unittest.TestSuite', ([], {}), '()\n', (1646, 1648), False, 'import unittest\n'), ((954, 967), 'QuantLib.Currency', 'ql.Currency', ([], {}), '()\n', (965, 967), True, 'import QuantLib as ql\n'), ((1164, 1180), 'QuantLib.EURCurrency', 'ql.EURCurrency', ([], {}), '()\n', (1178, 1180), True, 'import QuantLib as ql\n'), ((1667, 1707), 'unittest.makeSuite', 'unittest.makeSuite', (['CurrencyTest', '"""test"""'], {}), "(CurrencyTest, 'test')\n", (1685, 1707), False, 'import unittest\n'), ((1465, 1478), 'QuantLib.Rounding', 'ql.Rounding', ([], {}), '()\n', (1476, 1478), True, 'import QuantLib as ql\n'), ((1713, 1749), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (1736, 1749), False, 'import unittest\n')]
|
#!/usr/bin/env python3
"""This is an example to train a task with TRPO algorithm.
It uses an LSTM-based recurrent policy.
Here it runs CartPole-v1 environment with 100 iterations.
Results:
AverageReturn: 100
RiseTime: itr 13
"""
from metarl.experiment import run_experiment
from metarl.np.baselines import LinearFeatureBaseline
from metarl.tf.algos import TRPO
from metarl.tf.envs import TfEnv
from metarl.tf.experiment import LocalTFRunner
from metarl.tf.optimizers import ConjugateGradientOptimizer
from metarl.tf.optimizers import FiniteDifferenceHvp
from metarl.tf.policies import CategoricalLSTMPolicy
def run_task(snapshot_config, *_):
    """Defines the main experiment routine.

    Args:
        snapshot_config (metarl.experiment.SnapshotConfig): Configuration
            values for snapshotting.
        *_ (object): Hyperparameters (unused).
    """
    with LocalTFRunner(snapshot_config=snapshot_config) as runner:
        env = TfEnv(env_name='CartPole-v1')
        policy = CategoricalLSTMPolicy(name='policy', env_spec=env.spec)
        baseline = LinearFeatureBaseline(env_spec=env.spec)
        # Finite-difference Hessian-vector products feed the conjugate
        # gradient optimizer used by TRPO.
        hvp_approach = FiniteDifferenceHvp(base_eps=1e-5)
        algo = TRPO(env_spec=env.spec,
                    policy=policy,
                    baseline=baseline,
                    max_path_length=100,
                    discount=0.99,
                    max_kl_step=0.01,
                    optimizer=ConjugateGradientOptimizer,
                    optimizer_args={'hvp_approach': hvp_approach})
        runner.setup(algo, env)
        runner.train(n_epochs=100, batch_size=4000)
# Launch the experiment, keeping only the final snapshot.
run_experiment(run_task, snapshot_mode='last', seed=1)
|
[
"metarl.tf.envs.TfEnv",
"metarl.tf.policies.CategoricalLSTMPolicy",
"metarl.np.baselines.LinearFeatureBaseline",
"metarl.tf.optimizers.FiniteDifferenceHvp",
"metarl.tf.experiment.LocalTFRunner",
"metarl.experiment.run_experiment"
] |
[((1611, 1665), 'metarl.experiment.run_experiment', 'run_experiment', (['run_task'], {'snapshot_mode': '"""last"""', 'seed': '(1)'}), "(run_task, snapshot_mode='last', seed=1)\n", (1625, 1665), False, 'from metarl.experiment import run_experiment\n'), ((886, 932), 'metarl.tf.experiment.LocalTFRunner', 'LocalTFRunner', ([], {'snapshot_config': 'snapshot_config'}), '(snapshot_config=snapshot_config)\n', (899, 932), False, 'from metarl.tf.experiment import LocalTFRunner\n'), ((958, 987), 'metarl.tf.envs.TfEnv', 'TfEnv', ([], {'env_name': '"""CartPole-v1"""'}), "(env_name='CartPole-v1')\n", (963, 987), False, 'from metarl.tf.envs import TfEnv\n'), ((1006, 1061), 'metarl.tf.policies.CategoricalLSTMPolicy', 'CategoricalLSTMPolicy', ([], {'name': '"""policy"""', 'env_spec': 'env.spec'}), "(name='policy', env_spec=env.spec)\n", (1027, 1061), False, 'from metarl.tf.policies import CategoricalLSTMPolicy\n'), ((1082, 1122), 'metarl.np.baselines.LinearFeatureBaseline', 'LinearFeatureBaseline', ([], {'env_spec': 'env.spec'}), '(env_spec=env.spec)\n', (1103, 1122), False, 'from metarl.np.baselines import LinearFeatureBaseline\n'), ((1462, 1497), 'metarl.tf.optimizers.FiniteDifferenceHvp', 'FiniteDifferenceHvp', ([], {'base_eps': '(1e-05)'}), '(base_eps=1e-05)\n', (1481, 1497), False, 'from metarl.tf.optimizers import FiniteDifferenceHvp\n')]
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
# the code is modified from
# https://github.com/Cadene/pretrained-models.pytorch/blob/master/pretrainedmodels/models/xception.py
from singa import autograd
from singa import module
class Block(autograd.Layer):
    """One Xception-style block: a stack of ReLU / SeparableConv2d /
    BatchNorm2d repetitions, added in __call__ to a skip branch (a 1x1
    projection conv when shape changes, identity otherwise).
    """

    def __init__(self,
                 in_filters,
                 out_filters,
                 reps,
                 strides=1,
                 padding=0,
                 start_with_relu=True,
                 grow_first=True):
        """
        Args:
            in_filters: number of channels entering the block.
            out_filters: number of channels leaving the block.
            reps: number of separable-conv repetitions.
            strides: stride of the projection conv and of the trailing
                max-pool (pool is appended only when strides != 1).
            padding: padding of the projection conv; the trailing pool
                uses padding + 1.
            start_with_relu: when False, the leading ReLU is removed.
            grow_first: when True, channels grow to out_filters in the
                first repetition; otherwise only in the final one.
        """
        super(Block, self).__init__()
        # A projection skip branch is needed whenever the main path changes
        # the channel count or the spatial resolution.
        if out_filters != in_filters or strides != 1:
            self.skip = autograd.Conv2d(in_filters,
                                         out_filters,
                                         1,
                                         stride=strides,
                                         padding=padding,
                                         bias=False)
            self.skipbn = autograd.BatchNorm2d(out_filters)
        else:
            self.skip = None
        self.layers = []
        filters = in_filters
        if grow_first:
            self.layers.append(autograd.ReLU())
            self.layers.append(
                autograd.SeparableConv2d(in_filters,
                                         out_filters,
                                         3,
                                         stride=1,
                                         padding=1,
                                         bias=False))
            self.layers.append(autograd.BatchNorm2d(out_filters))
            filters = out_filters
        # Remaining repetitions keep the channel count at `filters`.
        for i in range(reps - 1):
            self.layers.append(autograd.ReLU())
            self.layers.append(
                autograd.SeparableConv2d(filters,
                                         filters,
                                         3,
                                         stride=1,
                                         padding=1,
                                         bias=False))
            self.layers.append(autograd.BatchNorm2d(filters))
        if not grow_first:
            # Grow to out_filters only in the last repetition.
            self.layers.append(autograd.ReLU())
            self.layers.append(
                autograd.SeparableConv2d(in_filters,
                                         out_filters,
                                         3,
                                         stride=1,
                                         padding=1,
                                         bias=False))
            self.layers.append(autograd.BatchNorm2d(out_filters))
        if not start_with_relu:
            # Drop the leading ReLU from the stack.
            self.layers = self.layers[1:]
        else:
            # Replace the leading ReLU with a fresh instance.
            self.layers[0] = autograd.ReLU()
        if strides != 1:
            self.layers.append(autograd.MaxPool2d(3, strides, padding + 1))

    def __call__(self, x):
        """Apply the layer stack to x and add the skip branch."""
        y = self.layers[0](x)
        for layer in self.layers[1:]:
            # A layer may hand back a tuple; the tensor is its first element.
            if isinstance(y, tuple):
                y = y[0]
            y = layer(y)
        if self.skip is not None:
            skip = self.skip(x)
            skip = self.skipbn(skip)
        else:
            skip = x
        y = autograd.add(y, skip)
        return y
class Xception(module.Module):
    """
    Xception optimized for the ImageNet dataset, as specified in
    https://arxiv.org/pdf/1610.02357.pdf
    """

    def __init__(self, num_classes=10, num_channels=3, in_size=299):
        """ Constructor
        Args:
            num_classes: number of classes
            num_channels: number of channels in the input image
            in_size: input spatial size; selects the width of the final
                fully connected layer (only 299/416/720/1280 are handled;
                for any other size self.fc is never assigned)
        """
        super(Xception, self).__init__()
        self.num_classes = num_classes
        self.input_size = in_size
        self.dimension = 4
        # Entry flow: two plain strided/padded convolutions.
        self.conv1 = autograd.Conv2d(num_channels, 32, 3, 2, 0, bias=False)
        self.bn1 = autograd.BatchNorm2d(32)
        self.conv2 = autograd.Conv2d(32, 64, 3, 1, 1, bias=False)
        self.bn2 = autograd.BatchNorm2d(64)
        # do relu here
        # Entry-flow blocks: grow channels 64 -> 128 -> 256 -> 728 while
        # downsampling (strides=2).
        self.block1 = Block(64,
                            128,
                            2,
                            2,
                            padding=0,
                            start_with_relu=False,
                            grow_first=True)
        self.block2 = Block(128,
                            256,
                            2,
                            2,
                            padding=0,
                            start_with_relu=True,
                            grow_first=True)
        self.block3 = Block(256,
                            728,
                            2,
                            2,
                            padding=0,
                            start_with_relu=True,
                            grow_first=True)
        # Middle flow: eight identical 728-channel blocks with stride 1.
        self.block4 = Block(728,
                            728,
                            3,
                            1,
                            start_with_relu=True,
                            grow_first=True)
        self.block5 = Block(728,
                            728,
                            3,
                            1,
                            start_with_relu=True,
                            grow_first=True)
        self.block6 = Block(728,
                            728,
                            3,
                            1,
                            start_with_relu=True,
                            grow_first=True)
        self.block7 = Block(728,
                            728,
                            3,
                            1,
                            start_with_relu=True,
                            grow_first=True)
        self.block8 = Block(728,
                            728,
                            3,
                            1,
                            start_with_relu=True,
                            grow_first=True)
        self.block9 = Block(728,
                            728,
                            3,
                            1,
                            start_with_relu=True,
                            grow_first=True)
        self.block10 = Block(728,
                             728,
                             3,
                             1,
                             start_with_relu=True,
                             grow_first=True)
        self.block11 = Block(728,
                             728,
                             3,
                             1,
                             start_with_relu=True,
                             grow_first=True)
        # Exit flow: grow to 1024 channels, downsample, grow_first=False.
        self.block12 = Block(728,
                             1024,
                             2,
                             2,
                             start_with_relu=True,
                             grow_first=False)
        self.conv3 = autograd.SeparableConv2d(1024, 1536, 3, 1, 1)
        self.bn3 = autograd.BatchNorm2d(1536)
        # do relu here
        self.conv4 = autograd.SeparableConv2d(1536, 2048, 3, 1, 1)
        self.bn4 = autograd.BatchNorm2d(2048)
        # NOTE(review): despite the name this is a 10x1 max-pool, not an
        # average pool — confirm this is intentional.
        self.globalpooling = autograd.MaxPool2d(10, 1)
        # The flattened feature width after pooling depends on the input
        # size, hence the per-size fully connected layers below.
        if self.input_size == 299:
            self.fc = autograd.Linear(2048, num_classes)
        elif self.input_size == 416:
            self.fc = autograd.Linear(32768, num_classes)
        elif self.input_size == 720:
            self.fc = autograd.Linear(401408, num_classes)
        elif self.input_size == 1280:
            self.fc = autograd.Linear(1968128, num_classes)

    def features(self, input):
        """Run the convolutional backbone and return the feature map."""
        x = self.conv1(input)
        x = self.bn1(x)
        x = autograd.relu(x)
        x = self.conv2(x)
        x = self.bn2(x)
        x = autograd.relu(x)
        x = self.block1(x)
        x = self.block2(x)
        x = self.block3(x)
        x = self.block4(x)
        x = self.block5(x)
        x = self.block6(x)
        x = self.block7(x)
        x = self.block8(x)
        x = self.block9(x)
        x = self.block10(x)
        x = self.block11(x)
        x = self.block12(x)
        x = self.conv3(x)
        x = self.bn3(x)
        x = autograd.relu(x)
        x = self.conv4(x)
        x = self.bn4(x)
        return x

    def logits(self, features):
        """Pool, flatten and project features to class logits."""
        x = autograd.relu(features)
        x = self.globalpooling(x)
        x = autograd.flatten(x)
        x = self.fc(x)
        return x

    def forward(self, input):
        """Full forward pass: backbone features followed by logits."""
        x = self.features(input)
        x = self.logits(x)
        return x

    def loss(self, out, ty):
        """Softmax cross-entropy between logits `out` and targets `ty`."""
        return autograd.softmax_cross_entropy(out, ty)

    def optim(self, loss, dist_option, spars):
        """Backward pass + parameter update, dispatched on dist_option.

        Note: an unrecognized dist_option silently performs no update.
        """
        if dist_option == 'fp32':
            self.optimizer.backward_and_update(loss)
        elif dist_option == 'fp16':
            self.optimizer.backward_and_update_half(loss)
        elif dist_option == 'partialUpdate':
            self.optimizer.backward_and_partial_update(loss)
        elif dist_option == 'sparseTopK':
            self.optimizer.backward_and_sparse_update(loss,
                                                      topK=True,
                                                      spars=spars)
        elif dist_option == 'sparseThreshold':
            self.optimizer.backward_and_sparse_update(loss,
                                                      topK=False,
                                                      spars=spars)

    def set_optimizer(self, optimizer):
        # Stores the optimizer used later by optim().
        self.optimizer = optimizer
def create_model(pretrained=False, **kwargs):
    """Constructs a Xceptionnet model.

    Args:
        pretrained (bool): If True, returns a model pre-trained
    """
    # NOTE: `pretrained` is accepted for interface compatibility but is
    # not acted upon here.
    return Xception(**kwargs)


__all__ = ['Xception', 'create_model']
|
[
"singa.autograd.Conv2d",
"singa.autograd.relu",
"singa.autograd.softmax_cross_entropy",
"singa.autograd.MaxPool2d",
"singa.autograd.ReLU",
"singa.autograd.SeparableConv2d",
"singa.autograd.BatchNorm2d",
"singa.autograd.Linear",
"singa.autograd.add",
"singa.autograd.flatten"
] |
[((3919, 3940), 'singa.autograd.add', 'autograd.add', (['y', 'skip'], {}), '(y, skip)\n', (3931, 3940), False, 'from singa import autograd\n'), ((4439, 4493), 'singa.autograd.Conv2d', 'autograd.Conv2d', (['num_channels', '(32)', '(3)', '(2)', '(0)'], {'bias': '(False)'}), '(num_channels, 32, 3, 2, 0, bias=False)\n', (4454, 4493), False, 'from singa import autograd\n'), ((4513, 4537), 'singa.autograd.BatchNorm2d', 'autograd.BatchNorm2d', (['(32)'], {}), '(32)\n', (4533, 4537), False, 'from singa import autograd\n'), ((4560, 4604), 'singa.autograd.Conv2d', 'autograd.Conv2d', (['(32)', '(64)', '(3)', '(1)', '(1)'], {'bias': '(False)'}), '(32, 64, 3, 1, 1, bias=False)\n', (4575, 4604), False, 'from singa import autograd\n'), ((4624, 4648), 'singa.autograd.BatchNorm2d', 'autograd.BatchNorm2d', (['(64)'], {}), '(64)\n', (4644, 4648), False, 'from singa import autograd\n'), ((7511, 7556), 'singa.autograd.SeparableConv2d', 'autograd.SeparableConv2d', (['(1024)', '(1536)', '(3)', '(1)', '(1)'], {}), '(1024, 1536, 3, 1, 1)\n', (7535, 7556), False, 'from singa import autograd\n'), ((7576, 7602), 'singa.autograd.BatchNorm2d', 'autograd.BatchNorm2d', (['(1536)'], {}), '(1536)\n', (7596, 7602), False, 'from singa import autograd\n'), ((7648, 7693), 'singa.autograd.SeparableConv2d', 'autograd.SeparableConv2d', (['(1536)', '(2048)', '(3)', '(1)', '(1)'], {}), '(1536, 2048, 3, 1, 1)\n', (7672, 7693), False, 'from singa import autograd\n'), ((7713, 7739), 'singa.autograd.BatchNorm2d', 'autograd.BatchNorm2d', (['(2048)'], {}), '(2048)\n', (7733, 7739), False, 'from singa import autograd\n'), ((7770, 7795), 'singa.autograd.MaxPool2d', 'autograd.MaxPool2d', (['(10)', '(1)'], {}), '(10, 1)\n', (7788, 7795), False, 'from singa import autograd\n'), ((8275, 8291), 'singa.autograd.relu', 'autograd.relu', (['x'], {}), '(x)\n', (8288, 8291), False, 'from singa import autograd\n'), ((8355, 8371), 'singa.autograd.relu', 'autograd.relu', (['x'], {}), '(x)\n', (8368, 8371), False, 'from singa 
import autograd\n'), ((8763, 8779), 'singa.autograd.relu', 'autograd.relu', (['x'], {}), '(x)\n', (8776, 8779), False, 'from singa import autograd\n'), ((8893, 8916), 'singa.autograd.relu', 'autograd.relu', (['features'], {}), '(features)\n', (8906, 8916), False, 'from singa import autograd\n'), ((8963, 8982), 'singa.autograd.flatten', 'autograd.flatten', (['x'], {}), '(x)\n', (8979, 8982), False, 'from singa import autograd\n'), ((9176, 9215), 'singa.autograd.softmax_cross_entropy', 'autograd.softmax_cross_entropy', (['out', 'ty'], {}), '(out, ty)\n', (9206, 9215), False, 'from singa import autograd\n'), ((1430, 1522), 'singa.autograd.Conv2d', 'autograd.Conv2d', (['in_filters', 'out_filters', '(1)'], {'stride': 'strides', 'padding': 'padding', 'bias': '(False)'}), '(in_filters, out_filters, 1, stride=strides, padding=padding,\n bias=False)\n', (1445, 1522), False, 'from singa import autograd\n'), ((1745, 1778), 'singa.autograd.BatchNorm2d', 'autograd.BatchNorm2d', (['out_filters'], {}), '(out_filters)\n', (1765, 1778), False, 'from singa import autograd\n'), ((3467, 3482), 'singa.autograd.ReLU', 'autograd.ReLU', ([], {}), '()\n', (3480, 3482), False, 'from singa import autograd\n'), ((7853, 7887), 'singa.autograd.Linear', 'autograd.Linear', (['(2048)', 'num_classes'], {}), '(2048, num_classes)\n', (7868, 7887), False, 'from singa import autograd\n'), ((1932, 1947), 'singa.autograd.ReLU', 'autograd.ReLU', ([], {}), '()\n', (1945, 1947), False, 'from singa import autograd\n'), ((1997, 2086), 'singa.autograd.SeparableConv2d', 'autograd.SeparableConv2d', (['in_filters', 'out_filters', '(3)'], {'stride': '(1)', 'padding': '(1)', 'bias': '(False)'}), '(in_filters, out_filters, 3, stride=1, padding=1,\n bias=False)\n', (2021, 2086), False, 'from singa import autograd\n'), ((2320, 2353), 'singa.autograd.BatchNorm2d', 'autograd.BatchNorm2d', (['out_filters'], {}), '(out_filters)\n', (2340, 2353), False, 'from singa import autograd\n'), ((2455, 2470), 'singa.autograd.ReLU', 
'autograd.ReLU', ([], {}), '()\n', (2468, 2470), False, 'from singa import autograd\n'), ((2520, 2598), 'singa.autograd.SeparableConv2d', 'autograd.SeparableConv2d', (['filters', 'filters', '(3)'], {'stride': '(1)', 'padding': '(1)', 'bias': '(False)'}), '(filters, filters, 3, stride=1, padding=1, bias=False)\n', (2544, 2598), False, 'from singa import autograd\n'), ((2836, 2865), 'singa.autograd.BatchNorm2d', 'autograd.BatchNorm2d', (['filters'], {}), '(filters)\n', (2856, 2865), False, 'from singa import autograd\n'), ((2926, 2941), 'singa.autograd.ReLU', 'autograd.ReLU', ([], {}), '()\n', (2939, 2941), False, 'from singa import autograd\n'), ((2991, 3080), 'singa.autograd.SeparableConv2d', 'autograd.SeparableConv2d', (['in_filters', 'out_filters', '(3)'], {'stride': '(1)', 'padding': '(1)', 'bias': '(False)'}), '(in_filters, out_filters, 3, stride=1, padding=1,\n bias=False)\n', (3015, 3080), False, 'from singa import autograd\n'), ((3314, 3347), 'singa.autograd.BatchNorm2d', 'autograd.BatchNorm2d', (['out_filters'], {}), '(out_filters)\n', (3334, 3347), False, 'from singa import autograd\n'), ((3540, 3583), 'singa.autograd.MaxPool2d', 'autograd.MaxPool2d', (['(3)', 'strides', '(padding + 1)'], {}), '(3, strides, padding + 1)\n', (3558, 3583), False, 'from singa import autograd\n'), ((7947, 7982), 'singa.autograd.Linear', 'autograd.Linear', (['(32768)', 'num_classes'], {}), '(32768, num_classes)\n', (7962, 7982), False, 'from singa import autograd\n'), ((8042, 8078), 'singa.autograd.Linear', 'autograd.Linear', (['(401408)', 'num_classes'], {}), '(401408, num_classes)\n', (8057, 8078), False, 'from singa import autograd\n'), ((8139, 8176), 'singa.autograd.Linear', 'autograd.Linear', (['(1968128)', 'num_classes'], {}), '(1968128, num_classes)\n', (8154, 8176), False, 'from singa import autograd\n')]
|
import unittest
from tests.test_support import TestSupport
from mock import Mock
from maskgen.masks.donor_rules import VideoDonor, AudioDonor, AllStreamDonor, AllAudioStreamDonor, \
VideoDonorWithoutAudio, InterpolateDonor,AudioZipDonor
from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, \
get_end_time_from_segment, get_end_frame_from_segment
class TestDonorRules(TestSupport):
    """Tests for the donor rule classes in maskgen.masks.donor_rules.

    Each test builds a Mock graph whose predecessors/get_edge lookups
    emulate a tiny link graph, then checks the donor's default arguments
    and the segments produced by ``create``.
    """

    def test_video_donor(self):
        graph = Mock()

        def lkup_preds(x):
            return {'b': ['a'], 'e': ['d']}[x]

        def lkup_edge(x, y):
            return {'ab': {'op': 'NoSelect'},
                    'de': {'op': 'SelectSomething',
                           'arguments': {'Start Time': 20, 'End Time': 100}}}[x + y]

        graph.predecessors = lkup_preds
        graph.get_edge = lkup_edge
        graph.dir = '.'
        # Donor whose predecessor edge carries Start/End Time arguments:
        # those become the argument defaults.
        donor = VideoDonor(graph, 'e', 'f', 'x', (None, self.locateFile('tests/videos/sample1.mov')),
                   (None, self.locateFile('tests/videos/sample1.mov')))
        args = donor.arguments()
        self.assertEqual(20, args['Start Time']['defaultvalue'])
        self.assertEqual(100, args['End Time']['defaultvalue'])
        segments = donor.create(arguments={'include audio': 'yes', 'Start Time': 30, 'End Time': 150})
        for segment in segments:
            if get_type_of_segment(segment) == 'audio':
                self.assertEqual(115542, get_start_frame_from_segment(segment))
                self.assertEqual(509061, get_end_frame_from_segment(segment))
            else:
                self.assertEqual(30, get_start_frame_from_segment(segment))
                self.assertEqual(150, get_end_frame_from_segment(segment))
                self.assertEqual(2620.0, get_start_time_from_segment(segment))
                self.assertEqual(11543, int(get_end_time_from_segment(segment)))
        # Donor whose predecessor edge has no Select op: defaults fall back
        # to frame 1 / end 0.
        donor = VideoDonor(graph, 'b', 'c', 'x', (None, self.locateFile('tests/videos/sample1.mov')),
                   (None, self.locateFile('tests/videos/sample1.mov')))
        args = donor.arguments()
        self.assertEqual(1, args['Start Time']['defaultvalue'])
        self.assertEqual(0, args['End Time']['defaultvalue'])
        segments = donor.create(arguments={'include audio': 'yes', 'Start Time': 30, 'End Time': 150})
        for segment in segments:
            if get_type_of_segment(segment) == 'audio':
                self.assertEqual(115542, get_start_frame_from_segment(segment))
                self.assertEqual(509061, get_end_frame_from_segment(segment))
            else:
                self.assertEqual(30, get_start_frame_from_segment(segment))
                self.assertEqual(150, get_end_frame_from_segment(segment))
                self.assertEqual(2620.0, get_start_time_from_segment(segment))
                self.assertEqual(11543, int(get_end_time_from_segment(segment)))
        # Excluding audio must drop every audio segment.
        segments = donor.create(arguments={'include audio': 'no', 'Start Time': 30, 'End Time': 150})
        self.assertEqual(0, len([segment for segment in segments if get_type_of_segment(segment) == 'audio']))
        # VideoDonorWithoutAudio never exposes the 'include audio' option.
        donor = VideoDonorWithoutAudio(graph, 'b', 'c', 'x', (None, self.locateFile('tests/videos/sample1.mov')),
                              (None, self.locateFile('tests/videos/sample1.mov')))
        self.assertTrue('include audio' not in donor.arguments())

    def test_audio_donor(self):
        graph = Mock()

        def lkup_preds(x):
            return {'b': ['a'], 'e': ['d']}[x]

        def lkup_edge(x, y):
            return {'ab': {'op': 'NoSelect'},
                    'ef': {'op': 'SelectSomething',
                           'arguments': {'Start Time': "00:00:00.000000"}}}[x + y]

        graph.predecessors = lkup_preds
        graph.get_edge = lkup_edge
        graph.dir = '.'
        donor = AudioDonor(graph, 'e', 'f', 'x', (None, self.locateFile('tests/videos/sample1.mov')),
                   (None, self.locateFile('tests/videos/sample1.mov')))
        args = donor.arguments()
        self.assertEqual("00:00:00.000000", args['Start Time']['defaultvalue'])
        self.assertEqual("00:00:00.000000", args['End Time']['defaultvalue'])
        segments = donor.create(arguments={'Start Time': "00:00:01.11", 'End Time': "00:00:01.32"})
        for segment in segments:
            self.assertEqual(48951, get_start_frame_from_segment(segment))
            self.assertEqual(58212, get_end_frame_from_segment(segment))
            self.assertAlmostEqual(1109.97, get_start_time_from_segment(segment), places=1)
            self.assertEqual(1320.0, int(get_end_time_from_segment(segment)))
        # AllStreamDonor takes no arguments and yields both stream types.
        donor = AllStreamDonor(graph, 'e', 'f', 'y', (None, self.locateFile('tests/videos/sample1.mov')),
                       (None, self.locateFile('tests/videos/sample1.mov')))
        args = donor.arguments()
        self.assertEqual(0, len(args))
        segments = donor.create(arguments={})
        types = set()
        for segment in segments:
            types.add(get_type_of_segment(segment))
            if get_type_of_segment(segment) == 'audio':
                self.assertEqual(1, get_start_frame_from_segment(segment))
                self.assertEqual(2617262, get_end_frame_from_segment(segment))
                self.assertAlmostEqual(0, get_start_time_from_segment(segment), places=1)
                self.assertAlmostEqual(59348, int(get_end_time_from_segment(segment)))
            else:
                self.assertEqual(1, get_start_frame_from_segment(segment))
                self.assertEqual(803, get_end_frame_from_segment(segment))
                self.assertAlmostEqual(0, get_start_time_from_segment(segment), places=1)
                self.assertAlmostEqual(59348, int(get_end_time_from_segment(segment)))
        self.assertEqual(2, len(types))
        donor = AllAudioStreamDonor(graph, 'e', 'f', 'y', (None, self.locateFile('tests/videos/sample1.mov')),
                            (None, self.locateFile('tests/videos/sample1.mov')))
        self.assertEqual(0, len(donor.arguments()))
        self.assertEqual(['audio'], donor.media_types())

    def test_audio_zip_donor(self):
        graph = Mock()

        def lkup_preds(x):
            return {'b': ['a'], 'e': ['d']}[x]

        def lkup_edge(x, y):
            return {'ab': {'op': 'NoSelect'},
                    'ef': {'op': 'SelectSomething',
                           'arguments': {'Start Time': "00:00:00.000000"}}}[x + y]

        graph.predecessors = lkup_preds
        graph.get_edge = lkup_edge
        graph.dir = '.'
        donor = AudioZipDonor(graph, 'e', 'f', 'x', (None, self.locateFile('tests/zips/test.wav.zip')),
                      (None, self.locateFile('tests/videos/sample1.mov')))
        args = donor.arguments()
        self.assertEqual("00:00:00.000000", args['Start Time']['defaultvalue'])
        segments = donor.create(arguments={'Start Time': "00:00:09.11", 'End Time': "00:00:16.32", 'sample rate': 44100})
        for segment in segments:
            self.assertEqual(401752, get_start_frame_from_segment(segment))
            self.assertEqual(719713, get_end_frame_from_segment(segment))
            self.assertAlmostEqual(9110, get_start_time_from_segment(segment), places=1)
            self.assertEqual(16320.0, int(get_end_time_from_segment(segment)))
        # An all-zero time range selects the entire audio stream.
        segments = donor.create(
            arguments={'Start Time': "00:00:00.00", 'End Time': "00:00:00.00", 'sample rate': 44100})
        for segment in segments:
            self.assertEqual(1, get_start_frame_from_segment(segment))
            self.assertEqual(1572865, get_end_frame_from_segment(segment))
            self.assertAlmostEqual(0.0, get_start_time_from_segment(segment), places=1)
            self.assertEqual(35665, int(get_end_time_from_segment(segment)))

    def test_image_donor(self):
        import numpy as np
        from maskgen.image_wrap import ImageWrapper
        graph = Mock()

        def lkup_preds(x):
            return {'b': ['a'], 'e': ['d']}[x]

        def lkup_edge(x, y):
            return {'ab': {'op': 'NoSelect'},
                    'de': {'op': 'SelectRegion'}}[x + y]

        withoutalpha = ImageWrapper(np.zeros((400, 400, 3), dtype=np.uint8))
        withAlpha = ImageWrapper(np.zeros((400, 400, 4), dtype=np.uint8))
        mask = ImageWrapper(np.ones((400, 400), dtype=np.uint8) * 255)
        mask.image_array[0:30, 0:30] = 0
        withAlpha.image_array[0:30, 0:30, 3] = 255
        graph.predecessors = lkup_preds
        graph.get_edge = lkup_edge
        graph.dir = '.'
        graph.get_edge_image = Mock(return_value=mask)
        # Predecessor edge is a SelectRegion: the donor inverts the edge mask.
        donor = InterpolateDonor(graph, 'e', 'f', 'x', (withoutalpha, self.locateFile('tests/videos/sample1.mov')),
                         (withAlpha, self.locateFile('tests/videos/sample1.mov')))
        mask = donor.create(arguments={})
        self.assertTrue(np.all(mask.image_array[0:30, 0:30] == 255))
        # assertEquals is a deprecated alias removed in Python 3.12;
        # use assertEqual.
        self.assertEqual(900, np.sum(mask.image_array / 255))
        # No Select op and no usable alpha: no mask can be derived.
        donor = InterpolateDonor(graph, 'b', 'c', 'x', (withoutalpha, self.locateFile('tests/videos/sample1.mov')),
                         (withAlpha, self.locateFile('tests/videos/sample1.mov')))
        mask = donor.create(arguments={})
        self.assertIsNone(mask)
        # With an alpha channel present the donor derives the mask from it.
        donor = InterpolateDonor(graph, 'b', 'c', 'x', (withAlpha, self.locateFile('tests/videos/sample1.mov')),
                         (withAlpha, self.locateFile('tests/videos/sample1.mov')))
        mask = donor.create(arguments={})
        self.assertTrue(np.all(mask.image_array[0:30, 0:30] == 0))
        self.assertEqual(159100, np.sum(mask.image_array / 255))
# Allow running this test module directly via the unittest CLI.
if __name__ == '__main__':
    unittest.main()
|
[
"unittest.main",
"numpy.sum",
"maskgen.video_tools.get_type_of_segment",
"maskgen.video_tools.get_start_time_from_segment",
"numpy.zeros",
"maskgen.video_tools.get_end_frame_from_segment",
"numpy.ones",
"mock.Mock",
"maskgen.video_tools.get_start_frame_from_segment",
"maskgen.video_tools.get_end_time_from_segment",
"numpy.all"
] |
[((9585, 9600), 'unittest.main', 'unittest.main', ([], {}), '()\n', (9598, 9600), False, 'import unittest\n'), ((501, 507), 'mock.Mock', 'Mock', ([], {}), '()\n', (505, 507), False, 'from mock import Mock\n'), ((3326, 3332), 'mock.Mock', 'Mock', ([], {}), '()\n', (3330, 3332), False, 'from mock import Mock\n'), ((6064, 6070), 'mock.Mock', 'Mock', ([], {}), '()\n', (6068, 6070), False, 'from mock import Mock\n'), ((7819, 7825), 'mock.Mock', 'Mock', ([], {}), '()\n', (7823, 7825), False, 'from mock import Mock\n'), ((8490, 8513), 'mock.Mock', 'Mock', ([], {'return_value': 'mask'}), '(return_value=mask)\n', (8494, 8513), False, 'from mock import Mock\n'), ((8082, 8121), 'numpy.zeros', 'np.zeros', (['(400, 400, 3)'], {'dtype': 'np.uint8'}), '((400, 400, 3), dtype=np.uint8)\n', (8090, 8121), True, 'import numpy as np\n'), ((8156, 8195), 'numpy.zeros', 'np.zeros', (['(400, 400, 4)'], {'dtype': 'np.uint8'}), '((400, 400, 4), dtype=np.uint8)\n', (8164, 8195), True, 'import numpy as np\n'), ((8782, 8825), 'numpy.all', 'np.all', (['(mask.image_array[0:30, 0:30] == 255)'], {}), '(mask.image_array[0:30, 0:30] == 255)\n', (8788, 8825), True, 'import numpy as np\n'), ((8856, 8886), 'numpy.sum', 'np.sum', (['(mask.image_array / 255)'], {}), '(mask.image_array / 255)\n', (8862, 8886), True, 'import numpy as np\n'), ((9441, 9482), 'numpy.all', 'np.all', (['(mask.image_array[0:30, 0:30] == 0)'], {}), '(mask.image_array[0:30, 0:30] == 0)\n', (9447, 9482), True, 'import numpy as np\n'), ((9518, 9548), 'numpy.sum', 'np.sum', (['(mask.image_array / 255)'], {}), '(mask.image_array / 255)\n', (9524, 9548), True, 'import numpy as np\n'), ((1296, 1324), 'maskgen.video_tools.get_type_of_segment', 'get_type_of_segment', (['segment'], {}), '(segment)\n', (1315, 1324), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((1700, 1736), 
'maskgen.video_tools.get_start_time_from_segment', 'get_start_time_from_segment', (['segment'], {}), '(segment)\n', (1727, 1736), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((2274, 2302), 'maskgen.video_tools.get_type_of_segment', 'get_type_of_segment', (['segment'], {}), '(segment)\n', (2293, 2302), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((2678, 2714), 'maskgen.video_tools.get_start_time_from_segment', 'get_start_time_from_segment', (['segment'], {}), '(segment)\n', (2705, 2714), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((4247, 4284), 'maskgen.video_tools.get_start_frame_from_segment', 'get_start_frame_from_segment', (['segment'], {}), '(segment)\n', (4275, 4284), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((4322, 4357), 'maskgen.video_tools.get_end_frame_from_segment', 'get_end_frame_from_segment', (['segment'], {}), '(segment)\n', (4348, 4357), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((4403, 4439), 'maskgen.video_tools.get_start_time_from_segment', 'get_start_time_from_segment', (['segment'], {}), '(segment)\n', (4430, 4439), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((4909, 4937), 'maskgen.video_tools.get_type_of_segment', 'get_type_of_segment', 
(['segment'], {}), '(segment)\n', (4928, 4937), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((4954, 4982), 'maskgen.video_tools.get_type_of_segment', 'get_type_of_segment', (['segment'], {}), '(segment)\n', (4973, 4982), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((6931, 6968), 'maskgen.video_tools.get_start_frame_from_segment', 'get_start_frame_from_segment', (['segment'], {}), '(segment)\n', (6959, 6968), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((7007, 7042), 'maskgen.video_tools.get_end_frame_from_segment', 'get_end_frame_from_segment', (['segment'], {}), '(segment)\n', (7033, 7042), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((7085, 7121), 'maskgen.video_tools.get_start_time_from_segment', 'get_start_time_from_segment', (['segment'], {}), '(segment)\n', (7112, 7121), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((7413, 7450), 'maskgen.video_tools.get_start_frame_from_segment', 'get_start_frame_from_segment', (['segment'], {}), '(segment)\n', (7441, 7450), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((7490, 7525), 'maskgen.video_tools.get_end_frame_from_segment', 'get_end_frame_from_segment', (['segment'], {}), '(segment)\n', (7516, 7525), False, 'from 
maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((7567, 7603), 'maskgen.video_tools.get_start_time_from_segment', 'get_start_time_from_segment', (['segment'], {}), '(segment)\n', (7594, 7603), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((8225, 8260), 'numpy.ones', 'np.ones', (['(400, 400)'], {'dtype': 'np.uint8'}), '((400, 400), dtype=np.uint8)\n', (8232, 8260), True, 'import numpy as np\n'), ((1377, 1414), 'maskgen.video_tools.get_start_frame_from_segment', 'get_start_frame_from_segment', (['segment'], {}), '(segment)\n', (1405, 1414), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((1457, 1492), 'maskgen.video_tools.get_end_frame_from_segment', 'get_end_frame_from_segment', (['segment'], {}), '(segment)\n', (1483, 1492), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((1549, 1586), 'maskgen.video_tools.get_start_frame_from_segment', 'get_start_frame_from_segment', (['segment'], {}), '(segment)\n', (1577, 1586), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((1626, 1661), 'maskgen.video_tools.get_end_frame_from_segment', 'get_end_frame_from_segment', (['segment'], {}), '(segment)\n', (1652, 1661), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((1778, 1812), 
'maskgen.video_tools.get_end_time_from_segment', 'get_end_time_from_segment', (['segment'], {}), '(segment)\n', (1803, 1812), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((2355, 2392), 'maskgen.video_tools.get_start_frame_from_segment', 'get_start_frame_from_segment', (['segment'], {}), '(segment)\n', (2383, 2392), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((2435, 2470), 'maskgen.video_tools.get_end_frame_from_segment', 'get_end_frame_from_segment', (['segment'], {}), '(segment)\n', (2461, 2470), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((2527, 2564), 'maskgen.video_tools.get_start_frame_from_segment', 'get_start_frame_from_segment', (['segment'], {}), '(segment)\n', (2555, 2564), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((2604, 2639), 'maskgen.video_tools.get_end_frame_from_segment', 'get_end_frame_from_segment', (['segment'], {}), '(segment)\n', (2630, 2639), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((2756, 2790), 'maskgen.video_tools.get_end_time_from_segment', 'get_end_time_from_segment', (['segment'], {}), '(segment)\n', (2781, 2790), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((4491, 4525), 'maskgen.video_tools.get_end_time_from_segment', 
'get_end_time_from_segment', (['segment'], {}), '(segment)\n', (4516, 4525), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((5031, 5068), 'maskgen.video_tools.get_start_frame_from_segment', 'get_start_frame_from_segment', (['segment'], {}), '(segment)\n', (5059, 5068), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((5112, 5147), 'maskgen.video_tools.get_end_frame_from_segment', 'get_end_frame_from_segment', (['segment'], {}), '(segment)\n', (5138, 5147), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((5191, 5227), 'maskgen.video_tools.get_start_time_from_segment', 'get_start_time_from_segment', (['segment'], {}), '(segment)\n', (5218, 5227), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((5380, 5417), 'maskgen.video_tools.get_start_frame_from_segment', 'get_start_frame_from_segment', (['segment'], {}), '(segment)\n', (5408, 5417), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((5457, 5492), 'maskgen.video_tools.get_end_frame_from_segment', 'get_end_frame_from_segment', (['segment'], {}), '(segment)\n', (5483, 5492), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((5536, 5572), 'maskgen.video_tools.get_start_time_from_segment', 'get_start_time_from_segment', (['segment'], {}), 
'(segment)\n', (5563, 5572), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((7174, 7208), 'maskgen.video_tools.get_end_time_from_segment', 'get_end_time_from_segment', (['segment'], {}), '(segment)\n', (7199, 7208), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((7654, 7688), 'maskgen.video_tools.get_end_time_from_segment', 'get_end_time_from_segment', (['segment'], {}), '(segment)\n', (7679, 7688), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((5289, 5323), 'maskgen.video_tools.get_end_time_from_segment', 'get_end_time_from_segment', (['segment'], {}), '(segment)\n', (5314, 5323), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((5634, 5668), 'maskgen.video_tools.get_end_time_from_segment', 'get_end_time_from_segment', (['segment'], {}), '(segment)\n', (5659, 5668), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n'), ((2963, 2991), 'maskgen.video_tools.get_type_of_segment', 'get_type_of_segment', (['segment'], {}), '(segment)\n', (2982, 2991), False, 'from maskgen.video_tools import get_type_of_segment, get_start_time_from_segment, get_start_frame_from_segment, get_end_time_from_segment, get_end_frame_from_segment\n')]
|
#%% -*- coding: utf-8 -*-
"""
Created on Sun Apr 26 02:47:57 2020

Plot spherical harmonics Y_l^m in 3D with a radial colormap, three ways:
  section 1: surface radius = |Y_l^m|        (magnitude)
  section 2: surface radius = Re(Y_l^m)      (signed real part)
  section 3: radius = 1 + s*Re(Y_l^m), i.e. a perturbed unit sphere,
             coloured by Re(Y_l^m)
Adapted from:
http://balbuceosastropy.blogspot.com/2015/06/spherical-harmonics-in-python.html
"""
from __future__ import division
import scipy as sci
import scipy.special as sp
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm, colors
#%% ===========================================================================
# Section 1: surface whose radius is the magnitude |Y_l^m|.
l = 4 #degree
m = 2 # order
# PHI is the azimuthal angle (0..2*pi, 200 samples); THETA is the polar/
# colatitude angle (0..pi, 100 samples).  scipy's sph_harm(m, n, theta, phi)
# takes the azimuthal angle first, hence the (PHI, THETA) argument order below.
PHI, THETA = np.mgrid[0:2*np.pi:200j, 0:np.pi:100j] #arrays of angular variables
R = np.abs(sp.sph_harm(m, l, PHI, THETA)) #Array with the absolute values of Ylm
# Inert string literal: leftover alternative mapping from geographic data
# (kept verbatim; it has no effect at runtime).
"""
THETA = pi/2 - G_Lat*pi/180
PHI = G_Long*pi/180 + pi
R = G_Grid + 50000
"""
#Now we convert to cartesian coordinates
# for the 3D representation
X = R * np.sin(THETA) * np.cos(PHI)
Y = R * np.sin(THETA) * np.sin(PHI)
Z = R * np.cos(THETA)
N = R/R.max()    # Normalize R for the plot colors to cover the entire range of colormap.
fig, ax = plt.subplots(subplot_kw=dict(projection='3d'), figsize=(7,5))
im = ax.plot_surface(X, Y, Z, rstride=1, cstride=1, facecolors=cm.jet(N))
ax.set_title(r'$|Y^2_ 4|$', fontsize=20)
# NOTE: 'm' is reused here for the colorbar mappable, shadowing the
# harmonic order defined above (it is re-assigned again in each section).
m = cm.ScalarMappable(cmap=cm.jet)
m.set_array(R)    # Assign the unnormalized data array to the mappable
          #so that the scale corresponds to the values of R
fig.colorbar(m, shrink=0.8);
#%% ===========================================================================
# Section 2: surface whose (signed) radius is the real part of Y_l^m.
l = 4    # degree
m = 2    # order
PHI, THETA = np.mgrid[0:2*np.pi:200j, 0:np.pi:100j]
R = sp.sph_harm(m, l, PHI, THETA).real
X = R * np.sin(THETA) * np.cos(PHI)
Y = R * np.sin(THETA) * np.sin(PHI)
Z = R * np.cos(THETA)
#As R has negative values, we'll use an instance of Normalize
#see http://stackoverflow.com/questions/25023075/normalizing-colormap-used-by-facecolors-in-matplotlib
norm = colors.Normalize()
fig, ax = plt.subplots(subplot_kw=dict(projection='3d'), figsize=(7,5))
m = cm.ScalarMappable(cmap=cm.jet)
ax.plot_surface(X, Y, Z, rstride=1, cstride=1, facecolors=cm.jet(norm(R)))
ax.set_title('real$(Y^2_ 4)$', fontsize=20)
m.set_array(R)
fig.colorbar(m, shrink=0.8);
#%% ===========================================================================
# Section 3: unit sphere perturbed by s * Re(Y_l^m), finer angular grid.
l = 4    # degree
m = 2    # order
PHI, THETA = np.mgrid[0:2*np.pi:300j, 0:np.pi:150j]
R = sp.sph_harm(m, l, PHI, THETA).real
s = 1
X = (s*R+1) * np.sin(THETA) * np.cos(PHI)
Y = (s*R+1) * np.sin(THETA) * np.sin(PHI)
Z = (s*R+1) * np.cos(THETA)
norm = colors.Normalize()
fig, ax = plt.subplots(subplot_kw=dict(projection='3d'), figsize=(7,5))
# NOTE(review): the surface is coloured with cm.terrain but the colorbar
# mappable below uses cm.jet -- the scale and the surface colours will not
# match; presumably one of the two colormaps is unintended. Confirm.
m = cm.ScalarMappable(cmap=cm.jet)
ax.plot_surface(X, Y, Z, rstride=1, cstride=1, facecolors=cm.terrain(norm(R)))
ax.set_title('1 + real$(Y^2_ 4)$', fontsize=20)
m.set_array(R)
fig.colorbar(m, shrink=0.8);
#%%
|
[
"scipy.special.sph_harm",
"matplotlib.colors.Normalize",
"matplotlib.cm.ScalarMappable",
"matplotlib.cm.jet",
"numpy.sin",
"numpy.cos"
] |
[((1230, 1260), 'matplotlib.cm.ScalarMappable', 'cm.ScalarMappable', ([], {'cmap': 'cm.jet'}), '(cmap=cm.jet)\n', (1247, 1260), False, 'from matplotlib import cm, colors\n'), ((1908, 1926), 'matplotlib.colors.Normalize', 'colors.Normalize', ([], {}), '()\n', (1924, 1926), False, 'from matplotlib import cm, colors\n'), ((2003, 2033), 'matplotlib.cm.ScalarMappable', 'cm.ScalarMappable', ([], {'cmap': 'cm.jet'}), '(cmap=cm.jet)\n', (2020, 2033), False, 'from matplotlib import cm, colors\n'), ((2534, 2552), 'matplotlib.colors.Normalize', 'colors.Normalize', ([], {}), '()\n', (2550, 2552), False, 'from matplotlib import cm, colors\n'), ((2629, 2659), 'matplotlib.cm.ScalarMappable', 'cm.ScalarMappable', ([], {'cmap': 'cm.jet'}), '(cmap=cm.jet)\n', (2646, 2659), False, 'from matplotlib import cm, colors\n'), ((635, 664), 'scipy.special.sph_harm', 'sp.sph_harm', (['m', 'l', 'PHI', 'THETA'], {}), '(m, l, PHI, THETA)\n', (646, 664), True, 'import scipy.special as sp\n'), ((878, 889), 'numpy.cos', 'np.cos', (['PHI'], {}), '(PHI)\n', (884, 889), True, 'import numpy as np\n'), ((914, 925), 'numpy.sin', 'np.sin', (['PHI'], {}), '(PHI)\n', (920, 925), True, 'import numpy as np\n'), ((934, 947), 'numpy.cos', 'np.cos', (['THETA'], {}), '(THETA)\n', (940, 947), True, 'import numpy as np\n'), ((1605, 1634), 'scipy.special.sph_harm', 'sp.sph_harm', (['m', 'l', 'PHI', 'THETA'], {}), '(m, l, PHI, THETA)\n', (1616, 1634), True, 'import scipy.special as sp\n'), ((1665, 1676), 'numpy.cos', 'np.cos', (['PHI'], {}), '(PHI)\n', (1671, 1676), True, 'import numpy as np\n'), ((1701, 1712), 'numpy.sin', 'np.sin', (['PHI'], {}), '(PHI)\n', (1707, 1712), True, 'import numpy as np\n'), ((1721, 1734), 'numpy.cos', 'np.cos', (['THETA'], {}), '(THETA)\n', (1727, 1734), True, 'import numpy as np\n'), ((2372, 2401), 'scipy.special.sph_harm', 'sp.sph_harm', (['m', 'l', 'PHI', 'THETA'], {}), '(m, l, PHI, THETA)\n', (2383, 2401), True, 'import scipy.special as sp\n'), ((2444, 2455), 'numpy.cos', 'np.cos', 
(['PHI'], {}), '(PHI)\n', (2450, 2455), True, 'import numpy as np\n'), ((2486, 2497), 'numpy.sin', 'np.sin', (['PHI'], {}), '(PHI)\n', (2492, 2497), True, 'import numpy as np\n'), ((2512, 2525), 'numpy.cos', 'np.cos', (['THETA'], {}), '(THETA)\n', (2518, 2525), True, 'import numpy as np\n'), ((862, 875), 'numpy.sin', 'np.sin', (['THETA'], {}), '(THETA)\n', (868, 875), True, 'import numpy as np\n'), ((898, 911), 'numpy.sin', 'np.sin', (['THETA'], {}), '(THETA)\n', (904, 911), True, 'import numpy as np\n'), ((1174, 1183), 'matplotlib.cm.jet', 'cm.jet', (['N'], {}), '(N)\n', (1180, 1183), False, 'from matplotlib import cm, colors\n'), ((1649, 1662), 'numpy.sin', 'np.sin', (['THETA'], {}), '(THETA)\n', (1655, 1662), True, 'import numpy as np\n'), ((1685, 1698), 'numpy.sin', 'np.sin', (['THETA'], {}), '(THETA)\n', (1691, 1698), True, 'import numpy as np\n'), ((2428, 2441), 'numpy.sin', 'np.sin', (['THETA'], {}), '(THETA)\n', (2434, 2441), True, 'import numpy as np\n'), ((2470, 2483), 'numpy.sin', 'np.sin', (['THETA'], {}), '(THETA)\n', (2476, 2483), True, 'import numpy as np\n')]
|
from flask import Flask
from pycoingecko import CoinGeckoAPI
from time import sleep
from threading import Timer
cg = CoinGeckoAPI()  # public CoinGecko REST client (no API key required)
app = Flask(__name__)
# Latest known USD price per coin id; refreshed in the background by
# update_coin_data() and read by the /<coin> route.
coin_data = {}
# Coin ids (CoinGecko naming) whose prices are cached.
coins_to_fetch = ["bitcoin", "ethereum", "litecoin", "monero", "dogecoin", "cardano", "tezos", "stellar"]
#Credit for RepeatedTimer class goes to MestreLion from https://stackoverflow.com/questions/474528/what-is-the-best-way-to-repeatedly-execute-a-function-every-x-seconds
class RepeatedTimer(object):
def __init__(self, interval, function, *args, **kwargs):
self._timer = None
self.interval = interval
self.function = function
self.args = args
self.kwargs = kwargs
self.is_running = False
self.start()
def _run(self):
self.is_running = False
self.start()
self.function(*self.args, **self.kwargs)
def start(self):
if not self.is_running:
self._timer = Timer(self.interval, self._run)
self._timer.start()
self.is_running = True
def stop(self):
self._timer.cancel()
self.is_running = False
def update_coin_data():
    """Fetch current USD quotes from CoinGecko and refresh the coin_data cache."""
    quotes = cg.get_price(ids=coins_to_fetch, vs_currencies='usd')
    coin_data.update({name: quotes[name]['usd'] for name in coins_to_fetch})
@app.route("/<coin>")
def getPrice(coin):
    """Return the cached USD price of *coin* as a string, or "NULL" when untracked."""
    if coin in coin_data:
        return str(coin_data[coin])
    return "NULL"
if __name__ == "__main__":
    # Seed the cache synchronously so the first request never sees an empty map.
    update_coin_data() #get initial values
    # Refresh prices every 10 seconds in the background for the app's lifetime.
    rt = RepeatedTimer(10, update_coin_data)
    # Listen on all interfaces, port 5000.
    app.run('0.0.0.0', 5000)
|
[
"flask.Flask",
"threading.Timer",
"pycoingecko.CoinGeckoAPI"
] |
[((118, 132), 'pycoingecko.CoinGeckoAPI', 'CoinGeckoAPI', ([], {}), '()\n', (130, 132), False, 'from pycoingecko import CoinGeckoAPI\n'), ((139, 154), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (144, 154), False, 'from flask import Flask\n'), ((958, 989), 'threading.Timer', 'Timer', (['self.interval', 'self._run'], {}), '(self.interval, self._run)\n', (963, 989), False, 'from threading import Timer\n')]
|
import grpc
import MFTApi_pb2
import MFTApi_pb2_grpc

# Connect to the locally running MFT API gRPC service (plaintext, no TLS).
channel = grpc.insecure_channel('localhost:7004')
stub = MFTApi_pb2_grpc.MFTApiServiceStub(channel)

# Request an HTTP-accessible download of /tmp/a.txt from the configured
# SCP/SSH storage, to be served by agent0.
# NOTE(review): "<PASSWORD>" is a redacted placeholder; supply a real
# source token before running.
download_request = MFTApi_pb2.HttpDownloadApiRequest(sourceStoreId ="remote-ssh-storage",
                                                  sourcePath= "/tmp/a.txt",
                                                  sourceToken = "<PASSWORD>",
                                                  sourceType= "SCP",
                                                  targetAgent = "agent0",
                                                  mftAuthorizationToken = "")

# The response contains the URL at which the file can be fetched.
result = stub.submitHttpDownload(download_request)
print(result)

## Sample output ##

# url: "http://localhost:3333/53937f40-d545-4180-967c-ddb193d672d8"
# targetAgent: "agent0"
|
[
"grpc.insecure_channel",
"MFTApi_pb2.HttpDownloadApiRequest",
"MFTApi_pb2_grpc.MFTApiServiceStub"
] |
[((64, 103), 'grpc.insecure_channel', 'grpc.insecure_channel', (['"""localhost:7004"""'], {}), "('localhost:7004')\n", (85, 103), False, 'import grpc\n'), ((111, 153), 'MFTApi_pb2_grpc.MFTApiServiceStub', 'MFTApi_pb2_grpc.MFTApiServiceStub', (['channel'], {}), '(channel)\n', (144, 153), False, 'import MFTApi_pb2_grpc\n'), ((173, 367), 'MFTApi_pb2.HttpDownloadApiRequest', 'MFTApi_pb2.HttpDownloadApiRequest', ([], {'sourceStoreId': '"""remote-ssh-storage"""', 'sourcePath': '"""/tmp/a.txt"""', 'sourceToken': '"""<PASSWORD>"""', 'sourceType': '"""SCP"""', 'targetAgent': '"""agent0"""', 'mftAuthorizationToken': '""""""'}), "(sourceStoreId='remote-ssh-storage',\n sourcePath='/tmp/a.txt', sourceToken='<PASSWORD>', sourceType='SCP',\n targetAgent='agent0', mftAuthorizationToken='')\n", (206, 367), False, 'import MFTApi_pb2\n')]
|
from flask import Flask, render_template, request, session, redirect, url_for
from models import db, User#, Places
from forms import SignupForm, LoginForm
app = Flask(__name__)
# Postgres connection string for the local 'rubix' database.
# NOTE(review): credentials are hard-coded; move them to environment config.
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://postgres:edzh@localhost:5432/rubix'
db.init_app(app)
# Session-signing key.
# NOTE(review): static development key -- replace before production use.
app.secret_key = "development-key"
@app.route('/')
def index():
    """Render the landing page."""
    return render_template('index.html')
@app.route('/about')
def about():
    """Render the about page."""
    return render_template("about.html")
@app.route('/profile')
def profile():
    """Render the profile page."""
    return render_template('profile.html')
@app.route('/learnmore')
def learnmore():
    """Render the learn-more page."""
    return render_template('learnmore.html')
@app.route('/signup', methods=['GET', 'POST'])
def signup():
    """Render the signup form and create a new user account.

    GET shows the form; POST validates it, persists the new user and logs
    them in. Visitors who are already logged in are sent straight to the
    home page.
    """
    # Disable access to the signup page if the user is already logged in.
    if 'email' in session:
        return redirect(url_for('home'))

    form = SignupForm()
    if request.method == 'POST':
        # Re-render with the form's validation errors attached on failure.
        if not form.validate():
            return render_template('signup.html', form=form)
        newuser = User(form.first_name.data, form.last_name.data,
                       form.email.data, form.password.data)
        db.session.add(newuser)
        db.session.commit()
        # Start a session for the freshly created account.
        session['email'] = newuser.email
        return redirect(url_for('home'))
    # GET (the only other method this route accepts): show a blank form.
    return render_template('signup.html', form=form)
@app.route('/home')
def home():
    """Show the home page; anonymous visitors are sent to the login page."""
    if 'email' in session:
        return render_template('home.html')
    return redirect(url_for('login'))
# Route to the Login Page
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Render the login form and authenticate the user.

    GET shows the form; POST validates it and, when the account exists and
    the password matches, starts a session and redirects home. Visitors who
    are already logged in are sent straight to the home page.
    """
    # Disable access to the login page if the user is already logged in.
    if 'email' in session:
        return redirect(url_for('home'))

    form = LoginForm()
    if request.method == 'POST':
        # Re-render with the form's validation errors attached on failure.
        if not form.validate():
            return render_template('login.html', form=form)
        user = User.query.filter_by(email=form.email.data).first()
        # Only create a session when the account exists and the password matches.
        if user is not None and user.check_password(form.password.data):
            session['email'] = form.email.data
            return redirect(url_for('home'))
        return redirect(url_for('login'))
    # GET (the only other method this route accepts): show a blank form.
    return render_template('login.html', form=form)
@app.route('/logout')
def logout():
    """Drop the user's session and return to the landing page."""
    session.pop('email', None)
    return redirect(url_for('index'))
@app.errorhandler(404)
def page_not_found(e):
    """Render the custom 404 page for unknown URLs."""
    return render_template('404.html'), 404
if __name__ == "__main__":
    # Development entry point; debug=True enables the reloader and debugger.
    app.run(debug=True)
|
[
"models.db.session.commit",
"flask.session.pop",
"models.db.init_app",
"flask.Flask",
"models.db.session.add",
"forms.SignupForm",
"models.User.query.filter_by",
"flask.url_for",
"flask.render_template",
"forms.LoginForm",
"models.User"
] |
[((162, 177), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (167, 177), False, 'from flask import Flask, render_template, request, session, redirect, url_for\n'), ((269, 285), 'models.db.init_app', 'db.init_app', (['app'], {}), '(app)\n', (280, 285), False, 'from models import db, User\n'), ((361, 390), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (376, 390), False, 'from flask import Flask, render_template, request, session, redirect, url_for\n'), ((435, 464), 'flask.render_template', 'render_template', (['"""about.html"""'], {}), "('about.html')\n", (450, 464), False, 'from flask import Flask, render_template, request, session, redirect, url_for\n'), ((515, 546), 'flask.render_template', 'render_template', (['"""profile.html"""'], {}), "('profile.html')\n", (530, 546), False, 'from flask import Flask, render_template, request, session, redirect, url_for\n'), ((601, 634), 'flask.render_template', 'render_template', (['"""learnmore.html"""'], {}), "('learnmore.html')\n", (616, 634), False, 'from flask import Flask, render_template, request, session, redirect, url_for\n'), ((833, 845), 'forms.SignupForm', 'SignupForm', ([], {}), '()\n', (843, 845), False, 'from forms import SignupForm, LoginForm\n'), ((1501, 1529), 'flask.render_template', 'render_template', (['"""home.html"""'], {}), "('home.html')\n", (1516, 1529), False, 'from flask import Flask, render_template, request, session, redirect, url_for\n'), ((1752, 1763), 'forms.LoginForm', 'LoginForm', ([], {}), '()\n', (1761, 1763), False, 'from forms import SignupForm, LoginForm\n'), ((2453, 2479), 'flask.session.pop', 'session.pop', (['"""email"""', 'None'], {}), "('email', None)\n", (2464, 2479), False, 'from flask import Flask, render_template, request, session, redirect, url_for\n'), ((2498, 2514), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (2505, 2514), False, 'from flask import Flask, render_template, request, session, 
redirect, url_for\n'), ((2574, 2601), 'flask.render_template', 'render_template', (['"""404.html"""'], {}), "('404.html')\n", (2589, 2601), False, 'from flask import Flask, render_template, request, session, redirect, url_for\n'), ((806, 821), 'flask.url_for', 'url_for', (['"""home"""'], {}), "('home')\n", (813, 821), False, 'from flask import Flask, render_template, request, session, redirect, url_for\n'), ((1018, 1059), 'flask.render_template', 'render_template', (['"""signup.html"""'], {'form': 'form'}), "('signup.html', form=form)\n", (1033, 1059), False, 'from flask import Flask, render_template, request, session, redirect, url_for\n'), ((1086, 1175), 'models.User', 'User', (['form.first_name.data', 'form.last_name.data', 'form.email.data', 'form.password.data'], {}), '(form.first_name.data, form.last_name.data, form.email.data, form.\n password.data)\n', (1090, 1175), False, 'from models import db, User\n'), ((1177, 1200), 'models.db.session.add', 'db.session.add', (['newuser'], {}), '(newuser)\n', (1191, 1200), False, 'from models import db, User\n'), ((1207, 1226), 'models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (1224, 1226), False, 'from models import db, User\n'), ((1350, 1391), 'flask.render_template', 'render_template', (['"""signup.html"""'], {'form': 'form'}), "('signup.html', form=form)\n", (1365, 1391), False, 'from flask import Flask, render_template, request, session, redirect, url_for\n'), ((1474, 1490), 'flask.url_for', 'url_for', (['"""login"""'], {}), "('login')\n", (1481, 1490), False, 'from flask import Flask, render_template, request, session, redirect, url_for\n'), ((1724, 1739), 'flask.url_for', 'url_for', (['"""home"""'], {}), "('home')\n", (1731, 1739), False, 'from flask import Flask, render_template, request, session, redirect, url_for\n'), ((1882, 1922), 'flask.render_template', 'render_template', (['"""login.html"""'], {'form': 'form'}), "('login.html', form=form)\n", (1897, 1922), False, 'from flask import Flask, 
render_template, request, session, redirect, url_for\n'), ((2373, 2413), 'flask.render_template', 'render_template', (['"""login.html"""'], {'form': 'form'}), "('login.html', form=form)\n", (2388, 2413), False, 'from flask import Flask, render_template, request, session, redirect, url_for\n'), ((1289, 1304), 'flask.url_for', 'url_for', (['"""home"""'], {}), "('home')\n", (1296, 1304), False, 'from flask import Flask, render_template, request, session, redirect, url_for\n'), ((2013, 2046), 'models.User.query.filter_by', 'User.query.filter_by', ([], {'email': 'email'}), '(email=email)\n', (2033, 2046), False, 'from models import db, User\n'), ((2258, 2273), 'flask.url_for', 'url_for', (['"""home"""'], {}), "('home')\n", (2265, 2273), False, 'from flask import Flask, render_template, request, session, redirect, url_for\n'), ((2311, 2327), 'flask.url_for', 'url_for', (['"""login"""'], {}), "('login')\n", (2318, 2327), False, 'from flask import Flask, render_template, request, session, redirect, url_for\n')]
|
import uuid
from django.db import models
from . import conf
class Item(models.Model):
    """A stored item: UUID-keyed row with a JSON payload and type labels."""
    # Random UUID primary key assigned at creation, immutable afterwards.
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    # Numeric owner id (indexed). Stored as a plain integer, not a FK --
    # presumably referencing a user in another service; confirm with callers.
    owner = models.PositiveIntegerField(db_index=True)
    storage = models.IntegerField(default=0)
    # Arbitrary per-item JSON payload.
    data = models.JSONField(default=dict)
    # Type identifiers; maximum length comes from app configuration.
    base_type = models.CharField(max_length=conf.ITEM_TYPE_LENGTH)
    full_type = models.CharField(max_length=conf.ITEM_TYPE_LENGTH)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    class Meta:
        db_table = 'items'
class LogRecord(models.Model):
    """Log entry referencing an Item by UUID, grouped by transaction."""
    id = models.BigAutoField(primary_key=True)
    # Groups records written in the same logical transaction.
    transaction = models.UUIDField(default=uuid.uuid4)
    # Loose reference to Item.id (no FK constraint at the DB level).
    item = models.UUIDField()
    # Numeric record type code; meaning defined by the application.
    type = models.IntegerField()
    data = models.JSONField(default=dict)
    created_at = models.DateTimeField(auto_now_add=True)
    class Meta:
        db_table = 'log_records'
        # NOTE(review): index_together is deprecated in newer Django versions
        # (use Meta.indexes) -- confirm the project's Django version.
        index_together = [('item', 'created_at')]
|
[
"django.db.models.CharField",
"django.db.models.DateTimeField",
"django.db.models.PositiveIntegerField",
"django.db.models.BigAutoField",
"django.db.models.JSONField",
"django.db.models.IntegerField",
"django.db.models.UUIDField"
] |
[((101, 171), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'primary_key': '(True)', 'default': 'uuid.uuid4', 'editable': '(False)'}), '(primary_key=True, default=uuid.uuid4, editable=False)\n', (117, 171), False, 'from django.db import models\n'), ((185, 227), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'db_index': '(True)'}), '(db_index=True)\n', (212, 227), False, 'from django.db import models\n'), ((243, 273), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (262, 273), False, 'from django.db import models\n'), ((286, 316), 'django.db.models.JSONField', 'models.JSONField', ([], {'default': 'dict'}), '(default=dict)\n', (302, 316), False, 'from django.db import models\n'), ((334, 384), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': 'conf.ITEM_TYPE_LENGTH'}), '(max_length=conf.ITEM_TYPE_LENGTH)\n', (350, 384), False, 'from django.db import models\n'), ((402, 452), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': 'conf.ITEM_TYPE_LENGTH'}), '(max_length=conf.ITEM_TYPE_LENGTH)\n', (418, 452), False, 'from django.db import models\n'), ((471, 510), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (491, 510), False, 'from django.db import models\n'), ((528, 563), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (548, 563), False, 'from django.db import models\n'), ((650, 687), 'django.db.models.BigAutoField', 'models.BigAutoField', ([], {'primary_key': '(True)'}), '(primary_key=True)\n', (669, 687), False, 'from django.db import models\n'), ((707, 743), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4'}), '(default=uuid.uuid4)\n', (723, 743), False, 'from django.db import models\n'), ((756, 774), 'django.db.models.UUIDField', 'models.UUIDField', ([], {}), '()\n', (772, 774), 
False, 'from django.db import models\n'), ((787, 808), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (806, 808), False, 'from django.db import models\n'), ((821, 851), 'django.db.models.JSONField', 'models.JSONField', ([], {'default': 'dict'}), '(default=dict)\n', (837, 851), False, 'from django.db import models\n'), ((870, 909), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (890, 909), False, 'from django.db import models\n')]
|
import os
import json
import pprint
import shutil
from _notebooks.notebook import Notebook
from topfarm.easy_drivers import EasyDriverBase
# def get_cells(nb):
# cells = []
# for cell in nb['cells']:
# if cell['cell_type'] == 'code' and len(cell['source']) > 0 and '%%include' in cell['source'][0]:
# cells.extend(load_notebook(cell['source'][0].replace('%%include', '').strip())['cells'])
# else:
# cells.append(cell)
# return cells
#
# def load_notebook(f):
# with open(f) as fid:
# nb = json.load(fid)
#
# nb['cells'] = get_cells(nb)
# return nb
#
#
# def save_notebook(nb, f):
# with open(f, 'w') as fid:
# json.dump(nb, fid, indent=4)
# # fid.write(pprint.pformat(nb))
def make_tutorials():
    """Expand %%include tags in every template notebook and write the
    result under ../tutorials/ with the same file name."""
    src_dir = os.path.dirname(__file__) + "/templates/"
    dst_dir = os.path.dirname(__file__) + "/../tutorials/"
    for fname in os.listdir(src_dir):
        if not fname.endswith('.ipynb'):
            continue
        nb = Notebook(src_dir + fname)
        nb.replace_include_tag()
        nb.save(dst_dir + fname)
def doc_header(name):
    """Return the cells of elements/doc_setup.ipynb with the '[name]'
    placeholder in the first cell replaced by *name*."""
    setup_nb = Notebook(os.path.dirname(__file__) + "/elements/doc_setup.ipynb")
    first_source = setup_nb.cells[0]['source']
    first_source[0] = first_source[0].replace('[name]', name)
    return setup_nb.cells
def make_doc_notebooks(notebooks):
    """Copy the named element notebooks into docs/notebooks/.

    Each notebook gets a "Try this yourself" Colab link inserted as its
    second cell and, except for the load-related notebooks, a pip-install
    bootstrap code cell as its third.

    :param notebooks: iterable of notebook base names (without extension).
    """
    src_path = os.path.dirname(__file__) + "/elements/"
    dst_path = os.path.dirname(__file__) + "/../docs/notebooks/"
    # Start from a clean destination directory.
    if os.path.isdir(dst_path):
        try:
            shutil.rmtree(dst_path)
        except PermissionError:
            # Best effort: e.g. on Windows an open file can block removal.
            pass
    os.makedirs(dst_path, exist_ok=True)
    for name in notebooks:
        nb = Notebook(src_path + name + ".ipynb")
        t = '[Try this yourself](https://colab.research.google.com/github/DTUWindEnergy/TopFarm2/blob/master/docs/notebooks/%s.ipynb) (requires google account)'
        nb.insert_markdown_cell(1, t % name)
        code = """%%capture
# Install Topfarm if needed
import importlib
if not importlib.util.find_spec("topfarm"):
  !pip install topfarm
"""
        # Load notebooks are distributed without the install cell.
        if name not in ['loads', 'wake_steering_and_loads', 'layout_and_loads']:
            nb.insert_code_cell(2, code)
        nb.save(dst_path + name + ".ipynb")
def check_notebooks(notebooks=None):
    """Execute and link-check element notebooks.

    :param notebooks: iterable of notebook base names (without extension);
        when None, every .ipynb file under elements/ is checked.
    """
    import matplotlib.pyplot as plt

    def _suppress_show(*args, **kwargs):
        pass

    # Replace plt.show so notebook code never blocks waiting for a window.
    plt.show = _suppress_show
    elements_dir = os.path.dirname(__file__) + "/elements/"
    if notebooks is None:
        targets = [f for f in os.listdir(elements_dir) if f.endswith('.ipynb')]
    else:
        targets = [name + '.ipynb' for name in notebooks]
    for fname in targets:
        nb = Notebook(elements_dir + fname)
        nb.check_code()
        nb.check_links()
if __name__ == '__main__':
    # Full set of element notebooks that make up the documentation.
    notebooks = ['constraints', 'cost_models', 'drivers', 'loads', 'problems',
                 'roads_and_cables', 'wake_steering_and_loads', 'layout_and_loads',
                 'bathymetry',]
    # These two are excluded from this run (kept in the list above so the
    # full set stays documented in one place).
    notebooks.remove('wake_steering_and_loads')
    notebooks.remove('loads')
    check_notebooks(notebooks)
    make_doc_notebooks(notebooks)
    print('Done')
|
[
"os.makedirs",
"os.path.isdir",
"os.path.dirname",
"_notebooks.notebook.Notebook",
"shutil.rmtree",
"os.listdir"
] |
[((1524, 1547), 'os.path.isdir', 'os.path.isdir', (['dst_path'], {}), '(dst_path)\n', (1537, 1547), False, 'import os\n'), ((1652, 1688), 'os.makedirs', 'os.makedirs', (['dst_path'], {'exist_ok': '(True)'}), '(dst_path, exist_ok=True)\n', (1663, 1688), False, 'import os\n'), ((804, 829), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (819, 829), False, 'import os\n'), ((927, 945), '_notebooks.notebook.Notebook', 'Notebook', (['(path + f)'], {}), '(path + f)\n', (935, 945), False, 'from _notebooks.notebook import Notebook\n'), ((1411, 1436), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1426, 1436), False, 'import os\n'), ((1467, 1492), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1482, 1492), False, 'import os\n'), ((1729, 1765), '_notebooks.notebook.Notebook', 'Notebook', (["(src_path + name + '.ipynb')"], {}), "(src_path + name + '.ipynb')\n", (1737, 1765), False, 'from _notebooks.notebook import Notebook\n'), ((2503, 2528), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2518, 2528), False, 'import os\n'), ((2745, 2763), '_notebooks.notebook.Notebook', 'Notebook', (['(path + f)'], {}), '(path + f)\n', (2753, 2763), False, 'from _notebooks.notebook import Notebook\n'), ((871, 887), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (881, 887), False, 'import os\n'), ((1202, 1227), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1217, 1227), False, 'import os\n'), ((1574, 1597), 'shutil.rmtree', 'shutil.rmtree', (['dst_path'], {}), '(dst_path)\n', (1587, 1597), False, 'import shutil\n'), ((2602, 2618), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (2612, 2618), False, 'import os\n'), ((995, 1020), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1010, 1020), False, 'import os\n')]
|
import urllib.request as request
url = 'https://ipinfo.io'
username = 'username'
password = 'password'
proxy = f'http://{username}:{password}@gate.<EMAIL>:7000'
query = request.build_opener(request.ProxyHandler({'http': proxy, 'https': proxy}))
print(query.open(url).read())
|
[
"urllib.request.ProxyHandler"
] |
[((195, 248), 'urllib.request.ProxyHandler', 'request.ProxyHandler', (["{'http': proxy, 'https': proxy}"], {}), "({'http': proxy, 'https': proxy})\n", (215, 248), True, 'import urllib.request as request\n')]
|
'''
Translate expressions to SMT import format.
'''
from Z3 import Z3
class UnsatisfiableException(Exception):
pass
# NOTE(JY): Think about if the solver needs to know about everything for
# negative constraints. I don't think so because enough things should be
# concrete that this doesn't matter.
def solve(constraints, defaults, desiredVars):
# NOTE(JY): This is just a sketch of what should go on...
# Implement defaults by adding values to the model and
#for v in jeeveslib.env.envVars:
# jeeveslib.solver.push()
# solver.assertConstraint(v = z3.BoolVal(True))
# if (solver.check() == solver.Unsat):
# jeeveslib.solver.pop()
# Now get the variables back from the solver by evaluating all
# variables in question...
# Now return the new environment...
#return NotImplemented
solver = Z3()
result = {}
for constraint in constraints:
if constraint.type != bool:
raise ValueError("constraints must be bools")
solver.boolExprAssert(constraint)
if not solver.check():
raise UnsatisfiableException("Constraints not satisfiable")
for default in defaults:
solver.push()
if default.type != bool:
raise ValueError("defaults must be bools")
solver.boolExprAssert(default)
if not solver.isSatisfiable():
solver.pop()
assert solver.check()
result = {}
for var in desiredVars:
result[var] = solver.evaluate(var)
assert (result[var] is True) or (result[var] is False)
return result
|
[
"Z3.Z3"
] |
[((829, 833), 'Z3.Z3', 'Z3', ([], {}), '()\n', (831, 833), False, 'from Z3 import Z3\n')]
|
import os
from setuptools import setup, find_packages
# Utility function to read the README file.
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='ovirt-scheduler-proxy',
version=read('VERSION').strip(),
license='ASL2',
description='oVirt Scheduler Proxy',
author='<NAME>',
author_email='<EMAIL>',
url='http://www.ovirt.org/Features/oVirt_External_Scheduling_Proxy',
packages=find_packages("src"),
package_dir={'': 'src'},
long_description=read('README'),
)
|
[
"os.path.dirname",
"setuptools.find_packages"
] |
[((464, 484), 'setuptools.find_packages', 'find_packages', (['"""src"""'], {}), "('src')\n", (477, 484), False, 'from setuptools import setup, find_packages\n'), ((146, 171), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (161, 171), False, 'import os\n')]
|
#!/usr/bin/env python
# -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
import sys
import os
from os.path import join, exists
from os import environ
import shutil
def get_env_systemds_root():
"""
Env variable error check and path location
return: String
Location of SYSTEMDS_ROOT
"""
systemds_root = os.environ.get('SYSTEMDS_ROOT')
if systemds_root is None:
#print('SYSTEMDS_ROOT not found')
#sys.exit()
fn = sys.argv[0]
systemds_root = fn[:fn.rfind('/')] + '/..'
return systemds_root
def get_env_spark_root():
"""
Env variable error check and path location
return: String
Location of SPARK_ROOT
"""
spark_root = environ.get('SPARK_ROOT')
if spark_root is None:
print('SPARK_ROOT not found')
sys.exit()
return spark_root
def find_file(name, path):
"""
Responsible for finding a specific file recursively given a location
"""
for root, dirs, files in os.walk(path):
if name in files:
return join(root, name)
def find_dml_file(systemds_root, script_file):
"""
Find the location of DML script being executed
return: String
Location of the dml script
"""
scripts_dir = join(systemds_root, 'scripts')
if not exists(script_file):
script_file_path = find_file(script_file, scripts_dir)
if script_file_path is not None:
return script_file_path
else:
print('Could not find DML script: ' + script_file)
sys.exit()
return script_file
def log4j_path(systemds_root):
"""
Create log4j.properties from the template if not exist
return: String
Location of log4j.properties path
"""
log4j_properties_path = join(systemds_root, 'conf', 'log4j.properties')
log4j_template_properties_path = join(systemds_root, 'conf', 'log4j.properties.template')
if not (exists(log4j_properties_path)):
shutil.copyfile(log4j_template_properties_path, log4j_properties_path)
print('... created ' + log4j_properties_path)
return log4j_properties_path
def config_path(systemds_root):
"""
Create SystemDS-config from the template if not exist
return: String
Location of SystemDS-config.xml
"""
systemds_config_path = join(systemds_root, 'conf', 'SystemDS-config.xml')
systemds_template_config_path = join(systemds_root, 'conf', 'SystemDS-config.xml.template')
if not (exists(systemds_config_path)):
shutil.copyfile(systemds_template_config_path, systemds_config_path)
print('... created ' + systemds_config_path)
return systemds_config_path
|
[
"os.walk",
"os.path.exists",
"os.environ.get",
"shutil.copyfile",
"os.path.join",
"sys.exit"
] |
[((1195, 1226), 'os.environ.get', 'os.environ.get', (['"""SYSTEMDS_ROOT"""'], {}), "('SYSTEMDS_ROOT')\n", (1209, 1226), False, 'import os\n'), ((1576, 1601), 'os.environ.get', 'environ.get', (['"""SPARK_ROOT"""'], {}), "('SPARK_ROOT')\n", (1587, 1601), False, 'from os import environ\n'), ((1856, 1869), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (1863, 1869), False, 'import os\n'), ((2118, 2148), 'os.path.join', 'join', (['systemds_root', '"""scripts"""'], {}), "(systemds_root, 'scripts')\n", (2122, 2148), False, 'from os.path import join, exists\n'), ((2638, 2685), 'os.path.join', 'join', (['systemds_root', '"""conf"""', '"""log4j.properties"""'], {}), "(systemds_root, 'conf', 'log4j.properties')\n", (2642, 2685), False, 'from os.path import join, exists\n'), ((2723, 2779), 'os.path.join', 'join', (['systemds_root', '"""conf"""', '"""log4j.properties.template"""'], {}), "(systemds_root, 'conf', 'log4j.properties.template')\n", (2727, 2779), False, 'from os.path import join, exists\n'), ((3181, 3231), 'os.path.join', 'join', (['systemds_root', '"""conf"""', '"""SystemDS-config.xml"""'], {}), "(systemds_root, 'conf', 'SystemDS-config.xml')\n", (3185, 3231), False, 'from os.path import join, exists\n'), ((3268, 3327), 'os.path.join', 'join', (['systemds_root', '"""conf"""', '"""SystemDS-config.xml.template"""'], {}), "(systemds_root, 'conf', 'SystemDS-config.xml.template')\n", (3272, 3327), False, 'from os.path import join, exists\n'), ((1675, 1685), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1683, 1685), False, 'import sys\n'), ((2160, 2179), 'os.path.exists', 'exists', (['script_file'], {}), '(script_file)\n', (2166, 2179), False, 'from os.path import join, exists\n'), ((2792, 2821), 'os.path.exists', 'exists', (['log4j_properties_path'], {}), '(log4j_properties_path)\n', (2798, 2821), False, 'from os.path import join, exists\n'), ((2832, 2902), 'shutil.copyfile', 'shutil.copyfile', (['log4j_template_properties_path', 'log4j_properties_path'], {}), 
'(log4j_template_properties_path, log4j_properties_path)\n', (2847, 2902), False, 'import shutil\n'), ((3340, 3368), 'os.path.exists', 'exists', (['systemds_config_path'], {}), '(systemds_config_path)\n', (3346, 3368), False, 'from os.path import join, exists\n'), ((3379, 3447), 'shutil.copyfile', 'shutil.copyfile', (['systemds_template_config_path', 'systemds_config_path'], {}), '(systemds_template_config_path, systemds_config_path)\n', (3394, 3447), False, 'import shutil\n'), ((1916, 1932), 'os.path.join', 'join', (['root', 'name'], {}), '(root, name)\n', (1920, 1932), False, 'from os.path import join, exists\n'), ((2410, 2420), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2418, 2420), False, 'import sys\n')]
|
import unicodedata
import re
import urllib
_slugify_strip_re = re.compile(r'[^\w\s-]')
_slugify_hyphenate_re = re.compile(r'[-\s]+')
def slugify(value):
slug = unicode(_slugify_strip_re.sub('', normalize(value)).strip().lower())
slug = _slugify_hyphenate_re.sub('-', slug)
if not slug:
return quote(value)
return quote(slug)
def normalize(name):
if not isinstance(name, unicode):
name = name.decode('utf8')
return unicodedata.normalize('NFKC', name).encode('utf8')
def quote(name):
return urllib.quote(normalize(name))
def make_icon_url(region, icon, size='large'):
if size == 'small':
size = 18
else:
size = 56
return 'http://%s.media.blizzard.com/wow/icons/%d/%s.jpg' % (region, size, icon)
def make_connection():
if not hasattr(make_connection, 'Connection'):
from .connection import Connection
make_connection.Connection = Connection
return make_connection.Connection()
|
[
"unicodedata.normalize",
"re.compile"
] |
[((64, 88), 're.compile', 're.compile', (['"""[^\\\\w\\\\s-]"""'], {}), "('[^\\\\w\\\\s-]')\n", (74, 88), False, 'import re\n'), ((112, 133), 're.compile', 're.compile', (['"""[-\\\\s]+"""'], {}), "('[-\\\\s]+')\n", (122, 133), False, 'import re\n'), ((462, 497), 'unicodedata.normalize', 'unicodedata.normalize', (['"""NFKC"""', 'name'], {}), "('NFKC', name)\n", (483, 497), False, 'import unicodedata\n')]
|
"""
Common sub models for lubricants
"""
import numpy as np
__all__ = ['constant_array_property', 'roelands', 'barus', 'nd_barus', 'nd_roelands', 'dowson_higginson',
'nd_dowson_higginson']
def constant_array_property(value: float):
""" Produce a closure that returns an index able constant value
Parameters
----------
value: float
The value of the constant
Returns
-------
inner: closure
A closure that returns a fully populated array the same size as the just_touching_gap keyword argument, this is
guaranteed to be in the current state dict, and therefore passed as a keyword when sub models are saved.
Notes
-----
Using this closure means that lubrication steps can be writen for the general case, using indexing on fluid
properties.
See Also
--------
constant_array_property
Examples
--------
>>> closure = constant_array_property(1.23)
>>> constant_array = closure(just_touching_gap = np.ones((5,5)))
>>> constant_array.shape
(5,5)
>>> constant_array[0,0]
1,23
"""
def inner(just_touching_gap: np.ndarray, **kwargs):
return np.ones_like(just_touching_gap) * value
return inner
def roelands(eta_0, pressure_0, z):
""" The roelands pressure viscosity equation
Parameters
----------
eta_0, pressure_0, z: float
Coefficients for the equation, see notes for details
Returns
-------
inner: closure
A callable that produces the viscosity terms according to the Roelands equation, see notes for details
Notes
-----
The roelands equation linking viscosity (eta) to the fluid pressure (p) is given by:
eta(p) = eta_0*exp((ln(eta_0)+9.67)*(-1+(1+(p/p_0)^z))
eta_0, p_0 and z are coefficients that depend on the oil and it's temperature.
"""
ln_eta_0 = np.log(eta_0) + 9.67
def inner(pressure: np.ndarray, **kwargs):
return eta_0 * np.exp(ln_eta_0 * (-1 + (1 + pressure / pressure_0) ** z))
return inner
def nd_roelands(eta_0: float, pressure_0: float, pressure_hertzian: float, z: float):
""" The roelands pressure viscosity equation in a non dimentional form
Parameters
----------
eta_0, pressure_0, z: float
Coefficients for the equation, see notes for details
pressure_hertzian: float
The hertzian pressure used to non dimentionalise the pressure term in the equation. Should be the same as is
used in the reynolds solver
Returns
-------
inner: closure
A callable that produces the non dimentional viscosity according to the Roelands equation, see notes for details
Notes
-----
The roelands equation linking viscosity (eta) to the non dimentional fluid pressure (nd_p) is given by:
eta(p)/eta_0 = exp((ln(eta_0)+9.67)*(-1+(1+(nd_p/p_0*p_h)^z))
eta_0, p_0 and z are coefficients that depend on the oil and it's temperature.
p_h is the hertzian pressure used to non dimentionalise the pressure term.
"""
ln_eta_0 = np.log(eta_0) + 9.67
p_all = pressure_hertzian / pressure_0
def inner(nd_pressure: np.ndarray, **kwargs):
return np.exp(ln_eta_0 * (-1 + (1 + p_all * nd_pressure) ** z))
return inner
def barus(eta_0: float, alpha: float):
""" The Barus pressure viscosity equation
Parameters
----------
eta_0, alpha: float
Coefficients in the equation, see notes for details
Returns
-------
inner: closure
A callable that returns the resulting viscosity according to the barus equation
Notes
-----
The Barus equation linking pressure (p) to viscosity (eta) is given by:
eta(p) = eta_0*exp(alpha*p)
In which eta_0 and alpha are coefficients which depend on the lubricant and it's temperature
"""
def inner(pressure: np.ndarray, **kwargs):
return eta_0 * np.exp(alpha * pressure)
return inner
def nd_barus(pressure_hertzian: float, alpha: float):
""" A non dimentional form of the Barus equation
Parameters
----------
alpha: float
A coefficient in the Barus equation, see notes for details
pressure_hertzian: float
The hertzian pressure used to non dimensionalise the pressure
Returns
-------
inner: closure
A callable that will produce the non dimentional viscosity according to the barus equation
Notes
-----
The non dimentional Barus equation relating the viscosity (eta) to the non dimentional pressure (nd_p) is given by:
eta(p)/eta_0 = exp(alpha*p_h*nd_p)
In which alpha is alpha is a coefficient which will depend on the lubricant used and the temperature
p_h is the hertzian pressure used to non dimentionalise the pressure, this must be the same as is passed to the
reynolds solver.
"""
def inner(nd_pressure: np.ndarray, **kwargs):
return np.exp(alpha * pressure_hertzian * nd_pressure)
return inner
def dowson_higginson(rho_0: float):
""" The Dowson Higginson equation relating pressure to density
Parameters
----------
rho_0: float
A coefficient of the dowson higginson equation, seen notes for details
Returns
-------
inner: closure
A callable that returns the density based on the pressure according to the dowson higginson equation
Notes
-----
The dowson higginson equation relating pressure (p) to density (rho) is given by:
rho(p) = rho_0 * (5.9e8+1.34*p)/(5.9e8+p)
In which rho_0 is the parameter of the equation which will depend on the lubricant used and it's temperature
"""
def inner(pressure: np.ndarray, **kwargs):
return rho_0 * (5.9e8 + 1.34 * pressure) / (5.9e8 + pressure)
return inner
def nd_dowson_higginson(pressure_hertzian: float):
""" A non dimentional form of the Dowson Higginson equation relating pressure to density
Parameters
----------
pressure_hertzian: float
The hertzian pressure used to non dimentionalise the pressure, this must match the pressure given to the
reynolds solver
Returns
-------
inner: closure
A callable that returns the non dimentional density based on the non dimentional pressure
Notes
-----
The non dimentional dowson higginson equation relating non dimensional pressure (nd_p) to density (rho) is given by:
rho(p)/rho_0 = (5.9e8+1.34*p_h*nd_p)/(5.9e8+p_h*nd_p)
In which p_h is the hertzian pressure used to non denationalise the pressure and rho_0 is a parameter of the
dimentional form of the dowson higginson equation. Here the value rho(p)/rho_0 is returned
"""
constant = 5.9e8 / pressure_hertzian
def inner(nd_pressure: np.ndarray, **kwargs):
return (constant + 1.34 * nd_pressure) / (constant + nd_pressure)
return inner
|
[
"numpy.exp",
"numpy.ones_like",
"numpy.log"
] |
[((1879, 1892), 'numpy.log', 'np.log', (['eta_0'], {}), '(eta_0)\n', (1885, 1892), True, 'import numpy as np\n'), ((3063, 3076), 'numpy.log', 'np.log', (['eta_0'], {}), '(eta_0)\n', (3069, 3076), True, 'import numpy as np\n'), ((3193, 3249), 'numpy.exp', 'np.exp', (['(ln_eta_0 * (-1 + (1 + p_all * nd_pressure) ** z))'], {}), '(ln_eta_0 * (-1 + (1 + p_all * nd_pressure) ** z))\n', (3199, 3249), True, 'import numpy as np\n'), ((4913, 4960), 'numpy.exp', 'np.exp', (['(alpha * pressure_hertzian * nd_pressure)'], {}), '(alpha * pressure_hertzian * nd_pressure)\n', (4919, 4960), True, 'import numpy as np\n'), ((1179, 1210), 'numpy.ones_like', 'np.ones_like', (['just_touching_gap'], {}), '(just_touching_gap)\n', (1191, 1210), True, 'import numpy as np\n'), ((1971, 2029), 'numpy.exp', 'np.exp', (['(ln_eta_0 * (-1 + (1 + pressure / pressure_0) ** z))'], {}), '(ln_eta_0 * (-1 + (1 + pressure / pressure_0) ** z))\n', (1977, 2029), True, 'import numpy as np\n'), ((3907, 3931), 'numpy.exp', 'np.exp', (['(alpha * pressure)'], {}), '(alpha * pressure)\n', (3913, 3931), True, 'import numpy as np\n')]
|
import os
import shutil
from pathlib import Path
import conda_content_trust.signing as cct_signing
class RepoSigner:
def sign_repodata(self, repodata_fn, pkg_mgr_key):
final_fn = self.in_folder / "repodata_signed.json"
print("copy", repodata_fn, final_fn)
shutil.copyfile(repodata_fn, final_fn)
cct_signing.sign_all_in_repodata(str(final_fn), pkg_mgr_key)
print(f"Signed {final_fn}")
def __init__(self, in_folder, pkg_mgr_key):
self.in_folder = Path(in_folder).resolve()
f = os.path.join(self.in_folder, "repodata.json")
if os.path.isfile(f):
self.sign_repodata(Path(f), pkg_mgr_key)
|
[
"shutil.copyfile",
"os.path.isfile",
"os.path.join",
"pathlib.Path"
] |
[((287, 325), 'shutil.copyfile', 'shutil.copyfile', (['repodata_fn', 'final_fn'], {}), '(repodata_fn, final_fn)\n', (302, 325), False, 'import shutil\n'), ((545, 590), 'os.path.join', 'os.path.join', (['self.in_folder', '"""repodata.json"""'], {}), "(self.in_folder, 'repodata.json')\n", (557, 590), False, 'import os\n'), ((602, 619), 'os.path.isfile', 'os.path.isfile', (['f'], {}), '(f)\n', (616, 619), False, 'import os\n'), ((506, 521), 'pathlib.Path', 'Path', (['in_folder'], {}), '(in_folder)\n', (510, 521), False, 'from pathlib import Path\n'), ((652, 659), 'pathlib.Path', 'Path', (['f'], {}), '(f)\n', (656, 659), False, 'from pathlib import Path\n')]
|
import numpy as np
from gradient_boosting import *
def test_train_predict():
X_train, y_train = load_dataset("data/tiny.rent.train")
X_val, y_val = load_dataset("data/tiny.rent.test")
y_mean, trees = gradient_boosting_mse(X_train, y_train, 5, max_depth=2, nu=0.1)
assert(np.around(y_mean, decimals=4)== 3839.1724)
y_hat_train = gradient_boosting_predict(X_train, trees, y_mean, nu=0.1)
assert(np.around(r2_score(y_train, y_hat_train), decimals=4)==0.5527)
y_hat = gradient_boosting_predict(X_val, trees, y_mean, nu=0.1)
assert(np.around(r2_score(y_val, y_hat), decimals=4)==0.5109)
|
[
"numpy.around"
] |
[((287, 316), 'numpy.around', 'np.around', (['y_mean'], {'decimals': '(4)'}), '(y_mean, decimals=4)\n', (296, 316), True, 'import numpy as np\n')]
|
import struct
from collections import namedtuple
def read_1(f):
return f.read(1)[0]
def read_2(f):
return struct.unpack('<H', f.read(2))[0]
def read_4(f):
return struct.unpack('<I', f.read(4))[0]
def read_8(f):
return struct.unpack('<Q', f.read(8))[0]
def read_buffer(f):
length = read_4(f)
return f.read(length)
def read_str(f):
s = read_buffer(f)
assert(s[-1] == 0)
return s[0:-1]
LogMessage = namedtuple('LogMessage', ['msg'])
Open = namedtuple('Open', ['flags', 'mode', 'fd', 'path'])
Mmap = namedtuple('Mmap', ['offset', 'prot', 'flags', 'fd', 'region_id', 'start', 'length'])
Munmap = namedtuple('Munmap', ['offset', 'region_id', 'start', 'length', 'unk1', 'unk2'])
StoreInfo = namedtuple('StoreInfo', ['msg'])
Store = namedtuple('Store', ['region_id', 'offset', 'data'])
ProcessMap = namedtuple('ProcessMap', ['msg'])
# etnaviv specific
Commit = namedtuple('Commit', [])
def parse_mmt_file(f):
while True:
ch = f.read(1)
if ch == b'':
return
elif ch == b'=' or ch == b'-': # Comment
s = b''
while True: # read until \n
ch = f.read(1)
if ch == b'\n':
break
else:
s += ch
yield LogMessage(s)
elif ch == b'o': # open
flags = read_4(f)
mode = read_4(f)
fd = read_4(f)
path = read_str(f)
assert(read_1(f) == 10)
yield Open(flags, mode, fd, path)
elif ch == b'M': # mmap
offset = read_8(f)
prot = read_4(f)
flags = read_4(f)
fd = read_4(f)
region_id = read_4(f)
start = read_8(f)
length = read_8(f)
assert(read_1(f) == 10)
yield Mmap(offset, prot, flags, fd, region_id, start, length)
elif ch == b'u': # munmap
offset = read_8(f)
region_id = read_4(f)
start = read_8(f)
length = read_8(f)
unk1 = read_8(f)
unk2 = read_8(f)
assert(read_1(f) == 10)
yield Munmap(offset, region_id, start, length, unk1, unk2)
elif ch == b'x': # store_info
info = read_str(f)
assert(read_1(f) == 10)
yield StoreInfo(info)
elif ch == b'w': # store
region_id = read_4(f)
offset = read_4(f)
length = read_1(f)
data = f.read(length)
assert(read_1(f) == 10)
yield Store(region_id, offset, data)
elif ch == b'c': # commit
assert(read_1(f) == 10)
yield Commit()
elif ch == b'y': # process map
assert(read_8(f) == 1)
msg = read_buffer(f)
assert(read_1(f) == 10)
yield ProcessMap(msg)
else:
print('Unknown ', ch)
exit(1)
|
[
"collections.namedtuple"
] |
[((434, 467), 'collections.namedtuple', 'namedtuple', (['"""LogMessage"""', "['msg']"], {}), "('LogMessage', ['msg'])\n", (444, 467), False, 'from collections import namedtuple\n'), ((475, 526), 'collections.namedtuple', 'namedtuple', (['"""Open"""', "['flags', 'mode', 'fd', 'path']"], {}), "('Open', ['flags', 'mode', 'fd', 'path'])\n", (485, 526), False, 'from collections import namedtuple\n'), ((534, 623), 'collections.namedtuple', 'namedtuple', (['"""Mmap"""', "['offset', 'prot', 'flags', 'fd', 'region_id', 'start', 'length']"], {}), "('Mmap', ['offset', 'prot', 'flags', 'fd', 'region_id', 'start',\n 'length'])\n", (544, 623), False, 'from collections import namedtuple\n'), ((629, 714), 'collections.namedtuple', 'namedtuple', (['"""Munmap"""', "['offset', 'region_id', 'start', 'length', 'unk1', 'unk2']"], {}), "('Munmap', ['offset', 'region_id', 'start', 'length', 'unk1', 'unk2']\n )\n", (639, 714), False, 'from collections import namedtuple\n'), ((722, 754), 'collections.namedtuple', 'namedtuple', (['"""StoreInfo"""', "['msg']"], {}), "('StoreInfo', ['msg'])\n", (732, 754), False, 'from collections import namedtuple\n'), ((763, 815), 'collections.namedtuple', 'namedtuple', (['"""Store"""', "['region_id', 'offset', 'data']"], {}), "('Store', ['region_id', 'offset', 'data'])\n", (773, 815), False, 'from collections import namedtuple\n'), ((829, 862), 'collections.namedtuple', 'namedtuple', (['"""ProcessMap"""', "['msg']"], {}), "('ProcessMap', ['msg'])\n", (839, 862), False, 'from collections import namedtuple\n'), ((892, 916), 'collections.namedtuple', 'namedtuple', (['"""Commit"""', '[]'], {}), "('Commit', [])\n", (902, 916), False, 'from collections import namedtuple\n')]
|
from catalogo.models import Categoria, Produto
class Gerencia_categoria():
def Cria_categoria(request):
nome = request.POST.get("nome")
slug = request.POST.get("slug")
Categoria.objects.create(nome=nome, slug=slug)
def Atualiza_categoria(request, slug):
nome = request.POST.get("nome")
categoria = Categoria.objects.get(slug=slug)
categoria.nome = nome
categoria.save()
def Deleta_categoria(delete):
Categoria.objects.get(id=delete).delete()
class Gerencia_produto():
def Cria_produto(request):
nome = request.POST.get("nome")
slug = request.POST.get("slug")
categoria = request.POST.get("categoria")
descricao = request.POST.get("descricao")
price = request.POST.get("price")
Produto.objects.create(nome=nome, slug=slug, categoria_id=int(categoria), descricao=descricao, price=price)
def Atualiza_produto(request, slug):
nome = request.POST.get("nome")
categoria = request.POST.get("categoria")
descricao = request.POST.get("descricao")
price = request.POST.get("price")
produto = Produto.objects.get(slug=slug)
produto.nome = nome
produto.categoria_id = categoria
produto.descricao = descricao
produto.price = price
produto.save()
def Deleta_produto(delete):
Produto.objects.get(id=delete).delete()
|
[
"catalogo.models.Produto.objects.get",
"catalogo.models.Categoria.objects.get",
"catalogo.models.Categoria.objects.create"
] |
[((197, 243), 'catalogo.models.Categoria.objects.create', 'Categoria.objects.create', ([], {'nome': 'nome', 'slug': 'slug'}), '(nome=nome, slug=slug)\n', (221, 243), False, 'from catalogo.models import Categoria, Produto\n'), ((352, 384), 'catalogo.models.Categoria.objects.get', 'Categoria.objects.get', ([], {'slug': 'slug'}), '(slug=slug)\n', (373, 384), False, 'from catalogo.models import Categoria, Produto\n'), ((1167, 1197), 'catalogo.models.Produto.objects.get', 'Produto.objects.get', ([], {'slug': 'slug'}), '(slug=slug)\n', (1186, 1197), False, 'from catalogo.models import Categoria, Produto\n'), ((483, 515), 'catalogo.models.Categoria.objects.get', 'Categoria.objects.get', ([], {'id': 'delete'}), '(id=delete)\n', (504, 515), False, 'from catalogo.models import Categoria, Produto\n'), ((1399, 1429), 'catalogo.models.Produto.objects.get', 'Produto.objects.get', ([], {'id': 'delete'}), '(id=delete)\n', (1418, 1429), False, 'from catalogo.models import Categoria, Produto\n')]
|
# Adaptation from https://github.com/williamjameshandley/spherical_kde
# For the rule of thumb : https://arxiv.org/pdf/1306.0517.pdf
# Exact risk improvement of bandwidth selectors for kernel density estimation with directional data
# <NAME>
import math
import scipy.optimize
import scipy.special
import torch
from .geometry import spherical_to_cartesian
def logsinh(x):
"""
Compute log(sinh(x)), stably for large x.
:param x : torch.tensor, argument to evaluate at, must be positive
:return torch.tensor, log(sinh(x))
"""
if torch.any(x < 0):
raise ValueError("logsinh only valid for positive arguments")
return x + torch.log(0.5 - torch.exp(-2 * x) / 2)
def maxlikelihood_kappa(data) -> float:
"""
Estimate kappa if data follows a Von Mises Fisher distribution
https://en.wikipedia.org/wiki/Von_Mises%E2%80%93Fisher_distribution#Estimation_of_parameters
:param data : torch.tensor, [Nx3], xyz coordinates of points on the sphere
:return float
"""
r = data.mean(dim=0).square().sum(0).sqrt().item()
def a_p(kappa):
return scipy.special.iv(3 / 2, kappa) / scipy.special.iv(3 / 2 - 1, kappa) - r
opti_kappa = scipy.optimize.brentq(a_p, 1e-4, 1e2)
return opti_kappa
def h_rot(data):
"""
Rule-of-thumb bandwidth hrot for Kernel Density Estimation using Von Mises Fisher kernels
Typo in the original paper, see : https://github.com/egarpor/DirStats/blob/master/R/bw-pi.R
:param data : torch.tensor, [Nx3], xyz coordinates of points on the sphere
:return float
"""
kappa = maxlikelihood_kappa(data)
n = data.shape[0]
num = 8 * math.sinh(kappa) ** 2
den = (-2 * kappa * math.cosh(2 * kappa) + (1 + 4 * kappa ** 2) * math.sinh(2 * kappa)) * n
return (num / den) ** (1 / 6)
class SphereKDE:
"""
Spherical kernel density estimator, using Von Mises Fisher kernels
Inspired by https://github.com/williamjameshandley/spherical_kde
"""
def __init__(self, data_pts, chunk_matmul=10000):
self.pts = data_pts
self.device = data_pts.device
self.chunk_matmul = chunk_matmul
def __call__(self, sampling_pts, bandwidth):
sampling = sampling_pts.to(self.device)
kappa = torch.tensor(1 / (bandwidth ** 2), device=self.device)
logc = torch.log(kappa / (4 * math.pi)) - logsinh(kappa)
# kernels = logc + torch.matmul(sampling, self.pts.T) * kappa
# pdf = torch.exp(torch.logsumexp(kernels, dim=1)) / self.pts.shape[0]
pdf = torch.empty([sampling.shape[0]], device=sampling.device, dtype=sampling.dtype)
for i in range(sampling.shape[0] // self.chunk_matmul + 1): # Solve memory limitations
chk_sampling = sampling[i * self.chunk_matmul:(i + 1) * self.chunk_matmul]
kernels = logc + torch.matmul(chk_sampling, self.pts.T) * kappa
pdf[i * self.chunk_matmul:(i + 1) * self.chunk_matmul] = \
torch.exp(torch.logsumexp(kernels, dim=1)) / self.pts.shape[0]
return pdf
def vonmisesfisher_kde(data_theta, data_phi, x_theta, x_phi, bandwidth=None):
"""
Perform Von Mises-Fisher Kernel Density Estimation (used for spherical data)
:param data_theta: 1D torch.float tensor, containing theta values for training data, between 0 and pi
:param data_phi: 1D torch.float tensor, containing phi values for training data, between 0 and 2*pi
:param x_theta: torch.float tensor, containing theta values for sampling points, between 0 and pi
:param x_phi: torch.float tensor, containing phi values for sampling points, between 0 and 2*pi
:param bandwidth: smoothing bandwith. If None, then uses rule-of-thumb
:return: torch tensor of interpolated values, of the same shape as x_theta and x_phi
"""
data_pts = torch.stack(spherical_to_cartesian(torch.ones_like(data_theta), data_theta, data_phi), dim=1)
if bandwidth is None:
bandwidth = h_rot(data_pts)
assert x_theta.shape == x_phi.shape
shape = x_theta.shape
x_theta, x_phi = x_theta.flatten(), x_phi.flatten()
sampling_pts = torch.stack(spherical_to_cartesian(torch.ones_like(x_theta), x_theta, x_phi), dim=1)
pdf = SphereKDE(data_pts)(sampling_pts, bandwidth=bandwidth)
return pdf.view(shape)
|
[
"torch.ones_like",
"torch.logsumexp",
"torch.any",
"torch.empty",
"math.cosh",
"torch.exp",
"math.sinh",
"torch.matmul",
"torch.log",
"torch.tensor"
] |
[((554, 570), 'torch.any', 'torch.any', (['(x < 0)'], {}), '(x < 0)\n', (563, 570), False, 'import torch\n'), ((2256, 2308), 'torch.tensor', 'torch.tensor', (['(1 / bandwidth ** 2)'], {'device': 'self.device'}), '(1 / bandwidth ** 2, device=self.device)\n', (2268, 2308), False, 'import torch\n'), ((2539, 2617), 'torch.empty', 'torch.empty', (['[sampling.shape[0]]'], {'device': 'sampling.device', 'dtype': 'sampling.dtype'}), '([sampling.shape[0]], device=sampling.device, dtype=sampling.dtype)\n', (2550, 2617), False, 'import torch\n'), ((1653, 1669), 'math.sinh', 'math.sinh', (['kappa'], {}), '(kappa)\n', (1662, 1669), False, 'import math\n'), ((2326, 2358), 'torch.log', 'torch.log', (['(kappa / (4 * math.pi))'], {}), '(kappa / (4 * math.pi))\n', (2335, 2358), False, 'import torch\n'), ((3849, 3876), 'torch.ones_like', 'torch.ones_like', (['data_theta'], {}), '(data_theta)\n', (3864, 3876), False, 'import torch\n'), ((4146, 4170), 'torch.ones_like', 'torch.ones_like', (['x_theta'], {}), '(x_theta)\n', (4161, 4170), False, 'import torch\n'), ((1699, 1719), 'math.cosh', 'math.cosh', (['(2 * kappa)'], {}), '(2 * kappa)\n', (1708, 1719), False, 'import math\n'), ((1745, 1765), 'math.sinh', 'math.sinh', (['(2 * kappa)'], {}), '(2 * kappa)\n', (1754, 1765), False, 'import math\n'), ((673, 690), 'torch.exp', 'torch.exp', (['(-2 * x)'], {}), '(-2 * x)\n', (682, 690), False, 'import torch\n'), ((2830, 2868), 'torch.matmul', 'torch.matmul', (['chk_sampling', 'self.pts.T'], {}), '(chk_sampling, self.pts.T)\n', (2842, 2868), False, 'import torch\n'), ((2974, 3005), 'torch.logsumexp', 'torch.logsumexp', (['kernels'], {'dim': '(1)'}), '(kernels, dim=1)\n', (2989, 3005), False, 'import torch\n')]
|
from django.shortcuts import redirect
from learn.services.choice import random_choice, rythm_choice
def choose_rythm_notation_exercise(request, dictionary_pk):
if request.user.is_authenticated():
translation = rythm_choice(dictionary_pk, request.user)
else:
translation = random_choice(dictionary_pk)
if not translation:
return redirect('learn:come_back', dictionary_pk=dictionary_pk)
return redirect('learn:exercise', dictionary_pk=dictionary_pk, translation_pk=translation.id)
|
[
"django.shortcuts.redirect",
"learn.services.choice.random_choice",
"learn.services.choice.rythm_choice"
] |
[((435, 526), 'django.shortcuts.redirect', 'redirect', (['"""learn:exercise"""'], {'dictionary_pk': 'dictionary_pk', 'translation_pk': 'translation.id'}), "('learn:exercise', dictionary_pk=dictionary_pk, translation_pk=\n translation.id)\n", (443, 526), False, 'from django.shortcuts import redirect\n'), ((225, 266), 'learn.services.choice.rythm_choice', 'rythm_choice', (['dictionary_pk', 'request.user'], {}), '(dictionary_pk, request.user)\n', (237, 266), False, 'from learn.services.choice import random_choice, rythm_choice\n'), ((299, 327), 'learn.services.choice.random_choice', 'random_choice', (['dictionary_pk'], {}), '(dictionary_pk)\n', (312, 327), False, 'from learn.services.choice import random_choice, rythm_choice\n'), ((367, 423), 'django.shortcuts.redirect', 'redirect', (['"""learn:come_back"""'], {'dictionary_pk': 'dictionary_pk'}), "('learn:come_back', dictionary_pk=dictionary_pk)\n", (375, 423), False, 'from django.shortcuts import redirect\n')]
|
# -*- coding: UTF-8 -*-
#
# Copyright (C) 2008-2011 <NAME> <<EMAIL>>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Gas dynamics solver of the Euler equations.
"""
from solvcon.kerpak.cuse import CuseSolver
from solvcon.kerpak.cuse import CuseCase
from solvcon.kerpak.cuse import CuseBC
from solvcon.anchor import Anchor
from solvcon.hook import BlockHook
################################################################################
# Utility.
################################################################################
class MovingShock(object):
"""
Define relations across a moving shock wave. Subscript 1 denotes
quantities before shock (have not touched by shock), subscript 2 denotes
quantities after shock (passed by shock).
@ivar ga: ratio of specific heat.
@itype ga: float
@ivar Ms: Mach number of shock wave.
@itype Ms: float
@ivar gasconst: gas constant.
@itype gasconst: float
"""
def __init__(self, ga, Ms, **kw):
self.ga = ga
self.Ms = Ms
self.gasconst = kw.pop('gasconst', 1.0)
@property
def ratio_p(self):
"""
ratio of upstream/downstream pressure.
"""
ga = self.ga
Ms = self.Ms
return (2*ga*Ms**2 - (ga-1))/(ga+1)
@property
def ratio_rho(self):
"""
ratio of upstream/downstream density.
"""
ga = self.ga
Ms = self.Ms
return (ga+1)*Ms**2/(2+(ga-1)*Ms**2)
@property
def ratio_T(self):
"""
ratio of upstream/downstream temperature.
"""
ga = self.ga
Ms = self.Ms
return self.ratio_p/self.ratio_rho
@property
def M2(self):
"""
Mach number behind standing normal shock wave.
"""
from math import sqrt
ga = self.ga
Ms = self.Ms
return sqrt(((ga-1)*Ms**2+2)/(2*ga*Ms**2-(ga-1)))
@property
def M2p(self):
"""
Mach number behind moving normal shock wave.
"""
from math import sqrt
M1 = self.Ms
M2 = self.M2
ratio_a = sqrt(self.ratio_T)
return M1/ratio_a - M2
def calc_temperature(self, p, rho):
"""
Calculate temperature according to given pressure and density.
@param p: pressure.
@type p: float
@param rho: density.
@type rho: float
@return: temperature
@rtype: float
"""
return p/(rho*self.gasconst)
def calc_speedofsound(self, p, rho):
"""
Calculate speed of sound according to given pressure and density.
@param p: pressure.
@type p: float
@param rho: density.
@type rho: float
@return: speed of sound
@rtype: float
"""
from math import sqrt
ga = self.ga
return sqrt(ga*p/rho)
def calc_speeds(self, p, rho):
"""
Calculate shock wave speed and upstream speed for static downstream.
@param p: downstream pressure.
@type p: float
@param rho: downstream density.
@type rho: float
@return: a 2-tuple for shock wave and upstream speeds.
@rtype: (float, float)
"""
M1 = self.Ms
M2 = self.M2
a1 = self.calc_speedofsound(p, rho)
a2 = self.calc_speedofsound(p*self.ratio_p, rho*self.ratio_rho)
return M1*a1, M1*a1 - M2*a2
###############################################################################
# Solver.
###############################################################################
class GasdynSolver(CuseSolver):
"""
Gas dynamics solver of the Euler equations.
"""
def __init__(self, blk, *args, **kw):
kw['nsca'] = 1
super(GasdynSolver, self).__init__(blk, *args, **kw)
from solvcon.dependency import getcdll
__clib_gasdyn_c = {
2: getcdll('gasdyn2d_c', raise_on_fail=False),
3: getcdll('gasdyn3d_c', raise_on_fail=False),
}
__clib_gasdyn_cu = {
2: getcdll('gasdyn2d_cu', raise_on_fail=False),
3: getcdll('gasdyn3d_cu', raise_on_fail=False),
}
del getcdll
@property
def _clib_gasdyn_c(self):
return self.__clib_gasdyn_c[self.ndim]
@property
def _clib_gasdyn_cu(self):
return self.__clib_gasdyn_cu[self.ndim]
@property
def _clib_mcu(self):
return self.__clib_gasdyn_cu[self.ndim]
_gdlen_ = 0
@property
def _jacofunc_(self):
return self._clib_gasdyn_c.calc_jaco
def calccfl(self, worker=None):
from ctypes import byref
if self.scu:
self._clib_gasdyn_cu.calc_cfl(self.ncuth,
byref(self.cumgr.exd), self.cumgr.gexd.gptr)
else:
self._clib_gasdyn_c.calc_cfl(byref(self.exd))
###############################################################################
# Case.
###############################################################################
class GasdynCase(CuseCase):
"""
Gas dynamics case.
"""
from solvcon.domain import Domain
defdict = {
'solver.solvertype': GasdynSolver,
'solver.domaintype': Domain,
}
del Domain
def load_block(self):
loaded = super(GasdynCase, self).load_block()
if hasattr(loaded, 'ndim'):
ndim = loaded.ndim
else:
ndim = loaded.blk.ndim
self.execution.neq = ndim+2
return loaded
###############################################################################
# Boundary conditions.
###############################################################################
class GasdynBC(CuseBC):
"""
Basic BC class for gas dynamics.
"""
from solvcon.dependency import getcdll
__clib_gasdynb_c = {
2: getcdll('gasdynb2d_c', raise_on_fail=False),
3: getcdll('gasdynb3d_c', raise_on_fail=False),
}
__clib_gasdynb_cu = {
2: getcdll('gasdynb2d_cu', raise_on_fail=False),
3: getcdll('gasdynb3d_cu', raise_on_fail=False),
}
del getcdll
@property
def _clib_gasdynb_c(self):
return self.__clib_gasdynb_c[self.svr.ndim]
@property
def _clib_gasdynb_cu(self):
return self.__clib_gasdynb_cu[self.svr.ndim]
class GasdynWall(GasdynBC):
_ghostgeom_ = 'mirror'
def soln(self):
from ctypes import byref
svr = self.svr
if svr.scu:
self._clib_gasdynb_cu.bound_wall_soln(svr.ncuth,
svr.cumgr.gexd.gptr, self.facn.shape[0], self.cufacn.gptr)
else:
self._clib_gasdynb_c.bound_wall_soln(byref(svr.exd),
self.facn.shape[0], self.facn.ctypes._as_parameter_)
def dsoln(self):
from ctypes import byref
svr = self.svr
if svr.scu:
self._clib_gasdynb_cu.bound_wall_dsoln(svr.ncuth,
svr.cumgr.gexd.gptr, self.facn.shape[0], self.cufacn.gptr)
else:
self._clib_gasdynb_c.bound_wall_dsoln(byref(svr.exd),
self.facn.shape[0], self.facn.ctypes._as_parameter_)
class GasdynNswall(GasdynWall):
def soln(self):
from ctypes import byref
svr = self.svr
if svr.scu:
self._clib_gasdynb_cu.bound_nswall_soln(svr.ncuth,
svr.cumgr.gexd.gptr, self.facn.shape[0], self.cufacn.gptr)
else:
self._clib_gasdynb_c.bound_nswall_soln(byref(svr.exd),
self.facn.shape[0], self.facn.ctypes._as_parameter_)
def dsoln(self):
from ctypes import byref
svr = self.svr
if svr.scu:
self._clib_gasdynb_cu.bound_nswall_dsoln(svr.ncuth,
svr.cumgr.gexd.gptr, self.facn.shape[0], self.cufacn.gptr)
else:
self._clib_gasdynb_c.bound_nswall_dsoln(byref(svr.exd),
self.facn.shape[0], self.facn.ctypes._as_parameter_)
class GasdynInlet(GasdynBC):
vnames = ['rho', 'v1', 'v2', 'v3', 'p', 'gamma']
vdefaults = {
'rho': 1.0, 'p': 1.0, 'gamma': 1.4, 'v1': 0.0, 'v2': 0.0, 'v3': 0.0,
}
_ghostgeom_ = 'mirror'
def soln(self):
from ctypes import byref
svr = self.svr
if svr.scu:
self._clib_gasdynb_cu.bound_inlet_soln(svr.ncuth,
svr.cumgr.gexd.gptr, self.facn.shape[0], self.cufacn.gptr,
self.value.shape[1], self.cuvalue.gptr)
else:
self._clib_gasdynb_c.bound_inlet_soln(byref(svr.exd),
self.facn.shape[0], self.facn.ctypes._as_parameter_,
self.value.shape[1], self.value.ctypes._as_parameter_)
def dsoln(self):
from ctypes import byref
svr = self.svr
if svr.scu:
self._clib_gasdynb_cu.bound_inlet_dsoln(svr.ncuth,
svr.cumgr.gexd.gptr, self.facn.shape[0], self.cufacn.gptr)
else:
self._clib_gasdynb_c.bound_inlet_dsoln(byref(svr.exd),
self.facn.shape[0], self.facn.ctypes._as_parameter_)
###############################################################################
# Anchors.
###############################################################################
class GasdynIAnchor(Anchor):
"""
Basic initializing anchor class of GasdynSolver.
"""
def __init__(self, svr, **kw):
assert isinstance(svr, GasdynSolver)
self.gamma = float(kw.pop('gamma'))
super(GasdynIAnchor, self).__init__(svr, **kw)
def provide(self):
from solvcon.solver_legacy import ALMOST_ZERO
svr = self.svr
svr.amsca.fill(self.gamma)
svr.sol.fill(ALMOST_ZERO)
svr.soln.fill(ALMOST_ZERO)
svr.dsol.fill(ALMOST_ZERO)
svr.dsoln.fill(ALMOST_ZERO)
class UniformIAnchor(GasdynIAnchor):
def __init__(self, svr, **kw):
self.rho = float(kw.pop('rho'))
self.v1 = float(kw.pop('v1'))
self.v2 = float(kw.pop('v2'))
self.v3 = float(kw.pop('v3'))
self.p = float(kw.pop('p'))
super(UniformIAnchor, self).__init__(svr, **kw)
def provide(self):
super(UniformIAnchor, self).provide()
gamma = self.gamma
svr = self.svr
svr.soln[:,0].fill(self.rho)
svr.soln[:,1].fill(self.rho*self.v1)
svr.soln[:,2].fill(self.rho*self.v2)
vs = self.v1**2 + self.v2**2
if svr.ndim == 3:
vs += self.v3**2
svr.soln[:,3].fill(self.rho*self.v3)
svr.soln[:,svr.ndim+1].fill(self.rho*vs/2 + self.p/(gamma-1))
svr.sol[:] = svr.soln[:]
class GasdynOAnchor(Anchor):
"""
Calculates physical quantities for output. Implements (i) provide() and
(ii) postfull() methods.
@ivar gasconst: gas constant.
@itype gasconst: float.
"""
_varlist_ = ['v', 'rho', 'p', 'T', 'ke', 'a', 'M', 'sch']
def __init__(self, svr, **kw):
self.rsteps = kw.pop('rsteps', 1)
self.gasconst = kw.pop('gasconst', 1.0)
self.schk = kw.pop('schk', 1.0)
self.schk0 = kw.pop('schk0', 0.0)
self.schk1 = kw.pop('schk1', 1.0)
super(GasdynOAnchor, self).__init__(svr, **kw)
def _calculate_physics(self):
from ctypes import byref, c_double
svr = self.svr
der = svr.der
svr._clib_gasdyn_c.process_physics(byref(svr.exd),
c_double(self.gasconst),
der['v'].ctypes._as_parameter_,
der['w'].ctypes._as_parameter_,
der['wm'].ctypes._as_parameter_,
der['rho'].ctypes._as_parameter_,
der['p'].ctypes._as_parameter_,
der['T'].ctypes._as_parameter_,
der['ke'].ctypes._as_parameter_,
der['a'].ctypes._as_parameter_,
der['M'].ctypes._as_parameter_,
)
def _calculate_schlieren(self):
from ctypes import byref, c_double
svr = self.svr
sch = svr.der['sch']
svr._clib_gasdyn_c.process_schlieren_rhog(byref(svr.exd),
sch.ctypes._as_parameter_)
rhogmax = sch[svr.ngstcell:].max()
svr._clib_gasdyn_c.process_schlieren_sch(byref(svr.exd),
c_double(self.schk), c_double(self.schk0), c_double(self.schk1),
c_double(rhogmax), sch.ctypes._as_parameter_,
)
def provide(self):
from numpy import empty
svr = self.svr
der = svr.der
nelm = svr.ngstcell + svr.ncell
der['v'] = empty((nelm, svr.ndim), dtype=svr.fpdtype)
der['w'] = empty((nelm, svr.ndim), dtype=svr.fpdtype)
der['wm'] = empty(nelm, dtype=svr.fpdtype)
der['rho'] = empty(nelm, dtype=svr.fpdtype)
der['p'] = empty(nelm, dtype=svr.fpdtype)
der['T'] = empty(nelm, dtype=svr.fpdtype)
der['ke'] = empty(nelm, dtype=svr.fpdtype)
der['a'] = empty(nelm, dtype=svr.fpdtype)
der['M'] = empty(nelm, dtype=svr.fpdtype)
der['sch'] = empty(nelm, dtype=svr.fpdtype)
self._calculate_physics()
self._calculate_schlieren()
def postfull(self):
svr = self.svr
istep = self.svr.step_global
rsteps = self.rsteps
if istep > 0 and istep%rsteps == 0:
if svr.scu:
svr.cumgr.arr_from_gpu('amsca', 'soln', 'dsoln')
self._calculate_physics()
self._calculate_schlieren()
|
[
"ctypes.c_double",
"math.sqrt",
"ctypes.byref",
"numpy.empty",
"solvcon.dependency.getcdll"
] |
[((2530, 2592), 'math.sqrt', 'sqrt', (['(((ga - 1) * Ms ** 2 + 2) / (2 * ga * Ms ** 2 - (ga - 1)))'], {}), '(((ga - 1) * Ms ** 2 + 2) / (2 * ga * Ms ** 2 - (ga - 1)))\n', (2534, 2592), False, 'from math import sqrt\n'), ((2773, 2791), 'math.sqrt', 'sqrt', (['self.ratio_T'], {}), '(self.ratio_T)\n', (2777, 2791), False, 'from math import sqrt\n'), ((3534, 3552), 'math.sqrt', 'sqrt', (['(ga * p / rho)'], {}), '(ga * p / rho)\n', (3538, 3552), False, 'from math import sqrt\n'), ((4573, 4615), 'solvcon.dependency.getcdll', 'getcdll', (['"""gasdyn2d_c"""'], {'raise_on_fail': '(False)'}), "('gasdyn2d_c', raise_on_fail=False)\n", (4580, 4615), False, 'from solvcon.dependency import getcdll\n'), ((4628, 4670), 'solvcon.dependency.getcdll', 'getcdll', (['"""gasdyn3d_c"""'], {'raise_on_fail': '(False)'}), "('gasdyn3d_c', raise_on_fail=False)\n", (4635, 4670), False, 'from solvcon.dependency import getcdll\n'), ((4714, 4757), 'solvcon.dependency.getcdll', 'getcdll', (['"""gasdyn2d_cu"""'], {'raise_on_fail': '(False)'}), "('gasdyn2d_cu', raise_on_fail=False)\n", (4721, 4757), False, 'from solvcon.dependency import getcdll\n'), ((4770, 4813), 'solvcon.dependency.getcdll', 'getcdll', (['"""gasdyn3d_cu"""'], {'raise_on_fail': '(False)'}), "('gasdyn3d_cu', raise_on_fail=False)\n", (4777, 4813), False, 'from solvcon.dependency import getcdll\n'), ((6473, 6516), 'solvcon.dependency.getcdll', 'getcdll', (['"""gasdynb2d_c"""'], {'raise_on_fail': '(False)'}), "('gasdynb2d_c', raise_on_fail=False)\n", (6480, 6516), False, 'from solvcon.dependency import getcdll\n'), ((6529, 6572), 'solvcon.dependency.getcdll', 'getcdll', (['"""gasdynb3d_c"""'], {'raise_on_fail': '(False)'}), "('gasdynb3d_c', raise_on_fail=False)\n", (6536, 6572), False, 'from solvcon.dependency import getcdll\n'), ((6617, 6661), 'solvcon.dependency.getcdll', 'getcdll', (['"""gasdynb2d_cu"""'], {'raise_on_fail': '(False)'}), "('gasdynb2d_cu', raise_on_fail=False)\n", (6624, 6661), False, 'from solvcon.dependency import 
getcdll\n'), ((6674, 6718), 'solvcon.dependency.getcdll', 'getcdll', (['"""gasdynb3d_cu"""'], {'raise_on_fail': '(False)'}), "('gasdynb3d_cu', raise_on_fail=False)\n", (6681, 6718), False, 'from solvcon.dependency import getcdll\n'), ((13057, 13099), 'numpy.empty', 'empty', (['(nelm, svr.ndim)'], {'dtype': 'svr.fpdtype'}), '((nelm, svr.ndim), dtype=svr.fpdtype)\n', (13062, 13099), False, 'from numpy import empty\n'), ((13119, 13161), 'numpy.empty', 'empty', (['(nelm, svr.ndim)'], {'dtype': 'svr.fpdtype'}), '((nelm, svr.ndim), dtype=svr.fpdtype)\n', (13124, 13161), False, 'from numpy import empty\n'), ((13182, 13212), 'numpy.empty', 'empty', (['nelm'], {'dtype': 'svr.fpdtype'}), '(nelm, dtype=svr.fpdtype)\n', (13187, 13212), False, 'from numpy import empty\n'), ((13234, 13264), 'numpy.empty', 'empty', (['nelm'], {'dtype': 'svr.fpdtype'}), '(nelm, dtype=svr.fpdtype)\n', (13239, 13264), False, 'from numpy import empty\n'), ((13284, 13314), 'numpy.empty', 'empty', (['nelm'], {'dtype': 'svr.fpdtype'}), '(nelm, dtype=svr.fpdtype)\n', (13289, 13314), False, 'from numpy import empty\n'), ((13334, 13364), 'numpy.empty', 'empty', (['nelm'], {'dtype': 'svr.fpdtype'}), '(nelm, dtype=svr.fpdtype)\n', (13339, 13364), False, 'from numpy import empty\n'), ((13385, 13415), 'numpy.empty', 'empty', (['nelm'], {'dtype': 'svr.fpdtype'}), '(nelm, dtype=svr.fpdtype)\n', (13390, 13415), False, 'from numpy import empty\n'), ((13435, 13465), 'numpy.empty', 'empty', (['nelm'], {'dtype': 'svr.fpdtype'}), '(nelm, dtype=svr.fpdtype)\n', (13440, 13465), False, 'from numpy import empty\n'), ((13485, 13515), 'numpy.empty', 'empty', (['nelm'], {'dtype': 'svr.fpdtype'}), '(nelm, dtype=svr.fpdtype)\n', (13490, 13515), False, 'from numpy import empty\n'), ((13537, 13567), 'numpy.empty', 'empty', (['nelm'], {'dtype': 'svr.fpdtype'}), '(nelm, dtype=svr.fpdtype)\n', (13542, 13567), False, 'from numpy import empty\n'), ((11946, 11960), 'ctypes.byref', 'byref', (['svr.exd'], {}), '(svr.exd)\n', (11951, 
11960), False, 'from ctypes import byref, c_double\n'), ((11974, 11997), 'ctypes.c_double', 'c_double', (['self.gasconst'], {}), '(self.gasconst)\n', (11982, 11997), False, 'from ctypes import byref, c_double\n'), ((12590, 12604), 'ctypes.byref', 'byref', (['svr.exd'], {}), '(svr.exd)\n', (12595, 12604), False, 'from ctypes import byref, c_double\n'), ((12737, 12751), 'ctypes.byref', 'byref', (['svr.exd'], {}), '(svr.exd)\n', (12742, 12751), False, 'from ctypes import byref, c_double\n'), ((12765, 12784), 'ctypes.c_double', 'c_double', (['self.schk'], {}), '(self.schk)\n', (12773, 12784), False, 'from ctypes import byref, c_double\n'), ((12786, 12806), 'ctypes.c_double', 'c_double', (['self.schk0'], {}), '(self.schk0)\n', (12794, 12806), False, 'from ctypes import byref, c_double\n'), ((12808, 12828), 'ctypes.c_double', 'c_double', (['self.schk1'], {}), '(self.schk1)\n', (12816, 12828), False, 'from ctypes import byref, c_double\n'), ((12842, 12859), 'ctypes.c_double', 'c_double', (['rhogmax'], {}), '(rhogmax)\n', (12850, 12859), False, 'from ctypes import byref, c_double\n'), ((5369, 5390), 'ctypes.byref', 'byref', (['self.cumgr.exd'], {}), '(self.cumgr.exd)\n', (5374, 5390), False, 'from ctypes import byref, c_double\n'), ((5469, 5484), 'ctypes.byref', 'byref', (['self.exd'], {}), '(self.exd)\n', (5474, 5484), False, 'from ctypes import byref, c_double\n'), ((7289, 7303), 'ctypes.byref', 'byref', (['svr.exd'], {}), '(svr.exd)\n', (7294, 7303), False, 'from ctypes import byref, c_double\n'), ((7672, 7686), 'ctypes.byref', 'byref', (['svr.exd'], {}), '(svr.exd)\n', (7677, 7686), False, 'from ctypes import byref, c_double\n'), ((8089, 8103), 'ctypes.byref', 'byref', (['svr.exd'], {}), '(svr.exd)\n', (8094, 8103), False, 'from ctypes import byref, c_double\n'), ((8476, 8490), 'ctypes.byref', 'byref', (['svr.exd'], {}), '(svr.exd)\n', (8481, 8490), False, 'from ctypes import byref, c_double\n'), ((9125, 9139), 'ctypes.byref', 'byref', (['svr.exd'], {}), '(svr.exd)\n', 
(9130, 9139), False, 'from ctypes import byref, c_double\n'), ((9581, 9595), 'ctypes.byref', 'byref', (['svr.exd'], {}), '(svr.exd)\n', (9586, 9595), False, 'from ctypes import byref, c_double\n')]
|
from lexical_analyzer.assignment_analyzer import analyze
if __name__ == '__main__':
assignment_statements = []
# get string statements from file
with open('test-cases.txt', 'r') as file:
for line in file:
assignment_statements.append(line.rstrip('\n'))
# lexically analyze
validation_results = analyze(assignment_statements)
# display results
for statement, validation in zip(assignment_statements, validation_results):
print (f'\n{statement} -> {validation}')
print('-----------------------------------------------------')
|
[
"lexical_analyzer.assignment_analyzer.analyze"
] |
[((342, 372), 'lexical_analyzer.assignment_analyzer.analyze', 'analyze', (['assignment_statements'], {}), '(assignment_statements)\n', (349, 372), False, 'from lexical_analyzer.assignment_analyzer import analyze\n')]
|
#!/usr/bin/env python
# <NAME> (<EMAIL>)
# Fri Jul 23 16:27:08 EDT 2021
#import xarray as xr, numpy as np, pandas as pd
import os.path
import matplotlib.pyplot as plt
#more imports
#from PIL import Image
import random
from matplotlib import image
#
#
#start from here
dice = range(1,6+1)
idir = os.path.dirname(__file__)
while True:
n = random.choice(dice)
ifile = os.path.join(idir, f'dice-{n}.jpg')
#img = Image.open(ifile)
#img.show()
#with Image.open(ifile) as img:
# img.show()
plt.ion()
plt.imshow(image.imread(ifile))
plt.axis('off')
#plt.show()
print(f'Your number is {n}')
s = input(f'Press Return to continue (or type q and press Return to quit):')
plt.close()
if s == 'q':
break
|
[
"matplotlib.image.imread",
"matplotlib.pyplot.close",
"matplotlib.pyplot.axis",
"random.choice",
"matplotlib.pyplot.ion"
] |
[((343, 362), 'random.choice', 'random.choice', (['dice'], {}), '(dice)\n', (356, 362), False, 'import random\n'), ((516, 525), 'matplotlib.pyplot.ion', 'plt.ion', ([], {}), '()\n', (523, 525), True, 'import matplotlib.pyplot as plt\n'), ((566, 581), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (574, 581), True, 'import matplotlib.pyplot as plt\n'), ((716, 727), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (725, 727), True, 'import matplotlib.pyplot as plt\n'), ((541, 560), 'matplotlib.image.imread', 'image.imread', (['ifile'], {}), '(ifile)\n', (553, 560), False, 'from matplotlib import image\n')]
|
# -*- coding: utf-8 -*-
# mostly from: http://stackoverflow.com/questions/30552656/python-traveling-salesman-greedy-algorithm
# credit to cMinor
import math
import random
import itertools
def indexOrNeg(arrr, myvalue):
try:
return arrr.index(myvalue)
except:
return -1
def printTour(tour, cities):
for icity in tour:
print(cities[icity], end=' ')
def readAdventDatafile(myfilename):
"""Reads in the Advent Of Code datafile, which has the list of
cities, connections, and distances between them.
Will return:
n - number of cities
D - distance matrix
cities - array (of length n) of city names
"""
D = {} # dictionary to hold n times n matrix
cities = []
with open(myfilename) as datafile:
for thisstring in datafile:
thisstring = thisstring.rstrip()
tokens = thisstring.split(' ')
index_city1 = indexOrNeg(cities, tokens[0])
if index_city1 < 0:
cities.append(tokens[0])
index_city1 = len(cities)-1
index_city2 = indexOrNeg(cities, tokens[2])
if index_city2 < 0:
cities.append(tokens[2])
index_city2 = len(cities)-1
D[index_city1, index_city2] = int(tokens[4])
D[index_city2, index_city1] = int(tokens[4])
return len(cities), D, cities
def mk_matrix(coord, dist):
"""Compute a distance matrix for a set of points.
Uses function 'dist' to calculate distance between
any two points. Parameters:
-coord -- list of tuples with coordinates of all points, [(x1,y1),...,(xn,yn)]
-dist -- distance function
"""
n = len(coord)
D = {} # dictionary to hold n times n matrix
for i in range(n-1):
for j in range(i+1,n):
(x1,y1) = coord[i]
(x2,y2) = coord[j]
D[i,j] = dist((x1,y1), (x2,y2))
D[j,i] = D[i,j]
return n,D
def mk_closest(D, n):
"""Compute a sorted list of the distances for each of the nodes.
For each node, the entry is in the form [(d1,i1), (d2,i2), ...]
where each tuple is a pair (distance,node).
"""
C = []
for i in range(n):
dlist = [(D[i,j], j) for j in range(n) if j != i]
dlist.sort()
C.append(dlist)
return C
def length(tour, D):
"""Calculate the length of a tour according to distance matrix 'D'."""
#z = D[tour[-1], tour[0]] # edge from last to first city of the tour
z = 0
for i in range(1,len(tour)):
z += D[tour[i], tour[i-1]] # add length of edge from city i-1 to i
return z
def randtour(n):
"""Construct a random tour of size 'n'."""
sol = list(range(n)) # set solution equal to [0,1,...,n-1]
random.shuffle(sol) # place it in a random order
return sol
def nearest(last, unvisited, D):
"""Return the index of the node which is closest to 'last'."""
near = unvisited[0]
min_dist = D[last, near]
for i in unvisited[1:]:
if D[last,i] < min_dist:
near = i
min_dist = D[last, near]
return near
def nearest_neighbor(n, i, D):
"""Return tour starting from city 'i', using the Nearest Neighbor.
Uses the Nearest Neighbor heuristic to construct a solution:
- start visiting city i
- while there are unvisited cities, follow to the closest one
- return to city i
"""
unvisited = list(range(n))
unvisited.remove(i)
last = i
tour = [i]
while unvisited != []:
next = nearest(last, unvisited, D)
tour.append(next)
unvisited.remove(next)
last = next
return tour
def exchange_cost(tour, i, j, D):
"""Calculate the cost of exchanging two arcs in a tour.
Determine the variation in the tour length if
arcs (i,i+1) and (j,j+1) are removed,
and replaced by (i,j) and (i+1,j+1)
(note the exception for the last arc).
Parameters:
-t -- a tour
-i -- position of the first arc
-j>i -- position of the second arc
"""
n = len(tour)
a,b = tour[i],tour[(i+1)%n]
c,d = tour[j],tour[(j+1)%n]
return (D[a,c] + D[b,d]) - (D[a,b] + D[c,d])
def exchange(tour, tinv, i, j):
"""Exchange arcs (i,i+1) and (j,j+1) with (i,j) and (i+1,j+1).
For the given tour 't', remove the arcs (i,i+1) and (j,j+1) and
insert (i,j) and (i+1,j+1).
This is done by inverting the sublist of cities between i and j.
"""
n = len(tour)
if i>j:
i,j = j,i
assert i>=0 and i<j-1 and j<n
path = tour[i+1:j+1]
path.reverse()
tour[i+1:j+1] = path
for k in range(i+1,j+1):
tinv[tour[k]] = k
def improve(tour, z, D, C):
"""Try to improve tour 't' by exchanging arcs; return improved tour length.
If possible, make a series of local improvements on the solution 'tour',
using a breadth first strategy, until reaching a local optimum.
"""
n = len(tour)
tinv = [0 for i in tour]
for k in range(n):
tinv[tour[k]] = k # position of each city in 't'
for i in range(n):
a,b = tour[i],tour[(i+1)%n]
dist_ab = D[a,b]
improved = False
for dist_ac,c in C[a]:
if dist_ac >= dist_ab:
break
j = tinv[c]
d = tour[(j+1)%n]
dist_cd = D[c,d]
dist_bd = D[b,d]
delta = (dist_ac + dist_bd) - (dist_ab + dist_cd)
if delta < 0: # exchange decreases length
exchange(tour, tinv, i, j);
z += delta
improved = True
break
if improved:
continue
for dist_bd,d in C[b]:
if dist_bd >= dist_ab:
break
j = tinv[d]-1
if j==-1:
j=n-1
c = tour[j]
dist_cd = D[c,d]
dist_ac = D[a,c]
delta = (dist_ac + dist_bd) - (dist_ab + dist_cd)
if delta < 0: # exchange decreases length
exchange(tour, tinv, i, j);
z += delta
break
return z
def localsearch(tour, z, D, C=None):
"""Obtain a local optimum starting from solution t; return solution length.
Parameters:
tour -- initial tour
z -- length of the initial tour
D -- distance matrix
"""
n = len(tour)
if C == None:
C = mk_closest(D, n) # create a sorted list of distances to each node
while 1:
newz = improve(tour, z, D, C)
if newz < z:
z = newz
else:
break
return z
def multistart_localsearch(k, n, D, report=None):
"""Do k iterations of local search, starting from random solutions.
Parameters:
-k -- number of iterations
-D -- distance matrix
-report -- if not None, call it to print verbose output
Returns best solution and its cost.
"""
C = mk_closest(D, n) # create a sorted list of distances to each node
bestt=None
bestz=None
for i in range(0,k):
tour = randtour(n)
z = length(tour, D)
z = localsearch(tour, z, D, C)
if bestz == None or z < bestz:
bestz = z
bestt = list(tour)
if report:
report(z, tour)
return bestt, bestz
def all_permutations(n, D, report=None):
"""Do all of the permutations of tours"""
icount = 0
bestt = None
bestz = None
worstt = None
worstz = None
for thistour in itertools.permutations(range(n)):
icount += 1
z = length(thistour,D)
if bestz == None or z < bestz:
bestz = z
bestt = list(thistour)
if report:
report(z,thistour)
if worstz == None or z > worstz:
worstz = z
worstt = list(thistour)
if report:
report(z,thistour)
return bestt, bestz, worstt, worstz
if __name__ == "__main__":
"""Local search for the Travelling Saleman Problem: sample usage."""
#
# test the functions:
#
# random.seed(1) # uncomment for having always the same behavior
import sys
# read in datafile
n, D, cities = readAdventDatafile('day9.dat')
# at this point, I need:
# n - number of cities
# D - distance matrix
# function for printing best found solution when it is found
from time import clock
init = clock()
def report_sol(obj, s=""):
print("cpu:%g\tobj:%g\ttour:%s" % \
(clock(), obj, s))
print("*** travelling salesman problem ***")
print
# random construction
print("random construction + local search:")
tour = randtour(n) # create a random tour
z = length(tour, D) # calculate its length
print("random:", tour, z, ' --> ',end='')
z = localsearch(tour, z, D) # local search starting from the random tour
# print(tour, z)
printTour(tour, cities)
print(" cost={0}".format(z))
print
# greedy construction
print("greedy construction with nearest neighbor + local search:")
for i in range(n):
tour = nearest_neighbor(n, i, D) # create a greedy tour, visiting city 'i' first
z = length(tour, D)
print("nneigh:", tour, z, ' --> ',end='')
z = localsearch(tour, z, D)
# print(tour, z)
printTour(tour, cities)
print(" cost={0}".format(z))
print
# multi-start local search
# print("random start local search:")
# niter = 10000
# tour,z = multistart_localsearch(niter, n, D, report_sol)
# assert z == length(tour, D)
# print("best found solution (%d iterations): z = %g" % (niter, z))
# printTour(tour, cities)
# print(" cost={0}".format(z))
# all the permutations
print("all the permutations!")
tour, z, worsttour, worstz = all_permutations(n, D, report_sol)
assert z == length(tour,D)
print("best found solution: z = %g" % z)
printTour(tour, cities)
print(" cost={0}".format(z))
print("worst found solution: z = %g" % worstz)
printTour(worsttour, cities)
print(" cost={0}".format(worstz))
|
[
"random.shuffle",
"time.clock"
] |
[((2889, 2908), 'random.shuffle', 'random.shuffle', (['sol'], {}), '(sol)\n', (2903, 2908), False, 'import random\n'), ((8581, 8588), 'time.clock', 'clock', ([], {}), '()\n', (8586, 8588), False, 'from time import clock\n'), ((8679, 8686), 'time.clock', 'clock', ([], {}), '()\n', (8684, 8686), False, 'from time import clock\n')]
|
import torch
def get_zero_count(matrix):
# A utility function to count the number of zeroes in a 2-D matrix
return torch.sum(matrix == 0).item()
def apply_mask_dict_to_weight_dict(mask_dict, weight_dict):
# mask_dict - a dictionary where keys are layer names (string) and values are masks (bytetensor) for that layer
# weight_dict - a dictionary where keys are layer names and values are weights (tensor) for that layer
# Applies the mask to the weight for each layer. This is done by simple multiplying the weight by the mask
# (Hadamard product)
# Since every value in the mask is either 0 or 1, this is equivalent to either letting the weight go unchanged or
# setting it as 0
weights_after_masking = dict()
for layer_name, mask in mask_dict.items():
weight = weight_dict[layer_name]
# The mask should be copied to the cpu since `weights_after_masking` dict is always stored in memory, and not the GPU
weights_after_masking[layer_name] = weight * mask.cpu().float()
return weights_after_masking
|
[
"torch.sum"
] |
[((125, 147), 'torch.sum', 'torch.sum', (['(matrix == 0)'], {}), '(matrix == 0)\n', (134, 147), False, 'import torch\n')]
|
from .tokens2ast.ast_builder import *
from .parse2tokens.parser import Parser, SyntaxException
from .ast2sql.ast2sqlconverter import Ast2SqlConverter
from .ast2sql.exceptions import *
from ..sql_engine.db_state_tracker import DBStateTracker
from colorama import *
__author__ = 'caioseguin', 'saltzm'
class Datalog2SqlConverter:
def __init__(self, db_state_tracker):
self.db_state_tracker = db_state_tracker
def convertDatalog2Sql(self, datalog_statement, is_assertion = False):
sql_query_list = []
try:
parsed_statement = Parser().parsesentence(datalog_statement).asList()
ast_query_list = ASTBuilder().buildAST(
parsed_statement,
is_assertion
)
for ast_query in ast_query_list:
sql_query = \
Ast2SqlConverter(self.db_state_tracker).convertAst2Sql(ast_query)
sql_query_list.append(sql_query)
except SyntaxException as e:
print (Fore.RED+'SyntaxException: ' + str(e)+Fore.RESET)
except SafetyException as e:
print (Fore.RED+'SafetyException: ' + str(e)+Fore.RESET)
except Exception as e:
import traceback
traceback.print_exc()
return sql_query_list
def trim_assert(self, statement):
return statement[len('/assert '):]
|
[
"traceback.print_exc"
] |
[((1294, 1315), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (1313, 1315), False, 'import traceback\n')]
|
# Generated by Django 2.2.3 on 2019-08-12 14:31
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('antiqueProjectApp', '0011_auto_20190812_1453'),
]
operations = [
migrations.AlterModelOptions(
name='antiquesale',
options={'permissions': (('can_buy', 'Set antique as purchased'),)},
),
migrations.AlterField(
model_name='antique',
name='AntiqueType',
field=models.ManyToManyField(help_text='Select a type for this antique', to='antiqueProjectApp.AntiqueType'),
),
]
|
[
"django.db.models.ManyToManyField",
"django.db.migrations.AlterModelOptions"
] |
[((245, 367), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""antiquesale"""', 'options': "{'permissions': (('can_buy', 'Set antique as purchased'),)}"}), "(name='antiquesale', options={'permissions': ((\n 'can_buy', 'Set antique as purchased'),)})\n", (273, 367), False, 'from django.db import migrations, models\n'), ((514, 621), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'help_text': '"""Select a type for this antique"""', 'to': '"""antiqueProjectApp.AntiqueType"""'}), "(help_text='Select a type for this antique', to=\n 'antiqueProjectApp.AntiqueType')\n", (536, 621), False, 'from django.db import migrations, models\n')]
|
# This file is a part of Arjuna
# Copyright 2015-2021 <NAME>
# Website: www.RahulVerma.net
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from enum import Enum, auto
class ArjunaOption(Enum):
'''
Represents all built-in configuration options for Arjuna.
Any option name which is does not correspond to ArjunaOption enum constant is treated as a user defined option.
'''
ARJUNA_ROOT_DIR = auto()
'''Root Directory of Arjuna Installed/Imported in a session'''
ARJUNA_EXTERNAL_IMPORTS_DIR = auto()
'''Directory of third party libs directly included in Arjuna.'''
LOG_NAME = auto()
'''Name of Arjuna's log file'''
RUN_ID = auto()
'''An alnum string representing current test run. Default is **mrun**'''
RUN_SESSION_NAME = auto()
'''Current session name.'''
RUN_HOST_OS = auto()
'''Host Operating System type: Windows/Mac/Linux.'''
LOG_FILE_LEVEL = auto()
'''Minimum level for a message to be logged to log file.'''
LOG_CONSOLE_LEVEL = auto()
'''Minimum level for a message to be displayed on console'''
LOG_ALLOWED_CONTEXTS = auto()
'''Allowed context strings for logging (file as well as display). Messages without contexts always get logged.'''
L10N_LOCALE = auto()
'''Default Locale type to be used for Localization call. Values as per arjuna.tpi.constant.Locale'''
L10N_STRICT = auto()
'''Sets Localization mode to strict. Default is False.'''
L10N_DIR = auto()
'''Directory containing Localization files.'''
PROJECT_NAME = auto()
'''Test Project Name'''
PROJECT_ROOT_DIR = auto()
'''Test Project Root Directory'''
CONF_PROJECT_FILE = auto()
'''Project conf file path.'''
CONF_PROJECT_LOCAL_FILE = auto()
'''Local Project conf file path.'''
TESTS_DIR = auto()
'''Directory containing test modules.'''
HOOKS_PACKAGE = auto()
'''Arjuna Hooks Package Import Path.'''
HOOKS_CONFIG_PACKAGE = auto()
'''Arjuna Config Hooks Package Import Path.'''
HOOKS_ENTITY_PACKAGE = auto()
'''Arjuna Data Entity Hooks Package Import Path.'''
HOOKS_RESOURCE_PACKAGE = auto()
'''Arjuna Resource Package Import Path.'''
REPORTS_DIR = auto()
'''Root directory for test reports.'''
REPORT_FORMATS = auto()
'''Formats for Report Generation. XML/HTML'''
REPORT_DIR = auto()
'''Reporting directory for current test run under REPORTS_DIR. Name is generated with RUN_ID and Current Timestamp. With --static-rid CLI switch, timestamp is not appended.'''
REPORT_XML_DIR = auto()
'''Directory containing report.xml for current test run.'''
REPORT_HTML_DIR = auto()
'''Directory containing report.html for current test run.'''
REPORT_GROUP_RENAME = auto()
'''If True, for run-group command, reports are created without session and stage prefixes.'''
REPORT_SCREENSHOTS_ALWAYS = auto()
'''If True, Screenshots are always show in Report, else they are not shown for passed tests. Default is False.'''
REPORT_NETWORK_ALWAYS = auto()
'''If True, Network packets are always show in Report, else they are not shown for passed tests. Default is False.'''
REPORT_NETWORK_FILTER = auto()
'''If True, in reporting, the request/response for static resources like image files, css etc will be excluded. Only HTML/JSON/XML content is included. Default is True'''
LOG_DIR = auto()
'''Directory containing arjuna.log for current test run.'''
SCREENSHOTS_DIR = auto()
'''Directory containing screenshots for current test run.'''
TOOLS_DIR = auto()
'''Directory containing external tool binaries in Arjuna test project.'''
TOOLS_BMPROXY_DIR = auto()
'''Directory containing BrowerMob Proxy binaries.'''
DEPS_DIR = auto()
'''Directory containing external tool binaries in Arjuna test project.'''
DBAUTO_DIR = auto()
'''Directory containing Database interaction automation related input files.'''
DBAUTO_SQL_DIR = auto()
'''Directory containing SQL files for Database interaction automation.'''
TEMP_DIR = auto()
'''Temporary directory for this session.'''
CONF_DIR = auto()
'''Test Project configuration directory'''
CONF_DATA_FILE = auto()
'''File that contains all data configurations.'''
CONF_DATA_LOCAL_FILE = auto()
'''Local File that contains all data configurations.'''
CONF_ENVS_FILE = auto()
'''File that contains all environment configurations.'''
CONF_ENVS_LOCAL_FILE = auto()
'''Local File that contains all environment configurations.'''
CONF_SESSIONS_FILE = auto()
'''File that contains all test session definitions.'''
CONF_SESSIONS_LOCAL_FILE = auto()
'''Local File that contains all test session definitions.'''
CONF_STAGES_FILE = auto()
'''File that contains all test stage definitions.'''
CONF_STAGES_LOCAL_FILE = auto()
'''Local File that contains all test stage definitions.'''
CONF_GROUPS_FILE = auto()
'''File that contains all test group definitions.'''
CONF_GROUPS_LOCAL_FILE = auto()
'''Local File that contains all test group definitions.'''
CONF_WITHX_FILE = auto()
'''withx.yaml file used for writing custom locators for Gui Automation.'''
CONF_WITHX_LOCAL_FILE = auto()
'''Local withx.yaml file used for writing custom locators for Gui Automation.'''
DATA_DIR = auto()
'''Directory containing data files in test project.'''
DATA_SRC_DIR = auto()
'''Directory containing data source files in test project.'''
DATA_REF_DIR = auto()
'''Directory containing contextual data reference files in test project.'''
DATA_REF_CONTEXTUAL_DIR = auto()
'''Directory containing contextual data reference files in test project.'''
DATA_REF_INDEXED_DIR = auto()
'''Directory containing indexed data reference files in test project.'''
DATA_FILE_DIR = auto()
'''Directory containing files used as file data.'''
APP_URL = auto()
'''Base URL for a Web App. Used by launch() method if url is not specified for GuiApp.'''
SOCKET_TIMEOUT = auto()
'''Timeout for socket connections. Default is 60 seconds.'''
HTTP_PROXY_ENABLED = auto()
'''Is a proxy enabled for HTTP requests (GUIAuto as well as HttpAuto)'''
HTTP_PROXY_HOST = auto()
'''IP address/Name of HTTP proxy host. Default is localhost.'''
HTTP_PROXY_PORT = auto()
'''Network Port of HTTP proxy. Default is 8080.'''
HTTPAUTO_DIR = auto()
'''Root directory of all HTTP automation relation directories and files'''
HTTPAUTO_MESSAGE_DIR = auto()
'''Root directory of all HTTP message YAML files.'''
EMAILAUTO_IMAP_HOST = auto()
'''IP address/Name of EmailBox for IMAP Protocol. Default is localhost.'''
EMAILAUTO_IMAP_PORT = auto()
'''Network Port of EmailBox for IMAP Protocol. Default is 993 in SSL Mode and 143 in non-SSL Mode.'''
EMAILAUTO_IMAP_USESSL = auto()
'''Enables/Disables usage of SSL for connecting to EmailBox via IMAP. Default is True.'''
EMAILAUTO_USER = auto()
'''Default Email Address to be used in Arjuna.'''
EMAILAUTO_PASSWORD = auto()
'''Default Email password to be used in Arjuna.'''
EMAILAUTO_MAX_WAIT = auto()
'''Maximum time for selecting a mailbox or reading/parsing emails. Uses Dynamic Wait. Expressed in seconds. Default is 120 seconds.'''
BROWSER_NAME = auto()
'''Browser Name for Gui Automation. Chrome/Firefox. Default is Chrome'''
BROWSER_HEADLESS = auto()
'''Sets headless mode for browser for GUI Automation. Default is False.'''
BROWSER_VERSION = auto()
'''Browser Version for GUI Automation.'''
BROWSER_MAXIMIZE = auto()
'''Browser is maximized in GUI Automation after launch. Default is False.'''
BROWSER_DIM_HEIGHT = auto()
'''Browser Height for GUI Automation. If not set, Arjuna does not change the height of browser.'''
BROWSER_DIM_WIDTH = auto()
'''Browser Width for GUI Automation. If not set, Arjuna does not change the width of browser.'''
BROWSER_BIN_PATH = auto()
'''Path of the Browser binary on test system.'''
BROWSER_NETWORK_RECORDER_ENABLED = auto()
'''If True, Arjuna uses BrowserMob proxy, if available in test project, to capture Network requests made by browser. Default is False.'''
BROWSER_NETWORK_RECORDER_AUTOMATIC = auto()
'''If True, when a browser is launched, Arjuna automatically starts capturing all traffic. Default is False'''
ALLOW_INSECURE_SSL_CERT = auto()
'''If True, insecure SSL certificates are allowd. Default is True'''
GUIAUTO_NAME = auto()
'''Engine name. Currently set to Selenium which is the only supported engine.'''
GUIAUTO_DIR = auto()
'''Root directory of all Gui automation relation directories and files'''
GUIAUTO_NAMESPACE_DIR = auto()
'''Root directory of all Gui Namespace (GNS) files.'''
GUIAUTO_DEF_MULTICONTEXT = auto()
'''Sets multi context mode for GNS files. Currently not processed.'''
GUIAUTO_CONTEXT = auto()
'''Gui Automation Context. Currently not processed.'''
SCROLL_PIXELS = auto()
'''Number of pixels for each scroll call in Gui Automation. Default is 100.'''
GUIAUTO_MAX_WAIT = auto()
'''Maximum time for a Gui element locating or waitable interaction to occur. Uses Dynamic Wait. Expressed in seconds. Default is 60.'''
GUIAUTO_SLOMO_ON = auto()
'''Sets slow motion mode for Gui Automation. Default is False.'''
GUIAUTO_SLOMO_INTERVAL = auto()
'''Time Interval between successive Gui Automation actions when Slow Motion mode is ON. Expressed in seconds. Default is 2'''
MOBILE_OS_NAME = auto()
'''Mobile OS Name. iOs/Android. Default is Android.'''
MOBILE_OS_VERSION = auto()
'''Mobile OS Version. No default set.'''
MOBILE_DEVICE_NAME = auto()
'''Mobile Device name. No default set.'''
MOBILE_DEVICE_UDID = auto()
'''Mobile Device UDID. No default set.'''
MOBILE_APP_FILE_PATH = auto()
'''Mobile App path on test system. No default set.'''
SELENIUM_DRIVER_PROP = auto()
'''Selenium Environment variable for browser driver as per chosen browser. Automatically set as per chosen browser. Default is webdriver.chrome.driver'''
SELENIUM_DRIVERS_DIR = auto()
'''Root Directory containing OS specific browser drivers for Selenium. Has an impact only if SELENIUM_DRIVER_DOWNLOAD is set to False.'''
SELENIUM_DRIVER_PATH = auto()
'''Absolute path of Selenium browser driver. Automatically set to WebDriverManager's downloaded driver if SELENIUM_DRIVER_DOWNLOAD is True, else automatically set as per the test project structure, OS and browser.'''
SELENIUM_DRIVER_DOWNLOAD = auto()
'''Instructs Arjuna to automatically download Selenium browser driver for chosen browser. Default is True.'''
SELENIUM_SERVICE_URL = auto()
'''Selenium's Service URL. If set, Arjuna does not launch the browser service and uses this URL as the service URL.'''
APPIUM_SERVICE_URL = auto()
'''Appium Service URL. Currently not processed.'''
APPIUM_AUTO_LAUNCH = auto()
'''Instructs Arjuna to launch Appium programmatically. Default is True. Currently not processed.'''
IMG_COMP_MIN_SCORE = auto()
'''A fraction that represents minimum image comparison score to decide on an image match. Default is 0.7. Currently not processed.'''
class TimeUnit(Enum):
'''
Allowed time unit types.
'''
MILLI_SECONDS = auto()
SECONDS = auto()
MINUTES = auto()
class BrowserName(Enum):
'''
Allowed browser names for Gui Automation.
'''
CHROME = auto()
FIREFOX = auto()
class DomDirection(Enum):
'''
Directions in DOM movement.
'''
UP = auto()
DOWN = auto()
LEFT = auto()
RIGHT = auto()
class DomNodeType(Enum):
'''
Directions in DOM movement.
'''
NODE = auto()
BNODE = auto()
FNODE = auto()
import locale
import re
__locales = [i.upper() for i in locale.locale_alias.keys() if re.match('^[\w_]+$', i)]
Locale = Enum('Locale', dict(zip(__locales, range(len(__locales)))))
Locale.__doc__ = '''Allowed locale names in Arjuna.'''
|
[
"enum.auto",
"locale.locale_alias.keys",
"re.match"
] |
[((919, 925), 'enum.auto', 'auto', ([], {}), '()\n', (923, 925), False, 'from enum import Enum, auto\n'), ((1028, 1034), 'enum.auto', 'auto', ([], {}), '()\n', (1032, 1034), False, 'from enum import Enum, auto\n'), ((1120, 1126), 'enum.auto', 'auto', ([], {}), '()\n', (1124, 1126), False, 'from enum import Enum, auto\n'), ((1177, 1183), 'enum.auto', 'auto', ([], {}), '()\n', (1181, 1183), False, 'from enum import Enum, auto\n'), ((1285, 1291), 'enum.auto', 'auto', ([], {}), '()\n', (1289, 1291), False, 'from enum import Enum, auto\n'), ((1343, 1349), 'enum.auto', 'auto', ([], {}), '()\n', (1347, 1349), False, 'from enum import Enum, auto\n'), ((1429, 1435), 'enum.auto', 'auto', ([], {}), '()\n', (1433, 1435), False, 'from enum import Enum, auto\n'), ((1525, 1531), 'enum.auto', 'auto', ([], {}), '()\n', (1529, 1531), False, 'from enum import Enum, auto\n'), ((1625, 1631), 'enum.auto', 'auto', ([], {}), '()\n', (1629, 1631), False, 'from enum import Enum, auto\n'), ((1769, 1775), 'enum.auto', 'auto', ([], {}), '()\n', (1773, 1775), False, 'from enum import Enum, auto\n'), ((1900, 1906), 'enum.auto', 'auto', ([], {}), '()\n', (1904, 1906), False, 'from enum import Enum, auto\n'), ((1985, 1991), 'enum.auto', 'auto', ([], {}), '()\n', (1989, 1991), False, 'from enum import Enum, auto\n'), ((2063, 2069), 'enum.auto', 'auto', ([], {}), '()\n', (2067, 2069), False, 'from enum import Enum, auto\n'), ((2122, 2128), 'enum.auto', 'auto', ([], {}), '()\n', (2126, 2128), False, 'from enum import Enum, auto\n'), ((2192, 2198), 'enum.auto', 'auto', ([], {}), '()\n', (2196, 2198), False, 'from enum import Enum, auto\n'), ((2264, 2270), 'enum.auto', 'auto', ([], {}), '()\n', (2268, 2270), False, 'from enum import Enum, auto\n'), ((2328, 2334), 'enum.auto', 'auto', ([], {}), '()\n', (2332, 2334), False, 'from enum import Enum, auto\n'), ((2401, 2407), 'enum.auto', 'auto', ([], {}), '()\n', (2405, 2407), False, 'from enum import Enum, auto\n'), ((2480, 2486), 'enum.auto', 'auto', ([], 
{}), '()\n', (2484, 2486), False, 'from enum import Enum, auto\n'), ((2566, 2572), 'enum.auto', 'auto', ([], {}), '()\n', (2570, 2572), False, 'from enum import Enum, auto\n'), ((2659, 2665), 'enum.auto', 'auto', ([], {}), '()\n', (2663, 2665), False, 'from enum import Enum, auto\n'), ((2732, 2738), 'enum.auto', 'auto', ([], {}), '()\n', (2736, 2738), False, 'from enum import Enum, auto\n'), ((2804, 2810), 'enum.auto', 'auto', ([], {}), '()\n', (2808, 2810), False, 'from enum import Enum, auto\n'), ((2879, 2885), 'enum.auto', 'auto', ([], {}), '()\n', (2883, 2885), False, 'from enum import Enum, auto\n'), ((3088, 3094), 'enum.auto', 'auto', ([], {}), '()\n', (3092, 3094), False, 'from enum import Enum, auto\n'), ((3182, 3188), 'enum.auto', 'auto', ([], {}), '()\n', (3186, 3188), False, 'from enum import Enum, auto\n'), ((3281, 3287), 'enum.auto', 'auto', ([], {}), '()\n', (3285, 3287), False, 'from enum import Enum, auto\n'), ((3419, 3425), 'enum.auto', 'auto', ([], {}), '()\n', (3423, 3425), False, 'from enum import Enum, auto\n'), ((3573, 3579), 'enum.auto', 'auto', ([], {}), '()\n', (3577, 3579), False, 'from enum import Enum, auto\n'), ((3731, 3737), 'enum.auto', 'auto', ([], {}), '()\n', (3735, 3737), False, 'from enum import Enum, auto\n'), ((3928, 3934), 'enum.auto', 'auto', ([], {}), '()\n', (3932, 3934), False, 'from enum import Enum, auto\n'), ((4022, 4028), 'enum.auto', 'auto', ([], {}), '()\n', (4026, 4028), False, 'from enum import Enum, auto\n'), ((4111, 4117), 'enum.auto', 'auto', ([], {}), '()\n', (4115, 4117), False, 'from enum import Enum, auto\n'), ((4221, 4227), 'enum.auto', 'auto', ([], {}), '()\n', (4225, 4227), False, 'from enum import Enum, auto\n'), ((4301, 4307), 'enum.auto', 'auto', ([], {}), '()\n', (4305, 4307), False, 'from enum import Enum, auto\n'), ((4404, 4410), 'enum.auto', 'auto', ([], {}), '()\n', (4408, 4410), False, 'from enum import Enum, auto\n'), ((4517, 4523), 'enum.auto', 'auto', ([], {}), '()\n', (4521, 4523), False, 
'from enum import Enum, auto\n'), ((4618, 4624), 'enum.auto', 'auto', ([], {}), '()\n', (4622, 4624), False, 'from enum import Enum, auto\n'), ((4689, 4695), 'enum.auto', 'auto', ([], {}), '()\n', (4693, 4695), False, 'from enum import Enum, auto\n'), ((4765, 4771), 'enum.auto', 'auto', ([], {}), '()\n', (4769, 4771), False, 'from enum import Enum, auto\n'), ((4854, 4860), 'enum.auto', 'auto', ([], {}), '()\n', (4858, 4860), False, 'from enum import Enum, auto\n'), ((4943, 4949), 'enum.auto', 'auto', ([], {}), '()\n', (4947, 4949), False, 'from enum import Enum, auto\n'), ((5039, 5045), 'enum.auto', 'auto', ([], {}), '()\n', (5043, 5045), False, 'from enum import Enum, auto\n'), ((5139, 5145), 'enum.auto', 'auto', ([], {}), '()\n', (5143, 5145), False, 'from enum import Enum, auto\n'), ((5237, 5243), 'enum.auto', 'auto', ([], {}), '()\n', (5241, 5243), False, 'from enum import Enum, auto\n'), ((5333, 5339), 'enum.auto', 'auto', ([], {}), '()\n', (5337, 5339), False, 'from enum import Enum, auto\n'), ((5427, 5433), 'enum.auto', 'auto', ([], {}), '()\n', (5431, 5433), False, 'from enum import Enum, auto\n'), ((5521, 5527), 'enum.auto', 'auto', ([], {}), '()\n', (5525, 5527), False, 'from enum import Enum, auto\n'), ((5615, 5621), 'enum.auto', 'auto', ([], {}), '()\n', (5619, 5621), False, 'from enum import Enum, auto\n'), ((5708, 5714), 'enum.auto', 'auto', ([], {}), '()\n', (5712, 5714), False, 'from enum import Enum, auto\n'), ((5823, 5829), 'enum.auto', 'auto', ([], {}), '()\n', (5827, 5829), False, 'from enum import Enum, auto\n'), ((5931, 5937), 'enum.auto', 'auto', ([], {}), '()\n', (5935, 5937), False, 'from enum import Enum, auto\n'), ((6017, 6023), 'enum.auto', 'auto', ([], {}), '()\n', (6021, 6023), False, 'from enum import Enum, auto\n'), ((6110, 6116), 'enum.auto', 'auto', ([], {}), '()\n', (6114, 6116), False, 'from enum import Enum, auto\n'), ((6228, 6234), 'enum.auto', 'auto', ([], {}), '()\n', (6232, 6234), False, 'from enum import Enum, auto\n'), 
((6343, 6349), 'enum.auto', 'auto', ([], {}), '()\n', (6347, 6349), False, 'from enum import Enum, auto\n'), ((6448, 6454), 'enum.auto', 'auto', ([], {}), '()\n', (6452, 6454), False, 'from enum import Enum, auto\n'), ((6526, 6532), 'enum.auto', 'auto', ([], {}), '()\n', (6530, 6532), False, 'from enum import Enum, auto\n'), ((6649, 6655), 'enum.auto', 'auto', ([], {}), '()\n', (6653, 6655), False, 'from enum import Enum, auto\n'), ((6747, 6753), 'enum.auto', 'auto', ([], {}), '()\n', (6751, 6753), False, 'from enum import Enum, auto\n'), ((6854, 6860), 'enum.auto', 'auto', ([], {}), '()\n', (6858, 6860), False, 'from enum import Enum, auto\n'), ((6952, 6958), 'enum.auto', 'auto', ([], {}), '()\n', (6956, 6958), False, 'from enum import Enum, auto\n'), ((7034, 7040), 'enum.auto', 'auto', ([], {}), '()\n', (7038, 7040), False, 'from enum import Enum, auto\n'), ((7148, 7154), 'enum.auto', 'auto', ([], {}), '()\n', (7152, 7154), False, 'from enum import Enum, auto\n'), ((7239, 7245), 'enum.auto', 'auto', ([], {}), '()\n', (7243, 7245), False, 'from enum import Enum, auto\n'), ((7352, 7358), 'enum.auto', 'auto', ([], {}), '()\n', (7356, 7358), False, 'from enum import Enum, auto\n'), ((7494, 7500), 'enum.auto', 'auto', ([], {}), '()\n', (7498, 7500), False, 'from enum import Enum, auto\n'), ((7617, 7623), 'enum.auto', 'auto', ([], {}), '()\n', (7621, 7623), False, 'from enum import Enum, auto\n'), ((7704, 7710), 'enum.auto', 'auto', ([], {}), '()\n', (7708, 7710), False, 'from enum import Enum, auto\n'), ((7792, 7798), 'enum.auto', 'auto', ([], {}), '()\n', (7796, 7798), False, 'from enum import Enum, auto\n'), ((7958, 7964), 'enum.auto', 'auto', ([], {}), '()\n', (7962, 7964), False, 'from enum import Enum, auto\n'), ((8066, 8072), 'enum.auto', 'auto', ([], {}), '()\n', (8070, 8072), False, 'from enum import Enum, auto\n'), ((8175, 8181), 'enum.auto', 'auto', ([], {}), '()\n', (8179, 8181), False, 'from enum import Enum, auto\n'), ((8252, 8258), 'enum.auto', 'auto', 
([], {}), '()\n', (8256, 8258), False, 'from enum import Enum, auto\n'), ((8366, 8372), 'enum.auto', 'auto', ([], {}), '()\n', (8370, 8372), False, 'from enum import Enum, auto\n'), ((8501, 8507), 'enum.auto', 'auto', ([], {}), '()\n', (8505, 8507), False, 'from enum import Enum, auto\n'), ((8633, 8639), 'enum.auto', 'auto', ([], {}), '()\n', (8637, 8639), False, 'from enum import Enum, auto\n'), ((8733, 8739), 'enum.auto', 'auto', ([], {}), '()\n', (8737, 8739), False, 'from enum import Enum, auto\n'), ((8924, 8930), 'enum.auto', 'auto', ([], {}), '()\n', (8928, 8930), False, 'from enum import Enum, auto\n'), ((9077, 9083), 'enum.auto', 'auto', ([], {}), '()\n', (9081, 9083), False, 'from enum import Enum, auto\n'), ((9177, 9183), 'enum.auto', 'auto', ([], {}), '()\n', (9181, 9183), False, 'from enum import Enum, auto\n'), ((9288, 9294), 'enum.auto', 'auto', ([], {}), '()\n', (9292, 9294), False, 'from enum import Enum, auto\n'), ((9402, 9408), 'enum.auto', 'auto', ([], {}), '()\n', (9406, 9408), False, 'from enum import Enum, auto\n'), ((9500, 9506), 'enum.auto', 'auto', ([], {}), '()\n', (9504, 9506), False, 'from enum import Enum, auto\n'), ((9604, 9610), 'enum.auto', 'auto', ([], {}), '()\n', (9608, 9610), False, 'from enum import Enum, auto\n'), ((9691, 9697), 'enum.auto', 'auto', ([], {}), '()\n', (9695, 9697), False, 'from enum import Enum, auto\n'), ((9805, 9811), 'enum.auto', 'auto', ([], {}), '()\n', (9809, 9811), False, 'from enum import Enum, auto\n'), ((9976, 9982), 'enum.auto', 'auto', ([], {}), '()\n', (9980, 9982), False, 'from enum import Enum, auto\n'), ((10083, 10089), 'enum.auto', 'auto', ([], {}), '()\n', (10087, 10089), False, 'from enum import Enum, auto\n'), ((10242, 10248), 'enum.auto', 'auto', ([], {}), '()\n', (10246, 10248), False, 'from enum import Enum, auto\n'), ((10333, 10339), 'enum.auto', 'auto', ([], {}), '()\n', (10337, 10339), False, 'from enum import Enum, auto\n'), ((10415, 10421), 'enum.auto', 'auto', ([], {}), '()\n', 
(10419, 10421), False, 'from enum import Enum, auto\n'), ((10494, 10500), 'enum.auto', 'auto', ([], {}), '()\n', (10498, 10500), False, 'from enum import Enum, auto\n'), ((10575, 10581), 'enum.auto', 'auto', ([], {}), '()\n', (10579, 10581), False, 'from enum import Enum, auto\n'), ((10668, 10674), 'enum.auto', 'auto', ([], {}), '()\n', (10672, 10674), False, 'from enum import Enum, auto\n'), ((10861, 10867), 'enum.auto', 'auto', ([], {}), '()\n', (10865, 10867), False, 'from enum import Enum, auto\n'), ((11038, 11044), 'enum.auto', 'auto', ([], {}), '()\n', (11042, 11044), False, 'from enum import Enum, auto\n'), ((11298, 11304), 'enum.auto', 'auto', ([], {}), '()\n', (11302, 11304), False, 'from enum import Enum, auto\n'), ((11447, 11453), 'enum.auto', 'auto', ([], {}), '()\n', (11451, 11453), False, 'from enum import Enum, auto\n'), ((11603, 11609), 'enum.auto', 'auto', ([], {}), '()\n', (11607, 11609), False, 'from enum import Enum, auto\n'), ((11691, 11697), 'enum.auto', 'auto', ([], {}), '()\n', (11695, 11697), False, 'from enum import Enum, auto\n'), ((11828, 11834), 'enum.auto', 'auto', ([], {}), '()\n', (11832, 11834), False, 'from enum import Enum, auto\n'), ((12066, 12072), 'enum.auto', 'auto', ([], {}), '()\n', (12070, 12072), False, 'from enum import Enum, auto\n'), ((12087, 12093), 'enum.auto', 'auto', ([], {}), '()\n', (12091, 12093), False, 'from enum import Enum, auto\n'), ((12108, 12114), 'enum.auto', 'auto', ([], {}), '()\n', (12112, 12114), False, 'from enum import Enum, auto\n'), ((12221, 12227), 'enum.auto', 'auto', ([], {}), '()\n', (12225, 12227), False, 'from enum import Enum, auto\n'), ((12242, 12248), 'enum.auto', 'auto', ([], {}), '()\n', (12246, 12248), False, 'from enum import Enum, auto\n'), ((12333, 12339), 'enum.auto', 'auto', ([], {}), '()\n', (12337, 12339), False, 'from enum import Enum, auto\n'), ((12351, 12357), 'enum.auto', 'auto', ([], {}), '()\n', (12355, 12357), False, 'from enum import Enum, auto\n'), ((12369, 12375), 
'enum.auto', 'auto', ([], {}), '()\n', (12373, 12375), False, 'from enum import Enum, auto\n'), ((12388, 12394), 'enum.auto', 'auto', ([], {}), '()\n', (12392, 12394), False, 'from enum import Enum, auto\n'), ((12480, 12486), 'enum.auto', 'auto', ([], {}), '()\n', (12484, 12486), False, 'from enum import Enum, auto\n'), ((12499, 12505), 'enum.auto', 'auto', ([], {}), '()\n', (12503, 12505), False, 'from enum import Enum, auto\n'), ((12518, 12524), 'enum.auto', 'auto', ([], {}), '()\n', (12522, 12524), False, 'from enum import Enum, auto\n'), ((12582, 12608), 'locale.locale_alias.keys', 'locale.locale_alias.keys', ([], {}), '()\n', (12606, 12608), False, 'import locale\n'), ((12612, 12636), 're.match', 're.match', (['"""^[\\\\w_]+$"""', 'i'], {}), "('^[\\\\w_]+$', i)\n", (12620, 12636), False, 'import re\n')]
|
#%%
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
import matplotlib.cm as cm
from tqdm import trange, tqdm
from sklearn.metrics import adjusted_rand_score
from argparse import ArgumentParser
from util.config_parser import ConfigParser_with_eval
#%% parse arguments
def arg_check(value, default):
return value if value else default
default_hypparams_model = "hypparams/model.config"
parser = ArgumentParser()
parser.add_argument("--model", help=f"hyper parameters of model, default is [{default_hypparams_model}]")
args = parser.parse_args()
hypparams_model = arg_check(args.model, default_hypparams_model)
#%%
def load_config(filename):
cp = ConfigParser_with_eval()
cp.read(filename)
return cp
#%%
def get_names():
return np.loadtxt("files.txt", dtype=str)
def get_datas_and_length(names):
datas = [np.loadtxt("DATA/" + name + ".txt") for name in names]
length = [len(d) for d in datas]
return datas, length
def get_results_of_word(names, length):
return _joblib_get_results(names, length, "s")
def get_results_of_letter(names, length):
return _joblib_get_results(names, length, "l")
def get_results_of_duration(names, length):
return _joblib_get_results(names, length, "d")
def _get_results(names, lengths, c):
return [np.loadtxt("results/" + name + "_" + c + ".txt").reshape((-1, l)) for name, l in zip(names, lengths)]
def _joblib_get_results(names, lengths, c):
from joblib import Parallel, delayed
def _component(name, length, c):
return np.loadtxt("results/" + name + "_" + c + ".txt").reshape((-1, length))
return Parallel(n_jobs=-1)([delayed(_component)(n, l, c) for n, l in zip(names, lengths)])
def _plot_discreate_sequence(feature, title, sample_data, cmap=None):
ax = plt.subplot2grid((2, 1), (0, 0))
plt.sca(ax)
ax.plot(feature)
ax.set_xlim((0, feature.shape[0]-1))
plt.ylabel('Feature')
#label matrix
ax = plt.subplot2grid((2, 1), (1, 0))
plt.suptitle(title)
plt.sca(ax)
ax.matshow(sample_data, aspect = 'auto', cmap=cmap)
#write x&y label
plt.xlabel('Frame')
plt.ylabel('Iteration')
plt.xticks(())
Path("figures").mkdir(exist_ok=True)
Path("summary_files").mkdir(exist_ok=True)
#%% config parse
print("Loading model config...")
config_parser = load_config(hypparams_model)
section = config_parser["model"]
word_num = section["word_num"]
letter_num = section["letter_num"]
print("Done!")
#%%
print("Loading results....")
names = get_names()
datas, length = get_datas_and_length(names)
l_results = get_results_of_letter(names, length)
w_results = get_results_of_word(names, length)
d_results = get_results_of_duration(names, length)
log_likelihood = np.loadtxt("summary_files/log_likelihood.txt")
resample_times = np.loadtxt("summary_files/resample_times.txt")
print("Done!")
train_iter = l_results[0].shape[0]
#%%
lcolors = ListedColormap([cm.tab20(float(i)/letter_num) for i in range(letter_num)])
wcolors = ListedColormap([cm.tab20(float(i)/word_num) for i in range(word_num)])
#%%
print("Plot results...")
for i, name in enumerate(tqdm(names)):
plt.clf()
_plot_discreate_sequence(datas[i], name + "_l", l_results[i], cmap=lcolors)
plt.savefig("figures/" + name + "_l.png")
plt.clf()
_plot_discreate_sequence(datas[i], name + "_s", w_results[i], cmap=wcolors)
plt.savefig("figures/" + name + "_s.png")
plt.clf()
_plot_discreate_sequence(datas[i], name + "_d", d_results[i], cmap=cm.binary)
plt.savefig("figures/" + name + "_d.png")
print("Done!")
#%%
plt.clf()
plt.title("Log likelihood")
plt.plot(range(train_iter+1), log_likelihood, ".-")
plt.savefig("figures/Log_likelihood.png")
#%%
plt.clf()
plt.title("Resample times")
plt.plot(range(train_iter), resample_times, ".-")
plt.savefig("figures/Resample_times.png")
#%%
with open("summary_files/Sum_of_resample_times.txt", "w") as f:
f.write(str(np.sum(resample_times)))
|
[
"matplotlib.pyplot.title",
"tqdm.tqdm",
"util.config_parser.ConfigParser_with_eval",
"numpy.sum",
"argparse.ArgumentParser",
"matplotlib.pyplot.clf",
"matplotlib.pyplot.suptitle",
"matplotlib.pyplot.subplot2grid",
"joblib.Parallel",
"numpy.loadtxt",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.sca",
"matplotlib.pyplot.ylabel",
"joblib.delayed",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.savefig"
] |
[((449, 465), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (463, 465), False, 'from argparse import ArgumentParser\n'), ((2757, 2803), 'numpy.loadtxt', 'np.loadtxt', (['"""summary_files/log_likelihood.txt"""'], {}), "('summary_files/log_likelihood.txt')\n", (2767, 2803), True, 'import numpy as np\n'), ((2821, 2867), 'numpy.loadtxt', 'np.loadtxt', (['"""summary_files/resample_times.txt"""'], {}), "('summary_files/resample_times.txt')\n", (2831, 2867), True, 'import numpy as np\n'), ((3601, 3610), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (3608, 3610), True, 'import matplotlib.pyplot as plt\n'), ((3611, 3638), 'matplotlib.pyplot.title', 'plt.title', (['"""Log likelihood"""'], {}), "('Log likelihood')\n", (3620, 3638), True, 'import matplotlib.pyplot as plt\n'), ((3691, 3732), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""figures/Log_likelihood.png"""'], {}), "('figures/Log_likelihood.png')\n", (3702, 3732), True, 'import matplotlib.pyplot as plt\n'), ((3738, 3747), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (3745, 3747), True, 'import matplotlib.pyplot as plt\n'), ((3748, 3775), 'matplotlib.pyplot.title', 'plt.title', (['"""Resample times"""'], {}), "('Resample times')\n", (3757, 3775), True, 'import matplotlib.pyplot as plt\n'), ((3826, 3867), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""figures/Resample_times.png"""'], {}), "('figures/Resample_times.png')\n", (3837, 3867), True, 'import matplotlib.pyplot as plt\n'), ((706, 730), 'util.config_parser.ConfigParser_with_eval', 'ConfigParser_with_eval', ([], {}), '()\n', (728, 730), False, 'from util.config_parser import ConfigParser_with_eval\n'), ((800, 834), 'numpy.loadtxt', 'np.loadtxt', (['"""files.txt"""'], {'dtype': 'str'}), "('files.txt', dtype=str)\n", (810, 834), True, 'import numpy as np\n'), ((1817, 1849), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['(2, 1)', '(0, 0)'], {}), '((2, 1), (0, 0))\n', (1833, 1849), True, 'import matplotlib.pyplot as 
plt\n'), ((1854, 1865), 'matplotlib.pyplot.sca', 'plt.sca', (['ax'], {}), '(ax)\n', (1861, 1865), True, 'import matplotlib.pyplot as plt\n'), ((1932, 1953), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Feature"""'], {}), "('Feature')\n", (1942, 1953), True, 'import matplotlib.pyplot as plt\n'), ((1981, 2013), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['(2, 1)', '(1, 0)'], {}), '((2, 1), (1, 0))\n', (1997, 2013), True, 'import matplotlib.pyplot as plt\n'), ((2018, 2037), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (['title'], {}), '(title)\n', (2030, 2037), True, 'import matplotlib.pyplot as plt\n'), ((2042, 2053), 'matplotlib.pyplot.sca', 'plt.sca', (['ax'], {}), '(ax)\n', (2049, 2053), True, 'import matplotlib.pyplot as plt\n'), ((2135, 2154), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Frame"""'], {}), "('Frame')\n", (2145, 2154), True, 'import matplotlib.pyplot as plt\n'), ((2159, 2182), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Iteration"""'], {}), "('Iteration')\n", (2169, 2182), True, 'import matplotlib.pyplot as plt\n'), ((2187, 2201), 'matplotlib.pyplot.xticks', 'plt.xticks', (['()'], {}), '(())\n', (2197, 2201), True, 'import matplotlib.pyplot as plt\n'), ((3145, 3156), 'tqdm.tqdm', 'tqdm', (['names'], {}), '(names)\n', (3149, 3156), False, 'from tqdm import trange, tqdm\n'), ((3163, 3172), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (3170, 3172), True, 'import matplotlib.pyplot as plt\n'), ((3257, 3298), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('figures/' + name + '_l.png')"], {}), "('figures/' + name + '_l.png')\n", (3268, 3298), True, 'import matplotlib.pyplot as plt\n'), ((3303, 3312), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (3310, 3312), True, 'import matplotlib.pyplot as plt\n'), ((3397, 3438), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('figures/' + name + '_s.png')"], {}), "('figures/' + name + '_s.png')\n", (3408, 3438), True, 'import matplotlib.pyplot as plt\n'), ((3443, 3452), 
'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (3450, 3452), True, 'import matplotlib.pyplot as plt\n'), ((3539, 3580), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('figures/' + name + '_d.png')"], {}), "('figures/' + name + '_d.png')\n", (3550, 3580), True, 'import matplotlib.pyplot as plt\n'), ((882, 917), 'numpy.loadtxt', 'np.loadtxt', (["('DATA/' + name + '.txt')"], {}), "('DATA/' + name + '.txt')\n", (892, 917), True, 'import numpy as np\n'), ((1653, 1672), 'joblib.Parallel', 'Parallel', ([], {'n_jobs': '(-1)'}), '(n_jobs=-1)\n', (1661, 1672), False, 'from joblib import Parallel, delayed\n'), ((3953, 3975), 'numpy.sum', 'np.sum', (['resample_times'], {}), '(resample_times)\n', (3959, 3975), True, 'import numpy as np\n'), ((1331, 1379), 'numpy.loadtxt', 'np.loadtxt', (["('results/' + name + '_' + c + '.txt')"], {}), "('results/' + name + '_' + c + '.txt')\n", (1341, 1379), True, 'import numpy as np\n'), ((1571, 1619), 'numpy.loadtxt', 'np.loadtxt', (["('results/' + name + '_' + c + '.txt')"], {}), "('results/' + name + '_' + c + '.txt')\n", (1581, 1619), True, 'import numpy as np\n'), ((1674, 1693), 'joblib.delayed', 'delayed', (['_component'], {}), '(_component)\n', (1681, 1693), False, 'from joblib import Parallel, delayed\n')]
|
from __future__ import annotations
import logging
from pymodbus.client.sync import ModbusTcpClient
from pymodbus.exceptions import ModbusIOException
from givenergy_modbus.decoder import GivEnergyResponseDecoder
from givenergy_modbus.framer import GivEnergyModbusFramer
from givenergy_modbus.model.register import HoldingRegister, InputRegister # type: ignore
from givenergy_modbus.pdu import (
ModbusPDU,
ReadHoldingRegistersRequest,
ReadHoldingRegistersResponse,
ReadInputRegistersRequest,
ReadInputRegistersResponse,
WriteHoldingRegisterRequest,
WriteHoldingRegisterResponse,
)
from givenergy_modbus.transaction import GivEnergyTransactionManager
_logger = logging.getLogger(__package__)
class GivEnergyModbusTcpClient(ModbusTcpClient):
"""GivEnergy Modbus Client implementation.
This class ties together all the pieces to create a functional client that can converse with a
GivEnergy Modbus implementation over TCP. It exists as a thin wrapper around the ModbusTcpClient
to hot patch in our own Framer and TransactionManager since they are hardcoded classes for Decoder
and TransactionManager throughout constructors up the call chain.
We also provide a few convenience methods to read and write registers.
"""
def __init__(self, **kwargs):
kwargs.setdefault("port", 8899) # GivEnergy default instead of the standard 502
super().__init__(**kwargs)
self.framer = GivEnergyModbusFramer(GivEnergyResponseDecoder(), client=self)
self.transaction = GivEnergyTransactionManager(client=self, **kwargs)
self.timeout = 2
def __repr__(self):
return f"GivEnergyModbusTcpClient({self.host}:{self.port}): timeout={self.timeout})"
def execute(self, request: ModbusPDU = None) -> ModbusPDU | None:
"""Send the given PDU to the remote device and return any PDU returned in response."""
_logger.debug(f'Sending request {request}')
try:
response = super().execute(request)
if isinstance(response, ModbusIOException):
_logger.exception(response)
return response
except ModbusIOException as e:
_logger.exception(e)
self.close()
return None
except Exception as e:
# This seems to help with inverters becoming unresponsive from the portal."""
_logger.exception(e)
self.close()
return None
def read_registers(
self, kind: type[HoldingRegister | InputRegister], base_address: int, register_count: int, **kwargs
) -> dict[int, int]:
"""Read out registers from the correct location depending on type specified."""
# match types of register to their request/response types
t_req, t_res = {
HoldingRegister: (ReadHoldingRegistersRequest, ReadHoldingRegistersResponse),
InputRegister: (ReadInputRegistersRequest, ReadInputRegistersResponse),
}[kind]
request = t_req(base_register=base_address, register_count=register_count, **kwargs)
_logger.debug(
f'Attempting to read {t_req}s #{request.base_register}-'
f'{request.base_register + request.register_count} from device {hex(request.slave_address)}...'
)
response = self.execute(request)
if response and isinstance(response, t_res):
if response.base_register != base_address:
_logger.error(
f'Returned base register ({response.base_register}) '
f'does not match that from request ({base_address}).'
)
return {}
if response.register_count != register_count:
_logger.error(
f'Returned register count ({response.register_count}) '
f'does not match that from request ({register_count}).'
)
return {}
return response.to_dict()
_logger.error(f'Did not receive expected response type: {t_res.__name__} != {response.__class__.__name__}')
# FIXME this contract needs improving
return {}
def read_holding_registers(self, address, count=1, **kwargs) -> dict[int, int]:
"""Convenience method to help read out holding registers."""
return self.read_registers(HoldingRegister, address, count, **kwargs)
def read_input_registers(self, address, count=1, **kwargs) -> dict[int, int]:
"""Convenience method to help read out input registers."""
return self.read_registers(InputRegister, address, count, **kwargs)
def write_holding_register(self, register: HoldingRegister, value: int) -> None:
"""Write a value to a single holding register."""
if not register.write_safe: # type: ignore # shut up mypy
raise ValueError(f'Register {register.name} is not safe to write to')
if value != value & 0xFFFF:
raise ValueError(f'Value {value} must fit in 2 bytes')
_logger.info(f'Attempting to write {value}/{hex(value)} to Holding Register {register.value}/{register.name}')
request = WriteHoldingRegisterRequest(register=register.value, value=value)
result = self.execute(request)
if isinstance(result, WriteHoldingRegisterResponse):
if result.value != value:
raise AssertionError(f'Register read-back value 0x{result.value:04x} != written value 0x{value:04x}')
else:
raise AssertionError(f'Unexpected response from remote end: {result}')
|
[
"givenergy_modbus.pdu.WriteHoldingRegisterRequest",
"givenergy_modbus.transaction.GivEnergyTransactionManager",
"givenergy_modbus.decoder.GivEnergyResponseDecoder",
"logging.getLogger"
] |
[((692, 722), 'logging.getLogger', 'logging.getLogger', (['__package__'], {}), '(__package__)\n', (709, 722), False, 'import logging\n'), ((1550, 1600), 'givenergy_modbus.transaction.GivEnergyTransactionManager', 'GivEnergyTransactionManager', ([], {'client': 'self'}), '(client=self, **kwargs)\n', (1577, 1600), False, 'from givenergy_modbus.transaction import GivEnergyTransactionManager\n'), ((5173, 5238), 'givenergy_modbus.pdu.WriteHoldingRegisterRequest', 'WriteHoldingRegisterRequest', ([], {'register': 'register.value', 'value': 'value'}), '(register=register.value, value=value)\n', (5200, 5238), False, 'from givenergy_modbus.pdu import ModbusPDU, ReadHoldingRegistersRequest, ReadHoldingRegistersResponse, ReadInputRegistersRequest, ReadInputRegistersResponse, WriteHoldingRegisterRequest, WriteHoldingRegisterResponse\n'), ((1482, 1508), 'givenergy_modbus.decoder.GivEnergyResponseDecoder', 'GivEnergyResponseDecoder', ([], {}), '()\n', (1506, 1508), False, 'from givenergy_modbus.decoder import GivEnergyResponseDecoder\n')]
|
import setuptools
requirements = []
with open('requirements.txt', 'r') as fh:
for line in fh:
requirements.append(line.strip())
with open("README.md", "r") as fh:
long_description = fh.read()
print(setuptools.find_packages(),)
setuptools.setup(
name="phlab",
version="0.0.0.dev6",
authors="<NAME>, <NAME>",
# author_email="<EMAIL>",
description="Phonon contirbution in RIXS",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/geonda/RIXS.phonons/",
packages=setuptools.find_packages(),
install_requires = requirements,
python_requires='>=3.6',
)
|
[
"setuptools.find_packages"
] |
[((217, 243), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (241, 243), False, 'import setuptools\n'), ((569, 595), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (593, 595), False, 'import setuptools\n')]
|
# Generated by Django 3.1.3 on 2020-11-25 06:01
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('trains', '0009_auto_20201125_0840'),
]
operations = [
migrations.DeleteModel(
name='Station',
),
migrations.DeleteModel(
name='TrainRoutes',
),
]
|
[
"django.db.migrations.DeleteModel"
] |
[((226, 264), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""Station"""'}), "(name='Station')\n", (248, 264), False, 'from django.db import migrations\n'), ((297, 339), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""TrainRoutes"""'}), "(name='TrainRoutes')\n", (319, 339), False, 'from django.db import migrations\n')]
|
import logging
from datetime import datetime
from sqlalchemy import Boolean, Column, DateTime, Integer, Unicode, func
from sqlalchemy.orm import relationship
from sqlalchemy.sql.elements import and_
from sqlalchemy.sql.schema import ForeignKey
from flexget import db_schema
from flexget.db_schema import versioned_base
from flexget.utils.database import entry_synonym, with_session
plugin_name = 'pending_list'
log = logging.getLogger(plugin_name)
Base = versioned_base(plugin_name, 0)
@db_schema.upgrade(plugin_name)
def upgrade(ver, session):
ver = 0
return ver
class PendingListList(Base):
__tablename__ = 'pending_list_lists'
id = Column(Integer, primary_key=True)
name = Column(Unicode, unique=True)
added = Column(DateTime, default=datetime.now)
entries = relationship(
'PendingListEntry', backref='list', cascade='all, delete, delete-orphan', lazy='dynamic'
)
def to_dict(self):
return {'id': self.id, 'name': self.name, 'added_on': self.added}
class PendingListEntry(Base):
__tablename__ = 'wait_list_entries'
id = Column(Integer, primary_key=True)
list_id = Column(Integer, ForeignKey(PendingListList.id), nullable=False)
added = Column(DateTime, default=datetime.now)
title = Column(Unicode)
original_url = Column(Unicode)
_json = Column('json', Unicode)
entry = entry_synonym('_json')
approved = Column(Boolean)
def __init__(self, entry, pending_list_id):
self.title = entry['title']
self.original_url = entry.get('original_url') or entry['url']
self.entry = entry
self.list_id = pending_list_id
self.approved = False
def __repr__(self):
return '<PendingListEntry,title=%s,original_url=%s,approved=%s>' % (
self.title,
self.original_url,
self.approved,
)
def to_dict(self):
return {
'id': self.id,
'list_id': self.list_id,
'added_on': self.added,
'title': self.title,
'original_url': self.original_url,
'entry': dict(self.entry),
'approved': self.approved,
}
@with_session
def get_pending_lists(name=None, session=None):
log.debug('retrieving pending lists')
query = session.query(PendingListList)
if name:
log.debug('searching for pending lists with name %s', name)
query = query.filter(PendingListList.name.contains(name))
return query.all()
@with_session
def get_list_by_exact_name(name, session=None):
log.debug('returning pending list with name %s', name)
return (
session.query(PendingListList)
.filter(func.lower(PendingListList.name) == name.lower())
.one()
)
@with_session
def get_list_by_id(list_id, session=None):
log.debug('returning pending list with id %d', list_id)
return session.query(PendingListList).filter(PendingListList.id == list_id).one()
@with_session
def delete_list_by_id(list_id, session=None):
entry_list = get_list_by_id(list_id=list_id, session=session)
if entry_list:
log.debug('deleting pending list with id %d', list_id)
session.delete(entry_list)
@with_session
def get_entries_by_list_id(
list_id,
start=None,
stop=None,
order_by='title',
descending=False,
approved=False,
filter=None,
entry_ids=None,
session=None,
):
log.debug('querying entries from pending list with id %d', list_id)
query = session.query(PendingListEntry).filter(PendingListEntry.list_id == list_id)
if filter:
query = query.filter(func.lower(PendingListEntry.title).contains(filter.lower()))
if approved:
query = query.filter(PendingListEntry.approved is approved)
if entry_ids:
query = query.filter(PendingListEntry.id.in_(entry_ids))
if descending:
query = query.order_by(getattr(PendingListEntry, order_by).desc())
else:
query = query.order_by(getattr(PendingListEntry, order_by))
return query.slice(start, stop).all()
@with_session
def get_entry_by_title(list_id, title, session=None):
entry_list = get_list_by_id(list_id=list_id, session=session)
if entry_list:
log.debug('fetching entry with title `%s` from list id %d', title, list_id)
return (
session.query(PendingListEntry)
.filter(and_(PendingListEntry.title == title, PendingListEntry.list_id == list_id))
.first()
)
@with_session
def get_entry_by_id(list_id, entry_id, session=None):
log.debug('fetching entry with id %d from list id %d', entry_id, list_id)
return (
session.query(PendingListEntry)
.filter(and_(PendingListEntry.id == entry_id, PendingListEntry.list_id == list_id))
.one()
)
|
[
"sqlalchemy.sql.elements.and_",
"flexget.db_schema.upgrade",
"sqlalchemy.orm.relationship",
"flexget.db_schema.versioned_base",
"sqlalchemy.Column",
"sqlalchemy.sql.schema.ForeignKey",
"flexget.utils.database.entry_synonym",
"sqlalchemy.func.lower",
"logging.getLogger"
] |
[((420, 450), 'logging.getLogger', 'logging.getLogger', (['plugin_name'], {}), '(plugin_name)\n', (437, 450), False, 'import logging\n'), ((458, 488), 'flexget.db_schema.versioned_base', 'versioned_base', (['plugin_name', '(0)'], {}), '(plugin_name, 0)\n', (472, 488), False, 'from flexget.db_schema import versioned_base\n'), ((492, 522), 'flexget.db_schema.upgrade', 'db_schema.upgrade', (['plugin_name'], {}), '(plugin_name)\n', (509, 522), False, 'from flexget import db_schema\n'), ((658, 691), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (664, 691), False, 'from sqlalchemy import Boolean, Column, DateTime, Integer, Unicode, func\n'), ((703, 731), 'sqlalchemy.Column', 'Column', (['Unicode'], {'unique': '(True)'}), '(Unicode, unique=True)\n', (709, 731), False, 'from sqlalchemy import Boolean, Column, DateTime, Integer, Unicode, func\n'), ((744, 782), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.now'}), '(DateTime, default=datetime.now)\n', (750, 782), False, 'from sqlalchemy import Boolean, Column, DateTime, Integer, Unicode, func\n'), ((797, 904), 'sqlalchemy.orm.relationship', 'relationship', (['"""PendingListEntry"""'], {'backref': '"""list"""', 'cascade': '"""all, delete, delete-orphan"""', 'lazy': '"""dynamic"""'}), "('PendingListEntry', backref='list', cascade=\n 'all, delete, delete-orphan', lazy='dynamic')\n", (809, 904), False, 'from sqlalchemy.orm import relationship\n'), ((1093, 1126), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (1099, 1126), False, 'from sqlalchemy import Boolean, Column, DateTime, Integer, Unicode, func\n'), ((1217, 1255), 'sqlalchemy.Column', 'Column', (['DateTime'], {'default': 'datetime.now'}), '(DateTime, default=datetime.now)\n', (1223, 1255), False, 'from sqlalchemy import Boolean, Column, DateTime, Integer, Unicode, func\n'), ((1268, 1283), 'sqlalchemy.Column', 'Column', (['Unicode'], 
{}), '(Unicode)\n', (1274, 1283), False, 'from sqlalchemy import Boolean, Column, DateTime, Integer, Unicode, func\n'), ((1303, 1318), 'sqlalchemy.Column', 'Column', (['Unicode'], {}), '(Unicode)\n', (1309, 1318), False, 'from sqlalchemy import Boolean, Column, DateTime, Integer, Unicode, func\n'), ((1331, 1354), 'sqlalchemy.Column', 'Column', (['"""json"""', 'Unicode'], {}), "('json', Unicode)\n", (1337, 1354), False, 'from sqlalchemy import Boolean, Column, DateTime, Integer, Unicode, func\n'), ((1367, 1389), 'flexget.utils.database.entry_synonym', 'entry_synonym', (['"""_json"""'], {}), "('_json')\n", (1380, 1389), False, 'from flexget.utils.database import entry_synonym, with_session\n'), ((1405, 1420), 'sqlalchemy.Column', 'Column', (['Boolean'], {}), '(Boolean)\n', (1411, 1420), False, 'from sqlalchemy import Boolean, Column, DateTime, Integer, Unicode, func\n'), ((1157, 1187), 'sqlalchemy.sql.schema.ForeignKey', 'ForeignKey', (['PendingListList.id'], {}), '(PendingListList.id)\n', (1167, 1187), False, 'from sqlalchemy.sql.schema import ForeignKey\n'), ((4707, 4781), 'sqlalchemy.sql.elements.and_', 'and_', (['(PendingListEntry.id == entry_id)', '(PendingListEntry.list_id == list_id)'], {}), '(PendingListEntry.id == entry_id, PendingListEntry.list_id == list_id)\n', (4711, 4781), False, 'from sqlalchemy.sql.elements import and_\n'), ((2685, 2717), 'sqlalchemy.func.lower', 'func.lower', (['PendingListList.name'], {}), '(PendingListList.name)\n', (2695, 2717), False, 'from sqlalchemy import Boolean, Column, DateTime, Integer, Unicode, func\n'), ((3620, 3654), 'sqlalchemy.func.lower', 'func.lower', (['PendingListEntry.title'], {}), '(PendingListEntry.title)\n', (3630, 3654), False, 'from sqlalchemy import Boolean, Column, DateTime, Integer, Unicode, func\n'), ((4383, 4457), 'sqlalchemy.sql.elements.and_', 'and_', (['(PendingListEntry.title == title)', '(PendingListEntry.list_id == list_id)'], {}), '(PendingListEntry.title == title, PendingListEntry.list_id == 
list_id)\n', (4387, 4457), False, 'from sqlalchemy.sql.elements import and_\n')]
|
import sys
import os
import logging
import argparse
from bs4 import BeautifulSoup
import requests
# Output data to stdout instead of stderr
log = logging.getLogger()
log.setLevel(logging.INFO)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.INFO)
log.addHandler(handler)
# Parse the argument for user specification files
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--txt",
type=str,
required=True,
help="Text file for downloading a pack of icons")
parser.add_argument("-o", "--output",
type=str,
required=True,
help="Output directory for the download")
args = parser.parse_args()
# Check directory is exist or not
os.makedirs(f"{args.output}",
exist_ok=True)
# Create the fake headers to cheat the website
headers = {
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/56.0.2924.87 Safari/537.36 "
}
# Save the category files
buffer_information = open(args.txt, "r").readlines()
for cur_information in buffer_information:
# Remove "\n"
cur_website, categories = cur_information.strip().split(",")
# Know the current category
logging.info(categories)
# End line
if cur_website == "":
sys.exit()
count = 0
# Get the html and parse the tags
response = requests.get(f"{cur_website}",
headers=headers)
soup = BeautifulSoup(response.text, "html.parser")
find_ul = soup.find("section", {"class": "search-result"}).find("ul", {"class": "icons"})
find_li = find_ul.findAll("li", {"class": "icon--item"})
# Run through all the image section
for li in find_li:
try:
img = li.find("img", {"class": "lzy"})
img_url = img.get("data-src")
# Check the url is valid for saving image file
if not img_url.endswith(".png"):
continue
# Save the data
img_data = requests.get(img_url).content
# Save the image and text file
with open(f"{args.output}/{categories}_{count+1}.png", "wb") as img_file, \
open(f"{args.output}/{categories}_{count+1}.txt", "w") as text_file:
img_file.write(img_data)
text_file.write(f"{categories}\n")
count += 1
except Exception:
break
|
[
"os.makedirs",
"argparse.ArgumentParser",
"logging.StreamHandler",
"logging.info",
"requests.get",
"bs4.BeautifulSoup",
"sys.exit",
"logging.getLogger"
] |
[((147, 166), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (164, 166), False, 'import logging\n'), ((205, 238), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (226, 238), False, 'import logging\n'), ((355, 380), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (378, 380), False, 'import argparse\n'), ((778, 822), 'os.makedirs', 'os.makedirs', (['f"""{args.output}"""'], {'exist_ok': '(True)'}), "(f'{args.output}', exist_ok=True)\n", (789, 822), False, 'import os\n'), ((1304, 1328), 'logging.info', 'logging.info', (['categories'], {}), '(categories)\n', (1316, 1328), False, 'import logging\n'), ((1460, 1507), 'requests.get', 'requests.get', (['f"""{cur_website}"""'], {'headers': 'headers'}), "(f'{cur_website}', headers=headers)\n", (1472, 1507), False, 'import requests\n'), ((1548, 1591), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.text', '"""html.parser"""'], {}), "(response.text, 'html.parser')\n", (1561, 1591), False, 'from bs4 import BeautifulSoup\n'), ((1379, 1389), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1387, 1389), False, 'import sys\n'), ((2101, 2122), 'requests.get', 'requests.get', (['img_url'], {}), '(img_url)\n', (2113, 2122), False, 'import requests\n')]
|
"""Bridge for connecting a UI instance to nvim."""
import sys
from threading import Semaphore, Thread
from traceback import format_exc
class UIBridge(object):
"""UIBridge class. Connects a Nvim instance to a UI class."""
def connect(self, nvim, ui):
"""Connect nvim and the ui.
This will start loops for handling the UI and nvim events while
also synchronizing both.
"""
self._error = None
self._nvim = nvim
self._ui = ui
self._nvim_event_loop()
if self._error:
print(self._error)
def exit(self):
"""Disconnect by exiting nvim."""
self.detach()
self._call(self._nvim.quit)
def input(self, input_str):
"""Send input to nvim."""
self._call(self._nvim.input, input_str)
def resize(self, columns, rows):
"""Send a resize request to nvim."""
self._call(self._nvim.ui_try_resize, columns, rows)
def attach(self, columns, rows, rgb):
"""Attach the UI to nvim."""
self._call(self._nvim.ui_attach, columns, rows, rgb)
def detach(self):
"""Detach the UI from nvim."""
self._call(self._nvim.ui_detach)
def _call(self, fn, *args):
self._nvim.async_call(fn, *args)
def _nvim_event_loop(self):
def on_setup():
self.input("<ESC>:let g:NeoSFMLGUIChannelID=" + str(self._nvim.channel_id) + "<CR>")
import messages_from_ui
file_to_edit = messages_from_ui.get_command_line_argument()
if file_to_edit != None and file_to_edit != "":
'''
In case there is a swap file, command_input will error out
and the program won't work. Use input instead.
'''
self._nvim.input("<esc>:edit " + file_to_edit + "<cr>")
self._ui.start(self)
self._ui.switch_to_navigator()
def on_request(method, args):
if method == "switchToNavigator":
self._ui.switch_to_navigator()
else:
raise Exception('Not implemented')
def on_notification(method, updates):
def apply_updates():
try:
for update in updates:
try:
handler = getattr(self._ui, '_nvim_' + update[0])
#print('_nvim_' + update[0])
except AttributeError:
pass
else:
#for args in update[1:]:
#print(*args, end = " ")
#print("END")
text = ''
if update[0] == 'put':
for args in update[1:]:
text += str(args)[2]
handler(text)
else:
for args in update[1:]:
handler(*args)
except:
self._error = format_exc()
self._call(self._nvim.quit)
if method == 'redraw':
if len(updates) > 0:
self._ui._nvim_lock_update_mutex();
apply_updates();
self._ui._nvim_redraw();
self._ui._nvim_unlock_update_mutex();
self._nvim.run_loop(on_request, on_notification, on_setup)
self._ui.quit() #end definition of nvim event loop
|
[
"messages_from_ui.get_command_line_argument",
"traceback.format_exc"
] |
[((1494, 1538), 'messages_from_ui.get_command_line_argument', 'messages_from_ui.get_command_line_argument', ([], {}), '()\n', (1536, 1538), False, 'import messages_from_ui\n'), ((3145, 3157), 'traceback.format_exc', 'format_exc', ([], {}), '()\n', (3155, 3157), False, 'from traceback import format_exc\n')]
|
"""
Utility to execute command line processes.
"""
import subprocess
import os
import sys
import re
def execute( command ):
"""
Convenience function for executing commands as though
from the command line. The command is executed and the
results are returned as str list. For example,
command = "/usr/bin/git commit -m 'Fixes a bug.'".
Parameters
----------
command : str
command with single space between terms
Return
------
list of the output : str
"""
args = command.split()
# using the Popen function to execute the
# command and store the result in temp.
# it returns a tuple that contains the
# data and the error if any.
outputStreams = subprocess.Popen(args, stdout = subprocess.PIPE)
# we use the communicate function
# to fetch the output
stdout_data, stderr_data = outputStreams.communicate()
# https://docs.python.org/3/library/subprocess.html#subprocess.Popen.communicate
# communicate() returns a tuple (stdout_data, stderr_data)
#print('stdout_data: ' + str( stdout_data ) )
#print('stderr_data: ' + str( stderr_data ) )
# splitting the output so that
# we can parse them line by line
#print( 'COMMAND_LINE: ' + str( type(stdout_data) ) )
#output = stdout_data.split("\n")
# Issue: python2 stdout_data is a str (or str-like)
# python3 stdout_data is bytes and cnnot be directly split
# Solution: convert to str using decode, check type instead of version check
# NB: If not done correctly the system logging fails and pollutes the audit log!!!
# type=SYSCALL msg=audit(1630261099.364:78511): arch=40000003 syscall=295 success=no exit=-13
# ... comm="python3" exe="/usr/bin/python3.7" subj==unconfined key="access"
# type=CWD msg=audit(1630261099.364:78511): cwd="/home/pi/container-escape-dataset/src"
#
#output = re.split('\n', str(stdout_data) )
output = None
#print( 'COMMAND_LINE: ' + str(type(stdout_data)) )
if( isinstance(stdout_data, str) ):
# this is python2 behaviour, stdout_data is str
output = stdout_data.split("\n")
else:
# this is python3 behaviour, stdout_data is bytes
output = re.split('\n', stdout_data.decode('utf-8') )
# a variable to store the output
result = []
# iterate through the output
# line by line
for line in output:
#print('LINE: ' + line)
result.append(line)
return result
#
# Test main function
#
def main():
command = sys.argv[1]
result = execute(command)
for line in result:
print(line)
if __name__ == '__main__':
main()
|
[
"subprocess.Popen"
] |
[((741, 787), 'subprocess.Popen', 'subprocess.Popen', (['args'], {'stdout': 'subprocess.PIPE'}), '(args, stdout=subprocess.PIPE)\n', (757, 787), False, 'import subprocess\n')]
|
import sqlite3
import sys
import requests
from lxml import html
from lxml import etree
from bs4 import BeautifulSoup
def get_contest_info(contest_id):
contest_info = {}
url = "https://codeforces.com/contest/"+contest_id
response = requests.get(url)
if response.status_code != 200:
sys.exit(0)
html_content = html.document_fromstring(response.content)
node=etree.tostring(html_content).decode('utf-8')
soup = BeautifulSoup(node,'lxml')
contest_info['contest_name'] = soup.findAll("th", {"class" : "left"})[0].text
node = html_content.find_class("problems")[0]
node=etree.tostring(html_content).decode('utf-8')
soup = BeautifulSoup(node,'lxml')
for i in soup.findAll("tr"):
try:
problem_id = i.a.text.replace(" ","").replace("\r","").replace("\n","")
problem_name = i.div.div.a.text.replace("\r","").replace("\n","")
contest_info[problem_id] = problem_name
except:
pass
return contest_info
conn = sqlite3.connect('sqlite.db')
cur = conn.cursor()
contest_id = (sys.argv)[1]
problem_list_1 = (sys.argv)[2:]
problem_list_2 = list(cur.execute('SELECT problem_id FROM problems WHERE contest_id = '+str(contest_id)).fetchall())
problem_list_2 = [i[0] for i in problem_list_2]
problem_list = list(set(problem_list_1) - set(problem_list_2))
problem_list.sort()
contest_info = get_contest_info(contest_id)
cur.execute('SELECT * FROM contests WHERE contest_id = '+ str(contest_id))
rows = cur.fetchall()
if len(rows) :
cur.execute('UPDATE contests SET update_time = DATETIME() WHERE contest_id = '+str(contest_id))
else:
cur.execute('INSERT INTO contests(contest_id, contest_name, update_time) VALUES ('+str(contest_id)+', "'+contest_info['contest_name']+'", DATETIME("now"))')
conn.commit()
for problem in problem_list:
cur.execute('INSERT INTO problems(contest_id, contest_name, problem_id, problem_name) VALUES('+str(contest_id)+', "'+contest_info['contest_name']+'", "'+problem+'", "'+contest_info[problem]+'")')
conn.commit()
conn.close()
|
[
"lxml.html.document_fromstring",
"sqlite3.connect",
"requests.get",
"bs4.BeautifulSoup",
"lxml.etree.tostring",
"sys.exit"
] |
[((928, 956), 'sqlite3.connect', 'sqlite3.connect', (['"""sqlite.db"""'], {}), "('sqlite.db')\n", (943, 956), False, 'import sqlite3\n'), ((235, 252), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (247, 252), False, 'import requests\n'), ((316, 358), 'lxml.html.document_fromstring', 'html.document_fromstring', (['response.content'], {}), '(response.content)\n', (340, 358), False, 'from lxml import html\n'), ((418, 445), 'bs4.BeautifulSoup', 'BeautifulSoup', (['node', '"""lxml"""'], {}), "(node, 'lxml')\n", (431, 445), False, 'from bs4 import BeautifulSoup\n'), ((630, 657), 'bs4.BeautifulSoup', 'BeautifulSoup', (['node', '"""lxml"""'], {}), "(node, 'lxml')\n", (643, 657), False, 'from bs4 import BeautifulSoup\n'), ((288, 299), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (296, 299), False, 'import sys\n'), ((365, 393), 'lxml.etree.tostring', 'etree.tostring', (['html_content'], {}), '(html_content)\n', (379, 393), False, 'from lxml import etree\n'), ((577, 605), 'lxml.etree.tostring', 'etree.tostring', (['html_content'], {}), '(html_content)\n', (591, 605), False, 'from lxml import etree\n')]
|
import os
import matplotlib.pyplot as plt
from tensorflow import keras
from tensorflow.keras.models import Model
def plot_model(model: Model, path: str) -> None:
if not os.path.isfile(path):
keras.utils.plot_model(model, to_file=path, show_shapes=True)
def plot_learning_history(fit, metric: str = "accuracy", path: str = "history.png") -> None:
"""Plot learning curve
Args:
fit (Any): History object
path (str, default="history.png")
"""
fig, (axL, axR) = plt.subplots(ncols=2, figsize=(10, 4))
axL.plot(fit.history["loss"], label="train")
axL.plot(fit.history["val_loss"], label="validation")
axL.set_title("Loss")
axL.set_xlabel("epoch")
axL.set_ylabel("loss")
axL.legend(loc="upper right")
axR.plot(fit.history[metric], label="train")
axR.plot(fit.history[f"val_{metric}"], label="validation")
axR.set_title(metric.capitalize())
axR.set_xlabel("epoch")
axR.set_ylabel(metric)
axR.legend(loc="best")
fig.savefig(path)
plt.close()
|
[
"tensorflow.keras.utils.plot_model",
"matplotlib.pyplot.close",
"os.path.isfile",
"matplotlib.pyplot.subplots"
] |
[((506, 544), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'ncols': '(2)', 'figsize': '(10, 4)'}), '(ncols=2, figsize=(10, 4))\n', (518, 544), True, 'import matplotlib.pyplot as plt\n'), ((1028, 1039), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (1037, 1039), True, 'import matplotlib.pyplot as plt\n'), ((176, 196), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (190, 196), False, 'import os\n'), ((206, 267), 'tensorflow.keras.utils.plot_model', 'keras.utils.plot_model', (['model'], {'to_file': 'path', 'show_shapes': '(True)'}), '(model, to_file=path, show_shapes=True)\n', (228, 267), False, 'from tensorflow import keras\n')]
|
import pytest
import mackinac
@pytest.fixture(scope='module')
def test_model(b_theta_genome_id, b_theta_id):
# Reconstruct a model so there is a folder in the workspace.
stats = mackinac.create_patric_model(b_theta_genome_id, model_id=b_theta_id)
yield stats
mackinac.delete_patric_model(b_theta_id)
@pytest.fixture(scope='module')
def test_file():
return '/{0}/modelseed/emergency'.format(mackinac.workspace.ws_client.username)
@pytest.fixture(scope='module')
def test_file_data():
return 'This is a test of the emergency broadcasting system.'
@pytest.fixture(scope='module')
def bad_reference():
return '/{0}/modelseed/badref'.format(mackinac.workspace.ws_client.username)
@pytest.mark.usefixtures('authenticate')
class TestWorkspace:
    """Exercise the mackinac workspace helpers against a live ModelSEED server."""
    # Remember these tests are calling a server and can take a while depending on the network
    # and how busy the server is servicing other requests.
    def test_list_objects(self, test_model):
        # A freshly reconstructed model folder holds 13 objects; each entry
        # is a 12-element metadata tuple.
        output = mackinac.list_workspace_objects(test_model['ref'])
        assert len(output) == 13
        assert len(output[0]) == 12
    def test_list_objects_by_name(self, test_model):
        # Sorted by name, the "<id>.cpdtbl" object comes first.
        output = mackinac.list_workspace_objects(test_model['ref'], sort_key='name')
        assert len(output) == 13
        assert output[0][0] == '{0}.cpdtbl'.format(test_model['id'])
    def test_list_objects_by_type(self, test_model):
        # Entry index 1 in each tuple is the object type.
        output = mackinac.list_workspace_objects(test_model['ref'], sort_key='type')
        assert len(output) == 13
        assert output[4][1] == 'genome'
        assert output[5][1] == 'model'
    def test_list_objects_bad_folder(self):
        # This fails because there is no leading forward slash.
        bad_reference = '{0}/modelseed/badref'.format(mackinac.workspace.ws_client.username)
        with pytest.raises(mackinac.SeedClient.ObjectNotFoundError):
            mackinac.list_workspace_objects(bad_reference)
    def test_list_objects_no_exist_folder(self):
        # A syntactically valid reference to a missing folder returns None
        # rather than raising.
        no_exist_reference = '/{0}/modelseed/badref'.format(mackinac.workspace.ws_client.username)
        output = mackinac.list_workspace_objects(no_exist_reference)
        assert output is None
    def test_list_objects_bad_sort_key(self, test_model):
        # Unknown sort keys surface as a KeyError.
        with pytest.raises(KeyError):
            mackinac.list_workspace_objects(test_model['ref'], sort_key='foobar')
    def test_get_object_meta(self, test_model):
        # Folder metadata: [name, type, ..., attributes dict at index 8].
        output = mackinac.get_workspace_object_meta(test_model['ref'])
        assert len(output) == 12
        assert output[0] == '.' + test_model['id']
        assert output[1] == 'folder'
        assert output[8]['is_folder'] == 1
    def test_get_object_meta_bad_ref(self):
        # Same missing-leading-slash failure mode as the list call.
        bad_reference = '{0}/modelseed/badref'.format(mackinac.workspace.ws_client.username)
        with pytest.raises(mackinac.SeedClient.ObjectNotFoundError):
            mackinac.get_workspace_object_meta(bad_reference)
    def test_get_object_data_json(self, test_model):
        # Default json_data=True decodes the model object into a dict.
        reference = '{0}/model'.format(test_model['ref'])
        output = mackinac.get_workspace_object_data(reference)
        assert output['id'] == '.' + test_model['id']
        assert len(output['modelcompartments']) == 2
        assert 'modelcompounds' in output
        assert 'modelreactions' in output
    def test_get_object_data_text(self, test_model):
        # json_data=False returns the raw text of the object.
        reference = '{0}/{1}.rxntbl'.format(test_model['ref'], test_model['id'])
        output = mackinac.get_workspace_object_data(reference, json_data=False)
        assert len(output) > 100000  # Just a really long string
    def test_get_object_data_bad_ref(self, bad_reference):
        with pytest.raises(mackinac.SeedClient.ObjectNotFoundError):
            mackinac.get_workspace_object_data(bad_reference)
    def test_put_object_no_data(self, test_file):
        # Creating an object with no data: size (index 6) is 0, metadata
        # dict (index 7) is empty.
        output = mackinac.put_workspace_object(test_file, 'string')
        assert output[0] == 'emergency'
        assert output[1] == 'string'
        assert output[6] == 0
        assert len(output[7]) == 0
    def test_put_object_meta(self, test_file, b_theta_id):
        # Overwrite with user metadata attached; still no data payload.
        output = mackinac.put_workspace_object(test_file, 'string', metadata={'model': b_theta_id}, overwrite=True)
        assert output[0] == 'emergency'
        assert output[1] == 'string'
        assert output[6] == 0
        assert len(output[7]) == 1
    def test_put_object_data(self, test_file, test_file_data, b_theta_id):
        # With a data payload the reported size matches the string length.
        output = mackinac.put_workspace_object(test_file, 'string', data=test_file_data,
                                      metadata={'model': b_theta_id}, overwrite=True)
        assert output[0] == 'emergency'
        assert output[1] == 'string'
        assert output[6] == len(test_file_data)
        assert len(output[7]) == 1
    def test_delete_object(self, test_file):
        # Delete returns the metadata tuple of the removed object.
        output = mackinac.delete_workspace_object(test_file)
        assert output[0] == 'emergency'
    def test_delete_object_bad_ref(self, bad_reference):
        with pytest.raises(mackinac.SeedClient.ObjectNotFoundError):
            mackinac.delete_workspace_object(bad_reference)
|
[
"mackinac.get_workspace_object_meta",
"mackinac.delete_workspace_object",
"mackinac.get_workspace_object_data",
"mackinac.create_patric_model",
"pytest.fixture",
"pytest.raises",
"mackinac.delete_patric_model",
"mackinac.put_workspace_object",
"mackinac.list_workspace_objects",
"pytest.mark.usefixtures"
] |
[((34, 64), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (48, 64), False, 'import pytest\n'), ((322, 352), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (336, 352), False, 'import pytest\n'), ((457, 487), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (471, 487), False, 'import pytest\n'), ((579, 609), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (593, 609), False, 'import pytest\n'), ((715, 754), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""authenticate"""'], {}), "('authenticate')\n", (738, 754), False, 'import pytest\n'), ((189, 257), 'mackinac.create_patric_model', 'mackinac.create_patric_model', (['b_theta_genome_id'], {'model_id': 'b_theta_id'}), '(b_theta_genome_id, model_id=b_theta_id)\n', (217, 257), False, 'import mackinac\n'), ((278, 318), 'mackinac.delete_patric_model', 'mackinac.delete_patric_model', (['b_theta_id'], {}), '(b_theta_id)\n', (306, 318), False, 'import mackinac\n'), ((993, 1043), 'mackinac.list_workspace_objects', 'mackinac.list_workspace_objects', (["test_model['ref']"], {}), "(test_model['ref'])\n", (1024, 1043), False, 'import mackinac\n'), ((1184, 1251), 'mackinac.list_workspace_objects', 'mackinac.list_workspace_objects', (["test_model['ref']"], {'sort_key': '"""name"""'}), "(test_model['ref'], sort_key='name')\n", (1215, 1251), False, 'import mackinac\n'), ((1425, 1492), 'mackinac.list_workspace_objects', 'mackinac.list_workspace_objects', (["test_model['ref']"], {'sort_key': '"""type"""'}), "(test_model['ref'], sort_key='type')\n", (1456, 1492), False, 'import mackinac\n'), ((2101, 2152), 'mackinac.list_workspace_objects', 'mackinac.list_workspace_objects', (['no_exist_reference'], {}), '(no_exist_reference)\n', (2132, 2152), False, 'import mackinac\n'), ((2428, 2481), 'mackinac.get_workspace_object_meta', 
'mackinac.get_workspace_object_meta', (["test_model['ref']"], {}), "(test_model['ref'])\n", (2462, 2481), False, 'import mackinac\n'), ((3044, 3089), 'mackinac.get_workspace_object_data', 'mackinac.get_workspace_object_data', (['reference'], {}), '(reference)\n', (3078, 3089), False, 'import mackinac\n'), ((3433, 3495), 'mackinac.get_workspace_object_data', 'mackinac.get_workspace_object_data', (['reference'], {'json_data': '(False)'}), '(reference, json_data=False)\n', (3467, 3495), False, 'import mackinac\n'), ((3820, 3870), 'mackinac.put_workspace_object', 'mackinac.put_workspace_object', (['test_file', '"""string"""'], {}), "(test_file, 'string')\n", (3849, 3870), False, 'import mackinac\n'), ((4090, 4192), 'mackinac.put_workspace_object', 'mackinac.put_workspace_object', (['test_file', '"""string"""'], {'metadata': "{'model': b_theta_id}", 'overwrite': '(True)'}), "(test_file, 'string', metadata={'model':\n b_theta_id}, overwrite=True)\n", (4119, 4192), False, 'import mackinac\n'), ((4424, 4547), 'mackinac.put_workspace_object', 'mackinac.put_workspace_object', (['test_file', '"""string"""'], {'data': 'test_file_data', 'metadata': "{'model': b_theta_id}", 'overwrite': '(True)'}), "(test_file, 'string', data=test_file_data,\n metadata={'model': b_theta_id}, overwrite=True)\n", (4453, 4547), False, 'import mackinac\n'), ((4814, 4857), 'mackinac.delete_workspace_object', 'mackinac.delete_workspace_object', (['test_file'], {}), '(test_file)\n', (4846, 4857), False, 'import mackinac\n'), ((1820, 1874), 'pytest.raises', 'pytest.raises', (['mackinac.SeedClient.ObjectNotFoundError'], {}), '(mackinac.SeedClient.ObjectNotFoundError)\n', (1833, 1874), False, 'import pytest\n'), ((1888, 1934), 'mackinac.list_workspace_objects', 'mackinac.list_workspace_objects', (['bad_reference'], {}), '(bad_reference)\n', (1919, 1934), False, 'import mackinac\n'), ((2255, 2278), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (2268, 2278), False, 'import 
pytest\n'), ((2292, 2361), 'mackinac.list_workspace_objects', 'mackinac.list_workspace_objects', (["test_model['ref']"], {'sort_key': '"""foobar"""'}), "(test_model['ref'], sort_key='foobar')\n", (2323, 2361), False, 'import mackinac\n'), ((2797, 2851), 'pytest.raises', 'pytest.raises', (['mackinac.SeedClient.ObjectNotFoundError'], {}), '(mackinac.SeedClient.ObjectNotFoundError)\n', (2810, 2851), False, 'import pytest\n'), ((2865, 2914), 'mackinac.get_workspace_object_meta', 'mackinac.get_workspace_object_meta', (['bad_reference'], {}), '(bad_reference)\n', (2899, 2914), False, 'import mackinac\n'), ((3634, 3688), 'pytest.raises', 'pytest.raises', (['mackinac.SeedClient.ObjectNotFoundError'], {}), '(mackinac.SeedClient.ObjectNotFoundError)\n', (3647, 3688), False, 'import pytest\n'), ((3702, 3751), 'mackinac.get_workspace_object_data', 'mackinac.get_workspace_object_data', (['bad_reference'], {}), '(bad_reference)\n', (3736, 3751), False, 'import mackinac\n'), ((4969, 5023), 'pytest.raises', 'pytest.raises', (['mackinac.SeedClient.ObjectNotFoundError'], {}), '(mackinac.SeedClient.ObjectNotFoundError)\n', (4982, 5023), False, 'import pytest\n'), ((5037, 5084), 'mackinac.delete_workspace_object', 'mackinac.delete_workspace_object', (['bad_reference'], {}), '(bad_reference)\n', (5069, 5084), False, 'import mackinac\n')]
|
#!/usr/bin/env python
# encoding: utf-8
"""
@version: ??
@author: liangliangyy
@license: MIT Licence
@contact: <EMAIL>
@site: https://www.lylinux.net/
@software: PyCharm
@file: urls.py
@time: 2016/11/2 7:15 PM
"""
from django.urls import path
from django.views.decorators.cache import cache_page
from website.utils import my_cache
from . import views as v
app_name = "mobile"
# URL routes for the mobile app. Every view is wrapped in my_cache so the
# rendered response is cached. (cache_page is imported but not used in this
# module's visible code.)
urlpatterns = [
    path('service', my_cache(v.ServiceListView.as_view), name='service'),
    path('service/<int:pk>', my_cache(v.ServiceDetailView.as_view), name='service-detail'),
    path('product', my_cache(v.ProductListView.as_view), name='product'),
    path('product/<int:pk>', my_cache(v.ProductDetailView.as_view), name='product-detail'),
    path('category/<int:category_pk>', my_cache(v.CategoryDetailView.as_view), name='category'),
    path('case', my_cache(v.CaseListView.as_view), name='case'),
    path('case/<int:pk>', my_cache(v.CaseDetailView.as_view), name='case-detail'),
    path('about/<int:pk>', my_cache(v.AboutDetailView.as_view), name='about'),
    # path('', v.index, name='index'),
]
|
[
"website.utils.my_cache"
] |
[((416, 451), 'website.utils.my_cache', 'my_cache', (['v.ServiceListView.as_view'], {}), '(v.ServiceListView.as_view)\n', (424, 451), False, 'from website.utils import my_cache\n'), ((499, 536), 'website.utils.my_cache', 'my_cache', (['v.ServiceDetailView.as_view'], {}), '(v.ServiceDetailView.as_view)\n', (507, 536), False, 'from website.utils import my_cache\n'), ((582, 617), 'website.utils.my_cache', 'my_cache', (['v.ProductListView.as_view'], {}), '(v.ProductListView.as_view)\n', (590, 617), False, 'from website.utils import my_cache\n'), ((665, 702), 'website.utils.my_cache', 'my_cache', (['v.ProductDetailView.as_view'], {}), '(v.ProductDetailView.as_view)\n', (673, 702), False, 'from website.utils import my_cache\n'), ((767, 805), 'website.utils.my_cache', 'my_cache', (['v.CategoryDetailView.as_view'], {}), '(v.CategoryDetailView.as_view)\n', (775, 805), False, 'from website.utils import my_cache\n'), ((842, 874), 'website.utils.my_cache', 'my_cache', (['v.CaseListView.as_view'], {}), '(v.CaseListView.as_view)\n', (850, 874), False, 'from website.utils import my_cache\n'), ((916, 950), 'website.utils.my_cache', 'my_cache', (['v.CaseDetailView.as_view'], {}), '(v.CaseDetailView.as_view)\n', (924, 950), False, 'from website.utils import my_cache\n'), ((1000, 1035), 'website.utils.my_cache', 'my_cache', (['v.AboutDetailView.as_view'], {}), '(v.AboutDetailView.as_view)\n', (1008, 1035), False, 'from website.utils import my_cache\n')]
|
# Generated by Django 3.2.6 on 2021-08-04 18:09
import django.core.serializers.json
from django.db import migrations, models
class Migration(migrations.Migration):
    # Re-declares Job.job_definition with DjangoJSONEncoder so values such as
    # datetimes, Decimals and UUIDs can be serialized into the JSON field.
    dependencies = [
        ('worlds', '0009_job_job_definition'),
    ]
    operations = [
        migrations.AlterField(
            model_name='job',
            name='job_definition',
            field=models.JSONField(blank=True, encoder=django.core.serializers.json.DjangoJSONEncoder, null=True),
        ),
    ]
|
[
"django.db.models.JSONField"
] |
[((376, 476), 'django.db.models.JSONField', 'models.JSONField', ([], {'blank': '(True)', 'encoder': 'django.core.serializers.json.DjangoJSONEncoder', 'null': '(True)'}), '(blank=True, encoder=django.core.serializers.json.\n DjangoJSONEncoder, null=True)\n', (392, 476), False, 'from django.db import migrations, models\n')]
|
# -*- coding: utf-8 -*-
import click
import logging
from pathlib import Path
from dotenv import find_dotenv, load_dotenv
import netCDF4 as nc
import pickle as pk
import pandas as pd
import datetime
import os
import numpy as np
import sys
src_dir = os.path.join(os.getcwd(), 'src/data')
sys.path.append(src_dir)
from helper import save_pkl, load_pkl, min_max_norm
src_dir = os.path.join(os.getcwd(), 'src/models')
sys.path.append(src_dir)
# from competition_model_class import Seq2Seq_Class # during Debug and Developing
from seq2seq_class import Seq2Seq_Class # during the game and competition
def train(processed_path, train_data, val_data, model_save_path, model_name):
    """Load pickled train/validation dicts, trim the observation window,
    and fit the Seq2Seq model.

    Args:
        processed_path: directory containing the pickled data files.
        train_data, val_data: pickle file names for train / validation sets.
        model_save_path: directory where model structure/weights are saved.
        model_name: base name used for the saved model artifacts.
    """
    train_dict = load_pkl(processed_path, train_data)
    val_dict = load_pkl(processed_path, val_data)
    print(train_dict.keys())
    print('Original input_obs data shape:')
    print(train_dict['input_obs'].shape)
    print(val_dict['input_obs'].shape)
    print('After clipping the 9 days, input_obs data shape:')
    # Drop the last 9 steps along axis 1 ("9 days" per the log message above).
    train_dict['input_obs'] = train_dict['input_obs'][:,:-9,:,:]
    val_dict['input_obs'] = val_dict['input_obs'][:,:-9,:,:]
    print(train_dict['input_obs'].shape)
    print(val_dict['input_obs'].shape)
    enc_dec = Seq2Seq_Class(model_save_path=model_save_path,
                model_structure_name=model_name,
                model_weights_name=model_name,
                model_name=model_name)
    enc_dec.build_graph()
    val_size=val_dict['input_ruitu'].shape[0] # 87 val samples
    val_ids=[]
    val_times=[]
    # Build an id tensor of shape (val_size, 37, 10): plane i is filled with
    # the constant i (presumably one id per station/series -- TODO confirm).
    for i in range(10):
        val_ids.append(np.ones(shape=(val_size,37))*i)
    val_ids = np.stack(val_ids, axis=-1)
    print('val_ids.shape is:', val_ids.shape)
    # Time-step indices 0..36 repeated per validation sample: (val_size, 37).
    val_times = np.array(range(37))
    val_times = np.tile(val_times,(val_size,1))
    print('val_times.shape is:',val_times.shape)
    enc_dec.fit(train_dict['input_obs'], train_dict['input_ruitu'], train_dict['ground_truth'],
            val_dict['input_obs'], val_dict['input_ruitu'], val_dict['ground_truth'], val_ids = val_ids, val_times=val_times,
            iterations=10000, batch_size=512, validation=True)
    print('Training finished!')
@click.command()
@click.argument('processed_path', type=click.Path(exists=True))
@click.option('--train_data', type=str)
@click.option('--val_data', type=str)
@click.argument('model_save_path', type=click.Path(exists=True))
@click.option('--model_name', type=str)
def main(processed_path, train_data, val_data, model_save_path, model_name):
    """Command-line entry point: train the Seq2Seq model on pickled data
    found in PROCESSED_PATH and save artifacts under MODEL_SAVE_PATH.
    """
    # Logger is created for parity with the project template; train() does
    # its own printing.
    logger = logging.getLogger(__name__)
    train(processed_path, train_data, val_data, model_save_path, model_name)
if __name__ == '__main__':
    log_fmt = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    logging.basicConfig(level=logging.INFO, format=log_fmt)
    # not used in this stub but often useful for finding various files
    project_dir = Path(__file__).resolve().parents[2]
    # find .env automagically by walking up directories until it's found, then
    # load up the .env entries as environment variables
    load_dotenv(find_dotenv())
    main()
|
[
"sys.path.append",
"numpy.stack",
"logging.basicConfig",
"dotenv.find_dotenv",
"os.getcwd",
"click.option",
"numpy.ones",
"click.command",
"pathlib.Path",
"click.Path",
"numpy.tile",
"seq2seq_class.Seq2Seq_Class",
"helper.load_pkl",
"logging.getLogger"
] |
[((287, 311), 'sys.path.append', 'sys.path.append', (['src_dir'], {}), '(src_dir)\n', (302, 311), False, 'import sys\n'), ((414, 438), 'sys.path.append', 'sys.path.append', (['src_dir'], {}), '(src_dir)\n', (429, 438), False, 'import sys\n'), ((2158, 2173), 'click.command', 'click.command', ([], {}), '()\n', (2171, 2173), False, 'import click\n'), ((2239, 2277), 'click.option', 'click.option', (['"""--train_data"""'], {'type': 'str'}), "('--train_data', type=str)\n", (2251, 2277), False, 'import click\n'), ((2279, 2315), 'click.option', 'click.option', (['"""--val_data"""'], {'type': 'str'}), "('--val_data', type=str)\n", (2291, 2315), False, 'import click\n'), ((2382, 2420), 'click.option', 'click.option', (['"""--model_name"""'], {'type': 'str'}), "('--model_name', type=str)\n", (2394, 2420), False, 'import click\n'), ((262, 273), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (271, 273), False, 'import os\n'), ((387, 398), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (396, 398), False, 'import os\n'), ((692, 728), 'helper.load_pkl', 'load_pkl', (['processed_path', 'train_data'], {}), '(processed_path, train_data)\n', (700, 728), False, 'from helper import save_pkl, load_pkl, min_max_norm\n'), ((744, 778), 'helper.load_pkl', 'load_pkl', (['processed_path', 'val_data'], {}), '(processed_path, val_data)\n', (752, 778), False, 'from helper import save_pkl, load_pkl, min_max_norm\n'), ((1218, 1356), 'seq2seq_class.Seq2Seq_Class', 'Seq2Seq_Class', ([], {'model_save_path': 'model_save_path', 'model_structure_name': 'model_name', 'model_weights_name': 'model_name', 'model_name': 'model_name'}), '(model_save_path=model_save_path, model_structure_name=\n model_name, model_weights_name=model_name, model_name=model_name)\n', (1231, 1356), False, 'from seq2seq_class import Seq2Seq_Class\n'), ((1632, 1658), 'numpy.stack', 'np.stack', (['val_ids'], {'axis': '(-1)'}), '(val_ids, axis=-1)\n', (1640, 1658), True, 'import numpy as np\n'), ((1757, 1790), 'numpy.tile', 'np.tile', 
(['val_times', '(val_size, 1)'], {}), '(val_times, (val_size, 1))\n', (1764, 1790), True, 'import numpy as np\n'), ((2660, 2687), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (2677, 2687), False, 'import logging\n'), ((2871, 2926), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': 'log_fmt'}), '(level=logging.INFO, format=log_fmt)\n', (2890, 2926), False, 'import logging\n'), ((2213, 2236), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (2223, 2236), False, 'import click\n'), ((2356, 2379), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (2366, 2379), False, 'import click\n'), ((3205, 3218), 'dotenv.find_dotenv', 'find_dotenv', ([], {}), '()\n', (3216, 3218), False, 'from dotenv import find_dotenv, load_dotenv\n'), ((1586, 1615), 'numpy.ones', 'np.ones', ([], {'shape': '(val_size, 37)'}), '(shape=(val_size, 37))\n', (1593, 1615), True, 'import numpy as np\n'), ((3017, 3031), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (3021, 3031), False, 'from pathlib import Path\n')]
|
import pickle
import numpy as np
import pandas as pd
import shap
import matplotlib.pyplot as pl
# Enable shap's JS visualizations (needed for force_plot in notebooks).
shap.initjs()
# Default input record (JSON lines) and pickled classifier.
json_path = "response.json"
model_path = "xgboost_primary_model.pkl"
# Age bin edges and the representative age assigned to each bin
# (bin index -> midpoint-ish age used as the model feature).
AGE_GROUP_CUTOFFS = [0, 17, 30, 40, 50, 60, 70, 120]
AGE_GROUPS_TRANSFORMER = {1: 10, 2: 25, 3: 35, 4: 45, 5: 55, 6: 65, 7: 75}
AGE_COL = 'age_group'
# Feature columns fed to the model (sorted alphabetically before predict).
X_COLS = ["gender", AGE_COL, "condition_any", "symptom_well", "symptom_sore_throat", "symptom_cough",
          "symptom_shortness_of_breath", "symptom_smell_or_taste_loss", "symptom_fever"]
def add_age_group(df):
    """Attach an integer age-group column derived from the raw 'age' column.

    Ages are binned by AGE_GROUP_CUTOFFS; each bin is labelled with its
    representative age from AGE_GROUPS_TRANSFORMER. Mutates *df* in place
    and returns it.
    """
    group_labels = list(AGE_GROUPS_TRANSFORMER.values())
    binned = pd.cut(df['age'], bins=AGE_GROUP_CUTOFFS, labels=group_labels,
                    include_lowest=True, right=True)
    df[AGE_COL] = binned.astype(int)
    return df
def get_prediction(json_path, model_path):
    """Predict the probability that a response tests positive.

    Parameters
    ----------
    json_path : str
        Path to a JSON-lines file holding the response record(s).
    model_path : str
        Path to the pickled classifier with a predict_proba method.

    Returns
    -------
    numpy float
        Probability of the positive class for the first record,
        rounded to 3 decimals.
    """
    response_df = pd.read_json(json_path, lines=True)
    response_df = add_age_group(response_df)
    response_df = response_df[X_COLS].sort_index(axis=1)
    # NOTE(security): pickle.load can execute arbitrary code; only load
    # model files from a trusted source.
    # Fix: the original left the file handle open (pickle.load(open(...))).
    with open(model_path, "rb") as model_file:
        model = pickle.load(model_file)
    predictions = model.predict_proba(response_df)
    predicted_probability = np.round(predictions[:, 1][0], 3)
    return predicted_probability
if __name__ == '__main__':
    print("The response probability to test positive according to our model is:", get_prediction(json_path, model_path))
    # NOTE(review): the file handle from open() is never closed here.
    model = pickle.load(open('xgboost_primary_model.pkl', "rb"))
    explainer = shap.TreeExplainer(model)
    data = pd.read_csv('../creating_the_models/primary model.csv')
    BASE_MODEL_X_COLS = ['gender', 'age_group']
    # NOTE(review): this rebinds the module-level X_COLS with the same nine
    # columns in a different order; harmless because X is sorted below.
    X_COLS = BASE_MODEL_X_COLS + \
             ['symptom_well',
              'symptom_sore_throat',
              'symptom_cough',
              'symptom_shortness_of_breath',
              'symptom_smell_or_taste_loss',
              'symptom_fever',
              'condition_any']
    X = data[X_COLS].sort_index(axis=1)
    y = data['label'].values.ravel()
    # Global and per-sample SHAP explanations for the training data.
    shap_values = explainer.shap_values(X)
    shap.force_plot(explainer.expected_value, shap_values[0, :], X.iloc[0, :])
    shap.summary_plot(shap_values, X)
|
[
"pandas.read_csv",
"shap.summary_plot",
"pandas.read_json",
"shap.initjs",
"shap.TreeExplainer",
"shap.force_plot",
"numpy.round"
] |
[((98, 111), 'shap.initjs', 'shap.initjs', ([], {}), '()\n', (109, 111), False, 'import shap\n'), ((802, 837), 'pandas.read_json', 'pd.read_json', (['json_path'], {'lines': '(True)'}), '(json_path, lines=True)\n', (814, 837), True, 'import pandas as pd\n'), ((1068, 1101), 'numpy.round', 'np.round', (['predictions[:, 1][0]', '(3)'], {}), '(predictions[:, 1][0], 3)\n', (1076, 1101), True, 'import numpy as np\n'), ((1366, 1391), 'shap.TreeExplainer', 'shap.TreeExplainer', (['model'], {}), '(model)\n', (1384, 1391), False, 'import shap\n'), ((1403, 1458), 'pandas.read_csv', 'pd.read_csv', (['"""../creating_the_models/primary model.csv"""'], {}), "('../creating_the_models/primary model.csv')\n", (1414, 1458), True, 'import pandas as pd\n'), ((1916, 1990), 'shap.force_plot', 'shap.force_plot', (['explainer.expected_value', 'shap_values[0, :]', 'X.iloc[0, :]'], {}), '(explainer.expected_value, shap_values[0, :], X.iloc[0, :])\n', (1931, 1990), False, 'import shap\n'), ((1995, 2028), 'shap.summary_plot', 'shap.summary_plot', (['shap_values', 'X'], {}), '(shap_values, X)\n', (2012, 2028), False, 'import shap\n')]
|
from cereal import car
from common.numpy_fast import mean, int_rnd
from opendbc.can.can_define import CANDefine
from selfdrive.car.interfaces import CarStateBase
from opendbc.can.parser import CANParser
from selfdrive.config import Conversions as CV
from selfdrive.car.ocelot.values import CAR, DBC, STEER_THRESHOLD, BUTTON_STATES
class CarState(CarStateBase):
  """Parse CAN messages from an Ocelot-based vehicle into an openpilot CarState."""
  def __init__(self, CP):
    super().__init__(CP)
    can_define = CANDefine(DBC[CP.carFingerprint]['chassis'])
    # Map raw GEAR signal values to shifter position names.
    self.shifter_values = can_define.dv["GEAR_PACKET"]['GEAR']
    self.brakeUnavailable = True
    # Locally-managed cruise state: toggled by SET, stepped by the
    # SPEEDUP/SPEEDDN buttons (5 mph per press, edge-triggered).
    self.enabled = False
    self.oldEnabled = False
    self.oldSpeedUp = False
    self.oldSpeedDn = False
    self.engineRPM = 0
    self.setSpeed = 10  # set speed in mph
    self.buttonStates = BUTTON_STATES.copy()
    self.oldButtonStates = BUTTON_STATES.copy()
  def update(self, cp, cp_body, enabled):
    """Build a CarState message from the powertrain (cp) and body (cp_body)
    CAN parsers; *enabled* is openpilot's engagement state."""
    ret = car.CarState.new_message()
    #Car specific information
    if self.CP.carFingerprint == CAR.SMART_ROADSTER_COUPE:
      ret.doorOpen = False #any([cp_body.vl["BODYCONTROL"]['RIGHT_DOOR'], cp_body.vl["BODYCONTROL"]['LEFT_DOOR']]) != 0
      ret.seatbeltUnlatched = False
      ret.leftBlinker = bool(cp_body.vl["BODYCONTROL"]['LEFT_SIGNAL'])
      ret.rightBlinker = bool(cp_body.vl["BODYCONTROL"]['RIGHT_SIGNAL'])
      ret.espDisabled = bool(cp_body.vl["ABS"]['ESP_STATUS'])
      # Wheel speeds are broadcast in mph; convert to m/s.
      ret.wheelSpeeds.fl = cp_body.vl["SMARTROADSTERWHEELSPEEDS"]['WHEELSPEED_FL'] * CV.MPH_TO_MS
      ret.wheelSpeeds.fr = cp_body.vl["SMARTROADSTERWHEELSPEEDS"]['WHEELSPEED_FR'] * CV.MPH_TO_MS
      ret.wheelSpeeds.rl = cp_body.vl["SMARTROADSTERWHEELSPEEDS"]['WHEELSPEED_RL'] * CV.MPH_TO_MS
      ret.wheelSpeeds.rr = cp_body.vl["SMARTROADSTERWHEELSPEEDS"]['WHEELSPEED_RR'] * CV.MPH_TO_MS
      can_gear = int(cp_body.vl["GEAR_PACKET"]['GEAR'])
      ret.gearShifter = self.parse_gear_shifter(self.shifter_values.get(can_gear, None))
      self.engineRPM = cp_body.vl["GEAR_PACKET"]["RPM"]
    #iBooster data
    ret.brakePressed = bool(cp.vl["BRAKE_STATUS"]['DRIVER_BRAKE_APPLIED'])
    ret.brakeLights = bool(cp.vl["BRAKE_STATUS"]['BRAKE_APPLIED'])
    self.brakeUnavailable = not bool(cp.vl["BRAKE_STATUS"]['BRAKE_OK'])
    if self.CP.enableGasInterceptor:
      # Average the two redundant pedal position sensors.
      ret.gas = (cp.vl["GAS_SENSOR"]['PED_GAS'] + cp.vl["GAS_SENSOR"]['PED_GAS2']) / 2.
      ret.gasPressed = ret.gas > 15
    #calculate speed from wheel speeds
    ret.vEgoRaw = mean([ret.wheelSpeeds.fl, ret.wheelSpeeds.fr, ret.wheelSpeeds.rl, ret.wheelSpeeds.rr])
    ret.vEgo, ret.aEgo = self.update_speed_kf(ret.vEgoRaw)
    ret.standstill = ret.vEgoRaw < 0.001
    #Toyota SAS (installed flipped)
    ret.steeringAngleDeg = -(cp.vl["TOYOTA_STEERING_ANGLE_SENSOR1"]['TOYOTA_STEER_ANGLE'] + cp.vl["TOYOTA_STEERING_ANGLE_SENSOR1"]['TOYOTA_STEER_FRACTION'])
    ret.steeringRateDeg = -cp.vl["TOYOTA_STEERING_ANGLE_SENSOR1"]['TOYOTA_STEER_RATE']
    #Steering information from smart standin ECU
    ret.steeringTorque = cp.vl["STEERING_STATUS"]['STEERING_TORQUE_DRIVER']
    ret.steeringTorqueEps = cp.vl["STEERING_STATUS"]['STEERING_TORQUE_EPS']
    ret.steeringPressed = abs(ret.steeringTorque) > STEER_THRESHOLD
    ret.steerError = bool(cp.vl["STEERING_STATUS"]['STEERING_OK'] == 0)
    ret.cruiseState.available = True
    ret.cruiseState.standstill = False
    ret.cruiseState.nonAdaptive = False
    # Latch current button states from the HIM controls message.
    self.buttonStates["accelCruise"] = bool(cp.vl["HIM_CTRLS"]['SPEEDUP_BTN'])
    self.buttonStates["decelCruise"] = bool(cp.vl["HIM_CTRLS"]['SPEEDDN_BTN'])
    self.buttonStates["cancel"] = bool(cp.vl["HIM_CTRLS"]['CANCEL_BTN'])
    self.buttonStates["setCruise"] = bool(cp.vl["HIM_CTRLS"]['SET_BTN'])
    #if enabled:
      #print(" OPENPILOT ENABLED")
    if not enabled:
      self.enabled = False
      #print(" OPENPILOT OFF")
    # Rising edge of SET toggles local cruise; on enable, seed the set
    # speed from current speed rounded to the nearest 5 mph.
    if bool(self.buttonStates["setCruise"]) and not self.oldEnabled:
      print("attempt enable")
      self.enabled = not self.enabled
      if self.enabled:
        self.setSpeed = (int_rnd((ret.vEgo * CV.MS_TO_MPH)/5) * 5)
        if ret.standstill:
          self.setSpeed = 10
    # Rising edges of the speed buttons step the set speed by 5 mph.
    if bool(self.buttonStates["accelCruise"]) and not self.oldSpeedUp:
      print("speedup")
      self.setSpeed = self.setSpeed + 5
    if bool(self.buttonStates["decelCruise"]) and not self.oldSpeedDn:
      print("speeddn")
      self.setSpeed = self.setSpeed - 5
    ret.cruiseState.speed = self.setSpeed * CV.MPH_TO_MS
    ret.cruiseState.enabled = self.enabled
    ret.stockAeb = False
    ret.leftBlindspot = False
    ret.rightBlindspot = False
    # Remember button states for edge detection on the next update.
    self.oldEnabled = bool(self.buttonStates["setCruise"])
    self.oldSpeedDn = bool(self.buttonStates["decelCruise"])
    self.oldSpeedUp = bool(self.buttonStates["accelCruise"])
    return ret
  @staticmethod
  def get_can_parser(CP):
    # Signals: (signal name, message name, default value).
    signals = [
      ("TOYOTA_STEER_ANGLE", "TOYOTA_STEERING_ANGLE_SENSOR1", 0),
      ("BRAKE_APPLIED", "BRAKE_STATUS", 0),
      ("DRIVER_BRAKE_APPLIED", "BRAKE_STATUS", 0),
      ("BRAKE_OK", "BRAKE_STATUS", 0),
      ("BRAKE_PEDAL_POSITION", "BRAKE_STATUS", 0),
      ("TOYOTA_STEER_FRACTION", "TOYOTA_STEERING_ANGLE_SENSOR1", 0),
      ("TOYOTA_STEER_RATE", "TOYOTA_STEERING_ANGLE_SENSOR1", 0),
      ("SET_BTN", "HIM_CTRLS", 0),
      ("CANCEL_BTN", "HIM_CTRLS", 0),
      ("SPEEDUP_BTN", "HIM_CTRLS", 0),
      ("SPEEDDN_BTN", "HIM_CTRLS", 0),
      ("STEERING_TORQUE_DRIVER", "STEERING_STATUS", 0),
      ("STEERING_TORQUE_EPS", "STEERING_STATUS", 0),
      ("STEERING_OK", "STEERING_STATUS", 0),
      ("PED_GAS", "GAS_SENSOR", 0),
      ("PED_GAS2", "GAS_SENSOR", 0)
    ]
    # Checks: (message name, expected frequency in Hz).
    checks = [
      ("TOYOTA_STEERING_ANGLE_SENSOR1", 80),
      ("STEERING_STATUS", 80),
      ("HIM_CTRLS", 0),
      ("BRAKE_STATUS", 80),
      ("GAS_SENSOR", 40),
    ]
    # Parser for the powertrain DBC on CAN bus 1.
    return CANParser(DBC[CP.carFingerprint]['pt'], signals, checks, 1)
  @staticmethod
  def get_body_can_parser(CP):
    signals = [
    ]
    # Checks: (message name, expected frequency in Hz).
    checks = [
      ("BODYCONTROL", 10),
      ("ABS", 10),
      ("SMARTROADSTERWHEELSPEEDS", 10),
      ("GEAR_PACKET", 10),
    ]
    # Body signals are only defined for the Smart Roadster Coupe.
    if CP.carFingerprint == CAR.SMART_ROADSTER_COUPE:
      signals.append(("RIGHT_DOOR", "BODYCONTROL",0))
      signals.append(("LEFT_DOOR", "BODYCONTROL",0))
      signals.append(("LEFT_SIGNAL", "BODYCONTROL",0))
      signals.append(("RIGHT_SIGNAL", "BODYCONTROL",0))
      signals.append(("ESP_STATUS", "ABS",0))
      signals.append(("WHEELSPEED_FL", "SMARTROADSTERWHEELSPEEDS",0))
      signals.append(("WHEELSPEED_FR", "SMARTROADSTERWHEELSPEEDS",0))
      signals.append(("WHEELSPEED_RL", "SMARTROADSTERWHEELSPEEDS",0))
      signals.append(("WHEELSPEED_RR", "SMARTROADSTERWHEELSPEEDS",0))
      signals.append(("BRAKEPEDAL", "ABS",0))
      signals.append(("GEAR","GEAR_PACKET", 0))
      signals.append(("RPM","GEAR_PACKET",0))
    # Parser for the chassis DBC on CAN bus 0.
    return CANParser(DBC[CP.carFingerprint]['chassis'], signals, checks, 0)
|
[
"opendbc.can.can_define.CANDefine",
"common.numpy_fast.int_rnd",
"cereal.car.CarState.new_message",
"selfdrive.car.ocelot.values.BUTTON_STATES.copy",
"common.numpy_fast.mean",
"opendbc.can.parser.CANParser"
] |
[((430, 474), 'opendbc.can.can_define.CANDefine', 'CANDefine', (["DBC[CP.carFingerprint]['chassis']"], {}), "(DBC[CP.carFingerprint]['chassis'])\n", (439, 474), False, 'from opendbc.can.can_define import CANDefine\n'), ((750, 770), 'selfdrive.car.ocelot.values.BUTTON_STATES.copy', 'BUTTON_STATES.copy', ([], {}), '()\n', (768, 770), False, 'from selfdrive.car.ocelot.values import CAR, DBC, STEER_THRESHOLD, BUTTON_STATES\n'), ((798, 818), 'selfdrive.car.ocelot.values.BUTTON_STATES.copy', 'BUTTON_STATES.copy', ([], {}), '()\n', (816, 818), False, 'from selfdrive.car.ocelot.values import CAR, DBC, STEER_THRESHOLD, BUTTON_STATES\n'), ((872, 898), 'cereal.car.CarState.new_message', 'car.CarState.new_message', ([], {}), '()\n', (896, 898), False, 'from cereal import car\n'), ((2422, 2513), 'common.numpy_fast.mean', 'mean', (['[ret.wheelSpeeds.fl, ret.wheelSpeeds.fr, ret.wheelSpeeds.rl, ret.wheelSpeeds.rr\n ]'], {}), '([ret.wheelSpeeds.fl, ret.wheelSpeeds.fr, ret.wheelSpeeds.rl, ret.\n wheelSpeeds.rr])\n', (2426, 2513), False, 'from common.numpy_fast import mean, int_rnd\n'), ((5747, 5806), 'opendbc.can.parser.CANParser', 'CANParser', (["DBC[CP.carFingerprint]['pt']", 'signals', 'checks', '(1)'], {}), "(DBC[CP.carFingerprint]['pt'], signals, checks, 1)\n", (5756, 5806), False, 'from opendbc.can.parser import CANParser\n'), ((6788, 6852), 'opendbc.can.parser.CANParser', 'CANParser', (["DBC[CP.carFingerprint]['chassis']", 'signals', 'checks', '(0)'], {}), "(DBC[CP.carFingerprint]['chassis'], signals, checks, 0)\n", (6797, 6852), False, 'from opendbc.can.parser import CANParser\n'), ((3973, 4009), 'common.numpy_fast.int_rnd', 'int_rnd', (['(ret.vEgo * CV.MS_TO_MPH / 5)'], {}), '(ret.vEgo * CV.MS_TO_MPH / 5)\n', (3980, 4009), False, 'from common.numpy_fast import mean, int_rnd\n')]
|
"""Data parsing tools"""
import json
import pandas as pd
## Data specific headers ##
# Column names for each NMEA sentence type, used as DataFrame headers.
# Global Positioning System Fix Data
# http://aprs.gids.nl/nmea/#gga
# NOTE(review): "M" (units marker) appears twice; as a pandas header this
# produces duplicate column labels -- confirm that is intended.
GGA_columns = ["nmea_type", "UTC_time", "latitude", "NS", "longitude", "EW", "quality", "n_satellites",
               "horizontal_dilution", "altitude", "M", "geoidal_separation", "M",
               "age_sec_diff", "diff_id", "checksum"]
# GPS DOP and active satellites
# http://aprs.gids.nl/nmea/#gsa
GSA_columns = (["nmea_type", "mode", "mode_fix"] +
               ["sv" + str(n) for n in list(range(12))] +
               ["pdop", "hdop", "vdop", "checksum"])
# GPS DOP and active satellites
# http://aprs.gids.nl/nmea/#gsv
# Four satellites per GSV sentence, hence the 4x repeated field group.
GSV_columns = (["nmea_type", "total_messages", "message_number"] +
               ["total_sv_in_view", "sv_prn_number", "elev_degree", "azimuth", "snr"] * 4)
# Recommended minimum specific GPS/Transit data
# http://aprs.gids.nl/nmea/#rmc
RMC_columns = ["nmea_type", "UTC_time", "valid", "latitude", "NS", "longitude", "EW", "speed_knots",
               "true_course", "date", "variation", "variation_EW", "checksum"]
# Track made good and ground speed
# http://aprs.gids.nl/nmea/#vtg
VTG_columns = ["nmea_type", "track_made_good", "T", "NA", "NA", "speed_knots", "N",
               "speed_km_hr", "K"]
def pad_header(column_list, target_len):
    """Return *column_list* adjusted to exactly *target_len* entries.

    Extra columns are dropped; missing ones are filled with generated
    names "c<index>" where <index> is the column's position.
    """
    padded = list(column_list[:target_len])
    while len(padded) < target_len:
        padded.append("c" + str(len(padded)))
    return padded
def csv_resource(fp):
    """Parse a .csv file generated with Meerkat.

    The first line is a shebang-style comment whose text (after the two
    leading marker characters) is a JSON object describing how the rest
    of the file was written.

    Parameters
    ----------
    fp : filepath to saved data

    Returns
    -------
    meta : dict, metadata describing data
    df : Pandas DataFrame, data recorded from device(s) described in meta
    """
    with open(fp, 'r') as source:
        header_line = source.readline()
    # Skip the 2-character comment marker, then parse the JSON payload.
    meta = json.loads(header_line[2:])
    # The metadata line starts with the comment character, so read_csv
    # skips it automatically.
    df = pd.read_csv(fp,
                     delimiter=meta['delimiter'],
                     comment=meta['comment'],
                     quotechar='"')
    df['datetime64_ns'] = pd.to_datetime(df[meta['time_format']])
    return meta, df
|
[
"pandas.read_csv",
"pandas.to_datetime",
"json.loads"
] |
[((1880, 1901), 'json.loads', 'json.loads', (['sbang[2:]'], {}), '(sbang[2:])\n', (1890, 1901), False, 'import json\n'), ((1911, 1999), 'pandas.read_csv', 'pd.read_csv', (['fp'], {'delimiter': "meta['delimiter']", 'comment': "meta['comment']", 'quotechar': '"""\\""""'}), '(fp, delimiter=meta[\'delimiter\'], comment=meta[\'comment\'],\n quotechar=\'"\')\n', (1922, 1999), True, 'import pandas as pd\n'), ((2085, 2124), 'pandas.to_datetime', 'pd.to_datetime', (["df[meta['time_format']]"], {}), "(df[meta['time_format']])\n", (2099, 2124), True, 'import pandas as pd\n')]
|
import sys
import os
print(os.getcwd())
# Make the clustercode package importable when run from a subdirectory.
sys.path.append("../")
from clustercode.ClusterEnsemble import ClusterEnsemble
from clustercode.clustering import cluster_analysis
# tpr = "/home/trl11/Virtual_Share/gromacs_test/npt.tpr"
# traj = "/home/trl11/Virtual_Share/gromacs_test/npt.xtc"
# Small test trajectory/topology shipped with the repository.
traj = "clustercode/tests/cluster/files/traj_small.xtc"
tpr = "clustercode/tests/cluster/files/topol_small.tpr"
# traj = "/home/mk8118/OneDrive/2019/simulations/gromacs/SDS/\
# check_ensembles/NVT/PME_revised/nh_10/base/nvt.trr"
# Run the clustering twice with the two algorithms so their cluster lists
# can be compared frame-by-frame below.
ClstrEns = ClusterEnsemble(tpr, traj, ["CE", "CM"])
ClstrEns.cluster_analysis(algorithm="static")
clstr_ens_static = ClstrEns.cluster_list
ClstrEns.cluster_analysis(algorithm="dynamic", work_in="Atom")
clstr_ens_dynamic = ClstrEns.cluster_list
# exit()
for idx_time, (static_clus_list, dynamic_clus_list) in enumerate(
zip(clstr_ens_static, clstr_ens_dynamic)
):
diff_clust_count = len(static_clus_list) - len(dynamic_clus_list)
print("_________________________________________________________")
print("Frame: {:d}".format(idx_time))
print(
"Difference in cluster counts (static - dynamic): {:d}".format(diff_clust_count)
)
static_molec_count = 0
for cluster in static_clus_list:
static_molec_count += len(cluster)
print("Statis molec count: {:d}".format(static_molec_count))
dynamic_molec_count = 0
for cluster in dynamic_clus_list:
dynamic_molec_count += cluster.n_residues
print("Dynamic molec count: {:d}".format(dynamic_molec_count))
print(
"Difference in molecule counts (static - dynamic): {:d}".format(
static_molec_count - dynamic_molec_count
)
)
new_s_set = static_clus_list[0]
for cluster in static_clus_list:
new_s_set = new_s_set.union(cluster)
new_d_set = dynamic_clus_list[0]
for cluster in dynamic_clus_list:
new_d_set = new_d_set.union(cluster)
print(
"Static molec double counted: {:d}".format(static_molec_count - len(new_d_set))
)
print(
"Dynamic molec double counted: {:d}".format(
dynamic_molec_count - new_d_set.n_residues
)
)
for idxi, clusteri in enumerate(dynamic_clus_list):
for idxj, clusterj in enumerate(dynamic_clus_list):
if clusteri.issuperset(clusterj) and idxi != idxj:
print(idxi, idxj)
print(clusteri)
print(clusteri.n_residues, clusterj.n_residues)
|
[
"sys.path.append",
"clustercode.ClusterEnsemble.ClusterEnsemble",
"os.getcwd"
] |
[((41, 63), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (56, 63), False, 'import sys\n'), ((531, 571), 'clustercode.ClusterEnsemble.ClusterEnsemble', 'ClusterEnsemble', (['tpr', 'traj', "['CE', 'CM']"], {}), "(tpr, traj, ['CE', 'CM'])\n", (546, 571), False, 'from clustercode.ClusterEnsemble import ClusterEnsemble\n'), ((28, 39), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (37, 39), False, 'import os\n')]
|
import os
import re
import cv2
import numpy as np
def gen_img_label_list(csv_list):
csv_f = open(csv_list, 'r')
lines = csv_f.readlines()
cnt_img = ''
cnt_label = ''
for i in lines:
img = i.strip().split(' ')[0]
score = float(i.strip().split(' ')[1])
b = [int(float(j)) for j in i.strip().split(' ')[2:]]
if score > 0.5:
continue
# if img != 'EBA961BC' or img != 'AD393334':
# continue
print(img)
img_ = cv2.imread('../../DataFountain/GLODON_objDet/test_dataset/' + img + '.jpg')
print(score)
h, w, _ = img_.shape
font = cv2.FONT_HERSHEY_SIMPLEX
cv2.rectangle(img_, (int(b[0]), int(b[1])), (int(b[2]), int(b[3])), (0,255,255), 3)
img_ = cv2.resize(img_, (int(w/2), int(h/2)))
cv2.imshow('img', img_)
cv2.waitKey(0)
if __name__ == '__main__':
csv_list = '../../results/comp4_det_test_rebar.txt'
gen_img_label_list(csv_list)
|
[
"cv2.waitKey",
"cv2.imread",
"cv2.imshow"
] |
[((507, 582), 'cv2.imread', 'cv2.imread', (["('../../DataFountain/GLODON_objDet/test_dataset/' + img + '.jpg')"], {}), "('../../DataFountain/GLODON_objDet/test_dataset/' + img + '.jpg')\n", (517, 582), False, 'import cv2\n'), ((827, 850), 'cv2.imshow', 'cv2.imshow', (['"""img"""', 'img_'], {}), "('img', img_)\n", (837, 850), False, 'import cv2\n'), ((859, 873), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (870, 873), False, 'import cv2\n')]
|
from IMLearn.learners import UnivariateGaussian, MultivariateGaussian
import numpy as np
import plotly.graph_objects as go
import plotly.io as pio
from matplotlib import pyplot as plt
pio.templates.default = "simple_white"
SAMPLES = 1000
QUESTION_ONE_MEAN = 10
QUESTION_ONE_VAR = 1
QUESTION_ONE_SAMPLES_SKIP = 10
QUESTION_TWO_RESOLUTION = 200
QUESTION_TWO_GRID_SIZE = 10
def test_univariate_gaussian():
# Question 1 - Draw samples and print fitted model
samples = np.random.normal(QUESTION_ONE_MEAN, QUESTION_ONE_VAR, size=SAMPLES)
univariate_gaussian = UnivariateGaussian()
univariate_gaussian.fit(samples)
print(f"({univariate_gaussian.mu_}, {univariate_gaussian.var_})")
# Question 2 - Empirically showing sample mean is consistent
x = np.arange(QUESTION_ONE_MEAN, SAMPLES + 1, QUESTION_ONE_SAMPLES_SKIP)
estimate_mean_dis = np.vectorize(lambda last_index: np.abs(np.mean(samples[:last_index]) - QUESTION_ONE_MEAN))
fig = go.Figure(
[go.Scatter(x=x, y=estimate_mean_dis(x), mode='markers', name=r'$\left|\hat{\mu}(m)-10\right|$',
showlegend=True)], layout=go.Layout(
title={
"text": r"$\text{Distance Between The Estimated-And True Value Of The Expectations}\\"
r"\text{As Function Of Number Of Samples}$",
'y': 0.84, 'x': 0.5, 'xanchor': 'center', 'yanchor': 'top'},
xaxis_title=r"$\text{Number of samples} [m]$", yaxis_title=r"$\left|\hat{\mu}(m)-10\right|$", height=400))
fig.show()
# fig.write_image("estimate_distance.svg")
# Question 3 - Plotting Empirical PDF of fitted model
fig = go.Figure(
[go.Scatter(x=samples, y=univariate_gaussian.pdf(samples), mode='markers',
showlegend=False, marker=dict(size=2))], layout=go.Layout(
title={
"text": r"$\text{Probability Density As Function Of Samples Values}$",
'y': 0.84, 'x': 0.5, 'xanchor': 'center', 'yanchor': 'top'},
xaxis_title=r"$\text{Sample value}$", yaxis_title=r"$\text{Probability density}$", height=400))
fig.show()
# fig.write_image("pdf_q1.svg")
def test_multivariate_gaussian():
# Question 4 - Draw samples and print fitted model
mu = np.array([0, 0, 4, 0])
sigma = np.array([[1, 0.2, 0, 0.5],
[0.2, 2, 0, 0],
[0, 0, 1, 0],
[0.5, 0, 0, 1]])
samples = np.random.multivariate_normal(mu, sigma, SAMPLES)
multivariate_gaussian = MultivariateGaussian()
multivariate_gaussian.fit(samples)
print(multivariate_gaussian.mu_)
print(multivariate_gaussian.cov_)
# Question 5 - Likelihood evaluation
f1 = np.linspace(-QUESTION_TWO_GRID_SIZE, QUESTION_TWO_GRID_SIZE, QUESTION_TWO_RESOLUTION)
grid_tuples = np.transpose(np.array([np.repeat(f1, len(f1)), np.tile(f1, len(f1))]))
calc_log_likelihood = lambda x1, x3: multivariate_gaussian.log_likelihood(np.array([x1, 0, x3, 0]), sigma, samples)
Z = np.vectorize(calc_log_likelihood)(grid_tuples[:, 0], grid_tuples[:, 1]).reshape(QUESTION_TWO_RESOLUTION,
QUESTION_TWO_RESOLUTION)
fig, ax = plt.subplots()
heat_map = ax.pcolormesh(f1, f1, Z)
fig.colorbar(heat_map, format='%.e')
ax.set_title("log-likelihood for " + r"$\mu=\left[f_{1},0,f_{3},0\right]{}^{T}$")
ax.set_xlabel("$f_{3}$")
ax.set_ylabel("$f_{1}$")
plt.show()
# Question 6 - Maximum likelihood
max_coordinates = np.where(Z == np.amax(Z))
print(f"({round(f1[max_coordinates[0]][0], 3)}, {round(f1[max_coordinates[1]][0], 3)})")
if __name__ == '__main__':
np.random.seed(0)
test_univariate_gaussian()
test_multivariate_gaussian()
|
[
"IMLearn.learners.UnivariateGaussian",
"matplotlib.pyplot.show",
"numpy.random.seed",
"numpy.vectorize",
"IMLearn.learners.MultivariateGaussian",
"numpy.amax",
"numpy.mean",
"numpy.random.multivariate_normal",
"numpy.arange",
"numpy.array",
"numpy.random.normal",
"numpy.linspace",
"matplotlib.pyplot.subplots",
"plotly.graph_objects.Layout"
] |
[((476, 543), 'numpy.random.normal', 'np.random.normal', (['QUESTION_ONE_MEAN', 'QUESTION_ONE_VAR'], {'size': 'SAMPLES'}), '(QUESTION_ONE_MEAN, QUESTION_ONE_VAR, size=SAMPLES)\n', (492, 543), True, 'import numpy as np\n'), ((570, 590), 'IMLearn.learners.UnivariateGaussian', 'UnivariateGaussian', ([], {}), '()\n', (588, 590), False, 'from IMLearn.learners import UnivariateGaussian, MultivariateGaussian\n'), ((772, 840), 'numpy.arange', 'np.arange', (['QUESTION_ONE_MEAN', '(SAMPLES + 1)', 'QUESTION_ONE_SAMPLES_SKIP'], {}), '(QUESTION_ONE_MEAN, SAMPLES + 1, QUESTION_ONE_SAMPLES_SKIP)\n', (781, 840), True, 'import numpy as np\n'), ((2274, 2296), 'numpy.array', 'np.array', (['[0, 0, 4, 0]'], {}), '([0, 0, 4, 0])\n', (2282, 2296), True, 'import numpy as np\n'), ((2309, 2383), 'numpy.array', 'np.array', (['[[1, 0.2, 0, 0.5], [0.2, 2, 0, 0], [0, 0, 1, 0], [0.5, 0, 0, 1]]'], {}), '([[1, 0.2, 0, 0.5], [0.2, 2, 0, 0], [0, 0, 1, 0], [0.5, 0, 0, 1]])\n', (2317, 2383), True, 'import numpy as np\n'), ((2464, 2513), 'numpy.random.multivariate_normal', 'np.random.multivariate_normal', (['mu', 'sigma', 'SAMPLES'], {}), '(mu, sigma, SAMPLES)\n', (2493, 2513), True, 'import numpy as np\n'), ((2542, 2564), 'IMLearn.learners.MultivariateGaussian', 'MultivariateGaussian', ([], {}), '()\n', (2562, 2564), False, 'from IMLearn.learners import UnivariateGaussian, MultivariateGaussian\n'), ((2730, 2819), 'numpy.linspace', 'np.linspace', (['(-QUESTION_TWO_GRID_SIZE)', 'QUESTION_TWO_GRID_SIZE', 'QUESTION_TWO_RESOLUTION'], {}), '(-QUESTION_TWO_GRID_SIZE, QUESTION_TWO_GRID_SIZE,\n QUESTION_TWO_RESOLUTION)\n', (2741, 2819), True, 'import numpy as np\n'), ((3265, 3279), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (3277, 3279), True, 'from matplotlib import pyplot as plt\n'), ((3509, 3519), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3517, 3519), True, 'from matplotlib import pyplot as plt\n'), ((3734, 3751), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), 
'(0)\n', (3748, 3751), True, 'import numpy as np\n'), ((1128, 1463), 'plotly.graph_objects.Layout', 'go.Layout', ([], {'title': "{'text':\n '$\\\\text{Distance Between The Estimated-And True Value Of The Expectations}\\\\\\\\\\\\text{As Function Of Number Of Samples}$'\n , 'y': 0.84, 'x': 0.5, 'xanchor': 'center', 'yanchor': 'top'}", 'xaxis_title': '"""$\\\\text{Number of samples} [m]$"""', 'yaxis_title': '"""$\\\\left|\\\\hat{\\\\mu}(m)-10\\\\right|$"""', 'height': '(400)'}), "(title={'text':\n '$\\\\text{Distance Between The Estimated-And True Value Of The Expectations}\\\\\\\\\\\\text{As Function Of Number Of Samples}$'\n , 'y': 0.84, 'x': 0.5, 'xanchor': 'center', 'yanchor': 'top'},\n xaxis_title='$\\\\text{Number of samples} [m]$', yaxis_title=\n '$\\\\left|\\\\hat{\\\\mu}(m)-10\\\\right|$', height=400)\n", (1137, 1463), True, 'import plotly.graph_objects as go\n'), ((1820, 2081), 'plotly.graph_objects.Layout', 'go.Layout', ([], {'title': "{'text': '$\\\\text{Probability Density As Function Of Samples Values}$', 'y':\n 0.84, 'x': 0.5, 'xanchor': 'center', 'yanchor': 'top'}", 'xaxis_title': '"""$\\\\text{Sample value}$"""', 'yaxis_title': '"""$\\\\text{Probability density}$"""', 'height': '(400)'}), "(title={'text':\n '$\\\\text{Probability Density As Function Of Samples Values}$', 'y': \n 0.84, 'x': 0.5, 'xanchor': 'center', 'yanchor': 'top'}, xaxis_title=\n '$\\\\text{Sample value}$', yaxis_title='$\\\\text{Probability density}$',\n height=400)\n", (1829, 2081), True, 'import plotly.graph_objects as go\n'), ((2983, 3007), 'numpy.array', 'np.array', (['[x1, 0, x3, 0]'], {}), '([x1, 0, x3, 0])\n', (2991, 3007), True, 'import numpy as np\n'), ((3596, 3606), 'numpy.amax', 'np.amax', (['Z'], {}), '(Z)\n', (3603, 3606), True, 'import numpy as np\n'), ((3033, 3066), 'numpy.vectorize', 'np.vectorize', (['calc_log_likelihood'], {}), '(calc_log_likelihood)\n', (3045, 3066), True, 'import numpy as np\n'), ((904, 933), 'numpy.mean', 'np.mean', (['samples[:last_index]'], 
{}), '(samples[:last_index])\n', (911, 933), True, 'import numpy as np\n')]
|
# Importing the libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
# Importing our cancer dataset
dataset = pd.read_csv('breast_cancer_dataset.csv')
X = dataset.iloc[:, 1:9].values
Y = dataset.iloc[:, 9].values
# Encoding categorical data values
from sklearn.preprocessing import LabelEncoder
labelencoder_Y = LabelEncoder()
Y = labelencoder_Y.fit_transform(Y)
# Feature Scaling
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
sc.fit(X)
# Splitting the dataset into the Training set and Test set
from sklearn.model_selection import train_test_split
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size = 0.25, random_state = 0)
# Fitting Simple Logistic Regression to the Training set
from sklearn.linear_model import LogisticRegression
classifier = LogisticRegression( solver='lbfgs', max_iter=500)
classifier.fit(X_train, Y_train)
Y_pred = classifier.predict(X_test)
from sklearn.metrics import confusion_matrix, accuracy_score
cm_SVM = confusion_matrix(Y_test, Y_pred)
print(cm_SVM)
print("Accuracy score of train Classifier")
print(accuracy_score(Y_train, classifier.predict(X_train))*100)
print("Accuracy score of test Classifier")
print(accuracy_score(Y_test, Y_pred)*100)
|
[
"sklearn.preprocessing.StandardScaler",
"pandas.read_csv",
"sklearn.model_selection.train_test_split",
"sklearn.metrics.accuracy_score",
"sklearn.preprocessing.LabelEncoder",
"sklearn.linear_model.LogisticRegression",
"sklearn.metrics.confusion_matrix"
] |
[((145, 185), 'pandas.read_csv', 'pd.read_csv', (['"""breast_cancer_dataset.csv"""'], {}), "('breast_cancer_dataset.csv')\n", (156, 185), True, 'import pandas as pd\n'), ((354, 368), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (366, 368), False, 'from sklearn.preprocessing import LabelEncoder\n'), ((483, 499), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (497, 499), False, 'from sklearn.preprocessing import StandardScaler\n'), ((663, 717), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'Y'], {'test_size': '(0.25)', 'random_state': '(0)'}), '(X, Y, test_size=0.25, random_state=0)\n', (679, 717), False, 'from sklearn.model_selection import train_test_split\n'), ((849, 897), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {'solver': '"""lbfgs"""', 'max_iter': '(500)'}), "(solver='lbfgs', max_iter=500)\n", (867, 897), False, 'from sklearn.linear_model import LogisticRegression\n'), ((1044, 1076), 'sklearn.metrics.confusion_matrix', 'confusion_matrix', (['Y_test', 'Y_pred'], {}), '(Y_test, Y_pred)\n', (1060, 1076), False, 'from sklearn.metrics import confusion_matrix, accuracy_score\n'), ((1253, 1283), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['Y_test', 'Y_pred'], {}), '(Y_test, Y_pred)\n', (1267, 1283), False, 'from sklearn.metrics import confusion_matrix, accuracy_score\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: <NAME>
# Contact: <EMAIL>
# Date: 18/12/2018
# This code preprocessed the Facebook wall post and the Webspam datasets in order to produce edgelists
# which can be then used to replicate the paper experiments using EvalNE.
from __future__ import division
import os
from sys import argv
import networkx as nx
from evalne.utils import preprocess as pp
def main():
# Check cmd args
if len(argv) != 3:
print("ERROR: wrong number of parameters")
print("Usage: prep_data_prune.py <facebook_path> <webspam_path>")
exit(-1)
# Extract the dataset names and paths
fb_path, fb_name = os.path.split(argv[1])
ws_path, ws_name = os.path.split(argv[2])
# Preprocess FB graph
G1 = prep_fb(argv[1])
# Store FB graph to a file
pp.save_graph(G1, output_path=fb_path + "/prep_graph_slfloops.edgelist", delimiter=',', write_stats=True)
# Preprocess WS graph
G2 = prep_ws(argv[2])
# Store preprocessed graph to a file
pp.save_graph(G2, output_path=ws_path + "/prep_graph_slfloops.edgelist", delimiter=',', write_stats=True)
print("Preprocessing finished.")
def prep_fb(inpath):
"""
Preprocess facebook wall post graph.
"""
# Load a graph
G = pp.load_graph(inpath, delimiter='\t', comments='#', directed=True)
# The FB graph is stores as destination, origin so needs to be reversed
G = G.reverse()
# Preprocess the graph
G, ids = pp.prep_graph(G, relabel=True, del_self_loops=False)
# Return the preprocessed graph
return G
def prep_ws(inpath):
"""
Preprocess web spam graph.
"""
# Create an empty digraph
G = nx.DiGraph()
# Read the file and create the graph
src = 0
f = open(inpath, 'r')
for line in f:
if src != 0:
arr = line.split()
for dst in arr:
dst_id = int(dst.split(':')[0])
# We consider the graph unweighted
G.add_edge(src, dst_id)
src += 1
# G.add_node(src-2)
# Preprocess the graph
G, ids = pp.prep_graph(G, relabel=True, del_self_loops=False)
# Return the preprocessed graph
return G
if __name__ == "__main__":
main()
|
[
"evalne.utils.preprocess.prep_graph",
"evalne.utils.preprocess.load_graph",
"evalne.utils.preprocess.save_graph",
"networkx.DiGraph",
"os.path.split"
] |
[((676, 698), 'os.path.split', 'os.path.split', (['argv[1]'], {}), '(argv[1])\n', (689, 698), False, 'import os\n'), ((722, 744), 'os.path.split', 'os.path.split', (['argv[2]'], {}), '(argv[2])\n', (735, 744), False, 'import os\n'), ((834, 943), 'evalne.utils.preprocess.save_graph', 'pp.save_graph', (['G1'], {'output_path': "(fb_path + '/prep_graph_slfloops.edgelist')", 'delimiter': '""","""', 'write_stats': '(True)'}), "(G1, output_path=fb_path + '/prep_graph_slfloops.edgelist',\n delimiter=',', write_stats=True)\n", (847, 943), True, 'from evalne.utils import preprocess as pp\n'), ((1039, 1148), 'evalne.utils.preprocess.save_graph', 'pp.save_graph', (['G2'], {'output_path': "(ws_path + '/prep_graph_slfloops.edgelist')", 'delimiter': '""","""', 'write_stats': '(True)'}), "(G2, output_path=ws_path + '/prep_graph_slfloops.edgelist',\n delimiter=',', write_stats=True)\n", (1052, 1148), True, 'from evalne.utils import preprocess as pp\n'), ((1290, 1356), 'evalne.utils.preprocess.load_graph', 'pp.load_graph', (['inpath'], {'delimiter': '"""\t"""', 'comments': '"""#"""', 'directed': '(True)'}), "(inpath, delimiter='\\t', comments='#', directed=True)\n", (1303, 1356), True, 'from evalne.utils import preprocess as pp\n'), ((1495, 1547), 'evalne.utils.preprocess.prep_graph', 'pp.prep_graph', (['G'], {'relabel': '(True)', 'del_self_loops': '(False)'}), '(G, relabel=True, del_self_loops=False)\n', (1508, 1547), True, 'from evalne.utils import preprocess as pp\n'), ((1706, 1718), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (1716, 1718), True, 'import networkx as nx\n'), ((2119, 2171), 'evalne.utils.preprocess.prep_graph', 'pp.prep_graph', (['G'], {'relabel': '(True)', 'del_self_loops': '(False)'}), '(G, relabel=True, del_self_loops=False)\n', (2132, 2171), True, 'from evalne.utils import preprocess as pp\n')]
|
from __future__ import absolute_import
from functools import wraps
from Queue import Queue
from celery.utils import cached_property
def coroutine(fun):
"""Decorator that turns a generator into a coroutine that is
started automatically, and that can send values back to the caller.
**Example coroutine that returns values to caller**::
@coroutine
def adder(self):
while 1:
x, y = (yield)
self.give(x + y)
>>> c = adder()
# call sends value and returns the result.
>>> c.call(4, 4)
8
# or you can send the value and get the result later.
>>> c.send(4, 4)
>>> c.get()
8
**Example sink (input-only coroutine)**::
@coroutine
def uniq():
seen = set()
while 1:
line = (yield)
if line not in seen:
seen.add(line)
print(line)
>>> u = uniq()
>>> [u.send(l) for l in [1, 2, 2, 3]]
[1, 2, 3]
**Example chaining coroutines**::
@coroutine
def uniq(callback):
seen = set()
while 1:
line = (yield)
if line not in seen:
callback.send(line)
seen.add(line)
@coroutine
def uppercaser(callback):
while 1:
line = (yield)
callback.send(str(line).upper())
@coroutine
def printer():
while 1:
line = (yield)
print(line)
>>> pipe = uniq(uppercaser(printer()))
>>> for line in file("AUTHORS").readlines():
pipe.send(line)
"""
@wraps(fun)
def start(*args, **kwargs):
return Coroutine.start_from(fun, *args, **kwargs)
return start
class Coroutine(object):
_gen = None
started = False
def bind(self, generator):
self._gen = generator
def _next(self):
return self._gen.next()
next = __next__ = _next
def start(self):
if self.started:
raise ValueError("coroutine already started")
self.next()
self.started = True
return self
def send1(self, value):
return self._gen.send(value)
def call1(self, value, timeout=None):
self.send1(value)
return self.get(timeout=timeout)
def send(self, *args):
return self._gen.send(args)
def call(self, *args, **opts):
self.send(*args)
return self.get(**opts)
@classmethod
def start_from(cls, fun, *args, **kwargs):
coro = cls()
coro.bind(fun(coro, *args, **kwargs))
return coro.start()
@cached_property
def __output__(self):
return Queue()
@property
def give(self):
return self.__output__.put_nowait
@property
def get(self):
return self.__output__.get
if __name__ == "__main__":
@coroutine
def adder(self):
while 1:
x, y = (yield)
self.give(x + y)
x = adder()
for i in xrange(10):
print(x.call(i, i))
|
[
"Queue.Queue",
"functools.wraps"
] |
[((1762, 1772), 'functools.wraps', 'wraps', (['fun'], {}), '(fun)\n', (1767, 1772), False, 'from functools import wraps\n'), ((2816, 2823), 'Queue.Queue', 'Queue', ([], {}), '()\n', (2821, 2823), False, 'from Queue import Queue\n')]
|
import numpy as np
from lmfit.model import Model
class PDFdecayModel(Model):
r"""A model to describe the product of a decaying exponential and a Gaussian
with three parameters: ``amplitude``, ``xi``, and ``sigma``
.. math::
f(x; A, \xi, \sigma) = A e^{[-{|x|}/\xi]} e^{[{-{x^2}/{{2\sigma}^2}}]}
where the parameter ``amplitude`` corresponds to :math:`A`, ``xi`` to
:math:`\xi`, and ``sigma`` to :math:`\sigma`.
"""
def __init__(self, **kwargs):
def pdfdecay(x, amplitude=1.0, xi=1.0, sigma=1.0):
return amplitude * np.exp(-abs(x)/xi) * np.exp(-x**2/(2*sigma**2))
super().__init__(pdfdecay, **kwargs)
def guess(self, data, x=None, negative=False, **kwargs):
"""Estimate initial model parameter values from data."""
sigma = np.sqrt(np.fabs((x**2*data).sum() / data.sum()))
return self.make_params(amplitude=data.max(), xi=sigma, sigma=sigma)
|
[
"numpy.exp"
] |
[((601, 635), 'numpy.exp', 'np.exp', (['(-x ** 2 / (2 * sigma ** 2))'], {}), '(-x ** 2 / (2 * sigma ** 2))\n', (607, 635), True, 'import numpy as np\n')]
|
from flask import Blueprint
sendmail = Blueprint('sendmail', __name__, template_folder='templates/sendmail')
from . import views
|
[
"flask.Blueprint"
] |
[((40, 109), 'flask.Blueprint', 'Blueprint', (['"""sendmail"""', '__name__'], {'template_folder': '"""templates/sendmail"""'}), "('sendmail', __name__, template_folder='templates/sendmail')\n", (49, 109), False, 'from flask import Blueprint\n')]
|
# -*- coding: utf-8 -*-
import sys
#reload(sys)
#sys.setdefaultencoding('utf8')
#1.将问题ID和TOPIC对应关系保持到字典里:process question_topic_train_set.txt
#from:question_id,topics(topic_id1,topic_id2,topic_id3,topic_id4,topic_id5)
# to:(question_id,topic_id1)
# (question_id,topic_id2)
#read question_topic_train_set.txt
import codecs
#1.################################################################################################################
print("process question_topic_train_set.txt,started...")
q_t='question_topic_train_set.txt'
q_t_file = codecs.open(q_t, 'r', 'utf8')
lines=q_t_file.readlines()
question_topic_dict={}
for i,line in enumerate(lines):
if i%300000==0:
print(i)
#print(line)
question_id,topic_list_string=line.split('\t')
#print(question_id)
#print(topic_list_string)
topic_list=topic_list_string.replace("\n","").split(",")
question_topic_dict[question_id]=topic_list
#for ii,topic in enumerate(topic_list):
# print(ii,topic)
#print("=====================================")
#if i>10:
# print(question_topic_dict)
# break
print("process question_topic_train_set.txt,ended...")
###################################################################################################################
###################################################################################################################
#2.处理问题--得到问题ID:问题的表示,存成字典。proces question. for every question form a a list of string to reprensent it.
import codecs
print("process question started11...")
q='question_train_set.txt'
q_file = codecs.open(q, 'r', 'utf8')
q_lines=q_file.readlines()
questionid_words_representation={}
question_representation=[]
length_desc=30
for i,line in enumerate(q_lines):
#print("line:")
#print(line)
element_lists=line.split('\t') #['c324,c39','w305...','c']
question_id=element_lists[0]
#print("question_id:",element_lists[0])
#for i,q_e in enumerate(element_lists):
# print("e:",q_e)
#question_representation=[x for x in element_lists[2].split(",")] #+ #TODO this is only for title's word. no more.
title_words=[x for x in element_lists[2].strip().split(",")][-length_desc:]
#print("title_words:",title_words)
title_c=[x for x in element_lists[1].strip().split(",")][-length_desc:]
#print("title_c:", title_c)
desc_words=[x for x in element_lists[4].strip().split(",")][-length_desc:]
#print("desc_words:", desc_words)
desc_c=[x for x in element_lists[3].strip().split(",")][-length_desc:]
#print("desc_c:", desc_c)
question_representation =title_words+ title_c+desc_words+ desc_c
question_representation=" ".join(question_representation)
#print("question_representation:",question_representation)
#print("question_representation:",question_representation)
questionid_words_representation[question_id]=question_representation
q_file.close()
print("proces question ended2...")
#####################################################################################################################
###################################################################################################################
# 3.获得模型需要的训练数据。以{问题的表示:TOPIC_ID}的形式的列表
# save training data,testing data: question __label__topic_id
import codecs
import random
print("saving traininig data.started1...")
count = 0
train_zhihu = 'train-zhihu6-title-desc.txt'
test_zhihu = 'test-zhihu6-title-desc.txt'
valid_zhihu = 'valid-zhihu6-title-desc.txt'
data_list = []
multi_label_flag=True
def split_list(listt):
random.shuffle(listt)
list_len = len(listt)
train_len = 0.95
valid_len = 0.025
train = listt[0:int(list_len * train_len)]
valid = listt[int(list_len * train_len):int(list_len * (train_len + valid_len))]
test = listt[int(list_len * (train_len + valid_len)):]
return train, valid, test
for question_id, question_representation in questionid_words_representation.items():
# print("===================>")
# print('question_id',question_id)
# print("question_representation:",question_representation)
# get label_id for this question_id by using:question_topic_dict
topic_list = question_topic_dict[question_id]
# print("topic_list:",topic_list)
# if count>5:
# ii=0
# ii/0
if not multi_label_flag:
for topic_id in topic_list:
data_list.append((question_representation, topic_id)) #single-label
else:
data_list.append((question_representation, topic_list)) #multi-label
count = count + 1
# random shuffle list
random.shuffle(data_list)
def write_data_to_file_system(file_name, data):
file = codecs.open(file_name, 'a', 'utf8')
for d in data:
# print(d)
question_representation, topic_id = d
question_representation_ = " ".join(question_representation)
file.write(question_representation_ + " __label__" + str(topic_id) + "\n")
file.close()
def write_data_to_file_system_multilabel(file_name, data):
file = codecs.open(file_name, 'a', 'utf8')
for d in data:
question_representation, topic_id_list = d
topic_id_list_=" ".join(topic_id_list)
file.write(question_representation + " __label__" + str(topic_id_list_) + "\n")
file.close()
train_data, valid_data, test_data = split_list(data_list)
if not multi_label_flag:#single label
write_data_to_file_system(train_zhihu, train_data)
write_data_to_file_system(valid_zhihu, valid_data)
write_data_to_file_system(test_zhihu, test_data)
else:#multi-label
write_data_to_file_system_multilabel(train_zhihu, train_data)
write_data_to_file_system_multilabel(valid_zhihu, valid_data)
write_data_to_file_system_multilabel(test_zhihu, test_data)
print("saving traininig data.ended...")
######################################################################################################################
|
[
"random.shuffle",
"codecs.open"
] |
[((546, 575), 'codecs.open', 'codecs.open', (['q_t', '"""r"""', '"""utf8"""'], {}), "(q_t, 'r', 'utf8')\n", (557, 575), False, 'import codecs\n'), ((1591, 1618), 'codecs.open', 'codecs.open', (['q', '"""r"""', '"""utf8"""'], {}), "(q, 'r', 'utf8')\n", (1602, 1618), False, 'import codecs\n'), ((4582, 4607), 'random.shuffle', 'random.shuffle', (['data_list'], {}), '(data_list)\n', (4596, 4607), False, 'import random\n'), ((3565, 3586), 'random.shuffle', 'random.shuffle', (['listt'], {}), '(listt)\n', (3579, 3586), False, 'import random\n'), ((4668, 4703), 'codecs.open', 'codecs.open', (['file_name', '"""a"""', '"""utf8"""'], {}), "(file_name, 'a', 'utf8')\n", (4679, 4703), False, 'import codecs\n'), ((5028, 5063), 'codecs.open', 'codecs.open', (['file_name', '"""a"""', '"""utf8"""'], {}), "(file_name, 'a', 'utf8')\n", (5039, 5063), False, 'import codecs\n')]
|
import pygame
import numpy as np
from collections import OrderedDict
from Utility.shape import Rectangle
from Utility import ui
from Level.generic_level import GenericLevel
class Level(GenericLevel):
def __init__(self, player, **kwargs):
super().__init__(**kwargs)
self.player = player
self.hurdle_cords = [
(130, 30, 30, self.gameDimension[1] - 60),
(200, 0, 30, self.gameDimension[1] // 2 - 20),
(200, self.gameDimension[1] // 2 + 20, 30, self.gameDimension[1] // 2 - 15)
]
self.hurdle_cords.append((160, self.gameDimension[1] // 2 + 20, 40, 40))
for i in range(1, 3):
self.hurdle_cords.append((130 + i * 180, 30, 30, self.gameDimension[1] - 60))
self.hurdle_cords.append((130 + i * 180 + 70, 0, 30, self.gameDimension[1] // 2 - 20))
self.hurdle_cords.append((130 + i * 180 + 70, self.gameDimension[1] // 2 + 20,
30, self.gameDimension[1] // 2 - 15))
# todo: not hard code it duh.
self.hurdle_cords.append((340, 140, 40, 40))
self.hurdle_cords.append((520, 220, 40, 40))
self.hurdle = [Rectangle(x, y, l, w, (190, 220, 220)) for x, y, w, l in self.hurdle_cords]
self.food_exists = True
self.food_cords = [Rectangle(x=640, y=190, length=30, width=30, color=None)]
self.food = pygame.transform.scale(pygame.image.load(r"Resources/Food/banana.png"), (self.food_cords[0].width,
self.food_cords[0].length))
def draw_hurdle(self):
for hurdle in self.hurdle:
pygame.draw.rect(self.gameDisplay, hurdle.color, (hurdle.x, hurdle.y, hurdle.width, hurdle.length))
size = hurdle.length // hurdle.width
for y in range(size):
pygame.draw.line(self.gameDisplay, self.grid_lines, (hurdle.x, hurdle.y + y*hurdle.width),
(hurdle.x + hurdle.width, hurdle.y + y*hurdle.width))
pygame.draw.circle(self.gameDisplay, (220, 50, 50), (hurdle.x + hurdle.width // 2, hurdle.y + y*hurdle.width + hurdle.width // 2), 3)
pygame.draw.circle(self.gameDisplay, (220, 239, 0), (hurdle.x + + hurdle.width // 2, hurdle.y + y * hurdle.width + + hurdle.width // 2),
1)
def show_player(self, draw=True):
if draw:
pygame.draw.rect(self.gameDisplay, self.player.color,
(self.player.x, self.player.y, self.player.length, self.player.length))
return
blit_img = self.player.characterDefault
if not (self.player.left or self.player.right or self.player.up or self.player.down):
blit_img = self.player.characterDefault
self.player.r_img = self.player.u_img = self.player.d_img = self.player.l_img = 0
elif self.player.left:
blit_img = self.player.movements['Left'][self.player.l_img]
self.player.l_img = (self.player.l_img + 1) % 4
self.player.r_img = self.player.u_img = self.player.d_img = 0
elif self.player.right:
blit_img = self.player.movements['Right'][self.player.r_img]
self.player.r_img = (self.player.r_img + 1) % 4
self.player.l_img = self.player.u_img = self.player.d_img = 0
elif self.player.up:
blit_img = self.player.movements['Up'][self.player.u_img]
self.player.u_img = (self.player.u_img + 1) % 4
self.player.r_img = self.player.l_img = self.player.d_img = 0
elif self.player.down:
blit_img = self.player.movements['Down'][self.player.d_img]
self.player.d_img = (self.player.d_img + 1) % 4
self.player.r_img = self.player.u_img = self.player.l_img = 0
self.gameDisplay.blit(blit_img, (self.player.x, self.player.y))
def draw_food(self):
if self.food_exists:
self.gameDisplay.blit(self.food, (self.food_cords[0].x, self.food_cords[0].y))
    def show(self, *args):
        """Redraw the frame back-to-front: background, grid, hurdles, food, player.

        *args is forwarded to draw_grids; the path-finding subclass passes the
        current search node through it.
        """
        self.gameDisplay.fill(self.background)
        self.draw_grids(*args)
        self.draw_hurdle()
        self.draw_food()
        self.show_player(draw=self.player.draw)
    def pause_game(self, *args):
        """Block until the user presses S, showing a "Press, S to Start" prompt.

        Closing the window while paused terminates the whole program.
        """
        resume = False
        while not resume:
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    pygame.quit()
                    quit()
                if event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_s:
                        resume = True
            # Repaint the prompt every iteration while paused.
            self.gameDisplay.fill((255, 255, 255))
            ui.message(gameDisplay=self.gameDisplay,msg="Press, S to Start", x=self.gameDimension[0] // 2 - 50, y=self.gameDimension[1] // 2)
            pygame.display.update()
            self.clock.tick(30)  # cap the pause loop at ~30 FPS
    def dynamics(self, *args):
        """Advance the simulation one tick by applying the player's movement."""
        self.player.move()
def wall_logic(self):
if self.player.x < 0:
self.player.x = 0
self.player.left = False
elif self.player.x + self.player.width > self.gameDimension[0]:
self.player.x = self.gameDimension[0] - self.player.width
self.player.right = False
if self.player.y < 0:
self.player.y = 0
self.player.up = False
elif self.player.y + self.player.length > self.gameDimension[1]:
self.player.y = self.gameDimension[1] - self.player.length
self.player.down = False
    def hurdle_contact(self, blocks):
        """Return the first rectangle in *blocks* overlapping the player, else None.

        Overlap is tested per axis with three strict-inequality checks: the
        player's trailing edge, leading edge, or midpoint must lie strictly
        inside the block's span on that axis; both axes must match.
        """
        for hurdle in blocks:
            # Quick reject: block starts beyond the player's right edge.
            if hurdle.x > self.player.x + self.player.width:
                continue
            if ((hurdle.x < self.player.x + self.player.width < hurdle.x + hurdle.width)
                    or (hurdle.x < self.player.x < hurdle.x + hurdle.width)
                    or (hurdle.x < self.player.x + self.player.width // 2 < hurdle.x + hurdle.width)) \
                    and \
                    ((hurdle.y < self.player.y + self.player.length < hurdle.y + hurdle.length)
                     or (hurdle.y < self.player.y < hurdle.y + hurdle.length)
                     or (hurdle.y < self.player.y + self.player.length // 2 < hurdle.y + hurdle.length)):
                return hurdle
        return None
def hurdle_logic(self):
cord = self.hurdle_contact(self.hurdle)
if cord is None:
return
if self.player.right:
self.player.x = cord.x - self.player.width
self.player.right = False
elif self.player.left:
self.player.x = cord.x + cord.width
self.player.left = False
if self.player.down:
self.player.y = cord.y - self.player.length
self.player.down = False
elif self.player.up:
self.player.y = cord.y + cord.length
self.player.up = False
def food_logic(self):
if self.hurdle_contact(self.food_cords):
self.player.gotFood = True
self.food_exists = False
self.player.characterDefault = self.player.winDefault
self.player.left = self.player.right = self.player.up = self.player.down = False
    def collision(self, *args):
        """Resolve this tick's collisions: walls first, then hurdles, then food."""
        self.wall_logic()
        self.hurdle_logic()
        self.food_logic()
def have_won(self, *args):
self.show(*args)
ui.message(gameDisplay=self.gameDisplay,msg="Yeah.!", x=self.gameDimension[0] // 2 - 50, y=self.gameDimension[1] // 2 - 50,
color=(100, 200, 100), font_size=50)
pygame.display.flip()
def have_died(self, *args):
pass
    def start_game(self, *args):
        """Main loop for a human-controlled run: poll input, update, draw.

        Arrow keys select a single movement direction, P pauses, closing the
        window exits the process. The loop ends when the player dies or
        reaches the food; on a win the victory frame is held for 2 seconds.
        """
        # self.pause_game()
        while self.player.alive and not self.player.gotFood:
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    pygame.quit()
                    quit()
                if event.type == pygame.KEYDOWN:
                    # Each arrow key makes its direction exclusive.
                    if event.key == pygame.K_LEFT:
                        self.player.left = True
                        self.player.right = self.player.up = self.player.down = False
                    if event.key == pygame.K_RIGHT:
                        self.player.right = True
                        self.player.left = self.player.up = self.player.down = False
                    if event.key == pygame.K_UP:
                        self.player.up = True
                        self.player.right = self.player.left = self.player.down = False
                    if event.key == pygame.K_DOWN:
                        self.player.down = True
                        self.player.right = self.player.up = self.player.left = False
                    if event.key == pygame.K_p:
                        self.pause_game()
            self.show()
            self.dynamics()
            self.collision()
            pygame.display.flip()
            if self.screen_capt:
                # Optional frame capture for downstream consumers.
                self.read_screen(stream=self.stream, gray=self.gray, maxi=self.maxi, store=self.store,
                                 player=self.player, vision_limit=50)
            self.clock.tick(30)  # ~30 FPS
        if self.player.alive and self.player.gotFood:
            self.have_won()
            pygame.time.wait(2000)
class Level_PathFinding(Level):
    """Level variant that solves the maze with A* and replays the found path.

    The playfield is discretised into a 10x10-pixel grid; hurdles (with some
    padding cells) are marked in ``self.wall`` and A* searches from the
    player's cell to the food's cell.
    """
    def __init__(self, player, **kwargs):
        """Build the wall grid, search bookkeeping, and neighbour table."""
        super().__init__(player, **kwargs)
        # wall[row, col] == 1 marks a blocked 10px cell (row = y, col = x).
        self.wall = np.zeros((self.gameDimension[1] // 10, self.gameDimension[0] // 10))
        for hurdle in self.hurdle:
            for x in range(hurdle.x // 10, hurdle.x // 10 + hurdle.width // 10):
                for y in range(hurdle.y // 10, hurdle.y // 10 + hurdle.length // 10):
                    # Also mark padding cells above/left of the hurdle so the
                    # path keeps clearance for the player's sprite size.
                    self.wall[y, x] = 1
                    self.wall[y - 1, x] = 1
                    self.wall[y - 2, x] = 1
                    # self.wall[y - 3, x] = 1
                    self.wall[y, x - 1] = 1
                    self.wall[y, x - 2] = 1
                    self.wall[y, x - 3] = 1
                    self.wall[y - 1, x - 1] = 1
                    self.wall[y - 2, x - 2] = 1
                    # self.wall[y - 3, x - 3] = 1
        self.f_score = np.full(self.wall.shape, np.inf)
        # NOTE(review): textbook A* initialises g to +inf for non-start cells;
        # zeros here interact with the g-score update in find_path_a_star —
        # confirm this is intended.
        self.g_score = np.zeros(self.wall.shape)
        self.not_visited = list()
        self.visited = list()
        self.neighbour = OrderedDict()
        self.came_from = OrderedDict()
        self.cur_idx =None
        for i in range(self.wall.shape[0]):
            for j in range(self.wall.shape[1]):
                self.neighbour[(i, j)] = self.get_neighbours(i, j)
        # Grid coordinates are (row, col) == (y // 10, x // 10).
        self.start_pos = (self.player.y // 10, self.player.x // 10)
        self.end_pos = (self.food_cords[0].y // 10, self.food_cords[0].x // 10)
    def get_neighbours(self, i, j):
        """Return the in-bounds 4-connected neighbours of cell (i, j)."""
        possible_neighbours = []
        if i > 0:
            possible_neighbours.append((i-1, j))
        if i < self.wall.shape[0] - 1:
            possible_neighbours.append((i + 1, j))
        if j > 0:
            possible_neighbours.append((i, j - 1))
        if j < self.wall.shape[1] - 1:
            possible_neighbours.append((i, j + 1))
        # if i > 0 and j > 0:
        #     possible_neighbours.append((i-1, j-1))
        # if i < self.wall.shape[0] - 1 and j < self.wall.shape[1] - 1:
        #     possible_neighbours.append((i + 1, j+1))
        # if j > 0 and i < self.wall.shape[0] - 1:
        #     possible_neighbours.append((i+1, j - 1))
        # if j < self.wall.shape[1] - 1 and i > 0:
        #     possible_neighbours.append((i-1, j + 1))
        return possible_neighbours
    def find_path_a_star(self):
        """Run A* from start_pos to end_pos, animating the frontier each step.

        On success sets ``self.cur_idx`` to the goal cell and returns; the
        path is reconstructed later via ``self.came_from``. Prints "No Path"
        if the frontier empties first.
        """
        self.not_visited += [self.start_pos]
        while len(self.not_visited) > 0:
            # Keep the window responsive while searching.
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    pygame.quit()
                    quit()
            # Pick the open cell with the lowest f-score (linear scan).
            cur_idx = self.not_visited[0]
            for i, j in self.not_visited:
                if self.f_score[(i, j)] < self.f_score[cur_idx]:
                    cur_idx = (i, j)
            if cur_idx == self.end_pos:
                # pygame.time.wait(3000)
                self.cur_idx = cur_idx
                return
            self.not_visited.remove(cur_idx)
            self.visited.append(cur_idx)
            for neighbour in self.neighbour[cur_idx]:
                if neighbour not in self.visited and self.wall[neighbour] == 0:
                    # NOTE(review): A* normally computes the tentative g as
                    # g_score[cur_idx] + step cost; using the neighbour's own
                    # g here looks like a bug — verify against the intended
                    # algorithm before relying on path optimality.
                    estimated_g_score = self.g_score[neighbour] + 10
                    if neighbour not in self.not_visited:
                        self.not_visited.append(neighbour)
                    elif self.g_score[neighbour] < estimated_g_score:
                        continue
                    self.g_score[neighbour] = estimated_g_score
                    # self.f_score[neighbour] = estimated_g_score + (abs(self.end_pos[0] - neighbour[0])*10 +
                    #                                                abs(self.end_pos[1] - neighbour[1])*10)
                    # f = g + Euclidean-distance heuristic (in pixels).
                    self.f_score[neighbour] = estimated_g_score + np.sqrt((self.end_pos[0]*10 - neighbour[0]*10)**2 +
                                                                          (self.end_pos[1]*10 - neighbour[1]*10)**2)
                    self.came_from[neighbour] = cur_idx
            self.show(cur_idx)
            pygame.display.update()
            self.clock.tick(30)
        print("No Path")
    def draw_grids(self, current):
        """Visualise the search: frontier, visited set, current best path, grid."""
        for point in self.not_visited:
            pygame.draw.rect(self.gameDisplay, (200, 200, 200), (point[1] * 10, point[0] * 10, 10, 10))
        for point in self.visited:
            pygame.draw.rect(self.gameDisplay, (120, 120, 120), (point[1] * 10, point[0] * 10, 10, 10))
        # Walk came_from back from *current* to the start to get the path.
        to_draw = list()
        to_draw.append(current)
        while current in self.came_from.keys():
            current = self.came_from[current]
            to_draw.append(current)
        for point in to_draw:
            pygame.draw.rect(self.gameDisplay, (0, 0, 250), (point[1] * 10, point[0] * 10, 10, 10))
        for x in range(0, self.gameDimension[0], 10):
            pygame.draw.line(self.gameDisplay, self.grid_lines, (x, 0), (x, self.gameDimension[1]))
        for y in range(0, self.gameDimension[1], 10):
            pygame.draw.line(self.gameDisplay, self.grid_lines, (0, y), (self.gameDimension[0], y))
    # def draw_grids_path(self, current):
    #     for point in self.not_visited:
    #         pygame.draw.rect(self.gameDisplay, (0, 200, 0), (point[0] * 10, point[1] * 10, 10, 10))
    #     for point in self.visited:
    #         pygame.draw.rect(self.gameDisplay, (255, 0, 0), (point[0] * 10, point[1] * 10, 10, 10))
    #
    #     to_draw = list()
    #     to_draw.append(current)
    #     while current in self.came_from.keys():
    #         current = self.came_from[current]
    #         to_draw.append(current)
    #
    #     for point in to_draw:
    #         pygame.draw.rect(self.gameDisplay, (0, 0, 250), (point[0] * 10, point[1] * 10, 10, 10))
    def start_game(self):
        """Find a path with A*, decode it into moves, then replay it on screen."""
        # self.pause_game()
        self.find_path_a_star()
        current = self.cur_idx
        prev = current
        # Move encoding: 0 - l, 1 - r, 2 - u, 3 - d
        moves = []
        c = 0
        # Walk came_from goal->start, prepending each step so moves ends up
        # in start->goal order.
        while current in self.came_from.keys():
            c += 1
            current = self.came_from[current]
            if current[0] > prev[0] and current[1] == prev[1]:
                moves.insert(0, 2)
            if current[0] < prev[0] and current[1] == prev[1]:
                moves.insert(0, 3)
            if current[1] < prev[1] and current[0] == prev[0]:
                moves.insert(0, 1)
            if current[1] > prev[1] and current[0] == prev[0]:
                moves.insert(0, 0)
            prev = current
        move_idx = 0
        # Replay one decoded move per frame.
        while self.player.alive and not self.player.gotFood and move_idx < len(moves):
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    pygame.quit()
                    quit()
                if event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_p:
                        self.pause_game()
            if moves[move_idx] == 0:
                self.player.left = True
                self.player.right = self.player.up = self.player.down = False
            if moves[move_idx] == 1:
                self.player.right = True
                self.player.left = self.player.up = self.player.down = False
            if moves[move_idx] == 2:
                self.player.up = True
                self.player.right = self.player.left = self.player.down = False
            if moves[move_idx] == 3:
                self.player.down = True
                self.player.right = self.player.up = self.player.left = False
            self.show(self.cur_idx)
            pygame.draw.rect(self.gameDisplay, (200, 250,190), (self.player.x,self.player.y, 10, 10))
            self.dynamics()
            self.collision()
            pygame.display.flip()
            if self.screen_capt:
                self.read_screen(stream=self.stream, gray=self.gray, maxi=self.maxi, store=self.store,
                                 player=self.player, vision_limit=50)
            self.clock.tick(30)
            move_idx += 1
        if self.player.alive and self.player.gotFood:
            self.have_won(self.cur_idx)
            pygame.time.wait(5_000)
if __name__ == "__main__":
    # Manual smoke test; presumably player=None and (600, 350) is the window
    # size — confirm against Level.__init__ (defined above, outside this view).
    lvl = Level(None, (600, 350))
    lvl.start_game()
|
[
"numpy.full",
"pygame.quit",
"pygame.image.load",
"pygame.draw.line",
"pygame.draw.circle",
"pygame.draw.rect",
"pygame.event.get",
"numpy.zeros",
"Utility.ui.message",
"pygame.display.flip",
"pygame.time.wait",
"pygame.display.update",
"collections.OrderedDict",
"Utility.shape.Rectangle",
"numpy.sqrt"
] |
[((7700, 7870), 'Utility.ui.message', 'ui.message', ([], {'gameDisplay': 'self.gameDisplay', 'msg': '"""Yeah.!"""', 'x': '(self.gameDimension[0] // 2 - 50)', 'y': '(self.gameDimension[1] // 2 - 50)', 'color': '(100, 200, 100)', 'font_size': '(50)'}), "(gameDisplay=self.gameDisplay, msg='Yeah.!', x=self.gameDimension\n [0] // 2 - 50, y=self.gameDimension[1] // 2 - 50, color=(100, 200, 100),\n font_size=50)\n", (7710, 7870), False, 'from Utility import ui\n'), ((7892, 7913), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (7911, 7913), False, 'import pygame\n'), ((9613, 9635), 'pygame.time.wait', 'pygame.time.wait', (['(2000)'], {}), '(2000)\n', (9629, 9635), False, 'import pygame\n'), ((9781, 9849), 'numpy.zeros', 'np.zeros', (['(self.gameDimension[1] // 10, self.gameDimension[0] // 10)'], {}), '((self.gameDimension[1] // 10, self.gameDimension[0] // 10))\n', (9789, 9849), True, 'import numpy as np\n'), ((10547, 10579), 'numpy.full', 'np.full', (['self.wall.shape', 'np.inf'], {}), '(self.wall.shape, np.inf)\n', (10554, 10579), True, 'import numpy as np\n'), ((10604, 10629), 'numpy.zeros', 'np.zeros', (['self.wall.shape'], {}), '(self.wall.shape)\n', (10612, 10629), True, 'import numpy as np\n'), ((10722, 10735), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (10733, 10735), False, 'from collections import OrderedDict\n'), ((10762, 10775), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (10773, 10775), False, 'from collections import OrderedDict\n'), ((17923, 17945), 'pygame.time.wait', 'pygame.time.wait', (['(5000)'], {}), '(5000)\n', (17939, 17945), False, 'import pygame\n'), ((1290, 1328), 'Utility.shape.Rectangle', 'Rectangle', (['x', 'y', 'l', 'w', '(190, 220, 220)'], {}), '(x, y, l, w, (190, 220, 220))\n', (1299, 1328), False, 'from Utility.shape import Rectangle\n'), ((1429, 1485), 'Utility.shape.Rectangle', 'Rectangle', ([], {'x': '(640)', 'y': '(190)', 'length': '(30)', 'width': '(30)', 'color': 'None'}), '(x=640, 
y=190, length=30, width=30, color=None)\n', (1438, 1485), False, 'from Utility.shape import Rectangle\n'), ((1531, 1577), 'pygame.image.load', 'pygame.image.load', (['"""Resources/Food/banana.png"""'], {}), "('Resources/Food/banana.png')\n", (1548, 1577), False, 'import pygame\n'), ((1808, 1911), 'pygame.draw.rect', 'pygame.draw.rect', (['self.gameDisplay', 'hurdle.color', '(hurdle.x, hurdle.y, hurdle.width, hurdle.length)'], {}), '(self.gameDisplay, hurdle.color, (hurdle.x, hurdle.y,\n hurdle.width, hurdle.length))\n', (1824, 1911), False, 'import pygame\n'), ((2605, 2735), 'pygame.draw.rect', 'pygame.draw.rect', (['self.gameDisplay', 'self.player.color', '(self.player.x, self.player.y, self.player.length, self.player.length)'], {}), '(self.gameDisplay, self.player.color, (self.player.x, self.\n player.y, self.player.length, self.player.length))\n', (2621, 2735), False, 'import pygame\n'), ((4584, 4602), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (4600, 4602), False, 'import pygame\n'), ((4919, 5054), 'Utility.ui.message', 'ui.message', ([], {'gameDisplay': 'self.gameDisplay', 'msg': '"""Press, S to Start"""', 'x': '(self.gameDimension[0] // 2 - 50)', 'y': '(self.gameDimension[1] // 2)'}), "(gameDisplay=self.gameDisplay, msg='Press, S to Start', x=self.\n gameDimension[0] // 2 - 50, y=self.gameDimension[1] // 2)\n", (4929, 5054), False, 'from Utility import ui\n'), ((5062, 5085), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (5083, 5085), False, 'import pygame\n'), ((8120, 8138), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (8136, 8138), False, 'import pygame\n'), ((9256, 9277), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (9275, 9277), False, 'import pygame\n'), ((12134, 12152), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (12150, 12152), False, 'import pygame\n'), ((13751, 13774), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (13772, 13774), False, 'import 
pygame\n'), ((13925, 14021), 'pygame.draw.rect', 'pygame.draw.rect', (['self.gameDisplay', '(200, 200, 200)', '(point[1] * 10, point[0] * 10, 10, 10)'], {}), '(self.gameDisplay, (200, 200, 200), (point[1] * 10, point[0\n ] * 10, 10, 10))\n', (13941, 14021), False, 'import pygame\n'), ((14066, 14162), 'pygame.draw.rect', 'pygame.draw.rect', (['self.gameDisplay', '(120, 120, 120)', '(point[1] * 10, point[0] * 10, 10, 10)'], {}), '(self.gameDisplay, (120, 120, 120), (point[1] * 10, point[0\n ] * 10, 10, 10))\n', (14082, 14162), False, 'import pygame\n'), ((14398, 14490), 'pygame.draw.rect', 'pygame.draw.rect', (['self.gameDisplay', '(0, 0, 250)', '(point[1] * 10, point[0] * 10, 10, 10)'], {}), '(self.gameDisplay, (0, 0, 250), (point[1] * 10, point[0] * \n 10, 10, 10))\n', (14414, 14490), False, 'import pygame\n'), ((14554, 14646), 'pygame.draw.line', 'pygame.draw.line', (['self.gameDisplay', 'self.grid_lines', '(x, 0)', '(x, self.gameDimension[1])'], {}), '(self.gameDisplay, self.grid_lines, (x, 0), (x, self.\n gameDimension[1]))\n', (14570, 14646), False, 'import pygame\n'), ((14710, 14802), 'pygame.draw.line', 'pygame.draw.line', (['self.gameDisplay', 'self.grid_lines', '(0, y)', '(self.gameDimension[0], y)'], {}), '(self.gameDisplay, self.grid_lines, (0, y), (self.\n gameDimension[0], y))\n', (14726, 14802), False, 'import pygame\n'), ((16393, 16411), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (16409, 16411), False, 'import pygame\n'), ((17363, 17459), 'pygame.draw.rect', 'pygame.draw.rect', (['self.gameDisplay', '(200, 250, 190)', '(self.player.x, self.player.y, 10, 10)'], {}), '(self.gameDisplay, (200, 250, 190), (self.player.x, self.\n player.y, 10, 10))\n', (17379, 17459), False, 'import pygame\n'), ((17525, 17546), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (17544, 17546), False, 'import pygame\n'), ((2010, 2162), 'pygame.draw.line', 'pygame.draw.line', (['self.gameDisplay', 'self.grid_lines', '(hurdle.x, hurdle.y + y * 
hurdle.width)', '(hurdle.x + hurdle.width, hurdle.y + y * hurdle.width)'], {}), '(self.gameDisplay, self.grid_lines, (hurdle.x, hurdle.y + y *\n hurdle.width), (hurdle.x + hurdle.width, hurdle.y + y * hurdle.width))\n', (2026, 2162), False, 'import pygame\n'), ((2206, 2346), 'pygame.draw.circle', 'pygame.draw.circle', (['self.gameDisplay', '(220, 50, 50)', '(hurdle.x + hurdle.width // 2, hurdle.y + y * hurdle.width + hurdle.width // 2)', '(3)'], {}), '(self.gameDisplay, (220, 50, 50), (hurdle.x + hurdle.\n width // 2, hurdle.y + y * hurdle.width + hurdle.width // 2), 3)\n', (2224, 2346), False, 'import pygame\n'), ((2357, 2499), 'pygame.draw.circle', 'pygame.draw.circle', (['self.gameDisplay', '(220, 239, 0)', '(hurdle.x + +hurdle.width // 2, hurdle.y + y * hurdle.width + +hurdle.width //\n 2)', '(1)'], {}), '(self.gameDisplay, (220, 239, 0), (hurdle.x + +hurdle.\n width // 2, hurdle.y + y * hurdle.width + +hurdle.width // 2), 1)\n', (2375, 2499), False, 'import pygame\n'), ((4672, 4685), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (4683, 4685), False, 'import pygame\n'), ((8210, 8223), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (8221, 8223), False, 'import pygame\n'), ((12222, 12235), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (12233, 12235), False, 'import pygame\n'), ((16483, 16496), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (16494, 16496), False, 'import pygame\n'), ((13484, 13594), 'numpy.sqrt', 'np.sqrt', (['((self.end_pos[0] * 10 - neighbour[0] * 10) ** 2 + (self.end_pos[1] * 10 - \n neighbour[1] * 10) ** 2)'], {}), '((self.end_pos[0] * 10 - neighbour[0] * 10) ** 2 + (self.end_pos[1] *\n 10 - neighbour[1] * 10) ** 2)\n', (13491, 13594), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @Time : 2022/1/29 10:35 上午
# @Author: zhoumengjie
# @File : pdfutils.py
import base64
import logging
import math
import os
import time
import pdfplumber
from pyecharts.components import Table
from pyecharts.options import ComponentTitleOpts
from selenium import webdriver
from wxcloudrun.bond.BondUtils import Crawler
from wxcloudrun.bond.PageTemplate import PROJECT_DIR
from wxcloudrun.common import fingerprinter as fp
from wxcloudrun.common import tabledrawer
# Module-wide logger and the crawler used below to fetch announcement PDFs
# (see get_draw_pdf_table).
log = logging.getLogger('log')
crawler = Crawler()
def extract_draw_table(path):
    """Return every table pdfplumber extracts from every page of *path*."""
    with pdfplumber.open(path) as pdf:
        return [table
                for page in pdf.pages
                for table in page.extract_tables()]
def get_draw_pdf_table(url_path, bond_name, choose_table_idx:int=None, add_finger_print=False):
    """Download the bond's announcement PDF and render its allotment table.

    Fetches the PDF via the module-level crawler, then delegates to
    draw_table(); returns its (success, base64-image-or-None) pair.
    """
    pdf_name = '{}_anno.pdf'.format(bond_name)
    crawler.query_anno_pdf(pdf_name, url_path)
    png_name = '{}_draw.png'.format(bond_name)
    return draw_table(pdf_name, png_name, bond_name, choose_table_idx, add_finger_print)
def draw_table(pdf_path, img_file, bond_name, choose_table_idx:int=None, add_finger_print=False):
    """Extract the allotment table from *pdf_path* and render it to an image.

    Args:
        pdf_path: local PDF to parse; deleted after a successful render.
        img_file: path of the intermediate screenshot PNG.
        bond_name: kept for interface compatibility (unused here).
        choose_table_idx: when given, use exactly that extracted table.
        add_finger_print: forwarded to draw_table_with_rows().

    Returns:
        (True, base64 PNG bytes) on success, (False, None) otherwise.
    """
    table_data = extract_draw_table(pdf_path)
    if table_data is None or len(table_data) == 0:
        log.info('未识别到pdf的中签表格')
        return False, None
    rows = []
    headers = None
    if choose_table_idx is not None:
        headers = table_data[choose_table_idx][0]
        rows = table_data[choose_table_idx][1:]
    # BUG FIX: these branches previously ran even when choose_table_idx was
    # given, silently overriding the caller's explicit table choice.
    elif len(table_data) == 1:
        headers = table_data[0][0]
        rows = table_data[0][1:]
    elif len(table_data) == 2:
        headers = table_data[0][0]
        if headers == table_data[1][0]:
            # Same header repeated on the second table: drop its header row.
            rows = table_data[0][1:] + table_data[1][1:]
        else:
            rows = table_data[0][1:] + table_data[1][0:]
    if len(rows) == 0:
        return False, None
    # Drop rows where every cell is empty.
    rows = filter(is_valid_row, rows)
    pic_base64 = draw_table_with_rows('配售结果', img_file, headers, rows, add_finger_print)
    os.remove(pdf_path)
    return True, pic_base64
def draw_table_with_rows(title:str, img_file:str, headers: list, rows: list, add_finger_print=False):
    """Render a headers+rows table to HTML, screenshot it, return base64 PNG.

    The intermediate HTML and PNG files are removed before returning; the
    headless Chrome instance is always closed, even on failure.
    """
    table = Table()
    table.add(headers, rows)
    table.set_global_opts(title_opts=ComponentTitleOpts(title=title))
    render_file_name = title + "_table-screenshot.html"
    table.render(render_file_name)
    options = webdriver.ChromeOptions()
    options.add_argument('--no-sandbox')
    options.add_argument('--disable-dev-shm-usage')
    options.add_argument('--headless')
    driver = webdriver.Chrome(chrome_options=options)
    try:
        driver.get("file://" + PROJECT_DIR + '/' + render_file_name)
        time.sleep(1)  # give the page a moment to render before screenshotting
        ele = driver.find_element_by_xpath("//tbody")
        ele.screenshot(img_file)
    finally:
        # BUG FIX: the driver was never closed before, leaking one headless
        # Chrome process per call.
        driver.quit()
    if add_finger_print:
        fp.add_finger_print(img_file)
    with open(img_file, 'rb') as f:
        pic_base64 = base64.b64encode(f.read())
    os.remove(img_file)
    os.remove(render_file_name)
    return pic_base64
def is_valid_row(row: list) -> bool:
    """Return True if *row* has at least one cell that is neither None nor ''.

    Used by draw_table() to drop rows that were extracted entirely empty.
    An empty row is invalid. (Also fixes the odd ``row:[]`` literal
    annotation of the original signature.)
    """
    return any(cell is not None and cell != '' for cell in row)
if __name__ == '__main__':
    # Ad-hoc manual checks kept for reference.
    # get_draw_pdf_table('/finalpage/2022-01-26/1212274930.PDF', '豪美转债')
    # draw_table(PROJECT_DIR + '/中特转债_anno.pdf', '2.png', '中特转债')
    # table = extract_draw_table(PROJECT_DIR + '/中特转债_anno.pdf')
    # print(table)
    print(math.ceil(1 / float(0.14)))
|
[
"os.remove",
"wxcloudrun.bond.BondUtils.Crawler",
"pyecharts.components.Table",
"wxcloudrun.common.fingerprinter.add_finger_print",
"time.sleep",
"pdfplumber.open",
"selenium.webdriver.ChromeOptions",
"selenium.webdriver.Chrome",
"pyecharts.options.ComponentTitleOpts",
"logging.getLogger"
] |
[((521, 545), 'logging.getLogger', 'logging.getLogger', (['"""log"""'], {}), "('log')\n", (538, 545), False, 'import logging\n'), ((557, 566), 'wxcloudrun.bond.BondUtils.Crawler', 'Crawler', ([], {}), '()\n', (564, 566), False, 'from wxcloudrun.bond.BondUtils import Crawler\n'), ((2156, 2175), 'os.remove', 'os.remove', (['pdf_path'], {}), '(pdf_path)\n', (2165, 2175), False, 'import os\n'), ((2314, 2321), 'pyecharts.components.Table', 'Table', ([], {}), '()\n', (2319, 2321), False, 'from pyecharts.components import Table\n'), ((2528, 2553), 'selenium.webdriver.ChromeOptions', 'webdriver.ChromeOptions', ([], {}), '()\n', (2551, 2553), False, 'from selenium import webdriver\n'), ((2699, 2739), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'chrome_options': 'options'}), '(chrome_options=options)\n', (2715, 2739), False, 'from selenium import webdriver\n'), ((2809, 2822), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (2819, 2822), False, 'import time\n'), ((3078, 3097), 'os.remove', 'os.remove', (['img_file'], {}), '(img_file)\n', (3087, 3097), False, 'import os\n'), ((3102, 3129), 'os.remove', 'os.remove', (['render_file_name'], {}), '(render_file_name)\n', (3111, 3129), False, 'import os\n'), ((623, 644), 'pdfplumber.open', 'pdfplumber.open', (['path'], {}), '(path)\n', (638, 644), False, 'import pdfplumber\n'), ((2947, 2976), 'wxcloudrun.common.fingerprinter.add_finger_print', 'fp.add_finger_print', (['img_file'], {}), '(img_file)\n', (2966, 2976), True, 'from wxcloudrun.common import fingerprinter as fp\n'), ((2388, 2419), 'pyecharts.options.ComponentTitleOpts', 'ComponentTitleOpts', ([], {'title': 'title'}), '(title=title)\n', (2406, 2419), False, 'from pyecharts.options import ComponentTitleOpts\n')]
|
"""Reformats daily seaice data into regional xls file
This is for internal use by scientists.
"""
import calendar as cal
import os
import click
import pandas as pd
from . import util
import seaice.nasateam as nt
import seaice.logging as seaicelogging
import seaice.timeseries as sit
log = seaicelogging.init('seaice.tools')
def output_filepath(output_directory, *, hemi):
    """Return the path of the regional daily workbook for hemisphere *hemi*."""
    basename = '{hemi}_Sea_Ice_Index_Regional_Daily_Data_G02135_{version}.xlsx'.format(
        hemi=hemi, version=nt.VERSION_STRING)
    return os.path.join(output_directory, basename)
@click.command()
@click.argument('input_directory', type=click.Path(exists=True, file_okay=False))
@click.argument('output_directory', type=click.Path(exists=True, file_okay=False))
@seaicelogging.log_command(log)
def regional_daily(input_directory, output_directory):
    """Write one regional-daily Excel workbook per hemisphere.

    Reads the daily datastore from *input_directory*, keeps only the regional
    columns, and writes one sheet per region (5-day rolling mean, pivoted to
    month/day rows by year columns) plus a documentation sheet.
    """
    data_store = os.path.join(input_directory, 'daily.p')
    for hemisphere in (nt.NORTH, nt.SOUTH):
        hemi = hemisphere['short_name']
        # BUG FIX: the output file handle was previously opened and never
        # closed; the with-block guarantees it is released.
        with open(output_filepath(output_directory, hemi=hemi), 'wb') as output_file:
            # Generate the daily dataframe with regional columns
            daily = sit.daily(hemi, data_store=data_store, columns=[])
            # Keep only regional columns
            regional = daily.drop(nt.DAILY_DEFAULT_COLUMNS, axis=1)
            writer = pd.ExcelWriter(output_file, engine='xlsxwriter')
            extent_and_area_columns = sorted(
                c for c in regional.columns if 'missing' not in c)
            for col in extent_and_area_columns:
                regional_mask_cfg, region_prefix = \
                    util.regional_mask_cfg_from_column_name(col)
                # Don't add column to the sheet if wrong hemisphere
                if regional_mask_cfg['hemisphere'] != hemisphere['long_name']:
                    continue
                df = regional[col].rolling(window=5, min_periods=2).mean()
                df = pd.DataFrame(df).set_index(
                    [df.index.year, df.index.month, df.index.day]
                ).unstack(0)
                df.index.names = ['month', 'day']
                df.index = df.index.set_levels(cal.month_name[1:], level=0)
                # Strip the regional mask prefix from the column name
                col = col[len(region_prefix):]
                sheet_name = util.regional_sheet_name(col)
                write_sheet(writer, df, sheet_name)
            writer = util.add_documentation_sheet(
                writer,
                util.documentation_file(output_filepath('', hemi=hemi))
            )
            writer.save()
            log.info('regional_daily created: {}'.format(output_file.name))
def write_sheet(writer, df, sheet_name):
    """Write *df* to *sheet_name* of *writer*, 3-decimal floats.

    Drops the outer column level first — assumes df has MultiIndex columns
    (presumably one sub-column per year after the unstack in regional_daily;
    confirm against the caller).
    """
    df.columns = df.columns.droplevel(0)
    df.to_excel(writer, sheet_name, float_format='%.3f')
if __name__ == '__main__':
    regional_daily()  # click parses the CLI arguments
|
[
"pandas.DataFrame",
"seaice.logging.init",
"seaice.timeseries.daily",
"pandas.ExcelWriter",
"click.command",
"click.Path",
"os.path.join",
"seaice.logging.log_command"
] |
[((294, 328), 'seaice.logging.init', 'seaicelogging.init', (['"""seaice.tools"""'], {}), "('seaice.tools')\n", (312, 328), True, 'import seaice.logging as seaicelogging\n'), ((545, 560), 'click.command', 'click.command', ([], {}), '()\n', (558, 560), False, 'import click\n'), ((727, 757), 'seaice.logging.log_command', 'seaicelogging.log_command', (['log'], {}), '(log)\n', (752, 757), True, 'import seaice.logging as seaicelogging\n'), ((507, 541), 'os.path.join', 'os.path.join', (['output_directory', 'fn'], {}), '(output_directory, fn)\n', (519, 541), False, 'import os\n'), ((830, 870), 'os.path.join', 'os.path.join', (['input_directory', '"""daily.p"""'], {}), "(input_directory, 'daily.p')\n", (842, 870), False, 'import os\n'), ((1113, 1163), 'seaice.timeseries.daily', 'sit.daily', (['hemi'], {'data_store': 'data_store', 'columns': '[]'}), '(hemi, data_store=data_store, columns=[])\n', (1122, 1163), True, 'import seaice.timeseries as sit\n'), ((1283, 1331), 'pandas.ExcelWriter', 'pd.ExcelWriter', (['output_file'], {'engine': '"""xlsxwriter"""'}), "(output_file, engine='xlsxwriter')\n", (1297, 1331), True, 'import pandas as pd\n'), ((601, 641), 'click.Path', 'click.Path', ([], {'exists': '(True)', 'file_okay': '(False)'}), '(exists=True, file_okay=False)\n', (611, 641), False, 'import click\n'), ((684, 724), 'click.Path', 'click.Path', ([], {'exists': '(True)', 'file_okay': '(False)'}), '(exists=True, file_okay=False)\n', (694, 724), False, 'import click\n'), ((1871, 1887), 'pandas.DataFrame', 'pd.DataFrame', (['df'], {}), '(df)\n', (1883, 1887), True, 'import pandas as pd\n')]
|
"""Sample program that runs a sweep and records results."""
from pathlib import Path
from typing import Sequence
import numpy as np
from absl import app
from absl import flags
from differential_value_iteration import utils
from differential_value_iteration.algorithms import algorithms
from differential_value_iteration.environments import garet
from differential_value_iteration.environments import micro
FLAGS = flags.FLAGS
# Command-line configuration for the convergence sweeps below.
flags.DEFINE_string(name='plot_dir', default='plots', help='path to plot dir')
flags.DEFINE_integer('max_iters', 100000, 'Maximum iterations per algorithm.')
flags.DEFINE_float('epsilon', 1e-7, 'Tolerance for convergence.')
flags.DEFINE_bool('mrp', True, 'Run mrp experiments.')
flags.DEFINE_bool('mdp', True, 'Run mdp experiments.')
def main(argv):
  """Entry point: run the configured MRP and/or MDP convergence sweeps."""
  del argv  # unused
  step_sizes = [1.0, 0.999, 0.99, 0.9, 0.7, 0.5, 0.3, 0.1, 0.01, 0.001]
  alphas = list(step_sizes)
  betas = list(step_sizes)
  max_iters = FLAGS.max_iters
  epsilon = FLAGS.epsilon
  plot_dir = FLAGS.plot_dir
  if plot_dir[-1] != '/':
    plot_dir += '/'
  Path(plot_dir).mkdir(parents=True, exist_ok=True)
  if FLAGS.mrp:
    run_mrps(alphas=alphas, betas=betas, max_iters=max_iters,
             epsilon=epsilon, plot_dir=plot_dir)
  if FLAGS.mdp:
    run_mdps(alphas=alphas, betas=betas, max_iters=max_iters,
             epsilon=epsilon, plot_dir=plot_dir)
def run_mrps(
    alphas: Sequence[float],
    betas: Sequence[float],
    max_iters: int,
    epsilon: float,
    plot_dir: str):
  """Sweep every evaluation algorithm over the three micro MRPs.

  For each environment and algorithm, the synchronous and asynchronous
  update rules are run in turn and a convergence plot is written to
  *plot_dir*.
  """
  environments = [
      micro.create_mrp1(dtype=np.float32),
      micro.create_mrp2(dtype=np.float32),
      micro.create_mrp3(dtype=np.float32),
  ]
  modes = (('exec_sync', 'sync'), ('exec_async', 'async'))
  for env in environments:
    init_v = np.zeros(env.num_states)
    init_r_bar_scalar = 0
    init_r_bar_vec = np.zeros(env.num_states)
    # (plot label, sweep callable, axes passed to utils.draw)
    sweeps = (
        ('RVI_Evaluation',
         lambda mode: exp_RVI_Evaluation(env, mode, alphas, init_v, max_iters,
                                          epsilon, ref_idx=0),
         (alphas,)),
        ('DVI_Evaluation',
         lambda mode: exp_DVI_Evaluation(env, mode, alphas, betas, init_v,
                                          init_r_bar_scalar, max_iters,
                                          epsilon),
         (alphas, betas)),
        ('MDVI_Evaluation',
         lambda mode: exp_MDVI_Evaluation(env, mode, alphas, betas, init_v,
                                           init_r_bar_vec, max_iters, epsilon),
         (alphas, betas)),
    )
    for label, run_sweep, axes in sweeps:
      for mode, suffix in modes:
        results = run_sweep(mode)
        utils.draw(results,
                   '{}{}_{}_{}'.format(plot_dir, env.name, label, suffix),
                   *axes)
def run_mdps(alphas: Sequence[float], betas: Sequence[float], max_iters: int,
             epsilon: float, plot_dir: str):
  """Sweep every control algorithm over a GARET MDP and micro.mdp2.

  For each environment and algorithm, the synchronous and asynchronous
  update rules are run in turn and a convergence plot is written to
  *plot_dir*.
  """
  garet_env = garet.create(seed=42,
                           num_states=10,
                           num_actions=2,
                           branching_factor=3)
  environments = [garet_env, micro.mdp2]
  modes = (('exec_sync', 'sync'), ('exec_async', 'async'))
  for env in environments:
    init_v = np.zeros(env.num_states)
    init_r_bar_scalar = 0
    init_r_bar_vec = np.zeros(env.num_states)
    # (plot label, sweep callable, axes passed to utils.draw)
    sweeps = (
        ('RVI_Control',
         lambda mode: exp_RVI_Control(env, mode, alphas, init_v, max_iters,
                                      epsilon, ref_idx=0),
         (alphas,)),
        ('DVI_Control',
         lambda mode: exp_DVI_Control(env, mode, alphas, betas, init_v,
                                      init_r_bar_scalar, max_iters, epsilon),
         (alphas, betas)),
        ('MDVI_Control1',
         lambda mode: exp_MDVI_Control1(env, mode, alphas, betas, init_v,
                                        init_r_bar_vec, max_iters, epsilon),
         (alphas, betas)),
        ('MDVI_Control2',
         lambda mode: exp_MDVI_Control2(env, mode, alphas, betas, init_v,
                                        init_r_bar_vec, max_iters, epsilon),
         (alphas, betas)),
    )
    for label, run_sweep, axes in sweeps:
      for mode, suffix in modes:
        results = run_sweep(mode)
        utils.draw(results,
                   '{}{}_{}_{}'.format(plot_dir, env.name, label, suffix),
                   *axes)
def exp_RVI_Evaluation(env, update_rule, alphas, init_v, max_iters, epsilon,
                       ref_idx=0):
    """Sweep step sizes for RVI Evaluation; return one convergence flag per alpha."""
    flags = np.zeros(len(alphas))
    for idx, step_size in enumerate(alphas):
        solver = algorithms.RVI_Evaluation(env, init_v, step_size, ref_idx)
        print(f'{env.name} RVI Evaluation {update_rule} alpha:{step_size}', end=' ')
        converged = utils.run_alg(solver, update_rule, max_iters, epsilon)
        print(f'Converged? {converged}')
        flags[idx] = converged
    return flags
def exp_RVI_Control(env, update_rule, alphas, init_v, max_iters, epsilon,
                    ref_idx=0):
    """Sweep step sizes for RVI Control; return one convergence flag per alpha."""
    flags = np.zeros(len(alphas))
    for idx, step_size in enumerate(alphas):
        solver = algorithms.RVI_Control(env, init_v, step_size, ref_idx)
        print(f'{env.name} RVI Control {update_rule} alpha:{step_size}', end=' ')
        converged = utils.run_alg(solver, update_rule, max_iters, epsilon)
        print(f'Converged? {converged}')
        flags[idx] = converged
    return flags
def exp_DVI_Evaluation(env, update_rule, alphas, betas, init_v, init_r_bar,
                       max_iters, epsilon):
    """Grid-sweep (alpha, beta) for DVI Evaluation; return a flags matrix."""
    flags = np.zeros((len(alphas), len(betas)))
    for i, a in enumerate(alphas):
        for j, b in enumerate(betas):
            solver = algorithms.DVI_Evaluation(env, init_v, init_r_bar, a, b)
            print(
                f'{env.name} DVI Evaluation {update_rule} alpha:{a} beta:{b}',
                end=' ')
            converged = utils.run_alg(solver, update_rule, max_iters, epsilon)
            print(f'Converged? {converged}')
            flags[i, j] = converged
    return flags
def exp_DVI_Control(env, update_rule, alphas, betas, init_v, init_r_bar,
                    max_iters, epsilon):
    """Grid-sweep (alpha, beta) for DVI Control; return a flags matrix."""
    flags = np.zeros((len(alphas), len(betas)))
    for i, a in enumerate(alphas):
        for j, b in enumerate(betas):
            solver = algorithms.DVI_Control(env, init_v, init_r_bar, a, b)
            print(f'{env.name} DVI Control {update_rule} alpha:{a} beta:{b}',
                  end=' ')
            converged = utils.run_alg(solver, update_rule, max_iters, epsilon)
            print(f'Converged? {converged}')
            flags[i, j] = converged
    return flags
def exp_MDVI_Evaluation(env, update_rule, alphas, betas, init_v, init_r_bar,
                        max_iters, epsilon):
    """Grid-sweep (alpha, beta) for MDVI Evaluation; return a flags matrix."""
    flags = np.zeros((len(alphas), len(betas)))
    for i, a in enumerate(alphas):
        for j, b in enumerate(betas):
            solver = algorithms.MDVI_Evaluation(env, init_v, init_r_bar, a, b)
            print(
                f'{env.name} MDVI Evaluation {update_rule} alpha:{a} beta:{b}',
                end=' ')
            converged = utils.run_alg(solver, update_rule, max_iters, epsilon)
            print(f'Converged? {converged}')
            flags[i, j] = converged
    return flags
def exp_MDVI_Control1(env, update_rule, alphas, betas, init_v, init_r_bar,
                      max_iters, epsilon):
    """Grid-sweep (alpha, beta) for MDVI Control (variant 1); return a flags matrix."""
    flags = np.zeros((len(alphas), len(betas)))
    for i, a in enumerate(alphas):
        for j, b in enumerate(betas):
            solver = algorithms.MDVI_Control1(env, init_v, init_r_bar, a, b)
            print(f'{env.name} MDVI Control1 {update_rule} alpha:{a} beta:{b}',
                  end=' ')
            converged = utils.run_alg(solver, update_rule, max_iters, epsilon)
            print(f'Converged? {converged}')
            flags[i, j] = converged
    return flags
def exp_MDVI_Control2(env, update_rule, alphas, betas, init_v, init_r_bar,
                      max_iters, epsilon):
    """Grid-sweep (alpha, beta) for MDVI Control (variant 2); return a flags matrix."""
    flags = np.zeros((len(alphas), len(betas)))
    for i, a in enumerate(alphas):
        for j, b in enumerate(betas):
            solver = algorithms.MDVI_Control2(env, init_v, init_r_bar, a, b)
            print(f'{env.name} MDVI Control2 {update_rule} alpha:{a} beta:{b}',
                  end=' ')
            converged = utils.run_alg(solver, update_rule, max_iters, epsilon)
            print(f'Converged? {converged}')
            flags[i, j] = converged
    return flags
if __name__ == '__main__':
app.run(main)
|
[
"differential_value_iteration.environments.micro.create_mrp1",
"differential_value_iteration.environments.micro.create_mrp2",
"differential_value_iteration.algorithms.algorithms.MDVI_Evaluation",
"pathlib.Path",
"differential_value_iteration.utils.run_alg",
"absl.flags.DEFINE_bool",
"differential_value_iteration.environments.garet.create",
"differential_value_iteration.algorithms.algorithms.MDVI_Control2",
"differential_value_iteration.algorithms.algorithms.DVI_Control",
"differential_value_iteration.algorithms.algorithms.MDVI_Control1",
"absl.flags.DEFINE_integer",
"absl.flags.DEFINE_float",
"differential_value_iteration.algorithms.algorithms.RVI_Evaluation",
"differential_value_iteration.utils.draw",
"differential_value_iteration.algorithms.algorithms.RVI_Control",
"differential_value_iteration.algorithms.algorithms.DVI_Evaluation",
"numpy.zeros",
"absl.flags.DEFINE_string",
"absl.app.run",
"differential_value_iteration.environments.micro.create_mrp3"
] |
[((428, 506), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', ([], {'name': '"""plot_dir"""', 'default': '"""plots"""', 'help': '"""path to plot dir"""'}), "(name='plot_dir', default='plots', help='path to plot dir')\n", (447, 506), False, 'from absl import flags\n'), ((507, 585), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""max_iters"""', '(100000)', '"""Maximum iterations per algorithm."""'], {}), "('max_iters', 100000, 'Maximum iterations per algorithm.')\n", (527, 585), False, 'from absl import flags\n'), ((586, 652), 'absl.flags.DEFINE_float', 'flags.DEFINE_float', (['"""epsilon"""', '(1e-07)', '"""Tolerance for convergence."""'], {}), "('epsilon', 1e-07, 'Tolerance for convergence.')\n", (604, 652), False, 'from absl import flags\n'), ((652, 706), 'absl.flags.DEFINE_bool', 'flags.DEFINE_bool', (['"""mrp"""', '(True)', '"""Run mrp experiments."""'], {}), "('mrp', True, 'Run mrp experiments.')\n", (669, 706), False, 'from absl import flags\n'), ((707, 761), 'absl.flags.DEFINE_bool', 'flags.DEFINE_bool', (['"""mdp"""', '(True)', '"""Run mdp experiments."""'], {}), "('mdp', True, 'Run mdp experiments.')\n", (724, 761), False, 'from absl import flags\n'), ((3400, 3471), 'differential_value_iteration.environments.garet.create', 'garet.create', ([], {'seed': '(42)', 'num_states': '(10)', 'num_actions': '(2)', 'branching_factor': '(3)'}), '(seed=42, num_states=10, num_actions=2, branching_factor=3)\n', (3412, 3471), False, 'from differential_value_iteration.environments import garet\n'), ((9743, 9756), 'absl.app.run', 'app.run', (['main'], {}), '(main)\n', (9750, 9756), False, 'from absl import app\n'), ((1591, 1626), 'differential_value_iteration.environments.micro.create_mrp1', 'micro.create_mrp1', ([], {'dtype': 'np.float32'}), '(dtype=np.float32)\n', (1608, 1626), False, 'from differential_value_iteration.environments import micro\n'), ((1634, 1669), 'differential_value_iteration.environments.micro.create_mrp2', 'micro.create_mrp2', ([], 
{'dtype': 'np.float32'}), '(dtype=np.float32)\n', (1651, 1669), False, 'from differential_value_iteration.environments import micro\n'), ((1677, 1712), 'differential_value_iteration.environments.micro.create_mrp3', 'micro.create_mrp3', ([], {'dtype': 'np.float32'}), '(dtype=np.float32)\n', (1694, 1712), False, 'from differential_value_iteration.environments import micro\n'), ((1750, 1774), 'numpy.zeros', 'np.zeros', (['env.num_states'], {}), '(env.num_states)\n', (1758, 1774), True, 'import numpy as np\n'), ((1822, 1846), 'numpy.zeros', 'np.zeros', (['env.num_states'], {}), '(env.num_states)\n', (1830, 1846), True, 'import numpy as np\n'), ((1982, 2055), 'differential_value_iteration.utils.draw', 'utils.draw', (['results', "(plot_dir + env.name + '_RVI_Evaluation_sync')", 'alphas'], {}), "(results, plot_dir + env.name + '_RVI_Evaluation_sync', alphas)\n", (1992, 2055), False, 'from differential_value_iteration import utils\n'), ((2192, 2266), 'differential_value_iteration.utils.draw', 'utils.draw', (['results', "(plot_dir + env.name + '_RVI_Evaluation_async')", 'alphas'], {}), "(results, plot_dir + env.name + '_RVI_Evaluation_async', alphas)\n", (2202, 2266), False, 'from differential_value_iteration import utils\n'), ((2417, 2502), 'differential_value_iteration.utils.draw', 'utils.draw', (['results', "(plot_dir + env.name + '_DVI_Evaluation_sync')", 'alphas', 'betas'], {}), "(results, plot_dir + env.name + '_DVI_Evaluation_sync', alphas, betas\n )\n", (2427, 2502), False, 'from differential_value_iteration import utils\n'), ((2664, 2749), 'differential_value_iteration.utils.draw', 'utils.draw', (['results', "(plot_dir + env.name + '_DVI_Evaluation_async')", 'alphas', 'betas'], {}), "(results, plot_dir + env.name + '_DVI_Evaluation_async', alphas,\n betas)\n", (2674, 2749), False, 'from differential_value_iteration import utils\n'), ((2910, 2995), 'differential_value_iteration.utils.draw', 'utils.draw', (['results', "(plot_dir + env.name + 
'_MDVI_Evaluation_sync')", 'alphas', 'betas'], {}), "(results, plot_dir + env.name + '_MDVI_Evaluation_sync', alphas,\n betas)\n", (2920, 2995), False, 'from differential_value_iteration import utils\n'), ((3157, 3243), 'differential_value_iteration.utils.draw', 'utils.draw', (['results', "(plot_dir + env.name + '_MDVI_Evaluation_async')", 'alphas', 'betas'], {}), "(results, plot_dir + env.name + '_MDVI_Evaluation_async', alphas,\n betas)\n", (3167, 3243), False, 'from differential_value_iteration import utils\n'), ((3618, 3642), 'numpy.zeros', 'np.zeros', (['env.num_states'], {}), '(env.num_states)\n', (3626, 3642), True, 'import numpy as np\n'), ((3690, 3714), 'numpy.zeros', 'np.zeros', (['env.num_states'], {}), '(env.num_states)\n', (3698, 3714), True, 'import numpy as np\n'), ((3844, 3914), 'differential_value_iteration.utils.draw', 'utils.draw', (['results', "(plot_dir + env.name + '_RVI_Control_sync')", 'alphas'], {}), "(results, plot_dir + env.name + '_RVI_Control_sync', alphas)\n", (3854, 3914), False, 'from differential_value_iteration import utils\n'), ((4045, 4116), 'differential_value_iteration.utils.draw', 'utils.draw', (['results', "(plot_dir + env.name + '_RVI_Control_async')", 'alphas'], {}), "(results, plot_dir + env.name + '_RVI_Control_async', alphas)\n", (4055, 4116), False, 'from differential_value_iteration import utils\n'), ((4261, 4338), 'differential_value_iteration.utils.draw', 'utils.draw', (['results', "(plot_dir + env.name + '_DVI_Control_sync')", 'alphas', 'betas'], {}), "(results, plot_dir + env.name + '_DVI_Control_sync', alphas, betas)\n", (4271, 4338), False, 'from differential_value_iteration import utils\n'), ((4499, 4577), 'differential_value_iteration.utils.draw', 'utils.draw', (['results', "(plot_dir + env.name + '_DVI_Control_async')", 'alphas', 'betas'], {}), "(results, plot_dir + env.name + '_DVI_Control_async', alphas, betas)\n", (4509, 4577), False, 'from differential_value_iteration import utils\n'), ((4738, 4817), 
'differential_value_iteration.utils.draw', 'utils.draw', (['results', "(plot_dir + env.name + '_MDVI_Control1_sync')", 'alphas', 'betas'], {}), "(results, plot_dir + env.name + '_MDVI_Control1_sync', alphas, betas)\n", (4748, 4817), False, 'from differential_value_iteration import utils\n'), ((4979, 5064), 'differential_value_iteration.utils.draw', 'utils.draw', (['results', "(plot_dir + env.name + '_MDVI_Control1_async')", 'alphas', 'betas'], {}), "(results, plot_dir + env.name + '_MDVI_Control1_async', alphas, betas\n )\n", (4989, 5064), False, 'from differential_value_iteration import utils\n'), ((5220, 5299), 'differential_value_iteration.utils.draw', 'utils.draw', (['results', "(plot_dir + env.name + '_MDVI_Control2_sync')", 'alphas', 'betas'], {}), "(results, plot_dir + env.name + '_MDVI_Control2_sync', alphas, betas)\n", (5230, 5299), False, 'from differential_value_iteration import utils\n'), ((5461, 5546), 'differential_value_iteration.utils.draw', 'utils.draw', (['results', "(plot_dir + env.name + '_MDVI_Control2_async')", 'alphas', 'betas'], {}), "(results, plot_dir + env.name + '_MDVI_Control2_async', alphas, betas\n )\n", (5471, 5546), False, 'from differential_value_iteration import utils\n'), ((5751, 5805), 'differential_value_iteration.algorithms.algorithms.RVI_Evaluation', 'algorithms.RVI_Evaluation', (['env', 'init_v', 'alpha', 'ref_idx'], {}), '(env, init_v, alpha, ref_idx)\n', (5776, 5805), False, 'from differential_value_iteration.algorithms import algorithms\n'), ((5901, 5952), 'differential_value_iteration.utils.run_alg', 'utils.run_alg', (['alg', 'update_rule', 'max_iters', 'epsilon'], {}), '(alg, update_rule, max_iters, epsilon)\n', (5914, 5952), False, 'from differential_value_iteration import utils\n'), ((6257, 6308), 'differential_value_iteration.algorithms.algorithms.RVI_Control', 'algorithms.RVI_Control', (['env', 'init_v', 'alpha', 'ref_idx'], {}), '(env, init_v, alpha, ref_idx)\n', (6279, 6308), False, 'from 
differential_value_iteration.algorithms import algorithms\n'), ((6401, 6452), 'differential_value_iteration.utils.run_alg', 'utils.run_alg', (['alg', 'update_rule', 'max_iters', 'epsilon'], {}), '(alg, update_rule, max_iters, epsilon)\n', (6414, 6452), False, 'from differential_value_iteration import utils\n'), ((1059, 1073), 'pathlib.Path', 'Path', (['plot_dir'], {}), '(plot_dir)\n', (1063, 1073), False, 'from pathlib import Path\n'), ((6828, 6891), 'differential_value_iteration.algorithms.algorithms.DVI_Evaluation', 'algorithms.DVI_Evaluation', (['env', 'init_v', 'init_r_bar', 'alpha', 'beta'], {}), '(env, init_v, init_r_bar, alpha, beta)\n', (6853, 6891), False, 'from differential_value_iteration.algorithms import algorithms\n'), ((7024, 7075), 'differential_value_iteration.utils.run_alg', 'utils.run_alg', (['alg', 'update_rule', 'max_iters', 'epsilon'], {}), '(alg, update_rule, max_iters, epsilon)\n', (7037, 7075), False, 'from differential_value_iteration import utils\n'), ((7462, 7522), 'differential_value_iteration.algorithms.algorithms.DVI_Control', 'algorithms.DVI_Control', (['env', 'init_v', 'init_r_bar', 'alpha', 'beta'], {}), '(env, init_v, init_r_bar, alpha, beta)\n', (7484, 7522), False, 'from differential_value_iteration.algorithms import algorithms\n'), ((7643, 7694), 'differential_value_iteration.utils.run_alg', 'utils.run_alg', (['alg', 'update_rule', 'max_iters', 'epsilon'], {}), '(alg, update_rule, max_iters, epsilon)\n', (7656, 7694), False, 'from differential_value_iteration import utils\n'), ((8085, 8149), 'differential_value_iteration.algorithms.algorithms.MDVI_Evaluation', 'algorithms.MDVI_Evaluation', (['env', 'init_v', 'init_r_bar', 'alpha', 'beta'], {}), '(env, init_v, init_r_bar, alpha, beta)\n', (8111, 8149), False, 'from differential_value_iteration.algorithms import algorithms\n'), ((8283, 8334), 'differential_value_iteration.utils.run_alg', 'utils.run_alg', (['alg', 'update_rule', 'max_iters', 'epsilon'], {}), '(alg, update_rule, 
max_iters, epsilon)\n', (8296, 8334), False, 'from differential_value_iteration import utils\n'), ((8723, 8785), 'differential_value_iteration.algorithms.algorithms.MDVI_Control1', 'algorithms.MDVI_Control1', (['env', 'init_v', 'init_r_bar', 'alpha', 'beta'], {}), '(env, init_v, init_r_bar, alpha, beta)\n', (8747, 8785), False, 'from differential_value_iteration.algorithms import algorithms\n'), ((8908, 8959), 'differential_value_iteration.utils.run_alg', 'utils.run_alg', (['alg', 'update_rule', 'max_iters', 'epsilon'], {}), '(alg, update_rule, max_iters, epsilon)\n', (8921, 8959), False, 'from differential_value_iteration import utils\n'), ((9348, 9410), 'differential_value_iteration.algorithms.algorithms.MDVI_Control2', 'algorithms.MDVI_Control2', (['env', 'init_v', 'init_r_bar', 'alpha', 'beta'], {}), '(env, init_v, init_r_bar, alpha, beta)\n', (9372, 9410), False, 'from differential_value_iteration.algorithms import algorithms\n'), ((9533, 9584), 'differential_value_iteration.utils.run_alg', 'utils.run_alg', (['alg', 'update_rule', 'max_iters', 'epsilon'], {}), '(alg, update_rule, max_iters, epsilon)\n', (9546, 9584), False, 'from differential_value_iteration import utils\n')]
|
from telebot import types
def my_input(bot, chat_id, txt, ResponseHandler):
    """Ask *txt* in the chat and route the user's next message to ResponseHandler."""
    prompt = bot.send_message(chat_id, text=txt)
    bot.register_next_step_handler(prompt, ResponseHandler)
# -----------------------------------------------------------------------
def my_inputInt(bot, chat_id, txt, ResponseHandler):
    """Ask for an integer; the reply is validated by my_inputInt_SecondPart,
    which re-asks on bad input and otherwise calls ResponseHandler(bot, chat_id, value)."""
    prompt = bot.send_message(chat_id, text=txt)
    bot.register_next_step_handler(
        prompt, my_inputInt_SecondPart, botQuestion=bot, txtQuestion=txt,
        ResponseHandler=ResponseHandler)
def my_inputInt_SecondPart(message, botQuestion, txtQuestion, ResponseHandler):
    """Second half of my_inputInt: validate the reply and dispatch the int.

    Fix: the original also wrapped the ResponseHandler call in the
    try/except, so a ValueError raised inside the *caller's* handler was
    silently treated as bad input and re-asked the question. The try body
    is now limited to the parsing itself.
    """
    chat_id = message.chat.id
    try:
        if message.content_type != "text":
            # Non-text replies (stickers, photos, ...) cannot be an integer.
            raise ValueError
        var_int = int(message.text)
    except ValueError:
        botQuestion.send_message(chat_id,
                                 text="Можно вводить ТОЛЬКО целое число в десятичной системе исчисления (символами от 0 до 9)!\nПопробуйте еще раз...")
        # Not true recursion: this just re-registers a fresh next-step handler.
        my_inputInt(botQuestion, chat_id, txtQuestion, ResponseHandler)
    else:
        # Parsed successfully — hand the integer to the caller's handler.
        ResponseHandler(botQuestion, chat_id, var_int)
def dz1(bot, chat_id):
    """HW1: ask the user's name and echo it back.

    Bug fixed: my_input() takes (bot, chat_id, txt, handler) and returns
    None — the answer arrives via a next-step callback — so the original
    one-argument call raised TypeError. Also drops an unused
    InlineKeyboardMarkup local.
    """
    def _on_name(message):
        bot.send_message(chat_id, text="обычно тебя зовут-" + message.text)
    my_input(bot, chat_id, 'Введите свое имя', _on_name)
def dz2(bot, chat_id):
    """HW2: ask the user's age and echo it.

    Bug fixed: my_inputInt() is callback-based, not value-returning.
    """
    def _on_age(bot_q, cid, age):
        bot_q.send_message(cid, text="твой возраст " + str(age))
    my_inputInt(bot, chat_id, 'Введите свой возраст', _on_age)
def dz3(bot, chat_id):
    """HW3: ask the age and send it repeated five times.

    Bug fixed: my_inputInt() is callback-based, not value-returning.
    """
    def _on_age(bot_q, cid, age):
        bot_q.send_message(cid, str(age) * 5)
    my_inputInt(bot, chat_id, 'Введите свой возраст', _on_age)
def dz4(bot, chat_id):
    """HW4: ask name then age, then greet by name (age is requested but,
    as in the original, not used in the reply).

    Bug fixed: both helpers are callback-based, not value-returning; the
    two questions are chained via nested handlers.
    """
    def _on_name(message):
        name = message.text
        def _on_age(bot_q, cid, age):
            bot_q.send_message(cid, text="ку," + name)
        my_inputInt(bot, chat_id, 'сколько тебе лет?', _on_age)
    my_input(bot, chat_id, 'как <NAME>?', _on_name)
def dz5(bot, chat_id):
    """HW5: ask the age and reply with an age-dependent joke.

    Bug fixed: my_inputInt() is callback-based, not value-returning.
    The original's quirky if / if-else structure (a >30 reply can be
    followed by the 18–30 reply) is preserved on purpose.
    """
    def _on_age(bot_q, cid, age):
        if age > 30:
            bot_q.send_message(cid,
                               text="Судья говорит свидетельнице: -Ваш возраст? -Все дают мне 18 лет! -Будете выдумывать, я вам сейчас пожизненное дам")
        if age < 18:
            bot_q.send_message(cid,
                               text="ты сейчас в таком возрасте, что покупая новые ботинки, должен задуматься: а не в них ли меня будут хоронить?")
        else:
            bot_q.send_message(cid, text="вы где то между 18 и 30 - shit")
    my_inputInt(bot, chat_id, "сколько тебе лет?", _on_age)
def dz6(bot, chat_id):
    """HW6: ask the name and send several slices of it.

    Bug fixed: my_input() is callback-based, not value-returning.
    """
    def _on_name(message):
        name = message.text
        length = len(name)
        bot.send_message(chat_id, str(name[1:length - 1:]))  # without first/last char
        bot.send_message(chat_id, str(name[::-1]))           # reversed
        bot.send_message(chat_id, str(name[-3::]))           # last 3 chars
        bot.send_message(chat_id, str(name[0:5:]))           # first 5 chars
    my_input(bot, chat_id, 'Введите свое имя', _on_name)
def dz7(bot, chat_id):
    """HW7: ask name (report its length), then age (report digit sum/product).

    Bug fixed: both helpers are callback-based, not value-returning; the
    questions are chained via handlers. Original reply strings (including
    the 'твоеи' typo) are kept byte-for-byte.
    """
    def _on_age(bot_q, cid, age):
        digit_sum = 0
        digit_product = 1
        while age > 0:
            digit = age % 10
            digit_sum += digit
            digit_product *= digit
            age //= 10
        bot_q.send_message(cid, text='сумма чисел твоего возраста: ' + str(digit_sum))
        bot_q.send_message(cid, text='произведение чисел твоего возраста: ' + str(digit_product))
    def _on_name(message):
        bot.send_message(chat_id, text='букв в твоеи имени: ' + str(len(message.text)))
        my_inputInt(bot, chat_id, "сколько тебе лет?", _on_age)
    my_input(bot, chat_id, 'Введите свое имя', _on_name)
def dz8(bot, chat_id):
    """HW8: ask the name and send it in title/lower/upper case.

    Bug fixed: my_input() is callback-based, not value-returning.
    """
    def _on_name(message):
        name = message.text
        bot.send_message(chat_id, name.title())
        bot.send_message(chat_id, name.lower())
        bot.send_message(chat_id, name.upper())
    my_input(bot, chat_id, 'Введите свое имя', _on_name)
def dz9(bot, chat_id):
    """HW9: keep asking for an age until a number in [0, 150] is entered.

    Bug fixed: my_input() is callback-based, not value-returning; the
    original `while True` loop becomes re-registration of the handler on
    invalid input.
    """
    def _on_age(message):
        answer = message.text
        if not answer.isnumeric():
            bot.send_message(chat_id, text='вы ввели не число, ошибка')
            my_input(bot, chat_id, 'сколько тебе лет?', _on_age)
        elif not 0 <= int(answer) <= 150:
            bot.send_message(chat_id, text='ваше число не входит в диапазон существующих')
            my_input(bot, chat_id, 'сколько тебе лет?', _on_age)
        else:
            bot.send_message(chat_id, text='ok')
    my_input(bot, chat_id, 'сколько тебе лет?', _on_age)
def dz10(bot, chat_id):
    """HW10: ask the name and answer 'ok' if it is alphabetic or whitespace.

    Bug fixed: my_input() is callback-based, not value-returning. Also
    drops an unused InlineKeyboardMarkup local.
    """
    def _on_name(message):
        name = message.text
        if name.isalpha() or name.isspace():
            bot.send_message(chat_id, text='ok')
        else:
            bot.send_message(chat_id, text='bad')
    my_input(bot, chat_id, 'введите свое имя', _on_name)
|
[
"telebot.types.InlineKeyboardMarkup"
] |
[((1295, 1323), 'telebot.types.InlineKeyboardMarkup', 'types.InlineKeyboardMarkup', ([], {}), '()\n', (1321, 1323), False, 'from telebot import types\n'), ((3887, 3915), 'telebot.types.InlineKeyboardMarkup', 'types.InlineKeyboardMarkup', ([], {}), '()\n', (3913, 3915), False, 'from telebot import types\n')]
|
import fastNLP as FN
import argparse
import os
import random
import numpy
import torch
def get_argparser():
    """Build the CLI parser for the shared training hyper-parameters."""
    parser = argparse.ArgumentParser()
    # (flag, type) pairs, all required — registration order kept for --help.
    required_options = (
        ('--lr', float),
        ('--w_decay', float),
        ('--lr_decay', float),
        ('--bsz', int),
        ('--ep', int),
        ('--drop', float),
        ('--gpu', str),
    )
    for flag, flag_type in required_options:
        parser.add_argument(flag, type=flag_type, required=True)
    parser.add_argument('--log', type=str, default=None)
    return parser
def add_model_args(parser):
    """Attach model-size options (all optional, with defaults) to *parser*."""
    defaults = (('--nhead', 6), ('--hdim', 50), ('--hidden', 300))
    for flag, default_value in defaults:
        parser.add_argument(flag, type=int, default=default_value)
    return parser
def set_gpu(gpu_str):
    """Pin CUDA device order and visibility via environment variables.

    Must run before the CUDA runtime initializes to take effect.
    """
    os.environ.update({
        "CUDA_DEVICE_ORDER": "PCI_BUS_ID",
        "CUDA_VISIBLE_DEVICES": gpu_str,
    })
def set_rng_seeds(seed=None):
    """Seed the python, numpy and torch RNGs (CPU and all CUDA devices).

    When *seed* is None, a random seed in [0, 65536) is drawn from numpy.
    Returns the seed actually used, so runs can be reproduced.
    """
    if seed is None:
        seed = numpy.random.randint(0, 65536)
    for seeder in (random.seed, numpy.random.seed,
                   torch.random.manual_seed, torch.cuda.manual_seed_all):
        seeder(seed)
    return seed
class TensorboardCallback(FN.Callback):
    """Log training signals to TensorBoard via fastNLP's callback hooks.

    Accepts one or more of the following option strings:
    - "model": per-step mean and gradient-mean of every trainable parameter
    - "loss": per-step training loss
    - "metric": every validation metric after each evaluation

    NOTE(review): ``SummaryWriter`` is referenced in ``on_train_begin`` but
    never imported in this file — starting training with this callback
    raises NameError. Presumably ``from tensorboardX import SummaryWriter``
    (or ``torch.utils.tensorboard``) is missing; confirm and fix.
    """

    def __init__(self, *options):
        super(TensorboardCallback, self).__init__()
        args = {"model", "loss", "metric"}
        # Reject unknown option strings up front.
        for opt in options:
            if opt not in args:
                raise ValueError(
                    "Unrecognized argument {}. Expect one of {}".format(opt, args))
        self.options = options
        self._summary_writer = None
        self.graph_added = False

    def on_train_begin(self):
        # Place the event files next to the checkpoints when a save path is
        # configured, otherwise under the current directory.
        save_dir = self.trainer.save_path
        if save_dir is None:
            path = os.path.join(
                "./", 'tensorboard_logs_{}'.format(self.trainer.start_time))
        else:
            path = os.path.join(
                save_dir, 'tensorboard_logs_{}'.format(self.trainer.start_time))
        self._summary_writer = SummaryWriter(path)

    def on_batch_begin(self, batch_x, batch_y, indices):
        if "model" in self.options and self.graph_added is False:
            # tensorboardX has a major bug here; drawing the model graph is
            # disabled for now (original code kept below, commented out).
            # from fastNLP.core.utils import _build_args
            # inputs = _build_args(self.trainer.model, **batch_x)
            # args = tuple([value for value in inputs.values()])
            # args = args[0] if len(args) == 1 else args
            # self._summary_writer.add_graph(self.trainer.model, torch.zeros(32, 2))
            self.graph_added = True

    def on_backward_begin(self, loss):
        # Scalar loss every step, plus (optionally) per-parameter statistics.
        if "loss" in self.options:
            self._summary_writer.add_scalar(
                "loss", loss.item(), global_step=self.trainer.step)
        if "model" in self.options:
            for name, param in self.trainer.model.named_parameters():
                if param.requires_grad:
                    self._summary_writer.add_scalar(
                        name + "_mean", param.mean(), global_step=self.trainer.step)
                    # self._summary_writer.add_scalar(name + "_std", param.std(), global_step=self.trainer.step)
                    self._summary_writer.add_scalar(name + "_grad_mean", param.grad.mean(),
                                                    global_step=self.trainer.step)

    def on_valid_end(self, eval_result, metric_key):
        if "metric" in self.options:
            for name, metric in eval_result.items():
                for metric_key, metric_val in metric.items():
                    self._summary_writer.add_scalar("valid_{}_{}".format(name, metric_key), metric_val,
                                                    global_step=self.trainer.step)

    def on_train_end(self):
        self._summary_writer.close()
        del self._summary_writer

    def on_exception(self, exception):
        # Close the writer even on abnormal termination so event files flush.
        if hasattr(self, "_summary_writer"):
            self._summary_writer.close()
            del self._summary_writer
|
[
"numpy.random.seed",
"argparse.ArgumentParser",
"torch.random.manual_seed",
"torch.cuda.manual_seed_all",
"numpy.random.randint",
"random.seed"
] |
[((123, 148), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (146, 148), False, 'import argparse\n'), ((1092, 1109), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (1103, 1109), False, 'import random\n'), ((1114, 1137), 'numpy.random.seed', 'numpy.random.seed', (['seed'], {}), '(seed)\n', (1131, 1137), False, 'import numpy\n'), ((1142, 1172), 'torch.random.manual_seed', 'torch.random.manual_seed', (['seed'], {}), '(seed)\n', (1166, 1172), False, 'import torch\n'), ((1177, 1209), 'torch.cuda.manual_seed_all', 'torch.cuda.manual_seed_all', (['seed'], {}), '(seed)\n', (1203, 1209), False, 'import torch\n'), ((1057, 1087), 'numpy.random.randint', 'numpy.random.randint', (['(0)', '(65536)'], {}), '(0, 65536)\n', (1077, 1087), False, 'import numpy\n')]
|
# -*- coding: utf-8 -*-
from django.db import models
class Nameable(models.Model):
    """Abstract base model contributing a short ``name`` field to subclasses."""

    # Human-readable name; max_length=40 per the original schema.
    name = models.CharField(max_length=40)

    class Meta:
        # Abstract: no table is created for this model itself.
        abstract = True
|
[
"django.db.models.CharField"
] |
[((97, 128), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)'}), '(max_length=40)\n', (113, 128), False, 'from django.db import models\n')]
|
import sys
from random import randint
class SymbolSet:
    """A pool of candidate password characters with a sampling weight."""

    def __init__(self, string: str, weight: int) -> None:
        self.__str = string
        self.__weight = weight

    def getWeight(self) -> int:
        """Return this pool's weight in the weighted random choice."""
        return self.__weight

    def getChar(self) -> str:
        """Return one uniformly random character from the pool."""
        position = randint(0, len(self.__str) - 1)
        return self.__str[position]
def argOfCorrectFormat(arg: str) -> bool:
    """True when *arg* looks like an option flag, i.e. starts with '-'."""
    return arg.startswith("-")
def main() -> int:
    """Generate a random password from the character sets selected on argv.

    Usage: prog LENGTH [-lc] [-uc] [-n] [-s]
    With no set flags, all four character sets are used. Each password
    character is drawn from a set chosen with probability proportional to
    the set's weight.

    Fixes vs. original: (1) if every flag was invalid, listOfSymbols stayed
    empty and randint(0, -1) raised ValueError — now reported instead of
    crashing; (2) the symbols string used an invalid '\\:' escape
    (DeprecationWarning) — now written as '\\\\:' with the same runtime value.
    """
    if len(sys.argv) <= 1:
        return 0
    passwordLen = int(sys.argv[1])
    if passwordLen <= 0:
        return 0
    lowerCases = SymbolSet("abcdefghijklmnopqrstuvwxyz", 260)
    upperCases = SymbolSet("ABCDEFGHIJKLMNOPQRSTUVWXYZ", 260)
    numbers = SymbolSet("1234567890", 400)
    symbols = SymbolSet("~`! @#$%^&*()_-+={[}]|\\:;\"'<,>.?/", 200)
    password = ""
    listOfSymbols = []
    if len(sys.argv) <= 2:
        listOfSymbols = [lowerCases, upperCases, numbers, symbols]
    else:
        for arg in sys.argv[2:]:
            if argOfCorrectFormat(arg):
                arg_sub = arg[1:]
                if arg_sub == "lc":
                    listOfSymbols.append(lowerCases)
                elif arg_sub == "uc":
                    listOfSymbols.append(upperCases)
                elif arg_sub == "n":
                    listOfSymbols.append(numbers)
                elif arg_sub == "s":
                    listOfSymbols.append(symbols)
                else:  # invalid arg
                    print("Invalid Argument Error: only -lc -uc -n -s are allowed")
    # Bug fix: previously an all-invalid flag list left listOfSymbols empty
    # and randint(0, -1) raised ValueError below.
    if not listOfSymbols:
        print("Invalid Argument Error: only -lc -uc -n -s are allowed")
        return 1
    totalWeight = sum(s.getWeight() for s in listOfSymbols)
    for _ in range(passwordLen):
        # Weighted choice: walk the sets, subtracting weights until the
        # roll lands inside one of them.
        roll = randint(0, totalWeight - 1)
        for symbolSet in listOfSymbols:
            if symbolSet.getWeight() <= roll:
                roll -= symbolSet.getWeight()
            else:
                password += symbolSet.getChar()
                break
    print(password)
    return 0
if __name__ == "__main__":
main()
|
[
"random.randint"
] |
[((1848, 1875), 'random.randint', 'randint', (['(0)', '(totalWeight - 1)'], {}), '(0, totalWeight - 1)\n', (1855, 1875), False, 'from random import randint\n')]
|
import requests
import pickle
import json
def make_call(location):
    """Fetch current PWS observations for *location* from the Weather.com API.

    Returns the decoded JSON payload, or None on a non-200 response.
    """
    apikey = '<KEY>'
    URL = ('https://api.weather.com/v2/pws/observations/current'
           '?apiKey={0}&stationId={1}&numericPrecision=decimal'
           '&format=json&units=e').format(apikey, location)
    headers = {
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1) AppleWebKit/605.1.fifteen (KHTML, like Gecko) Version/14.0.2 Safari/605.1.fifteen",
        "Accept": "application/json, text/plain, */*",
        "Accept-Language": "en-US,en;q=0.5",
        # "referrer": "https://www.wunderground.com/calendar/us/wa/carnation",
        "method": "GET",
        "mode": "cors"
    }
    r = requests.get(URL, headers=headers)
    if r.status_code != 200:
        print("Ignored : " + str(location))
        return None
    return json.loads(r.content)
class WuData():
    """Flatten a Weather.com PWS observation payload into attributes + a dict."""

    def __init__(self, response_in, location):
        """*response_in* is the decoded JSON returned by make_call()."""
        observation = response_in['observations'][0]
        imperial = observation['imperial']
        # out_dict mirrors the attributes below; the location value is
        # wrapped in literal double quotes (original behavior, kept as-is).
        self.out_dict = {
            'location': '"' + location + '"',
            'current_temp': imperial['temp'],
            'current_pressure': imperial['pressure'],
            'today_precip': imperial['precipTotal'],
            'current_humidity': observation['humidity'],
            'wind_speed': imperial['windSpeed'],
            'wind_direction': observation['winddir'],
            'wind_gust': imperial['windGust'],
            'wind_chill': imperial['windChill'],
            'dew_point': imperial['dewpt'],
        }
        self.current_temp = imperial['temp']
        self.current_pressure = imperial['pressure']
        self.today_precip = imperial['precipTotal']
        self.humidity = observation['humidity']
        self.wind_speed = imperial['windSpeed']
        self.wind_direction = observation['winddir']
        self.wind_gust = imperial['windGust']
        self.wind_chill = imperial['windChill']
        self.dew_point = imperial['dewpt']
if __name__ == "__main__":
location_list = ['KWACARNA1', 'KWAFALLC80']
page_list = []
for location in location_list:
page = make_call(location)
page_list.append((page, location))
for tup in page_list:
conditions = WuData(tup[0], tup[1])
print (conditions.out_dict)
|
[
"json.loads",
"requests.get"
] |
[((690, 724), 'requests.get', 'requests.get', (['URL'], {'headers': 'headers'}), '(URL, headers=headers)\n', (702, 724), False, 'import requests\n'), ((778, 799), 'json.loads', 'json.loads', (['r.content'], {}), '(r.content)\n', (788, 799), False, 'import json\n')]
|
"""
check if any items that are ready for processing exist in extract queue
ready for processing = status set to 0
extract queue = mongodb db/collection: asdf->extracts
"""
# ----------------------------------------------------------------------------
import sys
import os

# CLI: argv[1] = config branch name, argv[2] = job type to check.
branch = sys.argv[1]

# Make the sibling 'utils' directory (two levels up from this file)
# importable so config_utility can be loaded.
utils_dir = os.path.join(
    os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'utils')

sys.path.insert(0, utils_dir)

from config_utility import BranchConfig

config = BranchConfig(branch=branch)
config.test_connection()

# ----------------------------------------------------------------------------
# check mongodb connection
if config.connection_status != 0:
    print("error")
    # sys.exit("connection status error: " + str(config.connection_error))

# ----------------------------------------------------------------------------

job_type = sys.argv[2]

import pymongo
client = pymongo.MongoClient(config.database)
c_extracts = client.asdf.extracts

# A task is "ready" when status is 0 (queued), -1 or 2 (retryable) and it has
# not exhausted its attempt budget (< 5). Job types narrow the selection.
if job_type == "det":
    # deterministic queue: additionally require a non-negative priority
    request_count = c_extracts.find({'status': {'$in': [0, -1, 2]}, '$or': [{'attempts': {'$exists': False}}, {'attempts': {'$lt': 5}}], 'priority': {'$gt': -1}}).count()
elif job_type == "default":
    request_count = c_extracts.find({'status': {'$in': [0, -1, 2]}, '$or': [{'attempts': {'$exists': False}}, {'attempts': {'$lt': 5}}]}).count()
elif job_type == "raster":
    request_count = c_extracts.find({'status': {'$in': [0, -1, 2]}, '$or': [{'attempts': {'$exists': False}}, {'attempts': {'$lt': 5}}], 'classification': 'raster'}).count()
elif job_type == "msr":
    request_count = c_extracts.find({'status': {'$in': [0, -1, 2]}, '$or': [{'attempts': {'$exists': False}}, {'attempts': {'$lt': 5}}], 'classification': 'msr'}).count()
elif "errors" in job_type:
    # error sweep: tasks that failed 5-19 times (>= 20 is given up on)
    request_count = c_extracts.find({'status': {'$in': [0, -1, 2]}, '$and': [{'attempts': {'$gte': 5}}, {'attempts': {'$lt': 20}}]}).count()
else:
    request_count = "invalid"

# Single-word verdict on stdout for the calling shell script to parse.
if request_count == "invalid":
    print("invalid")
elif request_count > 0:
    print("ready")
else:
    print("empty")
|
[
"pymongo.MongoClient",
"os.path.abspath",
"config_utility.BranchConfig",
"sys.path.insert"
] |
[((401, 430), 'sys.path.insert', 'sys.path.insert', (['(0)', 'utils_dir'], {}), '(0, utils_dir)\n', (416, 430), False, 'import sys\n'), ((482, 509), 'config_utility.BranchConfig', 'BranchConfig', ([], {'branch': 'branch'}), '(branch=branch)\n', (494, 509), False, 'from config_utility import BranchConfig\n'), ((903, 939), 'pymongo.MongoClient', 'pymongo.MongoClient', (['config.database'], {}), '(config.database)\n', (922, 939), False, 'import pymongo\n'), ((363, 388), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (378, 388), False, 'import os\n')]
|
#!/usr/env/python python3
# -*- coding: utf-8 -*-
# @File : vad_util.py
# @Time : 2018/8/29 13:37
# @Software : PyCharm
import numpy as np
from math import log
import librosa
def mse(data):
    """Root-mean-square of *data* (note: despite the name, this is RMS)."""
    mean_square = (data ** 2).mean()
    return mean_square ** 0.5
def dBFS(data):
    """Decibels relative to full scale for 16-bit audio; returns 0 for silence."""
    rms = mse(data)
    if rms == 0.0:
        return 0
    full_scale = 2 ** 16 / 2  # peak magnitude of signed 16-bit samples
    return 20 * log(rms / full_scale, 10)
def cut_wav(data, per_f=150):
    """Trim *data* to a whole number of frames and reshape to (frames, per_f)."""
    num_frames = int(len(data) / per_f)
    trimmed = data[:num_frames * per_f]
    return trimmed.reshape((num_frames, per_f))
def remove_silence(source_sound, common_sound, silence_threshold=140, chunk_size=148):
source_sounds = cut_wav(source_sound, chunk_size)
common_sounds = cut_wav(common_sound, chunk_size)
y = []
for i in range(common_sounds.shape[0]):
db = -dBFS(common_sounds[i, ...])
if db < silence_threshold:
y.append(source_sounds[i])
# print("db", i, db)
y = np.array(y)
y = y.flatten()
return y
def comman(sound):
abs_sound = np.abs(sound)
return sound / np.max(abs_sound)
if __name__ == '__main__':
wav_data, rate = librosa.load("BAC009S0908W0161.wav", sr=16000)
y = remove_silence(wav_data, wav_data, 139, 300)
librosa.output.write_wav("c.wav", y, sr=16000)
|
[
"numpy.abs",
"librosa.output.write_wav",
"numpy.max",
"numpy.array",
"librosa.load",
"math.log"
] |
[((973, 984), 'numpy.array', 'np.array', (['y'], {}), '(y)\n', (981, 984), True, 'import numpy as np\n'), ((1055, 1068), 'numpy.abs', 'np.abs', (['sound'], {}), '(sound)\n', (1061, 1068), True, 'import numpy as np\n'), ((1157, 1203), 'librosa.load', 'librosa.load', (['"""BAC009S0908W0161.wav"""'], {'sr': '(16000)'}), "('BAC009S0908W0161.wav', sr=16000)\n", (1169, 1203), False, 'import librosa\n'), ((1262, 1308), 'librosa.output.write_wav', 'librosa.output.write_wav', (['"""c.wav"""', 'y'], {'sr': '(16000)'}), "('c.wav', y, sr=16000)\n", (1286, 1308), False, 'import librosa\n'), ((375, 411), 'math.log', 'log', (['(mse_data / max_possible_val)', '(10)'], {}), '(mse_data / max_possible_val, 10)\n', (378, 411), False, 'from math import log\n'), ((1088, 1105), 'numpy.max', 'np.max', (['abs_sound'], {}), '(abs_sound)\n', (1094, 1105), True, 'import numpy as np\n')]
|
import os
import sys
import argparse
import logging
import tqdm
import numpy as np
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from data.tokenizer import Tokenizer
from util.utils import load_bestmodel
def translate(args, src_sentence, generated_max_length=100):
# prepare best model
best_model_path = os.path.join(args.final_model_path,"bestmodel.pth")
try:
best_model, _= load_bestmodel(best_model_path)
best_model.eval()
except OSError:
logging.info("Check if there is bestmodel.pth file in final_results folder")
# prepare device
device = args.device
# prepare tokenizer for both enc, dec
enc_tokenizer = Tokenizer(args.enc_language,args.enc_max_len)
dec_tokenizer = Tokenizer(args.dec_language,args.dec_max_len)
# prepare vocabulary for both enc, dec
enc_vocabulary = enc_tokenizer.get_vocab()
dec_vocabulary = dec_tokenizer.get_vocab()
# convert src_sentence, and measure the length of the src_sentence
src_sentence = src_sentence.lower() # delete if you do not need cased
src_sentence_length = len(src_sentence)
logging.info(f"The original {args.enc_language} sentence you provided was : ")
logging.info(src_sentence)
# encode the given src_sentence with enc_tokenizer
src_tensor = enc_tokenizer.encode(src_sentence).input_ids # [bs, sl]
enc_mask = best_model.generate_padding_mask(src_tensor, src_tensor, "src", "src")
logging.info(f"The {args.enc_language} Tokenizer converted sentence such as : ")
logging.info(src_tensor)
# prepare the pred_sentence
pred_tensor=[dec_tokenizer.bos_token] # now : [1] -> goal : [generated_max_length]
# translate the given sentence into target language
with torch.no_grad():
# pass through encoder
encoder_output = best_model.Encoder(encoded_src_sentence, enc_mask) # [bs, sl, hs]
for idx in range(generated_max_length):
tgt_tensor = torch.LongTensor(pred_tensor).to(device)
enc_dec_mask = best_model.geneate_padding_mask(tgt_tensor, enc_tensor, "src", "tgt")
dec_mask = best_model.generate_padding_mask(tgt_tensor, tgt_tensor, "tgt", "tgt")
# pass through decoder
decoder_output = best_model.Decoder(tgt_tensor, encoder_output, enc_dec_mask, dec_mask) # [bs, sl, hs]
# append predicted_token into pred_tensor
predicted_token = output.argmax(dim=2)[:,-1].item()
pred_tensor.append(predicted_token)
# ENDING CONDITION : facing eos token
if predicted_token == dec_vocabulary.eos_token :
break
# decode with dec_tokenizer
translated_result = dec_tokenizer.decode(pred_tensor)
translated_result = translated_result[0]
# convert tensor into string
translated_sentence = ""
for tokens in translated_result:
translated_sentence += tokens
if tokens !='.':
translated_sentence += " "
return translated_sentence
|
[
"data.tokenizer.Tokenizer",
"util.utils.load_bestmodel",
"torch.LongTensor",
"logging.info",
"torch.no_grad",
"os.path.join"
] |
[((410, 462), 'os.path.join', 'os.path.join', (['args.final_model_path', '"""bestmodel.pth"""'], {}), "(args.final_model_path, 'bestmodel.pth')\n", (422, 462), False, 'import os\n'), ((778, 824), 'data.tokenizer.Tokenizer', 'Tokenizer', (['args.enc_language', 'args.enc_max_len'], {}), '(args.enc_language, args.enc_max_len)\n', (787, 824), False, 'from data.tokenizer import Tokenizer\n'), ((845, 891), 'data.tokenizer.Tokenizer', 'Tokenizer', (['args.dec_language', 'args.dec_max_len'], {}), '(args.dec_language, args.dec_max_len)\n', (854, 891), False, 'from data.tokenizer import Tokenizer\n'), ((1234, 1312), 'logging.info', 'logging.info', (['f"""The original {args.enc_language} sentence you provided was : """'], {}), "(f'The original {args.enc_language} sentence you provided was : ')\n", (1246, 1312), False, 'import logging\n'), ((1318, 1344), 'logging.info', 'logging.info', (['src_sentence'], {}), '(src_sentence)\n', (1330, 1344), False, 'import logging\n'), ((1575, 1660), 'logging.info', 'logging.info', (['f"""The {args.enc_language} Tokenizer converted sentence such as : """'], {}), "(f'The {args.enc_language} Tokenizer converted sentence such as : '\n )\n", (1587, 1660), False, 'import logging\n'), ((1661, 1685), 'logging.info', 'logging.info', (['src_tensor'], {}), '(src_tensor)\n', (1673, 1685), False, 'import logging\n'), ((496, 527), 'util.utils.load_bestmodel', 'load_bestmodel', (['best_model_path'], {}), '(best_model_path)\n', (510, 527), False, 'from util.utils import load_bestmodel\n'), ((1878, 1893), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1891, 1893), False, 'import torch\n'), ((585, 661), 'logging.info', 'logging.info', (['"""Check if there is bestmodel.pth file in final_results folder"""'], {}), "('Check if there is bestmodel.pth file in final_results folder')\n", (597, 661), False, 'import logging\n'), ((2096, 2125), 'torch.LongTensor', 'torch.LongTensor', (['pred_tensor'], {}), '(pred_tensor)\n', (2112, 2125), False, 'import torch\n')]
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import argparse
import cv2, os
from fcos_core.config import cfg
from predictor import VisDroneDemo
import time
def main():
parser = argparse.ArgumentParser(description="PyTorch Object Detection Webcam Demo")
parser.add_argument(
"--config-file",
default="configs/visdrone_tdts/tdts_R_50_FPN_1x_640x1024_visdrone_cn_mw1.5-nms0.yaml",
metavar="FILE",
help="path to config file",
)
parser.add_argument(
"--weights",
default="models/tdts_R_50_FPN_1x_640x1024_visdrone_cn_mw1.5-nms0.pth",
metavar="FILE",
help="path to the trained model",
)
parser.add_argument(
"--images-dir",
default="demo/images",
metavar="DIR",
help="path to demo images directory",
)
parser.add_argument(
"--results-dir",
default="demo/results",
metavar="DIR",
help="path to demo images directory",
)
parser.add_argument(
"--min-image-size",
type=int,
default=640, # 800
help="Smallest size of the image to feed to the model. "
"Model was trained with 800, which gives best results",
)
parser.add_argument(
"opts",
help="Modify model config options using the command-line",
default=None,
nargs=argparse.REMAINDER,
)
args = parser.parse_args()
# load config from file and command-line arguments
cfg.merge_from_file(args.config_file)
cfg.merge_from_list(args.opts)
cfg.MODEL.WEIGHT = args.weights
cfg.freeze()
# The following per-class thresholds are computed by maximizing
# per-class f-measure in their precision-recall curve.
# Please see compute_thresholds_for_classes() in coco_eval.py for details.
thresholds_for_classes = [1.0, 0.4543384611606598, 0.4528161883354187, 0.4456373155117035,
0.4930519461631775, 0.49669983983039856, 0.4916415810585022,
0.43324407935142517, 0.4070464074611664, 0.49178892374038696,
0.43258824944496155, 1.0]
demo_im_names = os.listdir(args.images_dir)
demo_im_names.sort()
print('{} images to test'.format(len(demo_im_names)))
# prepare object that handles inference plus adds predictions on top of image
demo = VisDroneDemo(
cfg,
confidence_thresholds_for_classes=thresholds_for_classes,
min_image_size=args.min_image_size
)
if args.results_dir:
if not os.path.exists(args.results_dir):
os.mkdir(args.results_dir)
# plt
for i, im_name in enumerate(demo_im_names):
img = cv2.imread(os.path.join(args.images_dir, im_name))
if img is None:
continue
start_time = time.time()
demo.run_det_on_opencv_image_plt(img, os.path.join(args.results_dir, im_name))
print("{}, {}\tinference time: {:.2f}s".format(i, im_name, time.time() - start_time))
print("Done!")
else:
for im_name in demo_im_names:
img = cv2.imread(os.path.join(args.images_dir, im_name))
if img is None:
continue
start_time = time.time()
composite = demo.run_on_opencv_image(img)
print("{}\tinference time: {:.2f}s".format(im_name, time.time() - start_time))
cv2.imwrite(os.path.join('result', im_name), composite)
# cv2.imshow(im_name, composite)
print("Press any keys to exit ...")
cv2.waitKey()
cv2.destroyAllWindows()
if __name__ == "__main__":
main()
|
[
"os.mkdir",
"fcos_core.config.cfg.merge_from_file",
"fcos_core.config.cfg.freeze",
"argparse.ArgumentParser",
"predictor.VisDroneDemo",
"cv2.waitKey",
"fcos_core.config.cfg.merge_from_list",
"os.path.exists",
"time.time",
"cv2.destroyAllWindows",
"os.path.join",
"os.listdir"
] |
[((211, 286), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""PyTorch Object Detection Webcam Demo"""'}), "(description='PyTorch Object Detection Webcam Demo')\n", (234, 286), False, 'import argparse\n'), ((1507, 1544), 'fcos_core.config.cfg.merge_from_file', 'cfg.merge_from_file', (['args.config_file'], {}), '(args.config_file)\n', (1526, 1544), False, 'from fcos_core.config import cfg\n'), ((1549, 1579), 'fcos_core.config.cfg.merge_from_list', 'cfg.merge_from_list', (['args.opts'], {}), '(args.opts)\n', (1568, 1579), False, 'from fcos_core.config import cfg\n'), ((1621, 1633), 'fcos_core.config.cfg.freeze', 'cfg.freeze', ([], {}), '()\n', (1631, 1633), False, 'from fcos_core.config import cfg\n'), ((2201, 2228), 'os.listdir', 'os.listdir', (['args.images_dir'], {}), '(args.images_dir)\n', (2211, 2228), False, 'import cv2, os\n'), ((2406, 2521), 'predictor.VisDroneDemo', 'VisDroneDemo', (['cfg'], {'confidence_thresholds_for_classes': 'thresholds_for_classes', 'min_image_size': 'args.min_image_size'}), '(cfg, confidence_thresholds_for_classes=thresholds_for_classes,\n min_image_size=args.min_image_size)\n', (2418, 2521), False, 'from predictor import VisDroneDemo\n'), ((3618, 3631), 'cv2.waitKey', 'cv2.waitKey', ([], {}), '()\n', (3629, 3631), False, 'import cv2, os\n'), ((3640, 3663), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (3661, 3663), False, 'import cv2, os\n'), ((2590, 2622), 'os.path.exists', 'os.path.exists', (['args.results_dir'], {}), '(args.results_dir)\n', (2604, 2622), False, 'import cv2, os\n'), ((2636, 2662), 'os.mkdir', 'os.mkdir', (['args.results_dir'], {}), '(args.results_dir)\n', (2644, 2662), False, 'import cv2, os\n'), ((2877, 2888), 'time.time', 'time.time', ([], {}), '()\n', (2886, 2888), False, 'import time\n'), ((3296, 3307), 'time.time', 'time.time', ([], {}), '()\n', (3305, 3307), False, 'import time\n'), ((2759, 2797), 'os.path.join', 'os.path.join', (['args.images_dir', 
'im_name'], {}), '(args.images_dir, im_name)\n', (2771, 2797), False, 'import cv2, os\n'), ((2939, 2978), 'os.path.join', 'os.path.join', (['args.results_dir', 'im_name'], {}), '(args.results_dir, im_name)\n', (2951, 2978), False, 'import cv2, os\n'), ((3178, 3216), 'os.path.join', 'os.path.join', (['args.images_dir', 'im_name'], {}), '(args.images_dir, im_name)\n', (3190, 3216), False, 'import cv2, os\n'), ((3477, 3508), 'os.path.join', 'os.path.join', (['"""result"""', 'im_name'], {}), "('result', im_name)\n", (3489, 3508), False, 'import cv2, os\n'), ((3051, 3062), 'time.time', 'time.time', ([], {}), '()\n', (3060, 3062), False, 'import time\n'), ((3426, 3437), 'time.time', 'time.time', ([], {}), '()\n', (3435, 3437), False, 'import time\n')]
|
import os
import shutil
from pdb import set_trace
from gym.envs.box2d.car_racing import CarRacing
import numpy as np
import pandas as pd
def find_roads():
path = './touching_tracks_tests'
# Check if dir exists TODO
if os.path.isdir(path):
# Remove files TODO
shutil.rmtree(path)
# Create dir TODO
os.mkdir(path)
env = CarRacing(
allow_reverse=False,
show_info_panel=False,
num_tracks=2,
num_lanes=2,
num_lanes_changes=0,
num_obstacles=100,
random_obstacle_x_position=False,
random_obstacle_shape=False,)
env.change_zoom()
for j in range(100):
env.reset()
for i in range(len(env.tracks[0])):
prev_tile = env.tracks[0][i-2]
curr_tile = env.tracks[0][i-1]
next_tile = env.tracks[0][i]
if any(curr_tile[0] != prev_tile[1]):
set_trace()
elif any(curr_tile[1] != next_tile[0]):
set_trace()
env.screenshot(path,name=str(j),quality='high')
np.save(path + "/info_" + str(j) + ".csv", env.info)
np.save(path + "/track0_" + str(j) + ".csv", env.tracks[0])
np.save(path + "/track1_" + str(j) + ".csv", env.tracks[1])
if __name__ == '__main__':
find_roads()
|
[
"os.mkdir",
"os.path.isdir",
"pdb.set_trace",
"shutil.rmtree",
"gym.envs.box2d.car_racing.CarRacing"
] |
[((234, 253), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (247, 253), False, 'import os\n'), ((337, 351), 'os.mkdir', 'os.mkdir', (['path'], {}), '(path)\n', (345, 351), False, 'import os\n'), ((363, 554), 'gym.envs.box2d.car_racing.CarRacing', 'CarRacing', ([], {'allow_reverse': '(False)', 'show_info_panel': '(False)', 'num_tracks': '(2)', 'num_lanes': '(2)', 'num_lanes_changes': '(0)', 'num_obstacles': '(100)', 'random_obstacle_x_position': '(False)', 'random_obstacle_shape': '(False)'}), '(allow_reverse=False, show_info_panel=False, num_tracks=2,\n num_lanes=2, num_lanes_changes=0, num_obstacles=100,\n random_obstacle_x_position=False, random_obstacle_shape=False)\n', (372, 554), False, 'from gym.envs.box2d.car_racing import CarRacing\n'), ((291, 310), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (304, 310), False, 'import shutil\n'), ((982, 993), 'pdb.set_trace', 'set_trace', ([], {}), '()\n', (991, 993), False, 'from pdb import set_trace\n'), ((1062, 1073), 'pdb.set_trace', 'set_trace', ([], {}), '()\n', (1071, 1073), False, 'from pdb import set_trace\n')]
|
import luigi
from ...abstract_method_exception import AbstractMethodException
from ...lib.test_environment.populate_data import PopulateEngineSmallTestDataToDatabase
from ...lib.test_environment.upload_exa_jdbc import UploadExaJDBC
from ...lib.test_environment.upload_virtual_schema_jdbc_adapter import UploadVirtualSchemaJDBCAdapter
from ...lib.base.dependency_logger_base_task import DependencyLoggerBaseTask
from ...lib.data.container_info import ContainerInfo
from ...lib.data.database_credentials import DatabaseCredentialsParameter
from ...lib.data.database_info import DatabaseInfo
from ...lib.data.docker_network_info import DockerNetworkInfo
from ...lib.data.environment_info import EnvironmentInfo
from ...lib.test_environment.general_spawn_test_environment_parameter import \
GeneralSpawnTestEnvironmentParameter
from ...lib.test_environment.spawn_test_container import SpawnTestContainer
DATABASE = "database"
TEST_CONTAINER = "test_container"
class AbstractSpawnTestEnvironment(DependencyLoggerBaseTask,
GeneralSpawnTestEnvironmentParameter,
DatabaseCredentialsParameter):
environment_name = luigi.Parameter()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.test_container_name = f"""test_container_{self.environment_name}"""
self.network_name = f"""db_network_{self.environment_name}"""
def get_environment_type(self):
raise AbstractMethodException()
def run_task(self):
test_environment_info = yield from self._attempt_database_start()
yield from self._setup_test_database(test_environment_info)
self.return_object(test_environment_info)
def _attempt_database_start(self):
is_database_ready = False
attempt = 0
database_info = None
test_container_info = None
while not is_database_ready and attempt < self.max_start_attempts:
network_info, database_info, is_database_ready, test_container_info = \
yield from self._start_database(attempt)
attempt += 1
if not is_database_ready and not attempt < self.max_start_attempts:
raise Exception(f"Maximum attempts {attempt} to start the database reached.")
test_environment_info = \
EnvironmentInfo(name=self.environment_name,
env_type=self.get_environment_type(),
database_info=database_info,
test_container_info=test_container_info,
network_info=network_info)
return test_environment_info
def _start_database(self, attempt):
network_info = yield from self._create_network(attempt)
database_info, test_container_info = \
yield from self._spawn_database_and_test_container(network_info, attempt)
is_database_ready = yield from self._wait_for_database(
database_info, test_container_info, attempt)
return network_info, database_info, is_database_ready, test_container_info
def _create_network(self, attempt):
network_info_future = yield from self.run_dependencies(self.create_network_task(attempt))
network_info = self.get_values_from_future(network_info_future)
return network_info
def create_network_task(self, attempt: int):
raise AbstractMethodException()
def _spawn_database_and_test_container(self,
network_info: DockerNetworkInfo,
attempt: int):
database_and_test_container_info_future = \
yield from self.run_dependencies({
TEST_CONTAINER: SpawnTestContainer(
environment_name=self.environment_name,
test_container_name=self.test_container_name,
network_info=network_info,
ip_address_index_in_subnet=1,
reuse_test_container=self.reuse_test_container,
no_test_container_cleanup_after_end=self.no_test_container_cleanup_after_end,
attempt=attempt),
DATABASE: self.create_spawn_database_task(network_info, attempt)
})
database_and_test_container_info = \
self.get_values_from_futures(database_and_test_container_info_future)
test_container_info = database_and_test_container_info[TEST_CONTAINER]
database_info = database_and_test_container_info[DATABASE]
return database_info, test_container_info
def create_spawn_database_task(self,
network_info: DockerNetworkInfo,
attempt: int):
raise AbstractMethodException()
def _wait_for_database(self,
database_info: DatabaseInfo,
test_container_info: ContainerInfo,
attempt: int):
database_ready_target_future = \
yield from self.run_dependencies(
self.create_wait_for_database_task(
attempt, database_info, test_container_info))
is_database_ready = self.get_values_from_futures(database_ready_target_future)
return is_database_ready
def create_wait_for_database_task(self,
attempt: int,
database_info: DatabaseInfo,
test_container_info: ContainerInfo):
raise AbstractMethodException()
def _setup_test_database(self, test_environment_info: EnvironmentInfo):
# TODO check if database is setup
self.logger.info("Setup database")
upload_tasks = [
self.create_child_task_with_common_params(
UploadExaJDBC,
test_environment_info=test_environment_info,
reuse_uploaded=self.reuse_database_setup),
self.create_child_task_with_common_params(
UploadVirtualSchemaJDBCAdapter,
test_environment_info=test_environment_info,
reuse_uploaded=self.reuse_database_setup),
self.create_child_task_with_common_params(
PopulateEngineSmallTestDataToDatabase,
test_environment_info=test_environment_info,
reuse_data=self.reuse_database_setup
)]
yield from self.run_dependencies(upload_tasks)
|
[
"luigi.Parameter"
] |
[((1188, 1205), 'luigi.Parameter', 'luigi.Parameter', ([], {}), '()\n', (1203, 1205), False, 'import luigi\n')]
|
import discord
import requests
import json
import asyncio
from os import environ
from discord.ext import commands
from io import StringIO
from urllib.request import urlopen
from twitch import TwitchClient
class Emojis:
def __init__(self, bot):
self.bot = bot
self.messages = []
self.client = TwitchClient(
client_id= environ['twitch_key']
)
self.bot.loop.create_task(self.turn_off_buttons())
@commands.command(pass_context=True, name='emojis', aliases=['e', 'emoji'])
async def _emojis(self, ctx, name, mode='chat'):
try:
users = self.client.users.translate_usernames_to_ids([name])
user = users[0]
except:
await self.bot.send_message(ctx.message.channel, 'Can\'t find user. Use ``t?search (query)`` to find streams, users and more!')
return
id = user['id']
link = f'https://api.twitchemotes.com/api/v4/channels/{id}'
response = requests.get(link)
info = json.loads(response.text)
if info == {"error":"Channel not found"}:
await self.bot.send_message(ctx.message.channel, 'Channel not found')
#Generate dict {'EMOJI NAME': EMOJI_IMAGE_LINK, ...}
mode = mode.lower()
if mode == 'chat':
emojis = {}
for item in info['emotes']:
emojis[item['code']] = f'https://static-cdn.jtvnw.net/emoticons/v1/{item["id"]}/4.0'
elif mode == 'sub':
emojis = await self.get_emojis_links(info['channel_name'], info['subscriber_badges'])
if emojis == None:
await self.bot.send_message(ctx.message.channel, 'Subscriber emotes not found')
return
elif mode == 'bits':
emojis = await self.get_emojis_links(info['channel_name'], info['bits_badges'])
if emojis == None:
await self.bot.send_message(ctx.message.channel, 'Cheer emotes not found')
return
else:
await self.bot.send_message(ctx.message.channel, 'Unknown mode')
#if emotes more then 50
while len(emojis) > 50:
title = 'Error'
description = f'''
Too many emojis, write what you want to add like 6-11, 19, 20, 22-50
*6-11 is 6, 7, 8, 9, 10, 11 (emojis)*
'''
embed = discord.Embed(title=title, description=description, color=0x6441A4)
await self.bot.send_message(ctx.message.channel, embed=embed)
answer = await self.bot.wait_for_message(author=ctx.message.author)
text = answer.content.replace(' ', '')
emojis_splited = []
for item in text.split(','):
if '-' in item:
first_num = int(item[:item.find('-')])
end_num = int(item[item.find('-')+1:])
for num in range(first_num, end_num+1):
#num-1 to change 0-49 > 1-50
emojis_splited.append(num-1)
else:
#int(item)-1-1 to change 0,1,2 > 1,2,3
emojis_splited.append(int(item)-1)
n=0
for key in emojis.copy().keys():
if n not in emojis_splited:
del emojis[key]
n+=1
title = 'Warning'
description = f'Do you really want to add ``{len(emojis)}`` **{user["display_name"]}\'s** emojis to this server?'
embed = discord.Embed(title=title, description=description, color=0x6441A4)
message = await self.bot.send_message(ctx.message.channel, embed=embed)
for emoji in ['❌', '✅']:
await self.bot.add_reaction(message, emoji)
answer = await self.bot.wait_for_reaction(['❌', '✅'], message=message, user=ctx.message.author)
#Get emoji answers
if answer.reaction.emoji == '❌':
embed = discord.Embed(title=title, description='Canceled.', color=0x6441A4)
await self.bot.edit_message(message, embed=embed)
for emoji in ['❌', '✅']:
await self.bot.remove_reaction(message, emoji, self.bot.user)
elif answer.reaction.emoji == '✅':
#Send loading message & remove reaction
description='''
Loading...
*(if loading is infinte - wait ~30min, discord api can't load emoji after removing them)*'''
embed = discord.Embed(title=title, description=description, color=0x6441A4)
await self.bot.edit_message(message, embed=embed)
for emoji in ['❌', '✅']:
await self.bot.remove_reaction(message, emoji, self.bot.user)
#Add emojis to server
n=0
for key in emojis.copy().keys():
try:
image = urlopen(emojis[key]).read()
await self.bot.create_custom_emoji(server=ctx.message.server, name=key, image=image)
except Exception as e:
args = e.args
if args[0] == 'BAD REQUEST (status code: 400): Maximum number of emojis reached (50)':
description = 'Maximum number of emojis reached'
embed = discord.Embed(title=title, description=description, color=0x6441A4)
await self.bot.edit_message(message, embed=embed)
return
else:
del emojis[key]
await self.bot.send_message(ctx.message.channel, 'Сant load emoji')
#show percent
n+=1
percent = int(n / len(emojis) * 100)
description = f'Loading... | ``{percent}% / 100%``'
embed = discord.Embed(title=title, description=description, color=0x6441A4)
await self.bot.edit_message(message, embed=embed)
#Swap links to emojis
for key in emojis.keys():
for emoji in ctx.message.server.emojis:
if key == emoji.name:
emojis[key] = str(emoji)
#Send done message
embed = discord.Embed(title=title, description='Added!', color=0x6441A4)
await self.bot.edit_message(message, embed=embed)
#Create & send emojis list
max_page = len(emojis) // 5
if len(emojis) % 5 != 0:
max_page = len(emojis) // 5 + 1
embed, buttons = await self.generate_emoji_list(emojis, 1, max_page)
message = await self.bot.send_message(ctx.message.channel, embed=embed)
if buttons:
for emoji in ['⬅','➡']:
await self.bot.add_reaction(message, emoji)
self.messages.append({
'message': message,
'info': emojis,
'page': 1,
'max_page': max_page,
'emojis': ['⬅','➡']
})
async def generate_emoji_list(self, emojis, page, max_page):
description = ''
if len(emojis) <= 5:
buttons = False
title = f'Emojis | ``{len(emojis)}``'
for key in emojis.keys():
description+= f'{key} | {emojis[key]}\n'
else:
buttons = True
title = f'Emojis | {page}/{max_page}'
emojis_keys = list(emojis.keys())
for key in emojis_keys[(page-1)*5:page*5]:
description+= f'{key} | {emojis[key]}\n'
embed = discord.Embed(title=title, description=description, color=0x6441A4)
return embed, buttons
async def get_emojis_links(self, name, badges):
emojis = {}
if badges != None:
for key in badges.keys():
title = badges[key]['title'].lower().replace(' ', '_').replace('-','_').replace('.','_')
emoji_name = f'{name}_{title}'
emojis[emoji_name] = badges[key]['image_url_4x']
else:
return None
return emojis
async def on_reaction_add(self, reaction, user):
for message_info in self.messages:
if reaction.message.timestamp == message_info['message'].timestamp:
if user != self.bot.user:
await self.bot.remove_reaction(reaction.message, reaction.emoji, user)
if reaction.emoji in message_info['emojis']:
if reaction.emoji == '➡':
message_info['page'] += 1
if message_info['page'] > message_info['max_page']:
message_info['page'] = 1
embed, buttons = await self.generate_emoji_list(message_info['info'], message_info['page'], message_info['max_page'])
await self.bot.edit_message(message_info['message'], embed=embed)
if reaction.emoji == '⬅':
message_info['page'] -= 1
if message_info['page'] < 1:
message_info['page'] = message_info['max_page']
embed, buttons = await self.generate_emoji_list(message_info['info'], message_info['page'], message_info['max_page'])
await self.bot.edit_message(message_info['message'], embed=embed)
async def turn_off_buttons(self):
await self.bot.wait_until_ready()
while not self.bot.is_closed:
if len(self.messages) > 10:
self.messages = [self.messages.pop()]
await self.bot.send_message(
self.messages[0]['message'].channel,
'Old emoji-buttons no longer work'
)
await asyncio.sleep(60)
def setup(bot):
bot.add_cog(Emojis(bot))
|
[
"discord.ext.commands.command",
"json.loads",
"discord.Embed",
"asyncio.sleep",
"urllib.request.urlopen",
"requests.get",
"twitch.TwitchClient"
] |
[((492, 566), 'discord.ext.commands.command', 'commands.command', ([], {'pass_context': '(True)', 'name': '"""emojis"""', 'aliases': "['e', 'emoji']"}), "(pass_context=True, name='emojis', aliases=['e', 'emoji'])\n", (508, 566), False, 'from discord.ext import commands\n'), ((335, 380), 'twitch.TwitchClient', 'TwitchClient', ([], {'client_id': "environ['twitch_key']"}), "(client_id=environ['twitch_key'])\n", (347, 380), False, 'from twitch import TwitchClient\n'), ((1034, 1052), 'requests.get', 'requests.get', (['link'], {}), '(link)\n', (1046, 1052), False, 'import requests\n'), ((1069, 1094), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (1079, 1094), False, 'import json\n'), ((3676, 3742), 'discord.Embed', 'discord.Embed', ([], {'title': 'title', 'description': 'description', 'color': '(6570404)'}), '(title=title, description=description, color=6570404)\n', (3689, 3742), False, 'import discord\n'), ((7905, 7971), 'discord.Embed', 'discord.Embed', ([], {'title': 'title', 'description': 'description', 'color': '(6570404)'}), '(title=title, description=description, color=6570404)\n', (7918, 7971), False, 'import discord\n'), ((2510, 2576), 'discord.Embed', 'discord.Embed', ([], {'title': 'title', 'description': 'description', 'color': '(6570404)'}), '(title=title, description=description, color=6570404)\n', (2523, 2576), False, 'import discord\n'), ((4124, 4190), 'discord.Embed', 'discord.Embed', ([], {'title': 'title', 'description': '"""Canceled."""', 'color': '(6570404)'}), "(title=title, description='Canceled.', color=6570404)\n", (4137, 4190), False, 'import discord\n'), ((4659, 4725), 'discord.Embed', 'discord.Embed', ([], {'title': 'title', 'description': 'description', 'color': '(6570404)'}), '(title=title, description=description, color=6570404)\n', (4672, 4725), False, 'import discord\n'), ((6484, 6547), 'discord.Embed', 'discord.Embed', ([], {'title': 'title', 'description': '"""Added!"""', 'color': '(6570404)'}), 
"(title=title, description='Added!', color=6570404)\n", (6497, 6547), False, 'import discord\n'), ((10272, 10289), 'asyncio.sleep', 'asyncio.sleep', (['(60)'], {}), '(60)\n', (10285, 10289), False, 'import asyncio\n'), ((6054, 6120), 'discord.Embed', 'discord.Embed', ([], {'title': 'title', 'description': 'description', 'color': '(6570404)'}), '(title=title, description=description, color=6570404)\n', (6067, 6120), False, 'import discord\n'), ((5071, 5091), 'urllib.request.urlopen', 'urlopen', (['emojis[key]'], {}), '(emojis[key])\n', (5078, 5091), False, 'from urllib.request import urlopen\n'), ((5495, 5561), 'discord.Embed', 'discord.Embed', ([], {'title': 'title', 'description': 'description', 'color': '(6570404)'}), '(title=title, description=description, color=6570404)\n', (5508, 5561), False, 'import discord\n')]
|
from dotenv import load_dotenv
from os.path import join, dirname
from dateutil import parser
from enum import Enum
from typing import List
import os
import urllib.request as url_request
import json
from dataclasses import dataclass
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
dotenv_path = join(dirname(__file__), '.env')
load_dotenv(dotenv_path)
API_KEY = os.getenv('ALPHA_VANTAGE_KEY')
REQUEST_TIMEOUT_SECONDS = 20
class Interval(Enum):
DAILY = 'DAILY'
WEEKLY = 'WEEKLY'
MONTHLY = 'MONTHLY'
@dataclass
class AssetPrice:
date: str
price: float
def get_stock_returns_history(symbol: str,
interval: Interval) -> [float]:
price_history = get_stock_price_history(symbol, interval, adjusted=True)
returns: [float] = []
prev_price = None
for item in price_history:
if prev_price != None:
returns.append((item.price - prev_price) / prev_price)
prev_price = item.price
return returns
def get_stock_price_history(symbol: str,
interval: Interval,
adjusted=False) -> List[AssetPrice]:
url = url_for_function('TIME_SERIES_%s' % interval.value)
if adjusted == True:
url += '_ADJUSTED'
url += '&apikey=%s' % API_KEY
url += '&symbol=%s' % symbol
url += '&outputsize=full'
response = url_request.urlopen(url, timeout=REQUEST_TIMEOUT_SECONDS)
data = json.load(response)
prices_json = data[list(data.keys())[1]]
field_name = '4. close' if adjusted == False else '5. adjusted close'
prices: List[AssetPrice] = []
for k, v in sorted(prices_json.items()):
prices.append(AssetPrice(date=parser.parse(k),
price=float(v[field_name])))
return prices
def get_crypto_returns_history(currency: str, interval: Interval):
_, prices = get_crypto_price_history(currency, interval)
returns = []
prev_price = None
for price in prices:
if prev_price != None:
returns.append(((price / prev_price) - 1.0) * 100.0)
prev_price = price
return returns
def get_crypto_price_history(currency: str, interval: Interval):
    """Download USD close prices for *currency* at the given interval.

    Returns a ``(dates, prices)`` pair sorted in ascending date order.
    """
    query = url_for_function('DIGITAL_CURRENCY_%s' % interval.value)
    query += '&apikey=%s' % API_KEY
    query += '&symbol=%s' % currency
    query += '&market=%s' % 'USD'
    payload = json.load(
        url_request.urlopen(query, timeout=REQUEST_TIMEOUT_SECONDS))
    # The response is expected to have exactly two top-level keys; the
    # second one addresses the per-date data.
    _, series_key = payload.keys()
    dates = []
    prices = []
    for day, entry in sorted(payload[series_key].items()):
        dates.append(parser.parse(day))
        prices.append(float(entry['4a. close (USD)']))
    return (dates, prices)
def url_for_function(function: str):
    """Build the base Alpha Vantage query URL for the given API function."""
    base = 'https://www.alphavantage.co/query?function='
    return base + function
|
[
"json.load",
"dateutil.parser.parse",
"os.path.dirname",
"urllib.request.urlopen",
"dotenv.load_dotenv",
"os.getenv"
] |
[((357, 381), 'dotenv.load_dotenv', 'load_dotenv', (['dotenv_path'], {}), '(dotenv_path)\n', (368, 381), False, 'from dotenv import load_dotenv\n'), ((393, 423), 'os.getenv', 'os.getenv', (['"""ALPHA_VANTAGE_KEY"""'], {}), "('ALPHA_VANTAGE_KEY')\n", (402, 423), False, 'import os\n'), ((330, 347), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (337, 347), False, 'from os.path import join, dirname\n'), ((1406, 1463), 'urllib.request.urlopen', 'url_request.urlopen', (['url'], {'timeout': 'REQUEST_TIMEOUT_SECONDS'}), '(url, timeout=REQUEST_TIMEOUT_SECONDS)\n', (1425, 1463), True, 'import urllib.request as url_request\n'), ((1475, 1494), 'json.load', 'json.load', (['response'], {}), '(response)\n', (1484, 1494), False, 'import json\n'), ((2423, 2480), 'urllib.request.urlopen', 'url_request.urlopen', (['url'], {'timeout': 'REQUEST_TIMEOUT_SECONDS'}), '(url, timeout=REQUEST_TIMEOUT_SECONDS)\n', (2442, 2480), True, 'import urllib.request as url_request\n'), ((2492, 2511), 'json.load', 'json.load', (['response'], {}), '(response)\n', (2501, 2511), False, 'import json\n'), ((2674, 2689), 'dateutil.parser.parse', 'parser.parse', (['k'], {}), '(k)\n', (2686, 2689), False, 'from dateutil import parser\n'), ((1734, 1749), 'dateutil.parser.parse', 'parser.parse', (['k'], {}), '(k)\n', (1746, 1749), False, 'from dateutil import parser\n')]
|
import math
def isprime(x):
    """Return 1 if x is prime, 0 otherwise.

    Trial division by odd candidates only. The loop bound ``i * i <= x``
    replaces the original ``i <= math.sqrt(x)``, which recomputed the
    square root on every iteration and relied on float precision.
    """
    if x == 2:
        return 1
    if x < 2 or x % 2 == 0:
        return 0
    i = 3
    while i * i <= x:
        if x % i == 0:
            return 0
        i += 2
    return 1
n = int(input())  # declared count of integers on the next line (not otherwise used)
lst = list(map(int, input().split()))
# Print how many of the supplied integers are prime.
print(sum([isprime(i) for i in lst]))
|
[
"math.sqrt"
] |
[((132, 144), 'math.sqrt', 'math.sqrt', (['x'], {}), '(x)\n', (141, 144), False, 'import math\n')]
|
from approvaltests import verify
from database import DatabaseAccess
from product_service import validate_and_add
from response import ProductFormData
class FakeDatabase(DatabaseAccess):
    """In-memory test double for DatabaseAccess.

    Records the last product handed to ``store_product`` and returns a
    fixed id, so tests can assert on what was persisted without a real
    database.
    """
    def __init__(self):
        self.product = None  # last product passed to store_product
    def store_product(self, product):
        self.product = product
        return 1  # pretend the row was stored with id 1
def test_validate_and_add():
    """Approval test: the service response plus the product captured by the
    fake database is compared against the approved snapshot on disk."""
    # Arrange
    product_data = ProductFormData("Sample product", "Lipstick", 5, 10, False)
    db = FakeDatabase()
    # Act
    response = validate_and_add(product_data, db)
    # Assert
    response_and_product = f"{response} {db.product}"
    verify(response_and_product)
|
[
"product_service.validate_and_add",
"approvaltests.verify",
"response.ProductFormData"
] |
[((393, 452), 'response.ProductFormData', 'ProductFormData', (['"""Sample product"""', '"""Lipstick"""', '(5)', '(10)', '(False)'], {}), "('Sample product', 'Lipstick', 5, 10, False)\n", (408, 452), False, 'from response import ProductFormData\n'), ((503, 537), 'product_service.validate_and_add', 'validate_and_add', (['product_data', 'db'], {}), '(product_data, db)\n', (519, 537), False, 'from product_service import validate_and_add\n'), ((610, 638), 'approvaltests.verify', 'verify', (['response_and_product'], {}), '(response_and_product)\n', (616, 638), False, 'from approvaltests import verify\n')]
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
from LagouDb import LagouDb
class Analyzer(object):
    """Aggregates Lagou job statistics pulled from the LagouDb store."""

    def __init__(self):
        self.db = LagouDb()

    # Most popular jobs (not implemented yet).
    @staticmethod
    def get_popular_jobs(since=None):
        if since:
            pass
        else:
            pass

    # Salary distribution of one job keyword across cities.
    def get_salary_in_city(self, key, count, mincount=10):
        rows = self.db.salary_in_city_by_key(key, count, mincount)
        # Drop cities with fewer than 5 postings; label the rest "city (n)".
        return {
            '{0} ({1})'.format(row['city'], row['count']): row['salary']
            for row in rows
            if row['count'] >= 5
        }

    # Best-paid jobs within a city.
    def get_high_salary_jobs(self, city, count, mincount=10):
        rows = self.db.high_salary(city, count, mincount=mincount)
        return {
            '{0} ({1})'.format(row['key'], row['count']): row['salary']
            for row in rows
        }

    # Share of search results per keyword, optionally restricted to a city.
    def key_persent(self, city, count):
        if city:
            rows = self.db.key_persent_for_city(city, count)
        else:
            rows = self.db.key_persent(count)
        return {
            '{0} ({1})'.format(row['key'], row['count']): row['count']
            for row in rows
        }
|
[
"LagouDb.LagouDb"
] |
[((137, 146), 'LagouDb.LagouDb', 'LagouDb', ([], {}), '()\n', (144, 146), False, 'from LagouDb import LagouDb\n')]
|
# Generated by Django 3.1.1 on 2020-09-10 21:23
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: replaces Mapeamento.links_fontes
    # (and the standalone Fonte model) with four optional URL columns
    # fonte_1..fonte_4 directly on Mapeamento.

    dependencies = [
        ('formulario', '0004_auto_20200909_2313'),
    ]

    operations = [
        # links_fontes is dropped in favour of the explicit fonte_N fields.
        migrations.RemoveField(
            model_name='mapeamento',
            name='links_fontes',
        ),
        migrations.AddField(
            model_name='mapeamento',
            name='fonte_1',
            field=models.URLField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='mapeamento',
            name='fonte_2',
            field=models.URLField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='mapeamento',
            name='fonte_3',
            field=models.URLField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='mapeamento',
            name='fonte_4',
            field=models.URLField(blank=True, null=True),
        ),
        # The Fonte model's data now lives in the fonte_N fields above.
        migrations.DeleteModel(
            name='Fonte',
        ),
    ]
|
[
"django.db.migrations.RemoveField",
"django.db.models.URLField",
"django.db.migrations.DeleteModel"
] |
[((238, 306), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""mapeamento"""', 'name': '"""links_fontes"""'}), "(model_name='mapeamento', name='links_fontes')\n", (260, 306), False, 'from django.db import migrations, models\n'), ((1003, 1039), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""Fonte"""'}), "(name='Fonte')\n", (1025, 1039), False, 'from django.db import migrations, models\n'), ((455, 493), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (470, 493), False, 'from django.db import migrations, models\n'), ((618, 656), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (633, 656), False, 'from django.db import migrations, models\n'), ((781, 819), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (796, 819), False, 'from django.db import migrations, models\n'), ((944, 982), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (959, 982), False, 'from django.db import migrations, models\n')]
|
# -*- coding: utf-8 -*-
"""Demonstrations of setting up models and visualising outputs."""
from __future__ import division
__authors__ = '<NAME>'
__license__ = 'MIT'
import sys
import matplotlib.pyplot as plt
import matplotlib.cm as cm
from matplotlib.animation import FuncAnimation
import numpy as np
from pompy import models, processors
DEFAULT_SEED = 20181108
def set_up_figure(fig_size=(10, 5)):
    """Set up Matplotlib figure with simulation time title text.

    Parameters
    ----------
    fig_size : tuple
        Figure dimensions in inches in order `(width, height)`.

    Returns
    -------
    fig : Figure
        Matplotlib figure object.
    ax : AxesSubplot
        Matplotlib axis object.
    title : Text
        Title text artist, updated with the simulation time while animating.
    """
    fig, ax = plt.subplots(1, 1, figsize=fig_size)
    title = ax.set_title('Simulation time = ---- seconds')
    return fig, ax, title
def update_decorator(dt, title, steps_per_frame, models):
    """Decorator factory for animation frame-update functions.

    The wrapped function first advances every model in `models` by
    `steps_per_frame` timesteps of length `dt`, refreshes the simulation
    time shown in `title`, and prepends the title artist to the list of
    artists returned by the wrapped function (required for blitting).
    """
    def decorate(frame_update):
        def advance_and_update(frame_index):
            # Step all models forward before drawing this frame.
            for _ in range(steps_per_frame):
                for model in models:
                    model.update(dt)
            sim_time = frame_index * steps_per_frame * dt
            title.set_text(
                'Simulation time = {0:.3f} seconds'.format(sim_time))
            return [title] + frame_update(frame_index)
        return advance_and_update
    return decorate
def wind_model_demo(dt=0.01, t_max=100, steps_per_frame=20, seed=DEFAULT_SEED):
    """Set up wind model and animate velocity field with quiver plot.

    Parameters
    ----------
    dt : float
        Simulation timestep.
    t_max : float
        End time to simulate to.
    steps_per_frame: integer
        Number of simulation time steps to perform between animation frames.
    seed : integer
        Seed for random number generator.

    Returns
    -------
    fig : Figure
        Matplotlib figure object.
    ax : AxesSubplot
        Matplotlib axis object.
    anim : FuncAnimation
        Matplotlib animation object.
    """
    rng = np.random.RandomState(seed)
    # define simulation region
    wind_region = models.Rectangle(x_min=0., x_max=100., y_min=-25., y_max=25.)
    # set up wind model on a 21 x 11 grid of velocity nodes
    wind_model = models.WindModel(wind_region, 21, 11, rng=rng)
    # let simulation run for 10s to equilibrate wind model
    for t in np.arange(0, 10, dt):
        wind_model.update(dt)
    # generate figure, axis and title text
    fig, ax, title = set_up_figure()
    # create quiver plot of initial velocity field
    # velocity_field is indexed [x, y, component]; the transpose gives the
    # x / y components with rows along the y-axis, as quiver expects
    vf_plot = ax.quiver(wind_model.x_points, wind_model.y_points,
                        wind_model.velocity_field.T[0],
                        wind_model.velocity_field.T[1], width=0.003)
    # expand axis limits to make vectors at boundary of field visible
    ax.axis(ax.axis() + np.array([-0.25, 0.25, -0.25, 0.25]))
    ax.set_xlabel('x-coordinate / m')
    ax.set_ylabel('y-coordinate / m')
    ax.set_aspect(1)
    fig.tight_layout()
    # define update function
    @update_decorator(dt, title, steps_per_frame, [wind_model])
    def update(i):
        vf_plot.set_UVC(
            wind_model.velocity_field.T[0], wind_model.velocity_field.T[1])
        return [vf_plot]
    # create animation object (frame count rounded to nearest integer)
    n_frame = int(t_max / (dt * steps_per_frame) + 0.5)
    anim = FuncAnimation(fig, update, n_frame, blit=True)
    return fig, ax, anim
def plume_model_demo(dt=0.01, t_max=100, steps_per_frame=200,
                     seed=DEFAULT_SEED):
    """Set up plume model and animate puffs overlayed over velocity field.

    Puff positions displayed using Matplotlib `scatter` plot function and
    velocity field displayed using `quiver` plot function.

    Parameters
    ----------
    dt : float
        Simulation timestep.
    t_max : float
        End time to simulate to.
    steps_per_frame: integer
        Number of simulation time steps to perform between animation frames.
    seed : integer
        Seed for random number generator.

    Returns
    -------
    fig : Figure
        Matplotlib figure object.
    ax : AxesSubplot
        Matplotlib axis object.
    anim : FuncAnimation
        Matplotlib animation object.
    """
    rng = np.random.RandomState(seed)
    # define simulation region
    sim_region = models.Rectangle(x_min=0., x_max=100, y_min=-25., y_max=25.)
    # set up wind model
    wind_model = models.WindModel(sim_region, 21, 11, rng=rng)
    # let simulation run for 10s to equilibrate wind model
    for t in np.arange(0, 10, dt):
        wind_model.update(dt)
    # set up plume model with odour source at (5, 0, 0)
    plume_model = models.PlumeModel(
        sim_region, (5., 0., 0.), wind_model, rng=rng)
    # set up figure window
    fig, ax, title = set_up_figure()
    # create quiver plot of initial velocity field
    # quiver expects first array dimension (rows) to correspond to y-axis
    # therefore need to transpose
    vf_plot = plt.quiver(
        wind_model.x_points, wind_model.y_points,
        wind_model.velocity_field.T[0], wind_model.velocity_field.T[1],
        width=0.003)
    # expand axis limits to make vectors at boundary of field visible
    ax.axis(ax.axis() + np.array([-0.25, 0.25, -0.25, 0.25]))
    # draw initial puff positions with scatter plot
    # puff_array column 3 appears to be the squared puff radius, hence the
    # **0.5 to get a marker size proportional to radius — TODO confirm
    # against models.PlumeModel
    radius_mult = 200
    pp_plot = plt.scatter(
        plume_model.puff_array[:, 0], plume_model.puff_array[:, 1],
        radius_mult * plume_model.puff_array[:, 3]**0.5, c='r',
        edgecolors='none')
    ax.set_xlabel('x-coordinate / m')
    ax.set_ylabel('y-coordinate / m')
    ax.set_aspect(1)
    fig.tight_layout()
    # define update function
    @update_decorator(dt, title, steps_per_frame, [wind_model, plume_model])
    def update(i):
        # update velocity field quiver plot data
        vf_plot.set_UVC(wind_model.velocity_field[:, :, 0].T,
                        wind_model.velocity_field[:, :, 1].T)
        # update puff position scatter plot positions and sizes
        pp_plot.set_offsets(plume_model.puff_array[:, :2])
        # NOTE: writes the private _sizes attribute as scatter exposes no
        # public per-point size setter
        pp_plot._sizes = radius_mult * plume_model.puff_array[:, 3]**0.5
        return [vf_plot, pp_plot]
    # create animation object
    n_frame = int(t_max / (dt * steps_per_frame) + 0.5)
    anim = FuncAnimation(fig, update, frames=n_frame, blit=True)
    return fig, ax, anim
def conc_point_val_demo(dt=0.01, t_max=5, steps_per_frame=1, x=10., y=0.0,
                        seed=DEFAULT_SEED):
    """Set up plume model and animate concentration at a point as time series.

    Demonstration of setting up plume model and processing the outputted
    puff arrays with the ConcentrationPointValueCalculator class, the
    resulting concentration time course at a point in the odour plume being
    displayed with the Matplotlib `plot` function.

    Parameters
    ----------
    dt : float
        Simulation timestep.
    t_max : float
        End time to simulate to.
    steps_per_frame: integer
        Number of simulation time steps to perform between animation frames.
    x : float
        x-coordinate of point to measure concentration at.
    y : float
        y-coordinate of point to measure concentration at.
    seed : integer
        Seed for random number generator.

    Returns
    -------
    fig : Figure
        Matplotlib figure object.
    ax : AxesSubplot
        Matplotlib axis object.
    anim : FuncAnimation
        Matplotlib animation object.
    """
    rng = np.random.RandomState(seed)
    # define simulation region
    sim_region = models.Rectangle(x_min=0., x_max=100, y_min=-25., y_max=25.)
    # set up wind model
    wind_model = models.WindModel(sim_region, 21, 11, rng=rng)
    # set up plume model
    plume_model = models.PlumeModel(
        sim_region, (5., 0., 0.), wind_model, rng=rng)
    # let simulation run for 10s to initialise models
    for t in np.arange(0, 10, dt):
        wind_model.update(dt)
        plume_model.update(dt)
    # set up concentration point value calculator
    val_calc = processors.ConcentrationValueCalculator(1.)
    conc_vals = []
    conc_vals.append(val_calc.calc_conc_point(plume_model.puff_array, x, y))
    ts = [0.]
    # set up figure
    fig, ax, title = set_up_figure()
    # plot the initial (single-sample) concentration time series
    conc_line, = plt.plot(ts, conc_vals)
    ax.set_xlim(0., t_max)
    ax.set_ylim(0., 150.)
    ax.set_xlabel('Time / s')
    ax.set_ylabel('Normalised concentration')
    ax.grid(True)
    fig.tight_layout()
    # define update function
    @update_decorator(dt, title, steps_per_frame, [wind_model, plume_model])
    def update(i):
        # append the new simulation time and sampled concentration, then
        # redraw the whole accumulated series
        ts.append(dt * i * steps_per_frame)
        conc_vals.append(
            val_calc.calc_conc_point(plume_model.puff_array, x, y))
        conc_line.set_data(ts, conc_vals)
        return [conc_line]
    # create animation object
    n_frame = int(t_max / (dt * steps_per_frame) + 0.5)
    anim = FuncAnimation(fig, update, frames=n_frame, blit=True)
    return fig, ax, anim
def concentration_array_demo(dt=0.01, t_max=100, steps_per_frame=50,
                             seed=DEFAULT_SEED):
    """Set up plume model and animate concentration fields.

    Demonstration of setting up plume model and processing the outputted
    puff arrays with the `ConcentrationArrayGenerator` class, the resulting
    arrays being displayed with the Matplotlib `imshow` function.

    Parameters
    ----------
    dt : float
        Simulation timestep.
    t_max : float
        End time to simulate to.
    steps_per_frame: integer
        Number of simulation time steps to perform between animation frames.
    seed : integer
        Seed for random number generator.

    Returns
    -------
    fig : Figure
        Matplotlib figure object.
    ax : AxesSubplot
        Matplotlib axis object.
    anim : FuncAnimation
        Matplotlib animation object.
    """
    rng = np.random.RandomState(seed)
    # define simulation region
    sim_region = models.Rectangle(x_min=0., x_max=100, y_min=-25., y_max=25.)
    # set up wind model
    wind_model = models.WindModel(sim_region, 21, 11, rng=rng)
    # set up plume model
    plume_model = models.PlumeModel(
        sim_region, (5., 0., 0.), wind_model, rng=rng)
    # let simulation run for 10s to initialise models
    for t in np.arange(0, 10, dt):
        wind_model.update(dt)
        plume_model.update(dt)
    # set up concentration array generator on a 500 x 250 output grid
    array_gen = processors.ConcentrationArrayGenerator(
        sim_region, 0.01, 500, 250, 1.)
    # set up figure
    fig, ax, title = set_up_figure()
    # display initial concentration field as image
    # the array is transposed so that image rows correspond to the y-axis
    conc_array = array_gen.generate_single_array(plume_model.puff_array)
    conc_im = plt.imshow(conc_array.T, extent=sim_region, cmap='Reds',
                         vmin=0., vmax=1.)
    ax.set_xlabel('x-coordinate / m')
    ax.set_ylabel('y-coordinate / m')
    ax.set_aspect(1)
    fig.tight_layout()
    # define update function
    @update_decorator(dt, title, steps_per_frame, [wind_model, plume_model])
    def update(i):
        conc_im.set_data(
            array_gen.generate_single_array(plume_model.puff_array).T)
        return [conc_im]
    # create animation object
    n_frame = int(t_max / (dt * steps_per_frame) + 0.5)
    anim = FuncAnimation(fig, update, frames=n_frame, blit=True)
    return fig, ax, anim
|
[
"matplotlib.pyplot.plot",
"pompy.processors.ConcentrationArrayGenerator",
"matplotlib.pyplot.scatter",
"pompy.processors.ConcentrationValueCalculator",
"matplotlib.pyplot.quiver",
"pompy.models.Rectangle",
"matplotlib.pyplot.imshow",
"numpy.random.RandomState",
"matplotlib.animation.FuncAnimation",
"numpy.arange",
"numpy.array",
"pompy.models.PlumeModel",
"matplotlib.pyplot.subplots",
"pompy.models.WindModel"
] |
[((672, 708), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': 'fig_size'}), '(1, 1, figsize=fig_size)\n', (684, 708), True, 'import matplotlib.pyplot as plt\n'), ((1970, 1997), 'numpy.random.RandomState', 'np.random.RandomState', (['seed'], {}), '(seed)\n', (1991, 1997), True, 'import numpy as np\n'), ((2047, 2112), 'pompy.models.Rectangle', 'models.Rectangle', ([], {'x_min': '(0.0)', 'x_max': '(100.0)', 'y_min': '(-25.0)', 'y_max': '(25.0)'}), '(x_min=0.0, x_max=100.0, y_min=-25.0, y_max=25.0)\n', (2063, 2112), False, 'from pompy import models, processors\n'), ((2150, 2196), 'pompy.models.WindModel', 'models.WindModel', (['wind_region', '(21)', '(11)'], {'rng': 'rng'}), '(wind_region, 21, 11, rng=rng)\n', (2166, 2196), False, 'from pompy import models, processors\n'), ((2269, 2289), 'numpy.arange', 'np.arange', (['(0)', '(10)', 'dt'], {}), '(0, 10, dt)\n', (2278, 2289), True, 'import numpy as np\n'), ((3234, 3280), 'matplotlib.animation.FuncAnimation', 'FuncAnimation', (['fig', 'update', 'n_frame'], {'blit': '(True)'}), '(fig, update, n_frame, blit=True)\n', (3247, 3280), False, 'from matplotlib.animation import FuncAnimation\n'), ((4153, 4180), 'numpy.random.RandomState', 'np.random.RandomState', (['seed'], {}), '(seed)\n', (4174, 4180), True, 'import numpy as np\n'), ((4229, 4292), 'pompy.models.Rectangle', 'models.Rectangle', ([], {'x_min': '(0.0)', 'x_max': '(100)', 'y_min': '(-25.0)', 'y_max': '(25.0)'}), '(x_min=0.0, x_max=100, y_min=-25.0, y_max=25.0)\n', (4245, 4292), False, 'from pompy import models, processors\n'), ((4331, 4376), 'pompy.models.WindModel', 'models.WindModel', (['sim_region', '(21)', '(11)'], {'rng': 'rng'}), '(sim_region, 21, 11, rng=rng)\n', (4347, 4376), False, 'from pompy import models, processors\n'), ((4449, 4469), 'numpy.arange', 'np.arange', (['(0)', '(10)', 'dt'], {}), '(0, 10, dt)\n', (4458, 4469), True, 'import numpy as np\n'), ((4544, 4611), 'pompy.models.PlumeModel', 'models.PlumeModel', 
(['sim_region', '(5.0, 0.0, 0.0)', 'wind_model'], {'rng': 'rng'}), '(sim_region, (5.0, 0.0, 0.0), wind_model, rng=rng)\n', (4561, 4611), False, 'from pompy import models, processors\n'), ((4855, 4989), 'matplotlib.pyplot.quiver', 'plt.quiver', (['wind_model.x_points', 'wind_model.y_points', 'wind_model.velocity_field.T[0]', 'wind_model.velocity_field.T[1]'], {'width': '(0.003)'}), '(wind_model.x_points, wind_model.y_points, wind_model.\n velocity_field.T[0], wind_model.velocity_field.T[1], width=0.003)\n', (4865, 4989), True, 'import matplotlib.pyplot as plt\n'), ((5230, 5388), 'matplotlib.pyplot.scatter', 'plt.scatter', (['plume_model.puff_array[:, 0]', 'plume_model.puff_array[:, 1]', '(radius_mult * plume_model.puff_array[:, 3] ** 0.5)'], {'c': '"""r"""', 'edgecolors': '"""none"""'}), "(plume_model.puff_array[:, 0], plume_model.puff_array[:, 1], \n radius_mult * plume_model.puff_array[:, 3] ** 0.5, c='r', edgecolors='none'\n )\n", (5241, 5388), True, 'import matplotlib.pyplot as plt\n'), ((6149, 6202), 'matplotlib.animation.FuncAnimation', 'FuncAnimation', (['fig', 'update'], {'frames': 'n_frame', 'blit': '(True)'}), '(fig, update, frames=n_frame, blit=True)\n', (6162, 6202), False, 'from matplotlib.animation import FuncAnimation\n'), ((7347, 7374), 'numpy.random.RandomState', 'np.random.RandomState', (['seed'], {}), '(seed)\n', (7368, 7374), True, 'import numpy as np\n'), ((7423, 7486), 'pompy.models.Rectangle', 'models.Rectangle', ([], {'x_min': '(0.0)', 'x_max': '(100)', 'y_min': '(-25.0)', 'y_max': '(25.0)'}), '(x_min=0.0, x_max=100, y_min=-25.0, y_max=25.0)\n', (7439, 7486), False, 'from pompy import models, processors\n'), ((7525, 7570), 'pompy.models.WindModel', 'models.WindModel', (['sim_region', '(21)', '(11)'], {'rng': 'rng'}), '(sim_region, 21, 11, rng=rng)\n', (7541, 7570), False, 'from pompy import models, processors\n'), ((7614, 7681), 'pompy.models.PlumeModel', 'models.PlumeModel', (['sim_region', '(5.0, 0.0, 0.0)', 'wind_model'], {'rng': 'rng'}), 
'(sim_region, (5.0, 0.0, 0.0), wind_model, rng=rng)\n', (7631, 7681), False, 'from pompy import models, processors\n'), ((7755, 7775), 'numpy.arange', 'np.arange', (['(0)', '(10)', 'dt'], {}), '(0, 10, dt)\n', (7764, 7775), True, 'import numpy as np\n'), ((7903, 7947), 'pompy.processors.ConcentrationValueCalculator', 'processors.ConcentrationValueCalculator', (['(1.0)'], {}), '(1.0)\n', (7942, 7947), False, 'from pompy import models, processors\n'), ((8182, 8205), 'matplotlib.pyplot.plot', 'plt.plot', (['ts', 'conc_vals'], {}), '(ts, conc_vals)\n', (8190, 8205), True, 'import matplotlib.pyplot as plt\n'), ((8807, 8860), 'matplotlib.animation.FuncAnimation', 'FuncAnimation', (['fig', 'update'], {'frames': 'n_frame', 'blit': '(True)'}), '(fig, update, frames=n_frame, blit=True)\n', (8820, 8860), False, 'from matplotlib.animation import FuncAnimation\n'), ((9784, 9811), 'numpy.random.RandomState', 'np.random.RandomState', (['seed'], {}), '(seed)\n', (9805, 9811), True, 'import numpy as np\n'), ((9860, 9923), 'pompy.models.Rectangle', 'models.Rectangle', ([], {'x_min': '(0.0)', 'x_max': '(100)', 'y_min': '(-25.0)', 'y_max': '(25.0)'}), '(x_min=0.0, x_max=100, y_min=-25.0, y_max=25.0)\n', (9876, 9923), False, 'from pompy import models, processors\n'), ((9962, 10007), 'pompy.models.WindModel', 'models.WindModel', (['sim_region', '(21)', '(11)'], {'rng': 'rng'}), '(sim_region, 21, 11, rng=rng)\n', (9978, 10007), False, 'from pompy import models, processors\n'), ((10051, 10118), 'pompy.models.PlumeModel', 'models.PlumeModel', (['sim_region', '(5.0, 0.0, 0.0)', 'wind_model'], {'rng': 'rng'}), '(sim_region, (5.0, 0.0, 0.0), wind_model, rng=rng)\n', (10068, 10118), False, 'from pompy import models, processors\n'), ((10192, 10212), 'numpy.arange', 'np.arange', (['(0)', '(10)', 'dt'], {}), '(0, 10, dt)\n', (10201, 10212), True, 'import numpy as np\n'), ((10334, 10405), 'pompy.processors.ConcentrationArrayGenerator', 'processors.ConcentrationArrayGenerator', (['sim_region', 
'(0.01)', '(500)', '(250)', '(1.0)'], {}), '(sim_region, 0.01, 500, 250, 1.0)\n', (10372, 10405), False, 'from pompy import models, processors\n'), ((10609, 10685), 'matplotlib.pyplot.imshow', 'plt.imshow', (['conc_array.T'], {'extent': 'sim_region', 'cmap': '"""Reds"""', 'vmin': '(0.0)', 'vmax': '(1.0)'}), "(conc_array.T, extent=sim_region, cmap='Reds', vmin=0.0, vmax=1.0)\n", (10619, 10685), True, 'import matplotlib.pyplot as plt\n'), ((11175, 11228), 'matplotlib.animation.FuncAnimation', 'FuncAnimation', (['fig', 'update'], {'frames': 'n_frame', 'blit': '(True)'}), '(fig, update, frames=n_frame, blit=True)\n', (11188, 11228), False, 'from matplotlib.animation import FuncAnimation\n'), ((2739, 2775), 'numpy.array', 'np.array', (['[-0.25, 0.25, -0.25, 0.25]'], {}), '([-0.25, 0.25, -0.25, 0.25])\n', (2747, 2775), True, 'import numpy as np\n'), ((5104, 5140), 'numpy.array', 'np.array', (['[-0.25, 0.25, -0.25, 0.25]'], {}), '([-0.25, 0.25, -0.25, 0.25])\n', (5112, 5140), True, 'import numpy as np\n')]
|
from __future__ import annotations
import re
import string
from abc import ABC, abstractmethod
from dataclasses import dataclass
from fnmatch import fnmatchcase
from io import BytesIO
from typing import IO, Dict, List, Optional, Union
from pptx import Presentation
from pptx.chart.data import ChartData
from pptx.enum.shapes import PP_PLACEHOLDER_TYPE
from typing_extensions import TypedDict
from .core import Filler, Params, Template
from .spec import Selector, Spec
class PPTXTableSpecDict(TypedDict):
    # Selectors describing how to fill one table: the row keys, the stub
    # (first) column values, and the data columns keyed by column name.
    keys: Selector
    stubs: Selector
    columns: Dict[str, Selector]
class PPTXChartSpecDict(TypedDict):
    # Selectors describing how to fill one chart: the category keys, the
    # category labels, and the data series keyed by series name.
    keys: Selector
    categories: Selector
    series: Dict[str, Selector]
@dataclass
class PPTXSpec(Spec):
    # Full fill specification for a .pptx template: text variables, picture
    # placeholders, tables and charts, each keyed by a shape-name pattern.
    variables: Dict[str, Selector]
    pictures: Dict[str, Selector]
    tables: Dict[str, PPTXTableSpecDict]
    charts: Dict[str, PPTXChartSpecDict]
class PPTXTableParamsDict(TypedDict):
    # Resolved values for one table: ordered row keys, stub text per key,
    # and per-column cell text keyed (column name -> row key -> text).
    keys: List[str]
    stubs: Dict[str, str]
    columns: Dict[str, Dict[str, str]]
class PPTXChartParamsDict(TypedDict):
    # Resolved values for one chart: ordered category keys, category label
    # per key, and numeric values keyed (series name -> category key).
    keys: List[str]
    categories: Dict[str, str]
    series: Dict[str, Dict[str, Union[int, float]]]
@dataclass
class PPTXParams(Params[PPTXSpec]):
    # Concrete render parameters matching PPTXSpec: variable text, raw
    # picture bytes, and resolved table/chart data keyed by name pattern.
    variables: Dict[str, str]
    pictures: Dict[str, bytes]
    tables: Dict[str, PPTXTableParamsDict]
    charts: Dict[str, PPTXChartParamsDict]
class PPTXTemplate(Template[PPTXParams]):
    """Renders a PowerPoint template by substituting every shape in place."""

    def render_to_file(self, params: PPTXParams, file_object: IO[bytes]) -> None:
        self._params = params
        substituters = self._create_shape_substituters()
        presentation = Presentation(self._path_or_file)
        # Walk every shape on every slide and let each substituter act on it.
        for slide in presentation.slides:
            for shape in slide.shapes:
                for substituter in substituters:
                    substituter.substitute_shape(shape)
        presentation.save(file_object)

    def _create_shape_substituters(self) -> List[ShapeSubstituter]:
        # Fixed substituters first, then one per configured table/chart.
        substituters: List[ShapeSubstituter] = [
            TextShapeSubstituter(self._params.variables),
            PicturePlaceholderSubstituter(self._params.pictures),
        ]
        substituters.extend(
            TableShapeSubstituter(table_name, table_params)
            for (table_name, table_params) in self._params.tables.items()
        )
        substituters.extend(
            ChartShapeSubstituter(chart_name, chart_params)
            for (chart_name, chart_params) in self._params.charts.items()
        )
        return substituters
class PPTXFiller(Filler[PPTXSpec, PPTXParams, PPTXTemplate]):
    # Concrete Filler wiring the PPTX spec/params/template types together.
    params_cls = PPTXParams
class ShapeSubstituter(ABC):
    """Interface for objects that rewrite one kind of pptx shape in place."""
    @abstractmethod
    def substitute_shape(self, shape):
        ...
class TextShapeSubstituter(ShapeSubstituter):
    """Expands ``$variable`` placeholders in every text frame of a shape."""

    def __init__(self, variables: Dict[str, str]) -> None:
        super().__init__()
        # Normalise None values to empty strings so substitution never
        # injects the literal text "None".
        self._variables = {}
        for name, value in variables.items():
            self._variables[name] = "" if value is None else value

    def substitute_shape(self, shape):
        if shape.has_text_frame:
            self.substitute_text_frame(shape.text_frame)

    def substitute_text_frame(self, text_frame):
        for paragraph in text_frame.paragraphs:
            runs = paragraph.runs
            if not runs:
                continue
            # PowerPoint often splits a paragraph's text over several runs,
            # so the runs are merged, substituted as a whole, and the result
            # stored in the first run (whose formatting then applies to the
            # whole paragraph); the remaining runs are emptied. Templates
            # can use separate text frames when distinct formatting matters.
            merged = "".join(run.text for run in runs)
            runs[0].text = self.substitute_text(merged)
            for extra_run in runs[1:]:
                extra_run.text = ""

    def substitute_text(self, text: str) -> str:
        return string.Template(text).substitute(self._variables)
class PlaceholderSubstituter(ShapeSubstituter):
    """Base class for substituters that only act on placeholder shapes."""
    def substitute_shape(self, shape):
        if shape.is_placeholder:
            self.substitute_placeholder(shape)
    @abstractmethod
    def substitute_placeholder(self, placeholder):
        ...
class PicturePlaceholderSubstituter(PlaceholderSubstituter):
    """Inserts raw image bytes into picture placeholders, matched by name."""

    def __init__(self, pictures: Dict[str, bytes]) -> None:
        super().__init__()
        self._pictures = pictures

    def substitute_placeholder(self, placeholder):
        if placeholder.placeholder_format.type == PP_PLACEHOLDER_TYPE.PICTURE:  # type: ignore
            self.substitute_picture_placeholder(placeholder)

    def substitute_picture_placeholder(self, picture_placeholder):
        # Placeholders without a configured picture are left untouched.
        image = self._pictures.get(picture_placeholder.name)
        if image is None:
            return
        picture_placeholder.insert_picture(BytesIO(image))
class TableShapeSubstituter(ShapeSubstituter):
    """Fills tables whose shape name matches *table_name_pattern*.

    The first table row is treated as a header whose cells name the data
    columns; the first column holds row stubs. Every other cell is looked
    up in the configured column data by (row key, column name).
    """

    def __init__(
        self, table_name_pattern: str, table_params: PPTXTableParamsDict
    ) -> None:
        super().__init__()
        self._table_name_pattern = table_name_pattern
        self._keys = table_params["keys"]
        self._stubs = table_params["stubs"]
        self._columns = table_params["columns"]

    def substitute_shape(self, shape):
        if shape.has_table and fnmatchcase(shape.name, self._table_name_pattern):
            self.substitute_table(shape.table)

    def substitute_table(self, table):
        """Substitute every data cell of *table* in place."""
        column_index_values = []
        for i, row in enumerate(table.rows):
            if not column_index_values:  # first row is header
                for j, cell in enumerate(row.cells):
                    column_index_values.append(
                        self.derive_table_column_index_value(cell.text, j)
                    )
            else:
                row_index_value = None
                for j, cell in enumerate(row.cells):
                    if j == 0:
                        row_index_value = self.derive_table_row_index_value(
                            cell.text, i
                        )
                    else:
                        assert row_index_value is not None
                    # This run-substitution loop was previously duplicated
                    # verbatim in both branches; it is now a single helper.
                    self._substitute_cell_runs(
                        cell, row_index_value, column_index_values[j]
                    )

    def _substitute_cell_runs(self, cell, row_index_value, column_index_value):
        """Rewrite the first run of *cell*'s first paragraph, if any."""
        for run in cell.text_frame.paragraphs[0].runs:
            new_text = self.substitute_table_cell(
                row_index_value, column_index_value, run.text
            )
            if new_text is not None:
                run.text = new_text
            break  # there should only be one run at most

    def derive_table_row_index_value(self, text: str, row_number: int) -> str:
        """Map a stub cell to its row key.

        A cell like ``$name3`` explicitly selects the 3rd configured key;
        otherwise the key is taken positionally from the row number.
        NOTE(review): row_number counts the header row too, so the first
        data row maps to keys[1] — confirm this is intended.
        """
        if text.startswith("$"):
            key_index = int(re.sub(r"\$[a-zA-Z_]*", "", text)) - 1
        else:
            key_index = row_number
        return self._keys[key_index]

    def derive_table_column_index_value(
        self, text: str, column_number: int
    ) -> Optional[str]:
        """Header cells name data columns (lower-cased); the stub column is None."""
        if column_number > 0:
            return text.lower()

    def substitute_table_cell(
        self,
        row_index_value: str,
        column_index_value: Optional[str],
        text: str,
    ) -> Optional[str]:
        """Return the replacement text for a cell, or None to leave it as is."""
        if column_index_value is None:
            return self._stubs[row_index_value]
        else:
            return self._columns[column_index_value].get(row_index_value)
class ChartShapeSubstituter(ShapeSubstituter):
    """Replaces the data of charts whose shape name matches the pattern."""

    def __init__(
        self, chart_name_pattern: str, chart_params: PPTXChartParamsDict
    ) -> None:
        super().__init__()
        self._chart_name_pattern = chart_name_pattern
        self._keys = chart_params["keys"]
        self._categories = chart_params["categories"]
        self._series = chart_params["series"]

    def substitute_shape(self, shape):
        if shape.has_chart and fnmatchcase(shape.name, self._chart_name_pattern):
            self.substitute_chart(shape.chart)

    def substitute_chart(self, chart):
        """Rebuild the chart's data from the configured keys and series."""
        index_values = self.generate_chart_index_values(
            chart.plots[0].categories)
        chart_data = ChartData()
        chart_data.categories = [
            self.get_chart_category(value) for value in index_values
        ]
        # One output series per existing chart series, renamed via the
        # derived metric name.
        for existing_series in chart.series:
            metric_name = self.derive_chart_metric_name(existing_series.name)
            chart_data.add_series(
                metric_name,
                [self.get_chart_value(value, metric_name)
                 for value in index_values],
            )
        chart.replace_data(chart_data)

    def generate_chart_index_values(self, current_values) -> List[str]:
        # The template's current categories are ignored; the configured key
        # order defines the data rows.
        return self._keys

    def derive_chart_metric_name(self, text: str) -> str:
        return text.lower()

    def get_chart_category(self, index_value: str) -> str:
        return self._categories[index_value]

    def get_chart_value(
        self, index_value: str, metric_name: str
    ) -> Optional[Union[int, float]]:
        return self._series[metric_name].get(index_value)
|
[
"io.BytesIO",
"pptx.Presentation",
"string.Template",
"fnmatch.fnmatchcase",
"pptx.chart.data.ChartData",
"re.sub"
] |
[((1578, 1610), 'pptx.Presentation', 'Presentation', (['self._path_or_file'], {}), '(self._path_or_file)\n', (1590, 1610), False, 'from pptx import Presentation\n'), ((3933, 3954), 'string.Template', 'string.Template', (['text'], {}), '(text)\n', (3948, 3954), False, 'import string\n'), ((8651, 8662), 'pptx.chart.data.ChartData', 'ChartData', ([], {}), '()\n', (8660, 8662), False, 'from pptx.chart.data import ChartData\n'), ((4879, 4893), 'io.BytesIO', 'BytesIO', (['image'], {}), '(image)\n', (4886, 4893), False, 'from io import BytesIO\n'), ((5394, 5443), 'fnmatch.fnmatchcase', 'fnmatchcase', (['shape.name', 'self._table_name_pattern'], {}), '(shape.name, self._table_name_pattern)\n', (5405, 5443), False, 'from fnmatch import fnmatchcase\n'), ((8323, 8372), 'fnmatch.fnmatchcase', 'fnmatchcase', (['shape.name', 'self._chart_name_pattern'], {}), '(shape.name, self._chart_name_pattern)\n', (8334, 8372), False, 'from fnmatch import fnmatchcase\n'), ((7240, 7273), 're.sub', 're.sub', (['"""\\\\$[a-zA-Z_]*"""', '""""""', 'text'], {}), "('\\\\$[a-zA-Z_]*', '', text)\n", (7246, 7273), False, 'import re\n')]
|
"""Packaging script for symspellpy — a keyboard-layout-aware SymSpell port."""
from setuptools import setup, find_packages


def _read_long_description() -> str:
    """Return the tail of README.md (text after the first '========' rule).

    Uses a context manager and an explicit encoding: the original called
    ``open('README.md').read()`` inline, which left the file handle open
    and depended on the platform default encoding.
    """
    with open('README.md', encoding='utf-8') as readme:
        return readme.read().split("========")[-1]


setup(
    name='symspellpy',
    packages=find_packages(exclude=['test']),
    # ship documentation files alongside the package
    package_data={
        'symspellpy': ['README.md', 'LICENSE']
    },
    version='0.9.0',
    description='Keyboard layout aware version of SymSpell',
    long_description=_read_long_description(),
    author='crossnox',
    url='https://github.com/crossnox/symspellpy',
    keywords=['symspellpy'],
    install_requires=[
        'scipy >= 0.19'
    ],
    python_requires='>=3.4',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: MIT License',
    ],
    test_suite="test",
    entry_points={
    },
)
|
[
"setuptools.find_packages"
] |
[((88, 119), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['test']"}), "(exclude=['test'])\n", (101, 119), False, 'from setuptools import setup, find_packages\n')]
|
#!/usr/bin/env python3
"""Forward command-line arguments to pytest and exit with its status."""
import sys

import pytest

if __name__ == '__main__':
    # pytest.main returns the exit code; propagate it to the shell.
    exit_code = pytest.main(sys.argv[1:])
    sys.exit(exit_code)
|
[
"pytest.main"
] |
[((92, 117), 'pytest.main', 'pytest.main', (['sys.argv[1:]'], {}), '(sys.argv[1:])\n', (103, 117), False, 'import pytest\n')]
|
# coding: utf-8
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from django import forms
from colaboradores.enums import AREAS
class FormColaborador(forms.Form):
    """Registration form for a collaborator: name, e-mail, area, password."""

    nome = forms.CharField(required=True, max_length=100)
    email = forms.CharField(required=True, max_length=100)
    area = forms.ChoiceField(required=True, choices=AREAS)
    senha = forms.CharField(required=True, widget=forms.PasswordInput)
class FormLogin(forms.Form):
    """Login form: e-mail plus masked password field."""

    email = forms.CharField(max_length=100)
    senha = forms.CharField(widget=forms.PasswordInput)
|
[
"django.forms.CharField",
"django.forms.ChoiceField"
] |
[((244, 290), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(True)'}), '(max_length=100, required=True)\n', (259, 290), False, 'from django import forms\n'), ((303, 349), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)', 'required': '(True)'}), '(max_length=100, required=True)\n', (318, 349), False, 'from django import forms\n'), ((361, 408), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'choices': 'AREAS', 'required': '(True)'}), '(choices=AREAS, required=True)\n', (378, 408), False, 'from django import forms\n'), ((421, 479), 'django.forms.CharField', 'forms.CharField', ([], {'widget': 'forms.PasswordInput', 'required': '(True)'}), '(widget=forms.PasswordInput, required=True)\n', (436, 479), False, 'from django import forms\n'), ((523, 554), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (538, 554), False, 'from django import forms\n'), ((567, 610), 'django.forms.CharField', 'forms.CharField', ([], {'widget': 'forms.PasswordInput'}), '(widget=forms.PasswordInput)\n', (582, 610), False, 'from django import forms\n')]
|
from django.db import models
import time
# Create your models here.
class words(models.Model):
    """Vocabulary entry: word, phonetics, translation and mnemonic fields."""

    word = models.CharField(max_length=40, default='', verbose_name="单词")
    symthm = models.CharField(max_length=40, default='', verbose_name="音标")
    chinese = models.CharField(max_length=100, default='', verbose_name="中文")
    analyzation = models.CharField(max_length=200, default='', verbose_name="联想法")
    # many-to-many: one word can belong to several products and vice versa
    product = models.ManyToManyField('myadmin.product')
class writes(models.Model):
    """A timed writing exercise (problem + formatting rules) tied to a product."""

    title = models.CharField(max_length=200, default="无标题", verbose_name="标题")
    time = models.IntegerField(default=0, verbose_name="限制时间")
    problem = models.CharField(max_length=2000, default="", verbose_name="问题")
    top = models.CharField(max_length=300, default="", verbose_name="规范")
    # exercises are removed together with their owning product
    product = models.ForeignKey('myadmin.product', null=True, on_delete=models.CASCADE)
class write_saving(models.Model):
    """A user's saved answer text for one writing exercise."""

    content = models.CharField(max_length=1000, default="", verbose_name="内容")
    writes = models.ForeignKey('writes', on_delete=models.CASCADE, null=True)
    user = models.ForeignKey('users.UserProfile', on_delete=models.CASCADE, null=True)
class read_data(models.Model):
    """Bilingual reading material with an optional mp3 and source link."""

    # creation timestamp is set automatically on insert
    Date = models.DateField(auto_now_add=True, verbose_name="创建时间")
    mp3_url = models.CharField(max_length=300, default="", verbose_name="mp3内容", null=True)
    eng = models.CharField(max_length=3000, default="", verbose_name="英文内容")
    chinese = models.CharField(max_length=5000, default="", verbose_name="中文内容")
    title = models.CharField(max_length=300, default="", verbose_name="标题")
    url = models.CharField(max_length=300, default="", verbose_name="地址")
class listen_data(models.Model):
    """Bilingual listening material with an optional mp3 and source link."""

    # creation timestamp is set automatically on insert
    Date = models.DateField(auto_now_add=True, verbose_name="创建时间")
    mp3_url = models.CharField(max_length=300, default="", verbose_name="mp3内容", null=True)
    eng = models.CharField(max_length=5500, default="", verbose_name="英文内容")
    chinese = models.CharField(max_length=3000, default="", verbose_name="中文内容")
    title = models.CharField(max_length=300, default="", verbose_name="标题")
    url = models.CharField(max_length=300, default="", verbose_name="地址")
    data_time = models.CharField(max_length=1000, default="", verbose_name="地址")
|
[
"django.db.models.ManyToManyField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"django.db.models.DateField"
] |
[((106, 168), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)', 'default': '""""""', 'verbose_name': '"""单词"""'}), "(max_length=40, default='', verbose_name='单词')\n", (122, 168), False, 'from django.db import models\n'), ((188, 250), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)', 'default': '""""""', 'verbose_name': '"""音标"""'}), "(max_length=40, default='', verbose_name='音标')\n", (204, 250), False, 'from django.db import models\n'), ((271, 334), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'default': '""""""', 'verbose_name': '"""中文"""'}), "(max_length=100, default='', verbose_name='中文')\n", (287, 334), False, 'from django.db import models\n'), ((359, 423), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'default': '""""""', 'verbose_name': '"""联想法"""'}), "(max_length=200, default='', verbose_name='联想法')\n", (375, 423), False, 'from django.db import models\n'), ((444, 485), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['"""myadmin.product"""'], {}), "('myadmin.product')\n", (466, 485), False, 'from django.db import models\n'), ((527, 593), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'default': '"""无标题"""', 'verbose_name': '"""标题"""'}), "(max_length=200, default='无标题', verbose_name='标题')\n", (543, 593), False, 'from django.db import models\n'), ((607, 658), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)', 'verbose_name': '"""限制时间"""'}), "(default=0, verbose_name='限制时间')\n", (626, 658), False, 'from django.db import models\n'), ((673, 737), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(2000)', 'default': '""""""', 'verbose_name': '"""问题"""'}), "(max_length=2000, default='', verbose_name='问题')\n", (689, 737), False, 'from django.db import models\n'), ((748, 811), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': 
'(300)', 'default': '""""""', 'verbose_name': '"""规范"""'}), "(max_length=300, default='', verbose_name='规范')\n", (764, 811), False, 'from django.db import models\n'), ((826, 899), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""myadmin.product"""'], {'null': '(True)', 'on_delete': 'models.CASCADE'}), "('myadmin.product', null=True, on_delete=models.CASCADE)\n", (843, 899), False, 'from django.db import models\n'), ((951, 1015), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1000)', 'default': '""""""', 'verbose_name': '"""内容"""'}), "(max_length=1000, default='', verbose_name='内容')\n", (967, 1015), False, 'from django.db import models\n'), ((1033, 1097), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""writes"""'], {'on_delete': 'models.CASCADE', 'null': '(True)'}), "('writes', on_delete=models.CASCADE, null=True)\n", (1050, 1097), False, 'from django.db import models\n'), ((1113, 1188), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""users.UserProfile"""'], {'on_delete': 'models.CASCADE', 'null': '(True)'}), "('users.UserProfile', on_delete=models.CASCADE, null=True)\n", (1130, 1188), False, 'from django.db import models\n'), ((1236, 1292), 'django.db.models.DateField', 'models.DateField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""创建时间"""'}), "(auto_now_add=True, verbose_name='创建时间')\n", (1252, 1292), False, 'from django.db import models\n'), ((1311, 1388), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'default': '""""""', 'verbose_name': '"""mp3内容"""', 'null': '(True)'}), "(max_length=300, default='', verbose_name='mp3内容', null=True)\n", (1327, 1388), False, 'from django.db import models\n'), ((1405, 1471), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(3000)', 'default': '""""""', 'verbose_name': '"""英文内容"""'}), "(max_length=3000, default='', verbose_name='英文内容')\n", (1421, 1471), False, 'from django.db import models\n'), ((1488, 1554), 
'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(5000)', 'default': '""""""', 'verbose_name': '"""中文内容"""'}), "(max_length=5000, default='', verbose_name='中文内容')\n", (1504, 1554), False, 'from django.db import models\n'), ((1569, 1632), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'default': '""""""', 'verbose_name': '"""标题"""'}), "(max_length=300, default='', verbose_name='标题')\n", (1585, 1632), False, 'from django.db import models\n'), ((1645, 1708), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'default': '""""""', 'verbose_name': '"""地址"""'}), "(max_length=300, default='', verbose_name='地址')\n", (1661, 1708), False, 'from django.db import models\n'), ((1758, 1814), 'django.db.models.DateField', 'models.DateField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""创建时间"""'}), "(auto_now_add=True, verbose_name='创建时间')\n", (1774, 1814), False, 'from django.db import models\n'), ((1833, 1910), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'default': '""""""', 'verbose_name': '"""mp3内容"""', 'null': '(True)'}), "(max_length=300, default='', verbose_name='mp3内容', null=True)\n", (1849, 1910), False, 'from django.db import models\n'), ((1927, 1993), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(5500)', 'default': '""""""', 'verbose_name': '"""英文内容"""'}), "(max_length=5500, default='', verbose_name='英文内容')\n", (1943, 1993), False, 'from django.db import models\n'), ((2010, 2076), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(3000)', 'default': '""""""', 'verbose_name': '"""中文内容"""'}), "(max_length=3000, default='', verbose_name='中文内容')\n", (2026, 2076), False, 'from django.db import models\n'), ((2091, 2154), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'default': '""""""', 'verbose_name': '"""标题"""'}), "(max_length=300, default='', verbose_name='标题')\n", (2107, 
2154), False, 'from django.db import models\n'), ((2167, 2230), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'default': '""""""', 'verbose_name': '"""地址"""'}), "(max_length=300, default='', verbose_name='地址')\n", (2183, 2230), False, 'from django.db import models\n'), ((2251, 2315), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1000)', 'default': '""""""', 'verbose_name': '"""地址"""'}), "(max_length=1000, default='', verbose_name='地址')\n", (2267, 2315), False, 'from django.db import models\n')]
|
import numpy as np
import matplotlib.pyplot as plt
from PIL import Image
import os
from .leafs import leafs
print("Reloaded preprocessing!")
def normalize(dataset):
    """Standardise every ``leaf.image`` in *dataset* in place.

    The pixel-wise mean and variance are computed across all images, the
    variance is floored at 0.1, and each image is replaced by
    ``(image - mean) / var``.

    NOTE(review): the division uses the variance, not the standard
    deviation — kept as-is to preserve behaviour.
    """
    count = len(dataset)
    mean = sum(leaf.image for leaf in dataset) / count
    var = sum((leaf.image - mean) ** 2 for leaf in dataset) / count
    floor = 0.1
    # floor the variance at `floor` to avoid dividing by tiny values
    var = (var - floor >= 0) * (var - floor) + floor
    for leaf in dataset:
        leaf.image = (leaf.image - mean) / var
def createTrainingAndTestingList(directory, shuffle=True):
    """Load the per-tree ``<label>.npy`` stacks and split them into lists.

    For each of the 7 labels, images 0..758 become training Leafs and
    images 760..838 become testing Leafs (pixel values scaled to [0, 1]);
    both lists are shuffled in place unless ``shuffle`` is False.

    NOTE(review): index 759 is used by neither split — confirm intent.
    """
    train_list = []
    test_list = []
    for label in range(7):
        stack = np.load(os.path.join(directory, str(label) + '.npy'))
        offset = label * 1000
        train_list.extend(
            leafs.Leaf(offset + i, label, stack[i] / 255) for i in range(759)
        )
        test_list.extend(
            leafs.Leaf(offset + i, label, stack[i] / 255) for i in range(760, 839)
        )
    if shuffle:
        np.random.shuffle(train_list)
        np.random.shuffle(test_list)
    return [train_list, test_list]
def collectData(root_path, save_path, cfactor, overwrite = False):
    '''processes images from root_path one-by-one and save them in same directory
    collect them tree by tree, set their labels and return a training and a testing list'''
    # side length of the compressed square matrices, e.g. 2000 // 50 = 40
    sizeOfMatrixes = int(2000//cfactor)
    #processing images to arrays one-by-one and save inplace
    # NOTE(review): `iid` is incremented but never used afterwards.
    iid = 0
    for (root, dirnames, filenames) in os.walk(root_path, topdown = True):
        for f in filenames:
            if f.endswith('.JPG'):
                # save next to the source image, suffixed with the matrix size
                savepath = os.path.join(root, os.path.splitext(f)[0])
                savepath += ('_' + str(sizeOfMatrixes) + 'x' + str(sizeOfMatrixes)) # for example + _50x50
                # skip images that were already processed unless overwrite is set
                if(not(os.path.isfile(savepath+'.npy')) or overwrite):
                    matriX = centr_cut_compress(os.path.join(root, f), cfactor)
                    np.save(savepath, matriX, allow_pickle=False)
                iid += 1
    # collecting all arrays from tree i into one big folder calld i.npy
    # NOTE(review): this loop covers trees 0..7 while
    # createTrainingAndTestingList only reads trees 0..6 — confirm intent.
    for i in range (0,8):
        tree_path = os.path.join(root_path, str(i))
        tree_save_path = os.path.join(save_path, str(sizeOfMatrixes) + 'x' + str(sizeOfMatrixes) ,str(i))
        leaf_list = []
        for (root, dirnames, filenames) in os.walk(tree_path , topdown=True):
            for f in filenames:
                # pick up only matrices of the requested compression size
                if f.endswith('_' + str(sizeOfMatrixes) + 'x' + str(sizeOfMatrixes) + '.npy'):
                    leaf_list.append(np.load(os.path.join(root, f)))
        leaf_array = np.array(leaf_list)
        np.save(tree_save_path, leaf_array, allow_pickle=False)
def desired_output(label):
    """One-vs-rest target: a (7, 1, 1) array of -1 with +1 at *label*."""
    target = np.full((7, 1, 1), -1.0)
    target[label, 0, 0] = 1.0
    return target
def centr_cut_compress(path, cfactor=50, square_side=2000, debug=False):
    """Centre, crop and down-scale a leaf photo into a CNN input matrix.

    The image is centred and cropped to a ``square_side`` square via
    :func:`center_leaf`, shrunk by ``cfactor`` and inverted; only the
    green channel of the result is returned.
    """
    image = center_leaf(path, square_side)
    side = image.size[0] // cfactor
    shrunk = image.resize((side, side))  # reduce the resolution
    pixels = np.array(shrunk)
    pixels ^= 0xFF  # invert the 8-bit values
    green = pixels[:, :, 1]  # keep only the green channel
    if debug:
        print('Image “', path, '“ opened with size:', image.size, 'and mode:', image.mode)
        print('compressed the square-image with lenght :',
              green.shape[0], ' with factor:', cfactor)
        print('output matrix has shape:', green.shape)
        plt.imshow(green)
        plt.tight_layout()
        plt.show()
    return green
def center_leaf(path, square_side=2000):
    """Crop a ``square_side`` square centred on the leaf's bright region.

    The centre is the average position of pixels with an inverted green
    value >= 180, searched inside fixed borders (hard-coded for this
    camera setup); the crop box is then placed around that centre.
    """
    # fixed search borders (ATTENTION: hard-coded for the source images)
    top, bottom, left, right = 500, 2900, 400, 4000
    half = square_side // 2
    image = Image.open(path).convert('RGB')
    inverted = np.array(image) ^ 0xFF  # invert the 8-bit values
    green = inverted[top:bottom, left:right, 1]  # green channel only
    # coordinates of all pixels brighter than 179 (after inversion)
    bright = np.argwhere(green >= 180)
    center_x = np.average(bright[:, 0]) + top
    center_y = np.average(bright[:, 1]) + left
    # crop box centred on the bright region (PIL order: l, u, r, d)
    box = (center_y - half, center_x - half, center_y + half, center_x + half)
    cropped = image.crop(box)
    image.close()
    return cropped
def find_overlap(root_path):
    """Pixel-wise maximum over all channels of every image under *root_path*.

    Used to visualise the area covered by leaves across the whole set so
    the interesting region can be classified manually; the accumulator
    shape (3456 x 4608) is hard-coded and must match the images.
    """
    maximum = np.zeros((3456, 4608))
    for root, dirs, files in os.walk(root_path, topdown=False):
        for name in files:
            # the directories also hold non-image files; images start with 'I'
            if name[0] != 'I':
                continue
            image = Image.open(os.path.join(root, name))
            # NOTE(review): convert() returns a new image and its result is
            # discarded here — kept as-is to preserve behaviour.
            image.convert('RGB')
            pixels = np.array(image)
            for channel in range(3):
                maximum = np.maximum(maximum, pixels[:, :, channel])
            image.close()
    return maximum
|
[
"numpy.save",
"matplotlib.pyplot.show",
"numpy.average",
"os.path.join",
"numpy.maximum",
"matplotlib.pyplot.imshow",
"os.walk",
"numpy.zeros",
"numpy.ones",
"PIL.Image.open",
"os.path.isfile",
"numpy.array",
"os.path.splitext",
"numpy.argwhere",
"matplotlib.pyplot.tight_layout",
"numpy.random.shuffle"
] |
[((318, 350), 'numpy.zeros', 'np.zeros', (['dataset[0].image.shape'], {}), '(dataset[0].image.shape)\n', (326, 350), True, 'import numpy as np\n'), ((469, 501), 'numpy.zeros', 'np.zeros', (['dataset[0].image.shape'], {}), '(dataset[0].image.shape)\n', (477, 501), True, 'import numpy as np\n'), ((1774, 1806), 'os.walk', 'os.walk', (['root_path'], {'topdown': '(True)'}), '(root_path, topdown=True)\n', (1781, 1806), False, 'import os\n'), ((3510, 3526), 'numpy.array', 'np.array', (['new_im'], {}), '(new_im)\n', (3518, 3526), True, 'import numpy as np\n'), ((4492, 4504), 'numpy.array', 'np.array', (['im'], {}), '(im)\n', (4500, 4504), True, 'import numpy as np\n'), ((4676, 4707), 'numpy.argwhere', 'np.argwhere', (['(oneD_matriz >= 180)'], {}), '(oneD_matriz >= 180)\n', (4687, 4707), True, 'import numpy as np\n'), ((5321, 5343), 'numpy.zeros', 'np.zeros', (['(3456, 4608)'], {}), '((3456, 4608))\n', (5329, 5343), True, 'import numpy as np\n'), ((5374, 5407), 'os.walk', 'os.walk', (['root_path'], {'topdown': '(False)'}), '(root_path, topdown=False)\n', (5381, 5407), False, 'import os\n'), ((1263, 1289), 'numpy.random.shuffle', 'np.random.shuffle', (['l_train'], {}), '(l_train)\n', (1280, 1289), True, 'import numpy as np\n'), ((1298, 1323), 'numpy.random.shuffle', 'np.random.shuffle', (['l_test'], {}), '(l_test)\n', (1315, 1323), True, 'import numpy as np\n'), ((2631, 2663), 'os.walk', 'os.walk', (['tree_path'], {'topdown': '(True)'}), '(tree_path, topdown=True)\n', (2638, 2663), False, 'import os\n'), ((2885, 2904), 'numpy.array', 'np.array', (['leaf_list'], {}), '(leaf_list)\n', (2893, 2904), True, 'import numpy as np\n'), ((2913, 2968), 'numpy.save', 'np.save', (['tree_save_path', 'leaf_array'], {'allow_pickle': '(False)'}), '(tree_save_path, leaf_array, allow_pickle=False)\n', (2920, 2968), True, 'import numpy as np\n'), ((3012, 3030), 'numpy.ones', 'np.ones', (['(7, 1, 1)'], {}), '((7, 1, 1))\n', (3019, 3030), True, 'import numpy as np\n'), ((3959, 3982), 
'matplotlib.pyplot.imshow', 'plt.imshow', (['oneD_matriz'], {}), '(oneD_matriz)\n', (3969, 3982), True, 'import matplotlib.pyplot as plt\n'), ((3991, 4009), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (4007, 4009), True, 'import matplotlib.pyplot as plt\n'), ((4018, 4028), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4026, 4028), True, 'import matplotlib.pyplot as plt\n'), ((4782, 4807), 'numpy.average', 'np.average', (['indices[:, 0]'], {}), '(indices[:, 0])\n', (4792, 4807), True, 'import numpy as np\n'), ((4824, 4849), 'numpy.average', 'np.average', (['indices[:, 1]'], {}), '(indices[:, 1])\n', (4834, 4849), True, 'import numpy as np\n'), ((4447, 4463), 'PIL.Image.open', 'Image.open', (['path'], {}), '(path)\n', (4457, 4463), False, 'from PIL import Image\n'), ((5459, 5483), 'os.path.join', 'os.path.join', (['root', 'name'], {}), '(root, name)\n', (5471, 5483), False, 'import os\n'), ((5620, 5639), 'PIL.Image.open', 'Image.open', (['im_path'], {}), '(im_path)\n', (5630, 5639), False, 'from PIL import Image\n'), ((5702, 5717), 'numpy.array', 'np.array', (['image'], {}), '(image)\n', (5710, 5717), True, 'import numpy as np\n'), ((5761, 5797), 'numpy.maximum', 'np.maximum', (['maximum', 'matriz[:, :, 0]'], {}), '(maximum, matriz[:, :, 0])\n', (5771, 5797), True, 'import numpy as np\n'), ((5824, 5860), 'numpy.maximum', 'np.maximum', (['maximum', 'matriz[:, :, 1]'], {}), '(maximum, matriz[:, :, 1])\n', (5834, 5860), True, 'import numpy as np\n'), ((5887, 5923), 'numpy.maximum', 'np.maximum', (['maximum', 'matriz[:, :, 2]'], {}), '(maximum, matriz[:, :, 2])\n', (5897, 5923), True, 'import numpy as np\n'), ((2221, 2266), 'numpy.save', 'np.save', (['savepath', 'matriX'], {'allow_pickle': '(False)'}), '(savepath, matriX, allow_pickle=False)\n', (2228, 2266), True, 'import numpy as np\n'), ((1919, 1938), 'os.path.splitext', 'os.path.splitext', (['f'], {}), '(f)\n', (1935, 1938), False, 'import os\n'), ((2073, 2106), 
'os.path.isfile', 'os.path.isfile', (["(savepath + '.npy')"], {}), "(savepath + '.npy')\n", (2087, 2106), False, 'import os\n'), ((2169, 2190), 'os.path.join', 'os.path.join', (['root', 'f'], {}), '(root, f)\n', (2181, 2190), False, 'import os\n'), ((2840, 2861), 'os.path.join', 'os.path.join', (['root', 'f'], {}), '(root, f)\n', (2852, 2861), False, 'import os\n')]
|
import torch
import sys
import os
sys.path.append(os.getcwd())
sys.path.append(os.path.dirname(os.path.dirname(os.getcwd())))
from unimodals.MVAE import TSEncoder, TSDecoder # noqa
from utils.helper_modules import Sequential2 # noqa
from objective_functions.objectives_for_supervised_learning import MFM_objective # noqa
from torch import nn # noqa
from unimodals.common_models import MLP # noqa
from training_structures.Supervised_Learning import train, test # noqa
from datasets.affect.get_data import get_dataloader # noqa
from fusions.common_fusions import Concat # noqa
# --- experiment configuration -----------------------------------------------
classes = 2        # binary classification target
n_latent = 256     # latent dimensionality shared by all encoders
# per-modality feature dimensions; presumably vision/audio/text for
# MOSI (35/74/300) — TODO confirm against the dataset loader
dim_0 = 35
dim_1 = 74
dim_2 = 300
timestep = 50      # sequences are padded/truncated to this length
# mosi_data.pkl, mosei_senti_data.pkl
# mosi_raw.pkl, mosei_raw.pkl, sarcasm.pkl, humor.pkl
# raw_path: mosi.hdf5, mosei.hdf5, sarcasm_raw_text.pkl, humor_raw_text.pkl
traindata, validdata, test_robust = get_dataloader(
    '/home/paul/MultiBench/mosi_raw.pkl', task='classification', robust_test=False, max_pad=True, max_seq_len=timestep)
# one time-series encoder/decoder pair per modality, all on GPU
encoders = [TSEncoder(dim_0, 30, n_latent, timestep, returnvar=False).cuda(), TSEncoder(
    dim_1, 30, n_latent, timestep, returnvar=False).cuda(), TSEncoder(dim_2, 30, n_latent, timestep, returnvar=False).cuda()]
decoders = [TSDecoder(dim_0, 30, n_latent, timestep).cuda(), TSDecoder(
    dim_1, 30, n_latent, timestep).cuda(), TSDecoder(dim_2, 30, n_latent, timestep).cuda()]
# fusion: concatenate the three latents, then project to n_latent // 2
fuse = Sequential2(Concat(), MLP(3*n_latent, n_latent, n_latent//2)).cuda()
# per-modality intermediate projections used by the MFM objective
intermediates = [MLP(n_latent, n_latent//2, n_latent//2).cuda(), MLP(n_latent,
                 n_latent//2, n_latent//2).cuda(), MLP(n_latent, n_latent//2, n_latent//2).cuda()]
head = MLP(n_latent//2, 20, classes).cuda()
argsdict = {'decoders': decoders, 'intermediates': intermediates}
additional_modules = decoders+intermediates
# MFM objective: reconstruction weight 2.0, one MSE loss per modality
objective = MFM_objective(2.0, [torch.nn.MSELoss(
), torch.nn.MSELoss(), torch.nn.MSELoss()], [1.0, 1.0, 1.0])
# train for 200 epochs; best checkpoint is saved to mosi_mfm_best.pt
train(encoders, fuse, head, traindata, validdata, 200, additional_modules,
      objective=objective, objective_args_dict=argsdict, save='mosi_mfm_best.pt')
print("Testing:")
model = torch.load('mosi_mfm_best.pt').cuda()
test(model=model, test_dataloaders_all=test_robust,
     dataset='mosi', is_packed=False, no_robust=True)
|
[
"unimodals.MVAE.TSEncoder",
"datasets.affect.get_data.get_dataloader",
"training_structures.Supervised_Learning.train",
"torch.nn.MSELoss",
"unimodals.MVAE.TSDecoder",
"os.getcwd",
"fusions.common_fusions.Concat",
"torch.load",
"training_structures.Supervised_Learning.test",
"unimodals.common_models.MLP"
] |
[((859, 993), 'datasets.affect.get_data.get_dataloader', 'get_dataloader', (['"""/home/paul/MultiBench/mosi_raw.pkl"""'], {'task': '"""classification"""', 'robust_test': '(False)', 'max_pad': '(True)', 'max_seq_len': 'timestep'}), "('/home/paul/MultiBench/mosi_raw.pkl', task='classification',\n robust_test=False, max_pad=True, max_seq_len=timestep)\n", (873, 993), False, 'from datasets.affect.get_data import get_dataloader\n'), ((1954, 2108), 'training_structures.Supervised_Learning.train', 'train', (['encoders', 'fuse', 'head', 'traindata', 'validdata', '(200)', 'additional_modules'], {'objective': 'objective', 'objective_args_dict': 'argsdict', 'save': '"""mosi_mfm_best.pt"""'}), "(encoders, fuse, head, traindata, validdata, 200, additional_modules,\n objective=objective, objective_args_dict=argsdict, save='mosi_mfm_best.pt')\n", (1959, 2108), False, 'from training_structures.Supervised_Learning import train, test\n'), ((2177, 2281), 'training_structures.Supervised_Learning.test', 'test', ([], {'model': 'model', 'test_dataloaders_all': 'test_robust', 'dataset': '"""mosi"""', 'is_packed': '(False)', 'no_robust': '(True)'}), "(model=model, test_dataloaders_all=test_robust, dataset='mosi',\n is_packed=False, no_robust=True)\n", (2181, 2281), False, 'from training_structures.Supervised_Learning import train, test\n'), ((51, 62), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (60, 62), False, 'import os\n'), ((1692, 1723), 'unimodals.common_models.MLP', 'MLP', (['(n_latent // 2)', '(20)', 'classes'], {}), '(n_latent // 2, 20, classes)\n', (1695, 1723), False, 'from unimodals.common_models import MLP\n'), ((1874, 1892), 'torch.nn.MSELoss', 'torch.nn.MSELoss', ([], {}), '()\n', (1890, 1892), False, 'import torch\n'), ((1895, 1913), 'torch.nn.MSELoss', 'torch.nn.MSELoss', ([], {}), '()\n', (1911, 1913), False, 'import torch\n'), ((1915, 1933), 'torch.nn.MSELoss', 'torch.nn.MSELoss', ([], {}), '()\n', (1931, 1933), False, 'import torch\n'), ((2138, 2168), 'torch.load', 
'torch.load', (['"""mosi_mfm_best.pt"""'], {}), "('mosi_mfm_best.pt')\n", (2148, 2168), False, 'import torch\n'), ((112, 123), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (121, 123), False, 'import os\n'), ((1008, 1065), 'unimodals.MVAE.TSEncoder', 'TSEncoder', (['dim_0', '(30)', 'n_latent', 'timestep'], {'returnvar': '(False)'}), '(dim_0, 30, n_latent, timestep, returnvar=False)\n', (1017, 1065), False, 'from unimodals.MVAE import TSEncoder, TSDecoder\n'), ((1074, 1131), 'unimodals.MVAE.TSEncoder', 'TSEncoder', (['dim_1', '(30)', 'n_latent', 'timestep'], {'returnvar': '(False)'}), '(dim_1, 30, n_latent, timestep, returnvar=False)\n', (1083, 1131), False, 'from unimodals.MVAE import TSEncoder, TSDecoder\n'), ((1145, 1202), 'unimodals.MVAE.TSEncoder', 'TSEncoder', (['dim_2', '(30)', 'n_latent', 'timestep'], {'returnvar': '(False)'}), '(dim_2, 30, n_latent, timestep, returnvar=False)\n', (1154, 1202), False, 'from unimodals.MVAE import TSEncoder, TSDecoder\n'), ((1224, 1264), 'unimodals.MVAE.TSDecoder', 'TSDecoder', (['dim_0', '(30)', 'n_latent', 'timestep'], {}), '(dim_0, 30, n_latent, timestep)\n', (1233, 1264), False, 'from unimodals.MVAE import TSEncoder, TSDecoder\n'), ((1273, 1313), 'unimodals.MVAE.TSDecoder', 'TSDecoder', (['dim_1', '(30)', 'n_latent', 'timestep'], {}), '(dim_1, 30, n_latent, timestep)\n', (1282, 1313), False, 'from unimodals.MVAE import TSEncoder, TSDecoder\n'), ((1327, 1367), 'unimodals.MVAE.TSDecoder', 'TSDecoder', (['dim_2', '(30)', 'n_latent', 'timestep'], {}), '(dim_2, 30, n_latent, timestep)\n', (1336, 1367), False, 'from unimodals.MVAE import TSEncoder, TSDecoder\n'), ((1396, 1404), 'fusions.common_fusions.Concat', 'Concat', ([], {}), '()\n', (1402, 1404), False, 'from fusions.common_fusions import Concat\n'), ((1406, 1448), 'unimodals.common_models.MLP', 'MLP', (['(3 * n_latent)', 'n_latent', '(n_latent // 2)'], {}), '(3 * n_latent, n_latent, n_latent // 2)\n', (1409, 1448), False, 'from unimodals.common_models import MLP\n'), ((1471, 
1514), 'unimodals.common_models.MLP', 'MLP', (['n_latent', '(n_latent // 2)', '(n_latent // 2)'], {}), '(n_latent, n_latent // 2, n_latent // 2)\n', (1474, 1514), False, 'from unimodals.common_models import MLP\n'), ((1519, 1562), 'unimodals.common_models.MLP', 'MLP', (['n_latent', '(n_latent // 2)', '(n_latent // 2)'], {}), '(n_latent, n_latent // 2, n_latent // 2)\n', (1522, 1562), False, 'from unimodals.common_models import MLP\n'), ((1636, 1679), 'unimodals.common_models.MLP', 'MLP', (['n_latent', '(n_latent // 2)', '(n_latent // 2)'], {}), '(n_latent, n_latent // 2, n_latent // 2)\n', (1639, 1679), False, 'from unimodals.common_models import MLP\n')]
|
"""Command Line Interface of the nerblackbox package."""
import os
import subprocess
from os.path import join
import click
from typing import Dict, Any
from nerblackbox.modules.main import NerBlackBoxMain
########################################################################################################################
# CLI
########################################################################################################################
@click.group()
@click.option(
    "--data_dir", default="data", type=str, help="[str] relative path of data directory"
)
@click.option(
    "--modify/--no-modify", default=False, help="[bool] if flag=set_up_dataset"
)
@click.option(
    "--val_fraction", default=None, type=float, help="[float] if flag=set_up_dataset"
)
@click.option(
    "--verbose/--no-verbose", default=False, help="[bool] if flag=set_up_dataset"
)
@click.option("--run_name", default=None, type=str, help="[str] if flag=run_experiment")
@click.option("--device", default=None, type=str, help="[str] if flag=run_experiment")
@click.option("--fp16/--no-fp16", default=False, help="[bool] if flag=run_experiment")
@click.option("--results/--no-results", default=False, help="[bool] if flag=clear_data")
@click.pass_context
def nerbb(ctx, **supplied_options):
    # Group callback: export BASE_DIR / DATA_DIR environment variables and
    # stash the remaining CLI options on the click context for subcommands.
    # (no docstring on purpose — click would surface it as --help text)
    ctx.ensure_object(dict)
    # keep only the options the user actually supplied
    options = {name: value for name, value in supplied_options.items() if value is not None}
    base_dir = os.getcwd()
    relative_data_dir = options.pop("data_dir")
    os.environ["BASE_DIR"] = base_dir
    os.environ["DATA_DIR"] = join(base_dir, relative_data_dir)
    ctx.obj = options
########################################################################################################################
# COMMANDS HELPER FUNCTION
########################################################################################################################
def _run_nerblackbox_main(_ctx_obj: Dict[str, Any], _kwargs: Dict[str, str]) -> None:
    """Merge the CLI context options with command arguments and run the app.

    Every nerbb subcommand funnels through this helper.  The ``dict(**a,
    **b)`` form is kept deliberately: it raises on duplicate keys instead
    of silently overriding them.
    """
    combined = dict(**_ctx_obj, **_kwargs)
    runner = NerBlackBoxMain(**combined)
    runner.main()
########################################################################################################################
# COMMANDS
########################################################################################################################
@nerbb.command(name="analyze_data")
@click.pass_context
@click.argument("dataset_name")
def analyze_data(ctx, dataset_name: str):
    """analyze a dataset."""
    # forward the flag plus the positional argument to the main dispatcher
    _run_nerblackbox_main(
        ctx.obj,
        {"flag": "analyze_data", "dataset_name": dataset_name},
    )
@nerbb.command(name="clear_data")
@click.pass_context
def clear_data(ctx):
    """clear data (checkpoints and optionally results)."""
    # the group's --results/--no-results option travels via ctx.obj
    _run_nerblackbox_main(ctx.obj, {"flag": "clear_data"})
@nerbb.command(name="download")
@click.pass_context
def download(ctx):
    """
    download & prepare built-in datasets, prepare experiment configuration.
    needs to be called exactly once before any other CLI/API commands of the package are executed
    in case built-in datasets shall be used.
    """
    # no extra arguments beyond the dispatch flag
    _run_nerblackbox_main(ctx.obj, {"flag": "download"})
@nerbb.command(name="get_experiments")
@click.pass_context
def get_experiments(ctx):
    """get overview on experiments."""
    # no extra arguments beyond the dispatch flag
    _run_nerblackbox_main(ctx.obj, {"flag": "get_experiments"})
@nerbb.command(name="get_experiment_results")
@click.pass_context
@click.argument("experiment_name")
def get_experiment_results(ctx, experiment_name: str):
"""get results for a single experiment."""
kwargs = {
"flag": "get_experiment_results",
"experiment_name": experiment_name,
}
_run_nerblackbox_main(ctx.obj, kwargs)
@nerbb.command(name="get_experiments_results")
@click.pass_context
def get_experiments_results(ctx):
"""get results for multiple experiments."""
kwargs = {
"flag": "get_experiments_results",
}
_run_nerblackbox_main(ctx.obj, kwargs)
@nerbb.command(name="init")
@click.pass_context
def init(ctx):
"""
initialize the data_dir directory.
needs to be called exactly once before any other CLI/API commands of the package are executed.
"""
kwargs = {
"flag": "init",
}
_run_nerblackbox_main(ctx.obj, kwargs)
@nerbb.command(name="mlflow")
def mlflow():
"""show detailed experiment results in mlflow (port = 5000)."""
cd_dir = f'{join(os.environ.get("DATA_DIR"), "results")}'
subprocess.run(f"cd {cd_dir}; mlflow ui", shell=True)
@nerbb.command(name="predict")
@click.pass_context
@click.argument("experiment_name")
@click.argument("text_input")
def predict(ctx, experiment_name: str, text_input: str):
"""predict labels for text_input using the best model of a single experiment."""
kwargs = {
"flag": "predict",
"experiment_name": experiment_name,
"text_input": text_input,
}
_run_nerblackbox_main(ctx.obj, kwargs)
@nerbb.command(name="predict_proba")
@click.pass_context
@click.argument("experiment_name")
@click.argument("text_input")
def predict_proba(ctx, experiment_name: str, text_input: str):
"""predict label probabilities for text_input using the best model of a single experiment."""
kwargs = {
"flag": "predict_proba",
"experiment_name": experiment_name,
"text_input": text_input,
}
_run_nerblackbox_main(ctx.obj, kwargs)
@nerbb.command(name="run_experiment")
@click.pass_context
@click.argument("experiment_name")
def run_experiment(ctx, experiment_name: str):
"""run a single experiment."""
kwargs = {
"flag": "run_experiment",
"experiment_name": experiment_name,
}
_run_nerblackbox_main(ctx.obj, kwargs)
@nerbb.command(name="set_up_dataset")
@click.pass_context
@click.argument("dataset_name")
def set_up_dataset(ctx, dataset_name: str):
"""set up a dataset using the associated Formatter class."""
kwargs = {
"flag": "set_up_dataset",
"dataset_name": dataset_name,
}
_run_nerblackbox_main(ctx.obj, kwargs)
@nerbb.command(name="show_experiment_config")
@click.pass_context
@click.argument("experiment_name")
def show_experiment_config(ctx, experiment_name: str):
"""show a single experiment configuration in detail."""
kwargs = {
"flag": "show_experiment_config",
"experiment_name": experiment_name,
}
_run_nerblackbox_main(ctx.obj, kwargs)
@nerbb.command(name="show_experiment_configs")
@click.pass_context
def show_experiment_configs(ctx):
"""show overview on all available experiment configurations."""
kwargs = {
"flag": "show_experiment_configs",
}
_run_nerblackbox_main(ctx.obj, kwargs)
@nerbb.command(name="tensorboard")
def tensorboard():
"""show detailed experiment results in tensorboard. (port = 6006)."""
cd_dir = f'{join(os.environ.get("DATA_DIR"), "results")}'
subprocess.run(
f"cd {cd_dir}; tensorboard --logdir tensorboard --reload_multifile=true",
shell=True,
)
|
[
"subprocess.run",
"click.argument",
"os.getcwd",
"click.option",
"nerblackbox.modules.main.NerBlackBoxMain",
"os.environ.get",
"click.group",
"os.path.join"
] |
[((457, 470), 'click.group', 'click.group', ([], {}), '()\n', (468, 470), False, 'import click\n'), ((472, 575), 'click.option', 'click.option', (['"""--data_dir"""'], {'default': '"""data"""', 'type': 'str', 'help': '"""[str] relative path of data directory"""'}), "('--data_dir', default='data', type=str, help=\n '[str] relative path of data directory')\n", (484, 575), False, 'import click\n'), ((578, 672), 'click.option', 'click.option', (['"""--modify/--no-modify"""'], {'default': '(False)', 'help': '"""[bool] if flag=set_up_dataset"""'}), "('--modify/--no-modify', default=False, help=\n '[bool] if flag=set_up_dataset')\n", (590, 672), False, 'import click\n'), ((675, 775), 'click.option', 'click.option', (['"""--val_fraction"""'], {'default': 'None', 'type': 'float', 'help': '"""[float] if flag=set_up_dataset"""'}), "('--val_fraction', default=None, type=float, help=\n '[float] if flag=set_up_dataset')\n", (687, 775), False, 'import click\n'), ((778, 874), 'click.option', 'click.option', (['"""--verbose/--no-verbose"""'], {'default': '(False)', 'help': '"""[bool] if flag=set_up_dataset"""'}), "('--verbose/--no-verbose', default=False, help=\n '[bool] if flag=set_up_dataset')\n", (790, 874), False, 'import click\n'), ((877, 969), 'click.option', 'click.option', (['"""--run_name"""'], {'default': 'None', 'type': 'str', 'help': '"""[str] if flag=run_experiment"""'}), "('--run_name', default=None, type=str, help=\n '[str] if flag=run_experiment')\n", (889, 969), False, 'import click\n'), ((966, 1056), 'click.option', 'click.option', (['"""--device"""'], {'default': 'None', 'type': 'str', 'help': '"""[str] if flag=run_experiment"""'}), "('--device', default=None, type=str, help=\n '[str] if flag=run_experiment')\n", (978, 1056), False, 'import click\n'), ((1053, 1143), 'click.option', 'click.option', (['"""--fp16/--no-fp16"""'], {'default': '(False)', 'help': '"""[bool] if flag=run_experiment"""'}), "('--fp16/--no-fp16', default=False, help=\n '[bool] if 
flag=run_experiment')\n", (1065, 1143), False, 'import click\n'), ((1140, 1232), 'click.option', 'click.option', (['"""--results/--no-results"""'], {'default': '(False)', 'help': '"""[bool] if flag=clear_data"""'}), "('--results/--no-results', default=False, help=\n '[bool] if flag=clear_data')\n", (1152, 1232), False, 'import click\n'), ((2645, 2675), 'click.argument', 'click.argument', (['"""dataset_name"""'], {}), "('dataset_name')\n", (2659, 2675), False, 'import click\n'), ((3805, 3838), 'click.argument', 'click.argument', (['"""experiment_name"""'], {}), "('experiment_name')\n", (3819, 3838), False, 'import click\n'), ((4944, 4977), 'click.argument', 'click.argument', (['"""experiment_name"""'], {}), "('experiment_name')\n", (4958, 4977), False, 'import click\n'), ((4979, 5007), 'click.argument', 'click.argument', (['"""text_input"""'], {}), "('text_input')\n", (4993, 5007), False, 'import click\n'), ((5379, 5412), 'click.argument', 'click.argument', (['"""experiment_name"""'], {}), "('experiment_name')\n", (5393, 5412), False, 'import click\n'), ((5414, 5442), 'click.argument', 'click.argument', (['"""text_input"""'], {}), "('text_input')\n", (5428, 5442), False, 'import click\n'), ((5840, 5873), 'click.argument', 'click.argument', (['"""experiment_name"""'], {}), "('experiment_name')\n", (5854, 5873), False, 'import click\n'), ((6159, 6189), 'click.argument', 'click.argument', (['"""dataset_name"""'], {}), "('dataset_name')\n", (6173, 6189), False, 'import click\n'), ((6504, 6537), 'click.argument', 'click.argument', (['"""experiment_name"""'], {}), "('experiment_name')\n", (6518, 6537), False, 'import click\n'), ((1428, 1439), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1437, 1439), False, 'import os\n'), ((1545, 1569), 'os.path.join', 'join', (['base_dir', 'data_dir'], {}), '(base_dir, data_dir)\n', (1549, 1569), False, 'from os.path import join\n'), ((2279, 2304), 'nerblackbox.modules.main.NerBlackBoxMain', 'NerBlackBoxMain', ([], {}), '(**kwargs)\n', 
(2294, 2304), False, 'from nerblackbox.modules.main import NerBlackBoxMain\n'), ((4836, 4889), 'subprocess.run', 'subprocess.run', (['f"""cd {cd_dir}; mlflow ui"""'], {'shell': '(True)'}), "(f'cd {cd_dir}; mlflow ui', shell=True)\n", (4850, 4889), False, 'import subprocess\n'), ((7277, 7386), 'subprocess.run', 'subprocess.run', (['f"""cd {cd_dir}; tensorboard --logdir tensorboard --reload_multifile=true"""'], {'shell': '(True)'}), "(\n f'cd {cd_dir}; tensorboard --logdir tensorboard --reload_multifile=true',\n shell=True)\n", (7291, 7386), False, 'import subprocess\n'), ((4791, 4817), 'os.environ.get', 'os.environ.get', (['"""DATA_DIR"""'], {}), "('DATA_DIR')\n", (4805, 4817), False, 'import os\n'), ((7232, 7258), 'os.environ.get', 'os.environ.get', (['"""DATA_DIR"""'], {}), "('DATA_DIR')\n", (7246, 7258), False, 'import os\n')]
|
from config import logininfo
import re,json,time,configparser,logging,sys,os,requests,asyncio
def login(login_url, username, password):
#请求头
my_headers = {
'User-Agent' : 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.132 Safari/537.36',
'Accept' : 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
'Accept-Encoding' : 'gzip',
'Accept-Language' : 'zh-CN,zh;q=0.8,en;q=0.6,zh-TW;q=0.4'
}
#获取token
sss = requests.Session()
try:
r = sss.get(login_url, headers = my_headers)
except:
#logging.error('[error]fail to login,check your config and network')
return
reg = r'<input type="hidden" name="nonce" value="(.*)">'
pattern = re.compile(reg)
result = pattern.findall(r.content.decode('utf-8'))
token = result[0]
#postdata
my_data = {
'name' : username,
'password' : password,
'nonce' : token,
}
#登录后
try:
r = sss.post(login_url, headers = my_headers, data = my_data)
except:
#logging.error('[error]fail to login,check your config and network')
return
if r.ok == True:
logging.info('[success]login ok,start the robot...')
return sss
else:
pass
#logging.error('[error]fail to login,check your config and network')
#取配置文件
def readConf(configFile,subject,key):
cf = configparser.ConfigParser()
filename = cf.read(configFile)
return cf.get(subject,key)
#取用户列表
def get_user_list():
theSession = login(logininfo.login_url,logininfo.username,logininfo.password)
apiUrl = 'http://ip:port/api/v1/users' #ctfd 地址
try:
responseJson = theSession.get(apiUrl)
except:
logging.error('[error]fail to get api info,continue.')
return []
jsonInfo = json.loads(responseJson.text)
if jsonInfo['success'] != True:
logging.error("error to get userlist")
return []
userList = eval(str(jsonInfo['data']))
return userList
#取提交flag信息
def get_attempt_info():
theSession = login(logininfo.login_url,logininfo.username,logininfo.password)
apiUrl = 'http://ip:port/api/v1/submissions' #ctfd 地址
try:
responseJson = theSession.get(apiUrl)
except:
logging.error('[error0]fail to get api info,continue.')
return []
jsonInfo = json.loads(responseJson.text)
if jsonInfo['success'] != True:
logging.error("error to get attemptlist")
return []
allList = eval(str(jsonInfo['data']))
return allList
#异步循环发送请求
async def deal_user_list():
global userLen,userList
while True:
try:
tmpList = get_user_list()
tmpLen = len(tmpList)
print(userLen,tmpLen)
if tmpLen == 0:
await asyncio.sleep(3)
continue
if userLen < tmpLen:
for i in range(userLen,tmpLen):
message = tmpList[i]['name']+" 成功注册~"
requests.get(logininfo.group_api+message)
userLen = tmpLen
userList = tmpList
else:
userLen = tmpLen
userlist = tmpList
await asyncio.sleep(3)
except TypeError:
logging.error('[error1]fail to get api info,continue.')
continue
await asyncio.sleep(3)
async def deal_attemp_list():
global userLen,userList,allLen,allList
while True:
try:
tmpallList = get_attempt_info()
tmpallLen = len(tmpallList)
if tmpallLen == 0:
await asyncio.sleep(3)
continue
if allLen < tmpallLen:
for i in range(allLen,tmpallLen):
if tmpallList[i]['type'] == "correct":
chaname = ""
for s in userList:
if str(s['id']) == str(tmpallList[i]['user_id']):
chaname = s['name']
if chaname == "":
continue
await asyncio.sleep(3)
message = "恭喜" + chaname + "做出" + str(tmpallList[i]['challenge']['category'])+"题目-" + str(tmpallList[i]['challenge']['name'])
#requests.get(logininfo.url_api+message)
requests.get(logininfo.group_api+message)
allLen = tmpallLen
allList = tmpallList
else:
allLen = tmpallLen
allList = tmpallList
await asyncio.sleep(3)
except TypeError:
logging.error('[error2]fail to get api info,continue.')
continue
if __name__ == ("__main__"):
logging.basicConfig(filename='err.log',level=logging.ERROR,format='%(asctime)s %(filename)s[line:%(lineno)d] %(message)s',datefmt='%Y-%m-%d')
# 全局变量声明
userList = get_user_list()
#userLen = 0
userLen = len(userList)
allList = get_attempt_info()
allLen = len(allList)
#allLen = 0
loop = asyncio.get_event_loop()
tasks = [deal_user_list(),deal_attemp_list()]
loop.run_until_complete(asyncio.wait(tasks))
loop.close()
|
[
"logging.error",
"asyncio.get_event_loop",
"json.loads",
"logging.basicConfig",
"asyncio.sleep",
"requests.Session",
"logging.info",
"requests.get",
"asyncio.wait",
"configparser.ConfigParser",
"re.compile"
] |
[((536, 554), 'requests.Session', 'requests.Session', ([], {}), '()\n', (552, 554), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((796, 811), 're.compile', 're.compile', (['reg'], {}), '(reg)\n', (806, 811), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((1449, 1476), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (1474, 1476), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((1869, 1898), 'json.loads', 'json.loads', (['responseJson.text'], {}), '(responseJson.text)\n', (1879, 1898), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((2403, 2432), 'json.loads', 'json.loads', (['responseJson.text'], {}), '(responseJson.text)\n', (2413, 2432), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((4857, 5011), 'logging.basicConfig', 'logging.basicConfig', ([], {'filename': '"""err.log"""', 'level': 'logging.ERROR', 'format': '"""%(asctime)s %(filename)s[line:%(lineno)d] %(message)s"""', 'datefmt': '"""%Y-%m-%d"""'}), "(filename='err.log', level=logging.ERROR, format=\n '%(asctime)s %(filename)s[line:%(lineno)d] %(message)s', datefmt='%Y-%m-%d'\n )\n", (4876, 5011), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((5172, 5196), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (5194, 5196), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((1228, 1280), 'logging.info', 'logging.info', (['"""[success]login ok,start the robot..."""'], {}), "('[success]login ok,start the robot...')\n", (1240, 1280), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((1943, 1981), 'logging.error', 'logging.error', (['"""error to get userlist"""'], {}), "('error to get userlist')\n", (1956, 1981), False, 'import re, json, time, configparser, 
logging, sys, os, requests, asyncio\n'), ((2477, 2518), 'logging.error', 'logging.error', (['"""error to get attemptlist"""'], {}), "('error to get attemptlist')\n", (2490, 2518), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((5275, 5294), 'asyncio.wait', 'asyncio.wait', (['tasks'], {}), '(tasks)\n', (5287, 5294), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((1781, 1835), 'logging.error', 'logging.error', (['"""[error]fail to get api info,continue."""'], {}), "('[error]fail to get api info,continue.')\n", (1794, 1835), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((2314, 2369), 'logging.error', 'logging.error', (['"""[error0]fail to get api info,continue."""'], {}), "('[error0]fail to get api info,continue.')\n", (2327, 2369), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((3265, 3281), 'asyncio.sleep', 'asyncio.sleep', (['(3)'], {}), '(3)\n', (3278, 3281), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((3320, 3375), 'logging.error', 'logging.error', (['"""[error1]fail to get api info,continue."""'], {}), "('[error1]fail to get api info,continue.')\n", (3333, 3375), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((4683, 4699), 'asyncio.sleep', 'asyncio.sleep', (['(3)'], {}), '(3)\n', (4696, 4699), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((4738, 4793), 'logging.error', 'logging.error', (['"""[error2]fail to get api info,continue."""'], {}), "('[error2]fail to get api info,continue.')\n", (4751, 4793), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((2850, 2866), 'asyncio.sleep', 'asyncio.sleep', (['(3)'], {}), '(3)\n', (2863, 2866), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((3051, 3094), 
'requests.get', 'requests.get', (['(logininfo.group_api + message)'], {}), '(logininfo.group_api + message)\n', (3063, 3094), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((3415, 3431), 'asyncio.sleep', 'asyncio.sleep', (['(3)'], {}), '(3)\n', (3428, 3431), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((3672, 3688), 'asyncio.sleep', 'asyncio.sleep', (['(3)'], {}), '(3)\n', (3685, 3688), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((4461, 4504), 'requests.get', 'requests.get', (['(logininfo.group_api + message)'], {}), '(logininfo.group_api + message)\n', (4473, 4504), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n'), ((4205, 4221), 'asyncio.sleep', 'asyncio.sleep', (['(3)'], {}), '(3)\n', (4218, 4221), False, 'import re, json, time, configparser, logging, sys, os, requests, asyncio\n')]
|
__author__ = "<NAME>"
__copyright__ = "Copyright 2020, <NAME>"
__email__ = "<EMAIL>"
__license__ = "BSD"
from snakemake.shell import shell
from os import path
import shutil
import tempfile
shell.executable("bash")
luascript = snakemake.params.get("lua_script")
if luascript:
luascriptprefix = "-lua {}".format(luascript)
else:
luascriptprefix = ""
basepath = snakemake.params.get("base_path")
basepathprefix = "-base-path {}".format(basepath) if basepath else ""
conf = snakemake.params.get("conf")
conf = conf if conf else ""
threads = snakemake.threads
threadsprefix = "-p {}".format(str(threads)) if threads else ""
outcalls = snakemake.output[0]
if outcalls.endswith(".vcf.gz"):
outprefix = "| bcftools view -Oz"
elif outcalls.endswith(".bcf"):
outprefix = "| bcftools view -Ob"
else:
outprefix = ""
incalls = snakemake.input[0]
if incalls.endswith(".bcf"):
incalls = "<(bcftools view {})".format(incalls)
log = snakemake.log_fmt_shell(stdout=False, stderr=True)
shell(
"(vcfanno {threadsprefix} {luascriptprefix} "
"{basepathprefix} "
"{conf} "
"{incalls} | sed -e 's/Number=A/Number=1/g' {outprefix} > {outcalls}) {log}"
)
|
[
"snakemake.shell.shell.executable",
"snakemake.shell.shell"
] |
[((192, 216), 'snakemake.shell.shell.executable', 'shell.executable', (['"""bash"""'], {}), "('bash')\n", (208, 216), False, 'from snakemake.shell import shell\n'), ((1002, 1162), 'snakemake.shell.shell', 'shell', (['"""(vcfanno {threadsprefix} {luascriptprefix} {basepathprefix} {conf} {incalls} | sed -e \'s/Number=A/Number=1/g\' {outprefix} > {outcalls}) {log}"""'], {}), '(\n "(vcfanno {threadsprefix} {luascriptprefix} {basepathprefix} {conf} {incalls} | sed -e \'s/Number=A/Number=1/g\' {outprefix} > {outcalls}) {log}"\n )\n', (1007, 1162), False, 'from snakemake.shell import shell\n')]
|
import cv2 as cv
import numpy as np
image = cv.imread("boy.jpg",cv.IMREAD_COLOR) # we can even read it in grayscale image also
#image is the cv::mat object of the image
# COVERTING THE IMAGE TO GRAY SCLAE USING cvtColor method
gray_scale = cv.cvtColor(image, cv.COLOR_BGR2GRAY)
cv.imshow("Original Image",image)
cv.imshow("Gray Scale Image",gray_scale)
# RESIZING THE IMAGE
gray = cv.resize(gray_scale,(200,200))
cv.imshow("RESIZED GRAY IMAGE",gray)
ret,thresh_1 = cv.threshold(src = gray,thresh=100, maxval= 255, type = cv.THRESH_BINARY)
ret,thresh_2 = cv.threshold(src = gray,thresh=90,maxval=255,type=cv.THRESH_BINARY_INV)
ret,thresh_3 = cv.threshold(src = gray,thresh=90,maxval=255,type=cv.THRESH_TRUNC)
ret,thresh_4 = cv.threshold(src = gray,thresh=90,maxval=255,type=cv.THRESH_TOZERO)
ret,thresh_5 = cv.threshold(src = gray,thresh=90,maxval=255,type=cv.THRESH_TOZERO_INV)
print(ret)
cv.imshow("Thresh Binary Image",thresh_1)
cv.imshow("Thresh Binary Inverted Image",thresh_2)
cv.imshow("Thresh Truncated Image",thresh_3)
cv.imshow("Thresh TOZERO Image",thresh_4)
cv.imshow("Thresh TOZERO INVERSE Image",thresh_5)
cv.waitKey(0)
|
[
"cv2.cvtColor",
"cv2.waitKey",
"cv2.threshold",
"cv2.imread",
"cv2.imshow",
"cv2.resize"
] |
[((48, 85), 'cv2.imread', 'cv.imread', (['"""boy.jpg"""', 'cv.IMREAD_COLOR'], {}), "('boy.jpg', cv.IMREAD_COLOR)\n", (57, 85), True, 'import cv2 as cv\n'), ((249, 286), 'cv2.cvtColor', 'cv.cvtColor', (['image', 'cv.COLOR_BGR2GRAY'], {}), '(image, cv.COLOR_BGR2GRAY)\n', (260, 286), True, 'import cv2 as cv\n'), ((290, 324), 'cv2.imshow', 'cv.imshow', (['"""Original Image"""', 'image'], {}), "('Original Image', image)\n", (299, 324), True, 'import cv2 as cv\n'), ((325, 366), 'cv2.imshow', 'cv.imshow', (['"""Gray Scale Image"""', 'gray_scale'], {}), "('Gray Scale Image', gray_scale)\n", (334, 366), True, 'import cv2 as cv\n'), ((398, 431), 'cv2.resize', 'cv.resize', (['gray_scale', '(200, 200)'], {}), '(gray_scale, (200, 200))\n', (407, 431), True, 'import cv2 as cv\n'), ((432, 469), 'cv2.imshow', 'cv.imshow', (['"""RESIZED GRAY IMAGE"""', 'gray'], {}), "('RESIZED GRAY IMAGE', gray)\n", (441, 469), True, 'import cv2 as cv\n'), ((487, 556), 'cv2.threshold', 'cv.threshold', ([], {'src': 'gray', 'thresh': '(100)', 'maxval': '(255)', 'type': 'cv.THRESH_BINARY'}), '(src=gray, thresh=100, maxval=255, type=cv.THRESH_BINARY)\n', (499, 556), True, 'import cv2 as cv\n'), ((577, 649), 'cv2.threshold', 'cv.threshold', ([], {'src': 'gray', 'thresh': '(90)', 'maxval': '(255)', 'type': 'cv.THRESH_BINARY_INV'}), '(src=gray, thresh=90, maxval=255, type=cv.THRESH_BINARY_INV)\n', (589, 649), True, 'import cv2 as cv\n'), ((665, 732), 'cv2.threshold', 'cv.threshold', ([], {'src': 'gray', 'thresh': '(90)', 'maxval': '(255)', 'type': 'cv.THRESH_TRUNC'}), '(src=gray, thresh=90, maxval=255, type=cv.THRESH_TRUNC)\n', (677, 732), True, 'import cv2 as cv\n'), ((748, 816), 'cv2.threshold', 'cv.threshold', ([], {'src': 'gray', 'thresh': '(90)', 'maxval': '(255)', 'type': 'cv.THRESH_TOZERO'}), '(src=gray, thresh=90, maxval=255, type=cv.THRESH_TOZERO)\n', (760, 816), True, 'import cv2 as cv\n'), ((832, 904), 'cv2.threshold', 'cv.threshold', ([], {'src': 'gray', 'thresh': '(90)', 'maxval': '(255)', 
'type': 'cv.THRESH_TOZERO_INV'}), '(src=gray, thresh=90, maxval=255, type=cv.THRESH_TOZERO_INV)\n', (844, 904), True, 'import cv2 as cv\n'), ((919, 961), 'cv2.imshow', 'cv.imshow', (['"""Thresh Binary Image"""', 'thresh_1'], {}), "('Thresh Binary Image', thresh_1)\n", (928, 961), True, 'import cv2 as cv\n'), ((962, 1013), 'cv2.imshow', 'cv.imshow', (['"""Thresh Binary Inverted Image"""', 'thresh_2'], {}), "('Thresh Binary Inverted Image', thresh_2)\n", (971, 1013), True, 'import cv2 as cv\n'), ((1014, 1059), 'cv2.imshow', 'cv.imshow', (['"""Thresh Truncated Image"""', 'thresh_3'], {}), "('Thresh Truncated Image', thresh_3)\n", (1023, 1059), True, 'import cv2 as cv\n'), ((1060, 1102), 'cv2.imshow', 'cv.imshow', (['"""Thresh TOZERO Image"""', 'thresh_4'], {}), "('Thresh TOZERO Image', thresh_4)\n", (1069, 1102), True, 'import cv2 as cv\n'), ((1103, 1153), 'cv2.imshow', 'cv.imshow', (['"""Thresh TOZERO INVERSE Image"""', 'thresh_5'], {}), "('Thresh TOZERO INVERSE Image', thresh_5)\n", (1112, 1153), True, 'import cv2 as cv\n'), ((1154, 1167), 'cv2.waitKey', 'cv.waitKey', (['(0)'], {}), '(0)\n', (1164, 1167), True, 'import cv2 as cv\n')]
|
import random
import numpy as np
from utils import splitPoly
import matplotlib.patches as patches
import matplotlib.path as path
from matplotlib.transforms import Bbox
import cartopy.crs as ccrs
from spot import Spot
class Star:
# Stellar Radius in RSun, inclincation in degrees
# Limb darkening grid resolution (pixel*pixel grid)
# Rotation period in days
def __init__(self, params):
self.radius = params.rad_star
self.inc = params.sinc
self.res = params.res
self.period = params.prot
self.u = params.u
self.spots = None
self.initial_band = params.high_band
self.low_band = params.low_band
self.cycle = params.stellar_cycle
self.active_region = list(self.initial_band)
self.active_region_vel = [-(params.high_band[0]-params.low_band[0])/self.cycle, -(params.high_band[1] - params.low_band[1])/self.cycle]
self.params = params # Needed for new spot generation
# Create globe structure and set up initial projections
self.globe = ccrs.Globe(semimajor_axis=self.radius, semiminor_axis=self.radius, ellipse='sphere', flattening=1e-9)
self.rotated_proj = ccrs.RotatedPole(pole_longitude=180, pole_latitude=90-self.inc, central_rotated_longitude=0, globe=self.globe)
self.geodetic_proj = ccrs.Geodetic(globe=self.globe)
self.orth_proj = ccrs.Orthographic(globe=self.globe, central_latitude = self.inc, central_longitude=0)
# Visible surface
edge = 90
self.lon1, self.lat1, self.lon2, self.lat2 = -edge, -edge, edge, edge
# Circular grid for limb darkening formula scaled to unity
x = np.linspace(-1,1,self.res)
x, y = np.meshgrid(x,x)
self.grid = np.sqrt(x**2 + y**2)
self.greater_mask = np.ma.masked_greater(self.grid,1).mask
self.grid[self.greater_mask] = np.nan
self.totalGridSquares = self.res**2 - self.greater_mask.sum()
self.grid_x, self.grid_y = (x*self.radius, y*self.radius) # Re-scale grid back to given star radius
# Unspotted Flux
self.unspottedFlux = self.limbDarken()
self.totalUnspottedFlux = self.totalFlux(self.unspottedFlux)
# Spotted Flux
self.spottedFlux = None
self.totalSpottedFlux = None
# Apply quadratic limb darkening to model
def limbDarken(self):
mu = np.sqrt(1-self.grid**2)
mu_1 = 1-mu
u1 = self.u[0]
u2 = self.u[1]
unspottedFlux = 1-u1*mu_1-u2*(mu_1**2)
return unspottedFlux
# Add spots
def addSpots(self, spots):
self.spots = spots
self.spottedFlux = self.mapSpots()
self.totalSpottedFlux = self.totalFlux(self.spottedFlux)
# Life Cycle management
def update(self, cur_phase, t):
# Update projections
cur_long = 360*((cur_phase)%1)
self.updateProjections(cur_long)
# If spots, update them
if not self.spots == None:
self.updateSpots(t)
self.spottedFlux = self.mapSpots()
self.totalSpottedFlux = self.totalFlux(self.spottedFlux)
def updateProjections(self, cur_long):
# Calculte Projections based on current rotation
self.rotated_proj = ccrs.RotatedPole(pole_longitude=cur_long-180, pole_latitude=90-self.inc, central_rotated_longitude=0, globe=self.globe)
self.orth_proj = ccrs.Orthographic(globe=self.globe, central_latitude = self.inc, central_longitude=cur_long)
def updateSpots(self, t, dt=0):
# If no spots then ignore
if not self.spots == None:
# Update active latitudes first
if dt > 0: self.updateActiveRegion(dt)
# Update spots and remove if dead
doCull = []
for spot in self.spots:
if dt > 0: spot.update(self, t, dt)
if spot.dead: doCull.append(spot)
# Remove dead spots and replace
if len(doCull) > 0:
spotsToAddBack = len(doCull)
for obj in doCull:
self.spots.remove(obj)
for i in range(spotsToAddBack):
self.spots.append(Spot.gen_spot(self.params, self, t))
def updateActiveRegion(self, dt):
self.active_region[0] += dt*self.active_region_vel[0]
self.active_region[1] += dt*self.active_region_vel[1]
# Reset when lower than lower band limit
if self.active_region[0] < self.low_band[0] or self.active_region[1] < self.low_band[1]:
self.active_region = list(self.initial_band)
# Spot masking and mapping
def maskPixels(self, path):
XY = np.dstack((self.grid_x, self.grid_y))
XY_flat = XY.reshape((-1, 2))
mask_flat = path.contains_points(XY_flat)
mask = mask_flat.reshape(self.grid_x.shape)
return mask
def mapSpots(self):
# Create new flux array
spottedFlux = self.unspottedFlux*np.ones(self.unspottedFlux.shape)
# Map Spots
for i, spot in enumerate(self.spots):
# Get polygon
spotPoly = spot.poly
# Transform spot coords from Geodetic coord system to rotated projection
spot_vs = self.rotated_proj.transform_points(self.geodetic_proj, spotPoly.vertices[:,0], spotPoly.vertices[:,1])[:,0:2]
# Split poly to avoid issues at boundary
polys = splitPoly(spot_vs, 180)
for poly in polys:
# Get vertices of spot/tissot polygon
spot_vs = poly.get_xy()
# Mask in rotated projection (use mpl.Path.clip_to_bbox function)
spot_path = patches.Path(spot_vs).clip_to_bbox(Bbox([[self.lon1,self.lat1],[self.lon2,self.lat2]]))
# If spot in visible area calculate flux change
if len(spot_path.vertices):
# Transform masked path to orth projection as this is coordinate space LD grid is in
spot_vs = self.orth_proj.transform_points(self.rotated_proj, spot_path.vertices[:,0], spot_path.vertices[:,1])[:,0:2]
spot_path = patches.Path(spot_vs)
# Find pixels contained in mask and multiply by spot brightnesss
mask = self.maskPixels(spot_path)
spottedFlux[mask] = spottedFlux[mask]*spot.brightness
return spottedFlux
# Manage transit
def transit(self, planet, time, dt):
I = []
D = []
Time = []
planetPoly = patches.CirclePolygon((0,0),1,100)
while (planet.isTransiting(time)):
# Carry on now integrating planet across surface but don't rotate star
planetFlux = self.unspottedFlux*np.ones(self.unspottedFlux.shape) if self.spottedFlux is None else self.spottedFlux*np.ones(self.spottedFlux.shape)
# Find position of planet and scale to star's radius
X, Y = planet.skyPosAtTime(time)
planet_vx = self.radius*(planetPoly.get_path().vertices[:,0]*planet.rad + X)
planet_vy = self.radius*(planetPoly.get_path().vertices[:,1]*planet.rad + Y)
planet_path = path.Path(np.column_stack((planet_vx,planet_vy)))
# Find pixles contained within planet's disk and set to 0
mask = self.maskPixels(planet_path)
planetFlux[mask] = 0
totalTransitFlux = self.totalFlux(planetFlux)
I.append(totalTransitFlux)
if self.spots is None:
D.append(self.totalUnspottedFlux - totalTransitFlux)
else:
D.append(self.totalSpottedFlux - totalTransitFlux)
Time.append(time)
time += dt
return I, D, Time, time
# Helper func to sum over grid of flux values
def totalFlux(self, flx):
totalFlux = flx[~self.greater_mask].sum()/self.totalGridSquares
return totalFlux
|
[
"numpy.dstack",
"utils.splitPoly",
"matplotlib.patches.Path",
"cartopy.crs.RotatedPole",
"cartopy.crs.Geodetic",
"numpy.meshgrid",
"matplotlib.path.contains_points",
"matplotlib.transforms.Bbox",
"numpy.ma.masked_greater",
"numpy.column_stack",
"spot.Spot.gen_spot",
"numpy.ones",
"matplotlib.patches.CirclePolygon",
"numpy.linspace",
"cartopy.crs.Globe",
"cartopy.crs.Orthographic",
"numpy.sqrt"
] |
[((1059, 1166), 'cartopy.crs.Globe', 'ccrs.Globe', ([], {'semimajor_axis': 'self.radius', 'semiminor_axis': 'self.radius', 'ellipse': '"""sphere"""', 'flattening': '(1e-09)'}), "(semimajor_axis=self.radius, semiminor_axis=self.radius, ellipse=\n 'sphere', flattening=1e-09)\n", (1069, 1166), True, 'import cartopy.crs as ccrs\n'), ((1189, 1305), 'cartopy.crs.RotatedPole', 'ccrs.RotatedPole', ([], {'pole_longitude': '(180)', 'pole_latitude': '(90 - self.inc)', 'central_rotated_longitude': '(0)', 'globe': 'self.globe'}), '(pole_longitude=180, pole_latitude=90 - self.inc,\n central_rotated_longitude=0, globe=self.globe)\n', (1205, 1305), True, 'import cartopy.crs as ccrs\n'), ((1329, 1360), 'cartopy.crs.Geodetic', 'ccrs.Geodetic', ([], {'globe': 'self.globe'}), '(globe=self.globe)\n', (1342, 1360), True, 'import cartopy.crs as ccrs\n'), ((1386, 1473), 'cartopy.crs.Orthographic', 'ccrs.Orthographic', ([], {'globe': 'self.globe', 'central_latitude': 'self.inc', 'central_longitude': '(0)'}), '(globe=self.globe, central_latitude=self.inc,\n central_longitude=0)\n', (1403, 1473), True, 'import cartopy.crs as ccrs\n'), ((1683, 1711), 'numpy.linspace', 'np.linspace', (['(-1)', '(1)', 'self.res'], {}), '(-1, 1, self.res)\n', (1694, 1711), True, 'import numpy as np\n'), ((1725, 1742), 'numpy.meshgrid', 'np.meshgrid', (['x', 'x'], {}), '(x, x)\n', (1736, 1742), True, 'import numpy as np\n'), ((1762, 1786), 'numpy.sqrt', 'np.sqrt', (['(x ** 2 + y ** 2)'], {}), '(x ** 2 + y ** 2)\n', (1769, 1786), True, 'import numpy as np\n'), ((2411, 2438), 'numpy.sqrt', 'np.sqrt', (['(1 - self.grid ** 2)'], {}), '(1 - self.grid ** 2)\n', (2418, 2438), True, 'import numpy as np\n'), ((3300, 3427), 'cartopy.crs.RotatedPole', 'ccrs.RotatedPole', ([], {'pole_longitude': '(cur_long - 180)', 'pole_latitude': '(90 - self.inc)', 'central_rotated_longitude': '(0)', 'globe': 'self.globe'}), '(pole_longitude=cur_long - 180, pole_latitude=90 - self.inc,\n central_rotated_longitude=0, globe=self.globe)\n', 
(3316, 3427), True, 'import cartopy.crs as ccrs\n'), ((3445, 3539), 'cartopy.crs.Orthographic', 'ccrs.Orthographic', ([], {'globe': 'self.globe', 'central_latitude': 'self.inc', 'central_longitude': 'cur_long'}), '(globe=self.globe, central_latitude=self.inc,\n central_longitude=cur_long)\n', (3462, 3539), True, 'import cartopy.crs as ccrs\n'), ((4750, 4787), 'numpy.dstack', 'np.dstack', (['(self.grid_x, self.grid_y)'], {}), '((self.grid_x, self.grid_y))\n', (4759, 4787), True, 'import numpy as np\n'), ((4846, 4875), 'matplotlib.path.contains_points', 'path.contains_points', (['XY_flat'], {}), '(XY_flat)\n', (4866, 4875), True, 'import matplotlib.path as path\n'), ((6663, 6700), 'matplotlib.patches.CirclePolygon', 'patches.CirclePolygon', (['(0, 0)', '(1)', '(100)'], {}), '((0, 0), 1, 100)\n', (6684, 6700), True, 'import matplotlib.patches as patches\n'), ((1811, 1845), 'numpy.ma.masked_greater', 'np.ma.masked_greater', (['self.grid', '(1)'], {}), '(self.grid, 1)\n', (1831, 1845), True, 'import numpy as np\n'), ((5050, 5083), 'numpy.ones', 'np.ones', (['self.unspottedFlux.shape'], {}), '(self.unspottedFlux.shape)\n', (5057, 5083), True, 'import numpy as np\n'), ((5514, 5537), 'utils.splitPoly', 'splitPoly', (['spot_vs', '(180)'], {}), '(spot_vs, 180)\n', (5523, 5537), False, 'from utils import splitPoly\n'), ((7309, 7348), 'numpy.column_stack', 'np.column_stack', (['(planet_vx, planet_vy)'], {}), '((planet_vx, planet_vy))\n', (7324, 7348), True, 'import numpy as np\n'), ((5822, 5876), 'matplotlib.transforms.Bbox', 'Bbox', (['[[self.lon1, self.lat1], [self.lon2, self.lat2]]'], {}), '([[self.lon1, self.lat1], [self.lon2, self.lat2]])\n', (5826, 5876), False, 'from matplotlib.transforms import Bbox\n'), ((6259, 6280), 'matplotlib.patches.Path', 'patches.Path', (['spot_vs'], {}), '(spot_vs)\n', (6271, 6280), True, 'import matplotlib.patches as patches\n'), ((6868, 6901), 'numpy.ones', 'np.ones', (['self.unspottedFlux.shape'], {}), '(self.unspottedFlux.shape)\n', (6875, 
6901), True, 'import numpy as np\n'), ((6952, 6983), 'numpy.ones', 'np.ones', (['self.spottedFlux.shape'], {}), '(self.spottedFlux.shape)\n', (6959, 6983), True, 'import numpy as np\n'), ((4262, 4297), 'spot.Spot.gen_spot', 'Spot.gen_spot', (['self.params', 'self', 't'], {}), '(self.params, self, t)\n', (4275, 4297), False, 'from spot import Spot\n'), ((5787, 5808), 'matplotlib.patches.Path', 'patches.Path', (['spot_vs'], {}), '(spot_vs)\n', (5799, 5808), True, 'import matplotlib.patches as patches\n')]
|
import json
import os
import shortuuid
from typing import List, NamedTuple, Optional
from .settings import LNBITS_PATH
class Extension(NamedTuple):
    # Immutable record describing one installed extension: the folder name
    # plus optional display metadata read from its config.json.
    code: str  # extension folder name; doubles as the extension identifier
    is_valid: bool  # False when config.json was missing or unparseable
    name: Optional[str] = None  # human-readable display name
    short_description: Optional[str] = None
    icon: Optional[str] = None
    contributors: Optional[List[str]] = None
class ExtensionManager:
    """Discover extensions on disk and expose their metadata.

    Extensions live as subfolders of ``<LNBITS_PATH>/extensions``; each may
    carry a ``config.json`` file with display metadata.
    """

    def __init__(self, *, disabled: Optional[List[str]] = None):
        """
        :param disabled: extension codes (folder names) to exclude from
            :attr:`extensions`.  Defaults to excluding nothing.
        """
        # ``disabled: list = []`` was a mutable default argument; use the
        # None-sentinel idiom instead so instances never share state.
        self._disabled: List[str] = disabled if disabled is not None else []
        # ``next(os.walk(...))`` yields only the first (top-level) triple,
        # whose second element is the list of immediate subdirectories —
        # the old list-comprehension walked the entire tree just to take [0].
        extensions_dir = os.path.join(LNBITS_PATH, "extensions")
        self._extension_folders: List[str] = next(os.walk(extensions_dir))[1]

    @property
    def extensions(self) -> List[Extension]:
        """Return an :class:`Extension` for every enabled extension folder.

        A folder whose ``config.json`` is missing or invalid still yields an
        entry, flagged with ``is_valid=False``.
        """
        output = []
        for extension in [ext for ext in self._extension_folders if ext not in self._disabled]:
            try:
                with open(os.path.join(LNBITS_PATH, "extensions", extension, "config.json")) as json_file:
                    config = json.load(json_file)
                is_valid = True
            except Exception:
                config = {}
                is_valid = False
            output.append(Extension(**{**{"code": extension, "is_valid": is_valid}, **config}))
        return output
class Status:
    """HTTP status codes used by the API layer."""

    # 2xx — success
    OK = 200
    CREATED = 201
    NO_CONTENT = 204
    # 4xx — client errors
    BAD_REQUEST = 400
    UNAUTHORIZED = 401
    PAYMENT_REQUIRED = 402
    FORBIDDEN = 403
    NOT_FOUND = 404
    METHOD_NOT_ALLOWED = 405
    UPGRADE_REQUIRED = 426
    TOO_MANY_REQUESTS = 429
    # 5xx — server errors
    INTERNAL_SERVER_ERROR = 500
def urlsafe_short_hash() -> str:
    """Return a random, URL-safe short identifier (a ``shortuuid`` UUID)."""
    return shortuuid.uuid()
|
[
"shortuuid.uuid",
"json.load",
"os.path.join"
] |
[((1493, 1509), 'shortuuid.uuid', 'shortuuid.uuid', ([], {}), '()\n', (1507, 1509), False, 'import shortuuid\n'), ((887, 907), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (896, 907), False, 'import json\n'), ((512, 551), 'os.path.join', 'os.path.join', (['LNBITS_PATH', '"""extensions"""'], {}), "(LNBITS_PATH, 'extensions')\n", (524, 551), False, 'import os\n'), ((777, 842), 'os.path.join', 'os.path.join', (['LNBITS_PATH', '"""extensions"""', 'extension', '"""config.json"""'], {}), "(LNBITS_PATH, 'extensions', extension, 'config.json')\n", (789, 842), False, 'import os\n')]
|
"""
Create summary statistics / plots for runs from
evcouplings app
Authors:
<NAME>
"""
# chose backend for command-line usage
import matplotlib
matplotlib.use("Agg")
from collections import defaultdict
import filelock
import pandas as pd
import click
import matplotlib.pyplot as plt
from evcouplings.utils.system import valid_file
from evcouplings.utils.config import read_config_file, InvalidParameterError
from evcouplings.utils.pipeline import FINAL_CONFIG_SUFFIX
# click context: accept ``-h`` in addition to the default ``--help`` flag
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
def protein_monomer(prefix, configs):
    """
    Create results summary for a run using the protein_monomer pipeline.

    Parameters
    ----------
    prefix : str
        Output prefix; summary table and plot file names are derived from it
        (``<prefix>_job_statistics_summary.{csv,pdf}``).
    configs : list(str)
        Paths to the job configuration files, one per subjob.

    Returns
    -------
    ali_table : pandas.DataFrame
        Per-subjob alignment / EC statistics (also written to disk).
    """
    # only ECs with probability >= this value count as "significant"
    MIN_PROBABILITY = 0.9

    # per-subjob statistics rows, concatenated into one table at the end
    # (DataFrame.append was removed in pandas 2.0, so collect + concat once)
    stat_frames = []
    prefix_to_cfgs = {}
    # maps (domain_threshold, sub_prefix) -> file paths used for plotting
    data = defaultdict(dict)

    # go through all config files
    for cfg_file in configs:
        # check if the file exists and has contents
        # since run might not yet have finished or crashed
        if valid_file(cfg_file):
            # job input configuration
            C = read_config_file(cfg_file)
            sub_prefix = C["global"]["prefix"]
            domain_threshold = C["align"]["domain_threshold"]
            sub_index = (domain_threshold, sub_prefix)

            final_state_cfg = sub_prefix + FINAL_CONFIG_SUFFIX
            if not valid_file(final_state_cfg):
                continue

            # read final output state of job
            R = read_config_file(final_state_cfg)
            data[sub_index]["identities"] = R["identities_file"]
            data[sub_index]["frequencies"] = R["frequencies_file"]
            data[sub_index]["minimum_column_coverage"] = C["align"]["minimum_column_coverage"]

            stat_file = R["statistics_file"]
            ec_file = R.get("ec_file", "")
            ec_comp_file = R.get("ec_compared_longrange_file", "")
            prefix_to_cfgs[(sub_prefix)] = (C, R)

            # read and modify alignment statistics
            if valid_file(stat_file):
                # get alignment stats for current job
                stat_df = pd.read_csv(stat_file)
                n_eff = R["effective_sequences"]

                if n_eff is not None:
                    stat_df.loc[0, "N_eff"] = n_eff

                stat_df.loc[0, "domain_threshold"] = domain_threshold
                L = stat_df.loc[0, "num_cov"]

                # try to get number of significant ECs in addition
                if valid_file(ec_file):
                    ecs = pd.read_csv(ec_file)
                    min_seq_dist = C["compare"]["min_sequence_distance"]
                    num_sig = len(ecs.query(
                        "abs(i-j) >= @min_seq_dist and probability >= @MIN_PROBABILITY"
                    ))
                    stat_df.loc[0, "num_significant"] = num_sig

                # try to get EC precision in addition
                if valid_file(ec_comp_file):
                    ec_comp = pd.read_csv(ec_comp_file)
                    stat_df.loc[0, "precision"] = ec_comp.iloc[L]["precision"]

                # finally, collect row for the global table
                stat_frames.append(stat_df)

    ali_table = pd.concat(stat_frames, ignore_index=True) if stat_frames else pd.DataFrame()

    # sort table by sequence search threshold (skip when nothing was found,
    # since the column does not exist in an empty table)
    if not ali_table.empty:
        ali_table = ali_table.sort_values(by="domain_threshold")

    # when saving files, have to acquire lock to make sure
    # jobs don't start overwriting results

    # make plots and save
    fig = _protein_monomer_plot(ali_table, data)
    plot_file = prefix + "_job_statistics_summary.pdf"
    lock_plot = filelock.FileLock(plot_file)
    with lock_plot:
        fig.savefig(plot_file, bbox_inches="tight")

    # save ali statistics table
    table_file = prefix + "_job_statistics_summary.csv"
    lock_table = filelock.FileLock(table_file)
    with lock_table:
        ali_table.to_csv(
            table_file, index=False, float_format="%.3f"
        )

    return ali_table
def _protein_monomer_plot(ali_table, data):
    """
    Plot summary statistics for a set of protein_monomer subjobs.

    Parameters
    ----------
    ali_table : pandas.DataFrame
        Per-subjob statistics table as built by :func:`protein_monomer`
        (columns used: domain_threshold, N_eff, num_cov, seqlen, prefix,
        and optionally num_significant / precision).
    data : dict
        Maps (domain_threshold, sub_prefix) to a dict with the keys
        "identities", "frequencies" and "minimum_column_coverage".

    Returns
    -------
    matplotlib.figure.Figure
        Figure with up to five panels (coverage, identity distribution,
        gap statistics, significant ECs, EC precision).
    """
    import seaborn as sns
    sns.set_palette("Paired", len(ali_table), None)
    FONTSIZE = 16
    # set up plot and grid
    fig = plt.figure(figsize=(15, 15))
    gridsize = ((3, 2))
    ax_cov = plt.subplot2grid(gridsize, (0, 0), colspan=1)
    ax_distr = plt.subplot2grid(gridsize, (0, 1), colspan=1)
    ax_gaps = plt.subplot2grid(gridsize, (1, 0), colspan=2)
    ax_sig = plt.subplot2grid(gridsize, (2, 0), colspan=1)
    ax_comp = plt.subplot2grid(gridsize, (2, 1), colspan=1)
    # 1) Number of sequences, coverage
    l_seqs = ax_cov.plot(
        ali_table.domain_threshold, ali_table.N_eff / ali_table.num_cov,
        "ok-", label="# Sequences"
    )
    ax_cov.set_xlabel("Domain inclusion threshold")
    ax_cov.set_ylabel("# effective sequences / L")
    ax_cov.set_title("Sequences and coverage", fontsize=FONTSIZE)
    ax_cov.legend(loc="lower left")
    # second y-axis: coverage shares the x-axis with sequence counts
    ax_cov2 = ax_cov.twinx()
    l_cov = ax_cov2.plot(
        ali_table.domain_threshold, ali_table.num_cov / ali_table.seqlen,
        "o-", label="Coverage", color="#2079b4"
    )
    ax_cov2.set_ylabel("Coverage (% of region)")
    ax_cov2.legend(loc="lower right")
    ax_cov2.set_ylim(0, 1)
    # 2) sequence identity & coverage distributions
    for (domain_threshold, subjob), subdata in sorted(data.items()):
        # sequence identities to query
        if valid_file(subdata["identities"]):
            ids = pd.read_csv(subdata["identities"]).identity_to_query.dropna()
            # NOTE(review): ``normed`` was removed in matplotlib >= 3.1 in
            # favour of ``density`` — confirm the pinned matplotlib version.
            ax_distr.hist(
                ids, histtype="step", range=(0, 1.0),
                bins=100, normed=True, cumulative=True, linewidth=3,
                label=str(domain_threshold)
            )
            ali_table.loc[ali_table.prefix == subjob, "average_identity"] = ids.mean()
        # coverage distribution
        if valid_file(subdata["frequencies"]):
            freqs = pd.read_csv(subdata["frequencies"])
            # print(freqs.head())
            # plot per-column coverage (1 - gap frequency in column "-")
            ax_gaps.plot(
                freqs.i, 1 - freqs.loc[:, "-"], "o", linewidth=3,
                label=str(domain_threshold)
            )
            # minimum column coverage may be given as a percentage
            mincov = subdata["minimum_column_coverage"]
            if mincov > 1:
                mincov /= 100
            ax_gaps.axhline(mincov, ls="--", color="k")
    ax_distr.set_xlabel("% sequence identity to query")
    ax_distr.set_title("Sequence identity distribution", fontsize=FONTSIZE)
    ax_distr.set_xlim(0, 1)
    ax_distr.set_ylim(0, 1)
    ax_distr.legend()
    ax_gaps.set_title("Gap statistics", fontsize=FONTSIZE)
    ax_gaps.set_xlabel("Sequence index")
    ax_gaps.set_ylabel("Column coverage (1 - % gaps)")
    ax_gaps.autoscale(enable=True, axis='x', tight=True)
    ax_gaps.set_ylim(0, 1)
    ax_gaps.legend(loc="best")
    # number of significant ECs, EC precision
    # (columns only exist when EC / comparison files were available)
    if "num_significant" in ali_table.columns:
        ax_sig.plot(
            ali_table.domain_threshold,
            ali_table.num_significant / ali_table.num_cov,
            "ok-"
        )
        ax_sig.set_title("Significant ECs", fontsize=FONTSIZE)
        ax_sig.set_xlabel("Domain inclusion threshold")
        ax_sig.set_ylabel("Fraction of significant ECs (% of L)")
    if "precision" in ali_table.columns:
        ax_comp.plot(ali_table.domain_threshold, ali_table.precision, "ok-")
        ax_comp.set_title("Comparison to 3D (top L ECs)", fontsize=FONTSIZE)
        ax_comp.set_xlabel("Domain inclusion threshold")
        ax_comp.set_ylabel("EC precision")
        ax_comp.set_ylim(0, 1)
    return fig
def protein_complex(prefix, configs):
    """
    Create results summary for a run using the protein_complex pipeline.

    Parameters
    ----------
    prefix : str
        Output prefix; the summary table is written to
        ``<prefix>_job_statistics_summary.csv``.
    configs : list(str)
        Paths to the job configuration files, one per subjob.

    Returns
    -------
    ali_table : pandas.DataFrame
        Per-subjob alignment / concatenation / EC statistics.
    """
    # TODO: this is only designed to work with skewnormal threshold
    MIN_PROBABILITY = 0.9
    # number of inter ECs to check for precision
    NUM_INTER = 5
    # TODO: create segments global variable and import
    FIRST_SEGMENT = "A_1"
    SECOND_SEGMENT = "B_1"

    # per-subjob statistics rows, concatenated into one table at the end
    # (DataFrame.append was removed in pandas 2.0, so collect + concat once)
    stat_frames = []
    prefix_to_cfgs = {}
    data = defaultdict(dict)

    # go through all config files
    for cfg_file in configs:
        # check if the file exists and has contents
        # since run might not yet have finished or crashed
        if valid_file(cfg_file):
            # job input configuration
            C = read_config_file(cfg_file)
            sub_prefix = C["global"]["prefix"]
            sub_index = (sub_prefix)

            final_state_cfg = sub_prefix + FINAL_CONFIG_SUFFIX
            if not valid_file(final_state_cfg):
                continue

            # read final output state of job
            R = read_config_file(final_state_cfg)
            data[sub_index]["identities"] = R["identities_file"]
            data[sub_index]["frequencies"] = R["frequencies_file"]
            data[sub_index]["minimum_column_coverage"] = C["concatenate"]["minimum_column_coverage"]

            stat_file = R["statistics_file"]
            ec_file = R.get("ec_file", "")
            ec_comp_file = R.get("ec_compared_longrange_file", "")
            concat_stat_file = R.get("concatentation_statistics_file", "")
            first_stat_file = R.get("first_statistics_file", "")
            second_stat_file = R.get("second_statistics_file", "")
            prefix_to_cfgs[(sub_prefix)] = (C, R)

            # read and modify alignment statistics
            if valid_file(stat_file):
                # get alignment stats for current job
                stat_df = pd.read_csv(stat_file)
                n_eff = R["effective_sequences"]

                if n_eff is not None:
                    stat_df.loc[0, "N_eff"] = n_eff

                # try to get concatenation statistics in addition
                if valid_file(concat_stat_file):
                    concat_stat_df = pd.read_csv(concat_stat_file)

                    # get and save n sequences per monomer aln
                    n_seqs_1 = concat_stat_df.loc[0, "num_seqs_1"]
                    n_seqs_2 = concat_stat_df.loc[0, "num_seqs_2"]
                    stat_df.loc[0, "first_n_seqs"] = int(n_seqs_1)
                    stat_df.loc[0, "second_n_seqs"] = int(n_seqs_2)

                    # get and save median n paralogs per monomer aln
                    n_paralogs_1 = concat_stat_df.loc[0, "median_num_per_species_1"]
                    n_paralogs_2 = concat_stat_df.loc[0, "median_num_per_species_2"]
                    stat_df.loc[0, "median_num_per_species_1"] = n_paralogs_1
                    stat_df.loc[0, "median_num_per_species_2"] = n_paralogs_2

                # try to get number of significant ECs in addition
                if valid_file(ec_file):
                    ecs = pd.read_csv(ec_file)

                    # number of significant monomer ECs
                    min_seq_dist = C["compare"]["min_sequence_distance"]
                    num_sig = len(ecs.query(
                        "abs(i-j) >= @min_seq_dist and probability >= @MIN_PROBABILITY"
                    ))
                    # number of inter-protein ECs significant
                    # TODO(review): computed but never stored in the table —
                    # confirm whether a "num_significant_inter" column was
                    # intended here.
                    num_sig_inter = len(ecs.query(
                        "segment_i != segment_j and probability >= @MIN_PROBABILITY"
                    ))
                    stat_df.loc[0, "num_significant"] = int(num_sig)

                    # rank of top inter contact
                    # NOTE(review): raises IndexError when no inter ECs
                    # exist — presumably the pipeline guarantees at least one
                    top_inter_rank = ecs.query("segment_i != segment_j").index[0]
                    stat_df.loc[0, "top_inter_rank"] = int(top_inter_rank)

                # try to get EC precision in addition
                if valid_file(ec_comp_file):
                    ec_comp = pd.read_csv(ec_comp_file)
                    ec_comp_1 = ec_comp.query("segment_i == segment_j == @FIRST_SEGMENT")
                    ec_comp_2 = ec_comp.query("segment_i == segment_j == @SECOND_SEGMENT")
                    ec_comp_inter = ec_comp.query("segment_i != segment_j")

                    # use the monomer statistics files to figure out how many
                    # sites in each monomer
                    if valid_file(first_stat_file) and valid_file(second_stat_file):
                        stats_1 = pd.read_csv(first_stat_file)
                        L_1 = stats_1.loc[0, "num_cov"]
                        stats_2 = pd.read_csv(second_stat_file)
                        L_2 = stats_2.loc[0, "num_cov"]

                        # precision of monomer 1 (top L_1 intra ECs)
                        stat_df.loc[0, "first_monomer_precision"] = ec_comp_1.iloc[L_1]["segmentwise_precision"]
                        # precision of monomer 2 (top L_2 intra ECs)
                        stat_df.loc[0, "second_monomer_precision"] = ec_comp_2.iloc[L_2]["segmentwise_precision"]
                        # precision of top NUM_INTER inter ECs
                        stat_df.loc[0, "inter_precision"] = ec_comp_inter.iloc[NUM_INTER]["segmentwise_precision"]

                # finally, collect row for the global table
                stat_frames.append(stat_df)

    ali_table = pd.concat(stat_frames, ignore_index=True) if stat_frames else pd.DataFrame()

    # save ali statistics table
    table_file = prefix + "_job_statistics_summary.csv"
    lock_table = filelock.FileLock(table_file)
    with lock_table:
        ali_table.to_csv(
            table_file, index=False, float_format="%.3f"
        )

    return ali_table
# maps the pipeline name given on the command line to its summarizer
PIPELINE_TO_SUMMARIZER = {
    "protein_monomer": protein_monomer,
    "protein_complex": protein_complex,
}
@click.command(context_settings=CONTEXT_SETTINGS)
# run settings
@click.argument('pipeline', nargs=1, required=True)
@click.argument('prefix', nargs=1, required=True)
@click.argument('configs', nargs=-1)
def app(**kwargs):
    """
    Create summary statistics for evcouplings pipeline runs
    """
    pipeline_name = kwargs["pipeline"]
    # validate the pipeline selection before dispatching
    if pipeline_name not in PIPELINE_TO_SUMMARIZER:
        raise InvalidParameterError(
            "Not a valid pipeline, valid selections are: {}".format(
                ",".join(PIPELINE_TO_SUMMARIZER.keys())
            )
        )
    summarizer = PIPELINE_TO_SUMMARIZER[pipeline_name]
    summarizer(kwargs["prefix"], kwargs["configs"])
if __name__ == '__main__':
app()
|
[
"pandas.DataFrame",
"click.argument",
"evcouplings.utils.config.read_config_file",
"filelock.FileLock",
"pandas.read_csv",
"matplotlib.pyplot.subplot2grid",
"click.command",
"collections.defaultdict",
"matplotlib.pyplot.figure",
"matplotlib.use",
"evcouplings.utils.system.valid_file"
] |
[((149, 170), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (163, 170), False, 'import matplotlib\n'), ((13545, 13593), 'click.command', 'click.command', ([], {'context_settings': 'CONTEXT_SETTINGS'}), '(context_settings=CONTEXT_SETTINGS)\n', (13558, 13593), False, 'import click\n'), ((13610, 13660), 'click.argument', 'click.argument', (['"""pipeline"""'], {'nargs': '(1)', 'required': '(True)'}), "('pipeline', nargs=1, required=True)\n", (13624, 13660), False, 'import click\n'), ((13662, 13710), 'click.argument', 'click.argument', (['"""prefix"""'], {'nargs': '(1)', 'required': '(True)'}), "('prefix', nargs=1, required=True)\n", (13676, 13710), False, 'import click\n'), ((13712, 13747), 'click.argument', 'click.argument', (['"""configs"""'], {'nargs': '(-1)'}), "('configs', nargs=-1)\n", (13726, 13747), False, 'import click\n'), ((717, 731), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (729, 731), True, 'import pandas as pd\n'), ((3517, 3545), 'filelock.FileLock', 'filelock.FileLock', (['plot_file'], {}), '(plot_file)\n', (3534, 3545), False, 'import filelock\n'), ((3724, 3753), 'filelock.FileLock', 'filelock.FileLock', (['table_file'], {}), '(table_file)\n', (3741, 3753), False, 'import filelock\n'), ((4097, 4125), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(15, 15)'}), '(figsize=(15, 15))\n', (4107, 4125), True, 'import matplotlib.pyplot as plt\n'), ((4163, 4208), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['gridsize', '(0, 0)'], {'colspan': '(1)'}), '(gridsize, (0, 0), colspan=1)\n', (4179, 4208), True, 'import matplotlib.pyplot as plt\n'), ((4224, 4269), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['gridsize', '(0, 1)'], {'colspan': '(1)'}), '(gridsize, (0, 1), colspan=1)\n', (4240, 4269), True, 'import matplotlib.pyplot as plt\n'), ((4284, 4329), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['gridsize', '(1, 0)'], {'colspan': '(2)'}), '(gridsize, (1, 0), colspan=2)\n', (4300, 
4329), True, 'import matplotlib.pyplot as plt\n'), ((4343, 4388), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['gridsize', '(2, 0)'], {'colspan': '(1)'}), '(gridsize, (2, 0), colspan=1)\n', (4359, 4388), True, 'import matplotlib.pyplot as plt\n'), ((4403, 4448), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['gridsize', '(2, 1)'], {'colspan': '(1)'}), '(gridsize, (2, 1), colspan=1)\n', (4419, 4448), True, 'import matplotlib.pyplot as plt\n'), ((7846, 7860), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (7858, 7860), True, 'import pandas as pd\n'), ((13266, 13295), 'filelock.FileLock', 'filelock.FileLock', (['table_file'], {}), '(table_file)\n', (13283, 13295), False, 'import filelock\n'), ((988, 1008), 'evcouplings.utils.system.valid_file', 'valid_file', (['cfg_file'], {}), '(cfg_file)\n', (998, 1008), False, 'from evcouplings.utils.system import valid_file\n'), ((5304, 5337), 'evcouplings.utils.system.valid_file', 'valid_file', (["subdata['identities']"], {}), "(subdata['identities'])\n", (5314, 5337), False, 'from evcouplings.utils.system import valid_file\n'), ((5759, 5793), 'evcouplings.utils.system.valid_file', 'valid_file', (["subdata['frequencies']"], {}), "(subdata['frequencies'])\n", (5769, 5793), False, 'from evcouplings.utils.system import valid_file\n'), ((8117, 8137), 'evcouplings.utils.system.valid_file', 'valid_file', (['cfg_file'], {}), '(cfg_file)\n', (8127, 8137), False, 'from evcouplings.utils.system import valid_file\n'), ((787, 800), 'collections.defaultdict', 'defaultdict', ([], {}), '()\n', (798, 800), False, 'from collections import defaultdict\n'), ((1064, 1090), 'evcouplings.utils.config.read_config_file', 'read_config_file', (['cfg_file'], {}), '(cfg_file)\n', (1080, 1090), False, 'from evcouplings.utils.config import read_config_file, InvalidParameterError\n'), ((1454, 1487), 'evcouplings.utils.config.read_config_file', 'read_config_file', (['final_state_cfg'], {}), '(final_state_cfg)\n', (1470, 1487), False, 
'from evcouplings.utils.config import read_config_file, InvalidParameterError\n'), ((1989, 2010), 'evcouplings.utils.system.valid_file', 'valid_file', (['stat_file'], {}), '(stat_file)\n', (1999, 2010), False, 'from evcouplings.utils.system import valid_file\n'), ((5815, 5850), 'pandas.read_csv', 'pd.read_csv', (["subdata['frequencies']"], {}), "(subdata['frequencies'])\n", (5826, 5850), True, 'import pandas as pd\n'), ((7916, 7929), 'collections.defaultdict', 'defaultdict', ([], {}), '()\n', (7927, 7929), False, 'from collections import defaultdict\n'), ((8201, 8227), 'evcouplings.utils.config.read_config_file', 'read_config_file', (['cfg_file'], {}), '(cfg_file)\n', (8217, 8227), False, 'from evcouplings.utils.config import read_config_file, InvalidParameterError\n'), ((8539, 8572), 'evcouplings.utils.config.read_config_file', 'read_config_file', (['final_state_cfg'], {}), '(final_state_cfg)\n', (8555, 8572), False, 'from evcouplings.utils.config import read_config_file, InvalidParameterError\n'), ((9333, 9354), 'evcouplings.utils.system.valid_file', 'valid_file', (['stat_file'], {}), '(stat_file)\n', (9343, 9354), False, 'from evcouplings.utils.system import valid_file\n'), ((1338, 1365), 'evcouplings.utils.system.valid_file', 'valid_file', (['final_state_cfg'], {}), '(final_state_cfg)\n', (1348, 1365), False, 'from evcouplings.utils.system import valid_file\n'), ((2092, 2114), 'pandas.read_csv', 'pd.read_csv', (['stat_file'], {}), '(stat_file)\n', (2103, 2114), True, 'import pandas as pd\n'), ((2459, 2478), 'evcouplings.utils.system.valid_file', 'valid_file', (['ec_file'], {}), '(ec_file)\n', (2469, 2478), False, 'from evcouplings.utils.system import valid_file\n'), ((2894, 2918), 'evcouplings.utils.system.valid_file', 'valid_file', (['ec_comp_file'], {}), '(ec_comp_file)\n', (2904, 2918), False, 'from evcouplings.utils.system import valid_file\n'), ((8411, 8438), 'evcouplings.utils.system.valid_file', 'valid_file', (['final_state_cfg'], {}), 
'(final_state_cfg)\n', (8421, 8438), False, 'from evcouplings.utils.system import valid_file\n'), ((9444, 9466), 'pandas.read_csv', 'pd.read_csv', (['stat_file'], {}), '(stat_file)\n', (9455, 9466), True, 'import pandas as pd\n'), ((9764, 9792), 'evcouplings.utils.system.valid_file', 'valid_file', (['concat_stat_file'], {}), '(concat_stat_file)\n', (9774, 9792), False, 'from evcouplings.utils.system import valid_file\n'), ((10729, 10748), 'evcouplings.utils.system.valid_file', 'valid_file', (['ec_file'], {}), '(ec_file)\n', (10739, 10748), False, 'from evcouplings.utils.system import valid_file\n'), ((11716, 11740), 'evcouplings.utils.system.valid_file', 'valid_file', (['ec_comp_file'], {}), '(ec_comp_file)\n', (11726, 11740), False, 'from evcouplings.utils.system import valid_file\n'), ((2506, 2526), 'pandas.read_csv', 'pd.read_csv', (['ec_file'], {}), '(ec_file)\n', (2517, 2526), True, 'import pandas as pd\n'), ((2950, 2975), 'pandas.read_csv', 'pd.read_csv', (['ec_comp_file'], {}), '(ec_comp_file)\n', (2961, 2975), True, 'import pandas as pd\n'), ((9835, 9864), 'pandas.read_csv', 'pd.read_csv', (['concat_stat_file'], {}), '(concat_stat_file)\n', (9846, 9864), True, 'import pandas as pd\n'), ((10780, 10800), 'pandas.read_csv', 'pd.read_csv', (['ec_file'], {}), '(ec_file)\n', (10791, 10800), True, 'import pandas as pd\n'), ((11776, 11801), 'pandas.read_csv', 'pd.read_csv', (['ec_comp_file'], {}), '(ec_comp_file)\n', (11787, 11801), True, 'import pandas as pd\n'), ((5357, 5391), 'pandas.read_csv', 'pd.read_csv', (["subdata['identities']"], {}), "(subdata['identities'])\n", (5368, 5391), True, 'import pandas as pd\n'), ((12203, 12230), 'evcouplings.utils.system.valid_file', 'valid_file', (['first_stat_file'], {}), '(first_stat_file)\n', (12213, 12230), False, 'from evcouplings.utils.system import valid_file\n'), ((12235, 12263), 'evcouplings.utils.system.valid_file', 'valid_file', (['second_stat_file'], {}), '(second_stat_file)\n', (12245, 12263), False, 'from 
evcouplings.utils.system import valid_file\n'), ((12303, 12331), 'pandas.read_csv', 'pd.read_csv', (['first_stat_file'], {}), '(first_stat_file)\n', (12314, 12331), True, 'import pandas as pd\n'), ((12435, 12464), 'pandas.read_csv', 'pd.read_csv', (['second_stat_file'], {}), '(second_stat_file)\n', (12446, 12464), True, 'import pandas as pd\n')]
|
import shutil
import os
import argparse
import unittest
import io
from tokenizer.tokenizer import Tokenizer
class TestTokenizer(unittest.TestCase):
    # cache directory where the tokenizer resources get downloaded
    MODEL_DIR = os.path.expanduser('~/.cache/diaparser')

    def setUp(self):
        self.args = {
            'lang': 'it',
            'verbose': True
        }

    def test_download_resources(self):
        # instantiating the tokenizer should fetch the language resources
        Tokenizer(self.args['lang'])
        lang_dir = os.path.join(self.MODEL_DIR, 'tokenizer', self.args['lang'])
        self.assertTrue(os.path.isdir(self.MODEL_DIR))
        self.assertTrue(os.path.exists(lang_dir))
        self.assertTrue(os.path.exists(os.path.join(lang_dir, 'tokenize')))

    def test_tokenize(self):
        tok = Tokenizer(self.args['lang'])
        # the abbreviation "dr." must not split, the missing space must
        sentences = tok.predict('Ha chiamato il dr. Rossi.Vuole salutarti.')
        self.assertEqual(len(sentences), 2)

    def test_corpus_load(self):
        tok = Tokenizer(self.args['lang'])
        text = "Un corazziere contro Scalfaro. L'attore le disse baciami o torno a riprendermelo."
        sin = io.StringIO(text)
        for line in tok.format(tok.predict(sin.read())):
            if line and not line.startswith('#'):
                # CoNLL-U format has 10 tsv:
                assert len(line.split('\t')) == 10, line
|
[
"io.StringIO",
"os.path.join",
"os.path.isdir",
"tokenizer.tokenizer.Tokenizer",
"os.path.expanduser"
] |
[((167, 207), 'os.path.expanduser', 'os.path.expanduser', (['"""~/.cache/diaparser"""'], {}), "('~/.cache/diaparser')\n", (185, 207), False, 'import os\n'), ((388, 416), 'tokenizer.tokenizer.Tokenizer', 'Tokenizer', (["self.args['lang']"], {}), "(self.args['lang'])\n", (397, 416), False, 'from tokenizer.tokenizer import Tokenizer\n'), ((751, 779), 'tokenizer.tokenizer.Tokenizer', 'Tokenizer', (["self.args['lang']"], {}), "(self.args['lang'])\n", (760, 779), False, 'from tokenizer.tokenizer import Tokenizer\n'), ((960, 988), 'tokenizer.tokenizer.Tokenizer', 'Tokenizer', (["self.args['lang']"], {}), "(self.args['lang'])\n", (969, 988), False, 'from tokenizer.tokenizer import Tokenizer\n'), ((1003, 1109), 'io.StringIO', 'io.StringIO', (['"""Un corazziere contro Scalfaro. L\'attore le disse baciami o torno a riprendermelo."""'], {}), '(\n "Un corazziere contro Scalfaro. L\'attore le disse baciami o torno a riprendermelo."\n )\n', (1014, 1109), False, 'import io\n'), ((450, 479), 'os.path.isdir', 'os.path.isdir', (['self.MODEL_DIR'], {}), '(self.MODEL_DIR)\n', (463, 479), False, 'import os\n'), ((520, 580), 'os.path.join', 'os.path.join', (['self.MODEL_DIR', '"""tokenizer"""', "self.args['lang']"], {}), "(self.MODEL_DIR, 'tokenizer', self.args['lang'])\n", (532, 580), False, 'import os\n'), ((622, 694), 'os.path.join', 'os.path.join', (['self.MODEL_DIR', '"""tokenizer"""', "self.args['lang']", '"""tokenize"""'], {}), "(self.MODEL_DIR, 'tokenizer', self.args['lang'], 'tokenize')\n", (634, 694), False, 'import os\n')]
|
"""
This file contains source code from another GitHub project. The comments made there apply. The source code
was licensed under the MIT License. The license text and a detailed reference can be found in the license
subfolder at models/east_open_cv/license. Many thanks to the author of the code.
For reasons of clarity unneeded parts of the original code were not taken over. The original project can
be found on the https://github.com/ZER-0-NE/EAST-Detector-for-text-detection-using-OpenCV page.
For a better understanding the documentation has been supplemented in parts. Code completely or predominantly
taken from the source was marked with "External code".
"""
import time
import cv2
import numpy as np
from imutils.object_detection import non_max_suppression
import bridges_config as config
class EastOpenCvBridge:
    """A bridge class for connecting to the EAST text detector (OpenCV DNN)."""

    def __init__(self):
        """The constructor; loads the network immediately."""
        self.load_model()

    def load_model(self):
        """Load the underlying model together with its pre-trained weights."""
        try:
            self.model = cv2.dnn.readNet(config.EAST_OPENCV_MODEL_PATH)
        except Exception:
            # was a bare ``except:`` — narrowed so KeyboardInterrupt and
            # SystemExit are no longer swallowed
            print('Error in method {0} in module {1}'.format('load_model', 'east_open_cv_bridge.py'))

    def scann(self, image, min_confidence=0.5):
        """External code (add try...except and an extension)
        Examines the passed image for text regions and returns them as a
        collection of boxes in the form of a NumPy array. The passed image
        must be a raster image.

        :param image: The image to be examined (BGR numpy array).
        :param min_confidence: minimum EAST score for a detection to be kept.
            New optional parameter; the default preserves the previously
            hard-coded threshold of 0.5.
        :return: A NumPy array of predicted text areas (each box as four
            (x, y) corner points in original image coordinates), or ``None``
            on failure.
        """
        try:
            # load the input image and grab the image dimensions
            self.orig = image.copy()
            (H, W) = image.shape[:2]

            # set the new width and height and then determine the ratio in
            # change for both width and height, should be multiple of 32
            (newW, newH) = (320, 320)
            rW = W / float(newW)
            rH = H / float(newH)

            # resize the image and grab the new image dimensions
            image = cv2.resize(image, (newW, newH))
            (H, W) = image.shape[:2]

            # define the two output layer names for the EAST detector model:
            # the first is the output probabilities, the second can be used
            # to derive the bounding box coordinates of text
            self.layerNames = [
                "feature_fusion/Conv_7/Sigmoid",
                "feature_fusion/concat_3"]

            # construct a blob from the image and then perform a forward
            # pass of the model to obtain the two output layer sets
            # (the unused start/end timing variables were removed)
            blob = cv2.dnn.blobFromImage(image, 1.0, (W, H),
                                         (123.68, 116.78, 103.94), swapRB=True, crop=False)
            self.model.setInput(blob)
            (scores, geometry) = self.model.forward(self.layerNames)

            # grab the number of rows and columns from the scores volume
            (numRows, numCols) = scores.shape[2:4]
            rects = []  # stores the bounding box coordinates for text regions
            confidences = []  # probability associated with each box in rects

            # loop over the number of rows
            for y in range(0, numRows):
                # extract the scores (probabilities), followed by the
                # geometrical data used to derive potential bounding box
                # coordinates that surround text
                scoresData = scores[0, 0, y]
                xData0 = geometry[0, 0, y]
                xData1 = geometry[0, 1, y]
                xData2 = geometry[0, 2, y]
                xData3 = geometry[0, 3, y]
                anglesData = geometry[0, 4, y]

                # loop over the number of columns
                for x in range(0, numCols):
                    # if our score does not have sufficient probability, ignore it
                    if scoresData[x] < min_confidence:
                        continue
                    # compute the offset factor as our resulting feature maps
                    # will be 4x smaller than the input image
                    (offsetX, offsetY) = (x * 4.0, y * 4.0)
                    # extract the rotation angle for the prediction and then
                    # compute the sin and cosine
                    angle = anglesData[x]
                    cos = np.cos(angle)
                    sin = np.sin(angle)
                    # use the geometry volume to derive the width and height
                    # of the bounding box
                    h = xData0[x] + xData2[x]
                    w = xData1[x] + xData3[x]
                    # compute both the starting and ending (x, y)-coordinates
                    # for the text prediction bounding box
                    endX = int(offsetX + (cos * xData1[x]) + (sin * xData2[x]))
                    endY = int(offsetY - (sin * xData1[x]) + (cos * xData2[x]))
                    startX = int(endX - w)
                    startY = int(endY - h)
                    # add the bounding box coordinates and probability score
                    # to our respective lists
                    rects.append((startX, startY, endX, endY))
                    confidences.append(scoresData[x])

            # apply non-maxima suppression to suppress weak, overlapping boxes
            boxes = non_max_suppression(np.array(rects), probs=confidences)

            # extension to the original code: scale back to the original
            # image size and return each box as its four corner points
            newboxes = []
            for (startX, startY, endX, endY) in boxes:
                # scale the bounding box coordinates based on the ratios
                startX = int(startX * rW)
                startY = int(startY * rH)
                endX = int(endX * rW)
                endY = int(endY * rH)
                newboxes.append([[startX, startY],
                                 [endX, startY],
                                 [endX, endY],
                                 [startX, endY]])
            return np.asarray(newboxes)
        except Exception:
            # was a bare ``except:`` — narrowed to Exception
            print('Error in method {0} in module {1}'.format('scann', 'east_open_cv_bridge.py'))
            return None
|
[
"numpy.asarray",
"cv2.dnn.blobFromImage",
"time.time",
"cv2.dnn.readNet",
"numpy.sin",
"numpy.array",
"numpy.cos",
"cv2.resize"
] |
[((1172, 1218), 'cv2.dnn.readNet', 'cv2.dnn.readNet', (['config.EAST_OPENCV_MODEL_PATH'], {}), '(config.EAST_OPENCV_MODEL_PATH)\n', (1187, 1218), False, 'import cv2\n'), ((2235, 2266), 'cv2.resize', 'cv2.resize', (['image', '(newW, newH)'], {}), '(image, (newW, newH))\n', (2245, 2266), False, 'import cv2\n'), ((2844, 2941), 'cv2.dnn.blobFromImage', 'cv2.dnn.blobFromImage', (['image', '(1.0)', '(W, H)', '(123.68, 116.78, 103.94)'], {'swapRB': '(True)', 'crop': '(False)'}), '(image, 1.0, (W, H), (123.68, 116.78, 103.94), swapRB=\n True, crop=False)\n', (2865, 2941), False, 'import cv2\n'), ((3000, 3011), 'time.time', 'time.time', ([], {}), '()\n', (3009, 3011), False, 'import time\n'), ((3140, 3151), 'time.time', 'time.time', ([], {}), '()\n', (3149, 3151), False, 'import time\n'), ((6630, 6650), 'numpy.asarray', 'np.asarray', (['newboxes'], {}), '(newboxes)\n', (6640, 6650), True, 'import numpy as np\n'), ((5843, 5858), 'numpy.array', 'np.array', (['rects'], {}), '(rects)\n', (5851, 5858), True, 'import numpy as np\n'), ((4802, 4815), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (4808, 4815), True, 'import numpy as np\n'), ((4843, 4856), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (4849, 4856), True, 'import numpy as np\n')]
|
from Class.rqlite import rqlite
import simple_acme_dns, requests, json, time, sys, os
class Cert(rqlite):
def updateCert(self,data):
print("updating",data[0])
response = self.execute(['UPDATE certs SET fullchain = ?,privkey = ?,updated = ? WHERE domain = ?',data[1],data[2],data[3],data[0]])
print(json.dumps(response, indent=4, sort_keys=True))
def buildbuildUrls(self,urls,domain,token):
response = []
for url in urls:
subdomain = ""
parts = domain.split(".")
if len(parts) > 2:
parts = parts[:len(parts) -2]
subdomain = '.'.join(parts)
#api.dns.com/mahkey/%domain%/%sub%/TXT/add/%token%
url = url.replace("domain",domain.replace(subdomain+".",""))
subdomain = "_acme-challenge." + subdomain
url = url.replace("sub",subdomain)
url = url.replace("token",token)
response.append(url)
return response
def buildUrls(self,domain,token,api):
apis = self.query(["SELECT * FROM apis WHERE name = ?",api])
if apis is False: return False
if 'values' not in apis['results'][0]: return False
apis = apis['results'][0]['values'][0]
response = {"up":[],"down":[]}
urls = apis[2].split(",")
response['up'] = self.buildbuildUrls(urls,domain,token)
urls = apis[3].split(",")
response['down'] = self.buildbuildUrls(urls,domain,token)
return response
    def getCert(self, domain, email, api):
        """Request a Let's Encrypt certificate for ``domain`` via DNS-01.

        Publishes the challenge TXT record through the DNS API named
        ``api``, waits for propagation, then stores the issued certificate
        via :meth:`updateCert`.  Returns True on success, False otherwise.
        """
        directory = "https://acme-v02.api.letsencrypt.org/directory"
        #directory = "https://acme-staging-v02.api.letsencrypt.org/directory"
        try:
            client = simple_acme_dns.ACMEClient(domains=[domain],email=email,directory=directory,nameservers=["8.8.8.8", "1.1.1.1"],new_account=True,generate_csr=True)
        except Exception as e:
            print(e)
            return False
        # publish one TXT record per verification token via the DNS API
        # NOTE(review): if no tokens are yielded, ``urls`` stays unbound and
        # the ``finally`` block below raises NameError — confirm upstream
        for acmeDomain, token in client.request_verification_tokens():
            print("adding {domain} --> {token}".format(domain=acmeDomain, token=token))
            urls = self.buildUrls(domain,token,api)
            if urls is False: return False
            for url in urls['up']:
                r = requests.get(url,allow_redirects=False)
                if (r.status_code != 200): return False
        print("Waiting for dns propagation")
        try:
            # wait up to 20 minutes for the TXT records to become visible
            if client.check_dns_propagation(timeout=1200):
                print("Requesting certificate")
                client.request_certificate()
                fullchain = client.certificate.decode()
                privkey = client.private_key.decode()
                self.updateCert([domain,fullchain,privkey,int(time.time())])
            else:
                print("Failed to issue certificate for " + str(client.domains))
                client.deactivate_account()
                return False
        except Exception as e:
            print(e)
            return False
        finally:
            # always try to remove the challenge records again
            # NOTE(review): a ``return`` inside ``finally`` swallows any
            # in-flight exception/return value — intentional? verify
            for url in urls['down']:
                r = requests.get(url,allow_redirects=False)
                if (r.status_code != 200): return False
        return True
def renew(self):
status = self.status()
if status is False:
print("rqlite gone")
return False
state = status['store']['raft']['state']
if state != "Leader":
print("Not leader, aborting.")
return False
print("Getting certs")
domains = self.query(['SELECT * FROM certs'])
if domains is False:
print("rqlite gone")
return False
if 'values' not in domains['results'][0]:
print("no certs added")
return False
for row in domains['results'][0]['values']:
if row[4] == None:
print("Missing cert for",row[0])
response = self.getCert(row[0],row[1],row[3])
if response is False:
print("Failed to get cert for",row[0])
return False
else:
print("Checking cert for",row[0])
if time.time() > (row[6] + (86400 * 30)):
print("Certificate is older than 30 days")
response = self.getCert(row[0],row[1],row[3])
if response is False:
print("Failed to get cert for",row[0])
return False
|
[
"requests.get",
"simple_acme_dns.ACMEClient",
"json.dumps",
"time.time"
] |
[((328, 374), 'json.dumps', 'json.dumps', (['response'], {'indent': '(4)', 'sort_keys': '(True)'}), '(response, indent=4, sort_keys=True)\n', (338, 374), False, 'import simple_acme_dns, requests, json, time, sys, os\n'), ((1739, 1899), 'simple_acme_dns.ACMEClient', 'simple_acme_dns.ACMEClient', ([], {'domains': '[domain]', 'email': 'email', 'directory': 'directory', 'nameservers': "['8.8.8.8', '1.1.1.1']", 'new_account': '(True)', 'generate_csr': '(True)'}), "(domains=[domain], email=email, directory=\n directory, nameservers=['8.8.8.8', '1.1.1.1'], new_account=True,\n generate_csr=True)\n", (1765, 1899), False, 'import simple_acme_dns, requests, json, time, sys, os\n'), ((2273, 2313), 'requests.get', 'requests.get', (['url'], {'allow_redirects': '(False)'}), '(url, allow_redirects=False)\n', (2285, 2313), False, 'import simple_acme_dns, requests, json, time, sys, os\n'), ((3089, 3129), 'requests.get', 'requests.get', (['url'], {'allow_redirects': '(False)'}), '(url, allow_redirects=False)\n', (3101, 3129), False, 'import simple_acme_dns, requests, json, time, sys, os\n'), ((4191, 4202), 'time.time', 'time.time', ([], {}), '()\n', (4200, 4202), False, 'import simple_acme_dns, requests, json, time, sys, os\n'), ((2752, 2763), 'time.time', 'time.time', ([], {}), '()\n', (2761, 2763), False, 'import simple_acme_dns, requests, json, time, sys, os\n')]
|
from os import path
from setuptools import setup
from tools.generate_pyi import generate_pyi
def main():
    """Build entry point: regenerate .pyi stubs, then run the setuptools build."""
    # Regenerate type stubs for the ctypes-based modules before packaging,
    # so the .pyi files listed in package_data exist.
    import pyxtf.xtf_ctypes
    generate_pyi(pyxtf.xtf_ctypes)
    import pyxtf.vendors.kongsberg
    generate_pyi(pyxtf.vendors.kongsberg)

    # Use the README as the PyPI long description.
    here = path.abspath(path.dirname(__file__))
    with open(path.join(here, 'README.md'), encoding='utf-8') as readme:
        readme_text = readme.read()

    # Collect the package metadata, then hand it to setuptools.
    setup_kwargs = dict(
        name='pyxtf',
        version='1.2',
        description='eXtended Triton Format (XTF) file interface',
        long_description=readme_text,
        long_description_content_type='text/markdown',
        author='<NAME>',
        author_email='<EMAIL>',
        url='https://github.com/oysstu/pyxtf',
        license='MIT',
        setup_requires=['numpy>=1.11'],
        install_requires=['numpy>=1.11', 'matplotlib>=1.5.1'],
        packages=['pyxtf', 'pyxtf.vendors'],
        package_data={'': ['*.pyi']},
        use_2to3=False,
        classifiers=[
            'License :: OSI Approved :: MIT License',
            'Intended Audience :: Developers',
            'Intended Audience :: Other Audience',
            'Intended Audience :: Science/Research',
            'Natural Language :: English',
            'Topic :: Scientific/Engineering',
            'Programming Language :: Python :: 3 :: Only'
        ],
    )
    setup(**setup_kwargs)
if __name__ == '__main__':
    # Script entry point: regenerate stubs and run the setuptools build.
    main()
|
[
"os.path.dirname",
"tools.generate_pyi.generate_pyi",
"os.path.join",
"setuptools.setup"
] |
[((165, 195), 'tools.generate_pyi.generate_pyi', 'generate_pyi', (['pyxtf.xtf_ctypes'], {}), '(pyxtf.xtf_ctypes)\n', (177, 195), False, 'from tools.generate_pyi import generate_pyi\n'), ((235, 272), 'tools.generate_pyi.generate_pyi', 'generate_pyi', (['pyxtf.vendors.kongsberg'], {}), '(pyxtf.vendors.kongsberg)\n', (247, 272), False, 'from tools.generate_pyi import generate_pyi\n'), ((513, 1290), 'setuptools.setup', 'setup', ([], {'name': '"""pyxtf"""', 'version': '"""1.2"""', 'description': '"""eXtended Triton Format (XTF) file interface"""', 'long_description': 'long_description', 'long_description_content_type': '"""text/markdown"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'url': '"""https://github.com/oysstu/pyxtf"""', 'license': '"""MIT"""', 'setup_requires': "['numpy>=1.11']", 'install_requires': "['numpy>=1.11', 'matplotlib>=1.5.1']", 'packages': "['pyxtf', 'pyxtf.vendors']", 'package_data': "{'': ['*.pyi']}", 'use_2to3': '(False)', 'classifiers': "['License :: OSI Approved :: MIT License',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Other Audience',\n 'Intended Audience :: Science/Research', 'Natural Language :: English',\n 'Topic :: Scientific/Engineering',\n 'Programming Language :: Python :: 3 :: Only']"}), "(name='pyxtf', version='1.2', description=\n 'eXtended Triton Format (XTF) file interface', long_description=\n long_description, long_description_content_type='text/markdown', author\n ='<NAME>', author_email='<EMAIL>', url=\n 'https://github.com/oysstu/pyxtf', license='MIT', setup_requires=[\n 'numpy>=1.11'], install_requires=['numpy>=1.11', 'matplotlib>=1.5.1'],\n packages=['pyxtf', 'pyxtf.vendors'], package_data={'': ['*.pyi']},\n use_2to3=False, classifiers=['License :: OSI Approved :: MIT License',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Other Audience',\n 'Intended Audience :: Science/Research', 'Natural Language :: English',\n 'Topic :: Scientific/Engineering',\n 'Programming Language 
:: Python :: 3 :: Only'])\n", (518, 1290), False, 'from setuptools import setup\n'), ((347, 369), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (359, 369), False, 'from os import path\n'), ((385, 423), 'os.path.join', 'path.join', (['this_directory', '"""README.md"""'], {}), "(this_directory, 'README.md')\n", (394, 423), False, 'from os import path\n')]
|
#!/usr/bin/env python3
import argparse
from os import walk
from pprint import pprint
from re import fullmatch
from sys import argv
def is_excluded(file, excluded):
    """Return True if ``file`` fully matches any regex in ``excluded``.

    Uses a generator expression so ``any`` can short-circuit instead of
    first building a full list of match results.
    """
    return any(fullmatch(ex, file) for ex in excluded)
def is_included(file, included):
    """Return True if ``file`` fully matches any regex in ``included``.

    Uses a generator expression so ``any`` can short-circuit instead of
    first building a full list of match results.
    """
    return any(fullmatch(ex, file) for ex in included)
def get_files(root_dir, excluded=(), included=('.*',)):
    """Yield ``(directory, filename)`` pairs under ``root_dir``.

    A file is yielded when its name matches at least one inclusion pattern
    and no exclusion pattern.
    """
    for dirpath, _dirnames, filenames in walk(root_dir):
        for name in filenames:
            keep = is_included(name, included) and not is_excluded(name, excluded)
            if keep:
                yield dirpath, name
def count_words(line):
    """Count whitespace-separated words in ``line``.

    Fix: the original ``line.split(' ')`` only split on single spaces, so
    words separated by tabs (or other whitespace) were counted as one word.
    ``str.split()`` with no argument splits on any whitespace run and drops
    empty tokens, which also handles leading/trailing/repeated separators.
    """
    return len(line.split())
def count_letters(line):
    """Return the length of ``line`` once surrounding whitespace is removed.

    Note that interior whitespace still counts toward the total.
    """
    stripped = line.strip()
    return len(stripped)
def count(root, file):
    """Count non-blank lines, words and letters in ``root``/``file``.

    Blank (whitespace-only) lines are skipped entirely. Returns a
    ``(line_count, word_count, letter_count)`` tuple.
    """
    lines = words = letters = 0
    with open(f'{root}/{file}', 'rt', encoding='utf-8') as handle:
        for text in handle:
            if not text.strip():
                continue  # skip blank lines
            lines += 1
            words += count_words(text)
            letters += count_letters(text)
    return lines, words, letters
def count_all(dirs=('.',), excluded=(), included=('.*',)):
    """Aggregate file/line/word/letter counts over every matching file.

    Walks each directory in ``dirs``, applying the exclusion/inclusion
    patterns, and returns a dict with keys ``files``, ``lines``, ``words``
    and ``letters``.
    """
    totals = {'files': 0, 'lines': 0, 'words': 0, 'letters': 0}
    for directory in dirs:
        for root, file in get_files(directory, excluded, included):
            lines, words, letters = count(root, file)
            totals['files'] += 1
            totals['lines'] += lines
            totals['words'] += words
            totals['letters'] += letters
    return totals
def parse_args(args):
    """Parse command-line arguments for the counting tool.

    Returns an ``argparse.Namespace`` with ``dirs``, ``excluded`` and
    ``included`` list attributes.
    """
    parser = argparse.ArgumentParser(description='Count files, lines, words and letters.')
    # (short flag, long flag, default value, help text)
    option_specs = (
        ('-d', '--dirs', ['.'], 'Directories to count in'),
        ('-e', '--excluded', [],
         'File name exclusion patterns, e.g .*Test\\..* .*IT\\..*'),
        ('-i', '--included', ['.*'],
         'File name inclusion patterns, e.g .*\\.groovy .*\\.java .*\\.py'),
    )
    for short_opt, long_opt, default, text in option_specs:
        parser.add_argument(short_opt, long_opt, nargs='*', default=default, help=text)
    return parser.parse_args(args)
def main(dirs=('.',), excluded=(), included=('.*',)):
    """Run the count over ``dirs`` and pretty-print the resulting totals."""
    totals = count_all(dirs, excluded=excluded, included=included)
    pprint(totals)
if __name__ == '__main__':  # pragma: no cover
    # Script entry point: forward the parsed CLI options to main().
    # (Removed a stale block of commented-out experiment code that hard-coded
    # local rsimulator paths.)
    main(**parse_args(argv[1:]).__dict__)
|
[
"re.fullmatch",
"pprint.pprint",
"os.walk",
"argparse.ArgumentParser"
] |
[((402, 416), 'os.walk', 'walk', (['root_dir'], {}), '(root_dir)\n', (406, 416), False, 'from os import walk\n'), ((1680, 1757), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Count files, lines, words and letters."""'}), "(description='Count files, lines, words and letters.')\n", (1703, 1757), False, 'import argparse\n'), ((2340, 2354), 'pprint.pprint', 'pprint', (['result'], {}), '(result)\n', (2346, 2354), False, 'from pprint import pprint\n'), ((182, 201), 're.fullmatch', 'fullmatch', (['ex', 'file'], {}), '(ex, file)\n', (191, 201), False, 'from re import fullmatch\n'), ((274, 293), 're.fullmatch', 'fullmatch', (['ex', 'file'], {}), '(ex, file)\n', (283, 293), False, 'from re import fullmatch\n')]
|
from unittest import TestCase
from unittest.mock import patch, mock_open
from datetime import datetime
import responses
from pygrocy import Grocy
from pygrocy.grocy import Product
from pygrocy.grocy import Group
from pygrocy.grocy import ShoppingListProduct
from pygrocy.grocy_api_client import CurrentStockResponse, GrocyApiClient
class TestGrocy(TestCase):
    """Unit tests for the pygrocy client, with HTTP mocked via `responses`.

    Fix: this class previously defined ``test_get_stock_invalid_missing_data``
    twice; the second definition (which targeted the ``/stock/volatile``
    endpoint) silently shadowed the first, so the first never ran. The second
    has been renamed to ``test_get_volatile_invalid_missing_data``.
    """

    def setUp(self):
        self.grocy = Grocy("https://example.com", "api_key")

    def test_init(self):
        assert isinstance(self.grocy, Grocy)

    @responses.activate
    def test_get_chores_valid_no_details(self):
        resp = [
            {
                "chore_id": "1",
                "last_tracked_time": "2019-11-18 00:00:00",
                "next_estimated_execution_time": "2019-11-25 00:00:00",
                "track_date_only": "1"
            },
            {
                "chore_id": "2",
                "last_tracked_time": "2019-11-16 00:00:00",
                "next_estimated_execution_time": "2019-11-23 00:00:00",
                "track_date_only": "1"
            },
            {
                "chore_id": "3",
                "last_tracked_time": "2019-11-10 00:00:00",
                "next_estimated_execution_time": "2019-12-10 00:00:00",
                "track_date_only": "1"
            },
            {
                "chore_id": "4",
                "last_tracked_time": "2019-11-18 00:00:00",
                "next_estimated_execution_time": "2019-11-25 00:00:00",
                "track_date_only": "1",
            }
        ]
        responses.add(responses.GET, "https://example.com:9192/api/chores", json=resp, status=200)

        chores = self.grocy.chores(get_details=False)

        assert isinstance(chores, list)
        assert len(chores) == 4
        assert chores[0].chore_id == 1
        assert chores[1].chore_id == 2
        assert chores[2].chore_id == 3
        assert chores[3].chore_id == 4

    @responses.activate
    def test_product_get_details_valid(self):
        current_stock_response = CurrentStockResponse({
            "product_id": 0,
            "amount": "0.33",
            "best_before_date": "2019-05-02"
        })
        product = Product(current_stock_response)

        api_client = GrocyApiClient("https://example.com", "api_key")

        resp = {
            "product": {
                "id": 0,
                "name": "string",
                "description": "string",
                "location_id": 0,
                "qu_id_purchase": 0,
                "qu_id_stock": 0,
                "qu_factor_purchase_to_stock": 0,
                "barcode": "string",
                "product_group_id": 0,
                "min_stock_amount": 0,
                "default_best_before_days": 0,
                "picture_file_name": "string",
                "allow_partial_units_in_stock": True,
                "row_created_timestamp": "2019-05-02T18:30:48.041Z"
            },
            "quantity_unit_purchase": {
                "id": 0,
                "name": "string",
                "name_plural": "string",
                "description": "string",
                "row_created_timestamp": "2019-05-02T18:30:48.041Z"
            },
            "quantity_unit_stock": {
                "id": 0,
                "name": "string",
                "name_plural": "string",
                "description": "string",
                "row_created_timestamp": "2019-05-02T18:30:48.041Z"
            },
            "last_purchased": "2019-05-02",
            "last_used": "2019-05-02T18:30:48.041Z",
            "stock_amount": 0,
            "stock_amount_opened": 0,
            "next_best_before_date": "2019-05-02T18:30:48.041Z",
            "last_price": 0,
            "location": {
                "id": 0,
                "name": "string",
                "description": "string",
                "row_created_timestamp": "2019-05-02T18:30:48.041Z"
            }
        }

        responses.add(responses.GET, "https://example.com:9192/api/stock/products/0", json=resp, status=200)

        product.get_details(api_client)

        assert product.name == "string"
        assert product.product_group_id == 0

    @responses.activate
    def test_product_get_details_invalid_no_data(self):
        current_stock_response = CurrentStockResponse({
            "product_id": 0,
            "amount": "0.33",
            "best_before_date": "2019-05-02"
        })
        product = Product(current_stock_response)

        api_client = GrocyApiClient("https://example.com", "api_key")

        responses.add(responses.GET, "https://example.com:9192/api/stock/products/0", status=200)

        product.get_details(api_client)

        assert product.name is None

    @responses.activate
    def test_get_stock_valid(self):
        resp = [
            {
                "product_id": 0,
                "amount": "0.33",
                "best_before_date": "2019-05-02"
            }
        ]
        responses.add(responses.GET, "https://example.com:9192/api/stock", json=resp, status=200)

        stock = self.grocy.stock()

        assert isinstance(stock, list)
        assert len(stock) == 1
        for prod in stock:
            assert isinstance(prod, Product)

    @responses.activate
    def test_get_stock_invalid_no_data(self):
        responses.add(responses.GET, "https://example.com:9192/api/stock", status=200)

        assert self.grocy.stock() is None

    @responses.activate
    def test_get_stock_invalid_missing_data(self):
        resp = [
            {
            }
        ]
        responses.add(responses.GET, "https://example.com:9192/api/stock", json=resp, status=200)

    @responses.activate
    def test_get_shopping_list_valid(self):
        resp = [
            {
                "id": 1,
                "product_id": 6,
                "note": "string",
                "amount": 2,
                "row_created_timestamp": "2019-04-17 10:30:00",
                "shopping_list_id": 1,
                "done": 0
            }
        ]
        responses.add(responses.GET, "https://example.com:9192/api/objects/shopping_list", json=resp, status=200)

        shopping_list = self.grocy.shopping_list()

        assert isinstance(shopping_list, list)
        assert len(shopping_list) == 1
        for item in shopping_list:
            assert isinstance(item, ShoppingListProduct)

    @responses.activate
    def test_get_shopping_list_invalid_no_data(self):
        responses.add(responses.GET, "https://example.com:9192/api/objects/shopping_list", status=400)
        assert self.grocy.shopping_list() is None

    @responses.activate
    def test_get_shopping_list_invalid_missing_data(self):
        resp = [
            {
            }
        ]
        responses.add(responses.GET, "https://example.com:9192/api/objects/shopping_list", json=resp, status=200)

    @responses.activate
    def test_add_missing_product_to_shopping_list_valid(self):
        responses.add(responses.POST, "https://example.com:9192/api/stock/shoppinglist/add-missing-products", status=204)
        assert self.grocy.add_missing_product_to_shopping_list().status_code == 204

    @responses.activate
    def test_add_missing_product_to_shopping_list_error(self):
        responses.add(responses.POST, "https://example.com:9192/api/stock/shoppinglist/add-missing-products", status=400)
        assert self.grocy.add_missing_product_to_shopping_list().status_code != 204

    @responses.activate
    def test_add_product_to_shopping_list_valid(self):
        responses.add(responses.POST, "https://example.com:9192/api/stock/shoppinglist/add-product", status=204)
        assert self.grocy.add_product_to_shopping_list(1).status_code == 204

    @responses.activate
    def test_add_product_to_shopping_list_error(self):
        responses.add(responses.POST, "https://example.com:9192/api/stock/shoppinglist/add-product", status=400)
        assert self.grocy.add_product_to_shopping_list(1).status_code != 204

    @responses.activate
    def test_clear_shopping_list_valid(self):
        responses.add(responses.POST, "https://example.com:9192/api/stock/shoppinglist/clear", status=204)
        assert self.grocy.clear_shopping_list().status_code == 204

    @responses.activate
    def test_clear_shopping_list_error(self):
        responses.add(responses.POST, "https://example.com:9192/api/stock/shoppinglist/clear", status=400)
        assert self.grocy.clear_shopping_list().status_code != 204

    @responses.activate
    def test_remove_product_in_shopping_list_valid(self):
        responses.add(responses.POST, "https://example.com:9192/api/stock/shoppinglist/remove-product", status=204)
        assert self.grocy.remove_product_in_shopping_list(1).status_code == 204

    @responses.activate
    def test_remove_product_in_shopping_list_error(self):
        responses.add(responses.POST, "https://example.com:9192/api/stock/shoppinglist/remove-product", status=400)
        assert self.grocy.remove_product_in_shopping_list(1).status_code != 204

    @responses.activate
    def test_get_product_groups_valid(self):
        resp = [
            {
                "id": 1,
                "name": "string",
                "description": "string",
                "row_created_timestamp": "2019-04-17 10:30:00",
            }
        ]
        responses.add(responses.GET, "https://example.com:9192/api/objects/product_groups", json=resp, status=200)

        product_groups_list = self.grocy.product_groups()

        assert isinstance(product_groups_list, list)
        assert len(product_groups_list) == 1
        for item in product_groups_list:
            assert isinstance(item, Group)

    @responses.activate
    def test_get_product_groups_invalid_no_data(self):
        responses.add(responses.GET, "https://example.com:9192/api/objects/product_groups", status=400)
        assert self.grocy.product_groups() is None

    @responses.activate
    def test_get_product_groups_invalid_missing_data(self):
        resp = [
            {
            }
        ]
        responses.add(responses.GET, "https://example.com:9192/api/objects/product_groups", json=resp, status=200)

    @responses.activate
    def test_upload_product_picture_valid(self):
        with patch("os.path.exists") as m_exist:
            with patch("builtins.open", mock_open()) as m_open:
                m_exist.return_value = True
                api_client = GrocyApiClient("https://example.com", "api_key")
                responses.add(responses.PUT, "https://example.com:9192/api/files/productpictures/MS5qcGc=", status=204)
                assert api_client.upload_product_picture(1,"/somepath/pic.jpg").status_code == 204

    @responses.activate
    def test_upload_product_picture_invalid_missing_data(self):
        with patch("os.path.exists") as m_exist:
            m_exist.return_value = False
            api_client = GrocyApiClient("https://example.com", "api_key")
            responses.add(responses.PUT, "https://example.com:9192/api/files/productpictures/MS5qcGc=", status=204)
            assert api_client.upload_product_picture(1,"/somepath/pic.jpg") is None

    @responses.activate
    def test_upload_product_picture_error(self):
        with patch("os.path.exists") as m_exist:
            with patch("builtins.open", mock_open()) as m_open:
                m_exist.return_value = True
                api_client = GrocyApiClient("https://example.com", "api_key")
                responses.add(responses.PUT, "https://example.com:9192/api/files/productpictures/MS5qcGc=", status=400)
                assert api_client.upload_product_picture(1,"/somepath/pic.jpg").status_code != 204

    @responses.activate
    def test_update_product_pic_valid(self):
        api_client = GrocyApiClient("https://example.com", "api_key")
        responses.add(responses.PUT, "https://example.com:9192/api/objects/products/1", status=204)
        assert api_client.update_product_pic(1).status_code == 204

    @responses.activate
    def test_update_product_pic_error(self):
        api_client = GrocyApiClient("https://example.com", "api_key")
        responses.add(responses.PUT, "https://example.com:9192/api/objects/products/1", status=400)
        assert api_client.update_product_pic(1).status_code != 204

    @responses.activate
    def test_get_expiring_products_valid(self):
        resp = {
            "expiring_products" : [
                {
                    "product_id": 0,
                    "amount": "0.33",
                    "best_before_date": "2019-05-02",
                    "amount_opened": "0"
                }
            ],
            "expired_products": [],
            "missing_products": []
        }
        responses.add(responses.GET, "https://example.com:9192/api/stock/volatile", json=resp, status=200)

        expiring_product = self.grocy.expiring_products()

        assert isinstance(expiring_product, list)
        assert len(expiring_product) == 1
        for prod in expiring_product:
            assert isinstance(prod, Product)

    @responses.activate
    def test_get_expiring_invalid_no_data(self):
        resp = {
            "expiring_products": [],
            "expired_products": [],
            "missing_products": []
        }
        responses.add(responses.GET, "https://example.com:9192/api/stock/volatile", json=resp, status=200)

        assert not self.grocy.expiring_products()

    @responses.activate
    def test_get_expiring_invalid_missing_data(self):
        resp = {}
        responses.add(responses.GET, "https://example.com:9192/api/stock/volatile", json=resp, status=200)

    @responses.activate
    def test_get_expired_products_valid(self):
        resp = {
            "expired_products" : [
                {
                    "product_id": 0,
                    "amount": "0.33",
                    "best_before_date": "2019-05-02",
                    "amount_opened": "0"
                }
            ],
            "expiring_products": [],
            "missing_products": []
        }
        responses.add(responses.GET, "https://example.com:9192/api/stock/volatile", json=resp, status=200)

        expired_product = self.grocy.expired_products()

        assert isinstance(expired_product, list)
        assert len(expired_product) == 1
        for prod in expired_product:
            assert isinstance(prod, Product)

    @responses.activate
    def test_get_expired_invalid_no_data(self):
        resp = {
            "expiring_products": [],
            "expired_products": [],
            "missing_products": []
        }
        responses.add(responses.GET, "https://example.com:9192/api/stock/volatile", json=resp, status=200)

        assert not self.grocy.expired_products()

    @responses.activate
    def test_get_expired_invalid_missing_data(self):
        resp = {}
        responses.add(responses.GET, "https://example.com:9192/api/stock/volatile", json=resp, status=200)

    @responses.activate
    def test_get_missing_products_valid(self):
        resp = {
            "missing_products" : [
                {
                    "product_id": 0,
                    "amount": "0.33",
                    "best_before_date": "2019-05-02",
                    "amount_opened": "0"
                }
            ],
            "expired_products": [],
            "expiring_products": []
        }
        responses.add(responses.GET, "https://example.com:9192/api/stock/volatile", json=resp, status=200)

        missing_product = self.grocy.missing_products()

        assert isinstance(missing_product, list)
        assert len(missing_product) == 1
        for prod in missing_product:
            assert isinstance(prod, Product)

    @responses.activate
    def test_get_missing_invalid_no_data(self):
        resp = {
            "expiring_products": [],
            "expired_products": [],
            "missing_products": []
        }
        responses.add(responses.GET, "https://example.com:9192/api/stock/volatile", json=resp, status=200)

        assert not self.grocy.missing_products()

    @responses.activate
    def test_get_volatile_invalid_missing_data(self):
        # Renamed from test_get_stock_invalid_missing_data, which collided
        # with the /api/stock test of the same name defined earlier.
        resp = {}
        responses.add(responses.GET, "https://example.com:9192/api/stock/volatile", json=resp, status=200)

    @responses.activate
    def test_get_userfields_valid(self):
        resp = {
            "uf1": 0,
            "uf2": "string"
        }

        responses.add(responses.GET, "https://example.com:9192/api/userfields/chores/1", json=resp, status=200)

        a_chore_uf = self.grocy.get_userfields("chores",1)

        assert a_chore_uf['uf1'] == 0

    @responses.activate
    def test_get_userfields_invalid_no_data(self):
        resp = []
        responses.add(responses.GET, "https://example.com:9192/api/userfields/chores/1", json=resp, status=200)

        assert not self.grocy.get_userfields("chores",1)

    @responses.activate
    def test_set_userfields_valid(self):
        responses.add(responses.PUT, "https://example.com:9192/api/userfields/chores/1", status=204)
        assert self.grocy.set_userfields("chores",1,"auserfield","value").status_code == 204

    @responses.activate
    def test_set_userfields_error(self):
        responses.add(responses.PUT, "https://example.com:9192/api/userfields/chores/1", status=400)
        assert self.grocy.set_userfields("chores",1,"auserfield","value").status_code != 204

    @responses.activate
    def test_get_last_db_changed_valid(self):
        resp = { "changed_time": "2019-09-18T05:30:58.598Z" }
        responses.add(responses.GET, "https://example.com:9192/api/system/db-changed-time", json=resp, status=200)

        timestamp = self.grocy.get_last_db_changed()

        assert isinstance(timestamp, datetime)

    @responses.activate
    def test_get_last_db_changed_invalid_no_data(self):
        resp = {}
        responses.add(responses.GET, "https://example.com:9192/api/system/db-changed-time", json=resp, status=200)

        assert self.grocy.get_last_db_changed() is None
|
[
"responses.add",
"unittest.mock.patch",
"unittest.mock.mock_open",
"pygrocy.Grocy",
"pygrocy.grocy_api_client.CurrentStockResponse",
"pygrocy.grocy.Product",
"pygrocy.grocy_api_client.GrocyApiClient"
] |
[((403, 442), 'pygrocy.Grocy', 'Grocy', (['"""https://example.com"""', '"""api_key"""'], {}), "('https://example.com', 'api_key')\n", (408, 442), False, 'from pygrocy import Grocy\n'), ((1571, 1666), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/chores"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET, 'https://example.com:9192/api/chores', json=\n resp, status=200)\n", (1584, 1666), False, 'import responses\n'), ((2066, 2161), 'pygrocy.grocy_api_client.CurrentStockResponse', 'CurrentStockResponse', (["{'product_id': 0, 'amount': '0.33', 'best_before_date': '2019-05-02'}"], {}), "({'product_id': 0, 'amount': '0.33', 'best_before_date':\n '2019-05-02'})\n", (2086, 2161), False, 'from pygrocy.grocy_api_client import CurrentStockResponse, GrocyApiClient\n'), ((2222, 2253), 'pygrocy.grocy.Product', 'Product', (['current_stock_response'], {}), '(current_stock_response)\n', (2229, 2253), False, 'from pygrocy.grocy import Product\n'), ((2276, 2324), 'pygrocy.grocy_api_client.GrocyApiClient', 'GrocyApiClient', (['"""https://example.com"""', '"""api_key"""'], {}), "('https://example.com', 'api_key')\n", (2290, 2324), False, 'from pygrocy.grocy_api_client import CurrentStockResponse, GrocyApiClient\n'), ((3980, 4084), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/stock/products/0"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET,\n 'https://example.com:9192/api/stock/products/0', json=resp, status=200)\n", (3993, 4084), False, 'import responses\n'), ((4322, 4417), 'pygrocy.grocy_api_client.CurrentStockResponse', 'CurrentStockResponse', (["{'product_id': 0, 'amount': '0.33', 'best_before_date': '2019-05-02'}"], {}), "({'product_id': 0, 'amount': '0.33', 'best_before_date':\n '2019-05-02'})\n", (4342, 4417), False, 'from pygrocy.grocy_api_client import CurrentStockResponse, GrocyApiClient\n'), ((4478, 4509), 'pygrocy.grocy.Product', 'Product', (['current_stock_response'], {}), 
'(current_stock_response)\n', (4485, 4509), False, 'from pygrocy.grocy import Product\n'), ((4532, 4580), 'pygrocy.grocy_api_client.GrocyApiClient', 'GrocyApiClient', (['"""https://example.com"""', '"""api_key"""'], {}), "('https://example.com', 'api_key')\n", (4546, 4580), False, 'from pygrocy.grocy_api_client import CurrentStockResponse, GrocyApiClient\n'), ((4590, 4683), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/stock/products/0"""'], {'status': '(200)'}), "(responses.GET,\n 'https://example.com:9192/api/stock/products/0', status=200)\n", (4603, 4683), False, 'import responses\n'), ((4998, 5092), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/stock"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET, 'https://example.com:9192/api/stock', json=\n resp, status=200)\n", (5011, 5092), False, 'import responses\n'), ((5346, 5424), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/stock"""'], {'status': '(200)'}), "(responses.GET, 'https://example.com:9192/api/stock', status=200)\n", (5359, 5424), False, 'import responses\n'), ((5607, 5701), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/stock"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET, 'https://example.com:9192/api/stock', json=\n resp, status=200)\n", (5620, 5701), False, 'import responses\n'), ((6087, 6201), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/objects/shopping_list"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET,\n 'https://example.com:9192/api/objects/shopping_list', json=resp, status=200\n )\n", (6100, 6201), False, 'import responses\n'), ((6531, 6629), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/objects/shopping_list"""'], {'status': '(400)'}), "(responses.GET,\n 'https://example.com:9192/api/objects/shopping_list', status=400)\n", 
(6544, 6629), False, 'import responses\n'), ((6831, 6945), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/objects/shopping_list"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET,\n 'https://example.com:9192/api/objects/shopping_list', json=resp, status=200\n )\n", (6844, 6945), False, 'import responses\n'), ((7041, 7162), 'responses.add', 'responses.add', (['responses.POST', '"""https://example.com:9192/api/stock/shoppinglist/add-missing-products"""'], {'status': '(204)'}), "(responses.POST,\n 'https://example.com:9192/api/stock/shoppinglist/add-missing-products',\n status=204)\n", (7054, 7162), False, 'import responses\n'), ((7343, 7464), 'responses.add', 'responses.add', (['responses.POST', '"""https://example.com:9192/api/stock/shoppinglist/add-missing-products"""'], {'status': '(400)'}), "(responses.POST,\n 'https://example.com:9192/api/stock/shoppinglist/add-missing-products',\n status=400)\n", (7356, 7464), False, 'import responses\n'), ((7637, 7745), 'responses.add', 'responses.add', (['responses.POST', '"""https://example.com:9192/api/stock/shoppinglist/add-product"""'], {'status': '(204)'}), "(responses.POST,\n 'https://example.com:9192/api/stock/shoppinglist/add-product', status=204)\n", (7650, 7745), False, 'import responses\n'), ((7915, 8023), 'responses.add', 'responses.add', (['responses.POST', '"""https://example.com:9192/api/stock/shoppinglist/add-product"""'], {'status': '(400)'}), "(responses.POST,\n 'https://example.com:9192/api/stock/shoppinglist/add-product', status=400)\n", (7928, 8023), False, 'import responses\n'), ((8184, 8286), 'responses.add', 'responses.add', (['responses.POST', '"""https://example.com:9192/api/stock/shoppinglist/clear"""'], {'status': '(204)'}), "(responses.POST,\n 'https://example.com:9192/api/stock/shoppinglist/clear', status=204)\n", (8197, 8286), False, 'import responses\n'), ((8437, 8539), 'responses.add', 'responses.add', (['responses.POST', 
'"""https://example.com:9192/api/stock/shoppinglist/clear"""'], {'status': '(400)'}), "(responses.POST,\n 'https://example.com:9192/api/stock/shoppinglist/clear', status=400)\n", (8450, 8539), False, 'import responses\n'), ((8702, 8817), 'responses.add', 'responses.add', (['responses.POST', '"""https://example.com:9192/api/stock/shoppinglist/remove-product"""'], {'status': '(204)'}), "(responses.POST,\n 'https://example.com:9192/api/stock/shoppinglist/remove-product',\n status=204)\n", (8715, 8817), False, 'import responses\n'), ((8989, 9104), 'responses.add', 'responses.add', (['responses.POST', '"""https://example.com:9192/api/stock/shoppinglist/remove-product"""'], {'status': '(400)'}), "(responses.POST,\n 'https://example.com:9192/api/stock/shoppinglist/remove-product',\n status=400)\n", (9002, 9104), False, 'import responses\n'), ((9482, 9596), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/objects/product_groups"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET,\n 'https://example.com:9192/api/objects/product_groups', json=resp,\n status=200)\n", (9495, 9596), False, 'import responses\n'), ((9938, 10037), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/objects/product_groups"""'], {'status': '(400)'}), "(responses.GET,\n 'https://example.com:9192/api/objects/product_groups', status=400)\n", (9951, 10037), False, 'import responses\n'), ((10241, 10355), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/objects/product_groups"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET,\n 'https://example.com:9192/api/objects/product_groups', json=resp,\n status=200)\n", (10254, 10355), False, 'import responses\n'), ((11995, 12043), 'pygrocy.grocy_api_client.GrocyApiClient', 'GrocyApiClient', (['"""https://example.com"""', '"""api_key"""'], {}), "('https://example.com', 'api_key')\n", (12009, 12043), False, 'from pygrocy.grocy_api_client 
import CurrentStockResponse, GrocyApiClient\n'), ((12052, 12147), 'responses.add', 'responses.add', (['responses.PUT', '"""https://example.com:9192/api/objects/products/1"""'], {'status': '(204)'}), "(responses.PUT,\n 'https://example.com:9192/api/objects/products/1', status=204)\n", (12065, 12147), False, 'import responses\n'), ((12310, 12358), 'pygrocy.grocy_api_client.GrocyApiClient', 'GrocyApiClient', (['"""https://example.com"""', '"""api_key"""'], {}), "('https://example.com', 'api_key')\n", (12324, 12358), False, 'from pygrocy.grocy_api_client import CurrentStockResponse, GrocyApiClient\n'), ((12367, 12462), 'responses.add', 'responses.add', (['responses.PUT', '"""https://example.com:9192/api/objects/products/1"""'], {'status': '(400)'}), "(responses.PUT,\n 'https://example.com:9192/api/objects/products/1', status=400)\n", (12380, 12462), False, 'import responses\n'), ((12984, 13086), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/stock/volatile"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET, 'https://example.com:9192/api/stock/volatile',\n json=resp, status=200)\n", (12997, 13086), False, 'import responses\n'), ((13535, 13637), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/stock/volatile"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET, 'https://example.com:9192/api/stock/volatile',\n json=resp, status=200)\n", (13548, 13637), False, 'import responses\n'), ((13790, 13892), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/stock/volatile"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET, 'https://example.com:9192/api/stock/volatile',\n json=resp, status=200)\n", (13803, 13892), False, 'import responses\n'), ((14341, 14443), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/stock/volatile"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET, 
'https://example.com:9192/api/stock/volatile',\n json=resp, status=200)\n", (14354, 14443), False, 'import responses\n'), ((14886, 14988), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/stock/volatile"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET, 'https://example.com:9192/api/stock/volatile',\n json=resp, status=200)\n", (14899, 14988), False, 'import responses\n'), ((15139, 15241), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/stock/volatile"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET, 'https://example.com:9192/api/stock/volatile',\n json=resp, status=200)\n", (15152, 15241), False, 'import responses\n'), ((15690, 15792), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/stock/volatile"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET, 'https://example.com:9192/api/stock/volatile',\n json=resp, status=200)\n", (15703, 15792), False, 'import responses\n'), ((16235, 16337), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/stock/volatile"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET, 'https://example.com:9192/api/stock/volatile',\n json=resp, status=200)\n", (16248, 16337), False, 'import responses\n'), ((16486, 16588), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/stock/volatile"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET, 'https://example.com:9192/api/stock/volatile',\n json=resp, status=200)\n", (16499, 16588), False, 'import responses\n'), ((16766, 16873), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/userfields/chores/1"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET,\n 'https://example.com:9192/api/userfields/chores/1', json=resp, status=200)\n", (16779, 16873), False, 'import responses\n'), ((17072, 17179), 'responses.add', 'responses.add', 
(['responses.GET', '"""https://example.com:9192/api/userfields/chores/1"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET,\n 'https://example.com:9192/api/userfields/chores/1', json=resp, status=200)\n", (17085, 17179), False, 'import responses\n'), ((17309, 17405), 'responses.add', 'responses.add', (['responses.PUT', '"""https://example.com:9192/api/userfields/chores/1"""'], {'status': '(204)'}), "(responses.PUT,\n 'https://example.com:9192/api/userfields/chores/1', status=204)\n", (17322, 17405), False, 'import responses\n'), ((17577, 17673), 'responses.add', 'responses.add', (['responses.PUT', '"""https://example.com:9192/api/userfields/chores/1"""'], {'status': '(400)'}), "(responses.PUT,\n 'https://example.com:9192/api/userfields/chores/1', status=400)\n", (17590, 17673), False, 'import responses\n'), ((17922, 18036), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/system/db-changed-time"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET,\n 'https://example.com:9192/api/system/db-changed-time', json=resp,\n status=200)\n", (17935, 18036), False, 'import responses\n'), ((18239, 18353), 'responses.add', 'responses.add', (['responses.GET', '"""https://example.com:9192/api/system/db-changed-time"""'], {'json': 'resp', 'status': '(200)'}), "(responses.GET,\n 'https://example.com:9192/api/system/db-changed-time', json=resp,\n status=200)\n", (18252, 18353), False, 'import responses\n'), ((10443, 10466), 'unittest.mock.patch', 'patch', (['"""os.path.exists"""'], {}), "('os.path.exists')\n", (10448, 10466), False, 'from unittest.mock import patch, mock_open\n'), ((10999, 11022), 'unittest.mock.patch', 'patch', (['"""os.path.exists"""'], {}), "('os.path.exists')\n", (11004, 11022), False, 'from unittest.mock import patch, mock_open\n'), ((11102, 11150), 'pygrocy.grocy_api_client.GrocyApiClient', 'GrocyApiClient', (['"""https://example.com"""', '"""api_key"""'], {}), "('https://example.com', 'api_key')\n", (11116, 
11150), False, 'from pygrocy.grocy_api_client import CurrentStockResponse, GrocyApiClient\n'), ((11163, 11270), 'responses.add', 'responses.add', (['responses.PUT', '"""https://example.com:9192/api/files/productpictures/MS5qcGc="""'], {'status': '(204)'}), "(responses.PUT,\n 'https://example.com:9192/api/files/productpictures/MS5qcGc=', status=204)\n", (11176, 11270), False, 'import responses\n'), ((11446, 11469), 'unittest.mock.patch', 'patch', (['"""os.path.exists"""'], {}), "('os.path.exists')\n", (11451, 11469), False, 'from unittest.mock import patch, mock_open\n'), ((10617, 10665), 'pygrocy.grocy_api_client.GrocyApiClient', 'GrocyApiClient', (['"""https://example.com"""', '"""api_key"""'], {}), "('https://example.com', 'api_key')\n", (10631, 10665), False, 'from pygrocy.grocy_api_client import CurrentStockResponse, GrocyApiClient\n'), ((10682, 10789), 'responses.add', 'responses.add', (['responses.PUT', '"""https://example.com:9192/api/files/productpictures/MS5qcGc="""'], {'status': '(204)'}), "(responses.PUT,\n 'https://example.com:9192/api/files/productpictures/MS5qcGc=', status=204)\n", (10695, 10789), False, 'import responses\n'), ((11620, 11668), 'pygrocy.grocy_api_client.GrocyApiClient', 'GrocyApiClient', (['"""https://example.com"""', '"""api_key"""'], {}), "('https://example.com', 'api_key')\n", (11634, 11668), False, 'from pygrocy.grocy_api_client import CurrentStockResponse, GrocyApiClient\n'), ((11685, 11792), 'responses.add', 'responses.add', (['responses.PUT', '"""https://example.com:9192/api/files/productpictures/MS5qcGc="""'], {'status': '(400)'}), "(responses.PUT,\n 'https://example.com:9192/api/files/productpictures/MS5qcGc=', status=400)\n", (11698, 11792), False, 'import responses\n'), ((10520, 10531), 'unittest.mock.mock_open', 'mock_open', ([], {}), '()\n', (10529, 10531), False, 'from unittest.mock import patch, mock_open\n'), ((11523, 11534), 'unittest.mock.mock_open', 'mock_open', ([], {}), '()\n', (11532, 11534), False, 'from 
unittest.mock import patch, mock_open\n')]
|
# Copyright - Transporation, Bots, and Disability Lab - Carnegie Mellon University
# Released under MIT License
"""
Common Operations/Codes that are re-written on Baxter
"""
import numpy as np
from pyquaternion import Quaternion
from alloy.math import *
__all__ = [
'convert_joint_angles_to_numpy','transform_pose_into_rotation_matrix',
'calculate_pose_difference'
]
def convert_joint_angles_to_numpy(joint_angles, joint_names):
"""Convert the dictionary based joint angles given by baxter interface to
a numpy array according to the given joint names
"""
arr = np.zeros(7)
for i, key in enumerate(joint_names):
arr[i] = joint_angles[key]
return arr
def transform_pose_into_rotation_matrix(pose_np):
#pose_np = pose_to_numpy(pose)
translation_comp = pose_np[0:3]
trans_mat = Quaternion(pose_np[3:]).transformation_matrix
trans_mat[0:3,3] = translation_comp
return trans_mat
def calculate_pose_difference(p1, p2):
"""Calculate the pose error from p1 to p2. Note the resulting
error is calculated in the frame of p1 and not the base frame
do p[0:3] = p[0:3] - np.cross(x[0:3],p[3:])
"""
error = np.zeros(6,)
#the position error is just the difference in position
error[0:3] = p2[0:3] - p1[0:3]
#orientation error is more tricky
desire_q = Quaternion(p2[3:])
error_q = desire_q * Quaternion(p1[3:]).inverse
error[3:] = error_q.axis * error_q.angle
return error
#transform_quaternion = Quaternion(pose_np[3:]). Quaternion(pose_np[3:])
# def calculate_pose_difference(p1, p2):
# """Calculate the error from p1 to p2. Note the resulting
# error is calculated in the frame of p1 and not the base frame
# do p[0:3] = p[0:3] - np.cross(x[0:3],p[3:])
# """
# mat1 = transform_pose_into_rotation_matrix(p1)
# mat2 = transform_pose_into_rotation_matrix(p2)
# error = calculate_error_between_two_transformation_matrix(mat1, mat2)
# return calculate_error_between_two_transformation_matrix(mat1, mat2)
|
[
"pyquaternion.Quaternion",
"numpy.zeros"
] |
[((595, 606), 'numpy.zeros', 'np.zeros', (['(7)'], {}), '(7)\n', (603, 606), True, 'import numpy as np\n'), ((1186, 1197), 'numpy.zeros', 'np.zeros', (['(6)'], {}), '(6)\n', (1194, 1197), True, 'import numpy as np\n'), ((1346, 1364), 'pyquaternion.Quaternion', 'Quaternion', (['p2[3:]'], {}), '(p2[3:])\n', (1356, 1364), False, 'from pyquaternion import Quaternion\n'), ((837, 860), 'pyquaternion.Quaternion', 'Quaternion', (['pose_np[3:]'], {}), '(pose_np[3:])\n', (847, 860), False, 'from pyquaternion import Quaternion\n'), ((1390, 1408), 'pyquaternion.Quaternion', 'Quaternion', (['p1[3:]'], {}), '(p1[3:])\n', (1400, 1408), False, 'from pyquaternion import Quaternion\n')]
|
from django.conf.urls import url
from .views import impersonate, list_users, search_users, stop_impersonate
try:
# Django <=1.9
from django.conf.urls import patterns
except ImportError:
patterns = None
urlpatterns = [
url(r'^stop/$',
stop_impersonate,
name='impersonate-stop'),
url(r'^list/$',
list_users,
{'template': 'impersonate/list_users.html'},
name='impersonate-list'),
url(r'^search/$',
search_users,
{'template': 'impersonate/search_users.html'},
name='impersonate-search'),
url(r'^(?P<uid>.+)/$',
impersonate,
name='impersonate-start'),
]
if patterns is not None:
urlpatterns = patterns('', *urlpatterns)
|
[
"django.conf.urls.patterns",
"django.conf.urls.url"
] |
[((237, 294), 'django.conf.urls.url', 'url', (['"""^stop/$"""', 'stop_impersonate'], {'name': '"""impersonate-stop"""'}), "('^stop/$', stop_impersonate, name='impersonate-stop')\n", (240, 294), False, 'from django.conf.urls import url\n'), ((317, 417), 'django.conf.urls.url', 'url', (['"""^list/$"""', 'list_users', "{'template': 'impersonate/list_users.html'}"], {'name': '"""impersonate-list"""'}), "('^list/$', list_users, {'template': 'impersonate/list_users.html'},\n name='impersonate-list')\n", (320, 417), False, 'from django.conf.urls import url\n'), ((444, 553), 'django.conf.urls.url', 'url', (['"""^search/$"""', 'search_users', "{'template': 'impersonate/search_users.html'}"], {'name': '"""impersonate-search"""'}), "('^search/$', search_users, {'template': 'impersonate/search_users.html'\n }, name='impersonate-search')\n", (447, 553), False, 'from django.conf.urls import url\n'), ((579, 639), 'django.conf.urls.url', 'url', (['"""^(?P<uid>.+)/$"""', 'impersonate'], {'name': '"""impersonate-start"""'}), "('^(?P<uid>.+)/$', impersonate, name='impersonate-start')\n", (582, 639), False, 'from django.conf.urls import url\n'), ((704, 730), 'django.conf.urls.patterns', 'patterns', (['""""""', '*urlpatterns'], {}), "('', *urlpatterns)\n", (712, 730), False, 'from django.conf.urls import patterns\n')]
|